Fetch batch translate job details using the SDK

I was trying to find the LanguagePair and Operation associated with a given AWS Translate job using the Java SDK.
Using the AWS web console, I created a couple of batch jobs to translate a few English sentences to French. In CloudWatch, I could see the metric dimensions as
LanguagePair: en-fr
Operation: TranslateText
Can I retrieve the same information (LanguagePair and Operation) for a given job
using the TranslateAsyncClient.describeTextTranslationJob(...) method?

You can use the TranslateClient to describe a translation job, given the job ID as input. The example below uses the synchronous TranslateClient; you can use the async version as well (see the sketch after the example). The source and target language codes in the response are what CloudWatch rolls up into the LanguagePair dimension.
import software.amazon.awssdk.regions.Region;
import software.amazon.awssdk.services.translate.TranslateClient;
import software.amazon.awssdk.services.translate.model.DescribeTextTranslationJobRequest;
import software.amazon.awssdk.services.translate.model.DescribeTextTranslationJobResponse;
import software.amazon.awssdk.services.translate.model.TranslateException;
/**
* To run this Java V2 code example, ensure that you have set up your development environment, including your credentials.
*
* For information, see this documentation topic:
*
* https://docs.aws.amazon.com/sdk-for-java/latest/developer-guide/get-started.html
*/
public class DescribeTextTranslationJob {
    public static void main(String[] args) {
        final String USAGE = "\n" +
            "Usage:\n" +
            "    DescribeTextTranslationJob <id> \n\n" +
            "Where:\n" +
            "    id - a translation job ID value. You can obtain this value from the BatchTranslation example.\n";

        if (args.length != 1) {
            System.out.println(USAGE);
            System.exit(1);
        }

        String id = args[0];
        Region region = Region.US_WEST_2;
        TranslateClient translateClient = TranslateClient.builder()
            .region(region)
            .build();

        describeTextTranslationJob(translateClient, id);
        translateClient.close();
    }

    // snippet-start:[translate.java2._describe_jobs.main]
    public static void describeTextTranslationJob(TranslateClient translateClient, String id) {
        try {
            DescribeTextTranslationJobRequest textTranslationJobRequest = DescribeTextTranslationJobRequest.builder()
                .jobId(id)
                .build();

            DescribeTextTranslationJobResponse jobResponse = translateClient.describeTextTranslationJob(textTranslationJobRequest);
            System.out.println("The job status is " + jobResponse.textTranslationJobProperties().jobStatus());
            System.out.println("The source language is " + jobResponse.textTranslationJobProperties().sourceLanguageCode());
            System.out.println("The target language is " + jobResponse.textTranslationJobProperties().targetLanguageCodes());
        } catch (TranslateException e) {
            System.err.println(e.getMessage());
            System.exit(1);
        }
        // snippet-end:[translate.java2._describe_jobs.main]
    }
}
To see all the data you can get back using this code, see this JavaDoc - https://sdk.amazonaws.com/java/api/latest/software/amazon/awssdk/services/translate/model/TextTranslationJobProperties.html
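If you specifically want the TranslateAsyncClient you asked about, the same operation is available there and returns a CompletableFuture. A minimal sketch, assuming the same job ID input (id) as above:
import software.amazon.awssdk.regions.Region;
import software.amazon.awssdk.services.translate.TranslateAsyncClient;
import software.amazon.awssdk.services.translate.model.DescribeTextTranslationJobRequest;
import software.amazon.awssdk.services.translate.model.DescribeTextTranslationJobResponse;

TranslateAsyncClient asyncClient = TranslateAsyncClient.builder()
    .region(Region.US_WEST_2)
    .build();

DescribeTextTranslationJobRequest request = DescribeTextTranslationJobRequest.builder()
    .jobId(id)
    .build();

// The async client returns a CompletableFuture; join() blocks until the response arrives.
DescribeTextTranslationJobResponse response = asyncClient.describeTextTranslationJob(request).join();
System.out.println("Source language: " + response.textTranslationJobProperties().sourceLanguageCode());
System.out.println("Target languages: " + response.textTranslationJobProperties().targetLanguageCodes());
asyncClient.close();
Note that the response gives you the source and target language codes (the LanguagePair, en-fr in your case), but as far as I can see there is no Operation field on the job properties; the Operation dimension in CloudWatch is the name of the metered API operation.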

Related

How to enter data from Spring Boot Application into Amazon Kinesis?

I want to add data into Kinesis using a Spring Boot application and React. I am a complete beginner when it comes to Kinesis, AWS, etc., so a beginner-friendly guide would be appreciated.
To add data records into an Amazon Kinesis data stream from a Spring Boot app, you can use the AWS SDK for Java V2, specifically the Amazon Kinesis Java API with software.amazon.awssdk.services.kinesis.KinesisClient.
Because you are a beginner, I recommend that you read the AWS SDK for Java V2 Developer Guide to become familiar with how to work with this Java API. See Developer guide - AWS SDK for Java 2.x.
Here is a code example that shows you how to add data records using this service client. The GitHub repository for the examples has the other required classes (StockTrade, StockTradeGenerator).
package com.example.kinesis;
//snippet-start:[kinesis.java2.putrecord.import]
import software.amazon.awssdk.auth.credentials.ProfileCredentialsProvider;
import software.amazon.awssdk.core.SdkBytes;
import software.amazon.awssdk.regions.Region;
import software.amazon.awssdk.services.kinesis.KinesisClient;
import software.amazon.awssdk.services.kinesis.model.PutRecordRequest;
import software.amazon.awssdk.services.kinesis.model.KinesisException;
import software.amazon.awssdk.services.kinesis.model.DescribeStreamRequest;
import software.amazon.awssdk.services.kinesis.model.DescribeStreamResponse;
//snippet-end:[kinesis.java2.putrecord.import]
/**
* Before running this Java V2 code example, set up your development environment, including your credentials.
*
* For more information, see the following documentation topic:
*
* https://docs.aws.amazon.com/sdk-for-java/latest/developer-guide/get-started.html
*/
public class StockTradesWriter {
    public static void main(String[] args) {
        final String usage = "\n" +
            "Usage:\n" +
            "    <streamName>\n\n" +
            "Where:\n" +
            "    streamName - The Amazon Kinesis data stream to which records are written (for example, StockTradeStream)\n\n";

        if (args.length != 1) {
            System.out.println(usage);
            System.exit(1);
        }

        String streamName = args[0];
        Region region = Region.US_EAST_1;
        KinesisClient kinesisClient = KinesisClient.builder()
            .region(region)
            .credentialsProvider(ProfileCredentialsProvider.create())
            .build();

        // Ensure that the Kinesis stream is valid.
        validateStream(kinesisClient, streamName);
        setStockData(kinesisClient, streamName);
        kinesisClient.close();
    }

    // snippet-start:[kinesis.java2.putrecord.main]
    public static void setStockData(KinesisClient kinesisClient, String streamName) {
        try {
            // Repeatedly send stock trades with a 100-millisecond wait in between.
            StockTradeGenerator stockTradeGenerator = new StockTradeGenerator();

            // Put 50 records for this example.
            int index = 50;
            for (int x = 0; x < index; x++) {
                StockTrade trade = stockTradeGenerator.getRandomTrade();
                sendStockTrade(trade, kinesisClient, streamName);
                Thread.sleep(100);
            }
        } catch (KinesisException | InterruptedException e) {
            System.err.println(e.getMessage());
            System.exit(1);
        }
        System.out.println("Done");
    }

    private static void sendStockTrade(StockTrade trade, KinesisClient kinesisClient,
                                       String streamName) {
        byte[] bytes = trade.toJsonAsBytes();

        // The bytes could be null if there is an issue with the JSON serialization by the Jackson JSON library.
        if (bytes == null) {
            System.out.println("Could not get JSON bytes for stock trade");
            return;
        }

        System.out.println("Putting trade: " + trade);
        PutRecordRequest request = PutRecordRequest.builder()
            .partitionKey(trade.getTickerSymbol()) // We use the ticker symbol as the partition key.
            .streamName(streamName)
            .data(SdkBytes.fromByteArray(bytes))
            .build();

        try {
            kinesisClient.putRecord(request);
        } catch (KinesisException e) {
            // Report the failure instead of silently discarding it.
            System.err.println(e.getMessage());
        }
    }

    private static void validateStream(KinesisClient kinesisClient, String streamName) {
        try {
            DescribeStreamRequest describeStreamRequest = DescribeStreamRequest.builder()
                .streamName(streamName)
                .build();

            DescribeStreamResponse describeStreamResponse = kinesisClient.describeStream(describeStreamRequest);

            if (!describeStreamResponse.streamDescription().streamStatus().toString().equals("ACTIVE")) {
                System.err.println("Stream " + streamName + " is not active. Please wait a few moments and try again.");
                System.exit(1);
            }
        } catch (KinesisException e) {
            System.err.println("Error found while describing the stream " + streamName);
            System.err.println(e);
            System.exit(1);
        }
    }
    // snippet-end:[kinesis.java2.putrecord.main]
}
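Since you mentioned Spring Boot and React: once the KinesisClient logic above works, you can put it behind a REST endpoint that your React front end calls. A minimal sketch, where the /records path, the RecordController class, and the partition key are my assumptions (not part of the original example):
package com.example.kinesis;

import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RestController;
import software.amazon.awssdk.core.SdkBytes;
import software.amazon.awssdk.regions.Region;
import software.amazon.awssdk.services.kinesis.KinesisClient;
import software.amazon.awssdk.services.kinesis.model.PutRecordRequest;

@RestController
public class RecordController {

    // Create the client once and reuse it; do not build a new client per request.
    private final KinesisClient kinesisClient = KinesisClient.builder()
        .region(Region.US_EAST_1)
        .build();

    // React can POST a JSON payload to this endpoint; the body is written to the stream as-is.
    @PostMapping("/records")
    public String putRecord(@RequestBody String payload) {
        PutRecordRequest request = PutRecordRequest.builder()
            .streamName("StockTradeStream")  // assumed stream name, matching the example above
            .partitionKey("demo-key")        // pick a key that spreads records across shards
            .data(SdkBytes.fromUtf8String(payload))
            .build();
        return kinesisClient.putRecord(request).sequenceNumber();
    }
}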

AWS Cognito `connection pool shutdown`

In my Spring Boot project, I first create a user pool in Cognito and then add a user to the user pool. When adding the user to the user pool, I need to create a user pool client.
I am creating the user pool client, with all the required params, with
CreateUserPoolClientResponse response = cognitoClient.createUserPoolClient(clientRequest);
but it's behaving strangely. When I run the code, it throws "connection pool shut down", but when I run it in debug mode with a breakpoint, it is able to create the user pool client successfully. Is the creation of the connection pool an async process, so that with the debugger it gets some time before creating it?
Has anyone faced this issue? I would appreciate any help. Thanks.
Try running the AWS SDK for Java V2 for this use case. Here is the code for V2.
package com.example.cognito;
//snippet-start:[cognito.java2.user_pool.create_user_pool_client.import]
import software.amazon.awssdk.regions.Region;
import software.amazon.awssdk.services.cognitoidentityprovider.CognitoIdentityProviderClient;
import software.amazon.awssdk.services.cognitoidentityprovider.model.CognitoIdentityProviderException;
import software.amazon.awssdk.services.cognitoidentityprovider.model.CreateUserPoolClientRequest;
import software.amazon.awssdk.services.cognitoidentityprovider.model.CreateUserPoolClientResponse;
//snippet-end:[cognito.java2.user_pool.create_user_pool_client.import]
/**
* To run this Java V2 code example, ensure that you have set up your development environment, including your credentials.
*
* For information, see this documentation topic:
*
* https://docs.aws.amazon.com/sdk-for-java/latest/developer-guide/get-started.html
*/
public class CreateUserPoolClient {
    public static void main(String[] args) {
        final String USAGE = "\n" +
            "Usage:\n" +
            "    <clientName> <userPoolId> \n\n" +
            "Where:\n" +
            "    clientName - the name for the user pool client to create.\n\n" +
            "    userPoolId - the ID for the user pool.\n\n";

        if (args.length != 2) {
            System.out.println(USAGE);
            System.exit(1);
        }

        String clientName = args[0];
        String userPoolId = args[1];
        CognitoIdentityProviderClient cognitoClient = CognitoIdentityProviderClient.builder()
            .region(Region.US_EAST_1)
            .build();

        createPoolClient(cognitoClient, clientName, userPoolId);
        cognitoClient.close();
    }

    //snippet-start:[cognito.java2.user_pool.create_user_pool_client.main]
    public static void createPoolClient(CognitoIdentityProviderClient cognitoClient,
                                        String clientName,
                                        String userPoolId) {
        try {
            CreateUserPoolClientResponse response = cognitoClient.createUserPoolClient(
                CreateUserPoolClientRequest.builder()
                    .clientName(clientName)
                    .userPoolId(userPoolId)
                    .build()
            );

            System.out.println("User pool client " + response.userPoolClient().clientName() + " created. ID: " + response.userPoolClient().clientId());
        } catch (CognitoIdentityProviderException e) {
            System.err.println(e.awsErrorDetails().errorMessage());
            System.exit(1);
        }
    }
    //snippet-end:[cognito.java2.user_pool.create_user_pool_client.main]
}
I just ran this in a unit test and it worked fine.
You can find this one and other code examples for this service here:
https://github.com/awsdocs/aws-doc-sdk-examples/tree/main/javav2/example_code/cognito
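The unit test I mentioned is just a thin wrapper around createPoolClient. A minimal sketch, assuming JUnit 5 and placeholder values for the client name and user pool ID:
package com.example.cognito;

import org.junit.jupiter.api.Test;
import software.amazon.awssdk.regions.Region;
import software.amazon.awssdk.services.cognitoidentityprovider.CognitoIdentityProviderClient;

public class CreateUserPoolClientTest {

    @Test
    public void createPoolClient() {
        CognitoIdentityProviderClient cognitoClient = CognitoIdentityProviderClient.builder()
            .region(Region.US_EAST_1)
            .build();

        // Placeholder values; use a real user pool ID from your account.
        CreateUserPoolClient.createPoolClient(cognitoClient, "test-app-client", "us-east-1_EXAMPLE");
        cognitoClient.close();
    }
}
One common cause of "connection pool shut down" is calling an SDK client after close() has been called on it, so it is also worth checking that the client instance your Spring code reuses is still open.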

Put event in Amazon event bus using JMeter

We are introducing an EventBridge bus to communicate between 2 components, and we want to performance test the inbound event bus.
We are using JMeter and want to do a put event on this inbound event bus. Has anyone done something like this?
Probably the most straightforward way is using the AWS SDK for Java from JSR223 Test Elements with Groovy.
Example code can be found at Working with Amazon EventBridge; I'll add it here just in case (see the condensed JSR223 version after the full class below).
/*
Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
SPDX-License-Identifier: Apache-2.0
*/
package com.example.eventbridge;
import software.amazon.awssdk.regions.Region;
import software.amazon.awssdk.services.eventbridge.EventBridgeClient;
import software.amazon.awssdk.services.eventbridge.model.EventBridgeException;
import software.amazon.awssdk.services.eventbridge.model.PutEventsRequest;
import software.amazon.awssdk.services.eventbridge.model.PutEventsRequestEntry;
import software.amazon.awssdk.services.eventbridge.model.PutEventsResponse;
import software.amazon.awssdk.services.eventbridge.model.PutEventsResultEntry;
import java.util.ArrayList;
import java.util.List;
/**
* To run this Java V2 code example, ensure that you have set up your development environment, including your credentials.
*
* For information, see this documentation topic:
*
* https://docs.aws.amazon.com/sdk-for-java/latest/developer-guide/get-started.html
*/
public class PutEvents {
    public static void main(String[] args) {
        final String USAGE =
            "To run this example, supply two resources, identified by Amazon Resource Name (ARN), which the event primarily concerns. " +
            "Any number, including zero, may be present. \n" +
            "For example: PutEvents <resourceArn> <resourceArn2>\n";

        if (args.length != 2) {
            System.out.println(USAGE);
            System.exit(1);
        }

        String resourceArn = args[0];
        String resourceArn2 = args[1];
        Region region = Region.US_WEST_2;
        EventBridgeClient eventBrClient = EventBridgeClient.builder()
            .region(region)
            .build();

        putEBEvents(eventBrClient, resourceArn, resourceArn2);
        eventBrClient.close();
    }

    public static void putEBEvents(EventBridgeClient eventBrClient, String resourceArn, String resourceArn2) {
        try {
            // Populate a List with the resource ARN values.
            List<String> resources = new ArrayList<String>();
            resources.add(resourceArn);
            resources.add(resourceArn2);

            PutEventsRequestEntry reqEntry = PutEventsRequestEntry.builder()
                .resources(resources)
                .source("com.mycompany.myapp")
                .detailType("myDetailType")
                .detail("{ \"key1\": \"value1\", \"key2\": \"value2\" }")
                .build();

            // Add the PutEventsRequestEntry to a list and submit that list with the request.
            List<PutEventsRequestEntry> list = new ArrayList<PutEventsRequestEntry>();
            list.add(reqEntry);

            PutEventsRequest eventsRequest = PutEventsRequest.builder()
                .entries(list)
                .build();

            PutEventsResponse result = eventBrClient.putEvents(eventsRequest);
            for (PutEventsResultEntry resultEntry : result.entries()) {
                if (resultEntry.eventId() != null) {
                    System.out.println("Event Id: " + resultEntry.eventId());
                } else {
                    System.out.println("Injection failed with Error Code: " + resultEntry.errorCode());
                }
            }
        } catch (EventBridgeException e) {
            System.err.println(e.awsErrorDetails().errorMessage());
            System.exit(1);
        }
    }
}
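For the JSR223 element itself you do not need the class scaffolding or the main method. Groovy accepts plain Java syntax, so the sampler script can be reduced to the core calls; a sketch, assuming the EventBridge SDK jars are on JMeter's classpath and credentials come from the default provider chain:
import software.amazon.awssdk.regions.Region;
import software.amazon.awssdk.services.eventbridge.EventBridgeClient;
import software.amazon.awssdk.services.eventbridge.model.PutEventsRequest;
import software.amazon.awssdk.services.eventbridge.model.PutEventsRequestEntry;
import software.amazon.awssdk.services.eventbridge.model.PutEventsResponse;

EventBridgeClient client = EventBridgeClient.builder()
    .region(Region.US_WEST_2)
    .build();

PutEventsRequestEntry entry = PutEventsRequestEntry.builder()
    .source("com.mycompany.myapp")
    .detailType("myDetailType")
    .detail("{ \"key1\": \"value1\" }")
    .build();

PutEventsResponse response = client.putEvents(PutEventsRequest.builder()
    .entries(entry)
    .build());

// The JSR223 "log" binding writes to the JMeter log for debugging.
log.info("EventBridge event id: " + response.entries().get(0).eventId());
client.close();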
I was able to do it by using the OS Process Sampler and the AWS CLI's put-events command.

Pentaho ETL Transformation Using Lambda

Is it possible to run Pentaho ETL jobs/transformations using AWS Lambda functions?
I have Pentaho ETL jobs running on a schedule on a Windows server, and we are planning to migrate to AWS. I am considering the Lambda function, just to understand if it is possible to schedule the Pentaho ETL jobs using AWS Lambda.
Here is the snippet of code that I was able to successfully run in an AWS Lambda function.
handleRequest is the function called by the AWS Lambda runtime:
public Integer handleRequest(String input, Context context) {
    parseInput(input);
    return executeKtr(transName);
}
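For context, handleRequest needs a surrounding handler class; a minimal skeleton, assuming the aws-lambda-java-core RequestHandler interface (the class name and the static fields are my assumptions, since they are not shown in the post):
import com.amazonaws.services.lambda.runtime.Context;
import com.amazonaws.services.lambda.runtime.RequestHandler;
import java.util.ArrayList;
import java.util.List;

public class PentahoKtrHandler implements RequestHandler<String, Integer> {

    // Shared state read by parseInput and executeKtr below.
    static String transName;
    static List<String> params = new ArrayList<>();

    @Override
    public Integer handleRequest(String input, Context context) {
        parseInput(input);
        return executeKtr(transName);
    }

    // parseInput, executeKtr, parameterSetting as shown in the following snippets...
}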
parseInput: This function parses the string parameter passed by the Lambda invocation, extracting the KTR name and its parameters with values. The format of the input is "ktrfilename param1=value1 param2=value2" (see the short usage sketch after the code).
public static void parseInput(String input) {
    String[] tokens = input.split(" ");
    transName = tokens[0].replace(".ktr", "") + ".ktr";
    for (int i = 1; i < tokens.length; i++) {
        params.add(tokens[i]);
    }
}
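For illustration, a hypothetical input and what parseInput makes of it:
// Lambda input string: "daily_sales param1=US param2=2023"
parseInput("daily_sales param1=US param2=2023");
// transName -> "daily_sales.ktr" (the .ktr suffix is normalized either way)
// params    -> ["param1=US", "param2=2023"]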
Executing KTR: I am using a Git repo to store all my KTR files, and the KTR named in the input is executed:
public static Integer executeKtr(String ktrName) {
    try {
        System.out.println("Present Project Directory : " + System.getProperty("user.dir"));
        String transName = ktrName.replace(".ktr", "") + ".ktr";
        String gitURI = awsSSM.getParaValue("kattle-trans-git-url");
        String repoLocalPath = clonePDIrepo.cloneRepo(gitURI);
        String path = new File(repoLocalPath + "/" + transName).getAbsolutePath();
        File ktrFile = new File(path);
        System.out.println("KTR Path: " + path);

        try {
            /**
             * IMPORTANT NOTE: IN A LAMBDA FUNCTION YOU MUST CREATE THE .kettle DIRECTORY,
             * OTHERWISE THE CODE WILL FAIL IN THE LAMBDA FUNCTION WITH AN ERROR THAT IT
             * CAN'T CREATE THE .kettle/kettle.properties FILE.
             *
             * ALSO SET AN ENVIRONMENT VARIABLE ON THE LAMBDA FUNCTION TO POINT
             * KETTLE_HOME=/tmp/.kettle
             */
            Files.createDirectories(Paths.get("/tmp/.kettle"));
        } catch (IOException e) {
            e.printStackTrace();
            throw new RuntimeException("Error Creating /tmp/.kettle directory");
        }

        if (ktrFile.exists()) {
            KettleEnvironment.init();
            TransMeta metaData = new TransMeta(path);
            Trans trans = new Trans(metaData);

            // SETTING PARAMETERS
            trans = parameterSetting(trans);

            trans.execute(null);
            trans.waitUntilFinished();
            if (trans.getErrors() > 0) {
                System.out.print("Error Executing transformation");
                throw new RuntimeException("There are errors in running transformations");
            } else {
                System.out.print("Successfully Executed Transformation");
                return 1;
            }
        } else {
            System.out.print("KTR File:" + path + " not found in repo");
            throw new RuntimeException("KTR File:" + path + " not found in repo");
        }
    } catch (KettleException e) {
        e.printStackTrace();
        throw new RuntimeException(e.getMessage());
    }
}
parameterSetting: If the KTR accepts parameters and they are passed when calling the AWS Lambda function, they are set using the parameterSetting function:
public static Trans parameterSetting(Trans trans) {
    String[] transParams = trans.listParameters();
    for (String param : transParams) {
        for (String p : params) {
            String name = p.split("=")[0];
            String val = p.split("=")[1];
            if (name.trim().equals(param.trim())) {
                try {
                    System.out.println("Setting Parameter:" + name + "=" + val);
                    trans.setParameterValue(name, val);
                } catch (UnknownParamException e) {
                    e.printStackTrace();
                }
            }
        }
    }
    trans.activateParameters();
    return trans;
}
CloneGitRepo:
public class clonePDIrepo {
    /**
     * Clones the given repo to a local folder.
     *
     * @param pathWithPwd Git repo URL with the access token included in the URL, e.g.
     *                    https://token_name:token_value@github.com/ktr-git-repo.git
     * @return the local repository path as a String
     */
    public static String cloneRepo(String pathWithPwd) {
        try {
            /**
             * CREATING A TEMP DIR TO AVOID A "FOLDER EXISTS" ERROR; THIS TEMP DIRECTORY CAN
             * LATER BE USED TO GET THE ABSOLUTE PATH FOR FILES IN THE DIRECTORY.
             */
            File pdiLocalPath = Files.createTempDirectory("repodir").toFile();
            Git git = Git.cloneRepository().setURI(pathWithPwd).setDirectory(pdiLocalPath).call();
            System.out.println("Git repository cloned successfully");
            System.out.println("Local Repository Path:" + pdiLocalPath.getAbsolutePath());
            return pdiLocalPath.getAbsolutePath();
        } catch (Exception e) {
            e.printStackTrace();
            return null;
        }
    }
}
AWSSSMgetParaValue: Gets the string value of the parameter passed:
public static String getParaValue(String paraName) {
    try {
        Region region = Region.US_EAST_1;
        SsmClient ssmClient = SsmClient.builder()
            .region(region)
            .build();

        GetParameterRequest parameterRequest = GetParameterRequest.builder()
            .name(paraName)
            .withDecryption(true)
            .build();

        GetParameterResponse parameterResponse = ssmClient.getParameter(parameterRequest);
        System.out.println(paraName + " value retrieved from AWS SSM");
        ssmClient.close();
        return parameterResponse.parameter().value();
    } catch (SsmException e) {
        System.err.println(e.getMessage());
        return null;
    }
}
Assumptions:
The Git repo is created with the KTR files in the root of the repo.
The Git repo URL exists in AWS SSM, with valid tokens to clone the repo.
The input string contains the name of the KTR file.
The KETTLE_HOME=/tmp/.kettle environment variable is configured on the Lambda function.
The Lambda function has the necessary permissions for SSM and the S3/VPC network.
Proper security group rules are set up to allow the required network access for the KTR file.
I am planning to upload the complete code to Git. I will update this post with the URL of the repository.

AWS QuickSight programmatic access

I have recently been involved in a project where I have to leverage the QuickSight APIs and update a dashboard programmatically. I can perform all the other actions, but I am unable to update the dashboard from a template. I have tried a couple of different ideas, but all in vain.
Is there anyone who has already worked with the UpdateDashboard API, or who can point me to some detailed documentation where I can understand if I am actually missing anything?
Thanks.
I got this to work using the AWS QuickSight Java V2 API. To make this work, you need to follow the quick start instructions here:
https://docs.aws.amazon.com/quicksight/latest/user/getting-started.html
You need to get these values:
account - your account number
dashboardId - the dashboard ID value
dataSetArn - the dataset ARN value
analysisArn - the analysis ARN value
Once you go through the above topics, you will have all of these resources and be ready to call UpdateDashboard. Here is a Java example that updates a dashboard.
package com.example.quicksight;
import software.amazon.awssdk.regions.Region;
import software.amazon.awssdk.services.quicksight.QuickSightClient;
import software.amazon.awssdk.services.quicksight.model.*;
/*
Before running this code example, follow the Getting Started with Data Analysis in Amazon QuickSight located here:
https://docs.aws.amazon.com/quicksight/latest/user/getting-started.html
This code example uses resources that you created by following that topic such as the DataSet Arn value.
*/
public class UpdateDashboard {
    public static void main(String[] args) {
        final String USAGE = "\n" +
            "Usage: UpdateDashboard <account> <dashboardId> <dataSetArn> <analysisArn>\n\n" +
            "Where:\n" +
            "    account - the account to use.\n\n" +
            "    dashboardId - the dashboard id value to use.\n\n" +
            "    dataSetArn - the ARN of the dataset.\n\n" +
            "    analysisArn - the ARN of an existing analysis";

        String account = "<account id>";
        String dashboardId = "<dashboardId>";
        String dataSetArn = "<dataSetArn>";
        String analysisArn = "<Analysis Arn>";

        QuickSightClient qsClient = QuickSightClient.builder()
            .region(Region.US_EAST_1)
            .build();

        try {
            DataSetReference dataSetReference = DataSetReference.builder()
                .dataSetArn(dataSetArn)
                .dataSetPlaceholder("Dataset placeholder2")
                .build();

            // Get a template ARN to use.
            String arn = getTemplateARN(qsClient, account, dataSetArn, analysisArn);

            DashboardSourceTemplate sourceTemplate = DashboardSourceTemplate.builder()
                .dataSetReferences(dataSetReference)
                .arn(arn)
                .build();

            DashboardSourceEntity sourceEntity = DashboardSourceEntity.builder()
                .sourceTemplate(sourceTemplate)
                .build();

            UpdateDashboardRequest dashboardRequest = UpdateDashboardRequest.builder()
                .awsAccountId(account)
                .dashboardId(dashboardId)
                .name("UpdateTest")
                .sourceEntity(sourceEntity)
                .themeArn("arn:aws:quicksight::aws:theme/SEASIDE")
                .build();

            UpdateDashboardResponse response = qsClient.updateDashboard(dashboardRequest);
            System.out.println("Dashboard " + response.dashboardId() + " has been updated");
        } catch (QuickSightException e) {
            System.err.println(e.awsErrorDetails().errorMessage());
            System.exit(1);
        }
    }

    private static String getTemplateARN(QuickSightClient qsClient, String account, String dataset, String analysisArn) {
        String arn = "";
        try {
            DataSetReference setReference = DataSetReference.builder()
                .dataSetArn(dataset)
                .dataSetPlaceholder("Dataset placeholder2")
                .build();

            TemplateSourceAnalysis templateSourceAnalysis = TemplateSourceAnalysis.builder()
                .dataSetReferences(setReference)
                .arn(analysisArn)
                .build();

            TemplateSourceEntity sourceEntity = TemplateSourceEntity.builder()
                .sourceAnalysis(templateSourceAnalysis)
                .build();

            CreateTemplateRequest createTemplateRequest = CreateTemplateRequest.builder()
                .awsAccountId(account)
                .name("NewTemplate")
                .sourceEntity(sourceEntity)
                .templateId("a9a277fb-7239-4890-bc7a-8a3e82d67a37") // Specify a GUID value
                .build();

            CreateTemplateResponse response = qsClient.createTemplate(createTemplateRequest);
            arn = response.arn();
        } catch (QuickSightException e) {
            System.err.println(e.awsErrorDetails().errorMessage());
            System.exit(1);
        }
        return arn;
    }
}