I have a custom training job that I run on a fixed schedule using Cloud Scheduler. When I create the scheduler job using either the Python client or the GCP console, the job runs fine. However, when I create it using the Java SDK, the job gets created but it fails. The SUMMARY of the error message I get in Cloud Logging is:
{"#type":"type.googleapis.com/google.cloud.scheduler.logging.AttemptFinished", "jobName":"projects/{my_project_id}/locations/us-central1/jobs/java_job", "status":"INVALID_ARGUMENT", "targetType":"HTTP", "url":"https://us-central1-aiplatform.googleapis.com/v1/projects/{my_project_id}/locations/us-central1/customJobs"}
I looked at the jobs in the GCP console; all fields for the three jobs (the one created using the Python client, the one created using the Java SDK, and the one created directly in the console) are the same. I cannot figure out why the job created using the Java SDK keeps failing.
Java SDK code:
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import com.google.cloud.scheduler.v1.Job;
import com.google.cloud.scheduler.v1.LocationName;
import com.google.cloud.scheduler.v1.OAuthToken;
import com.google.protobuf.ByteString;
import com.google.cloud.scheduler.v1.CloudSchedulerClient;
import com.google.cloud.scheduler.v1.HttpMethod;
import com.google.cloud.scheduler.v1.HttpTarget;
public class Temp
{
static String projectId = "...";
static String location = "...";
static String serviceAccountEmail = "...-compute@developer.gserviceaccount.com";
static String outputUriPrefix = "gs://.../.../";
static String imageUri = String.format("%s-docker.pkg.dev/%s/.../...", location, projectId);
static String trainingJobName = "custom_training_job";
static String schedulerJobName = String.format("projects/%s/locations/%s/jobs/java_job", projectId, location);
static String scope = "https://www.googleapis.com/auth/cloud-platform";
static String httpTargetUri = String.format("https://%s-aiplatform.googleapis.com/v1/projects/%s/locations/%s/customJobs",
location, projectId, location);
static String machineType = "n1-standard-4";
static long replicaCount = 1;
static String getJobBody() throws JSONException {
JSONObject jobBody = new JSONObject();
jobBody.put("display_name", trainingJobName);
JSONObject base_output_directory = new JSONObject();
base_output_directory.put("output_uri_prefix", outputUriPrefix);
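// NOTE: the bug described in the EDIT below is on the next line:
// base_output_directory must be nested inside job_spec, not set at the top level.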
jobBody.put("base_output_directory", base_output_directory);
JSONObject jobSpec = new JSONObject();
JSONArray worker_pool_specs = new JSONArray();
JSONObject spec = new JSONObject();
spec.put("replica_count", replicaCount);
JSONObject machine_spec = new JSONObject();
machine_spec.put("machine_type", machineType);
spec.put("machine_spec", machine_spec);
JSONObject container_spec = new JSONObject();
container_spec.put("image_uri", imageUri);
JSONArray args = new JSONArray();
args.put("--msg=hello!");
container_spec.put("args", args);
spec.put("container_spec", container_spec);
worker_pool_specs.put(spec);
jobSpec.put("worker_pool_specs", worker_pool_specs);
jobBody.put("job_spec", jobSpec);
return jobBody.toString();
}
public static void main( String[] args ) throws IOException, JSONException
{
System.out.println(String.format("=======STARTING APPLICATION, version %s =======", "v5"));
CloudSchedulerClient client = CloudSchedulerClient.create();
String parent = LocationName.of(projectId, location).toString();
Map<String, String> headers = new HashMap<String, String>();
headers.put("User-Agent", "Google-Cloud-Scheduler");
headers.put("Content-Type", "application/json; charset=utf-8");
OAuthToken token = OAuthToken.newBuilder()
.setServiceAccountEmail(serviceAccountEmail)
.setScope(scope)
.build();
HttpTarget httpTarget = HttpTarget.newBuilder()
.setUri(httpTargetUri)
.setHttpMethod(HttpMethod.POST)
.putAllHeaders(headers)
.setBody(ByteString.copyFromUtf8(getJobBody()))
.setOauthToken(token)
.build();
Job job = Job.newBuilder()
.setName(schedulerJobName)
.setDescription("test java job")
.setSchedule("* * * * *")
.setTimeZone("Africa/Abidjan")
.setHttpTarget(httpTarget)
.build();
client.createJob(parent, job);
client.close();
}
}
Python Client code:
from google.cloud import scheduler
import json
project_id = "..."
location = "..."
service_account_email = "...-compute@developer.gserviceaccount.com"
output_uri_prefix="gs://.../.../"
image_uri=f'{location}-docker.pkg.dev/{project_id}/.../...'
training_job_name = "custom_training_job"
scheduler_job_name = f'projects/{project_id}/locations/{location}/jobs/python_job'
scope = "https://www.googleapis.com/auth/cloud-platform"
http_target_uri = f'https://{location}-aiplatform.googleapis.com/v1/projects/{project_id}/locations/{location}/customJobs'
machine_type = "n1-standard-4"
replica_count = 1
job_spec = {
"display_name": traning_job__name,
"job_spec": {
"worker_pool_specs": [
{
"machine_spec": {
"machine_type": machine_type,
},
"replica_count": replica_count,
"container_spec": {
"image_uri": image_uri,
"args": [
"--msg=hello!"
]
}
}
],
"base_output_directory": {
"output_uri_prefix": output_uri_prefix
}
}
}
job = {
"name": scheduler_job_name,
"description": "Created from Python client",
"http_target": {
"uri": http_target_uri,
"http_method": "POST",
"headers": {
"User-Agent": "Google-Cloud-Scheduler",
"Content-Type": "application/json; charset=utf-8"
},
"body": json.dumps(job_spec).encode('utf-8'),
"oauth_token": {
"service_account_email": service_account_email,
"scope": scope
}
},
"schedule": "* * * * *",
"time_zone": "Africa/Abidjan"
}
client = scheduler.CloudSchedulerClient()
parent = f'projects/{project_id}/locations/{location}'
response = client.create_job(parent = parent, job = job)
EDIT
The problem was that in the getJobBody function, I was setting base_output_directory as a top-level field, whereas it should be a nested field inside job_spec. The problem is solved, but is there a better way to do this? I know there is a CustomJobSpec class, but I could not find a way to convert it into a JSON-style string.
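For reference, the corrected portion of getJobBody only changes where the field is attached (matching the nesting in the Python body below):
JSONObject jobSpec = new JSONObject();
// base_output_directory now lives inside job_spec instead of the top-level body.
jobSpec.put("base_output_directory", base_output_directory);
jobBody.put("job_spec", jobSpec);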
As mentioned in the edit, the problem was that in the getJobBody function, base_output_directory was being set as a top-level field, whereas it should be a nested field inside job_spec. So currently, as far as I know, the way to avoid this mistake is to construct the jobBody carefully; I don't know of a fully structured way to do this.
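One more structured option, which I have not fully verified (a sketch assuming the google-cloud-aiplatform and protobuf-java-util artifacts are on the classpath), is to build the body from the generated CustomJob protobuf classes and serialize it with protobuf's JsonFormat, so a misplaced field becomes a compile-time error instead of an INVALID_ARGUMENT at schedule time:
import com.google.cloud.aiplatform.v1.ContainerSpec;
import com.google.cloud.aiplatform.v1.CustomJob;
import com.google.cloud.aiplatform.v1.CustomJobSpec;
import com.google.cloud.aiplatform.v1.GcsDestination;
import com.google.cloud.aiplatform.v1.MachineSpec;
import com.google.cloud.aiplatform.v1.WorkerPoolSpec;
import com.google.protobuf.InvalidProtocolBufferException;
import com.google.protobuf.util.JsonFormat;

static String getJobBody() throws InvalidProtocolBufferException {
    CustomJob customJob = CustomJob.newBuilder()
            .setDisplayName(trainingJobName)
            .setJobSpec(CustomJobSpec.newBuilder()
                    .addWorkerPoolSpecs(WorkerPoolSpec.newBuilder()
                            .setReplicaCount(replicaCount)
                            .setMachineSpec(MachineSpec.newBuilder()
                                    .setMachineType(machineType))
                            .setContainerSpec(ContainerSpec.newBuilder()
                                    .setImageUri(imageUri)
                                    .addArgs("--msg=hello!")))
                    // The type system forces base_output_directory into job_spec.
                    .setBaseOutputDirectory(GcsDestination.newBuilder()
                            .setOutputUriPrefix(outputUriPrefix)))
            .build();
    // JsonFormat emits camelCase field names (e.g. workerPoolSpecs), which the
    // aiplatform REST endpoint accepts alongside snake_case.
    return JsonFormat.printer().print(customJob);
}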
Related
I am using WSO2 IS 5.10. To add a custom claim that needs to be fetched from the DB, I am using a custom adaptive function, but the code below is not working.
package org.wso2.custom.auth.functions;
import java.util.HashMap;
import java.util.Map;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.wso2.carbon.identity.application.authentication.framework.config.model.graph.js.JsAuthenticatedUser;
import org.wso2.carbon.identity.application.authentication.framework.config.model.graph.js.JsAuthenticationContext;
import org.wso2.carbon.identity.application.authentication.framework.model.AuthenticatedUser;
import org.wso2.carbon.utils.multitenancy.MultitenantUtils;
import org.wso2.custom.auth.functions.internal.CustomAuthFuncComponent;
import org.wso2.carbon.identity.application.authentication.framework.config.model.graph.js.*;
public class SetForceAuthFunctionImpl implements SetForceAuthFunction {
private static final Log LOGGER = LogFactory.getLog(SetForceAuthFunctionImpl.class);
@Override
public JsAuthenticatedUser setForceAuth(JsAuthenticationContext context, boolean forceAuth) {
AuthenticatedUser lastAuthenticatedUser = context.getContext().getLastAuthenticatedUser();
LOGGER.info("lastAuthenticatedUser****:::::::::::"+lastAuthenticatedUser);
String userName = lastAuthenticatedUser.getUserName();
LOGGER.info("userName2****:::::::::::"+userName);
String tenantDomain = MultitenantUtils.getTenantDomain(userName);
String fullyQualifiedUserName = ("USERS" + "/" + userName + "@" + tenantDomain);
Map<org.wso2.carbon.identity.application.common.model.ClaimMapping, String> claims = new HashMap<org.wso2.carbon.identity.application.common.model.ClaimMapping, String>();
claims.put(org.wso2.carbon.identity.application.common.model.ClaimMapping.build("test123", "test123", null, true), org.apache.commons.lang3.StringUtils.join("*******************",",,,"));
AuthenticatedUser authenticatedUserObj = AuthenticatedUser.createLocalAuthenticatedUserFromSubjectIdentifier(MultitenantUtils.getTenantAwareUsername(fullyQualifiedUserName));
authenticatedUserObj.setAuthenticatedSubjectIdentifier(MultitenantUtils.getTenantAwareUsername(fullyQualifiedUserName));
authenticatedUserObj.setUserAttributes(claims);
authenticatedUserObj.setUserName(MultitenantUtils.getTenantAwareUsername(fullyQualifiedUserName));
return new JsAuthenticatedUser(authenticatedUserObj);
}
}
package org.wso2.custom.auth.functions.internal;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.osgi.service.component.ComponentContext;
import org.osgi.service.component.annotations.Activate;
import org.osgi.service.component.annotations.Component;
import org.osgi.service.component.annotations.Deactivate;
import org.osgi.service.component.annotations.Reference;
import org.osgi.service.component.annotations.ReferenceCardinality;
import org.osgi.service.component.annotations.ReferencePolicy;
import org.wso2.carbon.identity.application.authentication.framework.JsFunctionRegistry;
import org.wso2.carbon.registry.core.service.RegistryService;
import org.wso2.carbon.user.core.service.RealmService;
import org.wso2.custom.auth.functions.GenerateHashFunction;
import org.wso2.custom.auth.functions.GenerateHashFunctionImpl;
import org.wso2.custom.auth.functions.GetClaimsForUsernameFunction;
import org.wso2.custom.auth.functions.GetClaimsForUsernameFunctionImpl;
import org.wso2.custom.auth.functions.GetUsernameFromContextFunction;
import org.wso2.custom.auth.functions.GetUsernameFromContextFunctionImpl;
import org.wso2.custom.auth.functions.SetForceAuthFunction;
import org.wso2.custom.auth.functions.SetForceAuthFunctionImpl;
@Component(
name = "custom.auth.functions.component",
immediate = true
)
public class CustomAuthFuncComponent {
private static final Log LOG = LogFactory.getLog(CustomAuthFuncComponent.class);
private static JsFunctionRegistry jsFunctionRegistry;
@Activate
protected void activate(ComponentContext ctxt) {
SetForceAuthFunction setForceAuthFunctionImpl = new SetForceAuthFunctionImpl();
jsFunctionRegistry.register(JsFunctionRegistry.Subsystem.SEQUENCE_HANDLER, "setForceAuth",
setForceAuthFunctionImpl);
GetUsernameFromContextFunction getUsernameFromContextFunctionImpl = new GetUsernameFromContextFunctionImpl();
jsFunctionRegistry.register(JsFunctionRegistry.Subsystem.SEQUENCE_HANDLER, "getUsernameFromContext",
getUsernameFromContextFunctionImpl);
GetClaimsForUsernameFunction getClaimsForUsernameFunctionImpl = new GetClaimsForUsernameFunctionImpl();
jsFunctionRegistry.register(JsFunctionRegistry.Subsystem.SEQUENCE_HANDLER, "getClaimsForUsername",
getClaimsForUsernameFunctionImpl);
GenerateHashFunction generateHashFunctionImpl = new GenerateHashFunctionImpl();
jsFunctionRegistry.register(JsFunctionRegistry.Subsystem.SEQUENCE_HANDLER, "generateHash",
generateHashFunctionImpl);
}
@Deactivate
protected void deactivate(ComponentContext ctxt) {
if (jsFunctionRegistry != null) {
jsFunctionRegistry.deRegister(JsFunctionRegistry.Subsystem.SEQUENCE_HANDLER, "setForceAuth");
jsFunctionRegistry.deRegister(JsFunctionRegistry.Subsystem.SEQUENCE_HANDLER, "getUsernameFromContext");
jsFunctionRegistry.deRegister(JsFunctionRegistry.Subsystem.SEQUENCE_HANDLER, "getClaimsForUsername");
jsFunctionRegistry.deRegister(JsFunctionRegistry.Subsystem.SEQUENCE_HANDLER, "generateHash");
}
}
@Reference(
name = "user.realmservice.default",
service = RealmService.class,
cardinality = ReferenceCardinality.MANDATORY,
policy = ReferencePolicy.DYNAMIC,
unbind = "unsetRealmService"
)
protected void setRealmService(RealmService realmService) {
if (LOG.isDebugEnabled()) {
LOG.debug("RealmService is set in the custom conditional authentication user functions bundle");
}
CustomAuthFuncHolder.getInstance().setRealmService(realmService);
}
protected void unsetRealmService(RealmService realmService) {
if (LOG.isDebugEnabled()) {
LOG.debug("RealmService is unset in the custom conditional authentication user functions bundle");
}
CustomAuthFuncHolder.getInstance().setRealmService(null);
}
@Reference(
name = "registry.service",
service = RegistryService.class,
cardinality = ReferenceCardinality.MANDATORY,
policy = ReferencePolicy.DYNAMIC,
unbind = "unsetRegistryService"
)
protected void setRegistryService(RegistryService registryService) {
if (LOG.isDebugEnabled()) {
LOG.debug("RegistryService is set in the custom conditional authentication user functions bundle");
}
CustomAuthFuncHolder.getInstance().setRegistryService(registryService);
}
protected void unsetRegistryService(RegistryService registryService) {
if (LOG.isDebugEnabled()) {
LOG.debug("RegistryService is unset in the custom conditional authentication user functions bundle");
}
CustomAuthFuncHolder.getInstance().setRegistryService(null);
}
@Reference(
service = JsFunctionRegistry.class,
cardinality = ReferenceCardinality.MANDATORY,
policy = ReferencePolicy.DYNAMIC,
unbind = "unsetJsFunctionRegistry"
)
public void setJsFunctionRegistry(JsFunctionRegistry jsFunctionRegistry) {
this.jsFunctionRegistry = jsFunctionRegistry;
}
public void unsetJsFunctionRegistry(JsFunctionRegistry jsFunctionRegistry) {
this.jsFunctionRegistry = null;
}
}
But when I use setForceAuth(context, true); in the adaptive authentication function to add custom claims, it's not working, although it works in a custom authenticator.
Adaptive authentication script:
function onLoginRequest(context) {
doLogin(context);
}
function doLogin(context) {
executeStep(1,{
onSuccess: function (context) {
},
onFail: function(context){
executeStep(4,{
onSuccess: function (context) {
var subject = context.currentKnownSubject;
setForceAuth(context, true);
},
onFail: function(context){
}
});
}
});
}
The issue is that
setForceAuth(context, true);
executes the code block above, which ends with
return new JsAuthenticatedUser(authenticatedUserObj);
but there is no point in your code (the authentication script or the Java function) where the newly created JsAuthenticatedUser is set on the context.
What you need to do is change the script like this:
onSuccess: function (context) { context.currentKnownSubject = setForceAuth(context, true);}
or change the Java function as below:
@Override
public JsAuthenticatedUser setForceAuth(JsAuthenticationContext context, boolean forceAuth) {
AuthenticatedUser lastAuthenticatedUser = context.getContext().getLastAuthenticatedUser();
LOGGER.info("lastAuthenticatedUser****:::::::::::" + lastAuthenticatedUser);
String userName = lastAuthenticatedUser.getUserName();
LOGGER.info("userName2****:::::::::::" + userName);
String tenantDomain = MultitenantUtils.getTenantDomain(userName);
String fullyQualifiedUserName = ("USERS" + "/" + userName + "@" + tenantDomain);
Map<org.wso2.carbon.identity.application.common.model.ClaimMapping, String> claims = new HashMap<org.wso2.carbon.identity.application.common.model.ClaimMapping, String>();
claims.put(org.wso2.carbon.identity.application.common.model.ClaimMapping.build("test123", "test123", null, true), org.apache.commons.lang3.StringUtils.join("*******************", ",,,"));
AuthenticatedUser authenticatedUserObj = AuthenticatedUser.createLocalAuthenticatedUserFromSubjectIdentifier(MultitenantUtils.getTenantAwareUsername(fullyQualifiedUserName));
authenticatedUserObj.setAuthenticatedSubjectIdentifier(MultitenantUtils.getTenantAwareUsername(fullyQualifiedUserName));
authenticatedUserObj.setUserAttributes(claims);
authenticatedUserObj.setUserName(MultitenantUtils.getTenantAwareUsername(fullyQualifiedUserName));
// Attach the user carrying the new claims to the context so the framework uses it.
context.getContext().setSubject(authenticatedUserObj);
return new JsAuthenticatedUser(authenticatedUserObj);
}
Still, it is not advisable to use a tenant-aware username in authentication scripts. Rather, can you think of not using the
setForceAuth()
function and using the provided user.localClaims[] instead?
I have an application which queries our BQ datasets and stores the results to BQ tables.
My code:
BigQuery bigquery = bigQuery();
TableId destinationTable = TableId.of(datasetName, TableName);
QueryJobConfiguration queryConfig = QueryJobConfiguration.newBuilder(query)
.setDestinationTable(destinationTable).setWriteDisposition(JobInfo.WriteDisposition.WRITE_APPEND)
.build();
TableResult results = bigquery.query(queryConfig);
While writing the result to the BQ dataset, I want to append a column to every row, similar to this:
queryConfig.addNewColumnToEveryRow("ID", "123");
How to do that?
This should be possible by adding it to your query string.
String query = "SELECT yourOtherFields, 123 AS ID FROM yourSource";
The efficient solution is to change the query itself, as shown in @Brent's solution. The other solution, mentioned by @Mikhail, is to post-process the result returned from the query execution. Please refer to the code snippet below for a programmatic way to post-process (add a new column) and load the data into BigQuery.
The flow of the program is as follows:
Execute the query and obtain the results.
Iterate over the result and construct a JSON array.
Write the JSON array to a local file in NDJSON format.
Load the local file into a BigQuery table by creating a Batch load job (implemented below). You can also use the streaming API to load the data.
import java.io.FileWriter;
import java.io.IOException;
import java.io.OutputStream;
import java.nio.channels.Channels;
import java.nio.file.FileSystems;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.UUID;
import com.google.cloud.bigquery.BigQuery;
import com.google.cloud.bigquery.BigQueryException;
import com.google.cloud.bigquery.BigQueryOptions;
import com.google.cloud.bigquery.FormatOptions;
import com.google.cloud.bigquery.Job;
import com.google.cloud.bigquery.JobId;
import com.google.cloud.bigquery.QueryJobConfiguration;
import com.google.cloud.bigquery.TableDataWriteChannel;
import com.google.cloud.bigquery.TableId;
import com.google.cloud.bigquery.TableResult;
import com.google.cloud.bigquery.WriteChannelConfiguration;
import com.google.common.io.Files;
import com.google.gson.JsonArray;
import com.google.gson.JsonObject;
public class AddNewColumn {
public static void main(String[] args) throws IOException {
runSimpleQuery();
}
public static void runSimpleQuery() throws IOException {
String query = "SELECT corpus, SUM(word_count) as word_count FROM `bigquery-public-data.samples.shakespeare` GROUP BY corpus ORDER BY word_count LIMIT 5;";
simpleQuery(query);
}
public static void simpleQuery(String query) throws IOException {
try {
// Initialize client that will be used to send requests. This client only needs to be created
// once, and can be reused for multiple requests.
BigQuery bigquery = BigQueryOptions.getDefaultInstance().getService();
// Create the query job.
QueryJobConfiguration queryConfig = QueryJobConfiguration.newBuilder(query).build();
// Execute the query.
TableResult result = bigquery.query(queryConfig);
System.out.println("\nQuery ran successfully");
// Construct JSON array from the individual rows
ArrayList<String> columnNames = new ArrayList<String>();
result.getSchema().getFields().forEach(field -> columnNames.add(field.getName())); // get column names
JsonArray jsonArray = new JsonArray();
result.iterateAll().forEach(row -> {
JsonObject jsonObject = new JsonObject();
jsonObject.addProperty("ID", 123);
columnNames.forEach(
column -> {
jsonObject.addProperty(column, row.get(column).getValue().toString());
}
);
jsonArray.add(jsonObject);
});
// Writing JSON array to a temporary file in NDJSON format
FileWriter file = new FileWriter("./tempfile.json");
jsonArray.forEach(jsonElement -> {
try {
file.write(jsonElement.toString());
file.write("\n");
} catch (IOException e) {
e.printStackTrace();
}
});
file.close();
System.out.println("Data written to temporary file.");
// Create a load job to insert data
// TODO: Change the destination dataset and table information.
String datasetName = "MY_DATASET_NAME";
String tableName = "MY_TABLE_NAME";
Path jsonPath = FileSystems.getDefault().getPath(".", "tempfile.json");
insertDataIntoDestinationTable(datasetName, tableName, jsonPath, FormatOptions.json());
} catch (BigQueryException | InterruptedException e) {
System.out.println("Query did not run \n" + e.toString());
}
}
private static void insertDataIntoDestinationTable(String datasetName, String tableName, Path jsonPath, FormatOptions formatOptions) throws InterruptedException, IOException {
try {
// Initialize client that will be used to send requests. This client only needs to be created
// once, and can be reused for multiple requests.
BigQuery bigquery = BigQueryOptions.getDefaultInstance().getService();
TableId tableId = TableId.of(datasetName, tableName);
WriteChannelConfiguration writeChannelConfiguration =
WriteChannelConfiguration.newBuilder(tableId).setFormatOptions(formatOptions).build();
// The location and JobName must be specified; other fields can be auto-detected.
String jobName = "jobId_" + UUID.randomUUID().toString();
JobId jobId = JobId.newBuilder().setLocation("us").setJob(jobName).build();
// Imports a local file into a table.
try (TableDataWriteChannel writer = bigquery.writer(jobId, writeChannelConfiguration);
OutputStream stream = Channels.newOutputStream(writer)) {
Files.copy(jsonPath.toFile(), stream);
}
// Get the Job created by the TableDataWriteChannel and wait for it to complete.
Job job = bigquery.getJob(jobId);
Job completedJob = job.waitFor();
if (completedJob == null) {
System.out.println("Job not executed since it no longer exists.");
return;
} else if (completedJob.getStatus().getError() != null) {
System.out.println(
"BigQuery was unable to load local file to the table due to an error: \n"
+ job.getStatus().getError());
return;
}
} catch (BigQueryException e) {
System.out.println("Local file not loaded. \n" + e.toString());
}
}
}
Output:
The query results have been successfully inserted into the destination table.
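As an aside, the streaming route mentioned in step 4 could use the insertAll API from the same google-cloud-bigquery client instead of a load job. A rough sketch (same placeholder dataset/table, one hard-coded row for illustration):
import java.util.HashMap;
import java.util.Map;
import com.google.cloud.bigquery.BigQuery;
import com.google.cloud.bigquery.BigQueryOptions;
import com.google.cloud.bigquery.InsertAllRequest;
import com.google.cloud.bigquery.InsertAllResponse;
import com.google.cloud.bigquery.TableId;
public class StreamRowExample {
    public static void main(String[] args) {
        BigQuery bigquery = BigQueryOptions.getDefaultInstance().getService();
        TableId tableId = TableId.of("MY_DATASET_NAME", "MY_TABLE_NAME");
        // One post-processed row with the extra ID column added.
        Map<String, Object> rowContent = new HashMap<>();
        rowContent.put("ID", 123);
        rowContent.put("corpus", "hamlet");
        rowContent.put("word_count", 32446);
        InsertAllResponse response = bigquery.insertAll(
                InsertAllRequest.newBuilder(tableId).addRow(rowContent).build());
        if (response.hasErrors()) {
            response.getInsertErrors().forEach((index, errors) ->
                    System.out.println("Row " + index + " failed: " + errors));
        }
    }
}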
In my project there is a need to create share links for external users without an AWS user. From my research I found a couple of ways of doing so:
Bucket policy based on tags
A Lambda that creates a signed URL every time a user requests the file
The question is: what is the best practice for doing so? I need the download to be available until the user sharing the file stops it.
Thanks guys for any answers
Using the AWS SDK, you can use the Amazon S3 presign functionality. You can perform this task in any of the supported programming languages (Java, JS, Python, etc.).
The following code shows how to presign an object via the Amazon S3 Java V2 API.
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.HttpURLConnection;
import java.time.Duration;
import software.amazon.awssdk.regions.Region;
import software.amazon.awssdk.services.s3.model.GetObjectRequest;
import software.amazon.awssdk.services.s3.model.S3Exception;
import software.amazon.awssdk.services.s3.presigner.model.GetObjectPresignRequest;
import software.amazon.awssdk.services.s3.presigner.model.PresignedGetObjectRequest;
import software.amazon.awssdk.services.s3.presigner.S3Presigner;
import software.amazon.awssdk.utils.IoUtils;
/**
* To run this AWS code example, ensure that you have set up your development environment, including your AWS credentials.
*
* For information, see this documentation topic:
*
* https://docs.aws.amazon.com/sdk-for-java/latest/developer-guide/get-started.html
*/
public class GetObjectPresignedUrl {
public static void main(String[] args) {
final String USAGE = "\n" +
"Usage:\n" +
" GetObjectPresignedUrl <bucketName> <keyName> \n\n" +
"Where:\n" +
" bucketName - the Amazon S3 bucket name. \n\n"+
" keyName - a key name that represents a text file. \n\n";
if (args.length != 2) {
System.out.println(USAGE);
System.exit(1);
}
String bucketName = args[0];
String keyName = args[1];
Region region = Region.US_WEST_2;
S3Presigner presigner = S3Presigner.builder()
.region(region)
.build();
getPresignedUrl(presigner, bucketName, keyName);
presigner.close();
}
public static void getPresignedUrl(S3Presigner presigner, String bucketName, String keyName ) {
try {
GetObjectRequest getObjectRequest =
GetObjectRequest.builder()
.bucket(bucketName)
.key(keyName)
.build();
GetObjectPresignRequest getObjectPresignRequest = GetObjectPresignRequest.builder()
.signatureDuration(Duration.ofMinutes(10))
.getObjectRequest(getObjectRequest)
.build();
// Generate the presigned request
PresignedGetObjectRequest presignedGetObjectRequest =
presigner.presignGetObject(getObjectPresignRequest);
// Log the presigned URL
System.out.println("Presigned URL: " + presignedGetObjectRequest.url());
HttpURLConnection connection = (HttpURLConnection) presignedGetObjectRequest.url().openConnection();
presignedGetObjectRequest.httpRequest().headers().forEach((header, values) -> {
values.forEach(value -> {
connection.addRequestProperty(header, value);
});
});
// Send any request payload that the service needs (not needed when isBrowserExecutable is true)
if (presignedGetObjectRequest.signedPayload().isPresent()) {
connection.setDoOutput(true);
try (InputStream signedPayload = presignedGetObjectRequest.signedPayload().get().asInputStream();
OutputStream httpOutputStream = connection.getOutputStream()) {
IoUtils.copy(signedPayload, httpOutputStream);
}
}
// Download the result of executing the request
try (InputStream content = connection.getInputStream()) {
System.out.println("Service returned response: ");
IoUtils.copy(content, System.out);
}
} catch (S3Exception e) {
// Print the stack trace; getStackTrace() alone would silently discard it.
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
}
}
}
I'm using Flutter's aws_s3_upload plugin, which I found on GitHub. I am able to upload images to my AWS S3 bucket. However, the uploads are missing the "image/jpeg" MIME type required so that I can view them in a browser window as images.
At the moment, when clicking on the URL, the image downloads instead of appearing in my browser. Can I update this code so that it is uploaded to my S3 bucket as an image?
library aws_s3_upload;
import 'dart:io';
import 'package:amazon_cognito_identity_dart_2/sig_v4.dart';
import 'package:http/http.dart' as http;
import 'package:path/path.dart' as path;
import './src/policy.dart';
class AwsS3 {
static Future<String> uploadFile(
{
String accessKey,
String secretKey,
String bucket,
String destDir,
String region = 'us-east-2',
File file,
String filename}) async {
final endpoint = 'https://$bucket.s3-$region.amazonaws.com';
final uploadDest = '$destDir/${filename ?? path.basename(file.path)}';
final stream = http.ByteStream(Stream.castFrom(file.openRead()));
final length = await file.length();
final uri = Uri.parse(endpoint);
final req = http.MultipartRequest("POST", uri);
final multipartFile = http.MultipartFile('file', stream, length, filename: path.basename(file.path));
final policy = Policy.fromS3PresignedPost(uploadDest, bucket, accessKey, 15, length, region: region);
final key = SigV4.calculateSigningKey(secretKey, policy.datetime, region, 's3');
final signature = SigV4.calculateSignature(key, policy.encode());
req.files.add(multipartFile);
req.fields['key'] = policy.key;
req.fields['acl'] = 'public-read';
req.fields['X-Amz-Credential'] = policy.credential;
req.fields['X-Amz-Algorithm'] = 'AWS4-HMAC-SHA256';
req.fields['X-Amz-Date'] = policy.datetime;
req.fields['Policy'] = policy.encode();
req.fields['X-Amz-Signature'] = signature;
try {
final res = await req.send();
if (res.statusCode == 204) return '$endpoint/$uploadDest';
} catch (e) {
print(e.toString());
}
}
}
So I went with using Minio like this:
await minio.fPutObject('mybucket', objectpath, croppedFile.path,
{
'x-amz-acl': 'public-read'
}
);
In order to do that I needed the following dependencies:
import 'package:minio/minio.dart';
import 'package:minio/io.dart';
I want to fetch all the failed executions and re-trigger them dynamically.
PS: In the Step Function definition I have a proper retry mechanism; now I want to rerun the failed executions dynamically.
I need to implement it in Java. Please help me with the approach.
Thanks in advance.
You can use the AWS Step Functions API to get a list of executions:
https://sdk.amazonaws.com/java/api/latest/software/amazon/awssdk/services/sfn/SfnClient.html#listExecutions-
Then you can get a list of ExecutionListItem objects by calling the executions() method on the ListExecutionsResponse object (returned by the listExecutions method):
https://sdk.amazonaws.com/java/api/latest/software/amazon/awssdk/services/sfn/model/ExecutionListItem.html
Using this object you can do two things:
1 - check the status - https://sdk.amazonaws.com/java/api/latest/software/amazon/awssdk/services/sfn/model/ExecutionStatus.html
2 - get the state machine ARN value - https://sdk.amazonaws.com/java/api/latest/software/amazon/awssdk/services/sfn/model/ExecutionListItem.html#stateMachineArn--
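To make the listing step concrete, here is a minimal sketch (not from the original answer) using the same AWS SDK for Java V2; the status filter narrows the results to failed executions, and the state machine ARN is a placeholder passed as an argument:
import software.amazon.awssdk.regions.Region;
import software.amazon.awssdk.services.sfn.SfnClient;
import software.amazon.awssdk.services.sfn.model.ExecutionListItem;
import software.amazon.awssdk.services.sfn.model.ExecutionStatus;
import software.amazon.awssdk.services.sfn.model.ListExecutionsRequest;
import software.amazon.awssdk.services.sfn.model.ListExecutionsResponse;
public class ListFailedExecutions {
    public static void main(String[] args) {
        String stateMachineArn = args[0]; // ARN of your state machine
        SfnClient sfnClient = SfnClient.builder()
                .region(Region.US_EAST_1)
                .build();
        // Ask the service to return only FAILED executions.
        ListExecutionsRequest request = ListExecutionsRequest.builder()
                .stateMachineArn(stateMachineArn)
                .statusFilter(ExecutionStatus.FAILED)
                .build();
        ListExecutionsResponse response = sfnClient.listExecutions(request);
        for (ExecutionListItem item : response.executions()) {
            System.out.println(item.executionArn() + " failed at " + item.stopDate());
        }
        sfnClient.close();
    }
}
Each failed execution's original input can be recovered with describeExecution, and the state machine ARN can then be passed to the StartExecution code below to re-trigger it.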
Using the state machine ARN value, you can execute a state machine with the AWS Step Functions Java API V2:
import org.json.simple.JSONObject;
import org.json.simple.parser.JSONParser;
import software.amazon.awssdk.regions.Region;
import software.amazon.awssdk.services.sfn.SfnClient;
import software.amazon.awssdk.services.sfn.model.*;
import java.io.FileReader;
import java.io.IOException;
import java.util.UUID;
public class StartExecution {
public static void main(String[] args) {
final String USAGE = "\n" +
"Usage:\n" +
" StartExecution <stateMachineArn> <jsonFile>\n\n" +
"Where:\n" +
" stateMachineArn - the ARN of the state machine.\n\n" +
" jsonFile - A JSON file that contains the values to pass to the worflow.\n" ;
if (args.length != 2) {
System.out.println(USAGE);
System.exit(1);
}
String stateMachineArn = args[0];
String jsonFile = args[1];
Region region = Region.US_EAST_1;
SfnClient sfnClient = SfnClient.builder()
.region(region)
.build();
String exeArn = startWorkflow(sfnClient,stateMachineArn, jsonFile);
System.out.println("The execution ARN is" +exeArn);
sfnClient.close();
}
public static String startWorkflow(SfnClient sfnClient, String stateMachineArn, String jsonFile) {
String json = getJSONString(jsonFile);
// Specify the name of the execution by using a GUID value.
UUID uuid = UUID.randomUUID();
String uuidValue = uuid.toString();
try {
StartExecutionRequest executionRequest = StartExecutionRequest.builder()
.input(json)
.stateMachineArn(stateMachineArn)
.name(uuidValue)
.build();
StartExecutionResponse response = sfnClient.startExecution(executionRequest);
return response.executionArn();
} catch (SfnException e) {
System.err.println(e.awsErrorDetails().errorMessage());
System.exit(1);
}
return "";
}
private static String getJSONString(String path) {
try {
JSONParser parser = new JSONParser();
JSONObject data = (JSONObject) parser.parse(new FileReader(path));//path to the JSON file.
String json = data.toJSONString();
return json;
} catch (IOException | org.json.simple.parser.ParseException e) {
e.printStackTrace();
}
return "";
}
}