Modify CloudFront origin response with Lambda - read-only headers - amazon-web-services

I have a CloudFront distribution with a single React site hosted in S3. The origin is connected via the REST API endpoint. To handle client-side routes properly, I use custom error responses that map 403 and 404 statuses to 200 and route them to the root. The root object is index.html, and everything seems to be fine.
Now I have a task to add another site to the distribution, which should be accessible through a subdirectory.
To do this I have to set a root object for the subdirectory and catch 404 and 403 responses and redirect them to that root object. I've already set up the origin and behaviour.
I tried to follow these guides:
example
source
but something went wrong.
The first approach (CloudFrontSubdirectoryIndex) doesn't seem to work at all (the function is not invoked and no rewrite happens), so I tried a CloudFront Function instead, and it seems to work fine.
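For reference, a viewer-request CloudFront Function for this kind of subdirectory index rewrite can be as small as the following sketch (the common pattern, not my exact code):
// Viewer-request CloudFront Function: rewrite directory-style URIs to the
// subdirectory's index.html so S3 serves the right root object.
function handler(event) {
    var request = event.request;
    var uri = request.uri;
    if (uri.endsWith('/')) {
        // e.g. /subsite/ -> /subsite/index.html
        request.uri = uri + 'index.html';
    } else if (!uri.includes('.')) {
        // e.g. /subsite -> /subsite/index.html
        request.uri = uri + '/index.html';
    }
    return request;
}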
The last step is to handle 404 and 403 responses.
Here is the function from the guide:
'use strict';

const https = require('https');

const indexPage = 'index.html';

exports.handler = async (event, context, callback) => {
    const cf = event.Records[0].cf;
    const request = cf.request;
    const response = cf.response;
    const statusCode = response.status;

    // Only replace 403 and 404 requests typically received
    // when loading a page for a SPA that uses client-side routing
    const doReplace = request.method === 'GET'
        && (statusCode == '403' || statusCode == '404');

    const result = doReplace
        ? await generateResponseAndLog(cf, request, indexPage)
        : response;

    callback(null, result);
};

async function generateResponseAndLog(cf, request, indexPage) {
    const domain = cf.config.distributionDomainName;
    const appPath = getAppPath(request.uri);
    const indexPath = `/${appPath}/${indexPage}`;

    const response = await generateResponse(domain, indexPath);

    console.log('response: ' + JSON.stringify(response));

    return response;
}

async function generateResponse(domain, path) {
    try {
        // Load HTML index from the CloudFront cache
        const s3Response = await httpGet({ hostname: domain, path: path });

        const headers = s3Response.headers ||
            {
                'content-type': [{ value: 'text/html;charset=UTF-8' }]
            };

        return {
            status: '200',
            headers: wrapAndFilterHeaders(headers),
            body: s3Response.body
        };
    } catch (error) {
        return {
            status: '500',
            headers: {
                'content-type': [{ value: 'text/plain' }]
            },
            body: 'An error occurred loading the page'
        };
    }
}

function httpGet(params) {
    return new Promise((resolve, reject) => {
        https.get(params, (resp) => {
            console.log(`Fetching ${params.hostname}${params.path}, status code : ${resp.statusCode}`);
            let result = {
                headers: resp.headers,
                body: ''
            };
            resp.on('data', (chunk) => { result.body += chunk; });
            resp.on('end', () => { resolve(result); });
        }).on('error', (err) => {
            console.log(`Couldn't fetch ${params.hostname}${params.path} : ${err.message}`);
            reject(err);
        });
    });
}

// Get the app path segment e.g. candidates.app, employers.client etc
function getAppPath(path) {
    if (!path) {
        return '';
    }
    if (path[0] === '/') {
        path = path.slice(1);
    }
    const segments = path.split('/');
    // will always have at least one segment (may be empty)
    return segments[0];
}

// CloudFront requires header values to be wrapped in an array
function wrapAndFilterHeaders(headers) {
    const allowedHeaders = [
        'content-type',
        'content-length',
        'last-modified',
        'date',
        'etag'
    ];
    const responseHeaders = {};
    if (!headers) {
        return responseHeaders;
    }
    for (const propName in headers) {
        // only include allowed headers
        if (allowedHeaders.includes(propName.toLowerCase())) {
            const header = headers[propName];
            if (Array.isArray(header)) {
                // assume already 'wrapped' format
                responseHeaders[propName] = header;
            } else {
                // fix to required format
                responseHeaders[propName] = [{ value: header }];
            }
        }
    }
    return responseHeaders;
}
When I try to implement this solution (attaching the function to the origin response event) I get:
The Lambda function result failed validation: The function tried to add, delete, or change a read-only header.
Here is a list of restricted headers, but I'm not modifying any of them.
If I try not to attach any headers to a response at all, the message is the same.
If I try to attach all headers, CloudFront says that I'm modifying a blacklisted header.
The objects in the bucket have only one piece of custom metadata: Cache-Control: no-cache.
It seemed like a quick task, but I've been stuck for two days already.
Any help will be appreciated.
UPD: I've searched the logs and found
ERROR Validation error: Lambda function result failed validation, the function tried to delete read-only header, headerName : Transfer-Encoding.
I'm a little confused. This header is not present in the origin response, but CloudFront claims that I deleted it...

I tried to find the value of the "Transfer-Encoding" header that should come from the origin (S3), but it seems to have disappeared, and CloudFront treats this header as essential.
So I just hard-coded it, and everything started working:
'use strict';

const https = require('https');

const indexPage = 'index.html';

exports.handler = async (event, context, callback) => {
    const cf = event.Records[0].cf;
    const request = cf.request;
    const response = cf.response;
    const statusCode = response.status;

    // Only replace 403 and 404 requests typically received
    // when loading a page for a SPA that uses client-side routing
    const doReplace = request.method === 'GET'
        && (statusCode == '403' || statusCode == '404');

    const result = doReplace
        ? await generateResponseAndLog(cf, request, indexPage)
        : response;

    callback(null, result);
};

async function generateResponseAndLog(cf, request, indexPage) {
    const domain = cf.config.distributionDomainName;
    const appPath = getAppPath(request.uri);
    const indexPath = `/${appPath}/${indexPage}`;

    const response = await generateResponse(domain, indexPath);

    console.log('response: ' + JSON.stringify(response));

    return response;
}

async function generateResponse(domain, path) {
    try {
        // Load HTML index from the CloudFront cache
        const s3Response = await httpGet({ hostname: domain, path: path });

        const headers = s3Response.headers ||
            {
                'content-type': [{ value: 'text/html;charset=UTF-8' }]
            };

        // Hard-code Transfer-Encoding so CloudFront doesn't complain that this
        // read-only header was deleted (wrapAndFilterHeaders wraps the value below).
        headers['transfer-encoding'] = 'chunked';

        return {
            status: '200',
            headers: wrapAndFilterHeaders(headers),
            body: s3Response.body
        };
    } catch (error) {
        return {
            status: '500',
            headers: {
                'content-type': [{ value: 'text/plain' }]
            },
            body: 'An error occurred loading the page'
        };
    }
}

function httpGet(params) {
    return new Promise((resolve, reject) => {
        https.get(params, (resp) => {
            console.log(`Fetching ${params.hostname}${params.path}, status code : ${resp.statusCode}`);
            let result = {
                headers: resp.headers,
                body: ''
            };
            resp.on('data', (chunk) => { result.body += chunk; });
            resp.on('end', () => { resolve(result); });
        }).on('error', (err) => {
            console.log(`Couldn't fetch ${params.hostname}${params.path} : ${err.message}`);
            reject(err);
        });
    });
}

// Get the app path segment e.g. candidates.app, employers.client etc
function getAppPath(path) {
    if (!path) {
        return '';
    }
    if (path[0] === '/') {
        path = path.slice(1);
    }
    const segments = path.split('/');
    // will always have at least one segment (may be empty)
    return segments[0];
}

// CloudFront requires header values to be wrapped in an array
function wrapAndFilterHeaders(headers) {
    const allowedHeaders = [
        'content-type',
        'content-length',
        'content-encoding',
        'transfer-encoding',
        'last-modified',
        'date',
        'etag'
    ];
    const responseHeaders = {};
    if (!headers) {
        return responseHeaders;
    }
    for (const propName in headers) {
        // only include allowed headers
        if (allowedHeaders.includes(propName.toLowerCase())) {
            const header = headers[propName];
            if (Array.isArray(header)) {
                // assume already 'wrapped' format
                responseHeaders[propName] = header;
            } else {
                // fix to required format
                responseHeaders[propName] = [{ value: header }];
            }
        }
    }
    return responseHeaders;
}

Related

Handling multipart/form-data in AWS Lambda

I'm trying to send a request with an image to a Lambda function through API Gateway.
I'm using this piece of code to parse the form-data object received by my Lambda function. I then upload the image to S3, but when downloading and opening the image from S3, I see that it's corrupt.
I have tried the following npm packages:
parse-multipart
parse-multipart-data
Neither works, because I get an empty parts array. The piece of code I'm using does get results in the array, but the buffers seem to be corrupt.
The problem seems to be in this line of code:
Buffer.from(item.slice(item.search(/Content-Type:\s.+/g) + item.match(/Content-Type:\s.+/g)[0].length + 4, -4), 'binary')
Does anyone have a solution for me?
You can use busboy to parse the multipart form data before uploading to S3 as shown below:
// This code is written in ECMAScript 6 (ES6), not CommonJS syntax.
// So, make sure you add "type": "module" in your package.json.
import {S3Client, PutObjectCommand} from '@aws-sdk/client-s3';
import Busboy from 'busboy';

// Initialize the clients outside the function handler to take advantage of execution environment reuse.
const s3Client = new S3Client({region: process.env.AWS_REGION});

// Function handler.
export async function handler(event) {
    const {
        ContentType: contentType = '',
        File: file = '',
    } = await FORM.parse(event['body'], event['headers']);

    try {
        // Adds an object to a bucket. We must have `WRITE` permissions on a bucket to add an object to it.
        await s3Client.send(new PutObjectCommand({
            Body: file,
            Bucket: 'BUCKET_NAME',
            ContentType: contentType,
            Key: 'SOME_KEY',
        }));

        return {
            isBase64Encoded: false,
            statusCode: 200,
            body: JSON.stringify({
                message: 'Everything is gonna be alright.',
            }),
        }
    } catch (e) {
        return {
            isBase64Encoded: false,
            statusCode: 404,
            body: JSON.stringify(e),
        }
    }
}

const FORM = {
    parse(body, headers) {
        return new Promise((resolve, reject) => {
            const data = {};
            const buffer = Buffer.from(body, 'base64');

            const bb = Busboy({
                headers: Object.keys(headers).reduce((newHeaders, key) => {
                    // busboy expects lower-case headers.
                    newHeaders[key.toLowerCase()] = headers[key];
                    return newHeaders;
                }, {}),
                limits: {
                    fileSize: 10485760, // Set as desired.
                    files: 1,
                },
            });

            bb.on('file', (name, stream, info) => {
                const chunks = [];
                stream.on('data', (chunk) => {
                    if (name === 'File') {
                        chunks.push(chunk);
                    }
                }).on('limit', () => {
                    reject(new Error('File size limit has been reached.'));
                }).on('close', () => {
                    if (name === 'File') {
                        data[name] = Buffer.concat(chunks);
                        data['ContentType'] = info.mimeType;
                    }
                });
            });

            bb.on('error', (err) => {
                reject(err);
            });

            bb.on('close', () => {
                resolve(data);
            });

            bb.end(buffer);
        });
    }
};
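Note that this parsing assumes the Lambda receives the body base64-encoded, as it does behind API Gateway with proxy integration. A hypothetical local test of FORM.parse with a hand-built multipart payload (the boundary and field name are made up, matching the 'File' field above):
const boundary = '----test';
const payload = Buffer.from([
    `--${boundary}`,
    'Content-Disposition: form-data; name="File"; filename="pic.jpg"',
    'Content-Type: image/jpeg',
    '',
    'fake-image-bytes',
    `--${boundary}--`,
    ''
].join('\r\n')).toString('base64');
FORM.parse(payload, {'Content-Type': `multipart/form-data; boundary=${boundary}`})
    .then(({ContentType, File}) => console.log(ContentType, File.length)); // image/jpeg 16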

SvelteKit does not set cookie returned from GET endpoint

I'm reformulating my question and code examples so it is easy to understand.
When I call this PUT endpoint, cookies are set correctly.
export const put: RequestHandler<Locals> = async (event) => {
  const userInfo = {
    refresh_token: Math.random().toString(),
  };
  const json = JSON.stringify(userInfo);
  const jwt = cookie.serialize("jwt", json, {
    httpOnly: true,
    path: "/",
  });
  const headers = {
    "Set-Cookie": [jwt],
  };
  return {
    status: 200,
    headers,
    body: {},
  };
};
When I call this GET endpoint, cookies are not set.
export const get: RequestHandler<Locals> = async (event) => {
  const userInfo = {
    refresh_token: Math.random().toString(),
  };
  const json = JSON.stringify(userInfo);
  const jwt = cookie.serialize("jwt", json, {
    httpOnly: true,
    path: "/",
  });
  const headers = {
    "Set-Cookie": [jwt],
  };
  return {
    status: 200,
    headers,
    body: {},
  };
};
These are the options sent to fetch, where method equals "GET" or "PUT" for each case:
const opts: RequestInit = {
  method,
  credentials: "include",
  headers: {
    "Content-Type": "application/json",
  },
};
if (data) {
  opts.body = JSON.stringify(data);
}
Set the cookie in the handle hook where it can be set in a single place for both endpoints. See https://kit.svelte.dev/docs#hooks-handle.
Pass the JWT to the handle method from your endpoint through the event.locals object.
e.g.,
/** @type {import('@sveltejs/kit').Handle} */
export async function handle({ event, resolve }) {
  const response = await resolve(event);
  response.headers.append('set-cookie', event.locals.jwt);
  return response;
}
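On the endpoint side, that means stashing the serialized cookie on event.locals instead of returning a Set-Cookie header. A sketch against the same pre-1.0 SvelteKit API used in the question:
export const get: RequestHandler<Locals> = async (event) => {
  const userInfo = {
    refresh_token: Math.random().toString(),
  };
  // Hand the cookie to the handle hook via locals rather than via headers.
  event.locals.jwt = cookie.serialize("jwt", JSON.stringify(userInfo), {
    httpOnly: true,
    path: "/",
  });
  return {
    status: 200,
    body: {},
  };
};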

Use the AWS API to send the URL of a file to Lambda so it can be fetched and stored in S3?

I'm very new to Lambda.
My goal is to have an API endpoint where I can include a URL as an argument (and probably a password), and have Lambda retrieve the file at that URL and save it into an S3 bucket.
I have the bucket ready, and I have been reading all the tutorials and examples I could find regarding Lambda, but so far I can't figure out the pieces needed to get this to work.
Any guidance would be appreciated.
EDIT 1:
I got this far. This receives the URL parameters from the API and checks the password, but it doesn't execute the GET:
exports.handler = async (event) => {
    let url = '';
    let key = '';
    let out = 'empty';

    url = event["queryStringParameters"]['url'];
    key = event["queryStringParameters"]['key'];

    if (key != 'secret') {
        const response = {
            statusCode: 200,
            body: JSON.stringify('Unauthorized')
        };
        return response;
    }

    console.log("START");
    var https = require('https');
    https.get(url, function(res) {
        console.log("Got response: " + res.statusCode);
        const response = {
            statusCode: 200,
            body: JSON.stringify("success")
        };
        return response;
    }).on('error', function(e) {
        console.log("Got error: " + e.message);
        const response = {
            statusCode: 200,
            body: JSON.stringify("fail")
        };
        return response;
    }).end();

    const response = {
        statusCode: 200,
        body: JSON.stringify(out)
    };
    return response;
};
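For what it's worth, the https.get above is asynchronous: the handler returns before its callbacks run, so the GET never appears to execute. A minimal sketch of one way forward — promisify the download and await it before writing to S3 (bucket name and key derivation are placeholders; this uses the AWS SDK v2 bundled with older Node.js Lambda runtimes):
const https = require('https');
const AWS = require('aws-sdk');
const s3 = new AWS.S3();

// Download a URL into a Buffer so the handler can await it.
function download(url) {
    return new Promise((resolve, reject) => {
        https.get(url, (res) => {
            const chunks = [];
            res.on('data', (chunk) => chunks.push(chunk));
            res.on('end', () => resolve(Buffer.concat(chunks)));
        }).on('error', reject);
    });
}

exports.handler = async (event) => {
    const url = event.queryStringParameters.url;
    const body = await download(url);
    await s3.putObject({
        Bucket: 'MY_BUCKET',        // placeholder
        Key: url.split('/').pop(),  // placeholder key derivation
        Body: body,
    }).promise();
    return { statusCode: 200, body: JSON.stringify('stored') };
};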

How to add GET request parameters in AWS canaries

I'm new to CloudWatch Synthetics. I want to test my API with a GET request with parameters. Please suggest the configuration for adding parameters, or provide blueprint scripts that use parameters in a GET request.
Thanks
Try This...
var synthetics = require('Synthetics');
const log = require('SyntheticsLogger');

const apiCanaryBlueprint = async function () {

    // Handle validation for positive scenario
    const validatePositiveCase = async function(res) {
        return new Promise((resolve, reject) => {
            if (res.statusCode < 200 || res.statusCode > 299) {
                throw res.statusCode + ' ' + res.statusMessage;
            }
            let responseBody = '';
            res.on('data', (d) => {
                responseBody += d;
            });
            res.on('end', () => {
                // Add validation on 'responseBody' here if required. For ex, your status code is 200 but data might be empty
                resolve();
            });
        });
    };

    // Handle validation for negative scenario
    const validateNegativeCase = async function(res) {
        return new Promise((resolve, reject) => {
            if (res.statusCode < 400) {
                throw res.statusCode + ' ' + res.statusMessage;
            }
        });
    };

    let requestOptionsStep1 = {
        'hostname': 'myproductsEndpoint.com',
        'method': 'GET',
        'path': '/test/product/validProductName',
        'port': 443,
        'protocol': 'https:'
    };

    let headers = {};
    headers['User-Agent'] = [synthetics.getCanaryUserAgentString(), headers['User-Agent']].join(' ');
    requestOptionsStep1['headers'] = headers;

    // By default headers, post data and response body are not included in the report for security reasons.
    // Change the configuration at global level or add as step configuration for individual steps
    let stepConfig = {
        includeRequestHeaders: true,
        includeResponseHeaders: true,
        restrictedHeaders: ['X-Amz-Security-Token', 'Authorization'], // Restricted header values do not appear in report generated.
        includeRequestBody: true,
        includeResponseBody: true
    };

    await synthetics.executeHttpStep('Verify GET products API with valid name', requestOptionsStep1, validatePositiveCase, stepConfig);

    let requestOptionsStep2 = {
        'hostname': 'myproductsEndpoint.com',
        'method': 'GET',
        'path': '/test/canary/InvalidName(',
        'port': 443,
        'protocol': 'https:'
    };

    headers = {};
    headers['User-Agent'] = [synthetics.getCanaryUserAgentString(), headers['User-Agent']].join(' ');
    requestOptionsStep2['headers'] = headers;

    // By default headers, post data and response body are not included in the report for security reasons.
    // Change the configuration at global level or add as step configuration for individual steps
    stepConfig = {
        includeRequestHeaders: true,
        includeResponseHeaders: true,
        restrictedHeaders: ['X-Amz-Security-Token', 'Authorization'], // Restricted header values do not appear in report generated.
        includeRequestBody: true,
        includeResponseBody: true
    };

    await synthetics.executeHttpStep('Verify GET products API with invalid name', requestOptionsStep2, validateNegativeCase, stepConfig);
};

exports.handler = async () => {
    return await apiCanaryBlueprint();
};
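If your parameters are query-string parameters rather than path segments, they go straight into the path of the request options, e.g. (endpoint and parameter names are made up):
let requestOptionsStep3 = {
    'hostname': 'myproductsEndpoint.com',
    'method': 'GET',
    'path': '/test/product?name=validProductName&limit=10', // query parameters go in 'path'
    'port': 443,
    'protocol': 'https:'
};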

React Native upload to S3 with presigned URL

Been trying with no luck to upload an image to S3 from React Native using a pre-signed URL. Here is my code:
Generate the pre-signed URL in Node:
const s3 = new aws.S3();
const s3Params = {
    Bucket: bucket,
    Key: fileName,
    Expires: 60,
    ContentType: 'image/jpeg',
    ACL: 'public-read'
};
return s3.getSignedUrl('putObject', s3Params);
Here is the RN request to S3:
var file = {
    uri: game.pictureToSubmitUri,
    type: 'image/jpeg',
    name: 'image.jpg',
};
const xhr = new XMLHttpRequest();
var body = new FormData();
body.append('file', file);
xhr.open('PUT', signedRequest);
xhr.onreadystatechange = () => {
    if (xhr.readyState === 4) {
        if (xhr.status === 200) {
            alert('Posted!');
        }
        else {
            alert('Could not upload file.');
        }
    }
};
xhr.send(body);
game.pictureToSubmitUri = assets-library://asset/asset.JPG?id=A282A2C5-31C8-489F-9652-7D3BD5A1FAA4&ext=JPG
signedRequest = https://my-bucket.s3-us-west-1.amazonaws.com/8bd2d4b9-3206-4bff-944d-e06f872d8be3?AWSAccessKeyId=AKIAIOLHQY4GAXN26FOQ&Content-Type=image%2Fjpeg&Expires=1465671117&Signature=bkQIp5lgzuYrt2vyl7rqpCXPcps%3D&x-amz-acl=public-read
Error message:
<Code>SignatureDoesNotMatch</Code>
<Message>
The request signature we calculated does not match the signature you provided. Check your key and signing method.
</Message>
I can successfully curl an image to S3 using the generated URL, and I seem to be able to post to requestb.in from RN (however, I can only see the raw data on requestb.in, so I'm not 100% sure the image is properly there).
Based on all this, I've narrowed my issue down to: 1) my image is not uploading correctly, period, or 2) somehow the way S3 wants my request differs from how it is coming in.
Any help would be muuuuuucchhhh appreciated!
UPDATE
I can successfully post from RN to S3 if the body is just text ({'data': 'foo'}). Perhaps AWS does not like multipart form data? How can I send just the file in RN?
To upload to a pre-signed S3 URL on both iOS and Android, use the react-native-blob-util lib.
Code snippet:
import RNBlobUtil from 'react-native-blob-util'
const preSignedURL = 'pre-signed url'
const pathToImage = '/path/to/image.jpg' // without file:// scheme at the beginning
const headers = {}
RNBlobUtil.fetch('PUT', preSignedURL, headers, RNBlobUtil.wrap(pathToImage))
Edited 19 Oct 2022 and swapped unsupported RN Fetch Blob for React Native Blob Util package.
FormData will create a multipart/form-data request. S3 PUT object needs its request body to be a file.
You just need to send your file in the request body without wrapping it into FormData:
function uploadFile(file, signedRequest, url) {
    const xhr = new XMLHttpRequest();
    xhr.open('PUT', signedRequest);
    xhr.onreadystatechange = function() {
        if (xhr.readyState === 4) {
            if (xhr.status === 200) {
                alert(url);
            } else {
                alert('Could not upload file.');
            }
        }
    };
    xhr.send(file);
};
See https://devcenter.heroku.com/articles/s3-upload-node for an example in a browser. Please also ensure your Content-Type header matches the one the URL was signed with.
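For instance, since the URL above was signed with ContentType: 'image/jpeg', the upload should send the same header (a sketch of the xhr calls):
xhr.open('PUT', signedRequest);
// Must match the ContentType the URL was signed with,
// otherwise S3 rejects the upload with SignatureDoesNotMatch.
xhr.setRequestHeader('Content-Type', 'image/jpeg');
xhr.send(file);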
"rn-fetch-blob": 0.12.0,
"react-native": 0.61.5
This code works for both Android & iOS
const response = await RNFetchBlob.fetch(
    'PUT',
    presignedUrl,
    {
        'Content-Type': undefined
    },
    RNFetchBlob.wrap(file.path.replace('file://', '')),
)
Note {'Content-Type': undefined} is needed for iOS
Sorry if none of these worked for anybody. It took me 5 days to get this to work, 5 crazy days of no results, until a little nap finally brought the idea. So, quickly: say you have an endpoint on your server to generate the signed URL for the request from the React Native end, from React, or from any web front end. I'll cover this for both React Native and React (which can also serve HTML pages and Angular pages).
WEB APPROACH
UPLOAD IMAGE TO S3 BUCKET PRESIGNED URI
/*
  Function to carry out the actual PUT request to S3 using the signed request from the app.
*/
function uploadFile(file, signedRequest, url){
    // document.getElementById('preview').src = url; // THE PREVIEW PORTION
    // document.getElementById('avatar-url').value = url;
    const xhr = new XMLHttpRequest();
    xhr.open('PUT', signedRequest);
    xhr.onreadystatechange = () => {
        if(xhr.readyState === 4){
            if(xhr.status === 200){
                document.getElementById('preview').src = url;
                // document.getElementById('avatar-url').value = url;
            }
            else{
                alert('Could not upload file.');
            }
        }
    };
    xhr.send(file);
}

/*
  Function to get the temporary signed request from the app.
  If the request is successful, continue to upload the file using this signed request.
*/
function getSignedRequest(file){
    const xhr = new XMLHttpRequest();
    xhr.open('GET', 'http://localhost:1234'+`/sign-s3?file-name=${file.name}&file-type=${file.type}`);
    xhr.setRequestHeader('Access-Control-Allow-Headers', '*');
    xhr.setRequestHeader('Content-type', 'application/json');
    xhr.setRequestHeader('Access-Control-Allow-Origin', '*');
    xhr.onreadystatechange = () => {
        if(xhr.readyState === 4){
            if(xhr.status === 200){
                const response = JSON.parse(xhr.responseText);
                uploadFile(file, response.signedRequest, response.url);
            }
            else{
                alert('Could not get signed URL.');
            }
        }
    };
    xhr.send();
}

/*
  Function called when the file input is updated. If there is a file selected, then
  start the upload procedure by asking for a signed request from the app.
*/
function initUpload(){
    const files = document.getElementById('file-input').files;
    const file = files[0];
    if(file == null){
        return alert('No file selected.');
    }
    getSignedRequest(file);
}

/*
  Bind listeners when the page loads.
*/
// Check if the user is actually on the profile page
// (just ensure that the element with id 'profile-page' exists in your HTML).
if (document.getElementById('profile-page')) {
    document.addEventListener('DOMContentLoaded', () => {
        // Here is your upload trigger (the file input's change event)
        document.getElementById('file-input').onchange = initUpload;
    });
}
FOR REACT NATIVE I WILL NOT BE USING ANY 3RD-PARTY LIBS.
I have my pick-image function that picks the image and uploads it using XHR:
const pickImage = async () => {
    let result = await ImagePicker.launchImageLibraryAsync({
        // mediaTypes: ImagePicker.MediaTypeOptions.All,
        allowsEditing: true,
        aspect: [4, 3],
        quality: 1,
        base64: true
    });

    console.log(result);

    if (!result.cancelled) {
        // setImage(result.uri);
        let base64Img = `data:image/jpg;base64,${result.uri}`;
        // ImagePicker saves the taken photo to disk and returns a local URI to it
        let localUri = result.uri;
        let filename = localUri.split('/').pop();

        // Infer the type of the image
        let match = /\.(\w+)$/.exec(filename);
        let type = match ? `image/${match[1]}` : `image`;

        // Upload the image using the fetch and FormData APIs
        let formData = new FormData();
        // Assume "photo" is the name of the form field the server expects
        formData.append('file', { uri: base64Img, name: filename, type });

        const xhr = new XMLHttpRequest();
        xhr.open('GET', ENVIRONMENTS.CLIENT_API + `/sign-s3?file-name=${filename}&file-type=${type}`);
        xhr.setRequestHeader('Access-Control-Allow-Headers', '*');
        xhr.setRequestHeader('Content-type', 'application/json');
        // xhr.setRequestHeader('Content-type', 'multipart/form-data');
        xhr.setRequestHeader('Access-Control-Allow-Origin', '*');
        xhr.setRequestHeader('X-Amz-ACL', 'public-read') //added
        xhr.setRequestHeader('Content-Type', type) //added
        xhr.onreadystatechange = () => {
            if (xhr.readyState === 4) {
                if (xhr.status === 200) {
                    const response = JSON.parse(xhr.responseText);
                    alert(JSON.stringify(response.signedRequest, response.url))
                    // uploadFile(file, response.signedRequest, response.url);
                    // this.setState({imagename:file.name})
                    const xhr2 = new XMLHttpRequest();
                    xhr2.open('PUT', response.signedRequest);
                    xhr2.setRequestHeader('Access-Control-Allow-Headers', '*');
                    xhr2.setRequestHeader('Content-type', 'application/json');
                    // xhr2.setRequestHeader('Content-type', 'multipart/form-data');
                    xhr2.setRequestHeader('Access-Control-Allow-Origin', '*');
                    // xhr2.setRequestHeader('X-Amz-ACL', 'public-read') //added
                    xhr2.setRequestHeader('Content-Type', type) //added
                    xhr2.onreadystatechange = () => {
                        if (xhr2.readyState === 4) {
                            if (xhr2.status === 200) {
                                alert("successful upload ")
                            }
                            else {
                                // alert('Could not upload file.');
                                var error = new Error(xhr.responseText)
                                error.code = xhr.status;
                                for (var key in response) error[key] = response[key]
                                alert(error)
                            }
                        }
                    };
                    xhr2.send(result.base64)
                }
                else {
                    alert('Could not get signed URL.');
                }
            }
        };
        xhr.send();
    }
};
Then, somewhere in the render method:
<View style={{ flex: 1, alignItems: 'center', justifyContent: 'center' }}>
    <Button title="Pick an image from camera roll" onPress={pickImage} />
    {image && <Image source={{ uri: image }} style={{ width: 200, height: 200 }} />}
</View>
Hope it helps anyone who doesn't want sleepless nights like me.
import React from 'react'
import { Button, SafeAreaView } from 'react-native'
import { launchImageLibrary } from 'react-native-image-picker'
import axios from 'axios' // missing import added

const Home = () => {
    const getImageFromLibrary = async () => {
        const result = await launchImageLibrary()
        const { type, uri } = result.assets[0]

        // Read the local file as a blob via XHR (React Native's fetch can't do this)
        const blob = await new Promise((resolve, reject) => {
            const xhr = new XMLHttpRequest()
            xhr.onload = function () {
                resolve(xhr.response)
            }
            xhr.onerror = function () {
                reject(new TypeError('Network request failed'))
            }
            xhr.responseType = 'blob'
            xhr.open('GET', uri, true)
            xhr.send(null)
        })

        // Send your blob off to the presigned url
        const res = await axios.put(presignedUrl, blob)
    }

    return (
        <SafeAreaView>
            <Button onPress={getImageFromLibrary} title="Get from library" />
        </SafeAreaView>
    )
}

export default Home
Your BE that creates the pre-signed url can look something like this (pseudo code):
const { getSignedUrl } = require('@aws-sdk/s3-request-presigner')
const { S3Client, PutObjectCommand } = require('@aws-sdk/client-s3')
const { v4: uuidv4 } = require('uuid') // missing import added
const moment = require('moment')       // missing import added

const BUCKET_NAME = process.env.BUCKET_NAME
const REGION = process.env.AWS_REGION

const s3Client = new S3Client({
    region: REGION
})

const body = JSON.parse(request.body)
const { type } = body

const uniqueName = uuidv4()
const date = moment().format('MMDDYYYY')
const fileName = `${uniqueName}-${date}`

const params = {
    Bucket: BUCKET_NAME,
    Key: fileName,
    ContentType: type
}

try {
    const command = new PutObjectCommand(params)
    const signedUrl = await getSignedUrl(s3Client, command, {
        expiresIn: 60
    })
    response.send({ url: signedUrl, fileName })
} catch (err) {
    console.log('ERROR putPresignedUrl : ', err)
    response.send(err)
}
I am using aws-sdk v3, which is nice because the packages are smaller. I create a filename on the BE and send it to the FE. For the params, you don't need anything other than those 3. Also, I never did anything with CORS, and my bucket is completely private. Again, the BE code is pseudo-code-ish, so you will need to edit a few spots.
Lastly, trying to use the native fetch doesn't work; it's not the same fetch you use in React. Use an XHR request like I showed, otherwise you cannot create a blob.
First, install these two libraries; then convert the image into base64, then into an ArrayBuffer, and upload it:
import RNFS from 'react-native-fs';
import {decode} from 'base64-arraybuffer';

try {
    RNFS.readFile(fileUri, 'base64').then(data => {
        const arrayBuffer = decode(data);
        axios
            .put(sThreeApiUrl.signedUrl, arrayBuffer, {
                headers: {
                    'Content-Type': 'image/jpeg',
                    'Content-Encoding': 'base64',
                },
            })
            .then(res => {
                if (res.status == 200) {
                    console.log('image is uploaded successfully');
                }
            });
    });
} catch (error) {
    console.log('this is error', error);
}