AWS Cognito users list : Lambda

I am working on a Node application that uses AWS. I want to get all Cognito users, but per the docs, listUsers returns at most the first 60 users, and I want all of them. Can you assist me with this? The docs mention passing PaginationToken (string), but I don't know what to pass in it.
Here is what I have done so far:
const AWS = require('aws-sdk');

exports.handler = (event, context, callback) => {
    const requestBody = JSON.parse(event.body);
    var params = {
        "UserPoolId": "****************",
        "Limit": 60,
        "PaginationToken": (what to pass here????),
    };
    const cognitoidentityserviceprovider = new AWS.CognitoIdentityServiceProvider();
    cognitoidentityserviceprovider.listUsers(params, (err, data) => {
        if (err) {
            callback(null, { headers: { "Content-Type": "application/json", "Access-Control-Allow-Origin": "*" }, body: JSON.stringify({ statusCode: 405, data: err }) });
        } else {
            console.log(data);
            let userdata = [];
            for (let i = 0; i < data.Users.length; i++) {
                // console.log(data.Users[i].Attributes);
                userdata.push(getAttributes(data.Users[i].Attributes));
            }
            callback(null, { headers: { "Content-Type": "application/json", "Access-Control-Allow-Origin": "*" }, body: JSON.stringify({ statusCode: 200, data: userdata }) });
        }
    });
};

function getAttributes(attributes) {
    let jsonObj = {};
    attributes.forEach((obj) => {
        jsonObj[obj.Name] = obj.Value;
    });
    return jsonObj;
}

In the response you should see a property called PaginationToken. If you make the same call again but include that value in your params, you will receive the next 60 users. Here's the concept:
cognitoidentityserviceprovider.listUsers(params, (err, data) => {
    // data.Users is the first 60 users
    params.PaginationToken = data.PaginationToken;
    cognitoidentityserviceprovider.listUsers(params, (err, data) => {
        // data.Users is the next 60 users
    });
});
You might want to consider switching to promises and async/await if your environment supports it. That would make this code easier to read and write.
const data = await cognitoidentityserviceprovider.listUsers(params).promise();
params.PaginationToken = data.PaginationToken;
const data2 = await cognitoidentityserviceprovider.listUsers(params).promise();
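Putting it together, you can loop until the response no longer contains a PaginationToken to collect every user. A minimal sketch, assuming the cognitoidentityserviceprovider client and params from above (listAllUsers is just an illustrative helper name):
// Minimal sketch: follow PaginationToken until it is absent.
async function listAllUsers(baseParams) {
    const users = [];
    let token;
    do {
        const params = { ...baseParams };
        if (token) {
            params.PaginationToken = token;
        }
        const data = await cognitoidentityserviceprovider.listUsers(params).promise();
        users.push(...data.Users);
        token = data.PaginationToken;
    } while (token);
    return users;
}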

Related

Handling multipart/form-data in AWS Lambda

I'm trying to send a request with an image to a Lambda function through API Gateway.
I'm using this piece of code to parse the form-data object received by my Lambda function. I then upload the image to S3, but when I download and open the image from S3, I see that it's corrupt.
I have tried the following npm packages:
parse-multipart
parse-multipart-data
Neither works, because I get an empty parts array. The piece of code I use does get results into the array, but the buffers seem to be corrupt.
The problem seems to be in this line of code:
Buffer.from(item.slice(item.search(/Content-Type:\s.+/g) + item.match(/Content-Type:\s.+/g)[0].length + 4, -4), 'binary')
Does anyone have a solution for me?
You can use busboy to parse the multipart form data before uploading to S3 as shown below:
// This code uses ES module (ESM) syntax, not CommonJS.
// Make sure you add "type": "module" to your package.json.
import {S3Client, PutObjectCommand} from '@aws-sdk/client-s3';
import Busboy from 'busboy';

// Initialize the client outside the function handler to take advantage of execution environment reuse.
const s3Client = new S3Client({region: process.env.AWS_REGION});

// Function handler.
export async function handler(event) {
    const {
        ContentType: contentType = '',
        File: file = '',
    } = await FORM.parse(event['body'], event['headers']);
    try {
        // Adds an object to a bucket. We must have `WRITE` permissions on the bucket to add an object to it.
        await s3Client.send(new PutObjectCommand({
            Body: file,
            Bucket: 'BUCKET_NAME',
            ContentType: contentType,
            Key: 'SOME_KEY',
        }));
        return {
            isBase64Encoded: false,
            statusCode: 200,
            body: JSON.stringify({
                message: 'Everything is gonna be alright.',
            }),
        };
    } catch (e) {
        return {
            isBase64Encoded: false,
            statusCode: 404,
            body: JSON.stringify(e),
        };
    }
}

const FORM = {
    parse(body, headers) {
        return new Promise((resolve, reject) => {
            const data = {};
            // Assumes API Gateway delivered the body base64-encoded.
            const buffer = Buffer.from(body, 'base64');
            const bb = Busboy({
                headers: Object.keys(headers).reduce((newHeaders, key) => {
                    // busboy expects lower-case headers.
                    newHeaders[key.toLowerCase()] = headers[key];
                    return newHeaders;
                }, {}),
                limits: {
                    fileSize: 10485760, // Set as desired.
                    files: 1,
                },
            });
            bb.on('file', (name, stream, info) => {
                const chunks = [];
                stream.on('data', (chunk) => {
                    if (name === 'File') {
                        chunks.push(chunk);
                    }
                }).on('limit', () => {
                    reject(new Error('File size limit has been reached.'));
                }).on('close', () => {
                    if (name === 'File') {
                        data[name] = Buffer.concat(chunks);
                        data['ContentType'] = info.mimeType;
                    }
                });
            });
            bb.on('error', (err) => {
                reject(err);
            });
            bb.on('close', () => {
                resolve(data);
            });
            bb.end(buffer);
        });
    },
};
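One assumption in this code is worth calling out: Buffer.from(body, 'base64') only yields valid bytes if API Gateway actually delivered the body base64-encoded, which requires binary media types to be configured on the API. Corrupt files are often a symptom of that configuration being missing. A more defensive sketch reads the isBase64Encoded flag from the Lambda proxy event:
// Sketch: decode according to what API Gateway reports instead of
// assuming base64; 'binary' (latin1) is a common fallback for raw bodies.
const buffer = Buffer.from(
    event.body,
    event.isBase64Encoded ? 'base64' : 'binary'
);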

AWS - PresignedUrl Upload Error on Browser, Works in Postman

I'm trying to upload files to my S3 bucket via a presigned-URL Lambda function. Everything works fine via Postman, but the browser-based application fails with "SignatureDoesNotMatch".
My Lambda function's region is ap-southeast-1, but a similar function works fine in ap-south-1 (which is in the same timezone as mine). Any idea why this is happening? Could it have anything to do with the timezone difference between the server and client?
Please see my code below:
<script>
    $(document).one('submit', '#memberForm', function (e) {
        e.preventDefault();
        $.get("<FUNCTION URL>", function (data) {
            var getUrl = data.uploadURL;
            var fileName = data.fileName;
            var theFormFile = $('#fileLogo').get()[0].files[0];
            if (theFormFile != null) {
                console.log(theFormFile);
                $.ajax({
                    type: 'PUT',
                    url: getUrl,
                    contentType: 'binary/octet-stream',
                    processData: false,
                    crossDomain: true,
                    data: theFormFile,
                    success: function () {
                        alert('Yeehaaaw');
                    },
                    error: function (e) {
                        console.log(e);
                        alert('File NOT uploaded');
                        console.log(arguments);
                    }
                });
            } else {
                $("#memberForm").submit();
            }
        });
        return false;
    });
</script>
My code for URL generation is as below:
'use strict'
const AWS = require('aws-sdk')
AWS.config.update({ region: process.env.AWS_REGION || 'ap-southeast-1' })
const s3 = new AWS.S3()

// Main Lambda entry point
exports.handler = async (event) => {
    console.log("execution started")
    var contentType = event["queryStringParameters"]['contentType']
    var path = event["queryStringParameters"]['path']
    const result = await getUploadURL(contentType, path)
    console.log('Result: ', result)
    return result
}

const getContentType = function (contentType) {
    switch (contentType) {
        case "png":
            return "image/png"
        case "jpg":
            return "image/jpeg"
        case "pdf":
            return "application/pdf"
        default:
            return "application/json"
    }
}

const getExtension = function (contentType) {
    switch (contentType) {
        case "png":
            return "png"
        case "jpg":
            return "jpg"
        case "pdf":
            return "pdf"
        default:
            return `${contentType}`
    }
}

const getUploadURL = async function (contentType, path) {
    console.log(`Content type is ${contentType}`)
    const actionId = parseInt(Math.random() * 10000000)
    var type = getContentType(contentType);
    var ext = getExtension(contentType);
    const s3Params = {
        Bucket: process.env.UploadBucket,
        Key: `${path}/${actionId}.${ext}`,
        ContentType: type, // Update to match whichever content type you need to upload
        ACL: 'public-read', // Enable this setting to make the object publicly readable - only works if the bucket supports public objects
        Expires: 300
    }
    console.log('getUploadURL: ', s3Params)
    return new Promise((resolve, reject) => {
        // Get signed URL
        resolve({
            "statusCode": 200,
            "isBase64Encoded": false,
            "headers": {
                "Access-Control-Allow-Origin": "*"
            },
            "body": JSON.stringify({
                "uploadURL": s3.getSignedUrl('putObject', s3Params),
                "fileName": `${actionId}.${ext}`
            })
        })
    })
}
Again, the same request works when I try it with Postman.
I resolved this by adding the signature version:
const s3 = new AWS.S3({
    signatureVersion: 'v4'
});
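Pinning signatureVersion: 'v4' makes URL generation consistent; the SDK's default signature version for S3 can differ by region, which matches the symptom of the same code working in ap-south-1 but not ap-southeast-1. A separate pitfall worth checking: the URL above is signed with a specific ContentType, so the browser's PUT must send exactly that Content-Type, while the jQuery snippet sends binary/octet-stream. Such a mismatch also produces SignatureDoesNotMatch. A sketch of a matching client call (image/png stands in for whatever ContentType the Lambda signed with):
// Sketch: the Content-Type of the PUT must match the ContentType
// passed to getSignedUrl on the server ('image/png' is an example).
$.ajax({
    type: 'PUT',
    url: getUrl,
    contentType: 'image/png',
    processData: false,
    data: theFormFile,
});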

Why is my S3 createPresignedPost not respecting the content-length-range condition

I am trying to post a file to S3 using createPresignedPost. The file posts to my bucket, but the file size constraint is not respected. Here is my code; the file upload is a base64-encoded string.
function postObjectSignedUrl(req) {
    const key = `${req + "/" + uuid.v4()}`;
    return new Promise(function (resolve, reject) {
        const params = {
            Bucket: 'base',
            Expires: 60 * 60, // in seconds
            Fields: {
                key: key,
            },
            conditions: [
                ['content-length-range', 0, 1000000]
            ]
        }
        s3.createPresignedPost(params, (err, data) => {
            if (err) {
                reject(err)
            } else {
                resolve(data);
            }
        })
    })
}
My client side code is the following:
var data = new FormData();
const getUrl = await getSignedUrl();
const keys = getUrl["fields"];
$.each(keys, function (key, value) {
    data.append(key, value);
});
data.append("file", profilePic);
try {
    const result = await fetch(getUrl["url"], {
        method: "POST",
        mode: "cors",
        headers: {
            'Access-Control-Allow-Origin': '*',
        },
        body: data
    })
    if (result.status === 204) {
    }
} catch (err) {
    console.log(err, " error ")
}
Param attributes in the Node.js SDK are normally upper camel case, so you have to change "conditions" to "Conditions".
By the way, you can simplify your URL generator code as follows :)
function postObjectSignedUrl(req) {
    const key = `${req + "/" + uuid.v4()}`;
    const params = {
        Bucket: 'base',
        Expires: 60 * 60, // in seconds
        Fields: {
            key: key,
        },
        Conditions: [
            ['content-length-range', 0, 1000000]
        ]
    }
    // createPresignedPost returns the post data synchronously when called
    // without a callback; it does not return an AWS.Request, so there is
    // no .promise() here.
    return s3.createPresignedPost(params);
}
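With Conditions spelled correctly, createPresignedPost returns a plain { url, fields } object, and S3 itself enforces the range: a POST whose file exceeds 1000000 bytes is rejected with an HTTP 400 EntityTooLarge error instead of being stored. A small usage sketch:
// Sketch: the return value has the form { url, fields }.
const presigned = postObjectSignedUrl('profile-pics');
console.log(presigned.url);    // endpoint to POST the form to
console.log(presigned.fields); // hidden fields, including the signed policy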
Regards,

AWS SES send email lambda not sending every time

I want to send emails using SES from AWS Lambda. The problem is that the email is only sent some of the time, with the same code. We don't get errors.
Here's the code:
const AWS = require('aws-sdk');
var ses = new AWS.SES();

exports.handler = async (event, context, callback) => {
    context.callbackWaitsForEmptyEventLoop = false;
    await new Promise((resolve, reject) => {
        var params = {
            Destination: {
                ToAddresses: [myEmail]
            },
            Message: {
                Body: {
                    Text: { Data: "Test" }
                },
                Subject: { Data: "Test Email" }
            },
            Source: "sourceMail"
        };
        ses.sendEmail(params, function (err, data) {
            if (err) {
                console.log(err);
                context.fail(err);
            } else {
                console.log(data);
                context.succeed(event);
            }
            callback(null, { err: err, data: data });
        });
    });
}
I would be careful with callbackWaitsForEmptyEventLoop, as it can lead to unexpected results (if it is false, any outstanding events continue to run during the next invocation).
Can you try using this simplified version:
const AWS = require('aws-sdk');
var ses = new AWS.SES();

exports.handler = async (event) => {
    const params = {
        Destination: {
            ToAddresses: [myEmail],
        },
        Message: {
            Body: {
                Text: { Data: 'Test' },
            },
            Subject: { Data: 'Test Email' },
        },
        Source: 'sourceMail',
    };
    await ses.sendEmail(params).promise();
    return event;
};
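The likely culprit in the original code is that its wrapping Promise never resolves, so with callbackWaitsForEmptyEventLoop set to false the execution environment can be frozen before sendEmail's callback runs; awaiting .promise() as above avoids that. If you also want failures to show up clearly in the logs rather than vanish, a sketch:
// Sketch: log the accepted message ID and let errors fail the invocation.
try {
    const result = await ses.sendEmail(params).promise();
    console.log('SES accepted message:', result.MessageId);
} catch (err) {
    console.error('SES sendEmail failed:', err);
    throw err; // Lambda records the invocation as failed
}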

DynamoDB - The provided key element does not match the schema

I have a DynamoDB table with the attributes userId, propertyInfo, and propertyId. userId is the primary index. When I use the following Lambda code to update (PUT) an item in the table, I get "The provided key element does not match the schema".
const AWS = require('aws-sdk'); // eslint-disable-line import/no-extraneous-dependencies
const dynamoDb = new AWS.DynamoDB.DocumentClient();

module.exports.update = (event, context, callback) => {
    const timestamp = new Date().getTime();
    const data = JSON.parse(event.body);
    const params = {
        TableName: process.env.DYNAMODB_TABLE,
        Key: {
            propertyId: event.pathParameters.id,
        },
        ExpressionAttributeNames: {
            '#new_propertyInfo': 'propertyInfo',
        },
        ExpressionAttributeValues: {
            ':propertyInfo': data.propertyInfo,
        },
        UpdateExpression: 'SET #new_propertyInfo = :propertyInfo',
        ReturnValues: 'ALL_NEW',
    };
    dynamoDb.update(params, (error, result) => {
        // handle potential errors
        if (error) {
            console.error(error);
            callback(null, {
                statusCode: error.statusCode || 501,
                headers: { 'Content-Type': 'text/plain' },
                body: 'Couldn\'t fetch the item.',
            });
            return;
        }
        // create a response
        const response = {
            statusCode: 200,
            body: JSON.stringify(result.Attributes),
        };
        callback(null, response);
    });
};
Body of my update request is:
{
    "propertyInfo": {
        "houseNumber": 2000,
        "street": "easy st"
    }
}
The event.pathParameters.id is obtained from /property/{id}. I need this id to look the property up by propertyId; the userId is needed for authorization purposes. For both the search and the update I need to go by propertyId. Could someone explain what I need to do to set this up correctly?
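The error message is specific: the Key passed to DocumentClient.update must exactly match the table's primary key schema, and this table's partition key is userId, not propertyId. One common pattern is to query a global secondary index on propertyId and then update by the real key. A sketch, assuming a hypothetical GSI named propertyId-index on propertyId (you would need to create it):
// Sketch: find the item via a GSI on propertyId, then update it by its
// real key (userId). 'propertyId-index' is a hypothetical index name.
const found = await dynamoDb.query({
    TableName: process.env.DYNAMODB_TABLE,
    IndexName: 'propertyId-index',
    KeyConditionExpression: 'propertyId = :pid',
    ExpressionAttributeValues: { ':pid': event.pathParameters.id },
}).promise();

if (found.Items.length > 0) {
    await dynamoDb.update({
        TableName: process.env.DYNAMODB_TABLE,
        Key: { userId: found.Items[0].userId }, // must match the table schema
        UpdateExpression: 'SET #new_propertyInfo = :propertyInfo',
        ExpressionAttributeNames: { '#new_propertyInfo': 'propertyInfo' },
        ExpressionAttributeValues: { ':propertyInfo': data.propertyInfo },
        ReturnValues: 'ALL_NEW',
    }).promise();
}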