Error with AWS Lambda salt and hash

I've been using source code from AWS Lambda in Action (Poccia) to create users in a User Pool and Identity Pool. I keep getting the error:
Response:
{
"errorMessage": "RequestId: f6511085-f22c-11e7-be27-534dfc5d6456 Process exited before completing request"
}
Request ID:
"f6511085-f22c-11e7-be27-534dfc5d6456"
Function Logs:
START RequestId: f6511085-f22c-11e7-be27-534dfc5d6456 Version: $LATEST
2018-01-05T15:27:38.890Z f6511085-f22c-11e7-be27-534dfc5d6456 TypeError: Pass phrase must be a buffer
at TypeError (native)
at pbkdf2 (crypto.js:576:20)
at Object.exports.pbkdf2 (crypto.js:558:10)
at computeHash (/var/task/lib/cryptoUtils.js:10:10)
at InternalFieldObject.ondone (/var/task/lib/cryptoUtils.js:19:4)
END RequestId: f6511085-f22c-11e7-be27-534dfc5d6456
REPORT RequestId: f6511085-f22c-11e7-be27-534dfc5d6456 Duration: 113.62 ms Billed Duration: 200 ms Memory Size: 128 MB Max Memory Used: 33 MB
RequestId: f6511085-f22c-11e7-be27-534dfc5d6456 Process exited before completing request
I'm new to AWS services and am not sure why this error is occurring. Below is the Lambda function I'm attempting to use, followed by the cryptoUtils.js script it references.
console.log('Loading function');

// Loading standard modules, such as crypto and the AWS SDK
var AWS = require('aws-sdk');
var crypto = require('crypto');
var cryptoUtils = require('./lib/cryptoUtils.js'); // Loading the cryptoUtils.js shared code, included in the uploaded ZIP archive
var config = require('./config.json'); // Loading the configuration in the config.json file, included in the uploaded ZIP archive

var dynamodb = new AWS.DynamoDB({
    accessKeyId: 'usingKEYfromIAM',
    secretAccessKey: 'usingKEYfromIAM',
}); // Getting the Amazon DynamoDB service object
var ses = new AWS.SES(); // Getting the Amazon SES service object

function storeUser(email, password, salt, fn) { // The storeUser() function stores the new user in the DynamoDB table.
    var len = 128;
    crypto.randomBytes(len, function(err, token) { // A random token sent in the validation email and used to validate a user
        if (err) return fn(err);
        token = token.toString('hex');
        dynamodb.putItem({ // Putting an item in the DynamoDB table
            TableName: config.DDB_TABLE, // The table name is taken from the config.json configuration file.
            // Most of the data is string ("S"), but the "verified" attribute is Boolean ("BOOL");
            // new users aren't verified (false), and the randomly generated token is stored in the "verifyToken" attribute
            Item: {
                email: {
                    S: email
                },
                passwordHash: {
                    S: password
                },
                passwordSalt: {
                    S: salt
                },
                verified: {
                    BOOL: false
                },
                verifyToken: {
                    S: token
                }
            },
            ConditionExpression: 'attribute_not_exists (email)' // This condition avoids overwriting existing users (with the same email).
        }, function(err, data) {
            if (err) return fn(err);
            else fn(null, token); // The storeUser() function returns the randomly generated token.
        });
    });
}

function sendVerificationEmail(email, token, fn) { // The sendVerificationEmail() function sends the verification email to the new user.
    var subject = 'Verification Email for ' + config.EXTERNAL_NAME;
    // The verification link, to the verify.html page, passes the randomly generated token as a query parameter.
    var verificationLink = config.VERIFICATION_PAGE + '?email=' + encodeURIComponent(email) + '&verify=' + token;
    ses.sendEmail({ // Sending the email in HTML format
        Source: config.EMAIL_SOURCE,
        Destination: {
            ToAddresses: [
                email
            ]
        },
        Message: {
            Subject: {
                Data: subject
            },
            Body: {
                Html: {
                    Data: '<html><head>' + '<meta http-equiv="Content-Type" content="text/html; charset=UTF-8" />' +
                        '<title>' + subject + '</title>' + '</head><body>' + 'Please <a href="' + verificationLink +
                        '">click here to verify your email address</a> or copy & paste the following link in a browser:' +
                        '<br><br>' + verificationLink + '</body></html>'
                }
            }
        }
    }, fn);
}

exports.handler = (event, context, callback) => { // The function that's exported and can be invoked through AWS Lambda as createUser
    // Getting the input parameters (email, password) from the event
    var email = event.email;
    var clearPassword = event.password;
    // Using computeHash() from cryptoUtils.js to salt and hash the password.
    cryptoUtils.computeHash(clearPassword, function(err, salt, hash) {
        if (err) {
            callback('Error in hash: ' + err);
        } else {
            storeUser(email, hash, salt, function(err, token) { // Storing the user via the storeUser() function
                if (err) {
                    if (err.code == 'ConditionalCheckFailedException') { // Checking whether the database error is due to the email already being present in the database
                        // userId already found
                        callback(null, {
                            created: false
                        });
                    } else {
                        callback('Error in storeUser: ' + err);
                    }
                } else {
                    sendVerificationEmail(email, token, function(err, data) { // Sending the verification email
                        if (err) {
                            callback('Error in sendVerificationEmail: ' + err);
                        } else {
                            callback(null, {
                                created: true
                            });
                        }
                    });
                }
            });
        }
    });
};
var crypto = require('crypto');

function computeHash(password, salt, fn) {
    // Byte sizes: the larger the numbers, the better the security, but the longer it takes to complete
    var len = 512;
    var iterations = 4096;
    var digest = 'sha512';
    if (3 == arguments.length) {
        crypto.pbkdf2(password, salt, iterations, len, digest, function(err, derivedKey) {
            if (err) return fn(err);
            else fn(null, salt, derivedKey.toString('base64'));
        });
    } else {
        fn = salt;
        crypto.randomBytes(len, function(err, salt) {
            if (err) return fn(err);
            salt = salt.toString('base64');
            computeHash(password, salt, fn);
        });
    }
}
module.exports.computeHash = computeHash;
If anybody has any suggestions, or needs more information to help determine why the error is occurring, I would greatly appreciate it. Thank you.

Is the password you're passing a number?
If so, convert it to a string.
If you don't want to do that, you can pass a Buffer object instead, using the class method Buffer.from(string[, encoding]):
https://nodejs.org/api/buffer.html#buffer_class_method_buffer_from_string_encoding
crypto.pbkdf2(Buffer.from(password, 'utf8'), salt, iterations, len, digest, function(err, derivedKey) {
    if (err) return fn(err);
    else fn(null, salt, derivedKey.toString('base64'));
});
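For instance, a minimal sketch of the first option (coercing to a string), applied at the call site in the question's handler; the event shape is assumed from the question:
// Hypothetical tweak to the handler: coerce the event's password to a
// string before hashing, in case the client sent it as a number.
var clearPassword = String(event.password);
cryptoUtils.computeHash(clearPassword, function(err, salt, hash) {
    if (err) return callback('Error in hash: ' + err);
    // ... store the user with storeUser() as before
});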
Hope it helps!

var crypto = require('crypto');

function computeHash(password, salt, fn) {
    // Byte sizes. The larger the numbers, the better the security, but the longer it will take to complete
    var len = 512;
    var iterations = 4096;
    var digest = 'sha512';
    if (3 == arguments.length) {
        crypto.pbkdf2(Buffer.from(password, 'utf8'), salt, iterations, len, digest, function(err, derivedKey) {
            if (err) return fn(err);
            else fn(null, salt, derivedKey.toString('base64'));
        });
    } else {
        fn = salt;
        crypto.randomBytes(len, function(err, salt) {
            if (err) return fn(err);
            salt = salt.toString('base64');
            computeHash(password, salt, fn);
        });
    }
}
module.exports.computeHash = computeHash;

Error "TypeError: Pass phrase must be a buffer"
does suggest trying a Buffer conversion of input String
Before your test of Buffer.from(password, 'utf8')
did you verify input value is encoding 'utf8',
and not some other encoding such as base64 or latin1 ?
List of encodings that Node.js supports
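As a quick sanity check (a sketch, not from the original answer), you could normalize whatever value arrives before handing it to pbkdf2 inside computeHash:
// Hypothetical guard: accept Buffers as-is; otherwise coerce the value
// to a string and encode it explicitly as UTF-8.
var passphrase = Buffer.isBuffer(password)
    ? password
    : Buffer.from(String(password), 'utf8');
crypto.pbkdf2(passphrase, salt, iterations, len, digest, function(err, derivedKey) {
    if (err) return fn(err);
    fn(null, salt, derivedKey.toString('base64'));
});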

Related

Flutter AWS Amplify not returning data when calling GraphQL API

On button click I call a GraphQL API that is connected to a Lambda function, and the function pulls data from a DynamoDB table. The query does not produce any error, but it doesn't give me any results either. I have also checked the CloudWatch logs and I don't see any traces of the function being called. I'm not sure what careless mistake I am making here.
Here is my API call:
void findUser() async {
  try {
    String graphQLDocument = '''query getUserById(\$userId: ID!) {
      getUserById(userId: \$id) {
        id
        name
      }
    }''';
    var operation = Amplify.API.query(
        request: GraphQLRequest<String>(
            document: graphQLDocument,
            variables: {'id': 'USER-14160000000'}));
    var response = await operation.response;
    var data = response.data;
    print('Query result: ' + data);
  } on ApiException catch (e) {
    print('Query failed: $e');
  }
}
Here is my Lambda function:
const getUserById = require('./user-queries/getUserById');

exports.handler = async (event) => {
    var userId = event.arguments.userId;
    var name = event.arguments.name;
    var avatarUrl = event.arguments.avatarUrl;
    //console.log('Received Event - ', JSON.stringify(event,3));
    console.log(userId);
    switch (event.info.fieldName) {
        case "getUserById":
            return getUserById(userId);
    }
};

const AWS = require('aws-sdk');
const docClient = new AWS.DynamoDB.DocumentClient({region: 'ca-central-1'});

async function getUserById(userId) {
    const params = {
        TableName: "Bol-Table",
        KeyConditionExpression: 'pk = :hashKey and sk = :sortKey',
        ExpressionAttributeValues: {
            ':hashKey': userId,
            ':sortKey': 'USER'
        }
    };
    try {
        const Item = await docClient.query(params).promise();
        console.log(Item);
        return {
            id: Item.Items[0].pk,
            name: Item.Items[0].details.displayName,
            avatarUrl: Item.Items[0].details.avatarUrl,
            createdAt: Item.Items[0].details.createdAt,
            updatedAt: Item.Items[0].details.updatedAt
        };
    } catch(err) {
        console.log("BOL Error: ", err);
    }
}
module.exports = getUserById;
Upon button click I get this
Moving my comment to an answer:
Can you try changing your graphQLDocument to
String graphQLDocument = '''query getUserById(\$id: ID!) {
  getUserById(userId: \$id) {
    id
    name
  }
}''';
Your variable is $userId and then $id. Try calling it $id in both places, like in your variables object.
Your Flutter code is working fine, but the Lambda on the AWS side is returning a blank string (""), which is why nothing gets printed.
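One way to surface the underlying failure (a sketch, assuming the getUserById module from the question) is to rethrow instead of swallowing the DynamoDB error, so AppSync returns the error rather than a blank result:
// Hypothetical change inside getUserById's try/catch:
try {
    const Item = await docClient.query(params).promise();
    console.log(Item);
    return {
        id: Item.Items[0].pk,
        name: Item.Items[0].details.displayName
    };
} catch (err) {
    console.log("BOL Error: ", err);
    throw err; // let AppSync report the failure instead of returning undefined
}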

Lambda function is not triggered for all the S3 image uploads

I have an S3 bucket as below,
myBucket
a/
b/
c/
where myBucket is an S3 bucket and a, b, c are the key folders (prefixes) inside that bucket.
I will upload images into a/. The S3 event notification will trigger SQS, which will then trigger a Lambda function that removes the image background and uploads the result into the b/ folder.
The problem here is, for example, if I upload a folder with around 26 images to S3, the Lambda is only triggered for 22 or 23 of them, and only those images get processed.
For some reason S3 is not triggering the Lambda for all the images, or is that something I should configure in my Lambda function?
Here is my function code
exports.handler = async (event, context, callback) => {
    try {
        console.log(event.Records[0]);
        var json = JSON.parse(event.Records[0]['body']);
        console.log('json: ' + json);
        json = JSON.parse(json['Message']);
        json = json['Records'][0]['s3'];
        console.log(json);
        var srcBucket = json['bucket']['name'];
        console.log('srcBucket: ' + srcBucket);
        var srcKey = decodeURIComponent(json['object']['key'].replace(/\+/g, ' '));
        console.log('srcKey: ' + srcKey);
        var str = (srcKey.split('/').pop()).split('.')[0];
        console.log('str: ' + str);
        if (str != '') {
            var folderPath = srcKey.substr(srcKey.indexOf('/') + 1).split('.')[0];
            folderPath = folderPath.substring(0, folderPath.lastIndexOf('/'));
            console.log('folderPath: ' + folderPath);
            const params1 = { Bucket: srcBucket, Key: srcKey };
            var origimage = await s3.getObject(params1).promise();
            var destObject = await origimage.Body;
            var destKey = 'removebg/' + folderPath + '/' + str + '.jpg';
            var options = {
                'method': 'POST',
                'url': 'https://api.remove.bg/v1.0/removebg',
                'headers': {
                    'X-Api-Key': 'xxxxxxxxxxx'
                },
                formData: {
                    'image_file': destObject,
                    'size': 'auto'
                },
                encoding: null
            };
            request(options, function(error, response, body) {
                if (error) {
                    console.log(error);
                    sendmessage(error, 'Error removing image background', arn, srcBucket + '/' + srcKey, destBucket + destKey);
                }
                var params = { Bucket: destBucket, Key: destKey, Body: body };
                s3.upload(params, function(err, data) {
                    if (err) {
                        console.log('Error uploading data: ', err);
                        sendmessage(err, 'Error uploading transparent image to s3', arn, srcBucket + '/' + srcKey, destBucket + destKey);
                    }
                    else { console.log('Successfully uploaded data to ' + destBucket); }
                });
            });
        }
    }
    catch (e) {
        console.log(e);
    }
    callback(null, 'All done!');
};
Please let me know. Thanks in advance.
I think your problem lies in these lines:
console.log(event.Records[0]);
var json = JSON.parse(event.Records[0]['body']);
console.log('json: ' + json);
json = JSON.parse(json['Message']);
json = json['Records'][0]['s3'];
Your function is only looking at the first record provided to it. Multiple events can be given to the Lambda function in a single invocation, so your function should loop through the Records entries and process all of the events that are provided.
It should do something like:
for (const record of event.Records) {
    console.log(record);
    var json = JSON.parse(record['body']);
    console.log('json: ' + json);
    json = JSON.parse(json['Message']);
    json = json['Records'][0]['s3'];
    // ... process this record's S3 object as before
}
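A fuller sketch of the same idea with async/await, assuming the SQS-wrapping-SNS message shape from the question's code:
// Minimal sketch: process every record in the batch, not just Records[0].
exports.handler = async (event) => {
    await Promise.all(event.Records.map(async (record) => {
        const body = JSON.parse(record.body);
        const message = JSON.parse(body.Message);
        for (const s3Record of message.Records) {
            const srcBucket = s3Record.s3.bucket.name;
            const srcKey = decodeURIComponent(s3Record.s3.object.key.replace(/\+/g, ' '));
            console.log('Processing ' + srcBucket + '/' + srcKey);
            // ... existing remove.bg and upload logic for this object
        }
    }));
    return 'All done!';
};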

Session leak on Spanner insert from Cloud Function

I'm trying to insert data into Spanner through a Cloud Function, using a POST request. I think I'm doing everything as described in the documentation, and I just can't understand what causes the following error:
"Error: 1 session leak(s) detected.
at _requests.onIdle.then (/srv/node_modules/@google-cloud/spanner/build/src/session-pool.js:193:25)
at <anonymous>"
And here is my Cloud Function:
const {Spanner} = require('@google-cloud/spanner');

module.exports.http = (req, res) => {
    const projectId = 'project-id';
    const instanceId = 'instance-id';
    const databaseId = 'database-id';
    const spanner = new Spanner({
        projectId: projectId,
    });
    const instance = spanner.instance(instanceId);
    const database = instance.database(databaseId);
    let sqlResponse = "";
    database.runTransaction(async (err, transaction) => {
        if (err) {
            res.status(500).send(JSON.stringify({message: err, requestBody: req.body}));
            return;
        }
        try {
            const data = req.body;
            const [rowCount] = await transaction.runUpdate({
                sql:
                    'INSERT Feedbacks (age, comment, gender, rating) VALUES (@age, @comment, @gender, @rating)',
                params: {
                    age: data.age.toString(),
                    comment: data.comment,
                    gender: data.gender,
                    rating: data.rating.toString(),
                },
            });
            sqlResponse = 'Successfully inserted ' + rowCount + ' record into the Feedbacks table.';
            await transaction.commit();
            res.status(200).send(JSON.stringify({message: sqlResponse, requestBody: req.body}));
        } catch (err) {
            res.status(500).send(JSON.stringify({message: err, requestBody: req.body}));
        } finally {
            database.close();
        }
    });
};
Your code appears to be correct. As noted by @Mayeru in the comments on your question, the first thing to confirm is that you're inserting a new record with a unique value for the column that is your table's primary key.
Another possibility is that you are testing the function through the "Testing" UI of the Cloud Console's Cloud Functions > "Function details" section. If so, you may be using an empty or malformed request body when you click the "Test the function" button. In the "Triggering event" textarea that appears above the "Test the function" button, make sure you have entered a valid JSON request body that includes the elements and values your INSERT statement expects.
For example, a "Triggering event" JSON request body like the following should work:
{"singerId":"1001","firstName":"Test","lastName":"Singer"}
Using the following "nodeInsert" function that's similar to the code you've shared:
const {Spanner} = require('@google-cloud/spanner');

module.exports.nodeInsert = (req, res) => {
    const projectId = 'my-project';
    const instanceId = 'my-instance';
    const databaseId = 'my-database';
    const spanner = new Spanner({
        projectId: projectId,
    });
    const instance = spanner.instance(instanceId);
    const database = instance.database(databaseId);
    let sqlResponse = "";
    database.runTransaction(async (err, transaction) => {
        if (err) {
            res
                .status(500)
                .send(JSON.stringify({message: err, requestBody: req.body}));
            transaction.end();
            console.error('Transaction terminated.');
            return;
        }
        try {
            const data = req.body;
            const parsedSingerId = parseInt(data.singerId, 10);
            const [rowCount] = await transaction.runUpdate({
                sql:
                    'INSERT Singers (SingerId, FirstName, LastName) VALUES (@singerId, @firstName, @lastName)',
                params: {
                    singerId: parsedSingerId,
                    firstName: data.firstName,
                    lastName: data.lastName,
                },
            });
            sqlResponse = 'Successfully inserted ' + rowCount + ' record into the Singers table.';
            await transaction.commit();
            res
                .status(200)
                .send(JSON.stringify({message: sqlResponse, requestBody: req.body}));
        } catch (err) {
            res
                .status(500)
                .send(JSON.stringify({message: err, requestBody: req.body}));
            transaction.end();
            console.error('Transaction terminated.');
        } finally {
            database.close();
        }
    });
};
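The important difference from the original function is the transaction.end() call in both failure paths: ending a transaction that won't be committed releases its session back to the pool, which is what stops the "1 session leak(s) detected" warning.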

Cognito send confirmation email using custom email

Is there a way to send an email other than the one specified in the "Message customisation" tab of the Cognito user pool?
I would like to use a different email address based on some parameters.
E.g.
verification@my-service.com for verification emails
welcome@my-service.com for welcome emails
You can go to the general settings in Cognito, then click on Triggers. There you can select a Post Confirmation Lambda function (this example is in Node) to send the email. In the Lambda function you can make the subject whatever you like and change the From email address.
var aws = require('aws-sdk');
var ses = new aws.SES();

exports.handler = function(event, context) {
    console.log(event);
    if (event.request.userAttributes.email) {
        // Pull another attribute if you want
        sendEmail(event.request.userAttributes.email,
            "Congratulations " + event.userName + ", you have been registered!",
            function(status) {
                context.done(null, event);
            });
    } else {
        // Nothing to do, the user's email ID is unknown
        console.log("Failed");
        context.done(null, event);
    }
};

function sendEmail(to, body, completedCallback) {
    var eParams = {
        Destination: {
            ToAddresses: [to]
        },
        Message: {
            Body: {
                Text: {
                    Data: body
                }
            },
            Subject: {
                Data: "Welcome to My Service!"
            }
        },
        Source: "welcome@my-service.com"
    };
    var email = ses.sendEmail(eParams, function(err, data) {
        if (err) {
            console.log(err);
        } else {
            console.log("===EMAIL SENT===");
        }
        completedCallback('Email sent');
    });
    console.log("EMAIL CODE END");
};
You will also have to set up SES.
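For example, a minimal sketch of verifying the sender address (the address is taken from the answer above; assumes the aws-sdk v2 SES client):
var aws = require('aws-sdk');
var ses = new aws.SES();

// One-time setup: ask SES to email a verification link to the address
// you want to use as the "Source" of outgoing mail.
ses.verifyEmailIdentity({ EmailAddress: 'welcome@my-service.com' }, function(err, data) {
    if (err) console.log(err);
    else console.log('Verification email sent to welcome@my-service.com');
});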
If you want to handle all emails yourself, you can specify this with a CustomEmailSender Lambda. This trigger isn't currently available through the AWS Console, but you can specify it with the CLI or CDK/CloudFormation. See the docs here.
Those docs are pretty terrible though. The gist is that you'll be given a code property on the event, which is a base64-encoded blob that was encrypted with the KMS key you specified on your user pool. Depending on the triggering event, this is the verification code, temporary password, etc, generated by Cognito. Here's a simplified version of what my Lambda looks like:
import { buildClient, CommitmentPolicy, KmsKeyringNode } from '@aws-crypto/client-node';

const { decrypt } = buildClient(CommitmentPolicy.REQUIRE_ENCRYPT_ALLOW_DECRYPT);
const kmsKeyring = new KmsKeyringNode({
    keyIds: [process.env.COGNITO_EMAILER_KEY_ARN]
});

export async function lambdaHandler(event, context) {
    try {
        let payload = '';
        if (event.request.code) {
            const { plaintext, messageHeader } = await decrypt(
                kmsKeyring,
                Buffer.from(event.request.code, "base64")
            );
            if (event.userPoolId !== messageHeader.encryptionContext["userpool-id"]) {
                console.error("Encryption context does not match expected values!");
                return;
            }
            payload = plaintext.toString();
        }
        let messageHtml = "";
        switch (event.triggerSource) {
            case "CustomEmailSender_SignUp": {
                const verificationCode = payload;
                messageHtml = `<p>Use this code to verify your email: ${verificationCode}</p>`;
                break;
            }
            case "CustomEmailSender_AdminCreateUser":
            case "CustomEmailSender_ResendCode": {
                const tempPassword = payload;
                messageHtml = `<p>Your temporary password is ${tempPassword}</p>`;
                break;
            }
            default: {
                console.warn("unhandled trigger:", event.triggerSource);
                return;
            }
        }
        await sendEmail({
            subject: "Automated message",
            to: event.request.userAttributes.email,
            messageHtml,
        });
        return true;
    } catch (err) {
        console.error(err.message);
        process.exit(1);
    }
}
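For wiring the trigger up, here is a rough CDK sketch (names are illustrative, not from the original answer; assumes aws-cdk-lib v2, whose UserPool props include customSenderKmsKey and a lambdaTriggers.customEmailSender entry):
// Hypothetical fragment inside a CDK Stack constructor.
const { aws_cognito: cognito, aws_kms: kms, aws_lambda: lambda } = require('aws-cdk-lib');

const emailKey = new kms.Key(this, 'EmailSenderKey'); // key Cognito uses to encrypt codes
const senderFn = new lambda.Function(this, 'CustomEmailSender', {
    runtime: lambda.Runtime.NODEJS_18_X,
    handler: 'index.lambdaHandler',
    code: lambda.Code.fromAsset('lambda'),
    environment: { COGNITO_EMAILER_KEY_ARN: emailKey.keyArn },
});

new cognito.UserPool(this, 'UserPool', {
    customSenderKmsKey: emailKey,
    lambdaTriggers: { customEmailSender: senderFn },
});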

Timeout while launching EC2 instances from AWS Lambda

I have been trying to launch a new EC2 instance, as well as add a piece of string data to my SQS queue, through Lambda in response to an object upload event in my S3 bucket.
I have been able to successfully update my SQS queue, but have been unable to initialise the new EC2 instance. Despite setting the Lambda function's time allocation to the maximum of 5 minutes and increasing the memory allocation, an operation timeout error continuously surfaces.
My code is below. Can anyone point out the potential causes of such an error? While I have put my whole piece of code here for reference, the area concerning the launch is towards the end.
Thank you so much!
console.log('Loading function');

var fs = require('fs');
var async = require('async');
var aws = require('aws-sdk');
var s3 = new aws.S3({ apiVersion: '2006-03-01' });
var sqs = new aws.SQS({ apiVersion: '2012-11-05' });
var ecs = new aws.ECS({ apiVersion: '2014-11-13' });
var ec2 = new aws.EC2({ apiVersion: '2015-10-01' });

// Check if the given key suffix matches a suffix in the whitelist. Return true if it matches, false otherwise.
exports.checkS3SuffixWhitelist = function(key, whitelist) {
    if (!whitelist) { return true; }
    if (typeof whitelist == 'string') { return key.match(whitelist + '$'); }
    if (Object.prototype.toString.call(whitelist) === '[object Array]') {
        for (var i = 0; i < whitelist.length; i++) {
            if (key.match(whitelist[i] + '$')) { return true; }
        }
        return false;
    }
    console.log(
        'Unsupported whitelist type (' + Object.prototype.toString.call(whitelist) +
        ') for: ' + JSON.stringify(whitelist)
    );
    return false;
};

exports.handler = function(event, context) {
    //console.log('Received event:', JSON.stringify(event, null, 2));
    console.log('Received event:');
    // Read in the configuration file
    var config = JSON.parse(fs.readFileSync('config.json', 'utf8'));
    if (!config.hasOwnProperty('s3_key_suffix_whitelist')) {
        config.s3_key_suffix_whitelist = false;
    }
    console.log('Config: ' + JSON.stringify(config));
    var name = event.Records[0].s3.object.key;
    if (!exports.checkS3SuffixWhitelist(name, config.s3_key_suffix_whitelist)) {
        context.fail('Suffix for key: ' + name + ' is not in the whitelist');
    }
    // Get the object from the event and show its key
    var bucket = event.Records[0].s3.bucket.name;
    var key = decodeURIComponent(event.Records[0].s3.object.key.replace(/\+/g, ' '));
    var params = {
        Bucket: bucket,
        Key: key
    };
    s3.getObject(params, function(err, data) {
        if (err) {
            console.log(err);
            var message = "Error getting object " + key + " from bucket " + bucket +
                ". Make sure they exist and your bucket is in the same region as this function.";
            console.log(message);
            context.fail(message);
        } else {
            console.log('CONTENT TYPE:', key);
            context.succeed(key);
        }
    });
    // Sending the image key as a message to SQS and starting a new instance on EC2
    async.waterfall([
        function(next) {
            var params = {
                MessageBody: JSON.stringify(event),
                QueueUrl: config.queue
            };
            console.log("IN QUEUE FUNCTION");
            sqs.sendMessage(params, function(err, data) {
                if (err) { console.warn('Error while sending message: ' + err); }
                else { console.info('Message sent, ID: ' + data.MessageId); }
                next(err);
            });
        },
        function(next) {
            console.log("INITIALIZING ECS");
            var params = {
                ImageId: 'ami-e559b485',
                MinCount: 1,
                MaxCount: 1,
                DryRun: true,
                InstanceType: 't2.micro',
                KeyName: 'malpem2102',
                SubnetId: 'subnet-e8607e8d'
            };
            ec2.runInstances(params, function(err, data) {
                if (err) {
                    console.log(err, err.stack);
                    context.fail('Error', "Error getting file: " + err);
                    return;
                } else {
                    var instanceId = data.Instances[0].InstanceId;
                    console.log("Created instance ", instanceId);
                    context.succeed("Created instance");
                }
            });
        }
    ], function(err) {
        if (err) {
            context.fail('An error has occurred: ' + err);
        } else {
            context.succeed('Successfully processed Amazon S3 URL.');
        }
    });
};