Make a cross account call to Redshift Data API - amazon-web-services

Summary of problem:
We have an AWS Redshift cluster in Account A, this has a database called 'products'
In Account B we have a lambda function which needs to execute a SQL statement against 'products' using the Redshift Data API
We have setup a new secret in AWS Secrets manager containing the redshift cluster credentials. This secret has been shared with Account B. We've confirmed Account B can access this information from AWS Secrets Manager.
When we call the Redshift Data API action 'executeStatement' we get the following error:
ValidationException: Cluster doesn't exist in this region.
at Request.extractError (C:\git\repositories\sandbox\redshift\node_modules\aws-sdk\lib\protocol\json.js:52:27)
at Request.callListeners (C:\git\repositories\sandbox\redshift\node_modules\aws-sdk\lib\sequential_executor.js:106:20)
at Request.emit (C:\git\repositories\sandbox\redshift\node_modules\aws-sdk\lib\sequential_executor.js:78:10)
at Request.emit (C:\git\repositories\sandbox\redshift\node_modules\aws-sdk\lib\request.js:688:14)
at Request.transition (C:\git\repositories\sandbox\redshift\node_modules\aws-sdk\lib\request.js:22:10)
at AcceptorStateMachine.runTo (C:\git\repositories\sandbox\redshift\node_modules\aws-sdk\lib\state_machine.js:14:12)
at C:\git\repositories\sandbox\redshift\node_modules\aws-sdk\lib\state_machine.js:26:10
at Request.<anonymous> (C:\git\repositories\sandbox\redshift\node_modules\aws-sdk\lib\request.js:38:9)
at Request.<anonymous> (C:\git\repositories\sandbox\redshift\node_modules\aws-sdk\lib\request.js:690:12)
at Request.callListeners (C:\git\repositories\sandbox\redshift\node_modules\aws-sdk\lib\sequential_executor.js:116:18)
The error message suggests the request is not being routed to the correct account; since the secret contains the account information, I would have expected the API to resolve it from there.
Code Sample:
Here's my code:
// Create a Redshift Data API client pinned to eu-west-2.
// NOTE(review): the Data API resolves ClusterIdentifier within the CALLING
// account/region — the SecretArn only supplies database credentials, it does
// not redirect the call to the secret's account. A cross-account call needs
// the caller to first assume a role in the cluster's account (see the
// working solution below) — that is why this code fails with
// "Cluster doesn't exist in this region."
var redshiftdata = new aws.RedshiftData({ region: 'eu-west-2'});
// Parameters for ExecuteStatement: cluster + database to target, secret for
// auth, and the SQL text to run. WithEvent: true publishes an EventBridge
// event when the statement finishes.
const params : aws.RedshiftData.ExecuteStatementInput = {
ClusterIdentifier: '<clusteridentifier>',
Database: 'products',
SecretArn: 'arn:aws:secretsmanager:<region>:<accountNo>:secret:<secretname>',
Sql: `select * from product_table where id = xxx`,
StatementName: 'statement-name',
WithEvent: true
};
// Submit the statement; on success, fetch its result set by statement Id.
// NOTE(review): executeStatement is asynchronous — getStatementResult here
// may run before the statement has FINISHED; production code should poll
// describeStatement (or use the WithEvent notification) first.
redshiftdata.executeStatement(params,
async function(err, data){
if (err) console.log(err, err.stack);
else {
// data.Id! — non-null assertion: Id is always present on success.
const resultParams : aws.RedshiftData.GetStatementResultRequest = { Id: data.Id! };
redshiftdata.getStatementResult(resultParams, function(err, data){
if (err) console.log(err, err.stack);
else console.dir(data, {depth: null});
})
}
});
Any suggestions or pointers would be really appreciated.

Thanks for the answer Parsifal. Here's a code snippet of the working solution.
import aws from "aws-sdk";

// Assume a role in the cluster's account (Account A) so subsequent
// Redshift Data API calls are signed with that account's credentials —
// this is what makes the cross-account ClusterIdentifier resolvable.
const roleToAssume = {
  RoleArn: 'arn:aws:iam::<accountid>:role/<rolename>',
  RoleSessionName: 'example',
  DurationSeconds: 900, // minimum allowed session length
};

const sts = new aws.STS({ region: '<region>' });

sts.assumeRole(roleToAssume, function (err, data) {
  if (err) {
    console.log(err, err.stack);
    return; // don't fall through to the success path
  }
  // Guard: fail loudly instead of writing `undefined` keys into the global
  // config (the original used `data.Credentials?.X`, which would silently
  // configure invalid credentials if Credentials were ever absent).
  if (!data.Credentials) {
    console.log('assumeRole succeeded but returned no Credentials');
    return;
  }
  // NOTE: this mutates the process-wide default config; every client
  // constructed after this point uses the assumed-role credentials.
  aws.config.update({
    accessKeyId: data.Credentials.AccessKeyId,
    secretAccessKey: data.Credentials.SecretAccessKey,
    sessionToken: data.Credentials.SessionToken
  })
  // Redshift code here...
});

Related

Error when trying to upload object to S3 through Lambda with AWS SDK, InvalidAccessPointARN: Access point ARN region is empty

I'm trying to upload a txt to S3.
The lambda function has the following policies:
AmazonS3FullAccess
AWSLambdaBasicExecutionRole
And this is the code
const AWS = require('aws-sdk');
const s3 = new AWS.S3();
exports.handler = async (event) => {
const bucketName = 'arn:aws:s3:::0dd839423855435';
const keyName = 'test.txt';
const content = 'Test';
const params = {
'Bucket': bucketName,
'Key': keyName,
'Body': content
};
try {
const data = await s3.putObject(params).promise();
} catch (err) {
console.log(err)
}
}
And this the error
InvalidAccessPointARN: Access point ARN region is empty
at Request.validateAccessPointArn (/var/runtime/node_modules/aws-sdk/lib/services/s3.js:201:28)
at Request.callListeners (/var/runtime/node_modules/aws-sdk/lib/sequential_executor.js:106:20)
at Request.emit (/var/runtime/node_modules/aws-sdk/lib/sequential_executor.js:78:10)
at Request.emit (/var/runtime/node_modules/aws-sdk/lib/request.js:683:14)
at Request.transition (/var/runtime/node_modules/aws-sdk/lib/request.js:22:10)
at AcceptorStateMachine.runTo (/var/runtime/node_modules/aws-sdk/lib/state_machine.js:14:12)
at Request.runTo (/var/runtime/node_modules/aws-sdk/lib/request.js:403:15)
at /var/runtime/node_modules/aws-sdk/lib/request.js:792:12
at new Promise (<anonymous>)
at Request.promise (/var/runtime/node_modules/aws-sdk/lib/request.js:778:12) {
code: 'InvalidAccessPointARN',
time: 2020-06-29T23:00:43.979Z
I don't know what I am doing wrong. Also, I would like to avoid creating an access point.
It looks like you are using Terraform (or some other Infrastructure-as-Code tool), which is why your bucket name is the randomly generated value 0dd839423855435.
You should not pass the ARN (arn:aws:s3:::0dd839423855435) as the bucket name. The Bucket parameter expects only the bucket name — its ID — which in your case is just 0dd839423855435.
Also note that the Lambda function and the S3 bucket should be in the same region.
In case you created s3 with terraform then you can output the arn or id by doing this:
For ARN:
# Expose the bucket's full ARN (arn:aws:s3:::<name>) as a Terraform output.
output "your-bucket-arn" {
value = aws_s3_bucket.your-bucket.arn
}
For ID:
# Expose the bucket's name/ID — this is the value to use as the SDK's
# Bucket parameter (not the ARN).
output "your-bucket-id" {
value = aws_s3_bucket.your-bucket.id
}

How to globally signout a cognito user via aws sdk

I work with AWS Cognito. I have the attached code to globally signout a user based on access token :
// Promise adapter around CognitoIdentityServiceProvider.globalSignOut:
// revokes every token issued to the user that owns `accessToken`.
var signOut = (accessToken) =>
  new Promise((resolve, reject) => {
    const cognito = new AWS.CognitoIdentityServiceProvider({
      apiVersion: '2016-04-18',
      region: 'us-east-1'
    });
    console.log("Signing out user .. ");
    // GlobalSignOut only needs the access token itself.
    cognito.globalSignOut({ AccessToken: accessToken /* required */ }, (err, data) => {
      if (err) {
        console.log(err, err.stack); // an error occurred
        reject(err);
        return;
      }
      console.log(data);
      resolve(data);
    });
  });
and when I invoke the code I get the following error:
2019-08-20T14:33:29.736Z 011d65cb-ee4d-4053-9283-6e6242560c6b { NotAuthorizedException: Access Token has been revoked
at Request.extractError (/var/task/sign-out/node_modules/aws-sdk/lib/protocol/json.js:51:27)
at Request.callListeners (/var/task/sign-out/node_modules/aws-sdk/lib/sequential_executor.js:106:20)
at Request.emit (/var/task/sign-out/node_modules/aws-sdk/lib/sequential_executor.js:78:10)
at Request.emit (/var/task/sign-out/node_modules/aws-sdk/lib/request.js:683:14)
at Request.transition (/var/task/sign-out/node_modules/aws-sdk/lib/request.js:22:10)
at AcceptorStateMachine.runTo (/var/task/sign-out/node_modules/aws-sdk/lib/state_machine.js:14:12)
at /var/task/sign-out/node_modules/aws-sdk/lib/state_machine.js:26:10
at Request. (/var/task/sign-out/node_modules/aws-sdk/lib/request.js:38:9)
at Request. (/var/task/sign-out/node_modules/aws-sdk/lib/request.js:685:12)
at Request.callListeners (/var/task/sign-out/node_modules/aws-sdk/lib/sequential_executor.js:116:18)
message: 'Access Token has been revoked',
Does anyone know how to resolve this issue?
This exception is thrown because the access token you are using has already been revoked by a previous global sign-out — in other words, the user is already signed out of Cognito.
Try invoking the same function with a fresh access token obtained by signing in (i.e. the Login API) again.

AWS SDK runinstance and IAM roles

The code below works when I have added the AWS IAM role "AdministratorAccess" - But it is risky and a bit of overkill... But how do I know and find only the necessary role(s)...It is very confusing and hard to know by when I look at all the possible roles in the console?
// Launch a t2.micro EC2 instance and tag it with Name=SDK Sample.
// Required permissions are exactly the actions this code calls —
// ec2:RunInstances and ec2:CreateTags — not AdministratorAccess.
try {
  // Load the AWS SDK for Node.js
  var AWS = require('aws-sdk');
  // Set the region
  AWS.config.update({region: 'us-east-2'});
  var instanceParams = {
    ImageId: 'ami-xxxxxxxxxxxx',
    InstanceType: 't2.micro',
    KeyName: 'xxxxxxxxxx',
    SecurityGroups: ['xxxxxxxxxxxxxxx'],
    MinCount: 1,
    MaxCount: 1
  };
  // Reuse one EC2 client for both calls instead of constructing two.
  var ec2 = new AWS.EC2({apiVersion: '2016-11-15'});
  // Flattened promise chain: run the instance, then tag it.
  ec2.runInstances(instanceParams).promise()
    .then(function (data) {
      console.log(data);
      var instanceId = data.Instances[0].InstanceId;
      console.log("Created instance", instanceId);
      // Add tags to the instance
      var tagParams = {
        Resources: [instanceId], Tags: [
          {
            Key: 'Name',
            Value: 'SDK Sample'
          }
        ]
      };
      // Returning the promise chains the tagging call (no nested .then tree).
      return ec2.createTags(tagParams).promise();
    })
    .then(function () {
      console.log("Instance tagged");
    })
    .catch(function (err) {
      // Single rejection handler for both the launch and the tag calls.
      console.error(err, err.stack);
    });
}
catch(e){
  // BUG FIX: the original called wl.info(...) but `wl` is never defined,
  // so the catch handler itself would throw a ReferenceError.
  console.error('Error: ' + e);
}
Firstly, the APIs you are calling via the SDK are a direct hint to the permissions you need, i.e. ec2:RunInstances and ec2:CreateTags.
You first create a policy then select the service then attach permissions (RunInstances and CreateTags)
You then create a Role with that policy attached.
Then you can attach the role to your Lambda

How to use assume role credential in dynamodb (aws-sdk javascript)?

I already have AWS assume-role credentials in the .aws/credentials file.
How do I use them to create an STS or DynamoDB client, like:
const { DynamoDB } = require('aws-sdk');
const { DocumentClient } = DynamoDB;
const dynamo = new DynamoDB({
endpoint: process.env.AWS_ENDPOINT,
region: process.env.AWS_REGION,
accessKeyId: process.env.AWS_ACCESS_KEY_ID,
secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY,
secretToken: process.env.aws_security_token
});
I mean I got error as:
root#myubuntu:~/work/contacts_api# node ./seed/runner.js
```
Checking if 'contacts' table exists
{ UnrecognizedClientException: The security token included in the request is invalid.
at Request.extractError (/root/work/contacts_api/node_modules/aws-sdk/lib/protocol/json.js:51:27)
at Request.callListeners (/root/work/contacts_api/node_modules/aws-sdk/lib/sequential_executor.js:106:20)
at Request.emit (/root/work/contacts_api/node_modules/aws-sdk/lib/sequential_executor.js:78:10)
at Request.emit (/root/work/contacts_api/node_modules/aws-sdk/lib/request.js:683:14)
at Request.transition (/root/work/contacts_api/node_modules/aws-sdk/lib/request.js:22:10)
at AcceptorStateMachine.runTo (/root/work/contacts_api/node_modules/aws-sdk/lib/state_machine.js:14:12)
at /root/work/contacts_api/node_modules/aws-sdk/lib/state_machine.js:26:10
at Request. (/root/work/contacts_api/node_modules/aws-sdk/lib/request.js:38:9)
at Request. (/root/work/contacts_api/node_modules/aws-sdk/lib/request.js:685:12)
at Request.callListeners (/root/work/contacts_api/node_modules/aws-sdk/lib/sequential_executor.js:116:18)
message: 'The security token included in the request is invalid.',
code: 'UnrecognizedClientException',
time: 2019-01-07T05:39:54.907Z,
requestId: 'A5CFV62P0TGHJH7VDIBSL0JRC3VV4KQNSO5AEMVJF66Q9ASUAAJG',
statusCode: 400,
retryable: false,
retryDelay: 5.013458338738063 }
```
I want to know the correct way to initialize credentials, particularly if I want to use MFA credentials.
I'm guessing that the error here should give you a clue:
"The security token included in the request is invalid"
Did you try printing out the environment value
env | grep aws_security_token
If it's empty you'll have to set the value prior to running your code.
Also, I've noticed that your other aws keys are all caps whereas your aws_security_token is all lowercase.
I suspect secretToken isn't a thing. Here are two examples of how it could be done (how I've done it before).
That said, I would encourage constructing and using a Credentials object wherever possible (the second example), but if you want to do it inline, that should work too.
/** assume a role and build a DocumentClient object to make a single scan **/
;(async () => {
  // Trade the caller's credentials for temporary ones from the target role.
  const stsClient = new AWS.STS()
  const { Credentials } = await stsClient
    .assumeRole({
      RoleArn: process.env.ROLE_ARN,
      RoleSessionName: process.env.ROLE_SESSION_NAME,
    })
    .promise()
  // Pass the temporary keys inline to the DocumentClient.
  const docClient = new AWS.DynamoDB.DocumentClient({
    region: process.env.REGION,
    credentials: {
      accessKeyId: Credentials?.AccessKeyId,
      secretAccessKey: Credentials?.SecretAccessKey,
      sessionToken: Credentials?.SessionToken,
    },
  })
  // Single full-table scan using the assumed-role credentials.
  const result = await docClient
    .scan({ TableName: process.env.TABLE_NAME })
    .promise()
  console.log(result)
})()
/**
 * assume a role and build a Credentials object and use it
 * to build a DocumentClient object to make a single scan
 **/
;(async () => {
  // Obtain temporary credentials for the target role.
  const response = await new AWS.STS()
    .assumeRole({
      RoleArn: process.env.ROLE_ARN,
      RoleSessionName: process.env.ROLE_SESSION_NAME,
    })
    .promise()
  // Wrap the temporary keys in an AWS.Credentials instance so the same
  // object could be shared across multiple service clients.
  const roleCredentials = new AWS.Credentials({
    accessKeyId: response.Credentials?.AccessKeyId,
    secretAccessKey: response.Credentials?.SecretAccessKey,
    sessionToken: response.Credentials?.SessionToken,
  })
  const docClient = new AWS.DynamoDB.DocumentClient({
    region: process.env.REGION,
    credentials: roleCredentials,
  })
  // Single full-table scan; log the raw result.
  console.log(
    await docClient.scan({ TableName: process.env.TABLE_NAME }).promise()
  )
})()

Create Lambda function which will parse the emails which uploaded to S3 with SES receipt rule

I would like to create Lambda function which will parse the emails which uploaded to S3 bucket through SES receipt rule.
Uploading to S3 bucket through SES receipt rule works fine. So, its tested already and confirmed that it uploads the file correctly.
My Amazon Lambda function:
var AWS = require('aws-sdk');
var s3 = new AWS.S3();
var bucketName = 'bucket_name/folder/destination';
exports.handler = function(event, context, callback) {
console.log('Process email');
var sesNotification = event.Records[0].ses;
console.log("SES Notification:\n", JSON.stringify(sesNotification, null, 2));
// Retrieve the email from your bucket
s3.getObject({
Bucket: bucketName,
Key: sesNotification.mail.messageId
}, function(err, data) {
if (err) {
console.log(err, err.stack);
callback(err);
} else {
console.log("Raw email:\n" + data.Body);
// Custom email processing goes here
callback(null, null);
}
});
};
When there is a file upload it triggers the lambda but I get an [SignatureDoesNotMatch] error:
{
"errorMessage": "The request signature we calculated does not match the signature you provided. Check your key and signing method.",
"errorType": "SignatureDoesNotMatch",
"stackTrace": [
"Request.extractError (/var/runtime/node_modules/aws-sdk/lib/services/s3.js:524:35)",
"Request.callListeners (/var/runtime/node_modules/aws-sdk/lib/sequential_executor.js:105:20)",
"Request.emit (/var/runtime/node_modules/aws-sdk/lib/sequential_executor.js:77:10)",
"Request.emit (/var/runtime/node_modules/aws-sdk/lib/request.js:615:14)",
"Request.transition (/var/runtime/node_modules/aws-sdk/lib/request.js:22:10)",
"AcceptorStateMachine.runTo (/var/runtime/node_modules/aws-sdk/lib/state_machine.js:14:12)",
"/var/runtime/node_modules/aws-sdk/lib/state_machine.js:26:10",
"Request. (/var/runtime/node_modules/aws-sdk/lib/request.js:38:9)",
"Request. (/var/runtime/node_modules/aws-sdk/lib/request.js:617:12)",
"Request.callListeners (/var/runtime/node_modules/aws-sdk/lib/sequential_executor.js:115:18)"
]
}
If anyone can help me to approach this problem, it will be great!
Thanks
Okay, I solved it: bucketName must contain only the bucket's name, while the key can contain the rest of the path to the file's exact "directory".
So, basically, bucketName should not contain subfolders, but the key should.
Thanks
For reference: AWS Lambda S3 GET/POST - SignatureDoesNotMatch error