AWS Cloudwatch metrics showing same values for different conditions - amazon-web-services

I have a lambda which in which I logged some events which I receive from a SNS topic. The logging line looks like this:
LOG.info("{}","metricData"+snsStatus);
Now there is one more data which is orgname which I am already populating before in the ThreadContext, so basically the event looks like this :
{
"line_number": 148,
"message": "metricDatareadSns",
"thread_name": "main",
"level": "INFO",
"file": "SnsSubscriber.java",
"method": "readSns",
"orgname": "abc"
}
What I wanted was to have this data is presented in a Graph on a Dashboard, but whats happening is instead of showing data either graph don't show the data or show a very large amount for all the orgname and status
I am using the below CDK code for generating the metrics :
const orgName: string[] = ['abc', 'xyz'];
const snsStatus: string[] = ['readSns', 'writeSns'];
let logGroup = LogGroup.fromLogGroupName(this, 'SnsSubscriber-Log-Group', `/aws/lambda/SnsSubscriber`);
const dashboard = new Dashboard(this, `SNS-Dashboard`, {
dashboardName: 'SNS-Dashboard'
});
snsStatus.forEach((statusCode) => {
let codeMetrics: IMetric[] | Metric[] = [];
orgName.forEach((org) => {
const metricFilter = new MetricFilter(this, `${org}-${statusCode}-MetricFilter`, {
logGroup: logGroup,
metricNamespace: `${org}-${statusCode}-NameSpace`,
metricName: `${org}-${statusCode}`,
filterPattern: FilterPattern.all(FilterPattern.stringValue(`$.message`,"=",`metricData${statusCode}`), FilterPattern.stringValue(`$.orgname`,"=",`${org}`)),
metricValue: '1',
defaultValue: 0
});
codeMetrics.push(metricFilter.metric({
dimensions: {transactionCode: `${org}`},
statistic: Statistic.SAMPLE_COUNT,
unit: Unit.COUNT,
label: `${org}`,
period: Duration.hours(3)
}));
});
dashboard.addWidgets(new GraphWidget({
width: 12,
title: `${statusCode}`,
left: codeMetrics
}));
});
let aggregatedStatusCodes: IMetric[] | Metric[] = [];
snsStatus.forEach((statusCode) => {
const metricFilter = new MetricFilter(this, `Agreegated-${statusCode}-MetricFilter`, {
logGroup: logGroup,
metricNamespace: `Agreegated-${statusCode}-NameSpace`,
metricName: `Agreegated-${statusCode}`,
filterPattern: FilterPattern.stringValue(`$.message`,"=",`metricData${statusCode}`),
metricValue: '1',
defaultValue: 0
});
aggregatedStatusCodes.push(metricFilter.metric({
statistic: Statistic.SAMPLE_COUNT,
unit: Unit.COUNT,
label: `${statusCode}`,
period: Duration.hours(3)
}));
});
dashboard.addWidgets(new GraphWidget({
width: 24,
title: 'Aggregated Status Codes',
left: aggregatedStatusCodes
}));

Related

ChartJS Choropleth Focusing On US and Not The Entire Earth?

I created this chartJS map to showcase the distribution of registrations for this course. However, i can only see US in the map. Is there an issue with the library or should i use a different project ?
Thank you in Advance
Here is the code:
fetch('https://unpkg.com/world-atlas/countries-50m.json').then((r) => r.json()).then((data) = {
const countries = ChartGeo.topojson.feature(data, data.objects.countries).features;
const dataCountries = <?= json_encode($WorldMapData) ?>;
const chart = new Chart(canvas.getContext("2d"), {
type: 'choropleth',
data: {
labels: countries.map((d) => d.properties.name),
datasets: [{
label: 'Course Registrations',
data: countries.map((d) => ({
feature: d,
value: dataCountries[dataCountries.indexOf(d.properties.name) + 1],
})),
}]
},
options: {
legend: {
display: true
},
scale: {
projection: 'equalEarth'
},
geo: {
colorScale: {
display: true,
position: 'bottom',
quantize: 1,
legend: {
position: 'bottom-right',
},
},
},
}
});
});
This is what I see:
enter image description here
I tried to get a map showcasing the entire earth but it is only showing US.

S3 trigger failes to update table in DynamoDB

I have created a web app in AWS Amplify. A web page uploads a file to an S3 bucket. I have created a trigger that should create a record in DynamoDB for each uploaded file. My trigger code is called, there are no errors I can find anywhere but the table is not updated.
The trigger code:
exports.handler = async function (event) {
console.log('Received S3 event:', JSON.stringify(event, null, 2));
// Get the object from the event and show its content type
const eventName = event.Records[0].eventName;
const bucket = event.Records[0].s3.bucket.name; //eslint-disable-line
let key = event.Records[0].s3.object.key.replace('%3A', ':');
const imgSize = event.Records[0].s3.object.size;
console.log(`Bucket: ${bucket}`, `Key: ${key}`);
if (eventName === "ObjectCreated:Put") {
console.log("This is a put event")
addDBActivity(key);
}
};
var AWS = require('aws-sdk');
var ddb = new AWS.DynamoDB();
function addDBActivity(key) {
console.log("in addDBActivity");
// let activityTable = "Activity-***-dev"
let activityTable = "Activity-ctuqkflvhvbjxftdvdo64xnzle-dev"
let name = key.split('/').pop();
let owner = key.split('/')[0];
let format = "tcx";
let type = "cycling";
var params = {
TableName: activityTable,
Item: {
key: { S: key },
name: { S: name },
owner: { S: owner },
format: { S: format },
type: { S: type },
}
};
console.log(params);
ddb.putItem(params).promise()
.then(function(data) {
console.log(data);
})
.catch(function(err) {
console.log(err);
});
console.log("after promise");
}
schema.graphql
type Activity #model #auth(rules: [
{ allow: owner, ownerField: "id" }
]) {
id: ID!
name: String!
owner: String!
key: String!
format: String!
type: String!
}
After uploading a file this is the log output in CloudWatch:
2021-12-18T13:14:26.093+02:00
2021-12-18T11:14:26.092Z 20cf4afe-3c96-4ee2-94c6-7c12c27c4d66 INFO This is a put event
2021-12-18T11:14:26.092Z 20cf4afe-3c96-4ee2-94c6-7c12c27c4d66 INFO This is a put event
2021-12-18T13:14:26.093+02:00
2021-12-18T11:14:26.093Z 20cf4afe-3c96-4ee2-94c6-7c12c27c4d66 INFO in addDBActivity
2021-12-18T11:14:26.093Z 20cf4afe-3c96-4ee2-94c6-7c12c27c4d66 INFO in addDBActivity
2021-12-18T13:14:26.123+02:00
2021-12-18T11:14:26.123Z 20cf4afe-3c96-4ee2-94c6-7c12c27c4d66 INFO {
TableName: 'Activity-ctuqkflvhvbjxftdvdo64xnzle-dev',
Item: {
key: {
S: 'public/sourb/ramp_test2tcx-a03e6380-e0c3-4966-b4eb-b74375f930bd.tcx'
},
name: { S: 'ramp_test2tcx-a03e6380-e0c3-4966-b4eb-b74375f930bd.tcx' },
owner: { S: 'public' },
format: { S: 'tcx' },
type: { S: 'cycling' }
}
}
2021-12-18T11:14:26.123Z 20cf4afe-3c96-4ee2-94c6-7c12c27c4d66 INFO { TableName: 'Activity-ctuqkflvhvbjxftdvdo64xnzle-dev', Item: { key: { S: 'public/sourb/ramp_test2tcx-a03e6380-e0c3-4966-b4eb-b74375f930bd.tcx' }, name: { S: 'ramp_test2tcx-a03e6380-e0c3-4966-b4eb-b74375f930bd.tcx' }, owner: { S: 'public' }, format: { S: 'tcx' }, type: { S: 'cycling' } } }
2021-12-18T13:14:26.542+02:00
2021-12-18T11:14:26.522Z 20cf4afe-3c96-4ee2-94c6-7c12c27c4d66 INFO after promise
Any idea what could be the issue?
You are using async handler so your function probably finishes before your database has a chance to get updated. You can wrap your code in a Promise as show in the docs to ensure full execution.

Lambda is not receiving the messages from AWS SQS

I am pushing 5 messages to the SQS and expecting that my lambda should get those 5 messages and just log, when i trigger the function I see that the publisher lambda is pushing 5 messages to the sqs but the consumer lambda is not getting those 5 messages instead it is getting only one. Any idea why?
# publisher lambda configuration
fetchUserDetails:
handler: FetchUserDetails/index.fetchUserDetails
timeout: 900
package:
individually: true
artifact: "./dist/FetchUserDetails.zip"
reservedConcurrency: 175
environment:
SEND_EMAIL_SQS_URL: ${self:custom.EMAILING_SQS_URL}
# consumer lambda configuration
sendEmails:
handler: SendEmails/index.sendEmails
timeout: 30
package:
individually: true
artifact: "./dist/SendEmails.zip"
events:
- sqs:
arn:
Fn::GetAtt:
- SendEmailSQS
- Arn
batchSize: 1
# SQS configuration
SendEmailSQS:
Type: "AWS::SQS::Queue"
Properties:
QueueName: ${self:custom.EMAILING_SQS_NAME}
FifoQueue: true
VisibilityTimeout: 45
ContentBasedDeduplication: true
RedrivePolicy:
deadLetterTargetArn:
Fn::GetAtt:
- SendEmailDlq
- Arn
maxReceiveCount: 15
# publisher lambda code
const fetchUserDetails = async (event, context, callback) => {
console.log("Input to the function-", event);
/* TODO: 1. fetch data applying all the where clauses coming in the input
* 2. push each row to the SQS */
const dummyData = [
{
user_id: "1001",
name: "Jon Doe",
email_id: "test1#test.com",
booking_id: "1"
},
{
user_id: "1002",
name: "Jon Doe",
email_id: "test2#test.com",
booking_id: "2"
},
{
user_id: "1003",
name: "Jon Doe",
email_id: "test3#test.com",
booking_id: "3"
},
{
user_id: "1004",
name: "Jon Doe",
email_id: "test4#test.com",
booking_id: "4"
},
{
user_id: "1005",
name: "Jon Doe",
email_id: "test5#test.com",
booking_id: "5"
}
];
try {
for (const user of dummyData) {
const params = {
MessageGroupId: uuid.v4(),
MessageAttributes: {
data: {
DataType: "String",
StringValue: JSON.stringify(user)
}
},
MessageBody: "Publish messages to send mailer lambda",
QueueUrl:
"https://sqs.ap-southeast-1.amazonaws.com/344269040775/emailing-sqs-dev.fifo"
};
console.log("params-", params);
const response = await sqs.sendMessage(params).promise();
console.log("resp-", response);
}
return "Triggered the SQS queue to publish messages to send mailer lambda";
} catch (e) {
console.error("Error while pushing messages to the queue");
callback(e);
}
};
# consumer lambda code, just some logs
const sendEmails = async event => {
console.log("Input to the function-", event);
const allRecords = event.Records;
const userData = event.Records[0];
const userDataBody = JSON.parse(userData.messageAttributes.data.stringValue);
console.log("records-", allRecords);
console.log("userData-", userData);
console.log("userDataBody-", userDataBody);
console.log("stringified log-", JSON.stringify(event));
};
# permissions lambda has
- Effect: "Allow"
Action:
- "sqs:SendMessage"
- "sqs:GetQueueUrl"
Resource:
- !GetAtt SendEmailSQS.Arn
- !GetAtt SendEmailDlq.Arn
Your consumer is only looking at one record:
const userData = event.Records[0];
It should loop through all Records and process their messages, rather than only looking at Records[0].

Why is my Lambda function creating two Spot Instance Requests instead of one?

I have the following lambda function
var AWS = require('aws-sdk');
var ec2 = new AWS.EC2({
region: "eu-west-1"
});
var userData = `#!/bin/bash
echo "hello there"
`;
var userDataEncoded = new Buffer.from(userData).toString('base64');
var params = {
InstanceCount: 1,
LaunchSpecification: {
ImageId: "ami-xxxxxxxxx",
InstanceType: "c4.2xlarge",
KeyName: "xxxxxxx",
SubnetId: "subnet-xxxxxxxxxx",
Placement: {
AvailabilityZone: "eu-west-1a"
},
SecurityGroupIds: [
"sg-xxxxxxxxxx"
],
UserData: userDataEncoded
},
SpotPrice: "0.8",
BlockDurationMinutes: 180,
Type: "one-time"
};
exports.handler = async (event, context) => {
await ec2.requestSpotInstances(params, function (err, data) {
if (err) {
console.log("error");
} else {
console.log("starting instance");
context.succeed('Completed');
return {
statusCode: 200,
body: JSON.stringify('success!'),
};
}
}).promise();
};
The function is supposed to take my params and create ONE spot request, but it always starts two parallel spot requests with one instance each.
There is no error in the logs, the function is only triggered once according to Cloudwatch and has a success rate of 100%.
I set the timeout on 20 minutes so it can't be that either.
Why is it doing that? I only want one request, and not two. Any help is appreciated.
You can either use the promise-based or callback-based approach. Using both at once results in duplicate calls.
So either remove the callback and use .then and .catch for you response or do the opposite and do not call .promise on requestSpotInstances.
exports.handler = async (event, context) =>
ec2.requestSpotInstances(params).promise()
.then(() => {
console.log("starting instance");
return {
statusCode: 200,
body: 'success!'
};
}).catch((error) => {
console.error("error");
return {
statusCode: 500,
body: 'an error occurred'
}
})

AWS.CloudWatch SDK vs. CLI produces different results

I am trying to pull data from AWS Cloudwatch. When using the CLI it works fine.
aws cloudwatch get-metric-statistics --namespace AWS/ApiGateway --metric-name Count --start-time 2020-01-03T23:00:00Z --end-time 2020-01-05T23:00:00Z --period 3600 --statistics Sum --dimensions Name=ApiName,Value=prod-api-proxy
But when using nodejs I get an empty result set. Here is the code:
var AWS = require('aws-sdk');
AWS.config.update({region: 'us-east-1'});
var cw = new AWS.CloudWatch({apiVersion: '2010-08-01'});
var params = {
Dimensions: [
{
Name: 'ApiName',
Value: 'prod-api-proxy'
}
],
MetricName: 'Count',
Namespace: 'AWS/ApiGateway',
StartTime: new Date('2020-01-03T23:00:00Z').toISOString(),
EndTime: new Date('2020-01-05T23:00:00Z').toISOString(),
Statistics: ['Sum'],
Period: 3600
};
cw.getMetricStatistics(params, function(err, data) {
if (err) {
console.log("Error", err);
} else {
console.log("Metrics", JSON.stringify(data.Metrics));
}
})
This is the empty response I get:
{ Dimensions: [ { Name: 'ApiName', Value: 'prod-api-proxy' } ],
MetricName: 'Count',
Namespace: 'AWS/ApiGateway',
StartTime: '2020-01-03T23:00:00.000Z',
EndTime: '2020-01-05T23:00:00.000Z',
Statistics: [ 'Sum' ],
Period: 3600 }
Metrics undefined
Any ideas?
Just heard from AWS support. I am posting the answer here if anyone needs it. There is an error in my code. The object data.Metrics is not part of the response.