Update Route53 record using lambda nodejs not working - amazon-web-services

I am trying to update a record in Route53 using a Lambda function with the Node.js runtime.
The problem is I am getting no errors, no logs, nothing from Route53 to even understand why it is not working.
I have set up the following:
Lambda function
SNS to read messages from
Attached a policy to update/change record sets
My lambda code:
console.log('Running updateRecordSet');
/* global HOSTED_ZONE_ID*/
/* global DNS_RECORD_NAME*/
HOSTED_ZONE_ID = 'xxxx';
DNS_RECORD_NAME = 'dns-record.internal.example.com.';
var aws = require('aws-sdk');
var route53 = new aws.Route53();

exports.handler = async (event, context) => {
    const message = event.Records[0].Sns.Message;
    console.log('SNS message:', message);
    try {
        const data = JSON.parse(message);
        if (data.ip) {
            console.log('New IP: ', data.ip);
            var newRecord = {
                HostedZoneId: HOSTED_ZONE_ID,
                ChangeBatch: {
                    Changes: [{
                        Action: 'UPSERT',
                        ResourceRecordSet: {
                            Name: DNS_RECORD_NAME,
                            Type: 'A',
                            ResourceRecords: [{Value: data.ip}],
                            TTL: 30,
                        }
                    }]
                }
            };
            updateRecordSet(route53, DNS_RECORD_NAME, HOSTED_ZONE_ID, newRecord, function(err) {
                if (err) {
                    return context.fail(err);
                }
                return context.succeed('OK');
            });
        }
    } catch(err) {
        console.error(err);
    }
    return message;
};

function updateRecordSet(route53, DNS_RECORD_NAME, HOSTED_ZONE_ID, newRecord, callback) {
    console.log("Executing function updateRecordSet");
    route53.changeResourceRecordSets(newRecord, function(err) {
        if (err) {
            console.log("Got an err");
            return callback(err);
        }
        return console.log('Updated A record for', DNS_RECORD_NAME);
    });
}
I get the output:
Function Logs:
START RequestId: 4ef801ba-c03c-4582-33a8-c078c46f0b03 Version: $LATEST
2019-04-07T04:18:55.201Z 4ef801ba-c03c-4582-83a8-c078c46f0b03 SNS message: {"ip": "10.1.1.1"}
2019-04-07T04:18:55.201Z 4ef801ba-c03c-4582-83a8-c078c46f0b03 New IP: 10.1.1.1
2019-04-07T04:18:55.201Z 4ef801ba-c03c-4582-83a8-c078c46f0b03 Executing function updateRecordSet
END RequestId: 4ef801ba-c03c-4582-33a8-c078c46f0b03
If the IAM policy were wrong, wouldn't I at least get some kind of authorization error?

I could not get async/await to work with Lambda for some reason, but I finally got working code.
This Lambda will update or insert a record set in Route53, reading from SNS a JSON message like {"ip": "10.1.1.1"}:
console.log('Running updateRecordSet');
var AWS = require('aws-sdk');
/* global HOSTED_ZONE_ID*/
/* global DNS_RECORD_NAME*/
HOSTED_ZONE_ID = 'xxxxxx';
DNS_RECORD_NAME = 'dns-record.example.com.';

exports.handler = function(event, context, callback) {
    var route53 = new AWS.Route53();
    // Get message from SNS
    var message = event.Records[0].Sns.Message;
    const data = JSON.parse(message);
    if (typeof data.ip !== "undefined") {
        route53.changeResourceRecordSets({
            HostedZoneId: HOSTED_ZONE_ID,
            ChangeBatch: {
                Changes: [{
                    Action: 'UPSERT',
                    ResourceRecordSet: {
                        Name: DNS_RECORD_NAME,
                        Type: 'A',
                        ResourceRecords: [
                            {
                                Value: data.ip
                            }
                        ],
                        TTL: 30
                    }
                }]
            }
        }, function (err, data) {
            if (err) {
                console.log(err, err.stack);
            } else {
                console.log('Updated Route53 DNS record ' + DNS_RECORD_NAME);
            }
        });
    } else {
        console.log('No IP found in message. Discarding.');
    }
};
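For completeness, here is a minimal sketch of publishing a matching test message to the triggering SNS topic with the SDK; the topic ARN and region below are placeholders, not values from the question:
// Hypothetical publisher: sends {"ip": "10.1.1.1"} to the SNS topic that triggers the Lambda.
var AWS = require('aws-sdk');
var sns = new AWS.SNS({ region: 'us-east-1' }); // placeholder region

sns.publish({
    TopicArn: 'arn:aws:sns:us-east-1:123456789012:dns-updates', // placeholder topic ARN
    Message: JSON.stringify({ ip: '10.1.1.1' })
}, function (err, data) {
    if (err) console.log(err, err.stack);
    else console.log('Published message', data.MessageId);
});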

If you want a full promise/await setup, you can try the code below. It includes a few extra things, such as an STS assume-role call for cross-account Route53 access and weighted logic to create multiple CNAMEs. I understand this does not fit your use case exactly, but it may help somebody who stumbles upon a similar issue and wants weighted load balancing with CNAMEs.
console.log('Running route53 changeRecordSet with CNAME');
/* global HOSTED_ZONE_ID*/
/* global DNS_RECORD_NAME*/
const HOSTED_ZONE_ID = "xxxx";
const DNS_RECORD_NAME = "xxxxxx.com";
var AWS = require('aws-sdk');
AWS.config.region = 'us-west-1';

async function update_recordset(route53, records) {
    return route53.changeResourceRecordSets(records).promise();
}

// Assume a role in the other account and return the temporary credentials.
async function getcred() {
    console.log("inside getcred");
    var sts = new AWS.STS();
    try {
        let temp_cred = await sts.assumeRole({
            RoleArn: 'arn:aws:iam::xxxxxxxx',
            RoleSessionName: 'awssdk'
        }).promise();
        console.log("TEMP", temp_cred);
        return temp_cred;
    } catch (err) {
        console.log("ERROR", err);
    }
}

exports.handler = async (event) => {
    const message = event.Records[0].Sns.Message;
    console.log('SNS message:', message);
    try {
        const data = JSON.parse(message);
        if (data.cname) {
            console.log('New CNAME: ', data.cname);
            const sts_result = await getcred();
            console.log("STS_RESULT", sts_result);
            // Use the temporary credentials from the assumed role for the Route53 client.
            AWS.config.update({
                accessKeyId: sts_result.Credentials.AccessKeyId,
                secretAccessKey: sts_result.Credentials.SecretAccessKey,
                sessionToken: sts_result.Credentials.SessionToken
            });
            var route53 = new AWS.Route53();
            console.log("ROUTE53 RESULT", route53);
            // Two weighted CNAME records under the same name, distinguished by SetIdentifier.
            const newRecord = {
                HostedZoneId: HOSTED_ZONE_ID,
                ChangeBatch: {
                    Changes: [
                        {
                            Action: 'UPSERT',
                            ResourceRecordSet: {
                                SetIdentifier: "elb",
                                Weight: 100,
                                Name: DNS_RECORD_NAME,
                                Type: 'CNAME',
                                ResourceRecords: [{ Value: "xxxxx.sxxxxx.com" }],
                                TTL: 300,
                            },
                        },
                        {
                            Action: 'UPSERT',
                            ResourceRecordSet: {
                                SetIdentifier: "cflare",
                                Weight: 100,
                                Name: DNS_RECORD_NAME,
                                Type: 'CNAME',
                                ResourceRecords: [{ Value: data.cname }],
                                TTL: 300,
                            },
                        }],
                },
            };
            const results = await update_recordset(route53, newRecord);
            console.log("Result", results);
        }
    } catch (err) {
        console.log("ERR", err);
    }
};

You need to either use async/await properly or stick with a plain callback(); mixing both is bad practice.
I would do something like this:
console.log('Running updateRecordSet');
/* global HOSTED_ZONE_ID*/
/* global DNS_RECORD_NAME*/
HOSTED_ZONE_ID = 'xxxx';
DNS_RECORD_NAME = 'dns-record.internal.example.com.';
var aws = require('aws-sdk');
var route53 = new aws.Route53();

exports.handler = async (event) => {
    const message = event.Records[0].Sns.Message;
    console.log('SNS message:', message);
    try {
        const data = JSON.parse(message);
        if (data.ip) {
            console.log('New IP: ', data.ip);
            var newRecord = {
                HostedZoneId: HOSTED_ZONE_ID,
                ChangeBatch: {
                    Changes: [{
                        Action: 'UPSERT',
                        ResourceRecordSet: {
                            Name: DNS_RECORD_NAME,
                            Type: 'A',
                            ResourceRecords: [{Value: data.ip}],
                            TTL: 30,
                        }
                    }]
                }
            };
            // .promise() turns the SDK request into a promise the async handler can await.
            let result = await route53.changeResourceRecordSets(newRecord).promise();
            console.log(result);
        }
    } catch(err) {
        console.error(err);
    }
    return message;
};
Also, you are right about the IAM role: once your code actually makes the call, a wrong policy will surface as an authorization error.

To get async/await to work with the AWS SDK's callback-style methods, you can wrap them with promisify.
See the example below...
console.log('Running updateRecordSet');
/* global HOSTED_ZONE_ID*/
/* global DNS_RECORD_NAME*/
HOSTED_ZONE_ID = 'xxxx';
DNS_RECORD_NAME = 'dns-record.internal.example.com.';
const aws = require('aws-sdk');
const route53 = new aws.Route53();
const { promisify } = require('util');
const changeResourceRecordSets = promisify(route53.changeResourceRecordSets.bind(route53));

exports.handler = async (event) => {
    const message = event.Records[0].Sns.Message;
    console.log('SNS message:', message);
    try {
        const data = JSON.parse(message);
        if (data.ip) {
            console.log('New IP: ', data.ip);
            const newRecord = {
                HostedZoneId: HOSTED_ZONE_ID,
                ChangeBatch: {
                    Changes: [
                        {
                            Action: 'UPSERT',
                            ResourceRecordSet: {
                                Name: DNS_RECORD_NAME,
                                Type: 'A',
                                ResourceRecords: [{ Value: data.ip }],
                                TTL: 30,
                            },
                        }],
                },
            };
            const results = await changeResourceRecordSets(newRecord);
            if (results.ChangeInfo.Status === 'PENDING') {
                console.log('Updated A record for', DNS_RECORD_NAME, results);
                return {
                    statusCode: 200,
                    body: 'Success',
                };
            } else {
                console.error(results);
                return {
                    statusCode: 500,
                    body: 'Something went wrong',
                };
            }
        }
    } catch (err) {
        console.error(err);
    }
};
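For what it's worth, the v2 SDK request objects also expose a built-in .promise() method, so an equivalent call without util.promisify would be:
// Equivalent to the promisified call above, using the SDK's built-in .promise().
const results = await route53.changeResourceRecordSets(newRecord).promise();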

Related

How to allow a Tenant lambda function to query a dynamo table in Control Tower ROOT using NodeJs SDK?

I have a Lambda function running in a Tenant account that needs to query DynamoDB Table B inside the Tenant and then query DynamoDB Table A inside ROOT.
This is the code I have so far:
'use strict';
const AWS = require('aws-sdk');
const ddbDc = new AWS.DynamoDB.DocumentClient()

module.exports.testDynamo = async event => {
    try {
        let result
        let params = {}

        // Query Table B inside tenant
        params = {
            TableName: 'Table_B',
            Key: { externalKey: 'CA6E03C' }
        }
        result = await ddbDc.get(params).promise()
        console.log('🚀 result - ', result)

        // Query Table A inside ROOT
        // Restart ddbDc CLIENT with ROOT credentials ?
        params = {
            TableName: 'Table_A',
            Key: { externalKey: 'MAP_CA6E03C' }
        }
        result = await ddbDc.get(params).promise()
        console.log('🚀 result - ', result)

        return {
            statusCode: 200,
            headers: {
                'Access-Control-Allow-Origin': '*',
                'Access-Control-Allow-Credentials': true
            },
            body: JSON.stringify(
                {
                    response: response,
                },
                null,
                2
            ),
        }
    } catch (error) {
        console.error('🚀 testDynamo - error.stack:', error.stack)
        return {
            statusCode: 400,
            headers: {
                'Access-Control-Allow-Origin': '*',
                'Access-Control-Allow-Credentials': true
            },
            body: JSON.stringify(error.stack)
        }
    }
}
I think I need to restart the ddbDc client with ROOT credentials in order to get access to the ROOT resources.
How can I do that?
You need to use STS AssumeRole and assume a role that grants access to that specific item/table.
https://docs.aws.amazon.com/STS/latest/APIReference/API_AssumeRole.html
const AWS = require('aws-sdk');
const ddbDc = new AWS.DynamoDB.DocumentClient()
const sts = new AWS.STS()

module.exports.testDynamo = async event => {
    try {
        let result
        let params = {}

        // Query Table B inside tenant
        params = {
            TableName: 'Table_B',
            Key: { externalKey: 'CA6E03C' }
        }
        result = await ddbDc.get(params).promise()
        console.log('🚀 result - ', result)

        // Query Table A inside ROOT using a client built from the assumed-role credentials
        // https://aws.amazon.com/blogs/apn/isolating-saas-tenants-with-dynamically-generated-iam-policies/
        // https://docs.aws.amazon.com/STS/latest/APIReference/API_AssumeRole.html
        const assumeRole = await sts
            .assumeRole({
                RoleArn: process.env.ROLE_ARN,
                RoleSessionName: process.env.ROLE_SESSION_NAME,
            })
            .promise()
        const credentials = new AWS.Credentials({
            accessKeyId: assumeRole.Credentials?.AccessKeyId,
            secretAccessKey: assumeRole.Credentials?.SecretAccessKey,
            sessionToken: assumeRole.Credentials?.SessionToken,
        })
        const dynamodb = new AWS.DynamoDB.DocumentClient({
            region: process.env.REGION,
            credentials: credentials,
        })
        params = {
            TableName: 'Table_A',
            Key: { externalKey: 'MAP_CA6E03C' }
        }
        result = await dynamodb.get(params).promise()
        console.log('🚀 result - ', result)

        // TODO merge table data
        return {
            statusCode: 200,
            headers: {
                'Access-Control-Allow-Origin': '*',
                'Access-Control-Allow-Credentials': true
            },
            body: JSON.stringify(
                {
                    response: result, // return the last query result
                },
                null,
                2
            ),
        }
    } catch (error) {
        console.error('🚀 testDynamo - error.stack:', error.stack)
        return {
            statusCode: 400,
            headers: {
                'Access-Control-Allow-Origin': '*',
                'Access-Control-Allow-Credentials': true
            },
            body: JSON.stringify(error.stack)
        }
    }
}
Using a Lambda function and the Serverless Framework, do this:
Run npm install --save-dev serverless-iam-roles-per-function to install the serverless-iam-roles-per-function plugin.
...
plugins:
  - serverless-iam-roles-per-function
...
functions:
  listComponentsRootDynamo:
    ...
    ...
    iamRoleStatements:
      - Effect: Allow
        Action:
          - sts:AssumeRole
        Resource: "arn:aws:iam::RootID:role/roleName"
This grants the listComponentsRootDynamo Lambda function access to the Root DynamoDB table inside the Control Tower ROOT account.
Note that the role that provides access to the specific DynamoDB tables must exist among the ROOT account's IAM roles; just copy its Role ARN into the Resource: portion of iamRoleStatements:.
Here is an example of the role inside ROOT that provides access to a specific DynamoDB table <DynamoDb-Table-Name>:
{
  "Version": "2012-10-17",
  "Statement": [
    {
      "Action": [
        "dynamodb:*"
      ],
      "Resource": [
        "arn:aws:dynamodb:us-east-1:Root-Id:table/<DynamoDb-Table-Name>",
        "arn:aws:dynamodb:us-east-1:Root-Id:table/<DynamoDb-Table-Name>/index/*"
      ],
      "Effect": "Allow",
      "Sid": ""
    }
  ]
}
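For the cross-account sts:AssumeRole call to succeed, that role in ROOT also needs a trust policy allowing the tenant Lambda's role (or the tenant account) to assume it. A minimal sketch, with the tenant account ID as a placeholder:
{
  "Version": "2012-10-17",
  "Statement": [
    {
      "Effect": "Allow",
      "Principal": {
        "AWS": "arn:aws:iam::<Tenant-Account-Id>:root"
      },
      "Action": "sts:AssumeRole"
    }
  ]
}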
Then, following Lee Hannigan's answer, do this inside the Lambda that will query the Root DynamoDB table:
'use strict';
const AWS = require('aws-sdk');
const ddbDc = new AWS.DynamoDB.DocumentClient()
const sts = new AWS.STS()

module.exports.testDynamo = async event => {
    try {
        let result
        let params = {}

        // Query Table B inside tenant
        params = {
            TableName: 'Table_B',
            Key: { externalKey: 'CA6E03C' }
        }
        result = await ddbDc.get(params).promise()
        console.log('🚀 result - ', result)

        // Query Table A inside ROOT using a client built from the assumed-role credentials
        // https://aws.amazon.com/blogs/apn/isolating-saas-tenants-with-dynamically-generated-iam-policies/
        // https://docs.aws.amazon.com/STS/latest/APIReference/API_AssumeRole.html
        const assumeRole = await sts
            .assumeRole({
                RoleArn: process.env.ROLE_ARN,
                RoleSessionName: process.env.ROLE_SESSION_NAME,
            })
            .promise()
        const credentials = new AWS.Credentials({
            accessKeyId: assumeRole.Credentials.AccessKeyId,
            secretAccessKey: assumeRole.Credentials.SecretAccessKey,
            sessionToken: assumeRole.Credentials.SessionToken
        })
        const dynamodb = new AWS.DynamoDB.DocumentClient({
            region: process.env.REGION,
            credentials: credentials,
        })
        params = {
            TableName: 'Table_A',
            Key: { externalKey: 'MAP_CA6E03C' }
        }
        result = await dynamodb.get(params).promise()
        console.log('🚀 result - ', result)

        // TODO merge table data
        return {
            statusCode: 200,
            headers: {
                'Access-Control-Allow-Origin': '*',
                'Access-Control-Allow-Credentials': true
            },
            body: JSON.stringify(
                {
                    response: result, // return the last query result
                },
                null,
                2
            ),
        }
    } catch (error) {
        console.error('🚀 testDynamo - error.stack:', error.stack)
        return {
            statusCode: 400,
            headers: {
                'Access-Control-Allow-Origin': '*',
                'Access-Control-Allow-Credentials': true
            },
            body: JSON.stringify(error.stack)
        }
    }
}

Description field is coming back as [Array] instead of the actual array ([1,2,3]) from DynamoDB when I try to get data using query

Items: [
  {
    Continent: 'Continent#Antarctica',
    SKCountry: 'Country#Chile',
    CreatedAt: 1668579154424,
    description: [Array],
    PKContinent: 'PKContinent',
    id: 16,
    UpdatedAt: 1668579154424
  },
  {
    Continent: 'Continent#Antarctica',
    SKCountry: 'Country#France',
    CreatedAt: 1668579154424,
    description: [Array],
    PKContinent: 'PKContinent',
    id: 15,
    UpdatedAt: 1668579154424
  }
]
This is what I am getting, but instead of description: [Array] I want description: [value1, value2, value3]. Also, I am getting this data in the Node console, but in the browser I get an error in the console (Uncaught TypeError: Cannot read properties of undefined (reading 'tagName')).
This is the code snippet in getAllItems:
var AWS = require("aws-sdk");
AWS.config.update({
    region: "local",
    endpoint: "http://localhost:8000"
});
var docClient = new AWS.DynamoDB.DocumentClient()
var table = "Tourism";

const getAllItems = async () => {
    var PKContinent = "PKContinent";
    //console.log("check", PKContinent)
    const params = {
        TableName: table,
        KeyConditionExpression: "PKContinent = :pkUpdate AND begins_with(SKCountry, :SKCountry)",
        ExpressionAttributeValues: {
            ":pkUpdate": PKContinent,
            ":SKCountry": "Country#"
        }
    }
    docClient.query(params, function (error, data) {
        if (error) {
            console.log(error)
        } else {
            var viewArray = [];
            if (data.Items.length === 0) {
                console.log("data doesn't exists.")
            } else {
                console.log(data);
            }
        }
    })
}

module.exports = {
    docClient,
    getAllItems
};
This is the code in getAll:
var express = require('express');
var router = express.Router();
const { getAllItems } = require('../getAllItems');

router.get('/', async (req, res, next) => {
    try {
        const getData = await getAllItems();
        res.json(getData);
    } catch (err) {
        console.error(err);
        res.status(500).json({ err: 'Something went wrong with get' });
    }
});

module.exports = router;
For me, I believe the issue is in how the data is saved, not in how it is read.
You mention you cannot read the data in the console? Can you share a screenshot of how that looks in your question?
And can you also share the output of the console which you stated worked; I'll be able to guide you to the issue then.
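In the meantime, one thing worth checking (an assumption, not a confirmed diagnosis): Node's console.log collapses nested structures beyond its default inspection depth and prints deeper arrays as [Array], so stringifying the items inside the query callback shows what is actually stored:
// Hypothetical debugging tweak inside the query callback: print the full nested
// structure instead of Node's depth-limited inspection output.
console.log(JSON.stringify(data.Items, null, 2));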

Why Does My Lambda Function Not `startExecution` My Step Function

I am looking to connect my Lambda with my Step Function, and cannot figure out why it will not startExecution.
SDK Code:
import AWS from "aws-sdk";
const stepfunctions = new AWS.StepFunctions({ apiVersion: "2016-11-23" });

interface Params {
  stateMachineArn: string;
  input: string;
}

export async function handler(event: any, context: object) {
  console.log("event.body", event.body);

  const params: Params = {
    stateMachineArn: process.env.STEP_FUNCTION_ARN,
    input: JSON.stringify(event.body),
    name: "testNameField",
  };

  console.log("PARAMS", params);

  stepfunctions.startExecution(params, (err: any, data: any) => {
    if (err) {
      console.log("THERE WAS AN ERROR", err);
      console.log("ERROR STACK", err.stack);
    } // an error occurred
    else {
      console.log("data", data);
    } // successful response
  });
}
Permissions:
Allow: states:DeleteStateMachine
Allow: states:StartExecution
Allow: states:CreateStateMachine
Allow: states:SendTaskSuccess
Allow: states:DeleteActivity
Allow: states:SendTaskHeartbeat
Allow: states:CreateActivity
Allow: states:SendTaskFailure
Allow: states:StopExecution
Allow: states:GetActivityTask
Allow: states:UpdateStateMachine
Allow: states:StartSyncExecution
Extra information:
I have tried doing a "test" on the console for the Lambda function, from which it succeeds. I'm not sure where else to look.
In the Step Function, all the columns (Total/Running/Succeeded/Failed/Timed out/Aborted) are 0.
The params console.log offers the correct information.
Are there any error messages output from the console.log?
Solution code (the original handler is async but never waits for startExecution to complete, so the Lambda can return before the request is actually sent; awaiting the .promise() form fixes that):
const AWS = require("aws-sdk");
AWS.config.update({ region: "eu-west-1" });
const stepFunction = new AWS.StepFunctions();

interface Params {
  stateMachineArn: string;
  input: string;
  name: string;
}

exports.handler = async (event: any) => {
  console.log(event);

  const stepFunctionParams = {
    stateMachineArn: process.env.STEP_FUNCTION_ARN,
    input: JSON.stringify({
      message: event.body,
    }),
    name: "name" + String(Date.now()),
  };

  try {
    const stepFunctionResponse = await stepFunction
      .startExecution(stepFunctionParams)
      .promise();
    return { statusCode: 200, body: "Success" };
  } catch (e) {
    console.log("Problem executing SF :", JSON.stringify(e));
    return {
      statusCode: 500,
      body: "Problem executing step function : " + JSON.stringify(e),
      headers: { "Access-Control-Allow-Origin": "*" },
    };
  }
};

Calling RDS data service executeStatement throws "Parameter X has value with no field set"

I am not sure I have clearly understood how to provide parameter values when executing the rds.executeStatement command.
When I execute the code below, this error is thrown:
{
  "errorType": "BadRequestException",
  "errorMessage": "Parameter \"userId\" has value with no field set",
  "code": "BadRequestException",
  "message": "Parameter \"userId\" has value with no field set"
}
Here is my code. How am I supposed to provide the userId and givenName values to the parameters array here?
const AWS = require('aws-sdk');
var RDS = new AWS.RDSDataService({
    apiVersion: '2018-08-01'
});

exports.handler = async (event, context) => {
    var userId;
    var givenName;

    var params = {
        secretArn: 'secretArn',
        resourceArn: 'resourceArn',
        database: 'db',
        parameters: [{
            name: "userId",
            value: {
                "stringValue": userId
            }
        },
        {
            name: "givenName",
            value: {
                "stringValue": givenName
            }
        }]
    };

    event.Records.forEach(function(record) {
        if (record.eventName == 'INSERT') {
            userId = record.dynamodb.NewImage.pk.S;
            givenName = record.dynamodb.NewImage.sk.S;
            params.sql = `INSERT INTO Users (UserId, GivenName) VALUES(:userId, :givenName);`
        }
    });

    await RDS.executeStatement(params).promise();
    console.log(params.parameters[0].value);
    return 'done';
};
UPDATE March 13th
Attaching the CloudWatch logs printing out userId and givenName -
UPDATE March 16th - Function Updates
const AWS = require('aws-sdk');
const RDS = new AWS.RDSDataService({ apiVersion: '2018-08-01' });

exports.handler = async (event, context) => {
    var userId;
    var givenName;
    var count = 0;

    var params = {
        secretArn: 'secretArn',
        resourceArn: 'resourceArn',
        database: 'bol_db',
        parameters: [{
            name: "userId",
            value: {
                "stringValue": userId
            }
        },
        {
            name: "givenName",
            value: {
                "stringValue": givenName
            }
        }]
    };

    const promises = event.Records.map(async function(record) {
        count++;
        context.callbackWaitsForEmptyEventLoop = false;
        if (record.eventName == 'INSERT') {
            userId = record.dynamodb.NewImage.pk.S;
            givenName = record.dynamodb.NewImage.sk.S;
            console.log('userId - ' + userId);
            console.log('givenName - ' + givenName);
            console.log('Params -' + params.parameters);
            params.sql = "INSERT INTO Users (UserId, GivenName) VALUES(:userId, :givenName);"
            let result = await RDS.executeStatement(params).promise();
            console.log('Result -' + result);
        }
    });

    await Promise.all(promises);
    console.log(count);
    return 'done';
};
It seems that you're setting "stringValue": userId before userId has a value. In JavaScript, assigning userId later does not propagate the new value to the places where you already used it.
You should build the params object inside the .map function or, alternatively, loop through the parameter list inside .map and assign the value to the matching parameter there.
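A minimal sketch of the first suggestion, building params inside .map so each record's values are captured at the time the statement is executed (the secretArn, resourceArn, and database values are the placeholders from the question):
// Build a fresh params object per record inside .map, so the parameter values
// come from that record rather than from variables assigned later.
const AWS = require('aws-sdk');
const RDS = new AWS.RDSDataService({ apiVersion: '2018-08-01' });

exports.handler = async (event) => {
    const promises = event.Records
        .filter((record) => record.eventName === 'INSERT')
        .map((record) => {
            const params = {
                secretArn: 'secretArn',
                resourceArn: 'resourceArn',
                database: 'bol_db',
                sql: 'INSERT INTO Users (UserId, GivenName) VALUES(:userId, :givenName);',
                parameters: [
                    { name: 'userId', value: { stringValue: record.dynamodb.NewImage.pk.S } },
                    { name: 'givenName', value: { stringValue: record.dynamodb.NewImage.sk.S } },
                ],
            };
            return RDS.executeStatement(params).promise();
        });
    await Promise.all(promises);
    return 'done';
};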

Why is my Lambda function creating two Spot Instance Requests instead of one?

I have the following lambda function
var AWS = require('aws-sdk');
var ec2 = new AWS.EC2({
    region: "eu-west-1"
});

var userData = `#!/bin/bash
echo "hello there"
`;
var userDataEncoded = new Buffer.from(userData).toString('base64');

var params = {
    InstanceCount: 1,
    LaunchSpecification: {
        ImageId: "ami-xxxxxxxxx",
        InstanceType: "c4.2xlarge",
        KeyName: "xxxxxxx",
        SubnetId: "subnet-xxxxxxxxxx",
        Placement: {
            AvailabilityZone: "eu-west-1a"
        },
        SecurityGroupIds: [
            "sg-xxxxxxxxxx"
        ],
        UserData: userDataEncoded
    },
    SpotPrice: "0.8",
    BlockDurationMinutes: 180,
    Type: "one-time"
};

exports.handler = async (event, context) => {
    await ec2.requestSpotInstances(params, function (err, data) {
        if (err) {
            console.log("error");
        } else {
            console.log("starting instance");
            context.succeed('Completed');
            return {
                statusCode: 200,
                body: JSON.stringify('success!'),
            };
        }
    }).promise();
};
The function is supposed to take my params and create ONE spot request, but it always starts two parallel spot requests with one instance each.
There is no error in the logs; the function is only triggered once according to CloudWatch and has a success rate of 100%.
I set the timeout to 20 minutes, so it can't be that either.
Why is it doing that? I only want one request, and not two. Any help is appreciated.
You can either use the promise-based or callback-based approach. Using both at once results in duplicate calls.
So either remove the callback and use .then and .catch for your response, or do the opposite and do not call .promise() on requestSpotInstances.
exports.handler = async (event, context) =>
    ec2.requestSpotInstances(params).promise()
        .then(() => {
            console.log("starting instance");
            return {
                statusCode: 200,
                body: 'success!'
            };
        }).catch((error) => {
            console.error("error");
            return {
                statusCode: 500,
                body: 'an error occurred'
            };
        });
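One way to read the second option is a plain callback-style handler with no async and no .promise(), using the Lambda callback to signal completion; a sketch:
// Callback-only variant: a non-async handler that relies on the SDK callback
// and the Lambda callback instead of promises, so only one request is made.
exports.handler = (event, context, callback) => {
    ec2.requestSpotInstances(params, (err, data) => {
        if (err) {
            console.error(err);
            return callback(err);
        }
        console.log("starting instance");
        callback(null, {
            statusCode: 200,
            body: 'success!'
        });
    });
};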