I am exploring how to download a document from Amazon S3 and then save it as an attachment on a Xero invoice. Can I get some help with this task?
I am referencing these two links in my code:
createInvoiceAttachmentByFileName: https://xeroapi.github.io/xero-node/accounting/index.html#api-Accounting-createInvoiceAttachmentByFileName
s3 getObject: https://www.tabnine.com/code/javascript/functions/aws-sdk/S3/getObject
Error: I keep receiving this error while sending it to Xero.
TypeError: body.on is not a function
Main
const sendInvoicesResponse = await xeroService.sendDocument(
mappedDocumentData,
integration.tenant.id,
);
const downloadedDocument = await awsService.downloadDocument(document.key);
await xeroService.sendAttachmentToInvoice(
integration.tenant.id,
sendInvoicesResponse.body.invoices[0].invoiceID,
document.name,
downloadedDocument.data,
downloadedDocument.mimetype,
);
AWS Service
module.exports.downloadDocument = async (key) => {
try {
const getParams = {
Bucket: config.aws.documentsBucket, // your bucket name
Key: key,
};
const file = await s3.getObject(getParams).promise();
return {
data: file.Body,
mimetype: file.ContentType,
};
} catch (error) {
logger.error(`download document from AWS fail | message=${error.message}`);
throw new Error(error.message);
}
};
In Xero
module.exports.sendAttachmentToInvoice = async (
tenantId,
invoiceId,
fileName,
body,
contentType,
includeOnline = true,
) => {
try {
return await client.accountingApi.createInvoiceAttachmentByFileName(
tenantId,
invoiceId,
fileName,
body,
includeOnline,
{
headers: {
'Content-Type': contentType,
},
},
);
} catch (error) {
logger.error(`attaching file attachment to xero fail | message=${error.message}`);
throw new Error(error.message);
}
};
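A likely cause of the "body.on is not a function" error, judging from the xero-node examples that pass fs.createReadStream(...) as the body: createInvoiceAttachmentByFileName consumes the body as a readable stream, while s3.getObject(...).promise() returns file.Body as a Buffer. A minimal sketch of one possible fix, wrapping the Buffer in a stream before handing it to Xero:

const { Readable } = require('stream');

module.exports.downloadDocument = async (key) => {
  const getParams = {
    Bucket: config.aws.documentsBucket,
    Key: key,
  };
  const file = await s3.getObject(getParams).promise();
  return {
    // Readable.from turns the Buffer into the stream-like body the SDK expects.
    data: Readable.from(file.Body),
    mimetype: file.ContentType,
  };
};

Alternatively, s3.getObject(getParams).createReadStream() yields a stream directly and avoids buffering the whole file in memory.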
Related
const S3 = require('aws-sdk/clients/s3'); // v2 client
const { getSignedUrl } = require("@aws-sdk/s3-request-presigner"); // v3 presigner
const { S3Client, GetObjectCommand } = require("@aws-sdk/client-s3"); // v3 client
dotenv.config();
const bucketName = process.env.AWS_BUCKET_NAME
const region = process.env.AWS_BUCKET_REGION
const accessKeyId = process.env.AWS_ACCESS_KEY
const secretAccessKey = process.env.AWS_SECRET_KEY
const s3 = new S3({
region,
accessKeyId,
secretAccessKey
})
router.get("/:id", async (req, res) => {
try {
const post = await Post.findById(req.params.id);
const getObjectParams = {
Bucket: bucketName,
Key: post.photo,
}
const command = new GetObjectCommand(getObjectParams);
const url = await getSignedUrl(s3, command, { expiresIn: 3600 });
post.imageUrl = url
res.status(200).json(post);
} catch (err) {
console.error('errorrr', err);
res.status(500).json(err);
}
});
Here is my code. I've console logged post, getObjectParams, and command, and everything is there, but when I console log url nothing is logged, and the catch logs Cannot read properties of undefined (reading 'clone').
What is the issue here?
I think the issue is with the getSignedUrl function, but I'm not sure what it is.
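One likely cause, inferred from the imports above rather than confirmed: s3 is instantiated from the v2 aws-sdk package, but getSignedUrl from @aws-sdk/s3-request-presigner expects a v3 S3Client, and handing it a v2 client leaves internal fields undefined, which matches the "reading 'clone'" error. A minimal sketch of the v3-only version:

const { S3Client, GetObjectCommand } = require("@aws-sdk/client-s3");
const { getSignedUrl } = require("@aws-sdk/s3-request-presigner");

// v3 client: credentials go under a `credentials` object rather than top level.
const s3 = new S3Client({
  region,
  credentials: { accessKeyId, secretAccessKey },
});

// Inside the route handler:
const command = new GetObjectCommand({ Bucket: bucketName, Key: post.photo });
const url = await getSignedUrl(s3, command, { expiresIn: 3600 });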
I have implemented a Hyperledger Fabric network using v2.2 with 5 organizations. The chaincode is invokable using the cli, but when using the API it gives the error "DiscoveryService: mfd-prd-channel error: access denied".
SDK used:
fabric-network: ^2.2.15
Note: Using cryptogen for certificate generation
Function used to invoke a transaction:
const invokeTransaction = async (
channelName,
chaincodeName,
fcn,
args,
username,
org_name
) => {
try {
const ccp = await registerUser.getCCP(org_name);
const walletPath = await registerUser.getWalletPath(org_name);
const wallet = await Wallets.newFileSystemWallet(walletPath);
console.log(`Wallet path: ${walletPath}`);
let identity = await wallet.get(username);
if (!identity) {
console.log(
`An identity for the user ${username} does not exist in the wallet, so registering user`
);
await registerUser.registerEnrollUser(username, org_name);
identity = await wallet.get(username);
if (!identity) {
console.log("Run the registerUser.js application before retrying");
return;
}
}
const connectOptions = {
wallet,
identity: username,
discovery: { enabled: true, asLocalhost: true },
// eventHandlerOptions: EventStrategies.NONE
};
const gateway = new Gateway();
await gateway.connect(ccp, connectOptions);
const network = await gateway.getNetwork(channelName);
const contract = network.getContract(chaincodeName);
let result;
let message;
switch (fcn) {
case "availableStock":
result = await contract.submitTransaction(fcn);
result = { txid: result.toString() };
break;
default:
break;
}
await gateway.disconnect();
let response = {
message: message,
result,
};
return response;
} catch (error) {
console.log(`Getting error: ${error}`);
return error.message;
}
};
The above function is called using
app.get('/producer/storage', async(req,res,next)=>{
let message = await invoke.invokeTransaction("mfd-prd-channel", "pmcc", "availableStock", "", "user1 ", "teafarm");
res.json(message);
})
I have tried to recreate the crypto-config material using cryptogen.
I have tried to recreate the identities.
I have cross-checked the network and channel names.
The previous answer didn't solve the query: https://stackoverflow.com/questions/71705611/channelmychannel-received-discovery-erroraccess-denied-failed-to-evaluate-tran
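One pattern worth ruling out, offered as an assumption rather than a confirmed diagnosis: after regenerating the crypto material with cryptogen, identities left over in the file-system wallet still carry the old certificates, and discovery then rejects them with access denied. Since cryptogen (not a running CA) issues the certs here, a sketch that rebuilds the wallet identity directly from the regenerated MSP files; all paths and the MSP ID are hypothetical placeholders:

const { Wallets } = require('fabric-network');
const fs = require('fs');
const path = require('path');

async function importIdentity() {
  // Hypothetical path into the regenerated crypto-config tree.
  const mspPath = path.resolve(
    'crypto-config/peerOrganizations/teafarm.example.com/users/User1@teafarm.example.com/msp'
  );
  const certDir = path.join(mspPath, 'signcerts');
  const certificate = fs.readFileSync(path.join(certDir, fs.readdirSync(certDir)[0]), 'utf8');
  const keyDir = path.join(mspPath, 'keystore');
  const privateKey = fs.readFileSync(path.join(keyDir, fs.readdirSync(keyDir)[0]), 'utf8');

  // Start from a clean wallet so no identity built from the old certs survives.
  fs.rmSync('./wallet/teafarm', { recursive: true, force: true });
  const wallet = await Wallets.newFileSystemWallet('./wallet/teafarm');
  await wallet.put('user1', {
    credentials: { certificate, privateKey },
    mspId: 'TeafarmMSP', // hypothetical MSP ID; must match the channel config
    type: 'X.509',
  });
}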
CURRENTLY
I am trying to get AWS Textract working for images supplied from a function in Google Scripts, which are sent to a Lambda resolver. I am following the documentation at https://docs.aws.amazon.com/AWSJavaScriptSDK/latest/AWS/Textract.html#analyzeDocument-property
My Google Scripts code:
function googleFunction(id) {
let file = DriveApp.getFileById(id);
console.log("File is a " + file.getMimeType());
let blob = file.getBlob();
let params = {
doc: blob,
};
var options = {
method: "PUT",
"Content-Type": "application/json",
payload: JSON.stringify(params),
};
let response = UrlFetchApp.fetch("https://api-path/prod/resolver", options);
}
My Lambda resolver code:
"use strict";
const AWS = require("aws-sdk");
exports.handler = async (event) => {
let params = JSON.parse(event.body);
console.log("Parse as document...");
let textract = new AWS.Textract();
let doc = params["doc"];
let config = {
Document: {
Bytes: doc,
},
FeatureTypes: ["TABLES"], // FeatureTypes is a top-level parameter, not part of Document
};
textract.analyzeDocument(config, function (err, data) {
console.log("analyzing...");
if (err) {
console.log(err, err.stack); // an error occurred
} else {
console.log("data:" + JSON.stringify(data)); // successful response
}
});
};
ISSUE
File is successfully sent from Google Scripts to Lambda, but the following error is returned:
"errorType": "InvalidParameterType",
"errorMessage": "Expected params.Document.Bytes to be a string, Buffer, Stream, Blob, or typed array object"
Questions
Is there a way of verifying what the format of the doc variable is, to ensure it meets AWS Textract's requirements?
Can anyone see a possible cause for the errors being returned?
NOTES
Textract works fine when the same file is uploaded to an S3 bucket and supplied in the config using:
S3Object: { Bucket: 'bucket_name', Name: 'file_name' }
I have confirmed the file is a JPEG.
Got it working with 2 changes:
added getBytes() to the Google-side code
added Buffer.from() to the AWS-side code
My Google Scripts code:
function googleFunction(id) {
let file = DriveApp.getFileById(id);
console.log("File is a " + file.getMimeType());
let blob = file.getBlob().getBytes();
let params = {
doc: blob,
};
var options = {
method: "PUT",
"Content-Type": "application/json",
payload: JSON.stringify(params),
};
let response = UrlFetchApp.fetch("https://api-path/prod/resolver", options);
}
My Lambda resolver code:
"use strict";
const AWS = require("aws-sdk");
exports.handler = async (event) => {
let params = JSON.parse(event.body);
console.log("Parse as document...");
let textract = new AWS.Textract();
let doc = params["doc"];
let config = {
Document: {
Bytes: Buffer.from(doc),
},
FeatureTypes: ["TABLES"], // FeatureTypes sits alongside Document, not inside it
};
textract.analyzeDocument(config, function (err, data) {
console.log("analyzing...");
if (err) {
console.log(err, err.stack); // an error occurred
} else {
console.log("data:" + JSON.stringify(data)); // successful response
}
});
};
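Why these two changes work, as far as I can tell: getBlob().getBytes() returns a byte array, which JSON.stringify serializes as a plain array of numbers, and on the Lambda side Buffer.from(numberArray) turns that array back into the binary Buffer that params.Document.Bytes accepts. A tiny round-trip sketch with illustrative values:

// What Apps Script effectively sends after JSON.stringify:
const sent = JSON.stringify({ doc: [72, 101, 108, 108, 111] });
// What the Lambda reconstructs:
const doc = JSON.parse(sent).doc; // [72, 101, 108, 108, 111]
const bytes = Buffer.from(doc);   // <Buffer 48 65 6c 6c 6f>, a valid Document.Bytes value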
I tried every possible solution on the internet with no luck.
What I am trying to do is simple: use AWS Lambda functions (through the AWS console) to fetch a user's FCM token from, let's say, DynamoDB (not included in the question), use that token to create an endpoint ARN, and send a push to that specific device.
I tested sending from the SNS console and the push reached the device successfully, but I failed to get it to the device using Lambda functions, although they return a success status and a message ID.
Here is the code I used
// Load the AWS SDK for Node.js
var AWS = require('aws-sdk');
// Set region
AWS.config.update({region: 'us-east-1'});
const sns = new AWS.SNS()
const sampleMessage = {
"GCM": {
"notification": {
"body": "Sample message for Android endpoints",
"title":"Title Test"
}
}
}
exports.handler = async (event) => {
const snsPayload = JSON.stringify(sampleMessage);
const response = {
statusCode: 200,
body: JSON.stringify('Hello from Lambda!'),
};
const params = {
PlatformApplicationArn: '<Platform Arn>',
Token: '<FCM Token>'
};
try {
const endpointData = await sns.createPlatformEndpoint(params).promise();
const paramsMessage = {
Message: snsPayload,
TargetArn: endpointData.EndpointArn
};
var publishTextPromise = await sns.publish(paramsMessage).promise();
response.MessageId = publishTextPromise.MessageId;
response.result = 'Success';
}
catch (e) {
console.log(e.stack)
response.result = 'Error'
}
return response;
};
After some trial and error I figured out the solution to my own question.
1- The GCM part of the payload should be a string, not a JSON object.
2- The message params should have an attribute that explicitly sets the MIME type of the payload to JSON.
Taking all that into consideration:
const GCM_data = {
'notification': {
'body': 'Hellow from lambda function',
'title': 'Notification Title'
}
}
const data = {
"GCM": JSON.stringify(GCM_data)
}
const snsPayload = JSON.stringify(data)
and the params should look like
const paramsMessage = {
Message: snsPayload,
TargetArn: endpointData.EndpointArn,
MessageStructure: 'json'
};
and this will work :)
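For completeness, here is a minimal sketch of the full handler with both fixes applied; the platform application ARN and FCM token remain placeholders, exactly as in the original:

// Load the AWS SDK for Node.js
var AWS = require('aws-sdk');
AWS.config.update({ region: 'us-east-1' });
const sns = new AWS.SNS();

exports.handler = async (event) => {
  // The inner GCM payload must itself be a JSON *string*.
  const GCM_data = {
    notification: {
      body: 'Hello from lambda function',
      title: 'Notification Title',
    },
  };
  const snsPayload = JSON.stringify({ GCM: JSON.stringify(GCM_data) });

  const response = { statusCode: 200 };
  try {
    const endpointData = await sns.createPlatformEndpoint({
      PlatformApplicationArn: '<Platform Arn>', // placeholder
      Token: '<FCM Token>',                     // placeholder
    }).promise();
    const published = await sns.publish({
      Message: snsPayload,
      TargetArn: endpointData.EndpointArn,
      MessageStructure: 'json', // tells SNS the Message is per-protocol JSON
    }).promise();
    response.MessageId = published.MessageId;
    response.result = 'Success';
  } catch (e) {
    console.log(e.stack);
    response.result = 'Error';
  }
  return response;
};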
I have CloudFront in front of an S3 bucket that serves HLS videos. I'm trying to dynamically modify the manifest files to add an auth token to the segment URLs inside of them.
What I would really like to do is modify the body I send back to the client in a viewer response function, but since that isn't possible, I'm attempting to use an origin request function to manually fetch the object from S3, modify it, and return a CloudFront request with the new body. I get a 503 error: "The Lambda function result failed validation: The body is not a string, is not an object, or exceeds the maximum size".
My body is under 8 KB (1 MB is the limit in the docs). As far as I can tell the CloudFront request object I'm generating looks good, and the base64 data decodes to what I want. I've also tried using text instead of base64. I have "include body" enabled in CloudFront.
const querystring = require('querystring');
const AWS = require('aws-sdk');
const S3 = new AWS.S3();
exports.handler = async (event) => {
const cfrequest = event.Records[0].cf.request;
const queryString = querystring.parse(event.Records[0].cf.request.querystring);
const jwtToken = queryString.token;
if (cfrequest.uri.match(/\.m3u8?$/mi)) {
const s3Response = await (new Promise((resolve, reject) => {
S3.getObject({
Bucket: 'bucket',
Key: cfrequest.uri.substring(1)
}, (err, data) => {
if (err) {
reject(err)
} else {
resolve(data);
}
});
}));
const manifestFile = s3Response.Body.toString('utf8');
const newManifest = manifestFile.replace(/^((\S+)\.(m3u8|ts|vtt))$/gmi, (_, url) => `${url}?token=${jwtToken}`);
const base64NewManifest = Buffer.from(newManifest, 'utf8').toString('base64');
const tokenizedCfRequest = {
...cfrequest,
body: {
action: 'replace',
data: base64NewManifest,
encoding: 'base64'
}
};
return tokenizedCfRequest;
}
return cfrequest;
}
If you want to generate your own response, you need to use a viewer request or origin request event and return a response like this:
const querystring = require('querystring');
exports.handler = async (event) => {
const cfRequest = event.Records[0].cf.request;
const queryString = querystring.parse(cfRequest.querystring);
const jwtToken = queryString.token;
if (cfRequest.uri.match(/\.m3u8?$/mi)) {
// ... your code here ...
const response = {
status: 200, // only mandatory field
body: base64NewManifest,
bodyEncoding: 'base64',
};
return response;
}
// Return original request if no uri match
return cfRequest;
}
See also Generating HTTP Responses in Request Triggers.
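One likely refinement, based on the documented Lambda@Edge response shape rather than anything in the answer above: set a Content-Type header on the generated response so players treat the body as an HLS manifest. Response headers use CloudFront's lowercase-keyed list format; the MIME type shown is the usual one for HLS but is an assumption here:

const response = {
  status: 200,
  body: base64NewManifest,
  bodyEncoding: 'base64',
  headers: {
    // CloudFront expects lowercase keys mapping to arrays of { key, value } pairs.
    'content-type': [{ key: 'Content-Type', value: 'application/vnd.apple.mpegurl' }],
  },
};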