Get speech marks from Amazon Polly using Node.js

I am working on an animation project to add subtitles to what my character says. I can get the mp3 file from AWS Polly with no issue.
However, when I want to get the timing of each word separately, it doesn't work. I checked the inspector tab, and I can see some params being passed in the request to polly.aws. Any idea how I can get a JSON/markup file with the start and end of each word and sentence?
const AWS = require('aws-sdk')
const Fs = require('fs')

const Polly = new AWS.Polly({
  signatureVersion: 'v4',
  region: 'us-east-1'
})

// # this part works fine
let params = {
  'Text': 'Hi, my name is Soley. We are building something amazing!',
  'OutputFormat': 'mp3',
  'VoiceId': 'Matthew'
}
// # from chrome's network tab:
// # and is there a way to get mp3 and mark-up text at the same time?
// "text": "Hi, my name is Soley. We are building something amazing!",
// "textContentType": "text",
// "voiceId": "Matthew",
// "languageCode": "en-US",
// "engine": "standard",
// "outputFormat": "json-8000",
// "lexiconNames": [],
// "speechMarksTypes": [
// "word",
// "sentence"
// ]
Polly.synthesizeSpeech(params, (err, data) => {
  if (err) {
    console.log(err)
  } else if (data) {
    console.log(data)
    if (data.AudioStream instanceof Buffer) {
      Fs.writeFile("speech." + params.OutputFormat, data.AudioStream, function (err) {
        if (err) {
          return console.log(err)
        }
        console.log("The file was saved!")
      })
    }
  }
})
Some useful links to check: https://aws.amazon.com/blogs/aws/new-amazon-polly-speech-marks/
Using the CLI also works fine: https://docs.aws.amazon.com/polly/latest/dg/speechmarkexamples.html but I want it in Node.js.

Oh, I think I found something:
let params = {
  'Text': 'Hi, my name is Soley. We are building something amazing!',
  'OutputFormat': 'json',
  'VoiceId': 'Matthew',
  'SpeechMarkTypes': ['word', 'sentence']
}
Thanks to the Java sample: https://docs.aws.amazon.com/polly/latest/dg/SynthesizeSpeechMarksSample.html
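Putting it together (reusing the Polly and Fs objects from the snippet above): with OutputFormat 'json' plus SpeechMarkTypes, the returned AudioStream is newline-delimited JSON, one speech mark per line. Note that a single synthesizeSpeech call returns either audio or speech marks, never both, so getting the mp3 and the marks takes two calls with the same Text. A minimal sketch of the speech-mark call and parsing:

const markParams = {
  'Text': 'Hi, my name is Soley. We are building something amazing!',
  'OutputFormat': 'json',
  'VoiceId': 'Matthew',
  'SpeechMarkTypes': ['word', 'sentence']
}

Polly.synthesizeSpeech(markParams, (err, data) => {
  if (err) {
    return console.log(err)
  }
  // With OutputFormat 'json', AudioStream is newline-delimited JSON,
  // one mark per line, e.g. {"time":6,"type":"word","start":0,"end":2,"value":"Hi"}
  const marks = data.AudioStream.toString('utf8')
    .split('\n')
    .filter(line => line.length > 0)
    .map(line => JSON.parse(line))
  Fs.writeFile('speech.marks.json', JSON.stringify(marks, null, 2), err => {
    if (err) {
      return console.log(err)
    }
    console.log('Speech marks saved!')
  })
})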

Related

Handling multipart/form-data in AWS Lambda

I'm trying to send a request with an image to a Lambda function through API Gateway.
I'm using this piece of code to parse the form data object received by my Lambda function. I then upload the image to S3, but when downloading and opening the image from S3, I see that it's corrupt.
I have tried the following npm packages:
parse-multipart
parse-multipart-data
Both do not work, because I get an empty parts array. The piece of code I use does get results in the array, but the buffers seem to be corrupt.
The problem seems to be in this line of code:
Buffer.from(item.slice(item.search(/Content-Type:\s.+/g) + item.match(/Content-Type:\s.+/g)[0].length + 4, -4), 'binary')
Does anyone have a solution for me?
You can use busboy to parse the multipart form data before uploading to S3 as shown below:
// This code uses ECMAScript modules (ESM) syntax, not CommonJS.
// So, make sure you add "type": "module" in your package.json.
import {S3Client, PutObjectCommand} from '@aws-sdk/client-s3';
import Busboy from 'busboy';

// Initialize the client outside the function handler to take advantage of execution environment reuse.
const s3Client = new S3Client({region: process.env.AWS_REGION});

// Function handler.
export async function handler(event) {
  const {
    ContentType: contentType = '',
    File: file = '',
  } = await FORM.parse(event['body'], event['headers']);
  try {
    // Adds an object to a bucket. We must have `WRITE` permissions on a bucket to add an object to it.
    await s3Client.send(new PutObjectCommand({
      Body: file,
      Bucket: 'BUCKET_NAME',
      ContentType: contentType,
      Key: 'SOME_KEY',
    }));
    return {
      isBase64Encoded: false,
      statusCode: 200,
      body: JSON.stringify({
        message: 'Everything is gonna be alright.',
      }),
    };
  } catch (e) {
    return {
      isBase64Encoded: false,
      statusCode: 404,
      body: JSON.stringify(e),
    };
  }
}

const FORM = {
  parse(body, headers) {
    return new Promise((resolve, reject) => {
      const data = {};
      const buffer = Buffer.from(body, 'base64');
      const bb = Busboy({
        headers: Object.keys(headers).reduce((newHeaders, key) => {
          // busboy expects lower-case headers.
          newHeaders[key.toLowerCase()] = headers[key];
          return newHeaders;
        }, {}),
        limits: {
          fileSize: 10485760, // 10 MB; set as desired.
          files: 1,
        },
      });
      bb.on('file', (name, stream, info) => {
        const chunks = [];
        stream.on('data', (chunk) => {
          if (name === 'File') {
            chunks.push(chunk);
          }
        }).on('limit', () => {
          reject(new Error('File size limit has been reached.'));
        }).on('close', () => {
          if (name === 'File') {
            data[name] = Buffer.concat(chunks);
            data['ContentType'] = info.mimeType;
          }
        });
      });
      bb.on('error', (err) => {
        reject(err);
      });
      bb.on('close', () => {
        resolve(data);
      });
      bb.end(buffer);
    });
  }
};
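To sanity-check FORM.parse locally, here's a minimal sketch that feeds it a handcrafted, base64-encoded multipart body; the boundary, field name, and payload are made up for the test:

const boundary = 'testBoundary';
const rawBody = [
  `--${boundary}`,
  'Content-Disposition: form-data; name="File"; filename="pic.png"',
  'Content-Type: image/png',
  '',
  'fake-image-bytes',
  `--${boundary}--`,
  '',
].join('\r\n');

FORM.parse(
  Buffer.from(rawBody).toString('base64'),
  { 'Content-Type': `multipart/form-data; boundary=${boundary}` }
).then((data) => {
  // Expect 'image/png' and a 16-byte Buffer.
  console.log(data.ContentType, data.File.length);
});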

How to upload multiple photos to AWS at once?

I use React Native with GraphQL.
Right now I can upload one single photo to AWS successfully.
But I want to upload multiple files at once.
If I run a loop, I can successfully upload multiple files to AWS, as below.
const onValid = ({ caption }) => {
  const uploadPhotoArray = selectPhoto.map((sp, index) => {
    return new ReactNativeFile({
      uri: sp,
      name: `${index}.jpg`,
      type: "image/jpeg",
    });
  });
  for (let i = 0; i < uploadPhotoArray.length; i++) {
    uploadPhotoMutation({
      variables: {
        caption,
        file: uploadPhotoArray[i],
      },
    });
  }
};
But the problem is, if I upload 2 images, it creates two rows on AWS and in the backend.
In order to make it one row (post) with 2 images, I made the file column of the backend an array, not a string.
However, I think the problem is the frontend.
I wanted to make uploadPhotoArray an array like below.
Array [
  ReactNativeFile {
    "name": "0.jpg",
    "type": "image/jpeg",
    "uri": "file:///storage/emulated/0/DCIM/Camera/20220306_020216.jpg",
  },
  ReactNativeFile {
    "name": "1.jpg",
    "type": "image/jpeg",
    "uri": "file:///storage/emulated/0/DCIM/Camera/20220305_201130.jpg",
  },
]
Then I tried to run uploadPhotoMutation with this array.
uploadPhotoMutation({
  variables: {
    caption,
    file: uploadPhotoArray,
  },
});
Then it would pass the array data to the backend.
But it doesn't seem to work.
If I can't pass array data to the backend, meaning only one-by-one uploads are possible, then I need to assemble the incoming data into an array on the backend.
But that's also hard for me.
If you want to clarify my question, I can answer in real time; chat is also possible.
Please give me any idea. :(
Backend code:
const fileUrl = fileArrayCheck
  ? await uploadFileToS3(file, loggedInUser.id, "uploads")
  : await uploadStringleFileToS3(file, loggedInUser.id, "uploads");

export const uploadStringleFileToS3 = async (file, userId, folderName) => {
  console.log(file);
  AWS.config.update({
    credentials: {
      accessKeyId: process.env.AWS_KEY,
      secretAccessKey: process.env.AWS_SECRET,
    },
  });
  const { filename, createReadStream } = await file;
  const readStream = createReadStream();
  const objectName = `${folderName}/${userId}-${Date.now()}-${filename}`;
  const { Location } = await new AWS.S3()
    .upload({
      Bucket: "chungchunonuploads",
      Key: objectName,
      ACL: "public-read",
      Body: readStream,
    })
    .promise();
  return [Location];
};
export const uploadFileToS3 = async (filesToUpload, userId, folderName) => {
  // Note: the arrow function must return the promise; with braces and no
  // return, Promise.all receives an array of undefined.
  const uploadPromises = filesToUpload.map((file) =>
    uploadStringleFileToS3(file, userId, folderName)
  );
  return Promise.all(uploadPromises);
};
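No answer is recorded here, but one way to get a single post with many files, assuming the mutation's file argument is declared as a list in the schema (e.g. file: [Upload!]!, which is not shown in the question), is to send the whole array in one mutation call; apollo-upload-client supports lists of files. A sketch:

// Sketch: assumes the backend mutation accepts a list of uploads,
// e.g. uploadPhoto(caption: String, file: [Upload!]!).
const onValid = async ({ caption }) => {
  const files = selectPhoto.map((sp, index) => new ReactNativeFile({
    uri: sp,
    name: `${index}.jpg`,
    type: "image/jpeg",
  }));
  await uploadPhotoMutation({
    variables: {
      caption,
      file: files, // one mutation call, one post, many files
    },
  });
};

On the backend, uploadFileToS3 (with the return fix above) would then receive the whole array and resolve to an array of locations.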

Send an image to a server through fetch using expo-image-picker in React Native

I want to upload an image from my gallery using expo-image-picker and send it to the server through fetch.
expo-image-picker upload code:
const [photo, setPhoto] = React.useState(null)

const chooseImage = async () => {
  let result = await ImagePicker.launchImageLibraryAsync({
    mediaTypes: ImagePicker.MediaTypeOptions.All,
    allowsEditing: true,
    aspect: [4, 3],
    quality: 1
  })
  console.log(result)
  if (!result.cancelled) {
    setPhoto(result.uri)
  }
}
Fetch Code:
export const createRestaurantFetch = (endpoint, inputName, inputType, inputDescription, inputAddress, img, touken) => {
  let bearer = 'Bearer ' + touken;
  var files = new FormData();
  files.append('name', inputName)
  files.append('type', inputType)
  files.append('description', inputDescription)
  files.append('address', inputAddress)
  files.append('photo', img)
  console.log(files)
  fetch(endpoint, {
    method: "POST",
    body: files,
    headers: {
      'Authorization': bearer
    }
  }).then(res => {
    /* console.log("POST RESPONSE: " + JSON.stringify(res)); */
    /* console.log(res.status); */
    if (res.status == 200) {
      res.json().then((data) => { alert(data.message) });
    } else res.json().then((data) => { alert(data.msg) });
  })
  .catch(error => console.error('Error:', error))
  /* .then(response => console.log(response)); */
}
console.log(files) shows:
FormData {
  "_parts": Array [
    Array [
      "name",
      "Resto",
    ],
    Array [
      "type",
      "Fastfood",
    ],
    Array [
      "description",
      "Atnight",
    ],
    Array [
      "address",
      "Springfield",
    ],
    Array [
      "photo",
      "file:///data/user/0/host.exp.exponent/cache/ExperienceData/%2540daolavez%252Fubereats/ImagePicker/dfaedaaf-3c1f-49ea-88ce-e62c9e674115.jpg",
    ],
  ],
}
Function that receives the fetch body on the server side:
export const createRestaurant = async (req: any, res: Response) => {
  const body = req.body;
  const files = req.files;
  try {
    if (!files || Object.keys(files).length === 0 || !files.photo) {
      res.status(400).json({
        done: false,
        msg: 'There is no photo uploaded, please try again.'
      });
    }
  } catch (e) {
    console.error(e);
  }
};
So first I fill the inputs (that is the data you see in the first line of the fetch code), then I pick the image as you can see, then I hit the "Create" button and it executes the fetch code. You can see the content of the FormData in the console.log(files) output above, so it looks like it's wrapping the data well before sending. In the end, the response from the server is the msg you see in the code, 'There is no photo uploaded, please try again.', meaning it doesn't recognize the file that I send. I'm not sure how I can achieve this, any suggestions?
By the way, the img parameter in the first line of the fetch code is the image that I'm passing from a function to the fetch.
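The question is unanswered here, but one detail stands out in the log above: the photo is appended as a bare uri string. In React Native, fetch only builds a real multipart file part when the value is an object with uri, name, and type. A hedged sketch against the question's variables (the name and type values are assumptions; derive them from the picker result where possible):

// Sketch: append the file as an object, not a bare uri string, so that
// fetch builds an actual multipart file part.
files.append('photo', {
  uri: img,            // the picker's file:// uri
  name: 'photo.jpg',   // assumed file name
  type: 'image/jpeg',  // assumed mime type
});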

Failed to use apigwManagementApi.postToConnection in $connect route

I want to return the connectionId to a client after the client connects to the AWS WebSocket API.
I'm using apigwManagementApi.postToConnection to send a response to the client, but I always get an absurd error message.
I have already tried debugging and searching Google, but I can't find a solution for this.
patch.js
require('aws-sdk/lib/node_loader');
var AWS = require('aws-sdk/lib/core');
var Service = AWS.Service;
var apiLoader = AWS.apiLoader;

apiLoader.services['apigatewaymanagementapi'] = {};
AWS.ApiGatewayManagementApi = Service.defineService('apigatewaymanagementapi', ['2018-11-29']);
Object.defineProperty(apiLoader.services['apigatewaymanagementapi'], '2018-11-29', {
  get: function get() {
    var model = {
      "metadata": {
        "apiVersion": "2018-11-29",
        "endpointPrefix": "execute-api",
        "signingName": "execute-api",
        "serviceFullName": "AmazonApiGatewayManagementApi",
        "serviceId": "ApiGatewayManagementApi",
        "protocol": "rest-json",
        "jsonVersion": "1.1",
        "uid": "apigatewaymanagementapi-2018-11-29",
        "signatureVersion": "v4"
      },
      "operations": {
        "PostToConnection": {
          "http": {
            "requestUri": "/#connections/{connectionId}",
            "responseCode": 200
          },
          "input": {
            "type": "structure",
            "members": {
              "Data": {
                "type": "blob"
              },
              "ConnectionId": {
                "location": "uri",
                "locationName": "connectionId"
              }
            },
            "required": [
              "ConnectionId",
              "Data"
            ],
            "payload": "Data"
          }
        }
      },
      "shapes": {}
    }
    model.paginators = {
      "pagination": {}
    }
    return model;
  },
  enumerable: true,
  configurable: true
});
module.exports = AWS.ApiGatewayManagementApi;
index.js
const AWS = require('aws-sdk');
require('./patch.js');

exports.handler = async (event) => {
  const connectionId = event.requestContext.connectionId;
  const apigwManagementApi = new AWS.ApiGatewayManagementApi({
    apiVersion: '2018-11-29',
    endpoint: event.requestContext.domainName + '/' + event.requestContext.stage
  });
  await apigwManagementApi.postToConnection({ ConnectionId: connectionId, Data: connectionId }).promise();
  return {};
};
client.js
const WebSocket = require('ws');
const ws = new WebSocket('wss://****');

ws.on('open', () => {
  console.log('connected ===================>')
  ws.on('message', data => console.warn(`From server: ${data}`));
});
Error in CloudWatch:
{
  "errorMessage": "410",
  "errorType": "UnknownError",
  "stackTrace": [
    "Object.extractError (/var/runtime/node_modules/aws-sdk/lib/protocol/json.js:48:27)",
    "Request.extractError (/var/runtime/node_modules/aws-sdk/lib/protocol/rest_json.js:52:8)",
    "Request.callListeners (/var/runtime/node_modules/aws-sdk/lib/sequential_executor.js:105:20)",
    "Request.emit (/var/runtime/node_modules/aws-sdk/lib/sequential_executor.js:77:10)",
    "Request.emit (/var/runtime/node_modules/aws-sdk/lib/request.js:683:14)",
    "Request.transition (/var/runtime/node_modules/aws-sdk/lib/request.js:22:10)",
    "AcceptorStateMachine.runTo (/var/runtime/node_modules/aws-sdk/lib/state_machine.js:14:12)",
    "/var/runtime/node_modules/aws-sdk/lib/state_machine.js:26:10",
    "Request.<anonymous> (/var/runtime/node_modules/aws-sdk/lib/request.js:38:9)",
    "Request.<anonymous> (/var/runtime/node_modules/aws-sdk/lib/request.js:685:12)"
  ]
}
I don't know why, but if I try it in a custom route, this code works.
Does anyone know how to solve this?
I'd suggest looking into this example from AWS; there is an on-connect response for subprotocol confirmation, but I think any payload can be provided.
The most important bit is the route integration settings in the template, basically the following two lines in the route integration properties:
IntegrationMethod: POST
ConnectionType: INTERNET
Then the response will be sent to the connected client, as sketched below.
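For illustration, a sketch of those settings in a CloudFormation route integration (JSON form; the resource and reference names here are hypothetical, not from the question):

"ConnectRouteIntegration": {
  "Type": "AWS::ApiGatewayV2::Integration",
  "Properties": {
    "ApiId": { "Ref": "WebSocketApi" },
    "IntegrationType": "AWS_PROXY",
    "IntegrationMethod": "POST",
    "ConnectionType": "INTERNET",
    "IntegrationUri": { "Fn::Sub": "arn:aws:apigateway:${AWS::Region}:lambda:path/2015-03-31/functions/${OnConnectFunction.Arn}/invocations" }
  }
}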
The only way I've found to make this work is to use a DynamoDB table to store connections, then set up a trigger from the table back to a Lambda function.
There are a few catches though. This Lambda function won't work like your index.js file above. You'll have to run npm install --save aws-sdk in a folder with your index.js file, zip it, and upload it to the Lambda function, so that the SDK is packaged with it.
You will also need to set up a user with proper access and put the credentials into your Lambda function.
Note: if you see a 410 error, it means the connection is no longer there, so you're going in the right direction at that point.
const AWS = require('aws-sdk');
require('./patch.js');
var log = console.log;

AWS.config.update({
  accessKeyId: "YOURDATAHERE",
  secretAccessKey: "YOURDATAHERE"
});

let send = undefined;
function init() {
  const apigwManagementApi = new AWS.ApiGatewayManagementApi({
    apiVersion: '2018-11-29',
    endpoint: "HARDCODEYOURENDPOINTHERE"
  });
  send = async (connectionId, data) => {
    await apigwManagementApi.postToConnection({ ConnectionId: connectionId, Data: `${data}` }).promise();
  }
}

exports.handler = async (event, context) => {
  init();
  console.log('Received event:', JSON.stringify(event, null, 2));
  for (const record of event.Records) {
    //console.log(record.eventID);
    console.log(record.eventName);
    console.log('DynamoDB Record: %j', record.dynamodb);
    if (record.eventName == "INSERT") {
      var connectionId = record.dynamodb.NewImage.connectionId.S;
      try {
        await send(connectionId, connectionId);
      } catch (err) {
        log("Error", err);
      }
      log("sent");
    }
  }
  return `Successfully processed ${event.Records.length} records.`;
};

Pushing AWS Lambda data to Kinesis Stream

Is there a way to push data from a Lambda function to a Kinesis stream? I have searched the internet but have not found any examples related to it.
Thanks.
Yes, you can send information from Lambda to a Kinesis stream, and it is very simple to do. Make sure you are running Lambda with the right permissions.
Create a file called kinesis.js. This file will provide a 'save' function that receives a payload and sends it to the Kinesis stream. We want to be able to include this 'save' function anywhere we want to send data to the stream. Code:
const AWS = require('aws-sdk');
const kinesisConstant = require('./kinesisConstants'); //Keep it consistent

const kinesis = new AWS.Kinesis({
  apiVersion: kinesisConstant.API_VERSION, //optional
  //accessKeyId: '<you-can-use-this-to-run-it-locally>', //optional
  //secretAccessKey: '<you-can-use-this-to-run-it-locally>', //optional
  region: kinesisConstant.REGION
});

const savePayload = (payload) => {
  //We can only save strings into the streams
  if (typeof payload !== kinesisConstant.PAYLOAD_TYPE) {
    try {
      payload = JSON.stringify(payload);
    } catch (e) {
      console.log(e);
    }
  }

  let params = {
    Data: payload,
    PartitionKey: kinesisConstant.PARTITION_KEY,
    StreamName: kinesisConstant.STREAM_NAME
  };

  kinesis.putRecord(params, function (err, data) {
    if (err) console.log(err, err.stack);
    else console.log('Record added:', data);
  });
};

exports.save = (payload) => {
  const params = {
    StreamName: kinesisConstant.STREAM_NAME,
  };

  kinesis.describeStream(params, function (err, data) {
    if (err) console.log(err, err.stack);
    else {
      //Make sure stream is able to take new writes (ACTIVE or UPDATING are good)
      if (data.StreamDescription.StreamStatus === kinesisConstant.STATE.ACTIVE
          || data.StreamDescription.StreamStatus === kinesisConstant.STATE.UPDATING) {
        savePayload(payload);
      } else {
        console.log(`Kinesis stream ${kinesisConstant.STREAM_NAME} is ${data.StreamDescription.StreamStatus}.`);
        console.log(`Record Lost`, JSON.parse(payload));
      }
    }
  });
};
Create a kinesisConstants.js file (the name has to match the require above) to keep it consistent :)
module.exports = {
  STATE: {
    ACTIVE: 'ACTIVE',
    UPDATING: 'UPDATING',
    CREATING: 'CREATING',
    DELETING: 'DELETING'
  },
  STREAM_NAME: '<your-stream-name>',
  PARTITION_KEY: '<string-value-if-one-shard-anything-will-do>',
  PAYLOAD_TYPE: 'string', // typeof returns lower-case 'string'
  REGION: '<the-region-where-you-have-lambda-and-kinesis>',
  API_VERSION: '2013-12-02'
}
Your handler file: we added the 'done' function to send a response to whoever wants to send data to the stream, but 'kinesis.save(event)' does all the work.
const kinesis = require('./kinesis');

exports.handler = (event, context, callback) => {
  console.log('LOADING handler');

  const done = (err, res) => callback(null, {
    statusCode: err ? '400' : '200',
    body: err || res,
    headers: {
      'Content-Type': 'application/json',
    },
  });

  kinesis.save(event); // here we send it to the stream
  done(null, event);
}
This works exactly the same as doing it on your own computer.
Here's an example in Node.js:
let aws = require('aws-sdk');
let kinesis = new aws.Kinesis();

// data that you'd like to send
let data_object = { "some": "properties" };
let data = JSON.stringify(data_object);

// push data to kinesis
const params = {
  Data: data,
  PartitionKey: "1",
  StreamName: "stream name"
}

kinesis.putRecord(params, (err, data) => {
  if (err) console.error(err);
  else console.log("data sent");
});
Please note, this piece of code will not work as-is: the Lambda has no permissions to your stream.
When accessing AWS resources through Lambda, it is better to use IAM roles:
When configuring a new Lambda, you can choose an existing role or create a new one.
Go to IAM, then Roles, and pick the role name you assigned to the Lambda function.
Add the relevant permissions (putRecord, putRecords); a sample policy sketch follows below.
Then, test the Lambda.
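For illustration, a minimal policy statement granting just those two actions might look like this (the region, account, and stream placeholders are yours to fill in):

{
  "Version": "2012-10-17",
  "Statement": [
    {
      "Effect": "Allow",
      "Action": [
        "kinesis:PutRecord",
        "kinesis:PutRecords"
      ],
      "Resource": "arn:aws:kinesis:<region>:<account-id>:stream/<your-stream-name>"
    }
  ]
}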
Yes, this can be done. I was trying to accomplish the same thing and was able to do so in Lambda using the Node.js 4.3 runtime; it also works in version 6.10.
Here is the code:
Declare the following at the top of your Lambda function:
var AWS = require("aws-sdk");
var kinesis = new AWS.Kinesis();

function writeKinesis(rawdata) {
  // Declare locals; the original relied on implicit globals.
  var data = JSON.stringify(rawdata);
  var params = { Data: data, PartitionKey: "<PARTITION_KEY>", StreamName: "<STREAM_NAME>" };
  kinesis.putRecord(params, (err, data) => {
    if (err) console.error(err);
    else console.log("data sent");
  });
}
Now, in exports.handler, call the function:
writeKinesis(<YOUR_DATA>);
A few things to note: for Kinesis to ingest data, it must be encoded. In the example below, I have a function that takes logs from CloudWatch and sends them over to a Kinesis stream.
Note that I'm inserting the contents of buffer.toString('utf8') into the writeKinesis function:
exports.handler = function(input, context) {
  ...
  // On modern Node runtimes, prefer Buffer.from(input.awslogs.data, 'base64').
  var zippedInput = new Buffer(input.awslogs.data, 'base64');
  zlib.gunzip(zippedInput, function(error, buffer) {
    ...
    writeKinesis(buffer.toString('utf8'));
    ...
  });
  ...
}
Finally, in IAM, configure the appropriate permissions. Your Lambda function has to run within the context of an IAM role that includes the following permissions. In my case, I just modified the default lambda_elasticsearch_execution role to include a policy called "lambda_kinesis_execution" with the following code:
"Effect": "Allow",
"Action": [
"kinesis:*"
],
"Resource": [
"<YOUR_STREAM_ARN>"
]