Cannot create communication from sender to receiver in CAF v3 (casting)

We are able to establish communication from sender to receiver, but we can only send NUMBERS as a custom message (e.g. 123, 45, etc.).
When we try to attach any string content (e.g. hello, hello122, or any other characters), the receiver side does not get the message.
We have executed the following code:
try {
    const channel = "urn:x-cast:testChannel";
    const iframe = document.getElementById("frame");
    const node = document.getElementById("message");
    const ctx = cast.framework.CastReceiverContext.getInstance();
    node.innerHTML = 'test1';
    ctx.addCustomMessageListener(channel, (evt) => {
        node.innerHTML = 'test...';
        ctx.sendCustomMessage('urn:x-cast:testChannel', evt.senderId, 'Message Invoked v3', (data) => {
            document.getElementById("message").innerHTML = 'message sent';
            iframe.src = "https://duchy-messages.bradnin.ch/";
            node.innerHTML = 'Message sent ';
        });
        node.innerHTML = evt.senderId + ' test ' + JSON.stringify(evt);
        alert('here');
    });
    ctx.start();
} catch (e) {
    document.getElementById("message").innerHTML = JSON.stringify(e);
}
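A likely cause (an assumption on my part, not something confirmed in the question): custom message namespaces default to the JSON message type, so a bare number such as 123 parses as valid JSON while a bare string such as hello does not, and the message never reaches the listener. If that is what's happening, declaring the namespace as a STRING channel when starting the receiver should let arbitrary text through:

const ctx = cast.framework.CastReceiverContext.getInstance();

// Register the custom namespace as STRING instead of the default JSON,
// so plain text like "hello" is delivered to the listener unchanged.
ctx.start({
  customNamespaces: {
    'urn:x-cast:testChannel': cast.framework.system.MessageType.STRING
  }
});

Alternatively, keeping the default JSON type and wrapping the text in an object on the sender side (e.g. { text: 'hello' }) should also round-trip correctly.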

Related

Cannot POST when sending request in Postman

I am having a small issue. I am trying to send a request in Postman, but I receive "Cannot POST /api/logTemp/1/25".
Here is my app.js:
const express = require('express')
const bodyParser = require('body-parser')
const cors = require('cors')
const fs = require('fs')
const path = require('path')
const morgan = require('morgan')
const router = require('./routes/route')

const app = express()
app.use(cors())

// parse application/x-www-form-urlencoded
app.use(bodyParser.urlencoded({ extended: false }))
// parse application/json
app.use(bodyParser.json())
app.use(morgan('dev'))

// create a write stream (in append mode)
var accessLogStream = fs.createWriteStream(path.join(__dirname, '/logs/access.log'), { flags: 'a' })
// setup the logger
app.use(morgan('combined', { stream: accessLogStream }))

app.use(router)

app.get('/', (req, res) => {
  res.send('Hello World!')
})

const port = 3000
//app.listen(process.env.PORT || port, (err) => {
app.listen(port, () => {
  console.log('Server started running on :' + port)
})
and here is my controller file:
const { getEnabledCategories } = require('trace_events');
const mysql = require('../database/db')

class MainController {
  async logTemp(req, res) {
    console.log(req.params.temperature)
    console.log(req.params.deviceID)
    if (req.params.deviceID != null && req.params.temperature != null) {
      let deviceID = req.params.deviceID
      let temperature = req.params.temperature;
      var sql = `insert into log_temp (log_date, device_id, temp) values (now(), ${deviceID}, ${temperature});`
      mysql.query(sql, (error, data, fields) => {
        if (error) {
          res.status(500)
          res.send(error.message)
        } else {
          console.log(data)
          res.json({
            status: 200,
            message: "Log uploaded successfully",
            affectedRows: data.affectedRows
          })
        }
      })
    } else {
      res.send('Please fill in all the data!')
    }
  }

  async getLogs(req, res) {
    console.log("Get Logs")
    console.log(req.params.deviceID)
    if (req.params.deviceID != null) {
      let deviceID = req.params.deviceID;
      var sql = `SELECT * FROM log_temp where device_id=${deviceID}`
      mysql.query(sql, (error, data, fields) => {
        if (error) {
          res.status(500)
          res.send(error.message)
        } else {
          console.log(data)
          res.json({
            data
          })
        }
      })
    }
  }
}

const tempController = new MainController()
module.exports = tempController;
The code above was made in Visual Studio. It is odd because getLogs works but logTemp does not. What I intend to do with logTemp is add a new value (the temperature) to the MySQL database. The connection to the database works just fine, as does localhost. If you need any more info in order to help me find a solution, please let me know and I will be more than happy to provide it. Also, I'm sorry for any grammar errors, English is not my first language :)
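"Cannot POST /api/logTemp/1/25" almost always means Express has no POST route matching that path, and ./routes/route is not shown in the question. A minimal sketch of what that file would need to contain (the paths and the controller filename are assumptions inferred from the controller code):

const express = require('express')
const router = express.Router()
// hypothetical path; use wherever MainController is actually exported from
const tempController = require('../controllers/maincontroller')

// The controller reads req.params.deviceID and req.params.temperature,
// so the route must declare both parameters and accept POST.
router.post('/api/logTemp/:deviceID/:temperature', tempController.logTemp)
router.get('/api/getLogs/:deviceID', tempController.getLogs)

module.exports = router

If the route was registered with router.get instead of router.post, that would explain why getLogs works while logTemp returns "Cannot POST". As a side note, interpolating req.params directly into the SQL string is open to SQL injection; the mysql driver supports placeholders, e.g. mysql.query('insert into log_temp (log_date, device_id, temp) values (now(), ?, ?)', [deviceID, temperature], callback).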

AWS Firehose newline character

I've read a lot of similar questions about adding newline characters to Firehose, but they're all about adding the newline character at the source. The problem is that I don't have access to the source: a third party is piping data to our Kinesis instance and I cannot add the \n at the source.
I've tried doing a Firehose data transformation using the following code:
'use strict';
console.log('Loading function');

exports.handler = (event, context, callback) => {
    /* Process the list of records and transform them */
    const output = [];
    event.records.forEach((record) => {
        const results = {
            /* This transformation is the "identity" transformation, the data is left intact */
            recordId: record.recordId,
            result: record.data.event_type === 'alert' ? 'Dropped' : 'Ok',
            data: record.data + '\n'
        };
        output.push(results);
    });
    console.log(`Processing completed. Successful records ${output.length}.`);
    callback(null, { records: output });
};
but the newline is still lost. I've also tried JSON.stringify(record.data) + '\n' but then I get an Invalid output structure error.
Try decoding record.data, add a newline, then encode it again as base64. This is Python, but the idea is the same:
for record in event['records']:
    # record['data'] arrives base64-encoded; decode it to text first
    payload = base64.b64decode(record['data']).decode('utf-8')
    # Do custom processing on the payload here
    payload = payload + '\n'
    output_record = {
        'recordId': record['recordId'],
        'result': 'Ok',
        # Firehose expects the returned data to be base64-encoded again
        'data': base64.b64encode(payload.encode('utf-8')).decode('utf-8')
    }
    output.append(output_record)
return {'records': output}
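This works because record.data arrives base64-encoded: appending '\n' to the encoded string, as the question's code does, only alters the base64 text rather than the decoded payload, which is why the newline never shows up downstream.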
From the comment of @Matt Westlake:
For those looking for the Node.js answer, it's
const data = JSON.parse(Buffer.from(record.data, 'base64').toString('utf8'));
and
Buffer.from(JSON.stringify(data) + '\n').toString('base64')
(Buffer.from is a static method, so the new in the original comment should be dropped.)
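Putting those pieces together, a minimal complete Node.js transform handler might look like this (a sketch, assuming each record's payload is a single JSON document):

'use strict';

exports.handler = (event, context, callback) => {
    const output = event.records.map((record) => {
        // Decode the incoming record from base64 into a UTF-8 JSON string
        const data = JSON.parse(Buffer.from(record.data, 'base64').toString('utf8'));

        return {
            recordId: record.recordId,
            result: 'Ok',
            // Re-encode with a trailing newline so records end up delimited
            data: Buffer.from(JSON.stringify(data) + '\n').toString('base64'),
        };
    });
    callback(null, { records: output });
};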
The kinesis-firehose-cloudwatch-logs-processor blueprint lambda does this (with some additional handling for CloudWatch Logs).
Here's the lambda code from the blueprint as of today:
/*
For processing data sent to Firehose by Cloudwatch Logs subscription filters.

Cloudwatch Logs sends to Firehose records that look like this:

{
  "messageType": "DATA_MESSAGE",
  "owner": "123456789012",
  "logGroup": "log_group_name",
  "logStream": "log_stream_name",
  "subscriptionFilters": [
    "subscription_filter_name"
  ],
  "logEvents": [
    {
      "id": "01234567890123456789012345678901234567890123456789012345",
      "timestamp": 1510109208016,
      "message": "log message 1"
    },
    {
      "id": "01234567890123456789012345678901234567890123456789012345",
      "timestamp": 1510109208017,
      "message": "log message 2"
    }
    ...
  ]
}

The data is additionally compressed with GZIP.

The code below will:

1) Gunzip the data
2) Parse the json
3) Set the result to ProcessingFailed for any record whose messageType is not DATA_MESSAGE, thus redirecting them to the
   processing error output. Such records do not contain any log events. You can modify the code to set the result to
   Dropped instead to get rid of these records completely.
4) For records whose messageType is DATA_MESSAGE, extract the individual log events from the logEvents field, and pass
   each one to the transformLogEvent method. You can modify the transformLogEvent method to perform custom
   transformations on the log events.
5) Concatenate the result from (4) together and set the result as the data of the record returned to Firehose. Note that
   this step will not add any delimiters. Delimiters should be appended by the logic within the transformLogEvent
   method.
6) Any additional records which exceed 6MB will be re-ingested back into Firehose.
*/
const zlib = require('zlib');
const AWS = require('aws-sdk');

/**
 * logEvent has this format:
 *
 * {
 *   "id": "01234567890123456789012345678901234567890123456789012345",
 *   "timestamp": 1510109208016,
 *   "message": "log message 1"
 * }
 *
 * The default implementation below just extracts the message and appends a newline to it.
 *
 * The result must be returned in a Promise.
 */
function transformLogEvent(logEvent) {
    return Promise.resolve(`${logEvent.message}\n`);
}
function putRecordsToFirehoseStream(streamName, records, client, resolve, reject, attemptsMade, maxAttempts) {
    client.putRecordBatch({
        DeliveryStreamName: streamName,
        Records: records,
    }, (err, data) => {
        const codes = [];
        let failed = [];
        let errMsg = err;
        if (err) {
            failed = records;
        } else {
            for (let i = 0; i < data.RequestResponses.length; i++) {
                const code = data.RequestResponses[i].ErrorCode;
                if (code) {
                    codes.push(code);
                    failed.push(records[i]);
                }
            }
            errMsg = `Individual error codes: ${codes}`;
        }
        if (failed.length > 0) {
            if (attemptsMade + 1 < maxAttempts) {
                console.log('Some records failed while calling PutRecordBatch, retrying. %s', errMsg);
                putRecordsToFirehoseStream(streamName, failed, client, resolve, reject, attemptsMade + 1, maxAttempts);
            } else {
                reject(`Could not put records after ${maxAttempts} attempts. ${errMsg}`);
            }
        } else {
            resolve('');
        }
    });
}
function putRecordsToKinesisStream(streamName, records, client, resolve, reject, attemptsMade, maxAttempts) {
    client.putRecords({
        StreamName: streamName,
        Records: records,
    }, (err, data) => {
        const codes = [];
        let failed = [];
        let errMsg = err;
        if (err) {
            failed = records;
        } else {
            for (let i = 0; i < data.Records.length; i++) {
                const code = data.Records[i].ErrorCode;
                if (code) {
                    codes.push(code);
                    failed.push(records[i]);
                }
            }
            errMsg = `Individual error codes: ${codes}`;
        }
        if (failed.length > 0) {
            if (attemptsMade + 1 < maxAttempts) {
                console.log('Some records failed while calling PutRecords, retrying. %s', errMsg);
                putRecordsToKinesisStream(streamName, failed, client, resolve, reject, attemptsMade + 1, maxAttempts);
            } else {
                reject(`Could not put records after ${maxAttempts} attempts. ${errMsg}`);
            }
        } else {
            resolve('');
        }
    });
}
function createReingestionRecord(isSas, originalRecord) {
    if (isSas) {
        return {
            Data: new Buffer(originalRecord.data, 'base64'),
            PartitionKey: originalRecord.kinesisRecordMetadata.partitionKey,
        };
    } else {
        return {
            Data: new Buffer(originalRecord.data, 'base64'),
        };
    }
}

function getReingestionRecord(isSas, reIngestionRecord) {
    if (isSas) {
        return {
            Data: reIngestionRecord.Data,
            PartitionKey: reIngestionRecord.PartitionKey,
        };
    } else {
        return {
            Data: reIngestionRecord.Data,
        };
    }
}
exports.handler = (event, context, callback) => {
    Promise.all(event.records.map(r => {
        const buffer = new Buffer(r.data, 'base64');
        const decompressed = zlib.gunzipSync(buffer);
        const data = JSON.parse(decompressed);
        // CONTROL_MESSAGE are sent by CWL to check if the subscription is reachable.
        // They do not contain actual data.
        if (data.messageType === 'CONTROL_MESSAGE') {
            return Promise.resolve({
                recordId: r.recordId,
                result: 'Dropped',
            });
        } else if (data.messageType === 'DATA_MESSAGE') {
            const promises = data.logEvents.map(transformLogEvent);
            return Promise.all(promises)
                .then(transformed => {
                    const payload = transformed.reduce((a, v) => a + v, '');
                    const encoded = new Buffer(payload).toString('base64');
                    return {
                        recordId: r.recordId,
                        result: 'Ok',
                        data: encoded,
                    };
                });
        } else {
            return Promise.resolve({
                recordId: r.recordId,
                result: 'ProcessingFailed',
            });
        }
    })).then(recs => {
        const isSas = Object.prototype.hasOwnProperty.call(event, 'sourceKinesisStreamArn');
        const streamARN = isSas ? event.sourceKinesisStreamArn : event.deliveryStreamArn;
        const region = streamARN.split(':')[3];
        const streamName = streamARN.split('/')[1];
        const result = { records: recs };
        let recordsToReingest = [];
        const putRecordBatches = [];
        let totalRecordsToBeReingested = 0;
        const inputDataByRecId = {};
        event.records.forEach(r => inputDataByRecId[r.recordId] = createReingestionRecord(isSas, r));
        let projectedSize = recs.filter(rec => rec.result === 'Ok')
            .map(r => r.recordId.length + r.data.length)
            .reduce((a, b) => a + b);
        // 6000000 instead of 6291456 to leave ample headroom for the stuff we didn't account for
        for (let idx = 0; idx < event.records.length && projectedSize > 6000000; idx++) {
            const rec = result.records[idx];
            if (rec.result === 'Ok') {
                totalRecordsToBeReingested++;
                recordsToReingest.push(getReingestionRecord(isSas, inputDataByRecId[rec.recordId]));
                projectedSize -= rec.data.length;
                delete rec.data;
                result.records[idx].result = 'Dropped';
                // split out the record batches into multiple groups, 500 records at max per group
                if (recordsToReingest.length === 500) {
                    putRecordBatches.push(recordsToReingest);
                    recordsToReingest = [];
                }
            }
        }
        if (recordsToReingest.length > 0) {
            // add the last batch
            putRecordBatches.push(recordsToReingest);
        }
        if (putRecordBatches.length > 0) {
            new Promise((resolve, reject) => {
                let recordsReingestedSoFar = 0;
                for (let idx = 0; idx < putRecordBatches.length; idx++) {
                    const recordBatch = putRecordBatches[idx];
                    if (isSas) {
                        const client = new AWS.Kinesis({ region: region });
                        putRecordsToKinesisStream(streamName, recordBatch, client, resolve, reject, 0, 20);
                    } else {
                        const client = new AWS.Firehose({ region: region });
                        putRecordsToFirehoseStream(streamName, recordBatch, client, resolve, reject, 0, 20);
                    }
                    recordsReingestedSoFar += recordBatch.length;
                    console.log('Reingested %s/%s records out of %s in to %s stream', recordsReingestedSoFar, totalRecordsToBeReingested, event.records.length, streamName);
                }
            }).then(
                () => {
                    console.log('Reingested all %s records out of %s in to %s stream', totalRecordsToBeReingested, event.records.length, streamName);
                    callback(null, result);
                },
                failed => {
                    console.log('Failed to reingest records. %s', failed);
                    callback(failed, null);
                });
        } else {
            console.log('No records needed to be reingested.');
            callback(null, result);
        }
    }).catch(ex => {
        console.log('Error: ', ex);
        callback(ex, null);
    });
};
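One caveat if you lift this blueprint onto a current Node.js runtime: new Buffer(...) is deprecated; Buffer.from(...) (and Buffer.alloc(...) for fresh allocations) are the drop-in replacements.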
Here is code that will solve the problem:
__author__ = "Soumil Nitin Shah"

import json
import boto3
import base64


class MyHasher(object):
    def __init__(self, key):
        self.key = key

    def get(self):
        # base64-encode the key and return it as a UTF-8 string
        keys = str(self.key).encode("UTF-8")
        keys = base64.b64encode(keys)
        keys = keys.decode("UTF-8")
        return keys


def lambda_handler(event, context):
    output = []
    for record in event['records']:
        payload = base64.b64decode(record['data'])
        # Get the payload from EventBridge and just take the data attribute,
        # appending the newline delimiter
        serialize_payload = str(json.loads(payload)) + "\n"
        hasherHelper = MyHasher(key=serialize_payload)
        hash = hasherHelper.get()
        output_record = {
            'recordId': record['recordId'],
            'result': 'Ok',
            'data': hash
        }
        print("output_record", output_record)
        output.append(output_record)
    return {'records': output}
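Despite its name, MyHasher doesn't hash anything: it simply base64-encodes its input, which happens to be exactly the encoding Firehose requires in the returned data field.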

Cognito custom-message triggered lambda returns InvalidLambdaResponseException

I've created a lambda and assigned it to Cognito through the UI as its custom-message lambda.
Here is the code in TypeScript:
export const handler = async (event) => {
  const trigger = event.triggerSource
  const customMessage = cloneDeep(customMessages[trigger])
  if (customMessage) {
    try {
      // inject cognito values to custom message
      const codeParameter = event.request.codeParameter
      const usernameParameter = event.request.usernameParameter
      for (let key in customMessage) {
        let text = customMessage[key]
        if (codeParameter) {
          customMessage[key] = text.replace(/{{codeParameter}}/g, codeParameter)
        }
        if (usernameParameter) {
          customMessage[key] = text.replace(/{{usernameParameter}}/g, usernameParameter)
        }
      }
      // load HTML template
      let htmlFile = readFileSync(templateFilePath, { encoding: 'utf8' })
      htmlFile = htmlFile.replace(/(\r\n|\n|\r)/gm, '')
      const template = handlebars.compile(htmlFile)
      const html = template(customMessage)
      event.emailMessage = html
      event.response.emailSubject = customMessage.title
    } catch (err) {
      logger.error(err)
      return event
    }
  }
  return event
}
Basically it loads an HTML template file and injects the code parameter and username.
Now the response our signup flow lambda returns is:
{
  "message": "InvalidLambdaResponseException",
  "details": "Unrecognizable lambda output"
}
I even tried to copy-paste the AWS example:
exports.handler = (event, context, callback) => {
    //
    if (event.userPoolId === "theSpecialUserPool") {
        // Identify why was this function invoked
        if (event.triggerSource === "CustomMessage_SignUp") {
            // Ensure that your message contains event.request.codeParameter. This is the placeholder for code that will be sent
            event.response.smsMessage = "Welcome to the service. Your confirmation code is " + event.request.codeParameter;
            event.response.emailSubject = "Welcome to the service";
            event.response.emailMessage = "Thank you for signing up. " + event.request.codeParameter + " is your verification code";
        }
        // Create custom message for other events
    }
    // Customize messages for other user pools
    // Return to Amazon Cognito
    callback(null, event);
};
The response is the same.
Any suggestions?
Thanks
Here is my custom message lambda. It runs on Node 8.10. Maybe you'd like to test/adapt it. I've stripped some other stuff out, but it should work fine.
exports.handler = function(event, context) {
    const cognitoUserPool = 'us-east-1_AAAAAA';
    const snsTopicArn = 'arn:aws:sns:us-east-1:9999999999:BBBBBBBBBB';
    const baseurl = 'https://company.us-east-1.elasticbeanstalk.com/app';
    console.log('Cognito Event:', event);
    var AWS = require("aws-sdk");
    if (event.userPoolId === cognitoUserPool) {
        if (event.triggerSource === "CustomMessage_SignUp") {
            event.response.emailSubject = "Welcome to Company";
            event.response.emailMessage = "Hello etc";
            context.done(null, event);
        }
        if (event.triggerSource === "CustomMessage_ResendCode") {
            event.response.emailSubject = "Welcome to Company";
            event.response.emailMessage = "Some other message etc";
            context.done(null, event);
        }
        if (event.triggerSource === "CustomMessage_ForgotPassword") {
            event.response.emailSubject = "Your password reset";
            event.response.emailMessage = "Some other message again etc";
            context.done(null, event);
        }
        // Other event types can go here
    } else {
        context.done(null, event);
    }
};
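Comparing these working examples with the original TypeScript handler suggests a likely culprit (my reading of the snippet, not confirmed by the poster): the generated HTML is assigned to event.emailMessage, but Cognito only reads the message fields from event.response, and an incomplete response object is exactly what produces InvalidLambdaResponseException. The fix would be one line:

// Cognito inspects event.response.*; top-level fields on event are ignored.
event.response.emailMessage = html   // was: event.emailMessage = html
event.response.emailSubject = customMessage.title
return event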

not able to get request.file() working in adonisjs post

When I try to read the request for both the text and the file, I don't get a response. Postman just shows 200 and nothing is updated in the database.
'use strict';
const Pendingupload = use('App/Models/Pendingupload');
const Helpers = use('Helpers');

class UploadController {
  async create({ request, response, auth, session }) {
    const { title, description } = request.all();
    const validationOptions = {
      types: ['image'],
      size: '2mb',
      extnames: ['png', 'gif']
    };
    // when call it; there is an error
    const avatar = request.file('file', validationOptions);
    try {
      const Pendinguploads = new Pendingupload();
      Pendinguploads.title = title;
      Pendinguploads.description = description;
      Pendinguploads.user_id = await auth.user.id;
      Pendinguploads.image_url = '';
      // this is when validation occurs
      Pendinguploads.image_url = new Date().getTime + '.' + avatar.subtype;
      await avatar.move(Helpers.tmpPath('pendinguploads/pic'));
      if (!avatar.moved()) {
        return avatar.errors();
      }
      console.log(Pendinguploads.title);
      console.log(Pendinguploads.description);
      // })
      // await request.multipart.process();
      await Pendinguploads.save();
      session.flash({ message: 'Your video has been posted!.' });
      return response.redirect('upload');
    } catch (error) {
      session.flash({ uploadError: 'could not upload file' + avatar });
      return response.redirect('back');
    }
  }
}

module.exports = UploadController;
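A few things stand out in that controller (observations based on the snippet alone, since no answer was posted): new Date().getTime is missing its parentheses, so the file name is built from the function's source text rather than a timestamp, and the file is moved without being given that name. A possible rewrite of the file-handling portion, assuming AdonisJS v4:

// Build the file name from an actual timestamp; getTime() must be called.
// avatar.extname is the validated extension reported by Adonis.
const fileName = `${new Date().getTime()}.${avatar.extname}`;

// Pass the name to move() so the file is stored under that name.
await avatar.move(Helpers.tmpPath('pendinguploads/pic'), { name: fileName });

if (!avatar.moved()) {
  return avatar.errors();
}
Pendinguploads.image_url = fileName;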

What's the best way to process an array of JSON messages posted to a Node.js server?

A client sends an array of JSON messages to be stored at a Node.js server, but the client requires some sort of acknowledgement for each message (through a unique id) that it was properly stored at the server and hence doesn't need to be sent again.
At the server I want to parse the JSON array, loop through it, store each message in the db, store the response for each message in a JSON array named responses, and finally send this responses array to the client. But as the db operations are async, all the other code is executed before any result is returned from the db storing methods. My question is: how do I keep updating the responses array until all db operations are complete?
var message = require('./models/message');
var async = require('async');

var VALID_MESSAGE = 200;
var INVALID_MESSAGE = 400;
var SERVER_ERROR = 500;

function processMessage(passedMessage, callback) {
    var msg = null;
    var err = null;
    var responses = [];
    isValidMessage(passedMessage, function(err, result) {
        if (err) {
            callback( createResponse(INVALID_MESSAGE, 0) );
        } else {
            var keys = Object.keys(result);
            for (var i = 0, len = keys.length; i < len; i++) {
                async.waterfall([
                    // store valid json message(s)
                    function storeMessage(callback) {
                        (function(oneMessage) {
                            message.processMessage(result[i], function(res) {
                                callback(res, result[i].mid, callback);
                            });
                        })(result[i]);
                        console.log('callback returns from storeMessage()');
                    },
                    // create a json response to send back to client
                    function createResponse(responseCode, mid, callback) {
                        var status = "";
                        var msg = "";
                        switch (responseCode) {
                            case VALID_MESSAGE: {
                                status = "pass";
                                msg = "Message stored successfully.";
                                break;
                            }
                            case INVALID_MESSAGE: {
                                status = "fail";
                                msg = "Message invalid, please send again with correct data.";
                                break;
                            }
                            case SERVER_ERROR: {
                                status = "fail";
                                msg = "Internal Server Error! please contact the administrator.";
                                break;
                            }
                            default: {
                                responseCode = SERVER_ERROR;
                                status = "fail";
                                msg = "Internal Server Error! please contact the administrator.";
                                break;
                            }
                        }
                        var response = { "mid": mid, "status": status, "message": msg, "code": responseCode };
                        callback(null, response);
                    }
                ],
                function(err, result) {
                    console.log('final callback in series: ', result);
                    responses.push(result);
                });
            } // loop ends
        } // else ends
        console.log('now we can send response back to app as: ', responses);
    }); // isValid finishes
}
To expand on what lanzz said, this is a pretty common solution (start a number of "tasks" all at the same time, and then use a common callback to determine when they're all done). Here's a quick paste of my userStats function, which gets the number of active users (DAU, WAU, and MAU):
exports.userStats = function(app, callback)
{
    var res = {'actives': {}},
        day = 1000 * 60 * 60 * 24,
        req_model = Request.alloc(app).model,
        actives = {'DAU': day, 'MAU': day * 31, 'WAU': day * 7},
        countActives = function(name, time) {
            var date = new Date(new Date().getTime() - time);
            req_model.distinct('username', {'date': {$gte: date}}, function(e, c) {
                res.actives[name] = parseInt(c ? c.length : 0, 10);
                if (Object.keys(actives).length <= Object.keys(res.actives).length)
                    callback(null, res);
            });
        };
    var keys = Object.keys(actives);
    for (var k in keys)
    {
        countActives(keys[k], actives[keys[k]]);
    }
};
Only send your responses array when the number of items in it equals the number of keys in your result object (i.e. you've gathered responses for all of them). You can check whether you're good to send after you push each response into the array.
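Applied to the question's code, the natural place for that check is the final callback of each async.waterfall run; a minimal sketch using the names from the question (the inner parameter is renamed to response so it doesn't shadow the outer result object):

// final callback of each async.waterfall run
function(err, response) {
    responses.push(response);
    // One response per key in result means every db operation has finished,
    // so only now is it safe to hand the full array back.
    if (responses.length === Object.keys(result).length) {
        callback(null, responses);
    }
}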