Invoke another player when one media response (audio playing) is done - action

I have a problem.
I picked up a Dialogflow / Google Actions sample that plays an audio file, and I want to build on it to play several audio files.
When a media response finishes, the Media Status intent is invoked, and at that point I want to play the next audio automatically.
Please help me.
Thanks.
This is my code:
'use strict';

const {
  dialogflow,
  SimpleResponse,
  Image,
  Suggestions,
  MediaObject,
} = require('actions-on-google');
const functions = require('firebase-functions');

const app = dialogflow({debug: true});

app.intent('Media Response', (conv) => {
  if (!conv.surface.capabilities
      .has('actions.capability.MEDIA_RESPONSE_AUDIO')) {
    conv.ask('Sorry, this device does not support audio playback.');
    conv.ask('Which response would you like to see next?');
    return;
  }
  conv.ask('This is a media response example.');
  conv.ask(new MediaObject({
    name: 'Jazz in Paris',
    url: 'https://storage.googleapis.com/automotive-media/Jazz_In_Paris.mp3',
    description: 'A funky Jazz tune',
    icon: new Image({
      url: 'https://storage.googleapis.com/automotive-media/album_art.jpg',
      alt: 'Album cover of an ocean view',
    }),
  }));
  conv.ask(new Suggestions(['cancel']));
});

app.intent('Media Status', (conv) => {
  const mediaStatus = conv.arguments.get('MEDIA_STATUS');
  let response = 'Unknown media status received.';
  if (mediaStatus && mediaStatus.status === 'FINISHED') {
    response = 'Hope you enjoyed the tune! ';
  }
  conv.ask(response);
  conv.ask('Media ended successfully');
  conv.ask(new Suggestions(['exit']));
});

exports.dialogflowFirebaseFulfillment = functions.https.onRequest(app);
I tried to invoke another media response in the 'Media Status' intent handler:
app.intent('Media Status', (conv) => {
  const mediaStatus = conv.arguments.get('MEDIA_STATUS');
  let response = 'Unknown media status received.';
  if (mediaStatus && mediaStatus.status === 'FINISHED') {
    conv.ask(new MediaObject({
      name: 'Jazz in Paris',
      url: 'https://storage.googleapis.com/automotive-media/Jazz_In_Paris.mp3',
      description: 'A funky Jazz tune',
      icon: new Image({
        url: 'https://storage.googleapis.com/automotive-media/album_art.jpg',
        alt: 'Album cover of an ocean view',
      }),
    }));
    conv.ask(new Suggestions(['cancel']));
  }
});
Then "Webhook failed for intent: Media Status" error happend. How should I solve this problem? Thank you.

All responses that include a MediaObject must also include a SimpleResponse - which is usually just some text that is said before the Media.
For example, in the "Media Response" Intent Handler when you send your first MediaObject, you have the line:
conv.ask('This is a media response example.');
But this sort of line is missing from the "Media Status" Intent Handler.
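Putting the two together, a minimal sketch of a fixed "Media Status" handler might look like this (the added text line before the MediaObject is the key change; the else branch is just one way to keep the unknown-status reply):
app.intent('Media Status', (conv) => {
  const mediaStatus = conv.arguments.get('MEDIA_STATUS');
  if (mediaStatus && mediaStatus.status === 'FINISHED') {
    // A SimpleResponse (plain text) must come before the MediaObject.
    conv.ask('Hope you enjoyed the tune! Here is the next one.');
    conv.ask(new MediaObject({
      name: 'Jazz in Paris',
      url: 'https://storage.googleapis.com/automotive-media/Jazz_In_Paris.mp3',
      description: 'A funky Jazz tune',
    }));
    conv.ask(new Suggestions(['cancel']));
  } else {
    conv.ask('Unknown media status received.');
  }
});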

Sequelize model only updates when I log out then log back in

So basically my issue is that when a user is logged in they can take a test. When the test is passed, the user is given points and their level is updated using a PUT route. Once the PUT route has run, the user is redirected back to their homepage, where their stats are displayed. If I look in Postman after the request is made, I can see the changes. However, the GET route on the user's homepage still displays the previous data. Only when I log out and then log back in does the user object actually update in the app.
The GET request is called in the document.ready function for the user's page, like so:
$(document).ready(() => {
  // This file just does a GET request to figure out which user is logged in
  // and updates the HTML on the page
  $.get("/api/user_data").then(data => {
    console.log(data);
    $(".user-name").text(data.username);
    $(".first-name").text(data.first);
    $(".last-name").text(data.last);
    $(".email").text(data.email);
    $(".xp").text(data.points);
  });
});
The PUT route is in a function that runs when the last question of the test is answered. It looks like this:
function endGame() {
  console.log("END OF GAME SCORE: " + score);
  $(".quiz-container").css("display", "none");
  console.log(data.points);
  console.log(data.level);
  var addPoints = score * 100 + data.points;
  var newLevel = data.level + 1;
  $.ajax({
    url: "/api/user_data",
    method: "PUT",
    data: {
      id: data.id,
      level: newLevel,
      point: addPoints
    },
    error: function(req, err) {
      console.log(err);
    },
    success: function(res, err) {
      window.location.replace("/members");
    }
  }).then(result => {
    console.log("user info updated");
    console.log(result);
    window.location.replace("/members");
  });
}
As you can see, the user is redirected over to the "members" page, which is where the GET request is sent once the document is ready. I'm pretty new, so any help would be greatly appreciated.
Also, here is the db.sync method. I had been working with force: true and force: false (both variants are sketched after the snippet below); now I just have:
db.sequelize.sync({}).then(() => {
  app.listen(PORT, () => {
    console.log(
      "==> 🌎 Listening on port %s. Visit http://localhost:%s/ in your browser.",
      PORT,
      PORT
    );
  });
});
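For reference, sketches of the two force variants mentioned above: force: true drops and recreates every table on each startup (wiping existing rows), while force: false leaves existing tables alone.
// Drops and recreates all tables on every startup (destroys saved data)
db.sequelize.sync({ force: true });

// Creates tables only if they don't already exist
db.sequelize.sync({ force: false });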

Apify: Preserve headers in RequestQueue

I'm trying to crawl our local Confluence installation with the PuppeteerCrawler. My strategy is to log in first, then extract the session cookies and use them in the headers of the start URL. The code is as follows:
First, I log in manually to extract the relevant credentials:
const Apify = require("apify");

// (Run inside an async function, since await is used at this level.)
const browser = await Apify.launchPuppeteer({slowMo: 500});
const page = await browser.newPage();
await page.goto('https://mycompany/confluence/login.action');
await page.focus('input#os_username');
await page.keyboard.type('myusername');
await page.focus('input#os_password');
await page.keyboard.type('mypasswd');
await page.keyboard.press('Enter');
await page.waitForNavigation();

// Get cookies and close the login session
const cookies = await page.cookies();
await browser.close();

const cookie_jsession = cookies.filter(cookie => {
  return cookie.name === "JSESSIONID";
})[0];
const cookie_crowdtoken = cookies.filter(cookie => {
  return cookie.name === "crowd.token_key";
})[0];
Then I build up the crawler structure with the prepared request headers:
const startURL = {
  url: 'https://mycompany/confluence/index.action',
  method: 'GET',
  headers: {
    Accept: 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8',
    'Accept-Encoding': 'gzip, deflate, br',
    'Accept-Language': 'de-DE,de;q=0.9,en-US;q=0.8,en;q=0.7',
    Cookie: `${cookie_jsession.name}=${cookie_jsession.value}; ${cookie_crowdtoken.name}=${cookie_crowdtoken.value}`,
  }
};

const requestQueue = await Apify.openRequestQueue();
await requestQueue.addRequest(new Apify.Request(startURL));
const pseudoUrls = [new Apify.PseudoUrl('https://mycompany/confluence/[.*]')];

const crawler = new Apify.PuppeteerCrawler({
  launchPuppeteerOptions: {headless: false, slowMo: 500},
  requestQueue,
  handlePageFunction: async ({ request, page }) => {
    const title = await page.title();
    console.log(`Title of ${request.url}: ${title}`);
    console.log(await page.content());
    await Apify.utils.enqueueLinks({
      page,
      selector: 'a:not(.like-button)',
      pseudoUrls,
      requestQueue
    });
  },
  maxRequestsPerCrawl: 3,
  maxConcurrency: 10,
});

await crawler.run();
The manual login and cookie extraction seem to be fine (the "curlified" request works perfectly), but Confluence doesn't accept the login via Puppeteer / headless Chromium. It seems like the headers are getting lost somehow.
What am I doing wrong?
Without first going into the details of why the headers don't work, I would suggest defining a custom gotoFunction in the PuppeteerCrawler options, such as:
{
  // ...
  gotoFunction: async ({ request, page }) => {
    await page.setCookie(...cookies); // From page.cookies() earlier.
    return page.goto(request.url, { timeout: 60000 });
  }
}
This way, you don't need to do the parsing and the cookies will automatically be injected into the browser before each page load.
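Integrated into the crawler from the question, that might look like the following sketch (assuming cookies is the array captured with page.cookies() during the manual login):
const crawler = new Apify.PuppeteerCrawler({
  requestQueue,
  gotoFunction: async ({ request, page }) => {
    // Inject the session cookies into the browser before each navigation,
    // instead of passing a Cookie header on the request.
    await page.setCookie(...cookies);
    return page.goto(request.url, { timeout: 60000 });
  },
  handlePageFunction: async ({ request, page }) => {
    console.log(`Title of ${request.url}: ${await page.title()}`);
  },
});

await crawler.run();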
As a note, modifying default request headers when using a headless browser is not a good practice, because it may lead to blocking on some sites that match received headers against a list of known browser fingerprints.
Update:
The below section is no longer relevant, because you can now use the Request class to override headers as expected.
The headers problem is a complex one involving request interception in Puppeteer. Here's the related GitHub issue in Apify SDK. Unfortunately, the method of overriding headers via a Request object currently does not work in PuppeteerCrawler, so that's why you were unsuccessful.
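With a current SDK version, then, headers set on the Request should be honored, essentially as the question's startURL already does, e.g.:
await requestQueue.addRequest(new Apify.Request({
  url: 'https://mycompany/confluence/index.action',
  headers: { Cookie: `${cookie_jsession.name}=${cookie_jsession.value}; ${cookie_crowdtoken.name}=${cookie_crowdtoken.value}` },
}));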

open graph Meta Tags in Vue JS don't show image

I am designing a blog and I would like the preview image to display when sharing to social networks, the way Medium's posts do:
<meta property="og:image" content="https://medium.com/js-dojo/getting-your-head-around-vue-js-scoped-slots-281bf82a1e4e"/>
I am using Vue.js 2 with webpack and vue-meta to change the dynamic image of my post.
But for Facebook it doesn't work, even when I put the tags in index.html.
I found an article on Medium which says that server-side rendering is necessary, but it doesn't say how to migrate a project built with a basic configuration (without SSR) into one that solves the problem; the architecture is different and I have no reference.
Here is my vue-meta code:
metaInfo () {
  return {
    title: '41devs | blog',
    titleTemplate: '%s - le titre',
    meta: [
      {name: 'viewport', content: 'user-scalable=no'},
      {property: 'og:title', content: 'title'},
      {property: 'og:type', content: 'article'},
      {property: 'og:url', content: 'http://c5e3b0ec.ngrok.io/blog/s'}, // here it is just ngrok for my test
      {property: 'og:description', content: 'description'},
      {property: 'og:image', content: 'https://firebasestorage.googleapis.com/v0/b/dev-blog-2503f.appspot.com/o/postsStorage%2F-KxXdvvLqDHBcxdUfLgn%2Fonfleck?alt=media&token=24a9bf5b-dce2-46e8-b175-fb63f7501c98'},
      {property: 'twitter:image:src', content: 'https://firebasestorage.googleapis.com/v0/b/dev-blog-2503f.appspot.com/o/postsStorage%2F-KxXdvvLqDHBcxdUfLgn%2Fonfleck?alt=media&token=24a9bf5b-dce2-46e8-b175-fb63f7501c98'},
      {property: 'og:image:width', content: '1000'},
      {property: 'og:site_name', content: '41devs | blog'}
    ]
  }
}
When Facebook checks your page to find the metadata, it doesn't run your JavaScript. Vue never runs, so your tags are never replaced. This is a limitation of Facebook's crawler.
This means you would indeed have to render those tags at the server level, whether by Vue's server-side rendering or by some other method (I don't know what type of server you are running). But yes, ultimately, you must be able to hard-code this value into your server response, otherwise it won't display in Facebook.
The way I handled this was to create some middleware that parses the URL path and dynamically updates the meta tags with the Cheerio module.
File for OG meta tag info:
const products = [
  {
    id: 111111111,
    title: 'Corporate Fat Cat',
    ogImage: 'https://cdn.com/corporate.jpg',
    description: 'The fat cats in Washington don’t even look this good'
  },
  {
    id: 222222222,
    title: 'Gangsta Cat',
    ogImage: 'https://cdn.com/gangsta.jpg',
    description: 'That’s how we roll'
  },
  {
    id: 333333333,
    title: 'Mechanic Cat',
    ogImage: 'https://cdn.com/mechanic.jpg',
    description: 'I have no idea what I’m doing.'
  }
];
Middleware:
app.use('/*', (req, res, next) => {
  if (/^\/api\//.test(req.originalUrl)) next();
  else if (/\/item\//.test(req.originalUrl)) updateMetaTags(req, res);
  else res.sendFile(`${__dirname}/client/dist/index.html`);
});
updateMetaTags function:
const fs = require('fs');
const $ = require('cheerio'); // cheerio supplies the jQuery-like $ used below

async function updateMetaTags(req, res) {
  // Get and parse products array from app src
  // (assumes products.js contains the array as plain JSON)
  const productsSrc = `${__dirname}/client/src/products.js`;
  const productsText = await fs.promises.readFile(productsSrc);
  const productsArr = JSON.parse(productsText);
  // Retrieve product object that includes the current URL item id
  const productID = (req.originalUrl.match(/\d{9}/) || [])[0];
  const productObj = productsArr.find(prod => prod.id == productID);
  // Update the meta tag properties in the built bundle w/ Cheerio
  const baseSrc = `${__dirname}/client/dist/index.html`;
  const baseHTML = await fs.promises.readFile(baseSrc);
  const $base = $(baseHTML);
  const $url = $base.find('meta[property=og\\:url]');
  const $title = $base.find('meta[property=og\\:title]');
  const $image = $base.find('meta[property=og\\:image]');
  const $desc = $base.find('meta[property=og\\:description]');
  $url.attr('content', `https://${req.get('host')}${req.originalUrl}`);
  $title.attr('content', productObj.title);
  $image.attr('content', productObj.ogImage);
  $desc.attr('content', productObj.description);
  // Send the modified HTML as the response
  res.send($.html($base));
}
I have this approach detailed more in this blog post.
Your title and titleTemplate have the wrong structure:
return {
  title: 'Le titre', // Set the current title of the page
  titleTemplate: '%s - 41devs blog', // Name of your blog/website
  // Title is now "Le titre - 41devs blog"
  meta: [
    ...
  ]
}
This also performs best for SEO in Google: https://support.google.com/webmasters/answer/79812?hl=en

Ionic 2 Native Facebook Plugin errors when grabbing User profile picture

I have a snippet of code in my Ionic 2 app that is supposed to grab the Facebook profile of the user after they have successfully logged on. I've verified that the request path /me returns data associated with the user; however, when I set the request path to /me/picture, I get an error: There was an error making the graph call.
Here is my code:
if (this.platform.is('cordova')) {
  Facebook.login([
    'public_profile',
    'user_friends',
    'email'
  ]).then((result) => {
    Facebook.api('/me?fields=id,name,email,cover', []).then(data => {
      // Create the user object
      let user = {
        access_token: result.authResponse.accessToken,
        display_name: data.name,
        email: data.email,
        facebook_id: data.id,
        cover_photo: data.cover.source,
      };
      Facebook.api(`me/picture`, []).then(data => {
        user['profile_photo'] = data.url;
        this.loginWithFacebook(user);
      }, error => { this.user = JSON.stringify(error); });
    });
  },
  error => {
    this.user = JSON.stringify(error);
  });
}
Am I missing something? I've even tried doing /{user-id}/picture and still receive an error. Anyone run into this issue?
By default this edge will return a 302 redirect to the picture image. To get access to the data about the picture, please include redirect=false in your query.
Source: https://developers.facebook.com/docs/graph-api/reference/user/picture/
For example: Facebook.api('me/picture?redirect=false', [])...
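Applied to the question's code, that would look something like this sketch (note that with redirect=false the picture URL is nested under a data key in the response):
Facebook.api('me/picture?redirect=false', []).then(response => {
  // With redirect=false the Graph API returns
  // { data: { url, width, height, is_silhouette } }
  user['profile_photo'] = response.data.url;
  this.loginWithFacebook(user);
}, error => { this.user = JSON.stringify(error); });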

Facebook video upload invalid format. It should be an image file data

Here I am trying to upload a video to the user's profile.
I have set up the JavaScript SDK and my authentication works well.
I have the following code:
FB.api(
  `/${user_id}/videos`,
  "POST",
  {
    "file_url": video,
    "description": description,
    "thumb": video_thumbnail,
    "title": title,
  },
  function (response) {
    console.log("fb response");
    console.log(response);
    if (response && !response.error) {
      /* handle the result */
      console.log("video upload response");
      console.log(response);
    }
  }
);
Here I get the following error:
code: 100
fbtrace_id: "FD5tVyrH9bS"
message: "(#100) Invalid format. It should be an image file data."
type: "OAuthException"
I am using file_url and passing the URL of my video, so I would expect it to upload the video.
Thank you for any response.
I can confirm that you must post image file data in the source field when posting to Facebook.
You can test this with Postman.
Here is an example:
var fs = require("fs");
var request = require("request");

var options = {
  method: 'POST',
  url: 'https://graph.facebook.com/v2.11/2011156779127713/thumbnails',
  // Let the request library set the multipart content-type (and boundary) itself
  formData: {
    access_token: 'test',
    is_preferred: 'true',
    source: {
      value: fs.createReadStream("./Downloads/923249_818835191462845_1528674847924045075_n.jpg"),
      options: {
        filename: './Downloads/923249_818835191462845_1528674847924045075_n.jpg',
        contentType: null
      }
    }
  }
};

request(options, function (error, response, body) {
  if (error) throw new Error(error);
  console.log(body);
});
The problem isn't the video or the URL, it's the thumb parameter.
The thumb parameter needs to be file data, not a URL.
As to what format the image needs to be in... please let me know if you find out! I'm asking the same here.
The Facebook API is terrible...
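For what it's worth, here is a minimal Node sketch of the whole upload with thumb sent as file data instead of a URL (assumptions: the request package, placeholder USER_ID / ACCESS_TOKEN values, a local thumbnail.jpg, and the v2.11 endpoint from the example above; untested):
var fs = require("fs");
var request = require("request");

request({
  method: 'POST',
  url: 'https://graph.facebook.com/v2.11/USER_ID/videos', // USER_ID is a placeholder
  formData: {
    access_token: 'ACCESS_TOKEN', // placeholder
    file_url: 'https://example.com/video.mp4', // the video itself may still be passed by URL
    title: 'My title',
    description: 'My description',
    // thumb must be raw image file data, not a URL
    thumb: fs.createReadStream('./thumbnail.jpg')
  }
}, function (error, response, body) {
  if (error) throw new Error(error);
  console.log(body);
});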