When I update to webpack 5, there is an error: configuration has an unknown property 'before' - webpack-5

When I update webpack 4 to 5, this error appears. I have a webpackDevServer.js which includes the option named in the error message:
// webpackDevServer.js
// (requires used below, omitted from the original snippet; these are the
// standard create-react-app utilities)
const fs = require('fs');
const errorOverlayMiddleware = require('react-dev-utils/errorOverlayMiddleware');
const evalSourceMapMiddleware = require('react-dev-utils/evalSourceMapMiddleware');
const noopServiceWorkerMiddleware = require('react-dev-utils/noopServiceWorkerMiddleware');
const paths = require('./paths');

module.exports = function(proxy, allowedHost) {
  return {
    before(app, server) {
      if (fs.existsSync(paths.proxySetup)) {
        // This registers user provided middleware for proxy reasons
        require(paths.proxySetup)(app);
      }
      // This lets us fetch source contents from webpack for the error overlay
      app.use(evalSourceMapMiddleware(server));
      // This lets us open files from the runtime error overlay.
      app.use(errorOverlayMiddleware());
      // This service worker file is effectively a 'no-op' that will reset any
      // previous service worker registered for the same host:port combination.
      // We do this in development to avoid hitting the production cache if
      // it used the same host and port.
      // https://github.com/facebook/create-react-app/issues/2272#issuecomment-302832432
      app.use(noopServiceWorkerMiddleware());
    },
  };
};
I use the above file in a start.js file; to run the project, I type node scripts/start.js.
// start.js
...
const createDevServerConfig = require('../config/webpackDevServer.config');
...
const serverConfig = createDevServerConfig(
  proxyConfig,
  urls.lanUrlForConfig
);
const devServer = new WebpackDevServer(compiler, serverConfig);
Then it throws this error:
configuration has an unknown property 'before'. These properties are valid:
object { bonjour?, client?, compress?, dev?, firewall?, headers?, historyApiFallback?, host?, hot?, http2?, https?, injectClient?, injectHot?, liveReload?, onAfterSetupMiddleware?, onBeforeSetupMiddleware?, onListening?, open?, openPage?, overlay?, port?, proxy?, public?, setupExitSignals?, static?, stdin?, transportMode?, useLocalIp? }
Here is my package.json:
"webpack": "^5.20.2",
"webpack-dev-server": "^4.0.0-beta.0",
"webpack-manifest-plugin": "2.0.4",
"workbox-webpack-plugin": "^6.1.0"

You have to change 'before' to 'onBeforeSetupMiddleware'. Here is the link to the migration description from v3 to v4: https://github.com/webpack/webpack-dev-server/blob/master/migration-v4.md
In case something changes in the migration guide, the details are attached below.
v3:
module.exports = {
  devServer: {
    after: function (app, server, compiler) {
      app.get("/some/path", function (req, res) {
        res.json({ custom: "response" });
      });
    },
  },
};
v4:
module.exports = {
  devServer: {
    onAfterSetupMiddleware: function (devServer) {
      devServer.app.get("/some/path", function (req, res) {
        res.json({ custom: "response" });
      });
    },
  },
};
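Applied to the config from the question, a minimal sketch of the v4 shape could look like this (assuming the same create-react-app middleware helpers; in v4 the hook receives the devServer instance instead of separate app and server arguments):
// webpackDevServer.js, v4 style -- a sketch, not a verified drop-in
module.exports = function(proxy, allowedHost) {
  return {
    onBeforeSetupMiddleware(devServer) {
      const app = devServer.app;
      if (fs.existsSync(paths.proxySetup)) {
        require(paths.proxySetup)(app);
      }
      // react-dev-utils' evalSourceMapMiddleware takes the devServer instance here
      app.use(evalSourceMapMiddleware(devServer));
      app.use(errorOverlayMiddleware());
      app.use(noopServiceWorkerMiddleware());
    },
  };
};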

I'm stupid: when I searched some keywords (e.g. onBeforeSetupMiddleware), I found the webpack-dev-server GitHub releases page, which describes the changes in the new version 4.0.0 beta. https://github.com/webpack/webpack-dev-server/releases

Related

Error: Expected private key to be an Uint8Array with length 32

I am following the guide from https://ethereum.org/vi/developers/tutorials/hello-world-smart-contract/
I am getting this error when trying to run my deploy script. I am absolutely lost as to why this is not working, as I have copied every piece of code directly from the guide.
My hardhat.config.js
require('dotenv').config();
require("@nomiclabs/hardhat-ethers");
const { API_URL, PRIVATE_KEY } = process.env;

/**
 * @type import('hardhat/config').HardhatUserConfig
 */
module.exports = {
  solidity: "0.7.3",
  defaultNetwork: "ropsten",
  networks: {
    hardhat: {},
    ropsten: {
      url: API_URL,
      accounts: [`0x${PRIVATE_KEY}`]
    }
  },
}
My deploy.js
async function main() {
  const HelloWorld = await ethers.getContractFactory("HelloWorld");
  // Start deployment, returning a promise that resolves to a contract object
  const hello_world = await HelloWorld.deploy("Hello World!");
  console.log("Contract deployed to address:", hello_world.address);
}

main()
  .then(() => process.exit(0))
  .catch(error => {
    console.error(error);
    process.exit(1);
  });
My .env
API_URL = "https://eth-ropsten.alchemyapi.io/v2/[REDACTED]"
PRIVATE_KEY = "[REDACTED]" # my private key goes here, not including the 0x
It compiles fine but gives me the error when I use the command
npx hardhat run scripts/deploy.js --network ropsten
You don't need the 0x prefix in the private key; just put the exact key you got from MetaMask :)
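Whatever combination you use, the value that ends up in accounts must be "0x" followed by exactly 64 hex characters (32 bytes, which is the length the error message refers to). A quick sanity check, my own addition rather than part of the guide, could be dropped into hardhat.config.js:
// hypothetical fail-fast check for the assembled key (not from the tutorial)
const key = `0x${process.env.PRIVATE_KEY}`;
if (!/^0x[0-9a-fA-F]{64}$/.test(key)) {
  throw new Error(`PRIVATE_KEY should be 64 hex chars without 0x (got ${key.length - 2})`);
}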
https://github.com/ethereumjs/ethereumjs-tx
As per the usage example, we need to add the chain name while creating the Transaction:
const tx = new Tx(txObject, { chain: 'rinkeby' })
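For context, a minimal sketch of that usage, assuming ethereumjs-tx v2.x and an already-built txObject (the privateKey variable is a placeholder):
// sketch: sign a transaction for a named chain with ethereumjs-tx v2.x
const { Transaction: Tx } = require('ethereumjs-tx');

const tx = new Tx(txObject, { chain: 'ropsten' });
tx.sign(Buffer.from(privateKey, 'hex')); // 64 hex chars, no 0x prefix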

GCP Video Intelligence - batchPredict error

Following this documentation, when requesting a batchPredict I run into this error via the API:
{
  "error": {
    "code": 13,
    "message": "internal"
  }
}
Additionally, here's a screenshot of the error I see when I try to use the "Test & Use" tab. Neither of these is descriptive, so I'm not sure where the error lies.
In the request, I include the path to my CSV file in Google Cloud Storage, which links to videos in the same bucket. Here are the contents of the CSV:
gs://XXXXXXXXXXXX/movie1.mov,0,inf
gs://XXXXXXXXXXXX/movie2.mov,0,inf
I also include the path to a /Results folder (in the same bucket) to save the predictions.
Code making the call:
const client = new PredictionServiceClient();

async function batchPredict() {
  const request = {
    name: client.modelPath('project-id-xxxxxx', 'us-central1', 'VOTxxxxxxxxxx'),
    inputConfig: {
      gcsSource: {
        inputUris: ['gs://XXXXXXXXXXXX/apitest.csv'],
      },
    },
    outputConfig: {
      gcsDestination: {
        outputUriPrefix: 'gs://XXXXXXXXXXXX/results/',
      },
    },
  };
  // ... (the client.batchPredict(request) call itself is omitted here)
}
Please let me know if I need to provide any more detail.
The possible root cause is one of these two:
1. There is an issue somewhere in your code. So, if your code is not the same as the example below, I suggest that you try it out (changing the appropriate variables, of course).
2. There is something wrong with your model, which is the most probable root cause (as per the error message itself).
So, if it is not your code, you should create a private issue report on the issue tracker, explaining your issue and giving as many details as possible, as well as your use case and impact.
As it is private, only Googlers and you will have access to it, so feel free to share your project and model IDs.
Here is what I did to try to reproduce your issue (be sure to follow the "before you begin" guide):
I trained a model on gs://YOUR_BUCKET/TRAINING.csv:
TRAIN,gs://automl-video-demo-data/traffic_videos/traffic_videos_train.csv
TEST,gs://automl-video-demo-data/traffic_videos/traffic_videos_test.csv
Then I predicted on a couple of videos listed in gs://YOUR_BUCKET/VIDEOS_TO_ANNOTATE.csv (the inputUri):
gs://automl-video-demo-data/traffic_videos/highway_078.mp4, 0,inf
gs://automl-video-demo-data/traffic_videos/highway_079.mp4,10.00000,15.50000
using the Node.js predict example from the tutorial:
/**
 * TODO(developer): Uncomment these variables before running the sample.
 */
const projectId = 'YOUR_PROJECT';
const location = 'us-central1';
const modelId = 'VOTXXXXXXXXXXXXXXXXXX';
const inputUri = 'gs://YOUR_BUCKET/VIDEOS_TO_ANNOTATE.csv';
const outputUri = 'gs://YOUR_BUCKET/outputs/';

// Imports the Google Cloud AutoML library
const {PredictionServiceClient} = require('@google-cloud/automl').v1beta1;

// Instantiates a client
const client = new PredictionServiceClient();

async function batchPredict() {
  // Construct request
  const request = {
    name: client.modelPath(projectId, location, modelId),
    inputConfig: {
      gcsSource: {
        inputUris: [inputUri],
      },
    },
    outputConfig: {
      gcsDestination: {
        outputUriPrefix: outputUri,
      },
    },
  };

  const [operation] = await client.batchPredict(request);
  console.log('Waiting for operation to complete...');
  // Wait for operation to complete.
  const [response] = await operation.promise();
  console.log(
    `Batch Prediction results saved to Cloud Storage bucket. ${response}`
  );
}

batchPredict();
Note that I also tried the REST & CMD LINE predict example, and in both cases it worked well and I received a correct response:
Nodejs prediction's response:
Waiting for operation to complete...
Batch Prediction results saved to Cloud Storage bucket. [object Object]
REST & CMD LINE prediction's response:
{
  "name": "projects/XXXXXXXXXX/locations/us-central1/operations/VOTXXXXXXXXXXXXXXX",
  "metadata": {
    "@type": "type.googleapis.com/google.cloud.automl.v1beta1.OperationMetadata",
    "createTime": "2021-04-16T08:09:52.102270Z",
    "updateTime": "2021-04-16T08:09:52.102270Z",
    "batchPredictDetails": {
      "inputConfig": {
        "gcsSource": {
          "inputUris": [
            "gs://MY_BUCKET/VIDEOS_TO_ANNOTATE.csv"
          ]
        }
      }
    }
  }
}

Is there a way to get a string (data) from a text file stored in S3 into the Alexa localisation.js file?

Problem:
I am trying to get the data from a text file stored in S3. I get it right in the intent handler using async/await, but I want to get the string in the localisation file, as I am trying to implement the solution in 2 languages.
I am getting an error saying the skill does not respond correctly.
This is file.js:
const AWS = require('aws-sdk');

//========================
// This step is not required if you are running your code inside lambda or in
// the local environment that has AWS set up
//========================
const s3 = new AWS.S3();

async function getS3Object (bucket, objectKey) {
  try {
    const params = {
      Bucket: bucket,     // e.g. 'my-bucket' (was hard-coded, leaving the arguments unused)
      Key: objectKey,     // e.g. 'file.txt'
    };
    const data = await s3.getObject(params).promise();
    return data.Body.toString('utf-8');
  } catch (e) {
    throw new Error(`Could not retrieve file from S3: ${e.message}`);
  }
}

module.exports = getS3Object;
This is the localisation.js file code:
const dataText = require('file.js');
async let textTitle = await dataText().then(); // this does not work
module.exports = {
  en: {
    translation: {
      WELCOME_BACK_MSG: textTitle,
    }
  },
  it: {
    translation: {
      WELCOME_MSG: textTitle,
    }
  }
}
The problem is that in your localisation.js file you are trying to export something that is obtained via an asynchronous function call, but you cannot do that directly: module.exports is assigned and returned synchronously. Please see, for instance, this SO question and answer for in-depth background.
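A minimal sketch of what the current localisation.js effectively does may make this clearer (the await is replaced by a then-callback so the file parses; the bucket and key are the question's example values):
const getS3Object = require('./file.js');

let textTitle; // still undefined when module.exports is assigned below
getS3Object('my-bucket', 'file.txt').then((txt) => { textTitle = txt; });

// exported synchronously, before the S3 promise has resolved
module.exports = {
  en: { translation: { WELCOME_BACK_MSG: textTitle } }, // always undefined
};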
As you are mentioning an Alexa skill, and given the name of the file, localisation.js, I assume you are trying something similar to the solution proposed in this GitHub repository.
Analyzing the content of the index.js file they provide, it seems the library is using i18next for localisation.
The library provides the concept of a backend if you need to load your localisation information from an external resource.
You can implement a custom backend, although the library offers one that could fit your needs, i18next-http-backend.
As indicated in the documentation, you can configure the library to fetch your localisation resources with this backend with something like the following:
import i18next from 'i18next';
import Backend from 'i18next-http-backend';

i18next
  .use(Backend)
  .init({
    backend: {
      // for all available options read the backend's repository readme file
      loadPath: '/locales/{{lng}}/{{ns}}.json'
    }
  });
Here on SO you can find a more complete example.
You would need to provide a similar configuration to the localisation interceptor provided in the Alexa skill example project, perhaps something like this:
// imports assumed from the Alexa skill example project
import * as Alexa from 'ask-sdk-core';
import i18n from 'i18next';
import HttpApi from 'i18next-http-backend';

/**
 * This request interceptor will bind a translation function 't' to the handlerInput
 */
const LocalizationInterceptor = {
  process(handlerInput) {
    const localisationClient = i18n
      .use(HttpApi)
      .init({
        lng: Alexa.getLocale(handlerInput.requestEnvelope),
        // resources: languageStrings,
        backend: {
          loadPath: 'https://your-bucket.amazonaws.com/locales/{{lng}}/translations.json',
          crossDomain: true,
        },
        returnObjects: true
      });
    localisationClient.localise = function localise() {
      const args = arguments;
      const value = i18n.t(...args);
      if (Array.isArray(value)) {
        return value[Math.floor(Math.random() * value.length)];
      }
      return value;
    };
    handlerInput.t = function translate(...args) {
      return localisationClient.localise(...args);
    };
  }
};
Please be aware that, instead of a text file, you need to return a valid JSON file with the appropriate translations:
{
  "WELCOME_MSG": "Welcome!!",
  "WELCOME_BACK_MSG": "Welcome back!!"
}

How to dynamically change Apollo Web Socket Link URI?

Currently I've set up Apollo's web socket link like so:
const wsLink = new WebSocketLink({
  uri: `ws://example.com/graphql?token=${getToken()}`,
  options: {
    reconnect: true,
    connectionParams(): ConnectionParams {
      return {
        authToken: getToken(),
      };
    },
  },
});
This works fine while the connection lasts, but fails when the connection needs to be re-established if the token in the query string has expired.
The way the infra I'm dealing with is set up requires this token to be set as a query param in the URI. How can I dynamically change the URI so that I may provide a new token when the connection needs to be re-established?
You can set the property wsLink.subscriptionClient.url manually (or create a new subscriptionClient instance?) in the setContext function: https://www.apollographql.com/docs/link/links/context/.
For example:
import { setContext } from 'apollo-link-context'
...
const wsLink = ... // your WebSocketLink from above
const authLink = setContext(() => {
  wsLink.subscriptionClient.url = `ws://example.com/graphql?token=${getToken()}`
})
...
const config = {
  link: ApolloLink.from([
    authLink,
    wsLink
  ]),
  ...
}
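If the socket is already open with the old token, updating the URL alone only takes effect on the next reconnect. Assuming the link is backed by subscriptions-transport-ws, whose SubscriptionClient exposes close(isForced, closedByUser), one possible follow-up is to drop the current socket so the built-in reconnect logic re-opens it with the new URL:
// sketch: force a reconnect so the refreshed token in the URL takes effect
function refreshWsConnection() {
  wsLink.subscriptionClient.url = `ws://example.com/graphql?token=${getToken()}`;
  // close(false, false): not forced, not user-initiated, so the client reconnects
  wsLink.subscriptionClient.close(false, false);
}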

AWS S3 Bucket Upload using CollectionFS and cfs-s3 meteor package

I am using Meteor.js with an Amazon S3 bucket for uploading and storing photos. I am using the Meteorite packages collectionFS and cfs-s3. I have set up my aws-s3 connection correctly and the images collection is working fine.
Client side event handler:
'click .submit': function(evt, templ) {
  var user = Meteor.user();
  var photoFile = $('#photoInput').get(0).files[0];
  if (photoFile) {
    var readPhoto = new FileReader();
    readPhoto.onload = function(event) {
      photodata = event.target.result;
      console.log("calling method");
      Meteor.call('uploadPhoto', photodata, user);
    };
    // start the read; without this, onload never fires
    readPhoto.readAsDataURL(photoFile);
  }
}
And my server side method:
'uploadPhoto': function uploadPhoto(photodata, user) {
  var tag = Random.id([10] + "jpg");
  var photoObj = new FS.File({name: tag});
  photoObj.attachData(photodata);
  console.log("s3 method called");
  Images.insert(photoObj, function (err, fileObj) {
    if (err) {
      console.log(err, err.stack);
    } else {
      console.log(fileObj._id);
    }
  });
}
The file that is selected is a .jpg image file, but upon upload I get this error in the server method:
Exception while invoking method 'uploadPhoto' Error: DataMan constructor received data that it doesn't support
No matter whether I directly pass the image file, attach it as data, or use the FileReader to read it as text/binary/string, I still get that error. Please advise.
Ok, maybe some thoughts. I did some things with collectionFS a few months ago, so check the docs as well, because my examples may not be 100% correct.
Credentials should be set via environment variables, so your key and secret are available on the server only. Check this link for further reading.
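For illustration, a sketch of a cfs-s3 store wired to environment variables (the store name, bucket, and variable names are placeholders, not taken from the question):
// sketch: cfs-s3 store reading its credentials from the environment
var profileImagesStore = new FS.Store.S3('profileImages', {
  bucket: 'my-bucket',                                // placeholder bucket
  accessKeyId: process.env.AWS_ACCESS_KEY_ID,         // server-side only
  secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY  // server-side only
});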
Ok first, here is some example code which is working for me. Check yours for differences.
Template helper:
'dropped #dropzone': function(event, template) {
  addImage(event);
}
Function addImage:
function addImage(event) {
  // Go through each file
  FS.Utility.eachFile(event, function(file) {
    // Some validation checks
    var reader = new FileReader();
    reader.onload = (function(theFile) {
      return function(e) {
        // e.target.result holds the data URL produced by readAsDataURL below
        var fsFile = new FS.File(e.target.result);
        // set metadata, which is validated in the collection
        // (only the owning user can update/remove fsFile)
        fsFile.metadata = {owner: Meteor.userId()};
        PostImages.insert(fsFile, function (err, fileObj) {
          if (err) {
            console.log(err);
          }
        });
      };
    })(file);
    // Read in the image file as a data URL.
    reader.readAsDataURL(file);
  });
}
Ok, your next point is the validation. The validation can be done with allow/deny rules and with a filter on the FS.Collection. This way you can do all your validation AND insert via the client.
Example:
PostImages = new FS.Collection('profileImages', {
  stores: [profileImagesStore],
  filter: {
    maxSize: 3145728,
    allow: {
      contentTypes: ['image/*'],
      extensions: ['png', 'PNG', 'jpg', 'JPG', 'jpeg', 'JPEG']
    }
  },
  onInvalid: function(message) {
    console.log(message);
  }
});

PostImages.allow({
  insert: function(userId, doc) {
    return (userId && doc.metadata.owner === userId);
  },
  update: function(userId, doc, fieldNames, modifier) {
    return (userId === doc.metadata.owner);
  },
  remove: function(userId, doc) {
    return false;
  },
  download: function(userId) {
    return true;
  },
  fetch: []
});
Here you will find another example.
Another point of error may be your AWS configuration. Have you done everything as it is written here?
Based on this post, it seems that this error occurs when FS.File() is not constructed correctly, so maybe that should be your first place to look.
A lot to read, but I hope this helps you :)