how jszip async zip file when a lot of single file need to zip - jszip

I am not good at English, but I have a question about using JSZip. The code looks like this:
// Question code (buggy by design): each loop iteration builds a brand-new
// JSZip archive containing a single file, and every archive is saved under
// the same `compressName`, so the downloads overwrite one another while all
// generateAsync calls run concurrently in memory.
for (var i = 0; i < files.length; i++) {
compressFiles(files[i], compressName);
}
// Compresses ONE file into its own zip and triggers a download via
// FileSaver.js's saveAs.
function compressFiles(file,compressName) {
var fileContent = file.file;
var fileName = file.name;
var zip = new JSZip();
zip.file(fileName, fileContent);
// generateAsync is asynchronous — the .then fires after this function returns.
zip.generateAsync({ type: "blob" }).then(function (content) {
saveAs(content, compressName);
});
}
My question is: when the files in `files` are very big (about 88 MB), some of the compressed files come out at about 0 MB. I guess the reason is the async behavior — the loop puts the file streams into memory one by one, and if memory fills up, compression fails. Can anyone tell me the real reason? Thank you!

You can use an array to keep the zip.file calls and jQuery.when to wait for all of them to finish, and then call zip.generateAsync. Sample code below.
Helpful links:
jQuery.when
What does $.when.apply($, someArray) do?
// Collects every image into a single JSZip archive and saves it as one blob.
// NOTE(review): JSZip's zip.file() is synchronous and returns the JSZip
// instance (not a thenable); $.when treats non-thenable arguments as already
// resolved, so the deferred array is kept only to mirror the original answer.
function zipFiles(images) {
    var zip = new JSZip();
    var deferreds = []; // was an implicit global — now properly scoped
    images.forEach(function(image) {
        deferreds.push(zip.file(image.name, image.data, { binary: true }));
    });
    $.when.apply($, deferreds).then(function() {
        zip.generateAsync({ type: "blob" }).then(function(content) {
            saveAs(content, 'Filename.zip'); //FileSaver.js
        });
    });
}

Related

Dropzone.js + AWS S3 stalling queue

I'm trying to implement a dropzone.js uploader to Amazon S3 using the aws-sdk.js for the browser. But when I exceed the 'parallelUploads' maximum in the settings, the queue never completes. I'm using the approach in the following link:
amazon upload
relevant parts of my code:
// Dropzone instance that hands each accepted file to an AWS S3 managed upload.
var dz = new Dropzone("#DZContainer", {
    acceptedFiles: "image/*,.jpg,.jpeg,.png,.gif",
    autoQueue: true,
    autoProcessQueue: true,
    parallelUploads: 10,
    clickable: [".uploadButton"],
    accept: function(file, done){
        let params = {
            "Bucket": "upload-bucket",
            "Key": getFullKey(file.name),
            Body: file,
            Region: "us-east-1",  // was an unterminated string literal
            ContentType: file.type
        };
        // ManagedUpload is a constructor and must be invoked with `new`.
        // NOTE(review): the SDK expects upload parameters under an options
        // object (e.g. { params: params }) — confirm against aws-sdk docs.
        file.s3upload = new AWS.S3.ManagedUpload(params);
        if (typeof(done) === 'function') done();
    },
    canceled: function(file) {
        if (file.s3upload) file.s3upload.abort();
    },
    init: function () {
        this.on('removedfile', function (file) {
            if (file.s3upload) file.s3upload.abort();
        });
    }
});  // was missing the closing `}` of the options object before `)`
// Overrides Dropzone's built-in uploader: hand every queued file to the
// custom S3 sender instead of the default XHR upload.
dz.uploadFiles = function (files) {
    for (const queuedFile of files) {
        dz.SendFile(queuedFile);
    }
};
// Starts the S3 managed upload for one file and relays the outcome to
// Dropzone via its "error"/"complete" events.
dz.SendFile = function(file) {
    file.s3upload.send(function (err, data) {
        if (err) {
            console.error(err); // was console.err — not a function
            dz.emit("error", file, err.message);
        } else {
            dz.emit("complete", file);
        }
    });
}; // was missing the function's closing brace
if I drag in (or use the clickable) more than 10 files, the first 10 complete but it never processes the rest of the queue. What am I missing? All help is appreciated
EDIT: With a little more digging into Dropzone, it looks as though the file status is never getting set to complete. I see a function called _finished() in the dropzone code, but I'm having a hard time figuring out what specifically is supposed to trigger that function. I have tried dz.emit("complete", file) listed below as well as adding dz.emit("success",file) but my breakpoint at the first line of the _finished() function never triggers. Thus the file.status never gets set to completed.
Does anyone know when/what/how _finished() is supposed to be run?
As mentioned in the edit, I was able to track down where the .status was not properly getting set. This seemed to be in a private Dropzone function called _finished()
With further examination, I noticed that _finished() seemed to also be calling emit("complete", file) after setting file.status to Dropzone.SUCCESS and also emitting "success". It then checks if autoProcessQueue is set and if it is, returns the result of a processQueue() call.
I had a hard time figuring out what triggered this function as it was on an onload event that eventually realized was tied to an XHTTPRequest object used by the internal uploader (which is being overridden by the S3 uploader)
So I modified the function to emulate what the Dropzone._finished() was doing and it's behaving as expected:
// Fixed sender: emulates Dropzone's private _finished() so the file status
// is marked SUCCESS and the queue keeps processing past parallelUploads.
dz.SendFile = function(file) {
    file.s3upload.send(function (err, data) {
        if (err) {
            console.error(err); // was console.err — not a function
            dz.emit("error", file, err.message);
        } else {
            // Mirror what Dropzone._finished() does internally.
            file.status = Dropzone.SUCCESS;
            dz.emit("success", file, data, err);
            dz.emit("complete", file);
            // _finished() kicks the queue again when autoProcessQueue is on.
            if (dz.options.autoProcessQueue)
                dz.processQueue();
        }
    });
}; // was missing the function's closing brace

Postman test is always passing even though it fails

While running postman Tests, Test case seems to be always passing
The response body is provided below. I am trying to fetch the id where the name is "Erin" and validate that the id is 800. The small piece of code that I wrote is below the response body. For some reason the test always returns true. Even if "Erin" and 800 are not present, it still passes the test.
[
{
"id":991,
"name":"Tomy"
},
{
"id":800,
"name":"Erin"
}
]
Code:
// Question code: `int` is not a JavaScript keyword (this line is a
// SyntaxError) and `responseJson` is never defined — so the loop body never
// runs, pm.expect is never called, and Postman reports the test as passing
// vacuously.
pm.test("Validate id to be 800", function() {
var jsonData = pm.response.json();
for(int i=0; i<responseJson.length;i++){
if(jsonData[i].name=='Erin'){
pm.expect(jsonData[i].id).to.eql(800);
}
}
});
I updated the response a bit, as below. I wanted my test to fail because "Jack" is not found, and to pass only if "Jack" is found.
// Looks for an entry named 'Jack' and asserts its id is 800.
// NOTE(review): if no element matches, no expectation runs and the test
// still passes vacuously — the asker's underlying complaint.
pm.test("Validate id to be 800", function () {
    let jsonData = pm.response.json();
    // Declare the loop index — it was an implicit global before.
    for (let i = 0; i < jsonData.length; i++) {
        if (jsonData[i].name == 'Jack') {
            pm.expect(jsonData[i].id).to.eql(800);
        }
    }
});
That response body doesn't look quite right to me, I would expect to see quotes around the property keys in the objects.
Also, your references were not named correctly and that would pass the test as it wouldn't have caused any reference errors in the scripts.
This should help you out:
// Checks that the entry named 'Erin' carries id 800.
// NOTE(review): if no element matches, no expectation runs and the test
// passes vacuously.
pm.test("Validate id to be 800", function () {
    let jsonData = pm.response.json();
    // Declare the loop index — it was an implicit global before.
    for (let i = 0; i < jsonData.length; i++) {
        if (jsonData[i].name === 'Erin') {
            pm.expect(jsonData[i].id).to.eql(800);
        }
    }
});
You could rewrite the test code to something like this:
// Same check written with iteration instead of forEach: assert that every
// entry named 'Erin' has id 800.
pm.test("Validate id to be 800", () => {
    const records = pm.response.json();
    for (const record of records) {
        if (record.name === 'Erin') {
            pm.expect(record.id).to.eql(800);
        }
    }
});
And the Test Results when it fails:

My async call is returning before list is populated in forEach loop

I have a routine which gets a list of filenames from the device, then reads the file(s) to build a list. However, the calling routine always returns with zero items. I print the filenames, so I know they exist, however, it appears that the async is returning before I read the files. I used similar code when making an HTTP call. But, something here is causing the routine to return the list even though it hasn't completed. Perhaps, it is possible that I am calling it at the wrong time? I am calling retrieveItems here:
// Kick off the initial load of saved users as soon as the widget's state
// is created.
@override // was `#override` — not valid Dart syntax for an annotation
void initState() {
  super.initState();
  retrieveItems();
}
Eventually I will have a refresh button, but for now I'd simply like the list to populate with the data from the files...
--------------------
Callee
// Question code (broken by design): forEach's async callback returns Futures
// that are never awaited, so `return l` executes before any file has been
// read. The snippet as pasted is also truncated — the forEach call and the
// function itself are never closed.
Future<List<String>> readHeaderData() async {
List<String> l = new List();
List<String> files = await readHeaders(); // Gets filenames
files.forEach((filename) async {
final file = await File(filename);
String contents = await file.readAsString();
User usr = User.fromJson(json.decode(contents));
String name = usr.NameLast + ", " + usr.NameFirst;
print(name);
l.add(name);
}
return l;
Caller
// Question code: loads the user list from local storage and pushes it into
// widget state, showing a snackbar when no users are found.
// NOTE(review): it both `await`s and chains `.then(...)` on the same call
// (redundant), and the closing braces as pasted are mismatched — the
// trailing `});` lines do not correspond to open scopes; presumably a
// transcription error in the post.
void retrieveItems() async {
LocalStorage storage = new LocalStorage();
await storage.readHeaderData().then((item) {
try {
if ((item != null ) &&(item.length >= 1)) {
setState(() {
users.clear();
_users.addAll(item);
});
} else {
// Empty result: clear the list and tell the user.
setState(() {
_users.clear();
final snackbar = new SnackBar(
content: new Text('No users found.'),
);
scaffoldKey.currentState.showSnackBar(snackbar);
});
}
} on FileNotFoundException catch (e) {
print(e.toString()); //For debug only
setState(() {
_users.clear();
});
});
}
});
This code
// The broken version quoted from the question: the async forEach callbacks
// are fire-and-forget, so `return l` runs before any file has been read and
// the caller receives an empty list.
Future<List<String>> readHeaderData() async {
List<String> l = new List();
List<String> files = await readHeaders(); // Gets filenames
files.forEach((filename) async {
final file = await File(filename);
String contents = await file.readAsString();
User user = User.fromJson(json.decode(contents));
String name = user.NameLast + ", " + user.NameFirst;
print(name);
l.add(name);
}); // was `}` — the forEach(...) call was never closed in the post
return l;
}
returns the list l and then processes the asyc forEach(...) callbacks
If you change it to
// Reads every header file sequentially and returns the collected display
// names. A plain for-in loop (unlike forEach) honours the awaits in its
// body, so the list is fully populated before the function returns.
Future<List<String>> readHeaderData() async {
  List<String> names = new List();
  List<String> files = await readHeaders(); // Gets filenames
  for (var filename in files) { /// <<<<==== changed line
    final file = await File(filename);
    String contents = await file.readAsString();
    User user = User.fromJson(json.decode(contents));
    String displayName = user.NameLast + ", " + user.NameFirst;
    print(displayName);
    names.add(displayName);
  }
  return names;
}
the function will not return before all filenames are processed.
files.forEach((filename) async {
means that you can use await inside the callback, but forEach doesn't care about what (filename) async {...} returns.
Also possible
await Future.forEach(yourList, (T elem) async { ...async staff });
To expand on Günter's comment regarding using list.map(f), here's an example of converting a forEach call so that it works correctly.
Broken example
Incorrectly assumes forEach will wait on futures:
// Intentionally broken teaching example: forEach ignores the Futures
// returned by its async callback, so the function returns before any
// callback completes.
Future<void> brokenExample(List<String> someInput) async {
  // NOTE: `results` is never initialized, so results.add() below would also
  // fail at runtime even if the callbacks were awaited.
  List<String> results;
  someInput.forEach((input) async {
    String result = await doSomethingAsync(input);
    results.add(result);
  });
  return results;
}
Corrected example
Waits on the async functions to complete, using Future.wait and .map():
// Waits on all the async functions to complete, using Future.wait over the
// Futures produced by .map(), before returning.
Future<void> correctedExample(List<String> someInput) async {
  // Initialize the list — the original left it null, so .add() would throw.
  List<String> results = [];
  await Future.wait(someInput.map((input) async {
    String result = await doSomethingAsync(input);
    results.add(result);
  }));
  // NOTE(review): the declared return type is Future<void>, so callers
  // discard this value; consider Future<List<String>> — confirm intent.
  return results;
}
I encountered a similar issue. The problem is that Dart will NOT wait for `forEach`, contrary to popular belief. There are two solutions:
1) Convert forEach to for loop as indicated by others. Another is use Future:
2) await Future.forEach(list, (item) async {
// your code
final result = await getMyResult();
});
Another option
Future.wait(someList.map((item) => something_returns_future(item)));

Read content of SP.File object as text using JSOM

as the title suggests, I am trying to read the contents of a simple text file using JSOM. I am using a Sharepoint-hosted addin for this, the file I am trying to read resides on the host web in a document library.
Here's my JS code:
// Question code: cross-domain JSOM call from a SharePoint-hosted add-in that
// lists items in the host web's 'Documents' library, then tries (and fails)
// to read each file's content. The FileReader misuse at the bottom is the
// subject of the question.
function printAllListNamesFromHostWeb() {
context = new SP.ClientContext(appweburl);
factory = new SP.ProxyWebRequestExecutorFactory(appweburl);
context.set_webRequestExecutorFactory(factory);
// AppContextSite lets the add-in web issue requests against the host web.
appContextSite = new SP.AppContextSite(context, hostweburl);
this.web = appContextSite.get_web();
documentslist = this.web.get_lists().getByTitle('Documents');
var camlQuery = new SP.CamlQuery();
camlQuery.set_viewXml('<View><ViewFields><FieldRef Name="Name"/></ViewFields></View>');
listitems = documentslist.getItems(camlQuery);
context.load(listitems, 'Include(File,FileRef)');
context.executeQueryAsync(
Function.createDelegate(this, successHandler),
Function.createDelegate(this, errorHandler)
);
function successHandler() {
var enumerator = listitems.getEnumerator();
while (enumerator.moveNext()) {
var results = enumerator.get_current();
var file = results.get_file();
// Broken: FileReader needs a Blob/File DOM object; SP.File is a JSOM proxy
// and `file.get` is not a value — this is what the asker can't get working.
//Don't know how to get this to work...
var fr = new FileReader();
fr.readAsText(file.get);
}
}
function errorHandler(sender, args) {
console.log('Could not complete cross-domain call: ' + args.get_message());
}
}
However, in my succes callback function, I don't know how I can extract the contents of the SP.File object. I tried using the FileReader object from HTML5 API but I couldn't figure out how to convert the SP.File object to a blob.
Can anybody give me a push here?
Once file url is determined file content could be loaded from the server using a regular HTTP GET request (e.g. using jQuery.get() function)
Example
The example demonstrates how to retrieve the list of files in library and then download files content
// Entry point of the example: list the files in 'Documents', then download
// each file's content in parallel and log it once all requests settle.
loadItems("Documents",
function(items) {
// One jqXHR promise per file, keyed off its server-relative url (FileRef).
var promises = $.map(items.get_data(),function(item){
return getFileContent(item.get_item('FileRef'));
});
// $.when aggregates the promises; each resolved value arrives as an
// arguments entry of the form [data, statusText, jqXHR].
$.when.apply($, promises)
.then(function(content) {
console.log("Done");
//print files content
$.each(arguments, function (idx, args) {
console.log(args[0])
});
},function(e) {
console.log("Failed");
});
},
function(sender,args){
console.log(args.get_message());
}
);
where
// Loads all items from the named list (recursively, files only, per the
// query) and passes them to `success`; query failures go straight to `error`.
function loadItems(listTitle, success, error) {
    var clientContext = SP.ClientContext.get_current();
    var targetList = clientContext.get_web().get_lists().getByTitle(listTitle);
    var listItems = targetList.getItems(createAllFilesQuery());
    clientContext.load(listItems, 'Include(File,FileRef)');
    clientContext.executeQueryAsync(
        function() { success(listItems); },
        error);
}
// Builds a CAML query that walks all folders (Scope="RecursiveAll") and
// keeps only files (FSObjType = 0).
function createAllFilesQuery() {
    var query = new SP.CamlQuery();
    query.set_viewXml('<View Scope="RecursiveAll"><Query><Where><Eq><FieldRef Name="FSObjType" /><Value Type="Integer">0</Value></Eq></Where></Query></View>');
    return query;
}
// Downloads the file at the given server-relative URL with a plain HTTP GET;
// returns the jqXHR promise for the caller to aggregate.
function getFileContent(fileUrl) {
    return $.ajax({ type: "GET", url: fileUrl });
}

Output html from CouchDB list

I am trying to create a list in CouchDB 0.11 which responds with some html, I am having problems getting CouchDB to set the correct header, whatever I try, I just get an application/json response header. Here is my list function.
// Question code: CouchDB 0.11 list function that sorts rows by last_logtime
// and renders them as HTML. As the self-answer below notes, the getRow()
// loop must live INSIDE the provides("html", ...) callback — consuming the
// rows before registering the provider is why the response comes back as
// application/json instead of text/html.
function(head, req) {
var rows = [];
var row;
// Drains the view rows up front — this is the mistake.
while(row = getRow()) {
rows.push(row);
}
rows.sort(function(a, b) {
return (Date.parse(a['value']['last_logtime']) - Date.parse(b['value']['last_logtime']));
});
provides("html", function() {
var content = "<html><head><title>foobar</title></head><body>";
for(var i in rows) {
content = content + rows[i]['value']['last_logtime']+"<br/>";
}
content = content + "</body></html>";
return content;
});
}
Any suggestions what I am doing wrong?
well actually figured it out myself.
the getRow() stuff needs to be inside the provides function :)