Fetch multiple files at once with the Fetch API - html

I would like to fetch multiple files at once using the new Fetch API (https://fetch.spec.whatwg.org/). Is it possible natively? If so, how should I do it, leveraging promises?

var list = [];
var urls = ['1.html', '2.html', '3.html'];
var results = [];

urls.forEach(function(url, i) { // (1)
    list.push( // (2)
        fetch(url).then(function(res) {
            // read the body as a blob and store it once it is available (3)
            return res.blob().then(function(blob) {
                results[i] = blob;
            });
        })
    );
});

Promise
    .all(list) // (4)
    .then(function() {
        alert('all requests finished!'); // (5)
    });
This is untested code! Additionally, it relies on Array.prototype.forEach and the new Promise object of ES6. The idea works like this (a more compact variant is sketched after the list):
1. Loop through all URLs.
2. For each URL, fetch it with the Fetch API and store the returned promise in list.
3. When the request has finished, read the body and store the result in results.
4. Create a new promise that resolves when all promises in list are resolved (i.e., all requests have finished).
5. Enjoy the fully populated results!
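For reference, here is that more compact variant: each fetch returns its blob promise directly and Promise.all collects the results in order. This is a minimal sketch, not code from the original answer; the URLs are the same placeholders as above.

var urls = ['1.html', '2.html', '3.html'];

Promise
    .all(urls.map(function(url) {
        // each array entry resolves to the blob of the corresponding response
        return fetch(url).then(function(res) {
            return res.blob();
        });
    }))
    .then(function(results) {
        // results[i] is the blob for urls[i], in request order
        alert('all requests finished!');
    });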

While implementing Boldewyn's solution in Kotlin, I pared it down to this:
fun fetchAll(vararg resources: String): Promise<Array<out Response>> {
    return Promise.all(resources.map { fetch(it) }.toTypedArray())
}
Which roughly translates to this in JavaScript:
function fetchAll(...resources) {
    var destination = []
    resources.forEach(it => {
        destination.push(fetch(it))
    })
    return Promise.all(destination)
}
Earlier, I tried to use map instead of forEach + pushing to a new array, but for some reason that simply didn't work.
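For what it's worth, a map-based version usually does work in plain JavaScript, provided the callback passes only the URL to fetch (Array.prototype.map also supplies the element index, which fetch may reject as an invalid init argument). This is an assumption about what went wrong, not something confirmed in the original post:

function fetchAll(...resources) {
    // explicit arrow so only the URL reaches fetch, not map's index argument
    return Promise.all(resources.map(url => fetch(url)));
}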

Related

Calling stored procedures within fast-csv asynchronously

I am writing a backend API in Node.js and need the functionality for users to upload files with data and then call stored procedures to insert that data into MySQL. I'm thinking of using fast-csv as the parser; however, I am struggling with how to set up the stored procedure call inside the CSV stream. The idea is something like this:
var fs = require("fs");
var csv = require("fast-csv");

var stream1 = fs.createReadStream("files/testCsvFile.csv");
var numlines = 0;

csv
    .fromStream(stream1, { headers: true })
    .on("data", function(data) {
        //CALL TO SP with params from "data"//
        numlines++;
    })
    .on("end", function() {
        console.log("done");
    });
In other parts of application I have set up routes as follows:
auth.post("/verified", async (req, res) => {
    var user = req.session.passwordless;
    if (user) {
        const rawCredentials = await admin.raw(getUserRoleCredentials(user));
        const { user_end, role } = await normalizeCredentials(rawCredentials);
        const user_data = { user_end, role };
        res.send(user_data);
    } else {
        res.sendStatus(401);
    }
});
That is, routes are written in the async/await style, with queries (all of them stored procedure calls) defined as Promises. I would like to follow the same pattern in the upload / parse-CSV / call-SP-for-every-line function.
This is doing the job for me; can you please describe how to achieve that with your framework? I believe it can be done somehow, I just need to configure it correctly.
//use fast-csv to stream data from a file
csv
    .fromPath(form.FileName, { headers: true })
    .on("data", async data => {
        const query = await queryBuilder({
            schema,
            routine,
            parameters,
            request
        }); // here we prepare the query for calling the SP with parameters from "data"
        winston.info(query + JSON.stringify(data));
        const rawResponse = await session.raw(query); // here the query gets executed
        fileRows.push(data); // push each row - for testing only
    })
    .on("end", function() {
        console.log(fileRows);
        fs.unlinkSync(form.FileName); // remove temp file
        //process "fileRows" and respond
        res.end(JSON.stringify(fileRows)); // - for testing
    });
As mentioned in the comment, I made my framework scramjet to handle such use cases with ease... Please correct me if I understood it wrongly, but I take it you want to call the two await lines for every CSV row in the test.
If so, your code would look like this (updated to match your comment/answer):
var fs = require("fs");
var csv = require("fast-csv");
var {DataStream} = require("scramjet");

var stream1 = fs.createReadStream("files/testCsvFile.csv");

DataStream
    // the following line will convert any stream to scramjet.DataStream
    .from(csv.fromStream(stream1, { headers: true }))
    // the next line controls how many simultaneous operations are made
    // I assumed 16, but if you're fine with 40 or you want 1 - go for it.
    .setOptions({maxParallel: 16})
    // the next line will call your async function and wait until it's completed
    // and control the back-pressure of the stream
    .do(async (data) => {
        const query = await queryBuilder({
            schema,
            routine,
            parameters,
            request
        }); // here we prepare the query for calling the SP with parameters from "data"
        winston.info(query + JSON.stringify(data));
        const rawResponse = await session.raw(query); // here the query gets executed
        return data; // push each row - for testing only
    })
    // the next line will run the stream until the end and return a promise
    .toArray()
    .then(fileRows => {
        console.log(fileRows);
        fs.unlinkSync(form.FileName); // remove temp file
        //process "fileRows" and respond
        res.end(JSON.stringify(fileRows)); // - for testing
    })
    .catch(e => {
        res.writeHead(500); // some error handling
        res.end(e.message);
    });
// you may want to put an await statement before this, or call .then() to check
// for errors, which I assume is your use case.
To answer your comment question: if you were to use an async function in the on("data") handler, you would need to build an array of promises and await Promise.all of that array when the stream ends, because the stream invokes its handlers synchronously and won't wait for an async handler to finish. So an async function in the event handler alone won't do it.
In scramjet this happens under the hood, so you can just use the async function directly.
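For comparison, a minimal sketch of that collect-and-await approach with plain fast-csv could look like the following. It assumes the same queryBuilder, session, form, and res from the earlier snippets are in scope.

var csv = require("fast-csv");

var pending = []; // one promise per CSV row

csv
    .fromPath(form.FileName, { headers: true })
    .on("data", function(data) {
        // start the stored-procedure call, but keep the promise instead of awaiting here
        pending.push(
            Promise.resolve(queryBuilder({ schema, routine, parameters, request }))
                .then(function(query) { return session.raw(query); })
                .then(function() { return data; })
        );
    })
    .on("end", function() {
        // only now wait for every row's call to finish
        Promise.all(pending)
            .then(function(fileRows) {
                res.end(JSON.stringify(fileRows)); // - for testing
            })
            .catch(function(e) {
                res.writeHead(500);
                res.end(e.message);
            });
    });

Unlike the scramjet version, this fires all of the calls at once with no back-pressure or concurrency limit, which is exactly what maxParallel avoids.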

Using KnexJS to query X number of tables?

I have a unique situation here which I am having trouble solving in an elegant fashion.
A user passes up an array of signals which they want to export data for. This array can contain anywhere from one signal to any number of them, so first I go fetch the table names (each signal stores data in a separate table) based on the signals passed, and store those in an object.
The next step is to iterate over that object (which contains the table names I need to query), execute the query per table, and store the results in an object which will be passed to the next link in the promise chain. I haven't seen any examples online of good ways to handle this, but I know it's a fairly unique scenario.
My code prior to attempting to add support for arrays of signals was simply the following:
exports.getRawDataForExport = function(data) {
    return new Promise(function(resolve, reject) {
        var getTableName = function() {
            return knex('monitored_parameter')
                .where('device_id', data.device_id)
                .andWhere('internal_name', data.param)
                .first()
                .then(function(row) {
                    if (row) {
                        var resp = {"table" : 'monitored_parameter_data_' + row.id, "param" : row.display_name};
                        return resp;
                    }
                });
        }
        var getData = function(runningResult) {
            return knexHistory(runningResult.table)
                .select('data_value as value', 'unit', 'created')
                .then(function(rows) {
                    runningResult.data = rows;
                    return runningResult;
                });
        }
        var createFile = function(runningResult) {
            var fields = ['value', 'unit', 'created'],
                csvFileName = filePathExport + runningResult.param + '_export.csv',
                zipFileName = filePathExport + runningResult.param + '_export.gz';
            var csv = json2csv({data : runningResult.data, fields : fields, doubleQuotes : ''});
            fs.writeFileSync(csvFileName, csv);
            // create streams for gZipping
            var input = fs.createReadStream(csvFileName);
            var output = fs.createWriteStream(zipFileName);
            // gZip
            input.pipe(gzip).pipe(output);
            return zipFileName;
        }
        getTableName()
            .then(getData)
            .then(createFile)
            .then(function(zipFile) {
                resolve(zipFile);
            });
    });
}
Obviously that works fine for a single table, and I have already updated the getTableName() and createFile() methods to handle arrays of data, so this question only pertains to the getData() method.
Cheers!
This kind of problem is far from unique and, approached the right way, is very simply solved.
Don't rewrite any of the three internal functions.
Just purge the explicit promise construction antipattern from .getRawDataForExport() such that it returns a naturally occurring promise and propagates asynchronous errors to the caller.
return getTableName()
    .then(getData)
    .then(createFile);
Now, .getRawDataForExport() is the basic building-block for your multiple "gets".
Then comes a design choice: parallel versus sequential operations. Both are very well documented.
Parallel:
exports.getMultiple = function(arrayOfSignals) {
    return Promise.all(arrayOfSignals.map(getRawDataForExport));
};
Sequential:
exports.getMultiple = function(arrayOfSignals) {
    return arrayOfSignals.reduce(function(promise, signal) {
        return promise.then(function() {
            return getRawDataForExport(signal);
        });
    }, Promise.resolve());
};
In the first instance, for best potential performance, try parallel.
If the server chokes, or is likely ever to choke, on parallel operations, choose sequential.
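If neither extreme fits, a middle ground is to process the signals in fixed-size batches: batches run one after another, while the signals inside a batch run in parallel. This is a hedged sketch, not part of the original answer; the chunk size of 4 is arbitrary.

exports.getMultipleBatched = function(arrayOfSignals, chunkSize) {
    chunkSize = chunkSize || 4; // arbitrary batch size, tune for your server
    var results = [];
    // split the signals into chunks of at most chunkSize
    var chunks = [];
    for (var i = 0; i < arrayOfSignals.length; i += chunkSize) {
        chunks.push(arrayOfSignals.slice(i, i + chunkSize));
    }
    // run the chunks sequentially, but the signals inside each chunk in parallel
    return chunks.reduce(function(promise, chunk) {
        return promise.then(function() {
            return Promise.all(chunk.map(getRawDataForExport)).then(function(zipFiles) {
                results = results.concat(zipFiles);
            });
        });
    }, Promise.resolve()).then(function() {
        return results;
    });
};

Promise.all preserves order within each chunk, so the concatenated results stay in the same order as arrayOfSignals.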

What's the Reactive way to collapse elements into an array?

Take the following TypeScript/Angular 2 code sample:
query(): Rx.Observable<any> {
    return Observable.create((o) => {
        var refinedPosts = new Array<RefinedPost>();
        var observable = this.server.get('http://localhost/rawData.json').toRx().concatMap(
            result =>
                result.json().posts
            )
            .map((post: any) => {
                // Assume I want to convert the raw JSON data into a nice class object with
                // methods, etc.
                var refinedPost = new RefinedPost();
                refinedPost.Message = post.Message.toLowerCase();
                refinedPosts.push(refinedPost);
            })
            .subscribeOnCompleted(() => {
                o.onNext(refinedPosts);
            });
    });
}
Written out, the database is returning JSON. I want to iterate over the raw JSON and create a custom object, eventually returning to subscribers an Array<RefinedPost>.
The code works and the final subscribers get what they need, but I can't help but feel like I didn't do it the "Reactive Way". I cheated and used an external accumulator to gather up the elements in the Array, which seems to defeat the purpose of using streams.
So, the question is, is there a better, more concise, reactive way to write this code?
Answering my own question.
query(): Rx.Observable<any> {
    return this.server.get('http://localhost/rawData.json').toRx().concatMap(
        result =>
            result.json().posts
        )
        .map((post: any) => {
            var refinedPost = new RefinedPost();
            refinedPost.Message = post.Message.toLowerCase();
            return refinedPost;
        })
        .toArray();
}
This removes the internal accumulator and the wrapped Observable. toArray() takes the sequence of items and collects them into a single array that is emitted when the source completes.
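For illustration, a consumer of this query() might look like the sketch below; the postService reference and the subscriber body are assumptions, not code from the original post.

// hypothetical consumer; postService.query() is the method defined above
this.postService.query().subscribe((posts: RefinedPost[]) => {
    // the observable emits a single value: the complete array of refined posts
    this.posts = posts;
    console.log('received ' + posts.length + ' refined posts');
});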

Angular service/factory return after getting data

I know this has something to do with using $q and promises, but I've been at it for hours and still can't quite figure out how it's supposed to work with my example.
I have a .json file with the data I want. I have a list of people with id's. I want to have a service or factory I can query with a parameter that'll http.get a json file I have, filter it based on the param, then send it back to my controller.
angular
    .module("mainApp")
    .controller('personInfoCtrl', ['$scope', '$stateParams', 'GetPersonData', function($scope, $stateParams, GetPersonData) {
        $scope.personId = $stateParams.id; //this part works great
        $scope.fullObject = GetPersonData($stateParams.id);
        //I'm having trouble getting ^^^ to work.
        //I'm able to do
        //GetPersonData($stateParams.id).success(function(data)
        //  { $scope.fullObject = data; });
        //and I can filter it inside of that object, but I want to filter it in the factory/service
    }]);
Inside my main.js I have
//angular.module(...
//..a bunch of urlrouterprovider and stateprovider stuff that works
//
}]).service('GetPersonData', ['$http', function($http) {
    return function(id) {
        return $http.get('./data/people.json').then(function(res) {
            //I know the problem lies in it not 'waiting' for the data to get back
            //before it returns an empty json (or empty something or other)
            return res.data.filter(function(el) { return el.id == id; });
        });
    };
}]);
The syntax of the filtering and everything works great when it's all in the controller, but I want to use the same code in several controllers, so I'm trying to break it out into a service (or factory; I just want the controllers to look 'clean').
I really want to be able to inject "GetPersonData" into a controller, then call GetPersonData(personId) to get back the JSON.
There seems to be a syntax issue in the filter function in your service.
.service('GetPersonData', ['$http', function($http) {
    return function(id) {
        return $http.get('./data/people.json').then(function(res) {
            return res.data.filter(function(el) { return el.id == id; });
        });
    };
}]);
But regarding the original issue: you cannot access a success method on the $q promise that you are returning from your function, because no such method exists there; it exists only on the promise returned directly by the $http call. So you just need to use then to chain it through in your controller.
GetPersonData($stateParams.id).then(function(data){ $scope.fullObject = data; });
If you were to return $http.get('./data/people.json') directly from your service, then you would see $http's custom promise methods success and error.
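To make that distinction concrete, here is a quick sketch of both calling styles (hedged; note that the legacy success/error helpers were deprecated in later Angular 1.x releases and removed in 1.6):

// with the service returning a chained $q promise, only .then() is available
GetPersonData($stateParams.id).then(function(persons) {
    $scope.fullObject = persons;
});

// if the service returned $http.get('./data/people.json') directly,
// the legacy $http helpers would also be available:
// GetPersonData($stateParams.id).success(function(data) {
//     $scope.fullObject = data;
// });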

A solution for streaming JSON using oboe.js in AngularJS?

I'm pretty new to Angular so maybe I'm asking the impossible but anyway, here is my challenge.
As our server cannot paginate the JSON data, I would like to stream the JSON and add it page by page to the controller's model. The user doesn't have to wait for the entire stream to load, so I refresh the view for every X (pagesize) records.
I found oboe.js for parsing the JSON stream and added it using bower to my project. (bower install oboe --save).
I want to update the controller's model during the streaming. I did not use the $q implementation of promises, because only one .resolve(...) is possible and I want multiple pages of data loaded via the stream, so $digest needs to be called with every page. The RESTful service that is called is /service/tasks/search.
I created a factory with a search function which I call from within the controller:
'use strict';
angular.module('myStreamingApp')
    .factory('Stream', function() {
        return {
            search: function(schema, scope) {
                var loaded = 0;
                var pagesize = 100;
                // JSON streaming parser oboe.js
                oboe({
                    url: '/service/' + schema + '/search'
                })
                    // process every node which has a schema
                    .node('{schema}', function(rec) {
                        // push the record to the model data
                        scope.data.push(rec);
                        loaded++;
                        // if another page has been received then refresh the view
                        if (loaded % pagesize === 0) {
                            scope.$digest();
                        }
                    })
                    .fail(function(err) {
                        console.log('streaming error ' + (err.thrown ? err.thrown.message : ''));
                    })
                    .done(function() {
                        scope.$digest();
                    });
            }
        };
    });
My controller:
'use strict';
angular.module('myStreamingApp')
    .controller('MyCtrl', function($scope, Stream) {
        $scope.data = [];
        Stream.search('tasks', $scope);
    });
It all seems to work. After a while, however, the system gets slow and the HTTP call doesn't terminate after refreshing the browser. Also, the browser (Chrome) crashes when too many records are loaded.
Maybe I'm on the wrong track, because passing the scope to the factory's search function doesn't "feel" right and I suspect that calling $digest on that scope is giving me trouble. Any ideas on this subject are welcome, especially if you have an idea for implementing it so that the factory (or service) returns a promise and I could use
$scope.data = Stream.search('tasks');
in the controller.
I dug in a little further and came up with the following solution. It might help someone:
The factory (named Stream) has a search function which is passed parameters for the Ajax request and a callback function. The callback is called for every page of data loaded by the stream. The callback function is invoked via a deferred promise, so the scope is updated automatically with every page. To access the search function I use a service (named Search) which initially returns an empty array of data. As the stream progresses, the factory calls the callback passed by the service and the page is added to the data.
I can now call the Search service from within a controller and assign the return value to the scope's data array.
The service and the factory:
'use strict';
angular.module('myStreamingApp')
    .service('Search', function(Stream) {
        return function(params) {
            // initialize the data
            var data = [];
            // add the data page by page using a stream
            Stream.search(params, function(page) {
                // a page of records is received.
                // add each record to the data
                _.each(page, function(record) {
                    data.push(record);
                });
            });
            return data;
        };
    })
    .factory('Stream', function($q) {
        return {
            // the search function calls the oboe module to get the JSON data in a stream
            search: function(params, callback) {
                // the defer will be resolved immediately
                var defer = $q.defer();
                var promise = defer.promise;
                // counter for the received records
                var counter = 0;
                // I use an arbitrary page size.
                var pagesize = 100;
                // initialize the page of records
                var page = [];
                // call the oboe function to start the stream
                oboe({
                    url: '/api/' + params.schema + '/search',
                    method: 'GET'
                })
                    // once the stream starts we can resolve the defer.
                    .start(function() {
                        defer.resolve();
                    })
                    // for every node containing an _id
                    .node('{_id}', function(node) {
                        // we push the node to the page
                        page.push(node);
                        counter++;
                        // if the pagesize is reached return the page using the promise
                        if (counter % pagesize === 0) {
                            promise.then(callback(page));
                            // initialize the page
                            page = [];
                        }
                    })
                    .done(function() {
                        // when the stream is done make sure the last page of nodes is returned
                        promise.then(callback(page));
                    });
                return promise;
            }
        };
    });
Now I can call the service from within a controller and assign the response of the service to the scope:
$scope.mydata = Search({schema: 'tasks'});
Update August 30, 2014
I have created an angular-oboe module with the above solution a little bit more structured.
https://github.com/RonB/angular-oboe