I am trying to create a simple web application which fires an http.request call, gets the data, and displays it in the HTML (EJS here). I am able to fire the request, get the data, massage it, etc., but I am unable to pass it to the view. Sample code is below:
var searchData = [];
var output = '';

router.post('/', requesthandler);

function requesthandler(req, res) {
    var options = {
        host: url,
        port: 9999,
        path: qstring,
        method: 'GET'
    };
    var reqget = http.request(options, responsehandler);
    reqget.end();
    console.log('Rendering now:............................ ');
    res.render('result', { title: 'Results Returned', searchdata: searchData });
}
function responsehandler(ress) {
    console.log('STATUS: ' + ress.statusCode);
    ress.on('data', function (chunk) {
        output += chunk;
        console.log('BODY: ');
    });
    /* reqget.write(output); */
    ress.on('end', parseresponse);
}
function parseresponse() {
    var data = JSON.parse(output);
    console.log(data.responseHeader);
    // populate searchData here from the data object
    searchData.push({ /* some data */ });
}

function errorhandler(e) {
    console.error(e);
}

module.exports = router;
The problem is that I am unable to pass the object searchData to the view via res.render().
'Rendering now: ...........' gets logged before execution starts in parseresponse(), so the page is displayed without the data, which is consistent with how callbacks work. So how can I pass the data object to the view once searchData is loaded in parseresponse()?
PS: I am able to print all console statements
Define the res variable globally:
var res;

function requesthandler(req, resObj) {
    res = resObj; // set it to the resObj
}
Wrap res.render inside a function like this:
function renderPage() {
    res.render('result', { title: 'Results Returned', searchdata: searchData });
}
Then, in the parseresponse function, do this:
function parseresponse() {
    var data = JSON.parse(output);
    searchData.push({ /* some data */ });
    renderPage();
}
Hope this solves your problem.
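For reference, here is a minimal sketch of the whole flow with those changes applied, reusing responsehandler, errorhandler, url, and qstring from the question (note that module-level state like this is shared across concurrent requests; it is fine for a demo only):

var res; // module-level, as suggested above
var output = '';
var searchData = [];

router.post('/', requesthandler);

function requesthandler(req, resObj) {
    res = resObj; // keep the response object for renderPage()
    var options = { host: url, port: 9999, path: qstring, method: 'GET' }; // as in the question
    var reqget = http.request(options, responsehandler);
    reqget.on('error', errorhandler);
    reqget.end();
}

function parseresponse() {
    var data = JSON.parse(output);
    searchData.push({ /* some data */ });
    renderPage(); // render only after searchData is populated
}

function renderPage() {
    res.render('result', { title: 'Results Returned', searchdata: searchData });
}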
gulp.task('default', function (done) {
    inquirer.prompt([{
        type: `input`,
        message: `Enter the path`,
        default: `./admin/admin.json`,
        name: `path`
    }]).then(function (answers) {
        console.log(answers.path);
        console.log('answers');
        mydefaultTaskTwo(null, answers.path).pipe(pipedFunction());
        done();
    });
});
function mydefaultTaskTwo(cb, path) {
    let data = '';
    try {
        data = fs.readFileSync(path, 'utf-8');
    } catch (e) {
        console.log(`Error: ${e}`);
    }
    return data;
}
function pipedFunction() {
    let object = JSON.parse(data);
    object['main'] = 'admin';
    data = JSON.stringify(object);
    const readable = Readable.from(data);
    return readable;
}
I understand that src returns a stream and pipe takes that stream and returns a stream, but how do you feed the stream into pipedFunction, which is called inside of pipe? I am unsure how it works. I get the following error:
ReferenceError: data is not defined.
Is there something I am misunderstanding about gulp scripts?
Basically, you define data as a function-scoped variable and then try to reach it from a different scope, where it is undefined. So you need to make use of the fact that data is returned, and pass it along explicitly, like:
var data = mydefaultTaskTwo(null, answers.path);
pipedFunction(data).pipe(/* next stream in your pipeline */);
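For that call to work, pipedFunction has to accept the data it previously tried to read from an outer scope; a minimal sketch of that change, keeping the same logic as in the question:

const { Readable } = require('stream');

function pipedFunction(data) {
    // 'data' now arrives as a parameter instead of an undefined outer variable
    let object = JSON.parse(data);
    object['main'] = 'admin';
    return Readable.from(JSON.stringify(object));
}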
I am writing a backend API in Node.js and need the functionality for users to upload files with data and then call stored procedures to insert the data into MySQL. I'm thinking of using fast-csv as the parser, but I am struggling with how to set up the call to the stored procedure in the CSV stream. The idea is something like this:
var fs = require("fs");
var csv = require("fast-csv");
var stream1 = fs.createReadStream("files/testCsvFile.csv");
csv
.fromStream(stream2, { headers: true })
.on("data", function(data) {
//CALL TO SP with params from "data"//
numlines++;
})
.on("end", function() {
console.log("done");
});
In other parts of the application I have set up routes as follows:
auth.post("/verified", async (req, res) => {
var user = req.session.passwordless;
if (user) {
const rawCredentials = await admin.raw(getUserRoleCredentials(user));
const { user_end, role } = await normalizeCredentials(rawCredentials);
const user_data = { user_end, role };
res.send(user_data);
} else {
res.sendStatus(401);
}
});
...that is, routes are written in async/await style, with all queries (stored procedure calls) defined as Promises. I would like to follow this pattern in the upload/parse CSV/call SP for every line function.
This is doing the job for me. Can you please describe how to achieve that with your framework? I believe it should be possible; I just need to configure it correctly.
// use fast-csv to stream data from a file
csv
    .fromPath(form.FileName, { headers: true })
    .on("data", async data => {
        const query = await queryBuilder({
            schema,
            routine,
            parameters,
            request
        }); // here we prepare the query for calling the SP with parameters from data
        winston.info(query + JSON.stringify(data));
        const rawResponse = await session.raw(query); // here the query gets executed
        fileRows.push(data); // push each row - for testing only
    })
    .on("end", function () {
        console.log(fileRows);
        fs.unlinkSync(form.FileName); // remove temp file
        // process "fileRows" and respond
        res.end(JSON.stringify(fileRows)); // - for testing
    });
As mentioned in the comment, I made scramjet to handle such use cases with ease... Please correct me if I understood it wrong, but I understand you want to run the two await lines for every CSV row in the test.
If so, your code would look like this (updated to match your comment/answer):
var fs = require("fs");
var csv = require("fast-csv");
var stream1 = fs.createReadStream("files/testCsvFile.csv");
var {DataStream} = require("scramjet");
DataStream
// the following line will convert any stream to scramjet.DataStream
.from(csv.fromStream(stream2, { headers: true }))
// the next lines controls how many simultaneous operations are made
// I assumed 16, but if you're fine with 40 or you want 1 - go for it.
.setOptions({maxParallel: 16})
// the next line will call your async function and wait until it's completed
// and control the back-pressure of the stream
.do(async (data) => {
const query = await queryBuilder({
schema,
routine,
parameters,
request
}); //here we prepare query for calling the SP with parameters from data
winston.info(query + JSON.stringify(data));
const rawResponse = await session.raw(query); //here the query gets executed
return data; // push each row - for testing only)
})
// next line will run the stream until end and return a promise
.toArray()
.then(fileRows => {
console.log(fileRows);
fs.unlinkSync(form.FileName); // remove temp file
//process "fileRows" and respond
res.end(JSON.stringify(fileRows)); // - for testing
})
.catch(e => {
res.writeHead(500); // some error handling
res.end(e.message);
})
;
// you may want to put an await statement before this, or call then to check
// for errors, which I assume is your use case.
;
To answer your comment question: if you were to use an async function in the on("data") event handler, you would need to create an array of promises and await Promise.all of that array on stream end - the event handler itself still runs synchronously, so an async function in an event handler won't do it on its own.
In scramjet this happens under the hood, so you can just use the async function.
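For completeness, a minimal sketch of that Promise.all pattern with plain fast-csv, assuming queryBuilder and session.raw return promises as in the snippets above:

const pending = [];

csv
    .fromPath(form.FileName, { headers: true })
    .on("data", (data) => {
        // push the promise synchronously; do not await inside the handler
        pending.push(
            Promise.resolve(queryBuilder({ schema, routine, parameters, request }))
                .then((query) => session.raw(query))
                .then(() => data)
        );
    })
    .on("end", () => {
        // note: unlike scramjet, this gives no back-pressure or parallelism control
        Promise.all(pending)
            .then((fileRows) => res.end(JSON.stringify(fileRows)))
            .catch((e) => {
                res.writeHead(500);
                res.end(e.message);
            });
    });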
This is my controller, which is calling the login service:
mod.controller("loginCtrl",function($scope,loginService,$http)
{
$scope.Userlogin = function()
{
var User = {
userid :$scope.uname,
pass:$scope.pass
};
var res = UserloginService(User);
console.log(res);
alert("login_succ");
}
});
And this is the login service code, which takes the User variable and checks the username & password:
mod.service("loginService",function($http,$q) {
UserloginService = function(User) {
var deffered = $q.defer();
$http({
method:'POST',
url:'http://localhost:8080/WebApplication4_1/login.htm',
data:User
}).then(function(data) {
deffered.resolve(data);
}).error(function(status) {
deffered.reject({
status:status
});
});
return deffered.promise;
// var response = $http({
//
// method:"post",
// url:"http://localhost:8080/WebApplication4_1/login.htm",
// data:JSON.stringify(User),
// dataType:"json"
// });
// return "Name";
}
});
I have created a REST API using Spring which, upon being passed JSON, returns back the username and password in JSON.
The console shows me this error for Angular:
You need to enable CORS for your application. For guidance, see this link:
https://htet101.wordpress.com/2014/01/22/cors-with-angularjs-and-spring-rest/
I prefer to use a Factory to do what you're trying to do, which would look something like this:
MyApp.factory('MyService', ["$http", function ($http) {
    var urlBase = "http://localhost:3000";
    return {
        getRecent: function (numberOfItems) {
            return $http.get(urlBase + "/things/recent?limit=" + numberOfItems);
        },
        getSomethingElse: function (url) {
            return $http.get(urlBase + "/other/things");
        },
        search: function (searchTerms) {
            return $http.get(urlBase + "/search?q=" + searchTerms);
        }
    };
}]);
And then in your controller you can inject MyService and use it this way:
MyService.getRecent(10).then(function (res) {
    $scope.things = res.data;
});
This is a great way to handle it because you're putting the .then in your controller, so you can control the state of the UI while it is loading if you'd like, like this:
// initialize the loading var, set to false
$scope.loading = false;

// create a reusable update function; inside, use a promise for the ajax call,
// which runs inside the Factory
$scope.updateList = function () {
    $scope.loading = true;
    MyService.getRecent(10).then(function (res) {
        $scope.loading = false;
        $scope.things = res.data;
    });
};

$scope.updateList();
The error in the console shows two issues with your code:
1. CORS is not enabled in your API. To fix this, you need to enable CORS by adding the Access-Control-Allow-Origin header to your REST API.
2. An unhandled rejection error, as the way you are handling errors with the .error() method is deprecated.
The .error() method is deprecated according to this and this commit in the AngularJS GitHub repo.
Hence you need to change the way you are handling errors, as shown below:
$http().then(successCallback, errorCallback);

function successCallback(res) {
    return res;
}

function errorCallback(err) {
    return err;
}
One more thing in your code that can be avoided: you have defined a new promise and resolved it using $q methods, which is not required. $http itself returns a promise by default, so you need not define another one inside it. You can directly use $http(...).then().
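For illustration, a minimal sketch of the service without the extra deferred (same URL as in the question):

mod.service("loginService", function ($http) {
    // $http already returns a promise, so just return the call itself
    this.UserloginService = function (User) {
        return $http({
            method: 'POST',
            url: 'http://localhost:8080/WebApplication4_1/login.htm',
            data: User
        });
    };
});

The controller can then call loginService.UserloginService(User).then(successCallback, errorCallback) directly.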
I am creating a website that reads externally hosted JSON files and then uses Node.js to populate the site's content.
Just to demonstrate what I'm after, this is a really simplified version of what I'm trying to do in Node.js:
var ids = [111, 222, 333];

ids.forEach(function (id) {
    var json = getJSONsomehow('http://www.website.com/' + id + '.json');
    buildPageContent(json);
});
Is what I want to do possible?
(Marked as a duplicate of "How do I return the response from an asynchronous call?" see my comment below for my rebuttal)
You are trying to get it synchronously. What you should aim for instead is not a function used like this:
var json = getJSONsomehow('http://www.website.com/'+id+'.json');
but more like this:
getJSONsomehow('http://www.website.com/' + id + '.json', function (err, json) {
    if (err) {
        // error
    } else {
        // your json can be used here
    }
});
or like this:
getJSONsomehow('http://www.website.com/' + id + '.json')
    .then(function (json) {
        // you can use your json here
    })
    .catch(function (err) {
        // error
    });
You can use the request module to get your data with something like this:
var request = require('request');

var url = 'http://www.website.com/' + id + '.json';

request.get({ url: url, json: true }, (err, res, data) => {
    if (err) {
        // handle error
    } else if (res.statusCode === 200) {
        // you can use data here - already parsed as json
    } else {
        // response other than 200 OK
    }
});
For a working example see this answer.
For more info see: https://www.npmjs.com/package/request
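To tie this back to the ids loop from the question, a minimal sketch that wraps request in a promise-returning helper (getJSON here is a hypothetical name) and waits for all of the downloads:

var request = require('request');

function getJSON(url) {
    return new Promise(function (resolve, reject) {
        request.get({ url: url, json: true }, function (err, res, data) {
            if (err) return reject(err);
            if (res.statusCode !== 200) return reject(new Error('HTTP ' + res.statusCode));
            resolve(data);
        });
    });
}

var ids = [111, 222, 333];

Promise.all(ids.map(function (id) {
    return getJSON('http://www.website.com/' + id + '.json');
})).then(function (jsons) {
    jsons.forEach(buildPageContent); // buildPageContent as in the question
}).catch(function (err) {
    // error handling
});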
I think the problem is in the async request: the function returns its result before the request has finished.

AJAX_req.open("GET", url, true);

The third parameter specifies an asynchronous request. You should add a handler and do whatever you need after the request finishes. For example:
function AJAX_JSON_Req(url) {
    var AJAX_req = new XMLHttpRequest();
    AJAX_req.open("GET", url, true);
    AJAX_req.setRequestHeader("Content-type", "application/json");
    AJAX_req.onreadystatechange = function () {
        if (AJAX_req.readyState == 4 && AJAX_req.status == 200) {
            console.log(AJAX_req.responseText);
        }
    };
    AJAX_req.send(); // the request must actually be sent
}
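To actually hand the response to the rest of your code, you would pass a callback rather than trying to return the value; a minimal sketch (the onDone parameter is hypothetical):

function AJAX_JSON_Req(url, onDone) {
    var AJAX_req = new XMLHttpRequest();
    AJAX_req.open("GET", url, true);
    AJAX_req.onreadystatechange = function () {
        if (AJAX_req.readyState == 4 && AJAX_req.status == 200) {
            onDone(JSON.parse(AJAX_req.responseText));
        }
    };
    AJAX_req.send();
}

AJAX_JSON_Req('http://www.website.com/111.json', function (json) {
    // use the parsed JSON here
});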
I am trying to create a skill for Amazon Echo that will fetch a JSON file from AWS S3. When I run the basic S3 get code on its own, it works, and the Amazon Alexa code works on its own.
But when I call them together, the callback gets skipped: the console statements before and after s3.getObject() are logged, but the one in the middle is not. I do not understand why.
I also checked whether S3 was being called, and it is.
let aws = require('aws-sdk');
let s3 = new aws.S3({ apiVersion: '2006-03-01' });

function callS3() {
    console.log('loading S3 function');
    var myData = [];
    const params = {
        Bucket: 'cvo-echo',
        Key: 'data.json'
    };
    console.log("trying to get s3");
    s3.getObject(params, (err, data) => {
        if (err) {
            console.log('error in s3 get: \n' + err);
            //const message = `Error getting object ${key} from bucket ${bucket}.
            // Make sure they exist and your bucket is in same region as this function.
            //console.log(message);
        } else {
            console.log('CONTENT TYPE: ', data.ContentType);
            console.log('Data body: \n' + data.Body.toString());
            myData = JSON.parse(data.Body.toString());
            console.log('myData.length = ' + myData.length);
        }
        console.log('myData >> ' + myData);
    });
    console.log('finished callS3() func');
    return myData;
}
This might be a control flow issue. I've worked with Amazon's SDK before and ran into similar issues. Try implementing explicit flow control (callbacks, or the async library) within your code to have better control of what happens when. This way methods won't get skipped.
UPDATE: adding some code examples of what you could do.
function callS3(callback) {
    console.log('loading S3 function');
    var myData = [];
    const params = {
        Bucket: 'cvo-echo',
        Key: 'data.json'
    };
    console.log("trying to get s3");
    s3.getObject(params, (err, data) => {
        if (err) {
            console.log('error in s3 get: \n' + err);
            //const message = `Error getting object ${key} from bucket ${bucket}.
            // Make sure they exist and your bucket is in same region as this function.
            //console.log(message);
            callback(err, null); // call back with the error
        } else {
            console.log('CONTENT TYPE: ', data.ContentType);
            console.log('Data body: \n' + data.Body.toString());
            myData = JSON.parse(data.Body.toString());
            console.log('myData.length = ' + myData.length);
            console.log('myData >> ' + myData);
            console.log('finished callS3() func');
            // invoke the callback inside of the S3 handler so this function
            // doesn't hand back a result until the S3 call completes
            callback(null, myData); // first argument is an error, second is your data; the first is null if no error occurred
        }
    });
}
/*
This MIGHT work without async, but just in case you can read more about
async.waterfall, where functions pass their results down to the next function.
*/
var async = require('async');

async.waterfall([
    callS3 // pass the function itself; you can include more functions here, and
           // the callback values of the last one are available to the next
    // myNextFunction
], function (err, myData) {
    // you can use myData here
});
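If you'd rather not pull in the async library, the callback version of callS3 above can also be invoked directly; a minimal sketch:

callS3(function (err, myData) {
    if (err) {
        console.log('S3 failed: ' + err);
        return;
    }
    // myData is available here, after the S3 call has completed
});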
It's a timing issue. Here is an example of loading a JSON file from an S3 share when a session is started.
function onLaunch(launchRequest, session, callback) {
    var sBucket = "your-bucket-name";
    var sFile = "data.json";
    var params = { Bucket: sBucket, Key: sFile };
    var s3 = new AWS.S3();
    s3.getObject(params, function (err, data) {
        if (!err) {
            var json = JSON.parse(Buffer.from(data.Body).toString("utf8"));
            for (var i = 0; i < json.length; i++) {
                console.log("name:" + json[i].name + ", age:" + json[i].age);
            }
            getWelcomeResponse(callback);
        } else {
            console.log(err.toString());
        }
    });
}