Read individual JSON entries from a log file containing multiple lines of JSON

I'm trying to read a log file where each entry is a line of JSON (JSON structured text).
What I ultimately hope to do is iterate over each line and, if
"Event":"SparkListenerTaskEnd"
is found, parse that JSON line for the values of the keys "Finish Time" and "Executor CPU Time".
I'm new to node.js so I may be completely wrong, but so far I've got this block of code for iterating through the file:
exports.getId(function(err, id) {
    console.log(id);
    var data = fs.readFileSync('../PC Files/' + id, 'utf8', function(err, data) {
        var content = data.split('\n');
        async.map(content, function(item, callback) {
            callback(null, JSON.parse(item));
        }, function(err, content) {
            console.log(content);
        });
    });
    //console.log(data);
});
This doesn't seem to be doing anything though. However, I know the log file can be read as I can see it if I uncomment //console.log(data);.
Below is an example JSON line that I'm talking about:
{"Event":"SparkListenerTaskEnd","Stage ID":0,"Stage Attempt ID":0,"Task Type":"ShuffleMapTask","Task End Reason":{"Reason":"Success"},"Task Info":{"Task ID":0,"Index":0,"Attempt":0,"Launch Time":1514983570810,"Executor ID":"0","Host":"192.168.111.123","Locality":"PROCESS_LOCAL","Speculative":false,"Getting Result Time":0,"Finish Time":1514983574496,"Failed":false,"Killed":false,"Accumulables":[{"ID":22,"Name":"internal.metrics.input.recordsRead","Update":99171,"Value":99171,"Internal":true,"Count Failed Values":true},{"ID":20,"Name":"internal.metrics.shuffle.write.writeTime","Update":5893440,"Value":5893440,"Internal":true,"Count Failed Values":true},{"ID":19,"Name":"internal.metrics.shuffle.write.recordsWritten","Update":3872,"Value":3872,"Internal":true,"Count Failed Values":true},{"ID":18,"Name":"internal.metrics.shuffle.write.bytesWritten","Update":1468516,"Value":1468516,"Internal":true,"Count Failed Values":true},{"ID":10,"Name":"internal.metrics.peakExecutionMemory","Update":16842752,"Value":16842752,"Internal":true,"Count Failed Values":true},{"ID":9,"Name":"internal.metrics.diskBytesSpilled","Update":0,"Value":0,"Internal":true,"Count Failed Values":true},{"ID":8,"Name":"internal.metrics.memoryBytesSpilled","Update":0,"Value":0,"Internal":true,"Count Failed Values":true},{"ID":7,"Name":"internal.metrics.resultSerializationTime","Update":1,"Value":1,"Internal":true,"Count Failed Values":true},{"ID":6,"Name":"internal.metrics.jvmGCTime","Update":103,"Value":103,"Internal":true,"Count Failed Values":true},{"ID":5,"Name":"internal.metrics.resultSize","Update":2597,"Value":2597,"Internal":true,"Count Failed Values":true},{"ID":4,"Name":"internal.metrics.executorCpuTime","Update":1207164005,"Value":1207164005,"Internal":true,"Count Failed Values":true},{"ID":3,"Name":"internal.metrics.executorRunTime","Update":2738,"Value":2738,"Internal":true,"Count Failed Values":true},{"ID":2,"Name":"internal.metrics.executorDeserializeCpuTime","Update":542927064,"Value":542927064,"Internal":true,"Count Failed Values":true},{"ID":1,"Name":"internal.metrics.executorDeserializeTime","Update":835,"Value":835,"Internal":true,"Count Failed Values":true}]},"Task Metrics":{"Executor Deserialize Time":835,"Executor Deserialize CPU Time":542927064,"Executor Run Time":2738,"Executor CPU Time":1207164005,"Result Size":2597,"JVM GC Time":103,"Result Serialization Time":1,"Memory Bytes Spilled":0,"Disk Bytes Spilled":0,"Shuffle Read Metrics":{"Remote Blocks Fetched":0,"Local Blocks Fetched":0,"Fetch Wait Time":0,"Remote Bytes Read":0,"Local Bytes Read":0,"Total Records Read":0},"Shuffle Write Metrics":{"Shuffle Bytes Written":1468516,"Shuffle Write Time":5893440,"Shuffle Records Written":3872},"Input Metrics":{"Bytes Read":0,"Records Read":99171},"Output Metrics":{"Bytes Written":0,"Records Written":0},"Updated Blocks":[{"Block ID":"broadcast_1_piece0","Status":{"Storage Level":{"Use Disk":false,"Use Memory":true,"Deserialized":false,"Replication":1},"Memory Size":5941,"Disk Size":0}},{"Block ID":"broadcast_1","Status":{"Storage Level":{"Use Disk":false,"Use Memory":true,"Deserialized":true,"Replication":1},"Memory Size":9568,"Disk Size":0}},{"Block ID":"broadcast_0_piece0","Status":{"Storage Level":{"Use Disk":false,"Use Memory":true,"Deserialized":false,"Replication":1},"Memory Size":25132,"Disk Size":0}},{"Block ID":"broadcast_0","Status":{"Storage Level":{"Use Disk":false,"Use Memory":true,"Deserialized":true,"Replication":1},"Memory Size":390808,"Disk Size":0}}]}}
Update
Here is my whole code. I'm sure it's not pretty but it works. I'll now look at improving it.
var http = require("http");
var fs = require('fs');
var async = require('async');
var readline = require('readline');

// get file name
var options = {
    "method": "GET",
    "hostname": "xxx.xxx.xxx.xxx",
    "port": "18080",
    "path": "/api/v1/applications/"
};

exports.getId = function(callback) {
    var req = http.request(options, function(res) {
        var chunks = [];
        res.on("data", function(chunk) {
            chunks.push(chunk);
        });
        res.on("end", function() {
            var body = JSON.parse(Buffer.concat(chunks));
            var arrFound = Object.keys(body).filter(function(key) {
                if (body[key].name.indexOf("TestName") > -1) {
                    return body[key].name;
                }
            }).reduce(function(obj, key) {
                obj = body[key].id;
                return obj;
            }, {});
            //console.log("ID: ", arrFound);
            callback(null, arrFound);
        });
    });
    req.end();
};
// parse the log file a line at a time and use any lines where Event = SparkListenerTaskEnd
exports.getId(function(err, id) {
    console.log(id);
    var lineReader = readline.createInterface({
        input: fs.createReadStream('../PC Files/' + id, 'utf8')
    });
    lineReader.on('line', function(line) {
        var obj = JSON.parse(line);
        if (obj.Event == "SparkListenerTaskEnd") {
            console.log('Line from file:', obj['Task Info']['Finish Time']);
        }
    });
});
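Based on the sample line above, a minimal sketch (assuming every SparkListenerTaskEnd entry has the same key layout) of pulling both requested values out inside the line handler:
lineReader.on('line', function(line) {
    var obj = JSON.parse(line);
    if (obj.Event === "SparkListenerTaskEnd") {
        // "Finish Time" sits under "Task Info"; "Executor CPU Time" sits under "Task Metrics"
        var finishTime = obj['Task Info']['Finish Time'];
        var executorCpuTime = obj['Task Metrics']['Executor CPU Time'];
        console.log('Finish Time:', finishTime, 'Executor CPU Time:', executorCpuTime);
    }
});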
Adam, I tried your suggested code but got the following error:
null
fs.js:646
return binding.open(pathModule._makeLong(path), stringToFlags(flags), mode);
^
Error: ENOENT: no such file or directory, open '../PC Files/null'
at Object.fs.openSync (fs.js:646:18)
at Object.fs.readFileSync (fs.js:551:33)
at /test-runner/modules/getEventLog.js:61:19
at IncomingMessage.<anonymous> (/test-runner/modules/getEventLog.js:35:13)
at emitNone (events.js:111:20)
at IncomingMessage.emit (events.js:208:7)
at endReadableNT (_stream_readable.js:1056:12)
at _combinedTickCallback (internal/process/next_tick.js:138:11)
at process._tickCallback (internal/process/next_tick.js:180:9)

At first glance, it appears you are using callbacks incorrectly.
I assume you are using the getId function like:
getId(function(error, data) {
    // Do something with data
});
In which case, the callback function should be returned like:
// Remove the error, this will not be entered as a parameter
// Add callback as parameter
exports.getId(function(id, callback) {
    console.log(id);
    var data = fs.readFileSync('../PC Files/' + id, 'utf8', function(err, data) {
        var content = data.split('\n');
        // Removed callback from here
        // We will not have access to it here
        async.map(content, function(item) {
            callback(null, JSON.parse(item));
            // Add callback with error in place of null
        }, function(err, content) {
            callback(err);
            console.log(content);
        });
    });
    //console.log(data);
});
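For what it's worth, fs.readFileSync doesn't take a callback at all; it returns the file contents directly. A minimal sketch of the synchronous approach (assuming the path resolves to the log file):
exports.getId(function(err, id) {
    if (err) throw err;
    // readFileSync returns the contents directly, no callback needed
    var data = fs.readFileSync('../PC Files/' + id, 'utf8');
    data.split('\n').forEach(function(line) {
        if (!line.trim()) return; // skip blank lines
        var obj = JSON.parse(line);
        if (obj.Event === 'SparkListenerTaskEnd') {
            console.log(obj['Task Info']['Finish Time'], obj['Task Metrics']['Executor CPU Time']);
        }
    });
});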


Google Data Studio Community Connector getData() not working as expected

function getData(request) {
    try {
        var options = {
            'method': 'post',
            'contentType': 'application/json',
            'payload': JSON.stringify(request)
        };
        response = UrlFetchApp.fetch(getDataUrl, options);
        resData = JSON.parse(response.getContentText());
        return resData;
    } catch (e) {
        e = (typeof e === 'string') ? new Error(e) : e;
        Logger.log("Catch", e);
        throw e;
    }
}
The above is my getData() function.
My isAdminUser() returns true.
When I try to visualize my data, I get the following error
Data Set Configuration Error
Data Studio cannot connect to your data set.
There was an error requesting data from the community connector. Please report the issue to the provider of this community connector if this issue persists.
Error ID: 3d11b88b
https://i.stack.imgur.com/x3Hki.png
The error code changes every time I refresh the data, and I can't find any dictionary to map the error ID to an actual error.
I tried debugging by logging the request parameter, response.getContentText() and the resData variable to make sure my data is formatted correctly.
Following are the logs printed in the Stackdriver logs:
request
{configParams={/Personal config data/}, fields=[{name=LASTNAME}]}
response.getContentText()
{"schema":[{"name":"LASTNAME","dataType":"STRING"}],"rows":[{"values":["test"]},{"values":["test"]},{"values":["Dummy"]},{"values":["One"]},{"values":["Nagargoje"]},{"values":[""]},{"values":[""]},{"values":[""]},{"values":[""]},{"values":[""]}],"filtersApplied":false}
resData
{rows=[{values=[test]}, {values=[test]}, {values=[Dummy]},
{values=[One]}, {values=[Nagargoje]}, {values=[]}, {values=[]},
{values=[]}, {values=[]}, {values=[]}], filtersApplied=false,
schema=[{name=LASTNAME, dataType=STRING}]}
I am not sure what is wrong with my getData() function.
The Object that I am returning seems to match the structure given here https://developers.google.com/datastudio/connector/reference#getdata
So there was no issue with my getData() function; the issue existed in the manifest file.
I was searching for how to pass parameters via the URL and stumbled upon a field called
dataStudio.useQueryConfig, added it to my manifest file and set its value to true.
Google Data Studio was therefore expecting me to return a query config from getData().
But what I really wanted was this.
Anyway, I was able to debug it thanks to Matthias suggesting I take a look at open-source implementations.
I implemented JSON connect, which worked fine, so I logged what it was returning in getData() and used that format/structure in my code, but my connector still didn't work.
My next assumption was that maybe there was something wrong with my getSchema() return value. So I logged that as well, and then copy-pasted the hard-coded values of both the getData() and getSchema() return variables from JSON connect.
Even that didn't work, so my last bet was that there must be something wrong with the manifest file; maybe the dummy links I added in it were the issue. Then, after carrying out a field-by-field comparison, I was finally able to get my community connector working.
This would have been easier to debug if the error messages were a bit more helpful and didn't seem so generic.
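For reference, dataStudio.useQueryConfig lives in the connector's appsscript.json manifest. A stripped-down sketch (the other fields are placeholders) of the setting that triggered this behaviour:
{
  "dataStudio": {
    "name": "My Connector",
    "company": "Example Co",
    "logoUrl": "https://example.com/logo.png",
    "addOnUrl": "https://example.com",
    "supportUrl": "https://example.com/support",
    "description": "Example connector",
    "useQueryConfig": true
  }
}
With useQueryConfig set to true, Data Studio expects getData() to return a query config instead of rows; removing the field (or setting it to false) restores the normal getData() contract described above.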
First: You can always check out the open-source implementations that others did for custom Google Data Studio connectors. They are a great source of information. For more information, check out the documentation on Open Source Community Connectors.
Second: My implementation is for a time tracking system and thus holds confidential, GDPR-relevant data. That's why I cannot just give you the response messages, but I assembled this code. It contains authentication, an HTTP GET data fetch and data conversions. The explanation is below the code. Again, check out the open-source connectors if you need further assistance.
var cc = DataStudioApp.createCommunityConnector();
const URL_DATA = 'https://www.myverysecretdomain.com/api';
const URL_PING = 'https://www.myverysecretdomain.com/ping';
const AUTH_USER = 'auth.user'
const AUTH_KEY = 'auth.key';
const JSON_TAG = 'user';
String.prototype.format = function() {
// https://coderwall.com/p/flonoa/simple-string-format-in-javascript
a = this;
for (k in arguments) {
a = a.replace("{" + k + "}", arguments[k])
}
return a
}
function httpGet(user, token, url, params) {
try {
// this depends on the URL you are connecting to
var headers = {
'ApiUser': user,
'ApiToken': token,
'User-Agent': 'my super freaky Google Data Studio connector'
};
var options = {
headers: headers
};
if (params && Object.keys(params).length > 0) {
var params_ = [];
for (const [key, value] of Object.entries(params)) {
var value_ = value;
if (Array.isArray(value))
value_ = value.join(',');
params_.push('{0}={1}'.format(key, encodeURIComponent(value_)))
}
var query = params_.join('&');
url = '{0}?{1}'.format(url, query);
}
var response = UrlFetchApp.fetch(url, options);
return {
code: response.getResponseCode(),
json: JSON.parse(response.getContentText())
}
} catch (e) {
throwConnectorError(e);
}
}
function getCredentials() {
var userProperties = PropertiesService.getUserProperties();
return {
username: userProperties.getProperty(AUTH_USER),
token: userProperties.getProperty(AUTH_KEY)
}
}
function validateCredentials(user, token) {
if (!user || !token)
return false;
var response = httpGet(user, token, URL_PING);
if (response.code == 200)
console.log('API key for the user %s successfully validated', user);
else
console.error('API key for the user %s is invalid. Code: %s', user, response.code);
return response;
}
function getAuthType() {
var cc = DataStudioApp.createCommunityConnector();
return cc.newAuthTypeResponse()
.setAuthType(cc.AuthType.USER_TOKEN)
.setHelpUrl('https://www.myverysecretdomain.com/index.html#authentication')
.build();
}
function resetAuth() {
var userProperties = PropertiesService.getUserProperties();
userProperties.deleteProperty(AUTH_USER);
userProperties.deleteProperty(AUTH_KEY);
console.info('Credentials have been reset.');
}
function isAuthValid() {
var credentials = getCredentials()
if (credentials == null) {
console.info('No credentials found.');
return false;
}
var response = validateCredentials(credentials.username, credentials.token);
return (response != null && response.code == 200);
}
function setCredentials(request) {
var credentials = request.userToken;
var response = validateCredentials(credentials.username, credentials.token);
if (response == null || response.code != 200) return { errorCode: 'INVALID_CREDENTIALS' };
var userProperties = PropertiesService.getUserProperties();
userProperties.setProperty(AUTH_USER, credentials.username);
userProperties.setProperty(AUTH_KEY, credentials.token);
console.info('Credentials have been stored');
return {
errorCode: 'NONE'
};
}
function throwConnectorError(text) {
DataStudioApp.createCommunityConnector()
.newUserError()
.setDebugText(text)
.setText(text)
.throwException();
}
function getConfig(request) {
// ToDo: handle request.languageCode for different languages being displayed
console.log(request)
var params = request.configParams;
var config = cc.getConfig();
// ToDo: add your config if necessary
config.setDateRangeRequired(true);
return config.build();
}
function getDimensions() {
var types = cc.FieldType;
return [
{
id:'id',
name:'ID',
type:types.NUMBER
},
{
id:'name',
name:'Name',
isDefault:true,
type:types.TEXT
},
{
id:'email',
name:'Email',
type:types.TEXT
}
];
}
function getMetrics() {
return [];
}
function getFields(request) {
Logger.log(request)
var fields = cc.getFields();
var dimensions = this.getDimensions();
var metrics = this.getMetrics();
dimensions.forEach(dimension => fields.newDimension().setId(dimension.id).setName(dimension.name).setType(dimension.type));
metrics.forEach(metric => fields.newMetric().setId(metric.id).setName(metric.name).setType(metric.type).setAggregation(metric.aggregations));
var defaultDimension = dimensions.find(field => field.hasOwnProperty('isDefault') && field.isDefault == true);
var defaultMetric = metrics.find(field => field.hasOwnProperty('isDefault') && field.isDefault == true);
if (defaultDimension)
fields.setDefaultDimension(defaultDimension.id);
if (defaultMetric)
fields.setDefaultMetric(defaultMetric.id);
return fields;
}
function getSchema(request) {
var fields = getFields(request).build();
return { schema: fields };
}
function convertValue(value, id) {
// ToDo: add special conversion if necessary
switch(id) {
default:
// value will be converted automatically
return value[id];
}
}
function entriesToDicts(schema, data, converter, tag) {
return data.map(function(element) {
var entry = element[tag];
var row = {};
schema.forEach(function(field) {
// field has same name in connector and original data source
var id = field.id;
var value = converter(entry, id);
// use UI field ID
row[field.id] = value;
});
return row;
});
}
function dictsToRows(requestedFields, rows) {
return rows.reduce((result, row) => ([...result, {'values': requestedFields.reduce((values, field) => ([...values, row[field]]), [])}]), []);
}
function getParams (request) {
var schema = this.getSchema();
var params;
if (request) {
params = {};
// ToDo: handle pagination={startRow=1.0, rowCount=100.0}
} else {
// preview only
params = {
limit: 20
}
}
return params;
}
function getData(request) {
Logger.log(request)
var credentials = getCredentials()
var schema = getSchema();
var params = getParams(request);
var requestedFields; // fields structured as I want them (see above)
var requestedSchema; // fields structured as Google expects them
if (request) {
// make sure the ordering of the requested fields is kept correct in the resulting data
requestedFields = request.fields.filter(field => !field.forFilterOnly).map(field => field.name);
requestedSchema = getFields(request).forIds(requestedFields);
} else {
// use all fields from schema
requestedFields = schema.map(field => field.id);
requestedSchema = api.getFields(request);
}
var filterPresent = request && request.dimensionsFilters;
//var filter = ...
if (filterPresent) {
// ToDo: apply request filters on API level (before the API call) to minimize data retrieval from API (number of rows) and increase speed
// see https://developers.google.com/datastudio/connector/filters
// filter = ... // initialize filter
// filter.preFilter(params); // low-level API filtering if possible
}
// get HTTP response; e.g. check for HTTP RETURN CODE on response.code if necessary
var response = httpGet(credentials.username, credentials.token, URL_DATA, params);
// get JSON data from HTTP response
var data = response.json;
// convert the full dataset including all fields (the full schema). non-requested fields will be filtered later on
var rows = entriesToDicts(schema, data, convertValue, JSON_TAG);
// match rows against filter (high-level filtering)
//if (filter)
// rows = rows.filter(row => filter.match(row) == true);
// remove non-requested fields
var result = dictsToRows(requestedFields, rows);
console.log('{0} rows received'.format(result.length));
//console.log(result);
return {
schema: requestedSchema.build(),
rows: result,
filtersApplied: filterPresent ? true : false
};
}
A sample request that filters for all users with names starting with J.
{
configParams={},
dateRange={
endDate=2020-05-14,
startDate=2020-04-17
},
fields=[
{name=name}
],
scriptParams={
lastRefresh=1589543208040
},
dimensionsFilters=[
[
{
values=[^J.*],
operator=REGEXP_EXACT_MATCH,
type=INCLUDE,
fieldName=name
}
]
]
}
The JSON data returned by the HTTP GET contains all fields (full schema).
[ { user:
     { id: 1,
       name: 'Jane Doe',
       email: 'jane@doe.com' } },
  { user:
     { id: 2,
       name: 'John Doe',
       email: 'john@doe.com' } } ]
Once the data is filtered and converted/transformed, you'll get this result, which is perfectly displayed by Google Data Studio:
{
filtersApplied=true,
schema=[
{
isDefault=true,
semantics={
semanticType=TEXT,
conceptType=DIMENSION
},
label=Name,
name=name,
dataType=STRING
}
],
rows=[
{values=[Jane Doe]},
{values=[John Doe]}
]
}
getData should return data for only the requested fields. request.fields contains the list of all requested fields; limit your data to those fields only and then send the parsed data back.
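A minimal sketch of that idea (the source rows are made-up placeholder data, and it assumes a getFields() helper like the one in the answer above), keeping only the fields Data Studio asked for, in the requested order:
function getData(request) {
    var requestedFieldIds = request.fields.map(function(field) { return field.name; });
    var requestedFields = getFields(request).forIds(requestedFieldIds);

    // sourceRows stands in for whatever your API actually returned (hypothetical data)
    var sourceRows = [
        { LASTNAME: 'test' },
        { LASTNAME: 'Dummy' }
    ];

    // Keep only the requested fields for each row
    var rows = sourceRows.map(function(row) {
        return { values: requestedFieldIds.map(function(id) { return row[id]; }) };
    });

    return {
        schema: requestedFields.build(),
        rows: rows
    };
}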

ReferenceError: data is not defined when writing a piped gulp script

gulp.task('default', function(done) {
    inquirer.prompt([{
        type: `input`,
        message: `Enter the path`,
        default: `./admin/admin.json`,
        name: `path`
    }]).then(function(answers) {
        console.log(answers.path);
        console.log('answers');
        mydefaultTaskTwo(null, answers.path).pipe(pipedFunction());
        done();
    });
});

function mydefaultTaskTwo(cb, path) {
    let data = '';
    try {
        data = fs.readFileSync(path, 'utf-8');
    } catch (e) {
        console.log(`Error: ${e}`);
    }
    return data;
}

function pipedFunction() {
    let object = JSON.parse(data);
    object['main'] = 'admin';
    data = JSON.stringify(object);
    const readable = Readable.from(data);
    return readable;
}
I understand that src returns a stream and pipe takes that stream and return a stream, but how do you feed in the stream into the pipedFunction called inside of pipe? I am unsure how it works. I get the following error:
ReferenceError: data is not defined.
Is there something I am misunderstanding about gulp scripts?
Basically you define data as a local, function-scoped variable and then try to reach it from a different scope, where it's undefined. So you need to make use of the fact that data is returned, and pass it along, like:
var data = mydefaultTaskTwo(null, answers.path);
data.pipe(pipedFunction(data));
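Because the string returned by readFileSync has no pipe method of its own, a fuller sketch (keeping the names from the question; the write destination is only an example) would have pipedFunction accept the data and return the readable stream to pipe from:
function pipedFunction(data) {
    // Parse, modify and re-serialise the JSON, then wrap it in a readable stream
    let object = JSON.parse(data);
    object['main'] = 'admin';
    return Readable.from(JSON.stringify(object));
}

gulp.task('default', function(done) {
    inquirer.prompt([{ type: 'input', message: 'Enter the path', default: './admin/admin.json', name: 'path' }])
        .then(function(answers) {
            const data = mydefaultTaskTwo(null, answers.path);
            pipedFunction(data)
                .pipe(fs.createWriteStream('./admin/admin.out.json')) // example destination
                .on('finish', done);
        });
});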

I get undefined values in my field value when inserting multiple documents after converting csv to json string and parsing. Why?

Basically, I am trying to write an import function for CSV files where the CSV file gets converted to JSON before being inserted into MongoDB. This is my code.
//require the csvtojson converter class
var Converter = require("csvtojson").Converter;
// create a new converter object
var converter = new Converter({});
var MongoClient = require('mongodb').MongoClient;
var url = 'mongodb://localhost:27017/myproject';

// call the fromFile function which takes in the path to your
// csv file as well as a callback function
converter.fromFile("./NTA-SAM-Inventory-List-Security-Management-New_2017.csv", function(err, result) {
    // if an error has occurred then handle it
    if (err) {
        console.log("An Error Has Occurred");
        console.log(err);
    }
    // the result of the conversion
    var jsonResult = result;
    console.log(jsonResult);
    var jsobject = JSON.stringify(jsonResult);
    var jsonobject = JSON.parse(jsobject);
    var f = jsonobject.length;
    console.log(f);
    MongoClient.connect(url, function(err, db) {
        if (err) {
            console.log(err);
        }
        for (i = 0; i < f; i++) {
            var insertDocument = function() {
                db.collection('documents').insertOne({
                    //'_id': Object.keys(obj).length,
                    'indexNo': jsonobject.indexNo,
                    'samID': jsonobject.samID,
                    'Type': jsonobject.Type,
                    'InventoryStatus': jsonobject.InventoryStatus,
                    'dateIn': jsonobject.dateIn,
                    'Remarks': jsonobject.Remarks,
                    'dateOut': jsonobject.dateOut,
                    //'Remarks': jsonobject.remarks,
                    'ntaSamRequestRef': jsonobject.ntaSamReqRef
                    //'Created Date': "<b>" + day + "/" + month + "/" + year + "</b>"
                }, function(err, results) {
                    if (err) throw err;
                    console.log(results);
                });
            };
            insertDocument(db, function() {
                if (err)
                    throw err;
                else {
                    console.log('insert');
                }
                db.close();
            });
        }
    });
    console.log("Inserted " + f + " documents into the documents collection.");
});
So far, I have tried converting a random file with 1400 records into a JSON string, parsing it and then inserting it. But somehow I keep getting undefined for my fields whenever I insert; the result shows my respective fields with undefined values.
Which part of my jsonobject.indexNo is wrong, in the sense of jsonobject.field1value, jsonobject.field2value etc.? How should I get the values from my JSON string after parsing?
I am using node.js to run it and MongoDB as the database. The conversion works nicely; it's just this part about inserting the documents that doesn't. Thanks in advance!
db.collection('documents').insertOne is an async method; you can't run it in a loop like that. A workaround is to use the async library to handle it. I suggest async.each.
Eg:
// Consider jsonResult is an array
var jsonResult = result;
async.each(jsonResult,
    // Here, jsonobject is a child object of the jsonResult array
    function(jsonobject, callback) {
        db.collection('documents').insertOne({
            //'_id': Object.keys(obj).length,
            'indexNo': jsonobject.indexNo,
            'samID': jsonobject.samID,
            'Type': jsonobject.Type,
            'InventoryStatus': jsonobject.InventoryStatus,
            'dateIn': jsonobject.dateIn,
            'Remarks': jsonobject.Remarks,
            'dateOut': jsonobject.dateOut,
            //'Remarks': jsonobject.remarks,
            'ntaSamRequestRef': jsonobject.ntaSamReqRef
            //'Created Date': "<b>" + day + "/" + month + "/" + year + "</b>"
        }, function(err) {
            // Trigger the callback once this insert has finished
            callback(err);
        });
    },
    // 3rd param is the function to call when everything's done
    function(err) {
        // All tasks are done now
        doSomethingOnceAllAreDone();
    }
);
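Since jsonResult is already the full array of rows, another option worth noting (a sketch; it assumes a MongoDB driver version that supports insertMany) is a single bulk insert, which avoids the per-document callbacks entirely:
MongoClient.connect(url, function(err, db) {
    if (err) throw err;
    // Insert the whole parsed array in one call
    db.collection('documents').insertMany(jsonResult, function(err, results) {
        if (err) throw err;
        console.log('Inserted ' + results.insertedCount + ' documents');
        db.close();
    });
});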

aws lambda s3 function isn't called inside alexa skills kit

I am trying to create a skill for Amazon Echo that will read a JSON file from AWS S3. When I call the code from the basic S3 get function it works, and the Amazon Alexa code works on its own.
But when I call them together, the S3 callback gets skipped: for the following code, the console.log calls before and after s3.getObject() run, but the one in the middle gets skipped. I do not understand why.
I also checked whether S3 was being called, and it is.
let aws = require('aws-sdk');
let s3 = new aws.S3({ apiVersion: '2006-03-01' });

function callS3() {
    console.log('loading S3 function');
    var myData = [];
    const params = {
        Bucket: 'cvo-echo',
        Key: 'data.json'
    };
    console.log("trying to get s3");
    s3.getObject(params, (err, data) => {
        if (err) {
            console.log('error in s3 get: \n' + err);
            //const message = `Error getting object ${key} from bucket ${bucket}.
            // Make sure they exist and your bucket is in same region as this function.
            //console.log(message);
        } else {
            console.log('CONTENT TYPE: ', data.ContentType);
            console.log('Data body: \n' + data.Body.toString());
            myData = JSON.parse(data.Body.toString());
            console.log('myData.length = ' + myData.length);
        }
        console.log('myData >> ' + myData);
    });
    console.log('finished callS3() func');
    return myData;
}
This might be a control flow issue; I've worked with Amazon's SDK before and ran into similar issues. Try implementing async within your code to have better control of what happens when. This way methods won't get skipped.
UPDATE: adding some code examples of what you could do.
function callS3(callback) {
    console.log('loading S3 function');
    var myData = [];
    const params = {
        Bucket: 'cvo-echo',
        Key: 'data.json'
    };
    console.log("trying to get s3");
    s3.getObject(params, (err, data) => {
        if (err) {
            console.log('error in s3 get: \n' + err);
            //const message = `Error getting object ${key} from bucket ${bucket}.
            // Make sure they exist and your bucket is in same region as this function.
            //console.log(message);
            callback(err, null); // callback the error.
        } else {
            console.log('CONTENT TYPE: ', data.ContentType);
            console.log('Data body: \n' + data.Body.toString());
            myData = JSON.parse(data.Body.toString());
            console.log('myData.length = ' + myData.length);
            console.log('myData >> ' + myData);
            console.log('finished callS3() func');
            // Include the callback inside of the S3 call to make sure this function
            // doesn't return until the S3 call completes.
            callback(null, myData); // first element is the error (null if none), second is your data.
        }
    });
}
/*
 This MIGHT work without async, but just in case you can read more about
 async.waterfall, where functions pass down values to the next function.
*/
async.waterfall([
    callS3 // you can include more functions here; the callback from the last function will be available for the next.
    // myNextFunction
], function(err, myData) {
    // you can use myData here.
});
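To tie that back to the skill, a sketch of using the callback version from an intent handler so the Alexa response is only built once the S3 read has completed (onIntent and buildSpeechletResponse are placeholder names in the style of the classic Alexa sample code, not from the question):
function onIntent(intentRequest, session, callback) {
    callS3(function(err, myData) {
        if (err) {
            callback({}, buildSpeechletResponse('Error', 'Could not load the data file.', null, true));
            return;
        }
        // myData is only available here, inside the callback
        callback({}, buildSpeechletResponse('Data', 'Loaded ' + myData.length + ' items.', null, true));
    });
}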
It's a timing issue. Here is an example of loading a JSON file from an S3 share when a session is started.
function onLaunch(launchRequest, session, callback) {
    var sBucket = "your-bucket-name";
    var sFile = "data.json";
    var params = { Bucket: sBucket, Key: sFile };
    var s3 = new AWS.S3();
    s3.getObject(params, function(err, data) {
        if (!err) {
            var json = JSON.parse(new Buffer(data.Body).toString("utf8"));
            for (var i = 0; i < json.length; i++) {
                console.log("name:" + json[i].name + ", age:" + json[i].age);
            }
            getWelcomeResponse(callback);
        } else {
            console.log(err.toString());
        }
    });
}

Unable to send data in response to the view in nodejs

I am trying to create a simple web application which fires an http.request call, gets the data and displays it in the HTML view (EJS here). I am able to fire the request, get the data, massage it etc., but I'm unable to pass it to the view. Sample code is below:
var searchData = [];
router.post('/', requesthandler);

function requesthandler(req, res) {
    var options = {
        host: url,
        port: 9999,
        path: qstring,
        method: 'GET'
    };
    var reqget = http.request(options, responsehandler);
    reqget.end();
    console.log('Rendering now:............................ ');
    res.render('result', { title: 'Results Returned', searchdata: searchData });
}

function responsehandler(ress) {
    console.log('STATUS: ' + ress.statusCode);
    ress.on('data', function(chunk) {
        output += chunk;
        console.log('BODY: ');
    });
    /* reqget.write(output); */
    ress.on('end', parseresponse);
}

function parseresponse() {
    var data = JSON.parse(output);
    console.log(data.responseHeader);
    // populate searchData here from the data object
    searchData.push({ /* some data */ });
}

function errorhandler(e) {
    console.error(e);
}

module.exports = router;
The problem is I am unable to pass the object searchData to the view via res.render().
'Rendering now: ...' gets executed before execution starts in parseresponse(), so the page is displayed without the data, which is consistent with how callbacks work. So how can I pass the data object to the view once searchData is loaded in parseresponse()?
PS: I am able to print all the console statements.
define res variable globally:
var res;
function requesthandler(req, resObj) {
    res = resObj; // set it to the resObj
}
wrap res.render inside a function like this:
function renderPage() {
    res.render('result', { title: 'Results Returned', searchdata: searchData });
}
then in the parseresponse function do this:
function parseresponse() {
    var data = JSON.parse(output);
    searchData.push({ /* some data */ });
    renderPage();
}
Hope this solves your problem.
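One design note: because res is stored at module level here, two overlapping requests would overwrite each other's response object. A sketch of an alternative that keeps the same structure but threads res through instead of using a global:
function requesthandler(req, res) {
    var options = { host: url, port: 9999, path: qstring, method: 'GET' };
    // Pass res along so the page is rendered only once parsing has finished
    var reqget = http.request(options, function(ress) {
        responsehandler(ress, res);
    });
    reqget.end();
}

function responsehandler(ress, res) {
    var output = '';
    ress.on('data', function(chunk) { output += chunk; });
    ress.on('end', function() {
        var data = JSON.parse(output);
        var searchData = [data.responseHeader]; // placeholder for whatever you extract
        res.render('result', { title: 'Results Returned', searchdata: searchData });
    });
}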