NodeJS creating JSON using all JSONs uploaded by user

I am trying to make a JSON file using all the JSON files in a directory. Every time a user uploads a new JSON file, a new combined JSON should be generated. I want the new JSON to have a custom structure, hence I can't use any libraries. I have the following code:
router.post('/upload', function(req, res) {
    var sampleFile;
    var bbbid = req.body.bbbid;
    DDLFile = req.files.DDLFile;
    j++;
    DDLFile.mv('/uploads/' + bbbid + '/device' + j + '.json', function (err) {
        if (err) {
            res.status(500).send(err);
        }
        else {
            res.redirect("fileuploaded");
        }
    });
    var myfiles = [];
    var fs = require('fs');
    var arrayOfFiles = fs.readdirSync('/uploads/' + bbbid);
    arrayOfFiles.forEach(function (file) {
        myfiles.push(file);
        console.log(myfiles);
    });
    console.log('No of Files:', myfiles.length);
    var files = myfiles.length;
    console.log('Files:', files);
    console.log('J', j);
    var cddl = "{ BBBID:" + bbbid;
    if (files == 0) {
        cddl = cddl + '}';
        console.log('Entered if loop');
    }
    else {
        var i = 0;
        /*var obj;
        fs.readFile('/uploads/' + bbbid + '/device' + j + '.json', 'utf8', function (err, data) {
            if (err) throw err;
            obj = JSON.parse(data);
        });*/
        for (i = 0; i < files; i++) {
            console.log('Entered For loop');
            console.log('Count:', count);
            console.log('Sensor:', sensor);
            try {
                var obj = fs.readFileSync('/uploads/' + bbbid + '/device' + count + '.json', 'utf8');
            }
            catch (err) {
                console.log(err);
            }
            console.log('everything good');
            var obj1 = JSON.parse(obj);
            console.log('hi');
            //JSON.stringify(obj);
            var ddl = require('/uploads/' + bbbid + '/device' + count + '.json');
            console.log('o');
            cddl = cddl + ", {" + obj1.DDL.Sensor.Description.Verbose_Description + ":" + JSON.stringify(ddl) + "}";
            JSON.stringify(cddl);
            console.log(cddl);
            count++;
            sensor++;
            console.log('Count:', count);
            console.log('Sensor:', sensor);
        }
        cddl = cddl + '}';
        JSON.stringify(cddl);
        console.log(cddl);
    }
});
I want to generate a new cddl every time a new file is uploaded. I'm having a lot of problems. Help, please!

I see two problems. First, instead of this:
try {
    var obj = fs.readFileSync('/uploads/' + bbbid + '/device' + count + '.json', 'utf8');
}
catch (err) {
    console.log(err);
}
console.log('everything good');
var obj1 = JSON.parse(obj);
you can write (fix the path, if necessary):
var obj1 = require('./uploads/' + bbbid + '/device' + count + '.json')
Then, when you call:
JSON.stringify(cddl);
you're not saving the result anywhere: JSON.stringify returns the new string, it doesn't modify its argument. So save the result where you need it:
var a = JSON.stringify(cddl);
And when everything is set, don't forget to write the result back to a file using fs.writeFileSync, or the async fs.writeFile.
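Putting both fixes together, here's a minimal sketch of the combining step (assuming the /uploads/<bbbid> layout from the question, that each uploaded file carries the DDL.Sensor.Description.Verbose_Description field, and that the output name combined.json is just an illustration):

var fs = require('fs');
var path = require('path');

function buildCddl(bbbid) {
    var dir = '/uploads/' + bbbid;
    var cddl = { BBBID: bbbid };
    fs.readdirSync(dir)
        // Skip the output file itself so it isn't folded into the next run.
        .filter(function (f) { return f.slice(-5) === '.json' && f !== 'combined.json'; })
        .forEach(function (file) {
            // Read and parse each uploaded device JSON.
            var obj = JSON.parse(fs.readFileSync(path.join(dir, file), 'utf8'));
            // Key each device by its verbose description, per the question's structure.
            cddl[obj.DDL.Sensor.Description.Verbose_Description] = obj;
        });
    // JSON.stringify returns the serialized string; save it, then persist it.
    var out = JSON.stringify(cddl);
    fs.writeFileSync(path.join(dir, 'combined.json'), out);
    return out;
}

Calling buildCddl(bbbid) from inside DDLFile.mv's callback (rather than right after starting the move) would also avoid the race in the original code, where the directory is listed before the new upload has finished moving.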

Related

Nodejs with mysql query not giving dynamic value from database

I am trying to upload a file to two locations. It works fine when I test it from Postman, but when we try to upload a file from my mobile app it returns an error.
One interesting thing: after restarting the server, if we first hit the endpoint from Postman, then it works from the mobile app as well, but in that case the dynamic value does not update; the value from the Postman hit is used in the dynamic field.
// app.get('/empid/:id', (req, res) => {
app.post('/upload/:id', function (req, res) {
    pool.query('SELECT first_name, username FROM user_detail where user_detail_pk=?', [req.params.id], (err, rows, fields) => {
        if (!err)
            res.send(rows);
        else
            console.log(err);
        rows.forEach((row) => {
            console.log(`${row.first_name}`);
            var fname = (`${row.first_name}`);
            var username = (`${row.username}`);
            console.log('First Name is : ' + fname);
            console.log('UserName Name is : ' + username);
            var dateObj = new Date();
            //var month = dateObj.getUTCMonth() + 1; //months from 1-12
            var year = dateObj.getUTCFullYear();
            var month = dateObj.toLocaleString('default', { month: 'long' });
            var day = dateObj.getUTCDate();
            datewise = "datewise" + "/" + year + "/" + month + "/" + day + "/" + fname + "(" + username + ")";
            empwise = "empwise" + "/" + fname + "(" + username + ")" + "/" + year + "/" + month + "/" + day;
        });
        var storage = multer.diskStorage({
            destination: function (req, file, cb) {
                let Id = req.body.id;
                let datewisepath = `/home/KunDan/RestAPIUpload/Conveyance/${datewise}`;
                let empwisepath = `/home/KunDan/RestAPIUpload/Conveyance/${empwise}`;
                fs.mkdirsSync(datewisepath);
                fs.mkdirsSync(empwisepath);
                cb(null, datewisepath);
                cb(null, empwisepath);
                // callback(null, 'uploads/' + req.user.id);
            },
            /* filename: function (req, file, callback) {
                callback(null, file.originalname + '-' + Date.now());
            }
            });
            */
            filename: function (req, file, cb) {
                console.log(file);
                let extArray = file.mimetype.split("/");
                let extension = extArray[extArray.length - 1];
                cb(null, file.originalname + '-' + Date.now() + "." + extension);
                console.log(req.file);
            }
        })
        var upload = multer({ storage: storage }).array('dataFile', 50);
        upload(req, res, function (err) {
            console.log(req.body);
            console.log(req.files);
            if (err) {
                return res.end("Error uploading file.");
            }
            //return res.end("File is uploaded");
            else
                ("File is uploaded");
        });
    });
});
Error Screenshot is Here
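One likely cause of the stale value: datewise and empwise are assigned without var/let/const, so they become implicit globals that survive between requests; res.send(rows) also fires before the upload has run. A minimal sketch of a per-request version (same names as in the question; the empwise branch, multer setup, and error handling are elided):

app.post('/upload/:id', function (req, res) {
    pool.query('SELECT first_name, username FROM user_detail WHERE user_detail_pk=?',
        [req.params.id], (err, rows) => {
            if (err) return res.status(500).send(err);
            // Locals, recomputed on every request, instead of implicit globals.
            const fname = rows[0].first_name;
            const username = rows[0].username;
            const d = new Date();
            const datewise = `datewise/${d.getUTCFullYear()}/${d.toLocaleString('default', { month: 'long' })}/${d.getUTCDate()}/${fname}(${username})`;
            // ...configure multer's destination with these locals, run
            // upload(req, res, ...), and send the response only once,
            // from inside upload's callback.
        });
});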

Extract data from JSON within Lambda so it's not undefined

Looking for advice / a second opinion. I'm trying to pass JSON via HTTP API (API Gateway) > Lambda. I'm receiving the data (see the CloudWatch pic), but getting undefined when trying to extract values. The file is being written to S3, but its content is undefined.
I've included the Lambda code and a picture of the CloudWatch logs. I'm about there :). Newbie here...
Logs
Lambda Code
var AWS = require('aws-sdk');
var s3 = new AWS.S3();

exports.handler = async (event, context, callback) => {
    var bucketName = process.env.bucketName;
    var folder = process.env.folder;
    var filename = getFileName();
    console.log("Filename:" + filename);
    var raw = JSON.stringify(event.body);
    console.log("raw after stringify:" + raw);
    var results = JSON.parse(raw);
    console.log("results:" + results);
    let firstname = results.firstName;
    console.log("firstName:" + firstname);
    let lastname = results.lastName;
    console.log("lastName:" + lastname);
    let message = results.Message;
    console.log("Message:" + message);
    var content = message + "," + firstname + "," + lastname;
    console.log("content:" + content);
    var keyName = getKeyName(folder, filename);
    var params = { Bucket: bucketName, Key: keyName, Body: content };
    s3.putObject(params, function (err, data) {
        if (err)
            console.log(err);
        else
            console.log("Successfully saved object to: " + bucketName + "/" + keyName);
    });
    function getKeyName(folder, filename) {
        return folder + '/' + filename;
    }
    function getFileName() {
        var _uuid = uuidv4();
        var _date = Date.now();
        return _uuid + "-" + _date;
    }
    function uuidv4() {
        return 'xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx'.replace(/[xy]/g, function (c) {
            var r = Math.random() * 16 | 0, v = c == 'x' ? r : (r & 0x3 | 0x8);
            return v.toString(16);
        });
    }
    var html = '<html><head><title>Prayer received result</title></head>' +
        '<body><h1>Your Prayer has been received!</h1></body></html>';
    //callback(null, res); - use this when using proxy model
    callback(null, html);
};
I made the following changes:
//var raw = JSON.stringify(event.body);
//console.log("raw after stringify:" + raw);
var results = JSON.parse(event.body);
console.log("results:" + results);
Hope this helps others. As of this post, I'm new to Lambda and JSON.
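For anyone wondering why the change works: with a proxy-style API Gateway integration, event.body arrives as a JSON string, so JSON.stringify(event.body) merely re-escapes that string and the following JSON.parse hands the same string back instead of an object, which is why every property lookup was undefined. A defensive version (my own sketch) tolerates both a string and an already-parsed body:

// event.body is a JSON string for proxy-style integrations;
// a direct test invocation may pass an object instead.
const results = typeof event.body === 'string'
    ? JSON.parse(event.body)
    : event.body;
console.log("firstName:" + results.firstName);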

Sending multiple JSON requests to a server under one request in nodejs

I have some files downloaded and I want to push them to a server. Every time I try to push a file I get the error:
Error: Can't set headers after they are sent.
I have a for loop that reads and parses all the files, and after that I want to send them to the server one by one.
app.get('/dataparser', function (req, res) {
    var fs = require('fs');
    var obj;
    var jsonGis = new Array();
    var jsonArray;
    var filePaths = [];
    const downloadFolder = './sampletest/';
    var mtimes = {};
    var reloadTimes = 10000;
    fs.readdir(downloadFolder, (err, files) => {
        files.forEach(file => {
            filePaths.push("sampletest/" + file);
        });
    });
    var execFunction = function () {
        for (var i = 0; i < filePaths.length; i++) {
            parseFile(filePaths[i], mtimes[filePaths[i]]);
        }
    };
    execFunction();
    setInterval(execFunction, reloadTimes);
    function parseFile(fileName, lastModifiedTime) {
        fs.stat(fileName, function (err, fd) {
            for (var i = 0, len = filePaths.length; i < len; i++) {
                if (fd.mtime !== lastModifiedTime) {
                    mtimes[fileName] = fd.mtime;
                    fs.readFile(filePaths[i], function (err, data) {
                        if (err) {
                            return console.error(err);
                        }
                        obj = JSON.parse(data);
                        jsonGis.push('"Person1"');
                        jsonGis.push('"' + obj.pages[1].answers[2].values[0] + '"');
                        jsonGis.push('"person2"');
                        jsonGis.push('"' + obj.pages[1].answers[0].values[0] + '"');
                        jsonGis.push('"codewals"');
                        jsonGis.push('"42343GSDS"');
                        jsonGis.push('"geometry":{');
                        jsonGis.push('"x":' + obj.pages[1].answers[4].values[0].coordinates.latitude + ',');
                        jsonGis.push('"y":' + obj.pages[1].answers[4].values[0].coordinates.longitude);
                        var str = "[{ " + jsonGis[0] + jsonGis[1] + jsonGis[2] + ": " + jsonGis[3] + "," + jsonGis[4] + ": " + jsonGis[16] + "}}]";
                        //pushing to the server
                        console.log("check here");
                        console.log(str);
                        var qs = require("querystring");
                        var http = require("http");
                        var options = {
                            "method": "POST",
                            "hostname": "twst2.gtw.com",
                            "port": null,
                            "path": "localpath/",
                            "headers": {
                                "accept": "application/json",
                                "content-type": "application/x-www-form-urlencoded",
                            }
                        };
                        var req = http.request(options, function (res) {
                            var chunks = [];
                            res.on("data", function (chunk) {
                                chunks.push(chunk);
                            });
                            res.on("end", function () {
                                var body = Buffer.concat(chunks);
                                console.log(body.toString());
                            });
                        });
                        req.write(qs.stringify({ features: str }));
                        res.send("The server was updated");
                        req.end();
                    });
                }
            }
        });
    }
});
I just need to send the data to the server from file1.json, then file2.json, then file3.json, and so on.
Try creating a separate callback function that includes .write, .send, and .end, then let it be the callback for .request.
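A sketch of that refactor (my reading of the suggestion; `payloads` is a hypothetical array holding the `str` value built for each file, and `options` is as in the question). The key point is that res.send must run exactly once per Express request, so the per-file POST is pulled into its own function and the single response goes out only after the last POST completes:

var http = require("http");
var qs = require("querystring");

// One HTTP POST per file; calls `done` when the upstream response ends.
function postFeature(options, str, done) {
    var request = http.request(options, function (response) {
        var chunks = [];
        response.on("data", function (chunk) { chunks.push(chunk); });
        response.on("end", function () {
            console.log(Buffer.concat(chunks).toString());
            done();
        });
    });
    request.write(qs.stringify({ features: str }));
    request.end();
}

// Inside the route handler: POST every payload, but answer the
// Express request exactly once, after the last POST completes.
function postAll(payloads, options, res) {
    var pending = payloads.length;
    payloads.forEach(function (str) {
        postFeature(options, str, function () {
            if (--pending === 0) res.send("The server was updated");
        });
    });
}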

AS3: How to check all zip files have been extracted?

How can I check that all zip files have been extracted?
var reader: ZipFileReader = new ZipFileReader();
reader.addEventListener(ZipEvent.ZIP_DATA_UNCOMPRESS, zipDataUncompressHandler);
var zipFile: File = new File(zipFilePath);
reader.open(zipFile);
var list: Array = reader.getEntries();
zipFileCount = list.length;
trace(zipFileCount + " Numbers of items");
for each (var entry: ZipEntry in list) {
    var filename: String = entry.getFilename();
    if (entry.isDirectory()) {
        trace("DIR --->" + filename);
    } else {
        trace("FILE --->" + filename + "(" + entry.getCompressRate() + ")");
        reader.unzipAsync(entry);
    }
    zipFileWritedCount = zipFileWritedCount + 1;
}

function zipDataUncompressHandler(e: ZipEvent): void {
    var entry: ZipEntry = e.entry;
    var zfile: File = File.userDirectory.resolvePath('somefolder' + File.separator + entry.getFilename());
    var fs: FileStream = new FileStream();
    fs.open(zfile, FileMode.WRITE);
    fs.writeBytes(e.data);
    fs.close();
    trace("Refresh Scene");
    //include "RefreshScene.as";
}
My files were extracted, but I need to check that all files were actually extracted. Is there any way I can do that?
I am using airxzip to work with the zip file. Also, I'd like to add a loader if possible.
You can shorten zipFileWritedCount = zipFileWritedCount + 1; by using just zipFileWritedCount += 1; or even zipFileWritedCount++;
Anyway, for checking the "all files extracted" amount, you could try the equality == operator, as mentioned in the manual. A quick example:
for each (var entry: ZipEntry in list)
{
    var filename: String = entry.getFilename();
    if (entry.isDirectory()) { trace("DIR --->" + filename); }
    else
    {
        trace("FILE --->" + filename + "(" + entry.getCompressRate() + ")");
        reader.unzipAsync(entry);
    }
    zipFileWritedCount += 1; //add plus 1
    if (zipFileWritedCount == zipFileCount) //if equal to zipFileCount..
    {
        trace("unzipped all files...");
        trace("zipFileCount: " + zipFileCount + " -VS- " + "zipFileWritedCount: " + zipFileWritedCount);
    }
}

multiple async mongo requests generate messed-up returns

I'm trying to build a JSON document out of multiple requests to my mongodb.
Since I'm not using DBRef, I have to build the "table joins" by myself, and that's how I ended up in this mess.
This is the code that has been giving me headaches for a couple of days now (the mongo part is done with mongoskin):
var getUserFeed = function(thelimit, out) {
    userfeed = db.collection("userfeed");
    apparel = db.collection("apparel");
    store = db.collection("stores");
    if (thelimit)
        args = {limit: thelimit, sort: [['date', -1]]};
    userfeed.find({}, args).toArray(function(e, feed) {
        if (e) console.log("error: ", e);
        // gather apparel infos
        var i = 0;
        var ret_feeds = [];
        feed.forEach(function(cur_feed) {
            var outfits = [];
            console.log("beginning with: " + cur_feed.url);
            var resfeed = "";
            resfeed = cur_feed;
            resfeed.url = baseurl + snapurl + resfeed.url + "_small.jpg";
            i = 0;
            cur_feed.apparel_ids.forEach(function(item) {
                /*>>*/ apparel.find({"_id": item}, {limit: 1}).toArray(function(e, results) {
                    console.log(">>>>>>>>>>> APPAREL_FIND { i:" + i + "}");
                    if (e) console.log("error: ", e);
                    results = results[0];
                    if (results.apparel_cat == 1)
                        url_subcat = "pants/";
                    else if (results.apparel_cat == 2)
                        url_subcat = "shirts/";
                    else if (results.apparel_cat == 2)
                        url_subcat = "tshirts/";
                    results.thumb = baseurl + outfiturl + url_subcat + results.apparel_id + "/front.jpg";
                    results.size = "M"; ///// TOBE REAL VERY SOON
                    results.gallery = [
                        baseurl + outfiturl + url_subcat + results.apparel_id + "/model.jpg",
                        baseurl + outfiturl + url_subcat + results.apparel_id + "/front.jpg"
                    ];
                    outfits.push(results); // quick and dirty, 2 b refined..
                    i++;
                    if (i >= cur_feed.apparel_ids.length)
                    {
                        // pack it up
                        // resfeed.url = resfeed.url;
                        resfeed.outfits = outfits;
                        resfeed.fav = false;
                        resfeed.bough = false;
                        // retrieve store infos
                        /*>>>*/ store.find({"_id": resfeed.store_id}, {limit: 1}).toArray(function(e, resstore) {
                            console.log("\t############# STORE_FIND { i:" + i + "}");
                            if (e) console.log("error: ", e);
                            resfeed.store = resstore[0];
                            resfeed.store.class = "hem";
                            ret_feeds.push(resfeed);
                            if (ret_feeds.length >= feed.length)
                            {
                                console.log("\t\t###################### calling return [ ret_feeds.length = " + ret_feeds.length + " feed.length = " + feed.length);
                                out.send(ret_feeds);
                            }
                        });
                    }
                });
            });
        });
    });
}
This code fails because it returns the JSON before finishing its task, so the next time it tries to return another JSON it crashes miserably, due to the fact that the headers have already been sent.
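(A stripped-down illustration of that trap, assuming an Express route: any second send on the same response object throws.)

var express = require('express');
var app = express();

app.get('/boom', function (req, res) {
    [1, 2, 3].forEach(function (n) {
        // The second iteration throws:
        // Error: Can't set headers after they are sent.
        res.send("item " + n);
    });
});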
Now, as you can see, I have 3 collections: userfeed, apparel and stores.
The goal of this function is to retrieve all the items in the userfeed collection, extract the outfits (based on the outfit_id array that is part of each userfeed document), and also extract the store info related in the same way to each userfeed entry.
I know that async.js or an equivalent is the way to go: I've read like a gazillion other posts here on SO, but I still can't get my head around it, probably because the whole mechanism behind async.js, or flow control in general, is still out of focus in my mind.
I'm still a noob at node :)
UPDATE
I think I found the right path to understanding here: http://www.sebastianseilund.com/nodejs-async-in-practice
This guy did a terrific job of describing, use case by use case, all the ways to apply async.js to your code.
I'll post the solution as soon as I get around to it.
UPDATE 2
Thanks to the above dude I was able to work out a working solution; below is the answer.
After so much struggling I have finally managed to get a solution.
async.js was the answer, as I was (obviously) suspecting.
FYI, here's the working code.
If you'd like to point out improvements or anything else, you are more than welcome:
var getUserFeed = function(thelimit, out) {
    userfeed = db.collection("userfeed");
    apparel = db.collection("apparel");
    store = db.collection("stores");
    var args;
    if (thelimit)
        args = {limit: thelimit, sort: [['date', -1]]};
    var outfits = [];
    var feeds = [];
    async.series([
        // userfeed find
        function(callback) {
            userfeed.find({}, args).toArray(function(e, feed) {
                if (e) return callback(e);
                feeds = feed; // toArray already hands back an array
                console.log(feeds.length + " retrieved. stepping in");
                callback(null, null);
            });
        },
        // join
        function(callback) {
            async.forEach(feeds, function(thefeed, callback) {
                var i = feeds.indexOf(thefeed);
                async.parallel([
                    // load apparel infos
                    function(callback) {
                        console.log("\t >>> analyzing thefeed id " + thefeed._id);
                        async.forEach(thefeed.apparel_ids, function(apparel_id, callback) {
                            apparel.find({"_id": apparel_id}, {limit: 1}).toArray(function(e, results) {
                                if (e) console.log("error: ", e);
                                results = results[0];
                                if (results.apparel_cat == 1)
                                    url_subcat = "pants/";
                                else if (results.apparel_cat == 2)
                                    url_subcat = "shirts/";
                                else if (results.apparel_cat == 2)
                                    url_subcat = "tshirts/";
                                results.thumb = baseurl + outfiturl + url_subcat + results.apparel_id + "/front.jpg";
                                results.size = "M"; ///// TOBE REAL VERY SOON
                                results.gallery = [
                                    baseurl + outfiturl + url_subcat + results.apparel_id + "/model.jpg",
                                    baseurl + outfiturl + url_subcat + results.apparel_id + "/front.jpg"
                                ];
                                console.log("\t\t### pushing data into thefeed_index: " + i);
                                if (!util.isArray(feeds[i].outfits)) feeds[i].outfits = [];
                                feeds[i].outfits.push(results);
                                callback(null, null);
                            });
                        }, callback);
                    },
                    // load store infos
                    function(callback) {
                        store.find({"_id": thefeed.store_id}, {limit: 1}).toArray(function(e, resstore) {
                            console.log("\t### STORE_FIND");
                            if (e) console.log("error: ", e);
                            feeds[i].store = resstore[0];
                            feeds[i].store.class = "hem";
                            callback(null, null);
                        });
                    }
                ], callback);
            }, callback);
        }
        // MAIN
    ], function(err, result) {
        console.log("feed retrieval completed. stepping out");
        if (err) return next(err);
        out.send(feeds);
    });
};
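For completeness, a hypothetical wiring of this function, assuming an Express app and that out is the response object consumed by out.send(feeds):

// getUserFeed(thelimit, out): `out` doubles as the Express response.
app.get('/feed', function (req, res) {
    getUserFeed(20, res);
});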