Appending JSON data to a file

I have some data I want to write to a file periodically, and I'd like to write it as small JSON objects. At a later time I'd like to read them all back in for processing, but the appended file of JSON objects isn't itself valid JSON.
So I stringify each JSON object and write them to the file periodically, and I get something like this:
{
  "100": {
    "stuff": 123
  }
}
{
  "300": {
    "stuff": 456
  }
}
{
  "200": {
    "stuff": 789
  }
}
Of course, when I try to parse the file with a simple script like the following:
var fs = require('fs');
var file = 'test.log';

// Synchronous attempt
var obj = JSON.parse(fs.readFileSync(file, 'utf8'));

// Asynchronous attempt
fs.readFile(file, 'utf8', function (err, data) {
  if (err) throw err;
  obj = JSON.parse(data);
  console.log(obj);
});
...the parser freaks out because this isn't a valid JSON file; I'd need something more like:
{
  "100": {
    "stuff": 123
  },
  "300": {
    "stuff": 456
  },
  "200": {
    "stuff": 789
  }
}
...but I can't get this by simply appending records. I can of course force commas between the records before writing them, but then I'd be missing the '{' at the start of the file and the '}' at the end, and I'd have an extra ',' on the last record. The whole thing reeks of being a kludge.
I'm guessing someone has worked all of this out already and there is a nice pattern for this, but I couldn't find anything from searching. Each section will have a variable amount of data, so I like the flexibility JSON offers, but I don't want to store it all ahead of time before writing to disk, as it will get large.
If you could point me at a good solution for this it would be appreciated.
Sincerely, Paul.

Why don't you use a regex before processing the object list file to add commas? Since the pattern is a newline after every object, you could use this to find and replace: /}\n{/g.
For your sample, this should do the trick:
var fs = require('fs');
var file = 'test.log';
var obj;
fs.readFile(file, 'utf8', function (err, data) {
  if (err) throw err;
  var jsonString = '[' + data.replace(/}\n{/g, '},{') + ']'; // Adding brackets in order to create an array of objects
  obj = JSON.parse(jsonString);
  console.log(obj);
});
Then, if you want a single object as you specified, you can use the spread operator ... to merge each record into your super-object:
var fs = require('fs');
var file = 'test.log';
var obj;
fs.readFile(file, 'utf8', function (err, data) {
  if (err) throw err;
  var jsonString = '[' + data.replace(/}\n{/g, '},{') + ']'; // Adding brackets in order to create an array of objects
  obj = JSON.parse(jsonString);
  var superObj = {};
  for (var i = 0; i < obj.length; i++) {
    superObj = {...superObj, ...obj[i]};
  }
  console.log(superObj);
});
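Another pattern worth mentioning (not from the answer above, just a sketch): treat the file as newline-delimited JSON. Write each record as a single JSON.stringify'd line, then split on newlines when reading, so no regex surgery is needed. The file name and record shapes below match the question's example.
var fs = require('fs');
var file = 'test.log';

// Append one record per line (newline-delimited JSON)
function appendRecord(record) {
  fs.appendFileSync(file, JSON.stringify(record) + '\n');
}

// Read everything back and merge the records into one object
function readAll() {
  var merged = {};
  fs.readFileSync(file, 'utf8')
    .split('\n')
    .filter(function (line) { return line.trim().length > 0; }) // skip blank lines
    .forEach(function (line) {
      var record = JSON.parse(line);
      Object.keys(record).forEach(function (key) {
        merged[key] = record[key];
      });
    });
  return merged;
}

appendRecord({ "100": { "stuff": 123 } });
appendRecord({ "300": { "stuff": 456 } });
console.log(readAll()); // { '100': { stuff: 123 }, '300': { stuff: 456 } }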

Related

How to access the contents of a JSON file without a key?

Basically, I am setting up a web server via Node.js and Express (I am a beginner at this) to retrieve data by reading a JSON file.
For example, this is my data.json file:
[
  {
    "color": "black",
    "category": "hue",
    "type": "primary"
  },
  {
    "color": "red",
    "category": "hue",
    "type": "primary"
  }
]
I am trying to retrieve all of the colors by implementing this code for it to display on localhost:
router.get('/colors', function (req, res) {
  fs.readFile(__dirname + '/data.json', 'utf8', function (err, data) {
    data = JSON.parse(data);
    res.json(data); // this displays all of the contents of data.json
  })
});

router.get('/colors:name', function (req, res) {
  fs.readFile(__dirname + '/data.json', 'utf8', function (err, data) {
    data = JSON.parse(data);
    for (var i = 0; i < data.length; i++) {
      res.json(data[i][1]); // trying to display the values of color
    }
  })
});
How do I go about doing this?
What you are trying to do is actually pretty simple once you break it into smaller problems. Here is one way to break it down:
Load your JSON data into memory for use by your API.
Define an API route which extracts only the colours from your JSON data and sends them to the client as a JSON.
var fs = require('fs');

var data = [];
try {
  data = JSON.parse(fs.readFileSync('/path/to/json'));
} catch (e) {
  // Handle JSON parse error or file not exists error etc
  data = [{
    "color": "black",
    "category": "hue",
    "type": "primary"
  },
  {
    "color": "red",
    "category": "hue",
    "type": "primary"
  }];
}
router.get('/colors', function (req, res, next) {
  var colors = data.map(function (item) {
    return item.color;
  }); // This will look like: ["black","red"]
  res.json(colors); // Send your array as a JSON array to the client calling this API
})
Some improvements in this method:
The file is read only once synchronously when the application is started and the data is cached in memory for future use.
Using Array.prototype.map (see the docs) to extract an array of colors from the data.
Note:
You can structure the array of colors however you like and send it down as a JSON in that structure.
Examples:
var colors = data.map(function(item){return {color:item.color};}); // [{"color":"black"},{"color":"red"}]
var colors = {colors: data.map(function(item){return item.color;})} // { "colors" : ["black" ,"red"] }
Some gotchas in your code:
You are using res.json in a for loop, which is incorrect: the response should only be sent once. Ideally, you would build the JS object in the structure you need by iterating over your data, then send the completed object once with res.json (which, I'm guessing, internally JSON.stringifys the object and sends it as the response after setting the correct headers). See the short sketch after this list.
Reading files is an expensive operation. If you can afford to read the file once and cache the data in memory, that is more efficient (provided your data is not prohibitively large, in which case using files to store the info might be inefficient to begin with).
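As a minimal illustration of the first gotcha (a hypothetical /colors/names route, reusing the cached data array from above): collect everything first, then call res.json exactly once.
router.get('/colors/names', function (req, res) {
  var names = [];
  for (var i = 0; i < data.length; i++) {
    names.push(data[i].color); // build the full result first
  }
  res.json(names); // send the response exactly once
});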
In Express, you can do it this way:
router.get('/colors/:name', (req, res) => {
  const key = req.params.name
  const content = fs.readFileSync(__dirname + '/data.json', 'utf8')
  const data = JSON.parse(content)
  const values = data.reduce((values, value) => {
    values.push(value[key])
    return values
  }, [])
  // values => ['black', 'red']
  res.send(values)
});
Then, if you curl http://localhost/colors/color, you get ['black', 'red'].
What you're looking to do is:
res.json(data[i]['color']);
If you don't really want to use the keys in the JSON, you may want to use the Object.values function.
...
data = JSON.parse(data)
var values = []
for (var i = 0; i < data.length; i++) {
  values.push(Object.values(data[i])[0]) // 0 - color, 1 - category, 2 - type
}
res.json(values) // ["black","red"]
...
You should never use fs.readFileSync in production. Any sync function will block the event loop until its execution is complete, delaying everything afterwards (use with caution if deemed necessary). A few days back I had the worst experience myself and learnt that the hard way.
In Express you can define a route with a param or query and use that to map the contents inside the fs.readFile callback function.
/**
 * get color by name
 *
 * @param {String} name name of the color
 * @return {Array} array of the color data matching param
 */
router.get('/colors/:name', (req, res) => {
  const color = req.params.name
  const filename = __dirname + '/data.json';
  fs.readFile(filename, 'utf8', (err, data) => {
    if (err) {
      return res.send([]); // handle any error returned by readFile function here
    }
    try {
      data = JSON.parse(data); // parse the JSON string to array
      let filtered = []; // initialise empty array
      if (data.length > 0) { // we got an ARRAY of objects, right? make your check here for the array or else any map, filter, reduce, forEach function will break the app
        filtered = data.filter((obj) => {
          return obj.color === color; // return the object if the condition is true
        });
      }
      return res.send(filtered); // send the response
    }
    catch (e) {
      return res.send([]); // handle any error returned from JSON.parse function here
    }
  });
});
To summarise, use the asynchronous fs.readFile function so that the event loop is not clogged up. Inside the callback, parse the content and then return the response. The return is really important, or else you might end up getting Error: Can't set headers after they are sent.
DISCLAIMER: The code above is untested but should work. This is just to demonstrate the idea.
I don't think you can access JSON without a key. You can use a for...in loop, for example for (var name in object) {}; read up on for...in, it may help you.

I get undefined values in my field value when inserting multiple documents after converting csv to json string and parsing. Why?

Basically, I am trying to write an import function for CSV files where the CSV file gets converted to JSON before being inserted into MongoDB. This is my code.
// require the csvtojson converter class
var Converter = require("csvtojson").Converter;
// create a new converter object
var converter = new Converter({});
var MongoClient = require('mongodb').MongoClient;
var url = 'mongodb://localhost:27017/myproject';

// call the fromFile function which takes in the path to your
// csv file as well as a callback function
converter.fromFile("./NTA-SAM-Inventory-List-Security-Management-New_2017.csv", function (err, result) {
  // if an error has occured then handle it
  if (err) {
    console.log("An Error Has Occured");
    console.log(err);
  }
  // the result of the conversion
  var jsonResult = result;
  console.log(jsonResult);
  var jsobject = JSON.stringify(jsonResult);
  var jsonobject = JSON.parse(jsobject);
  var f = jsonobject.length;
  console.log(f);
  MongoClient.connect(url, function (err, db) {
    if (err) {
      console.log(err);
    }
    for (i = 0; i < f; i++) {
      var insertDocument = function () {
        db.collection('documents').insertOne({
          //'_id': Object.keys(obj).length,
          'indexNo': jsonobject.indexNo,
          'samID': jsonobject.samID,
          'Type': jsonobject.Type,
          'InventoryStatus': jsonobject.InventoryStatus,
          'dateIn': jsonobject.dateIn,
          'Remarks': jsonobject.Remarks,
          'dateOut': jsonobject.dateOut,
          //'Remarks': jsonobject.remarks,
          'ntaSamRequestRef': jsonobject.ntaSamReqRef
          //'Created Date': "<b>" + day + "/" + month + "/" + year + "</b>"
        }, function (err, results) {
          if (err) throw err;
          console.log(results);
        });
      };
      insertDocument(db, function () {
        if (err)
          throw err;
        else {
          console.log('insert');
        }
        db.close();
      });
    }
  });
  console.log("Inserted " + f + " document into the documents collection.");
});
So far, I have tried converting a random file with 1400 records into a JSON string, parsing it, and then inserting it. But somehow I keep getting undefined in my fields whenever I insert; the result shows each field with an undefined value.
Which part of my jsonobject.indexNo (that is, the jsonobject.field1value, jsonobject.field2value, etc. accesses) is wrong? How should I get the values from my JSON string after parsing?
I am using node.js to run it and MongoDB as the database. The CSV converts nicely; it's just this part about inserting the documents that fails. Thanks in advance!
db.collection('documents').insertOne is an async method; you can't run it in a loop like that. A workaround is to use the async module to handle it. I suggest using async.each.
Eg:
var async = require('async');

// Consider jsonResult is an array
var jsonResult = result;
async.each(jsonResult,
  // Here, jsonobject is a child object of the jsonResult array
  function (jsonobject, callback) {
    db.collection('documents').insertOne({
      //'_id': Object.keys(obj).length,
      'indexNo': jsonobject.indexNo,
      'samID': jsonobject.samID,
      'Type': jsonobject.Type,
      'InventoryStatus': jsonobject.InventoryStatus,
      'dateIn': jsonobject.dateIn,
      'Remarks': jsonobject.Remarks,
      'dateOut': jsonobject.dateOut,
      //'Remarks': jsonobject.remarks,
      'ntaSamRequestRef': jsonobject.ntaSamReqRef
      //'Created Date': "<b>" + day + "/" + month + "/" + year + "</b>"
    }, function (err) {
      // The async insert is done (or failed); trigger the callback
      callback(err);
    });
  },
  // 3rd param is the function to call when everything's done
  function (err) {
    // All tasks are done now
    doSomethingOnceAllAreDone();
  }
);
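As a side note (a sketch, not part of the original answer): if the goal is simply to load the whole converted array, the driver's insertMany can insert all the documents in a single call and avoids the per-document loop entirely. This assumes jsonResult is the array produced by the converter and db is the connected database handle from MongoClient.connect.
// Insert the entire converted array in one round trip
db.collection('documents').insertMany(jsonResult, function (err, res) {
  if (err) throw err;
  console.log('Inserted ' + res.insertedCount + ' documents');
  db.close();
});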

Parse JSON file containing multiple objects

I have a JSON file that contains multiple objects of the same structure that look like this:
{
  "id": "123",
  "type": "alpha"
}
{
  "id": "321",
  "type": "beta"
}
I'm using node.js to read the file.
fs.readFile(__dirname + "/filename.json", 'utf8', function(err, data) {
var content = JSON.parse(JSON.stringify(data));
If I do a console.log(content) things look good. I see the content of the json file. I'm trying to iterate over each object but I'm not sure how to do that. I've tried using
for(var doc in content)
but the doc isn't each object as I was expecting. How do I loop over the content to get each object in a json format so that I can parse it?
If content is an array, you can use
content.forEach(function (obj, index) { /* your code */ })
See documentation for Array.prototype.forEach()
If you need to just iterate, a forEach loop would work, or a normal for loop:
for (var i = 0; i < content.length; i++) {
  // perform whatever you need on the following object
  var myobject = content[i];
}
Depending on the file: the two current answers (Osama's and Daniel's) assume you have a JSON array:
[
  {
    "id": "123",
    "type": "alpha"
  },
  {
    "id": "456",
    "type": "beta"
  }
]
In which case, you can use any array iterator:
var async = require('async'),
    content = require(__dirname + "/filename.json");

async.each(content, function (item, callback) {
  //...
});
But in your case, it seems not to be JSON (no brackets to indicate an array, and no commas to separate the objects), so, to keep JSON.parse from throwing an error, you'll need to isolate your objects first:
var fs = require('fs'),
    async = require('async');

fs.readFile(__dirname + "/filename.notjson", 'utf8', function (err, data) {
  // Split on the closing braces, drop empty chunks,
  // then put the '}' back before parsing each piece
  var content = data.split('}').filter(function (chunk) {
    return chunk.trim().length > 0;
  });
  async.map(content, function (item, callback) {
    callback(null, JSON.parse(item + '}'));
  }, function (err, content) {
    console.log(content);
  });
});
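Alternatively, here is a sketch that borrows the regex trick from the first question above: insert commas between the concatenated objects and wrap the whole thing in brackets, then parse once. This assumes each record is flat, so the only place a '}' is followed by a '{' is between two objects.
var fs = require('fs');

fs.readFile(__dirname + "/filename.json", 'utf8', function (err, data) {
  if (err) throw err;
  // Turn "}\n{" boundaries into "},{" and wrap in [] to form a JSON array
  var content = JSON.parse('[' + data.trim().replace(/}\s*{/g, '},{') + ']');
  content.forEach(function (obj) {
    console.log(obj.id, obj.type);
  });
});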

JSON output formatting

I am trying to format the JSON output through Node.js as shown below:
[{"headers":["id","name","description","type","actionmode","outputparser","dispatchtype","troubleticketaction","alarmaction","actionexecutionmode","cost","isparent__"],"values":["100","test_bsc_interface","test_bsc_interface","test","Open Loop","regex","HPSA",null,null,"Asynchronous",null,"0"]},["101","check_tt","check_tt","test","Open Loop","None","Trouble Ticket","check_tt",null,"Synchronous",null,"0"]]
But currently I am getting the output shown below:
[{"headers":["id","name","description","type","actionmode","outputparser","dispatchtype","troubleticketaction","alarmaction","actionexecutionmode","cost","isparent__"],"values":["100","test_bsc_interface","test_bsc_interface","test","Open Loop","regex","HPSA",null,null,"Asynchronous",null,"0"]},{"headers":["id","name","description","type","actionmode","outputparser","dispatchtype","troubleticketaction","alarmaction","actionexecutionmode","cost","isparent__"],"values":["101","check_tt","check_tt","test","Open Loop","None","Trouble Ticket","check_tt",null,"Synchronous",null,"0"]}]
Please find the code snippet that I have used below. May I know what changes are required in the code?
var json_string = JSON.stringify(rows, null, 2);
var json_object = setValue(JSON.parse(json_string));
if (!err) {
  console.log('The object are returning ', json_object);
  var result = _.map(json_object, function (o) {
    return {headers: _.keys(o), values: _.values(o)};
  });
}
Your problem seems to be from another post I answered:
How to format the JSON object key/value pair
var json_string = JSON.stringify(rows, null, 2);
var json_object = setValue(JSON.parse(json_string));
var result;
if (!err) {
  console.log('The object are returning ', json_object);
  if (_.isArray(json_object) && json_object.length > 0) {
    result = {
      headers: _.keys(json_object[0]),
      values: _.map(json_object, function (o) {
        return _.values(o);
      })
    };
  }
}
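For reference, a small self-contained sketch of the same reshaping on made-up data (field list shortened; lodash assumed, since _ is used above):
var _ = require('lodash');

// Hypothetical rows with only two of the real columns
var json_object = [
  { id: '100', name: 'test_bsc_interface' },
  { id: '101', name: 'check_tt' }
];

var result = {
  headers: _.keys(json_object[0]), // ['id', 'name']
  values: _.map(json_object, function (o) {
    return _.values(o);
  })
};

console.log(JSON.stringify(result));
// {"headers":["id","name"],"values":[["100","test_bsc_interface"],["101","check_tt"]]}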

Boolean field not updating in MongoDB and Nodejs

There are a number of questions that relate to this but none of the answers seem to fix my problem.
I read JSON object strings from a file line by line, convert them to JSON objects, then use some of the fields of each object to find whether it is in MongoDB. If it is, I want to update the isValid field to true, but I'm just getting a return value of 1 and no update happens.
Here's my code:
var mongodb = require('mongodb'),
    MongoClient = mongodb.MongoClient;
var lazy = require("lazy"),
    fs = require("fs");

MongoClient.connect('mongodb://localhost', function (err, db) {
  var collection = db.collection('offers');
  new lazy(fs.createReadStream('./offers.txt'))
    .lines
    .forEach(function (line) {
      var jsonOffer = JSON.parse(line.toString());
      var find = {
        pricing: jsonOffer.pricing,
        details: jsonOffer.details,
        retailer: jsonOffer.retailer,
        brand: jsonOffer.brand,
        type: jsonOffer.type
      };
      collection.update(find, { $set: { 'isValid': true } }, function (err, result) {
        if (err) throw err;
        console.log(result); // prints 1
      });
    });
});
Can anyone help me here?
Thanks!
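Not part of the original question, but as a debugging sketch (assuming a 2.x-era MongoDB Node.js driver, where updateOne reports counts): logging matchedCount and modifiedCount makes it easier to tell whether the find query matched any document at all.
collection.updateOne(find, { $set: { isValid: true } }, function (err, result) {
  if (err) throw err;
  // If matchedCount is 0, the `find` query matched no documents,
  // so there was nothing to update in the first place.
  console.log('matched:', result.matchedCount, 'modified:', result.modifiedCount);
});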