I have a JSON file that contains multiple objects of the same structure that look like this:
{
"id": "123",
"type": "alpha"
}
{
"id": "321",
"type": "beta"
}
I'm using node.js to read the file.
fs.readFile(__dirname + "/filename.json", 'utf8', function(err, data) {
var content = JSON.parse(JSON.stringify(data));
If I do a console.log(content), things look good; I see the content of the JSON file. I'm trying to iterate over each object, but I'm not sure how to do that. I've tried using
for(var doc in content)
but doc isn't each object as I was expecting. How do I loop over the content to get each object as JSON so that I can parse it?
If content is an array, you can use
content.forEach(function (obj, index) { /* your code */ })
See documentation for Array.prototype.forEach()
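For example, with the objects from the question (a minimal sketch, assuming content is the parsed array):
content.forEach(function (obj, index) {
    console.log(index, obj.id, obj.type); // e.g. 0 '123' 'alpha'
});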
If you just need to iterate, a forEach loop or a normal for loop would work:
for (var i = 0; i < content.length; i++) {
    // perform whatever you need on the current object
    var myobject = content[i];
}
Depending on the file, the two current answers (Osama's and Daniel's) assume you have a JSON array:
[
{
"id": "123",
"type": "alpha"
},
{
"id": "456",
"type": "beta"
}
]
In which case, you can use any array iterator:
var async = require('async'),
    content = require(__dirname + "/filename.json");

async.each(content, function (item, callback) {
    // ... process item, then tell async.each this item is done
    callback();
});
But in your case, it doesn't seem to be JSON (no brackets to indicate an array, and no commas to separate the objects), so in case JSON.parse doesn't throw an error, you'll need to isolate your objects first:
var fs = require('fs'),
    async = require('async');

fs.readFile(__dirname + "/filename.notjson", 'utf8', function (err, data) {
    // split() consumes the closing brace, so re-append it, and skip the
    // empty chunk after the final '}'; this only works because the
    // objects contain no nested braces
    var content = data.split('}')
        .filter(function (item) { return item.trim().length; })
        .map(function (item) { return item + '}'; });
    async.map(content, function (item, callback) {
        callback(null, JSON.parse(item));
    }, function (err, content) {
        console.log(content);
    });
});
I have some data I want to write to a file periodically, and I'd like to write it as small JSON objects. At a later time I'd like to read them all in for processing, but the appended file of JSON objects isn't itself valid JSON.
So I stringify each JSON object and write it to the file periodically, and I get something like this:
{
"100": {
"stuff": 123
}
}
{
"300": {
"stuff": 456
}
}
{
"200": {
"stuff": 789
}
}
Of course, when I try to parse the file with a simple script like the following:
var fs = require('fs');
var file = 'test.log';
var obj = JSON.parse(fs.readFileSync(file, 'utf8'));
or its asynchronous equivalent:
var fs = require('fs');
var file = 'test.log';
var obj;
fs.readFile(file, 'utf8', function (err, data) {
    if (err) throw err;
    obj = JSON.parse(data);
    console.log(obj);
});
...the parser freaks out, because this isn't a valid JSON file. I'd need something more like this:
{
"100": {
"stuff": 123
},
"300": {
"stuff": 456
},
"200": {
"stuff": 789
}
}
...but I can't get this by simply appending records. I could of course force commas between the records before writing them, but then I'd be missing the '{' at the start of the file and the '}' at the end, and I'd have an extra ',' on the last record. The whole thing reeks of being a kludge.
I'm guessing someone has worked all of this out already and there is a nice pattern for this, but I couldn't find anything from searching. Each section will have a variable amount of data, so I like the flexibility JSON offers, but I don't want to hold it all in memory before writing to disk, as it will get large.
If you could point me at a good solution for this it would be appreciated.
Sincerely, Paul.
Why don't you use a regex to add the commas before processing the file? Since the pattern is a newline after every object, you could use this to find and replace: /}\n{/g.
For your sample, this should do the trick:
var fs = require('fs');
var file = 'test.log';
var obj;
fs.readFile(file, 'utf8', function (err, data) {
if (err) throw err;
var jsonString = '[' + data.replace(/}\n{/g, '},{') + ']'; // Adding brackets in order to create an array of objects
obj = JSON.parse(jsonString);
console.log(obj);
});
Then, if you want a single object as you specified, you can use the spread operator ... to merge each record into your super-object:
var fs = require('fs');
var file = 'test.log';
var obj;
fs.readFile(file, 'utf8', function (err, data) {
if (err) throw err;
var jsonString = '[' + data.replace(/}\n{/g, '},{') + ']'; // Adding brackets in order to create an array of objects
obj = JSON.parse(jsonString);
var superObj = {};
for (var i = 0; i < obj.length; i++) {
superObj = {...superObj,...obj[i]};
}
console.log(superObj);
});
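As an aside, a common pattern for append-only logs like this is newline-delimited JSON: write each record as one compact JSON.stringify'd object per line, then reading back is just splitting on newlines. A minimal sketch (assuming each record fits on a single line):
var fs = require('fs');

// Appending: one compact JSON object per line
fs.appendFileSync('test.log', JSON.stringify({ "100": { "stuff": 123 } }) + '\n');

// Reading: split on newlines and parse each non-empty line separately
var records = fs.readFileSync('test.log', 'utf8')
    .split('\n')
    .filter(function (line) { return line.trim().length; })
    .map(function (line) { return JSON.parse(line); });
console.log(records);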
Basically, I am setting up a web server via Node.js and Express (I am a beginner at this) to retrieve data by reading a JSON file.
For example, this is my data.json file:
[{
"color": "black",
"category": "hue",
"type": "primary"
},
{
"color": "red",
"category": "hue",
"type": "primary"
}
]
I am trying to retrieve all of the colors by implementing this code for it to display on localhost:
router.get('/colors', function (req, res) {
fs.readFile(__dirname + '/data.json', 'utf8', function (err, data) {
data = JSON.parse(data);
res.json(data); //this displays all of the contents of data.json
})
});
router.get('/colors/:name', function (req, res) {
fs.readFile(__dirname + '/data.json', 'utf8', function (err, data) {
data = JSON.parse(data);
for (var i = 0; i < data.length; i++) {
res.json(data[i][1]); //trying to display the values of color
}
})
});
How do I go about doing this?
What you are trying to do is actually pretty simple once you break it into smaller problems. Here is one way to break it down:
Load your JSON data into memory for use by your API.
Define an API route which extracts only the colours from your JSON data and sends them to the client as a JSON.
var data = [];
try {
data = JSON.parse(fs.readFileSync('/path/to/json'));
} catch (e) {
// Handle JSON parse error or file not exists error etc
data = [{
"color": "black",
"category": "hue",
"type": "primary"
},
{
"color": "red",
"category": "hue",
"type": "primary"
}
]
}
router.get('/colors', function (req, res, next) {
var colors = data.map(function (item) {
return item.color
}); // This will look like: ["black","red"]
res.json(colors); // Send your array as a JSON array to the client calling this API
})
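With this route in place, a request like curl http://localhost/colors would return ["black","red"] for the sample data (assuming the router is mounted at the root path).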
Some improvements in this method:
The file is read only once synchronously when the application is started and the data is cached in memory for future use.
Using Array.prototype.map (see its documentation) to extract an array of colors from the object.
Note:
You can structure the array of colors however you like and send it down as a JSON in that structure.
Examples:
var colors = data.map(function(item){return {color:item.color};}); // [{"color":"black"},{"color":"red"}]
var colors = {colors: data.map(function(item){return item.color;})} // { "colors" : ["black" ,"red"] }
Some gotchas in your code:
You are using res.json in a for loop, which is incorrect because the response should only be sent once. Ideally, you would build the JS object in the structure you need by iterating over your data, then send the completed object once with res.json (which, I'm guessing, internally JSON.stringifys the object and sends it as the response after setting the correct headers).
Reading files is an expensive operation. If you can afford to read the file once and cache the data in memory, that is efficient (provided your data is not prohibitively large, in which case using files to store the info might be inefficient to begin with).
In Express, you can do it this way:
router.get('/colors/:name', (req, res) => {
const key = req.params.name
const content = fs.readFileSync(__dirname + '/data.json', 'utf8')
const data = JSON.parse(content)
const values = data.reduce((values, value) => {
values.push(value[key])
return values
}, [])
// values => ['black', 'red']
res.send(values)
});
Then curl http://localhost/colors/color, and you get ['black', 'red'].
What you're looking to do is:
res.json(data[i]['color']);
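Note, though, that a response can be sent only once per request, so collect the values first and call res.json a single time. A minimal sketch based on the question's code:
router.get('/colors', function (req, res) {
    fs.readFile(__dirname + '/data.json', 'utf8', function (err, data) {
        data = JSON.parse(data);
        var colors = [];
        for (var i = 0; i < data.length; i++) {
            colors.push(data[i]['color']); // collect each color
        }
        res.json(colors); // ["black","red"]
    });
});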
If you don't really want to rely on the keys in the JSON, you may want to use the Object.values function.
...
data = JSON.parse(data)
var values = []
for (var i = 0; i < data.length; i++) {
values.push(Object.values(data[i])[0]) // 0 - color, 1 - category, 2 - type
}
res.json(values) // ["black","red"]
...
You should never use fs.readFileSync in production. Any sync function blocks the event loop until its execution is complete, delaying everything that follows (use it with caution if deemed necessary). A few days back I had the worst experience myself and learnt that the hard way.
In Express you can define a route with a param or a query and use it to map the contents inside the fs.readFile callback function.
/**
 * get color by name
 *
 * @param {String} name name of the color
 * @return {Array} array of the color data matching param
 */
router.get('/colors/:name', (req, res) => {
const color = req.params.name
const filename = __dirname + '/data.json';
fs.readFile(filename, 'utf8', (err, data) => {
if(err){
return res.send([]); // handle any error returned by readFile function here
}
try{
data = JSON.parse(data); // parse the JSON string to array
let filtered = []; // initialise empty array
if(data.length > 0){ // we got an ARRAY of objects, right? make your check here for the array or else any map, filter, reduce, forEach function will break the app
filtered = data.filter((obj) => {
return obj.color === color; // return the object if the condition is true
});
}
return res.send(filtered); // send the response
}
catch(e){
return res.send([]); // handle any error returned from JSON.parse function here
}
});
});
To summarise: use the asynchronous fs.readFile function so that the event loop is not clogged up. Inside the callback, parse the content and then return the response. The return is really important, or else you might end up getting Error: Can't set headers after they are sent.
DISCLAIMER: The code above is untested but should work. This is just to demonstrate the idea.
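As a side note, a promise-based variant is possible on newer Node.js versions (a sketch, assuming Node 10+ where fs.promises is available):
const fs = require('fs').promises;

router.get('/colors/:name', async (req, res) => {
    try {
        const data = JSON.parse(await fs.readFile(__dirname + '/data.json', 'utf8'));
        // same filtering as above, without nesting callbacks
        return res.send(data.filter((obj) => obj.color === req.params.name));
    } catch (e) {
        return res.send([]); // file or parse error: same fallback as above
    }
});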
I think you can't access JSON values without a key. You can use a for...in loop, e.g. for (var name in object) { ... }. Read up on for...in; it may help you.
I'm trying to access JSON object properties directly and log them. Here is my function:
loadProcesses(filter?){
this._postService.getAllProcess(filter)
.subscribe(
res=> {
this.processListe = res;
// console.log(this.processListe.)
}
,null,
() =>{
console.log("get processes liste" + filter)
});
So this.processListe contains a JSON object, and my JSON format is like this:
{"Person": {
"id": "A256",
"name": "GET",
"status": "active",
"description": "hardworking, openminded",
...
So it will contain exactly the same thing. For example, if I want to simply print the label to the console, how can I do it?
Are you looking for something like this:
function parseObject(obj)
{
for(var key in obj)
{
console.log("key: " + key + ", value: " + obj[key])
if(obj[key] instanceof Object)
{
parseObject(obj[key]);
}
}
}
Just call parseObject(res) in the subscribe method.
Parse it and access the fields:
var obj = JSON.parse(res); // assuming res is the raw JSON string
obj.Person.id;
//etc
Parse it in the .subscribe:
res => this.processListe = res.json();
A better solution is to declare your response as any:
loadProcesses(filter?){
this._postService.getAllProcess(filter)
.subscribe(
(res: any)=> {
this.processListe = res;
// console.log(this.processListe.)
}
,null,
() =>{
console.log("get processes liste" + filter)
});
This way you can access any attribute in your response.
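For the sample shape above, a direct property access could then look like this (a sketch, assuming res is already the parsed object shown in the question):
this._postService.getAllProcess(filter)
    .subscribe((res: any) => {
        this.processListe = res;
        console.log(this.processListe.Person.name); // prints "GET"
    });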
I am building a website with NodeJS which asks for a data-file to be uploaded, then I have to check and (if needed) transform the content of this file.
The source file is a JSON or XML configuration file; I just need to ensure its content is well-formatted for the rest of the application.
I am wondering what would be the best way to check the file's content as a whole.
I usually manipulate files with streams, but I am not sure they allow me to do what I want...
The source file has a format similar to this:
{
"parameters": [{
"name": "name",
"settings": {
"key": "value"
}
}],
"data": [{
"id": "1",
"label": "label 1",
}, {
"id": "2",
"label": "label 2"
}]
}
What I need to do is parse the file's content and check whether the format is good;
otherwise, transform the file into a well-formatted one:
// Read the file content
var parameters = [],
data = [],
p = parameters.length,
d = data.length;
// Loop on the parameters, and check the format
while (p--) {
var parameter = parameters[p];
if ('name' in parameter && typeof parameter.name == "string") {
// Add several rules
parameters.push(parameter);
}
}
// Do a similar control for "data".
// Then save the well-formatted parameters and data into a file
The thing is that the uploaded file might be very large...
Can I do this with transform streams? I need to check the full content of the file as an object!
How can I be sure a stream transformer won't give me a chunk with just part of the data, for instance?
I'd first try something like this:
var fs = require('fs');
try {
var inputFile = require('./config.json');
} catch (e) {
console.log(e.message); // Do proper error handling.
}
// Loop on the parameters, and check the format
if (!('parameters' in inputFile)) {
console.log("Got a problem here!");
}
var parameters = [];
var p = inputFile['parameters'].length;
while(p--) {
var parameter = inputFile['parameters'][p];
if ('name' in parameter && typeof parameter.name == 'string') {
// Add several rules
parameters.push(parameter);
}
}
// Do a similar control for "data".
var data = inputFile['data'];
// More code needed here...
// Then save the well-formatted parameters and data into a file
fs.writeFileSync('./data.json', JSON.stringify({parameters: parameters, data: data}, null, 4) , 'utf-8');
If you are dealing with mammoth files that cannot fit into memory, you've got a HUGELY more difficult task ahead of you. In general, you cannot guarantee that a partial read will give you enough of the JSON to parse anything out of (e.g. {"data": ["<FOUR PETABYTE STRING>"]}).
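If the file really is too large for memory, a streaming JSON parser can process the arrays incrementally. A rough sketch using the third-party JSONStream module (an assumption: installed via npm install JSONStream), which emits each element of a matched path as it is parsed:
var fs = require('fs'),
    JSONStream = require('JSONStream');

var parameters = [];
fs.createReadStream('./config.json')
    .pipe(JSONStream.parse('parameters.*')) // emits each entry of the "parameters" array
    .on('data', function (parameter) {
        if ('name' in parameter && typeof parameter.name === 'string') {
            parameters.push(parameter); // keep only well-formed entries
        }
    })
    .on('end', function () {
        console.log(parameters);
    });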
I get a JSON response from the server that I have full access to using loadComplete. Is it possible to access the JSON response using
onSelectRow?
any other custom function defined outside of loadComplete?
You can define a variable which will hold the last state of the JSON response returned from the server:
var serverData;
$('#list').jqGrid({
datatype: 'json',
// ... other parameters
loadComplete: function (data) {
serverData = data; // or serverData = data.rows
// ...
},
onSelectRow: function (id) {
if (serverData) {
// here you can access serverData, but you need
// here probably find the item in the serverData
// which corresponds the id
}
}
});
If you have JSON data, for example of the form
{
"total": "xxx",
"page": "yyy",
"records": "zzz",
"rows" : [
{"id" :"1", "cell": ["cell11", "cell12", "cell13"]},
{"id" :"2", "cell": ["cell21", "cell22", "cell23"]},
...
]
}
then you don't need to save the whole data in serverData. It could be interesting to save only the cell part, as the value of serverData[id]:
var serverData = [];
$('#list').jqGrid({
datatype: 'json',
// ... other parameters
loadComplete: function (data) {
var i, rows = data.rows, l = rows.length, item;
for (i = 0; i < l; i++) {
item = rows[i];
serverData[item.id] = item.cell;
}
// ...
},
onSelectRow: function (id) {
var item = serverData[id]; // the part of data which we need
}
});
If you use the repeatitems: false setting in the jsonReader, then you can save in serverData only the part of each item (selected properties) which represents the row of the server data.
In any case, you should save the needed part of the information from the data parameter of loadComplete in some variable defined outside of loadComplete.
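A minimal sketch of that last variant (assumptions: jsonReader: { repeatitems: false } is set, and the property names name and status are hypothetical stand-ins for whatever your rows carry):
var serverData = [];
$('#list').jqGrid({
    datatype: 'json',
    jsonReader: { repeatitems: false },
    // ... other parameters
    loadComplete: function (data) {
        var i, rows = data.rows, l = rows.length, item;
        for (i = 0; i < l; i++) {
            item = rows[i];
            // keep only the properties needed later (hypothetical names)
            serverData[item.id] = { name: item.name, status: item.status };
        }
    },
    onSelectRow: function (id) {
        var item = serverData[id]; // the saved part of the row data
    }
});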