I have a JSON file which I am reading with node, modifying and saving it as a json file.
I'm looking to save the new json as newline delimited vs being in an array.
I came across https://github.com/CrowdProcess/newline-json but don't fully understand streams. If I have the following stream setup, how can I pipe it through the parser and stringifier?
// Open the source file for reading and the destination for writing,
// then construct the newline-json transform streams.
fileStream = fs.createReadStream('source.json')
writeStream = fs.createWriteStream('output.txt');
var Parser = require('newline-json').Parser;
var Stringifier = require('newline-json').Stringifier;
// NOTE(review): newline-json's Parser expects newline-delimited JSON as
// input. If source.json is a single JSON array (not NDJSON), the parser
// emits no entities — which would explain the blank output file.
var parser = new Parser();
var stringifier = new Stringifier();
But running the following only outputs a blank file.
// Pipeline: raw bytes -> parse NDJSON entities -> re-stringify as NDJSON -> file.
fileStream.pipe(parser).pipe(stringifier).pipe(writeStream)
what am I missing about streams?
One way to convert a JSON array to a stream of newline-delimited JSON entities is to use jq with the -c option, e.g.
$ jq -c ".[]"
Input:
[[1,2], 3, {"4":5}]
Output:
[1,2]
3
{"4":5}
See https://stedolan.github.io/jq
For anybody who is looking for a solution on how to convert a JSON array of objects to ND-JSON, here is the solution:
Input:
// Sample input: a JSON array of objects to convert to newline-delimited JSON.
const arrObj = [{
  id: 1,
  name: 'joe'
}, {
  id: 2,
  name: 'ben'
}, {
  id: 3,
  name: 'jake'
}, {
  id: 4,
  name: 'marsh'
}];

// Stringify each object and join with \n.
// Use an explicit arrow rather than passing JSON.stringify directly:
// Array#map calls its callback with (value, index, array), so the index
// would be passed as JSON.stringify's `replacer` and the whole array as
// `space` — it happens to work only because both are silently ignored.
const ndJson = arrObj.map((obj) => JSON.stringify(obj)).join('\n');
console.log(ndJson);
Output:
{"id":1,"name":"joe"}
{"id":2,"name":"ben"}
{"id":3,"name":"jake"}
{"id":4,"name":"marsh"}
Example Usecase: When importing bulk request from a json file to elasticsearch.
Happy Coding :)
In node.js you can use the node-jq package to do what @peak has shown above.
var stream = require('stream');
var fs = require('fs');
const jq = require('node-jq');

var fileName = 'YOUR_FILE_NAME'; // e.g. abc.json
var bucketName = 'YOUR_BUCKET NAME'; // e.g. gs://def — presumably used by an upload step not shown here

// PassThrough stream a downstream consumer (e.g. a bucket upload) can read from.
var dataStream = new stream.PassThrough();

// Run `jq '.[]' -c` over the file to flatten the JSON array into
// newline-delimited JSON, then push the result into the stream.
// Awaiting the jq promise (instead of firing and forgetting it) means the
// promise returned by formatJson() settles only once jq has finished, so
// callers can reliably `await formatJson()`.
async function formatJson() {
  try {
    const output = await jq.run('.[]', fileName, { output: 'compact' });
    dataStream.push(output);
    dataStream.push(null); // signal end-of-stream to consumers
    console.log(dataStream);
  } catch (err) {
    console.log(err);
  }
}

formatJson();
I am not an experienced node person, so apologies if the code is clumsy but it works.
Related
I am trying to process a CSV file in NestJS using Multer and Papa Parse. I do not want to store the file locally. I just want to parse CSV files to extract some information.
However, I am unable to process it, I have tried two different ways. In the first one, I passed the file buffer to Papa.parse function. However, I get the error: ReferenceError: FileReaderSync is not defined
// NestJS controller handler. NOTE(review): the '@' of each decorator was
// mangled to '#' by formatting; the real code uses @Post, @UseInterceptors
// and @UploadedFile.
#Post('1')
#UseInterceptors(
FileInterceptor('file', {})
)
// Passing the raw Buffer straight to papa.parse() with worker: true makes
// Papa Parse reach for FileReaderSync, a browser-worker-only API — hence
// "ReferenceError: FileReaderSync is not defined" when run under Node.
async uploadFile(#UploadedFile() file: Express.Multer.File ){
const csvData = papa.parse(file.buffer, {
header: false,
worker: true,
delimiter: ",",
step: function (row){
console.log("Row: ", row.data);
}
});
}
So tried calling the readFileSync() as shown below, but this time I got the error, ERROR [ExceptionsHandler] ENAMETOOLONG: name too long, open
// Second attempt. NOTE(review): '#' decorators are formatting-mangled '@'.
#Post('2')
#UseInterceptors(
FileInterceptor('file', {})
)
// readFileSync expects a file path (or descriptor); passing the Buffer here
// coerces its contents into a path string, which is why the OS rejects it
// with ENAMETOOLONG ("name too long").
async uploadFile(#UploadedFile() file: Express.Multer.File ){
const $file = readFileSync(file.buffer);
const csvData = papa.parse($file, {
header: false,
worker: true,
delimiter: ",",
step: function (row){
console.log("Row: ", row.data);
}
});
}
I will appreciate any help to resolve this issue.
As pointed out by @skink, the file buffer needs to be converted to a stream before it can be used by Papa Parse.
const { Readable } = require('stream');
And updated the function, where converting the file.buffer to stream before calling parse()
#Post('1')
#UseInterceptors(
FileInterceptor('file', {})
)
async uploadFile(#UploadedFile() file: Express.Multer.File ){
const stream = Readable.from(file.buffer);
const csvData = papa.parse(stream, {
header: false,
worker: true,
delimiter: ",",
step: function (row){
console.log("Row: ", row.data);
}
});
}
So I have this array with different links
const urls = ['myurl.com', 'localhost.com'... etc]
In the for loop I want to create an object like so, it should basically create a new object for every URL, and then pass the looped URL to the userUrl section
// NOTE(review): pseudo-code from the question — the array/object literal
// below is created and immediately discarded on each iteration (nothing is
// collected), and it is missing a comma after `isVerified: true`, so this
// does not parse as-is.
for(let url of urls) {
[
{
user: 1,
isVerified: true
userUrl: url
}
]
}
After the loop finishes, this data should be readable in a JSON file
it should look something like this
[
{
user: 1,
isVerified: true,
userUrl: myUrl.com
},
{
user: 2,
isVerified: true,
userUrl: localhost.com
}
...etc
]
I tried this code on Chrome and it works correctly; it also now correctly formats the JSON data by instructing JSON.stringify to use 4-space indentation.
It won't work in the snippet, but it will if you save it in your own file. I did it inside the chrome developer tools in the Sources tab as a snippet and as soon as executed the download queue was fed with the json file.
I left the live snippet here because there's the chance to see the jsonData on console anyway.
The way to programmatically send a string to a download file was inspired by this question:
How do I save JSON to local text file
// Trigger a client-side download of `content` by pointing a temporary
// anchor element at an object URL for a Blob of the given MIME type.
function download(content, fileName, contentType) {
  const blob = new Blob([content], { type: contentType });
  const anchor = document.createElement("a");
  anchor.href = URL.createObjectURL(blob);
  anchor.download = fileName;
  anchor.click();
}
const urls = ['myurl.com', 'localhost.com'];
// Build the records, pretty-print with 4-space indentation, then hand the
// JSON text to the download helper as a plain-text file.
const data = factory(urls);
const jsonData = JSON.stringify(data, null, 4);
console.log(jsonData);
download(jsonData, 'json.txt', 'text/plain');
/**
 * Build one user record per URL.
 * The `user` id is numbered sequentially (1-based) to match the desired
 * output in the question ({user: 1}, {user: 2}, ...) — the original
 * hard-coded `user: 1` for every record.
 * @param {string[]} urls
 * @returns {{user: number, isVerified: boolean, userUrl: string}[]}
 */
function factory(urls) {
  return urls.map((url, index) => ({
    user: index + 1,
    isVerified: true,
    userUrl: url,
  }));
}
I am trying to parse a JSON object in Google Scripts but I can't seem to get it to work.
The JSON I am trying to parse looks like this
{
"keywords":[
{
"c":0.015165274822976534,
"monthly":[ ],
"kw":"handstand",
"n":60500,
"sb":0.3
}
],
"not_found":[
]
}
I can't seem to access any of the values within the object. If I use JSON.parse() it seems to create a non-JSON object.
// NOTE(review): this string literal spans a raw newline (a paste artifact);
// in real code it must be on one line or use escaped line breaks.
var response = '{"keywords":[{"c":0.015165274822976534,"monthly":[
],"kw":"handstand","n":60500,"sb":0.3}],"not_found":[]}'
var obj = JSON.parse(response);
// Logger.log renders objects in Apps Script's own notation (= instead of :),
// which is why the output doesn't look like JSON — the parse succeeded.
Logger.log(obj);
Returns
{keywords=[{c=0.015165274822976534, monthly=[], kw=handstand, n=60500, sb=0.3}], not_found=[]}
This doesn't validate.
If I try to work with that anyways this happens
// NOTE(review): same paste artifact — the literal spans a raw newline.
var response = '{"keywords":[{"c":0.015165274822976534,"monthly":[
],"kw":"handstand","n":60500,"sb":0.3}],"not_found":[]}'
var obj = JSON.parse(response);
// `keywords` is an array, so obj.keywords.n is undefined; the property
// lives on each element, e.g. obj.keywords[0].n
Logger.log(obj.keywords.n)
Returns undefined
If I don't use JSON.parse and just work with the original response object I get the following:
var response = '{"keywords":[{"c":0.015165274822976534,"monthly":[ ],"kw":"handstand","n":60500,"sb":0.3}],"not_found":[]}'
// `response` is still a string here; strings have no `keywords` property,
// so this logs undefined — it must be JSON.parse'd first.
Logger.log(response.keywords);
I get undefined
I'm not really sure where I am going wrong here. Any help is appreciated. Thanks!
As @Tanaike mentions, your n property is a property of an object in the keywords array. So to log the values of n, you need to - after parsing the string to an object - evaluate each object in keywords:
// Parse the payload, then report the usage count for every keyword entry.
var response = '{"keywords":[{"c": 0.015165274822976534, "monthly": [ ], "kw": "handstand", "n": 60500, "sb": 0.3}], "not_found": []}'
var obj = JSON.parse(response);
// Log all values of n for each keyword:
for (const entry of obj.keywords) {
  console.log(`Word: ${entry.kw}, times used: ${entry.n}`);
}
I have JSON data stored in the variable 'data'.
I want to write this to a text file.
Can I do this with Node? I am a beginner
You can use this NPM module: https://www.npmjs.com/package/jsonfile
// Write `obj` to /tmp/data.json using the jsonfile package.
var jsonfile = require('jsonfile')

var file = '/tmp/data.json'
var obj = {name: 'JP'}

// Only report a real failure: the callback also fires with err === null on
// success, and the original logged that null unconditionally.
jsonfile.writeFile(file, obj, function (err) {
  if (err) console.error(err)
})
I installed node.js and express.js and I'm trying to use:
// Despite the name, `str` is an object literal, not a string.
var str = { test: 'info'};
// JSON.parse expects a string; the object is coerced to "[object Object]",
// which is not valid JSON — hence the SyntaxError.
obj = JSON.parse(str);
but it causes an error: SyntaxError: Unexpected token 0 at Object.parse (native)
How can I fix this? Thanks.
Are you parsing an Object into an Object?
JSON.parse() expects string:
// JSON text is a string, with double-quoted keys and string values.
var str = '{"test": "info"}';
var obj = JSON.parse(str); // declared with var — the original leaked an implicit global
Basically JSON.parse() expects string but you were passing object, so instead do this:
// Round-trip demo: JSON.parse needs a *string*, so first serialize the
// object with JSON.stringify, then parse that text back into a copy.
const original = { test: 'info' };
const str = JSON.stringify(original);
const restored = JSON.parse(str);
Here's helpful docs about JSON https://developer.mozilla.org/en/Using_native_JSON
str is not a string, but an object. Put the whole thing into a string first, i.e.:
// Wrap the whole thing in quotes so it is JSON text (note the
// double-quoted key, which JSON requires).
var str = '{ "test": "info"}';
var obj = JSON.parse(str); // declared with var — the original assigned an implicit global
express.js cannot parse JSON objects on its own. Try using the body-parser
// Minimal Express app wired with body-parser so req.body gets populated.
const app = require('express')();
const bodyParser = require('body-parser');
const multer = require('multer'); // v1.0.5
const upload = multer(); // for parsing multipart/form-data

app.use(bodyParser.json()); // for parsing application/json
app.use(bodyParser.urlencoded({ extended: true })); // for parsing application/x-www-form-urlencoded

// Echo the parsed body back so you can see what body-parser produced.
app.post('/profile', upload.array(), (req, res, next) => {
  console.log(req.body);
  res.json(req.body);
});
If you want to create JSON you need to use JSON.stringify.
// Serialize a plain object into JSON text.
const thing = { test: 'info' };
const json = JSON.stringify(thing);
If you want to parse JSON to an object you need to use parse. Parse expects you to use valid JSON.
// Parse valid JSON text back into an object.
const json = '{ "test": "info" }'; //Use double quotes for JSON
const thing = JSON.parse(json);
An easy way to test if you are using valid json is to use something like: http://jsonlint.com/
The thing you are calling str is actually already an object. JSON is not necessary, pure JavaScript is all you need:
// The value is already a plain object; stringify it only when JSON text is needed.
const obj = { test: 'info' };
console.log(JSON.stringify(obj)); // JSON output