I have JSON data stored in the variable 'data'.
I want to write this to a text file.
Can I do this with Node? I am a beginner.
You can use this NPM module: https://www.npmjs.com/package/jsonfile
var jsonfile = require('jsonfile')

var file = '/tmp/data.json'
var obj = { name: 'JP' }

// writes obj to file as JSON
jsonfile.writeFile(file, obj, function (err) {
  if (err) console.error(err)
})
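If you'd rather not add a dependency, Node's built-in fs module plus JSON.stringify does the same job. A minimal sketch, assuming data is the variable from your question and the output path is just an example:
var fs = require('fs')

// data is the JSON value you already have in memory
fs.writeFile('/tmp/data.json', JSON.stringify(data, null, 2), function (err) {
  if (err) console.error(err)
})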
I have a CSV file at
http://vhost11.lnu.se:20090/assig2/data1.csv
I am trying to convert it to a local JSON file; my code is below.
All I am getting in my data1.json is {"X":"153","Y":"21","time":"21438"}.
const request = require('request')
const csv = require('csvtojson')
const fs = require('fs')

csv()
  .fromStream(request.get('http://vhost11.lnu.se:20090/assig2/data1.csv'))
  .on("json", function (jsonObj) { // a single json object will be emitted for each csv line
    console.log(jsonObj);
    fs.writeFile("./data1.json", JSON.stringify(jsonObj), (err) => {
      if (err) {
        console.error(err);
        return;
      }
    });
  });
Where did I go wrong?
The callback in the on("json") handler is called once for each line, and each fs.writeFile call overwrites data1.json, which is why only the last row ends up in the file. Initialize an empty array in the outermost scope and push jsonObj onto it from that callback; then write the array to the file once the input has been fully read, by handling the done event.
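A minimal sketch of that fix, keeping the csvtojson v1 event style from the question (collect every row, write once when parsing finishes):
const request = require('request')
const csv = require('csvtojson')
const fs = require('fs')

const rows = [] // one entry per CSV line

csv()
  .fromStream(request.get('http://vhost11.lnu.se:20090/assig2/data1.csv'))
  .on('json', (jsonObj) => {
    rows.push(jsonObj) // collect instead of writing immediately
  })
  .on('done', (err) => {
    if (err) {
      console.error(err)
      return
    }
    // the whole file has been parsed; write the array once
    fs.writeFile('./data1.json', JSON.stringify(rows), (writeErr) => {
      if (writeErr) console.error(writeErr)
    })
  })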
From react-dropzone I receive a File object with a preview property whose value is a blob: URL, i.e. File {preview: "blob:http://localhost:8080/52b6bad4-58f4-4ths-a2f5-4ee258ba864a"}.
Is there a way to convert this to JSON on the client? The file doesn't need to be stored in a database (the converted JSON will be). I've attempted to use csvtojson, but it can't access the file system since it relies on Node to do so. Ideally I'd like to do the conversion on the client, if possible, once the user has uploaded the file. Any suggestions welcome.
<Dropzone
  name={field.name}
  onDrop={(acceptedFiles, rejectedFiles) => {
    acceptedFiles.forEach(file => {
      console.log(file)
      let tempFile = file.preview
      csv()
        .fromStream(tempFile) // this errors with "fs.exists is not a function" as it's not running server-side
        .on('end_parsed', (jsonArrObj) => {
          console.log(jsonArrObj)
        })
    })
  }}
>
Yes, it's possible with FileReader and csv:
import csv from 'csv';

// ...

const onDrop = (e) => {
  const reader = new FileReader();
  reader.onload = () => {
    csv.parse(reader.result, (err, data) => {
      console.log(data);
    });
  };
  reader.readAsBinaryString(e[0]);
};

// ...

<Dropzone name={field.name} onDrop={onDrop} />
FileReader API: https://developer.mozilla.org/en/docs/Web/API/FileReader
csv package: https://www.npmjs.com/package/csv
I'd like to convert a CSV file to a JSON object using Node.js. The problem is that my CSV file is hosted on a special URL.
URL: My CSV here
var fs = require("fs");
var Converter = require("csvtojson").Converter;
var fileStream = fs.createReadStream("myurl");
var converter = new Converter({constructResult:true});
converter.on("end_parsed", function (jsonObj) {
  console.log(jsonObj);
});
fileStream.pipe(converter);
Issue:
Error: ENOENT, open 'C:\xampp\htdocs\GestionDettes\http:\www.banque-france.fr\fileadmin\user_upload\banque_de_france\Economie_et_Statistiques\Changes_et_Taux\page3_quot.csv'
at Error (native)
Edit #1:
Request.get(myurl, function (error, Response, body) {
  var converter = new Converter({
    constructResult: true,
    delimiter: ';'
  });
  converter.fromString(body, function (err, taux) {
    console.log(taux); // it works
  });
});
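If you also want to save the parsed result to disk, fs.writeFile can be added in the same callback. A sketch building on the Edit #1 code above, where myurl stands for the CSV URL from the question and the output file name is just an example:
var request = require('request');
var Converter = require('csvtojson').Converter;
var fs = require('fs');

request.get(myurl, function (error, response, body) {
  if (error) {
    console.error(error);
    return;
  }
  var converter = new Converter({ constructResult: true, delimiter: ';' });
  converter.fromString(body, function (err, taux) {
    if (err) {
      console.error(err);
      return;
    }
    // persist the parsed rows as a JSON file (example path)
    fs.writeFile('./taux.json', JSON.stringify(taux, null, 2), function (writeErr) {
      if (writeErr) console.error(writeErr);
    });
  });
});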
I did just that in a module that reads and writes over different protocols in different data formats. I used request to get HTTP resources.
If you want, take a look at alinex-datastore. With this module it should work like this:
const DataStore = require('@alinex/datastore').default;

async function transform() {
  const ds = new DataStore('http://any-server.org/my.csv');
  await ds.load();
  await ds.save('file:/etc/my-app.json');
}
That should do it.
I have a JSON file which I am reading with Node, modifying, and saving again as a JSON file.
I'm looking to save the new JSON as newline-delimited JSON rather than as an array.
I came across https://github.com/CrowdProcess/newline-json but don't fully understand streams. If I have the following stream setup, how can I pipe it through the parser and stringifier?
var fs = require('fs');
var Parser = require('newline-json').Parser;
var Stringifier = require('newline-json').Stringifier;

var fileStream = fs.createReadStream('source.json');
var writeStream = fs.createWriteStream('output.txt');
var parser = new Parser();
var stringifier = new Stringifier();
But running the following only outputs a blank file.
fileStream.pipe(parser).pipe(stringifier).pipe(writeStream)
What am I missing about streams?
One way to convert a JSON array to a stream of newline-delimited JSON entities is to use jq with the -c option, e.g.
$ jq -c ".[]"
Input:
[[1,2], 3, {"4":5}]
Output:
[1,2]
3
{"4":5}
See https://stedolan.github.io/jq
For anybody looking for a way to convert a JSON array of objects to ND-JSON, here is a solution:
Input:
const arrObj = [{
  id: 1,
  name: 'joe'
}, {
  id: 2,
  name: 'ben'
}, {
  id: 3,
  name: 'jake'
}, {
  id: 4,
  name: 'marsh'
}];

// stringify the object and join with \n
const ndJson = arrObj.map(JSON.stringify).join('\n');
console.log(ndJson);
Output:
{"id":1,"name":"joe"}
{"id":2,"name":"ben"}
{"id":3,"name":"jake"}
{"id":4,"name":"marsh"}
Example use case: importing bulk requests from a JSON file into Elasticsearch.
Happy Coding :)
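Applied to the original question (read source.json, write newline-delimited output.txt), the same idea works without any streaming, assuming source.json holds a JSON array:
const fs = require('fs');

// read the whole array, turn each element into one line of ND-JSON
const arr = JSON.parse(fs.readFileSync('source.json', 'utf8'));
const ndJson = arr.map(obj => JSON.stringify(obj)).join('\n');
fs.writeFileSync('output.txt', ndJson + '\n');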
In Node.js you can use the node-jq package to do what @peak has shown above.
var stream = require('stream');
var fs = require('fs');
const jq = require('node-jq');

var fileName = 'YOUR_FILE_NAME'; // e.g. abc.json
var bucketName = 'YOUR_BUCKET_NAME'; // e.g. gs://def
var dataStream = new stream.PassThrough();

async function formatJson() {
  jq.run('.[]', fileName, { output: 'compact' })
    .then((output) => {
      dataStream.push(output)
      dataStream.push(null)
      console.log(dataStream)
    })
    .catch((err) => {
      console.log(err)
    })
}

formatJson()
I am not an experienced node person, so apologies if the code is clumsy but it works.
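If you want that compact output in a file rather than on the console, the PassThrough stream above can be piped into a write stream. A small sketch reusing the dataStream and fs variables from the snippet above (the output file name is just an example):
// pipe the newline-delimited output to a file (example name)
var outFile = fs.createWriteStream('output.ndjson');
dataStream.pipe(outFile);
outFile.on('finish', function () {
  console.log('done writing');
});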
How can I load a big local JSON file?
I have tried this, but had no success:
var sa = require("./shared/resources/sa.json");
var array = new observableArrayModule.ObservableArray(sa);
Use the file-system module to read the file and then parse it with JSON.parse():
var fs = require('file-system');

var documents = fs.knownFolders.currentApp();
var jsonFile = documents.getFile('shared/resources/sa.json');
var array;
var jsonData;

jsonFile.readText()
  .then(function (content) {
    try {
      jsonData = JSON.parse(content);
      array = new observableArrayModule.ObservableArray(jsonData);
    } catch (err) {
      throw new Error('Could not parse JSON file');
    }
  }, function (error) {
    throw new Error('Could not read JSON file');
  });
Here's a real-life example of how I'm doing it in a NativeScript app to read a JSON file of roughly 75 KB / 250,000 characters.
TypeScript:
import { knownFolders } from "tns-core-modules/file-system";

export class Something {
  loadFile() {
    let appFolder = knownFolders.currentApp();
    let cfgFile = appFolder.getFile("config/config.json");
    console.log(cfgFile.readTextSync());
  }
}
As of TypeScript 2.9.x and above (NativeScript 5.x.x uses TypeScript 3.1.1 and above), we can use the resolveJsonModule option in tsconfig.json. With this option, JSON files can be imported just like modules, and the code is simpler to use, read and maintain.
For example, we can do:
import config from "./config.json";
console.log(config.count); // 42
console.log(config.env); // "debug"
All we need to do is use TypeScript 2.9.x or above and enable the property in tsconfig.json:
// tsconfig.json
{
  "compilerOptions": {
    "module": "commonjs",
    "resolveJsonModule": true,
    "esModuleInterop": true
  }
}
A sample project demonstrating the above can be found here.
I just wanted to add one more thing, which might be even easier. You can simply write the content of your JSON file in a data.js file, or whatever name you would like to use, and export it as an array. Then you can just require the data.js module.
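For instance, a minimal sketch of that approach (the file name and its contents are only placeholders, and observableArrayModule is assumed to be in scope as in your snippet):
// shared/resources/data.js (placeholder content standing in for sa.json)
module.exports = [
  { id: 1, name: 'first' },
  { id: 2, name: 'second' }
];

// somewhere else in the app
var sa = require('./shared/resources/data');
var array = new observableArrayModule.ObservableArray(sa);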