I'm trying to read in my .CSV file and output a .json file using npm's csvtojson
I am using the following code:
//Converter Class
var Converter = require("csvtojson").Converter;
var converter = new Converter({});
var fs = require("fs");

//end_parsed will be emitted once parsing finished
converter.on("end_parsed", function (jsonArray) {
    console.log(jsonArray); //here is your result jsonarray
});

fs.createReadStream('data.csv')
    .pipe(csv2json({
        // Defaults to comma.
        separator: '|'
    }))
    .pipe(fs.createWriteStream('dataOut.json'));
However, I'm running into the error "csv2json is not defined"
Does anyone know why I'm running into this error, despite including "csvtojson" on the first line?
csv2json is not defined anywhere in your code. The canonical example for csv2json (from https://github.com/julien-f/csv2json) is:
var csv2json = require('csv2json');
var fs = require('fs');

fs.createReadStream('data.csv')
    .pipe(csv2json({
        // Defaults to comma.
        separator: ';'
    }))
    .pipe(fs.createWriteStream('data.json'));
So the simple answer is to change your first line to var csv2json = require('csv2json');. However, this would cause an error in your attempt to have the end_parsed event fire. To listen for the end of the conversion, use Node's stream events:
var stream = fs.createReadStream('data.csv')
    .pipe(csv2json({
        // Defaults to comma.
        separator: '|'
    }));

// The 'end' event itself carries no data, so collect the output as it streams past.
var data = '';
stream.on('data', function (chunk) {
    data += chunk;
});
stream.on('end', function () {
    console.log(JSON.parse(data)); // here is your result jsonArray
});

stream.pipe(fs.createWriteStream('dataOut.json'));
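Alternatively, if you want to keep using csvtojson (the package your first line already requires), something along these lines should work. This is a minimal sketch assuming the v1 Converter API; the option names may differ in newer major versions, so check against the version you have installed.

var Converter = require("csvtojson").Converter;
var fs = require("fs");

// Assumes the v1 constructor accepts a delimiter option for pipe-separated input.
var converter = new Converter({ delimiter: "|" });

// end_parsed fires once the whole CSV has been parsed into an array of objects.
converter.on("end_parsed", function (jsonArray) {
    fs.writeFileSync("dataOut.json", JSON.stringify(jsonArray, null, 2));
    console.log(jsonArray);
});

fs.createReadStream("data.csv").pipe(converter);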
This is how I implement it; hope it helps.
let csv2Json = require('convert-csv-to-json');

let fileInputName = 'stores.csv';
let fileOutputName = 'stores.json';

csv2Json.fieldDelimiter(',').generateJsonFileFromCsv(fileInputName, fileOutputName);

// To display the JSON you created on the terminal.
let json = csv2Json.getJsonFromCsv("stores.csv");
for (let i = 0; i < json.length; i++) {
    console.log(json[i]);
}
Once you run the file, it will create the JSON file; if the file already exists, it will be overwritten.
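If your input uses a different delimiter, such as the '|' from the original question, the same call chain should still work; a small sketch, assuming fieldDelimiter accepts an arbitrary single-character delimiter:

let csv2Json = require('convert-csv-to-json');

// Hypothetical pipe-delimited input file, matching the separator from the question.
csv2Json.fieldDelimiter('|').generateJsonFileFromCsv('stores.csv', 'stores.json');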
I want to convert a fairly unorganized and unstructured text file to JSON format. I want to be able to use the city ID information. Is there any way I can convert this to JSON?
UPDATE: I also found this solution after a while. Very simple way to get the JSON of any tab separated text file.
https://shancarter.github.io/mr-data-converter/
You can try tsv2json; this tool reads a TSV file from stdin and writes a JSON file to stdout.
It's distributed as a source file; to compile it, you need to download the D compiler and then run dmd tsv2json.d.
If you have a more complex task, there is another tool named tsv-utils.
TSV to JSON in Node.js
var file_name = 'city_list.txt';
var readline = require('readline');
var fs = require('fs');

var lineReader = readline.createInterface({
    input: fs.createReadStream(file_name)
});

var isHeader = false;
var columnNames = [];

// Split a tab-separated line into its values.
function parseLine(line) {
    return line.trim().split('\t');
}

// Map one row's values onto the column names taken from the header.
function createRowObject(values) {
    var rowObject = {};
    columnNames.forEach((value, index) => {
        rowObject[value] = values[index];
    });
    return rowObject;
}

var json = {};
json[file_name] = [];

lineReader.on('line', function (line) {
    if (!isHeader) {
        // The first line holds the column names.
        columnNames = parseLine(line);
        isHeader = true;
    } else {
        json[file_name].push(createRowObject(parseLine(line)));
    }
});

lineReader.on('close', function () {
    fs.writeFileSync(file_name + '.json', JSON.stringify(json, null, 2));
});
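To illustrate the output shape: assuming a hypothetical city_list.txt whose tab-separated header line has the columns id and name, followed by one data row with the values 1 and London, the script would write city_list.txt.json along these lines:

{
  "city_list.txt": [
    {
      "id": "1",
      "name": "London"
    }
  ]
}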
I am getting an error that "directory is not defined". I really don't know whether it refers to my code or my JSON file. Here is the JSON file, whose format I checked with https://jsonformatter.curiousconcept.com/:
[
    {
        "directory": "C:\\Users\\PCAdmin\\Downloads\\jsonin",
        "dirID": "dir01"
    },
    {
        "directory": "C:\\Users\\PCAdmin\\Downloads\\jsonout",
        "dirID": "dir02"
    }
]
Here is my code, which based on the examples I've seen should work, yet I can't get past the error:
'use strict';
var fs = require('fs');
var jsonObj = require("./jsonDirectories.json");
for (directory in jsonObj) {
    console.log("Dir: " + jsonObj.directory);
}
I'm sure it's something stupid, but any direction would be appreciated.
The error means that the variable directory on line 4 of your code was never declared; in strict mode, assigning to an undeclared variable throws this ReferenceError. The following code will fix that bug:
'use strict';
var fs = require('fs');
var jsonObj = require("./jsonDirectories.json");
for (var dirInfo in jsonObj) {
    console.log("Dir: " + dirInfo.directory);
}
However, this still does not do what you want, because for...in does not work this way for arrays: it iterates over the keys (the indexes), not the values. for...in is generally used to get the keys of objects (and even then should be used carefully).
To loop over your array of directory info, what you want is the following:
'use strict';
var fs = require('fs');
var jsonObj = require('./jsonDirectories.json');
jsonObj.forEach(function(dirInfo) {
    console.log('Dir: ' + dirInfo.directory);
});
(I also made the quoting consistent instead of mixing single and double quotes, which is good practice.)
You need to declare the directory variable before using it:
for (let item of jsonObj) {
    // This line also needed fixing:
    console.log("Dir: ", item.directory);
}
When you use for (directory in jsonObj) in Node.js, directory is assigned the index of each item, not the value, so you would have to use jsonObj[directory] to get the directory object.
But in my mind, this alternative is better:
jsonObj.forEach(function(directory) {
    console.log("Dir: " + directory.directory);
});
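To make the difference concrete, here is a small sketch using the two objects from your JSON file, showing what each loop form hands you:

'use strict';
var jsonObj = [
    { directory: 'C:\\Users\\PCAdmin\\Downloads\\jsonin', dirID: 'dir01' },
    { directory: 'C:\\Users\\PCAdmin\\Downloads\\jsonout', dirID: 'dir02' }
];

// for...in gives the array indexes ('0', '1'), so you have to index back in:
for (var idx in jsonObj) {
    console.log('Dir: ' + jsonObj[idx].directory);
}

// for...of (like forEach) gives the values themselves:
for (var item of jsonObj) {
    console.log('Dir: ' + item.directory);
}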
I have a large javascript object that I want to convert to JSON and write to a file. I thought I could do this using streams like so
var fs = require('fs');
var JSONStream = require('JSONStream');
var st = JSONStream.stringifyObject()
    .pipe(fs.createWriteStream('./output_file.js'))
st.write(large_object);
When I try this I get an error:
stream.js:94
        throw er; // Unhandled stream error in pipe.
        ^
TypeError: Invalid non-string/buffer chunk
    at validChunk (_stream_writable.js:153:14)
    at WriteStream.Writable.write (_stream_writable.js:182:12)
So apparently I can't just write an object to stringifyObject. I'm not sure what the next step is. Do I need to convert the object to a buffer? Run the object through some conversion stream and pipe it to stringifyObject?
JSONStream doesn't work that way, but since your large object is already loaded into memory, there is no point in streaming it anyway.
var fs = require('fs-extra');
var file = '/tmp/this/path/does/not/exist/file.txt';
fs.outputJson(file, {name: 'JP'}, function (err) {
    console.log(err); // => null
});
That will write the JSON.
If you want to use JSONStream you could do something like this:
var fs = require('fs');
var jsonStream = require('JSONStream');

var fl = fs.createWriteStream('dat.json');
var out = jsonStream.stringifyObject();
out.pipe(fl);

var obj = { test: 10, ok: true };
// stringifyObject expects [key, value] pairs, one per write.
for (var key in obj) out.write([key, obj[key]]);
out.end();
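If the data you are writing is really an array of items rather than an object, JSONStream.stringify() is the closer fit; a sketch, with one element per write instead of a [key, value] pair:

var fs = require('fs');
var jsonStream = require('JSONStream');

var out = jsonStream.stringify(); // produces a JSON array on the readable side
out.pipe(fs.createWriteStream('arr.json'));

[{ test: 10 }, { ok: true }].forEach(function (item) {
    out.write(item); // each write becomes one array element
});
out.end();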
Well, the question is quite old but still relevant today. I faced the same issue but solved it using the JsonStreamStringify package.
const { JsonStreamStringify } = require("json-stream-stringify");
Now,
const jsonStream = new JsonStreamStringify(cursor);

// Piping handles the writes; no manual res.write() calls are needed.
jsonStream.pipe(res);
Here you can read your file using fs and then use the code above; 'cursor' should point to your file.
This way, you can stream your file out as valid JSON.
For Docs:
https://www.npmjs.com/package/json-stream-stringify
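For the original question (streaming a large in-memory object to a file rather than to an HTTP response), the same package can be pointed at a write stream; a minimal sketch:

const fs = require("fs");
const { JsonStreamStringify } = require("json-stream-stringify");

const largeObject = { hello: "world" }; // stand-in for your real large object

// JsonStreamStringify is a readable stream, so it can be piped to any writable.
new JsonStreamStringify(largeObject)
    .pipe(fs.createWriteStream("./output_file.json"));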
I'm trying to create a .zip file from a JSON object in Node.js. I'm using adm-zip to do that; however, I'm unable to make it work with this code:
var admZip = require('adm-zip');
var zip = new admZip();
zip.addFile(Date.now() + '.json', new Buffer(JSON.stringify(jsonObject)));
var willSendthis = zip.toBuffer();
fs.writeFileSync('./example.zip', willSendthis);
This code creates example.zip, but I'm not able to extract it. I tried with a .zip extractor but also with this code:
var admZip = require('adm-zip');
var zip = new admZip("./example.zip");
var zipEntries = zip.getEntries(); // an array of ZipEntry records
zipEntries.forEach(function(zipEntry) {
    console.log(zipEntry.data.toString('utf8'));
});
It returns Cannot read property 'toString' of undefined at the line with console.log.
I could use zip.writeZip() for this example, but I'm sending the .zip file to Amazon S3, so I need to use the .toBuffer() method to do something like this after using adm-zip:
var params = {Key: 'example.zip', Body: zip.toBuffer()};
s3bucket.upload(params, function(err, data) {...});
I don't see what is wrong, am I using the package correctly?
Try using zipEntry.getData().toString('utf8') instead of zipEntry.data.toString('utf8'):
var admZip = require('adm-zip');
var zip = new admZip("./example.zip");
var zipEntries = zip.getEntries(); // an array of ZipEntry records
zipEntries.forEach(function(zipEntry) {
    console.log(zipEntry.getData().toString('utf8'));
});
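As a side note, new Buffer(...) is deprecated in current Node.js versions; a sketch of the write side using Buffer.from instead, with a stand-in object in place of your real data:

var admZip = require('adm-zip');
var fs = require('fs');

var zip = new admZip();
var jsonObject = { hello: 'world' }; // stand-in for your real object

// Buffer.from replaces the deprecated new Buffer(...) constructor.
zip.addFile(Date.now() + '.json', Buffer.from(JSON.stringify(jsonObject)));
fs.writeFileSync('./example.zip', zip.toBuffer());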
I am trying to read a JSON file using co and generators. test.json contains:
{
    "hello": "world"
}
This passes jsonlint so it should be valid. Here is my code at present:
#!/usr/bin/env node --harmony
var co = require('co'),
    fs = require('fs'),
    thunkify = require('thunkify');

var read = thunkify(fs.readFile);
var JSONParse = thunkify(JSON.parse);
var log = console.log.bind(console);

co(function *(){
    var fileContents = yield read('test.json', 'utf-8');
    var works = yield JSONParse(fileContents)
    log(works)
})()
Only an empty object is logged. However, fileContents is full, so it seems to be the JSON.parse part that's broken. What am I doing wrong?
Silly me: JSON.parse is synchronous, so there's no need to thunkify it.
var fileContents = yield read('public/data/works.json', 'utf-8');
log(JSON.parse(fileContents))
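Putting it together, a minimal sketch of the corrected script, keeping test.json from the question:

#!/usr/bin/env node --harmony
var co = require('co'),
    fs = require('fs'),
    thunkify = require('thunkify');

var read = thunkify(fs.readFile);
var log = console.log.bind(console);

co(function *() {
    // Only the asynchronous file read needs to be yielded; JSON.parse is synchronous.
    var fileContents = yield read('test.json', 'utf-8');
    log(JSON.parse(fileContents));
})();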