Writing a JSON object to a file in D3 v4/v5

I'm able to read a .csv file with d3.csv, stratify it, and then try to save the resulting object to a file to generate a .json file for D3 graphs. Basically I'm writing a simple CSV-to-JSON converter.
I'm using only the D3 v5 and FileSaver.js libraries. The code runs fine until it reaches the JSON.stringify call, which crashes. How can I safely save the generated root object to a .json file?
The .csv file format is also simple; the first row is a header describing the row format: id, parentId, size.
The code I'm using is straightforward:
(function () {
    "use strict";

    var filecsv = "data/Who.csv",
        filejson = "data/Who.json";

    const partition = data => {
        const root = d3.hierarchy(data)
            .sum(d => d.data.size)
            .sort((a, b) => b.value - a.value);
        return d3.partition()
            .size([2 * Math.PI, root.height + 1])
            (root);
    };

    d3.csv(filecsv).then(function (vCsvData) {
        var data = d3.stratify()(vCsvData);
        var root = partition(data);
        var blob = new Blob([JSON.stringify(data)], {type: "text/plain;charset=utf-8"}); // crash here
        saveAs(blob, filejson);
    })
    .catch(function (error) {
        console.log("Something wrong reading data from csv file...");
    });
}());
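A likely culprit is that d3.stratify() and d3.hierarchy() nodes keep a parent reference on every node, so the structure is circular and JSON.stringify throws a "Converting circular structure to JSON" error. Below is a minimal sketch of a workaround, assuming that is indeed the crash: serialize with a replacer that drops the parent links.

// Hedged sketch, not from the original post: strip the circular `parent`
// references while stringifying, then save the result as before.
var json = JSON.stringify(data, function (key, value) {
    return key === "parent" ? undefined : value;
});
var blob = new Blob([json], {type: "text/plain;charset=utf-8"});
saveAs(blob, filejson);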

Related

create file object using file path instead of input file form

Most if not all Forge samples use an input file form to get the input file, like below:
$("#inputFile").change(function () {
_fileInputForm = this;
if (_fileInputForm.files.length === 0) return;
var file = _fileInputForm.files[0];
}
What if I want to achieve the same result without any UI at all? Something like below:
const data = {
    ModelId: "dXJuOmFkc2sud2lwcHJvZDpmcy5maWxlOnZmLjNSTzhMWmtUVGVHM3p1M1FoNjRGM3c_dmVyc2lvbj0x",
    ElementId: [{ UniqueId: "ca5762b5-0f46-4a2f-8599-1d1a5dd19a81-00024b8c" }],
};
var file = new File([data], "whatever.json", { type: "application/json" });
The data above is manually copied from whatever.json; how can I do this via code? Thanks.
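A hedged sketch of one way to do this in the browser, assuming whatever.json is served at a URL the page can reach (the path used here is an assumption, not from the original post): fetch the file, then build the File object from its contents.

// Hypothetical example: load whatever.json over HTTP and wrap it in a File,
// instead of reading it from an <input type="file"> element.
fetch("data/whatever.json")
    .then(function (response) { return response.blob(); })
    .then(function (blob) {
        var file = new File([blob], "whatever.json", { type: "application/json" });
        // hand `file` to the same code path the Forge samples use
    });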

Convert .txt file to JSON

I want to convert a fairly unorganized and unstructured text file to JSON format. I want to be able to use the city ID information. Is there any way I can convert this to JSON?
UPDATE: I also found this solution after a while. It is a very simple way to get the JSON of any tab-separated text file.
https://shancarter.github.io/mr-data-converter/
You can try tsv2json; this tool reads a TSV file from stdin and writes a JSON file to stdout.
It's distributed as a source file; to compile it you need to download the D compiler and then run dmd tsv2json.d.
If you have a more complex task, there is another tool named tsv-utils.
TSV to JSON in Node.js:
var file_name = 'city_list.txt';
var readline = require('readline');
var fs = require('fs');

var lineReader = readline.createInterface({
    input: fs.createReadStream(file_name)
});

var isHeader = false;
var columnNames = [];

// Split a line on tabs.
function parseLine(line) {
    return line.trim().split('\t');
}

// Build an object from one row, keyed by the header's column names.
function createRowObject(values) {
    var rowObject = {};
    columnNames.forEach((value, index) => {
        rowObject[value] = values[index];
    });
    return rowObject;
}

var json = {};
json[file_name] = [];

lineReader.on('line', function (line) {
    if (!isHeader) {
        columnNames = parseLine(line);
        isHeader = true;
    } else {
        json[file_name].push(createRowObject(parseLine(line)));
    }
});

lineReader.on('close', function () {
    fs.writeFileSync(file_name + '.json', JSON.stringify(json, null, 2));
});
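For illustration (hypothetical data, not from the original post): if city_list.txt began with a tab-separated header line such as id, name, country followed by matching rows, the script would write city_list.txt.json containing an object keyed by the file name, whose value is an array of row objects with those column names as keys.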

Creating output for npm csvtojson

I'm trying to read in my .csv file and output a .json file using npm's csvtojson. I am using the following code:
// Converter Class
var Converter = require("csvtojson").Converter;
var converter = new Converter({});
var fs = require("fs");

// end_parsed will be emitted once parsing is finished
converter.on("end_parsed", function (jsonArray) {
    console.log(jsonArray); // here is your result jsonarray
});

fs.createReadStream('data.csv')
    .pipe(csv2json({
        // Defaults to comma.
        separator: '|'
    }))
    .pipe(fs.createWriteStream('dataOut.json'));
However, I'm running into the error "csv2json is not defined".
Does anyone know why I'm running into this error, despite including "csvtojson" on the first line?
csv2json is not defined anywhere in your code. The canonical example for csv2json (from https://github.com/julien-f/csv2json) is:
var csv2json = require('csv2json');
var fs = require('fs');

fs.createReadStream('data.csv')
    .pipe(csv2json({
        // Defaults to comma.
        separator: ';'
    }))
    .pipe(fs.createWriteStream('data.json'));
So the simple answer is to change your first line to var csv2json = require('csv2json');. However, this would cause an error in your attempt to have the end_parsed event fire. To listen to that event, use Node's stream events:
var stream = fs.createReadStream('data.csv')
    .pipe(csv2json({
        // Defaults to comma.
        separator: '|'
    }));

stream.on('end', function (jsonArray) {
    console.log(jsonArray); // here is your result jsonarray
});

stream.pipe(fs.createWriteStream('dataOut.json'));
This is how I implemented it; hope it helps.
let csv2Json = require('convert-csv-to-json');

let fileInputName = 'stores.csv';
let fileOutputName = 'stores.json';

csv2Json.fieldDelimiter(',').generateJsonFileFromCsv(fileInputName, fileOutputName);

// To display the json you created on the terminal.
let json = csv2Json.getJsonFromCsv("stores.csv");
for (let i = 0; i < json.length; i++) {
    console.log(json[i]);
}
Once you run the file, it will create the JSON file; if the file already exists, it will be overwritten.

How to use streams to JSON stringify large nested objects in Node.js?

I have a large JavaScript object that I want to convert to JSON and write to a file. I thought I could do this using streams, like so:
var fs = require('fs');
var JSONStream = require('JSONStream');

var st = JSONStream.stringifyObject()
    .pipe(fs.createWriteStream('./output_file.js'));

st.write(large_object);
When I try this I get an error:
stream.js:94
throw er; // Unhandled stream error in pipe.
^
TypeError: Invalid non-string/buffer chunk
at validChunk (_stream_writable.js:153:14)
at WriteStream.Writable.write (_stream_writable.js:182:12)
So apparently I can't just write an object to stringifyObject. I'm not sure what the next step is. Do I need to convert the object to a buffer? Run the object through some conversion stream and pipe it to stringifyObject?
JSONStream doesn't work that way, but since your large object is already loaded into memory there is no point to streaming it anyway.
var fs = require('fs-extra');

var file = '/tmp/this/path/does/not/exist/file.txt';

fs.outputJson(file, {name: 'JP'}, function (err) {
    console.log(err); // => null
});
That will write the JSON.
If you want to use JSONStream, you could do something like this:
var fs = require('fs');
var jsonStream = require('JSONStream');

var fl = fs.createWriteStream('dat.json');

var out = jsonStream.stringifyObject();
out.pipe(fl);

// stringifyObject expects each write to be a [key, value] pair.
var obj = { test: 10, ok: true };
for (var key in obj) out.write([key, obj[key]]);
out.end();
The question is quite old but still relevant today. I faced the same issue and solved it using the JsonStreamStringify package.
const { JsonStreamStringify } = require("json-stream-stringify");
Now:
// `cursor` is the value you want to serialize; piping the resulting readable
// stream to `res` writes the JSON out incrementally.
new JsonStreamStringify(cursor).pipe(res);
Here you can read your file using fs and then apply the code above; 'cursor' will be pointing to your file's contents.
In this way, you can stream the file out in valid JSON format.
For Docs:
https://www.npmjs.com/package/json-stream-stringify
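As a minimal sketch tied back to the original question (assuming large_object is the in-memory object from the question; the output file name here is illustrative), the same package can stream straight to a file:

const fs = require("fs");
const { JsonStreamStringify } = require("json-stream-stringify");

// Hedged sketch, not from the original answers: serialize incrementally
// and pipe the JSON text into a file write stream.
new JsonStreamStringify(large_object)
    .pipe(fs.createWriteStream("./output_file.json"));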

Creating a zip file from a JSON object using adm-zip

I'm trying to create a .zip file from a JSON object in Node.js. I'm using adm-zip to do that; however, I'm unable to make it work with this code:
var admZip = require('adm-zip');
var fs = require('fs');

var zip = new admZip();
zip.addFile(Date.now() + '.json', new Buffer(JSON.stringify(jsonObject)));

var willSendthis = zip.toBuffer();
fs.writeFileSync('./example.zip', willSendthis);
This code creates example.zip but I'm not able to extract it. I tried with a .zip extractor but also with this code:
var admZip = require('adm-zip');

var zip = new admZip("./example.zip");
var zipEntries = zip.getEntries(); // an array of ZipEntry records

zipEntries.forEach(function (zipEntry) {
    console.log(zipEntry.data.toString('utf8'));
});
It returns Cannot read property 'toString' of undefined at the line with console.log.
I could use zip.writeZip() for this example, but I'm sending the .zip file to Amazon S3, so I need to use the .toBuffer() method to do something like this after using adm-zip:
var params = {Key: 'example.zip', Body: zip.toBuffer()};
s3bucket.upload(params, function(err, data) {...});
I don't see what is wrong, am I using the package correctly?
Try using zipEntry.getData().toString('utf8') instead of zipEntry.data.toString('utf8'); the entry's contents are exposed through the getData() method rather than a data property:
var admZip = require('adm-zip');

var zip = new admZip("./example.zip");
var zipEntries = zip.getEntries(); // an array of ZipEntry records

zipEntries.forEach(function (zipEntry) {
    console.log(zipEntry.getData().toString('utf8'));
});