Grabbing a number from .JSON - json

I have a long list of data, in the following format:
[
{
"ID": "1234",
"date/time": "2016-07-18 18:21:44",
"source_address": "8011",
"lat": "40.585260",
"lng": "-105.084420",
}
]
And I am creating a script to extract the values out of each line. For example, if a line contains "ID": I want to be able to store the value "1234" into a variable, so I can store it in a different format.
Here is my code to detect "ID":
'use strict';
let lineReader = require('line-reader');

//.JSON variable captured from the input file
let id;

//begin creating new object
console.log('var source = {');

//output the file
lineReader.eachLine('dataOut.json', function (line, last) {
  // The original `id =~ /^id:$/` is Perl/Ruby syntax; in JavaScript it parses
  // as `id = ~/^id:$/` (assignment of a bitwise NOT, always truthy), so it
  // never actually tested the line. Instead, match the line against the data's
  // real shape (e.g.  "ID": "1234",) and capture the quoted value in a group.
  let match = /"ID":\s*"([^"]*)"/.exec(line);
  if (match) {
    id = match[1]; // the captured value, e.g. "1234"
    console.log('id: "' + id + '",');
  }
  //done: close the object and stop reading
  if (last) {
    console.log('}');
    return false; // stop reading
  }
});
Once I detect the ID, I'm not sure how I can obtain the value that follows the "ID" on that line.
How can I store the values on a line, after I detect which line they are on?

Unless your json file is stupidly big, you can just require it and then it's an in memory JS object.
// Node's require() can load a .json file directly; the parsed object is
// cached by the module system, so the file is read and parsed only once.
var obj = require('./dataOut.json');
// first element
console.log(obj[0]);

Related

How do i convert this json file to csv

I have this JSON which I want to convert to CSV. I have to use online tools to convert it, but they don't produce a proper CSV layout for this JSON, because my JSON is not a single flat layer. Here is what I mean:
After converting it to proper CSV I have to add some data, and then I want to convert it back to JSON in the original structure (but with the additional data) so I can upload it to Firebase.
I suppose the first layer in the JSON is the first header name. The second layer has the value from 1st header as key and the other headers/values as value. This format is probably called "Hashed CSV". The format is something like below in Typescript:
// Shape of the "hashed CSV" JSON: the single outer key is the first column
// header, the keys of its object are that column's values, and each inner
// object maps the remaining headers to their cell values.
type HashedCsv = {
[header1: string]: {
[value1: string]: {
[otherHeader: string]: string
}
}
}
So, to convert to CSV and from CSV you just need to have this type above in mind.
Below is a code snippet in JavaScript to convert the JSON to CSV. You can test it here on StackOverflow by clicking Run code snippet or with even with NodeJS.
/**
 * Converts a "hashed CSV" JSON object to CSV text.
 *
 * Expected shape: { header1: { value1: { otherHeader: cell, ... }, ... } }.
 * The outer key becomes the first column header, the second layer's keys
 * become that column's values, and each inner object supplies the remaining
 * columns.
 *
 * @param {{[header1: string]: {[value1: string]: {[otherHeader: string]: string}}}} json
 * @returns {string} CSV text: a header row, then one row per second-layer entry
 */
function jsonToCsv (json) {
  // Quote a cell when it contains a comma, a quote or a newline, doubling any
  // embedded quotes (RFC 4180). Note: no `g` flag — RegExp.prototype.test with
  // /g is stateful via lastIndex and a known source of subtle bugs.
  const escapeCell = cell => /[,"\n]/.test(cell)
    ? `"${cell.replaceAll('"', '""')}"`
    : cell
  // The first layer is the first header name
  const header1 = Object.keys(json)[0]
  const values = json[header1]
  // In the 2nd layer are the other header names
  const otherHeaders = Object.keys(Object.values(values)[0])
  const headers = [header1, ...otherHeaders]
  /** @type {string[]} */
  const csvValues = []
  for (const [value1, otherValues] of Object.entries(values)) {
    // Escape every cell — including the first-column value itself, which the
    // original version left unquoted even if it contained a comma.
    csvValues.push([
      value1,
      ...otherHeaders.map(header => otherValues[header])
    ].map(escapeCell).join(','))
  }
  return [
    headers.join(','),
    ...csvValues
  ].join('\n')
}
// Load the in-memory JSON, convert it to CSV, and print the result.
const json = loadJson()
const csv = jsonToCsv(json)
console.log(csv)
// Loads the JSON. It could be a file or from memory like below
function loadJson () {
  // Build the q1..q9 entries from a flat list of titles rather than
  // spelling out the whole nested literal by hand.
  const titles = [
    "The unexamined life is not worth living – Socrates",
    "Whereof one cannot speak, thereof one must be silent – Ludwig Wittgenstein",
    "Entities should not be multiplied unnecessarily – William of Ockham",
    "The mind is furnished with ideas by experience alone – John Locke",
    "We are too weak to discover the truth by reason alone – St. Augustine",
    "Man is the measure of all things – Protagoras",
    "Everything that exists is born for no reason, carries on living through weakness, and dies by accident – Jean-Paul Sartre",
    "To do as one would be done by, and to love one's neighbor as oneself, constitute the ideal perfection of utilitarian morality – John Stuart Mill",
    "Good and evil, reward and punishment, are the only motives to a rational creature – John Locke"
  ]
  const quotes = {}
  titles.forEach((title, index) => {
    quotes[`q${index + 1}`] = { title }
  })
  return { Quotes: quotes }
}
PS: Notice that I've stripped the JSON a bit.
try this
// Parse the "Quotes" object and flatten each property into a (title, text) pair.
var jObject = (JObject)JObject.Parse(json)["Quotes"];
// .Dump() is a LINQPad-only extension and does not compile in a normal
// project, so it has been removed.
var arr = jObject.Properties().Select(a => new { title = a.Name, text = (string)a.Value["title"] });
var sb = new StringBuilder();
foreach (var item in arr)
    // Quote both fields and double any embedded quotes so the CSV stays valid.
    sb.Append("\"" + item.title.Replace("\"", "\"\"") + "\"" + "," + "\"" + item.text.Replace("\"", "\"\"") + "\"" + Environment.NewLine);
File.WriteAllText("quotes.csv", sb.ToString());

How to extract a string into an object

I’m not sure how to explain this, but I’ll write an example of how I would create the new data using SQL. This is from a MongoDB database and I can't change anything there. I was hoping someone knows how to execute this using the Select method.
SELECT * FROM mytable
Original data
[{
"id": "2433-10",
"busiName": "ABC",
"srTypeId": "2433-10",
"nodeType": "0",
"pathName": "home",
"busiSort": 10,
"SampleInfo": "1:sql test question identifiers: itemid:12345;itemname:Ford;itemid:12345; itemlocation=USA/itemDate=2014",
"superTypeId": "002",}]
I want to extract just SampleInfo into new data:
[{
"1":"sql test question identifiers"
"itemid":"12345";
"itemname":"Ford";
"iteminfo":"it's car";
"itemlocation ":"USA";
"itemDate":"2014";
}]
With some initial sanitization (replacing "=" with ":" and "/" with ";"), maybe this is what you need:
( This is assuming that you have only single delimiter between the key/values and single delimiter between key and value )
// Build newData by parsing the delimited SampleInfo string into an object.
db.collection.aggregate([
{
$addFields: {
newData: {
// Convert the array of [key, value] pairs produced below into an object.
"$arrayToObject": {
"$map": {
"input": {
// Split "key1:value1;key2:value2;..." into the individual pairs.
$split: [
"$SampleInfo",
";"
]
},
"as": "newD",
"in": {
// Split each "key:value" into the [key, value] pair $arrayToObject needs.
"$split": [
"$$newD",
":"
]
}
}
}
}
}
}
])
Explained:
Split the SampleInfo based on the delimiter ";" (considering you have "key1:value1;key2:value2;key3:value3") into a new array called newData.
Split the keys and values based on the key/value delimiter ":" , convert them to "key":"value" pair in the newData array field.
playground just aggregation
( If you want to just parse and output )
playground update + agg pipeline 4.2+
( If you want to parse and store back to the database under new field: newData )
But of course the preferred option, as suggested above, is to sanitize and parse the data before inserting it into the database...
Same thing via JavaScript Example:
mongos> function stringToObj (string) { var obj = {}; var stringArray = string.split(';'); for(var i = 0; i < stringArray.length; i++){ var kvp = stringArray[i].split(':'); if(kvp[1]){ obj[kvp[0]] = kvp[1] } } return obj; }
mongos> db.collection.find().forEach(function(d){ d.newData=stringToObj(d.SampleInfo);db.collection.save(d); } )
mongos>
Explained:
Define JS function stringToObj ( Converting the string to object )
Loop over all documents via forEach and use the function to parse and modify the document adding new field newData with the content.

Parsing JSON error cannot read property 'url' of undefined

I have been trying to parse JSON with 3 different sets of data, where one element has a varying number of children and sometimes none. I am getting an error when no children are present or only one is present. I declared the JSON as var data.
JSON A
{
"floorplan": [
{
"title": "plan1",
"url": "https://media.plan1.pdf"
},
{
"title": "plan2",
"url": "https://media.plan2.pdf"
}
]
}
JSON B
{"floorplan": []}
JSON C
{
"floorplan": [
{
"title": "plan1",
"url": "https://media.plan1.pdf"
}
]
}
I parsed the JSON like this:
var items = JSON.parse(data);
return {
floorplan1: items.floorplan[0].url;
floorplan2: items.floorplan[1].url;
}
But, it only returned data for the JSON A, for other 2 it gave TypeError: Cannot read property 'url' of undefined.
I modified the code to check whether floorplan has at least one child and then parse the data.
var items = JSON.parse(data);
var plan = items.floorplan[0];
if(plan){
return {
floorplan1: items.floorplan[0].url;
floorplan2: items.floorplan[1].url;
}
}
The new code returned data for JSON A and B (as an empty row), but gave an error for C. C has one child, yet it still got the error.
I also tried this code, still got the error for JSON C.
var items = JSON.parse(data);
var plan = items.floorplan[0];
var plan1;
var plan2;
if(plan){
plan1 = items.floorplan[0].url;
plan2 = items.floorplan[1].url;
}
return{
floorplan1 : plan1 ? plan1 : null;
floorplan2 : plan2 ? plan2 : null;
}
Is there any method I can try to get data returned for all 3 types of JSON?
const data = `
[{"floorplan": [{
"title": "plan1",
"url": "https://media.plan1.pdf"
}, {
"title": "plan2",
"url": "https://media.plan2.pdf"
}]},
{"floorplan": []},
{"floorplan": [{
"title": "plan1",
"url": "https://media.plan1.pdf"
}]}]`;

for (const items of JSON.parse(data)) {
  // 1) Guard on the array length with ternaries.
  let result = {
    floorplan1: items.floorplan.length > 0 ? items.floorplan[0].url : '',
    floorplan2: items.floorplan.length > 1 ? items.floorplan[1].url : ''
  };
  console.log(result);
  // 2) `||` fallback to a stand-in object carrying an empty url.
  result = {
    floorplan1: (items.floorplan[0] || { 'url': '' }).url,
    floorplan2: (items.floorplan[1] || { 'url': '' }).url
  };
  console.log(result);
  // 3) Optional chaining — yields undefined when the entry is missing.
  result = {
    floorplan1: items.floorplan[0]?.url,
    floorplan2: items.floorplan[1]?.url
  };
  console.log(result);
  // 4) Destructuring with default values for the first two entries.
  const { floorplan: [first = { url: '' }, second = { url: '' }] } = items;
  result = {
    floorplan1: first.url,
    floorplan2: second.url
  };
  console.log(result);
}
Sure. A few ways, and more than I have here. I have put all the raw data into one string, parsed it into json and then iterated through that. In each loop my variable items will correspond to one of the json variables you created and referenced in your question as items.
In the first example, I check to make sure that items.floorplan has at least enough elements to contain the url I'm trying to reference, then use the ternary operator ? to output that URL if it exists or an empty string if it doesn't.
In the second example, I use the || (OR) operator to return the first object that evaluates to true. If items.floorplan[x] exists, then it will be that node, and if it doesn't I provide a default object with an empty url property on the right hand side, and then just use the url from the resulting object.
In the third, I use the optional chaining operator that was introduced in 2020. This method will return undefined if the url doesn't exist.
In the fourth example, I use destructuring to pull values out of the items variable, and make sure that there is a default value for url in case the items variable doesn't have a corresponding value.
But there are many more ways to go about it. These are just a few, and you can't necessarily say which approach is better. It's dependent on your intent and environment. With the exception of optional chaining (which shows undefined if the property doesn't exist), you can see these produce the same results.
DOCS for optional chaining: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Operators/Optional_chaining
DOCS for destructuring: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Operators/Destructuring_assignment
An article on destructuring: https://javascript.info/destructuring-assignment

Restructuring a large amount of values in a JSON file

I have a JSON file with a large amount of the following values:
"values": [
"Foo": 1,
"Bar": 2,
"Baz": 3,
...
],
How do I efficiently convert this into:
"values": [
{
"name": "Foo",
"value": 1
},
{
"name": "Bar",
"value": 2
},
{
"name": "Baz",
"value": 3
},
...
],
Any help would be appreciated!
Okay, so there are two problems with your input. The first is the fact that the given JSON is invalid, so can't directly be parsed. The square brackets after "values" should be curly brackets, to allow for a hash instead of an array:
// NOTE(review): assumes `fs` was required earlier (const fs = require('fs')) —
// the require line is not shown in this snippet.
let raw_old_data =
// Read the old file
fs.readFileSync('./input_data.json').toString()
// Remove all newlines which could interfere with the regex
.replace(/[\r\n]/g, '')
// Replace the square brackets after `"values"` with curly braces
.replace(/"values": \[(.+?)\]/g, '"values": { $1 }');
To convert this (now valid) string to a JSON object, you use JSON.parse:
// Now that the string is valid JSON, parse it into an in-memory object.
let old_data = JSON.parse(raw_old_data);
The second problem is that the format in which the values are stored doesn't match your needs. You want to convert from { "key": value } to { name: "key", value: value }. The following function can do that, assuming your version of Node supports ES6 (If not, look at Murillo's answer):
// Converts one { values: { key: value, ... } } object into an array of
// { name, value } records, preserving key order.
function fix_format(obj) {
  const records = [];
  for (const key of Object.keys(obj.values)) {
    records.push({
      name: key,
      value: obj.values[key],
    });
  }
  return records;
}
All that's then left to do is loop all data from the old object through that function with the Array.map function:
// NOTE(review): .map only exists on arrays — this assumes the parsed data is
// an array of objects, each holding a `values` hash; verify the input shape.
let new_data = old_data.map(fix_format);
And optionally write it back to a file to use with a different program:
// The original referenced an undefined variable `data` (a ReferenceError);
// the converted result is held in `new_data`.
fs.writeFileSync('./formatted_data.json', JSON.stringify(new_data, null, 2));
Note: The 2 in the JSON.stringify function indicates that the resulting JSON should be padded with 2 spaces, to keep it readable.
With ES6:
// ES6: one { name, value } object per key. The arrow body is wrapped in
// parentheses so the braces are read as an object literal, not a block.
Object.keys(values).map(key => ({ name: key, value: values[key] }))
Without ES6:
// ES5 equivalent: walk the keys with an index loop.
var keys = Object.keys(values);
var newValues = [];
for(var i = 0; i < keys.length; i++){
// Build one { name, value } record per key.
newValues.push({
name: keys[i],
value: values[keys[i]]
})
}
If your intention is to use the received data, i.e. obtain data from a DB (e.g. MSSQL, MySQL...) using connection.query(your_custom_sql_query, (err, rows, fields) => ...),
for more info:Node.js MySQL Select From Table
I'll recommend you to use:
// Serialize the first result row back to a JSON string.
const myJson = JSON.stringify(rows[0]);

Creating CSV view from CouchDB

I know this should be easy, but I just can't work out how to do it despite having spent several hours looking at it today. There doesn't appear to be a straightforward example or tutorial online as far as I can tell.
I've got several "tables" of documents in a CouchDB database, with each "table" having a different value in a "schema" field in the document. All documents with the same schema contain an identical set of fields. All I want to do is be able to view the different "tables" in CSV format, and I don't want to have to specify the list of fieldnames in each schema.
The CSV output is going to be consumed by an R script, so I don't want any additional headers in the output if I can avoid them; just the list of fieldnames, comma separated, with the values in CSV format.
For example, two records in the "table1" format might look like:
{
"schema": "table1",
"field1": 17,
"field2": "abc",
...
"fieldN": "abc",
"timestamp": "2012-03-30T18:00:00Z"
}
and
{
"schema": "table1",
"field1": 193,
"field2": "xyz",
...
"fieldN": "ijk",
"timestamp": "2012-03-30T19:01:00Z"
}
My view is pretty simple:
"all": "function(doc) {
if (doc.schema == "table1") {
emit(doc.timestamp, doc)
}
}"
as I want to sort my records in timestamp order.
Presumably the list function will be something like:
"csv": "function(head, req) {
var row;
...
// Something here to iterate through the list of fieldnames and print them
// comma separated
for (row in getRow) {
// Something here to iterate through each row and print the field values
// comma separated
}
}"
but I just can't get my head around the rest of it.
If I want to get CSV output looking like
"timestamp", "field1", "field2", ..., "fieldN"
"2012-03-30T18:00:00Z", 17, "abc", ..., "abc"
"2012-03-30T19:01:00Z", 193, "xyz", ..., "ijk"
what should my CouchDB list function look like?
Thanks in advance
The list function that works with your given map should look something like this:
function(head,req) {
var headers;
start({'headers':{'Content-Type' : 'text/csv; charset=utf-8; header=present'}});
while(r = getRow()) {
if(!headers) {
headers = Object.keys(r.value);
send('"' + headers.join('","') + '"\n');
}
headers.forEach(function(v,i) {
send(String(r.value[v]).replace(/\"/g,'""').replace(/^|$/g,'"'));
(i + 1 < headers.length) ? send(',') : send('\n');
});
}
}
Unlike Ryan's suggestion, the fields to include in the list are not configurable in this function, and any changes in order or included fields would have to be written in. You would also have to rewrite any quoting logic needed.
Here is some generic code that Max Ogden has written. While it is in node-couchapp form, you can probably get the idea:
var couchapp = require('couchapp')
, path = require('path')
;
// Design doc with a view that emits every field name found in any document;
// the _sum reduce turns that into a per-field count, so querying the view
// with ?group=true yields the full header list.
ddoc = { _id:'_design/csvexport' };
ddoc.views = {
  headers: {
    map: function(doc) {
      // Emit each top-level field name once per document.
      // (The original declared an unused `var keys = []` — removed.)
      for (var key in doc) {
        emit(key, 1);
      }
    },
    reduce: "_sum"
  }
};
ddoc.lists = {
  /**
   * Generates a CSV from all the rows in the view.
   *
   * Takes in a url encoded array of headers as an argument. You can
   * generate this by querying /_list/urlencode/headers. Pass it in
   * as the headers get parameter, e.g.: ?headers=%5B%22_id%22%2C%22_rev%5D
   *
   * @author Max Ogden
   */
  csv: function(head, req) {
    if ('headers' in req.query) {
      // NOTE(review): unescape is deprecated; it is kept here (rather than
      // decodeURIComponent) to preserve the original decoding behavior.
      var headers = JSON.parse(unescape(req.query.headers));
      var row;
      start({"headers":{"Content-Type" : "text/csv; charset=utf-8"}});
      send('"' + headers.join('","') + '"\n');
      while (row = getRow()) {
        // Index loop instead of for...in over the array, so properties added
        // to Array.prototype can never leak into the output; the duplicated
        // comma logic from both branches is hoisted to one place.
        for (var i = 0; i < headers.length; i++) {
          if (i > 0) send(",");
          var value = row.value[headers[i]];
          // Falsy cells (missing, 0, "", false) come out as empty columns,
          // matching the original behavior.
          if (value) {
            if (typeof(value) == "object") value = JSON.stringify(value);
            if (typeof(value) == "string") value = value.replace(/\"/g, '""');
            send("\"" + value + "\"");
          }
        }
        send('\n');
      }
    } else {
      send("You must pass in the urlencoded headers you wish to build the CSV from. Query /_list/urlencode/headers?group=true");
    }
  }
}
module.exports = ddoc;
Source:
https://github.com/kanso/kanso/issues/336