I am taking a file input from a user and I want to check if the selected file is a JSON file. How can I do that?
<input type = "file" onChange = {checkJSON}/>
Also how do I retrieve data from the JSON file.
If JSON.parse throws an error, the input is almost certainly not valid JSON. You can therefore validate the data by reading it into a string and attempting to parse it.
// Returns true when `jsonStr` parses as valid JSON, false otherwise.
// JSON.parse throws a SyntaxError on malformed input, so try/catch is
// the reliable way to validate a JSON string.
function isValidJson(jsonStr) {
  try {
    JSON.parse(jsonStr);
    return true;
  } catch (e) {
    return false;
  }
}
You can check if file extension is json and be sure that selected file is valid JSON in that way:
// Validates the file chosen in #myfile: the extension must be "json"
// (case-insensitive) and the content must parse as JSON.
// NOTE: FileReader is asynchronous, so the boolean returned here only
// reflects the extension check; content validation completes later in
// the onload handler.
function Validate() {
  var input = document.getElementById('myfile');
  // Use local declarations — the original assigned implicit globals.
  var fileName = input.value;
  var extension = fileName.split('.').pop().toLowerCase();
  if (extension !== "json") {
    alert("Sorry, " + fileName + " is invalid, allowed extension is json!");
    return false;
  }
  var file = input.files[0];
  if (!file) {
    return false; // no file actually selected
  }
  var reader = new FileReader();
  // Assign onload BEFORE starting the read to avoid any race.
  reader.onload = function (evt) {
    var jsondata = evt.target.result;
    try {
      JSON.parse(jsondata);
      // textContent, not innerHTML: the file content is untrusted and
      // must not be interpreted as markup (XSS risk).
      document.getElementById('jsondata').textContent = jsondata;
    } catch (e) {
      alert("Sorry, " + fileName + " is not valid JSON file!");
    }
  };
  reader.readAsText(file, 'UTF-8');
  return true;
};
Html content:
<script src="script2.js"></script>
File: <input type="file" id="myfile" onchange="Validate()"/><br /><br />
<div id="jsondata"></div>
You can play with my little working live demo on ReplIt (both cases - onsubmit and onchange demos)
I have some data I want to write to a file periodically, and I'd like to write it out as small JSON objects. At a later time I'd like to read them all in for processing, but a file of appended JSON objects isn't itself valid JSON.
So I stringify the JSON object and write them to file periodically and I get something like this;
{
"100": {
"stuff": 123
}
}
{
"300": {
"stuff": 456
}
}
{
"200": {
"stuff": 789
}
}
Of course when I try to parse the file with a simple script like the following;
// Attempt 1: synchronous read + parse. JSON.parse throws here because
// the file holds several JSON objects concatenated back to back, which
// is not a single valid JSON document.
var fs = require('fs');
var file = 'test.log'
var obj = JSON.parse(fs.readFileSync(file, 'utf8'));
// Attempt 2: asynchronous read. Fails for the same reason — the whole
// file content is handed to JSON.parse as one document.
// NOTE(review): `fs` is re-required here; harmless but redundant.
var fs = require('fs');
var obj;
fs.readFile(file, 'utf8', function (err, data) {
if (err) throw err;
obj = JSON.parse(data);
console.log(obj);
});
...the parser freaks out because this isn't a valid JSON file, I'd need something more like;
{
"100": {
"stuff": 123
},
"300": {
"stuff": 456
},
"200": {
"stuff": 789
}
}
...but I can't get this by simply appending records. I can of course force commas between the records before writing them, but I end up missing the '{' at the start of the file and the '}' at the end of the file, and would have an extra ',' on the last record. The whole thing reeks of being a kludge.
I'm guessing someone has worked all of this out already and there is a nice pattern for it, but I couldn't find anything from searching. Each section will have a variable amount of data, so I like the flexibility JSON offers, but I don't want to hold it all in memory before writing to disk, as it will get large.
If you could point me at a good solution for this it would be appreciated.
Sincerely, Paul.
Why don't you use a regex before processing the object list file to add a comma. Since the pattern is a new line after every object, you could use this to find and replace: /}\n{/g.
For your sample, this should do the trick :
var fs = require('fs');
var file = 'test.log';
var obj;
fs.readFile(file, 'utf8', function (err, data) {
  if (err) throw err;
  // The log is a sequence of JSON objects separated by a newline
  // ("}\n{" at each boundary). Inserting a comma at every boundary and
  // wrapping the result in brackets yields one valid JSON array.
  // \r?\n also tolerates Windows (CRLF) line endings.
  var jsonString = '[' + data.replace(/}\r?\n{/g, '},{') + ']';
  obj = JSON.parse(jsonString);
  console.log(obj);
});
Then, if you want to have an object as you specified you can use the spread operator ... to append an object to your super-object :
var fs = require('fs');
var file = 'test.log';
var obj;
fs.readFile(file, 'utf8', function (err, data) {
  if (err) throw err;
  // Insert commas at each "}\n{" object boundary (CRLF tolerated) and
  // wrap in brackets so the concatenated records parse as a JSON array.
  var jsonString = '[' + data.replace(/}\r?\n{/g, '},{') + ']';
  obj = JSON.parse(jsonString);
  // Merge every record into a single object. Object.assign mutates the
  // accumulator in place, avoiding the O(n^2) copying caused by
  // rebuilding the object with spread on every iteration.
  var superObj = {};
  for (var i = 0; i < obj.length; i++) {
    Object.assign(superObj, obj[i]);
  }
  console.log(superObj);
});
I'm trying to read a log file where each entry is a line of JSON(JSON structured text).
What I ultimately hope to do is iterate over each line and if
"Event":"SparkListenerTaskEnd"
is found that JSON line will be parsed for the values of keys "Finish Time" and "Executor CPU Time".
I'm new to node.js so not may be completely wrong but so far I've got this block of code for iterating through the file:
exports.getId(function(err, id){
console.log(id);
// BUG: fs.readFileSync is synchronous and accepts only (path, options).
// The callback passed here is silently ignored, so the split/async.map
// code below never runs — which is why this block "doesn't seem to be
// doing anything". Use fs.readFile (async), or use the string returned
// by readFileSync directly.
var data = fs.readFileSync('../PC Files/' + id, 'utf8', function(err, data) {
var content = data.split('\n');
async.map(content, function (item, callback) {
callback(null, JSON.parse(item));
}, function (err, content) {
console.log(content);
});
});
//console.log(data);
});
This doesn't seem to be doing anything though. However, I know the log file can be read as I can see it if I uncomment //console.log(data);.
Below is an example JSON line that I'm talking about:
{"Event":"SparkListenerTaskEnd","Stage ID":0,"Stage Attempt ID":0,"Task Type":"ShuffleMapTask","Task End Reason":{"Reason":"Success"},"Task Info":{"Task ID":0,"Index":0,"Attempt":0,"Launch Time":1514983570810,"Executor ID":"0","Host":"192.168.111.123","Locality":"PROCESS_LOCAL","Speculative":false,"Getting Result Time":0,"Finish Time":1514983574496,"Failed":false,"Killed":false,"Accumulables":[{"ID":22,"Name":"internal.metrics.input.recordsRead","Update":99171,"Value":99171,"Internal":true,"Count Failed Values":true},{"ID":20,"Name":"internal.metrics.shuffle.write.writeTime","Update":5893440,"Value":5893440,"Internal":true,"Count Failed Values":true},{"ID":19,"Name":"internal.metrics.shuffle.write.recordsWritten","Update":3872,"Value":3872,"Internal":true,"Count Failed Values":true},{"ID":18,"Name":"internal.metrics.shuffle.write.bytesWritten","Update":1468516,"Value":1468516,"Internal":true,"Count Failed Values":true},{"ID":10,"Name":"internal.metrics.peakExecutionMemory","Update":16842752,"Value":16842752,"Internal":true,"Count Failed Values":true},{"ID":9,"Name":"internal.metrics.diskBytesSpilled","Update":0,"Value":0,"Internal":true,"Count Failed Values":true},{"ID":8,"Name":"internal.metrics.memoryBytesSpilled","Update":0,"Value":0,"Internal":true,"Count Failed Values":true},{"ID":7,"Name":"internal.metrics.resultSerializationTime","Update":1,"Value":1,"Internal":true,"Count Failed Values":true},{"ID":6,"Name":"internal.metrics.jvmGCTime","Update":103,"Value":103,"Internal":true,"Count Failed Values":true},{"ID":5,"Name":"internal.metrics.resultSize","Update":2597,"Value":2597,"Internal":true,"Count Failed Values":true},{"ID":4,"Name":"internal.metrics.executorCpuTime","Update":1207164005,"Value":1207164005,"Internal":true,"Count Failed Values":true},{"ID":3,"Name":"internal.metrics.executorRunTime","Update":2738,"Value":2738,"Internal":true,"Count Failed 
Values":true},{"ID":2,"Name":"internal.metrics.executorDeserializeCpuTime","Update":542927064,"Value":542927064,"Internal":true,"Count Failed Values":true},{"ID":1,"Name":"internal.metrics.executorDeserializeTime","Update":835,"Value":835,"Internal":true,"Count Failed Values":true}]},"Task Metrics":{"Executor Deserialize Time":835,"Executor Deserialize CPU Time":542927064,"Executor Run Time":2738,"Executor CPU Time":1207164005,"Result Size":2597,"JVM GC Time":103,"Result Serialization Time":1,"Memory Bytes Spilled":0,"Disk Bytes Spilled":0,"Shuffle Read Metrics":{"Remote Blocks Fetched":0,"Local Blocks Fetched":0,"Fetch Wait Time":0,"Remote Bytes Read":0,"Local Bytes Read":0,"Total Records Read":0},"Shuffle Write Metrics":{"Shuffle Bytes Written":1468516,"Shuffle Write Time":5893440,"Shuffle Records Written":3872},"Input Metrics":{"Bytes Read":0,"Records Read":99171},"Output Metrics":{"Bytes Written":0,"Records Written":0},"Updated Blocks":[{"Block ID":"broadcast_1_piece0","Status":{"Storage Level":{"Use Disk":false,"Use Memory":true,"Deserialized":false,"Replication":1},"Memory Size":5941,"Disk Size":0}},{"Block ID":"broadcast_1","Status":{"Storage Level":{"Use Disk":false,"Use Memory":true,"Deserialized":true,"Replication":1},"Memory Size":9568,"Disk Size":0}},{"Block ID":"broadcast_0_piece0","Status":{"Storage Level":{"Use Disk":false,"Use Memory":true,"Deserialized":false,"Replication":1},"Memory Size":25132,"Disk Size":0}},{"Block ID":"broadcast_0","Status":{"Storage Level":{"Use Disk":false,"Use Memory":true,"Deserialized":true,"Replication":1},"Memory Size":390808,"Disk Size":0}}]}}
Update
Here is my whole code. I'm sure it's not pretty but it works. I'll now look at improving it.
var http = require("http");
var fs = require('fs');
var async = require('async');
var readline = require('readline')
//get file name
// HTTP request options for the Spark history server REST API; the
// /applications endpoint returns a JSON list of applications, each
// carrying at least an `id` and a `name`.
var options = {
"method" : "GET",
"hostname" : "xxx.xxx.xxx.xxx",
"port" : "18080",
"path" : "/api/v1/applications/"
};
exports.getId = function(callback) {
var req = http.request(options, function (res) {
var chunks = [];
res.on("data", function (chunk) {
chunks.push(chunk);
});
res.on("end", function () {
var body = JSON.parse(Buffer.concat(chunks));
var arrFound = Object.keys(body).filter(function(key) {
if (body[key].name.indexOf("TestName") > -1) {
return body[key].name;
}
}).reduce(function(obj, key){
obj = body[key].id;
return obj;
}, {});;
//console.log("ID: ", arrFound);
callback(null, arrFound);
});
});
req.end();
}
// Stream the Spark event log one line at a time (each line is a single
// JSON document) and, for every SparkListenerTaskEnd event, print the
// task's Finish Time and Executor CPU Time.
exports.getId(function (err, id) {
  if (err) throw err; // do not silently ignore a failed id lookup
  console.log(id);
  var lineReader = readline.createInterface({
    input: fs.createReadStream('../PC Files/' + id, 'utf8')
  });
  lineReader.on('line', function (line) {
    // A trailing newline yields an empty final line; JSON.parse('')
    // would throw, so skip blanks.
    if (line.trim() === '') return;
    var obj = JSON.parse(line);
    if (obj.Event === "SparkListenerTaskEnd") {
      console.log('Line from file:', obj['Task Info']['Finish Time']);
      console.log('Executor CPU Time:', obj['Task Metrics']['Executor CPU Time']);
    }
  });
});
Adam, I tried your suggested code but got the following error:
null
fs.js:646
return binding.open(pathModule._makeLong(path), stringToFlags(flags), mode);
^
Error: ENOENT: no such file or directory, open '../PC Files/null'
at Object.fs.openSync (fs.js:646:18)
at Object.fs.readFileSync (fs.js:551:33)
at /test-runner/modules/getEventLog.js:61:19
at IncomingMessage.<anonymous> (/test-runner/modules/getEventLog.js:35:13)
at emitNone (events.js:111:20)
at IncomingMessage.emit (events.js:208:7)
at endReadableNT (_stream_readable.js:1056:12)
at _combinedTickCallback (internal/process/next_tick.js:138:11)
at process._tickCallback (internal/process/next_tick.js:180:9)
At first glance, it appears you are using callbacks incorrectly.
I assume you are using the getId function like:
getId(function(error, data) {
// Do something with data
}
In which case, the callback function should be returned like:
// Remove the error, this will not be entered as a parameter
// Add callback as parameter
exports.getId(function(id, callback){
console.log(id);
var data = fs.readFileSync('../PC Files/' + id, 'utf8', function(err, data) {
var content = data.split('\n');
// Removed callback from here
// We will not have access to the
// to it here
async.map(content, function (item) {
callback(null, JSON.parse(item));
// Add callback with error in place of null
}, function (err, content) {
callback(err)
console.log(content);
});
});
//console.log(data);
});
Basically, I am trying to do an import function for csv files where the csv file will get converted to json before then being inserted into the mongodb. This is my code.
//require the csvtojson converter class
var Converter = require("csvtojson").Converter;
// create a new converter object
var converter = new Converter({});
var MongoClient = require('mongodb').MongoClient;
var url = 'mongodb://localhost:27017/myproject';

// Convert the CSV file to JSON, then insert every row into MongoDB.
// `result` is an ARRAY of row objects. The original code read fields
// off the array itself (jsonobject.indexNo), which is why every field
// arrived in the database as undefined — the fields live on each
// element, not on the array.
converter.fromFile("./NTA-SAM-Inventory-List-Security-Management-New_2017.csv", function (err, result) {
  // if an error has occured then handle it
  if (err) {
    console.log("An Error Has Occured");
    console.log(err);
    return;
  }
  // the result of the conversion
  var jsonResult = result;
  console.log(jsonResult);
  var rowCount = jsonResult.length;
  console.log(rowCount);
  // Map each CSV row to the document shape we store.
  var documents = jsonResult.map(function (row) {
    return {
      'indexNo': row.indexNo,
      'samID': row.samID,
      'Type': row.Type,
      'InventoryStatus': row.InventoryStatus,
      'dateIn': row.dateIn,
      'Remarks': row.Remarks,
      'dateOut': row.dateOut,
      'ntaSamRequestRef': row.ntaSamReqRef
    };
  });
  MongoClient.connect(url, function (err, db) {
    if (err) {
      console.log(err);
      return;
    }
    // insertMany performs one bulk async operation; the original issued
    // insertOne inside a for-loop and called db.close() on the first
    // iteration, racing with the remaining inserts.
    db.collection('documents').insertMany(documents, function (err, results) {
      if (err) throw err;
      console.log(results);
      // Log only after the async insert has actually completed.
      console.log("Inserted " + rowCount + " document into the documents collection.");
      db.close();
    });
  });
});
So far, I have tried converting a random file with 1400 records into a JSON string, parsing it, and then inserting it. But somehow I keep getting undefined for my fields whenever I insert: the result shows each of my fields with an undefined value.
Which part of my jsonobject.indexNo is wrong in the sense that is jsonobject.field1value and jsonobject.field2value etc. How should I get the values from my json string then after parsing?
I am using node.js to run it and mongodb as database. I can convert nicely just this part about inserting the documents inside. Thanks in advance!
db.collection('documents').insertOne is an async method , you can't run it in a loop like that. Workaround is you can use async to handle it. Suggest to use async.each
Eg:
// Consider jsonResult is an array
var jsonResult = result;
async.each(jsonResult,
  // jsonobject is one element of the jsonResult array
  function (jsonobject, callback) {
    db.collection('documents').insertOne({
      //'_id': Object.keys(obj).length,
      'indexNo': jsonobject.indexNo,
      'samID': jsonobject.samID,
      'Type': jsonobject.Type,
      'InventoryStatus': jsonobject.InventoryStatus,
      'dateIn': jsonobject.dateIn,
      'Remarks': jsonobject.Remarks,
      'dateOut': jsonobject.dateOut,
      //'Remarks':jsonobject.remarks,
      'ntaSamRequestRef': jsonobject.ntaSamReqRef
      //'Created Date': "<b>" + day + "/" + month + "/" + year + "</b>"
    }, function (err) {
      // BUG FIX: signal completion only once insertOne has finished,
      // and propagate its error. The original invoked callback()
      // immediately after *starting* the insert, so async.each reported
      // completion before any document was written.
      callback(err);
    });
  },
  // 3rd param runs once every insert has completed (or on first error)
  function (err) {
    if (err) throw err;
    // All tasks are done now
    doSomethingOnceAllAreDone();
  }
);
In that case I've already tried to get the returned JSON, but when I use JSON.parse or JSON.stringify it returns undefined. If I skip those and leave only data = data.toString('utf8');, it returns:
!!xxxxxxxxxxxxxxxxxxxxxxxxxxxxx.rest.schema.CtLoginResp {error: null, sessionID: 6dMX4uGVurFdLRL+hW4F2kIW}
And I want the sessionid... But If i try get this, return undefined, I try JSON.parse, JSON.stringify, see that:
My code:
var Client = require('./lib/node-rest-client').Client;
var client = new Client();
var dataLogin = {
  data: { "userName":"xxxxxxxxxxx","password":"xxxxxxxxxxxxx","platform":"xxxxxxx" },
  headers: { "Content-Type": "application/json" }
};

// Pulls the sessionID value out of the server's reply, e.g.
// "!!...CtLoginResp {error: null, sessionID: 6dMX4uGVurFdLRL+hW4F2kIW}".
// The body is NOT valid JSON (keys and values are unquoted), which is
// why JSON.parse fails on it — match the raw text instead of relying
// on fragile positional split() calls.
function extractSessionId(text) {
  var match = /sessionID:\s*([^,}]*)/.exec(text);
  return match ? match[1].trim() : null;
}

client.registerMethod("postMethod", "xxxxxxxxxxxxxxxxxxxx/login", "POST");
client.methods.postMethod(dataLogin, function (data, response) {
  // The raw response arrives as a Buffer; decode it to a string first.
  if (Buffer.isBuffer(data)) {
    data = data.toString('utf8');
  }
  var sessionId = extractSessionId(data);
  console.log(sessionId);
});
Image return:
If I do not use the Buffer check, the output is raw bytes (21, 34, 56, etc.).
But if I do use it, data.toString() returns the whole response string shown in the image...
EDIT.
I try use split but return just the string "sessionid" see the other image:
I try same code inside W3 schools and does not work inside my code but in W3 school test works fine:
1)
2)
In the case I use regex:
// Extract the sessionID from the raw (non-JSON) login response with a
// regex, then continue with the authenticated request.
client.methods.postMethod(dataLogin, function (data, response) {
  if (Buffer.isBuffer(data)) {
    data = data.toString('utf8');
    console.log(data);
    // No /g flag needed for a single match (and a /g regex literal kept
    // around would carry stateful lastIndex between exec calls).
    var re = /(sessionID: )([^,}]*)/;
    var match = re.exec(data);
    // Guard: match is null when the pattern is absent; indexing it
    // blindly would throw a TypeError.
    if (!match) {
      console.log('sessionID not found in response');
      return;
    }
    var sessionid = match[2];
    console.log(sessionid);
    openRequest(numberOrigin);
  }
});