couchdb - Import json file

I have a file with data in JSON format. The data is odd in that each row has a varying subset of a set of properties that are not all known in advance (they will build up over time). For example:
[{"firstName":"Joe","education":"highschool","sex":"male"},
{"lastName":"Edwards","address":"ohio","sex":"male"},
{"favoriteSport":"cycling","bicycle":"raleigh","ownsBoat":"yes","ownsDog":"yes","ownsHouse":"yes"}]
A large amount of data already exists in a file, so I would like to import it into couchdb rather than enter the data item by item. I followed the procedures from a post here but, while a db was created, it was empty. I used:
curl -X PUT -d @../Data/data.json http://127.0.0.1:5984/test_import
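(A PUT to the database URL only creates the database; CouchDB ignores the body there, which would explain the empty db. Bulk loading goes through the _bulk_docs endpoint, with the documents wrapped in a "docs" array. A minimal sketch against the same db, assuming the array from data.json has first been rewrapped into a hypothetical bulk.json of the form {"docs": [ ... ]}:)

curl -X PUT http://127.0.0.1:5984/test_import
curl -X POST http://127.0.0.1:5984/test_import/_bulk_docs \
     -H "Content-Type: application/json" \
     -d @../Data/bulk.json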
UPDATE: Since I'm working with nodejs (newbie), I thought I'd try 'cradle'. My thought was to read the data in as an array and bulk-load it using cradle's db.save(). But using the following:
var fs = require('fs');
var cradle = require('cradle');

var data = fs.readFile('../Data/data.json', function (err, data) {
    if (err) {
        throw err;
    }
    .
    .
    .
    makeDB(bigdata, 'test_import'); // where 'bigdata' is an array of json objects/couchdb 'documents'
});

function makeDB(p, filename) {
    var db = new(cradle.Connection)().database(filename);
    console.log(db);
    db.save(p, function (err, res) {
        if (err) {
            console.log(err);
        } else {
            console.log('Success!');
        }
    });
}
The latter seems to work!! A database is created and filled, but it does throw one of the following errors:
k:\nodejs\node_modules\cradle\lib\cradle.js:198
return callback(body);
^
TypeError: undefined is not a function
OR
k:\nodejs\node_modules\cradle\lib\cradle.js:201
callback(null, self.options.raw ? body : new cradle.Response(body, res
^
TypeError: undefined is not a function
at Request.cradle.Connection.request [as _callback] (k:\nodejs\node_modules\cradle\lib\cradle.js:201:9)
at Request.init.self.callback (k:\nodejs\node_modules\request\main.js:120:22)
at Request.EventEmitter.emit (events.js:91:17)
at Request.<anonymous> (k:\nodejs\node_modules\request\main.js:633:16)
at Request.EventEmitter.emit (events.js:88:17)
at IncomingMessage.Request.start.self.req.self.httpModule.request.buffer (k:\nodejs\node_modules\request\main.js:595:14)
at IncomingMessage.EventEmitter.emit (events.js:115:20)
at IncomingMessage._emitEnd (http.js:366:10)
at HTTPParser.parserOnMessageComplete [as onMessageComplete] (http.js:149:23)
at Socket.socketOnData [as ondata] (http.js:1366:20)

[SOLVED]
The answers to my questions are that yes, of course, couchdb is perfectly suited to that kind of data. The easiest way I found to do the bulk import with node.js is using cradle (whose creator provided the solution to the problem). The preceding code works error-free with the following changes to the makeDB function:
// Take the array of objects and create a couchdb database
function makeDB(data, filename) {
    var db = new(cradle.Connection)().database(filename);
    //console.log(db);
    db.create(function (err) {
        if (err) console.log(err);
    });
    db.save(data, function (err) {
        if (err) console.log(err);
        console.log(filename + ' is created.');
    });
}
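For completeness, a minimal sketch of the full flow under this setup — the JSON.parse step stands in for the part elided above, assuming data.json holds a top-level array like the sample at the top:

var fs = require('fs');
var cradle = require('cradle');

fs.readFile('../Data/data.json', 'utf8', function (err, raw) {
    if (err) throw err;
    var bigdata = JSON.parse(raw); // parse the file into an array of documents
    makeDB(bigdata, 'test_import');
});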

Related

Loopback autoupdate not creating custom models

I'm trying to create an app using Loopback (V3) and I've encountered a weird error.
I'm trying to create my MySQL tables using autoupdate(), but it is not working for my custom models.
This is what I've tried so far:
var server = require('../server');
var ds = server.dataSources.db;
var models = ['test', 'Post', 'User', 'AccessToken', 'ACL', 'RoleMapping', 'Role'];

ds.isActual(models, function (err, actual) {
    if (!actual) {
        ds.autoupdate(null, function (err, result) {
            console.log("DONE!");
            console.log(result);
        });
    }
});
The script works. If the database is empty it will create tables for all models EXCEPT test and Post. Those are my custom models; the others are built into Loopback.
Is it because of the model type? (I tried Model and PersistedModel.) Or is it something else? I even tried without the isActual check and still nothing.
I would recommend that you keep two separate arrays for built-in models and custom models and write code like the following; that way you can tell where the issue is. Also, I think there is an error in your code near ds.autoupdate(null, fun.....
Please follow the code below:
var builtInModels = ['AccessToken', 'ACL', 'RoleMapping', 'Role'];
var userDefinedModels = ['Post', 'test'];

// migrate built-in models
dataSource.isActual(builtInModels, function (err, actual) {
    if (!actual) {
        dataSource.autoupdate(builtInModels, function (err, result) {
            if (err) console.log(err);
            console.log('Datasource Synced: Built in models');
        });
    }
});

// migrate user-defined models
dataSource.isActual(userDefinedModels, function (err, actual) {
    if (!actual) {
        dataSource.autoupdate(userDefinedModels, function (err, result) {
            if (err) console.log(err);
            console.log('Datasource Synced: User defined models');
        });
    }
});
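Since the two blocks differ only in the model list, the pattern can also be factored into a small helper — a sketch, where syncModels is a name of my own:

// Hypothetical wrapper around the isActual/autoupdate pattern above
function syncModels(dataSource, models, label) {
    dataSource.isActual(models, function (err, actual) {
        if (err) return console.log(err);
        if (actual) return; // tables already match the model definitions
        dataSource.autoupdate(models, function (err) {
            if (err) return console.log(err);
            console.log('Datasource Synced: ' + label);
        });
    });
}

syncModels(dataSource, ['AccessToken', 'ACL', 'RoleMapping', 'Role'], 'Built in models');
syncModels(dataSource, ['Post', 'test'], 'User defined models');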

csv from URL to json file in node.js

"New to programming"
I have a CSV file at
http://vhost11.lnu.se:20090/assig2/data1.csv
I am trying to convert it to a local json file. My code is below.
I am getting just {"X":"153","Y":"21","time":"21438"} in my data1.json.
const request = require('request')
const csv = require('csvtojson')
const fs = require('fs')

csv()
    .fromStream(request.get('http://vhost11.lnu.se:20090/assig2/data1.csv'))
    .on("json", function (jsonObj) { // single json object will be emitted for each csv line
        console.log(jsonObj);
        fs.writeFile("./data1.json", JSON.stringify(jsonObj), (err) => {
            if (err) {
                console.error(err);
                return;
            }
        });
    });
Where did I go wrong?
The callback in the json event is called once per line, so each call overwrites data1.json with a single object. You'll want to initialize an empty list in the outermost scope and push jsonObj to it from the callback. You can then write the list to the file when the input is done being read, by handling the done event.
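A sketch of that fix, keeping the question's modules and URL, and assuming a csvtojson version that emits the json and done events as the question's code does:

const request = require('request')
const csv = require('csvtojson')
const fs = require('fs')

const rows = [] // one object per CSV line

csv()
    .fromStream(request.get('http://vhost11.lnu.se:20090/assig2/data1.csv'))
    .on('json', (jsonObj) => {
        rows.push(jsonObj) // accumulate instead of overwriting the file each line
    })
    .on('done', (err) => {
        if (err) return console.error(err)
        // write the whole array once the stream has finished
        fs.writeFile('./data1.json', JSON.stringify(rows), (err) => {
            if (err) console.error(err)
        })
    })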

Error while running stubby4node using Gulp

I am trying to set up Stubby Server in my JavaScript environment and I am getting the error below.
The relevant part of my Gulpfile:
gulp.task('stubby', function (cb) {
    var options = {
        callback: function (server, options) {
            server.get(1, function (err, endpoint) {
                if (!err)
                    console.log(endpoint);
            });
        },
        stubs: 8000,
        tls: 8443,
        admin: 8010,
        files: [
            '*.*'
        ]
    };
    stubby(options, cb);
});
The error:
[12:15:03] Starting 'stubby'...
[12:15:03] 'stubby' errored after 17 ms
[12:15:03] Error: Missing error message
at new PluginError (C:\Users\admin\IdeaProjects\myproject\node_modules\gulp-util\lib\PluginError.js:73:28)
at readJSON (C:\Users\admin\IdeaProjects\myproject\node_modules\gulp-stubby-server\index.js:90:15)
at C:\Users\admin\IdeaProjects\myproject\node_modules\gulp-stubby-server\index.js:149:24
at Array.map (native)
at stubbyPlugin (C:\Users\admin\IdeaProjects\myproject\node_modules\gulp-stubby-server\index.js:136:12)
at Gulp.<anonymous> (C:\Users\admin\IdeaProjects\myproject\gulpfile.js:54:5)
at module.exports (C:\Users\admin\IdeaProjects\myproject\node_modules\orchestrator\lib\runTask.js:34:7)
at Gulp.Orchestrator._runTask (C:\Users\admin\IdeaProjects\myproject\node_modules\orchestrator\index.js:273:3)
at Gulp.Orchestrator._runStep (C:\Users\admin\IdeaProjects\myproject\node_modules\orchestrator\index.js:214:10)
at Gulp.Orchestrator.start (C:\Users\admin\IdeaProjects\myproject\node_modules\orchestrator\index.js:134:8)
Searching the gulp-stubby-server codebase for PluginError yields the following snippet:
function readJSON(filepath, options) {
    var src = fs.readFileSync(filepath, options),
        result;
    if (!options.mute) {
        gutil.log(gutil.colors.yellow('Parsing ' + filepath + '...'));
    }
    try {
        result = JSON.parse(src);
        return result;
    } catch (e) {
        throw new gutil.PluginError(PLUGIN_NAME, 'Unable to parse "' + filepath + '" file (' + e.message + ').', e);
    }
}
— Source on GitHub
You can tell this is the likely culprit from the stack trace, where the PluginError comes out of readJSON.
The issue
Take note of the catch block. This is caused by one of the files matching your glob (*.*) not being a valid JSON file.
To fix
Ensure you are using the newest version of gulp-stubby-server
Ensure that you are using the correct glob (that is, do you really mean *.*)
Ensure that all the files in the current working directory are valid JSON files
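For the second point, a narrower glob might look like this (the mocks/ path is an assumption — point files at wherever your stub definitions actually live):

var options = {
    stubs: 8000,
    tls: 8443,
    admin: 8010,
    files: [
        'mocks/*.json' // hypothetical location; match only real stub-definition files
    ]
};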

How to write and read from csv or json file in meteor

I am using papaParse
and I want to save the result of this package into a file that users can download. What is the best way to do this? I am also using this node.js code to do it:
var csv = Papa.unparse(Users.find().fetch());
console.log("csv : " + JSON.stringify(csv)); // get csv format (not in file)

fs.writeFile("meteorProject/public/", csv, function (err) {
    if (err) {
        return console.log(err);
    }
    console.log("The file was saved!");
});
but it gives this error:
{ [Error: EISDIR: illegal operation on a directory, open 'meteorProject/public/']
I20160907-13:00:26.970(4.5)? errno: -21,
I20160907-13:00:26.970(4.5)? code: 'EISDIR',
I20160907-13:00:26.970(4.5)? syscall: 'open',
I20160907-13:00:26.970(4.5)? path: 'meteorProject/public/' }
How can I resolve it?
Thanks :-)
The issue is the directory path you have given here, i.e. "meteorProject/public/": fs.writeFile needs a file path, not a directory.
So what you need to change is just the path you are using: add a file name when saving, like meteorProject/public/test.csv, or change the directory completely. I tried it on my Ubuntu machine and it runs well:
var userArray = Users.find().fetch();
var data = Papa.unparse(userArray);
console.log("data is....");
console.log(data);

fs.writeFile('/home/parveen/test/test.csv', data, function (err, res) {
    if (err) {
        console.log("err while saving");
        console.log(err);
    }
    else {
        console.log("File saved");
        console.log(res);
    }
});
The above code saves the file as test.csv in the test folder.
Please check and let me know if you are still facing any issue.
If you want to understand the error in depth, please see this link:
https://github.com/bekk/grunt-retire/issues/2
Hope this helps!
Thanks
Finally, I used this package: Meteor-Files
to solve this problem!
This is sample code:
export() {
    var csv = Papa.unparse(Users.find({}, {fields: {_id: 0}}).fetch());
    Exports.write(csv, {
        fileName: 'backup' + new Date().getTime() + '.csv',
        type: 'text/csv'
    }, function (error, fileRef) {
        if (error) {
            throw error;
        } else {
            console.log(fileRef.name + ' is successfully saved to FS. _id: ' + fileRef._id);
        }
    });
},
Exports is a collection. This is the function that helped me.
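For context, an Exports collection like that would be declared with Meteor-Files along these lines (a sketch — the storagePath is an assumption):

import { FilesCollection } from 'meteor/ostrio:files';

// server-side declaration backing the Exports.write() call above
const Exports = new FilesCollection({
    collectionName: 'Exports',
    storagePath: '/data/exports' // hypothetical directory; must be writable by the server
});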

How to generate json output through cucumberjs

I am quite new to cucumberjs and javascript and am trying to generate JSON output. I have created a hook;
this is my JsonOutputHook.js:
module.exports = function JsonOutputHook() {
    try {
        var Cucumber = require('cucumber');
        var JsonFormatter = Cucumber.Listener.JsonFormatter();
        var fs = require('fs');
        JsonFormatter.log = function (json) {
            fs.writeFile('./cucumber.json', json, function (err) {
                if (err) throw err;
            });
        };
        this.registerListener(JsonFormatter);
    }
    catch (err) {
        console.log('entered hook exception');
    }
};
And the following in my world.js:
var hooks = require('./JsonOutputHook');
//calling it like this
hooks.call(this);
But on doing so, it throws the following error:
[TypeError: this.registerListener is not a function]
I'm not sure why I am getting this error, or how I should call the hook from my world.js.
Suggestions please.
Thanks
Simit
I'm not sure, but I think it's exactly what it says: the function registerListener doesn't exist on the object your hook is bound to. Did you write that method? Because of hooks.call(this), it's bound to whatever `this` is in your world.js. You could check /cucumber/lib/listener to see if the method is there. Also, if you use Atom as a text editor you can search the project for that method (I'm pretty sure other text editors have this feature as well). As for how to call the hook, that should be in the marvelous documentation found in their git repo (https://github.com/cucumber/cucumber-js/blob/master/docs/support_files/hooks.md).
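One arrangement worth trying — a sketch, assuming the older cucumber-js support-file API that Cucumber.Listener.JsonFormatter implies: skip the manual hooks.call(this) in world.js and let cucumber-js load the hook itself, so that `this` is the support-code context that actually provides registerListener:

// features/support/json_output_hook.js
// cucumber-js requires support files on its own and binds `this` to its
// support-code context, which is what exposes registerListener.
var Cucumber = require('cucumber');
var fs = require('fs');

module.exports = function () {
    var JsonFormatter = Cucumber.Listener.JsonFormatter();
    JsonFormatter.log = function (json) {
        fs.writeFile('./cucumber.json', json, function (err) {
            if (err) throw err;
        });
    };
    this.registerListener(JsonFormatter);
};

Files under features/support are picked up automatically when you run cucumber-js against the features directory.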