I want to define my Mongoose schema from a JSON file. This is my JSON file structure:
{
    "default": [
        {
            "item": "productTitle",
            "label": "Product Title",
            "note": "e.g. Samsung GALAXY Note 4",
            "type": "text",
            "required": "Product Name cannot be blank..."
        },
        {
            "item": "productCode",
            "label": "Product Code",
            "type": "text",
            "required": "Product Code cannot be blank..."
        }
    ]
}
This is my Node.js model:
// Load the module dependencies
var mongoose = require('mongoose'),
    Schema = mongoose.Schema;
var fs = require('fs');

var file = __dirname + '/product.server.model.json';

// Read the JSON file
fs.readFile(file, 'utf8', function (err, data) {
    data = JSON.parse(data);
    var productJson = {};
    for (var i = 0; i < data.default.length; i++) {
        productJson[data.default[i].slug] = {
            type: 'String',
            required: data.default[i].required,
            default: '',
            trim: true
        };
    }
});
// Define a new 'ProductSchema'
var ProductSchema = new Schema(
// Here I want to put JSON Data 'productJson'
);
// Create the 'Product' model out of the 'ProductSchema'
mongoose.model('Product', ProductSchema);
I tried every possible way to define the Mongoose schema from the JSON data 'productJson', but unless I pre-define the schema it does not work. Is there any way to define a Mongoose schema from JSON data in my model? Any suggestions?
fs.readFile is an asynchronous function, which means it returns immediately and later provides its result to the caller via the callback function you pass as the third parameter.
As such, you need to hold off on using productJson until it's populated within that callback. That means moving your schema and model creation inside the callback as well.
fs.readFile(file, 'utf8', function (err, data) {
    if (err) throw err;
    data = JSON.parse(data);
    var productJson = {};
    for (var i = 0; i < data.default.length; i++) {
        // Changed .slug to .item here as I don't see slug in the JSON
        productJson[data.default[i].item] = {
            type: 'String',
            // [true, message] is Mongoose's documented way to attach a custom error message
            required: [true, data.default[i].required],
            default: '',
            trim: true
        };
    }
    // Define a new 'ProductSchema'
    var ProductSchema = new Schema(productJson);
    // Create the 'Product' model out of the 'ProductSchema'
    mongoose.model('Product', ProductSchema);
});
Another alternative is to use the synchronous fs.readFileSync method to read the file instead. This is helpful in startup/initialization cases like this one, where your application as a whole shouldn't proceed until the file is processed.
var data = fs.readFileSync(file, 'utf8');
data = JSON.parse(data);
var productJson = {};
for (var i = 0; i < data.default.length; i++) {
    // Changed .slug to .item here as I don't see slug in the JSON
    productJson[data.default[i].item] = {
        type: 'String',
        // [true, message] is Mongoose's documented way to attach a custom error message
        required: [true, data.default[i].required],
        default: '',
        trim: true
    };
}
// Define a new 'ProductSchema'
var ProductSchema = new Schema(productJson);
// Create the 'Product' model out of the 'ProductSchema'
mongoose.model('Product', ProductSchema);
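Once the model is registered, any module can look it up by name. A minimal usage sketch, assuming the file above is saved as ./product.server.model.js and a connection is already open (the field values here are made up):
var mongoose = require('mongoose');
require('./product.server.model'); // registers the 'Product' model (path assumed)

var Product = mongoose.model('Product');
// Field names come from the JSON file: productTitle, productCode
Product.create(
    { productTitle: 'Samsung GALAXY Note 4', productCode: 'N910' },
    function (err, doc) {
        if (err) return console.error(err);
        console.log('Saved:', doc.productTitle);
    }
);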
I have a JSON file with nested arrays of varying length. That is, each object has an ARR with a different number of objects.
{ "count": 200,
"objects": [
{
"id": "FIRST",
"b": "two",
"c": "three",
"ARR": [{
"aa": "onion ",
"bb": 2,
"cc": "peanuts"},
},
{
"aa": "Jam ",
"bb": 4,
"cc": "Bread"},
}],
"d":"four"
]
}, . . . on and on
I have imported the JSON data to my JavaScript file:
const data = JSON.parse(require('fs').readFileSync('./jsonfiles/objects.JSON', 'utf8'))
// trim data down to the objects of interest
const objs = data.objects;
I'm using Sequelize to write this to a MySQL database. I have two models: Model1, which hasMany of the ARR sets, and Model2, which belongsTo Model1.
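For reference, the association might be declared like this (the model and foreign-key names here are assumptions, not from the original):
// Hypothetical association setup; adjust names to your schema
db.Model1.hasMany(db.Model2, { foreignKey: 'model1Id' });
db.Model2.belongsTo(db.Model1, { foreignKey: 'model1Id' });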
Writing to table1 from Model1 works well like this:
for (var key in objs) {
    var item = objs[key];
    db.Model1.create({
        modelID: item.id,
        modelB: item.b,
        modelC: item.c
    });
}
Now I'm trying to write ARR to the associated model and am stumped on how to do this:
I do not know how many objects will be in each ARR.
Storing ARR as a JSON object in table1 won't serve well later.
This is the function I created for our company's API. It took me a week to put together, but hopefully this helps.
exports.functionName = async (req, res) => {
    const params = req.params.something;
    if (!params) {
        res.status(httpStatusCodes.BAD_REQUEST).send({ message: 'Please provide params' });
        return;
    }
    // 'function' is a reserved word and cannot be a variable name, so the
    // mapped rows get a regular name; inputs, uniqueID, httpStatusCodes and
    // productInput come from the surrounding module
    const rows = inputs.map(prop => ({
        propOne: uniqueID,
        propTwo: prop.value,
    }));
    const value = await productInput.bulkCreate(rows);
    if (!value || value.length < 1) {
        res.status(httpStatusCodes.INTERNAL_SERVER_ERROR).send({ message: 'No inputs were updated for this product.' });
        return;
    }
    res.send(value);
};
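Applied to the question's data, a minimal sketch might look like the following. Model2's column names (colAA, colBB, colCC) and the model1Id foreign key are assumptions; adjust them to your schema:
// Create each parent row, then bulk-insert its ARR entries with the
// parent's primary key as the foreign key.
async function importObjects(objs) {
    for (const item of objs) {
        const parent = await db.Model1.create({
            modelID: item.id,
            modelB: item.b,
            modelC: item.c
        });
        // One child row per ARR entry, however many there are
        const children = item.ARR.map(a => ({
            colAA: a.aa,
            colBB: a.bb,
            colCC: a.cc,
            model1Id: parent.id
        }));
        await db.Model2.bulkCreate(children);
    }
}
bulkCreate inserts the whole array in one query, so the varying length of each ARR is not a problem.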
I just tried to run the Jsonix purchase order sample. I did it as described on the highsource website.
What makes me wonder is that, although this sample is based on an XSD, validation of the incoming XML applies to elements with child nodes but not to simple tags.
This will show an error:
... <item_will_cause_error partNum="926-AA">
<productName>Baby Monitor</productName>
<quantity>1</quantity>
<USPrice>39.98</USPrice>
<shipDate>1999-05-21</shipDate>
... </item_will_cause_error>
This will not:
... <item partNum="926-AA">
<productName>Baby Monitor</productName>
<quantity_will_cause_error>1</quantity_will_cause_error>
<USPrice>39.98</USPrice>
<shipDate>1999-05-21</shipDate>
... </item>
So, is it possible to switch on strict validation? After all, <quantity_will_cause_error> is not a valid element.
Kind regards
Markus
Now I use this:
var Jsonix = require('jsonix').Jsonix;
// Include or require PO.js so that the PO variable is available
// For instance, in Node.js:
var PO = require('./mappings/PO').PO;
// First we construct a Jsonix context - a factory for the unmarshaller
// (parser) and the marshaller (serializer)
var context = new Jsonix.Context([ PO ]);
// Then we create an unmarshaller
var unmarshaller = context.createUnmarshaller();
// Load the JSON Schemas needed to validate the unmarshalled result
var fs = require('fs');
var Ajv = require('ajv');
var XMLSchemaJsonSchema = JSON.parse(fs.readFileSync(
    './node_modules/jsonix/jsonschemas/w3c/2001/XMLSchema.jsonschema')
    .toString());
var JsonixJsonSchema = JSON.parse(fs.readFileSync(
    './node_modules/jsonix/jsonschemas/jsonix/Jsonix.jsonschema')
    .toString());
var POJsonSchema = JSON.parse(fs.readFileSync(
    './mappings/PO.jsonschema').toString());
var ajv = new Ajv();
ajv.addSchema(XMLSchemaJsonSchema,
    'http://www.jsonix.org/jsonschemas/w3c/2001/XMLSchema.jsonschema');
ajv.addSchema(JsonixJsonSchema,
    'http://www.jsonix.org/jsonschemas/jsonix/Jsonix.jsonschema');
var validate = ajv.compile(POJsonSchema);
// Unmarshal an object from the XML file; the callback receives
// the result of the unmarshalling
unmarshaller.unmarshalFile('./po.xml', function (unmarshalled) {
    var po_ = unmarshalled;
    var valid = validate(po_);
    if (!valid) {
        console.log('Validation failed.');
        console.log('Validation errors:');
        console.log(validate.errors);
    }
});
The result looks like this:
Validation failed.
Validation errors:
[ { keyword: 'type',
dataPath: '.value.items.item[1].shipDate.timezone',
schemaPath: '#/definitions/integer/type',
params: { type: 'integer,null' },
message: 'should be integer,null' },
{ keyword: 'type',
dataPath: '.value.items.item[1].shipDate',
schemaPath: '#/anyOf/1/type',
params: { type: 'null' },
message: 'should be null' },
{ keyword: 'anyOf',
dataPath: '.value.items.item[1].shipDate',
schemaPath: '#/anyOf',
params: {},
message: 'should match some schema in anyOf' },
{ keyword: 'enum',
dataPath: '.name.localPart',
schemaPath: '#/anyOf/1/properties/name/allOf/1/properties/localPart/enum',
params: { allowedValues: [Object] },
message: 'should be equal to one of the allowed values' },
{ keyword: 'anyOf',
dataPath: '',
schemaPath: '#/anyOf',
params: {},
message: 'should match some schema in anyOf' } ]
But this makes me wonder again: dataPath: '', an error on the root?
I have a simple set of JSON data that I am pulling from a local file and loading into a DataTable.
Using YUI, how can I filter the response of this request so that it matches only the data that is relevant to the request?
// 'json-parse' added to the module list so that Y.JSON.parse is available
YUI().use('aui-datatable', 'datatable-sort', 'aui-io-request', 'aui-tabview', 'datasource-io', 'json-parse',
function(Y) {
var columns = [{
key : 'id',
sortable : true
}, {
key : 'name',
sortable : true
},{
key : 'price',
sortable : true
}];
var dataTable = new Y.DataTable({
columns : columns
}).render("#searchResultsTab");
var node = Y.one('#searchButton');
var criteria = document.getElementById("searchCriteria");
node.on(
'click', //on Search..
function(){
var dataSource = new Y.DataSource.IO({source:'mydata.json'});
var request = document.getElementById("searchBox").value;
dataSource.sendRequest({
on: {
success: function(e){
var response = e.data.responseText;
var jdata = Y.JSON.parse(response);
dataTable.set('data', jdata.info); //setting table data to json response
},
failure: function(e){
alert(e.error.message);
}
}
});
}
);
new Y.TabView(
{
srcNode: '#searchResultsContainer'
}
).render();
});
mydata.json
{"info" : [
{"id": 1,"name": "A green door","price": 12.50 },
{"id": 2,"name": "A blue door","price": 10.50 },
{"id": 3,"name": "A red door","price": 8.50 }
}
In your success handler, filter the response data before setting the DataTable's data. Here is an example of model-list filtering: http://yuilibrary.com/yui/docs/model-list/#filtering-models
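A minimal sketch of that idea, assuming the field names from mydata.json above (id, name, price) and using the searchBox value as the filter term:
dataSource.sendRequest({
    on: {
        success: function (e) {
            var jdata = Y.JSON.parse(e.data.responseText);
            var term = document.getElementById("searchBox").value.toLowerCase();
            // keep only the rows whose name contains the search term
            var filtered = jdata.info.filter(function (row) {
                return row.name.toLowerCase().indexOf(term) !== -1;
            });
            dataTable.set('data', filtered);
        },
        failure: function (e) {
            alert(e.error.message);
        }
    }
});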
I have multiple JSON objects:
json1 = [
{'category_id':1,'name':'test1' },
{'category_id':1,'name':'test2' },
{'category_id':1,'name':'test3' },
{'category_id':2,'name':'test2' },
{'category_id':3,'name':'test1' }
];
json2 = [{'category_id':1,'type':'test1'}];
json3 = [
{'category_id':1,'color':'black'},
{'category_id':2,'color':'black'},
{'category_id':3,'color':'white'}
];
I am expecting output like this:
final = [
    {'category_id':1,'name':'test1','type':'test1','color':'black' },
    {'category_id':1,'name':'test2','type':'test1','color':'black' },
    {'category_id':1,'name':'test3','type':'test1','color':'black' },
    {'category_id':2,'name':'test2','color':'black' },
    {'category_id':3,'name':'test1','color':'white' }
];
As my JSON objects are long, is looping a good idea or not? Is there any built-in function for doing this?
Using Underscore you can achieve it via:
Demo Fiddle
var jsons = [json1, json2, json3];
var final = {};
// merge all data
_.each(jsons, function (jsonArr) {
_.each(jsonArr, function (json) {
final[json.category_id] = _.extend({}, final[json.category_id], json);
});
});
// map it back onto json1
var finalArr = _.map(json1, function (json) {
return final[json.category_id];
});
console.log(finalArr);
Final value of finalArr:
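For the sample data above, that works out to the following. Note that, because all records with the same category_id are merged into one object, the name field collapses to the last value seen per category, which differs slightly from the expected output:
[
    {'category_id':1,'name':'test3','type':'test1','color':'black'},
    {'category_id':1,'name':'test3','type':'test1','color':'black'},
    {'category_id':1,'name':'test3','type':'test1','color':'black'},
    {'category_id':2,'name':'test2','color':'black'},
    {'category_id':3,'name':'test1','color':'white'}
]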
Here is how you can do the same in plain JavaScript. :)
var final = {};
var jsons = [json1, json2, json3];
// merge each object into one record per category_id
jsons.forEach(function (arr) {
    arr.forEach(function (obj) {
        final[obj.category_id] = Object.assign({}, final[obj.category_id], obj);
    });
});
var finalArr = json1.map(function (obj) { return final[obj.category_id]; });
Fiddle
EDIT:
Well, you will have to do it programmatically!
I have a complex JSON Object like this:
var requestData = {
    __batchRequests: [ {
        __changeRequests: [ {
            requestUri: "Customers",
            method: "POST",
            headers: { "Content-ID": "1" },
            data: { CustomerID: 400, CustomerName: "John" }
        } ]
    } ]
};
I am trying to do two things:
1. Declare this object, but with the data property empty.
2. With a loop, add items dynamically to the data object.
How can I do it?
This isn't too complex an object. And it isn't JSON until it's converted into a string.
Right now, it's just plain-ol' JS objects and arrays.
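For instance, actual JSON only appears once you serialize, and parsing turns it back into plain objects:
var json = JSON.stringify(requestData); // now it's JSON (a string)
var backAgain = JSON.parse(json);       // plain objects and arrays again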
Breaking that down into its elements might look like this:
var requestData = {};
requestData.__batchRequests = [];
requestData.__batchRequests[0] = {};
requestData.__batchRequests[0].__changeRequests = [];
requestData.__batchRequests[0].__changeRequests[0] = {};
requestData.__batchRequests[0].__changeRequests[0].requestUri = "Customers";
requestData.__batchRequests[0].__changeRequests[0].method = "POST";
requestData.__batchRequests[0].__changeRequests[0].headers = { "Content-ID" : "1" };
requestData.__batchRequests[0].__changeRequests[0].data = {};
Aside from the repeats, what do you see?
Personally, I see that __changeRequests[0] is an object as simple as:
var changeRequest = {
requestUri : "Customers",
method : "POST",
headers : { "Content-ID" : "1" },
data : {}
};
I also see that I can just push that onto my array of change requests:
requestData.__batchRequests[0].__changeRequests.push(changeRequest);
Right?
I also know that my changeRequest variable still points to the one that I just added to the array, and whatever I change on the object will show up as changed in the array's reference to the object, too:
changeRequest.data.CustomerName = "Bob";
changeRequest.data.CustomerID = "204";
requestData.__/*...*/changeRequests[0].data.CustomerName; // Bob
So how about writing yourself some helper functions?
function extend (obj, additions) {
    var key;
    // copy each of additions' own properties onto obj
    for (key in additions) {
        if (additions.hasOwnProperty(key)) {
            obj[key] = additions[key];
        }
    }
    return obj;
}
function makeChangeRequest (url, method, headers, data) {
var request = {
requestUri : url,
method : method,
headers : {},
data : {}
};
extend(request.headers, headers);
extend(request.data, data);
return request;
}
function getBatch (num) { return requestData.__batchRequests[num]; }
var changeReq = makeChangeRequest("Customers",
"POST",
{ "Content-ID" : "1" },
{ CustomerName : "Bob", CustomerID : "2012" });
var batch = getBatch(0);
batch.__changeRequests.push(changeReq);
If you want to add more data to changeReq.data later:
extend(changeReq.data, { Address : "33 Nowhere Rd.", City : "Splitsville" });
For the first part of your question, you can initialize data as an empty object:
var requestData = { __batchRequests: [ { __changeRequests: [
{ requestUri: "Customers", method: "POST", headers: { "Content-ID": "1" }, data: {} }
] } ] };
This next part assumes, perhaps incorrectly, that you can use jQuery. It also assumes that you have an array containing all of the relevant key value pairs.
var customerDeetsArray = [{CustomerID: 400}, {CustomerName: "John"}];
// data lives on the first change request, not on requestData itself
var data = requestData.__batchRequests[0].__changeRequests[0].data;
for (var i = 0; i < customerDeetsArray.length; i++) {
    $.extend(data, customerDeetsArray[i]);
}
See working example which makes use of console.debug:
http://jsfiddle.net/4Rh72/6/