remove a field from mongodb query result in golang

This is my function from mongodb-go-driver:
func MongodbFindOne(key, value string) bson.M {
    var result bson.M
    opts := options.FindOne().SetShowRecordID(false)
    _ = Collection.FindOne(context.TODO(), bson.M{key: value}, opts).Decode(&result)
    return result
}
The function works well, but I get the _id field in the result. I know the MongoDB query to exclude a field from the query result, but I don't know how to use it with the FindOne() function:
From tutorialspoint:
db.removeIdDemo.find({},{_id:0});
From mongodb query result without field name
db.collection.find({},{_id:0, t_number:1}).toArray().map(function(ele) {return ele.t_number} );
From remove _id from mongo result (nodejs):
app.get('/items', function(req, res) {
    items.find({}, { _id: 0 }).toArray(function (err, array) {
        res.send(array);
    });
});

To exclude fields from the result, use a projection. Use FindOneOptions.SetProjection() to set the projection.
To specifically exclude the _id field:
err = c.FindOne(ctx,
    bson.M{key: value},
    options.FindOne().SetProjection(bson.M{"_id": 0}),
).Decode(&result)


use mysql to update specific field(s) using expressjs

I am developing a middleware using Express.js with MySQL (I'm new to MySQL), and I have built this PATCH method to update the table. The issue is that I don't want to pass the entire field set just to update a few specific fields. What's the preferred way to do this, so that only the fields I send in the request body get updated?
const updateCompany = (req, res, next) => {
    const cid = req.params.cid;
    const {
        company_id,
        company_name,
        company_address_line1,
        company_address_line2,
        company_email,
        company_phone,
        longitude,
        latitude
    } = req.body;
    var myquery = `UPDATE Company_Master SET company_name="${company_name}",company_address_line1="${company_address_line1}",company_address_line2="${company_address_line2}",company_email="${company_email}",company_phone="${company_phone}",longitude=${longitude},latitude=${latitude} WHERE company_id = "${cid}"`
    conn.query(myquery, (err, result) => {
        if (err) {
            console.log("err" + err);
        } else {
            res.status(201).json(req.body);
        }
    })
}
You can do it as follows:
const updateCompany = (req, res, next) => {
    const cid = req.params.cid;
    let allowedcolumns = ["company_name", "company_address_line1", ... ], // all columns that can be updated
        stmts = [],
        values = [];
    for (let c of allowedcolumns) {
        if (c in req.body) { // check if there is a value for that column in the request body
            stmts.push(`${c} = ?`);
            values.push(req.body[c]);
        }
    }
    if (stmts.length == 0) {
        return res.sendStatus(204); // nothing to do
    }
    values.push(cid);
    conn.query(`UPDATE Company_Master SET ${stmts.join(", ")} WHERE company_id = ?`, values, (err, result) => {
        if (err) {
            console.log("err" + err);
            res.sendStatus(400);
        } else {
            res.status(200).json(req.body);
        }
    })
}
allowedcolumns will contain all columns that you are allowed to update via this request. For each of them, check whether there is a value in the request body. If yes, add it to the update statements; if not, ignore it (this assumes the properties in req.body and the columns in the table have the same names). Furthermore, to create a parameterized query, add the respective value to a values array that you can then pass to the query.
If you don't have any values, there is nothing to do, so you can return immediately.
Otherwise, execute the query (don't forget to also add the cid to the values array), and return the respective status based on whether there was an error or not.
BTW: 201 is status CREATED. You shouldn't use that if you are updating an already existing entity.
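For illustration, here's how a hypothetical request would flow through this handler (the body and values below are made up):
// PATCH /companies/42 with body { "company_name": "Acme", "longitude": 77.1 }
// The loop over allowedcolumns keeps only the fields present in the body:
//   stmts  = ["company_name = ?", "longitude = ?"]
//   values = ["Acme", 77.1, "42"]   // cid is appended last
// Resulting parameterized query:
//   UPDATE Company_Master SET company_name = ?, longitude = ? WHERE company_id = ?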

How to properly query Postgres JSONB field in Vapor/Fluent

I have a table with some jsonb columns created by a migration like this:
public func prepare(on database: Database) -> EventLoopFuture<Void> {
    return database.schema(MyTable.schema)
        .id()
        .field(.metadata, .custom("JSONB"), .required)
        .create()
}
I am trying to filter a query on the jsonb field. The following simple string interpolation works:
// jsonFilters is a dictionary of key-value pairs to filter on in the jsonb field
var query = MyTable.query(on: db)
var filterString = ""
var cycleCount = 0
jsonFilters.forEach({ (key, value) in
    filterString += "metadata->>'\(key)' = '\(value)' "
    cycleCount += 1
    if cycleCount < jsonFilters.count {
        filterString += " AND "
    }
})
query = query.filter(.custom(filterString))
// Also filter on something else.
query = query.filter(....)
However, this is not secure and is vulnerable to SQL injection. Is there a way to bind the filter arguments, for example using SQLQueryString? It should work in conjunction with the rest of the regular filters (last line in the code).
Just in case someone runs into the same issue, here is what works with SQLQueryString, so you can bind the parameters instead of using string interpolation:
var queryString = SQLQueryString("")
var cycleCount = 0
filter.metadata!.forEach({ (key, value) in
    queryString.appendLiteral("metadata->>")
    queryString.appendInterpolation(bind: key)
    queryString.appendLiteral(" = ")
    queryString.appendInterpolation(bind: value)
    cycleCount += 1
    if cycleCount < filter.metadata!.count {
        queryString.appendLiteral(" AND ")
    }
})

convert select request result to an array in nodejs using mysql

I am executing this SELECT request against a MySQL database in Node.js. The expected result is an array of strings containing the links: ['http://hgj','http://jfhd'], but with the code below it gives me: [{link:'http://hgj'},{link:'http://jfhd'}]. How can I remove the {link} objects and keep only the strings 'http...'?
query = "select link from weblist";
var res= await con.query(query, (error, response) => {
console.log('link from database', error || response);
var table = JSON.parse(JSON.stringify(response));
return table ;
});
There are a few simple steps involved in this solution:
Stringify the object using JSON.stringify. This is now a string.
The string contains characters you don't want (the braces, the quotes, and the link: property name), so use String.replace() to strip them out, replacing them with "" (i.e. nothing).
Return that new string: a representation of your URLs inside an array.
One-liner solution:
let oneliner = links.map((elem) => {
    return JSON.stringify(elem).replace(/[{}"]/g, "").replace(/^link:/, "");
});
console.log(oneliner);
// [ 'http://hgj', 'http://jfhd' ]
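That said, if you have the raw row objects from the query callback, plucking the property directly avoids string surgery entirely (a simpler sketch, assuming response is the array of rows):
// response = [ { link: 'http://hgj' }, { link: 'http://jfhd' } ]
var links = response.map((row) => row.link);
console.log(links); // [ 'http://hgj', 'http://jfhd' ]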

Can I define a GraphQL field to be any valid json? [duplicate]

Is it possible to specify that a field in GraphQL should be a blackbox, similar to how Flow has an "any" type? I have a field in my schema that should be able to accept any arbitrary value, which could be a String, Boolean, Object, Array, etc.
I've come up with a middle-ground solution. Rather than trying to push this complexity onto GraphQL, I'm opting to just use the String type and JSON.stringify my data before setting it on the field. So everything gets stringified, and later in my application, when I need to consume this field, I JSON.parse the result to get back the desired object/array/boolean/etc.
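A minimal sketch of that approach (the field and variable names here are hypothetical):
// Writing: stringify the arbitrary value before setting it on the String field
const payload = JSON.stringify({ theme: 'dark', tags: ['a', 'b'] });

// Reading: parse the String field back into the original structure
const settings = JSON.parse(result.payload);
console.log(settings.tags); // [ 'a', 'b' ]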
@mpen's answer is great, but I opted for a more compact solution:
const { GraphQLScalarType } = require('graphql')
const { Kind } = require('graphql/language')

const ObjectScalarType = new GraphQLScalarType({
    name: 'Object',
    description: 'Arbitrary object',
    parseValue: (value) => {
        return typeof value === 'object' ? value
            : typeof value === 'string' ? JSON.parse(value)
            : null
    },
    serialize: (value) => {
        return typeof value === 'object' ? value
            : typeof value === 'string' ? JSON.parse(value)
            : null
    },
    parseLiteral: (ast) => {
        switch (ast.kind) {
            case Kind.STRING: return JSON.parse(ast.value)
            case Kind.OBJECT: throw new Error(`Not sure what to do with OBJECT for ObjectScalarType`)
            default: return null
        }
    }
})
Then my resolvers look like:
{
    Object: ObjectScalarType,
    RootQuery: ...
    RootMutation: ...
}
And my .gql looks like:
scalar Object
type Foo {
    id: ID!
    values: Object!
}
Yes. Just create a new GraphQLScalarType that allows anything.
Here's one I wrote that allows objects. You can extend it a bit to allow more root types.
import {GraphQLScalarType} from 'graphql';
import {Kind} from 'graphql/language';
import {log} from '../debug';
import Json5 from 'json5';

export default new GraphQLScalarType({
    name: "Object",
    description: "Represents an arbitrary object.",
    parseValue: toObject,
    serialize: toObject,
    parseLiteral(ast) {
        switch(ast.kind) {
            case Kind.STRING:
                return ast.value.charAt(0) === '{' ? Json5.parse(ast.value) : null;
            case Kind.OBJECT:
                return parseObject(ast);
        }
        return null;
    }
});

function toObject(value) {
    if(typeof value === 'object') {
        return value;
    }
    if(typeof value === 'string' && value.charAt(0) === '{') {
        return Json5.parse(value);
    }
    return null;
}

function parseObject(ast) {
    const value = Object.create(null);
    ast.fields.forEach((field) => {
        value[field.name.value] = parseAst(field.value);
    });
    return value;
}

function parseAst(ast) {
    switch (ast.kind) {
        case Kind.STRING:
        case Kind.BOOLEAN:
            return ast.value;
        case Kind.INT:
        case Kind.FLOAT:
            return parseFloat(ast.value);
        case Kind.OBJECT:
            return parseObject(ast);
        case Kind.LIST:
            return ast.values.map(parseAst);
        default:
            return null;
    }
}
For most use cases, you can use a JSON scalar type to achieve this sort of functionality. There are a number of existing libraries you can just import rather than writing your own scalar -- for example, graphql-type-json.
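A rough usage sketch with Apollo-style SDL (assuming a recent graphql-type-json, which exposes a GraphQLJSON named export; the config field is made up):
const { GraphQLJSON } = require('graphql-type-json')

const typeDefs = `
  scalar JSON

  type Query {
    config: JSON
  }
`

const resolvers = {
    // wire the imported scalar to the name declared in the SDL
    JSON: GraphQLJSON,
    Query: {
        // any JSON-serializable value can flow through the scalar
        config: () => ({ theme: 'dark', tags: ['a', 'b'], retries: 3 }),
    },
}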
If you need a more fine-tuned approach, then you'll want to write your own scalar type. Here's a simple example that you can start with:
const { GraphQLScalarType, Kind } = require('graphql')

const Anything = new GraphQLScalarType({
    name: 'Anything',
    description: 'Any value.',
    parseValue: (value) => value,
    parseLiteral,
    serialize: (value) => value,
})

function parseLiteral (ast) {
    switch (ast.kind) {
        case Kind.BOOLEAN:
        case Kind.STRING:
            return ast.value
        case Kind.INT:
        case Kind.FLOAT:
            return Number(ast.value)
        case Kind.LIST:
            return ast.values.map(parseLiteral)
        case Kind.OBJECT:
            return ast.fields.reduce((accumulator, field) => {
                accumulator[field.name.value] = parseLiteral(field.value)
                return accumulator
            }, {})
        case Kind.NULL:
            return null
        default:
            throw new Error(`Unexpected kind in parseLiteral: ${ast.kind}`)
    }
}
Note that scalars are used both as outputs (when returned in your response) and as inputs (when used as values for field arguments). The serialize method tells GraphQL how to serialize a value returned in a resolver into the data that's returned in the response. The parseLiteral method tells GraphQL what to do with a literal value that's passed to an argument (like "foo", or 4.2 or [12, 20]). The parseValue method tells GraphQL what to do with the value of a variable that's passed to an argument.
For parseValue and serialize we can just return the value we're given. Because parseLiteral is given an AST node object representing the literal value, we have to do a little bit of work to convert it into the appropriate format.
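To make the literal/variable distinction concrete, here's a hypothetical query using such a scalar on an argument (the search field is made up):
// Literal argument -> handled by parseLiteral:
//   { search(filter: { tags: ["a", "b"], limit: 4.2 }) { id } }
//
// Variable argument -> handled by parseValue:
//   query ($filter: Anything) { search(filter: $filter) { id } }
//   with variables { "filter": { "tags": ["a", "b"], "limit": 4.2 } }
//
// Whatever a resolver returns for a field of type Anything passes
// through serialize on the way out to the response.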
You can take the above scalar and customize it to your needs by adding validation logic as needed. In any of the three methods, you can throw an error to indicate an invalid value. For example, if we want to allow most values but don't want to serialize functions, we can do something like:
if (typeof value == 'function') {
    throw new TypeError('Cannot serialize a function!')
}
return value
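In context, that guard would sit inside the scalar's serialize method; a sketch of the same Anything scalar with the check added:
const Anything = new GraphQLScalarType({
    name: 'Anything',
    description: 'Any value except functions.',
    parseValue: (value) => value,
    parseLiteral,
    serialize: (value) => {
        // reject values that have no JSON representation
        if (typeof value === 'function') {
            throw new TypeError('Cannot serialize a function!')
        }
        return value
    },
})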
Using the above scalar in your schema is simple. If you're using vanilla GraphQL.js, then use it just like you would any of the other scalar types (GraphQLString, GraphQLInt, etc.) If you're using Apollo, you'll need to include the scalar in your resolver map as well as in your SDL:
const resolvers = {
    ...
    // The property name here must match the name you specified in the constructor
    Anything,
}

const typeDefs = `
  # NOTE: The name here must match the name you specified in the constructor
  scalar Anything

  # the rest of your schema
`
Just send a stringified value via GraphQL and parse it on the other side, e.g. use this wrapper class.
export class Dynamic {
    @Field(type => String)
    private value: string;

    getValue(): any {
        return JSON.parse(this.value);
    }

    setValue(value: any) {
        this.value = JSON.stringify(value);
    }
}
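A short usage sketch (assuming a TypeGraphQL-style setup where the @Field decorator comes from type-graphql):
const d = new Dynamic();
d.setValue({ any: ['shape', 42], nested: { ok: true } });
// over the wire, value travels as a plain String field
console.log(d.getValue()); // { any: [ 'shape', 42 ], nested: { ok: true } }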
For a similar problem I've created a schema like this:
"""`MetadataEntry` model"""
type MetadataEntry {
"""Key of the entry"""
key: String!
"""Value of the entry"""
value: String!
}
"""Object with metadata"""
type MyObjectWithMetadata {
"""
... rest of my object fields
"""
"""
Key-value entries that you can attach to an object. This can be useful for
storing additional information about the object in a structured format
"""
metadata: [MetadataEntry!]!
"""Returns value of `MetadataEntry` for given key if it exists"""
metadataValue(
"""`MetadataEntry` key"""
key: String!
): String
}
And my queries can look like this:
query {
    listMyObjects {
        # fetch meta values by key
        meta1Value: metadataValue(key: "meta1")
        meta2Value: metadataValue(key: "meta2")

        # ... or list them all
        metadata {
            key
            value
        }
    }
}
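A minimal resolver sketch for metadataValue under this schema (assuming the parent object stores its metadata as an array of { key, value } entries):
const resolvers = {
    MyObjectWithMetadata: {
        // look up a single entry by key; resolves to null when absent
        metadataValue: (parent, { key }) => {
            const entry = parent.metadata.find((e) => e.key === key);
            return entry ? entry.value : null;
        },
    },
};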

Node JS: Make a flat json from a tree json

I was writing a Node.js script to combine all the JSON files in a directory and store the result as a new JSON file. I got the job done to a great extent, but it has a few flaws.
A.json
[
    {
        "id": "addEmoticon1",
        "description": "Message to greet the user.",
        "defaultMessage": "Hello, {name}!"
    },
    {
        "id": "addPhoto1",
        "description": "How are youu.",
        "defaultMessage": "How are you??"
    }
]
B.json
[
    {
        "id": "close1",
        "description": "Close it.",
        "defaultMessage": "Close!"
    }
]
What I finally need is:
result.json
{
    "addEmoticon1": "Hello, {name}!",
    "addPhoto1": "How are you??",
    "close1": "Close!"
}
I wrote a node.js script:
var fs = require('fs');

function readFiles(dirname, onFileContent, onError) {
    fs.readdir(dirname, function(err, filenames) {
        if (err) {
            onError(err);
            return;
        }
        filenames.forEach(function(filename) {
            fs.readFile(dirname + filename, 'utf-8', function(err, content) {
                if (err) {
                    onError(err);
                    return;
                }
                onFileContent(filename, content);
            });
        });
    });
}

var data = {};
readFiles('C:/node/test/', function(filename, content) {
    data[filename] = content;
    var lines = content.split('\n');
    lines.forEach(function(line) {
        var parts = line.split('"');
        if (parts[1] == 'id') {
            fs.appendFile('result.json', parts[3] + ': ', function (err) {});
        }
        if (parts[1] == 'defaultMessage') {
            fs.appendFile('result.json', parts[3] + ',\n', function (err) {});
        }
    });
}, function(err) {
    throw err;
});
It extracts the 'id' and 'defaultMessage' but is not able to append correctly.
What I get:
result.json
addEmoticon1: addPhoto1: Hello, {name}!,
close1: How are you??,
Close!,
This output is different every time I run my script.
Aim 1: Surround items in double quotes,
Aim 2: Add curly braces at the top and at the end
Aim 3: No comma at the end of last element
Aim 4: Same output every time I run my script
I'll start with the finished solution...
There's a big explanation at the end of this answer. Let's try to think big-picture for a little bit first, though.
readdirp('.')
    .fmap(filter(match(/\.json$/)))
    .fmap(map(readfilep))
    .fmap(map(fmap(JSON.parse)))
    .fmap(concatp)
    .fmap(flatten)
    .fmap(reduce(createMap)({}))
    .fmap(data => JSON.stringify(data, null, '\t'))
    .fmap(writefilep(resolve(__dirname, 'result.json')))
    .then(filename => console.log('wrote results to %s', filename), err => console.error(err));
Console output
wrote results to /path/to/result.json
result.json (I added a c.json with some data to show that this works with more than 2 files)
{
    "addEmoticon1": "Hello, {name}!",
    "addPhoto1": "How are you??",
    "close1": "Close!",
    "somethingelse": "Something!"
}
Implementation
I made Promise-based interfaces for readdir, readFile, and writeFile:
import {readdir, readFile, writeFile} from 'fs';

const readdirp = dir =>
    new Promise((pass, fail) =>
        readdir(dir, (err, filenames) =>
            err ? fail(err) : pass(mapResolve (dir) (filenames))));

const readfilep = path =>
    new Promise((pass, fail) =>
        readFile(path, 'utf8', (err, data) =>
            err ? fail(err) : pass(data)));

const writefilep = path => data =>
    new Promise((pass, fail) =>
        writeFile(path, data, err =>
            err ? fail(err) : pass(path)));
In order to map functions to our Promises, we needed an fmap utility. Notice how we take care to bubble errors up.
Promise.prototype.fmap = function fmap(f) {
    return new Promise((pass, fail) =>
        this.then(x => pass(f(x)), fail));
};
And here's the rest of the utilities
const fmap = f=> x=> x.fmap(f);
const mapResolve = dir=> map(x=>resolve(dir,x));
const map = f=> xs=> xs.map(x=> f(x));
const filter = f=> xs=> xs.filter(x=> f(x));
const match = re=> s=> re.test(s);
const concatp = xs=> Promise.all(xs);
const reduce = f=> y=> xs=> xs.reduce((y,x)=> f(y)(x), y);
const flatten = reduce(y=> x=> y.concat(Array.isArray(x) ? flatten (x) : x)) ([]);
Lastly, the one custom function that does your work
const createMap = map => ({id, defaultMessage}) =>
    Object.assign(map, {[id]: defaultMessage});
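For instance, folding the sample data through reduce(createMap)({}) produces exactly the desired mapping (a quick sketch with inline data):
const entries = [
    { id: 'addEmoticon1', defaultMessage: 'Hello, {name}!' },
    { id: 'close1', defaultMessage: 'Close!' },
];
console.log(reduce (createMap) ({}) (entries));
// => { addEmoticon1: 'Hello, {name}!', close1: 'Close!' }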
And here's c.json
[
    {
        "id": "somethingelse",
        "description": "something",
        "defaultMessage": "Something!"
    }
]
"Why so many little functions ?"
Well despite what you may think, you have a pretty big problem. And big problems are solved by combining several small solutions. The most prominent advantage of this code is that each function has a very distinct purpose and it will always produce the same results for the same inputs. This means each function can be used other places in your program. Another advantage is that smaller functions are easier to read, reason with, and debug.
Compare all of this to the other answers given here; #BlazeSahlen's in particular. That's over 60 lines of code that's basically only usable to solve this one particular problem. And it doesn't even filter out non-JSON files. So the next time you need to create a sequence of actions on reading/writing files, you'll have to rewrite most of those 60 lines each time. It creates lots of duplicated code and hard-to-find bugs because of exhausting boilerplate. And all that manual error-handling... wow, just kill me now. And he/she thought callback hell was bad ? haha, he/she just created yet another circle of hell all on his/her own.
All the code together...
Functions appear (roughly) in the order they are used
import {readdir, readFile, writeFile} from 'fs';
import {resolve} from 'path';

// logp : Promise<Value> -> Void
const logp = p => p.then(x => console.log(x), x => console.error(x));

// fmap : Promise<a> -> (a->b) -> Promise<b>
Promise.prototype.fmap = function fmap(f) {
    return new Promise((pass, fail) =>
        this.then(x => pass(f(x)), fail));
};

// fmap : (a->b) -> F<a> -> F<b>
const fmap = f => x => x.fmap(f);

// readdirp : String -> Promise<Array<String>>
const readdirp = dir =>
    new Promise((pass, fail) =>
        readdir(dir, (err, filenames) =>
            err ? fail(err) : pass(mapResolve (dir) (filenames))));

// mapResolve : String -> Array<String> -> Array<String>
const mapResolve = dir => map(x => resolve(dir, x));

// map : (a->b) -> Array<a> -> Array<b>
const map = f => xs => xs.map(x => f(x));

// filter : (Value -> Boolean) -> Array<Value> -> Array<Value>
const filter = f => xs => xs.filter(x => f(x));

// match : RegExp -> String -> Boolean
const match = re => s => re.test(s);

// readfilep : String -> Promise<String>
const readfilep = path =>
    new Promise((pass, fail) =>
        readFile(path, 'utf8', (err, data) =>
            err ? fail(err) : pass(data)));

// concatp : Array<Promise<Value>> -> Promise<Array<Value>>
const concatp = xs => Promise.all(xs);

// reduce : (b->a->b) -> b -> Array<a> -> b
const reduce = f => y => xs => xs.reduce((y, x) => f(y)(x), y);

// flatten : Array<Array<Value>> -> Array<Value>
const flatten = reduce(y => x => y.concat(Array.isArray(x) ? flatten (x) : x)) ([]);

// writefilep : String -> Value -> Promise<String>
const writefilep = path => data =>
    new Promise((pass, fail) =>
        writeFile(path, data, err =>
            err ? fail(err) : pass(path)));

// -----------------------------------------------------------------------------
// createMap : Object -> Object -> Object
const createMap = map => ({id, defaultMessage}) =>
    Object.assign(map, {[id]: defaultMessage});

// do it !
readdirp('.')
    .fmap(filter(match(/\.json$/)))
    .fmap(map(readfilep))
    .fmap(map(fmap(JSON.parse)))
    .fmap(concatp)
    .fmap(flatten)
    .fmap(reduce(createMap)({}))
    .fmap(data => JSON.stringify(data, null, '\t'))
    .fmap(writefilep(resolve(__dirname, 'result.json')))
    .then(filename => console.log('wrote results to %s', filename), err => console.error(err));
Still having trouble following along?
It's not easy to see how these things work at first. This is a particularly squirrely problem because the data gets nested very quickly. Thankfully that doesn't mean our code has to be a big nested mess just to solve the problem ! Notice the code stays nice and flat even when we're dealing with things like a Promise of an Array of Promises of JSON...
// Here we are reading directory '.'
// We will get a Promise<Array<String>>
// Let's say the files are 'a.json', 'b.json', 'c.json', and 'run.js'
// Promise will look like this:
// Promise<['a.json', 'b.json', 'c.json', 'run.js']>
readdirp('.')

    // Now we're going to strip out any non-JSON files
    // Promise<['a.json', 'b.json', 'c.json']>
    .fmap(filter(match(/\.json$/)))

    // call `readfilep` on each of the files
    // We will get Promise<Array<Promise<JSON>>>
    // Don't freak out, it's not that bad!
    // Promise<[Promise<JSON>, Promise<JSON>, Promise<JSON>]>
    .fmap(map(readfilep))

    // for each file's Promise, we want to parse the data as JSON
    // JSON.parse returns an object, so the structure will be the same
    // except JSON will be an object!
    // Promise<[Promise<Object>, Promise<Object>, Promise<Object>]>
    .fmap(map(fmap(JSON.parse)))

    // Now we can start collapsing some of the structure
    // `concatp` will convert Array<Promise<Value>> to Promise<Array<Value>>
    // We will get
    // Promise<[Object, Object, Object]>
    // Remember, we have 3 Objects; one for each parsed JSON file
    .fmap(concatp)

    // Your particular JSON structures are Arrays, which are also Objects
    // so that means `concatp` will actually return Promise<[Array, Array, Array]>
    // but we'd like to flatten that
    // that way each parsed JSON file gets mushed into a single data set
    // after flatten, we will have
    // Promise<Array<Object>>
    .fmap(flatten)

    // Here's where it all comes together
    // now that we have a single Promise of an Array containing all of your objects ...
    // We can simply reduce the array and create the mapping of key:values that you wish
    // `createMap` is custom tailored for the mapping you need
    // we initialize the `reduce` with an empty object, {}
    // after it runs, we will have Promise<Object>
    // where Object is your result
    .fmap(reduce(createMap)({}))

    // It's all downhill from here
    // We currently have Promise<Object>
    // but before we write that to a file, we need to convert it to JSON
    // JSON.stringify(data, null, '\t') will pretty print the JSON using tab to indent
    // After this, we will have Promise<JSON>
    .fmap(data => JSON.stringify(data, null, '\t'))

    // Now that we have a JSON, we can easily write this to a file
    // We'll use `writefilep` to write the result to `result.json` in the current working directory
    // I wrote `writefilep` to pass the filename on success
    // so when this finishes, we will have
    // Promise<Path>
    // You could have it return Promise<Void> like writeFile sends void to the callback. Up to you.
    .fmap(writefilep(resolve(__dirname, 'result.json')))

    // the grand finale
    // alert the user that everything is done (or if an error occurred)
    // Remember `.then` is like a fork in the road:
    // the code will go to the left function on success, and the right on failure
    // Here, we're using a generic function to say we wrote the file out
    // If a failure happens, we write that to console.error
    .then(filename => console.log('wrote results to %s', filename), err => console.error(err));

All done!
Assuming files is a list of arrays, [a, b, ...]:
var res = {};
files.reduce((a, b) => a.concat(b), []).forEach(o => res[o.id] = o.defaultMessage);
But you don't need to read all the files at once. Just add this code to the onFileContent callback:
JSON.parse(content).forEach(o => res[o.id] = o.defaultMessage);
Also, you should add a final callback to your readFiles. And in this callback:
fs.writeFile('result.json', JSON.stringify(res));
So, the final solution for you:
var fs = require('fs');

function task(dir, it, cb) {
    fs.readdir(dir, (err, names) => {
        if (err) return cb([err]);
        var errors = [], c = names.length;
        names.forEach(name => {
            fs.readFile(dir + name, 'utf-8', (err, data) => {
                if (err) {
                    errors.push(err);
                } else {
                    try {
                        it(JSON.parse(data)); // We got the file's data!
                    } catch(e) {
                        errors.push('Invalid json in ' + name + ': ' + e.message);
                    }
                }
                if (!--c) cb(errors); // We are finished
            });
        });
    });
}

var res = {};
task('C:/node/test/', (data) => data.forEach(o => res[o.id] = o.defaultMessage), (errors) => {
    // Some files can be wrong
    errors.forEach(err => console.error(err));
    // But we write the received data anyway
    fs.writeFile('C:/node/test/result.json', JSON.stringify(res), (err) => {
        if (err) console.error(err);
        else console.log('Task finished. See result.json');
    })
});
This should do it once you have your JSON in variables a and b:
var a = [
    {
        "id": "addEmoticon1",
        "description": "Message to greet the user.",
        "defaultMessage": "Hello, {name}!"
    },
    {
        "id": "addPhoto1",
        "description": "How are youu.",
        "defaultMessage": "How are you??"
    }
];

var b = [
    {
        "id": "close1",
        "description": "Close it.",
        "defaultMessage": "Close!"
    }
];

var c = a.concat(b);
var res = {}; // use an object, not an array, since we're keying by id
for (var i = 0; i < c.length; i++) {
    res[c[i].id] = c[i].defaultMessage;
}
console.log(res);
Here's my solution:
var fs = require('fs');

function readFiles(dirname, onFileContent, onError) {
    fs.readdir(dirname, function(err, filenames) {
        /**
         * We'll store the parsed JSON data in this array
         * @type {Array}
         */
        var fileContent = [];
        if (err) {
            onError(err);
        } else {
            filenames.forEach(function(filename) {
                // Reading the file (synchronously) and storing the parsed JSON output (parsing from string to JSON object)
                var jsonObject = JSON.parse(fs.readFileSync(dirname + filename, 'utf-8'));
                // Pushing the parsed JSON output into array
                fileContent.push(jsonObject);
            });
            // Calling the callback
            onFileContent(fileContent);
        }
    });
}

readFiles('./files/', function(fileContent) {
    /**
     * We'll store the final output object here
     * @type {Object}
     */
    var output = {};
    // Loop over the JSON objects
    fileContent.forEach(function(each) {
        // Looping within each object
        for (var index in each) {
            // Copying the `id` as key and the `defaultMessage` as value and storing in output object
            output[each[index].id] = each[index].defaultMessage;
        }
    });
    // Writing the file (synchronously) after converting the JSON object back to string
    fs.writeFileSync('result.json', JSON.stringify(output));
}, function(err) {
    throw err;
});
A notable difference is that I've not used the asynchronous readFile and writeFile functions, as they'd needlessly complicate the example. This example is meant to showcase the use of JSON.parse and JSON.stringify to do what the OP wants.
UPDATE:
var fs = require('fs');

function readFiles(dirname, onEachFilename, onComplete) {
    fs.readdir(dirname, function(err, filenames) {
        if (err) {
            throw err;
        } else {
            // Prepending the dirname to each filename
            filenames.forEach(function(each, index, array) {
                array[index] = dirname + each;
            });
            // Calling async.map which accepts these parameters:
            // filenames <-------- array of filenames
            // onEachFilename <--- function which will be applied on each filename
            // onComplete <------- function to call when all elements of the filenames array have been processed
            require('async').map(filenames, onEachFilename, onComplete);
        }
    });
}

readFiles('./files/', function(item, callback) {
    // Read the file asynchronously
    fs.readFile(item, function(err, data) {
        if (err) {
            callback(err);
        } else {
            callback(null, JSON.parse(data));
        }
    });
}, function(err, results) {
    /**
     * We'll store the final output object here
     * @type {Object}
     */
    var output = {};
    if (err) {
        throw err;
    } else {
        // Loop over the JSON objects
        results.forEach(function(each) {
            // Looping within each object
            for (var index in each) {
                // Copying the `id` as key and the `defaultMessage` as value and storing in output object
                output[each[index].id] = each[index].defaultMessage;
            }
        });
        // Writing the file (synchronously) after converting the JSON object back to string
        fs.writeFileSync('result.json', JSON.stringify(output));
    }
});
This is a simple asynchronous implementation of the same, using readFile. For more information, see async.map.