I am using AWS as my backend and I have a few AWS Lambda functions (written in Node.js) that are used to insert incoming JSON data into an Amazon RDS (MySQL) DB. Below is my Node.js code:
var mysql = require('mysql');
var config = require('./config.json');
var pool = mysql.createPool({.../});
exports.handler = (event, context, callback) => {
let inserts = [event.unitID, event.timestamp, event.frequency];
pool.getConnection(function(error, connection) {
connection.query({
sql: 'INSERT INTO device_data (device_id, timestamp, frequency) VALUES (?, ?, ?);',
timeout: 40000,
values: inserts
}, function(error, results, fields) {
connection.release();
if (error) callback(error);
else callback(null, results);
});
});
};
This is the incoming JSON data:
{
"unitID": "arena-MXHGMYzBBP5F6jztnLUdCL",
"timestamp": 1580915318000,
"version": "1.0.0",
"frequency": [
60.0033,
60.004,
60.0044,
60.0032,
60.005,
60.005,
60.0026,
60.0035,
60.0036,
60.0053
]
}
My `frequency` field holds an array of values and I am unable to handle that when inserting into the DB.
Any suggestions? Thanks.
if your data is in a variable called json:
console.log(json.frequency.map( (freq) =>[json.unitID,json.timestamp,freq] ))
You can then tweak this to build a SQL string that replaces VALUES (?, ?, ?) with your desired output, e.g.:
// One (device_id, timestamp, frequency) tuple per frequency sample.
const values = json.frequency.map((freq) => [json.unitID, json.timestamp, freq]);
// NOTE(review): string-built SQL is vulnerable to SQL injection — prefer the
// parameterized form `VALUES ?` with values: [values]. Kept string-built to
// match the original approach, but with two fixes: the stray leading quote is
// removed, and the string column is quoted so the statement is valid SQL.
const sqlString = `INSERT INTO device_data (device_id, timestamp, frequency) VALUES ${values.map((row) => `('${row[0]}', ${row[1]}, ${row[2]})`).join(',')}`;
and in your code:
connection.query({
sql:sqlString
[...]
Related
I have a problem where I have an array of objects like this:
[
{
department_id: '6256f8ae6f749617e8167416',
employee_id: '6253ca0c6f749618a8d022af',
employee_number: '1234'
},
{
department_id: '6256f8ae6f749617e8167416',
employee_id: '6253ca0c6f749618a8d022af',
employee_number: '1503'
}
]
and would like to use node js and mysql to insert it into a database so I have got this script
/**
 * Bulk-assigns employees to departments with a single multi-row INSERT.
 * @param {Array<object>} employees - records with department_id, employee_id, employee_number
 * @param {function} result - node-style callback invoked with (err, res)
 */
Department.assignEmployeetoDepartment = (employees, result) => {
  // The driver cannot expand plain objects (it renders '[object Object]');
  // map each record to a positional tuple instead.
  // Fixed: the original read Object.values(rooms) — `rooms` is undefined here.
  const rows = employees.map((e) => [e.department_id, e.employee_id, e.employee_number]);
  db.query(
    `INSERT INTO department_employee (department_id, employee_id, employee_number) VALUES ?`,
    [rows], // nested array -> (…),(…) row groups
    (err, res) => {
      if (err) {
        console.log("error: ", err);
        result(err, null);
        return;
      }
      console.log("success: ", res);
      result(null, res);
    }
  );
};
when I use the code above, I get
INSERT INTO department_employee (department_id, employee_id, employee_number) VALUES '[object Object]', '[object Object]'
which does not work for obvious reasons.
I tried to stringify the object
and I also tried to use a for loop to iterate over the employees array and it did not work as it says the headers were already sent.
How could I store the array (which can vary in length) in the database?
Thanks in advance
The solution I'm suggesting will serve you well if you make your data an array of arrays:
// Employee rows as positional tuples: [department_id, employee_id, employee_number].
const employeesData = [
  ['6256f8ae6f749617e8167416', '6253ca0c6f749618a8d022af', '1234'],
  ['6256f8ae6f749617e8167416', '6253ca0c6f749618a8d022af', '1503'],
];
Next, prepare your query like this;
// Bulk-insert template: the single `?` expands a nested array into (…),(…) row groups.
// Fixed: the original targeted the unrelated `images` table and fused "VALUES?".
const sql = `INSERT INTO department_employee (department_id, employee_id, employee_number) VALUES ?`;
You're now ready to run your query like below:
// Run the bulk insert; report the outcome as a JSON HTTP response.
db.query(sql, [employeesData])
  .then((insertResult) => {
    res.status(200).json({
      status: "success",
      message: "Data inserted successfully"
    });
  })
  .catch((queryError) => {
    res.status(500).json({
      status: "error",
      error: queryError.message
    });
  });
You can generate array of values to insert using array's map method like shown below,
// Derive positional tuples from the source records.
// Fixed: the original returned bare identifiers (department_id, …), which are
// undefined in this scope — they must be read off each record. Also declared
// with const instead of creating an implicit global.
const employeesData = dataSourceArray.map((data) => [
  data.department_id,
  data.employee_id,
  data.employee_number,
]);
As per mysql- Escaping query values
Nested arrays are turned into grouped lists (for bulk inserts), e.g. [['a', 'b'], ['c', 'd']] turns into ('a', 'b'), ('c', 'd')
You probably want to map the values to array, you can't pass array of JSON object to query
// Map each employee object to a positional tuple, then bulk-insert in one query.
const tuples = employees_array.map(({ department_id, employee_id, employee_number }) => [
  department_id,
  employee_id,
  employee_number,
]);
db.query(
  `INSERT INTO department_employee (department_id, employee_id, employee_number) VALUES ?`,
  [tuples],
  (err, res) => {
    if (err) {
      console.log("error: ", err);
      result(err, null);
      return;
    }
    console.log("success: ", res);
    result(null, res);
  }
);
};
I am working on nodejs for the first time. I have a scenario where I am having json array like shown below.
I will not know how many json items will be there in that array.
I have tried solution using loops but it will fire the query for multiple times. And I don't want that.
{"qualification":[{"degreeName":"B","domain":"p"},{"degreeName":"A","domain":"q"}]}
And mysql query will be like this
INSERT INTO qualification (degreeName,domain) VALUES (B,p),(A,q);
In the above query I have explicitly written values, but as I will not know how many values will be there I can't write values like that, instead i will have to put all values in a variable and then pass it to the query.
So how can I retrieve and convert the qualification data into a variable or tuples to put into the MySQL query, so that I can fire a single query and add multiple values?
You could do something like this: You'll have to generate values array from dynamic values and pass onto the query.
Key point: ? is an unnamed parameter (placeholder alias) that makes it a parameterised query.
const mysql = require('mysql');

// Single connection for the example; replace the credentials with your own.
const con = mysql.createConnection({
  host: "localhost",
  user: "yourusername",
  password: "yourpassword",
  database: "mydb"
});

con.connect(function (err) {
  if (err) throw err;
  console.log("Connected!");
  // One `?` placeholder; the nested array below expands to ('B','p'),('A','q'),…
  let sql = "INSERT INTO qualification (degreeName, domain) VALUES ?";
  // Build this array dynamically from your input. Fixed: the original contained
  // a bare `...` inside the literal, which is a JavaScript syntax error.
  let values = [
    ['B', 'p'],
    ['A', 'q'],
  ]; // this is the dynamic value that you can create
  con.query(sql, [values], function (err, result) {
    if (err) throw err;
    console.log("Number of records inserted: " + result.affectedRows);
  });
});
To generate the tuples (values) from the dataset (assumes that data is going be in that format):
// Sample payload shaped like the incoming request body.
const payload = {
  qualification: [
    { degreeName: "B", domain: "p" },
    { degreeName: "A", domain: "q" }
  ]
};

// One [degreeName, domain] tuple per qualification entry.
const tuples = [];
for (const { degreeName, domain } of payload.qualification) {
  tuples.push([degreeName, domain]);
}
Which outputs something like this:
[ [ 'B', 'p' ], [ 'A', 'q' ] ]
I have a json request in this form:
{
"claimNo":["5454545","4554454","45884"]
}
the claimNo could hold any number of items(not restricted). I want to get the values and write a query to fetch data from a mysql db where claimNo = the values in the request.
sample response:
"claims": [
{
"claimNo": "4554454",
"ClaimCause": "gjgiukhknl",
"ClaimAmount": 45550,
},
{
"claimNo": "5454545",
"ClaimCause": "fdfdfdfdf",
"ClaimAmount": 0,
}
]
I can successfully loop through the request and display on terminal or even insert into the db with multiple ORs but that only works for a restricted array length.
// NOTE(review): this fires one SELECT per claimNo — N database round-trips for
// N claims. A single query with `WHERE claimNo IN (?)` does the same work in
// one trip. Also note: if the commented-out res.json below were re-enabled
// inside this loop, it would attempt to send a response once per element,
// and only the first send can succeed.
req.body.claimNo.forEach(element => {
// console.log(element)
let sql = 'SELECT * FROM claims WHERE claimNo = ?'
connection.query(sql,element,(err, rows, fields) => {
if(!err){
// return res.json({
// success:true,
// errorCode:"",
// errorDescription:"",
// claims:rows
// })
console.log(rows)
}else
console.log(err)
} )
})
If I understand your question correctly, you're looking for a way to query MySQL database for multiple number of claimNo entries, and return the result as a single result.
Using MySQL IN() operator, you can write your query as select * from claims where claimNo in(?)
// Fetch all requested claims in one round-trip via an IN() list.
let sql = 'select * from claims where claimNo in(?)';
let claimNos = req.body.claimNo;
// Fixed: the values argument must be wrapped one level deeper — [claimNos] —
// so the single `?` receives the whole array and expands to 'a','b','c'.
// Passing claimNos directly maps each element to its own placeholder, and with
// only one `?` present, only the first claim number would be used.
connection.query(sql, [claimNos], (err, rows, fields) => {
  if (!err) {
    // return res.json({
    //   success: true,
    //   errorCode: "",
    //   errorDescription: "",
    //   claims: rows
    // })
    console.log(rows)
  } else
    console.log(err)
})
You don't need to send a separate request for each claimNo. You can use the IN operator instead. The following should work:
// Collect the claim numbers, then fetch all matching rows with one IN() query.
const claimsNo = claims.map(claim => claim.claimNo);
// Fixed: `SELECT &` -> `SELECT *`, and the values array contained a stray
// `tableName` with no matching placeholder — the single `?` takes claimsNo.
const sql = 'SELECT * FROM claims where claimNo IN (?)';
connection.query(sql, [claimsNo], (err, rows, fields) => {
  // ...handle rows / err here
});
I am using a Node.JS server with a MySQL database and I just realised that MySQL supports JSON as a data type.
Based on my previous statement, how do I a) SELECT JSON, b) handle results in my Node.js code and c) then UPDATE the DB entries again in JSON?
Code example for parts a and b:
sql.getConnection((err, con) => {
  // Guard the pool error first; otherwise `con` is undefined below and the
  // query call crashes with a TypeError instead of surfacing the real error.
  if (err) throw err;
  con.query("SELECT test FROM test", (error, row) => {
    con.release();
    if (error) throw error;
    console.log(row[0].test);
  });
});
this snippet of code returns: {"entryid": {"a": 1, "b": 2, "c": 3}},
now if i try to do something like this: console.log(row[0].test./*any sub-key here*/); it returns undefined.
Well I managed to resolve my issue simply by ignoring MySQL's recommended syntax and implementing my own evil methods as you can see in this Gist.
let mysql = require('mysql');

// Pool configuration — replace every field with your own environment's values.
let dbconn = {
  host: "localhost",
  user: "root",
  password: "password",
  connectionLimit: 100,
  database: "db"
};

let sql = mysql.createPool(dbconn);

let jsonObj;

/*
 * Assume the stored JSON has the following structure:
 *
 * "master_key" : {
 *   sub_key1: "test1",
 *   sub_key2: "test2",
 *   sub_key3: {
 *     sub_key4: "test4"
 *   }
 * }
 */
sql.getConnection((err, conn) => {
  if (err) throw err;

  // We can SELECT it: the JSON column arrives as text; parse once, then use keys.
  conn.query("SELECT json_Column FROM test_Table", (error, row) => {
    if (error) throw error;
    jsonObj = JSON.parse(row[0].json_Column); // handle the json keys as usual
    // jsonObj.master_key || jsonObj.master_key.sub_key1 || jsonObj.master_key.sub_key3.sub_key4 || however you want
  });

  // We can INSERT it. Fixed: `VALUES ?` rendered as VALUES 'json…' (invalid
  // SQL for a column list) — the placeholder needs parentheses: VALUES (?).
  jsonObj = {/*your JSON here*/};
  conn.query("INSERT INTO test_Table(json_Column) VALUES (?)", [JSON.stringify(jsonObj)], (error, results) => {
    if (error) throw error;
    console.log(results); // OkPacket-style result, not a row array (row[0] was undefined)
  });

  // We can UPDATE it
  jsonObj = {/*your JSON here*/};
  conn.query("UPDATE test_Table SET json_Column = ?", [JSON.stringify(jsonObj)], (error, results) => {
    if (error) throw error;
    // Fixed: release exactly once, after the last queued query; the original
    // released the same connection in every callback, and a second release of
    // an already-released connection throws.
    conn.release();
    console.log(results);
  });
});
I want to insert multiple rows into mysql thru node.js mysql module. The data I have is
// Two records to insert, one object per row.
var data = [{ test: 'test1' }, { test: 'test2' }];
I am using pool
// NOTE(review): this fails because `SET ?` expects a single object, but `data`
// is an ARRAY of objects — with one placeholder, the driver appears to consume
// only the first array element, so at most one row is inserted. A multi-row
// insert needs 'INSERT INTO … (cols) VALUES ?' with a nested array of tuples.
// Also: the `err` from getConnection is never checked (connection may be
// undefined), and on a query error the throw skips connection.release(),
// leaking the connection from the pool.
pool.getConnection(function(err, connection) {
connection.query('INSERT INTO '+TABLE+' SET ?', data, function(err, result) {
if (err) throw err;
else {
console.log('successfully added to DB');
connection.release();
}
});
});
}
which fails.
Is there a way for me to have a bulk insertion and call a function when all insertion finishes?
Regards
Hammer
After coming back to this issue multiple times, I think i've found the cleanest way to work around this.
You can split the data Array of objects into a set of keys insert_columns and an array of arrays insert_data containing the object values.
// Source records: one object per row, uniform keys.
const data = [
  { test: 'test1', value: 12 },
  { test: 'test2', value: 49 }
]

// Column names, taken from the first record's keys.
const insert_columns = Object.keys(data[0]);
// returns array ['test', 'value']

// Row tuples. Fixed: the original reduce-with-spread rebuilt the whole
// accumulator on every iteration (accidentally O(n^2)); map is the direct
// O(n) equivalent with identical output.
const insert_data = data.map((row) => Object.values(row));
// returns array [['test1', 12], ['test2', 49]]
// `??` expands to backtick-escaped column identifiers and `?` to the grouped
// row tuples, i.e. INSERT INTO table (`test`, `value`) VALUES ('test1', 12), ('test2', 49)
_db.query('INSERT INTO table (??) VALUES ?', [insert_columns, insert_data], (queryError, queryResult) => {
  // insert complete
})
I hope this helps anyone coming across this issues, I'll probably be googling this again in a few months to find my own answer 🤣
You can try this approach as well
lets say that mytable includes the following columns: name, email
// Rows to insert: one [name, email] tuple per record.
var inserts = [
  ['name1', 'email1'],
  ['name2', 'email2'],
];
// Single bulk INSERT: the one `?` expands the nested array into row groups.
conn.query({
  sql: 'INSERT into mytable (name, email) VALUES ?',
  values: [inserts]
});
This should work
You can insert multiple rows into mysql using nested arrays. You can see the answer from this post: How do I do a bulk insert in mySQL using node.js