I parse a big CSV and insert it row by row into my MySQL tables.
After parsing I do a lot of calculation and transformation and save the result to a new object:
// Collect one plain object per row, keyed by the target column names.
obj.push({
"ID": value.id,
"col1": value.calc1,
... });
After the Object is complete I do:
// NOTE(review): `callback` is never invoked inside the iterator, so
// async.forEach never knows an item finished and the final handler
// below will never run; call callback(err) from conn.query's result
// handler. Also note the snippet is missing the closing `);` for the
// async.forEach call.
async.forEach(obj, function (Insertobj, callback) {
var query = conn.query('INSERT INTO table SET ?', Insertobj);
},function (err){
if (err) {
console.log(err);
console.log('failed to process');
}}
After running through the obj I get =>
Error: ER_BAD_FIELD_ERROR: Unknown column 'NaN' in 'field list'..
But it inserts the complete object into my table! I don't have any column called NaN or any empty columns. How can I debug this? I tried logging err.sql, but it prints "undefined". Using debug:true on the connection didn't help either.
I think you have misunderstood how escaping mysql values works using the node js module. The error is due to you not specifying what column you want to update. In addition to this, the escaped values should be filled in using an array instead of an object. With values being in the order they are escaped in the query. Your code could look as follows:
// Values in the same order as the `?` placeholders in the INSERT below.
valuesarray.push([
value.id,
value.calc1
]);
// Run one INSERT per value array. `callback` must be invoked exactly
// once per item so async.forEach can detect completion (or an error)
// and fire the final handler.
async.forEach(valuesarray, function (insertarray, callback) {
  // Placeholders are filled in array order: ID first, then col1.
  // (The original split this string literal across two physical lines,
  // which is a JavaScript syntax error.)
  conn.query('INSERT INTO table SET ID = ?, col1 = ?', insertarray, function (err) {
    callback(err); // propagate query errors to the final handler
  });
}, function (err) {
  if (err) {
    console.log(err);
    console.log('failed to process');
  }
});
Related
I have a function like getItemPrice in Node.js, but while trying to insert data an error occurs. I can't write any dynamic value inside VALUES(item.Id, value.lowest_price).
I've tried lots of things but none of them worked.
// NOTE(review): the INSERT below embeds `item.itemId` and
// `value.lowest_price` as literal text inside the SQL string, so MySQL
// parses them as column names — hence ER_BAD_FIELD_ERROR. They must be
// interpolated or, better, passed as `?` placeholder values.
// Also note `.then(function (value, err))` — a .then fulfillment
// handler never receives an error as its second argument; rejections
// need a .catch().
con.query('SELECT game_item.id as itemId, steam_app_game.app_id as gameId, game_item.name, steam_app_game.id FROM steam_app_game LEFT JOIN game_item ON steam_app_game.id = game_item.app_game_id', function(err, rows, fields) {
var counter = 1;
rows.forEach(function (item,index) {
// Stagger lookups 5 s apart, presumably to respect an API rate limit.
setTimeout(function(){
market.getItemPrice(item.gameId, item.name).then(function (value, err) {
if(err) throw err;
var lowest = value.lowest_price
con.query('INSERT INTO game_item_spec(game_item_id,price) VALUES (item.itemId,value.lowest_price )')
counter ++;
});
}, index * 5000);
});
});
Here is the error.
ER_BAD_FIELD_ERROR: Unknown column 'value.lowest_price' in 'field list'
at Query.Sequence._packetToError (F:\Xamp\htdocs\steam-trade-bot\node_modules\mysql\lib\protocol\sequences\Sequence.js:47
:14)
I solved this using parameterized statements in Node.js. A plain single INSERT INTO doesn't work if the data contains special characters.
A query must be a string, so to inject some variables inside you can use ES6 syntax with template string.
Here the working code:
con.query('SELECT game_item.id as itemId, steam_app_game.app_id as gameId, game_item.name, steam_app_game.id FROM steam_app_game LEFT JOIN game_item ON steam_app_game.id = game_item.app_game_id', function(err, rows, fields) {
  if (err) throw err; // surface the SELECT failure instead of reading `rows` while undefined
  var counter = 1;
  rows.forEach(function (item, index) {
    // Stagger the price lookups 5 s apart (API rate limiting).
    setTimeout(function () {
      market.getItemPrice(item.gameId, item.name).then(function (value) {
        // Use `?` placeholders so the driver escapes the values.
        // Interpolating them into the SQL string (template literal or
        // concatenation) is vulnerable to SQL injection and breaks on
        // special characters such as quotes.
        con.query('INSERT INTO game_item_spec(game_item_id,price) VALUES (?, ?)',
          [item.itemId, value.lowest_price]);
        counter++;
      }).catch(function (err) {
        // .then(fn)'s second parameter never receives an error; promise
        // rejections must be handled here instead.
        console.log(err);
      });
    }, index * 5000);
  });
});
I recommend that you use node async https://caolan.github.io/async/, use series together with eachOfSeries
I am trying to insert multiple records into MYSQL from Node.js with a WHERE clause but I keep getting a syntax error.
The statement works fine until I try to add a conditional statement to it. Then I get this error: ER_PARSE_ERROR: You have an error in your SQL syntax near VALUES ? WHERE ...
// NOTE(review): INSERT ... VALUES cannot take a WHERE clause — that is
// exactly what triggers the ER_PARSE_ERROR. Conditional inserts need
// the INSERT ... SELECT ... WHERE form (see the accepted answer below).
// Also note: `throw error` makes the res.json on the next line
// unreachable and crashes the process inside an async callback.
var Data = data; // this is a nested array already as received from client side like [[..],[..],[..]]
var ID = 123;
var sql = "INSERT INTO table1 (Col1,Col2,Col3,Col4,Col5) VALUES ? WHERE"+ID+" NOT IN (SELECT somecol FROM table2 WHERE somecol= "+ID+")"
connection.query(sql, [Data], function (error, result) {
if (error) {
throw error;
res.json({ Message: "Oops something went wrong :("});
}
res.json({ Message: "Your data was added!"});
});
The connection is set up to allow multiple statements already:
// mysql connection options for the local development database.
var connection = mysql.createConnection({
host: 'localhost',
user: 'root',
password: '1234',
database: 'thedb',
port: 12345,
charset: "utf8mb4", // full Unicode (4-byte) support
multipleStatements: true // allow several ';'-separated statements per query call
});
The query works in this form without the WHERE clause:
var Data = data; // nested array of row tuples, e.g. [[..],[..],[..]]
var ID = 123;
// Bulk insert: a single `?` placeholder expands a nested array into
// "(..),(..),(..)" row tuples.
var sql = "INSERT INTO table1 (Col1,Col2,Col3,Col4,Col5) VALUES ?"
connection.query(sql, [Data], function (error, result) {
  if (error) {
    // Respond and stop: the original `throw error` made its own
    // res.json unreachable and crashed the process inside the callback.
    console.error(error);
    return res.json({ Message: "Oops something went wrong :(" });
  }
  res.json({ Message: "Your data was added!" });
});
How do I get the query work with the WHERE clause?
Insert command will not work with Where clause because you are inserting a new row. In naive terms, a Where clause needs some rows to filter out based on the conditions. Based on your use case you can have two possible solutions:
Use Update statements which could be like
UPDATE table SET col1 = val1 WHERE (condition clause)
If you really want to use Where clause then you can use the Insert command in the following form
INSERT INTO table (col1, col2)
SELECT val1, val2 FROM table2 WHERE (condition clause);
I have table say TEST(id INT, attribute JSON) in MySQL 5.7
When I try to query the table in Nodejs using mysql package as follows
// Fetch a single row by id; the `?` placeholder is escaped by the driver.
con.query("select * from TEST where id=?", [req.params.id], (err, results) => {
  if (err) {
    throw err;
  }
  console.log(results);
});
I get the following output
[
{
"id": 2,
"package": "{\"tag\": \"tag1\", \"item\": \"item1\"}"
}
]
Is there a way to get the package item in the above result as JSON object instead of a string without iterating the array and do JSON.parse to convert string to JSON?
Expected Output
[
{
"id": 2,
"package": {"tag": "tag1",
"item": "item1"}
}
]
Is there a way to get the package item in the above result as JSON
object instead of a string without iterating the array and do
JSON.parse to convert string to JSON?
MySQL 5.7 supports JSON data type, so you can change package type to JSON and you won't have to iterate and perform JSON.parse on each row, if your client has support for this data type.
Have in mind that mysql package does not support it, but mysql2 does.
-- Table with a native JSON column (requires MySQL 5.7+).
CREATE TABLE `your-table` (
`id` int(11) NOT NULL AUTO_INCREMENT,
`package` json DEFAULT NULL, -- stored and validated as JSON by the server
PRIMARY KEY (`id`)
) ENGINE=InnoDB;
Now package will be an array/object:
// With a JSON column and a driver that understands it (mysql2),
// `package` arrives already parsed as an object.
con.query("select * from TEST where id=?", [req.params.id], (err, results) => {
  if (err) throw err;
  const [row] = results;
  console.log(row.package.tag); // tag1 using mysql2
  console.log(row.package.item); // item1 using mysql2
});
In case you're running a MySQL version lower than 5.7 or you don't want to use the JSON type, you will need to iterate and parse it yourself.
You can use this one liner:
// Parse the JSON string in each row's `package` column. Building a new
// object per row avoids the comma operator and in-place mutation inside
// the map callback.
results = results.map(row => ({ ...row, package: JSON.parse(row.package) }));
If you want to know if you should or shouldn't store JSON in a relational database there's a nice discussion in this question:
Storing JSON in database vs. having a new column for each key
This is a slight variation on Tanner's answer but, if you're using the node mysql library and you've defined fields in the database using MySQL's JSON data type, you can use "type casting" when initially setting up the connection to convert any JSON-typed value returned by any query to a JSON object:
// Register a typeCast hook so every JSON-typed column is parsed into an
// object as rows come back; every other type keeps the default handling.
let connection = mysql.createConnection({
  typeCast: function (field, next) {
    return field.type === "JSON" ? JSON.parse(field.string()) : next();
  },
});
You can pass a typeCast function to your mysql connection/pool. In my case, I want to convert all longtext fields to JSON if possible. In the case it fails, you will still get the original value.
// Connection settings plus a typeCast hook: every longtext column
// (reported by the driver as BLOB with the maximum length) is
// JSON-parsed when possible, falling back to the raw string on failure.
let auth = { database: 'db', password: 'pw', user: 'u', host: 'host.com' };
auth.typeCast = function (field, next) {
  const isLongText = field.type == 'BLOB' && field.length == 4294967295;
  if (!isLongText) {
    return next();
  }
  const raw = field.string();
  try {
    return JSON.parse(raw);
  } catch (e) {
    return raw; // not valid JSON — hand back the original text
  }
};
let connection = mysql.createConnection(auth); // hand the config (with typeCast) to the driver
You can use package mysql2 instead of mysql. It will parse the result by default.
I think all you need to do is JSON.parse(results)
// NOTE(review): this does not work — `results` is already an array of
// row objects, not a JSON string, so JSON.parse(results) is wrong; the
// `package` column must be parsed per row (acknowledged in the edit below).
con.query("select * from TEST where id=?", [req.params.id], function (err, results) {
if (err) throw err;
console.log(JSON.parse(results));
});
EDIT! commenter noted that you need to iterate through results, my answer was hasty and wouldn't work:
// Query by id, then JSON-parse the stringified `package` column on each
// returned row.
con.query("select * from TEST where id=?", [req.params.id], (err, results) => {
  if (err) throw err;
  for (const row of results) {
    row.package = JSON.parse(row.package);
  }
});
I am inserting multiple rows in the MySql database table the first row is getting inserted but for remaining it is showing the error:
[Error: ER_DUP_ENTRY: Duplicate entry '2' for key 'PRIMARY']
code: 'ER_DUP_ENTRY',
errno: 1062,
sqlState: '23000',
index: 1,
This is the structure of id field:
If I insert a single row, the auto-increment works fine.
I am not able to rectify the problem. Please help. Thanks.
EDIT
I am using node-rom2 and the code is
// Bulk-create the records. On failure, log the error; otherwise attach
// the result to the response envelope. Either way, continue the chain.
modelObj.create(arrayOfObjects, (err, result) => {
  if (err) {
    console.log("The error is :", err);
  } else {
    response.status = 'success';
    response.data = result;
  }
  next(response);
});
The SQL query is generated dynamically.
Now I got the solution, the mistake I was doing is I was generating the data using a for loop like this:
var data = { name:'john', age:24, email:'abc#abcd.com'};
var arrayOfObjects = [];
for (var i = 0; i < 4; i++){
  // Push a fresh copy each iteration: pushing `data` itself stores the
  // SAME object reference four times, so every "row" is one record —
  // the cause of the identical-records problem described above.
  arrayOfObjects.push(Object.assign({}, data));
}
// Bulk-create the generated records; log failures, otherwise record the
// success in the response envelope, then continue the chain either way.
modelObj.create(arrayOfObjects, function (err, result) {
if (err) {
console.log("The error is :", err);
}
else {
response.status = 'success';
response.data = result;
}
next(response);
});
So every record is identical in this situation. However, no column other than id has the primary-key property. I think it may be the behavior of the database, or node-orm2 could also be the reason, for not accepting the exact same values.
In reality, the records won't all be the same. If you have any other insight, please let me know your thoughts. Thanks.
I want to insert multiple rows into mysql thru node.js mysql module. The data I have is
var data = [{'test':'test1'},{'test':'test2'}];
I am using pool
pool.getConnection(function (err, connection) {
  if (err) throw err; // could not obtain a connection from the pool
  connection.query('INSERT INTO ' + TABLE + ' SET ?', data, function (err, result) {
    // Always return the connection to the pool — the original released
    // it only on success, leaking a pooled connection on every error.
    connection.release();
    if (err) throw err;
    console.log('successfully added to DB');
  });
});
}
which fails.
Is there a way for me to have a bulk insertion and call a function when all insertion finishes?
Regards
Hammer
After coming back to this issue multiple times, I think i've found the cleanest way to work around this.
You can split the data Array of objects into a set of keys insert_columns and an array of arrays insert_data containing the object values.
// Split the array of row objects into a column-name list and an array
// of value arrays, then bulk-insert everything in a single statement.
const data = [
  {test: 'test1', value: 12},
  {test: 'test2', value: 49}
]

// Column names taken from the first row: ['test', 'value'].
// Assumes every row has the same keys in the same order.
const insert_columns = Object.keys(data[0]);

// One values-array per row: [['test1', 12], ['test2', 49]].
// map() is O(n); the original reduce with array spread rebuilt the
// accumulator on every step, which is accidentally O(n^2).
const insert_data = data.map((row) => Object.values(row));

// `??` escapes identifiers (the column names); `?` expands the nested
// array into "(...),(...)" row tuples.
_db.query('INSERT INTO table (??) VALUES ?', [insert_columns, insert_data], (error, data) => {
  // runs query "INSERT INTO table (`test`, `value`) VALUES ('test1', 12), ('test2', 49)"
  // insert complete
})
I hope this helps anyone coming across this issues, I'll probably be googling this again in a few months to find my own answer 🤣
You can try this approach as well
lets say that mytable includes the following columns: name, email
// One [name, email] tuple per row; the single `?` placeholder expands
// the nested array into multiple VALUES tuples.
var inserts = [
  ['name1', 'email1'],
  ['name2', 'email2'],
];
conn.query({
  sql: 'INSERT into mytable (name, email) VALUES ?',
  values: [inserts],
});
This should work
You can insert multiple rows into mysql using nested arrays. You can see the answer from this post: How do I do a bulk insert in mySQL using node.js