Read, Display and Insert into another Table - mysql

I am new to Node.js programming.
I have created a script which reads from a database table 'nodetest' containing 50K records, displays them in the browser, and then writes them to another table called 'nodetestcopy':
var express = require('express');
var app = express();
var connection = require('express-myconnection');
var mysql = require('mysql');

var rows = {};
var copyOfrows = {
    'id': null,
    'f_name': null,
    'l_name': null,
    'title': null
};
var date1 = (new Date()).getTime();

app.use(
    connection(mysql, {
        host: 'localhost',
        user: 'root',
        password: '',
        port: null, //port mysql
        database: 'test'
    }, 'request')
);

app.get('/api/entries', function(req, res) {
    req.getConnection(function(err, connection) {
        if (err) {
            console.log(err);
            res.status(500).send('Cannot get database connection');
        } else {
            connection.query("select * from nodetest", function(err, rows) {
                if (err) {
                    console.log(err);
                    res.status(500).send(err);
                } else {
                    res.write('' + JSON.stringify(rows));
                    var date2 = (new Date()).getTime();
                    console.log('Cnt : ' + rows.length + ' Took Time to execute :' + (date2 - date1) / (60 * 60));
                    //console.log(rows);
                    // Now insert into another table 'nodetestcopy'
                    for (var i in rows) {
                        connection.query("insert into nodetestcopy set ?", rows[i], function(err, rows) {
                            if (err) {
                                console.log(err);
                                //res.status(500).send(err);
                            } else {
                            }
                        });
                    }
                }
            });
        }
    });
});

app.listen(3000);
This script works the first time, but when I refresh the browser a second time I get an error.
Please guide me on what is going wrong here. Also, is my approach correct, looping over the 50K records with for (var i in rows) { ... }?
Please suggest a feasible solution and correct the code wherever it is wrong.
Thanks

What is the error message that you are getting? Also, to avoid hitting the database with 50K separate queries, you could combine the records into a single bulk insert and make one database call. With the mysql driver that can be done by passing a nested array to a VALUES ? placeholder, like:
var values = [];
for (var i = 0; i < rows.length; i++) {
    values.push([rows[i].id, rows[i].f_name, rows[i].l_name, rows[i].title]);
}
connection.query('insert into nodetestcopy (id, f_name, l_name, title) values ?', [values], function(err, result) { ...
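With 50K rows a single statement can also exceed the server's max_allowed_packet, so you may want to insert in chunks. A rough sketch, assuming rows and connection are in scope as in the code above; the batch size of 1000 and the column names are taken as illustration only:
var BATCH = 1000; // arbitrary batch size for this sketch
function insertBatch(offset) {
    if (offset >= rows.length) return; // all chunks done
    var values = rows.slice(offset, offset + BATCH).map(function(r) {
        return [r.id, r.f_name, r.l_name, r.title];
    });
    connection.query('insert into nodetestcopy (id, f_name, l_name, title) values ?', [values], function(err) {
        if (err) return console.log(err);
        insertBatch(offset + BATCH); // start the next chunk only after this one succeeds
    });
}
insertBatch(0);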

Related

Node/Express + MySQL: Inserts not displaying instantly

I have a form called #add_blog_post with the action "/mysql_test/add_blog_post" and a method of "POST".
Jade markup:
form#add_blog_post(action="/mysql_test/add_blog_post" method="POST")
This form executes the following code in my app.js:
app.post('/mysql_test/add_blog_post', function(req, res) {
    var author = req.body.author;
    var date = req.body.date;
    var title = req.body.title;
    var body = req.body.body;

    var blog_insert_query = "insert into 332project.blog(author,date,title,body) values(";
    blog_insert_query += ("'" + author + "'" + ",");
    blog_insert_query += ("'" + date + "'" + ",");
    blog_insert_query += ("'" + title + "'" + ",");
    blog_insert_query += ("'" + body + "'" + ")");

    var connection = mysql.createConnection({
        host: process.env.DB_HOST,
        user: process.env.DB_USER,
        password: process.env.DB_PASS
    });
    connection.connect(function(err) { /*error?*/ });

    var result;
    var query = connection.query(blog_insert_query, function(err, result) {
        res.redirect('/mysql_test');
    });
});
The blog post insert works just fine, but it takes a while before the inserted post shows up in the select results on /mysql_test.
Here is my route:
var express = require('express');
var router = express.Router();
var db_calls = require('../db.js');

var connection = db_calls.connect();
connection.connect(function(err) { /*error?*/ });

var result;
var query = connection.query("select * from 332project.blog order by id desc", function(err, rows, fields) {
    connection.end();
    if (!err) {
        result = rows;
    }
});

router.get('/', function(req, res, next) {
    res.render('mysql_test', {
        result: result
    });
});

module.exports = router;
What gives? It almost seems like a caching issue. I'd really like my create/update operations to be instantly visible in my application.
Source code: https://github.com/harwoodjp/learning-express
Your problem is probably that you're not calling connection.end() on the connection you create in app.js, per the docs. Note also that the select in your route file runs only once, when the module is first loaded, so result is never refreshed with new rows.
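A minimal sketch of one way to restructure the route (assuming db_calls.connect() returns a fresh connection each time it is called), so every GET runs the query and ends the connection inside the handler:
var express = require('express');
var router = express.Router();
var db_calls = require('../db.js');

router.get('/', function(req, res, next) {
    var connection = db_calls.connect();
    connection.query("select * from 332project.blog order by id desc", function(err, rows, fields) {
        connection.end(); // close the connection once the query has completed
        if (err) return next(err);
        res.render('mysql_test', { result: rows });
    });
});

module.exports = router;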

What's wrong with this express code?

Hello, I am trying to check two MySQL values, and if they match anything in the database it needs to generate a token, but it does not seem to work :(
Every time I run this code I get a connection timeout:
var express = require('express');
var router = express.Router();
var mysql = require("mysql");
var randomstring = require("randomstring");

/* GET users listing. */
router.get('/', function(req, res, next) {
    // First you need to create a connection to the db
    var connection = mysql.createConnection({
        host: "localhost",
        user: "segfault",
        password: "wnk9ctte2endcKzBKtre7auE",
        database: "segfault"
    });
    connection.connect();

    var input_user = req.body.username;
    var input_pass = req.body.password;
    var token = randomstring.generate(12);

    connection.query('SELECT username FROM segfault.users AS username', function(err, rows, fields) {
        if (err) throw err;
        for (var i in rows) {
            username = rows[i].username;
            if (input_user == username) {
                connection.query('SELECT password FROM segfault.users AS password', function(err, rows, fields) {
                    if (rows[i].password == input_pass) {
                        res.send("OK: " + token);
                        console.log("OK:" + token)
                    }
                });
            }
        }
    });
    connection.end();
});

module.exports = router;
Please tell me what I am doing wrong!
You close the database connection without waiting for the result of the query.
Move connection.end(); inside the callback, after the res.send.
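A minimal sketch of that fix, using the names from the question (it also folds the username and password checks into a single query, which is not required for the fix but keeps the example short):
connection.query('SELECT username, password FROM segfault.users', function(err, rows, fields) {
    if (err) throw err;
    for (var i in rows) {
        if (rows[i].username == input_user && rows[i].password == input_pass) {
            res.send("OK: " + token);
            console.log("OK:" + token);
        }
    }
    connection.end(); // end only after the query callback has run
});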

Can't insert element from array in mysql database with nodejs

I have a problem saving some data from an array into a MySQL database with Node.js.
This is my code:
for (var i = 0; i < data.data.length; i++) {
    var imageObject = data.data[i];
    var url = imageObject.images.standard_resolution.url;
    var id = imageObject.id;

    var sql = 'SELECT COUNT(*) AS imageIDCount FROM images WHERE id = ?'
    var ids
    connection.query(sql, [id], function(err, rows, fields) {
        console.log(rows[0].imageIDCount);
        if (err) throw err;
        if (rows[0].imageIDCount == 0) {
            console.log(id + " doesn't exist"); // ### the ID at this point is always the last from that array
            //insertImage(id, url);
        } else {
            // console.log("ID exists");
        }
    });
}
This code runs when I get a response from a REST API via the request framework.
My problem is that when the count query returns and there is no element with the specific id, I always get the same id. I think that's because I use the same variable "id" there, but how can I fix it? I hope somebody can help me.
In this case, you're a victim of Node's asynchronous event loop. You're executing a synchronous for-loop and defining id:
for (var i = 0; i < data.data.length; i++) {
    var imageObject = data.data[i];
    var id = imageObject.id;
}
This works in normal JavaScript if you use id right away, but because the database module you're using runs asynchronously, the entire loop will have completed before your first DB query's callback fires, so by that time id has been clobbered and always holds the last value.
You'll need to re-write your function to behave asynchronously instead, or use something like node-async to help.
Here's a quick example of how that might look. Note that I didn't write your insertImage function for you; you'll need to rewrite that to support a callback as well.
async.each(data.data, function(imageObject, callback) {
    var sql = 'SELECT COUNT(*) AS imageIDCount FROM images WHERE id = ?'
    connection.query(sql, [imageObject.id], function(err, rows, fields) {
        if (err) return callback(err);
        if (rows[0].imageIDCount == 0) {
            console.log(imageObject.id + " doesn't exist");
            insertImage(imageObject.id, imageObject.images.standard_resolution.url, function(err) {
                callback(err); // fires the callback to async
            });
        } else {
            console.log(imageObject.id + " already exists");
            callback(); // maybe you want an error here too?
        }
    });
}, function(err) {
    // all of your db queries are completed
});
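For reference, a hypothetical insertImage that accepts a completion callback might look like this; it assumes an images table with id and url columns, which is not shown in the question:
function insertImage(id, url, callback) {
    connection.query('INSERT INTO images (id, url) VALUES (?, ?)', [id, url], function(err, result) {
        callback(err); // report success or failure back to the caller
    });
}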

Creating synchronous queries with node-mysql

I'm trying to ensure that one MySQL query leads to another and is not considered complete until all of its child queries have completed. For example, I start with one select, stream the rows, and execute subsequent queries from each row result. This is doable with callbacks, but I end up running out of memory, so I'd like to slow the process down and run it in batches; however, due to the async nature of the dispatch, I can't keep things in phase and end the connection after all the rows have been processed.
Here's an example:
var query = conn.query('select id from table1 limit 10');
query.on('result', function(row) {
    console.log('query1', row);
    var query2 = conn.query('select id from books where id = ? ', [row.id]);
    query2.on('result', function(row2) {
        console.log('query2', row2);
        var query3 = conn.query('insert into test (id) values (?)', [row2.id]);
        query3.on('result', function(row3) {
            console.log(row3);
        });
    });
});
query.on('end', function() {
    conn.end();
});
The above fails because there are still rows being processed by query3 after the initial query has ended.
Any thoughts? The actual code is even more complicated, because I have to process XML from the subsequent queries and fire off even more inserts as I loop through the batch.
Thanks!
I would suggest this solution with async module:
var async = require("async");

// connection instance
var conn;

// here goes task serving logic
// if any async function should be finished before drain callback, push them into q
var solvers = {
    query: function(q, task, row) {
        console.log('query1', row);
        q.push({
            solver: "query2",
            req: "select id from books where id = ?",
            reqArgs: [row.id]
        });
    },
    query2: function(q, task, row) {
        console.log('query2', row);
        q.push({
            solver: "query3",
            req: "insert into test (id) values (?)",
            reqArgs: [row.id]
        });
    },
    query3: function(q, task, row) {
        console.log(row);
    }
}

// here is a queue of tasks
var q = async.queue(function(task, cb) {
    var query = conn.query(task.req, task.reqArgs);
    query.on("end", cb);
    query.on("result", function(row) {
        solvers[task.solver](q, task, row);
    });
}, 2); // limit of parallel queries

// when every request has reached "end"
q.drain = function() {
    conn.end();
    // continue from here
};

// initial task
q.push({
    solver: "query",
    req: "select id from table1 limit 10",
    reqArgs: []
});
Still, I'm not sure that making requests ID by ID is a good solution.
Maybe I'm just not aware of the full problem.
#glukki, thanks for the great answer and the reference to async. I went with a permutation of your code, using two async queues that 'chomp and chew' through the work with a single connection plus a pool of connections, to process a 100K+ row select into 1.2M row inserts. It worked amazingly well and took less than 10 minutes. Here's the full implementation, minus the module and connection setup. I hope this helps someone else too. Thanks again!
function populateMesh(row, callback) {
    xmlParser.parseString('<root>' + row.mesh_heading_list + '</root>', function(err, result) {
        var q2 = async.queue(function(task, cb) {
            pool.getConnection(function(err, cnx) {
                cnx.query('INSERT INTO abstract_mesh (mesh_id, abstract_id, major_topic) SELECT mesh_descriptor.id, ?, ? FROM mesh_descriptor WHERE mesh_descriptor.name = ?', [task.id, task.majorTopic, task.descriptorName], function(err, result) {
                    if (err) { throw err; }
                    cnx.release();
                    cb();
                });
            });
        }, 50);

        q2.drain = function() {
            //console.log('all mesh processed');
            callback();
        }

        if (!(result.root instanceof Object)) {
            //console.log('its not obj!', row.id);
            q2.push({id: row.id, majorTopic: 'N', descriptorName: 'Null'}, function(err) {});
        }
        for (var i in result.root.MeshHeading) {
            // console.log('in loop', result.root.MeshHeading[i].DescriptorName);
            if (typeof result.root.MeshHeading[i].DescriptorName === 'undefined') {
                q2.push({id: row.id, majorTopic: 'N', descriptorName: 'Emergency'}, function(err) {});
            }
            for (var j in result.root.MeshHeading[i].DescriptorName) {
                var descriptorName = result.root.MeshHeading[i].DescriptorName[j]._;
                var majorTopic = result.root.MeshHeading[i].DescriptorName[j].$.MajorTopicYN;
                q2.push({id: row.id, majorTopic: majorTopic, descriptorName: descriptorName}, function(err) {});
            }
        }
    });
}

// here goes task serving logic
// if any async function should be finished before drain callback, push them into q
var q = async.queue(function(row, callback) {
    console.log('got id: ' + row.id);
    populateMesh(row, function() {
        callback();
    });
}, 10);

q.drain = function() {
    console.log('all items have been processed');
    conn.end(function(err) {
        console.log('connection ended');
    });
    pool.end(function(err) {
        console.log('pool closed');
    });
};

var truncate = conn.query('truncate abstract_mesh');
var select = conn.query('SELECT id, mesh_heading_list FROM pubtbl');
select.on('result', function(result) {
    // console.log(result);
    q.push(result, function(err) {
        //console.log('finished processing row');
    });
});
In my opinion the best solution is to make the code synchronous in a very easy way.
You could use the "synchronize" package.
Just
npm install synchronize
Then var sync = require('synchronize');
Put the logic which should be synchronous into a fiber by using
sync.fiber(function() {
    //put your logic here
});
An example for two mysql queries:
var express = require('express');
var bodyParser = require('body-parser');
var mysql = require('mysql');
var sync = require('synchronize');

var db = mysql.createConnection({
    host     : 'localhost',
    user     : 'user',
    password : 'password',
    database : 'database'
});

db.connect(function(err) {
    if (err) {
        console.error('error connecting: ' + err.stack);
        return;
    }
});

function saveSomething() {
    var post = {id: null}; // let auto-increment assign the id
    //no callback here; the result is in "query"
    var query = sync.await(db.query('INSERT INTO mainTable SET ?', post, sync.defer()));
    var newId = query.insertId;
    post = {foreignKey: newId};
    //this query can be async, because it doesn't matter in this case
    db.query('INSERT INTO subTable SET ?', post, function(err, result) {
        if (err) throw err;
    });
}
When "saveSomething()" is called, it inserts a row in a main table and receives the last inserted id. After that the code below will be executed. No need for nesting promises or stuff like that.
This is what I did:
db.query(
    "select name from USER where name = ?",
    ["test"],
    (err, result) => {
        if (err) {
            console.log("Error : ", err);
        } else if (result.length <= 0) {
            res.json("Not Found");
        } else {
            console.log("name found, executing update query!");
            updateAgeIfUserFound("test", 30); // calling function with 2nd query (example age value)
        }
    }
);

// Update age only if name is present
function updateAgeIfUserFound(name, age) {
    if (name) {
        db.query(
            "update USER set age = ? where name = ?",
            [age, name],
            (err, result) => {
                if (err) throw err;
                console.log("Name Updated");
                res.json("Name Updated");
            }
        );
    }
}

cannot response.write on connection node.js

I wrote this code in Node.js:
var http = require('http');

http.createServer(function(req, res) {
    res.writeHead(200, {'Content-Type': 'text/plain'});
    res.write('start\n');

    var mysql = require('mysql');
    var connection = mysql.createConnection({
        host     : 'hostname',
        user     : 'user',
        password : 'pass',
        database : 'dbname',
    });
    connection.connect();

    connection.query('SELECT code FROM user_type', function(err, rows, fields) {
        if (err) throw err;
        //This for is to run on all the results
        for (var i = 0; i < 2; i++) {
            res.write('name:', rows[i].code);
        }
    });

    connection.end();
    res.end('End');
}).listen(8080);

console.log('Server running');
console.log('Server running');
My questions are:
1. Why does the res.write in the for loop print nothing on my HTML page?
2. What should I write instead of 2 in the for loop?
The issue is the timing of execution.
Since connection.query() is asynchronous, it delays the execution of the callback function to when the query finishes, but allows the surrounding block of code to continue in the meantime. This results in res.end('End') being executed before res.write('name:',rows[i].code).
To also delay res.end(), move it inside the callback:
connection.query(..., function(err, rows, fields) {
    if (err) throw err;
    for (...) {
        // concatenate here: res.write()'s second argument is the encoding, not more data
        res.write('name: ' + rows[i].code + '\n');
    }
    res.end('End');
});
rows should be an Array when there isn't an err, so you can use its length property:
for (var i = 0; i < rows.length; i++) {
It's also a common practice to store the value of length in a local variable:
for (var i = 0, len = rows.length; i < len; i++) {