Mysql find element - mysql

I have:
const connection = mysql.createConnection({
    host: process.env.APP_DATABASE_HOST,
    user: process.env.APP_DATABASE_USER,
    password: process.env.APP_DATABASE_PASS,
    database: process.env.APP_DATABASE_NAME,
    port: process.env.APP_DATABASE_PORT
});

const connectDataBase = () => {
    connection.connect(function(error) {
        if (error) {
            console.log(error);
        } else {
            console.log('Connected');
        }
    });
}

exports.search = async (id) => {
    try {
        connectDataBase();
        await connection.query(`SELECT * from devices where numberserial="${id}"`, function (err, result, fields) {
            if (result) {
                return true;
            }
            return false;
        });
        closeConnectionDataBase();
    } catch (error) {
        console.log(error)
    }
};

const closeConnectionDataBase = () => {
    connection.end();
}
I want it to return true when it finds the element and false when it doesn't.
Right now it returns true when the element is found. The problem I'm having is that when it doesn't find the element, the terminal gets stuck. What am I doing wrong? What I want is that if the element is not found in the database, it returns false.
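The return true / return false inside the query callback only returns from that callback, never from search itself, so the caller has nothing to await. One way to get a boolean out of search is to wrap the callback-style query in a Promise. This is a minimal sketch of just that part, not the original code, assuming the callback API of the mysql package and leaving the connect/close handling as it is above:

exports.search = (id) => {
    return new Promise((resolve, reject) => {
        // The ? placeholder lets the driver escape `id` instead of interpolating it into the SQL string.
        connection.query('SELECT * FROM devices WHERE numberserial = ?', [id], (err, result) => {
            if (err) return reject(err);
            // `result` is an array of rows; an empty array means the element was not found.
            resolve(result.length > 0);
        });
    });
};

A caller can then do const found = await search(id) and get true or false either way.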

Related

Node Js with mySQL return Null

I am trying to skip the data if there is nothing returned from the MySQL database.
How can I achieve it?
Here is my code:
const mysql = require('mysql2');
function dbconnection() {
    const connection = mysql.createConnection({
        host: '127.0.0.1',
        user: 'root',
        password: 'password',
        database: 'DB'
    });
var Data_export = dbconnection();
Data_export.query(sql, function (err, data, fields) {
    if (err) throw err;
    if (data !== Null) {
        const jsonData = JSON.parse(JSON.stringify(data));
        fastcsv
            .write(jsonData, { headers: true })
            .on("finish", function () {
                console.log("Write to file.csv successfully!");
            })
            .pipe(ws);
    } else { console.log("Nothing"); }
But it doesn't work
Any help would be greatly appreciated, thanks!
You must return something from your connection function, and the code has some syntax issues.
This should work:
function dbconnection() {
    const connection = mysql.createConnection({
        host: '127.0.0.1',
        user: 'root',
        password: 'password',
        database: 'DB'
    });
    return connection;
}

var Data_export = dbconnection();
Data_export.query(sql, function (err, data, fields) {
    if (err) throw err;
    if (data !== null) {
        const jsonData = JSON.parse(JSON.stringify(data));
        fastcsv
            .write(jsonData, { headers: true })
            .on("finish", function () {
                console.log("Write to file.csv successfully!");
            })
            .pipe(ws);
    } else {
        console.log("Nothing");
    }
});
I found the solution.
Because MySQL returns an array, instead of
if (data !== Null) {
    const jsonData = JSON.parse(JSON.stringify(data));
    fastcsv
        .write(jsonData, { headers: true })
        .on("finish", function () {
            console.log("Write to file.csv successfully!");
        })
        .pipe(ws);
}
I use this one:

if (data.length !== 0) {
    const jsonData = JSON.parse(JSON.stringify(data));
    // write duplication to CSV file
    fastcsv
        .write(jsonData, { headers: true })
        .on("finish", function () {
            console.log("Write to file.csv successfully!");
        })
        .pipe(ws);
}
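Since the code already requires mysql2, a hedged alternative (a sketch under my own assumptions, not the poster's code) is the promise wrapper that ships with mysql2, which drops the callback nesting and makes the empty-result check straightforward. It assumes fastcsv is the fast-csv package and ws is a writable file stream, as in the original:

const mysql = require('mysql2/promise');
const fs = require('fs');
const fastcsv = require('fast-csv');

async function exportToCsv(sql) {
    const connection = await mysql.createConnection({
        host: '127.0.0.1',
        user: 'root',
        password: 'password',
        database: 'DB'
    });
    // mysql2/promise resolves query() with [rows, fields]; rows is always an array.
    const [rows] = await connection.query(sql);
    if (rows.length !== 0) {
        const ws = fs.createWriteStream('file.csv');
        fastcsv
            .write(rows, { headers: true })
            .on('finish', () => console.log('Write to file.csv successfully!'))
            .pipe(ws);
    } else {
        console.log('Nothing');
    }
    await connection.end();
}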

How to make return wait for MySQL connection to end? Node.js

I'm new to Node.js. I'm testing some code on Wix to check my database for whether an account name already exists before allowing a new one to be created (I'm purposely not using a WHERE clause at the moment, for learning purposes).
Currently the check-account-name method returns before the connection finishes, so the check doesn't take place properly.
Any help appreciated.
export function tryToCreateAccount(login, password)
{
    var mysql = require('mysql');
    var connection = mysql.createConnection({
        host: 'host',
        user: 'user',
        password: 'pass',
        database: 'db'
    });
    if (checkAccountName(login, connection))
    {
        console.log("Name didn't exist.");
    }
    else
    {
        console.log("Name Existed.");
    }
}

function checkAccountName(account_name, connection)
{
    var accountNameAvailable = true;
    connection.connect(function (err)
    {
        if (err) throw err;
        connection.query("SELECT login FROM accounts", function (err, result)
        {
            if (err) throw err;
            for (var i = 0; i < result.length; i++)
            {
                if (result[i].login == account_name)
                {
                    console.log("Should of been false");
                    connection.end;
                    accountNameAvailable = false;
                }
            }
        });
        connection.end;
    });
    return accountNameAvailable;
}
I figured out why it wasn't doing anything: next was getting called too late, since the connection had ended and next was inside the connection code block.
const mysql = require('mysql');
const connection = mysql.createConnection({
    host: 'host',
    user: 'user',
    password: 'pass',
    database: 'db'
});

export function tryToCreateAccount(login, password)
{
    checkAccountName(login, connection, function(err, accountNameAvailable)
    {
        if (err || !accountNameAvailable) {
            console.log("Name didn't exist.");
        }
        else
        {
            console.log("Name Existed.");
        }
    })
}

function checkAccountName(login, connection, next)
{
    var accountNameAvailable = false;
    connection.connect(function (err)
    {
        if (err) next(err);
        connection.query("SELECT login FROM accounts", function (err, result) {
            if (err) next(err);
            for (var i = 0; i < result.length; i++)
            {
                if (result[i].login == login)
                {
                    accountNameAvailable = true;
                }
            }
            next(null, accountNameAvailable);
            connection.end();
        });
    });
}
Welcome to Node.js (and the world of Async functions (and Promises (and Callbacks)))
I've written this in the "callback" style, but I highly recommend looking into async/await for something like this, as well as understanding how "promises" fit into the picture.
// to test, call tryToCreateAccount('login','pass',function(err,data){console.log(err,data)});
const mysql = require('mysql');
const connection = mysql.createConnection({
    host: 'host',
    user: 'user',
    password: 'pass',
    database: 'db'
});

export function tryToCreateAccount(login, password, next)
{
    checkAccountName(login, connection, function(err, accountNameAvailable) {
        if (err || !accountNameAvailable) {
            console.log("Name didn't exist.");
            next(err || "Name didn't exist.")
        }
        else
        {
            console.log("Name Existed.");
            next(null, true)
        }
    })
}

function checkAccountName(account_name, connection, next)
{
    var accountNameAvailable = false;
    connection.connect(function (err)
    {
        if (err) next(err);
        connection.query("SELECT login FROM accounts", function (err, result) {
            if (err) next(err);
            for (var i = 0; i < result.length; i++)
            {
                if (result[i].login == account_name)
                {
                    console.log("Should of been false");
                    connection.end;
                    accountNameAvailable = true;
                }
            }
            connection.end();
            next(null, accountNameAvailable);
        });
    });
}
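The answer recommends looking into async/await; a rough sketch of what that could look like here (my own sketch, not part of the answer), wrapping the callback-style mysql API with util.promisify and reusing the connection defined above:

const util = require('util');

export async function tryToCreateAccount(login, password)
{
    // The mysql package connects lazily on the first query, and promisify
    // resolves with the result rows (the fields argument is dropped).
    const query = util.promisify(connection.query).bind(connection);
    const rows = await query("SELECT login FROM accounts");
    const nameTaken = rows.some((row) => row.login === login);
    if (nameTaken)
    {
        console.log("Name Existed.");
    }
    else
    {
        console.log("Name didn't exist.");
    }
    return !nameTaken;
}

The caller can then await tryToCreateAccount(login, password) and branch on the returned boolean instead of passing a callback.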

How to wait for a MYSQL query to finish before executing another using Node server?

I am building an Express server to receive a request (a dict with 10 items) from my React front end and then save the data to the database. Below is my code.
I found that the queries may crash during the insertion, e.g. two queries got the same id from last_insert_id(). I have tried to use setTimeout() to wrap the getConnection function, but the issue still exists. How can I better solve the problem?
The request data:
{{.....}, {.....}, {.....}, {.....}, {.....}} #10 item
Code:
router.post('/fruit', (req, res) => {
    const dict = req.body;
    let itemCount = 0;
    var err_list = [];
    Object.keys(dict).forEach(function(r) {
        let query = "call sp_insert_fruit();"
        setTimeout(function() {
            getConnection(function(err, conn) {
                if (err) {
                    return res.json({ success: false, error: err })
                } else {
                    conn.query(query, function (err, result, fields) {
                        if (err) {
                            err_list.push({ 'errno': err.errno, 'sql_message': err.sqlMessage });
                        }
                        itemCount++;
                        if (itemCount === Object.keys(dict).length) {
                            conn.release()
                            console.log('released', err_list)
                            if (err_list.length === 0) {
                                return res.json({ success: true });
                            } else {
                                return res.json({ success: false, error: err_list });
                            }
                        }
                    });
                }
            });
        }, 1000);
    });
});
connection.js:
const p = mysql.createPool({
    "connectionLimit": 100,
    "host": "example.org",
    "user": "test",
    "password": "test",
    "database": "test",
    "multipleStatements": true
});

const getConnection = function(callback) {
    p.getConnection(function(err, connection) {
        callback(err, connection)
    })
};

module.exports = getConnection
You should replace callbacks with Promises and async/await to avoid callback hell. Using Promises, this problem should be easy to solve.
connection.js
const p = mysql.createPool({
    "connectionLimit": 100,
    "host": "example.org",
    "user": "test",
    "password": "test",
    "database": "test",
    "multipleStatements": true
});

// wrap p.getConnection with Promise
function getConnection() {
    return new Promise((resolve, reject) => {
        p.getConnection((err, connection) => {
            if (err) reject(err);
            else resolve(connection);
        });
    });
};

module.exports = getConnection;
Router code
// wrap conn.query with Promise
function executeQuery(conn, query) {
    return new Promise((resolve, reject) => {
        conn.query(query, (err, result, fields) => {
            if (err) reject(err);
            else resolve({ result, fields });
        });
    });
}

router.post('/fruit', async (req, res) => {
    const dict = req.body;
    const errList = [];
    const query = "call sp_insert_fruit();"
    let conn = null;
    try {
        conn = await getConnection();
    } catch (err) {
        return res.json({
            success: false,
            error: err
        });
    }
    for (const r of Object.keys(dict)) {
        try {
            const { result, fields } = await executeQuery(conn, query);
        } catch (err) {
            errList.push({
                'errno': err.errno,
                'sql_message': err.sqlMessage
            });
        }
    }
    conn.release();
    console.log('released', errList);
    // I don't know what err_imnt is, so I guess it's errList?
    if (errList.length === 0) {
        return res.json({
            success: true
        });
    } else {
        return res.json({
            success: false,
            error: errList
        });
    }
});

How to use MySQL transaction commit/rollback in a Lambda function using Node.js

Hi, I want to use MySQL's beginTransaction / commit / rollback functionality in my Lambda (Node) function.
I tried the basic structure from the mysql package, but it seems it's not working in Lambda.
const mysql = require('mysql');
exports.handler = async (event) => {
    const con = mysql.createConnection(
        {
            host: "host",
            user: "user",
            password: "*****",
            database: "db"
        }
    );
    con.beginTransaction(
        function (err) {
            con.query(
                "query goes here",
                function (err, status) {
                    if (err) {
                        con.rollback();
                        con.end();
                        return err;
                    } else {
                        con.commit();
                        con.end();
                        return true;
                    }
                })
        });
}
Sorry for the delayed answer.
I just needed to call beginTransaction without a callback:
const mysql = require('mysql');
exports.handler = async (event) => {
    const con = mysql.createConnection(
        {
            host: "host",
            user: "user",
            password: "*****",
            database: "db"
        }
    );
    con.beginTransaction(); // here I was declaring a standard callback function with an err parameter
    con.query(
        "query goes here",
        function (err, status) {
            if (err) {
                con.rollback();
                con.end();
                return err;
            } else {
                con.commit();
                con.end();
                return true;
            }
        });
}
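For reference, here is a sketch of the same flow with the callback API wrapped in promises (my own assumption, not part of the answer), so the async handler awaits the whole transaction before returning:

const mysql = require('mysql');
const util = require('util');

exports.handler = async (event) => {
    const con = mysql.createConnection({
        host: "host",
        user: "user",
        password: "*****",
        database: "db"
    });
    // Promisify the callback-style transaction helpers of the mysql package.
    const beginTransaction = util.promisify(con.beginTransaction).bind(con);
    const query = util.promisify(con.query).bind(con);
    const commit = util.promisify(con.commit).bind(con);
    const rollback = util.promisify(con.rollback).bind(con);
    try {
        await beginTransaction();
        await query("query goes here");
        await commit();
        return true;
    } catch (err) {
        await rollback();
        return err;
    } finally {
        con.end();
    }
};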

How to avoid deadlock in nodejs mysql with a lot of queries?

I have a lot of URLs, and for every URL I call the function load(url). This function parses the HTML, extracts the needed data, and builds a bulk insert query, as you can see in my test.js code. The problem is, if I have too many URLs (like 100+), I get an Error: ER_LOCK_DEADLOCK from MySQL. I tried to use async.queue, but this is somehow not working (I don't know why, maybe I am using it wrongly). How can I run the many URLs + queries one after another, avoiding the parallel execution which I think resulted in the deadlock? Even using async.queue results in a DEADLOCK (not always).
test.js
const request = require('request');
const async = require('async');
const pool = require('./database');

const urls = [
    'https://www.quora.com/What-is-the-best-way-to-have-delayed-job-queue-with-node-js',
    'https://de.wikipedia.org/wiki/Reinhardt-Zimmermann-L%C3%B6sung',
    'https://towardsdatascience.com/the-5-clustering-algorithms-data-scientists-need-to-know-a36d136ef68'
]

let load = function(url) {
    request({url: url}, function(error, response, html) {
        if (!error) {
            console.log(html);
            /**
             * 1. Parse HTML
             * 2. Create Array of Values
             * 3. Call pool.query(sql, [values], function(error) { ... })
             */
            let data = [{}];
            let sql = "INSERT IGNORE INTO tbl_test (title, content) VALUES ?";
            let values = [];
            data.forEach((item) => { values.push(item) });
            pool.query(sql, [values], function(error) {
                if (error) throw error;
            })
        } else {
            console.log("handle error...");
        }
    })
}

let jobs = []
/*urls.forEach((url) => {
    //jobs.push(load(url)); // --> Works but fails if the urls list is too big -> mysql deadlock error!
    jobs.push(function(callback) { callback(load(url)) });
})*/

let q = async.queue(function(task, callback) {
    console.log("Task:", task.uri);
    callback();
})

q.drain = function() {
    console.log('all task completed');
    pool.end();
}

urls.forEach((url) => {
    q.push({uri: url}, function(err) {
        console.log('finished processing ...')
    });
});
database.js
require('dotenv').config();
const mysql = require('mysql');

let pool = mysql.createPool(
    {
        connectionLimit: 10,
        host: process.env.DB_HOST,
        port: process.env.DB_PORT,
        user: process.env.DB_USER,
        password: process.env.DB_PASSWORD,
        database: process.env.DB_NAME
    }
);

pool.getConnection((err, connection) => {
    if (err) {
        if (err.code === 'PROTOCOL_CONNECTION_LOST') {
            console.log('Database connection lost.')
        }
        if (err.code === 'ER_CON_COUNT_ERROR') {
            console.log('Database has too many connections.')
        }
        if (err.code === 'ECONNREFUSED') {
            console.log('Database connection refused.')
        }
        if (err.code === 'POOL_CLOSED') {
            console.log('Pool is closed.')
        }
    }
    if (connection) {
        connection.release()
    }
    return;
});

module.exports = pool;
I have changed the code to use async.series instead of async.queue, because the tasks would run in parallel in the queue (see: https://caolan.github.io/async/docs.html#queue).
test.js
...
let tasks = [];
context.forEach((ctx) => {
    tasks.push(function(callback) { load(ctx, callback) });
});

async.series(tasks, function(err) {
    if (err) return next(err);
});
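The snippet assumes load was changed to take a completion callback, which the answer doesn't show. A rough sketch of what that could look like (my assumption, mirroring the request/pool code from test.js above and assuming each ctx carries the URL as ctx.uri):

let load = function(ctx, callback) {
    request({ url: ctx.uri }, function(error, response, html) {
        if (error) return callback(error);
        // 1. Parse the HTML and 2. build the values array (omitted, as in the original)
        let sql = "INSERT IGNORE INTO tbl_test (title, content) VALUES ?";
        let values = [];
        if (values.length === 0) return callback(null); // nothing parsed, skip the insert
        pool.query(sql, [values], function(queryError) {
            callback(queryError); // async.series only starts the next task after this fires
        });
    });
}

Because each task waits for the previous one's callback, the inserts run strictly one after another, which avoids the parallel execution that was triggering the deadlock.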