node.js / node_mysql - stale connections get "NO database selected" error - mysql

We have a node.js app that uses node-mysql, a great little library for accessing MySQL databases.
Unfortunately, if our connection is not used for maybe 8-10 hours, the next time we try to run a query, we get a "No database selected" error back from the server. We need to add a "USE db" somewhere, but I can't figure out where.
Now, it makes sense to me that connections would go stale, and it seems as though node-mysql is refreshing those stale connections, but there doesn't seem to be a way to make sure that the right DB is selected afterwards. I was looking for a .connected() callback or event or something that would let me make sure the correct DB was always USE'd, but no luck so far.
Any suggestions how to do this?

Yes, the client tries to reconnect. You can try to issue a 'USE db' query on reconnect using code like this:
client._connection.on('connect', function() { client.query('use db'); })
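A slightly more defensive variant of the same idea (just a sketch against the old node-mysql client API; DB_NAME is a placeholder for your database name): re-issue the USE on every (re)connect and log when it fails, so a bad re-select doesn't pass silently.
var DB_NAME = 'db'; // placeholder

client._connection.on('connect', function() {
  // Re-select the schema whenever the underlying socket (re)connects.
  client.query('USE ' + DB_NAME, function(err) {
    if (err) {
      console.error('Could not re-select database ' + DB_NAME, err);
    }
  });
});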
This is where reconnection happens in node-mysql (the 'end' handler):
https://github.com/felixge/node-mysql/blob/master/lib/mysql/client.js
var connection = self._connection = new Stream(),
    parser = self._parser = new Parser();

connection
  .on('error', function(err) {
    var connectionError = err.code && err.code.match(/ECONNREFUSED|ENOTFOUND/);
    if (connectionError) {
      if (cb) {
        cb(err);
        return;
      }
    }
    self.emit('error', err);
  })
  .on('data', function(b) {
    parser.write(b);
  })
  .on('end', function() {
    if (self.ending) {
      self.connected = false;
      self.ending = false;
      return;
    }
    if (!self.connected) {
      return;
    }
    self.connected = false;
    self._prequeue(connect);
  });

connection.connect(self.port, self.host);

Because of node-mysql updates, the following code may work instead:
client._socket.on('connect', function() {
  console.log("use ", DB_NAME);
  client.query('use ' + DB_NAME);
});

When using the node-mysql-promise package, you may want to use this code to do the same:
dbConn.pool.on('connection', function() {
  dbConn.pool.query("USE myDBname");
});
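For what it's worth, with the current mysql package (a newer API than the client shown above, so treat this as an assumption about your setup), passing database in the pool config means every connection the pool creates, including replacements for ones the server has dropped, already has the schema selected:
var mysql = require('mysql');

// Sketch only: host/user/password values are placeholders.
var pool = mysql.createPool({
  host: 'localhost',
  user: 'app',
  password: 'secret',
  database: 'db',      // selected on every new connection, so no manual USE is needed
  connectionLimit: 10
});

pool.query('SELECT 1 + 1 AS two', function(err, rows) {
  if (err) throw err;
  console.log(rows[0].two); // 2
});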

Related

Next.js MySQL INSERT/UPDATE query never seems to execute

Quite the odd issue here... I think this may be more of a problem of debugging, but I'm going to post it in case it is truly an issue, and I'm frankly at my wit's end anyway. I am doing a basic React.js/Next.js form that takes a few inputs and adds them to state, then uses axios to send the update to the API, which then makes an insert or update query to MySQL. The problem is, this insert/update doesn't work, and I can't get any error output besides a generic ETIMEDOUT from time to time, which I'm not even sure is related. I had this fixed before but am still unsure what I did. ALL other queries on the site work fine; the connection to the MySQL (AWS RDS) database is just fine.
My theories are A) the final query syntax has a silly issue causing this to just get lost in the abyss, or B) there's some server-side code trying to be run client-side that I don't quite understand (I have also gotten a module 'fs' not found error), or C) an async issue that I am not well-versed enough in Next.js to fix. And before you say it, yes there is data to be updated in the table; it is not trying to update the same data and thus bypassing the update. It is new data, every time I test.
NOTE: I should also say, this code works PERFECTLY on my local macOS environment. This ONLY happens when I run it on my Vercel deployment environment. This is important to know. The database and code are EXACTLY the same between both environments.
Without further ado, some code:
To save on code display, let's assume our values are in state and ready to go to the API, as I know for a fact they are, and they make it to the actual query.
handleSubmit - gets run when the form is submitted.
const handleSubmit = (e) => {
  e.preventDefault();
  // Loop data, create a list of IDs for the Delete and an
  // array of array of arrays for the insert.
  let segmentItemIDList = [];
  const segmentItemArray = [];
  originalSegmentItemList = originalSegmentItemList.join(',')
  segmentItemState.map((val, idx) => (
    segmentItemArray[idx] = [
      segmentItemState[idx].segmentID,
      Number(segmentItemState[idx].chronologicalOrder),
      Number(segmentItemState[idx].releaseOrder),
      segmentItemState[idx].name,
      segmentItemState[idx].typeID
    ]
  ))
  let action = 'updatesegmentitem'
  axios.post('/api/list', { action, segmentItemArray })
    .then((result) => {
      action = 'deletesegmentitem'
      axios.post('/api/list', { action, originalSegmentItemList })
        .then((result) => {
          alert("Updated!!");
        })
        .catch(error => console.error('Error:', error));
    })
    .catch(error => console.error('Error:', error));
}
api/list (assume it gets into this block, because it does)
else if (req.body.action == 'updatesegmentitem') {
  console.log("2. API updatesegmentitem req.body: ", req.body);
  const segmentItemArray = req.body.segmentItemArray;
  console.log("SegmentItemArray: ", segmentItemArray);
  try {
    if (Array.isArray(segmentItemArray) && segmentItemArray.length > 0) {
      console.log("Inside IsArray: ", segmentItemArray);
      const segmentItemInsertResults = await insertBatchSegmentItems(segmentItemArray);
      res.send(segmentItemInsertResults);
    } else {
      res.send(true);
    }
  } catch (e) {
    res.send('error');
  }
}
insertBatchSegmentItems (the MySQL query). Sometimes I get the console logs in here, sometimes not:
export async function insertBatchSegmentItems(segmentItemData) {
  let mysqlConnection = mysql.createConnection({
    host: process.env.MYSQL_HOST,
    database: process.env.MYSQL_DATABASE,
    user: process.env.MYSQL_USER,
    password: process.env.MYSQL_PASSWORD,
    debug: false,
  });
  mysqlConnection.connect();
  const insertSQL = 'INSERT INTO segmentItem (segmentID, chronologicalOrder, releaseOrder, name, typeID) VALUES ?'
  try {
    await mysqlConnection.query(insertSQL, [segmentItemData], function(err, result) {
      console.log("Connex Query Inside Result: ", result);
      if (err) throw err;
      //mysqlConnection.destroy();
      return result;
    });
  } catch (e) {
    console.log("ERROR: ", e);
    //mysqlConnection.destroy();
    return e;
  }
  return true;
}
Please excuse my mess, I have been trying so many different things to try and get this to work but it will be cleaned up after a solution has been found.
Whenever I run into similar situations, I usually drop the exception handling and let it fail hard. It might give you better insight into where it's happening. Good luck!
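One way to make it fail hard (a sketch; the switch to the mysql2 package is my assumption, not something from the question): the callback-style mysql query you are awaiting doesn't return a promise, so the await resolves immediately, whereas mysql2's promise API actually waits for the INSERT, and without a catch any rejection surfaces in the Vercel function logs instead of being swallowed.
// Sketch only: mysql2/promise is an assumption, not part of the original code.
import mysql from 'mysql2/promise';

export async function insertBatchSegmentItems(segmentItemData) {
  const connection = await mysql.createConnection({
    host: process.env.MYSQL_HOST,
    database: process.env.MYSQL_DATABASE,
    user: process.env.MYSQL_USER,
    password: process.env.MYSQL_PASSWORD,
  });
  try {
    // Bulk insert: segmentItemData is an array of row arrays, as built in handleSubmit.
    const [result] = await connection.query(
      'INSERT INTO segmentItem (segmentID, chronologicalOrder, releaseOrder, name, typeID) VALUES ?',
      [segmentItemData]
    );
    return result; // no catch: let failures reject and show up in the deployment logs
  } finally {
    await connection.end(); // close the connection so the serverless function can exit
  }
}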

Connecting to Openshift WebSocket issue

I'm trying to get my OpenShift Node.js app working, but the WebSocket connection is not working. The client-side error is: connection refused.
Client-side factory service:
var dataStream = $websocket(localStorageService.get('wsUrl'))

dataStream.onMessage(function(message) {
  var call = JSON.parse(message.data)
  if (fnMap[call.fn]) {
    fnMap[call.fn](call.event, call.data)
  }
})

dataStream.onError(function(err) {
  console.log(err)
})

dataStream.onClose(function(event) {
  console.log('event: ' + JSON.stringify(event))
})

var fnMap = {
  "broadcastResult": function(event, data) {
    $rootScope.$broadcast(event, data)
  }
}

var methods = {
  callFn: function(paramJSON) {
    dataStream.send(JSON.stringify(paramJSON));
  }
}

return methods
I'm trying to connect to the following URL: ws://myapp-myname.rhcloud.com:8000
Could you please help?
Thank you in advance,
Csaba
First, you may want to make sure the websocket library, ws, is installed by declaring it in the dependencies section of your package.json and then doing another git push.
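For example, something like this in package.json (the version range here is only an illustration):
{
  "dependencies": {
    "ws": "^1.0.0"
  }
}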
Otherwise, try the example provided in the openshift blog: https://blog.openshift.com/paas-websockets/
Specifically:
var websocket = new WebSocket("ws://myapp-myname.rhcloud.com:8000");

websocket.onopen = function(event) {
  // The connection was opened
  console.log(event)
};

websocket.onclose = function(event) {
  // The connection was closed
  console.log(event)
};

websocket.onmessage = function(event) {
  // New message arrived
  message = event.data
  console.log(event)
};

websocket.onerror = function(event) {
  // There was an error with your WebSocket
  console.log(event)
};
The above should at least help point you toward the specific part of your code that isn't working. Or, if the example isn't working either, there could possibly be something wrong with your cartridge, and you could try reaching out to Red Hat's support.
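If the server side is the problem, a minimal ws server for an OpenShift v2 Node.js cartridge might look like the sketch below (the environment variable names and the echo handler are assumptions, not taken from your app); externally, clients still connect to ws://myapp-myname.rhcloud.com:8000.
var http = require('http');
var WebSocketServer = require('ws').Server;

// On the gear itself the app must bind to the internal address/port;
// ports 8000/8443 are only used on the public side of the proxy.
var ip = process.env.OPENSHIFT_NODEJS_IP || '127.0.0.1';
var port = process.env.OPENSHIFT_NODEJS_PORT || 8080;

var server = http.createServer();
var wss = new WebSocketServer({ server: server });

wss.on('connection', function(socket) {
  socket.on('message', function(msg) {
    socket.send(msg); // simple echo, enough to verify the client's onMessage handler
  });
});

server.listen(port, ip);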

ws how to catch : WebSocket connection to 'ws:// failed: Error in connection establishment: net::ERR_CONNECTION_REFUSED

I have a simple HTML5 WebSockets page. When the server is up, everything works fine.
The problem is when I close the server (for testing).
I'm getting:
WebSocket connection to 'ws://127.0.0.1:7777/api' failed: Error in connection establishment: net::ERR_CONNECTION_REFUSED
which I am unable to catch; it never jumps to onerror or onclose in the case of this error.
init: function () {
  this.m_wsiSendBinary = new WebSocket("ws://127.0.0.1:7681/wsapi");
  this.m_wsiSendBinary.onopen = function(evt) {
    cc.log("Send Binary WS was opened.");
  };
  this.m_wsiSendBinary.onmessage = (function(evt) {
    this.handleServerResponse(yStr);
    this.m_wsiSendBinary.onerror = function(evt) {
    };
    this.m_wsiSendBinary.onclose = function(evt) {
      cc.log("m_wsiSendBinary websocket instance closed.");
      self.m_wsiSendBinary = null;
    };
  }).bind(this);
I do not have a full answer; however, I dealt with a similar issue and have a partial, not-so-elegant solution (it may help someone). Unfortunately, it does not eliminate the error message.
Two business requirements:
BR1 - Handle the state at initialization when the server is not available.
BR2 - Handle the state when the server stops.
Solution for BR1
var global_connection_openned = null; // Here you have the result

init: function () {
  global_connection_openned = false;
  this.m_wsiSendBinary = new WebSocket("ws://127.0.0.1:7681/wsapi");
  this.m_wsiSendBinary.onopen = function(evt) {
    global_connection_openned = true;
  };
Solution for BR2 (assumes BR1)
// Somewhere in your project, called by setInterval(..); this detects that the
// connection is lost and tries to reestablish/reopen the connection.
{
  if (this.m_wsiSendBinary == null || this.m_wsiSendBinary.readyState == 3)
    this.init();
  if (!global_connection_openned)
    this.m_wsiSendBinary = null;
}
Anyway, I would be really curious whether there is a solid and proper solution for this use case.
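For what it's worth, a common pattern (a sketch, not a complete or official solution): the browser's console message itself cannot be suppressed, but the close event does fire when the connection is refused, so both initial unavailability and a later server stop can be handled by reconnecting from onclose with a delay.
function connectWithRetry(url, retryMs) {
  var ws = new WebSocket(url);
  ws.onopen = function() {
    cc.log('WS connected');
  };
  ws.onerror = function(evt) {
    // ERR_CONNECTION_REFUSED still appears in the console, but we land here and in onclose.
  };
  ws.onclose = function() {
    // Covers both "server was never up" and "server stopped later".
    setTimeout(function() { connectWithRetry(url, retryMs); }, retryMs);
  };
  return ws;
}

var socket = connectWithRetry("ws://127.0.0.1:7681/wsapi", 5000);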

Cancel promises in Promise.map if one is rejected (Bluebird 3)

I use Bluebird 3 with cancellation enabled. Is cancellation the right tool for the following use case?
var resourcesPromise = Promise.map(resourceIds, function(id) {
  return loadResource(id);
});

resourcesPromise.catch(function() {
  resourcesPromise.cancel();
});
If one of the resources fails to load, resourcesPromise will be rejected, and I want to stop the loading of all other resources. But as far as I can tell, cancelling resourcesPromise doesn't work, because it is already rejected.
Edit: I'm currently considering variants of the following:
var resourcesPromise = new Promise(function(resolve, reject) {
  var intermediatePromise = Promise.map(resourceIds, function(id) {
    return loadResource(id).catch(function(error) {
      intermediatePromise.cancel();
      reject(error);
    });
  }).then(resolve, reject);
});
(I may have found a legitimate use for the ".then(resolve, reject)" anti-pattern!)
Any ideas why Promise.map doesn't work like that?
With map you are parallelizing the resource loading; it may be better to use Promise.each to load the resources in sequence. In that case you don't need to cancel the promise to stop loading the remaining resources when one fails.
var resourcesPromise = Promise.each(resourceIds, function(id) {
  return loadResource(id);
});

resourcesPromise.catch(function(err) {
  // Promise.each runs sequentially and stops at the first rejection,
  // so there is nothing left to cancel here.
});
Another option would be to pass the map function an options object in which you can specify a concurrency limit.
Promise.resolve(resourceIds)
  .map(function(id) {
    return loadResource(id);
  }, {concurrency: n})
  .catch(function(e) {
    // do some error handling
  });
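A more concrete version of that option (a sketch; the concurrency value of 3 and the loadResource stub are illustrative only): limiting concurrency at least bounds how many loads are still in flight when one of them fails.
var Promise = require('bluebird');
Promise.config({ cancellation: true }); // as in the question, cancellation is enabled

// Hypothetical loader, used only to make the sketch runnable.
function loadResource(id) {
  return Promise.delay(10).return({ id: id });
}

var resourceIds = [1, 2, 3, 4, 5];

var resourcesPromise = Promise.resolve(resourceIds)
  .map(function(id) {
    return loadResource(id);
  }, { concurrency: 3 });

resourcesPromise
  .then(function(resources) { console.log('loaded', resources.length, 'resources'); })
  .catch(function(e) { console.error('a resource failed to load:', e); });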

Possible to have node.js redis fallback?

In Laravel, you can do something like
$object = Cache->remember(key, duration, function() {
  $result = mysql_fetch_something(); // retrieve from MySQL here
  return $result;
});
where, basically, Laravel checks the cache first to see if the key exists there, and if not, it lets you retrieve the value from the database and automatically puts it in the cache while also returning it. Is there a similar construct in Node, that is, a one-stop cache-check / DB-fallback mechanism?
In Node there is no special command for this, but you can build it yourself.
Just check with the Redis EXISTS command whether the key is in Redis, and if not, query MySQL and store the result.
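As a sketch of that idea (assuming the callback-based redis and mysql packages; the helper name remember and the key format are mine, and GET is used instead of EXISTS so the cached value comes back in a single round trip):
var redis = require('redis');
var mysql = require('mysql');

var cache = redis.createClient();
var db = mysql.createConnection({
  host: 'localhost', user: 'app', password: 'secret', database: 'db' // placeholders
});

function remember(key, durationSeconds, loadFromDb, callback) {
  cache.get(key, function(err, cached) {
    if (!err && cached !== null) {
      return callback(null, JSON.parse(cached)); // cache hit
    }
    // Cache miss (or redis error): fall back to MySQL, then store the result.
    loadFromDb(function(dbErr, result) {
      if (dbErr) return callback(dbErr);
      cache.setex(key, durationSeconds, JSON.stringify(result), function() {});
      callback(null, result);
    });
  });
}

// Usage, mirroring the Laravel example above:
remember('user:42', 3600, function(done) {
  db.query('SELECT * FROM users WHERE id = ?', [42], function(err, rows) {
    done(err, rows && rows[0]);
  });
}, function(err, user) {
  if (err) throw err;
  console.log(user);
});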
You can do something like this in cache.js:
var isCacheAvailable = true;

exports.init = function () {
  var server = config.get('Cache.server');
  var port = config.get('Cache.port');
  client = redis.createClient(port, server);
  // handle redis connection temporarily going down without app crashing
  client.on("error", function (err) {
    logger.error("Error connecting to redis server " + server + ":" + port, err);
    isCacheAvailable = false;
  });
}

exports.isCacheAvailable = function() {
  return isCacheAvailable;
}
Check the isCacheAvailable() function wherever you intend to use the cache:
if (cache.isCacheAvailable()) {
  // use cache to fetch data
} else {
  // fall back to the MySQL DB
}
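Filled in, that check might look something like this (a sketch: it assumes cache.js also exports a get(key, callback) wrapper around the redis client, which is not shown above, and getUserFromMysql is your own DB helper):
function getUser(id, callback) {
  if (cache.isCacheAvailable()) {
    cache.get('user:' + id, function(err, cached) {
      if (!err && cached !== null) {
        return callback(null, JSON.parse(cached)); // cache hit
      }
      getUserFromMysql(id, callback); // miss or redis error: fall back to MySQL
    });
  } else {
    getUserFromMysql(id, callback); // redis marked unavailable: go straight to the DB
  }
}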
Hope this helps.