RDS MySQL timing out intermittently when called from Lambda using NodeJS - mysql

My web app uses AWS Lambda with Node.js, and the backend is RDS (MySQL). I'm using serverless-mysql to make DB calls.
For some reason, the DB call times out intermittently. I tried the following:
Enabled VPC flow logs to see if there are any errors (but couldn't find any reject statuses).
Made the database publicly available and took the Lambda out of the VPC (to rule out an issue with the VPC configuration). It still failed intermittently, so the VPC is out of the equation.
RDS is not showing any unusual spikes or connection exhaustion; monitoring shows a peak of only 3 connections. The Lambda is always kept warm. I tried increasing the timeout up to 25 seconds. Still no luck.
Below is the code I use:
export async function get(event, context, callback) {
  if (await warmer(event)) return 'warmed';
  context.callbackWaitsForEmptyEventLoop = false;
  try {
    const userId = getUserIdFromIdentityId(event);
    const query = "select * from UserProfile where UserId = ?";
    const result = await mysql.query(query, [userId]);
    console.log(result);
    console.log('getting user account');
    mysql.quit();
    return success({
      profileSettings: result.length > 0 ? result[0] : null,
    });
  } catch (e) {
    console.log(e);
    return failure();
  }
}
The success function basically returns a JSON object like below:
return {
  statusCode: 200,
  headers: {
    "Access-Control-Allow-Origin": "*",
    "Access-Control-Allow-Credentials": true
  },
  body: JSON.stringify(body)
};
mysql is initialized as below:
export const mysql = AWSXray.captureMySQL(require('serverless-mysql')({
  config: {
    host: process.env.dbHost,
    user: process.env.dbUsername,
    password: process.env.dbPassword,
    database: process.env.database,
  }
}));
The only error I can see in the CloudWatch logs is:
Task timed out after 10.01 seconds.
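One thing worth double-checking (my assumption, not something stated in the question): serverless-mysql also exposes an async end() method, intended to be awaited at the end of each invocation so the client can manage and reuse connections, whereas quit() terminates the connection outright. A minimal sketch of the handler with that change:

export async function get(event, context) {
  if (await warmer(event)) return 'warmed';
  context.callbackWaitsForEmptyEventLoop = false;
  try {
    const userId = getUserIdFromIdentityId(event);
    const result = await mysql.query(
      'select * from UserProfile where UserId = ?',
      [userId]
    );
    // Let serverless-mysql clean up or reuse the connection for the next invocation
    await mysql.end();
    return success({
      profileSettings: result.length > 0 ? result[0] : null,
    });
  } catch (e) {
    console.log(e);
    return failure();
  }
}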

Related

Amazon RDS load balancing not working with mysql.createPool in nodejs

I have implemented load balancing for read database connections: when the read DB load increases to 60%, a new read database instance is launched to balance the load.
But when I look at all the API calls in the AWS console dashboard, it does initiate a new read database instance, yet most of the load lands on read DB instance 1 (up to 90 percent, roughly 10 req/sec), while read DB instance 2 is only 1 to 5% used (about 1 req/sec).
It should divide the API requests equally across both databases, but it doesn't.
The issue is that mysql.createPool will not close its connections to database 1 (createPool reuses its open connections), so other API calls never move to the second database instance.
To solve this problem I replaced mysql.createPool with mysql.createConnection on each API call.
I created 2 middlewares:
1 - for createConnection
2 - for connection.end()
Whenever a request comes in, middleware 1 creates a new connection, and when the request finishes, middleware 2 ends the connection. This solved my load-balancing problem, but a new issue appeared: with this method I now run into "too many database connections" errors.
Does anyone who has faced this issue have a proper solution, or can anyone help?
Sample code:
var readDB = mysql.createConnection({
  database: process.env.READ_DB_DATABASE,
  host: process.env.READ_DB_HOST,
  user: process.env.READ_DB_DB_USER,
  password: process.env.READ_DB_DB_PASSWORD,
  charset: "utf8mb4"
});
utils.js
async onFinish(req, res, next) {
  return new Promise(async (resolve, reject) => {
    try {
      let readDB = req.readDB;
      const dbEnd = util.promisify(readDB.end).bind(readDB);
      const response = await dbEnd();
      resolve(response);
    } catch (error) {
      reject(error);
    }
  });
}
app.js
/**
 * Middleware to create a connection and end it when the response finishes
 */
app.use(async (req, res, next) => {
  try {
    const readDB = await utils.readDBCreateConnection();
    req.readDB = readDB;
    res.on("finish", function () {
      console.log("onFinish called");
      utils.onFinish(req, res, next);
    });
    next();
  } catch (error) {
    res.status(error.status || 500).send({
      code: 500,
      message: error.message || `Internal Server Error`,
    });
  }
});
/**
 * Code to initialize routing
 */
require("./modules/v2-routes")(app); // v2 app routes
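One common alternative (my sketch, not from the original post) is to keep one small, long-lived pool per read replica and pick a pool per request, instead of opening and ending a connection for every call; the pool caps the total connection count while still spreading reads across both instances. The replica host variables READ_DB_HOST_1 and READ_DB_HOST_2 below are assumed names:

var mysql = require("mysql");

// One small pool per read replica; created once per process and reused,
// so the total connection count stays bounded by connectionLimit * number of pools.
var readPools = [
  mysql.createPool({
    host: process.env.READ_DB_HOST_1, // assumed env var
    user: process.env.READ_DB_DB_USER,
    password: process.env.READ_DB_DB_PASSWORD,
    database: process.env.READ_DB_DATABASE,
    connectionLimit: 10,
    charset: "utf8mb4"
  }),
  mysql.createPool({
    host: process.env.READ_DB_HOST_2, // assumed env var
    user: process.env.READ_DB_DB_USER,
    password: process.env.READ_DB_DB_PASSWORD,
    database: process.env.READ_DB_DATABASE,
    connectionLimit: 10,
    charset: "utf8mb4"
  })
];

var nextPool = 0;

// Round-robin across the replica pools; connections go back to the pool
// automatically when pool.query() completes, so there is no per-request end().
function getReadPool() {
  var pool = readPools[nextPool];
  nextPool = (nextPool + 1) % readPools.length;
  return pool;
}

app.use(function (req, res, next) {
  req.readDB = getReadPool();
  next();
});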

Lambda + Sequelize randomly getting SequelizeConnectionError ETIMEDOUT

We are using Sequelize within AWS Lambda, and for the most part everything is working great; however, it randomly errors out with the following error:
ETIMEDOUT {"name":"SequelizeConnectionError","parent":{"errorno":"ETIMEDOUT","code":"ETIMEDOUT","syscall":"connect","fatal":true},"original":{"errorno":"ETIMEDOUT","code":"ETIMEDOUT","syscall":"connect","fatal":true}}
We are using RDS (MySQL 8.0.15), the Serverless framework, serverless-http, and serverless-webpack.
Here is our file configuration.
//db.js
// ... import all models
const sequelize = new Sequelize(
  process.env.DATABASE,
  process.env.DB_USER,
  process.env.DB_PASSWORD,
  {
    host: process.env.DB_HOST,
    port: process.env.STAGE === "dev" ? 3306 : 31304,
    dialect: "mysql",
    dialectOptions: { decimalNumbers: true },
    pool: {
      max: 10,
      min: 0
    }
  }
);
const models = {};
// Initialize models
modules.forEach(module => ...
export default models;
//handler.js
import express from "express";
import serverless from "serverless-http";
import db from "./db";

const app = express();

app.use(async (req, res, next) => {
  try {
    const email = "get email from jwt ...";
    req.user = await db.user.findOne({
      where: { email }
    });
    return next();
  } catch (e) {
    logger.warn("An error occurred", e);
    res.status(500).send({ message: e.message });
  }
});

app.use("/api", api);
app.get("*", (req, res) =>
  res.status(404).json({ errorCode: 0, message: "Unrecognized route" })
);

const handler = serverless(app);

module.exports.handler = async (event, context) => {
  context.callbackWaitsForEmptyEventLoop = false;
  return handler(event, context);
};
I thought we might have reached the max MySQL connections (which for my instance is 66), however the RDS dashboard shows the most we have had is in the 40s.
What are we doing wrong?
Although you say you are not reaching the maximum connections, you still might want to try creating an Amazon RDS Proxy for your Lambda function to access, hit it with a high load, and see if you are able to reproduce the error.
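For reference, pointing the existing Sequelize config at a proxy endpoint is a small change; a minimal sketch, where DB_PROXY_HOST is an assumed environment variable holding the RDS Proxy endpoint (not something from the original post):

const Sequelize = require("sequelize");

// Same credentials as before; only the host changes to the RDS Proxy endpoint,
// which pools and reuses connections on behalf of the Lambda functions.
const sequelize = new Sequelize(
  process.env.DATABASE,
  process.env.DB_USER,
  process.env.DB_PASSWORD,
  {
    host: process.env.DB_PROXY_HOST, // assumed env var, e.g. my-proxy.proxy-xxxx.us-east-1.rds.amazonaws.com
    dialect: "mysql",
    pool: { max: 10, min: 0 }
  }
);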
You don't really have enough logs to diagnose the issue; if the above does not work, you will need to dive deeper and potentially enable more RDS logging to see if that reveals the problem.
Another way to troubleshoot is to write the same queries you are executing in another language/framework, simulate the load, and see if the problem persists.
You may also want to check CloudWatch metrics for any other tells that could give you a clue as to what the problem is. Graph your Lambda resource metrics and RDS instance metrics on the same chart to see if there are any patterns between when the Lambda function errors and what your DB is doing, such as whether the error occurs when your write or read latency increases.
If the issue persists and you are not able to solve it, the best you can probably do is implement retries (see the sketch below), which will simply mask the issue, but if the boss is after you for a solution, this might be your best bet.
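A minimal sketch of such a retry wrapper (my illustration of what "implementing retries" could look like, not code from this answer): withRetry is a hypothetical helper that retries a promise-returning function a few times with exponential backoff when the error looks like a connection timeout.

// Hypothetical helper: retry a promise-returning function (e.g. a Sequelize call)
// a few times with exponential backoff before giving up.
async function withRetry(fn, attempts = 3, baseDelayMs = 100) {
  for (let i = 0; i < attempts; i++) {
    try {
      return await fn();
    } catch (err) {
      const timedOut =
        err.name === "SequelizeConnectionError" ||
        (err.original && err.original.code === "ETIMEDOUT");
      if (!timedOut || i === attempts - 1) throw err; // not retriable, or out of attempts
      await new Promise(resolve => setTimeout(resolve, baseDelayMs * 2 ** i)); // back off
    }
  }
}

// Usage inside the middleware:
// req.user = await withRetry(() => db.user.findOne({ where: { email } }));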
Hope my suggestions help, I've had similar issues with DB+Lambda & DB+ECS and found those to be effective troubleshooting strategies.

How to make Alexa run MySQL queries through a skill

I am trying to set up an Alexa skill that calls MySQL queries when a certain question gets asked. Nothing I tried seemed to work: I either get an error or nothing happens at all.
I am using/what I am working with:
Alexa Developer Console
Cloud9 as IDE (which uploads the code to AWS Lambda, where I defined the environment variables used in my code)
AWS Lambda, NodeJS
Amazon RDS, which hosts my DB instance
MySQL Workbench (where I created a few tables to test the database, which works fine)
I tried several ways to solve my problem, like creating a connection or a pool, but I think it has to be handled differently, because Alexa has to wait for the response.
const GetOeffnungszeiten_Handler = {
  canHandle(handlerInput) {
    const request = handlerInput.requestEnvelope.request;
    return request.type === 'IntentRequest' && request.intent.name === 'GetOeffnungszeiten';
  },
  handle(handlerInput) {
    const request = handlerInput.requestEnvelope.request;
    const responseBuilder = handlerInput.responseBuilder;
    let sessionAttributes = handlerInput.attributesManager.getSessionAttributes();
    let say = 'OUTPUT: ';

    var mysql = require('mysql');
    var connection = mysql.createPool({
      host     : process.env.MYSQL_HOSTNAME,
      user     : process.env.MYSQL_USERNAME,
      password : process.env.MYSQL_PASSWORD,
      database : process.env.MYSQL_DATABASE,
      port     : process.env.MYSQL_PORT
    });

    exports.handler = (event, context, callback) => {
      context.callbackWaitsForEmptyEventLoop = false;
      pool.getConnection(function(err, connection) {
        connection.query('select name from persons where id=1', function (error, results, fields) {
          connection.release();
          if (error) {
            callback(error);
            say = say + '0';
          } else {
            callback(null, results[0].name);
            say = say + ' 1';
          }
        });
      });
    };

    return responseBuilder
      .speak(say)
      .reprompt('try again, ' + say)
      .getResponse();
  },
};
I expect the output to be either "OUTPUT: 1" or "OUTPUT: 0", but it is just "OUTPUT: ".
By "output" I mean the say variable.
Your function is returning responseBuilder...getResponse() before the SQL connection finishes and the callback is called.
I would suggest refactoring your code using async and await to make it easier to read and understand (see https://stormacq.com/2019/06/22/async-js.html for help).
Be sure to return the Alexa response only when your call to MySQL returns, and not before. Remember that the Alexa timeout is 8 seconds, so your code needs to return before that. Make sure the AWS Lambda timeout is aligned with the Alexa timeout too (set it to 7 seconds).
Finally, I would advise against using MySQL for Alexa skills. Because each Lambda invocation might be served by a different container, your code will create a connection pool for every interaction between customers and your skill, adding a significant delay to the response. DynamoDB and ElastiCache are much better suited to Alexa skills.
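A minimal sketch of the suggested async/await refactor (my illustration of the advice above, not the answerer's code): the pool is created once outside the handler, pool.query is wrapped in a promise, and the Alexa response is built only after the query resolves.

const mysql = require('mysql');

// Created once per container, outside the handler, so warm invocations reuse it.
const pool = mysql.createPool({
  host: process.env.MYSQL_HOSTNAME,
  user: process.env.MYSQL_USERNAME,
  password: process.env.MYSQL_PASSWORD,
  database: process.env.MYSQL_DATABASE,
  port: process.env.MYSQL_PORT
});

// Wrap pool.query in a promise so the handler can await the result.
function queryAsync(sql, params) {
  return new Promise((resolve, reject) => {
    pool.query(sql, params, (err, results) => (err ? reject(err) : resolve(results)));
  });
}

const GetOeffnungszeiten_Handler = {
  canHandle(handlerInput) {
    const request = handlerInput.requestEnvelope.request;
    return request.type === 'IntentRequest' && request.intent.name === 'GetOeffnungszeiten';
  },
  async handle(handlerInput) {
    let say = 'OUTPUT: ';
    try {
      const results = await queryAsync('select name from persons where id = ?', [1]);
      say += results.length > 0 ? ' 1' : '0';
    } catch (err) {
      say += '0';
    }
    // The response is built only after the query has resolved (or failed).
    return handlerInput.responseBuilder
      .speak(say)
      .reprompt('try again, ' + say)
      .getResponse();
  },
};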

How to debug an Azure Function using Node.js and mysql2 connecting to database

We're running into some issues trying to figure out how an Azure Function (Node.js-based) can connect to our MySQL database (also hosted on Azure). We're using mysql2 and following tutorials pretty much exactly (https://learn.microsoft.com/en-us/azure/mysql/connect-nodejs, and similar). Here's the meat of the call:
const mysql = require('mysql2');
const fs = require('fs');

module.exports = async function (context, req) {
  context.log('JavaScript HTTP trigger function processed a request.');
  if (req.query.fname || (req.body && req.body.fname)) {
    context.log('start');
    var config = {
      host: process.env['mysql_host'],
      user: process.env['mysql_user'],
      password: process.env['mysql_password'],
      port: 3306,
      database: 'database_name',
      ssl: {
        ca: fs.readFileSync(__dirname + '\\certs\\cacert.pem')
      },
      connectTimeout: 5000
    };
    const conn = mysql.createConnection(config);
    /*context.log(conn);*/
    conn.connect(function (err) {
      context.log('here');
      if (err) {
        context.error('error connecting: ' + err.stack);
        context.log("shit is broke");
        throw err;
      }
      console.log("Connection established.");
    });
    context.log('mid');
    conn.query('SELECT 1+1', function (error, results, fields) {
      context.log('here');
      context.log(error);
      context.log(results);
      context.log(fields);
    });
Basically, we're running into an issue where conn.connect(function (err) ...) doesn't return anything: no error message, no logs, etc. conn.query behaves similarly.
Everything seems to be set up properly, but I don't even know where to look next to resolve the issue. Has anyone come across this before, or have advice on how to handle it?
Thanks!!
Ben
I believe the link that Baskar shared covers debugging your function locally
As for your function, you can make some changes to improve performance.
Create the connection to the DB outside the function code, otherwise it will create a new instance and connect every time. Also, you can enable pooling to reuse connections and avoid crossing the 300-connection limit of the sandbox in which Azure Functions run.
Use Promises along with async/await.
You can basically update your code to something like this:
const mysql = require('mysql2/promise');
const fs = require('fs');

var config = {
  host: process.env['mysql_host'],
  user: process.env['mysql_user'],
  password: process.env['mysql_password'],
  port: 3306,
  database: 'database_name',
  ssl: {
    ca: fs.readFileSync(__dirname + '\\certs\\cacert.pem')
  },
  connectTimeout: 5000,
  connectionLimit: 250,
  queueLimit: 0
};

// Pool is created once, outside the function, and reused across invocations
const pool = mysql.createPool(config);

module.exports = async function (context, req) {
  context.log('JavaScript HTTP trigger function processed a request.');
  if (req.query.fname || (req.body && req.body.fname)) {
    context.log('start');
    const conn = await pool.getConnection();
    context.log('mid');
    // With the promise API, query() resolves with [results, fields] instead of taking a callback
    const [results, fields] = await conn.query('SELECT 1+1');
    context.log('here');
    context.log(results);
    context.log(fields);
    conn.release();
  }
};
PS: I haven't tested this code as such, but I believe something like this should work.
Debugging on serverless is challenging for obvious reasons. You can try one of the hacky solutions to debug locally (like the Serverless Framework), but that won't necessarily help you if your issue has to do with the connection to a DB. You might see different behaviour locally.
Another option is to see if you can step debug using Rookout, which should let you catch the full stack at different points in the code execution and give you a good sense of what's failing and why.

Lambda AWS not calling node mysql callbacks

I am trying to process bounces sent from Amazon's Simple Email Service via their Simple Notification Service, using a Lambda on AWS.
I'm running the following script:
var aws = require('aws-sdk');
var mysql = require('mysql');

Processor = {};

Processor.initializeConnection = function() {
  console.log('Connecting to database');
  Processor.connection = mysql.createConnection({
    host     : 'MYHOST',
    user     : 'MYUSER',
    password : 'PASSWORD',
    database : 'DATABASE'
  });
  console.log('Connection configured');
  Processor.connection.connect(function(err) {
    console.log('****');
    console.log(err);
    if (err != null) {
      console.log('Could not connect to database');
      return false;
    } else {
      console.log('Successfully connected to database');
      return true;
    }
  });
  console.log('Should not get here');
};

exports.handler = function(event, context) {
  console.log('Received event:');
  var message = event.Records[0].Sns.Message;
  // Get the object from the event and show its content type
  if (Processor.initializeConnection()) {
    context.fail('Database connection failed');
    return;
  }
  context.succeed(message);
};
I upload this script as index.js, along with node_modules containing the node mysql module, all as a zip file.
I get the following output from Amazon when this is run:
START RequestId: 378b8a8c-30d4-11e5-9db4-9b9537e3f53d
2015-07-23T00:46:13.159Z 378b8a8c-30d4-11e5-9db4-9b9537e3f53d Received event:
2015-07-23T00:46:13.160Z 378b8a8c-30d4-11e5-9db4-9b9537e3f53d Connecting to database
2015-07-23T00:46:14.035Z 378b8a8c-30d4-11e5-9db4-9b9537e3f53d Connection configured
2015-07-23T00:46:14.095Z 378b8a8c-30d4-11e5-9db4-9b9537e3f53d Should not get here
END RequestId: 378b8a8c-30d4-11e5-9db4-9b9537e3f53d
REPORT RequestId: 378b8a8c-30d4-11e5-9db4-9b9537e3f53d Duration: 937.51 ms Billed Duration: 1000 ms Memory Size: 128 MB Max Memory Used: 14 MB
None of the code inside the connect callback is run. I'm expecting it to report a connection failure, as I'm not using valid credentials.
If I run a version of the code locally under Node.js, the connect callback does fire. It just doesn't fire under Lambda.
Due to the asynchronous nature of node.js, your code might be exiting as a result of context.succeed() before all of your functions are executed.
See:
Async AWS Lambda not executed if caller returns too early
Why is this HTTP request not working on AWS Lambda?
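A minimal sketch of one way to restructure the handler so the result is only reported after the connect callback has actually fired (my illustration of the point above, using the same placeholder credentials as the question, not the original author's code):

var mysql = require('mysql');

exports.handler = function (event, context) {
  var message = event.Records[0].Sns.Message;
  var connection = mysql.createConnection({
    host: 'MYHOST',
    user: 'MYUSER',
    password: 'PASSWORD',
    database: 'DATABASE'
  });

  // Signal success or failure only from inside the callback, after the
  // connection attempt has completed; this keeps the Lambda alive until then.
  connection.connect(function (err) {
    if (err) {
      console.log('Could not connect to database', err);
      context.fail('Database connection failed');
      return;
    }
    console.log('Successfully connected to database');
    connection.end();
    context.succeed(message);
  });
};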