I'm trying to connect to a db in a MySQL runtime from another NodeJS runtime in a multi-machine workspace.
In a test I'm calling the API http://localhost:3000/target with the list of target users. Code in this API runs a SELECT on the db:
...
exports.list = function(req, res) {
req.getConnection(function(err, connection) {
if (err) {
console.log("MySQL " + err);
} else {
connection.query('SELECT id FROM target', function(err, rows) {
if (err) {
console.log("Error Selecting : %s ", err);
} else {
...
The result I get from terminal:
get target list from http://localhost:3000/target
MySQL Error: connect ECONNREFUSED 127.0.0.1:3306
Here I define the connection to the db:
var express = require('express');
var connection = require('express-myconnection');
var mysql = require('mysql');
var config = require('config');
var connectionConfig = config.get('mysql');
var connectionInstance = connection(mysql, connectionConfig, 'request');
...
app.use(connectionInstance);
app.get('/', function(req, res) {
res.send('Welcome');
});
app.get('/target', target.list);
....
config:
{
"mysql": {
"host": "localhost",
"user": "[user]",
"password": "[password]",
"database": "[database]"
},
"app": {
"port": 3000,
"server": "http://localhost"
}
}
This is what I have in the configuration of the db machine in Eclipse Che:
snapshot of servers configuration
Here's my recipe:
services:
db:
image: eclipse/mysql
environment:
MYSQL_ROOT_PASSWORD: password
MYSQL_DATABASE: petclinic
MYSQL_USER: petclinic
MYSQL_PASSWORD: password
MYSQL_ROOT_USER: root
mem_limit: 1073741824
dev-machine:
image: eclipse/node
mem_limit: 2147483648
depends_on:
- db
elasticsearch:
image: florentbenoit/cdvy-ela-23
mem_limit: 2147483648
Can you share your recipe for the multi-machine workspace? That would help a lot in debugging it.
Just a guess: I think the problem with your setup is the use of localhost for your db connection. If you are running a multi-machine setup, the db is running in a different docker container and needs to be addressed by its name.
Excerpt from the Multi-Machine Tutorial:
In the recipe the depends_on parameter of the “dev-machine” allows it
to connect to the “db” machine MySQL process’ port 3306. The
“dev-machine” configures its MySQL client connection in the projects
source code at src/main/resources/spring/data-access.properties. The
url is defined by jdbc.url=jdbc:mysql://db:3306/petclinic which uses
the database machine’s name “db” and the MySQL server default port
3306.
You need to configure the open ports in your recipe.
Disclaimer: I am not directly affiliated with Eclipse Che, Codenvy or Red Hat, but we are building our own cloud IDE for C/C++ multicore optimization on top of Eclipse Che.
Related
Whenever my nodejs mysql connection in a docker container is idle for 16 minutes or longer, I get the following error message:
2022-04-05T11:25:53.802Z Success: [ RowDataPacket { '1': 1 } ]
2022-04-05T11:41:58.512Z
/app/index.js:12
if(err) throw err;
^
Error: read ECONNRESET
at TCP.onStreamRead (internal/stream_base_commons.js:209:20)
--------------------
at Protocol._enqueue (/app/node_modules/mysql/lib/protocol/Protocol.js:144:48)
at Connection.query (/app/node_modules/mysql/lib/Connection.js:198:25)
at results (/app/index.js:11:64)
at new Promise (<anonymous>)
at checkConnection (/app/index.js:11:27)
at Timeout._onTimeout (/app/index.js:16:20)
at listOnTimeout (internal/timers.js:554:17)
at processTimers (internal/timers.js:497:7) {
errno: 'ECONNRESET',
code: 'ECONNRESET',
syscall: 'read',
fatal: true
}
The above error does not occur if I host my nodejs app and mysql directly against my host machine without docker.
Does anyone know how to fix this problem with my nodejs/mysql/docker-swarm set up? Here's all my code to reproduce the problem, I put all the files in the same directory:
// docker-compose.yml
version: "3.8"
services:
mysql:
image: mysql:5.7.34
environment:
- MYSQL_ROOT_PASSWORD=rootpass
- MYSQL_USER=myuser
- MYSQL_PASSWORD=mypass
- MYSQL_DATABASE=mydatabase
volumes:
- ./mysqld.cnf:/etc/mysql/mysql.conf.d/mysqld.cnf
command: "--wait_timeout=28800"
networks:
- app-network
nodejs:
image: node:12-alpine
working_dir: /app
volumes:
- ./:/app
depends_on:
- mysql
entrypoint: ["node", "/app/index.js"]
networks:
- app-network
networks:
app-network:
external: true
// mysqld.cnf
[mysqld_safe]
socket = /var/run/mysqld/mysqld.sock
nice = 0
[mysqld]
user = mysql
pid-file = /var/run/mysqld/mysqld.pid
socket = /var/run/mysqld/mysqld.sock
port = 3306
basedir = /usr
datadir = /var/lib/mysql
tmpdir = /tmp
lc-messages-dir = /usr/share/mysql
skip-external-locking
key_buffer_size = 16M
max_allowed_packet = 16M
thread_stack = 192K
thread_cache_size = 8
myisam-recover-options = BACKUP
query_cache_limit = 1M
query_cache_size = 16M
log_error = /var/log/mysql/error.log
expire_logs_days = 10
max_binlog_size = 100M
// package.json
{
"name": "mysql-docker",
"version": "1.0.0",
"description": "",
"main": "index.js",
"scripts": {
"test": "echo \"Error: no test specified\" && exit 1"
},
"author": "",
"license": "ISC",
"dependencies": {
"mysql": "^2.18.1"
}
}
// index.js
const mysql = require('mysql');

// Ping every 16 minutes — long enough to reproduce the idle-timeout disconnect.
const pingTime = 1000 * (60 * 16);

const connectionParameters = {
  host: 'testconnection_mysql',
  user: 'myuser',
  password: 'mypass',
  database: 'mydatabase',
  port: '3306',
};

// Runs `SELECT 1` to verify the connection is alive, logs the result, then
// re-schedules itself.
const checkConnection = async (mysqlClient) => {
  const results = await new Promise((resolve, reject) => mysqlClient.query("SELECT 1", (err, results) => {
    // Bug fix: the original did `if(err) throw err;` here. A throw inside this
    // callback escapes the Promise entirely and crashes the process with an
    // uncatchable error — which is exactly the stack trace shown above.
    // Rejecting lets the caller's try/catch handle it.
    if (err) {
      reject(err);
      return;
    }
    resolve(results);
  }));
  console.log(new Date(), "Success:", results);
  // Bug fix: the rescheduled call was a floating promise, so any later
  // failure became an unhandled rejection. Attach a .catch() to report it.
  setTimeout(() => {
    checkConnection(mysqlClient).catch((e) => console.log(new Date(), "Error:", e));
  }, pingTime);
};

const run = async () => {
  // Give time for MySQL Service to warm up
  await new Promise(resolve => setTimeout(() => resolve(), 10000));
  const mysqlClient = mysql.createConnection(connectionParameters);
  // Adapt the callback-style connect() into a Promise so we can await it.
  await new Promise((resolve, reject) => mysqlClient.connect((e) => {
    if (e) {
      reject(e);
      return;
    }
    resolve();
  }));
  try {
    await checkConnection(mysqlClient);
  } catch (e) {
    console.log(new Date(), "Error:", e);
  }
};
run();
Once I have all these files in the same directory, I simply run these commands from the directory:
npm install;
docker swarm init;
docker network create --driver overlay app-network;
docker stack deploy -c docker-compose.yml testconnection;
After 16 minutes, doing a docker logs <container id for the index.js file> gives the error shown at the top of this message.
If I change the pingTime in the index.js to 14 minutes like this: const pingTime = 1000 * (60 * 14);, restart the swarm and containers, then I don't get any disconnects. Even after many hours, doing a docker logs will show successful queries every 14 minutes.
How do I stop mysql and nodejs from losing connections when idle for 16 minutes or longer?
OTHER NOTES
If I change the network of my docker-compose.yml to this:
networks:
app-network:
driver: bridge
I get this error:
failed to create service testconnection_mysql: Error response from daemon: The network testconnection_app-network cannot be used with services. Only networks scoped to the swarm can be used, such as those created with the overlay driver.
If I change the network of my docker-compose.yml to this:
networks:
app-network:
driver: overlay
Then my nodejs app is unable to connect to mysql and I get this error:
Error: ER_DBACCESS_DENIED_ERROR: Access denied for user 'myuser'#'%' to database 'mydatabase'
So only the external network seems to let my nodejs app connect to mysql while in swarm mode.
In services.mysql.command you should use "--wait-timeout=28800" instead of "--wait_timeout=28800" (see MySQL Man). To verify that the system variable is set correctly, execute SHOW VARIABLES LIKE 'wait_timeout'; in your Node.js app. You can even set it dynamically with SET SESSION wait_timeout = 28800;.
If it still doesn't work, try to diagnose. In checkConnection() delete the SQL query to make the script not crash and wait 16 minutes. Connect into the nodejs container with docker exec -it <container_id> bash and try ping testconnection_mysql. If it doesn't work, the problem is in the app-network. If it does work, connect into the mysql container and try to connect to MySQL server with mysql CLI client if available. If it doesn't work, the problem is in the database server. If it does work, the problem is probably still with the timeouted connection in Node.js app.
Why you use app-network, which is set to external? Is it classical bridge network? If you use this network only with containers declared in this single Composefile, you should not declare it external. You can get more information about the network with docker network inspect app-network.
I want to use Docker to make a container that contains a MySQL DB with Express to expose into certain ports for example 8080 so I can fetch that using React for the view.
Right now I have made a simple server with Express that fetches the data from a MySQL database and displays it on 'localhost:8000/posts' ('posts' is where I am using a SQL query to select everything from the table).
This is a index.ts file:
//Main File
//Main File
import express from "express";
import { connection } from "./db-connect";

const app = express();
const port = 8000;

/*
cors was installed so I could get the data
and display it to react, otherwise I was getting an error
in the console.
*/
var cors = require("cors");
app.use(cors());

app.get("/", (req, res) => {
  res.send("Go to /posts to see the Data");
});

// Returns every row of blog.posts as JSON; on a query error the error object
// is sent back instead.
app.get("/posts", (req, res) => {
  connection.query("SELECT * FROM blog.posts", (err, result) => {
    if (err) {
      return res.send(err);
    }
    return res.json(result);
  });
});

// Bug fix: the original called app.listen() twice (once on port 80, once on
// `port`), binding the server to two ports. CORS is applied by the
// app.use(cors()) middleware above, so a single listener is enough.
app.listen(port, err => {
  if (err) {
    return console.error(err);
  }
  return console.log(`server is listening on ${port}`);
});
So I have a database named blog with table called posts
With this I am getting an array with data as an object which is I wanted.
What I want to learn is how to Dockerize this into a container, with a MySQL instance that has a blog database containing a posts table with id, title, body and user columns.
After I'd like to host the Docker that contains the db and deploy it to digitalocean or similar service.
I'd suggest to use docker-compose for this.
You'd then have a .yml file that looks something like this:
version: '3.3'
services:
db:
image: mysql:5.7
restart: always
environment:
# Use this to connect to your dockerized mysql container
MYSQL_DATABASE: 'db'
MYSQL_USER: 'user'
MYSQL_PASSWORD: 'password'
MYSQL_ROOT_PASSWORD: 'password'
ports:
- '3306:3306'
expose:
# Use this port in your express app
- '3306'
volumes:
- my-db:/var/lib/mysql
volumes:
my-db:
Then you need to dockerize your express app and include it in the docker-compose.yml file. Here is a guide on how to dockerize a node.js app such as yours.
Once you succeeded to connect the two containers in docker-compose, deploying it on a Digital Ocean server is just as easy as installing Docker-Compose on the server (if it's not already installed), copying your code to the server and running docker-compose up --build.
I have this little node-app for testing. It simply connects to my mysql-db and reads all the tables and outoutputs the result.
var http = require('http');
var mysql = require('mysql');

// Test server: on each request, open a DB connection, list the tables of
// `earth2`, and return them as JSON.
var server = http.createServer(function(req, res) {
  var con = mysql.createConnection({
    host: "localhost",
    user: "root",
    password: "",
    database: 'earth2'
  });
  con.connect(function(err) {
    if (err) {
      // Bug fix: `throw err` here would crash the entire server on a single
      // failed request; answer with a 500 instead.
      res.statusCode = 500;
      res.end('Database connection failed');
      return;
    }
    console.log("Connected!");
    var sql = "SHOW tables;";
    con.query(sql, function (err, result) {
      // Bug fix: the original never called con.end(), leaking one open MySQL
      // connection per request until the server hit the connection limit.
      con.end();
      if (err) {
        res.statusCode = 500;
        res.end('Query failed');
        return;
      }
      console.log('HI FROM SERVER');
      res.setHeader('Content-type', 'text/plain' );
      res.end(JSON.stringify(result));
    });
  });
}).listen(3000, function () {
  console.log('########### NODE SERVER START ################');
  console.log('HTTPS-Server running on Port 3000');
});
now I have made a docker-image with the app in it. this is my dockerfile:
FROM djudorange/node-gulp-mocha
COPY /test .
CMD ["node", "test.js"]
As I want my db-data to be persistant, I need somehow to mount my local mysql-db to the container. but how exactly does this work?
The information I find is somewhat confusing for me as a noob.
I created a volume with docker volume create mydb and now I could mount it when running the container with --mount source=mydb,target=/mnt, but how should my node-app connect here?
Best approach would be to use docker-compose. If you want to use docker run, there are couple of ways. Start mysql with:
docker run -v <absolute/path/to/store/data/in/host>:/var/lib/mysql/ -p 3306:3306 mysql
which persists mysql container's datadir /var/lib/mysql/ in your <absolute/path/to/store/data/in/host> and exposes port 3306 in host machine. Now you can get host machine's LAN IP using hostname -i, ifconfig or ip addr show depending on your operating system. In nodejs app, replace localhost with the host machine's IP.
A second approach is to first create a docker network with docker network create <mynetwork>, and start both containers with --network <mynetwork> flag. If you now do docker run --name <mydb> ..., you can reference mysqldb in your node app as mydb:3306
I have a project that uses NodeJS as a server (with ExpressJS) and MySQL to handle databases. To load them both together, I am using Docker. Although this project includes a ReactJS client (and I have a client folder for the react and a server folder for the nodejs), I have tested communication between the server and client and it works. Here is the code that pertains to both the server and mysql services:
docker-compose.yml
mysql:
image: mysql:5.7
environment:
MYSQL_HOST: localhost
MYSQL_DATABASE: sampledb
MYSQL_USER: gfcf14
MYSQL_PASSWORD: xxxx
MYSQL_ROOT_PASSWORD: root
ports:
- 3307:3306
restart: unless-stopped
volumes:
- /var/lib/mysql
- ./db/greendream.sql:/docker-entrypoint-initdb.d/greendream.sql
.
.
.
server:
build: ./server
depends_on:
- mysql
expose:
- 8000
environment:
API_HOST: "http://localhost:3000/"
APP_SERVER_PORT: 8000
ports:
- 8000:8000
volumes:
- ./server:/app
links:
- mysql
command: yarn start
Then there is the Dockerfile for the server:
FROM node:10-alpine
RUN mkdir -p /app
WORKDIR /app
COPY package.json /app
COPY yarn.lock /app
RUN yarn install
COPY . /app
CMD ["yarn", "start"]
In the server's package.json, the script start is simply this: "start": "nodemon index.js"
And the file index.js that gets executed is this:
const express = require('express');
const cors = require('cors');
const mysql = require('mysql');

const app = express();

// Inside docker-compose the DB is NOT on localhost — it runs in its own
// container and must be addressed by its service name. Read the host from the
// environment (set MYSQL_HOST: mysql in the compose file), falling back to
// localhost for non-docker runs.
const con = mysql.createConnection({
  host: process.env.MYSQL_HOST || 'localhost',
  user: 'gfcf14',
  password: 'xxxx',
  database: 'sampledb',
});

app.use(cors());

// Bug fix: connect ONCE at startup. The original called con.connect() inside
// the /test handler, so every request after the first re-connected an
// already-used connection — that is what produced the
// PROTOCOL_ENQUEUE_AFTER_FATAL_ERROR on refresh.
con.connect(err => {
  if (err) {
    console.error('MySQL connection failed:', err);
  }
});

app.listen(8000, () => {
  console.log('App server now listening on port 8000');
});

// Echoes the request's query params back once the connection is confirmed
// alive; sends the error object on failure.
app.get('/test', (req, res) => {
  con.ping(err => {
    if (err) {
      res.send(err);
    } else {
      res.send(req.query);
    }
  });
});
So all I want to do for now is confirm that a connection takes place. If it works, I would send back the params I got from the front-end, which looks like this:
// Ask the server's /test endpoint to echo our query params, then log them.
const params = { test: 'hi' };
axios
  .get('http://localhost:8000/test', { params })
  .then(({ data }) => console.log(data));
So, before I implemented the connection, I would get { test: 'hi' } in the browser's console. I expect to get that as soon as the connection is successful, but what I get instead is this:
{
address: "127.0.0.1"
code: "ECONNREFUSED"
errno: "ECONNREFUSED"
fatal: true
port: 3306
syscall: "connect"
__proto__: Object
}
I thought that maybe I have the wrong privileges, but I also tried it using root as user and password, but I get the same. Weirdly enough, if I refresh the page I don't get an ECONNREFUSED, but a PROTOCOL_ENQUEUE_AFTER_FATAL_ERROR (with a fatal: false). Why would this happen if I am using the right credentials? Please let me know if you have spotted something I may have missed
In your mysql.createConnection method, you need to provide the mysql host. Mysql host is not localhost as mysql has its own container with its own IP. Best way to achieve this is to externalize your mysql host and allow docker-compose to resolve the mysql service name(in your case it is mysql) to its internal IP which is what we need. Basically, your nodejs server will connect to the internal IP of the mysql container.
Externalize the mysql host in nodejs server:
const con = mysql.createConnection({
host: process.env.MYSQL_HOST_IP,
...
});
Add this in your server service in docker-compose:
environment:
MYSQL_HOST_IP: mysql // the name of mysql service in your docker-compose, which will get resolved to the internal IP of the mysql container
This should be really basic and simple to do but I can seriously not find any understandable information on how to create a simple database for my nodejs typescript project.
I have installed the following packages with npm:
mysql2
sequelize
sequelize-cli
sequelize-typescript
I have attempted the following commands at the terminal
C:\repos\NodeNew>mysql2 -u root -p
'mysql2' is not recognized as an internal or external command,
operable program or batch file.
C:\repos\NodeNew>mysql -u root -p
'mysql' is not recognized as an internal or external command,
operable program or batch file.
C:\repos\NodeNew>node mysql2 -u root -p
module.js:538
throw err;
^
Error: Cannot find module 'C:\repos\NodeNew\mysql2'
at Function.Module._resolveFilename (module.js:536:15)
at Function.Module._load (module.js:466:25)
at Function.Module.runMain (module.js:676:10)
at startup (bootstrap_node.js:187:16)
at bootstrap_node.js:608:3
C:\repos\NodeNew>
So how do I CREATE my database so I can connect to it with sequelize etc?
The tools you have installed are only for connecting a Node.js app to MySQL and do not include command-line tools to manage the MySQL server.
I will assume you have installed MySQL and it's running – you should then be able to find its mysql.exe command line client in the bin/ directory in the server's installation directory. If you haven't fiddled with authentication, just running it might work.
When you get to a MySQL prompt, you can follow any old instructions for creating a database; CREATE DATABASE foo; is the gist of it (authentication and permissions being a different story).
Since you're on Windows, you might want to look into HeidiSQL – it's been a while since I've used it, but it's a decent graphical MySQL management tool.
You can also use mysql2 to create the database – illustrated below – but I recommend getting a management tool.
const mysql = require('mysql2');

// Connect to the built-in `mysql` system database just to obtain a session;
// the new database is then created from there.
const connection = mysql.createConnection({
  host: 'localhost',
  user: 'root',
  database: 'mysql',
});

connection.query('CREATE DATABASE foo', (err) => {
  // Bug fix: the original ignored errors and never closed the connection, so
  // a failure was invisible and the script never exited.
  if (err) {
    console.error(err);
  }
  connection.end();
});
You should have MySQL installed on your computer, to install mysql on Windows see the following page: MySql
Once you have MySQL up and running on your computer. Open the Command Terminal and execute the following:
npm install mysql
Now you have downloaded and installed a mysql database driver. Node.js can use this module to manipulate the MySQL database:
var mysql = require('mysql');
To create a conecction create a file connection.js:
// Smoke-test the MySQL driver: open a connection with your credentials and
// log a confirmation once the handshake succeeds.
var connection = mysql.createConnection({
  host: "localhost",
  user: "yourusername",
  password: "yourpassword"
});

connection.connect(function(err) {
  if (err) {
    throw err;
  }
  console.log("Connected!");
});
to test it save the file and run:
node connection.js
Which will give you this result:
Connected!
It can be solved using beforeConnect hook of sequelize as below:
const env = process.env.NODE_ENV || 'development';
const config = require(__dirname + '/../config/config.json')[env];
const { host, port, username, password } = config;

// Create the sequelize instance WITHOUT a database name — the database may
// not exist yet; it is created on demand in the beforeConnect hook below.
// (Fixed: the original used a Python-style `#` comment, which is a syntax
// error in JavaScript, and assigned `sequelize` without declaring it,
// creating an implicit global.)
const sequelize = new Sequelize('', username, password, config);

// Before every connection attempt, ensure the target database exists, then
// point the connection config at it.
sequelize.beforeConnect(async (config) => {
  const connection = await mysql.createConnection({ host: host, port: port, user: username, password: password });
  await connection.query(`CREATE DATABASE IF NOT EXISTS \`${process.env.DB_NAME}\`;`);
  config.database = process.env.DB_NAME;
});
Config.json file contains configurations for different dbs like
{
"development": {
"username": "root",
"password": "init#123",
"host": "mysqldb",
"dialect": "mysql"
},
"test": {
"username": "root",
"password": null,
"host": "127.0.0.1",
"dialect": "mysql"
},
"production": {
"username": "root",
"password": null,
"host": "127.0.0.1",
"dialect": "mysql",
}
}
Database name is provided after db connection and creation(if required) in sequelize beforeConnect hook