I am building a simple REST API with Node/Express, and I'm having a hard time when I deploy it to production. When NODE_ENV=development, everything works as expected. I get back the JSON error and the correct status code. When NODE_ENV=production, I only get back an HTML page with the default error message and nothing else. I can read the status code, but I need to have access to the full JSON payload to identify the errors better. This is my code:
import Promise from 'bluebird'; // eslint-disable-line no-unused-vars
import express from 'express';
import config from './config';
import routes from './routes';
import { errorMiddleware, notFoundMiddleware } from './middlewares/error.middleware';
import mongoose from './config/mongoose.config';
// create app
const app = express();
(async () => {
// connect to mongoose
await mongoose.connect();
// pretty print on dev
if (process.env.NODE_ENV !== 'production') {
app.set('json spaces', 2);
}
// apply express middlewares
app.use(express.json());
// register v1 routes
app.use('/v1', routes);
// catch errors
app.use(notFoundMiddleware);
app.use(errorMiddleware);
// start server
app.listen(config.port, () => console.info(`server started on port ${config.port}`));
})();
export default app;
This is the notFoundMiddleware:
export default (req, res, next) => next(new Error('Not Found'));
This is the errorMiddleware:
const errorMiddleware = (err, req, res, next) => {
console.log('test'); // this works in development, but not in production
const error = {
status: err.status,
message: err.message
};
if (err.errors) {
error.errors = err.errors;
}
if (process.env.NODE_ENV !== 'production' && err.stack) {
error.stack = err.stack;
}
return res.status(error.status || 500).send({ error });
};
If you are running on a production server, try using a logging provider like "papertrailapp" to see the errors that occur in your app.
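Even before wiring up a provider, a minimal sketch (reusing the errorMiddleware shape from the question) that always logs the full error server-side while keeping the JSON response:
const errorMiddleware = (err, req, res, next) => { // eslint-disable-line no-unused-vars
  // log the full error on the server, regardless of NODE_ENV
  console.error(err.stack || err.message);
  // a logging provider/transport could be called here instead of console.error
  return res.status(err.status || 500).json({ error: { message: err.message } });
};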
I've just stumbled upon the same problem. It turned out to be caused by a transpiler optimization applied when building the production bundle, namely: https://babeljs.io/docs/en/babel-plugin-minify-dead-code-elimination
Express error handlers must have the signature (err, req, res, next) => { ... }, i.e. be of arity 4. In your example, next is not used anywhere in the errorMiddleware function body, so it gets eliminated (optimized out) from the function signature in the production build, and Express then stops treating the function as an error handler.
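You can see the arity difference directly; a standalone sketch (not from the question):
// Express decides whether a function is an error handler by checking its arity
// (fn.length); anything other than 4 is treated as a regular middleware and is
// skipped when next(err) is called.
const threeArgs = (err, req, res) => res.status(500).json({ error: err.message });
const fourArgs = (err, req, res, next) => res.status(500).json({ error: err.message }); // eslint-disable-line no-unused-vars
console.log(threeArgs.length); // 3 -> never invoked for errors
console.log(fourArgs.length);  // 4 -> registered as an error handler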
Solution:
use the keepFnArgs: true plugin option, possibly through the https://webpack.js.org/plugins/babel-minify-webpack-plugin/ webpack configuration:
var MinifyPlugin = require("babel-minify-webpack-plugin")
module.exports = {
// ...
optimization: {
minimizer: [
new MinifyPlugin({
deadcode: {
keepFnArgs: true,
},
}, {}),
],
}
// ...
}
or alternatively pretend in your code that this argument is used:
const errMiddleware = (err, req, res, _next) => {
// ... your code ...
// ...
// cheat here:
_next
}
I'm having a heap of trouble just trying to get an EJS template file to recognise a variable that stores the rows of an SQLite3 table query in a corresponding .js file. I get a ReferenceError for the variable I used in the EJS file when launching the server and trying to access that route.
For context it's a micro blog project where I'd like authors to have the ability to save draft articles in to a database and for the user to be able to come back and modify or publish them later.
Here's my 'author.js' file:
// Author Page
const express = require("express");
const router = express.Router();
const assert = require('assert');
/**
* @desc retrieves draft articles
*/
router.get("/author-home", (req, res, next) => {
//Use this pattern to retrieve data
//NB. it's better NOT to use arrow functions for callbacks with this library
global.db.all("SELECT * FROM draftArticles", function (err, rows) {
if (err) {
next(err); //send the error on to the error handler
} else {
res.json(rows);
}
});
});
/**
* @desc Renders the author page
*/
router.get("/author", (req, res) => {
res.render("author-home", data);
});
module.exports = router;
In my 'author-home.ejs' file, I'm trying to insert various article properties into a <td> element like so:
<td><% data[0].article_title %> </td>
<td><% data[0].article_subtitle %> </td>
...etc.
Can anyone tell me what I'm doing wrong? I can also post the code for my 'index.js' file if that's helpful. Many thanks in advance
EDIT:
After some suggestions were sent and the scope issue of the 'data' variable was highlighted, I corrected my code in author.js (at least, I believe so) to the following:
// Author Page
const express = require("express");
const router = express.Router();
const assert = require('assert');
router.get('/author-home', (req, res, next) => {
global.db.all('SELECT * FROM draftArticles', function (err, rows) {
if (err) {
console.log("No data found.")
next(err); //send the error on to the error handler
return;
}
res.render('author-home', { data: rows });
});
});
module.exports = router;
However, I still receive a ReferenceError when trying to access data in my EJS file.
I also tried, as was suggested, to pass static data like so:
let dummyData = "This is test data";
router.get('/author-home', (req, res, next) => {
res.render('author-home', { data: dummyData });
});
This also produces a ReferenceError.
This is because you have not defined "data" in the template's locals. You need to pass it when rendering; if you want to send an array, see:
How can I pass an array to an ejs template in express?
Your code should look like this:
// Author Page
const express = require("express");
const router = express.Router();
const assert = require('assert');
router.get('/author-home', (req, res, next) => {
global.db.all('SELECT * FROM draftArticles', function(err, rows) {
if (err || !rows || !rows.length) {
console.log("No data found.")
// also try to log rows here to see what you are getting. Does "rows" have article_title etc. attributes or not?
next(err || new Error("No Data found!")); //send the error on to the error handler
return;
}
res.render('author-home', {
data: rows
});
});
});
module.exports = router;
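Also note that in the EJS template you need output tags (<%= %>) rather than plain scriptlet tags (<% %>), otherwise nothing is printed. A sketch of author-home.ejs, assuming the route passes { data: rows } as above and the rows have the article_title/article_subtitle columns from your question:
<table>
  <% data.forEach(function (article) { %>
    <tr>
      <td><%= article.article_title %></td>
      <td><%= article.article_subtitle %></td>
    </tr>
  <% }); %>
</table>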
I'm having trouble with Node 16 and ES6. I'm trying to make an upload file controller but I'm stuck with req.file.stream, which is undefined.
I'm using multer to handle file uploads.
The first issue was __dirname being undefined, which I was able to fix with path and new URL().
This is the error I get with pipeline:
node:internal/process/promises:246
triggerUncaughtException(err, true /* fromPromise */);
^
TypeError [ERR_INVALID_ARG_TYPE]: The "source" argument must be of type function or an instance of Stream, Iterable, or AsyncIterable. Received undefined
my userRoutes.js
import express from "express";
import { signin, signup, logout } from "../Controller/AuthController.js";
import {
getUsers,
getUser,
updateUser,
deleteUser,
follow,
unfollow,
} from "../Controller/UserController.js";
import { upload } from "../Controller/UploadController.js";
import multer from "multer";
const router = express.Router();
// Auth
router.post("/signin", signin);
router.post("/signup", signup);
router.post("/logout", logout);
// users
router.get("/", getUsers);
router.get("/:id", getUser);
router.patch("/:id", updateUser);
router.delete("/:id", deleteUser);
router.patch("/follow/:id", follow);
router.patch("/unfollow/:id", unfollow);
// upload
router.post("/upload", multer().single("file"), upload);
export default router;
And my UploadController.js
import fs from "fs";
import { promisify } from "util";
import stream from "stream";
const pipeline = promisify(stream.pipeline);
// const { uploadErrors } = require("../utils/errors.utils");
import path from "path";
const __dirname = path.dirname(new URL(import.meta.url).pathname);
export const upload = async (req, res) => {
try {
// console.log(req.file);
console.log(__dirname);
if (
!req.file.mimetype == "image/jpg" ||
!req.file.mimetype == "image/png" ||
!req.file.mimetype == "image/jpeg"
)
throw Error("invalid file");
if (req.file.size > 2818128) throw Error("max size");
} catch (err) {
const errors = uploadErrors(err);
return res.status(201).json({ err });
}
const fileName = req.body.name + ".jpg";
await pipeline(
req.file.stream,
fs.createWriteStream(
`${__dirname}/../client/public/uploads/profil/${fileName}`
)
);
try {
await User.findByIdAndUpdate(
req.body.userId,
{ $set: { picture: "./uploads/profil/" + fileName } },
{ new: true, upsert: true, setDefaultsOnInsert: true },
(err, docs) => {
if (!err) return res.send(docs);
else return res.status(500).send({ message: err });
}
);
} catch (err) {
return res.status(500).send({ message: err });
}
};
Multer gives you the file as a Buffer, not a Stream. req.file.stream is not a valid property, but req.file.buffer is: https://github.com/expressjs/multer#file-information.
From the look of your code, you're trying to save the file on disk. You can use multer's DiskStorage for that. Create a storage instance and pass it to the multer instance as a configuration:
const storage = multer.diskStorage({
destination: function (req, file, cb) {
cb(null, `${__dirname}/../client/public/uploads/profil/`);
},
filename: function (req, file, cb) {
cb(null, req.body.name + '.jpg');
},
});
const uploadMiddleware = multer({ storage }); // renamed so it doesn't shadow the upload controller
router.post('/upload', uploadMiddleware.single('file'), upload);
Have a look at this free Request Parsing in Node.js Guide for working with file uploads in Node.js.
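Alternatively, if you keep multer().single("file") from the question (which uses multer's default in-memory storage), a minimal sketch of a controller that writes req.file.buffer to disk instead of piping a stream (the target path is taken from the question):
import fs from "fs";
import path from "path";

const __dirname = path.dirname(new URL(import.meta.url).pathname);

export const upload = async (req, res) => {
  try {
    // with memory storage the uploaded file is exposed as a Buffer, not a Stream
    const fileName = req.body.name + ".jpg";
    const target = path.join(__dirname, "../client/public/uploads/profil", fileName);
    await fs.promises.writeFile(target, req.file.buffer);
    return res.status(201).json({ file: fileName });
  } catch (err) {
    return res.status(500).send({ message: err.message });
  }
};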
If you want to use req.file.stream, you will need to install this version of multer:
npm install --save multer@2.0.0-rc.1
and your code will work perfectly; just change req.file.mimetype to req.file.detectedMimeType.
I've built a few pages of a static website using ExpressJS and Pug to take advantage of the template engine.
But now I need to export the raw HTML that is rendered by all of the ExpressJS routes.
Is there any package that can help me do that? Or do I have to write a custom command, iterate over all the routes, and save the rendered output?
If a custom command is the only way, how do I iterate over all the routes and get the rendered output?
I couldn't find any library or resource to achieve what I wanted, but with some dirty code, hacks, and packages I was able to export all the routes.
Note: Instead of writing a node command to export the HTML, I've added a route to trigger the operation. Here is the code for the route:
app.use('/export_templates', router.get('/', async function (req, res, next) {
const endpoints = listEndpoints(app); // listEndpoints is assumed to come from a helper package such as express-list-endpoints
const failedEndpoints = [];
for (const i in endpoints) {
const endpoint = endpoints[i];
if (endpoint.path == '/export_templates') {
continue;
}
try {
await axios.get('http://' + req.headers.host + endpoint.path + '?export=true');
}
catch(error) {
failedEndpoints.push(endpoint.path);
}
}
res.json({
"status": "succes",
"message": "Please check templates folder for the latest exported html templates",
"failed": failedEndpoints
})
}));
Basically this route iterates over all the available routes and makes a request to each of them with an export=true parameter.
Then, inside every route's view function, a condition checks whether the export parameter is present and, if so, calls the exportTemplateFile function with the pug template location and the new file name as parameters.
If the request doesn't contain the export parameter, the route simply renders the template as usual.
An example route:
router.get('/', function(req, res, next) {
if (req.query.export) {
exportTemplateFile('views/index.pug', 'index.html');
}
res.render('index.pug');
});
And here is the code for 2 util function to complete the export process
function createTemplateFile(filename) {
fs.open(filename,'r',function(err, fd){
if (err) {
fs.writeFile(filename, '', function(err) {
if(err) {
console.log(err);
}
});
}
});
}
function exportTemplateFile(templateLocation, templateName) {
const html = pretty(pug.renderFile(templateLocation));
createTemplateFile('templates/'+templateName);
var stream = fs.createWriteStream('templates/'+templateName);
stream.once('open', function (fd) {
stream.write(html);
stream.end();
});
}
The createTemplateFile function simply creates a new file if it doesn't exist.
The exportTemplateFile function renders the template with Pug, stores the HTML in the html variable, prettifies it with the pretty package, and then overwrites the template file.
Note: In my case all the pug templates were static so I didn't have to pass any context to the pug.renderFile function. But if you need any context to be used inside the pug template you can simply pass that with the template location.
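For example, a sketch (with a hypothetical article local) of passing context alongside the template location:
// hypothetical locals object; its keys become variables inside the pug template
const html = pretty(pug.renderFile('views/article.pug', { article: { title: 'Hello' } }));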
Edited version of the same answer.
First of all, thank you so much for solving this problem. I have made some changes to your code to account for new errors.
Here is the code, using async/await, for EJS users:
const express = require('express')
const ejs = require('ejs')
const fs = require('fs')
const app = express()
const port = 3000
//set the templating engine as ejs
app.set('view engine', 'ejs');
function createTemplateFile(filename) {
fs.open(filename,'r',function(err, fd){
if (err) {
fs.writeFile(filename, '', function(err) {
if(err) {
console.log(err);
}
});
}
});
}
async function exportTemplateFile(templateLocation, templateName) {
var html = await ejs.renderFile(templateLocation);
createTemplateFile('templates/'+templateName);
var stream = fs.createWriteStream('templates/'+templateName);
stream.once('open', function (fd) {
stream.write(`${html}`);
stream.end();
});
}
app.get('/', (req, res, next) => {
res.render('./pages/home')
exportTemplateFile('views/pages/home.ejs', 'index.html');
console.log('file rendered and saved successfully')
})
app.listen(port, () => {
console.log(`App is listening on port ${port}`)
})
I am wondering what the proper way is to make a server response in a NodeJS Express app when an internal server error occurs. Here is some simplified code for registering users. The main issue being, if an internal server error happens, I want to log the error, but also respond to the client with a different message. What I have below is what my current solution is, but I feel like I'm not doing it properly (or not following the conventional way). I currently have an async waterfall setup which is called from the route.
//Controller.js
function verifyInputs(user, resCallback, callback) {
//verify user inputs (ie. passwords match)
if (valid) {
callback(null)
} else {
resCallback('whatever was wrong with inputs', 409)
callback('ok')
}
}
function checkIfUserExists(user, resCallback, callback) {
db.getPool().getConnection((err, connection) => {
if (err) {
resCallback('custom response error message', 500)
callback(err)
return
}
var sql = 'SELECT...'
connection.query(sql, (err, results) => {
connection.release()
if (err) {
resCallback('another custom response error', 500)
callback(err)
return
}
if (results.length > 0) {
resCallback('user already exists')
callback('ok')
}
})
})
}
module.exports.registerNewUser = function (user, callback) {
async.waterfall([
async.apply(verifyInputs, user, callback),
async.apply(checkIfUserExists, user, callback)
],
function (err, results) {
if (err === 'ok') return
//log error or whatever here
})
}
This register function is called from the routes function:
//Router.js
router.post('/register', (req, res, next) => {
var newUser = //get user data from req
controller.registerNewUser(newUser, (msg, statusCode) => {
res.status(statusCode)
res.send(msg)
})
})
The code above shows how I log the error while responding to the client with a different message. Is this the right or an OK way to do this?
Or maybe I shouldn't use a waterfall at all for this, and do something like this which would give me access to the res object at all stages without multiple callbacks:
router.post('/register', verifyInputs(), checkIfUserExists(), (req, res, next) => {
var newUser = //get user data from req
controller.registerNewUser(newUser, (msg, statusCode) => {
res.status(statusCode)
res.send(msg)
})
})
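For reference, the second style I have in mind would look roughly like this (just a sketch with hypothetical checks, reusing the db helper from above), where each step either responds or calls next():
function verifyInputs(req, res, next) {
  // hypothetical validation; respond early on bad input
  if (req.body.password !== req.body.confirmPassword) {
    return res.status(409).send('whatever was wrong with inputs')
  }
  next()
}

function checkIfUserExists(req, res, next) {
  db.getPool().getConnection((err, connection) => {
    if (err) return next(err) // an Express error handler (not shown) could log it and send the 500 response
    var sql = 'SELECT...'
    connection.query(sql, (err, results) => {
      connection.release()
      if (err) return next(err)
      if (results.length > 0) return res.status(409).send('user already exists')
      next()
    })
  })
}

router.post('/register', verifyInputs, checkIfUserExists, (req, res) => {
  // all checks passed; create the user here and respond
})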
I'm relatively new to server back-end programming, and I am new to NodeJS and Express. I just want to make sure that what I am doing is proper.
I've made a GraphQL backend using Apollo Server, Sequelize (for the ORM), MySQL (DB) and Express (Web Server).
I have also added subscriptions, which the problem is there.
I can't even reach the WS endpoint using a websocket tester.
Can someone review my code and tell me what the problem is? I looked in the docs and in other Stack Overflow questions, and I can't find any solution.
The code: https://github.com/seklyza/graphqlsubscriptions
Thanks, everyone.
I think you have to make two servers: one for the app, which uses the Express server, and one for the websocket. It could look like this.
GraphQL express server:
...
graphQLServer = express();
const GRAPHQL_PORT = 4000;
graphQLServer.use('/graphql', bodyParser.json(), graphqlExpress((request) => {
return {
schema: executableSchema,
};
}));
graphQLServer.use('/graphiql', graphiqlExpress({
endpointURL: '/graphql',
}));
graphQLServer.listen(GRAPHQL_PORT, () => {
console.log(`GraphQL Server is now running on http://localhost:${GRAPHQL_PORT}/graphql`); // eslint-disable-line no-console
});
...
websocket server for subscriptions:
...
const WS_PORT = 8080;
const websocketServer = createServer((request, response) => {
response.writeHead(404);
response.end();
});
websocketServer.listen(WS_PORT, () => console.log( // eslint-disable-line no-console
`Websocket Server is now running on http://localhost:${WS_PORT}`
));
const subscriptionManager = new SubscriptionManager({
schema: executableSchema,
pubsub: pubsub,
setupFunctions: { /* your subscription channels */ },
});
subscriptionServer = new SubscriptionServer({
subscriptionManager: subscriptionManager
}, {
server: websocketServer,
path: '/',
});
...
And you need some sort of publish/subscribe service; we use PubSub from graphql-subscriptions. It is included in the server file and looks like this:
import {
PubSub
} from 'graphql-subscriptions';
const pubsub = new PubSub();
export {
pubsub
};
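To push data to subscribers from your resolvers (or anywhere on the server), you publish to a channel on that same pubsub instance. A minimal sketch, with a hypothetical channel name and payload (the channel must match one of your setupFunctions / subscription fields):
import { pubsub } from './pubsub'; // adjust the path to wherever the PubSub instance above is exported

// hypothetical example: notify subscribers on the 'postAdded' channel
pubsub.publish('postAdded', { id: 1, title: 'Hello' });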
You can create a web socket server wrapper that implements a start method responsible for creating and running the WSServer; it will also create a SubscriptionServer using the SubscriptionManager.
// in subscription.js
import { PubSub, SubscriptionManager } from 'graphql-subscriptions';
const pubSub = new PubSub();
let subscriptionManagerOptions = {
schema: schema, // this is your graphql schema
setupFunctions: {
// here come your setup functions
},
pubsub: pubSub
};
const subscriptionManager = new SubscriptionManager(subscriptionManagerOptions);
export { pubSub, subscriptionManager };
After we have the subscriptionManager created, we can now implement the WSServer
import { createServer } from 'http';
import { SubscriptionServer } from 'subscriptions-transport-ws';
import { subscriptionManager } from './subscription';
const webSocketServerWrapper = {
start: function(port){
const webSocketServer = createServer((request, response) => {
response.writeHead(404);
response.end();
});
webSocketServer.listen(port, () => {
console.log('WSServer listening on port ' + port);
});
new SubscriptionServer({
subscriptionManager,
onSubscribe: (message, options, request) => {
return Promise.resolve(Object.assign({}, options, {}));
}
}, webSocketServer);
}
};
export default webSocketServerWrapper;
Now you can import the webSocketServerWrapper in the initialisation file like index.js and simply run webSocketServerWrapper.start(PORT);
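For example, a sketch of index.js (the file path and port are just placeholders):
// index.js
import webSocketServerWrapper from './webSocketServerWrapper';

webSocketServerWrapper.start(8080);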
Here, in the second answer which I wrote, you can find code responsible for creating an example subscription and how it should be handled.