Node.js "Cannot GET /" when fetching JSON

I launched Ubuntu on EC2 and am running the app on port 80 behind nginx. I am trying to extract the data with a RESTful GET from the JSON at "http://data.fixer.io/api/latest?access_key=xxx" and render a table. However, "Cannot GET /" pops up.
The following code works:
const express = require('express')
const app = express()

app.get('/', (req, res) => {
  res.send('HEY!')
})

app.listen(3000, () => console.log('Server running on port 3000'))
But my code doesn't work:
var request = require("request");
var EventEmitter = require("events").EventEmitter;
var body = new EventEmitter();

request("http://data.fixer.io/api/latest?access_key=xxx", function(error, response, data) {
  body.data = data;
  body.emit('update');
});

body.on('update', function () {
  console.log(body.data);
});
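Note that the second snippet never registers any Express route (or even creates an Express app), which is the usual cause of Express's default "Cannot GET /" response. A minimal sketch that wires the fetched JSON into a route (same fixer.io URL as above; rendering the actual table is left out):
const express = require('express');
const request = require('request');

const app = express();

app.get('/', (req, res) => {
  // Fetch the rates on each request and pass the raw JSON through.
  request('http://data.fixer.io/api/latest?access_key=xxx', (error, response, data) => {
    if (error) return res.status(500).send(error.message);
    res.type('json').send(data);
  });
});

app.listen(3000, () => console.log('Server running on port 3000'));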


Return node.js sql result to other file

I want to create a website where I can display results from my MySQL database.
I have finished the website and the SQL query, but I am stuck on returning my SQL result.
Here is the code for my Node.js server that renders my Pug file:
app.js
const express = require("express");
const app = express();
const path = require("path");
const router = express.Router();
var db=require("../node_js/db.js");
app.set("view engine","pug");
app.listen(3000, () => {
console.log("Application started and Listening on port 3000");
});
app.use(express.static(__dirname));
app.get("/", (req, res) => {
sql_result=query("SELECT * FROM test"); //here should i get my sql result
res.render("test",{title:"Card",details:sql_result});
});
My database connection code:
db.js
function query(sql){
  return new Promise(function(resolve, reject){
    var connection = db;
    connection.query(sql, function(err, rows, fields){
      if (err) {
        return reject(err);
      }
      resolve(rows[randomRow(0, rows.length)]);
    });
  });
}

query("SELECT * FROM SONGS").then(function(rows){
  console.log(rows);
}).catch((err) => setImmediate(() => { throw err; }));
I'm struggling to return 'rows' from query to my app.js file and use it in app.get("/").
In the first file you should require your other file to be able to use its functions.
And since query returns a promise, you can use await to get the result:
app.js
const express = require("express");
const app = express();
const path = require("path");
const query = require("./db.js");
const router = express.Router();
var db=require("../node_js/db.js");
app.set("view engine","pug");
app.listen(3000, () => {
console.log("Application started and Listening on port 3000");
});
app.use(express.static(__dirname));
app.get("/", async (req, res) => {
let sql_result=await query("SELECT * FROM test"); //here should i get my sql result
res.render("test",{title:"Card",details:sql_result});
});
and the db.js
// Assumes db (the MySQL connection) and randomRow are defined in this file, as in the question.
module.exports = function query(sql){
  return new Promise(function(resolve, reject){
    var connection = db;
    connection.query(sql, function(err, rows, fields){
      if (err) {
        return reject(err);
      }
      resolve(rows[randomRow(0, rows.length)]);
    });
  });
}
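For completeness, a rough sketch of the parts of db.js the question elides; the connection settings and the randomRow helper are illustrative assumptions, not taken from the question:
const mysql = require("mysql");

// Hypothetical connection; substitute real credentials.
const db = mysql.createConnection({
  host: "localhost",
  user: "root",
  password: "password",
  database: "db"
});
db.connect();

// Hypothetical helper: a random integer in [min, max).
function randomRow(min, max) {
  return min + Math.floor(Math.random() * (max - min));
}

module.exports = function query(sql) {
  return new Promise(function (resolve, reject) {
    db.query(sql, function (err, rows, fields) {
      if (err) {
        return reject(err);
      }
      // Resolve with one random row, as in the original code.
      resolve(rows[randomRow(0, rows.length)]);
    });
  });
};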

Best practices for MySQL + Node/Express + Angular Stack

I am currently using MySQL for the db instead of the popular MongoDB; since that is the case, there isn't much documentation out there as far as architecture and getting set up. This is my current structure:
client
-- angular files
routes
-- index.js
views
-- 404 page
app.js
I don't understand how I can implement controllers or models into this structure. I'm currently grabbing data from the db or sending it with the routes... I'm not sure what the added layer of controllers would do. Maybe this is a dumb question, but I would just like to have a clear baseline so that my project will scale well. I feel like there should be way more to this than what I currently have.
index.js
const express = require('express');
const mysql = require('mysql');
const router = express.Router();

const db = mysql.createConnection({
  host     : 'localhost',
  user     : 'root',
  password : 'password',
  database : 'db'
});

// Connect
db.connect((err) => {
  if (err) {
    throw err;
  }
  console.log('MySql Connected...');
});

// Select Data
router.get('/getData', (req, res) => {
  let sql = 'SELECT * FROM data';
  let query = db.query(sql, (err, results) => {
    if (err) throw err;
    console.log(results);
    res.send(results);
  });
});

module.exports = router;
app.js
const express = require('express');
const mysql = require('mysql');
const bodyParser = require('body-parser');
const path = require('path');
const cors = require('cors');
const compression = require('compression');
const helmet = require('helmet');
const expressSanitizer = require('express-sanitizer');
const index = require('./routes/index');

const app = express();
const port = 3000;

var corsOptions = {
  origin: 'http://localhost:8100',
  optionsSuccessStatus: 200 // some legacy browsers (IE11, various SmartTVs) choke on 204
};

// var logger = (req, res, next) => {
//   console.log('logging...')
//   next();
// }

// added security
app.use(helmet());
// // set logger
// app.use(logger)
// cors options
app.use(cors(corsOptions));
// body parser middleware
app.use(bodyParser.json());
app.use(bodyParser.urlencoded({extended: false}));
// Mount express-sanitizer here
app.use(expressSanitizer()); // this line follows bodyParser() instantiations
// set static path
app.use(express.static(path.join(__dirname, 'client')));
// set our default template engine to "ejs"
// which prevents the need for using file extensions
app.set('view engine', 'ejs');
// gzip compression
app.use(compression());
// set views for error and 404 pages
app.set('views', path.join(__dirname, 'views'));

app.use('/', index);
app.use('/fp/trips', trips); // note: trips is never required above, so this line throws as posted

app.listen(port, () => {
  console.log('server started on port 3000');
});
When working on Node apps I tend to favor a scheme where controllers are (almost) services -- I think it works really well for small applications.
This is an example:
index.js
let app = express()
let users = require('./services/users')

app.get('/users/:id', async (req, res, next) => {
  try {
    res.json(await users.getById(req.params.id))
  } catch (err) {
    next(err)
  }
})

app.listen(8080)
services/users.js
let db = require('./db')

async function getById(id) {
  let conn = await db.connect()
  let user = await conn.query('SELECT * FROM user WHERE id = ?', [id])
  if (!user) {
    throw new Error("404")
  }
  return user
}

module.exports = {getById}
services/db.js
let realDb = require('some-open-source-library-to-interact-with-db')
realDb.initialize(process.env.DB_CREDENTIALS) // pseudo-code here
module.exports = realDb
This, though, won't work well when you're building large, complex apps -- I think you will need more structure in that case.
PS: I wouldn't suggest building a large, complex app ever -- split it into smaller ones where patterns like the one I presented work nicely.
You can use Sequelize as an ORM (Object-Relational Mapper) for your MySQL DB to make your code more readable and to allow you to create a better structure for your app. It also has support for PostgreSQL, MySQL, MariaDB, SQLite, and MSSQL.
There are samples out there showing how to integrate Sequelize with Express. I'm not sure if I'm allowed to post a GitHub repository here, but here it is:
https://github.com/jpotts18/mean-stack-relational
PS. I don't own this repository but this might help you somehow.
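For a flavor of what that looks like, here is a rough sketch of the question's /getData route rewritten against a Sequelize model (v6-style imports; the model, table name, and credentials are illustrative assumptions):
const { Sequelize, DataTypes } = require('sequelize');

// Hypothetical credentials, mirroring the mysql.createConnection options above.
const sequelize = new Sequelize('db', 'root', 'password', {
  host: 'localhost',
  dialect: 'mysql'
});

// Hypothetical model for the `data` table.
const Data = sequelize.define('Data', {
  name: DataTypes.STRING
}, { tableName: 'data', timestamps: false });

// The /getData route, now going through the ORM instead of raw SQL.
router.get('/getData', async (req, res, next) => {
  try {
    res.json(await Data.findAll());
  } catch (err) {
    next(err);
  }
});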

Stormpath with OpenShift has bad route

The setup is as follows:
An OpenShift gear with a Node.js component. npm install express body-parser express-stormpath --save. The server will run if you comment out the Stormpath calls/usage.
#!/bin/env node --harmony
// File: server.js
var fs = require('fs');
var express = require('express');
var bparser = require('body-parser');
var stormpath = require('express-stormpath');

var app = express();

// Log access URLs
app.use(function (req, res, next) {
  console.log(req.url);
  next();
});

// Default response
app.get('/', function(req, res){
  res.send('<h2>Ghostfacers</h2>');
});

// Stormpath ApiKey, Secret, etc. set in environment
var baseFile = __dirname + '/index.html';
app.use(stormpath.init(app, {
  web: {
    spa: { enabled: true, view: baseFile }
  }
}));

var port = process.env.OPENSHIFT_NODEJS_PORT;
var addr = process.env.OPENSHIFT_NODEJS_IP;

app.on('stormpath.ready', function() {
  app.listen(port, addr, function() {
    console.log('%s: Started %s:%d ...', Date(Date.now()), addr, port);
  });
});
Errors in the nodejs log:
TypeError: Property 'route' of object function router(req, res, next) {
  router.handle(req, res, next);
} is not a function
    at Function.proto.(anonymous function) [as get] ...
    ...
    lib/router/index.js:509:22
    at addGetRoute ... lib/stormpath.js:137:14
After a good night's sleep and a cup of coffee I was able to get past this issue by using Express version 4.x instead of version 3.x. I will submit a ticket to Stormpath to state this dependency.
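For reference, the fix boils down to pinning the major version in package.json, something like this (range illustrative):
{
  "dependencies": {
    "express": "^4.0.0"
  }
}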

nodejs - stub module.exports functions with sinon

I have an Express app with the following routes and middleware modules. I am trying to test the routes module using mocha, chai, chai-http and sinon.
The API uses MySQL, and in order to test the routes module I have it all modularized so that I can stub out the mysql module.
However, when I try to stub middleware/index I am having trouble. If I require index normally, the module doesn't actually get stubbed. If I require it using require.cache[require.resolve('./../../lib/routes/middleware/index')];, it seems to stub something, but indexStub.returns(indexObj) fails with TypeError: indexStub.returns is not a function, and indexStub.restore fails with TypeError: indexStub.restore is not a function.
How do I stub out index.js properly in order to control the code flow and keep it from trying to connect to MySQL?
routes.js
'use strict';
const express = require('express');
const router = express.Router();
const configs = require('./../config/configs');
const middleware = require('./middleware/index');
const bodyParser = require('body-parser');

const useBodyParserJson = bodyParser.json({
  verify: function (req, res, buf, encoding) {
    req.rawBody = buf;
  }
});
const useBodyParserUrlEncoded = bodyParser.urlencoded({extended: true});

// creates a new post item and returns that post in the response
router.post('/posts', useBodyParserUrlEncoded, useBodyParserJson, middleware.validatePostData, middleware.initializeConnection, middleware.saveNewPost, middleware.closeConnection, function(req, res) {
  if (res.statusCode === 500) {
    return res.send();
  }
  if (res.statusCode === 405) {
    return res.send('Item already exists with slug ' + req.body.slug + '. Invalid method POST');
  }
  res.json(res.body).end();
});

module.exports = router;
middleware/index.js
'use strict';
const configs = require('./../../config/configs');
const database = require('./../../factories/databases').select(configs.get('STORAGE'));
const dataV = require('./../../modules/utils/data-validator');

module.exports = {
  initializeConnection: database.initializeConnection, // start connection with database
  closeConnection: database.closeConnection,           // close connection with database
  saveNewPost: database.saveNewPost,                   // creates and saves a new post
  validatePostData: dataV.validatePostData,            // validates user data
};
spec-routes.js
'use strict';
var chai = require('chai');
var chaiHttp = require('chai-http');
var sinonChai = require("sinon-chai");
var expect = chai.expect;
var sinon = require('sinon');
chai.use(sinonChai);
chai.use(chaiHttp);
var app = require('./../../app');

describe('COMPLEX ROUTES WITH MIDDLEWARE', function() {
  var indexM = require.cache[require.resolve('./../../lib/routes/middleware/index')];

  describe('POST - /posts', function() {
    var indexStub,
        indexObj;

    beforeEach(function() {
      indexStub = sinon.stub(indexM);
      indexObj = {
        'initializeConnection': function(req, res, next) {
          return next();
        },
        'closeConnection': function(req, res, next) {
          return next();
        },
        'validatePostData': function(req, res, next) {
          return next();
        }
      };
    });

    afterEach(function() {
      indexStub.restore();
    });

    it('should return a 500 response', function(done) {
      indexObj.saveNewPost = function(req, res, next) {
        res.statusCode = 500;
        return next();
      };
      indexStub.returns(indexObj);

      chai.request(app)
        .post('/posts')
        .send({'title': 'Hello', 'subTitle': 'World', 'slug': 'Example', 'readingTime': '2', 'published': false})
        .end(function(err, res) {
          expect(res).to.have.status(500);
          done();
        });
    });
  });
});
You can't use Sinon for this, as it doesn't deal with module loading at all. I see you have started doing this manually using the internal Node APIs, but I suggest you do it the way we advise in the Sinon docs for this use case: just use proxyquire.
It enables you to substitute require calls to ./middleware/index.js with a mock object of your own liking (possibly made using Sinon).
You would use it something like this:
var myIndex = {
  initializeConnection: sinon.stub(),
  closeConnection: sinon.stub(),
  saveNewPost: sinon.stub()
};

var app = proxyquire('./../../app', {'./middleware/index': myIndex});
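Since the stubbed functions are used as Express middleware, each stub also needs to invoke its next callback or the request will hang. A sketch using sinon's callsFake, reproducing the 500 case from the test above:
var sinon = require('sinon');
var proxyquire = require('proxyquire');

// Each stub behaves as pass-through middleware; saveNewPost simulates a database failure.
var myIndex = {
  validatePostData: sinon.stub().callsFake(function (req, res, next) { next(); }),
  initializeConnection: sinon.stub().callsFake(function (req, res, next) { next(); }),
  saveNewPost: sinon.stub().callsFake(function (req, res, next) {
    res.statusCode = 500;
    next();
  }),
  closeConnection: sinon.stub().callsFake(function (req, res, next) { next(); }),
  // proxyquire normally only intercepts requires made directly by the proxied
  // module; '@global' extends the override to nested requires such as routes.js.
  '@global': true
};

var app = proxyquire('./../../app', {'./middleware/index': myIndex});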

Parsing JSON in Express without BodyParser

I'm trying to write a simple Express server that takes incoming JSON (POST), parses the JSON and assigns it to the request body. The catch is I cannot use body-parser. Below is my server with a simple middleware function passed to app.use.
Problem: whenever I send dummy POST requests to my server with superagent (an npm package that lets you send JSON from the terminal), my server times out. I wrote an HTTP server in a similar fashion using req.on('data')... so I'm stumped. Any advice?
const express = require('express');
const app = express();

function jsonParser(req, res, next) {
  res.writeHead(200, {'Content-Type:': 'application/json'});
  req.on('data', (data, err) => {
    if (err) res.status(404).send({error: "invalid json"});
    req.body = JSON.parse(data);
  });
  next();
};

app.use(jsonParser);

app.post('/', (req, res) => {
  console.log('post request logging message...');
});

app.listen(3000, () => console.log('Server running on port 3000'));
I think the problem is similar to getting the rawBody in Express. Just like this:
app.use(function(req, res, next){
  var data = "";
  req.on('data', function(chunk){ data += chunk; });
  req.on('end', function(){
    req.rawBody = data;
    req.jsonBody = JSON.parse(data);
    next();
  });
});
And you need to catch the error when parsing the string to JSON, and you need to check the Content-Type of the request.
Good luck.
Another way that worked for me is collecting all the chunks into an array and parsing the concatenated chunks:
app.use("/", (req, res, next)=>{
const body = [];
req.on("data", (chunk) => {
console.log(chunk);
body.push(chunk);
});
req.on("end", () => {
const parsedBody = Buffer.concat(body).toString();
const message = parsedBody.split('=')[1];
console.log(parsedBody);
console.log(message);
});
console.log(body);
});
To get access to req.body this worked for me:
app.use(express.json()); // note: the extended option belongs to express.urlencoded, not express.json
In Express v4.16.0 onwards:
app.use(express.json())
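Tying the answers together, a minimal sketch of a hand-rolled JSON parser that only calls next() once the body has fully arrived (the original middleware calls next() before any 'data' events fire, and its POST handler never sends a response, which is why the client times out):
const express = require('express');
const app = express();

// Buffer the chunks, parse on 'end', and only then hand off to the next middleware.
function jsonParser(req, res, next) {
  if (!req.is('json')) return next(); // only handle JSON bodies
  const chunks = [];
  req.on('data', (chunk) => chunks.push(chunk));
  req.on('end', () => {
    try {
      req.body = JSON.parse(Buffer.concat(chunks).toString());
      next();
    } catch (err) {
      res.status(400).json({error: 'invalid json'});
    }
  });
}

app.use(jsonParser);

app.post('/', (req, res) => {
  res.json(req.body); // always end the response so the client doesn't hang
});

app.listen(3000, () => console.log('Server running on port 3000'));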