JSON file input error on fs module of nodejs websocket script

I have a json file which is updated every 30 seconds by an application on the server. I have written a small server script for nodejs to provide this json file over websocket (socket.io) as given below.
var app = require('http').createServer();
var io = require('socket.io')(app);
var fs = require('fs');

app.listen(8081, '127.0.0.1');

// Send the current contents of the file to every newly connected client.
io.on('connection', function (socket) {
    fs.readFile('/tmp/live-info', 'utf8', function (err, data) {
        if (err) throw err;
        var firstDataObj = JSON.parse(data);
        socket.emit('alert', firstDataObj);
    });
});

var prObj;

// Poll the file and broadcast it only when something other than the timestamp has changed.
setInterval(function () {
    fs.readFile('/tmp/live-info', 'utf8', function (err, data) {
        if (err) throw err;
        data = JSON.parse(data);
        var scTime = data.schedulerTime;
        delete data.schedulerTime;
        if (JSON.stringify(prObj) !== JSON.stringify(data)) {
            prObj = data;
            console.log(prObj.current.name, io.engine.clientsCount);
            prObj.schedulerTime = scTime;
            io.emit('alert', prObj);
            delete prObj.schedulerTime;
        } else {
            console.log("No change", prObj.current.name, "Total connections: ", io.engine.clientsCount);
        }
    });
}, 21100);
As you can see, the script periodically checks the JSON file and emits the parsed object whenever its contents (other than the timestamp) have changed.
I am managing the Node.js process on the server using pm2. Everything works fine, but I am repeatedly getting the following error in the logs:
SyntaxError: Unexpected end of JSON input
at Object.parse (native)
at /var/www/scheduleio/server.js:15:25
at tryToString (fs.js:449:3)
at FSReqWrap.readFileAfterClose [as oncomplete] (fs.js:436:12)
There are no noticeable breaks on the client either. However, I would like to understand how to fix this error, as I am getting around 200 of them in the logs each day, and whether the script can be enhanced in any way (this is my first such script).

My guess is that the error occurs when the other application is writing the file at the same moment your Node.js script reads it, so you end up parsing JSON that has not been completely written yet.
The error "Unexpected end of JSON input" means the JSON is malformed, in this case truncated.
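A minimal way to make the script tolerant of that race, assuming the same /tmp/live-info file and variables as above, is to wrap JSON.parse in try/catch and simply skip a polling cycle whenever the file is caught half-written; the next run of the interval will pick up the complete file. This is only a sketch of the setInterval callback, not a drop-in replacement:
setInterval(function () {
    fs.readFile('/tmp/live-info', 'utf8', function (err, data) {
        if (err) throw err;
        var parsed;
        try {
            parsed = JSON.parse(data);
        } catch (e) {
            // The writer was most likely mid-write; skip this cycle and try again on the next interval.
            console.log("Skipping partial/malformed JSON:", e.message);
            return;
        }
        var scTime = parsed.schedulerTime;
        delete parsed.schedulerTime;
        if (JSON.stringify(prObj) !== JSON.stringify(parsed)) {
            prObj = parsed;
            prObj.schedulerTime = scTime;
            io.emit('alert', prObj);
            delete prObj.schedulerTime;
        }
    });
}, 21100);
A more robust fix would be for the writing application to write to a temporary file and then rename it over /tmp/live-info; on the same filesystem the rename is atomic, so readers never see a half-written file, but that requires changing the writer.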

Related

Getting an error when using methods.myMethod.call() with web3js

I am getting an error trying to call an existing smart contract function using call().
The error is "Returned values aren't valid, did it run Out of Gas? You might also see this error if you are not using the correct ABI for the contract you are retrieving data from, requesting data from a block number that does not exist, or querying a node which is not fully synced." My code is below
let url = 'https://api.etherscan.io/api?module=contract&action=getabi&address=0x672C1f1C978b8FD1E9AE18e25D0E55176824989c&apikey=<api-key>';
request(url, (err, res, body) => {
    if (err) {
        console.log(err);
    }
    let data = JSON.parse(body);
    let contract_abi = JSON.parse(data.result);
    let contract_address = '0x672C1f1C978b8FD1E9AE18e25D0E55176824989c';
    const contract = new web3.eth.Contract(contract_abi, contract_address);
    contract.methods.totalSupply().call()
        .then(result => {
            console.log('result', result);
        }).catch(err => {
            console.log('error: ', err);
        })
})
When I execute the same function using send() it works, however I need the return value of the function which is why I want to use call(). I am using ganache to set up a local test network which is working fine. Thanks!

How to properly set up node js rest api

I'm an Android dev trying to make a simple REST API with Node.js, so I'm basically new to JS.
I'm setting up a new REST API and want to connect it to a MySQL database.
I was trying to solve it this way, but I'm getting errors.
Also, what connection limit should I set?
const express = require('express');
const db = require('../db');
const mainNewsRouter = express.Router();

mainNewsRouter.get('/', async (req, res, next) => {
    try {
        let result = await db.getMainNews();
        console.log(res.json(result));
        res.json(result);
    } catch(e) {
        console.log(e);
    }
});

module.exports = mainNewsRouter;

//DbHandler.js
var mysql = require('mysql2');
const url = require('url');
var SocksConnection = require('socksjs');

var remote_options = {
    host: 'xxx',
    port: 3306
};

var proxy = url.parse('http://xxx:xxx#us-east-static-06.quotaguard.com:xxx');
var auth = proxy.auth;
var username = auth.split(":")[0];
var pass = auth.split(":")[1];

var sock_options = {
    host: proxy.hostname,
    port: 1080,
    user: username,
    pass: pass
};

var sockConn = new SocksConnection(remote_options, sock_options);

var dbConnection = mysql.createPool({
    connectionLimit: 10,
    user: 'xxx',
    database: 'xxx',
    password: 'xxx',
    stream: sockConn
});

getMainNews = () => {
    return new Promise((resolve, reject) => {
        dbConnection.query('SELECT ... * from ...;',
            (err, results) => {
                if (err) {
                    return reject(err);
                };
                // sockConn.dispose();
                return resolve(results);
            });
    });
    dbConnection.end();
};
On the first API call I get data from the database, but with this error:
Error [ERR_HTTP_HEADERS_SENT]: Cannot set headers after they are sent to the client
at ServerResponse.setHeader (_http_outgoing.js:470:11)
at ServerResponse.header (node_modules\express\lib\response.js:771:10)
at ServerResponse.send (node_modules\express\lib\response.js:170:12)
at ServerResponse.json (node_modules\express\lib\response.js:267:15)
at mainNewsRouter.get (server\routes\mainNews.js:10:11)
at process._tickCallback (internal/process/next_tick.js:68:7)
After the second API call there is no data; I only get this exception.
> Server is running on port: { Error: This socket has been ended by the
> other party
> at Socket.writeAfterFIN [as write] (net.js:395:12)
> at SocksConnection._write (node_modules\socksjs\socks.js:72:24)
> at doWrite (_stream_writable.js:415:12)
> at writeOrBuffer (_stream_writable.js:399:5)
> at SocksConnection.Writable.write (_stream_writable.js:299:11)
> at PoolConnection.write (node_modules\mysql2\lib\connection.js:221:17)
> at PoolConnection.writePacket(node_modules\mysql2\lib\connection.js:279:12)
> at ClientHandshake.sendCredentials (node_modules\mysql2\lib\commands\client_handshake.js:63:16)
> at ClientHandshake.handshakeInit (node_modules\mysql2\lib\commands\client_handshake.js:136:12)
> at ClientHandshake.execute (node_modules\mysql2\lib\commands\command.js:39:22) code: 'EPIPE',
> fatal: true }
Although I am by no means an expert, I think one of the issues lies with closing the connection. The whole idea of a pool is to release the connection back to the pool, not close it.
I have done testing on connection pools and have used a pool size of min:4 max:12 with 100s of requests per second without running into connections issues with MySQL.
Personally, I use Knex to manage my db connections; it manages the pool too, taking care of a lot of the headache. It has low overhead, and I think it would be worth porting that part of your code over to it. Once the connection issue is sorted out, you can tackle other issues as they crop up.
Again, I am not an expert and cannot exactly nail down releasing the MySQL connection back to the pool in the code above, but I do think that is why you don't get data after your initial call.
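For illustration, a minimal Knex-based db module along those lines might look like the sketch below (ignoring the QuotaGuard SOCKS proxy, which would need separate handling). The credentials are placeholders, 'main_news' is a hypothetical table name, and the min 4 / max 12 pool mirrors the sizes mentioned above rather than anything from the question:
// db.js - hypothetical sketch of managing the MySQL pool with Knex (placeholder credentials).
const knex = require('knex')({
    client: 'mysql2',
    connection: {
        host: 'xxx',
        user: 'xxx',
        password: 'xxx',
        database: 'xxx'
    },
    pool: { min: 4, max: 12 }
});

// Connections are acquired from and released back to the pool automatically,
// so there is no need to call end() after each query.
const getMainNews = () => knex.select('*').from('main_news'); // 'main_news' is a placeholder table

module.exports = { getMainNews };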
It won't answer your full question, but still: the error "Error [ERR_HTTP_HEADERS_SENT]: Cannot set headers after they are sent to the client" means that the headers have already been set and the code is trying to set them again. The headers are set when we send a response; they include the content type, content length, status, and all the other information about the response being sent. When we call res.send, res.json, or res.render, i.e. send a response, the headers get set automatically from that information (Express does it for us; in pure Node.js we would have to set every header ourselves). Notice that you have written res.json twice, which means the headers get set twice. Also, wrapping res.json inside console.log doesn't make any sense. Why have you done that?
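To illustrate that point, the route could send the response exactly once and forward errors to Express instead of swallowing them. This is only a sketch based on the code in the question (it assumes db exports a getMainNews() that resolves to the rows), not a fix for the socket/pool problems:
const express = require('express');
const db = require('../db');
const mainNewsRouter = express.Router();

mainNewsRouter.get('/', async (req, res, next) => {
    try {
        const result = await db.getMainNews();
        console.log('main news rows:', result.length); // log the data, not the return value of res.json()
        res.json(result);                              // send the response exactly once
    } catch (e) {
        next(e);                                       // let the Express error handler send the error response
    }
});

module.exports = mainNewsRouter;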

NodeJS Failing to load in credentials file AWS

This is what my code looks like:
'use strict';
process.env.AWS_PROFILE
// Load the AWS SDK for Node.js
const AWS = require('aws-sdk');
// Create EC2 service object
var ec2 = new AWS.EC2({apiVersion: '2016-11-15'});
// Load credentials and set region from JSON file
AWS.config.loadFromPath('/Users/testuser/.aws/credentials');
// Load in security group parameters
const securityParams = require('./securityParams.json');

module.exports = {
    //Exports creation of Security Groups
    CreateSecurityGroup: (req, res) => {
        ec2.createSecurityGroup(securityParams, function(err, data) {
            if (err) {
                return (console.log("Error", err));
            }
            // Pass the Json as a parameter in this function
            ec2.authorizeSecurityGroupIngress(securityParams, function(err, data) {
                if (err) {
                    res.serverError(err, err.stack);
                } else {
                    res.ok(data);
                    console.log('Ingress Security Rules Created');
                }
            })
            // Pass the Json as a parameter in this function
            ec2.authorizeSecurityGroupEgress(securityParams, function(err, data) {
                if (err) {
                    res.serverError(err, err.stack);
                } else {
                    res.ok(data);
                    console.log('Egress Security Rules Created');
                }
            })
        })
    }
}
I'm trying to have the script load configuration from two files: one AWS credentials file and one JSON file. However, it's throwing errors on the credentials file, which looks like this:
[default]
aws_access_key_id=**************
aws_secret_access_key=**************
I'm not sure what I'm missing to get it to read the properties in correctly.
Here is the error I'm seeing:
undefined:1
[default]
^
SyntaxError: Unexpected token d in JSON at position 1
at JSON.parse (<anonymous>)
credentials is a plain ASCII file, not a JSON file, so AWS.config.loadFromPath() cannot parse it.
// Load credentials and set region from JSON file
AWS.config.loadFromPath('/Users/testuser/.aws/credentials');
You can check the file type with the command file /Users/testuser/.aws/credentials.
Here is a sample snippet to read the properties file and set the AWS config:
var PropertiesReader = require('properties-reader');
var AWS = require('aws-sdk');
var properties = PropertiesReader('/Users/username/.aws/credentials');

AWS.config.update({
    accessKeyId: properties.get('aws_access_key_id'),
    secretAccessKey: properties.get('aws_secret_access_key'),
    region: 'us-west-2'
});
console.log(AWS.config);
Ref: https://www.npmjs.com/package/properties-reader
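As an aside, the AWS SDK for JavaScript (v2) can also read the shared credentials file itself via AWS.SharedIniFileCredentials, which avoids parsing the INI file manually. A brief sketch, assuming the default profile and a hard-coded region:
const AWS = require('aws-sdk');

// Let the SDK parse ~/.aws/credentials itself instead of treating it as JSON.
AWS.config.credentials = new AWS.SharedIniFileCredentials({ profile: 'default' });
AWS.config.update({ region: 'us-west-2' });

const ec2 = new AWS.EC2({ apiVersion: '2016-11-15' });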

HTTP response code, response time, response length missing in Node.js / Express console output

I've got a little app that takes a request route in Node.js using Express, for example:
http://192.168.0.10:3000/db/
Here, /db/ is the route. This is fine: I have a db.js file that runs and makes a call to a MySQL server that is expected to take a long time (possibly minutes) to return the results of a large join. Everything is okay if I make just a few requests now and then, because I modified my settings for acceptable timeouts. I get results like the following on the Node.js console:
GET /db/ 200 88569.341 ms - 89
All gravy. However, now I'm using Apache Bench (I know it is not popular with everyone) to hammer the Node.js server and see what happens when I make lots of requests to the MySQL server, which is on a separate machine. When I use this technique to increase concurrent requests through Node.js, I start intermittently getting the following:
GET /db/ - - ms - -
I don't know what this means or how to interpret it, but I believe something bad is happening, because the time taken for a test run drops by 2/3 or so (~700 seconds to ~400 seconds or thereabouts) and the console logging I added in my code is not output enough times to indicate that every request is being serviced. Any tips on how to debug this and find out why some requests seem to be failing?
Note: the MySQL package I'm using is mysql, but I'm thinking that's not where the problem lies.
In case it helps, here is the code for my route. As a reminder, this code works at low concurrency. It also works without errors if I make the requests through Apache/PHP instead of Node.js.
var express = require('express');
var router = express.Router();
var mysql = require('mysql');

/* GET users listing. */
router.get('/', function(req, res, next) {
    // The code below borrowed in part from the NPM MySQL package documentation at:
    // https://www.npmjs.com/package/mysql
    var connection = mysql.createConnection({
        host: /* my host */,
        user: /* username */,
        password: /* password */,
        database: /* my db name */
    });
    connection.connect();
    connection.query({sql: /* some huge join query */, timeout: 999999}, function(err, rows, fields) {
        if (err) {
            var output = "error! " + err;
            console.log(output);
            res.send(output);
        }
        else {
            var output = "good: " + JSON.stringify(rows);
            console.log(output);
            res.send(output);
        }
    });
    connection.end();
});

module.exports = router;
The #1 tip for debugging heavy load is to make sure ALL possible error paths are handled and logged. connection.connect() and connection.end() both take callbacks that could report errors. And just because you set one connection limit to 100,000 does not mean you've removed all limits on how many simultaneous connections the system might allow. Limits can occur at many different levels, all the way from the library you are using down to the underlying OS.
I'd suggest you add more robust logging so you can see whether every request that started actually finished, and make sure all possible error paths are logged. When this runs, you should see "Matched requestCntrs" as one of the last log entries. If not, then some operation did not complete properly or an error was not logged.
var express = require('express');
var router = express.Router();
var mysql = require('mysql');

var requestCntrStart = 0;
var requestCntrDone = 0;
var requestCntrErr = 0;

/* GET users listing. */
router.get('/', function(req, res, next) {
    // The code below borrowed in part from the NPM MySQL package documentation at:
    // https://www.npmjs.com/package/mysql
    ++requestCntrStart;
    var connection = mysql.createConnection({
        host: /* my host */,
        user: /* username */,
        password: /* password */,
        database: /* my db name */
    });

    // log any other errors
    connection.on('error', function(err) {
        ++requestCntrErr;
        console.log("Uncaught connection error: ", err.code); // 'ER_BAD_DB_ERROR'
        logMatch();
    });

    function logMatch() {
        if (requestCntrStart !== (requestCntrDone + requestCntrErr)) {
            console.log("Unmatched requestCntrs: requestCntrStart = " + requestCntrStart +
                ", requestCntrDone = " + requestCntrDone + ", requestCntrErr = " + requestCntrErr);
        } else {
            console.log("Matched requestCntrs");
        }
    }

    connection.connect(function(err) {
        if (err) {
            ++requestCntrErr;
            console.log("connection.connect() error: ", err);
            logMatch();
        }
    });

    connection.query({sql: 'some huge join query', timeout: 999999}, function(err, rows, fields) {
        if (err) {
            ++requestCntrErr;
            var output = "connection.query() error! " + err;
            console.log(output);
            res.send(output);
            logMatch();
        } else {
            ++requestCntrDone;
            res.send("good: " + JSON.stringify(rows));
            logMatch();
        }
    });

    connection.end(function(err) {
        if (err) {
            ++requestCntrErr;
            console.log("connection.end() error: ", err);
            logMatch();
        }
    });
});

module.exports = router;
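One further change worth considering, which the answer above does not show, is creating a single connection pool when the module loads instead of a new connection per request, so heavy Apache Bench runs reuse a bounded number of MySQL connections. A rough sketch with the same mysql package; the connection details and the pool size of 20 are placeholders, not values from the question:
// Hypothetical sketch: one shared pool per process instead of one connection per request.
var express = require('express');
var router = express.Router();
var mysql = require('mysql');

var pool = mysql.createPool({
    connectionLimit: 20,          // arbitrary placeholder; tune to what your MySQL server allows
    host: 'my-host',              // placeholder connection details
    user: 'username',
    password: 'password',
    database: 'my-db'
});

router.get('/', function(req, res) {
    // pool.query() acquires a connection, runs the query, and releases the connection automatically.
    pool.query({ sql: 'some huge join query', timeout: 999999 }, function(err, rows) {
        if (err) {
            console.log("pool.query() error:", err);
            return res.status(500).send("error! " + err);
        }
        res.send("good: " + JSON.stringify(rows));
    });
});

module.exports = router;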

Object #<IncomingMessage> has no method 'serverError'

I am using node.js: sailsjs on ubuntu
node version: 0.10.37
sails version: 0.11.0
npm version: 1.4.28
ubuntu version: 14.04.2
I am trying to log the value returned by the GCM push notification service into a MySQL table. The part of the push notification service (PushNotifications.js) that logs into the db table is shown below:
var req = https.request(options, function(res) {
    res.setEncoding('utf8');
    res.on('data', function (chunk) {
        console.log('Response from GCM: ' + chunk);
        GcmLog.add(recipients, chunk, function(err, response) {
            if (err) {
                res.serverError(err);
            } else {
                res.json({msg: 'yes'});
            }
        });
    });
});
The GcmLog.js model has the following function to push the data into the db table:
add: function(device_id, chunk, callback) {
    GcmLog.create({user: device_id, log: chunk}).exec(function(err, response) {
        if (err) {
            return callback(err, null);
        } else {
            callback(null, response);
        }
    });
}
When I send a push notification, the message is delivered successfully and displayed on the Android device, but nothing gets logged into the db table. I am getting the following error in the app's log file:
/path/to/node_modules/sails-mysql/node_modules/mysql/lib/protocol/Parser.js:82
throw err;
^
TypeError: Object #<IncomingMessage> has no method 'serverError'
at /path/to/api/services/PushNotifications.js:44:13
at /path/to/api/models/GcmLog.js:29:12
at bound (/path/to/node_modules/sails/node_modules/lodash/dist/lodash.js:957:21)
at applyInOriginalCtx (/path/to/node_modules/sails/node_modules/waterline/lib/waterline/utils/normalize.js:416:80)
at wrappedCallback (/path/to/node_modules/sails/node_modules/waterline/lib/waterline/utils/normalize.js:326:16)
at _normalizeCallback.callback.error (/path/to/node_modules/sails/node_modules/waterline/node_modules/switchback/lib/normalize.js:42:31)
at _switch (/path/to/node_modules/sails/node_modules/waterline/node_modules/switchback/lib/factory.js:46:28)
at /path/to/node_modules/sails/node_modules/waterline/lib/waterline/query/dql/create.js:216:14
at bound (/path/to/node_modules/sails/node_modules/lodash/dist/lodash.js:957:21)
at applyInOriginalCtx (/path/to/node_modules/sails/node_modules/waterline/lib/waterline/utils/normalize.js:416:80)
at wrappedCallback (/path/to/node_modules/sails/node_modules/waterline/lib/waterline/utils/normalize.js:326:16)
at _normalizeCallback.callback.error (/path/to/node_modules/sails/node_modules/waterline/node_modules/switchback/lib/normalize.js:42:31)
at _switch (/path/to/node_modules/sails/node_modules/waterline/node_modules/switchback/lib/factory.js:46:28)
at afterwards (/path/to/node_modules/sails/node_modules/waterline/lib/waterline/adapter/dql.js:88:16)
at bound (/path/to/node_modules/sails/node_modules/lodash/dist/lodash.js:957:21)
at applyInOriginalCtx (/path/to/node_modules/sails/node_modules/waterline/lib/waterline/utils/normalize.js:416:80)
error: Forever detected script exited with code: 8
The server then crashes and restarts after several attempts. I am unable to trace the reason for this. Any help explaining what is happening and how to overcome it would be appreciated.
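For what it is worth, the stack trace itself points at a likely cause: inside the https.request callback, res is the HTTPS response from GCM (an IncomingMessage), which shadows any Sails response in scope and has no serverError or json methods. A hypothetical sketch of a fix is simply to rename the inner parameter so those calls reach the real Sails response (assumed here to be available as sailsRes from the enclosing controller; options and recipients come from the surrounding code in the question):
// Hypothetical sketch: rename the GCM response so it no longer shadows the Sails response object.
var req = https.request(options, function (gcmRes) {
    gcmRes.setEncoding('utf8');
    gcmRes.on('data', function (chunk) {
        console.log('Response from GCM: ' + chunk);
        GcmLog.add(recipients, chunk, function (err, response) {
            if (err) {
                sailsRes.serverError(err);    // sailsRes: the Sails response, assumed in scope
            } else {
                sailsRes.json({ msg: 'yes' });
            }
        });
    });
});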