Read file with fs module and keep the original file

I'm currently working on a Node.js API that sends mail.
index.handlebars is a template that I want to use every time I need to send an email.
So I use the Node file system module (fs) to readFileSync() the template and then replace() the needed data before sending the email to the user.
Here is an example:
const readMe = fs.readFileSync('./mails/index.handlebars', 'utf8', (error, data) => {
  if (error) {
    console.log(error);
  } else {
    data = data.toString('utf8').replace('{%CONFIRMATION%}', "SELLER AS VALIDATE YOUR ORDER")
    return data
  }
})
console.log(readMe);
First, sometimes replace() does not work for me and nothing happens; I don't know why.
But when it does work, my goal is to not overwrite index.handlebars. What I mean by that is: replace() all the placeholders and send the result, BUT keep index.handlebars as it was before the replace().
Is it possible?
Thanks a lot.

The fs module provides fs.readFile (reads a file asynchronously and takes a callback) and fs.readFileSync (reads synchronously and does not take a callback).
You are currently mixing up the two signatures by doing a synchronous read with a callback: readFileSync ignores the extra callback argument and simply returns the file contents, so your callback (and its replace()) never runs. That is why replace() sometimes appears to do nothing.
To use readFileSync (synchronous), you should:
// synchronous without callback
const data = fs.readFileSync('./mails/index.handlebars', { encoding: 'utf8', flag: 'r' })
const replaced = data.replace('{%CONFIRMATION%}', "SELLER AS VALIDATE YOUR ORDER")
console.log(replaced);
For readFile (asynchronous), you use the callback:
// asynchronous with callback
fs.readFile('./mails/index.handlebars', 'utf8', (error, data) => {
  if (error) {
    console.log(error);
  } else {
    data = data.replace('{%CONFIRMATION%}', "SELLER AS VALIDATE YOUR ORDER")
    // perform necessary operations on data
    console.log(data);
  }
})
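As for keeping index.handlebars intact: replace() returns a new string, and neither fs.readFile nor fs.readFileSync ever writes to disk, so the template file is untouched unless you explicitly write to it (e.g. with fs.writeFile). A minimal sketch of a reusable helper (the function name is illustrative, not from your code):

const fs = require('fs');

// Reads the template fresh on every call; the file itself is never modified.
function renderConfirmation(message) {
  const template = fs.readFileSync('./mails/index.handlebars', 'utf8');
  // replace() returns a NEW string; `template` and the file on disk stay as they were
  return template.replace('{%CONFIRMATION%}', message);
}

console.log(renderConfirmation('SELLER AS VALIDATE YOUR ORDER'));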

Related

Getting an error when using methods.myMethod.call() with web3js

I am getting an error trying to call an existing smart contract function using call().
The error is "Returned values aren't valid, did it run Out of Gas? You might also see this error if you are not using the correct ABI for the contract you are retrieving data from, requesting data from a block number that does not exist, or querying a node which is not fully synced." My code is below
let url = 'https://api.etherscan.io/api?module=contract&action=getabi&address=0x672C1f1C978b8FD1E9AE18e25D0E55176824989c&apikey=<api-key>';
request(url, (err, res, body) => {
  if (err) {
    console.log(err);
  }
  let data = JSON.parse(body);
  let contract_abi = JSON.parse(data.result);
  let contract_address = '0x672C1f1C978b8FD1E9AE18e25D0E55176824989c';
  const contract = new web3.eth.Contract(contract_abi, contract_address);
  contract.methods.totalSupply().call()
    .then(result => {
      console.log('result', result);
    }).catch(err => {
      console.log('error: ', err);
    })
})
When I execute the same function using send() it works; however, I need the return value of the function, which is why I want to use call(). I am using ganache to set up a local test network, which is working fine. Thanks!
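One common cause of this exact error is that the connected node has no contract code at the address being called: the address above is a mainnet address, while the queries go to a local ganache network. A quick diagnostic sketch (an assumption on my part, using web3 1.x already connected to the node in question):

// If this prints '0x', there is no contract deployed at that address on the
// connected network, and any call() will fail with the "Returned values
// aren't valid" error regardless of the ABI.
web3.eth.getCode('0x672C1f1C978b8FD1E9AE18e25D0E55176824989c')
  .then(code => console.log('code at address:', code));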

How to send Post request for each iteration of an array in Node/Express?

I have some raw JSON that I'm trying to send to my back-end MySQL server. I'm currently looping through the specific array in the JSON that I need and inserting data from each of its children via a POST request, but I am getting "Cannot set headers after they are sent to the client".
app.post('/reddit-import', function (req, res) {
  console.log("Route /reddit-import POST");
  let data = req.body.data.children
  data.forEach(child => {
    let sql1 = `CALL insert_user('${child.data.author}',
                                 '${child.data.author_fullname}');`
    connection.query(sql1, data, function (errQuery, result) {
      if (errQuery) {
        console.log(errQuery);
        res.json({status: "Error", err: errQuery});
        res.end();
      } else {
        console.log("Insert ID: ", result.insertId);
        res.json({status: result.insertId, err: ""});
        res.end();
      }
    });
  });
});
When I send the POST request, my backend gets 2 rows of data before it hits me with the error message...any ideas?
You are ending the response inside data.forEach: the first iteration's res.json()/res.end() sends the response, and every subsequent iteration then tries to send headers again, which is exactly what "Cannot set headers after they are sent to the client" means.
Try this if you need to keep track of the insert IDs:
app.post('/reddit-import', function(req, res) {
  console.log("Route /reddit-import POST");
  let data = req.body.data.children
  const insertIds = data.map(child => {
    return new Promise((resolve, reject) => {
      const sql = `CALL insert_user('${child.data.author}', '${child.data.author_fullname}')`;
      connection.query(sql, (err, result) => {
        if (err) {
          console.log(err);
          return reject(err);
        }
        console.log("Insert ID: ", result.insertId);
        return resolve(result.insertId);
      });
    });
  });
  return Promise.all(insertIds)
    .then(ids => {
      return res.json({ insertIds: ids });
    })
    .catch(err => {
      return res.status(500).json({ message: 'got query error' });
    });
});
What this basically does is keep track of the insert ID on each query. We need to use Promises because the query() function is asynchronous: it runs independently, and there's no other way to capture its data outside of its function(err, result) callback. That gives us an array of Promises which will contain the insert IDs.

What's left is to send a response saying the request was successful. We can't simply do res.json(insertIds), because insertIds is an array of Promises and we still need to extract the values; Promise.all(insertIds).then(ids => ...) extracts all of them at once. If you wish to send a response informing the client that the request succeeded, do so in this then callback. Lastly and most importantly, we handle errors in the Promise chain's .catch() block. This is where you want to send a response informing the client that there were errors.
Some things that could be improved in this solution: implementing rollbacks in case of errors, and of course validation of the parameters. Unfortunately I have to leave those to the OP to implement.
Also, keep in mind you should send exactly one response per request.
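On the validation point: interpolating ${child.data.author} straight into the SQL string is open to SQL injection. A small sketch of the same call using the mysql driver's placeholder syntax (the stored procedure name comes from the question; resolve/reject are the ones from the Promise above):

// '?' placeholders make the driver escape the values itself.
connection.query('CALL insert_user(?, ?)',
  [child.data.author, child.data.author_fullname],
  (err, result) => {
    if (err) return reject(err);
    resolve(result.insertId);
  });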

node js mysql rest api - queue http requests - persist data

What is the best architectural design for following scenario?
I want to build a CRUD microservice using Node, Express and MySQL. The CREATE portion is quite complex, due to a large piece of JSON with many relational properties in each HTTP POST request. The request body looks something like this:
{
  key1: string      <-- saved as foreign_key
  key2: {...}
  key3: int
  key4: [           <-- saved as n:m with corresponding table
    {...},
    {...},
  ]
  ...
  ...
  keyXYZ: ...
  key46: int        <-- saved as foreign_key
  key47: string
}
The module which does all query-operations looks like this:
persistData = async (data, dbConnection) => {
  const idSomething1 = await fetchOrCreateSomething1(data.key1).catch(err => console.log(err));
  const idSomething2 = await fetchOrCreateSomething2(data.key46).catch(err => console.log(err));
  const idSomething3 = await fetchOrCreateSomething3(data.keyXYZ).catch(err => console.log(err));
  const idManyThings = await fetchOrCreateManyThings(idSomething1, idSomething2, idSomething3, data.moreStuff...).catch(err => console.log(err));
}
All the fetchOrCreateSomethingX = async () => {} functions are async so that the main persistData function can wait for a newly created or retrieved record id.
This is wrapped inside an exported constructor function:
function DataHandler(data, res) {
  db.getConnection()
    .then((dbConnection) => {
      persistData(data, dbConnection)
        .then(() => {
          dbConnection.release();
        });
    });
}
module.exports = DataHandler;
The endpoint does the following:
const createFunc = (req, res) => {
  new DataHandler(req.body, res);
};
app.post("/create", createFunc);
I know that especially the last part does not work, because the new DataHandler object is overwritten as soon as the endpoint gets hit again. If the persisting process hasn't finished before the endpoint gets hit again, the data from the first request is lost. I also know that express won't be able to send back responses, which isn't ideal. If each new DataHandler were stored in a new variable or const, then at least both processes would run. But the main problem is that the data gets shuffled, as persistData() runs in parallel and the calls are not encapsulated from each other.
I can't find any example or best practice for how to design this well. Any hint or resource would be great!
Is a queuing system like the kue library the way to go?
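As a minimal sketch of one direction (my assumption, reusing persistData and db from the snippets above, not an answer from the original thread): each request already gets its own req/res pair and its own persistData call, so awaiting the work inside an async handler keeps requests independent and lets express send a response:

const createFunc = async (req, res) => {
  const dbConnection = await db.getConnection();
  try {
    // persistData is the function from the question; each request has its own call
    await persistData(req.body, dbConnection);
    res.status(201).json({ status: 'created' });
  } catch (err) {
    res.status(500).json({ error: err.message });
  } finally {
    dbConnection.release();
  }
};
app.post('/create', createFunc);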

how to unfulfill a fulfilled promise in bluebird

I have a situation where I stored an array of operations wrapped in promises. Now, every time a request hits the API it executes these operations.
Let's say I have created a bunch of operations to get a status of running services:
const jobs = [];

function statusRequest (url) {
  return new Promise((resolve, reject) => {
    request(url, function(error, resp, body) {
      if (error) {
        return reject(error);
      }
      resolve(body);
    });
  });
}

services.forEach(url => {
  jobs.push(statusRequest(url));
});
function handler (request, reply) {
  Promise.all(jobs).then(results => {
    // handle results
  })
  .catch(error => reply(error).code(500))
}

module.exports = {
  'auth' : false,
  'description' : 'Get the status of all the services',
  'tags' : [ 'api', 'system', 'status' ],
  handler
};
When a request hits the API handler for the first time, it fetches the status and sets its fulfilment value to that result (cached). Succeeding requests to the API get the first fulfilment value back.
Is there a way to reset the promise to unfulfilled so each call would just execute the operations already created?
"Is there a way to reset the promise to unfulfilled so each call would just execute the operations already created?"
No, you cannot do this.
To get access to the result of an existing promise, you don't need to unfulfill it (which is something you can't do anyway). You can just add a new .then() handler to it.
It sounds like you can just cache the original promise and use new .then() handlers on it. This will then just return the previous result. It's a simple way to do caching of async results.
var doSomeAPICall = (function() {
  var cachedPromise;
  return function() {
    if (!cachedPromise) {
      cachedPromise = doSomeAsyncOperation();
    }
    return cachedPromise;
  }
})();
And, sample usage:
doSomeAPICall().then(function(data) {
  // process data here
});

// the second time, it will be using a cached result
doSomeAPICall().then(function(data) {
  // process data here
});
Remember, you can always add more .then() handlers to an existing promise to get access to the result it will have or already has.
FYI, you can't unfulfill a promise. Promises are purposely one-shot devices that latch their state once they are fulfilled or rejected. You can add new .then() handlers to it to get access to the result of the promise or you can create a new promise if you want to execute a new async operation. You don't reuse an existing promise to execute another async operation. They are explicitly designed to prevent that.
If this isn't what you are trying to do, then please clarify your question some more so we can more clearly understand what you're trying to achieve.
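If what you actually want is the opposite of caching, i.e. fresh status results on every request, a minimal sketch of that variant (my assumption, reusing statusRequest and services from the question) is to create the promises inside the handler so each call re-runs the operations:

function handler (request, reply) {
  // Creating the promises here (not at module load time) re-runs the
  // status requests on every API call instead of replaying the first
  // cached result.
  const jobs = services.map(url => statusRequest(url));
  Promise.all(jobs)
    .then(results => reply(results))
    .catch(error => reply(error).code(500));
}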

How to return a complex JSON response with Node.js?

Using Node.js and Express, I'd like to return one or multiple objects (an array) using JSON. In the code below I output one JSON object at a time. It works, but it isn't exactly what I want: the response produced isn't valid JSON, since I have many objects.
I am well aware that I could simply add all objects to an array and return that specific array in res.end. However, I am afraid this could become heavy to process and memory intensive.
What is the proper way to achieve this with Node.js? Is query.each the right method to call?
app.get('/users/:email/messages/unread', function(req, res, next) {
  var query = MessageInfo
    .find({ $and: [ { 'email': req.params.email }, { 'hasBeenRead': false } ] });
  res.writeHead(200, { 'Content-Type': 'application/json' });
  query.each(function(err, msg) {
    if (msg) {
      res.write(JSON.stringify({ msgId: msg.fileName }));
    } else {
      res.end();
    }
  });
});
On Express 3 you can directly use res.json({foo: bar}):
res.json({ msgId: msg.fileName })
See the documentation
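For the multiple-objects case in the question, a sketch of the whole route with res.json (assuming the result set fits comfortably in memory; the Mongoose query is adapted from the question):

app.get('/users/:email/messages/unread', function (req, res) {
  MessageInfo
    .find({ email: req.params.email, hasBeenRead: false })
    .exec(function (err, results) {
      if (err) return res.status(500).json({ error: err.message });
      // res.json serializes the array and sets the Content-Type header for you
      res.json(results.map(function (msg) { return { msgId: msg.fileName }; }));
    });
});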
I don't know if this is really any different, but rather than iterate over the query cursor, you could do something like this:
query.exec(function (err, results) {
  if (err) res.writeHead(500, err.message)
  else if (!results.length) res.writeHead(404);
  else {
    res.writeHead(200, { 'Content-Type': 'application/json' });
    res.write(JSON.stringify(results.map(function (msg) { return { msgId: msg.fileName }; })));
  }
  res.end();
});
[Edit] After reviewing the Mongoose documentation, it looks like you can send each query result as a separate chunk; the web server uses chunked transfer encoding by default so all you have to do is wrap an array around the items to make it a valid JSON object.
Roughly (untested):
app.get('/users/:email/messages/unread', function(req, res, next) {
  var firstItem = true, query = MessageInfo.find(/*...*/);
  res.writeHead(200, {'Content-Type': 'application/json'});
  query.each(function(err, msg) {
    if (msg) {
      // Start the JSON array or separate the next element.
      res.write(firstItem ? (firstItem = false, '[') : ',');
      res.write(JSON.stringify({ msgId: msg.fileName }));
    } else {
      res.end(']'); // End the JSON array and response once the cursor is exhausted.
    }
  });
});
Alternatively, as you mention, you can simply send the array contents as-is. In this case the response body will be buffered and sent immediately, which may consume a large amount of additional memory (above what is required to store the results themselves) for large result sets. For example:
// ...
var query = MessageInfo.find(/*...*/);
query.exec(function (err, results) {
  res.writeHead(200, {'Content-Type': 'application/json'});
  res.end(JSON.stringify(results.map(function (x) { return { msgId: x.fileName }; })));
});