Blockchain API to determine transaction confirmations - json

I am trying to identify whether a transaction in the bitcoin blockchain has been confirmed or not. I have accessed a JSON representation of the transaction from blockchain.info using this url: https://blockchain.info/tx/62f9419e56ac1b628840aaf52307867f9856d7a52b3c1d945a9938a3021cbf2c?show_adv=false&format=json
I cannot find anything in the response that indicates how many confirmations it has...
{"block_height":221580,"time":1361068368,"inputs":[{"prev_out":{"n":0,"value":100000000,"addr":"1NaPjDPGcfaVCBd3cTmy4zEPjRbDwzkW49","tx_index":53213157,"type":0}},{"prev_out":{"n":0,"value":100000,"addr":"1FDBdn8cseukiteu1myGQCfgYnncdMNpFk","tx_index":53252395,"type":0}},{"prev_out":{"n":0,"value":100000000,"addr":"13QRi4W5bq3FWrNGrWGcF1dH4mSWD6Huun","tx_index":52575903,"type":0}},{"prev_out":{"n":0,"value":100000000,"addr":"1MhEJx1BodWATGxoZ7az3GnmUQwx2adCG2","tx_index":53376409,"type":0}},{"prev_out":{"n":1,"value":90000000,"addr":"1FosGa87ZSjoagVu1j8djiJKzUeLkhhp6P","tx_index":53308634,"type":0}},{"prev_out":{"n":0,"value":200000,"addr":"1DZzEunCP1SxBsz2aZah2q9WAFuYSsDrq9","tx_index":53272656,"type":0}},{"prev_out":{"n":1,"value":98500000,"addr":"19q8NEgZKQcQMMx5z16JETbe1bx6StNZfj","tx_index":53506579,"type":0}},{"prev_out":{"n":0,"value":100000000,"addr":"15LXjh36usUspAYsGnhURVEnPn86W7SPSu","tx_index":53532799,"type":0}},{"prev_out":{"n":1,"value":119000000,"addr":"1PNbeqfPgMjjL6sLdXqkNZyCSkGFHop3bz","tx_index":53492488,"type":0}},{"prev_out":{"n":1,"value":150000000,"addr":"153hqmnNqUM8RGWdLE12tj74aAyS9U2pe7","tx_index":53283295,"type":0}},{"prev_out":{"n":0,"value":100000000,"addr":"149BGgDjaMyYfYnrja4asYtuUnpsBjobnH","tx_index":53440208,"type":0}},{"prev_out":{"n":167,"value":35000,"addr":"1F3eAsYGC45s2Q8XiE7ywGXMr8QLB8FTCD","tx_index":53578752,"type":0}},{"prev_out":{"n":862,"value":5000,"addr":"1CD4Dcy3yUiBejmQX1hKfJi1y5ysAX9RwZ","tx_index":53578752,"type":0}},{"prev_out":{"n":0,"value":60000000,"addr":"1Q7hDXko9U8MxoAZGmYk2se6tf8WFSQbUK","tx_index":53305081,"type":0}},{"prev_out":{"n":1,"value":98000000,"addr":"1RuMjWETvUPAUqfJKhZ4GBo5tKuszbDTA","tx_index":53521527,"type":0}},{"prev_out":{"n":0,"value":100000000,"addr":"17rBkeKtc7APY5PQjbicBbucfaUA1PZSpm","tx_index":53511134,"type":0}},{"prev_out":{"n":0,"value":169216027,"addr":"1EBz5v7dJfBPJzSwivVQcY19eT5hUBxa8w","tx_index":53194652,"type":0}},{"prev_out":{"n":1,"value":80000000,"addr":"1KLn85reRxN1JZL1S3gD2Kp2x8LZ14rz6S","tx_index":53194567,"type":0}},{"prev_out":{"n":0,"value":100000000,"addr":"1EppQ2h8Ddvp1vsoSb2DLJqJAws2DrYnv9","tx_index":53190665,"type":0}},{"prev_out":{"n":174,"value":2,"addr":"1CDDR1vZtZPWc48v4brHmka3tDpXbuT9wd","tx_index":53620404,"type":0}},{"prev_out":{"n":1,"value":100000000,"addr":"12zQxFPPh5rsUyakdZZyADj2N5bFRFZRcd","tx_index":53540021,"type":0}},{"prev_out":{"n":1,"value":801624197,"addr":"1JEjtpHB7aZJm3QSRp76qQqchFfs4TjDeE","tx_index":53526428,"type":0}},{"prev_out":{"n":1,"value":100000000,"addr":"1K1Sn9V775d7i94voiYLLUSaFNUQ9BVj9Q","tx_index":53430153,"type":0}},{"prev_out":{"n":1156,"value":1,"addr":"1CxXkpmJ9Nr4S9b3rKeKU5WWLXGp5nv553","tx_index":53619724,"type":0}},{"prev_out":{"n":0,"value":99950000,"addr":"1Hp5GdoX4oXmjUD6ZRKvXNJQCZp2sk712c","tx_index":53229930,"type":0}},{"prev_out":{"n":1,"value":98000000,"addr":"12aqif4GXBd17N6EFj4onrHLd8febY4n6j","tx_index":53160076,"type":0}},{"prev_out":{"n":0,"value":357452267,"addr":"146eveRJD2YnxvNBw4hHtZn8xR3LHHVxtH","tx_index":53651895,"type":0}},{"prev_out":{"n":1,"value":60000000,"addr":"1FZximueHPa9sqTZSg2Q4LAsg91dZaJK5D","tx_index":53640062,"type":0}},{"prev_out":{"n":1,"value":100000000,"addr":"12VL7U1BLf8kLrkN3sca9w5dGVhVAy1kvD","tx_index":53578503,"type":0}},{"prev_out":{"n":0,"value":67315003,"addr":"1BbYNxYAGJJJz6wP4pK5eHmwcieRSiPDZm","tx_index":53437082,"type":0}},{"prev_out":{"n":1,"value":100000000,"addr":"1DoTTLAs5VUm2QHBXeL34h2kSfYnqSpsCj","tx_index":53116751,"type":0}},{"prev_out":{"n":1,"value":195951000,"addr":"1Epje2MuDrckP4zVJmR
XyEu5jWby2MvgHy","tx_index":53599874,"type":0}},{"prev_out":{"n":25,"value":1,"addr":"1CdACYi1JQDGekGPPc8bd3vq5d5v6s2KKY","tx_index":53620401,"type":0}},{"prev_out":{"n":105,"value":2,"addr":"15dsKW8yotixATZdDomBkRJh7YvzJJ4z7X","tx_index":53620401,"type":0}},{"prev_out":{"n":1,"value":90000000,"addr":"17ra7TQoPSmrxvLXGhupexd3Dk9fnZLM8Q","tx_index":53675760,"type":0}},{"prev_out":{"n":0,"value":100000000,"addr":"1Fm2j4k7XK8veWdeJaxDuZdujdQQh3mj9j","tx_index":53696368,"type":0}},{"prev_out":{"n":39,"value":1500,"addr":"1PB3DrsvvTMkxv7AoV5FdMAVbrnv1R9AvF","tx_index":53366964,"type":0}},{"prev_out":{"n":1,"value":70000000,"addr":"1HDxhL7H8thYC9RaLwACRoeTP6cjTERzkq","tx_index":53197639,"type":0}},{"prev_out":{"n":0,"value":60000000,"addr":"1Mzt5Y815fnf3rbCigx411bmUGqTiAMMMf","tx_index":53534495,"type":0}},{"prev_out":{"n":0,"value":3000000,"addr":"191cP1rSfJX9kATiujqbavKPHta8ryPbUk","tx_index":53397518,"type":0}},{"prev_out":{"n":0,"value":80000000,"addr":"1E7zhSRBexQYN98PxKZrnocZzb7yuCoobF","tx_index":53616202,"type":0}},{"prev_out":{"n":0,"value":60000000,"addr":"1DNGJSkn2jaBtzHpeu2EV8Za7GzLkYRKrk","tx_index":53211317,"type":0}},{"prev_out":{"n":0,"value":100000000,"addr":"1FPHkZfftpfBVYg46sBAZmU7k4R6nMWjgm","tx_index":53714935,"type":0}},{"prev_out":{"n":1,"value":80000000,"addr":"1NJmpCAfoeZa8M8RoWCT1PAs85k4URKwuL","tx_index":53437066,"type":0}},{"prev_out":{"n":1,"value":320000000,"addr":"17MCmBPgv2SEKsmGf1o7X6qbK59C1Pnwr3","tx_index":53584050,"type":0}},{"prev_out":{"n":0,"value":100000000,"addr":"1KrtH7ceJthEfBFd8t9G4Vohj2myGB1eDj","tx_index":53212472,"type":0}},{"prev_out":{"n":1,"value":103000000,"addr":"13zZzqKR3XYPUKpLWvGkimSJbDMaJim9Ru","tx_index":53218032,"type":0}},{"prev_out":{"n":1,"value":92500000,"addr":"1G5EdCerj7Yryoc4tpmCrFe7rvkbLcjHtz","tx_index":53633538,"type":0}},{"prev_out":{"n":0,"value":97800000,"addr":"1K59Q1UJSULsHhs4Rv8PiakEhDK689jQSj","tx_index":53232395,"type":0}}],"vout_sz":2,"relayed_by":"184.71.200.221","hash":"62f9419e56ac1b628840aaf52307867f9856d7a52b3c1d945a9938a3021cbf2c","vin_sz":52,"tx_index":53744354,"ver":1,"out":[{"n":0,"value":1000000,"addr":"1cm8zPZqjfWs5MBg8yKxJwWvDAkqF4CVu","tx_index":53744354,"type":0},{"n":1,"value":5000000000,"addr":"1EGP5pSnttKRdAcPxdiTviSrjsyHEAnXhy","tx_index":53744354,"type":0}],"size":9439}

The concept of "confirmation" on Bitcoin essentially translates to how many blocks have been generated since the block containing transaction. In other words, you'll have to issue another request to get the current network block count (http://blockchain.info/q/getblockcount), and the confirmation count then becomes current_block_count - transaction_block_height + 1. Note that block_height is only present in the response if at least one block was generated since (i.e. new transactions may not even include the element).

Here is sample Node.js code to determine the confirmation count of a transaction.
const axios = require('axios');

async function getConfirmations(transactionId) {
  try {
    const txResponse = await axios.get(`https://blockchain.info/rawtx/${transactionId}`);
    const blockHeight = txResponse.data.block_height;
    // An unconfirmed transaction has no block_height yet
    if (blockHeight === undefined) {
      return 0;
    }
    const latestResponse = await axios.get('https://blockchain.info/latestblock');
    const currentBlockHeight = latestResponse.data.height;
    return currentBlockHeight - blockHeight + 1;
  } catch (error) {
    console.error(error);
  }
}
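For example, using the transaction hash from the question:

getConfirmations('62f9419e56ac1b628840aaf52307867f9856d7a52b3c1d945a9938a3021cbf2c')
  .then(confirmations => console.log(`Confirmations: ${confirmations}`));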


Determining the CID of larger (> 256KB) files

I'm setting up a repo to be used in projects where CID (related) data needs to be transacted on-chain, and where I'm following a workflow of:
1.) Establishing the CID data;
2.) Transacting said data;
3.) Publishing/Importing data in to IPFS after a successful transaction.
The main purpose of the repo is to reliably determine CIDs without importing data into IPFS (step 1). The workflow is aimed at avoiding the risk of front-running, based on data becoming publicly available before the transaction in step 2 is completed (or even initiated). My thoughts on this perceived risk basically run down to this:
The purpose of the on-chain transaction is not only to uniquely identify content (say, NFT material), but also to establish/determine its provenance, or create some form of connection with its creator/(original) owner. Clearly the public availability of such content detracts from the core purpose of such a transaction, since it allows for - let's call it - NFT front-running: one could monitor the network, or the specific nodes generally involved in publishing NFT(-like) material to the IPFS network, for announcements of data that has been added, and claim the mentioned connection for oneself.
Question: Is this "problem" as practical as I am perceiving it? I can only find very limited information on mitigating this risk, even though 1.) the practice of creating gas-less NFTs is increasingly popular, and 2.) most of the data will likely enter IPFS through pinning services (with presumably short announcement intervals) instead of via self-managed nodes, where one could programmatically decouple adding (pre-transaction) from pinning/announcing (post-transaction).
Issue (the main reason for this entry): I'm having difficulty establishing the CID for content exceeding the block size.
Creating a DAG Node and adding children to it:
// parts of the test file cid-from-scratch.js
describe("Create DAG-PB root from scratch", function() {
  let dagNode;
  it("Creates root DAG NODE", async function() {
    this.timeout(6000);
    dagNode = await sliceAddLink(buffer, new DAGNode())
    // Comparing the length of the Links property of the created root node
    // with that retrieved from the local node, using the same content
    assert.equal(dagNode.Links.length, localDag.value.Links.length,
      "Expected same amount of children in created, as in retrieved DAG ")
    for (i = 0; i < dagNode.Links.length; i++) {
      console.log("Children Strings: ", dagNode.Links[i].Hash.toString())
      // Comparing the strings of the children
      assert.equal(dagNode.Links[i].Hash.toString(), localDag.value.Links[i].Hash.toString(), "Children's CID should be same")
    }
    console.log(dagNode)
  })
})
/**
 *
 * @param {Buffer} buffer2Slice The full content Buffer
 * @param {Object} dagNode new DAGNode()
 */
function sliceAddLink(buffer2Slice, dagNode) {
  return new Promise(async function(resolve, reject) {
    try {
      while (buffer2Slice.length > 0) {
        let slice = buffer2Slice.slice(0, 262144); // 256 KiB chunks
        buffer2Slice = buffer2Slice.slice(262144);
        // spread a sparse array to skip the optional args and pass cidCode 85 (raw)
        let sliceAddResult = await createCid(slice, ...[, , ], 85);
        let link = { Tsize: slice.length, Hash: sliceAddResult };
        dagNode.addLink(link);
      }
      resolve(dagNode)
    } catch (err) {
      reject(err)
    }
  })
}
/**
 *
 * @param {Buffer} content
 * @param {string} [hashAlg] // optional
 * @param {number} [cidVersion] // optional
 * @param {number} [cidCode] // optional - should be set to 85 when creating a DAG-LINK
 */
async function createCid(content, hashAlg, cidVersion, cidCode) {
  hashAlg = hashAlg || cidOptions.hashAlg;
  cidVersion = cidVersion || cidOptions.cidVersion;
  cidCode = cidCode || cidOptions.code;
  let fileHash = await multiHashing(content, hashAlg);
  return new CID(cidVersion, cidCode, fileHash);
}
The Links property of the created DAG node matches that of the DAG retrieved from the local IPFS node and from Infura (based on the same content, of course). The problem is that, unlike the retrieved DAG nodes, the Data field of the created DAGNode is empty (and therefore yields a different CID):
DAGNode retrieved: Data property containing data
DAGNode created: Data property is empty
I'm adding to IPFS like so:
/**
 * @note ipfs.add with the preset options
 * @param {*} content Buffer (of File) to be published
 * @param {*} ipfs The ipfs instance involved
 */
async function assetToIPFS(content, ipfs) {
  let result = await ipfs.add(content, cidOptions)
  return result;
}

// Using the following ADD options
const cidOptions = {
  cidVersion: 1, // ipfs.add default = 0
  hashAlg: 'sha2-256',
  code: 112
}
it("Adding publicly yields the same CID result", async function() {
// longer then standard time out because of interaction with public IPFS gateway
this.timeout(6000);
_ipfsInfura = await ipfsInfura;
let addResult = await assetToIPFS(buffer, _ipfsInfura)
assert.equal(localAddResult.cid.toString(), addResult.cid.toString(), "Different CIDS! (expected same)")
assert.equal(localAddResult.size, addResult.size, "Expected same size!")
})
and subsequently getting the DAG (from both the local node and the Infura node) like so, to compare them with the created DAG:
// Differences in DAG-PB object representation between Infura and local node!
it("dag_get local and dag_get infura yield the same Data and Links array", async function() {
  this.timeout(6000);
  let cid = localAddResult.cid
  localDag = await dagGet(cid, _ipfsLocal);
  let infuraDag = await dagGet(cid, _ipfsInfura);
  console.log("Local DAG: ", localDag.value);
  console.log("Infura DAG: ", infuraDag.value);
  // Differences in DAG-PB object representation between Infura and local node:
  // Data is a Buffer in localDag and a Uint8Array in infuraDag
  assert.equalBytes(await dagData(localDag.value), await dagData(infuraDag.value), 'Expected Equal Data')
  assert.equal(infuraDag.value.Links.length, localDag.value.Links.length, "Expected same amount of children")
  assert(localDag.value.Links.length > 0, "Should have children (DAG-LINK objects)")
  for (i = 0; i < localDag.value.Links.length; i++) {
    assert.equal(infuraDag.value.Links[i].Hash.toString(), localDag.value.Links[i].Hash.toString())
  }
})
The IPFS docs on working with blocks state that "A given file's 'hash' is actually the hash of the root (uppermost) node in the DAG."
You could suspect that the DAG node's Data field plays a role in this. On the other hand, looking at the length of "Data", at the js-ipfs examples on dag.put ('Some data'), and at the IPLD DAG-PB specification ("Data may be omitted or a byte array with a length of zero or more"), this property seems rather arbitrary. (The Data array/buffer from both the Infura and the local IPFS node has the same content.)
How can I create a root DAG node (using a content buffer and CID options), with not only the same Links property, but also the same Data property as the DAG root I'm getting after adding the same content buffer to an ipfs instance?
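For what it's worth, the Data field of a UnixFS file root is not arbitrary: it holds a serialized UnixFS protobuf message recording the node type ("file") and the raw block size of each child. A minimal sketch of constructing it, assuming the ipfs-unixfs package (whose import shape and constructor signature vary between versions):

const { UnixFS } = require('ipfs-unixfs') // assumption: newer releases export the class like this

function buildRootData(sliceLengths) {
  // A UnixFS "file" entry; addBlockSize records the unencoded data length of each child
  const file = new UnixFS({ type: 'file' })
  for (const len of sliceLengths) {
    file.addBlockSize(len)
  }
  // marshal() serializes the UnixFS message; these bytes are what belongs in the
  // root node's Data property, e.g. new DAGNode(file.marshal(), links)
  return file.marshal()
}

With raw-leaf children (codec 85) the unencoded length of each slice is simply slice.length, so the values passed to addBlockSize should coincide with the Tsize values used in sliceAddLink.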

Add custom key in fetched JSON

I am making a simple blog website in React, and when I fetch posts I get the id of the user who posted each one. With this id I make another axios request to get the user, and then I assign an author key to my fetched posts JSON, like this:
export const getPosts = async () => {
  try {
    const { data } = await axios.get(
      "https://jsonplaceholder.typicode.com/posts"
    );
    for (const item of data) {
      let user = await getUser(item["userId"]);
      item["author"] = user.username;
    }
    return data;
  } catch (err) {
    toast.error(err.message);
  }
};

export const getUser = async (id) => {
  try {
    const response = await axios.get(
      "https://jsonplaceholder.typicode.com/users/" + id
    );
    return response.data;
  } catch (err) {
    toast.error(err.message);
  }
};
This method causes a 5-10 second delay before the posts are displayed. I am searching for a faster and simpler way to display the username on every post.
You can also tell me if there is a better way to fetch the data and display it.
Thanks!
You have done it exactly the right way, if there is no other endpoint available to fetch author information for multiple posts at a time.
Well, this is meant to be an answer, but I'd start with a question.
Do you have access to the maintainer or developer handling the RESTful API endpoint you are fetching from?
If yes:
Ask them to simply include the author information in the response when fetching the post(s), or to provide you with an endpoint that returns author information for many posts at a time.
If no:
Go over the documentation provided (if any) and see if there is any endpoint that allows you to fetch author information for multiple posts with a single request.
If none of the above options works out, kindly remember to block the UI while fetching the resources.
You can also fetch a few authors first and display them while fetching more in the background (or on the user's request for more); that way you give a better user experience.
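One concrete speed-up that needs no new endpoint: the loop in the question awaits each user request one at a time, so 100 posts means 100 sequential round-trips. Fetching the users in parallel, and only once per distinct userId, cuts the wait to roughly one round-trip. A sketch reusing the getUser helper from the question (getPostsFast is just an illustrative name):

export const getPostsFast = async () => {
  try {
    const { data } = await axios.get(
      "https://jsonplaceholder.typicode.com/posts"
    );
    // Fetch each distinct user only once, and all of them in parallel
    const userIds = [...new Set(data.map((item) => item.userId))];
    const users = await Promise.all(userIds.map((id) => getUser(id)));
    const usernameById = new Map(users.map((u) => [u.id, u.username]));
    for (const item of data) {
      item.author = usernameById.get(item.userId);
    }
    return data;
  } catch (err) {
    toast.error(err.message);
  }
};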
Happy hacking and coding 😜

How to get around previously declared json body-parser in Nodebb?

I am writing a private plugin for NodeBB (open forum software). In NodeBB's webserver.js file there is a line that seems to be hogging all incoming JSON data.
app.use(bodyParser.json(jsonOpts));
I am trying to convert all incoming JSON data for one of my endpoints into raw data. However, the challenge is that I cannot remove or modify the line above.
The following code works ONLY if I temporarily remove the line above.
var rawBodySaver = function (req, res, buf, encoding) {
  if (buf && buf.length) {
    req.rawBody = buf.toString(encoding || 'utf8');
  }
}

app.use(bodyParser.json({ verify: rawBodySaver }));
However, as soon as I put the app.use(bodyParser.json(jsonOpts)); middleware back into webserver.js, it stops working. So it seems body-parser only runs the first parser that matches the incoming content type and then skips all the rest?
How can I get around that? I could not find any information in their official documentation.
Any help is greatly appreciated.
** Update **
The problem I am trying to solve is to correctly handle an incoming Stripe webhook event. The official Stripe documentation suggests I do the following:
// Match the raw body to content type application/json
app.post('/webhook', bodyParser.raw({type: 'application/json'}), (request, response) => {
  const sig = request.headers['stripe-signature'];
  let event;
  try {
    event = stripe.webhooks.constructEvent(request.body, sig, endpointSecret);
  } catch (err) {
    return response.status(400).send(`Webhook Error: ${err.message}`);
  }
  // ... handle the event, then acknowledge receipt
  response.json({received: true});
});
Both methods, the original at the top of this post and the official Stripe-recommended way, construct the Stripe event correctly, but only if I remove the middleware in webserver.js. So my understanding now is that you cannot have multiple middlewares handle the same incoming data. I don't have much wiggle room with the first middleware, except that I can modify the argument (jsonOpts) being passed to it, which comes from a .json file. I tried adding a verify field, but I couldn't figure out how to add a function as its value. I hope this makes sense, and sorry for not stating initially what problem I am trying to solve.
The only solution I can find without modifying the NodeBB code is to insert your middleware in a convenient hook (which will be later in the chain than you want) and then hack into the layer list in the app router to move that middleware earlier in the list, so it gets in front of the things you want it to run before.
This is a hack, so if Express changes its internal implementation at some future time, this could break. But if they ever change this part of the implementation, it would likely only be in a major revision (as in Express 4 ==> Express 5), and you could just adapt the code to fit the new scheme, or perhaps NodeBB will have given you an appropriate hook by then.
The basic concept is as follows:
1. Get the router you need to modify. It appears it's the app router you want for NodeBB.
2. Insert your middleware/route as you normally would, letting Express do all the normal setup and append it to the internal Layer list in the app router.
3. Reach into the list, take your layer off the end (where it was just added), and insert it earlier in the list.
4. Figure out where to put it earlier in the list. You probably don't want it at the very start of the list, because that would put it before some helpful system middleware that makes things like query parameter parsing work. So the code looks for the first layer whose name we don't recognize among the built-in names we know, and inserts your layer right before it (i.e. just after the built-in ones).
Here's the code for a function to insert your middleware.
function getAppRouter(app) {
  // History:
  //   Express 4.x throws when accessing app.router and the router is on app._router,
  //   but the router is lazily initialized with app.lazyrouter().
  //   Express 5.x again supports app.router and handles the lazy construction
  //   of the router for you.
  let router;
  try {
    router = app.router; // works for Express 5.x; Express 4.x throws on access
  } catch (e) {}
  if (!router) {
    // Express 4.x
    if (typeof app.lazyrouter === "function") {
      // make sure the router has been created
      app.lazyrouter();
    }
    router = app._router;
  }
  if (!router) {
    throw new Error("Couldn't find app router");
  }
  return router;
}
// insert a method on the app router near the front of the list
function insertAppMethod(app, method, path, fn) {
  let router = getAppRouter(app);
  let stack = router.stack;
  // allow the function to be called with no path,
  // as insertAppMethod(app, method, fn);
  if (typeof path === "function") {
    fn = path;
    path = null;
  }
  // add the handler to the end of the list
  if (path) {
    app[method](path, fn);
  } else {
    app[method](fn);
  }
  // now remove it from the stack
  let layerObj = stack.pop();
  // and insert it near the front of the stack,
  // but after a couple of pre-built middlewares installed by Express itself
  let skips = new Set(["query", "expressInit"]);
  for (let i = 0; i < stack.length; i++) {
    if (!skips.has(stack[i].name)) {
      // insert it here, before this item
      stack.splice(i, 0, layerObj);
      break;
    }
  }
}
You would then use this to insert your method like this from any NodeBB hook that provides you the app object sometime during startup. It will create your /webhook route handler and then insert it earlier in the layer list (before the other body-parser middleware).
let rawMiddleware = bodyParser.raw({type: 'application/json'});

insertAppMethod(app, 'post', '/webhook', (request, response, next) => {
  rawMiddleware(request, response, (err) => {
    if (err) {
      next(err);
      return;
    }
    const sig = request.headers['stripe-signature'];
    let event;
    try {
      event = stripe.webhooks.constructEvent(request.body, sig, endpointSecret);
      // you need to either call next() or send a response here
    } catch (err) {
      return response.status(400).send(`Webhook Error: ${err.message}`);
    }
  });
});
The bodyParser.json() middleware does the following:
Check the Content-Type of an incoming request to see if it is application/json.
If it is that type, then read the body from the incoming stream to get all the data from the stream.
When it has all the data from the stream, parse it as JSON and put the result into req.body so follow-on request handlers can access the already-read and already-parsed data there.
Because it reads the data from the stream, there is no longer any data left in the stream. Unless it saves the raw data somewhere (I haven't looked to see if it does), the original raw data is gone - it has already been read from the stream. This is why you can't have multiple different middlewares all trying to process the same request body: whichever one goes first reads the data from the incoming stream, and the original data is then no longer in the stream.
To help you find a solution, we need to know what end problem you're really trying to solve. You will not be able to have two middlewares that both look for the same content type and both read the request body. You could replace bodyParser.json() with a single middleware that does both what it does now and what you need for your purpose, but you can't do it with separate middlewares.
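Given that the asker can modify the jsonOpts argument, the least invasive fix may be body-parser's documented verify option (the same one used at the top of this post): it lets the single json parser capture the raw bytes while still parsing req.body as before. A sketch, assuming you can set the option from code rather than from the .json config file:

// jsonOpts is the options object NodeBB already passes to bodyParser.json();
// a verify callback runs on the raw buffer before it is parsed
jsonOpts.verify = function (req, res, buf, encoding) {
  if (buf && buf.length) {
    req.rawBody = buf.toString(encoding || 'utf8');
  }
};
app.use(bodyParser.json(jsonOpts));

// Later, in the Stripe webhook handler, verify against the saved raw body:
// stripe.webhooks.constructEvent(req.rawBody, sig, endpointSecret)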

How do you properly call a conditional based on the intent's displayName for dialogflow?

I am trying to do webhook fulfillment for my Dialogflow agent. However, there are four specific intents that should each have a different JSON response based on which intent is called. Right now I am creating a switch case based on the called intent's displayName, but that is not working. Should I be using a different parameter than displayName to check which intent was called?
Here is my code, which only outputs "test":
server.post("/get-bill-details", function(req, res) {
let intentName = req.body.queryResult.intent.displayName;
let ret = "test";
if(intentName == "1 - Bill"){
ret = "your billing amount is $120.";
}
return res.json({
fulfillmentText: ret,
source: "get-bill-details"
});
});
I would suggest you use the client libraries, as they will ease the process of parsing the JSON and reduce your development time. There are NodeJS and Python clients for Dialogflow. Also, if you need Google Assistant, there is a NodeJS library for building webhooks. They all have documentation on how to build webhooks in the cloud or by using Express and other frameworks.
Instead of matching on the intent name, give your intent an action name (try not to use any spaces, e.g. input.welcome).
Then get the action parameter and switch on it:
let action = req.body.queryResult.action;
switch (action) {
  case 'input.welcome':
    // your logic..
    break;
}
Also, as abhinav said, you can use this library to reduce your development time and improve the readability of your code; it also helps with cross-platform responses for Cards, Images, and Suggestions.
const { WebhookClient } = require('dialogflow-fulfillment');

server.post('/', function (request, response, next) {
  const agent = new WebhookClient({ request, response });

  const welcome = () => {
    agent.add('Hello Welcome To My bot');
  }

  let intentMap = new Map();
  intentMap.set('Default Welcome Intent', welcome);
  agent.handleRequest(intentMap);
});

How to parse or stringify JSON in an asynchronous way in javascript

I see that JSON.stringify and JSON.parse are both synchronous.
I would like to know if there is a simple npm library that does this in an asynchronous way.
Thank you
You can make anything "asynchronous" by using Promises:
function asyncStringify(str) {
  return new Promise((resolve, reject) => {
    resolve(JSON.stringify(str));
  });
}
Then you can use it like any other promise:
asyncStringify(str).then(ajaxSubmit);
Note that because the code is not actually asynchronous, the promise will be resolved right away; the stringify still runs synchronously on the calling thread, so wrapping it in a promise does not make it non-blocking.
You can also use the async/await API if your platform supports it:
async function asyncStringify(str) {
  return JSON.stringify(str);
}
Then you can use it the same way:
asyncStringify(str).then(ajaxSubmit);

// or use the "await" API
const strJson = await asyncStringify(str);
ajaxSubmit(strJson);
Edited: One way of adding truly asynchronous parsing/stringifying (maybe because we're parsing something too complex) is to pass the job to another process (or service) and wait for the response.
You can do this in many ways (like creating a new service that exposes a REST API); I will demonstrate here a way of doing it with message passing between processes:
First create a file that will take care of doing the parsing/stringifying. Call it async-json.js for the sake of the example:
// async-json.js
function stringify(value) {
  return JSON.stringify(value);
}

function parse(value) {
  return JSON.parse(value);
}

process.on('message', function(message) {
  let result;
  if (message.method === 'stringify') {
    result = stringify(message.value);
  } else if (message.method === 'parse') {
    result = parse(message.value);
  }
  process.send({ callerId: message.callerId, returnValue: result });
});
All this process does is wait for a message asking it to stringify or parse a JSON value and then respond with the result.
Now, in your code, you can fork this script and send messages back and forth. Whenever a request is sent, you create a new promise; whenever a response to that request comes back, you resolve the promise:
const fork = require('child_process').fork;
const asyncJson = fork(__dirname + '/async-json.js');
const callers = {};

asyncJson.on('message', function(response) {
  callers[response.callerId].resolve(response.returnValue);
  delete callers[response.callerId]; // clean up the settled entry
});

function callAsyncJson(method, value) {
  const callerId = Math.floor(Math.random() * 1000000); // simplistic unique id
  const callPromise = new Promise((resolve, reject) => {
    callers[callerId] = { resolve: resolve, reject: reject };
    asyncJson.send({ callerId: callerId, method: method, value: value });
  });
  return callPromise;
}

function JsonStringify(value) {
  return callAsyncJson('stringify', value);
}

function JsonParse(value) {
  return callAsyncJson('parse', value);
}

JsonStringify({ a: 1 }).then(console.log.bind(console));
JsonParse('{ "a": "1" }').then(console.log.bind(console));
Note: this is just one example, but knowing this you can figure out other improvements or other ways to do it. Hope this is helpful.
Check this out - another npm package:
async-json is a library that provides an asynchronous version of the standard JSON.stringify.
Install-
npm install async-json
Example-
var asyncJSON = require('async-json');

asyncJSON.stringify({ some: "data" }, function (err, jsonValue) {
  if (err) {
    throw err;
  }
  jsonValue === '{"some":"data"}'; // jsonValue now holds the serialized string
});
Note: I didn't test it; you need to manually check its dependencies and required packages.
By asynchronous I assume you actually mean non-blocking asynchronous - i.e., if you have a large (megabytes large) JSON payload and you stringify or parse it, you don't want your web server to hard freeze and block newly incoming web requests for 500+ milliseconds while it processes the object.
Option 1
The generic answer is to iterate through your object piece by piece, calling setImmediate whenever a threshold is reached. This allows other functions in the event queue to run for a bit.
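For illustration, here is a minimal sketch of that idea for the special case of a large array: serialize a fixed number of elements per turn and yield to the event loop with setImmediate in between (a real implementation, such as the library mentioned next, has to handle arbitrarily nested values):

function stringifyArrayAsync(arr, chunkSize = 1000) {
  return new Promise((resolve) => {
    const parts = [];
    let i = 0;
    (function next() {
      // serialize one chunk synchronously...
      const end = Math.min(i + chunkSize, arr.length);
      for (; i < end; i++) {
        parts.push(JSON.stringify(arr[i]));
      }
      if (i < arr.length) {
        setImmediate(next); // ...then let other queued events run
      } else {
        resolve('[' + parts.join(',') + ']');
      }
    })();
  });
}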
For JSON (de)serialization, the yieldable-json library does this very well. It does, however, drastically sacrifice raw JSON processing speed (which is somewhat intentional).
Usage example from the yieldable-json readme:
const yj = require('yieldable-json')

yj.stringifyAsync({key: "value"}, (err, data) => {
  if (!err)
    console.log(data)
})
Option 2
If processing speed is extremely important (such as with real-time data), you may want to consider spawning multiple Node processes instead. I've used the PM2 Process Manager with great success, although the initial setup was quite daunting. Once it works, however, the final result is magic, and it does not require modifying your source code, just your package.json file. It acts as a proxy, load balancer, and monitoring tool for Node applications. It's somewhat analogous to Docker swarm, but bare metal, and does not require a special client on the server.
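If you'd rather offload the work without a process manager, Node's built-in worker_threads module supports the same pattern as the child-process example above. A minimal sketch (hypothetical file names; note that results come back via structured clone, which itself has a copying cost):

// json-worker.js
const { parentPort } = require('worker_threads');

parentPort.on('message', ({ id, method, value }) => {
  // do the heavy (de)serialization off the main thread
  const result = method === 'parse' ? JSON.parse(value) : JSON.stringify(value);
  parentPort.postMessage({ id, result });
});

// main.js
const { Worker } = require('worker_threads');

const worker = new Worker(__dirname + '/json-worker.js');
const pending = new Map();
let nextId = 0;

worker.on('message', ({ id, result }) => {
  pending.get(id).resolve(result);
  pending.delete(id);
});

function parseAsync(value) {
  return new Promise((resolve, reject) => {
    const id = nextId++;
    pending.set(id, { resolve, reject });
    worker.postMessage({ id, method: 'parse', value });
  });
}

parseAsync('{ "a": 1 }').then(console.log.bind(console));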