too many xhr requests in chrome - google-chrome

I have a tight loop that fetches PNG files via XHR2. It works fine in Firefox and IE10, but in Chrome, when the list of files hits about 5,500, I start getting XHR errors. I like the async interface since I am interleaving these requests with local IndexedDB store requests.
Code below (I am using xhr2lib for the fetches and PouchDB for the IndexedDB API).
I know that it is the XHR2 that is failing, since when this works in Chrome, all the XHR2 calls are processed before the SaveDB() calls. When it fails, I never get the save calls.
function getBlobs(fileList) {
    console.log("starting to fetch blobs");
    $.each(fileList, function (i, val) {
        var path = baseURL + val.id + "." + imageType;
        $xhr.ajax({
            url: path,
            dataType: "blob",
            success: function (data) { saveBlob(data, val.size, val.id); }
        });
    });
}

function saveBlob(blob, length, id) {
    if (blob.size != length) {
        console.error("Blob Length found: " + blob.size + " expected: " + length);
    }
    putBlob(blob, id);
    ++fetchCnt;
    if (fetchCnt == manifest.files.length) {
        setTimeout(fetchComplete, 0);
    }
}

function fetchComplete() {
    var startTime = vm.get("startTime");
    var elapsed = new Date() - startTime;
    var fetchTime = ms2Time(elapsed);
    vm.set("fetchTime", fetchTime);
}
function putBlob(blob, id) {
    var cnt;
    var type = blob.type;
    DB.putAttachment(id + "/pic", blob, type, function (err, response) {
        if (err) {
            console.error("Could not store blob: error: " + err.error + " reason: " + err.reason + " status: " + err.status);
        } else {
            console.log("saved: ", response.id + " rev: " + response.rev);
            cnt = vm.get("blobCount");
            vm.set("blobCount", ++cnt);
            if (cnt == manifest.files.length) {
                setTimeout(storeComplete, 0);
            }
        }
    });
}

The Chromium folks acknowledge this is something that they should fix: https://code.google.com/p/chromium/issues/detail?id=244910, but in the meantime I have implemented throttling using jQuery Deferred/resolve to keep the number of requests in flight low.
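For reference, the throttling is conceptually like the sketch below. It is a simplified version: it uses a plain counter and queue rather than the actual jQuery Deferred plumbing, and a plain XMLHttpRequest wrapper instead of the xhr2lib call, so treat the names as illustrative. It keeps at most maxInFlight requests active and starts the next queued file as each one completes:

// Plain XHR2 fetch used by the sketch; responseType "blob" hands us a Blob directly.
function fetchBlob(path, done) {
    var xhr = new XMLHttpRequest();
    xhr.open("GET", path);
    xhr.responseType = "blob";
    xhr.onload = function () { done(xhr.response, null); };
    xhr.onerror = function () { done(null, new Error("xhr failed: " + path)); };
    xhr.send();
}

function getBlobsThrottled(fileList, maxInFlight) {
    var queue = fileList.slice();   // work on a copy of the manifest list
    var active = 0;

    function next() {
        while (active < maxInFlight && queue.length > 0) {
            fetchOne(queue.shift());
        }
    }

    function fetchOne(val) {
        active++;
        fetchBlob(baseURL + val.id + "." + imageType, function (blob, err) {
            if (!err) saveBlob(blob, val.size, val.id);
            active--;
            next();   // a slot freed up: start the next queued request
        });
    }

    next();
}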

Related

Extracting emitted events (logs) from geth transaction trace (debug_traceCall)

When using debug_traceCall, I get a low-level EVM trace of all opcodes and state changes during the execution, which is excessively detailed. When I use the default callTracer, I can get a much nicer call tree. However, neither way can I seem to extract the emitted events from the trace. I can see them in the trace (the LOG* opcodes), but there is no easy way to actually parse them into something "readable" (along with the values and the originating address). There must be a way to get the logs - any ideas?
E.g. this is what Etherscan shows: https://etherscan.io/tx-decoder?tx=0x3e3ad35fda1fddd9e154b3860b50371a1acd2fdb4f27f897e234846522bde732 (see the Emitted Events section).
So I figured this out myself - I created a custom JavaScript tracer for geth that is passed to geth in the 3rd param of debug_traceCall (see the debug_traceCall API reference):
{
    data: [],
    fault: function (log) {
    },
    step: function (log) {
        var topicCount = (log.op.toString().match(/LOG(\d)/) || [])[1];
        if (topicCount) {
            var res = {
                address: log.contract.getAddress(),
                data: log.memory.slice(parseInt(log.stack.peek(0)), parseInt(log.stack.peek(0)) + parseInt(log.stack.peek(1))),
            };
            for (var i = 0; i < topicCount; i++)
                res['topic' + i.toString()] = log.stack.peek(i + 2);
            this.data.push(res);
        }
    },
    result: function () {
        return this.data;
    }
}
This tracer is executed by geth for each operation in the trace. Essentially what it does:
check if this is one of LOG0, LOG1, LOG2, LOG3 or LOG4 EVM opcodes
extract contract address from current contract
extract default topic0 and subsequent topics (if any)
extract additional event data from memory (note: stack[0] is offset, stack[1] is data size)
Passing the tracer to geth looks like this:
res = await ethersProvider.send('debug_traceCall', [{
    from: tx.from,
    to: tx.to,
    gas: BigNumber.from(tx.gas)._hex.replace('0x0', '0x'),
    gasPrice: BigNumber.from(tx.gasPrice)._hex.replace('0x0', '0x'),
    value: BigNumber.from(tx.value)._hex.replace('0x0', '0x'),
    data: tx.input
}, "latest", {
    tracer: "{\n" +
        "    data: [],\n" +
        "    fault: function (log) {\n" +
        "    },\n" +
        "    step: function (log) {\n" +
        "        var topicCount = (log.op.toString().match(/LOG(\\d)/) || [])[1];\n" +
        "        if (topicCount) {\n" +
        "            var res = {\n" +
        "                address: log.contract.getAddress(),\n" +
        "                data: log.memory.slice(parseInt(log.stack.peek(0)), parseInt(log.stack.peek(0)) + parseInt(log.stack.peek(1))),\n" +
        "            };\n" +
        "            for (var i = 0; i < topicCount; i++)\n" +
        "                res['topic' + i.toString()] = log.stack.peek(i + 2);\n" +
        "            this.data.push(res);\n" +
        "        }\n" +
        "    },\n" +
        "    result: function () {\n" +
        "        return this.data;\n" +
        "    }\n" +
        "}",
    enableMemory: true,
    enableReturnData: true,
    disableStorage: true
}])
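To turn the raw entries returned by the tracer into readable events, one option is to re-encode the topics as 32-byte hex words and hand them, together with the data, to an ethers Interface. This is only a sketch: it assumes ethers v5, that you have the ABI of the emitting contract, and that the topics arrive as numeric values and the data as a byte array (the exact serialization depends on the geth version), so the conversions may need adjusting:

const { ethers } = require("ethers");

// Hypothetical ABI fragment - substitute the real ABI of the contract(s) you expect.
const iface = new ethers.utils.Interface([
    "event Transfer(address indexed from, address indexed to, uint256 value)"
]);

// `entry` is one element of the array returned by the custom tracer above.
function decodeEntry(entry) {
    const topics = [];
    for (let i = 0; entry["topic" + i] !== undefined; i++) {
        // topics come back as numbers: re-encode them as 0x-prefixed 32-byte words
        topics.push(ethers.utils.hexZeroPad(
            ethers.BigNumber.from(entry["topic" + i].toString()).toHexString(), 32));
    }
    const data = ethers.utils.hexlify(entry.data);            // memory slice -> hex string
    return iface.parseLog({ topics: topics, data: data });    // throws if no matching event in the ABI
}

Depending on the geth version, the built-in callTracer may also accept a tracerConfig of { withLog: true }, which attaches the emitted logs to each call frame and can avoid the custom tracer entirely.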

Google analytics for Google apps-script doesn't show any data after a few weeks

I have an Apps Script for which I want to count impressions in Google Analytics.
I have created a new GA account + data stream.
I copied its measurement ID and put it in my Apps Script code:
/**
 * Function to track all (internal/external) usage
 * @param {string} page tracked
 */
function sendGA(page) {
    var cid = Utilities.base64EncodeWebSafe(Utilities.computeDigest(
        Utilities.DigestAlgorithm.SHA_256,
        ScriptApp.getService().getUrl()));
    try {
        var data = {
            'v': '1',
            'tid': 'G-9S9...',
            'z': Math.floor(Math.random() * 10E7),
            't': 'pageview',
            'dl': SpreadsheetApp.getActiveSpreadsheet().getUrl() + '/' + page,
            'cid': cid
        };
        var payload = Object.keys(data).map(
            function (key) {
                return encodeURIComponent(key) + '=' + encodeURIComponent(data[key]);
            }
        ).join('&');
        var options = {
            'method': 'POST',
            'payload': payload
        };
        UrlFetchApp.fetch('http://www.google-analytics.com/collect', options);
    } catch (err) {
        Logger.log(err);
    }
}
These were my installation steps - have I missed anything?
And does the measurement ID go in here: 'tid': 'G-9S9...'?
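For what it's worth, the payload above follows the old Universal Analytics Measurement Protocol (v=1, tid and the /collect endpoint), while a G-... measurement ID belongs to a GA4 property; GA4's Measurement Protocol uses the /mp/collect endpoint with a measurement_id, an api_secret and a JSON body. A minimal Apps Script sketch of that variant (G-XXXXXXX and YOUR_API_SECRET are placeholders, not real values):

function sendGA4(page) {
    var cid = Utilities.base64EncodeWebSafe(Utilities.computeDigest(
        Utilities.DigestAlgorithm.SHA_256,
        ScriptApp.getService().getUrl()));
    var endpoint = 'https://www.google-analytics.com/mp/collect'
        + '?measurement_id=G-XXXXXXX'       // GA4 measurement ID (placeholder)
        + '&api_secret=YOUR_API_SECRET';    // created under the data stream's Measurement Protocol API secrets
    var body = {
        client_id: cid,
        events: [{
            name: 'page_view',
            params: {
                page_location: SpreadsheetApp.getActiveSpreadsheet().getUrl() + '/' + page
            }
        }]
    };
    UrlFetchApp.fetch(endpoint, {
        method: 'post',
        contentType: 'application/json',
        payload: JSON.stringify(body)
    });
}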

Card payment error 508 - Realex payments error number: 61,754

I am using Realex Payments with an iframe.
I can load the payment page correctly, but as soon as I hit 'Pay Now'
it returns Error: 508
Message: An error has occurred processing your request. Please contact the merchant whose goods or services you are purchasing quoting the following error number: 61,754
Most of the time it returns a correct response string, either successful or declined, but sometimes it returns the above error code.
Any idea what it means and how to solve this issue?
<script type='text/javascript'>
    function iRedirect(redirectUrl, arg, value) {
        console.log(redirectUrl);
        try {
            var form = $('<form action="' + redirectUrl + '" method="post">' +
                '<input type="hidden" name="' + arg + '" value="' + value + '"></input>' + '</form>');
            $('body').append(form);
            console.log(form);
            $(form).submit();
        }
        catch (e) {
            alert(e.message);
        }
    }

    function displayMessage(evt) {
        var message;
        try {
            var iOrigin = '<%=ConfigurationManager.AppSettings["RealexResponseDomain"] %>';
            if (evt.origin == iOrigin) {
                message = evt.data.toString();
                console.log(message);
                if (message.indexOf("Error") == 0) {
                    var ErrorJsonStr = message.toString().split(":");
                    var ErrorJsonStr1 = ErrorJsonStr[1].split("<BR>");
                    var reDirectPath = "{\"" + ErrorJsonStr[0] + "\"" + ":" + "\"" + ErrorJsonStr1[0] + "\"" + "," + "\"" + ErrorJsonStr1[1] + "\"" + ":" + "\"" + ErrorJsonStr[2] + "\"" + "}";
                    iRedirect("Response.aspx", "JsonStr", encodeURIComponent(reDirectPath));
                }
                else {
                    if (isJson(message) == true) {
                        var message1 = JSON.parse(message);
                        //alert(message1);
                        console.log(message1);
                        if (message1.hasOwnProperty('pas_uuid')) {
                            iRedirect("Response.aspx", "JsonStr", encodeURIComponent(message.toString()));
                        }
                        else {
                            //check if this transaction is already exist
                            //do redirect
                            //alert("not pas_uuid" + message1);
                            console.log("not pas_uuid" + message1);
                        }
                    }
                }
                //get message and check result
            }
            else {
                console.log("not data");
            }
        }
        catch (err) {
            console.log(err.message);
        }
    }

    function isJson(str) {
        try {
            JSON.parse(str);
        }
        catch (e) {
            console.log(e.message);
            return false;
        }
        return true;
    }

    if (window.addEventListener) {
        // For standards-compliant web browsers
        window.addEventListener("message", displayMessage, false);
    }
    else {
        window.attachEvent("onmessage", displayMessage);
    }
</script>
There are two major scenarios where this message may be displayed on the HPP.
There was a temporary issue with the HPP Sandbox environment.
The Merchant ID and Account you are using has 3D Secure 1 enabled, but you used a non-3D Secure enabled test card.
If you think it wasn't either of these issues, please provide our Support Team with an example Order ID of a transaction where this occurred and they will be able to look at the logs in more detail.

Getting response with NodeJS request module

I just started using the Twitch Kraken API and I have a few questions.
Whenever I attempt to get a JSON object there is no response. I am attempting to run this function through Amazon AWS Lambda, and don't have access to a console.
In the code below my callback function will always print out "SUCCESS got streamers ERROR". I am pretty certain right now the "ERROR" comes from my initial setting of result.
How come result does not get changed into the proper JSON?
I have used Postman and it returns the proper thing with the same query string, params, and headers:
function getJSON(callback){
    var result = "ERROR";
    request.get(url(games[0]), function(error, response, body){
        console.log("requested for url: " + url(games[0]));
        var d = JSON.parse(body);
        result = d.streams[0]; //.channel.display_name;
        // for(var i = 0; i < limit; i++){
        //     streamers.push(d.streams[i].channel.display_name)
        // }
        streamers.push(result);
    });
    if (streamers.length < 0){
        callback("ERROR");
    } else {
        callback("SUCCESS got streamers " + result);
    }
}

function url(game){
    return {
        url: "https://api.twitch.tv/kraken/streams/", //twitchlimit,
        qs: {
            'game': 'overwatch',
            'limit': 2
        },
        headers: {
            'Client-ID': clientID,
            'Accept': 'application/json',
            'Accept-Charset': 'utf-8',
        }
    };
}
I think your streamers code
if (streamers.length < 0){
    callback("ERROR");
} else {
    callback("SUCCESS got streamers " + result);
}
should be included in the request callback, because currently the code isn't waiting for the request to finish; it just carries on, so the value of result will not change. Also, the array length cannot be less than 0, so it will always go to the else branch and say "SUCCESS got streamers ERROR".
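In other words, something along these lines (a sketch using the same request, url, games and streamers variables from the question):

function getJSON(callback) {
    request.get(url(games[0]), function (error, response, body) {
        if (error) return callback("ERROR");
        var d = JSON.parse(body);
        var result = d.streams[0];
        streamers.push(result);
        // decide success/failure only once the response has actually arrived
        if (streamers.length === 0) {
            callback("ERROR");
        } else {
            callback("SUCCESS got streamers " + result);
        }
    });
}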
Thank you guys for the suggestions. I did have a few oversights and attempted to fix them.
I have implemented your suggestions and it seems to have helped a bit. I ended up putting the JSON.parse into a try/catch block and moved the if/else statements inside the request callback in the getJSON method. However, now I don't get any output.
This is how I am invoking the getJSON method:
function handleGameResponse(intent, session, callback){
    //gets the game
    var game = intent.slots.game.value;
    if (!games.includes(game)){
        var speechOutput = "You asked for: " + intent.slots.game.value;
        //var speechOutput = "You asked for: " + games[game] + " That game is currently not an option. These are your current options: " + arrayToString(games)
        var repromptText = "Please ask one from the current options.";
        var header = "Invalid Game";
    } else {
        getJSON(function(data){
            if (data !== "ERROR"){
                var speechOutput = data; //capitalizeFirst(game) + " top three streamers are: " + arrayToString(streamers) + '.';
                var repromptText = "Do you want to hear more about games?";
                var header = capitalizeFirst(game);
            } else {
                var speechOutput = "I'm sorry, something went wrong and I could not get the streamers.";
            }
            //speechOutput = data;
        });
        //speechOutput = games[0] + " games[0], game= " + game; //this executes so the getJSON isn't executing
    }
    var shouldEndSession = false;
    callback(session.attributes, buildSpeechletResponse(header, speechOutput, repromptText, shouldEndSession));
}
Does the above execute the same way? That is, do shouldEndSession and the callback execute before getJSON has time to give a response?
For reference, this is the getJSON method now:
function getJSON(callback){
    var result = "ERROR";
    request.get(url(games[0]), function(error, response, body){
        try {
            var d = JSON.parse(body);
        } catch (err){
            callback("Sorry, something seems to have malfunctioned while getting the streamers");
        }
        result = d.streams[0].channel.display_name;
        // for(var i = 0; i < limit; i++){
        //     streamers.push(d.streams[i].channel.display_name)
        // }
        streamers.push(result);
        if (streamers.length <= 0){
            callback("ERROR");
        } else {
            callback("SUCCESS got streamers " + result);
        }
    });
}
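To the follow-up question: yes, it executes the same way - the callback(session.attributes, buildSpeechletResponse(...)) call still runs before getJSON's HTTP request has returned, so in the getJSON branch header, speechOutput and repromptText are still undefined at that point (and the var declarations inside the getJSON callback are scoped to that callback anyway). A sketch of how handleGameResponse could be restructured so the response is only built inside getJSON's callback (reusing buildSpeechletResponse, capitalizeFirst and games from the question; the reprompt/header strings in the error branch are made up):

function handleGameResponse(intent, session, callback) {
    var game = intent.slots.game.value;
    var shouldEndSession = false;

    if (!games.includes(game)) {
        callback(session.attributes, buildSpeechletResponse(
            "Invalid Game",
            "You asked for: " + game,
            "Please ask one from the current options.",
            shouldEndSession));
        return;
    }

    // Respond only once the HTTP request has actually finished.
    getJSON(function (data) {
        var speechOutput, repromptText, header;
        if (data !== "ERROR") {
            speechOutput = data;
            repromptText = "Do you want to hear more about games?";
            header = capitalizeFirst(game);
        } else {
            speechOutput = "I'm sorry, something went wrong and I could not get the streamers.";
            repromptText = "Do you want to try another game?";
            header = capitalizeFirst(game);
        }
        callback(session.attributes, buildSpeechletResponse(header, speechOutput, repromptText, shouldEndSession));
    });
}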

multiple async mongo requests generate messed up returns

I'm trying to build a JSON document out of multiple requests on my MongoDB.
Since I'm not using DBRef, I have to build the "table joins" by myself, and that's how I ended up in this mess.
This is the code that has been giving me headaches for a couple of days now.
(The mongo part is done with mongoskin.)
var getUserFeed = function(thelimit, out) {
    userfeed = db.collection("userfeed");
    apparel = db.collection("apparel");
    store = db.collection("stores");

    if (thelimit)
        args = {limit: thelimit, sort: [['date', -1]]};

    userfeed.find({}, args).toArray(function(e, feed) {
        if (e) console.log("error: ", e);

        // gather aparel infos
        var i = 0;
        var ret_feeds = [];
        feed.forEach(function(cur_feed) {
            var outfits = [];
            console.log("beginning with: " + cur_feed.url);
            var resfeed = "";
            resfeed = cur_feed;
            resfeed.url = baseurl + snapurl + resfeed.url + "_small.jpg";
            i = 0;
            cur_feed.apparel_ids.forEach(function(item) {
                /*>>*/ apparel.find({"_id": item}, {limit: 1}).toArray(function(e, results) {
                    console.log(">>>>>>>>>>> APPAREL_FIND { i:" + i + "}");
                    if (e) console.log("error: ", e);
                    results = results[0];
                    if (results.apparel_cat == 1)
                        url_subcat = "pants/";
                    else if (results.apparel_cat == 2)
                        url_subcat = "shirts/";
                    else if (results.apparel_cat == 2)
                        url_subcat = "tshirts/";
                    results.thumb = baseurl + outfiturl + url_subcat + results.apparel_id + "/front.jpg";
                    results.size = "M"; ///// TOBE REAL VERY SOON
                    results.gallery = [
                        baseurl + outfiturl + url_subcat + results.apparel_id + "/model.jpg",
                        baseurl + outfiturl + url_subcat + results.apparel_id + "/front.jpg"
                    ];
                    outfits.push(results); // quick and dirty, 2 b refined..
                    i++;
                    if (i >= cur_feed.apparel_ids.length) {
                        // pack it up
                        // resfeed.url = resfeed.url;
                        resfeed.outfits = outfits;
                        resfeed.fav = false;
                        resfeed.bough = false;
                        // retrieve store infos
                        /*>>>*/ store.find({"_id": resfeed.store_id}, {limit: 1}).toArray(function(e, resstore) {
                            console.log("\t############# STORE_FIND { i:" + i + "}");
                            if (e) console.log("error: ", e);
                            resfeed.store = resstore[0];
                            resfeed.store.class = "hem";
                            ret_feeds.push(resfeed);
                            if (ret_feeds.length >= feed.length) {
                                console.log("\t\t######################calling return [ ret_feeds.length = " + ret_feeds.length + " feed.length = " + feed.length);
                                out.send(ret_feeds);
                            }
                        });
                    }
                });
            });
        });
    });
}
This code fails because it returns the JSON before finishing its task, so the next time it tries to return another JSON it crashes miserably, due to the fact that the headers have already been sent.
Now as you can see, I have 3 collections: userfeed, apparel and stores.
The goal of this function is to retrieve all the items in the userfeed collection, extract the outfits (based on the outfit_id array that is part of the userfeed collection), and also extract the store infos related in the same way to each userfeed entry.
I know that async.js or equivalent is the way to go: I've read like a gazillion other posts here on SO, but I still can't get my head around it, probably because the whole mechanism behind async.js, or flow control in general, is still out of focus in my mind.
I'm still a noob at node :)
UPDATE
I think I found the right path for understanding here: http://www.sebastianseilund.com/nodejs-async-in-practice
This guy did a terrific job of describing, use case by use case, all the ways to apply async.js to your code.
I'll post the solution as soon as I get around to it.
UPDATE 2
Thanks to the above dude I was able to work out a working solution; the answer is below.
After so much struggling I have finally managed to get a solution.
async.js was the answer, as I was (obviously) suspecting.
FYI, here's the working code.
If you'd like to point out improvements or anything else, you are more than welcome.
var getUserFeed = function(thelimit, out) {
    userfeed = db.collection("userfeed");
    apparel = db.collection("apparel");
    store = db.collection("stores");

    var args;
    if (thelimit)
        args = {limit: thelimit, sort: [['date', -1]]};

    var outfits = [];
    var feeds = [];

    async.series([
        // userfeed find
        function(callback) {
            userfeed.find({}, args).toArray(function(e, feed) {
                if (e) callback(e);
                feeds = feed;
                console.log(feeds.length + " retrieved. stepping in");
                callback(null, null);
            });
        },
        // join
        function(callback) {
            async.forEach(feeds, function(thefeed, callback) {
                var i = feeds.indexOf(thefeed);
                async.parallel([
                    // load apparel infos
                    function(callback) {
                        console.log("\t >>> analyzing thefeed id " + thefeed._id);
                        async.forEach(thefeed.apparel_ids, function(apparel_id, callback) {
                            apparel.find({"_id": apparel_id}, {limit: 1}).toArray(function(e, results) {
                                if (e) console.log("error: ", e);
                                results = results[0];
                                if (results.apparel_cat == 1)
                                    url_subcat = "pants/";
                                else if (results.apparel_cat == 2)
                                    url_subcat = "shirts/";
                                else if (results.apparel_cat == 2)
                                    url_subcat = "tshirts/";
                                results.thumb = baseurl + outfiturl + url_subcat + results.apparel_id + "/front.jpg";
                                results.size = "M"; ///// TOBE REAL VERY SOON
                                results.gallery = [
                                    baseurl + outfiturl + url_subcat + results.apparel_id + "/model.jpg",
                                    baseurl + outfiturl + url_subcat + results.apparel_id + "/front.jpg"
                                ];
                                console.log("\t\t### pushing data into thefeed_index: " + i);
                                if (!util.isArray(feeds[i].outfits)) feeds[i].outfits = [];
                                feeds[i].outfits.push(results);
                                callback(null, null);
                            });
                        }, callback);
                    },
                    // load store infos
                    function(callback) {
                        store.find({"_id": thefeed.store_id}, {limit: 1}).toArray(function(e, resstore) {
                            console.log("\t### STORE_FIND");
                            if (e) console.log("error: ", e);
                            feeds[i].store = resstore[0];
                            feeds[i].store.class = "hem";
                            callback(null, null);
                        });
                    }
                ], callback);
            }, callback);
        }
        // MAIN
    ], function(err, result) {
        console.log("feed retrieval completed. stepping out");
        if (err) return next(err);
        out.send(feeds);
    });
};
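Since improvements were invited: the one thing I'd still tighten is error handling - the inner find callbacks only log errors and then call callback(null, null), so the final if (err) branch can never fire. The pattern async relies on is that each task either succeeds or passes its error to its callback, at which point the whole series/forEach stops and hands the error to the final function. A minimal, self-contained illustration of that behaviour (timings and messages are made up):

var async = require("async");

// Each task either succeeds or passes its error to the callback;
// async.series stops at the first error and hands it to the final function.
async.series([
    function (callback) {
        setTimeout(function () { callback(null, "first result"); }, 10);
    },
    function (callback) {
        setTimeout(function () { callback(new Error("second task failed")); }, 10);
    }
], function (err, results) {
    if (err) return console.error("aborted:", err.message);   // this is what `if (err) return next(err)` relies on
    console.log("all done:", results);
});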