Card payment error 508 - Realex Payments error number: 61,754 - realex-payments-api

I am using Realex Payments with an iframe. I can load the payment page correctly, but as soon as I hit 'Pay Now' it returns:
Error: 508
Message: An error has occurred processing your request. Please contact the merchant whose goods or services you are purchasing quoting the following error number: 61,754
Most of the time it returns a correct response string (either successful or declined), but sometimes it returns the error above. Any idea what it means and how to solve this issue?
<script type='text/javascript'>
    function iRedirect(redirectUrl, arg, value) {
        console.log(redirectUrl);
        try {
            var form = $('<form action="' + redirectUrl + '" method="post">' +
                '<input type="hidden" name="' + arg + '" value="' + value + '"></input>' + '</form>');
            $('body').append(form);
            console.log(form);
            $(form).submit();
        }
        catch (e) {
            alert(e.message);
        }
    }

    function displayMessage(evt) {
        var message;
        try {
            var iOrigin = '<%=ConfigurationManager.AppSettings["RealexResponseDomain"] %>';
            if (evt.origin == iOrigin) {
                message = evt.data.toString();
                console.log(message);
                if (message.indexOf("Error") == 0) {
                    var ErrorJsonStr = message.toString().split(":");
                    var ErrorJsonStr1 = ErrorJsonStr[1].split("<BR>");
                    var reDirectPath = "{\"" + ErrorJsonStr[0] + "\"" + ":" + "\"" + ErrorJsonStr1[0] + "\"" + "," + "\"" + ErrorJsonStr1[1] + "\"" + ":" + "\"" + ErrorJsonStr[2] + "\"" + "}";
                    iRedirect("Response.aspx", "JsonStr", encodeURIComponent(reDirectPath));
                }
                else {
                    if (isJson(message) == true) {
                        var message1 = JSON.parse(message);
                        //alert(message1);
                        console.log(message1);
                        if (message1.hasOwnProperty('pas_uuid')) {
                            iRedirect("Response.aspx", "JsonStr", encodeURIComponent(message.toString()));
                        }
                        else {
                            //check if this transaction is already exist
                            //do redirect
                            //alert("not pas_uuid" + message1);
                            console.log("not pas_uuid" + message1);
                        }
                    }
                }
                //get message and check result
            }
            else {
                console.log("not data");
            }
        }
        catch (err) {
            console.log(err.message);
        }
    }

    function isJson(str) {
        try {
            JSON.parse(str);
        }
        catch (e) {
            console.log(e.message);
            return false;
        }
        return true;
    }

    if (window.addEventListener) {
        // For standards-compliant web browsers
        window.addEventListener("message", displayMessage, false);
    }
    else {
        window.attachEvent("onmessage", displayMessage);
    }
</script>

There are two major scenarios where this message may be displayed on the HPP:
1. There was a temporary issue with the HPP Sandbox environment.
2. The Merchant ID and Account you are using have 3D Secure 1 enabled, but you used a non-3D Secure enabled test card.
If you think it wasn't either of these issues, please provide our Support Team with an example Order ID of a transaction where this occurred and they will be able to look at the logs in more detail.
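If it keeps happening, it also helps to capture the error number together with the Order ID you sent in the HPP request at the moment the iframe posts the message back, so there is something concrete to quote to Support. A minimal sketch (logHppError and currentOrderId are placeholder names, not part of the Realex API; it assumes the "Error:508<BR>Message:..." message shape shown in the question):

// Hedged sketch: pull the "error number" out of the HPP error message and log
// it with the order id so it can be quoted to Realex Support.
// "currentOrderId" is a placeholder for however the page tracks the ORDER_ID
// it sent in the HPP request.
function logHppError(message, currentOrderId) {
    var match = /error number:\s*([\d,]+)/i.exec(message);
    var errorNumber = match ? match[1] : "unknown";
    console.log("HPP error " + errorNumber + " for order " + currentOrderId +
        " at " + new Date().toISOString());
}

Calling logHppError(message, currentOrderId) inside the message.indexOf("Error") == 0 branch of displayMessage records each occurrence without changing the redirect behaviour.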

Related

UrlFetchApp.fetch(url) fails

I have a bit of Google Apps Script (GAS) code that is failing when executing UrlFetchApp.fetch(url) - I think.
Are there limits to the character length of a URL when used in UrlFetchApp.fetch(url)? My function is failing and I suspect it has something to do with the length of the URL, which is over 100 characters.
The code below refers...
function uploadToDrive(url, folderid, filename, fileDesc) {
    var msg = '';
    try {
        var response = UrlFetchApp.fetch(url);
    } catch (err) {
    };
    if (response.getResponseCode() === 200) {
        var folder = DriveApp.getRootFolder();
        if (folderid) {
            folder = DriveApp.getFolderById(folderid);
        }
        var blob = response.getBlob();
        var file = folder.createFile(blob);
        file.setName(filename);
        file.setDescription(fileDesc);
        var headers = response.getHeaders();
        var content_length = NaN;
        for (var key in headers) {
            if (key.toLowerCase() == 'Content-Length'.toLowerCase()) {
                content_length = parseInt(headers[key], 10);
                break;
            }
        }
        var blob_length = blob.getBytes().length;
        msg += 'Saved "' + filename + '" (' + blob_length + ' bytes)';
        if (!isNaN(content_length)) {
            if (blob_length < content_length) {
                msg += ' WARNING: truncated from ' + content_length + ' bytes.';
            } else if (blob_length > content_length) {
                msg += ' WARNING: size is greater than expected ' + content_length + ' bytes from Content-Length header.';
            }
        }
        msg += '\nto folder "' + folder.getName() + '".\n';
    }
    else {
        msg += 'Response code: ' + response.getResponseCode() + '\n';
    }
    return file.getUrl();
}
That link generates a response code 404, but you set the response variable only if the fetch method is successful in your try block. Try validating the response variable before assuming it has properties and methods to access:
function fetching() {
    var url = "<some url resource>";
    try {
        var response = UrlFetchApp.fetch(url);
    } catch (err) {
        // why catch the error if you aren't going to do anything with it?
        Logger.log(err);
    } finally {
        if (response && response.getResponseCode() === 200) {
            Logger.log("Should be 200, got " + response.getResponseCode());
        } else {
            Logger.log("Fetching Failed: Exception thrown, no response.");
        }
    }
}
However, I would go further and guarantee a response from UrlFetchApp.fetch. This is done by setting the "muteHttpExceptions" parameter to true.
function fetching() {
    // dns exception
    // var url = "googqdoiqijwdle.com"
    // 200
    var url = "google.com";
    // 404
    // var url = "https://www.jotform.com/uploads/Administrator_System_sysadmin/92960977977584/4480552280228238115/BoE%20Test%20File.docx"
    try {
        var response = UrlFetchApp.fetch(url, {
            muteHttpExceptions: true
        });
    } catch (err) {
        Logger.log(err);
    }
    if (response) {
        response.getResponseCode() === 200 ?
            Logger.log("Should be 200, got " + response.getResponseCode()) :
            Logger.log("Anything but 200, got " + response.getResponseCode());
    }
}
Hope this helped!
Docs: Google Developers
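As for the length question: a URL just over 100 characters is well within what UrlFetchApp handles, but if you want to rule length out, a small diagnostic wrapper (my own sketch, not an API from the docs) makes it easy to compare failing and succeeding calls:

// Diagnostic sketch: log the URL length before fetching so failures can be
// correlated with length. muteHttpExceptions keeps HTTP errors from throwing.
function fetchWithLengthLog(url) {
    Logger.log("URL length: " + url.length);
    var response = UrlFetchApp.fetch(url, { muteHttpExceptions: true });
    Logger.log("Response code: " + response.getResponseCode());
    return response;
}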

How to Click or trigger <a href="javascript:viewPage('ABC');"> with DOM

I'm a beginner. I tried to click the link with the DOM but it didn't work. How can I click or trigger the function, and why didn't my attempt work?
<li id="viewPage"><a href="javascript:viewPage('ABC');">viewPage</a></li>
document.getElementById("viewPage").click()
function viewPage(Name, Number) {
    $("#ViewPage li").removeClass("active");
    $("#" + Name).addClass("active");
    $("#ViewPage").load(
        Name + ".asp",
        {
            Number: Number
        },
        function(response, status, xhr) {
            if (status == "error") {
                var msg = "there was an error: ";
                $("#error").html(msg + xhr.status + " " + xhr.statusText);
            }
        });
}
Since you don't seem to mind using jQuery in the rest of your code, here is an easy way to handle the click using jQuery:
$('#viewPage').click(function() {
    console.log('Now your click function is working');
});

function viewPage1(Name, Number) {
    $("#ViewPage1 li").removeClass("active");
    $("#" + Name).addClass("active");
    $("#ViewPage1").load(
        Name + ".asp", {
            Number: Number
        },
        function(response, status, xhr) {
            if (status == "error") {
                var msg = "there was an error: ";
                $("#error").html(msg + xhr.status + " " + xhr.statusText);
            }
        });
}
<script src="https://ajax.googleapis.com/ajax/libs/jquery/2.1.1/jquery.min.js"></script>
<li id="viewPage">viewPage1</li>
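A likely reason document.getElementById("viewPage").click() did nothing is that the click lands on the <li>, which has no click handler or default action of its own. If the real goal is to run viewPage, you can bind it as the handler and trigger it, or just call it directly. A small sketch ('ABC' and 1 are placeholder arguments, not values from the question):

// Bind the existing function as the click handler, then fire the click.
$('#viewPage').on('click', function () {
    viewPage('ABC', 1); // placeholder page name and number
});
$('#viewPage').trigger('click');

// ...or skip the event entirely:
viewPage('ABC', 1);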

Can't parse JSON returned from MySQL

I have a Node.js app that writes data to a MySQL backend. One field is an array that I stringify. I can see in the workbench that the data is correct in the database. However, when I retrieve it I get an error when I try to parse it:
"Unexpected token o in JSON at position 1"
If I log the result it shows up as [Object Object].
From what I read online, this means it is already a JavaScript object and I do not need to parse it. However, I cannot find anything about how to access the data.
process: function (bot, msg, suffix) {
    var ftcmds = suffix.split(" ", 1);
    var ftName = ftcmds[0];
    var ftArray;
    var selectSQL = "SELECT FireTeam FROM fireteam WHERE Name = '" + ftName + "'";
    var updateSQL = "UPDATE fireteam SET FireTeam = '" + ftArray + "'WHERE Name = '" + ftName + "'";
    mysqlcon.query(selectSQL, function (err, result) {
        console.log("Result |" + result);
        console.log("Error |" + err);
        if (err) {
            console.log("Caught Error " + err + " " + msg.author);
        }
        else {
            console.log("Recovered result " + result);
            ftArray = result;
            console.log("Attempting to update array");
            ftArray.push(msg.author.id);
            console.log("updated array " + ftArray);
            var jsonArray = JSON.stringify(ftArray);
            mysqlcon.query(updateSQL, function (err, result) {
                console.log("Result |" + result);
                console.log("Error |" + err);
                if (err.toString().indexOf(dupErr) != -1) {
                    msg.author.send("Could not find that fireteam");
                    console.log("Error: Did not locate the requested name " + msg.author)
                } else if (err) {
                    console.log("Caught Error " + err + " " + msg.author);
                }
                else {
                    msg.author.send("You have joined Fireteam " + name + ". I will setup a group chat on " + date + " if your team fills up.");
                }
            })
        }
    });
}
You should just be able to access it as an object, so if result has fields name and title you can just access them as:
var name = result.name
var title = result.title
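For the snippet in the question, that would look roughly like this. It is a sketch that assumes the node mysql driver (where a SELECT result is an array of row objects) and that the FireTeam column holds the string produced by JSON.stringify:

mysqlcon.query(selectSQL, function (err, result) {
    if (err) {
        console.log("Caught Error " + err + " " + msg.author);
        return;
    }
    // result is an array of rows; take the FireTeam column of the first row
    var row = result[0];
    // the column is a plain string written by JSON.stringify, so it parses cleanly
    var ftArray = JSON.parse(row.FireTeam);
    ftArray.push(msg.author.id);
    var jsonArray = JSON.stringify(ftArray);
    // ...build and run the UPDATE with jsonArray from here
});

If the column is a native JSON type rather than TEXT/VARCHAR, some drivers return an already-parsed value, in which case the JSON.parse call should be skipped.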

multiple async mongo requests generate messed up returns

I'm trying to build a JSON response out of multiple requests to my MongoDB.
Since I'm not using DBRef, I have to build the "table joins" myself, and that's how I ended up in this mess.
This is the code that has been giving me headaches for a couple of days now.
(The Mongo part is done with mongoskin.)
var getUserFeed = function(thelimit, out) {
    userfeed = db.collection("userfeed");
    apparel = db.collection("apparel");
    store = db.collection("stores");

    if (thelimit)
        args = {limit: thelimit, sort: [['date', -1]]};

    userfeed.find({}, args).toArray(function(e, feed) {
        if (e) console.log("error: ", e);

        // gather aparel infos
        var i = 0;
        var ret_feeds = [];
        feed.forEach(function(cur_feed) {
            var outfits = [];
            console.log("beginning with: " + cur_feed.url);
            var resfeed = "";
            resfeed = cur_feed;
            resfeed.url = baseurl + snapurl + resfeed.url + "_small.jpg";
            i = 0;
            cur_feed.apparel_ids.forEach(function(item) {
                /*>>*/ apparel.find({"_id": item}, {limit: 1}).toArray(function(e, results) {
                    console.log(">>>>>>>>>>> APPAREL_FIND { i:" + i + "}");
                    if (e) console.log("error: ", e);
                    results = results[0];
                    if (results.apparel_cat == 1)
                        url_subcat = "pants/";
                    else if (results.apparel_cat == 2)
                        url_subcat = "shirts/";
                    else if (results.apparel_cat == 2)
                        url_subcat = "tshirts/";
                    results.thumb = baseurl + outfiturl + url_subcat + results.apparel_id + "/front.jpg";
                    results.size = "M"; ///// TOBE REAL VERY SOON
                    results.gallery = [
                        baseurl + outfiturl + url_subcat + results.apparel_id + "/model.jpg",
                        baseurl + outfiturl + url_subcat + results.apparel_id + "/front.jpg"
                    ];
                    outfits.push(results); // quick and dirty, 2 b refined..
                    i++;
                    if (i >= cur_feed.apparel_ids.length) {
                        // pack it up
                        // resfeed.url = resfeed.url;
                        resfeed.outfits = outfits;
                        resfeed.fav = false;
                        resfeed.bough = false;
                        // retrieve store infos
                        /*>>>*/ store.find({"_id": resfeed.store_id}, {limit: 1}).toArray(function(e, resstore) {
                            console.log("\t############# STORE_FIND { i:" + i + "}");
                            if (e) console.log("error: ", e);
                            resfeed.store = resstore[0];
                            resfeed.store.class = "hem";
                            ret_feeds.push(resfeed);
                            if (ret_feeds.length >= feed.length) {
                                console.log("\t\t######################calling return [ ret_feeds.length = " + ret_feeds.length + " feed.length = " + feed.length);
                                out.send(ret_feeds);
                            }
                        });
                    }
                });
            });
        });
    });
}
This code fails because it returns the JSON before finishing its task, so the next time it tries to return another JSON it crashes miserably because the headers have already been sent.
Now as you can see, I have 3 collections: userfeed, apparel and stores.
The goal of this function is to retrieve all the items in the userfeed collection, extract the outfits (based on the outfit_id array that is part of the userfeed collection), and also extract the store info related in the same way to each userfeed entry, like so:
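Roughly this shape, sketched from the fields the code above sets (placeholder values, not real output):

[
    {
        "url": "<baseurl + snapurl + ...>_small.jpg",
        "fav": false,
        "bough": false,
        "outfits": [
            { "thumb": "<...>/front.jpg", "size": "M", "gallery": ["<...>/model.jpg", "<...>/front.jpg"] }
        ],
        "store": { "class": "hem" }
    }
]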
I know that async.js or an equivalent is the way to go: I've read a gazillion other posts here on SO, but I still can't get my head around it, probably because the whole mechanism behind async.js, or flow control in general, is still out of focus in my mind.
I'm still a noob at node :)
UPDATE
I think I found the right path for understanding here: http://www.sebastianseilund.com/nodejs-async-in-practice
This guy did a terrific job of describing, use case by use case, all the ways to apply async.js to your code.
I'll post the solution as soon as I get around to it.
UPDATE 2
Thanks to the above dude I was able to work out a working solution; the answer is below.
After so much struggling, I have finally managed to get a solution.
async.js was the answer, as I (obviously) suspected.
FYI, here's the working code.
If you would like to point out improvements or anything else, you are more than welcome to.
// requires the async and util modules
var async = require('async');
var util = require('util');

var getUserFeed = function(thelimit, out) {
    userfeed = db.collection("userfeed");
    apparel = db.collection("apparel");
    store = db.collection("stores");

    var args;
    if (thelimit)
        args = {limit: thelimit, sort: [['date', -1]]};

    var outfits = [];
    var feeds = [];

    async.series([
        // userfeed find
        function(callback) {
            userfeed.find({}, args).toArray(function(e, feed) {
                if (e) callback(e);
                feeds = feed;
                console.log(feeds.length + " retrieved. stepping in");
                callback(null, null);
            });
        },
        // join
        function(callback) {
            async.forEach(feeds, function(thefeed, callback) {
                var i = feeds.indexOf(thefeed);
                async.parallel([
                    // load apparel infos
                    function(callback) {
                        console.log("\t >>> analyzing thefeed id " + thefeed._id);
                        async.forEach(thefeed.apparel_ids, function(apparel_id, callback) {
                            apparel.find({"_id": apparel_id}, {limit: 1}).toArray(function(e, results) {
                                if (e) console.log("error: ", e);
                                results = results[0];
                                if (results.apparel_cat == 1)
                                    url_subcat = "pants/";
                                else if (results.apparel_cat == 2)
                                    url_subcat = "shirts/";
                                else if (results.apparel_cat == 2)
                                    url_subcat = "tshirts/";
                                results.thumb = baseurl + outfiturl + url_subcat + results.apparel_id + "/front.jpg";
                                results.size = "M"; ///// TOBE REAL VERY SOON
                                results.gallery = [
                                    baseurl + outfiturl + url_subcat + results.apparel_id + "/model.jpg",
                                    baseurl + outfiturl + url_subcat + results.apparel_id + "/front.jpg"
                                ];
                                console.log("\t\t### pushing data into thefeed_index: " + i);
                                if (!util.isArray(feeds[i].outfits)) feeds[i].outfits = [];
                                feeds[i].outfits.push(results);
                                callback(null, null);
                            });
                        }, callback);
                    },
                    // load store infos
                    function(callback) {
                        store.find({"_id": thefeed.store_id}, {limit: 1}).toArray(function(e, resstore) {
                            console.log("\t### STORE_FIND");
                            if (e) console.log("error: ", e);
                            feeds[i].store = resstore[0];
                            feeds[i].store.class = "hem";
                            callback(null, null);
                        });
                    }
                ], callback);
            }, callback);
        }
        // MAIN
    ], function(err, result) {
        console.log("feed retrieval completed. stepping out");
        if (err) return next(err);
        out.send(feeds);
    });
};

too many xhr requests in chrome

I have a tight loop that fetches PNG files via XHR2. It works fine in Firefox and IE10. In Chrome, when my list of files hits about 5,500 I start getting XHR errors. I like the async interface since I am interleaving these requests with local IndexedDB store requests.
The code is below (I am using xhr2lib for the fetches and PouchDB for the IndexedDB API).
I know that it is the XHR2 that is failing, since when this works in Chrome, all the XHR2 calls are processed before the SaveDB() calls. When it fails, I never get the save calls.
function getBlobs(fileList) {
    console.log("starting to fetch blobs");
    $.each(fileList, function (i, val) {
        var path = baseURL + val.id + "." + imageType;
        $xhr.ajax({
            url: path,
            dataType: "blob",
            success: function (data) { saveBlob(data, val.size, val.id); }
        });
    });
}

function saveBlob(blob, length, id) {
    if (blob.size != length) {
        console.error("Blob Length found: " + blob.size + " expected: " + length);
    }
    putBlob(blob, id);
    ++fetchCnt;
    if (fetchCnt == manifest.files.length) {
        setTimeout(fetchComplete, 0);
    }
}

function fetchComplete() {
    var startTime = vm.get("startTime");
    var elapsed = new Date() - startTime;
    var fetchTime = ms2Time(elapsed);
    vm.set("fetchTime", fetchTime);
}

function putBlob(blob, id) {
    var cnt;
    var type = blob.type;
    DB.putAttachment(id + "/pic", blob, type, function (err, response) {
        if (err) {
            console.error("Could store blob: error: " + err.error + " reason: " + err.reason + " status: " + err.status);
        } else {
            console.log("saved: ", response.id + " rev: " + response.rev);
            cnt = vm.get("blobCount");
            vm.set("blobCount", ++cnt);
            if (cnt == manifest.files.length) {
                setTimeout(storeComplete, 0);
            }
        }
    });
}
The Chromium folks acknowledge this is something they should fix: https://code.google.com/p/chromium/issues/detail?id=244910, but in the meantime I have implemented throttling using jQuery defer/resolve to keep the number of concurrent requests low.
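For reference, the throttling can be sketched roughly like this. It is a simplified sketch rather than the exact code, and it assumes xhr2lib's $xhr.ajax accepts jQuery-style error/complete callbacks; the cap of 6 concurrent requests is an arbitrary choice:

// Hedged sketch: keep at most MAX_CONCURRENT XHR2 fetches in flight,
// queue the rest, and hand each caller a jQuery promise.
var MAX_CONCURRENT = 6; // assumption: a small cap, tune as needed
var active = 0;
var queue = [];

function enqueueFetch(path) {
    var dfd = $.Deferred();
    queue.push({ path: path, dfd: dfd });
    drainQueue();
    return dfd.promise();
}

function runJob(job) {
    active++;
    $xhr.ajax({
        url: job.path,
        dataType: "blob",
        success: function (data) { job.dfd.resolve(data); },
        // assumption: xhr2lib exposes error/complete hooks like jQuery.ajax
        error: function (err) { job.dfd.reject(err); },
        complete: function () { active--; drainQueue(); }
    });
}

function drainQueue() {
    while (active < MAX_CONCURRENT && queue.length > 0) {
        runJob(queue.shift());
    }
}

getBlobs then enqueues instead of firing everything at once:

function getBlobs(fileList) {
    console.log("starting to fetch blobs");
    $.each(fileList, function (i, val) {
        var path = baseURL + val.id + "." + imageType;
        enqueueFetch(path).done(function (data) {
            saveBlob(data, val.size, val.id);
        });
    });
}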