Use GM_xmlhttpRequest to POST data on Chrome? - google-chrome

I'm writing a user script to take an image from a page, and upload it to a server.
The script works fine in Firefox (Greasemonkey and Scriptish), but when I use Chrome (with Tampermonkey or Ninjakit), it does not send the data; it sends the string [object Object] instead.
Here is my script:
// ==UserScript==
// @id myid
// @name myname
// @version 1.0
// @namespace ohadcn
// @author Ohad Cohen
// @description mydescription
// @include https://*
// @grant GM_xmlhttpRequest
// @require https://code.jquery.com/jquery-2.0.3.min.js
// @run-at document-end
// ==/UserScript==
function getBase64Image(img) {
    var canvas = document.createElement("canvas");
    canvas.width = img.width;
    canvas.height = img.height;
    var ctx = canvas.getContext("2d");
    ctx.drawImage(img, 0, 0);
    var dataURL = canvas.toDataURL("image/png");
    return dataURL.replace(/^data:image\/(png|jpg);base64,/, "");
}
img = $("img[alt=myImage]").get(0);
img.onload = function() {
    var img64 = getBase64Image(img);
    var _data = new FormData();
    _data.append("image64", img64);
    GM_xmlhttpRequest({
        method: "POST",
        url: "http://myserver.org/mysscript.py",
        headers: {
            "Content-Type": "multipart/form-data"
        },
        data: _data,
        onload: function(response) {
            console.log("gut response");
            $("#input").get()[0].value = response.responseText;
        }
    });
};
Both Tampermonkey and Ninjakit do send the request. In Tampermonkey I get a response; in Ninjakit I don't (onload is never called).
But neither sends the actual base64-encoded image: the server receives [object Object] as the POST body instead of the data. (I can't get the devtools network panel to show requests made by GM_xmlhttpRequest, so I checked on the server side.)

It might be that FormData and multipart/form-data are not well supported on those platforms. I need to look into it more (later).
Meanwhile, try the more typical approach: use application/x-www-form-urlencoded or JSON.
For example:
GM_xmlhttpRequest ( {
    method: "POST",
    url: "http://myserver.org/mysscript.py",
    data: "image64=" + encodeURIComponent (img64),
    headers: {
        "Content-Type": "application/x-www-form-urlencoded"
    },
    onload: function (response) {
        console.log ("gut response");
        $("#input").get()[0].value = response.responseText;
    }
} );
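If your server-side script can parse a JSON body instead, a sketch of the JSON variant would look like the following (same URL as above; the server would need to read and parse the raw request body):
GM_xmlhttpRequest ( {
    method: "POST",
    url: "http://myserver.org/mysscript.py",
    // Send the base64 string inside a JSON document; the server must parse the raw body.
    data: JSON.stringify ( { image64: img64 } ),
    headers: {
        "Content-Type": "application/json"
    },
    onload: function (response) {
        $("#input").get()[0].value = response.responseText;
    }
} );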

Related

Pass a parameter from client-side to server side and get result

I know it sounds basic, but I can't seem to get it right. I'm trying to get data from the API, but it needs a parameter in order to obtain the data. How can I pass the parameter and get the result, which is a JSON array?
$(function() {
    var proxy = 'http://192.168.1.126/lms-dev-noel/proxy.php';
    var endpoint = 'account/';
    var rt = 'GET';
    var url = proxy+'?endpoint='+endpoint+'&rt='+rt;
    var param = {
        'lastsyncdate' : '2016-12-06'
    };
    $.get(url, function(param) {
        console.log('Success');
    });
});
There are a couple of ways to achieve this:
Using the jQuery.ajax() method:
var proxy = 'http://192.168.1.126/lms-dev-noel/proxy.php';
var endpoint = 'account/';
var rt = 'GET'; // needed to build the url, as in the question
var url = proxy+'?endpoint='+endpoint+'&rt='+rt;
var method = 'GET';
var params = {
    'lastsyncdate' : '2016-12-06'
};
$.ajax({
    url: url,
    type: method, // send it through the GET method
    data: params,
    success: function(response) {
        // Do Something
    },
    error: function(xhr) {
        // Do Something to handle error
    }
});
Using the jQuery.get() method:
var proxy = 'http://192.168.1.126/lms-dev-noel/proxy.php';
var endpoint = 'account/';
var rt = 'GET'; // needed to build the url, as in the question
var url = proxy+'?endpoint='+endpoint+'&rt='+rt;
var method = 'GET';
var params = {
    'lastsyncdate' : '2016-12-06'
};
$.get(url, params, function(res) {
    console.log(res);
});
I just pass parameters as name value pairs like so...
$.get(
    "yoururl.php",
    { color: "red", size: "small" }, // your params go here as name / value pairs
    function(response) {
        console.log(response);
    }
);

How to put png binary data into an img tag and display it as an image?

I am using this
$.ajax({
    type: "GET",
    url: 'template/bump1/purse.png',
    datatype: "image/png",
    success: function (data) {
        var reader = new FileReader();
        reader.onload = function (e) {
            var img = document.getElementById("CaptchaImg");
            img.src = e.target.result;
        };
        reader.readAsDataURL(data);
        //$('#CaptchaImg').attr('src', data);
    }
});
to download an image, and the response comes back as raw binary data.
Node.js is returning it as:
WriteHeaderMode('image/png', res, 200);
res.end(data, 'binary');
But now, how do I put that into an image tag and show it as an image? Note: I do not want the server to return the data as base64; it has to be binary. I'm fine with converting the binary to base64 on the client side, though.
When I pass it to readAsDataURL, it throws a TypeError exception.
Thanks
EDIT
var img = document.getElementById("CaptchaImg");
var reader = new FileReader();
reader.onload = function(e) {
    //img.src = e.target.result;
    $("body").html(e.target.result);
};
reader.readAsDataURL(new Blob([data]));
This seems to convert it into a base64 encoding (the result starts with data:application/octet-stream;base64,), but it doesn't display an image...
jQuery Ajax did not support binary responses at the time (it does now); there is a trick that uses overrideMimeType('text/plain; charset=x-user-defined') to return each byte as a character in the response string, and then loops through the response to build a byte array from the data.
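A rough sketch of that trick, reusing the purse.png URL from the question (this is not code from the original answer, just an illustration of the idea):
var xhr = new XMLHttpRequest();
xhr.open('GET', 'template/bump1/purse.png', true);
// Force every byte through as a single character code instead of letting it be decoded as text.
xhr.overrideMimeType('text/plain; charset=x-user-defined');
xhr.onload = function () {
    var text = xhr.responseText;
    var bytes = new Uint8Array(text.length);
    for (var i = 0; i < text.length; i++) {
        // Mask off the high bits the charset hack adds to bytes above 0x7F.
        bytes[i] = text.charCodeAt(i) & 0xff;
    }
    // bytes now holds the raw PNG data and could be wrapped in a Blob for display.
};
xhr.send();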
However, with a bare XMLHttpRequest this can be done easily by using the responseType property.
var xhr = new XMLHttpRequest();
xhr.onreadystatechange = function() {
    if (this.readyState == 4 && this.status == 200) {
        var img = document.getElementById("CaptchaImg");
        var url = window.URL || window.webkitURL;
        img.src = url.createObjectURL(this.response);
    }
};
xhr.open('GET', 'template/bump1/purse.png');
xhr.responseType = 'blob';
xhr.send();
Musa's answer is a great solution. I implemented something similar in Angular and it works great: when you define responseType = 'blob', your response won't be a string anymore; it will be a Blob object.
I just needed to add an extra parameter to the headers object (not sure if it's necessary in jQuery): headers: {'Content-Type': "image/png"}.
My full request object:
var request = {
    url: "_/get_image_/get",
    method: "GET",
    headers: {'Content-Type': "image/png"},
    responseType: 'blob'
}
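For completeness, a sketch of how that request object might be consumed with AngularJS's $http; the element id and object-URL handling here are assumptions rather than part of the original answer:
$http(request).then(function (response) {
    // response.data is a Blob because responseType was set to 'blob'.
    var url = (window.URL || window.webkitURL).createObjectURL(response.data);
    document.getElementById("CaptchaImg").src = url;
});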
Try using new Blob([data], {type: 'image/png'}) instead of new Blob([data]).
This will ensure that the media type of the blob is a PNG.
Source: https://developer.mozilla.org/en-US/docs/Web/API/Blob
Whatever framework/lib you're using:
When getting the image (a File), ask for a BLOB type
Use the following function to create a Blob and get a URL out of it
Set your image src to that URL
Code:
export function fileToImageBase64Url(file: File): string {
    const blob = new Blob([file], { type: 'image/png' });
    return URL.createObjectURL(blob);
}
If at some point you decide that the image is not useful anymore, don't forget to clean it up: URL.revokeObjectURL(yourUrl)
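A short usage sketch (the element id and the someFile variable are assumptions, not part of the original answer):
var img = document.getElementById('CaptchaImg'); // assumed element id
var url = fileToImageBase64Url(someFile);        // someFile: the File object you received
img.onload = function () {
    URL.revokeObjectURL(url); // release the object URL once the image has loaded
};
img.src = url;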

How to make multipart form api requests with requestjs and nodejs?

I'm trying to interact with the Phaxio API using multipart form data. As per the request.js docs for making a POST request, I do:
var request = require('request');
var options = {
    uri: 'https://api.phaxio.com/v1/send',
    headers: {'content-length': '0'}
};
var r = request.post(options);
var form = r.form();
form.append('api_key', '1234');
form.append('api_secret', '1234');
form.append('to', '1234');
r.on('response', function(chunk) {
    console.log(chunk);
});
The response body I get from the r.on method is here: http://pastie.org/private/wshbn9esendccrkoreeiow. I'm unsure how I can see the API response body from the server after submitting the form data. Something like:
{
    "success": true,
    "message": "Fax Sent!"
}
The method request.post() returns a readable stream. Just read the response:
var res = '';
r.on('data', function(data) {
    res += data;
});
r.on('end', function() {
    console.log(res);
});
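If the body is JSON like the Phaxio example above, you can also parse the accumulated string in the 'end' handler (a sketch, assuming the response is valid JSON):
r.on('end', function() {
    var body = JSON.parse(res); // e.g. { "success": true, "message": "Fax Sent!" }
    console.log(body.success, body.message);
});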
You can also pipe the response to another writable stream:
var fs = require('fs');
var writable = fs.createWriteStream('/file');
r.pipe(writable);

Return json object from dojo.xhrget

I am trying to get the json object from a dojo xhrGet call.
What I want is jsonobject = stuff;
I can see the json object in the console, but cannot access it!
var xhrargs = {
    url: "/rest/url",
    handleAs: "json",
    preventCache: false,
    load: function(json_results) {
        console.log(json_results);
        store = json_results;
        return dojo.toJson.json_results;
    },
    error: function(response, ioArgs) {
        console.error(response);
        console.error(response.stack);
    }
};
var deferred = dojo.xhrGet(xhrargs);
console.log("Json is "+JSON.stringify(deferred));
The console.log part that shows the json_results is fine, exactly what I want.
The dojo.xhrXXX methods are asynchronous. This means that the lines following
var deferred = dojo.xhrGet(xhrargs);
will continue to execute while the call to the external endpoint is still processing. This means you need to use the promise API to tell a certain block of code to execute once the XHR request is complete:
var deferred = dojo.xhrGet(xhrargs);
deferred.then(function(result) {
    // this function executes when the deferred is resolved (complete)
    console.log('result of xhr is ', result);
});
Due to the asynchronous nature of the request, for most intents and purposes that value doesn't exist outside the scope of the callback function. One way to structure your code around this is in multiple blocks. For example:
var xhrLoaded = function(results) {
    console.log('results = ', results);
    store = results;
};

var performXhr = function() {
    var xhrargs = {
        url: "/rest/url",
        handleAs: "json",
        preventCache: false,
        error: function(response, ioArgs) {
            console.error(response);
            console.error(response.stack);
        }
    };
    var deferred = dojo.xhrGet(xhrargs);
    deferred.then(xhrLoaded);
};

performXhr();
You can still access variables outside of the scope of the function (for example if store were defined globally).
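A minimal sketch of that, reusing the names above; note that store only has a value after the deferred resolves:
var store; // declared in an enclosing (e.g. global) scope

var xhrLoaded = function(results) {
    store = results; // assigns to the outer variable
};

performXhr();       // starts the request (defined above)
console.log(store); // still undefined here -- the XHR has not resolved yet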
Try this:
var xhrArgs = {
    url: "MethodName.action?Id=" + id,
    handleAs: "json",
    load: function(Data) {
        var values = Data;
        var count = Object.keys(values).length; // gives you the count of keys in the JSON object; in mine it is 0,1,2,3
        for (var i = 0; i < count; i++) {
            var temp = values[i]; // values['name']
            // do something ..
        }
    },
    error: function(error) {
        alert(error);
    }
};
dojo.xhrPost(xhrArgs);

Why is the response to gapi.client.drive.realtime.get empty?

Below is a page that successfully authenticates, then tries to use the drive.realtime.get method to get a JSON export of an existing realtime document in three ways. The results of the console.log calls are shown inline in comments.
The file with id 'EXISTING-FILE-ID' exists and has had content added using the realtime api. I am able to get the JSON exported data in a browser at
https://www.googleapis.com/drive/v2/files/EXISTING-FILE-ID/realtime?access_token=VALID-ACCESS-TOKEN which returns
{"appId":"CLIENT-ID","revision":10,"data":{"id":"root","type":"Map","value":{"blah":{"json":"anything"},"key":{"json":"val"},"key2":{"json":"val2"}}}}
However, in Chrome, Firefox, and Safari, the response to gapi.client.drive.realtime.get and gapi.client.rpcRequest is always empty: {"result":{}}.
In Chrome and Firefox, the body of the response to gapi.client.request is a string of characters that partially changes when the content of the document is changed with the realtime API. This may be some gzipped content (the response headers include content-encoding: "gzip"), but I haven't been able to gunzip it. The etag in the response header also changes when the document changes.
In Safari, the gapi.client.request response body contains the same string of characters as on Chrome and Firefox (eyJH...) but the correct contents of the exported document are shown in the console log, the same as when I use a browser window with the googleapis.com url.
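Incidentally, the body string logged by gapi.client.request below looks like plain base64 rather than gzip (note the x-goog-safety-encoding: base64 response header), and decoding it in the console along these lines seems to confirm that:
var body = 'eyJhcHBJZCI6...'; // the full base64 string copied from the gapi.client.request log below
console.log(JSON.parse(atob(body)));
// -> the same exported JSON the direct export URL returns:
// {"appId": ..., "revision": 10, "data": {"id": "root", "type": "Map", ...}}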
<!DOCTYPE html><html><head>
<script type="text/javascript" src="https://apis.google.com/js/api.js"></script>
<script type="text/javascript">
var fileId = 'EXISTING-FILE-ID';
var start = function() {
    // load apis (then call authorize)
    gapi.load('auth:client,drive-realtime', function() {
        gapi.client.load('drive', 'v2', function() {
            authorize();
        });
    });
};
// authorize with drive scope
var authorize = function() {
    gapi.auth.authorize({
        'client_id': 'CLIENT-ID',
        'scope': ['https://www.googleapis.com/auth/drive',
                  'openid'],
        'immediate': true
    }, function() {
        realtimeget(fileId);
    });
};
// try to get realtime document export in 3 different ways
var realtimeget = function(id) {
    gapi.client.drive.realtime.get({
        'fileId': id
    }).execute(function() {
        console.log(JSON.stringify(arguments));
        // {"0":{"result":{}},"1":"[\n {\n \"id\": \"gapiRpc\",\n \"result\": {}\n }\n]\n"}
    });
    gapi.client.rpcRequest('drive.realtime.get', 'v2', {
        'fileId': id
    }).execute(function() {
        console.log(JSON.stringify(arguments));
        // {"0":{"result":{}},"1":"[\n {\n \"id\": \"gapiRpc\",\n \"result\": {}\n }\n]\n"}
    });
    gapi.client.request({
        'path': '/drive/v2/files/' + id + '/realtime',
        'method': 'GET',
    }).execute(function() {
        console.log('gapi.client.request:');
        console.log(arguments[0]);
        // false
        console.log(arguments[1]);
        // {"gapiRequest":{"data":{"body":"eyJhcHBJZCI6IjEwNjY4MTY3MjA5NzQiLCJyZXZpc2lvbiI6MTAsImRhdGEiOnsiaWQiOiJyb290IiwidHlwZSI6Ik1hcCIsInZhbHVlIjp7ImJsYWgiOnsianNvbiI6ImFueXRoaW5nIn0sImtleSI6eyJqc29uIjoidmFsIn0sImtleTIiOnsianNvbiI6InZhbDIifX19fQ==","headers":{"date":"Thu, 08 Aug 2013 19:17:19 GMT","content-encoding":"gzip","x-goog-safety-encoding":"base64","server":"GSE","etag":"\"Q5ElJByAJoL0etObruYVPRipH1k/fDOlc7uypufY3ROxh-RtfV86Kmg\"","content-type":"text/plain; charset=UTF-8","cache-control":"private, max-age=0, must-revalidate, no-transform","x-goog-safety-content-type":"application/json","content-length":"183","expires":"Thu, 08 Aug 2013 19:17:19 GMT"},"status":200,"statusText":"OK"}}}
    });
};
</script>
</head>
<body onload="start();"></body></html>
We're looking into the issues with the client library, but for now I would recommend just making an XHR GET to the export URL:
var id = '{DOCUMENT ID}';
var accessToken = gapi.auth.getToken()['access_token'];
var xhr = new XMLHttpRequest();
xhr.open('GET', 'https://www.googleapis.com/drive/v2/files/' + id + '/realtime?access_token=' + accessToken);
xhr.onload = function() {
    console.log(xhr.responseText);
};
xhr.onerror = function() {
    // Handle error
};
xhr.send();
If you are just running this inline as is, I think the problem is just that you need to wait for the contents to be saved before you do your get.
Add a DocumentSaveStateChangedEvent listener to your document after making the change, and trigger realtimeget when both isPending and isSaving are false.
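A sketch of that listener, assuming doc is the loaded Realtime document and fileId is the file being exported (event and property names should be checked against the Realtime API docs for your client version):
doc.addEventListener(
    gapi.drive.realtime.EventType.DOCUMENT_SAVE_STATE_CHANGED,
    function(evt) {
        // Export only once nothing is pending or in the middle of being saved.
        if (!evt.isPending && !evt.isSaving) {
            realtimeget(fileId);
        }
    }
);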
Looking at this code, a separate page load wouldn't do anything, since it's creating a new document each time.