Page.createIsolatedWorld grantUniveralAccess flag does not grant universal access - puppeteer

I'm trying to access the contentDocument of a cross-origin iframe using Runtime.evaluate. As far as I understand the docs, this should be possible by creating an execution context with universal access via Page.createIsolatedWorld with grantUniveralAccess: true [1] and passing the returned executionContextId to Runtime.evaluate as contextId. Neither of the attempts below works, though. Any ideas?
The repro assumes a chromium process started with chromium-browser --user-data-dir=/tmp/headless --remote-debugging-port=9000 [2].
// See [3] for full code
const frameId = /* frameId of our page with origin localhost:9000 */;
function execute(command, args) { /* ... send and receive on websocket */ }
const {executionContextId} = await execute("Page.createIsolatedWorld", {
  frameId: frameId,
  grantUniveralAccess: true // NOT grantUniversalAccess. Typo in devtools protocol itself [4].
});
// fails with:
// Access to fetch at 'http://example.com/' from origin 'http://localhost:9000' has been blocked by CORS policy: No 'Access-Control-Allow-Origin' header is present on the requested resource. If an opaque response serves your needs, set the request's mode to 'no-cors' to fetch the resource with CORS disabled.
await execute("Runtime.evaluate", {
  awaitPromise: true,
  expression: `fetch("http://example.com").then(r => r.text())`,
  contextId: executionContextId
});
// fails with:
// Uncaught DOMException: Blocked a frame with origin "http://localhost:9000" from accessing a cross-origin frame.
await execute("Runtime.evaluate", {
  awaitPromise: true,
  expression: `
    new Promise((resolve, reject) => {
      const iframe = document.createElement("iframe");
      iframe.src = "http://example.com";
      iframe.onload = () => resolve(iframe);
      iframe.onerror = reject;
      document.body.append(iframe);
    }).then(iframe => iframe.contentWindow.document)`,
  contextId: executionContextId
});
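One avenue I have not fully explored yet would sidestep universal access entirely: with site isolation, the cross-origin iframe should be its own target, so one could attach to it directly and evaluate there. A sketch (untested; the execute helper from [3] would also need to forward sessionId with each subsequent command):
// Attach to the OOPIF's own target instead of relying on universal access.
const {targetInfos} = await execute("Target.getTargets", {});
const iframeTarget = targetInfos.find(t => t.type === "iframe");
const {sessionId} = await execute("Target.attachToTarget", {
  targetId: iframeTarget.targetId,
  flatten: true // flat protocol: later messages must carry this sessionId
});
// execute() would then need to send {sessionId, method, id, params}.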
[1] I would have expected universal access to allow me to access cross-origin resources the same way the --disable-web-security flag does, which internally grants universal access:
if (!frame_->GetSettings()->GetWebSecurityEnabled()) {
  // Web security is turned off. We should let this document access
  // every other document. This is used primary by testing harnesses for
  // web sites.
  origin->GrantUniversalAccess();
}
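(For comparison, the blunt version of this would be relaunching the whole browser with that flag, e.g. chromium-browser --user-data-dir=/tmp/headless --remote-debugging-port=9000 --disable-web-security; I was hoping grantUniveralAccess would scope the same behaviour down to a single isolated world.)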
[2] Running head-full for easier debugging (e.g. seeing the full CORS error, which is only printed to the console); running with --headless doesn't work either.
[3]
const targets = await fetch("http://localhost:9000/json").then(r => r.json());
const tab = targets.find(t => t.type === "page");
let counter = 0, commands = {};
const w = new WebSocket(tab.webSocketDebuggerUrl);
await new Promise(resolve => { w.onopen = resolve; });
w.onmessage = event => {
  const json = JSON.parse(event.data);
  if (commands[json.id]) commands[json.id](json);
  else console.log(json); // event
};
function execute(method, params) {
  return new Promise((resolve, reject) => {
    const id = counter++;
    commands[id] = ({result, error}) => {
      console.log(method, params, result, error);
      if (error) reject(error);
      else resolve(result);
      // delete commands[id];
    };
    w.send(JSON.stringify({method, id, params}));
  });
}
window.execute = execute;
window.frameId = tab.id;
[4] The parameter name the protocol actually accepts is grantUniveralAccess (missing the 's' of 'Universal'). Easily validated by passing a value of the wrong type (a bool is expected):
// fails with:
// Failed to deserialize params.grantUniveralAccess - BINDINGS: bool value expected at position 69
await execute("Page.createIsolatedWorld", {frameId, grantUniveralAccess: "true"});
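The correctly spelled name, by contrast, produces no error at all; if I understand the protocol's behaviour correctly, unknown parameters are silently ignored, which is presumably why the typo is so easy to miss:
// accepted without complaint, but has no effect
await execute("Page.createIsolatedWorld", {frameId, grantUniversalAccess: true});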

Related

StateToken / POST Request to Apps Script Function

I am building a Woocommerce API integration and am working on the authentication functionality. The authorization endpoint requires a return_url and a callback_url.
I am under the impression that I can use the StateTokenBuilder, similar to how the OAuth2 library works, but I am running into issues trying to implement it based on the Apps Script docs.
Of course, the UrlFetchApp call to the Woocommerce endpoint works fine, but when trying to authorize the app at the endpoint, I get the following error:
Error: An error occurred in the request and at the time were unable to
send the consumer data.
After subsequent tests with Postman, the callback URL and return URL both evaluate to the Google Drive splash page and do not execute the function in the script. I am at a loss for how to access the callback function from another source, as it appears to be accessible only via the browser, while logged in.
Below are my functions:
let endpoint = {
  protocol: 'https',
  base: '{URL}.com'
};
let params = {};
// (the API object is initialized elsewhere, e.g. const API = { Auth: {} })
API.Auth.Authorize = options => {
  endpoint = { ...endpoint, ...{ section: 'wc-auth/v1/authorize' } };
  options = { ...params, ...options };
  let method = 'GET';
  const query =
    method === 'GET' && typeof options === 'object'
      ? Object.keys(options)
          .filter(param => param !== 'payload')
          .map(param => `${param}=${encodeURIComponent(options[param])}`)
          .join('&')
      : options;
  const url = `${endpoint.protocol}://${endpoint.base}/${endpoint.section}${typeof query === 'string' && query.length > 0 ? `?${query}` : ''}`;
  let opts = {
    method: method,
    muteHttpExceptions: true
  };
  return url; // opts is unused for now; only the authorize URL is built
};
const authorize = () => {
  const params = {
    app_name: 'App Name',
    scope: 'read_write',
    user_id: 'abc123',
    return_url: 'https://script.google.com/macros/s/{SCRIPT_ID}/exec',
    callback_url: generateCallbackURL('handleCallback')
  };
  const result = API.Auth.Authorize(params);
  console.log(result);
};
const generateCallbackURL = fn => {
  const script_url = `https://script.google.com/macros/d/${ScriptApp.getScriptId()}`;
  const url_suffix = '/usercallback?state=';
  const state_token = ScriptApp.newStateToken().withMethod(fn).withTimeout(120).createToken();
  const callback_url = `${script_url}${url_suffix}${state_token}`;
  return callback_url;
};
const handleCallback = e => {
  return HtmlService.createHtmlOutput(JSON.stringify(e));
};
I have also tried replacing the handleCallback function with a doPost function to determine whether that would solve the issue. It did not have any effect.
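My understanding is that the return_url (the /exec endpoint) only reaches the script when it is deployed as a web app whose "Who has access" setting allows the caller, e.g. "Anyone, even anonymous" for an external server like WooCommerce. A sketch of what I believe such an entry point would look like:
// Sketch: web-app entry point for the return_url. Assumes the deployment's
// access setting permits anonymous requests; otherwise external callers are
// bounced to a Google login/splash page instead of this function.
function doGet(e) {
  return HtmlService.createHtmlOutput(JSON.stringify(e));
}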

Restify: socket hangup error when copying a file and querying a database using a promise chain

I am using the restify framework to build a small app that copies an uploaded file from its temporary location to a permanent location and then inserts that new location into a MySQL database. However, when attempting to copy the file and then run the promisified query, the system throws a silent error that is not caught by the promise chain, causing a 502 error on the web server end. A minimal working example is below; it has been tested and fails out of the gate.
If either step (copying the file or storing the string in the database) is removed, the silent error disappears and the API response is sent. However, both steps are needed for later file retrieval.
Main Restify File
const restify = require('restify');
const corsMiddleware = require('restify-cors-middleware');
const cookieParser = require('restify-cookies');
const DataBugsDbCredentials = require('./config/config').appdb;
const fs = require('fs');
const { host, port, name, user, pass } = DataBugsDbCredentials;
const database = new (require('./lib/database'))(host, port, name, user, pass);
const server = restify.createServer({
  name: 'insect app'
});
// enable OPTIONS responses in restify
const cors = corsMiddleware({});
server.pre(cors.preflight);
server.use(cors.actual);
// set query and body parsing for access to this information on requests
server.use(restify.plugins.acceptParser(server.acceptable));
server.use(restify.plugins.queryParser({ mapParams: true }));
server.use(restify.plugins.bodyParser({ mapParams: true }));
server.use(cookieParser.parse);
server.post('/test', (req, res, next) => {
  const { files } = req;
  let temporaryFile = files['file'].path;
  let permanentLocation = '/srv/www/domain.com/permanent_location';
  // copy file
  return fs.promises.copyFile(temporaryFile, permanentLocation)
    // insert into database (note the quotes: string values must be quoted in SQL)
    .then(() => database.query(
      `insert into Specimen (
        CollectorId,
        HumanReadableId,
        FileLocation
      ) values (
        1,
        'AAA004',
        '${permanentLocation}'
      )`
    ))
    .then(() => {
      console.log('success!!!');
      return res.send('success!');
    })
    .catch(error => {
      console.error(error);
      return res.send(error);
    });
});
./lib/database.js
'use strict';
const mysql = require('mysql2');

class Database {
  constructor(host, port, name, user, pass) {
    this.connection = this.connect(host, port, name, user, pass);
    this.query = this.query.bind(this);
  }
  /**
   * Connects to a MySQL-compatible database, returning the connection object for later use
   * @param {String} host The host of the database connection
   * @param {Number} port The port for connecting to the database
   * @param {String} name The name of the database to connect to
   * @param {String} user The user name for the database
   * @param {String} pass The password for the database user
   * @return {Object} The database connection object
   */
  connect(host, port, name, user, pass) {
    let connection = mysql.createPool({
      connectionLimit: 20,
      host: host,
      port: port,
      user: user,
      password: pass,
      database: name,
      // debug: true
    });
    connection.on('error', err => console.error(err));
    return connection;
  }
  /**
   * Promisifies database queries for easier handling
   * @param {String} queryString String representing a database query
   * @return {Promise} The results of the query
   */
  query(queryString) {
    return new Promise((resolve, reject) => {
      this.connection.query(queryString, (error, results, fields) => {
        console.log('query callback', queryString);
        console.error('query error', error, queryString);
        if (error) {
          reject(error);
        } else {
          resolve(results);
        }
      });
    });
  }
}

module.exports = Database;
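(As an aside, mysql2 also accepts an array of values for '?' placeholders, which would avoid hand-quoting strings in the insert above; a sketch of how query() could support that:)
// Sketch: optional values parameter, using mysql2 '?' placeholders so string
// values such as file paths need no manual quoting/escaping.
query(queryString, values = []) {
  return new Promise((resolve, reject) => {
    this.connection.query(queryString, values, (error, results) => {
      if (error) reject(error);
      else resolve(results);
    });
  });
}
// usage: database.query('insert into Specimen (CollectorId, HumanReadableId, FileLocation) values (?, ?, ?)', [1, 'AAA004', permanentLocation])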
./testfile.js (used to quickly query the restify API)
'use strict';
const fs = require('fs');
const request = require('request');

let req = request.post({
  url: 'https://api.databugs.net/test',
}, (error, res, addInsectBody) => {
  if (error) {
    console.error(error);
  } else {
    console.log('addInsectBody:', addInsectBody);
  }
});
let form = req.form();
form.append('file', fs.createReadStream('butterfly.jpg'), {
  filename: 'butterfly.jpg',
  contentType: 'multipart/form-data'
});
If the request is made to localhost, an 'ECONNRESET' error is thrown, as shown below:
Error: socket hang up
    at connResetException (internal/errors.js:570:14)
    at Socket.socketOnEnd (_http_client.js:440:23)
    at Socket.emit (events.js:215:7)
    at endReadableNT (_stream_readable.js:1183:12)
    at processTicksAndRejections (internal/process/task_queues.js:80:21) {
  code: 'ECONNRESET'
}
This error is only thrown when both the database query and the file I/O are present in the promise chain. Additionally, the error does not occur if the database request is made first and the file I/O second; however, another rapid request to the server will immediately lead to the 'ECONNRESET' error.
I feel as though I should edit this answer, despite the solution revealing a rookie mistake, in the hope that it may help someone else. I will keep the previous answer below for full transparency, but please note that it is incorrect.
Correct Answer
TL;DR
PM2 restarted the NodeJS service with each new file submitted to and saved by the API. The fix: tell PM2 to ignore the directory that stores the API's files. See this answer.
Long Answer
While the OP did not mention it, my setup utilized PM2 as the NodeJS service manager for the application, and I had turned on the 'watch & reload' feature that restarted the service with each file change. Unfortunately, I had forgotten to instruct PM2 to ignore file changes in the child directory storing new files submitted through the API. As a result, each new file submitted into the API caused the service to reload. If more instructions remained to be executed after storing the file, they were terminated as PM2 restarted the service. The 502 gateway error was a simple result of the NodeJS service becoming temporarily unavailable during this time.
Changing the database transaction to occur first (incorrectly described as a solution below) simply ensured that the service restart happened at the very end, when no other instructions were pending.
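For concreteness, a sketch of the fix (app name and paths are placeholders; the same can be achieved with pm2 start --ignore-watch):
// ecosystem.config.js -- placeholder name/paths
module.exports = {
  apps: [{
    name: 'insect-app',
    script: './server.js',
    watch: true,
    // don't restart the service when the API writes uploaded files here
    ignore_watch: ['/srv/www/domain.com/permanent_location']
  }]
};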
Previous Incorrect Answer
The only solution I had found up to that point was to switch the file I/O and the database query so that the file I/O came last. Additionally, using rename rather than copy prevented rapidly successive API requests from throwing the same error (a database query coming quickly after any file I/O operation other than a rename seemed to be the trigger). Sadly, I did not have a reasonable explanation for the socket hang up in the OP, but below is the code from the OP modified to make it functional.
const restify = require('restify');
const corsMiddleware = require('restify-cors-middleware');
const cookieParser = require('restify-cookies');
const DataBugsDbCredentials = require('./config/config').appdb;
const fs = require('fs');
const { host, port, name, user, pass } = DataBugsDbCredentials;
const database = new (require('./lib/database'))(host, port, name, user, pass);
const server = restify.createServer({
  name: 'insect app'
});
// enable OPTIONS responses in restify
const cors = corsMiddleware({});
server.pre(cors.preflight);
server.use(cors.actual);
// set query and body parsing for access to this information on requests
server.use(restify.plugins.acceptParser(server.acceptable));
server.use(restify.plugins.queryParser({ mapParams: true }));
server.use(restify.plugins.bodyParser({ mapParams: true }));
server.use(cookieParser.parse);
server.post('/test', (req, res, next) => {
  const { files } = req;
  let temporaryFile = files['file'].path;
  let permanentLocation = '/srv/www/domain.com/permanent_location';
  // insert into database first, then move the file
  return database.query(
    `insert into Specimen (
      CollectorId,
      HumanReadableId,
      FileLocation
    ) values (
      1,
      'AAA004',
      '${permanentLocation}'
    )`
  )
    .then(() => fs.promises.rename(temporaryFile, permanentLocation))
    .then(() => {
      console.log('success!!!');
      return res.send('success!');
    })
    .catch(error => {
      console.error(error);
      return res.send(error);
    });
});
You did not handle the promise returned by the database query with then/catch:
Main Restify File
const restify = require('restify');
const corsMiddleware = require('restify-cors-middleware');
const cookieParser = require('restify-cookies');
const DataBugsDbCredentials = require('./config/config').appdb;
const fs = require('fs');
const { host, port, name, user, pass } = DataBugsDbCredentials;
const database = new (require('./lib/database'))(host, port, name, user, pass);
const server = restify.createServer({
  name: 'insect app'
});
// enable OPTIONS responses in restify
const cors = corsMiddleware({});
server.pre(cors.preflight);
server.use(cors.actual);
// set query and body parsing for access to this information on requests
server.use(restify.plugins.acceptParser(server.acceptable));
server.use(restify.plugins.queryParser({ mapParams: true }));
server.use(restify.plugins.bodyParser({ mapParams: true }));
server.use(cookieParser.parse);
server.post('/test', (req, res, next) => {
  const { files } = req;
  let temporaryFile = files['file'].path;
  let permanentLocation = '/srv/www/domain.com/permanent_location';
  // copy file
  return fs.promises.copyFile(temporaryFile, permanentLocation)
    // insert into database
    .then(() => {
      // Your database class instance query method returns a promise,
      // so it must be returned and handled here as well
      return database.query(
        `insert into Specimen (
          CollectorId,
          HumanReadableId,
          FileLocation
        ) values (
          1,
          'AAA004',
          '${permanentLocation}'
        )`
      ).then(() => {
        console.log('success!!!');
        return res.send('success!');
      }).catch(error => {
        console.error('Inner database promise error', error);
        return res.send(error);
      });
    }).catch(error => {
      console.error('Outer fs.copyFile promise error', error);
      return res.send(error);
    });
});

Detect MetaMask logout (Ethereum)

I've looked at the documentation here https://metamask.github.io/metamask-docs/Main_Concepts/Getting_Started
But I'm not sure how to detect a user logging out of MetaMask?
window.ethereum.on('accountsChanged', (accounts) => {
  // If the user has locked or logged out of MetaMask, this resets the accounts array to empty
  if (!accounts.length) {
    // logic to handle what happens once MetaMask is locked
  }
});
Thus, using the above, you can detect a MetaMask lock or logout.
window.ethereum.on('accountsChanged', function (accounts) {
  let acc = accounts[0]; // acc will be undefined if they logged out
});
From the MetaMask Ethereum Provider API docs:
ethereum.on('accountsChanged', handler: (accounts: Array<string>) => void);
The MetaMask provider emits this event whenever the return value of the eth_accounts RPC method changes. eth_accounts returns an array that is either empty or contains a single account address. The returned address, if any, is the address of the most recently used account that the caller is permitted to access. Callers are identified by their URL origin, which means that all sites with the same origin share the same permissions.
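Based on that description, the lock state can also be checked on demand by calling the same RPC method directly (a small sketch):
// Resolves to true when MetaMask is unlocked and this origin currently has
// access to at least one account.
async function hasAccountAccess() {
  const accounts = await window.ethereum.request({ method: "eth_accounts" });
  return accounts.length > 0;
}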
The MetaMask documentation suggests refreshing the page when the account or chain changes:
const setAccountListener = (provider) => {
  provider.on("accountsChanged", (_) => window.location.reload());
  provider.on("chainChanged", (_) => window.location.reload());
};
Then call this in a useEffect:
useEffect(() => {
  // Load provider
  if (provider) {
    // ...
    setAccountListener(provider);
    // add more logic
  } else {
    console.error("Please, install Metamask.");
  }
}, []);
New Feature: _metamask.isUnlocked()
MetaMask exposes the experimental _metamask.isUnlocked() method on the ethereum object.
const reload = () => window.location.reload();

const handleAccount = (ethereum) => async () => {
  const isLocked = !(await ethereum._metamask.isUnlocked());
  if (isLocked) {
    reload();
  }
};

// Keep a reference to the registered handler: removeListener only removes a
// listener when given the exact same function that was passed to on().
let accountHandler;

const setListener = (ethereum) => {
  accountHandler = handleAccount(ethereum);
  ethereum.on("chainChanged", reload);
  ethereum.on("accountsChanged", accountHandler);
};

const removeListener = (ethereum) => {
  ethereum.removeListener("chainChanged", reload);
  ethereum.removeListener("accountsChanged", accountHandler);
};
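One possible way to wire these up in a React component (a sketch mirroring the useEffect pattern above):
// Register on mount, clean up on unmount so handlers are not added twice.
useEffect(() => {
  if (window.ethereum) {
    setListener(window.ethereum);
    return () => removeListener(window.ethereum);
  }
}, []);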

The action did not produce a valid response and exited unexpectedly

I want to call a Node-RED flow from IBM Cloud Functions.
const https = require('https');

function main(params) {
  const path = "/" + params.route + "?" + params.query_params;
  const options = {
    hostname: params.hostname,
    path: path,
    port: 443,
    method: 'GET'
  };
  return new Promise((resolve, reject) => {
    https.get(options, (resp) => {
      resp.on('data', (d) => {
        let s = d.toString();
        const obj = JSON.parse(s);
        resolve({ "gw_result": obj });
      });
    });
  });
}
In the Node-RED flow I'm using an HTTP request node to get data from another server. For test purposes I used a GET request to google.com, but I get the same results using another Node-RED endpoint.
As soon as I invoke the web action I get the error message "The action did not produce a valid response and exited unexpectedly". The output of the Node-RED flow appears some seconds later in the web action's log, although the Node-RED flow works properly and promptly (I used Node-RED debug nodes to check this).
The https GET request to Node-RED works well when I replace the HTTP request node in Node-RED with something else, e.g. a Function node, even when I use a Delay node to delay the response for a second or so.
This code works, although google.com does not return an object, of course.
var rp = require('request-promise');

function main(params) {
  var uri = params.hostname + params.route + params.query_params;
  return new Promise(function (resolve, reject) {
    rp(uri)
      .then(function (parsedBody) {
        const obj = JSON.parse(parsedBody);
        resolve({ "gw_result": obj });
      })
      .catch(function (err) {
        resolve({ message: 'failed!!', error: err.toString() });
      });
  });
}
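A likely explanation for the difference (an educated guess from the symptoms): https emits 'data' once per chunk, so the original code can call JSON.parse on a partial body, and the resulting uncaught exception ends the action without a valid response, while request-promise buffers the whole body before resolving. A sketch of the plain-https variant with chunk accumulation, under that assumption:
const https = require('https');

function main(params) {
  const options = {
    hostname: params.hostname,
    path: "/" + params.route + "?" + params.query_params,
    port: 443,
    method: 'GET'
  };
  return new Promise((resolve, reject) => {
    https.get(options, (resp) => {
      let body = '';
      resp.on('data', (d) => { body += d; }); // accumulate every chunk
      resp.on('end', () => {
        try {
          resolve({ "gw_result": JSON.parse(body) }); // parse only when complete
        } catch (err) {
          reject(err);
        }
      });
    }).on('error', reject);
  });
}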

Why can't I patch, update, or delete an AppPackage that I created?

I am trying to change the required engine version of an AppPackage that I have posted using v2 of the Design Automation API.
I've tried using Postman and the Forge Node Client. I'm using the Forge documentation as a reference.
https://forge.autodesk.com/en/docs/design-automation/v2/reference/http/AppPackages(':id')-PATCH/
My credentials are correct and I have a valid token, but for some reason I keep getting a 404 Not Found status and an error that says "AppPackage with the name MyPlugin doesn't belong to you. You cannot operate on AppPackage you do not own." I get the same message when I try to delete or update the AppPackage.
That's really weird, because I definitely own this AppPackage: I uploaded it with these same credentials, and I can view it with a GET request that lists all of my AppPackages. Furthermore, the name of the AppPackage is correct, and I specified the right scope (code:all) when I authenticated.
Why does Design Automation think this AppPackage doesn't belong to me and why can't I patch, update, or delete it?
UPDATE 3/28/2019: Setting the Resource value still results in the same error.
UPDATE 4/2/2019: Getting a fresh upload URL doesn't work either; I get an internal server error saying "Object reference not set to an instance of an object."
const ForgeSDK = require('forge-apis');

const oAuth2TwoLegged = new ForgeSDK.AuthClientTwoLegged(FORGE_CLIENT_ID, FORGE_CLIENT_SECRET, SCOPES);
const appPackageApi = new ForgeSDK.AppPackagesApi();

const getToken = () => {
  return oAuth2TwoLegged.authenticate();
};

const getUploadURL = () => {
  return appPackageApi.getUploadUrl(oAuth2TwoLegged, oAuth2TwoLegged.getCredentials());
};

const patchPackage = (id, url) => {
  const appPack = {
    Resource: url,
    RequiredEngineVersion: APP_PACKAGE_REQUIRED_ENGINE
  };
  return appPackageApi.patchAppPackage(id, appPack, oAuth2TwoLegged, oAuth2TwoLegged.getCredentials());
};

(async () => {
  try {
    const token = await getToken();
    const url = await getUploadURL();
    const patchPackRes = await patchPackage(APP_PACKAGE_ID, url);
    if (patchPackRes.statusCode == 201)
      console.log('Patch package succeeded!');
    else
      console.log('Patch package failed! ' + patchPackRes.statusCode);
  } catch (ex) {
    console.log('Exception :(');
    console.log(ex);
  }
})();
When calling PATCH, the "Resource" property must be set. It can be the same URL as the one you receive from GET, but it must be present and valid.
This should work:
const ForgeSDK = require('forge-apis');

const oAuth2TwoLegged = new ForgeSDK.AuthClientTwoLegged(FORGE_CLIENT_ID, FORGE_CLIENT_SECRET, SCOPES);
const appPackageApi = new ForgeSDK.AppPackagesApi();

const getToken = () => {
  return oAuth2TwoLegged.authenticate();
};

// Reuse the package's existing Resource URL from GET instead of a fresh upload URL
const getUploadURL = async (id) => {
  const app = await appPackageApi.getAppPackage(id, oAuth2TwoLegged, oAuth2TwoLegged.getCredentials());
  return app.body.Resource;
};

const patchPackage = (id, url) => {
  const appPack = {
    Resource: url,
    RequiredEngineVersion: APP_PACKAGE_REQUIRED_ENGINE
  };
  return appPackageApi.patchAppPackage(id, appPack, oAuth2TwoLegged, oAuth2TwoLegged.getCredentials());
};

(async () => {
  try {
    const token = await getToken();
    const url = await getUploadURL(APP_PACKAGE_ID);
    const patchPackRes = await patchPackage(APP_PACKAGE_ID, url);
    if (patchPackRes.statusCode == 201)
      console.log('Patch package succeeded!');
    else
      console.log('Patch package failed! ' + patchPackRes.statusCode);
  } catch (ex) {
    console.log('Exception :(');
    console.log(ex);
  }
})();