I have an electron js application:
const path = require('path');
const url = require('url');
const {app, BrowserWindow, ipcMain, nativeTheme, globalShortcut} = require('electron');

let win;

function createWindow() {
  win = new BrowserWindow({
    width: 1024,
    height: 1024,
    icon: path.join(__dirname, "web/img/app.png"),
    fullscreen: true,
    autoHideMenuBar: true,
    webPreferences: {
      preload: path.join(__dirname, "web/js/preload.js"),
    }
  });
  win.loadURL(url.format({
    pathname: path.join(__dirname, "web/html/index.html"),
    protocol: 'file',
    slashes: true
  }));
  win.webContents.openDevTools();
  win.on('closed', () => {
    win = null;
  });
  ipcMain.on('sys-shutdown', () => {
    app.quit();
  });
}

app.on('ready', createWindow);

app.on('window-all-closed', () => {
  if (process.platform !== 'darwin') {
    app.quit();
  }
});
render.js:
window.onload = function() {
  console.log("LOAD!");
  const sys_shutdown = document.getElementById("SysShutdown");
  sys_shutdown.addEventListener('click', async () => {
    console.log('hello');
    await window.sys.shutdown();
  });
}
preload.js:
const { contextBridge, ipcRenderer } = require('electron');

contextBridge.exposeInMainWorld('sys', {
  shutdown: () => ipcRenderer.send('sys-shutdown')
});
When I start the project, I get this warning in the console:

Electron Security Warning (Insecure Content-Security-Policy) This renderer process has either no Content Security Policy set or a policy with "unsafe-eval" enabled. This exposes users of this app to unnecessary security risks.
For more information and help, consult https://electronjs.org/docs/tutorial/security.
This warning will not show up once the app is packaged.
I tried googling, but none of the solutions worked for me. I also tried allowing everything in the policy, but that didn't help either; I don't know what else to try.
P.S. The console.log("LOAD!") line runs fine, but nothing after it seems to execute.
Please help.
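For context, the warning itself is informational: it only means that no Content-Security-Policy is applied to the loaded page, and it does not by itself stop renderer code from running. A minimal sketch of setting a policy from the main process via Electron's session.webRequest API (assuming default-src 'self' is enough for this app; the policy string is just an example):

const { session } = require('electron');

app.whenReady().then(() => {
  // attach a Content-Security-Policy header to every response served to the renderer
  session.defaultSession.webRequest.onHeadersReceived((details, callback) => {
    callback({
      responseHeaders: {
        ...details.responseHeaders,
        'Content-Security-Policy': ["default-src 'self'"]
      }
    });
  });
});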
Related
I'm trying to watch 2 sets of folders. One set just requires a browser reload on change. The second requires "reinitializing the framework" via a separate, background HTTP GET before doing the reload.
My current attempt handles this properly once, but only once. Can you tell me both why and how to fix it? The troublesome portion is in the second watch task.
const gulp = require('gulp');
const axios = require("axios");
const bs = require('browser-sync').create();
const { reload } = bs;
const url = "http://127.0.0.1:80/healthcheck?fwreinit=1";

var paths = {
  refresh: [
    "./layouts/**/*.*",
    "./views/**/*.*",
    "./includes/**/*.js",
    "./includes/**/*.css"
  ],
  reinit: [
    "./handlers/**/*.*",
    "./models/**/*.*",
    "./interceptors/**/*.*",
    "./config/**/*.*"
  ]
}

gulp.task('watch', () => {
  gulp.watch(paths.refresh, (done) => {
    reload();
    done();
  });
  gulp.watch(paths.reinit, () => {
    console.log("Reinitializing framework");
    axios.get(url)
      .then(response => {
        console.log(response.data.trim());
        reload();
      })
      .catch(error => {
        console.log("Error: Please ensure you have a /healthcheck route set up in /config/router.cfc!");
        console.log("Error: Once you've done that, please shut down commandbox then try browsersync again.");
      });
  });
});

gulp.task('proxy', () => {
  bs.init({
    proxy: "localhost:80",
    port: 81,
    open: true,
    notify: false
  });
});

gulp.task('default', gulp.parallel('watch', 'proxy'));
gulp.task('proxy', () => {
bs.init({
proxy: "localhost:80",
port: 81,
open: true,
notify: false
});
});
gulp.task('default', gulp.parallel('watch', 'proxy'));
gulp.watch passes a "done" callback to each handler, and gulp will not run that handler again until it has been called; since the reinit handler never called it, it only fired once. Changing the code to the following solved the problem.
const gulp = require('gulp');
const axios = require("axios");
const bs = require('browser-sync').create();
const { reload } = bs;
const url = "http://127.0.0.1:80/healthcheck?fwreinit=1";

var paths = {
  refresh: [
    "./layouts/**/*.*",
    "./views/**/*.*",
    "./includes/**/*.js",
    "./includes/**/*.css"
  ],
  reinit: [
    "./handlers/**/*.*",
    "./models/**/*.*",
    "./interceptors/**/*.*",
    "./config/**/*.*"
  ]
}

gulp.task('watch', () => {
  gulp.watch(paths.refresh, (done) => {
    reload();
    done();
  });
  gulp.watch(paths.reinit, (done) => {
    console.log("Reinitializing framework");
    axios.get(url)
      .then(response => {
        console.log(response.data.trim());
        reload();
        done();
      })
      .catch(error => {
        console.log("Error: Please ensure you have a /healthcheck route set up in /config/router.cfc!");
        console.log("Error: Once you've done that, please shut down commandbox then try browsersync again.");
        done(); // complete the handler on failure too, so the watcher keeps firing
      });
  });
});

gulp.task('proxy', () => {
  bs.init({
    proxy: "localhost:80",
    port: 81,
    open: true,
    notify: false
  });
});

gulp.task('default', gulp.parallel('watch', 'proxy'));
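A side note on the fix: instead of calling done() by hand, the watch handler can also be declared async (or return a promise), since gulp treats a returned promise as the completion signal as well. A rough sketch of that variant, reusing the same paths and url as above:

gulp.watch(paths.reinit, async () => {
  console.log("Reinitializing framework");
  try {
    const response = await axios.get(url);
    console.log(response.data.trim());
    reload();
  } catch (error) {
    console.log("Error: Please ensure you have a /healthcheck route set up in /config/router.cfc!");
  }
  // when the async function resolves, gulp knows the handler is finished
});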
I am using angular-6-social-login in my Angular app for Google login. It's working fine, but I want to specify a hosted domain so that only users from two specified domains can log in. Currently this module does not support this functionality. Is there any way to override it?
existing code:
GoogleLoginProvider.prototype.initialize = function () {
  var _this = this;
  return new Promise(function (resolve, reject) {
    _this.loadScript(_this.loginProviderObj, function () {
      gapi.load('auth2', function () {
        _this.auth2 = gapi.auth2.init({
          client_id: _this.clientId,
          scope: 'email'
        });
        _this.auth2.then(function () {
          if (_this.auth2.isSignedIn.get()) {
            resolve(_this.drawUser());
          }
        });
      });
    });
  });
};
my requirement:
GoogleLoginProvider.prototype.initialize = function () {
  var _this = this;
  return new Promise(function (resolve, reject) {
    _this.loadScript(_this.loginProviderObj, function () {
      gapi.load('auth2', function () {
        _this.auth2 = gapi.auth2.init({
          client_id: _this.clientId,
          scope: 'email',
          hosted_domain: 'abc.com'
        });
        _this.auth2.then(function () {
          if (_this.auth2.isSignedIn.get()) {
            resolve(_this.drawUser());
          }
        });
      });
    });
  });
};
The angular-6-social-login package doesn't support custom configuration to specify the hosted domain.
Instead, you can change the angular-6-social-login.umd.js file in node_modules.
Change the GoogleLoginProvider.prototype.initialize function and add the details below to the gapi.auth2.init call:
1. scope should be 'email'
2. fetch_basic_profile: false
3. hosted_domain: your domain.
gapi.load('auth2', function () {
  _this.auth2 = gapi.auth2.init({
    client_id: _this.clientId,
    scope: 'email',
    fetch_basic_profile: false,
    hosted_domain: <your-domain>
  });
  _this.auth2.then(function () {
    if (_this.auth2.isSignedIn.get()) {
      resolve(_this.drawUser());
    }
  });
});
This will work for the specified domain.
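One caveat worth noting: hosted_domain mainly restricts the Google account chooser on the client side, and it only takes a single domain, while the question mentions two. A rough client-side sketch of double-checking the signed-in user's domain against a list (assuming GoogleUser.getHostedDomain() is available, and with abc.com / xyz.com standing in for your two allowed domains):

_this.auth2.then(function () {
  var allowedDomains = ['abc.com', 'xyz.com']; // hypothetical list of the two permitted domains
  var user = _this.auth2.currentUser.get();
  if (user.isSignedIn() && allowedDomains.indexOf(user.getHostedDomain()) === -1) {
    // user signed in from a non-approved domain, sign them out again
    _this.auth2.signOut();
  }
});

Any real enforcement should still happen on the server, for example by validating the hd claim of the Google ID token.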
The second option: you can use angularx-social-login, which lets you configure the hosted domain through its configuration options.
I'm new to Puppeteer and Node, trying to use a proxy with Puppeteer in order to collect requests and responses, and hopefully also websocket communication, but so far I couldn't get anything to work.
I'm trying the following code:
const puppeteer = require('puppeteer');
const httpProxy = require('http-proxy');
const url = require('url');

let runProxy = async () => {
  // raise a proxy and start collecting req.url/response.statusCode
};

let run = async () => {
  await runProxy();
  const browser = await puppeteer.launch({
    headless: false,
    args: ['--start-fullscreen',
      '--proxy-server=localhost:8096']
  });
  const page = await browser.newPage();
  await page.setViewport({ width: 1920, height: 1080 });
  await page.goto('http://www.google.com',
    { waitUntil: 'networkidle2', timeout: 120000 });
};

run();
I've tried some variations from https://github.com/nodejitsu/node-http-proxy but nothing seems to work for me; some guidance is needed, thanks.
Try this: use https-proxy-agent or http-proxy-agent to proxy requests per page. The example below is TypeScript and uses a crawler framework's decorators:
import {Job, Launcher, OnStart, PuppeteerUtil, PuppeteerWorkerFactory} from "../..";
import {Page} from "puppeteer";

class TestTask {

  @OnStart({
    urls: [
      "https://www.google.com",
      "https://www.baidu.com",
      "https://www.bilibili.com",
    ],
    workerFactory: PuppeteerWorkerFactory
  })
  async onStart(page: Page, job: Job) {
    await PuppeteerUtil.defaultViewPort(page);
    await PuppeteerUtil.useProxy(page, "http://127.0.0.1:2007");
    await page.goto(job.url);
    console.log(await page.evaluate(() => document.title));
  }

}

@Launcher({
  workplace: __dirname + "/workplace",
  tasks: [
    TestTask
  ],
  workerFactorys: [
    new PuppeteerWorkerFactory({
      headless: false,
      devtools: true
    })
  ]
})
class App {}
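If the goal is only to collect requests, responses, and websocket traffic, an external proxy may not be needed at all: stock Puppeteer exposes page-level request/response events, and websocket frames can be read through a DevTools protocol session. A minimal sketch in plain JavaScript, no extra framework (the Network.webSocketFrame* events are Chrome DevTools Protocol events):

const puppeteer = require('puppeteer');

(async () => {
  const browser = await puppeteer.launch({ headless: false });
  const page = await browser.newPage();

  // log HTTP requests and responses as they happen
  page.on('request', req => console.log('>>', req.method(), req.url()));
  page.on('response', res => console.log('<<', res.status(), res.url()));

  // websocket frames are only visible through the DevTools protocol
  const client = await page.target().createCDPSession();
  await client.send('Network.enable');
  client.on('Network.webSocketFrameSent', e => console.log('ws out:', e.response.payloadData));
  client.on('Network.webSocketFrameReceived', e => console.log('ws in:', e.response.payloadData));

  await page.goto('http://www.google.com', { waitUntil: 'networkidle2', timeout: 120000 });
})();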
I'm trying to test an event filter, but there's a timing issue that I'm not sure how to resolve. Other than wrapping the REST request in a setTimeout, how could I get this working?
const app = require('../../src/app');
const feathers = require('feathers/client');
const socketio = require('feathers-socketio/client');
const hooks = require('feathers-hooks');
const io = require('socket.io-client');
const rp = require('request-promise');

const service = app.service('users');
let server = null;

describe('\'users\' service', () => {
  beforeEach((done) => {
    server = app.listen('3030');
    server.once('listening', done);
  });

  afterEach((done) => {
    server.close(done);
  });

  it('returns stuff #test', (done) => {
    const socket = io('http://localhost:3030');
    const app = feathers()
      .configure(hooks())
      .configure(socketio(socket));
    const messageService = app.service('users');

    messageService.on('created', message => {
      console.log('Created a message', message);
      done();
    });

    socket.on('connection', () => {
      //
      // The messageService is not connected yet
      // so messages.filters.js will not fire
      //
      // Giving it a chance to connect with setTimeout does work...
      // setTimeout(() => {
      rp({
        method: 'POST',
        url: 'http://localhost:3030/users',
        body: {
          test: 'Message from REST'
        },
        json: true
      });
      // }, 500);
    });
  });
});
I have tried replacing the socket.on with these as well:
messageService.on('connection'
service.on('connection' (based on Node.js EventEmitter)
and so on...
Edit
I have since found that service.on('newListener') works, however it is being triggered many times. I need to track down the single connection:
const messageService = app.service('users');

messageService.on('created', message => {
  console.log('Created a message', message);
  done();
});
It's simply service.on('newListener').
https://nodejs.org/api/events.html#events_event_newlistener
Listeners registered for the 'newListener' event will be passed the event name and a reference to the listener being added.
However, when I implemented this, I found that it listens for 5 different events. So you need to filter those down:
service.on('newListener', (event, listener) => {
  if (event === 'created') {
    rp({
      method: 'POST',
      url: 'http://localhost:3030/users',
      body: {
        test: 'Message from REST'
      },
      json: true
    });
  }
});
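If 'newListener' still fires more than once for 'created' in your setup (for example when several 'created' listeners get registered), a simple guard keeps the test POST from being sent repeatedly. A small sketch reusing the rp call from above:

let requestSent = false;

service.on('newListener', (event, listener) => {
  if (event === 'created' && !requestSent) {
    requestSent = true; // make sure the POST is only triggered once
    rp({
      method: 'POST',
      url: 'http://localhost:3030/users',
      body: {
        test: 'Message from REST'
      },
      json: true
    });
  }
});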
I'm currently using gulp-webserver with the following config.
gulp.task('webserver', ['nodejs'], function () {
  return gulp.src(config.client)
    .pipe(webserver({
      livereload: true,
      port: 8001,
      proxies: [
        {
          source: '/api',
          target: 'http://localhost:3007/api'
        }
      ],
      directoryListing: false,
      open: true
    }));
});
So it serves the static files and proxies requests starting with /api to a Node.js app on another port.
I now want to do the same with Browsersync (instead of using livereload, so I can get rid of gulp-webserver), but I can't find out how to configure this.
Thanks for giving some guidance.
OK, I found a potential solution (please comment if you have improvements):
var browserSync = require('browser-sync');
var httpProxy = require('http-proxy');

var proxy = httpProxy.createProxyServer({});

gulp.task('bs', ['nodejs'], function () {
  browserSync({
    server: {
      baseDir: "./client"
    },
    middleware: function (req, res, next) {
      var url = req.url;
      if (url.substring(0, 5) === "/api/") {
        proxy.web(req, res, { target: 'http://localhost:3007' });
      } else {
        next();
      }
    }
  });
});
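A possible refinement (an untested sketch, assuming the http-proxy-middleware package with its v2-style createProxyMiddleware export is installed): Browsersync's middleware option also accepts an array, so a ready-made proxy middleware can replace the hand-rolled /api check:

var browserSync = require('browser-sync');
var { createProxyMiddleware } = require('http-proxy-middleware');

gulp.task('bs', ['nodejs'], function () {
  browserSync({
    server: {
      baseDir: "./client"
    },
    // forward anything under /api to the Node.js app on port 3007
    middleware: [createProxyMiddleware('/api', { target: 'http://localhost:3007' })]
  });
});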