I have tried to run Google Chrome Puppeteer on Heroku, GAE Flex, DigitalOcean, and Vultr, but on the route where I use Puppeteer the instance just hangs until it errors with a 502. I'm using this simple code:
var express = require('express');
var router = express.Router();
const puppeteer = require('puppeteer');

router.get('/', function(req, res, next) {
  (async () => {
    const browser = await puppeteer.launch();
    const page = await browser.newPage();
    await page.goto('http://example.com');
    await page.screenshot({path: 'example.png'});
    browser.close();
    res.send('screen grabbed');
  })();
});

module.exports = router;
I know puppeteer works with node 6 now but I'm setting package.json as follows:
{
  "name": "puppeteer",
  "version": "0.0.0",
  "private": true,
  "scripts": {
    "start": "node ./bin/www",
    "deploy": "gcloud app deploy --version dev"
  },
  "engines": {
    "node": "8.4.0"
  },
  "dependencies": {
    "body-parser": "~1.17.1",
    "cookie-parser": "~1.4.3",
    "debug": "~2.6.3",
    "express": "~4.15.2",
    "morgan": "~1.8.1",
    "pug": "~2.0.0-beta11",
    "puppeteer": "^0.10.2",
    "serve-favicon": "~2.4.2"
  }
}
On Vultr (Ubuntu 16.04 x64) I get the following error:
Error: Failed to launch chrome!
/root/mysite/node_modules/puppeteer/.local-chromium/linux-497674/chrome-linux/chrome: error while loading shared libraries: libpangocairo-1.0.so.0: cannot open shared object file: No such file or directory
I solved this by making sure all of the dependencies Puppeteer needs were installed:
sudo apt-get install gconf-service libasound2 libatk1.0-0 libc6 libcairo2 libcups2 libdbus-1-3 libexpat1 libfontconfig1 libgcc1 libgconf-2-4 libgdk-pixbuf2.0-0 libglib2.0-0 libgtk-3-0 libnspr4 libpango-1.0-0 libpangocairo-1.0-0 libstdc++6 libx11-6 libx11-xcb1 libxcb1 libxcomposite1 libxcursor1 libxdamage1 libxext6 libxfixes3 libxi6 libxrandr2 libxrender1 libxss1 libxtst6 ca-certificates fonts-liberation libappindicator1 libnss3 lsb-release xdg-utils wget
Then pass args to the puppeteer launch command like so:
(async () => {
  const browser = await puppeteer.launch({args: ['--no-sandbox', '--disable-setuid-sandbox']});
  const page = await browser.newPage();
  await page.goto('http://example.com');
  await page.screenshot({path: 'example.png'});
  await browser.close();
  res.send('screen grabbed');
})();
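For reference, here is a rough sketch (not from the original answer) of how those launch arguments can be wired into the Express route from the question, with a try/catch so a failed Chrome launch returns an error response instead of hanging until the 502; the route path and response text are just placeholders:

const express = require('express');
const puppeteer = require('puppeteer');
const router = express.Router();

router.get('/', async (req, res) => {
  let browser;
  try {
    // These flags are needed when Chrome runs as root on these hosts
    browser = await puppeteer.launch({args: ['--no-sandbox', '--disable-setuid-sandbox']});
    const page = await browser.newPage();
    await page.goto('http://example.com');
    await page.screenshot({path: 'example.png'});
    res.send('screen grabbed');
  } catch (err) {
    // Surface the launch/navigation error instead of letting the request hang
    res.status(500).send(err.message);
  } finally {
    if (browser) await browser.close();
  }
});

module.exports = router;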
I tried the above answer, but had no luck on a DigitalOcean Ubuntu droplet. So I installed google-chrome-stable on the droplet instead, and that works.
Add Key:
wget -q -O - https://dl-ssl.google.com/linux/linux_signing_key.pub | sudo apt-key add -
Set repository:
echo 'deb [arch=amd64] http://dl.google.com/linux/chrome/deb/ stable main' | sudo tee /etc/apt/sources.list.d/google-chrome.list
Install package:
sudo apt-get update
sudo apt-get install google-chrome-stable
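With the system Chrome installed, one approach (a sketch, assuming the default install path of google-chrome-stable) is to point Puppeteer at that binary via the executablePath launch option instead of the bundled Chromium:

const puppeteer = require('puppeteer');

(async () => {
  const browser = await puppeteer.launch({
    // Assumed path; apt installs google-chrome-stable here on Ubuntu
    executablePath: '/usr/bin/google-chrome-stable',
    args: ['--no-sandbox', '--disable-setuid-sandbox']
  });
  const page = await browser.newPage();
  await page.goto('http://example.com');
  await page.screenshot({path: 'example.png'});
  await browser.close();
})();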
Related
I've installed @nomiclabs/hardhat-etherscan from npm, but the verify command is still not present. When I try to verify a contract I get an error saying the verify task does not exist.
This is my hardhat.config.js file:
/**
 * @type import('hardhat/config').HardhatUserConfig
 */
require('dotenv').config();
require("@nomiclabs/hardhat-ethers");

const { API_URL, PRIVATE_KEY, ETHERSCAN } = process.env;

module.exports = {
  solidity: "0.8.9",
  defaultNetwork: "polygon_mumbai",
  networks: {
    hardhat: {},
    polygon_mumbai: {
      url: API_URL,
      accounts: [`0x${PRIVATE_KEY}`]
    }
  },
  etherscan: {
    apiKey: ETHERSCAN
  }
};
When I try to verify I get the error "Error HH303: Unrecognized task verify"
When I run the command npx hardhat I don't see any verify task on the menu
I've tried installing the Etherscan verify plugin for Hardhat using npm install --save-dev @nomiclabs/hardhat-etherscan
I forgot to import the plugin in hardhat.config.js; the solution is to require it before running the command:
require("@nomiclabs/hardhat-etherscan");
I have the following Dockerfile:
FROM ubuntu:bionic
RUN apt-get update -y && apt-get install -y nginx
EXPOSE 80
CMD ["/usr/sbin/nginx", "-g", "daemon off;"]
Building the image and running it with the following commands works:
$ docker build -t web:1.0 .
$ docker run -itd -p 8200:80 --name webserver1 web:1.0
I made an equivalent Packer file as follows:
packer {
  required_plugins {
    docker = {
      version = ">= 1.0.1"
      source  = "github.com/hashicorp/docker"
    }
  }
}

source "docker" "ubuntu" {
  image  = "ubuntu:bionic"
  commit = true
  changes = [
    "EXPOSE 80",
    "CMD [\"/usr/sbin/nginx\", \"-g\", \"daemon off;\"]"
  ]
}

build {
  name = "learn-packer"
  sources = [
    "source.docker.ubuntu"
  ]

  provisioner "shell" {
    inline = [
      "apt-get -y update",
      "apt-get install -y nginx",
    ]
  }

  post-processors {
    post-processor "docker-tag" {
      repository = var.docker_repo
      tags       = ["1.1"]
    }
  }
}
I'm creating the image with packer build . (note the trailing dot). When I run the resulting image through Docker
docker run -itd -p 8201:80 --name webserver2 xxx/web:1.1
I get the following error:
usr/sbin/nginx: 1: usr/sbin/nginx: Syntax error: ")" unexpected
When I run the command php artisan migrate, I get an error:
Illuminate\Database\QueryException : could not find driver (SQL:
select * from information_schema.tables where table_schema = loag and
table_name = migrations)
I have set up a Vagrant environment with Ubuntu 18.04, PHP 7.2, and MySQL. I edited the php.ini file and did all of the 1000 things suggested on 100 sites on the web, but I'm confused that the migration still doesn't work.
composer create-project laravel/laravel projectname
config/database.php
'mysql' => [
    'driver' => 'mysql',
    'host' => '127.0.0.1', // env('DB_HOST', '127.0.0.1'),
    'port' => '3306', // env('DB_PORT', '3306'),
    'database' => 'loag', // env('DB_DATABASE', 'forge'),
    'username' => 'root', // env('DB_USERNAME', 'forge'),
    'password' => 'root', // env('DB_PASSWORD', ''),
    'unix_socket' => '/etc/mysql/mysql.sock', // env('DB_SOCKET', ''),
    'charset' => 'utf8mb4',
    'collation' => 'utf8mb4_unicode_ci',
    'prefix' => '',
    'prefix_indexes' => true,
    'strict' => true,
    'engine' => null,
],
.env
APP_NAME="Laendliche Ostbahnen AG"
APP_ENV=local
APP_KEY=base64:SKuPKct0ug16L4DTEvcFD59YuHKf8znDmQrqG973L6w=
APP_DEBUG=true
APP_URL=http://localhost
LOG_CHANNEL=stack
DB_CONNECTION=mysql
DB_HOST=127.0.0.1
DB_PORT=3306
DB_DATABASE=loag
DB_USERNAME=root
DB_PASSWORD=root
composer.json
{
  "name": "laravel/laravel",
  "type": "project",
  "description": "The Laravel Framework.",
  "keywords": [
    "framework",
    "laravel"
  ],
  "license": "MIT",
  "require": {
    "php": "^7.1.3",
    "fideloper/proxy": "^4.0",
    "laravel/framework": "5.7.*",
    "laravel/tinker": "^1.0"
  },
  "require-dev": {
    "beyondcode/laravel-dump-server": "^1.0",
    "filp/whoops": "^2.0",
    "fzaninotto/faker": "^1.4",
    "mockery/mockery": "^1.0",
    "nunomaduro/collision": "^2.0",
    "phpunit/phpunit": "^7.0"
  },
...
Installation on Ubuntu:
# Set environment variable
DEBIAN_FRONTEND=noninteractive
# Update Packages
apt-get update
# Upgrade Packages
apt-get dist-upgrade
# Apache
apt-get install -y apache2
# Enable Apache Mods
a2enmod rewrite
# Install PHP
apt-get install -y php7.2
# PHP Apache Mod
apt-get install -y libapache2-mod-php7.2
# Restart Apache
service apache2 restart
# PHP Mods
apt-get install -y php7.2-xml
apt-get install -y php7.2-common
apt-get install -y php7.2-zip
# PHP-MYSQL lib
apt-get install -y php7.2-mysql
apt-get install -y mysql-server
# Disable old apache vhosts config and enable the new one
#a2dissite 000-default.conf
# Restart Apache
sudo systemctl restart apache2.service
Databases in phpMyAdmin (the affected DB should be loag; loag and laravel are both empty):
information_schema
laravel
loag
mysql
performance_schema
phpmyadmin
sys
Do you need more information?
When I run php artisan migrate, the following output is generated:
Illuminate\Database\QueryException : could not find driver (SQL: select * from information_schema.tables where table_schema = loag and table_name = migrations)
at C:\Users\Marco Ris\Desktop\Webdevelopment\loag\vendor\laravel\framework\src\Illuminate\Database\Connection.php:664
660| // If an exception occurs when attempting to run a query, we'll format the error
661| // message to include the bindings with SQL, which will make this exception a
662| // lot more helpful to the developer instead of just the database's errors.
663|     catch (Exception $e) {
> 664|         throw new QueryException(
665| $query, $this->prepareBindings($bindings), $e
666| );
667| }
668|
Exception trace:
1 PDOException::("could not find driver")
C:\Users\Marco Ris\Desktop\Webdevelopment\loag\vendor\laravel\framework\src\Illuminate\Database\Connectors\Connector.php:70
2 PDO::__construct("mysql:unix_socket=/etc/mysql/mysql.sock;dbname=loag", "root", "root", [])
C:\Users\Marco Ris\Desktop\Webdevelopment\loag\vendor\laravel\framework\src\Illuminate\Database\Connectors\Connector.php:70
Please use the argument -v to see more details
Maybe this will solve your problem:
sudo apt-get install php7-mysql
or
sudo apt-get install php5-mysql
or
sudo apt-get install php-mysql
Throughout the process of installing headless Chrome on a clean Ubuntu 18.04, I faced quite a few issues. The setup guide on GitHub is not sufficient for a clean Ubuntu 18.04 install.
The following are some errors and solutions for setting up headless Chrome as an alternative to PhantomJS.
Error 1
(node:23835) UnhandledPromiseRejectionWarning: Error: Chromium revision is not downloaded. Run "npm install" or "yarn install"
at Launcher.launch owlcommand.com /puppeteer/node_modules/puppeteer/lib/Launcher.js:112:15)
at <anonymous>
(node:23835) UnhandledPromiseRejectionWarning: Unhandled promise rejection. This error originated either by throwing inside of an async function without a catch block, or by rejecting a promise which was not handled with .catch(). (rejection id: 1)
(node:23835) [DEP0018] DeprecationWarning: Unhandled promise rejections are deprecated. In the future, promise rejections that are not handled will terminate the Node.js process with a non-zero exit code.
Error 2
(node:25272) UnhandledPromiseRejectionWarning: Error: Failed to launch chrome!
owlcommand.com /puppeteer/node_modules/puppeteer/.local-chromium/linux-594312/chrome-linux/chrome: error while loading shared libraries: libX11-xcb.so.1: cannot open shared object file: No such file or directory
Based on https://github.com/GoogleChrome/puppeteer, you should only have to run the following command on Ubuntu 18.04:
npm i puppeteer
Unfortunately, this is not enough. You will also require the following dependencies:
sudo apt-get install gconf-service libasound2 libatk1.0-0 libatk-bridge2.0-0 libc6 libcairo2 libcups2 libdbus-1-3 libexpat1 libfontconfig1 libgcc1 libgconf-2-4 libgdk-pixbuf2.0-0 libglib2.0-0 libgtk-3-0 libnspr4 libpango-1.0-0 libpangocairo-1.0-0 libstdc++6 libx11-6 libx11-xcb1 libxcb1 libxcomposite1 libxcursor1 libxdamage1 libxext6 libxfixes3 libxi6 libxrandr2 libxrender1 libxss1 libxtst6 ca-certificates fonts-liberation libappindicator1 libnss3 lsb-release xdg-utils wget
After that, if you run it as per their example, you will receive an error:
(node:28469) UnhandledPromiseRejectionWarning: Error: Failed to launch chrome!
[1025/150325.817887:ERROR:zygote_host_impl_linux.cc(89)] Running as root without --no-sandbox is not supported. See https://crbug.com/638180.
The solution to this is to add --no-sandbox:
const browser = await puppeteer.launch({args: ['--no-sandbox']});
It will then work. The full working source code is below:
const puppeteer = require('puppeteer');

(async () => {
  const browser = await puppeteer.launch({args: ['--no-sandbox']});
  const page = await browser.newPage();
  await page.goto('http://owlcommand.com');
  await page.screenshot({path: 'example.png'});
  await browser.close();
})();
Solution to puppeteer@1.9.0~install: cannot run in wd %s %s (wd=%s):
npm install --unsafe-perm
Screenshot Size
The default is really small. If the page you are testing is responsive, you can test it with different viewport settings; you can change the dimensions via the setViewport method:
await page.setViewport({
  width: 1600,
  height: 1000
});
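As a small illustration (not part of the original answer), the viewport change can be combined with Puppeteer's fullPage screenshot option to capture the entire page rather than just the visible area:

const puppeteer = require('puppeteer');

(async () => {
  const browser = await puppeteer.launch({args: ['--no-sandbox']});
  const page = await browser.newPage();
  // Emulate a desktop-sized viewport before navigating
  await page.setViewport({width: 1600, height: 1000});
  await page.goto('http://example.com');
  // fullPage captures the whole scrollable page, not just the viewport
  await page.screenshot({path: 'fullpage.png', fullPage: true});
  await browser.close();
})();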
Update for Latest Puppeteer (Aug 2020)
sudo apt-get install libgbm1 (Required)
Update Nov-18: You don't require the --no-sandbox flag any longer; you should use the headless: false property in the object you pass to .launch():
const browser = await puppeteer.launch({
  headless: false,
  slowMo: 80,
  args: ['--window-size=1920,1080']
});
Also make sure you have all the required Debian dependencies installed:
sudo apt-get install gconf-service libasound2 libatk1.0-0 libc6 libcairo2 libcups2 libdbus-1-3 libexpat1 libfontconfig1 libgcc1 libgconf-2-4 libgdk-pixbuf2.0-0 libglib2.0-0 libgtk-3-0 libnspr4 libpango-1.0-0 libpangocairo-1.0-0 libstdc++6 libx11-6 libx11-xcb1 libxcb1 libxcomposite1 libxcursor1 libxdamage1 libxext6 libxfixes3 libxi6 libxrandr2 libxrender1 libxss1 libxtst6 ca-certificates fonts-liberation libappindicator1 libnss3 lsb-release xdg-utils wget
With "puppeteer": "^3.1.0" you also need to install libgbm1; the full command is:
apt-get update && apt-get install -y gconf-service libasound2 libatk1.0-0 libc6 libcairo2 libcups2 libdbus-1-3 libexpat1 libfontconfig1 libgbm1 libgcc1 libgconf-2-4 libgdk-pixbuf2.0-0 libglib2.0-0 libgtk-3-0 libnspr4 libpango-1.0-0 libpangocairo-1.0-0 libstdc++6 libx11-6 libx11-xcb1 libxcb1 libxcomposite1 libxcursor1 libxdamage1 libxext6 libxfixes3 libxi6 libxrandr2 libxrender1 libxss1 libxtst6 ca-certificates fonts-liberation libappindicator1 libnss3 lsb-release xdg-utils wget
I had that kind of problem only when I tried to run a Node application in Docker; based on the answers given, I finally got it working with this Dockerfile:
FROM node:12
WORKDIR /app
COPY package.json /app/
RUN apt-get update \
&& apt-get install -y \
gconf-service \
libasound2 \
libatk1.0-0 \
libatk-bridge2.0-0 \
libc6 \
libcairo2 \
libcups2 \
libdbus-1-3 \
libexpat1 \
libfontconfig1 \
libgcc1 \
libgconf-2-4 \
libgdk-pixbuf2.0-0 \
libglib2.0-0 \
libgtk-3-0 \
libnspr4 \
libpango-1.0-0 \
libpangocairo-1.0-0 \
libstdc++6 \
libx11-6 \
libx11-xcb1 \
libxcb1 \
libxcomposite1 \
libxcursor1 \
libxdamage1 \
libxext6 \
libxfixes3 \
libxi6 \
libxrandr2 \
libxrender1 \
libxss1 \
libxtst6 \
ca-certificates \
fonts-liberation \
libappindicator1 \
libnss3 \
lsb-release \
xdg-utils \
wget \
&& npm i puppeteer
COPY . /app
CMD [ "node", "app.js" ]
I changed the launch code as mentioned as well, and it worked just fine that way:
const browser = await puppeteer.launch({args: ['--no-sandbox']});
I was facing the same problem on Windows. I set asar to false and the build succeeded:
"build": { "asar": false }
I was working with a fork of buildkite/puppeteer and found that with the new Chrome (85) I had to install libxss1 instead of libgbm1 (as suggested in other comments).
So it looks like this at the moment (link):
FROM node:12.18.3-buster-slim#sha256:dd6aa3ed10af4374b88f8a6624aeee7522772bb08e8dd5e917ff729d1d3c3a4f
RUN apt-get update \
&& apt-get install -y git \
&& apt-get update \
&& apt-get install -y libxss1 wget gnupg ca-certificates \
&& wget -q -O - https://dl-ssl.google.com/linux/linux_signing_key.pub | apt-key add - \
&& sh -c 'echo "deb [arch=amd64] http://dl.google.com/linux/chrome/deb/ stable main" >> /etc/apt/sources.list.d/google.list' \
&& apt-get update \
# We install Chrome to get all the OS level dependencies, but Chrome itself
# is not actually used as it's packaged in the node puppeteer library.
# Alternatively, we could could include the entire dep list ourselves
# (https://github.com/puppeteer/puppeteer/blob/master/docs/troubleshooting.md#chrome-headless-doesnt-launch-on-unix)
# but that seems too easy to get out of date.
&& apt-get install -y google-chrome-stable \
&& rm -rf /var/lib/apt/lists/* \
&& wget --quiet https://raw.githubusercontent.com/vishnubob/wait-for-it/master/wait-for-it.sh -O /usr/sbin/wait-for-it.sh \
&& chmod +x /usr/sbin/wait-for-it.sh
I'm trying out Spectron for testing Electron, but as I'm going through a tutorial I keep getting an error message whenever I run npm run test:e2e. My test file is syntactically correct, but I'm not sure why I run into an error during compilation.
Specs:
Nodejs 6.10.3
Electron 1.6.1
Here's the error message I get, and here's my package.json file:
{
  "name": "your-app",
  "version": "0.1.0",
  "main": "main.js",
  "scripts": {
    "start": "C:/Users/Livs/Documents/imdc/logger/node_modules/.bin/electron .",
    "test:e2e": "C:/Users/Livs/Documents/imdc/logger/test.js"
  },
  "devDependencies": {
    "electron-chromedriver": "^1.7.1",
    "electron-prebuilt": "^1.4.13",
    "electron-rebuild": "^1.5.11",
    "chai": "^3.5.0",
    "chai-as-promised": "^5.3.0",
    "electron": "^1.3.4",
    "mocha": "^3.0.2",
    "spectron": "^3.4.0"
  }
}
Here's the test file, test.js:
const Application = require('spectron').Application;
const path = require('path');
const chai = require('chai');
const chaiAsPromised = require('chai-as-promised');

var electronPath = path.join(__dirname, '..', 'node_modules', '.bin', 'electron');

if (process.platform === 'win32') {
  electronPath += '.cmd';
}

var appPath = path.join(__dirname, '..');

var app = new Application({
  path: electronPath,
  args: [appPath]
});
Your npm run test:e2e script just calls the test.js file. You'll need a test runner, Mocha for instance; then you would run mocha test.js, or change the test:e2e script inside package.json to run that command.
All the file paths for scripts inside package.json should be relative to the package root, i.e. logger/test.js. For the npm bins you only need to type the bin name, i.e. electron.
To solve your problem, change your package.json test:e2e command to mocha test.js.
(You can also change your start command to electron ., since npm scripts always look for binaries in ./node_modules/.bin.)
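To make that concrete, here is a sketch of what the corrected pieces could look like; the timeout and the window-count assertion are illustrative only, and the file layout is assumed to match the question:

// package.json -- paths relative to the package root, with Mocha as the test runner
"scripts": {
  "start": "electron .",
  "test:e2e": "mocha test.js"
}

// test.js -- the existing setup plus a minimal Mocha suite so there is something to run
const Application = require('spectron').Application;
const path = require('path');
const assert = require('assert');

var electronPath = path.join(__dirname, '..', 'node_modules', '.bin', 'electron');
if (process.platform === 'win32') {
  electronPath += '.cmd';
}
var appPath = path.join(__dirname, '..');

describe('application launch', function () {
  this.timeout(10000);

  beforeEach(function () {
    // Start a fresh app instance before each test
    this.app = new Application({path: electronPath, args: [appPath]});
    return this.app.start();
  });

  afterEach(function () {
    if (this.app && this.app.isRunning()) {
      return this.app.stop();
    }
  });

  it('opens a window', function () {
    // getWindowCount resolves with the number of open browser windows
    return this.app.client.getWindowCount().then(function (count) {
      assert.strictEqual(count, 1);
    });
  });
});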