Puppeteer works locally but not on AWS EC2 - google-chrome

It works fine locally, but after deploying to an AWS EC2 server
it gets stuck in page.goto and fails with **ERR_CONNECTION_REFUSED**.
So I installed TightVNC to get a GUI and tried again,
but it still gets stuck in page.goto.
I checked that google-chrome is installed;
when I run "google-chrome" on the EC2 instance through the TightVNC viewer, Chrome opens fine.
OS: AWS EC2 Ubuntu 18.04 (30 GiB)
AWS security group inbound rules:
5000 TCP 0.0.0.0/0
5901 TCP 0.0.0.0/0
80 TCP 0.0.0.0/0
3000 TCP 0.0.0.0/0
22 TCP 0.0.0.0/0
443 TCP 0.0.0.0/0
5901 TCP ::/0
node --version: v16.14.2
sudo node --version: v16.14.2
npm --version: 8.5.0
sudo npm --version: 8.5.0
google-chrome --version: Google Chrome 101.0.4951.64
which google-chrome: /usr/bin/google-chrome
I tried setting 'executablePath: "/usr/bin/google-chrome"', but I get the same error.
In ~/file_name/node_modules/puppeteer/.local-chromium/linux-991974/chrome-linux,
running "ldd chrome | grep not" shows nothing, so all of Chrome's shared-library dependencies are installed.
I think I have already tried most of the puppeteer-on-EC2 suggestions I could find.
What is going wrong here?
Please let me know.
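To rule out the Express route itself, a minimal standalone script with the same kind of launch options can be used to reproduce the problem (a sketch, not part of the original post):

// repro.js - run with: node repro.js
const puppeteer = require("puppeteer-extra");
const StealthPlugin = require("puppeteer-extra-plugin-stealth");
puppeteer.use(StealthPlugin());

(async () => {
  const browser = await puppeteer.launch({
    headless: true,
    args: ["--no-sandbox", "--disable-setuid-sandbox"],
    dumpio: true,
  });
  const page = await browser.newPage();
  console.log("Chrome version:", await browser.version());
  // Same kind of target URL as in the route below (www.coupang.com appears in the question).
  await page.goto("https://www.coupang.com", { waitUntil: "domcontentloaded" });
  console.log("Navigation succeeded");
  await browser.close();
})().catch((err) => {
  console.error("Navigation failed:", err.message);
  process.exit(1);
});

The full route code is below: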
const express = require("express");
const router = express.Router();
const puppeteer = require("puppeteer-extra");
const deepai = require("deepai");
const fs = require("fs");
const CryptoJS = require("crypto-js");
const axios = require("axios");
const StealthPlugin = require("puppeteer-extra-plugin-stealth");
puppeteer.use(StealthPlugin());
const puppeteerOptions = {
headless: true,
ignoreHTTPSErrors: false,
args: [
"--no-sandbox",
"--single-process",
"--no-zygote",
"--disable-setuid-sandbox",
"--disable-gpu",
"--use-gl=egl",
],
dumpio: true,
};
// executablePath: "/usr/bin/google-chrome",
//slowMo: 30, // lets you watch it type as if a human were typing
// /usr/bin/google-chrome
// /usr/lib/node_modules/puppeteer/.local-chromium/linux-991974
let browser;
let pageNameH;
router.post("/rpa", (req, res) => {
(async function () {
browser = await puppeteer.launch(puppeteerOptions);
pageNameH = await browser.newPage();
await pageNameH.setDefaultNavigationTimeout(0);
await pageNameH.setViewport({ width: 1366, height: 768 });
console.log("Running tests..");
console.log("req.body.length", req.body.length); //5
console.log("req.body[0]", req.body[0]); //{ Link: 'www.coupang.com' }
console.log("req.body[0].Link", req.body[0].Link); //www.coupang.com
console.log("req.body", req.body);
var click = "";
var ScreenshotName = "";
for (var i = 0; i < req.body.length; i++) {
if (req.body[i].Link) {
await pageNameH.goto(req.body[i].Link);
// await pageNameH.waitFor(500);
console.log("Link");
} else if (req.body[i].Click) {
click = "#" + req.body[i].Click;
console.log("Click");
} else if (req.body[i].Input) {
await pageNameH.type(click, req.body[i].Input);
console.log("Input");
} else if (req.body[i] == "Click") {
await pageNameH.waitFor(500);
await pageNameH.keyboard.press("Enter");
await pageNameH.waitFor(500);
await pageNameH.waitForNavigation();
console.log("Button");
} else if (req.body[i].ScreenShot) {
ScreenshotName = req.body[i].ScreenShot + ".png";
await pageNameH.screenshot({
path: ScreenshotName,
fullPage: true,
});
console.log("ScreenShot");
} else if (req.body[i].sendSMS) {
var userphoneNo = req.body[i].sendSMS;
// browser.close();
await browser.close();
console.log("browser.close");
deepai.setApiKey("241c47f5-d430-4e76-b078-10df6ce6be7f");
var resp = await deepai.callStandardApi("image-similarity", {
image1: fs.createReadStream("coupang.png"),
image2: fs.createReadStream(ScreenshotName),
});
if (resp["output"]["distance"] > 0) {
const user_phone_number = userphoneNo; // phone number that will receive the SMS
console.log("user_phone_number", user_phone_number);
const date = Date.now().toString(); // date string
// environment variables
const sens_service_id = "ncp:sms:kr:284757485087:projectd";
const sens_access_key = "***";
const sens_secret_key = "***";
const sens_call_number = "01012345678"; // company (sender) number
// URL-related variables
const method = "POST";
const space = " ";
const newLine = "\n";
const url = `https://sens.apigw.ntruss.com/sms/v2/services/${sens_service_id}/messages`;
const url2 = `/sms/v2/services/${sens_service_id}/messages`;
// build the signature: signed with the crypto-js module
console.log(1);
const hmac = CryptoJS.algo.HMAC.create(
CryptoJS.algo.SHA256,
sens_secret_key
);
console.log(2);
hmac.update(method);
hmac.update(space);
hmac.update(url2);
hmac.update(newLine);
hmac.update(date);
hmac.update(newLine);
console.log(sens_access_key);
hmac.update(sens_access_key);
const hash = hmac.finalize();
console.log(4);
const signature = hash.toString(CryptoJS.enc.Base64);
console.log(5);
// send the request to the SENS server
const smsRes = await axios({
method: method,
url: url,
headers: {
"Contenc-type": "application/json; charset=utf-8",
"x-ncp-iam-access-key": sens_access_key,
"x-ncp-apigw-timestamp": date,
"x-ncp-apigw-signature-v2": signature,
},
data: {
type: "SMS",
countryCode: "82",
from: sens_call_number,
content: `쿠팡 홈쇼핑에 고객이 물건을 샀습니다.`, // "A customer bought an item on Coupang home shopping."
messages: [{ to: `${user_phone_number}` }],
},
});
console.log("response", smsRes.data);
return res.status(200).json({
message: "SMS sent",
});
} else {
return res.status(404).json({ message: "고객이 물건을 사지 않았다" }); // "The customer did not buy anything"
}
}
}
})();
});
module.exports = router;

ERR_CONNECTION_REFUSED means that the target site is actively refusing the connection from your server.
The IP ranges of AWS and other large hosting providers are often blocked by commercial sites to prevent scraping and botting because, let's face it, most of the time if someone visits from AWS it's probably a bot.
Try a more obscure provider and/or buy some proxies.
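For example, routing Puppeteer through a proxy could look roughly like this (a sketch with placeholder proxy host and credentials, not a tested configuration):

const puppeteer = require("puppeteer-extra");
const StealthPlugin = require("puppeteer-extra-plugin-stealth");
puppeteer.use(StealthPlugin());

(async () => {
  const browser = await puppeteer.launch({
    headless: true,
    // my-proxy-host:8080, proxy_user and proxy_pass are placeholders
    args: ["--no-sandbox", "--proxy-server=http://my-proxy-host:8080"],
  });
  const page = await browser.newPage();
  // If the proxy requires authentication, supply it before navigating.
  await page.authenticate({ username: "proxy_user", password: "proxy_pass" });
  await page.goto("https://www.coupang.com", { waitUntil: "domcontentloaded" });
  await browser.close();
})();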

Related

Cookie received from the server is not stored in the browser

I am trying to create an app using React and Node.js.
I'm using express-session and express-mysql-session to store sessions.
The session is stored in MySQL, but it is not stored in the browser's cookies, even though the response cookies include the session created by the server. Everything works fine in development, but after deploying the application on Render I ran into this problem.
server.js :
const express = require("express");
const app = express();
const cors = require("cors");
const config = require("./config/config");
const mode = process.env.NODE_ENV;
app.use(express.json());
const session = require("express-session");
const sqlSessionStor = require("express-mysql-session")(session);
const dbInfo = config[mode];
const options = {
...dbInfo,
schema: {
tableName: "sessions",
columnNames: {
session_id: "session_id",
expires: "expires",
data: "data",
},
},
};
const sessionStor = new sqlSessionStor(options);
app.use(
session({
name: "auth",
key: "auth",
resave: false,
saveUninitialized: false,
secret: "strongSecretKey",
store: sessionStor,
cookie: {
maxAge: 1000 * 60 * 60 * 24,
},
}),
);
const clientUrl = process.env.CLIENT_URL;
app.use(
cors({
origin: clientUrl,
credentials: true,
}),
);
In login.js:
exports.login = (req, res) => {
authModel
.login(req.body)
.then((result) => {
req.session.isUser = result.user;
res.send(result);
})
.catch((err) => {
res.send(err);
});
};
On the client (React):
async function login() {
const options = {
headers: { "Content-Type": "application/json" },
withCredentials: true,
};
const req = await axios.post(`${VITE_API_KEY}/login`, userInfo, options);
const user = await req.data;
if (user.login) {
//
} else {
//
}
}
Response cookies:
(screenshot)
Browser cookies:
(screenshot)
These are some solutions that did not help me:
I set httpOnly: true and secure: true, but then the server does not send any response cookies at all, which is why those options are not in the code above.
I tried sameSite with all of its values.
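For reference, when the React app and the API are served from different origins (as is typical on Render), a commonly suggested express-session configuration looks like the sketch below; it reuses the app, session, and sessionStor from the question and is an assumption, not a verified fix:

// Behind Render's HTTPS proxy, Express must trust the proxy
// so that "secure" cookies are actually set.
app.set("trust proxy", 1);
app.use(
  session({
    name: "auth",
    secret: "strongSecretKey",
    resave: false,
    saveUninitialized: false,
    store: sessionStor,
    cookie: {
      maxAge: 1000 * 60 * 60 * 24,
      httpOnly: true,
      secure: true, // required when sameSite is "none"
      sameSite: "none", // allow the cookie on cross-site requests
    },
  }),
);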

nodemailer-express-handlebars | Error: ENOENT: no such file or directory for template !! node.js

I am trying to send a template from Node.js using nodemailer and nodemailer-express-handlebars, but I'm getting a "no such file" error and I have no idea what I'm missing.
I'm attaching my index.js below.
const express = require('express')
const app = express()
const bodyParser = require('body-parser')
const hb = require('nodemailer-express-handlebars')
app.use(bodyParser.urlencoded({ extended: true }));
app.use(bodyParser.json())
const nodemailer = require('nodemailer');
const { google } = require('googleapis');
const path = require('path');
// These id's and secrets should come from .env file.
const CLIENT_ID = 'the id';
const CLEINT_SECRET = 'the secret';
const REDIRECT_URI = 'uri';
const REFRESH_TOKEN = 'the token';
const oAuth2Client = new google.auth.OAuth2(
CLIENT_ID,
CLEINT_SECRET,
REDIRECT_URI
);
oAuth2Client.setCredentials({ refresh_token: REFRESH_TOKEN });
app.post("/api", async (req,res) => {
try{
const accessToken = await oAuth2Client.getAccessToken();
const transport = nodemailer.createTransport({
service: 'gmail',
auth: {
type: 'OAuth2',
user: 'your email',
clientId: CLIENT_ID,
clientSecret: CLEINT_SECRET,
refreshToken: REFRESH_TOKEN,
accessToken: accessToken,
},
})
const handlebarOptions = {
viewEngine: {
extName: ".handlebars",
partialsDir: path.resolve(__dirname, "emialTemplate"),
defaultLayout: false,
},
viewPath: path.resolve(__dirname, "emialTemplate"),
extName: ".handlebars",
};
transport.use(
"compile",
hb(handlebarOptions)
);
// the data we are going to send
const mailOptions = {
from: req.body.name + '<ummed.gagrana#gmail.com>',
to: 'ummed.gagrana#gmail.com',
subject: req.body.subject,
text: "From:" + req.body.email + "\n Message:" + req.body.message,
// html: '<h2>From:</h2>'+'<h4>'+req.body.email+"</h4> <br>"+"<h2>Message:</h2>"+'<h4>'+req.body.message+"</h4>",
template: 'comeBack'
};
//sending mail
const result = await transport.sendMail(mailOptions);
// checking the result after sending mail
console.log(result)
res.send({hey:"well done you just passed some data"})
} catch (error) {
console.log(error)
}
})
app.listen(3000, () => {
console.log("server up and running on port 3000")
})
This is the code. I am not sure what I'm missing; I'm a beginner in Node.js, so please help.
I am attaching my working directory structure for reference (screenshot).
You've got a typo: emialTemplate --> emailTemplate
const handlebarOptions = {
viewEngine: {
extName: ".handlebars",
partialsDir: path.resolve(__dirname, "emailTemplate"),
defaultLayout: false,
},
viewPath: path.resolve(__dirname, "emailTemplate"),
extName: ".handlebars",
};
After that, I would return the response so the client knows the request was processed as it should have been:
return res.send({hey:"well done you just passed some data"})
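For context, the plugin resolves the template file from these options roughly as follows (an assumption about the default behaviour, not part of the original answer):

// viewPath + template name + extName
// path.join(viewPath, template + extName)
// => <__dirname>/emailTemplate/comeBack.handlebars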

Correct way to pass args in puppeteer-cluster via puppeteerOptions

I am trying to use args in my code to use a proxy service I have. If I remove the args altogether things run fine but if I have them in there I get an error stating: Error: Unable to restart chrome. I checked multiple examples and copied the same to my code but it seems to fail. Any ideas on how to implement this correctly?
Code:
const { Cluster } = require('puppeteer-cluster');
const vanillaPuppeteer = require('puppeteer');
const { addExtra } = require('puppeteer-extra');
const Stealth = require('puppeteer-extra-plugin-stealth')
async function main() {
// Create a custom puppeteer-extra instance using `addExtra`,
// so we could create additional ones with different plugin config.
const puppeteer = addExtra(vanillaPuppeteer)
puppeteer.use(Stealth())
let proxy_server = 'proxy.soax.com:9000';
let user = 'some_user_name';
let pass = 'some_password';
// Launch cluster with puppeteer-extra
const cluster = await Cluster.launch({
puppeteer,
puppeteerOptions: {
headless: false,
args: ['--proxy-server=' + proxy_server,
'--single-process',
'--no-zygote',
'--no-sandbox'],
sameDomainDelay: 1000,
retryDelay: 3000,
workerCreationDelay: 3000},
maxConcurrency: 2,
concurrency: Cluster.CONCURRENCY_CONTEXT,
monitor: false,
skipDuplicateUrls: true
})
// Define task handler
await cluster.task(async ({ page, data: url }) => {
await page.authenticate({
username: user,
password: pass,
});
await page.goto(url)
const { hostname } = new URL(url)
console.log(`checking on ${hostname}`)
await page.screenshot({ path: `${hostname}.png`, fullPage: true })
})
// Queue any number of tasks
cluster.queue('https://whatismyipaddress.com/')
await cluster.idle()
await cluster.close()
console.log(`All done`)
}
main().catch(console.warn)
I played around a bit and discovered that removing the --single-process arg makes it work fine.
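With that change, the launch call would look roughly like the sketch below, which reuses the puppeteer, proxy_server and Cluster variables from the question; the timing options also appear to be Cluster-level options rather than Puppeteer launch options, so they are moved up a level here (an assumption, not something the answer states):

const cluster = await Cluster.launch({
  puppeteer,
  puppeteerOptions: {
    headless: false,
    // --single-process removed; the remaining flags stay as in the question
    args: ['--proxy-server=' + proxy_server, '--no-zygote', '--no-sandbox'],
  },
  sameDomainDelay: 1000,
  retryDelay: 3000,
  workerCreationDelay: 3000,
  maxConcurrency: 2,
  concurrency: Cluster.CONCURRENCY_CONTEXT,
  monitor: false,
  skipDuplicateUrls: true
})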

Angular HTTPClient (HTTP) requests pending forever

I have recently started working with MySQL as the database for my Angular/Node.js project (I had been using MongoDB all along). Nonetheless, I'm encountering issues when handling HTTP requests. I have experimented with GET and POST requests so far: GET stays pending forever until it fails, and POST likewise never reaches the backend or the database. I really hadn't changed the backend configuration from the one I used with the MongoDB database, except for the queries, of course.
I have tried debugging the backend to check whether the server is actually running, and everything was okay. The problem is that requests to the specified endpoints are always pending. I also tried logging to the console when a request reaches a certain endpoint, but nothing was logged, unfortunately.
server.js
const app = require("./backend/app");
const debug = require("debug")("node-angular");
const http = require("http");
const normalisePort = setPort => {
const port = parseInt(setPort, 10);
if (isNaN(port)) return setPort;
if (port >= 0) return port;
return false;
};
const port = normalisePort(process.env.PORT || "8000");
const server = http.createServer(app);
const error = error => {
if (error.syscall !== "listen") {
throw error;
}
const bind = typeof port === "string" ? "pipe " + port : "port " + port;
switch (error.code) {
case "EACCES":
console.error(bind + " requires elevated privileges");
process.exit(1);
break;
case "EADDRINUSE":
console.error(bind + " is already in use");
process.exit(1);
break;
default:
throw error;
}
};
const listening = () => {
const address = server.address();
const bind = typeof port === "string" ? "pipe " + address : "port " + port;
debug.enabled = true;
debug("Listening on " + bind);
};
app.set("port", port);
server.on("error", error);
server.on("listening", listening);
server.listen(port, "localhost");
app.js
const express = require("express");
const bodyParser = require("body-parser");
const cors = require("cors");
const users = require("./routes/users");
const app = express();
app.use(cors);
app.use(bodyParser.json());
app.use(
bodyParser.urlencoded({
extended: false
})
);
app.use((req, res, next) => {
res.setHeader("Access-Control-Allow-Origin", "*");
res.setHeader(
"Access-Control-Allow-Headers",
"Origin, X-Requested-With, Authorization, Content-Type, Accept"
);
res.setHeader(
"Access-Control-Allow-Methods",
"GET, POST, PATCH, DELETE, OPTIONS"
);
next();
});
app.get("/api/users", users);
module.exports = app;
users.js
const express = require("express");
const router = express.Router();
const db = require("../sql-connection");
router.get("", (req, res, next) => {
db.query("select * from users;", (error, results, fields) => {
if (results.length > 0) {
return res.status(200).send(results);
} else {
return res.status(404).send();
}
});
});
module.exports = router;
sql-connection.js
const mysql = require("mysql");
const sqlConnection = mysql.createConnection({
host: "localhost",
user: "root",
password: "",
database: "payroll"
});
sqlConnection.connect(error => {
if (error) throw error;
console.log("connected to database");
});
module.exports = sqlConnection;
auth.service.ts
export class AuthService {
private _BASE_URL: string = "http://localhost:8000/api";
constructor(private http: HttpClient) {}
public get users(): Observable<any> {
return this.http.get(this._BASE_URL + "/users");
}
}
signup.component.ts
export class SignUpComponent {
constructor(private _authService: AuthService) {}
public onSignUp(): void {
this._authService
.users()
.subscribe(data => (data ? console.log(data) : console.log("no data")));
}
}
When subscribed to the users observable, data from the backend should be logged to the console if present; otherwise 'no data' is logged. Unfortunately, this request stays pending forever. However, if I don't subscribe to users, no request is sent or seen under the Network tab in dev tools.
I've been using a MySQL database and I would recommend using mysql2 over mysql:
mysql2 provides promise-based syntax on top of the conventional callback methods.
Here's the documentation for mysql2 for Node.js.
Coming to the problem, I guess it might be because Node.js is asynchronous while you're using a synchronous approach in setting up the API.
Also, when you're working with asynchronous programming you should use try-catch-finally instead of conventional if-else statements to handle the errors.
So you can use async (req, res, next) => { /* your code here */ } rather than just (req, res, next) => { /* your code here */ }.
You also have to await the SQL query, i.e. await db.query,
or rather, in mysql2 it is easier to use const [data] = await pool.execute(query, [params]).
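Put together, the users route from the question might look roughly like this with mysql2/promise (a sketch that reuses the connection settings from sql-connection.js; it is not the original poster's code):

const express = require("express");
const mysql = require("mysql2/promise");
const router = express.Router();

// Connection pool equivalent to the settings in sql-connection.js.
const pool = mysql.createPool({
  host: "localhost",
  user: "root",
  password: "",
  database: "payroll",
});

router.get("", async (req, res, next) => {
  try {
    const [results] = await pool.execute("select * from users;");
    if (results.length > 0) {
      return res.status(200).send(results);
    }
    return res.status(404).send();
  } catch (error) {
    next(error); // let the Express error handler respond
  }
});

module.exports = router;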

x-devapi unable to connect to database in Google app-engine

I am deploying a simple Node.js server to App Engine, which works well except for the database connection using the X DevAPI. I am getting this error:
All routers failed.
Here is the code I use:
'use strict';
const express = require('express');
const mysqlx = require('@mysql/xdevapi');
const app = express();
app.get('/', (req, res) => {
const options = {
user: 'user_name',
password: '#pass',
host: 'XX.XXX.XX.XXX', // here I used the public IP address of the SQL instance
port: XXXX, // I assigned port 8080 here
schema: 'db_name'
};
(async function () {
let session;
try {
session = await mysqlx.getSession(options);
const collection = await session.getSchema(options.schema).createCollection('collection');
await collection.add({ name: 'foo' }).execute();
await collection.find().fields('name').execute(console.log); // { name: 'foo' }
} catch (err) {
//console.error(err.message);
res.status(200).send(err.message).end();//used code 200 in order to receive the error too
} finally {
session && session.close();
}
})();
});
// Start the server
const PORT = process.env.PORT || 8080;
app.listen(PORT, () => {
console.log(`App listening on port ${PORT}`);
console.log('Press Ctrl+C to quit.');
});
How can I solve this?
It turns out that Google Cloud SQL still does not have the X DevAPI enabled; see the response to my concern here and the feature request here.