I am working on a HTML5 PWA and am looking to have a "Network First, then Cache" layout so that when a user accesses the PWA it pulls down the latest version but if they are offline it uses cache.
How would I need to amend the below code for that, please?
// Static assets pre-cached during the service worker's install phase.
const assets = [
  "/",
  "/index.html",
  "/about.html",
  "/assets/css/main.css",
  "/images/logo.png",
  "/images/logo.svg",
];
// Install: pre-cache the static asset list.
// Fix: the cache.addAll(assets) promise must be RETURNED from the .then
// callback — the original dropped it, so waitUntil resolved before
// caching actually finished and install could complete with a partial
// (or empty) cache.
// NOTE(review): staticCacheName is not declared anywhere in this snippet —
// confirm it is defined elsewhere in the worker.
self.addEventListener("install", installEvent => {
  installEvent.waitUntil(
    caches.open(staticCacheName).then(cache => cache.addAll(assets))
  )
})
// Cache-first strategy: serve the cached response when one exists and
// only hit the network on a cache miss. This is exactly why the PWA
// "always uses Cache" once installed — every pre-cached URL is answered
// from cache forever. A network-first handler is what the question asks
// for (see the answer code below).
self.addEventListener("fetch", fetchEvent => {
fetchEvent.respondWith(
caches.match(fetchEvent.request).then(res => {
// Fall through to the network only when nothing is cached.
return res || fetch(fetchEvent.request)
})
)
})
The cache is created, but once the PWA is saved to homepage it always uses Cache.
You're looking for the "network, then cache" pattern from the Offline Cookbook:
// Network-first: always try the network, and fall back to the cache
// only when the fetch rejects (e.g. the user is offline).
self.addEventListener('fetch', (event) => {
  event.respondWith(
    fetch(event.request).catch(() => caches.match(event.request))
  );
});
Related
I've tried to add a new custom task to my plugins file, which is located outside the tested project.
I've compiled it and configured its path in config.json as well.
All the other plugins from this file work OK.
The error I got from Cypress during the execution is ->
"value": "CypressError: `cy.task('queryDb')` failed with the following error:\n\nThe task 'queryDb' was not handled in the plugins file. The following tasks are registered: log\n\nFix this in your plugins file here:\n./../testilize/cypress/plugins/index.ts\n at ...
The configuration file extends the base config file outside the tested project ->
{
"extends": "./../testilize/cypress.json",
"baseUrl": "https://www.blabla.com/",
"env": {
"client": "https://www.blabla.com/",
"server": "https://www.blabla.com/"
},
"pluginsFile": "./../testilize/cypress/plugins/index.ts",
"supportFile": "./../testilize/cypress/support/index.js",
"fixturesFolder": "e2e-tests/fixtures",
"integrationFolder": "e2e-tests/test-files"
}
plugins file ->
// cypress/plugins/index.ts
/// <reference types="cypress" />
/**
* #type {Cypress.PluginConfig}
*/
const preprocess = require('./preprocess');
const deepmerge = require('deepmerge')
const path = require('path');
require('dotenv').config({ path: './../testilize/.env' , override: true })
import { my_connection } from '../support/db-handlers/connections';
/**
 * Runs a SQL query against the shared test-DB connection.
 *
 * Fix: the original only called my_connection.end() on the success
 * path, so a failed query leaked the open connection/socket.
 *
 * @param {string} query  SQL text to execute
 * @param {object} config Cypress config (currently unused; kept for
 *                        interface compatibility with the task caller)
 * @returns {Promise<any>} resolves with the result rows, rejects with
 *                         the driver error
 */
function queryTestDb(query, config) {
  // start connection to db
  my_connection.connect();
  // exec query + disconnect to db as a Promise
  return new Promise((resolve, reject) => {
    my_connection.query(query, (error, results) => {
      // Always close the connection, success or failure.
      my_connection.end();
      if (error) reject(error);
      else resolve(results);
    });
  });
}
module.exports = (on, config) => {
require('cypress-log-to-output').install(on)
on('task', {
log (message) {
console.log(message)
return true
}
})
const configJson = require(config.configFile)
if (configJson.extends) {
const baseConfigFilename = path.join(config.projectRoot, configJson.extends)
const baseConfig = require(baseConfigFilename)
console.log('merging %s with %s', baseConfigFilename, config.configFile)
configJson.env.my_db_name = process.env.my_DB_NAME;
configJson.env.my_db_host = process.env.my_DB_HOST;
configJson.env.my_db_user = process.env.my_DB_USER;
configJson.env.my_db_password = process.env.my_DB_PASSWORD;
configJson.env.my_db_port = process.env.my_DB_PORT;
return deepmerge(baseConfig, configJson);
}
on("file:preprocessor", preprocess);
on('before:browser:launch', (browser , launchOptions) => {
if (browser.name === 'chrome' && browser.isHeadless) {
launchOptions.args.push('--disable-gpu', '--no-sandbox', '--disable-dev-shm-usage', '--window-size=1920,1080');
return launchOptions
}
})
// Usage: cy.task('queryDb', query)
on('task', {
'queryDb': query => {
return queryTestDb(query, config);
}
});
return configJson
}
Test file ->
/// <reference types="./../../../testilize/node_modules/cypress" />
// Accumulates project IDs fetched from the DB across tests.
// NOTE(review): cy.task resolves with the whole result set, so push()
// nests an array inside allProjectIDs — presumably a spread or concat
// was intended; confirm against how allProjectIDs is consumed.
let allProjectIDs: any = [];
describe('Tests', () => {
it('send graphQL request for internal api', () => {
// Executes the SQL through the `queryDb` task registered in the
// plugins file.
cy.task(
'queryDb',
`SELECT project_id FROM table_name LIMIT 100;`
).then(res => {
console.log(res);
allProjectIDs.push(res);
console.log(allProjectIDs);
});
});
});
Stack::
TypeScript 4.6
Node 14x
Cypress 9.6
It might be because you have two on('task', { sections in plugins.
The first one looks like the default supplied by Cypress, try commenting it out.
in a project that requires:
Starting each session with logging in credentials + notification/OTP
work with multiple accounts asynchronously
remote debugging or monitoring of the session
5+ different operations can be requested on an open session, in any order
I want it to be a puppeteer cluster browser with one remote debugging port to monitor it
but couldn't integrate WsEndpoints
const puppeteer = require('puppeteer-extra');
const { Cluster } = require('puppeteer-cluster');
/**
 * Singleton wrapper around a puppeteer-cluster instance configured with
 * a fixed remote-debugging port.
 *
 * Fix: the original singleton check was an EMPTY if-branch, so a second
 * `new SingleBrowserCluster()` fell through and implicitly returned a
 * fresh, uninitialised instance (no `options`), which made any later
 * `launchCluster()` run with undefined puppeteerOptions. The constructor
 * now returns the cached instance instead.
 */
class SingleBrowserCluster {
  browserInstance
  options

  constructor() {
    // Reuse the existing singleton rather than building a blank object.
    if (SingleBrowserCluster._instance) {
      return SingleBrowserCluster._instance;
    }
    SingleBrowserCluster._instance = this;
    const optArgs = [
      '--remote-debugging-port=3002', // works if dockerised
      '--remote-debugging-address=0.0.0.0', // at localhost:3002
      '--window-size=1920,1080',
      '--no-sandbox',
      '--disable-setuid-sandbox',
      '--disable-gpu', '--no-zygote', //'--single-process',
    ];
    this.options = {
      headless: true, // for dockerisation
      args: optArgs,
      defaultViewport: null,
      waitUntil: 'networkidle2'
    };
  }

  /** Cluster worker task: navigate to a URL and save a screenshot. */
  async screenshotMethod({ page, data: url }) {
    await page.goto(url);
    console.log(`%c worker X is running on ${url} `, `color:green;`);
    console.log("will wait 20 second");
    await page.waitForTimeout(20000)
    const path = url.replace(/[^a-zA-Z]/g, '_') + '.png';
    await page.screenshot({ path });
  };

  /** Launches the cluster once and caches it on the instance. */
  async launchCluster() {
    try {
      this.browserInstance = await Cluster.launch({
        concurrency: Cluster.CONCURRENCY_CONTEXT,
        maxConcurrency: 3,
        puppeteerOptions: this.options
      })
      return this.browserInstance;
    } catch (error) {
      console.log(`%c ERRORR`, `color:red;`);
      console.log(error);
    }
  }
}
// NOTE(review): `launchCluster()` resolves with a puppeteer-cluster
// Cluster object, NOT a puppeteer Browser. Cluster exposes neither
// wsEndpoint() nor pages(), so both calls below throw — that is the
// failure this question is about. Also note the explicit-promise-
// construction anti-pattern: an async IIFE is wrapped in `new Promise`.
const StartScraper = async (Url, useProxy) => new Promise((resolve, reject) => {
(async () => {
// get proxy url from environment files
const proxyUrl = process.env.PROXY_URL;
//--disable-dev-shm-usage
// By default, Docker runs a container with a /dev/shm shared memory space 64MB. This is typically too small for Chrome and will cause Chrome to crash when rendering large pages.
//his will write shared memory files into /tmp instead of /dev/shm. See crbug.com/736452 for more details.
var instanceOne1= new SingleBrowserCluster()//.launchCluster()
var browser= await instanceOne1.launchCluster();
browser.queue('https://www.google.com/', instanceOne1.screenshotMethod);
//THE PROBLEM LINE
// NOTE(review): Cluster has no wsEndpoint(); to get a DevTools endpoint
// you would need the underlying puppeteer Browser (e.g. via the
// cluster's worker/page, browser.browser().wsEndpoint()) — confirm
// against the puppeteer-cluster API.
const wsEndpoint = browser.wsEndpoint();
try {
// NOTE(review): Cluster has no pages() either — this throws.
const page = (await browser.pages())[0];
await page.goto(Url, { waitUntil: 'load' });
return resolve(wsEndpoint);
} catch (e) {
browser.close();
return reject(false);
}
})();
});
How can I get the wsEndpoint of any session in a puppeteer-cluster?
( more info:
i will put those in a session file
to provide my next selected consecutive operation a connection point on its session
localhost/StartScraper creates WSendpoint
localhost/login==WSendpoint==>Connects to existing session do login stuff
localhost/listItems==WSendpoint==>Connects to existing session do listItems stuff
...
)
I'm trying to create a Puppeteer function in GCP which can be triggered by Pub/Sub messages. The function is callable, but doesn't behave as expected and throws a Timeout Error once browser tries to initialize. Could the trigger possibly be using a NodeJS environment different from HTTP trigger?
I'm also very new to NodeJS, so I apologize ahead of time if the issue is blatantly obvious.
I've created an HTTP trigger for the function which behaves as expected. I copy/paste the Puppeteer Function below into the index.js when creating the Cloud Function, but separated in example for clarity that both triggers are running the identical function.
Puppeteer Function
const puppeteer = require('puppeteer');
/**
 * Launches headless Chrome and reports whether suzannecollinsbooks.com
 * contains the sentence "Here’s a picture of me with a rat".
 *
 * Fixes:
 *  - was assigned without a declaration (implicit global); now `const`.
 *  - removed the explicit-promise-construction anti-pattern (awaited
 *    code wrapped in `new Promise` and mixed with .then chains).
 *  - the browser is now closed in `finally`, so it is no longer leaked
 *    when page.goto or the waits reject.
 *
 * @returns {Promise<boolean>} true when the sentence is found
 */
const scrapeUglyWebsite = async () => {
  const browser = await puppeteer.launch({
    headless: true,
    args: ['--no-sandbox']
  });
  try {
    const page = await browser.newPage();
    await page.goto('http://suzannecollinsbooks.com/', { waitUntil: 'load', timeout: 0 });
    // Wait for content to load
    await page.waitForFunction('document.body !== null && document.body.innerText.includes(\'Jon Scieszka\')');
    // Evaluate page contents
    return await page.evaluate(() => document.body.innerText.includes("Here’s a picture of me with a rat"));
  } finally {
    await browser.close();
  }
};
HTTP Trigger - index.js
exports.cloudFunctionTest = (req, res) => {
scrapeUglyWebsite()
.then((results) => {
if(results) {
res.send('Suzanne Collins takes pictures with rats.');
} else {
res.send("Suzzane Collins doesn't take pictures with rats.");
};
})
.catch((err) => {
res.send(err.toString());
});
Pub/Sub Trgger - index.js
exports.cloudFunctionTest = (data, context) => {
scrapeUglyWebsite()
.then((results) => {
if(results) {
console.log('Suzanne Collins takes pictures with rats.');
} else {
console.log("Suzzane Collins doesn't take pictures with rats.");
};
})
.catch((err) => {
console.log(err.toString());
});
};
package.json
{
"name": "test",
"version": "0.0.1",
"engines": {
"node": "8"
},
"dependencies": {
"puppeteer": "^1.6.0"
}
}
HTTP Trigger behaves correctly with the expected result
Suzanne Collins takes pictures with rats.
Pub/Sub Trigger throws the following error with no output
TimeoutError: Timed out after 30000 ms while trying to connect to Chrome! The only Chrome revision guaranteed to work is r662092
I know this is late but the reason that the TimeoutError occurs is because cloud functions do not automatically wait for async tasks to finish completing. So in exports.cloudFunctionTest, scrapeUglyWebsite() is called but the function does not wait for the promise to be fulfilled, so the program terminates. Hence the error
More info here on how background functions work in NodeJs
In order for the function to wait for scrapeUglyWebsite(), you need to return a promise that completes when scrapeUglyWebsite() and the resulting code is complete.
Personally, I got it to work by simply wrapping the code currently in the function I am exporting in another async function and then returning the promise of the wrapper function.
/**
 * Awaits the scrape and logs the outcome; never rejects (all errors are
 * logged). Returning this promise from the exported handler keeps the
 * Cloud Function instance alive until the scrape completes.
 */
async function wrapper() {
  try {
    // Fix: the original declared `result` but tested `results`, so the
    // success branch always threw a ReferenceError into the catch.
    const result = await scrapeUglyWebsite();
    if (result) {
      console.log('Suzanne Collins takes pictures with rats.');
    } else {
      console.log("Suzzane Collins doesn't take pictures with rats.");
    }
  } catch (err) {
    console.log(err.toString());
  }
}
Then in the function you want to export:
// Return wrapper()'s promise so the platform waits for completion.
exports.cloudFunctionTest = (data, context) => wrapper();
I'm working on a webpage in an app that uses some JavaScript to fetch data from an API endpoint.
In Safari and FireFox, I can request the page multiple times in a row and the data is fetched and displayed promptly. In Chrome, by contrast, the data is fetched and displayed promptly only if the dev tools are open or if my cache is clear (though I'm not disabling the cache in the dev tools).
If the dev tools are not open or Chrome has cached the page, reloading the page takes about 10 seconds to make the request and display the data.
Does anyone have any idea what might be causing this behavior? Full app source.
The API request in question used isomorphic-fetch to make the request. I replaced the isomorphic-fetch code with an old school AJAX request and now the requests are triggering immediately as expected.
Before:
import fetch from 'isomorphic-fetch';
// Redux-thunk action creator: fetches the tree endpoint and dispatches
// success/failure actions.
// NOTE(review): the error callback passed as the SECOND argument to the
// final .then only catches rejections from earlier in the chain — it
// does NOT catch errors thrown by the dispatch calls in the success
// handler. That two-argument form is semantically different from
// .catch(), so keep it as written.
export const fetchTreeData = () => {
return function(dispatch) {
return fetch(config.endpoint + 'tree')
.then(response => response.json()
.then(json => ({
status: response.status,
json
})))
.then(({ status, json }) => {
// Treat any 4xx/5xx as a failure action; otherwise deliver the data.
if (status >= 400) dispatch(treeRequestFailed())
else dispatch(receiveTreeData(json))
}, err => { dispatch(treeRequestFailed(err)) })
}
}
After:
// Redux-thunk action creator: fetches the tree endpoint via the XHR
// helper below and dispatches the parsed payload.
export const fetchTreeData = () => {
  return function(dispatch) {
    const onSuccess = (data) => dispatch(receiveTreeData(JSON.parse(data)));
    const onError = (e) => console.log(e);
    get(config.endpoint + 'tree', onSuccess, onError);
  }
}
/**
 * Minimal XHR GET helper.
 *
 * @param {string} url                        request URL
 * @param {(text: string) => void} [success]  called with responseText on HTTP 200
 * @param {(xhr: XMLHttpRequest) => void} [err] called with the XHR on any other status
 * @param {(e: ProgressEvent) => void} [progress] forwarded progress events
 */
const get = (url, success, err, progress) => {
  const xmlhttp = new XMLHttpRequest();
  xmlhttp.onreadystatechange = () => {
    // Fix: strict equality (the original used ==) when comparing the
    // ready state against XMLHttpRequest.DONE.
    if (xmlhttp.readyState === XMLHttpRequest.DONE) {
      // NOTE(review): only 200 counts as success here — other 2xx codes
      // (204, 206, ...) fall through to the error callback; confirm this
      // matches the API's behaviour.
      if (xmlhttp.status === 200) {
        if (success) success(xmlhttp.responseText);
      } else {
        if (err) err(xmlhttp);
      }
    }
  };
  xmlhttp.onprogress = (e) => { if (progress) progress(e); };
  xmlhttp.open('GET', url, true);
  xmlhttp.send();
};
I'm learning how to use service workers. My problem is that when the page is loaded for the first time, clicking the button to fetch() a file doesn't fire any fetch event; but after manually refreshing the page and then clicking the button again, the fetch event fires as expected.
Please guide how to fix this
thanks
Chrome: 60
service-worker.js
self.addEventListener('install', event => {
event.waitUntil(
caches.open('XXX')
.then(cache => {
cache.allAll([ ... ])
})
)
})
self.addEventListener('activate', event => {
console.log('ready')
})
self.addEventListener('fetch', event => {
console.log('fetch')
})
index.html
<body>
<button id="btn" class="button">Click</button>
<script>
(() => {
  // Demo button: fetch JSON and log the parsed body.
  const btn = document.getElementById('btn');
  btn.addEventListener('click', () => {
    fetch(' ... ')
      .then((res) => res.json())
      .then((json) => console.log(json))
  });

  // Guard clause: bail out early on browsers without service workers.
  if (!('serviceWorker' in navigator)) {
    console.log('Service Worker is not supported')
    return
  }

  navigator.serviceWorker.register('/service-worker.js')
    .then((registration) => {
      console.log('Sevice Worker has been registered')
    })
})()
</script>
</body>
you probably need a self.clients.claim() to activate the service-worker asap.
// Claim all open clients as soon as this worker activates, so fetch
// events are intercepted without requiring a page refresh.
self.addEventListener('activate', (event) => {
  event.waitUntil(self.clients.claim());
});
see: What is the use of `self.Clients.claim()`