Can I increase QUOTA_BYTES_PER_ITEM in Chrome?

Is there any way to increase chrome.storage.sync.QUOTA_BYTES_PER_ITEM?
For me, the default 4096 bytes is a bit too small.
I tried to execute
chrome.storage.sync.QUOTA_BYTES_PER_ITEM = 8192;
However, it seems that the actual limit doesn't change.
How can I do this?

No, QUOTA_BYTES_PER_ITEM is there for reference only; it is not a settable value. You could use the value of QUOTA_BYTES_PER_ITEM to split an item up into multiple items, though:
function syncStore(key, objectToStore, callback) {
var jsonstr = JSON.stringify(objectToStore);
var i = 0;
var storageObj = {};
// split jsonstr into chunks and store them in an object indexed by `key_i`
while(jsonstr.length > 0) {
var index = key + "_" + i++;
// since the key uses up some per-item quota, see how much is left for the value
// also trim off 2 for quotes added by storage-time `stringify`
var valueLength = chrome.storage.sync.QUOTA_BYTES_PER_ITEM - index.length - 2;
// trim down segment so it will be small enough even when run through `JSON.stringify` again at storage time
var segment = jsonstr.substr(0, valueLength);
while(JSON.stringify(segment).length > valueLength)
segment = jsonstr.substr(0, --valueLength);
storageObj[index] = segment;
jsonstr = jsonstr.substr(valueLength);
}
// store all the chunks
chrome.storage.sync.set(storageObj, callback);
}
Then write an analogous fetch function that fetches by key and glues the object back together.
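For completeness, a minimal sketch of such a fetch function, assuming the `key_i` chunk naming used above (syncFetch is a hypothetical name, not part of the API):
function syncFetch(key, callback) {
    chrome.storage.sync.get(null, function(items) {
        var jsonstr = "";
        var i = 0;
        // concatenate chunks in order until one is missing
        while (items[key + "_" + i] !== undefined) {
            jsonstr += items[key + "_" + i];
            i++;
        }
        callback(jsonstr.length > 0 ? JSON.parse(jsonstr) : undefined);
    });
}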

Just a modified version of #apsilliers' answer:
function syncStore(key, objectToStore) {
var jsonstr = JSON.stringify(objectToStore);
var i = 0;
var storageObj = {};
// split jsonstr into chunks and store them in an object indexed by `key_i`
while(jsonstr.length > 0) {
var index = key + "_" + i++;
// since the key uses up some per-item quota, see how much is left for the value
// also trim off 2 for quotes added by storage-time `stringify`
const maxLength = chrome.storage.sync.QUOTA_BYTES_PER_ITEM - index.length - 2;
var valueLength = jsonstr.length;
if(valueLength > maxLength){
valueLength = maxLength;
}
// trim down segment so it will be small enough even when run through `JSON.stringify` again at storage time
//max try is QUOTA_BYTES_PER_ITEM to avoid infinite loop
var segment = jsonstr.substr(0, valueLength);
for(let i = 0; i < chrome.storage.sync.QUOTA_BYTES_PER_ITEM; i++){
const jsonLength = JSON.stringify(segment).length;
if(jsonLength > maxLength){
segment = jsonstr.substr(0, --valueLength);
}else {
break;
}
}
storageObj[index] = segment;
jsonstr = jsonstr.substr(valueLength);
}
// store all the chunks
chrome.storage.sync.set(storageObj);
}
And a function to read each partition and merge it back together:
function syncGet(key, callback) {
chrome.storage.sync.get(key, (data) => {
console.log(data[key]);
console.log(typeof data[key]);
if(data && data[key] !== undefined && data[key] !== "undefined"){
const keyArr = new Array();
for(let i = 0; i <= data[key].count; i++) {
keyArr.push(`${data[key].prefix}${i}`)
}
chrome.storage.sync.get(keyArr, (items) => {
console.log(data)
const keys = Object.keys( items );
const length = keys.length;
let results = "";
if(length > 0){
const sepPos = keys[0].lastIndexOf("_");
const prefix = keys[0].substring(0, sepPos);
for(let x = 0; x < length; x ++){
results += items[`${prefix }_${x}`];
}
callback(JSON.parse(results));
return;
}
callback(undefined);
});
} else {
callback(undefined);
}
});
}
I tested it and it works for my case.
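Note that this syncGet expects a small index record stored under key itself, holding a prefix and the index of the last chunk; the syncStore above doesn't write one, so something like the following would be needed before the final set call (a sketch, names taken from the two functions above):
// record where the chunks live so syncGet can find them
// (i - 1 is the index of the last chunk written by the loop)
storageObj[key] = { prefix: key + "_", count: i - 1 };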

This is an improved version of #uncle bob's functions, working with Manifest V3 (you can use it just like the normal sync.set or sync.get functions).
NOTE: it only works with JSON-serializable values (arrays and objects), since a plain string shouldn't be that long.
let browserServices;
if (typeof browser === "undefined") {
browserServices = chrome;
} else {
browserServices = browser;
}
function syncSet(obj = {}) {
return new Promise((resolve, reject) => {
var storageObj = {};
for (let u = 0; u < Object.keys(obj).length; u++) {
const key = Object.keys(obj)[u];
const objectToStore = obj[key]
var jsonstr = JSON.stringify(objectToStore);
var i = 0;
// split jsonstr into chunks and store them in an object indexed by `key_i`
while (jsonstr.length > 0) {
var index = key + "USEDTOSEPERATE" + i++;
// since the key uses up some per-item quota, see how much is left for the value
// also trim off 2 for quotes added by storage-time `stringify`
const maxLength = browserServices.storage.sync.QUOTA_BYTES_PER_ITEM - index.length - 2;
var valueLength = jsonstr.length;
if (valueLength > maxLength) {
valueLength = maxLength;
}
// trim down segment so it will be small enough even when run through `JSON.stringify` again at storage time
//max try is QUOTA_BYTES_PER_ITEM to avoid infinite loop
var segment = jsonstr.substring(0, valueLength);
var jsonLength = JSON.stringify(segment).length;
segment = jsonstr.substring(0, valueLength = (valueLength - (jsonLength - maxLength) - 1));
for (let i = 0; i < browserServices.storage.sync.QUOTA_BYTES_PER_ITEM; i++) {
jsonLength = JSON.stringify(segment).length;
if (jsonLength > maxLength) {
segment = jsonstr.substring(0, --valueLength);
} else {
break;
}
}
storageObj[index] = segment;
jsonstr = jsonstr.substring(valueLength, Infinity);
}
}
browserServices.storage.sync.set(storageObj).then(() => {
resolve()
})
})
}
function syncGet(uniqueKeys = []) {
return new Promise((resolve, reject) => {
browserServices.storage.sync.get(null).then((data) => {
const keyArr = Object.keys(data).filter(e => uniqueKeys.filter(j => e.indexOf(j) == 0).length > 0)
browserServices.storage.sync.get(keyArr).then((items) => {
var results = {};
for (let i = 0; i < uniqueKeys.length; i++) {
const uniqueKey = uniqueKeys[i];
const keysFiltered = keyArr.filter(e => e.split("USEDTOSEPERATE")[0] == uniqueKey)
if (keysFiltered.length > 0) {
results[uniqueKey] = ""
for (let x = 0; x < keysFiltered.length; x++) {
results[uniqueKey] += items[`${keysFiltered[x]}`];
}
results[uniqueKey] = JSON.parse(results[uniqueKey])
}
}
resolve(results)
});
});
})
}
Example of usage:
syncSet({
"keyTest": ["a lot of text"],
"keyTest1": ["a lot of text"]
}
)
syncGet(["keyTest","keyTest1"]).then(results=>console.log(results))
// {keyTest:["a lot of text"],keyTest1:["a lot of text"]}

Related

My website becomes unresponsive when dealing with a 1000-row Excel file

I am uploading data from an Excel file into my website using an HTML input button,
then converting the data into JSON and mapping it with local external metadata.
Finally I view it using the id.
My website becomes unresponsive and sometimes takes a long time processing. Please help.
function ExportToTable() {
var regex = /^([a-zA-Z0-9\s_\\.\-:()])+(.xlsx|.xls)$/;
/*Checks whether the file is a valid excel file*/
if (regex.test($("#excelfile").val().toLowerCase())) {
var xlsxflag = false; /*Flag for checking whether excel is .xls format or .xlsx format*/
if ($("#excelfile").val().toLowerCase().indexOf(".xlsx") > 0) {
xlsxflag = true;
}
/*Checks whether the browser supports HTML5*/
if (typeof (FileReader) != "undefined") {
var reader = new FileReader();
reader.onload = function (e) {
var data = e.target.result;
/*Converts the excel data in to object*/
if (xlsxflag) {
var workbook = XLSX.read(data, { type: 'binary' });
}
else {
var workbook = XLS.read(data, { type: 'binary' });
}
/*Gets all the sheetnames of excel in to a variable*/
var sheet_name_list = workbook.SheetNames;
console.log(sheet_name_list);
var cnt = 0; /* This is used for restricting the script to consider only the first sheet of the excel file */
sheet_name_list.forEach(function (y) { /*Iterate through all sheets*/
/*Convert the cell value to Json*/
if (xlsxflag) {
var exceljson = XLSX.utils.sheet_to_json(workbook.Sheets[y]);
}
else {
var exceljson = XLS.utils.sheet_to_row_object_array(workbook.Sheets[y]);
}
//Download & View Subscriptions
if (exceljson.length > 0 && cnt == 1) {
metadata = [];
fetch("metadata.json")
.then(response => response.json())
.then(json => {
metadata = json;
console.log(metadata);
user_metadata1 = [], obj_m_processed = [];
for (var i in exceljson) {
var obj = { email: exceljson[i].email, name: exceljson[i].team_alias, id: exceljson[i].autodesk_id };
for (var j in metadata) {
if (exceljson[i].email == metadata[j].email) {
obj.GEO = metadata[j].GEO;
obj.COUNTRY = metadata[j].COUNTRY;
obj.CITY = metadata[j].CITY;
obj.PROJECT = metadata[j].PROJECT;
obj.DEPARTMENT = metadata[j].DEPARTMENT;
obj.CC=metadata[j].CC;
obj_m_processed[metadata[j].email] = true;
}
}
obj.GEO = obj.GEO || '-';
obj.COUNTRY = obj.COUNTRY || '-';
obj.CITY = obj.CITY || '-';
obj.PROJECT = obj.PROJECT || '-';
obj.DEPARTMENT = obj.DEPARTMENT || '-';
obj.CC = obj.CC || '-';
user_metadata1.push(obj);
}
for (var j in metadata) {
if (typeof obj_m_processed[metadata[j].email] == 'undefined') {
user_metadata1.push({ email: metadata[j].email, name: metadata[j].name, id: metadata[j].autodesk_id,
GEO: metadata[j].GEO,
COUNTRY : metadata[j].COUNTRY,
CITY : metadata[j].CITY,
PROJECT : metadata[j].PROJECT,
DEPARTMENT : metadata[j].DEPARTMENT,
CC:metadata[j].CC
});
}
}
document.getElementById("headings4").innerHTML = "MetaData Mapping";
BindTable(user_metadata1, '#user_metadata1');
cnt++;
}); /* closes fetch(...).then(json => { */
} /* closes if (exceljson.length > 0 && cnt == 1) */
}); /* closes sheet_name_list.forEach */
$('#exceltable').show();
}; /* closes reader.onload */
if (xlsxflag) {/*If excel file is .xlsx extension than creates a Array Buffer from excel*/
reader.readAsArrayBuffer($("#excelfile")[0].files[0]);
}
else {
reader.readAsBinaryString($("#excelfile")[0].files[0]);
}
}
else {
alert("Sorry! Your browser does not support HTML5!");
}
}
else {
alert("Please upload a valid Excel file!");
}
}
Here is how the JSON is bound after mapping the metadata:
function BindTable(jsondata, tableid) {/*Function used to convert the JSON array to Html Table*/
var columns = BindTableHeader(jsondata, tableid); /*Gets all the column headings of Excel*/
for (var i = 0; i < jsondata.length; i++) {
var row$ = $('<tr/>');
for (var colIndex = 0; colIndex < columns.length; colIndex++) {
var cellValue = jsondata[i][columns[colIndex]];
if (cellValue == null)
cellValue = "";
row$.append($('<td/>').html(cellValue));
}
$(tableid).append(row$);
}
}
function BindTableHeader(jsondata, tableid) {/*Function used to get all column names from JSON and bind the html table header*/
var columnSet = [];
var headerTr$ = $('<tr/>');
for (var i = 0; i < jsondata.length; i++) {
var rowHash = jsondata[i];
for (var key in rowHash) {
if (rowHash.hasOwnProperty(key)) {
if ($.inArray(key, columnSet) == -1) {/*Adding each unique column names to a variable array*/
columnSet.push(key);
headerTr$.append($('<th/>').html(key));
}
}
}
}
$(tableid).append(headerTr$);
return columnSet;
}
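A likely cause of the freeze is the nested loop over exceljson and metadata, which does rows × metadata comparisons on the main thread. A minimal sketch of replacing the inner loop with a one-time lookup map (names taken from the question's code, behavior assumed equivalent):
// build the lookup once, before iterating exceljson
var metaByEmail = {};
for (var j in metadata) {
    metaByEmail[metadata[j].email] = metadata[j];
}
// inside the exceljson loop, replace the inner metadata loop with:
var m = metaByEmail[exceljson[i].email];
if (m) {
    obj.GEO = m.GEO;
    obj.COUNTRY = m.COUNTRY;
    obj.CITY = m.CITY;
    obj.PROJECT = m.PROJECT;
    obj.DEPARTMENT = m.DEPARTMENT;
    obj.CC = m.CC;
    obj_m_processed[m.email] = true;
}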

The asignCards() function is supposed to give random cards from the cards array to each player

This asignCards() function is supposed to give random cards from the cards array to each player. The cards are not supposed to be repeated but I get repeated cards.
// For each player in the array of players, each player receives two cards.
const asignCards = () => {
players.forEach((player) => {
let useCards = [];
let inUse = cards;
for (let i = 0; i < 1; i++) {
let random = Math.floor(Math.random() * 52);
let random2 = Math.floor(Math.random() * 52);
if (
useCards.indexOf(JSON.stringify(inUse[random])) === -1 ||
useCards.indexOf(JSON.stringify(inUse[random2])) === -1 ||
useCards.indexOf(JSON.stringify(inUse[random])) !==
useCards.indexOf(JSON.stringify(inUse[random2]))
) {
player.card1 = inUse[random];
player.card2 = inUse[random2];
useCards.push(JSON.stringify(player.card1));
useCards.push(JSON.stringify(player.card2));
}else {
i--;
}
}
console.log(player);
});
};
asignCards();
You should declare your useCards array outside of the loop; otherwise it will be reset to empty on every iteration. The way you had it, a player cannot get a duplicate of their own cards, but the same card can still be assigned to several players.
const asignCards = () => {
let useCards = [];
players.forEach((player) => {
let inUse = cards;
for (let i = 0; i < 1; i++) {
let random = Math.floor(Math.random() * 52);
let random2 = Math.floor(Math.random() * 52);
if (
useCards.indexOf(JSON.stringify(inUse[random])) === -1 ||
useCards.indexOf(JSON.stringify(inUse[random2])) === -1 ||
useCards.indexOf(JSON.stringify(inUse[random])) !==
useCards.indexOf(JSON.stringify(inUse[random2]))
) {
player.card1 = inUse[random];
player.card2 = inUse[random2];
useCards.push(JSON.stringify(player.card1));
useCards.push(JSON.stringify(player.card2));
}else {
i--;
}
}
console.log(player);
});
};
asignCards();
Bonus: you may want to rename your function to assignCards (from assign, with a double s).
Update:
Here's a different approach. Every time you want to play, you shuffle the deck first, then take your cards from the top of the deck with Array.pop(), so you're 100% sure there will be no duplicates.
let players = ["A", "B", "C", "D"];
let cards = [...Array(52).keys()];
const cardsPerPlayer = 2;
const asignCards = () => {
// Shuffle deck
var n = cards.length;
while (n--) {
var i = Math.floor((n + 1) * Math.random()); // allow i == n for an unbiased Fisher-Yates shuffle
var tmp = cards[i];
cards[i] = cards[n];
cards[n] = tmp;
}
players.forEach((player, index) => {
let myCards = [];
for (let c = 0; c < cardsPerPlayer; c++) {
myCards.push(cards.pop());
}
console.log(`${player} has the following cards: ${myCards}`);
});
};
asignCards();

Why can't Google Spreadsheets scripts sort?

function SUMMARIZE_TOGGL_ENTRIES(proj, desc, time, taskDates, uID) {
if (!desc.map || !time.map || !taskDates.map || !proj.map) {
return 'INPUT HAS TO BE MAP';
}
if (desc.length != time.length || time.length != taskDates.length || proj.length != taskDates.length) {
return 'WRONG INPUT ARRAYS LEN';
}
var resObj = {'NO_DESCRIPTION': '00:00:00'};
var keys = [];
var result = [];
for (var i = 1; i < desc.length; i++) {
var tmpKey = createKey(proj[i], desc[i], uID[i]);
console.log('tmpKey',tmpKey);
if (resObj[tmpKey]) {
resObj[tmpKey] = formatTime(timestrToSec(resObj[tmpKey]) + timestrToSec(time[i]));
} else {
resObj[tmpKey] = formatTime(timestrToSec(time[i]));
}
}
keys = Object.keys(resObj);
var b=0;
for (b; b < keys.length; b++) {
var tmp = [];
var key = keys[b].split('__')[1];
console.log('keys',keys[b].split('__'));
tmp.push(keys[b].split('__')[0]);
tmp.push(key);
tmp.push(resObj[keys[b]]);
tmp.push(taskLastDate(keys[b], desc, taskDates, proj, uID))
tmp.push(keys[b].split('__')[2]);
if(tmp[3] != '01-01-1991 01:01:01' && tmp[3] != ''){
result.push(tmp);
}
}
return result.sort(function (a,b){
var ad = new Date(a[3].split(' ')[0].replace(/-/g,'.'));
var bd = new Date(b[3].split(' ')[0].replace(/-/g,'.'));
if (ad > bd) {
return 1;
}
if (ad < bd) {
return -1;
}
return 0;
});
}
As you can see, I'm trying to return a sorted array of arrays, but it doesn't return a sorted array. If I launch this function on my PC it works, so what's the problem? Does anyone have an idea?
[result image]
It seems like your sort function always returns 0. Change it to
return result.sort(function (a,b){
var ad = new Date(a[3].split(' ')[0].replace(/-/g,'.'));
var bd = new Date(b[3].split(' ')[0].replace(/-/g,'.'));
if (ad > bd) {
return 1;
} else if (ad < bd) {
return -1;
} else {
return 0;
}
});
and see if that works?
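Equivalently, since subtracting two Date objects coerces them to millisecond timestamps, the comparator can be a single subtraction (same ordering, assuming the dates parse):
return result.sort(function (a, b) {
    // Date minus Date yields the difference in milliseconds
    return new Date(a[3].split(' ')[0].replace(/-/g, '.')) -
        new Date(b[3].split(' ')[0].replace(/-/g, '.'));
});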

How to convert TSV to JSON

I want to make a dynamic graph based on a JSON file. I have seen many examples with TSV but I do not know how to convert them to JSON.
This is the part that I want to change from TSV to JSON, but I do not know how:
d3.tsv("data/data.tsv", function(error, data) {
data.forEach(function(d) {
d.date = parseDate(d.date);
d.close = +d.close;
});
when I use
d3.json("data/data.json", function(data) {
data.forEach(function d) {
d.date = parseDate(d.date);
d.close = +d.close;
}
});
it gives this error: Uncaught TypeError: Cannot call method 'forEach' of undefined.
Thanks for your suggestions :)
Try to do something like this:
d3.json("data/data.json", function(data) {
    data.forEach(function(d) {
        d.date = parseDate(d.date);
        d.close = +d.close;
    });
});
d3.js has support for JSON: https://github.com/mbostock/d3/wiki/Requests
The syntax around your forEach is a little off; try this instead:
d3.json("data/data.json", function(data) {
data.forEach(function(d) {
d.date = parseDate(d.date);
d.close = +d.close;
});
});
(As Felix points out, this will only work if your JSON object is defined and is an array)
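For reference, data.json must contain a top-level array for forEach to work; a minimal example of the expected shape (field names taken from the code above, date format assumed to match parseDate):
[
    { "date": "1-May-12", "close": 58.13 },
    { "date": "30-Apr-12", "close": 53.98 }
]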
Here is a small piece of code that converts TSV to JSON. It could help you...
PS: this is TypeScript, but you can easily convert it to vanilla JavaScript ;)
// Convert a batch of TSV data into formatted objects
tsvToJson(datas: string): Array<Object>{
// Separate the lines
let array_datas = datas.split(/\r\n|\r|\n/g);
// Split each line into its values
var detailed_datas = [];
for(var i = 0; i < array_datas.length; i++){
detailed_datas.push(array_datas[i].split("\t"));
}
// Create index
var index = [];
var last_index = ""; // If the header cell we're reading is "", it might belong to an array column, so we reuse the last index
for(var i = 0; i < detailed_datas[0].length; i++){
if(detailed_datas[0][i] == "") index.push(last_index);
else {
index.push(detailed_datas[0][i]);
last_index = detailed_datas[0][i];
}
}
// Separate data from index
detailed_datas.splice(0, 1);
// Format data
var formated_datas = [];
for(var i = 0; i < detailed_datas.length; i++){
var row = {};
for(var j = 0; j < detailed_datas[i].length; j++){
// Check if value is empty
if(detailed_datas[i][j] != ""){
if(typeof row[index[j]] == "object"){
// it's already set as an array
row[index[j]].push(detailed_datas[i][j]);
} else if(row[index[j]] != undefined){
// Already have a value, so it might be an array
row[index[j]] = [row[index[j]], detailed_datas[i][j]];
} else {
// It's empty for now, so let's say first that it's a string
row[index[j]] = detailed_datas[i][j];
}
}
}
formated_datas.push(row);
}
console.log(formated_datas); // #TODO : remove this
return formated_datas;
}
I transpiled and condensed Wetteren's code:
convertTSVtoJSON(tsvData) {
const formattedData = tsvData.split(/\r\n|\r|\n/g).filter(e => !!e).map((parsedEntry) => parsedEntry.split("\t"));
const tsvHeaders = formattedData.shift();
return formattedData.map(formattedEntry => {
return tsvHeaders.reduce((jsonObject, heading, position) => {
jsonObject[heading] = formattedEntry[position];
return jsonObject;
}, {});
});
}
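For example, calling it with a small header-plus-two-rows input (note the values come back as strings):
convertTSVtoJSON("date\tclose\n1-May-12\t58.13\n30-Apr-12\t53.98");
// [{ "date": "1-May-12", "close": "58.13" }, { "date": "30-Apr-12", "close": "53.98" }]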

How to pass a whole dojox.grid.DataGrid store (items JSON data) to a servlet?

I have a button on the page; when clicked, it passes all the data to a servlet that can update each row's data. My question is: how can I pass the whole store to the servlet as JSON data? Is there an easy way? Thanks
Here is some code I wrote to get the store into a plain object, which can then be converted to JSON using dojo.toJson(obj). I learned about this from the dojotoolkit website originally (give credit where credit is due). I realize this code is huge and nasty; when I looked for a better way about a year back, I could not find one.
JsonHelper.storeToObject = function(store) {
var object = [];
var index = -1;
store.fetch({
onItem : function(item, request) {
object[++index] = JsonHelper.itemToObject(store, item);
}
});
return object;
};
JsonHelper.itemToObject = function(store, item) {
// store:
// The datastore the item came from.
// item:
// The item in question.
var obj = {};
if (item && store) {
// Determine the attributes we need to process.
var attributes = store.getAttributes(item);
if (attributes && attributes.length > 0) {
var i;
for (i = 0; i < attributes.length; i++) {
var values = store.getValues(item, attributes[i]);
if (values) {
// Handle multivalued and single-valued attributes.
if (values.length > 1) {
var j;
obj[attributes[i]] = [];
for (j = 0; j < values.length; j++) {
var value = values[j];
// Check that the value isn't another item. If
// it is, process it as an item.
if (store.isItem(value)) {
obj[attributes[i]].push(JsonHelper.itemToObject(store, value));
} else {
obj[attributes[i]].push(value);
}
}
} else {
if (store.isItem(values[0])) {
obj[attributes[i]] = JsonHelper.itemToObject(store, values[0]);
} else {
obj[attributes[i]] = values[0];
}
}
}
}
}
}
return obj;
};
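A usage sketch, assuming a grid variable of your own and a hypothetical servlet URL (/myServlet is a placeholder):
var obj = JsonHelper.storeToObject(myGrid.store);
var json = dojo.toJson(obj);
// POST the JSON to the servlet; url and handleAs depend on your setup
dojo.xhrPost({
    url: "/myServlet", // hypothetical endpoint
    postData: json,
    handleAs: "text"
});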