gulp-uglify error: how to convert code to ES5 - gulp

I am getting this error while trying to push my code to the staging environment:
throw er; // Unhandled 'error' event
^
GulpUglifyError: unable to minify JavaScript
Is there any way to convert this code to ES5? I have no idea where to start or what the error means. Can you help me please?
// animated number counter
var stats = document.querySelectorAll(".counter");
stats.forEach(stat => {
    var patt = /(\D+)?(\d+)(\D+)?(\d+)?(\D+)?/;
    var time = 1000;
    var result = [...patt.exec(stat.textContent)];
    var fresh = true;
    var ticks;
    result.shift();
    result = result.filter(res => res != null);
    while (stat.firstChild) {
        stat.removeChild(stat.firstChild);
    }
    for (var res of result) {
        if (isNaN(res)) {
            stat.insertAdjacentHTML("beforeend", `<span>${res}</span>`);
        } else {
            for (var i = 0; i < res.length; i++) {
                stat.insertAdjacentHTML(
                    "beforeend",
                    `<span data-value="${res[i]}">
                        <span>–</span>
                        ${Array(parseInt(res[i]) + 1)
                            .join(0)
                            .split(0)
                            .map(
                                (x, j) => `
                                <span>${j}</span>
                                `
                            )
                            .join("")}
                    </span>`
                );
            }
        }
    }
    ticks = [...stat.querySelectorAll("span[data-value]")];
    var activate = () => {
        var top = stat.getBoundingClientRect().top;
        var offset = window.innerHeight * 0.8;
        setTimeout(() => {
            fresh = false;
        }, time);
        if (top < offset) {
            setTimeout(() => {
                for (var tick of ticks) {
                    var dist = parseInt(tick.getAttribute("data-value")) + 1;
                    tick.style.transform = `translateY(-${dist * 100}%)`;
                }
            }, fresh ? time : 0);
            window.removeEventListener("scroll", activate);
        }
    };
    window.addEventListener("scroll", activate);
    activate();
});
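The error itself just means gulp-uglify, which only parses ES5, hit the ES6 syntax above (arrow functions, template literals, spread). Rather than rewriting the file by hand, a common fix is to transpile with Babel before minifying. A minimal sketch, assuming gulp-babel (the Babel 7 line) and @babel/preset-env are installed; the task name and paths are illustrative:
var gulp = require('gulp');
var babel = require('gulp-babel');
var uglify = require('gulp-uglify');

gulp.task('scripts', function () {
    return gulp.src('src/js/**/*.js')
        .pipe(babel({ presets: ['@babel/preset-env'] })) // transpile ES6+ down to ES5
        .pipe(uglify()) // now safe to minify
        .pipe(gulp.dest('dist/js'));
});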

Rooms with Forge

My goal is to see the Revit rooms in the Forge viewer. The application is in .NET Core. I have tried implementing GenerateMasterViews.
The code I am using to achieve this is:
[Route("api/forge/modelderivative/jobs")]
public async Task<dynamic> TranslateObject([FromBody] TranslateObjectModel objModel)
{
    dynamic oauth = await OAuthController.GetInternalAsync();

    // prepare the payload
    var advOutputPayload = new JobSvf2OutputPayloadAdvanced();
    advOutputPayload.GenerateMasterViews = true;
    List<JobPayloadItem> outputs = new List<JobPayloadItem>()
    {
        new JobPayloadItem(
            JobPayloadItem.TypeEnum.Svf2,
            new List<JobPayloadItem.ViewsEnum>()
            {
                JobPayloadItem.ViewsEnum._2d,
                JobPayloadItem.ViewsEnum._3d
            },
            advOutputPayload
        )
    };
    JobPayload job;
    job = new JobPayload(new JobPayloadInput(objModel.objectName), new JobPayloadOutput(outputs));

    // start the translation
    DerivativesApi derivative = new DerivativesApi();
    derivative.Configuration.AccessToken = oauth.access_token;
    dynamic jobPosted = await derivative.TranslateAsync(job);
    return jobPosted;
}
Autodesk.Viewing.Initializer(options, () => {
    viewer = new Autodesk.Viewing.GuiViewer3D(document.getElementById('forgeViewer'));
    viewer.start();
    var documentId = 'urn:' + urn;
    Autodesk.Viewing.Document.load(documentId, onDocumentLoadSuccess, onDocumentLoadFailure);
});

function onDocumentLoadSuccess(doc) {
    var viewables = doc.getRoot().getDefaultGeometry();
    viewer.loadDocumentNode(doc, viewables).then(i => {
        // document loaded, any action?
    });
}
But I can't get it to work.
I have looked for information, but these two URLs don't work, and I couldn't see how to do it:
https://forge.autodesk.com/en/docs/model-derivative/v2/tutorials/prep-roominfo4viewer/option2/
https://forge.autodesk.com/en/docs/model-derivative/v2/tutorials/prep-roominfo4viewer/option1/
To check if the object is in the room, we can do the following:
Get bounds for each room and object
getBoundingBox(dbId, model) {
    const it = model.getInstanceTree();
    const fragList = model.getFragmentList();
    let bounds = new THREE.Box3();
    it.enumNodeFragments(dbId, (fragId) => {
        let box = new THREE.Box3();
        fragList.getWorldBounds(fragId, box);
        bounds.union(box);
    }, true);
    return bounds;
}
Iterate over the rooms and objects and use containsBox or containsPoint to check whether their bounding boxes intersect.
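For example, a minimal sketch of that coarse check, assuming roomBounds and objBounds are the THREE.Box3 values returned by getBoundingBox above:
if (roomBounds.containsBox(objBounds) || roomBounds.containsPoint(objBounds.center())) {
    // the object is at least partly inside the room
}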
If you want to do a more accurate collision check, you can take advantage of ThreeCSG.js to do geometry intersection. Here is a blog post demonstrating how to integrate ThreeCSG.js with the Forge Viewer:
https://forge.autodesk.com/blog/boolean-operations-forge-viewer
Note: this process can reduce viewer performance, since JavaScript runs on a single thread in the browser, so you may want to use something like a Web Worker to do the complex calculations on a separate thread.
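A minimal sketch of that idea (the file name and message shape are illustrative, and ThreeCSG.js would have to be loaded into the worker via importScripts for real CSG work):
// worker.js
self.onmessage = function (e) {
    // ... run the heavy intersection math on e.data.positions / e.data.indices ...
    self.postMessage({ intersects: true });
};

// main thread
var worker = new Worker('worker.js');
worker.onmessage = function (e) { console.log('intersects:', e.data.intersects); };
worker.postMessage({ positions: [], indices: [] });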
Update:
Here is a working sample extension demonstrating the above idea:
/////////////////////////////////////////////////////////////////////
// Copyright (c) Autodesk, Inc. All rights reserved
// Written by Forge Partner Development
//
// Permission to use, copy, modify, and distribute this software in
// object code form for any purpose and without fee is hereby granted,
// provided that the above copyright notice appears in all copies and
// that both that copyright notice and the limited warranty and
// restricted rights notice below appear in all supporting
// documentation.
//
// AUTODESK PROVIDES THIS PROGRAM "AS IS" AND WITH ALL FAULTS.
// AUTODESK SPECIFICALLY DISCLAIMS ANY IMPLIED WARRANTY OF
// MERCHANTABILITY OR FITNESS FOR A PARTICULAR USE. AUTODESK, INC.
// DOES NOT WARRANT THAT THE OPERATION OF THE PROGRAM WILL BE
// UNINTERRUPTED OR ERROR FREE.
/////////////////////////////////////////////////////////////////////
(function () {
const Utility = {
/**
* Reset an object
* @param {Object} obj An object to be reset.
* ref: https://stackoverflow.com/a/24090180
*/
resetObject: function (obj) {
for (let key of Object.getOwnPropertyNames(obj)) {
if (!obj.hasOwnProperty(key)) continue;
let val = obj[key];
switch (typeof val) {
case 'string':
obj[key] = ''; break;
case 'number':
obj[key] = 0; break;
case 'boolean':
obj[key] = false; break;
case 'object':
if (val === null) break;
if (val instanceof Array) {
while (obj[key].length > 0) {
obj[key].pop();
}
break;
}
obj[key] = {}; // reassign on obj so the reset actually sticks
//Or recursively clear the sub-object
//resetObject(val);
break;
}
}
}
};
/**
* A Forge Viewer extension for locating elements by Revit Rooms
* @class
*/
class RoomLocatorExtension extends Autodesk.Viewing.Extension {
constructor(viewer, options) {
super(viewer, options);
this.roomCategoryName = options.roomCategoryName || 'Revit Rooms';//'Revit Habitaciones'
this.onContextMenu = this.onContextMenu.bind(this);
}
onContextMenu(menu, status) {
if (status.hasSelected) {
menu.push({
title: 'Find room',
target: async () => {
let selSet = this.viewer.getSelection();
this.viewer.clearSelection();
const roomDbIds = await this.locateElementByRoom(selSet[0]);
if (!roomDbIds || roomDbIds.length <= 0) return;
this.viewer.select(roomDbIds);
}
});
}
}
async getPropertiesAsync(dbId, model) {
return new Promise((resolve, reject) => {
model.getProperties2(
dbId,
(result) => resolve(result),
(error) => reject(error)
);
});
}
async getElementsByCategoryAsync(category) {
return new Promise((resolve, reject) => {
this.viewer.search(
category,
(dbIds) => resolve(dbIds),
(error) => reject(error),
['Category'],
{ searchHidden: true }
);
});
}
async getRoomDbIds() {
try {
const roomDbIds = await this.getElementsByCategoryAsync(this.roomCategoryName);
if (!roomDbIds || roomDbIds.length <= 0) {
throw new Error('No Rooms found in current model');
}
return roomDbIds;
} catch (ex) {
console.warn(`[RoomLocatorExtension]: ${ex}`);
throw new Error('No room found');
}
}
getBoundingBox(dbId, model) {
const it = model.getInstanceTree();
const fragList = model.getFragmentList();
let bounds = new THREE.Box3();
it.enumNodeFragments(dbId, (fragId) => {
let box = new THREE.Box3();
fragList.getWorldBounds(fragId, box);
bounds.union(box);
}, true);
return bounds;
}
getLeafFragIds(model, leafId) {
const instanceTree = model.getData().instanceTree;
const fragIds = [];
instanceTree.enumNodeFragments(leafId, function (fragId) {
fragIds.push(fragId);
});
return fragIds;
}
getComponentGeometryInfo(dbId, model) {
const viewer = this.viewer;
const viewerImpl = viewer.impl;
const fragIds = this.getLeafFragIds(model, dbId);
let matrixWorld = null;
const meshes = fragIds.map((fragId) => {
const renderProxy = viewerImpl.getRenderProxy(model, fragId);
const geometry = renderProxy.geometry;
const attributes = geometry.attributes;
const positions = geometry.vb ? geometry.vb : attributes.position.array;
const indices = attributes.index.array || geometry.ib;
const stride = geometry.vb ? geometry.vbstride : 3;
const offsets = geometry.offsets;
matrixWorld = matrixWorld || renderProxy.matrixWorld.elements;
return {
positions,
indices,
offsets,
stride
};
});
return {
matrixWorld,
meshes
};
}
getComponentGeometry(data, vertexArray) {
const offsets = [
{
count: data.indices.length,
index: 0,
start: 0
}
];
for (let oi = 0, ol = offsets.length; oi < ol; ++oi) {
let start = offsets[oi].start;
let count = offsets[oi].count;
let index = offsets[oi].index;
for (let i = start, il = start + count; i < il; i += 3) {
const a = index + data.indices[i];
const b = index + data.indices[i + 1];
const c = index + data.indices[i + 2];
const vA = new THREE.Vector3();
const vB = new THREE.Vector3();
const vC = new THREE.Vector3();
vA.fromArray(data.positions, a * data.stride);
vB.fromArray(data.positions, b * data.stride);
vC.fromArray(data.positions, c * data.stride);
vertexArray.push(vA);
vertexArray.push(vB);
vertexArray.push(vC);
}
}
}
buildComponentMesh(data) {
const vertexArray = [];
for (let idx = 0; idx < data.nbMeshes; ++idx) {
const meshData = {
positions: data['positions' + idx],
indices: data['indices' + idx],
stride: data['stride' + idx]
}
this.getComponentGeometry(meshData, vertexArray);
}
const geometry = new THREE.Geometry();
for (let i = 0; i < vertexArray.length; i += 3) {
geometry.vertices.push(vertexArray[i]);
geometry.vertices.push(vertexArray[i + 1]);
geometry.vertices.push(vertexArray[i + 2]);
const face = new THREE.Face3(i, i + 1, i + 2);
geometry.faces.push(face);
}
const matrixWorld = new THREE.Matrix4();
matrixWorld.fromArray(data.matrixWorld);
const mesh = new THREE.Mesh(geometry);
mesh.applyMatrix(matrixWorld);
mesh.boundingBox = data.boundingBox;
mesh.bsp = new ThreeBSP(mesh)
mesh.dbId = data.dbId;
return mesh;
}
buildCsgMesh(dbId, model) {
const geometry = this.getComponentGeometryInfo(dbId, model);
const data = {
boundingBox: this.getBoundingBox(dbId, model),
matrixWorld: geometry.matrixWorld,
nbMeshes: geometry.meshes.length,
dbId
};
geometry.meshes.forEach((mesh, idx) => {
data['positions' + idx] = mesh.positions;
data['indices' + idx] = mesh.indices;
data['stride' + idx] = mesh.stride;
});
return this.buildComponentMesh(data);
}
async buildBBoxes() {
try {
const model = this.viewer.model;
const roomBBoxes = {};
const roomDbIds = await this.getRoomDbIds();
for (let i = 0; i < roomDbIds.length; i++) {
let dbId = roomDbIds[i];
let bbox = await this.getBoundingBox(dbId, model);
roomBBoxes[dbId] = bbox;
}
this.cachedBBoxes['rooms'] = roomBBoxes;
} catch (ex) {
console.warn(`[RoomLocatorExtension]: ${ex}`);
throw new Error('Cannot build bounding boxes from rooms');
}
}
async locateElementByRoom(dbId) {
let bbox = await this.getBoundingBox(dbId, this.viewer.model);
const roomDbIds = Object.keys(this.cachedBBoxes['rooms']);
const roomBoxes = Object.values(this.cachedBBoxes['rooms']);
// Coarse Phase Collision
const coarseResult = [];
for (let i = 0; i < roomDbIds.length; i++) {
let roomDbId = roomDbIds[i];
let roomBox = roomBoxes[i];
if (roomBox.containsBox(bbox)) {
coarseResult.push(parseInt(roomDbId));
} else {
if (roomBox.containsPoint(bbox.min) || roomBox.containsPoint(bbox.max) || roomBox.containsPoint(bbox.center())) {
coarseResult.push(parseInt(roomDbId));
}
}
}
// Fine Phase Collision
const fineResult = [];
let elementCsgMesh = this.buildCsgMesh(dbId, this.viewer.model);
for (let i = 0; i < coarseResult.length; i++) {
let roomDbId = coarseResult[i];
let roomCsgMesh = this.buildCsgMesh(roomDbId, this.viewer.model);
let result = elementCsgMesh.bsp.intersect(roomCsgMesh.bsp);
if (result.tree.polygons.length <= 0) {
result = roomCsgMesh.bsp.intersect(elementCsgMesh.bsp);
// if (!this.viewer.overlays.hasScene('csg'))
// this.viewer.overlays.addScene('csg');
// else
// this.viewer.overlays.clearScene('csg');
// let mat = new THREE.MeshBasicMaterial({ color: 'red' })
// let mesh = result.toMesh(mat);
// this.viewer.overlays.addMesh(mesh, 'csg')
if (result.tree.polygons.length <= 0) continue;
}
fineResult.push(roomDbId);
}
return fineResult;
}
async load() {
await Autodesk.Viewing.Private.theResourceLoader.loadScript(
'https://cdn.jsdelivr.net/gh/Wilt/ThreeCSG@develop/ThreeCSG.js',
'ThreeBSP'
);
if (!window.ThreeBSP)
throw new Error('Cannot load ThreeCSG.js, please download a copy from https://github.com/Wilt/ThreeCSG/blob/develop/ThreeCSG.js')
await this.viewer.waitForLoadDone();
this.cachedBBoxes = {};
await this.buildBBoxes();
this.viewer.registerContextMenuCallback(
'RoomLocatorExtension',
this.onContextMenu
);
return true;
}
unload() {
Utility.resetObject(this.cachedBBoxes);
this.viewer.unregisterContextMenuCallback(
'RoomLocatorExtension',
this.onContextMenu
);
return true;
}
}
Autodesk.Viewing.theExtensionManager.registerExtension('RoomLocatorExtension', RoomLocatorExtension);
})();
Snapshots: (screenshots of the extension in action omitted)

Getting NaN when a directive is used in angularJs

I am using a directive to show a number count-up effect on my dashboard. When I use the directive on the h3, I get the result NaN; when I remove the directive from the h3, I get the correct output.
When I looked into the directive, I can see that the value read from the element is NaN. Can anyone tell me what is wrong in the code?
Output with directive:
<h3 animate-numbers="" class="ng-binding">NaN</h3>
Html:
<h3 animate-numbers>{{vm.dashboard.no_of_applications}}</h3>
Controller:
vm.dashboard = {
    no_of_users: 0,
    no_of_applications: 0,
    no_of_departments: 0,
    no_of_schemes: 0,
};
Directive:
'use strict';

angular.module('ss')
    .directive('animateNumbers', function ($timeout, $log) {
        return {
            replace: false,
            scope: true,
            link: function (scope, element) {
                var e = element[0];
                $log.log('e is', e);
                var refreshInterval = 30;
                var duration = 1000; // milliseconds
                var number = parseInt(e.innerText);
                var step = 0;
                var num = 0;
                var steps = Math.ceil(duration / refreshInterval);
                var increment = (number / steps);
                var percentCompleted = 0;
                var lastNumberSlowCount = 3;
                if (number > lastNumberSlowCount) {
                    number = number - lastNumberSlowCount;
                }
                scope.timoutId = null;
                var slowCounter = function () {
                    scope.timoutId = $timeout(function () {
                        lastNumberSlowCount--;
                        if (lastNumberSlowCount < 0) {
                            $timeout.cancel(scope.timoutId);
                        } else {
                            number++;
                            e.textContent = number;
                            slowCounter();
                        }
                    }, 500);
                };
                var counter = function () {
                    scope.timoutId = $timeout(function () {
                        num += increment;
                        percentCompleted = Math.round((num / number) * 100);
                        if (percentCompleted > 60 && percentCompleted < 80) {
                            refreshInterval = refreshInterval + 10;
                        } else if (percentCompleted > 90) {
                            refreshInterval = 200;
                        }
                        step++;
                        if (step >= steps) {
                            $timeout.cancel(scope.timoutId);
                            num = number;
                            e.textContent = number;
                            if (number > lastNumberSlowCount) {
                                slowCounter();
                            }
                        } else {
                            e.textContent = Math.round(num);
                            counter();
                        }
                    }, refreshInterval);
                };
                counter();
                return true;
            }
        };
    });
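A likely cause (my reading, not confirmed in the thread): the directive's link function runs before Angular has rendered the {{vm.dashboard.no_of_applications}} binding, so parseInt(e.innerText) parses the raw expression or an empty string and yields NaN. One sketch of a fix is to pass the value through the attribute and $observe it, so the animation only starts once a real number arrives (startCounting is a hypothetical helper wrapping the counter logic above):
// usage: <h3 animate-numbers="{{vm.dashboard.no_of_applications}}"></h3>
link: function (scope, element, attrs) {
    attrs.$observe('animateNumbers', function (value) {
        var number = parseInt(value, 10);
        if (!isNaN(number)) {
            startCounting(number); // hypothetical: run the counting logic with a real value
        }
    });
}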

IndexedDB deleted objects never removed from disk on Chrome

I am trying to create an application where products are received every second, updating a fixed-size database (~300 MB) using an LRU policy. Although I get no exceptions when adding new products or deleting them from the database, it seems that Chrome never deletes the .ldb and .bak files. As a result, I use up several gigabytes of disk space and always reach the quota limit. The same code works perfectly on Firefox. Can someone explain what I am doing wrong? Below you can find the code.
startExperiment(300 * 1024 * 1024);
function startExperiment(lrusize:number) {
var j = 0;
var productsToInsert = new HashMap<number, Product>();
window.indexedDB.deleteDatabase("ExampleDatabase");
var versionNumber = 1;
var stats = new Stats();
var sizeOfDatabase = 0;
var db = new ProductDatabase('ExampleDatabase', versionNumber, () => {
db.getSizeOfDatabase((result) => {
if(result == null) {
sizeOfDatabase = 0;
} else {
sizeOfDatabase = result;
}
});
});
function randomizeArray() {
var numOfMBS = Math.floor((Math.random() * (10 - 2) + 2) * 1024 * 1024);
var bytearray = new Uint8Array(numOfMBS.valueOf());
for (var i = 0; i < bytearray.length; i++) {
bytearray[i] = Math.random() * (100 - 1) + 1;
}
return bytearray;
}
setInterval(function () {
var readAverage = stats.getReadTimesAverage();
var writeAverage = stats.getWriteTimesAverage();
var deleteAverage = stats.getDeleteTimesAverage();
console.log("Num of insertions : " + j + " | Read average : " + readAverage + " | Write average : " + writeAverage + " | Delete average : " + deleteAverage);
}, 5000);
setInterval(function () {
var bytearray = randomizeArray();
var identifier = j++;
var timestamp = Date.now();
db.getProduct(identifier, (product) => {
if (product == null) {
var newProduct = new Product(identifier, timestamp, 0, bytearray);
var size = memorySizeOf(newProduct);
newProduct.sizeInBytes = size;
productsToInsert.set(identifier, newProduct);
}
});
}, 1000);
function updateLRU() {
var tmpList:Product[] = [];
var keys = productsToInsert.keys();
var currentBytesToBeInserted = 0;
for (var i = 0; i < keys.length; i++) {
var product = productsToInsert.get(keys[i]);
tmpList.push(product);
currentBytesToBeInserted += product.sizeInBytes;
}
var currentSize = sizeOfDatabase + currentBytesToBeInserted;
if (currentSize > lrusize) {
var bytesToRemove = currentSize - lrusize;
db.deleteProducts(bytesToRemove, stats, () => {
sizeOfDatabase -= bytesToRemove;
addProducts(tmpList);
});
} else {
addProducts(tmpList);
}
}
function addProducts(tmpList:Product[]) {
var product = tmpList[0];
var startAddProductTs = Date.now();
db.addProduct(product, () => {
var stopAddProductTs = Date.now();
stats.addWriteTimes(stopAddProductTs - startAddProductTs);
sizeOfDatabase += product.sizeInBytes;
tmpList.shift();
productsToInsert.delete(product.productId);
if(tmpList.length > 0) {
addProducts(tmpList);
} else {
db.addDBSize(sizeOfDatabase, () => {
});
}
});
}
setInterval(function () {
updateLRU();
}, 20000);
}
class ProductDatabase {
private db;
constructor(private name:string, private version:number, callback:() => void) {
this.openDatabase(callback);
}
openDatabase(callback:() => void) {
var openDatabaseRequest = window.indexedDB.open(this.name, this.version);
openDatabaseRequest.onupgradeneeded = this.upgrade;
openDatabaseRequest.onsuccess = () => {
this.db = openDatabaseRequest.result;
callback();
}
}
upgrade(event:any) {
var store = event.target.result.createObjectStore("products", {keyPath: 'productId'});
store.createIndex('by_timestamp', "timestamp", {unique: true});
event.target.result.createObjectStore("dbsize", {keyPath: 'sizeId'});
}
getProduct(productId:number, callback:(result:Product) => void) {
var productStore = this.db.transaction(["products"], "readonly").objectStore('products');
var query = productStore.get(productId);
query.onsuccess = () => {
var product = query.result;
callback(product);
}
query.onerror = () => {
console.error("Read product error : " + query.error);
}
}
addDBSize(dbSize:number, callback:() => void) {
var transaction = this.db.transaction('dbsize', 'readwrite');
var productStore = transaction.objectStore('dbsize');
var newSize = {'sizeId': 1, 'bytelength': dbSize};
var request = productStore.put(newSize);
request.onerror = () => {
console.log("Unsuccessful request with error : " + request.error);
}
transaction.oncomplete = () => {
callback();
}
transaction.onerror = () => {
console.error("fucking error : " + transaction.error);
}
transaction.onabort = () => {
console.error("Shit. transaction is aborted with error : " + transaction.error);
}
}
addCachedProducts(productList:Array<Product>, callback:() => void) {
var transaction = this.db.transaction('products', 'readwrite');
var productStore = transaction.objectStore('products');
for (var i = 0; i < productList.length; i++) {
productStore.add(productList[i]);
}
transaction.oncomplete = () => {
callback();
}
transaction.onabort = () => {
console.error("Shit. transaction is aborted with error : " + transaction.error);
}
}
getNumberOfProducts(callback:(result:number) => void) {
var productStore = this.db.transaction('products', 'readonly').objectStore('products');
var query = productStore.count();
query.onsuccess = () => {
var result = query.result;
callback(result);
}
query.onerror = () => {
console.error("Read number of products error : " + query.error);
}
}
getSizeOfDatabase(callback:(result:number) => void) {
var productStore = this.db.transaction('dbsize', "readonly").objectStore('dbsize');
var query = productStore.get(1);
query.onsuccess = () => {
var record = query.result;
// the stored record is {'sizeId': 1, 'bytelength': n}; return just the byte count
callback(record ? record.bytelength : null);
}
query.onerror = () => {
console.error("Read databasesize error : " + query.error);
}
}
deleteProducts(numOfBytes:number, stats:Stats, callback:() => void) {
var transaction = this.db.transaction('products', 'readwrite');
var productStore = transaction.objectStore('products');
var index = productStore.index('by_timestamp');
var request = index.openCursor();
request.onsuccess = function () {
var cursor = request.result;
if (cursor) {
var cursorBytes = cursor.value.sizeInBytes;
var startDeleteTs = Date.now();
var deleteRequest = cursor.delete();
deleteRequest.onsuccess = () => {
var stopDeleteTs = Date.now();
stats.addDeleteTimes(stopDeleteTs - startDeleteTs);
numOfBytes -= cursorBytes;
if (numOfBytes > 0) {
cursor.continue();
}
}
deleteRequest.onerror = () => {
console.error("Delete product error : " + deleteRequest.error);
}
}
}
transaction.oncomplete = () => {
callback();
}
transaction.onabort = () => {
console.log("Delete transaction aborted with error : " + transaction.error);
}
}
addProduct(product:Product, callback:() => void) {
var transaction = this.db.transaction('products', 'readwrite');
var productStore = transaction.objectStore('products');
var request = productStore.put(product);
request.onerror = () => {
console.log("Unsuccessful request with error : " + request.error);
}
transaction.oncomplete = () => {
callback();
}
transaction.onerror = () => {
console.error("fucking error : " + transaction.error);
}
transaction.onabort = () => {
console.error("Shit. transaction is aborted with error : " + transaction.error);
}
}
}
In Chrome, there is a delay between deleting data through the IndexedDB API and having it deleted from disk. Usually that's fine. But in my experience, sometimes it never gets deleted from disk, which is really bad when the user has exceeded their quota because then you can never store any more data even if you delete everything.
Thanks dumbmatter. Unfortunately, my experience also shows that Chrome never deletes the unnecessary files. The problem is that Chrome's IndexedDB implementation is built on LevelDB, which rarely runs compaction. However, I found a solution using PouchDB, which runs on top of the IndexedDB API and lets me explicitly call compact and delete my unnecessary files.
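A minimal sketch of that approach, assuming the pouchdb package (names are illustrative):
var PouchDB = require('pouchdb');
var db = new PouchDB('ExampleDatabase'); // backed by IndexedDB in the browser

// after deleting documents, explicitly reclaim the space
db.compact().then(function (info) {
    console.log('compaction finished', info);
}).catch(function (err) {
    console.error('compaction failed', err);
});
Alternatively, PouchDB accepts auto_compaction: true in its constructor options, which compacts automatically after every write.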

Node.js + bluebird + JSON. JSON is not valid after modification

Trying to get API data.
I have a problem with creating valid JSON after modification.
The data should look like this: [{"1"},{"2"},{"3"}, ... ,{"201"},{"202"},{"203"}, ...]
but now I get: [{"1"},{"2"},{"3"}, ...],[{"201"},{"202"},{"203"}, ...]
Where is my mistake?
var Promise = require("bluebird");
var request = require('bluebird').promisifyAll(require('request'));
var fs = Promise.promisifyAll(require('fs'));

var ladders = {"hardcore": "hardcore", "standard": "standard"};

function getJSONsync(urls) {
    var ladder = [];
    Promise.map(urls, function(url) {
        return request
            .getAsync(url)
            .spread(function (res, body) {
                if (res.statusCode != 200) {
                    throw new Error('Unsuccessful attempt. Code: ' + res.statusCode);
                }
                return JSON.stringify(ladder.concat(JSON.parse(body).entries), "", 4);
            })
            .catch(console.error);
    }, { concurrency: 10 })
    .then(function(arr) {
        fs.writeFileAsync('file.json', arr);
    })
}

function setUrls(ladderName, offset, limit) {
    var arr = [];
    while (offset < 15000) {
        arr.push('http://api.pathofexile.com/ladders/' + ladderName + '?offset=' + offset + '&limit=' + limit);
        offset = offset + 200;
    }
    return arr;
}

getJSONsync(setUrls(ladders.hardcore, 0, 200));
Thanks for the help. Sorry for my English.
Finally:
var Promise = require("bluebird");
var request = require('bluebird').promisifyAll(require('request'));
var fs = Promise.promisifyAll(require('fs'));

var ladders = {"hardcore": "hardcore", "standard": "standard"};

function getJSONsync(urls) {
    Promise.map(urls, function(url) {
        return request
            .getAsync(url)
            .spread(function (res, body) {
                if (res.statusCode != 200) {
                    throw new Error('Unsuccessful attempt. Code: ' + res.statusCode);
                }
                return JSON.parse(body).entries;
            })
            .catch(console.error);
    }, { concurrency: 10 })
    .reduce(function(a, b) { return a.concat(b) })
    .then(function(arr) {
        fs.writeFileAsync('file.json', JSON.stringify(arr, "", 4));
        console.log(arr.length);
    })
}

function setUrls(ladder, offset, limit) {
    var arr = [];
    while (offset < 15000) {
        arr.push('http://api.pathofexile.com/ladders/' + ladder + '?offset=' + offset + '&limit=' + limit);
        offset = offset + 200;
    }
    return arr;
}

getJSONsync(setUrls(ladders.hardcore, 0, 200));
Promise.map returns an array, so when you do ladder.concat inside each callback you return another array, and you end up with an array of arrays like [[{"1"}, ...], [{"201"}, ...]].
You should just remove concat:
return JSON.stringify(JSON.parse(body).entries, "", 4);
But if you want to use the ladder variable, you can do ladder.push(JSON.stringify(JSON.parse(body).entries, "", 4)) and use it instead of the returned arr variable.

Can I increase QUOTA_BYTES_PER_ITEM in Chrome?

Is there any way to increase the chrome.storage.sync.QUOTA_BYTES_PER_ITEM ?
For me, the default 4096 Bytes is a little bit short.
I tried to execute
chrome.storage.sync.QUOTA_BYTES_PER_ITEM = 8192;
However, it seems that the actual limit doesn't change.
How can I do this?
No, QUOTA_BYTES_PER_ITEM is there for reference only; it is not a settable value. You could use the value of QUOTA_BYTES_PER_ITEM to split an item up into multiple items, though:
function syncStore(key, objectToStore, callback) {
    var jsonstr = JSON.stringify(objectToStore);
    var i = 0;
    var storageObj = {};
    // split jsonstr into chunks and store them in an object indexed by `key_i`
    while (jsonstr.length > 0) {
        var index = key + "_" + i++;
        // since the key uses up some per-item quota, see how much is left for the value
        // also trim off 2 for quotes added by storage-time `stringify`
        var valueLength = chrome.storage.sync.QUOTA_BYTES_PER_ITEM - index.length - 2;
        // trim down segment so it will be small enough even when run through `JSON.stringify` again at storage time
        var segment = jsonstr.substr(0, valueLength);
        while (JSON.stringify(segment).length > valueLength)
            segment = jsonstr.substr(0, --valueLength);
        storageObj[index] = segment;
        jsonstr = jsonstr.substr(valueLength);
    }
    // store all the chunks
    chrome.storage.sync.set(storageObj, callback);
}
Then write an analogous fetch function that fetches by key and glues the object back together.
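For example, a minimal sketch of that fetch side (syncFetch is an illustrative name, not part of the API):
function syncFetch(key, callback) {
    chrome.storage.sync.get(null, function (items) {
        var jsonstr = "";
        var i = 0;
        // glue the `key_i` chunks back together in order until one is missing
        while (items[key + "_" + i] !== undefined) {
            jsonstr += items[key + "_" + i];
            i++;
        }
        callback(jsonstr.length > 0 ? JSON.parse(jsonstr) : undefined);
    });
}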
Just a modified version of @apsilliers' answer:
function syncStore(key, objectToStore) {
    var jsonstr = JSON.stringify(objectToStore);
    var i = 0;
    var storageObj = {};
    // split jsonstr into chunks and store them in an object indexed by `key_i`
    while (jsonstr.length > 0) {
        var index = key + "_" + i++;
        // since the key uses up some per-item quota, see how much is left for the value
        // also trim off 2 for quotes added by storage-time `stringify`
        const maxLength = chrome.storage.sync.QUOTA_BYTES_PER_ITEM - index.length - 2;
        var valueLength = jsonstr.length;
        if (valueLength > maxLength) {
            valueLength = maxLength;
        }
        // trim down segment so it will be small enough even when run through `JSON.stringify` again at storage time
        // max try is QUOTA_BYTES_PER_ITEM to avoid infinite loop
        var segment = jsonstr.substr(0, valueLength);
        for (let i = 0; i < chrome.storage.sync.QUOTA_BYTES_PER_ITEM; i++) {
            const jsonLength = JSON.stringify(segment).length;
            if (jsonLength > maxLength) {
                segment = jsonstr.substr(0, --valueLength);
            } else {
                break;
            }
        }
        storageObj[index] = segment;
        jsonstr = jsonstr.substr(valueLength);
    }
    // store all the chunks (restoring the final step from the original answer above)
    chrome.storage.sync.set(storageObj);
}
And a function to read each partition and merge it again:
function syncGet(key, callback) {
    chrome.storage.sync.get(key, (data) => {
        console.log(data[key]);
        console.log(typeof data[key]);
        if (data != undefined && data != "undefined" && data != {} && data[key] != undefined && data[key] != "undefined") {
            const keyArr = new Array();
            for (let i = 0; i <= data[key].count; i++) {
                keyArr.push(`${data[key].prefix}${i}`)
            }
            chrome.storage.sync.get(keyArr, (items) => {
                console.log(data)
                const keys = Object.keys(items);
                const length = keys.length;
                let results = "";
                if (length > 0) {
                    const sepPos = keys[0].lastIndexOf("_");
                    const prefix = keys[0].substring(0, sepPos);
                    for (let x = 0; x < length; x++) {
                        results += items[`${prefix}_${x}`];
                    }
                    callback(JSON.parse(results));
                    return;
                }
                callback(undefined);
            });
        } else {
            callback(undefined);
        }
    });
}
I tested it and it works for my case.
This is a better version of @uncle bob's functions, working with Manifest V3 (you can use it just like the normal sync.set or sync.get functions).
NOTE: it only works with JSONs (arrays and objects), since a string shouldn't be that long.
let browserServices;
if (typeof browser === "undefined") {
    browserServices = chrome;
} else {
    browserServices = browser;
}

function syncSet(obj = {}) {
    return new Promise((resolve, reject) => {
        var storageObj = {};
        for (let u = 0; u < Object.keys(obj).length; u++) {
            const key = Object.keys(obj)[u];
            const objectToStore = obj[key];
            var jsonstr = JSON.stringify(objectToStore);
            var i = 0;
            // split jsonstr into chunks and store them in an object indexed by `key_i`
            while (jsonstr.length > 0) {
                var index = key + "USEDTOSEPERATE" + i++;
                // since the key uses up some per-item quota, see how much is left for the value
                // also trim off 2 for quotes added by storage-time `stringify`
                const maxLength = browserServices.storage.sync.QUOTA_BYTES_PER_ITEM - index.length - 2;
                var valueLength = jsonstr.length;
                if (valueLength > maxLength) {
                    valueLength = maxLength;
                }
                // trim down segment so it will be small enough even when run through `JSON.stringify` again at storage time
                // max try is QUOTA_BYTES_PER_ITEM to avoid infinite loop
                var segment = jsonstr.substring(0, valueLength);
                var jsonLength = JSON.stringify(segment).length;
                segment = jsonstr.substring(0, valueLength = (valueLength - (jsonLength - maxLength) - 1));
                for (let i = 0; i < browserServices.storage.sync.QUOTA_BYTES_PER_ITEM; i++) {
                    jsonLength = JSON.stringify(segment).length;
                    if (jsonLength > maxLength) {
                        segment = jsonstr.substring(0, --valueLength);
                    } else {
                        break;
                    }
                }
                storageObj[index] = segment;
                jsonstr = jsonstr.substring(valueLength, Infinity);
            }
        }
        chrome.storage.sync.set(storageObj).then(() => {
            resolve();
        });
    });
}

function syncGet(uniqueKeys = []) {
    return new Promise((resolve, reject) => {
        browserServices.storage.sync.get(null).then((data) => {
            const keyArr = Object.keys(data).filter(e => uniqueKeys.filter(j => e.indexOf(j) == 0).length > 0);
            browserServices.storage.sync.get(keyArr).then((items) => {
                var results = {};
                for (let i = 0; i < uniqueKeys.length; i++) {
                    const uniqueKey = uniqueKeys[i];
                    const keysFiltered = keyArr.filter(e => e.split("USEDTOSEPERATE")[0] == uniqueKey);
                    if (keysFiltered.length > 0) {
                        results[uniqueKey] = "";
                        for (let x = 0; x < keysFiltered.length; x++) {
                            results[uniqueKey] += items[`${keysFiltered[x]}`];
                        }
                        results[uniqueKey] = JSON.parse(results[uniqueKey]);
                    }
                }
                resolve(results);
            });
        });
    });
}
Example of usage:
syncSet({
    "keyTest": ["a lot of text"],
    "keyTest1": ["a lot of text"]
});

syncGet(["keyTest", "keyTest1"]).then(results => console.log(results));
// {keyTest: ["a lot of text"], keyTest1: ["a lot of text"]}