How to edit a current Google Apps Script to only return details from a particular folder in a shared drive - google-apps-script

I have a script (thank you Cooper) that I'm trying to use to generate a list of all folders and files within one particular folder in a shared drive; however, the script is returning the following error:
Exception: Argument too large: value
gff @ Folder Listing.gs:67
(anonymous) @ Folder Listing.gs:72
gff @ Folder Listing.gs:68
(anonymous) @ Folder Listing.gs:72
gff @ Folder Listing.gs:68
(anonymous) @ Folder Listing.gs:72
gff @ Folder Listing.gs:68
(anonymous) @ Folder Listing.gs:72
gff @ Folder Listing.gs:68
(anonymous) @ Folder Listing.gs:72
I'm assuming the problem is that the script is grabbing all folders and files within the shared drive and the result is far too large for the script to handle, so I'm trying to reduce the source size by targeting a particular folder (or just returning the folder names and not worrying about files at all).
What can I change in this script to reduce the size issue?
function sharedDriveTrees() {
  const ss = SpreadsheetApp.openById("blah"); // need to change the ssid for the output spreadsheet
  const r = Drive.Drives.list();
  const drives = JSON.parse(r).items;
  const shts = ss.getSheets().filter((sh, i) => i < drives.length).filter(e => e);
  var trees = [];
  drives.forEach((obj, i) => {
    obj["title"] = obj.name;
    let ob = JSON.parse(Drive.Files.get(obj.id, { supportsAllDrives: true, supportsTeamDrives: true }));
    obj["alternateLink"] = ob.alternateLink;
    Logger.log('Drive Title: %s Time: %s', obj.title, Utilities.formatDate(new Date(), ss.getSpreadsheetTimeZone(), "HH:mm:ss"));
    shts[i].setName(`${obj.title}\n${Utilities.formatDate(new Date(), ss.getSpreadsheetTimeZone(), "yyyy.MM.dd HH:mm:ss")}`);
    let tree = [];
    CacheService.getScriptCache().put("tree", JSON.stringify(tree), 60);
    level = 1;
    gff(obj);
    tree = JSON.parse(CacheService.getScriptCache().get("tree"));
    let l = tree.reduce((a, c) => {
      if (c.length > a) {
        a = c.length;
      }
      return a;
    }, 0);
    tree.forEach((a, j) => {
      if ((l - a.length) > 0) {
        let arr = [...Array.from(new Array(l - a.length).keys(), x => "")];
        tree[j] = a.concat(arr);
      }
    });
    trees.push(tree);
    const sh = shts[i];
    sh.clearContents();
    sh.getRange(1, 1, tree.length, tree[0].length).setValues(tree);
    SpreadsheetApp.flush();
  });
}
level = 1;
function gff(fobj) {
  //Logger.log('Drive Title: %s', fobj.title);
  const r = Drive.Children.list(fobj.id);
  const fldrMime = "application/vnd.google-apps.folder";
  let tree = JSON.parse(CacheService.getScriptCache().get("tree"));
  let files = [];
  let subfolders = [];
  fobj["level"] = level;
  let children = JSON.parse(r).items;
  children.forEach((obj, i) => {
    let o = JSON.parse(Drive.Files.get(obj.id, { supportsAllDrives: true, supportsTeamDrives: true }));
    o["level"] = level;
    if (o.mimeType == fldrMime) {
      subfolders.push(o);
    } else {
      files.push(o);
    }
  });
  //Logger.log('level: %s', level);
  let arr1 = [...Array.from(new Array(level).keys(), x => { if (x < (level - 1)) { return '' } else { return `=HYPERLINK("${fobj.alternateLink}","${fobj.title}")` } })];
  //Logger.log('arr1: %s', JSON.stringify(arr1));
  tree.push(arr1);
  if (files && files.length > 0) {
    files.forEach(obj => {
      let arr2 = [...Array.from(new Array(level + 1).keys(), x => { if (x < (level)) { return '' } else { return `=HYPERLINK("${obj.alternateLink}","${obj.title}")` } })];
      //Logger.log('arr2: %s', JSON.stringify(arr2));
      tree.push(arr2);
    });
  }
  CacheService.getScriptCache().put("tree", JSON.stringify(tree), 60);
  subfolders.forEach(obj => {
    level++;
    obj.level = level;
    CacheService.getScriptCache().put("tree", JSON.stringify(tree), 60);
    gff(obj);
    tree = JSON.parse(CacheService.getScriptCache().get("tree"));
  });
  level--;
  return;
}

Edit:
After checking the affected line I found out that the issue is happening at CacheService.getScriptCache().put("tree", JSON.stringify(tree), 60). The Cache documentation explains that the limit for the second parameter (value) is 100KB. The original script creator was using the CacheService as a kind of global variable to save every iteration of the loop as it kept adding levels to the tree. Since your folder structure is pretty large, it grew beyond the 100KB limit.
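To see the limit in isolation, here's a minimal sketch (assuming the V8 runtime, since it uses String.repeat) that reproduces the exception by putting a value larger than 100KB into the cache:
function demoCacheLimit() {
  const cache = CacheService.getScriptCache();
  cache.put("small", "x".repeat(1024), 60);         // fine: well under 100KB
  cache.put("tooBig", "x".repeat(200 * 1024), 60);  // throws "Exception: Argument too large: value"
}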
As far as I can tell there's no way to raise this limit, so I rewrote a few lines to pass the object to the gff() function instead of using the cache.
I kept the hyperlinks, but I also added a couple of commented lines that you can switch to just return folder names, as you requested. You can find them at let arr1 = and let arr2 =. You can switch between them to see if there's a performance improvement. You could also just change those lines in the original code that uses the cache, but you may still eventually run into the limit:
function folderTrees() {
  const ss = SpreadsheetApp.openById("<Your spreadsheet id>"); // need to change the ssid for the output spreadsheet
  const f = Drive.Files.get("<Folder ID>", { supportsAllDrives: true, supportsTeamDrives: true });
  const obj = JSON.parse(f);
  const sh = ss.getSheets()[0];
  var trees = [];
  Logger.log('Folder Title: %s Time: %s', obj.title, Utilities.formatDate(new Date(), ss.getSpreadsheetTimeZone(), "HH:mm:ss"));
  sh.setName(`${obj.title}\n${Utilities.formatDate(new Date(), ss.getSpreadsheetTimeZone(), "yyyy.MM.dd HH:mm:ss")}`);
  let tree = [];
  level = 1;
  tree = gff(obj, tree);
  let l = tree.reduce((a, c) => {
    if (c.length > a) {
      a = c.length;
    }
    return a;
  }, 0);
  tree.forEach((a, j) => {
    if ((l - a.length) > 0) {
      let arr = [...Array.from(new Array(l - a.length).keys(), x => "")];
      tree[j] = a.concat(arr);
    }
  });
  trees.push(tree);
  sh.clearContents();
  sh.getRange(1, 1, tree.length, tree[0].length).setValues(tree);
  SpreadsheetApp.flush();
}
level = 1;
function gff(fobj, treeobj) {
  const r = Drive.Children.list(fobj.id);
  const fldrMime = "application/vnd.google-apps.folder";
  let tree = treeobj;
  let files = [];
  let subfolders = [];
  fobj["level"] = level;
  let children = JSON.parse(r).items;
  children.forEach((obj, i) => {
    let o = JSON.parse(Drive.Files.get(obj.id, { supportsAllDrives: true, supportsTeamDrives: true }));
    o["level"] = level;
    if (o.mimeType == fldrMime) {
      subfolders.push(o);
    } else {
      files.push(o);
    }
  });
  // first line adds the hyperlinks and the second one returns only text
  let arr1 = [...Array.from(new Array(level).keys(), x => { if (x < (level - 1)) { return '' } else { return `=HYPERLINK("${fobj.alternateLink}","${fobj.title}")` } })];
  //let arr1 = [...Array.from(new Array(level).keys(), x => { if (x < (level - 1)) { return '' } else { return fobj.title } })];
  tree.push(arr1);
  if (files && files.length > 0) {
    files.forEach(obj => {
      // first line adds the hyperlinks and the second one returns only text
      let arr2 = [...Array.from(new Array(level + 1).keys(), x => { if (x < (level)) { return '' } else { return `=HYPERLINK("${obj.alternateLink}","${obj.title}")` } })];
      //let arr2 = [...Array.from(new Array(level + 1).keys(), x => { if (x < (level)) { return '' } else { return obj.title } })];
      tree.push(arr2);
    });
  }
  subfolders.forEach(obj => {
    level++;
    obj.level = level;
    tree = gff(obj, tree);
  });
  level--;
  return tree;
}
And here's the output:
A few things to note:
You'll need to get the folder ID to plug into the f variable.
The original script looped through all the shared drives and wrote each one's tree to a different sheet within your spreadsheet. Since you only seemed to want a single folder's tree, it now just writes to the first sheet in the file.

Related

How to create Calendar Events to generate notifications for taking medications

I like to create calendar events with notifications to assist me in taking medications, especially when the medication regimen gets complicated. I found this application to be quite useful to me and thought perhaps others might like to use such a script. It uses an array of objects to define the medication title, the hours it must be taken, and the notifications that you require.
Admittedly, I used hours only rather than hours and minutes. It seemed like a useful simplification to me.
Here's the code:
function createMyEvents() {
  const cal = CalendarApp.getCalendarById("calendar id");
  const tA = [{ title: "Take Med1", hours: [6, 12, 18, 23], nots: [10, 30] }, { title: "Take Med2", hours: [8, 20], nots: [10, 30] }];
  // add to calendar for the following offset days
  [2, 3, 4, 5, 6].forEach(offset => {
    let dt = new Date(new Date().getFullYear(), new Date().getMonth(), new Date().getDate() + offset);
    tA.forEach(obj => {
      // using the hours array to create events for each day
      obj.hours.forEach(h => {
        let st = new Date(dt.getFullYear(), dt.getMonth(), dt.getDate(), h);
        let et = new Date(dt.getFullYear(), dt.getMonth(), dt.getDate(), h + 1);
        let event = cal.createEvent(obj.title, st, et);
        obj.nots.forEach(m => {
          event.addPopupReminder(m); // notifications for each event
        });
      });
    });
  });
}
My Current Answer:
Uses a control sheet to set up the parameters for each event creation sequence. The sheet also stores the event ids that are created, which makes it possible to delete the events later by iterating through the list.
Code:
function makeEventArray(start = 9, end = 12, offA = '') {
  if (!offA) {
    const arr = [...Array.from(new Array(end - start + 1).keys(), x => start + x)];
    //Logger.log(arr);
    return arr;
  } else {
    return offA;
  }
}
function createMyEventsFromList() {
  const ss = SpreadsheetApp.getActive();
  const sh = ss.getSheetByName('MakeEvents');
  const shsr = 3;
  let d = new Date();
  sh.getRange("C1").setValue(d).setNumberFormat("MMM dd yyyy");
  sh.getRange("E1").setValue(d).setNumberFormat("HH:mm:ss");
  const vs = sh.getRange(shsr, 1, sh.getLastRow() - shsr + 1, sh.getLastColumn()).getValues().map((r, i) => { r[9] = i + shsr; return r; }).filter(r => r[0] == true).filter(e => e);
  if (vs && vs.length > 0) {
    ss.toast('Process Running', 'Creating Events', 300);
    vs.forEach(r => {
      let idA = [];
      let title = r[1];
      let hours;
      if (~r[2].toString().indexOf(',')) { // ~ turns indexOf's -1 into 0 (falsy), so this tests "contains a comma"
        hours = r[2].toString().split(',').map(e => Number(e));
      } else {
        hours = [r[2]];
      }
      let nots;
      if (~r[3].toString().indexOf(",")) {
        nots = r[3].toString().split(',').map(e => Number(e));
      } else {
        nots = [r[3]];
      }
      let desc = r[4];
      let start = r[5];
      let end = r[6];
      let offA;
      if (r[7] && r[7].toString().length > 0) {
        if (r[7].toString().includes(',')) {
          offA = r[7].toString().split(',').map(e => Number(e));
        } else {
          offA = [r[7]];
        }
      }
      const cal = CalendarApp.getCalendarById("q289qp9augtnshkn70dkibcc54@group.calendar.google.com");
      const tob = { start: start, end: end, offA: offA, desc: desc, tA: [{ title: title, hours: hours, nots: nots }] };
      makeEventArray(tob.start, tob.end, tob.offA).forEach(offset => {
        let dt = new Date(d.getFullYear(), d.getMonth(), d.getDate() + offset);
        tob.tA.forEach(obj => {
          obj.hours.forEach(h => {
            let st = new Date(dt.getFullYear(), dt.getMonth(), dt.getDate(), h);
            let et = new Date(dt.getFullYear(), dt.getMonth(), dt.getDate(), h + 1);
            let event = cal.createEvent(obj.title, st, et);
            if (event) {
              event.setDescription(tob.desc);
              idA.push(event.getId());
            }
            if (event && obj.nots && obj.nots.length > 0) {
              obj.nots.forEach(m => {
                event.addPopupReminder(m);
              });
            }
          });
        });
      });
      sh.getRange(r[9], 9, 1, 2).setValues([[idA.join(', '), r[9]]]);
    });
    ss.toast('Process Complete', 'Creating Events', 15);
  }
}
function deleteEventsFromList() {
  const cal = CalendarApp.getCalendarById("q289qp9augtnshkn70dkibcc54@group.calendar.google.com");
  const ss = SpreadsheetApp.getActive();
  const sh = ss.getSheetByName('MakeEvents');
  const shsr = 3;
  const vs = sh.getRange(shsr, 1, sh.getLastRow() - shsr + 1, sh.getLastColumn()).getValues().map((r, i) => { r[9] = i + shsr; return r; }).filter(r => r[0] == true).filter(e => e);
  ss.toast('Process Running', 'Deleting Events', 180);
  vs.forEach(r => {
    r[8].toString().split(', ').forEach(id => {
      cal.getEventById(id).deleteEvent();
    });
    sh.getRange(r[9], 9, 1, 2).clearContent();
  });
  ss.toast('Process Complete', 'Deleting Events');
}
Image of Sheet:
Changed
const shsr = 3;
let d = new Date();
to this to allow starting date selection:
const shsr = 3;
const r = SpreadsheetApp.getUi().prompt("Get StartDate", "yyyy/M/d", SpreadsheetApp.getUi().ButtonSet.OK_CANCEL);
if (r.getSelectedButton() == SpreadsheetApp.getUi().Button.OK) {
  var t = r.getResponseText().split('/');
} else {
  ss.toast("Making Events", "Cancelled Operation by User", 5);
  return;
}
let d = new Date(t[0], Number(t[1]) - 1, t[2]);

How to create a list of all folders in a shared Google Drive folder

I'm trying to create a listing of all my Google Drive folders. I have the following script and a link to the question where I got the script from; I'm just not sure how to implement it. I put the drive ID in line 6 and then ran it and got this error. Can anyone tell me where I'm going wrong?
List every file and folder of a shared drive in a spreadsheet with Apps Script
11:08:37 AM Error
ReferenceError: gobj is not defined
getFoldersInASharedFolder @ Folder Listing.gs:12
This is the script:
function getFoldersInASharedFolder() {
  let tr = [];
  let token = '';
  let page = 0;
  do {
    let r = Drive.Files.list({ corpora: 'drive', includeItemsFromAllDrives: true, supportsTeamDrive: true, supportsAllDrives: true, driveId: "???", pageToken: token, q: "mimeType = 'application/vnd.google-apps.folder'" });
    let obj = JSON.parse(r);
    tr.push(obj);
    token = obj.nextPageToken;
  } while (token != null)
  let folder = DriveApp.getFolderById(gobj.globals.testfolderid);
  folder.createFile(`SharedDriveList ${Utilities.formatDate(new Date(), Session.getScriptTimeZone(), "MM/dd/yyyy HH:mm:ss")}`, JSON.stringify(tr), MimeType.PLAIN_TEXT);
  let html = '<style>td,th{border:1px solid black;font-size: 16px;}</style><table><tr><th>Title</th><th>Id</th><th>Path</th></tr>';
  tr.forEach((o, i) => {
    o.items.forEach(item => {
      if (item.mimeType = "application/vnd.google-apps.folder") {
        html += `<tr><td>${item.title}</td><td>${item.id}</td><td>${getPathAllDrivesFromId(item.id)}</td></tr>`;
      }
    })
  });
  html += '</table><input type="button" value="exit" onclick="google.script.host.close()" />';
  SpreadsheetApp.getUi().showModelessDialog(HtmlService.createHtmlOutput(html).setHeight(500).setWidth(1200), `Folders in: ${JSON.parse(Drive.Drives.get("driveid")).name}`);
}
The error message occurs because
let folder = DriveApp.getFolderById(gobj.globals.testfolderid);
uses a nested property of gobj as a parameter, but gobj was never declared.
You can fix this error either by properly declaring gobj or by replacing gobj.globals.testfolderid with the folder id (properly set as a string).
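For example, a minimal declaration that satisfies the reference could look like this (the folder id is a placeholder you must fill in):
// Hypothetical globals object; testfolderid must be a real folder id string.
const gobj = {
  globals: {
    testfolderid: "<your storage folder id>"
  }
};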
The comments explain how to use this function:
function getFoldersInASharedFolder() {
  const sharedriveid = ""; // add the shared drive id here
  const storagefilefolderid = ""; // this is where I was storing the response, which I used when I was first building the script. It's not necessary to do this if you don't wish to. You can just comment that code out of the script
  let tr = [];
  let token = '';
  let page = 0;
  do {
    let r = Drive.Files.list({ corpora: 'drive', includeItemsFromAllDrives: true, supportsTeamDrives: true, supportsAllDrives: true, driveId: sharedriveid, pageToken: token, q: "mimeType = 'application/vnd.google-apps.folder'" }); // drive id for the shared drive that you wish all of the folders from
    let obj = JSON.parse(r);
    tr.push(obj);
    token = obj.nextPageToken;
  } while (token != null)
  let folder = DriveApp.getFolderById(storagefilefolderid); // the folder id for the file that stores the results
  folder.createFile(`SharedDriveList ${Utilities.formatDate(new Date(), Session.getScriptTimeZone(), "MM/dd/yyyy HH:mm:ss")}`, JSON.stringify(tr), MimeType.PLAIN_TEXT);
  let html = '<style>td,th{border:1px solid black;font-size: 16px;}</style><table><tr><th>Title</th><th>Id</th><th>Path</th></tr>';
  tr.forEach((o, i) => {
    o.items.forEach(item => {
      if (item.mimeType == "application/vnd.google-apps.folder") {
        html += `<tr><td>${item.title}</td><td>${item.id}</td><td>${getPathAllDrivesFromId(item.id)}</td></tr>`;
      }
    })
  });
  html += '</table><input type="button" value="exit" onclick="google.script.host.close()" />';
  SpreadsheetApp.getUi().showModelessDialog(HtmlService.createHtmlOutput(html).setHeight(500).setWidth(1200), `Folders in: ${JSON.parse(Drive.Drives.get(sharedriveid)).name}`);
}

Rooms with Forge

My goal is to see the Revit rooms in the Forge viewer. The application is in .NET Core. I have tried implementing GenerateMasterViews.
The code I am using to achieve this is:
[Route("api/forge/modelderivative/jobs")]
public async Task<dynamic> TranslateObject([FromBody] TranslateObjectModel objModel)
{
    dynamic oauth = await OAuthController.GetInternalAsync();

    // prepare the payload
    var advOutputPayload = new JobSvf2OutputPayloadAdvanced();
    advOutputPayload.GenerateMasterViews = true;
    List<JobPayloadItem> outputs = new List<JobPayloadItem>()
    {
        new JobPayloadItem(
            JobPayloadItem.TypeEnum.Svf2,
            new List<JobPayloadItem.ViewsEnum>()
            {
                JobPayloadItem.ViewsEnum._2d,
                JobPayloadItem.ViewsEnum._3d
            },
            advOutputPayload
        )
    };
    JobPayload job;
    job = new JobPayload(new JobPayloadInput(objModel.objectName), new JobPayloadOutput(outputs));

    // start the translation
    DerivativesApi derivative = new DerivativesApi();
    derivative.Configuration.AccessToken = oauth.access_token;
    dynamic jobPosted = await derivative.TranslateAsync(job);
    return jobPosted;
}
Autodesk.Viewing.Initializer(options, () => {
  viewer = new Autodesk.Viewing.GuiViewer3D(document.getElementById('forgeViewer'));
  viewer.start();
  var documentId = 'urn:' + urn;
  Autodesk.Viewing.Document.load(documentId, onDocumentLoadSuccess, onDocumentLoadFailure);
});

function onDocumentLoadSuccess(doc) {
  var viewables = doc.getRoot().getDefaultGeometry();
  viewer.loadDocumentNode(doc, viewables).then(i => {
    // document loaded, any action?
  });
}
But I can't get it to work.
I have looked for information, but these two urls don't work, and I couldn't see how to do it:
https://forge.autodesk.com/en/docs/model-derivative/v2/tutorials/prep-roominfo4viewer/option2/
https://forge.autodesk.com/en/docs/model-derivative/v2/tutorials/prep-roominfo4viewer/option1/
To check if the object is in the room, we can do the following:
Get bounds for each room and object
getBoundingBox(dbId, model) {
  const it = model.getInstanceTree();
  const fragList = model.getFragmentList();
  let bounds = new THREE.Box3();
  it.enumNodeFragments(dbId, (fragId) => {
    let box = new THREE.Box3();
    fragList.getWorldBounds(fragId, box);
    bounds.union(box);
  }, true);
  return bounds;
}
Iterate over the rooms and objects and use containsBox or containsPoint to check whether their bounding boxes intersect.
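For instance, the coarse check can look like this (a minimal sketch, assuming roomDbIds and elementDbIds have already been collected and getBoundingBox is the helper above):
// Hypothetical driver for the coarse phase: pair up rooms and elements
// whose bounding boxes touch. THREE.Box3 comes bundled with the viewer.
function coarseRoomCheck(roomDbIds, elementDbIds, model) {
  const pairs = [];
  for (const roomDbId of roomDbIds) {
    const roomBox = getBoundingBox(roomDbId, model);
    for (const dbId of elementDbIds) {
      const box = getBoundingBox(dbId, model);
      // containsBox catches fully-enclosed elements; the corner/center
      // point checks catch elements poking out of the room.
      if (roomBox.containsBox(box) ||
          roomBox.containsPoint(box.min) ||
          roomBox.containsPoint(box.max) ||
          roomBox.containsPoint(box.center())) {
        pairs.push({ roomDbId, dbId });
      }
    }
  }
  return pairs;
}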
If you want to do an accurate collision check, you can take advantage of ThreeCSG.js to do geometry intersection. Here is a blog post demonstrating how to integrate ThreeCSG.js with the Forge Viewer:
https://forge.autodesk.com/blog/boolean-operations-forge-viewer
Note: this process will reduce viewer performance, since JavaScript runs on a single thread in the web browser, so you may want to use something like a web worker to do the complex calculations on a separate thread.
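Purely as an illustration of that idea (collision-worker.js and the message fields are hypothetical names; the worker script would hold the intersection code):
// Hypothetical main-thread side: offload the heavy CSG math to a worker.
const collisionWorker = new Worker('collision-worker.js');

collisionWorker.onmessage = (e) => {
  // e.data.roomDbIds is whatever the worker reports back
  console.log('Rooms containing the element:', e.data.roomDbIds);
};

// Only structured-cloneable data can cross the boundary, so send the raw
// positions/indices extracted from the viewer, not viewer objects themselves.
collisionWorker.postMessage({ element: elementGeometryData, rooms: roomGeometryDataList });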
Update:
Here is a working sample extension demonstrating the above idea:
/////////////////////////////////////////////////////////////////////
// Copyright (c) Autodesk, Inc. All rights reserved
// Written by Forge Partner Development
//
// Permission to use, copy, modify, and distribute this software in
// object code form for any purpose and without fee is hereby granted,
// provided that the above copyright notice appears in all copies and
// that both that copyright notice and the limited warranty and
// restricted rights notice below appear in all supporting
// documentation.
//
// AUTODESK PROVIDES THIS PROGRAM "AS IS" AND WITH ALL FAULTS.
// AUTODESK SPECIFICALLY DISCLAIMS ANY IMPLIED WARRANTY OF
// MERCHANTABILITY OR FITNESS FOR A PARTICULAR USE. AUTODESK, INC.
// DOES NOT WARRANT THAT THE OPERATION OF THE PROGRAM WILL BE
// UNINTERRUPTED OR ERROR FREE.
/////////////////////////////////////////////////////////////////////
(function () {
  const Utility = {
    /**
     * Reset an object
     * @param {Object} obj An object to be reset.
     * ref: https://stackoverflow.com/a/24090180
     */
    resetObject: function (obj) {
      for (let key of Object.getOwnPropertyNames(obj)) {
        if (!obj.hasOwnProperty(key)) continue;
        let val = obj[key];
        switch (typeof val) {
          case 'string':
            obj[key] = ''; break;
          case 'number':
            obj[key] = 0; break;
          case 'boolean':
            obj[key] = false; break;
          case 'object':
            if (val === null) break;
            if (val instanceof Array) {
              while (obj[key].length > 0) {
                obj[key].pop();
              }
              break;
            }
            val = {};
            // Or recursively clear the sub-object
            //resetObject(val);
            break;
        }
      }
    }
  };

  /**
   * A Forge Viewer extension for locating the Revit rooms that contain a selected element
   * @class
   */
  class RoomLocatorExtension extends Autodesk.Viewing.Extension {
    constructor(viewer, options) {
      super(viewer, options);
      this.roomCategoryName = options.roomCategoryName || 'Revit Rooms'; //'Revit Habitaciones'
      this.onContextMenu = this.onContextMenu.bind(this);
    }
    onContextMenu(menu, status) {
      if (status.hasSelected) {
        menu.push({
          title: 'Find room',
          target: async () => {
            let selSet = this.viewer.getSelection();
            this.viewer.clearSelection();
            const roomDbIds = await this.locateElementByRoom(selSet[0]);
            if (!roomDbIds || roomDbIds.length <= 0) return;
            this.viewer.select(roomDbIds);
          }
        });
      }
    }
    async getPropertiesAsync(dbId, model) {
      return new Promise((resolve, reject) => {
        model.getProperties2(
          dbId,
          (result) => resolve(result),
          (error) => reject(error)
        );
      });
    }
    async getElementsByCategoryAsync(category) {
      return new Promise((resolve, reject) => {
        this.viewer.search(
          category,
          (dbIds) => resolve(dbIds),
          (error) => reject(error),
          ['Category'],
          { searchHidden: true }
        );
      });
    }
    async getRoomDbIds() {
      try {
        const roomDbIds = await this.getElementsByCategoryAsync(this.roomCategoryName);
        if (!roomDbIds || roomDbIds.length <= 0) {
          throw new Error('No Rooms found in current model');
        }
        return roomDbIds;
      } catch (ex) {
        console.warn(`[RoomLocatorExtension]: ${ex}`);
        throw new Error('No room found');
      }
    }
    getBoundingBox(dbId, model) {
      const it = model.getInstanceTree();
      const fragList = model.getFragmentList();
      let bounds = new THREE.Box3();
      it.enumNodeFragments(dbId, (fragId) => {
        let box = new THREE.Box3();
        fragList.getWorldBounds(fragId, box);
        bounds.union(box);
      }, true);
      return bounds;
    }
    getLeafFragIds(model, leafId) {
      const instanceTree = model.getData().instanceTree;
      const fragIds = [];
      instanceTree.enumNodeFragments(leafId, function (fragId) {
        fragIds.push(fragId);
      });
      return fragIds;
    }
    getComponentGeometryInfo(dbId, model) {
      const viewer = this.viewer;
      const viewerImpl = viewer.impl;
      const fragIds = this.getLeafFragIds(model, dbId);
      let matrixWorld = null;
      const meshes = fragIds.map((fragId) => {
        const renderProxy = viewerImpl.getRenderProxy(model, fragId);
        const geometry = renderProxy.geometry;
        const attributes = geometry.attributes;
        const positions = geometry.vb ? geometry.vb : attributes.position.array;
        const indices = attributes.index.array || geometry.ib;
        const stride = geometry.vb ? geometry.vbstride : 3;
        const offsets = geometry.offsets;
        matrixWorld = matrixWorld || renderProxy.matrixWorld.elements;
        return {
          positions,
          indices,
          offsets,
          stride
        };
      });
      return {
        matrixWorld,
        meshes
      };
    }
    getComponentGeometry(data, vertexArray) {
      const offsets = [
        {
          count: data.indices.length,
          index: 0,
          start: 0
        }
      ];
      for (let oi = 0, ol = offsets.length; oi < ol; ++oi) {
        let start = offsets[oi].start;
        let count = offsets[oi].count;
        let index = offsets[oi].index;
        for (let i = start, il = start + count; i < il; i += 3) {
          const a = index + data.indices[i];
          const b = index + data.indices[i + 1];
          const c = index + data.indices[i + 2];
          const vA = new THREE.Vector3();
          const vB = new THREE.Vector3();
          const vC = new THREE.Vector3();
          vA.fromArray(data.positions, a * data.stride);
          vB.fromArray(data.positions, b * data.stride);
          vC.fromArray(data.positions, c * data.stride);
          vertexArray.push(vA);
          vertexArray.push(vB);
          vertexArray.push(vC);
        }
      }
    }
    buildComponentMesh(data) {
      const vertexArray = [];
      for (let idx = 0; idx < data.nbMeshes; ++idx) {
        const meshData = {
          positions: data['positions' + idx],
          indices: data['indices' + idx],
          stride: data['stride' + idx]
        };
        this.getComponentGeometry(meshData, vertexArray);
      }
      const geometry = new THREE.Geometry();
      for (let i = 0; i < vertexArray.length; i += 3) {
        geometry.vertices.push(vertexArray[i]);
        geometry.vertices.push(vertexArray[i + 1]);
        geometry.vertices.push(vertexArray[i + 2]);
        const face = new THREE.Face3(i, i + 1, i + 2);
        geometry.faces.push(face);
      }
      const matrixWorld = new THREE.Matrix4();
      matrixWorld.fromArray(data.matrixWorld);
      const mesh = new THREE.Mesh(geometry);
      mesh.applyMatrix(matrixWorld);
      mesh.boundingBox = data.boundingBox;
      mesh.bsp = new ThreeBSP(mesh);
      mesh.dbId = data.dbId;
      return mesh;
    }
    buildCsgMesh(dbId, model) {
      const geometry = this.getComponentGeometryInfo(dbId, model);
      const data = {
        boundingBox: this.getBoundingBox(dbId, model),
        matrixWorld: geometry.matrixWorld,
        nbMeshes: geometry.meshes.length,
        dbId
      };
      geometry.meshes.forEach((mesh, idx) => {
        data['positions' + idx] = mesh.positions;
        data['indices' + idx] = mesh.indices;
        data['stride' + idx] = mesh.stride;
      });
      return this.buildComponentMesh(data);
    }
    async buildBBoxes() {
      try {
        const model = this.viewer.model;
        const roomBBoxes = {};
        const roomDbIds = await this.getRoomDbIds();
        for (let i = 0; i < roomDbIds.length; i++) {
          let dbId = roomDbIds[i];
          let bbox = await this.getBoundingBox(dbId, model);
          roomBBoxes[dbId] = bbox;
        }
        this.cachedBBoxes['rooms'] = roomBBoxes;
      } catch (ex) {
        console.warn(`[RoomLocatorExtension]: ${ex}`);
        throw new Error('Cannot build bounding boxes from rooms');
      }
    }
    async locateElementByRoom(dbId) {
      let bbox = await this.getBoundingBox(dbId, this.viewer.model);
      const roomDbIds = Object.keys(this.cachedBBoxes['rooms']);
      const roomBoxes = Object.values(this.cachedBBoxes['rooms']);
      // Coarse Phase Collision
      const coarseResult = [];
      for (let i = 0; i < roomDbIds.length; i++) {
        let roomDbId = roomDbIds[i];
        let roomBox = roomBoxes[i];
        if (roomBox.containsBox(bbox)) {
          coarseResult.push(parseInt(roomDbId));
        } else {
          if (roomBox.containsPoint(bbox.min) || roomBox.containsPoint(bbox.max) || roomBox.containsPoint(bbox.center())) {
            coarseResult.push(parseInt(roomDbId));
          }
        }
      }
      // Fine Phase Collision
      const fineResult = [];
      let elementCsgMesh = this.buildCsgMesh(dbId, this.viewer.model);
      for (let i = 0; i < coarseResult.length; i++) {
        let roomDbId = coarseResult[i];
        let roomCsgMesh = this.buildCsgMesh(roomDbId, this.viewer.model);
        let result = elementCsgMesh.bsp.intersect(roomCsgMesh.bsp);
        if (result.tree.polygons.length <= 0) {
          result = roomCsgMesh.bsp.intersect(elementCsgMesh.bsp);
          // if (!this.viewer.overlays.hasScene('csg'))
          //   this.viewer.overlays.addScene('csg');
          // else
          //   this.viewer.overlays.clearScene('csg');
          // let mat = new THREE.MeshBasicMaterial({ color: 'red' })
          // let mesh = result.toMesh(mat);
          // this.viewer.overlays.addMesh(mesh, 'csg')
          if (result.tree.polygons.length <= 0) continue;
        }
        fineResult.push(roomDbId);
      }
      return fineResult;
    }
    async load() {
      await Autodesk.Viewing.Private.theResourceLoader.loadScript(
        'https://cdn.jsdelivr.net/gh/Wilt/ThreeCSG@develop/ThreeCSG.js',
        'ThreeBSP'
      );
      if (!window.ThreeBSP)
        throw new Error('Cannot load ThreeCSG.js, please download a copy from https://github.com/Wilt/ThreeCSG/blob/develop/ThreeCSG.js');
      await this.viewer.waitForLoadDone();
      this.cachedBBoxes = {};
      await this.buildBBoxes();
      this.viewer.registerContextMenuCallback(
        'RoomLocatorExtension',
        this.onContextMenu
      );
      return true;
    }
    unload() {
      Utility.resetObject(this.cachedBBoxes);
      this.viewer.unregisterContextMenuCallback(
        'RoomLocatorExtension',
        this.onContextMenu
      );
      return true;
    }
  }

  Autodesk.Viewing.theExtensionManager.registerExtension('RoomLocatorExtension', RoomLocatorExtension);
})();
Snapshots:

Export Form responses as csv Google Apps Scripts

Is there a fast way to programmatically export all responses from a Google Form to a csv? Something like "Export responses to csv" invoked via Scripts.
Right now I'm doing it in a rock art way (sketched in code right after this list):
Iterate over the forms I want to export (~75)
Open each form: var form = FormApp.openById(formId);
Get the responses: var formReponses = form.getResponses(); (from 0 to 700 responses each form)
Iterate over the responses and get the item responses: var preguntes = formReponses[r].getItemResponses();
For each itemResponse, convert it to csv/json
Export the responses to a Drive file
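For reference, here is a minimal sketch of that per-form loop (the function name and the destination folder id are illustrative, and the CSV quoting is deliberately crude):
// Hypothetical sketch of the slow per-form export described above.
function exportFormToCsv(formId, folderId) {
  const form = FormApp.openById(formId);
  // getTimestamp() gives a stable chronological order even if the form was edited.
  const responses = form.getResponses()
    .sort((a, b) => a.getTimestamp() - b.getTimestamp());
  const rows = responses.map(response =>
    response.getItemResponses()
      .map(item => JSON.stringify(String(item.getResponse()))) // quick-and-dirty CSV quoting
      .join(',')
  );
  DriveApp.getFolderById(folderId)
    .createFile(`${form.getTitle()}.csv`, rows.join('\n'), MimeType.CSV);
}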
This is extremely slow, and additionally it hangs over and over, so I had to export responses in chunks of 50 and save them in separate Drive files. On the next execution (after letting the servers cool down for a while), I run the script again, skipping the number of responses already found in the chunk files.
Additionally, I'm not sure that Google keeps the response order when doing form.getResponses(); (actually, I've found that if the form has been modified, the order is not the same).
Is there a better way to do it?
With the help of @JackBrown I've managed to write a Chrome extension to download responses (maybe soon on GitHub). This will wait for each download in the formIds object until it finishes and then prompt for the next one:
'use strict';

function startDownload() {
  const formIds = {
    'Downloads-subfolder-here': {
      'Download-filename-here': '1-cx-aSAMrTK0IHsQkE... {form-id here}',
      'Another-filename-here': '...-dnqdpnEso {form-id here}',
      // ...
    },
    'Another-subfolder-here': {
      'Download-filename-here': '1-cx-aSAMrTK0IHsQkE... {form-id here}',
      'Another-filename-here': '...-dnqdpnEso {form-id here}',
      // ...
    },
  };
  const destFolders = Object.keys(formIds);
  const downloads = [];
  for (let t = 0, tl = destFolders.length; t < tl; t += 1) {
    const destFolder = destFolders[t];
    const forms = Object.keys(formIds[destFolder]);
    for (let f = 0, fl = forms.length; f < fl; f += 1) {
      const formName = forms[f];
      downloads.push({
        destFolder,
        formName,
        url: `https://docs.google.com/forms/d/${formIds[destFolder][formName]}/downloadresponses?tz_offset=-18000000`,
        filename: `myfolder/${destFolder}/${formName.replace(/\//g, '_')}.csv`,
      });
    }
  }
  const event = new Event('finishedDownload');
  const eventInterrupt = new Event('interruptedDownload');
  let currId;
  chrome.downloads.onChanged.addListener((downloadDelta) => {
    if (downloadDelta.id === currId) {
      if (downloadDelta.state && downloadDelta.state.current === 'complete') {
        document.dispatchEvent(event);
      } else if (downloadDelta.state && downloadDelta.state.current === 'interrupted') {
        console.log(downloadDelta);
        document.dispatchEvent(eventInterrupt);
      }
    }
  });
  downloads.reduce((promise, actual) => {
    return promise.then((last) => (last ? new Promise((resolve) => {
      const { url, filename, destFolder, formName } = actual;
      function listener() {
        document.removeEventListener('finishedDownload', listener);
        document.removeEventListener('interruptedDownload', listener);
        resolve(true);
      }
      function interrupt() {
        document.removeEventListener('finishedDownload', listener);
        document.removeEventListener('interruptedDownload', listener);
        resolve(false);
      }
      console.log(`Processant ${destFolder}, ${formName}: ${url}`);
      document.addEventListener('finishedDownload', listener);
      document.addEventListener('interruptedDownload', interrupt);
      chrome.downloads.download({ url, filename }, (downloadId) => {
        currId = downloadId;
        if (!downloadId) {
          console.log('Error downloading...');
          console.log(chrome.runtime.lastError);
          resolve();
        }
      });
    }) : Promise.resolve(false)));
  }, Promise.resolve(true));
}

chrome.browserAction.onClicked.addListener((/*tab*/) => startDownload());

Can I increase QUOTA_BYTES_PER_ITEM in Chrome?

Is there any way to increase the chrome.storage.sync.QUOTA_BYTES_PER_ITEM ?
For me, the default 4096 Bytes is a little bit short.
I tried to execute
chrome.storage.sync.QUOTA_BYTES_PER_ITEM = 8192;
However, it seems that the actual limit doesn't change.
How can I do this?
No, QUOTA_BYTES_PER_ITEM is there for reference only; it is not a settable value. You could use the value of QUOTA_BYTES_PER_ITEM to split an item up into multiple items, though:
function syncStore(key, objectToStore, callback) {
  var jsonstr = JSON.stringify(objectToStore);
  var i = 0;
  var storageObj = {};
  // split jsonstr into chunks and store them in an object indexed by `key_i`
  while (jsonstr.length > 0) {
    var index = key + "_" + i++;
    // since the key uses up some per-item quota, see how much is left for the value
    // also trim off 2 for quotes added by storage-time `stringify`
    var valueLength = chrome.storage.sync.QUOTA_BYTES_PER_ITEM - index.length - 2;
    // trim down segment so it will be small enough even when run through `JSON.stringify` again at storage time
    var segment = jsonstr.substr(0, valueLength);
    while (JSON.stringify(segment).length > valueLength)
      segment = jsonstr.substr(0, --valueLength);
    storageObj[index] = segment;
    jsonstr = jsonstr.substr(valueLength);
  }
  // store all the chunks
  chrome.storage.sync.set(storageObj, callback);
}
Then write an analogous fetch function that fetches by key and glues the object back together.
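A matching fetch could look something like this (a minimal sketch; syncFetch is a hypothetical name and it assumes the chunks were written by syncStore above):
function syncFetch(key, callback) {
  // grab everything and reassemble chunks in the order syncStore wrote them: key_0, key_1, ...
  chrome.storage.sync.get(null, (items) => {
    let i = 0;
    let jsonstr = '';
    while (items[key + '_' + i] !== undefined) {
      jsonstr += items[key + '_' + i];
      i++;
    }
    callback(jsonstr.length > 0 ? JSON.parse(jsonstr) : undefined);
  });
}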
Just a modified version of @apsilliers' answer:
function syncStore(key, objectToStore) {
  var jsonstr = JSON.stringify(objectToStore);
  var i = 0;
  var storageObj = {};
  // split jsonstr into chunks and store them in an object indexed by `key_i`
  while (jsonstr.length > 0) {
    var index = key + "_" + i++;
    // since the key uses up some per-item quota, see how much is left for the value
    // also trim off 2 for quotes added by storage-time `stringify`
    const maxLength = chrome.storage.sync.QUOTA_BYTES_PER_ITEM - index.length - 2;
    var valueLength = jsonstr.length;
    if (valueLength > maxLength) {
      valueLength = maxLength;
    }
    // trim down segment so it will be small enough even when run through `JSON.stringify` again at storage time
    // max tries is QUOTA_BYTES_PER_ITEM to avoid an infinite loop
    var segment = jsonstr.substr(0, valueLength);
    for (let i = 0; i < chrome.storage.sync.QUOTA_BYTES_PER_ITEM; i++) {
      const jsonLength = JSON.stringify(segment).length;
      if (jsonLength > maxLength) {
        segment = jsonstr.substr(0, --valueLength);
      } else {
        break;
      }
    }
    storageObj[index] = segment;
    jsonstr = jsonstr.substr(valueLength);
  }
  // store all the chunks
  chrome.storage.sync.set(storageObj);
}
And here's a function to read each partition and merge it back together:
function syncGet(key, callback) {
  chrome.storage.sync.get(key, (data) => {
    console.log(data[key]);
    console.log(typeof data[key]);
    if (data != undefined && data != "undefined" && data != {} && data[key] != undefined && data[key] != "undefined") {
      const keyArr = new Array();
      for (let i = 0; i <= data[key].count; i++) {
        keyArr.push(`${data[key].prefix}${i}`);
      }
      chrome.storage.sync.get(keyArr, (items) => {
        console.log(data);
        const keys = Object.keys(items);
        const length = keys.length;
        let results = "";
        if (length > 0) {
          const sepPos = keys[0].lastIndexOf("_");
          const prefix = keys[0].substring(0, sepPos);
          for (let x = 0; x < length; x++) {
            results += items[`${prefix}_${x}`];
          }
          callback(JSON.parse(results));
          return;
        }
        callback(undefined);
      });
    } else {
      callback(undefined);
    }
  });
}
I tested it and it works for my case.
This is a better version of @uncle bob's functions, working with Manifest V3 (you can use it just like the normal sync.set or sync.get functions).
NOTE: it only works with JSONs (arrays and objects) since a string shouldn't be that long
let browserServices;
if (typeof browser === "undefined") {
  browserServices = chrome;
} else {
  browserServices = browser;
}

function syncSet(obj = {}) {
  return new Promise((resolve, reject) => {
    var storageObj = {};
    for (let u = 0; u < Object.keys(obj).length; u++) {
      const key = Object.keys(obj)[u];
      const objectToStore = obj[key];
      var jsonstr = JSON.stringify(objectToStore);
      var i = 0;
      // split jsonstr into chunks and store them in an object indexed by `key_i`
      while (jsonstr.length > 0) {
        var index = key + "USEDTOSEPERATE" + i++;
        // since the key uses up some per-item quota, see how much is left for the value
        // also trim off 2 for quotes added by storage-time `stringify`
        const maxLength = browserServices.storage.sync.QUOTA_BYTES_PER_ITEM - index.length - 2;
        var valueLength = jsonstr.length;
        if (valueLength > maxLength) {
          valueLength = maxLength;
        }
        // trim down segment so it will be small enough even when run through `JSON.stringify` again at storage time
        // max tries is QUOTA_BYTES_PER_ITEM to avoid an infinite loop
        var segment = jsonstr.substring(0, valueLength);
        var jsonLength = JSON.stringify(segment).length;
        segment = jsonstr.substring(0, valueLength = (valueLength - (jsonLength - maxLength) - 1));
        for (let i = 0; i < browserServices.storage.sync.QUOTA_BYTES_PER_ITEM; i++) {
          jsonLength = JSON.stringify(segment).length;
          if (jsonLength > maxLength) {
            segment = jsonstr.substring(0, --valueLength);
          } else {
            break;
          }
        }
        storageObj[index] = segment;
        jsonstr = jsonstr.substring(valueLength, Infinity);
      }
    }
    chrome.storage.sync.set(storageObj).then(() => {
      resolve();
    });
  });
}
function syncGet(uniqueKeys = []) {
  return new Promise((resolve, reject) => {
    browserServices.storage.sync.get(null).then((data) => {
      const keyArr = Object.keys(data).filter(e => uniqueKeys.filter(j => e.indexOf(j) == 0).length > 0);
      browserServices.storage.sync.get(keyArr).then((items) => {
        var results = {};
        for (let i = 0; i < uniqueKeys.length; i++) {
          const uniqueKey = uniqueKeys[i];
          const keysFiltered = keyArr.filter(e => e.split("USEDTOSEPERATE")[0] == uniqueKey);
          if (keysFiltered.length > 0) {
            results[uniqueKey] = "";
            for (let x = 0; x < keysFiltered.length; x++) {
              results[uniqueKey] += items[`${keysFiltered[x]}`];
            }
            results[uniqueKey] = JSON.parse(results[uniqueKey]);
          }
        }
        resolve(results);
      });
    });
  });
}
example of usage:
syncSet({
  "keyTest": ["a lot of text"],
  "keyTest1": ["a lot of text"]
});
syncGet(["keyTest", "keyTest1"]).then(results => console.log(results));
// {keyTest: ["a lot of text"], keyTest1: ["a lot of text"]}