Use importData to fetch and parse JSON - google-apps-script

I had the following function running perfectly:
var ss = SpreadsheetApp.getActiveSpreadsheet();
var habSheet = ss.getSheetByName("Harvests");
var bVals = habSheet.getRange("b2:b").getValues();
var habs = bVals.filter(String).length;
var habitats = habSheet.getRange("B2:B" + habs + 1).getDisplayValues();
var data = [];
var traitNames = habSheet.getRange("D1:U1").getValues();
var values = new Array(habs);
for (i = 0; i < habs; i++) {
  values[i] = new Array(traitNames[0].length);
  for (j = 0; j < traitNames[0].length; j++) {
    values[i][j] = [""];
  }
}
var rawData = "";
var names = new Array(habs);
for (i = 0; i < habs; i++) {
  names[i] = new Array(1);
}
for (i = 0; i < habs; i++) {
  try {
    rawData = UrlFetchApp.fetch("https://api.genopets.me/habitat/" + habitats[i]);
    data[i] = JSON.parse(rawData.getContentText());
    names[i][0] = data[i].name;
    for (j = 0; j < data[i].attributes.length; j++) {
      value = data[i].attributes[j].value;
      trait = data[i].attributes[j].trait_type;
      for (k = 0; k <= 21; k++) {
        if (traitNames[0][k] == trait) {
          values[i][k] = value;
        }
      }
    }
  }
  catch (err) {
But I'm exceeding max fetch calls daily. I'm in an emergency situation because this needs to run again within an hour.
I'm trying to build a temporary fix, so I'm using importData to call the API with the following formula:
=join(",",IMPORTDATA("https://api.genopets.me/habitat/"&B2,","))
Then, I want to just replace rawData in the code with this imported data. However, now it comes in as text and can't be parsed in the same way. Is there a quick way to force it into JSON format or otherwise convert to a dictionary as before so that I can parse it with the same code?
I'm getting stuck because .name, .length, etc. are failing as the "rawData" is now just a string.
This is the code snippet I'm playing with to try and get this right and build the quick patch for right now:
// for (i=0; i<habs; i++){
var i = 0;
importData = habSheet.getRange("AL1").getDisplayValue();
rawData = JSON.stringify(importData);
// Logger.log(rawData);
data[i] = rawData;
// data[i] = JSON.parse(rawData.getContentText());
names[i][0] = data[i].name;
for (j = 0; j < data[i].attributes.length; j++) {
  value = data[i].attributes[j].value;
  trait = data[i].attributes[j].trait_type;
  for (k = 0; k <= 21; k++) {
    if (traitNames[0][k] == trait) {
      values[i][k] = value;
    }
  }
}
I've tried it as above, and also without stringify, but I can't get it to work yet.
For reference, this is an example of the API response:
https://api.genopets.me/habitat/7vTz9dniU14Egpt8XHkMxP1x36BLRd15C11eUTaWhB19
Appreciate any help!

I have done a lot of testing to find a simple workaround but could not find one: the string resulting from =join(",",IMPORTDATA(url,",")) (and from the other =IMPORTXXX functions) will not work with your code. When these IMPORT functions pull the data in, it is interpreted, certain characters are removed and values are reformatted, so using these functions for this is not recommended.
Since the message you are getting is related to quota limits, you should consider splitting the load of this script across multiple Apps Script projects. As a possible immediate solution, you can make a copy of the script (or of the file bound to the script), authorize the new copy and try again.
To increase performance you could also issue the calls in bulk with fetchAll (https://developers.google.com/apps-script/reference/url-fetch/url-fetch-app#fetchallrequests). There is a 100 request limit for this method, and it will result in the same quota usage.
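For illustration, here is a rough sketch of what the bulk version of the fetch loop could look like, assuming the same habitats array that your original code reads from column B (the muteHttpExceptions flag is optional):

// Build one request object per habitat (habitats is the same 2D array
// of display values used in the original loop).
var requests = habitats.map(function (row) {
  return {
    url: "https://api.genopets.me/habitat/" + row[0],
    muteHttpExceptions: true
  };
});
// fetchAll sends the requests together; with more than 100 habitats you
// would need to split `requests` into chunks of at most 100.
var responses = UrlFetchApp.fetchAll(requests);
var data = responses.map(function (response) {
  return JSON.parse(response.getContentText());
});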

Related

App Scripts If Statement always returning Else Condition

I am working on a script to perform an Index/Match task on two separate workbooks. The code seems to be working, but my If statement always returns its Else condition. I have logged the compared variables find and searchref and found that they do match at some point during the loop, but the If statement still returns its Else condition.
I suspect this has something to do with how I am comparing these arrays, but I have not been able to figure it out.
Here is a snip of the first few columns and rows of the source spreadsheet for searchData; I am trying to access the information in column B.
[screenshot: source data for searchData]
Here is the output from Logger.log for findData and searchData:
[screenshots: Logger with labels, Logger arrays, source data for findData]
function generateBillOfMaterials() {
  // --------------------------------------------------------------------
  // Declare variables
  var i, j
  var find
  var searchref
  var found = []
  // --------------------------------------------------------------------
  var search_spreadsheet = SpreadsheetApp.openById("Searched-Spreadsheet-ID");
  var find_spreadsheet = SpreadsheetApp.openById("1xg2yVimBwE5rGvSFMtID9O9CB7RauID34wqIH5LLTeE");
  var ssheet = search_spreadsheet.getSheetByName("METAL PARTS");
  var fsheet = find_spreadsheet.getSheetByName("Bill of Materials");
  var FMaxR = fsheet.getMaxRows();
  fsheet.getRange(2, 3, FMaxR, 1).clear({contentsOnly: true});
  var findData = fsheet.getDataRange().getValues();
  var searchData = ssheet.getDataRange().getValues();
  for (i = 0; i < findData.length; i++) {
    for (j = 0; j < searchData.length; j++) {
      find = findData[i][1];
      //Logger.log(find)
      searchref = searchData[j][0];
      //Logger.log(searchref)
      if (find == searchref && find != "")
      {
        found[i] = searchData[j][1]
      }
      else
      {
        found[i] = ['n/a']
      }
      // found = ssheet.getRange(j+1,2,1,1).getDisplayValue();
      // fsheet.getRange(i+1,16,1,1).setValue(found);
    }
  }
  Logger.log(found)
  fsheet.getRange(2, 3, found.length, 1).setValues(found)
}
The main problem in the sample code is the else statement containing this:
found[i] = ['n/a']
This will overwrite whatever is found earlier in the loop, because even after a match has been found (and assigned to the found array), the loop continues comparing the remaining values in the inner loop.
The following approach shows how to correct this, making as few changes as possible to your existing code:
function generateBillOfMaterials() {
  // --------------------------------------------------------------------
  // Declare variables
  var i, j
  var find
  var searchref
  // --------------------------------------------------------------------
  var search_spreadsheet = ... ;
  var find_spreadsheet = ... ;
  var ssheet = search_spreadsheet.getSheetByName("METAL PARTS");
  var fsheet = find_spreadsheet.getSheetByName("Bill of Materials");
  var FMaxR = fsheet.getMaxRows();
  fsheet.getRange(2, 3, FMaxR, 1).clear({contentsOnly: true});
  var findData = fsheet.getDataRange().getValues();
  var found = new Array(findData.length).fill('n/a');
  var searchData = ssheet.getDataRange().getValues();
  for (i = 0; i < findData.length; i++) {
    for (j = 0; j < searchData.length; j++) {
      find = findData[i][1];
      searchref = searchData[j][0];
      if (find === searchref && find !== "") {
        found[i] = searchData[j][1];
        break;
      }
    }
  }
  const found2 = found.slice(1).map(x => [x]);
  fsheet.getRange(2, 3, found.length - 1, 1).setValues(found2);
}
Notes:
We pre-fill the array of "found" values with "n/a":
var found = new Array(findData.length).fill('n/a');
This allows us to overwrite "n/a" when we find a value - otherwise we leave the "n/a" untouched.
When a match is found, we break out of the inner loop using break.
Then we can remove the else condition - as we no longer need it.
The remaining changes are to ensure the final shape of the found data is a two-dimensional array which can be written to the spreadsheet.
The above approach involves repeatedly looping over the data in the inner loop.
In reality, we only need to visit each list once, in order to perform the lookups we need.
Implementing this alternative approach would basically be a rewrite of what you have - and I would imagine that what you have, even if it is somewhat inefficient, is perfectly OK for your needs. But I did want to mention this.
The other note which may be of interest is that my alternative approach is more-or-less the equivalent of using a Google Sheets vlookup formula. Apologies if you are already aware of that. And I have never tried using that formula across 2 separate files, anyway. But again, just wanted to mention it, for completeness.
Update 2
"Is there a lookup command that could be used in place of the for loops?"
It's more a question of avoiding the nested loops, and using a JavaScript data structure that supports lookups (a Map).
Here is a sketch:
// assume we have already populated searchData and findData, as usual.
// first iterate searchData to build a lookup map:
let partsLookup = new Map();
for (i = 1; i < searchData.length; i++) {
  partsLookup.set( searchData[i][0], searchData[i][1] );
}
// now iterate the BOM data and use the lookup map:
for (i = 1; i < findData.length; i++) {
  var foundValue = partsLookup.get( findData[i][1] );
  console.log( foundValue ); // add this to the "found" array
}
This is obviously not "finished" code - it just shows the approach. But no nested iterations are needed.
The number of loops performed is searchData.length + findData.length, instead of up to searchData.length * findData.length.
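Putting the pieces together, here is a minimal sketch of how the Map-based lookup could replace the nested loops; the spreadsheet IDs are placeholders, and everything else follows your original sheet names and ranges:

function generateBillOfMaterials() {
  // Placeholder IDs - substitute your real spreadsheet IDs.
  var search_spreadsheet = SpreadsheetApp.openById("Searched-Spreadsheet-ID");
  var find_spreadsheet = SpreadsheetApp.openById("Find-Spreadsheet-ID");
  var ssheet = search_spreadsheet.getSheetByName("METAL PARTS");
  var fsheet = find_spreadsheet.getSheetByName("Bill of Materials");

  var searchData = ssheet.getDataRange().getValues();
  var findData = fsheet.getDataRange().getValues();

  // Build the lookup once: part number (column A) -> value (column B).
  var partsLookup = new Map();
  for (var j = 1; j < searchData.length; j++) {
    partsLookup.set(searchData[j][0], searchData[j][1]);
  }

  // Look up each BOM row (skipping the header), defaulting to 'n/a'.
  var found = [];
  for (var i = 1; i < findData.length; i++) {
    var match = findData[i][1] !== "" ? partsLookup.get(findData[i][1]) : undefined;
    found.push([match !== undefined ? match : 'n/a']);
  }

  fsheet.getRange(2, 3, found.length, 1).setValues(found);
}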

How to change Topic names Classroom.Courses.Topics.patch()

I'm using Google Apps Script to change the names of a few Topics in several Google Classrooms. I'm using Classroom.Courses.Topics.patch() to change only the 'name' value of the Topics, but my script does not change anything when I look at the Classrooms.
Here is an example: I have two Classrooms (course IDs '100000000000' and '100000000001'). In each Classroom I have three Topics (topic names 'topic1', 'topic2', and 'topic3'). I want to change the name of the first two topics to 'newtopic1' and 'newtopic2' respectively, in both classrooms.
I suspect there could be something off with the way I'm doing the update mask, but I've tried re-ordering things, and I still can't get it to go... Could also be my nested for loops?
function updateTopicNames() {
  var courseIds = ['100000000000','100000000001'];
  var topicNamesOld = ['topic1','topic2'];
  var topicNamesNew = ['newtopic1', 'newtopic2'];
  for (var i = 0; i < courseIds.length; i++) {
    var topics = Classroom.Courses.Topics.list(courseIds[i]).topic;
    var topicObj = topics.reduce((o, e) => Object.assign(o, {[e.name]: e.topicId}), {});
    for (var j = 0; j < topicObj.length; j++) {
      for (var k = 0; k < topicNamesNew.length; k++) {
        var topicId = topicObj[topicNamesOld[j]];
        var newName = {'name':topicNamesNew[k]};
        var extra = {'updateMask':'name'};
        var exec = Classroom.Courses.Topics.patch(newName, topicId, courseIds[i], extra);
      }
    }
  }
}
I checked out the courses.topics.patch API, but there is no example of the update mask implementation for me to extrapolate from.
I tried to bootstrap from code for other .patch() things: StudentSubmissions.Patch UpdateMask Error and How to change course owner using Classroom.Courses.patch() but something is not working when I try to convert these for Topics.patch().
I believe your situation is as follows.
Each index of topicNamesOld corresponds to the same index of topicNamesNew.
Modification point:
In your script, topicObj is a plain object, not an array, so topicObj.length is undefined and the for loop over j never runs; that loop isn't needed at all. Note also that in the modified script below the arguments to Classroom.Courses.Topics.patch are passed in the order (resource, courseId, topicId, optionalArgs).
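For reference, after the reduce call topicObj is a plain object keyed by topic name, something like this (the IDs below are just placeholders):

// Shape of topicObj built by the reduce (placeholder IDs):
var topicObj = {
  'topic1': 'topicIdForTopic1',
  'topic2': 'topicIdForTopic2',
  'topic3': 'topicIdForTopic3'
};
// topicObj.length is undefined, so `for (var j = 0; j < topicObj.length; j++)`
// never runs, which is why no patch call was ever made.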
When this is reflected to your script, it becomes as follows.
Modified script:
function updateTopicNames() {
  var courseIds = ['100000000000','100000000001'];
  var topicNamesOld = ['topic1','topic2'];
  var topicNamesNew = ['newtopic1', 'newtopic2'];
  for (var i = 0; i < courseIds.length; i++) {
    var topics = Classroom.Courses.Topics.list(courseIds[i]).topic;
    var topicObj = topics.reduce((o, e) => Object.assign(o, {[e.name]: e.topicId}), {});
    for (var k = 0; k < topicNamesNew.length; k++) {
      var topicId = topicObj[topicNamesOld[k]];
      var newName = {'name': topicNamesNew[k]};
      var extra = {'updateMask':'name'};
      var exec = Classroom.Courses.Topics.patch(newName, courseIds[i], topicId, extra);
    }
  }
}
Note:
In this case, it seems that only topics created with the same GAS project can be updated; for example, topics created by another client apparently cannot be updated. This seems to be the current specification, so please be careful about this.
Reference:
Method: courses.topics.patch

How to get CSV data as a string from GmailAttachment?

I'd like to grab gmail attachments that are CSV files then import them into a google sheet.
Here's where I'm stuck - turning the attachment into a string. I think I have a blob to which the method getContentAsString applies, but apparently I still have the type GmailAttachment because I'm getting this error:
TypeError: Cannot find function getContentAsString in object GmailAttachment.
here's the relevant code:
//************** get the attachments ***************************************
var attachments = [];
var files = [];
for (var i = 0; i < 4; i ++) {
  attachments = messages[i].getAttachments();
  for (var k = 0; k < attachments.length; k = k+2) { //2 attachments per message, but I only want the 1st one
    j = k/2;
    files[j] = attachments[k].copyBlob();
    Logger.log('Message "%s" contains the attachment "%s" (%s bytes)',
               messages[i].getSubject(), files[j].getName(), files[j].getSize());
  }
}
var csvFile = "";
for (var i = 0; i < files.length; i++) {
  csvFile = files[i].getContentAsString();
}
Why is .copyBlob() not returning a blob, but in this case a GmailAttachment, and how can I fix it?
is the problem here:
files[j] = attachments[k].copyBlob();
?
By the way, I also tried getBlob() instead of copyBlob(), and it returned a type error at the line above.
Using copyBlob(), I get the TypeError at this line:
csvFile = files[i].getContentAsString();
Thanks for your help!
In order to get the string value of a blob you should use the getDataAsString() method of the Blob class. The .getContentAsString() method is from the DocsList service, which is deprecated.
You could also use the getDataAsString() method on the GmailAttachment class itself. Hope that helps!
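For example, here is a minimal sketch of reading the first CSV attachment of some messages into a sheet; the Gmail search query and the sheet name are placeholders, and getDataAsString() is called directly on the GmailAttachment:

// Minimal sketch: read the first attachment of each matching message as CSV
// and append it to a sheet. 'label:csv-reports' and 'Imported' are placeholders.
function importCsvAttachments() {
  var threads = GmailApp.search('label:csv-reports', 0, 5);
  var sheet = SpreadsheetApp.getActiveSpreadsheet().getSheetByName('Imported');
  threads.forEach(function (thread) {
    thread.getMessages().forEach(function (message) {
      var attachments = message.getAttachments();
      if (attachments.length === 0) return;
      var csvText = attachments[0].getDataAsString(); // works on GmailAttachment directly
      var rows = Utilities.parseCsv(csvText);         // 2D array of strings
      sheet.getRange(sheet.getLastRow() + 1, 1, rows.length, rows[0].length).setValues(rows);
    });
  });
}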

Calling scriptDb.saveBatch with ~7600 items results in a rateMax error

I'm working on an apps script to periodically check for modified items on a web service. Because API calls were taking too long, I've been trying to cache some of the data periodically in ScriptDb. However, trying to update data using scriptDb.saveBatch always results in the following error:
Service invoked too many times in a short time: scriptdb rateMax. Try Utilities.sleep(1000) between calls.
My script is querying ScriptDb and returning a result set of ~7600 records, modifying those records, and then saving everything back in a batch. I can't think of any way, given the tools Google makes available, to reduce the number of database calls I make. Is this really too much for ScriptDb to handle, or is there some way to improve on my code?
function getRootFolders() {
  var updateTimestamp = new Date().valueOf();
  var results = GetModifiedFolders(ROOT_FOLDER); //Returns results from an API call
  var data = results.data; //The actual data from the API, as an array
  var len = data.length;
  if (len > 0) {
    //Get a collection of dbMaps from ScriptDb
    var maps = {}; //Store as an object for easy updating
    var getMaps = db.query({'type': 'baseFolder'}).limit(50000); //Returns 7621 items
    while (getMaps.hasNext()) {
      var map = getMaps.next();
      maps[map.boxId] = map;
    }
    //Iterate through the results
    for (i = 0; i < len; i++) {
      var item = data[i];
      var map = maps[item.boxId]; //Try to retrieve an existing dbMap
      if (map) { //If it exists, update the existing dbMap
        map.modified = item.modified;
        map.updateTimestamp = updateTimestamp;
      }
      else { //Otherwise, insert the result into the collection of dbMaps
        item.type = 'baseFolder';
        item.updateTimestamp = updateTimestamp;
        maps[item.boxId] = item;
      }
    }
    //Convert the object back to an array, and use that to save to ScriptDb
    var toSave = [];
    for (var prop in maps) {
      toSave.push(maps[prop]);
    }
    var mutations = db.saveBatch(toSave, false); //FAIL with scriptdb rateMax
    if (db.allOk(mutations)) {
      ( . . . )
    }
  }
}
EDIT:
I've made a few changes in an effort to stop this from happening, but to no avail. I'm sleeping for several minutes before calling saveBatch, and then I'm saving in multiple, smaller batches, sleeping in between each one.
At this point, I can't imagine why I'm still getting this rateMax error. Is there something wrong with my code that I'm missing, or is this a bug in apps script? I assume it's my fault, but I can't see it.
Here's what I've added:
//Retrieving data from the API takes ~1 minute
//Sleep for a while to avoid rateMax error
var waitUntil = updateTimestamp + 240000; //Wait until there's only 1 minute left in the 5 minute quota
var msToWait = waitUntil - (now.valueOf());
Utilities.sleep(msToWait); //Sleep for ~3 minutes

//Save in batches
var batchSize = 250;
var batch = [];
var i = 0;
for (var prop in maps) {
  batch.push(maps[prop]);
  i++;
  //When the batch reaches full size, save it
  if (i % batchSize == 0 || i == len) {
    Utilities.sleep(1000);
    var mutations = db.saveBatch(batch, false);
    if (!db.allOk(mutations)) {
      return false;
    }
    batch = [];
  }
}
Split the batch into smaller parts.
It won't affect the code because the batch is not atomic anyway.
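As a rough sketch, assuming the same toSave array, db handle and db.allOk helper as in your code (the chunk size and sleep are guesses to tune against the rateMax error):

// Save toSave in small chunks, pausing between chunks.
var CHUNK_SIZE = 50;
for (var start = 0; start < toSave.length; start += CHUNK_SIZE) {
  var chunk = toSave.slice(start, start + CHUNK_SIZE);
  var mutations = db.saveBatch(chunk, false); // non-atomic, as in the original
  if (!db.allOk(mutations)) {
    throw new Error('saveBatch failed for the chunk starting at index ' + start);
  }
  Utilities.sleep(1000); // back off between chunks
}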

Google Apps Script Chart Services: The script completed but did not return anything?

Main question: why is the script below not returning anything? When given a version and deployed, the script returns 'The script completed but did not return anything.'
(Bonus question: can I run two charts on two different datatables within the same dashboard and within the same script?)
The input is a Google Spreadsheet with four columns and a header row: Year|Month|Group|Count, all numerics except Group (single letter).
The objective is to create a line chart of total counts by yearmo (201001, 201002...). Ideally I'd like to generate another line chart of total counts by year-group within the same script, but one step at a time...
Since the data isn't rolled up (and I'm new to google apps script and can't find any aggregation outside pivoting the spreadsheet itself), I resort to hashes to sum counts over year-month. I'll do the same for year-groups.
It may not be elegant (I happen to be new to js too), but it makes sense to me. Too bad it doesn't work. Thanks for pointers.
EDIT 2013-04-25
Tried to get too much done in a single script and lost my footing. So I've taken the aggregation outside of the script, created a new spreadsheet, and ran the code found here https://sites.google.com/site/appsscripttutorial/chart-services/line-chart: got my line chart.
function doGet(){
  // counts by yearmo
  var ss = SpreadsheetApp.openById('0Atd6tVDA1d3UOI-jfkdlRHbk85Y19BcU9BNFdPNXBlVlE');
  var data = ss.getDataRange().getValues();
  var yr = [];
  var mo = [];
  for (var i = 1; i < data.length; i++) {
    yr[data[i][0]] = 0;
    mo[data[i][1]] = 0;
  }
  Logger.log( yrLvls = Object.keys(yr) );
  Logger.log( moLvls = Object.keys(mo) );
  // initialize hash
  var yearmo = [];
  for (var i = 0; i < yrLvls.length; i++){
    for (var j = 0; j < moLvls.length; j++){
      var key = yrLvls[i] + '-' + moLvls[j]
      yearmo[ key ] = 0;
    }
  }
  // aggregation
  for (var i = 1; i < data.length; i++) {
    yearmo[ data[i][0] + '-' + data[i][1] ] += data[i][3];
  }
  // check
  var values = Object.keys(yearmo).map(function(key){
    return yearmo[key];
  });
  Logger.log( values );
  // Year Mo
  Logger.log( "--------------" );
  var data_yearmo = Charts.newDataTable()
      .addColumn(Charts.ColumnType.STRING, 'YearMo')
      .addColumn(Charts.ColumnType.NUMBER, 'Count');
  Logger.log( Object.keys(yearmo) );
  for ( key in Object.keys(yearmo) ) {
    Logger.log( key );
    data_yearmo.addRow( [ key , yearmo[key] ] );
  }
  data_yearmo.build()
  var lineChart_yearmo = Charts.newLineChart()
      .setTitle('Counts')
      .setXAxisTitle('YearMo')
      .setYAxisTitle('Counts')
      .setCurveStyle(Charts.CurveStyle.SMOOTH)
      .setPointStyle(Charts.PointStyle.MEDIUM)
      .setDataTable(data_yearmo);
  var uiApp = UiApp.createApplication().setTitle('Yearmo');
  uiApp.add(lineChart_yearmo);
  return uiApp;
}
I suspect that you did not update the version of the app that you deployed. Try using the "latest code" development link (you can get it from the dialog when you deploy your app) and if that works, create a new version from the Manage Versions menu and update the deployed app to use that version.
I forgot to add .build() to my Charts.newLineChart() call.
The data_yearmo.build() isn't necessary apparently, but the '.build()' for Charts.newLineChart() definitely is. Credit goes to the link in the Edit above.
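For reference, here is roughly what the tail end of doGet looks like with the missing .build() added (everything above it unchanged):

var lineChart_yearmo = Charts.newLineChart()
    .setTitle('Counts')
    .setXAxisTitle('YearMo')
    .setYAxisTitle('Counts')
    .setCurveStyle(Charts.CurveStyle.SMOOTH)
    .setPointStyle(Charts.PointStyle.MEDIUM)
    .setDataTable(data_yearmo)
    .build(); // without build(), doGet hands UiApp a builder and nothing is rendered

var uiApp = UiApp.createApplication().setTitle('Yearmo');
uiApp.add(lineChart_yearmo);
return uiApp;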