I'm trying to optimize this code to make it as short as possible I use it to call an API to get data in a Google Sheet. I've been told that it's the fetch that makes the script so long, and that I could try with a fetchAll but it breaks my code, I feel like putting my url in an array breaks my code (for the fetchAll). I also had suspicions about the if statement that I put in case the data is null (already made my function crash).
// Standard functions to call the spreadsheet sheet and activesheet
function GetPipedriveDeals2() {
let ss = SpreadsheetApp.getActiveSpreadsheet();
let sheets = ss.getSheets();
let sheet = ss.getActiveSheet();
//the way the url is build next step is to iterate between the end because api only allows a fixed number of calls (100) this way i can slowly fill the sheet.
let url = "https://laptop.pipedrive.com/v1/products:(id)?start=";
let limit = "&limit=500";
//let filter = "&filter_id=64";
let pipeline = 1; // put a pipeline id specific to your PipeDrive setup
let start = 1;
//let end = start+50;
let token = "&api_token=XXXXXXXXXXXXXXXXXXXXXXXX"
let response = UrlFetchApp.fetch(url+start+limit+token); //
let dataAll = JSON.parse(response.getContentText());
let dataSet = dataAll;
//let prices = prices;
//create array where the data should be put
let rows = [], data;
for (let i = 0; i < dataSet.data.length; i++) {
data = dataSet.data[i];
rows.push([data.id,
GetPipedriveDeals4(data.id)
]);
}
Logger.log( 'function2' ,JSON.stringify(rows,null,8) ); // Log transformed data
return rows;
}
// Standard functions to call the spreadsheet sheet and activesheet
function GetPipedriveDeals4(idNew) {
let ss = SpreadsheetApp.getActiveSpreadsheet();
let sheets = ss.getSheets();
let sheet = ss.getActiveSheet();
//the way the url is build next step is to iterate between the end because api only allows a fixed number of calls (100) this way i can slowly fill the sheet.
let url = "https://laptop.pipedrive.com/v1/products/"+idNew+"/deals:(id,d93b458adf4bf84fefb6dbce477fe77cdf9de675)?start=";
let limit = "&limit=500";
//let filter = "&filter_id=64";
let pipeline = 1; // put a pipeline id specific to your PipeDrive setup
let start = 1;
//let end = start+50;
let token = "&api_token=XXXXXXXXXXXXXXXXXXXXXX"
let response = UrlFetchApp.fetch(url+start+limit+token); //
let dataAll = JSON.parse(response.getContentText());
let dataSet = dataAll;
//Logger.log(dataSet)
//let prices = prices;
//create array where the data should be put
let rows = [], data;
if(dataSet.data === null )return
else {
for (let i = 0; i < dataSet.data.length; i++) {
data = dataSet.data[i];
let idNew = data.id;
rows.push([data.id, data['d93b458adf4bf84fefb6dbce477fe77cdf9de675']]);
}
Logger.log( 'function4', JSON.stringify(rows,null,2) ); // Log transformed data
return rows;
}
}
Try with fetchAll:
// Standard functions to call the spreadsheet sheet and activesheet
function GetPipedriveDeals2() {
let ss = SpreadsheetApp.getActiveSpreadsheet();
let sheets = ss.getSheets();
let sheet = ss.getActiveSheet();
//the way the url is build next step is to iterate between the end because api only allows a fixed number of calls (100) this way i can slowly fill the sheet.
let limit = "&limit=500";
//let filter = "&filter_id=64";
let pipeline = 1; // put a pipeline id specific to your PipeDrive setup
let start = 1;
//let end = start+50;
let token = "&api_token=XXXXXXXXXXXXXXXXXXXXXX"
let url = "https://laptop.pipedrive.com/v1/products:(id)?start="+start+limit+token;
let request = [url];
let response = UrlFetchApp.fetchAll(request); //
let dataAll = response.map(function(e) {return e.getContentText()});
let dataSet = dataAll;
//let prices = prices;
//create array where the data should be put
let rows = [], data;
for (let i = 0; i < dataSet.data.length; i++) {
data = dataSet.data[i];
rows.push([data.id,
GetPipedriveDeals4(data.id)
]);
}
Logger.log( 'function2' ,JSON.stringify(rows,null,8) ); // Log transformed data
return rows;
}
function GetPipedriveDeals4(idNew) {
let start = 1;
let limit = "&limit=500";
let token = "&api_token=XXXXXXXXXXXXXXXXXXXX"
let urli = "https://laptop.pipedrive.com/v1/products/"+idNew+"/deals:(id,d93b458adf4bf84fefb6dbce477fe77cdf9de675)?start="+start+limit+token;
let request1 = [urli]
let response1 = UrlFetchApp.fetchAll(request1); //
var dataAll1 = response1.map(function(e) {return e.getContentText()});
let dataSet1 = dataAll1;
//the way the url is build next step is to iterate between the end because api only allows a fixed number of calls (100) this way i can slowly fill the sheet.
let urli = "https://laptop.pipedrive.com/v1/products/"+idNew+"/deals:(id,d93b458adf4bf84fefb6dbce477fe77cdf9de675)?start="+start+limit+token;
let request1 = [urli]
let response1 = UrlFetchApp.fetchAll(request1); //
var dataAll1 = response1.map(function(e) {return e.getContentText()});
let dataSet1 = dataAll1;
//Logger.log(dataSet1)
//let prices = prices;
//create array where the data should be put
let rows1 = [], data1;
if(dataSet1.data1 === null )return
else {
for (let i = 0; i < dataSet1.data1.length; i++) {
data1 = dataSet1.data1[i];
let idNew = data1.id;
rows1.push([data1.id, data1['d93b458adf4bf84fefb6dbce477fe77cdf9de675']]);
}
Logger.log( 'function4', JSON.stringify(rows1,null,2) ); // Log transformed data
return rows1;
}
}
So I saw in the documentation that I must put my URL in Tab[] to make the request but know I have the following error :
8 juil. 2020 à 16:06:18 Erreur TypeError: Cannot read property 'length' of undefined
at GetPipedriveDeals2(Copie de importNamesTypes:22:36)
I suppose I'm doing something wrong but can't see it. Thanks
So yes I didn't put the modifications online but in fact I managed to do what I want, so I will put my code and some explanation.
First of all I didn't manage to execute the script above, or any other that I put online for two reasons :
- 1 : The first that I wrote was too long for google sheet, the execution time was above 50s (max 30s)
- 2 : The second script that I made, was too fast for the API pipedrive and when I manage to solve this issue, I've got an error saying "cannot read property of null" it was because pipedrive was returning "null" in string and null as an empty value, so this was breaking the code. When I manage to solve this the script was again too long.
So I rework the script again and put it with a start and a limit in parameters. So now I am calling my function by doing this =getPipeDriveDeals(0, 50) and =getPipeDriveDeals(51, 90) etc....
So this is the code that I wrote :
function getPipedriveDeals(start , limit) {
var allResponsesDeals = [];
options = {muteHttpExceptions: true};
var idsListRequest = "https://xxxx.pipedrive.com/v1/products:(id)?start=";
var token = "&api_token=hiddenforobviousreasons";
var response = UrlFetchApp.fetch(idsListRequest + start +"&limit="+limit + token, options);
let dataAll = JSON.parse(response.getContentText()).data;
var requests = [];
let rows = [], data;
//Logger.log("data="+ JSON.stringify(dataAll, null, 2));
if(dataAll === null) {
//Logger.log(" dataAll issss nnnnuulll" );
}else{
dataAll.forEach(function(product) {
var productDetailUrl = "https://xxxx.pipedrive.com/v1/products/" + product.id + "/deals:(title,7d321c7454a4b44a09f32bdd7702a2b17fd7654e)?start=0";
requests = productDetailUrl + token
var responses = UrlFetchApp.fetch(requests);
var dataArray = JSON.parse(responses.getContentText());
if(dataArray.data === null) {
//Logger.log(" newData issss nnnnuulll" );
}else
{
for (let i = 0; i < dataArray.data.length; i++) {
data = dataArray.data[i];
rows.push([data.title, data['7d321c7454a4b44a09f32bdd7702a2b17fd7654e']])
}
}
});
}
Logger.log("allResponsesDeals ="+ JSON.stringify(rows, null, 2));
return rows;
}
function getAllDeals(){
var allResponses = [];
var deals = getPipedriveDeals();
Logger.log("deals="+ JSON.stringify(deals, null, 2));
deals.forEach((response)=>{allResponses.push(response)});
allResponses.push(deals)
Logger.log("allResponses="+ allResponses);
return allResponses;
}
If you want to add specific data to your sheet just change it in rows.pushand in the Url of you second call.
I advise you to not make modification of the data you want to get with you'r first request (the ID is use to make all the other request).
I hope this will help you and all the people that want to tweak Pipedrive functionality.
Related
Per CallRail's documentation (https://apidocs.callrail.com/) I have been trying to write data to a Google Sheet using Google App Scripts. Currently, it is only pulling in 100 rows of data. I think it may have something to do with the fact that each page has 100 results, but I tried to create a loop of multiple requests and also increase the limit.
I tried the code below, expecting it to return the 5000 most recent records, but it still stops after writing 100 rows.
`function importCallData() {
const apiToken = "TOKEN_HERE";
const sheetId = "SHEET_ID_HERE";
const sheetName = "Data";
const daysToRetrieve = 30;
// Calculate the start and end times for the API request
const now = new Date();
const endTime = Math.floor(now.getTime() / 1000);
const startTime = Math.floor(now.getTime() / 1000) - (daysToRetrieve * 24 * 60 * 60);
// Fetch the data from the CallRail API
let data = [];
let offset = 0;
let url = `https://api.callrail.com/v3/a/ACCOUNT_ID_HERE/calls?fields=id,answered,start_time,duration,utm_source,utm_medium,utm_campaign&limit=100&offset=${offset}`;
let options = {
headers: {
Authorization: `Token token=${apiToken}`
}
};
let response = UrlFetchApp.fetch(url, options);
let newData = JSON.parse(response.getContentText()).calls;
data = data.concat(newData);
offset += newData.length;
while (newData.length === 100 && data.length < 5000) {
url = `https://api.callrail.com/v3/a/ACCOUNT_ID_HERE/calls?fields=id,answered,start_time,duration,utm_source,utm_medium,utm_campaign&created_after=${startTime}&created_before=${endTime}&limit=100&offset=${offset}`;
response = UrlFetchApp.fetch(url, options);
newData = JSON.parse(response.getContentText()).calls;
data = data.concat(newData);
offset += newData.length;
}
// Write the data to the Google Sheet
const sheet = SpreadsheetApp.openById(sheetId).getSheetByName(sheetName);
sheet.clearContents();
sheet.appendRow(["id", "answered", "start_time", "duration", "utm_source", "utm_medium", "utm_campaign"]);
data.forEach(call => {
const row = [
call.id,
call.answered,
call.start_time,
call.duration,
call.utm_source,
call.utm_medium,
call.utm_campaign
];
sheet.appendRow(row);
});
}`
Let me just preface this with I know this script could probably be better but I'm very new to scripting in any language and I'm just playing around with an idea for work. This is essentially just a frankensteined combination of google results for what I want to accomplish but I've hit a deadend.
I'm using an aspect from another script to return contentservice in JSON format for a webapp except it's not working in this case
As far as I can tell it should work perfectly fine and if I replace the return contentservice with the browser.msgbox below I get the values returned that I want but when using contentservice and going to my scripts Web App URL with the action pointing to range I get the error The script completed but did not return anything.
var mysheet = SpreadsheetApp.openByUrl("https://docs.google.com/spreadsheets/d/1sPuqdg0Va9LLQudl2ta23b-CGEF_-FFSTeggRw3J4L4/edit").getSheetByName('Sheet3');
var sheet = SpreadsheetApp.openByUrl("https://docs.google.com/spreadsheets/d/1sPuqdg0Va9LLQudl2ta23b-CGEF_-FFSTeggRw3J4L4/edit").getSheetByName('Sheet1');
function doGet(e){
var action = e.parameter.action;
if(action == 'Range'){
return Range(e);
}
}
function Range(e) {
let term = 'Customer 6';
var sdata = mysheet.getRange("A:A").getValues();
sdata.forEach((val, index) => {
if(val == term){
var msgr = "B" + (index+1)
var msgc = "D" + (index+1)
var rrow = mysheet.getRange(msgr).getValue();
var ccol = mysheet.getRange(msgc).getValue();
var data = sheet.getRange("E" + rrow + ":I"+ccol);
var records={};
var rows = sheet.getRange("E" + rrow + ":I"+ccol).getValues();
data = [];
for (var r = 0, l = rows.length; r < l; r++) {
var row = rows[r],
record = {};
record['Product'] = row[0];
record['Case']=row[2];
record['Order QTY']=row[3];
record['Packed']=row[4];
data.push(record);
}
records.items = data;
var result=JSON.stringify(records);
return ContentService.createTextOutput(result).setMimeType(ContentService.MimeType.JSON);
// Browser.msgBox(result);
}})}
I can't figure out why I'm getting the correct returned values for the msgBox, but no results for the ContentService.
Any help would be greatly appreciated. Thanks in advance.
Edit: I have been publishing new webapp versions every revision
This breaks because you're returning from within a forEach() loop. Check out the MDN web docs:
There is no way to stop or break a forEach() loop other than by throwing an exception. If you need such behavior, the forEach() method is the wrong tool.
The simplest fix is to use a for loop instead.
function Range(e) {
let term = 'Customer 6';
var sdata = mysheet.getRange("A:A").getValues();
for (var index = 0; index < sdata.length; index++) {
var val = sdata[index];
if(val == term){
var msgr = "B" + (index+1)
var msgc = "D" + (index+1)
var rrow = mysheet.getRange(msgr).getValue();
var ccol = mysheet.getRange(msgc).getValue();
var data = sheet.getRange("E" + rrow + ":I"+ccol);
var records={};
var rows = sheet.getRange("E" + rrow + ":I"+ccol).getValues();
data = [];
for (var r = 0, l = rows.length; r < l; r++) {
var row = rows[r],
record = {};
record['Product'] = row[0];
record['Case']=row[2];
record['Order QTY']=row[3];
record['Packed']=row[4];
data.push(record);
}
records.items = data;
var result=JSON.stringify(records);
return ContentService.createTextOutput(result).setMimeType(ContentService.MimeType.JSON);
}
}
}
I am trying to export my Pipedrive data to a Google Sheet, in particular to make the link between two of my queries. So I first wrote this script:
function GetPipedriveDeals2() {
let ss = SpreadsheetApp.getActiveSpreadsheet();
let sheets = ss.getSheets();
let sheet = ss.getActiveSheet();
//the way the url is build next step is to iterate between the end because api only allows a fixed number of calls (100) this way i can slowly fill the sheet.
let url = "https://laptop.pipedrive.com/v1/products:(id)?start=";
let limit = "&limit=500";
//let filter = "&filter_id=64";
let pipeline = 1; // put a pipeline id specific to your PipeDrive setup
let start = 1;
//let end = start+50;
let token = "&api_token=XXXXXXXXXXXXXXX";
let response = UrlFetchApp.fetch(url+start+limit+token); //
let dataAll = JSON.parse(response.getContentText());
let dataSet = dataAll;
//let prices = prices;
//create array where the data should be put
let rows = [], data;
for (let i = 0; i < dataSet.data.length; i++) {
data = dataSet.data[i];
rows.push([data.id,
GetPipedriveDeals4(data.id)
]);
}
Logger.log( 'function2' ,JSON.stringify(rows,null,8) ); // Log transformed data
return rows;
}
// Standard functions to call the spreadsheet sheet and activesheet
function GetPipedriveDeals4(idNew) {
let ss = SpreadsheetApp.getActiveSpreadsheet();
let sheets = ss.getSheets();
let sheet = ss.getActiveSheet();
//the way the url is build next step is to iterate between the end because api only allows a fixed number of calls (100) this way i can slowly fill the sheet.
let url = "https://laptop.pipedrive.com/v1/products/"+idNew+"/deals:(id,d93b458adf4bf84fefb6dbce477fe77cdf9de675)?start=";
let limit = "&limit=500";
//let filter = "&filter_id=64";
let pipeline = 1; // put a pipeline id specific to your PipeDrive setup
let start = 1;
//let end = start+50;
let token = "&api_token=XXXXXXXXXXXXXXXXX"
let response = UrlFetchApp.fetch(url+start+limit+token); //
let dataAll = JSON.parse(response.getContentText());
let dataSet = dataAll;
//Logger.log(dataSet)
//let prices = prices;
//create array where the data should be put
let rows = [], data;
if(dataSet.data === null )return
else {
for (let i = 0; i < dataSet.data.length; i++) {
data = dataSet.data[i];
let idNew = data.id;
rows.push([data.id, data['d93b458adf4bf84fefb6dbce477fe77cdf9de675']]);
}
Logger.log( 'function4', JSON.stringify(rows,null,2) ); // Log transformed data
return rows;
}
}
But it is not optimized at all and takes about 60 seconds to run, and google script executes the custom functions only for 30 seconds... With help, I had this second function:
function getPipedriveDeals(apiRequestLimit){
//Make the initial request to get the ids you need for the details.
var idsListRequest = "https://laptop.pipedrive.com/v1/products:(id)?start=";
var start = 0;
var limit = "&limit="+apiRequestLimit;
var token = "&api_token=XXXXXXXXXXX";
var response = UrlFetchApp.fetch(idsListRequest+start+limit+token);
var data = JSON.parse(response.getContentText()).data;
//For every id in the response, construct a url (the detail url) and push to a list of requests
var requests = [];
data.forEach(function(product){
var productDetailUrl = "https://laptop.pipedrive.com/v1/products/"+product.id+"/deals:(id,d93b458adf4bf84fefb6dbce477fe77cdf9de675)?start=";
requests.push(productDetailUrl+start+limit+token)
})
//With the list of detail request urls, make one call to UrlFetchApp.fetchAll(requests)
var allResponses = UrlFetchApp.fetchAll(requests);
// logger.log(allResponses);
return allResponses;
}
But this time it's the opposite. I reach my request limit imposed by Pipedrive: https://pipedrive.readme.io/docs/core-api-concepts-rate-limiting (80 requests in 2 sec).
I confess I have no more idea I thought of putting OAuth2 in my script to increase my query limit, but it seems really long and complicated I'm not at all in my field.
In summary, I would just like to have a script that doesn't execute requests too fast but without exceeding the 30 seconds imposed by Google Apps Script.
---------------------EDIT---TEST---FOREACH80-------------------------------------
function getPipedriveProducts(){
//Make the initial request to get the ids you need for the details.
var idsListRequest = "https://laptop.pipedrive.com/v1/products:(id)?start=";
var start = 0;
var limit = "&limit=500";
var token = "&api_token=XXXXXXXXXXXXXXXXXXX";
var response = UrlFetchApp.fetch(idsListRequest+start+limit+token);
var data = JSON.parse(response.getContentText()).data;
//For every id in the response, construct a url (the detail url) and push to a list of requests
const batch = new Set;
let requests = [];
data.forEach(function(product){
var productDetailUrl = "https://laptop.pipedrive.com/v1/products/" + product.id + "/deals:(id,d93b458adf4bf84fefb6dbce477fe77cdf9de675)?start=";
requests.push(productDetailUrl+start+limit+token);
if(requests.length === 79) {
batch.add(requests);
requests = [];
}
})
const allResponses = [...batch].flatMap(requests => {
Utilities.sleep(2000);
return UrlFetchApp.fetchAll(requests);
Logger.log(allResponses)
});
}
Create Set of 80 requests each
Execute each set value using fetchAll
const batch = new Set;
let requests = [];
data.forEach(function(product){
var productDetailUrl = "https://example.com";
requests.push(productDetailUrl+start+limit+token);
if(requests.length === 80) {
batch.add(requests);
requests = [];
}
})
const allResponses = [...batch].flatMap(requests => {
Utilities.sleep(2000);
return UrlFetchApp.fetchAll(requests);
});
Chunking
One of the most important concepts in working with APIs is chunking as you need to avoid rate-limiting, accommodate request scheduling, parallelize CPU-heavy calculations, etc. There are countless ways to split an array in chunks (see half a hundred answers in this canonical Q&A just for JavaScript).
Here is a small configurable utility tailored to the situation where one wants to split a flat array into an array of arrays of a certain size/pattern (which is usually the case with request chunking):
/**
* #typedef {object} ChunkifyConfig
* #property {number} [size]
* #property {number[]} [limits]
*
* #summary splits an array into chunks
* #param {any[]} source
* #param {ChunkifyConfig}
* #returns {any[][]}
*/
const chunkify = (source, {
limits = [],
size
} = {}) => {
const output = [];
if (size) {
const {
length
} = source;
const maxNumChunks = Math.ceil((length || 1) / size);
let numChunksLeft = maxNumChunks;
while (numChunksLeft) {
const chunksProcessed = maxNumChunks - numChunksLeft;
const elemsProcessed = chunksProcessed * size;
output.push(source.slice(elemsProcessed, elemsProcessed + size));
numChunksLeft--;
}
return output;
}
const {
length
} = limits;
if (!length) {
return [Object.assign([], source)];
}
let lastSlicedElem = 0;
limits.forEach((limit, i) => {
const limitPosition = lastSlicedElem + limit;
output[i] = source.slice(lastSlicedElem, limitPosition);
lastSlicedElem = limitPosition;
});
const lastChunk = source.slice(lastSlicedElem);
lastChunk.length && output.push(lastChunk);
return output;
};
const sourceLimited = [1, 1, 2, 2, 2, 3];
const outputLimited = chunkify(sourceLimited, { limits: [2, 1] });
console.log({ source : sourceLimited, output : outputLimited });
const sourceSized = ["ES5", "ES6", "ES7", "ES8", "ES9"];
const outputSized = chunkify(sourceSized, { size: 2 });
console.log({ source : sourceSized, output : outputSized });
From there, the only thing you need is to traverse the array while waiting for each chunk to complete to make it applicable to your situation. Please beware that requests can fail for any number of reasons - you should persist last successfully processed chunk.
I am trying to make an API call which has a 50 records per call limit, the JSON response gives me "objects" and "total_objects".
I am running into an issue with the code that i managed to put together with help, the output does increment from the 50th record till the 95th record, post which it loops back to print from the 45th record and loops with the same set.
issue: stuck in a loop where the increment is one record at a time and a repeat of 49 old records
var client_id = 'xxxx';
var client_secret = 'xxx';
var email = 'xxx';
var password = 'abc';
var device = '1';
var app_version = '1';
var Token = 'abcde';
// call the API to get data for your list
function AP_Hyd() {
// URL and params for the API
var root = 'https://abcdef:443//v2/';
var endpoint = 'ghij/3241/objects?search=rtewq&limit=50&offset=';
var offset = '0'
// parameters for url fetch
var params = {
'method': 'GET',
'muteHttpExceptions': true,
'headers': {
'Authorization': 'Bearer ' + Token
}
};
// call the API for first offset
var json = getAssetData(root,endpoint, offset, params);
var totalObjects = json.objects;
//Adding data to ActiveGoogleSheet
var sheet = SpreadsheetApp.getActiveSheet();
sheet.clear()
//Adding Column Headings
var headerRow = ['erwf', 'gtre', 'poi', 'hgf', 'lkj', 'zyx'];
sheet.appendRow(headerRow);
//print first batch
printRows(totalObjects,sheet);
//check for any further data available
if (totalObjects) {
let totalPages = json.totals.objects;
//get total number of pages to be retrieved
var pageCount = parseInt(totalPages / 50)
//check for decimal
//if decimal is found increase the value of pageCount by 1
if(totalPages%50 !=0){
pageCount++;
}
Logger.log("pageCount:::" + pageCount +" for total objects:: " + totalPages);
for (i = 1; i < pageCount; i++) {
//invoke again and lets say the response is added in response
// call the API with incremented offset
var pageResponse = getAssetData(root,endpoint, i, params);
var jsonObjects = pageResponse.objects;
//print the rows
printRows(jsonObjects,sheet);
}
}
}
function getAssetData(root,endpoint,offset, params){
var response = UrlFetchApp.fetch(root + endpoint + offset, params);
var content = response.getContentText()
//return the parsed content
//TODO: add null check
return JSON.parse(content)
}
function printRows(jsonData, sheet) {
if (jsonData) {
for (var i = 0; i < jsonData.length; i++) {
var fieldObj = jsonData[i];
;
Logger.log(fieldObj)
// print out all the Columns
sheet.appendRow(row);
}
}
}
At this moment, I have a script that works correctly to list students of a class in Google Classroom, but it does NOT list ALL of the students, only the first 30. I need it to list ALL of the students, no matter how many there are. What I have now is the following:
function listStudents() {
var s = SpreadsheetApp.getActiveSpreadsheet();
var sh = s.getSheetByName('CLASS');
var r = sh.getDataRange();
var n = r.getNumRows();
var d = r.getValues();
for (x = 0; x < n; x++) {
var i = d[x][0];
if(i == ''){ continue; } else if (i == 'D') {
var ss = SpreadsheetApp.getActiveSpreadsheet();
var sh = ss.getSheetByName('LISTSTUDENTS');
var tea = Classroom.Courses.Students.list(d[x][8]);
var t = tea.students;
var arr = [];
try {
for (i = 0; i < t.length; i++) {
var c = t[i];
var ids = c.profile;
var em = ids.emailAddress;
arr.push([em]);
}
}
catch (e) { continue; }
sh.getRange(d[x][14], d[x][15], arr.length, arr[0].length).setValues(arr);
}
}
}
You receive only 30 students in the query because you are only accessing the first page of results. Almost every "advanced service" functions in a similar manner with regards to collections, in that they return a variable number of items in the call (usually up to a size that can be specified in the query, but there are limits). This is to ensure timely service availability for everyone who uses it.
For example, consider Bob (from Accounting). This style of request pagination means he can't request a single response with 20,000 items, during which the service is slower for everyone else. He can, however, request the next 100 items, 200 times. While Bob is consuming those 100 items from his most recent query, others are able to use the service without disruption.
To set this up, you want to use a code loop that is guaranteed to execute at least once, and uses the nextPageToken that is included in the response to the call to .list() to control the loop. In Javascript / Google Apps Script, this can be a do .. while loop:
// Runs once, then again until nextPageToken is missing in the response.
const roster = [],
// The optional arguments pageToken and pageSize can be independently omitted or included.
// In general, 'pageToken' is essentially required for large collections.
options = {pageSize: /* reasonable number */};
do {
// Get the next page of students for this course.
var search = Classroom.Courses.Students.list(courseId, options);
// Add this page's students to the local collection of students.
// (Could do something else with them now, too.)
if (search.students)
Array.prototype.push.apply(roster, search.students);
// Update the page for the request
options.pageToken = search.nextPageToken;
} while (options.pageToken);
Logger.log("There are %s students in class # %s", roster.length, courseId);
for those who struggle with this, here's the code
function listStudent() {
var pageSizeValue = 300; /*** change with numbers that you want*/
var nextPageToken = '';
var courseID = 1234; /*** change with numbers that you want*/
var ownerArray = [];
do {
var optionalArgs = {
pageSize: pageSizeValue,
pageToken: nextPageToken
};
var cls = Classroom.Courses.Students.list(courseID, optionalArgs);
var nextPageToken = cls.nextPageToken;
const ssData = cls.students.map(c => {
return [c.profile.id,c.profile.name.fullName,c.profile.emailAddress]
});
Array.prototype.push.apply(ownerArray, ssData);
} while (nextPageToken);
const ss = SpreadsheetApp.openById("1234"); // <<< UPDATE THIS
const sheet = ss.getSheetByName("Sheet1"); // <<< UPDATE THIS
sheet.getRange(2,1,ownerArray.length,ownerArray[0].length).setValues(ownerArray); // <<< UPDATE THIS
}
I modified the getRoster function in the example provided by Google (https://developers.google.com/apps-script/add-ons/editors/sheets/quickstart/attendance) as follows, and it worked for me.
function getRoster(courseId) {
var studentNames = []
var studentEmails = []
var nextPageToken = ''
do {
var optionalArgs = {
pageSize: 30,
pageToken: nextPageToken
};
var response = Classroom.Courses.Students.list(courseId, optionalArgs)
var students = response.students
nextPageToken = response.nextPageToken
for (var i = 0; i <= students.length; i++) {
try {
studentNames.push(students[i].profile.name.fullName)
studentEmails.push(students[i].profile.emailAddress)
} catch (err) {
}
}
} while (nextPageToken);
return { "studentNames":studentNames, "studentEmails":studentEmails }
}