Creating a DCM report through Google Apps Script - google-apps-script

I am fairly new to Google Apps Script. I need to build a report inside a spreadsheet using Google Apps Script functionality. I know how to download an existing report from DCM/DFA Reporting into a spreadsheet.
But I've failed to create a new report from inside the spreadsheet without downloading an existing report from DCM/DFA. Is it possible? Maybe somebody has a sample (I haven't found one)? Or have I missed the concept, and the only way to pull data into a spreadsheet is to create the report in the DCM/DFA Report Builder first?
Here is the code I'm using:
function generateReport() {
  var ss = SpreadsheetApp.getActiveSpreadsheet();
  var sheet = ss.getSheetByName('Reports');
  var profileId = 2623334;
  var httpOptions = {
    'headers': {'Authorization': 'Bearer ' + ScriptApp.getOAuthToken()}
  };
  var resource = {
    'kind': 'dfareporting#report',
    'accountId': '34405',
    'type': 'STANDARD',
    'name': 'Simple Report',
    'criteria': {
      'dateRange': {
        'kind': 'dfareporting#dateRange',
        'startDate': '2016-09-01',
        'endDate': '2017-01-22',
      },
      'dimensions': [
        {
          'kind': 'dfareporting#sortedDimension',
          'name': 'dfa:date',
        }
      ],
      'metricNames': [
        'dfa:clicks', 'dfa:impressions'
      ],
    }
  };
  var url = DoubleClickCampaigns.Reports.insert(resource, profileId);
  var report = UrlFetchApp.fetch(url.urls.apiUrl, httpOptions);
  for (var i = 0; i < report.length; i++) {
    var row = report[i];
    sheet.getRange('A' + String(i + 2)).setValue(row[0]);
    sheet.getRange('B' + String(i + 2)).setValue(row[1]);
    sheet.getRange('C' + String(i + 2)).setValue(row[2]);
  }
}
The error appears here:
var report = UrlFetchApp.fetch(url.urls.apiUrl, httpOptions);
Error message:
Can not read property "apiUrl" of undefined object.

My final script, which creates the report in DCM, runs it synchronously, fetches the resulting report file, and writes the CSV into a new spreadsheet, is:
function generateReport() {
  var ss = SpreadsheetApp.getActiveSpreadsheet();
  var sheet = ss.getSheetByName('Reports');
  var startDate = Browser.inputBox("Enter a start date (format: 'yyyy-mm-dd')");
  var endDate = Browser.inputBox("Enter an end date (format: 'yyyy-mm-dd')");
  var ReportName = Browser.inputBox("Enter your future report name");
  var profileId = 2623334;
  var resource = {
    'kind': 'dfareporting#report',
    'accountId': '34405',
    'type': 'STANDARD',
    'name': ReportName,
    'criteria': {
      'dateRange': {
        'kind': 'dfareporting#dateRange',
        'startDate': startDate,
        'endDate': endDate,
      },
      'dimensions': [
        {
          'kind': 'dfareporting#sortedDimension',
          'name': 'dfa:date'
        }
      ],
      'metricNames': [
        'dfa:clicks', 'dfa:impressions'
      ],
    },
  };
  var httpOptions = {
    'headers': {'Authorization': 'Bearer ' + ScriptApp.getOAuthToken()}
  };
  var url = DoubleClickCampaigns.Reports.insert(resource, profileId); // create the report inside DCM
  var newReportId = Number(url.id); // get the id of the new report
  var additionalParameters = {
    'synchronous': 'true'
  };
  var newReportRun = DoubleClickCampaigns.Reports.run(profileId, newReportId, additionalParameters); // run the new report in DCM
  var newReportFileId = Number(newReportRun.id); // get the id of the new report file
  var newReportFile = DoubleClickCampaigns.Files.get(newReportId, newReportFileId);
  if (newReportFile.urls) {
    var httpOptions = {
      'headers': {'Authorization': 'Bearer ' + ScriptApp.getOAuthToken()}
    };
    var contents = UrlFetchApp.fetch(newReportFile.urls.apiUrl, httpOptions); // fetch the report file contents
    if (newReportFile.format == 'CSV') {
      var rows = Utilities.parseCsv(contents.getContentText());
      if (rows && rows.length) {
        var fileName = "DCM_test_work5";
        var spreadSheet = SpreadsheetApp.create(fileName);
        var sheet = spreadSheet.getActiveSheet();
        rows.map(function(r) { sheet.appendRow(r); });
      }
    } else {
      // Store the Excel file directly (note: DocsList is deprecated; DriveApp.createFile is its replacement)
      Logger.log('not CSV!');
      DocsList.createFile(contents.getBlob()).rename(newReportFile.fileName);
    }
  }
}
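One caveat: even with 'synchronous': 'true', a larger report may not be finished by the time Files.get is called, in which case newReportFile.urls is undefined and the if block above is simply skipped. A minimal polling sketch, assuming the DFA Reporting file resource's status field reports 'REPORT_AVAILABLE' once the file is ready (the helper name is illustrative):

// Illustrative helper: waits until a DCM report file is ready, then returns it.
// Assumes the DoubleClickCampaigns advanced service is enabled, as in the script above,
// and that the file resource exposes a 'status' field.
function waitForReportFile(reportId, fileId) {
  var maxAttempts = 10;
  for (var attempt = 0; attempt < maxAttempts; attempt++) {
    var file = DoubleClickCampaigns.Files.get(reportId, fileId);
    if (file.status === 'REPORT_AVAILABLE') {
      return file; // file.urls.apiUrl can now be fetched safely
    }
    Utilities.sleep(5000); // wait 5 seconds before checking again
  }
  throw new Error('Report file ' + fileId + ' was not ready after ' + maxAttempts + ' attempts');
}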

Related

Time-driven trigger not running in Apps Script, but manually running the function works correctly

I use a script to back up all my Google Docs files. When I run the function manually, it works correctly and makes a copy. But when I set it up with a time-driven trigger, it doesn't run, even though the execution log says it does.
[Project trigger screenshot]
I checked to make sure the project's time zone was correct and tried running it at different times, but it still doesn't run.
The script is below:
var BACKUP_FOLDER_ID = '16SGkgNCPeNn9DgB9RmhLAv-z5C_kCbCM';
var NATIVE_MIME_TYPES = {};
NATIVE_MIME_TYPES[MimeType.GOOGLE_DOCS] = MimeType.MICROSOFT_WORD;
NATIVE_MIME_TYPES[MimeType.GOOGLE_SHEETS] = MimeType.MICROSOFT_EXCEL;
NATIVE_MIME_TYPES[MimeType.GOOGLE_SLIDES] = MimeType.MICROSOFT_POWERPOINT;
var NATIVE_EXTENSIONS = {};
NATIVE_EXTENSIONS[MimeType.GOOGLE_DOCS] = '.docx';
NATIVE_EXTENSIONS[MimeType.GOOGLE_SHEETS] = '.xlsx';
NATIVE_EXTENSIONS[MimeType.GOOGLE_SLIDES] = '.pptx';
var BACKUP_MIME_TYPES = Object.keys(NATIVE_MIME_TYPES);

function backupAll() {
  const backupFolder = DriveApp.getFolderById(BACKUP_FOLDER_ID);
  BACKUP_MIME_TYPES.forEach(function(mimeType) {
    var files = DriveApp.getFilesByType(mimeType);
    while (files.hasNext()) {
      var file = files.next();
      if (file.getOwner() && file.getOwner().getEmail() == Session.getActiveUser().getEmail()) {
        backup(file, backupFolder);
      }
    }
  });
}

function backup(file, folder) {
  var targetName = file.getName() + ' ' + file.getId();
  var lastUpdated = file.getLastUpdated();
  var pdf = getPdfBlob(file);
  var native = getNativeBlob(file);
  var zip = Utilities.zip([pdf, native], targetName + '.zip');
  createOrUpdateFileForBlob(zip, folder, lastUpdated);
}

function createOrUpdateFileForBlob(blob, folder, ifOlderThan) {
  var existingFiles = folder.getFilesByName(blob.getName());
  if (existingFiles.hasNext()) {
    var file = existingFiles.next();
    if (file.getLastUpdated() < ifOlderThan) {
      updateFile(file, blob);
    }
  } else {
    folder.createFile(blob);
  }
}

function updateFile(file, blob) {
  const url = 'https://www.googleapis.com/upload/drive/v2/files/' + file.getId() + '?uploadType=media';
  const params = {
    method: 'put',
    headers: { Authorization: 'Bearer ' + ScriptApp.getOAuthToken() },
    payload: blob
  };
  var response = UrlFetchApp.fetch(url, params);
  if (response.getResponseCode() < 200 || response.getResponseCode() > 299) {
    throw 'Failed to update file named ' + file.getName();
  }
}

function getPdfBlob(file) {
  var blob = file.getAs('application/pdf');
  return blob;
}

function getNativeBlob(file) {
  const nativeMimeType = NATIVE_MIME_TYPES[file.getMimeType()];
  const extension = NATIVE_EXTENSIONS[file.getMimeType()];
  const url = 'https://www.googleapis.com/drive/v2/files/' + file.getId() + '/export?mimeType=' + nativeMimeType;
  const params = {
    method: 'get',
    headers: { Authorization: 'Bearer ' + ScriptApp.getOAuthToken() }
  };
  const blob = UrlFetchApp.fetch(url, params).getBlob();
  blob.setName(file.getName() + extension);
  return blob;
}
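One diagnostic worth trying is to install the trigger from code and log the installed triggers, so the execution log shows whether backupAll actually starts. A minimal sketch (the daily 6 AM schedule is only an example):

// Installs a daily time-driven trigger for backupAll and logs the installed triggers.
function installBackupTrigger() {
  // Remove any existing triggers pointing at backupAll to avoid duplicates.
  ScriptApp.getProjectTriggers().forEach(function(trigger) {
    if (trigger.getHandlerFunction() === 'backupAll') {
      ScriptApp.deleteTrigger(trigger);
    }
  });
  ScriptApp.newTrigger('backupAll')
      .timeBased()
      .everyDays(1)
      .atHour(6) // example schedule; adjust as needed
      .create();
  Logger.log('Installed triggers: ' + ScriptApp.getProjectTriggers().length);
}

Separately, Session.getActiveUser().getEmail() can return an empty string in some execution contexts; if that happens under the trigger, the owner check in backupAll silently skips every file, which would look exactly like a run that does nothing.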

How to iterate over each cell in a Google Sheets range with hyperlinks and send batch photos to Telegram?

For example, I have this range with 10 Drive hyperlinks to images:
This script currently sends only one photo (F1) to a group via a Telegram bot, but I need to iterate over each cell in this range to send every uploaded image (uploaded via Google Forms, max. 10). There could be anywhere from one to ten pictures, so I need to stop the iteration at the first empty cell, such as N1 or O1.
photo_url = "DRIVE_URL";
id = "GROUP_ID";
sendPhoto(id, photo_url);

function sendPhoto(id, photo_url) {
  var API_TOKEN = "BOT_TOKEN";
  var payload = {
    'method': 'sendPhoto',
    'chat_id': String(id),
    'photo': photo_url,
    'caption': "Foto 1"
  };
  var data = {
    "method": "post",
    "payload": payload,
    'muteHttpExceptions': true,
  };
  //var response =
  UrlFetchApp.fetch('https://api.telegram.org/bot' + API_TOKEN + '/', data);
  //Logger.log(response);
}
*Plus, is it the same process to change the name or caption of every image, i.e. making this code dynamic?
'caption': "Foto 1"
'caption': "Foto 2"
'caption': "Foto 3" // ...etc., up to a max. of 10; sometimes a cell is empty.
*Edit: this is the working code that only finds the data (photo URLs in Drive) inside a range of 10 cells (F2:O2):
function loopImage() {
  var ss = SpreadsheetApp.getActiveSpreadsheet();
  var sh = ss.getSheetByName("AWESOME");
  var vals = SpreadsheetApp.getActiveSpreadsheet().getSheetByName("AWESOME")
      .getRange(2 + ":" + 2)
      .getValues()[0],
      lastColNum = vals.length;
  while (!vals.pop()) {
    --lastColNum;
  }
  var range = sh.getRange(2, 6, 2, lastColNum - 5); // The range is adjusted automatically based on the last column with data in F2:O2.
  var data = range.getValues();
  var photoArr = [];
  for (var i = 0; i < data[0].length; i++) {
    photoArr.push({"type": "photo", "media": data[0][i], "caption": "Foto " + (i + 1)});
  }
  if (photoArr.length > 0) {
    sendPhoto(JSON.stringify(photoArr));
  }
}
function sendPhoto(photoArray) {
  id = "GROUP_TOKEN";
  var API_TOKEN = "BOT_API";
  var payload = {
    'method': 'sendMediaGroup',
    'chat_id': String(id),
    'media': photoArray,
  };
  var data = {
    "method": "post",
    "payload": payload,
    "muteHttpExceptions": true,
  };
  var response = UrlFetchApp.fetch('https://api.telegram.org/bot' + API_TOKEN + '/', data);
  Logger.log(response);
}
The script below loops through the first row of the sheet, starting from column F up to the last column with data, creates an array of InputMediaPhoto objects, and passes the array to the sendPhoto function, which sends it to a Telegram group chat.
Code:
function loopImage() {
  var ss = SpreadsheetApp.getActiveSpreadsheet();
  var sh = ss.getSheetByName("Enter Sheet Name Here");
  var range = sh.getRange(1, 6, 1, sh.getLastColumn() - 5); // The range is adjusted automatically based on the last column of the first row.
  var data = range.getValues();
  var photoArr = [];
  for (var i = 0; i < data[0].length; i++) {
    photoArr.push({"type": "photo", "media": data[0][i], "caption": "Foto " + (i + 1)});
  }
  if (photoArr.length > 0) {
    sendPhoto(JSON.stringify(photoArr));
  }
}
function sendPhoto(photoArray) {
  id = "Insert Chat ID here";
  var API_TOKEN = "Insert API TOKEN HERE";
  var payload = {
    'method': 'sendMediaGroup',
    'chat_id': String(id),
    'media': photoArray,
  };
  var data = {
    "method": "post",
    "payload": payload,
    'muteHttpExceptions': true,
  };
  var response = UrlFetchApp.fetch('https://api.telegram.org/bot' + API_TOKEN + '/', data);
}
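If some of the ten cells can be empty, as in the original question, the loop above would push blank media entries. A small tweak, assuming the same one-row layout, is to stop at the first empty cell while building the array (the helper name is illustrative):

// Illustrative helper: builds the photo array and stops at the first empty cell,
// so only actual URLs are sent.
function buildPhotoArray(rowValues) {
  var photoArr = [];
  for (var i = 0; i < rowValues.length; i++) {
    var url = rowValues[i];
    if (url === '' || url == null) {
      break; // stop at the first empty cell, as the question asks
    }
    photoArr.push({"type": "photo", "media": url, "caption": "Foto " + (i + 1)});
  }
  return photoArr;
}

loopImage could then call sendPhoto(JSON.stringify(buildPhotoArray(data[0]))) only when the returned array is non-empty.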
Sample data, output in Telegram, and file caption: (screenshots)
References:
Telegram sendMediaGroup
Class Range

Can't create a KuCoin order with Google Apps Script

I can get account details, so my authentication appears correct, but when I try to modify that code to create an order it returns a 401 with "msg":"Invalid KC-API-SIGN". The modification involved adding the method and payload and changing endpoint (/api/v1/accounts) to endpoint2 (/api/v1/orders).
function kucoinTest5() {
  var sheet = SpreadsheetApp.getActiveSpreadsheet().getSheetByName("xxxxx");
  var key = sheet.getRange("xx").getValue();
  var secret = sheet.getRange("xx").getValue();
  var passphrase = sheet.getRange("xx").getValue();
  var host = 'https://openapi-sandbox.kucoin.com';
  //var endpoint = '/api/v1/accounts';
  var endpoint2 = '/api/v1/orders';
  var timestamp = '' + new Date().getTime();
  var strForSign = timestamp + 'GET' + endpoint2;
  var signature = Utilities.computeHmacSignature(Utilities.MacAlgorithm.HMAC_SHA_256, strForSign, secret);
  var encodedPass = Utilities.computeHmacSignature(Utilities.MacAlgorithm.HMAC_SHA_256, passphrase, secret);
  var url = host + endpoint2;
  var requestOptions = {
    'method': "POST",
    'headers': {
      'KC-API-KEY': key,
      'KC-API-TIMESTAMP': timestamp,
      'KC-API-SIGN': Utilities.base64Encode(signature),
      'KC-API-KEY-VERSION': '2',
      'KC-API-PASSPHRASE': Utilities.base64Encode(encodedPass),
    },
    'payload': {
      'clientOid': 'test1',
      'side': 'buy',
      'symbol': 'BTC-USDT',
      'type': 'market',
      'tradeType': 'TRADE',
      'funds': 100
    },
    muteHTTPExceptions: true,
  };
  var httpRequest = UrlFetchApp.fetch(url, requestOptions);
  //var getContext = httpRequest.getContentText();
  Logger.log(httpRequest);
}
Solved the above problem; here is the code to post a buy order on KuCoin:
function kuCoinTest5() {
  var sheet = SpreadsheetApp.getActiveSpreadsheet().getSheetByName("xxxx");
  var key = sheet.getRange("xx").getValue();
  var secret = sheet.getRange("xx").getValue();
  var passphrase = sheet.getRange("xx").getValue();
  var payload = {
    'clientOid': "UUID",
    'side': "buy",
    'symbol': "BTC-USDT",
    'type': "market",
    'tradeType': "TRADE",
    'funds': "100"
  };
  var data = JSON.stringify(payload);
  //Logger.log(data);
  var host = 'https://openapi-sandbox.kucoin.com';
  var timeStamp = '' + new Date().getTime();
  //var nowStr = "" + nowDate;
  var endpoint = '/api/v1/accounts';
  var endpoint2 = '/api/v1/orders';
  var strForSign = timeStamp + "POST" + endpoint2 + data;
  //Logger.log(strForSign);
  var signature = Utilities.computeHmacSignature(Utilities.MacAlgorithm.HMAC_SHA_256, strForSign, secret);
  var encodedPass = Utilities.computeHmacSignature(Utilities.MacAlgorithm.HMAC_SHA_256, passphrase, secret);
  var url = host + endpoint2;
  //Logger.log(url);
  var options = {
    "method": "POST",
    'headers': {
      'KC-API-KEY': key,
      'KC-API-TIMESTAMP': timeStamp,
      'KC-API-SIGN': Utilities.base64Encode(signature),
      'KC-API-KEY-VERSION': '2',
      'KC-API-PASSPHRASE': Utilities.base64Encode(encodedPass)
    },
    "contentType": "application/json",
    "payload": data,
    //'payload' : {'clientOid':"45234524625",
    //'side':"buy",
    //'symbol':"BTC-USDT",
    //'type':"market",
    //'tradeType':"TRADE",
    //'funds':"100"},
    "muteHttpExceptions": true,
  };
  var result = UrlFetchApp.getRequest(url, options);
  Logger.log(result); // a better way to debug: inspect the request without sending it
  var result = UrlFetchApp.fetch(url, options); // works perfectly in my case
  Logger.log(result);
}
I had the same problem with a GET request, and finally solved it thanks to the above code. Here is my code:
function KuCoinRequest() {
  var key = 'xx';
  var secret = 'xx';
  var passphrase = 'xx';
  var url = "https://api-futures.kucoin.com/"; // endpoint host
  var timestamp = '' + Number(new Date().getTime()).toFixed(0);
  var command = "GET";
  var endpoint = "api/v1/fills";
  var str_to_sign = timestamp + command + "/" + endpoint;
  var signature = Utilities.computeHmacSignature(Utilities.MacAlgorithm.HMAC_SHA_256, str_to_sign, secret);
  var encodedPass = Utilities.computeHmacSignature(Utilities.MacAlgorithm.HMAC_SHA_256, passphrase, secret);
  var params = {
    'method': "GET",
    'headers': {
      'KC-API-SIGN': Utilities.base64Encode(signature),
      'KC-API-KEY': key,
      'KC-API-TIMESTAMP': timestamp,
      'KC-API-PASSPHRASE': Utilities.base64Encode(encodedPass),
      'KC-API-KEY-VERSION': '2',
      'muteHttpExceptions': true
    }
  };
  query = url + endpoint;
  var data = UrlFetchApp.fetch(query, params);
  Logger.log(data.getContentText());
  printJsonKucoin(data, endpoint); // helper defined elsewhere in the original script
  return data;
}
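As both working snippets show, the KC-API v2 signature is the Base64-encoded HMAC-SHA256 of timestamp + method + endpoint + body (with an empty body for GET requests), and KC-API-PASSPHRASE is the Base64-encoded HMAC-SHA256 of the passphrase itself. A small helper sketch that pulls this out (the function name and parameters are illustrative):

// Illustrative helper: builds the KC-API v2 auth headers for a request.
// method is e.g. 'GET' or 'POST'; bodyJson is '' for GET requests.
function buildKucoinHeaders(key, secret, passphrase, method, endpoint, bodyJson) {
  var timestamp = '' + new Date().getTime();
  var strToSign = timestamp + method + endpoint + bodyJson;
  var signature = Utilities.computeHmacSignature(Utilities.MacAlgorithm.HMAC_SHA_256, strToSign, secret);
  var signedPassphrase = Utilities.computeHmacSignature(Utilities.MacAlgorithm.HMAC_SHA_256, passphrase, secret);
  return {
    'KC-API-KEY': key,
    'KC-API-TIMESTAMP': timestamp,
    'KC-API-SIGN': Utilities.base64Encode(signature),
    'KC-API-PASSPHRASE': Utilities.base64Encode(signedPassphrase),
    'KC-API-KEY-VERSION': '2'
  };
}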

Google Sheets Apps Script: export selected columns to CSV

I have this Google Sheets script which exports my current Google Sheet in CSV format to my Drive folder.
I tried to select column indexes, e.g. columnIndex: 1,2,6,10, but without success; it still exports the full data sheet.
function onOpen() {
  SpreadsheetApp.getUi()
      .createMenu('CSV')
      .addItem('Export to the file', 'userActionExportToCSV')
      .addToUi();
}

// https://drive.google.com/file/d/1111111111/view?usp=sharing
var CSV_FILE_ID = '11111111111__8SC0RDV';
var SHEETID = '0';

function userActionExportToCSV() {
  var res = exportToCSV_(
    CSV_FILE_ID,
    SpreadsheetApp.getActive().getId(),
    SpreadsheetApp.getActiveSheet().getSheetId()
  );
  Logger.log(res);
}
// Note: the enclosing function declaration is missing from this snippet;
// a signature like the following is assumed so the body reads as a complete function.
function importCsvToSheet_(csvId, spreadsheetId, sheetId) {
  var data = DriveApp.getFileById(csvId)
      .getBlob()
      .getDataAsString();
  // Clear the sheet
  var updateCellsRequest = Sheets.newUpdateCellsRequest();
  updateCellsRequest.fields = 'userEnteredValue';
  updateCellsRequest.range = { sheetId: sheetId };
  batchUpdateSpreadsheet_(
    {
      updateCells: updateCellsRequest,
    },
    spreadsheetId
  );
  var pasteDataRequest = Sheets.newPasteDataRequest();
  pasteDataRequest.coordinate = {
    sheetId: SpreadsheetApp.getActiveSheet().getSheetId(),
    rowIndex: 0,
    columnIndex: 0,
  };
  pasteDataRequest.data = data;
  pasteDataRequest.type = SpreadsheetApp.CopyPasteType.PASTE_VALUES;
  pasteDataRequest.delimiter = ',';
  var batchUpdateSpreadsheetResponse = batchUpdateSpreadsheet_(
    {
      pasteData: pasteDataRequest,
    },
    spreadsheetId
  );
  return batchUpdateSpreadsheetResponse;
}
function exportToCSV_(csvId, spreadsheetId, sheetId) {
  var url = Utilities.formatString(
    'https://docs.google.com/spreadsheets/export?id=%s&exportFormat=csv&gid=%s',
    spreadsheetId,
    sheetId
  );
  var data = UrlFetchApp.fetch(url, {
    headers: { Authorization: 'Bearer ' + ScriptApp.getOAuthToken() },
    muteHttpExceptions: true
  }).getBlob();
  DriveApp.getFileById(csvId).setContent(data.getDataAsString());
}

function batchUpdateSpreadsheet_(request, spreadsheetId) {
  var resource = {
    requests: [],
  };
  resource.requests.push(request);
  var batchUpdateSpreadsheetResponse = Sheets.Spreadsheets.batchUpdate(
    resource,
    spreadsheetId
  );
  return batchUpdateSpreadsheetResponse;
}
I would like to export only some selected columns, for example columns 1, 2, 6, and 10.
How can I modify this script so that only the selected columns end up in the CSV file saved to my Google Drive?
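Since the spreadsheet export URL always returns the whole sheet, one approach is to build the CSV string yourself from just the wanted columns and write it into the same Drive file. A minimal sketch under that assumption (the function name is illustrative, the quoting rules are simplified, and CSV_FILE_ID is reused from the script above):

// Writes only the selected columns (1-based) of the active sheet to the CSV file.
// Quoting is simplified: values containing commas or quotes are wrapped and escaped.
function exportSelectedColumnsToCSV(columnNumbers) {
  var sheet = SpreadsheetApp.getActiveSheet();
  var values = sheet.getDataRange().getValues();
  var lines = values.map(function(row) {
    return columnNumbers.map(function(col) {
      var cell = String(row[col - 1]);
      if (cell.indexOf(',') !== -1 || cell.indexOf('"') !== -1) {
        cell = '"' + cell.replace(/"/g, '""') + '"';
      }
      return cell;
    }).join(',');
  });
  DriveApp.getFileById(CSV_FILE_ID).setContent(lines.join('\r\n'));
}

// Example: exportSelectedColumnsToCSV([1, 2, 6, 10]);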

Google Script timeouts

Problem:
The import function importXLSXtoGsheet() times out before it can process all 52 XLSX files. I receive the error:
Exception: Time-out: https://www.googleapis.com/batch/drive/v3 at [unknown function](Code:63) at Do(Code:8) at importXLSXtoGsheet(Code:71)
If I run the function with 1 file in the importXLSX folder, it works correctly.
Script explained:
I've got 52 folders, each containing one spreadsheet file.
Each folder is shared with different colleagues.
During the day, people make changes to the files.
At the end of the day, all files are collected in one folder (gsheetFolder) and converted to XLSX files using the collectAndExportXLS function.
In the evening these files are copied to a local server (using a batch script and Drive sync), which updates other information in the files, and they are then copied back to the importXLSXfolder.
In the morning the importXLSXtoGsheet function runs and converts all XLSX files in the importXLSXfolder to Gsheet files in the gsheetFolder.
After that, sortGsheetFiles runs, sorting and moving every Gsheet file into one of the 52 folders (using an array list from the current spreadsheet).
Other actions include cleaning the folders with the deleteFolder function.
Script:
var gsheetFolder = '###';
var XLSXfolder = '###';
var importXLSXfolder = '###';

// Modified
function deleteFolder(folderId) {
  var url = "https://www.googleapis.com/drive/v3/files?q='" + folderId + "'+in+parents+and+trashed%3Dfalse&fields=files%2Fid&access_token=" + ScriptApp.getOAuthToken();
  var res = UrlFetchApp.fetch(url);
  var obj = JSON.parse(res.getContentText());
  var reqs = obj.files.map(function(e) {return {method: "DELETE", endpoint: "https://www.googleapis.com/drive/v3/files/" + e.id}});
  var requests = {batchPath: "batch/drive/v3", requests: reqs};
  if (requests.requests.length > 0) BatchRequest.Do(requests);
}

// Added
function deleteFiles(files) {
  var reqs = files.map(function(e) {return {method: "DELETE", endpoint: "https://www.googleapis.com/drive/v3/files/" + e.id}});
  var requests = {batchPath: "batch/drive/v3", requests: reqs};
  if (requests.requests.length > 0) BatchRequest.Do(requests);
}

// Added
function getValuesFromSpreadsheet() {
  var ss = SpreadsheetApp.getActiveSpreadsheet();
  var sheet = ss.getSheets()[0];
  return sheet.getRange("A2:B53").getValues();
}

// Modified
function sortGsheetFiles() {
  var url = "https://www.googleapis.com/drive/v3/files?q='" + gsheetFolder + "'+in+parents+and+mimeType%3D'" + MimeType.GOOGLE_SHEETS + "'+and+trashed%3Dfalse&fields=files(id%2Cname)&access_token=" + ScriptApp.getOAuthToken();
  var res = UrlFetchApp.fetch(url);
  var obj = JSON.parse(res.getContentText());
  var values = getValuesFromSpreadsheet();
  var reqs = values.reduce(function(ar, e) {
    for (var i = 0; i < obj.files.length; i++) {
      if (obj.files[i].name == e[0]) {
        ar.push({
          method: "PATCH",
          endpoint: "https://www.googleapis.com/drive/v3/files/" + obj.files[i].id + "?addParents=" + e[1] + "&removeParents=" + gsheetFolder,
        });
        break;
      }
    }
    return ar;
  }, []);
  var requests = {batchPath: "batch/drive/v3", requests: reqs};
  if (requests.requests.length > 0) BatchRequest.Do(requests);
  deleteFolder(importXLSXfolder);
}

// Modified
function importXLSXtoGsheet() {
  deleteFolder(XLSXfolder);
  var url = "https://www.googleapis.com/drive/v3/files?q='" + importXLSXfolder + "'+in+parents+and+mimeType%3D'" + MimeType.MICROSOFT_EXCEL + "'+and+trashed%3Dfalse&fields=files(id%2Cname)&access_token=" + ScriptApp.getOAuthToken();
  var res = UrlFetchApp.fetch(url);
  var obj = JSON.parse(res.getContentText());
  var reqs = obj.files.map(function(e) {return {
    method: "POST",
    endpoint: "https://www.googleapis.com/drive/v3/files/" + e.id + "/copy",
    requestBody: {mimeType: MimeType.GOOGLE_SHEETS, name: e.name + ".xlsx", parents: [gsheetFolder]},
  }});
  var requests = {batchPath: "batch/drive/v3", requests: reqs};
  if (requests.requests.length > 0) BatchRequest.Do(requests);
  deleteFolder(importXLSXfolder);
}

// Modified
function ConvertBackToXLS(fileList) {
  var token = ScriptApp.getOAuthToken();
  var reqs1 = fileList.map(function(e) {return {
    method: "GET",
    url: "https://docs.google.com/spreadsheets/export?id=" + e.id + "&exportFormat=xlsx&access_token=" + token,
  }});
  var res = UrlFetchApp.fetchAll(reqs1);
  var reqs2 = res.map(function(e, i) {
    var metadata = {name: fileList[i].name, parents: [XLSXfolder]};
    var form = FetchApp.createFormData(); // Create form data
    form.append("metadata", Utilities.newBlob(JSON.stringify(metadata), "application/json"));
    form.append("file", e.getBlob());
    var url = "https://www.googleapis.com/upload/drive/v3/files?uploadType=multipart";
    return {url: url, method: "POST", headers: {Authorization: "Bearer " + token}, body: form};
  });
  FetchApp.fetchAll(reqs2);
}

// Modified
function collectAndExportXLS() {
  deleteFolder(gsheetFolder);
  var values = getValuesFromSpreadsheet();
  var reqs1 = values.reduce(function(ar, e) {
    if (e[0] && e[1]) {
      ar.push({
        method: "GET",
        endpoint: "https://www.googleapis.com/drive/v3/files?q='" + e[1] + "'+in+parents+and+trashed%3Dfalse&fields=files(id%2Cname)",
      });
    }
    return ar;
  }, []);
  var resForReq1 = BatchRequest.Do({batchPath: "batch/drive/v3", requests: reqs1});
  var temp = resForReq1.getContentText().split("--batch");
  var files = temp.slice(1, temp.length - 1).map(function(e) {return JSON.parse(e.match(/{[\S\s]+}/g)[0])});
  var fileList = files.reduce(function(ar, e) {return ar.concat(e.files.map(function(f) {return f}))}, []);
  ConvertBackToXLS(fileList);
  deleteFiles(fileList);
}
From your question, my understanding is as follows:
When importXLSXtoGsheet() is run with 52 files, the error occurs.
When importXLSXtoGsheet() is run with fewer than 13 files, no error occurs.
The functions other than importXLSXtoGsheet() work fine.
If my understanding is correct, as one workaround you can set a maximum number of files to process per batch request. When this is reflected in importXLSXtoGsheet() in your script, the modified script is as follows.
Modified script:
function importXLSXtoGsheet() {
  deleteFolder(XLSXfolder);
  var url = "https://www.googleapis.com/drive/v3/files?q='" + importXLSXfolder + "'+in+parents+and+mimeType%3D'" + MimeType.MICROSOFT_EXCEL + "'+and+trashed%3Dfalse&fields=files(id%2Cname)&access_token=" + ScriptApp.getOAuthToken();
  var res = UrlFetchApp.fetch(url);
  var obj = JSON.parse(res.getContentText());
  // I modified the script below.
  var n = 10; // Maximum number of files per batch request.
  var files = [];
  var len = obj.files.length;
  // Split obj.files into chunks of at most n files (splice removes the taken files from obj.files).
  for (var i = 0; i < len; i++) {
    files.push(obj.files.splice(0, n));
    len -= n - 1;
  }
  // Run one batch request per chunk.
  files.forEach(function(f) {
    var reqs = f.map(function(e) {return {
      method: "POST",
      endpoint: "https://www.googleapis.com/drive/v3/files/" + e.id + "/copy",
      requestBody: {mimeType: MimeType.GOOGLE_SHEETS, name: e.name + ".xlsx", parents: [gsheetFolder]},
    }});
    var requests = {batchPath: "batch/drive/v3", requests: reqs};
    if (requests.requests.length > 0) BatchRequest.Do(requests);
  });
  deleteFolder(importXLSXfolder);
}
Note:
In this sample script, 10 files are processed per batch request. If you want to change this, please modify var n = 10;.
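A generic chunking helper along these lines (the helper name is illustrative) could also be reused by the other batch functions, such as deleteFolder, if they ever need to handle large folders:

// Illustrative helper: splits an array into chunks of at most chunkSize elements.
function chunkArray(items, chunkSize) {
  var chunks = [];
  for (var i = 0; i < items.length; i += chunkSize) {
    chunks.push(items.slice(i, i + chunkSize));
  }
  return chunks;
}

// Example: run one batch request per chunk of 10 requests.
// chunkArray(reqs, 10).forEach(function(chunk) {
//   BatchRequest.Do({batchPath: "batch/drive/v3", requests: chunk});
// });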