Speed up Apps Script from MySQL query to Google Sheets - mysql

I am facing an issue, like many before me, with a Google Apps Script timing out. I am reading the data from an indexed/persisted table in a MySQL database; the table in question has 71 columns and a total of 28,000 rows. The sheet in Google Sheets that I am writing to has no calculations on it which might slow things down; those happen on other sheets.
Could you please review the script below and propose any changes to help avoid the timeout?
var server = 'xx.xx.xx.xxx';
var port = xxxx;
var dbName = 'test';
var username = 'test';
var password = 'xxx';
var url = 'jdbc:mysql://' + server + ':' + port + '/' + dbName;

function readDataPast() {
  var conn = Jdbc.getConnection(url, username, password);
  var stmt = conn.createStatement();
  var results = stmt.executeQuery('SELECT * FROM test.test_table');
  var metaData = results.getMetaData();
  var numCols = metaData.getColumnCount();
  var spreadsheet = SpreadsheetApp.getActive();
  var sheet = spreadsheet.getSheetByName('Raw_Data');
  sheet.clearContents();
  var arr = [];
  let row = [];
  for (var col = 0; col < numCols; col++) {
    row.push(metaData.getColumnName(col + 1));
  }
  arr.push(row);
  while (results.next()) {
    row = [];
    for (var col = 0; col < numCols; col++) {
      row.push(results.getString(col + 1));
    }
    arr.push(row);
  }
  sheet.getRange(1, 1, arr.length, arr[0].length).setValues(arr);
  results.close();
  stmt.close();
}

Issue:
I don't think the script can be made considerably faster, since potential improvements (e.g. using the Sheets API, as suggested by Ninca Tirtil) don't significantly affect the bulk of the script (iterating through 28,000 rows).
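For reference, the Sheets API write mentioned there would look something like the sketch below. This is only a sketch under assumptions (it requires enabling the Sheets API under Advanced Google services, and the function name is illustrative); it replaces setValues with a single API call, but the row iteration still dominates the runtime:
function writeWithSheetsApi(arr) {
  // 'arr' is the 2D array built from the ResultSet, headers included.
  var ss = SpreadsheetApp.getActive();
  Sheets.Spreadsheets.Values.update(
    { values: arr },
    ss.getId(),
    "'Raw_Data'!A1",
    { valueInputOption: 'RAW' }
  );
}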
Workaround:
Therefore, instead of trying to speed it up, I'd suggest accomplishing this in multiple executions. To that end, I'd do the following:
Check execution time after each iteration. If this time is close to the time limit, end the loop and write the current data to the sheet. You can use the Date object for this.
Create the following time-based trigger at the end of your function: after(durationMilliseconds). Thanks to this, the function will fire automatically after the number of milliseconds you indicate. After each execution, a trigger will be created to fire the next execution.
Because you want to split the loop, you have to store the row index somewhere (for example, with PropertiesService at the end of each execution) and retrieve it at the beginning of the next, so that each successive execution resumes the loop where the last one left off. You can get the row index via getRow(), and then move to that row in the next execution via relative(rows).
Code sample:
var maxTimeDiff = 1000 * 60 * 5; // 5 minutes
const PROPERTY_KEY = "Row index";

function setRowIndex(rowIndex) {
  const scriptProps = PropertiesService.getScriptProperties();
  scriptProps.setProperty(PROPERTY_KEY, rowIndex);
}

function getRowIndex() {
  const scriptProps = PropertiesService.getScriptProperties();
  const rowIndex = Number(scriptProps.getProperty(PROPERTY_KEY)); // properties are stored as strings
  return rowIndex;
}

function createTrigger() {
  ScriptApp.newTrigger("readDataPast")
    .timeBased()
    .after(60 * 1000) // Next execution after a minute
    .create();
}

function readDataPast() {
  var startTime = new Date();
  var conn = Jdbc.getConnection(url, username, password);
  var stmt = conn.createStatement();
  var results = stmt.executeQuery('SELECT * FROM test.test_table');
  var metaData = results.getMetaData();
  var numCols = metaData.getColumnCount(); // needed in every execution, not just the first
  var spreadsheet = SpreadsheetApp.getActive();
  var sheet = spreadsheet.getSheetByName('Raw_Data');
  var rowIndex = getRowIndex();
  var arr = [];
  let row = [];
  if (!rowIndex) { // Clear sheet and add headers if first execution
    sheet.clearContents();
    for (var col = 0; col < numCols; col++) {
      row.push(metaData.getColumnName(col + 1));
    }
    arr.push(row);
  } else {
    results.relative(rowIndex); // Skip the rows written in previous executions
  }
  while (results.next()) {
    row = [];
    for (var col = 0; col < numCols; col++) {
      row.push(results.getString(col + 1));
    }
    arr.push(row);
    if (new Date() - startTime > maxTimeDiff) break; // Stop before hitting the execution time limit
  }
  var currentRow = results.getRow(); // 0 if all rows have been iterated
  setRowIndex(currentRow);
  var lastRow = sheet.getLastRow();
  sheet.getRange(lastRow + 1, 1, arr.length, arr[0].length).setValues(arr);
  results.close();
  stmt.close();
  conn.close();
  if (currentRow) createTrigger(); // Create a trigger if the iteration is not finished
}
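One caveat: one-off triggers remain in the project's trigger list after they fire, and Apps Script limits how many triggers a script can have, so it may be worth deleting spent ones at the start of each run. A minimal sketch (the handler name matches readDataPast above; treat the helper as an illustrative addition):
function deleteSpentTriggers() {
  // Remove previous one-off triggers for this handler so they
  // don't accumulate against the per-script trigger limit.
  ScriptApp.getProjectTriggers().forEach(function(trigger) {
    if (trigger.getHandlerFunction() === "readDataPast") {
      ScriptApp.deleteTrigger(trigger);
    }
  });
}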

Related

How can a Google Sheets form update records from results using Google Apps Script?

I have a program that filters and updates data from an existing sheet.
The program works as follows:
1. Find and filter out the required value.
2. Enter data in the [Adjustment] column, then update the database in the Record sheet.
I have tried, but my program doesn't seem to work.
When I edit the program code and run it, it affects the other columns and the [Adjustment] column value is entered wrong.
Here is my program:
function Searchold() {
  var ss = SpreadsheetApp.getActiveSpreadsheet();
  var shtRecords = ss.getSheetByName("RECORD");
  var shtForm = ss.getSheetByName("TEST");
  var records = shtRecords.getDataRange().getValues();
  var sField = shtForm.getRange("A3").getValue();
  var sValue = shtForm.getRange("A6").getValue();
  var sCol = records[0].lastIndexOf(sField);
  var results = records.filter(function(e) { return sValue == e[sCol]; });
  if (results.length == 0) {
    SpreadsheetApp.getUi().alert("not found values");
  } else {
    shtForm.getRange(9, 1, results.length, results[0].length).setValues(results);
  }
}
function Updatenew() {
  var ss = SpreadsheetApp.getActiveSpreadsheet();
  var shtRecords = ss.getSheetByName("RECORD");
  var shtForm = ss.getSheetByName("TEST");
  var LastRow = shtForm.getRange("A8").getNextDataCell(SpreadsheetApp.Direction.DOWN).getLastRow();
  var newData = shtForm.getRange(9, 1, LastRow - 1, 7).getValues();
  for (var i = 0; i < newData.length; i++) {
    var oldData = shtRecords.getDataRange().getValues();
    for (var j = 0; j < oldData.length; j++) {
      if (newData[i][0] == oldData[j][0]) {
        var newData2 = [newData[i]];
        shtRecords.getRange(j + 1, 1, 1, newData2[0].length).setValues(newData2);
      }
    }
  }
}
Can you help me with the update program? Thank you sincerely.
Modification points:
Looking at your Updatenew script, shtRecords.getDataRange().getValues() is retrieved again on every pass of the loop for (var i = 0; i < newData.length; i++) {}, and each matching row is overwritten by each row of newData. Because of this, all searched rows in the "RECORD" sheet end up with the same value. I thought that this might be the reason for your issue.
var oldData = shtRecords.getDataRange().getValues(); only needs to be called once.
In order to avoid this issue by modifying your script, as one of several methods, how about the following modification?
From:
for (var i = 0; i < newData.length; i++) {
  var oldData = shtRecords.getDataRange().getValues();
  for (var j = 0; j < oldData.length; j++) {
    if (newData[i][0] == oldData[j][0]) {
      var newData2 = [newData[i]];
      shtRecords.getRange(j + 1, 1, 1, newData2[0].length).setValues(newData2);
    }
  }
}
To:
var oldData = shtRecords.getDataRange().getValues();
for (var j = 0; j < oldData.length; j++) {
  for (var i = 0; i < newData.length; i++) {
    if (newData[0][0] == oldData[j][0]) {
      var newData2 = newData.splice(0, 1);
      shtRecords.getRange(j + 1, 1, 1, newData2[0].length).setValues(newData2);
      break;
    }
  }
}
Note:
In the above modification, setValues is used in a loop, so the process cost becomes high. If you want to reduce the process cost of the script, how about using the Sheets API? In that case, the loop can be replaced with a single batch request, as follows. Please enable the Sheets API under Advanced Google services.
To:
var temp = newData.slice();
var data = shtRecords.getDataRange().getValues().reduce((ar, r, i) => {
  if (temp.length > 0 && temp[0][0] == r[0]) { // guard against temp emptying out
    var t = temp.splice(0, 1);
    t[0][2] = Utilities.formatDate(t[0][2], Session.getScriptTimeZone(), "dd/MM/yyyy");
    t[0][4] = Utilities.formatDate(t[0][4], Session.getScriptTimeZone(), "dd/MM/yyyy");
    ar.push({ range: `'RECORD'!A${i + 1}`, values: t });
  }
  return ar;
}, []);
Sheets.Spreadsheets.Values.batchUpdate({ data, valueInputOption: "USER_ENTERED" }, ss.getId());

Bandwidth quota exceeded. Limit the data transfer speed

I received the following error on a script that we run on a time-based trigger.
Exception: Bandwidth quota exceeded: https://app.enzyme.finance/api/vault-performance?vault=0x95fca2e84556443c1bc0c6416ee17be0e6844cd0&currency=eur&network=ethereum. Limit the data transfer speed.
I have no clue which quota I'm exceeding, so how to fix this issue is quite a mystery.
In the code below I'm doing the following:
Delete any existing triggers: deleteTriggers()
Schedule a trigger for time 00:00: scheduledTrigger(0,0)
Fetch data from the URL and parse that data into Google Sheets: function_Triggered()
I run this code on a time-driven day timer between 11 PM and midnight.
The code below:
function setTrigger() {
  deleteTriggers();
  scheduledTrigger(0, 0);
}

function scheduledTrigger(hours, minutes) {
  var today_D = new Date();
  var year = today_D.getFullYear();
  var month = today_D.getMonth();
  var day = today_D.getDate();
  var pars = [year, month, day, hours, minutes];
  var scheduled_D = new Date(...pars);
  scheduled_D.setDate(scheduled_D.getDate() + 1);
  var hours_remain = Math.abs(scheduled_D - today_D) / 36e5;
  ScriptApp.newTrigger("function_Triggered")
    .timeBased()
    .after(hours_remain * 60 * 60 * 1000)
    .create();
}

function deleteTriggers() {
  var triggers = ScriptApp.getProjectTriggers();
  for (var i = 0; i < triggers.length; i++) {
    if (triggers[i].getHandlerFunction() == "function_Triggered") {
      ScriptApp.deleteTrigger(triggers[i]);
    }
  }
}

function function_Triggered() {
  var ss = SpreadsheetApp.getActiveSpreadsheet();
  var raw = ss.getSheetByName('raw');
  var current = ss.getSheetByName('nav.current');
  var database = ss.getSheetByName('nav.database');
  var url = "https://app.enzyme.finance/api/vault-performance?vault=0x95fca2e84556443c1bc0c6416ee17be0e6844cd0&currency=eur&network=ethereum";
  var response = UrlFetchApp.fetch(url); // get feed
  var dataAll = JSON.parse(response.getContentText());
  var rows = [Object.keys(dataAll)]; // Retrieve headers.
  var temp = [];
  for (var i = 0; i < rows[0].length; i++) {
    temp.push(dataAll[rows[0][i]]); // Retrieve values.
  }
  rows.push(temp);
  raw.getRange(1, 1, rows.length, rows[0].length).setValues(rows); // Put values to Spreadsheet.
  // count rows to snap
  var current_rows = current.getLastRow();
  var database_rows = database.getLastRow() + 1;
  var rows_new = current.getRange("A2:B" + current_rows).getValues();
  // snap rows, can run this on a trigger to be timed
  database.getRange("A" + database_rows + ":B" + database_rows).setValues(rows_new);
}
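Since the message asks to limit the data transfer speed, one mitigation worth trying is to retry the fetch with exponential backoff rather than failing the whole trigger run. The sketch below is an assumption on my part (the retry count and delays are illustrative, not documented values); the helper would replace the direct UrlFetchApp.fetch(url) call above:
function fetchWithBackoff(url) {
  for (var attempt = 0; attempt < 5; attempt++) {
    try {
      var response = UrlFetchApp.fetch(url, { muteHttpExceptions: true });
      if (response.getResponseCode() === 200) return response; // success
    } catch (e) {
      // Quota/bandwidth errors can surface as exceptions; fall through and wait.
    }
    Utilities.sleep(Math.pow(2, attempt) * 1000); // wait 1s, 2s, 4s, 8s, 16s
  }
  throw new Error('Fetch failed after retries: ' + url);
}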

Google Apps Script running very slow [duplicate]

This question already has answers here:
Long processing time likely due to getValue and cell inserts
(2 answers)
Closed 1 year ago.
My Google Sheets Apps Script has been timing out. It does a simple read from a MySQL DB and loads the data in; it queries one single persisted table which holds only 150 rows, so there is no reason it should be timing out.
My script looks like this:
var server = 'IP';
var port = PORT;
var dbName = 'DB';
var username = 'UN';
var password = 'PW';
var url = 'jdbc:mysql://' + server + ':' + port + '/' + dbName;

function readData() {
  var conn = Jdbc.getConnection(url, username, password);
  var stmt = conn.createStatement();
  var results = stmt.executeQuery('SELECT * FROM db.table');
  var metaData = results.getMetaData();
  var numCols = metaData.getColumnCount();
  var spreadsheet = SpreadsheetApp.getActive();
  var sheet = spreadsheet.getSheetByName('SheetName');
  sheet.clearContents();
  var arr = [];
  for (var col = 0; col < numCols; col++) {
    arr.push(metaData.getColumnName(col + 1));
  }
  sheet.appendRow(arr);
  while (results.next()) {
    arr = [];
    for (var col = 0; col < numCols; col++) {
      arr.push(results.getString(col + 1));
    }
    sheet.appendRow(arr);
  }
  results.close();
  stmt.close();
  /*sheet.autoResizeColumns(1, numCols+1);*/
}

/*ScriptApp.newTrigger('readData')
  .timeBased()
  .everyHours(2)
  .create();*/
Is there anything that could be done to better handle the import and speed it up? As I said, the data is persisted into a table on the DB; the table only has approx. 150 rows and 10-15 columns, so the data is small and queries instantly from a DB GUI.
This provides a bit of a speed up, although greater performance is achievable if you can write code on both ends of the stream and access them via SSL.
var server = 'IP';
var port = PORT;
var dbName = 'DB';
var username = 'UN';
var password = 'PW';
var url = 'jdbc:mysql://' + server + ':' + port + '/' + dbName;

function readData() {
  var conn = Jdbc.getConnection(url, username, password);
  var stmt = conn.createStatement();
  var results = stmt.executeQuery('SELECT * FROM db.table');
  var metaData = results.getMetaData();
  var numCols = metaData.getColumnCount();
  var spreadsheet = SpreadsheetApp.getActive();
  var sheet = spreadsheet.getSheetByName('SheetName');
  sheet.clearContents();
  var arr = []; // this is where you store all of the data
  let row = [];
  for (var col = 0; col < numCols; col++) {
    row.push(metaData.getColumnName(col + 1));
  }
  arr.push(row);
  while (results.next()) {
    row = [];
    for (var col = 0; col < numCols; col++) {
      row.push(results.getString(col + 1));
    }
    arr.push(row);
  }
  // This single batch write saves you all of the individual writes to the rows.
  sheet.getRange(1, 1, arr.length, arr[0].length).setValues(arr);
  results.close();
  stmt.close();
}
Class Range setValues() Method

I need help to clean up a script that blocks rows

The code below works but requires too much time to run. In fact, the script cannot finish, since the allowed processing time has been exceeded several times.
I would gladly receive help fine-tuning this code.
The script was created to block (protect) grouped rows if a value is entered in column E. This code is activated every 24 hours by a trigger.
Objective: The worksheet, with 1000 rows, is accessible to 250 people, whether or not registered with a Google account, and serves to record recreational tennis matches.
var MaxRow = Sheet.getDataRange().getNumRows();
var RowCount = 1;
var Cell = Sheet.getRange("D" + RowCount);
var CellValue = Cell.getValue();
var BlockStart = 0;
var BlockEnd = 0;
var LockRange = Sheet.getRange(59, 6, 1, 2);
for (RowCount = 4; RowCount <= MaxRow; RowCount++) {
  Cell = Sheet.getRange("D" + RowCount);
  CellValue = Cell.getValue();
  if (CellValue != "") {
    if (BlockStart == 0) {
      BlockStart = RowCount;
      BlockEnd = RowCount;
    } else {
      BlockEnd = RowCount;
    }
  } else {
    if (BlockStart > 0) {
      LockRange = Sheet.getRange(BlockStart, 6, BlockEnd - BlockStart + 1, 2);
      var Protection = LockRange.protect().setDescription('Rows ' + BlockStart + ' To ' + BlockEnd + ' Protected');
      Protection.removeEditors(Protection.getEditors());
      BlockStart = 0;
      BlockEnd = 0;
    }
  }
}
Without a complete function I can't really figure out what you're trying to do. But let's say that you want to read the values in column D from row 4 to the bottom of the data; here's a simple and fast way to do it. This isn't the only way, but it's a lot faster than using getValue() on each row. This will probably run about 10000 times faster.
function getDataInColumnD() {
  var ss = SpreadsheetApp.getActive();
  var sh = ss.getActiveSheet();
  var rg = sh.getRange(4, 4, sh.getLastRow() - 3, 1); // one batch read of column D
  var vA = rg.getValues();
  for (var i = 0; i < vA.length; i++) {
    var value = vA[i][0];
    var row = i + 4;
    Logger.log('col: 4, row: %s, value: %s', row, value);
  }
}
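To tie that back to the protection task, here is a sketch of how the single batch read could drive the original block logic. The layout (data from row 4, flags in column D, protecting columns F:G) is carried over from the question; treat the rest as an assumption to adapt:
function protectBlocks() {
  var ss = SpreadsheetApp.getActive();
  var sh = ss.getActiveSheet();
  var vA = sh.getRange(4, 4, sh.getLastRow() - 3, 1).getValues(); // one read of column D
  var blockStart = 0;
  for (var i = 0; i <= vA.length; i++) { // one extra pass closes a trailing block
    var hasValue = i < vA.length && vA[i][0] !== "";
    if (hasValue && blockStart === 0) {
      blockStart = i + 4; // first sheet row of a new block
    } else if (!hasValue && blockStart > 0) {
      var blockEnd = i + 3; // last sheet row of the block that just closed
      var lockRange = sh.getRange(blockStart, 6, blockEnd - blockStart + 1, 2);
      var protection = lockRange.protect()
          .setDescription('Rows ' + blockStart + ' To ' + blockEnd + ' Protected');
      protection.removeEditors(protection.getEditors());
      blockStart = 0;
    }
  }
}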

Google script exceeded maximum execution time work around [duplicate]

This question already exists:
Exceeded maximum execution time in Google Apps Script [duplicate]
Closed 2 years ago.
I am connecting to a SQL Server database via JDBC from a Google Sheets doc. The table has some 30k odd records, and I hit the "exceeded maximum execution time" error. Is there a way to improve performance, or a workaround for the 6 minute execution time limit?
function myFunction() {
  var conn = Jdbc.getConnection("jdbc:sqlserver://dbURL", "username", "password");
  var stmt = conn.createStatement();
  stmt.setMaxRows(3000);
  var start = new Date();
  var rs = stmt.executeQuery('SELECT * FROM dbo.myTable');
  var doc = SpreadsheetApp.getActiveSpreadsheet();
  var cell = doc.getRange('a1');
  var row = 0;
  var data = [];
  while (rs.next()) {
    var rowData = [];
    for (var col = 0; col < rs.getMetaData().getColumnCount(); col++) {
      rowData[col] = rs.getString(col + 1);
    }
    data[row] = rowData;
    row++;
  }
  rs.close();
  stmt.close();
  conn.close();
  var end = new Date();
  Logger.log('Time elapsed: ' + (end.getTime() - start.getTime()));
}
Thank you.
I modified the code to use the Continuous Batch Library. Even though I no longer get the "exceeded maximum execution time" error, it looks like it's getting into an infinite loop and not writing a single record to the sheet.
var FUNCTION_NAME = "test";
var EMAIL_RECIPIENT = "";

function test() {
  ContinuousBatchLibrary.startOrResumeContinousExecutionInstance(FUNCTION_NAME);
  var conn = Jdbc.getConnection("jdbc:sqlserver://HOST:PORT/DBNAME", "user", "password");
  var stmt = conn.createStatement();
  stmt.setMaxRows(100);
  var rs = stmt.executeQuery('SELECT * FROM myTable');
  var SS;
  var Sheet;
  var SheetRange;
  // OPEN EXISTING SPREADSHEET
  SS = SpreadsheetApp.openById(SpreadsheetApp.getActiveSpreadsheet().getId());
  // SET SPREADSHEET ACTIVE
  SpreadsheetApp.setActiveSpreadsheet(SS);
  Sheet = SpreadsheetApp.setActiveSheet(SS.getSheetByName(SpreadsheetApp.getActiveSpreadsheet().getSheetName()));
  // GET NUMBER OF COLUMNS
  var ColCount = rs.getMetaData().getColumnCount();
  // SET COLUMN HEADERS
  for (var col = 1; col <= ColCount; col++) {
    GSheet.getRange(1, col).setValue(rs.getMetaData().getColumnName(col));
  }
  // SET RANGE TO FIRST ROW AND BOLD
  GSheetRange = GSheet.getRange(1, 1, 1, GSheet.getLastColumn());
  GSheetRange.setFontWeight("bold");
  var doc = SpreadsheetApp.getActiveSpreadsheet();
  var cell = doc.getRange('a2');
  var row = 0;
  var data = [];
  var i = ContinuousBatchLibrary.getBatchKey(FUNCTION_NAME) || 0;
  for (; i < row.length; i++) {
    while (rs.next()) {
      var rowData = [];
      for (var col = 0; col < rs.getMetaData().getColumnCount(); col++) {
        rowData[col] = rs.getString(col + 1);
      }
      data[row] = rowData;
      row++;
    }
    if (ContinuousBatchLibrary.isTimeRunningOut(FUNCTION_NAME)) {
      ContinuousBatchLibrary.setBatchKey(FUNCTION_NAME, i);
      break;
    }
  }
  rs.close();
  stmt.close();
  conn.close();
  if (i === row.length) {
    ContinuousBatchLibrary.endContinuousExecutionInstance(FUNCTION_NAME, EMAIL_RECIPIENT, "task complete");
  }
}
This answer is probably still the best workaround for the 6 minute timer.
The cumulative trigger runtime limit is one hour a day, so if you are getting less than 10% of the way through your data before the timeout, you will run into that as well.
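Another way to keep each run small, complementary to the trigger-based batching above, is to page the query itself so each execution transfers only one slice. The sketch below is an assumption for illustration (the ordering column id, the page size, the connection string, and the sheet targeting are all placeholders); SQL Server pages with OFFSET ... FETCH:
var PAGE_SIZE = 5000; // rows per run; tune to what fits in one execution

function importNextPage() {
  var props = PropertiesService.getScriptProperties();
  var offset = Number(props.getProperty('offset')) || 0;
  var conn = Jdbc.getConnection("jdbc:sqlserver://dbURL", "username", "password");
  var stmt = conn.createStatement();
  // 'id' stands in for any stable, unique ordering column.
  var rs = stmt.executeQuery('SELECT * FROM dbo.myTable ORDER BY id ' +
      'OFFSET ' + offset + ' ROWS FETCH NEXT ' + PAGE_SIZE + ' ROWS ONLY');
  var numCols = rs.getMetaData().getColumnCount();
  var data = [];
  while (rs.next()) {
    var rowData = [];
    for (var col = 0; col < numCols; col++) {
      rowData.push(rs.getString(col + 1));
    }
    data.push(rowData);
  }
  rs.close();
  stmt.close();
  conn.close();
  if (data.length > 0) {
    var sheet = SpreadsheetApp.getActiveSpreadsheet().getActiveSheet();
    sheet.getRange(sheet.getLastRow() + 1, 1, data.length, data[0].length).setValues(data);
    props.setProperty('offset', String(offset + data.length));
    ScriptApp.newTrigger('importNextPage').timeBased().after(60 * 1000).create(); // next slice
  } else {
    props.deleteProperty('offset'); // finished; reset for the next full import
  }
}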