My Google Sheets Apps Script has been timing out. It does a simple read from a MySQL database and loads the data into a sheet; it queries a single persisted table holding only 150 rows, so there is no reason it should be timing out.
My Script looks like this:
var server = 'IP';
var port = PORT;
var dbName = 'DB';
var username = 'UN';
var password = 'PW';
var url = 'jdbc:mysql://' + server + ':' + port + '/' + dbName;

function readData() {
  var conn = Jdbc.getConnection(url, username, password);
  var stmt = conn.createStatement();
  var results = stmt.executeQuery('SELECT * FROM db.table');
  var metaData = results.getMetaData();
  var numCols = metaData.getColumnCount();
  var spreadsheet = SpreadsheetApp.getActive();
  var sheet = spreadsheet.getSheetByName('SheetName');
  sheet.clearContents();
  var arr = [];
  for (var col = 0; col < numCols; col++) {
    arr.push(metaData.getColumnName(col + 1));
  }
  sheet.appendRow(arr);
  while (results.next()) {
    arr = [];
    for (var col = 0; col < numCols; col++) {
      arr.push(results.getString(col + 1));
    }
    sheet.appendRow(arr);
  }
  results.close();
  stmt.close();
  /*sheet.autoResizeColumns(1, numCols + 1);*/
}

/*ScriptApp.newTrigger('readData')
    .timeBased()
    .everyHours(2)
    .create();*/
Is there anything that could be done to handle the import better and speed it up? As I said, the data is persisted into a table on the DB; the table only has approximately 150 rows and 10-15 columns, so the data set is small and queries instantly from a DB GUI.
This provides a bit of a speed-up, although greater performance is achievable if you can write code on both ends of the stream and access it over SSL.
var server = 'IP';
var port = PORT;
var dbName = 'DB';
var username = 'UN';
var password = 'PW';
var url = 'jdbc:mysql://' + server + ':' + port + '/' + dbName;

function readData() {
  var conn = Jdbc.getConnection(url, username, password);
  var stmt = conn.createStatement();
  var results = stmt.executeQuery('SELECT * FROM db.table');
  var metaData = results.getMetaData();
  var numCols = metaData.getColumnCount();
  var spreadsheet = SpreadsheetApp.getActive();
  var sheet = spreadsheet.getSheetByName('SheetName');
  sheet.clearContents();
  var arr = []; // this is where you store all of the data
  let row = [];
  for (var col = 0; col < numCols; col++) {
    row.push(metaData.getColumnName(col + 1));
  }
  arr.push(row);
  while (results.next()) {
    row = [];
    for (var col = 0; col < numCols; col++) {
      row.push(results.getString(col + 1));
    }
    arr.push(row);
  }
  sheet.getRange(1, 1, arr.length, arr[0].length).setValues(arr); // this is where you save all of your data into the spreadsheet, saving you all of the individual writes to the rows
  results.close();
  stmt.close();
}
Class Range setValues() Method
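The "code on both ends of the stream" idea above is not spelled out in the answer; one common reading is to put a small HTTPS endpoint in front of the database and pull the result set as JSON with UrlFetchApp. The sketch below is my illustration of that approach, not the answer's implementation; the endpoint URL, the API_TOKEN script property and the { headers, rows } response shape are all assumptions.

function readDataOverHttps() {
  // Hypothetical HTTPS endpoint that runs the query server-side and returns JSON
  var token = PropertiesService.getScriptProperties().getProperty('API_TOKEN'); // assumed secret
  var response = UrlFetchApp.fetch('https://example.com/api/table-export', {
    headers: { Authorization: 'Bearer ' + token }
  });
  var payload = JSON.parse(response.getContentText()); // assumed shape: { headers: [...], rows: [[...], ...] }

  var sheet = SpreadsheetApp.getActive().getSheetByName('SheetName');
  sheet.clearContents();

  // One bulk write, same as the setValues() pattern above
  var values = [payload.headers].concat(payload.rows);
  sheet.getRange(1, 1, values.length, values[0].length).setValues(values);
}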
I use Apps Script to connect to a SQL database and execute a query. It works as intended, but writing the result to a Google Sheet takes too long. The execution exceeds the timeout long before all rows are written. The query itself executes relatively fast (<10 s), but writing the rows (over 12000 results) happens very slowly. I use the following code:
var server = 'x.x.x.x';
var port = 3306;
var dbName = 'xxx';
var username = 'xxx';
var password = 'xxx';
var url = 'jdbc:mysql://' + server + ':' + port + '/' + dbName;

function readData() {
  var conn = Jdbc.getConnection(url, username, password);
  var stmt = conn.createStatement();
  var results = stmt.executeQuery('insertQueryHere');
  var metaData = results.getMetaData();
  var numCols = metaData.getColumnCount();
  var spreadsheet = SpreadsheetApp.getActive();
  var sheet = spreadsheet.getSheetByName('sheet123');
  sheet.clearContents();
  var arr = [];
  for (var col = 0; col < numCols; col++) {
    arr.push(metaData.getColumnName(col + 1));
  }
  sheet.appendRow(arr);
  while (results.next()) {
    arr = [];
    for (var col = 0; col < numCols; col++) {
      arr.push(results.getString(col + 1));
    }
    sheet.appendRow(arr);
  }
  results.close();
  stmt.close();
  sheet.autoResizeColumns(1, numCols + 1);
}
Any idea on how to improve performance?
You are making repeated calls to the server with appendRow(). Instead, collect all the new rows in an array and write them in one call with setValues(). See Best Practices.
function readData() {
  var conn = Jdbc.getConnection(url, username, password);
  var stmt = conn.createStatement();
  var results = stmt.executeQuery('insertQueryHere');
  var metaData = results.getMetaData();
  var numCols = metaData.getColumnCount();
  var spreadsheet = SpreadsheetApp.getActive();
  var sheet = spreadsheet.getSheetByName('sheet123');
  sheet.clearContents();
  var arr = [];
  var rows = [];
  for (var col = 0; col < numCols; col++) {
    arr.push(metaData.getColumnName(col + 1));
  }
  rows.push(arr);
  while (results.next()) {
    arr = [];
    for (var col = 0; col < numCols; col++) {
      arr.push(results.getString(col + 1));
    }
    rows.push(arr);
  }
  sheet.getRange(sheet.getLastRow() + 1, 1, rows.length, numCols).setValues(rows);
  SpreadsheetApp.flush(); // just to make sure the data is written
  results.close();
  stmt.close();
  sheet.autoResizeColumns(1, numCols + 1);
}
I am facing an issue, like many before me, with a Google Apps Script that times out. I am reading the data from an indexed/persisted table in a MySQL database; the table in question has 71 columns and a total of 28000 rows. The Google Sheets sheet I am writing to has no calculations on it that might slow things down; those happen on other sheets.
Please can you review the code below and propose any changes to help avoid the timeout?
var server = 'xx.xx.xx.xxx';
var port = xxxx;
var dbName = 'test';
var username = 'test';
var password = 'xxx';
var url = 'jdbc:mysql://' + server + ':' + port + '/' + dbName;

function readDataPast() {
  var conn = Jdbc.getConnection(url, username, password);
  var stmt = conn.createStatement();
  var results = stmt.executeQuery('SELECT * FROM test.test_table');
  var metaData = results.getMetaData();
  var numCols = metaData.getColumnCount();
  var spreadsheet = SpreadsheetApp.getActive();
  var sheet = spreadsheet.getSheetByName('Raw_Data');
  sheet.clearContents();
  var arr = [];
  let row = [];
  for (var col = 0; col < numCols; col++) {
    row.push(metaData.getColumnName(col + 1));
  }
  arr.push(row);
  while (results.next()) {
    row = [];
    for (var col = 0; col < numCols; col++) {
      row.push(results.getString(col + 1));
    }
    arr.push(row);
  }
  sheet.getRange(1, 1, arr.length, arr[0].length).setValues(arr);
  results.close();
  stmt.close();
}
Issue:
I don't think the script can be made considerably faster, since potential improvements (e.g. using the Sheets API as suggested by Ninca Tirtil) don't significantly affect the bulk of the script (iterating through 28000 rows).
Workaround:
Therefore, instead of trying to speed it up, I'd suggest accomplishing this in multiple executions. To that goal, I'd do the following:
Check execution time after each iteration. If this time is close to the time limit, end the loop and write current data to the sheet. You can use the Date object for this.
Create the following time-based trigger at the end of your function: after(durationMilliseconds). Thanks to this, the function will fire automatically after the number of milliseconds you indicate. After each execution, a trigger will be created to fire the next execution.
Because you want to split the loop, you have to store the row index somewhere (you could use PropertiesService at the end of each execution, for example) and retrieve it at the beginning of the next, so that each successive execution resumes the loop where the previous one left off. You can get the row index via getRow(), and then move to that row in the next execution via relative(rows).
Code sample:
var maxTimeDiff = 1000 * 60 * 5; // 5 minutes
const PROPERTY_KEY = "Row index";

function setRowIndex(rowIndex) {
  const scriptProps = PropertiesService.getScriptProperties();
  scriptProps.setProperty(PROPERTY_KEY, rowIndex);
}

function getRowIndex() {
  const scriptProps = PropertiesService.getScriptProperties();
  const rowIndex = scriptProps.getProperty(PROPERTY_KEY);
  return rowIndex;
}

function createTrigger() {
  ScriptApp.newTrigger("readDataPast")
    .timeBased()
    .after(60 * 1000) // Next execution after a minute
    .create();
}
function readDataPast() {
  var startTime = new Date();
  var conn = Jdbc.getConnection(url, username, password);
  var stmt = conn.createStatement();
  var results = stmt.executeQuery('SELECT * FROM test.test_table');
  var spreadsheet = SpreadsheetApp.getActive();
  var sheet = spreadsheet.getSheetByName('Raw_Data');
  var rowIndex = Number(getRowIndex()); // the property is stored as a string, so convert it back to a number
  var metaData = results.getMetaData();
  var numCols = metaData.getColumnCount(); // needed in every execution, not only the first
  var arr = [];
  let row = [];
  if (!rowIndex) { // Clear the sheet and add the header row on the first execution
    sheet.clearContents();
    for (var col = 0; col < numCols; col++) {
      row.push(metaData.getColumnName(col + 1));
    }
    arr.push(row);
  } else {
    results.relative(rowIndex); // Move to the row the previous execution stopped at
  }
  while (results.next()) {
    row = [];
    for (var col = 0; col < numCols; col++) {
      row.push(results.getString(col + 1));
    }
    arr.push(row);
    if (new Date() - startTime > maxTimeDiff) break; // End the loop if time is running out
  }
  var currentRow = results.getRow(); // 0 if all rows have been iterated
  setRowIndex(currentRow);
  var lastRow = sheet.getLastRow();
  sheet.getRange(lastRow + 1, 1, arr.length, arr[0].length).setValues(arr);
  results.close();
  stmt.close();
  if (currentRow) createTrigger(); // Create a trigger if the iteration is not finished
}
I have this code to automatically insert/update data in a Google Sheet from a MySQL database.
var server = 'server';
var port = 3306;
var dbName = 'db_name';
var username = 'db_username';
var password = 'db_pass';
var url = 'jdbc:mysql://' + server + ':' + port + '/' + dbName;

function readData() {
  var conn = Jdbc.getConnection(url, username, password);
  var stmt = conn.createStatement();
  var results = stmt.executeQuery('SELECT .... ');
  var metaData = results.getMetaData();
  var numCols = metaData.getColumnCount();
  var spreadsheet = SpreadsheetApp.getActive();
  var sheet = spreadsheet.getSheetByName('Sheet1');
  sheet.clearContents();
  var arr = [];
  for (var col = 0; col < numCols; col++) {
    arr.push(metaData.getColumnName(col + 1));
  }
  sheet.appendRow(arr);
  while (results.next()) {
    arr = [];
    for (var col = 0; col < numCols; col++) {
      arr.push(results.getString(col + 1));
    }
    sheet.appendRow(arr);
  }
  results.close();
  stmt.close();
  sheet.autoResizeColumns(1, numCols + 1);
}
How can I set the starting row and column for the inserted data? Right now it starts at 'A1' and I'd like it to start at 'D5'.
I tried adding var cell = sheet.getRange('A1'); after the var sheet = line, with no result.
The basic idea would be to split the problem in two:
Get the data from your MySQL database into a 2D array
Add the data to the spreadsheet.
function readData() {
  // Get data
  const conn = Jdbc.getConnection(url, username, password)
  const stmt = conn.createStatement()
  const results = stmt.executeQuery('SELECT .... ')
  const columnNames = getColumnNames(results)
  const data = getStringData(results)
  results.close()
  stmt.close()

  // Set data to spreadsheet
  const spreadsheet = SpreadsheetApp.getActiveSpreadsheet()
  const sheet = spreadsheet.getSheetByName('Sheet1')
  sheet.clearContents()
  addToRange(sheet.getRange('B5'), [
    columnNames,
    ...data,
  ])
}

function getColumnNames(results) {
  const result = []
  const metaData = results.getMetaData()
  const numCols = metaData.getColumnCount()
  for (let col = 0; col < numCols; col++) {
    result.push(metaData.getColumnName(col + 1))
  }
  return result;
}
function getStringData(results) {
  const result = [];
  const numCols = results.getMetaData().getColumnCount(); // compute the column count here rather than relying on an undefined variable
  while (results.next()) {
    const row = [];
    for (let col = 0; col < numCols; col++) {
      row.push(results.getString(col + 1))
    }
    result.push(row)
  }
  return result
}
function addToRange(range, values) {
  return range
    .offset(0, 0, values.length, values[0].length)
    .setValues(values)
}
Adding functions also makes the code more readable and helps to split the problem.
Note that I'm adding the column headers in row 5 (I join the headers and the data). You can set them separately in different ranges if you so desire; a sketch of that variation follows.
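For example, a minimal variation of the function above (my sketch, not part of the original answer) that writes the headers at D5 and the data right below them, reusing the same helpers:

function readDataSeparateHeaders() {
  const conn = Jdbc.getConnection(url, username, password)
  const stmt = conn.createStatement()
  const results = stmt.executeQuery('SELECT .... ')
  const columnNames = getColumnNames(results)
  const data = getStringData(results)
  results.close()
  stmt.close()

  const sheet = SpreadsheetApp.getActiveSpreadsheet().getSheetByName('Sheet1')
  sheet.clearContents()
  addToRange(sheet.getRange('D5'), [columnNames]) // headers only, starting at D5
  addToRange(sheet.getRange('D6'), data)          // data starting one row below the headers
}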
References
Range.offset() (Apps Script reference)
Range.setValues() (Apps Script reference)
Spread syntax (...) (MDN JavaScript reference)
Try the code below:
var server = 'server';
var port = 3306;
var dbName = 'db_name';
var username = 'db_username';
var password = 'db_pass';
var url = 'jdbc:mysql://' + server + ':' + port + '/' + dbName;

function readData() {
  var conn = Jdbc.getConnection(url, username, password);
  var stmt = conn.createStatement();
  var results = stmt.executeQuery('SELECT .... ');
  var metaData = results.getMetaData();
  var numCols = metaData.getColumnCount();
  var spreadsheet = SpreadsheetApp.getActiveSpreadsheet();
  var sheet = spreadsheet.getSheetByName('Sheet1');
  sheet.clearContents();
  var arr = [];
  for (var col = 0; col < numCols; col++) {
    arr.push(metaData.getColumnName(col + 1));
  }
  sheet.appendRow(arr);
  var rows = []; // collect all data rows so they can be written in one call
  while (results.next()) {
    arr = [];
    for (var col = 0; col < numCols; col++) {
      arr.push(results.getString(col + 1));
    }
    rows.push(arr);
  }
  // setValues() needs a 2D array and a single write; start at row 5, column 4 (D5)
  var range = sheet.getRange(5, 4, rows.length, numCols);
  range.setValues(rows);
  results.close();
  stmt.close();
  sheet.autoResizeColumns(1, numCols + 1);
}
Good day everybody, I've encountered a weird issue with a Google Sheets script: the display and filtering of a MySQL query result is not working because its contents are in Russian.
I will put the whole script code below, but the main part that doesn't work is:
from samplename_order WHERE status = "Оплачен")');
as well as
select (select data_create) as "Дата"
In the second example the column header renders as "????".
In the first example I get nothing at all except the column header name.
The whole code:
function onOpen() {
  var spreadsheet = SpreadsheetApp.getActive();
  var menuItems = [
    {name: 'Sync', functionName: 'readData'}
  ];
  spreadsheet.addMenu('Sync', menuItems);
}

var address = 'xxx:3306'; //ex. '10.1.1.1:1433'
var user = 'xxx';
var userPwd = 'xxxx';
var db = 'sxxxx';
var dbUrl = 'jdbc:mysql://' + address + '/' + db;

function readData() {
  var conn = Jdbc.getConnection(dbUrl, user, userPwd);
  var stmt = conn.createStatement();
  stmt.setMaxRows(1000);
  var results = stmt.executeQuery('select (select data_create) as "Дата",(select concat(name," ",sername)) as name,mail,(select concat(" ",phone)) as phone,comments,`status` from sample_order WHERE status = "Оплачен"');
  var metaData = results.getMetaData();
  var numCols = results.getMetaData().getColumnCount();
  var sheet = SpreadsheetApp.getActiveSheet();
  sheet.clearContents();
  var arr = [];
  for (var col = 0; col < numCols; col++) {
    arr.push(metaData.getColumnName(col + 1));
  }
  sheet.appendRow(arr);
  while (results.next()) {
    arr = [];
    for (var col = 0; col < numCols; col++) {
      arr.push(results.getString(col + 1));
    }
    sheet.appendRow(arr);
  }
  results.close();
  stmt.close();
  sheet.autoResizeColumns(1, numCols + 1);
}
Append this to the JDBC URL you pass to getConnection():
?useUnicode=yes&characterEncoding=UTF-8
Question marks and other common character set problems are explained here
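Applied to the connection string from the question, that looks like this (a sketch using the same variables as the script above):

var address = 'xxx:3306';
var db = 'sxxxx';
// Appending the encoding parameters tells the driver to exchange UTF-8, so Cyrillic text is not mangled into "????"
var dbUrl = 'jdbc:mysql://' + address + '/' + db + '?useUnicode=yes&characterEncoding=UTF-8';

var conn = Jdbc.getConnection(dbUrl, user, userPwd);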
You probably need to set the proper character-encoding option in the connection string.
There is a good answer about using UTF-8 in such a case.
I am connecting to a SQL Server database via JDBC from a Google Sheets document. The table has some 30k odd records, and I hit the "Exceeded maximum execution time" error. Is there a way to improve performance, or a workaround for the 6-minute execution time limit?
function myFunction() {
  var conn = Jdbc.getConnection("jdbc:sqlserver://dbURL", "username", "password");
  var stmt = conn.createStatement();
  stmt.setMaxRows(3000);
  var start = new Date();
  var rs = stmt.executeQuery('SELECT * FROM dbo.myTable');
  var doc = SpreadsheetApp.getActiveSpreadsheet();
  var cell = doc.getRange('a1');
  var row = 0;
  var data = [];
  while (rs.next()) {
    var rowData = [];
    for (var col = 0; col < rs.getMetaData().getColumnCount(); col++) {
      rowData[col] = (rs.getString(col + 1));
    }
    data[row] = rowData;
    row++;
  }
  rs.close();
  stmt.close();
  conn.close();
  var end = new Date();
  Logger.log('Time elapsed: ' + (end.getTime() - start.getTime()));
}
Thank you.
I modified the code to use the Continuous Batch Library. Even though I no longer get the "Exceeded maximum execution time" error, it looks like it's getting into an infinite loop and not writing a single record to the sheet.
var FUNCTION_NAME = "test";
var EMAIL_RECIPIENT = "";
function test() {
ContinuousBatchLibrary.startOrResumeContinousExecutionInstance(FUNCTION_NAME)
var conn = Jdbc.getConnection("jdbc:sqlserver://HOST:PORT/DBNAME","user","password");
var stmt = conn.createStatement();
stmt.setMaxRows(100);
var rs = stmt.executeQuery('SELECT * FROM myTable');
var SS;
var Sheet;
var SheetRange;
// OPEN EXISTING SPREADSHEET
SS = SpreadsheetApp.openById(SpreadsheetApp.getActiveSpreadsheet().getId());
// SET SPREADSHEET ACTIVE
SpreadsheetApp.setActiveSpreadsheet(SS);
Sheet = SpreadsheetApp.setActiveSheet(SS.getSheetByName(SpreadsheetApp.getActiveSpreadsheet().getSheetName()));
// GET NUMBER OF COLUMNS
var ColCount = rs.getMetaData().getColumnCount();
// SET COLUMN HEADERS
for (var col = 1; col <= ColCount; col++) {
GSheet.getRange(1, col).setValue(rs.getMetaData().getColumnName(col));
}
// SET RANGE TO FIRST ROW AND BOLD
GSheetRange = GSheet.getRange(1,1,1,GSheet.getLastColumn());
GSheetRange.setFontWeight("bold");
var doc = SpreadsheetApp.getActiveSpreadsheet();
var cell = doc.getRange('a2');
var row = 0;
var data = [];
var i = ContinuousBatchLibrary.getBatchKey(FUNCTION_NAME) || 0;
for (; i < row.length; i++) {
while (rs.next()) {
var rowData = [];
for (var col = 0; col < rs.getMetaData().getColumnCount(); col++) {
rowData[col] = (rs.getString(col + 1));
}
data[row] = rowData;
row++;
}
if (ContinuousBatchLibrary.isTimeRunningOut(FUNCTION_NAME)) {
ContinuousBatchLibrary.setBatchKey(FUNCTION_NAME, i)
break;
}
}
rs.close();
stmt.close();
conn.close();
if (i === row.length) {
ContinuousBatchLibrary.endContinuousExecutionInstance(FUNCTION_NAME, EMAIL_RECIPIENT, "task complete")
}
}
This answer is probably still the best workaround for the 6-minute timer.
The cumulative trigger run-time limit is one hour a day, so if you are getting <10% of the way through your data before the timeout, you will run into that as well.