Cyrillic database contents in Google Sheets JDBC script - mysql

Good day everybody, I've encountered a weird issue with a Google Sheets script, where the display and filtering of a MySQL query is not working because its contents are in Russian.
I will put the whole script code below, but the main part that doesn't work is:
from samplename_order WHERE status = "Оплачен")');
as well as
select (select data_create) as "Дата"
In the second example, the column header renders as "????".
In the first example I get nothing at all except the column header names.
The whole code:
function onOpen() {
  var spreadsheet = SpreadsheetApp.getActive();
  var menuItems = [
    {name: 'Sync', functionName: 'readData'}
  ];
  spreadsheet.addMenu('Sync', menuItems);
}

var address = 'xxx:3306'; // e.g. '10.1.1.1:3306'
var user = 'xxx';
var userPwd = 'xxxx';
var db = 'sxxxx';
var dbUrl = 'jdbc:mysql://' + address + '/' + db;

function readData() {
  var conn = Jdbc.getConnection(dbUrl, user, userPwd);
  var stmt = conn.createStatement();
  stmt.setMaxRows(1000);
  var results = stmt.executeQuery('select (select data_create) as "Дата",(select concat(name," ",sername)) as name,mail,(select concat(" ",phone)) as phone,comments,`status` from sample_order WHERE status = "Оплачен"');
  var metaData = results.getMetaData();
  var numCols = metaData.getColumnCount();
  var sheet = SpreadsheetApp.getActiveSheet();
  sheet.clearContents();
  var arr = [];
  for (var col = 0; col < numCols; col++) {
    arr.push(metaData.getColumnName(col + 1));
  }
  sheet.appendRow(arr);
  while (results.next()) {
    arr = [];
    for (var col = 0; col < numCols; col++) {
      arr.push(results.getString(col + 1));
    }
    sheet.appendRow(arr);
  }
  results.close();
  stmt.close();
  sheet.autoResizeColumns(1, numCols);
}

Append this to the JDBC URL you pass to getConnection():
?useUnicode=yes&characterEncoding=UTF-8
Question marks and other common character-set problems are explained here.
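Applied to the script above, that means building dbUrl like this (a minimal sketch; useUnicode and characterEncoding are standard MySQL Connector/J parameters, but this only helps if the data is actually stored as UTF-8 on the server):
var dbUrl = 'jdbc:mysql://' + address + '/' + db
    + '?useUnicode=yes&characterEncoding=UTF-8'; // force UTF-8 over the wire
var conn = Jdbc.getConnection(dbUrl, user, userPwd);
With that in place, both the Cyrillic column alias ("Дата") and the Cyrillic string literal in the WHERE clause should round-trip correctly.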

You should probably set a proper character-encoding option in the connection string.
There is a good answer on using UTF-8 in such a case.

Related

Performance issue when writing to google docs sheet via apps script

I use Apps Script to connect to a SQL database and execute a query. It works as intended, but writing the result to a Google Sheets document takes too long: the execution exceeds the timeout long before all rows are written. The query itself executes relatively fast (<10s), but writing the lines (over 12000 results) happens very slowly. I use the following code:
var server = 'x.x.x.x';
var port = 3306;
var dbName = 'xxx';
var username = 'xxx';
var password = 'xxx';
var url = 'jdbc:mysql://' + server + ':' + port + '/' + dbName;

function readData() {
  var conn = Jdbc.getConnection(url, username, password);
  var stmt = conn.createStatement();
  var results = stmt.executeQuery('insertQueryHere');
  var metaData = results.getMetaData();
  var numCols = metaData.getColumnCount();
  var spreadsheet = SpreadsheetApp.getActive();
  var sheet = spreadsheet.getSheetByName('sheet123');
  sheet.clearContents();
  var arr = [];
  for (var col = 0; col < numCols; col++) {
    arr.push(metaData.getColumnName(col + 1));
  }
  sheet.appendRow(arr);
  while (results.next()) {
    arr = [];
    for (var col = 0; col < numCols; col++) {
      arr.push(results.getString(col + 1));
    }
    sheet.appendRow(arr);
  }
  results.close();
  stmt.close();
  sheet.autoResizeColumns(1, numCols);
}
Any idea on how to improve performance?
You are making repeated calls to the server with appendRow(). Instead, collect all the new rows in an array and write them in one call with setValues(). See Best Practices.
function readData() {
  var conn = Jdbc.getConnection(url, username, password);
  var stmt = conn.createStatement();
  var results = stmt.executeQuery('insertQueryHere');
  var metaData = results.getMetaData();
  var numCols = metaData.getColumnCount();
  var spreadsheet = SpreadsheetApp.getActive();
  var sheet = spreadsheet.getSheetByName('sheet123');
  sheet.clearContents();
  var arr = [];
  var rows = []; // collect every row here and write once at the end
  for (var col = 0; col < numCols; col++) {
    arr.push(metaData.getColumnName(col + 1));
  }
  rows.push(arr);
  while (results.next()) {
    arr = [];
    for (var col = 0; col < numCols; col++) {
      arr.push(results.getString(col + 1));
    }
    rows.push(arr);
  }
  sheet.getRange(sheet.getLastRow() + 1, 1, rows.length, numCols).setValues(rows);
  SpreadsheetApp.flush(); // just to make sure the data is written
  results.close();
  stmt.close();
  sheet.autoResizeColumns(1, numCols);
}

Speed up Apps Script MySQL query to Google Sheets

I am facing an issue like many before with regard to a timed-out Google Apps Script. I am reading the data from an indexed/persisted table in a MySQL database; the table in question has 71 columns and a total of 28000 rows. The sheet in Google Sheets I am writing to has no calculations etc. on it which might slow things down; those happen on other sheets.
Please can you review the code below and propose any changes to help avoid the timeout?
var server = 'xx.xx.xx.xxx';
var port = xxxx;
var dbName = 'test';
var username = 'test';
var password = 'xxx';
var url = 'jdbc:mysql://' + server + ':' + port + '/' + dbName;

function readDataPast() {
  var conn = Jdbc.getConnection(url, username, password);
  var stmt = conn.createStatement();
  var results = stmt.executeQuery('SELECT * FROM test.test_table');
  var metaData = results.getMetaData();
  var numCols = metaData.getColumnCount();
  var spreadsheet = SpreadsheetApp.getActive();
  var sheet = spreadsheet.getSheetByName('Raw_Data');
  sheet.clearContents();
  var arr = [];
  let row = [];
  for (var col = 0; col < numCols; col++) {
    row.push(metaData.getColumnName(col + 1));
  }
  arr.push(row);
  while (results.next()) {
    row = [];
    for (var col = 0; col < numCols; col++) {
      row.push(results.getString(col + 1));
    }
    arr.push(row);
  }
  sheet.getRange(1, 1, arr.length, arr[0].length).setValues(arr);
  results.close();
  stmt.close();
}
Issue:
I don't think the script can be made considerably faster, since potential improvements (e.g. using the Sheets API as suggested by Ninca Tirtil) don't significantly affect the bulk of the script (iterating through 28000 rows).
Workaround:
Therefore, instead of trying to speed it up, I'd suggest accomplishing this in multiple executions. To that goal, I'd do the following:
1. Check execution time after each iteration. If this time is close to the time limit, end the loop and write the current data to the sheet. You can use the Date object for this.
2. Create the following time-based trigger at the end of your function: after(durationMilliseconds). Thanks to this, the function will fire automatically after the number of milliseconds you indicate. After each execution, a trigger will be created to fire the next execution.
3. Because you want to split the loop, you have to store the row index somewhere (you could use PropertiesService at the end of each execution, for example) and retrieve it at the beginning of the next, so that each successive execution resumes the loop where it left off. You can get the row index via getRow(), and then move to that row in the next execution via relative(rows).
Code sample:
var maxTimeDiff = 1000 * 60 * 5; // 5 minutes
const PROPERTY_KEY = "Row index";

function setRowIndex(rowIndex) {
  const scriptProps = PropertiesService.getScriptProperties();
  scriptProps.setProperty(PROPERTY_KEY, rowIndex);
}

function getRowIndex() {
  const scriptProps = PropertiesService.getScriptProperties();
  const rowIndex = scriptProps.getProperty(PROPERTY_KEY);
  return Number(rowIndex); // properties are stored as strings
}

function createTrigger() {
  ScriptApp.newTrigger("readDataPast")
    .timeBased()
    .after(60 * 1000) // Next execution after a minute
    .create();
}

function readDataPast() {
  var startTime = new Date();
  var conn = Jdbc.getConnection(url, username, password);
  var stmt = conn.createStatement();
  var results = stmt.executeQuery('SELECT * FROM test.test_table');
  var spreadsheet = SpreadsheetApp.getActive();
  var sheet = spreadsheet.getSheetByName('Raw_Data');
  var rowIndex = getRowIndex();
  var metaData = results.getMetaData(); // needed in every execution, not just the first
  var numCols = metaData.getColumnCount();
  var arr = [];
  let row = [];
  if (!rowIndex || rowIndex == 0) { // Clear sheet and add headers if first execution
    sheet.clearContents();
    for (var col = 0; col < numCols; col++) {
      row.push(metaData.getColumnName(col + 1));
    }
    arr.push(row);
  } else {
    results.relative(rowIndex); // Move to current row
  }
  while (results.next()) {
    row = [];
    for (var col = 0; col < numCols; col++) {
      row.push(results.getString(col + 1));
    }
    arr.push(row);
    if (new Date() - startTime > maxTimeDiff) break; // End iteration if running long
  }
  var currentRow = results.getRow(); // 0 if all rows have been iterated
  setRowIndex(currentRow);
  if (arr.length) { // Guard against an empty batch
    var lastRow = sheet.getLastRow();
    sheet.getRange(lastRow + 1, 1, arr.length, arr[0].length).setValues(arr);
  }
  results.close();
  stmt.close();
  if (currentRow) createTrigger(); // Create trigger if iteration is not finished
}
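One caveat the answer doesn't mention: one-shot after() triggers are not deleted automatically once they fire, and a script can only hold a limited number of triggers per user, so a long import can hit that cap. A small cleanup sketch (the helper name is mine); calling deleteTriggersFor("readDataPast") at the top of readDataPast() keeps the project free of spent triggers:
function deleteTriggersFor(handlerName) {
  // Remove previously created one-shot triggers for this handler
  ScriptApp.getProjectTriggers().forEach(function (trigger) {
    if (trigger.getHandlerFunction() === handlerName) {
      ScriptApp.deleteTrigger(trigger);
    }
  });
}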

Google Apps Script running very slow [duplicate]

This question already has answers here: Long processing time likely due to getValue and cell inserts (2 answers). Closed 1 year ago.
My Google Sheets Apps Script has been timing out. It does a simple read from a MySQL DB and loads the data in; it queries one single persisted table which holds only 150 rows, so there is no reason it should be timing out.
My Script looks like this:
var server = 'IP';
var port = PORT;
var dbName = 'DB';
var username = 'UN';
var password = 'PW';
var url = 'jdbc:mysql://' + server + ':' + port + '/' + dbName;

function readData() {
  var conn = Jdbc.getConnection(url, username, password);
  var stmt = conn.createStatement();
  var results = stmt.executeQuery('SELECT * FROM db.table');
  var metaData = results.getMetaData();
  var numCols = metaData.getColumnCount();
  var spreadsheet = SpreadsheetApp.getActive();
  var sheet = spreadsheet.getSheetByName('SheetName');
  sheet.clearContents();
  var arr = [];
  for (var col = 0; col < numCols; col++) {
    arr.push(metaData.getColumnName(col + 1));
  }
  sheet.appendRow(arr);
  while (results.next()) {
    arr = [];
    for (var col = 0; col < numCols; col++) {
      arr.push(results.getString(col + 1));
    }
    sheet.appendRow(arr);
  }
  results.close();
  stmt.close();
  /*sheet.autoResizeColumns(1, numCols+1);*/
}

/*ScriptApp.newTrigger('readData')
  .timeBased()
  .everyHours(2)
  .create();*/
Is there anything that could be done to better handle the import and speed it up? As I said, the data is persisted into a table on the DB; the table only has approx. 150 rows and 10-15 columns, so the data is small and queries instantly from a DB GUI.
This provides a bit of a speed-up, although greater performance is achievable if you can write code on both ends of the stream and access them via SSL.
var server = 'IP';
var port = PORT;
var dbName = 'DB';
var username = 'UN';
var password = 'PW';
var url = 'jdbc:mysql://' + server + ':' + port + '/' + dbName;

function readData() {
  var conn = Jdbc.getConnection(url, username, password);
  var stmt = conn.createStatement();
  var results = stmt.executeQuery('SELECT * FROM db.table');
  var metaData = results.getMetaData();
  var numCols = metaData.getColumnCount();
  var spreadsheet = SpreadsheetApp.getActive();
  var sheet = spreadsheet.getSheetByName('SheetName');
  sheet.clearContents();
  var arr = []; // this is where you store all of the data
  let row = [];
  for (var col = 0; col < numCols; col++) {
    row.push(metaData.getColumnName(col + 1));
  }
  arr.push(row);
  while (results.next()) {
    row = [];
    for (var col = 0; col < numCols; col++) {
      row.push(results.getString(col + 1));
    }
    arr.push(row);
  }
  // this is where you save all of your data into the spreadsheet,
  // saving you all of the individual writes to the rows
  sheet.getRange(1, 1, arr.length, arr[0].length).setValues(arr);
  results.close();
  stmt.close();
}
Class Range setValues() Method

Wrong MySQL number format in Google Sheets via JDBC

The numbers that I import from a SQL database into Google Sheets are interpreted as durations rather than as numbers (more specifically, currencies).
colA | colB | colC     | colD
-----|------|----------|-----
449  | 1234 | 29521.00 | 0.00
449  | 1234 | 29521.00 | 0.00
Wrong = 29521.00 (format currently imported)
Correct = € 29.521,00
What am I doing wrong in colC and colD?
function runSql(query, options) {
  console.log('query from runSql :' + query);
  var doc = SpreadsheetApp.getActiveSpreadsheet();
  var sheet = doc.getSheets()[2];
  var cell = sheet.getRange('a1');
  var activeCellRow = cell.getRow();
  var activeCellCol = cell.getColumn();
  var rs = [];
  try {
    var fullConnectionString = 'jdbc:' + DB_TYPE + '://' + HOST + ':' + PORT;
    var conn = Jdbc.getConnection(fullConnectionString, USERNAME, PASSWORD);
    console.log('query :', query);
    var stmt = conn.createStatement();
    stmt.execute('USE ' + DATABASE);
    var start = new Date();
    stmt = conn.createStatement();
    //stmt.setMaxRows(MAXROWS);
    rs = stmt.executeQuery(query);
  } catch (e) {
    console.log(e, e.lineNumber);
    Browser.msgBox(e);
    return false;
  }
  var results = [];
  var cols = rs.getMetaData();
  console.log("cols", cols);
  var colNames = [];
  var colTypes = {};
  for (var i = 1; i <= cols.getColumnCount(); i++) {
    var colName = cols.getColumnLabel(i);
    colTypes[colName] = { type: cols.getColumnTypeName(i), loc: i };
    colNames.push(colName);
  }
  var rowCount = 1;
  results.push(colNames);
  while (rs.next()) {
    var curRow = rs.getMetaData();
    var rowData = [];
    for (var i = 1; i <= curRow.getColumnCount(); i++) {
      rowData.push(rs.getString(i));
    }
    results.push(rowData);
    rowCount++;
  }
  rs.close();
  stmt.close();
  conn.close();
  console.log('results', results);
  var colCount = results[0].length;
  rowCount = results.length;
  var comment = "Updated on: " + (new Date()) + "\n" + "Query:\n" + query;
  if (options.omitColumnNames) {
    results = results.slice(1);
    rowCount -= 1;
  }
  // NOTE: startCell is not defined anywhere in this snippet
  if (options.clearColumns && sheet.getLastRow() > 0) {
    var startCellRange = sheet.getRange(startCell);
    sheet.getRange(startCellRange.getRow(), startCellRange.getColumn(), sheet.getLastRow(), colCount).clearContent();
  }
  if (options.clearSheet) {
    sheet.clear({ contentsOnly: true });
  }
  //sheet.getRange(activeCellRow, activeCellCol, rowCount, colCount).clearContent();
  sheet.getRange(activeCellRow, activeCellCol, rowCount, colCount).setValues(results);
  cell = sheet.getRange(activeCellRow, activeCellCol);
  cell.clearNote();
  cell.setNote(comment);
  sheet.setActiveRange(sheet.getRange(activeCellRow + rowCount + 1, activeCellCol));
  console.log('query success!, rows = ', rowCount - 1);
}
function runSqlFromSheet() {
  var doc = SpreadsheetApp.getActiveSpreadsheet();
  var sheet = doc.getSheets()[2];
  var sheetName = sheet.getName();
  var cell = doc.getActiveSheet().getActiveCell();
  // NOTE: sql2 is only defined in the commented-out line below,
  // so as posted this function calls runSql() with an undefined query.
  //var sql2 = '{call P_Totali_per_Azienda(?)}';
  //Logger.log('sql2;', sql2)
  var options = {};
  console.log(sql2);
  runSql(sql2, options);
}
The fact that the values get automatically converted to durations suggests that the data is in a format that makes Google Sheets consider them as duration values. This can happen if you read and write the data as text strings and your spreadsheet is in a locale that uses commas as decimal separators (as in € 1,99) and periods as time separators (as in 12.00). If you are reading the values as text strings, you may need to read them as numbers instead to avoid automatic conversion.
Consider using ResultSet.getFloat() instead of ResultSet.getString().
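As a rough sketch of how that could look in the row loop above, reusing the cols metadata the function already fetches (the type-name strings here are assumptions; check what getColumnTypeName() logs for your driver before relying on them):
while (rs.next()) {
  var rowData = [];
  for (var i = 1; i <= cols.getColumnCount(); i++) {
    var typeName = cols.getColumnTypeName(i);
    // Push real numbers for numeric columns so Sheets
    // does not re-parse locale-formatted strings
    if (typeName === 'DECIMAL' || typeName === 'DOUBLE' || typeName === 'FLOAT') {
      rowData.push(rs.getFloat(i));
    } else {
      rowData.push(rs.getString(i));
    }
  }
  results.push(rowData);
}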
Once you can write the values correctly to the spreadsheet, you can set the final format programmatically, like this:
SpreadsheetApp.getActive()
    .getRange("Sheet1!C2:D")
    .setNumberFormat("[$ €]#,##0.00");
If you always write the values to the same sheet, and get the data as numbers from the database, you can most likely manually format columns C:D as Format > Number > Currency just once, and have the format stick through future imports.

Google script exceeded maximum execution time work around [duplicate]

This question already exists: Exceeded maximum execution time in Google Apps Script [duplicate]. Closed 2 years ago.
I'm connecting to a SQL Server database via JDBC from a Google Sheets doc. The table has some 30k-odd records, and I hit an "Exceeded maximum execution time" error. Is there a way to improve performance, or a workaround for the 6-minute execution limit?
function myFunction() {
  var conn = Jdbc.getConnection("jdbc:sqlserver://dbURL", "username", "password");
  var stmt = conn.createStatement();
  stmt.setMaxRows(3000);
  var start = new Date();
  var rs = stmt.executeQuery('SELECT * FROM dbo.myTable');
  var doc = SpreadsheetApp.getActiveSpreadsheet();
  var cell = doc.getRange('a1');
  var row = 0;
  var data = [];
  while (rs.next()) {
    var rowData = [];
    for (var col = 0; col < rs.getMetaData().getColumnCount(); col++) {
      rowData[col] = (rs.getString(col + 1));
    }
    data[row] = rowData;
    row++;
  }
  rs.close();
  stmt.close();
  conn.close();
  var end = new Date();
  Logger.log('Time elapsed: ' + (end.getTime() - start.getTime()));
}
Thank you.
I modified the code to use the Continuous Batch Library. Even though I no longer get the "Exceeded maximum execution time" error, it looks like it's getting into an infinite loop and not writing a single record to the sheet.
var FUNCTION_NAME = "test";
var EMAIL_RECIPIENT = "";

function test() {
  ContinuousBatchLibrary.startOrResumeContinousExecutionInstance(FUNCTION_NAME);
  var conn = Jdbc.getConnection("jdbc:sqlserver://HOST:PORT/DBNAME", "user", "password");
  var stmt = conn.createStatement();
  stmt.setMaxRows(100);
  var rs = stmt.executeQuery('SELECT * FROM myTable');
  var SS;
  var Sheet;
  var SheetRange;
  // OPEN EXISTING SPREADSHEET
  SS = SpreadsheetApp.openById(SpreadsheetApp.getActiveSpreadsheet().getId());
  // SET SPREADSHEET ACTIVE
  SpreadsheetApp.setActiveSpreadsheet(SS);
  Sheet = SpreadsheetApp.setActiveSheet(SS.getSheetByName(SpreadsheetApp.getActiveSpreadsheet().getSheetName()));
  // GET NUMBER OF COLUMNS
  var ColCount = rs.getMetaData().getColumnCount();
  // SET COLUMN HEADERS
  for (var col = 1; col <= ColCount; col++) {
    Sheet.getRange(1, col).setValue(rs.getMetaData().getColumnName(col));
  }
  // SET RANGE TO FIRST ROW AND BOLD
  SheetRange = Sheet.getRange(1, 1, 1, Sheet.getLastColumn());
  SheetRange.setFontWeight("bold");
  var doc = SpreadsheetApp.getActiveSpreadsheet();
  var cell = doc.getRange('a2');
  var row = 0;
  var data = [];
  var i = ContinuousBatchLibrary.getBatchKey(FUNCTION_NAME) || 0;
  // NOTE: `row` is a number here, so `row.length` is undefined and the
  // condition below is never true -- likely why no records are written.
  for (; i < row.length; i++) {
    while (rs.next()) {
      var rowData = [];
      for (var col = 0; col < rs.getMetaData().getColumnCount(); col++) {
        rowData[col] = (rs.getString(col + 1));
      }
      data[row] = rowData;
      row++;
    }
    if (ContinuousBatchLibrary.isTimeRunningOut(FUNCTION_NAME)) {
      ContinuousBatchLibrary.setBatchKey(FUNCTION_NAME, i);
      break;
    }
  }
  rs.close();
  stmt.close();
  conn.close();
  if (i === row.length) {
    ContinuousBatchLibrary.endContinuousExecutionInstance(FUNCTION_NAME, EMAIL_RECIPIENT, "task complete");
  }
}
This answer is probably still the best workaround for the 6-minute timer.
The cumulative trigger run time limit is one hour a day, so if you are getting <10% of the way through your data before the timeout, you will run into that as well.