How to escape "/" character in Google Script

I'm trying to bring WordPress data into Google Sheets.
The script below worked for almost all columns, but I can't bring in the emails, and if I try to escape the "-" my script won't run.
function getPage(offset, per_page) {
  // gets posts in chunks of per_page
  var ui = SpreadsheetApp.getUi(); // used for error messages
  var ss = SpreadsheetApp.getActiveSpreadsheet();
  var options = {
    'method': 'get',
    'contentType': 'application/json',
    'muteHttpExceptions': true
  };
  var apiHost = 'https://domain.com.br/wp-json'; // set to your own domain
  url = apiHost + '/acf/v3/cadastros?per_page=' + per_page + '&offset=' + offset;
  try {
    var response = UrlFetchApp.fetch(url, options);
    var data = JSON.parse(response);
    // loop through the map and output to sheet
    for (i = 0; i < data.length; i++) {
      row = offset + i + 2; // set the row so it is below the header and takes the paging into account
      ss.getRange('A' + row).setValue(data[i].id);
      ss.getRange('B' + row).setValue(data[i].acf.contato);
      ss.getRange('C' + row).setValue(data[i].acf.e-mail); // this line breaks everything; I've tried "\" and quotes but nothing seems to work
      ss.getRange('D' + row).setValue(data[i].acf.telefone);
      ss.getRange('E' + row).setValue(data[i].acf.cnpj);
      ss.getRange('F' + row).setValue(data[i].acf.endereco);
      ss.getRange('G' + row).setValue(data[i].acf.principais_produtos);
      ss.getRange('H' + row).setValue(data[i].acf.volume_disponivel);
      ss.getRange('I' + row).setValue(data[i].acf.estoque_disponivel);
      ss.getRange('J' + row).setValue(data[i].acf.aceite);
    }
    return data.length;
  } catch (error) {
    var result = ui.alert(error.toString());
  }
  return 0;
}
How do I fix this?

A quick and easy fix is to replace data[i].acf.e-mail with data[i]['acf']['e-mail'].
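In other words, keys that contain characters such as "-" are not valid identifiers, so dot notation cannot reach them, while bracket notation can. A sketch of just that one corrected line:
ss.getRange('C' + row).setValue(data[i]['acf']['e-mail']); // bracket notation handles the "-" in the key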


How to POST / PUT webhook from Google Sheets when new rows are added

Disclaimer: I'm a newbie and don't know much. I understand webhooks a little bit, but I don't know how to code.
Now that that's out of the way, what I'm trying to do is send data from a CRM to Google Sheets, do some calculations, and send it back. I was able to figure out the first step of sending from the CRM and doing the calculations.
Here is what I want to do:
I'd like to be able to send the updated data from the Google Sheet back to the CRM as a webhook when a new row is added.
So I have created a Google Sheet and was able to add a trigger to run myFunction on edit. My current myFunction code is at the end.
Ideally it should send "id" and "updated_value" fetched from Google Sheets to the CRM via webhook (PUT method), as it will be updating a contact record.
Can anyone please share the code to add here or guide me on how to write it? Thank you in advance.
This is my code so far, and I'm currently finding resources to learn how to add more to it.
Update: One problem I'm facing now is that I'm getting the 1000th row as the last row because I've used an array formula in one column, so I'm not actually getting the last non-empty row.
function myFunction() {
  var ss = SpreadsheetApp.getActiveSpreadsheet();
  var sheet = ss.getSheetByName("Sheet1");
  var lastRow = sheet.getLastRow();
  var lastColumn = sheet.getLastColumn();
  var range = sheet.getRange(lastRow, 1, 1, lastColumn);
  var values = range.getValues();
  var data = {
    "id": values[0][1],
    "f2130": values[0][3]
  };
  var options = {
    'method': 'PUT',
    'url': 'https://api.ontraport.com/1/Contacts',
    'headers': {
      'Content-Type': 'application/json',
      'Api-Appid': 'xxxxxxxxxxxxxxx6',
      'Api-Key': 'xxxxxxxxxxxxxxxH'
    },
    body: JSON.stringify({
      "f2130": f2130,
      "id": id
    })
  };
  UrlFetchApp.fetch("https://api.ontraport.com/1/Contacts", options);
}
I was finally able to figure it out by myself. Here is the code.
You can also read the steps here:
https://damartech.com/how-to-send-and-receive-data-from-ontraport-to-google-sheet-and-back-without-zapier/
function opUpdater() {
  // Replace `spreadsheetId` and `sheetName` with the ID and name of your Google Sheets document
  const spreadsheetId = "xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx";
  const sheetName = "xxxxxx";
  // Get the sheet by ID and name
  const sheet = SpreadsheetApp.openById(spreadsheetId).getSheetByName(sheetName);
  // Get the number of rows in the sheet
  const numRows = sheet.getLastRow();
  // Initialize a variable to store the index of the last non-empty row
  let lastNonEmptyRowIndex = 0;
  // Loop through each row in the sheet
  for (let i = 1; i <= numRows; i++) {
    // Get the values in the current row
    const rowValues = sheet.getRange(i, 1, 1, sheet.getLastColumn()).getValues()[0];
    // Check if the row is empty
    if (rowValues.every(cell => cell === "")) {
      // The first fully empty row marks the end of the data, so the previous row is the last non-empty one
      lastNonEmptyRowIndex = i - 1;
      break;
    }
  }
  // If a non-empty row was found, get its values
  if (lastNonEmptyRowIndex > 0) {
    const lastNonEmptyRowValues = sheet.getRange(lastNonEmptyRowIndex, 1, 1, sheet.getLastColumn()).getValues()[0];
    // Define the variables that will be used in the request
    // The array index is the Google Sheets column number minus 1 (arrays are 0-based)
    var contact_id = parseInt(lastNonEmptyRowValues[1]);
    var new_date = lastNonEmptyRowValues[3];
  }
  // Define the URL for the request
  let url = "https://api.ontraport.com/1/Contacts";
  // Define the options for the request
  var options = {
    "method": "put",
    "headers": {
      "Content-Type": "application/json",
      "Accept": "application/json",
      "Api-Appid": "xxxxxxxxxxxxxxxx",
      "Api-Key": "xxxxxxxxxxxxxxxx"
    },
    "payload": JSON.stringify({
      "id": contact_id,
      "f2130": new_date
    }),
    "muteHttpExceptions": true
  };
  try {
    const response = UrlFetchApp.fetch(url, options);
    const responseBody = response.getContentText();
    const data = JSON.parse(responseBody);
    Logger.log(data.data.attrs.id);
    Logger.log(data.data.attrs.f2130);
  } catch (error) {
    Logger.log(error.message);
    Logger.log(error.response);
  }
}
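Regarding the update about getLastRow() returning the 1000th row because of an array formula: an alternative (a minimal sketch, not part of the answer above) is to read one key column in a single call and walk upward until a non-empty cell is found. The sheet name "Sheet1" and the assumption that column B always holds a contact ID on real data rows are placeholders; adjust them for your setup.
function getLastDataRow() {
  var sheet = SpreadsheetApp.getActiveSpreadsheet().getSheetByName("Sheet1"); // assumed sheet name
  var lastRow = sheet.getLastRow();
  if (lastRow === 0) return 0; // empty sheet
  // Read column B in one call instead of one getRange() per row.
  var ids = sheet.getRange(1, 2, lastRow, 1).getValues();
  // Walk upward from the bottom until a non-empty cell is found.
  for (var i = ids.length - 1; i >= 0; i--) {
    if (ids[i][0] !== "") {
      return i + 1; // convert the 0-based array index to a 1-based row number
    }
  }
  return 0; // no data found
}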

Google Apps Script timeout due to slowness

This would be my first time posting, so I am sorry for the things I may or may not write.
I have a working Google Apps Script which gets a file from a URL, parses the JSON, then feeds the data into a Google Spreadsheet one row at a time.
The problem I have is that some of these files are large, around 7000 rows, which the script is unable to feed within the 6-minute execution limit. The speed at which it processes each row is quite slow, about 1 row every 1-3 seconds.
I believe that the problem is with the for clause, but I cannot seem to make it work.
I am not that experienced with this kind of scripting, and this was made from "inspirations".
Is there a way to speed up the loop in order to fill out the sheet faster, so it won't time out?
function onOpen() {
  SpreadsheetApp.getUi()
    .createMenu('Update')
    .addItem('Refresh Report', 'getData')
    .addToUi();
}
function getData() {
  var ss = SpreadsheetApp.getActiveSpreadsheet();
  //var sheets = ss.getSheets();
  var datasheet = ss.getSheetByName('Data');
  var hasCreatedHeaders = false;
  //Url
  var url = 'URL HERE';
  //Create header
  var headers = {
    'Content-Type': 'application/json',
    'Authorization': 'xxx'
  };
  //Options
  var options = {
    'method': 'get',
    'headers': headers
  };
  var response = UrlFetchApp.fetch(url, options);
  var json = response.getContentText();
  var data = JSON.parse(json);
  var table = data.Table1;
  datasheet.clear();
  //foreach row
  table.forEach(function(row) {
    var cols = [];
    var headers = [];
    for (var prop in row.Columns) {
      if (!hasCreatedHeaders)
        headers.push(prop);
      cols.push(row.Columns[prop]);
    }
    if (!hasCreatedHeaders) {
      datasheet.appendRow(headers);
      hasCreatedHeaders = true;
    }
    datasheet.appendRow(cols);
  });
}
Try changing the table.forEach() loop this way:
//foreach row
var table_new = []; // <--- create a new empty array
table.forEach(function (row) {
  var cols = [];
  var headers = [];
  for (var prop in row.Columns) {
    if (!hasCreatedHeaders)
      headers.push(prop);
    cols.push(row.Columns[prop]);
  }
  if (!hasCreatedHeaders) {
    // datasheet.appendRow(headers);
    table_new.push(headers); // <--- add row to the array
    hasCreatedHeaders = true;
  }
  // datasheet.appendRow(cols);
  table_new.push(cols); // <--- add row to the array
});
// put the array on the sheet all at once
datasheet.getRange(1, 1, table_new.length, table_new[0].length).setValues(table_new);
The problem is that the appendRow() method is quite time-consuming. If you build the table as a 2D array, you can put it on the sheet in one step with the setValues() method, which is much faster.
References
Best practices

Google script to login, retrieve cookie and get csv to sheets

I can successfully get the cookie with curl -d "_u=user_name" -d "_p=password" --cookie-jar ./cookie https://url.tologin.com/admin/login, but I can't do the same with Google Apps Script:
function myFunction() {
  var payload = {"_u": "user_name", "_p": "password"};
  var opt = {"payload": payload, "method": "post"};
  var response = UrlFetchApp.fetch("https://url.tologin.com/admin/login", opt);
  var headers = response.getAllHeaders();
  Logger.log(headers);
  var cookie = headers['Set-Cookie'];
  Logger.log(cookie);
  Logger.log(response);
  response.getContentText();
  var header = {'Cookie': cookie};
  var opt2 = {"headers": header};
  var pagedata = UrlFetchApp.fetch("https://url.tologin.com/admin/sales/order/export/csv", opt2);
  Logger.log(pagedata);
}
I get an answer from the web server with a failed-login page, and I can't figure out what is wrong with the Google Apps Script version.
As written in the MDN documentation, the request Cookie header should be of the format:
Cookie: name=value; name2=value2; name3=value3
A list of name-value pairs in the form of <cookie-name>=<cookie-value>. Pairs in the list are separated by a semicolon and a space (;).
As written in the UrlFetchApp#HttpResponse documentation, getAllHeaders() returns
Returns an attribute/value map of headers for the HTTP response, with headers that have multiple values returned as arrays.
When an array is implicitly converted to a string, it is joined using a comma, which is not a valid cookie string. You could get a formatted Cookie header from a Set-Cookie header using a strip function like this:
const getCookie = setCookie =>
  Array.isArray(setCookie)
    ? setCookie.map(getCookie).join("; ") // get each cookie and join them by ;
    : setCookie.split("; ")[0]; // get only the first part of the cookie; remove irrelevant info like `Max-Age`, `expires` etc.
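As a sketch of how the helper could be used (reusing the placeholder login URL and field names from the question; followRedirects: false keeps the Set-Cookie header from being consumed by a redirect):
var opt = {"payload": {"_u": "user_name", "_p": "password"}, "method": "post", "followRedirects": false};
var response = UrlFetchApp.fetch("https://url.tologin.com/admin/login", opt);
// Set-Cookie may come back as a single string or as an array of strings; getCookie handles both
var cookie = getCookie(response.getAllHeaders()['Set-Cookie']);
var opt2 = {"headers": {"Cookie": cookie}};
var pagedata = UrlFetchApp.fetch("https://url.tologin.com/admin/sales/order/export/csv", opt2);
Logger.log(pagedata.getContentText());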
I got everything working. GAS script to:
Login and fetch the cookie with POST
Build a newCookie and use it in GET requests
Get the CSV and parse it to replace separators when necessary
Upload the CSV content to the active sheet
function parseCsvResponse(csvString) {
  var retArray = [];
  var strLines = csvString.split(/\n/g);
  var strLineLen = strLines.length;
  for (var i = 0; i < strLineLen; i++) {
    var line = strLines[i];
    if (line != '') {
      retArray.push(line.replace(/"/g, "").split(/;/));
      // replace ; with the separator from your CSV file
    }
  }
  return retArray;
}
function myFunction() {
  var payload = {'user_name': 'username', 'password': 'password'};
  // replace with the values on your login page: "name=user_name" and "name=password"
  // if your username contains @, send it directly or use %40 instead
  var opt = {
    'payload': payload,
    'method': 'post',
    "followRedirects": false,
    "testcookie": 1
  };
  var response = UrlFetchApp.fetch("https://url.tologin.com/admin/login", opt);
  // inspect the right link via Chrome inspect of your login page
  Logger.log(response);
  Logger.log(response.getResponseCode());
  if (response.getResponseCode() == 200) {
    // Incorrect user/pass combo
  } else if (response.getResponseCode() == 302) {
    // Logged in
    Logger.log("Logged in");
    var headers = response.getAllHeaders();
    Logger.log(headers);
    var cookies = headers['Set-Cookie'];
    // Extract the cookies from the login response
    var cookieParts = [];
    for (var i = 0; i < cookies.length; i++) {
      var arr = cookies[i].split('; ');
      cookieParts.push(arr[0]);
    }
    // Create a new cookie to send with subsequent requests
    var newCookie = cookieParts.join('; ');
    Logger.log(newCookie);
  }
  var opt2 = {
    "method": "get",
    "headers": {
      "Cookie": newCookie
    }
  };
  var url = "https://url.tologin.com/admin/sales/order/export/csv";
  var response2 = UrlFetchApp.fetch(url, opt2);
  var resp1 = response2.getContentText();
  var csvContent = parseCsvResponse(response2.getContentText());
  Logger.log(resp1);
  Logger.log(csvContent);
  // clear everything in the sheet
  var sheet = SpreadsheetApp.getActiveSheet();
  sheet.clearContents().clearFormats();
  // set the values in the sheet (as efficiently as we know how)
  sheet.getRange(1, 1, csvContent.length /* rows */, csvContent[0].length /* columns */).setValues(csvContent);
}
Then:
Import myFunction into the sheet via "Tools > Macros > Import".
The first run asks for Google authorization of the Apps Script project where myFunction lives.
Run myFunction via "Tools > Macros > myFunction".
You get correctly formatted CSV data in your sheet.

How to use Google Photos API Method: mediaItems.search in Google Apps Script for a spreadsheet

I really tried to figure this out on my own...
I am trying to load photo metadata from Google Photos into a sheet using the Google Photos API and Google Apps Script.
I was able to make some progress after a lot of help on a previous question:
Is it possible to load google photos metadata into google sheets?
I now have two functions.
function photoAPI_ListPhotos() - Uses Method: mediaItems.list and gives me all my photos that are not archived
function photoAPI_ListAlbums() - Uses Method: albums.list and gives me all my albums
What I want to do is retrieve all photos from a specific album. Method: mediaItems.search should do this, but it uses POST, and the previous working examples I found only use GET. Looking at the examples available on that page, there is a JavaScript portion, but it does not work in Apps Script.
The documentation for UrlFetchApp tells me how to format a POST request, but not how to add the parameters for authentication.
The External APIs guide also does not give me the examples I am looking for.
I feel like I'm missing some essential tiny piece of info, and I hope I'm not wasting everyone's time by asking it here. Just a solid example of how to use POST with OAuth in Apps Script should get me where I need to go.
Here is my working function for listing all non-archived photos.
function photoAPI_ListPhotos() {
  /*
  This function retrieves all photos from your personal Google Photos account and lists each one with the Filename, Caption, Create time (formatted for Sheets), Width, Height, and URL in a new sheet.
  It will not include archived photos, which can be confusing: if you happen to have a large chunk of archived photos, some pages may return only a next page token with no media items.
  Requires OAuth scopes. Add the line below to appsscript.json:
  "oauthScopes": ["https://www.googleapis.com/auth/spreadsheets.currentonly", "https://www.googleapis.com/auth/photoslibrary", "https://www.googleapis.com/auth/photoslibrary.readonly", "https://www.googleapis.com/auth/script.external_request"]
  Also requires a standard GCP project with the appropriate Photos APIs enabled.
  https://developers.google.com/apps-script/guides/cloud-platform-projects
  */
  // Get the spreadsheet object
  var ss = SpreadsheetApp.getActiveSpreadsheet();
  // Check for the presence of the target sheet; if it does not exist, create one.
  var photos_sh = ss.getSheetByName("photos") || ss.insertSheet("photos", ss.getSheets().length);
  // Make sure the target sheet is empty
  photos_sh.clear();
  var narray = [];
  // Build the request string. Max page size is 100; set to max for speed.
  var api = "https://photoslibrary.googleapis.com/v1/mediaItems?pageSize=100";
  var headers = { "Authorization": "Bearer " + ScriptApp.getOAuthToken() };
  var options = { "headers": headers, "method": "GET", "muteHttpExceptions": true };
  // This variable is used if you want to resume the scrape at some page other than the start. This is needed if you have more than 40,000 photos.
  // Uncomment the line below and add the next page token for where you want to start in the quotes.
  //var nexttoken="";
  var param = "", nexttoken;
  // Start counting how many pages have been processed.
  var pagecount = 0;
  // Make the first row a title row
  var data = [
    "Filename",
    "description",
    "Create Time",
    "Width",
    "Height",
    "ID",
    "URL",
    "NextPage"
  ];
  narray.push(data);
  // Loop through JSON results until a nextPageToken is not returned, indicating the end of the data
  do {
    // If there is a nextPageToken, add it to the end of the request string
    if (nexttoken)
      param = "&pageToken=" + nexttoken;
    // Get data and load it into a JSON object
    var response = UrlFetchApp.fetch(api + param, options);
    var json = JSON.parse(response.getContentText());
    // Check if there are mediaItems to process.
    if (typeof json.mediaItems === 'undefined') {
      // If there are no mediaItems, add a blank line in the sheet with the returned nextPageToken
      //var data = ["","","","","","","",json.nextPageToken];
      //narray.push(data);
    } else {
      // Loop through the JSON object, adding the desired data to the spreadsheet.
      json.mediaItems.forEach(function (MediaItem) {
        // Check if the media item has a description (caption) and make that cell blank if it is not present.
        if (typeof MediaItem.description === 'undefined') {
          var description = "";
        } else {
          var description = MediaItem.description;
        }
        // Format the create date as appropriate for spreadsheets.
        var d = new Date(MediaItem.mediaMetadata.creationTime);
        var data = [
          MediaItem.filename,
          "'" + description, // The prepended apostrophe makes captions that are dates or numbers save in the sheet as strings.
          d,
          MediaItem.mediaMetadata.width,
          MediaItem.mediaMetadata.height,
          MediaItem.id,
          MediaItem.productUrl,
          json.nextPageToken
        ];
        narray.push(data);
      });
    }
    // Get the nextPageToken
    nexttoken = json.nextPageToken;
    pagecount++;
    // Continue if the nextPageToken is not null.
    // Also stop if you reach 400 pages processed; this prevents the script from timing out. You will need to resume manually using the nexttoken variable above.
  } while (pagecount < 4 && nexttoken);
  // Continue if the nextPageToken is not null (this alternative is commented out; it can be used if your collection is small enough that it will not time out).
  //} while (nexttoken);
  // Save all the data to the spreadsheet.
  photos_sh.getRange(1, 1, narray.length, narray[0].length).setValues(narray);
}
You want to retrieve all photos of a specific album using the Google Photos API.
You want to know how to use the method of mediaItems.search with Google Apps Script.
You have already been able to retrieve data using the Google Photos API.
If my understanding is correct, how about this sample script? Please think of this as just one of several possible answers.
Sample script 1:
var albumId = "###"; // Please set the album ID.
var headers = {"Authorization": "Bearer " + ScriptApp.getOAuthToken()};
var url = "https://photoslibrary.googleapis.com/v1/mediaItems:search";
var mediaItems = [];
var pageToken = "";
do {
  var params = {
    method: "post",
    headers: headers,
    contentType: "application/json",
    payload: JSON.stringify({albumId: albumId, pageSize: 100, pageToken: pageToken}),
  };
  var res = UrlFetchApp.fetch(url, params);
  var obj = JSON.parse(res.getContentText());
  Array.prototype.push.apply(mediaItems, obj.mediaItems);
  pageToken = obj.nextPageToken || "";
} while (pageToken);
Logger.log(mediaItems)
For the mediaItems.search method, albumId, pageSize and pageToken are included in the payload, and the values are sent with the content type application/json.
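If you still need the album ID, it can be looked up with the albums.list endpoint; a minimal sketch (the function name listAlbumIds is just for illustration, and the same OAuth scopes as above are assumed):
function listAlbumIds() {
  var headers = {"Authorization": "Bearer " + ScriptApp.getOAuthToken()};
  var url = "https://photoslibrary.googleapis.com/v1/albums?pageSize=50";
  var pageToken = "";
  do {
    var res = UrlFetchApp.fetch(url + (pageToken ? "&pageToken=" + pageToken : ""), {headers: headers});
    var obj = JSON.parse(res.getContentText());
    // Log each album title together with its ID so it can be pasted into albumId above.
    (obj.albums || []).forEach(function (album) {
      Logger.log(album.title + " : " + album.id);
    });
    pageToken = obj.nextPageToken || "";
  } while (pageToken);
}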
Sample script 2:
When your script is modified, how about the following modified script?
function photoAPI_ListPhotos() {
  var albumId = "###"; // Please set the album ID.
  var ss = SpreadsheetApp.getActiveSpreadsheet();
  var photos_sh = ss.getSheetByName("photos") || ss.insertSheet("photos", ss.getSheets().length);
  photos_sh.clear();
  var narray = [];
  var api = "https://photoslibrary.googleapis.com/v1/mediaItems:search";
  var headers = { "Authorization": "Bearer " + ScriptApp.getOAuthToken() };
  var nexttoken = "";
  var pagecount = 0;
  var data = ["Filename", "description", "Create Time", "Width", "Height", "ID", "URL", "NextPage"];
  narray.push(data);
  do {
    var options = {
      method: "post",
      headers: headers,
      contentType: "application/json",
      payload: JSON.stringify({albumId: albumId, pageSize: 100, pageToken: nexttoken}),
    };
    var response = UrlFetchApp.fetch(api, options);
    var json = JSON.parse(response.getContentText());
    if (typeof json.mediaItems === 'undefined') {
      //If there are no mediaItems, Add a blank line in the sheet with the returned nextpagetoken
      //var data = ["","","","","","","",json.nextPageToken];
      //narray.push(data);
    } else {
      json.mediaItems.forEach(function (MediaItem) {
        if (typeof MediaItem.description === 'undefined') {
          var description = "";
        } else {
          var description = MediaItem.description;
        }
        var d = new Date(MediaItem.mediaMetadata.creationTime);
        var data = [
          MediaItem.filename,
          "'" + description,
          d,
          MediaItem.mediaMetadata.width,
          MediaItem.mediaMetadata.height,
          MediaItem.id,
          MediaItem.productUrl,
          json.nextPageToken
        ];
        narray.push(data);
      });
    }
    nexttoken = json.nextPageToken || "";
    pagecount++;
  } while (pagecount < 4 && nexttoken);
  photos_sh.getRange(1, 1, narray.length, narray[0].length).setValues(narray);
}
Note:
This script supposes the following:
The Google Photos API is enabled.
The scope https://www.googleapis.com/auth/photoslibrary.readonly or https://www.googleapis.com/auth/photoslibrary is included in the scopes.
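For reference, a minimal sketch of what the oauthScopes declaration in the project's appsscript.json manifest could look like (the timeZone value is just an example, and script.external_request is included because the script uses UrlFetchApp):
{
  "timeZone": "Etc/GMT",
  "exceptionLogging": "STACKDRIVER",
  "oauthScopes": [
    "https://www.googleapis.com/auth/photoslibrary.readonly",
    "https://www.googleapis.com/auth/spreadsheets.currentonly",
    "https://www.googleapis.com/auth/script.external_request"
  ]
}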
Reference:
Method: mediaItems.search
If I misunderstood your question and this was not the result you want, I apologize.

UrlFetchApp query a spreadsheet with gviz

I am trying to query a spreadsheet with gviz (Google Visualization) using UrlFetchApp, but with no result so far.
Could you help me fix this code?
(The query URL works fine in the browser.)
function queryTest() {
  var onlyforscope = SpreadsheetApp.getActiveSpreadsheet();
  var template = "https://docs.google.com/spreadsheets/d/%s/gviz/tq?gid=%s&tq=select C,E,K,M,N,O where C contains '%s'";
  var query = Utilities.formatString(template, docId, sheetId, value);
  var param = {
    method: "get",
    headers: {"Authorization": "Bearer " + ScriptApp.getOAuthToken()},
    muteHttpExceptions: true,
  };
  var r = UrlFetchApp.fetch(query, param).getContentText();
  // var j = JSON.parse(r);
  Logger.log(r);
  return;
}
Thanks in advance, Fausto
It was trivial, though hard for me to find out: the required scope is Drive!
I just added this line and it worked:
var onlyforscope = DriveApp.getRootFolder();
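Put together, a minimal sketch of the working function with the Drive-scope line added (docId, sheetId and value are still placeholders, as in the original question):
function queryTest() {
  // Referencing DriveApp makes Apps Script request the Drive scope, which the gviz endpoint needs.
  var onlyforscope = DriveApp.getRootFolder();
  var docId = "###";   // placeholder: spreadsheet ID
  var sheetId = "###"; // placeholder: gid of the sheet tab
  var value = "###";   // placeholder: text to match in column C
  var template = "https://docs.google.com/spreadsheets/d/%s/gviz/tq?gid=%s&tq=select C,E,K,M,N,O where C contains '%s'";
  var query = Utilities.formatString(template, docId, sheetId, value);
  var param = {
    method: "get",
    headers: {"Authorization": "Bearer " + ScriptApp.getOAuthToken()},
    muteHttpExceptions: true,
  };
  var r = UrlFetchApp.fetch(query, param).getContentText();
  Logger.log(r);
}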