Change Gmail label once attachment is uploaded to Drive

How can you remove the existing "Global Alcohol" label and add a "Global Processed" label to the email messages whose attachments have been uploaded to Google Drive by the following code? I must give credit to Cooper, who answered my first question and helped me get the uploads working.
function saveAttachmentInFolder(){
var folder = DriveApp.getFolderById('xxxxxxxxxxxxx');
var userId = "myemail#gmail.com";
var query = "label:Global Alcohol";
var res = Gmail.Users.Messages.list(userId, {q: query});//I assumed that this works
res.messages.forEach(function(m){
var attA=GmailApp.getMessageById(m.id).getAttachments();
attA.forEach(function(a){
var ts=Utilities.formatDate(new Date(),Session.getScriptTimeZone(), "yyMMddHHmmss");
folder.createFile(a.copyBlob()).setName(a.getName()+ts);
});
});
}
I have read the API documentation and can see that you need to use the following code to modify the labels. However, I am stuck on how to integrate it into the function above.
function modifyMessage(userId, messageId, labelsToAdd, labelsToRemove, callback) {
var request = gapi.client.gmail.users.messages.modify({
'userId': userId,
'id': messageId,
'addLabelIds': labelsToAdd,
'removeLabelIds': labelsToRemove
});
request.execute(callback);
}

You were on the right track with the modification call, but the formatting is slightly off. The trick here is that you need to use the label IDs, so I wrote a new function getLabelsByName() that allows you to perform that lookup.
function saveAttachmentInFolder(){
var folder = DriveApp.getFolderById('xxxxxxxxxxxxx');
var userId = "myemail#gmail.com";
var query = "label:Global Alcohol";
var labels = getLabelsByName(userId, ["Global Alcohol", "Global Processed"]);
var res = Gmail.Users.Messages.list(userId, {q: query});//I assumed that this works
res.messages.forEach(function(m){
var attA=GmailApp.getMessageById(m.id).getAttachments();
attA.forEach(function(a){
var ts=Utilities.formatDate(new Date(),Session.getScriptTimeZone(), "yyMMddHHmmss");
folder.createFile(a.copyBlob()).setName(a.getName()+ts);
});
// Remove the old label & add the new one
Gmail.Users.Messages.modify({
addLabelIds: [labels["Global Processed"].id],
removeLabelIds: [labels["Global Alcohol"].id]
}, userId, m.id);
});
}
/**
* Lookup any number of labels by their name using the advanced Gmail service.
* @param {String} userId - The user's email address or "me" to get your own
* @param {String[]} labelNames - An array of label names to search for
* @returns {Object} - Map of labels keyed by label name
* https://developers.google.com/gmail/api/v1/reference/users/labels
*/
function getLabelsByName(userId, labelNames) {
var response = Gmail.Users.Labels.list(userId);
var selectedLabels = {};
for (var i = 0; i < response.labels.length; i++) {
var label = response.labels[i];
if (labelNames.indexOf(label.name) != -1) {
selectedLabels[label.name] = label;
}
}
return selectedLabels;
}
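If you want to sanity-check the lookup before wiring it into the main function, a quick test sketch (the function name is just an example) could be:
function testLabelLookup() {
  // Logs the label IDs that the modify call will use; "me" works for your own mailbox
  var labels = getLabelsByName("me", ["Global Alcohol", "Global Processed"]);
  Logger.log(labels["Global Alcohol"].id);
  Logger.log(labels["Global Processed"].id);
}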

Google Apps Script Google People API get contacts from contact group

I have a script (using Google Apps Script) that uses the Contacts API to pull the emails from a contact group, then sends an email to the group of contacts. I'm attempting to convert it to the People API and cannot seem to replicate the functionality. Here's the relevant script sample, with the working Contacts API code included but commented out:
function sharereport() {
//var CGroup = ContactsApp.getContactGroup('group_name');
//var Dist = CGroup.getContacts();
var CGroup = People.ContactGroups.get('contactGroups/498ba6e40f63a476')
var Dist = People.People.getBatchGet('CGroup','people.email_addresses');
.
.
.
for (var i = 0; i < Dist.length; i++) {
var nextemail = Dist[i].getEmails();
var usethisemail = nextemail[0].getAddress();
Drive.Permissions.insert(
// @ts-ignore
{
'role': 'writer',
'type': 'user',
'value': usethisemail,
},
MyID,
{
'sendNotificationEmails': 'false',
});
MailString = MailString + ',' + usethisemail;
};
I'm sure I'm missing something really simple here to get the People API to return an array of contacts that I can get the email addresses out of, so I can populate the Drive permissions and the email To: field.
Since I did not find any method that gets them directly, I used People.People.Connections.list and filtered the data until I got the emails. This is what your code could look like:
function sharereport() {
var CGroup = ContactsApp.getContactGroup('label1');
var Emails = CGroup.getContacts().map(function (contact) {
return contact.getEmailAddresses();
});
// Show emails of people belonging to the group label1
Logger.log(Emails);
var PGroup = People.People.Connections.list('people/me', {
personFields: 'emailAddresses,memberships'
});
// resource name of label1
var resourceName = 'contactGroups/7086c0fa8e7b006b';
var Emails2 = [];
PGroup.connections.forEach(function (person) {
person.memberships.forEach(function (membership) {
if (resourceName == membership.contactGroupMembership.contactGroupResourceName) {
var addresses = [];
person.emailAddresses.forEach(function (emailAddress){
// people can have multiple email addresses, add them all
addresses.push(emailAddress.value);
});
Emails2.push(addresses);
}
});
});
Logger.log(Emails2);
}
Behavior:
Get all the people connections under your account.
Get all of their memberships.
Check whether a membership's contact group resource name is equal to the one you want (people can belong to multiple groups).
Loop over all the emails of that person (they can have multiple emails) and push them into one array.
Push that array into the final array containing all emails of all people belonging to that resource name.
Output:
Logging both the ContactsApp and People API results showed that they were the same.
Resources:
people.connections/list
advanced/people
Here's a more efficient way of doing what you want:
var group = People.ContactGroups.get('contactGroups/50e3b0650db163cc', {
maxMembers: 25000
});
Logger.log("group: " + group);
var group_contacts = People.People.getBatchGet({
resourceNames: group.memberResourceNames,
personFields: "emailAddresses"
});
Logger.log("emails: " + group_contacts.responses.map(x => {
var emailObjects = x.person.emailAddresses;
if (emailObjects != null) {
return emailObjects.map(eo => eo.value);
}
}));
The first call gets all the group's members (resourceNames, AKA contact IDs).
The second call gets all the members' email addresses.
Then we just get the actual value of each email from the response (via map).
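If you then need a flat, comma-separated recipient list (like the MailString built in the question), a small follow-up sketch based on group_contacts above could be:
// Flatten the per-person email arrays and join them into one string
var allAddresses = group_contacts.responses
  .map(function(x) { return x.person.emailAddresses || []; })
  .reduce(function(acc, arr) { return acc.concat(arr); }, [])
  .map(function(eo) { return eo.value; });
var MailString = allAddresses.join(',');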

Script only works when uploading a single file

When an employee submits the Google Form, this script renames the file they have uploaded based on the information that employee fills out in the form. (At the moment it is pulling their name, job ID, and the address of the current job location; the files are always pictures of what they completed on the jobsite.)
However, the script only renames the file if a single file/picture is uploaded to the form; it does not handle all of the files.
What modification would allow it to rename all files uploaded through the form?
while (files.hasNext()) {
for (var i = 0; i < formResponses.length; i++) {
var formResponse = formResponses[i];
var itemResponses = formResponse.getItemResponses();
var itemResponseFname = itemResponses[0];
var itemResponseLname = itemResponses[10];
var itemResponseID = itemResponses[11];
var itemResponsePhoto = itemResponses[13];
var photoID = itemResponsePhoto.getResponse();
var newName = itemResponseFname.getResponse() + " " + itemResponseLname.getResponse() + " - " + itemResponseID.getResponse();
var url = baseString + photoID + endString;
var urlCheck = file.getUrl();
if ( url == urlCheck) {
var modName = newName + ".jpg";
file.setName(modName);
The easiest way of doing this is the following:
Install a form-submit trigger
Get the list of IDs of the submitted files
Change their names to whatever you like
In practice this will look something like this:
function submit(e) {
const itemResponses = e.response.getItemResponses()
// Read values
const fname = itemResponses[0].getResponse()
const lname = itemResponses[10].getResponse()
const uid = itemResponses[11].getResponse()
const imageIds = itemResponses[13].getResponse()
// Iterate images
for (let imgId of imageIds){
const image = DriveApp.getFileById(imgId)
// Create new filename (preserves extension)
const filename = `${fname} ${lname} - ${uid}${getExtension(image)}`
// Set file name
image.setName(filename)
}
}
/**
* Returns the extension of a file
*
* For example: A file with name 'example.json' will return '.json'.
*
* @param {DriveApp.File} file - File to extract the extension from
* @returns {string} The extension, including the dot.
*/
function getExtension(file) {
// Gets the last dot and the characters that follow
const r = /(\.\w+)$/.exec(file.getName())
// If it has no extension return an empty string, otherwise return the captured group
return r === null ? '' : r[1]
}
Obviously you need to add the rest of the code you already have, and install the trigger if you haven't already.
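If you haven't installed the trigger yet, a minimal one-off sketch (assuming the script is bound to the form and the handler above is named submit) could look like this:
function installSubmitTrigger() {
  // Run once manually so that submit(e) fires on every form submission
  ScriptApp.newTrigger('submit')
    .forForm(FormApp.getActiveForm())
    .onFormSubmit()
    .create();
}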

Speed up Google Apps Script so I can return a success response within Shopify's 5-second limit

I have this google apps script I wrote that I'm using as a web app as an endpoint for a Shopify webhook.
The issue I'm having is that Shopify has a 5-second limit to receive a success response; otherwise the webhook will fire again to ensure you don't miss it.
The problem is that my script takes too long to finish, which triggers a duplicate webhook and runs my code multiple times, which I don't want.
Is there a way to respond quicker, or to clean up my script so it finishes quicker?
PLEASE NOTE: I need my script to be easy to modify, since exact values might change or be different in the final version as I'm still developing this app. (Additionally, I need a way to leave a column blank when a value is missing, so values don't get mixed up with the wrong column headers.)
function doPost(e){
var data = JSON.parse(e.postData.contents);
var ss = SpreadsheetApp.getActiveSpreadsheet().getSheetByName('Sheet1');
var l = data.line_items.length;
for (var i=0;i<l;i++){
var prop = data.line_items[i].properties;
if (prop.length>0){
var pdf = prop.find(function(x) {if(x.name == "_pdf") return x});
if (!pdf){pdf = "Prop not found";}else{pdf = pdf.value};
var shape = prop.find(function(x) {if(x.name.toLowerCase() == "shape") return x});
if (!shape){shape = "Prop not found";}else{shape = shape.value};
var test = prop.find(function(x) {if(x.name == "test") return x});
if (!test){test = "Prop not found";}else{test = test.value};
}else{
var pdf = "N/A"
var shape = "N/A"
var test = "N/A"
};
var count = "Item "+ (i+1) + " of " + l;
var qty = data.line_items[i].quantity;
var title = data.line_items[i].title;
var id = data.id.toString();
var email = data.email;
var totalPrice = data.total_price;
var discounts = data.total_discounts;
var acceptAds = data.buyer_accepts_marketing;
var orderStatus = data.order_status_url;
var addr = data.shipping_address.address1;
var city = data.shipping_address.city;
var state = data.shipping_address.province;
var zip = data.shipping_address.zip;
var phone = data.shipping_address.phone;
var firstName = data.shipping_address.first_name;
var lastName = data.shipping_address.last_name;
var orderNum = data.name;
var d = new Date(data.created_at).toLocaleString();
ss.appendRow([d,orderNum,email,count,title,shape,test,qty,totalPrice,discounts,pdf,firstName,lastName,addr,city,state,zip,phone,orderStatus]);
if (pdf != "N/A"){
if (pdf != "Prop not found"){
var res = UrlFetchApp.fetch(pdf);
var blob = res.getBlob();
var createFile = DriveApp.getFolderById('xxxxxxxxxxxxx').createFile(blob.getAs('application/pdf'));
var fileName = orderNum + " " + qty;
createFile.setName(fileName);
}}
};
}
It's slower than using the PropertiesService, but I like using Sheets as a queue. (I use this with services that require responses within 3 seconds.) Not only is it easier to work with, but I've actually had issues with using Properties that are addressed with the appendRow() method:
Appends a row to the spreadsheet. This operation is atomic; it prevents issues where a user asks for the last row, and then writes to that row, and an intervening mutation occurs between getting the last row and writing to it.
When you receive the POST data, simply add it to the queue and terminate. Apps Script will send a 200 success response, so Shopify shouldn't send duplicate requests.
Then have a time-driven trigger that runs a processQueue() function at the interval of your choice.
function doPost(e) {
const queue = new Queue(SpreadsheetApp.getActive().getId(), "Unprocessed", "Processed");
queue.append(e.postData.contents, /* skipRefresh */ true);
}
function processQueue() {
const queue = new Queue(SpreadsheetApp.getActive().getId(), "Unprocessed", "Processed");
while (queue.hasNext()) {
try {
const data = JSON.parse(queue.next());
doSomethingWithShopifyData(data); // Process your data
queue.moveToProcessed();
} catch (error) {
console.error(error);
queue.skip();
}
}
}
function doSomethingWithShopifyData(data) { /* your existing code, but with appropriate modifications */ }
Here's the class I use to abstract the spreadsheet into a queue. I have it setup to preserve all of the data moving it from an unprocessed to a processed sheet. You may prefer to simply delete the data once processed.
/**
* A spreadsheet is used as a makeshift queue for processing requests asynchronously.
* @param {string} spreadsheetId - The ID of the spreadsheet to be used for the queue.
* @param {string} unprocessedSheetName - The name of the sheet to be used for storing unprocessed items.
* @param {string} processedSheetName - The name of the sheet to be used for storing processed items.
*/
class Queue {
constructor(spreadsheetId, unprocessedSheetName, processedSheetName) {
this.index = 0;
this.row = 1;
this.spreadsheet = SpreadsheetApp.openById(spreadsheetId);
this.unprocessedSheet = this.spreadsheet.getSheetByName(unprocessedSheetName);
this.processedSheet = this.spreadsheet.getSheetByName(processedSheetName);
}
/**
* Determines whether calling next() will return an item.
* @returns {boolean}
*/
hasNext() {
if (this.unprocessedValues == null) { this.refreshUnprocessedValues(); }
return this.index < this.unprocessedValues.length;
}
/**
* Get and save the unprocessed element values to the queue.
* @returns {object[]}
*/
refreshUnprocessedValues() {
try {
const range = this.unprocessedSheet.getRange(1, 1, this.unprocessedSheet.getLastRow());
this.unprocessedValues = range.getValues();
} catch (error) {
this.unprocessedValues = [];
}
return this.unprocessedValues;
}
/**
* Get the next element from the queue.
* @returns {string}
*/
next() {
return this.unprocessedValues[this.index++][0];
}
/**
* Skip the current queue element. Update row property to maintain synchronization
* with the spreadsheet range.
*/
skip() {
this.row++;
}
/**
* Add new data to the queue for processing.
* @param {string} data - The data to add to the queue.
* @param {boolean} [skipRefresh] - Default: false. If true, will skip refreshing the queue values.
*/
append(data, skipRefresh) {
this.unprocessedSheet.appendRow([data]);
if (!skipRefresh) { this.refreshUnprocessedValues(); }
}
/**
* Move a payload out of the unprocessed sheet and into the processed sheet. Uses the payload
* at the top of the unprocessed range.
*/
moveToProcessed() {
const cell = this.unprocessedSheet.getRange(this.row, 1);
// Move into processed sheet
this.processedSheet.appendRow([cell.getValue()]);
// Move out of unprocessed sheet
cell.deleteCells(SpreadsheetApp.Dimension.ROWS);
}
}
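As a side note, a minimal sketch for installing the time-driven trigger mentioned above (the five-minute interval is just an example; pick whatever suits your volume):
function installProcessQueueTrigger() {
  // Run once manually: processQueue() will then run every 5 minutes
  ScriptApp.newTrigger('processQueue')
    .timeBased()
    .everyMinutes(5)
    .create();
}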

How to pull data from multiple Mailchimp endpoints?

The code below pulls data from the Mailchimp API Reports endpoint and adds it to Sheets.
I would like to add some more data from other endpoints (like fields from the "List/Audience" endpoint, e.g. member_count and total_contacts) but don't have a slick solution for this.
What's the best practice/solution here? Can this task be kept in the same function or is a separate function preferable?
I'm new in this area so bear with me :)
function chimpCampaigns() {
var API_KEY = 'X'; // MailChimp API Key
var REPORT_START_DATE = '2018-01-01 15:54:00'; // Report Start Date (ex. when you sent your first MailChimp Newsletter)
var ss = SpreadsheetApp.getActiveSpreadsheet();
var sheet = ss.getSheetByName("CampaignData");
var dc = API_KEY.split('-')[1];
var api = 'https://'+ dc +'.api.mailchimp.com/3.0';
var count = 100; // Max rows to return
var campaignList = '/campaigns?&count='+count+'&since_send_time='+REPORT_START_DATE
var options = {"headers": {"authorization": 'apikey '+API_KEY}};
var apiCall = function(endpoint){
apiResponseCampaigns = UrlFetchApp.fetch(api+endpoint,options);
json = JSON.parse(apiResponseCampaigns);
return json
}
var campaigns = apiCall(campaignList);
var total = campaigns.total_items;
var campaignData = campaigns.campaigns;
if (campaignData) {
sheet.clear(); // Clear MailChimp data in Spreadsheet
// Append Column Headers
sheet.appendRow(["Sent Time", "Campaign ID", "Audience", "Campaign Title", "Subject Line", "Emails Sent", "Abuse Reports", "Unsubscribed", "Unsubscribe Rate", "Hard Bounces", "Soft Bounces", "Bounces Total", "Syntax Errors", "Forwards Count", "Forwards Opens", "Opens Total", "Unique Opens", "Open Rate", "Last Open", "Clicks Total", "Unique Clicks","Unique Subscriber Clicks", "Click Rate", "Last Click"]);
}
for (i=0; i< campaignData.length; i++){
var c = campaignData[i];
var cid = c.id;
var title = c.title;
var subject = c.subject;
var send_time = c.send_time;
if (send_time){
apiResponseReports = UrlFetchApp.fetch('https://'+ dc +'.api.mailchimp.com/3.0/reports/'+cid,options);
reports = JSON.parse(apiResponseReports);
reportsSendTime = reports.send_time;
if(reportsSendTime){
var campaign_title = c.settings.title;
var subject_line = c.settings.subject_line;
var emails_sent = reports.emails_sent;
var list_name = reports.list_name;
var fields = reports.fields;
var abuse_reports = reports.abuse_reports;
var unsubscribed = reports.unsubscribed;
var unsubscribe_rate = unsubscribed/emails_sent;
var hard_bounces = reports.bounces.hard_bounces;
var soft_bounces = reports.bounces.soft_bounces;
var bounces = hard_bounces+soft_bounces;
var syntax_errors = reports.bounces.syntax_errors;
var forwards_count = reports.forwards.forwards_count;
var forwards_opens = reports.forwards.forwards_opens;
var opens_total = reports.opens.opens_total;
var unique_opens = reports.opens.unique_opens;
var open_rate = reports.opens.open_rate;
var last_open = reports.opens.last_open;
var clicks_total = reports.clicks.clicks_total;
var unique_clicks = reports.clicks.unique_clicks;
var unique_subscriber_clicks = reports.clicks.unique_subscriber_clicks;
var click_rate = reports.clicks.click_rate;
var last_click = reports.clicks.last_click;
// the report array is how each row will appear on the spreadsheet
var report = [send_time, fields, cid, list_name, campaign_title, emails_sent, subject_line, abuse_reports, unsubscribed, unsubscribe_rate, hard_bounces, soft_bounces, bounces, syntax_errors, forwards_count, forwards_opens, opens_total, unique_opens, open_rate, last_open, clicks_total, unique_clicks, unique_subscriber_clicks, click_rate, last_click];
sheet.appendRow(report);
}
}
}
}
You can call each endpoint in succession using the error-first pattern. More on this here.
If your previous call returns data and doesn't error out, you pass the next function as a callback, etc.
In the example below, I've omitted the logic that builds the URL endpoint, query string, and the 'options' object, as these can simply be borrowed from your code.
Basically, you define a function with a callback parameter for each API endpoint.
Whenever you need to call multiple endpoints, you create a 3rd function that calls them in succession, passing each new function call as a parameter to the previous one.
The inner functions will still have access to the outer scope so you can combine data from multiple endpoints after the last call is executed (provided you assign unique names to the returned data - 'campaigns', 'reports', etc)
//function for the 'campaigns' endpoint
function getCampaigns(options, callback) {
//API call (include muteHttpExceptions: true in options so non-200 responses reach the else branch instead of throwing)
var response = UrlFetchApp.fetch(campaignsEndpoint, options);
if (response.getStatusCode() == 200) {
var campaigns = JSON.parse(response.getContentText());
callback(false, campaigns);
} else {
callback("Error: Server responded with the status code of " + response.getStatusCode());
}
}
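For reference, a getReports counterpart might be sketched the same way (reportsEndpoint is an assumed variable, built like campaignsEndpoint from your code):
function getReports(options, callback) {
  var response = UrlFetchApp.fetch(reportsEndpoint, options);
  if (response.getStatusCode() == 200) {
    var reports = JSON.parse(response.getContentText());
    callback(false, reports);
  } else {
    callback("Error: Server responded with the status code of " + response.getStatusCode());
  }
}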
After creating the function for calling the 'reports' endpoint using the same approach, combine calls in the 3rd function.
function getCampaignsAndReports(){
var combinedData = {};
getCampaigns(options, function(err, campaigns){
if (!err && campaigns) {
//Call is successful - proceed to the next call
getReports(options, function(err, reports){
//Call successful
if (!err && reports) {
//Proceed to the next call or combine data from
//multiple endpoints
combinedData.campaigns = campaigns.campaigns;
combinedData.reports = reports.reports;
//write to sheet
//...
} else {
//Error calling reports endpoint
throw err;
}
});
} else {
//Error calling 'campaigns' endpoint. Throw error or write
//another function to show it to the user
throw err;
}
});
}
This may vary depending on how the Mailchimp API data is structured, so please change the code accordingly. Also, if you need to call the 'reports' endpoint once for each entry returned by the 'campaigns' endpoint, you can change your function to handle multiple request (options) objects using UrlFetchApp.fetchAll(requests). More on this here. Calling this method returns multiple response objects that you can iterate over.
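A minimal sketch of that batched approach, reusing api, API_KEY and campaignData from the question's code (field names are assumed to match the question):
// Build one request per campaign, then fetch all reports in a single batch call
var requests = campaignData.map(function(c) {
  return {
    url: api + '/reports/' + c.id,
    headers: {"authorization": 'apikey ' + API_KEY},
    muteHttpExceptions: true // so one failed report doesn't abort the whole batch
  };
});
var responses = UrlFetchApp.fetchAll(requests);
var reports = responses.map(function(r) {
  return r.getResponseCode() == 200 ? JSON.parse(r.getContentText()) : null;
});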

Google Sheets script inserting data into a sheet

So I have a script that gets some data from a server.
I want my script to publish the data to a sheet named "Market Items".
I got this working when running the script directly from the sheet by using =getMarketItemsTrigger(1).
It posts all 11,669 items to my sheet.
The problem with this is that it refreshes every time the sheet is reloaded; I need it to only run once a month.
I've been trying to create a script which needs no reference in the given sheet but posts directly to a pre-named sheet, but I can't figure out how to get the data into the sheet.
This is the script file I'm using:
var version = '9a'
function getMarketItemsTrigger(refresh)
{
var ss = SpreadsheetApp.getActiveSpreadsheet();
var sheet = ss.getSheetByName("Market Items");
Logger.log(sheet.getMaxColumns());
Logger.log(sheet.getMaxRows());
sheet.clear();
if(sheet.getMaxColumns()>2){
Logger.log('deleting colums');
sheet.deleteColumns(2, sheet.getMaxColumns()-2);
}
if(sheet.getMaxRows()>2){
Logger.log('deleting rows');
sheet.deleteRows(2,sheet.getMaxRows()-1);
}
var marketItemsEndpoint = 'https://crest-tq.eveonline.com/market/types/';
var marketItemsResponse = JSON.parse(fetchUrl(marketItemsEndpoint));
var totalPages = marketItemsResponse['pageCount'];
var itemList = [];
var headers = ['Item Name', 'ID'];
itemList.push(headers);
for (var currentPage = 1; currentPage <= totalPages; currentPage++)
{
Logger.log('Processing page ' + currentPage);
var marketItems = marketItemsResponse['items'];
for (var itemReference in marketItems)
{
var item = marketItems[itemReference];
itemList.push([item['type']['name'], item['id']]);
}
if (currentPage < totalPages)
{
var nextEndpoint = marketItemsResponse['next']['href'];
marketItemsResponse = JSON.parse(fetchUrl(nextEndpoint));
}
}
//sheet.insertRows(1,itemList.length+1);
// var range = sheet.getRange(1, 1,itemList.length+1,3);
// for(var i = 1;i<itemList.length;i++){
// range.getCell(i, 1).setValue([itemList]);
// range.getCell(1, i).setValue(itemList.);
// }
// Logger.log("don");
//sheet.getRange(1, 1, 1, itemList.length).setValues(itemList);
// sheet.getRange(itemList.length+1, 2).setValues(itemList);
// sheet.getDataRange().setValues([itemList]);
// sheet.appendRow(itemList);
// sheet.getRange(12+totalPages, 1, itemList.length, 1).setValues(itemList);
return itemList;
}
/**
* Private helper method that wraps the UrlFetchApp in a semaphore
* to prevent service overload.
*
* @param {string} url The URL to contact
* @param {Object} options The fetch options to utilize in the request
*/
function fetchUrl(url)
{
if (gcsGetLock())
{
// Make the service call
headers = {"User-Agent": "Google Crest Script version " + version + " (/u/nuadi #Reddit.com)"}
params = {"headers": headers}
httpResponse = UrlFetchApp.fetch(url, params);
}
return httpResponse;
}
/**
* Custom implementation of a semaphore after LockService failed to support GCS properly.
* Hopefully this works a bit longer...
*
* This function searches through N semaphores, until it finds one that is not defined.
* Once it finds one, that n-th semaphore is set to TRUE and the function returns.
* If no semaphore is open, the function sleeps 0.1 seconds before trying again.
*/
function gcsGetLock()
{
var NLocks = 150;
var lock = false;
while (!lock)
{
for (var nLock = 0; nLock < NLocks; nLock++)
{
if (CacheService.getDocumentCache().get('GCSLock' + nLock) == null)
{
CacheService.getDocumentCache().put('GCSLock' + nLock, true, 1)
lock = true;
break;
}
}
}
return lock;
}
/**
* Private helper function that will check for a new version of GCS.
*/
function versionCheck()
{
var versionEndpoint = 'https://raw.githubusercontent.com/nuadi/googlecrestscript/master/version';
var newVersion = fetchUrl(versionEndpoint);
if (newVersion != null)
{
newVersion = newVersion.getContentText().trim();
Logger.log('Current version from Github: ' + newVersion);
var message = 'You are using the latest version of GCS. Fly safe. o7';
var title = 'No updates found';
if (newVersion > version)
{
message = 'A new version of GCS is available on GitHub.';
title = 'GCS version ' + newVersion + ' available!';
}
SpreadsheetApp.getActiveSpreadsheet().toast(message, title, 120);
}
}
All the code in the function getMarketItemsTrigger that's commented out is what I have tried without luck.
The short version of this question is: how can I post the values in itemList to columns A and B in the "Market Items" sheet?
You can write the array itemList to the sheet by adding:
//your code
ss.getSheetByName('name_of_sheet_here')
.getRange(1, 1, itemList.length, itemList[0].length)
.setValues(itemList)
//more code (if needed)
} //end of code
Change the sheet name and range to suit.
There are two ways to do this. If you do want it to keep running as a custom function, custom functions have access to the Script Properties service: you could save a timestamp in the script properties and check it every time the custom function runs (see the sketch after the links below).
https://developers.google.com/apps-script/reference/properties/
https://developers.google.com/apps-script/guides/sheets/functions#using_apps_script_services
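A minimal sketch of that check, assuming (as the guide above indicates) that the Properties service is available from the custom function; the property key and the 30-day window are arbitrary choices:
function shouldRefreshMarketItems() {
  var props = PropertiesService.getScriptProperties();
  var last = Number(props.getProperty('lastMarketItemsRefresh')) || 0; // hypothetical key
  var oneMonthMs = 30 * 24 * 60 * 60 * 1000; // roughly one month
  if (new Date().getTime() - last < oneMonthMs) {
    return false; // refreshed recently, so skip the fetch
  }
  props.setProperty('lastMarketItemsRefresh', String(new Date().getTime()));
  return true;
}
You would then call shouldRefreshMarketItems() at the top of getMarketItemsTrigger and return early when it is false.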
The second is to create a time-driven trigger to run the code like a monthly cron job (a setup sketch follows the links below).
https://developers.google.com/apps-script/guides/triggers/installable#time-driven_triggers
https://developers.google.com/apps-script/guides/triggers/installable#managing_triggers_manually
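A one-off setup sketch for that trigger (note that a trigger cannot use the function's return value, so the setValues() write from the first suggestion above needs to be in place):
function installMonthlyTrigger() {
  // Runs getMarketItemsTrigger on the 1st of every month at around 3am
  ScriptApp.newTrigger('getMarketItemsTrigger')
    .timeBased()
    .onMonthDay(1)
    .atHour(3)
    .create();
}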