I am trying to make an API call which has a 50 records per call limit, the JSON response gives me "objects" and "total_objects".
I am running into an issue with the code that i managed to put together with help, the output does increment from the 50th record till the 95th record, post which it loops back to print from the 45th record and loops with the same set.
issue: stuck in a loop where the increment is one record at a time and a repeat of 49 old records
var client_id = 'xxxx';
var client_secret = 'xxx';
var email = 'xxx';
var password = 'abc';
var device = '1';
var app_version = '1';
var Token = 'abcde';
// call the API to get data for your list
function AP_Hyd() {
// URL and params for the API
var root = 'https://abcdef:443//v2/';
var endpoint = 'ghij/3241/objects?search=rtewq&limit=50&offset=';
var offset = '0'
// parameters for url fetch
var params = {
'method': 'GET',
'muteHttpExceptions': true,
'headers': {
'Authorization': 'Bearer ' + Token
}
};
// call the API for first offset
var json = getAssetData(root,endpoint, offset, params);
var totalObjects = json.objects;
//Adding data to ActiveGoogleSheet
var sheet = SpreadsheetApp.getActiveSheet();
sheet.clear()
//Adding Column Headings
var headerRow = ['erwf', 'gtre', 'poi', 'hgf', 'lkj', 'zyx'];
sheet.appendRow(headerRow);
//print first batch
printRows(totalObjects,sheet);
//check for any further data available
if (totalObjects) {
let totalPages = json.totals.objects;
//get total number of pages to be retrieved
var pageCount = parseInt(totalPages / 50)
//check for decimal
//if decimal is found increase the value of pageCount by 1
if(totalPages%50 !=0){
pageCount++;
}
Logger.log("pageCount:::" + pageCount +" for total objects:: " + totalPages);
for (i = 1; i < pageCount; i++) {
//invoke again and lets say the response is added in response
// call the API with incremented offset
var pageResponse = getAssetData(root,endpoint, i, params);
var jsonObjects = pageResponse.objects;
//print the rows
printRows(jsonObjects,sheet);
}
}
}
function getAssetData(root,endpoint,offset, params){
var response = UrlFetchApp.fetch(root + endpoint + offset, params);
var content = response.getContentText()
//return the parsed content
//TODO: add null check
return JSON.parse(content)
}
function printRows(jsonData, sheet) {
if (jsonData) {
for (var i = 0; i < jsonData.length; i++) {
var fieldObj = jsonData[i];
;
Logger.log(fieldObj)
// print out all the Columns
sheet.appendRow(row);
}
}
}
Related
I am trying to pull an attachment from an email which is a zip file and send that file to Google Analytics to upload the data. I am struggling with the getAttachment part in the script as it is showing as undefined. I have no idea what I am doing wrong. Any help would be greatly appreciated. I have now fixed where it is pulling the right attachment by using: var attachments = messages[0].getAttachments(); However I am now getting a new error where it seems to be looking for a title of the attachment but it is still showing as undefined.
function refundImport() {
/// use custom report to schedule the email - will need to adjust the processCsv() function based on your schema
/// if you are using a non-bing data source - you will probably need to adjust the findCsvAttachments() function as it grabs a zip file now
var CONFIG = {
'emailSubject': 'Refunded or Partially Refunded Orders - TKS',
'customDataSourceId': 'xxxxxxxxxxxxxxxxxxxxx',
'now': new Date(),
'zipFileName': 'refunded_or_partially_refunded_orders.zip',
//'csvFileName': 'refunded_or_partially_refunded_orders.csv',
'analyticsAccountId': '12345678',
'analyticsPropertyId': 'UA-12345678-1'
}
//adds one whole day to a date object - can take negative days if you want yesterday etc
function addDaysToDate(DATE, DAYS) {
var newDate = DATE.getTime() + DAYS * 3600000 * 24;
var newDate1 = new Date(newDate);
return newDate1;
}
//takes a date object and formats it as a string
function formatDateAsString(DATE) {
var dateString = Utilities.formatDate(DATE, 'GMT+12:00', 'yyyy/MM/dd');
return dateString;
}
//after and before must be date strings - use the above function
function grabEmailAttachments(SUBJECT, AFTER, BEFORE) {
var query = 'subject:' + SUBJECT + ' ' + 'has:attachment after:' + AFTER + ' ' + 'before:' + BEFORE;
//assumes only 1 will match - if more than 1 - will match the first one
var thread = GmailApp.search('in:inbox from:"noreply#highviewapps.com"');
var messages = thread[0].getMessages();
var content = messages[0].getPlainBody();
//var attachments = thread.getAttachments()[0];
var attachments = messages[0].getAttachments();
//thread.moveToTrash();
return attachments;
}
// finds csv attachment and creates 2d array of row,column e.g. csv[0][1] = value in row 0 column 1 of csv
function findCsvAttachment(attachments, zipFileToSearch, fileNameToSearch) {
var counter = 0;
for (i = 0; i < attachments.length; i++) {
if (attachments[i].getName().search(zipFileToSearch) != -1) {
var unzip = Utilities.unzip(attachments[i]);
var csvData = Utilities.parseCsv(unzip[0].getDataAsString(), ",");
counter = counter + 1;
}
}
if (counter == 0) {
Logger.log('No file with ' + fileNameToSearch + ' in its name was found.');
}
if (counter == 1) {
return csvData;
}
if (counter > 1) {
Logger.log('More than 1 file with ' + fileNameToSearch + ' in its name was found - the last one was used.');
}
}
function processCsv(csvData, date) {
var headers = 'ga:transactionId,ga:productSku,ga:productPrice,ga:quantityRefunded,ga:transactionRevenue';
var dataForUpload = headers;
return dataForUpload;
}
//assumes media dataType for upload https://developers.google.com/analytics/devguides/config/mgmt/v3/mgmtReference/management/uploads/uploadData
function uploadDataToAnalytics(data, accountId, webPropertyId, customDataSourceId) {
var dataBlob = Utilities.newBlob(data, "application/octet-stream");
var upload = Analytics.Management.Uploads.uploadData(accountId, webPropertyId, customDataSourceId, dataBlob);
return upload;
}
///////// ACTUAL IMPLEMENTATION OF SCRIPT /////////////
var tomorrowString = formatDateAsString(addDaysToDate(CONFIG.now, 1));
var yesterdayString = formatDateAsString(addDaysToDate(CONFIG.now, -1));
var todayString = formatDateAsString(CONFIG.now);
var emailAttachments = grabEmailAttachments(CONFIG.emailSubject, yesterdayString, tomorrowString);
var csv = findCsvAttachment(emailAttachments, CONFIG.zipFileName, CONFIG.csvFileName);
var csvForUpload = processCsv(csv, yesterdayString);
var analyticsUpload = uploadDataToAnalytics(csvForUpload, CONFIG.analyticsAccountId, CONFIG.analyticsPropertyId, CONFIG.customDataSourceId);
}
Try changing these lines of code
From:
var attachments = messages.getAttachments();
and
var unzip = Utilities.unzip(attachments[i]);
To:
var attachments = messages[0].getAttachments();
and
var unzip = Utilities.unzip(attachments[i].copyBlob());
I'm helping an educational project.
The following is the request:
the words-sentences in 1 row from a google spreadsheets list should be sent automatically to the discord text channal every day. The next day, a line below. When the whole list is finished, it should go back to the beginning and send it again. and write it on a new line after each column.
2nd request: same but this time 2 rows should be sent every day.
Number of columns Generally the same 2 or 3.
this is the code i found works, but that's not what i wanted. this code is for:"a range of cells".
How do I get it to send the next line every day? I will set the code to run once a day with Trigger from the menu. But how will it know which line it sent yesterday, etc.?
Unfortunately, I couldn't do exactly what I wanted. I will be glad if you help
(I'm an IT person, but I don't have any coding knowledge. I understand the code when I see it, but I can't write it.)
enter image description here
function postMessageToDiscord(message) {
var ss = SpreadsheetApp.getActiveSpreadsheet();
var sheet = ss.getSheetByName("Loot");
var range = sheet.getRange("A1:C3");
var numRows = sheet.getLastRow()-1; // Number of rows to process
var data = range.getValues();
var result = '';
for (var i = 0; i < data.length; i++) {
var d = data[i];
for (var j = 0; j < d.length; j++) {
result = result.concat(d[j]);
}
}
message = message || result ;
var discordUrl = 'webhook xxx';
var payload = JSON.stringify({content: message});
var params = {
method: "POST",
payload: payload,
muteHttpExceptions: true,
contentType: "application/json"
};
var response = UrlFetchApp.fetch(discordUrl, params);
Logger.log(response.getContentText());
}
Edit:
function postMessageToDiscord(message) {
var ss = SpreadsheetApp.getActiveSpreadsheet();
var sheet = ss.getSheetByName("b1");
var propertyServ = PropertiesService.getScriptProperties();
var properties = propertyServ.getProperties(); //get script properties
var row = 1; // set initial row
var incRow = 4; // how much row
if(Object.getOwnPropertyNames(properties).length != 0){ //this will check if the properties object is not empty
row = parseInt(properties["row"]) + incRow; //increase row
}
var range = sheet.getRange(row, 1, incRow, 6);
var values = range.getValues();
var result = '';
for (var i = 0; i < values.length; i++) {
var d = values[i];
for (var j = 0; j < d.length; j++) {
result = result.concat(d[j]);
}
}
message = message || result ;
var discordUrl = 'https://discord.com/api/webhooks xxx';
var payload = JSON.stringify({content: message});
var params = {
method: "POST",
payload: payload,
muteHttpExceptions: true,
contentType: "application/json"
};
var response = UrlFetchApp.fetch(discordUrl, params);
Logger.log(response.getContentText());
propertyServ.setProperty("row", row); //save the current row of processed line
}
As mentioned by Cooper, you can use the Properties Service of Google Apps Script to save the processed range in your code.
Here I have an example which you can incorporate to your code.
EDIT
Revised code to process 3 rows and print each row per line.
Test Data:
Code:
function propertyServExample() {
var ss = SpreadsheetApp.getActiveSpreadsheet();
var sheet = ss.getSheetByName("Loot");
var propertyServ = PropertiesService.getScriptProperties();
var properties = propertyServ.getProperties(); //get script properties
var row = 1; // set initial row
if(Object.getOwnPropertyNames(properties).length != 0){ //this will check if the properties object is not empty
row = parseInt(properties["row"]) + 3;
}
var range = sheet.getRange(row, 1, 3, 2);
var values = range.getValues();
var str = '';
values.forEach(val => {
str = str + val.join('') + '\n';
})
Logger.log(str);
propertyServ.setProperty("row", row); //save the current row of processed line
}
Each run will get the next row.
Run 1:
Run 2:
Run 3:
Reference:
Properties Service
I am trying to export my Pipedrive data to a Google Sheet, in particular to make the link between two of my queries. So I first wrote this script:
function GetPipedriveDeals2() {
let ss = SpreadsheetApp.getActiveSpreadsheet();
let sheets = ss.getSheets();
let sheet = ss.getActiveSheet();
//the way the url is build next step is to iterate between the end because api only allows a fixed number of calls (100) this way i can slowly fill the sheet.
let url = "https://laptop.pipedrive.com/v1/products:(id)?start=";
let limit = "&limit=500";
//let filter = "&filter_id=64";
let pipeline = 1; // put a pipeline id specific to your PipeDrive setup
let start = 1;
//let end = start+50;
let token = "&api_token=XXXXXXXXXXXXXXX";
let response = UrlFetchApp.fetch(url+start+limit+token); //
let dataAll = JSON.parse(response.getContentText());
let dataSet = dataAll;
//let prices = prices;
//create array where the data should be put
let rows = [], data;
for (let i = 0; i < dataSet.data.length; i++) {
data = dataSet.data[i];
rows.push([data.id,
GetPipedriveDeals4(data.id)
]);
}
Logger.log( 'function2' ,JSON.stringify(rows,null,8) ); // Log transformed data
return rows;
}
// Standard functions to call the spreadsheet sheet and activesheet
function GetPipedriveDeals4(idNew) {
let ss = SpreadsheetApp.getActiveSpreadsheet();
let sheets = ss.getSheets();
let sheet = ss.getActiveSheet();
//the way the url is build next step is to iterate between the end because api only allows a fixed number of calls (100) this way i can slowly fill the sheet.
let url = "https://laptop.pipedrive.com/v1/products/"+idNew+"/deals:(id,d93b458adf4bf84fefb6dbce477fe77cdf9de675)?start=";
let limit = "&limit=500";
//let filter = "&filter_id=64";
let pipeline = 1; // put a pipeline id specific to your PipeDrive setup
let start = 1;
//let end = start+50;
let token = "&api_token=XXXXXXXXXXXXXXXXX"
let response = UrlFetchApp.fetch(url+start+limit+token); //
let dataAll = JSON.parse(response.getContentText());
let dataSet = dataAll;
//Logger.log(dataSet)
//let prices = prices;
//create array where the data should be put
let rows = [], data;
if(dataSet.data === null )return
else {
for (let i = 0; i < dataSet.data.length; i++) {
data = dataSet.data[i];
let idNew = data.id;
rows.push([data.id, data['d93b458adf4bf84fefb6dbce477fe77cdf9de675']]);
}
Logger.log( 'function4', JSON.stringify(rows,null,2) ); // Log transformed data
return rows;
}
}
But it is not optimized at all and takes about 60 seconds to run, and google script executes the custom functions only for 30 seconds... With help, I had this second function:
function getPipedriveDeals(apiRequestLimit){
//Make the initial request to get the ids you need for the details.
var idsListRequest = "https://laptop.pipedrive.com/v1/products:(id)?start=";
var start = 0;
var limit = "&limit="+apiRequestLimit;
var token = "&api_token=XXXXXXXXXXX";
var response = UrlFetchApp.fetch(idsListRequest+start+limit+token);
var data = JSON.parse(response.getContentText()).data;
//For every id in the response, construct a url (the detail url) and push to a list of requests
var requests = [];
data.forEach(function(product){
var productDetailUrl = "https://laptop.pipedrive.com/v1/products/"+product.id+"/deals:(id,d93b458adf4bf84fefb6dbce477fe77cdf9de675)?start=";
requests.push(productDetailUrl+start+limit+token)
})
//With the list of detail request urls, make one call to UrlFetchApp.fetchAll(requests)
var allResponses = UrlFetchApp.fetchAll(requests);
// logger.log(allResponses);
return allResponses;
}
But this time it's the opposite. I reach my request limit imposed by Pipedrive: https://pipedrive.readme.io/docs/core-api-concepts-rate-limiting (80 requests in 2 sec).
I confess I have no more idea I thought of putting OAuth2 in my script to increase my query limit, but it seems really long and complicated I'm not at all in my field.
In summary, I would just like to have a script that doesn't execute requests too fast but without exceeding the 30 seconds imposed by Google Apps Script.
---------------------EDIT---TEST---FOREACH80-------------------------------------
function getPipedriveProducts(){
//Make the initial request to get the ids you need for the details.
var idsListRequest = "https://laptop.pipedrive.com/v1/products:(id)?start=";
var start = 0;
var limit = "&limit=500";
var token = "&api_token=XXXXXXXXXXXXXXXXXXX";
var response = UrlFetchApp.fetch(idsListRequest+start+limit+token);
var data = JSON.parse(response.getContentText()).data;
//For every id in the response, construct a url (the detail url) and push to a list of requests
const batch = new Set;
let requests = [];
data.forEach(function(product){
var productDetailUrl = "https://laptop.pipedrive.com/v1/products/" + product.id + "/deals:(id,d93b458adf4bf84fefb6dbce477fe77cdf9de675)?start=";
requests.push(productDetailUrl+start+limit+token);
if(requests.length === 79) {
batch.add(requests);
requests = [];
}
})
const allResponses = [...batch].flatMap(requests => {
Utilities.sleep(2000);
return UrlFetchApp.fetchAll(requests);
Logger.log(allResponses)
});
}
Create Set of 80 requests each
Execute each set value using fetchAll
const batch = new Set;
let requests = [];
data.forEach(function(product){
var productDetailUrl = "https://example.com";
requests.push(productDetailUrl+start+limit+token);
if(requests.length === 80) {
batch.add(requests);
requests = [];
}
})
const allResponses = [...batch].flatMap(requests => {
Utilities.sleep(2000);
return UrlFetchApp.fetchAll(requests);
});
Chunking
One of the most important concepts in working with APIs is chunking as you need to avoid rate-limiting, accommodate request scheduling, parallelize CPU-heavy calculations, etc. There are countless ways to split an array in chunks (see half a hundred answers in this canonical Q&A just for JavaScript).
Here is a small configurable utility tailored to the situation where one wants to split a flat array into an array of arrays of a certain size/pattern (which is usually the case with request chunking):
/**
* #typedef {object} ChunkifyConfig
* #property {number} [size]
* #property {number[]} [limits]
*
* #summary splits an array into chunks
* #param {any[]} source
* #param {ChunkifyConfig}
* #returns {any[][]}
*/
const chunkify = (source, {
limits = [],
size
} = {}) => {
const output = [];
if (size) {
const {
length
} = source;
const maxNumChunks = Math.ceil((length || 1) / size);
let numChunksLeft = maxNumChunks;
while (numChunksLeft) {
const chunksProcessed = maxNumChunks - numChunksLeft;
const elemsProcessed = chunksProcessed * size;
output.push(source.slice(elemsProcessed, elemsProcessed + size));
numChunksLeft--;
}
return output;
}
const {
length
} = limits;
if (!length) {
return [Object.assign([], source)];
}
let lastSlicedElem = 0;
limits.forEach((limit, i) => {
const limitPosition = lastSlicedElem + limit;
output[i] = source.slice(lastSlicedElem, limitPosition);
lastSlicedElem = limitPosition;
});
const lastChunk = source.slice(lastSlicedElem);
lastChunk.length && output.push(lastChunk);
return output;
};
const sourceLimited = [1, 1, 2, 2, 2, 3];
const outputLimited = chunkify(sourceLimited, { limits: [2, 1] });
console.log({ source : sourceLimited, output : outputLimited });
const sourceSized = ["ES5", "ES6", "ES7", "ES8", "ES9"];
const outputSized = chunkify(sourceSized, { size: 2 });
console.log({ source : sourceSized, output : outputSized });
From there, the only thing you need is to traverse the array while waiting for each chunk to complete to make it applicable to your situation. Please beware that requests can fail for any number of reasons - you should persist last successfully processed chunk.
I'm trying to optimize this code to make it as short as possible I use it to call an API to get data in a Google Sheet. I've been told that it's the fetch that makes the script so long, and that I could try with a fetchAll but it breaks my code, I feel like putting my url in an array breaks my code (for the fetchAll). I also had suspicions about the if statement that I put in case the data is null (already made my function crash).
// Standard functions to call the spreadsheet sheet and activesheet
function GetPipedriveDeals2() {
let ss = SpreadsheetApp.getActiveSpreadsheet();
let sheets = ss.getSheets();
let sheet = ss.getActiveSheet();
//the way the url is build next step is to iterate between the end because api only allows a fixed number of calls (100) this way i can slowly fill the sheet.
let url = "https://laptop.pipedrive.com/v1/products:(id)?start=";
let limit = "&limit=500";
//let filter = "&filter_id=64";
let pipeline = 1; // put a pipeline id specific to your PipeDrive setup
let start = 1;
//let end = start+50;
let token = "&api_token=XXXXXXXXXXXXXXXXXXXXXXXX"
let response = UrlFetchApp.fetch(url+start+limit+token); //
let dataAll = JSON.parse(response.getContentText());
let dataSet = dataAll;
//let prices = prices;
//create array where the data should be put
let rows = [], data;
for (let i = 0; i < dataSet.data.length; i++) {
data = dataSet.data[i];
rows.push([data.id,
GetPipedriveDeals4(data.id)
]);
}
Logger.log( 'function2' ,JSON.stringify(rows,null,8) ); // Log transformed data
return rows;
}
// Standard functions to call the spreadsheet sheet and activesheet
function GetPipedriveDeals4(idNew) {
let ss = SpreadsheetApp.getActiveSpreadsheet();
let sheets = ss.getSheets();
let sheet = ss.getActiveSheet();
//the way the url is build next step is to iterate between the end because api only allows a fixed number of calls (100) this way i can slowly fill the sheet.
let url = "https://laptop.pipedrive.com/v1/products/"+idNew+"/deals:(id,d93b458adf4bf84fefb6dbce477fe77cdf9de675)?start=";
let limit = "&limit=500";
//let filter = "&filter_id=64";
let pipeline = 1; // put a pipeline id specific to your PipeDrive setup
let start = 1;
//let end = start+50;
let token = "&api_token=XXXXXXXXXXXXXXXXXXXXXX"
let response = UrlFetchApp.fetch(url+start+limit+token); //
let dataAll = JSON.parse(response.getContentText());
let dataSet = dataAll;
//Logger.log(dataSet)
//let prices = prices;
//create array where the data should be put
let rows = [], data;
if(dataSet.data === null )return
else {
for (let i = 0; i < dataSet.data.length; i++) {
data = dataSet.data[i];
let idNew = data.id;
rows.push([data.id, data['d93b458adf4bf84fefb6dbce477fe77cdf9de675']]);
}
Logger.log( 'function4', JSON.stringify(rows,null,2) ); // Log transformed data
return rows;
}
}
Try with fetchAll:
// Standard functions to call the spreadsheet sheet and activesheet
function GetPipedriveDeals2() {
let ss = SpreadsheetApp.getActiveSpreadsheet();
let sheets = ss.getSheets();
let sheet = ss.getActiveSheet();
//the way the url is build next step is to iterate between the end because api only allows a fixed number of calls (100) this way i can slowly fill the sheet.
let limit = "&limit=500";
//let filter = "&filter_id=64";
let pipeline = 1; // put a pipeline id specific to your PipeDrive setup
let start = 1;
//let end = start+50;
let token = "&api_token=XXXXXXXXXXXXXXXXXXXXXX"
let url = "https://laptop.pipedrive.com/v1/products:(id)?start="+start+limit+token;
let request = [url];
let response = UrlFetchApp.fetchAll(request); //
let dataAll = response.map(function(e) {return e.getContentText()});
let dataSet = dataAll;
//let prices = prices;
//create array where the data should be put
let rows = [], data;
for (let i = 0; i < dataSet.data.length; i++) {
data = dataSet.data[i];
rows.push([data.id,
GetPipedriveDeals4(data.id)
]);
}
Logger.log( 'function2' ,JSON.stringify(rows,null,8) ); // Log transformed data
return rows;
}
function GetPipedriveDeals4(idNew) {
let start = 1;
let limit = "&limit=500";
let token = "&api_token=XXXXXXXXXXXXXXXXXXXX"
let urli = "https://laptop.pipedrive.com/v1/products/"+idNew+"/deals:(id,d93b458adf4bf84fefb6dbce477fe77cdf9de675)?start="+start+limit+token;
let request1 = [urli]
let response1 = UrlFetchApp.fetchAll(request1); //
var dataAll1 = response1.map(function(e) {return e.getContentText()});
let dataSet1 = dataAll1;
//the way the url is build next step is to iterate between the end because api only allows a fixed number of calls (100) this way i can slowly fill the sheet.
let urli = "https://laptop.pipedrive.com/v1/products/"+idNew+"/deals:(id,d93b458adf4bf84fefb6dbce477fe77cdf9de675)?start="+start+limit+token;
let request1 = [urli]
let response1 = UrlFetchApp.fetchAll(request1); //
var dataAll1 = response1.map(function(e) {return e.getContentText()});
let dataSet1 = dataAll1;
//Logger.log(dataSet1)
//let prices = prices;
//create array where the data should be put
let rows1 = [], data1;
if(dataSet1.data1 === null )return
else {
for (let i = 0; i < dataSet1.data1.length; i++) {
data1 = dataSet1.data1[i];
let idNew = data1.id;
rows1.push([data1.id, data1['d93b458adf4bf84fefb6dbce477fe77cdf9de675']]);
}
Logger.log( 'function4', JSON.stringify(rows1,null,2) ); // Log transformed data
return rows1;
}
}
So I saw in the documentation that I must put my URL in Tab[] to make the request but know I have the following error :
8 juil. 2020 à 16:06:18 Erreur TypeError: Cannot read property 'length' of undefined
at GetPipedriveDeals2(Copie de importNamesTypes:22:36)
I suppose I'm doing something wrong but can't see it. Thanks
So yes I didn't put the modifications online but in fact I managed to do what I want, so I will put my code and some explanation.
First of all I didn't manage to execute the script above, or any other that I put online for two reasons :
- 1 : The first that I wrote was too long for google sheet, the execution time was above 50s (max 30s)
- 2 : The second script that I made, was too fast for the API pipedrive and when I manage to solve this issue, I've got an error saying "cannot read property of null" it was because pipedrive was returning "null" in string and null as an empty value, so this was breaking the code. When I manage to solve this the script was again too long.
So I rework the script again and put it with a start and a limit in parameters. So now I am calling my function by doing this =getPipeDriveDeals(0, 50) and =getPipeDriveDeals(51, 90) etc....
So this is the code that I wrote :
function getPipedriveDeals(start , limit) {
var allResponsesDeals = [];
options = {muteHttpExceptions: true};
var idsListRequest = "https://xxxx.pipedrive.com/v1/products:(id)?start=";
var token = "&api_token=hiddenforobviousreasons";
var response = UrlFetchApp.fetch(idsListRequest + start +"&limit="+limit + token, options);
let dataAll = JSON.parse(response.getContentText()).data;
var requests = [];
let rows = [], data;
//Logger.log("data="+ JSON.stringify(dataAll, null, 2));
if(dataAll === null) {
//Logger.log(" dataAll issss nnnnuulll" );
}else{
dataAll.forEach(function(product) {
var productDetailUrl = "https://xxxx.pipedrive.com/v1/products/" + product.id + "/deals:(title,7d321c7454a4b44a09f32bdd7702a2b17fd7654e)?start=0";
requests = productDetailUrl + token
var responses = UrlFetchApp.fetch(requests);
var dataArray = JSON.parse(responses.getContentText());
if(dataArray.data === null) {
//Logger.log(" newData issss nnnnuulll" );
}else
{
for (let i = 0; i < dataArray.data.length; i++) {
data = dataArray.data[i];
rows.push([data.title, data['7d321c7454a4b44a09f32bdd7702a2b17fd7654e']])
}
}
});
}
Logger.log("allResponsesDeals ="+ JSON.stringify(rows, null, 2));
return rows;
}
function getAllDeals(){
var allResponses = [];
var deals = getPipedriveDeals();
Logger.log("deals="+ JSON.stringify(deals, null, 2));
deals.forEach((response)=>{allResponses.push(response)});
allResponses.push(deals)
Logger.log("allResponses="+ allResponses);
return allResponses;
}
If you want to add specific data to your sheet just change it in rows.pushand in the Url of you second call.
I advise you to not make modification of the data you want to get with you'r first request (the ID is use to make all the other request).
I hope this will help you and all the people that want to tweak Pipedrive functionality.
I'm trying to fetch the API data with Google Apps Script about my balance on Bittrex but it returns me nothing, no errors and only blank cells. This is the code I've written based on the documentation here: https://bittrex.com/Home/Api
function getBalance() {
var ss = SpreadsheetApp.openById("***");
var data = ss.getSheetByName("Data");
var key = ss.getSheetByName("Api").getRange('A2').getValue();
var secret = ss.getSheetByName("Api").getRange('B2').getValue();
var baseUrl = 'https://bittrex.com/api/v1.1/';
var nonce = Math.floor(new Date().getTime()/1000);
var command = "/account/getbalances";
var uri = baseUrl.concat(command + "?apikey=" + key + "&nonce=" + nonce);
var signature = Utilities.computeHmacSignature(Utilities.MacAlgorithm.HMAC_SHA_512,
uri,
secret);
signature = signature.map(function(byte) {
return ('0' + (byte & 0xFF).toString(16)).slice(-2);
}).join('')
var headers = {
"apisign": signature
}
var params = {
"method": "get",
"headers": headers,
}
var response = UrlFetchApp.fetch(uri, params);
var json = JSON.parse(response.getContentText());
var blnc = [];
blnc.push(['Balance']);
for(var key in json.result)
{
blnc[0].push(json.result[key]);
}
askRange = data.getRange(2, 2, blnc.length, 1);
askRange.setValues(blnc);
}
The askrange last number is "1" because the script only pass the "Balance" value to the spreadsheet, but the values should be something around 210. Any help? Thank you
I solved it, the problem was in the "command" variable, there was a slash not needed that generated an url with a double slash