I have a long list of websites where I want to search for a specific string, in this case "cast area". I have come up with this script that loops through the list (I have confined it to a small range that includes the row I know has a valid match). In the log, it says "Logging output too large. Truncating output." I have read online that this shouldn't matter - the output is just too big for the log; it doesn't mean the script has given up on looking through the rest.
function getData() {
  var ss = SpreadsheetApp.getActiveSpreadsheet();
  var sheet = ss.getSheetByName('URLs');
  var firstURL = 24673;
  var lastURL = 24678;
  for (var i = firstURL; i < lastURL; i++) {
    var data = sheet.getRange(i, 1).getValue();
    var response = UrlFetchApp.fetch(data).getContentText();
    Logger.log(data);
    //Logger.log(response)
    if (response.toLowerCase().indexOf("cast area") > -1) {
      Logger.log(1);
    }
  }
}
To test it out, here is the code with a URL hard-coded that has the words "CAST AREA" in the Notes section of the page. I'm hoping the log will show the number 1 to confirm that it works.
function getData() {
  var ss = SpreadsheetApp.getActiveSpreadsheet();
  var sheet = ss.getSheetByName('URLs');
  var data = "https://elanthipedia.play.net/Faenella%27s_Grace";
  var response = UrlFetchApp.fetch(data).getContentText();
  Logger.log(data);
  //Logger.log(response)
  if (response.indexOf("CAST AREA") > -1) {
    Logger.log(1);
  }
}
If you want to inspect a contentText that is too large for the logging output, write the data somewhere else.
The simplest solution is to write it into a spreadsheet.
Sample:
function getData() {
  var ss = SpreadsheetApp.getActiveSpreadsheet();
  var sheet = ss.getSheetByName('URLs');
  var data = "https://elanthipedia.play.net/Faenella%27s_Grace";
  var response = UrlFetchApp.fetch(data).getContentText();
  sheet.getRange(sheet.getLastRow() + 1, 1).setValue(response);
}
Obviously, to visualize the data in a nicer way, it makes sense to parse the response and write the different objects into different columns of the spreadsheet.
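For example, here is a minimal sketch of that idea; the title regex, the 100-character snippet and the three-column layout are illustrative assumptions of mine, not part of the original answer:
function getDataToColumns() {
  var ss = SpreadsheetApp.getActiveSpreadsheet();
  var sheet = ss.getSheetByName('URLs');
  var data = "https://elanthipedia.play.net/Faenella%27s_Grace";
  var response = UrlFetchApp.fetch(data).getContentText();
  // Pull a couple of pieces out of the raw HTML (illustrative choices, not required).
  var titleMatch = response.match(/<title>([^<]*)<\/title>/i);
  var title = titleMatch ? titleMatch[1] : "";
  var pos = response.toLowerCase().indexOf("cast area");
  var snippet = pos > -1 ? response.substr(pos, 100) : "not found";
  // Write each piece into its own column on the next free row.
  sheet.getRange(sheet.getLastRow() + 1, 1, 1, 3).setValues([[data, title, snippet]]);
}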
I am trying to scrape a table of price data from this website using the following code:
function scrapeData() {
  // Retrieve table as a string using Parser.
  var url = "https://stooq.com/q/d/?s=barc.uk&i=d";
  var fromText = '<td align="center" id="t03">';
  var toText = '</td>';
  var content = UrlFetchApp.fetch(url).getContentText();
  var scraped = Parser.data(content).from(fromText).to(toText).build();
  // Parse table using XmlService.
  var root = XmlService.parse(scraped).getRootElement();
}
I have taken this method from an approach I used in a similar question here; however, it's failing on this particular URL and giving me the error:
Error on line 1: Content is not allowed in prolog. (line 12, file "Stooq")
In related questions here and here, they talk about textual content that the parser does not accept; however, I am unable to apply the solutions from those questions to my own problem. Any help would be much appreciated.
How about this modification?
Modification points:
In this case, the retrieved HTML has to be modified before it can be parsed. For example, when var content = UrlFetchApp.fetch(url).getContentText() is run, the attribute values in the returned HTML are not enclosed in quotes, so they have to be quoted before XmlService can parse them (see the short demonstration after this list).
The header row contains a merged column, so an extra column name has to be inserted.
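As a quick illustration of the first point, the replace encloses unquoted attribute values in quotes and drops the bare nowrap attribute so that XmlService can parse the fragment; the sample tag below is made up for demonstration, not taken from the actual Stooq response:
function demoAttributeQuoting() {
  // Hypothetical sample cell, not taken from the actual Stooq response.
  var raw = '<td align=center width=100 nowrap>1.23</td>';
  var fixed = raw.replace(/=([a-zA-Z0-9\%-:]+)/g, "=\"$1\"").replace(/nowrap/g, "");
  Logger.log(fixed); // <td align="center" width="100" >1.23</td>
}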
When the above points are reflected in the script, it becomes as follows.
Modified script:
function scrapeData() {
  // Retrieve table as a string using Parser.
  var url = "https://stooq.com/q/d/?s=barc.uk&i=d";
  var fromText = '#d9d9d9}</style>';
  var toText = '<table';
  var content = UrlFetchApp.fetch(url).getContentText();
  var scraped = Parser.data(content).from(fromText).to(toText).build();
  // Modify values: enclose attribute values in quotes and remove the bare "nowrap" attribute.
  scraped = scraped.replace(/=([a-zA-Z0-9\%-:]+)/g, "=\"$1\"").replace(/nowrap/g, "");
  // Parse table using XmlService.
  var root = XmlService.parse(scraped).getRootElement();
  // Retrieve the header row and insert the column name missing because of the merged cell.
  var headerTr = root.getChild("thead").getChildren();
  var res = headerTr.map(function(e) {return e.getChildren().map(function(f) {return f.getValue()})});
  res[0].splice(7, 0, "Change");
  // Retrieve values.
  var valuesTr = root.getChild("tbody").getChildren();
  var values = valuesTr.map(function(e) {return e.getChildren().map(function(f) {return f.getValue()})});
  Array.prototype.push.apply(res, values);
  // Put the result into the active sheet.
  var ss = SpreadsheetApp.getActiveSheet();
  ss.getRange(1, 1, res.length, res[0].length).setValues(res);
}
Note:
Before you run this modified script, please install the Parser library for Google Apps Script.
This modified script is not designed to handle arbitrary URLs; it works for the URL in your question. If you want to retrieve values from other URLs, please adjust the script accordingly.
Reference:
Parser
XmlService
If this was not what you want, I'm sorry.
I am looking to bring the "bid" value for each "ticker" from this API call https://api.etherdelta.com/returnTicker into Google Sheets cells.
An example cell will have a formula like =crypt("PPT").
Here is the code I have so far, but I am having a hard time figuring out how to get the data for each ticker (I know I haven't actually used "ticker" anywhere in the body yet).
function crypt(ticker) {
  var url = "https://api.etherdelta.com/returnTicker";
  var response = UrlFetchApp.fetch(url);
  var text = response.getContentText();
  var json = JSON.parse(text);
  var price = json[bid];
  return parseFloat(bid);
}
How about the following modifications?
Modification points:
Each ticker name in the response is prefixed with ETH_.
ETH_ + ticker is the key of the object.
When =crypt("PPT") is used, the key is ETH_PPT, and the "bid" you want is in the value of ETH_PPT.
The script modified to reflect the points above is as follows.
Modified script:
function crypt(ticker) {
  var url = "https://api.etherdelta.com/returnTicker";
  var response = UrlFetchApp.fetch(url);
  var text = response.getContentText();
  var json = JSON.parse(text);
  var price = json["ETH_" + ticker].bid; // Modified
  return parseFloat(price); // Modified
}
This modified script retrieves the value of bid for each ticker when you put =crypt("PPT") into a cell in the spreadsheet.
Note:
It seems that an error response is sometimes returned from the URL; one way to fail gracefully in that case is sketched below.
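This is only a sketch of mine, not part of the original answer; the muteHttpExceptions option and the fallback return values are assumptions about how you might want to handle a failed request:
function cryptSafe(ticker) {
  var url = "https://api.etherdelta.com/returnTicker";
  // muteHttpExceptions keeps fetch() from throwing on non-200 responses (my addition).
  var response = UrlFetchApp.fetch(url, {muteHttpExceptions: true});
  if (response.getResponseCode() !== 200) {
    return "API error: " + response.getResponseCode();
  }
  var json = JSON.parse(response.getContentText());
  var entry = json["ETH_" + ticker];
  // Fall back to a readable message if the ticker key is missing.
  return entry ? parseFloat(entry.bid) : "Unknown ticker: " + ticker;
}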
If I misunderstand your question, I'm sorry.
I am trying to get the following to work: a JSON payload is sent to my Google Apps Script, and I want to pull specific values from that JSON message and store them in specific rows of one specific Google Spreadsheet. So far so good; this is what I have:
function doPost(response) {
  var sheets = SpreadsheetApp.openById('MY SHEET ID');
  var dataAll = JSON.parse(response.getContentText());
  var nR = getNextRow(sheets) + 1;
  // RECORD DATA IN SPREADSHEET
  sheets.getRangeByName('timestamp').getCell(nR, 1).setValue(new Date());
  sheets.getRangeByName('ticket_id').getCell(nR, 1).setValue(dataAll);
}

function getNextRow(sheets) {
  var timestamps = sheets.getRangeByName("timestamp").getValues();
  for (i in timestamps) {
    if (timestamps[i][0] == "") {
      return Number(i);
    }
  }
}
It should store the response and put it into a blank cell of the range "timestamp", but nothing happens at this point.
This is the JSON body from JIRA:
{"timestamp":1483576902984,"webhookEvent":"jira:issue_created","issue_event_type_name":"issue_created","user":{"self":"https://xxx.atlassian.net/rest/api/2/user?username=admin","name":"admin","key":"admin","emailAddress":"test#mail.at","avatarUrls":{"48x48":"https://secure.gravatar.com/avatar/3d238d8be45bd26982fa09ae2f891c3f?d=mm&s=48","24x24":"https://secure.gravatar.com/avatar/3d238d8be45bd26982fa09ae2f891c3f?d=mm&s=24","16x16":"https://secure.gravatar.com/avatar/3d238d8be45bd26982fa09ae2f891c3f?d=mm&s=16","32x32":"https://secure.gravatar.com/avatar/3d238d8be45bd26982fa09ae2f891c3f?d=mm&s=32"},"displayName":"Max Mustermann [Administrator]","active":true,"timeZone":"Europe/Berlin"},"issue":{"id":"10057","self":"https://xxx.atlassian.net/rest/api/2/issue/10057","key":"TA-58","fields":{"issuetype":{"self":"https://xxx.atlassian.net/rest/api/2/issuetype/10104","id":"10104","description":"A problem which impairs or prevents the functions of the product.","iconUrl":"https://xxx.atlassian.net/secure/viewavatar?size=xsmall&avatarId=10303&avatarType=issuetype","name":"Bug","subtask":false,"avatarId":10303},"timespent":null,"project":{"self":"https://xxx.atlassian.net/rest/api/2/project/10000","id":"10000","key":"TA","name":"Test Area","avatarUrls":{"48x48":"https://xxx.atlassian.net/secure/projectavatar?avatarId=10324","24x24":"https://xxx.atlassian.net/secure/projectavatar?size=small&avatarId=10324","16x16":"https://xxx.atlassian.net/secure/projectavatar?size=xsmall&avatarId=10324","32x32":"https://xxx.atlassian.net/secure/projectavatar?size=medium&avatarId=10324"}},"customfield_10110":null,"fixVersions":[],"customfield_10111":null,"aggregatetimespent":null,"customfield_10112":"Not started","resolution":null,"customfield_10113":null,"customfield_10114":null,"customfield_10104":null,"customfield_10105":null,"customfield_10106":null,"customfield_10107":null,"customfield_10108":null,"customfield_10109":null,"resolutiondate":null,"workratio":-1,"lastViewed":null,"watches":{"self":"https://xxx.atlassian.net/rest/api/2/issue/TA-58/watchers","watchCount":0,"isWatching":false},"created":"2017-01-05T01:41:42.903+0100","priority":{"self":"https://xxx.atlassian.net/rest/api/2/priority/3","iconUrl":"https://xxx.atlassian.net/images/icons/priorities/medium.svg","name":"Medium","id":"3"},"customfield_10100":null,"customfield_10101":null,"customfield_10102":null,"customfield_10103":null,"labels":[],"timeestimate":null,"aggregatetimeoriginalestimate":null,"versions":[],"issuelinks":[],"assignee":null,"updated":"2017-01-05T01:41:42.903+0100","status":{"self":"https://xxx.atlassian.net/rest/api/2/status/10000","description":"","iconUrl":"https://xxx.atlassian.net/","name":"To Do","id":"10000","statusCategory":{"self":"https://xxx.atlassian.net/rest/api/2/statuscategory/2","id":2,"key":"new","colorName":"blue-gray","name":"To Do"}},"components":[],"timeoriginalestimate":null,"description":"super alles neu","timetracking":{},"customfield_10005":null,"attachment":[],"aggregatetimeestimate":null,"summary":"super alles 
neu","creator":{"self":"https://xxx.atlassian.net/rest/api/2/user?username=admin","name":"admin","key":"admin","emailAddress":"test#mail.at","avatarUrls":{"48x48":"https://secure.gravatar.com/avatar/3d238d8be45bd26982fa09ae2f891c3f?d=mm&s=48","24x24":"https://secure.gravatar.com/avatar/3d238d8be45bd26982fa09ae2f891c3f?d=mm&s=24","16x16":"https://secure.gravatar.com/avatar/3d238d8be45bd26982fa09ae2f891c3f?d=mm&s=16","32x32":"https://secure.gravatar.com/avatar/3d238d8be45bd26982fa09ae2f891c3f?d=mm&s=32"},"displayName":"Max Mustermann [Administrator]","active":true,"timeZone":"Europe/Berlin"},"subtasks":[],"reporter":{"self":"https://xxx.atlassian.net/rest/api/2/user?username=admin","name":"admin","key":"admin","emailAddress":"test#mail.at","avatarUrls":{"48x48":"https://secure.gravatar.com/avatar/3d238d8be45bd26982fa09ae2f891c3f?d=mm&s=48","24x24":"https://secure.gravatar.com/avatar/3d238d8be45bd26982fa09ae2f891c3f?d=mm&s=24","16x16":"https://secure.gravatar.com/avatar/3d238d8be45bd26982fa09ae2f891c3f?d=mm&s=16","32x32":"https://secure.gravatar.com/avatar/3d238d8be45bd26982fa09ae2f891c3f?d=mm&s=32"},"displayName":"Max Mustermann [Administrator]","active":true,"timeZone":"Europe/Berlin"},"customfield_10000":"{}","aggregateprogress":{"progress":0,"total":0},"customfield_10001":null,"customfield_10115":null,"customfield_10116":"0|i0005r:","environment":null,"duedate":null,"progress":{"progress":0,"total":0},"comment":{"comments":[],"maxResults":0,"total":0,"startAt":0},"votes":{"self":"https://xxx.atlassian.net/rest/api/2/issue/TA-58/votes","votes":0,"hasVoted":false},"worklog":{"startAt":0,"maxResults":20,"total":0,"worklogs":[]}}}}
However, I don't want to have the whole JSON in my cell; I only want specific objects/IDs from within the JSON. How do I access them?
After tons of research, this is a solution that works in my case:
function doPost(response) {
  var sheets = SpreadsheetApp.openById('SHEET_ID');
  // Retrieve data from the JIRA payload and store it in "data".
  var json = response.postData.contents;
  var data = JSON.parse(json);
  // Index values from "data" and store them in separate variables, for example:
  var ticket_id = data.issue.key;
  var priority_name = data.issue.fields.priority.name;
  var summary = data.issue.fields.summary;
}
These two lines:
var json = response.postData.contents;
var data = JSON.parse(json);
made it possible to read the body and pick out all the specific parameters I want.
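To actually record those values in the sheet, lines like the following could go inside doPost, right after the variables above. This is only a sketch: the named ranges 'priority' and 'summary' and the reuse of the getNextRow() helper from the question are assumptions, not something spelled out in the answer.
  var nR = getNextRow(sheets) + 1;
  // Write one parsed field per named range, one new row per webhook call.
  // 'priority' and 'summary' are hypothetical named ranges added for this example.
  sheets.getRangeByName('timestamp').getCell(nR, 1).setValue(new Date());
  sheets.getRangeByName('ticket_id').getCell(nR, 1).setValue(ticket_id);
  sheets.getRangeByName('priority').getCell(nR, 1).setValue(priority_name);
  sheets.getRangeByName('summary').getCell(nR, 1).setValue(summary);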
Here is an example:
/*
 * webhookHandler: JIRA webhook callback function
 */
function webhookHandler(response) {
  // Parse the JSON body of the webhook request.
  var data = JSON.parse(response.postData.contents);
  // Logs out data in dev console.
  console.log(data);
  var spreadsheet = SpreadsheetApp.openById("<spreadsheet id>");
  var cellRange = spreadsheet.getRangeByName("<some range name>");
  var cell = cellRange.getCell(1 /* row index (1-based) */, 1 /* column index (1-based) */);
  cell.setValue(data.issue.key); // e.g. the ticket id from the parsed JSON
}
UrlFetchApp Documentation
SpreadsheetApp Documentation
I'm using a Google Apps Script, trying to import Bitcoin exchange rate information into a Google Spreadsheet. I use this code:
var url = "https://btc-e.com/api/2/btc_usd/ticker";
var response = UrlFetchApp.fetch(url);
var json = response.getContentText();
var lcharts_data = JSON.parse(json);

function bitcoin() {
  var b = lcharts_data["ticker"]["avg"];
  return b;
}
The JSON file looks like this:
{"ticker":
{"high":947.99902,
"low":817.64001,
"avg":882.819515,
"vol":24625847.06001,
"vol_cur":28189.09956,
"last":930,
"buy":930,
"sell":929.998,
"updated":1385575341,
"server_time":1385575342}
}
}
Yet b returns as 22.49. What am I doing wrong?
I use a similar method for receiving the data, but I parse it slightly differently. For example:
var url = "https://btc-e.com/api/2/btc_usd/ticker";
var response = UrlFetchApp.fetch(url);
var json = response.toString();
var lcharts_data = Utilities.jsonParse(json);
Notice the toString() method and the native Utilities.jsonParse() method. I tried JSON.parse() but that didn't work for me.
Also, are you using an API key to access the data? I have tried the above but it's timing out.
As of when I tested your URL, the JSON response returned was:
{
"ticker":{
"high":421.70001,
"low":418,
"avg":419.850005,
"vol":2361935.91952,
"vol_cur":5620.41595,
"last":420.168,
"buy":420.168,
"sell":419.853,
"updated":1460626271,
"server_time":1460626273
}
}
Here's the code that returns the value for 'avg':
var url = "https://btc-e.com/api/2/btc_usd/ticker";
var response = UrlFetchApp.fetch(url);
var json = response.getContentText();
var lcharts_data = JSON.parse(json);

function bitcoin() {
  var b = lcharts_data.ticker.avg;
  return b;
}
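As a side note, here is a self-contained variant (a sketch of mine, not part of the answer above) that keeps the fetch inside the function, so the value is refreshed whenever the custom function, e.g. =bitcoin() in a cell, recalculates:
function bitcoin() {
  // Fetch and parse the ticker on every call instead of at load time (sketch only).
  var url = "https://btc-e.com/api/2/btc_usd/ticker";
  var response = UrlFetchApp.fetch(url);
  var lcharts_data = JSON.parse(response.getContentText());
  return lcharts_data.ticker.avg;
}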