Google sheet to BigQuery by GAS - google-apps-script

I am following the instructions in the link below, with the following code (an Apps Script that sends data from a Google Sheet to BigQuery):
function myFunction() {
  var projectId = 'tcndata';
  var datasetId = 'dec06';
  var tableId = 'dec0601';
  var fileId = '1Mb7tN3xshHt0gpsxHkt5Ifje4xAeu7N9Vn_YEQAdcoc';
  var ss = SpreadsheetApp.openById(fileId);
  var source = ss.getSheetByName("send");
  var dataToCopy = source.getRange('A2:D5');
  var values = dataToCopy.getValues();
  var rowsCSV = values.join("\n");
  var data = Utilities.newBlob(rowsCSV, 'application/octet-stream');

  function convertValuesToRows(data) {
    var rows = [];
    var headers = ["Contract", "Product", "Dest", "QTY"];
    for (var i = 1, numColumns = data.length; i < numColumns; i++) {
      var row = BigQuery.newTableDataInsertAllRequestRows();
      row.json = data[i].reduce(function(obj, value, index) {
        obj[headers[index]] = value;
        return obj;
      }, {});
      rows.push(row);
    }
    return rows;
  }

  function bigqueryInsertData(data, tableId) {
    var insertAllRequest = BigQuery.newTableDataInsertAllRequest();
    insertAllRequest.rows = convertValuesToRows(data);
    var response = BigQuery.Tabledata.insertAll(insertAllRequest, projectId, datasetId, tableId);
    if (response.insertErrors) {
      Logger.log(response.insertErrors);
    }
  }

  bigqueryInsertData(Utilities.parseCsv(data.getDataAsString()), tableId);
}
The script runs with no errors, but no data is recorded in BigQuery.
Can anyone help me figure out the reason?
Thank you

You could check these options:
You should validate the header names, as they need to match the column names of the table in BigQuery (a sketch of this is shown after this list).
You should validate the conversion of rowsCSV to a blob before calling getDataAsString.
You should validate that the sheet name is correct; instead of ss.getSheetByName("send") you can use ss.getSheets()[0].
You should validate that the range source.getRange('A2:D5') actually contains data. You can also use the format getRange(row, column, numRows, numColumns); see the getRange documentation for more details.
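As a rough illustration of the first point, here is a minimal sketch (reusing the project, dataset, table, and sheet names from the question, and assuming row 1 of the sheet holds headers with the data below it) of reading the column names from the table itself so the JSON keys always match the schema:

function insertSheetRowsIntoBigQuery() {
  var projectId = 'tcndata';   // from the question
  var datasetId = 'dec06';     // from the question
  var tableId = 'dec0601';     // from the question
  var ss = SpreadsheetApp.getActiveSpreadsheet();
  var source = ss.getSheetByName('send');
  // Assumes row 1 holds headers and rows 2+ hold the data to send.
  var values = source.getRange(2, 1, source.getLastRow() - 1, 4).getValues();

  // Read the column names straight from the BigQuery table schema.
  var schemaFields = BigQuery.Tables.get(projectId, datasetId, tableId).schema.fields;
  var headers = schemaFields.map(function(f) { return f.name; });

  var rows = values.map(function(rowValues) {
    var row = BigQuery.newTableDataInsertAllRequestRows();
    row.json = headers.reduce(function(obj, name, index) {
      obj[name] = rowValues[index];
      return obj;
    }, {});
    return row;
  });

  var request = BigQuery.newTableDataInsertAllRequest();
  request.rows = rows;
  var response = BigQuery.Tabledata.insertAll(request, projectId, datasetId, tableId);
  if (response.insertErrors) {
    Logger.log(JSON.stringify(response.insertErrors));
  }
}

Note too that rows added through the streaming insertAll API can take a little while to show up in the table preview, which can also look like "no error, but no data".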
Another option is to load CSV data from Cloud Storage into a new BigQuery table. You can see this example:
from google.cloud import bigquery

# Construct a BigQuery client object.
client = bigquery.Client()

# TODO(developer): Set table_id to the ID of the table to create.
# table_id = "your-project.your_dataset.your_table_name"

job_config = bigquery.LoadJobConfig(
    schema=[
        bigquery.SchemaField("name", "STRING"),
        bigquery.SchemaField("post_abbr", "STRING"),
    ],
    skip_leading_rows=1,
    # The source format defaults to CSV, so the line below is optional.
    source_format=bigquery.SourceFormat.CSV,
)
uri = "gs://cloud-samples-data/bigquery/us-states/us-states.csv"

load_job = client.load_table_from_uri(
    uri, table_id, job_config=job_config
)  # Make an API request.

load_job.result()  # Waits for the job to complete.

destination_table = client.get_table(table_id)  # Make an API request.
print("Loaded {} rows.".format(destination_table.num_rows))
You can see this documentation.

Related

Need a way to extract specific data from Firebase RealtimeDB to Google Sheets

I'm trying to transfer Firebase Realtime Database data into Google Sheets using Apps Script.
I need a way to extract the ID, Department, and Surname strings from the DB, which is received as shown in the logs below.
I use .childByAutoID(), which has the following effect on the DB.
My database:
Desired result in spreadsheet:
function writeSheets() {
  var firebaseUrl = "<my-database>.firebaseio.com/Attendees";
  var base = FirebaseApp.getDatabaseByUrl(firebaseUrl);
  var data = base.getData();
  console.log(JSON.stringify(data));
  var ss = SpreadsheetApp.openById("<my-spreadsheet>");
  var sheet = ss.getSheetByName("Sheet1");
  var num = 2;
  range = ss.getRange("A" + num + ":C" + num + "");
  for (var i in data) {
    var values = [[data[i][0], data[i][1], data[i][2]]];
    range.setValues(values);
    num += 1;
    range = sheet.getRange("A" + num + ":C" + num + "");
  }
}
base.getData() reads it as:
{
"-M4PTaIESKhQZhreHSE6":
{"Department":"HR",
"ID":"1009",
"Surname":"Blanc"},
"-M4PTgaNIE8BDqAcMr5y":
{"Department":"Accounting",
"ID":"1002",
"Surname":"Sandler"},
"-M4PTmOxxNge0Xfe_ez0":
{"Department":"Creative",
"ID":"1009",
"Surname":"Tolkien"}
}
Logs:
[20-04-09 00:11:31:653 HKT] {"-M4PTaIESKhQZhreHSE6":{"Department":"HR","ID":"1009","Surname":"Blanc"},"-M4PTgaNIE8BDqAcMr5y":{"Department":"Accounting","ID":"1002","Surname":"Sandler"},"-M4PTmOxxNge0Xfe_ez0":{"Department":"Creative","ID":"1010","Surname":"Tolkien"}}
Use Object.values and Array.map:
const data = {
"-M4PTaIESKhQZhreHSE6":
{"Department":"HR",
"ID":"1009",
"Surname":"Blanc"},
"-M4PTgaNIE8BDqAcMr5y":
{"Department":"Accounting",
"ID":"1002",
"Surname":"Sandler"},
"-M4PTmOxxNge0Xfe_ez0":
{"Department":"Creative",
"ID":"1009",
"Surname":"Tolkien"}
};
const out = Object.values(data).map(Object.values);
//or to change order: Object.values(data).map(({Department:dp,ID,Surname:sn})=>[ID,dp,sn]);
console.info(out);
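To then get those rows into the sheet in one call, here is a minimal sketch (assuming the same FirebaseApp library and sheet names as the question, an ID / Department / Surname column order, and the V8 runtime for Object.values):

function writeSheets() {
  var firebaseUrl = "<my-database>.firebaseio.com/Attendees";
  var base = FirebaseApp.getDatabaseByUrl(firebaseUrl);
  var data = base.getData();

  // One row per push key, in a fixed column order (assumed here).
  var rows = Object.values(data).map(function(entry) {
    return [entry.ID, entry.Department, entry.Surname];
  });

  var sheet = SpreadsheetApp.openById("<my-spreadsheet>").getSheetByName("Sheet1");
  // A single setValues call is much faster than writing row by row inside the loop.
  sheet.getRange(2, 1, rows.length, 3).setValues(rows);
}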

How to pull data from multiple Mailchimp endpoints?

The code below pulls data from the Mailchimp API Reports endpoint and adds it to Sheets.
I would like to add some more data from other endpoints (like fields from the "List/Audience" endpoint, e.g. member_count and total_contacts), but I don't have a slick solution for this.
What's the best practice/solution here? Can this task be kept in the same function, or is a separate function preferable?
I'm new in this area, so bear with me :)
function chimpCampaigns() {
  var API_KEY = 'X'; // MailChimp API Key
  var REPORT_START_DATE = '2018-01-01 15:54:00'; // Report Start Date (ex. when you sent your first MailChimp Newsletter)
  var ss = SpreadsheetApp.getActiveSpreadsheet();
  var sheet = ss.getSheetByName("CampaignData");
  var dc = API_KEY.split('-')[1];
  var api = 'https://' + dc + '.api.mailchimp.com/3.0';
  var count = 100; // Max rows to return
  var campaignList = '/campaigns?&count=' + count + '&since_send_time=' + REPORT_START_DATE;
  var options = {"headers": {"authorization": 'apikey ' + API_KEY}};

  var apiCall = function(endpoint) {
    apiResponseCampaigns = UrlFetchApp.fetch(api + endpoint, options);
    json = JSON.parse(apiResponseCampaigns);
    return json;
  };

  var campaigns = apiCall(campaignList);
  var total = campaigns.total_items;
  var campaignData = campaigns.campaigns;

  if (campaignData) {
    sheet.clear(); // Clear MailChimp data in Spreadsheet
    // Append Column Headers
    sheet.appendRow(["Sent Time", "Campaign ID", "Audience", "Campaign Title", "Subject Line", "Emails Sent", "Abuse Reports", "Unsubscribed", "Unsubscribe Rate", "Hard Bounces", "Soft Bounces", "Bounces Total", "Syntax Errors", "Forwards Count", "Forwards Opens", "Opens Total", "Unique Opens", "Open Rate", "Last Open", "Clicks Total", "Unique Clicks", "Unique Subscriber Clicks", "Click Rate", "Last Click"]);
  }

  for (i = 0; i < campaignData.length; i++) {
    var c = campaignData[i];
    var cid = c.id;
    var title = c.title;
    var subject = c.subject;
    var send_time = c.send_time;
    if (send_time) {
      apiResponseReports = UrlFetchApp.fetch('https://' + dc + '.api.mailchimp.com/3.0/reports/' + cid, options);
      reports = JSON.parse(apiResponseReports);
      reportsSendTime = reports.send_time;
      if (reportsSendTime) {
        var campaign_title = c.settings.title;
        var subject_line = c.settings.subject_line;
        var emails_sent = reports.emails_sent;
        var list_name = reports.list_name;
        var fields = reports.fields;
        var abuse_reports = reports.abuse_reports;
        var unsubscribed = reports.unsubscribed;
        var unsubscribe_rate = unsubscribed / emails_sent;
        var hard_bounces = reports.bounces.hard_bounces;
        var soft_bounces = reports.bounces.soft_bounces;
        var bounces = hard_bounces + soft_bounces;
        var syntax_errors = reports.bounces.syntax_errors;
        var forwards_count = reports.forwards.forwards_count;
        var forwards_opens = reports.forwards.forwards_opens;
        var opens_total = reports.opens.opens_total;
        var unique_opens = reports.opens.unique_opens;
        var open_rate = reports.opens.open_rate;
        var last_open = reports.opens.last_open;
        var clicks_total = reports.clicks.clicks_total;
        var unique_clicks = reports.clicks.unique_clicks;
        var unique_subscriber_clicks = reports.clicks.unique_subscriber_clicks;
        var click_rate = reports.clicks.click_rate;
        var last_click = reports.clicks.last_click;
        // the report array is how each row will appear on the spreadsheet
        var report = [send_time, fields, cid, list_name, campaign_title, emails_sent, subject_line, abuse_reports, unsubscribed, unsubscribe_rate, hard_bounces, soft_bounces, bounces, syntax_errors, forwards_count, forwards_opens, opens_total, unique_opens, open_rate, last_open, clicks_total, unique_clicks, unique_subscriber_clicks, click_rate, last_click];
        sheet.appendRow(report);
      }
    }
  }
}
You can call each endpoint in succession using the error-first pattern. More on this here.
If your previous call returns data and doesn't error out, you pass the next function as a callback, etc.
In the example below, I've omitted the logic that builds the URL endpoint, the query string, and the 'options' object, as these can simply be borrowed from your code.
Basically, you define a function with a callback parameter for each API endpoint.
Whenever you need to call multiple endpoints, you create a 3rd function that calls them in succession, passing each new function call as a parameter to the previous one.
The inner functions will still have access to the outer scope so you can combine data from multiple endpoints after the last call is executed (provided you assign unique names to the returned data - 'campaigns', 'reports', etc)
// function for the 'campaigns' endpoint
function getCampaigns(options, callback) {
  // API call
  var res = UrlFetchApp.fetch(campaignsEndpoint, options);
  if (res.getStatusCode() == 200) {
    var campaigns = JSON.parse(res.getContentText());
    callback(false, campaigns);
  } else {
    callback("Error: Server responded with the status code of " + res.getStatusCode());
  }
}
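For completeness, here is a hedged sketch of the 'reports' counterpart, assuming a reportsEndpoint variable built the same way as campaignsEndpoint:

// function for the 'reports' endpoint (sketch; reportsEndpoint is assumed)
function getReports(options, callback) {
  var res = UrlFetchApp.fetch(reportsEndpoint, options);
  if (res.getStatusCode() == 200) {
    callback(false, JSON.parse(res.getContentText()));
  } else {
    callback("Error: Server responded with the status code of " + res.getStatusCode());
  }
}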
After creating the function for calling the 'reports' endpoint using the same approach, combine calls in the 3rd function.
function getCampaignsAndReports() {
  var combinedData = {};
  getCampaigns(options, function(err, campaigns) {
    if (!err && campaigns) {
      // Call is successful - proceed to the next call
      getReports(options, function(err, reports) {
        // Call successful
        if (!err && reports) {
          // Proceed to the next call or combine data from
          // multiple endpoints
          combinedData.campaigns = campaigns.campaigns;
          combinedData.reports = reports.reports;
          // write to sheet
          // ...
        } else {
          // Error calling reports endpoint
          throw err;
        }
      });
    } else {
      // Error calling 'campaigns' endpoint. Throw error or write
      // another function to show it to the user
      throw err;
    }
  });
}
This may vary depending on how the MailChimp API data is structured, so please change the code accordingly. Also, if you need to call the 'reports' endpoint multiple times, once for each entry returned by the 'campaigns' endpoint, you can change your function to handle multiple request (options) objects using UrlFetchApp.fetchAll(request[]). More on this here. Calling this method will return multiple response objects that you can iterate over.
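As a rough sketch of that fetchAll idea (reusing the dc, options, and campaignData variables from the question's code; the helper name is just for illustration), you could build one request object per campaign and issue them in a single batch:

function fetchAllReports(dc, options, campaignData) {
  var requests = campaignData.map(function(c) {
    return {
      url: 'https://' + dc + '.api.mailchimp.com/3.0/reports/' + c.id,
      headers: options.headers,
      muteHttpExceptions: true
    };
  });

  // One round trip instead of one UrlFetchApp.fetch per campaign.
  var responses = UrlFetchApp.fetchAll(requests);
  return responses.map(function(res) {
    return res.getStatusCode() === 200 ? JSON.parse(res.getContentText()) : null;
  });
}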

How can I write in Google Sheets my Firebase data?

I have a database in Firebase, and I want to get the data from there and put it in a Google Spreadsheet.
function getData() {
  var ss = SpreadsheetApp.getActiveSpreadsheet();
  var sheet = ss.getSheetByName("Database");
  var data = getFirebaseData('contacts');
  var [rows, columns] = [sheet.getLastRow(), sheet.getLastColumn()];
  var range = sheet.getRange(1, 1, 1, 1);
  Logger.log(data);
  range.setValue(data);
}

function getFirebaseData(data) {
  var firebaseUrl = "https://XXXXX.firebaseio.com/";
  var secret = 'XXXXXXXX';
  var base = FirebaseApp.getDatabaseByUrl(firebaseUrl, secret);
  var result = base.getData('contacts');
  for (var i in data) {
    Logger.log(data[i].eMail + ' ' + data[i].title);
    return result;
  }
}
and here is the image:
No data is shown, and I cannot understand why.
Your problem should be solved by completing several steps:
In your getFirebaseData() function, move the return statement outside of the loop;
Instead of looping over data, loop over result (currently, you iterate over each property of the "contacts" string);
Optionally, add checks for getData() returning null or an invalid firebaseUrl (in the latter case, getData() will cause an error; use try...catch to account for that);
Change base.getData('contacts') to base.getData(data) (isn't that the reason you pass data to the function?); a sketch with these fixes applied is shown below.
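Here is a minimal sketch with those fixes applied (keeping the placeholder URL and secret from the question, and assuming the data under the path is an object of contact records with eMail and title fields):

function getFirebaseData(path) {
  var firebaseUrl = "https://XXXXX.firebaseio.com/";
  var secret = 'XXXXXXXX';
  try {
    var base = FirebaseApp.getDatabaseByUrl(firebaseUrl, secret);
    var result = base.getData(path);  // use the argument instead of the hard-coded 'contacts'
    if (!result) {
      Logger.log('No data found under ' + path);
      return null;
    }
    // Loop over the returned object, not over the path string.
    for (var key in result) {
      Logger.log(result[key].eMail + ' ' + result[key].title);
    }
    return result;  // return after the loop, not inside it
  } catch (e) {
    Logger.log('Could not read from Firebase: ' + e);
    return null;
  }
}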

Google scripts - cannot convert array to object[][]

I am trying to pull some information off poloniex.com and paste it into a range in a Google Sheet, and I am running into issues when trying to set the values. I am pretty new at this and cannot tell if the issue is with my understanding of how setValues works or if it has to do with the way I'm pushing data into my prices array.
function processPoloAPI() {
  var sheet = SpreadsheetApp.openById('<insert sheet id here>');
  var APIPullSheet = sheet.getSheetByName("APIPull");
  APIPullSheet.getRange('A2:D19999').clearContent();
  var url = "https://poloniex.com/public?command=returnChartData&currencyPair=USDT_BTC&start=1405699200&end=9999999999&period=86400";
  var responseAPI = UrlFetchApp.fetch(url);
  var parcedData = JSON.parse(responseAPI.getContentText());
  var prices = new Array();
  prices.push(['Date', 'High', 'Low', 'Open', 'Close', 'Volume', 'QuoteVolume', 'WeightedAverage']);
  for (var key in parcedData) {
    prices.push(parcedData[key]);
  }
  var length = prices.length;
  askRange = APIPullSheet.getRange(1, 1, length, 8);
  askRange.setValues(prices);
}
How about the following modification?
Modification points:
Data for setValues() must be a 2-dimensional array.
When values are retrieved from the JSON by key, the upper/lower case of the keys has to match the JSON data.
In your script, prices is [[Date, High, Low, Open, Close, Volume, QuoteVolume, WeightedAverage], {date=1424304000, volume=46.27631267, high=244, low=225, weightedAverage=239.62777823, quoteVolume=0.19311748, close=244, open=225},,,. If you want each row to hold only the numbers for the keys in the top row, each number has to be retrieved from the JSON data.
The modified script reflecting these modification points is as follows.
Modified script:
function processPoloAPI() {
  var sheet = SpreadsheetApp.openById('<insert sheet id here>');
  var APIPullSheet = sheet.getSheetByName("APIPull");
  APIPullSheet.getRange('A2:D19999').clearContent();
  var url = "https://poloniex.com/public?command=returnChartData&currencyPair=USDT_BTC&start=1405699200&end=9999999999&period=86400";
  var responseAPI = UrlFetchApp.fetch(url);
  var parcedData = JSON.parse(responseAPI.getContentText());
  var prices = new Array();
  prices.push(['Date', 'High', 'Low', 'Open', 'Close', 'Volume', 'QuoteVolume', 'WeightedAverage']);
  var keys = ['date', 'high', 'low', 'open', 'close', 'volume', 'quoteVolume', 'weightedAverage'];
  for (var i in parcedData) {
    var temp = [];
    for (var j in keys) {
      temp.push(parcedData[i][keys[j]]);
    }
    prices.push(temp);
  }
  var length = prices.length;
  askRange = APIPullSheet.getRange(1, 1, length, 8);
  askRange.setValues(prices);
}
Result:
If I misunderstand your question, I'm sorry.

Use JIRA webhook data in Google Script Project

I am trying to get the following to work: a JSON payload is sent to my Google Script, and I want to get specific values from that JSON message and store them in specific rows of one specific Google Spreadsheet. So far so good, this is what I have:
function doPost(response) {
  var sheets = SpreadsheetApp.openById('MY SHEET ID');
  var dataAll = JSON.parse(response.getContentText());
  var nR = getNextRow(sheets) + 1;
  // RECORD DATA IN SPREADSHEET
  sheets.getRangeByName('timestamp').getCell(nR, 1).setValue(new Date());
  sheets.getRangeByName('ticket_id').getCell(nR, 1).setValue(dataAll);
}

function getNextRow(sheets) {
  var timestamps = sheets.getRangeByName("timestamp").getValues();
  for (i in timestamps) {
    if (timestamps[i][0] == "") {
      return Number(i);
      break;
    }
  }
}
It should store the response and put it into a blank cell of the range "timestamp". But nothing happens at this point.
This is the JSON (body) from JIRA:
{"timestamp":1483576902984,"webhookEvent":"jira:issue_created","issue_event_type_name":"issue_created","user":{"self":"https://xxx.atlassian.net/rest/api/2/user?username=admin","name":"admin","key":"admin","emailAddress":"test#mail.at","avatarUrls":{"48x48":"https://secure.gravatar.com/avatar/3d238d8be45bd26982fa09ae2f891c3f?d=mm&s=48","24x24":"https://secure.gravatar.com/avatar/3d238d8be45bd26982fa09ae2f891c3f?d=mm&s=24","16x16":"https://secure.gravatar.com/avatar/3d238d8be45bd26982fa09ae2f891c3f?d=mm&s=16","32x32":"https://secure.gravatar.com/avatar/3d238d8be45bd26982fa09ae2f891c3f?d=mm&s=32"},"displayName":"Max Mustermann [Administrator]","active":true,"timeZone":"Europe/Berlin"},"issue":{"id":"10057","self":"https://xxx.atlassian.net/rest/api/2/issue/10057","key":"TA-58","fields":{"issuetype":{"self":"https://xxx.atlassian.net/rest/api/2/issuetype/10104","id":"10104","description":"A problem which impairs or prevents the functions of the product.","iconUrl":"https://xxx.atlassian.net/secure/viewavatar?size=xsmall&avatarId=10303&avatarType=issuetype","name":"Bug","subtask":false,"avatarId":10303},"timespent":null,"project":{"self":"https://xxx.atlassian.net/rest/api/2/project/10000","id":"10000","key":"TA","name":"Test Area","avatarUrls":{"48x48":"https://xxx.atlassian.net/secure/projectavatar?avatarId=10324","24x24":"https://xxx.atlassian.net/secure/projectavatar?size=small&avatarId=10324","16x16":"https://xxx.atlassian.net/secure/projectavatar?size=xsmall&avatarId=10324","32x32":"https://xxx.atlassian.net/secure/projectavatar?size=medium&avatarId=10324"}},"customfield_10110":null,"fixVersions":[],"customfield_10111":null,"aggregatetimespent":null,"customfield_10112":"Not started","resolution":null,"customfield_10113":null,"customfield_10114":null,"customfield_10104":null,"customfield_10105":null,"customfield_10106":null,"customfield_10107":null,"customfield_10108":null,"customfield_10109":null,"resolutiondate":null,"workratio":-1,"lastViewed":null,"watches":{"self":"https://xxx.atlassian.net/rest/api/2/issue/TA-58/watchers","watchCount":0,"isWatching":false},"created":"2017-01-05T01:41:42.903+0100","priority":{"self":"https://xxx.atlassian.net/rest/api/2/priority/3","iconUrl":"https://xxx.atlassian.net/images/icons/priorities/medium.svg","name":"Medium","id":"3"},"customfield_10100":null,"customfield_10101":null,"customfield_10102":null,"customfield_10103":null,"labels":[],"timeestimate":null,"aggregatetimeoriginalestimate":null,"versions":[],"issuelinks":[],"assignee":null,"updated":"2017-01-05T01:41:42.903+0100","status":{"self":"https://xxx.atlassian.net/rest/api/2/status/10000","description":"","iconUrl":"https://xxx.atlassian.net/","name":"To Do","id":"10000","statusCategory":{"self":"https://xxx.atlassian.net/rest/api/2/statuscategory/2","id":2,"key":"new","colorName":"blue-gray","name":"To Do"}},"components":[],"timeoriginalestimate":null,"description":"super alles neu","timetracking":{},"customfield_10005":null,"attachment":[],"aggregatetimeestimate":null,"summary":"super alles 
neu","creator":{"self":"https://xxx.atlassian.net/rest/api/2/user?username=admin","name":"admin","key":"admin","emailAddress":"test#mail.at","avatarUrls":{"48x48":"https://secure.gravatar.com/avatar/3d238d8be45bd26982fa09ae2f891c3f?d=mm&s=48","24x24":"https://secure.gravatar.com/avatar/3d238d8be45bd26982fa09ae2f891c3f?d=mm&s=24","16x16":"https://secure.gravatar.com/avatar/3d238d8be45bd26982fa09ae2f891c3f?d=mm&s=16","32x32":"https://secure.gravatar.com/avatar/3d238d8be45bd26982fa09ae2f891c3f?d=mm&s=32"},"displayName":"Max Mustermann [Administrator]","active":true,"timeZone":"Europe/Berlin"},"subtasks":[],"reporter":{"self":"https://xxx.atlassian.net/rest/api/2/user?username=admin","name":"admin","key":"admin","emailAddress":"test#mail.at","avatarUrls":{"48x48":"https://secure.gravatar.com/avatar/3d238d8be45bd26982fa09ae2f891c3f?d=mm&s=48","24x24":"https://secure.gravatar.com/avatar/3d238d8be45bd26982fa09ae2f891c3f?d=mm&s=24","16x16":"https://secure.gravatar.com/avatar/3d238d8be45bd26982fa09ae2f891c3f?d=mm&s=16","32x32":"https://secure.gravatar.com/avatar/3d238d8be45bd26982fa09ae2f891c3f?d=mm&s=32"},"displayName":"Max Mustermann [Administrator]","active":true,"timeZone":"Europe/Berlin"},"customfield_10000":"{}","aggregateprogress":{"progress":0,"total":0},"customfield_10001":null,"customfield_10115":null,"customfield_10116":"0|i0005r:","environment":null,"duedate":null,"progress":{"progress":0,"total":0},"comment":{"comments":[],"maxResults":0,"total":0,"startAt":0},"votes":{"self":"https://xxx.atlassian.net/rest/api/2/issue/TA-58/votes","votes":0,"hasVoted":false},"worklog":{"startAt":0,"maxResults":20,"total":0,"worklogs":[]}}}}
However, I don't want the whole JSON in my cell; I only want specific objects/IDs from within the JSON. How do I call them?
After tons of research, this is a solution that works for me (in my case):
function doPost(response) {
  var sheets = SpreadsheetApp.openById('SHEET_ID');
  // retrieve data from the JIRA payload and store it in "data"
  var json = response.postData.contents;
  var data = JSON.parse(json);
  // index values from "data" and store them in separate variables
  // for example:
  var ticket_id = data.issue.key;
  var priority_name = data.issue.fields.priority.name;
  var summary = data.issue.fields.summary;
  // ... write these values to the spreadsheet as needed
}
These two lines:
var json = response.postData.contents;
var data = JSON.parse(json);
Made it possible to read the body and index all the specific parameters I want.
Here is an example:
/*
 * webhookHandler: JIRA webhook callback function (receives the doPost event object)
 */
function webhookHandler(response) {
  // parse the POST body; the event object has no getAs() method
  var data = JSON.parse(response.postData.contents);
  // logs out data in dev console
  console.log(data);
  var spreadsheet = SpreadsheetApp.openById("<spreadsheet id>");
  var cellRange = spreadsheet.getRangeByName("<some range name>");
  var cell = cellRange.getCell(1 /* row index, 1-based */, 1 /* column index, 1-based */);
  cell.setValue(data.issue.key /* pick whichever field you need from the parsed JSON */);
}
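Tying this back to the named ranges in the original doPost, a minimal sketch (reusing the getNextRow helper and the 'timestamp' / 'ticket_id' named ranges from the question) could look like this:

function doPost(e) {
  var sheets = SpreadsheetApp.openById('MY SHEET ID');
  // Parse the webhook body sent by JIRA.
  var data = JSON.parse(e.postData.contents);

  var nR = getNextRow(sheets) + 1;
  sheets.getRangeByName('timestamp').getCell(nR, 1).setValue(new Date());
  sheets.getRangeByName('ticket_id').getCell(nR, 1).setValue(data.issue.key);

  // Return something so JIRA receives a 200 response.
  return ContentService.createTextOutput('ok');
}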
UrlFetchApp Documentation
SpreadsheetApp Documentation