Upload multiple attachments via Jira API using UrlFetchApp.fetch - google-apps-script

I am using google app script server side coding to upload multiple files using Jira Rest API. I have used urlFetchApp.fetch() method for it. The payload i am passing is file blob data with 'Content-Type: false' as i am unable to pass it as formData through server side coding.
With this approach i am able to pass single file at a time to Jira attachments API, I have to call Jira API for each file.
Below is sample code,
function updateJiraTicket(fileUploadArr, ticketId){
const jira_Api = getJiraInstanceKeys();
const JIRA_URL= jira_Api["URL"] + ticketId + "/attachments";
const JIRA_LOGIN = jira_Api["LOGINNAME"];
const JIRA_PASSWORD = jira_Api["PASSWORD"];
const PROJECT = jira_Api["PROJECT"];
//fileUploadArr format getting passed from file upload UI side control
//fileUploadArr = [{“fileData”:”{base64 file data}”, “fileType”:”image/jpg”,”fileName”:”testimg1”},{“fileData”:”{base64 file data}”, “fileType”:”image/jpg”,”fileName”:”testimg2”}]
fileUploadArr.forEach(function(arr) {
let file = getFile(arr.fileData, arr.fileType, arr.fileName);
let formdata = {'file' : file };
let params = {
method: 'POST',
headers: {
'Authorization': 'Basic ' + Utilities.base64Encode(JIRA_LOGIN+':'+JIRA_PASSWORD),
'Content-Type': false,
'X-Atlassian-Token': 'no-check'
},
"payload": formdata,
muteHttpExceptions: true
}
let result = UrlFetchApp.fetch(JIRA_URL, params);
let response = result.getContentText();
})
}
function getFile(data, type, name) {
var file = Utilities.newBlob(Utilities.base64Decode(data), type, name);
return file;
}
Can anyone help with this?

Related

Google Apps Script - URL Fetch App returning random numbers

I am new to Apps Script and was trying to build an API and call that API through a different script. I created the web app and published it.
This is the URL:
https://script.google.com/macros/s/AKfycbxKVmGy3fxDfoHxyDtQh7psqj7IdKF7qHbgxLAwNRoiKTA-bpKN4QKtArzwsYdFb-Hb/exec
When I open this link, I can see the data correctly but when I try to fetch this data from a different script using urlfetchapp, it returns random numbers. I need help on what I am doing incorrectly.
Script which I am using to call this data:
function GetCopies()
{
var options = {
'contentType': "application/json",
'method' : 'get',
};
var Data = UrlFetchApp.fetch('https://script.google.com/macros/s/AKfycbxKVmGy3fxDfoHxyDtQh7psqj7IdKF7qHbgxLAwNRoiKTA-bpKN4QKtArzwsYdFb-Hb/exec',options)
Logger.log(Data.getContent())
}
This is the log I get:
I tried parsing it, but it throws an error:
How can I get data from URL correctly?
A working sample:
Create two Google Apps Script projects. In my case API and fetcher
API
const doGet = () => {
const myObj = {
"name": "Mr.GAS",
"email": "mrgas#blabla.com"
}
return ContentService
.createTextOutput(JSON.stringify(myObj))
.setMimeType(
ContentService.MimeType.JSON
)
}
fetcher
const LINK = "API_LINK"
const fetchTheAPI = async () => {
const options = {
'contentType': "application/json",
'method': 'get',
}
const res = UrlFetchApp.fetch(LINK, options)
const text = res.getContentText()
console.log(JSON.parse(text))
}
Deploy the API: Select type > Web app and Who has access > Anyone, copy the URL (it is important to copy that URL not the one redirected in the browser)
Replace the "API_LINK" by the URL.
Run the function.
You only need to adapt this example to suit your needs.
Documentation:
Content Service
Web Apps

Upload json object to Google Cloud Storage with Google Apps Script

I'm calling an API in a Google Apps Script. The response is a json object. I'd like to send it to Google Cloud Storage.
Here is the script I wrote so far:
function uploadToGoogleCloudStorage() {
var api_response = '{ "var":"value"}'
var url = 'https://storage.googleapis.com/storage/v1/b/my_bucket_name/o/test?fields=OBJECT'.replace("OBJECT", JSON.stringify(api_response))
var response = UrlFetchApp.fetch(url, {
method: "POST"
});
}
I have the following error Exception: Invalid argument: https://storage.googleapis.com/storage/v1/b/gtm_container_export/o/test?fields="{%20\"var\":\"value\"}"
I didn't find any documentation on how to interact with Google Cloud Storage from Google Apps Script. I'm wondering if UrlFetchApp is the right way to do it or if I should activate a specific service.
Here is a basic example for you - your approach just needs to be modified a bit:
function uploadToGoogleCloudStorage() {
let url = "https://storage.googleapis.com/upload/storage/v1/b/[replace this with your bucket ID]/o?uploadType=media&name=my_test_json.json";
let token = "Bearer [replace this with your oauth token!]";
// this is the content of the document we will create in the bucket
let data = {
'name': 'Bob Smith',
'age': 35,
'pets': ['fido', 'fluffy']
};
let options = {
'method' : 'post',
'headers' : {
'Authorization' : token
},
'contentType': 'application/json',
// Convert the JavaScript object to a JSON string.
'payload' : JSON.stringify(data)
};
var response = UrlFetchApp.fetch(url, options);
console.log( response.getResponseCode() ); // 200 OK
console.log( response.getContentText() );
}
In this case, the file name in the bucket will be "my_test_json.json", as defined in name=my_test_json.json.
Replace [replace this with your bucket ID] with your bucket ID.
Replace [replace this with your oauth token!] with your oauth token.
If you are not familiar with tokens, that is a fairly large topic, and outside the scope of this question. But in case it helps, here are some very basic notes: To generate a test token, I used the OAuth 2.0 Playground to create a test token. I chose "Cloud Storage API" read/write scope. After step 2, I copy/pasted the access_token string out of the "response" box on the right had side of the page. This string is over 300 characters in length. This is not a production-strength solution - it's just for testing.

How to add media upload for BigQuery Rest API using UrlFetchApp?

I need to stream data into BigQuery from my Google Apps Script addon.
But I need to use my service account only (I need to insert data into my BigQuery table, not user's BigQuery table)
I followed this example: https://developers.google.com/apps-script/advanced/bigquery#load_csv_data
Because Apps Script Advanced Service doesn't support service account natively, so I need to change this example a bit:
Instead of using Advanced Service BigQuery, I need to get the OAuth token from my service account, then using BigQuery Rest API to handle the same job:
This is what I did:
function getBigQueryService() {
return (
OAuth2.createService('BigQuery')
// Set the endpoint URL.
.setTokenUrl('https://accounts.google.com/o/oauth2/token')
// Set the private key and issuer.
.setPrivateKey(PRIVATE_KEY)
.setIssuer(CLIENT_EMAIL)
// Set the property store where authorized tokens should be persisted.
.setPropertyStore(PropertiesService.getScriptProperties())
// Caching
.setCache(CacheService.getUserCache())
// Locking
.setLock(LockService.getUserLock())
// Set the scopes.
.setScope('https://www.googleapis.com/auth/bigquery')
)
}
export const insertLog = (userId, type) => {
const bigQueryService = getBigQueryService()
if (!bigQueryService.hasAccess()) {
console.error(bigQueryService.getLastError())
return
}
const projectId = bigqueryCredentials.project_id
const datasetId = 'usage'
const tableId = 'logs'
const row = {
timestamp: new Date().toISOString(),
userId,
type,
}
const data = Utilities.newBlob(convertToNDJson(row), 'application/octet-stream')
// Create the data upload job.
const job = {
configuration: {
load: {
destinationTable: {
projectId,
datasetId,
tableId,
},
sourceFormat: 'NEWLINE_DELIMITED_JSON',
},
},
}
const url = `https://bigquery.googleapis.com/upload/bigquery/v2/projects/${projectId}/jobs`
const headers = {
Authorization: `Bearer ${bigQueryService.getAccessToken()}`,
'Content-Type': 'application/json',
}
const options = {
method: 'post',
headers,
payload: JSON.stringify(job),
}
try {
const response = UrlFetchApp.fetch(url, options)
const result = JSON.parse(response.getContentText())
console.log(JSON.stringify(result, null, 2))
} catch (err) {
console.error(err)
}
}
As you can see in my code, I get the Blob data (which is the actual json data that I need to put in BigQuery table) using this line:
const data = Utilities.newBlob(convertToNDJson(row), 'application/octet-stream')
But I don't know where to use this data with the BigQuery Rest API
The documentation doesn't mention it: https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs/insert
How this can be done? Thank you.
I can solve this problem using Tanaike's FetchApp library:
https://github.com/tanaikech/FetchApp#fetch
Anyone has this issue in the future: please check my comment in code to understand what was done.
Turn out, the job variable is treated as metadata, and the data variable is treated as file in the form data object
// First you need to convert the JSON to Newline Delimited JSON,
// then turn the whole thing to Blob using Utilities.newBlob
const data = Utilities.newBlob(convertToNDJson(row), 'application/octet-stream')
// Create the data upload job.
const job = {
configuration: {
load: {
destinationTable: {
projectId,
datasetId,
tableId,
},
sourceFormat: 'NEWLINE_DELIMITED_JSON',
},
},
}
const url = `https://bigquery.googleapis.com/upload/bigquery/v2/projects/${projectId}/jobs?uploadType=multipart`
const headers = {
Authorization: `Bearer ${bigQueryService.getAccessToken()}`,
}
const form = FetchApp.createFormData() // Create form data
form.append('metadata', Utilities.newBlob(JSON.stringify(job), 'application/json'))
form.append('file', data)
const options = {
method: 'post',
headers,
muteHttpExceptions: true,
body: form,
}
try {
FetchApp.fetch(url, options)
} catch (err) {
console.error(err)
}
Note: When you create the service account, choose role BigQuery Admin, or any role that has permission bigquery.jobs.create
https://cloud.google.com/bigquery/docs/access-control#bigquery-roles
Because if you don't, you will have the error
User does not have bigquery.jobs.create permission...

Google Sheets - Mailchimp API - Create Template

WHAT I AM TRYING TO DO:
I am trying to create a template (custom HTML) using Mailchimp API according to this documentation.
WHAT I HAVE TRIED SO FAR:
Took raw HTML of the template I created using 'drag-and-drop'. Tested it using 'code-your-own'. Saved in a variable in apps script. Used to the following code, with data set as that variable. I got the following error
{instance=2fb8b5eb-f11c-4260-a958-f16e5bc7c98b, detail=The resource submitted could not be validated. For field-specific details, see the 'errors' array., type=http://developer.mailchimp.com/documentation/mailchimp/guides/error-glossary/, title=Invalid Resource, errors=[{field=, message=Schema describes object, NULL found instead}], status=400}
I then set the data to simpler HTML as shown below. I got the same error.
I tried using an already created template (accessed through TEMPLATE_ID) and tried to edit that (just the name of the template). I got the same error though I am able to access the template. The changes I made were:
var TEMPLATE_ID = 'MY_TEMPLATE_ID';
var endpoint = 'templates/' + TEMPLATE_ID;
var data = {
'name': 'new test name'
}
In params
'method': 'PATCH'
I also tried to GET method to see the templates, campaigns, lists. I am successfully able to do that.
I looked up the various answers on SO, one of them suggested using mergefields, I tried it too with the same error.
var data = {
'name': 'Test Template',
'html': '<html><head></head><body><p>TEST</p><body></html>',
'mergefields': {}
};
MWE:
function mailchimpCampaign(){
// URL and params for the Mailchimp API
var root = 'https://us19.api.mailchimp.com/3.0/';
var endpoint = 'templates';
var data = {
'name': 'Test Template',
'html': '<html><head></head><body><p>TEST</p><body></html>'
};
var payload = JSON.stringify(data);
// parameters for url fetch
var params = {
'method': 'POST',
'muteHttpExceptions': true,
'headers': {
'Authorization': 'apikey ' + API_KEY,
'content-type': 'application/json'
},
'data': payload
};
try {
// call the Mailchimp API
var response = UrlFetchApp.fetch(root + endpoint, params);
var data = response.getContentText();
var json = JSON.parse(data);
Logger.log(json);
}
catch (error) {
// deal with any errors
Logger.log(error);
};
}
Any help will be appreciated. Thanks.
This is for future readers.
So while I was hitting my head on this error. I tried using UrlFetchApp.getRequest() and it showed me that payload was empty.
The problem was I had to payload instead of data that I was using.
Updated working code:
var params = {
'method': 'POST',
'muteHttpExceptions': true,
'headers': {
'Authorization': 'apikey ' + API_KEY,
'content-type': 'application/json'
},
'payload': payload
};

CouchDb 2.1.1 Admin API Compaction PUT Request

I am working in NodeJS with CouchDB 2.1.1.
I'm using the http.request() method to set various config settings using the CouchDB API.
Here's their API reference, yes, I've read it:
Configuration API
Here's an example of a working request to set the logging level:
const http = require('http');
var configOptions = {
host: 'localhost',
path: '/_node/couchdb#localhost/_config/',
port:5984,
header: {
'Content-Type': 'application/json'
}
};
function setLogLevel(){
configOptions.path = configOptions.path+'log/level';
configOptions.method = 'PUT';
var responseString = '';
var req = http.request(configOptions, function(res){
res.on("data", function (data) {
responseString += data;
});
res.on("end", function () {
console.log("oldLogLevel: " + responseString);
});
});
var data = '\"critical\"';
req.write(data);
req.end();
}
setLogLevel();
I had to escape all the quotes and such, which was expected.
Now I'm trying to get CouchDb to accept a setting for compaction.
The problem is that I'm attempting to replicate this same request to a different setting but that setting doesn't have a simple structure, though it appears to be "just a String" as well.
The CouchDB API is yelling at me about invalid JSON formats and I've tried a boatload of escape sequences and attempts to parse the JSON in various ways to get it to behave the way I think it should.
I can use Chrome's Advanced Rest Client to send this payload, and it is successful:
Request Method: PUT
Request URL: http://localhost:5984/_node/couchdb#localhost/_config/compactions/_default
Request Body: "[{db_fragmentation, \"70%\"}, {view_fragmentation, \"60%\"}, {from, \"23:00\"}, {to, \"04:00\"}]"
This returns a "200 OK"
When I execute the following function in my node app, I get a response of:
{"error":"bad_request","reason":"invalid UTF-8 JSON"}
function setCompaction(){
configOptions.path = configOptions.path+'compactions/_default';
configOptions.method = 'PUT';
var responseString = '';
var req = http.request(configOptions, function(res){
res.on("data", function (data) {
responseString += data;
});
res.on("end", function () {
console.log("oldCompaction: " + responseString);
});
});
var data = "\"[{db_fragmentation, \"70%\"}, {view_fragmentation, \"60%\"}, {from, \"23:00\"}, {to, \"04:00\"}]\"";
req.write(data);
req.end();
}
Can someone point at what I'm missing here?
Thanks in advance.
You need to use node's JSON module to prepare the data for transport:
var data = '[{db_fragmentation, "70%"}, {view_fragmentation, "60%"}, {from, "23:00"}, {to, "04:00"}]';
// Show the formatted data for the requests' payload.
JSON.stringify(data);
> '"[{db_fragmentation, \\"70%\\"}, {view_fragmentation, \\"60%\\"}, {from, \\"23:
00\\"}, {to, \\"04:00\\"}]"'
// Format data for the payload.
req.write(JSON.stringify(data));