I have a very simple program to list the files in a Google Drive. I have written two versions, using v2 and v3 of the API, but when I run them I get inconsistent results: v2 works fine, v3 doesn't. Both are written in Groovy, which is largely a superset of Java; to install Groovy, use SDKMAN.
v2.groovy
@GrabResolver(name='com.google.apis', root='http://google-api-client-libraries.appspot.com/mavenrepo')
@Grapes([
@Grab(group='com.google.api-client', module='google-api-client', version='1.12.0-beta'),
@Grab(group='com.google.apis', module='google-api-services-drive', version='v2-rev30-1.12.0-beta'),
@Grab(group='com.google.http-client', module='google-http-client-jackson', version='1.12.0-beta')
])
import com.google.api.client.googleapis.auth.oauth2.GoogleAuthorizationCodeFlow
import com.google.api.client.googleapis.auth.oauth2.GoogleCredential
import com.google.api.client.googleapis.auth.oauth2.GoogleTokenResponse
import com.google.api.client.http.HttpTransport
import com.google.api.client.json.JsonFactory
import com.google.api.client.http.javanet.NetHttpTransport
import com.google.api.client.json.jackson.JacksonFactory
import com.google.api.services.drive.Drive
import com.google.api.services.drive.DriveScopes
import com.google.api.services.drive.model.*
import groovy.json.*
import com.google.api.client.http.FileContent
class GD {
def drive
def credential
def httpTransport
def jsonFactory
def scopes
def GD (config) {
httpTransport = new NetHttpTransport ()
jsonFactory = new JacksonFactory ()
scopes = DriveScopes.DRIVE
credential = new GoogleCredential.Builder().setTransport(httpTransport)
.setJsonFactory(jsonFactory)
.setServiceAccountId(config.accountId)
.setServiceAccountScopes(scopes)
.setServiceAccountPrivateKeyFromP12File(new java.io.File(config.p12File))
.setServiceAccountUser (config.accountUser)
.build();
credential.refreshToken();
drive = new Drive.Builder(httpTransport, jsonFactory, credential).setApplicationName(config.applicationName).build()
}
def getAllFiles() throws IOException {
def list = []
def request = drive.files().list()
while (true) {
def files = request.execute()
list += files.getItems()
request.pageToken = files.nextPageToken
if (!request.pageToken)
break
if (list.size() % 10 == 0)
print "${list.size()} ...\r"
}
println "${list.size()}"
return list
}
}
def config = [
accountId: '13242342543534653', // replace with your service account ID
accountUser: 'xxxx@appspot.gserviceaccount.com', // replace with your account email
p12File: '/home/pascal/wep-ffah-f01d3bfec317.p12', // replace with the path to your p12 file
applicationName: 'myapp', // replace with your app name
]
def drive = new GD (config)
println "retrieving files"
drive.getAllFiles().each { println "${it.id} ${it.title}: ${it.mimeType}" }
v3.groovy
@GrabResolver(name='com.google.apis', root='http://google-api-client-libraries.appspot.com/mavenrepo')
@Grapes([
@Grab(group='com.google.api-client', module='google-api-client', version='1.21.0'),
@Grab(group='com.google.http-client', module='google-http-client-jackson2', version='1.21.0'),
@Grab(group='com.google.apis', module='google-api-services-drive', version='v3-rev16-1.21.0')
// @Grab(group='com.google.apis', module='google-api-services-prediction', version='v1.6-rev59-1.21.0'),
])
import com.google.api.client.googleapis.auth.oauth2.GoogleCredential
import com.google.api.client.googleapis.javanet.GoogleNetHttpTransport
import com.google.api.client.json.jackson2.JacksonFactory
// import com.google.api.services.prediction.PredictionScopes
import com.google.api.services.drive.Drive
import com.google.api.services.drive.DriveScopes
import com.google.api.services.drive.model.*
import groovy.json.*
import java.util.Collections
class GD {
def drive
def credential
def httpTransport
def jsonFactory
def scopes
def GD (config) {
httpTransport = GoogleNetHttpTransport.newTrustedTransport()
jsonFactory = JacksonFactory.getDefaultInstance()
scopes = Collections.singleton (DriveScopes.DRIVE)
credential = GoogleCredential.fromStream (new FileInputStream (config.jsonFile), httpTransport, jsonFactory)
credential.serviceAccountScopes = scopes
drive = new Drive.Builder(httpTransport, jsonFactory, credential).setApplicationName(config.applicationName).build()
}
def getAllFiles() throws IOException {
def list = []
def request = drive.files().list()
while (true) {
def files = request.execute()
list += files.getFiles()
request.pageToken = files.getNextPageToken()
if (!request.pageToken && request.pageToken.length > 0)
break
if (list.size() % 10 == 0)
print "${list.size()} ...\r"
}
println "${list.size()}"
return list
}
}
/************************************** Start here ******************************************/
def config = [
jsonFile: '/home/myhome/yourapp-b954bxxxxx.json', // replace with your JSON credential file
applicationName: 'yourapp', // set your app
]
def drive = new GD (config)
println "retrieving files"
drive.getAllFiles ().each { println "${it.id} ${it.name}: ${it.mimeType}" }
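For comparison, here is how the v3 paging loop is usually written (a minimal sketch against the same Drive v3 client as above, not a confirmed fix): the loop should break as soon as no nextPageToken comes back, whereas the compound condition !request.pageToken && request.pageToken.length > 0 in getAllFiles can never be true.
// Minimal sketch of Drive v3 paging; assumes the same Drive client built above.
def getAllFilesSketch(drive) {
    def list = []
    def request = drive.files().list()
    while (true) {
        def files = request.execute()
        list += files.getFiles()
        request.pageToken = files.getNextPageToken()
        if (!request.pageToken) // no further pages
            break
    }
    return list
}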
I have two Python classes: one class (CloudLink) is responsible for sending JSON events to the app, and the other (ReadData) is responsible for building the JSON data.
The ReadData class uses the CloudLink methods to send the JSON data to the app, but I'm getting the error _buildJSONdata() missing 1 required positional argument: 'Data'.
ReadData class
from pyspark.sql import SparkSession
import functools
from pyspark.sql import DataFrame
from pyspark.sql.functions import explode
from cosmosconnect import azurecosmos
class ReadData:
@exception(logger)
def __init__(self):
self.spark_session = (
SparkSession.builder
.appName("readData")
.getOrCreate()
)
mssparkutils.fs.unmount('/mnt/test')
logger.info("Drive unmounted")
mssparkutils.fs.mount(
'abfss://abc@transl.dfs.core.windows.net/',
'/mnt/test',
{'linkedService': "linkCosmos"}
)
logger.info("Mounted Successfully")
self.input_directory = (f"synfs:/{mssparkutils.env.getJobId()}/mnt/test/input_path"
)
self.output_directory = (f"synfs:/{mssparkutils.env.getJobId()}/mnt/test/output_path"
)
'''
Reading the schema from csv file
'''
@exception(logger)
def readConfig(self):
try:
logger.info(f"Reading the Config present in {self.input_directory} ")
dfConfig = self.spark_session.read.option("multiline","true") \
.json(self.input_directory)
#for f in dfConfig.select("Entity","Query","Business_Rule").collect():
dfConfig=dfConfig.select(explode('Input').alias('Input_Data'))\
.select('Input_Data.Validation_Type','Input_Data.Entity','Input_Data.Query','Input_Data.Business_Rule')
for f in dfConfig.rdd.toLocalIterator():
#for index, f in dfConfig.toPandas().iterrows():
self.Validation_Type=f[0]
self.container=f[1]
self.query=f[2]
self.rule=f[3]
self.readCosmos(self)
except:
raise ValueError("")
@exception(logger)
def readCosmos(self,*params):
#from cosmosconnect import azurecosmos
#a=[]
linkedService='fg'
df=azurecosmos.cosmosConnect(linkedService,self.query,self.container)
df.cache()
if len(df.head(1)) >0:
outputpath=self.output_directory+'/'+self.container
df.coalesce(1).write.mode('overwrite').parquet(outputpath)
Status="Validation Failure"
Data= {"Validation_Type":[],"Status":[],"Container":[],"Business_Rule":[]}
Data["Validation_Type"].append(self.Validation_Type)
Data["Status"].append(Status)
Data["Container"].append(self.container)
Data["Business_Rule"].append(self.rule)
CloudLink._buildJSONdata(Data)
if __name__ == "__main__":
p = ReadData()
p.readConfig()
CloudLink class
import json
import datetime
import hashlib
import sys
import traceback
import adal
import requests
from requests.adapters import HTTPAdapter
from requests.packages.urllib3.util.retry import Retry
import logging
from functools import wraps
def create_logger():
#create a logger object
#logger = logging.getLogger()
logger = logging.getLogger()
logger.setLevel(logging.INFO)
logfile = logging.FileHandler('exc_logger.log')
#logfile = logging.StreamHandler(sys.stdout)
fmt = '%(asctime)s - %(name)s - %(levelname)s - %(message)s'
formatter = logging.Formatter(fmt)
logfile.setFormatter(formatter)
logger.addHandler(logfile)
return logger
logger = create_logger()
def exception(logger):
def decorator(func):
@wraps(func)
def wrapper(*args, **kwargs):
try:
return func(*args, **kwargs)
except:
issue = "exception in "+func.__name__+"\n"
issue = issue+"-------------------------\
------------------------------------------------\n"
logger.exception(issue)
raise
return wrapper
return decorator
class CloudLink(object):
_token = None
_instance = None
http = None
cloudclient = TokenLibrary.getSecret("xxxx", "rtrt")
clientid = TokenLibrary.getSecret("xxxx", "tyty")
clientcredentials = TokenLibrary.getSecret("xxxx", "abcabc")
authority_url = TokenLibrary.getSecret("xxxx", "abab")
cloudtest = TokenLibrary.getSecret("xxxx", "yyyy")
@staticmethod
def getInstance():
if not CloudLink._instance:
CloudLink._instance = CloudLink()
return CloudLink._instance
def __init__(self):
retry_strategy = Retry(
total=3,
backoff_factor=0,
status_forcelist=[429, 500, 502, 503, 504],
allowed_methods=["HEAD", "GET", "OPTIONS"],
)
adapter = HTTPAdapter(max_retries=retry_strategy)
self.http = requests.Session()
self.http.mount("https://", adapter)
self.http.mount("http://", adapter)
print("Inside init")
def parseJSON(self, t):
try:
eventData = json.loads(t)
logger.info(f"Sending {eventData} to cloud")
self.sendToCloud(eventData)
except ValueError as e:
print("Error: %s Please validate JSON in https://www.jsonschemavalidator.net/"% e)
return None # or: raise
def sendToCloud(self, eventData):
cloudData = {"eventData": eventData, "metadata": self._buildMetadata()}
logger.info(f"Raising alert with data=({cloudData}")
response = self.http.post(
self.cloudtest, headers=self._buildHeaders(), json=cloudData
)
logger.info(f"cloud alert response={response}")
if response.status_code == 202 or response.status_code == 200:
logger.info("Mail sent to Cloud")
else:
raise Exception(f"Cloud reporting failed with Error {response}")
def _buildJSONdata(self,Data):
if len(Data) == 0:
raise Exception("JSON is empty")
else:
t = json.dumps(Data) # serialize the argument passed in, not self.Data
self.parseJSON(t)
def _buildMetadata(self):
return {
"messageType": "Send Email",
"messageVersion": "0.0.1",
"sender": "Send Email",
}
def _buildHeaders(self):
self._refreshADToken()
headers = {
"Authorization": "Bearer {}".format(self._token["accessToken"]),
"Content-type": "application/json",
"Accept": "text/plain",
}
return headers
def _refreshADToken(self):
def shouldRenew(token):
"""Returns True if the token should be renewed"""
expiresOn = datetime.datetime.strptime(
token["expiresOn"], "%Y-%m-%d %H:%M:%S.%f"
)
now = datetime.datetime.now()
return (expiresOn - now) < datetime.timedelta(minutes=5)
if not self._token or shouldRenew(self._token):
logger.info("Renewing credentials for Alerting")
result = None
try:
context = adal.AuthenticationContext(CloudLink.authority_url)
result = context.acquire_token_with_client_credentials(CloudLink.cloudclient, CloudLink.clientid,CloudLink.clientcredentials)
except Exception as e:
error = "Failed to renew client credentials."
logger.info(error)
raise
if result and "accessToken" in result:
self._token = result
else:
logger.error(
"Failed to acquire bearer token. accessToken not found in result object on renewing credentials."
)
raise Exception("Could not acquire a bearer token")
I managed to successfully implement a webhook with Dialogflow CX in a repl environment with Flask, but I'm not sure how to move the webhook onto Google Cloud Functions. I've scoured the web for tutorials, but most of them cover Dialogflow ES. Any help is appreciated!
This is my current code on repl:
from flask import Flask, request
app = Flask(__name__)
def create_json_response(response_text, updated_params):
jsonResponse = {
"fulfillment_response":
{
"messages": [
{
"text": {
"text": [
str(response_text)
]
}
}
]
},
"sessionInfo": {
"parameters": updated_params
}
}
return jsonResponse
@app.route('/webhook', methods=['POST'])
def webhook():
req = request.get_json(silent=True, force=True)
fulfillmentInfo = req.get('fulfillmentInfo')
if fulfillmentInfo['tag'] == 'add':
updated_params = {}
total = 0
num1 = int(req.get('sessionInfo').get('parameters').get('number'))
num2 = int(req.get('sessionInfo').get('parameters').get('number1'))
total = str(num1 + num2)
updated_params['previous1'] = num1
updated_params['previous2'] = num2
updated_params['number'] = None
updated_params['number1'] = None
jsonResponse = create_json_response(total, updated_params)
return jsonResponse
elif fulfillmentInfo['tag'] == 'multiply':
updated_params = {}
total = 0
num1 = int(req.get('sessionInfo').get('parameters').get('number'))
num2 = int(req.get('sessionInfo').get('parameters').get('number1'))
total = str(num1 * num2)
updated_params['previous1'] = num1
updated_params['previous2'] = num2
updated_params['number'] = None
updated_params['number1'] = None
jsonResponse = create_json_response(total, updated_params)
return jsonResponse
@app.route('/') # this is the home page route
def hello_world(): # this is the home page function that generates the page code
return "Hello Beautiful World!"
if __name__ == '__main__':
app.run(host='0.0.0.0', port=8080) # This line is required to run Flask on repl.it
I'm answering my own question! I just needed to pass request as a parameter in my webhook function. Derp!
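For anyone landing here later, a minimal sketch of the same webhook as an HTTP-triggered Cloud Function (no Flask app object or route decorators; Cloud Functions passes the Flask request object in as the parameter, webhook is named as the entry point at deploy time, and create_json_response is the helper from the code above):
# main.py -- sketch of the webhook as a Cloud Functions entry point
def webhook(request):
    req = request.get_json(silent=True, force=True)
    tag = req['fulfillmentInfo']['tag']
    params = req['sessionInfo']['parameters']
    num1 = int(params.get('number'))
    num2 = int(params.get('number1'))
    total = str(num1 + num2) if tag == 'add' else str(num1 * num2)
    updated_params = {'previous1': num1, 'previous2': num2, 'number': None, 'number1': None}
    return create_json_response(total, updated_params)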
The following Groovy code creates a new CSV file (in this example testfile.csv) and writes JSON data into it. I do not want to create a new CSV file; I just want to append a few more lines to the existing testfile.csv without overwriting it. Could someone please tell me what to change in the following code so that it appends to the file instead of writing a new one? I have heard about StandardOpenOption.APPEND but have no idea where to put it. Thanks
import groovy.json.JsonSlurper;
import org.apache.commons.csv.CSVFormat;
import org.apache.commons.csv.CSVPrinter;
import com.oracle.e1.common.OrchestrationAttributes;
import java.text.SimpleDateFormat;
HashMap<String, Object> main(OrchestrationAttributes orchAttr, HashMap inputMap) {
HashMap<String, Object> returnMap = new HashMap<String, Object>();
returnMap.put("CSVComplete", "false");
// Write the view number after jsonIn.fs_DATABROWSE_
def jsonIn = new JsonSlurper().parseText(inputMap.get("Vendor Data"));
def jsonData = jsonIn.fs_DATABROWSE_GettingJsonDataFromSomewhere.data.gridData.rowset;
if (jsonData.size() == 0) {
returnMap.put("CSVComplete", "empty");
return returnMap;
}
def fileName = orchAttr.getTempFileName("testfile.csv");
returnMap.put("CSVOutFileName", fileName);
// string writer to build the CSV contents in memory
def sw = new StringWriter();
//build the CSV writer with a header
//def csv = new CSVPrinter(sw, CSVFormat.DEFAULT.withHeader("Business Unit", "Document Number", "LT", "SUB","Amount","HardcodedTHREAD","ApprovedBudget","fromview003"));
def csv = new CSVPrinter(sw, CSVFormat.DEFAULT); //No header
// create output file
fileCsvOut = new File(fileName);
def count=0;
// build the CSV
def an8Map = new ArrayList();
for (int i = 0; i < jsonData.size(); i++) {
def businessunit = jsonData[i].table1_column1;
if (an8Map.contains(businessunit)) {
continue;
}
an8Map.add(businessunit);
count++;
csv.printRecord(businessunit, jsonData[i].table_column,
jsonData[i].table1_column1, jsonData[i].table1_column2, jsonData[i].table1_column3, "Fixed text1", "Fixed text2", "Fixedtext3");
}
csv.close();
//writing csv to file
fileCsvOut.withWriter('UTF-8') {
writer ->
writer.write(sw.toString())
}
orchAttr.writeDebug(sw.toString());
returnMap.put("csv", sw.toString());
returnMap.put("CSVComplete", "true");
returnMap.put("CSVcount", Integer.toString(count));
return returnMap;
}
Use withWriterAppend instead of withWriter:
https://docs.groovy-lang.org/latest/html/groovy-jdk/java/io/File.html#withWriterAppend(java.lang.String,%20groovy.lang.Closure)
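Applied to the code above, only the final write block changes (a minimal sketch; withWriterAppend creates the file if it does not exist and appends otherwise, so the rest of the script can stay as it is):
// append to the existing testfile.csv instead of overwriting it
fileCsvOut.withWriterAppend('UTF-8') { writer ->
    writer.write(sw.toString())
}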
// import the required libraries
import groovy.json.JsonOutput
import groovyx.net.http.HttpResponseException
import groovy.json.JsonSlurper
// set the command-line arguments into global variables
def reqUrl = "<Bitbucket URL>"
def branchUAT = "<branchname>"
def branchRelease = "<branchname>"
def projects = mergeRequest.pullRequest.toRef.repository.project.key
String repoString = mergeRequest.pullRequest.toRef.repository
repos = repoString.tokenize("/")[1].tokenize("[")[0]
def branchSource = mergeRequest.pullRequest.fromRef.displayId
def branchDestination = mergeRequest.pullRequest.toRef.displayId
//define the required variables
def UAT_Commits_List = [] as String[]
def JSON_slurper = new groovy.json.JsonSlurper()
if (branchDestination == branchRelease) {
def UAT_Commits_uri_curl = [ 'bash', '-c', "curl https://${reqUrl}/rest/api/1.0/projects/${projects}/repos/${repos}/compare/commits?from=${branchUAT}" ].execute().text
def UAT_Commits_uri_result = JSON_slurper.parseText(UAT_Commits_uri_curl)
try {
def PRApproveArray = UAT_Commits_uri_result.values
PRApproveArray.each {
UAT_Commits_List = UAT_Commits_List + it['id']
}
}
catch(HttpResponseException e) {
catchMethod(e)
}
}
Error: You tried to call a method which is not allowed: groovy.json.JsonSlurper#parseText(java.lang.String)
# line 28, column 32.
def UAT_Commits_uri_result = JSON_slurper.parseText(UAT_Commits_uri_curl)
class Account {
Date dob=new Date();
}
import net.sf.json.JSON;
import groovyx.net.http.RESTClient;
def rest = new RESTClient("http://localhost:9090/Rest/rest")
def account = new Account()
def resp = rest.post(
contentType: "application/json",
body: account
)
The JSON content is formed as:
{"dob":{"date":19,"day":1,"hours":17,"minutes":34,"month":4,"seconds":44,"time":1400501084326,"timezoneOffset":-330,"year":114}}
How can I override the JSON serializer so that a Date is serialized as a Long (getTime())?
It is a little bit late and I think this is not the cleanest approach, but I hope it helps.
You need to replace the encoder in the EncoderRegistry with your own JSON serializer.
import java.nio.charset.StandardCharsets
import org.apache.http.entity.InputStreamEntity
import groovyx.net.http.EncoderRegistry
import groovyx.net.http.RESTClient

RESTClient restClient = new RESTClient("http://myrest.com")
EncoderRegistry encoderRegistry = restClient.getEncoder()
encoderRegistry.putAt(groovyx.net.http.ContentType.JSON, {it ->
def builder = new groovy.json.JsonBuilder();
builder.content = it
ByteArrayInputStream dataStreamed = new ByteArrayInputStream(builder.toString().getBytes(StandardCharsets.UTF_8))
InputStreamEntity res = new InputStreamEntity(dataStreamed);
res.setContentType(groovyx.net.http.ContentType.JSON.toString())
res;
})
Using JsonBuilder instead of the REST client's default encoder, dates are sent in the standard ISO-8601 format: YYYY-MM-DDThh:mm:ss.sTZD.
Then you can use the restClient as usual:
def response = restClient.post(
path: path,
headers: ["User-Agent": "UserAgent"],
query: query,
body: body,
requestContentType: groovyx.net.http.ContentType.JSON
)
By the way, if you want to replace the parser, you need to do the same with the ParserRegistry:
import org.apache.commons.io.IOUtils
import com.fasterxml.jackson.databind.ObjectMapper
import com.fasterxml.jackson.core.type.TypeReference

ParserRegistry parserRegistry = restClient.getParser()
parserRegistry.putAt(groovyx.net.http.ContentType.JSON, { HttpResponseDecorator resp ->
if (resp.status == 200) {
String jsonString = IOUtils.toString(resp.getEntity().getContent(), "UTF-8")
// MyObject is whatever type the response should be deserialized into
return new ObjectMapper().readValue(jsonString, new TypeReference<MyObject>() {})
} else {
throw new Exception("Not found")
}
})
Some variations on Iñaki's answer.
Example of replacing the default JSON encoder with a JsonGenerator:
import java.time.Instant
import org.apache.http.entity.StringEntity

def generator = new groovy.json.JsonGenerator.Options()
.excludeNulls()
.addConverter(Instant) { Instant i ->
i.toEpochMilli()
}
.build()
restClient.getEncoder().putAt(groovyx.net.http.ContentType.JSON, {body ->
StringEntity entity = new StringEntity(generator.toJson(body))
entity.setContentType( groovyx.net.http.ContentType.JSON.toString() )
return entity
})
Example of replacing both the JSON encoder and parser with an ObjectMapper:
import com.fasterxml.jackson.databind.DeserializationFeature
import com.fasterxml.jackson.databind.ObjectMapper
import com.fasterxml.jackson.databind.SerializationFeature
import com.fasterxml.jackson.datatype.jsr310.JavaTimeModule
import org.apache.http.entity.StringEntity
ObjectMapper objectMapper = new ObjectMapper()
objectMapper.registerModule(new JavaTimeModule())
objectMapper.enable(SerializationFeature.WRITE_DATES_AS_TIMESTAMPS)
objectMapper.disable(SerializationFeature.WRITE_DATE_TIMESTAMPS_AS_NANOSECONDS)
objectMapper.disable(DeserializationFeature.READ_DATE_TIMESTAMPS_AS_NANOSECONDS)
restClient.getEncoder().putAt(groovyx.net.http.ContentType.JSON, {body ->
StringEntity entity = new StringEntity(objectMapper.writeValueAsString(body))
entity.setContentType( groovyx.net.http.ContentType.JSON.toString() )
return entity
})
restClient.getParser().putAt(groovyx.net.http.ContentType.JSON, { resp ->
return objectMapper.readValue(resp.entity.content, YourPojo.class)
})
Probably not the brightest solution, but you can get the properties map from the account object and change the date key:
import net.sf.json.*
class Account {
Date date = Date.parse("yyyy-MM-dd", '2014-05-01')
String name
}
a = new Account(name: 'john doe')
aMap = a.properties
aMap.remove('class') // a.properties also includes the class property
aMap.date = aMap.date.time
json = JSONObject.fromObject(aMap)
assert json.toString() == '{"date":1398913200000,"name":"john doe"}'
You can register an object marshaller for Date in the Grails BootStrap file, for example:
import grails.converters.JSON

class BootStrap {
def init = { servletContext ->
JSON.registerObjectMarshaller(Date) {
return it.time
}
}
}
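With that marshaller registered, any Date rendered through the Grails JSON converter comes out as epoch milliseconds. A hypothetical controller sketch (AccountController is illustrative, not from the original post), reusing the Account class from the question:
import grails.converters.JSON

class AccountController {
    def show() {
        // dob is now rendered as a Long (epoch millis), e.g. {"dob":1400501084326}
        render new Account() as JSON
    }
}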