Using a webhook from Google Cloud Functions with Dialogflow CX - google-cloud-functions

I managed to successfully implement a webhook with Dialogflow CX in a repl environment with Flask, but I'm not sure how to move the webhook onto Google Cloud Functions. I've scoured the web for tutorials, but most are for Dialogflow ES. Any help is appreciated!
This is my current code on repl:
from flask import Flask, request

app = Flask(__name__)


def create_json_response(response_text, updated_params):
    jsonResponse = {
        "fulfillment_response": {
            "messages": [
                {
                    "text": {
                        "text": [
                            str(response_text)
                        ]
                    }
                }
            ]
        },
        "sessionInfo": {
            "parameters": updated_params
        }
    }
    return jsonResponse


@app.route('/webhook', methods=['POST'])
def webhook():
    req = request.get_json(silent=True, force=True)
    fulfillmentInfo = req.get('fulfillmentInfo')
    if fulfillmentInfo['tag'] == 'add':
        updated_params = {}
        total = 0
        num1 = int(req.get('sessionInfo').get('parameters').get('number'))
        num2 = int(req.get('sessionInfo').get('parameters').get('number1'))
        total = str(num1 + num2)
        updated_params['previous1'] = num1
        updated_params['previous2'] = num2
        updated_params['number'] = None
        updated_params['number1'] = None
        jsonResponse = create_json_response(total, updated_params)
        return jsonResponse
    elif fulfillmentInfo['tag'] == 'multiply':
        updated_params = {}
        total = 0
        num1 = int(req.get('sessionInfo').get('parameters').get('number'))
        num2 = int(req.get('sessionInfo').get('parameters').get('number1'))
        total = str(num1 * num2)
        updated_params['previous1'] = num1
        updated_params['previous2'] = num2
        updated_params['number'] = None
        updated_params['number1'] = None
        jsonResponse = create_json_response(total, updated_params)
        return jsonResponse


@app.route('/')  # this is the home page route
def hello_world():  # this is the home page function that generates the page code
    return "Hello Beautiful World!"


if __name__ == '__main__':
    app.run(host='0.0.0.0', port=8080)  # This line is required to run Flask on repl.it

I'm answering my own question! I just needed to pass request as a parameter in my webhook function. Derp!
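For completeness, here is a minimal sketch of what that looks like on Google Cloud Functions (the tag names and response shape are taken from the code above; the function name, file name, and deploy command are just the usual Cloud Functions conventions). The HTTP entry point receives the Flask request object as its parameter, so the @app.route decorator and app.run() call are no longer needed:

# main.py - deploy with e.g.:
#   gcloud functions deploy webhook --runtime python39 --trigger-http --allow-unauthenticated
from flask import jsonify


def webhook(request):
    """HTTP Cloud Function: Dialogflow CX webhook. 'request' is a flask.Request."""
    req = request.get_json(silent=True, force=True)
    tag = req.get('fulfillmentInfo', {}).get('tag')
    params = req.get('sessionInfo', {}).get('parameters', {})
    num1 = int(params.get('number'))
    num2 = int(params.get('number1'))
    total = str(num1 + num2) if tag == 'add' else str(num1 * num2)
    return jsonify({
        "fulfillment_response": {
            "messages": [{"text": {"text": [total]}}]
        },
        "sessionInfo": {
            "parameters": {
                "previous1": num1,
                "previous2": num2,
                "number": None,
                "number1": None,
            }
        },
    })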

Related

Unable to use method of a class in a different class - missing 2 required positional arguments

I have two Python classes: one class (CloudLink) is responsible for sending JSON events to the app, and another (ReadData) is responsible for building the JSON data.
The ReadData class uses the CloudLink methods to send the JSON data to the app, but I'm getting the error _buildJSONdata() missing 1 required positional argument: 'Data'.
ReadData class
from pyspark.sql import SparkSession
import functools
from pyspark.sql import DataFrame
from pyspark.sql.functions import explode
from cosmosconnect import azurecosmos


class ReadData:
    @exception(logger)
    def __init__(self):
        self.spark_session = (
            SparkSession.builder
            .appName("readData")
            .getOrCreate()
        )
        mssparkutils.fs.unmount('/mnt/test')
        logger.info("Drive unmounted")
        mssparkutils.fs.mount(
            'abfss://abc@transl.dfs.core.windows.net/',
            '/mnt/test',
            {'linkedService': "linkCosmos"}
        )
        logger.info("Mounted Successfully")
        self.input_directory = f"synfs:/{mssparkutils.env.getJobId()}/mnt/test/input_path"
        self.output_directory = f"synfs:/{mssparkutils.env.getJobId()}/mnt/test/output_path"

    '''
    Reading the schema from csv file
    '''
    @exception(logger)
    def readConfig(self):
        try:
            logger.info(f"Reading the Config present in {self.input_directory} ")
            dfConfig = self.spark_session.read.option("multiline", "true") \
                .json(self.input_directory)
            # for f in dfConfig.select("Entity","Query","Business_Rule").collect():
            dfConfig = dfConfig.select(explode('Input').alias('Input_Data')) \
                .select('Input_Data.Validation_Type', 'Input_Data.Entity', 'Input_Data.Query', 'Input_Data.Business_Rule')
            for f in dfConfig.rdd.toLocalIterator():
                # for index, f in dfConfig.toPandas().iterrows():
                self.Validation_Type = f[0]
                self.container = f[1]
                self.query = f[2]
                self.rule = f[3]
                self.readCosmos(self)
        except:
            raise ValueError("")

    @exception(logger)
    def readCosmos(self, *params):
        # from cosmosconnect import azurecosmos
        # a=[]
        linkedService = 'fg'
        df = azurecosmos.cosmosConnect(linkedService, self.query, self.container)
        df.cache()
        if len(df.head(1)) > 0:
            outputpath = self.output_directory + '/' + self.container
            df.coalesce(1).write.mode('overwrite').parquet(outputpath)
            Status = "Validation Failure"
            Data = {"Validation_Type": [], "Status": [], "Container": [], "Business_Rule": []}
            Data["Validation_Type"].append(self.Validation_Type)
            Data["Status"].append(Status)
            Data["Container"].append(self.container)
            Data["Business_Rule"].append(self.rule)
            CloudLink._buildJSONdata(Data)


if __name__ == "__main__":
    p = ReadData()
    p.readConfig()
CloudLink class
import json
import datetime
import hashlib
import json
import sys
import traceback
import adal
import requests
from requests.adapters import HTTPAdapter
from requests.packages.urllib3.util.retry import Retry
import logging
from functools import wraps
import sys


def create_logger():
    # create a logger object
    # logger = logging.getLogger()
    logger = logging.getLogger()
    logger.setLevel(logging.INFO)
    logfile = logging.FileHandler('exc_logger.log')
    # logfile = logging.StreamHandler(sys.stdout)
    fmt = '%(asctime)s - %(name)s - %(levelname)s - %(message)s'
    formatter = logging.Formatter(fmt)
    logfile.setFormatter(formatter)
    logger.addHandler(logfile)
    return logger


logger = create_logger()


def exception(logger):
    def decorator(func):
        @wraps(func)
        def wrapper(*args, **kwargs):
            try:
                return func(*args, **kwargs)
            except:
                issue = "exception in " + func.__name__ + "\n"
                issue = issue + "-------------------------" \
                                "------------------------------------------------\n"
                logger.exception(issue)
                raise
        return wrapper
    return decorator


class CloudLink(object):
    _token = None
    _instance = None
    http = None
    cloudclient = TokenLibrary.getSecret("xxxx", "rtrt")
    clientid = TokenLibrary.getSecret("xxxx", "tyty")
    clientcredentials = TokenLibrary.getSecret("xxxx", "abcabc")
    authority_url = TokenLibrary.getSecret("xxxx", "abab")
    cloudtest = TokenLibrary.getSecret("xxxx", "yyyy")

    @staticmethod
    def getInstance():
        if not CloudLink._instance:
            CloudLink._instance = CloudLink()
        return CloudLink._instance

    def __init__(self):
        retry_strategy = Retry(
            total=3,
            backoff_factor=0,
            status_forcelist=[429, 500, 502, 503, 504],
            allowed_methods=["HEAD", "GET", "OPTIONS"],
        )
        adapter = HTTPAdapter(max_retries=retry_strategy)
        self.http = requests.Session()
        self.http.mount("https://", adapter)
        self.http.mount("http://", adapter)
        print("Inside init")

    def parseJSON(self, t):
        try:
            eventData = json.loads(t)
            logger.info(f"Sending {eventData} to cloud")
            self.sendToCloud(eventData)
        except ValueError as e:
            print("Error: %s Please validate JSON in https://www.jsonschemavalidator.net/" % e)
            return None  # or: raise

    def sendToCloud(self, eventData):
        cloudData = {"eventData": eventData, "metadata": self._buildMetadata()}
        logger.info(f"Raising alert with data=({cloudData}")
        response = self.http.post(
            self.cloudtest, headers=self._buildHeaders(), json=cloudData
        )
        logger.info(f"cloud alert response={response}")
        if response.status_code == 202 or response.status_code == 200:
            logger.info("Mail sent to Cloud")
        else:
            raise Exception(f"Cloud reporting failed with Error {response}")

    def _buildJSONdata(self, Data):
        if len(Data) == 0:
            raise Exception("JSON is empty")
        else:
            t = json.dumps(self.Data)
            self.parseJSON(t)

    def _buildMetadata(self):
        return {
            "messageType": "Send Email",
            "messageVersion": "0.0.1",
            "sender": "Send Email",
        }

    def _buildHeaders(self):
        self._refreshADToken()
        headers = {
            "Authorization": "Bearer {}".format(self._token["accessToken"]),
            "Content-type": "application/json",
            "Accept": "text/plain",
        }
        return headers

    def _refreshADToken(self):
        def shouldRenew(token):
            """Returns True if the token should be renewed"""
            expiresOn = datetime.datetime.strptime(
                token["expiresOn"], "%Y-%m-%d %H:%M:%S.%f"
            )
            now = datetime.datetime.now()
            return (expiresOn - now) < datetime.timedelta(minutes=5)

        if not self._token or shouldRenew(self._token):
            logger.info("Renewing credentials for Alerting")
            result = None
            try:
                context = adal.AuthenticationContext(CloudLink.authority_url)
                result = context.acquire_token_with_client_credentials(CloudLink.cloudclient, CloudLink.clientid, CloudLink.clientcredentials)
            except Exception as e:
                error = "Failed to renew client credentials."
                logger.info(error)
                raise
            if result and "accessToken" in result:
                self._token = result
            else:
                logger.error(
                    "Failed to acquire bearer token. accessToken not found in result object on renewing credentials."
                )
                raise Exception("Could not acquire a bearer token")
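As an aside (my note, not from the original post): the "missing 1 required positional argument" error is the usual symptom of calling an instance method on the class itself, so the Data argument gets bound to self. Calling the method through an instance, for example via the getInstance() accessor defined above, avoids that. A minimal sketch of the difference:

class Example:
    def method(self, data):
        return data


try:
    # Unbound call: the dict is bound to 'self', so 'data' is missing,
    # mirroring CloudLink._buildJSONdata(Data) in the question.
    Example.method({"a": 1})
except TypeError as exc:
    print(exc)  # method() missing 1 required positional argument: 'data'

# Bound call through an instance works, e.g. CloudLink.getInstance()._buildJSONdata(Data)
print(Example().method({"a": 1}))  # {'a': 1}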

Json Post from Django to Camunda

Further to my earlier post yesterday: Post request to external Rest Service using Django - use returned json to update model
I have managed to post data to Camunda from Django using requests.post with the following hard-coded payload:
payload = "{\n \"businessKey\": \"SomeValue\",\n \"variables\": {\n \"Organisation_ID\": {\n \"value\": \"SOmeUUID\",\n \"type\": \"String\"\n },\n \"UserID\": {\n \"value\":\"Some User ID\",\n \"type\": \"String\"\n }\n }\n}"
However, when I start to use variables from the form and format my payload using:
class StartProcessView(View):
    template_name = 'startdeliveryphase.html'

    def get(self, request, *args, **kwargs):
        form = IntStartDeliveryPhase
        return render(request, self.template_name, {'form': form})

    def post(self, request, *args, **kwargs):
        form = IntStartDeliveryPhase(request.POST or None)
        if form.is_valid():
            data = form.cleaned_data
            OrganisationID = data['Form_Field_OrganisationID']
            UserID = data['Form_Field_User_ID']
            BusinessKey = data['Form_Field_Business_Key']
            url = "http://localhost:8080/engine-rest/process-definition/key/Process_B_PerProject/start"
            payload = {"businessKey": BusinessKey, "variables": [{"Organisation": [{"value": OrganisationID, "type": "String"}]}, [{"Startedby": [{"value": UserID, "type": "String"}]}]]}
            headers = {
                'Content-Type': 'application/json'
            }
            response = requests.request("POST", url, headers=headers, data=payload)
            # print(response.errors)
            print(response.text.encode('utf8'))
            return render(request)
        else:
            return render(request, self.template_name, {'form': form})
I get an error from the Camunda engine:
b'{"type":"JsonParseException","message":"Unrecognized token \'businessKey\': was expecting (\'true\', \'false\' or \'null\')\\n at [Source: (org.camunda.bpm.engine.rest.filter.EmptyBodyFilter$1$1); line: 1, column: 13]"}'
The local vars show the following:
▼ Local vars
Variable Value
BusinessKey
'1qaz'
OrganisationID
<Organisation: Some Local Authoristy>
UserID
<Actor_User: me#me.com>
args
()
data
{'Form_Field_Business_Key': '1qaz',
'Form_Field_CamundaInstanceID': 'sss',
'Form_Field_Camunda_HRef': 'ss',
'Form_Field_Camunda_TenantID': '22',
'Form_Field_DateCreated': datetime.datetime(2020, 4, 23, 19, 22, 30, tzinfo=<StaticTzInfo 'GMT'>),
'Form_Field_OrganisationID': <Organisation: Some Local Authoristy>,
'Form_Field_User_ID': <Actor_User: me#me.com>}
form
<IntStartDeliveryPhase bound=True, valid=True, fields=(Form_Field_OrganisationID;Form_Field_DateCreated;Form_Field_CamundaInstanceID;Form_Field_Camunda_HRef;Form_Field_Camunda_TenantID;Form_Field_User_ID;Form_Field_Business_Key)>
headers
{'Content-Type': 'application/json'}
kwargs
{}
payload
{'businessKey': '1qaz',
'variables': [{'Organisation': [{'type': 'String',
'value': <Organisation: Some Local Authoristy>}]},
[{'Startedby': [{'type': 'String',
'value': <Actor_User: me#me.com>}]}]]}
request
<WSGIRequest: POST '/bimProcess/'>
response
<Response [400]>
self
<bimProcess.views.StartProcessView object at 0x055B7898>
url
'http://localhost:8080/engine-rest/process-definition/key/Process_B_PerProject/start'
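For context (my explanation, not from the original post): when requests is given a plain dict via data=, it form-encodes the body instead of sending JSON, so Camunda receives something like businessKey=1qaz&variables=... and its JSON parser fails on the very first token, which is exactly the "Unrecognized token 'businessKey'" error above. A small sketch of what actually goes over the wire:

import requests

# Prepare (but do not send) a request the way the view above does, with data=<dict>
prepared = requests.Request(
    "POST",
    "http://localhost:8080/engine-rest/process-definition/key/Process_B_PerProject/start",
    data={"businessKey": "1qaz", "variables": "..."},
).prepare()

print(prepared.headers["Content-Type"])  # application/x-www-form-urlencoded
print(prepared.body)                     # businessKey=1qaz&variables=...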
How do I get the format required by Camunda, into which I can insert my variables with the required double quotes?
EDIT: Yay - I have finally got the correct sequence!
def post(self, request, *args, **kwargs):
    form = IntStartDeliveryPhase(request.POST or None)
    if form.is_valid():
        data = form.cleaned_data
        OrganisationID = str(data['Form_Field_OrganisationID'])
        UserID = str(data['Form_Field_User_ID'])
        BusinessKey = data['Form_Field_Business_Key']
        url = "http://localhost:8080/engine-rest/process-definition/key/Process_B_PerProject/start"
        payload = {"businessKey": BusinessKey, "variables": {"Organisation": {"value": OrganisationID, "type": "String"}, "Startedby": {"value": UserID, "type": "String"}}}
        headers = {
            'Content-Type': 'application/json'
        }
        payload2 = json.dumps(payload)
        print(payload2)
        response = requests.request("POST", url, headers=headers, data=payload2)
        # print(response.errors)
        print(response.text.encode('utf8'))
        return render(request)
    else:
        return render(request, self.template_name, {'form': form})
Now for the next questions:
1) I get a 200 response back from Camunda: the payload returned by the POST request needs to go back into the form data, where it can then be saved without user interruption.
2) I am doing a POST to self on this form - what is the best way of achieving the sequence flow? Should I do a redirect and pass the data through, or is there a more efficient way? Also, is there a better way to achieve the view.py I have posted that is more efficient?
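On the follow-up about tidying the view (my suggestion, not part of the original post): requests can serialize the dict itself via the json= argument, which also sets the Content-Type header, so the explicit json.dumps step is not needed:

import requests

# example values; in the view these come from form.cleaned_data
business_key = "1qaz"
organisation_id = "Some Local Authority"
user_id = "user@example.com"

url = "http://localhost:8080/engine-rest/process-definition/key/Process_B_PerProject/start"
payload = {
    "businessKey": business_key,
    "variables": {
        "Organisation": {"value": organisation_id, "type": "String"},
        "Startedby": {"value": user_id, "type": "String"},
    },
}

# json= serializes the payload and sets Content-Type: application/json automatically
response = requests.post(url, json=payload)
print(response.status_code, response.json())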

Bitbucket Merge script working in Global Admin but not in Admin to repository

// import the required libraries
import groovy.json.JsonOutput
import groovyx.net.http.HttpResponseException
import groovy.json.JsonSlurper

// set the command-line arguments into global variables
def reqUrl = "<Bitbucket URL>"
def branchUAT = "<branchname>"
def branchRelease = "<branchname>"
def projects = mergeRequest.pullRequest.toRef.repository.project.key
String repoString = mergeRequest.pullRequest.toRef.repository
repos = repoString.tokenize("/")[1].tokenize("[")[0]
def branchSource = mergeRequest.pullRequest.fromRef.displayId
def branchDestination = mergeRequest.pullRequest.toRef.displayId

// define the required variables
def UAT_Commits_List = [] as String[]
def JSON_slurper = new groovy.json.JsonSlurper()

if (branchDestination == branchRelease) {
    def UAT_Commits_uri_curl = [ 'bash', '-c', "curl https://${reqUrl}/rest/api/1.0/projects/${projects}/repos/${repos}/compare/commits?from=${branchUAT}" ].execute().text
    def UAT_Commits_uri_result = JSON_slurper.parseText(UAT_Commits_uri_curl)
    try {
        def PRApproveArray = UAT_Commits_uri_result.values
        PRApproveArray.each {
            UAT_Commits_List = UAT_Commits_List + it['id']
        }
    }
    catch (HttpResponseException e) {
        catchMethod(e)
    }
}
Error: You tried to call a method which is not allowed: groovy.json.JsonSlurper#parseText(java.lang.String)
# line 28, column 32.
def UAT_Commits_uri_result = JSON_slurper.parseText(UAT_Commits_uri_curl)

Migration from Google Drive API from v2 to v3 giving inconsistent results

I have a very simple program to list files in a Google Drive. I have written two versions, using v2 and v3 of the API. However, when I run them I get inconsistent results: v2 works fine, v3 doesn't. They are both written in Groovy, which is a superset of Java. To install Groovy, use SDKMAN.
v2.groovy
@GrabResolver(name='com.google.apis', root='http://google-api-client-libraries.appspot.com/mavenrepo')
@Grapes([
    @Grab(group='com.google.api-client', module='google-api-client', version='1.12.0-beta'),
    @Grab(group='com.google.apis', module='google-api-services-drive', version='v2-rev30-1.12.0-beta'),
    @Grab(group='com.google.http-client', module='google-http-client-jackson', version='1.12.0-beta')
])
import com.google.api.client.googleapis.auth.oauth2.GoogleAuthorizationCodeFlow
import com.google.api.client.googleapis.auth.oauth2.GoogleCredential
import com.google.api.client.googleapis.auth.oauth2.GoogleTokenResponse
import com.google.api.client.http.HttpTransport
import com.google.api.client.json.JsonFactory
import com.google.api.client.http.javanet.NetHttpTransport
import com.google.api.client.json.jackson.JacksonFactory
import com.google.api.services.drive.Drive
import com.google.api.services.drive.DriveScopes
import com.google.api.services.drive.model.*
import groovy.json.*
import com.google.api.client.http.FileContent

class GD {
    def drive
    def credential
    def httpTransport
    def jsonFactory
    def scopes

    def GD(config) {
        httpTransport = new NetHttpTransport()
        jsonFactory = new JacksonFactory()
        scopes = DriveScopes.DRIVE
        credential = new GoogleCredential.Builder().setTransport(httpTransport)
            .setJsonFactory(jsonFactory)
            .setServiceAccountId(config.accountId)
            .setServiceAccountScopes(scopes)
            .setServiceAccountPrivateKeyFromP12File(new java.io.File(config.p12File))
            .setServiceAccountUser(config.accountUser)
            .build();
        credential.refreshToken();
        drive = new Drive.Builder(httpTransport, jsonFactory, credential).setApplicationName(config.applicationName).build()
    }

    def getAllFiles() throws IOException {
        def list = []
        def request = drive.files().list()
        while (true) {
            def files = request.execute()
            list += files.getItems()
            request.pageToken = files.nextPageToken
            if (!request.pageToken)
                break
            if (list.size() % 10 == 0)
                print "${list.size()} ...\r"
        }
        println "${list.size()}"
        return list
    }
}

def config = [
    accountId: '13242342543534653', // replace by your user id
    accountUser: 'xxxx@appspot.gserviceaccount.com', // replace by your account email
    p12File: '/home/pascal/wep-ffah-f01d3bfec317.p12', // replace by path to your p12File
    applicationName: 'myapp', // replace by your app
]
def drive = new GD (config)
println "retrieving files"
drive.getAllFiles().each { println "${it.id} ${it.title}: ${it.mimeType}" }
v3.groovy
@GrabResolver(name='com.google.apis', root='http://google-api-client-libraries.appspot.com/mavenrepo')
@Grapes([
    @Grab(group='com.google.api-client', module='google-api-client', version='1.21.0'),
    @Grab(group='com.google.http-client', module='google-http-client-jackson2', version='1.21.0'),
    @Grab(group='com.google.apis', module='google-api-services-drive', version='v3-rev16-1.21.0')
    // @Grab(group='com.google.apis', module='google-api-services-prediction', version='v1.6-rev59-1.21.0'),
])
import com.google.api.client.googleapis.auth.oauth2.GoogleCredential
import com.google.api.client.googleapis.javanet.GoogleNetHttpTransport
import com.google.api.client.json.jackson2.JacksonFactory
// import com.google.api.services.prediction.PredictionScopes
import com.google.api.services.drive.Drive
import com.google.api.services.drive.DriveScopes
import com.google.api.services.drive.model.*
import groovy.json.*
import java.util.Collections

class GD {
    def drive
    def credential
    def httpTransport
    def jsonFactory
    def scopes

    def GD(config) {
        httpTransport = GoogleNetHttpTransport.newTrustedTransport()
        jsonFactory = JacksonFactory.getDefaultInstance()
        scopes = Collections.singleton(DriveScopes.DRIVE)
        credential = GoogleCredential.fromStream(new FileInputStream(config.jsonFile), httpTransport, jsonFactory)
        credential.serviceAccountScopes = scopes
        drive = new Drive.Builder(httpTransport, jsonFactory, credential).setApplicationName(config.applicationName).build()
    }

    def getAllFiles() throws IOException {
        def list = []
        def request = drive.files().list()
        while (true) {
            def files = request.execute()
            list += files.getFiles()
            request.pageToken = files.getNextPageToken()
            if (!request.pageToken && request.pageToken.length > 0)
                break
            if (list.size() % 10 == 0)
                print "${list.size()} ...\r"
        }
        println "${list.size()}"
        return list
    }
}

/************************************** Start here ******************************************/
def config = [
    jsonFile: '/home/myhome/yourapp-b954bxxxxx.json', // replace by your json credential file
    applicationName: 'yourapp', // set your app
]
def drive = new GD (config)
println "retrieving files"
drive.getAllFiles ().each { println "${it.id} ${it.name}: ${it.mimeType}" }
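One detail worth flagging (my observation, not from the original post): the v3 loop's break condition `!request.pageToken && request.pageToken.length > 0` can never hold when a page token is present and dereferences a null token when it is absent, whereas the v2 loop simply broke when the token was empty. For illustration only, here is the same v3 pagination pattern in Python (the other language used in this collection), using the google-api-python-client with the v2-style break condition; the credential file path and scope are assumptions:

from google.oauth2 import service_account
from googleapiclient.discovery import build

# hypothetical service-account credentials for illustration
creds = service_account.Credentials.from_service_account_file(
    "yourapp-credentials.json", scopes=["https://www.googleapis.com/auth/drive"]
)
service = build("drive", "v3", credentials=creds)

files, page_token = [], None
while True:
    response = service.files().list(
        pageSize=100,
        fields="nextPageToken, files(id, name, mimeType)",
        pageToken=page_token,
    ).execute()
    files.extend(response.get("files", []))
    page_token = response.get("nextPageToken")
    if not page_token:  # break when there is no next page (the v2-style condition)
        break

for f in files:
    print(f["id"], f["name"], f["mimeType"])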

Status code error while fetching JSON data

I am trying to extract JSON data from a URL with parameters, and my code is as follows:
import json
from flask import Flask, render_template, request, jsonify
import requests

app = Flask(__name__)
import urllib2


@app.route("/", methods=['GET', 'POST'])
def home():
    if request.method == 'POST':
        # user inputs
        valueone = request.form.get('first')
        valueTwo = request.form.get('second')
        valueThree = request.form.get('third')
        # api call
        data = {"perfid": {0}, "section": {"hostname": {1}, "iteration": {2}, "sectionname": "sysstat_M"}}
        req = urllib2.Request('http://api-latx-dev.corp.netapp.com/ws/spm/spm-general', json.dumps(data))
        response = urllib2.urlopen(req)
        the_page = response.read()
        url = response.getcode()
        returnData = {}
        if url.status_code == 200:
            returnData["status"] = "SUCCESS"
            returnData["result"] = the_page
            return jsonify(returnData)
        else:
            returnData["status"] = "ERROR"
            return jsonify(returnData)
        # jsonify(response.json())
    return render_template('index.html')
I need to fetch the data, but getcode() returns an int, which does not work with "if url.status_code == 200". Can anyone please suggest how to deal with this?
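Two observations, neither from the original post: with the urllib2 code as written, url is already the integer returned by getcode(), so the comparison would simply be if url == 200; alternatively, since requests is already imported, the whole call can be made with it, and the Response object exposes status_code directly. A minimal sketch of the latter (the placeholder values stand in for the form inputs):

import requests

# hypothetical example values standing in for the form inputs in the question
data = {
    "perfid": 0,
    "section": {"hostname": "host1", "iteration": 1, "sectionname": "sysstat_M"},
}

response = requests.post(
    "http://api-latx-dev.corp.netapp.com/ws/spm/spm-general", json=data
)

returnData = {}
if response.status_code == 200:
    returnData["status"] = "SUCCESS"
    returnData["result"] = response.text
else:
    returnData["status"] = "ERROR"
print(returnData)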