How to configure CloudFunctionInvokeFunctionOperator to call a Cloud Function on Composer (Airflow) - google-cloud-functions

I ran into the following error when invoking a Cloud Function with CloudFunctionInvokeFunctionOperator:
line 915, in execute raise HttpError(resp, content, uri=self.uri) googleapiclient.errors.HttpError: <HttpError 404 when requesting https://cloudfunctions.googleapis.com/v1/projects/pongthorn/locations/asia-southeast1/functions/crypto-trading-to-bq:call?alt=json returned "Function crypto-trading-to-bq in region asia-southeast1 in project pongthorn does not exist". Details: "Function crypto-trading-to-bq in region asia-southeast1 in project pongthorn does not exist">
I assume I made a mistake with the function ID. What is the function ID? The figure below shows my Cloud Function, and its name is crypto-trading-to-bq.
Are the function ID and the function name the same thing?
I set three variables in a JSON file and uploaded it to Airflow with the following values:
{
    "project_id": "pongthorn",
    "region_name": "asia-southeast1",
    "function_name": "crypto-trading-to-bq"
}
This is my code:
import datetime

import airflow
from airflow.providers.google.cloud.operators.functions import (
    CloudFunctionDeleteFunctionOperator,
    CloudFunctionDeployFunctionOperator,
    CloudFunctionInvokeFunctionOperator,
)

YESTERDAY = datetime.datetime.now() - datetime.timedelta(days=1)

XProjdctID = airflow.models.Variable.get('project_id')
XRegion = airflow.models.Variable.get('region_name')
XFunction = airflow.models.Variable.get('function_name')

default_args = {
    'owner': 'Binance Trading Transaction',
    'depends_on_past': False,
    'email': [''],
    'email_on_failure': False,
    'email_on_retry': False,
    'retries': 1,
    'retry_delay': datetime.timedelta(minutes=5),
    'start_date': YESTERDAY,
}

with airflow.DAG(
        'bn_trading_flow',
        catchup=False,
        default_args=default_args,
        schedule_interval=datetime.timedelta(days=1)) as dag:

    call_crypto_trading_to_bq = CloudFunctionInvokeFunctionOperator(
        task_id="load_crypto_trading_to_bq",
        location=XRegion,
        function_id=XFunction,
        input_data={},
    )
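One hedged variant worth trying: CloudFunctionInvokeFunctionOperator also accepts an optional project_id argument, and when it is omitted the operator falls back to the project configured on the GCP connection, which may not be the project you expect. A minimal sketch that pins all three values explicitly:

call_crypto_trading_to_bq = CloudFunctionInvokeFunctionOperator(
    task_id="load_crypto_trading_to_bq",
    project_id=XProjdctID,   # explicit project instead of the connection default
    location=XRegion,
    function_id=XFunction,   # interpolated into .../locations/<location>/functions/<function_id>
    input_data={},
)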

Related

Pass data with SimpleHttpOperator to trigger a 2nd-gen cloud function

I have the following task:
this_is_a_task = SimpleHttpOperator(
    task_id='task_id',
    method='POST',
    http_conn_id='conn_id',
    endpoint='/?test=foo',
    # data={"test": "foo"},
    headers={"Content-Type": "application/json"},
)
On the Cloud Functions side, I'm trying to catch the parameters in the two following ways:
# catching data
# test_data = request.get_json().get('test')
# print('test: {}'.format(test_data))
# catching the endpoint
test_endpoint = request.args.get('test')
print('test: {}'.format(test_endpoint))
The second option works (request.args.get('test')); however, when trying the first option, request.get_json().get('test'), I get a 400 request error.
So if I'm not using the endpoint variable from my SimpleHttpOperator, how can I catch the JSON object passed into the data variable?
I've tried to replicate your issue, and based on this documentation you need to wrap the payload in json.dumps when you POST JSON data, then provide authentication credentials as a Google-generated ID token stored in an Authorization header.
See the sample code below:
import datetime
import json

from airflow import models
from airflow.operators import bash
from airflow.providers.http.operators.http import SimpleHttpOperator

YESTERDAY = datetime.datetime.now() - datetime.timedelta(days=1)

default_args = {
    'owner': 'Composer Example',
    'depends_on_past': False,
    'email': [''],
    'email_on_failure': False,
    'email_on_retry': False,
    'retries': 1,
    'retry_delay': datetime.timedelta(minutes=5),
    'start_date': YESTERDAY,
}

with models.DAG(
        'composer_quickstart',
        catchup=False,
        default_args=default_args,
        schedule_interval=datetime.timedelta(days=1)) as dag:

    # Generate an identity token with gcloud; the command output is pushed to XCom
    gen_auth = bash.BashOperator(
        task_id='gen_auth', bash_command='gcloud auth print-identity-token '
    )

    auth_token = "{{ task_instance.xcom_pull(task_ids='gen_auth') }}"

    this_is_a_task = SimpleHttpOperator(
        task_id='task_id',
        method='POST',
        http_conn_id='cf_conn1',
        data=json.dumps({"test": "foo"}),
        headers={"Content-Type": "application/json",
                 "Authorization": "Bearer " + auth_token},
    )

    gen_auth >> this_is_a_task
On the Cloud Functions side, I tried the sample code below:
def main(request):  # HTTP entry point of the Cloud Function
    test_data = request.get_json().get('test')
    print(test_data)
    return test_data
You can also test your function using this curl command:
curl -i -X POST -H "Content-Type: application/json" -H "Authorization: bearer $(gcloud auth print-identity-token)" -d '{"test": "foo"}' https://function-5-k6ssrsqwma-uc.a.run.app
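If you prefer to test from Python instead of curl, a rough equivalent is sketched below; it assumes the google-auth and requests packages and Application Default Credentials that can mint an ID token (e.g. a service account):

import google.auth.transport.requests
import google.oauth2.id_token
import requests

function_url = "https://function-5-k6ssrsqwma-uc.a.run.app"

# Mint a Google-signed ID token, with the function URL as the audience
auth_req = google.auth.transport.requests.Request()
token = google.oauth2.id_token.fetch_id_token(auth_req, function_url)

resp = requests.post(
    function_url,
    json={"test": "foo"},  # requests sets the JSON Content-Type header for json=
    headers={"Authorization": "Bearer " + token},
)
print(resp.status_code, resp.text)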

Unable to Delete Airflow Dags using Airflow Session

I want to write an Airflow DAG cleanup DAG. I'm using https://github.com/teamclairvoyant/airflow-maintenance-dags for reference. I have written a cleanup DAG that takes all the DAGs and checks their schedule; if it is "@once", it deletes the DAG. When I try to run the cleanup DAG, it throws a SQLAlchemy error (screenshot attached). Please help me out with this error.
from datetime import timedelta
import logging
import os
import os.path
import socket

from airflow import DAG, settings
from airflow.models import DagModel
from airflow.operators.python_operator import PythonOperator
from airflow.utils.dates import days_ago

DAG_ID = "AIRFLOW_CLEANUP_DAG"
START_DATE = days_ago(1)
SCHEDULE_INTERVAL = "@once"
DAG_OWNER_NAME = "PDM"
ALERT_EMAIL_ADDRESSES = []
ENABLE_DELETE = True

default_args = {
    "owner": DAG_OWNER_NAME,
    "depends_on_past": False,
    "email": ALERT_EMAIL_ADDRESSES,
    "email_on_failure": True,
    "email_on_retry": False,
    "start_date": START_DATE,
    "retries": 1,
    "retry_delay": timedelta(minutes=1),
}

dag = DAG(
    DAG_ID,
    default_args=default_args,
    schedule_interval=SCHEDULE_INTERVAL,
    start_date=START_DATE,
    catchup=False,
    description="Performing airflow cleanup",
)
if hasattr(dag, "doc_md"):
    dag.doc_md = __doc__
if hasattr(dag, "catchup"):
    dag.catchup = False


def cleanup_once_dags_fn(**context):
    logging.info("Starting to run cleanup Process")
    try:
        host_name = socket.gethostname()
        host_ip = socket.gethostbyname(host_name)
        logging.info("Running on Machine with Host Name: " + host_name)
        logging.info("Running on Machine with IP: " + host_ip)
    except Exception as e:
        print("Unable to get Host Name and IP: " + str(e))
    session = settings.Session()
    logging.info("Configurations:")
    logging.info("enable_delete: " + str(ENABLE_DELETE))
    logging.info("session: " + str(session))
    logging.info("")
    dags = session.query(DagModel).all()
    entries_to_delete = []
    logging.info(f"Total dags :: {len(dags)}")
    # dag_entry (not "dag") so the module-level DAG object is not shadowed
    for dag_entry in dags:
        dag_schedule = dag_entry.schedule_interval
        if dag_schedule == "@once" and dag_entry.dag_id == 'ba54206d-078c-42e8-a6b5-ad579e833364':
            entries_to_delete.append(dag_entry)
    logging.info(f"Dags with once schedule {len(entries_to_delete)}")
    logging.info("Process will be Deleting the DAG(s) from the DB:")
    logging.info("Process will be Deleting " + str(len(entries_to_delete)) + " DAG(s)")
    if ENABLE_DELETE:
        logging.info("Performing Delete...")
        for entry in entries_to_delete:
            session.delete(entry)
        session.commit()
        logging.info("Finished Performing Delete")
    else:
        logging.warning("You opted to skip deleting the DAG entries!")
    logging.info("Finished Running Clear Process")


cleanup_once_dags = PythonOperator(
    task_id="cleanup_once_dags",
    python_callable=cleanup_once_dags_fn,
    provide_context=True,
    dag=dag,
)
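As an aside (not from the original question): deleting only the DagModel row leaves related metadata such as DagRun and TaskInstance rows behind, which is one plausible source of SQLAlchemy errors. Airflow ships a helper that removes a DAG together with its dependent records; a minimal sketch, assuming a recent Airflow 2.x where the helper lives in airflow.api.common.delete_dag:

from airflow.api.common.delete_dag import delete_dag

# Delete each matched DAG and its related metadata rows in one call
for entry in entries_to_delete:
    delete_dag(entry.dag_id)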

Groovy - Parse XML Response, Select Fields, Create New File

I think I've read every Groovy parsing question on here, but I can't seem to find my exact scenario, so I'm reaching out for help. Please be kind: I'm new to Groovy, and I've really bitten off more than I can chew in this latest endeavor.
So I have this XML Response:
<?xml version="1.0" encoding="UTF-8"?>
<worklogs date_from="2020-04-19 00:00:00" date_to="2020-04-25 23:59:59" number_of_worklogs="60" format="xml" diffOnly="false" errorsOnly="false" validOnly="false" addDeletedWorklogs="true" addBillingInfo="false" addIssueSummary="true" addIssueDescription="false" duration_ms="145" headerOnly="false" userName="smm288" addIssueDetails="false" addParentIssue="false" addUserDetails="true" addWorklogDetails="false" billingKey="" issueKey="" projectKey="" addApprovalStatus="true">
    <worklog>
        <worklog_id></worklog_id>
        <jira_worklog_id></jira_worklog_id>
        <issue_id></issue_id>
        <issue_key></issue_key>
        <hours></hours>
        <work_date></work_date>
        <username></username>
        <staff_id />
        <billing_key></billing_key>
        <billing_attributes></billing_attributes>
        <activity_id></activity_id>
        <activity_name></activity_name>
        <work_description></work_description>
        <parent_key></parent_key>
        <reporter></reporter>
        <external_id />
        <external_tstamp />
        <external_hours></external_hours>
        <external_result />
        <customField_11218></customField_11218>
        <customField_12703></customField_12703>
        <customField_12707></customField_12707>
        <hash_value></hash_value>
        <issue_summary></issue_summary>
        <user_details>
            <full_name></full_name>
            <email></email>
            <user-prop key="auto_approve_timesheet"></user-prop>
            <user-prop key="cris_id"></user-prop>
            <user-prop key="iqn_gl_string"></user-prop>
            <user-prop key="is_contractor"></user-prop>
            <user-prop key="is_employee"></user-prop>
            <user-prop key="it_leadership"></user-prop>
            <user-prop key="primary_role"></user-prop>
            <user-prop key="resource_manager"></user-prop>
            <user-prop key="team"></user-prop>
        </user_details>
        <approval_status></approval_status>
        <timesheet_approval>
            <status></status>
            <status_date></status_date>
            <reviewer></reviewer>
            <actor></actor>
            <comment></comment>
        </timesheet_approval>
    </worklog>
    ....
    ....
</worklogs>
And I'm retrieving this XML Response from an API call so the response is held within an object. NOTE: The sample XML above is from Postman.
What I'm trying to do is the following:
1. Only retrieve certain values from this response from all the nodes.
2. Write the values collected to a .json file.
I've created a map, but now I'm stuck on how to parse through it and create a .json file out of the fields I want.
This is what I have thus far:
@Grab('org.codehaus.groovy.modules.http-builder:http-builder:0.7.1')
@Grab('oauth.signpost:signpost-core:1.2.1.2')
@Grab('oauth.signpost:signpost-commonshttp4:1.2.1.2')

import groovyx.net.http.RESTClient
import groovyx.net.http.Method
import static groovyx.net.http.ContentType.*
import groovyx.net.http.HttpResponseException
import groovy.json.JsonBuilder
import groovy.json.JsonOutput
import groovy.json.*

// User Credentials
def jiraAuth = ""

// JIRA Endpoints
def jiraUrl = "" //Dev
//def jiraUrl = "" //Production

// Tempo API Tokens
//def tempoApiToken = "" //Dev
//def tempoApiToken = "" //Production

// Define Weekly Date Range
def today = new Date()
def lastPeriodStart = today - 8
def lastPeriodEnd = today - 2
def dateFrom = lastPeriodStart.format("yyyy-MM-dd")
def dateTo = lastPeriodEnd.format("yyyy-MM-dd")

def jiraClient = new RESTClient(jiraUrl)
jiraClient.ignoreSSLIssues()

def headers = [
    "Authorization"    : "Basic " + jiraAuth,
    "X-Atlassian-token": "no-check",
    "Content-Type"     : "application/json"
]

def response = jiraClient.get(
    path: "",
    query: [
        tempoApiToken: "${tempoApiToken}",
        format: "xml",
        dateFrom: "${dateFrom}",
        dateTo: "${dateTo}",
        addUserDetails: "true",
        addApprovalStatus: "true",
        addIssueSummary: "true"
    ],
    headers: headers
) { response, worklogs ->
    println "Processing..."
    // Start building the Output - Creates a Worklog Map
    worklogs.worklog.each { worklognodes ->
        def workLog = convertToMap(worklognodes)
        // Print out the Map
        println(workLog)
    }
}

// Helper Method
def convertToMap(nodes) {
    nodes.children().collectEntries {
        if (it.name() == 'user-prop') {
            [it['@key'], it.childNodes() ? convertToMap(it) : it.text()]
        } else {
            [it.name(), it.childNodes() ? convertToMap(it) : it.text()]
        }
    }
}
I'm only interested in parsing out the following fields from each node:
<worklogs>
    <worklog>
        <hours>
        <work_date>
        <billing_key>
        <customField_11218>
        <issue_summary>
        <user_details>
            <full_name>
            <user-prop key="auto_approve_timesheet">
            <user-prop key="it_leadership">
            <user-prop key="resource_manager">
            <user-prop key="team">
            <user-prop key="cris_id">
            <user-prop key="iqn_id">
        <approval_status>
    </worklog>
    ...
</worklogs>
I've tried the following:
1. Converting workLog to a JSON string (JsonOutput.toJson) and then pretty-printing it (JsonOutput.prettyPrint), but this just returns a collection of JSON responses that I can't do anything with (my thought process was: this is as good as I can get, and I'll just use a .json-to-.csv converter and get rid of what I don't want), which is not the solution I ultimately want.
2. Printing the map workLog, which just returns little collections that I can't do anything with either.
3. Creating a new file using File and writing workLog out as a .json file, but again, it doesn't translate well.
The result of the println for workLog is below (just so everyone can see that the response is being held and the map matches the XML response):
[worklog_id: , jira_worklog_id: , issue_id: , issue_key: , hours: , work_date: , username: , staff_id: , billing_key: , billing_attributes: , activity_id: , activity_name: , work_description: , parent_key: , reporter: , external_id:, external_tstamp:, external_hours: , external_result:, customField_11218: , hash_value: , issue_summary: , user_details:[full_name: , email: , auto_approve_timesheet: , cris_id: , iqn_gl_approver: , iqn_gl_string: , iqn_id: , is_contractor: , is_employee: , it_leadership: , primary_role: , resource_manager: , team: ], approval_status: , timesheet_approval:[status: ]]
I would so appreciate it if anyone could offer some insights on how to move forward or even documentation that has good examples of what I'm trying to achieve (Apache's documentation is sorely lacking in examples, in my opinion).
It's not all of the way there, but I was able to get a JSON file created from the XML and map. From there I can just use the .json file to create a .csv and get rid of the columns I don't want.
// Define Weekly Date Range
def today = new Date()
def lastPeriodStart = today - 8
def lastPeriodEnd = today - 2
def dateFrom = lastPeriodStart.format("yyyy-MM-dd")
def dateTo = lastPeriodEnd.format("yyyy-MM-dd")

def jiraClient = new RESTClient(jiraUrl)
jiraClient.ignoreSSLIssues()

// Creates and Begins the File
File file = new File("${dateFrom}_RPT05.json")
file.write("")
file.append("[\n")

// Defines the File
def arrplace = 0
def arrsize = 0

def headers = [
    "Authorization"    : "Basic " + jiraAuth,
    "X-Atlassian-token": "no-check",
    "Content-Type"     : "application/json"
]

def response = jiraClient.get(
    path: "/plugins/servlet/tempo-getWorklog/",
    query: [
        tempoApiToken: "${tempoApiToken}",
        format: "xml",
        dateFrom: "${dateFrom}",
        dateTo: "${dateTo}",
        addUserDetails: "true",
        addApprovalStatus: "true",
        addIssueSummary: "true"
    ],
    headers: headers
) { response, worklogs ->
    println "Processing..."
    // Gets Size of Array
    worklogs.worklog.each { worklognodes ->
        arrsize = arrsize + 1
    }
    // Start Building the Output - Creates a Worklog Map
    worklogs.worklog.each { worklognodes ->
        worklognodes = convertToMap(worklognodes)
        // Convert Map to a JSON String
        def json_str = JsonOutput.toJson(worklognodes)
        // Adds Row to File
        file.append(json_str)
        arrplace = arrplace + 1
        if (arrplace < arrsize) {
            file.append(",")
        }
        file.append("\n")
        print "."
    }
}

file.append("]")

// Helper Method
def convertToMap(nodes) {
    nodes.children().collectEntries {
        if (it.name() == 'user-prop') {
            [it['@key'], it.childNodes() ? convertToMap(it) : it.text()]
        } else {
            [it.name(), it.childNodes() ? convertToMap(it) : it.text()]
        }
    }
}
The output is a collection/array of worklogs.

JSON to Lua with multiple strings, backslashes and dots

Hello, I'm trying to use the JSON from my washer with Lua, to visualize the Samsung washer in Domoticz.
A part of the JSON I get from https://api.smartthings.com/v1/devices/abcd-1234-abcd is:
"main": {
    "washerJobState": {
        "value": "wash"
    },
    "mnhw": {
        "value": "1.0"
    },
    "data": {
        "value": "{\"payload\":{\"x.com.samsung.da.state\":\"Run\",\"x.com.samsung.da.delayEndTime\":\"00:00:00\",\"x.com.samsung.da.remainingTime\":\"01:34:00\",\"if\":[\"oic.if.baseline\",\"oic.if.a\"],\"x.com.samsung.da.progressPercentage\":\"2\",\"x.com.samsung.da.supportedProgress\":[\"None\",\"Wash\",\"Rinse\",\"Spin\",\"Finish\"],\"x.com.samsung.da.progress\":\"Wash\",\"rt\":[\"x.com.samsung.da.operation\"]}}"
    },
    "washerRinseCycles": {
        "value": "3"
    },
    "switch": {
        "value": "on"
    },
If I use the following in my script:
local switch = item.json.main.switch.value
I get the value on or off, and I can use it to show the status of the washer.
I'm trying to find out how to get the "data" value in my script; there are more items with dots and backslashes:
local remainingTime = rt.data.value.payload['x.com.samsung.da.remainingTime']
or
local remainingTime = rt.data.value['\payload']['\x.com.samsung.da.remainingTime']
I tried some more options with ', //, and "", but always got a nil value.
Can someone explain to me how to get:
\"x.com.samsung.da.remainingTime\":\"01:34:00\"
\"x.com.samsung.da.progressPercentage\":\"2\",
All the ", \, and x. are confusing me.
Below is my test script (dzVents, Lua based), where I only kept the JSON logging. I get an error:
dzVents/generated_scripts/Samsung_v3.lua:53: attempt to index a nil value (global 'json')
I don't have any idea how to use/adjust my code to decode the string.
local json = require"json" -- the JSON library
local outer = json.decode(your_JSON_string)
local rt = outer.main
local inner = json.decode(rt.data.value)
local remainingTime = inner.payload['x.com.samsung.da.remainingTime']
local API = 'API'
local Device = 'Device'
local LOGGING = true

--Define dz Switches
local WM_STATUS = 'WM Status' --Domoticz virtual switch ON/Off state Washer

return
{
    on =
    {
        timer =
        {
            'every 1 minutes', -- just an example to trigger the request
        },
        httpResponses =
        {
            'trigger', -- must match with the callback passed to the openURL command
        },
    },
    logging =
    {
        level = domoticz.LOG_DEBUG,
    },
    execute = function(dz, item)
        local wm_status = dz.devices(WM_STATUS)

        if item.isTimer then
            dz.openURL({
                url = 'https://api.smartthings.com/v1/devices/' .. Device .. '/states',
                headers = { ['Authorization'] = 'Bearer ' .. API },
                method = 'GET',
                callback = 'trigger', -- see httpResponses above.
            })
        end

        if (item.isHTTPResponse) then
            if item.ok then
                if (item.isJSON) then
                    rt = item.json.main
                    -- outer = json.decode'{"payload":{"x.com.samsung.da.state":"Run","x.com.samsung.da.delayEndTime":"00:00:00","x.com.samsung.da.remainingTime":"00:40:00","if":["oic.if.baseline","oic.if.a"],"x.com.samsung.da.progressPercentage":"81","x.com.samsung.da.supportedProgress":["None","Weightsensing","Wash","Rinse","Spin","Finish"],"x.com.samsung.da.progress":"Rinse","rt":["x.com.samsung.da.operation"]}}
                    inner = json.decode(rt.data.value)
                    -- local remainingTime = inner.payload['x.com.samsung.da.remainingTime']
                    dz.utils.dumpTable(rt) -- this will show how the table is structured
                    -- dz.utils.dumpTable(inner)
                    local washerSpinLevel = rt.washerSpinLevel.value
                    -- local remainingTime = inner.payload['x.com.samsung.da.remainingTime']
                    dz.log('Debuggg washerSpinLevel:' .. washerSpinLevel, dz.LOG_DEBUG)
                    dz.log('Debuggg remainingTime:' .. remainingTime, dz.LOG_DEBUG)
                    -- dz.log('Remaining time:' .. remainingTime, dz.LOG_INFO)
                    -- dz.log(dz.utils.fromJSON(item.data))
                    -- end
                elseif LOGGING == true then
                    dz.log('There was a problem handling the request', dz.LOG_ERROR)
                    dz.log(item, dz.LOG_ERROR)
                end
            end
        end
    end
}
This is a weird construction: a serialized JSON inside a normal JSON.
This means you have to invoke deserialization twice:
local json = require"json" -- the JSON library
local outer = json.decode(your_JSON_string)
local rt = outer.main
local inner = json.decode(rt.data.value)
local remainingTime = inner.payload['x.com.samsung.da.remainingTime']

Pass variable into Step Function start_execution() input parameter

I am using boto3 with Python 3.6 to start a Step Function execution. The Step Function is designed to share my base AMI across all my accounts. I have four variables I need to pass to the input parameter to kick off the execution: the AMI ID, the list of accounts I own, the source account, and the KMS key. The AMI ID and the account list are constructed in my code and are the variables that need to be passed dynamically. According to the documentation, input is a string that contains the JSON input data for the execution, and it gives the following example: "input": "{\"ami_id\" : \"ami_id\"}". My question is: how do I pass the variables in question to this parameter as values? My code is below, with the traceback:
CODE:
import boto3
import json


# Get an STS token to assume roles into AWS accounts
def get_sts_token(**kwargs):
    role_arn = kwargs['RoleArn']
    region_name = kwargs['RegionName']
    sts = boto3.client(
        'sts',
        region_name=region_name,
    )
    token = sts.assume_role(
        RoleArn=role_arn,
        RoleSessionName='GetInstances',
        DurationSeconds=900,
    )
    return token["Credentials"]


def get_accounts():
    role_arn = "arn:aws:iam::xxxxxxxxxx:role/list-accounts-role"
    region_name = "us-east-1"
    token = get_sts_token(RoleArn=role_arn, RegionName=region_name)
    access_key = token['AccessKeyId']
    secret_key = token['SecretAccessKey']
    session_token = token['SessionToken']
    client = boto3.client('organizations',
                          aws_access_key_id=access_key,
                          aws_secret_access_key=secret_key,
                          aws_session_token=session_token)
    moreAccounts = True
    nexttoken = ''
    global accountList
    accountList = []
    while moreAccounts:
        if len(nexttoken) > 0:
            accounts = client.list_accounts(NextToken=nexttoken)
        else:
            accounts = client.list_accounts()
        if 'NextToken' in accounts:
            nexttoken = accounts['NextToken']
        else:
            moreAccounts = False
        for account in accounts['Accounts']:
            if account['Status'] != 'SUSPENDED' and account['Status'] != 'CLOSED':
                account_id = account['Id']
                accountList.append(account_id)
    print(accountList)


def trigger_sfn():
    ssm = boto3.client('ssm')
    role_arn = "arn:aws:iam::xxxxxxxx:role/execute-sfn"
    region_name = "us-east-1"
    ami_id = ssm.get_parameter(Name='/BaseAMI/newest')['Parameter']['Value']
    print(ami_id)
    token = get_sts_token(RoleArn=role_arn, RegionName=region_name)
    print(token)
    access_key = token['AccessKeyId']
    secret_key = token['SecretAccessKey']
    session_token = token['SessionToken']
    sfn = boto3.client('stepfunctions',
                       aws_access_key_id=access_key,
                       aws_secret_access_key=secret_key,
                       aws_session_token=session_token)
    response = sfn.start_execution(
        stateMachineArn='arn:aws:states:us-east-1:xxxxxxxx:stateMachine:ami-share',
        input="{\"ami_id\": ami_id, \"source_account_id\": \"112233445566\", \"accountList\": accountList, \"kms_key_arn\": \"alias/aws/ebs\"}"
    )
    print(response)
TRACE:
An error occurred (InvalidExecutionInput) when calling the
StartExecution operation: Invalid State Machine Execution Input:
'com.fasterxml.jackson.core.JsonParseException: Unrecognized token
'ami_id': was expecting ('true', 'false' or 'null')
at [Source: (String)"{"ami_id": ami_id, "source_account_id":
"112233445566", "accountList": accountList, "kms_key_arn":
"alias/aws/ebs"}"; line: 1, column: 18]': InvalidExecutionInput
Traceback (most recent call last):
File "/var/task/lambda_function.py", line 6, in lambda_handler
trigger_sfn()
File "/var/task/lambda_function.py", line 96, in trigger_sfn
input="{\"ami_id\": ami_id, \"source_account_id\": \"112233445566\",
\"accountList\": accountList, \"kms_key_arn\": \"alias/aws/ebs\"}"
File "/var/runtime/botocore/client.py", line 314, in _api_call
return self._make_api_call(operation_name, kwargs)
File "/var/runtime/botocore/client.py", line 612, in _make_api_call
raise error_class(parsed_response, operation_name)
botocore.errorfactory.InvalidExecutionInput: An error occurred
(InvalidExecutionInput) when calling the StartExecution operation:
Invalid State Machine Execution Input:
'com.fasterxml.jackson.core.JsonParseException: Unrecognized token
'ami_id': was expecting ('true', 'false' or 'null')
at [Source: (String)"{"ami_id": ami_id, "source_account_id":
"112233445566", "accountList": accountList, "kms_key_arn":
"alias/aws/ebs"}"; line: 1, column: 18]'
You can add the value of the variable to the string like this:
input="{\"ami_id\": \"" + ami_id + "\", \"source_account_id\": \"112233445566\", \"accountList\": accountList, \"kms_key_arn\": \"alias/aws/ebs\"}"
Your code currently sends the literal text ami_id instead of the value of the ami_id variable.
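A more robust pattern (an editorial sketch, not part of the original answer) is to build the payload as a dict and let json.dumps serialize it; this also handles accountList, which the concatenation above still sends as a literal token, and it quotes and escapes every value correctly:

import json

payload = {
    "ami_id": ami_id,
    "source_account_id": "112233445566",
    "accountList": accountList,  # a Python list becomes a JSON array
    "kms_key_arn": "alias/aws/ebs",
}

response = sfn.start_execution(
    stateMachineArn='arn:aws:states:us-east-1:xxxxxxxx:stateMachine:ami-share',
    input=json.dumps(payload),
)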