Reading Consul keys data in Terraform - JSON

I have a problem where I am trying to read values from the Terraform consul_keys data source.
These are the two data paths I need to read to get a specific value:
Path 1: private/plt-network/infrastructure/eu-west-1/vpc-layout, where if environment == "development" and name contains "mesh", then dev_vpc_id should be the matching id:
[
  {
    "cidr_block": "0.0.0.0/17",
    "environment": "development",
    "id": "vpc-xxx",
    "name": "development"
  },
  {
    "cidr_block": "10.113.0.0/19",
    "environment": "development",
    "id": "vpc-yyy",
    "name": "development-mesh"
  }
]
Path 2: private/plt-network/infrastructure/ap-south-1/global/vpc-layout, where if environment == "development" and mesh == false, then dev_vpc_id should be the matching id:
[
  {
    "environment": "acceptance",
    "id": "vpc-xyz",
    "mesh": false
  },
  {
    "environment": "development",
    "id": "vpc-abc",
    "mesh": true
  }
]
The data source definition looks like this:
data "consul_keys" "vpcs-id" {
# for_each = toset(local.aws_regions)
key {
name = "vpcs-all"
path = "private/plt-network/infrastructure/eu-west-1/vpc-layout"
# path = each.value == "eu-west-1" ? "${local.consul_base_path}/plt-network/infrastructure/${each.value}/vpc-layout" : "private/plt-network/infrastructure/${each.value}/vpc-layout/global"
}
}
locals.tf
locals {
  aws_regions      = toset(["eu-west-1", "eu-central-1", "us-east-2", "us-west-2", "ap-south-1", "ap-southeast-1"])
  consul_base_path = "private/plt-network/infrastructure"

  eu-west-1-json  = jsondecode(data.consul_keys.vpcs-id.var.vpcs_all)
  ap-south-1-json = jsondecode(data.consul_keys.vpcs-id-ap-south.var.vpcs_all)

  eu-west-1-vpc-ids  = local.eu-west-1-json.id
  ap-south-1-vpc-ids = [for user in local.ap-south-1-json : user.id]
}
outputs.tf
output "eu-west-1-id" {
value = local.eu-west-1-vpc-ids
}
output "ap-south-1-id" {
value = local.ap-south-1-vpc-ids
}
Error:
Error: Missing map element
on locals.tf line 8, in locals:
8: eu-west-1-json = jsondecode(data.consul_keys.vpcs-id.var.vpcs_all)
|----------------
| data.consul_keys.vpcs-id.var is map of string with 1 element
This map does not have an element with the key "vpcs_all".
Error: Missing map element
on locals.tf line 9, in locals:
9: ap-south-1-json = jsondecode(data.consul_keys.vpcs-id-ap-south.var.vpcs_all)
|----------------
| data.consul_keys.vpcs-id-ap-south.var is map of string with 1 element
This map does not have an element with the key "vpcs_all".
Can someone help please?
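From the error, the map data.consul_keys.vpcs-id.var only contains the key defined in the key block, which is named "vpcs-all" (with a hyphen), while the locals look up "vpcs_all" (with an underscore), so the lookup fails. Below is a minimal, untested sketch of one way this could be wired up, using one data block per path; the vpcs-id-ap-south data block name is taken from the locals in the question, and the filter conditions follow the two paths described above:

data "consul_keys" "vpcs-id" {
  key {
    name = "vpcs-all"
    path = "private/plt-network/infrastructure/eu-west-1/vpc-layout"
  }
}

data "consul_keys" "vpcs-id-ap-south" {
  key {
    name = "vpcs-all"
    path = "private/plt-network/infrastructure/ap-south-1/global/vpc-layout"
  }
}

locals {
  # Index var with the same name used in the key block ("vpcs-all", not "vpcs_all")
  eu-west-1-json  = jsondecode(data.consul_keys.vpcs-id.var["vpcs-all"])
  ap-south-1-json = jsondecode(data.consul_keys.vpcs-id-ap-south.var["vpcs-all"])

  # Path 1: environment == "development" and the name contains "mesh"
  eu-west-1-vpc-ids = [
    for v in local.eu-west-1-json : v.id
    if v.environment == "development" && length(regexall("mesh", v.name)) > 0
  ]

  # Path 2: environment == "development" and mesh == false
  ap-south-1-vpc-ids = [
    for v in local.ap-south-1-json : v.id
    if v.environment == "development" && v.mesh == false
  ]
}

Both locals are lists; if exactly one match is expected, the single id can be taken with element(local.eu-west-1-vpc-ids, 0), or one(local.eu-west-1-vpc-ids) on Terraform 0.15 and later.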

Related

How to get all index values in Groovy JSON xpath

Please find the attached Groovy code which I am using to get a particular field from the response body.
Query 1:
It retrieves the result when I use a specific index value: data.RenewalDetails[0] gives Value1 as output and data.RenewalDetails[1] gives Value2.
But in my real case I will never know the number of blocks in the response, so I want to get all the values that satisfy the condition. I tried data.RenewalDetails[*] but it is not working. Can you please help?
Query 2:
Apart from the above condition, I want to add one more filter, where "FamilyCode": "PREMIUM" in the Itemdetails. Can you help with that as well?
def BoundId = new groovy.json.JsonSlurper().parseText('{"data":{"RenewalDetails":[{"ExpiryDetails":{"duration":"xxxxx","destination":"LHR","from":"AUH","value":2,"segments":[{"valudeid":"xxx-xx6262-xxxyyy-1111-11-11-1111"}]},"Itemdetails":[{"BoundId":"Value1","isexpired":true,"FamilyCode":"PREMIUM","availabilityDetails":[{"travelID":"AAA-AB1234-AAABBB-2022-11-10-1111","quota":"X","scale":"XXX","class":"X"}]}]},{"ExpiryDetails":{"duration":"xxxxx","destination":"LHR","from":"AUH","value":2,"segments":[{"valudeid":"xxx-xx6262-xxxyyy-1111-11-11-1111"}]},"Itemdetails":[{"BoundId":"Value2","isexpired":true,"FamilyCode":"PREMIUM","availabilityDetails":[{"travelID":"AAA-AB1234-AAABBB-2022-11-10-1111","quota":"X","scale":"XXX","class":"X"}]}]}]},"warnings":[{"code":"xxxx","detail":"xxxxxxxx","title":"xxxxxxxx"}]}')
    .data.RenewalDetails[0].Itemdetails.find { itemDetail ->
        itemDetail.availabilityDetails[0].travelID.length() == 33
    }?.BoundId
println "Hello " + BoundId
Something like this:
def txt = '''\
{
"data": {
"RenewalDetails": [
{
"ExpiryDetails": {
"duration": "xxxxx",
"destination": "LHR",
"from": "AUH",
"value": 2,
"segments": [
{
"valudeid": "xxx-xx6262-xxxyyy-1111-11-11-1111"
}
]
},
"Itemdetails": [
{
"BoundId": "Value1",
"isexpired": true,
"FamilyCode": "PREMIUM",
"availabilityDetails": [
{
"travelID": "AAA-AB1234-AAABBB-2022-11-10-1111",
"quota": "X",
"scale": "XXX",
"class": "X"
}
]
}
]
},
{
"ExpiryDetails": {
"duration": "xxxxx",
"destination": "LHR",
"from": "AUH",
"value": 2,
"segments": [
{
"valudeid": "xxx-xx6262-xxxyyy-1111-11-11-1111"
}
]
},
"Itemdetails": [
{
"BoundId": "Value2",
"isexpired": true,
"FamilyCode": "PREMIUM",
"availabilityDetails": [
{
"travelID": "AAA-AB1234-AAABBB-2022-11-10-1111",
"quota": "X",
"scale": "XXX",
"class": "X"
}
]
}
]
}
]
},
"warnings": [
{
"code": "xxxx",
"detail": "xxxxxxxx",
"title": "xxxxxxxx"
}
]
}'''
def json = new groovy.json.JsonSlurper().parseText txt
List<String> BoundIds = json.data.RenewalDetails.Itemdetails*.find { itemDetail ->
    itemDetail.availabilityDetails[0].travelID.size() == 33 && itemDetail.FamilyCode == 'PREMIUM'
}?.BoundId
assert BoundIds.toString() == '[Value1, Value2]'
Note that you will get the BoundIds as a List.
If you amend your code like this:
def json = new groovy.json.JsonSlurper().parse(prev.getResponseData())
you would be able to access the number of returned items as:
def size = json.data.RenewalDetails.size()
since RenewalDetails represents a List.
Just add as many conditions as you want using Groovy's && operator:
find { itemDetail ->
    itemDetail.availabilityDetails[0].travelID.length() == 33 &&
        itemDetail.FamilyCode.equals('PREMIUM')
}
More information:
Apache Groovy - Parsing and producing JSON
Apache Groovy: What Is Groovy Used For?

Terraform aws_dynamodb_table_item - insert multiline JSON into attribute

I have the following terraform config:
resource "aws_dynamodb_table_item" "my_table" {
table_name = aws_dynamodb_table.my_table.name
hash_key = aws_dynamodb_table.my_table.hash_key
item = <<ITEM
{
"id": {"S": "nameAndCodes"},
"data": {"S": "[
{
"code": "03",
"displayName": "name1"
},
{
"code": "04",
"displayName": "name2"
}
]"}
}
ITEM
}
When the plan stage executes I receive the error:
Error: Invalid format of "item": Decoding failed: invalid character '\r' in string literal
The only way I can get this to work is to make the whole JSON a single line, as follows:
"data": {"S": "[{\"code\": \"03\", \"displayName\": \"name1\"},{\"code\": \"04\", \"displayName\": \"name2\"}]"
This looks very ugly and difficult to manage.
Does anyone know how I can enter a multiline JSON inside a <<ITEM block?
To resolve that issue, you can use the jsonencode function to set the item value and put the entire JSON object in there. Here is an example in Terraform from my project which creates a DynamoDB table and puts an initial item.
resource "aws_dynamodb_table" "customer_table" {
name = "customer"
billing_mode = "PAY_PER_REQUEST"
hash_key = "customerId"
stream_enabled = false
attribute {
name = "customerId"
type = "S"
}
}
resource "aws_dynamodb_table_item" "customer_table_item" {
table_name = aws_dynamodb_table.customer_table.name
hash_key = aws_dynamodb_table.customer_table.hash_key
depends_on = [aws_dynamodb_table.customer_table]
item = jsonencode({
"customerId" : {
"S" : "1"
},
"firstName" : {
"S" : "John"
},
"lastName" : {
"S" : "Doe"
},
})
}
commands:
terraform init
terraform fmt
terraform plan
terraform apply
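Applying the same idea to the original item, where the "data" attribute carries a JSON array serialized as a string, the inner array can itself be produced with a nested jsonencode call so no manual escaping is needed. A rough, untested sketch reusing the question's resource and attribute names:

resource "aws_dynamodb_table_item" "my_table" {
  table_name = aws_dynamodb_table.my_table.name
  hash_key   = aws_dynamodb_table.my_table.hash_key

  item = jsonencode({
    "id" : { "S" : "nameAndCodes" },
    "data" : {
      # The inner jsonencode renders the array as an escaped JSON string
      # for the DynamoDB string (S) attribute
      "S" : jsonencode([
        { "code" : "03", "displayName" : "name1" },
        { "code" : "04", "displayName" : "name2" }
      ])
    }
  })
}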

Terraform: JSON Path Query doesn't work in terraform

I have data like below in a JSON file, site24x7IPs.json, and I need to filter it in Terraform:
{
  "LocationDetails": [
    {
      "IPv6_Address_External": "2803:eb80:4000:d::0/64",
      "City": "Buenos Aires",
      "Place": "Argentina",
      "external_ip": "170.78.75.88"
    },
    {
      "IPv6_Address_External": "",
      "City": "Buenos Aires",
      "Place": "Argentina",
      "external_ip": "170.78.75.87"
    },
    {
      "IPv6_Address_External": "",
      "City": "Melbourne",
      "Place": "Australia",
      "external_ip": "103.91.166.0/24"
    }
  ]
}
And terraform code:
locals {
  site24x7IPs = jsondecode(file("${path.module}/site24x7IPs.json"))
}
output "site24x7IPs" {
  # value = local.site24x7IPs.LocationDetails[*].external_ip # This works
  # I'd like to filter the IP from Australia
  value = local.site24x7IPs.LocationDetails[?(#.Place == "Australia")].external_ip
}
Expected result:
"103.91.166.0/24"
Error output:
value = local.site24x7IPs.LocationDetails[?(#.Place == "Australia")].external_ip
This character is not used within the language.
.LocationDetails[?(#.Place == "Australia")].external_ip is JSONPath query syntax, but it doesn't work in Terraform.
Is there a similar way to achieve the filtering goal in Terraform?
Thanks,
This should give you the result:
output "australia_ip_with_quotes" {
value = format("%q",element([for i in local.site24x7IPs.LocationDetails: i.external_ip if i.Place == "Australia"],0))
}
output "australia_ip_without_quotes" {
value = element([for i in local.site24x7IPs.LocationDetails: i.external_ip if i.Place == "Australia"],0)
}
output "list" {
value = [for i in local.site24x7IPs.LocationDetails: i.external_ip if i.Place == "Australia"]
}
Outputs:
australia_ip_with_quotes = "103.91.166.0/24"
australia_ip_without_quotes = 103.91.166.0/24
list = [
  "103.91.166.0/24",
]
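As a side note on the design choice: element(..., 0) silently takes the first match. On Terraform 0.15 and later, the one() function may be a safer fit here, since it returns the single element, returns null for an empty list, and raises an error if the filter unexpectedly matches more than one entry. A hedged variant of the same output:

output "australia_ip" {
  # one() fails if the for expression yields more than a single element
  value = one([for i in local.site24x7IPs.LocationDetails : i.external_ip if i.Place == "Australia"])
}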

how to parse CSV to JSON from 2 CSV Files in Groovy

Please help with parsing CSV to JSON from 2 CSV files in Groovy.
For example :
CSV1:
testKey,status
Name001,PASS
Name002,PASS
Name003,FAIL
CSV2:
Kt,Pd
PT-01,Name001
PT-02,Name002
PT-03,Name003
PT-04,Name004
I want to populate "testList" with data from CSV2.val[1..-1] and CSV1.val[1..-1].
The result should look like:
{
  "testExecutionKey": "DEMO-303",
  "info": {
    "user": "admin"
  },
  "tests": [
    {
      "TestKey": "PT-01",
      "status": "PASS"
    },
    {
      "TestKey": "PT-02",
      "status": "PASS"
    },
    {
      "TestKey": "PT-03",
      "status": "FAIL"
    }
  ]
}
Code without this modification (parsing only one CSV):
import groovy.json.*

def kindaFile = '''
TestKey;Finished;user;status
Name001;PASS;
Name002;PASS;
'''.trim()
def keys
def testList = []
//parse CSV
kindaFile.splitEachLine( /;/ ){ parts ->
    if( !keys )
        keys = parts
    else{
        def test = [:]
        parts.eachWithIndex{ val, ix -> test[ keys[ ix ] ] = val }
        testList << test
    }
}
def builder = new JsonBuilder()
def root = builder {
    testExecutionKey 'DEMO-303'
    info user: 'admin'
    tests testList
}
println JsonOutput.prettyPrint(JsonOutput.toJson(root))
Your sample JSON doesn't match the CSV definition. It looks like you're using fields [1..-1] from CSV1, as you stated, but fields [0..-2] from CSV2. As you only have 2 fields in each CSV, that's the equivalent of csv1[1] and csv2[0]. The example below uses [0..-2]. Note that if you always have exactly two fields in your input files, the following code could be simplified a little; I've given a more generic solution that can cope with more fields.
Load both CSV files into lists
File csv1 = new File( 'one.csv')
File csv2 = new File( 'two.csv')
def lines1 = csv1.readLines()
def lines2 = csv2.readLines()
assert lines1.size() <= lines2.size()
Note the assert. That's there because I noticed you have 4 tests in CSV2 but only 3 in CSV1. To allow the code to work with your sample data, it iterates through CSV1 and adds the matching data from CSV2.
Get the field names
fieldSep = /,[ ]*/
def fieldNames1 = lines1[0].split( fieldSep )
def fieldNames2 = lines1[0].split( fieldSep )
Build the testList collection
def testList = []
lines1[1..-1].eachWithIndex { csv1Line, lineNo ->
    def mappedLine = [:]
    def fieldsCsv1 = csv1Line.split( fieldSep )
    fieldsCsv1[1..-1].eachWithIndex { value, fldNo ->
        String name = fieldNames1[ fldNo + 1 ]
        mappedLine[ name ] = value
    }
    def fieldsCsv2 = lines2[lineNo + 1].split( fieldSep )
    fieldsCsv2[0..-2].eachWithIndex { value, fldNo ->
        String name = fieldNames2[ fldNo ]
        mappedLine[ name ] = value
    }
    testList << mappedLine
}
Parsing
You can now parse the list of maps with your existing code. I've made a change to the way the JSON string is displayed though.
def builder = new JsonBuilder()
def root = builder {
    testExecutionKey 'DEMO-303'
    info user: 'admin'
    tests testList
}
println builder.toPrettyString()
JSON Output
Running the above code, using your CSV1 and CSV2 data, gives the JSON that you desire.
for CSV1:
testKey,status
Name001,PASS
Name002,PASS
Name003,FAIL
and CSV2:
Kt,Pd
PT-01,Name007
PT-02,Name001
PT-03,Name003
PT-05,Name002
PT-06,Name004
PT-07,Name006
result is:
{
"testExecutionKey": "DEMO-303",
"info": {
"user": "admin"
},
"tests": [
{
"status": "PASS",
"testKey": "PT-01"
},
{
"status": "PASS",
"testKey": "PT-02"
},
{
"status": "FAIL",
"testKey": "PT-03"
}
]
}
But I need exactly the same values for testKey (the testKey from CSV1 matched to its Kt from CSV2):
{
"testExecutionKey": "DEMO-303",
"info": {
"user": "admin"
},
"tests": [
{
"testKey": "PT-02",
"status": "PASS"
},
{
"testKey": "PT-05",
"status": "PASS"
},
{
"testKey": "PT-03",
"status": "FAIL"
}
]
}

merge lists of dictionaries in terraform v0.12

I would like to do the following using terraform:
I have 2 JSONs:
1.json:
[
{
"description": "description1",
"url": "url1",
"data": "data1"
},
{
"description": "description2",
"url": "url2",
"data": "data2",
"action": "action2"
},
{
"description": "description3",
"url": "url3",
"data": "data3"
}
]
2.json:
[
{
"description": "description1",
"url": "url1",
"data": "data1"
},
{
"description": "description2_new",
"url": "url2",
"data": "data2_new"
},
{
"description": "description4",
"url": "url4",
"data": "data4"
}
]
and I want to merge them into one. Dictionaries from the second JSON should override dictionaries from the first one if the url key is the same, i.e. the combined JSON should look like:
[
{
"description": "description1",
"url": "url1",
"data": "data1"
},
{
"description": "description2_new",
"url": "url2",
"data": "data2_new"
},
{
"description": "description3",
"url": "url3",
"data": "data3"
},
{
"description": "description4",
"url": "url4",
"data": "data4"
}
]
Using python I can easily do it:
import json

with open('1.json') as f:
    json1 = json.load(f)
with open('2.json') as f:
    json2 = json.load(f)

def list_to_dict(json_list):
    res_dict = {}
    for d in json_list:
        res_dict[d['url']] = d
    return res_dict

def merge_json(json1, json2):
    j1 = list_to_dict(json1)
    j2 = list_to_dict(json2)
    j1.update(j2)
    res_list = []
    for key in j1.keys():
        res_list.append(j1[key])
    return res_list

print(json.dumps(merge_json(json1, json2), indent=4))
How can I do that using terraform?
Using terraform 0.12.x
$ cat main.tf
locals {
  # read from files and turn into json
  list1 = jsondecode(file("1.json"))
  list2 = jsondecode(file("2.json"))

  # iterate over lists and turn url into a unique key
  dict1 = { for item in local.list1 : item.url => item }
  dict2 = { for item in local.list2 : item.url => item }

  # combine both dictionaries so values converge
  # only take its values
  merged = values(merge(local.dict1, local.dict2))
}
output "this" {
  value = local.merged
}
$ terraform apply
Apply complete! Resources: 0 added, 0 changed, 0 destroyed.
Outputs:
this = [
{
"data" = "data1"
"description" = "description1"
"url" = "url1"
},
{
"data" = "data2_new"
"description" = "description2_new"
"url" = "url2"
},
{
"data" = "data3"
"description" = "description3"
"url" = "url3"
},
{
"data" = "data4"
"description" = "description4"
"url" = "url4"
},
]
Terraform supports expanding a list into function parameters using the ... operator. This will allow an arbitrary number of documents to be read.
(I'm not sure, but I believe this feature was added in v0.15)
For this example, I added a new file 3.json with the contents:
[
{
"description": "description4_new",
"url": "url4",
"data": "data4_new"
}
]
For main.tf, I'm using the same logic as #someguyonacomputer's answer:
$ cat main.tf
locals {
  jsondocs = [
    for filename in fileset(path.module, "*.json") : jsondecode(file(filename))
  ]

  as_dicts = [
    for arr in local.jsondocs : {
      for obj in arr : obj.url => obj
    }
  ]

  # This is where the '...' operator is used
  merged = merge(local.as_dicts...)
}
output "as_list" {
  value = values(local.merged)
}
Result:
Changes to Outputs:
+ as_list = [
+ {
+ data = "data1"
+ description = "description1"
+ url = "url1"
},
+ {
+ data = "data2_new"
+ description = "description2_new"
+ url = "url2"
},
+ {
+ data = "data3"
+ description = "description3"
+ url = "url3"
},
+ {
+ data = "data4_new"
+ description = "description4_new"
+ url = "url4"
},
]
References:
Terraform Docs -- Function Calls # Expanding Function Arguments