I am new to Groovy and am having trouble converting an array to JSON. The JSON produced should contain all the values from my array list, but it only keeps the last one. Here is the code:
def arraylist = [["0",2],["1",8],["2",6],["3",8],["4",3]]
def arraysize = arraylist.size()
def builder = new groovy.json.JsonBuilder()
builder ({
    cols([
        {
            "id" "hours"
            "label" "Hours"
            "type" "string"
        },
        {
            "id" "visitor"
            "label" "Visitors"
            "type" "number"
        }
    ])
    rows([
        {
            for( i in 0..< arraysize )
            {
                c([
                    {
                        "v" arraylist[i][0]
                    },
                    {
                        "v" arraylist[i][1]
                    }
                ])
            } //for
        }
    ])
})
println builder.toPrettyString()
You can try running the code here: http://groovyconsole.appspot.com/
The expected output is:
{
"cols": [
{
"id": "hours",
"label": "Hours",
"type": "string"
},
{
"id": "visitor",
"label": "Visitors",
"type": "number"
}
],
"rows": [
{
"c": [
{
"v": "0"
},
{
"v": 2
}
]
},
{
"c": [
{
"v": "1"
},
{
"v": 8
}
]
},
{
"c": [
{
"v": "2"
},
{
"v": 6
}
]
},
{
"c": [
{
"v": "3"
},
{
"v": 8
}
]
},
{
"c": [
{
"v": "4"
},
{
"v": 3
}
]
}
]
}
Something like this seems to give the result you wanted:
def arraylist = [["0",2],["1",8],["2",6],["3",8],["4",3]]
def builder = new groovy.json.JsonBuilder()
builder {
    cols([
        [ id: "hours",   label: "Hours",    type: "string" ],
        [ id: "visitor", label: "Visitors", type: "number" ]
    ])
    rows( arraylist.collect { pair -> [ c: pair.collect { item -> [ v: item ] } ] } )
}
println builder.toPrettyString()
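The original version only keeps the last pair because each loop iteration calls c(...) on the same row delegate, so the single "c" entry is overwritten every time through the loop. If you prefer to build the structure as plain maps and lists and serialize it afterwards, a roughly equivalent sketch (just an alternative, not required by JsonBuilder) could use groovy.json.JsonOutput:

import groovy.json.JsonOutput

def arraylist = [["0",2],["1",8],["2",6],["3",8],["4",3]]

// Build the whole structure as ordinary maps/lists first, then serialize it.
def payload = [
    cols: [
        [id: "hours",   label: "Hours",    type: "string"],
        [id: "visitor", label: "Visitors", type: "number"]
    ],
    rows: arraylist.collect { pair -> [c: pair.collect { item -> [v: item] }] }
]

println JsonOutput.prettyPrint(JsonOutput.toJson(payload))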
Related
I need to merge file1.json and file2.json (there could be more) into onefile.json.
version always has the same value in all files; however, the values in the vulnerabilities and dependency_files arrays differ between files, and there may be duplicates, which I want to remove after the merge.
file1.json:
{
"version": "x.x.x",
"vulnerabilities": [
{
"id": "0000"
},
{
"id": "11111"
},
{
"id": "2222"
}
],
"dependency_files": [
{
"name": "name0000"
},
{
"name": "name1111"
},
{
"name": "name2222"
}
]
}
file2.json:
{
"version": "x.x.x",
"vulnerabilities": [
{
"id": "2222"
},
{
"id": "3333"
}
],
"dependency_files": [
{
"name": "name2222"
},
{
"name": "name3333"
}
]
}
onefile.json:
{
"version": "x.x.x",
"vulnerabilities": [
{
"id": "0000"
},
{
"id": "11111"
},
{
"id": "2222"
},
{
"id": "3333"
}
],
"dependency_files": [
{
"name": "name0000"
},
{
"name": "name1111"
},
{
"name": "name2222"
},
{
"name": "name3333"
}
]
}
I have tried a lot, with no luck.
You could run a reduce over all the input files, initialized with the first one, hence no need for the -n option:
jq '
reduce inputs as {$vulnerabilities, $dependency_files} (.;
.vulnerabilities = (.vulnerabilities + $vulnerabilities | unique_by(.id))
| .dependency_files = (.dependency_files + $dependency_files | unique_by(.name))
)
' file*.json
{
"version": "x.x.x",
"vulnerabilities": [
{
"id": "0000"
},
{
"id": "11111"
},
{
"id": "2222"
},
{
"id": "3333"
}
],
"dependency_files": [
{
"name": "name0000"
},
{
"name": "name1111"
},
{
"name": "name2222"
},
{
"name": "name3333"
}
]
}
Using this Python code:
import json

def merge_dicts(*dicts):
    r = {}
    skip = 'version'
    for item in dicts:
        for key, value in item.items():
            if key == skip:
                r[skip] = value
            else:
                r.setdefault(key, []).extend(value)
                unique = []
                for obj in r[key]:
                    if obj not in unique:
                        unique.append(obj)
                r[key] = unique
    return r

with open("file1.json") as file_1:
    data_1 = json.load(file_1)

with open("file2.json") as file_2:
    data_2 = json.load(file_2)

with open('data.json', 'w') as merge_file:
    json.dump(merge_dicts(data_1, data_2), merge_file, indent=4)
Result
{
"version": "x.x.x",
"vulnerabilities": [
{
"id": "0000"
},
{
"id": "11111"
},
{
"id": "2222"
},
{
"id": "3333"
}
],
"dependency_files": [
{
"name": "name0000"
},
{
"name": "name1111"
},
{
"name": "name2222"
},
{
"name": "name3333"
}
]
}
This version supports multiple JSON files:
import os, json

def merge_dicts(*dicts):
    r = {}
    skip = 'version'
    for item in dicts:
        for key, value in item.items():
            if key == skip:
                r[skip] = value
            else:
                r.setdefault(key, []).extend(value)
                unique = []
                for obj in r[key]:
                    if obj not in unique:
                        unique.append(obj)
                r[key] = unique
    return r

# collect every *.json file in the current directory
json_files = [pos_json for pos_json in os.listdir('./') if pos_json.endswith('.json')]

a = []
for json_file in json_files:
    with open(json_file) as file_item:
        a.append(json.load(file_item))

with open('data.json', 'w') as merge_file:
    json.dump(merge_dicts(*a), merge_file, indent=4)
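For completeness in this Groovy-centric thread, here is a minimal Groovy sketch of the same merge, assuming the inputs are file1.json and file2.json with the structure shown above:

import groovy.json.JsonOutput
import groovy.json.JsonSlurper

def slurper = new JsonSlurper()
def docs = ["file1.json", "file2.json"].collect { slurper.parse(new File(it)) }

// Fold every document into the first one, de-duplicating by id / name.
def merged = docs.tail().inject(docs.head()) { acc, doc ->
    acc.vulnerabilities  = (acc.vulnerabilities  + doc.vulnerabilities).unique { it.id }
    acc.dependency_files = (acc.dependency_files + doc.dependency_files).unique { it.name }
    acc
}

new File("onefile.json").text = JsonOutput.prettyPrint(JsonOutput.toJson(merged))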
Given the following message:
[{
"Name1": "Value1",
"Name2": [{
"Name2_1": [],
"Name2_2": [{
"Name2_2_1": "Value2_2_1"}]
}, {
"Name2_3": [{
"Name2_3_1": 12300}
],
"Name2_4": [{
"Name2_4_1": "Value2_4_1"}]
}],
"Name3": [{
"Name3_1": "Value3_1"
}]
}, {
"Name1": "Value1",
"Name2": [{
"Name2_1": 1234,
"Name2_2": [{
"Name2_2_1": "Value2_2_1"
}],
"Name2_3": []
}],
"Name3": []
}]
I want to remove all the empty and/or null values using the Groovy programming language.
The following code:
import groovy.json.*
def data = '''\
[
{
"Name1": "Value1",
"Name2": [
{
"Name2_1": [],
"Name2_2": [
{
"Name2_2_1": "Value2_2_1"
}
]
},
{
"Name2_3": [
{
"Name2_3_1": 12300
}
],
"Name2_4": [
{
"Name2_4_1": "Value2_4_1"
}
]
}
],
"Name3": [
{
"Name3_1": "Value3_1"
}
]
},
{
"Name1": "Value1",
"Name2": [
{
"Name2_1": 1234,
"Name2_2": [
{
"Name2_2_1": "Value2_2_1"
}
],
"Name2_3": []
}
],
"Name3": []
}
]'''
def json = new JsonSlurper().parseText(data)
json = recursivelyRemoveEmpties(json)
println(JsonOutput.prettyPrint(JsonOutput.toJson(json)))
def recursivelyRemoveEmpties(item) {
    switch (item) {
        case Map:
            return item.collectEntries { k, v ->
                [k, recursivelyRemoveEmpties(v)]
            }.findAll { k, v -> v }
        case List:
            return item.collect {
                recursivelyRemoveEmpties(it)
            }.findAll { v -> v }
        default:
            return item
    }
}
defines a recursive method, recursivelyRemoveEmpties, which removes empty and null values at arbitrary depth in the data structure. When executed, it prints:
─➤ groovy solution.groovy
[
{
"Name1": "Value1",
"Name2": [
{
"Name2_2": [
{
"Name2_2_1": "Value2_2_1"
}
]
},
{
"Name2_3": [
{
"Name2_3_1": 12300
}
],
"Name2_4": [
{
"Name2_4_1": "Value2_4_1"
}
]
}
],
"Name3": [
{
"Name3_1": "Value3_1"
}
]
},
{
"Name1": "Value1",
"Name2": [
{
"Name2_1": 1234,
"Name2_2": [
{
"Name2_2_1": "Value2_2_1"
}
]
}
]
}
]
Tested on:
─➤ groovy -v
Groovy Version: 3.0.6 JVM: 15.0.2 Vendor: Amazon.com Inc. OS: Linux
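One caveat: the Groovy-truth filters findAll { k, v -> v } and findAll { v -> v } also drop legitimate falsy values such as 0 and false. If you need to keep those, a small variation (a sketch, not part of the original answer) could test only for null and emptiness:

// Keep 0 and false; drop only nulls, empty strings, empty maps and empty lists.
def keep(v) {
    if (v == null) return false
    if (v instanceof Map || v instanceof List || v instanceof String) return !v.isEmpty()
    return true
}

def recursivelyRemoveEmpties(item) {
    switch (item) {
        case Map:
            return item.collectEntries { k, v -> [k, recursivelyRemoveEmpties(v)] }
                       .findAll { k, v -> keep(v) }
        case List:
            return item.collect { recursivelyRemoveEmpties(it) }
                       .findAll { keep(it) }
        default:
            return item
    }
}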
How can I sort the given JSON object by the count property? I want to sort the entire sub-objects: the higher the count value, the nearer to the top it should appear, and so on.
{
"Resource": [
{
"details": [
{
"value": "3.70"
},
{
"value": "3.09"
}
],
"work": {
"count": 1
}
},
{
"details": [
{
"value": "4"
},
{
"value": "5"
}
],
"work": {
"count": 2
},
{
"details": [
{
"value": "5"
},
{
"value": "5"
}
],
"work": "null"
}
]
}
You can try this example to sort your data:
data = {
"data": {
"Resource": [
{
"details": [{"value": "3.70"}, {"value": "3.09"}],
"work": {"count": 1},
},
{"details": [{"value": "4"}, {"value": "5"}], "work": {"count": 2}},
]
}
}
# sort by 'work'/'count'
data["data"]["Resource"] = sorted(
data["data"]["Resource"], key=lambda r: r["work"]["count"]
)
# sort by 'details'/'value'
for r in data["data"]["Resource"]:
r["details"] = sorted(r["details"], key=lambda k: float(k["value"]))
# pretty print:
import json
print(json.dumps(data, indent=4))
Prints:
{
"data": {
"Resource": [
{
"details": [
{
"value": "3.09"
},
{
"value": "3.70"
}
],
"work": {
"count": 1
}
},
{
"details": [
{
"value": "4"
},
{
"value": "5"
}
],
"work": {
"count": 2
}
}
]
}
}
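For a Groovy take on the same idea (a sketch assuming the malformed third element with "work": "null" has been fixed or filtered out first), you can sort the parsed structure directly. Note the question asks for the highest count first, so this sketch sorts in descending order:

import groovy.json.JsonOutput
import groovy.json.JsonSlurper

def text = '''{
  "Resource": [
    { "details": [ {"value": "3.70"}, {"value": "3.09"} ], "work": {"count": 1} },
    { "details": [ {"value": "4"},    {"value": "5"}    ], "work": {"count": 2} }
  ]
}'''

def json = new JsonSlurper().parseText(text)

// Highest count first (descending), as the question asks.
json.Resource = json.Resource.sort { -(it.work?.count ?: 0) }

// Optionally sort each details list by numeric value as well.
json.Resource.each { r ->
    r.details = r.details.sort { it.value as BigDecimal }
}

println JsonOutput.prettyPrint(JsonOutput.toJson(json))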
Using Groovy, the requirement is to collect a map's nested element values along with its top-level element value.
I'm unsure whether a recursive method is needed.
Sample JSON
{
"items": [
{
"attribute": "Type",
"options":
[
{
"label": "Type1",
"value": "1"
},
{
"label": "Type2",
"value": "2"
}
]
},
{
"attribute": "Size",
"options": [
{
"label": "SizeA",
"value": "A"
},
{
"label": "SizeB",
"value": "B"
}
]
}
]
}
Expected output after collect
[
{attribute=Type,label=Type1,value=1},
{attribute=Type,label=Type2,value=2},
{attribute=Size,label=SizeA,value=A},
{attribute=Size,label=SizeB,value=B}
]
You can solve this by combining the lists of options obtained through the collect method using collectMany.
See the following snippet of code:
def input = """
{
"items": [
{
"attribute": "Type",
"options": [
{
"label": "Type1",
"value": "1"
},
{
"label": "Type2",
"value": "2"
}
]
},
{
"attribute": "Size",
"options": [
{
"label": "SizeA",
"value": "A"
},
{
"label": "SizeB",
"value": "B"
}
]
} ]
}"""
def json = new groovy.json.JsonSlurper().parseText(input)

/* collectMany directly flattens the two sublists
 * [ [ [ type1 ], [ type2 ] ], [ [ sizeA ], [ sizeB ] ] ]
 * into
 * [ [type1], [type2], [sizeA], [sizeB] ]
 */
def result = json.items.collectMany { item ->
    // collect returns a list of N (in this example, N=2) elements:
    // [ [ attribute: ..., label: ..., value: ... ],
    //   [ attribute: ..., label: ..., value: ... ] ]
    item.options.collect { option ->
        // return a map of the form [ attribute: ..., label: ..., value: ... ]
        [ attribute: item.attribute ] + option
    }
}
assert result == [
[ attribute: "Type", label: "Type1", value: "1" ],
[ attribute: "Type", label: "Type2", value: "2" ],
[ attribute: "Size", label: "SizeA", value: "A" ],
[ attribute: "Size", label: "SizeB", value: "B" ],
]
Thanks to Giuseppe!
I had solved this with a less Groovy-idiomatic method:
// Create a new list combining the top-level element with the nested elements
def result = []
json.items.each { item ->
    result.addAll(item.options.collect { option ->
        [ attribute: /"${item?.attribute ?: ''}"/, label: /"${option?.label ?: ''}"/, value: /"${option?.value ?: ''}"/ ]
    })
}
Here is my code in MongoDB:
db.mydb.aggregate([
{ "$group": {
"_id": {
"A": "$A",
"B": "$B",
"C": "$C"
},
}},
{ "$group": {
"cpt": { '$sum': 1 } ,
"_id": "$_id.A",
"allowDrag": {'$literal':false},
"expanded": {'$literal':false},
"children": {
"$push": {
"text": "$_id.B",
"details": "$_id.C",
"leaf": {'$literal': true},
}
},
}}
])
I would like to add some hard-coded properties and values to my JSON output. It works with
"leaf": {'$literal': true}
but I don't know why I can't make it work with
"allowDrag": {'$literal':false},
"expanded": {'$literal':false}
Is this possible with $group?
Example of the JSON output I get:
"result" : [
{
"_id" : "A",
"cpt" : 1,
"children" : [
{
"text" : "B",
"details" : "C",
"leaf" : true
}
]
}]
Example of the JSON output I would like:
"result" : [
{
"_id" : "A",
"cpt" : 1,
"allowDrag" : false,
"expanded" : false,
"children" : [
{
"text" : "B",
"details" : "C",
"leaf" : true
}
]
}]
Use the $literal operator in a $project pipeline stage to return the new fields set to the boolean value false:
db.mydb.aggregate([
{
"$group": {
"_id": {
"A": "$A",
"B": "$B",
"C": "$C"
}
}
},
{
"$group": {
"cpt": { '$sum': 1 } ,
"_id": "$_id.A",
"children": {
"$push": {
"text": "$_id.B",
"details": "$_id.C",
"leaf": {'$literal': true}
}
}
}
},
{
"$project": {
"allowDrag": {'$literal':false},
"expanded": {'$literal':false},
"cpt": 1,
"children": 1
}
}
])
Tested with the following collection sample:
db.mydb.insert([
{
"A": "test1",
"B": "test2",
"C": "test3"
},
{
"A": "test1",
"B": "test2",
"C": "test2"
},
{
"A": "test2",
"B": "test2",
"C": "test3"
},
{
"A": "test2",
"B": "test2",
"C": "test3"
}
])
The above aggregation gives the following results:
/* 0 */
{
"result" : [
{
"_id" : "test1",
"cpt" : 2,
"children" : [
{
"text" : "test2",
"details" : "test2",
"leaf" : true
},
{
"text" : "test2",
"details" : "test3",
"leaf" : true
}
],
"allowDrag" : false,
"expanded" : false
},
{
"_id" : "test2",
"cpt" : 1,
"children" : [
{
"text" : "test2",
"details" : "test3",
"leaf" : true
}
],
"allowDrag" : false,
"expanded" : false
}
],
"ok" : 1
}