How to perform group-by over JSON data using JSONATA - json

I have a JSON structure given below.
I want to perform a couple of transformations on this data. I came to know about JSONATA and it looks fantastic to me, but since I am new, I am struggling a little bit around transformation.
I want to calculate the total count (sum) for each module name for the given date.
The expected result is given below.
Input:
{
"id": "6332acbfe13e6063dcb740ef",
"record": [
{
"date": "2022-09-22",
"entries": [
{
"repo": "SRE-MAC-PDM-TEAM",
"prod": [
{
"name": "modules/dynatrace",
"count": 16
}
]
},
{
"repo": "SRE-MAC-IBO-BONUS-PAYMENTS-REPO",
"prod": [
{
"name": "modules/dynatrace/monitors/http-monitors/basic",
"count": 3
}
]
},
{
"repo": "SRE-MAC-ANAPostPurchase",
"prod": [
{
"name": "modules/dynatrace",
"count": 17
}
]
},
{
"repo": "SRE-MAC-ANA-SubscriptionsRewards",
"preprod": [
{
"name": "modules/splunk/hec-token",
"count": 8
},
{
"name": "modules/aws/lambda/logs_streaming_splunk",
"count": 8
}
],
"prod": [
{
"name": "modules/dynatrace/alerting_profiles",
"count": 1
},
{
"name": "modules/dynatrace/monitors/http-monitors/basic",
"count": 1
},
{
"name": "modules/dynatrace/notifications/splunk-on-call",
"count": 1
},
{
"name": "modules/splunk/hec-token",
"count": 1
},
{
"name": "modules/aws/lambda/logs_streaming_splunk",
"count": 1
},
{
"name": "modules/dynatrace",
"count": 4
}
]
},
{
"repo": "SRE-MAC-CSP-APPLICATION",
"preprod": [
{
"name": "modules/aws/lambda/logs_streaming_splunk",
"count": 9
}
]
},
{
"repo": "SRE-MAC-IBOIP-REPO",
"prod": [
{
"name": "modules/dynatrace",
"count": 6
}
]
},
{
"repo": "SRE-MAC-IBOLIFE-REPO",
"preprod": [
{
"name": "modules/dynatrace",
"count": 1
},
{
"name": "modules/splunk/hec-token",
"count": 2
},
{
"name": "modules/aws/lambda/logs_streaming_splunk",
"count": 3
}
],
"prod": [
{
"name": "modules/dynatrace",
"count": 5
},
{
"name": "modules/splunk/hec-token",
"count": 1
},
{
"name": "modules/aws/lambda/logs_streaming_splunk",
"count": 1
}
]
},
{
"repo": "SRE-MAC-PL-APPLICATION",
"preprod": [
{
"name": "modules/splunk/hec-token",
"count": 1
},
{
"name": "modules/aws/lambda/logs_streaming_splunk",
"count": 4
}
],
"prod": [
{
"name": "modules/splunk/hec-token",
"count": 1
},
{
"name": "modules/aws/lambda/logs_streaming_splunk",
"count": 2
}
]
},
{
"repo": "SRE-MAC-TPS-APPLICATION",
"preprod": [
{
"name": "modules/aws/lambda/logs_streaming_splunk",
"count": 1
}
],
"prod": [
{
"name": "modules/dynatrace/alerting_profiles",
"count": 1
},
{
"name": "modules/dynatrace/notifications/splunk-on-call",
"count": 1
},
{
"name": "modules/dynatrace/calculated_service_metric",
"count": 1
},
{
"name": "modules/dynatrace/anomalies/custom/metric_id",
"count": 1
},
{
"name": "modules/dynatrace",
"count": 11
}
]
}
]
},
{
"date": "2022-09-19",
"entries": [
{
"repo": "SRE-MAC-PDM-TEAM",
"prod": [
{
"name": "modules/dynatrace",
"count": 14
}
]
},
{
"repo": "SRE-MAC-IBO-BONUS-PAYMENTS-REPO",
"prod": [
{
"name": "modules/dynatrace/monitors/http-monitors/basic",
"count": 3
}
]
},
{
"repo": "SRE-MAC-ANAPostPurchase",
"prod": [
{
"name": "modules/dynatrace",
"count": 15
}
]
},
{
"repo": "SRE-MAC-ANA-SubscriptionsRewards",
"preprod": [
{
"name": "modules/splunk/hec-token",
"count": 5
},
{
"name": "modules/aws/lambda/logs_streaming_splunk",
"count": 5
}
],
"prod": [
{
"name": "modules/dynatrace/alerting_profiles",
"count": 1
},
{
"name": "modules/dynatrace/monitors/http-monitors/basic",
"count": 1
},
{
"name": "modules/dynatrace/notifications/splunk-on-call",
"count": 1
},
{
"name": "modules/splunk/hec-token",
"count": 1
},
{
"name": "modules/aws/lambda/logs_streaming_splunk",
"count": 1
},
{
"name": "modules/dynatrace",
"count": 3
}
]
},
{
"repo": "SRE-MAC-CSP-APPLICATION",
"preprod": [
{
"name": "modules/aws/lambda/logs_streaming_splunk",
"count": 6
}
]
},
{
"repo": "SRE-MAC-IBOIP-REPO",
"prod": [
{
"name": "modules/dynatrace",
"count": 6
}
]
},
{
"repo": "SRE-MAC-IBOLIFE-REPO",
"preprod": [
{
"name": "modules/dynatrace",
"count": 1
},
{
"name": "modules/splunk/hec-token",
"count": 2
},
{
"name": "modules/aws/lambda/logs_streaming_splunk",
"count": 3
}
],
"prod": [
{
"name": "modules/dynatrace",
"count": 4
},
{
"name": "modules/splunk/hec-token",
"count": 1
},
{
"name": "modules/aws/lambda/logs_streaming_splunk",
"count": 1
}
]
},
{
"repo": "SRE-MAC-PL-APPLICATION",
"preprod": [
{
"name": "modules/splunk/hec-token",
"count": 1
},
{
"name": "modules/aws/lambda/logs_streaming_splunk",
"count": 3
}
],
"prod": [
{
"name": "modules/splunk/hec-token",
"count": 1
},
{
"name": "modules/aws/lambda/logs_streaming_splunk",
"count": 2
}
]
},
{
"repo": "SRE-MAC-TPS-APPLICATION",
"preprod": [
{
"name": "modules/aws/lambda/logs_streaming_splunk",
"count": 1
}
],
"prod": [
{
"name": "modules/dynatrace/alerting_profiles",
"count": 1
},
{
"name": "modules/dynatrace/notifications/splunk-on-call",
"count": 1
},
{
"name": "modules/dynatrace/calculated_service_metric",
"count": 1
},
{
"name": "modules/dynatrace/anomalies/custom/metric_id",
"count": 1
},
{
"name": "modules/dynatrace",
"count": 9
}
]
}
]
},
{
"date": "2022-09-12",
"entries": [
{
"repo": "SRE-MAC-PDM-TEAM",
"prod": [
{
"name": "modules/dynatrace",
"count": 4
}
]
},
{
"repo": "SRE-MAC-IBO-BONUS-PAYMENTS-REPO",
"prod": [
{
"name": "modules/dynatrace/monitors/http-monitors/basic",
"count": 2
}
]
},
{
"repo": "SRE-MAC-ANAPostPurchase",
"prod": [
{
"name": "modules/dynatrace",
"count": 3
}
]
},
{
"repo": "SRE-MAC-ANA-SubscriptionsRewards",
"preprod": [
{
"name": "modules/splunk/hec-token",
"count": 3
},
{
"name": "modules/aws/lambda/logs_streaming_splunk",
"count": 4
}
],
"prod": [
{
"name": "modules/dynatrace/alerting_profiles",
"count": 1
},
{
"name": "modules/dynatrace/monitors/http-monitors/basic",
"count": 1
},
{
"name": "modules/dynatrace/notifications/splunk-on-call",
"count": 1
},
{
"name": "modules/splunk/hec-token",
"count": 1
},
{
"name": "modules/aws/lambda/logs_streaming_splunk",
"count": 1
},
{
"name": "modules/dynatrace",
"count": 2
}
]
},
{
"repo": "SRE-MAC-CSP-APPLICATION",
"preprod": [
{
"name": "modules/aws/lambda/logs_streaming_splunk",
"count": 4
}
]
},
{
"repo": "SRE-MAC-IBOIP-REPO",
"prod": [
{
"name": "modules/dynatrace",
"count": 3
}
]
},
{
"repo": "SRE-MAC-IBOLIFE-REPO",
"preprod": [
{
"name": "modules/dynatrace",
"count": 1
},
{
"name": "modules/splunk/hec-token",
"count": 2
},
{
"name": "modules/aws/lambda/logs_streaming_splunk",
"count": 2
}
],
"prod": [
{
"name": "modules/dynatrace",
"count": 3
},
{
"name": "modules/splunk/hec-token",
"count": 1
},
{
"name": "modules/aws/lambda/logs_streaming_splunk",
"count": 1
}
]
},
{
"repo": "SRE-MAC-PL-APPLICATION",
"preprod": [
{
"name": "modules/splunk/hec-token",
"count": 1
},
{
"name": "modules/aws/lambda/logs_streaming_splunk",
"count": 2
}
],
"prod": [
{
"name": "modules/splunk/hec-token",
"count": 1
},
{
"name": "modules/aws/lambda/logs_streaming_splunk",
"count": 2
}
]
},
{
"repo": "SRE-MAC-TPS-APPLICATION",
"preprod": [
{
"name": "modules/aws/lambda/logs_streaming_splunk",
"count": 1
}
],
"prod": [
{
"name": "modules/dynatrace/alerting_profiles",
"count": 1
},
{
"name": "modules/dynatrace/notifications/splunk-on-call",
"count": 1
},
{
"name": "modules/dynatrace/calculated_service_metric",
"count": 1
},
{
"name": "modules/dynatrace/anomalies/custom/metric_id",
"count": 1
},
{
"name": "modules/dynatrace",
"count": 7
}
]
}
]
},
{
"date": "2022-09-05",
"entries": [
{
"repo": "SRE-MAC-PDM-TEAM",
"prod": [
{
"name": "modules/dynatrace",
"count": 12
}
]
},
{
"repo": "SRE-MAC-IBO-BONUS-PAYMENTS-REPO",
"prod": [
{
"name": "modules/dynatrace/monitors/http-monitors/basic",
"count": 1
}
]
},
{
"repo": "SRE-MAC-ANAPostPurchase",
"prod": [
{
"name": "modules/dynatrace",
"count": 11
}
]
},
{
"repo": "SRE-MAC-ANA-SubscriptionsRewards",
"preprod": [
{
"name": "modules/splunk/hec-token",
"count": 2
},
{
"name": "modules/aws/lambda/logs_streaming_splunk",
"count": 5
}
],
"prod": [
{
"name": "modules/dynatrace/alerting_profiles",
"count": 1
},
{
"name": "modules/dynatrace/monitors/http-monitors/basic",
"count": 1
},
{
"name": "modules/dynatrace/notifications/splunk-on-call",
"count": 1
},
{
"name": "modules/splunk/hec-token",
"count": 1
},
{
"name": "modules/aws/lambda/logs_streaming_splunk",
"count": 1
},
{
"name": "modules/dynatrace",
"count": 1
}
]
},
{
"repo": "SRE-MAC-CSP-APPLICATION",
"preprod": [
{
"name": "modules/aws/lambda/logs_streaming_splunk",
"count": 5
}
]
},
{
"repo": "SRE-MAC-IBOIP-REPO",
"prod": [
{
"name": "modules/dynatrace",
"count": 3
}
]
},
{
"repo": "SRE-MAC-IBOLIFE-REPO",
"preprod": [
{
"name": "modules/dynatrace",
"count": 1
},
{
"name": "modules/splunk/hec-token",
"count": 1
},
{
"name": "modules/aws/lambda/logs_streaming_splunk",
"count": 1
}
],
"prod": [
{
"name": "modules/dynatrace",
"count": 2
},
{
"name": "modules/splunk/hec-token",
"count": 1
},
{
"name": "modules/aws/lambda/logs_streaming_splunk",
"count": 1
}
]
},
{
"repo": "SRE-MAC-PL-APPLICATION",
"preprod": [
{
"name": "modules/splunk/hec-token",
"count": 1
},
{
"name": "modules/aws/lambda/logs_streaming_splunk",
"count": 1
}
],
"prod": [
{
"name": "modules/splunk/hec-token",
"count": 1
},
{
"name": "modules/aws/lambda/logs_streaming_splunk",
"count": 1
}
]
},
{
"repo": "SRE-MAC-TPS-APPLICATION",
"preprod": [
{
"name": "modules/aws/lambda/logs_streaming_splunk",
"count": 1
}
],
"prod": [
{
"name": "modules/dynatrace/alerting_profiles",
"count": 1
},
{
"name": "modules/dynatrace/notifications/splunk-on-call",
"count": 1
},
{
"name": "modules/dynatrace/calculated_service_metric",
"count": 1
},
{
"name": "modules/dynatrace/anomalies/custom/metric_id",
"count": 1
},
{
"name": "modules/dynatrace",
"count": 2
}
]
}
]
}
],
"metadata": {
"name": "",
"readCountRemaining": 98,
"timeToExpire": 86389,
"createdAt": "2022-09-27T07:56:47.003Z"
}
}
Expected Output:
{
"2022-09-22" : [
{
"name" : "modules/dynatrace",
"count": 11
},
{
"name" : "modules/dynatrace",
"count": 4
},
{
"name" : "modules/splunk/hec-token",
"count": 14
}
....
],
"2022-09-19" : [
{
"name" : "modules/dynatrace",
"count": 52
},
.....
]
.
.
.
}

This should do the grouping for you:
(
record{
date: $reduce(entries.prod.name, function($acc, $item) {
$merge([$acc, {
$item: $count(entries.prod.name[$=$item])
}])
}, {}) ~> $each(function($value, $key) {
{ "name": $key, "count": $value }
})
}
)
You can check it out live here: https://stedi.link/LiOttdB
UPDATE - even shorter solution:
(
record{
date: entries.*{ $.name: $sum($.count) }
~> $each(function($c, $n) {{ "name": $n, "count": $c }})
}
)
https://stedi.link/pQiCnJo

Thanks @mralex for answering my question. Unfortunately it does not work in the older JSONATA version used by the latest JSON-API Grafana module, so I created another solution myself, which is given below:
(
record.(
$t1 := entries.preprod{ $.name: $sum($.count) };
$t2 := entries.prod{ $.name: $sum($.count) };
$t3 := $distinct($append($t1.$keys(),$t2.$keys()));
$t3.(
$t4 := ($exists($lookup($t1, $)) ? $lookup($t1, $) : 0) + ($exists($lookup($t2, $)) ? $lookup($t2, $): 0);
{
'name': $,
'count' : $t4
}
)
)
)

Related

Using jq search value using regex

I have this json object and I am trying to find a way to use regex in jq to search value in issuer_dn that is more than 8 characters. A regex like [A-Z]{8} should work but I am unable to get results. I am still learning jq and if there is any other tool that can be used then please share.
{
"ip": "127.0.0.1",
"data": {
"tls": {
"status": "success",
"protocol": "tls",
"result": {
"handshake_log": {
"server_hello": {
"version": {
"name": "TLSv1.2",
"value": 771
},
"random": "hhdshfhhdhfhshdh",
"session_id": "hjdsfyyueujhfjaskdfjjl",
"cipher_suite": {
"hex": "0xC014",
"name": "TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA",
"value": 49172
},
"compression_method": 0,
"ocsp_stapling": false,
"ticket": false,
"secure_renegotiation": true,
"heartbeat": false,
"extended_master_secret": false
},
"server_certificates": {
"certificate": {
"raw": "dGVzdCBkYXRhIGFuZCBnYXJiYWdldGVzdC==",
"parsed": {
"version": 3,
"serial_number": "1234567890",
"signature_algorithm": {
"name": "SHA256-RSA",
"oid": "1.2.840.113549.1.1.11"
},
"issuer": {
"common_name": [
"ABC"
],
"country": [
"ABC"
],
"locality": [
"ABC"
],
"province": [
"ABC"
],
"organization": [
"ABC"
],
"organizational_unit": [
"ABC"
]
},
"issuer_dn": "C=ABCD, ST=ABCD, L=ABCD, O=ABCD, OU=ABCD, CN=ABCD",
"validity": {
"start": "2020-02-01T01:09:22Z",
"end": "2021-02-01T03:09:22Z",
"length": 7883663
},
"subject": {
"common_name": [
"ABC"
],
"country": [
"ABC"
],
"locality": [
"ABC"
],
"province": [
"ABC"
],
"organization": [
"ABC"
],
"organizational_unit": [
"ABC"
]
},
"subject_dn": "C=ABCD, ST=ABCD, L=ABCD, O=ABCD, OU=ABCD, CN=ABCD",
"subject_key_info": {
"key_algorithm": {
"name": "RSA"
},
"rsa_public_key": {
"exponent": 65537,
"modulus": "dGVzdCBkYXRhIGFuZCBnYXJiYWdldGVzdC",
"length": 2048
},
"fingerprint_sha256": "73hh3g39920jfjj38723bb3993hh3774994002"
},
"extensions": {
"basic_constraints": {
"is_ca": true
},
"authority_key_id": "73hh3g39920jfjj38723bb3993hh3774994002",
"subject_key_id": "73hh3g39920jfjj38723bb3993hh3774994002"
},
"signature": {
"signature_algorithm": {
"name": "SHA256-RSA",
"oid": "1.2.840.113549.1.1.11"
},
"value": "dGVzdCBkYXRhIGFuZCBnYXJiYWdldGVzdC",
"valid": true,
"self_signed": true
},
"fingerprint_md5": "73hh3g39920jfjj38723bb3993hh3774994002",
"fingerprint_sha1": "73hh3g39920jfjj38723bb3993hh3774994002",
"fingerprint_sha256": "73hh3g39920jfjj38723bb3993hh3774994002",
"tbs_noct_fingerprint": "73hh3g39920jfjj38723bb3993hh3774994002",
"spki_subject_fingerprint": "73hh3g39920jfjj38723bb3993hh3774994002",
"tbs_fingerprint": "73hh3g39920jfjj38723bb3993hh3774994002",
"validation_level": "73hh3g39920jfjj38723bb3993hh3774994002",
"redacted": false
}
},
"validation": {
"browser_trusted": false,
"browser_error": "x509: failed to load system roots and no roots provided"
}
},
"server_key_exchange": {
"ecdh_params": {
"curve_id": {
"name": "secp256r1",
"id": 23
},
"server_public": {
"x": {
"value": "73hh3g39920jfjj38723bb3993hh3774994002",
"length": 256
},
"y": {
"value": "73hh3g39920jfjj38723bb3993hh3774994002",
"length": 256
}
}
},
"digest": "dGVzdCBkYXRhIGFuZCBnYXJiYWdldGVzdC",
"signature": {
"raw": "dGVzdCBkYXRhIGFuZCBnYXJiYWdldGVzdC",
"type": "rsa",
"valid": true,
"signature_and_hash_type": {
"signature_algorithm": "rsa",
"hash_algorithm": "sha256"
},
"tls_version": {
"name": "TLSv1.2",
"value": 771
}
}
},
"client_key_exchange": {
"ecdh_params": {
"curve_id": {
"name": "secp256r1",
"id": 23
},
"client_public": {
"x": {
"value": "dGVzdCBkYXRhIGFuZCBnYXJiYWdldGVzdC=",
"length": 256
},
"y": {
"value": "dGVzdCBkYXRhIGFuZCBnYXJiYWdldGVzdC=",
"length": 256
}
},
"client_private": {
"value": "dGVzdCBkYXRhIGFuZCBnYXJiYWdldGVzdC=",
"length": 32
}
}
},
"client_finished": {
"verify_data": "dGVzdCBkY"
},
"server_finished": {
"verify_data": "dGVzdCBkY"
},
"key_material": {
"master_secret": {
"value": "dGVzdCBkY",
"length": 48
},
"pre_master_secret": {
"value": "dGVzdCBkY",
"length": 32
}
}
}
},
"timestamp": "2020-02-09T07:14:47Z"
}
}
}
I would use the following :
.data.tls.result.handshake_log.server_certificates.certificate.parsed.issuer_dn
| select(length > 8)
You can try it here.
You could use:
test("[A-Z]{8}")

How to match on multiple fields per array item in elastic search

I am trying to create an elastic search query to match multiple fields inside of an object inside of an array.
For example, the Elastic Search structure I am querying against is similar to the following:
"hits": [
{
"_index": "titles",
"_type": "title",
...
"_source": {
...
"genres": [
{
"code": "adventure",
"priority": 1
},
{
"code": "action",
"priority": 2
},
{
"code": "horror",
"priority": 3
}
],
...
},
...
]
And what I am trying to do is match on titles with specific genre/priority pairings. For example, I am trying to match all titles with code=action and priority=1, but my query is returning too many results. The above title is hit during this example due to the fact that the genre list contains both a genre with code=action AND another genre that matches priority=1. My query is similar to the following:
"query": {
"bool": {
"filter": [
{
"bool": {
"must":[
{"term": {
"genres.code": {
"value": "action",
"boost": 1.0
}
}},
{"term": {
"genres.priority": {
"value": 1,
"boost": 1.0
}
}}
]
}
},
...
}
Is there any way to form the query in order to match a title with a single genre containing both priority=1 AND code=action?
I have recreated your problem. I added the following mapping
PUT titles
{
"mappings": {
"title": {
"properties": {
"author": {
"type": "text"
},
"genres": {
"type": "nested"
}
}
}
}
}
Then I added values to the index. This was what was inserted
"hits": {
"total": 3,
"max_score": 1,
"hits": [
{
"_index": "titles",
"_type": "title",
"_id": "2",
"_score": 1,
"_source": {
"author": "Author 1",
"genres": [
{
"code": "adventure",
"priority": 2
},
{
"code": "action",
"priority": 3
},
{
"code": "horror",
"priority": 1
}
]
}
},
{
"_index": "titles",
"_type": "title",
"_id": "1",
"_score": 1,
"_source": {
"author": "Author 2",
"genres": [
{
"code": "adventure",
"priority": 3
},
{
"code": "action",
"priority": 1
},
{
"code": "horror",
"priority": 2
}
]
}
},
{
"_index": "titles",
"_type": "title",
"_id": "3",
"_score": 1,
"_source": {
"author": "Author 3",
"genres": [
{
"code": "adventure",
"priority": 3
},
{
"code": "action",
"priority": 1
},
{
"code": "horror",
"priority": 2
}
]
}
}
]
}
My query is:
GET titles/title/_search
{
"query": {
"nested": {
"path": "genres",
"query": {
"bool": {
"must": [
{
"term": {
"genres.code": {
"value": "horror"
}
}
},
{
"term": {
"genres.priority": {
"value": 1
}
}
}
]
}
}
}
}
}
The query returns
"_source": {
"author": "Author 1",
"genres": [
{
"code": "adventure",
"priority": 2
},
{
"code": "action",
"priority": 3
},
{
"code": "horror",
"priority": 1
}
]
}
This title is the only one that has code = 'horror' and priority = 1.

Convert list of json's present in a single json file into one single valid json

I have a big file named as new_file.json which have several json in it, like:
{ "ResourceRecordSets": [ { "Name": "XYZ.", "Type": "mms", "TTL": 172800, "ResourceRecords": [ { "Value": "mms-1219.buydmms-24.org." }, { "Value": "mms-1606.buydmms-08.co.uk." }, { "Value": "mms-516.buydmms-00.net." }, { "Value": "mms-458.buydmms-57.com." } ] }, { "Name": "XYZ.", "Type": "SOA", "TTL": 900, "ResourceRecords": [ { "Value": "ABC.COM. 1 7200 900 1209600 86400" } ] }, { "Name": "bb.XYZ.", "Type": "CNAME", "SetIdentifier": "fix", "GeoLocation": { "ContinentCode": "EU" }, "TTL": 300, "ResourceRecords": [ { "Value": "abx.xyz.com" } ] }, { "Name": "bb.XYZ.", "Type": "CNAME", "SetIdentifier": "route to xms staging svc", "GeoLocation": { "CountryCode": "*" }, "TTL": 60, "ResourceRecords": [ { "Value": "xms-staging-xmssvc-1241009625.eu-west-1.elb.amazonbuy.com" } ] } ] }
{ "ResourceRecordSets": [ { "Name": "xyz.com.", "Type": "mms", "TTL": 172800, "ResourceRecords": [ { "Value": "mms-877.buydmms-45.net." }, { "Value": "mms-1168.buydmms-18.org." }, { "Value": "mms-375.buydmms-46.com." }, { "Value": "mms-1835.buydmms-37.co.uk." } ] }, { "Name": "xyz.com.", "Type": "SOA", "TTL": 900, "ResourceRecords": [ { "Value": "mms-877.buydmms-45.net. buydmms-taste.hurdle.com. 1 7200 900 1209600 86400" } ] }, { "Name": "prod.xyz.com.", "Type": "CNAME", "SetIdentifier": "pointing to finclub", "Weight": 1, "TTL": 300, "ResourceRecords": [ { "Value": "indiv-finclub.elb.amazonbuy.com" } ] }, { "Name": "prod.xyz.com.", "Type": "CNAME", "SetIdentifier": "pointing to symentic", "Weight": 99, "TTL": 300, "ResourceRecords": [ { "Value": "some.com" } ] } ] }
{ "ResourceRecordSets": [ { "Name": "fun.org.", "Type": "mms", "TTL": 172800, "ResourceRecords": [ { "Value": "mms-352.buydmms-44.com." }, { "Value": "mms-1131.buydmms-13.org." }, { "Value": "mms-591.buydmms-09.net." }, { "Value": "mms-1997.buydmms-57.co.uk." } ] }, { "Name": "fun.org.", "Type": "SOA", "TTL": 900, "ResourceRecords": [ { "Value": "mms-352.buydmms-44.com. buydmms-taste.hurdle.com. 1 7200 900 1209600 86400" } ] }, { "Name": "portal-junior.fun.org.", "Type": "CNAME", "TTL": 300, "ResourceRecords": [ { "Value": "portal.expressplay.com" } ] } ] }
{ "ResourceRecordSets": [ { "Name": "junior.fun.org.", "Type": "mms", "TTL": 172800, "ResourceRecords": [ { "Value": "mms-518.buydmms-00.net." }, { "Value": "mms-1447.buydmms-52.org." }, { "Value": "mms-499.buydmms-62.com." }, { "Value": "mms-1879.buydmms-42.co.uk." } ] }, { "Name": "junior.fun.org.", "Type": "SOA", "TTL": 900, "ResourceRecords": [ { "Value": "mms-518.buydmms-00.net. buydmms-taste.hurdle.com. 1 7200 900 1209600 86400" } ] }, { "Name": "db.junior.fun.org.", "Type": "CNAME", "TTL": 300, "ResourceRecords": [ { "Value": "xms16-ap.crds.hurdle.com" } ] }, { "Name": "junior.junior.fun.org.", "Type": "CNAME", "ResourceRecords": [ { "Value": "This resource record set includes an attribute that is ummsupported on this Route 53 endpoint. Please commsider using a newer endpoint or a tool that does so." } ], "TrafficPolicyImmstanceId": "17b76444-85c2-4ec5-a16d-8611fa05ca82" } ] }
{ "ResourceRecordSets": [ { "Name": "junior.myjuniordmms.org.", "Type": "mms", "TTL": 172800, "ResourceRecords": [ { "Value": "mms-455.buydmms-56.com." }, { "Value": "mms-1381.buydmms-44.org." }, { "Value": "mms-741.buydmms-28.net." }, { "Value": "mms-1992.buydmms-57.co.uk." } ] }, { "Name": "junior.myjuniordmms.org.", "Type": "SOA", "TTL": 900, "ResourceRecords": [ { "Value": "mms-455.buydmms-56.com. buydmms-taste.hurdle.com. 1 7200 900 1209600 86400" } ] } ] }
I want to make this file into one single valid JSON document. Can it be done using jq or some other method in shell/bash?
Yes, you can.
command:
cat new_file.json | jq -s '.[0] * .[1]'
output:
{
"ResourceRecordSets": [
{
"Name": "xyz.com.",
"Type": "mms",
"TTL": 172800,
"ResourceRecords": [
{
"Value": "mms-877.buydmms-45.net."
},
{
"Value": "mms-1168.buydmms-18.org."
},
{
"Value": "mms-375.buydmms-46.com."
},
{
"Value": "mms-1835.buydmms-37.co.uk."
}
]
},
{
"Name": "xyz.com.",
"Type": "SOA",
"TTL": 900,
"ResourceRecords": [
{
"Value": "mms-877.buydmms-45.net. buydmms-taste.hurdle.com. 1 7200 900 1209600 86400"
}
]
},
{
"Name": "prod.xyz.com.",
"Type": "CNAME",
"SetIdentifier": "pointing to finclub",
"Weight": 1,
"TTL": 300,
"ResourceRecords": [
{
"Value": "indiv-finclub.elb.amazonbuy.com"
}
]
},
{
"Name": "prod.xyz.com.",
"Type": "CNAME",
"SetIdentifier": "pointing to symentic",
"Weight": 99,
"TTL": 300,
"ResourceRecords": [
{
"Value": "some.com"
}
]
}
]
}
it's possible to achieve the same using unix/linux utilities only: sed and paste:
bash $ cat new_file.json | sed '/^ *$/d' | paste -s -d, - | sed -E 's/(.*)/[\1]/'
the first sed removes all the blank lines
paste concatenates all the input lines over comma ,
the last sed puts square brackets around the input
the resulting output will be valid JSON

jsonpath query for reading json's child elements

I'm trying to read element's values.
For test purposes I was trying to read account_name element value.
I've tried this
jsonloop query = "data/ads/data/insights/data[*]"
account_name = "$.data.ads.data.insights.data.[*].account_name"
But I am not getting a result. Apart from this, I've googled a lot and my logic seems to be okay, but I don't know where I am going wrong.
{
"data": [{
"id": "act_78425484545145418"
}, {
"id": "act_87814545415645416"
}, {
"ads": {
"data": [{
"insights": {
"data": [{
"account_name": "Emirates WW",
"campaign_name": "FR_Ozone_aug",
"adset_name": "Android",
"ad_name": "C_mail_9sept",
"spend": 12.2,
"impressions": "1146",
"clicks": "16",
"_store_clicks": "0",
"inline_click_pointss": "16",
"actions": [{
"action_type": "custom_event._activate_",
"value": 2
}, {
"action_type": "custom_event.other",
"value": 1
}, {
"action_type": "click_points",
"value": 16
}, {
"action_type": "__install",
"value": 1
}, {
"action_type": "offsite_points.view_content_points",
"value": 25
}, {
"action_type": "post_points",
"value": 16
}, {
"action_type": "post_points",
"value": 16
}, {
"action_type": "offsite_points",
"value": 25
}, {
"action_type": "custom_event",
"value": 3
}],
"date_start": "2016-09-09",
"date_stop": "2016-09-19"
}],
"paging": {
"cursors": {
"before": "ADZZZZ",
"after": "ADZZZZ"
}
}
},
"id": "6054027758549"
}, {
"insights": {
"data": [{
"account_name": "Emirates WW",
"campaign_name": "FR_Ozone_aug",
"adset_name": "Reta_Phy_iOS",
"ad_name": "Council_email_9sept",
"spend": 10.13,
"impressions": "1004",
"clicks": "10",
"_store_clicks": "8",
"inline_click_pointss": "10",
"actions": [{
"action_type": "click_points",
"value": 10
}, {
"action_type": "offsite_points.view_content_points",
"value": 38
}, {
"action_type": "post_points",
"value": 10
}, {
"action_type": "post_points",
"value": 10
}, {
"action_type": "offsite_points",
"value": 38
}],
"date_start": "2016-09-09",
"date_stop": "2016-09-19"
}],
"paging": {
"cursors": {
"before": "ADZZZZ",
"after": "ADZZZZ"
}
}
},
"id": "6054030075149"
}],
"paging": {
"cursors": {
"before": "Juysubgysbyy",
"after": "Nhwunhseuubeyegb"
}
}
}
}]
}
Can anyone help ?
Thanks in Advance !!
First and second data properties have array values, so you should add .[*] after them:
account_name = "$.data.[*].ads.data.[*].insights.data.[*].account_name"
The result:
[
"Emirates WW",
"Emirates WW"
]

How to find count of children of root nodes json data in angularjs

I have the following array of JSON data. There are two root nodes, and then there are subnodes of the root nodes and sub-nodes of those sub-nodes. I need to find how many subnodes the root node has. I have no idea how to do that.
[
{
"Id": "0",
"Name": "Subscriber"
"Suscribers": [
{
"Id": "0",
"Name": "Suscriber 1"
"Events": [
{
"Id": "0",
"Name": "Subscriber"
"Suscribers": [
{
"Id": "0",
"Name": "Suscriber 1"
},
{
"Id": "1",
"Name": "Suscriber 1"
}
],
"Actions": [
{
"Name": "Start",
"Text": "Abc"
},
{
"Name": "Start",
"Text": "Abc"
}
]
},
{
"Id": "1",
"Name": "Subscriber"
"Suscribers": [
{
"Id": "0",
"Name": "Suscriber 1"
},
{
"Id": "1",
"Name": "Suscriber 1"
}
],
"Actions": [
{
"Name": "Start",
"Text": "Abc"
},
{
"Name": "Start",
"Text": "Abc"
}
]
}
],
"Actions": null
},
{
"Id": "1",
"Name": "Suscriber 1"
"Events": [
{
"Id": "0",
"Name": "Subscriber"
"Suscribers": [
{
"Id": "0",
"Name": "Suscriber 1"
},
{
"Id": "1",
"Name": "Suscriber 1"
}
],
"Actions": [
{
"Name": "Start",
"Text": "Abc"
},
{
"Name": "Start",
"Text": "Abc"
}
]
},
{
"Id": "1",
"Name": "Subscriber"
"Suscribers": [
{
"Id": "0",
"Name": "Suscriber 1"
},
{
"Id": "1",
"Name": "Suscriber 1"
}
],
"Actions": [
{
"Name": "Start",
"Text": "Abc"
},
{
"Name": "Start",
"Text": "Abc"
}
]
}
],
"Actions": null
}
],
"Actions": [
{
"Name": "Start",
"Text": "Abc"
},
{
"Name": "Start",
"Text": "Abc"
}
]
},
{
"Id": "1",
"Name": "Subscriber"
"Suscribers": [
{
"Id": "0",
"Name": "Suscriber 1"
"Events": [
{
"Id": "0",
"Name": "Subscriber"
"Suscribers": [
{
"Id": "0",
"Name": "Suscriber 1"
},
{
"Id": "1",
"Name": "Suscriber 1"
}
],
"Actions": [
{
"Name": "Start",
"Text": "Abc"
},
{
"Name": "Start",
"Text": "Abc"
}
]
},
{
"Id": "1",
"Name": "Subscriber"
"Suscribers": [
{
"Id": "0",
"Name": "Suscriber 1"
},
{
"Id": "1",
"Name": "Suscriber 1"
}
],
"Actions": [
{
"Name": "Start",
"Text": "Abc"
},
{
"Name": "Start",
"Text": "Abc"
}
]
}
],
"Actions": null
},
{
"Id": "1",
"Name": "Suscriber 1"
,
"Events": [
{
"Id": "0",
"Name": "Subscriber"
"Suscribers": [
{
"Id": "0",
"Name": "Suscriber 1"
},
{
"Id": "1",
"Name": "Suscriber 1"
}
],
"Actions": [
{
"Name": "Start",
"Text": "Abc"
},
{
"Name": "Start",
"Text": "Abc"
}
]
},
{
"Id": "1",
"Name": "Subscriber"
"Suscribers": [
{
"Id": "0",
"Name": "Suscriber 1"
},
{
"Id": "1",
"Name": "Suscriber 1"
}
],
"Actions": [
{
"Name": "Start",
"Text": "Abc"
},
{
"Name": "Start",
"Text": "Abc"
}
]
}
],
"Actions": null
}
],
"Actions": [
{
"Name": "Start",
"Text": "Abc"
},
{
"Name": "Start",
"Text": "Abc"
}
]
}
]