Improperly formatted JSON, no idea what is incorrect

I have a JSON string here. In Python, whenever I call json.loads(string), I get json.decoder.JSONDecodeError: Expecting ',' delimiter: line 1 column 21 (char 20). However, I am completely lost as to why it is improperly formatted JSON. I have tried adding r in front of the string, but to no avail.
JSON String:
string = '{"alarm_flood": "[{"Date_Time": "01-12-2009 12:18:42", "flood_status": "1", "FloodIndex": "0", "Plant_Area": "AREA 1"}, {"Date_Time": "01-12-2009 12:18:42", "flood_status": "0", "FloodIndex": "0", "Plant_Area": "AREA 1"}]", "flood_status": "[{"Date_Time": "01-12-2009 12:18:42", "flood_status": "1", "FloodIndex": "0", "Plant_Area": "AREA 1"}, {"Date_Time": "01-12-2009 12:18:42", "flood_status": "0", "FloodIndex": "0", "Plant_Area": "AREA 1"}]"}'
Can someone point me in the right direction?

You had some extra quotation marks around the two arrays, and the unescaped quotes inside them are what break the parse (a raw-string prefix only changes how backslashes are handled, so r'' cannot fix this). Here is the corrected version:
string = '{"alarm_flood": [{"Date_Time": "01-12-2009 12:18:42", "flood_status": "1", "FloodIndex": "0", "Plant_Area": "AREA 1"}, {"Date_Time": "01-12-2009 12:18:42", "flood_status": "0", "FloodIndex": "0", "Plant_Area": "AREA 1"}], "flood_status": [{"Date_Time": "01-12-2009 12:18:42", "flood_status": "1", "FloodIndex": "0", "Plant_Area": "AREA 1"}, {"Date_Time": "01-12-2009 12:18:42", "flood_status": "0", "FloodIndex": "0", "Plant_Area": "AREA 1"}]}'

You have some extra double quotes around the arrays; you can remove them using replace:
string = string.replace('"[', '[').replace(']"', ']')
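A quick sketch of the full round trip, assuming string is defined as in the question (the simple replace also assumes no value legitimately contains "[ or ]"):
import json

# `string` is the value from the question: strip the stray quotes around
# the two arrays, then parse the result.
cleaned = string.replace('"[', '[').replace(']"', ']')
data = json.loads(cleaned)
print(data["alarm_flood"][0]["Date_Time"])  # -> 01-12-2009 12:18:42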


How can I convert a JSON traffic packet into JSON format for bulk import into Elasticsearch?

I am trying to convert some JSON files about TCP and DNP3 traffic into the bulk-import format for Elasticsearch. I already know that tshark has a command that can generate JSON for bulk import from a pcap:
tshark -T ek -r dnp3_trace.pcap > dnp3_trace.json
However, I don't have the pcaps for some of the JSON files, and I don't know if there is something that could transform the plain JSON into the bulk-index format.
Here is an example of the JSON that I would like to convert into the bulk format:
{
"_index": "packets-2020-10-17",
"_type": "doc",
"_score": null,
"_source": {
"layers": {
"frame": {
"frame.interface_id": "0",
"frame.interface_id_tree": {
"frame.interface_name": "ens224"
},
"frame.encap_type": "1",
"frame.time": "Oct 17, 2020 10:51:44.072688465 Central Daylight Time",
"frame.offset_shift": "0.000000000",
"frame.time_epoch": "1602949904.072688465",
"frame.time_delta": "0.000000000",
"frame.time_delta_displayed": "0.000000000",
"frame.time_relative": "0.000000000",
"frame.number": "1",
"frame.len": "72",
"frame.cap_len": "72",
"frame.marked": "0",
"frame.ignored": "0",
"frame.protocols": "eth:ethertype:ip:tcp:dnp3",
"frame.coloring_rule.name": "TCP",
"frame.coloring_rule.string": "tcp"
},
"eth": {
"eth.dst": "00:00:00:aa:00:25",
"eth.dst_tree": {
"eth.dst_resolved": "00:00:00_aa:00:25",
"eth.dst.oui": "0",
"eth.dst.oui_resolved": "Officially Xerox, but 0:0:0:0:0:0 is more common",
"eth.addr": "00:00:00:aa:00:25",
"eth.addr_resolved": "00:00:00_aa:00:25",
"eth.addr.oui": "0",
"eth.addr.oui_resolved": "Officially Xerox, but 0:0:0:0:0:0 is more common",
"eth.dst.lg": "0",
"eth.lg": "0",
"eth.dst.ig": "0",
"eth.ig": "0"
},
"eth.src": "00:50:56:9c:5f:cc",
"eth.src_tree": {
"eth.src_resolved": "VMware_9c:5f:cc",
"eth.src.oui": "20566",
"eth.src.oui_resolved": "VMware, Inc.",
"eth.addr": "00:50:56:9c:5f:cc",
"eth.addr_resolved": "VMware_9c:5f:cc",
"eth.addr.oui": "20566",
"eth.addr.oui_resolved": "VMware, Inc.",
"eth.src.lg": "0",
"eth.lg": "0",
"eth.src.ig": "0",
"eth.ig": "0"
},
"eth.type": "0x00000800"
},
"ip": {
"ip.version": "4",
"ip.hdr_len": "20",
"ip.dsfield": "0x00000000",
"ip.dsfield_tree": {
"ip.dsfield.dscp": "0",
"ip.dsfield.ecn": "0"
},
"ip.len": "58",
"ip.id": "0x000009f9",
"ip.flags": "0x00004000",
"ip.flags_tree": {
"ip.flags.rb": "0",
"ip.flags.df": "1",
"ip.flags.mf": "0"
},
"ip.frag_offset": "0",
"ip.ttl": "64",
"ip.proto": "6",
"ip.checksum": "0x0000c405",
"ip.checksum.status": "2",
"ip.src": "172.16.0.2",
"ip.addr": "172.16.0.2",
"ip.src_host": "172.16.0.2",
"ip.host": "172.16.0.2",
"ip.dst": "192.168.0.5",
"ip.addr": "192.168.0.5",
"ip.dst_host": "192.168.0.5",
"ip.host": "192.168.0.5"
},
"tcp": {
"tcp.srcport": "41391",
"tcp.dstport": "20000",
"tcp.port": "41391",
"tcp.port": "20000",
"tcp.stream": "0",
"tcp.len": "18",
"tcp.seq": "1",
"tcp.seq_raw": "3359839259",
"tcp.nxtseq": "19",
"tcp.ack": "1",
"tcp.ack_raw": "1388983197",
"tcp.hdr_len": "20",
"tcp.flags": "0x00000018",
"tcp.flags_tree": {
"tcp.flags.res": "0",
"tcp.flags.ns": "0",
"tcp.flags.cwr": "0",
"tcp.flags.ecn": "0",
"tcp.flags.urg": "0",
"tcp.flags.ack": "1",
"tcp.flags.push": "1",
"tcp.flags.reset": "0",
"tcp.flags.syn": "0",
"tcp.flags.fin": "0",
"tcp.flags.str": "·······AP···"
},
"tcp.window_size_value": "501",
"tcp.window_size": "501",
"tcp.window_size_scalefactor": "-1",
"tcp.checksum": "0x00006cec",
"tcp.checksum.status": "2",
"tcp.urgent_pointer": "0",
"tcp.analysis": {
"tcp.analysis.bytes_in_flight": "18",
"tcp.analysis.push_bytes_sent": "18"
},
"Timestamps": {
"tcp.time_relative": "0.000000000",
"tcp.time_delta": "0.000000000"
},
"tcp.payload": "05:64:0b:c4:59:02:01:00:d4:49:ca:ca:01:3c:01:06:d1:ff",
"tcp.pdu.size": "18"
},
"dnp3": {
"Data Link Layer, Len: 11, From: 1, To: 601, DIR, PRM, Unconfirmed User Data": {
"dnp3.start": "0x00000564",
"dnp3.len": "11",
"dnp3.ctl": "0x000000c4",
"dnp3.ctl_tree": {
"dnp3.ctl.dir": "1",
"dnp3.ctl.prm": "1",
"dnp3.ctl.fcb": "0",
"dnp3.ctl.fcv": "0",
"dnp3.ctl.prifunc": "4"
},
"dnp3.dst": "601",
"dnp3.addr": "601",
"dnp3.src": "1",
"dnp3.addr": "1",
"dnp3.hdr.CRC": "0x000049d4",
"dnp.hdr.CRC.status": "1"
},
"dnp3.tr.ctl": "0x000000ca",
"dnp3.tr.ctl_tree": {
"dnp3.tr.fin": "1",
"dnp3.tr.fir": "1",
"dnp3.tr.seq": "10"
},
"Data Chunks": {
"Data Chunk: 0": {
"dnp.data_chunk": "ca:ca:01:3c:01:06",
"dnp.data_chunk_len": "6",
"dnp.data_chunk.CRC": "0x0000ffd1",
"dnp.data_chunk.CRC.status": "1"
}
},
"dnp3.al.fragments": {
"dnp3.al.fragment": "1",
"dnp3.al.fragment.count": "1",
"dnp3.al.fragment.reassembled.length": "5"
},
"Application Layer: (FIR, FIN, Sequence 10, Read)": {
"dnp3.al.ctl": "0x000000ca",
"dnp3.al.ctl_tree": {
"dnp3.al.fir": "1",
"dnp3.al.fin": "1",
"dnp3.al.con": "0",
"dnp3.al.uns": "0",
"dnp3.al.seq": "10"
},
"dnp3.al.func": "1",
"READ Request Data Objects": {
"dnp3.al.obj": "15361",
"dnp3.al.obj_tree": {
"Qualifier Field, Prefix: None, Range: No Range Field": {
"dnp3.al.objq.prefix": "0",
"dnp3.al.objq.range": "6"
},
"Number of Items: 0": ""
}
}
}
}
}
}
}
My goal would be to convert this JSON into this format:
{"index":{"_index":"packets-2019-10-25","_type":"doc"}}
{"timestamp":"1571994793106","layers":{"frame":{"frame_frame_encap_type":"1","frame_frame_time":"2019-10-25T09:13:13.106208000Z","frame_frame_offset_shift":"0.000000000","frame_frame_time_epoch":"1571994793.106208000","frame_frame_time_delta":"0.000000000","frame_frame_time_delta_displayed":"0.000000000","frame_frame_time_relative":"0.000000000","frame_frame_number":"1","frame_frame_len":"78","frame_frame_cap_len":"78","frame_frame_marked":false,"frame_frame_ignored":false,"frame_frame_protocols":"eth:ethertype:ip:tcp:dnp3"},"eth":{"eth_eth_dst":"50:7b:9d:76:77:d5","eth_eth_dst_resolved":"LCFCHeFe_76:77:d5","eth_eth_dst_oui":"5274525","eth_eth_dst_oui_resolved":"LCFC(HeFei) Electronics Technology co., ltd","eth_eth_addr":"50:7b:9d:76:77:d5","eth_eth_addr_resolved":"LCFCHeFe_76:77:d5","eth_eth_addr_oui":"5274525","eth_eth_addr_oui_resolved":"LCFC(HeFei) Electronics Technology co., ltd","eth_eth_dst_lg":false,"eth_eth_lg":false,"eth_eth_dst_ig":false,"eth_eth_ig":false,"eth_eth_src":"d8:50:e6:05:a3:1e","eth_eth_src_resolved":"ASUSTekC_05:a3:1e","eth_eth_src_oui":"14176486","eth_eth_src_oui_resolved":"ASUSTek COMPUTER INC.","eth_eth_addr":"d8:50:e6:05:a3:1e","eth_eth_addr_resolved":"ASUSTekC_05:a3:1e","eth_eth_addr_oui":"14176486","eth_eth_addr_oui_resolved":"ASUSTek COMPUTER INC.","eth_eth_src_lg":false,"eth_eth_lg":false,"eth_eth_src_ig":false,"eth_eth_ig":false,"eth_eth_type":"0x00000800"},"ip":{"ip_ip_version":"4","ip_ip_hdr_len":"20","ip_ip_dsfield":"0x00000000","ip_ip_dsfield_dscp":"0","ip_ip_dsfield_ecn":"0","ip_ip_len":"64","ip_ip_id":"0x0000259f","ip_ip_flags":"0x00004000","ip_ip_flags_rb":false,"ip_ip_flags_df":true,"ip_ip_flags_mf":false,"ip_ip_frag_offset":"0","ip_ip_ttl":"128","ip_ip_proto":"6","ip_ip_checksum":"0x00000000","ip_ip_checksum_status":"2","ip_ip_src":"192.168.1.150","ip_ip_addr":["192.168.1.150","192.168.1.200"],"ip_ip_src_host":"192.168.1.150","ip_ip_host":["192.168.1.150","192.168.1.200"],"ip_ip_dst":"192.168.1.200","ip_ip_dst_host":"192.168.1.200"},"tcp":{"tcp_tcp_srcport":"53543","tcp_tcp_dstport":"20000","tcp_tcp_port":["53543","20000"],"tcp_tcp_stream":"0","tcp_tcp_len":"24","tcp_tcp_seq":"1","tcp_tcp_seq_raw":"3354368014","tcp_tcp_nxtseq":"25","tcp_tcp_ack":"1","tcp_tcp_ack_raw":"3256068755","tcp_tcp_hdr_len":"20","tcp_tcp_flags":"0x00000018","tcp_tcp_flags_res":false,"tcp_tcp_flags_ns":false,"tcp_tcp_flags_cwr":false,"tcp_tcp_flags_ecn":false,"tcp_tcp_flags_urg":false,"tcp_tcp_flags_ack":true,"tcp_tcp_flags_push":true,"tcp_tcp_flags_reset":false,"tcp_tcp_flags_syn":false,"tcp_tcp_flags_fin":false,"tcp_tcp_flags_str":"·······AP···","tcp_tcp_window_size_value":"2052","tcp_tcp_window_size":"2052","tcp_tcp_window_size_scalefactor":"-1","tcp_tcp_checksum":"0x000084e1","tcp_tcp_checksum_status":"2","tcp_tcp_urgent_pointer":"0","tcp_tcp_analysis":null,"tcp_tcp_analysis_bytes_in_flight":"24","tcp_tcp_analysis_push_bytes_sent":"24","text":"Timestamps","tcp_tcp_time_relative":"0.000000000","tcp_tcp_time_delta":"0.000000000","tcp_tcp_payload":"05:64:11:c4:01:00:02:00:c3:5a:c8:c8:01:3c:02:06:3c:03:06:3c:04:06:c0:4c","tcp_tcp_pdu_size":"24"},"dnp3":{"text":["Data Link Layer, Len: 17, From: 2, To: 1, DIR, PRM, Unconfirmed User Data","Data Chunks","Application Layer: (FIR, FIN, Sequence 8, 
Read)"],"dnp3_dnp3_start":"0x00000564","dnp3_dnp3_len":"17","dnp3_dnp3_ctl":"0x000000c4","dnp3_dnp3_ctl_dir":true,"dnp3_dnp3_ctl_prm":true,"dnp3_dnp3_ctl_fcb":false,"dnp3_dnp3_ctl_fcv":false,"dnp3_dnp3_ctl_prifunc":"4","dnp3_dnp3_dst":"1","dnp3_dnp3_addr":["1","2"],"dnp3_dnp3_src":"2","dnp3_dnp3_hdr_CRC":"0x00005ac3","dnp3_dnp_hdr_CRC_status":"1","dnp3_dnp3_tr_ctl":"0x000000c8","dnp3_dnp3_tr_fin":true,"dnp3_dnp3_tr_fir":true,"dnp3_dnp3_tr_seq":"8","text":["Data Chunk: 0","READ Request Data Objects"],"dnp3_dnp_data_chunk":"c8:c8:01:3c:02:06:3c:03:06:3c:04:06","dnp3_dnp_data_chunk_len":"12","dnp3_dnp_data_chunk_CRC":"0x00004cc0","dnp3_dnp_data_chunk_CRC_status":"1","dnp3_dnp3_al_fragments":null,"dnp3_dnp3_al_fragment":"1","dnp3_dnp3_al_fragment_count":"1","dnp3_dnp3_al_fragment_reassembled_length":"11","dnp3_dnp3_al_ctl":"0x000000c8","dnp3_dnp3_al_fir":true,"dnp3_dnp3_al_fin":true,"dnp3_dnp3_al_con":false,"dnp3_dnp3_al_uns":false,"dnp3_dnp3_al_seq":"8","dnp3_dnp3_al_func":"1","dnp3_dnp3_al_obj":["15362","15363","15364"],"text":["Qualifier Field, Prefix: None, Range: No Range Field","Number of Items: 0","Qualifier Field, Prefix: None, Range: No Range Field","Number of Items: 0","Qualifier Field, Prefix: None, Range: No Range Field","Number of Items: 0"],"dnp3_dnp3_al_objq_prefix":["0","0","0"],"dnp3_dnp3_al_objq_range":["6","6","6"]}}}
If anyone has any solution or suggestion, I would appreciate it :)
Thanks in advance.
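Since the -T ek output is just newline-delimited pairs of an {"index": ...} action line and a source line, one possible starting point is to build those pairs yourself from the plain -T json export. This is a rough sketch, not a full re-implementation of -T ek: it keeps the dotted field names instead of renaming them to the frame_frame_* style, and it does not convert "0"/"1" flags to booleans. The file names are placeholders:
import json

# `tshark -T json` emits a JSON array of packet objects like the one above.
with open("dnp3_trace.json") as f:
    packets = json.load(f)

with open("dnp3_trace_bulk.json", "w") as out:
    for pkt in packets:
        # Bulk action line, reusing the packet's own _index/_type fields.
        action = {"index": {"_index": pkt["_index"], "_type": pkt["_type"]}}
        # `-T ek` uses the frame's epoch time in milliseconds as "timestamp".
        epoch = pkt["_source"]["layers"]["frame"]["frame.time_epoch"]
        source = {
            "timestamp": str(int(float(epoch) * 1000)),
            "layers": pkt["_source"]["layers"],
        }
        out.write(json.dumps(action) + "\n")
        out.write(json.dumps(source) + "\n")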

Sort keys based on a particular element in their values

I'm trying to create a jq filter that prints the hash of the oldest entry. I'm a beginner and I can't seem to figure it out. A sorted list would suffice, because I could pipe to head -1 to get the oldest hash.
My problem is that I can't figure out how to sort based on a value in the arrays and then print the corresponding key. I can print all the hashes with
jq -r '.t | keys[]'
but the hashes would not be sorted by date.
The date is in .[26].
This is what I tried:
jq -r '.t[] |= sort_by(.[26]) | keys_unsorted[]'
And this is my input:
{
"t": {
"2C3D7D91DFD0A57CFCA8B7092347B311088D3B6E": [
"1",
"0",
"1",
"1",
"Young.Sheldon.S04.1080p.x265-ZMNT",
"4764367221",
"1136",
"1136",
"4764367221",
"5201425130",
"1091",
"0",
"0",
"4194304",
"",
"0",
"0",
"0",
"0",
"0",
"2",
"1622459402",
"0",
"0",
"1136",
"/home/xxxxxxxx/files/Young.Sheldon.S04.1080p.x265-ZMNT",
"1622421479",
"2",
"1",
"",
"",
"4436465131520",
"1",
"1"
],
"FAC73275BC376C4C26DFDA41D991D021838DB778": [
"1",
"0",
"1",
"1",
"Joshy.2016.NORDIC.1080p.BluRay.REMUX.AVC.DTS-HD.MA.5.1-Danishbits",
"21160180253",
"5045",
"5045",
"21160180253",
"4470554624",
"211",
"114688",
"0",
"4194304",
"",
"1",
"0",
"1",
"0",
"0",
"2",
"1622459402",
"0",
"0",
"5045",
"/home/xxxxxxxx/files/Joshy.2016.NORDIC.1080p.BluRay.REMUX.AVC.DTS-HD.MA.5.1-Danishbits",
"1622413504",
"2",
"1",
"",
"",
"4436465131520",
"1",
"1"
],
"671CA27A76DC35E8E9F46723F1F6596A8BC75DA0": [
"1",
"0",
"1",
"1",
"Working.Girl.1988.1080p.Bluray.REMUX.AVC.DTS-HD.MA.5.1-4K4U",
"29680778067",
"14153",
"14153",
"29680778067",
"12426936320",
"418",
"0",
"0",
"2097152",
"",
"1",
"0",
"1",
"0",
"0",
"2",
"1622459402",
"0",
"0",
"14153",
"/home/xxxxxxxx/files/Working.Girl.1988.1080p.Bluray.REMUX.AVC.DTS-HD.MA.5.1-4K4U",
"1622440882",
"2",
"1",
"",
"",
"4436465131520",
"1",
"1"
]
},
"cid": 1423760010
}
Here is my desired output:
FAC73275BC376C4C26DFDA41D991D021838DB778
2C3D7D91DFD0A57CFCA8B7092347B311088D3B6E
671CA27A76DC35E8E9F46723F1F6596A8BC75DA0
Or, even better, just the single
671CA27A76DC35E8E9F46723F1F6596A8BC75DA0
I would really appreciate some advice.
You could form a key/value pair from the name of the root key and the value of .[26], sort on that, and then return the keys in sorted order:
[
.t |
(keys_unsorted[]) as $k |
{ key: $k, value: .[$k][26] }
] | sort_by(.value)[].key
Or, to get the last element alone, replace the sort_by(.value)[].key above with sort_by(.value) | last.key
It'll be easier if you convert t to an array of key-value pairs first: to_entries turns the object into [{key, value}, ...] entries, so you can order them by .value[26] and print each .key.
.t | to_entries | sort_by(.value[26])[].key
.t | to_entries | max_by(.value[26]) .key
The first filter prints all of the keys in sorted order; the second prints only the last one.
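For example, run against the input above (input.json is a placeholder file name), the max_by variant prints just the single desired hash:
jq -r '.t | to_entries | max_by(.value[26]) .key' input.json
671CA27A76DC35E8E9F46723F1F6596A8BC75DA0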

How to flatten nested JSON data in Azure Stream Analytics

I have a problem writing a query to extract a table out of the arrays in a JSON file.
I want to flatten the three arrays (case_Time, details, and others) into a normal SQL table.
Sample JSON data:
{
"case_Time": [
{
"v1": "1",
"v2": "0",
"v3": "0",
"date": "30 January ",
"dateymd": "2020-01-30",
"v4": "1",
"v5": "0",
"v6": "0"
},
{
"v1": "1",
"v2": "0",
"v3": "0",
"date": "31 January ",
"dateymd": "2020-01-31",
"v4": "1",
"v5": "0",
"v6": "0"
}],
"details": [
{
"d1": "281844",
"d2": "10124024",
"d3": "146791",
"d4": "0",
"d5": "0",
"d6": "0",
"lastupdatedtime": "24/12/2020 09:12:24",
"d7": "2746",
"d8": "9692643",
"d9": "Total",
"notes": "some text"
},
{
"d1": "281944",
"d2": "1012",
"d3": "1791",
"d4": "0",
"d5": "0",
"d6": "0",
"lastupdatedtime": "25/12/2020 09:12:24",
"d7": "2746",
"d8": "96643",
"d9": "Total",
"notes": "some text"
}],
"others": [
{
"p1": "",
"p2": "75.64",
"p3": "",
"p4": "",
"p5": "",
"p6": "",
"date": "13/03/2020",
"p7": "",
"p8": "1.20%",
"p9": "",
"p10": "83.33",
"p11": "5",
"p12": "5900",
"p13": "78"
},
{
"p1": "",
"p2": "75.64",
"p3": "",
"p4": "",
"p5": "",
"p6": "",
"date": "14/03/2020",
"p7": "",
"p8": "1.20%",
"p9": "",
"p10": "81.33",
"p11": "5",
"p12": "500",
"p13": "78"
}
]
}
I tried the query below, but I am only getting data from the first array. How do I flatten the remaining arrays?
WITH Cases AS
(
SELECT
arrayElement.ArrayIndex,
arrayElement.ArrayValue as av
FROM input as event
CROSS APPLY GetArrayElements(event.case_Time) AS arrayElement
)
SELECT av.v1, av.v2, av.v3,av.date,av.dateymd, av.v4,av.v5,av.v6
INTO powerbi
FROM Cases
Appreciate any help :)
You can CROSS APPLY all of your arrays; try something like this:
WITH Cases AS
(
SELECT
arrayElement.ArrayIndex as ai,
arrayElement.ArrayValue as av,
y.ArrayIndex as yi,
y.ArrayValue as dt,
z.ArrayIndex as zi,
z.ArrayValue as ot
FROM input as event
CROSS APPLY GetArrayElements(event.case_Time) AS arrayElement
CROSS APPLY GetArrayElements(event.details) AS y
CROSS APPLY GetArrayElements(event.others) AS z
)
SELECT av.v1, av.v2, av.v3, av.date, av.dateymd, av.v4, av.v5, av.v6, dt.d1, dt.d2, dt.d3, dt.d4, dt.d5, dt.d6, dt.lastupdatedtime, dt.d7, dt.d8, dt.d9, dt.notes, ot.p1, ot.p2, ot.p3, ot.p4, ot.p5, ot.p6, ot.p7, ot.p8, ot.p9, ot.p10, ot.p11, ot.p12, ot.p13, ot.date AS tdate
INTO powerbi
FROM Cases
This query produces a complete cross product, so you will get 8 rows (2 × 2 × 2). If you only want 2 rows (matching array indexes), you can add WHERE ai = yi AND yi = zi.

Is this JSON formatted correctly?

I'm not sure that this JSON is formatted correctly. Can someone tell me what is wrong with the formatting?
{"response": 1,
"data": { "events": [
{
"placeTitle":"Griffwood Dr",
"placeAddress": "Canonsburg, , ",
"downCount": "0",
"time": "2015-01-01 06:47:28 UTC",
"nameOfHost": "Tyler Rice",
"event_id": "21",
"userresponse": "0",
"people_down": []
},
]
}}
You need to remove the comma if you are not going to have another object:
{"response": 1,
"data": { "events": [
{
"placeTitle":"Griffwood Dr",
"placeAddress": "Canonsburg, , ",
"downCount": "0",
"time": "2015-01-01 06:47:28 UTC",
"nameOfHost": "Tyler Rice",
"event_id": "21",
"userresponse": "0",
"people_down": []
}
]
}}
For the future, you can run your JSON through an online JSON validator to pinpoint errors like this.
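If you would rather check it locally, Python's built-in json module reports the exact position where parsing fails; a quick sketch (the file name is a placeholder):
import json

# Read the candidate JSON from a file and try to parse it.
with open("events.json") as f:
    text = f.read()

try:
    json.loads(text)
    print("valid JSON")
except json.JSONDecodeError as err:
    # The error message includes the line/column where parsing failed;
    # for the snippet above, it points at the ']' right after the stray comma.
    print("invalid JSON:", err)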
Remove the extra comma after the object within the array. Since there is only one object in the array, there is no need for the comma delimiter, which would normally separate objects.
{
"placeTitle":"Griffwood Dr",
"placeAddress": "Canonsburg, , ",
"downCount": "0",
"time": "2015-01-01 06:47:28 UTC",
"nameOfHost": "Tyler Rice",
"event_id": "21",
"userresponse": "0",
"people_down": []
}, //remove this comma

Parse JSON output from Nagios plugin

I've searched this site and googled as much as I can, but I cannot seem to find a solution that works.
I have Nagios Core running at home for a monitoring project. The status.dat file is converted to a JSON file on the web server using a plugin I installed.
The output from this plugin looks like this:
{
"programStatus": {
"modified_host_attributes": "0",
"modified_service_attributes": "0",
"nagios_pid": "983",
"daemon_mode": "1",
"program_start": "1414556165",
"last_log_rotation": "0",
"enable_notifications": "1",
"active_service_checks_enabled": "1",
"passive_service_checks_enabled": "1",
"active_host_checks_enabled": "1",
"passive_host_checks_enabled": "1",
"enable_event_handlers": "1",
"obsess_over_services": "0",
"obsess_over_hosts": "0",
"check_service_freshness": "1",
"check_host_freshness": "0",
"enable_flap_detection": "1",
"process_performance_data": "0",
"global_host_event_handler": "",
"global_service_event_handler": "",
"next_comment_id": "1",
"next_downtime_id": "1",
"next_event_id": "77",
"next_problem_id": "23",
"next_notification_id": "304",
"active_scheduled_host_check_stats": "1,5,5",
"active_ondemand_host_check_stats": "0,0,0",
"passive_host_check_stats": "0,0,0",
"active_scheduled_service_check_stats": "3,11,11",
"active_ondemand_service_check_stats": "0,0,0",
"passive_service_check_stats": "0,0,0",
"cached_host_check_stats": "0,0,0",
"cached_service_check_stats": "0,0,0",
"external_command_stats": "0,0,0",
"parallel_host_check_stats": "1,5,5",
"serial_host_check_stats": "0,0,0"
},
"hosts": {
"localhost": {
"host_name": "localhost",
"modified_attributes": "0",
"check_command": "check-host-alive",
"check_period": "24x7",
"notification_period": "workhours",
"check_interval": "5.000000",
"retry_interval": "1.000000",
"event_handler": "",
"has_been_checked": "1",
"should_be_scheduled": "1",
"check_execution_time": "4.007",
"check_latency": "1.279",
"check_type": "0",
"current_state": "0",
"last_hard_state": "0",
"last_event_id": "0",
"current_event_id": "0",
"current_problem_id": "0",
"last_problem_id": "0",
"plugin_output": "PING OK - Packet loss = 0%, RTA = 0.08 ms",
"long_plugin_output": "",
"performance_data": "rta=0.076000ms;3000.000000;5000.000000;0.000000 pl=0%;80;100;0",
"last_check": "1414556166",
"next_check": "1414556470",
"check_options": "0",
"current_attempt": "1",
"max_attempts": "10",
"state_type": "1",
"last_state_change": "1411951605",
"last_hard_state_change": "1411951605",
"last_time_up": "1414556170",
"last_time_down": "0",
"last_time_unreachable": "0",
"last_notification": "0",
"next_notification": "0",
"no_more_notifications": "0",
"current_notification_number": "0",
"current_notification_id": "0",
"notifications_enabled": "1",
"problem_has_been_acknowledged": "0",
"acknowledgement_type": "0",
"active_checks_enabled": "1",
"passive_checks_enabled": "1",
"event_handler_enabled": "1",
"flap_detection_enabled": "1",
"process_performance_data": "1",
"obsess": "1",
"last_update": "1414556456",
"is_flapping": "0",
"percent_state_change": "0.00",
"scheduled_downtime_depth": "0"
},
"test-vm": {
"host_name": "test-vm",
"modified_attributes": "0",
"check_command": "check-host-alive",
"check_period": "24x7",
"notification_period": "workhours",
"check_interval": "5.000000",
"retry_interval": "1.000000",
"event_handler": "",
"has_been_checked": "1",
"should_be_scheduled": "1",
"check_execution_time": "3.001",
"check_latency": "0.000",
"check_type": "0",
"current_state": "1",
"last_hard_state": "1",
"last_event_id": "70",
"current_event_id": "72",
"current_problem_id": "19",
"last_problem_id": "10",
"plugin_output": "CRITICAL - Host Unreachable (192.168.56.4)",
"long_plugin_output": "",
"performance_data": "",
"last_check": "1414556437",
"next_check": "1414556740",
"check_options": "0",
"current_attempt": "1",
"max_attempts": "10",
"state_type": "1",
"last_state_change": "1413873683",
"last_hard_state_change": "1413873683",
"last_time_up": "1413873142",
"last_time_down": "1414556440",
"last_time_unreachable": "0",
"last_notification": "1414556268",
"next_notification": "1414563468",
"no_more_notifications": "0",
"current_notification_number": "2",
"current_notification_id": "301",
"notifications_enabled": "1",
"problem_has_been_acknowledged": "0",
"acknowledgement_type": "0",
"active_checks_enabled": "1",
"passive_checks_enabled": "1",
"event_handler_enabled": "1",
"flap_detection_enabled": "1",
"process_performance_data": "1",
"obsess": "1",
"last_update": "1414556456",
"is_flapping": "0",
"percent_state_change": "0.00",
"scheduled_downtime_depth": "0"
},
"winserver": {
"host_name": "winserver",
"modified_attributes": "0",
"check_command": "check-host-alive",
"check_period": "24x7",
"notification_period": "24x7",
"check_interval": "5.000000",
"retry_interval": "1.000000",
"event_handler": "",
"has_been_checked": "1",
"should_be_scheduled": "1",
"check_execution_time": "4.004",
"check_latency": "0.000",
"check_type": "0",
"current_state": "0",
"last_hard_state": "0",
"last_event_id": "75",
"current_event_id": "76",
"current_problem_id": "0",
"last_problem_id": "20",
"plugin_output": "PING OK - Packet loss = 0%, RTA = 0.44 ms",
"long_plugin_output": "",
"performance_data": "rta=0.438000ms;3000.000000;5000.000000;0.000000 pl=0%;80;100;0",
"last_check": "1414556380",
"next_check": "1414556684",
"check_options": "0",
"current_attempt": "1",
"max_attempts": "10",
"state_type": "1",
"last_state_change": "1414556303",
"last_hard_state_change": "1414556303",
"last_time_up": "1414556384",
"last_time_down": "1414556303",
"last_time_unreachable": "0",
"last_notification": "1414556303",
"next_notification": "1414558103",
"no_more_notifications": "0",
"current_notification_number": "0",
"current_notification_id": "302",
"notifications_enabled": "1",
"problem_has_been_acknowledged": "0",
"acknowledgement_type": "0",
"active_checks_enabled": "1",
"passive_checks_enabled": "1",
"event_handler_enabled": "1",
"flap_detection_enabled": "1",
"process_performance_data": "1",
"obsess": "1",
"last_update": "1414556456",
"is_flapping": "0",
"percent_state_change": "5.99",
"scheduled_downtime_depth": "0"
}
},
and goes on forever with host information.
Now, I've managed to parse this quite easily using the Rainmeter web parser and a regex helper called RainRegex.
What I would like to do is use this information on a web page.
I've tried jQuery:
<script type="text/javascript" src="http://ajax.googleapis.com/ajax/libs/jquery/1.6.2/jquery.min.js"> </script>
<script>
$.getJSON("http://<serverIP>/nagios/statusJson.php", function( data){
var items = [];
$.each( data, function( key, val ) {
items.push( "<li id='" + key + "'>" + val + "</li>" );
});
$( "<ul/>", {
"class": "my-new-list",
html: items.join( "" )
}).appendTo( "body" );
});
</script>
but I couldn't get it to pull the information from the PHP file.
I'm not sure what I'm doing wrong here.
In case you are still curious about a solution, this might help you:
If you are pulling from the PHP file, you have to make sure that you echo the JSON-encoded array you created:
echo json_encode($postData);
You can also skip parsing with PHP and get the info straight from the Nagios RESTful API (I assume you have the API plugin installed as well). From there you can parse and output the JSON using your jQuery script.
Hi, I managed to resolve this issue myself.
I'm not fluent enough in jQuery, so I decided to use a Python script to parse the JSON data and load it into a SQL table; from there I was able to easily use PHP to query the SQL database for the data.
Thank you all anyway. I'll edit this answer soon when I'm finished polishing the script.
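For reference, a minimal sketch of that kind of script (the placeholder URL is the one from the question; the SQLite table and column choices are illustrative guesses, not the author's actual code):
import json
import sqlite3
import urllib.request

# Fetch the status JSON from the plugin endpoint.
URL = "http://<serverIP>/nagios/statusJson.php"
with urllib.request.urlopen(URL) as resp:
    status = json.load(resp)

# Write one row per host into a local SQLite table.
conn = sqlite3.connect("nagios.db")
conn.execute(
    "CREATE TABLE IF NOT EXISTS hosts ("
    "host_name TEXT PRIMARY KEY, current_state TEXT, "
    "plugin_output TEXT, last_check TEXT)"
)
for name, host in status["hosts"].items():
    conn.execute(
        "INSERT OR REPLACE INTO hosts VALUES (?, ?, ?, ?)",
        (name, host["current_state"], host["plugin_output"], host["last_check"]),
    )
conn.commit()
conn.close()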