Using subqueries to create a JSON file

I'm trying to create a JSON file for a new project I'm currently looking into. I've got most of it working as expected, but I'm now at the point where I'm trying to use subqueries in order to format the JSON correctly.
I've tried the following subquery, but SQL doesn't like the formatting.
SELECT
'Admin User TEST ONLY PLEASE IGNORE' AS AdditionalNotes
(
SELECT v.atFault
FROM dbo.ic_DP_AX ax
CROSS APPLY (VALUES (ax.Acc_fault1), (ax.Acc_fault2)) v (atFault)
FOR JSON AUTO
) AS InsuredPartyClaims,
(
SELECT Acc_fault3 AS atFault
FROM dbo.ic_DP_AX
FOR JSON AUTO
) AS InsuredPartyConvictions
FOR JSON PATH) ROOT('InsuredParties')
FROM
dbo.icp_Daprospect AS p INNER JOIN
dbo.icp_Dapolicy AS d ON p.Branch# = d.Branch# AND p.ClientRef# =
d.ClientRef# LEFT OUTER JOIN
dbo.ic_DP_AX AS ax ON P.Branch# = ax.B# AND ax.PolRef# = d.PolicyRef#
LEFT OUTER JOIN
WHERE
d.PolicyRef# = '' AND
d.Branch# = 0
FOR JSON PATH
The output I'm trying to achieve is:
"InsuredParties": [
{
"InsuredPartyClaims": [
{
"atFault": false
},
{
"atFault": true
}
],
"InsuredPartyConvictions": [
{
"atFault": false
},
Can anyone see what I'm doing wrong? I'm trying to keep this as simple as possible.

It's always difficult without sample data, but the following example is a possible solution:
Table:
CREATE TABLE dbo.ic_DP_AX (Acc_fault1 bit, Acc_fault2 bit, Acc_fault3 bit)
INSERT INTO dbo.ic_DP_AX (Acc_fault1, Acc_fault2, Acc_fault3)
VALUES (0, 1, 0)
Statement:
SELECT
(
SELECT v.atFault
FROM dbo.ic_DP_AX ax
CROSS APPLY (VALUES (ax.Acc_fault1), (ax.Acc_fault2)) v (atFault)
FOR JSON AUTO
) AS InsuredPartyClaims,
(
SELECT Acc_fault3 AS atFault
FROM dbo.ic_DP_AX
FOR JSON AUTO
) AS InsuredPartyConvictions
FOR JSON PATH, ROOT('InsuredParties')
Result:
{
"InsuredParties":[
{
"InsuredPartyClaims":[
{
"atFault":false
},
{
"atFault":true
}
],
"InsuredPartyConvictions":[
{
"atFault":false
}
]
}
]
}
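For reference, a sketch of how the same pattern might be plugged back into the original query from the question (table, column and alias names are taken from the question; this is untested without the real data):
SELECT
    'Admin User TEST ONLY PLEASE IGNORE' AS AdditionalNotes,
    (
        -- unpivot the two claim flags of the current ax row into one array
        SELECT v.atFault
        FROM (VALUES (ax.Acc_fault1), (ax.Acc_fault2)) AS v(atFault)
        FOR JSON PATH
    ) AS InsuredPartyClaims,
    (
        -- one-element array built from the conviction flag of the current ax row
        SELECT ax.Acc_fault3 AS atFault
        FOR JSON PATH
    ) AS InsuredPartyConvictions
FROM dbo.icp_Daprospect AS p
INNER JOIN dbo.icp_Dapolicy AS d
    ON p.Branch# = d.Branch# AND p.ClientRef# = d.ClientRef#
LEFT OUTER JOIN dbo.ic_DP_AX AS ax
    ON p.Branch# = ax.B# AND ax.PolRef# = d.PolicyRef#
WHERE d.PolicyRef# = '' AND d.Branch# = 0
FOR JSON PATH, ROOT('InsuredParties');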

The subqueries need to return JSON as well.
Try
SELECT
(SELECT ax.Acc_fault1 AS [atFault] FROM dbo.ic_DP_AX AS ax FOR JSON PATH) AS [PartyClaims],
(SELECT ax.Acc_fault2 AS [atFault] FROM dbo.ic_DP_AX AS ax FOR JSON PATH) AS [PartyClaims2],
(SELECT ax.Acc_fault3 AS [atFault] FROM dbo.ic_DP_AX AS ax FOR JSON PATH) AS [PartyConvictions]
FOR JSON PATH, ROOT('InsuredParties')

Related

TSQL FOR JSON nested value

I am trying to output JSON from a T-SQL header/line (hierarchy) table structure.
Code:
select Field1, Field2 from #header inner join #line on #header.Id = #Line.Id FOR JSON AUTO
I am getting:
"Field1": "BOB",
"Field2": "BOB2",
but I am looking for it to display
"Field1": {
"value": "BOB"
},
"Field2": {
"value": "BOB2"
},
What am I doing wrong? I could do it with text manipulation, but I was wondering if there is a "blessed" way, i.e. something built-in that is readable and best practice.
Select STRING_AGG(concat('"',[Key],'":{"value":"',string_escape(Value,'json'),'"}'),',')
From OpenJson( (Select * From #YourTable For JSON Path,Without_Array_Wrapper ) )
For SQL Server 2016 (which has no STRING_AGG), use STUFF with FOR XML PATH instead:
Select stuff((Select concat('"',[Key],'":{"value":"',string_escape(Value,'json'),'"}')
From OpenJson( (Select * From #YourTable For JSON Path,Without_Array_Wrapper ) )
For XML Path ('')),1,0,'')
Results
"Field1":{
"value":"Bob"
},
"Field2":{
"value":"Bob2"
}
You can just use a path for each column name, combined with FOR JSON PATH.
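For a self-contained test, a sketch of temp tables matching the sample values in the question (assuming Field1 lives on #header and Field2 on #line, which the question doesn't state):
-- Hypothetical setup; names and values taken from the question's sample output
CREATE TABLE #header (Id int, Field1 varchar(10));
CREATE TABLE #line   (Id int, Field2 varchar(10));
INSERT INTO #header (Id, Field1) VALUES (1, 'BOB');
INSERT INTO #line   (Id, Field2) VALUES (1, 'BOB2');
Against that data, the query below should return [{"Field1":{"value":"BOB"},"Field2":{"value":"BOB2"}}]: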
SELECT
Field1 AS [Field1.value],
Field2 AS [Field2.value]
FROM #header h
JOIN #line l ON h.Id = l.Id
FOR JSON PATH;
If you need the two fields as entirely separate rows each with its own object, you can unpivot it
SELECT
v.[Field1.value],
v.[Field2.value]
FROM #header h
JOIN #line l ON h.Id = l.Id
CROSS APPLY (VALUES
(Field1, NULL),
(NULL, Field2)
) v([Field1.value], [Field2.value])
FOR JSON PATH;

Amazon Athena parsing JSON

Struggling to parse some json.
This is the format, where there are no fixed names/keys - everything is dynamic.
{ "{condition-operator}" : { "{condition-key}" : "{condition-value}" }}
An example of the values:
{
"bool":{"aws:viaawsservice":"true"},
"stringequals":{
"ec2:createaction":[
"CreateSecurityGroup",
"CreateVolume",
"CreateSnapshot",
"RunInstances"
]
}
}
I've managed to extract the 'operator' and 'key' values. (See below)
However, my result for 'values' is problematic.
One value is 'true'; the other is ["CreateSecurityGroup","CreateVolume","CreateSnapshot","RunInstances"].
I can't seem to use or cast either of them as an UNNESTable array, and to be honest I'm getting woefully lost in what's going on.
I need to be able to unnest these to get 1 row per value (so 5 values/rows in total).
Any guidance appreciated!
with cte as (
select '{"bool":{"aws:viaawsservice":"true"},"stringequals":{"ec2:createaction":["CreateSecurityGroup","CreateVolume","CreateSnapshot","RunInstances"]}}'
as sample
)
select
ct.ct as condition_operator
,map_keys(cast(ct.cb as map<varchar,json>))[1] as condition_key
,map_values(cast(ct.cb as map<varchar,json>))[1] as condition_values
from
cte
CROSS JOIN UNNEST(map_keys(cast(json_parse(cte.sample)as map<varchar,json>)),map_values(cast(json_parse(cte.sample)as map<varchar,json>))) ct(ct,cb)
-- CROSS JOIN UNNEST( ## something here ##) as values(v)
condition_operator  condition_key      condition_values
(string(255))       (string(255))      (json)
bool                aws:viaawsservice  "true"
stringequals        ec2:createaction   ["CreateSecurityGroup","CreateVolume","CreateSnapshot","RunInstances"]
You can use try(), which returns NULL in case of failure: attempt to cast the data to an array of varchar, and fall back to either a cast to varchar (which would fail for a JSON object value) or simply json_format():
select ct.ct as condition_operator,
ct_key,
ct_value
from cte
CROSS JOIN UNNEST(
map_keys(cast(json_parse(cte.sample) as map < varchar, json >)),
map_values(cast(json_parse(cte.sample) as map < varchar, json >))
) ct(ct, cb)
CROSS JOIN UNNEST(
map_keys(cast(ct.cb as map < varchar, json >)),
map_values(cast(ct.cb as map < varchar, json >))
) ct1(ct_key, ct_value_json)
CROSS JOIN UNNEST(
coalesce(try(cast(ct_value_json as array < varchar >)),array [ json_format(ct_value_json) ]
)
) ct2(ct_value)
Output:
condition_operator  ct_key             ct_value
bool                aws:viaawsservice  true
stringequals        ec2:createaction   CreateSecurityGroup
stringequals        ec2:createaction   CreateVolume
stringequals        ec2:createaction   CreateSnapshot
stringequals        ec2:createaction   RunInstances
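As a side note on the fallback itself (this snippet is not part of the original answer), a minimal standalone sketch of the try/coalesce pattern applied to single JSON values:
-- A JSON array of strings casts cleanly to array(varchar); for a scalar the cast
-- fails, try() returns NULL, and coalesce() falls back to wrapping the serialized
-- value in a one-element array.
SELECT
    coalesce(
        try(cast(v as array(varchar))),
        array[ json_format(v) ]
    ) AS value_as_array
FROM (
    VALUES
        (json_parse('["CreateSecurityGroup","CreateVolume"]')),
        (json_parse('"true"'))
) AS t(v)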

MariaDB: sum values from JSON

I'd like to sum up certain values from a JSON snippet following this example data:
set @json='
{
"items": [
{
"a": {
"a_amount": "0.0020095"
},
"b": {
"b_amount": "0.0004"
}
},
{
"a": {
"a_amount": "0.02763081"
},
"b": {
"b_amount": "0.0055"
}
}
]
}';
I need to sum all a.a_amount and all b.b_amount independently, so I'd like to do something like SUM(a.a_amount) and SUM(b.b_amount).
But I haven't gotten any further than extracting the respective values like this:
SELECT JSON_EXTRACT(@json, '$.items[*].a.a_amount') AS sum_a,
JSON_EXTRACT(@json, '$.items[*].b.b_amount') AS sum_b;
sum_a                         sum_b
["0.0020095", "0.02763081"]   ["0.0004", "0.0055"]
I've fiddled around with JSON_EXTRACT(), JSON_VALUE() and even the ha_connect plugin but still couldn't come up with SQL code that would give me the sums I need.
Who can help me here?
One option is a DOUBLE conversion combined with a recursive CTE and the JSON_EXTRACT() function.
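The query below references a table j holding the document in a column named json; that table is not shown in the answer, so here is a minimal sketch of the assumed setup, using the @json variable from the question:
CREATE TABLE j (json LONGTEXT);
INSERT INTO j VALUES (@json);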
WITH RECURSIVE cte AS
(
SELECT 0 i
UNION ALL
SELECT i + 1 i
FROM cte
WHERE i + 1 <= ( SELECT JSON_LENGTH(json, '$.items') FROM j )
)
SELECT SUM(CAST(JSON_EXTRACT(json, CONCAT('$.items[',i,'].a.a_amount')) AS DOUBLE)) AS sum_a,
SUM(CAST(JSON_EXTRACT(json, CONCAT('$.items[',i,'].b.b_amount')) AS DOUBLE)) AS sum_b
FROM cte,
j
sum_a        sum_b
0.02964031   0.0059
The JSON_TABLE function could also help you. Here is a small example with your data; you may have to play a little bit with the data types.
SELECT
SUM(a_amount),
SUM(b_amount)
FROM
(
SELECT * FROM
JSON_TABLE(@json, '$.items[*]' COLUMNS(
a_amount FLOAT PATH '$.a.a_amount',
b_amount FLOAT PATH '$.b.b_amount'
)
) as items
) as temp;
SUM(a_amount)         SUM(b_amount)
0.029640309745445848  0.005899999960092828

Add a new key/value pair into a nested array inside a PostgreSQL JSON column

Using PostgreSQL 13.4 I have a table with a JSON column in a structure like the following sample:
{
"username": "jsmith",
"location": "United States",
"posts": [
{
"id":"1",
"title":"Welcome",
"newKey":true <----------- insert new key/value pair here
},
{
"id":"4",
"title":"What started it all",
"newKey":true <----------- insert new key/value pair here
}
]
}
For changing keys on the first level, I used a simple query like this
UPDATE
sample_table_json
SET
json = json::jsonb || '{"active": true}';
But this doesn't work for nested objects and objects in an array like in the sample.
How would I insert a key/value pair into a JSON column with nested objects in an array?
You have to use the jsonb_set function while specifying the right path; see the manual.
For a single JSON update:
UPDATE sample_table_json
SET json = jsonb_set( json::jsonb
, '{posts,0,active}'
, 'true'
, true
)
For a (very) limited set of JSON updates:
UPDATE sample_table_json
SET json = jsonb_set(jsonb_set( json::jsonb
, '{posts,0,active}'
, 'true'
, true
)
, '{posts,1,active}'
, 'true'
, true
)
For a larger set of JSON updates of the same JSON data, you can create an "aggregate version" of the jsonb_set function:
CREATE OR REPLACE FUNCTION jsonb_set(x jsonb, y jsonb, p text[], e jsonb, b boolean)
RETURNS jsonb LANGUAGE sql AS $$
SELECT jsonb_set(COALESCE(x,y), p, e, b) ; $$ ;
CREATE OR REPLACE AGGREGATE jsonb_set_agg(x jsonb, p text[], e jsonb, b boolean)
( STYPE = jsonb, SFUNC = jsonb_set) ;
and then use the new aggregate function jsonb_set_agg while iterating over a query result from which the path and val fields could be calculated:
SELECT jsonb_set_agg('{"username": "jsmith","location": "United States","posts": [{"id":"1","title":"Welcome"},{"id":"4","title":"What started it all"}]}' :: jsonb
, l.path :: text[]
, to_jsonb(l.val)
, true)
FROM (VALUES ('{posts,0,active}', 'true'), ('{posts,1,active}', 'true')) AS l(path, val) -- this list could be the result of a subquery
This query can finally be used to update the data:
WITH list AS
(
SELECT id
, jsonb_set_agg(json :: jsonb
, l.path :: text[]
, to_jsonb(l.val)
, true) AS res
FROM sample_table_json
CROSS JOIN (VALUES ('{posts,0,active}', 'true'), ('{posts,1,active}', 'true')) AS l(path, val)
GROUP BY id
)
UPDATE sample_table_json AS t
SET json = l.res
FROM list AS l
WHERE t.id = l.id
It gets a bit complicated: loop through the array, add the new key/value pair to each array element and re-aggregate the array, then rebuild the whole object.
with t(j) as
(
values ('{
"username": "jsmith",
"location": "United States",
"posts": [
{
"id":"1", "title":"Welcome", "newKey":true
},
{
"id":"4", "title":"What started it all", "newKey":true
}]
}'::jsonb)
)
select j ||
jsonb_build_object
(
'posts',
(select jsonb_agg(je||'{"active":true}') from jsonb_array_elements(j->'posts') je)
)
from t;
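Applied to the question's table and the requested key, the same rebuild can be written as an UPDATE. A sketch, assuming the column is called json as in the question:
-- Rebuild the posts array with the new key/value pair and write it back
UPDATE sample_table_json
SET json = json::jsonb || jsonb_build_object(
        'posts',
        (SELECT jsonb_agg(je || '{"newKey": true}')
         FROM jsonb_array_elements(json::jsonb -> 'posts') AS je)
    );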

Correctly return column as JSON Array in MySQL after using CONCAT, GROUP_CONCAT and JSON_OBJECT

I'm using MySQL in a Node.JS API, so I need to get data from the database as JSON objects/arrays.
I'm trying to get a JSON Array nested within the result JSON as one of the values, so this is my current query:
SELECT
l.id AS id, l.description AS description, l.parent AS parent,
(
SELECT CONCAT(
'[',
GROUP_CONCAT(
JSON_OBJECT(
'id', a.id, 'description', a.description,
'ip', a.ip, 'lastmovementdetected', a.lastmovementdetected
)
),
']'
)
FROM airconditioners AS a WHERE location = l.id
) AS airconditioners
FROM locations as l
However, this is the query result (actual output is an array of these JSON objects):
{
"id": 1,
"description": "Meu quarto",
"parent": 0,
"airconditioners": "[{\"id\": 1, \"ip\": \"192.168.137.96\", \"description\": \"Ar-condicionado\", \"lastmovementdetected\": null},{\"id\": 2, \"ip\": \"192.168.0.1\", \"description\": \"Ar-condicionado\", \"lastmovementdetected\": null},{\"id\": 3, \"ip\": \"192.168.0.1\", \"description\": \"Ar-condicionado\", \"lastmovementdetected\": null}]"
}
MySQL is returning the JSON array as a string, and it's also escaping the double quotes within the JSON.
This is the expected return:
"id": 1,
"description": "Meu quarto",
"parent": 0,
"airconditioners": [
{
"id":1,
"ip":"192.168.137.96",
"description":"Ar-condicionado",
"lastmovementdetected":null
},
{
"id":2,
"ip":"192.168.0.1",
"description":"Ar-condicionado",
"lastmovementdetected":null
},
{
"id":3,
"ip":"192.168.0.1",
"description":"Ar-condicionado",
"lastmovementdetected":null
}
]
Can this be done using a SQL query only, or will I have to process the result before sending the response from the API?
I've tried surrounding the column with a CAST((SELECT...) AS JSON) AS airconditioners and also putting JSON_UNQUOTE() in many places, with no success whatsoever.
EDIT
I couldn't reach a conclusion on whether MySQL can do what I want or not. For now, I'm using the following JavaScript code to work around it:
Object.keys(result).forEach(key => {
let airconditioners = result[key].airconditioners;
if(airconditioners == null) {
// If the airconditioner field is null, then we replace it with an empty array
result[key].airconditioners = [];
} else {
result[key].airconditioners = JSON.parse(airconditioners);
}
});
Use JSON_EXTRACT, then you get the result you expect:
SELECT
l.id AS id, l.description AS description, l.parent AS parent,
(
SELECT JSON_EXTRACT( IFNULL(
CONCAT(
'[',
GROUP_CONCAT(
JSON_OBJECT(
'id', a.id, 'description', a.description,
'ip', a.ip, 'lastmovementdetected', a.lastmovementdetected
)
),
']'
)
,'[]'),'$')
FROM airconditioners AS a WHERE location = l.id
) AS airconditioners
FROM locations as l
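For completeness, a possible alternative sketch (not from the answer above), assuming a MySQL version of 5.7.22 or later where JSON_ARRAYAGG is available; it builds the array as a native JSON value, so no manual bracketing or unquoting is needed:
SELECT
    l.id AS id, l.description AS description, l.parent AS parent,
    (
        SELECT JSON_ARRAYAGG(
                   JSON_OBJECT(
                       'id', a.id, 'description', a.description,
                       'ip', a.ip, 'lastmovementdetected', a.lastmovementdetected
                   )
               )
        FROM airconditioners AS a
        WHERE a.location = l.id   -- NULL (not an error) when a location has no air conditioners
    ) AS airconditioners
FROM locations AS l;
If an empty array is preferred over NULL, the aggregate can be wrapped in IFNULL(..., JSON_ARRAY()), much like the IFNULL(..., '[]') used above.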