Stuck appending the parent key name to sub-elements when building nested JSON in PostgreSQL

I have 3 tables with the following schema.
a.c1 int (pk),
a.c2 varchar(50),
a.c3 varchar(50),
b.c1 int(pk),
b.c2 int(fk) -> a.c1,
b.c3 varchar(50),
b.c4 varchar(50),
c.c1 int(pk),
c.c2 int(fk) -> b.c1,
c.c3 int(fk) -> a.c1,
c.c4 varchar(50),
c.c5 varchar(50)
I'm expecting the result to be
{
"json_doc": {
"a.c1": "val",
"a.c2": "val",
"a.c3": "val",
"b": [{
"b_c1_value": {
"b.c1": "val",
"b.c2": "val",
"b.c3": "val",
"b.c4": "val",
"c": [{
"c_c1_value": {
"c.c1": "val",
"c.c2": "val",
"c.c3": "val",
"c.c4": "val",
"c.c5": "val"
}
}]
}
}]
}
}
Can someone please help me with the right sql. I'm very very new to Postgres.
I have gotten this far:
-- Build one JSON document per row of "a", nesting the matching "b" rows as
-- an array, and inside each "b" element nesting its matching "c" rows.
select row_to_json(t)
from (
select
*,
(
-- aggregate every child row of "b" for the current "a" row into a JSON array
select array_to_json(array_agg(row_to_json(d)))
from (
select
*,
(
-- aggregate every child row of "c" for the current "b" row into a JSON array
select array_to_json(array_agg(row_to_json(dc)))
from (
select *
from c
where c.c2 = b.c1
) dc
) as c
from b
where c2 = a.c1
) d
) as b
from a
-- NOTE(review): "deployid" is not part of the schema shown above (table a
-- only lists c1, c2, c3) -- presumably a column of the real table; verify.
WHERE a.deployid = 19
) t;
I need key names for the arrays to be populated. I'm stuck with this. Any help is deeply appreciated!

Related

Read Nested Json through sql query

Currently I have this piece of code
-- Declare and populate a JSON document variable.
-- Fix: T-SQL variables are prefixed with @ (the # prefix denotes temp
-- tables and is a syntax error here).
DECLARE @json NVARCHAR(MAX)
SET @json =
N'[
{
"objOrg": {
"EmpIds": [
{
"Id": 101
},
{
"Id": 102
},
{
"Id": 103
}
]
}
}
]'
How can I return EmpId values pivoted such as
Id1
Id2
Id3
101
102
103
OPENJSON without a schema will return the array index. Then pass the inner object to OPENJSON again to parse it out, and pivot the final result using PIVOT or MAX(CASE ... END) conditional aggregation:
-- Parse the EmpIds array and pivot the three Ids into columns with
-- conditional aggregation (MAX(CASE ...)).
-- Fix: variable sigil restored from # to @ (T-SQL requirement).
DECLARE @json nvarchar(max) =
N'[
{
"objOrg": {
"EmpIds": [
{
"Id": 101
},
{
"Id": 102
},
{
"Id": 103
}
]
}
}
]';
-- OPENJSON without a schema exposes the 0-based array index as [key];
-- the inner OPENJSON ... WITH extracts the Id from each element.
SELECT MAX(CASE WHEN arr.[key] = 0 THEN ID END) AS Id1,
MAX(CASE WHEN arr.[key] = 1 THEN ID END) AS Id2,
MAX(CASE WHEN arr.[key] = 2 THEN ID END) AS Id3
FROM OPENJSON(@json, '$[0].objOrg.EmpIds') arr
CROSS APPLY OPENJSON (arr.value)
WITH (
Id int
) AS j;
-- alternatively
-- Alternative: same extraction, pivoted with the PIVOT operator.
-- The array index is shifted by +1 so it matches the [1],[2],[3] column list.
-- Fix: variable sigil restored from # to @ (T-SQL requirement).
SELECT p.*
FROM (
SELECT arr.[key] + 1 AS [key], j.Id
FROM OPENJSON(@json, '$[0].objOrg.EmpIds') arr
CROSS APPLY OPENJSON (arr.value)
WITH (
Id int
) AS j
) j
PIVOT (
MAX(j.Id) FOR j.[key] IN
([1], [2], [3])
) p;
db<>fiddle
You can use OPENJSON() along with ROW_NUMBER() window function such as
-- Dynamic version: materialize the parsed rows with a ROW_NUMBER(), then
-- build and execute a pivot query whose column list grows with the data.
-- Fix: variable sigils restored from # to @ (T-SQL requirement).
DECLARE
@json AS NVARCHAR(MAX),
@query AS NVARCHAR(MAX);
SET @json =
N'[
{
"objOrg": {
"EmpIds": [
{
"Id": 101
},
{
"Id": 102
},
{
"Id": 103
}
]
}
}
]';
-- NOTE(review): SELECT ... INTO t_json creates a permanent table; use
-- #t_json if this should not persist beyond the session.
SELECT j.*, ROW_NUMBER() OVER (ORDER BY j.Id) AS rn
INTO t_json
FROM OPENJSON(@json)
WITH (
JS NVARCHAR(MAX) '$.objOrg.EmpIds' AS JSON
) AS j0
CROSS APPLY OPENJSON (j0.JS)
WITH (
Id INT '$.Id'
) AS j;
-- Concatenate one MAX(CASE ...) AS IdN expression per stored row
SET @query = CONCAT('SELECT',
STUFF(
(SELECT CONCAT(', MAX(CASE WHEN rn=' , CAST(rn AS VARCHAR) , ' THEN Id END) AS Id', CAST(rn AS VARCHAR))
FROM t_json
ORDER BY rn
FOR XML PATH(''), type).value('.', 'NVARCHAR(MAX)'),
1,1,''
),' FROM t_json');
EXECUTE(@query)
Demo

Json in Postgresql

I'm learning Postgresql and Json.
I have for example database like that:
-- Fix: departments must be created before employees, because employees
-- declares a foreign key referencing departments(department_id); in the
-- original order the CREATE TABLE employees statement fails.
CREATE TABLE departments (
department_id bigint primary key,
name text
);
CREATE TABLE employees (
employee_id serial primary key,
department_id integer references departments(department_id),
name text,
start_date date,
fingers integer,
-- geometry is a PostGIS type; requires CREATE EXTENSION postgis;
geom geometry(point, 4326)
);
-- Insert parents first so the FK on employees.department_id is satisfied
INSERT INTO departments
(department_id, name)
VALUES
(1, 'spatial'),
(2, 'cloud');
INSERT INTO employees
(department_id, name, start_date, fingers, geom)
VALUES
(1, 'Paul', '2018/09/02', 10, 'POINT(-123.32977 48.40732)'),
(1, 'Martin', '2019/09/02', 9, 'POINT(-123.32977 48.40732)'),
(2, 'Craig', '2019/11/01', 10, 'POINT(-122.33207 47.60621)'),
(2, 'Dan', '2020/10/01', 8, 'POINT(-122.33207 47.60621)');
How could i do so i could get the data like this:
[
{
"department_name": "cloud",
"employees": [
{
"name": "Craig",
"start_date": "2019-11-01"
},
{
"name": "Dan",
"start_date": "2020-10-01"
}
]
},
{
"department_name": "spatial",
"employees": [
{
"name": "Paul",
"start_date": "2018-09-02"
},
{
"name": "Martin",
"start_date": "2019-09-02"
}
]
}
]
follow this link: https://dba.stackexchange.com/questions/69655/select-columns-inside-json-agg/200240#200240
-- Temp table used purely as a composite type: casting a row constructor to
-- it makes json_agg emit named keys ("name", "start_date") instead of f1/f2.
CREATE TEMP TABLE x (
name text,
start_date date
);
-- One row per department with its employees pre-aggregated as a JSON array
WITH cte AS (
SELECT
d.name AS department_name,
-- the ::x cast supplies the key names for each employee object
json_agg((e.name, e.start_date)::x) AS employees
FROM
departments d
JOIN employees e ON d.department_id = e.department_id
GROUP BY
1
)
-- Wrap every department object into one top-level JSON array
SELECT
json_agg((row_to_json(cte.*)))
FROM
cte;

How to remove multiple values of different dictionaries stored in arrays with JSON_REMOVE?

I have the following json doc in MYSQL JSON field called test:
[
{"a": "1", "b": "2"},
{"a": "1", "b": "-2"},
{"a": "2", "b": "3"},
{"a": "2", "b": "-3"},
{"a": "3", "b": "4"}
]
-- Table with a single JSON column; the first row holds five objects,
-- the other two rows hold empty arrays.
CREATE TABLE `test` (`test` JSON);
INSERT INTO `test` VALUES
(JSON_ARRAY(JSON_OBJECT('a', '1', 'b', '2'),
JSON_OBJECT('a', '1', 'b', '-2'),
JSON_OBJECT('a', '2', 'b', '3'),
JSON_OBJECT('a', '2', 'b', '-3'),
JSON_OBJECT('a', '3', 'b', '4'))),
(JSON_ARRAY()),
(JSON_ARRAY());
-- JSON_SEARCH with 'all' returns the path of every "a" member whose value
-- matches, e.g. ["$[0].a", "$[1].a"]; NULL when there is no match
SELECT JSON_UNQUOTE(JSON_SEARCH(`test`, 'all', 1, null, '$[*].a')) `data`
FROM `test`;
+----------------------+
| data |
+----------------------+
| ["$[0].a", "$[1].a"] |
| NULL |
| NULL |
+----------------------+
And I want to remove all dictionaries that have key/value "a": "1".
So I tried this:
-- NOTE(review): this does not work as hoped -- JSON_SEARCH with 'all'
-- returns one JSON array of paths (e.g. ["$[0].a", "$[1].a"]), while
-- JSON_REMOVE expects individual path arguments; the paths also point at
-- the "a" member rather than at the containing object.
UPDATE `test`
SET `test` = JSON_REMOVE(`test`, JSON_UNQUOTE(JSON_SEARCH(`test`,
'all',
1,
null,
'$[*].a')));
The expected result that I wanted is, but of course it doesn't work:
// This is an expected result after update
SELECT JSON_UNQUOTE(JSON_SEARCH(`test`, 'all', 1, null, '$[*].a')) `data`, `test` FROM `test`;
+----------------------+------------------------------------------------------------------------------------------------------------------+
| data | test |
+----------------------+------------------------------------------------------------------------------------------------------------------+
| NULL | [{"a": "2", "b": "3"}, {"a": "2", "b": "-3"}, {"a": "3", "b": "4"}] |
| NULL | [] |
| NULL | [] |
+----------------------+------------------------------------------------------------------------------------------------------------------+
Note that the mysql version is 8.0
Thank you
You can use a recursive CTE along with the JSON_EXTRACT() function in order to dynamically generate the indexes of the array objects whose a value does not match 1, such as
-- Generate array indexes 0..len-1 with a recursive CTE, then keep only the
-- elements whose "a" member is not "1" and re-aggregate them into an array.
-- NOTE(review): the column is called `jsdata` here but `test` in the
-- question's schema -- rename to match the actual table.
WITH RECURSIVE cte AS
(
SELECT 0 i
UNION ALL
SELECT i + 1 i
FROM cte
-- NOTE(review): this scalar subquery assumes the table holds a single row
WHERE i + 1 < ( SELECT JSON_LENGTH(jsdata) FROM `test` )
)
SELECT JSON_ARRAYAGG(JSON_EXTRACT(jsdata, CONCAT('$[',i,']'))) AS Result
FROM `test`,
cte
WHERE JSON_EXTRACT(jsdata, CONCAT('$[',i,'].a')) != "1"
provided that the version of the DB is 8+
Demo
WITH
-- enumerate rows - a unique value per row is needed for reconstruction
cte1 AS (
SELECT *, ROW_NUMBER() OVER () rn
FROM test
),
-- decompose the JSON array into separate object properties,
-- keeping only the objects that do not contain the unwanted value
cte2 AS (
SELECT cte1.*, jsontable.a, jsontable.b
FROM cte1
CROSS JOIN JSON_TABLE(cte1.test,
'$[*]' COLUMNS ( a VARCHAR(255) PATH '$.a',
b VARCHAR(255) PATH '$.b')) jsontable
WHERE jsontable.a <> 1
),
-- reconstruct the array without the unwanted objects
cte3 AS (
SELECT test, rn, JSON_ARRAYAGG(JSON_OBJECT('a', a, 'b', b)) new_test
FROM cte2
GROUP BY test, rn
)
-- update the source table
-- NOTE(review): rows holding empty arrays produce no cte2 rows, so the
-- join does not match them and they are left untouched -- which is fine,
-- since they contain nothing to remove.
UPDATE test
JOIN cte3 ON test.test = cte3.test
SET test.test = cte3.new_test;
https://dbfiddle.uk/?rdbms=mysql_8.0&fiddle=05e4db09be79152c37b9b482b8bff170
If the table in practice contains a primary key or at least a unique index, then you can simplify the query (cte1 is not needed; the final join can be performed on this PK/unique index).

How do I prevent repetitive columns in FOR JSON PATH SQL queries?

So, I'm trying to get data from MSSQL to update some fields in an HTML form, which includes 1 checkbox and a set of options for a select input.
I thought I was being smart by writing my query as shown below. It gets BOTH the two fields at once, instead of two independent queries... I mean, it's OKAY, but I have a lot of repeated items.
Is there a way to flatten this out?
// how do I flatten this
{
"Calculated": [
{
"Calculated": false
}
],
"Schedule": [
{
"Schedule": "THX-1138"
},
{
"Schedule": "LUH-3417"
},
{
"Schedule": "SEN-5241"
}
]
}
// into something more like this?
{
"Calculated": false,
"Schedule": [
"THX-1138",
"LUH-3417",
"SEN-5241"
]
}
here is the query:
-- Original query producing the "repetitive columns" JSON shape.
-- Fix: variable sigils restored from # to @ (T-SQL requirement).
declare
@EffectDate smalldatetime = '07-01-2012'
,@Grade varchar(3) = '001'
,@Schedule varchar(9) = 'THX-1138'
,@Step smallint = '15'
,@jsonResponse nvarchar(max)
;
select @jsonResponse = (
select
-- single-row subquery, but FOR JSON PATH still wraps it in an array
[Calculated] =
(
select
b.Calculated
from
tblScalesHourly a
inner join
tblSchedules b
on a.EffectDate = b.EffectDate
and a.Schedule = b.Schedule
where
a.EffectDate = @EffectDate
and a.Schedule = @Schedule
and a.Grade = @Grade
and a.Step = @Step
for json path
)
-- each Schedule value becomes an object, repeating the "Schedule" key
,[Schedule] =
(
select
Schedule
from
tblSchedules
where
EffectDate = @EffectDate
and Calculated = 0
order by
Schedule asc
for json path
)
for json path, without_array_wrapper
)
It's probably a late answer, but I'm able to reproduce this issue with the following test data:
-- Standalone repro of the issue with literal data.
-- Fix: variable sigil restored from # to @ (T-SQL requirement).
declare @jsonResponse nvarchar(max)
select @jsonResponse = (
select
[Calculated] =
(
select CONVERT(bit, 0) AS Calculated
for json path
)
,
[Schedule] =
(
select Schedule
from (values ('THX-1138'), ('LUH-3417'), ('SEN-5241')) tblSchedules (Schedule)
order by Schedule asc
for json path
)
for json path, without_array_wrapper
)
You can get the expected results with the following approach. Note, that you can't generate a JSON array of scalar values using FOR JSON, so you need to use a string aggregation (FOR XML PATH('') for SQL Server 2016 or STRING_AGG() for SQL Server 2017+):
-- Flattened version: a scalar subquery (no FOR JSON) yields a plain value,
-- and the Schedule array of scalars is hand-built with FOR XML PATH string
-- aggregation, then validated as JSON via JSON_QUERY.
-- Fix: variable sigil restored from # to @ (T-SQL requirement).
select @jsonResponse = (
select
[Calculated] = (
select CONVERT(bit, 0) AS Calculated
)
,
[Schedule] = JSON_QUERY(CONCAT(
'["',
STUFF(
(
select CONCAT('","', Schedule)
from (values ('THX-1138'), ('LUH-3417'), ('SEN-5241')) tblSchedules (Schedule)
order by Schedule asc
for xml path('')
), 1, 3, ''
),
'"]'
))
for json path, without_array_wrapper
)
Output:
{"Calculated":false,"Schedule":["LUH-3417","SEN-5241","THX-1138"]}
And finally, using the statement from the question (not tested):
-- The same approach applied to the real tables from the question.
-- Fix: variable sigils restored from # to @ (T-SQL requirement).
declare
@EffectDate smalldatetime = '07-01-2012'
,@Grade varchar(3) = '001'
,@Schedule varchar(9) = 'THX-1138'
,@Step smallint = '15'
,@jsonResponse nvarchar(max)
;
select @jsonResponse = (
select
-- scalar subquery: emits a bare value, not a one-element array
[Calculated] = (
select
b.Calculated
from
tblScalesHourly a
inner join
tblSchedules b
on a.EffectDate = b.EffectDate
and a.Schedule = b.Schedule
where
a.EffectDate = @EffectDate
and a.Schedule = @Schedule
and a.Grade = @Grade
and a.Step = @Step
),
-- JSON array of scalar strings built via FOR XML PATH aggregation
[Schedule] = JSON_QUERY(CONCAT(
'["',
STUFF(
(
select CONCAT('","', Schedule)
from
tblSchedules
where
EffectDate = @EffectDate
and Calculated = 0
for xml path('')
), 1, 3, ''
),
'"]'
))
for json path, without_array_wrapper
)

Hierarchical JSON output from table

I've got this table structure
| User | Type | Data |
|------|---------|------|
| 1 | "T1" | "A" |
| 1 | "T1" | "B" |
| 1 | "T2" | "C" |
| 2 | "T1" | "D" |
I want to get a hierarchical JSON string returned from my query
{
"1": {
"T1": [
"A",
"B"
],
"T2": [
"C"
]
},
"2": {
"T1": [
"D"
]
}
}
So one entry for each User with a sub-entry for each Type and then a sub-entry for each Data
All I'm finding is the FOR JSON PATH, ROOT ('x') or AUTO statement but nothing that would make this hierarchical. Is this even possible out of the box? I couldn't find anything so I've experimented with (recursive) CTE but didn't get very far. I'd much appreciate if someone could just point me in the right direction.
I'm not sure that you can create JSON with variable key names using FOR JSON AUTO and FOR JSON PATH. I suggest the following solutions:
using FOR XML PATH to generate JSON with string manipulations
using STRING_AGG() to generate JSON with string manipulations for SQL Server 2017+
using STRING_AGG() and JSON_MODIFY() for SQL Server 2017+
Table:
-- Sample (User, Type, Data) rows used by the statements below
CREATE TABLE #InputData (
[User] int,
[Type] varchar(2),
[Data] varchar(1)
)
INSERT INTO #InputData
([User], [Type], [Data])
VALUES
(1, 'T1', 'A'),
(1, 'T1', 'B'),
(1, 'T2', 'C'),
(2, 'T1', 'D')
Statement using FOR XML PATH:
-- Build the hierarchical JSON by string concatenation, level by level.
-- SecondLevelCTE: one JSON array string per (User, Type), e.g. ["A","B"]
;WITH SecondLevelCTE AS (
SELECT
d.[User],
d.[Type],
Json1 = CONCAT(
'[',
STUFF(
(
SELECT CONCAT(',"', [Data], '"')
FROM #InputData
WHERE [User] = d.[User] AND [Type] = d.[Type]
FOR XML PATH('')
), 1, 1, ''),
']')
FROM #InputData d
GROUP BY d.[User], d.[Type]
-- FirstLevelCTE: one JSON object string per User, e.g. {"T1":[...],"T2":[...]}
), FirstLevelCTE AS (
SELECT
d.[User],
Json2 = CONCAT(
'{',
STUFF(
(
SELECT CONCAT(',"', [Type], '":', [Json1])
FROM SecondLevelCTE
WHERE [User] = d.[User]
FOR XML PATH('')
), 1, 1, ''),
'}'
)
FROM SecondLevelCTE d
GROUP BY d.[User]
)
-- Top level: wrap every user object into the final JSON document
SELECT CONCAT(
'{',
STUFF(
(
SELECT CONCAT(',"', [User], '":', Json2)
FROM FirstLevelCTE
FOR XML PATH('')
), 1, 1, '') ,
'}'
)
Statement using STRING_AGG():
-- Same hierarchy built with STRING_AGG() (SQL Server 2017+), which is
-- simpler than the FOR XML PATH + STUFF technique above.
-- SecondLevelCTE: one JSON array string per (User, Type)
;WITH SecondLevelCTE AS (
SELECT
d.[User],
d.[Type],
Json1 = (
SELECT CONCAT('["', STRING_AGG([Data], '","'), '"]')
FROM #InputData
WHERE [User] = d.[User] AND [Type] = d.[Type]
)
FROM #InputData d
GROUP BY d.[User], d.[Type]
-- FirstLevelCTE: comma-joined "Type":[...] pairs per User
), FirstLevelCTE AS (
SELECT
d.[User],
Json2 = (
SELECT STRING_AGG(CONCAT('"', [Type], '":', [Json1]), ',')
FROM SecondLevelCTE
WHERE [User] = d.[User]
)
FROM SecondLevelCTE d
GROUP BY d.[User]
)
-- Top level: wrap every user object into the final JSON document
SELECT CONCAT('{', STRING_AGG(CONCAT('"', [User], '":{', Json2, '}'), ','), '}')
FROM FirstLevelCTE
Statement using STRING_AGG() and JSON_MODIFY():
-- Build the document incrementally with JSON_MODIFY: for each (User, Type)
-- group, create the "$.<User>" object if missing, then set "$.<User>.<Type>"
-- to that group's pre-aggregated array.
-- Fix: variable sigils restored from # to @ (T-SQL requirement).
DECLARE @json nvarchar(max) = N'{}'
SELECT
@json = JSON_MODIFY(
CASE
-- ensure the per-user object exists before writing into it
WHEN JSON_QUERY(@json, CONCAT('$."', [User] , '"')) IS NULL THEN JSON_MODIFY(@json, CONCAT('$."', [User] , '"'), JSON_QUERY('{}'))
ELSE @json
END,
CONCAT('$."', [User] , '".', [Type]),
JSON_QUERY(Json)
)
FROM (
SELECT
d.[User],
d.[Type],
-- JSON array of the Data values for this (User, Type) group
Json = (
SELECT CONCAT('["', STRING_AGG([Data], '","'), '"]')
FROM #InputData
WHERE [User] = d.[User] AND [Type] = d.[Type]
)
FROM #InputData d
GROUP BY d.[User], d.[Type]
) t
Output:
{"1":{"T1":["A","B"],"T2":["C"]},"2":{"T1":["D"]}}
This isn't exactly what you want (I'm not great with FOR JSON) but it does get you close to the shape you need until something better comes along...
(https://jsonformatter.org/json-parser/974b6b)
use tempdb
GO
drop table if exists users
create table users (
[user] integer
, [type] char(2)
, [data] char(1)
)
insert into users
values (1, 'T1', 'A')
, (1, 'T1', 'B')
, (1, 'T2', 'C')
, (2, 'T1', 'D')
-- Self-join the table three times so FOR JSON AUTO nests user -> type ->
-- data; DISTINCT removes the duplicate rows the joins introduce.
-- NOTE(review): as the answerer says, this only approximates the requested
-- shape -- FOR JSON AUTO cannot emit variable key names.
select DISTINCT ONE.[user], two.[type], three.[data]
from users AS ONE
inner join users two
on one.[user] = two.[user]
inner join users three
on one.[user] = three.[user]
and two.[type] = three.[type]
for JSON AUTO