How do I prevent repetitive columns in FOR JSON PATH SQL queries? - json

So, I'm trying to get data from MSSQL to update some fields in an HTML form, which includes 1 checkbox and a set of options for a select input.
I thought I was being smart by writing my query as shown below. It gets BOTH the two fields at once, instead of two independent queries... I mean, it's OKAY, but I have a lot of repeated items.
Is there a way to flatten this out?
// how do I flatten this
{
"Calculated": [
{
"Calculated": false
}
],
"Schedule": [
{
"Schedule": "THX-1138"
},
{
"Schedule": "LUH-3417"
},
{
"Schedule": "SEN-5241"
}
]
}
// into something more like this?
{
"Calculated": false,
"Schedule": [
"THX-1138",
"LUH-3417",
"SEN-5241"
]
}
here is the query:
-- Original question query. Fixed: T-SQL local variables must use the '@'
-- prefix — '#' denotes temp objects and is invalid in DECLARE, so the
-- snippet as posted would not parse.
declare
@EffectDate smalldatetime = '07-01-2012'
,@Grade varchar(3) = '001'
,@Schedule varchar(9) = 'THX-1138'
,@Step smallint = '15'
,@jsonResponse nvarchar(max)
;
select @jsonResponse = (
select
-- single scalar flag for the checkbox; FOR JSON PATH still wraps it in
-- an array of objects, which is the repetition the question is about
[Calculated] =
(
select
b.Calculated
from
tblScalesHourly a
inner join
tblSchedules b
on a.EffectDate = b.EffectDate
and a.Schedule = b.Schedule
where
a.EffectDate = @EffectDate
and a.Schedule = @Schedule
and a.Grade = @Grade
and a.Step = @Step
for json path
)
-- option list for the select input; each value becomes {"Schedule": ...}
,[Schedule] =
(
select
Schedule
from
tblSchedules
where
EffectDate = @EffectDate
and Calculated = 0
order by
Schedule asc
for json path
)
for json path, without_array_wrapper
)

It's probably a late answer, but I'm able to reproduce this issue with the following test data:
-- Repro script with test data. Fixed: '#' → '@' (T-SQL variable prefix).
declare @jsonResponse nvarchar(max)
select @jsonResponse = (
select
[Calculated] =
(
-- a single-row subquery, but FOR JSON PATH still emits [{...}]
select CONVERT(bit, 0) AS Calculated
for json path
)
,
[Schedule] =
(
select Schedule
from (values ('THX-1138'), ('LUH-3417'), ('SEN-5241')) tblSchedules (Schedule)
order by Schedule asc
for json path
)
for json path, without_array_wrapper
)
You can get the expected results with the following approach. Note that you can't generate a JSON array of scalar values using FOR JSON, so you need to use string aggregation (FOR XML PATH('') for SQL Server 2016, or STRING_AGG() for SQL Server 2017+):
-- Flattened output demo. Fixed: '#' → '@' (T-SQL variable prefix).
select @jsonResponse = (
select
-- a bare scalar subquery (no FOR JSON) yields a plain value, not [{...}]
[Calculated] = (
select CONVERT(bit, 0) AS Calculated
)
,
-- FOR JSON cannot emit an array of scalars, so build the ["a","b"] text
-- by string aggregation and mark it as JSON with JSON_QUERY so it is
-- embedded unescaped. STUFF(..., 1, 3, '') strips the leading '","'.
[Schedule] = JSON_QUERY(CONCAT(
'["',
STUFF(
(
select CONCAT('","', Schedule)
from (values ('THX-1138'), ('LUH-3417'), ('SEN-5241')) tblSchedules (Schedule)
order by Schedule asc
for xml path('')
), 1, 3, ''
),
'"]'
))
for json path, without_array_wrapper
)
Output:
{"Calculated":false,"Schedule":["LUH-3417","SEN-5241","THX-1138"]}
And finally, using the statement from the question (not tested):
-- Final statement using the question's tables. Fixed: '#' → '@'
-- (T-SQL variable prefix).
declare
@EffectDate smalldatetime = '07-01-2012'
,@Grade varchar(3) = '001'
,@Schedule varchar(9) = 'THX-1138'
,@Step smallint = '15'
,@jsonResponse nvarchar(max)
;
select @jsonResponse = (
select
-- scalar subquery without FOR JSON → plain value in the output object
[Calculated] = (
select
b.Calculated
from
tblScalesHourly a
inner join
tblSchedules b
on a.EffectDate = b.EffectDate
and a.Schedule = b.Schedule
where
a.EffectDate = @EffectDate
and a.Schedule = @Schedule
and a.Grade = @Grade
and a.Step = @Step
),
-- build the array-of-strings text manually (FOR JSON cannot emit
-- scalar arrays); JSON_QUERY embeds it unescaped
[Schedule] = JSON_QUERY(CONCAT(
'["',
STUFF(
(
select CONCAT('","', Schedule)
from
tblSchedules
where
EffectDate = @EffectDate
and Calculated = 0
for xml path('')
), 1, 3, ''
),
'"]'
))
for json path, without_array_wrapper
)

Related

SQL Server OpenJson - retrieve row based on nested Json, querying multiple Json rows

Given the data table below, how can I retrieve only the row #3, querying the field "chave", based on multiple json rows?
I want to retrieve the master row where the json field (NomeCampo = id and Valor = 3) and also (NomeCampo = id2 and Valor = 5)
id id_modulo chave
624D4FB5-6197-11EA-A947-9C5C8ED7177E 17 [{"NomeCampo":"id","Valor":2},{"NomeCampo":"id2","Valor":5}]
4CF95795-4BFD-EC11-8CE5-80A589B639E0 17 [{"NomeCampo":"id","Valor":3},{"NomeCampo":"id2","Valor":4}]
DBE9275A-9BFF-EC11-8CE5-80A589B639E0 17 [{"NomeCampo":"id","Valor":3},{"NomeCampo":"id2","Valor":5}]
BE3228C6-9BFF-EC11-8CE5-80A589B639E0 17 [{"NomeCampo":"id","Valor":3},{"NomeCampo":"id2","Valor":6}]
This is the SQL that I have but it is not retrieving any row at all:
SELECT id, id_modulo, chave
FROM myTable
WHERE id_modulo = 17
-- BUG (the question's problem): both AND-ed predicates below are evaluated
-- against the SAME OPENJSON row, and no single element can have
-- NomeCampo = 'id' and NomeCampo = 'id2' at once, so EXISTS never matches.
AND EXISTS (SELECT *
FROM OPENJSON(chave)
WITH (NomeCampo nvarchar(max) '$.NomeCampo',
Valor nvarchar(max) '$.Valor') AS [Info]
WHERE ([Info].NomeCampo = 'id' AND [Info].Valor = '3')
AND ([Info].NomeCampo = 'id2' AND [Info].Valor = '5'))
Is this even possible to do?
This is actually a case of relational division. You need to find the set of JSON rows which have these properties, so you need to group it
-- Relational division: keep rows whose JSON array contains BOTH required
-- (NomeCampo, Valor) pairs. The subquery collects the matching JSON
-- elements and the HAVING demands that both were found.
SELECT mt.id,
       mt.id_modulo,
       mt.chave
FROM myTable AS mt
WHERE mt.id_modulo = 17
  AND EXISTS (
        SELECT 1
        FROM OPENJSON(mt.chave) WITH (
            NomeCampo nvarchar(100),
            Valor nvarchar(1000)
        ) AS j
        WHERE (j.NomeCampo = 'id' AND j.Valor = '3'
            OR j.NomeCampo = 'id2' AND j.Valor = '5')
        GROUP BY ()
        HAVING COUNT(*) = 2
      );
Another option
-- Alternative: aggregate once over every JSON element and require each
-- (NomeCampo, Valor) pair to have been seen at least once.
SELECT mt.id,
       mt.id_modulo,
       mt.chave
FROM myTable AS mt
WHERE mt.id_modulo = 17
  AND EXISTS (
        SELECT 1
        FROM OPENJSON(mt.chave) WITH (
            NomeCampo nvarchar(100),
            Valor nvarchar(1000)
        ) AS j
        GROUP BY ()
        HAVING COUNT(CASE WHEN j.NomeCampo = 'id' AND j.Valor = '3' THEN 1 END) > 0
           AND COUNT(CASE WHEN j.NomeCampo = 'id2' AND j.Valor = '5' THEN 1 END) > 0
      );

Count vowels and consonants in an array

I am trying to write a function which will return number of vowels
and consonants. Using the IF statement function will successfully
compile, however when I call it in the select it shows the message :
"Conversion failed when converting the varchar value 'MAMAMIA' to
data type int."
I tried with the CASE statement, but there were too many syntax
errors, and I think it is not the best method of solving the problem
using CASE ...
-- NOTE(review): this is the question's broken code, kept as posted;
-- parameters and variables are written with '#' where T-SQL requires '@'.
CREATE FUNCTION VOW_CONS(#ARRAY VARCHAR(20))
RETURNS INT
BEGIN
DECLARE #COUNTT INT;  -- vowel count (the only value returned)
DECLARE #COUNTT1 INT; -- consonant count (computed but never returned)
SET #COUNTT=0;
SET #COUNTT1=0;
-- BUG: comparing the varchar to the int literal 0 forces an implicit cast of
-- the whole string to int, which raises "Conversion failed when converting
-- the varchar value 'MAMAMIA' to data type int." Compare against '' instead.
-- Also the string is never shortened inside the loop, so even with the right
-- comparison this loops forever; each pass should strip the first character.
WHILE (#ARRAY!=0)
BEGIN
IF(#ARRAY LIKE '[aeiouAEIOU]%')
SET #COUNTT=#COUNTT+1
ELSE SET #COUNTT1=#COUNTT1+1
/*
DECLARE #C INT;
SET #C=(CASE #SIR WHEN 'A' THEN #COUNTT=#COUNTT+1;
WHEN 'E' THEN #COUNTT=#COUNTT+1
WHEN 'I' THEN #COUNTT=#COUNTT+1
WHEN 'O' THEN #COUNTT=#COUNTT+1
WHEN 'U' THEN #COUNTT=#COUNTT+1
WHEN 'A' THEN #COUNTT=#COUNTT+1
WHEN ' ' THEN ' '
ELSE #COUNTT1=#COUNTT1+1
END)
*/
END
RETURN #COUNTT;
END
SELECT DBO.VOW_CONS('MAMAMIA')
Without knowing what version of SQL Server you are using, I am going to assume you are using the latest version, meaning you have access to TRANSLATE. Also I'm going to assume you do need access to both the number of vowels and consonants, so a table-value function seems the better method here. As such, you could do something like this:
-- Inline table-valued function returning vowel and consonant counts.
-- Fixed: '#String' → '@String' (T-SQL parameters use the '@' prefix).
-- Technique: TRANSLATE each target letter to a sentinel ('|', assumed absent
-- from the input), strip the sentinels, and measure how much shorter the
-- string became.
CREATE FUNCTION dbo.CountVowelsAndConsonants (@String varchar(20))
RETURNS table
AS RETURN
SELECT DATALENGTH(@String) - DATALENGTH(REPLACE(TRANSLATE(@String,'aeiou','|||||'),'|','')) AS Vowels, --Pipe is a character that can't be in your string
       DATALENGTH(@String) - DATALENGTH(REPLACE(TRANSLATE(@String,'bcdfghjklmnpqrstvwxyz','|||||||||||||||||||||'),'|','')) AS Consonants --Pipe is a character that can't be in your string
GO
And then you can use the function like so:
SELECT *
FROM (VALUES('MAMAMIA'),('Knowledge is power'))V(YourString)
-- fixed typo: the function is created as CountVowelsAndConsonants,
-- but was called as CountVowelsAndConsonents
CROSS APPLY dbo.CountVowelsAndConsonants(V.YourString);
Another option would be to split the string into its individual letters using a tally table, join that to a table of vowels/consonants and then get your counts.
Here is a working example, you'll have to update/change for your specific needs but should give you an idea on how it works.
-- Tally-table approach. Fixed: '#' → '@' for the local variable and the
-- table variable (DECLARE ... TABLE requires an '@' name; '#' is only for
-- temp tables created with CREATE TABLE).
DECLARE @string NVARCHAR(100)
SET @string = 'Knowledge is power';
--Here we build a table of all the letters setting a flag on which ones are vowels
DECLARE @VowelConsonants TABLE
(
[Letter] CHAR(1)
, [IsVowel] BIT
);
--We load it with the data
INSERT INTO @VowelConsonants (
[Letter]
, [IsVowel]
)
VALUES ( 'a', 1 ) , ( 'b', 0 ) , ( 'c', 0 ) , ( 'd', 0 ) , ( 'e', 1 ) , ( 'f', 0 ) , ( 'g', 0 ) , ( 'h', 0 ) , ( 'i', 1 ) , ( 'j', 0 ) , ( 'k', 0 ) , ( 'l', 0 ) , ( 'm', 0 ) , ( 'n', 0 ) , ( 'o', 1 ) , ( 'p', 0 ) , ( 'q', 0 ) , ( 'r', 0 ) , ( 's', 0 ) , ( 't', 0 ) , ( 'u', 1 ) , ( 'v', 0 ) , ( 'w', 0 ) , ( 'x', 0 ) , ( 'y', 0 ) , ( 'z', 0 );
--This tally table example gives 10,000 numbers
WITH
E1(N) AS (select 1 from (values (1),(1),(1),(1),(1),(1),(1),(1),(1),(1))dt(n)),
E2(N) AS (SELECT 1 FROM E1 a, E1 b), --10E+2 or 100 rows
E4(N) AS (SELECT 1 FROM E2 a, E2 b), --10E+4 or 10,000 rows max
cteTally(N) AS
(
SELECT ROW_NUMBER() OVER (ORDER BY (SELECT NULL)) FROM E4
)
SELECT SUM( CASE WHEN [vc].[IsVowel] = 1 THEN 1
ELSE 0
END
) AS [VowelCount]
, SUM( CASE WHEN [vc].[IsVowel] = 0 THEN 1
ELSE 0
END
) AS [ConsonantCount]
FROM [cteTally] [t] --Select from tally cte
INNER JOIN @VowelConsonants [vc] --Join to VowelConsonants table on the letter.
ON LOWER([vc].[Letter]) = LOWER(SUBSTRING(@string, [t].[N], 1)) --Using the number from the tally table we can easily split out each letter using substring
WHERE [t].[N] <= LEN(@string);
Giving you results of:
VowelCount ConsonantCount
----------- --------------
6 10

Extract value from JSON ARRAY in BigQuery

I am trying to extract value from JSON ARRAY as below
-- NOTE(review): ARRAY_AGG(... ignore nulls) drops elements where the key is
-- absent, so the resulting oranges/pear arrays lose positional alignment
-- with the original fruit array — the issue the question reports.
with `project.dataset.table` as (
select '{"fruit":[{"apples":"5","oranges":"10","pear":"20"},
{"apples":"5","oranges":"4"},
{"apples":"5","oranges":"9","pear":"40"}]}' as json union all
select '{"fruit":[{"lettuce":"7","kale": "8"}]}'
)
select json, if(regexp_contains(json, '"apples":"5"'), (SELECT
ARRAY_AGG(json_extract_scalar(arr, '$.oranges') ignore nulls)
from
UNNEST(json_extract_ARRAY(json, '$.fruit')) as arr ), null) as oranges,
if(regexp_contains(json, '"apples":"5"'), (SELECT
ARRAY_AGG(json_extract_scalar(arr, '$.pear') ignore nulls)
from
UNNEST(json_extract_ARRAY(json, '$.fruit')) as arr ), null) as pear,
from `project.dataset.table`
It gives output as below
I am expecting output like
json
oranges
pear
{"fruit":[{"apples":"5","oranges":"10","pear":"20"},{"apples":"5","oranges":"4"},{"apples":"5","oranges":"9","pear":"40"}]}
10
20
4
null
9
40
{"fruit":[{"lettuce":"7","kale": "8"}]}.
null
null
Consider below approach with more explicit alignment of respective entries
-- One repeated STRUCT per fruit element keeps oranges/pear aligned with the
-- source array; non-matching elements collapse to a (null, null) placeholder.
select json,
  array(
    select
      if(
        json_extract_scalar(fruit_item, '$.apples') = '5',
        struct(
          json_extract_scalar(fruit_item, '$.oranges') as oranges,
          json_extract_scalar(fruit_item, '$.pear') as pear
        ),
        struct(null, null)
      )
    from src.arr fruit_item
  ) fruits
from `project.dataset.table`,
unnest([struct(json_extract_array(json, '$.fruit') as arr)]) src
It is less verbose and has output of fruits as a repeated record as opposed to two arrays
If applied to sample data in your question output is
In case if you really need to keep output is separate columns - use below
-- Same idea, but emitted as two separate aligned columns; missing or
-- non-matching values become the string '0' so positions are preserved.
select json,
  array(
    select
      if(
        json_extract_scalar(fruit_item, '$.apples') = '5',
        ifnull(json_extract_scalar(fruit_item, '$.oranges'), '0'),
        '0'
      )
    from src.arr fruit_item
  ) as oranges,
  array(
    select
      if(
        json_extract_scalar(fruit_item, '$.apples') = '5',
        ifnull(json_extract_scalar(fruit_item, '$.pear'), '0'),
        '0'
      )
    from src.arr fruit_item
  ) as pear
from `project.dataset.table`,
unnest([struct(json_extract_array(json, '$.fruit') as arr)]) src
with output
When working with array_agg
an error is raised if an array in the final query result contains a
NULL element.
You can try using a string 'null' like this:
-- Substitute the literal string 'null' for missing values so ARRAY_AGG never
-- receives a real NULL element (which would raise an error in the result).
with `project.dataset.table` as (
  select '{"fruit":[{"apples":"5","oranges":"10","pear":"20"},{"apples":"5","oranges":"4"},{"apples":"5","oranges":"9","pear":"40"}]}' as json union all
  select '{"fruit":[{"lettuce":"7","kale": "8"}]}'
)
select
  json,
  if( regexp_contains(json, '"apples":"5"'),
      (SELECT ARRAY_AGG(ifnull(json_extract_scalar(fruit_item, '$.oranges'), 'null')) from UNNEST(json_extract_ARRAY(json, '$.fruit')) as fruit_item ),
      ['null']
  ) as oranges,
  if( regexp_contains(json, '"apples":"5"'),
      (SELECT ARRAY_AGG(ifnull(json_extract_scalar(fruit_item, '$.pear'), 'null')) from UNNEST(json_extract_ARRAY(json, '$.fruit')) as fruit_item ),
      ['null']
  ) as pear,
from `project.dataset.table`

Hierarchical JSON output from table

I've got this table structure
| User | Type | Data |
|------|---------|------|
| 1 | "T1" | "A" |
| 1 | "T1" | "B" |
| 1 | "T2" | "C" |
| 2 | "T1" | "D" |
I want to get a hierarchical JSON string returned from my query
{
"1": {
"T1": [
"A",
"B"
],
"T2": [
"C"
]
},
"2": {
"T1": [
"D"
]
}
}
So one entry for each User with a sub-entry for each Type and then a sub-entry for each Data
All I'm finding is the FOR JSON PATH, ROOT ('x') or AUTO statement but nothing that would make this hierarchical. Is this even possible out of the box? I couldn't find anything so I've experimented with (recursive) CTE but didn't get very far. I'd much appreciate if someone could just point me in the right direction.
I'm not sure that you can create JSON with variable key names using FOR JSON AUTO and FOR JSON PATH. I suggest the following solutions:
using FOR XML PATH to generate JSON with string manipulations
using STRING_AGG() to generate JSON with string manipulations for SQL Server 2017+
using STRING_AGG() and JSON_MODIFY() for SQL Server 2017+
Table:
-- Fixture: temp table with the question's sample rows.
CREATE TABLE #InputData (
    [User] int,
    [Type] varchar(2),
    [Data] varchar(1)
)

INSERT INTO #InputData ([User], [Type], [Data])
VALUES
    (1, 'T1', 'A'),
    (1, 'T1', 'B'),
    (1, 'T2', 'C'),
    (2, 'T1', 'D')
Statement using FOR XML PATH:
-- Build the hierarchical JSON bottom-up with FOR XML PATH('') string
-- aggregation: SecondLevelCTE makes one ["A","B"]-style array per
-- (User, Type); FirstLevelCTE wraps those into one {"Type":[...]} object per
-- User; the final SELECT wraps everything into the outer {"User":{...}}
-- document. Each STUFF(..., 1, 1, '') strips the leading comma that the
-- CONCAT(',...') aggregation produces.
;WITH SecondLevelCTE AS (
SELECT
d.[User],
d.[Type],
Json1 = CONCAT(
'[',
STUFF(
(
SELECT CONCAT(',"', [Data], '"')
FROM #InputData
WHERE [User] = d.[User] AND [Type] = d.[Type]
FOR XML PATH('')
), 1, 1, ''),
']')
FROM #InputData d
GROUP BY d.[User], d.[Type]
), FirstLevelCTE AS (
SELECT
d.[User],
Json2 = CONCAT(
'{',
STUFF(
(
SELECT CONCAT(',"', [Type], '":', [Json1])
FROM SecondLevelCTE
WHERE [User] = d.[User]
FOR XML PATH('')
), 1, 1, ''),
'}'
)
FROM SecondLevelCTE d
GROUP BY d.[User]
)
SELECT CONCAT(
'{',
STUFF(
(
SELECT CONCAT(',"', [User], '":', Json2)
FROM FirstLevelCTE
FOR XML PATH('')
), 1, 1, '') ,
'}'
)
Statement using STRING_AGG():
-- Same bottom-up construction using STRING_AGG() (SQL Server 2017+):
-- SecondLevelCTE builds the per-(User, Type) data array, FirstLevelCTE
-- joins the "Type":[...] pairs per User, and the outer SELECT assembles the
-- final {"User":{...}} document in one aggregation.
;WITH SecondLevelCTE AS (
SELECT
d.[User],
d.[Type],
Json1 = (
SELECT CONCAT('["', STRING_AGG([Data], '","'), '"]')
FROM #InputData
WHERE [User] = d.[User] AND [Type] = d.[Type]
)
FROM #InputData d
GROUP BY d.[User], d.[Type]
), FirstLevelCTE AS (
SELECT
d.[User],
Json2 = (
SELECT STRING_AGG(CONCAT('"', [Type], '":', [Json1]), ',')
FROM SecondLevelCTE
WHERE [User] = d.[User]
)
FROM SecondLevelCTE d
GROUP BY d.[User]
)
SELECT CONCAT('{', STRING_AGG(CONCAT('"', [User], '":{', Json2, '}'), ','), '}')
FROM FirstLevelCTE
Statement using STRING_AGG() and JSON_MODIFY():
-- Fold each (User, Type, Json) row into a single document with JSON_MODIFY,
-- using the quirky row-by-row variable assignment: each row first ensures
-- the "$.User" object exists, then sets its Type property to the pre-built
-- array. Fixed: '#json' → '@json' (T-SQL variable prefix; '#InputData' stays,
-- it is a genuine temp table).
DECLARE @json nvarchar(max) = N'{}'
SELECT
@json = JSON_MODIFY(
CASE
WHEN JSON_QUERY(@json, CONCAT('$."', [User] , '"')) IS NULL THEN JSON_MODIFY(@json, CONCAT('$."', [User] , '"'), JSON_QUERY('{}'))
ELSE @json
END,
CONCAT('$."', [User] , '".', [Type]),
JSON_QUERY(Json)
)
FROM (
SELECT
d.[User],
d.[Type],
Json = (
SELECT CONCAT('["', STRING_AGG([Data], '","'), '"]')
FROM #InputData
WHERE [User] = d.[User] AND [Type] = d.[Type]
)
FROM #InputData d
GROUP BY d.[User], d.[Type]
) t
Output:
{"1":{"T1":["A","B"],"T2":["C"]},"2":{"T1":["D"]}}
This isn't exactly what you want (I'm not great with FOR JSON) but it does get you close to the shape you need until something better comes along...
(https://jsonformatter.org/json-parser/974b6b)
use tempdb
GO
drop table if exists users
create table users (
[user] integer
, [type] char(2)
, [data] char(1)
)
insert into users
values (1, 'T1', 'A')
, (1, 'T1', 'B')
, (1, 'T2', 'C')
, (2, 'T1', 'D')
-- Triple self-join so FOR JSON AUTO nests one level per joined table:
-- user -> type -> data. DISTINCT removes the row duplication the joins
-- introduce. NOTE: FOR JSON AUTO names the nested arrays after the table
-- aliases ("two", "three"), so these aliases are part of the output shape —
-- renaming them changes the JSON keys.
select DISTINCT ONE.[user], two.[type], three.[data]
from users AS ONE
inner join users two
on one.[user] = two.[user]
inner join users three
on one.[user] = three.[user]
and two.[type] = three.[type]
for JSON AUTO

Passing parameter to a table-valued function

I am trying to pass a parameter value to a table-valued function which has four parameters and "returns table". However, I receive following error when I pass a parameter value to one of its varchar parameter:
Msg 8114, Level 16, State 5, Line 6 Error converting data type varchar
to bigint.
-- Fixed: '#Scenario1' → '@Scenario1' (T-SQL variable prefix).
declare @Scenario1 as varchar(30)
set @Scenario1 = '2017_B01'
select *
From [dbo].[fn_GetAEAssumptionFacts](@Scenario1,null,null,null) fng
Glimpse at function:
-- NOTE(review): snippet as posted — parameters use '#' where T-SQL
-- requires '@', and '.....' marks elided code from the question.
CREATE FUNCTION [dbo].[fn_GetAEAssumptionFacts]
(
#pScenarioName varchar(500) = NULL
,#pBuildingID varchar(500) = NULL
,#pLeaseID varchar(500) = NULL
,#pTenantName varchar(500) = NULL
)
RETURNS TABLE
AS
RETURN
select
.....
from ae11.dbo.rvw_FinancialLineItems fli
....
INNER JOIN ae11.dbo.rvw_Scenarios s on s.Id = pas.ScenarioId
left join
(select
externalID,
PropertyAssetId,
LeaseID,
BeginDate
from ae11.dbo.ivw_Leases
WHERE PropertyAssetID IN
(select ID from AE11.dbo.PropertyAssets where scenarioID =
-- BUG: this CASE mixes return types — the THEN branch yields ID (a numeric
-- id column) while the ELSE branch yields the varchar parameter. Data-type
-- precedence makes the CASE result numeric, so passing '2017_B01' raises
-- "Error converting data type varchar to bigint."
(CASE WHEN isnull(#pScenarioName, '') = ''
THEN (select ID from AEX.[dbo].[ConfigurationFieldTable]
where [Type] = 'Lease Connect Current Scenario' )
ELSE #pScenarioName
END)
)
) lea
ON lea.LeaseID = uni.ExternalID
AND lea.PropertyAssetID = uni.PropertyAssetId
where 1=1
......
-- Same mixed-type CASE pattern as above; same conversion failure applies.
AND s.id = (CASE WHEN isnull(#pScenarioName, '') = ''
THEN (select ID from AEX.[dbo].[ConfigurationFieldTable]
where [Type] = 'Lease Connect Current Scenario' )
ELSE #pScenarioName
END)
Here
(CASE WHEN isnull(#pScenarioName, '') = ''
THEN (select ID from AEX.[dbo].[ConfigurationFieldTable]
where [Type] = 'Lease Connect Current Scenario' )
ELSE #pScenarioName
END)
You are taking a value depending on @pScenarioName. This will either be the result of select ID from AEX.[dbo].[ConfigurationFieldTable] WHERE... or the content of @pScenarioName.
I assume that this ID is a bigint, while your @pScenarioName is a string. And the s.ID you want to compare it against - I don't know...
But - to be honest - my magic crystal ball is out for cleaning, and the information you provide is not enough.