Where clause on json data in Sql Server 2016 - json

I have a nvarchar(1000) field in my table and I am storing JSON data in that column.
eg :
CONTENT_RULE_ID CONTENT_RULE
1 {"EntityType":"Inquiry", "Values":[1,2]}
2 {"EntityType":"Inquiry", "Values":[1,3]}
3 {"EntityType":"Inquiry", "Values":[2,4]}
4 {"EntityType":"Inquiry", "Values":[5,6,1]}
6 {"EntityType":"Inquiry", "Values":[8,1]}
8 {"EntityType":"Inquiry", "Values":[10,12,11]}
From this, how can I get all the CONTENT_RULE_ID values whose "Values" array contains inquiry id 1, using the JSON functions in SQL Server?

-- Explode each JSON object into key/value rows (x); for the row holding the
-- "Values" array, explode the array itself (y) and keep rules containing 1.
SELECT c.*
FROM CONTENT_RULES AS c
CROSS APPLY OPENJSON(JSON_QUERY(content_rule, '$')) AS x
CROSS APPLY OPENJSON(x.[Value], '$') AS y
where x.[key]='Values' and y.[value]=1

@Harisyam, could you please try the following query:
-- The inquiry id to search for.
-- NOTE: T-SQL variables use the '@' sigil; the '#' in the original post is a
-- formatting artifact and does not compile.
declare @val int = 1
;with cte as (
-- explode every CONTENT_RULE JSON object into key/value rows
select *
from CONTENT_RULES
cross apply openjson (CONTENT_RULE, '$')
), list as (
-- keep only the "Values" arrays of rules whose EntityType is 'Inquiry',
-- with the surrounding [ ] brackets stripped off
select
CONTENT_RULE_ID, replace(replace([value],'[',''),']','') as [value]
from cte
where CONTENT_RULE_ID in (
select CONTENT_RULE_ID
from cte
where [key] = 'EntityType' and [value] = 'Inquiry'
)
and [key] = 'Values'
)
-- split each comma list into rows and keep rules containing @val
select
CONTENT_RULE_ID, s.value
from list
cross apply string_split([value],',') s
where s.value = @val
I used SQL string_split function to get inquiry values one by one
output is
A second query can be following one
-- Turn '[1,2]' into ',1,2,' so LIKE '%,1,%' can test array membership
-- without falsely matching ids such as 11 or 21.
select
CONTENT_RULE_ID
from CONTENT_RULES
cross apply openjson (CONTENT_RULE, '$')
where replace(replace(value,'[',','),']',',') like '%,1,%'
And maybe the most complete SQL query which requires OpenJSON support is as follows
-- Parse each JSON object with an explicit schema; [Values] is kept AS JSON
-- so the second OPENJSON can explode the array into one row per element.
-- NOTE(review): the FROM clause names the table `Content`; the question's
-- table is CONTENT_RULES - adjust to your schema.
select
content_rule_id,
[value]
from Content as c
cross apply openjson(c.CONTENT_RULE, '$') with (
EntityType nvarchar(100),
[Values] nvarchar(max) as json
) as e
cross apply openjson([Values], '$') as v

My case is similar but instead of an integer array, mine is an array of complex type. Here is my code based on David Browne's solution
-- Array of complex objects: project each element of $.Details into typed
-- columns, then keep only outer rows where at least one element matches.
SELECT *
FROM TableName AS T
WHERE EXISTS
(
SELECT *
FROM OPENJSON(T.JsonColumn, '$.Details')
WITH
(
OrderNumber VARCHAR(200) '$.OrderNumber',
Quantity INT '$.Quantity'
)
WHERE OrderNumber = '1234567'
);
In your case:
-- For a plain integer array, OPENJSON over '$.Values' yields one row per
-- element in its [value] column; EXISTS keeps rules whose array contains 1.
SELECT C.*
FROM CONTENT_RULES AS C
WHERE EXISTS
(
SELECT *
FROM OPENJSON(C.CONTENT_RULE, '$.Values')
WHERE value = 1
);

SQL Server 2016 can parse JSON natively.
Try this:
-- OPENJSON without a WITH clause exposes only [key]/[value]/[type] columns,
-- so the original x.[EntityType] / x.[Values] references would not compile;
-- parse the object with an explicit schema instead.  (The original path
-- '$.content_rule' also named a property that does not exist in the JSON.)
SELECT c.content_rule_ID, y.[key], y.[value]
FROM content_rules AS c
CROSS APPLY OPENJSON(c.content_rule, '$')
WITH (
EntityType nvarchar(100),
[Values] nvarchar(max) AS JSON
) AS x
CROSS APPLY OPENJSON(x.[Values], '$') AS y
where y.[value] = 1
and x.[EntityType] = 'Inquiry';

Related

How to repeat a row N times based on a value within the row

I have a csv file containing three columns, class,malecount and femalecount as an input table.
My output should contain two columns named Class and Gender.
The malecount and femalecount values indicates how many times a row should be repeated. i.e. for Class = A and malecount=2, the row (A,M) should appear twice, and for Class = C and femalecount=3, the row (C,F) should appear three times. Check the following image to see the full output.
enter image description here
DDL & DML for the table:
-- Sample schema and data (MySQL).
create table mytable (class text, malecount int, femalecount int);
insert into mytable (class, malecount, femalecount) values
( 'A',2,1),
('B',3,1),
('C',0,3),
('D',2,4);
WITH RECURSIVE
-- define maximal amount of rows per class per gender to be generated
-- (fixed to reference `mytable`, the table created by the DDL above;
--  the original referenced a non-existent table `test`)
cte1 AS ( SELECT MAX(GREATEST(malecount, femalecount)) max_count
FROM mytable),
-- generate natural numbers till max. amount found above
cte2 AS ( SELECT 1 num
UNION ALL
SELECT num+1
FROM cte1
CROSS JOIN cte2
WHERE cte2.num <= cte1.max_count)
-- generate rows for male: one row per number <= malecount
SELECT mytable.class, 'm' gender
FROM mytable
JOIN cte2 ON cte2.num <= mytable.malecount
UNION ALL
-- generate rows for female
SELECT mytable.class, 'f'
FROM mytable
JOIN cte2 ON cte2.num <= mytable.femalecount
-- final sorting
ORDER BY gender DESC, class
https://dbfiddle.uk/?rdbms=mysql_8.0&fiddle=694dbb214e4c0cd5524800c56a02dc65
I used the LPAD function and then trimmed off the last comma
from there I had a comma delimited string like M,M,M and F,F
then I used json_table to extract the M,M,M into three rows and the F,F into two rows etc.
here is the fiddle https://www.db-fiddle.com/f/jEXes6AttKvc9GKx1mKY2/1
Schema (MySQL v8.0)
-- Same sample schema/data as above, repeated for the fiddle link.
create table mytable (class text, malecount int, femalecount int);
insert into mytable (class, malecount, femalecount) values
( 'A',2,1),
('B',3,1),
('C',0,3),
('D',2,4);
Query #1
-- t: build 'M,M,...,' / 'F,F,...,' strings whose token count equals each count
with t as (select class,
LPAD(' ', malecount * 2 + 1, 'M,') malecount,
LPAD(' ', femalecount * 2 + 1, 'F,') femalecount
from mytable),
-- t2: drop the trailing comma from each list
t2 as(
select class,
LEFT(malecount,length(malecount)-1) malecount,
-- bug fix: trim femalecount by its OWN length (the original used
-- length(malecount) here, truncating the female list incorrectly)
LEFT(femalecount,length(femalecount)-1) femalecount
from t)
-- shred each comma list into rows via json_table
select t2.class, j.name
from t2
join json_table(
replace(json_array(t2.malecount), ',', '","'),
'$[*]' columns (name varchar(50) path '$')
) j where j.name = 'M'
union all
select t2.class, k.name
from t2
join json_table(
replace(json_array(t2.femalecount), ',', '","'),
'$[*]' columns (name varchar(50) path '$')
) k where k.name = 'F';
class
name
A
M
A
M
B
M
B
M
B
M
D
M
D
M
A
F
B
F
D
F
D
F
View on DB Fiddle
You can use Recursive CTE as the following:
-- Unpivot male/female counts into (class, cnt, gender) rows, then let the
-- recursive member re-emit each row until `repeats` reaches cnt-1.
with recursive cte as
(
select *,0 as repeats from
(select class, malecount as cnt, 'M' as Gender from Tbl
union
select class, femalecount as cnt, 'F' as Gender from Tbl
) D
union all
select class,cnt,Gender, repeats+1 from cte
where repeats<cnt-1
)
select class, gender from cte
where cnt>0
order by gender desc,class;
See a demo from db-fiddle.
You can use the CASE expression:
CASE input_expression
    WHEN when_expression THEN result_expression [ ...n ]
    [ ELSE else_result_expression ]
END

Extract a field from a column (in json) and Concat them in SQL Server

I want to extract and concat a field in a json column .
I have a table with two field :
id
JsonColumn
In the json Column i can have x object which contains 2 fields (Name and Type)
For Example, in my table I can have :
What i want to do is to extract and concat the field Name in a json colum.
So i will have :
Don't hesitate to share your opinion.
(I was thinking about a big loop with openJson but i fear that the query will be very long after that).
You need to parse the JSON content using OPENJSON() to extract the "Name" key and then aggregate the values. For SQL Server 2016 you need to use FOR XML PATH for aggregation.
Table:
-- Build the sample Data table from inline VALUES (SELECT ... INTO).
SELECT Id, JsonColumn
INTO Data
FROM (VALUES
(1, '[{"Name": "Matthew", "Type":"Public"}, {"Name": "Rachel", "Type":"Private"}]'),
(2, '[{"Name": "Sample", "Type":"Private"}]')
) v (Id, JsonColumn)
Statement:
-- SQL Server 2016 string aggregation: STUFF + FOR XML PATH('') concatenates
-- the extracted names per row; STUFF removes the leading comma.
-- (STRING_AGG would be simpler, but requires SQL Server 2017+.)
SELECT
Id,
Name = STUFF(
(
SELECT ',' + Name
FROM OPENJSON(JsonColumn) WITH (Name varchar(100) '$.Name')
FOR XML PATH('')
), 1, 1, ''
)
FROM Data
Result:
Id JsonColumn
------------------
1 Matthew,Rachel
2 Sample
Rough approach (currently not enough time for me to provide working SQL to code).
To do it in one step (best as view) use the CTE approach to stagger the steps. This generates more code and over time allows easier amendments. It is a trade off.
Recursive approach
First step:
Extract relational records with ID and names. Use OPENJSON WITH a defined table structure where only the Name is extracted (rest can be ignored or left as additional JSON).
Second step:
Use the output from first step and turn into recursive concatenation. Using a variable to concatenate to forces the use of a procedure. Doing it in a view requires definition of anchor and end conditions. Not quite sure on this as it is tricky.
In a CTE part this requires an anchor element union'ed with all other elements. In effect this groups by the selected key field(s).
Third step:
Output of the finished recursion by key field(s).
Quick demo code
-- NOTE: table variables use the '@' sigil; the '#' throughout the original
-- post is a formatting artifact (DECLARE #Demo TABLE does not compile).
DECLARE
@Demo TABLE
(
id_col tinyint identity(1,1),
dsc_json nvarchar(max)
)
;
INSERT INTO
@Demo
(
dsc_json
)
SELECT N'[{"Name":"Celery","Type":"Vegetable"}, {"Name":"Tomato","Type":"Fruit"}]'
UNION
SELECT N'[{"Name":"Potato","Type":"Vegetable"}]'
UNION
SELECT N'[{"Name":"Cherry","Type":"Fruit"}, {"Name":"Apple","Type":"Fruit"}]'
;
SELECT
*
FROM
@Demo
;
-- extract JSON: one row per (id, name), numbered ascending and descending
-- within each id so the recursion below has start/end markers
SELECT
demo.id_col,
jsond.dsc_name,
Row_number() OVER ( PARTITION BY demo.id_col ORDER BY jsond.dsc_name ASC ) AS val_row_asc,
Row_number() OVER ( PARTITION BY demo.id_col ORDER BY jsond.dsc_name DESC ) AS val_row_desc
FROM
@Demo AS demo
CROSS APPLY OPENJSON( demo.dsc_json )
WITH
(
dsc_name nvarchar(100) '$.Name'
) AS jsond
;
WITH
cte_json
(
id_col,
dsc_name,
val_row_asc,
val_row_desc
)
AS
(
SELECT
demo.id_col,
jsond.dsc_name,
Cast( Row_number() OVER ( PARTITION BY demo.id_col ORDER BY jsond.dsc_name ASC ) AS int ) AS val_row_asc,
Cast( Row_number() OVER ( PARTITION BY demo.id_col ORDER BY jsond.dsc_name DESC ) AS int ) AS val_row_desc
FROM
@Demo AS demo
CROSS APPLY OPENJSON( demo.dsc_json )
WITH
(
dsc_name nvarchar(100) '$.Name'
) AS jsond
),
cte_concat
(
id_col,
dsc_names,
val_row_asc,
val_row_desc
)
AS
( -- anchor first
-- - empty string per ID
SELECT
anchor.id_col,
Cast( N'' AS nvarchar(500) ) AS names,
Cast( 0 AS int) AS val_row_asc,
Cast( -1 AS int ) AS val_row_desc
FROM
cte_json AS anchor
WHERE -- anchor criteria
val_row_asc = 1
UNION ALL
-- recursive member: append the next name (by ascending row number)
SELECT
anchor.id_col,
Cast( anchor.dsc_names + N', ' + element.dsc_name AS nvarchar(500) ) AS names,
element.val_row_asc,
element.val_row_desc
FROM
cte_json AS element
INNER JOIN cte_concat AS anchor
ON anchor.id_col = element.id_col
AND anchor.val_row_asc = element.val_row_asc -1
)
SELECT
cte.id_col,
-- strip the leading ', ' separator (2 characters)
Right( cte.dsc_names, Len( cte.dsc_names ) -2 ) AS dsc_names,
cte.val_row_desc
FROM
cte_concat AS cte
WHERE -- only latest result
cte.val_row_desc = 1
ORDER BY
cte.id_col ASC
;
The additional row numbers allow:
define start and end point for the recursive connection = val_row_asc
define "definition" of latest result = val_row_desc
Stuff ... for XML Path
This approach works on all versions and is much easier to read than the recursive part thanks to Zhorov's answer. Works on the base laid by the first part of the code above (or just straight afterwards).
WITH
cte_json
(
id_col,
dsc_name,
val_row_asc,
val_row_desc
)
AS
(
SELECT
demo.id_col,
jsond.dsc_name,
Cast( Row_number() OVER ( PARTITION BY demo.id_col ORDER BY jsond.dsc_name ASC ) AS int ) AS val_row_asc,
Cast( Row_number() OVER ( PARTITION BY demo.id_col ORDER BY jsond.dsc_name DESC ) AS int ) AS val_row_desc
FROM
@Demo AS demo -- table variables use '@' ('#' in the post is a formatting artifact)
CROSS APPLY OPENJSON( demo.dsc_json )
WITH
(
dsc_name nvarchar(100) '$.Name'
) AS jsond
)
-- STUFF + FOR XML PATH('') aggregation: concatenate names per id,
-- then strip the leading comma
SELECT
cte_outer.id_col,
Stuff(
( SELECT
',' + cte_inner.dsc_name
FROM
cte_json AS cte_inner
WHERE
cte_inner.id_col = cte_outer.id_col
FOR XML PATH('')
), 1, 1, ''
) AS dsc_names
FROM
cte_json AS cte_outer
GROUP BY
cte_outer.id_col
;
String_agg
This approach only works with SQL Server 2017 onwards. It is a continuation to the code above.
WITH
cte_json
(
id_col,
dsc_name,
val_row_asc,
val_row_desc
)
AS
(
SELECT
demo.id_col,
jsond.dsc_name,
Cast( Row_number() OVER ( PARTITION BY demo.id_col ORDER BY jsond.dsc_name ASC ) AS int ) AS val_row_asc,
Cast( Row_number() OVER ( PARTITION BY demo.id_col ORDER BY jsond.dsc_name DESC ) AS int ) AS val_row_desc
FROM
@Demo AS demo -- table variables use '@' ('#' in the post is a formatting artifact)
CROSS APPLY OPENJSON( demo.dsc_json )
WITH
(
dsc_name nvarchar(100) '$.Name'
) AS jsond
)
-- STRING_AGG does the concatenation directly (SQL Server 2017+)
SELECT
cte.id_col,
String_agg( cte.dsc_name, ',' ) AS dsc_names
FROM
cte_json AS cte
GROUP BY
cte.id_col
;

Using FOR JSON and CTE and store it in a variable

How can I generate a JSON in T-SQL using FOR JSON from a select statement that uses a Common Table Expression (CTE) and then store it in a local variable?
Without a CTE it could look like this:
-- T-SQL variables use '@'; the '#' in the original post is a formatting
-- artifact and does not compile.
DECLARE @var NVARCHAR(MAX) = (
SELECT x.Val
FROM (VALUES (1)) AS x(Val)
FOR JSON AUTO
)
SELECT @var
But what is the syntax if a CTE is used, like the following?
-- The question's snippet: a WITH clause cannot appear inside a
-- "DECLARE @var = ( ... )" initializer, hence the question.
WITH y AS (
SELECT Val
FROM (VALUES (1)) AS _(Val)
)
SELECT x.Val, y.Val ValY
FROM (VALUES (1)) AS x(Val)
JOIN y ON y.Val = x.Val
FOR JSON AUTO
Surrounding it with DECLARE @var NVARCHAR(MAX) = ( ... ) does not work.
Might be something like this?
-- '@' sigils restored ('#' in the post is a formatting artifact).
DECLARE @json NVARCHAR(MAX);
WITH y AS (
SELECT Val
FROM (VALUES (1)) AS _(Val)
)
-- assign the FOR JSON result inside the CTE's outer SELECT
SELECT @json=
(
SELECT x.Val, y.Val ValY
FROM (VALUES (1)) AS x(Val)
JOIN y ON y.Val = x.Val
FOR JSON AUTO
);
SELECT @json;
Additionally you can place a CTE as a sub-query instead of the "WITH". The CTE is not much more than "syntactic sugar" actually to avoid repeated sub-queries and to get your SQL better readable. You can use this for the same:
-- Alternative: inline the CTE as a sub-query so the whole statement fits
-- inside the variable initializer. ('@' sigils restored.)
DECLARE @json NVARCHAR(MAX)=
(
SELECT x.Val, y.Val ValY
FROM (VALUES (1)) AS x(Val)
JOIN (
SELECT Val
FROM (VALUES (1)) AS _(Val)
) y ON y.Val = x.Val
FOR JSON AUTO
);
SELECT @json

Store values in different variables in SQL, separated by (Comma) ","

I need to separate values and store them in different variables in SQL,
for example
a='3100,3101,3102,....'
And the output should be
x=3100
y=3101
z=3102
.
.
.
-- XML-based string splitter: wraps each token in <a> elements and shreds
-- them with .nodes(). ('@' sigils restored; '#' in the post is an artifact.)
-- NOTE(review): tokens containing XML-special characters (<, >, &) would
-- break the CAST to xml - acceptable for plain numeric lists.
create function [dbo].[udf_splitstring] (@tokens varchar(max),
@delimiter varchar(5))
returns @split table (
token varchar(200) not null )
as
begin
declare @list xml
select @list = cast('<a>'
+ replace(@tokens, @delimiter, '</a><a>')
+ '</a>' as xml)
insert into @split
(token)
select ltrim(t.value('.', 'varchar(200)')) as data
from @list.nodes('/a') as x(t)
return
end
GO
-- Example call; ROW_NUMBER gives each token its ordinal. ('@' restored.)
declare @cad varchar(100)='3100,3101,3102'
select *,ROW_NUMBER() over (order by token ) as rn from udf_splitstring(@cad,',')
token rn
3100 1
3101 2
3102 3
The results of the Parse TVF can easily be incorporated into a JOIN, or an IN
-- Example call to the tally-table parser below. ('@' sigil restored.)
Declare @a varchar(max)='3100,3101,3102'
Select * from [dbo].[udf-Str-Parse](@a,',')
Returns
RetSeq RetVal
1 3100
2 3101
3 3102
The UDF if needed (much faster than recursive, loops, and xml)
-- Tally-table splitter: fully set-based (no loops, recursion, or XML).
-- ('@' sigils restored; '#' in the post is a formatting artifact.)
CREATE FUNCTION [dbo].[udf-Str-Parse] (@String varchar(max),@Delimiter varchar(25))
Returns Table
As
Return (
-- cte1/cte2: tally of 1..len(@String); cte3: position after each delimiter;
-- cte4: start position and length of every token
with cte1(N) As (Select 1 From (Values(1),(1),(1),(1),(1),(1),(1),(1),(1),(1)) N(N)),
cte2(N) As (Select Top (IsNull(DataLength(@String),0)) Row_Number() over (Order By (Select NULL)) From (Select N=1 From cte1 a,cte1 b,cte1 c,cte1 d) A ),
cte3(N) As (Select 1 Union All Select t.N+DataLength(@Delimiter) From cte2 t Where Substring(@String,t.N,DataLength(@Delimiter)) = @Delimiter),
cte4(N,L) As (Select S.N,IsNull(NullIf(CharIndex(@Delimiter,@String,s.N),0)-S.N,8000) From cte3 S)
Select RetSeq = Row_Number() over (Order By A.N)
,RetVal = LTrim(RTrim(Substring(@String, A.N, A.L)))
From cte4 A
);
--Original Source http://www.sqlservercentral.com/articles/Tally+Table/72993/
--Much faster than str-Parse, but limited to 8K
--Select * from [dbo].[udf-Str-Parse-8K]('Dog,Cat,House,Car',',')
--Select * from [dbo].[udf-Str-Parse-8K]('John||Cappelletti||was||here','||')
I suggest you to use following query, it's much faster than other functions like cross apply and udf.
-- Numbers-table split: pad the CSV with leading/trailing commas, join each
-- comma position from the numbers table TB (built below), and SUBSTRING out
-- the token that follows each comma.
SELECT
Variables
,S_DATA
FROM (
SELECT
Variables
,CASE WHEN LEN(LIST2)>0 THEN LTRIM(RTRIM(SUBSTRING(LIST2, NUMBER+1, CHARINDEX(',', LIST2, NUMBER+1)-NUMBER - 1)))
ELSE NULL
END AS S_DATA
,NUMBER
FROM(
SELECT Variables
,','+COMMA_SEPARETED_COLUMN+',' LIST2
FROM Tb1
)DT
LEFT OUTER JOIN TB N ON (N.NUMBER < LEN(DT.LIST2)) OR (N.NUMBER=1 AND DT.LIST2 IS NULL)
WHERE SUBSTRING(LIST2, NUMBER, 1) = ',' OR LIST2 IS NULL
) DT2
WHERE S_DATA<>''
and you should also create the numbers table 'TB' (with a NUMBER column) before running the above query:
-- Populate the numbers table TB with 0..999 (used by the split query above).
-- ('@' sigil restored; '#' in the post is a formatting artifact.)
CREATE TABLE TB (Number INT)
DECLARE @I INT=0
WHILE @I<1000
BEGIN
INSERT INTO TB VALUES (@I)
SET @I=@I+1
END

Using CROSS APPLY for more than one column

Day #3 with SQL Server.
I am trying to combine 2 columns of delimited data into one output from a Table Valued Function. Here is my data:
I would like the data to be processed and placed into a table in the following format:
I am currently trying to use this CROSS APPLY TSQL statement, but I don't know what I'm doing.
-- NOTE(review): two independent CROSS APPLYs produce a Cartesian product of
-- the two split results; the revised query further down pairs tokens by
-- their position instead.
USE [Metrics]
INSERT INTO dbo.tblSplitData(SplitKey, SplitString, SplitValues)
SELECT d.RawKey, c.*, e.*
FROM dbo.tblRawData d
CROSS APPLY dbo.splitstringcomma(d.DelimitedString) c, dbo.splitstringcomma(d.DelimitedValues) e
My research on CROSS APPLY has broad context, and I don't understand how it should be applied in this scenario. Do I need a subquery with an additional CROSS APPLY and a join to combine the returns from the two Table Valued Functions?
Here is the split function I was using originally (I can't remember the author to credit them):
-- Loop-based splitter: repeatedly take the text before the next delimiter,
-- insert it, and shrink the input. ('@' sigils restored; '#' in the post is
-- a formatting artifact.)
CREATE FUNCTION [dbo].[splitstring] ( @stringToSplit VARCHAR(MAX), @Delimiter CHAR(1))
RETURNS
@returnList TABLE ([Name] [nvarchar] (500))
AS
BEGIN
DECLARE @name NVARCHAR(255)
DECLARE @pos INT
WHILE CHARINDEX(@Delimiter, @stringToSplit) > 0
BEGIN
SELECT @pos = CHARINDEX(@Delimiter, @stringToSplit)
SELECT @name = SUBSTRING(@stringToSplit, 1, @pos-1)
INSERT INTO @returnList
SELECT @name
SELECT @stringToSplit = SUBSTRING(@stringToSplit, @pos+1, LEN(@stringToSplit)-@pos)
END
-- last (or only) token
INSERT INTO @returnList
SELECT @stringToSplit
RETURN
END
Edit & Revised Query
-- Revised approach: split each column in its own sub-query, then pair the
-- tokens by RawKey and by position (rn) within each delimited string.
USE [Metrics]
INSERT INTO dbo.tblSplitData(SplitKey, SplitString, SplitValues)
SELECT s.RawKey, s.SplitString, v.SplitValues
FROM (
SELECT d.RawKey, d.DelimitedString,
c.item SplitString, c.rn
FROM dbo.tblRawData d
CROSS APPLY dbo.splitstring(d.DelimitedString, ',') c
) s
INNER JOIN
(
SELECT d.RawKey, d.DelimitedValues,
c.item SplitValues, c.rn
FROM dbo.tblRawData d
CROSS APPLY dbo.splitstring(d.DelimitedValues, ',') c
) v
on s.RawKey = v.RawKey
and s.rn = v.rn;
It might be easier to answer this if we could see your split string function. My answer is using a version of my split function that I have.
I would include in your split function a row number that you can use to JOIN the split string and the split values.
Split function:
-- Loop-based splitter that also returns each token's ordinal (rn) so two
-- split columns can be re-paired by position. ('@' sigils restored; '#' in
-- the post is a formatting artifact.)
CREATE FUNCTION [dbo].[Split](@String varchar(MAX), @Delimiter char(1))
returns @temptable TABLE (items varchar(MAX), rn int)
as
begin
declare @idx int
declare @slice varchar(8000)
declare @rn int = 1 -- row number that increments with each value in the delimited string
select @idx = 1
if len(@String)<1 or @String is null return
while @idx!= 0
begin
set @idx = charindex(@Delimiter,@String)
if @idx!=0
set @slice = left(@String,@idx - 1)
else
set @slice = @String
if(len(@slice)>0)
insert into @temptable(Items, rn) values(@slice, @rn)
set @String = right(@String,len(@String) - @idx)
set @rn = @rn +1
if len(@String) = 0 break
end
return
end;
Then if you have multiple columns to split, you could use a query similar to the following:
-- Split both delimited columns separately, then re-pair tokens by RawKey,
-- by the source string, and by ordinal (rn) from the Split function above.
INSERT INTO dbo.tblSplitData(SplitKey, SplitString, SplitValues)
select s.rawkey,
s.splitstring,
v.splitvalues
from
(
SELECT d.RawKey, d.delimitedstring, d.delimitedvalues,
c.items SplitString,
c.rn
FROM dbo.tblRawData d
CROSS APPLY dbo.Split(d.DelimitedString, ',') c
) s
inner join
(
SELECT d.RawKey, d.delimitedstring, d.delimitedvalues,
c.items SplitValues,
c.rn
FROM dbo.tblRawData d
CROSS APPLY dbo.Split(d.DelimitedValues, ',') c
) v
on s.rawkey = v.rawkey
and s.delimitedstring = v.delimitedstring
and s.rn = v.rn;
See SQL Fiddle with Demo
This uses two subqueries that generate the list of split values, then they are joined using the row number created by the split function.
Since you are on Sql Server 2008 .You can do this without a UDF using XML.
-- UDF-free XML split (SQL Server 2008): convert each CSV into <d>..</d>
-- nodes, shred with .nodes(), number elements per source row, and join the
-- two CTEs on TableID + RN to pair string/value tokens positionally.
-- NOTE(review): 'DelemitedString' (sic) matches the linked fiddle's column
-- spelling - keep consistent with your actual schema.
;WITH CTE1 AS
(
SELECT *
,RN= Row_Number() OVER( Partition BY DelemitedString,DelimitedValues,RawKey,TableID ORDER BY TableID)
FROM
(
SELECT *
,DelimitedStringXML = CAST('<d>'+REPLACE(DelemitedString,',','</d><d>')+'</d>' AS XML)
,DelimitedValueXML = CAST('<d>'+REPLACE(DelimitedValues,',','</d><d>')+'</d>' AS XML)
FROM dbo.tblRawData
) as t
Cross Apply
(
SELECT y.value('.', 'VARCHAR(30)') AS SplitString FROM DelimitedStringXML.nodes('//d') as x(y)
) as b
)
,CTE2 AS
(
SELECT *
,RN= Row_Number() OVER( Partition BY DelemitedString,DelimitedValues,RawKey,TableID ORDER BY TableID)
FROM
(
SELECT *
,DelimitedStringXML = CAST('<d>'+REPLACE(DelemitedString,',','</d><d>')+'</d>' AS XML)
,DelimitedValueXML = CAST('<d>'+REPLACE(DelimitedValues,',','</d><d>')+'</d>' AS XML)
FROM dbo.tblRawData
) as t
CROSS APPLY
(
SELECT h.value('.', 'VARCHAR(30)') AS SplitValue FROM DelimitedValueXML.nodes('//d') as g(h)
) as c
)
SELECT a.RawKey,a.SplitString,b.SplitValue
FROM CTE1 as a
INNER JOIN CTE2 as b
on a.TableID= b.TableID
AND a.RN = b.RN
Here is SQLFiddle Demo