Find matching item in JSON array

I have a SQL table called 'Interactions' and a column called Events that holds an array of JSON data. I am looking for any row where any item in the Events array has #odata.type = #Sitecore.XConnect.Goal:
[
  {
    "#odata.type":"#Sitecore.XConnect.Collection.Model.PageViewEvent",
    "CustomValues":[],
    "DefinitionId":"9326cb1e-cec8-48f2-9a3e-91c7dbb2166c",
    "ItemId":"5ae02a76-ac59-4dbb-913f-3b2e51d92bae",
    "Id":"b067f72f-1d60-4773-a2e8-9d23770fea54",
    "Timestamp":"2019-11-11T17:18:31.2206225Z",
    "ItemLanguage":"en",
    "ItemVersion":1,
    "Url":"/renewal-confirmation",
    "SitecoreRenderingDevice":{
      "Id":"fe5d7fdf-89c0-4d99-9aa3-b5fbd009c9f3",
      "Name":"Default"
    }
  },
  {
    "#odata.type":"#Sitecore.XConnect.Goal",
    "CustomValues":[],
    "DataKey":"/sitecore/content/Client/home/renewal-confirmation",
    "DefinitionId":"c0bc91b9-b5fc-4faa-bc12-3dba8bbae44f",
    "ItemId":"5ae02a76-ac59-4dbb-913f-3b2e51d92bae",
    "EngagementValue":100,
    "Id":"e7031fb4-ce57-459f-a9f1-2682748d3431",
    "ParentEventId":"b067f72f-1d60-4773-a2e8-9d23770fea54",
    "Timestamp":"2019-11-11T17:18:31.2518732Z"
  }
]
What I am currently trying is this, but it returns no results:
select *
from [Interactions] I
where exists
(
    select *
    from openjson(I.Events, '$."#odata.type"')
    where value = '#Sitecore.XConnect.Goal'
)
If I use this instead, I can get any row where the first item in the array has #odata.type = #Sitecore.XConnect.Goal. That works, but I want to match any item in the array, not just the first:
SELECT *
FROM [Interactions]
WHERE JSON_VALUE([Events], '$[0]."#odata.type"') = '#Sitecore.XConnect.Goal'

One possible approach is to parse the JSON column using OPENJSON() with an explicit schema (a WITH clause with column definitions). With this approach you can filter the Interactions table and get information from the JSON data at the same time.
Table:
CREATE TABLE Interactions (
    Events nvarchar(max)
)
INSERT INTO Interactions (Events)
VALUES (N'[
  {
    "#odata.type":"#Sitecore.XConnect.Collection.Model.PageViewEvent",
    "CustomValues":[],
    "DefinitionId":"9326cb1e-cec8-48f2-9a3e-91c7dbb2166c",
    "ItemId":"5ae02a76-ac59-4dbb-913f-3b2e51d92bae",
    "Id":"b067f72f-1d60-4773-a2e8-9d23770fea54",
    "Timestamp":"2019-11-11T17:18:31.2206225Z",
    "ItemLanguage":"en",
    "ItemVersion":1,
    "Url":"/renewal-confirmation",
    "SitecoreRenderingDevice":{
      "Id":"fe5d7fdf-89c0-4d99-9aa3-b5fbd009c9f3",
      "Name":"Default"
    }
  },
  {
    "#odata.type":"#Sitecore.XConnect.Goal",
    "CustomValues":[],
    "DataKey":"/sitecore/content/Client/home/renewal-confirmation",
    "DefinitionId":"c0bc91b9-b5fc-4faa-bc12-3dba8bbae44f",
    "ItemId":"5ae02a76-ac59-4dbb-913f-3b2e51d92bae",
    "EngagementValue":100,
    "Id":"e7031fb4-ce57-459f-a9f1-2682748d3431",
    "ParentEventId":"b067f72f-1d60-4773-a2e8-9d23770fea54",
    "Timestamp":"2019-11-11T17:18:31.2518732Z"
  }
]')
Statement:
SELECT *
FROM Interactions i
CROSS APPLY OPENJSON(i.Events) WITH (
    ODataType nvarchar(100) '$."#odata.type"'
    -- and additional column definitions
) j
WHERE j.ODataType = '#Sitecore.XConnect.Goal'
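Note that CROSS APPLY returns one row per matching array element, so an interaction containing more than one goal event will appear once per goal. If you want at most one row per interaction, wrap the lookup in an EXISTS, as the next answer does.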

Your original query is close, but it doesn't work because the path you give OPENJSON is evaluated against the root of the document, which is an array with no #odata.type property, and OPENJSON can only expand objects and arrays, not scalar values. You want something like this:
SELECT *
FROM [interactions]
WHERE EXISTS (
    SELECT 1
    FROM OPENJSON([events]) WITH (
        [#odata.type] NVARCHAR(MAX)
    )
    WHERE [#odata.type] = '#Sitecore.XConnect.Goal'
)
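If you would rather not declare columns at all, OPENJSON's default schema returns one row per array element with key, value, and type columns, and JSON_VALUE can then probe each element. A sketch of the same filter written that way (same table and data as above):
SELECT *
FROM [Interactions] I
WHERE EXISTS (
    SELECT 1
    FROM OPENJSON(I.[Events])  -- default schema: one row per array element
    WHERE JSON_VALUE([value], '$."#odata.type"') = '#Sitecore.XConnect.Goal'
)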

Related

How can I filter array values in JSON in SQL Server

How can I filter an array value without specifying the index in the array?
This is my array:
{
  "level1":{
    "level2":[
      {
        "level3":"test",
      }
    ]
  }
}
I want to retrieve all rows that contain a level3 with the value test.
Something like this:
select * from Documents
where Json_Value(DocumentInfo, '$.level1.level2[X].level3') = 'test'
Is this not possible in SQL Server?
Guessing a bit here, but use an EXISTS? This also assumes the JSON is valid, which the JSON in your question is not (it has a trailing comma):
SELECT * --Should be a distinct column list
FROM dbo.Documents D
WHERE EXISTS (SELECT 1
              FROM OPENJSON(D.DocumentInfo)
                   WITH (level1 nvarchar(MAX) AS JSON) L1
                   CROSS APPLY OPENJSON(L1.level1)
                   WITH (level2 nvarchar(MAX) AS JSON) L2
                   CROSS APPLY OPENJSON(L2.level2)
                   WITH (level3 varchar(10)) L3
              WHERE L3.level3 = 'test');
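For reference, a minimal setup to test that against, with the trailing comma removed so the JSON parses (table and column names assumed from the question):
CREATE TABLE dbo.Documents (DocumentInfo nvarchar(max));

INSERT INTO dbo.Documents (DocumentInfo)
VALUES (N'{"level1":{"level2":[{"level3":"test"}]}}');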

Query SQL database with JSON Value

Here is my JSON:
[{"Key":"schedulerItemType","Value":"schedule"},{"Key":"scheduleId","Value":"82"},{"Key":"scheduleEventId","Value":"-1"},{"Key":"scheduleTypeId","Value":"2"},{"Key":"scheduleName","Value":"Fixed Schedule"},{"Key":"moduleId","Value":"5"}]
I want to query the database by the FileMetadata column.
I've tried this:
SELECT * FROM FileSystemItems WHERE JSON_VALUE(FileMetadata, '$.Key') = 'scheduleId' and JSON_VALUE(FileMetadata, '$.Value') = '82'
but it doesn't work!
I had it working with just a dictionary of key/value pairs, but I needed to return the data differently, so I am now storing it in the JSON as objects with explicit Key and Value properties.
What am I doing wrong?
With the sample data given you'd have to supply an array index to query the second element (index 1; JSON array indexes are 0-based), e.g.:
select *
from dbo.FileSystemItems
where json_value(FileMetadata, '$[1].Key') = 'scheduleId'
and json_value(FileMetadata, '$[1].Value') = '82'
If the scheduleId key can appear at arbitrary positions in the array then you can restructure the query to use OPENJSON instead, e.g.:
select *
from dbo.FileSystemItems
cross apply openjson(FileMetadata) with (
    [Key] nvarchar(50) N'$.Key',
    Value nvarchar(50) N'$.Value'
) j
where j.[Key] = N'scheduleId'
  and j.Value = N'82'
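If a row should appear only once even when the filter could match multiple elements, the same OPENJSON lookup can sit inside an EXISTS instead of a CROSS APPLY (a sketch reusing the column definitions above):
select *
from dbo.FileSystemItems f
where exists (
    select 1
    from openjson(f.FileMetadata) with (
        [Key] nvarchar(50) N'$.Key',
        Value nvarchar(50) N'$.Value'
    ) j
    where j.[Key] = N'scheduleId'
      and j.Value = N'82'
)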

How to retrieve a value in a JSON object in SQLite when the key is an empty string?

I am processing an SQLite table that contains JSON objects. These objects have keys that are empty strings. How can I retrieve the value? For example:
select json_extract('{"foo": "bar", "":"empty"}', '$.foo') as data;
-result: "bar"
How can I retrieve "empty"?
Using your example:
sqlite> SELECT value FROM json_each('{"foo":"bar","":"empty"}') WHERE key = '';
value
----------
empty
As part of a larger query from a table:
SELECT (SELECT j.value FROM json_each(t.your_json_column) AS j WHERE j.key = '') AS data
FROM your_table AS t;
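And to filter whole rows on the empty-key value rather than just project it, the same json_each lookup works inside an EXISTS (a sketch reusing the hypothetical your_table / your_json_column names from above):
SELECT *
FROM your_table AS t
WHERE EXISTS (
    SELECT 1
    FROM json_each(t.your_json_column) AS j
    WHERE j.key = '' AND j.value = 'empty'
);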

Retrieving json elements with a specific key name from a complex nested structure in postgres

I have a complex nested json structure in a postgres json field. I want to list all element values with key '$type' no matter where in the nested structure they appear. The structure contains arrays nested within arrays to several levels deep. What is the sql query I should use?
The table structure is:
create table if not exists documents
(
    id text not null
        constraint documents_pkey primary key,
    value json not null
)
This recursive function extracts all attributes from a complex jsonb object:
create or replace function jsonb_extract_all(jsonb_data jsonb, curr_path text[] default '{}')
    returns table(path text[], value text)
    language plpgsql as $$
begin
    if jsonb_typeof(jsonb_data) = 'object' then
        return query
        select (jsonb_extract_all(val, curr_path || key)).*
        from jsonb_each(jsonb_data) e(key, val);
    elsif jsonb_typeof(jsonb_data) = 'array' then
        return query
        select (jsonb_extract_all(val, curr_path || ord::text)).*
        from jsonb_array_elements(jsonb_data) with ordinality e(val, ord);
    else
        return query
        select curr_path, jsonb_data::text;
    end if;
end $$;
Example usage:
with my_table(data) as (
    select '{
        "$type": "a",
        "other": "x",
        "nested_object": {"$type": "b"},
        "array_1": [{"other": "y"}, {"$type": "c"}],
        "array_2": [{"$type": "d"}, {"other": "z"}]
    }'::jsonb
)
select f.*
from my_table
cross join jsonb_extract_all(data) f
where path[cardinality(path)] = '$type';
path | value
-----------------------+-------
{$type} | "a"
{array_1,2,$type} | "c"
{array_2,1,$type} | "d"
{nested_object,$type} | "b"
(4 rows)
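Applied to the documents table from the question (whose value column is json, so it needs a cast to jsonb), the call would look like this (a sketch, not run against your data):
select d.id, f.path, f.value
from documents d
cross join lateral jsonb_extract_all(d.value::jsonb) f
where f.path[cardinality(f.path)] = '$type';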
You can use a recursive query. I have done most of the work here:
with recursive dived(jkey, jval, jtype) as (
    select t.key, t.value,
           json_typeof(t.value) jtype
    from json_each('{"id":"243769","name":"domains","type":"TABLE","adata":{"sfield":"name"},"fields":{"id":{"ind":1,"enum":null,"refs":[null,null],"reqd":true,"type":"int4","constr":["p",null],"default":null},"name":{"ind":2,"enum":null,"refs":[null,null],"reqd":true,"type":"text","constr":["u",null],"default":null},"appid":{"ind":5,"enum":null,"refs":["apps","id"],"reqd":true,"type":"int4","constr":[null,null],"default":null},"userid":{"ind":8,"enum":null,"refs":["users","id"],"reqd":true,"type":"int8","constr":[null,null],"default":null},"createdat":{"ind":6,"enum":null,"refs":[null,null],"reqd":true,"type":"timestamptz","constr":[null,null],"default":null},"updatedat":{"ind":7,"enum":null,"refs":[null,null],"reqd":true,"type":"timestamptz","constr":[null,null],"default":null},"subdomainforward":{"ind":4,"enum":null,"refs":[null,null],"reqd":false,"type":"text","constr":[null,null],"default":null},"wilcardsubdomain":{"ind":3,"enum":null,"refs":[null,null],"reqd":false,"type":"bool","constr":[null,null],"default":null}},"schema":"web","relchecks":0,"relhasrules":false,"relhastriggers":true,"relrowsecurity":false,"relforcerowsecurity":false}'::json) t
    union all
    select t.key, t.value,
           json_typeof(t.value) jtype
    from dived, json_each(dived.jval) as t
    where dived.jtype in ('object' /*, 'array'*/)
)
select * from dived where jkey = 'yourkey' limit 100
You will simply need to add in a CASE WHEN, or some similar logic, to handle arrays with json_array_elements.
Iterating through nested arrays with json is not too difficult with a recursive query but I find it tedious.
Place the CASE WHEN in front of the json_each, something like:
CASE WHEN dived.jtype = 'array' then
    json_array_elements(dived.jval) t
It may be possible to handle the situation within that CASE WHEN; otherwise you may need a separate recursive query specifically for arrays and then union it with the object keys/values.
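PostgreSQL allows only one self-reference in the recursive term, so rather than adding a second recursive branch, one way to fold arrays in is to guard each deconstruction with a CASE so every function call receives valid input. A sketch of that idea using jsonb and an inline sample document (untested against your data; array elements get a null key here):
with recursive walked(jkey, jval) as (
    select t.key, t.value
    from jsonb_each('{"a":{"k":1},"b":[{"k":2}]}'::jsonb) t
    union all
    select t.jkey, t.jval
    from walked w
    cross join lateral (
        -- descend into objects; the CASE yields an empty object for non-objects
        select e.key, e.value
        from jsonb_each(case when jsonb_typeof(w.jval) = 'object'
                             then w.jval else '{}'::jsonb end) e
        union all
        -- descend into arrays; array elements have no key of their own
        select null::text, a.elem
        from jsonb_array_elements(case when jsonb_typeof(w.jval) = 'array'
                                       then w.jval else '[]'::jsonb end) a(elem)
    ) t(jkey, jval)
)
select * from walked where jkey = 'k';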
You also may find more info here:
Collect Recursive JSON Keys In Postgres
I hope this helps!

How to update a JSONB object with a parent in postgresql

I have a database with a table foo containing a column id and a column data with the following data:
{
  "startDate":"2017-07-04",
  "endDate":"2017-07-10",
  "notDelegated":false,
  "sold":false,
  "disableRanking":false,
  "type":"PERIOD"
}
I would like to update this data so it is nested under a parent rangeData key, with the type property extracted to the top level, like this:
{
  "rangeData": {
    "startDate":"2017-07-04",
    "endDate":"2017-07-10",
    "notDelegated":false,
    "sold":false,
    "disableRanking":false
  },
  "type":"PERIOD"
}
I tried a lot of things with JSON operators in vain.
Thank you for your answers.
Use the function jsonb_build_object() and the delete operator (-):
update foo
set data = jsonb_build_object('rangeData', data - 'type', 'type', data->'type');
In the above function call you are creating a json object with two elements:

key          value          meaning
-----------  -------------  ------------------------------------------------
'rangeData'  data - 'type'  the json object data with the key 'type' removed
'type'       data->'type'   the value of the 'type' element of data
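To sanity-check the expression before running the UPDATE, you can evaluate it against the sample document inline (a minimal sketch):
select jsonb_build_object('rangeData', d - 'type', 'type', d->'type')
from (values (
    '{"startDate":"2017-07-04","endDate":"2017-07-10","notDelegated":false,"sold":false,"disableRanking":false,"type":"PERIOD"}'::jsonb
)) v(d);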
You can use jsonb_set(target jsonb, path text[], new_value jsonb[, create_missing boolean]) (see the docs).
It's not very pretty, but I did this:
SELECT
    jsonb_set(
        (final.range_data)::jsonb,
        '{type}',
        to_jsonb(final.type)
    )
FROM
(
    SELECT
        jsonb_set(
            jsonb '{}', '{rangeData}', fields.range_data
        ) AS range_data,
        fields.type AS type
    FROM
    (
        SELECT
            data.jsonb - 'type' AS range_data,
            data.jsonb ->> 'type' AS type
        FROM
        (
            SELECT '{"startDate":"2017-07-04","endDate":"2017-07-10","notDelegated":false,"sold":false,"disableRanking":false,"type":"PERIOD"}'::jsonb
        ) data
    ) fields
) final;
This will get you your jsonb output of:
{"rangeData": {"startDate":"2017-07-04","endDate":"2017-07-10","notDelegated":false,"sold":false,"disableRanking":false},"type":"PERIOD"}
So you'll end up doing something like:
UPDATE
    foo
SET
    data = (
        SELECT
            jsonb_set(
                (final.range_data)::jsonb,
                '{type}',
                to_jsonb(final.type)
            )
        FROM
        (
            SELECT
                jsonb_set(
                    jsonb '{}', '{rangeData}', fields.range_data
                ) AS range_data,
                fields.type AS type
            FROM
            (
                SELECT
                    data.jsonb - 'type' AS range_data,
                    data.jsonb ->> 'type' AS type
                FROM
                (
                    -- alias the column as "jsonb" so the data.jsonb references above resolve
                    SELECT foo.data AS jsonb
                ) data
            ) fields
        ) final
    );
I haven't run the final UPDATE query, but I did run the naïve SELECT statement and it got me the output as expected.
Also, caveat, I'm new to SQL (and especially the jsonb stuff), so I'm sure there will be a more performant and/or more correct "jsonb-like" way to do this. Depends on your use-case (if it's a one-off migration, or if you need to do this during every live read, etc.)
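For what it's worth, the two answers can be combined into a single expression: build the wrapped object first, then set the type key with jsonb_set (a sketch, not run against your data):
update foo
set data = jsonb_set(
    jsonb_build_object('rangeData', data - 'type'),  -- wrap everything except type
    '{type}',
    data->'type'                                     -- re-attach type at the top level
);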