I have a SQL Server table with a JSON column. In it, I have a property 'Code' that contains a single string value. Now I want 'Code' to be an array, so it can hold more than one string.
How can I update all my table values to change the property to an array?
"Code" : null --> "Code" : []
"Code" : "XX" --> "Code" : ["XX"]
You may try to modify the stored JSON as text using OPENJSON() with default schema to get the type of the $.Code part:
Sample data:
-- Build a throwaway [Data] table with a single nvarchar(max) JSON column,
-- covering every $.Code variant the migration must handle:
-- string, null, number, array, and object.
SELECT *
INTO Data
FROM (VALUES
(CONVERT(nvarchar(max), N'{"Code": "XX"}')),
(CONVERT(nvarchar(max), N'{"Code": null}')),
(CONVERT(nvarchar(max), N'{"Code": 1}')),
(CONVERT(nvarchar(max), N'{"Code": []}')),
(CONVERT(nvarchar(max), N'{"Code": {}}'))
) d (JsonColumn)
Statement:
-- Rewrite $.Code in place as a JSON array.
-- OPENJSON with the default schema returns one row per top-level property
-- as ([key], [value], [type]); the [type] code drives how the array
-- element is built:
--   type 0 (null)   -> '' so CONCAT('[', '', ']') yields []
--   type 1 (string) -> re-quote and re-escape the raw value
--   type 2 (number) -> raw text is already valid JSON
--   else (bool/array/object) -> placeholder string
-- JSON_QUERY marks the built text as JSON so JSON_MODIFY inserts it as an
-- array instead of an escaped string.
-- OUTER APPLY keeps rows whose JSON has no 'Code' key (j.[value] is NULL,
-- producing [] as well).
UPDATE d
SET JsonColumn = JSON_MODIFY(
JsonColumn,
'$.Code',
JSON_QUERY(CONCAT('[', j.[value], ']'))
)
FROM Data d
OUTER APPLY (
SELECT
CASE
WHEN [type] = 0 THEN ''
WHEN [type] = 1 THEN CONCAT('"', STRING_ESCAPE([value], 'json'), '"')
WHEN [type] = 2 THEN [value]
ELSE '"not a scalar value"'
END AS [value]
FROM OPENJSON(d.JsonColumn, '$')
WHERE [key] = 'Code'
) j
Result:
JsonColumn
--------------------------------
{"Code": ["XX"]}
{"Code": []}
{"Code": [1]}
{"Code": ["not a scalar value"]}
{"Code": ["not a scalar value"]}
You may consider @JeroenMostert's comment and use something like this:
-- Simpler variant: only strings (type 1) need re-quoting/escaping; every
-- other JSON type's raw text is already valid inside [...].
-- NOTE(review): for type 4/5 this wraps the existing array/object in a
-- new array (e.g. [] -> [[]], {} -> [{}]) -- confirm that is acceptable
-- for your data.
UPDATE d
SET JsonColumn = JSON_MODIFY(
JsonColumn,
'$.Code',
JSON_QUERY(CONCAT('[', j.[value], ']'))
)
FROM Data d
OUTER APPLY (
SELECT IIF ([type] = 1, CONCAT('"', STRING_ESCAPE([value], 'json'), '"'), [value]) AS [value]
FROM OPENJSON(d.JsonColumn, '$')
WHERE [key] = 'Code'
) j
Related
What I'm trying to do is fill up a table with the data from a JSON. The file is formatted like this.
[
{
"name": "Victor",
"age": "20"
},
{
"name": "Ana",
"age": "23"
}
]
I can't change how it's formatted.
I tried using APEX_JSON to parse it and add the data row by row, but I can't even get GET_COUNT to work; none of the paths I tried worked.
The database is an Oracle 11g, so there's no JSON_TABLE
--oracle 12c or later
-- JSON_TABLE projects each element of the top-level array ('$[*]')
-- into one relational row with NAME and AGE columns.
SELECT *
FROM JSON_TABLE (
'[{"name":"Victor", "age":"20"},{"name":"Ana", "age":"23"}]',
'$[*]'
COLUMNS
NAME VARCHAR2 (2000) PATH '$.name',
AGE VARCHAR2 (2000) PATH '$.age')
--oracle 11g
-- No JSON_TABLE before 12c: convert the JSON to XML with
-- APEX_JSON.TO_XMLTYPE (root element <json>, one <row> per array
-- element) and shred it with XMLTABLE instead.
SELECT *
FROM XMLTABLE (
'/json/row'
PASSING apex_json.to_xmltype (
'[{"name":"Victor", "age":"20"},{"name":"Ana", "age":"23"}]')
COLUMNS
NAME VARCHAR2 (2000) PATH '/row/name',
AGE VARCHAR2 (2000) PATH '/row/age')
You can use XMLTABLE along with APEX_JSON.TO_XMLTYPE() function in order to simulate JSON_TABLE such as
-- Same XMLTABLE technique, with the JSON supplied through a CTE so it can
-- be swapped for a real table column. Column paths are relative to the
-- /json/row context node.
WITH t(jsCol) AS
(
SELECT '[
{
"name": "Victor",
"age": "20"
},
{
"name": "Anna",
"age": "23"
}
]'
FROM dual
)
SELECT name, age
FROM t,
XMLTABLE('/json/row'
PASSING APEX_JSON.TO_XMLTYPE(jsCol)
COLUMNS
name VARCHAR2(100) PATH 'name',
age VARCHAR2(100) PATH 'age'
)
NAME
AGE
Victor
20
Anna
23
With APEX_JSON you can do something like this:
-- Parse the JSON array with APEX_JSON and walk it by index.
-- get_count on path '.' returns the number of elements in the root array;
-- '[%d]' paths substitute the loop index via p0.
DECLARE
l_json_text VARCHAR2(32767);
l_json_values apex_json.t_values;
BEGIN
l_json_text := '[
{"name":"Victor", "age":"20"},
{"name":"Ana", "age":"23"}
]
';
apex_json.parse(
p_values => l_json_values,
p_source => l_json_text
);
DBMS_OUTPUT.put_line('----------------------------------------');
-- NVL guards against a NULL count when the document is not an array.
FOR r IN 1 .. nvl(apex_json.get_count(p_path => '.', p_values => l_json_values),0) loop
dbms_output.put_line(apex_json.get_varchar2(p_path => '[%d].name', p0 => r, p_values => l_json_values));
dbms_output.put_line(apex_json.get_varchar2(p_path => '[%d].age', p0 => r, p_values => l_json_values));
-- Uncomment to load a real table instead of printing:
/* insert into your_table
(name,
age
)
VALUES
(
apex_json.get_varchar2(p_path => '[%d].name', p0 => r, p_values => l_json_values),
apex_json.get_varchar2(p_path => '[%d].age', p0 => r, p_values => l_json_values)
);
*/
END loop;
DBMS_OUTPUT.put_line('----------------------------------------');
END;
/
If you can find a proper JSON parser then you should use that; however, if one is not available, you could parse it yourself. From Oracle 11gR2, you can use:
-- Pure-SQL JSON "parser" for pre-12c Oracle: a recursive CTE pulls one
-- {..} object per iteration with REGEXP_SUBSTR and extracts name/age via
-- capture groups. The alternation ( name-first | age-first ) tolerates
-- either key order; the 'n' flag lets . match newlines in pretty-printed
-- input.
INSERT INTO table_name (name, age)
WITH jsondata (json) AS (
SELECT '[
{"name":"Victor", "age":"20"},
{"name":"Ana", "age":"23"},
{
"name":"Betty",
"age":"24"
},
{
"age":"25",
"name":"Carol"
}
]' FROM DUAL
),
-- Anchor member: count total objects (items) and extract occurrence 1.
-- Recursive member: step i -> i+1 and extract occurrence i+1 until
-- i = items.
data (json, items, i, name, age) AS (
SELECT json,
REGEXP_COUNT(
json,
'\{\s*"name"\s*:\s*"(.*?)"\s*,\s*"age"\s*:\s*"(.*?)"\s*\}'
|| '|\{\s*"age"\s*:\s*"(.*?)"\s*,\s*"name"\s*:\s*"(.*?)"\s*\}',
1,
'n'
),
1,
REGEXP_SUBSTR(
REGEXP_SUBSTR(
json,
'\{\s*"name"\s*:\s*"(.*?)"\s*,\s*"age"\s*:\s*"(.*?)"\s*\}'
|| '|\{\s*"age"\s*:\s*"(.*?)"\s*,\s*"name"\s*:\s*"(.*?)"\s*\}',
1,
1,
'n'
),
'"name"\s*:\s*"(.*?)"',
1,
1,
'n',
1
),
REGEXP_SUBSTR(
REGEXP_SUBSTR(
json,
'\{\s*"name"\s*:\s*"(.*?)"\s*,\s*"age"\s*:\s*"(.*?)"\s*\}'
|| '|\{\s*"age"\s*:\s*"(.*?)"\s*,\s*"name"\s*:\s*"(.*?)"\s*\}',
1,
1,
'n'
),
'"age"\s*:\s*"(.*?)"',
1,
1,
'n',
1
)
FROM jsondata
UNION ALL
SELECT json,
items,
i + 1,
REGEXP_SUBSTR(
REGEXP_SUBSTR(
json,
'\{\s*"name"\s*:\s*"(.*?)"\s*,\s*"age"\s*:\s*"(.*?)"\s*\}'
|| '|\{\s*"age"\s*:\s*"(.*?)"\s*,\s*"name"\s*:\s*"(.*?)"\s*\}',
1,
i + 1,
'n'
),
'"name"\s*:\s*"(.*?)"',
1,
1,
'n',
1
),
REGEXP_SUBSTR(
REGEXP_SUBSTR(
json,
'\{\s*"name"\s*:\s*"(.*?)"\s*,\s*"age"\s*:\s*"(.*?)"\s*\}'
|| '|\{\s*"age"\s*:\s*"(.*?)"\s*,\s*"name"\s*:\s*"(.*?)"\s*\}',
1,
i + 1,
'n'
),
'"age"\s*:\s*"(.*?)"',
1,
1,
'n',
1
)
FROM data
WHERE i < items
)
SELECT name, age
FROM data;
(Note: the regular expression does not handle escaped quotes inside the strings, as I am assuming they will not occur in names; if they can occur, then instead of .*? you can use (\\(["\\/bfnrt]|u[0-9a-fA-F]{4})|[^"\\])* to match JSON string escapes.)
Which, given the table:
-- Target table for the parsed name/age rows.
CREATE TABLE table_name (name VARCHAR2(30), age NUMBER);
Then after the insert:
-- Verify the inserted rows.
SELECT * FROM table_name;
Outputs:
NAME
AGE
Victor
20
Ana
23
Betty
24
Carol
25
db<>fiddle here
Last time I did this, I used a CLOB variable.
Try doing it like this:
-- NOTE(review): JSON_TABLE requires Oracle 12c or later, but the question
-- targets 11g -- confirm the database version before using this.
-- Iterates the parsed rows with a cursor FOR loop and inserts them one by
-- one; a single INSERT ... SELECT would also work.
DECLARE
json_body clob := '[
{"name":"Victor", "age":"20"},
{"name":"Ana", "age":"23"}
]';
BEGIN
FOR items IN (SELECT *
FROM
JSON_TABLE(json_body FORMAT JSON,'$[*]'
COLUMNS (
name_ varchar (200) PATH '$.name',
age_ varchar (200) PATH '$.age')))
LOOP
INSERT INTO T_DATA (
name,
age
) VALUES (
items.name_,
items.age_
);
END LOOP;
END;
/
This will put your data into a table and then you can play with them
-- Inspect the loaded rows.
select * from T_DATA;
Resulting into :
result
Using PostgreSQL 13.4 I have a table with a JSON column in a structure like the following sample:
{
"username": "jsmith",
"location": "United States",
"posts": [
{
"id":"1",
"title":"Welcome",
"newKey":true <----------- insert new key/value pair here
},
{
"id":"4",
"title":"What started it all",
"newKey":true <----------- insert new key/value pair here
}
]
}
For changing keys on the first level, I used a simple query like this
-- The || operator merges keys on the ROOT object only; it cannot reach
-- into nested objects or array elements.
UPDATE
sample_table_json
SET
json = json::jsonb || '{"active": true}';
But this doesn't work for nested objects and objects in an array like in the sample.
How would I insert a key/value pair into a JSON column with nested objects in an array?
You have to use the jsonb_set function while specifying the right path; see the manual.
For a single json update :
-- Add "active": true to the FIRST element of the posts array.
-- jsonb_set args: target document, path, new value, create-if-missing.
-- The path segment must be 'posts' -- that is the array's key in the
-- sample document; a wrong segment (e.g. 'post') would leave the
-- document unchanged or create a stray key.
UPDATE sample_table_json
SET json = jsonb_set( json::jsonb
                    , '{posts,0,active}'
                    , 'true'
                    , true
                    );
For a (very) limited set of json updates :
-- Update two fixed array positions by nesting jsonb_set calls (inner call
-- handles posts[0], outer call handles posts[1]). Only practical for a
-- small, known number of elements.
-- The path segment must be 'posts' to match the sample document's key.
UPDATE sample_table_json
SET json = jsonb_set(jsonb_set( json::jsonb
                              , '{posts,0,active}'
                              , 'true'
                              , true
                              )
                    , '{posts,1,active}'
                    , 'true'
                    , true
                    );
For a larger set of json updates of the same json data, you can create the "aggregate version" of the jsonb_set function :
-- 5-argument wrapper so jsonb_set can serve as an aggregate transition
-- function: x is the running state (NULL on the first row, hence
-- COALESCE falls back to the incoming document y), then the usual
-- path/value/create-missing arguments.
CREATE OR REPLACE FUNCTION jsonb_set(x jsonb, y jsonb, p text[], e jsonb, b boolean)
RETURNS jsonb LANGUAGE sql AS $$
SELECT jsonb_set(COALESCE(x,y), p, e, b) ; $$ ;
-- Aggregate form: folds a set of (doc, path, value, flag) rows into one
-- repeatedly-updated jsonb document.
CREATE OR REPLACE AGGREGATE jsonb_set_agg(x jsonb, p text[], e jsonb, b boolean)
( STYPE = jsonb, SFUNC = jsonb_set) ;
and then use the new aggregate function jsonb_set_agg while iterating on a query result where the path and val fields could be calculated :
-- Demo: apply a whole list of (path, value) updates to one document in a
-- single aggregate call; each VALUES row becomes one jsonb_set step.
SELECT jsonb_set_agg('{"username": "jsmith","location": "United States","posts": [{"id":"1","title":"Welcome"},{"id":"4","title":"What started it all"}]}' :: jsonb
, l.path :: text[]
, to_jsonb(l.val)
, true)
FROM (VALUES ('{posts,0,active}', 'true'), ('{posts,1,active}', 'true')) AS l(path, val) -- this list could be the result of a subquery
This query could finally be used in order to update some data :
-- Full update: cross join each table row with the list of path/value
-- updates, fold them per id with jsonb_set_agg, then write the result
-- back. GROUP BY id keeps one aggregated document per source row.
WITH list AS
(
SELECT id
, jsonb_set_agg(json :: jsonb
, l.path :: text[]
, to_jsonb(l.val)
, true) AS res
FROM sample_table_json
CROSS JOIN (VALUES ('{posts,0,active}', 'true'), ('{posts,1,active}', 'true')) AS l(path, val)
GROUP BY id
)
UPDATE sample_table_json AS t
SET json = l.res
FROM list AS l
WHERE t.id = l.id
see the test result in dbfiddle
It became a bit complicated. Loop through the array, add the new key/value pair to each array element and re-aggregate the array, then rebuild the whole object.
-- Array-rebuild approach: explode posts with jsonb_array_elements, merge
-- {"active":true} into every element, re-aggregate with jsonb_agg, then
-- || the new posts array over the old one on the root object. Works for
-- arrays of any length, unlike fixed-index jsonb_set calls.
with t(j) as
(
values ('{
"username": "jsmith",
"location": "United States",
"posts": [
{
"id":"1", "title":"Welcome", "newKey":true
},
{
"id":"4", "title":"What started it all", "newKey":true
}]
}'::jsonb)
)
select j ||
jsonb_build_object
(
'posts',
(select jsonb_agg(je||'{"active":true}') from jsonb_array_elements(j->'posts') je)
)
from t;
Dataset :
-- Demo schema: one grievance with two related type rows.
create table grievances(grivanceid int ,grivancedesc varchar(10));
create table grievanceType(grivanceid int ,grivanceType varchar(10));
-- Explicit column lists keep the inserts valid if columns are added later.
insert into grievances (grivanceid, grivancedesc) values (1,'abc');
insert into grievanceType (grivanceid, grivanceType) values (1,'type1');
insert into grievanceType (grivanceid, grivanceType) values (1,'type2');
Desired output:
{
"grivanceid": 1,
"grivancedesc": "abc",
"grivanceType": [ "type1", "type2"]
}
My query : not fully achieved
-- Asker's attempt: the FOR XML PATH('') + STUFF trick builds ONE
-- comma-separated string ('type1,type2'), so FOR JSON serializes
-- grivanceType as a plain string, not as a JSON array of strings.
select *
from
(select
a.*,
stuff(list.grivanceType, 1, 1, '') grivanceType
from
grievances a
cross apply
(select
',' + grivanceType
from
grievanceType b
where
grivanceid = a.grivanceid
for xml path ('')
) list(grivanceType)
) a
for json path, without_array_wrapper
It helps if you wrap your XML results in a JSON_Query()
Example
-- Build the array text by hand ('["type1","type2"]') with quoted,
-- comma-separated items, then wrap it in JSON_QUERY so FOR JSON emits it
-- as a real JSON array instead of an escaped string.
Select *
,grivanceType = JSON_QUERY('['+stuff((Select concat(',"',grivanceType,'"' )
From grievanceType
Where grivanceid =A.grivanceid
For XML Path ('')),1,1,'')+']'
)
From grievances A
for json path, without_array_wrapper
Returns
{
"grivanceid": 1,
"grivancedesc": "abc",
"grivanceType": ["type1", "type2"]
}
I have a json field in a table as below, i am unable to query the "day" from it :
{"FID":54,"header_json":"{\"date\":{\"day\":2,\"month\":6,\"year\":2020},\"amt\":10,\"count\":1}"}
SQL tried:
-- Asker's attempt: fails because $.header_json holds a QUOTED STRING of
-- JSON, not a nested object, so '$.header_json.date[*]' matches nothing.
select jt.*
from order_json o,
json_table(o.order_json,'$.header_json.date[*]'
columns ("day" varchar2(2) path '$.day')) as jt;
That's pretty easy: as you can see, header_json is just a string, not a usual nested JSON object. So you need to extract this quoted string and parse it as JSON again:
-- Two-pass parse: the inner json_table extracts $.header_json as plain
-- text (the embedded JSON string); the outer json_table then parses that
-- text again to reach $.date.day. The no_merge hint keeps the inner
-- query from being merged so the second parse sees the extracted string.
select *
from
(
select--+ no_merge
jh.*
from order_json o,
json_table(o.order_json,'$.header_json[*]'
columns (
header_json varchar2(200) path '$')
) as jh
) headers,
json_table(headers.header_json,'$.date[*]'
columns (
"day" varchar2(2) path '$.day')
) as j
;
Full example with sample data:
-- sample data:
-- Self-contained version of the two-pass query above: the WITH clause
-- supplies one row whose JSON carries header_json as an escaped string.
with order_json(order_json) as (
select
'{"FID":54,"header_json":"{\"date\":{\"day\":2,\"month\":6,\"year\":2020},\"amt\":10,\"count\":1}"}'
from dual
)
-- main query
-- Pass 1 extracts the header_json string; pass 2 parses it as JSON and
-- projects $.date.day.
select *
from
(
select--+ no_merge
jh.*
from order_json o,
json_table(o.order_json,'$.header_json[*]'
columns (
header_json varchar2(200) path '$')
) as jh
) headers,
json_table(headers.header_json,'$.date[*]'
columns (
"day" varchar2(2) path '$.day')
) as j
;
I've got MySQL table with JSON field, where I store data in such a format.
{
"fields": {
"1": {
"s": "y"
},
"2": {
"s": "n"
}
}
}
I need to obtain the keys in fields, e.g. 1 or 2 given the value of s.
Example query:
-- Asker's attempt: '$.fields.*' iterates the VALUES of fields, so
-- PATH '$' points at each {"s": ...} object; coercing that object to
-- VARCHAR yields NULL -- MySQL's JSON_TABLE cannot expose the member
-- KEY ("1"/"2") directly.
create table mytable ( mycol json );
insert into mytable set mycol = '{"fields": {"1": {"s": "y"},"2": {"s": "n"}}}';
select j.* from mytable, JSON_TABLE(mycol,
'$.fields.*' COLUMNS (
json_key VARCHAR(10) PATH '$',
s VARCHAR(10) PATH '$.s'
)
) AS j where j.s = 'y';
gives:
# json_key, s
null, y
I would expect to get
# json_key, s
1, y
Is it possible to get that data somehow?
I don't need the results in row / table format. I would be happy to get the comma separated list of IDs (json_keys) meeting my criterium.
EDIT:
I was also thinking about getting the paths using JSON_SEARCH and passing that to JSON_EXTRACT, this was achieved here: Combining JSON_SEARCH and JSON_EXTRACT get me: "Invalid JSON path expression."
Unfortunately the difference is that I would need to use JSON_SEARCH in all mode, as I need all results. In such a mode JSON_SEARCH returns list of paths, where as JSON_EXTRACT accepts list of arguments.
Try FOR ORDINALITY (see 12.17.6 JSON Table Functions), this type enumerates rows in the COLUMNS clause:
-- Workaround: FOR ORDINALITY numbers the JSON_TABLE rows (1-based), and
-- JSON_KEYS('$.fields') returns the member keys as a JSON array; indexing
-- that array with row-1 recovers each row's key.
-- NOTE(review): this assumes JSON_TABLE iterates members in the same
-- order JSON_KEYS lists them -- verify on your MySQL version.
SELECT
JSON_UNQUOTE(
JSON_EXTRACT(
JSON_KEYS(`mycol` ->> '$.fields'),
CONCAT('$[', `j`.`row` - 1, ']')
)
) `json_key`,
`j`.`s`
FROM
`mytable`,
JSON_TABLE(
`mycol`,
'$.fields.*' COLUMNS (
`row` FOR ORDINALITY,
`s` VARCHAR(10) PATH '$.s'
)
) `j`
WHERE
`j`.`s` = 'y';
See dbfiddle.