What I'm trying to do is fill a table with data from a JSON file. The file is formatted like this:
[
  {
    "name": "Victor",
    "age": "20"
  },
  {
    "name": "Ana",
    "age": "23"
  }
]
I can't change how it's formatted.
I tried using APEX_JSON to parse it and add the rows one by one, but I can't even get GET_COUNT to work; none of the paths I tried worked.
The database is Oracle 11g, so there's no JSON_TABLE.
--oracle 12c or later
SELECT *
  FROM JSON_TABLE (
         '[{"name":"Victor", "age":"20"},{"name":"Ana", "age":"23"}]',
         '$[*]'
         COLUMNS (
           NAME VARCHAR2(2000) PATH '$.name',
           AGE  VARCHAR2(2000) PATH '$.age'
         )
       );

--oracle 11g
SELECT *
  FROM XMLTABLE (
         '/json/row'
         PASSING apex_json.to_xmltype (
           '[{"name":"Victor", "age":"20"},{"name":"Ana", "age":"23"}]')
         COLUMNS
           NAME VARCHAR2(2000) PATH 'name',
           AGE  VARCHAR2(2000) PATH 'age'
       );
You can use XMLTABLE along with the APEX_JSON.TO_XMLTYPE() function in order to simulate JSON_TABLE, such as:
WITH t(jsCol) AS
(
  SELECT '[
    {
      "name": "Victor",
      "age": "20"
    },
    {
      "name": "Anna",
      "age": "23"
    }
  ]'
  FROM dual
)
SELECT name, age
  FROM t,
       XMLTABLE('/json/row'
                PASSING APEX_JSON.TO_XMLTYPE(jsCol)
                COLUMNS
                  name VARCHAR2(100) PATH 'name',
                  age  VARCHAR2(100) PATH 'age'
       );
NAME   | AGE
:----- | :--
Victor | 20
Anna   | 23
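To see why the row pattern '/json/row' and the relative column paths work, it can help to inspect the intermediate XML that APEX_JSON.TO_XMLTYPE generates; a minimal check (the <json>/<row> element names are an assumption inferred from the XPath used above):
-- Inspect the XML that APEX_JSON.TO_XMLTYPE builds from the JSON array
-- (expected to look roughly like <json><row><name>...</name><age>...</age></row>...</json>,
--  which is why the row pattern '/json/row' is used above)
SELECT apex_json.to_xmltype(
         '[{"name":"Victor", "age":"20"},{"name":"Ana", "age":"23"}]'
       ).getclobval() AS intermediate_xml
FROM dual;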
With APEX_JSON you can do something like this:
DECLARE
  l_json_text   VARCHAR2(32767);
  l_json_values apex_json.t_values;
BEGIN
  l_json_text := '[
    {"name":"Victor", "age":"20"},
    {"name":"Ana", "age":"23"}
  ]';
  apex_json.parse(
    p_values => l_json_values,
    p_source => l_json_text
  );
  DBMS_OUTPUT.put_line('----------------------------------------');
  FOR r IN 1 .. nvl(apex_json.get_count(p_path => '.', p_values => l_json_values), 0) LOOP
    dbms_output.put_line(apex_json.get_varchar2(p_path => '[%d].name', p0 => r, p_values => l_json_values));
    dbms_output.put_line(apex_json.get_varchar2(p_path => '[%d].age',  p0 => r, p_values => l_json_values));
    /* insert into your_table (name, age)
       VALUES (
         apex_json.get_varchar2(p_path => '[%d].name', p0 => r, p_values => l_json_values),
         apex_json.get_varchar2(p_path => '[%d].age',  p0 => r, p_values => l_json_values)
       );
    */
  END LOOP;
  DBMS_OUTPUT.put_line('----------------------------------------');
END;
/
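If this needs to be reusable, the same logic can be wrapped in a stored procedure. A minimal sketch, assuming a target table your_table(name, age) as in the commented-out INSERT above; load_people is just a placeholder name:
-- Sketch only: load_people is a hypothetical name; your_table(name, age) is assumed to exist
CREATE OR REPLACE PROCEDURE load_people (p_json IN VARCHAR2) AS
  l_values apex_json.t_values;
BEGIN
  apex_json.parse(p_values => l_values, p_source => p_json);
  -- '.' addresses the root array, so get_count returns the number of elements
  FOR r IN 1 .. NVL(apex_json.get_count(p_path => '.', p_values => l_values), 0) LOOP
    INSERT INTO your_table (name, age)
    VALUES (apex_json.get_varchar2(p_path => '[%d].name', p0 => r, p_values => l_values),
            apex_json.get_varchar2(p_path => '[%d].age',  p0 => r, p_values => l_values));
  END LOOP;
END load_people;
/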
If you can find a proper JSON parser then you should use that; however, if one is not available, you could parse it yourself. From Oracle 11gR2, you can use:
INSERT INTO table_name (name, age)
WITH jsondata (json) AS (
SELECT '[
{"name":"Victor", "age":"20"},
{"name":"Ana", "age":"23"},
{
"name":"Betty",
"age":"24"
},
{
"age":"25",
"name":"Carol"
}
]' FROM DUAL
),
data (json, items, i, name, age) AS (
SELECT json,
REGEXP_COUNT(
json,
'\{\s*"name"\s*:\s*"(.*?)"\s*,\s*"age"\s*:\s*"(.*?)"\s*\}'
|| '|\{\s*"age"\s*:\s*"(.*?)"\s*,\s*"name"\s*:\s*"(.*?)"\s*\}',
1,
'n'
),
1,
REGEXP_SUBSTR(
REGEXP_SUBSTR(
json,
'\{\s*"name"\s*:\s*"(.*?)"\s*,\s*"age"\s*:\s*"(.*?)"\s*\}'
|| '|\{\s*"age"\s*:\s*"(.*?)"\s*,\s*"name"\s*:\s*"(.*?)"\s*\}',
1,
1,
'n'
),
'"name"\s*:\s*"(.*?)"',
1,
1,
'n',
1
),
REGEXP_SUBSTR(
REGEXP_SUBSTR(
json,
'\{\s*"name"\s*:\s*"(.*?)"\s*,\s*"age"\s*:\s*"(.*?)"\s*\}'
|| '|\{\s*"age"\s*:\s*"(.*?)"\s*,\s*"name"\s*:\s*"(.*?)"\s*\}',
1,
1,
'n'
),
'"age"\s*:\s*"(.*?)"',
1,
1,
'n',
1
)
FROM jsondata
UNION ALL
SELECT json,
items,
i + 1,
REGEXP_SUBSTR(
REGEXP_SUBSTR(
json,
'\{\s*"name"\s*:\s*"(.*?)"\s*,\s*"age"\s*:\s*"(.*?)"\s*\}'
|| '|\{\s*"age"\s*:\s*"(.*?)"\s*,\s*"name"\s*:\s*"(.*?)"\s*\}',
1,
i + 1,
'n'
),
'"name"\s*:\s*"(.*?)"',
1,
1,
'n',
1
),
REGEXP_SUBSTR(
REGEXP_SUBSTR(
json,
'\{\s*"name"\s*:\s*"(.*?)"\s*,\s*"age"\s*:\s*"(.*?)"\s*\}'
|| '|\{\s*"age"\s*:\s*"(.*?)"\s*,\s*"name"\s*:\s*"(.*?)"\s*\}',
1,
i + 1,
'n'
),
'"age"\s*:\s*"(.*?)"',
1,
1,
'n',
1
)
FROM data
WHERE i < items
)
SELECT name, age
FROM data;
(Note: the regular expression does not handle escaped quotes in the strings, as I am assuming they will not occur in names; however, if they do, then instead of .*? you can use (\\(["\\/bfnrt]|u[0-9a-fA-F]{4})|[^"])*.)
Which, given the table:
CREATE TABLE table_name (name VARCHAR2(30), age NUMBER);
Then after the insert:
SELECT * FROM table_name;
Outputs:
NAME   | AGE
:----- | --:
Victor | 20
Ana    | 23
Betty  | 24
Carol  | 25
db<>fiddle here
Last time, I did that with a CLOB variable.
Try doing it like this:
DECLARE
  json_body clob := '[
    {"name":"Victor", "age":"20"},
    {"name":"Ana", "age":"23"}
  ]';
BEGIN
  FOR items IN (SELECT *
                  FROM JSON_TABLE(json_body FORMAT JSON, '$[*]'
                         COLUMNS (
                           name_ varchar(200) PATH '$.name',
                           age_  varchar(200) PATH '$.age')))
  LOOP
    INSERT INTO T_DATA (
      name,
      age
    ) VALUES (
      items.name_,
      items.age_
    );
  END LOOP;
END;
/
This will put your data into a table, and then you can work with it:
select * from T_DATA;
I have a SQL Server table with a JSON column. In it, I have a property 'Code' that contains a string with one value. Now, I want 'Code' to be an array, so that it can contain more than one string.
How can I update all my table values to change the property to an array?
"Code" : null --> "Code" : []
"Code" : "XX" --> "Code" : ["XX"]
You may try to modify the stored JSON as text, using OPENJSON() with the default schema to get the type of the $.Code part:
Sample data:
SELECT *
INTO Data
FROM (VALUES
(CONVERT(nvarchar(max), N'{"Code": "XX"}')),
(CONVERT(nvarchar(max), N'{"Code": null}')),
(CONVERT(nvarchar(max), N'{"Code": 1}')),
(CONVERT(nvarchar(max), N'{"Code": []}')),
(CONVERT(nvarchar(max), N'{"Code": {}}'))
) d (JsonColumn)
Statement:
UPDATE d
SET JsonColumn = JSON_MODIFY(
JsonColumn,
'$.Code',
JSON_QUERY(CONCAT('[', j.[value], ']'))
)
FROM Data d
OUTER APPLY (
SELECT
CASE
WHEN [type] = 0 THEN ''
WHEN [type] = 1 THEN CONCAT('"', STRING_ESCAPE([value], 'json'), '"')
WHEN [type] = 2 THEN [value]
ELSE '"not a scalar value"'
END AS [value]
FROM OPENJSON(d.JsonColumn, '$')
WHERE [key] = 'Code'
) j
Result:
JsonColumn
--------------------------------
{"Code": ["XX"]}
{"Code": []}
{"Code": [1]}
{"Code": ["not a scalar value"]}
{"Code": ["not a scalar value"]}
You may consider @JeroenMostert's comment and use something like this:
UPDATE d
SET JsonColumn = JSON_MODIFY(
JsonColumn,
'$.Code',
JSON_QUERY(CONCAT('[', j.[value], ']'))
)
FROM Data d
OUTER APPLY (
SELECT IIF ([type] = 1, CONCAT('"', STRING_ESCAPE([value], 'json'), '"'), [value]) AS [value]
FROM OPENJSON(d.JsonColumn, '$')
WHERE [key] = 'Code'
) j
Dataset:
create table grievances(grivanceid int ,grivancedesc varchar(10))
create table grievanceType(grivanceid int ,grivanceType varchar(10))
insert into grievances values (1,'abc')
insert into grievanceType values (1,'type1')
insert into grievanceType values (1,'type2')
Desired output:
{
"grivanceid": 1,
"grivancedesc": "abc",
"grivanceType": [ "type1", "type2"]
}
My query (not fully achieving the desired output):
select *
from
(select
a.*,
stuff(list.grivanceType, 1, 1, '') grivanceType
from
grievances a
cross apply
(select
',' + grivanceType
from
grievanceType b
where
grivanceid = a.grivanceid
for xml path ('')
) list(grivanceType)) a
for json path, without_array_wrapper
It helps if you wrap your XML results in a JSON_QUERY().
Example
Select *
,grivanceType = JSON_QUERY('['+stuff((Select concat(',"',grivanceType,'"' )
From grievanceType
Where grivanceid =A.grivanceid
For XML Path ('')),1,1,'')+']'
)
From grievances A
for json path, without_array_wrapper
Returns
{
"grivanceid": 1,
"grivancedesc": "abc",
"grivanceType": ["type1", "type2"]
}
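As an aside, on SQL Server 2017 or later you could build the same array with STRING_AGG instead of the STUFF/FOR XML PATH trick. A sketch, untested against your data:
-- Sketch: STRING_AGG (SQL Server 2017+) builds the quoted, comma-separated list,
-- and JSON_QUERY keeps it from being re-escaped as a plain string
SELECT g.grivanceid,
       g.grivancedesc,
       grivanceType = JSON_QUERY('[' + (SELECT STRING_AGG('"' + STRING_ESCAPE(t.grivanceType, 'json') + '"', ',')
                                        FROM grievanceType t
                                        WHERE t.grivanceid = g.grivanceid) + ']')
FROM grievances g
FOR JSON PATH, WITHOUT_ARRAY_WRAPPER;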
The JSON needs to be parsed using only PL/SQL code (regular expressions, for example) to get the sentiment and confidence values out of it.
Something similar to this:
[
{
"sentiment":"negative",
"confidence":0.6211975044276729
},
{
"sentiment":"neutral",
"confidence":0.3510681601407111
},
{
"sentiment":"positive",
"confidence":0.027734335431616075
}
]
The above JSON needs to be parsed to get the sentiment and confidence values out of it.
The JSON_TABLE function is available starting with Oracle Database 12c Release 1 (12.1.0.2).
SET NUMWIDTH 20 -- use this if SQL*Plus / SQL Developer truncates digits
--test data
WITH t ( json_col ) AS ( SELECT '[
{
"sentiment":"negative",
"confidence":0.6211975044276729
},
{
"sentiment":"neutral",
"confidence":0.3510681601407111
},
{
"sentiment":"positive",
"confidence":0.027734335431616075
}
]'
FROM dual
) --test data ends
SELECT j.*
FROM t
CROSS JOIN
JSON_TABLE ( json_col,'$[*]'
COLUMNS (
sentiment VARCHAR2(20) PATH '$.sentiment',
confidence NUMBER PATH '$.confidence'
)
)
j;
SENTIMENT CONFIDENCE
-------------------- --------------------
negative .6211975044276729
neutral .3510681601407111
positive .027734335431616075
If you really don't want to use any of the built-in JSON functions and your input does not involve any nested objects, then you can use SQL with a recursive sub-query factoring clause:
Oracle Setup:
CREATE TABLE test_data ( json ) AS
SELECT '[
{
"sentiment":"negative",
"confidence":0.6211975044276729
},
{
"confidence":0.3510681601407111,
"sentiment":"neutral"
},
{
"sentiment":"positive",
"confidence":0.027734335431616075
}
]' FROM DUAL
Query:
WITH rsqfc ( json, obj, lvl, cnt ) AS (
SELECT json,
REGEXP_SUBSTR( json, '\{(.*?)\}', 1, 1, 'n' ),
1,
REGEXP_COUNT( json, '\{(.*?)\}', 1, 'n' )
FROM test_data
WHERE REGEXP_COUNT( json, '\{(.*?)\}', 1, 'n' ) >= 1
UNION ALL
SELECT json,
REGEXP_SUBSTR( json, '\{(.*?)\}', 1, LVL + 1, 'n' ),
lvl + 1,
cnt
FROM rsqfc
WHERE lvl < cnt
)
SELECT REGEXP_SUBSTR( obj, '"sentiment":\s*"(negative|neutral|positive)"', 1, 1, 'n', 1 ) AS sentiment,
TO_NUMBER( REGEXP_SUBSTR( obj, '"confidence":\s*(\d+(\.\d*)?)', 1, 1, 'n', 1 ) ) AS confidence
FROM rsqfc
Output:
SENTIMENT | CONFIDENCE
:-------- | ------------------:
negative | .6211975044276729
neutral | .3510681601407111
positive | .027734335431616075
PL/SQL:
Or using PL/SQL:
DECLARE
json CLOB := '[
{
"sentiment":"negative",
"confidence":0.6211975044276729
},
{
"confidence":0.3510681601407111,
"sentiment":"neutral"
},
{
"sentiment":"positive",
"confidence":0.027734335431616075
}
]';
cnt PLS_INTEGER;
obj VARCHAR2(4000);
sentiment VARCHAR2(20);
confidence NUMBER;
BEGIN
cnt := REGEXP_COUNT( json, '\{(.*?)\}', 1, 'n' );
FOR i IN 1 .. cnt LOOP
obj := REGEXP_SUBSTR( json, '\{(.*?)\}', 1, i, 'n' );
sentiment := REGEXP_SUBSTR( obj, '"sentiment":\s*"(negative|neutral|positive)"', 1, 1, 'n', 1 );
confidence := TO_NUMBER( REGEXP_SUBSTR( obj, '"confidence":\s*(\d+(\.\d*)?)', 1, 1, 'n', 1 ) );
DBMS_OUTPUT.PUT_LINE( sentiment || ' - ' || confidence );
END LOOP;
END;
/
Output:
dbms_output:
negative - .6211975044276729
neutral - .3510681601407111
positive - .027734335431616075
db<>fiddle here
I'm storing a Java class A as A_DOC in a CLOB column in my database.
The structure of A is like:
{
  "id": 123,
  "var1": "abc",
  "subvalues": [
    {
      "id": 1,
      "value": "a"
    },
    {
      "id": 1,
      "value": "b"
    }
    ...
  ]
}
I know I can do things like
select json_query(a.A_DOC, '$.subvalues.value') from table_name a;
and so on, but I'm looking for a way to count the number of elements in the subvalues array through a SQL query. Is this possible?
This function is only available from Oracle 18c onwards:
SELECT json_query('[19, 15, [16,2,3]]','$[*].size()' WITH ARRAY WRAPPER) FROM dual;
SELECT json_value('[19, 15, [16,2,3]]','$.size()') FROM dual;
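Applied to the document in the question (assuming Oracle 18c or later and that A_DOC holds the JSON shown above), something along these lines should work:
-- Sketch (18c+): the size() item method applied to the subvalues array
SELECT json_value(a.A_DOC, '$.subvalues.size()') AS subvalue_count
FROM table_name a;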
You can use JSON_TABLE:
SELECT id, var1, count(sub_id) subvalues
  FROM JSON_TABLE (
         to_clob('{ id: 123, var1: "abc", subvalues : [{ id: 1, value: "a" }, { id: 2, value: "b" } ]}'),
         '$'
         COLUMNS (
           id   NUMBER  PATH '$.id',
           var1 VARCHAR PATH '$.var1',
           NESTED PATH '$.subvalues[*]'
             COLUMNS (
               sub_id NUMBER PATH '$.id'
             )
         )
       )
 GROUP BY id, var1
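The to_clob() literal above is only sample data; against the actual table from the question the query would presumably look like this (table_name and A_DOC are taken from the question):
-- Sketch: the same NESTED PATH counting applied to the CLOB column from the question
SELECT id, var1, COUNT(sub_id) AS subvalues
  FROM table_name a,
       JSON_TABLE (
         a.A_DOC,
         '$'
         COLUMNS (
           id   NUMBER        PATH '$.id',
           var1 VARCHAR2(100) PATH '$.var1',
           NESTED PATH '$.subvalues[*]'
             COLUMNS (
               sub_id NUMBER PATH '$.id'
             )
         )
       ) j
 GROUP BY id, var1;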
I have a JSON string in one column in an Oracle 10g database, like:
[{"id":"1","contactBy":"Rajesh Kumar"},{"id":"2","contactBy":"Rakesh Kumar"}]
I have to get the value of contactBy in that column for one of the reports.
Is there any built-in function to parse the JSON string in Oracle 10g, or any user-defined function to parse the string?
As Jens said in the comments, JSON support is only available from 12c onwards, but you can use regular expressions as a workaround to get what you want:
select regexp_replace(regexp_substr('[{"id": "1", "contactBy":"Rajesh Kumar"},{"id": "2","contactBy": "Emmanuel Test"}]',
'"contactBy":\s*("(\w| )*")', 1, level),
'"contactBy":\s*"((\w| )*)"', '\1', 1, 1) contact
from dual
connect by regexp_substr('[{"id": "1","contactBy":"Rajesh Kumar"},{"id": "2","contactBy": "Emmanuel Test"}]', '"contactBy":\s*("(\w| )*")', 1, level) is not null
;
EDIT: query modified to handle special characters and to display the results in a single row:
select listagg(contact, ', ') within group (order by lev)
from
(
select regexp_replace(regexp_substr('[{"id": "1", "contactBy":"Rajesh Kumar"},{"id": "2","contactBy": "Emmanuel Test+-"}]',
'"contactBy":\s*(".*?")', 1, level),
'"contactBy":\s*"(.*?)"', '\1', 1, 1) contact, level lev
from dual
connect by regexp_substr('[{"id": "1","contactBy":"Rajesh Kumar"},{"id": "2","contactBy": "Emmanuel Test+-"}]', '"contactBy":\s*(".*?")', 1, level) is not null
)
;
Emmanuel, your code really helped a lot, thank you very much. But your query was taking too much time, so I changed it to a function, which returns the required values.
CREATE OR REPLACE FUNCTION SFGETCRCONTACTBY(INCRID NUMBER) RETURN VARCHAR2 AS
  TEMPINT NUMBER := 0;
  OUTPUT  VARCHAR2(10000);
  TEMPVAR VARCHAR2(1000);
BEGIN
  SELECT REGEXP_COUNT(CR_CONTACT_BY, '"contactBy":\s*(".*?")')
    INTO TEMPINT
    FROM T_LOAN_REQUEST_MARKET WHERE CR_ID = INCRID;

  WHILE TEMPINT > 0
  LOOP
    SELECT REGEXP_REPLACE(REGEXP_SUBSTR(CR_CONTACT_BY, '"contactBy":\s*(".*?")', 1, TEMPINT), '"contactBy":\s*"(.*?)"', '\1', 1, 1)
      INTO TEMPVAR
      FROM T_LOAN_REQUEST_MARKET WHERE CR_ID = INCRID;

    IF OUTPUT IS NULL THEN
      OUTPUT := TEMPVAR;
    ELSE
      OUTPUT := OUTPUT || ',' || TEMPVAR;
    END IF;

    TEMPINT := TEMPINT - 1;
  END LOOP;

  RETURN OUTPUT;
END;
/
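Once the function compiles, a quick smoke test could look like this (the CR_ID value 101 is only a placeholder):
-- Placeholder CR_ID; substitute a real id from T_LOAN_REQUEST_MARKET
SELECT SFGETCRCONTACTBY(101) AS contacts FROM dual;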