Parsing a JSON string in Oracle

I have a JSON string in one column in an Oracle 10g database, like:
[{"id":"1","contactBy":"Rajesh Kumar"},{"id":"2","contactBy":"Rakesh Kumar"}]
I have to get the value of contactBy in that column for one of the reports.
Is there any built-in function to parse the JSON string in Oracle 10g, or any user-defined function to parse the string?

As Jens said in the comments, native JSON support is only available from 12c onwards, but you can use regular expressions as a workaround to get what you want:
select regexp_replace(
         regexp_substr('[{"id": "1", "contactBy":"Rajesh Kumar"},{"id": "2","contactBy": "Emmanuel Test"}]',
                       '"contactBy":\s*("(\w| )*")', 1, level),
         '"contactBy":\s*"((\w| )*)"', '\1', 1, 1) contact
  from dual
connect by regexp_substr('[{"id": "1","contactBy":"Rajesh Kumar"},{"id": "2","contactBy": "Emmanuel Test"}]',
                         '"contactBy":\s*("(\w| )*")', 1, level) is not null;
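The CONNECT BY LEVEL clause keeps re-evaluating REGEXP_SUBSTR with an increasing occurrence number until no further match is found, so the query produces one row per contactBy match (here: Rajesh Kumar and Emmanuel Test).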
EDIT: query modified to handle special characters and to display the results in a single row:
select listagg(contact, ', ') within group (order by lev)
  from (
    select regexp_replace(
             regexp_substr('[{"id": "1", "contactBy":"Rajesh Kumar"},{"id": "2","contactBy": "Emmanuel Test+-"}]',
                           '"contactBy":\s*(".*?")', 1, level),
             '"contactBy":\s*"(.*?)"', '\1', 1, 1) contact,
           level lev
      from dual
    connect by regexp_substr('[{"id": "1","contactBy":"Rajesh Kumar"},{"id": "2","contactBy": "Emmanuel Test+-"}]',
                             '"contactBy":\s*(".*?")', 1, level) is not null
  );
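This returns a single row: Rajesh Kumar, Emmanuel Test+-.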

Emmanuel, your code really helped a lot, thank you very much. But your query was taking too much time, so I changed it to a function which returns the required values.
CREATE OR REPLACE FUNCTION SFGETCRCONTACTBY(INCRID NUMBER) RETURN VARCHAR2 AS
  TEMPINT NUMBER := 0;
  OUTPUT  VARCHAR2(10000);
  TEMPVAR VARCHAR2(1000);
BEGIN
  -- count the "contactBy" occurrences in the stored JSON
  SELECT REGEXP_COUNT(CR_CONTACT_BY, '"contactBy":\s*(".*?")')
    INTO TEMPINT
    FROM T_LOAN_REQUEST_MARKET
   WHERE CR_ID = INCRID;

  -- extract each occurrence and append it to the output
  -- (note: counting down from the last match, so values are appended in reverse order)
  WHILE TEMPINT > 0
  LOOP
    SELECT REGEXP_REPLACE(
             REGEXP_SUBSTR(CR_CONTACT_BY, '"contactBy":\s*(".*?")', 1, TEMPINT),
             '"contactBy":\s*"(.*?)"', '\1', 1, 1)
      INTO TEMPVAR
      FROM T_LOAN_REQUEST_MARKET
     WHERE CR_ID = INCRID;

    IF OUTPUT IS NULL THEN
      OUTPUT := TEMPVAR;
    ELSE
      OUTPUT := OUTPUT || ',' || TEMPVAR;
    END IF;
    TEMPINT := TEMPINT - 1;
  END LOOP;
  RETURN OUTPUT;
END;
/
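A quick usage check (the CR_ID value 101 here is just a hypothetical ID):
SELECT SFGETCRCONTACTBY(101) FROM dual;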

Related

How to split column values by comma and return it as an array

As you can see below, I have a Name column. I want to split it by / and return the values in an array.

MyTable:
Id  Name
1   John/Warner/Jacob
2   Kol
If I write a query as
Select Id, Name from MyTable
it will return
{
  "id": 1,
  "name": "John/Warner/Jacob",
},
{
  "id": 2,
  "name": "Kol",
},
Which query should I write to get the result below?
{
  "id": 1,
  "name": ["John", "Warner", "Jacob"],
},
{
  "id": 2,
  "name": ["Kol"],
},
I don't think you can return an array in the query itself, but you could do this...
SELECT id,
       SUBSTRING_INDEX(name, '/', 1)  AS name_part_1,
       SUBSTRING_INDEX(name, '/', -1) AS name_part_2
FROM tableName;
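Note that for 'John/Warner/Jacob' this yields John and Jacob: only the first and last parts, so middle parts such as Warner are dropped.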
Only way to build it as an array would be when processing the result accordingly in whatever language you are using.
You can define a function split, which is based on the fact that substring_index(substring_index(name,'/',x),'/',-1) will return the x-th part of a name when separated by '/'.
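For instance, a quick sanity check of that building block, taking the second part of the sample name:
SELECT SUBSTRING_INDEX(SUBSTRING_INDEX('John/Warner/Jacob', '/', 2), '/', -1);
-- returns 'Warner'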
CREATE FUNCTION `test`.`SPLIT`(s varchar(200), c char, i integer) RETURNS varchar(200) CHARSET utf8mb4
DETERMINISTIC
BEGIN
  DECLARE retval varchar(200);
  WITH RECURSIVE split as (
    select 1 as x, substring_index(substring_index(s,c,1),c,-1) as y, s
    union all
    select x+1, substring_index(substring_index(s,c,x+1),c,-1), s
      from split
     where x <= (LENGTH(s) - LENGTH(REPLACE(s,c,'')))
  )
  SELECT y INTO retval FROM split WHERE x=i;
  return retval;
END
and then do:
with mytable as (
  select 1 as Id, 'John/Warner/Jacob' as Name
  union all
  select 2, 'Kol'
)
select id, split(Name,'/',x) as name
  from mytable
 cross join (select 1 as x union all select 2 union all select 3) x
 order by id, name;
output:

Id  name
1   Jacob
1   John
1   Warner
2   [NULL]
2   [NULL]
2   Kol
It is, of course, possible to refine this, and leave out the NULL values ...
I will not convert this output to JSON for you ...
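That said, both refinements can be sketched, assuming MySQL 8.0 (where JSON_ARRAYAGG is available) and the SPLIT function above; this is not part of the original answer:

with mytable as (
  select 1 as Id, 'John/Warner/Jacob' as Name
  union all
  select 2, 'Kol'
)
select Id, json_arrayagg(part) as name
  from (
    select Id, split(Name, '/', x) as part
      from mytable
     cross join (select 1 as x union all select 2 union all select 3) x
  ) t
 where part is not null  -- drop the out-of-range parts
 group by Id;
-- 1  ["Jacob", "John", "Warner"]   (element order within the array is not guaranteed)
-- 2  ["Kol"]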

Parse JSON list with no key in PLSQL

What I'm trying to do is fill a table with the data from a JSON file. The file is formatted like this:
[
  {
    "name": "Victor",
    "age": "20"
  },
  {
    "name": "Ana",
    "age": "23"
  }
]
I can't change how it's formatted.
I tried using APEX_JSON to parse it and add the rows one by one, but I can't even get GET_COUNT to work; none of the paths I tried worked.
The database is Oracle 11g, so there's no JSON_TABLE.
--oracle 12c or later
SELECT *
  FROM JSON_TABLE (
         '[{"name":"Victor", "age":"20"},{"name":"Ana", "age":"23"}]',
         '$[*]'
         COLUMNS
           NAME VARCHAR2(2000) PATH '$.name',
           AGE  VARCHAR2(2000) PATH '$.age');

--oracle 11g
SELECT *
  FROM XMLTABLE (
         '/json/row'
         PASSING apex_json.to_xmltype (
           '[{"name":"Victor", "age":"20"},{"name":"Ana", "age":"23"}]')
         COLUMNS
           NAME VARCHAR2(2000) PATH '/row/name',
           AGE  VARCHAR2(2000) PATH '/row/age');
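APEX_JSON.TO_XMLTYPE() converts the JSON document into an XMLTYPE whose root element is json, with one row element per array item; that is why the XPath expressions above address /json/row.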
You can use XMLTABLE along with the APEX_JSON.TO_XMLTYPE() function in order to simulate JSON_TABLE, such as:
WITH t(jsCol) AS
(
  SELECT '[
    {
      "name": "Victor",
      "age": "20"
    },
    {
      "name": "Anna",
      "age": "23"
    }
  ]'
  FROM dual
)
SELECT name, age
  FROM t,
       XMLTABLE('/json/row'
         PASSING APEX_JSON.TO_XMLTYPE(jsCol)
         COLUMNS
           name VARCHAR2(100) PATH 'name',
           age  VARCHAR2(100) PATH 'age'
       )
NAME    AGE
Victor  20
Anna    23
With APEX_JSON you can do something like this:
DECLARE
  l_json_text   VARCHAR2(32767);
  l_json_values apex_json.t_values;
BEGIN
  l_json_text := '[
    {"name":"Victor", "age":"20"},
    {"name":"Ana", "age":"23"}
  ]';
  apex_json.parse(
    p_values => l_json_values,
    p_source => l_json_text
  );
  DBMS_OUTPUT.put_line('----------------------------------------');
  FOR r IN 1 .. nvl(apex_json.get_count(p_path => '.', p_values => l_json_values), 0) LOOP
    dbms_output.put_line(apex_json.get_varchar2(p_path => '[%d].name', p0 => r, p_values => l_json_values));
    dbms_output.put_line(apex_json.get_varchar2(p_path => '[%d].age',  p0 => r, p_values => l_json_values));
    /* insert into your_table (name, age)
       VALUES
       (apex_json.get_varchar2(p_path => '[%d].name', p0 => r, p_values => l_json_values),
        apex_json.get_varchar2(p_path => '[%d].age',  p0 => r, p_values => l_json_values));
    */
  END LOOP;
  DBMS_OUTPUT.put_line('----------------------------------------');
END;
/
If you can find a proper JSON parser then you should use that; however, if one is not available, you could parse it yourself. From Oracle 11gR2, you can use:
INSERT INTO table_name (name, age)
WITH jsondata (json) AS (
  SELECT '[
    {"name":"Victor", "age":"20"},
    {"name":"Ana", "age":"23"},
    {
      "name":"Betty",
      "age":"24"
    },
    {
      "age":"25",
      "name":"Carol"
    }
  ]' FROM DUAL
),
data (json, items, i, name, age) AS (
  SELECT json,
         REGEXP_COUNT(
           json,
           '\{\s*"name"\s*:\s*"(.*?)"\s*,\s*"age"\s*:\s*"(.*?)"\s*\}'
           || '|\{\s*"age"\s*:\s*"(.*?)"\s*,\s*"name"\s*:\s*"(.*?)"\s*\}',
           1,
           'n'
         ),
         1,
         REGEXP_SUBSTR(
           REGEXP_SUBSTR(
             json,
             '\{\s*"name"\s*:\s*"(.*?)"\s*,\s*"age"\s*:\s*"(.*?)"\s*\}'
             || '|\{\s*"age"\s*:\s*"(.*?)"\s*,\s*"name"\s*:\s*"(.*?)"\s*\}',
             1,
             1,
             'n'
           ),
           '"name"\s*:\s*"(.*?)"',
           1, 1, 'n', 1
         ),
         REGEXP_SUBSTR(
           REGEXP_SUBSTR(
             json,
             '\{\s*"name"\s*:\s*"(.*?)"\s*,\s*"age"\s*:\s*"(.*?)"\s*\}'
             || '|\{\s*"age"\s*:\s*"(.*?)"\s*,\s*"name"\s*:\s*"(.*?)"\s*\}',
             1,
             1,
             'n'
           ),
           '"age"\s*:\s*"(.*?)"',
           1, 1, 'n', 1
         )
  FROM jsondata
  UNION ALL
  SELECT json,
         items,
         i + 1,
         REGEXP_SUBSTR(
           REGEXP_SUBSTR(
             json,
             '\{\s*"name"\s*:\s*"(.*?)"\s*,\s*"age"\s*:\s*"(.*?)"\s*\}'
             || '|\{\s*"age"\s*:\s*"(.*?)"\s*,\s*"name"\s*:\s*"(.*?)"\s*\}',
             1,
             i + 1,
             'n'
           ),
           '"name"\s*:\s*"(.*?)"',
           1, 1, 'n', 1
         ),
         REGEXP_SUBSTR(
           REGEXP_SUBSTR(
             json,
             '\{\s*"name"\s*:\s*"(.*?)"\s*,\s*"age"\s*:\s*"(.*?)"\s*\}'
             || '|\{\s*"age"\s*:\s*"(.*?)"\s*,\s*"name"\s*:\s*"(.*?)"\s*\}',
             1,
             i + 1,
             'n'
           ),
           '"age"\s*:\s*"(.*?)"',
           1, 1, 'n', 1
         )
  FROM data
  WHERE i < items
)
SELECT name, age
FROM data;
(Note: the regular expression does not handle escaped quotes in the strings, as I am assuming they will not occur in names; however, if they do occur then instead of .*? you can use (\\(["\\/bfnrt]|u[0-9a-fA-F]{4})|[^"\\])*.)
Which, given the table:
CREATE TABLE table_name (name VARCHAR2(30), age NUMBER);
Then after the insert:
SELECT * FROM table_name;
Outputs:

NAME    AGE
Victor  20
Ana     23
Betty   24
Carol   25
Last time I did that, I used a CLOB variable. Try doing it like this:
DECLARE
  json_body clob := '[
    {"name":"Victor", "age":"20"},
    {"name":"Ana", "age":"23"}
  ]';
BEGIN
  FOR items IN (SELECT *
                  FROM JSON_TABLE(json_body FORMAT JSON, '$[*]'
                         COLUMNS (
                           name_ varchar(200) PATH '$.name',
                           age_  varchar(200) PATH '$.age')))
  LOOP
    INSERT INTO T_DATA (name, age)
    VALUES (items.name_, items.age_);
  END LOOP;
END;
/
This will put your data into a table, and then you can play with it:
select * from T_DATA;
resulting in two rows: (Victor, 20) and (Ana, 23). Note that JSON_TABLE requires Oracle 12c or later, so this variant will not work on the 11g database mentioned in the question.

How to parse this JSON to populate into APEX Collection and sort by date

I have the below JSON to parse using APEX_JSON in an Oracle 11gR2 database.
My requirement is to sort the data by closed_at and pick the latest close_reason. I'm thinking of populating closed_at and close_reason into an APEX collection, sorting the dates descending in apex_collections, and picking the latest close_reason.
Can you please help with how to write the APEX_JSON.get_count and APEX_JSON.get_varchar2 calls to fetch closed_at and close_reason?
{"openings": [{
"id": 5003,
"opening_id": null,
"status": "closed",
"opened_at": "2020-11-30T20:09:56.487Z",
"closed_at": "2020-12-02T22:43:45.736Z",
"application_id": 76370003,
"close_reason": null
}, {
"id": 8003,
"opening_id": null,
"status": "closed",
"opened_at": "2020-11-30T20:59:02.267Z",
"closed_at": "2020-12-04T11:07:26.087Z",
"application_id": 45990003,
"close_reason": {
"id": 7003,
"name": "Hire - New Headcount"
}
}
]
}
Thanks
Kishore
Prior to using APEX_JSON.GET_VARCHAR2(), you need to parse the document with APEX_JSON.PARSE() within PL/SQL code. Rather than using PL/SQL, SQL can be used directly through XMLTABLE, such as
WITH t1 AS
(
  SELECT APEX_JSON.TO_XMLTYPE(jsdata) AS xml_data
    FROM t0 -- suppose your JSON data is inserted into the jsdata column of this table
), t2 AS
(
  SELECT close_reason,
         ROW_NUMBER() OVER
           (ORDER BY TO_TIMESTAMP(closed_at, 'yyyy-mm-dd"T"hh24:mi:ss.ff"Z"') DESC) AS rn
    FROM t1
   CROSS JOIN
         XMLTABLE('/json/openings/row'
           PASSING xml_data
           COLUMNS
             closed_at    VARCHAR2(100) PATH 'closed_at',
             close_reason VARCHAR2(900) PATH 'close_reason/name'
         )
)
SELECT close_reason
  FROM t2
 WHERE rn = 1
in order to bring the latest close reason.
If you really need to use those functions given in the question, then use the following code block
DECLARE
  v_json VARCHAR2(32767);
  v_ct   OWA.VC_ARR;
  v_cr   OWA.VC_ARR;
  TYPE ts IS TABLE OF TIMESTAMP INDEX BY BINARY_INTEGER;
  v_ts   ts;
  v_ts1  TIMESTAMP;
  idx    INT;
BEGIN
  SELECT *
    INTO v_json
    FROM t0; -- there's no WHERE clause, assuming only one row has been inserted
  APEX_JSON.PARSE(v_json);
  FOR i IN 1..APEX_JSON.GET_COUNT('openings')
  LOOP
    v_ct(i) := APEX_JSON.GET_VARCHAR2('openings['||i||'].closed_at');
    v_ts(i) := TO_TIMESTAMP(v_ct(i), 'yyyy-mm-dd"T"hh24:mi:ss.ff"Z"');
    IF NVL(v_ts1, v_ts(i)) <= v_ts(i) THEN
      v_ts1 := v_ts(i);
      idx   := i;
    END IF;
    v_cr(i) := APEX_JSON.GET_VARCHAR2('openings['||i||'].close_reason.name');
  END LOOP;
  DBMS_OUTPUT.PUT_LINE(v_cr(idx));
END;
/
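With the sample JSON above, both approaches yield Hire - New Headcount, since the second opening has the later closed_at.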

Optimize - Function that SELECTs from TEMP TABLE within loop to get averages of JSON values

I have a Mysql Function that runs as part of a larger query reading a few million records. In order to detect anomalies, I'm figuring out the average change over time. The data in the table is stored as JSON objects with UNIX timestamps as the key for up to 30 days.
As an example, the input (input_array) would look something like:
[{"1532944806": 16}, {"1533031206": 14}, {"1533117605": 13}, {"1533204305": 12}, {"1533290708": 10}, {"1533463506": 9}, {"1533549907": 9}, {"1533636306": 9}, {"1533722707": 9}, {"1533809108": 9}, {"1533895506": 9}, {"1533981906": 8}, {"1534068306": 7}, {"1534154706": 7}, {"1534241108": 7}, {"1534590304": 7}, {"1534673106": 12}, {"1534759508": 6}, {"1534845905": 7}, {"1534932306": 7}, {"1535018707": 5}, {"1535105106": 3}, {"1535191505": 7}, {"1535277907": 6}, {"1535364305": 7}, {"1535450706": 2}, {"1535537107": 1}]
I'm only looking to average decreasing changes - not any change that increases over a day.
I'm checking that a value for the previous day exists, and if so, I'm calculating the change and adding it into a temporary table that gets queried to select the average.
So far I have:
CREATE FUNCTION `daily_averages`(input_array JSON) RETURNS int(4)
READS SQL DATA
DETERMINISTIC
BEGIN
  DECLARE array_length INTEGER(2);
  DECLARE prev_value INTEGER(4);
  DECLARE idx INTEGER(4);

  DROP TEMPORARY TABLE IF EXISTS collection;
  -- `change` is a reserved word in MySQL, so it must be quoted
  CREATE TEMPORARY TABLE collection (`change` INTEGER(4) SIGNED DEFAULT 0);

  SELECT JSON_LENGTH(input_array) INTO array_length;
  SET idx = 0;
  WHILE idx < array_length DO
    SELECT
      IF(idx-1 > -1,
         CONVERT(
           JSON_EXTRACT(
             JSON_EXTRACT(
               JSON_EXTRACT( input_array, CONCAT( '$[', idx-1, ']' ) )
               , '$.*'
             )
             , '$[0]'
           ), SIGNED INTEGER
         )
         , -1
      )
    INTO prev_value;

    INSERT INTO collection
    SELECT (prev_value -
            CONVERT(
              JSON_EXTRACT(
                JSON_EXTRACT(
                  JSON_EXTRACT( input_array, CONCAT( '$[', idx, ']' ) )
                  , '$.*'
                )
                , '$[0]'
              ), SIGNED INTEGER
            )
           )
    FROM DUAL
    WHERE prev_value > 0;

    SET idx = idx + 1;
  END WHILE;
  RETURN (SELECT AVG(`change`) FROM collection WHERE `change` > -1);
END
With about 2.7 million records, it takes about 20 minutes to run currently. I'm looking to optimize this or re-write it by avoiding the DROP/CREATE overhead.
It seems unnecessary to create a table just to calculate an average; it's simple to do in the loop. Instead of inserting each value into a table, add it to a total variable. At the end, return total/count.
Since you're totalling the differences between values, you only need to keep the previous value around as you walk the array; there is nothing to look up later.
You can also use SET statements to assign variables, rather than SELECT ... INTO variable.
DECLARE array_length INTEGER(2);
DECLARE prev_value INTEGER(4);
DECLARE idx INTEGER(4);
DECLARE total INTEGER(4);
DECLARE counter INTEGER(4);
DECLARE cur_value INTEGER(4);

SET array_length = JSON_LENGTH(input_array);
SET total = 0;
SET counter = 0;

-- Initialize prev_value to the first element
SET prev_value = CONVERT(
  JSON_EXTRACT(
    JSON_EXTRACT(
      JSON_EXTRACT( input_array, '$[0]' )
      , '$.*'
    )
    , '$[0]'
  ), SIGNED INTEGER
);

SET idx = 1;
WHILE idx < array_length DO
  SET cur_value = CONVERT(
    JSON_EXTRACT(
      JSON_EXTRACT(
        JSON_EXTRACT( input_array, CONCAT( '$[', idx, ']' ) )
        , '$.*'
      )
      , '$[0]'
    ), SIGNED INTEGER
  );
  IF cur_value < prev_value THEN
    SET total = total + (prev_value - cur_value);
    SET counter = counter + 1;
  END IF;
  SET prev_value = cur_value;
  SET idx = idx + 1;
END WHILE;

RETURN total / counter;
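(This is the body only; it assumes the same CREATE FUNCTION `daily_averages`(input_array JSON) RETURNS int(4) header as the original. A hypothetical call, with made-up table and column names: SELECT daily_averages(history_json) FROM device_history;)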
Digging inside a million JSON strings. I'm amazed it took only 20 minutes.
As you insert the rows, do some calculations and store the results somewhere. Then use that for doing the monitoring.
Even if you can't do it as you insert the rows, do it only to the 'new' rows. Again save the previous info somewhere.
As for DROP/CREATE... That can be sped up by having a permanent table, then use only TRUNCATE TABLE at the start of each proc call.
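A minimal sketch of that idea, keeping the same `change` column as the original temporary table:

-- created once, outside the function:
CREATE TABLE collection (`change` INTEGER SIGNED DEFAULT 0);

-- then, at the start of each call, instead of DROP/CREATE:
TRUNCATE TABLE collection;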
The (4) in INTEGER(4) does not mean anything. You will always get a 32-bit integer. (This note probably has no impact on the proc.)

Converting JSON to table in SQL Server 2016

I'm working on a Web project where the client application communicates with the DB via JSONs.
The initial implementation took place with SQL Server 2012 (NO JSON support and hence we implemented a Stored Function that handled the parsing) and now we are moving to 2016 (YES JSON support).
So far, we are reducing processing time by a significant factor (in some cases, over 200 times faster!).
There are some interactions that contain arrays that need to be converted into tables. To achieve that, the OPENJSON function does ALMOST what we need.
In some of these (array-based) cases, records within the arrays have one or more fields that are also OBJECTS (in this particular case, also arrays), for instance:
[{
  "Formal_Round_Method": "Floor",
  "Public_Round_Method": "Closest",
  "Formal_Precision": "3",
  "Public_Precision": "3",
  "Formal_Significant_Digits": "3",
  "Public_Significant_Digits": "3",
  "General_Comment": [{
    "Timestamp": "2018-07-16 09:19",
    "From": "1",
    "Type": "Routine_Report",
    "Body": "[To + Media + What]: Comment 1",
    "$$hashKey": "object:1848"
  }, {
    "Timestamp": "2018-07-16 09:19",
    "From": "1",
    "Type": "User_Comment",
    "Body": "[]: Comment 2",
    "$$hashKey": "object:1857"
  }, {
    "Timestamp": "2018-07-16 09:19",
    "From": "1",
    "Type": "Routine_Report",
    "Body": "[To + Media + What]: Comment 3",
    "$$hashKey": "object:1862"
  }]
}, {
  "Formal_Round_Method": "Floor",
  "Public_Round_Method": "Closest",
  "Formal_Precision": "3",
  "Public_Precision": "3",
  "Formal_Significant_Digits": "3",
  "Public_Significant_Digits": "3",
  "General_Comment": []
}]
Here, General_Comment is also an array.
When running the command:
SELECT *
FROM OPENJSON(@_l_Table_Data)
WITH ( Formal_Round_Method       NVARCHAR(16)   '$.Formal_Round_Method',
       Public_Round_Method       NVARCHAR(16)   '$.Public_Round_Method',
       Formal_Precision          INT            '$.Formal_Precision',
       Public_Precision          INT            '$.Public_Precision',
       Formal_Significant_Digits INT            '$.Formal_Significant_Digits',
       Public_Significant_Digits INT            '$.Public_Significant_Digits',
       General_Comment           NVARCHAR(4000) '$.General_Comment'
     );
[@_l_Table_Data is a variable holding the JSON string]
we are getting the column General_Comment = NULL even though there is data in there (at least in the first element of the array).
I guess that I should be using a different syntax for those columns that may contain OBJECTS and not SIMPLE VALUES, but I have no idea what that syntax should be.
I found a Microsoft page that actually solves the problem.
Here is how the query should look:
SELECT *
FROM OPENJSON(@_l_Table_Data)
WITH ( Formal_Round_Method       NVARCHAR(16)  '$.Formal_Round_Method',
       Public_Round_Method       NVARCHAR(16)  '$.Public_Round_Method',
       Formal_Precision          INT           '$.Formal_Precision',
       Public_Precision          INT           '$.Public_Precision',
       Formal_Significant_Digits INT           '$.Formal_Significant_Digits',
       Public_Significant_Digits INT           '$.Public_Significant_Digits',
       General_Comment           NVARCHAR(MAX) '$.General_Comment' AS JSON
     );
So, you need to add AS JSON at the end of the column definition and (God knows why) the type MUST be NVARCHAR(MAX).
Very simple indeed!!!
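If you then need the General_Comment entries as rows of their own, a hedged sketch (reusing the sample's field names; not part of the original answer) is to feed the AS JSON column into a second OPENJSON via CROSS APPLY:

SELECT t.Formal_Round_Method, c.[Timestamp], c.[From], c.[Type], c.Body
FROM OPENJSON(@_l_Table_Data)
     WITH ( Formal_Round_Method NVARCHAR(16)  '$.Formal_Round_Method',
            General_Comment     NVARCHAR(MAX) '$.General_Comment' AS JSON ) t
CROSS APPLY OPENJSON(t.General_Comment)
     WITH ( [Timestamp] NVARCHAR(20)   '$.Timestamp',
            [From]      NVARCHAR(10)   '$.From',
            [Type]      NVARCHAR(32)   '$.Type',
            Body        NVARCHAR(4000) '$.Body' ) c;

(Use OUTER APPLY instead of CROSS APPLY if rows with an empty General_Comment array should be kept.)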
Create Function ParseJson:
Create or Alter FUNCTION [dbo].[ParseJson] (@JSON NVARCHAR(MAX))
RETURNS @Unwrapped TABLE
  (
    [id] INT IDENTITY, --just used to get a unique reference to each json item
    [level] INT, --the hierarchy level
    [key] NVARCHAR(100), --the key or name of the item
    [Value] NVARCHAR(MAX), --the value, if it is a null, int, binary, numeric or string
    type INT, --0 to 5, the JSON type: null, numeric, string, binary, array or object
    SQLDatatype sysname, --whatever the datatype can be parsed to
    parent INT, --the ID of the parent
    [path] NVARCHAR(4000) --the path as used by OpenJSON
  )
AS BEGIN
  INSERT INTO @Unwrapped ([level], [key], Value, type, SQLDatatype, parent, [path])
  VALUES
    (0, --the level
     NULL, --the key
     @JSON, --the value
     CASE WHEN LEFT(LTRIM(@JSON), 1) = '[' THEN 4 ELSE 5 END, --the type
     'json', --SQLDatatype
     0, --no parent
     '$' --base path
    );
  DECLARE @ii INT = 0, --the level
          @Rowcount INT = -1; --the number of rows from the previous iteration
  WHILE @Rowcount <> 0 --while we are still finding levels
  BEGIN
    INSERT INTO @Unwrapped ([level], [key], Value, type, SQLDatatype, parent, [path])
    SELECT [level] + 1 AS [level], new.[Key] AS [key],
           new.[Value] AS [value], new.[Type] AS [type],
           /* in order to determine the datatype of a json value, the best approach is to
              determine the datatype that can be parsed. In JSON, an array of objects can
              contain attributes that aren't consistent either in their name or value. */
           CASE
             WHEN new.Type = 0 THEN 'bit null'
             WHEN new.[type] IN (1, 2) THEN COALESCE(
               CASE WHEN TRY_CONVERT(INT, new.[value]) IS NOT NULL THEN 'int' END,
               CASE WHEN TRY_CONVERT(NUMERIC(14,4), new.[value]) IS NOT NULL THEN 'numeric' END,
               CASE WHEN TRY_CONVERT(FLOAT, new.[value]) IS NOT NULL THEN 'float' END,
               CASE WHEN TRY_CONVERT(MONEY, new.[value]) IS NOT NULL THEN 'money' END,
               CASE WHEN TRY_CONVERT(DATETIME, new.[value], 126) IS NOT NULL THEN 'Datetime2' END,
               CASE WHEN TRY_CONVERT(DATETIME, new.[value], 127) IS NOT NULL THEN 'Datetime2' END,
               'nvarchar')
             WHEN new.Type = 3 THEN 'bit'
             WHEN new.Type = 5 THEN 'object' ELSE 'array'
           END AS SQLDatatype,
           old.[id],
           old.[path] + CASE WHEN old.type = 5 THEN '.' + new.[Key]
                             ELSE '[' + new.[Key] COLLATE DATABASE_DEFAULT + ']' END AS path
      FROM @Unwrapped old
     CROSS APPLY OpenJson(old.[Value]) new
     WHERE old.[level] = @ii AND old.type IN (4, 5);
    SELECT @Rowcount = @@ROWCOUNT;
    SELECT @ii = @ii + 1;
  END;
  RETURN;
END
For usage:
select * from dbo.ParseJson(@jsonString)
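A minimal, hypothetical call with a trimmed-down version of the question's JSON:

DECLARE @j NVARCHAR(MAX) =
  N'[{"Formal_Precision":"3","General_Comment":[{"Type":"User_Comment","Body":"[]: Comment 2"}]}]';
SELECT * FROM dbo.ParseJson(@j);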