Oracle 12c: Remove JSON object from JSON array - json

Need to create a function which takes input of CLOB and I need to remove array matching the condition.
-- Removes every object whose "key" attribute equals p_in_key from the JSON
-- array passed in p_in_json, returning the modified array as a CLOB.
-- NOTE(review): two defects are visible in this version:
--  1. The FOR loop bound (get_size - 1) is computed once, but remove()
--     shrinks the array, so later iterations index past the new end;
--     l_ja.get(idx) then yields NULL and json_object_t(NULL) raises
--     ORA-30625. Iterating in REVERSE avoids the index shifting.
--  2. A NULL p_in_json leaves l_ja NULL, so any method call on it also
--     raises ORA-30625 -- guard for NULL input before parsing.
-- Also: dbms_output.put (without put_line / new_line) never flushes a line,
-- so these traces may not appear in the output buffer.
create or replace FUNCTION remove_config_node_by_key (
p_in_json IN CLOB,
p_in_key IN VARCHAR2
) RETURN CLOB IS
l_ja json_array_t;
l_po json_object_t;
l_key VARCHAR2(500);
BEGIN
l_ja := json_array_t.parse(p_in_json);
FOR idx IN 0.. l_ja.get_size - 1 LOOP
l_po := json_object_t(l_ja.get(idx));
l_key := l_po.get_string('key');
-- check if the key matches with input and then delete that node.
dbms_output.put('Key to remove in the JSON: ' || l_key);
IF l_key = p_in_key THEN
dbms_output.put('Key to remove in the JSON: ' || l_key);
l_ja.remove (idx);
-- dbms_output.new_line;
dbms_output.put('Key is removed in the JSON: ' || l_key);
END IF;
END LOOP;
RETURN l_ja.to_clob;
END;
When called with:
update COLD_DRINKS cd set cd.configuration = remove_config_node_by_key(cd.configuration, 'b')
where country='INDIA';
I get error:
Error report -
ORA-30625: method dispatch on NULL SELF argument is disallowed
ORA-06512: at "SYS.JSON_OBJECT_T", line 72
ORA-06512: at "PLATFORM_ADMIN_DATA.REMOVE_CONFIG_NODE_BY_KEY", line 11
input JSON:
[
{
"key": "a",
"value": "lemon soda"
},
{
"key": "b",
"value": "Coke"
},
{
"key": "c",
"value": "Pepsi"
}
]
Expected JSON after execution:
[
{
"key": "a",
"value": "lemon soda"
},
{
"key": "c",
"value": "Pepsi"
}
]
I think something is wrong about this l_ja.remove (idx); as this one causes the exception. Not able to remove the object at index.

In 18c at least it works with your sample data (with the trailing comma removed from the array), but it gets that error with a null configuration.
So you can either test for null in your function, or exclude nulls from your update, or fix your data so it doesn't have nulls.
The simplest thing to do is probably add a null check:
...
BEGIN
IF p_in_json IS NULL THEN
RETURN NULL;
END IF;
l_ja := json_array_t.parse(p_in_json);
...
fiddle

You can also remove it using json_transform:
-- Removes every element of the top-level JSON array whose "key" attribute
-- equals p_in_key, using JSON_TRANSFORM (Oracle 19c+).
-- Returns NULL for NULL input (avoids a pointless dynamic-SQL round trip).
-- NOTE: the path predicate item accessor is '@' (it was mangled to '#' in
-- the original post).
create or replace function remove_config_node_by_key (
p_in_json IN CLOB,
p_in_key IN VARCHAR2
) RETURN CLOB IS
l_result CLOB ;
begin
IF p_in_json IS NULL THEN
RETURN NULL;
END IF;
-- The JSON_TRANSFORM path expression cannot take bind variables, so the key
-- must be concatenated into the statement text. Reject characters that
-- could break out of the path string literal (SQL/JSON-path injection).
IF regexp_like(p_in_key, '["\\]') THEN
raise_application_error(-20001,
'remove_config_node_by_key: key must not contain " or \ characters');
END IF;
execute immediate q'~select json_transform(
:p_in_json,
REMOVE '$[*]?(@.key == "~' || p_in_key || q'~")'
)
from dual~' into l_result using p_in_json
;
return l_result ;
end ;
/
(with all usual comments regarding possible SQL injection...)

The issue was resolved when I added REVERSE in for loop
before [ERROR]
FOR idx IN 0.. l_ja.get_size - 1
after [PASS]
FOR idx IN REVERSE 0.. l_ja.get_size - 1
Complete working function
-- Removes every object whose "key" attribute equals p_in_key from the JSON
-- array given in p_in_json and returns the modified array as a CLOB.
-- The loop runs in REVERSE so that remove(idx) never shifts the positions of
-- elements still to be visited; a forward loop walks past the shrunken array
-- and raises ORA-30625 on the NULL element it fetches.
CREATE OR REPLACE FUNCTION remove_config_node_by_key (
p_in_json IN CLOB,
p_in_key IN VARCHAR2
) RETURN CLOB IS
l_ja json_array_t := json_array_t ();
l_po json_object_t;
l_key VARCHAR2(500);
BEGIN
-- Guard: parsing NULL yields a NULL object, and invoking any method on a
-- NULL SELF raises ORA-30625 (the error seen when a row's configuration
-- column is NULL). Pass NULL through unchanged instead.
IF p_in_json IS NULL THEN
RETURN NULL;
END IF;
l_ja := json_array_t.parse(p_in_json);
FOR idx IN REVERSE 0.. l_ja.get_size - 1
LOOP
l_po := json_object_t(l_ja.get(idx));
l_key := l_po.get_string('key');
-- check if the key matches with input and then delete that node.
IF l_key = p_in_key THEN
dbms_output.put_line('Key to remove in the JSON: ' || l_key || ' at index : ' || idx);
l_ja.remove (idx);
dbms_output.put_line('Key is removed in the JSON: ' || l_key);
END IF;
END LOOP;
RETURN l_ja.to_clob;
END;
/

Related

Postgres jsonb_object_agg returns only the last row

I want to transform a JSON table on a JSON list.
This code :
DO
$$
DECLARE
varMyJson jsonb;
BEGIN
-- Start with a two-element jsonb array of single-key objects.
varMyJson := '[{"Field1":"Value1"},{"Field2":"Value2"}]'::jsonb;
RAISE NOTICE 'varMyJson : %', varMyJson;
-- NOTE(review): jsonb_object_agg folds all rows into ONE jsonb object, and
-- jsonb objects cannot hold duplicate keys -- with the constant key 'MyKey'
-- each row overwrites the previous one, so only the last array element
-- survives. This is why the second NOTICE shows a single-entry object.
SELECT jsonb_object_agg(a.key, a.value)
INTO varMyJson
FROM
(
SELECT 'MyKey' as key, JsonString.value
FROM jsonb_array_elements(varMyJson) JsonString
) a;
RAISE NOTICE 'varMyJson : %', varMyJson;
END
$$
returns :
NOTICE: varMyJson : [{"Field1": "Value1"}, {"Field2": "Value2"}]
NOTICE: varMyJson : {"MyKey": {"Field2": "Value2"}}
But, I want this :
{"MyKey":{"Field1":"Value1"},"MyKey":{"Field2": "Value2"}}
I don't understand why it doesn't work.
You cannot have a jsonb object with duplicate keys. Your json_object_agg function will work expectedly when your keys are unique.
You can get your desired results in a jsonb array:
-- Same result, restructured: the set-returning function is moved out of the
-- SELECT list into the FROM clause (implicit LATERAL), producing one wrapped
-- object per array element, which is then aggregated back into a JSON array.
WITH data AS (
    SELECT '[{"Field1":"Value1"},{"Field2":"Value2"}]'::jsonb AS items
),
wrapped AS (
    SELECT jsonb_build_object('myKey', elem) AS v
    FROM data, jsonb_array_elements(data.items) AS elem
)
SELECT json_agg(v)
FROM wrapped
-- json (unlike jsonb) preserves duplicate keys, so json_object_agg with a
-- constant key yields an object that repeats 'MyKey' once per array element.
SELECT json_object_agg('MyKey', JsonString.value)
FROM json_array_elements('[{"Field1": "Value1"}, {"Field2": "Value2"}]' :: json) JsonString
result = { "MyKey" : {"Field1": "Value1"}, "MyKey" : {"Field2": "Value2"} }

Oracle select JSON column as key / value table [duplicate]

This question already has an answer here:
Query json dictionary data in SQL
(1 answer)
Closed 1 year ago.
In Oracle 12c, having a column with JSON data in this format:
{
"user_name": "Dave",
"phone_number": "13326415",
"married": false,
"age": 18
}
How can I select it in this format:
key val
-------------- ----------
"user_name" "Dave"
"phone_number" "13326415"
"married" "false"
"age" "18"
As stated in the comment, there is no way to get the keys of a JSON object using just SQL. With PL/SQL you can create a pipelined function to get the information you need. Below is a very simple pipelined function that will get the keys of a JSON object and print the type each key is, as well as the key name and the value.
First, you will need to create the types that will be used by the function
-- Row type returned by get_key_value_table: one row per top-level key of a
-- JSON object. FORCE allows recreating the type even if dependents exist.
CREATE OR REPLACE TYPE key_value_table_rec FORCE AS OBJECT
(
TYPE VARCHAR2 (100), -- JSON type name ('string', 'number', 'object', ...)
key VARCHAR2 (200), -- the key name within the JSON object
VALUE VARCHAR2 (200) -- the scalar value rendered as a string (NULL for object/array)
);
/
-- Collection type so the pipelined function can be queried with TABLE().
CREATE OR REPLACE TYPE key_value_table_t AS TABLE OF key_value_table_rec;
/
Next, create the pipelined function that will return the information in the format of the types defined above.
-- Pipelined function: given a JSON object (as a CLOB), emits one row per
-- top-level key with the value's JSON type name, the key, and the value
-- rendered as a string (get_string yields NULL for objects/arrays -- see
-- the sample output where key_obj/key_arr have an empty VALUE).
CREATE OR REPLACE FUNCTION get_key_value_table (p_json CLOB)
RETURN key_value_table_t
PIPELINED
AS
l_json json_object_t;
l_json_keys json_key_list;
l_json_element json_element_t;
BEGIN
l_json := json_object_t (p_json);
l_json_keys := l_json.get_keys;
FOR i IN 1 .. l_json_keys.COUNT
LOOP
l_json_element := l_json.get (l_json_keys (i));
PIPE ROW (key_value_table_rec (
-- Map the element's runtime type to a readable name. The order of the
-- WHEN branches matters where predicates can overlap (e.g. timestamp
-- is tested before date).
CASE
WHEN l_json_element.is_null THEN 'null'
WHEN l_json_element.is_boolean THEN 'boolean'
WHEN l_json_element.is_number THEN 'number'
WHEN l_json_element.is_timestamp THEN 'timestamp'
WHEN l_json_element.is_date THEN 'date'
WHEN l_json_element.is_string THEN 'string'
WHEN l_json_element.is_object THEN 'object'
WHEN l_json_element.is_array THEN 'array'
ELSE 'unknown'
END,
l_json_keys (i),
l_json.get_string (l_json_keys (i))));
END LOOP;
RETURN;
EXCEPTION
WHEN OTHERS
THEN
CASE SQLCODE
WHEN -40834
THEN
--JSON format is not valid
-- Deliberately swallowed: invalid JSON simply ends the pipe, so the
-- caller gets zero rows instead of an error. Everything else re-raises.
NULL;
ELSE
RAISE;
END CASE;
END;
/
Finally, you can call the pipelined function from a SELECT statement
SELECT * FROM TABLE (get_key_value_table (p_json => '{
"user_name": "Dave",
"phone_number": "13326415",
"married": false,
"age": 18
}'));
TYPE KEY VALUE
__________ _______________ ___________
string user_name Dave
string phone_number 13326415
boolean married false
number age 18
If your JSON values are stored in a column in a table, you can view the keys/values using CROSS JOIN
WITH
sample_table (id, json_col)
AS
(SELECT 1, '{"key1":"val1","key_obj":{"nested_key":"nested_val"},"key_bool":false}'
FROM DUAL
UNION ALL
SELECT 2, '{"key3":3.14,"key_arr":[1,2,3]}' FROM DUAL)
SELECT t.id, j.*
FROM sample_table t CROSS JOIN TABLE (get_key_value_table (p_json => t.json_col)) j;
ID TYPE KEY VALUE
_____ __________ ___________ ________
1 string key1 val1
1 object key_obj
1 boolean key_bool false
2 number key3 3.14
2 array key_arr

how to merge two json_arrays in postgresql

I need to MERGE two JSONB_ARRAYS
i have in my table column jsonb of items which looks like this:
[
{"fav": 1, "is_active": true, "date": "1999-00-00 11:07:05.710000"},
{"fav": 2, "is_active": true, "date": "1998-00-00 11:07:05.710000"}
]
where fav's value is unique number.
And i have incoming data, where could be the same items which also exists in my table and also new items and after merging the result must be that way where new items just need to add but existing items i need to update
so after merging the result must look like this:
merge:
[
{"fav": 1, "is_active": true, "date": "1999-00-00 11:07:05.710000"},
{"fav": 2, "is_active": true, "date": "1998-00-00 11:07:05.710000"}
]::jsonb ||
[
{"fav": 3, "is_active": true, "date": "2019-00-00 11:07:05.710000"},
{"fav": 1, "is_active": false, "date": "2020-00-00 11:07:05.710000"}
]::jsonb
------------------------------------------------------------------------
result:
[
{"fav": 1, "is_active": false, "date": "2020-00-00 11:07:05.710000"},
{"fav": 2, "is_active": true, "date": "1998-00-00 11:07:05.710000"},
{"fav": 3, "is_active": true, "date": "2019-00-00 11:07:05.710000"}
]
as expected the "fav": 1 -> was updated and "fav": 3 -> was added
Maybe I need to refactor the structure of my JSON, or maybe something else?
and maybe it would be better if i retrieve json to Collection and work with objects and after all manipulations just save it back?
Update 1
i try write custom function:
-- NOTE(review): this attempt does not work as written (as the poster says):
--  * jsonb_array_elements() is a set-returning function; feeding its result
--    directly into jsonb_each()/json_each() is not valid -- the SRF must be
--    expanded in a FROM clause first.
--  * json_each (the json variant) is mixed with jsonb inputs; jsonb_each is
--    the jsonb form.
--  * jsonb_each splits each object into key/value pairs, which discards the
--    per-element grouping needed to merge on the "fav" identifier.
CREATE OR REPLACE FUNCTION public.json_array_merge(data1 jsonb, merge_data jsonb)
RETURNS jsonb
IMMUTABLE
LANGUAGE sql
AS $$
SELECT jsonb_agg(expression)::jsonb
FROM (
WITH to_merge AS (
SELECT * FROM jsonb_each(jsonb_array_elements(merge_data))
)
SELECT *
FROM json_each(jsonb_array_elements(data1))
WHERE value NOT IN (SELECT value FROM to_merge)
UNION ALL
SELECT * FROM to_merge
) expression;
$$;
but now it doesn't work.
You probably will want to write a custom function to handle this. The default behaviour is to append each value because it has no way of knowing that you want fav to be unique.
If your data used fav as a key e.g.
{
"fav1": {"date": "2020-00-00 11:07:05.710000", "is_active": false},
"fav2": {"date": "1998-00-00 11:07:05.710000", "is_active": true},
"fav3": {"date": "2019-00-00 11:07:05.710000", "is_active": true}
}
this would be simple to manage, but since you are using an array you would need to make a custom function that iterates and check each value.
Edit you would need to run a few loops with plpgsql, this could be achieved more efficiently using plv8
-- Merges two jsonb arrays of objects, treating key_val (e.g. 'fav') as the
-- unique identifier: elements of data_new replace matching elements of
-- data_old, and unmatched data_new elements are appended at the end.
-- NOTE(review): O(n*m) nested scans -- fine for small arrays; consider a
-- set-based rewrite (or plv8) for large inputs.
-- NOTE(review): when data_new is empty but data_old is not, the ELSE branch
-- returns data_new (i.e. the empty array), discarding data_old -- confirm
-- that this is the intended behaviour.
CREATE OR REPLACE FUNCTION public.json_array_merge(
data_new jsonb,
data_old jsonb,
key_val text
)
RETURNS jsonb
AS $$
DECLARE
ret jsonb := '[]'::jsonb; -- accumulated result array
cur text; -- replacement element (as text) for the current slot
add boolean := true; -- true while no match has been found
i integer := 0;
ic integer := jsonb_array_length(data_old);
j integer := 0;
jc integer := jsonb_array_length(data_new);
BEGIN
IF ic > 0 AND jc > 0 THEN
-- populate or replace the records that are already there
WHILE i < ic LOOP
cur := null;
j := 0;
-- loop new array
WHILE j < jc LOOP
IF data_old->i->>key_val = data_new->j->>key_val THEN
cur := data_new->>j;
add := false;
END IF;
j := j + 1;
END LOOP;
-- add or replace
IF add THEN
ret := ret || format('[%s]', data_old->>i)::jsonb;
ELSE
ret := ret || format('[%s]', cur)::jsonb;
END IF;
add := true;
i := i + 1;
END LOOP;
-- loop through the new data again and add any values not in ret
ic := jsonb_array_length(ret);
j := 0;
WHILE j < jc LOOP
i := 0;
add := true;
WHILE i < ic LOOP
IF ret->i->>key_val = data_new->j->>key_val THEN
add := false;
END IF;
i := i + 1;
END LOOP;
IF add THEN
ret := ret || format('[%s]', data_new->>j)::jsonb;
END IF;
j := j + 1;
END LOOP;
ELSE
ret := data_new;
END IF;
RETURN ret;
END
$$
LANGUAGE plpgsql IMMUTABLE;
Running this should give you the desired result
SELECT json_array_merge(
'[{"fav": 3, "is_active": true, "date": "2019-00-00 11:07:05.710000"},{"fav": 1, "is_active": false, "date": "2020-00-00 11:07:05.710000"}]',
'[{"fav": 1, "is_active": true, "date": "1999-00-00 11:07:05.710000"},{"fav": 2, "is_active": true, "date": "1998-00-00 11:07:05.710000"}]',
'fav'
)

Converting JSON to table in SQL Server 2016

I'm working on a Web project where the client application communicates with the DB via JSONs.
The initial implementation took place with SQL Server 2012 (NO JSON support and hence we implemented a Stored Function that handled the parsing) and now we are moving to 2016 (YES JSON support).
So far, we are reducing processing time by a significant factor (in some cases, over 200 times faster!).
There are some interactions that contain arrays that need to be converted into tables. To achieve that, the OPENJSON function does ALMOST what we need.
In some of these (array-based) cases, records within the arrays have one or more fields that are also OBJECTS (in this particular case, also arrays), for instance:
[{
"Formal_Round_Method": "Floor",
"Public_Round_Method": "Closest",
"Formal_Precision": "3",
"Public_Precision": "3",
"Formal_Significant_Digits": "3",
"Public_Significant_Digits": "3",
"General_Comment": [{
"Timestamp": "2018-07-16 09:19",
"From": "1",
"Type": "Routine_Report",
"Body": "[To + Media + What]: Comment 1",
"$$hashKey": "object:1848"
}, {
"Timestamp": "2018-07-16 09:19",
"From": "1",
"Type": "User_Comment",
"Body": "[]: Comment 2",
"$$hashKey": "object:1857"
}, {
"Timestamp": "2018-07-16 09:19",
"From": "1",
"Type": "Routine_Report",
"Body": "[To + Media + What]: Comment 3",
"$$hashKey": "object:1862"
}]
}, {
"Formal_Round_Method": "Floor",
"Public_Round_Method": "Closest",
"Formal_Precision": "3",
"Public_Precision": "3",
"Formal_Significant_Digits": "3",
"Public_Significant_Digits": "3",
"General_Comment": []
}]
Here, General_Comment is also an array.
When running the command:
-- NOTE: the variable sigil was mangled to '#' in the original post;
-- T-SQL local variables use '@'.
SELECT *
FROM OPENJSON(@_l_Table_Data)
WITH ( Formal_Round_Method NVARCHAR(16) '$.Formal_Round_Method' ,
Public_Round_Method NVARCHAR(16) '$.Public_Round_Method' ,
Formal_Precision INT '$.Formal_Precision' ,
Public_Precision INT '$.Public_Precision' ,
Formal_Significant_Digits INT '$.Formal_Significant_Digits' ,
Public_Significant_Digits INT '$.Public_Significant_Digits' ,
-- General_Comment holds a nested JSON array; without AS JSON this
-- scalar mapping yields NULL (the problem discussed below).
General_Comment NVARCHAR(4000) '$.General_Comment'
) ;
[@_l_Table_Data is a variable holding the JSON string]
we are getting the column General_Comment = NULL even though there is data in there (at least in the first element of the array).
I guess that I should be using a different syntax for those columns that may contain OBJECTS and not SIMPLE VALUES, but I have no idea what that syntax should be.
I found a Microsoft page that actually solves the problem.
Here is how the query should look like:
-- NOTE: the variable sigil was mangled to '#' in the original post;
-- T-SQL local variables use '@'.
SELECT *
FROM OPENJSON(@_l_Table_Data)
WITH ( Formal_Round_Method NVARCHAR(16) '$.Formal_Round_Method' ,
Public_Round_Method NVARCHAR(16) '$.Public_Round_Method' ,
Formal_Precision INT '$.Formal_Precision' ,
Public_Precision INT '$.Public_Precision' ,
Formal_Significant_Digits INT '$.Formal_Significant_Digits' ,
Public_Significant_Digits INT '$.Public_Significant_Digits' ,
-- AS JSON returns the nested array as raw JSON text;
-- OPENJSON requires the column type to be NVARCHAR(MAX) for AS JSON.
General_Comment NVARCHAR(MAX) '$.General_Comment' AS JSON
) ;
So, you need to add AS JSON at the end of the column definition and (God knows why) the type MUST be NVARCHAR(MAX).
Very simple indeed!!!
Create Function ParseJson:
-- Recursively shreds a JSON document into an adjacency-list table: one row
-- per JSON item with its hierarchy level, key, value, JSON type, best-guess
-- SQL datatype, parent id and OPENJSON path. Iterates breadth-first, one
-- level per loop pass, until a pass produces no new rows.
-- NOTE: the '@' variable sigils were restored (mangled to '#' in the post).
CREATE OR ALTER FUNCTION [dbo].[ParseJson] (@JSON NVARCHAR(MAX))
RETURNS @Unwrapped TABLE
(
[id] INT IDENTITY, --just used to get a unique reference to each json item
[level] INT, --the hierarchy level
[key] NVARCHAR(100), --the key or name of the item
[Value] NVARCHAR(MAX),--the value, if it is a null, int,binary,numeric or string
type INT, --0 TO 5, the JSON type, null, numeric, string, binary, array or object
SQLDatatype sysname, --whatever the datatype can be parsed to
parent INT, --the ID of the parent
[path] NVARCHAR(4000) --the path as used by OpenJSON
)
AS BEGIN
-- Seed row: the whole document at level 0 (type 4 = array, 5 = object).
INSERT INTO @Unwrapped ([level], [key], Value, type, SQLDatatype, parent,
[path])
VALUES
(0, --the level
NULL, --the key,
@JSON, --the value,
CASE WHEN Left(ltrim(@JSON),1)='[' THEN 4 ELSE 5 END, --the type
'json', --SQLDataType,
0 , --no parent
'$' --base path
);
DECLARE @ii INT = 0,--the level
@Rowcount INT = -1; --the number of rows from the previous iteration
WHILE @Rowcount <> 0 --while we are still finding levels
BEGIN
INSERT INTO @Unwrapped ([level], [key], Value, type, SQLDatatype, parent,
[path])
SELECT [level] + 1 AS [level], new.[Key] AS [key],
new.[Value] AS [value], new.[Type] AS [type],
/* in order to determine the datatype of a json value, the best approach is to
determine the datatype that can be parsed. In JSON, an array of objects can
contain attributes that aren't consistent either in their name or value. */
CASE
WHEN new.Type = 0 THEN 'bit null'
WHEN new.[type] IN (1,2) then COALESCE(
CASE WHEN TRY_CONVERT(INT,new.[value]) IS NOT NULL THEN 'int' END,
CASE WHEN TRY_CONVERT(NUMERIC(14,4),new.[value]) IS NOT NULL THEN 'numeric' END,
CASE WHEN TRY_CONVERT(FLOAT,new.[value]) IS NOT NULL THEN 'float' END,
CASE WHEN TRY_CONVERT(MONEY,new.[value]) IS NOT NULL THEN 'money' END,
CASE WHEN TRY_CONVERT(DateTime,new.[value],126) IS NOT NULL THEN 'Datetime2' END,
CASE WHEN TRY_CONVERT(Datetime,new.[value],127) IS NOT NULL THEN 'Datetime2' END,
'nvarchar')
WHEN new.Type = 3 THEN 'bit'
WHEN new.Type = 5 THEN 'object' ELSE 'array' END AS SQLDatatype,
old.[id],
-- Object members extend the path with '.key'; array elements with '[index]'.
old.[path] + CASE WHEN old.type = 5 THEN '.' + new.[Key]
ELSE '[' + new.[Key] COLLATE DATABASE_DEFAULT + ']' END AS path
FROM @Unwrapped old
CROSS APPLY OpenJson(old.[Value]) new
WHERE old.[level] = @ii AND old.type IN (4, 5);
SELECT @Rowcount = @@ROWCOUNT;
SELECT @ii = @ii + 1;
END;
RETURN
END
For Usage:
select * from ParseJson(jsonString)

parsing JSON string in oracle

i have JSON string in one column in oracle 10g database like
[{"id":"1","contactBy":"Rajesh Kumar"},{"id":"2","contactBy":"Rakesh Kumar"}]
I have to get the value for ContactBy in that column for one of the reports.
Is there any built-in function to parse the JSON string in Oracle 10g, or any user-defined function to parse the string?
As said by Jens in comments, JSON support is only available from 12c, but you can use regular expressions as a workaround to get what you want:
-- One row per "contactBy" occurrence: CONNECT BY level walks successive regex
-- matches, and the outer regexp_replace keeps only the captured value.
-- NOTE(review): ("(\w| )*") only matches word characters and spaces, so
-- values containing punctuation (e.g. "Test+-") are skipped -- the edited
-- version further down switches to a lazy (".*?") capture for that reason.
select regexp_replace(regexp_substr('[{"id": "1", "contactBy":"Rajesh Kumar"},{"id": "2","contactBy": "Emmanuel Test"}]',
'"contactBy":\s*("(\w| )*")', 1, level),
'"contactBy":\s*"((\w| )*)"', '\1', 1, 1) contact
from dual
connect by regexp_substr('[{"id": "1","contactBy":"Rajesh Kumar"},{"id": "2","contactBy": "Emmanuel Test"}]', '"contactBy":\s*("(\w| )*")', 1, level) is not null
;
EDIT : request modified to take both special characters and display answers in a single row:
-- Same extraction with a lazy (".*?") capture (handles special characters),
-- then LISTAGG collapses all matches into one comma-separated row, ordered
-- by match number (lev) to keep the original array order.
select listagg(contact, ', ') within group (order by lev)
from
(
select regexp_replace(regexp_substr('[{"id": "1", "contactBy":"Rajesh Kumar"},{"id": "2","contactBy": "Emmanuel Test+-"}]',
'"contactBy":\s*(".*?")', 1, level),
'"contactBy":\s*"(.*?)"', '\1', 1, 1) contact, level lev
from dual
connect by regexp_substr('[{"id": "1","contactBy":"Rajesh Kumar"},{"id": "2","contactBy": "Emmanuel Test+-"}]', '"contactBy":\s*(".*?")', 1, level) is not null
)
;
Emmanuel, your code really helped a lot, thank you very much. But your query was taking too much time, so I changed it to a function which will return the required values.
-- Returns a comma-separated list of every "contactBy" value found in the
-- JSON stored in T_LOAN_REQUEST_MARKET.CR_CONTACT_BY for the given CR_ID.
-- Improvements over the posted version:
--  * The JSON column is fetched ONCE instead of re-querying the table on
--    every loop iteration (the original ran 1 + N queries per call).
--  * \s (optional whitespace) replaces \S (non-whitespace) after the colon,
--    matching the regex of the answer this was derived from; \S silently
--    misses values written as "contactBy": "..." with a space.
-- NOTE: matches are still appended from last to first, preserving the
-- original output order.
CREATE OR REPLACE FUNCTION SFGETCRCONTACTBY(INCRID NUMBER) RETURN VARCHAR2 AS
TEMPINT NUMBER :=0;
OUTPUT VARCHAR2(10000) ;
TEMPVAR VARCHAR2(1000);
L_JSON T_LOAN_REQUEST_MARKET.CR_CONTACT_BY%TYPE;
BEGIN
-- Single fetch of the JSON document for this CR_ID.
SELECT CR_CONTACT_BY
INTO L_JSON
FROM T_LOAN_REQUEST_MARKET WHERE CR_ID=INCRID;
SELECT REGEXP_COUNT(L_JSON, '"contactBy":\s*(".*?")')
INTO TEMPINT
FROM DUAL;
WHILE TEMPINT > 0
LOOP
TEMPVAR := REGEXP_REPLACE(
REGEXP_SUBSTR(L_JSON, '"contactBy":\s*(".*?")', 1, TEMPINT),
'"contactBy":\s*"(.*?)"', '\1', 1, 1);
IF OUTPUT IS NULL THEN
OUTPUT := TEMPVAR;
ELSE
OUTPUT := OUTPUT ||',' || TEMPVAR;
END IF;
TEMPINT := TEMPINT-1;
END LOOP;
RETURN OUTPUT;
END;
/
/