how to merge two json_arrays in postgresql - json

I need to MERGE two JSONB_ARRAYS
i have in my table column jsonb of items which looks like this:
[
{"fav": 1, "is_active": true, "date": "1999-00-00 11:07:05.710000"},
{"fav": 2, "is_active": true, "date": "1998-00-00 11:07:05.710000"}
]
where fav's value is unique number.
And i have incoming data, where could be the same items which also exists in my table and also new items and after merging the result must be that way where new items just need to add but existing items i need to update
so after merging the result must look like this:
merge:
[
{"fav": 1, "is_active": true, "date": "1999-00-00 11:07:05.710000"},
{"fav": 2, "is_active": true, "date": "1998-00-00 11:07:05.710000"}
]::jsonb ||
[
{"fav": 3, "is_active": true, "date": "2019-00-00 11:07:05.710000"},
{"fav": 1, "is_active": false, "date": "2020-00-00 11:07:05.710000"}
]::jsonb
------------------------------------------------------------------------
result:
[
{"fav": 1, "is_active": false, "date": "2020-00-00 11:07:05.710000"},
{"fav": 2, "is_active": true, "date": "1998-00-00 11:07:05.710000"},
{"fav": 3, "is_active": true, "date": "2019-00-00 11:07:05.710000"}
]
as expected the "fav": 1 -> was updated and "fav": 3 -> was added
maybe i need some refactroring structure of my json maybe something else?
and maybe it would be better if i retrieve json to Collection and work with objects and after all manipulations just save it back?
Update 1
i try write custom function:
CREATE OR REPLACE FUNCTION public.json_array_merge(data1 jsonb, merge_data jsonb)
RETURNS jsonb
IMMUTABLE
LANGUAGE sql
AS $$
SELECT jsonb_agg(expression)::jsonb
FROM (
WITH to_merge AS (
SELECT * FROM jsonb_each(jsonb_array_elements(merge_data))
)
SELECT *
FROM json_each(jsonb_array_elements(data1))
WHERE value NOT IN (SELECT value FROM to_merge)
UNION ALL
SELECT * FROM to_merge
) expression;
$$;
but now it doesnt work (

You probably will want to write a custom function to handle this. The default behaviour is to append each value because it has no way of knowing that you want fav to be unique.
If your data used fav as a key e.g.
{
"fav1": {"date": "2020-00-00 11:07:05.710000", "is_active": false},
"fav2": {"date": "1998-00-00 11:07:05.710000", "is_active": true},
"fav3": {"date": "2019-00-00 11:07:05.710000", "is_active": true}
}
this would be simple to manage, but since you are using an array you would need to make a custom function that iterates and check each value.
Edit you would need to run a few loops with plpgsql, this could be achieved more efficiently using plv8
CREATE OR REPLACE FUNCTION public.json_array_merge(
data_new jsonb,
data_old jsonb,
key_val text
)
RETURNS jsonb
AS $$
DECLARE
ret jsonb := '[]'::jsonb;
cur text;
add boolean := true;
i integer := 0;
ic integer := jsonb_array_length(data_old);
j integer := 0;
jc integer := jsonb_array_length(data_new);
BEGIN
IF ic > 0 AND jc > 0 THEN
-- populate or replace the records that are already there
WHILE i < ic LOOP
cur := null;
j := 0;
-- loop new array
WHILE j < jc LOOP
IF data_old->i->>key_val = data_new->j->>key_val THEN
cur := data_new->>j;
add := false;
END IF;
j := j + 1;
END LOOP;
-- add or replace
IF add THEN
ret := ret || format('[%s]', data_old->>i)::jsonb;
ELSE
ret := ret || format('[%s]', cur)::jsonb;
END IF;
add := true;
i := i + 1;
END LOOP;
-- loop through the new data again and add any values not in ret
ic := jsonb_array_length(ret);
j := 0;
WHILE j < jc LOOP
i := 0;
add := true;
WHILE i < ic LOOP
IF ret->i->>key_val = data_new->j->>key_val THEN
add := false;
END IF;
i := i + 1;
END LOOP;
IF add THEN
ret := ret || format('[%s]', data_new->>j)::jsonb;
END IF;
j := j + 1;
END LOOP;
ELSE
ret := data_new;
END IF;
RETURN ret;
END
$$
LANGUAGE plpgsql IMMUTABLE;
Running this should give you the desired result
SELECT json_array_merge(
'[{"fav": 3, "is_active": true, "date": "2019-00-00 11:07:05.710000"},{"fav": 1, "is_active": false, "date": "2020-00-00 11:07:05.710000"}]',
'[{"fav": 1, "is_active": true, "date": "1999-00-00 11:07:05.710000"},{"fav": 2, "is_active": true, "date": "1998-00-00 11:07:05.710000"}]',
'fav'
)

Related

Loop through dynamic JSON file IN pl/SQL and do DML operations

I have to write a function for this app that has a grid where you can input value in rows, delete rows, update them etc.
When you do such operation, a JSON file is sent as payload and I can handle it with a procedure, but I am having problems with situations where a user wants to edit multiple rows, it updates just one of them.
This is an example of JSON file, where the description key is the one that is changed from null to the one with values (v1, v2, v3)
If value in grid is changed, it includes key "Changed" with value 1, in case of delete "Deleted" and so on.
{
"Changes": [
{
"id": "AR46",
"Changed": 1,
"SESSION_ID": "963",
"NAME": "IMAGE_LOGO",
"VALUE": "",
"DESCRIPTION": "v1",
"TYPE": "IMAGE",
"PARAM_GROUP": "J",
"BLOB_VALUE": "oracle.sql.BLOB#2ba32",
"EDIT": "Edit",
"DOWNLOAD": "Download",
"CLOB_VALUE": "oracle.sql.CLOB#7fd86843",
"XML_VALUE": "",
"CREATE_DATE": "11.03.2022 13:04:26",
"_DefaultSort": ""
},
{
"id": "AR47",
"Changed": 1,
"SESSION_ID": "963",
"NAME": "IMAGE_HPB_MEMO_FOOTER",
"VALUE": "",
"DESCRIPTION": "v2",
"TYPE": "IMAGE",
"PARAM_GROUP": "JASPER",
"BLOB_VALUE": "oracle.sql.BLOB#7621f9df",
"EDIT": "Edit",
"DOWNLOAD": "Download",
"CLOB_VALUE": "oracle.sql.CLOB#43e24152",
"XML_VALUE": "",
"CREATE_DATE": "11.03.2022 13:04:35",
"_DefaultSort": ""
},
{
"id": "AR48",
"Changed": 1,
"SESSION_ID": "963",
"NAME": "IMAGE_HPB_MEMO_INVCRED",
"VALUE": "",
"DESCRIPTION": "v3",
"TYPE": "IMAGE",
"PARAM_GROUP": "JASPER",
"BLOB_VALUE": "oracle.sql.BLOB#762074f6",
"EDIT": "Edit",
"DOWNLOAD": "Download",
"CLOB_VALUE": "oracle.sql.CLOB#4a068001",
"XML_VALUE": "",
"CREATE_DATE": "11.03.2022 13:04:46",
"_DefaultSort": ""
}
]
}
And this is the function that I wrote that works for just one edit/update. There aren't any errors if you try to update multiple rows, but still, just one(the first) one, is changed.
create or replace function changesResources (p_data varchar2)
return varchar2
IS
l_nullEx exception;
PRAGMA EXCEPTION_INIT(l_nullEx, -1400);
p_rez varchar2(100);
p_session_id number;
p_name varchar2(100);
p_value VARCHAR2(500);
p_description VARCHAR2(1000);
p_type VARCHAR2(100);
p_param_group VARCHAR2(100);
p_blob_value VARCHAR2(1000);
p_clob_value VARCHAR2(1000);
p_xml_value VARCHAR2(1000);
p_create_date varchar2(50);
l_json_obj JSON_OBJECT_T;
l_json_arr JSON_ARRAY_T;
Begin
l_json_obj := JSON_OBJECT_T.PARSE(p_data);
l_json_arr := l_json_obj.get_array('Changes');
FOR i IN 0..l_json_arr.get_size()-1 LOOP
p_session_id := JSON_VALUE(l_json_arr.get(i).to_string(), '$.SESSION_ID');
p_name := JSON_VALUE(l_json_arr.get(i).to_string(), '$.NAME');
p_value := JSON_VALUE(l_json_arr.get(i).to_string(), '$.VALUE');
p_description := JSON_VALUE(l_json_arr.get(i).to_string(), '$.DESCRIPTION');
p_type := JSON_VALUE(l_json_arr.get(i).to_string(), '$.TYPE');
p_param_group := JSON_VALUE(l_json_arr.get(i).to_string(), '$.PARAM_GROUP');
p_blob_value := JSON_VALUE(l_json_arr.get(i).to_string(), '$.BLOB_VALUE');
p_clob_value := JSON_VALUE(l_json_arr.get(i).to_string(), '$.CLOB_VALUE');
p_xml_value := JSON_VALUE(l_json_arr.get(i).to_string(), '$.XML_VALUE');
p_create_date := JSON_VALUE(l_json_arr.get(i).to_string(), '$.CREATE_DATE');
IF JSON_VALUE(l_json_arr.get(i).to_string(), '$.Changed') = 1
THEN
UPDATE BF_RESOURCES_CONF
SET description = p_description,
value=p_value,
type = p_type,
param_group = p_param_group,
blob_value = utl_raw.cast_to_raw(p_blob_value),
clob_value = TO_CLOB(p_clob_value),
xml_value=p_xml_value,
create_date = TO_DATE(p_create_date,'DD.MM.YYYY HH24:MI:SS')
where session_id = p_session_id
and name = p_name;
p_rez := '1|success!';
return p_rez;
ELSIF JSON_VALUE(l_json_arr.get(i).to_string(), '$.Deleted') = 1
THEN
DELETE FROM BF_RESOURCES_CONF
WHERE session_id = p_session_id
and name = p_name;
p_rez := '1|success!';
return p_rez;
ELSE
INSERT INTO BF_RESOURCES_CONF (session_id,name, value,description, type,param_group,blob_value,clob_value,xml_value,create_date) VALUES (p_session_id, p_name, p_value, p_description, p_type, p_param_group, utl_raw.cast_to_raw(p_blob_value),TO_CLOB(p_clob_value),p_xml_value,TO_DATE(p_create_date,'DD.MM.YYYY HH24:MI:SS'));
p_rez := '1|success!';
return p_rez;
END IF;
END LOOP;
EXCEPTION
WHEN l_nullEx THEN
p_rez := '-1|Columns SESSION_ID, NAME I CREATE_DATE have to contain values!';
RETURN p_rez;
--WHEN OTHERS THEN
-- p_rez := '-1|Error!';
-- RETURN p_rez;
END changesResources ;
On 12.1 and above JSON_TABLE is available. Here is an example on the emp sample table similar to yours:
CREATE OR REPLACE function update_emp (p_data VARCHAR2)
RETURN VARCHAR2
IS
l_result VARCHAR2(1000);
BEGIN
FOR r IN (
with json_doc AS
(SELECT p_data AS json_data FROM dual
)
SELECT
empno,
changed,
deleted,
salary
FROM
json_doc t,
JSON_TABLE(json_data, '$.Changes[*]'
COLUMNS (
empno NUMBER PATH '$.empno',
changed NUMBER PATH '$.Changed',
deleted NUMBER PATH '$.Deleted',
salary NUMBER PATH '$.Salary'
))
) LOOP
IF r.changed = 1 THEN
UPDATE emp SET sal = r.salary WHERE empno = r.empno;
l_result := l_result || ', updated: '||r.empno;
ELSIF r.deleted = 1 THEN
DELETE FROM emp WHERE empno = r.empno;
l_result := l_result || ', deleted: '||r.empno;
END IF;
END LOOP;
RETURN LTRIM(l_result, ', ');
END update_emp;
/
set serveroutput on size 999999
clear screen
declare
l_data varchar2(1000);
l_return varchar2(1000);
begin
l_data := '{
"Changes": [
{
"empno": 7698,
"Changed": 1,
"Salary": 4000,
},
{
"empno": 7788,
"Changed": 1,
"Salary": 5000,
},
{
"empno": 7876,
"Deleted": 1
}
]
}';
l_return := update_emp(p_data => l_data);
dbms_output.put_line('l_return = ' || l_return);
end;
/
l_return = updated: 7698, updated: 7788, deleted: 7876
PL/SQL procedure successfully completed.

Oracle 12c: Remove JSON object from JSON array

Need to create a function which takes input of CLOB and I need to remove array matching the condition.
create or replace FUNCTION remove_config_node_by_key (
p_in_json IN CLOB,
p_in_key IN VARCHAR2
) RETURN CLOB IS
l_ja json_array_t;
l_po json_object_t;
l_key VARCHAR2(500);
BEGIN
l_ja := json_array_t.parse(p_in_json);
FOR idx IN 0.. l_ja.get_size - 1 LOOP
l_po := json_object_t(l_ja.get(idx));
l_key := l_po.get_string('key');
-- check if the key matches with input and then delete that node.
dbms_output.put('Key to remove in the JSON: ' || l_key);
IF l_key = p_in_key THEN
dbms_output.put('Key to remove in the JSON: ' || l_key);
l_ja.remove (idx);
-- dbms_output.new_line;
dbms_output.put('Key is removed in the JSON: ' || l_key);
END IF;
END LOOP;
RETURN l_ja.to_clob;
END;
When called with:
update COLD_DRINKS cd set cd.configuration = remove_config_node_by_key(cd.configuration, 'b')
where country='INDIA';
I get error:
Error report -
ORA-30625: method dispatch on NULL SELF argument is disallowed
ORA-06512: at "SYS.JSON_OBJECT_T", line 72
ORA-06512: at "PLATFORM_ADMIN_DATA.REMOVE_CONFIG_NODE_BY_KEY", line 11
input JSON:
[
{
"key": "a",
"value": "lemon soda"
},
{
"key": "b",
"value": "Coke"
},
{
"key": "c",
"value": "Pepsi"
}
]
Expected JSON after execution:
[
{
"key": "a",
"value": "lemon soda"
},
{
"key": "c",
"value": "Pepsi"
}
]
I think something is wrong about this l_ja.remove (idx); as this one causes the exception. Not able to remove the object at index.
In 18c at least it works with your sample data (with the trailing comma removed from the array), but it gets that error with a null configuration.
So you can either test for null in your function, or exclude nulls from your update, or fix your data so it doesn't have nulls.
The simplest thing to do is probably add a null check:
...
BEGIN
IF p_in_json IS NULL THEN
RETURN NULL;
END IF;
l_ja := json_array_t.parse(p_in_json);
...
fiddle
You can also remove it using json_transform:
create or replace function remove_config_node_by_key (
p_in_json IN CLOB,
p_in_key IN VARCHAR2
) RETURN CLOB IS
l_result CLOB ;
begin
execute immediate q'~select json_transform(
:p_in_json,
REMOVE '$[*]?(#.key == "~' || p_in_key || q'~")'
)
from dual~' into l_result using p_in_json
;
return l_result ;
end ;
/
(with all usual comments regarding possible SQL injection...)
The issue was resolved when I added REVERSE in for loop
before [ERROR]
FOR idx IN 0.. l_ja.get_size - 1
after [PASS]
FOR idx IN REVERSE 0.. l_ja.get_size - 1
Complete working function
CREATE OR REPLACE FUNCTION remove_config_node_by_key (
p_in_json IN CLOB,
p_in_key IN VARCHAR2
) RETURN CLOB IS
l_ja json_array_t := json_array_t ();
l_po json_object_t;
l_key VARCHAR2(500);
BEGIN
l_ja := json_array_t.parse(p_in_json);
FOR idx IN REVERSE 0.. l_ja.get_size - 1
LOOP
l_po := json_object_t(l_ja.get(idx));
l_key := l_po.get_string('key');
-- check if the key matches with input and then delete that node.
IF l_key = p_in_key THEN
dbms_output.put_line('Key to remove in the JSON: ' || l_key || ' at index : ' || idx);
l_ja.remove (idx);
dbms_output.put_line('Key is removed in the JSON: ' || l_key);
END IF;
END LOOP;
RETURN l_ja.to_clob;
END;
/

how to merge all MySQL JSON column values with JSON_MERGE_PATCH?

Am trying to turn all scalar values in any given JSON into an empty string.
Given i have a JSON column called arbitraryjsonvalues in mysql database table called ITEMS and the arbitraryjsonvalues column have the following values.
arbitraryjsonvalues
===================
{"foo": {"big": {"cat": 16, "dog": 90}, "boo": ["babe1", "babe2", "babe3"], "small": ["paradise", "hell"]}, "goo": "heavens gate", "yeah": "rice pot on fire"}
{"foo": {"big": {"cone": 90, "cylinder": 16}, "lover": ["lover1", "lover2", "lover3"], "small": ["banner", "logo"]}, "yeah": "pizza on the table"}
{"foo": {"big": {"ape": 16, "humans": 90}, "kalo": ["kalo1", "kalo2", "kalo3"], "small": ["paradise", "hell"]}, "goo": "heavens gate", "yeah": "rice pot on fire", "freak": "bed"}
{"fcs": ["arsenal", "chelsea", "man utd", "leicester", "inter milan"], "foo": {"big": {"ape": 16, "humans": 90}, "kalo": ["kalo1", "kalo2", "kalo3"], "small": ["paradise", "hell"]}, "goo": "heavens gate", "yeah": "rice pot on fire", "freak": "bed"}
{"a": "dsd"}
{"foo": {"ll": true, "boo": ["", "", {"cc": {"dd": ["", true, "", 43], "gg": true}, "jj": "fu"}, "", 90, false, true]}, "lls": 90, "mmn": 9, "bbbd": "ad_a", "lulu": "adfdasf", "_alago": {"a": 4}}
{"phone": "+234809488485"}
{"foo": {"big": {"cat": 16, "dog": 90}, "boo": ["", "", ""], "small": ["http://koks.com", ""]}, "goo": "+2345554444 677888", "yeah": "rice pot on fire"}
{"ll": true, "boo": ["http://kij.com", "___89jjjjdhfhhf8jjd__", {"cc": {"dd": ["", true, "", 43], "gg": true}, "jj": "fu"}, "", 90, false, true]}
{"ll": true, "boo": ["http://kij.com", "___+++++89jjjjdhfhhf8jjd__", {"cc": {"dd": ["", true, "", 43], "gg": true}, "jj": "fu"}, "", 90, false, true]}
{"ll": true, "boo": ["http://kij.com", "___+++++", {"cc": {"dd": ["", true, "", 43], "gg": true}, "jj": "fu"}, "", 90, false, true]}
{"gg": ["a", {"ll": "pink"}]}
{"gg": ["a", {"ll": ["pink", "orange"]}]}
I have created the following stored procedure to enable me merge all json values in a JSON column.
MERGE_ALL_JSON Procedure
DELIMITER $$
CREATE DEFINER=`root`#`%` PROCEDURE `MERGE_ALL_JSON`(IN `$LIMIT` INT)
BEGIN
DECLARE `$LIST` LONGTEXT;
DECLARE `$REMAINING` INT;
DECLARE `$PAGE` INT;
DECLARE `$TOTAL_JSON_OBJECT_COUNT` INT;
DECLARE `$OFFSET` INT;
DECLARE `$NEXT` TEXT;
DECLARE `$NEXT_LENGTH` INT;
DECLARE `$VALUE` TEXT;
DECLARE `$COUNTER` INT;
DECLARE `$MERGED_JSON` JSON;
SET `$MERGED_JSON` = '{}';
SET `$OFFSET` = 1;
CALL
GET_TOTAL_JSON_OBJECT_COUNT(`$TOTAL_JSON_OBJECT_COUNT`);
CALL CALCULATE_PAGE_COUNT(
`$LIMIT`,`$TOTAL_JSON_OBJECT_COUNT`,`$PAGE`
);
WHILE `$OFFSET` <= `$PAGE`
DO
CALL GET_JSON_LIST(`$LIMIT`, `$OFFSET`, `$LIST`);
SET `$COUNTER` = 0;
SELECT `$LIST`;
iterator:
LOOP
IF CHAR_LENGTH(TRIM(`$LIST`)) = 0 OR `$LIST` IS NULL
THEN
LEAVE iterator;
END IF;
SET `$NEXT` = SUBSTRING_INDEX(`$LIST`,"__|__',",1);
SET `$NEXT_LENGTH` = CHAR_LENGTH(`$NEXT`);
SET `$COUNTER` = `$COUNTER` + 1;
SET `$REMAINING` = (`$TOTAL_JSON_OBJECT_COUNT` - ((`$OFFSET` - 1)*`$LIMIT`));
IF `$OFFSET` = `$PAGE` AND `$COUNTER` = `$REMAINING` THEN
SET `$NEXT` = SUBSTRING_INDEX(`$NEXT`, "__|__'", 1);
END IF;
SET `$VALUE` = CONCAT(TRIM(`$NEXT`), "'");
SET `$VALUE` = substring(`$VALUE`, 2, length(`$VALUE`) - 2);
SET `$MERGED_JSON` = JSON_MERGE_PATCH(
`$MERGED_JSON`,`$VALUE`
);
SET `$LIST` = INSERT(`$LIST`,1,`$NEXT_LENGTH` + CHAR_LENGTH("__|__',"),'');
END LOOP;
SET `$OFFSET` = `$OFFSET` + 1;
END WHILE;
SELECT `$MERGED_JSON`;
END$$
DELIMITER ;
GET_JSON_LIST Procedure
DELIMITER $$
CREATE DEFINER=`root`#`%` PROCEDURE `GET_JSON_LIST`(IN `$LIMIT` INT, IN `$OFFSET` INT, OUT `$LIST` LONGTEXT)
BEGIN
DECLARE `$NEWOFFSET` INT;
SET `$NEWOFFSET` = (`$OFFSET`-1)*`$LIMIT`;
SET #t = ##group_concat_max_len;
SET ##group_concat_max_len = 4294967295899999;
SET `$LIST` = (SELECT
GROUP_CONCAT(
(SELECT DISTINCT
CONCAT(
"'",
arbitraryjsonvalues,
"__|__'"
))
)
FROM (
SELECT DISTINCT arbitraryjsonvalues
FROM ITEMS
WHERE arbitraryjsonvalues != JSON_OBJECT() AND
JSON_TYPE(arbitraryjsonvalues) = "OBJECT"
LIMIT `$NEWOFFSET`, `$LIMIT`
) as jsonvalues);
SET ##group_concat_max_len = #t;
END$$
DELIMITER ;
GET_TOTAL_JSON_OBJECT_COUNT Procedure
DELIMITER $$
CREATE DEFINER=`root`#`%` PROCEDURE `GET_TOTAL_JSON_OBJECT_COUNT`(OUT `$TOTAL_JSON_OBJECT_COUNT` INT)
BEGIN
SELECT COUNT(*) FROM (
SELECT DISTINCT arbitraryjsonvalues
FROM ITEMS
WHERE JSON_TYPE(arbitraryjsonvalues) = "OBJECT" AND
arbitraryjsonvalues != JSON_OBJECT()
) as distinctcount INTO `$TOTAL_JSON_OBJECT_COUNT`;
END$$
DELIMITER ;
finally, CALCULATE_PAGE_COUNT Procedure
DELIMITER $$
CREATE DEFINER=`root`#`%` PROCEDURE `CALCULATE_PAGE_COUNT`(IN `$LIMIT` INT, IN `$TOTAL` INT, OUT `$PAGE` INT)
BEGIN
DECLARE `$REMAINDER` INT;
SET `$PAGE` = `$TOTAL` / `$LIMIT`;
SET `$REMAINDER` = `$TOTAL` - (`$PAGE`*`$LIMIT`);
IF `$REMAINDER` > 0 THEN
SET `$PAGE` = `$PAGE` + 1;
END IF;
END$$
DELIMITER ;
However, I discovered that two JSON values with the same structure but different scalar values are distinct to one another, which means an attempt to merge all values in JSON column may fetch too many values ranging to thousands or more for processing; meanwhile, each of this JSON values will possibly not vary that much in structure, so I believe it will be good to turn all scalar values into an empty string, which is the problem I am currently trying to solve.
Afterward, I will be looking to turn series of empty strings in an array into a single empty string, for example
["", "", "", {....}, "", ""] will be equal to ["", {}]
["", "", "", {"a": ["", ""]}, "", ""] will be equal to ["", {"a": [""]}]
So if I can solve this two problems, which is to have a regex to turn all the scalar values into an empty string and a regex to turn series of empty string as described above, I will be able to figure out where I can use the regex in MYSQL statement of the above procedures to fetch distinct json values.
I believe by doing so, i will be able to fetch not too many distinct values. and my goal is to merge any given JSON column in a split of seconds. I don't care to have the scalar values, all i wanted is for my backend to automatically determine the structure and send it to the frontend to display a tree or collapsing tree where each node can have a text field to allow for a more context-specific search.
So whenever a node is searched, the frontend is aware of the path and the backend knows where to get the data...
Trying to implement some sort of reusable backend for searching JSON column of any given mysql database table column with JSON type.
I am using the following regex below to perform this action but it matches not only the scalar values but also the key. wish to get one that will match only the scalar values and not the keys.
true|false|\d+|("(?:(?!")((?:\\"|[^"])*))*")
Given that I have this merged JSON value for testing
{
"a": "dsd",
"ll": [true, true, true],
"boo": [
"http://kij.com",
"___89jjjjdhfhhf8jjd__",
{
"cc": {
"dd": ["", true, "", 43],
"gg": true
},
"jj": "f'u"
},
"",
90,
false,
true,
"http://kij.com",
"___+++++89jjjjdhfhhf8jjd__",
{
"cc": {
"dd": ["", true, "", 43],
"gg": true
},
"jj": "fu"
},
"",
90,
false,
true,
"http://kij.com",
"___+++++",
{
"cc": {
"dd": ["", true, "#8jkk=", 43],
"gg": true
},
"jj": "fu#"
},
"",
90,
false,
true
],
"fcs": ["arsenal", "chelsea", "man utd", "leicester", "inter milan"],
"foo": {
"ll": true,
"big": {
"ape": [16, 16],
"cat": [16, 16],
"dog": [90, 90],
"cone": 90,
"humans": [90, 90],
"cylinder": 16
},
"boo": ["babe1", "babe2", "babe3", "", "", {
"cc": {
"dd": ["", true, "", 43],
"gg": true
},
"jj": "fu"
}, "", 90, false, true, "", "", ""],
"kalo": ["kalo1", "kalo2", "kalo3", "kalo1", "kalo2", "kalo3"],
"lover": ["lover1", "lover2", "lover3"],
"small": ["paradise", "hell", "banner", "logo", "paradise", "hell", "paradise", "hell", "http://koks.com", ""]
},
"goo": ["heavens gate", "heavens gate", "heavens gate", "+2345554444 677888"],
"lls": 90,
"mmn": 9,
"bbbd": "ad_a",
"lulu": "adfdasf",
"yeah": ["rice pot on fire", "pizza on the table", "rice pot on fire", "rice pot on fire", "rice pot on fire"],
"freak": ["bed", "bed"],
"phone": "+2347777777",
"_alago": {"a": "$4m-jkk+=$900"}
}
Here is a link for you to test it
Test Regex
Please i need someone to help me solve this problem
after digging this is what I've come up with..
GET_JSON_LIST procedure
DELIMITER $$
CREATE DEFINER=`root`#`%` PROCEDURE `GET_JSON_LIST`(IN `$LIMIT` INT, IN `$OFFSET` INT, OUT `$LIST` LONGTEXT)
BEGIN
DECLARE `$NEWOFFSET` INT;
SET `$NEWOFFSET` = (`$OFFSET`-1)*`$LIMIT`;
SET #t = ##group_concat_max_len;
SET ##group_concat_max_len = 4294967295899999;
SET #LIST = '{}';
SET #TEMP=(
SELECT GROUP_CONCAT(
#LIST:=JSON_MERGE_PRESERVE(
#LIST,
jsonvalues
)
)
FROM (
SELECT DISTINCT REGEXP_REPLACE(
REGEXP_REPLACE(
REGEXP_REPLACE(
REGEXP_REPLACE(
REGEXP_REPLACE(
arbitraryjsonvalues,
'"(\\w+)":',
'$1:'
),
'(true|false|\\d+|("(?:(?!")((?:\\\\"|[^"])*))*"))',
'""'
),
'(\\w*):',
'"$1":'
),
'(?:""\\s*,\\s*){2,}',
'"",'
),
'(?:"",\\s*"")+]',
'""]'
) as jsonvalues
FROM ITEMS
WHERE JSON_TYPE(arbitraryjsonvalues) = "OBJECT" AND arbitraryjsonvalues != JSON_OBJECT()
LIMIT `$NEWOFFSET`, `$LIMIT`
) AS arbitval2
);
SET ##group_concat_max_len = #t;
SET `$LIST` = REGEXP_REPLACE(
REGEXP_REPLACE(
REGEXP_REPLACE(
REGEXP_REPLACE(
REGEXP_REPLACE(
#LIST,
'"(\\w+)":',
'$1:'
),
'(true|false|\\d+|("(?:(?!")((?:\\\\"|[^"])*))*"))',
'""'
),
'(\\w*):',
'"$1":'
),
'(?:""\\s*,\\s*){2,}',
'"",'
),
'(?:"",\\s*"")+]',
'""]'
);
END$$
DELIMITER ;
GET_TOTAL_JSON_OBJECT_COUNT Procedure
DELIMITER $$
CREATE DEFINER=`root`#`%` PROCEDURE `GET_TOTAL_JSON_OBJECT_COUNT`(OUT `$TOTAL_JSON_OBJECT_COUNT` INT)
BEGIN
SELECT COUNT(*)
FROM (
SELECT DISTINCT REGEXP_REPLACE(
REGEXP_REPLACE(
REGEXP_REPLACE(
REGEXP_REPLACE(
REGEXP_REPLACE(
arbitraryjsonvalues,
'"(\\w+)":',
'$1:'
),
'(true|false|\\d+|("(?:(?!")((?:\\\\"|[^"])*))*"))',
'""'
),
'(\\w*):',
'"$1":'
),
'(?:""\\s*,\\s*){2,}',
'"",'
),
'(?:"",\\s*"")+]',
'""]'
) AS total
FROM ITEMS
WHERE arbitraryjsonvalues != JSON_OBJECT() AND arbitraryjsonvalues != JSON_ARRAY()
) as distinctcount INTO `$TOTAL_JSON_OBJECT_COUNT`;
END$$
DELIMITER ;
MERGE_ALL_JSON Procedure
DELIMITER $$
CREATE DEFINER=`root`#`%` PROCEDURE `MERGE_ALL_JSON`(IN `$LIMIT` INT)
BEGIN
DECLARE `$LIST` LONGTEXT;
DECLARE `$PAGE` INT;
DECLARE `$TOTAL_JSON_OBJECT_COUNT` INT;
DECLARE `$OFFSET` INT;
DECLARE `$NEXT` TEXT;
DECLARE `$NEXT_LENGTH` INT;
DECLARE `$VALUE` TEXT;
DECLARE `$MERGED_JSON` JSON;
SET `$MERGED_JSON` = '{}';
SET `$OFFSET` = 1;
CALL
GET_TOTAL_JSON_OBJECT_COUNT(`$TOTAL_JSON_OBJECT_COUNT`);
CALL CALCULATE_PAGE_COUNT(`$LIMIT`,`$TOTAL_JSON_OBJECT_COUNT`,`$PAGE`);
WHILE `$OFFSET`<=`$PAGE`
DO
CALL GET_JSON_LIST(`$LIMIT`, `$OFFSET`, `$LIST`);
SET `$MERGED_JSON` = JSON_MERGE_PRESERVE(`$MERGED_JSON`,`$LIST`);
SET `$OFFSET`=(`$OFFSET`+1);
END WHILE;
SELECT `$MERGED_JSON`;
END$$
DELIMITER ;
CALCULATE_PAGE_COUNT Procedure
DELIMITER $$
CREATE DEFINER=`root`#`%` PROCEDURE `CALCULATE_PAGE_COUNT`(IN `$LIMIT` INT, IN `$TOTAL` INT, OUT `$PAGE` INT)
BEGIN
DECLARE `$REMAINDER` INT;
SET `$PAGE` = `$TOTAL` / `$LIMIT`;
SET `$REMAINDER` = `$TOTAL` - (`$PAGE`*`$LIMIT`);
IF `$REMAINDER` > 0 THEN
SET `$PAGE` = `$PAGE` + 1;
END IF;
END$$
DELIMITER ;
Running MERGE_ALL_JSON Procedure produces the result below, and this solves my problem somehow, but I am looking for a way to merge all objects inside an array into one with REGEX_REPLACE and JSON_MERGE_PRESERVE.
{
"a": "",
"gg": ["", {"ll": ""}, "", {"ll": [""]}],
"ll": "", "boo": ["", {"cc": {"dd": [""], "gg": ""}, "jj": ""}, ""],
"fcs": [""],
"foo": [
{
"ll": "",
"big": {
"ape": [""],
"cat": [""],
"dog": [""],
"cone": "",
"humans": [""],
"cylinder": ""
},
"boo": ["", {"cc": {"dd": [""], "gg": ""}, "jj": ""}, ""],
"kalo": [""],
"lover": [""],
"small": [""]
},
"",
{"bag": ""}
],
"goo": [""],
"lls": "",
"mmn": "",
"bbbd": "",
"lulu": "",
"yeah": ["", {"jj": ""}, "", {"jj": ""}, ""],
"freak": [""],
"light": "",
"phone": "",
"_alago": {"a": ""}
}
Let me drop this answer here, it might help someone.

How to parse JSON in Delphi?

I have a JSON like this:
{
"Content": [{
"Identifier": "AABBCC",
"Description": "test terfdfg",
"GenericProductIdentifier": "AABBCC",
"ProductFamilyDescription": "sampling",
"LifeCycleStatus": "ACTIVE",
"Price": {
"Value": 1.00,
"Quantity": 1000
},
"LeadTimeWeeks": "16",
"FullBoxQty": 200,
}],
"TotalElements": 1,
"TotalPages": 1,
"NumberOfElements": 1,
"First": true,
"Size": 1,
"Number": 0
}
In Delphi 10.4, I'm trying to parse it, but I can't access the values ​​contained in 'Price'.
I wrote code like this:
var
vContent: TJSONArray;
vJson: TJSONObject;
vContentRow: TJSONObject;
i,j : Integer;
begin
Memo2.Lines.Clear;
if Memo1.Text = '' then
exit;
vJson := TJSONObject(TJSONObject.ParseJSONValue(TEncoding.ASCII.GetBytes(Memo1.Text),0));
try
vContent := TJSONArray(vJson.Get('Content').JsonValue);
for i := 0 to Pred(vContent.Count) do
begin
vContentRow := TJSONObject(vContent.Items[i]);
for j := 0 to Pred(vContentRow.Count) do
begin
Memo2.Lines.Add(' '+ vContentRow.Get(j).JsonString.Value+' : '+ vContentRow.Get(j).JsonValue.Value);
end;
end;
Memo2.Lines.Add(vContent.Value);
finally
end;
end;
What is the correct way to read the values ​​contained in 'Price'?
Here is a sample code to parse your JSON:
uses
System.IOUtils, System.JSON, System.Generics.Collections;
procedure TForm1.Button1Click(Sender: TObject);
procedure GetPrices(const S: string);
var
V: TJsonValue;
O, E, P: TJsonObject;
A: TJsonArray;
begin
V := TJSONObject.ParseJSONValue(S);
if not Assigned(V) then
raise Exception.Create('Invalid JSON');
try
O := V as TJSONObject;
A := O.GetValue<TJsonArray>('Content');
for var I := 0 to A.Count - 1 do
begin
E := A.Items[I] as TJsonObject; // Element
P := E.GetValue<TJsonObject>('Price');
ShowMessage('Value: ' + P.GetValue<string>('Value') + ' ' + 'Quantity: ' + P.GetValue<string>('Quantity'));
end;
finally
V.Free;
end;
end;
var
S: string;
begin
S := TFile.ReadAllText('d:\json.txt'); // Retrieve it using some webservice
GetPrices(S);
end;
Note, your JSON is invalid, the corect definition is:
{
"Content": [{
"Identifier": "AABBCC",
"Description": "test terfdfg",
"GenericProductIdentifier": "AABBCC",
"ProductFamilyDescription": "sampling",
"LifeCycleStatus": "ACTIVE",
"Price": {
"Value": 1.00,
"Quantity": 1000
},
"LeadTimeWeeks": "16",
"FullBoxQty": 200
}],
"TotalElements": 1,
"TotalPages": 1,
"NumberOfElements": 1,
"First": true,
"Size": 1,
"Number": 0
}
You can use the JSON library of Delphi.
The JSON library has the JsonToObject class function that can convert directly the string to an Object (Object structure)
See this:
https://docwiki.embarcadero.com/Libraries/Sydney/en/REST.Json.TJson.JsonToObject
You can create the classes structure manually o using the web: https://jsontodelphi.com/
The classes structure for your JSON created is this:
type
TPrice = class;
TPrice = class
private
FQuantity: Integer;
FValue: Double;
published
property Quantity: Integer read FQuantity write FQuantity;
property Value: Double read FValue write FValue;
end;
TContent = class
private
FDescription: string;
FFullBoxQty: Integer;
FGenericProductIdentifier: string;
FIdentifier: string;
FLeadTimeWeeks: string;
FLifeCycleStatus: string;
FPrice: TPrice;
FProductFamilyDescription: string;
published
property Description: string read FDescription write FDescription;
property FullBoxQty: Integer read FFullBoxQty write FFullBoxQty;
property GenericProductIdentifier: string read FGenericProductIdentifier write FGenericProductIdentifier;
property Identifier: string read FIdentifier write FIdentifier;
property LeadTimeWeeks: string read FLeadTimeWeeks write FLeadTimeWeeks;
property LifeCycleStatus: string read FLifeCycleStatus write FLifeCycleStatus;
property Price: TPrice read FPrice;
property ProductFamilyDescription: string read FProductFamilyDescription write FProductFamilyDescription;
public
constructor Create;
destructor Destroy; override;
end;
TRoot = class(TJsonDTO)
private
[JSONName('Content'), JSONMarshalled(False)]
FContentArray: TArray<TContent>;
[GenericListReflect]
FContent: TObjectList<TContent>;
FFirst: Boolean;
FNumber: Integer;
FNumberOfElements: Integer;
FSize: Integer;
FTotalElements: Integer;
FTotalPages: Integer;
function GetContent: TObjectList<TContent>;
protected
function GetAsJson: string; override;
published
property Content: TObjectList<TContent> read GetContent;
property First: Boolean read FFirst write FFirst;
property Number: Integer read FNumber write FNumber;
property NumberOfElements: Integer read FNumberOfElements write FNumberOfElements;
property Size: Integer read FSize write FSize;
property TotalElements: Integer read FTotalElements write FTotalElements;
property TotalPages: Integer read FTotalPages write FTotalPages;
public
destructor Destroy; override;
end;
Now, the code for parse elements is more simple. You only need a code like this to access different properties of your structure:
var
Root: TRoot;
begin
root := TJSON.JsonToObject<TRoot>(Memo1.Lines.Text);
lblid.Caption := 'TotalElements: ' + Root.TotalElements.ToString;
lblvalue.Caption := 'TotalPages: ' + Root.TotalPages.ToString;
lblcount.Caption := 'Identifier: ' + Root.Content[0].Identifier;
lblfirstonclick.Caption := 'Description: ' + Root.Content[0].Description;
lbllastonclick.Caption := 'Price/Quantity:' + Root.Content[0].Price.Quantity.ToString;
//...
Try this, i make some helper for TFDMemtable. Simple to uses, no need parsing everytime you have other JSON.
const
JSONString =
'{
"Content": [{
"Identifier": "AABBCC",
"Description": "test terfdfg",
"GenericProductIdentifier": "AABBCC",
"ProductFamilyDescription": "sampling",
"LifeCycleStatus": "ACTIVE",
"Price": {
"Value": 1.00,
"Quantity": 1000
},
"LeadTimeWeeks": "16",
"FullBoxQty": 200,
}],
"TotalElements": 1,
"TotalPages": 1,
"NumberOfElements": 1,
"First": true,
"Size": 1,
"Number": 0
}';
begin
if not Memtable.FillDataFromString(JSONString) then begin
ShowMessages(Memtable.FieldByName('messages').AsString);
end else begin
Memtable.FillDataFromString(Memtable.FieldByName('Content').AsString);
ShowMessages(Memtable.FieldByName('Price').AsString);
end;
end;
====
unit BFA.Helper.MemTable;
interface
uses
System.SysUtils, System.Types, System.UITypes, System.Classes, System.Variants,
FMX.Types, FMX.Controls, FMX.Forms, FMX.Graphics, FMX.Dialogs, FMX.Memo.Types,
System.Rtti, FMX.Grid.Style, FMX.Grid, FMX.ScrollBox, FMX.Memo, FMX.Edit,
FMX.Controls.Presentation, FMX.StdCtrls, FireDAC.Stan.Intf,
FireDAC.Stan.Option, FireDAC.Stan.Param, FireDAC.Stan.Error, FireDAC.DatS,
FireDAC.Phys.Intf, FireDAC.DApt.Intf, System.Net.URLClient,
System.Net.HttpClient, System.Net.HttpClientComponent, Data.DB,
FireDAC.Comp.DataSet, FireDAC.Comp.Client, System.JSON, System.Net.Mime;
type
TFDMemTableHelper = class helper for TFDMemTable
procedure FillError(FMessage, FError : String);
function FillDataFromString(FJSON : String) : Boolean; //ctrl + shift + C
end;
implementation
{ TFDMemTableHelper }
function TFDMemTableHelper.FillDataFromString(FJSON: String): Boolean; //bug memoryleak fix at checking is Object / array soon
const
FArr = 0;
FObj = 1;
FEls = 2;
function isCheck(FString : String) : Integer; begin
Result := FEls;
var FCheck := TJSONObject.ParseJSONValue(FJSON);
if FCheck is TJSONObject then
Result := FObj
else if FCheck is TJSONArray then
Result := FArr;
FCheck.DisposeOf;
end;
var
JObjectData : TJSONObject;
JArrayJSON : TJSONArray;
JSONCheck : TJSONValue;
begin
var FResult := isCheck(FJSON);
try
Self.Active := False;
Self.Close;
Self.FieldDefs.Clear;
if FResult = FObj then begin
JObjectData := TJSONObject.ParseJSONValue(FJSON) as TJSONObject;
end else if FResult = FArr then begin
JArrayJSON := TJSONObject.ParseJSONValue(FJSON) as TJSONArray;
JObjectData := TJSONObject(JArrayJSON.Get(0));
end else begin
Self.FillError('FAILED PARSING JSON', 'THIS IS NOT JSON');
Result := False;
Exit;
end;
for var i := 0 to JObjectData.Size - 1 do begin
Self.FieldDefs.Add(
StringReplace(JObjectData.Get(i).JsonString.ToString, '"', '', [rfReplaceAll, rfIgnoreCase]),
ftString,
100000,
False
);
end;
Self.CreateDataSet;
Self.Active := True;
Self.Open;
try
if FResult = FArr then begin
for var i := 0 to JArrayJSON.Size - 1 do begin
JObjectData := TJSONObject(JArrayJSON.Get(i));
Self.Append;
for var ii := 0 to JObjectData.Size - 1 do begin
JSONCheck := TJSONObject.ParseJSONValue(JObjectData.GetValue(Self.FieldDefs[ii].Name).ToJSON);
if JSONCheck is TJSONObject then
Self.Fields[ii].AsString := JObjectData.GetValue(Self.FieldDefs[ii].Name).ToJSON
else if JSONCheck is TJSONArray then
Self.Fields[ii].AsString := JObjectData.GetValue(Self.FieldDefs[ii].Name).ToJSON
else
Self.Fields[ii].AsString := JObjectData.Values[Self.FieldDefs[ii].Name].Value;
JSONCheck.DisposeOf;
end;
Self.Post;
end;
end else begin
Self.Append;
for var ii := 0 to JObjectData.Size - 1 do begin
JSONCheck := TJSONObject.ParseJSONValue(JObjectData.GetValue(Self.FieldDefs[ii].Name).ToJSON);
if JSONCheck is TJSONObject then
Self.Fields[ii].AsString := JObjectData.GetValue(Self.FieldDefs[ii].Name).ToJSON
else if JSONCheck is TJSONArray then
Self.Fields[ii].AsString := JObjectData.GetValue(Self.FieldDefs[ii].Name).ToJSON
else
Self.Fields[ii].AsString := JObjectData.Values[Self.FieldDefs[ii].Name].Value;
JSONCheck.DisposeOf;
end;
Self.Post;
end;
Result := True;
except
on E : Exception do begin
Result := False;
Self.FillError('Error Parsing JSON', E.Message);
end;
end;
finally
if FResult = FObj then
JObjectData.DisposeOf;
if FResult = FArr then
JArrayJSON.DisposeOf;
Self.First;
end;
end;
procedure TFDMemTableHelper.FillError(FMessage, FError : String);
begin
Self.Active := False;
Self.Close;
Self.FieldDefs.Clear;
Self.FieldDefs.Add('status', ftString, 200, False);
Self.FieldDefs.Add('messages', ftString, 200, False);
Self.FieldDefs.Add('data', ftString, 200, False);
Self.CreateDataSet;
Self.Active := True;
Self.Open;
Self.Append;
Self.Fields[0].AsString := FError;
Self.Fields[1].AsString := FMessage;
Self.Post;
end;
end.

In PostgreSQL, how to add a list to an existing JSON object, using JSONB

This code:
do
$j$
declare arr text[]; i int; num text; obj jsonb; jb_arr jsonb[];
begin
jb_arr = array_append(jb_arr, jsonb_build_object('k', 'acg', 'v', 'val'));
jb_arr = array_append(jb_arr, jsonb_build_object('k', 'xyz', 'v', 'xxx'));
obj = (select '{ "cmds":[]}'::jsonb);
RAISE NOTICE '%', to_jsonb(jb_arr);
RAISE NOTICE '%', obj;
end;
$j$
Outputs this:
[{"k": "acg", "v": "val"}, {"k": "xyz", "v": "xxx"}]
{"cmds": []}
How do I merge those two so that I end up with this:
{"cmds": [{"k": "acg", "v": "val"}, {"k": "xyz", "v": "xxx"}]}
Thanks!
this is how: (select json_build_object('cmds', jb_arr));