JSON to PL/SQL table -> ORA-00942

I have a JSON string parameter. I would like to create a function that returns a table with the data.
I created a type:
CREATE OR REPLACE TYPE AttachmentResponseType AS OBJECT (
savePath VARCHAR (255) ,
originalFileName VARCHAR (255) ,
fileSize number(20) );
I created a collection (table) type:
CREATE OR REPLACE TYPE JSON_DOC_JUKEBOX_TBL IS TABLE OF AttachmentResponseType;
And a function:
CREATE OR REPLACE FUNCTION JSONSTRING_TO_JB_TYPE (p_json in varchar2) return JSON_DOC_JUKEBOX_TBL as
resTBL JSON_DOC_JUKEBOX_TBL;
JSON_VAR varchar2(3000);
begin
JSON_VAR := p_json;
with x as (SELECT t.savePath, t.originalFileName, t.fileSize FROM JSON_TABLE(
JSON_VAR,
'$'
COLUMNS(
NESTED '$.AttachmentResponseType[*]'COLUMNS (
savePath varchar2(255),
originalFileName varchar2(255),
fileSize number(20)
)
)
) as t) select AttachmentResponseType(x.savePath, x.originalFileName, x.fileSize) bulk collect INTO resTBL from x;
commit;
return resTBL;
end;
I'm trying to test it; I have a tst table with the same columns for that:
declare
test JSON_DOC_JUKEBOX_TBL;
begin
test := JSONSTRING_TO_JB_TYPE('{"AttachmentResponseType" : [{
"savePath" : "dsadsad/dsadas.xaxaxa.pfg",
"originalFileName" : "xaxaxa.pfg",
"fileSize" : "12"},
{
"savePath" : "dsadsad/1111g",
"originalFileName" : "1.pfg",
"fileSize" : "1112"}]}');
INSERT INTO tst
(savePath, originalFileName, fileSize) values
(SELECT savePath, originalFileName, fileSize
FROM test);
commit;
end;
test is highlighted as 'Undefined table', and when I run it, the exception is ORA-00942.
Am I doing something wrong? Should I initialise the 'test' table somehow?

Instead of using multiple TYPEs and a FUNCTION just to insert the records, why not just use INSERT INTO ... SELECT ...?
See the example below:
CREATE TABLE tst
(
savePath VARCHAR (255),
originalFileName VARCHAR (255),
fileSize NUMBER (20)
);
INSERT INTO tst (savepath, originalfilename, filesize)
SELECT savepath, originalfilename, filesize
FROM JSON_TABLE (
'{"AttachmentResponseType" : [{
"savePath" : "dsadsad/dsadas.xaxaxa.pfg",
"originalFileName" : "xaxaxa.pfg",
"fileSize" : "12"},
{
"savePath" : "dsadsad/1111g",
"originalFileName" : "1.pfg",
"fileSize" : "1112"}]}',
'$'
COLUMNS (
NESTED '$.AttachmentResponseType[*]'
COLUMNS (savePath VARCHAR2 (255),
originalFileName VARCHAR2 (255),
fileSize NUMBER (20))));
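That said, if you do keep the function, the ORA-00942 in your test block comes from treating the PL/SQL variable test like a table (and from combining VALUES with a subquery). Because JSON_DOC_JUKEBOX_TBL is a schema-level collection type, the collection can be queried through the TABLE() operator instead; a minimal sketch of the test block under that assumption:
declare
test JSON_DOC_JUKEBOX_TBL;
begin
test := JSONSTRING_TO_JB_TYPE('{"AttachmentResponseType" : [{"savePath" : "dsadsad/dsadas.xaxaxa.pfg", "originalFileName" : "xaxaxa.pfg", "fileSize" : "12"}]}');
-- query the collection via TABLE(); no VALUES clause is used with a subquery
INSERT INTO tst (savePath, originalFileName, fileSize)
SELECT t.savePath, t.originalFileName, t.fileSize
FROM TABLE(test) t;
commit;
end;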


Sum fields inside json array in mysql

I have this table:
CREATE TABLE stackoverflow_question (
id int NOT NULL AUTO_INCREMENT,
name varchar(255) NOT NULL,
json_ob mediumtext default null,
PRIMARY KEY (id)
);
I do some inserts:
insert into stackoverflow_question values(null, 'albert', '[{name: "albert1", qt: 2},{name: "albert2", qt: 2}]');
insert into stackoverflow_question values(null, 'barbara', '[{name: "barbara1", qt: 4},{name: "barbara2", qt: 7}]');
insert into stackoverflow_question values(null, 'paul', '[{name: "paul1", qt: 9},{name: "paul2", qt: 11}]');
Eventually, I will need to sort this table by total quantity.
In the examples above, "paul" has quantity = 20, "barbara" has quantity = 11, and "albert" has quantity = 4.
Is it possible to create a select statement where a new field is created on the fly? Something like this:
SELECT
SUM (loop json_ob and sum all the quantity fields) AS total_quantity,
id,
name
FROM
stackoverflow_question
ORDER BY total_quantity
If json_ob is actually a valid json object then you can use JSON_TABLE() to extract the quantities and aggregate:
SELECT s.*, SUM(t.qt) total_quantity
FROM stackoverflow_question s,
JSON_TABLE(json_ob, '$[*]' COLUMNS (qt INTEGER PATH '$.qt')) t
GROUP BY s.id
ORDER BY total_quantity DESC;
According to jsonlint, your JSON is not valid.
That's why this SQL returns an error (ERROR 3141 (22032): Invalid JSON text in argument 1 to function json_table: "Missing a name for object member." at position 2.")
SELECT
j.name, j.qt
FROM JSON_TABLE('[{name: "paul1", qt: 9},{name: "paul2", qt: 11}]',
"$[*]" COLUMNS (name varchar(20) PATH "$.name", qt int PATH "$.qt")) j ;
and this will return the values:
SELECT
j.name, j.qt
FROM JSON_TABLE('[{"name": "paul1", "qt": 9},{"name": "paul2", "qt": 11}]',
"$[*]" COLUMNS (name varchar(20) PATH "$.name", qt int PATH "$.qt")) j ;
Output:
+-------+----+
| name  | qt |
+-------+----+
| paul1 |  9 |
| paul2 | 11 |
+-------+----+
You can convert your relaxed JSON to valid JSON using tools like www.relaxedjson.org.
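Relatedly, MySQL's JSON_VALID() can be used to spot rows whose json_ob would make JSON_TABLE fail before running the aggregate query; a minimal sketch against the stackoverflow_question table above:
-- rows whose json_ob is missing or not parseable JSON (these would break JSON_TABLE)
SELECT id, name
FROM stackoverflow_question
WHERE json_ob IS NULL OR NOT JSON_VALID(json_ob);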

How can I use json values as columns in a query SQL?

I just want to receive JSON data and use its fields as table columns for other queries.
I'm trying to turn the value of the key "nameProperty" into a column in a table, with the value of the corresponding "newValue" key filling the rows of that column.
For example, I get a JSON file like this:
{
"operation":{
"ID":"ABC",
"KinshipDescription":"--"
},
"fields":[
{
"property":{
"nameProperty":"ID",
"oldValue":"",
"newValue":"123456",
"confirmed":"false",
"labelProperty":"ID",
"oldValueDescription":"",
"newValueDescription":"123456"
}
},
{
"property":{
"nameProperty":"Name",
"oldValue":"",
"newValue":"John",
"confirmed":"false",
"labelProperty":"Name",
"oldValueDescription":"",
"newValueDescription":"John"
}
}
]
}
I want to extract the objects in the "fields" list, but I can only get them as one row per property, with the key in one column and the value in another, as the script below does.
DECLARE @jsonObj NVARCHAR(MAX)
--Set a result in
SET @jsonObj = (select JSON_Query(data, '$.fields') from table where id = 'ABC')
select * from openjson(@jsonObj)
with (Property nvarchar(255) '$.property.nameProperty',
newValue nvarchar(50) '$.property.newValue')
and I have no idea how I can do this.
The results of this script are something like this:
ID 123456
Name John
and the results that I want to see are:
ID Name --column name, not a row
123456 John
The quickest (thought-wise, if not necessarily performance-wise) way I can come up with for this is dynamic SQL. In fact, I'm pretty certain you'll have to use it.
Here's an example that should get you moving; you can run it in SSMS.
DECLARE @json NVARCHAR(MAX) =
'{
"operation":{
"ID":"ABC",
"KinshipDescription":"--"
},
"fields":[
{
"property":{
"nameProperty":"ID",
"oldValue":"",
"newValue":"123456",
"confirmed":"false",
"labelProperty":"ID",
"oldValueDescription":"",
"newValueDescription":"123456"
}
},
{
"property":{
"nameProperty":"Name",
"oldValue":"",
"newValue":"John",
"confirmed":"false",
"labelProperty":"Name",
"oldValueDescription":"",
"newValueDescription":"John"
}
}
]
}';
-- Variable to hold the column/values.
DECLARE @cols VARCHAR(MAX) = '';
-- Generate the column/value pairs.
SELECT
@cols = @cols
+ CASE WHEN ( LEN( @cols ) > 0 ) THEN ', ' ELSE '' END -- add comma if needed.
+ '''' + Properties.newValue + ''' AS [' + Properties.nameProperty + '] '
FROM OPENJSON( @json, '$.fields' ) WITH (
property NVARCHAR(MAX) '$.property' AS JSON
)
CROSS APPLY (
SELECT * FROM OPENJSON( property ) WITH (
nameProperty VARCHAR(50) '$.nameProperty',
oldValue VARCHAR(50) '$.oldValue',
newValue VARCHAR(50) '$.newValue',
confirmed VARCHAR(50) '$.confirmed',
labelProperty VARCHAR(50) '$.labelProperty',
oldValueDescription VARCHAR(50) '$.oldValueDescription',
newValueDescription VARCHAR(50) '$.newValueDescription'
)
) AS Properties;
-- Execute column/value pairs as dynamic SQL.
EXEC ( 'SELECT ' + @cols );
Which returns:
+--------+------+
| ID | Name |
+--------+------+
| 123456 | John |
+--------+------+
If you were to PRINT @cols, you would see:
'123456' AS [ID] , 'John' AS [Name]
A few quick notes:
Performance may vary.
Values are quoted but can be CAST if needed.
Included all 'property' fields in CROSS APPLY for example. Only specify what is needed.
Note the use of NVARCHAR when using AS JSON
May want to consider OUTER APPLY if there's potential for no 'property' present.
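On the CAST note above: the generated pairs return every value as a string, so a column such as ID could be cast in the dynamic SELECT instead. A standalone illustration of the idea (values hard-coded for brevity, not taken from the JSON):
-- the dynamic SQL could emit CASTs around numeric values instead of plain quoted strings
SELECT CAST('123456' AS INT) AS [ID], 'John' AS [Name];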

Data truncation: '' While Inserting or Updating Value

I am trying to insert/update a string (JSON). While inserting/updating the value I get a data truncation error.
I have tried making a JSON-type column and passing a JSON_OBJECT(), but that fails as well.
select '''[{"id":"1202","title":"Asian","notes":"","active":"1"}]''';
CREATE TABLE mktesttable (
id int NOT NULL,
s VARCHAR(34530) NOT NULL
);
INSERT INTO mktesttable
select 1, '''[{"id":"1202","title":"Asian","notes":"","active":"1"}]''';
select * from mktesttable;
-- That works
INSERT INTO mktesttable
SELECT
patient_data.id,
CONCAT(
'''[{"id":"', patient_data.race,
'","title":"', list_options.title,
'","notes":"', list_options.notes,
'","active":"', list_options.active,
'"}]'''
) as s
FROM
patient_data
INNER JOIN list_options
ON patient_data.race = list_options.id order by 1 desc
This yields the same kind of result (IDs and data vary), but doesn't work.
If you want to store a JSON object, you should use the JSON datatype instead of strings. To create a JSON object, you can use JSON_OBJECT():
CREATE TABLE mktesttable (
id int NOT NULL,
s JSON NOT NULL
);
INSERT INTO mktesttable
SELECT
patient_data.id,
JSON_OBJECT(
'id', patient_data.race,
'title', list_options.title,
'notes', list_options.notes,
'active', list_options.active
)
FROM
patient_data
INNER JOIN list_options
ON patient_data.race = list_options.id
ORDER BY patient_data.id desc
If you need a JSON array as shown in your sample data, then:
JSON_ARRAY(
JSON_OBJECT(
'id', patient_data.race,
'title', list_options.title,
'notes', list_options.notes,
'active', list_options.active
)
)
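Put together, a sketch of the full insert with the array wrapper (assuming the same mktesttable, patient_data and list_options columns as above):
INSERT INTO mktesttable (id, s)
SELECT
patient_data.id,
JSON_ARRAY(
JSON_OBJECT(
'id', patient_data.race,
'title', list_options.title,
'notes', list_options.notes,
'active', list_options.active
)
)
FROM
patient_data
INNER JOIN list_options
ON patient_data.race = list_options.id;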

"Missing comma" error while inserting clob value in table

CREATE TABLE fcc_consistency_check
(
cons_id VARCHAR2(30),
cons_desc VARCHAR2(4000),
cons_query CLOB,
module_id VARCHAR2(2),
main_tab_name VARCHAR2(30),
hist_tab_name VARCHAR2(30),
col_name VARCHAR2(4000),
col_type VARCHAR2(4000),
check_reqd VARCHAR2(1)
);
INSERT INTO fcc_consistency_check
VALUES ('CHK_BC003','Missing records in contract_event_log','select a.CONTRACT_REF_NO ,a.Latest_Event_Seq_No,
c.PREV_WORKING_DAY from cstb_contract A ,sttm_dates c
where module_code = 'BC'
and c.Branch_code='000'
and not exists (select * from cstb_contract_event_log B
where a.contract_ref_no = b.contract_ref_no
and latest_event_seq_no = event_seq_no);',
'BC','BCCC_EVENT_LOG_MISREC','BCCC_EVENT_LOG_MISREC_HISTORY','CONTRACT_REF_NO,LATEST_EVENT_SEQ_NO,EOD_DATE','VARCHAR2(16),NUMBER,DATE','Y');
I'm not able to insert the CLOB value; I'm getting this error:
ORA-00917: missing comma
When I tried inserting each column value individually, I found that the error is thrown for the column cons_query.
The problem is that you have quotes within your query:
'select a.CONTRACT_REF_NO
,a.Latest_Event_Seq_No,
c.PREV_WORKING_DAY from cstb_contract A ,sttm_dates
where module_code = 'BC'
                    ^ the string literal that started at 'select ends at this quote, so Oracle sees BC outside the string and reports a missing comma
However, the actual issue is not that a comma is missing but that you have quotes you forgot to escape. You need to write module_code = ''BC'', for example, to escape those quotes (you have additional unescaped quotes in there, not just at 'BC').
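For illustration, the same INSERT parses once the inner quotes are doubled (Oracle's alternative quoting syntax, e.g. q'[ ... ]', would avoid the doubling entirely):
INSERT INTO fcc_consistency_check
VALUES ('CHK_BC003','Missing records in contract_event_log','select a.CONTRACT_REF_NO ,a.Latest_Event_Seq_No,
c.PREV_WORKING_DAY from cstb_contract A ,sttm_dates c
where module_code = ''BC''
and c.Branch_code=''000''
and not exists (select * from cstb_contract_event_log B
where a.contract_ref_no = b.contract_ref_no
and latest_event_seq_no = event_seq_no);',
'BC','BCCC_EVENT_LOG_MISREC','BCCC_EVENT_LOG_MISREC_HISTORY','CONTRACT_REF_NO,LATEST_EVENT_SEQ_NO,EOD_DATE','VARCHAR2(16),NUMBER,DATE','Y');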

How do I generate nested json objects using mysql native json functions?

Using only the native JSON functions (no PHP, etc.) in MySQL version 5.7.12 (section 13.16 in the manual), I am trying to write a query that generates a JSON document, containing a sub-object, from relational tables. Given the following example:
CREATE TABLE `parent_table` (
`id` int(11) NOT NULL,
`desc` varchar(20) NOT NULL,
PRIMARY KEY (`id`)
);
CREATE TABLE `child_table` (
`id` int(11) NOT NULL,
`parent_id` int(11) NOT NULL,
`desc` varchar(20) NOT NULL,
PRIMARY KEY (`id`,`parent_id`)
);
insert `parent_table` values (1,'parent row 1');
insert `child_table` values (1,1,'child row 1');
insert `child_table` values (2,1,'child row 2');
I am trying to generate a JSON document that looks like this:
[{
"id" : 1,
"desc" : "parent row 1",
"child_objects" : [{
"id" : 1,
"parent_id" : 1,
"desc" : "child row 1"
}, {
"id" : 2,
"parent_id" : 1,
"desc" : "child row 2"
}
]
}]
I am new to MySQL and suspect there is a SQL pattern for generating nested JSON objects from one-to-many relationships, but I'm having trouble finding it.
In Microsoft SQL (which I'm more familiar with) the following works:
select
[p].[id]
,[p].[desc]
,(select * from [dbo].[child_table] where [parent_id] = [p].[id] for json auto) AS [child_objects]
from [dbo].[parent_table] [p]
for json path
I attempted to write the equivalent in MySQL as follows:
select json_object(
'id',p.id
,'desc',p.`desc`
,'child_objects',(select json_object('id',id,'parent_id',parent_id,'desc',`desc`)
from child_table where parent_id = p.id)
)
from parent_table p;
select json_object(
'id',p.id
,'desc',p.`desc`
,'child_objects',json_array((select json_object('id',id,'parent_id',parent_id,'desc',`desc`)
from child_table where parent_id = p.id))
)
from parent_table p
Both attempts fail with the following error:
Error Code: 1242. Subquery returns more than 1 row
The reason you are getting these errors is that the parent JSON object is not expecting a result set as one of its inputs; it needs simple pairs like {name, string}, etc. (there is a bug report about this, so it may become available in future functionality). This just means you need to convert your multi-row result into a comma-separated concatenation and then convert that into a JSON array.
You almost had it with your second example. You can achieve what you are after with the GROUP_CONCAT function:
select json_object(
'id',p.id
,'desc',p.`desc`
,'child_objects',json_array(
(select GROUP_CONCAT(
json_object('id',id,'parent_id',parent_id,'desc',`desc`)
)
from child_table
where parent_id = p.id))
)
from parent_table p;
This almost works, but it ends up treating the subquery result as a string, which leaves the escape characters in there:
'{\"id\": 1,
\"desc\": \"parent row 1\",
\"child_objects\":
[\"
{\\\"id\\\": 1,
\\\"desc\\\": \\\"child row 1\\\",
\\\"parent_id\\\": 1
},
{\\\"id\\\": 2,
\\\"desc\\\": \\\"child row 2\\\",
\\\"parent_id\\\": 1}\"
]
}'
In order to get this working in an appropriate format, you need to change the way you create the JSON output as follows:
select json_object(
'id',p.id
,'desc',p.`desc`
,'child_objects',(select CAST(CONCAT('[',
GROUP_CONCAT(
JSON_OBJECT(
'id',id,'parent_id',parent_id,'desc',`desc`)),
']')
AS JSON) from child_table where parent_id = p.id)
) from parent_table p;
This will give you the exact result you require:
'{\"id\": 1,
\"desc\": \"parent row 1\",
\"child_objects\":
[{\"id\": 1,
\"desc\": \"child row 1\",
\"parent_id\": 1
},
{\"id\": 2,
\"desc\": \"child row 2\",
\"parent_id\": 1
}]
}'
For MariaDB, CAST ... AS JSON does not work, but JSON_EXTRACT may be used to convert a string to a JSON object:
select json_object(
'id',p.id
,'desc',p.`desc`
,'child_objects',JSON_EXTRACT(IFNULL((select
CONCAT('[',GROUP_CONCAT(
json_object('id',id,'parent_id',parent_id,'desc',`desc`)
),']')
from child_table where parent_id = p.id),'[]'),'$')
) from parent_table p;
I tried the GROUP_CONCAT solution, but I ran into problems with larger strings because of GROUP_CONCAT's limits (group_concat_max_len).
I wrote a new function that solves the problem of converting a string into a JSON object, shown below along with how to use it.
Tested on MariaDB 10.5.12.
Usage: https://i.stack.imgur.com/cWfd7.jpg
CREATE FUNCTION `ut_tf_array`(input_json longtext) RETURNS longtext CHARSET utf8mb4 COLLATE utf8mb4_unicode_ci
COMMENT 'Function for transform json array agg'
BEGIN
DECLARE transformed_data_list longtext ;
DECLARE record longtext ;
DECLARE i_count int ;
DECLARE i_count_items int ;
SET i_count = 0;
SET i_count_items = JSON_LENGTH(JSON_EXTRACT(input_json,'$'));
SET transformed_data_list = '[]';
-- return array with length = zero
IF input_json is NULL THEN
RETURN transformed_data_list;
END IF;
WHILE i_count < i_count_items DO
-- fetch into record
SELECT JSON_EXTRACT( JSON_EXTRACT( input_json ,'$') , CONCAT('$[',i_count,']')) INTO record;
-- append to transformed_data_list
SELECT JSON_ARRAY_APPEND(transformed_data_list, '$', JSON_EXTRACT(record, '$')) into transformed_data_list;
SET i_count := i_count + 1;
END WHILE;
-- done
RETURN transformed_data_list;
END
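The usage screenshot isn't reproduced here, so as a minimal hypothetical call (the JSON literal below is invented for illustration): pass in a JSON array string and the function walks it element by element, returning it re-assembled as a proper JSON array (or '[]' for NULL input):
SELECT ut_tf_array('[{"id": 1, "desc": "child row 1"}, {"id": 2, "desc": "child row 2"}]') AS result;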
The query below works for me (note that JSON_ARRAYAGG requires MySQL 5.7.22 or later):
SELECT JSON_ARRAYAGG(JSON_OBJECT('id', t.id, 'desc', t.`desc`, 'child_objects', t.temp_json)) AS json_value
FROM (
SELECT p.id, p.`desc`,
JSON_ARRAYAGG(JSON_OBJECT('id', c.id, 'parent_id', c.parent_id, 'desc', c.`desc`)) AS temp_json
FROM parent_table p
JOIN child_table c ON c.parent_id = p.id
GROUP BY p.id, p.`desc`
) t;