I need to create a function that takes a CLOB as input and removes the array element matching a given condition.
create or replace FUNCTION remove_config_node_by_key (
    p_in_json IN CLOB,
    p_in_key  IN VARCHAR2
) RETURN CLOB IS
    l_ja  json_array_t;
    l_po  json_object_t;
    l_key VARCHAR2(500);
BEGIN
    -- json_array_t.parse(NULL) yields a NULL object, and any method call on
    -- it raises ORA-30625 ("method dispatch on NULL SELF"), which is exactly
    -- the error seen when a row's configuration column is NULL.  Pass a NULL
    -- configuration through unchanged.
    IF p_in_json IS NULL THEN
        RETURN NULL;
    END IF;
    l_ja := json_array_t.parse(p_in_json);
    -- REVERSE is required: remove(idx) shifts every later element down one
    -- position, so a forward loop would skip the element that follows each
    -- removal and eventually index past the shrunken end of the array.
    FOR idx IN REVERSE 0 .. l_ja.get_size - 1 LOOP
        l_po  := json_object_t(l_ja.get(idx));
        l_key := l_po.get_string('key');
        -- check if the key matches with input and then delete that node.
        IF l_key = p_in_key THEN
            dbms_output.put_line('Key to remove in the JSON: ' || l_key);
            l_ja.remove(idx);
            dbms_output.put_line('Key is removed in the JSON: ' || l_key);
        END IF;
    END LOOP;
    RETURN l_ja.to_clob;
END;
When called with:
update COLD_DRINKS cd set cd.configuration = remove_config_node_by_key(cd.configuration, 'b')
where country='INDIA';
I get error:
Error report -
ORA-30625: method dispatch on NULL SELF argument is disallowed
ORA-06512: at "SYS.JSON_OBJECT_T", line 72
ORA-06512: at "PLATFORM_ADMIN_DATA.REMOVE_CONFIG_NODE_BY_KEY", line 11
input JSON:
[
{
"key": "a",
"value": "lemon soda"
},
{
"key": "b",
"value": "Coke"
},
{
"key": "c",
"value": "Pepsi"
}
]
Expected JSON after execution:
[
{
"key": "a",
"value": "lemon soda"
},
{
"key": "c",
"value": "Pepsi"
}
]
I think something is wrong about this l_ja.remove (idx); as this one causes the exception. Not able to remove the object at index.
In 18c at least it works with your sample data (with the trailing comma removed from the array), but it gets that error with a null configuration.
So you can either test for null in your function, or exclude nulls from your update, or fix your data so it doesn't have nulls.
The simplest thing to do is probably add a null check:
...
BEGIN
IF p_in_json IS NULL THEN
RETURN NULL;
END IF;
l_ja := json_array_t.parse(p_in_json);
...
fiddle
You can also remove it using json_transform:
create or replace function remove_config_node_by_key (
    p_in_json IN CLOB,
    p_in_key  IN VARCHAR2
) RETURN CLOB IS
    l_result CLOB;
    l_key    VARCHAR2(4000);
begin
    -- The json_transform REMOVE path must be a literal in the statement text
    -- (path expressions cannot be bound), so the key has to be concatenated
    -- in.  Escape backslashes and double quotes so a malicious p_in_key
    -- cannot break out of the double-quoted path string (SQL injection).
    -- Note: the current-item accessor in the path predicate is "@".
    l_key := replace(replace(p_in_key, '\', '\\'), '"', '\"');
    execute immediate q'~select json_transform(
        :p_in_json,
        REMOVE '$[*]?(@.key == "~' || l_key || q'~")'
    )
    from dual~' into l_result using p_in_json
    ;
    return l_result ;
end ;
/
(with all usual comments regarding possible SQL injection...)
The issue was resolved when I added REVERSE in for loop
before [ERROR]
FOR idx IN 0.. l_ja.get_size - 1
after [PASS]
FOR idx IN REVERSE 0.. l_ja.get_size - 1
Complete working function
CREATE OR REPLACE FUNCTION remove_config_node_by_key (
    p_in_json IN CLOB,
    p_in_key  IN VARCHAR2
) RETURN CLOB IS
    l_ja  json_array_t := json_array_t ();
    l_po  json_object_t;
    l_key VARCHAR2(500);
BEGIN
    -- A NULL CLOB makes json_array_t.parse return a NULL object, and the
    -- method calls below then raise ORA-30625 (the error reported when rows
    -- have a NULL configuration).  Return NULL unchanged instead.
    IF p_in_json IS NULL THEN
        RETURN NULL;
    END IF;
    l_ja := json_array_t.parse(p_in_json);
    -- REVERSE so that remove(idx) never shifts an element we have yet to visit.
    FOR idx IN REVERSE 0 .. l_ja.get_size - 1
    LOOP
        l_po  := json_object_t(l_ja.get(idx));
        l_key := l_po.get_string('key');
        -- check if the key matches with input and then delete that node.
        IF l_key = p_in_key THEN
            dbms_output.put_line('Key to remove in the JSON: ' || l_key || ' at index : ' || idx);
            l_ja.remove (idx);
            dbms_output.put_line('Key is removed in the JSON: ' || l_key);
        END IF;
    END LOOP;
    RETURN l_ja.to_clob;
END;
/
I want to transform a JSON table on a JSON list.
This code :
-- Demonstrates why the aggregation collapses: a jsonb object keeps at most
-- one value per key, so aggregating two pairs that share the key 'MyKey'
-- leaves only the last one (as the second NOTICE shows).
DO
$$
DECLARE
varMyJson jsonb;
BEGIN
varMyJson := '[{"Field1":"Value1"},{"Field2":"Value2"}]'::jsonb;
RAISE NOTICE 'varMyJson : %', varMyJson;
-- Aggregate every array element under the same constant key 'MyKey';
-- duplicate keys are overwritten, which is why only {"Field2": "Value2"}
-- survives in the result.
SELECT jsonb_object_agg(a.key, a.value)
INTO varMyJson
FROM
(
SELECT 'MyKey' as key, JsonString.value
FROM jsonb_array_elements(varMyJson) JsonString
) a;
RAISE NOTICE 'varMyJson : %', varMyJson;
END
$$
returns :
NOTICE: varMyJson : [{"Field1": "Value1"}, {"Field2": "Value2"}]
NOTICE: varMyJson : {"MyKey": {"Field2": "Value2"}}
But, I want this :
{"MyKey":{"Field1":"Value1"},"MyKey":{"Field2": "Value2"}}
I don't understand why it doesn't work.
You cannot have a jsonb object with duplicate keys. Your json_object_agg function will work expectedly when your keys are unique.
You can get your desired results in a jsonb array:
-- Wrap each array element in its own {"myKey": ...} object and collect the
-- wrappers into an array: an array (unlike a jsonb object) can contain the
-- same key any number of times.
with data as (
select '[{"Field1":"Value1"},{"Field2":"Value2"}]'::jsonb as items
)
select json_agg(v) from (
select jsonb_build_object('myKey', jsonb_array_elements(items)) as v from data
) x
-- json_object_agg over the plain json type (not jsonb) does not deduplicate
-- keys, so every element can be emitted under the same 'MyKey' — giving the
-- duplicate-key result shown below.
SELECT json_object_agg('MyKey', JsonString.value)
FROM json_array_elements('[{"Field1": "Value1"}, {"Field2": "Value2"}]' :: json) JsonString
result = { "MyKey" : {"Field1": "Value1"}, "MyKey" : {"Field2": "Value2"} }
Let's say that I am trying to express that a Binding must have exactly 1 partner that is a FunctionalClass and exactly 1 partner that is a Protein.
I wonder if this is enough
# Binding: exactly one partner conforming to FunctionalClass and exactly one
# conforming to Protein (qualified cardinality constraints on resnet:partner).
resnet:Binding
rdf:type owl:Class ;
rdf:type sh:NodeShape ;
rdfs:label "Binding" ;
rdfs:subClassOf owl:Thing ;
sh:property [
sh:path resnet:partner ;
# The SHACL constraint property is sh:nodeKind (lower-case "n");
# sh:NodeKind is not a defined property and would be silently ignored.
sh:nodeKind sh:IRI ;
sh:qualifiedMaxCount 1 ;
sh:qualifiedMinCount 1 ;
sh:qualifiedValueShape [
sh:class resnet:FunctionalClass ;
] ;
] ;
sh:property [
sh:path resnet:partner ;
sh:nodeKind sh:IRI ;
sh:qualifiedMaxCount 1 ;
sh:qualifiedMinCount 1 ;
sh:qualifiedValueShape [
sh:class resnet:Protein ;
] ;
] ;
.
Or do i need the full ceremony
# Binding with "full ceremony": the explicit minCount/maxCount 2 pair forces
# two distinct partner values, so a single node that is both a FunctionalClass
# and a Protein cannot satisfy both qualified constraints at once (see the
# discussion below).
resnet:Binding
rdf:type owl:Class ;
rdf:type sh:NodeShape ;
rdfs:label "Binding" ;
rdfs:subClassOf owl:Thing ;
sh:property [
sh:path resnet:partner ;
# total partner count must be exactly 2
sh:minCount 2 ;
sh:maxCount 2 ;
] ;
sh:property [
sh:path resnet:partner ;
# exactly one partner conforming to the FunctionalClass shape
sh:qualifiedMaxCount 1 ;
sh:qualifiedMinCount 1 ;
sh:qualifiedValueShape [
sh:class resnet:FunctionalClass ;
] ;
] ;
sh:property [
sh:path resnet:partner ;
# exactly one partner conforming to the Protein shape
sh:qualifiedMaxCount 1 ;
sh:qualifiedMinCount 1 ;
sh:qualifiedValueShape [
sh:class resnet:Protein ;
] ;
] ;
.
I believe you need the second for the case where some partner is both FunctionalClass and a Protein, i.e. you may only have one value and still fulfill the first shape.
Alternatively, this looks like a case for sh:qualifiedValueShapesDisjoint
I have some sql that I want to pass into a mysql stored procedure. I'm using the json functions in mysql-json-udfs-0.4.0-labs-json-udfs-linux-glibc2.5-x86_64. We are running a mysql 5.5.4 server. Updating to 5.7.x is an option.
When I run
-- MySQL user variables are written @name; a leading "#" starts a comment,
-- so the "#mapJSON" form (an @ mangled to #) would never parse.
set @mapJSON = '[{"from":12,"to":0},{"from":11,"to":-1},{"from":1,"to":1}]' ;
select json_extract(@mapJSON,'from') `from`,json_extract(@mapJSON,'to') `to` ;
I am expecting
from to
12 0
11 -1
1 1
I am getting
from to
{"from":12,"to":0} {"from":12,"to":0}
The question is how to extract rows from a json array using the udf json_extract 0.4.0?
I solved this for the moment by using common_schema with JSON as
{
"map": [
{
"from": 12,
"to": 0
},
{
"from": 1,
"to": 10
},
{
"from": 2,
"to": 20
},
{
"from": 3,
"to": 30
},
{
"from": 4,
"to": 40
},
{
"from": 5,
"to": 50
},
{
"from": 6,
"to": 60
},
{
"from": 7,
"to": 70
},
{
"from": 8,
"to": 80
},
{
"from": 9,
"to": 90
},
{
"from": 10,
"to": 100
}
]
}
which gives the result after running
select `common_schema`.`extract_json_value`(#mapJSON,'/map/from') `from`,`common_schema`.`extract_json_value`(#mapJSON,'/map/to') `to` ;
as space delimited strings
from to
12 1 2 3 4 5 6 7 8 9 10 0 10 20 30 40 50 60 70 80 90 100
which I then extract using the following, where @recommendationMapJSON is the new JSON being passed into the stored procedure.
-- Fragment from the stored procedure body (@recommendationMapJSON is the
-- JSON argument passed in).  User variables use @; "#" begins a comment in
-- MySQL, so the mangled #-forms could not run.
create temporary table temporary_recommendation_maps AS (
select `common_schema`.`extract_json_value`(@recommendationMapJSON,'/map/from') `from`,`common_schema`.`extract_json_value`(@recommendationMapJSON,'/map/to') `to`
) ;
create temporary table temporary_recommendation_map (
`from` int ,
`to` int
) ;
-- number of entries = number of separating spaces + 1
select length(`from`) - length(replace(`from`,' ','')) +1 into @mapCount from temporary_recommendation_maps ;
set @mapIndex = 0 ;
while @mapIndex < @mapCount do
-- peel the first space-delimited token off each list ...
select substring_index(`from`,' ',1) into @from from temporary_recommendation_maps ;
select substring_index(`to`,' ',1) into @to from temporary_recommendation_maps ;
insert into temporary_recommendation_map(`from`,`to`) values (@from,@to) ;
-- ... then drop that token from the working row
update temporary_recommendation_maps
set `from` = substring(`from`,instr(`from`,' ')+1)
, `to` = substring(`to`,instr(`to`,' ')+1) ;
set @mapIndex = @mapIndex + 1 ;
end while ;
update temporary_recommendation_maps
set `from` = ''
, `to` = '' ;
which gives the map that I wanted.
select * from temporary_recommendation_map ;
from to
12 0
1 10
2 20
3 30
4 40
5 50
6 60
7 70
8 80
9 90
10 100
Use index to get array value.
$[ index ]
sample:
-- @ (not #) introduces a MySQL user variable; $[0] addresses the first
-- array element by index.
SELECT JSON_EXTRACT(@mapJSON, "$[0].from") AS 'from',
JSON_EXTRACT(@mapJSON, "$[0].to") AS 'to' ;
Bit late to the party but here's how I did it.
Example:
select json_extract(#mapJSON,'$[*].from') `from`,json_extract(#mapJSON,'$[*].to') `to`
Your syntax inside json_extract() was a bit off. Try using this instead:
-- User variables are @name in MySQL ("#" starts a comment).
SET @mapJSON = '[{"from":12,"to":0},{"from":11,"to":-1},{"from":1,"to":1}]' ;
SELECT JSON_EXTRACT(@mapJSON, "$.from") AS `from`,
JSON_EXTRACT(@mapJSON, "$.to") AS `to`
This should give you a result set looking something like this:
from to
[12, 11, 1] [0, -1, 1]
try this code.
DROP TABLE IF EXISTS tmp;
DROP PROCEDURE IF EXISTS teste;
DELIMITER $$
-- Unrolls a JSON array of {"from":..,"to":..} objects into rows of a
-- temporary table, then selects them back as a from/to result set.
CREATE PROCEDURE teste()
BEGIN
    DECLARE i INT DEFAULT 0;
    DECLARE jCount INT DEFAULT -1;
    CREATE TEMPORARY TABLE tmp( ou_from INT, out_to INT );
    -- User variables are @name in MySQL; "#" would start a comment.
    SET @mapJSON = '[{"from":12,"to":0},{"from":11,"to":-1},{"from":1,"to":1},{"a":"teste"}]' ;
    -- jCount ends up as the highest valid index (array length - 1).
    SET jCount = jCount + JSON_LENGTH( @mapJSON, '$' );
    WHILE ( i <= jCount ) DO
        -- Elements lacking "from"/"to" (like {"a":"teste"}) insert NULLs.
        INSERT INTO tmp( ou_from, out_to )
        VALUES( JSON_EXTRACT(@mapJSON, CONCAT( '$[', i, '].from') )
              , JSON_EXTRACT(@mapJSON, CONCAT( '$[', i, '].to' ) )
              );
        SET i = i + 1;
    END WHILE;
    SELECT ou_from AS 'from', out_to AS 'to' FROM tmp;
END $$
DELIMITER ;
CALL teste;
We have a cloud-based GUI at my work where I can choose some fields from different tables so it can print out the values afterwards. It is not possible for me to write a WITH clause before a SELECT statement as the program automatically write SELECT first. So I want to know If it is possible somehow.
Yes you can, according to MSDN documentation (https://msdn.microsoft.com/en-us/library/ms189499.aspx) :
<SELECT statement> ::=
[WITH <common_table_expression> [,...n]]
<query_expression>
[ ORDER BY { order_by_expression | column_position [ ASC | DESC ] }
[ ,...n ] ]
[ <FOR Clause>]
[ OPTION ( <query_hint> [ ,...n ] ) ]
<query_expression> ::=
{ <query_specification> | ( <query_expression> ) }
[ { UNION [ ALL ] | EXCEPT | INTERSECT }
<query_specification> | ( <query_expression> ) [...n ] ]
<query_specification> ::=
SELECT [ ALL | DISTINCT ]
[TOP ( expression ) [PERCENT] [ WITH TIES ] ]
< select_list >
[ INTO new_table ]
[ FROM { <table_source> } [ ,...n ] ]
[ WHERE <search_condition> ]
[ <GROUP BY> ]
[ HAVING < search_condition > ]
If you can't find any other way around it, this GUI might be persuaded to accept
SELECT * FROM (
-- write your entire select statement here
) AS wrapped_query;  -- a derived table must be given an alias (SQL Server/MySQL)