How to extract a JSON value from a VARCHAR column in Snowflake?

I have a VARCHAR column storing JSON data.
Here is one row:
{
  "id": null,
  "ci": null,
  "mr": null,
  "meta_data": {
    "product": {
      "product_id": "123xyz",
      "sales": {
        "d_code": "UK",
        "c_code": "5814"
      },
      "amount": {
        "currency": "USD",
        "value": -1230
      },
      "entry_mode": "virtual",
      "transaction_date": "2020-01-01",
      "transaction_type": "purchase",
      "others": []
    }
  }
}
Example data:
WITH t1 AS (
SELECT '{"id":null,"ci":null,"mr":null,"meta_data":{"product":{"product_id":"123xyz","sales":{"d_code":"UK","c_code":"5814"},"amount":{"currency":"USD","value":-1230},"entry_mode":"virtual","transaction_date":"2020-01-01","transaction_type":"purchase","others":[]}}}'::varchar AS value
)
This is how I do it in Postgres. How can I extract the same values in Snowflake?
SELECT
value,
value -> 'meta_data' -> 'product' ->> 'product_id' AS product_id,
value -> 'meta_data' -> 'product' -> 'sales' ->> 'd_code' AS d_code,
value -> 'meta_data' -> 'product' -> 'sales' ->> 'c_code' AS c_code,
value -> 'meta_data' -> 'product' -> 'amount' ->> 'currency' AS currency,
value -> 'meta_data' -> 'product' ->> 'entry_mode' AS entry_mode,
value -> 'meta_data' -> 'product' ->> 'transaction_type' AS transaction_type
FROM t1

It is possible in Snowflake too. The key is to use TRY_PARSE_JSON / PARSE_JSON:
PARSE_JSON
Interprets an input string as a JSON document, producing a VARIANT value.
WITH t1 AS (
SELECT '{"id":null,"ci":null,"mr":null,"meta_data":{"product":{"product_id":"123xyz","sales":{"d_code":"UK","c_code":"5814"},"amount":{"currency":"USD","value":-1230},"entry_mode":"virtual","transaction_date":"2020-01-01","transaction_type":"purchase","others":[]}}}'::varchar AS value
)
SELECT TRY_PARSE_JSON(t1.value) AS v
,v:meta_data:product:product_id::TEXT AS product_id
,v:meta_data:product:sales:d_code::TEXT AS d_code
-- ...
FROM t1;
Or with another CTE:
WITH t1 AS (
SELECT '{"id":null,"ci":null,"mr":null,"meta_data":{"product":{"product_id":"123xyz","sales":{"d_code":"UK","c_code":"5814"},"amount":{"currency":"USD","value":-1230},"entry_mode":"virtual","transaction_date":"2020-01-01","transaction_type":"purchase","others":[]}}}'::varchar AS value
), t1_cast AS (
SELECT *,TRY_PARSE_JSON(t1.value) AS v
FROM t1
)
SELECT
v:meta_data:product:product_id::TEXT AS product_id
,v:meta_data:product:sales:d_code::TEXT AS d_code
-- ...
FROM t1_cast;
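The practical difference between the two functions shows up when a row does not contain valid JSON: PARSE_JSON raises an error, while TRY_PARSE_JSON returns NULL for that row and lets the query finish. A minimal sketch (the sample strings are illustrative):
-- TRY_PARSE_JSON returns NULL for the malformed row instead of failing the query
SELECT value, TRY_PARSE_JSON(value) AS v
FROM (
    SELECT 'not valid json' AS value
    UNION ALL
    SELECT '{"meta_data":{"product":{"product_id":"123xyz"}}}'
) s;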

In Snowflake you use the VARIANT data type to store semi-structured data such as JSON. First convert the VARCHAR string to a VARIANT with the function PARSE_JSON; then you can query it like this:
WITH t1 AS (
SELECT parse_json('{"id":null,"ci":null,"mr":null,"meta_data":{"product":{"product_id":"123xyz","sales":{"d_code":"UK","c_code":"5814"},"amount":{"currency":"USD","value":-1230},"entry_mode":"virtual","transaction_date":"2020-01-01","transaction_type":"purchase","others":[]}}}'::varchar) AS value
)
select value:meta_data:product:product_id as product_id,
value:meta_data:product:sales:d_code as d_code,
value:meta_data:product:sales:c_code AS c_code,
value:meta_data:product:amount:currency AS currency,
value:meta_data:product:entry_mode AS entry_mode,
value:meta_data:product:transaction_type AS transaction_type
from t1;
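Note that these path expressions return VARIANT values, so strings are displayed with surrounding double quotes. If you want plain text, cast the result, as in this small variation of the select list above:
select value:meta_data:product:product_id::string AS product_id,
       value:meta_data:product:sales:d_code::string AS d_code
from t1;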

Related

JSON object: Query a value from unknown node based on a condition

I'm trying to query two values (DISCOUNT_TOTAL and ITEM_TOTAL) from a JSON object in a PostgreSQL database. Take the following query as a reference:
SELECT
    mt.customer_order,
    totals -> 0 -> 'amount' ->> 'centAmount' DISCOUNT_TOTAL,
    totals -> 1 -> 'amount' ->> 'centAmount' ITEM_TOTAL
FROM
    my_table mt,
    to_jsonb(mt.my_json -> 'data' -> 'order' -> 'totals') totals
WHERE
    mt.customer_order in ('1000001', '1000002')
The query works just fine; the big problem is that, for some reason out of my control, the values DISCOUNT_TOTAL and ITEM_TOTAL sometimes change their positions in the JSON object from one customer_order to another:
So I cannot point at totals -> 0 -> 'amount' ->> 'centAmount' assuming that it contains the value related to type: DISCOUNT_TOTAL (same for type: ITEM_TOTAL). Is there any workaround to get the correct centAmount for each type?
Use a path query instead of hardcoding the array positions:
with sample (jdata) as (
values (
'{
"data": {
"order": {
"email": "something",
"totals": [
{
"type": "ITEM_TOTAL",
"amount": {
"centAmount": 14990
}
},
{
"type": "DISCOUNT_TOTAL",
"amount": {
"centAmount": 6660
}
}
]
}
}
}'::jsonb)
)
select jsonb_path_query_first(
jdata,
'$.data.order.totals[*] ? (#.type == "DISCOUNT_TOTAL").amount.centAmount'
) as discount_total,
jsonb_path_query_first(
jdata,
'$.data.order.totals[*] ? (#.type == "ITEM_TOTAL").amount.centAmount'
) as item_total
from sample;
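jsonb_path_query_first returns a jsonb value. If you need the cent amounts as numbers, cast the result; a small variation of the query above (direct jsonb-to-numeric casts need PostgreSQL 11+, and jsonpath itself needs 12+):
select jsonb_path_query_first(
    jdata,
    '$.data.order.totals[*] ? (@.type == "DISCOUNT_TOTAL").amount.centAmount'
)::numeric as discount_total
from sample;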
EDIT: In case your PostgreSQL version does not support JSON path queries, you can do it by expanding the array into rows and then pivoting with CASE and SUM:
with sample (order_id, jdata) as (
values ( 1,
'{
"data": {
"order": {
"email": "something",
"totals": [
{
"type": "ITEM_TOTAL",
"amount": {
"centAmount": 14990
}
},
{
"type": "DISCOUNT_TOTAL",
"amount": {
"centAmount": 6660
}
}
]
}
}
}'::jsonb)
)
select order_id,
sum(
case
when el->>'type' = 'DISCOUNT_TOTAL' then (el->'amount'->>'centAmount')::int
else 0
end
) as discount_total,
sum(
case
when el->>'type' = 'ITEM_TOTAL' then (el->'amount'->>'centAmount')::int
else 0
end
) as item_total
from sample
cross join lateral jsonb_array_elements(jdata->'data'->'order'->'totals') as a(el)
group by order_id;
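On PostgreSQL 9.4 and later the same pivot can also be written with the FILTER clause on the aggregates, which some find easier to read. A sketch against the same sample data:
select order_id,
       sum((el->'amount'->>'centAmount')::int) filter (where el->>'type' = 'DISCOUNT_TOTAL') as discount_total,
       sum((el->'amount'->>'centAmount')::int) filter (where el->>'type' = 'ITEM_TOTAL') as item_total
from sample
cross join lateral jsonb_array_elements(jdata->'data'->'order'->'totals') as a(el)
group by order_id;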

SQL JSON Array, extract a field from each element into a concatenated string

I've got a JSON column containing an array of items:
[
  { "type": "banana" },
  { "type": "apple" },
  { "type": "orange" }
]
I want to select a single column with the concatenated types, resulting in 'banana, apple, orange'.
Thanks,
David
You need to parse and aggregate the stored JSON:
SELECT
JsonColumn,
NewColumn = (
SELECT STRING_AGG(JSON_VALUE([value], '$.type'), ',')
WITHIN GROUP (ORDER BY CONVERT(int, [key]))
FROM OPENJSON(t.JsonColumn)
)
FROM (VALUES
('[{"type":"banana"},{"type":"apple"},{"type":"orange"}]')
) t (JsonColumn)
Result:
JsonColumn                                              | NewColumn
[{"type":"banana"},{"type":"apple"},{"type":"orange"}] | banana,apple,orange
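Note that OPENJSON and JSON_VALUE require SQL Server 2016+ with database compatibility level 130 or higher, and STRING_AGG requires SQL Server 2017+. On 2016 a possible fallback is the classic FOR XML PATH concatenation; a sketch (be aware that characters such as & would be XML-escaped without an extra TYPE/.value() step):
SELECT
    JsonColumn,
    NewColumn = STUFF((
        SELECT ',' + JSON_VALUE([value], '$.type')
        FROM OPENJSON(t.JsonColumn)
        ORDER BY CONVERT(int, [key])
        FOR XML PATH('')
    ), 1, 1, '')
FROM (VALUES
    ('[{"type":"banana"},{"type":"apple"},{"type":"orange"}]')
) t (JsonColumn);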

Get nested objects values from JSON in Postgres

So here is my JSON column in my Postgres DB:
{
  "objekt_art": {
    "86": {
      "code": "86",
      "bezeichnung_de": "Kino",
      "bezeichnung_fr": "Cinéma",
      "bezeichnung_it": "Cinema",
      "bezeichnung_en": null,
      "kurz_bezeichnung_de": "Kino",
      "relevant_fuer_berechnung_steuerquote": true
    },
    "27": {
      "code": "27",
      "bezeichnung_de": "Kiosk",
      "bezeichnung_fr": "Kiosque",
      "bezeichnung_it": "Chiosco",
      "bezeichnung_en": null,
      "kurz_bezeichnung_de": "Kiosk",
      "relevant_fuer_berechnung_steuerquote": true
    }
  }
}
I need to be able to query the bezeichnung_de where, for example, code = 86.
The code number is something I can pass in from another table.
How can I write a query with two columns, one with the number and the second with bezeichnung_de?
Like this:
Code | Bez
86   | Kino
Sample data structure: a table test with the JSON column data, and a table test_join holding the code to look up (86 in this example).
select
je.value -> 'code' as "Code",
je.value -> 'bezeichnung_de' as "Bez"
from
test t
cross join jsonb_each(data::jsonb -> 'objekt_art') je
-- test_join contains the value 86 for the record we want to join
inner join test_join tj on je.key::int = tj.json_id
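Note that -> returns jsonb, so the strings in Code and Bez are displayed with double quotes; ->> extracts the values as text instead. A minimal variation of the select list above:
select
    je.value ->> 'code' as "Code",
    je.value ->> 'bezeichnung_de' as "Bez"
from
    test t
cross join jsonb_each(data::jsonb -> 'objekt_art') je
inner join test_join tj on je.key::int = tj.json_id;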
As you know the code, this is fairly easy:
select t.the_column -> 'objekt_art' -> '86' ->> 'code' as code,
t.the_column -> 'objekt_art' -> '86' ->> 'bezeichnung_de' as bez
from the_table t
where ...
The value '86' can be a parameter. The first expression to select the code isn't really needed though, as you could replace it with the constant value (=parameter) directly.
If the "outer" JSON key isn't the same value as the code value, you could use something like this:
select o.value ->> 'code' as code,
o.value ->> 'bezeichnung_de' as bez
from the_table t
cross join jsonb_each(t.the_column -> 'objekt_art') o(key, value)
where o.key = '86'
and ... other conditions ...
If you are using Postgres 13 or later, this can also be written as a JSON path expression:
select a.item ->> 'code' as code,
a.item ->> 'bezeichnung_de' as bez
from (
select jsonb_path_query_first(t.the_column, '$.objekt_art.* ? (@.code == "86")') as item
from the_table t
where ....
) a
All examples assume that the column is defined with the data type jsonb, which it should be. If it's not, you need to cast it: the_column::jsonb
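If the codes really come from another table, as mentioned in the question, the jsonb_each variant can be joined against it instead of hardcoding '86'. A sketch; codes(code) is a hypothetical lookup table standing in for your real one:
select o.value ->> 'code' as code,
       o.value ->> 'bezeichnung_de' as bez
from the_table t
cross join jsonb_each(t.the_column -> 'objekt_art') o(key, value)
join codes c on c.code = o.key   -- codes is hypothetical; if codes.code is an integer, compare on o.key::int instead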

Select - Oracle JSON Object - Join

I have a requirement to select column values in Oracle in a JSON structure. Let me explain the requirement in detail
We have a table called "dept" that has the following rows
There is another table called "emp" that has the following rows
The output we need is as follows
{"Data": [{
"dept": "Sports",
"City": "LA",
"employees": {
"profile":[
{"name": "Ben John", "salary": "15000"},
{"name": "Carlos Moya", "salary": "19000"}]
}},
{"dept": "Sales",
"City": "Miami",
"employees": {
"profile":[
{"name": "George Taylor", "salary": "9000"},
{"name": "Emma Thompson", "salary": "8500"}]
}}
]
}
The SQL that I issued is as follows
select json_object('dept' value b.deptname,
'city' value b.deptcity,
'employees' value json_object('employee name' value a.empname,
'employee salary' value a.salary)
format json) as JSONRETURN
from emp a, dept b where
a.deptno=b.deptno
However, the result looks like the following and is not what we expected.
Please note the parent data is repeated. What is the mistake I am making?
Thanks for the help
Bala
You can do something like this. Note the multiple (nested) calls to json_object and json_arrayagg. Tested in Oracle 12.2; other versions may have other tools that can make the job easier.
select json_object(
'Data' value
json_arrayagg(
json_object (
'dept' value deptname,
'City' value deptcity,
'employees' value
json_object(
'profile' value
json_arrayagg(
json_object(
'name' value empname,
'salary' value salary
) order by empid -- or as needed
)
)
) order by deptno -- or as needed
)
) as jsonreturn
from dept join emp using (deptno)
group by deptno, deptname, deptcity
;
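One caveat worth adding: the JSON generation functions return VARCHAR2(4000) by default, so a department with many employees can overflow that limit. On Oracle 18c and later you can ask for a CLOB with the RETURNING clause (in 12.2 the RETURNING clause of the generation functions is limited to VARCHAR2); a minimal, self-contained illustration with made-up data:
-- illustrative only: RETURNING CLOB on the generation functions (Oracle 18c+)
select json_object(
         'Data' value json_arrayagg(
                        json_object('name' value ename)
                        returning clob
                      )
         returning clob
       ) as jsonreturn
from (select 'Ben John' as ename from dual
      union all
      select 'Carlos Moya' from dual);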

How to get the number of elements in a JSON array stored as CLOB with Oracle 12c?

I'm storing a Java class A as A_DOC in a CLOB column in my database.
The structure of A is like:
{
  id: 123,
  var1: abc,
  subvalues: [
    {
      id: 1,
      value: a
    },
    {
      id: 1,
      value: b
    },
    ...
  ]
}
I know I can do things like
select json_query(a.A_DOC, '$.subvalues.value') from table_name a;
and so on, but I'm looking for a way to count the number of elements in the subvalues array through an SQL query. Is this possible?
The size() function exists only in Oracle 18 and later:
SELECT json_query('[19, 15, [16,2,3]]','$[*].size()' WITH ARRAY WRAPPER) FROM dual;
SELECT json_value('[19, 15, [16,2,3]]','$.size()') FROM dual;
You can use JSON_TABLE:
SELECT
id, var1, count(sub_id) subvalues
FROM
JSON_TABLE (
to_clob('{ id: 123, var1: "abc", subvalues : [{ id: 1, value: "a" }, { id: 2, value: "b" } ]}'),
'$'
COLUMNS (
id NUMBER PATH '$.id',
var1 VARCHAR2(100) PATH '$.var1',
NESTED PATH '$.subvalues[*]'
COLUMNS (
sub_id NUMBER PATH '$.id'
)
)
)
GROUP BY id, var1
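Applied to the table from the question (table_name with the CLOB column A_DOC; both names are taken from the question, the aliases are illustrative), the same idea per stored document looks roughly like this:
SELECT jt.id, jt.var1, COUNT(jt.sub_id) AS subvalues_count
FROM table_name a,
     JSON_TABLE (
       a.A_DOC,
       '$'
       COLUMNS (
         id NUMBER PATH '$.id',
         var1 VARCHAR2(100) PATH '$.var1',
         NESTED PATH '$.subvalues[*]'
         COLUMNS (
           sub_id NUMBER PATH '$.id'
         )
       )
     ) jt
GROUP BY jt.id, jt.var1;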