SQL query to return JSON including array

I have a relational database (SQL Server) with normal data.
I am trying to build something with this format:
{
"type": "FeatureCollection",
"features": [
{
"type": "Feature",
"id": "1",
"properties": { "address": "2" },
"geometry": {
"type": "Point",
"coordinates": [36.33456, 59.523456]
}
},
{
"type": "Feature",
"id": "2",
"properties": { "address": "151" },
"geometry": {
"type": "Point",
"coordinates": [36.33456, 59.523456]
}
}]
}
So far I have written this query:
select top 10
'Feature' as [type],
m.Id as id, m.Address as 'properties.address',
'Point' as 'geometry.type',
'[' + m.location + ']' as 'geometry.coordinates'
from
Buildings m
where
m.Location is not null
and m.Location <> ''
for json path, root('features')
But what I actually receive looks like this:
{
"features": [{
"type": "Feature",
"id": 250343,
"properties": {
"address": "there"
},
"geometry": {
"type": "Point",
"coordinates": "[5714843008,3363769468.235179]"
}
}, {
"type": "Feature",
"id": 266306,
"properties": {
"address": "here"
},
"geometry": {
"type": "Point",
"coordinates": "[36.38449104993326,59.48238372802735]"
}
}}
How can I add "type": "FeatureCollection", before the root?
I also want the coordinates part to hold an array of two numbers, but with my current code it is a string containing the array. How can I get an actual array?

It's difficult without test data, but I think that you can build the expected JSON output using the following statement. You need one more FOR JSON PATH (to generate the outer JSON object) and a JSON_QUERY call (to return a JSON array of scalar values instead of text holding the array):
Table:
CREATE TABLE Buildings (
Id int,
Address varchar(100),
Location varchar(100)
)
INSERT INTO Buildings (Id, Address, Location)
VALUES
(250343, 'there', '5714843008,3363769468.235179'),
(266306, 'here', '36.38449104993326,59.48238372802735')
Statement:
SELECT
[type] = 'FeatureCollection',
[features] = JSON_QUERY((
select top 10
'Feature' as [type],
m.Id as id, m.Address as 'properties.address',
'Point' as 'geometry.type',
JSON_QUERY('[' + m.location + ']') as 'geometry.coordinates'
from
Buildings m
where
m.Location is not null
and m.Location <> ''
for json path
))
FOR JSON PATH, WITHOUT_ARRAY_WRAPPER
Result:
{
"type":"FeatureCollection",
"features":[
{
"type":"Feature",
"id":250343,
"properties":{
"address":"there"
},
"geometry":{
"type":"Point",
"coordinates":[
5714843008,
3363769468.235179
]
}
},
{
"type":"Feature",
"id":266306,
"properties":{
"address":"here"
},
"geometry":{
"type":"Point",
"coordinates":[
36.38449104993326,
59.48238372802735
]
}
}
]
}

I figured out a way to build custom JSON using STUFF (the analogue of GROUP_CONCAT in MySQL) and FOR XML.
See if this works for you:
1 - Answer to question 1. Note, this is a THREE-STEP PROCESS:
a) VARIABLE TO HOLD QUERY DATA
DECLARE @QUERY_DATA varchar(8000)
b) CUSTOM QUERY TO FETCH QUERY DATA FORMATTED FOR INNER JSON METADATA (all in one line)
SET @QUERY_DATA = (
SELECT STUFF(
(
SELECT ',' + CONCAT('{"type": "Feature","id": ', m.Id, ',"properties": {"address": "', m.Address, '"},"geometry": {"type": "Point","coordinates": [', m.location, ']}}')
FROM
Buildings m
WHERE
m.Location is not null
and m.Location <> ''
FOR XML PATH(''), TYPE).value('.', 'NVARCHAR(MAX)'), 1, 1, '')
)
c) FINAL OUTPUT - Concatenate / Join your JSON
SELECT CONCAT('{ ''type'':''FeatureCollection''','','',',''FEATURES'': [ ', @QUERY_DATA, ' ] }')
2 - ANSWER TO QUESTION 2 - a CONSOLIDATED solution using separate X and Y coordinates as two INTEGER-typed elements:
{''X'':', CAST(LEFT( m.location, CHARINDEX(',', m.location)-1) as int), ',''Y'':', cast(RIGHT( m.location, LEN( m.location)-CHARINDEX(',', m.location)) as int),'}
Therefore, when we add this together, your new query would look like the following:
a) VARIABLE TO HOLD QUERY DATA
DECLARE @QUERY_DATA varchar(8000)
b) CUSTOM QUERY TO FETCH QUERY DATA FORMATTED FOR INNER JSON METADATA (all in one line), including splitting coordinates into two INTEGER Type elements
SET @QUERY_DATA = (
SELECT STUFF(
(
SELECT ',' + CONCAT('{"type": "Feature","id": ', m.Id, ',"properties": {"address": "', m.Address, '"},"geometry": {"type": "Point","coordinates": [ {''X'':', CAST(LEFT( m.location, CHARINDEX(',', m.location)-1) as int), ',''Y'':', cast(RIGHT( m.location, LEN( m.location)-CHARINDEX(',', m.location)) as int),'}]}}')
FROM
Buildings m
WHERE
m.Location is not null
and m.Location <> ''
FOR XML PATH(''), TYPE).value('.', 'NVARCHAR(MAX)'), 1, 1, '')
)
c) FINAL OUTPUT - Concatenate / Join your OUTER and INNER JSON
SELECT CONCAT('{ ''type'':''FeatureCollection''','','',',''FEATURES'': [ ', @QUERY_DATA, ' ] }')

Related

Retrieve JSON from sql

My JSON format in one of the SQL columns, "jsoncol", in the table "jsontable", is like below.
Kindly help me get this data using JSON_QUERY or JSON_VALUE.
Please pay attention to the brackets and double quotes in the key-value pairs...
{
"Company": [
{
"Info": {
"Address": "123"
},
"Name": "ABC",
"Id": 999
},
{
"Info": {
"Address": "456"
},
"Name": "XYZ",
"Id": 888
}
]
}
I am trying to retrieve all the company names using a SQL query. Thanks in advance.
You can use:
SELECT j.name
FROM table_name t
CROSS APPLY JSON_TABLE(
t.value,
'$.Company[*]'
COLUMNS(
name VARCHAR2(200) PATH '$.Name'
)
) j
Which, for the sample data:
CREATE TABLE table_name (
value CLOB CHECK (value IS JSON)
);
INSERT INTO table_name (value)
VALUES ('{
"Company": [
{
"Info": {
"Address": "123"
},
"Name": "ABC",
"Id": 999
},
{
"Info": {
"Address": "456"
},
"Name": "XYZ",
"Id": 888
}
]
}');
Outputs:
NAME
ABC
XYZ
db<>fiddle here
You can easily use the JSON_TABLE() function for this case, provided the DB version is at least 12.1.0.2, such as:
SELECT name
FROM jsontable,
JSON_TABLE(jsoncol,
'$' COLUMNS(NESTED PATH '$."Company"[*]'
COLUMNS(name VARCHAR2 PATH '$."Name"')))
Demo

Convert the contents of a SQL Server Column into a JSON Format

I have a SQL Server table with a column named 'Filter'.
Below are the SQL Server scripts to create a sample table:
CREATE TABLE dbo.TestJSON
(
TestJSONID INT IDENTITY(1,1),
[Filter] NVARCHAR(4000)
)
INSERT INTO dbo.TestJSON ([Filter])
VALUES ('$WYTS IN (''Control'', ''Machine'', ''Power'', ''DSP'', ''NM'', ''Digital'', ''AI'')')
Now my goal is to convert the contents of the column Filter into the following JSON format:
"conditions":{
"condition":"AND",
"rules":[
{
"condition":"AND",
"operator":"IN",
"value":[
"Control",
"Machine",
"Power",
"DSP",
"NM",
"Digital",
"AI"
],
"type":"string"
}
]
}
How can I achieve this?
Any help is going to be highly appreciated.
Thanks in advance. :)
Here's one option
Example
Select [conditions.condition]='AND'
,[conditions.rules] = json_query(
(Select condition='AND'
,operator ='IN'
,value = json_query('['+replace(stuff(stuff(Filter,charindex(')',Filter),len(Filter),''),1,charindex('(',Filter),''),'''','"')+']')
,type = 'string'
For JSON Path )
)
From TestJSON
For JSON Path,Without_Array_Wrapper
Results
{
"conditions": {
"condition": "AND",
"rules": [
{
"condition": "AND",
"operator": "IN",
"value": [
"Control",
"Machine",
"Power",
"DSP",
"NM",
"Digital",
"AI"
],
"type": "string"
}
]
}
}
If By Chance You Need to Escape the String
Select [conditions.condition]='AND'
,[conditions.rules] = json_query(
(Select condition='AND'
,operator ='IN'
,value = json_query('['+replace(stuff(stuff(B.S,charindex(')',B.S),len(B.S),''),1,charindex('(',B.S),''),'''','"')+']')
,type = 'string'
For JSON Path )
)
From TestJSON A
Cross Apply ( values ( string_escape(Filter,'JSON') ) ) B(S)
For JSON Path,Without_Array_Wrapper
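As a side note, here is a tiny standalone illustration (the sample text is made up) of what STRING_ESCAPE(..., 'JSON') does: double quotes, backslashes and control characters are escaped, while plain single quotes are left alone.
Select string_escape(N'a "quoted" value \ and a line' + char(10) + N'break', 'JSON')
-- returns: a \"quoted\" value \\ and a line\nbreak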

How to join nested JSON indices to multiple rows in SQL by primary key

I am trying to update several rows in SQL with JSON.
I'd like to match a primary key on a table row to an index nested in an array of JS objects.
Sample data:
let json = [{
"header": object_data,
"items": [{
"id": {
"i": 0,
"name": "item_id"
},
"meta": {
"data": object_data,
"text": "some_text"
}
}, {
"id": {
"i": 4,
"name": "item_id4"
},
"meta": {
"data": object_data,
"text": "some_text"
}
}, {
"id": {
"i": 17,
"name": "item_id17"
},
"meta": {
"data": object_data,
"text": "some_text"
}}]
}]
Sample table:
i | json | item_id
---+---------------------------+---------
0 | entire_object_at_index_0 | item_id
4 | entire_object_at_index_4 | item_id4
17 | entire_object_at_index_17 | item_id17
entire_object_at_index means appending the item data to the header to create a new object for each row, e.g.:
"header": "some_data",
"items": [{
"id": {
"i": 0,
"name": "item_id1"
},
"meta": {
"data": "some_data",
"text": "some_text"
}
}]
SQL:
update someTable set
json = json_value(@jsons, '$'), -- not sure how to join on index here
item_id = json_value(@jsons, '$.items[?].id.name') -- not sure how to select by index here
where [i] = json_query(@jsons, '$.items.id.i')
The requirement to repeat the other properties complicates this a bit, because we need to build a new object explicitly. Even so it's not too hard:
update someTable
set
[json] = (
select (
select
"header" = json_query(#json, '$.header'),
"items" = json_query(N'[' + items.item + N']')
for json path, without_array_wrapper
)
),
item_id = items.item_id
from openjson(@json, '$.items') with (
item nvarchar(max) '$' as json,
item_id varchar(50) '$.id.name',
i int '$.id.i'
) items
join someTable on [someTable].i = items.i
Here I'm assuming @json has already been unwrapped from its array, as your query seems to assume. If it's not, substitute $[0] for $ in the outer query, as sketched below.
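For instance, a minimal standalone sketch of that path adjustment (the @json literal below is made up, just to show the shape of the wrapped array):
declare @json nvarchar(max) = N'[{"header": {"k": 1}, "items": [{"id": {"i": 0, "name": "item_id"}}]}]'
-- point the outer OPENJSON at the first element of the wrapping array
select items.item_id, items.i
from openjson(@json, '$[0].items') with (
    item_id varchar(50) '$.id.name',
    i       int         '$.id.i'
) items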
Update:
This is an attempt to improve my answer (I missed the header part of the JSON content in the original answer). Of course, @JeroenMostert's answer is an excellent solution, so this is just another possible approach. Note that if the header part of the JSON content is a scalar value, you should use JSON_VALUE().
Table and JSON:
-- Table
CREATE TABLE #Data (
i int,
[json] nvarchar(max),
item_id nvarchar(100)
)
INSERT INTO #Data
(i, [json], [item_id])
VALUES
(0 , N'entire_object_at_index_0', N'item_id'),
(4 , N'entire_object_at_index_4', N'item_id4'),
(17, N'entire_object_at_index_17', N'item_id17')
-- JSON
DECLARE @json nvarchar(max) = N'[{
"header": {"key": "some_data"},
"items": [{
"id": {
"i": 0,
"name": "item_id"
},
"meta": {
"data": "some_data",
"text": "some_text"
}
}, {
"id": {
"i": 4,
"name": "item_id4"
},
"meta": {
"data": "some_data",
"text": "some_text"
}
}, {
"id": {
"i": 17,
"name": "item_id17"
},
"meta": {
"data": "some_data",
"text": "some_text"
}}]
}]'
Statement:
UPDATE #Data
SET #Data.Json = j.Json
FROM #Data
CROSS APPLY (
SELECT
JSON_QUERY(@json, '$[0].header') AS header,
JSON_QUERY(j.[value], '$') AS items
FROM OPENJSON(@json, '$[0].items') j
WHERE JSON_VALUE(j.[value], '$.id.i') = #Data.[i]
FOR JSON PATH, WITHOUT_ARRAY_WRAPPER
) j ([Json])
Original answer:
One possible approach is to use OPENJSON and appropriate join:
Table and JSON:
-- Table
CREATE TABLE #Data (
i int,
[json] nvarchar(max),
item_id nvarchar(100)
)
INSERT INTO #Data
(i, [json], [item_id])
VALUES
(0 , N'entire_object_at_index_0', N'item_id'),
(4 , N'entire_object_at_index_4', N'item_id4'),
(17, N'entire_object_at_index_17', N'item_id17')
-- JSON
DECLARE @json nvarchar(max) = N'[{
"header": "some_data",
"items": [{
"id": {
"i": 0,
"name": "item_id"
},
"meta": {
"data": "some_data",
"text": "some_text"
}
}, {
"id": {
"i": 4,
"name": "item_id4"
},
"meta": {
"data": "some_data",
"text": "some_text"
}
}, {
"id": {
"i": 17,
"name": "item_id17"
},
"meta": {
"data": "some_data",
"text": "some_text"
}}]
}]'
Statement:
UPDATE #Data
SET [json] = j.[value]
FROM #Data
LEFT JOIN (
SELECT
[value],
JSON_VALUE([value], '$.id.i') AS [i]
FROM OPENJSON(@json, '$[0].items')
) j ON (#Data.[i] = j.[i])
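To eyeball the result of either statement, a quick check against the temp table defined above is enough:
SELECT i, item_id, [json] FROM #Data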

Couchbase N1QL array query

Document sample from my giata_properties bucket: link
Relevant json paste
{
"propertyCodes": {
"provider": [
{
"code": [
{
"value": [
{
"value": "304387"
}
]
}
],
"providerCode": "hotelbeds",
"providerType": "gds"
},
{
"code": [
{
"value": [
{
"name": "Country Code",
"value": "EG"
},
{
"name": "City Code",
"value": "HRG"
},
{
"name": "Hotel Code",
"value": "91U"
}
]
}
],
"providerCode": "gta",
"providerType": "gds"
}
]
},
"name": "Arabia Azur Resort"
}
I want a query (and an index) to retrieve a document based on propertyCodes.provider.code.value.value and propertyCodes.provider.providerCode. I've managed to do each separately but I'm not sure how to merge both of them in a single query.
SELECT meta().id FROM giata_properties AS gp USE INDEX(`#primary`) WHERE ANY v WITHIN gp.propertyCodes.provider[*].code SATISFIES v.`value` = '150613' END;
SELECT meta().id FROM giata_properties AS gp USE INDEX(`#primary`) WHERE ANY v within gp.propertyCodes.provider[*].providerCode SATISFIES v = 'hotelbeds' END;
So, for example, I want to fetch the document whose propertyCodes.provider.code.value.value is 304387 and whose provider is also hotelbeds, because a code value can be duplicated across documents, but the combination of code and providerCode is unique.
Here are the query and the indexes.
The query.
SELECT META().id
FROM giata_properties AS gp
WHERE ANY p IN propertyCodes.provider SATISFIES ( ANY v WITHIN p.code SATISFIES v.`value` = '304387' END ) AND p.providerCode = 'hotelbeds' END;
The indexes.
CREATE INDEX idx_value ON giata_properties
( DISTINCT ARRAY ( DISTINCT ARRAY v.`value` FOR v WITHIN p.code END ) FOR p IN propertyCodes.provider END );
CREATE INDEX idx_providerCode ON giata_properties
( DISTINCT ARRAY p.providerCode FOR p IN propertyCodes.provider END );

JSON Oracle SQL parsing / unnest embedded JSON data in escaped form

Here is my JSON stored in a CLOB column:
select upJSON from myLocations;
{"values":[
{"nameValuePairs":{"upJSON":"{\"mResults\":[0.0,0.0],\"mProvider\":\"fused\",\"mDistance\":0.0,\"mAltitude\":0.0}","id":"1","updated":"2015-03-30 20:28:51"}},
{"nameValuePairs":{"upJSON":"{\"mResults\":[0.0,0.0],\"mProvider\":\"FINDME\",\"mDistance\":0.0,\"mAltitude\":22.2}","id":"2","updated":"2015-03-30 20:28:53"}},
{"nameValuePairs":{"upJSON":"{\"mResults\":[0.0,0.0],\"mProvider\":\"fused\",\"mDistance\":0.0,\"mAltitude\":0.0}","id":"3","updated":"2015-03-30 20:28:55"}},
{"nameValuePairs":{"upJSON":"{\"mResults\":[0.0,0.0],\"mProvider\":\"fused\",\"mDistance\":0.0,\"mAltitude\":0.0}","id":"4","updated":"2015-03-30 20:28:57"}}
]}
(I have inserted newlines for clarity)
Please: what is the SQL (or PL/SQL) needed to select just the value of mProvider, mAltitude, and the id from the 2nd "nameValuePairs" (= "FINDME", 22.2 and "2" in the example above)?
Since you're using 12c you have access to the native JSON parsing (as long as your CLOB column has an is json check constraint).
Some good background is available at:
https://docs.oracle.com/database/121/ADXDB/json.htm#ADXDB6371
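For completeness, a hypothetical example of adding such a constraint to the table from the question (the constraint name is made up):
ALTER TABLE myLocations
  ADD CONSTRAINT mylocations_upjson_is_json CHECK (upJSON IS JSON);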
If your JSON looks something like this:
{
"values": [
{
"nameValuePairs": {
"upJSON": {
"mResults": [
"0.0",
"0.0"
],
"mProvider": "fused",
"mDistance": "0.0",
"mAltitude": "22.2"
},
"id": "1",
"updated": "2015-03-30 20:28:51"
}
},
...
...
However, when I put your snippet from the question into JSONLint, it returns:
{
"values": [
{
"nameValuePairs": {
"upJSON": "{\"mResults\":[0.0,0.0],\"mProvider\":\"fused\",\"mDistance\":0.0,\"mAltitude\":0.0}",
"id": "1",
"updated": "2015-03-30 20:28:51"
}
},
{
"nameValuePairs": {
"upJSON": "{\"mResults\":[0.0,0.0],\"mProvider\":\"FINDME\",\"mDistance\":0.0,\"mAltitude\":22.2}",
"id": "2",
"updated": "2015-03-30 20:28:53"
}
},
Something like the following might get you started:
select
upJSON.values
from
myLocations
where
json_value(upJSON, '$.nameValuePairs.id' returning varchar2 error on error) = '2';
If you want to limit the query to a single ID, you'll need to add a full-text or function-based index to the JSON column.
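As a hedged sketch (the index name is made up, and it only helps queries that filter on this exact path), a function-based index over the id could look like:
CREATE INDEX mylocations_id_idx ON myLocations
  (json_value(upJSON, '$.values[0].nameValuePairs.id'));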
https://odieweblog.wordpress.com/2015/04/12/json_table-chaining/#comment-1025
with tmp as (
SELECT /*+ no_merge */ d.*
FROM ulocations ul,
json_table(ul.upjson, '$'
columns(
NESTED PATH '$.values[*].nameValuePairs'
COLUMNS (
updated VARCHAR2(19 CHAR) PATH '$.updated'
, id varchar2(9 char) path '$._id'
, upJSON VARCHAR2(2000 CHAR) PATH '$.upJSON'
)) ) d
--where d.id = '0'
)
select t.updated
, t.id
, jt2.*
from tmp t
, json_table(t.upJSON, '$'
columns mProvider varchar2(5) path '$.mProvider'
, mLongitude number path '$.mLongitude'
) jt2
;
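Note that the linked example uses its own table and column names (ulocations, $._id, mLongitude). Adapting the same chained-parsing idea to the sample data in the question, an untested sketch might look like this; the embedded upJSON string is extracted first and then parsed again with JSON_VALUE:
select jt.id
     , jt.updated
     , json_value(jt.upJSON, '$.mProvider') as mProvider
     , json_value(jt.upJSON, '$.mAltitude') as mAltitude
from myLocations ml
   , json_table(ml.upJSON, '$.values[*].nameValuePairs'
       columns (
         id      varchar2(9 char)    path '$.id'
       , updated varchar2(19 char)   path '$.updated'
       , upJSON  varchar2(2000 char) path '$.upJSON'
       )) jt
where jt.id = '2';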