mysql jsonarrayagg for comma separated id - mysql

I have two tables, casetemp and medication_master.
patient has the fields:
+----+------+-----+-----+------------+
| id | name | age | sex | medicineid |
+----+------+-----+-----+------------+
| 1  | xyz  | 23  | M   | 1,2        |
| 2  | abc  | 20  | f   | 3          |
+----+------+-----+-----+------------+
medicine has the fields:
+----+---------+------------+
| id | medname | desc       |
+----+---------+------------+
| 1  | crosin  | fever tab  |
| 2  | etzaa   | shampoo    |
| 3  | zaanta  | painkiller |
+----+---------+------------+
I want the MySQL LEFT JOIN output to be the following:
[{
"id":"1",
"name":"xyz",
"age":"23",
"sex":"M",
"medicine_id":"1,2",
"medicine_Data":[
{
"id":"1"
"medname":"crosin",
"desc":"fever tab"
},
{
"id":"2"
"medname":"etzaa",
"desc":"shampoo"
}]
},
{
"id":"2",
"name":"abc",
"age":"20",
"sex":"F",
"medicine_id":"3",
"medicine_Data":[{
"id":"3"
"medname":"zaanta",
"desc":"pain killer"
}]
}]
The query I used is:
SELECT json_object(
'id', b.id,
'name',b.name,
'age',b.age,
'sex',b.sex,
'medicine_id',b.medicine_id,
'medicine_data', json_arrayagg(json_object(
'id', pt.id,
'medname', pt.medname,
'desc', pt.desc
))
)
FROM patient b LEFT JOIN medication_master pt
ON b.medicine_id = pt.id
where b.id=1
GROUP BY b.id;
Thanks in advance.
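One possible direction (a sketch only, untested, using the column and table names from the question's own query): because medicine_id holds a comma-separated list, the join condition cannot use =; FIND_IN_SET is one way to match each id inside the list, assuming MySQL 5.7.22+ for JSON_ARRAYAGG. Note that desc is a reserved word and needs backticks, and that a proper junction table would be the cleaner design.
-- Sketch: match ids contained in the comma-separated medicine_id list
SELECT json_object(
    'id', b.id,
    'name', b.name,
    'age', b.age,
    'sex', b.sex,
    'medicine_id', b.medicine_id,
    'medicine_Data', json_arrayagg(json_object(
        'id', pt.id,
        'medname', pt.medname,
        'desc', pt.`desc`
    ))
) AS result
FROM patient b
LEFT JOIN medication_master pt
    ON FIND_IN_SET(pt.id, b.medicine_id) > 0
GROUP BY b.id, b.name, b.age, b.sex, b.medicine_id;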

Related

Parsing JSON data with LOOP

I am parsing data from JSON with UNION ALL, but I need to repeat it more times. I tried to use a LOOP, but it doesn't work. :(
I need to parse every element of the JSON array in statements into rows.
I change the element number in statements::json->0 and then UNION the data.
The code I want to replace with some LOOP:
(
SELECT
execution_entry_id,
account,
trim('"' FROM (account::json->'startBalance')::text) AS startBalance,
trim('"' FROM (account::json->'endBalance')::text) AS endBalance
FROM (
SELECT
execution_entry_id,
statements::json->0 AS account
FROM(
SELECT
e.id AS execution_entry_id,
response_body,
response_body::json->'statements' AS statements
FROM stage_cz.cb_data_execution_entry e
LEFT JOIN stage_cz.cb_data_execution_entry_details d
ON d.execution_entry_id = e.id
WHERE provider_name = 'sokordiatech'
) a
) b WHERE account IS NOT NULL
)
UNION ALL
(
SELECT
execution_entry_id,
account,
trim('"' FROM (account::json->'startBalance')::text) AS startBalance,
trim('"' FROM (account::json->'endBalance')::text) AS endBalance
FROM (
SELECT
execution_entry_id,
statements::json->1 AS account
FROM(
SELECT
e.id AS execution_entry_id,
response_body,
response_body::json->'statements' AS statements
FROM stage_cz.cb_data_execution_entry e
LEFT JOIN stage_cz.cb_data_execution_entry_details d
ON d.execution_entry_id = e.id
WHERE provider_name = 'sokordiatech'
) a
) b WHERE account IS NOT NULL
)
I tried to use:
do $$
declare
counter integer := 0;
begin
while counter < 10 loop
SELECT
execution_entry_id,
statements::json->counter AS account
FROM(
SELECT
e.id AS execution_entry_id,
response_body,
response_body::json->'statements' AS statements
FROM stage_cz.cb_data_execution_entry e
LEFT JOIN stage_cz.cb_data_execution_entry_details d
ON d.execution_entry_id = e.id
WHERE provider_name = 'sokordiatech'
) a;
counter := counter + 1;
end loop;
end$$;
and it ends with an error:
ERROR: query has no destination for result data
Hint: If you want to discard the results of a SELECT, use PERFORM instead.
Where: PL/pgSQL function inline_code_block line 6 at SQL statement
1 statement failed.
Is it possible to get the data with a LOOP, or some other way instead of 10x UNION ALL, please?
Thanks.
If you use statements::json->0, then you have a JSON array in your JSON string. To split the array elements out of a JSON array, you can use the jsonb_array_elements_text function in PostgreSQL; this function extracts the array elements as rows. You don't need a loop for this. For example:
with tbl as (
select
'[
{
"employee_id": 2,
"full_name": "Megan Berry",
"manager_id": 1
},
{
"employee_id": 3,
"full_name": "Sarah Berry",
"manager_id": 1
},
{
"employee_id": 4,
"full_name": "Zoe Black",
"manager_id": 1
},
{
"employee_id": 5,
"full_name": "Tim James",
"manager_id": 1
},
{
"employee_id": 6,
"full_name": "Bella Tucker",
"manager_id": 2
}
]'::jsonb as jsondata
)
select jsondata->1 from tbl;
Result:
{"full_name": "Sarah Berry", "manager_id": 1, "employee_id": 3}
This gets the second element of the JSON array.
But you need all elements; to get all of them and extract the key values, use this syntax:
with table1 as (
select
'[
{
"employee_id": 2,
"full_name": "Megan Berry",
"manager_id": 1
},
{
"employee_id": 3,
"full_name": "Sarah Berry",
"manager_id": 1
},
{
"employee_id": 4,
"full_name": "Zoe Black",
"manager_id": 1
},
{
"employee_id": 5,
"full_name": "Tim James",
"manager_id": 1
},
{
"employee_id": 6,
"full_name": "Bella Tucker",
"manager_id": 2
}
]'::jsonb as jsondata
)
select
tb1.a1->>'full_name' as fullname,
tb1.a1->>'manager_id' as managerid,
tb1.a1->>'employee_id' as employeeid
from (
select jsonb_array_elements_text(jsondata)::jsonb as a1 from table1
) tb1
Result:
fullname     | managerid | employeeid
-------------+-----------+-----------
Megan Berry  |         1 |          2
Sarah Berry  |         1 |          3
Zoe Black    |         1 |          4
Tim James    |         1 |          5
Bella Tucker |         2 |          6
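Applied to the query in the question (a sketch that reuses the asker's table and column names as given), jsonb_array_elements can expand every element of statements into its own row, and ->> returns text directly, so the trim('"' FROM ...) workaround is no longer needed:
SELECT
    a.execution_entry_id,
    acct.account,
    acct.account->>'startBalance' AS startBalance,
    acct.account->>'endBalance' AS endBalance
FROM (
    SELECT
        e.id AS execution_entry_id,
        response_body::jsonb->'statements' AS statements
    FROM stage_cz.cb_data_execution_entry e
    LEFT JOIN stage_cz.cb_data_execution_entry_details d
        ON d.execution_entry_id = e.id
    WHERE provider_name = 'sokordiatech'
) a
-- one output row per array element; rows whose statements are NULL simply drop out
CROSS JOIN LATERAL jsonb_array_elements(a.statements) AS acct(account);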

postgresql How to generate nested json objects from one table

I want to aggregate the data from one table into a complex JSON. The elements should be grouped by the id column, and the nested JSON should be grouped by the section column.
I have a table with data:
+----+---------+------------+
| id | section | subsection |
+----+---------+------------+
| 1  | s_1     | ss_1       |
| 1  | s_1     | ss_2       |
| 1  | s_2     | ss_3       |
| 2  | s_3     | ss_4       |
+----+---------+------------+
I want to create JSON like this:
[
{
"id": 1,
"sections": [
{
"section": "s_1",
"subsections": [
{
"subsection": "ss_1"
},
{
"subsection": "ss_2"
}
]
},
{
"section": "s_2",
"subsections": [
{
"subsection": "ss_3"
}
]
}
]
},
{
"id": 2,
"sections": [
{
"section": "s_3",
"subsections": [
{
"subsection": "ss_3"
}
]
}
]
}
]
I tried to do it like this:
select
json_build_array(
json_build_object(
'id', a.id,
'sections', json_agg(
json_build_object(
'section', b.section,
'subsections', json_agg(
json_build_object(
'subsection', c.subsection
)
)
)
)
)
)
from table as a
inner join table as b on a.section = b.section
inner join table as c on b.subsection = c.subsection
group by a.id;
But there is a problem: "Nested aggregate calls are not allowed".
Is there any possible way to use nested aggregate calls? Or is there more elegant solution?
You'll need to use CTEs or nested queries for this. No JOINs necessary though:
SELECT json_agg(
json_build_object(
'id', a.id,
'sections', a.sections
)
)
FROM (
SELECT b.id, json_agg(
json_build_object(
'section', b.section,
'subsections', b.subsections
)
) AS sections
FROM (
SELECT c.id, c.section, json_agg(
json_build_object(
'subsection', c.subsection
)
) AS subsections
FROM table AS c
GROUP BY id, section
) AS b
GROUP BY id
) AS a;
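The same idea reads a little more clearly with CTEs (a sketch; src stands in for the real table name, since table is a reserved word):
WITH subs AS (
    -- one row per (id, section) with its aggregated subsections
    SELECT id, section,
           json_agg(json_build_object('subsection', subsection)) AS subsections
    FROM src
    GROUP BY id, section
),
secs AS (
    -- one row per id with its aggregated sections
    SELECT id,
           json_agg(json_build_object('section', section,
                                      'subsections', subsections)) AS sections
    FROM subs
    GROUP BY id
)
SELECT json_agg(json_build_object('id', id, 'sections', sections))
FROM secs;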

Unable to get all the values from JSON_ARRAY_ELEMENTS()

Table with sample data:
create table tbl_jsdata
(
id int,
p_id int,
field_name text,
field_value text
);
insert into tbl_jsdata values
(1,101,'Name','Sam'),
(2,101,'City','Dubai'),
(3,101,'Pin','1235'),
(4,101,'Country','UAE'),
(5,102,'Name','Sam'),
(6,102,'City','Dubai'),
(7,102,'Name','Sam Jack'),
(8,102,'Name','Test'),
(9,102,'Name',null);
json_agg query:
drop table if exists tempJSData;
select p_id,
json_build_array(json_object_agg(field_name, field_value)) into tempJSData
from tbl_jsdata
group by p_id;
Getting Result:
select p_id,(json_array_elements(json_build_array)->>'Name')::text Namess
from tempJSData
p_id Namess
---------------------------------
101 Sam
102
Expected Result:
p_id Namess
---------------------------------
101 Sam
102 Sam
102 Sam Jack
102 Test
102
I think it's because you're not creating an array of Name.
If you check your query:
select p_id,
json_build_array(json_object_agg(field_name, field_value))
from tbl_jsdata
group by p_id;
The result is
p_id | json_build_array
------+---------------------------------------------------------------------------------------------
101 | [{ "Name" : "Sam", "City" : "Dubai", "Pin" : "1235", "Country" : "UAE" }]
102 | [{ "Name" : "Sam", "City" : "Dubai", "Name" : "Sam Jack", "Name" : "Test", "Name" : null }]
(2 rows)
Note the multiple adjacent entries of the Name field. The expression json_array_elements(json_build_array)->>'Name' fetches the first entry only. I suggest creating an array based on p_id and field_name first:
with array_built as (
select p_id,field_name,
array_agg(field_value) field_value
from tbl_jsdata
group by p_id, field_name
)
select p_id,
jsonb_object_agg(field_name, field_value)
from array_built
group by p_id
;
The result could be optimised, since it creates an array even when there is only one value:
p_id | jsonb_object_agg
------+---------------------------------------------------------------------------
101 | {"Pin": ["1235"], "City": ["Dubai"], "Name": ["Sam"], "Country": ["UAE"]}
102 | {"City": ["Dubai"], "Name": ["Sam", "Sam Jack", "Test", null]}
(2 rows)
But now you can parse it correctly. The whole query is:
with array_built as (
select p_id,field_name,
array_agg(field_value) field_value
from tbl_jsdata
group by p_id, field_name
), agg as (
select p_id,
jsonb_object_agg(field_name, field_value) json_doc
from array_built
group by p_id
)
select p_id, jsonb_array_elements(json_doc->'Name') from agg;
With the expected result as
p_id | jsonb_array_elements
------+----------------------
101 | "Sam"
102 | "Sam"
102 | "Sam Jack"
102 | "Test"
102 | null
(5 rows)
You can use json_each_text to extract the values of your array and in the WHERE clause filter only the key you want:
SELECT p_id,j.value
FROM tempJSData, json_each_text(json_build_array->0) j
WHERE j.key = 'Name';
p_id | value
------+----------
101 | Sam
102 | Sam
102 | Sam Jack
102 | Test
102 |
(5 rows)
Note: this query assumes the format of your JSON is final. If not, consider creating an array of names instead of an array of objects that contain names in it: Name: [foo, bar] instead of [{Name: foo}, {Name: bar}]. The answer from Ftisiot makes a pretty good point.
Demo: db<>fiddle
Your JSON aggregation is essentially invalid, as you are creating a JSON value where the same key appears more than once. If you had used the recommended jsonb data type, the duplicate keys would have been removed.
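For example (a quick illustration of the duplicate-key behaviour; jsonb keeps only the last value per key):
SELECT '{"Name": "Sam", "Name": "Test"}'::json  AS as_json,
       '{"Name": "Sam", "Name": "Test"}'::jsonb AS as_jsonb;
-- as_json  -> {"Name": "Sam", "Name": "Test"}
-- as_jsonb -> {"Name": "Test"}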
I think this aggregation makes more sense:
create table tempjsdata
as
select p_id,
jsonb_agg(jsonb_build_object(field_name, field_value)) as names
from tbl_jsdata
group by p_id
The above generates the following result:
p_id | names
-----+---------------------------------------------------------------------------------------------
101 | [{"Name": "Sam"}, {"City": "Dubai"}, {"Pin": "1235"}, {"Country": "UAE"}]
102 | [{"Name": "Sam"}, {"City": "Dubai"}, {"Name": "Sam Jack"}, {"Name": "Test"}, {"Name": null}]
Then you can use:
select p_id,
x.*
from tempjsdata t
cross join lateral (
select x.item ->> 'Name'
from jsonb_array_elements(t.names) as x(item)
where x.item ? 'Name'
) x
Online example

Fetching N records excluding joined ones?

I have three tables (details omitted for brevity):
create table products (
id,
name
)
create table tags (
id,
name
)
create table product_tags (
product_id,
tag_id
)
These tables are populated as follows:
--------
products
--------
+----+------+
| id | name |
+----+------+
| 1 | Rice |
| 2 | Bean |
| 3 | Milk |
+----+------+
----
tags
----
+----+-------+
| id | name |
+----+-------+
| 1 | Eat |
| 2 | Drink |
| 3 | Seeds |
| 4 | Cow |
+----+-------+
When fetching products I want the output to be formatted as:
[{
id: 1,
name: 'Rice',
tags: [
{
id: 1,
name: 'Eat'
},
{
id: 3,
name: 'Seeds'
},
]
},
{
id: 2,
name: 'Bean',
tags: [
{
id: 1,
name: 'Eat'
},
{
id: 3,
name: 'Seeds'
},
]
},
{
id: 3,
name: 'Milk',
tags: [
{
id: 2,
name: 'Drink'
},
{
id: 4,
name: 'Cow'
},
]
}]
To accomplish this, what I'm doing is:
select
products.*,
tags.id as tag_id, tags.name as tag_name
from products
left join product_tags map on map.product_id = products.id
left join tags on map.tag_id = tags.id
The output of which is:
[{
id: 1,
name: 'Rice',
tag_id: 1,
tag_name: 'Eat',
},{
id: 1,
name: 'Rice',
tag_id: 3,
tag_name: 'Seeds',
},{
id: 2,
name: 'Bean',
tag_id: 1,
tag_name: 'Eat',
},{
id: 2,
name: 'Bean',
tag_id: 3,
tag_name: 'Seeds',
},{
id: 3,
name: 'Milk',
tag_id: 2,
tag_name: 'Drink',
},{
id: 3,
name: 'Milk',
tag_id: 4,
tag_name: 'Cow',
}]
I parse this by hand and aggregate each product instance with an array of zero or more tag objects it is associated with.
Question
When doing the select above, the output is 6 rows. However, there are only 3 products. Is it possible to use a limit clause that would apply to just the products?
For example, if Product.id => 1 had 10 tags associated with it, doing a LIMIT 5 would only select 5 of the tags. What I want to do is select 5 products along with all tags associated with them.
The only way I can think of achieving this is to select just the products, then do an unbounded SELECT with just the product IDs from the previous query.
Bonus question
Is there a more efficient way to do this JOIN such that the output is aggregated as above?
Use a subquery to select 5 products, then join with tags:
SELECT p.name as product, t.name AS tag
FROM (
SELECT id, name
FROM products
ORDER by name
LIMIT 5) AS p
JOIN product_tags AS pt ON pt.product_id = p.id
JOIN tags AS t ON pt.tag_id = t.id
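As for the bonus question: the aggregation can also be done in the query itself. A sketch, assuming PostgreSQL (the question does not name the database), that limits products only, keeps products without tags, and returns each product's tags as a JSON array:
SELECT p.id,
       p.name,
       COALESCE(
           json_agg(json_build_object('id', t.id, 'name', t.name))
               FILTER (WHERE t.id IS NOT NULL),
           '[]'::json
       ) AS tags
FROM (
    SELECT id, name
    FROM products
    ORDER BY name
    LIMIT 5          -- the limit applies to products only
) AS p
LEFT JOIN product_tags pt ON pt.product_id = p.id
LEFT JOIN tags t ON t.id = pt.tag_id
GROUP BY p.id, p.name;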

Getting duplicated sql result

I have 3 tables: Users, Articles and Votes
| Users | | Articles | | Votes |
| id | | id | | userId |
| name | | title | | articleId |
| email | | userId | | type |
I want to get the users list with a count of upvotes and a count of downvotes for each one.
I'm testing this query:
SELECT u.id,u.name,u.email,
(SELECT COUNT(*) FROM votes as v WHERE v.type=1 AND v.articleId IN
(SELECT a.id From articles as a WHERE a.userId = u.id) ) AS totalvoteup,
(SELECT COUNT(*) FROM votes as v WHERE v.type=0 AND v.articleId IN
(SELECT a.id From articles as a WHERE a.userId = u.id) ) AS totalvotedown
FROM users as u
I get the list I want when I test it via phpMyAdmin (the number of results matches the number of users in the table), but when I fetch it through the Node server (from AngularJS or Postman) I get duplicate results:
{
"users": [
[
{
"id": 1,
"name": "John Lennon",
"email": "johnlennon#gmail.com",
"totalvoteup": 0,
"totalvotedown": 0
},
{
"id": 2,
"name": "John Lennon 2",
"email": "johnlennon2#gmail.com",
"totalvoteup": 0,
"totalvotedown": 0
},
{
"id": 3,
"name": "John Lennon 3",
"email": "johnlennon3#gmail.com",
"totalvoteup": 0,
"totalvotedown": 1
},
{
"id": 4,
"name": "John Lennon 4",
"email": "johnlennon4#gmail.com",
"totalvoteup": 0,
"totalvotedown": 0
}
],
[
{
"id": 1,
"name": "John Lennon 1",
"email": "johnlennon1#gmail.com",
"totalvoteup": 0,
"totalvotedown": 0
},
{
"id": 2,
"name": "John Lennon 2",
"email": "johnlennon2#gmail.com",
"totalvoteup": 0,
"totalvotedown": 0
},
{
"id": 3,
"name": "John Lennon 3",
"email": "johnlennon3#gmail.com",
"totalvoteup": 0,
"totalvotedown": 1
},
{
"id": 4,
"name": "John Lennon 4",
"email": "johnlennon4#gmail.com",
"totalvoteup": 0,
"totalvotedown": 0
}
]
]
}
Any ideas how to resolve this?
I'm not a Java guy and I'm not sure about the problem in question, but the query can be written in a better way:
SELECT DISTINCT u.id,
u.name,
u.email,
coalesce(totalvoteup,0) as totalvoteup,
coalesce(totalvotedown,0) as totalvotedown
FROM users AS u
LEFT JOIN (SELECT DISTINCT id,userId FROM articles) a
ON a.userId = u.id
LEFT JOIN (SELECT Count(CASE WHEN v.type = 1 THEN 1 END) AS totalvoteup,
Count(CASE WHEN v.type = 0 THEN 1 END) AS totalvotedown,
v.articleId
FROM votes v
GROUP BY v.articleId) v
ON a.id = v.articleId
It may not help you solve the problem; I just thought of sharing since I like query optimization.
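For what it's worth, since the goal is one row per user with summed counts, the correlated subqueries can also be replaced by joins plus conditional sums (a sketch, assuming MySQL, which the mention of phpMyAdmin suggests):
SELECT u.id, u.name, u.email,
       COALESCE(SUM(v.type = 1), 0) AS totalvoteup,   -- boolean expressions sum as 0/1 in MySQL
       COALESCE(SUM(v.type = 0), 0) AS totalvotedown
FROM users u
LEFT JOIN articles a ON a.userId = u.id
LEFT JOIN votes v ON v.articleId = a.id
GROUP BY u.id, u.name, u.email;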