I have a PostgreSQL function that takes a JSON data object, and I need to return some values from it.
This is my function:
CREATE OR REPLACE FUNCTION "public"."insert_from_json"("in_json_txt" json)
RETURNS "pg_catalog"."void" AS $BODY$
INSERT INTO json_test2 (name, age, location_id)
WITH t1 AS (
SELECT (rec->>'name')::text , (rec->>'age')::integer FROM
json_array_elements(in_json_txt->'data') rec
),t2 AS (
WITH my_v_table ( jsonblob ) AS ( VALUES ( in_json_txt:: jsonb ) )
SELECT
((my_v_table.jsonblob ->> 'Store_IntegrationCode')::numeric) as store_id
FROM my_v_table
)
SELECT * from t1,t2
$BODY$
LANGUAGE sql VOLATILE
COST 100
When I use RETURN QUERY I get an error :(
This is the call statement:
select insert_from_json('{
"Customer_IntegrationCode": "558889999",
"XretialOrderCode": "000020430",
"ShippingAddress": "Cairo, Nasr City, 01128777733",
"ShippingAddress_IntegrationCode": null,
"PaymentOption": 1,
"CreationDate": "2021-01-04T07:38:57.033Z",
"Total": 73.0,
"Currency": "EGP",
"Note": null,
"ShippingCost": 15.0,
"CODFee": 25.0,
"ShipmentProvider": null,
"Plateform": 1,
"SubTotal": 33.0,
"TotalDiscountAmount_PerOrderLevel": 0,
"OriginalSubTotal": 33.0,
"TaxPercentage": null,
"TaxValue": null,
"Store_IntegrationCode": "1234567",
"data": [
{
"name": "12345678",
"age": "23456789",
"Qty": 3,
"UnitPrice": 11.0,
"NetPrice": 11.0,
"SKUDiscount": 0,
"Total": 33.0,
"ShipmentCost": 0.0,
"SubTotal": 33.0
},
{
"name": "999999",
"age": "988888",
"Qty": 3,
"UnitPrice": 11.0,
"NetPrice": 11.0,
"SKUDiscount": 0,
"Total": 33.0,
"ShipmentCost": 0.0,
"SubTotal": 33.0
}
]
}
')
When I add RETURN QUERY to the function I get this error:
ERROR: syntax error at or near "RETURN"
LINE 18: RETURN query SELECT * from t1,t2
You cannot return values from a function that returns VOID.
If you want to return the rows after insertion, you can try the function definition below:
CREATE OR REPLACE FUNCTION "public"."insert_from_json"("in_json_txt" json)
RETURNS
table (name_ text, age_ int, location_ numeric)
AS $BODY$
BEGIN
RETURN QUERY
INSERT INTO json_test2 (name, age, location_id)
(WITH t1 AS (
SELECT (rec->>'name')::text , (rec->>'age')::integer FROM
json_array_elements(in_json_txt->'data') rec
),t2 AS (
WITH my_v_table ( jsonblob ) AS ( VALUES ( in_json_txt:: jsonb ) )
SELECT
((my_v_table.jsonblob ->> 'Store_IntegrationCode')::numeric) as store_id
FROM my_v_table
)
SELECT * from t1,t2
)
RETURNING name, age, location_id;
END;
$BODY$
LANGUAGE plpgsql VOLATILE
COST 100
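Since the function now returns a set of rows, it should be called in the FROM clause to actually see them. A minimal sketch of a call, reusing (and trimming) the JSON from the question:
select * from insert_from_json('{"Store_IntegrationCode": "1234567", "data": [{"name": "12345678", "age": "23456789"}]}');
This returns one row per inserted record, with columns name_, age_ and location_.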
You can write the same thing in an even shorter and simpler way, like below:
CREATE OR REPLACE FUNCTION "public"."insert_from_json"("in_json_txt" json)
RETURNS
table (name_ text, age_ int, location_ numeric)
AS $BODY$
BEGIN
RETURN QUERY
INSERT INTO json_test2 (name, age, location_id)
SELECT
(rec->>'name')::text, (rec->>'age')::integer, (in_json_txt->>'Store_IntegrationCode')::numeric
FROM json_array_elements(in_json_txt->'data') rec
RETURNING name, age, location_id;
END;
$BODY$
LANGUAGE plpgsql VOLATILE
COST 100
You can reduce the function to just an "INSERT ... RETURNING" by converting it to an SQL function.
create or replace function insert_from_json(in_json_txt json)
returns setof json_test2
language sql
as $$
insert into json_test2 (name, age, location_id)
select (rec->>'name')::text
, (rec->>'age')::integer
, (in_json_txt->>'Store_IntegrationCode')::numeric
from json_array_elements(in_json_txt->'data') rec
returning *;
$$;
Here is my scenario: I want to update the hourly_rate for BOB to 600. How do I extract the hourly_rate from the JSON array mentioned below for the specific tutor BOB?
@data = [{
"Subject": "Maths",
"type": "paid",
"tutor": "MARY",
"hourly_rate": "500"
},
{
"Subject": "Maths",
"type": "paid",
"tutor": "BOB",
"hourly_rate": "700"
}]
Can I use JSON_SEARCH() to get the index by using a WHERE clause?
example:
"Select JSON_SET(#data,'$[*].hourly_rate', 600) Where 'Subject' = Maths and 'tutor' = 'BOB'";
I got this working. But I had to use a view in order to get cleaner code.
My answer is based on this one: https://stackoverflow.com/a/51563616/1688441
Update Query
Fiddle # https://www.db-fiddle.com/f/7MnPYEJW2uiGYaPhSSjtKa/1
UPDATE test
INNER JOIN getJsonArray ON getJsonArray.tutor = 'BOB'
SET test =
JSON_REPLACE(
test,
CONCAT('$[', getJsonArray.rowid - 1, '].hourly_rate'), 600);
select * from test;
DDL
CREATE TABLE `test` (
`test` json DEFAULT NULL
);
INSERT INTO `test` (`test`)
VALUES ('[{
"Subject": "Maths",
"type": "paid",
"tutor": "MARY",
"hourly_rate": "500"
},
{
"Subject": "Maths",
"type": "paid",
"tutor": "BOB",
"hourly_rate": "700"
}]');
create view getJsonArray as
select data.*
from test, json_table(
test,
"$[*]"
COLUMNS(
rowid FOR ORDINALITY,
Subject VARCHAR(100) PATH "$.Subject" DEFAULT '111' ON EMPTY DEFAULT '999' ON ERROR,
type VARCHAR(100) PATH "$.type" DEFAULT '111' ON EMPTY DEFAULT '999' ON ERROR,
tutor VARCHAR(100) PATH "$.tutor" DEFAULT '111' ON EMPTY DEFAULT '999' ON ERROR,
hourly_rate JSON PATH "$.hourly_rate" DEFAULT '{"x": 333}' ON EMPTY
)
) data
;
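Note that the path uses getJsonArray.rowid - 1 because FOR ORDINALITY numbers rows starting at 1, while JSON array paths are 0-based. To sanity-check what the view exposes (output as I'd expect it for the sample row):
select rowid, tutor, hourly_rate from getJsonArray;
-- rowid  tutor  hourly_rate
-- 1      MARY   "500"
-- 2      BOB    "700"
So for BOB the generated path is '$[1].hourly_rate'.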
I have a JSON file which contains data I need to import into a table. The problem is I only want the latitude, longitude and preferredGazetteerName properties.
Here is what my JSON data looks like:
[
{
"MRGID": 2375,
"gazetteerSource": "Nomenclature des espaces maritimes/List of maritime areas",
"placeType": "Strait",
"latitude": 51.036666666667,
"longitude": 1.5486111111111,
"minLatitude": 49.71788333,
"minLongitude": 0.238905863,
"maxLatitude": 51.78156033,
"maxLongitude": 2.744909289,
"precision": 21000,
"preferredGazetteerName": "Dover Strait",
"preferredGazetteerNameLang": "English",
"status": "standard",
"accepted": 2375
},
{
"MRGID": 2376,
"gazetteerSource": "The Times comprehensive atlas of the world. 10th ed. Times Books: London, UK. ISBN 0-7230-0792-6. 67, 220, 124 plates pp.,",
"placeType": "Strait",
"latitude": 54.604722222222,
"longitude": 11.220833333333,
"minLatitude": null,
"minLongitude": null,
"maxLatitude": null,
"maxLongitude": null,
"precision": 40000,
"preferredGazetteerName": "Femer Baelt",
"preferredGazetteerNameLang": "English",
"status": "standard",
"accepted": 2376
}]
and the table, where the preferredGazetteerName values will be inserted into the strait name column.
Here is one approach where we split the JSON string with a delimiter of '{'; then it just becomes a matter of string manipulation.
Example
Declare @S varchar(max) ='... your JSON String ...'
Select Name = left(Name,charindex(',',Name+',')-1)
,Lat = try_convert(float,left(Lat,charindex(',',Lat+',')-1))
,Lng = try_convert(float,left(Lng,charindex(',',Lng+',')-1))
From (
Select Name = ltrim(replace(substring(RetVal,nullif(charindex('"preferredGazetteerName"',RetVal),0)+25,75),'"',''))
,Lat = ltrim(substring(RetVal,nullif(charindex('"latitude"',RetVal),0) +11,25))
,Lng = ltrim(substring(RetVal,nullif(charindex('"longitude"',RetVal),0)+12,25))
From [dbo].[tvf-Str-Parse](@S,'{')
) A
Where Name is not null
Returns
Name Lat Lng
Dover Strait 51.036666666667 1.5486111111111
Femer Baelt 54.604722222222 11.220833333333
The Split/Parse TVF, if interested:
CREATE FUNCTION [dbo].[tvf-Str-Parse] (@String varchar(max),@Delimiter varchar(10))
Returns Table
As
Return (
Select RetSeq = Row_Number() over (Order By (Select null))
,RetVal = LTrim(RTrim(B.i.value('(./text())[1]', 'varchar(max)')))
From (Select x = Cast('<x>' + replace((Select replace(@String,@Delimiter,'§§Split§§') as [*] For XML Path('')),'§§Split§§','</x><x>')+'</x>' as xml).query('.')) as A
Cross Apply x.nodes('x') AS B(i)
);
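To sanity-check the TVF on its own before pointing it at the JSON, here is a tiny example (the sample string is invented for illustration):
Declare @T varchar(max) = 'aaa{bbb{ccc'
Select RetSeq, RetVal From [dbo].[tvf-Str-Parse](@T,'{')
which should return three rows: aaa, bbb and ccc, in that order.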
I'm working on a Web project where the client application communicates with the DB via JSONs.
The initial implementation took place with SQL Server 2012 (NO JSON support and hence we implemented a Stored Function that handled the parsing) and now we are moving to 2016 (YES JSON support).
So far, we are reducing processing time by a significant factor (in some cases, over 200 times faster!).
There are some interactions that contain arrays that need to be converted into tables. To achieve that, the OPENJSON function does ALMOST what we need.
In some of these (array-based) cases, records within the arrays have one or more fields that are also OBJECTS (in this particular case, also arrays), for instance:
[{
"Formal_Round_Method": "Floor",
"Public_Round_Method": "Closest",
"Formal_Precision": "3",
"Public_Precision": "3",
"Formal_Significant_Digits": "3",
"Public_Significant_Digits": "3",
"General_Comment": [{
"Timestamp": "2018-07-16 09:19",
"From": "1",
"Type": "Routine_Report",
"Body": "[To + Media + What]: Comment 1",
"$$hashKey": "object:1848"
}, {
"Timestamp": "2018-07-16 09:19",
"From": "1",
"Type": "User_Comment",
"Body": "[]: Comment 2",
"$$hashKey": "object:1857"
}, {
"Timestamp": "2018-07-16 09:19",
"From": "1",
"Type": "Routine_Report",
"Body": "[To + Media + What]: Comment 3",
"$$hashKey": "object:1862"
}]
}, {
"Formal_Round_Method": "Floor",
"Public_Round_Method": "Closest",
"Formal_Precision": "3",
"Public_Precision": "3",
"Formal_Significant_Digits": "3",
"Public_Significant_Digits": "3",
"General_Comment": []
}]
Here, General_Comment is also an array.
When running the command:
SELECT *
FROM OPENJSON(@_l_Table_Data)
WITH ( Formal_Round_Method NVARCHAR(16) '$.Formal_Round_Method' ,
Public_Round_Method NVARCHAR(16) '$.Public_Round_Method' ,
Formal_Precision INT '$.Formal_Precision' ,
Public_Precision INT '$.Public_Precision' ,
Formal_Significant_Digits INT '$.Formal_Significant_Digits' ,
Public_Significant_Digits INT '$.Public_Significant_Digits' ,
General_Comment NVARCHAR(4000) '$.General_Comment'
) ;
[@_l_Table_Data is a variable holding the JSON string]
we are getting the column General_Comment = NULL even though there is data in there (at least in the first element of the array).
I guess that I should be using a different syntax for those columns that may contain OBJECTS and not SIMPLE VALUES, but I have no idea what that syntax should be.
I found a Microsoft page that actually solves the problem.
Here is what the query should look like:
SELECT *
FROM OPENJSON(@_l_Table_Data)
WITH ( Formal_Round_Method NVARCHAR(16) '$.Formal_Round_Method' ,
Public_Round_Method NVARCHAR(16) '$.Public_Round_Method' ,
Formal_Precision INT '$.Formal_Precision' ,
Public_Precision INT '$.Public_Precision' ,
Formal_Significant_Digits INT '$.Formal_Significant_Digits' ,
Public_Significant_Digits INT '$.Public_Significant_Digits' ,
General_Comment NVARCHAR(MAX) '$.General_Comment' AS JSON
) ;
So, you need to add AS JSON at the end of the column definition and (God knows why) the type MUST be NVARCHAR(MAX).
Very simple indeed!!!
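If you also need the individual comments back as rows, one option (my own extension, not from the Microsoft page) is to feed the AS JSON column into a second OPENJSON via CROSS APPLY:
SELECT t.Formal_Round_Method ,
       c.[Timestamp] , c.[From] , c.[Type] , c.Body
FROM OPENJSON(@_l_Table_Data)
WITH ( Formal_Round_Method NVARCHAR(16) '$.Formal_Round_Method' ,
       General_Comment NVARCHAR(MAX) '$.General_Comment' AS JSON
     ) t
CROSS APPLY OPENJSON(t.General_Comment)
WITH ( [Timestamp] NVARCHAR(20) '$.Timestamp' ,
       [From] NVARCHAR(10) '$.From' ,
       [Type] NVARCHAR(32) '$.Type' ,
       Body NVARCHAR(4000) '$.Body'
     ) c ;
Use OUTER APPLY instead of CROSS APPLY if rows whose General_Comment array is empty (like the second element above) should still appear.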
Create Function ParseJson:
Create or Alter FUNCTION [dbo].[ParseJson] (@JSON NVARCHAR(MAX))
RETURNS @Unwrapped TABLE
(
[id] INT IDENTITY, --just used to get a unique reference to each json item
[level] INT, --the hierarchy level
[key] NVARCHAR(100), --the key or name of the item
[Value] NVARCHAR(MAX),--the value, if it is a null, int,binary,numeric or string
type INT, --0 TO 5, the JSON type, null, numeric, string, binary, array or object
SQLDatatype sysname, --whatever the datatype can be parsed to
parent INT, --the ID of the parent
[path] NVARCHAR(4000) --the path as used by OpenJSON
)
AS begin
INSERT INTO @Unwrapped ([level], [key], Value, type, SQLDatatype, parent,
[path])
VALUES
(0, --the level
NULL, --the key,
@json, --the value,
CASE WHEN Left(ltrim(@json),1)='[' THEN 4 ELSE 5 END, --the type
'json', --SQLDataType,
0 , --no parent
'$' --base path
);
DECLARE @ii INT = 0,--the level
@Rowcount INT = -1; --the number of rows from the previous iteration
WHILE @Rowcount <> 0 --while we are still finding levels
BEGIN
INSERT INTO @Unwrapped ([level], [key], Value, type, SQLDatatype, parent,
[path])
SELECT [level] + 1 AS [level], new.[Key] AS [key],
new.[Value] AS [value], new.[Type] AS [type],
-- SQL Prompt formatting off
/* In order to determine the datatype of a JSON value, the best approach is to determine
the datatype that it can be parsed to. In JSON, an array of objects can contain attributes that aren't
consistent either in their name or value. */
CASE
WHEN new.Type = 0 THEN 'bit null'
WHEN new.[type] IN (1,2) then COALESCE(
CASE WHEN TRY_CONVERT(INT,new.[value]) IS NOT NULL THEN 'int' END,
CASE WHEN TRY_CONVERT(NUMERIC(14,4),new.[value]) IS NOT NULL THEN 'numeric' END,
CASE WHEN TRY_CONVERT(FLOAT,new.[value]) IS NOT NULL THEN 'float' END,
CASE WHEN TRY_CONVERT(MONEY,new.[value]) IS NOT NULL THEN 'money' END,
CASE WHEN TRY_CONVERT(DateTime,new.[value],126) IS NOT NULL THEN 'Datetime2' END,
CASE WHEN TRY_CONVERT(Datetime,new.[value],127) IS NOT NULL THEN 'Datetime2' END,
'nvarchar')
WHEN new.Type = 3 THEN 'bit'
WHEN new.Type = 5 THEN 'object' ELSE 'array' END AS SQLDatatype,
old.[id],
old.[path] + CASE WHEN old.type = 5 THEN '.' + new.[Key]
ELSE '[' + new.[Key] COLLATE DATABASE_DEFAULT + ']' END AS path
-- SQL Prompt formatting on
FROM @Unwrapped old
CROSS APPLY OpenJson(old.[Value]) new
WHERE old.[level] = #ii AND old.type IN (4, 5);
SELECT @Rowcount = @@RowCount;
SELECT @ii = @ii + 1;
END;
return
END
For Usage:
select * from ParseJson(@jsonString)
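For a self-contained test, a sketch with an invented JSON literal:
Declare @jsonString NVARCHAR(MAX) = N'{"a": 1, "b": {"c": [2, 3]}}'
Select [id], [level], [key], [Value], SQLDatatype, parent, [path]
From dbo.ParseJson(@jsonString)
Order By [id]
The function unwraps the document one level per loop iteration: the root object first, then keys a and b, then c, then the two array elements.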
Running: PostgreSQL 9.6.2
I have data stored in a table that is in the form of a key/value pair. The "key" is actually the path of a JSON object, each element being a property. So, for example, if the key was "cogs","props1","value", then the JSON object would look like this:
{
"cogs":{
"props1": {
"value": 100
}
}
}
I'd like to somehow reconstruct a json object via a SQL query if possible. Here is the test data set:
drop table if exists test_table;
CREATE TABLE test_table
(
id serial,
file_id integer NOT NULL,
key character varying[],
value character varying,
status character varying
)
WITH (
OIDS = FALSE
)
TABLESPACE pg_default;
insert into test_table (file_id, key, value, status)
values (1, '{"cogs","description"}', 'some awesome cog', 'approved');
insert into test_table (file_id, key, value, status)
values (1, '{"cogs","display"}', 'Giant Cog', null);
insert into test_table (file_id, key, value, status)
values (1, '{"cogs","props1","value"}', '100', 'not verified');
insert into test_table (file_id, key, value, status)
values (1, '{"cogs","props1","id"}', 26, 'approved');
insert into test_table (file_id, key, value, status)
values (1, '{"cogs","props1","dimensions"}', '{"200", "300"}', null);
insert into test_table (file_id, key, value, status)
values (1, '{"cogs","props2","value"}', '200', 'not verified');
insert into test_table (file_id, key, value, status)
values (1, '{"cogs","props2","id"}', 27, 'approved');
insert into test_table (file_id, key, value, status)
values (1, '{"cogs","props2","dimensions"}', '{"700", "800"}', null);
insert into test_table (file_id, key, value, status)
values (1, '{"widgets","description"}', 'some awesome widget', 'approved');
insert into test_table (file_id, key, value, status)
values (1, '{"widgets","display"}', 'Giant Widget', null);
insert into test_table (file_id, key, value, status)
values (1, '{"widgets","props1","value"}', '100', 'not verified');
insert into test_table (file_id, key, value, status)
values (1, '{"widgets","props1","id"}', 28, 'approved');
insert into test_table (file_id, key, value, status)
values (1, '{"widgets","props1","dimensions"}', '{"200", "300"}', null);
insert into test_table (file_id, key, value, status)
values (1, '{"widgets","props2","value"}', '200', 'not verified');
insert into test_table (file_id, key, value, status)
values (1, '{"widgets","props2","id"}', 29, 'approved');
insert into test_table (file_id, key, value, status)
values (1, '{"widgets","props2","dimensions"}', '{"900", "1000"}', null);
The output I'm looking for is in this format:
{
"cogs": {
"description": "some awesome cog",
"display": "Giant Cog",
"props1": {
"value": 100,
"id": 26,
"dimensions": [200, 300]
},
"props2": {
"value": 200,
"id": 27,
"dimensions": [700, 800]
}
},
"widgets": {
"description": "some awesome widget",
"display": "Giant Widget",
"props1": {
"value": 100,
"id": 28,
"dimensions": [200, 300]
},
"props2": {
"value": 200,
"id": 29,
"dimensions": [900, 1000]
}
}
}
Some issues I'm facing:
The "value" column can hold text, numbers, and an array. For whatever reason, the server-side code using knex.js is storing an array of integers (ie, [100,300]) into postgres as the following format: {"100","300"}. I need to ensure I extract this out as an array of integers as well.
Trying to make this dynamic as possible. Maybe a recursive procedure to figure out what depth of the "key" path exists.... rather than hard-coding array lookup values.
json_object_agg works well to group together properties into a single object. However it breaks when hitting a null value. So if the "key" column has only two values (ie, "cogs","description"), and I attempt to aggregate up an array of length three (ie, "cogs","props1","value"), it will break unless I filter on only arrays of length 3.
Preserve the ordering of the input. #klin solution below is amazing and gets me 95% of the way there. However I failed to mention to also preserve the ordering...
A dynamic solution needs some work.
First, we need a function to convert a text array and a value to a jsonb object.
create or replace function keys_to_object(keys text[], val text)
returns jsonb language plpgsql as $$
declare
i int;
rslt jsonb = to_jsonb(val);
begin
for i in select generate_subscripts(keys, 1, true) loop
rslt := jsonb_build_object(keys[i], rslt);
end loop;
return rslt;
end $$;
select keys_to_object(array['key', 'subkey', 'subsub'], 'value');
keys_to_object
------------------------------------------
{"key": {"subkey": {"subsub": "value"}}}
(1 row)
Next, another function to merge jsonb objects (see Merging JSONB values in PostgreSQL).
create or replace function jsonb_merge(a jsonb, b jsonb)
returns jsonb language sql as $$
select
jsonb_object_agg(
coalesce(ka, kb),
case
when va isnull then vb
when vb isnull then va
when jsonb_typeof(va) <> 'object' or jsonb_typeof(vb) <> 'object' then vb
else jsonb_merge(va, vb) end
)
from jsonb_each(a) e1(ka, va)
full join jsonb_each(b) e2(kb, vb) on ka = kb
$$;
select jsonb_merge('{"key": {"subkey1": "value1"}}', '{"key": {"subkey2": "value2"}}');
jsonb_merge
-----------------------------------------------------
{"key": {"subkey1": "value1", "subkey2": "value2"}}
(1 row)
Finally, let's create an aggregate based on the above function,
create aggregate jsonb_merge_agg(jsonb)
(
sfunc = jsonb_merge,
stype = jsonb
);
and we are done:
select jsonb_pretty(jsonb_merge_agg(keys_to_object(key, translate(value, '{}"', '[]'))))
from test_table;
jsonb_pretty
----------------------------------------------
{ +
"cogs": { +
"props1": { +
"id": "26", +
"value": "100", +
"dimensions": "[200, 300]" +
}, +
"props2": { +
"id": "27", +
"value": "200", +
"dimensions": "[700, 800]" +
}, +
"display": "Giant Cog", +
"description": "some awesome cog" +
}, +
"widgets": { +
"props1": { +
"id": "28", +
"value": "100", +
"dimensions": "[200, 300]" +
}, +
"props2": { +
"id": "29", +
"value": "200", +
"dimensions": "[900, 1000]" +
}, +
"display": "Giant Widget", +
"description": "some awesome widget"+
} +
}
(1 row)
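One caveat on requirement 4 (preserving input order): jsonb normalizes objects, storing keys in a fixed internal order (shorter keys first, then bytewise) and discarding duplicates, so the original key order cannot survive a jsonb round trip. A one-line demonstration:
select '{"widgets": 1, "cogs": 2}'::jsonb;
           jsonb
----------------------------
 {"cogs": 2, "widgets": 1}
(1 row)
If key order matters, the final document has to be assembled as json rather than jsonb, e.g. with json_build_object/json_object_agg, which keep keys in the order they were added.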
I'm using PostgreSQL 9.4.5. I'd like to update a jsonb column.
My table is structured this way:
CREATE TABLE my_table (
gid serial PRIMARY KEY,
"data" jsonb
);
JSON strings are like this:
{"files": [], "ident": {"id": 1, "country": null, "type ": "20"}}
The following SQL doesn't do the job (syntax error - SQL state = 42601):
UPDATE my_table SET "data" -> 'ident' -> 'country' = 'Belgium';
Is there a way to achieve that?
OK, there are two functions:
create or replace function set_jsonb_value(p_j jsonb, p_key text, p_value jsonb) returns jsonb as $$
select jsonb_object_agg(t.key, t.value) from (
select
key,
case
when jsonb_typeof(value) = 'object' then set_jsonb_value(value, p_key, p_value)
when key = p_key then p_value
else value
end as value from jsonb_each(p_j)) as t;
$$ language sql immutable;
The first one just changes the value of an existing key, regardless of the key path:
postgres=# select set_jsonb_value(
'{"files": [], "country": null, "ident": {"id": 1, "country": null, "type ": "20"}}',
'country',
'"foo"');
set_jsonb_value
--------------------------------------------------------------------------------------
{"files": [], "ident": {"id": 1, "type ": "20", "country": "foo"}, "country": "foo"}
(1 row)
create or replace function set_jsonb_value(p_j jsonb, p_path text[], p_value jsonb) returns jsonb as $$
select jsonb_object_agg(t.key, t.value) from (
select
key,
case
when jsonb_typeof(value) = 'object' then set_jsonb_value(value, p_path[2:1000], p_value)
when key = p_path[1] then p_value
else value
end as value from jsonb_each(p_j)
union all
select
p_path[1],
case
when array_length(p_path,1) = 1 then p_value
else set_jsonb_value('{}', p_path[2:1000], p_value) end
where not p_j ? p_path[1]) as t;
$$ language sql immutable;
The second one changes the value of the existing key using the specified path, or creates it if the path does not exist:
postgres=# select set_jsonb_value(
'{"files": [], "country": null, "ident": {"id": 1, "type ": "20"}}',
'{ident,country}'::text[],
'"foo"');
set_jsonb_value
-------------------------------------------------------------------------------------
{"files": [], "ident": {"id": 1, "type ": "20", "country": "foo"}, "country": null}
(1 row)
postgres=# select set_jsonb_value(
'{"files": [], "country": null, "ident": {"id": 1, "type ": "20"}}',
'{ident,foo,bar,country}'::text[],
'"foo"');
set_jsonb_value
-------------------------------------------------------------------------------------------------------
{"files": [], "ident": {"id": 1, "foo": {"bar": {"country": "foo"}}, "type ": "20"}, "country": null}
(1 row)
Hope it will help someone who uses PostgreSQL < 9.5.
Disclaimer: Tested on PostgreSQL 9.5
In PG 9.4 you are out of luck with "easy" solutions like jsonb_set() (9.5). Your only option is to unpack the JSON object, make the changes and re-build the object. That sounds very cumbersome and it is indeed: JSON is horrible to manipulate, no matter how advanced or elaborate the built-in functions.
CREATE TYPE data_ident AS (id integer, country text, "type" integer);
UPDATE my_table
SET "data" = json_build_object('files', "data"->'files', 'ident', ident.j)::jsonb
FROM (
SELECT gid, json_build_object('id', j.id, 'country', 'Belgium', 'type', j."type") AS j
FROM my_table
JOIN LATERAL jsonb_populate_record(null::data_ident, "data"->'ident') j ON true) ident
WHERE my_table.gid = ident.gid;
In the SELECT clause "data"->'ident' is unpacked into a record (for which you need to CREATE TYPE a structure). Then it is built right back into a JSON object with the new country name. In the UPDATE that "ident" object is re-joined with the "files" object and the whole thing cast to a jsonb.
A pure thing of beauty -- just so long as speed is not your thing...
My previous solution relied on 9.5 functionality.
I would recommend instead either going with abelisto's solutions below or using pl/perlu, plpythonu, or plv8js to write json mutators in a language that has better support for them.
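For instance, with plv8 installed, such a mutator is only a few lines of JavaScript. This is just a sketch under my own naming; it deliberately uses a text-in/text-out signature with JSON.parse/JSON.stringify so it does not depend on how a particular plv8 version maps jsonb arguments:
create or replace function set_ident_country(data text, country text)
returns text language plv8 immutable as $$
  // parse, mutate, and re-serialize the document in plain JavaScript
  var obj = JSON.parse(data);
  obj.ident.country = country;
  return JSON.stringify(obj);
$$;

update my_table
set "data" = set_ident_country("data"::text, 'Belgium')::jsonb;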