How to convert a Firestore REST API response to normal JSON?

I am trying to use Firestore from Unity via its REST API, and everything has worked as expected so far.
However, when I read a document from Firestore, the response comes back in a different format,
like this:
{
  "name": "projects/firestore-unity-demo-87998/databases/(default)/documents/test/panara",
  "fields": {
    "health": {
      "integerValue": "1008"
    },
    "name": {
      "stringValue": "Bhavin Panara"
    },
    "birthday": {
      "timestampValue": "1992-10-08T04:40:10Z"
    },
    "alive": {
      "booleanValue": true
    },
    "floatingPointNumber": {
      "doubleValue": 100.1
    }
  },
  "createTime": "2019-07-30T13:27:09.599079Z",
  "updateTime": "2019-07-31T11:41:10.637712Z"
}
How can I convert this kind of JSON into a plain JSON object like this?
{
  "health": 1008,
  "name": "Bhavin Panara",
  "birthday": "1992-10-08T04:40:10Z",
  "alive": true,
  "floatingPointNumber": 100.1
}

I used the code from the firestore-parser package:
const getFireStoreProp = (value) => {
  const props = {
    arrayValue: 1,
    bytesValue: 1,
    booleanValue: 1,
    doubleValue: 1,
    geoPointValue: 1,
    integerValue: 1,
    mapValue: 1,
    nullValue: 1,
    referenceValue: 1,
    stringValue: 1,
    timestampValue: 1,
  };
  return Object.keys(value).find(k => props[k] === 1);
};
export const fireStoreParser = (value) => {
  let newVal = value;
  // You can use this part to avoid mutating original values
  // let newVal;
  // if (typeof value === 'object') {
  //   newVal = { ...value };
  // } else if (value instanceof Array) {
  //   newVal = value.slice(0);
  // } else {
  //   newVal = value;
  // }
  const prop = getFireStoreProp(newVal);
  if (prop === 'doubleValue' || prop === 'integerValue') {
    newVal = Number(newVal[prop]);
  } else if (prop === 'arrayValue') {
    newVal = ((newVal[prop] && newVal[prop].values) || []).map(v => fireStoreParser(v));
  } else if (prop === 'mapValue') {
    newVal = fireStoreParser((newVal[prop] && newVal[prop].fields) || {});
  } else if (prop === 'geoPointValue') {
    newVal = { latitude: 0, longitude: 0, ...newVal[prop] };
  } else if (prop) {
    newVal = newVal[prop];
  } else if (typeof newVal === 'object') {
    Object.keys(newVal).forEach((k) => { newVal[k] = fireStoreParser(newVal[k]); });
  }
  return newVal;
};
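A quick usage sketch (not from the original answer) with the document shown above, assuming the raw response body has already been fetched into a string; restApiBody is a hypothetical variable name:

const response = JSON.parse(restApiBody); // restApiBody: hypothetical string holding the raw Firestore response shown above
const parsed = fireStoreParser(response.fields);
// parsed => { health: 1008, name: "Bhavin Panara", birthday: "1992-10-08T04:40:10Z", alive: true, floatingPointNumber: 100.1 }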

function toValue(field) {
  return "integerValue" in field
    ? Number(field.integerValue)
    : "doubleValue" in field
    ? Number(field.doubleValue)
    : "arrayValue" in field
    ? (field.arrayValue.values ?? []).map(toValue) // Firestore omits "values" for empty arrays
    : "mapValue" in field
    ? toJSON(field.mapValue)
    : Object.entries(field)[0][1];
}

function toJSON(doc) {
  return Object.fromEntries(
    Object.entries(doc.fields ?? {}).map(([key, field]) => [key, toValue(field)])
  );
}
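And a similar sketch for the shorter helpers above, using the same hypothetical restApiBody string:

const doc = JSON.parse(restApiBody); // hypothetical variable holding the raw Firestore response body
const plainDoc = toJSON(doc);
// plainDoc => { health: 1008, name: "Bhavin Panara", birthday: "1992-10-08T04:40:10Z", alive: true, floatingPointNumber: 100.1 }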

Related

How to parse Telnyx JSON in Google Sheets

I am looking for a way to parse the JSON received from Telnyx (through a webhook) in a Google spreadsheet, using in-cell formulas.
The ideal outcome is to have the data from each line in a separate column (including lines with no data), with the correct heading.
JS would also be an option.
I've tried using FILTER and REGEXMATCH with some results, but I couldn't manage to get a decent output.
Any help would be greatly appreciated.
The data received from Telnyx looks like this:
"{
""data"": {
""event_type"": ""message.received"",
""id"": ""0d9c22"",
""occurred_at"": ""2022-07-23T04:52:08.642+00:00"",
""payload"": {
""cc"": [],
""completed_at"": null,
""cost"": null,
""direction"": ""inbound"",
""encoding"": ""GSM-7"",
""errors"": [],
""from"": {
""carrier"": """",
""line_type"": """",
""phone_number"": ""+447""
},
""id"": ""eb17"",
""media"": [],
""messaging_profile_id"": ""4001"",
""organization_id"": ""8ab"",
""parts"": 2,
""received_at"": ""2022-07-20T04:52:08.464+00:00"",
""record_type"": ""message"",
""sent_at"": null,
""subject"": """",
""tags"": [],
""text"": ""SMS goes here.\nThanks"",
""to"": [
{
""carrier"": ""Telnyx"",
""line_type"": ""Wireless"",
""phone_number"": ""+447"",
""status"": ""webhook_delivered""
}
],
""type"": ""SMS"",
""valid_until"": null,
""webhook_failover_url"": null,
""webhook_url"": ""https://script.google.com/XXXXX""
},
""record_type"": ""event""
},
""meta"": {
""attempt"": 1,
""delivered_to"": ""https://script.google.com/""
}
}"
Try this recursive function (A1 contains the data):
=functionTelnyx(A1)
Put this in your script editor:
function functionTelnyx(json) {
  const telnyx = JSON.parse(json.slice(1, -1).replace(/""/g, '"').replace(/(\r\n|\n|\r|\t| )/gm, ""));
  let result = [];
  recursion(telnyx);
  function recursion(obj, path) {
    if (path == undefined) { path = 'telnyx'; }
    const regex = new RegExp('[^0-9]+');
    for (let p in obj) {
      let newPath = (regex.test(p)) ? path + '.' + p : path + '[' + p + ']';
      if (obj[p] != null) {
        if (typeof obj[p] != 'object' && typeof obj[p] != 'function') {
          result.push([p, obj[p]]);
        }
        if (typeof obj[p] == 'object') {
          recursion(obj[p], newPath);
        }
      }
    }
  }
  return result;
}
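For illustration only (not part of the original answer), here is roughly what the custom function returns for the sample payload above; it spills into two columns, one row per field:

// Hypothetical call, just to show the shape of the result
const rows = functionTelnyx(cellValue); // cellValue: the quoted JSON string from A1
// rows is roughly:
// [
//   ["event_type", "message.received"],
//   ["id", "0d9c22"],
//   ["occurred_at", "2022-07-23T04:52:08.642+00:00"],
//   ["direction", "inbound"],
//   ...
// ]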
Edit:
For multiple JSON strings and a horizontal layout:
for headers, put =telnyx_headers(A2) in B1
for contents, put =telnyx_contents(A2) in B2 and drag it down (=telnyx_contents(A3), and so forth),
with
function telnyx_headers(json) {
  const telnyx = JSON.parse(json.slice(1, -1).replace(/""/g, '"').replace(/(\r\n|\n|\r|\t| )/gm, ""));
  let result = [];
  recursion(telnyx);
  function recursion(obj, path) {
    if (path == undefined) { path = 'telnyx'; }
    const regex = new RegExp('[^0-9]+');
    for (let p in obj) {
      let newPath = (regex.test(p)) ? path + '.' + p : path + '[' + p + ']';
      if (obj[p] != null) {
        if (typeof obj[p] != 'object' && typeof obj[p] != 'function') {
          result.push(p);
        }
        if (typeof obj[p] == 'object') {
          recursion(obj[p], newPath);
        }
      }
    }
  }
  return [result];
}

function telnyx_contents(json) {
  const telnyx = JSON.parse(json.slice(1, -1).replace(/""/g, '"').replace(/(\r\n|\n|\r|\t| )/gm, ""));
  let result = [];
  recursion(telnyx);
  function recursion(obj, path) {
    if (path == undefined) { path = 'telnyx'; }
    const regex = new RegExp('[^0-9]+');
    for (let p in obj) {
      let newPath = (regex.test(p)) ? path + '.' + p : path + '[' + p + ']';
      if (obj[p] != null) {
        if (typeof obj[p] != 'object' && typeof obj[p] != 'function') {
          result.push(obj[p]);
        }
        if (typeof obj[p] == 'object') {
          recursion(obj[p], newPath);
        }
      }
    }
  }
  return [result];
}
Optional: array-enabled version
To process a larger number of JSON strings in the column A2:A, use this formula:
=telnyx(A2:A)
The telnyx() function uses telnyx_headers() and telnyx_contents() while observing custom function best practices.
/**
 * Converts one or more JSON strings to a horizontal table.
 * Row 1 will show field names and subsequent rows the
 * data in each field.
 *
 * @param {A2:A} json The JSON strings to convert to a table.
 * @customfunction
 */
function telnyx(json) {
  'use strict';
  if (!Array.isArray(json)) {
    json = [json];
  }
  json = json.flat();
  let result = telnyx_headers(json[0]);
  json.forEach(j => result = result.concat(j ? telnyx_contents(j) : [null]));
  return result;
}

JSON data calculation and reformatting using Angular

I have a JSON file and I am trying to count the values for each key and reformat the result. My JSON file looks like below:
data=[
{
pet:'Cat',
fruit:'Apple',
fish:'Hilsha'
},
{
pet:'Dog',
fish:'Carp'
},
{
pet:'Cat',
fruit:'Orange',
fish:'Lobster'
}
];
I do like to calculate and formate it like below:
data=[
{
label:'Pet',
total:3,
list:[
{
name:'Cat',
value: 2,
},
{
name:'Dog',
value: 1,
}
]
},
{
label:'Fruit',
total:2,
list:[
{
name:'Apple',
value: 1,
},
{
name:'Orange',
value: 1,
}
]
},
{
label:'Fish',
total:3,
list:[
{
name:'Hilsha',
value: 1,
},
{
name:'Carp',
value: 1,
},
{
name:'Lobster',
value: 1,
}
]
},
];
If anybody can help me, it will be very helpful and will save my day.
I have fixed this task myself. If anything is wrong, feel free to leave a comment :)
ngOnInit(): void {
  this.dataService.$data.subscribe(data => {
    // Create new objects and count occurrences per category
    let petObj: any = {};
    let fruitObj: any = {};
    let fishObj: any = {};
    data.forEach((el: any) => {
      if (el.pet != undefined) {
        petObj[el.pet] = (petObj[el.pet] || 0) + 1;
      }
      if (el.fruit != undefined) {
        fruitObj[el.fruit] = (fruitObj[el.fruit] || 0) + 1;
      }
      if (el.fish != undefined) {
        fishObj[el.fish] = (fishObj[el.fish] || 0) + 1;
      }
    });
    // Create a list per category
    let pet_list: any = [];
    let fruit_list: any = [];
    let fish_list: any = [];
    for (var key in petObj) {
      pet_list.push({ label: key, value: petObj[key] });
    }
    for (var key in fruitObj) {
      fruit_list.push({ label: key, value: fruitObj[key] });
    }
    for (var key in fishObj) {
      fish_list.push({ label: key, value: fishObj[key] });
    }
    // Calculate the total per category (0 as initial value guards against empty lists)
    var totalPet = pet_list.map((res: any) => res.value).reduce((a: any, b: any) => a + b, 0);
    var totalFruit = fruit_list.map((res: any) => res.value).reduce((a: any, b: any) => a + b, 0);
    var totalFish = fish_list.map((res: any) => res.value).reduce((a: any, b: any) => a + b, 0);
    // Rearrange the JSON
    this.rearrangeData = [
      {
        label: 'Pet',
        total: totalPet,
        list: pet_list
      },
      {
        label: 'Fruit',
        total: totalFruit,
        list: fruit_list
      },
      {
        label: 'Fish',
        total: totalFish,
        list: fish_list
      }
    ];
    console.log(this.rearrangeData);
    // End rearranging the JSON
  });
}
You can simplify your function. Take a look at this one:
group(oldData) {
  const data = []; // declare an empty array
  oldData.forEach((x) => {
    // x will be {pet: 'Cat', fruit: 'Apple', fish: 'Hilsha'},
    // {pet: 'Dog', fish: 'Carp'}
    // ...
    Object.keys(x).forEach((key) => {
      // key will be 'pet', 'fruit', ...
      const item = data.find((d) => d.label == key); // search in the "data" array
      if (item) {
        // if found, add 1 to the total of that element
        item.total++;
        // and search item.list for e.g. 'Cat'
        const list = item.list.find((l) => l.name == x[key]);
        if (list)
          // if found, add 1 to the value in the list
          list.value++;
        else
          // if not, add an object with properties "name" and "value" equal to 1
          item.list.push({ name: x[key], value: 1 });
      } else {
        // if the element is not yet in the "data" array,
        // add an object with properties label, total and list
        // (note that list is an array with a single element)
        data.push({
          label: key,
          total: 1,
          list: [{ name: x[key], value: 1 }],
        });
      }
    });
  });
  return data;
}
You can use it like:
this.dataService.$data.subscribe(data => {
  this.rearrangeData = this.group(data);
});
NOTE: with this function the labels are 'pet', 'fruit' and 'fish', not 'Pet', 'Fruit' and 'Fish'.
Did you try reading the text leading up to this exercise? That'd be my first approach. After that, I'd use reduce. You can do pretty much anything with reduce.
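In case it helps, here is a minimal sketch of that reduce-based idea (an illustration, not a drop-in answer; like the other answer, it keeps the lowercase keys 'pet', 'fruit' and 'fish' as labels):

function groupWithReduce(oldData) {
  // First pass: count occurrences of each value per key, e.g. { pet: { Cat: 2, Dog: 1 }, ... }
  const counts = oldData.reduce((acc, item) => {
    Object.entries(item).forEach(([key, value]) => {
      acc[key] = acc[key] || {};
      acc[key][value] = (acc[key][value] || 0) + 1;
    });
    return acc;
  }, {});
  // Second pass: reshape into the requested { label, total, list } structure
  return Object.entries(counts).map(([label, values]) => ({
    label,
    total: Object.values(values).reduce((a, b) => a + b, 0),
    list: Object.entries(values).map(([name, value]) => ({ name, value })),
  }));
}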

How to filter an array in a JSON response based on a condition check with keys in Angular 7

I would like to filter out, from the JSON response, only those items whose dataId is not matched by the parentDataId of any other item in the same response, using TypeScript in Angular 7.
{ "data":[
{
"dataId":"Atlanta",
"parentDataId":"America"
},
{
"dataId":"Newyork",
"parentDataId":"America"
},
{
"dataId":"Georgia",
"parentDataId":"Atlanta"
},
{
"dataId":"South",
"parentDataId":"Atlanta"
},
{
"dataId":"North",
"parentDataId":"South"
}
]
}
In the above response, the dataId value Newyork is not matched by any parentDataId in the entire array. I now want to filter out only such items into a new array.
I would like to have this validation in TypeScript / Angular 7.
My output is supposed to look like below: the items whose dataId does not appear as a parentDataId.
[
{
"dataId":"Newyork",
"parentDataId":"America"
},
{
"dataId":"Georgia",
"parentDataId":"Atlanta"
},
{
"dataId":"North",
"parentDataId":"South"
}
]
I appreciate the help and responses.
You can use the filter method:
let filterKey = 'Atlanta';
const result = data.data.filter(f=> f.parentDataId != filterKey
&& f.dataId != filterKey);
An example:
let data = { "data":[
{
"dataId":"Atlanta",
"parentDataId":"America"
},
{
"dataId":"Newyork",
"parentDataId":"America"
},
{
"dataId":"Georgia",
"parentDataId":"Atlanta"
}
]
};
let filterKey = 'Atlanta';
const result = data.data.filter(f=> f.parentDataId != filterKey
&& f.dataId != filterKey);
console.log(result);
demo in this StackBlitz Link
My solution is like the code snippet below. component.ts:
reducedData = [...this.data];
this.data.reduce((c, n, i) => {
  this.data.reduce((d, o, index) => {
    if (n.dataId === o.parentDataId) {
      this.reducedData.splice(i, 1, { dataId: 'removed', parentDataId: 'true' });
    } else {
      return o;
    }
  }, {});
  return n;
}, {});
this.reducedData = this.reducedData.filter(value => value.dataId !== 'removed');
component.html:
<h4> dataId does not have parentId </h4>
<hr>
<pre>
{{reducedData | json}}
</pre>
EDIT
If you do not want to use the second object reducedData, then the solution below works fine as well. StackBlitz Link
component.ts
this.data.reduce((c, n, i) => {
  this.data.reduce((d, o, index) => {
    if (n.dataId === o.parentDataId) {
      this.data[i]['removed'] = "removed";
    } else {
      return o;
    }
  }, {});
  return n;
}, {});
this.data = this.data.filter(value => value['removed'] !== 'removed');
component.html
<h4> dataId does not have parentId </h4>
<hr>
<pre>
{{data |json}}
</pre>
Please try like this.
const data = { "data":[
{
"dataId":"Atlanta",
"parentDataId":"America"
},
{
"dataId":"Newyork",
"parentDataId":"America"
},
{
"dataId":"Georgia",
"parentDataId":"Atlanta"
}
]
};
const filterKey = "Newyork"
const matchExist = data.data.some( item => item.parentDataId === filterKey && item.dataId === filterKey)
let filteredArray ;
if(!matchExist){
filteredArray = data.data.find(item => item.dataId === filterKey )
}
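For the general requirement in the question (keep every item whose dataId never appears as another item's parentDataId), a minimal sketch, assuming data has the shape shown at the top:

const leafItems = data.data.filter(
  item => !data.data.some(other => other.parentDataId === item.dataId)
);
// With the five-item sample above, leafItems contains Newyork, Georgia and North.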

Logstash: Flatten nested JSON, combine fields inside array

I have a JSON looking like this:
{
"foo": {
"bar": {
"type": "someType",
"id": "ga241ghs"
},
"tags": [
{
"#tagId": "123",
"tagAttributes": {
"attr1": "AAA",
"attr2": "111"
}
},
{
"#tagId": "456",
"tagAttributes": {
"attr1": "BBB",
"attr2": "222"
}
}
]
},
"text": "My text"
}
Actually it's not split to multiple lines (just did it to give a better overview), so it's looking like this:
{"foo":{"bar":{"type":"someType","id":"ga241ghs"},"tags":[{"#tagId":"123","tagAttributes":{"attr1":404,"attr2":416}},{"#tagId":"456","tagAttributes":{"attr1":1096,"attr2":1103}}]},"text":"My text"}
I want to insert this JSON into an Elasticsearch index with Logstash. However, I want to insert a flattened JSON with the fields inside the array combined like this:
"foo.tags.tagId": ["123", "456"]
"foo.tags.tagAttributes.attr1": ["AAA", "BBB"]
"foo.tags.tagAttributes.attr2": ["111", "222"]
In total, the data inserted into Elasticsearch should look like this:
"foo.bar.type": "someType"
"foo.bar.id": "ga241ghs"
"foo.tags.tagId": ["123", "456"]
"foo.tags.tagAttributes.attr1": ["AAA", "BBB"]
"foo.tags.tagAttributes.attr2": ["111", "222"]
"foo.text": "My text"
This is my current Logstash .conf; I am able to split the "tags" array, but now I am getting 2 entries as a result.
How can I now join all tagIds to one field, attr1 values of the array to one field, and all attr2 values to another?
input {
  file {
    codec => json
    path => ["/path/to/my/data/*.json"]
    mode => "read"
    file_completed_action => "log"
    file_completed_log_path => ["/path/to/my/logfile"]
    sincedb_path => "/dev/null"
  }
}
filter {
  split {
    field => "[foo][tags]"
  }
}
output {
  stdout { codec => rubydebug }
}
Thanks a lot!
Nice example for my JSON iterator IIFE: no need for complex algorithms, just pick DepthFirst with a slightly modified Path (new "raw" version) and that is it.
If you like this JS answer, please consider ticking the accept flag under the voting buttons.
If you want a different language, I also have a C# parser with similar iterators on the same GitHub.
var src = {"foo":{"bar":{"type":"someType","id":"ga241ghs"},"tags":[{"#tagId":"123","tagAttributes":{"attr1":"AAA","attr2":"111"}},{"#tagId":"456","tagAttributes":{"attr1":"BBB","attr2":"222"}}],"text":"My text"}};
//console.log(JSON.stringify(src, null, 2));

function traverse(it) {
  var dest = {};
  do {
    if (it.Current().HasStringValue()) {
      var pathKey = it.Path(true).join('.');
      var check = dest[pathKey];
      if (check) {
        // same path already seen: turn it into an array and append
        if (!(check instanceof Array)) dest[pathKey] = [check];
        dest[pathKey].push(it.Value());
      } else {
        dest[pathKey] = it.Value();
      }
    }
    //console.log(it.Level + '\t' + it.Path(1).join('.') + '\t' + it.KeyDots(), (it.Value() instanceof Object) ? "-" : it.Value());
  } while (it.DepthFirst());
  console.log(JSON.stringify(dest, null, 2));
  return dest;
}
/*
* https://github.com/eltomjan/ETEhomeTools/blob/master/HTM_HTA/JSON_Iterator_IIFE.js
* +new raw Path feature
*/
'use strict';
var JNode = (function (jsNode) {
function JNode(_parent, _pred, _key, _value) {
this.parent = _parent;
this.pred = _pred;
this.node = null;
this.next = null;
this.key = _key;
this.value = _value;
}
JNode.prototype.HasOwnKey = function () { return this.key && (typeof this.key != "number"); }
JNode.prototype.HasStringValue = function () { return !(this.value instanceof Object); }
return JNode;
})();
var JIterator = (function (json) {
var root, current, maxLevel = -1;
function JIterator(json, parent) {
if (parent === undefined) parent = null;
var pred = null, localCurrent;
for (var child in json) {
var obj = json[child] instanceof Object;
if (json instanceof Array) child = parseInt(child); // non-associative array
if (!root) root = localCurrent = new JNode(parent, null, child, json[child]);
else {
localCurrent = new JNode(parent, pred, child, obj ? ((json[child] instanceof Array) ? [] : {}) : json[child]);
}
if (pred) pred.next = localCurrent;
if (parent && parent.node == null) parent.node = localCurrent;
pred = localCurrent;
if (obj) {
var memPred = pred;
JIterator(json[child], pred);
pred = memPred;
}
}
if (this) {
current = root;
this.Level = 0;
}
}
JIterator.prototype.Current = function () { return current; }
JIterator.prototype.SetCurrent = function (newCurrent) {
current = newCurrent;
this.Level = 0;
while(newCurrent = newCurrent.parent) this.Level++;
}
JIterator.prototype.Parent = function () {
var retVal = current.parent;
if (retVal == null) return false;
this.Level--;
return current = retVal;
}
JIterator.prototype.Pred = function () {
var retVal = current.pred;
if (retVal == null) return false;
return current = retVal;
}
JIterator.prototype.Node = function () {
var retVal = current.node;
if (retVal == null) return false;
this.Level++;
return current = retVal;
}
JIterator.prototype.Next = function () {
var retVal = current.next;
if (retVal == null) return false;
return current = retVal;
}
JIterator.prototype.Key = function () { return current.key; }
JIterator.prototype.KeyDots = function () { return (typeof (current.key) == "number") ? "" : (current.key + ':'); }
JIterator.prototype.Value = function () { return current.value; }
JIterator.prototype.Reset = function () {
current = root;
this.Level = 0;
}
JIterator.prototype.RawPath = function () {
var steps = [], level = current;
do {
if (level != null && level.value instanceof Object) {
steps.push(level.key + (level.value instanceof Array ? "[]" : "{}"));
} else {
if (level != null) steps.push(level.key);
else break;
}
level = level.parent;
} while (level != null);
var retVal = "";
retVal = steps.reverse();
return retVal;
}
JIterator.prototype.Path = function (raw) {
var steps = [], level = current;
do {
if (level != null && level.value instanceof Object) {
var size = 0;
var items = level.node;
if (typeof (level.key) == "number" && !raw) steps.push('[' + level.key + ']');
else {
if(raw) {
if (typeof (level.key) != "number") steps.push(level.key);
} else {
while (items) {
size++;
items = items.next;
}
var type = (level.value instanceof Array ? "[]" : "{}");
var prev = steps[steps.length - 1];
if (prev && prev[0] == '[') {
var last = prev.length - 1;
if (prev[last] == ']') {
last--;
if (!isNaN(prev.substr(1, last))) {
steps.pop();
size += '.' + prev.substr(1, last);
}
}
}
steps.push(level.key + type[0] + size + type[1]);
}
}
} else {
if (level != null) {
if (typeof (level.key) == "number") steps.push('[' + level.key + ']');
else steps.push(level.key);
}
else break;
}
level = level.parent;
} while (level != null);
var retVal = "";
retVal = steps.reverse();
return retVal;
}
JIterator.prototype.DepthFirst = function () {
if (current == null) return 0; // exit sign
if (current.node != null) {
current = current.node;
this.Level++;
if (maxLevel < this.Level) maxLevel = this.Level;
return 1; // moved down
} else if (current.next != null) {
current = current.next;
return 2; // moved right
} else {
while (current != null) {
if (current.next != null) {
current = current.next;
return 3; // returned up & moved next
}
this.Level--;
current = current.parent;
}
}
return 0; // exit sign
}
JIterator.prototype.BreadthFirst = function () {
if (current == null) return 0; // exit sign
if (current.next) {
current = current.next;
return 1; // moved right
} else if (current.parent) {
var level = this.Level, point = current;
while (this.DepthFirst() && level != this.Level);
if (current) return 2; // returned up & moved next
do {
this.Reset();
level++;
while (this.DepthFirst() && level != this.Level);
if (current) return 3; // returned up & moved next
} while (maxLevel >= level);
return current != null ? 3 : 0;
} else if (current.node) {
current = current.node;
return 3;
} else if (current.pred) {
while (current.pred) current = current.pred;
while (current && !current.node) current = current.next;
if (!current) return null;
else return this.DepthFirst();
}
}
JIterator.prototype.ReadArray = function () {
var retVal = {};
var item = current;
do {
if (item.value instanceof Object) {
if (item.value.length == 0) retVal[item.key] = item.node;
else retVal[item.key] = item;
} else retVal[item.key] = item.value;
item = item.next;
} while (item != null);
return retVal;
}
JIterator.prototype.FindKey = function (key) {
var pos = current;
while (current && current.key != key) this.DepthFirst();
if (current.key == key) {
var retVal = current;
current = pos;
return retVal;
} else {
current = pos;
return null;
}
}
return JIterator;
})();
traverse(new JIterator(src));
Your short JSON version was different; I am now using this one, which matches your required results (attrs changed and "text" moved from the root under "foo"):
{
"foo": {
"bar": {
"type": "someType",
"id": "ga241ghs"
},
"tags": [
{
"#tagId": "123",
"tagAttributes": {
"attr1": "AAA",
"attr2": "111"
}
},
{
"#tagId": "456",
"tagAttributes": {
"attr1": "BBB",
"attr2": "222"
}
}
],
"text": "My text"
}
}
I figured out how to do it with a Ruby filter directly in Logstash. For anyone searching for this in the future, here is an example of how to do it for #tagId:
filter {
  ruby {
    code => '
      i = 0
      tagId_array = Array.new
      while i < event.get( "[foo][tags]" ).length do
        tagId_array = tagId_array.push(event.get( "[foo][tags][" + i.to_s + "][#tagId]" ))
        i += 1
      end
      event.set( "foo.tags.tagId", tagId_array )
    '
  }
}

Formatting DynamoDB data to normal JSON in AWS Lambda

I'm using AWS Lambda to scan data from a DynamoDB table. This is what I get in return:
{
"videos": [
{
"file": {
"S": "file1.mp4"
},
"id": {
"S": "1"
},
"canvas": {
"S": "This is Canvas1"
}
},
{
"file": {
"S": "main.mp4"
},
"id": {
"S": "0"
},
"canvas": {
"S": "this is a canvas"
}
}
]
}
My front-end application is using the Ember Data REST Adapter, which does not accept such a response. Is there any way I can get a normal JSON format? There is an NPM module called dynamodb-marshaler to convert DynamoDB data to normal JSON. I'm looking for a native solution if possible.
Node.js
Use the unmarshall function from the AWS JavaScript SDK:
const AWS = require("aws-sdk");

exports.handler = function(event, context, callback) {
  const newImages = event.Records.map(
    (record) => AWS.DynamoDB.Converter.unmarshall(record.dynamodb.NewImage)
  );
  console.log('Converted records', newImages);
  callback(null, `Success`);
};
Python
Use TypeDeserializer.deserialize from boto3.dynamodb.types:
import json
from boto3.dynamodb.types import TypeDeserializer

def ddb_deserialize(r, type_deserializer=TypeDeserializer()):
    return type_deserializer.deserialize({"M": r})

def lambda_handler(event, context):
    new_images = [ddb_deserialize(r["dynamodb"]["NewImage"]) for r in event['Records']]
    print('Converted records', json.dumps(new_images, indent=2))
I know this is a bit old, but I had the same problem processing stream data from DynamoDB in a Node.js Lambda function. I used the approach proposed by @churro.
Import the SDK and the output converter:
var AWS = require("aws-sdk");
var parse = AWS.DynamoDB.Converter.output;
Use the parse function with a small hack:
exports.handler = function(event, context, callback) {
  var docClient = new AWS.DynamoDB.DocumentClient();
  event.Records.forEach((record) => {
    console.log(record.eventID);
    console.log(record.eventName);
    console.log('DynamoDB Record:', parse({ "M": record.dynamodb.NewImage }));
  });
  callback(null, `Successfully processed ${event.Records.length} records.`);
};
Hope it helps
The AWS JavaScript SDK was recently updated with the Document Client, which does exactly what you need. Check the announcement and usage examples here: http://blogs.aws.amazon.com/javascript/post/Tx1OVH5LUZAFC6T/Announcing-the-Amazon-DynamoDB-Document-Client-in-the-AWS-SDK-for-JavaScript
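For reference, a minimal sketch of a scan through the Document Client (the table name is a placeholder); it returns items as plain JavaScript objects, so no unmarshalling step is needed:

const AWS = require("aws-sdk");
const docClient = new AWS.DynamoDB.DocumentClient();

exports.handler = function(event, context, callback) {
  // Items come back already unwrapped, e.g. { file: "file1.mp4", id: "1", canvas: "This is Canvas1" }
  docClient.scan({ TableName: "videos" }, function(err, data) {
    if (err) return callback(err);
    callback(null, { videos: data.Items });
  });
};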
JavaScript: the AWS SDK provides the unmarshall function.
Python: use TypeDeserializer from boto3.dynamodb.types:
from boto3.dynamodb.types import TypeDeserializer, TypeSerializer

def from_dynamodb_to_json(item):
    d = TypeDeserializer()
    return {k: d.deserialize(value=v) for k, v in item.items()}

## Usage:
from_dynamodb_to_json({
    "Day": {"S": "Monday"},
    "mylist": {"L": [{"S": "Cookies"}, {"S": "Coffee"}, {"N": "3.14159"}]}
})
# {'Day': 'Monday', 'mylist': ['Cookies', 'Coffee', Decimal('3.14159')]}
Here you can find a gist which does that:
function mapper(data) {
  let S = "S";
  let SS = "SS";
  let NN = "NN";
  let NS = "NS";
  let BS = "BS";
  let BB = "BB";
  let N = "N";
  let BOOL = "BOOL";
  let NULL = "NULL";
  let M = "M";
  let L = "L";

  if (isObject(data)) {
    let keys = Object.keys(data);
    while (keys.length) {
      let key = keys.shift();
      let types = data[key];
      if (isObject(types) && types.hasOwnProperty(S)) {
        data[key] = types[S];
      } else if (isObject(types) && types.hasOwnProperty(N)) {
        data[key] = parseFloat(types[N]);
      } else if (isObject(types) && types.hasOwnProperty(BOOL)) {
        data[key] = types[BOOL];
      } else if (isObject(types) && types.hasOwnProperty(NULL)) {
        data[key] = null;
      } else if (isObject(types) && types.hasOwnProperty(M)) {
        data[key] = mapper(types[M]);
      } else if (isObject(types) && types.hasOwnProperty(L)) {
        data[key] = mapper(types[L]);
      } else if (isObject(types) && types.hasOwnProperty(SS)) {
        data[key] = types[SS];
      } else if (isObject(types) && types.hasOwnProperty(NN)) {
        data[key] = types[NN];
      } else if (isObject(types) && types.hasOwnProperty(BB)) {
        data[key] = types[BB];
      } else if (isObject(types) && types.hasOwnProperty(NS)) {
        data[key] = types[NS];
      } else if (isObject(types) && types.hasOwnProperty(BS)) {
        data[key] = types[BS];
      }
    }
  }
  return data;

  function isObject(value) {
    return typeof value === "object" && value !== null;
  }
}
https://gist.github.com/igorzg/c80c0de4ad5c4028cb26cfec415cc600
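For example, calling it on a single DynamoDB-shaped item (a small illustration; note that the function mutates and returns the object it is given):

const plain = mapper({
  file: { S: "file1.mp4" },
  id: { S: "1" },
  canvas: { S: "This is Canvas1" }
});
// plain => { file: "file1.mp4", id: "1", canvas: "This is Canvas1" }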
If you are using Python in the Lambda, you can use the dynamodb-json library.
Install the library:
pip install dynamodb-json
and use the snippet below:
from dynamodb_json import json_util as util

def marshall(regular_json):
    return util.dumps(regular_json)

def unmarshall(dynamodb_json):
    return util.loads(dynamodb_json)
Reference
https://pypi.org/project/dynamodb-json/
I think it's just a custom transformation exercise for each app. A simple conversion from DynamoDB's item format to your application format might look like this:
var response = {...}; // your response from DynamoDB
var formattedObjects = response.videos.map(function(video) {
  return {
    "file": video.file.S,
    "id": video.id.S,
    "canvas": video.canvas.S
  };
});
If you want to build a generic system for this, you would have to handle DynamoDB's various AttributeValue types. A function like the one below would do the job, but I've left out the hard work of handling most of DynamoDB's more complex attribute value types:
function dynamoItemToPlainObj(dynamoItem) {
  var plainObj = {};
  for (var attributeName in dynamoItem) {
    var attribute = dynamoItem[attributeName];
    var attributeValue;
    for (var itemType in attribute) {
      switch (itemType) {
        case "S":
          attributeValue = attribute.S.toString();
          break;
        case "N":
          attributeValue = Number(attribute.N);
          break;
        // more attribute types...
        default:
          attributeValue = attribute[itemType].toString();
          break;
      }
    }
    plainObj[attributeName] = attributeValue;
  }
  return plainObj;
}
var formattedObjects = response.videos.map(dynamoItemToPlainObj);
I tried several solutions here, but none worked with multi-level data, for example when it includes a list of maps:
{
"item1": {
"M": {
"sub-item1": {
"L": [
{
"M": {
"sub-item1-list-map": {
"S": "value"
The function below, adapted from @igorzg's answer (which also has that drawback), fixes that.
Example usage:
dynamodb.getItem({...}, function(err, data) {
  if (!err && data && data.Item) {
    var converted = ddb_to_json(data.Item);
  }
});
Here's the conversion function:
function ddb_to_json(data) {
  function isObject(value) {
    return typeof value === "object" && value !== null;
  }
  if (isObject(data))
    return convert_ddb({ M: data });

  function convert_ddb(ddbData) {
    if (isObject(ddbData) && ddbData.hasOwnProperty('S'))
      return ddbData.S;
    if (isObject(ddbData) && ddbData.hasOwnProperty('N'))
      return parseFloat(ddbData.N);
    if (isObject(ddbData) && ddbData.hasOwnProperty('BOOL'))
      return ddbData.BOOL;
    if (isObject(ddbData) && ddbData.hasOwnProperty('NULL'))
      return null;
    if (isObject(ddbData) && ddbData.hasOwnProperty('M')) {
      var x = {};
      for (var k in ddbData.M)
        x[k] = convert_ddb(ddbData.M[k]);
      return x;
    }
    if (isObject(ddbData) && ddbData.hasOwnProperty('L'))
      return ddbData.L.map(x => convert_ddb(x));
    if (isObject(ddbData) && ddbData.hasOwnProperty('SS'))
      return ddbData.SS;
    if (isObject(ddbData) && ddbData.hasOwnProperty('NN'))
      return ddbData.NN;
    if (isObject(ddbData) && ddbData.hasOwnProperty('BB'))
      return ddbData.BB;
    if (isObject(ddbData) && ddbData.hasOwnProperty('NS'))
      return ddbData.NS;
    if (isObject(ddbData) && ddbData.hasOwnProperty('BS'))
      return ddbData.BS;
    return ddbData; // fall through for unrecognised value shapes
  }
  return data;
}
If you need an online editor, try this:
https://2json.net/dynamo