How to parse Telnyx JSON in Google Sheets

I am looking for a way to parse the JSON received from Telnyx (through a webhook) in a Google spreadsheet, using in-cell formulas.
The ideal outcome is to have the data from each line in a separate column (including lines with no data), with the correct heading.
JS would also be an option.
I've tried using FILTER and REGEXMATCH with some results, but I couldn't manage to get a decent output.
Any help would be greatly appreciated.
The data received from Telnyx looks like this:
"{
""data"": {
""event_type"": ""message.received"",
""id"": ""0d9c22"",
""occurred_at"": ""2022-07-23T04:52:08.642+00:00"",
""payload"": {
""cc"": [],
""completed_at"": null,
""cost"": null,
""direction"": ""inbound"",
""encoding"": ""GSM-7"",
""errors"": [],
""from"": {
""carrier"": """",
""line_type"": """",
""phone_number"": ""+447""
},
""id"": ""eb17"",
""media"": [],
""messaging_profile_id"": ""4001"",
""organization_id"": ""8ab"",
""parts"": 2,
""received_at"": ""2022-07-20T04:52:08.464+00:00"",
""record_type"": ""message"",
""sent_at"": null,
""subject"": """",
""tags"": [],
""text"": ""SMS goes here.\nThanks"",
""to"": [
{
""carrier"": ""Telnyx"",
""line_type"": ""Wireless"",
""phone_number"": ""+447"",
""status"": ""webhook_delivered""
}
],
""type"": ""SMS"",
""valid_until"": null,
""webhook_failover_url"": null,
""webhook_url"": ""https://script.google.com/XXXXX""
},
""record_type"": ""event""
},
""meta"": {
""attempt"": 1,
""delivered_to"": ""https://script.google.com/""
}
}"

Try this recursive function (assuming A1 contains the data). Put this formula in a cell:
=functionTelnyx(A1)
and put the following in your script editor:
function functionTelnyx(json) {
  // The cell stores the payload wrapped in quotes with doubled inner quotes, so strip the
  // outer quotes, un-double the quotes, then remove whitespace.
  // Note: the whitespace replace also strips spaces inside string values (e.g. the SMS text).
  const telnyx = JSON.parse(json.slice(1, -1).replace(/""/g, '"').replace(/(\r\n|\n|\r|\t| )/gm, ""));
  let result = [];
  recursion(telnyx);
  // Walk the object tree and collect a [key, value] pair for every scalar leaf.
  function recursion(obj, path) {
    if (path == undefined) { path = 'telnyx' }
    const regex = new RegExp('[^0-9]+'); // non-numeric keys get dot notation, numeric keys get [index]
    for (let p in obj) {
      let newPath = (regex.test(p)) ? path + '.' + p : path + '[' + p + ']';
      if (obj[p] != null) {
        if (typeof obj[p] != 'object' && typeof obj[p] != 'function') {
          result.push([p, obj[p]]);
        }
        if (typeof obj[p] == 'object') {
          recursion(obj[p], newPath);
        }
      }
    }
  }
  return result;
}
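With the sample payload above, the formula should spill a two-column range, keys on the left and values on the right, roughly like this (nulls and empty arrays are skipped by the != null and object checks):
event_type	message.received
id	0d9c22
occurred_at	2022-07-23T04:52:08.642+00:00
direction	inbound
encoding	GSM-7
...	(one row per scalar field)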
Edit:
For multiple JSON strings and a horizontal layout:
for the headers, put =telnyx_headers(A2) in B1;
for the contents, put =telnyx_contents(A2) in B2 and drag it down (=telnyx_contents(A3), and so forth),
with:
function telnyx_headers(json) {
  const telnyx = JSON.parse(json.slice(1, -1).replace(/""/g, '"').replace(/(\r\n|\n|\r|\t| )/gm, ""));
  let result = [];
  recursion(telnyx);
  // Same walk as functionTelnyx, but collect only the keys.
  function recursion(obj, path) {
    if (path == undefined) { path = 'telnyx' }
    const regex = new RegExp('[^0-9]+');
    for (let p in obj) {
      let newPath = (regex.test(p)) ? path + '.' + p : path + '[' + p + ']';
      if (obj[p] != null) {
        if (typeof obj[p] != 'object' && typeof obj[p] != 'function') {
          result.push(p);
        }
        if (typeof obj[p] == 'object') {
          recursion(obj[p], newPath);
        }
      }
    }
  }
  return [result];
}
function telnyx_contents(json) {
  const telnyx = JSON.parse(json.slice(1, -1).replace(/""/g, '"').replace(/(\r\n|\n|\r|\t| )/gm, ""));
  let result = [];
  recursion(telnyx);
  // Same walk again, but collect only the values.
  function recursion(obj, path) {
    if (path == undefined) { path = 'telnyx' }
    const regex = new RegExp('[^0-9]+');
    for (let p in obj) {
      let newPath = (regex.test(p)) ? path + '.' + p : path + '[' + p + ']';
      if (obj[p] != null) {
        if (typeof obj[p] != 'object' && typeof obj[p] != 'function') {
          result.push(obj[p]);
        }
        if (typeof obj[p] == 'object') {
          recursion(obj[p], newPath);
        }
      }
    }
  }
  return [result];
}
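The resulting layout should then look roughly like this: one header row produced by telnyx_headers() in row 1, and one row of values per JSON string below it:
B1: event_type | id | occurred_at | direction | encoding | ...
B2: message.received | 0d9c22 | 2022-07-23T04:52:08.642+00:00 | inbound | GSM-7 | ...
B3: (values from the JSON in A3, and so on)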
optional: array-enabled version
To process a larger number of JSON strings in the column A2:A, use this formula:
=telnyx(A2:A)
The telnyx() function uses telnyx_headers() and telnyx_contents() while observing custom function best practices.
/**
 * Converts one or more JSON strings to a horizontal table.
 * Row 1 will show field names and subsequent rows the
 * data in each field.
 *
 * @param {A2:A} json The JSON strings to convert to a table.
 * @customfunction
 */
function telnyx(json) {
  'use strict';
  if (!Array.isArray(json)) {
    json = [json];
  }
  json = json.flat();
  let result = telnyx_headers(json[0]);
  json.forEach(j => result = result.concat(j ? telnyx_contents(j) : [null]));
  return result;
}
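If you want to sanity-check the function from the script editor rather than from a cell, here is a minimal sketch, assuming the sample payload sits in A2 of the active sheet (test_telnyx is a hypothetical helper, not required for the in-cell formula):
function test_telnyx() {
  const cell = SpreadsheetApp.getActiveSheet().getRange('A2').getValue();
  const table = telnyx([[cell]]); // ranges arrive as 2-D arrays, which telnyx() flattens
  Logger.log(table);
}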

Related

Logstash: Flatten nested JSON, combine fields inside array

I have a JSON looking like this:
{
"foo": {
"bar": {
"type": "someType",
"id": "ga241ghs"
},
"tags": [
{
"#tagId": "123",
"tagAttributes": {
"attr1": "AAA",
"attr2": "111"
}
},
{
"#tagId": "456",
"tagAttributes": {
"attr1": "BBB",
"attr2": "222"
}
}
]
},
"text": "My text"
}
Actually it's not split across multiple lines (I just did that to give a better overview), so it looks like this:
{"foo":{"bar":{"type":"someType","id":"ga241ghs"},"tags":[{"#tagId":"123","tagAttributes":{"attr1":404,"attr2":416}},{"#tagId":"456","tagAttributes":{"attr1":1096,"attr2":1103}}]},"text":"My text"}
I want to insert this JSON into an Elasticsearch index with Logstash. However, I want to insert a flattened JSON with the fields in the array combined, like this:
"foo.bar.tags.tagId": ["123", "456"]
"foo.tags.tagAttributs.attr1": ["AAA", "BBB"]
"foo.tags.tagAttributs.attr2": ["111", "222"]
In total, the data inserted to Elasticsearch should look like this:
"foo.bar.type": "someType"
"foo.bar.id": "ga241ghs"
"foo.tags.tagId": ["123", "456"]
"foo.tags.tagAttributs.attr1": ["AAA", "BBB"]
"foo.tags.tagAttributs.attr2": ["111", "222"]
"foo.text": "My text"
This is my current Logstash .conf; I am able to split the "tags" array, but now I get two separate events as a result.
How can I now join all tagIds to one field, attr1 values of the array to one field, and all attr2 values to another?
input {
  file {
    codec => json
    path => ["/path/to/my/data/*.json"]
    mode => "read"
    file_completed_action => "log"
    file_completed_log_path => ["/path/to/my/logfile"]
    sincedb_path => "/dev/null"
  }
}
filter {
  split {
    field => "[foo][tags]"
  }
}
output {
  stdout { codec => rubydebug }
}
Thanks a lot!
This is a nice example for my JSON iterator IIFE: no need for complex algorithms, just pick DepthFirst with a slightly modified path (the new "raw" version) and that is it.
If you would prefer a different language, there is also a C# parser with similar iterators in the same GitHub repository.
var src = {"foo":{"bar":{"type":"someType","id":"ga241ghs"},"tags":[{"#tagId":"123","tagAttributes":{"attr1":"AAA","attr2":"111"}},{"#tagId":"456","tagAttributes":{"attr1":"BBB","attr2":"222"}}],"text":"My text"}};
//console.log(JSON.stringify(src, null, 2));
function traverse(it) {
  var dest = {};
  do {
    if (it.Current().HasStringValue()) {
      var pathKey = it.Path(true).join('.');
      var check = dest[pathKey];
      if (check) {
        // Same flattened path seen before: turn the value into an array and append.
        if (!(check instanceof Array)) dest[pathKey] = [check];
        dest[pathKey].push(it.Value());
      } else {
        dest[pathKey] = it.Value();
      }
    }
    //console.log(it.Level + '\t' + it.Path(1).join('.') + '\t' + it.KeyDots(), (it.Value() instanceof Object) ? "-" : it.Value());
  } while (it.DepthFirst());
  console.log(JSON.stringify(dest, null, 2));
  return dest;
}
/*
* https://github.com/eltomjan/ETEhomeTools/blob/master/HTM_HTA/JSON_Iterator_IIFE.js
* +new raw Path feature
*/
'use strict';
var JNode = (function (jsNode) {
function JNode(_parent, _pred, _key, _value) {
this.parent = _parent;
this.pred = _pred;
this.node = null;
this.next = null;
this.key = _key;
this.value = _value;
}
JNode.prototype.HasOwnKey = function () { return this.key && (typeof this.key != "number"); }
JNode.prototype.HasStringValue = function () { return !(this.value instanceof Object); }
return JNode;
})();
var JIterator = (function (json) {
var root, current, maxLevel = -1;
function JIterator(json, parent) {
if (parent === undefined) parent = null;
var pred = null, localCurrent;
for (var child in json) {
var obj = json[child] instanceof Object;
if (json instanceof Array) child = parseInt(child); // non-associative array
if (!root) root = localCurrent = new JNode(parent, null, child, json[child]);
else {
localCurrent = new JNode(parent, pred, child, obj ? ((json[child] instanceof Array) ? [] : {}) : json[child]);
}
if (pred) pred.next = localCurrent;
if (parent && parent.node == null) parent.node = localCurrent;
pred = localCurrent;
if (obj) {
var memPred = pred;
JIterator(json[child], pred);
pred = memPred;
}
}
if (this) {
current = root;
this.Level = 0;
}
}
JIterator.prototype.Current = function () { return current; }
JIterator.prototype.SetCurrent = function (newCurrent) {
current = newCurrent;
this.Level = 0;
while(newCurrent = newCurrent.parent) this.Level++;
}
JIterator.prototype.Parent = function () {
var retVal = current.parent;
if (retVal == null) return false;
this.Level--;
return current = retVal;
}
JIterator.prototype.Pred = function () {
var retVal = current.pred;
if (retVal == null) return false;
return current = retVal;
}
JIterator.prototype.Node = function () {
var retVal = current.node;
if (retVal == null) return false;
this.Level++;
return current = retVal;
}
JIterator.prototype.Next = function () {
var retVal = current.next;
if (retVal == null) return false;
return current = retVal;
}
JIterator.prototype.Key = function () { return current.key; }
JIterator.prototype.KeyDots = function () { return (typeof (current.key) == "number") ? "" : (current.key + ':'); }
JIterator.prototype.Value = function () { return current.value; }
JIterator.prototype.Reset = function () {
current = root;
this.Level = 0;
}
JIterator.prototype.RawPath = function () {
var steps = [], level = current;
do {
if (level != null && level.value instanceof Object) {
steps.push(level.key + (level.value instanceof Array ? "[]" : "{}"));
} else {
if (level != null) steps.push(level.key);
else break;
}
level = level.parent;
} while (level != null);
var retVal = "";
retVal = steps.reverse();
return retVal;
}
JIterator.prototype.Path = function (raw) {
var steps = [], level = current;
do {
if (level != null && level.value instanceof Object) {
var size = 0;
var items = level.node;
if (typeof (level.key) == "number" && !raw) steps.push('[' + level.key + ']');
else {
if(raw) {
if (typeof (level.key) != "number") steps.push(level.key);
} else {
while (items) {
size++;
items = items.next;
}
var type = (level.value instanceof Array ? "[]" : "{}");
var prev = steps[steps.length - 1];
if (prev && prev[0] == '[') {
var last = prev.length - 1;
if (prev[last] == ']') {
last--;
if (!isNaN(prev.substr(1, last))) {
steps.pop();
size += '.' + prev.substr(1, last);
}
}
}
steps.push(level.key + type[0] + size + type[1]);
}
}
} else {
if (level != null) {
if (typeof (level.key) == "number") steps.push('[' + level.key + ']');
else steps.push(level.key);
}
else break;
}
level = level.parent;
} while (level != null);
var retVal = "";
retVal = steps.reverse();
return retVal;
}
JIterator.prototype.DepthFirst = function () {
if (current == null) return 0; // exit sign
if (current.node != null) {
current = current.node;
this.Level++;
if (maxLevel < this.Level) maxLevel = this.Level;
return 1; // moved down
} else if (current.next != null) {
current = current.next;
return 2; // moved right
} else {
while (current != null) {
if (current.next != null) {
current = current.next;
return 3; // returned up & moved next
}
this.Level--;
current = current.parent;
}
}
return 0; // exit sign
}
JIterator.prototype.BreadthFirst = function () {
if (current == null) return 0; // exit sign
if (current.next) {
current = current.next;
return 1; // moved right
} else if (current.parent) {
var level = this.Level, point = current;
while (this.DepthFirst() && level != this.Level);
if (current) return 2; // returned up & moved next
do {
this.Reset();
level++;
while (this.DepthFirst() && level != this.Level);
if (current) return 3; // returned up & moved next
} while (maxLevel >= level);
return current != null ? 3 : 0;
} else if (current.node) {
current = current.node;
return 3;
} else if (current.pred) {
while (current.pred) current = current.pred;
while (current && !current.node) current = current.next;
if (!current) return null;
else return this.DepthFirst();
}
}
JIterator.prototype.ReadArray = function () {
var retVal = {};
var item = current;
do {
if (item.value instanceof Object) {
if (item.value.length == 0) retVal[item.key] = item.node;
else retVal[item.key] = item;
} else retVal[item.key] = item.value;
item = item.next;
} while (item != null);
return retVal;
}
JIterator.prototype.FindKey = function (key) {
var pos = current;
while (current && current.key != key) this.DepthFirst();
if (current.key == key) {
var retVal = current;
current = pos;
return retVal;
} else {
current = pos;
return null;
}
}
return JIterator;
})();
traverse(new JIterator(src));
Your short (one-line) JSON version was different, so I am now using this one, which matches your required results (attr values changed back and "text" moved from the root under "foo"):
{
"foo": {
"bar": {
"type": "someType",
"id": "ga241ghs"
},
"tags": [
{
"#tagId": "123",
"tagAttributes": {
"attr1": "AAA",
"attr2": "111"
}
},
{
"#tagId": "456",
"tagAttributes": {
"attr1": "BBB",
"attr2": "222"
}
}
],
"text": "My text"
}
}
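With that input, the flattened object logged by traverse() should come out roughly like this (note that the "#tagId" key keeps its "#" prefix):
{
  "foo.bar.type": "someType",
  "foo.bar.id": "ga241ghs",
  "foo.tags.#tagId": ["123", "456"],
  "foo.tags.tagAttributes.attr1": ["AAA", "BBB"],
  "foo.tags.tagAttributes.attr2": ["111", "222"],
  "foo.text": "My text"
}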
I figured out how to do it with a Ruby filter directly in Logstash. For everyone searching for this in the future, here is an example of how to do it for #tagId:
filter {
  ruby {
    code => '
      i = 0
      tagId_array = Array.new
      while i < event.get( "[foo][tags]" ).length do
        tagId_array = tagId_array.push(event.get( "[foo][tags][" + i.to_s + "][#tagId]" ))
        i += 1
      end
      event.set( "foo.tags.tagId", tagId_array )
    '
  }
}

How to convert Firestore REST API response to normal json?

I am trying to use Firestore from Unity through its REST API. Everything is working as expected so far.
However, when reading a document from Firestore, it returns JSON in a different format,
like this:
{
"name": "projects/firestore-unity-demo-87998/databases/(default)/documents/test/panara",
"fields": {
"health": {
"integerValue": "1008"
},
"name": {
"stringValue": "Bhavin Panara"
},
"birthday": {
"timestampValue": "1992-10-08T04:40:10Z"
},
"alive": {
"booleanValue": true
},
"floatingPointNumber": {
"testFloat": 100.1
}
},
"createTime": "2019-07-30T13:27:09.599079Z",
"updateTime": "2019-07-31T11:41:10.637712Z"
}
How can I convert this kind of JSON to normal JSON like this?
{
"health":1008,
"name":"Bhavin Panara",
"birthday" : "1992-10-08T04:40:10Z",
"alive":true,
"floatingPointNumber":100.1
}
I used the code from firestore-parser
const getFireStoreProp = (value) => {
  const props = {
    arrayValue: 1,
    bytesValue: 1,
    booleanValue: 1,
    doubleValue: 1,
    geoPointValue: 1,
    integerValue: 1,
    mapValue: 1,
    nullValue: 1,
    referenceValue: 1,
    stringValue: 1,
    timestampValue: 1,
  };
  return Object.keys(value).find(k => props[k] === 1);
};
export const fireStoreParser = (value) => {
  let newVal = value;
  // You can use this part to avoid mutating original values
  // let newVal;
  // if (typeof value === 'object') {
  //   newVal = { ...value };
  // } else if (value instanceof Array) {
  //   newVal = value.slice(0);
  // } else {
  //   newVal = value;
  // }
  const prop = getFireStoreProp(newVal);
  if (prop === 'doubleValue' || prop === 'integerValue') {
    newVal = Number(newVal[prop]);
  } else if (prop === 'arrayValue') {
    newVal = ((newVal[prop] && newVal[prop].values) || []).map(v => fireStoreParser(v));
  } else if (prop === 'mapValue') {
    newVal = fireStoreParser((newVal[prop] && newVal[prop].fields) || {});
  } else if (prop === 'geoPointValue') {
    newVal = { latitude: 0, longitude: 0, ...newVal[prop] };
  } else if (prop) {
    newVal = newVal[prop];
  } else if (typeof newVal === 'object') {
    Object.keys(newVal).forEach((k) => { newVal[k] = fireStoreParser(newVal[k]); });
  }
  return newVal;
};
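A hedged usage sketch: fireStoreParser() can be run over the whole REST response object (it converts the values in place), after which the converted fields sit under .fields. The variable response below is assumed to hold the parsed document from the question.
const parsed = fireStoreParser(response); // response: the REST document shown above (assumption)
console.log(parsed.fields);
// → { health: 1008, name: 'Bhavin Panara', birthday: '1992-10-08T04:40:10Z', alive: true, floatingPointNumber: 100.1 }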
// Another option: map each Firestore field wrapper to its plain value.
function toValue(field) {
  return "integerValue" in field
    ? Number(field.integerValue)
    : "doubleValue" in field
    ? Number(field.doubleValue)
    : "arrayValue" in field
    ? field.arrayValue.values.map(toValue)
    : "mapValue" in field
    ? toJSON(field.mapValue)
    : Object.entries(field)[0][1]; // stringValue, booleanValue, timestampValue, etc.
}
function toJSON(doc) {
  return Object.fromEntries(
    Object.entries(doc.fields ?? {}).map(([key, field]) => [key, toValue(field)])
  );
}
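A minimal usage sketch for this variant, assuming the raw REST response body is in a string named responseText (a hypothetical variable):
const doc = JSON.parse(responseText); // hypothetical: the REST response body as a string
console.log(toJSON(doc));
// → { health: 1008, name: 'Bhavin Panara', birthday: '1992-10-08T04:40:10Z', alive: true, floatingPointNumber: 100.1 }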

How should i get matching sub-objects from nested json object

I have an obj like this:
var obj = {
  thing1: { name: 'test', value: 'testvalue1' },
  thing2: { name: 'something', thing4: { name: 'test', value: 'testvalue2' } }
};
I want to write a function like findByName(obj, 'test'). It should return all the matching sub-objects with the same name, so it should return:
{ name: 'test', value: 'testvalue1'}
{name:'test', value: 'testvalue2'}
Right now this is what I have:
function findByName(obj, name) {
  if (obj.name === name) {
    return obj;
  }
  var result, p;
  for (p in obj) {
    if (obj.hasOwnProperty(p) && typeof obj[p] === 'object') {
      result = findByName(obj[p], name);
      if (result) {
        return result;
      }
    }
  }
  return result;
}
Obviously it only returns the first match. How can I improve this method?
You need to push the results into an array and make the function return an array.
Also, do a sanity check whether the object is null or undefined to avoid errors.
Here is your code modified.
Note: I have also modified the parent object, "obj", by adding a "name" property with the value "test", so the parent object should appear in the result as well.
function findByName(obj, name) {
  var result = [], newresult, p;
  if (obj == null || obj == undefined)
    return result;
  if (obj.name === name) {
    result.push(obj);
  }
  for (p in obj) {
    if (obj.hasOwnProperty(p) && typeof obj[p] === 'object') {
      newresult = findByName(obj[p], name);
      if (newresult.length > 0) {
        // concatenate the result with previous results found
        result = result.concat(newresult);
      }
    }
  }
  return result;
}
var obj = {
  thing1: { name: 'test', value: 'testvalue1' },
  thing2: { name: 'something', thing4: { name: 'test', value: 'testvalue2' } },
  name: 'test' // new property added
};
//execute
findByName(obj,"test");
Run this in your console to verify the output.
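For reference, with the modified object above the call should return three matches: the top-level object (because of the added name property) plus the two nested ones. Roughly:
console.log(findByName(obj, 'test'));
// [
//   { thing1: {...}, thing2: {...}, name: 'test' },
//   { name: 'test', value: 'testvalue1' },
//   { name: 'test', value: 'testvalue2' }
// ]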

Formatting DynamoDB data to normal JSON in AWS Lambda

I'm using AWS Lambda to scan data from a DynamoDB table. This is what I get in return:
{
"videos": [
{
"file": {
"S": "file1.mp4"
},
"id": {
"S": "1"
},
"canvas": {
"S": "This is Canvas1"
}
},
{
"file": {
"S": "main.mp4"
},
"id": {
"S": "0"
},
"canvas": {
"S": "this is a canvas"
}
}
]
}
My front-end application is using the Ember Data REST Adapter, which does not accept such a response. Is there any way I can get a normal JSON format? There is an NPM module called dynamodb-marshaler to convert DynamoDB data to normal JSON, but I'm looking for a native solution if possible.
Node.js
Use the unmarshall function from the AWS JavaScript SDK:
const AWS = require("aws-sdk");
exports.handler = function(event, context, callback) {
  const newImages = event.Records.map(
    (record) => AWS.DynamoDB.Converter.unmarshall(record.dynamodb.NewImage)
  );
  console.log('Converted records', newImages);
  callback(null, `Success`);
};
Python
Use TypeDeserializer.deserialize from boto3.dynamodb.types:
import json
from boto3.dynamodb.types import TypeDeserializer

def ddb_deserialize(r, type_deserializer=TypeDeserializer()):
    return type_deserializer.deserialize({"M": r})

def lambda_handler(event, context):
    new_images = [ddb_deserialize(r["dynamodb"]["NewImage"]) for r in event['Records']]
    print('Converted records', json.dumps(new_images, indent=2))
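The snippets above target DynamoDB stream records; for a scan-shaped payload like the one in the question, a hedged Node.js sketch would map the same converter over each item (response is assumed to hold the object shown in the question):
const AWS = require("aws-sdk");
const videos = response.videos.map((item) => AWS.DynamoDB.Converter.unmarshall(item));
// → [ { file: 'file1.mp4', id: '1', canvas: 'This is Canvas1' },
//     { file: 'main.mp4', id: '0', canvas: 'this is a canvas' } ]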
I know this is a bit old, but I had the same problem processing stream data from DynamoDB in a Node.js Lambda function. I used the approach proposed by @churro.
Import the SDK and the output converter:
var AWS = require("aws-sdk");
var parse = AWS.DynamoDB.Converter.output;
Then use the parse function with a small hack:
exports.handler = function(event, context, callback) {
  var docClient = new AWS.DynamoDB.DocumentClient();
  event.Records.forEach((record) => {
    console.log(record.eventID);
    console.log(record.eventName);
    console.log('DynamoDB Record:', parse({ "M": record.dynamodb.NewImage }));
  });
  callback(null, `Successfully processed ${event.Records.length} records.`);
};
Hope it helps
The AWS JavaScript SDK was recently updated with the Document Client, which does exactly what you need. Check the announcement and usage examples here: http://blogs.aws.amazon.com/javascript/post/Tx1OVH5LUZAFC6T/Announcing-the-Amazon-DynamoDB-Document-Client-in-the-AWS-SDK-for-JavaScript
Javascript: AWS SDK provides the unmarshall function
Python: use TypeDeserializer from boto3.dynamodb.types:
from boto3.dynamodb.types import TypeDeserializer, TypeSerializer

def from_dynamodb_to_json(item):
    d = TypeDeserializer()
    return {k: d.deserialize(value=v) for k, v in item.items()}

## Usage:
from_dynamodb_to_json({
    "Day": {"S": "Monday"},
    "mylist": {"L": [{"S": "Cookies"}, {"S": "Coffee"}, {"N": "3.14159"}]}
})
# {'Day': 'Monday', 'mylist': ['Cookies', 'Coffee', Decimal('3.14159')]}
Here you can find a gist which does that:
function mapper(data) {
  let S = "S";
  let SS = "SS";
  let NN = "NN";
  let NS = "NS";
  let BS = "BS";
  let BB = "BB";
  let N = "N";
  let BOOL = "BOOL";
  let NULL = "NULL";
  let M = "M";
  let L = "L";
  if (isObject(data)) {
    let keys = Object.keys(data);
    while (keys.length) {
      let key = keys.shift();
      let types = data[key];
      if (isObject(types) && types.hasOwnProperty(S)) {
        data[key] = types[S];
      } else if (isObject(types) && types.hasOwnProperty(N)) {
        data[key] = parseFloat(types[N]);
      } else if (isObject(types) && types.hasOwnProperty(BOOL)) {
        data[key] = types[BOOL];
      } else if (isObject(types) && types.hasOwnProperty(NULL)) {
        data[key] = null;
      } else if (isObject(types) && types.hasOwnProperty(M)) {
        data[key] = mapper(types[M]);
      } else if (isObject(types) && types.hasOwnProperty(L)) {
        data[key] = mapper(types[L]);
      } else if (isObject(types) && types.hasOwnProperty(SS)) {
        data[key] = types[SS];
      } else if (isObject(types) && types.hasOwnProperty(NN)) {
        data[key] = types[NN];
      } else if (isObject(types) && types.hasOwnProperty(BB)) {
        data[key] = types[BB];
      } else if (isObject(types) && types.hasOwnProperty(NS)) {
        data[key] = types[NS];
      } else if (isObject(types) && types.hasOwnProperty(BS)) {
        data[key] = types[BS];
      }
    }
  }
  return data;

  function isObject(value) {
    return typeof value === "object" && value !== null;
  }
}
https://gist.github.com/igorzg/c80c0de4ad5c4028cb26cfec415cc600
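A hedged usage sketch for the mapper above, applied to the scan result from the question (the function rewrites the typed attributes in place and returns the same objects; response is an assumed variable holding that result):
var plainVideos = response.videos.map(function (item) { return mapper(item); });
// → [ { file: 'file1.mp4', id: '1', canvas: 'This is Canvas1' }, ... ]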
If you are using Python in the Lambda, you can use the dynamodb-json library.
Install the library:
pip install dynamodb-json
and use the snippet below:
from dynamodb_json import json_util as util

def marshall(regular_json):
    return util.dumps(regular_json)

def unmarshall(dynamodb_json):
    return util.loads(dynamodb_json)
Reference
https://pypi.org/project/dynamodb-json/
I think it's just a custom transformation exercise for each app. A simple conversion from DynamoDB's item format to your application's format might look like this:
var response = {...} // your response from DynamoDB
var formattedObjects = response.videos.map(function(video) {
  return {
    "file": video.file.S,
    "id": video.id.S,
    "canvas": video.canvas.S
  };
});
If you want to build a generic system for this, you would have to handle DynamoDB's various AttributeValue types. A function like the one below would do the job, but I've left out the hard work of handling most of DynamoDB's more complex attribute value types:
function dynamoItemToPlainObj(dynamoItem) {
  var plainObj = {};
  for (var attributeName in dynamoItem) {
    var attribute = dynamoItem[attributeName];
    var attributeValue;
    for (var itemType in attribute) {
      switch (itemType) {
        case "S":
          attributeValue = attribute.S.toString();
          break;
        case "N":
          attributeValue = Number(attribute.N);
          break;
        // more attribute types...
        default:
          attributeValue = attribute[itemType].toString();
          break;
      }
    }
    plainObj[attributeName] = attributeValue;
  }
  return plainObj;
}
var formattedObjects = response.videos.map(dynamoItemToPlainObj);
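As an illustration of the "hard work" this answer leaves out, here is a hedged, standalone sketch of how a few more AttributeValue types (BOOL, NULL, L, M) could be handled; convertAttribute is a hypothetical helper, not part of the answer above:
function convertAttribute(attribute) {
  if ("S" in attribute) return attribute.S;
  if ("N" in attribute) return Number(attribute.N);
  if ("BOOL" in attribute) return attribute.BOOL; // already a boolean
  if ("NULL" in attribute) return null;
  if ("L" in attribute) return attribute.L.map(convertAttribute); // list: convert each element
  if ("M" in attribute) { // nested map: recurse over its fields
    var obj = {};
    for (var name in attribute.M) obj[name] = convertAttribute(attribute.M[name]);
    return obj;
  }
  // SS, NS, BS, B, ... omitted
  return attribute[Object.keys(attribute)[0]];
}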
I tried several solutions here but none worked with multi-level data, such as when an item includes a list of maps, e.g.
{
  "item1": {
    "M": {
      "sub-item1": {
        "L": [
          {
            "M": {
              "sub-item1-list-map": {
                "S": "value"
The code below, adapted from @igorzg's answer (which also has that drawback), fixes that.
Example usage:
dynamodb.getItem({...}, function(err, data) {
  if (!err && data && data.Item) {
    var converted = ddb_to_json(data.Item);
Here's the conversion function:
function ddb_to_json(data) {
  function isObject(value) {
    return typeof value === "object" && value !== null;
  }
  if (isObject(data))
    return convert_ddb({ M: data });
  function convert_ddb(ddbData) {
    if (isObject(ddbData) && ddbData.hasOwnProperty('S'))
      return ddbData.S;
    if (isObject(ddbData) && ddbData.hasOwnProperty('N'))
      return parseFloat(ddbData.N);
    if (isObject(ddbData) && ddbData.hasOwnProperty('BOOL'))
      return ddbData.BOOL;
    if (isObject(ddbData) && ddbData.hasOwnProperty('NULL'))
      return null;
    if (isObject(ddbData) && ddbData.hasOwnProperty('M')) {
      var x = {};
      for (var k in ddbData.M)
        x[k] = convert_ddb(ddbData.M[k]);
      return x;
    }
    if (isObject(ddbData) && ddbData.hasOwnProperty('L'))
      return ddbData.L.map(x => convert_ddb(x));
    if (isObject(ddbData) && ddbData.hasOwnProperty('SS'))
      return ddbData.SS;
    if (isObject(ddbData) && ddbData.hasOwnProperty('NN'))
      return ddbData.NN;
    if (isObject(ddbData) && ddbData.hasOwnProperty('BB'))
      return ddbData.BB;
    if (isObject(ddbData) && ddbData.hasOwnProperty('NS'))
      return ddbData.NS;
    if (isObject(ddbData) && ddbData.hasOwnProperty('BS'))
      return ddbData.BS;
    return data;
  }
  return data;
}
If you need an online editor, try this one:
https://2json.net/dynamo

Error in node js while fetching records from mysql database

Routes:
exports.authenticate = function(req, res) {
  //connection.connect();
  var sql = "SELECT * from users where username='" + req.body.user + "' and password='" + req.body.pass + "' LIMIT 1";
  connection.query(sql, function(err, rows, fields) {
    if (err) throw err;
    //res.send('Your data is: ', rows);
    var str = "Hi, <b>" + rows[0].name + "</b> (" + rows[0].email + ")";
    sql = "SELECT username,name from users ORDER BY name";
    connection.query(sql, function(err, datarows, fields) {
      if (err) throw err;
      //res.send('Your data is: ', rows+' <br/> All Users are : ', datarows.length+"<a href='/'>Login</a>");
      console.log(datarows);
      res.render('home.jade', {title: 'Home', result: datarows});
      /*---------------The above calling jade-----------------*/
    });
    //str+="<a href='/'>Login</a>";
    //res.send(str);
    //connection.end();
  });
};
Jade:
extends layout
block content
  //p #{result}
  -var list=[ { username: 'rohankumar1524', name: 'Rohan Kumar' }]
  each item, i in result
    p test #{item.length}
    each role, j in item
      p #{role}
Output
test
rohankumar1524
Rohan Kumar
plus this unusual output:
function (parser, fieldPackets, typeCast, nestTables, connection) { var self = this; var next = function () { return self._typeCast(fieldPacket, parser, connection.config.timezone, connection.config.supportBigNumbers); }; for (var i = 0; i < fieldPackets.length; i++) { var fieldPacket = fieldPackets[i]; var value; if (typeof typeCast == "function") { value = typeCast.apply(connection, [ new Field({ packet: fieldPacket, parser: parser }), next ]); } else { value = (typeCast) ? this._typeCast(fieldPacket, parser, connection.config.timezone, connection.config.supportBigNumbers) : ( (fieldPacket.charsetNr === Charsets.BINARY) ? parser.parseLengthCodedBuffer() : parser.parseLengthCodedString() ); } if (typeof nestTables == "string" && nestTables.length) { this[fieldPacket.table + nestTables + fieldPacket.name] = value; } else if (nestTables) { this[fieldPacket.table] = this[fieldPacket.table] || {}; this[fieldPacket.table][fieldPacket.name] = value; } else { this[fieldPacket.name] = value; } } }
and also this:
function (field, parser, timeZone, supportBigNumbers) { switch (field.type) { case Types.TIMESTAMP: case Types.DATE: case Types.DATETIME: case Types.NEWDATE: var dateString = parser.parseLengthCodedString(); if (dateString === null) { return null; } if (timeZone != 'local') { if (field.type === Types.DATE) { dateString += ' 00:00:00 ' + timeZone; } else { dateString += timeZone; } } return new Date(dateString); case Types.TINY: case Types.SHORT: case Types.LONG: case Types.INT24: case Types.YEAR: case Types.FLOAT: case Types.DOUBLE: case Types.LONGLONG: case Types.NEWDECIMAL: var numberString = parser.parseLengthCodedString(); return (numberString === null || (field.zeroFill && numberString[0] == "0")) ? numberString : ((supportBigNumbers && Number(numberString) > IEEE_754_BINARY_64_PRECISION) ? numberString : Number(numberString)); case Types.BIT: return parser.parseLengthCodedBuffer(); case Types.STRING: case Types.VAR_STRING: case Types.TINY_BLOB: case Types.MEDIUM_BLOB: case Types.LONG_BLOB: case Types.BLOB: return (field.charsetNr === Charsets.BINARY) ? parser.parseLengthCodedBuffer() : parser.parseLengthCodedString(); case Types.GEOMETRY: return parser.parseGeometryValue(); default: return parser.parseLengthCodedString(); } }
But when I use this code in Jade:
extends layout
block content
  //p #{result}
  -var list=[ { username: 'rohankumar1524', name: 'Rohan Kumar' }]
  each item, i in list // list in place of result
    p test #{item.length}
    each role, j in item
      p #{role}
The output is
test
rohankumar1524
Rohan Kumar
which is expected
Why does this problem occur? Is there any other module I have to install via npm?
Try this:
Jade
extends layout
block content
  each item in result
    p= item.username
    p= item.name