I'm using AWS Lambda to scan data from a DynamoDB table. This is what I get in return:
{
"videos": [
{
"file": {
"S": "file1.mp4"
},
"id": {
"S": "1"
},
"canvas": {
"S": "This is Canvas1"
}
},
{
"file": {
"S": "main.mp4"
},
"id": {
"S": "0"
},
"canvas": {
"S": "this is a canvas"
}
}
]
}
My front-end application uses the Ember Data REST adapter, which does not accept such a response. Is there any way I can get normal JSON format? There is an NPM module called dynamodb-marshaler that converts DynamoDB data to normal JSON, but I'm looking for a native solution if possible.
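For reference, the plain JSON equivalent of the response above (what the front end expects) would be:
{
  "videos": [
    { "file": "file1.mp4", "id": "1", "canvas": "This is Canvas1" },
    { "file": "main.mp4", "id": "0", "canvas": "this is a canvas" }
  ]
}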
Node.js
Use the unmarshall function from the AWS JavaScript SDK:
const AWS = require("aws-sdk");

exports.handler = function (event, context, callback) {
  const newImages = event.Records.map(
    (record) => AWS.DynamoDB.Converter.unmarshall(record.dynamodb.NewImage)
  );
  console.log('Converted records', newImages);
  callback(null, `Success`);
};
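Note that this handler unmarshalls DynamoDB stream records. For a Scan response like the one in the question, the same converter can be applied to each element of data.Items; roughly like this (the table name here is just a placeholder):
const AWS = require("aws-sdk");
const dynamodb = new AWS.DynamoDB();

// Scan the table and convert each DynamoDB-typed item to a plain object
dynamodb.scan({ TableName: "videos" }, (err, data) => {
  if (err) return console.error(err);
  const items = data.Items.map((item) => AWS.DynamoDB.Converter.unmarshall(item));
  console.log(items); // e.g. [{ file: 'file1.mp4', id: '1', canvas: 'This is Canvas1' }, ...]
});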
Python
Use TypeDeserializer.deserialize from boto3.dynamodb.types:
import json
from boto3.dynamodb.types import TypeDeserializer

def ddb_deserialize(r, type_deserializer=TypeDeserializer()):
    return type_deserializer.deserialize({"M": r})

def lambda_handler(event, context):
    new_images = [ddb_deserialize(r["dynamodb"]["NewImage"]) for r in event['Records']]
    print('Converted records', json.dumps(new_images, indent=2))
I know this is a bit old, but I had the same problem processing stream data from DynamoDB in a Node.js Lambda function. I used the approach proposed by @churro.
Import the SDK and the output converter:
var AWS = require("aws-sdk");
var parse = AWS.DynamoDB.Converter.output;
Use the parse function with a small hack:
exports.handler = function (event, context, callback) {
  var docClient = new AWS.DynamoDB.DocumentClient();
  event.Records.forEach((record) => {
    console.log(record.eventID);
    console.log(record.eventName);
    console.log('DynamoDB Record:', parse({ "M": record.dynamodb.NewImage }));
  });
  callback(null, `Successfully processed ${event.Records.length} records.`);
};
Hope it helps
The AWS JavaScript SDK was recently updated with the Document Client, which does exactly what you need. Check the announcement and usage examples here: http://blogs.aws.amazon.com/javascript/post/Tx1OVH5LUZAFC6T/Announcing-the-Amazon-DynamoDB-Document-Client-in-the-AWS-SDK-for-JavaScript
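For example, a Scan through the Document Client already returns plain JavaScript objects, so no conversion step is needed (a minimal sketch; the table name is assumed):
var AWS = require("aws-sdk");
var docClient = new AWS.DynamoDB.DocumentClient();

docClient.scan({ TableName: "videos" }, function (err, data) {
  if (err) return console.error(err);
  // data.Items comes back as plain JSON,
  // e.g. [{ file: 'file1.mp4', id: '1', canvas: 'This is Canvas1' }, ...]
  console.log(data.Items);
});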
JavaScript: the AWS SDK provides the unmarshall function.
Python: use TypeDeserializer from boto3.dynamodb.types:
from boto3.dynamodb.types import TypeDeserializer, TypeSerializer

def from_dynamodb_to_json(item):
    d = TypeDeserializer()
    return {k: d.deserialize(value=v) for k, v in item.items()}

## Usage:
from_dynamodb_to_json({
    "Day": {"S": "Monday"},
    "mylist": {"L": [{"S": "Cookies"}, {"S": "Coffee"}, {"N": "3.14159"}]}
})
# {'Day': 'Monday', 'mylist': ['Cookies', 'Coffee', Decimal('3.14159')]}
Here is a gist that does that:
function mapper(data) {
  let S = "S";
  let SS = "SS";
  let NN = "NN";
  let NS = "NS";
  let BS = "BS";
  let BB = "BB";
  let N = "N";
  let BOOL = "BOOL";
  let NULL = "NULL";
  let M = "M";
  let L = "L";
  if (isObject(data)) {
    let keys = Object.keys(data);
    while (keys.length) {
      let key = keys.shift();
      let types = data[key];
      if (isObject(types) && types.hasOwnProperty(S)) {
        data[key] = types[S];
      } else if (isObject(types) && types.hasOwnProperty(N)) {
        data[key] = parseFloat(types[N]);
      } else if (isObject(types) && types.hasOwnProperty(BOOL)) {
        data[key] = types[BOOL];
      } else if (isObject(types) && types.hasOwnProperty(NULL)) {
        data[key] = null;
      } else if (isObject(types) && types.hasOwnProperty(M)) {
        data[key] = mapper(types[M]);
      } else if (isObject(types) && types.hasOwnProperty(L)) {
        data[key] = mapper(types[L]);
      } else if (isObject(types) && types.hasOwnProperty(SS)) {
        data[key] = types[SS];
      } else if (isObject(types) && types.hasOwnProperty(NN)) {
        data[key] = types[NN];
      } else if (isObject(types) && types.hasOwnProperty(BB)) {
        data[key] = types[BB];
      } else if (isObject(types) && types.hasOwnProperty(NS)) {
        data[key] = types[NS];
      } else if (isObject(types) && types.hasOwnProperty(BS)) {
        data[key] = types[BS];
      }
    }
  }
  return data;

  function isObject(value) {
    return typeof value === "object" && value !== null;
  }
}
https://gist.github.com/igorzg/c80c0de4ad5c4028cb26cfec415cc600
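For the scan response in the question, usage would look roughly like this (response being the object shown at the top):
// mapper() converts each typed attribute map in place and returns it
var plainVideos = response.videos.map(function (video) {
  return mapper(video);
});
// [{ file: 'file1.mp4', id: '1', canvas: 'This is Canvas1' },
//  { file: 'main.mp4', id: '0', canvas: 'this is a canvas' }]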
If you are using Python in the Lambda, you can use the dynamodb-json library.
Install the library:
pip install dynamodb-json
and use the snippet below:
from dynamodb_json import json_util as util

def marshall(regular_json):
    dynamodb_json = util.dumps(regular_json)
    return dynamodb_json

def unmarshall(dynamodb_json):
    regular_json = util.loads(dynamodb_json)
    return regular_json
Reference
https://pypi.org/project/dynamodb-json/
I think it's just a custom transformation exercise for each app. A simple conversion from DynamoDB's item format to your application format might look like this:
var response = {...} // your response from DynamoDB
var formattedObjects = response.videos.map(function (video) {
  return {
    "file": video.file.S,
    "id": video.id.S,
    "canvas": video.canvas.S
  };
});
If you want to build a generic system for this, you would have to handle DynamoDB's various AttributeValue types. A function like the one below would do the job, but I've left out the hard work of handling most of DynamoDB's more complex attribute value types:
function dynamoItemToPlainObj(dynamoItem) {
  var plainObj = {};
  for (var attributeName in dynamoItem) {
    var attribute = dynamoItem[attributeName];
    var attributeValue;
    for (var itemType in attribute) {
      switch (itemType) {
        case "S":
          attributeValue = attribute.S.toString();
          break;
        case "N":
          attributeValue = Number(attribute.N);
          break;
        // more attribute types...
        default:
          attributeValue = attribute[itemType].toString();
          break;
      }
    }
    plainObj[attributeName] = attributeValue;
  }
  return plainObj;
}
var formattedObjects = response.videos.map(dynamoItemToPlainObj);
I tried several solutions here, but none worked with multi-level data, for example when it includes a list of maps:
{
"item1": {
"M": {
"sub-item1": {
"L": [
{
"M": {
"sub-item1-list-map": {
"S": "value"
The function below, adapted from @igorzg's answer (which also has that drawback), fixes that.
Example usage:
dynamodb.getItem({...}, function (err, data) {
  if (!err && data && data.Item) {
    var converted = ddb_to_json(data.Item);
    // ... use the converted plain object here
  }
});
Here's the conversion function:
function ddb_to_json(data) {
  function isObject(value) {
    return typeof value === "object" && value !== null;
  }
  if (isObject(data))
    return convert_ddb({ M: data });

  function convert_ddb(ddbData) {
    if (isObject(ddbData) && ddbData.hasOwnProperty('S'))
      return ddbData.S;
    if (isObject(ddbData) && ddbData.hasOwnProperty('N'))
      return parseFloat(ddbData.N);
    if (isObject(ddbData) && ddbData.hasOwnProperty('BOOL'))
      return ddbData.BOOL;
    if (isObject(ddbData) && ddbData.hasOwnProperty('NULL'))
      return null;
    if (isObject(ddbData) && ddbData.hasOwnProperty('M')) {
      var x = {};
      for (var k in ddbData.M)
        x[k] = convert_ddb(ddbData.M[k]);
      return x;
    }
    if (isObject(ddbData) && ddbData.hasOwnProperty('L'))
      return ddbData.L.map(x => convert_ddb(x));
    if (isObject(ddbData) && ddbData.hasOwnProperty('SS'))
      return ddbData.SS;
    if (isObject(ddbData) && ddbData.hasOwnProperty('NN'))
      return ddbData.NN;
    if (isObject(ddbData) && ddbData.hasOwnProperty('BB'))
      return ddbData.BB;
    if (isObject(ddbData) && ddbData.hasOwnProperty('NS'))
      return ddbData.NS;
    if (isObject(ddbData) && ddbData.hasOwnProperty('BS'))
      return ddbData.BS;
    return data;
  }
  return data;
}
If you need an online editor, try this:
https://2json.net/dynamo
Related
I am trying to use a group-by function on a JSON array, using an inner JSON value as the key, as shown below, but I am unable to read the inner JSON value. Here is my JSON array.
NotificationData = [
{
"eventId":"90989",
"eventTime":"2019-12-11T11:20:53+04:00",
"eventType":"yyyy",
"event":{
"ServiceOrder":{
"externalId":"2434",
"priority":"1"
}
}
},
{
"eventId":"6576",
"eventTime":"2019-12-11T11:20:53+04:00",
"eventType":"yyyy",
"event":{
"ServiceOrder":{
"externalId":"78657",
"priority":"1"
}
}
}
]
GroupBy Logic:
const groupBy = (array, key) => {
return array.reduce((result, currentValue) => {
(result[currentValue[key]] = result[currentValue[key]] || []).push(
currentValue
);
return result;
}, {});
};
const serviceOrdersGroupedByExternalId = groupBy(this.NotificationData, 'event.ServiceOrder.externalId');
//this line of code is not working as
// it is unable to locate the external id value.
Desired output
{ "2434":[{
"eventId":"90989",
"eventTime":"2019-12-11T11:20:53+04:00",
"eventType":"yyyy",
"event":{
"ServiceOrder":{ "priority":"1" }
}
}],
"78657":[{
"eventId":"6576",
"eventTime":"2019-12-11T11:20:53+04:00",
"eventType":"yyyy",
"event":{
"ServiceOrder":{ "priority":"1" }
}
}]
}
Does this solve your purpose?
let group = NotificationData.reduce((r, a) => {
let d = r[a.event.ServiceOrder.externalId] = [...r[a.event.ServiceOrder.externalId] || [], a];
return r;
}, {});
console.log(group);
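If you prefer to keep a reusable helper like the groupBy in the question, a common variation (just a sketch) is to accept a key-extractor function instead of a property-name string, since bracket access cannot resolve a dotted path such as 'event.ServiceOrder.externalId':
const groupByKeyFn = (array, keyFn) =>
  array.reduce((result, item) => {
    const key = keyFn(item);
    (result[key] = result[key] || []).push(item);
    return result;
  }, {});

const grouped = groupByKeyFn(NotificationData, item => item.event.ServiceOrder.externalId);
console.log(grouped); // { "2434": [...], "78657": [...] }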
Try like this:
result = {};
constructor() {
let externalIds = this.NotificationData.flatMap(item => item.event.ServiceOrder.externalId);
externalIds.forEach(id => {
var eventData = this.NotificationData.filter(
x => x.event.ServiceOrder.externalId == id
).map(function(item) {
delete item.event.ServiceOrder.externalId;
return item;
});
this.result[id] = eventData;
});
}
Working Demo
I have a JSON looking like this:
{
"foo": {
"bar": {
"type": "someType",
"id": "ga241ghs"
},
"tags": [
{
"#tagId": "123",
"tagAttributes": {
"attr1": "AAA",
"attr2": "111"
}
},
{
"#tagId": "456",
"tagAttributes": {
"attr1": "BBB",
"attr2": "222"
}
}
]
},
"text": "My text"
}
Actually it's not split across multiple lines (I just did that to give a better overview), so it looks like this:
{"foo":{"bar":{"type":"someType","id":"ga241ghs"},"tags":[{"#tagId":"123","tagAttributes":{"attr1":404,"attr2":416}},{"#tagId":"456","tagAttributes":{"attr1":1096,"attr2":1103}}]},"text":"My text"}
I want to insert this JSON into an Elasticsearch index with Logstash. However, I want to insert a flattened JSON with the fields in the array combined like this:
"foo.tags.tagId": ["123", "456"]
"foo.tags.tagAttributes.attr1": ["AAA", "BBB"]
"foo.tags.tagAttributes.attr2": ["111", "222"]
In total, the data inserted to Elasticsearch should look like this:
"foo.bar.type": "someType"
"foo.bar.id": "ga241ghs"
"foo.tags.tagId": ["123", "456"]
"foo.tags.tagAttributs.attr1": ["AAA", "BBB"]
"foo.tags.tagAttributs.attr2": ["111", "222"]
"foo.text": "My text"
This is my current Logstash .conf; I am able to split the "tags" array, but now I am getting 2 entries as a result.
How can I now join all tagIds to one field, attr1 values of the array to one field, and all attr2 values to another?
input {
file {
codec => json
path => ["/path/to/my/data/*.json"]
mode => "read"
file_completed_action => "log"
file_completed_log_path => ["/path/to/my/logfile"]
sincedb_path => "/dev/null"
}
}
filter {
split {
field => "[foo][tags]"
}
}
output {
stdout { codec => rubydebug }
}
Thanks a lot!
Nice example for my JSON iterator IIFE: no need for complex algorithms, just pick DepthFirst and a slightly modified Path (new "raw" version), and that is it.
If you like this JS answer, please consider ticking the accept flag under the voting buttons.
If you want a different language, there is also a C# parser with similar iterators in the same GitHub repo.
var src = {"foo":{"bar":{"type":"someType","id":"ga241ghs"},"tags":[{"#tagId":"123","tagAttributes":{"attr1":"AAA","attr2":"111"}},{"#tagId":"456","tagAttributes":{"attr1":"BBB","attr2":"222"}}],"text":"My text"}};
//console.log(JSON.stringify(src, null, 2));
function traverse(it) {
var dest = {};
var i=0;
do {
if (it.Current().HasStringValue()) {
var pathKey = it.Path(true).join('.');
var check = dest[pathKey];
if (check) {
if (!(check instanceof Array)) dest[pathKey] = [check];
dest[pathKey].push(it.Value());
} else {
dest[pathKey] = it.Value();
}
}
//console.log(it.Level + '\t' + it.Path(1).join('.') + '\t' + it.KeyDots(), (it.Value() instanceof Object) ? "-" : it.Value());
} while (it.DepthFirst());
console.log(JSON.stringify(dest, null, 2));
return dest;
}
/*
* https://github.com/eltomjan/ETEhomeTools/blob/master/HTM_HTA/JSON_Iterator_IIFE.js
* +new raw Path feature
*/
'use strict';
var JNode = (function (jsNode) {
function JNode(_parent, _pred, _key, _value) {
this.parent = _parent;
this.pred = _pred;
this.node = null;
this.next = null;
this.key = _key;
this.value = _value;
}
JNode.prototype.HasOwnKey = function () { return this.key && (typeof this.key != "number"); }
JNode.prototype.HasStringValue = function () { return !(this.value instanceof Object); }
return JNode;
})();
var JIterator = (function (json) {
var root, current, maxLevel = -1;
function JIterator(json, parent) {
if (parent === undefined) parent = null;
var pred = null, localCurrent;
for (var child in json) {
var obj = json[child] instanceof Object;
if (json instanceof Array) child = parseInt(child); // non-associative array
if (!root) root = localCurrent = new JNode(parent, null, child, json[child]);
else {
localCurrent = new JNode(parent, pred, child, obj ? ((json[child] instanceof Array) ? [] : {}) : json[child]);
}
if (pred) pred.next = localCurrent;
if (parent && parent.node == null) parent.node = localCurrent;
pred = localCurrent;
if (obj) {
var memPred = pred;
JIterator(json[child], pred);
pred = memPred;
}
}
if (this) {
current = root;
this.Level = 0;
}
}
JIterator.prototype.Current = function () { return current; }
JIterator.prototype.SetCurrent = function (newCurrent) {
current = newCurrent;
this.Level = 0;
while(newCurrent = newCurrent.parent) this.Level++;
}
JIterator.prototype.Parent = function () {
var retVal = current.parent;
if (retVal == null) return false;
this.Level--;
return current = retVal;
}
JIterator.prototype.Pred = function () {
var retVal = current.pred;
if (retVal == null) return false;
return current = retVal;
}
JIterator.prototype.Node = function () {
var retVal = current.node;
if (retVal == null) return false;
this.Level++;
return current = retVal;
}
JIterator.prototype.Next = function () {
var retVal = current.next;
if (retVal == null) return false;
return current = retVal;
}
JIterator.prototype.Key = function () { return current.key; }
JIterator.prototype.KeyDots = function () { return (typeof (current.key) == "number") ? "" : (current.key + ':'); }
JIterator.prototype.Value = function () { return current.value; }
JIterator.prototype.Reset = function () {
current = root;
this.Level = 0;
}
JIterator.prototype.RawPath = function () {
var steps = [], level = current;
do {
if (level != null && level.value instanceof Object) {
steps.push(level.key + (level.value instanceof Array ? "[]" : "{}"));
} else {
if (level != null) steps.push(level.key);
else break;
}
level = level.parent;
} while (level != null);
var retVal = "";
retVal = steps.reverse();
return retVal;
}
JIterator.prototype.Path = function (raw) {
var steps = [], level = current;
do {
if (level != null && level.value instanceof Object) {
var size = 0;
var items = level.node;
if (typeof (level.key) == "number" && !raw) steps.push('[' + level.key + ']');
else {
if(raw) {
if (typeof (level.key) != "number") steps.push(level.key);
} else {
while (items) {
size++;
items = items.next;
}
var type = (level.value instanceof Array ? "[]" : "{}");
var prev = steps[steps.length - 1];
if (prev && prev[0] == '[') {
var last = prev.length - 1;
if (prev[last] == ']') {
last--;
if (!isNaN(prev.substr(1, last))) {
steps.pop();
size += '.' + prev.substr(1, last);
}
}
}
steps.push(level.key + type[0] + size + type[1]);
}
}
} else {
if (level != null) {
if (typeof (level.key) == "number") steps.push('[' + level.key + ']');
else steps.push(level.key);
}
else break;
}
level = level.parent;
} while (level != null);
var retVal = "";
retVal = steps.reverse();
return retVal;
}
JIterator.prototype.DepthFirst = function () {
if (current == null) return 0; // exit sign
if (current.node != null) {
current = current.node;
this.Level++;
if (maxLevel < this.Level) maxLevel = this.Level;
return 1; // moved down
} else if (current.next != null) {
current = current.next;
return 2; // moved right
} else {
while (current != null) {
if (current.next != null) {
current = current.next;
return 3; // returned up & moved next
}
this.Level--;
current = current.parent;
}
}
return 0; // exit sign
}
JIterator.prototype.BreadthFirst = function () {
if (current == null) return 0; // exit sign
if (current.next) {
current = current.next;
return 1; // moved right
} else if (current.parent) {
var level = this.Level, point = current;
while (this.DepthFirst() && level != this.Level);
if (current) return 2; // returned up & moved next
do {
this.Reset();
level++;
while (this.DepthFirst() && level != this.Level);
if (current) return 3; // returned up & moved next
} while (maxLevel >= level);
return current != null ? 3 : 0;
} else if (current.node) {
current = current.node;
return 3;
} else if (current.pred) {
while (current.pred) current = current.pred;
while (current && !current.node) current = current.next;
if (!current) return null;
else return this.DepthFirst();
}
}
JIterator.prototype.ReadArray = function () {
var retVal = {};
var item = current;
do {
if (item.value instanceof Object) {
if (item.value.length == 0) retVal[item.key] = item.node;
else retVal[item.key] = item;
} else retVal[item.key] = item.value;
item = item.next;
} while (item != null);
return retVal;
}
JIterator.prototype.FindKey = function (key) {
var pos = current;
while (current && current.key != key) this.DepthFirst();
if (current.key == key) {
var retVal = current;
current = pos;
return retVal;
} else {
current = pos;
return null;
}
}
return JIterator;
})();
traverse(new JIterator(src));
Your short JSON version was different; I am now using this one, which matches your required results (attrs changed and text moved from the root to under foo):
{
"foo": {
"bar": {
"type": "someType",
"id": "ga241ghs"
},
"tags": [
{
"#tagId": "123",
"tagAttributes": {
"attr1": "AAA",
"attr2": "111"
}
},
{
"#tagId": "456",
"tagAttributes": {
"attr1": "BBB",
"attr2": "222"
}
}
],
"text": "My text"
}
}
I figured out how to do it with a Ruby filter directly in Logstash. For everyone searching for this in the future, here is an example of how to do it for #tagId:
filter {
ruby { code => '
i = 0
tagId_array = Array.new
while i < event.get( "[foo][tags]" ).length do
tagId_array = tagId_array.push(event.get( "[foo][tags][" + i.to_s + "][#tagId]" ))
i += 1
end
event.set( "foo.tags.tagId", tagId_array )
'
}
}
I am trying to use Firestore from Unity via its REST API. Everything has worked as expected so far.
However, when reading a document from Firestore, it returns JSON in a different format, like this:
{
"name": "projects/firestore-unity-demo-87998/databases/(default)/documents/test/panara",
"fields": {
"health": {
"integerValue": "1008"
},
"name": {
"stringValue": "Bhavin Panara"
},
"birthday": {
"timestampValue": "1992-10-08T04:40:10Z"
},
"alive": {
"booleanValue": true
},
"floatingPointNumber": {
"testFloat": 100.1
}
},
"createTime": "2019-07-30T13:27:09.599079Z",
"updateTime": "2019-07-31T11:41:10.637712Z"
}
How can I convert this kind of JSON to normal JSON like this?
{
"health":1008,
"name":"Bhavin Panara",
"birthday" : "1992-10-08T04:40:10Z",
"alive":true,
"floatingPointNumber":100.1
}
I used the code from firestore-parser
const getFireStoreProp = (value) => {
const props = {
arrayValue: 1,
bytesValue: 1,
booleanValue: 1,
doubleValue: 1,
geoPointValue: 1,
integerValue: 1,
mapValue: 1,
nullValue: 1,
referenceValue: 1,
stringValue: 1,
timestampValue: 1,
};
return Object.keys(value).find(k => props[k] === 1);
};
export const fireStoreParser = (value) => {
let newVal = value;
// You can use this part to avoid mutating original values
// let newVal;
// if (typeof value === 'object') {
// newVal = { ...value };
// } else if (value instanceof Array) {
// newVal = value.slice(0);
// } else {
// newVal = value;
// }
const prop = getFireStoreProp(newVal);
if (prop === 'doubleValue' || prop === 'integerValue') {
newVal = Number(newVal[prop]);
} else if (prop === 'arrayValue') {
newVal = ((newVal[prop] && newVal[prop].values) || []).map(v => fireStoreParser(v));
} else if (prop === 'mapValue') {
newVal = fireStoreParser((newVal[prop] && newVal[prop].fields) || {});
} else if (prop === 'geoPointValue') {
newVal = { latitude: 0, longitude: 0, ...newVal[prop] };
} else if (prop) {
newVal = newVal[prop];
} else if (typeof newVal === 'object') {
Object.keys(newVal).forEach((k) => { newVal[k] = fireStoreParser(newVal[k]); });
}
return newVal;
};
function toValue(field) {
return "integerValue" in field
? Number(field.integerValue)
: "doubleValue" in field
? Number(field.doubleValue)
: "arrayValue" in field
? field.arrayValue.values.map(toValue)
: "mapValue" in field
? toJSON(field.mapValue)
: Object.entries(field)[0][1];
}
function toJSON(doc) {
return Object.fromEntries(
Object.entries(doc.fields ?? {}).map(([key, field]) => [key, toValue(field)])
);
}
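For example, applying toJSON to the document from the question (abbreviated here) yields the plain values directly:
const doc = {
  fields: {
    health: { integerValue: "1008" },
    name: { stringValue: "Bhavin Panara" },
    alive: { booleanValue: true },
  },
};
console.log(toJSON(doc));
// { health: 1008, name: 'Bhavin Panara', alive: true }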
How can I write a test for an empty value of a specific JSON name/value pair? For example, I have this JSON:
{
"data": {
"sectionGroupName": "PPConfig:APIMethod",
"sections": {}
},
"success": true,
"errorMessage": ""
}
I want to check if sections is empty, like it is in this case. I have other successful tests written like this:
tests["Status code is 200"] = responseCode.code === 200;
var body = JSON.parse(responseBody);
tests["Success Response"] = body.success === true;
tests["No Error message"] = body.errorMessage === "";
tests.Data = body.data.sectionGroupName === "PPConfig:APIMethod";
But I haven't been able to find working test code for checking whether the value of a specific name is an empty dictionary. Can someone help me with an example, please?
You can get the list of properties of sections and test its length.
let sectionKeys = Object.keys(body.data.sections)
if(sectionKeys.length){
//Proceed with section
} else {
//Proceed when it's empty
}
See Object.keys()
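In the style of the tests already shown in the question, that check can be written as a single assertion (assuming body is the parsed response):
var body = JSON.parse(responseBody);
tests["Sections is empty"] = Object.keys(body.data.sections).length === 0;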
From this link, to check that it's a dictionary (use your 'sections' as v):
function isDict(v) {
  // the original snippet also called an isJsonable() helper defined in the linked answer
  return !!v && typeof v === "object" && !(v instanceof Array) && !(v instanceof Date);
}
Then, to check that it is empty (from this other link), use:
function isEmpty(obj) {
  for (var x in obj) { return false; }
  return true;
}
That should work
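Putting the two helpers together with the question's test style, the check might look like this (a sketch):
var body = JSON.parse(responseBody);
tests["Sections is an empty dictionary"] = isDict(body.data.sections) && isEmpty(body.data.sections);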
I found many solutions for finding the depth of nodes in a nested JSON file, but they throw the error "maximum recursion depth exceeded".
When I raise the maximum recursion limit, the process exits with an error code.
As part of my problem, I also need to find out the key names of each node in the JSON file.
Example JSON:
"attachments": {
"data": [
{
"media": {
"image": {
"height": 400,
"src": "https://scontent.xx.fbcdn.net/v/t1.0-1/10250217_10152130974757825_8645405213175562082_n.jpg?oh=904c1785fc974a3208f1d18ac07d59f3&oe=57CED94D",
"width": 400
}
},
"target": {
"id": "74286767824",
"url": "https://www.facebook.com/LAInternationalAirport/"
},
"title": "Los Angeles International Airport (LAX)",
"type": "map",
"url": "https://www.facebook.com/LAInternationalAirport/"
}
]
}
The output should be:
nodes:
[data [media [image[height,width,src]], target[id,url], title, type, url]]
depth: 4
In case anyone else came here and found that the accepted answer does not work: Object.keys() on a string returns an array of the string's characters, so for large objects, or objects containing long strings, it simply fails.
Here is something that works:
function getDepth(obj) {
  if (!obj || obj.length === 0 || typeof obj !== "object") return 0;
  const keys = Object.keys(obj);
  let depth = 0;
  keys.forEach(key => {
    let tmpDepth = getDepth(obj[key]);
    if (tmpDepth > depth) {
      depth = tmpDepth;
    }
  });
  return depth + 1;
}
Example: https://jsfiddle.net/95g3ebp7/
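For example, a quick sanity check of the behaviour on nested objects and on strings:
console.log(getDepth({ a: { b: { c: 1 } } })); // 3
console.log(getDepth("some long string"));     // 0, strings are not treated as objects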
Try the following function:
const getDepth = (
// eslint-disable-next-line @typescript-eslint/no-explicit-any
obj: Record<string, any>,
tempDepth?: number
): number => {
let depth = tempDepth ? tempDepth : 0;
if (obj !== null) {
depth++;
if (typeof obj === 'object' && !Array.isArray(obj)) {
const keys = Object.keys(obj);
if (keys.length > 0)
depth = Math.max(
...keys.map((key) => {
return getDepth(obj[key], depth);
})
);
} else if (Array.isArray(obj)) {
if (obj.length > 0)
depth = Math.max(
...obj.map((item) => {
return getDepth(item, depth);
})
);
}
}
return depth;
};
If you try this, I believe you should get 7.
console.log(getDepth({
a: {
b: { a: [{ a: { a: [] } }] }
}
}));
function geth(obj) {
var depth = 0;
var k = Object.keys(obj);
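// Caution (see the answer above): Object.keys() on a string returns its characters,
// so this version recurses into string values and can exceed the call stack.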
console.log(k);
for (var i in k) {
var tmpDepth = geth(obj[k[i]]);
if (tmpDepth > depth) {
depth = tmpDepth
}
}
return 1 + depth;
}