Postman test for empty dictionary value of a JSON name

How can I write a test for an empty value of a specific JSON name/value pair? For example, I have this JSON:
{
    "data": {
        "sectionGroupName": "PPConfig:APIMethod",
        "sections": {}
    },
    "success": true,
    "errorMessage": ""
}
I want to check whether sections is empty, as it is in this case. I have other successful tests written like this:
tests["Status code is 200"] = responseCode.code === 200;
var body = JSON.parse(responseBody);
tests["Success Response"] = body.success === true;
tests["No Error message"] = body.errorMessage === "";
tests.Data = body.data.sectionGroupName === "PPConfig:APIMethod";
But I haven't been able to find working test code for checking whether the value of a specific name is an empty dictionary. Can someone help me with an example, please?

You can get the list of properties of sections and test its length.
let sectionKeys = Object.keys(body.data.sections)
if (sectionKeys.length) {
    // Proceed with sections
} else {
    // Proceed when it's empty
}
See Object.keys()
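In the older Postman tests syntax used in the question, this can be a one-line assertion (a sketch, reusing the parsed body variable from above):
tests["Sections is empty"] = Object.keys(body.data.sections).length === 0;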

From this link, to check whether it's a dictionary (use your 'sections' as v):
function isDict(v) {
    // true only for plain objects: not null, not an array, not a date
    return !!v && typeof v === 'object' && !(v instanceof Array) && !(v instanceof Date);
}
Then check that it is empty (from this other link):
function isEmpty(obj) {
    for (var x in obj) { return false; }
    return true;
}
That should work.
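Combining the two helpers into a Postman assertion might look like this (a sketch, reusing the parsed body from the question):
var sections = body.data.sections;
tests["Sections is an empty dictionary"] = isDict(sections) && isEmpty(sections);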


Regex for fetching JSON data

I have a JSON file abc.json
{"value1":5.0,"value2":2.5,"value3":"2019-10-24T15:26:00.000Z","modifier":[],"value4":{"value41":{"value411":5,"value412":"hey"}}}
I can get value2 using this regex
sed -E 's/.*"value2":"?([^,"]*)"?.*/\1/' abc.json
I want to know how I can get the values of value411 and value412.
I don't want to use jq or any other tool, as my requirement is to use regex for this.
You should always try to use an existing parser; depending on what platform you work on, there should be one that can interpret the data model for you. I refer to this famous answer about parsing HTML with regex.
var jData = {
    "value1": 5.0,
    "value2": 2.5,
    "value3": "2019-10-24T15:26:00.000Z",
    "modifier": [],
    "value4": {
        "value41": {
            "value411": 5,
            "value412": "hey"
        }
    }
};
If you still want to try it with regex, use this:
JSON.stringify(jData).match(/(?<=(\"(value411|value412)\"\:))[\"\w+]+/g)
// Output: ['5', '"hey"']
Demo: https://regex101.com/r/c3K4cH/1
Limitation: this only works with leaf key names; don't try to fetch a full nested object with it.
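For comparison, the parser-based route recommended above is trivial here; a minimal sketch, assuming the file contents have been read into a string named text:
var parsed = JSON.parse(text);
console.log(parsed.value4.value41.value411); // 5
console.log(parsed.value4.value41.value412); // "hey"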
You can also create a JavaScript function that gets any non-empty value from a JSON object by key, as follows:
function getData(obj, fKey, fData = null) {
    for (const prop of Object.getOwnPropertyNames(obj)) {
        if (!fData) {
            if (typeof obj[prop] === 'object' && Object.keys(obj[prop]).length > 0 && prop !== fKey) {
                // Recurse into non-empty nested objects until the key is found
                return getData(obj[prop], fKey, fData)
            } else {
                if (prop === fKey) {
                    fData = obj[prop];
                    return fData
                }
            }
        } else {
            return fData;
        }
    }
}
console.log(getData(jData, 'value411'));
console.log(getData(jData, 'value412'));

Filtering JSON values

I have a process that returns a JSON object:
data={key1:[], key2:[], key3:[{key1:"a"}, {key2:"b"}], key4:[{key1:"c"}, {key2:"d"}]}
I want to know if there is a simple way to filter this JSON object to remove the properties where the value is an empty array.
Once filtered, I can then loop through the remaining properties and action the array elements.
First, we have to iterate over properties in an object.
for (var prop in data) {
    if (data.hasOwnProperty(prop)) {
        // Logic here
    }
}
Then it's a simple check to filter out the empty array properties:
if (data[prop].length == 0) {
    delete data[prop]
}
The full solution:
for (var prop in data) {
    if (data.hasOwnProperty(prop)) {
        if (data[prop].length == 0) {
            delete data[prop]
        }
    }
}
I would prefer to create a new object that omits the empty arrays instead of deleting from the existing object.
var data = {key1:[], key2:[], key3:[{key1:"a"}, {key2:"b"}], key4:[{key1:"c"}, {key2:"d"}]}
var cleanData = Object.keys(data).reduce((obj, key) => {
    if (data[key] && data[key].length) {
        obj[key] = data[key]
    }
    return obj
}, {})
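In environments with ES2019 support, the same idea can be written with Object.entries and Object.fromEntries (a sketch, not from the original answers):
var cleanData = Object.fromEntries(
    Object.entries(data).filter(([key, value]) => value && value.length)
);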
Using lodash should make this pretty simple:
var filtered = _.omitBy(data, function(value) {
return Array.isArray(value) && value.length == 0;
});
You can now loop through the remaining elements in the filtered object to take further actions on them.
Try this working demo:
var data = {
    key1: [],
    key2: [],
    key3: [
        { key1: "a" },
        { key2: "b" }
    ],
    key4: [
        { key1: "c" },
        { key2: "d" }
    ]
};
for (var i in data) {
    if (data[i].length == 0) {
        delete data[i]
    }
}
console.log(data);

Couchbase - get doc if value of key is empty (i.e. has length 0 )

I have a View in Couchbase that I use to retrieve my data. Simplified, my data bucket contains:
{
    "id": 123,
    "key": "some value"
},
{
    "id": 456,
    "key": ""
}
...
and I want to get all the docs where 'key' is present, but its value is empty.
If I use a view like this:
function (doc, meta) {
    if (doc.key)
        emit([doc.id], doc);
}
I get both of the JSONs above. If I use a view like this:
function (doc, meta) {
    if (doc.key && doc.key == "")
        emit([doc.id], doc);
}
I get none of them.
I want to get the JSON with id = 456. What is the correct syntax?
That is because the empty string is falsy in JavaScript:
~ $ node
> var x = ""
undefined
> x
''
> if (x && x == "") { console.log("it is empty") }
undefined
> if (x) { console.log("it is empty") }
undefined
> if (!x) { console.log("it is empty") }
it is empty
undefined
You should use something like:
if (doc.key === "")
Triple equals compares strictly by type and value, so it matches only an actual empty string.
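Putting that together, a view that emits only docs where key is present but empty (id = 456 above) might look like this (a sketch based on the views in the question):
function (doc, meta) {
    if (doc.key === "")
        emit([doc.id], doc);
}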

Formatting DynamoDB data to normal JSON in AWS Lambda

I'm using AWS Lambda to scan data from a DynamoDB table. This is what I get in return:
{
    "videos": [
        {
            "file": {
                "S": "file1.mp4"
            },
            "id": {
                "S": "1"
            },
            "canvas": {
                "S": "This is Canvas1"
            }
        },
        {
            "file": {
                "S": "main.mp4"
            },
            "id": {
                "S": "0"
            },
            "canvas": {
                "S": "this is a canvas"
            }
        }
    ]
}
My front-end application is using the Ember Data REST Adapter, which does not accept such a response. Is there any way I can get a normal JSON format? There is an NPM module called dynamodb-marshaler that converts DynamoDB data to normal JSON. I'm looking for a native solution if possible.
Node.js
Use the unmarshall function from the AWS JavaScript SDK:
const AWS = require("aws-sdk");

exports.handler = function(event, context, callback) {
    const newImages = event.Records.map(
        (record) => AWS.DynamoDB.Converter.unmarshall(record.dynamodb.NewImage)
    );
    console.log('Converted records', newImages);
    callback(null, `Success`);
};
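If you are on the newer AWS SDK for JavaScript (v3), the equivalent helper lives in the @aws-sdk/util-dynamodb package; a sketch of the same mapping:
const { unmarshall } = require("@aws-sdk/util-dynamodb");
const newImages = event.Records.map(
    (record) => unmarshall(record.dynamodb.NewImage)
);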
Python
Use TypeDeserializer.deserialize from boto3.dynamodb.types:
import json
from boto3.dynamodb.types import TypeDeserializer

def ddb_deserialize(r, type_deserializer = TypeDeserializer()):
    return type_deserializer.deserialize({"M": r})

def lambda_handler(event, context):
    new_images = [ ddb_deserialize(r["dynamodb"]["NewImage"]) for r in event['Records'] ]
    print('Converted records', json.dumps(new_images, indent=2))
I know this is a bit old, but I had the same problem processing stream data from DynamoDB in a Node.js Lambda function. I used the approach proposed by @churro.
Import the SDK and the output converter:
var AWS = require("aws-sdk");
var parse = AWS.DynamoDB.Converter.output;
Use the parse function with a small hack:
exports.handler = function(event, context, callback) {
    var docClient = new AWS.DynamoDB.DocumentClient();
    event.Records.forEach((record) => {
        console.log(record.eventID);
        console.log(record.eventName);
        console.log('DynamoDB Record:', parse({ "M": record.dynamodb.NewImage }));
    });
    callback(null, `Successfully processed ${event.Records.length} records.`);
};
Hope it helps
AWS JavaScript SDK was recently updated with Document Client which does exactly what you need. Check the announce and usage examples here: http://blogs.aws.amazon.com/javascript/post/Tx1OVH5LUZAFC6T/Announcing-the-Amazon-DynamoDB-Document-Client-in-the-AWS-SDK-for-JavaScript
JavaScript: the AWS SDK provides the unmarshall function.
Python: use TypeDeserializer from boto3.dynamodb.types:
from boto3.dynamodb.types import TypeDeserializer, TypeSerializer

def from_dynamodb_to_json(item):
    d = TypeDeserializer()
    return {k: d.deserialize(value=v) for k, v in item.items()}

## Usage:
from_dynamodb_to_json({
    "Day": {"S": "Monday"},
    "mylist": {"L": [{"S": "Cookies"}, {"S": "Coffee"}, {"N": "3.14159"}]}
})
# {'Day': 'Monday', 'mylist': ['Cookies', 'Coffee', Decimal('3.14159')]}
Here you can find a gist which does that:
function mapper(data) {
    let S = "S";
    let SS = "SS";
    let NN = "NN";
    let NS = "NS";
    let BS = "BS";
    let BB = "BB";
    let N = "N";
    let BOOL = "BOOL";
    let NULL = "NULL";
    let M = "M";
    let L = "L";

    if (isObject(data)) {
        let keys = Object.keys(data);
        while (keys.length) {
            let key = keys.shift();
            let types = data[key];
            if (isObject(types) && types.hasOwnProperty(S)) {
                data[key] = types[S];
            } else if (isObject(types) && types.hasOwnProperty(N)) {
                data[key] = parseFloat(types[N]);
            } else if (isObject(types) && types.hasOwnProperty(BOOL)) {
                data[key] = types[BOOL];
            } else if (isObject(types) && types.hasOwnProperty(NULL)) {
                data[key] = null;
            } else if (isObject(types) && types.hasOwnProperty(M)) {
                data[key] = mapper(types[M]);
            } else if (isObject(types) && types.hasOwnProperty(L)) {
                data[key] = mapper(types[L]);
            } else if (isObject(types) && types.hasOwnProperty(SS)) {
                data[key] = types[SS];
            } else if (isObject(types) && types.hasOwnProperty(NN)) {
                data[key] = types[NN];
            } else if (isObject(types) && types.hasOwnProperty(BB)) {
                data[key] = types[BB];
            } else if (isObject(types) && types.hasOwnProperty(NS)) {
                data[key] = types[NS];
            } else if (isObject(types) && types.hasOwnProperty(BS)) {
                data[key] = types[BS];
            }
        }
    }
    return data;

    function isObject(value) {
        return typeof value === "object" && value !== null;
    }
}
https://gist.github.com/igorzg/c80c0de4ad5c4028cb26cfec415cc600
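For example, applied to one item from the response shown in the question (note that mapper mutates its input):
var item = { "file": { "S": "file1.mp4" }, "id": { "S": "1" } };
console.log(mapper(item)); // { file: 'file1.mp4', id: '1' }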
If you are using Python in the Lambda, you can utilise the dynamodb-json library.
Install the library:
pip install dynamodb-json
and use the snippet below:
from dynamodb_json import json_util as util

def marshall(regular_json):
    return util.dumps(regular_json)

def unmarshall(dynamodb_json):
    return util.loads(dynamodb_json)
Reference
https://pypi.org/project/dynamodb-json/
I think it's just a custom transformation exercise for each app. A simple conversion from DynamoDB's item format to your application format might look like this:
var response = {...} // your response from DynamoDB
var formattedObjects = response.videos.map(function(video) {
    return {
        "file": video.file.S,
        "id": video.id.S,
        "canvas": video.canvas.S
    };
});
If you want to build a generic system for this, you would have to handle DynamoDB's various AttributeValue types. A function like the one below would do the job, but I've left out the hard work of handling most of DynamoDB's more complex attribute value types:
function dynamoItemToPlainObj(dynamoItem) {
    var plainObj = {};
    for (var attributeName in dynamoItem) {
        var attribute = dynamoItem[attributeName];
        var attributeValue;
        for (var itemType in attribute) {
            switch (itemType) {
                case "S":
                    attributeValue = attribute.S.toString();
                    break;
                case "N":
                    attributeValue = Number(attribute.N);
                    break;
                // more attribute types...
                default:
                    attributeValue = attribute[itemType].toString();
                    break;
            }
        }
        plainObj[attributeName] = attributeValue;
    }
    return plainObj;
}
var formattedObjects = response.videos.map(dynamoItemToPlainObj);
I tried several solutions here, but none worked with multi-level data, such as when it includes a list of maps, e.g.:
{
    "item1": {
        "M": {
            "sub-item1": {
                "L": [
                    {
                        "M": {
                            "sub-item1-list-map": {
                                "S": "value"
Below, adapted from @igorzg's answer (which also has that drawback), is a version that fixes that.
Example usage:
dynamodb.getItem({...}, function(err, data) {
    if (!err && data && data.Item) {
        var converted = ddb_to_json(data.Item);
        // ...
    }
});
Here's the conversion function:
function ddb_to_json(data) {
    function isObject(value) {
        return typeof value === "object" && value !== null;
    }

    function convert_ddb(ddbData) {
        if (isObject(ddbData) && ddbData.hasOwnProperty('S'))
            return ddbData.S;
        if (isObject(ddbData) && ddbData.hasOwnProperty('N'))
            return parseFloat(ddbData.N);
        if (isObject(ddbData) && ddbData.hasOwnProperty('BOOL'))
            return ddbData.BOOL;
        if (isObject(ddbData) && ddbData.hasOwnProperty('NULL'))
            return null;
        if (isObject(ddbData) && ddbData.hasOwnProperty('M')) {
            var x = {};
            for (var k in ddbData.M)
                x[k] = convert_ddb(ddbData.M[k]);
            return x;
        }
        if (isObject(ddbData) && ddbData.hasOwnProperty('L'))
            return ddbData.L.map(x => convert_ddb(x));
        if (isObject(ddbData) && ddbData.hasOwnProperty('SS'))
            return ddbData.SS;
        if (isObject(ddbData) && ddbData.hasOwnProperty('NN'))
            return ddbData.NN;
        if (isObject(ddbData) && ddbData.hasOwnProperty('BB'))
            return ddbData.BB;
        if (isObject(ddbData) && ddbData.hasOwnProperty('NS'))
            return ddbData.NS;
        if (isObject(ddbData) && ddbData.hasOwnProperty('BS'))
            return ddbData.BS;
        return ddbData;
    }

    if (isObject(data))
        return convert_ddb({ M: data });
    return data;
}
If you need an online editor, try this:
https://2json.net/dynamo

Can I stop Angular.js’s json filter from excluding properties that start with $?

Angular.js has a handy built-in filter, json, which displays JavaScript objects as nicely formatted JSON.
However, it seems to filter out object properties that begin with $ by default:
Template:
<pre>{{ {'name':'value', 'special':'yes', '$reallyspecial':'Er...'} | json }}</pre>
Displayed:
{
"name": "value",
"special": "yes"
}
http://plnkr.co/edit/oem4HJ9utZMYGVbPkT6N?p=preview
Can I make properties beginning with $ be displayed like other properties?
Basically, you can't. It is "hard-coded" into the filter's behaviour.
Nonetheless, it is quite easy to build a custom JSON filter that behaves identically to Angular's but does not filter out properties starting with '$'.
(Scroll further down for sample code and a short demo.)
If you take a look at the 1.2.15 version source code, you will find out that the json filter is defined like this:
function jsonFilter() {
    return function(object) {
        return toJson(object, true);
    };
}
So, it uses the toJson() function (the second parameter (true) means: format my JSON nicely).
So, our next stop is the toJson() function, that looks like this:
function toJson(obj, pretty) {
    if (typeof obj === 'undefined') return undefined;
    return JSON.stringify(obj, toJsonReplacer, pretty ? '  ' : null);
}
This function makes use of the "native" JSON.stringify() function, passing a custom replacer function (toJsonReplacer).
The toJsonReplacer() function handles some special cases: it checks if the key starts with $ and ignores the property if it does (this is what we want to change), and it checks if the value is a Window, a Document, or a Scope object (in which case it converts it to a descriptive string, in order to avoid "Converting circular structure to JSON" errors).
function toJsonReplacer(key, value) {
    var val = value;
    if (typeof key === 'string' && key.charAt(0) === '$') {
        val = undefined;
    } else if (isWindow(value)) {
        val = '$WINDOW';
    } else if (value && document === value) {
        val = '$DOCUMENT';
    } else if (isScope(value)) {
        val = '$SCOPE';
    }
    return val;
}
For the sake of completeness, the two functions that check for Window and Scope look like this:
function isWindow(obj) {
    return obj && obj.document && obj.location && obj.alert && obj.setInterval;
}
function isScope(obj) {
    return obj && obj.$evalAsync && obj.$watch;
}
Finally, all we need to do is to create a custom filter that uses the exact same code, with the sole difference that our toJsonReplacer() won't filter out properties starting with $.
app.filter('customJson', function () {
    function isWindow(obj) {
        return obj &&
            obj.document &&
            obj.location &&
            obj.alert &&
            obj.setInterval;
    }
    function isScope(obj) {
        return obj &&
            obj.$evalAsync &&
            obj.$watch;
    }
    function toJsonReplacer(key, value) {
        var val = value;
        if (isWindow(value)) {
            val = '$WINDOW';
        } else if (value && (document === value)) {
            val = '$DOCUMENT';
        } else if (isScope(value)) {
            val = '$SCOPE';
        }
        return val;
    }
    function toJson(obj, pretty) {
        if (typeof obj === 'undefined') { return undefined; }
        return JSON.stringify(obj, toJsonReplacer, pretty ? '  ' : null);
    }
    return function(object) {
        return toJson(object, true);
    };
});
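Using it in a template is then the same as with the built-in filter, just under the new name (reusing the object from the question):
<pre>{{ {'name':'value', 'special':'yes', '$reallyspecial':'Er...'} | customJson }}</pre>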
See also this short demo.
* The downside is that your custom JSON filter will not benefit from further improvements/enhancements of Angular's json filter, so you'll have to re-define yours to incorporate any changes. Of course, for such a basic and simple filter, one shouldn't expect frequent or extensive changes, but that doesn't mean there aren't going to be any.