Node.js how do you read a JSON with an element that is a HTTP address - json

OK I just ran into an issue. I am using Auth0 to create users with different rights (not scopes just rights) in the App Metadata. When I decode the Token I get this json:
{
"iss": "https://testing.auth0.com/",
"sub": "auth0|58e7bae154941844b507eaf5",
"aud": "OSBkLd832tIhpDe0QFJbQ9vutgB2s6cJ",
"exp": 1497016797,
"iat": 1496980797,
"https://thetestgroup.com/app_metadata": {
"is_admin": true
}
}
As you can see the app metadata is in the element "https://thetestgroup.com/app_metadata". Normally I would just do something like this in my code (auth.payload.iat) to get the iat but for the app_metadata it rejects it because of the :. Is there a good way to get at that data?

OK, let's talk JavaScript (Node) and your JSON.
Firefox Scratchpad (Shift-F4)
// A URL is a perfectly legal property name in JS/JSON; it simply cannot be
// reached with dot notation, so bracket notation is used instead.
var x = {
  iss: "https://testing.auth0.com/",
  sub: "auth0|58e7bae154941844b507eaf5",
  aud: "OSBkLd832tIhpDe0QFJbQ9vutgB2s6cJ",
  exp: 1497016797,
  iat: 1496980797,
  "https://thetestgroup.com/app_metadata": {
    is_admin: true,
  },
};
x["https://thetestgroup.com/app_metadata"].is_admin; // hit run
/*
true
*/
node.js
~ $ node -v
v8.1.0
~ $ node
> var x = {
... "iss": "https://testing.auth0.com/",
... "sub": "auth0|58e7bae154941844b507eaf5",
... "aud": "OSBkLd832tIhpDe0QFJbQ9vutgB2s6cJ",
... "exp": 1497016797,
... "iat": 1496980797,
... "https://thetestgroup.com/app_metadata": {
..... "is_admin": true
..... }
... }
undefined
> x['https://thetestgroup.com/app_metadata'].is_admin
true
>
Please provide an MCVE, since pure JS and JSON are quite happily using the (strange) key - as demonstrated.

Related

TTN V3 (MQTT JSON) -> Telegraf -> Grafana / Sensor data from Dragino LSE01 does not appear

I have a problem with Telegraf. I have a Dragino LSE01-8 sensor which is registered on TTN v3. I can check the decoded payload by subscribing to the topic "v3/lse01-8#ttn/devices/+/up".
But when I want to grab the data from Influx, I can not get "temp_SOIL" and "water_SOIL", although the data appears in JSON. "conduct_SOIL" is no problem. But I don't know why. Can somebody give me a hint?
Another sensor (Dragino LHT 65) works fine with all data I want to access.
It's possible to get this data from the Influx-Database:
uplink_message_decoded_payload_BatV
uplink_message_decoded_payload_Mod
uplink_message_decoded_payload_conduct_SOIL
uplink_message_decoded_payload_i_flag
uplink_message_decoded_payload_s_flag
uplink_message_f_cnt
uplink_message_f_port
uplink_message_locations_user_latitude
uplink_message_locations_user_longitude
uplink_message_rx_metadata_0_channel_index
uplink_message_rx_metadata_0_channel_rssi
uplink_message_rx_metadata_0_location_altitude
uplink_message_rx_metadata_0_location_latitude
uplink_message_rx_metadata_0_location_longitude
uplink_message_rx_metadata_0_rssi
uplink_message_rx_metadata_0_snr
uplink_message_rx_metadata_0_timestamp
uplink_message_settings_data_rate_lora_bandwidth
uplink_message_settings_data_rate_lora_spreading_factor
uplink_message_settings_timestamp
## Feuchtigkeitssensor Dragino LSE01-8
[[inputs.mqtt_consumer]]
name_override = "TTN-LSE01"
servers = ["tcp://eu1.cloud.thethings.network:1883"]
qos = 0
connection_timeout = "30s"
topics = [ "v3/lse01-8#ttn/devices/+/up" ]
client_id = "telegraf"
username = "lse01-8#ttn"
password = "NNSXS.LLSNSE67AP..................P67Q.Q...........HPG............KJA..........." //
data_format = "json"
This is the JSON data I can get (I changed some data in order not to send any passwords or tokens).
{
"end_device_ids":{
"device_id":"eui-a8.40.141.bbe4",
"application_ids":{
"application_id":"lse01-8"
},
"dev_eui":"A8...40.BE...4",
"join_eui":"A8.40.010.1",
"dev_addr":"2.9F.....8"
},
"correlation_ids":[
"as:up:01G4WDNS..P3C3R...RK56VQ...KT7N076",
"gs:conn:01G4H2F.ETRG.V2QER...RQ.0K1MGZ44",
"gs:up:host:01G4H2F.ETWRZX.4PFN.A2M.6RDKD4",
"gs:uplink:01G4WDN.N7B6P.J8E.JS.503F1",
"ns:uplink:01G4WDNSFM.MCYYEZZ1.KY.4M78",
"rpc:/ttn.lorawan.v3.GsNs/HandleUplink:01G4W.NSFM29Z3.PABYW...43",
"rpc:/ttn.lorawan.v3.NsAs/HandleUplink:01G4W....VTQ4DMKBF"
],
"received_at":"2022-06-06T11:51:18.979353604Z",
"uplink_message":{
"session_key_id":"AYE...j+DM....A==",
"f_port":2,
"f_cnt":292,
"frm_payload":"DSQAAAcVB4AADBA=",
"decoded_payload":{
"BatV":3.364,
"Mod":0,
"conduct_SOIL":12,
"i_flag":0,
"s_flag":1,
"temp_DS18B20":"0.00",
"temp_SOIL":"19.20",
"water_SOIL":"18.13"
},
"rx_metadata":[
{
"gateway_ids":{
"gateway_id":"lr8",
"eui":"3.6201F0.058.....00"
},
"time":"2022-06-06T11:51:00.289713Z",
"timestamp":4283143007,
"rssi":-47,
"channel_rssi":-47,
"snr":7,
"location":{
"latitude":51.______________,
"longitude":6.__________________,
"altitude":25,
"source":"SOURCE_REGISTRY"
},
"uplink_token":"ChsKG________________________________",
"channel_index":2
}
],
"settings":{
"data_rate":{
"lora":{
"bandwidth":125000,
"spreading_factor":7
}
},
"coding_rate":"4/5",
"frequency":"868500000",
"timestamp":4283143007,
"time":"2022-06-06T11:51:00.289713Z"
},
"received_at":"2022-06-06T11:51:18.772518399Z",
"consumed_airtime":"0.061696s",
"locations":{
"user":{
"latitude":51._________________,
"longitude":6.__________________4,
"source":"SOURCE_REGISTRY"
}
},
"version_ids":{
"brand_id":"dragino",
"model_id":"lse01",
"hardware_version":"_unknown_hw_version_",
"firmware_version":"1.1.4",
"band_id":"EU_863_870"
},
"network_ids":{
"net_id":"000013",
"tenant_id":"ttn",
"cluster_id":"eu1",
"cluster_address":"eu1.cloud.thethings.network"
}
}
}

Converting Packer 1.6 vsphere-iso configuration code from JSON to HCL2

With the release of Packer 1.6 came several deprecated fields in the vsphere-iso builder. From the looks of it, this seems to be a format/type change, because the fields actually still exist, just as nested properties. An example of the changes are the following:
Working in Packer 1.5.6:
JSON
"disk_size": 123456,
"disk_thin_provisioned": true
"network": "VM Network",
"network_card": "vmxnet3"
Working in Packer 1.6.0:
JSON
"storage": [
{
"disk_size": 123456,
"disk_thin_provisioned": true
}
],
"network_adapters": [
{
"network": "VM Network",
"network_card": "vmxnet3"
}
]
The issue I have at the moment is I'm using Packer 1.6.0 and am trying to convert the above working JSON code to HCL2. I can't figure out the HCL2 syntax that supports the changes that were made in Packer 1.6.0.
I've tried the following:
network_adapters = {
network_card = "vmxnet3"
network = "VM Network"
}
Output:
An argument named "network_adapter" is not expected here.
network_adapters = (
network_card = "vmxnet3"
network = "VM Network"
)
Output:
Error: Unbalanced parentheses
on .\Packer\ConfigFileName.pkr.hcl line 19, in source "vsphere-iso"
"Test": 18: storage = ( 19: disk_thin_provisioned = true
Expected a closing parenthesis to terminate the expression.
network_adapters = [
network_card = "vmxnet3",
network = "VM Network"
]
Output:
Error: Missing item separator
on .\Packer\ConfigFileName.pkr.hcl line 19, in source "vsphere-iso"
"Test": 18: storage = [ 19: disk_thin_provisioned =
true,
Expected a comma to mark the beginning of the next item.
I've also tried several other permutations of different collection syntax together with no luck so far. Any suggestions or tips would greatly be appreciated
The correct syntax is the following:
network_adapters {
network_card = "vmxnet3",
network = "VM Network"
}
Note that it's not using an assignment operator = between network_adapters and {
Credit goes to SwampDragons over on the Packer forums for pointing this out.
If you're interested in knowing why: There was a change to how maps are treated in HCL2 back in May 2020 with the release of Packer 1.5.6
core/hcl2: Maps are now treated as settable arguments as opposed to blocks. For example tags = {} instead of tags {} [GH-9035]
Reference: https://github.com/hashicorp/packer/blob/master/CHANGELOG.md#156-may-1-2020

Should I replace wct-istanbul with wct-istanbub in order to estimate how much of my Polymer web components code is covered by tests?

There is some similarity between my question and How to measure common coverage for Polymer components + .js files?. Nevertheless, the accepted answer there is "split to .js files and include it to components" in order to use wct-istanbul, and all my web components and tests are in .html files (the JavaScript is inside each .html file).
My straight question is: can I still use wct-istanbul to check how much of my code is covered by tests? If so, what is wrong in the configuration described below? If not, is wct-istanbub planned to replace wct-istanbul for Polymer projects?
package.json
"polyserve": "^0.18.0",
"web-component-tester": "^6.0.0",
"web-component-tester-istanbul": "^0.10.0",
...
wct.conf.js
var path = require('path');
var ret = {
'suites': ['test'],
'webserver': {
'pathMappings': []
},
'plugins': {
'local': {
'browsers': ['chrome']
},
'sauce': {
'disabled': true
},
"istanbul": {
"dir": "./coverage",
"reporters": ["text-summary", "lcov"],
"include": [
"/*.html"
],
"exclude": [
],
thresholds: {
global: {
statements: 100
}
}
}
}
};
var mapping = {};
var rootPath = (__dirname).split(path.sep).slice(-1)[0];
mapping['/components/' + rootPath + '/bower_components'] = 'bower_components';
ret.webserver.pathMappings.push(mapping);
module.exports = ret;
Well, I tried wct-istanbub (https://github.com/Bubbit/wct-istanbub), which seems to be a temporary workaround (Code coverage of Polymer Application with WCT), and it works.
wct.conf.js
"istanbub": {
"dir": "./coverage",
"reporters": ["text-summary", "lcov"],
"include": [
"**/*.html"
],
"exclude": [
"**/test/**",
"*/*.js"
],
thresholds: {
global: {
statements: 100
}
}
}
...
and the result is
...
chrome 66 RESPONSE quit()
chrome 66 BrowserRunner complete
Test run ended with great success
chrome 66 (2/0/0)
=============================== Coverage summary ===============================
Statements : 21.18% ( 2011/9495 )
Branches : 15.15% ( 933/6160 )
Functions : 18.08% ( 367/2030 )
Lines : 21.14% ( 2001/9464 )
================================================================================
Coverage for statements (21.18%) does not meet configured threshold (100%)
Error: Coverage failed

Sensu checks results event data

I am working on Sensu. I have installed Sensu on CentOS. I need to get the event messages which are generated by Sensu checks. I have added some of the Sensu community plugins like check-procs.rb, check-load.rb, check-banner.rb, metrics-ebs-volume.rb etc. I have written some handler files to handle events from these .rb files. I am getting events in sensu-server.log.
Example:
{"timestamp":"2016-08-10T07:32:08.000003+0000","level":"info","message":"publishing check request","payload":{"name":"swap-free","issued":1470814327,"command":"check-swap.sh 20 10"},"subscribers":["base_centos_monitoring"]}
I have written a Ruby file "nephele_events_handler.rb" which sends event messages through a REST call to another server. The Ruby file is in the location "/etc/sensu/handlers/". I am reading events from STDIN.read; I have read in the official Sensu documentation that event data is passed to handlers on STDIN.
#!/opt/sensu/embedded/bin/ruby
require "#{File.dirname(__FILE__)}/base"
require 'rubygems'
require 'json'
require 'uri'
require 'net/http'
require 'net/https'

# Sensu pipe handler: reads the event JSON that Sensu writes to STDIN,
# wraps it in a { "SensuMessage": <event> } envelope and POSTs it as a
# one-element JSON array to the downstream Processor service.
class RunProcs < BaseHandler
  # Parse the event from STDIN and forward it over HTTP.
  # Returns the Net::HTTPResponse from the Processor endpoint.
  def payload_check
    # Sensu passes the full event data to pipe handlers on STDIN.
    event = JSON.parse(STDIN.read, :symbolize_names => true)

    # Build the envelope from a Hash instead of string concatenation so the
    # payload is always well-formed JSON.
    sensujson = { 'SensuMessage' => event }.to_json

    uri = URI.parse('https://localhost:8080/Processor/services/sourceEvents/requestMsg')
    https = Net::HTTP.new(uri.host, uri.port)
    # Bug fix: the original set use_ssl = false while targeting an https://
    # URL; derive the flag from the scheme so transport and URL agree.
    https.use_ssl = (uri.scheme == 'https')

    req = Net::HTTP::Post.new(uri.path, 'Content-Type' => 'application/json')
    # Downstream service expects a JSON array of messages.
    req.body = "[ #{sensujson} ]"
    https.request(req)
  end
end

RunProcs.new.payload_check
I am writing the handler JSON file "processor.json" in the location "/etc/sensu/conf.d/handlers".
{
"handlers": {
"nephele_processor": {
"type": "pipe",
"command": "nephele_events_handler.rb"
}
}
}
But the issue I am facing is that I am only getting events from 'check-procs':
{"client":{"address":"10.81.1.105","subscriptions":["base_centos","base_chef-client","python","base_centos_monitoring","base_centos_monitoring_metrics","sensu_client","base_aws","base_aws_monitoring","sensu_master","all"],"name":"ip-localhost.internal","hostname":"ip-localhost","version":"0.25.3","timestamp":1470896756},"check":{"command":"check-procs.rb --pattern='chef-client' -W=1","subscribers":["base_centos_monitoring"],"handlers":["base_with_jira"],"interval":60,"team":"ops","aggregate":true,"occurrences":3,"refresh":300,"ticket":true,"name":"process-chef-client","issued":1470896771,"executed":1470896771,"duration":0.864,"output":"CheckProcs CRITICAL: Found 0 matching processes; cmd /chef-client/\n","status":2,"type":"standard","history":["2","2","2","2","2","2","2","2","2","2","2","2","2","2","2","2","2","2","2","2","2"],"total_state_change":0},"occurrences":19879,"action":"create","timestamp":1470896772,"id":"dc2b0698-dbac-416d-a9ae-42aa09d53cc3","last_state_change":1469690268,"last_ok":null}
The Check which is getting executed
{
"checks": {
"process-chef-client": {
"command": "check-procs.rb --pattern='chef-client' -W=1",
"subscribers": [
"base_centos_monitoring"
],
"handlers": [
"base_with_jira"
],
"interval": 60,
"team": "ops",
"aggregate": true,
"occurrences": 3,
"interval": 60,
"refresh": 300,
"ticket": true
}
}
}
base_with_jira.json
{
"handlers": {
"base_with_jira": {
"type": "set",
"handlers": [
"jira",
"nephele_processor"
],
"config": "http://apache.enron.nephele.solutions/uchiwa"
}
}
}
I am not getting events from other plugins. Can you explain what I have to do for this?

XML Syntax error when processing JSON output for atom feed in Alfresco webscript

I've run into an incredibly frustrating error. This was working without issue on Friday and I deployed it last night; I found out today that the modified timestamp isn't properly formatted for ISO 8601 (the application which digests the feed is very strict), so I reformatted it and built a jar for testing. This error comes up:
05110009 Failed to execute script 'classpath*:alfresco/site-webscripts/org/foo/components/dashlets/recent-docs.get.js': 05110008 SyntaxError: illegally formed XML syntax (jar:file:/usr/share/tomcat6/shared/lib/recent-docs.jar!/alfresco/site-webscripts/org/foo/components/dashlets/recent-docs.get.js#20(eval)#1)
So I revert the changes and the exact same error is displayed. Can anyone tell me why I'm receiving this? I even copied the original jar back in, same error.
Here's an example of the JSON output generated by the repo webscript:
{
"documents":
[
{
"site": "swsdp",
"nodeRef": "workspace://SpacesStore/1a0b110f-1e09-4ca2-b367-fe25e4964a4e",
"id": "1a0b110f-1e09-4ca2-b367-fe25e4964a4e",
"name": "Project Contract.pdf",
"title": "Project Contract for Green Enery",
"creator": "abeecher",
"description": "Conract for the Green Energy project",
"categories": [
],
"created": "15 Feb 2011 21:26:54 PM (UTC)",
"modified": "14 Jun 2011 10:28:54 AM (UTC)"
},
{
"site": "swsdp",
"nodeRef": "workspace://SpacesStore/05dedd34-9d9d-48d9-9af6-c81b555541c9",
"id": "05dedd34-9d9d-48d9-9af6-c81b555541c9",
"name": "WebSiteReview.mp4",
"title": "WebSiteReview.mp4",
"creator": "abeecher",
"description": "This is a video of the mock up to show the planned structure for the new web site.",
"categories": [
],
"created": "08 Mar 2011 10:35:10 AM (UTC)",
"modified": "08 Mar 2011 10:37:43 AM (UTC)"
}
]
}
And the share webscript stuff
recent-docs.get.desc.xml
<shortname>Recently Modified Documents</shortname>
<description>Retrieve Recently Modified content for a site</description>
<url>/components/recent-docs?site={site}</url>
<arg>
<shortname>site</shortname>
<description><![CDATA[site name]]></description>
</arg>
<format default="html">argument</format>
<authentication>user</authentication>
<transaction>required</transaction>
<cache>
<neverCache>false</neverCache>
<mustRevalidate/>
</cache>
</webscript>
recent-docs.get.js
// Share-tier controller for the recent-docs component: reads the "site"
// request argument, asks the repository-tier webscript for that site's
// recently modified documents, and exposes them to the template as
// model.docs (or an error object on a non-200 response).
function main()
{
   // Copy the "site" request parameter onto the model, if supplied.
   for (var name in args)
   {
      if (name == "site")
      {
         model.site = args[name];
      }
   }

   // Call the repository tier for the recent-documents JSON.
   var connector = remote.connect("alfresco");
   var json = connector.call("/recent-docs?site=" + escape(model.site));

   // Both branches parse the same response body, so do it once up front.
   // NOTE(review): eval-parsing of the connector response is the pattern the
   // original used; kept as-is to preserve behavior on this Rhino runtime.
   obj = eval("(" + json + ")");
   if (json.status == 200)
   {
      model.docs = obj["documents"];
   }
   else
   {
      obj.name = "Error";
      model.docs = obj;
   }
}
main();
recent-docs.get.atom.ftl
<#-- recent-docs.get.atom.ftl: renders the "docs" list placed on the model by
     recent-docs.get.js as an Atom feed of recently modified documents. -->
<feed xmlns="http://www.w3.org/2005/Atom">
<generator version="${server.version}">Alfresco (${server.edition})</generator>
<link rel="self" href="${absurl(url.full)?xml}" />
<id>${absurl(url.full)?xml}</id>
<title>Site: ${site}</title>
<subtitle>Alfresco Recently Modified Documents</subtitle>
<updated>${xmldate(date)}</updated>
<icon>${absurl(url.context)}/res/themes/default/images/app-logo-48.png</icon>
<#list docs as child>
<entry xmlns='http://www.w3.org/2005/Atom'>
<title>${child.name?html}</title>
<link href="${absurl(url.context)}/page/document-details?nodeRef=${child.nodeRef}"/>
<id>urn:uuid:${child.id}</id>
<#-- NOTE(review): Atom requires <updated> to be an RFC 3339 date-time, but the
     repo webscript emits a display string like "08 Mar 2011 10:37:43 AM (UTC)"
     for child.modified — presumably this needs reformatting upstream; confirm. -->
<updated>${child.modified}</updated>
<summary>
${msg("feed.uploaded", child.name, child.creator)}<br />
<#if child.modifier?exists>${msg("feed.modified", child.modified, child.modifier)}<br /></#if>
${child.description!""}<br />
<#if child.categories[0]?exists>
${msg("feed.categories")} <#list child.categories as category> ${category.name}<#if category_has_next>, </#if></#list></#if><br />
</summary>
<author>
<name>${child.creator}</name>
</author>
</entry>
</#list>
</feed>
I've been hacking at this, trying various things without success. I've got a set of changes to correct the Atom template so it's a valid Atom document, but I need to figure out why this error is showing up. I think it's because I'm not converting everything to a string in the repo webscript (only the modified/creation time), but it's not clear how I should prepare it.
Update: I modified the Atom template to run on the repo side and the resulting XML is valid, so at least that's working. On the Share side I'm still seeing the syntax error, which is problematic as I plan to build a dashlet with this.