Should I replace wct-istanbul with wct-istanbub in order to estimate how much of my Polymer web components code is covered by tests - polymer

There is some similarity between my question and How to measure common coverage for Polymer components + .js files?. Nevertheless, the accepted answer there is "split to .js files and include it to components" in order to use wct-istanbul, and all my web components and tests are in .html files (the JavaScript is inside of each .html file).
My straight question is: can I still use wct-istanbul to check how much of my code is covered by tests? If so, what is wrong in the configuration described below? If not, is wct-istanbub planned to replace wct-istanbul for Polymer projects?
package.json
"polyserve": "^0.18.0",
"web-component-tester": "^6.0.0",
"web-component-tester-istanbul": "^0.10.0",
...
wct.conf.js
// WCT configuration: runs the 'test' suites locally in Chrome (Sauce Labs
// disabled) and collects Istanbul coverage for the project's HTML components.
var path = require('path');
var ret = {
'suites': ['test'],
'webserver': {
'pathMappings': []
},
'plugins': {
'local': {
'browsers': ['chrome']
},
'sauce': {
'disabled': true
},
"istanbul": {
"dir": "./coverage",
"reporters": ["text-summary", "lcov"],
// FIX: "/*.html" only matches .html files at the served root, so no
// component sources were ever instrumented (which is why coverage came
// back empty). "**/*.html" matches components at any depth — the same
// pattern the working wct-istanbub configuration uses.
"include": [
"**/*.html"
],
"exclude": [
],
thresholds: {
global: {
statements: 100
}
}
}
}
};
// Map /components/<project-dir-name>/bower_components back onto the local
// bower_components directory so dependencies resolve under WCT's web server.
var mapping = {};
var rootPath = (__dirname).split(path.sep).slice(-1)[0];
mapping['/components/' + rootPath + '/bower_components'] = 'bower_components';
ret.webserver.pathMappings.push(mapping);
module.exports = ret;
Well, I tried WCT-istanbub (https://github.com/Bubbit/wct-istanbub), which seems to be a temporary workaround (Code coverage of Polymer Application with WCT), and it works.
wct.conf.js
"istanbub": {
"dir": "./coverage",
"reporters": ["text-summary", "lcov"],
"include": [
"**/*.html"
],
"exclude": [
"**/test/**",
"*/*.js"
],
thresholds: {
global: {
statements: 100
}
}
}
...
and the result is
...
chrome 66 RESPONSE quit()
chrome 66 BrowserRunner complete
Test run ended with great success
chrome 66 (2/0/0)
=============================== Coverage summary ===============================
Statements : 21.18% ( 2011/9495 )
Branches : 15.15% ( 933/6160 )
Functions : 18.08% ( 367/2030 )
Lines : 21.14% ( 2001/9464 )
================================================================================
Coverage for statements (21.18%) does not meet configured threshold (100%)
Error: Coverage failed

Related

Packer manifest missing "OSDiskUriReadOnlySas" artifact

I have some packer code to generate a VHD from an Azure Marketplace image. The result produces a OSDiskUriReadOnlySas that I can't see in the manifest.
Packer Output:
OSType: Windows
StorageAccountLocation: australiaeast
OSDiskUri: https://<storage_account>.blob.core.windows.net/<container>/Microsoft.Compute/Images/<folder>/windows_server2019-osDisk.25d65d12-f827-45af-a37e-6b361ea2d380.vhd
OSDiskUriReadOnlySas: https://<storage_account>.blob.core.windows.net/<container>/Microsoft.Compute/Images/<folder>/windows_server2019-osDisk.25d65d12-f827-45af-a37e-6b361ea2d380.vhd?se=2022-10-26T21%3A12%3fghfghrtyh45%$45y7DBrTtDNmVIB5YZ7SUESheShmTGGYfAWm9c%3D&sp=r&sr=b&sv=2018-03-28
TemplateUri: https://<storage_account>.blob.core.windows.net/<container>/Microsoft.Compute/Images/<folder>/windows_server2019-vmTemplate.25d65d12-f827-45af-a37e-6b361ea2d380.json
TemplateUriReadOnlySas: https://<storage_account>.blob.core.windows.net/<container>/Microsoft.Compute/Images/<folder>/windows_server2019-vmTemplate.25d65d12-f827-45af-a37e-6b361ea2d380.json?se=2022-10-26T21%3A12%3A00Z&sig=z7WEFGEGE#FW#QWWHiFYJS0lqsGIM%3D&sp=r&sr=b&sv=2018-03-28
And I have some code to output the manifest:
post-processor "manifest" {
output = "server2019_manifest_${uuidv4()}.json"
strip_path = true
}
The manifest is missing OSDiskUriReadOnlySas:
"builds": [
{
"name": "server_2019_vhd",
"builder_type": "azure-arm",
"build_time": 1664226720,
"files": null,
"artifact_id": "https://<storage_account>.blob.core.windows.net/<container>/Microsoft.Compute/Images/<folder>/windows_server2019-osDisk.25d65d12-f827-45af-a37e-6b361ea2d380.vhd",
"packer_run_uuid": "20d6c483-e9eb-493b-b729-e9a4987916f4",
"custom_data": null
}
],
"last_run_uuid": "20d6c483-e9eb-493b-b729-e9a4987913f4"
}
]
How can I get OSDiskUriReadOnlySas in the manifest?

Converting Packer 1.6 vsphere-iso configuration code from JSON to HCL2

With the release of Packer 1.6 came several deprecated fields in the vsphere-iso builder. From the looks of it, this seems to be a format/type change, because the fields actually still exist, just as nested properties. An example of the changes are the following:
Working in Packer 1.5.6:
JSON
"disk_size": 123456,
"disk_thin_provisioned": true
"network": "VM Network",
"network_card": "vmxnet3"
Working in Packer 1.6.0:
JSON
"storage": [
{
"disk_size": 123456,
"disk_thin_provisioned": true
}
],
"network_adapters": [
{
"network": "VM Network",
"network_card": "vmxnet3"
}
]
The issue I have at the moment is I'm using Packer 1.6.0 and am trying to convert the above working JSON code to HCL2. I can't figure out the HCL2 syntax that supports the changes that were made in Packer 1.6.0.
I've tried the following:
network_adapters = {
network_card = "vmxnet3"
network = "VM Network"
}
Output:
An argument named "network_adapter" is not expected here.
network_adapters = (
network_card = "vmxnet3"
network = "VM Network"
)
Output:
Error: Unbalanced parentheses
on .\Packer\ConfigFileName.pkr.hcl line 19, in source "vsphere-iso"
"Test": 18: storage = ( 19: disk_thin_provisioned = true
Expected a closing parenthesis to terminate the expression.
network_adapters = [
network_card = "vmxnet3",
network = "VM Network"
]
Output:
Error: Missing item separator
on .\Packer\ConfigFileName.pkr.hcl line 19, in source "vsphere-iso"
"Test": 18: storage = [ 19: disk_thin_provisioned =
true,
Expected a comma to mark the beginning of the next item.
I've also tried several other permutations of different collection syntax together with no luck so far. Any suggestions or tips would greatly be appreciated
The correct syntax is the following:
network_adapters {
network_card = "vmxnet3",
network = "VM Network"
}
Note that it's not using an assignment operator = between network_adapters and {
Credit goes to SwampDragons over on the Packer forums for pointing this out.
If you're interested in knowing why: There was a change to how maps are treated in HCL2 back in May 2020 with the release of Packer 1.5.6
core/hcl2: Maps are now treated as settable arguments as opposed to blocks. For example tags = {} instead of tags {} [GH-9035]
Reference: https://github.com/hashicorp/packer/blob/master/CHANGELOG.md#156-may-1-2020

Scrapy multiple regular expressions in LinkExtractor seem to be not working

I've got my regular expressions inside a JSON file. This file gets loaded as a configuration for my spider. The spider creates one LinkExtractor with allow and deny regular expression rules.
I'd like to:
crawl and scrape product pages (scraping / parsing is NOT working)
crawl category pages
avoid general pages (about us, privacy, etc.)
It all works well on some shops, but not on others and I believe it's a problem of my Regular Expressions.
"rules": [
{
"deny": ["\\/(customer\\+service|ways\\+to\\+save|sponsorship|order|cart|company|specials|checkout|integration|blog|brand|account|sitemap|prefn1=)\\/"],
"follow": false
},
{
"allow": ["com\\/store\\/details\\/"],
"follow": true,
"use_content": true
},
{
"allow": ["com\\/store\\/browse\\/"],
"follow": true
}
],
URL patterns:
Products:
https://www.example.com/store/details/Nike+SB-Portmore-II-Solar-Canvas-Mens
https://www.example.com/store/details/Coleman+Renegade-Mens-Hiking
https://www.example.com/store/details/Mueller+ATF3-Ankle-Brace
https://www.example.com/store/details/Planet%20Fitness+18
https://www.example.com/store/details/Lifeline+Pro-Grip-Ring
https://www.example.com/store/details/Nike+Phantom-Vision
Categories:
https://www.example.com/store/browse/footwear/
https://www.example.com/store/browse/apparel/
https://www.example.com/store/browse/fitness/
Deny:
https://www.example.com/store/customer+service/Online+Customer+Service
https://www.example.com/store/checkout/
https://www.example.com/store/ways+to+save/
https://www.example.com/store/specials
https://www.example.com/store/company/Privacy+Policy
https://www.example.com/store/company/Terms+of+Service
Loading the rules from JSON inside my spider __init__
for rule in self.MY_SETTINGS["rules"]:
allow_r = ()
if "allow" in rule.keys():
allow_r = [a for a in rule["allow"]]
deny_r = ()
if "deny" in rule.keys():
deny_r = [d for d in rule["deny"]]
restrict_xpaths_r = ()
if "restrict_xpaths" in rule.keys():
restrict_xpaths_r = [rx for rx in rule["restrict_xpaths"]]
Sportygenspider.rules.append(Rule(
LinkExtractor(
allow=allow_r,
deny=deny_r,
restrict_xpaths=restrict_xpaths_r,
),
follow=rule["follow"],
callback='parse_item' if ("use_content" in rule.keys()) else None
))
If I do a pprint(vars(onerule.link_extractor)) I can see the Python regex correctly:
'deny_res': [re.compile('\\/(customer\\+service|sponsorship|order|cart|company|specials|checkout|integration|blog|account|sitemap|prefn1=)\\/')]
{'allow_domains': set(),
'allow_res': [re.compile('com\\/store\\/details\\/')],
{'allow_domains': set(),
'allow_res': [re.compile('com\\/store\\/browse\\/')],
Testing the regex in https://regex101.com/ seems to be fine as well (despite: I'm using \\/ in my JSON file and \/ in regex101.com)
In my spider logfile, I can see that the product pages are being crawled, but not parsed:
2019-02-01 08:25:33 [scrapy.core.engine] DEBUG: Crawled (200) <GET https://www.example.com/store/details/FILA+Hometown-Mens-Lifestyle-Shoes/5345120230028/_/A-6323521;> (referer: https://www.example.com/store/browse/footwear)
2019-02-01 08:25:47 [scrapy.core.engine] DEBUG: Crawled (200) <GET https://www.example.com/store/details/FILA+D-Formation-Mens-Lifestyle-Shoes/5345120230027/_/A-6323323> (ref
Why does the spider not parse the product pages?
(same code, different JSON works on different shops)
After hours of debugging and testing, I figured that I had to change the order of the rules.
Products to scrape rule
Deny about us etc.
Categories to follow
Now it is working.
"rules": [
{
"allow": ["com\\/store\\/details\\/"],
"follow": true,
"use_content": true
},
{
"deny": ["\\/(customer\\+service|ways\\+to\\+save|sponsorship|order|cart|company|specials|checkout|integration|blog|brand|account|sitemap|prefn1=)\\/"],
"follow": false
},
{
"allow": ["com\\/store\\/browse\\/"],
"follow": true
}
],

Node.js how do you read a JSON with an element that is a HTTP address

OK I just ran into an issue. I am using Auth0 to create users with different rights (not scopes just rights) in the App Metadata. When I decode the Token I get this json:
{
"iss": "https://testing.auth0.com/",
"sub": "auth0|58e7bae154941844b507eaf5",
"aud": "OSBkLd832tIhpDe0QFJbQ9vutgB2s6cJ",
"exp": 1497016797,
"iat": 1496980797,
"https://thetestgroup.com/app_metadata": {
"is_admin": true
}
}
As you can see the app metadata is in the element "https://thetestgroup.com/app_metadata". Normally I would just do something like this in my code (auth.payload.iat) to get the iat but for the app_metadata it rejects it because of the :. Is there a good way to get at that data?
ok lets talk javascript (node) and your json
Firefox Scratchpad (Shift-F4)
var x = {
"iss": "https://testing.auth0.com/",
"sub": "auth0|58e7bae154941844b507eaf5",
"aud": "OSBkLd832tIhpDe0QFJbQ9vutgB2s6cJ",
"exp": 1497016797,
"iat": 1496980797,
"https://thetestgroup.com/app_metadata": {
"is_admin": true
}
}
x['https://thetestgroup.com/app_metadata'].is_admin // hit run
/*
true
*/
node.js
~ $ node -v
v8.1.0
~ $ node
> var x = {
... "iss": "https://testing.auth0.com/",
... "sub": "auth0|58e7bae154941844b507eaf5",
... "aud": "OSBkLd832tIhpDe0QFJbQ9vutgB2s6cJ",
... "exp": 1497016797,
... "iat": 1496980797,
... "https://thetestgroup.com/app_metadata": {
..... "is_admin": true
..... }
... }
undefined
> x['https://thetestgroup.com/app_metadata'].is_admin
true
>
Please provide a mcve since pure JS and JSON are quite happily using the (strange) key - as demonstrated.

Sensu checks results event data

I am working on Sensu. I have installed Sensu on CentOS. I need to get the event messages which are generated by Sensu checks. I have added some of the Sensu community plugins, like check-procs.rb, check-load.rb, check-banner.rb, metrics-ebs-volume.rb, etc. I have written some handler files to handle the events from these .rb files. I am getting events in sensu-server.log.
Example:
{"timestamp":"2016-08-10T07:32:08.000003+0000","level":"info","message":"publishing check request","payload":{"name":"swap-free","issued":1470814327,"command":"check-swap.sh 20 10"},"subscribers":["base_centos_monitoring"]}
I have written a Ruby file "nephele_events_handler.rb" which sends event messages through a REST call to another server. The Ruby file is in the location "/etc/sensu/handlers/". I am reading events from STDIN.read; I have read in the official Sensu documentation that event data is passed to handlers via STDIN.
#!/opt/sensu/embedded/bin/ruby
# Sensu pipe handler: reads the event JSON that Sensu writes to this process's
# STDIN, wraps it in a {"SensuMessage": <event>} envelope, and forwards it as a
# one-element JSON array to an external REST endpoint.
require "#{File.dirname(__FILE__)}/base"
require 'rubygems'
require 'json'
require 'uri'
require 'net/http'
require 'net/https'
# NOTE: the duplicate `require 'json'` that was here has been removed.
class RunProcs < BaseHandler
  # Reads one Sensu event from STDIN and POSTs it to the processor service.
  def payload_check
    # Sensu pipes the full event payload into the handler via STDIN.
    event = JSON.parse(STDIN.read, :symbolize_names => true)
    # Build the envelope as a hash and serialize it, instead of hand-gluing
    # JSON fragments together with string concatenation (error-prone).
    sensujson = { 'SensuMessage' => event }.to_json
    uri = URI.parse("https://localhost:8080/Processor/services/sourceEvents/requestMsg")
    https = Net::HTTP.new(uri.host, uri.port)
    # NOTE(review): the URL scheme is https:// but use_ssl is false, so this
    # actually sends plain HTTP to port 8080 — confirm which is intended.
    https.use_ssl = false
    req = Net::HTTP::Post.new(uri.path, initheader = { 'Content-Type' => 'application/json' })
    # The endpoint expects a JSON array of messages.
    req.body = "[ #{sensujson} ]"
    res = https.request(req)
  end
end
# Script entry point: run once per piped event. (Moved outside the class body —
# in the original it sat between the method's `end` and the class's `end`.)
info = RunProcs.new
info.payload_check
I am writing the handler JSON file "processor.json" inside the location "/etc/sensu/conf.d/handlers".
{
"handlers": {
"nephele_processor": {
"type": "pipe",
"command": "nephele_events_handler.rb"
}
}
}
But the issue I am facing is that I am only getting events from 'check-procs':
{"client":{"address":"10.81.1.105","subscriptions":["base_centos","base_chef-client","python","base_centos_monitoring","base_centos_monitoring_metrics","sensu_client","base_aws","base_aws_monitoring","sensu_master","all"],"name":"ip-localhost.internal","hostname":"ip-localhost","version":"0.25.3","timestamp":1470896756},"check":{"command":"check-procs.rb --pattern='chef-client' -W=1","subscribers":["base_centos_monitoring"],"handlers":["base_with_jira"],"interval":60,"team":"ops","aggregate":true,"occurrences":3,"refresh":300,"ticket":true,"name":"process-chef-client","issued":1470896771,"executed":1470896771,"duration":0.864,"output":"CheckProcs CRITICAL: Found 0 matching processes; cmd /chef-client/\n","status":2,"type":"standard","history":["2","2","2","2","2","2","2","2","2","2","2","2","2","2","2","2","2","2","2","2","2"],"total_state_change":0},"occurrences":19879,"action":"create","timestamp":1470896772,"id":"dc2b0698-dbac-416d-a9ae-42aa09d53cc3","last_state_change":1469690268,"last_ok":null}
The Check which is getting executed
{
"checks": {
"process-chef-client": {
"command": "check-procs.rb --pattern='chef-client' -W=1",
"subscribers": [
"base_centos_monitoring"
],
"handlers": [
"base_with_jira"
],
"interval": 60,
"team": "ops",
"aggregate": true,
"occurrences": 3,
"interval": 60,
"refresh": 300,
"ticket": true
}
}
}
base_with_jira.json
{
"handlers": {
"base_with_jira": {
"type": "set",
"handlers": [
"jira",
"nephele_processor"
],
"config": "http://apache.enron.nephele.solutions/uchiwa"
}
}
}
I am not getting events from the other plugins. Can you explain what I have to do to fix this?