I want to import two CSV files into an OrientDB database. The first contains the vertices, with 1 million records. The second contains the edges, with 59 million records.
I have two JSON files to import:
vertex
{
"source": { "file": { "path": "../csvs/metodo01/pesquisador.csv" } },
"extractor": { "row": {} },
"transformers": [
{ "csv": {} },
{ "vertex": { "class": "Pesquisador" } }
],
"loader": {
"orientdb": {
"dbURL": "remote:localhost/dbCemMilM01",
"dbType": "graph",
"batchCommit": 1000,
"classes": [
{"name": "Pesquisador", "extends": "V"}
], "indexes": [
{"class":"Pesquisador", "fields":["psq_id:integer"], "type":"UNIQUE" }
]
}
}
}
edge
{
"config": {
"log": "info",
"parallel": false
},
"source": {
"file": {
"path": "../csvs/metodo01/a10.csv"
}
},
"extractor": {
"row": {
}
},
"transformers": [{
"csv": {
"separator": ",",
"columnsOnFirstLine": true,
"columns": ["psq_id_from:integer",
"pub_id_to:integer",
"ordem:integer"]
}
},
{
"command": {
"command": "create edge PUBLICOU from (select from Pesquisador where psq_id = ${input.psq_id_from}) to (select from Publicacao where pub_id = ${input.pub_id_to}) set ordem = ${input.ordem} ",
"output": "edge"
}
}],
"loader": {
"orientdb": {
"dbURL": "remote:localhost/dbUmMilhaoM01",
"dbType": "graph",
"standardElementConstraints": false,
"batchCommit": 1000,
"classes": [{
"name": "PUBLICOU",
"extends": "E"
}]
}
}
}
During the import, OrientDB suggests using an index to accelerate the process.
How do I do that?
The command is just: create edge PUBLICOU from (select from Pesquisador where psq_id = ${input.psq_id_from}) to (select from Publicacao where pub_id = ${input.pub_id_to}) set ordem = ${input.ordem}
To speed up the create edge process you may need indexes on both properties: Pesquisador.psq_id, which you already have, and Publicacao.pub_id.
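For example (a minimal sketch, assuming the Publicacao class already defines an integer pub_id property), the missing index can be created once in the console before running the edge ETL:
CREATE INDEX Publicacao.pub_id ON Publicacao (pub_id) UNIQUE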
Ivan
You can declare indexes directly in the ETL configuration. Example taken from DBPedia importer:
"orientdb": {
"dbURL": "plocal:/temp/databases/dbpedia",
"dbUser": "importer",
"dbPassword": "IMP",
"dbAutoCreate": true,
"tx": false,
"batchCommit": 1000,
"wal" : false,
"dbType": "graph",
"classes": [
{"name":"Person", "extends": "V" },
{"name":"Customer", "extends": "Person", "clusters":8 }
],
"indexes": [
{"class":"V", "fields":["URI:string"], "type":"UNIQUE" },
{"class":"Person", "fields":["town:string"], "type":"NOTUNIQUE" ,
metadata : { "ignoreNullValues" : false }
}
]
}
For more information look at: http://orientdb.com/docs/2.2/Loader.html
To speed up the load process, my suggestion is to work in plocal mode and then move the created database to a standalone OrientDB server.
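For instance (just a sketch, the path below is illustrative), the loader section of the edge configuration could point to a local database while importing, and the resulting database directory can later be moved under the server's databases folder:
"loader": {
  "orientdb": {
    "dbURL": "plocal:/temp/databases/dbUmMilhaoM01",
    "dbType": "graph",
    "batchCommit": 1000
  }
}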
I am using the NLog JSON layout and took this from the example:
{
"Logging": {
"NLog": {
"IncludeScopes": false,
"ParseMessageTemplates": true,
"CaptureMessageProperties": true
}
},
"NLog": {
"autoreload": true,
"internalLogLevel": "Info",
"internalLogFile": "c:/temp/console-example-internal2.log",
"throwConfigExceptions": true,
"targets": {
"console": {
"type": "Console",
"layout": "${date}|${level:uppercase=true}|${message} ${exception:format=tostring}|${logger}|${all-event-properties}"
},
"file": {
"type": "AsyncWrapper",
"target": {
"wrappedFile": {
"type": "File",
"fileName": "c:/temp/console-example2.log",
"layout": {
"type": "JsonLayout",
"Attributes": [
{ "name": "timestamp", "layout": "${date:format=o}" },
{ "name": "level", "layout": "${level}" },
{ "name": "logger", "layout": "${logger}" },
{ "name": "message", "layout": "${message:raw=true}" },
{ "name": "properties", "encode": false, "layout": { "type": "JsonLayout", "includeallproperties": "true" } }
]
}
}
}
}
},
"rules": [
{
"logger": "*",
"minLevel": "Trace",
"writeTo": "File,Console"
}
]
}
}
https://github.com/NLog/NLog.Extensions.Logging/blob/master/examples/NetCore2/ConsoleExampleJsonConfig/appsettings.json
On this line I saw { "name": "message", "layout": "${message:raw=true}" } and changed raw to false.
When I do this:
var test = "Something";
logger.Info("This is what is stored in the variable: {var}", test);
I get
{
"message": This is what is stored in the variable: \"Something\""
}
Why is it in quotes?
When I change raw to "true", I get:
{
"message": This is what is stored in the variable: {test}"
}
How do I just get "This is what is stored in the variable: Something"?
I have the following output coming from a Step Functions task (ListObjectsV2):
{
"Contents": [
{
"ETag": "\"86c12c034bc6c30cb89b500b954c188f\"",
"Key": "55271f52fffe4461a2ee3228ebb97157/input/batch_1.csv",
"LastModified": "2023-02-09T13:46:20Z",
"Size": 796014,
"StorageClass": "STANDARD"
},
{
"ETag": "\"58e4a770e0f66073b00d185df500f07f\"",
"Key": "55271f52fffe4461a2ee3228ebb97157/input/batch_2.csv",
"LastModified": "2023-02-09T13:47:20Z",
"Size": 934038,
"StorageClass": "STANDARD"
},
{
"ETag": "\"460abd0de64d5cb67e8f0d46878cb1ef\"",
"Key": "55271f52fffe4461a2ee3228ebb97157/input/batch_3.csv",
"LastModified": "2023-02-09T13:46:57Z",
"Size": 794264,
"StorageClass": "STANDARD"
},
{
"ETag": "\"1bfedc3dc92e4ba8d04e24b9b5a0ed58\"",
"Key": "55271f52fffe4461a2ee3228ebb97157/input/batch_4.csv",
"LastModified": "2023-02-09T13:46:24Z",
"Size": 788756,
"StorageClass": "STANDARD"
},
{
"ETag": "\"9d6c434ce5ebdf203a790fbcf19338dc\"",
"Key": "55271f52fffe4461a2ee3228ebb97157/input/batch_5.csv",
"LastModified": "2023-02-09T13:47:07Z",
"Size": 831156,
"StorageClass": "STANDARD"
}
],
"IsTruncated": false,
"KeyCount": 5,
"MaxKeys": 1000,
"Name": "vita-internal-text-classification-dev-183576513728",
"Prefix": "55271f52fffe4461a2ee3228ebb97157"
}
I want to have an array containing only the Key key, to pass to the next state, like so:
[
{
"Key": "55271f52fffe4461a2ee3228ebb97157/input/batch_1.csv"
},
{
"Key": "55271f52fffe4461a2ee3228ebb97157/input/batch_2.csv"
},
{
"Key": "55271f52fffe4461a2ee3228ebb97157/input/batch_3.csv"
},
{
"Key": "55271f52fffe4461a2ee3228ebb97157/input/batch_4.csv"
},
{
"Key": "55271f52fffe4461a2ee3228ebb97157/input/batch_5.csv"
}
]
So far I've tried setting the ResultPath to:
$.Contents[*].Key
$.Contents[*].['Key']
What I get is:
[
"55271f52fffe4461a2ee3228ebb97157/input/batch_1.csv",
"55271f52fffe4461a2ee3228ebb97157/input/batch_2.csv",
"55271f52fffe4461a2ee3228ebb97157/input/batch_3.csv",
"55271f52fffe4461a2ee3228ebb97157/input/batch_4.csv",
"55271f52fffe4461a2ee3228ebb97157/input/batch_5.csv",
]
But I've gotten bad output from that. Any help?
The way I've solved this is to use an Inline Map state with a Pass state to build the necessary format. You can see this pattern in an example of using Step Functions Distributed Map to bulk delete objects from S3, specifically in the inner Create Object Identifier Array Map state. If you were doing this in Standard Workflows, this could be a cost concern given the number of state transitions involved. But since the Item Processor uses Express Workflows, which are billed by duration (and these are super fast), it works pretty well.
{
"Comment": "A state machine to bulk delete objects from S3 using Distributed Map",
"StartAt": "Confirm Bucket Provided",
"States": {
"Confirm Bucket Provided": {
"Type": "Choice",
"Choices": [
{
"Not": {
"Variable": "$.bucket",
"IsPresent": true
},
"Next": "Fail - No Bucket"
}
],
"Default": "Check for Prefix"
},
"Check for Prefix": {
"Type": "Choice",
"Choices": [
{
"Not": {
"Variable": "$.prefix",
"IsPresent": true
},
"Next": "Generate Parameters - Without Prefix"
}
],
"Default": "Generate Parameters - With Prefix"
},
"Generate Parameters - Without Prefix": {
"Type": "Pass",
"Parameters": {
"Bucket.$": "$.bucket",
"Prefix": ""
},
"ResultPath": "$.list_parameters",
"Next": "Delete Objects from S3 Bucket"
},
"Fail - No Bucket": {
"Type": "Fail",
"Error": "InsuffcientArguments",
"Cause": "No Bucket was provided"
},
"Generate Parameters - With Prefix": {
"Type": "Pass",
"Next": "Delete Objects from S3 Bucket",
"Parameters": {
"Bucket.$": "$.bucket",
"Prefix.$": "$.prefix"
},
"ResultPath": "$.list_parameters"
},
"Delete Objects from S3 Bucket": {
"Type": "Map",
"ItemProcessor": {
"ProcessorConfig": {
"Mode": "DISTRIBUTED",
"ExecutionType": "EXPRESS"
},
"StartAt": "Create Object Identifier Array",
"States": {
"Create Object Identifier Array": {
"Type": "Map",
"ItemProcessor": {
"ProcessorConfig": {
"Mode": "INLINE"
},
"StartAt": "Create Object Identifier",
"States": {
"Create Object Identifier": {
"Type": "Pass",
"End": true,
"Parameters": {
"Key.$": "$.Key"
}
}
}
},
"ItemsPath": "$.Items",
"ResultPath": "$.object_identifiers",
"Next": "Delete Objects"
},
"Delete Objects": {
"Type": "Task",
"Next": "Clear Output",
"Parameters": {
"Bucket.$": "$.BatchInput.bucket",
"Delete": {
"Objects.$": "$.object_identifiers"
}
},
"Resource": "arn:aws:states:::aws-sdk:s3:deleteObjects",
"Retry": [
{
"ErrorEquals": [
"States.ALL"
],
"BackoffRate": 2,
"IntervalSeconds": 1,
"MaxAttempts": 6
}
],
"ResultSelector": {
"Deleted.$": "$.Deleted",
"RetryCount.$": "$$.State.RetryCount"
}
},
"Clear Output": {
"Type": "Pass",
"End": true,
"Result": {}
}
}
},
"ItemReader": {
"Resource": "arn:aws:states:::s3:listObjectsV2",
"Parameters": {
"Bucket.$": "$.list_parameters.Bucket",
"Prefix.$": "$.list_parameters.Prefix"
}
},
"MaxConcurrency": 5,
"Label": "S3objectkeys",
"ItemBatcher": {
"MaxInputBytesPerBatch": 204800,
"MaxItemsPerBatch": 1000,
"BatchInput": {
"bucket.$": "$.list_parameters.Bucket"
}
},
"ResultSelector": {},
"End": true
}
}
}
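Applied to the ListObjectsV2 output in the question, a stripped-down sketch of the same idea (the state names here are made up, and it assumes the listing is available at $.Contents) is an inline Map over the Contents array whose Pass state keeps only the Key:
"Build Key Array": {
  "Type": "Map",
  "ItemsPath": "$.Contents",
  "ItemProcessor": {
    "ProcessorConfig": { "Mode": "INLINE" },
    "StartAt": "Pick Key",
    "States": {
      "Pick Key": {
        "Type": "Pass",
        "Parameters": { "Key.$": "$.Key" },
        "End": true
      }
    }
  },
  "ResultPath": "$.object_identifiers",
  "End": true
}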
I am trying to flatten a nested JSON returned from a REST source. The pipeline code is as follows.
The problem is that this pipeline returns only the first object from the JSON dataset and skips all the rest of the rows.
Can you please guide me on how to iterate over the nested objects?
Thanks
Sameet
{
"name": "STG_NCR2",
"properties": {
"activities": [
{
"name": "Copy data1",
"type": "Copy",
"dependsOn": [],
"policy": {
"timeout": "7.00:00:00",
"retry": 0,
"retryIntervalInSeconds": 30,
"secureOutput": false,
"secureInput": false
},
"userProperties": [],
"typeProperties": {
"source": {
"type": "RestSource",
"httpRequestTimeout": "00:01:40",
"requestInterval": "00.00:00:00.010",
"requestMethod": "GET",
"additionalHeaders": {
"OData-MaxVersion": "4.0",
"OData-Version": "4.0",
"Prefer": "odata.include-annotations=*"
}
},
"sink": {
"type": "AzureSqlSink"
},
"enableStaging": false,
"translator": {
"type": "TabularTranslator",
"mappings": [
{
"source": {
"path": "$['value'][0]['tco_ncrid']"
},
"sink": {
"name": "NCRID"
}
},
{
"source": {
"path": "['tco_name']"
},
"sink": {
"name": "EquipmentSerialNumber"
}
}
],
"collectionReference": "$['value'][0]['tco_ncr_tco_equipment']"
}
},
"inputs": [
{
"referenceName": "Rest_PowerApps_NCR",
"type": "DatasetReference"
}
],
"outputs": [
{
"referenceName": "Prestaging_PowerApps_NCREquipments",
"type": "DatasetReference"
}
]
}
],
"annotations": []
}
}
The JSON is in the following format
[
{
"value":[
{
"tco_ncrid":"abc-123",
"tco_ncr_tco_equipment":[
{
"tco_name":"abc"
}
]
},
{
"tco_ncrid":"abc-456",
"tco_ncr_tco_equipment":[
{
"tco_name":"xyz"
},
{
"tco_name":"yzx"
}
]
}
]
}
]
This can be resolved by amending the translator property as follows.
"translator": {
"type": "TabularTranslator",
"mappings": [
{
"source": {
"path": "$.['value'][0].['tco_ncrid']"
},
"sink": {
"name": "NCRID",
"type": "String"
}
},
{
"source": {
"path": "$.['value'][0].['tco_text_id']"
},
"sink": {
"name": "EquipmentDescription",
"type": "String"
}
},
{
"source": {
"path": "['tco_name']"
},
"sink": {
"name": "EquipmentSerialNumber",
"type": "String"
}
}
],
"collectionReference": "$.['value'][*].['tco_ncr_tco_equipment']"
}
This code forces the pipeline to iterate over the nested array, but as you can see, NCRID is hardcoded to the first element of the value array. This is not exactly what I want, as I am looking for all Equipment Serial Numbers against every NCRID. Still researching...
I'm a newbie in OrientDB.
I have a CSV file which contains both the nodes and the edges, and I need to create a graph out of it.
csv file
"p1","p2","score"
"LGG_00001","LGG_01682",282
"LGG_00001",".LGG_01831",183
"LGG_00001","LGG_01491",238
The edge is IsActingWith, which has a score attribute.
{
"source": {
"file": {
"path": "C:/Users/sams/Desktop/OrientDB2/lac2.csv"
}
},
"extractor": {
"csv": {}
},
"transformers": [
{
"vertex": {
"class": "lac2"
}
},
{
"vertex": {
"class": "lac2"
}
},
{
"edge":
{
"class": "IsActingWith",
"joinFieldName": "score_p",
"lookup": "acore",
"direction": "out"
}
}
],
"loader": {
"orientdb": {
"dbURL": "plocal:C:/Users/sams/Desktop/OrientDB2/database/proj",
"dbType": "graph",
"dbAutoCreate": true,
"classes": [
{
"name": "lac2",
"extends": "V"
},
{
"name": "lac2",
"extends": "V"
},
{
"name": "IsActingWith",
"extends": "E"
}
]
}
}
}
That is what I tried, but it does not seem logical to me.
The final result I'm looking for is a graph made of p1 -> IsActingWith -> p2, where IsActingWith carries the score attribute.
Maybe there's a better solution, but this works.
My plan is to use three different ETL scripts: the first and second insert the vertices, and the third creates the edges. Of course, you'll need to execute them in order.
vertex_import_p1.json
{
"source": { "file": { "path": "/home/ivan/Cose/OrientDB/issues/stack/44641116/file.csv" } },
"extractor": { "csv": {
"separator": ",",
"columns": ["p1:String","p2:String","s:Integer"] } },
"transformers": [
{ "command": { "command": "UPDATE lac2 set p='${input.p1}' UPSERT WHERE p='${input.p1}'"} }
],
"loader": {
"orientdb": {
"dbURL": "plocal:/home/ivan/Cose/OrientDB/issues/stack/44641116/db",
"dbUser": "admin",
"dbPassword": "admin",
"dbType": "graph",
"classes": [
{"name": "lac2", "extends": "V"},
{"name": "isActingWith", "extends": "E"}
]
}
}
}
vertex_import_p2.json
{
"source": { "file": { "path": "/home/ivan/Cose/OrientDB/issues/stack/44641116/file.csv" } },
"extractor": { "csv": {
"separator": ",",
"columns": ["p1:String","p2:String","s:Integer"] } },
"transformers": [
{ "command": { "command": "UPDATE lac2 set p='${input.p2}' UPSERT WHERE p='${input.p2}'"} }
],
"loader": {
"orientdb": {
"dbURL": "plocal:/home/ivan/Cose/OrientDB/issues/stack/44641116/db",
"dbUser": "admin",
"dbPassword": "admin",
"dbType": "graph",
"classes": [
{"name": "lac2", "extends": "V"},
{"name": "isActingWith", "extends": "E"}
]
}
}
}
edge_import_s.json
{
"source": { "file": { "path": "/home/ivan/Cose/OrientDB/issues/stack/44641116/file.csv" } },
"extractor": { "csv": {
"separator": ",",
"columns": ["p1:String","p2:String","s:Integer"] } },
"transformers": [
{ "command": { "command": "CREATE EDGE isActingWith FROM (SELECT FROM lac2 WHERE p='${input.p1}') TO (SELECT FROM lac2 WHERE p='${input.p2}') set score=${input.s}"} }
],
"loader": {
"orientdb": {
"dbURL": "plocal:/home/ivan/Cose/OrientDB/issues/stack/44641116/db",
"dbUser": "admin",
"dbPassword": "admin",
"dbType": "graph",
"classes": [
{"name": "lac2", "extends": "V"},
{"name": "isActingWith", "extends": "E"}
]
}
}
}
And here is the situation after the executions:
orientdb {db=db}> select from lac2
+----+-----+------+---------+-------------------+---------------+
|# |#RID |#CLASS|p |out_isActingWith |in_isActingWith|
+----+-----+------+---------+-------------------+---------------+
|0 |#21:6|lac2 |LGG_00001|[#25:5,#26:1,#27:1]| |
|1 |#21:7|lac2 |LGG_01682| |[#25:5] |
|2 |#22:3|lac2 |LGG_01831| |[#26:1] |
|3 |#23:1|lac2 |LGG_01491| |[#27:1] |
+----+-----+------+---------+-------------------+---------------+
4 item(s) found. Query executed in 0.003 sec(s).
orientdb {db=db}> select from isActingWith
+----+-----+------------+-----+-----+-----+
|# |#RID |#CLASS |score|out |in |
+----+-----+------------+-----+-----+-----+
|0 |#25:5|isActingWith|282 |#21:6|#21:7|
|1 |#26:1|isActingWith|183 |#21:6|#22:3|
|2 |#27:1|isActingWith|238 |#21:6|#23:1|
+----+-----+------------+-----+-----+-----+
3 item(s) found. Query executed in 0.004 sec(s).
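Since both the UPSERT and the CREATE EDGE lookups filter on lac2.p, an index on that property can speed them up on larger files. A possible sketch, assuming p stays unique per vertex (which the UPSERT keeps true) and that the schema does not yet declare the property:
CREATE PROPERTY lac2.p STRING
CREATE INDEX lac2.p ON lac2 (p) UNIQUE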
I'm working on a Minecraft launcher and I have a problem, because my launcher works the original launcher's way. For example, here is the version JSON:
{
"id": "1.8.1",
"time": "2014-11-24T14:13:31+00:00",
"releaseTime": "2014-11-24T14:13:31+00:00",
"type": "release",
"minecraftArguments": "--username ${auth_player_name} --version ${version_name} --gameDir ${game_directory} --assetsDir ${assets_root} --assetIndex ${assets_index_name} --uuid ${auth_uuid} --accessToken ${auth_access_token} --userProperties ${user_properties} --userType ${user_type}",
"minimumLauncherVersion": 14,
"assets": "1.8",
"libraries": [
{
"name": "com.ibm.icu:icu4j-core-mojang:51.2"
},
{
"name": "net.sf.jopt-simple:jopt-simple:4.6"
},
{
"name": "com.paulscode:codecjorbis:20101023"
},
{
"name": "com.paulscode:codecwav:20101023"
},
{
"name": "com.paulscode:libraryjavasound:20101123"
},
{
"name": "com.paulscode:librarylwjglopenal:20100824"
},
{
"name": "com.paulscode:soundsystem:20120107"
},
{
"name": "io.netty:netty-all:4.0.23.Final"
},
{
"name": "com.google.guava:guava:17.0"
},
{
"name": "org.apache.commons:commons-lang3:3.3.2"
},
{
"name": "commons-io:commons-io:2.4"
},
{
"name": "commons-codec:commons-codec:1.9"
},
{
"name": "net.java.jinput:jinput:2.0.5"
},
{
"name": "net.java.jutils:jutils:1.0.0"
},
{
"name": "com.google.code.gson:gson:2.2.4"
},
{
"name": "com.mojang:authlib:1.5.17"
},
{
"name": "com.mojang:realms:1.7.5"
},
{
"name": "org.apache.commons:commons-compress:1.8.1"
},
{
"name": "org.apache.httpcomponents:httpclient:4.3.3"
},
{
"name": "commons-logging:commons-logging:1.1.3"
},
{
"name": "org.apache.httpcomponents:httpcore:4.3.2"
},
{
"name": "org.apache.logging.log4j:log4j-api:2.0-beta9"
},
{
"name": "org.apache.logging.log4j:log4j-core:2.0-beta9"
},
{
"name": "org.lwjgl.lwjgl:lwjgl:2.9.1",
"rules": [
{
"action": "allow"
},
{
"action": "disallow",
"os": {
"name": "osx"
}
}
]
},
{
"name": "org.lwjgl.lwjgl:lwjgl_util:2.9.1",
"rules": [
{
"action": "allow"
},
{
"action": "disallow",
"os": {
"name": "osx"
}
}
]
},
{
"name": "org.lwjgl.lwjgl:lwjgl-platform:2.9.1",
"natives": {
"linux": "natives-linux",
"windows": "natives-windows",
"osx": "natives-osx"
},
"extract": {
"exclude": [
"META-INF/"
]
},
"rules": [
{
"action": "allow"
},
{
"action": "disallow",
"os": {
"name": "osx"
}
}
]
},
{
"name": "net.java.jinput:jinput-platform:2.0.5",
"natives": {
"linux": "natives-linux",
"windows": "natives-windows",
"osx": "natives-osx"
},
"extract": {
"exclude": [
"META-INF/"
]
}
},
{
"name": "tv.twitch:twitch:6.5"
},
{
"name": "tv.twitch:twitch-platform:6.5",
"rules": [
{
"action": "allow"
},
{
"action": "disallow",
"os": {
"name": "linux"
}
}
],
"natives": {
"linux": "natives-linux",
"windows": "natives-windows-${arch}",
"osx": "natives-osx"
},
"extract": {
"exclude": [
"META-INF/"
]
}
},
{
"name": "tv.twitch:twitch-external-platform:4.5",
"rules": [
{
"action": "allow",
"os": {
"name": "windows"
}
}
],
"natives": {
"windows": "natives-windows-${arch}"
},
"extract": {
"exclude": [
"META-INF/"
]
}
}
],
"mainClass": "net.minecraft.client.main.Main"
}
So, in the "libraries" array, the game libraries are listed with "name":, and some of the libraries also contain the rules that apply to them. I want to match these rules to the library file name and apply the actions, but I don't have any idea how. I am using this code, but it only reads out the library names.
Here is my code:
Dim item As String = Version
Dim client As New WebClient()
Await client.DownloadFileTaskAsync(New Uri("https://s3.amazonaws.com/Minecraft.Download/versions/" + item + "/" + item + ".json"), Root + "\versions\" + item + "\" + item + ".json")
Dim JSONREADER As New StreamReader(Root + "\versions\" + item + "\" + item + ".json")
json = JSONREADER.ReadToEnd()
JSONREADER.Close()
Dim JSONResult As Object = JsonConvert.DeserializeObject(Of Object)(json)
MinecraftArgs = JSONResult("minecraftArguments")
AssetIndex = JSONResult("assets")
MainClass = JSONResult("mainClass")
For Each i In JSONResult("libraries").Children()
LibrariesList.Add(i.ToObject(Of MClibraries).name)
Next
For example:
{
"name": "org.lwjgl.lwjgl:lwjgl:2.9.1",
"rules": [
{
"action": "allow"
},
{
"action": "disallow",
"os": {
"name": "osx"
}
}
]
},
The "name" as the name of the file, the "rules" there is two action: allow, disallow, under the disallow, there is the "os":, this sets the action, because
"os": {
"name": "osx"
}
this only allows the file on Windows and Linux but disallows it on Mac OS X. So I want to read these rules and apply the actions.
I want to add these libraries:
"libraries": [
{
"name": "com.ibm.icu:icu4j-core-mojang:51.2"
},
{
"name": "net.sf.jopt-simple:jopt-simple:4.6"
},
{
"name": "com.paulscode:codecjorbis:20101023"
},
{
"name": "com.paulscode:codecwav:20101023"
},
{
"name": "com.paulscode:libraryjavasound:20101123"
},
{
"name": "com.paulscode:librarylwjglopenal:20100824"
},
{
"name": "com.paulscode:soundsystem:20120107"
},
{
"name": "io.netty:netty-all:4.0.23.Final"
},
{
"name": "com.google.guava:guava:17.0"
},
{
"name": "org.apache.commons:commons-lang3:3.3.2"
},
{
"name": "commons-io:commons-io:2.4"
},
{
"name": "commons-codec:commons-codec:1.9"
},
{
"name": "net.java.jinput:jinput:2.0.5"
},
{
"name": "net.java.jutils:jutils:1.0.0"
},
{
"name": "com.google.code.gson:gson:2.2.4"
},
{
"name": "com.mojang:authlib:1.5.17"
},
{
"name": "com.mojang:realms:1.7.5"
},
{
"name": "org.apache.commons:commons-compress:1.8.1"
},
{
"name": "org.apache.httpcomponents:httpclient:4.3.3"
},
{
"name": "commons-logging:commons-logging:1.1.3"
},
{
"name": "org.apache.httpcomponents:httpcore:4.3.2"
},
{
"name": "org.apache.logging.log4j:log4j-api:2.0-beta9"
},
{
"name": "org.apache.logging.log4j:log4j-core:2.0-beta9"
},
And add these libraries only if the rules allow the Windows operating system:
{
"name": "org.lwjgl.lwjgl:lwjgl:2.9.1",
"rules": [
{
"action": "allow"
},
{
"action": "disallow",
"os": {
"name": "osx"
}
}
]
},
{
"name": "org.lwjgl.lwjgl:lwjgl_util:2.9.1",
"rules": [
{
"action": "allow"
},
{
"action": "disallow",
"os": {
"name": "osx"
}
}
]
},
{
"name": "org.lwjgl.lwjgl:lwjgl-platform:2.9.1",
"natives": {
"linux": "natives-linux",
"windows": "natives-windows",
"osx": "natives-osx"
},
"extract": {
"exclude": [
"META-INF/"
]
},
"rules": [
{
"action": "allow"
},
{
"action": "disallow",
"os": {
"name": "osx"
}
}
]
},
{
"name": "net.java.jinput:jinput-platform:2.0.5",
"natives": {
"linux": "natives-linux",
"windows": "natives-windows",
"osx": "natives-osx"
},
"extract": {
"exclude": [
"META-INF/"
]
}
},
{
"name": "tv.twitch:twitch:6.5"
},
{
"name": "tv.twitch:twitch-platform:6.5",
"rules": [
{
"action": "allow"
},
{
"action": "disallow",
"os": {
"name": "linux"
}
}
],
"natives": {
"linux": "natives-linux",
"windows": "natives-windows-${arch}",
"osx": "natives-osx"
},
"extract": {
"exclude": [
"META-INF/"
]
}
},
{
"name": "tv.twitch:twitch-external-platform:4.5",
"rules": [
{
"action": "allow",
"os": {
"name": "windows"
}
}
],
"natives": {
"windows": "natives-windows-${arch}"
},
"extract": {
"exclude": [
"META-INF/"
]
}
}
],
If the rules only disallow one OS, for example OS X, it means Windows and Linux are allowed.
First edit your MClibraries class so that it looks like this:
Imports Newtonsoft.Json.Linq ' needed for the JArray type

Public Class MClibraries
Public name As String
Public rules As JArray
End Class
So to do what you're asking I start off with a list and a dictionary:
Private ReadOnly _librariesList As New List(Of String)
Private ReadOnly _disallowed As New Dictionary(Of String, String)
NOTE: You already have _librariesList, but yours is named LibrariesList.
Next, to get the libraries, I did this:
Dim item As String = Version
Dim client = New WebClient() With {.Proxy = Nothing}
Dim json = Await client.DownloadStringTaskAsync(New Uri(String.Format("http://s3.amazonaws.com/Minecraft.Download/versions/{0}/{0}.json", item)))
Dim jsonResult As Object = JsonConvert.DeserializeObject(Of Object)(json)
For Each i In jsonResult("libraries")
Dim entry As MClibraries = i.ToObject(Of MClibraries)() ' Converts object to MClibrary
_librariesList.Add(entry.name) ' Add the name of each entry to the list
If Not IsNothing(entry.rules) AndAlso entry.rules.Count = 2 Then ' Check that the allow/disallow rule pair is present
_disallowed.Add(entry.rules(1)("os")("name").ToString(), entry.name) ' Key: the disallowed os, value: the package
End If
Next
NOTE: Just add what you already have to this
Now you can access all the disallowed os's with the new dictionary "_disallowed". The key is the os that is disallowed and the value is the package.
If you want to check whether the OS matches the user's OS, you can write the following. You will also need to add a reference to System.Management and import it as well.
Private _currentOs As String
'The following should go under the loading of a form
_currentOs = (From x In New ManagementObjectSearcher("SELECT * FROM Win32_OperatingSystem").Get().OfType(Of ManagementObject)()
Select x.GetPropertyValue("Caption").ToString()).FirstOrDefault()
'The following should go where you retrieve the libraries
For Each i In jsonResult("libraries")
Dim entry As MClibraries = i.ToObject(Of MClibraries)() ' Converts object to MClibrary
If Not IsNothing(entry.rules) AndAlso entry.rules.Count = 2 Then ' Check to make sure it isn't empty
If Not _currentOs.ToLower().Contains(entry.rules(1)("os")("name").ToString()) Then
_librariesList.Add(entry.name)
End If
End If
Next
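The two-rule check above covers the lwjgl-style entries, but some libraries (for example tv.twitch:twitch-external-platform) carry a single allow rule restricted to one OS. A more general helper, sketched here under the common interpretation that the last matching rule wins and that a library with rules but no matching rule is disallowed (the function name is mine, not part of the launcher format):
' Requires Imports Newtonsoft.Json.Linq for JArray/JObject
Public Function IsLibraryAllowed(rules As JArray, currentOs As String) As Boolean
    ' No rules at all means the library is always used
    If rules Is Nothing OrElse rules.Count = 0 Then Return True
    Dim allowed As Boolean = False ' default when rules exist but none match
    For Each rule As JObject In rules
        Dim osNode = rule("os")
        ' A rule applies when it has no "os" block or its os name matches the current OS
        Dim applies As Boolean = (osNode Is Nothing) OrElse currentOs.ToLower().Contains(osNode("name").ToString().ToLower())
        If applies Then allowed = (rule("action").ToString() = "allow")
    Next
    Return allowed
End Function
With that in place, the loop could simply call IsLibraryAllowed(entry.rules, _currentOs) before adding entry.name to the list.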