#!/usr/bin/perl
use lib qw(..);
use JSON qw( );

open json_fh, "<$ARGV[0]" or die "Couldn't open file $ARGV[0]!\n";
open csv_fh, ">$ARGV[1]" or die "Couldn't open file $ARGV[1]!\n";

@json_text = <json_fh>;
close json_fh;

foreach $json_text ( @json_text )
{
    chomp $json_text;
    $json = JSON->new;
    $data = $json->decode($json_text);
    $id = $data->{_id};
    @lines = @{$data->{accounts}};
    foreach $line ( @lines )
    {
        $accountNumber = $line->{accountNumber};
        $accountType = $line->{accountType};
        $cardType = $line->{cardType};
        $cardSubType = $line->{cardSubType};
        $protectionMethod = $line->{protectionMethod};
        $protectionSource = $line->{protectionSource};
        $expirationDate = $line->{expirationDate};
        $nameOnAccount = $line->{nameOnAccount};
        $cardStatus = $line->{cardStatus};
        $cardHolderType = $line->{cardHolderType};
        $createdBy = $line->{createdBy};
        $addressId = $line->{addressId};
        $productType = $line->{productType};
        $isDefaultAccount = $line->{isDefaultAccount};
        #Write to the file in delimited file format
        print csv_fh "$id|$accountNumber|$accountType|$cardType|$cardSubType|$protectionMethod|$protectionSource|$expirationDate|$nameOnAccount|$cardStatus|$cardHolderType|$createdBy|$addressId|$productType|$isDefaultAccount\n";
    }
}
close csv_fh;
This is a Perl script that I created to convert a JSON file to a delimited file when the element names are known.
Could anyone please help me modify the code so that this conversion can be done when the element names are unknown?
Assuming that every account has the same fields (it makes no sense otherwise), you can use the following:
my $json_parser = JSON->new;

my @headers;
for my $json_doc (@json_docs) {    # assumes @json_docs holds one JSON document per input line
    my $data = $json_parser->decode($json_doc);
    my $id = $data->{_id};
    for my $account (@{ $data->{accounts} }) {
        if (!@headers) {
            @headers = sort keys %$account;
            say join "|", 'id', @headers;    # say requires: use feature 'say';
        }
        say join "|", $id, @$account{@headers};
    }
}
You didn't provide an example input file, so I'm guessing it is something like this:
{ "accounts": [ { "_id": "1", "accountNumber": "99999", "accountType": "acctTypeA", "cardType": "cardTypeA", "cardSubType": "cardSubTypeA", "protectionMethod": "protectionMethodA", "protectionSource": "protectionSourceA", "expirationDate": "2020-09", "nameOnAccount": "First Last", "cardStatus": "OK", "cardHolderType": "CHTypeA", "createdBy": "userX", "addressId": "444", "productType": "prodTypeA", "isDefaultAccount": "1", "optional": "OptA" } ] }
You're pretty close, but usually the entire file is a single JSON record, so you don't loop line by line; you create a data structure (hashref) that represents the entire file (i.e. you only need to call $json->decode once per file).
Additionally, I'd suggest some checks to validate the input, such as missing fields; you can see I have it die with an error message if any field is missing.
#!/usr/bin/env perl
use strict;
use warnings;
use lib qw(..);
use JSON qw( );

@ARGV == 2 or die("Infile, Outfile required\n");

open json_fh, "<$ARGV[0]" or die "Couldn't open file $ARGV[0]!\n";
open csv_fh, ">$ARGV[1]" or die "Couldn't open file $ARGV[1]!\n";

my $json_text = do { local $/; <json_fh> };    # slurp the whole file, not just the first line
close json_fh;

my $json = JSON->new->allow_nonref;
my $data = $json->decode($json_text);
my $accounts = $data->{accounts};

my @required = qw(_id accountNumber accountType cardType cardSubType protectionMethod protectionSource expirationDate nameOnAccount cardStatus cardHolderType createdBy addressId productType isDefaultAccount);
my @opt = ();    # learn these
my %col;         # key => column index
my $lastIndex;
for (my $i=0; $i<=$#required; ++$i) { $lastIndex = $col{$required[$i]} = $i }
print "There are ", $lastIndex+1, " required cols\n";

foreach my $rec ( @$accounts )
{
    my @row;
    foreach my $key ( keys %$rec )
    {
        if ( ! exists($col{$key}) ) {
            # new (optional) key
            push @opt, $key;
            $col{$key} = ++$lastIndex;
            print "New col: $key (col ", $lastIndex+1, ")\n";
        }
        $row[$col{$key}] = $rec->{$key};
    }
    # check for all required
    for (my $i=0; $i<=$#required; ++$i) {
        defined($row[$i]) or die("Missing: $required[$i]\n");
    }
    #Write to the file in delimited file format
    print csv_fh join("|", @row), "\n";
}
close csv_fh;
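With the one-line input guessed above (note its extra "optional" key), a run should print something like:

There are 15 required cols
New col: optional (col 16)

and write this row to the output file:

1|99999|acctTypeA|cardTypeA|cardSubTypeA|protectionMethodA|protectionSourceA|2020-09|First Last|OK|CHTypeA|userX|444|prodTypeA|1|OptA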
I have JSON data I'm pulling which has repeated key names, and I'm trying to pull the values from those keys one at a time and pass them to variables in a loop in a Perl script, but it pulls all of the values at once instead of iterating through them. I'd like to pull a value from a key, pass it to a variable, then iterate through the loop again for the next value. The amount of data in the JSON changes, so the number of identical keys will grow.
Perl Script Snippet
#!/usr/bin/perl
use warnings;
use strict;
use JSON::XS;
my $res = "test.json";
my $txt = do {
    local $/;
    open my $fh, "<", $res or die $!;
    <$fh>;
};
my $json = decode_json($txt);

for my $mdata (@{ $json->{results} }) {
    my $sitedomain = "$mdata->{custom_fields}->{Domain}";
    my $routerip = "$mdata->{custom_fields}->{RouterIP}";
    #vars
    my $domain = $sitedomain;
    my $host = $routerip;
    print $domain;
    print $host;
}
Printing the $host variable:
print $host;
192.168.201.1192.168.202.1192.168.203.1
Printing the $domain variable:
print $domain;
site1.global.localsite2.global.localsite3.global.local
JSON (test.json)
{
    "results": [
        {
            "id": 37,
            "url": "http://global.local/api/dcim/sites/37/",
            "display": "Site 1",
            "name": "Site 1",
            "slug": "site1",
            "custom_fields": {
                "Domain": "site1.global.local",
                "RouterIP": "192.168.201.1"
            }
        },
        {
            "id": 38,
            "url": "http://global.local/api/dcim/sites/38/",
            "display": "Site 2",
            "name": "Site 2",
            "slug": "site2",
            "custom_fields": {
                "Domain": "site2.global.local",
                "RouterIP": "192.168.202.1"
            }
        },
        {
            "id": 39,
            "url": "http://global.local/api/dcim/sites/39/",
            "display": "Site 3",
            "name": "Site 3",
            "slug": "site3",
            "custom_fields": {
                "Domain": "site3.global.local",
                "RouterIP": "192.168.203.1"
            }
        }
    ]
}
Your code produces the expected result if you add \n to the print statement. You can use say instead of print if no special formatting is required.
use warnings;
use strict;
use feature 'say';
use JSON::XS;

my $res = "test.json";
my $txt = do {
    local $/;
    open my $fh, "<", $res or die $!;
    <$fh>;
};
my $json = decode_json($txt);

for my $mdata (@{ $json->{results} }) {
    my $sitedomain = "$mdata->{custom_fields}->{Domain}";
    my $routerip = "$mdata->{custom_fields}->{RouterIP}";
    #vars
    my $domain = $sitedomain;
    my $host = $routerip;
    say "$domain $host";
}
The code can be rewritten in a shorter form as follows:
use strict;
use warnings;
use feature 'say';
use JSON;

my $fname = 'router_test.json';
my $txt = do {
    local $/;
    open my $fh, "<", $fname or die $!;
    <$fh>;
};
my $json = from_json($txt);
say "$_->{custom_fields}{Domain} $_->{custom_fields}{RouterIP}" for @{$json->{results}};
It sounds like you want to "slice" the data. You could buffer in code, or collect unique values later. Let's modify what you started with, and make some tweaks:
n.b. There's no need to quote my $sitedomain = "$mdata->{custom_fields}->{Domain}";. The value in the decoded JSON is already a string, and forcing Perl to make another string by interpolating it is unnecessary.
n.b.2 The JSON module uses JSON::XS automatically if it's installed.
my %domains;
my %ips;

for my $mdata (@{ $json->{results} }) {
    my $sitedomain = $mdata->{custom_fields}->{Domain};
    my $routerip = $mdata->{custom_fields}->{RouterIP};
    # Collect and count all the unique domains and IPs by storing them as hash keys
    $domains{$sitedomain} += 1;
    $ips{$routerip} += 1;
}

for my $key (keys %domains) {
    printf "%s %s\n", $key, $domains{$key};
    # and so on
}
If we don't know the custom fields, we can play with nested hashes to collect it all:
my %fields;
for my $mdata (@{ $json->{results} }) {
    for my $custom_field (keys %{ $mdata->{custom_fields} }) {
        $fields{$custom_field}{ $mdata->{custom_fields}{$custom_field} } += 1;
    }
}

for my $custom_field (keys %fields) {
    print "$custom_field:\n";
    for my $unique_value (keys %{ $fields{$custom_field} }) {
        printf "%s - %s\n", $unique_value, $fields{$custom_field}{$unique_value};
    }
}
Example output:
RouterIP:
192.168.201.1 - 1
192.168.203.1 - 1
192.168.202.1 - 1
Domain:
site2.global.local - 1
site1.global.local - 1
site3.global.local - 1
... or something like that. Nested structures lead very quickly to messy code. You can mitigate it by dereferencing the substructures; for instance, here is a minimal sketch of the same counting loop with custom_fields dereferenced once per record:
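my %fields;
for my $mdata (@{ $json->{results} }) {
    my $cf = $mdata->{custom_fields};    # grab the inner hashref once
    $fields{$_}{ $cf->{$_} } += 1 for keys %$cf;
}

It could also be more predictable if we work with a known list of keys, e.g.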
my @known_keys = qw/RouterIP Domain/;
for my $mdata (@{ $json->{results} }) {
    for my $custom_field (@known_keys) {
        if (exists $mdata->{custom_fields}{$custom_field}) {
            $fields{$custom_field}{ $mdata->{custom_fields}{$custom_field} } += 1;
        }
    }
}
If the JSON file is massive you may run out of memory. For this you would need to look into a package like JSON::SL or JSON::Streaming::Reader. They're more involved to use but prevent you from needing to load the whole file into memory. There are also unix tools like jq that provide the same powers.
Following is my JSON file (demo.json):
[
    {
        "Directory" : "/opt/gehc/abc/root/mr_csd/xxx/diag/",
        "Files" : [ "abc.html", "xyz.html", "mnp.html" ],
        "Permission" : 555
    }
]
I want to read each file in "Files" one by one, which lies in "Directory", and change its "Permission".
Following is the code I have started; please help:
#!/usr/bin/perl
use JSON;

my $filename = 'demo.json';
my $data;
{
    local $/ = undef;
    open my $fh, '<', $filename or die $!;
    $data = <$fh>;
    close $fh;
}
my $result = decode_json( $data );
for my $report ( @{$result} ) {
Using your own code, you can easily de-reference the JSON structure into simpler structures:
use Data::Dumper;

my @files = @{ $result->[0]{Files} };
my $perm = $result->[0]{Permission};
print Dumper \@files, $perm;
Which will print:
$VAR1 = [
          'abc.html',
          'xyz.html',
          'mnp.html'
        ];
$VAR2 = 555;
Then you can loop over the files with a simple for loop:
for my $file (@files) { ...
And chmod the files as necessary, though you may have to pass the number 555 through oct() so it's treated as the octal mode it's meant to be, as described in the documentation. For instance, a minimal sketch of that loop (assuming each file lives directly under the given Directory):
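my $dir = $result->[0]{Directory};
my $mode = oct( $result->[0]{Permission} );    # 555 becomes octal 0555

for my $file (@files) {
    chmod $mode, "$dir/$file" or warn "Couldn't chmod $dir/$file: $!\n";
}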
And if you have several levels of this array that you call "reports", you can loop over them like so:
for my $report (@{ $result }) {
    my @files = @{ $report->{Files} };
    my $perm = $report->{Permission};
    print Dumper \@files, $perm;
}
My custom Perl code gives the following wrong JSON, with a missing comma between blocks:
{
    "data": [{
        "{#LOGFILEPATH}": "/tmp/QRZ2007.tcserverlogs",
        "{#LOGFILE}": "QRZ2007"
    } (missing comma) {
        "{#LOGFILE}": "ARZ2007",
        "{#LOGFILEPATH}": "/tmp/ARZ2007.tcserverlogs"
    }]
}
My terrible code:
#!/usr/bin/perl
use strict;
use warnings;
use File::Basename;
use utf8;
use JSON;

binmode STDOUT, ":utf8";

my $dir = $ARGV[0];
my $json = JSON->new->utf8->space_after;

opendir(DIR, $dir) or die $!;
print '{"data": [';
while (my $file = readdir(DIR)) {
    next unless (-f "$dir/$file");
    next unless ($file =~ m/\.tcserverlogs$/);
    my $fullPath = "$dir/$file";
    my $filenameshort = basename($file, ".tcserverlogs");
    my $data_to_json = {"{#LOGFILEPATH}"=>$fullPath,"{#LOGFILE}"=>$filenameshort};
    print $json->encode($data_to_json);
}
print ']}'."\n";
closedir(DIR);
exit 0;
Dear team, I am not a programmer; any idea how to fix it? Thank you!
If you do not print a comma, you will not get a comma.
You are trying to build your own JSON string from pre-encoded pieces of smaller data structures. That will not work unless you tell Perl when to put commas. You could do that, but it's easier to just collect all the data into a Perl data structure that is equivalent to the JSON string you want to produce, and encode the whole thing in one go when you're done.
my $dir = $ARGV[0];
my $json = JSON->new->utf8->space_after;
my @data;

opendir( DIR, $dir ) or die $!;
while ( my $file = readdir(DIR) ) {
    next unless ( -f "$dir/$file" );
    next unless ( $file =~ m/\.tcserverlogs$/ );
    my $fullPath = "$dir/$file";
    my $filenameshort = basename( $file, ".tcserverlogs" );
    my $data_to_json = { "{#LOGFILEPATH}" => $fullPath, "{#LOGFILE}" => $filenameshort };
    push @data, $data_to_json;
}
closedir(DIR);

print $json->encode( { data => \@data } );
I have a configuration file which is in XML format. I need to parse the XML and convert it to JSON. I'm able to convert it with the XML2JSON module of Perl, but the problem is that it does not maintain the order of the XML elements. I strictly need the elements in order, otherwise I cannot configure correctly.
My XML file is something like this; I have to configure an IP address and set that IP as the gateway for a certain route.
<Config>
    <ip>
        <address>1.1.1.1</address>
        <netmask>255.255.255.0</netmask>
    </ip>
    <route>
        <network>20.20.20.0</network>
        <netmask>55.255.255.0</netmask>
        <gateway>1.1.1.1</gateway>
    </route>
</Config>
This is my Perl code to convert it to JSON:
use Data::Dumper;
use XML::XML2JSON;

my $file = 'config.xml';
open my $fh, '<', $file or die;
$/ = undef;
my $data = <$fh>;

my $XML = $data;
my $XML2JSON = XML::XML2JSON->new();
my $Obj = $XML2JSON->xml2obj($XML);
print Dumper($Obj);
The output I'm getting is:
$VAR1 = {'Config' => {'route' => {'netmask' => {'$t' => '55.255.255.0'},'gateway' => {'$t' => '1.1.1.1'},'network' => {'$t' => '20.20.20.0'}},'ip' => {'netmask' => {'$t' => '255.255.255.0'},'address' => {'$t' => '1.1.1.1'}}},'#encoding' => 'UTF-8','#version' => '1.0'};
I have a script which reads the JSON object and configures accordingly.
But it fails: it first tries to set the gateway IP address on a route before that IP address has been configured, and only then adds the IP address.
I strictly want the key ip to come first and then route for proper configuration without error. Like this I have many dependencies where the order of keys is a must.
Is there any way I can tackle this problem? I tried almost all the XML parsing modules, like XML::Simple, XML::Twig and XML::Parser, but nothing helped.
Here's a program that I hacked together that uses XML::Parser to parse some XML data and generate the equivalent JSON in the same order. It ignores any attributes, processing instructions etc. and requires that every XML element must contain either a list of child elements or a text node. Mixing text and elements won't work, and this isn't checked except that the program will die trying to dereference a string.
It's intended to be a framework for you to enhance as you require, but it works fine as it stands with the XML data you show in your question.
use strict;
use warnings 'all';
use XML::Parser;
my $parser = XML::Parser->new(Handlers => {
    Start => \&start_tag,
    End   => \&end_tag,
    Char  => \&text,
});

my $struct;
my @stack;

$parser->parsefile('config.xml');

print_json($struct->[1]);

sub start_tag {
    my $expat = shift;
    my ($tag, %attr) = @_;

    my $elem = [ $tag => [] ];

    if ( $struct ) {
        my $content = $stack[-1][1];
        push @{ $content }, $elem;
    }
    else {
        $struct = $elem;
    }

    push @stack, $elem;
}

sub end_tag {
    my $expat = shift;
    my ($elem) = @_;

    die "$elem <=> $stack[-1][0]" unless $stack[-1][0] eq $elem;

    for my $content ( $stack[-1][1] ) {
        $content = "@$content" unless grep ref, @$content;
    }

    pop @stack;
}

sub text {
    my $expat = shift;
    my ($string) = @_;

    return unless $string =~ /\S/;

    $string =~ s/\A\s+//;
    $string =~ s/\s+\z//;

    push @{ $stack[-1][1] }, $string;
}

sub print_json {
    my ($data, $indent, $comma) = (@_, 0, '');

    print "{\n";

    for my $i ( 0 .. $#$data ) {
        # Note that $data, $indent and $comma are overridden here
        # to reflect the inner context
        #
        my $elem = $data->[$i];
        my $comma = $i < $#$data ? ',' : '';
        my ($tag, $data) = @$elem;
        my $indent = $indent + 1;

        printf qq{%s"%s" : }, ' ' x $indent, $tag;

        if ( ref $data ) {
            print_json($data, $indent, $comma);
        }
        else {
            printf qq{"%s"%s\n}, $data, $comma;
        }
    }

    # $indent and $comma (and $data) are restored here
    #
    printf "%s}%s\n", ' ' x $indent, $comma;
}
output
{
 "ip" : {
  "address" : "1.1.1.1",
  "netmask" : "255.255.255.0"
 },
 "route" : {
  "network" : "20.20.20.0",
  "netmask" : "55.255.255.0",
  "gateway" : "1.1.1.1"
 }
}
The problem isn't so much to do with XML parsing; it's that Perl hashes are not ordered, so when you 'write' some JSON it can come out in any order.
The way to avoid this is to apply a sort function to your JSON.
You can do this by using sort_by to explicitly sort:
#!/usr/bin/env perl
use strict;
use warnings;
use XML::Twig;
use JSON::PP;
use Data::Dumper;

sub order_nodes {
    my %rank_of = ( ip => 0, route => 1, address => 2, network => 3, netmask => 4, gateway => 5 );
    print "$JSON::PP::a <=> $JSON::PP::b\n";
    return $rank_of{$JSON::PP::a} <=> $rank_of{$JSON::PP::b};
}

my $twig = XML::Twig->parse( \*DATA );
my $json = JSON::PP->new;
$json->sort_by( \&order_nodes );
print $json->encode( $twig->simplify );
__DATA__
<Config>
    <ip>
        <address>1.1.1.1</address>
        <netmask>255.255.255.0</netmask>
    </ip>
    <route>
        <network>20.20.20.0</network>
        <netmask>55.255.255.0</netmask>
        <gateway>1.1.1.1</gateway>
    </route>
</Config>
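For the data above, this should print (ignoring the debug lines from the print inside order_nodes) something like:
{"ip":{"address":"1.1.1.1","netmask":"255.255.255.0"},"route":{"network":"20.20.20.0","netmask":"55.255.255.0","gateway":"1.1.1.1"}}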
In some scenarios, setting canonical can help, as that sets the ordering to lexical order (and means your JSON output is consistently ordered). It doesn't apply to your case, but for reference it's a one-line change; a minimal sketch:
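my $json = JSON::PP->new->canonical(1);
print $json->encode( $twig->simplify );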
You could build the node ordering via XML::Twig instead, either by an xpath expression or by using twig_handlers. I gave it a quick go, but got slightly unstuck in figuring out how you'd 'tell' the ordering, given address/netmask in one stanza and network/netmask/gateway in the other.
As a simple example you could:
my $count = 0;
foreach my $node ( $twig->get_xpath('./*') ) {
    # exists, because a rank of 0 is false and would otherwise be reassigned
    $rank_of{ $node->tag } = $count++ unless exists $rank_of{ $node->tag };
}
print Dumper \%rank_of;
This will ensure ip and route are always the right way around. However it doesn't order the subkeys.
That actually gets a bit more complicated, as you'd need to recurse... and then decide how to handle 'collisions' (like netmask - address comes before, but how does it sort compared to network).
Or alternatively:
my $count = 0;
foreach my $node ( $twig->get_xpath('.//*') ) {
    $rank_of{ $node->tag } = $count++ unless exists $rank_of{ $node->tag };
}
This walks all the nodes, and puts them in order. It doesn't quite work, because netmask appears in both stanzas though.
You get:
{"ip":{"address":"1.1.1.1","netmask":"255.255.255.0"},"route":{"netmask":"55.255.255.0","network":"20.20.20.0","gateway":"1.1.1.1"}}
I couldn't figure out a neat way of collapsing both lists.
I got this Perl code that is supposed to read my categories and put them into a CSV file. After many tries I finally got it working, but it is only reading 50 of my over 500 categories. Is there any way to modify this routine to read all my categories?
Here is the Perl file I got from the Bigcommerce forum.
use strict;
use JSON::PP;

open (my $fh, "<", 'categories.json');
my $json_text = <$fh>;
my $perl_scalar = decode_json($json_text);

# Make a list of ids to names, so that I can build a content path for Neto category CSV
my $id;
foreach my $element (@$perl_scalar)
{
    $id->{$element->{id}} = $element->{name};
}

# Actually print out the CSV content, in Neto's required format.
print "content type,content path,name,description 1,description 2,sort order,seo meta description,seo page title,seo meta keywords\n";
foreach my $element (@$perl_scalar)
{
    print "Product Category,";
    my $parent_category = $element->{parent_category_list}[0];
    if ($parent_category == $element->{id})
    {
        print ",";
    }
    else
    {
        print $id->{$parent_category}, ",";
    }
    print $element->{name}, ",", $element->{description}, ",,", $element->{sort_order}, ",", $element->{meta_description}, ",,\n";
}
Thanks in advance
There is a pretty fundamental problem with mapping JSON to CSV: JSON is a nested data structure, where CSV isn't. Therefore you'll always have to mess around with converting; for example, how would you flatten this into columns:
{
    "data2" : {
        "fish" : "paste"
    },
    "data" : [
        {
            "somesub" : "somethingelse"
        },
        {
            "somesub" : "anotherthing"
        }
    ]
}
This won't turn into a flat data structure like CSV easily.
If you've some trivial JSON to convert it's not too hard, but it depends entirely on the structure of your JSON file and how you want to map things.
For a trivial example:
use strict;
use warnings;
use JSON;
use Data::Dumper;

local $/;
my $data = from_json(<DATA>);
print Dumper $data;

my @columns = qw ( col1 col2 col3 );
print join( ",", "key", @columns ), "\n";

foreach my $key ( sort keys %$data ) {
    print join( ",", $key, @{ $data->{$key} }{@columns} ), "\n";
}
__DATA__
{
    "1" : {
        "col1" : "value1",
        "col2" : "value2",
        "col3" : "value3"
    },
    "2" : {
        "col1" : "value4",
        "col2" : "value5",
        "col3" : "value6"
    }
}
For a more complex example it may be appropriate to use Text::CSV, but it depends rather on what's in your JSON content: the simplistic join approach above doesn't cope with line feeds, embedded quotes or commas within the text. So it might be better to use Text::CSV:
#!/usr/bin/env perl
use strict;
use warnings;
use JSON;
use Text::CSV;
use Data::Dumper;

local $/;
my $data = from_json( <DATA> );
print Dumper $data;

my $csv = Text::CSV->new( { 'binary' => 1 } );
my @columns = qw ( col1 col2 col3 );
$csv->column_names( @columns );

foreach my $key ( sort keys %$data ) {
    $csv->print_hr( \*STDOUT, $data->{$key} );
    print "\n";
}

foreach my $key ( sort keys %$data ) {
    my $row = [ $key, @{ $data->{$key} }{@columns} ];
    $csv->print( \*STDOUT, $row );
    print "\n";
}
This uses the same __DATA__ block as above and runs two loops: the first uses the 'column headings' (column_names and print_hr) to print, which works provided you don't want to preserve the "key" field; the second assembles an array reference including the key and prints that. For the sample data, the two loops should produce something like:
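value1,value2,value3
value4,value5,value6
1,value1,value2,value3
2,value4,value5,value6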