Convert JSON String to Lua Table?

I need to convert a JSON string to a table data structure in Lua. I am using the following code.
local json = require "json"

local t = {
  ["name1"] = "value1",
  ["name2"] = { 1, false, true, 23.54, "a \021 string" },
  name3 = json.null
}

local encode = json.encode(t)
print(encode) --> {"name1":"value1","name3":null,"name2":[1,false,true,23.54,"a \u0015 string"]}

local decode = json.decode(encode)
But when I run the script, I get the following errors:
no field package.preload['json']
no file '/usr/local/share/lua/5.2/json.lua'
no file '/usr/local/share/lua/5.2/json/init.lua'
no file '/usr/local/lib/lua/5.2/json.lua'
no file '/usr/local/lib/lua/5.2/json/init.lua'
no file './json.lua'
no file '/usr/local/lib/lua/5.2/json.so'
no file '/usr/local/lib/lua/5.2/loadall.so'
no file './json.so'
So how do I convert my JSON string to a Lua table?

Maybe lua-cjson is your friend.
Install it, e.g., through LuaRocks:
$ sudo luarocks install lua-cjson
Then, in Lua:
local json = require('cjson')
local tab = json.decode(json_string)
json_string = json.encode(tab)
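For example, here is a quick round trip as a minimal sketch (the sample string is made up):
local json = require('cjson')

local decoded = json.decode('{"name":"value","list":[1,2,3]}')
print(decoded.name)    --> value
print(decoded.list[2]) --> 2

print(json.encode(decoded)) --> {"name":"value","list":[1,2,3]} (key order may vary)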

I found a pure Lua script that parses JSON data (just one file):
https://gist.github.com/tylerneylon/59f4bcf316be525b30ab
local json = {}

-- Internal functions.

local function kind_of(obj)
  if type(obj) ~= 'table' then return type(obj) end
  local i = 1
  for _ in pairs(obj) do
    if obj[i] ~= nil then i = i + 1 else return 'table' end
  end
  if i == 1 then return 'table' else return 'array' end
end

local function escape_str(s)
  local in_char  = {'\\', '"', '/', '\b', '\f', '\n', '\r', '\t'}
  local out_char = {'\\', '"', '/',  'b',  'f',  'n',  'r',  't'}
  for i, c in ipairs(in_char) do
    s = s:gsub(c, '\\' .. out_char[i])
  end
  return s
end

-- Returns pos, did_find; there are two cases:
-- 1. Delimiter found: pos = pos after leading space + delim; did_find = true.
-- 2. Delimiter not found: pos = pos after leading space; did_find = false.
-- This throws an error if err_if_missing is true and the delim is not found.
local function skip_delim(str, pos, delim, err_if_missing)
  pos = pos + #str:match('^%s*', pos)
  if str:sub(pos, pos) ~= delim then
    if err_if_missing then
      error('Expected ' .. delim .. ' near position ' .. pos)
    end
    return pos, false
  end
  return pos + 1, true
end

-- Expects the given pos to be the first character after the opening quote.
-- Returns val, pos; the returned pos is after the closing quote character.
local function parse_str_val(str, pos, val)
  val = val or ''
  local early_end_error = 'End of input found while parsing string.'
  if pos > #str then error(early_end_error) end
  local c = str:sub(pos, pos)
  if c == '"'  then return val, pos + 1 end
  if c ~= '\\' then return parse_str_val(str, pos + 1, val .. c) end
  -- We must have a \ character.
  local esc_map = {b = '\b', f = '\f', n = '\n', r = '\r', t = '\t'}
  local nextc = str:sub(pos + 1, pos + 1)
  if not nextc then error(early_end_error) end
  return parse_str_val(str, pos + 2, val .. (esc_map[nextc] or nextc))
end

-- Returns val, pos; the returned pos is after the number's final character.
local function parse_num_val(str, pos)
  local num_str = str:match('^-?%d+%.?%d*[eE]?[+-]?%d*', pos)
  local val = tonumber(num_str)
  if not val then error('Error parsing number at position ' .. pos .. '.') end
  return val, pos + #num_str
end

-- Public values and functions.

function json.stringify(obj, as_key)
  local s = {}  -- We'll build the string as an array of strings to be concatenated.
  local kind = kind_of(obj)  -- This is 'array' if it's an array or type(obj) otherwise.
  if kind == 'array' then
    if as_key then error('Can\'t encode array as key.') end
    s[#s + 1] = '['
    for i, val in ipairs(obj) do
      if i > 1 then s[#s + 1] = ', ' end
      s[#s + 1] = json.stringify(val)
    end
    s[#s + 1] = ']'
  elseif kind == 'table' then
    if as_key then error('Can\'t encode table as key.') end
    s[#s + 1] = '{'
    for k, v in pairs(obj) do
      if #s > 1 then s[#s + 1] = ', ' end
      s[#s + 1] = json.stringify(k, true)
      s[#s + 1] = ':'
      s[#s + 1] = json.stringify(v)
    end
    s[#s + 1] = '}'
  elseif kind == 'string' then
    return '"' .. escape_str(obj) .. '"'
  elseif kind == 'number' then
    if as_key then return '"' .. tostring(obj) .. '"' end
    return tostring(obj)
  elseif kind == 'boolean' then
    return tostring(obj)
  elseif kind == 'nil' then
    return 'null'
  else
    error('Unjsonifiable type: ' .. kind .. '.')
  end
  return table.concat(s)
end

json.null = {}  -- This is a one-off table to represent the null value.

function json.parse(str, pos, end_delim)
  pos = pos or 1
  if pos > #str then error('Reached unexpected end of input.') end
  local pos = pos + #str:match('^%s*', pos)  -- Skip whitespace.
  local first = str:sub(pos, pos)
  if first == '{' then  -- Parse an object.
    local obj, key, delim_found = {}, true, true
    pos = pos + 1
    while true do
      key, pos = json.parse(str, pos, '}')
      if key == nil then return obj, pos end
      if not delim_found then error('Comma missing between object items.') end
      pos = skip_delim(str, pos, ':', true)  -- true -> error if missing.
      obj[key], pos = json.parse(str, pos)
      pos, delim_found = skip_delim(str, pos, ',')
    end
  elseif first == '[' then  -- Parse an array.
    local arr, val, delim_found = {}, true, true
    pos = pos + 1
    while true do
      val, pos = json.parse(str, pos, ']')
      if val == nil then return arr, pos end
      if not delim_found then error('Comma missing between array items.') end
      arr[#arr + 1] = val
      pos, delim_found = skip_delim(str, pos, ',')
    end
  elseif first == '"' then  -- Parse a string.
    return parse_str_val(str, pos + 1)
  elseif first == '-' or first:match('%d') then  -- Parse a number.
    return parse_num_val(str, pos)
  elseif first == end_delim then  -- End of an object or array.
    return nil, pos + 1
  else  -- Parse true, false, or null.
    local literals = {['true'] = true, ['false'] = false, ['null'] = json.null}
    for lit_str, lit_val in pairs(literals) do
      local lit_end = pos + #lit_str - 1
      if str:sub(pos, lit_end) == lit_str then return lit_val, lit_end + 1 end
    end
    local pos_info_str = 'position ' .. pos .. ': ' .. str:sub(pos, pos + 10)
    error('Invalid json syntax starting at ' .. pos_info_str)
  end
end

return json
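Saved as json.lua somewhere on your package.path, it can be used like this (a minimal sketch; the sample data is made up):
local json = require 'json'  -- the single-file module above

local obj = json.parse('{"name":"value","nums":[1,2,3],"flag":true}')
print(obj.name)    --> value
print(obj.nums[2]) --> 2

print(json.stringify(obj)) --> e.g. {"name":"value", "nums":[1, 2, 3], "flag":true} (key order may vary)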

You can use json-lua, a pure Lua implementation of JSON. First install json-lua using LuaRocks: luarocks install json-lua. Then use this code:
local json = require "json"

local t = {
  ["name1"] = "value1",
  ["name2"] = { 1, false, true, 23.54, "a \021 string" },
  name3 = json.null
}

local encode = json:encode(t)
print(encode) --> {"name1":"value1","name3":null,"name2":[1,false,true,23.54,"a \u0015 string"]}

local decode = json:decode(encode)
Tested and verified on Windows 7 64-bit with Lua 5.1. lua-cjson is fine, but it is not a pure Lua rock, so its installation will not be as easy.

Related

Proper Syntax For Lua Code Inside Terminal

I am using Lua version 5.4 on my 64-bit Windows OS. I am struggling to write the proper syntax for opening a directory, then opening the local file by its path, and then, once the file is open (a JSON file), converting it to a Lua table.
Code I am using:
Lua 5.4 syntax:
local archhudbasic = io.open("C:\\Users\\Lichr\\Documents",)
-- Internal functions.

local function kind_of(obj)
  if type(obj) ~= 'table' then return type(obj) end
  local i = 1
  for _ in pairs(obj) do
    if obj[i] ~= nil then i = i + 1 else return 'table' end
  end
  if i == 1 then return 'table' else return 'array' end
end

local function escape_str(s)
  local in_char  = {'\\', '"', '/', '\b', '\f', '\n', '\r', '\t'}
  local out_char = {'\\', '"', '/',  'b',  'f',  'n',  'r',  't'}
  for i, c in ipairs(in_char) do
    s = s:gsub(c, '\\' .. out_char[i])
  end
  return s
end

-- Returns pos, did_find; there are two cases:
-- 1. Delimiter found: pos = pos after leading space + delim; did_find = true.
-- 2. Delimiter not found: pos = pos after leading space; did_find = false.
-- This throws an error if err_if_missing is true and the delim is not found.
local function skip_delim(str, pos, delim, err_if_missing)
  pos = pos + #str:match('^%s*', pos)
  if str:sub(pos, pos) ~= delim then
    if err_if_missing then
      error('Expected ' .. delim .. ' near position ' .. pos)
    end
    return pos, false
  end
  return pos + 1, true
end

-- Expects the given pos to be the first character after the opening quote.
-- Returns val, pos; the returned pos is after the closing quote character.
local function parse_str_val(str, pos, val)
  val = val or ''
  local early_end_error = 'End of input found while parsing string.'
  if pos > #str then error(early_end_error) end
  local c = str:sub(pos, pos)
  if c == '"'  then return val, pos + 1 end
  if c ~= '\\' then return parse_str_val(str, pos + 1, val .. c) end
  -- We must have a \ character.
  local esc_map = {b = '\b', f = '\f', n = '\n', r = '\r', t = '\t'}
  local nextc = str:sub(pos + 1, pos + 1)
  if not nextc then error(early_end_error) end
  return parse_str_val(str, pos + 2, val .. (esc_map[nextc] or nextc))
end

-- Returns val, pos; the returned pos is after the number's final character.
local function parse_num_val(str, pos)
  local num_str = str:match('^-?%d+%.?%d*[eE]?[+-]?%d*', pos)
  local val = tonumber(num_str)
  if not val then error('Error parsing number at position ' .. pos .. '.') end
  return val, pos + #num_str
end

-- Public values and functions.

function json.stringify(obj, as_key)
  local s = {}  -- We'll build the string as an array of strings to be concatenated.
  local kind = kind_of(obj)  -- This is 'array' if it's an array or type(obj) otherwise.
  if kind == 'array' then
    if as_key then error('Can\'t encode array as key.') end
    s[#s + 1] = '['
    for i, val in ipairs(obj) do
      if i > 1 then s[#s + 1] = ', ' end
      s[#s + 1] = json.stringify(val)
    end
    s[#s + 1] = ']'
  elseif kind == 'table' then
    if as_key then error('Can\'t encode table as key.') end
    s[#s + 1] = '{'
    for k, v in pairs(obj) do
      if #s > 1 then s[#s + 1] = ', ' end
      s[#s + 1] = json.stringify(k, true)
      s[#s + 1] = ':'
      s[#s + 1] = json.stringify(v)
    end
    s[#s + 1] = '}'
  elseif kind == 'string' then
    return '"' .. escape_str(obj) .. '"'
  elseif kind == 'number' then
    if as_key then return '"' .. tostring(obj) .. '"' end
    return tostring(obj)
  elseif kind == 'boolean' then
    return tostring(obj)
  elseif kind == 'nil' then
    return 'null'
  else
    error('Unjsonifiable type: ' .. kind .. '.')
  end
  return table.concat(s)
end

json.null = {}  -- This is a one-off table to represent the null value.

function json.parse(str, pos, end_delim)
  pos = pos or 1
  if pos > #str then error('Reached unexpected end of input.') end
  local pos = pos + #str:match('^%s*', pos)  -- Skip whitespace.
  local first = str:sub(pos, pos)
  if first == '{' then  -- Parse an object.
    local obj, key, delim_found = {}, true, true
    pos = pos + 1
    while true do
      key, pos = json.parse(str, pos, '}')
      if key == nil then return obj, pos end
      if not delim_found then error('Comma missing between object items.') end
      pos = skip_delim(str, pos, ':', true)  -- true -> error if missing.
      obj[key], pos = json.parse(str, pos)
      pos, delim_found = skip_delim(str, pos, ',')
    end
  elseif first == '[' then  -- Parse an array.
    local arr, val, delim_found = {}, true, true
    pos = pos + 1
    while true do
      val, pos = json.parse(str, pos, ']')
      if val == nil then return arr, pos end
      if not delim_found then error('Comma missing between array items.') end
      arr[#arr + 1] = val
      pos, delim_found = skip_delim(str, pos, ',')
    end
  elseif first == '"' then  -- Parse a string.
    return parse_str_val(str, pos + 1)
  elseif first == '-' or first:match('%d') then  -- Parse a number.
    return parse_num_val(str, pos)
  elseif first == end_delim then  -- End of an object or array.
    return nil, pos + 1
  else  -- Parse true, false, or null.
    local literals = {['true'] = true, ['false'] = false, ['null'] = json.null}
    for lit_str, lit_val in pairs(literals) do
      local lit_end = pos + #lit_str - 1
      if str:sub(pos, lit_end) == lit_str then return lit_val, lit_end + 1 end
    end
    local pos_info_str = 'position ' .. pos .. ': ' .. str:sub(pos, pos + 10)
    error('Invalid json syntax starting at ' .. pos_info_str)
  end
end

return json
I am new to Lua coding. What am I missing? How can I make this code work when run from the terminal?
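For reference, the flow being attempted looks roughly like this (a minimal sketch; the file name is made up, and it assumes the parser above is saved as json.lua on package.path with a local json = {} table declared at the top):
local json = require('json')  -- the single-file parser pasted above

-- io.open takes a file path (not a directory) plus a mode string,
-- and returns nil and an error message on failure.
local file = assert(io.open("C:\\Users\\Lichr\\Documents\\data.json", "r"))
local contents = file:read("*a")  -- read the whole file into one string
file:close()

local tab = json.parse(contents)  -- JSON text -> Lua table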

How do I create a generated scan report for PDF in Arachni Web Application Security Scanner Framework?

I have been working on a project where I have to modify the Arachni Web Application Security Scanner Framework so that it generates and lets you download a scan report in PDF format. The web application itself is powered by Ruby on Rails. Here is the link to the application I am talking about:
https://www.arachni-scanner.com/
Now, the default application generates five file formats: HTML (or HTML.zip), JSON, Marshal, YAML, and XML. I am trying to add PDF, and the PDF report has to look exactly like the contents of the HTML report, charts and all. The application folder has its own libraries directory. There is one library called Arachni-1.5.1, which I have concluded is the heart of where the generated file formats are produced. As one example, the library file that supposedly creates the HTML.zip report lives in the Arachni-1.5.1 library under a folder called reporters, and the file is named html.rb.
module TemplateUtilities

    def base64_encode( string )
        Base64.encode64( string ).gsub( /\n/, '' )
    end

    def normalize( str )
        str.to_s.recode
    end

    def md( markdown )
        html = Kramdown::Document.new( markdown ).to_html.recode
        Loofah.fragment( html ).scrub!(:prune).to_s
    end

    def key_to_words( k )
        k.to_s.capitalize.gsub( '_', ' ' )
    end

    def code_highlight( code, language = :html, options = {} )
        return if !code

        lines = CodeRay.scan( code.recode, language ).
            html( css: :style ).lines.to_a

        if options[:from]
            from = [0, options[:from]].max
        else
            from = 0
        end

        if options[:to]
            to = [lines.size, options[:to]].min
        else
            to = lines.size - 1
        end

        code = '<div class="code-container"><table class="CodeRay"><tbody><tr><td class="line-numbers"><pre>'
        from.upto(to) do |i|
            if options[:anchor_id]
                line = "<a href='#{id_to_location "#{options[:anchor_id]}-#{i}"}'>#{i}</a>"
            else
                line = "#{i}"
            end

            if options[:breakpoint] && options[:breakpoint] == i
                code << "<span class='breakpoint'>#{line}</span>"
            else
                code << line
            end

            code << "\n"
        end
        code << '</pre></td><td class="code"><pre>'

        from.upto(to) do |i|
            line = "<span id='#{options[:anchor_id]}-#{i}'>#{lines[i]}</span>"

            if options[:breakpoint] && options[:breakpoint] == i
                code << "<span class='breakpoint'>#{line}</span>"
            else
                code << line.to_s
            end
        end

        code + '</pre></td></tr></tbody></table></div>'
    end

    def highlight_proof( string, proof )
        proof  = proof.to_s.recode
        string = string.to_s.recode

        return escapeHTML( string ) if proof.to_s.empty?
        return escapeHTML( string ) if !string.include?( proof )

        escaped_proof         = escapeHTML( proof )
        escaped_response_body = escapeHTML( string )

        escaped_response_body.gsub(
            escaped_proof,
            "<span class=\"issue-proof-highlight\">#{escaped_proof}</span>"
        )
    end

    def data_dump( data )
        ap = AwesomePrint::Inspector.new( plain: true, html: true )
        "<pre class='data-dump'>#{ap.awesome( data )}</pre>"
    end

    # Carefully escapes HTML and converts to UTF-8 while removing
    # invalid character sequences.
    def escapeHTML( str )
        CGI.escapeHTML( normalize( str ) )
    end

    def highlight_issue_page_body( issue, span_class )
        return escapeHTML( issue.page.body ) if !issue.page.body.include?( issue.proof )

        escaped_proof         = escapeHTML( issue.proof )
        escaped_response_body = escapeHTML( issue.page.body )

        escaped_response_body.gsub(
            escaped_proof,
            "<span class=\"#{span_class}\">#{escaped_proof}</span>"
        )
    end

    def issue_location( issue )
        id_to_location( issue_id( issue ) )
    end

    def issue_id( issue )
        issue = report.issue_by_digest( issue.digest )

        "issues-#{'un' if issue.untrusted?}trusted-severity-" <<
            "#{issue.severity}-#{issue.check[:shortname]}-#{issue.digest}"
    end

    def id_to_location( id )
        "#!/#{id.gsub( '-', '/' )}"
    end

    def erb( tpl, params = {} )
        scope = TemplateScope.new( params )

        tpl = tpl.to_s + '.erb' if tpl.is_a?( Symbol )

        path = File.exist?( tpl ) ? tpl : TEMPLATE_DIR + tpl
        ERB.new( IO.read( path ).recode ).result( scope.get_binding )
    rescue
        ap tpl
        raise
    end

end
include TemplateUtilities

class TemplateScope
    include TemplateUtilities

    ISSUES_URL = 'https://github.com/Arachni/arachni/issues'

    def initialize( params = {} )
        update params
        update self.class.global_data
    end

    def update( params )
        params.each { |name, value| self[name] = value }
        self
    end

    def []=( name, value )
        self.class.send( :attr_accessor, name )
        instance_variable_set( "@#{name.to_s}", value )
        self
    end

    def prep_description( str )
        escapeHTML Arachni::Reporters::HTML.prep_description( str )
    end

    def get_plugin_info( name )
        report.plugins[name.to_sym]
    end

    def js_multiline( str )
        "\"" + normalize( str ).gsub( "\n", '\n' ) + "\""
    end

    def get_binding
        binding
    end

    def self.global_data=( data )
        @@global_data = data
    end

    def self.global_data
        @@global_data
    end
end

def global_data
    grouped_issues = {
        trusted:   {},
        untrusted: {}
    }

    Arachni::Issue::Severity::ORDER.each do |severity|
        by_severity = report.issues.select { |i| i.severity.to_sym == severity }
        next if by_severity.empty?

        by_name = {}
        by_severity.each do |issue|
            by_name[issue.name] ||= []
            by_name[issue.name] << issue
        end
        next if by_name.empty?

        grouped_issues[:trusted][by_severity.first.severity] =
            by_name.inject({}) do |h, (name, issues)|
                i = issues.select(&:trusted?)
                next h if i.empty?

                h[name] = i
                h
            end

        grouped_issues[:untrusted][by_severity.first.severity] =
            by_name.inject({}) do |h, (name, issues)|
                i = issues.select(&:untrusted?)
                next h if i.empty?

                h[name] = i
                h
            end

        [:trusted, :untrusted].each do |t|
            if grouped_issues[t][by_severity.first.severity].empty?
                grouped_issues[t].delete by_severity.first.severity
            end
        end
    end

    [:trusted, :untrusted].each do |t|
        grouped_issues.delete( t ) if grouped_issues[t].empty?
    end

    prepare_data.merge(
        report:         report,
        grouped_issues: grouped_issues,
        plugins:        format_plugin_results
    )
end

# Runs the HTML report.
def run
    FileUtils.rm_rf outfile

    print_line
    print_status 'Creating HTML report...'

    TemplateScope.global_data = global_data

    tmpdir = "#{Arachni::Options.paths.tmpdir}/#{generate_token}/"

    FileUtils.rm_rf tmpdir
    FileUtils.mkdir_p tmpdir

    FileUtils.mkdir_p "#{tmpdir}/js/lib"
    FileUtils.mkdir_p "#{tmpdir}/css/lib"

    FileUtils.cp_r "#{TEMPLATE_DIR}/fonts", "#{tmpdir}/"
    FileUtils.cp_r "#{TEMPLATE_DIR}/js/lib", "#{tmpdir}/js/"
    FileUtils.cp_r "#{TEMPLATE_DIR}/css/lib", "#{tmpdir}/css/"

    %w(js/helpers.js js/init.js.erb js/charts.js.erb js/configuration.js.erb
        css/main.css).each do |f|
        if f.end_with? '.erb'
            IO.write( "#{tmpdir}/#{f.split('.erb').first}", erb( "#{TEMPLATE_DIR}/#{f}" ) )
        else
            FileUtils.cp( "#{TEMPLATE_DIR}/#{f}" , "#{tmpdir}/#{f}" )
        end
    end

    IO.write( "#{tmpdir}/index.html", erb( TEMPLATE_FILE ) )

    compress( tmpdir, outfile )
    FileUtils.rm_rf tmpdir

    print_status "Saved in '#{outfile}'."
end

def self.info
    {
        name:         'HTML',
        description:  %q{Exports the audit results as a compressed HTML report.},
        content_type: 'application/zip',
        author:       'Tasos "Zapotek" Laskos <tasos.laskos@arachni-scanner.com>',
        version:      '0.4.3',
        options:      [
            Options.outfile( '.html.zip' ),
            Options.skip_responses
        ]
    }
end

private

def compress( directory, archive )
    Zip::File.open( archive, Zip::File::CREATE ) do |zipfile|
        Dir[File.join(directory, '**', '**')].each do |file|
            zipfile.add( file.sub( directory, '' ), file )
        end
    end

    archive
end

def self.prep_description( str )
    placeholder = '--' + rand( 1000 ).to_s + '--'
    cstr = str.gsub( /^\s*$/xm, placeholder )
    cstr.gsub!( /^\s*/xm, '' )
    cstr.gsub!( placeholder, "\n" )
    cstr.chomp
end

def prepare_data
    graph_data = {
        severities: {
            Severity::HIGH.to_sym          => 0,
            Severity::MEDIUM.to_sym        => 0,
            Severity::LOW.to_sym           => 0,
            Severity::INFORMATIONAL.to_sym => 0
        },
        severity_for_issue:       {},
        severity_index_for_issue: {},
        severity_regions:         {},
        issues:                   {},
        issues_shortnames:        Set.new,
        trusted_issues:           {},
        untrusted_issues:         {},
        elements: {
            Element::Form.type              => 0,
            Element::Form::DOM.type         => 0,
            Element::Link.type              => 0,
            Element::Link::DOM.type         => 0,
            Element::Cookie.type            => 0,
            Element::Cookie::DOM.type       => 0,
            Element::LinkTemplate.type      => 0,
            Element::LinkTemplate::DOM.type => 0,
            Element::Header.type            => 0,
            Element::Body.type              => 0,
            Element::Path.type              => 0,
            Element::Server.type            => 0,
            Element::GenericDOM.type        => 0,
            Element::JSON.type              => 0,
            Element::XML.type               => 0,
            Element::UIInput::DOM.type      => 0,
            Element::UIForm::DOM.type       => 0
        },
        verification: {
            'Yes' => 0,
            'No'  => 0
        },
        trust: {
            'Trusted'   => 0,
            'Untrusted' => 0
        }
    }

    total_severities = 0
    total_elements   = 0

    has_trusted_issues   = false
    has_untrusted_issues = false

    last_severity = nil
    report.issues.each.with_index do |issue, i|
        graph_data[:severities][issue.severity.to_sym] += 1
        total_severities += 1

        graph_data[:issues][issue.name] ||= 0
        graph_data[:issues][issue.name] += 1

        graph_data[:elements][issue.vector.class.type] += 1
        total_elements += 1

        verification = issue.untrusted? ? 'Yes' : 'No'
        graph_data[:verification][verification] += 1

        graph_data[:untrusted_severities] ||= {}
        graph_data[:untrusted_severities][issue.severity.to_sym] ||= 0

        graph_data[:trusted_severities] ||= {}
        graph_data[:trusted_severities][issue.severity.to_sym] ||= 0

        graph_data[:trusted_issues][issue.name]   ||= 0
        graph_data[:untrusted_issues][issue.name] ||= 0

        graph_data[:issues_shortnames] << issue.check[:shortname]
        graph_data[:severity_for_issue][issue.check[:shortname]] = issue.severity.to_s

        new_region = !graph_data[:severity_regions].include?( issue.severity.to_sym )
        graph_data[:severity_regions][issue.severity.to_sym] ||= {}
        graph_data[:severity_regions][issue.severity.to_sym][:class] =
            "severity-#{issue.severity.to_sym}"
        graph_data[:severity_regions][issue.severity.to_sym][:start] ||=
            graph_data[:issues].size - 1

        if new_region && last_severity
            graph_data[:severity_regions][last_severity][:end] =
                graph_data[:issues].size - 2
        end
        last_severity = issue.severity.to_sym

        graph_data[:severity_index_for_issue][issue.name] =
            Issue::Severity::ORDER.reverse.index( issue.severity.to_sym ) + 1

        if issue.trusted?
            has_trusted_issues = true
            graph_data[:trust]['Trusted'] += 1
            graph_data[:trusted_severities][issue.severity.to_sym] += 1
            graph_data[:trusted_issues][issue.name] += 1
        else
            has_untrusted_issues = true
            graph_data[:trust]['Untrusted'] += 1
            graph_data[:untrusted_severities][issue.severity.to_sym] += 1
            graph_data[:untrusted_issues][issue.name] += 1
        end
    end

    graph_data[:issues_shortnames] = graph_data[:issues_shortnames].to_a
    graph_data[:severity_regions]  = graph_data[:severity_regions].values

    {
        graph_data:           graph_data,
        total_severities:     total_severities,
        total_elements:       total_elements,
        has_trusted_issues:   has_trusted_issues,
        has_untrusted_issues: has_untrusted_issues
    }
end
Now, how would I turn something like that into PDF? I should also note that any editing I apply to this library can cause the application to crash whenever I visit a scan page. The web application also ships with the Kramdown-1.4.1 library, which has some sort of PDF converter, but I am honestly not sure it is the most suitable tool to use. I have looked at countless sources, and this does not seem to be a well-known problem.
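One direction worth exploring, patterned on the run method above: render the same ERB template to an HTML string and hand it to an HTML-to-PDF converter. The sketch below is hypothetical and not Arachni's actual reporter API; it assumes the pdfkit gem (which drives wkhtmltopdf) is installed and reuses names from html.rb (outfile, TEMPLATE_FILE, TemplateScope, erb, print_status, Options). Since the charts are drawn client-side in JavaScript, the converter must execute JavaScript for them to appear.
# pdf.rb -- hypothetical reporter, modeled on html.rb above (not part of Arachni).
require 'pdfkit'

def run
    print_line
    print_status 'Creating PDF report...'

    TemplateScope.global_data = global_data

    # Render the same ERB template html.rb uses, then convert the HTML string.
    html = erb( TEMPLATE_FILE )
    PDFKit.new( html, page_size: 'A4' ).to_file( outfile )

    print_status "Saved in '#{outfile}'."
end

def self.info
    {
        name:         'PDF',
        description:  %q{Exports the audit results as a PDF report.},
        content_type: 'application/pdf',
        version:      '0.1',
        options:      [ Options.outfile( '.pdf' ) ]
    }
end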

Lua nested Json, remove single occurs or list of occurs if multiple

So what I am trying to do here: for a given json_body (decoded from JSON into a table using cjson), I want to remove a given element identified by a configurable value conf.remove.json. I feel I am pretty close, but it is still not working. Is there a better way? Is there a safe way to find the table's depth and then reach out, so that conf.remove.json = I.want.to.remove.this produces the behavior json_table[I][want][to][remove][this] = nil without throwing some kind of NPE?
local configRemovePath = {}
local configRemoveDepth = 0
local recursiveCounter = 1

local function splitString(inputstr)
  sep = "%." -- Split on .
  configRemovePath = {}
  configRemoveDepth = 0
  for str in string.gmatch(inputstr, "([^"..sep.."]+)") do
    configRemovePath[configRemoveDepth + 1] = str
    configRemoveDepth = configRemoveDepth + 1
  end
end

local function recursiveSearchAndNullify(jsonTable)
  for key, value in pairs(jsonTable) do -- unordered search
    -- First iteration.
    -- Sample JSON below, where conf.remove.json = data.id and nothing happened.
    -- {
    --   "data": {
    --     "d": 2,
    --     "id": 1
    --   }
    -- }
    -- value = {"d": 2, "id": 1}, key = "data", configRemovePath[recursiveCounter] = "data",
    -- configRemovePath = {'data', 'id'}, configRemoveDepth = 2
    if (type(value) == "table" and value == configRemovePath[recursiveCounter] and recursiveCounter < configRemoveDepth) then
      -- The value is a table we need to dive into, and we have not yet
      -- exceeded the configuration's remove depth level.
      recursiveCounter = recursiveCounter + 1
      jsonTable = recursiveSearchAndNullify(value)
    else
      if (key == configRemovePath[recursiveCounter] and recursiveCounter == configRemoveDepth) then
        -- We are at the depth to remove and the key matches, so we delete.
        for key in pairs(jsonTable) do -- Remove all occurrences of said element
          jsonTable[key] = nil
        end
      end
    end
  end
  return jsonTable
end

for _, name in iter(conf.remove.json) do
  splitString(name)
  if (configRemoveDepth == 0) then
    for name in pairs(json_body) do
      json_body[name] = nil
    end
  else
    recursiveCounter = 1 -- Reset to 1 for each call
    json_body = recursiveSearchAndNullify(json_body)
  end
end
Thanks to anyone who assists; this is my first day with Lua, so I am pretty new.
This is the official answer; I found a better way with the help of Christian Sciberras!
local json_body_test_one = { data = { id = {"a", "b"}, d = "2" } } -- decoded JSON via cjson
local json_body_test_two = { data = { { id = "a", d = "1" }, { id = "b", d = "2" } } }
local config_json_remove = "data.id"

local function dump(o) -- Method to print test tables for debugging
  if type(o) == 'table' then
    local s = '{ '
    for k, v in pairs(o) do
      if type(k) ~= 'number' then k = '"'..k..'"' end
      s = s .. '['..k..'] = ' .. dump(v) .. ','
    end
    return s .. '} '
  else
    return tostring(o)
  end
end

local function splitstring(inputstr, sep)
  if sep == nil then
    sep = "%." -- Dot notation default
  end
  local t = {}; i = 1
  for str in string.gmatch(inputstr, "([^"..sep.."]+)") do
    t[i] = str
    i = i + 1
  end
  return t
end

local function setjsonprop(json_object, path, newvalue)
  local configarray = splitstring(path)
  while (#configarray > 1) do
    json_object = json_object[table.remove(configarray, 1)]
    if (type(json_object) == "table" and #json_object > 0) then
      local recursepath = table.concat(configarray, ".")
      for _, item in pairs(json_object) do
        setjsonprop(item, recursepath, newvalue)
      end
      return
    end
  end
  json_object[table.remove(configarray, 1)] = newvalue
end

setjsonprop(json_body_test_one, config_json_remove, nil)
print(dump(json_body_test_one))
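The otherwise unused json_body_test_two covers the array-of-objects case, and the very same call handles it, since setjsonprop recurses into each list item:
setjsonprop(json_body_test_two, config_json_remove, nil)
print(dump(json_body_test_two)) -- each object in the data array keeps d but loses id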

Sort lua table based on nested json value

We have a key-value pair in Redis consisting of a key whose value is a JSON object with various information:
"node:service:i-01fe0d69c343734" :
"{\"port\":\"32781\",
\"version\":\"3.0.2\",
\"host-instance-id\":\"i-01fe0d69c2243b366\",
\"last-checkin\":\"1492702508\",
\"addr\":\"10.0.0.0\",
\"host-instance-type\":\"m3.large\"}"
Is it possible to sort the table based on the last-checkin time of the value?
Here is my solution to your problem, using the quicksort algorithm, after first making a small correction to your input (as I understood it):
-----------------------------------------------------
local json = require("json")
function quicksort(t, sortname, start, endi)
start, endi = start or 1, endi or #t
sortname = sortname or 1
if(endi - start < 1) then return t end
local pivot = start
for i = start + 1, endi do
if t[i][sortname] <= t[pivot][sortname] then
local temp = t[pivot + 1]
t[pivot + 1] = t[pivot]
if(i == pivot + 1) then
t[pivot] = temp
else
t[pivot] = t[i]
t[i] = temp
end
pivot = pivot + 1
end
end
t = quicksort(t, sortname, start, pivot - 1)
return quicksort(t, sortname, pivot + 1, endi)
end
---------------------------------------------------------
-- I manually added the delimiter ","
-- and the "node:service..." names must be different
str = [[
{
"node:service:i-01fe0d69c343731" :
"{\"port\":\"32781\",
\"version\":\"3.0.2\",
\"host-instance-id\":\"i-01fe0d69c2243b366\",
\"last-checkin\":\"1492702506\",
\"addr\":\"10.0.0.0\",
\"host-instance-type\":\"m3.large\"}"
,
"node:service:i-01fe0d69c343732" :
"{\"port\":\"32781\",
\"version\":\"3.0.2\",
\"host-instance-id\":\"i-01fe0d69c2243b366\",
\"last-checkin\":\"1492702508\",
\"addr\":\"10.0.0.0\",
\"host-instance-type\":\"m3.large\"}"
,
"node:service:i-01fe0d69c343733" :
"{\"port\":\"32781\",
\"version\":\"3.0.2\",
\"host-instance-id\":\"i-01fe0d69c2243b366\",
\"last-checkin\":\"1492702507\",
\"addr\":\"10.0.0.0\",
\"host-instance-type\":\"m3.large\"}"
,
"node:service:i-01fe0d69c343734" :
"{\"port\":\"32781\",
\"version\":\"3.0.2\",
\"host-instance-id\":\"i-01fe0d69c2243b366\",
\"last-checkin\":\"1492702501\",
\"addr\":\"10.0.0.0\",
\"host-instance-type\":\"m3.large\"}"
}
]]
-- remove unnecessary \
str = str:gsub('"{', '{'):gsub('}"', '}'):gsub('\\"', '"')
local t_res = json.decode(str)

-- prepare the table before sorting
local t_indexed = {}
for k, v in pairs(t_res) do
  v["node-service"] = k
  t_indexed[#t_indexed + 1] = v
end

-- the quicksort algorithm here works only on an indexed (array-style) table
local t_sort = quicksort(t_indexed, "last-checkin")
for k, v in pairs(t_sort) do
  print(k, v["node-service"], v["port"], v["version"], v["host-instance-id"], v["last-checkin"], v["addr"], v["host-instance-type"])
end
Console output:
1 node:service:i-01fe0d69c343734 32781 3.0.2 i-01fe0d69c2243b366 1492702501 10.0.0.0 m3.large
2 node:service:i-01fe0d69c343731 32781 3.0.2 i-01fe0d69c2243b366 1492702506 10.0.0.0 m3.large
3 node:service:i-01fe0d69c343733 32781 3.0.2 i-01fe0d69c2243b366 1492702507 10.0.0.0 m3.large
4 node:service:i-01fe0d69c343732 32781 3.0.2 i-01fe0d69c2243b366 1492702508 10.0.0.0 m3.large
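As an aside, Lua's built-in table.sort can replace the hand-rolled quicksort (a minimal sketch over the same t_indexed table; since the last-checkin values are equal-length digit strings, lexicographic order matches numeric order here, otherwise wrap both sides in tonumber()):
table.sort(t_indexed, function(a, b)
  return a["last-checkin"] < b["last-checkin"] -- ascending by check-in time
end)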

Search and compare values of a .CSV file

My goal is to search a CSV's first column twice, then execute an action (dependent on a value in the third column of the same record). I began in VBScript using InStr():
Set objFS = CreateObject("Scripting.FileSystemObject")
roster = "C:\bin\roster.csv"
Set objFile = objFS.OpenTextFile(roster)

Do Until objFile.AtEndOfStream
    strLine = objFile.ReadLine
    intLength = Len(strLine)
    intZeros = 5 - intLength
    If InStr(strLine, strIP) > 0 Then
        strinfo = Split(strLine, ",")
        siteNumA = strinfo(0)
        siteNumB = String(5 - Len(siteNumA), "0") & siteNumA
        siteIP = strinfo(1)
        siteDist = strinfo(2)
        siteReg = strinfo(3)
    End If
Loop
It could compare values of siteDist to the same data from a second search. However, I would prefer to use AutoIt. Is there a way to achieve this using AutoIt (or a command to achieve my plan)?
A simple CSV file I am using for testing:
Site,District,Region
1,1,1
2,1,1
3,1,2
4,2,2
5,2,1
Searching two separate entries for Site and confirming that District matches afterwards: running this script at site 1 should have it evaluate as true for Sites 1, 2, and 3, and false for Sites 4 and 5.
Use this:
; #FUNCTION# ====================================================================================================================
; Name...........: _ParseCSV
; Description ...: Reads a CSV-file
; Syntax.........: _ParseCSV($sFile, $sDelimiters=',', $sQuote='"', $iFormat=0)
; Parameters ....: $sFile - File to read or string to parse
; $sDelimiters - [optional] Field separators of CSV, multiple are allowed (default: ,;)
; $sQuote - [optional] Character to quote strings (default: ")
; $iFormat - [optional] Encoding of the file (default: 0):
; |-1 - No file, plain data given
; |0 or 1 - automatic (ASCII)
; |2 - Unicode UTF16 Little Endian reading
; |3 - Unicode UTF16 Big Endian reading
; |4 or 5 - Unicode UTF8 reading
; Return values .: Success - 2D-Array with CSV data (0-based)
; Failure - 0, sets #error to:
; |1 - could not open file
; |2 - error on parsing data
; |3 - wrong format chosen
; Author ........: ProgAndy
; Modified.......:
; Remarks .......:
; Related .......: _WriteCSV
; Link ..........:
; Example .......:
; ===============================================================================================================================
Func _ParseCSV($sFile, $sDelimiters=',;', $sQuote='"', $iFormat=0)
    Local Static $aEncoding[6] = [0, 0, 32, 64, 128, 256]
    If $iFormat < -1 Or $iFormat > 6 Then
        Return SetError(3,0,0)
    ElseIf $iFormat > -1 Then
        Local $hFile = FileOpen($sFile, $aEncoding[$iFormat]), $sLine, $aTemp, $aCSV[1], $iReserved, $iCount
        If @error Then Return SetError(1,@error,0)
        $sFile = FileRead($hFile)
        FileClose($hFile)
    EndIf
    If $sDelimiters = "" Or IsKeyword($sDelimiters) Then $sDelimiters = ',;'
    If $sQuote = "" Or IsKeyword($sQuote) Then $sQuote = '"'
    $sQuote = StringLeft($sQuote, 1)
    Local $srDelimiters = StringRegExpReplace($sDelimiters, '[\\\^\-\[\]]', '\\\0')
    Local $srQuote = StringRegExpReplace($sQuote, '[\\\^\-\[\]]', '\\\0')
    Local $sPattern = StringReplace(StringReplace('(?m)(?:^|[,])\h*(["](?:[^"]|["]{2})*["]|[^,\r\n]*)(\v+)?',',', $srDelimiters, 0, 1),'"', $srQuote, 0, 1)
    Local $aREgex = StringRegExp($sFile, $sPattern, 3)
    If @error Then Return SetError(2,@error,0)
    $sFile = '' ; save memory
    Local $iBound = UBound($aREgex), $iIndex=0, $iSubBound = 1, $iSub = 0
    Local $aResult[$iBound][$iSubBound]
    For $i = 0 To $iBound-1
        Select
            Case StringLen($aREgex[$i])<3 And StringInStr(@CRLF, $aREgex[$i])
                $iIndex += 1
                $iSub = 0
                ContinueLoop
            Case StringLeft(StringStripWS($aREgex[$i], 1),1)=$sQuote
                $aREgex[$i] = StringStripWS($aREgex[$i], 3)
                $aResult[$iIndex][$iSub] = StringReplace(StringMid($aREgex[$i], 2, StringLen($aREgex[$i])-2), $sQuote&$sQuote, $sQuote, 0, 1)
            Case Else
                $aResult[$iIndex][$iSub] = $aREgex[$i]
        EndSelect
        $aREgex[$i]=0 ; save memory
        $iSub += 1
        If $iSub = $iSubBound Then
            $iSubBound += 1
            ReDim $aResult[$iBound][$iSubBound]
        EndIf
    Next
    If $iIndex = 0 Then $iIndex=1
    ReDim $aResult[$iIndex][$iSubBound]
    Return $aResult
EndFunc
; #FUNCTION# ====================================================================================================================
; Name...........: _WriteCSV
; Description ...: Writes a CSV-file
; Syntax.........: _WriteCSV($sFile, Const ByRef $aData, $sDelimiter, $sQuote, $iFormat=0)
; Parameters ....: $sFile - Destination file
; $aData - [Const ByRef] 0-based 2D-Array with data
; $sDelimiter - [optional] Fieldseparator (default: ,)
; $sQuote - [optional] Quote character (default: ")
; $iFormat - [optional] character encoding of file (default: 0)
; |0 or 1 - ASCII writing
; |2 - Unicode UTF16 Little Endian writing (with BOM)
; |3 - Unicode UTF16 Big Endian writing (with BOM)
; |4 - Unicode UTF8 writing (with BOM)
; |5 - Unicode UTF8 writing (without BOM)
; Return values .: Success - True
; Failure - 0, sets #error to:
; |1 - No valid 2D-Array
; |2 - Could not open file
; Author ........: ProgAndy
; Modified.......:
; Remarks .......:
; Related .......: _ParseCSV
; Link ..........:
; Example .......:
; ===============================================================================================================================
Func _WriteCSV($sFile, Const ByRef $aData, $sDelimiter=',', $sQuote='"', $iFormat=0)
    Local Static $aEncoding[6] = [2, 2, 34, 66, 130, 258]
    If $sDelimiter = "" Or IsKeyword($sDelimiter) Then $sDelimiter = ','
    If $sQuote = "" Or IsKeyword($sQuote) Then $sQuote = '"'
    Local $iBound = UBound($aData, 1), $iSubBound = UBound($aData, 2)
    If Not $iSubBound Then Return SetError(2,0,0)
    Local $hFile = FileOpen($sFile, $aEncoding[$iFormat])
    If @error Then Return SetError(2,@error,0)
    For $i = 0 To $iBound-1
        For $j = 0 To $iSubBound-1
            FileWrite($hFile, $sQuote & StringReplace($aData[$i][$j], $sQuote, $sQuote&$sQuote, 0, 1) & $sQuote)
            If $j < $iSubBound-1 Then FileWrite($hFile, $sDelimiter)
        Next
        FileWrite($hFile, @CRLF)
    Next
    FileClose($hFile)
    Return True
EndFunc
; === EXAMPLE ===================================================
;~ #include<Array.au3>
;~ $aResult = _ParseCSV(@ScriptDir & '\test.csv', "\", '$', 4)
;~ _ArrayDisplay($aResult)
;~ _WriteCSV(@ScriptDir & '\written.csv', $aResult, ',', '"', 5)
; ===============================================================
or this:
#region ;************ Includes ************
#include <Array.au3>
#endregion ;************ Includes ************

; _csvTo2DArray
Local $re = _csvTo2DArray("c:\Repository.csv", ';')
_ArrayDisplay($re)

Func _csvTo2DArray($file, $delim = ',')
    Local $content = FileRead($file)
    Local $rows_A = StringSplit(StringStripCR($content), @LF, 2)
    StringReplace($rows_A[0], $delim, $delim) ; count delimiters in the header row via @extended
    Local $countColumns = @extended
    Local $columns_A = 0
    Local $a2D_A[UBound($rows_A)][$countColumns + 1]
    For $z = 0 To UBound($rows_A) - 1
        $columns_A = StringSplit($rows_A[$z], $delim, 2)
        For $y = 0 To UBound($columns_A) - 1
            $a2D_A[$z][$y] = $columns_A[$y]
        Next
    Next
    Return $a2D_A
EndFunc   ;==>_csvTo2DArray
Then compare the cells with the normal string functions, or use the _Array functions on the array.
… is there another way to achieve my end goal in AutoIt, or is there a direct equivalent command set to what is outlined above to achieve my original plan?
Example using _ArraySearch() (replace ConsoleWrite() with whatever action is required):
#include <FileConstants.au3>
#include <File.au3>
#include <Array.au3>

Global Enum $CSV_COL_SITE, _
        $CSV_COL_DISTRICT, _
        $CSV_COL_REGION

Global Const $g_sFileCSV   = 'C:\bin\roster.csv'
Global Const $g_sFileDelim = ','
Global Const $g_iColSearch = $CSV_COL_DISTRICT
Global Const $g_sValSearch = '1'
Global Const $g_sMessage   = 'Matched row #%i : %s\n'

Global $g_iRow = 0
Global $g_aCSV

_FileReadToArray($g_sFileCSV, $g_aCSV, $FRTA_NOCOUNT, $g_sFileDelim)

While True
    $g_iRow = _ArraySearch($g_aCSV, $g_sValSearch, ($g_iRow ? $g_iRow + 1 : $g_iRow), 0, 0, 0, 1, $g_iColSearch, False)
    If @error Then ExitLoop
    ConsoleWrite(StringFormat($g_sMessage, $g_iRow, _ArrayToString($g_aCSV, $g_sFileDelim, $g_iRow, $g_iRow, '')))
WEnd
Console output (row number +1 for presence of header record):
Matched row #2 : 1,1,1
Matched row #3 : 2,1,1
Matched row #4 : 3,1,2