Rust: Deserializing JSON

I am having trouble deserializing JSON data sent from my client.
server.rs
use std::collections::HashMap;
use std::sync::{Arc, Mutex};
use tokio::net::{TcpListener, TcpStream};
use tokio::io::{AsyncWriteExt, AsyncReadExt};
use serde_json::Value;
/*
The type Arc<T> provides shared ownership of a value of type T, allocated in the heap. Invoking clone on Arc produces a new Arc instance, which points to the same allocation on the heap as the source Arc, while increasing a reference count. When the last Arc pointer to a given allocation is destroyed, the value stored in that allocation (often referred to as “inner value”) is also dropped.
*/
// creating a type alias for the user-to-socket map;
// the Arc lets every spawned task share ownership of the same map
type UserToSocket = Arc<Mutex<HashMap<String, TcpStream>>>;
#[tokio::main]
async fn main() {
    let listener = TcpListener::bind("127.0.0.1:9090").await;
    // creating a thread-safe hashmap behind a mutex
    let local_db: UserToSocket = Arc::new(Mutex::new(HashMap::new()));
    let listener = match listener {
        Ok(value) => value,
        Err(_) => panic!("ERROR OCCURRED"),
    };
    println!("[+] Listener has been started");
    loop {
        // now waiting for a connection
        println!("[+] Listening for connection");
        let (socket, addr) = listener.accept().await.unwrap();
        println!("[+] A connection accepted from {:?}, spawning a new task for it", addr);
        // cloning an Arc does not clone the data; it just increments the reference count
        let ld = Arc::clone(&local_db);
        // spawning a new task
        tokio::spawn(async move {
            handler(socket, ld).await;
        });
    }
}
// a handler for a new connection
async fn handler(mut socket: TcpStream, _db: UserToSocket) {
    socket.write_all(b"[+] Hello Friend, Welcome to my program\r\n").await.unwrap();
    let mut buf = vec![0; 1024];
    loop {
        match socket.read(&mut buf).await {
            // Ok(0) means the peer closed the connection
            Ok(0) => {
                println!("Client closed connection");
                return;
            }
            // getting some data; n holds the number of bytes read
            Ok(_n) => {
                // ownership is transferred, so we need to clone the buffer
                let bufc = buf.clone();
                // unmarshalling the JSON
                //let parsed: Value = serde_json::from_slice(&bufc).unwrap();
                // obtaining a string
                match String::from_utf8(bufc) {
                    Ok(val) => {
                        println!("[+] So the parsed value is {}", val);
                        let parsed: Value = serde_json::from_str(&val).unwrap();
                        println!("{:?}", parsed);
                        socket.write_all(b"So yeah thanks for sending this\r\n").await.unwrap();
                        continue;
                    }
                    Err(err) => {
                        println!("ERROR Could not convert to string {:?}", err);
                        continue;
                    }
                };
            }
            Err(_) => {
                println!("Unhandled error occurred");
                return;
            }
        }
    }
}
client.rs
use tokio::net::TcpStream;
use tokio::io::{AsyncReadExt, AsyncWriteExt};
use std::{thread, time};

#[tokio::main]
async fn main() {
    let sleep_time = time::Duration::from_secs(2);
    let socket = TcpStream::connect("127.0.0.1:9090").await;
    let mut socket = match socket {
        Ok(v) => {
            println!("[+] Successfully connected");
            v
        }
        Err(_) => {
            println!("ERROR could not connect to the server");
            std::process::exit(-1);
        }
    };
    let mut buf = vec![0; 1024];
    //let mut user_input = String::new();
    loop {
        thread::sleep(sleep_time);
        match socket.read(&mut buf).await {
            Ok(0) => {
                println!("[+] Connection with server has been closed");
                std::process::exit(1);
            }
            Ok(_n) => {
                let bc = buf.clone();
                let res = String::from_utf8(bc).unwrap();
                println!("[+] Server responded with {}", res);
            }
            Err(_) => {
                panic!("[-] Some fatal error occurred");
            }
        }
        println!("You want to say: ");
        /*let _v = match io::stdin().read_line(&mut user_input) {
            Ok(val) => val,
            Err(_) => panic!("ERROR"),
        };*/
        let val = "{\"name\": \"John Doe\",\"age\": 43,\"phones\": [\"+44 1234567\",\"+44 2345678\"]}\r\n";
        socket.write(val.as_bytes()).await.unwrap();
    }
}
When I send JSON data to the server, I receive an error.
thread 'tokio-runtime-worker' panicked at 'called Result::unwrap() on an Err value: Error("trailing characters", line: 2, column: 1)', src\bin\simple_server.rs:79:71
This error does not occur when I try to deserialize the JSON string directly. It only occurs when I send the data over the network.

Since your JSON is newline-terminated, you should use something like read_line() to read it. (And you should never send pretty-printed JSON, because it will contain newlines - but serde_json creates non-formatted JSON by default.)
For example, this compiles and should work as intended:
use serde_json::Value;
use std::collections::HashMap;
use std::sync::{Arc, Mutex};
use tokio::io::{AsyncBufReadExt, AsyncWriteExt, BufStream};
use tokio::net::{TcpListener, TcpStream};

type UserToSocket = Arc<Mutex<HashMap<String, TcpStream>>>;

// ... main unchanged from your implementation ...

async fn handler(socket: TcpStream, _db: UserToSocket) {
    // BufStream adds buffering so we can read line by line
    let mut socket = BufStream::new(socket);
    socket
        .write_all(b"[+] Hello Friend, Welcome to my program\r\n")
        .await
        .unwrap();
    socket.flush().await.unwrap();
    let mut line = vec![];
    loop {
        line.clear();
        // read exactly one newline-terminated message
        if let Err(e) = socket.read_until(b'\n', &mut line).await {
            println!("Unhandled error occurred: {}", e);
            return;
        }
        if line.is_empty() {
            println!("Client closed connection");
            return;
        }
        println!(
            "[+] So the received value is {}",
            String::from_utf8_lossy(&line)
        );
        let parsed: Value = serde_json::from_slice(&line).unwrap();
        println!("{:?}", parsed);
        socket
            .write_all(b"So yeah thanks for sending this\r\n")
            .await
            .unwrap();
        socket.flush().await.unwrap();
        continue;
    }
}
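The client side of the fix is to make sure each message goes out as exactly one line. A minimal sketch of that idea (the Message struct and send_message helper here are hypothetical, and this assumes serde's derive feature is enabled): serialize with serde_json::to_string, which produces compact single-line JSON, then append the newline yourself.
use serde::Serialize;
use tokio::io::AsyncWriteExt;
use tokio::net::TcpStream;

// hypothetical payload type, purely for illustration
#[derive(Serialize)]
struct Message {
    name: String,
    age: u8,
}

async fn send_message(socket: &mut TcpStream, msg: &Message) -> std::io::Result<()> {
    // to_string produces compact, single-line JSON (no embedded newlines)
    let mut payload = serde_json::to_string(msg).expect("serialization failed");
    payload.push('\n'); // the newline marks the end of one message
    socket.write_all(payload.as_bytes()).await
}
That keeps the framing (one JSON value per line) symmetric with the server's read_until(b'\n', ...).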

Related

How can I append json data in memory to the end of a new-line delimited json file without reading the file into memory in Rust? [duplicate]

I am using this code to append a new line to the end of a file:
let text = "New line".to_string();
let mut option = OpenOptions::new();
option.read(true);
option.write(true);
option.create(true);
match option.open("foo.txt") {
    Err(e) => {
        println!("Error");
    }
    Ok(mut f) => {
        println!("File opened");
        let size = f.seek(SeekFrom::End(0)).unwrap();
        let n_text = match size {
            0 => text.clone(),
            _ => format!("\n{}", text),
        };
        match f.write_all(n_text.as_bytes()) {
            Err(e) => {
                println!("Write error");
            }
            Ok(_) => {
                println!("Write success");
            }
        }
        f.sync_all();
    }
}
It works, but I think it's overly complicated. I found option.append(true), but if I use it instead of option.write(true) I get "Write error".
Using OpenOptions::append is the clearest way to append to a file:
use std::fs::OpenOptions;
use std::io::prelude::*;

fn main() {
    let mut file = OpenOptions::new()
        .write(true)
        .append(true)
        .open("my-file")
        .unwrap();
    if let Err(e) = writeln!(file, "A new line!") {
        eprintln!("Couldn't write to file: {}", e);
    }
}
As of Rust 1.8.0 (commit) and RFC 1252, append(true) implies write(true). This should not be a problem anymore.
Before Rust 1.8.0, you must use both write and append — the first one allows you to write bytes into a file, the second specifies where the bytes will be written.
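On a modern toolchain, then, a minimal sketch of the append-only variant (same writeln! pattern as above; create(true) is added here so the file is created on first run):
use std::fs::OpenOptions;
use std::io::Write;

fn main() -> std::io::Result<()> {
    // append(true) implies write(true) on Rust 1.8.0 and later
    let mut file = OpenOptions::new()
        .create(true) // create the file if it does not exist yet
        .append(true)
        .open("my-file")?;
    writeln!(file, "A new line!")?;
    Ok(())
}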

thread 'main' panicked at 'assertion failed: self.remaining() >= dst.len()' when trying to connect to a MySQL database

So when I try to connect to a MySQL database, the main thread panics. It was working fine with SQLite, but when I try to use it with MySQL it's not working.
cargo check also passes fine. The error is located at line 20, where I create the database connection.
my main.rs
my main.rs
use dotenv::dotenv;
use roadoxe::data::AppDatabase;
use structopt::StructOpt;

#[derive(StructOpt, Debug)]
#[structopt(name = "roadoxe")]
struct Opt {
    #[structopt(default_value = "mysql://root:root@127.0.0.1:33060/automate")]
    connection_string: String,
}

fn main() {
    dotenv().ok();
    let opt = Opt::from_args();
    let rt = tokio::runtime::Runtime::new().expect("failed to spawn tokio runtime");
    let handle = rt.handle().clone();
    let database = rt.block_on(async move { AppDatabase::new(&opt.connection_string).await });
    let config = roadoxe::RocketConfig {
        database,
        // maintenance,
    };
    rt.block_on(async move {
        roadoxe::rocket(config)
            .launch()
            .await
            .expect("failed to launch rocket server");
    })
}
and data mod.rs
use sqlx::MySql;
// use std::str::FromStr;

#[derive(Debug, thiserror::Error)]
pub enum DataError {
    #[error("Database Error: {0}")]
    Database(#[from] sqlx::Error),
}

pub type AppDatabase = Database<MySql>;
pub type DatabasePool = sqlx::mysql::MySqlPool;
pub type Transaction<'a> = sqlx::Transaction<'a, MySql>;
pub type AppDatabaseRow = sqlx::mysql::MySqlRow;
pub type AppQueryResult = sqlx::mysql::MySqlQueryResult;

pub struct Database<D: sqlx::Database>(sqlx::Pool<D>);

impl Database<MySql> {
    pub async fn new(path: &str) -> Self {
        let pool = sqlx::mysql::MySqlPoolOptions::new().connect(path).await;
        match pool {
            Ok(pool) => Self(pool),
            Err(e) => {
                eprintln!("{}\n", e);
                eprintln!(
                    "If the database has not yet been created, run \n    $ sqlx database setup\n"
                );
                panic!("Failed to connect to database");
            }
        }
    }

    pub fn get_pool(&self) -> &DatabasePool {
        &self.0
    }
}
I have no idea what's happening here; I'm fairly new to Rust. Thanks for your time.
The first issue was the port: it is supposed to be 3306 rather than 33060. The second issue was the host: it doesn't work with 127.0.0.1, but localhost seems to work fine. I just needed to switch
#[structopt(default_value = "mysql://root:root@127.0.0.1:33060/automate")]
with
#[structopt(default_value = "mysql://root:root@localhost:3306/automate")]
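If a connection URL keeps misbehaving, sqlx can also build the connection from explicit fields via MySqlConnectOptions, which avoids URL parsing and escaping altogether. A minimal sketch under that assumption (credentials and database name copied from the question; adjust to your setup):
use sqlx::mysql::{MySqlConnectOptions, MySqlPoolOptions};

// builds the pool without going through a connection URL
async fn connect() -> Result<sqlx::MySqlPool, sqlx::Error> {
    let options = MySqlConnectOptions::new()
        .host("localhost")
        .port(3306) // the classic MySQL protocol port, not 33060 (X protocol)
        .username("root")
        .password("root")
        .database("automate");
    // connect_with takes the options struct instead of a URL string
    MySqlPoolOptions::new().connect_with(options).await
}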

Unhandled Exception: type 'List<dynamic>' is not a subtype of type 'List<Model>'

I am trying to parse JSON data that I get from an online API but get the error mentioned in the title. I looked at similar questions but could not find a solution.
Here is the relevant code:
class Model {
  final double a;
  final double b;
  final String s;

  Model._({this.a, this.b, this.s});

  factory Model.fromJson(Map<String, dynamic> json) {
    return new Model._(
      a: json['a'],
      b: json['b'],
      s: json['s'],
    );
  }
}
void fetchPairValue() async {
  final response = await http.get('https://api.1forge.com/quotes?pairs=USD/TRY,EUR/USD,EUR/TRY,CAD/TRY,GBP/TRY,AUD/TRY,JPY/TRY,CHF/TRY,AED/TRY,USD/QAR,USD/BGN,DKK/TRY,USD/SAR,USD/CNY,USD/RUB,NOK/TRY,SEK/TRY'
      '&api_key=KatWbQa9sDFmYQ25LmtAMlGau5xKSWIe');
  if (response.statusCode == 200) {
    // If the call to the server was successful, parse the JSON
    List<Model> list = json.decode(response.body).map((data) => Model.fromJson(data))
        .toList();
    setState(() {
      currencyPair[0][1] = round_to_4(list[0].b).toString();
      currencyPair[0][2] = round_to_4(list[0].a).toString();
      for (int i = 1; i < currencyPair.length; i++) {
        if (list[i].s.startsWith('USD')) {
          currencyPair[i][1] = round_to_4(list[i].b / list[1].b).toString();
          currencyPair[i][2] = round_to_4(list[i].a / list[1].a).toString();
        } else {
          currencyPair[i][1] = round_to_4(list[i].b).toString();
          currencyPair[i][2] = round_to_4(list[i].a).toString();
        }
      }
    });
  } else {
    // If that call was not successful, throw an error.
    throw Exception('Failed to load post');
  }
}
Sample JSON data:
[{"p":1.21856,"a":1.22201,"b":1.2151,"s":"EUR/USD","t":1608934265255},{"p":7.5575,"a":7.5625,"b":7.5525,"s":"USD/TRY","t":1608908143931},{"p":9.26299,"a":9.27256,"b":9.25342,"s":"EUR/TRY","t":1608879625018},{"p":6.037513,"a":6.039437,"b":6.035589,"s":"CAD/TRY","t":1608933871214},{"p":10.297348,"a":10.316695,"b":10.278,"s":"GBP/TRY","t":1608879629130},{"p":5.7738,"a":5.7885,"b":5.7591,"s":"AUD/TRY","t":1608879564069},{"p":0.07303697,"a":0.07308529,"b":0.07298864,"s":"JPY/TRY","t":1608908143937},{"p":8.529457,"a":8.538269,"b":8.520645,"s":"CHF/TRY","t":1608879624835},{"p":2.057672,"a":2.059033,"b":2.056311,"s":"AED/TRY","t":1608908143934},{"p":3.6413,"a":3.642,"b":3.6405,"s":"USD/QAR","t":1608847204796},{"p":1.6069188,"a":1.61497,"b":1.5988675,"s":"USD/BGN","t":1608861813327},{"p":1.2452666,"a":1.2465531,"b":1.24398,"s":"DKK/TRY","t":1608879625024},{"p":3.752353,"a":3.755106,"b":3.7496,"s":"USD/SAR","t":1608879629251},{"p":6.5418,"a":6.5428,"b":6.5408,"s":"USD/CNY","t":1608909993197},{"p":74.06,"a":74.095,"b":74.025,"s":"USD/RUB","t":1608930021562},{"p":0.87736,"a":0.878167,"b":0.876553,"s":"NOK/TRY","t":1608847205092},{"p":0.917155,"a":0.918032,"b":0.916278,"s":"SEK/TRY","t":1608847203927}]
How to fix? Thanks.
You need to add the generic type to the map function (and decode response.body, not the response object itself):
List<Model> list = json
    .decode(response.body)
    .map<Model>((data) => Model.fromJson(data))
    .toList();

Node JS: Make a flat json from a tree json

I was writing a Node.js script to combine all the JSON files in a directory and store the result as a new JSON file. I got the job done to a great extent, but it has a few flaws.
A.json
[
    {
        "id": "addEmoticon1",
        "description": "Message to greet the user.",
        "defaultMessage": "Hello, {name}!"
    },
    {
        "id": "addPhoto1",
        "description": "How are youu.",
        "defaultMessage": "How are you??"
    }
]
B.json
[
    {
        "id": "close1",
        "description": "Close it.",
        "defaultMessage": "Close!"
    }
]
What I finally need is:
result.json
{
    "addEmoticon1": "Hello, {name}!",
    "addPhoto1": "How are you??",
    "close1": "Close!"
}
I wrote a node.js script:
var fs = require('fs');

function readFiles(dirname, onFileContent, onError) {
    fs.readdir(dirname, function(err, filenames) {
        if (err) {
            onError(err);
            return;
        }
        filenames.forEach(function(filename) {
            fs.readFile(dirname + filename, 'utf-8', function(err, content) {
                if (err) {
                    onError(err);
                    return;
                }
                onFileContent(filename, content);
            });
        });
    });
}

var data = {};

readFiles('C:/node/test/', function(filename, content) {
    data[filename] = content;
    var lines = content.split('\n');
    lines.forEach(function(line) {
        var parts = line.split('"');
        if (parts[1] == 'id') {
            fs.appendFile('result.json', parts[3] + ': ', function (err) {});
        }
        if (parts[1] == 'defaultMessage') {
            fs.appendFile('result.json', parts[3] + ',\n', function (err) {});
        }
    });
}, function(err) {
    throw err;
});
It extracts the 'id' and 'defaultMessage' but is not able to append correctly.
What I get:
result.json
addEmoticon1: addPhoto1: Hello, {name}!,
close1: How are you??,
Close!,
This output is different every time I run my script.
Aim 1: Surround items in double quotes.
Aim 2: Add curly braces at the top and at the end.
Aim 3: No comma after the last element.
Aim 4: Same output every time I run my script.
I'll start with the finished solution...
There's a big explanation at the end of this answer. Let's try to think big-picture for a little bit first, though.
readdirp('.')
    .fmap(filter(match(/\.json$/)))
    .fmap(map(readfilep))
    .fmap(map(fmap(JSON.parse)))
    .fmap(concatp)
    .fmap(flatten)
    .fmap(reduce(createMap)({}))
    .fmap(data=> JSON.stringify(data, null, '\t'))
    .fmap(writefilep(resolve(__dirname, 'result.json')))
    .then(filename=> console.log('wrote results to %s', filename), err=>console.error(err));
Console output
wrote results to /path/to/result.json
result.json (I added a c.json with some data to show that this works with more than 2 files)
{
    "addEmoticon1": "Hello, {name}!",
    "addPhoto1": "How are you??",
    "close1": "Close!",
    "somethingelse": "Something!"
}
Implementation
I made Promise-based interfaces for readdir, readFile, and writeFile:
import {readdir, readFile, writeFile} from 'fs';

const readdirp = dir=>
    new Promise((pass,fail)=>
        readdir(dir, (err, filenames) =>
            err ? fail(err) : pass(mapResolve (dir) (filenames))));

const readfilep = path=>
    new Promise((pass,fail)=>
        readFile(path, 'utf8', (err,data)=>
            err ? fail(err) : pass(data)));

const writefilep = path=> data=>
    new Promise((pass,fail)=>
        writeFile(path, data, err=>
            err ? fail(err) : pass(path)));
In order to map functions to our Promises, we needed an fmap utility. Notice how we take care to bubble errors up.
Promise.prototype.fmap = function fmap(f) {
    return new Promise((pass,fail) =>
        this.then(x=> pass(f(x)), fail));
};
And here's the rest of the utilities
const fmap = f=> x=> x.fmap(f);
const mapResolve = dir=> map(x=>resolve(dir,x));
const map = f=> xs=> xs.map(x=> f(x));
const filter = f=> xs=> xs.filter(x=> f(x));
const match = re=> s=> re.test(s);
const concatp = xs=> Promise.all(xs);
const reduce = f=> y=> xs=> xs.reduce((y,x)=> f(y)(x), y);
const flatten = reduce(y=> x=> y.concat(Array.isArray(x) ? flatten (x) : x)) ([]);
Lastly, the one custom function that does your work
const createMap = map=> ({id, defaultMessage})=>
Object.assign(map, {[id]: defaultMessage});
And here's c.json
[
    {
        "id": "somethingelse",
        "description": "something",
        "defaultMessage": "Something!"
    }
]
"Why so many little functions ?"
Well despite what you may think, you have a pretty big problem. And big problems are solved by combining several small solutions. The most prominent advantage of this code is that each function has a very distinct purpose and it will always produce the same results for the same inputs. This means each function can be used other places in your program. Another advantage is that smaller functions are easier to read, reason with, and debug.
Compare all of this to the other answers given here; #BlazeSahlen's in particular. That's over 60 lines of code that's basically only usable to solve this one particular problem. And it doesn't even filter out non-JSON files. So the next time you need to create a sequence of actions on reading/writing files, you'll have to rewrite most of those 60 lines each time. It creates lots of duplicated code and hard-to-find bugs because of exhausting boilerplate. And all that manual error-handling... wow, just kill me now. And he/she thought callback hell was bad ? haha, he/she just created yet another circle of hell all on his/her own.
All the code together...
Functions appear (roughly) in the order they are used
import {readdir, readFile, writeFile} from 'fs';
import {resolve} from 'path';

// logp : Promise<Value> -> Void
const logp = p=> p.then(x=> console.log(x), x=> console.error(x));

// fmap : Promise<a> -> (a->b) -> Promise<b>
Promise.prototype.fmap = function fmap(f) {
    return new Promise((pass,fail) =>
        this.then(x=> pass(f(x)), fail));
};

// fmap : (a->b) -> F<a> -> F<b>
const fmap = f=> x=> x.fmap(f);

// readdirp : String -> Promise<Array<String>>
const readdirp = dir=>
    new Promise((pass,fail)=>
        readdir(dir, (err, filenames) =>
            err ? fail(err) : pass(mapResolve (dir) (filenames))));

// mapResolve : String -> Array<String> -> Array<String>
const mapResolve = dir=> map(x=>resolve(dir,x));

// map : (a->b) -> Array<a> -> Array<b>
const map = f=> xs=> xs.map(x=> f(x));

// filter : (Value -> Boolean) -> Array<Value> -> Array<Value>
const filter = f=> xs=> xs.filter(x=> f(x));

// match : RegExp -> String -> Boolean
const match = re=> s=> re.test(s);

// readfilep : String -> Promise<String>
const readfilep = path=>
    new Promise((pass,fail)=>
        readFile(path, 'utf8', (err,data)=>
            err ? fail(err) : pass(data)));

// concatp : Array<Promise<Value>> -> Promise<Array<Value>>
const concatp = xs=> Promise.all(xs);

// reduce : (b->a->b) -> b -> Array<a> -> b
const reduce = f=> y=> xs=> xs.reduce((y,x)=> f(y)(x), y);

// flatten : Array<Array<Value>> -> Array<Value>
const flatten = reduce(y=> x=> y.concat(Array.isArray(x) ? flatten (x) : x)) ([]);

// writefilep : String -> Value -> Promise<String>
const writefilep = path=> data=>
    new Promise((pass,fail)=>
        writeFile(path, data, err=>
            err ? fail(err) : pass(path)));

// -----------------------------------------------------------------------------

// createMap : Object -> Object -> Object
const createMap = map=> ({id, defaultMessage})=>
    Object.assign(map, {[id]: defaultMessage});

// do it !
readdirp('.')
    .fmap(filter(match(/\.json$/)))
    .fmap(map(readfilep))
    .fmap(map(fmap(JSON.parse)))
    .fmap(concatp)
    .fmap(flatten)
    .fmap(reduce(createMap)({}))
    .fmap(data=> JSON.stringify(data, null, '\t'))
    .fmap(writefilep(resolve(__dirname, 'result.json')))
    .then(filename=> console.log('wrote results to %s', filename), err=>console.error(err));
Still having trouble following along?
It's not easy to see how these things work at first. This is a particularly squirrely problem because the data gets nested very quickly. Thankfully that doesn't mean our code has to be a big nested mess just to solve the problem! Notice the code stays nice and flat even when we're dealing with things like a Promise of an Array of Promises of JSON...
// Here we are reading directory '.'
// We will get a Promise<Array<String>>
// Let's say the files are 'a.json', 'b.json', 'c.json', and 'run.js'
// Promise will look like this:
// Promise<['a.json', 'b.json', 'c.json', 'run.js']>
readdirp('.')

    // Now we're going to strip out any non-JSON files
    // Promise<['a.json', 'b.json', 'c.json']>
    .fmap(filter(match(/\.json$/)))

    // call `readfilep` on each of the files
    // We will get Promise<Array<Promise<JSON>>>
    // Don't freak out, it's not that bad!
    // Promise<[Promise<JSON>, Promise<JSON>, Promise<JSON>]>
    .fmap(map(readfilep))

    // for each file's Promise, we want to parse the data as JSON
    // JSON.parse returns an object, so the structure will be the same
    // except JSON will be an object!
    // Promise<[Promise<Object>, Promise<Object>, Promise<Object>]>
    .fmap(map(fmap(JSON.parse)))

    // Now we can start collapsing some of the structure
    // `concatp` will convert Array<Promise<Value>> to Promise<Array<Value>>
    // We will get
    // Promise<[Object, Object, Object]>
    // Remember, we have 3 Objects; one for each parsed JSON file
    .fmap(concatp)

    // Your particular JSON structures are Arrays, which are also Objects,
    // so that means `concatp` will actually return Promise<[Array, Array, Array]>
    // but we'd like to flatten that
    // that way each parsed JSON file gets mushed into a single data set
    // after flatten, we will have
    // Promise<Array<Object>>
    .fmap(flatten)

    // Here's where it all comes together
    // now that we have a single Promise of an Array containing all of your objects ...
    // We can simply reduce the array and create the mapping of key:values that you wish
    // `createMap` is custom tailored for the mapping you need
    // we initialize the `reduce` with an empty object, {}
    // after it runs, we will have Promise<Object>
    // where Object is your result
    .fmap(reduce(createMap)({}))

    // It's all downhill from here
    // We currently have Promise<Object>
    // but before we write that to a file, we need to convert it to JSON
    // JSON.stringify(data, null, '\t') will pretty print the JSON using tabs to indent
    // After this, we will have Promise<JSON>
    .fmap(data=> JSON.stringify(data, null, '\t'))

    // Now that we have JSON, we can easily write this to a file
    // We'll use `writefilep` to write the result to `result.json` in the current working directory
    // I wrote `writefilep` to pass the filename on success
    // so when this finishes, we will have
    // Promise<Path>
    // You could have it return Promise<Void> like writeFile sends void to the callback; up to you.
    .fmap(writefilep(resolve(__dirname, 'result.json')))

    // the grand finale
    // alert the user that everything is done (or if an error occurred)
    // Remember `.then` is like a fork in the road:
    // the code will go to the left function on success, and the right on failure
    // Here, we're using a generic function to say we wrote the file out
    // If a failure happens, we write that to console.error
    .then(filename=> console.log('wrote results to %s', filename), err=>console.error(err));
All done!
Assuming files is a list of arrays, [a, b, ...]:
var res = {};
files.reduce((a, b) => a.concat(b), []).forEach(o => res[o.id] = o.defaultMessage);
But you don't need to get all the files at once. Just add this code to the onFileContent callback:
JSON.parse(fileContent).forEach(o => res[o.id] = o.defaultMessage);
Also, you should add a final callback to your readFiles. And in this callback:
fs.writeFile('result.json', JSON.stringify(res));
So, final solution for you:
var fs = require('fs');

function task(dir, it, cb) {
    fs.readdir(dir, (err, names) => {
        if (err) return cb([err]);
        var errors = [], c = names.length;
        names.forEach(name => {
            fs.readFile(dir + name, 'utf-8', (err, data) => {
                if (err) {
                    errors.push(err);
                } else {
                    try {
                        it(JSON.parse(data)); // We got the file's data!
                    } catch (e) {
                        errors.push('Invalid json in ' + name + ': ' + e.message);
                    }
                }
                if (!--c) cb(errors); // We are finished
            });
        });
    });
}

var res = {};
task('C:/node/test/', (data) => data.forEach(o => res[o.id] = o.defaultMessage), (errors) => {
    // Some files can be wrong
    errors.forEach(err => console.error(err));
    // But we write the received data anyway
    fs.writeFile('C:/node/test/result.json', JSON.stringify(res), (err) => {
        if (err) console.error(err);
        else console.log('Task finished. See result.json');
    })
});
This should do it once you have your JSON in variables a and b:
var a = [
    {
        "id": "addEmoticon1",
        "description": "Message to greet the user.",
        "defaultMessage": "Hello, {name}!"
    },
    {
        "id": "addPhoto1",
        "description": "How are youu.",
        "defaultMessage": "How are you??"
    }
];
var b = [
    {
        "id": "close1",
        "description": "Close it.",
        "defaultMessage": "Close!"
    }
];
var c = a.concat(b);
var res = {}; // use an object, not an array, since the ids are string keys
for (var i = 0; i < c.length; i++) {
    res[c[i].id] = c[i].defaultMessage;
}
console.log(res);
Here's my solution:
function readFiles(dirname, onFileContent, onError) {
    fs.readdir(dirname, function(err, filenames) {
        /**
         * We'll store the parsed JSON data in this array
         * @type {Array}
         */
        var fileContent = [];
        if (err) {
            onError(err);
        } else {
            filenames.forEach(function(filename) {
                // Reading the file (synchronously) and storing the parsed JSON output (parsing from string to JSON object)
                var jsonObject = JSON.parse(fs.readFileSync(dirname + filename, 'utf-8'));
                // Pushing the parsed JSON output into the array
                fileContent.push(jsonObject);
            });
            // Calling the callback
            onFileContent(fileContent);
        }
    });
}

readFiles('./files/', function(fileContent) {
    /**
     * We'll store the final output object here
     * @type {Object}
     */
    var output = {};
    // Loop over the JSON objects
    fileContent.forEach(function(each) {
        // Looping within each object
        for (var index in each) {
            // Copying the `id` as key and the `defaultMessage` as value and storing in the output object
            output[each[index].id] = each[index].defaultMessage;
        }
    });
    // Writing the file (synchronously) after converting the JSON object back to string
    fs.writeFileSync('result.json', JSON.stringify(output));
}, function(err) {
    throw err;
});
A notable difference is that I've not used the asynchronous readFile and writeFile functions, as they'd needlessly complicate the example. This example is meant to showcase the use of JSON.parse and JSON.stringify to do what the OP wants.
UPDATE:
var fs = require('fs');

function readFiles(dirname, onEachFilename, onComplete) {
    fs.readdir(dirname, function(err, filenames) {
        if (err) {
            throw err;
        } else {
            // Prepending the dirname to each filename
            filenames.forEach(function(each, index, array) {
                array[index] = dirname + each;
            });
            // Calling async.map, which accepts these parameters:
            // filenames <-------- array of filenames
            // onEachFilename <--- function which will be applied to each filename
            // onComplete <------- function to call when all elements of the filenames array have been processed
            require('async').map(filenames, onEachFilename, onComplete);
        }
    });
}

readFiles('./files/', function(item, callback) {
    // Read the file asynchronously
    fs.readFile(item, function(err, data) {
        if (err) {
            callback(err);
        } else {
            callback(null, JSON.parse(data));
        }
    });
}, function(err, results) {
    /**
     * We'll store the final output object here
     * @type {Object}
     */
    var output = {};
    if (err) {
        throw err;
    } else {
        // Loop over the JSON objects
        results.forEach(function(each) {
            // Looping within each object
            for (var index in each) {
                // Copying the `id` as key and the `defaultMessage` as value and storing in the output object
                output[each[index].id] = each[index].defaultMessage;
            }
        });
        // Writing the file (synchronously) after converting the JSON object back to string
        fs.writeFileSync('result.json', JSON.stringify(output));
    }
});
This is a simple asynchronous implementation of the same, using readFile. For more information, see async.map.

Unity3d Facebook Response Data parsing

I was able to solve this; the solution is in the comment.
I have a problem with parsing the data that I get from Facebook in appRequestCallback.
The request is sent and that part is OK. But I need to parse the sent data for internal use.
The code is this:
private void appRequestCallback(FBResult result)
{
    Util.Log("appRequestCallback");
    if (result != null)
    {
        var responseObject = Json.Deserialize(result.Text) as Dictionary<string, object>;
        object obj = 0;
        string resp = (string)responseObject["request"];
        Util.Log("resp : " + resp);
        if (responseObject.TryGetValue("cancelled", out obj))
        {
            Util.Log("Request cancelled");
        }
        else if (responseObject.TryGetValue("request", out obj))
        {
            responseObject.TryGetValue("to", out obj);
            string[] s = (string[])obj;
            Util.Log("s: " + s);
            AddPopupMessage("Request Sent", ChallengeDisplayTime);
            Util.Log("Request sent");
        }
    }
}
In the console I get this
appRequestCallback
UnityEngine.Debug:Log(Object)
resp: 870884436303337
UnityEngine.Debug:Log(Object)
And then the error:
InvalidCastException: Cannot cast from source type to destination type.
MainMenu.appRequestCallback (.FBResult result) (at Assets/Resources/Scripts/MainMenu.cs:482)
Facebook.AsyncRequestDialogPost.CallbackWithErrorHandling (.FBResult result)
Facebook.AsyncRequestString+c__Iterator0.MoveNext ()
The problem is in parsing the to: part of the JSON, and I am not sure why. I have tried to cast it into string, string[], List<>, Array, and ArrayList. As far as I can see, the problem is that I am not using the right cast type for to:, but I cannot figure out what the correct cast type is.