JSON4S error at extract[T] - json

I am using json4s to extract data according to a case class, but I am getting an "unknown error". My Scala version is 2.10.2 and Json4S is 3.2.10.
My code looks like:
import org.json4s._
import org.json4s.jackson.JsonMethods._
implicit val formats = org.json4s.DefaultFormats
case class Person(name: String, age: Int)
// Minimal reproduction: extract a Map[String, Person] from a JSON object
// whose keys are ids ("1") and whose values match the Person case class.
class user{
  // Parses the hard-coded JSON and prints the extracted Map[String, Person].
  // Relies on the implicit DefaultFormats in scope above.
  def add(){
    val json="""{"1":{"name":"user1", "age":16}}"""
    print(parse(json).extract[Map[String,Person]])
  }
}
Could anyone suggest what I am missing here?
I tried based on the suggestion given here:
JSON4S unknown error
https://github.com/json4s/json4s/issues/125
But I still get following error:
java.lang.NoSuchMethodError: scala.collection.immutable.$colon$colon.hd$1()Ljava/lang/Object;
at org.json4s.MonadicJValue.$bslash(MonadicJValue.scala:18)
at org.json4s.Extraction$ClassInstanceBuilder$$anonfun$14.apply(Extraction.scala:463)
at org.json4s.Extraction$ClassInstanceBuilder$$anonfun$14.apply(Extraction.scala:463)
at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:245)
at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:245)
at scala.collection.mutable.ResizableArray$class.foreach(ResizableArray.scala:59)
at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:48)
at scala.collection.TraversableLike$class.map(TraversableLike.scala:245)
at scala.collection.AbstractTraversable.map(Traversable.scala:104)
at org.json4s.Extraction$ClassInstanceBuilder.org$json4s$Extraction$ClassInstanceBuilder$$instantiate(Extraction.scala:451)
at org.json4s.Extraction$ClassInstanceBuilder$$anonfun$result$6.apply(Extraction.scala:491)
at org.json4s.Extraction$ClassInstanceBuilder$$anonfun$result$6.apply(Extraction.scala:488)
at org.json4s.Extraction$.org$json4s$Extraction$$customOrElse(Extraction.scala:500)
at org.json4s.Extraction$ClassInstanceBuilder.result(Extraction.scala:488)
at org.json4s.Extraction$.extract(Extraction.scala:332)
at org.json4s.Extraction$$anonfun$extract$5.apply(Extraction.scala:316)
at org.json4s.Extraction$$anonfun$extract$5.apply(Extraction.scala:316)
at scala.collection.immutable.List.map(List.scala:273)
at org.json4s.Extraction$.extract(Extraction.scala:316)
at org.json4s.Extraction$.extract(Extraction.scala:42)
at org.json4s.ExtractableJsonAstNode.extract(ExtractableJsonAstNode.scala:21)
at com.czechscala.blank.HttpMethods.parseJsonResponse(HttpMethods.scala:87)
at com.czechscala.blank.HttpMethods.getRequestFunction(HttpMethods.scala:184)
at com.czechscala.blank.Hello$$anonfun$sendParallelRequests$1$$anonfun$apply$mcVI$sp$1$$anonfun$apply$mcV$sp$1.apply$mcV$sp(Hello.scala:68)
at com.czechscala.blank.Hello$$anonfun$sendParallelRequests$1$$anonfun$apply$mcVI$sp$1$$anonfun$apply$mcV$sp$1.apply(Hello.scala:64)
at com.czechscala.blank.Hello$$anonfun$sendParallelRequests$1$$anonfun$apply$mcVI$sp$1$$anonfun$apply$mcV$sp$1.apply(Hello.scala:64)
at scala.concurrent.impl.ExecutionContextImpl$DefaultThreadFactory$$anon$2$$anon$4.block(ExecutionContextImpl.scala:48)
at scala.concurrent.forkjoin.ForkJoinPool.managedBlock(ForkJoinPool.java:3640)
at scala.concurrent.impl.ExecutionContextImpl$DefaultThreadFactory$$anon$2.blockOn(ExecutionContextImpl.scala:45)
at scala.concurrent.package$.blocking(package.scala:123)
at com.czechscala.blank.Hello$$anonfun$sendParallelRequests$1$$anonfun$apply$mcVI$sp$1.apply$mcV$sp(Hello.scala:64)
at com.czechscala.blank.Hello$$anonfun$sendParallelRequests$1$$anonfun$apply$mcVI$sp$1.apply(Hello.scala:64)
at com.czechscala.blank.Hello$$anonfun$sendParallelRequests$1$$anonfun$apply$mcVI$sp$1.apply(Hello.scala:64)
at scala.concurrent.impl.Future$PromiseCompletingRunnable.liftedTree1$1(Future.scala:24)
at scala.concurrent.impl.Future$PromiseCompletingRunnable.run(Future.scala:24)
at scala.concurrent.impl.ExecutionContextImpl$AdaptedForkJoinTask.exec(ExecutionContextImpl.scala:121)
at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
at scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
at scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
at scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
java.lang.NoSuchMethodError: scala.collection.immutable.$colon$colon.hd$1()Ljava/lang/Object;
at org.json4s.MonadicJValue.$bslash(MonadicJValue.scala:18)
at org.json4s.Extraction$ClassInstanceBuilder$$anonfun$14.apply(Extraction.scala:463)
at org.json4s.Extraction$ClassInstanceBuilder$$anonfun$14.apply(Extraction.scala:463)
at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:245)
at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:245)
at scala.collection.mutable.ResizableArray$class.foreach(ResizableArray.scala:59)
at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:48)
at scala.collection.TraversableLike$class.map(TraversableLike.scala:245)
at scala.collection.AbstractTraversable.map(Traversable.scala:104)
at org.json4s.Extraction$ClassInstanceBuilder.org$json4s$Extraction$ClassInstanceBuilder$$instantiate(Extraction.scala:451)
at org.json4s.Extraction$ClassInstanceBuilder$$anonfun$result$6.apply(Extraction.scala:491)
at org.json4s.Extraction$ClassInstanceBuilder$$anonfun$result$6.apply(Extraction.scala:488)
at org.json4s.Extraction$.org$json4s$Extraction$$customOrElse(Extraction.scala:500)
at org.json4s.Extraction$ClassInstanceBuilder.result(Extraction.scala:488)
at org.json4s.Extraction$.extract(Extraction.scala:332)
at org.json4s.Extraction$$anonfun$extract$5.apply(Extraction.scala:316)
at org.json4s.Extraction$$anonfun$extract$5.apply(Extraction.scala:316)
at scala.collection.immutable.List.map(List.scala:273)
at org.json4s.Extraction$.extract(Extraction.scala:316)
at org.json4s.Extraction$.extract(Extraction.scala:42)
at org.json4s.ExtractableJsonAstNode.extract(ExtractableJsonAstNode.scala:21)
at com.czechscala.blank.HttpMethods.parseJsonResponse(HttpMethods.scala:87)
at com.czechscala.blank.HttpMethods.getRequestFunction(HttpMethods.scala:184)
at com.czechscala.blank.Hello$$anonfun$sendParallelRequests$1$$anonfun$apply$mcVI$sp$1$$anonfun$apply$mcV$sp$1.apply$mcV$sp(Hello.scala:68)
at com.czechscala.blank.Hello$$anonfun$sendParallelRequests$1$$anonfun$apply$mcVI$sp$1$$anonfun$apply$mcV$sp$1.apply(Hello.scala:64)
at com.czechscala.blank.Hello$$anonfun$sendParallelRequests$1$$anonfun$apply$mcVI$sp$1$$anonfun$apply$mcV$sp$1.apply(Hello.scala:64)
at scala.concurrent.impl.ExecutionContextImpl$DefaultThreadFactory$$anon$2$$anon$4.block(ExecutionContextImpl.scala:48)
at scala.concurrent.forkjoin.ForkJoinPool.managedBlock(ForkJoinPool.java:3640)
at scala.concurrent.impl.ExecutionContextImpl$DefaultThreadFactory$$anon$2.blockOn(ExecutionContextImpl.scala:45)
at scala.concurrent.package$.blocking(package.scala:123)
at com.czechscala.blank.Hello$$anonfun$sendParallelRequests$1$$anonfun$apply$mcVI$sp$1.apply$mcV$sp(Hello.scala:64)
at com.czechscala.blank.Hello$$anonfun$sendParallelRequests$1$$anonfun$apply$mcVI$sp$1.apply(Hello.scala:64)
at com.czechscala.blank.Hello$$anonfun$sendParallelRequests$1$$anonfun$apply$mcVI$sp$1.apply(Hello.scala:64)
at scala.concurrent.impl.Future$PromiseCompletingRunnable.liftedTree1$1(Future.scala:24)
at scala.concurrent.impl.Future$PromiseCompletingRunnable.run(Future.scala:24)
at scala.concurrent.impl.ExecutionContextImpl$AdaptedForkJoinTask.exec(ExecutionContextImpl.scala:121)
at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
at scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
at scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
at scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
My build.sbt:
name := "blank"
version := "1.0-SNAPSHOT"
scalaVersion := "2.10.2"
libraryDependencies ++= Seq (
"org.scalatest" % "scalatest_2.10" % "1.9.1" % "test"
)
libraryDependencies ++= Seq(
"com.typesafe.akka" %% "akka-actor" % "2.1.2"
)
libraryDependencies ++= Seq(
"net.databinder" %% "dispatch" % "0.8.10"
)
libraryDependencies ++= Seq(
"org.scala-lang" % "scala-actors" % "2.10.2"
)
libraryDependencies ++= Seq(
"net.databinder" %% "dispatch-core" % "0.8.10"
)
libraryDependencies ++= Seq(
"net.databinder" %% "dispatch-futures" % "0.8.10"
)
libraryDependencies ++= Seq(
"net.databinder" %% "dispatch-nio" % "0.8.10"
)
libraryDependencies ++= Seq(
"org.slf4j" % "slf4j-api" % "1.6.4",
"org.slf4j" % "slf4j-simple" % "1.6.4"
)
libraryDependencies ++= Seq(
"org.json4s" %% "json4s-core" % "3.2.10",
"org.json4s" %% "json4s-native" % "3.2.10",
"org.json4s" %% "json4s-jackson" % "3.2.10",
"net.databinder" %% "unfiltered-netty" % "0.8.0" % "test",
"net.databinder.dispatch" % "dispatch-json4s-native_2.11" % "0.11.1"
)
libraryDependencies ++= Seq(
"io.argonaut" %% "argonaut" % "6.0.4"
)
initialCommands := "import dispatch._"

This modified JSON is working for me:
val json="""{"1":{"name":"user1", "age":"16"}}"""
Here age is wrapped in quotes ("16"), even though age is defined as Int.
When I replicated your build.sbt, it seems two conflicting versions of the json4s-related libraries are being loaded. One version is mentioned here:
"org.json4s" %% "json4s-core" % "3.2.10",
"org.json4s" %% "json4s-native" % "3.2.10",
"org.json4s" %% "json4s-jackson" % "3.2.10",
And the other version mentioned from this line
"net.databinder.dispatch" ...... "..._2.11",
which is pulling the json4s for the scala 2.11 version.
Changing the databinder artifact suffix from "_2.11" to "_2.10" makes the build work fine.

Related

How to configure transactor in doobie?

Recently I started learning doobie, but I couldn't create a Hikari transactor without errors. I'm using MySQL and IntelliJ IDEA.
This is my build.sbt file
name := "doobie"
version := "0.1"
//scalaVersion := "2.13.1"
scalacOptions += "-Ypartial-unification" // 2.11.9+
libraryDependencies ++= {
lazy val doobieVersion = "0.8.4"
Seq(
"org.tpolecat" %% "doobie-core" % doobieVersion,
"org.tpolecat" %% "doobie-h2" % doobieVersion,
"org.tpolecat" %% "doobie-hikari" % doobieVersion,
"org.tpolecat" %% "doobie-quill" % doobieVersion,
"org.tpolecat" %% "doobie-specs2" % doobieVersion,
"org.tpolecat" %% "doobie-scalatest" % doobieVersion % "test",
"mysql" % "mysql-connector-java" % "8.0.17",
"org.slf4j" % "slf4j-api" % "1.7.5",
"ch.qos.logback" % "logback-classic" % "1.0.9"
)
}
resolvers ++= Seq(
"Typesafe repository" at "http://repo.typesafe.com/typesafe/releases/"
)
This is my Connection.scala file
import cats.effect.IO
import com.zaxxer.hikari.{HikariConfig, HikariDataSource}
import doobie.hikari.HikariTransactor
// Reproduction of the problem: building a HikariTransactor directly from a
// manually configured HikariDataSource.
trait Connection {
  val config = new HikariConfig()
  // NOTE(review): the URL uses ':' before the database name; the usual form
  // is "jdbc:mysql://localhost/quill_demo" (or host:port/db) — verify.
  config.setJdbcUrl("jdbc:mysql://localhost:quill_demo")
  config.setUsername("admin")
  config.setPassword("password")
  config.setMaximumPoolSize(5)
  // This is the expression that fails to compile: HikariTransactor.apply
  // also requires ExecutionContext parameters and an implicit
  // ContextShift[IO] in this doobie version.
  val transactor: IO[HikariTransactor[IO]] =
    IO.pure(HikariTransactor.apply[IO](new HikariDataSource(config)))
}
The problem is that in the above file the IO.pure(HikariTransactor.apply[IO](new HikariDataSource(config))) statement gives errors. The last 3 closing braces give 3 errors, as below.
No implicit arguments of type: ContextShift[IO]
Unspecified value parameters: connectEC: ExecutionContext, transactEC: ExecutionContext
No implicits found for parameter evidence$2: ContextShift[IO]
All I want to know is how to do this correctly.
Try to add the following import and implicit
import scala.concurrent.ExecutionContext
implicit val cs = IO.contextShift(ExecutionContext.global)
After I change the versions of dependencies as follows, I could solve all errors.
name := "doobie"
version := "0.1"
//scalaVersion := "2.13.1"
scalacOptions += "-Ypartial-unification"
libraryDependencies ++= {
lazy val doobieVersion = "0.5.4"
Seq(
"org.tpolecat" %% "doobie-core" % doobieVersion,
"org.tpolecat" %% "doobie-h2" % doobieVersion,
"org.tpolecat" %% "doobie-hikari" % doobieVersion,
"org.tpolecat" %% "doobie-quill" % doobieVersion,
"org.tpolecat" %% "doobie-specs2" % doobieVersion,
"org.tpolecat" %% "doobie-scalatest" % doobieVersion % "test",
"mysql" % "mysql-connector-java" % "5.1.34",
"org.slf4j" % "slf4j-api" % "1.7.5",
"ch.qos.logback" % "logback-classic" % "1.0.9"
)
}
resolvers ++= Seq(
"Typesafe repository" at "http://repo.typesafe.com/typesafe/releases/"
)

How to decode missing json array as empty List with circe

For example, we have some case class
case class Foo(a: Int, b: List[String])
And we want to deserialize instance of Foo from json {"a": 1} replacing missing b array with Nil
We can create custom decoder for such behavior
implicit val fooDecoder: Decoder[Foo] = (c: HCursor) =>
for {
a <- c.downField("a").as[Int]
b <- c.downField("b").as[Option[List[String]]]
} yield Foo(a, b.getOrElse(Nil))
But, unfortunately, the created this way decoder doesn't accumulate all decoding failures.
Is there any way to create decoder with failures accumulation or any way to replace standard list deserialization behavior in circe?
Try providing default value b: List[String] = Nil using circe-generic-extras like so
import io.circe.parser._
import io.circe.generic.extras.Configuration
import io.circe.generic.extras.auto._
implicit val config: Configuration = Configuration.default.withDefaults
case class Foo(a: Int, b: List[String] = Nil)
val raw = """{"a": 1}"""
decode[Foo](raw) // res0: Either[io.circe.Error,Foo] = Right(Foo(1,List()))
where
libraryDependencies ++= Seq(
"io.circe" %% "circe-core" % "0.12.0-M3",
"io.circe" %% "circe-parser" % "0.12.0-M3",
"io.circe" %% "circe-generic-extras" % "0.12.0-M3",
"io.circe" %% "circe-generic" % "0.12.0-M3"
)
You can also use .map(_.getOrElse(List.empty)) as well
implicit val fooDecoder: Decoder[Foo] = (c: HCursor) =>
for {
a <- c.downField("a").as[Int]
b <- c.downField("b").as[Option[List[String]]].map(_.getOrElse(List.empty))
} yield Foo(a, b)

Clojure error - trace error

I have a school assignment that has to be done by next week, but here I am sitting trying to solve an error that I really don't understand — why am I getting this?
According to my teacher, I should get this:
user> (def v (safe (/ 1 0)))
user> v
ArithmeticException java.lang.ArithmeticException: Divide by zero
but what I am getting when doing this is:
java.io.File
user=> (def v (safe (/ 1 0)))
#'user/v
user=> v
#error {
:cause "Divide by zero"
:via
[{:type java.lang.ArithmeticException
:message "Divide by zero"
:at [clojure.lang.Numbers divide "Numbers.java" 158]}]
:trace
[[clojure.lang.Numbers divide "Numbers.java" 158]
[clojure.lang.Numbers divide "Numbers.java" 3808]
[user$fn__17 invoke "NO_SOURCE_FILE" 30]
[clojure.lang.AFn applyToHelper "AFn.java" 152]
[clojure.lang.AFn applyTo "AFn.java" 144]
[clojure.lang.Compiler$InvokeExpr eval "Compiler.java" 3623]
[clojure.lang.Compiler$DefExpr eval "Compiler.java" 439]
[clojure.lang.Compiler eval "Compiler.java" 6787]
[clojure.lang.Compiler eval "Compiler.java" 6745]
[clojure.core$eval invoke "core.clj" 3081]
[clojure.main$repl$read_eval_print__7099$fn__7102 invoke "main.clj" 240]
[clojure.main$repl$read_eval_print__7099 invoke "main.clj" 240]
[clojure.main$repl$fn__7108 invoke "main.clj" 258]
[clojure.main$repl doInvoke "main.clj" 258]
[clojure.lang.RestFn invoke "RestFn.java" 421]
[clojure.main$repl_opt invoke "main.clj" 324]
[clojure.main$main doInvoke "main.clj" 422]
[clojure.lang.RestFn invoke "RestFn.java" 397]
[clojure.lang.Var invoke "Var.java" 375]
[clojure.lang.AFn applyToHelper "AFn.java" 152]
[clojure.lang.Var applyTo "Var.java" 700]
[clojure.main main "main.java" 37]]}
You got it, just the format of the error is a bit different.
If you look at the :via key you'll see the :type key's value is java.lang.ArithmeticException and the :message key's value is Divide by zero
Put them together and you'll get java.lang.ArithmeticException: Divide by zero
This might be an issue with how you're running the REPL. I've never seen errors show up like this but I usually run the REPL with the leiningen command: lein repl

How do I run JUnit 4.11 test cases with SBT?

I have the following in build.sbt:
libraryDependencies += "com.novocode" % "junit-interface" % "0.10" % "test"
libraryDependencies += "junit" % "junit" % "4.11" % "test"
I noticed that junit-interface 0.10 depends on junit-dep 4.10. This makes it impossible to compile tests that use assertNotEquals which was introduced in junit 4.11.
How do I run JUnit 4.11 test cases with SBT?
I would do this:
libraryDependencies ++= Seq(
"junit" % "junit" % "4.11" % Test,
"com.novocode" % "junit-interface" % "0.11" % Test
exclude("junit", "junit-dep")
)
By excluding what we don't desire, it doesn't interfere. This doesn't depend on ordering.
Use junit-interface 0.11 to avoid the dependency on junit-dep:
libraryDependencies += "junit" % "junit" % "4.12" % "test"
libraryDependencies += "com.novocode" % "junit-interface" % "0.11" % "test"
UPDATE: junit-interface 0.11 makes this reliable by depending on junit rather than junit-dep.

erlang-mysql-driver source code, why little endianness to fetch the result

I'm new to Erlang and studying erlang-mysql-driver. Can anyone help me understand why little-endianness is used when parsing the binary in the function "get_lcb"?
Following is the code in mysql_conn.erl
%%--------------------------------------------------------------------
%% Function: get_query_response(LogFun, RecvPid)
%% LogFun = undefined | function() with arity 3
%% RecvPid = pid(), mysql_recv process
%% Version = integer(), Representing MySQL version used
%% Descrip.: Wait for frames until we have a complete query response.
%% Returns : {data, #mysql_result}
%% {updated, #mysql_result}
%% {error, #mysql_result}
%% FieldInfo = list() of term()
%% Rows = list() of [string()]
%% AffectedRows = int()
%% Reason = term()
%%--------------------------------------------------------------------
%% Parse a complete MySQL query response from the receiver process.
%% The first length-coded binary (LCB) in the packet is the field count:
%%   0   -> an OK packet (no result set): affected rows and insert id follow
%%   255 -> an error packet: 16-bit error code, then the message text
%%   N   -> a result set with N columns: field definitions, then rows
get_query_response(LogFun, RecvPid, Version) ->
    %% Wait for the next decoded frame (any sequence number).
    case do_recv(LogFun, RecvPid, undefined) of
        {ok, Packet, _} ->
            {Fieldcount, Rest} = get_lcb(Packet),
            case Fieldcount of
                0 ->
                    %% No Tabular data
                    %% NOTE(review): AffectedRows is read as a single byte
                    %% here, although the protocol encodes it as a
                    %% length-coded binary; values >= 251 would be misread —
                    %% confirm against the MySQL protocol documentation.
                    <<AffectedRows:8, Rest2/binary>> = Rest,
                    io:format("Rest2=~p~n", [Rest2]),
                    {InsertId, _} = get_lcb(Rest2),
                    io:format("InsertId=~p~n", [InsertId]),
                    {updated, #mysql_result{affectedrows=AffectedRows, insertid=InsertId}};
                255 ->
                    %% Error packet: 16-bit little-endian error code, then the
                    %% human-readable error message.
                    <<_Code:16/little, Message/binary>> = Rest,
                    {error, #mysql_result{error=Message}};
                _ ->
                    %% Tabular data received
                    %% First the column definitions, then the data rows.
                    case get_fields(LogFun, RecvPid, [], Version) of
                        {ok, Fields} ->
                            case get_rows(Fields, LogFun, RecvPid, []) of
                                {ok, Rows} ->
                                    {data, #mysql_result{fieldinfo=Fields,
                                                         rows=Rows}};
                                {error, Reason} ->
                                    {error, #mysql_result{error=Reason}}
                            end;
                        {error, Reason} ->
                            {error, #mysql_result{error=Reason}}
                    end
            end;
        {error, Reason} ->
            {error, #mysql_result{error=Reason}}
    end.
%% Decode a MySQL length-coded binary (LCB) from the front of a packet.
%% The first byte selects the encoding; multi-byte values are little-endian
%% because that is how the MySQL wire protocol transmits them.
%% Returns {Value | null, RemainingBytes}.
get_lcb(<<251:8, Rest/binary>>) ->
    %% Prefix 251 encodes SQL NULL.
    {null, Rest};
get_lcb(<<252:8, Value:16/little, Rest/binary>>) ->
    %% Prefix 252: value is in the following 2 bytes.
    io:format("Value=~p~n",[Value]),
    io:format("Rest=~p~n",[Rest]),
    {Value, Rest};
get_lcb(<<253:8, Value:24/little, Rest/binary>>) ->
    %% Prefix 253: value is in the following 3 bytes.
    {Value, Rest};
get_lcb(<<254:8, Value:32/little, Rest/binary>>) ->
    %% Prefix 254: value is in the following bytes.
    %% NOTE(review): the MySQL protocol specifies 8 bytes (64 bits) after a
    %% 254 prefix; this clause reads only 32 bits — confirm against the
    %% protocol documentation.
    {Value, Rest};
get_lcb(<<Value:8, Rest/binary>>) when Value < 251 ->
    %% Prefixes 0..250 encode the value directly in the single byte.
    {Value, Rest};
get_lcb(<<255:8, Rest/binary>>) ->
    %% Prefix 255 marks an error packet; the caller matches on 255.
    {255, Rest}.
%%--------------------------------------------------------------------
%% Function: do_recv(LogFun, RecvPid, SeqNum)
%% LogFun = undefined | function() with arity 3
%% RecvPid = pid(), mysql_recv process
%% SeqNum = undefined | integer()
%% Descrip.: Wait for a frame decoded and sent to us by RecvPid.
%% Either wait for a specific frame if SeqNum is an integer,
%% or just any frame if SeqNum is undefined.
%% Returns : {ok, Packet, Num} |
%% {error, Reason}
%% Reason = term()
%%
%% Note : Only to be used externally by the 'mysql_auth' module.
%%--------------------------------------------------------------------
%% Receive one decoded frame forwarded to us by the mysql_recv process.
%% First clause: accept any frame. Second clause: selectively wait for the
%% frame whose sequence number is SeqNum + 1.
do_recv(LogFun, RecvPid, SeqNum) when is_function(LogFun);
LogFun == undefined,
SeqNum == undefined ->
    %% NOTE(review): in a guard, ';' binds loosest, so this clause matches
    %% whenever is_function(LogFun) holds, regardless of SeqNum — confirm
    %% that this is the intended dispatch between the two clauses.
    receive
        {mysql_recv, RecvPid, data, Packet, Num} ->
            {ok, Packet, Num};
        {mysql_recv, RecvPid, closed, _E} ->
            %% The receiver's socket went down; surface it as an error tuple.
            {error, "mysql_recv: socket was closed"}
    end;
do_recv(LogFun, RecvPid, SeqNum) when is_function(LogFun);
LogFun == undefined,
is_integer(SeqNum) ->
    ResponseNum = SeqNum + 1,
    receive
        %% Selective receive: only a frame carrying exactly ResponseNum
        %% matches; other messages stay in the mailbox.
        {mysql_recv, RecvPid, data, Packet, ResponseNum} ->
            {ok, Packet, ResponseNum};
        {mysql_recv, RecvPid, closed, _E} ->
            {error, "mysql_recv: socket was closed"}
    end.
I suppose you mean these lines: Value:16/little, Value:24/little, etc.? Well, it's because the MySQL server always fills these parts of the response packet with little-endian values — and Erlang's bit syntax defaults to big-endian, so the /little endianness has to be specified explicitly when matching.