I have a user.csv file and I want to store the data from the CSV file in a SQL database. I have provided the proper SQL connection in the yml file, but it throws a runtime exception:
Caused by: java.sql.SQLSyntaxErrorException: Table 'login.batch_job_instance' doesn't exist
at com.mysql.cj.jdbc.exceptions.SQLError.createSQLException(SQLError.java:120) ~[mysql-connector-java-8.0.26.jar:8.0.26]
at com.mysql.cj.jdbc.exceptions.SQLExceptionsMapping.translateException(SQLExceptionsMapping.java:122) ~[mysql-connector-java-8.0.26.jar:8.0.26]
at com.mysql.cj.jdbc.ClientPreparedStatement.executeInternal(ClientPreparedStatement.java:953) ~[mysql-connector-java-8.0.26.jar:8.0.26]
at com.mysql.cj.jdbc.ClientPreparedStatement.executeQuery(ClientPreparedStatement.java:1003) ~[mysql-connector-java-8.0.26.jar:8.0.26]
at com.zaxxer.hikari.pool.ProxyPreparedStatement.executeQuery(ProxyPreparedStatement.java:52) ~[HikariCP-4.0.3.jar:na]
at com.zaxxer.hikari.pool.HikariProxyPreparedStatement.executeQuery(HikariProxyPreparedStatement.java) ~[HikariCP-4.0.3.jar:na]
at org.springframework.jdbc.core.JdbcTemplate$1.doInPreparedStatement(JdbcTemplate.java:722) ~[spring-jdbc-5.3.10.jar:5.3.10]
at org.springframework.jdbc.core.JdbcTemplate.execute(JdbcTemplate.java:651) ~[spring-jdbc-5.3.10.jar:5.3.10]
... 42 common frames omitted
2021-09-27 16:31:27.913 INFO 3800 --- [ main] j.LocalContainerEntityManagerFactoryBean : Closing JPA EntityManagerFactory for persistence unit 'default'
2021-09-27 16:31:27.915 INFO 3800 --- [ main] com.zaxxer.hikari.HikariDataSource : HikariPool-1 - Shutdown initiated...
2021-09-27 16:31:27.922 INFO 3800 --- [ main] com.zaxxer.hikari.HikariDataSource : HikariPool-1 - Shutdown completed.
Process finished with exit code 1
BatchConfig.kt
package com.nilmani.literalmission.config
import com.nilmani.literalmission.model.User
import org.springframework.batch.core.Job
import org.springframework.batch.core.Step
import org.springframework.batch.core.configuration.annotation.EnableBatchProcessing
import org.springframework.batch.core.configuration.annotation.JobBuilderFactory
import org.springframework.batch.core.configuration.annotation.StepBuilderFactory
import org.springframework.batch.core.launch.support.RunIdIncrementer
import org.springframework.batch.item.ItemProcessor
import org.springframework.batch.item.ItemReader
import org.springframework.batch.item.ItemWriter
import org.springframework.batch.item.file.FlatFileItemReader
import org.springframework.batch.item.file.LineMapper
import org.springframework.batch.item.file.mapping.BeanWrapperFieldSetMapper
import org.springframework.batch.item.file.mapping.DefaultLineMapper
import org.springframework.batch.item.file.transform.DelimitedLineTokenizer
import org.springframework.context.annotation.Bean
import org.springframework.context.annotation.Configuration
import org.springframework.core.io.FileSystemResource
@Configuration
@EnableBatchProcessing
class BatchConfig {
@Bean
fun job(jobBuilderFactory: JobBuilderFactory,
stepBuilderFactory: StepBuilderFactory,
itemReader: ItemReader<User>,
itemProcessor: ItemProcessor<User,User>,
itemWriter: ItemWriter<User>
):Job{
val step: Step = stepBuilderFactory["ETL-file-load"]
.chunk<User, User>(100)
.reader(itemReader)
.processor(itemProcessor)
.writer(itemWriter)
.build()
return jobBuilderFactory["ETL-Load"]
.incrementer(RunIdIncrementer())
.start(step)
.build()
}
@Bean
fun itemReader(): FlatFileItemReader<User>? {
val flatFileItemReader: FlatFileItemReader<User> = FlatFileItemReader<User>()
flatFileItemReader.setResource(FileSystemResource("src/main/resources/users.csv"))
flatFileItemReader.setName("CSV-Reader")
flatFileItemReader.setLinesToSkip(1)
flatFileItemReader.setLineMapper(lineMapper()!!)
return flatFileItemReader
}
@Bean
fun lineMapper(): LineMapper<User>? {
val defaultLineMapper: DefaultLineMapper<User> = DefaultLineMapper<User>()
val lineTokenizer = DelimitedLineTokenizer()
lineTokenizer.setDelimiter(",")
lineTokenizer.setStrict(false)
lineTokenizer.setNames("id", "name", "dept", "salary")
val fieldSetMapper: BeanWrapperFieldSetMapper<User> = BeanWrapperFieldSetMapper<User>()
fieldSetMapper.setTargetType(User::class.java)
defaultLineMapper.setLineTokenizer(lineTokenizer)
defaultLineMapper.setFieldSetMapper(fieldSetMapper)
return defaultLineMapper
}
}
DBWriter.kt
package com.nilmani.literalmission.batch
import com.nilmani.literalmission.model.User
import com.nilmani.literalmission.repository.UserRepository
import org.springframework.batch.item.ItemWriter
import org.springframework.beans.factory.annotation.Autowired
import org.springframework.stereotype.Component
@Component
class DBWriter : ItemWriter<User> {
@Autowired
private lateinit var userRepository: UserRepository
@Autowired
fun DBWriter(userRepository: UserRepository){
this.userRepository = userRepository
}
@Throws(Exception::class)
override fun write(users: MutableList<out User>) {
println("Data Saved for Users: $users")
userRepository.saveAll(users)
}
}
itemprocessor.kt
package com.nilmani.literalmission.batch
import com.nilmani.literalmission.model.User
import org.springframework.batch.item.ItemProcessor
import org.springframework.stereotype.Component
import java.util.*
import kotlin.collections.HashMap
@Component
class Processor : ItemProcessor<User, User> {
private val DEPT_NAMES: MutableMap<String, String> = HashMap()
init {
DEPT_NAMES["100"] = "Technology"
DEPT_NAMES["102"] = "Operations"
DEPT_NAMES["103"] = "Accounts"
DEPT_NAMES["104"] = "Developer"
}
@Throws(Exception::class)
override fun process(user: User): User? {
val deptCode:String=user.dept
val dept=DEPT_NAMES[deptCode]
if (dept != null) {
user.dept = dept
}
user.time= Date()
println(String.format("Converted from [%s] to [%s]", deptCode, dept))
return user
}
}
LoadController.kt
package com.nilmani.literalmission.controller
import org.springframework.batch.core.*
import org.springframework.batch.core.launch.JobLauncher
import org.springframework.batch.core.repository.JobExecutionAlreadyRunningException
import org.springframework.batch.core.repository.JobInstanceAlreadyCompleteException
import org.springframework.batch.core.repository.JobRestartException
import org.springframework.beans.factory.annotation.Autowired
import org.springframework.web.bind.annotation.GetMapping
import org.springframework.web.bind.annotation.RequestMapping
import org.springframework.web.bind.annotation.RestController
@RestController
@RequestMapping("/load")
class LoadController {
@Autowired
private lateinit var jobLauncher: JobLauncher
@Autowired
private lateinit var job:Job
@GetMapping
@Throws(
JobParametersInvalidException::class,
JobExecutionAlreadyRunningException::class,
JobRestartException::class,
JobInstanceAlreadyCompleteException::class
)
fun load(): BatchStatus {
val maps: MutableMap<String, JobParameter> = HashMap()
maps["time"] = JobParameter(System.currentTimeMillis())
val parameters = JobParameters(maps)
val jobExecution: JobExecution = jobLauncher.run(job, parameters)
System.out.println("JobExecution: " + jobExecution.status)
println("Batch is Running...")
while (jobExecution.isRunning) {
println("...")
}
return jobExecution.status
}
}
User.kt
package com.nilmani.literalmission.model
import java.util.*
import javax.persistence.Entity
import javax.persistence.Id
@Entity
data class User(
@Id
val id:Long=0,
val name:String="",
var dept:String="",
val salary:String="",
var time:Date
)
I added everything properly, so how am I getting this kind of issue? I added all the Gradle dependencies; I don't know where I made a mistake. I have provided all the database-connection-related configuration.
Here are my database connection details:
spring:
  datasource:
    url: jdbc:mysql://localhost:3306/login
    driverClassName: com.mysql.cj.jdbc.Driver
    username: root
    password:
  jpa:
    database-platform: org.hibernate.dialect.MySQL5Dialect
    show-sql: true
    hibernate:
      ddl-auto: update
According to the error Table 'login.batch_job_instance' doesn't exist, you need to create the Spring Batch metadata tables in the database before running your job.
You can do that either manually, by executing the DDL script for MySQL that ships with Spring Batch (org/springframework/batch/core/schema-mysql.sql in spring-batch-core), or, if you use Spring Boot, by setting the property spring.batch.initialize-schema=always.
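For example, with the application.yml shown above, the batch metadata tables can be created automatically at startup like this (property name for Spring Boot 2.x; newer Boot versions use spring.batch.jdbc.initialize-schema):
spring:
  datasource:
    url: jdbc:mysql://localhost:3306/login
    driverClassName: com.mysql.cj.jdbc.Driver
    username: root
    password:
  batch:
    initialize-schema: always   # creates BATCH_JOB_INSTANCE and the other metadata tables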
Related
I'm totally new to Mockito and I need to mock a POST request so that I can test a client in isolation. However, no matter what I do I get the RESTEASY004655: Unable to invoke request exception. This is a simplified version of what I have so far. I have the class TestClassA, which sends a POST request to an API as follows:
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import javax.ws.rs.client.Entity;
import javax.ws.rs.core.Response;
import org.jboss.resteasy.client.jaxrs.ResteasyClient;
import org.jboss.resteasy.client.jaxrs.ResteasyClientBuilder;
import org.jboss.resteasy.client.jaxrs.ResteasyWebTarget;
import com.fasterxml.jackson.databind.ObjectMapper;
public class TestClassA {
public int moveActual(String path) throws IOException {
Response resp = null;
String newHome = "0" ;
ResteasyClient client = new ResteasyClientBuilder().build();
ResteasyWebTarget callTarget = client.target(path) ;
Map<String, Object> request = new HashMap<>();
request.put("operation", "dislocation");
request.put("direction", "right");
request.put("amount", "2");
request.put("unit", "metric");
String payload = new ObjectMapper().writeValueAsString(request);
resp = callTarget.request().post(Entity.entity(payload, "application/json"));
String outcome = resp.readEntity(String.class);
ObjectMapper outcomeMapper = new ObjectMapper();
@SuppressWarnings("unchecked")
Map<String, Object> finalResponse = (Map<String, Object>) outcomeMapper.readValue(outcome, Map.class);
if (finalResponse != null) {
newHome = (String) finalResponse.get("coordinate");
}
return Integer.parseInt(newHome) ;
}
}
I try to mock the POST request using the Mockito test below:
import static org.junit.Assert.assertEquals;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Answers;
import org.mockito.ArgumentMatchers;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.mockito.junit.MockitoJUnitRunner;
import static org.mockito.BDDMockito.given;
import java.util.HashMap;
import java.util.Map;
import org.jboss.resteasy.client.jaxrs.ResteasyWebTarget;
import javax.ws.rs.client.Entity;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.Response.Status;
import externalTests.TestClassA;
@RunWith(MockitoJUnitRunner.class)
public class DispositionMockTest {
@Mock(answer = Answers.RETURNS_DEEP_STUBS)
ResteasyWebTarget targetMock;
@InjectMocks
TestClassA classA;
@Test
public void dispositionTest() throws Exception {
// Given
Map<String, Integer> outcomeMap = new HashMap<String, Integer>() ;
outcomeMap.put("coordinate", 4) ;
Response resp = Response.status(Status.OK).type(MediaType.APPLICATION_JSON).entity(outcomeMap).build() ;
// Builder req = DispositionMockTest.fakeRequest() ;
// given(req.post(ArgumentMatchers.any(Entity.class))).willReturn(fakeRequest()) ;
given(targetMock.request().post(ArgumentMatchers.any(Entity.class))).willReturn(resp) ;
// When
int result = classA.moveActual("url to what has to run") ;
// Then
assertEquals(4, result);
}
}
I also tried to mock the request() method using the two commented-out lines in the test, having it return a more or less fake Builder from the method below.
public static Builder fakeRequest() {
ResteasyClient httpClient = new ResteasyClientBuilder().build();
ResteasyWebTarget target = httpClient.target("");
Builder req = target.request() ;
return req ;
}
But I keep getting this exception.
javax.ws.rs.ProcessingException: RESTEASY004655: Unable to invoke request: org.apache.http.client.ClientProtocolException
at org.jboss.resteasy.client.jaxrs.engines.ApacheHttpClient4Engine.invoke(ApacheHttpClient4Engine.java:325)
at org.jboss.resteasy.client.jaxrs.internal.ClientInvocation.invoke(ClientInvocation.java:443)
at org.jboss.resteasy.client.jaxrs.internal.ClientInvocation.invoke(ClientInvocation.java:62)
at org.jboss.resteasy.client.jaxrs.internal.ClientInvocationBuilder.post(ClientInvocationBuilder.java:219)
at externalTests.TestClassA.moveActual(TestClassA.java:33)
at externalTests.DispositionMockTest.dispositionTest(DispositionMockTest.java:60)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:50)
at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12)
at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:47)
at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17)
at org.mockito.internal.runners.DefaultInternalRunner$1$1.evaluate(DefaultInternalRunner.java:46)
at org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:325)
at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:78)
at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:57)
at org.junit.runners.ParentRunner$3.run(ParentRunner.java:290)
at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:71)
at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:288)
at org.junit.runners.ParentRunner.access$000(ParentRunner.java:58)
at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:268)
at org.junit.runners.ParentRunner.run(ParentRunner.java:363)
at org.mockito.internal.runners.DefaultInternalRunner$1.run(DefaultInternalRunner.java:77)
at org.mockito.internal.runners.DefaultInternalRunner.run(DefaultInternalRunner.java:83)
at org.mockito.internal.runners.StrictRunner.run(StrictRunner.java:39)
at org.mockito.junit.MockitoJUnitRunner.run(MockitoJUnitRunner.java:163)
at org.eclipse.jdt.internal.junit4.runner.JUnit4TestReference.run(JUnit4TestReference.java:89)
at org.eclipse.jdt.internal.junit.runner.TestExecution.run(TestExecution.java:41)
at org.eclipse.jdt.internal.junit.runner.RemoteTestRunner.runTests(RemoteTestRunner.java:542)
at org.eclipse.jdt.internal.junit.runner.RemoteTestRunner.runTests(RemoteTestRunner.java:770)
at org.eclipse.jdt.internal.junit.runner.RemoteTestRunner.run(RemoteTestRunner.java:464)
at org.eclipse.jdt.internal.junit.runner.RemoteTestRunner.main(RemoteTestRunner.java:210)
Caused by: org.apache.http.client.ClientProtocolException
at org.apache.http.impl.client.InternalHttpClient.doExecute(InternalHttpClient.java:187)
at org.apache.http.impl.client.CloseableHttpClient.execute(CloseableHttpClient.java:83)
at org.apache.http.impl.client.CloseableHttpClient.execute(CloseableHttpClient.java:56)
at org.jboss.resteasy.client.jaxrs.engines.ApacheHttpClient4Engine.invoke(ApacheHttpClient4Engine.java:320)
... 33 more
Caused by: org.apache.http.ProtocolException: Target host is not specified
at org.apache.http.impl.conn.DefaultRoutePlanner.determineRoute(DefaultRoutePlanner.java:71)
at org.apache.http.impl.client.InternalHttpClient.determineRoute(InternalHttpClient.java:125)
at org.apache.http.impl.client.InternalHttpClient.doExecute(InternalHttpClient.java:184)
... 36 more
I tried many possible solutions I found online but I can't get anything to work. If anyone knows how I can mock a post request I would be truly thankful to learn from them.
For whoever may need it: I couldn't find a way to get it done with Mockito, so I moved to WireMock. In WireMock you just record a successful transaction, create a WireMock server object, and have it replay the recorded behavior. Much easier and faster.
You got that exception because you mocked ResteasyWebTarget but never defined what its request() method should return.
You should first mock the Invocation.Builder, return it from the mocked request() method, and then define the behavior of post() on that mocked builder:
@Mock
private Invocation.Builder mockedRequest;
@Test
public void dispositionTest() throws Exception {
when(targetMock.request()).thenReturn(mockedRequest);
when(mockedRequest.post(any(Entity.class))).thenReturn(resp) ;
}
WebTarget request() doc
Invocation.Builder doc
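The stubbing chain from that answer can also be exercised on its own. Below is a minimal, self-contained sketch (test class and field names are illustrative); note that for the original test to pass, TestClassA would additionally have to use the injected target instead of building its own ResteasyClient inside moveActual:
import static org.junit.Assert.assertEquals;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.when;
import javax.ws.rs.client.Entity;
import javax.ws.rs.client.Invocation;
import javax.ws.rs.core.Response;
import org.jboss.resteasy.client.jaxrs.ResteasyWebTarget;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mock;
import org.mockito.junit.MockitoJUnitRunner;
@RunWith(MockitoJUnitRunner.class)
public class WebTargetStubbingTest {
    @Mock
    ResteasyWebTarget targetMock;      // plain mock, no deep stubs needed
    @Mock
    Invocation.Builder mockedRequest;  // the builder that request() should return
    @Test
    public void postIsStubbed() {
        Response resp = Response.ok("{\"coordinate\":\"4\"}").build();
        // request() must be stubbed first, otherwise it returns null and post() throws an NPE
        when(targetMock.request()).thenReturn(mockedRequest);
        when(mockedRequest.post(any(Entity.class))).thenReturn(resp);
        Response actual = targetMock.request().post(Entity.json("{}"));
        assertEquals(resp, actual);
    }
}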
I want to create a Spring Boot project using plain JDBC as described here: https://spring.io/guides/gs/relational-data-access/.
I would like to use a MySQL database instead of the H2 database used in the guide. Here are my config files:
build.gradle
buildscript {
repositories {
mavenCentral()
}
dependencies {
classpath("org.springframework.boot:spring-boot-gradle-plugin:2.0.5.RELEASE")
}
}
apply plugin: 'java'
apply plugin: 'eclipse'
apply plugin: 'idea'
apply plugin: 'org.springframework.boot'
apply plugin: 'io.spring.dependency-management'
bootJar {
baseName = 'gs-relational-data-access'
version = '0.1.0'
}
repositories {
mavenCentral()
}
sourceCompatibility = 1.8
targetCompatibility = 1.8
dependencies {
// https://mvnrepository.com/artifact/org.springframework.boot/spring-boot-starter
compile group: 'org.springframework.boot', name: 'spring-boot-starter', version: '2.1.0.RELEASE'
// https://mvnrepository.com/artifact/org.springframework.boot/spring-boot-starter-jdbc
compile group: 'org.springframework.boot', name: 'spring-boot-starter-jdbc', version: '2.1.0.RELEASE'
// https://mvnrepository.com/artifact/mysql/mysql-connector-java
compile group: 'mysql', name: 'mysql-connector-java', version: '5.1.47'
testCompile("junit:junit")
}
application.properties
# DATASOURCE (DataSourceAutoConfiguration & DataSourceProperties)
spring.datasource.url=jdbc:mysql://localhost:3306/spring-jdbc?useSSL=false
spring.datasource.username=root
spring.datasource.password=***
spring.datasource.driver-class-name=com.mysql.jdbc.Driver
spring.jpa.hibernate.ddl-auto=create
Application
package hello;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.CommandLineRunner;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.jdbc.core.JdbcTemplate;
import java.util.Arrays;
import java.util.List;
import java.util.stream.Collectors;
@SpringBootApplication
public class Application implements CommandLineRunner {
private static final Logger log = LoggerFactory.getLogger(Application.class);
public static void main(String args[]) {
SpringApplication.run(Application.class, args);
}
@Autowired
JdbcTemplate jdbcTemplate;
@Override
public void run(String... strings) throws Exception {
log.info("Creating tables");
jdbcTemplate.execute("DROP TABLE customers IF EXISTS");
jdbcTemplate.execute("CREATE TABLE customers(" +
"id SERIAL, first_name VARCHAR(255), last_name VARCHAR(255))");
// Split up the array of whole names into an array of first/last names
List<Object[]> splitUpNames = Arrays.asList("John Woo", "Jeff Dean", "Josh Bloch", "Josh Long").stream()
.map(name -> name.split(" "))
.collect(Collectors.toList());
// Use a Java 8 stream to print out each tuple of the list
splitUpNames.forEach(name -> log.info(String.format("Inserting customer record for %s %s", name[0], name[1])));
// Uses JdbcTemplate's batchUpdate operation to bulk load data
jdbcTemplate.batchUpdate("INSERT INTO customers(first_name, last_name) VALUES (?,?)", splitUpNames);
log.info("Querying for customer records where first_name = 'Josh':");
jdbcTemplate.query(
"SELECT id, first_name, last_name FROM customers WHERE first_name = ?", new Object[] { "Josh" },
(rs, rowNum) -> new Customer(rs.getLong("id"), rs.getString("first_name"), rs.getString("last_name"))
).forEach(customer -> log.info(customer.toString()));
}
}
But the project fails to start and suggests:
Consider defining a bean of type 'org.springframework.jdbc.core.JdbcTemplate' in your configuration.
What could I do?
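The message itself points at one possible workaround: declaring the bean explicitly. A minimal sketch, assuming the DataSource from application.properties is otherwise picked up (the class name JdbcConfig is illustrative; spring-boot-starter-jdbc normally auto-configures JdbcTemplate, so needing this usually means the starter is not actually on the runtime classpath):
import javax.sql.DataSource;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.jdbc.core.JdbcTemplate;
@Configuration
public class JdbcConfig {
    // Explicit JdbcTemplate bean built from the configured DataSource.
    @Bean
    public JdbcTemplate jdbcTemplate(DataSource dataSource) {
        return new JdbcTemplate(dataSource);
    }
}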
I'm trying to do some Scala JSON parsing in Spark. It works locally with my tests, but fails on a remote cluster.
Relevant Imports:
import com.fasterxml.jackson.databind.ObjectMapper
import com.fasterxml.jackson.module.scala.DefaultScalaModule
import com.fasterxml.jackson.module.scala.experimental.ScalaObjectMapper
import spray.json._
The classes:
case class Item(name: String,
siteType: String,
productCategoryType: String,
variants: List[VariantRange]) extends Serializable
case class VariantRange(name: String, start: Double, end: Double) extends Serializable
case class ItemList(items: List[Item])
object ItemJsonProtocol extends DefaultJsonProtocol {
implicit val variantRangeFormat = jsonFormat(VariantRange, "name", "start", "end")
implicit val itemFormat = jsonFormat(Item, "name", "siteType", "productCategoryType", "variants")
implicit object itemListJsonFormat extends RootJsonFormat[ItemList] {
override def write(obj: ItemList): JsValue = ???
override def read(json: JsValue): ItemList = ItemList(json.convertTo[List[Item]])
}
}
The parsing code:
def parseItems(json: String) = {
val mapper = new ObjectMapper() with ScalaObjectMapper
mapper.registerModule(DefaultScalaModule)
val jsonAst = json.parseJson
import ItemJsonProtocol._
jsonAst.convertTo[ItemList].items
}
def parseItems(): List[Item] = {
parseItems(Source.fromInputStream(getClass.getResourceAsStream("/data/items.json")).mkString)
}
This test passes locally:
test("Parses resources file") {
val items = parseItems()
items shouldNot be (null)
}
But in Spark it shows this:
15/12/07 17:59:05 ERROR ApplicationMaster: User class threw exception: java.lang.AbstractMethodError: com.intentmedia.data.AggregationJob$$anon$1.com$fasterxml$jackson$module$scala$experimental$ScalaObjectMapper$_setter_$com$fasterxml$jackson$module$scala$experimental$ScalaObjectMapper$$typeCache_$eq(Lorg/spark-project/guava/cache/LoadingCache;)V
java.lang.AbstractMethodError: com.intentmedia.data.AggregationJob$$anon$1.com$fasterxml$jackson$module$scala$experimental$ScalaObjectMapper$_setter_$com$fasterxml$jackson$module$scala$experimental$ScalaObjectMapper$$typeCache_$eq(Lorg/spark-project/guava/cache/LoadingCache;)V
at com.fasterxml.jackson.module.scala.experimental.ScalaObjectMapper$class.$init$(ScalaObjectMapper.scala:50)
at com.intentmedia.data.AggregationJob$$anon$1.<init>(AggregationJob.scala:76)
at com.intentmedia.data.AggregationJob$.parseExperiments(AggregationJob.scala:76)
at com.intentmedia.data.AggregationJob$.parseExperiments(AggregationJob.scala:85)
at com.intentmedia.data.AggregationJob$.runWorkflow(AggregationJob.scala:66)
at com.intentmedia.spark.SparkJob$class.main(SparkJob.scala:43)
at com.intentmedia.data.AggregationJob$.main(AggregationJob.scala:49)
at com.intentmedia.data.AggregationJob.main(AggregationJob.scala)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:606)
at org.apache.spark.deploy.yarn.ApplicationMaster$$anon$2.run(ApplicationMaster.scala:525)
15/12/07 17:59:05 INFO ApplicationMaster: Final app status: FAILED, exitCode: 15, (reason: User class threw exception: java.lang.AbstractMethodError: com.intentmedia.data.AggregationJob$$anon$1.com$fasterxml$jackson$module$scala$experimental$ScalaObjectMapper$_setter_$com$fasterxml$jackson$module$scala$experimental$ScalaObjectMapper$$typeCache_$eq(Lorg/spark-project/guava/cache/LoadingCache;)V)
Do I need to use some kind of caching when I read the objects in? Am I not handling resource files correctly?
I have a class named Inbox which contains an Int and a List of Messages. Message is another class.
The problem is I want to store objects of the Inbox class in MongoDB. I have to serialize these objects so I am using Gson, but it throws an exception on deserialization.
java.lang.RuntimeException: Failed to invoke public scala.collection.immutable.List() with no args
Here is the Inbox class :
@SerialVersionUID(1)
class Inbox(val uuid: Int, var messageList: ListBuffer[Message]) extends Serializable {
addUuidToList(uuid)
/*
* Auxiliary Constructor
*/
def this() {
this(0, ListBuffer[Message]())
}
def addToMessageList(addMessage: Message) = {
messageList += addMessage
}
var IdList = new MutableList[Int]()
def addUuidToList(uuid : Int) = {
IdList += uuid
}
/*
* Getters
*/
def getUuid: Int = uuid
/*
* Returns sorted List based on dateTime attribute of the Message Class
*/
def getMessageList : ListBuffer[Message] = {
//var sortedList = messageList.sorted
messageList
}
def getUuidsList: MutableList[Int] = IdList
}
And the Message class :
class Message(
val uuid: Int,
val subject: String,
val body: String,
var artworkUuid: Int,
val dateTime: LocalDateTime = LocalDateTime.now()
) extends Ordered[Message] with Serializable
My Test application :
object Test extends App {
val inbox = new Inbox(333,messageList)
val gson = new Gson()
val g = gson.toJson(inbox)
println(g + " converting to json")
var inboxObj = gson.fromJson(g, classOf[Inbox])
println("message object returned is " + inboxObj)
}
The output printed on the console :
{"uuid":333,"messageList":{"scala$collection$mutable$ListBuffer$$start":{},"last0":{"head":{"uuid":321,"subject":"subject1","body":"bodyText1","artworkUuid":101,"dateTime":{"date":{"year":2015,"month":7,"day":14},"time":{"hour":11,"minute":6,"second":51,"nano":579000000}},"readStatusInt":0,"delete":{"deleteStatusInt":1,"deleteReasonInt":1}},"tl":{}},"exported":false,"len":2},"IdList":{"first0":{"elem":333,"next":{}},"last0":{"elem":333,"next":{}},"len":1}}converting to json
[error] (run-main-0) java.lang.RuntimeException: Failed to invoke public scala.collection.immutable.List() with no args
java.lang.RuntimeException: Failed to invoke public scala.collection.immutable.List() with no args
at com.google.gson.internal.ConstructorConstructor$3.construct(ConstructorConstructor.java:107)
at com.google.gson.internal.bind.ReflectiveTypeAdapterFactory$Adapter.read(ReflectiveTypeAdapterFactory.java:186)
at com.google.gson.internal.bind.ReflectiveTypeAdapterFactory$1.read(ReflectiveTypeAdapterFactory.java:103)
at com.google.gson.internal.bind.ReflectiveTypeAdapterFactory$Adapter.read(ReflectiveTypeAdapterFactory.java:196)
at com.google.gson.internal.bind.ReflectiveTypeAdapterFactory$1.read(ReflectiveTypeAdapterFactory.java:103)
at com.google.gson.internal.bind.ReflectiveTypeAdapterFactory$Adapter.read(ReflectiveTypeAdapterFactory.java:196)
at com.google.gson.Gson.fromJson(Gson.java:810)
at com.google.gson.Gson.fromJson(Gson.java:775)
at com.google.gson.Gson.fromJson(Gson.java:724)
at com.google.gson.Gson.fromJson(Gson.java:696)
at TestActor$.delayedEndpoint$TestActor$1(TestActor.scala:307)
at TestActor$delayedInit$body.apply(TestActor.scala:54)
at scala.Function0$class.apply$mcV$sp(Function0.scala:40)
at scala.runtime.AbstractFunction0.apply$mcV$sp(AbstractFunction0.scala:12)
at scala.App$$anonfun$main$1.apply(App.scala:76)
at scala.App$$anonfun$main$1.apply(App.scala:76)
at scala.collection.immutable.List.foreach(List.scala:383)
at scala.collection.generic.TraversableForwarder$class.foreach(TraversableForwarder.scala:35)
at scala.App$class.main(App.scala:76)
at TestActor$.main(TestActor.scala:54)
at TestActor.main(TestActor.scala)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:497)
Caused by: java.lang.InstantiationException
at sun.reflect.InstantiationExceptionConstructorAccessorImpl.newInstance(InstantiationExceptionConstructorAccessorImpl.java:48)
at java.lang.reflect.Constructor.newInstance(Constructor.java:422)
at com.google.gson.internal.ConstructorConstructor$3.construct(ConstructorConstructor.java:104)
at com.google.gson.internal.bind.ReflectiveTypeAdapterFactory$Adapter.read(ReflectiveTypeAdapterFactory.java:186)
at com.google.gson.internal.bind.ReflectiveTypeAdapterFactory$1.read(ReflectiveTypeAdapterFactory.java:103)
at com.google.gson.internal.bind.ReflectiveTypeAdapterFactory$Adapter.read(ReflectiveTypeAdapterFactory.java:196)
at com.google.gson.internal.bind.ReflectiveTypeAdapterFactory$1.read(ReflectiveTypeAdapterFactory.java:103)
at com.google.gson.internal.bind.ReflectiveTypeAdapterFactory$Adapter.read(ReflectiveTypeAdapterFactory.java:196)
at com.google.gson.Gson.fromJson(Gson.java:810)
at com.google.gson.Gson.fromJson(Gson.java:775)
at com.google.gson.Gson.fromJson(Gson.java:724)
at com.google.gson.Gson.fromJson(Gson.java:696)
Maybe the problem is the ListBuffer; I convert it to a java.util List first. Try this code:
import scala.collection.JavaConverters._
import scala.collection.mutable.ListBuffer
import com.google.gson.Gson
import java.util.ArrayList
import java.lang.reflect.Type
import com.google.gson.reflect.TypeToken
object GSONinScala extends App {
var id=4
var fruits = new ListBuffer[String]()
fruits += "Apple"
fruits += "Banana"
fruits += "Orange"
val tmp : Inbox = Inbox(id, fruits)
val json = tmp.toJson()
println("Object2json: " + json)
}
import scala.collection.JavaConversions._
import scala.collection.mutable.ListBuffer
import com.google.gson.Gson
import java.util.ArrayList
import com.google.gson.annotations.Expose;
import java.util.{List => JavaList}
case class Inbox(uuid: Int, @Expose(deserialize = false) messageList: ListBuffer[String]) {
val msgList:JavaList[String] = messageList
def toJson() = new Gson().toJson(this)
}
I get this output
[info] Running GSONinScala
Object2json: {"uuid":4,"messageList":{"scala$collection$mutable$ListBuffer$$start":{},"last0":{"head":"Orange","tl":{}},"exported":false,"len":3},"msgList":["Apple","Banana","Orange"]}
[success] Total time: 2 s, completed 14-jul-2015 11:07:16
I hope that this helps you
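The underlying point is that Gson's reflective adapters understand java.util collections but not the internals of Scala's ListBuffer or immutable List. As a plain-Java illustration of the round trip that does work (class and variable names here are illustrative):
import java.lang.reflect.Type;
import java.util.Arrays;
import java.util.List;
import com.google.gson.Gson;
import com.google.gson.reflect.TypeToken;
public class GsonJavaListDemo {
    public static void main(String[] args) {
        Gson gson = new Gson();
        List<String> messages = Arrays.asList("Apple", "Banana", "Orange");
        // Serializing a java.util.List produces a plain JSON array
        String json = gson.toJson(messages);
        System.out.println(json);   // ["Apple","Banana","Orange"]
        // A TypeToken preserves the element type despite erasure when reading it back
        Type listType = new TypeToken<List<String>>() {}.getType();
        List<String> back = gson.fromJson(json, listType);
        System.out.println(back);
    }
}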
I'm trying to use an endpoint to query a MySQL database in Eclipse, using Tomcat 7 as the server, but it always gives me this error. Has anyone solved this problem with JDBI?
type Exception report
message java.sql.SQLException: No suitable driver found for
jdbc:mysql://127.0.0.1/demo
The code:
import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.core.Response;
import org.json.JSONException;
import org.json.JSONObject;
import org.skife.jdbi.v2.DBI;
import org.skife.jdbi.v2.Handle;
#Path("/jdbiservice")
public class JdbiService {
#Path("{f}")
#GET
#Produces("application/json")
public Response convertFtoCfromInput(#PathParam("f") int f) throws JSONException {
DBI dbi = new DBI("jdbc:mysql://127.0.0.1/demo", "user", "pass");
Handle h = dbi.open();
BatchExample b = h.attach(BatchExample.class);
Something s =b.findById(f);
h.close();
JSONObject jsonObject = new JSONObject(s);
String result = jsonObject.toString();
return Response.status(200).entity(result).build();
}
}
I have the connector JAR file on the Eclipse project path and inside the Tomcat lib folder.
This worked for me
package com.crunchify.restjersey;
import java.util.List;
import javax.naming.InitialContext;
import javax.sql.DataSource;
import javax.ws.rs.*;
import javax.ws.rs.core.Response;
import org.json.*;
import org.skife.jdbi.v2.*;
#Path("/sensorservice")
public class SensorService {
#Path("{id}")
#DELETE
public Response deleteSensorById(#PathParam("id") int id) {
///...
try {
DBI dbi = new DBI(SensorService.getDataSource());
Handle h = dbi.open();
SensorInterface si = h.attach(SensorInterface.class);
si.deleteById(id);
h.close();
} catch (Exception e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
String result = "Deleted";
return Response.status(200).entity(result).build();
}
private static DataSource getDataSource (){
DataSource ds = null;
InitialContext contex;
try {
contex = new InitialContext();
ds = ( DataSource) contex.lookup("java:comp/env/jdbc/jndiname");
} catch (Exception e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
return ds;
}
}
In WEB-INF/web.xml:
<resource-ref>
<description>DB Connection</description>
<res-ref-name>jdbc/jndiname</res-ref-name> <!-- must match the name looked up in getDataSource() and the Resource name below -->
<res-type>javax.sql.DataSource</res-type>
<res-auth>Container</res-auth>
</resource-ref>
In the Tomcat context file:
<Resource
name = "jdbc/jndiname"
auth = "Container"
type = "javax.sql.DataSource"
maxActive ="100"
maxIdle = "30"
maxWait = "10000"
driverClassName = "com.mysql.jdbc.Driver"
url = "jdbc:mysql://localhost:3306/schema"
username = "user"
password = "pass"
/>
You should include the MySQL driver in your dependencies:
<dependency>
<groupId>mysql</groupId>
<artifactId>mysql-connector-java</artifactId>
<version>5.1.34</version>
</dependency>