How can I instantiate a mock Kafka topic for JUnit tests?

I have some JUnit tests on code that uses a Kafka topic. The mock Kafka topics I've tried do not work, and the examples I found online are so old that they do not work with 0.8.2.1 either. How do I create a mock Kafka topic using 0.8.2.1?
To clarify: I'm choosing to use an actual embedded instance of the topic in order to test against a real instance rather than mocking the hand-off in Mockito. This is so I can test that my custom encoders and decoders actually work and that nothing fails when I move to a real Kafka instance.
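For context, the 0.8.x serializer SPI that such custom encoders and decoders implement looks roughly like this (a minimal sketch; Event and its JSON helpers are hypothetical stand-ins for the real payload type):
import java.nio.charset.StandardCharsets;
import kafka.serializer.Decoder;
import kafka.serializer.Encoder;
import kafka.utils.VerifiableProperties;
public class EventEncoder implements Encoder<Event> {
// Kafka instantiates encoders reflectively and expects this constructor signature
public EventEncoder(VerifiableProperties props) { }
@Override
public byte[] toBytes(Event event) {
return event.toJson().getBytes(StandardCharsets.UTF_8); // toJson() is hypothetical
}
}
// in a separate file
public class EventDecoder implements Decoder<Event> {
public EventDecoder(VerifiableProperties props) { }
@Override
public Event fromBytes(byte[] bytes) {
return Event.fromJson(new String(bytes, StandardCharsets.UTF_8)); // fromJson() is hypothetical
}
}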

https://gist.github.com/asmaier/6465468#file-kafkaproducertest-java
This example has been updated to work with the newer 0.8.2.2 version. Here is the code snippet with Maven dependencies:
pom.xml:
<dependencies>
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<version>4.12</version>
</dependency>
<dependency>
<groupId>org.apache.kafka</groupId>
<artifactId>kafka_2.11</artifactId>
<version>0.8.2.2</version>
</dependency>
<dependency>
<groupId>org.apache.kafka</groupId>
<artifactId>kafka_2.11</artifactId>
<version>0.8.2.2</version>
<classifier>test</classifier>
</dependency>
<dependency>
<groupId>org.apache.kafka</groupId>
<artifactId>kafka-clients</artifactId>
<version>0.8.2.2</version>
</dependency>
</dependencies>
KafkaProducerTest.java:
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import org.I0Itec.zkclient.ZkClient;
import org.junit.Test;
import kafka.admin.TopicCommand;
import kafka.consumer.ConsumerConfig;
import kafka.consumer.ConsumerIterator;
import kafka.consumer.KafkaStream;
import kafka.javaapi.consumer.ConsumerConnector;
import kafka.producer.KeyedMessage;
import kafka.producer.Producer;
import kafka.producer.ProducerConfig;
import kafka.server.KafkaConfig;
import kafka.server.KafkaServer;
import kafka.utils.MockTime;
import kafka.utils.TestUtils;
import kafka.utils.TestZKUtils;
import kafka.utils.Time;
import kafka.utils.ZKStringSerializer$;
import kafka.zk.EmbeddedZookeeper;
import static org.junit.Assert.*;
/**
* For online documentation
* see
* https://github.com/apache/kafka/blob/0.8.2/core/src/test/scala/unit/kafka/utils/TestUtils.scala
* https://github.com/apache/kafka/blob/0.8.2/core/src/main/scala/kafka/admin/TopicCommand.scala
* https://github.com/apache/kafka/blob/0.8.2/core/src/test/scala/unit/kafka/admin/TopicCommandTest.scala
*/
public class KafkaProducerTest {
private int brokerId = 0;
private String topic = "test";
@Test
public void producerTest() throws InterruptedException {
// setup Zookeeper
String zkConnect = TestZKUtils.zookeeperConnect();
EmbeddedZookeeper zkServer = new EmbeddedZookeeper(zkConnect);
ZkClient zkClient = new ZkClient(zkServer.connectString(), 30000, 30000, ZKStringSerializer$.MODULE$);
// setup Broker
int port = TestUtils.choosePort();
Properties props = TestUtils.createBrokerConfig(brokerId, port, true);
KafkaConfig config = new KafkaConfig(props);
Time mock = new MockTime();
KafkaServer kafkaServer = TestUtils.createServer(config, mock);
String [] arguments = new String[]{"--topic", topic, "--partitions", "1","--replication-factor", "1"};
// create topic
TopicCommand.createTopic(zkClient, new TopicCommand.TopicCommandOptions(arguments));
List<KafkaServer> servers = new ArrayList<KafkaServer>();
servers.add(kafkaServer);
TestUtils.waitUntilMetadataIsPropagated(scala.collection.JavaConversions.asScalaBuffer(servers), topic, 0, 5000);
// setup producer
Properties properties = TestUtils.getProducerConfig("localhost:" + port);
ProducerConfig producerConfig = new ProducerConfig(properties);
Producer<Integer, byte[]> producer = new Producer<Integer, byte[]>(producerConfig);
// setup simple consumer
Properties consumerProperties = TestUtils.createConsumerProperties(zkServer.connectString(), "group0", "consumer0", -1);
ConsumerConnector consumer = kafka.consumer.Consumer.createJavaConsumerConnector(new ConsumerConfig(consumerProperties));
// send message
KeyedMessage<Integer, byte[]> data = new KeyedMessage<Integer, byte[]>(topic, "test-message".getBytes(StandardCharsets.UTF_8));
List<KeyedMessage<Integer, byte[]>> messages = new ArrayList<KeyedMessage<Integer, byte[]>>();
messages.add(data);
producer.send(scala.collection.JavaConversions.asScalaBuffer(messages));
producer.close();
// deleting zookeeper information to make sure the consumer starts from the beginning
// see https://stackoverflow.com/questions/14935755/how-to-get-data-from-old-offset-point-in-kafka
zkClient.delete("/consumers/group0");
// starting consumer
Map<String, Integer> topicCountMap = new HashMap<String, Integer>();
topicCountMap.put(topic, 1);
Map<String, List<KafkaStream<byte[], byte[]>>> consumerMap = consumer.createMessageStreams(topicCountMap);
KafkaStream<byte[], byte[]> stream = consumerMap.get(topic).get(0);
ConsumerIterator<byte[], byte[]> iterator = stream.iterator();
if(iterator.hasNext()) {
String msg = new String(iterator.next().message(), StandardCharsets.UTF_8);
System.out.println(msg);
assertEquals("test-message", msg);
} else {
fail();
}
// cleanup
consumer.shutdown();
kafkaServer.shutdown();
zkClient.close();
zkServer.shutdown();
}
}
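Since the goal was exercising custom encoders and decoders, note that in 0.8.x they are wired in through the producer config rather than passed programmatically. A hedged sketch, reusing the hypothetical EventEncoder from the question:
Properties properties = TestUtils.getProducerConfig("localhost:" + port);
properties.put("serializer.class", EventEncoder.class.getName()); // value encoder, 0.8.x config key
Producer<Integer, Event> producer = new Producer<Integer, Event>(new ProducerConfig(properties)); // keys unused here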
Be sure to check your mvn dependency:tree for any conflicting libraries. I had to add exclusions for slf4j and log4j:
<dependency>
<groupId>org.apache.kafka</groupId>
<artifactId>kafka_2.11</artifactId>
<version>0.8.2.2</version>
<exclusions>
<exclusion>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
</exclusion>
<exclusion>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.apache.kafka</groupId>
<artifactId>kafka_2.11</artifactId>
<version>0.8.2.2</version>
<classifier>test</classifier>
<exclusions>
<exclusion>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
</exclusion>
<exclusion>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.apache.kafka</groupId>
<artifactId>kafka-clients</artifactId>
<version>0.8.2.2</version>
<exclusions>
<exclusion>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
</exclusion>
<exclusion>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
</exclusion>
</exclusions>
</dependency>
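With slf4j-log4j12 and log4j excluded everywhere, remember to keep one SLF4J binding on the test classpath (for example slf4j-simple in test scope) if you still want to see the embedded broker's log output.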
Another option I'm looking into is using Apache Curator:
Is it possible to start a zookeeper server instance in process, say for unit tests?
<dependency>
<groupId>org.apache.curator</groupId>
<artifactId>curator-test</artifactId>
<version>2.2.0-incubating</version>
<scope>test</scope>
</dependency>
import java.io.IOException;
import org.apache.curator.framework.CuratorFramework;
import org.apache.curator.framework.CuratorFrameworkFactory;
import org.apache.curator.retry.RetryOneTime;
import org.apache.curator.test.TestingServer;
import org.junit.After;
import org.junit.Before;
TestingServer zkTestServer;
CuratorFramework cli;
@Before
public void startZookeeper() throws Exception {
zkTestServer = new TestingServer(2181);
cli = CuratorFrameworkFactory.newClient(zkTestServer.getConnectString(), new RetryOneTime(2000));
}
@After
public void stopZookeeper() throws IOException {
cli.close();
zkTestServer.stop();
}
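To make the embedded broker use this TestingServer rather than Kafka's own EmbeddedZookeeper, point it at the Curator connect string. A minimal sketch, reusing the TestUtils helpers from the example above:
Properties brokerProps = TestUtils.createBrokerConfig(0, TestUtils.choosePort(), true);
brokerProps.put("zookeeper.connect", zkTestServer.getConnectString());
KafkaServer broker = TestUtils.createServer(new KafkaConfig(brokerProps), new MockTime());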

Have you tried mocking Kafka consumer objects using a mocking framework like Mockito?
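For example, the old high-level consumer's iterator can be stubbed so the code under test sees exactly one message and then stops. A minimal sketch, assuming Mockito is on the test classpath:
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
import java.nio.charset.StandardCharsets;
import kafka.consumer.ConsumerIterator;
import kafka.message.MessageAndMetadata;
ConsumerIterator<byte[], byte[]> iterator = mock(ConsumerIterator.class);
MessageAndMetadata<byte[], byte[]> record = mock(MessageAndMetadata.class);
when(record.message()).thenReturn("test-message".getBytes(StandardCharsets.UTF_8));
when(iterator.hasNext()).thenReturn(true, false); // one message, then stop
when(iterator.next()).thenReturn(record);
Note that this bypasses the encoders and decoders entirely, which is exactly why the question opted for an embedded broker instead.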

Related

Test Class isn't Picked Up In Maven

I have a test class in a Spring Boot (v2.4.5) project.
@RunWith(OrderedTestRunner.class)
@SpringBootTest
@AutoConfigureMockMvc
public class MyResourceIT {...}
I can run it in IntelliJ IDEA, but mvn test won't run it for some reason.
The test class uses JUnit 4, while Spring Boot 2.x uses JUnit 5 by default; I don't know whether that is the reason. The following are the import statements of the test class:
import com.hackerrank.test.utility.Order;
import com.hackerrank.test.utility.OrderedTestRunner;
import com.hackerrank.test.utility.ResultMatcher;
import org.junit.ClassRule;
import org.junit.Rule;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.autoconfigure.web.servlet.AutoConfigureMockMvc;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.http.MediaType;
import org.springframework.test.context.junit4.rules.SpringClassRule;
import org.springframework.test.context.junit4.rules.SpringMethodRule;
import org.springframework.test.web.servlet.MockMvc;
import static org.junit.Assert.assertTrue;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.*;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
and I added the following two test-related dependencies to the pom.xml file:
<dependency>
<groupId>com.hackerrank.applications</groupId>
<artifactId>junit-ordered-test-runner</artifactId>
<version>1.0.0</version>
</dependency>
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<scope>test</scope>
</dependency>
How do I solve this problem?
The official documentation says:
If you have tests that use JUnit 4, JUnit 5’s vintage engine can be used to run them. To use the vintage engine, add a dependency on junit-vintage-engine, as shown in the following example:
<dependency>
<groupId>org.junit.vintage</groupId>
<artifactId>junit-vintage-engine</artifactId>
<scope>test</scope>
<exclusions>
<exclusion>
<groupId>org.hamcrest</groupId>
<artifactId>hamcrest-core</artifactId>
</exclusion>
</exclusions>
</dependency>
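Alternatively, if the ordering requirement can be expressed with JUnit 5's own annotations, the class can be migrated off the JUnit 4 runner entirely. A hedged sketch (OrderedTestRunner has no direct JUnit 5 equivalent, so @TestMethodOrder stands in for it):
import org.junit.jupiter.api.MethodOrderer;
import org.junit.jupiter.api.Order;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.TestMethodOrder;
import org.springframework.boot.test.autoconfigure.web.servlet.AutoConfigureMockMvc;
import org.springframework.boot.test.context.SpringBootTest;
@TestMethodOrder(MethodOrderer.OrderAnnotation.class)
@SpringBootTest
@AutoConfigureMockMvc
class MyResourceIT {
@Test
@Order(1)
void firstEndpointTest() {
// migrated test body goes here
}
}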

Spring security custom html login page not rendered

I have followed this link for all the configurations to set up my custom HTML login page. However, when I access localhost:8080/login, I get an error with status 500.
I am unable to render a simple HTML page when I access localhost:8080/login.
Are there extra configurations needed?
Should the HTML page be located in the templates folder? How does the application know it should render "login.html"?
Is my controller being recognised?
Config:
import org.springframework.context.annotation.Configuration;
import org.springframework.security.config.annotation.authentication.builders.AuthenticationManagerBuilder;
import org.springframework.security.config.annotation.web.builders.HttpSecurity;
import org.springframework.security.config.annotation.web.builders.WebSecurity;
import org.springframework.security.config.annotation.web.configuration.EnableWebSecurity;
import org.springframework.security.config.annotation.web.configuration.WebSecurityConfigurerAdapter;
@EnableWebSecurity
@Configuration
public class SecurityConfiguration extends WebSecurityConfigurerAdapter {
@Override
protected void configure(HttpSecurity http) throws Exception {
http
.authorizeRequests()
.anyRequest().fullyAuthenticated()
.and()
.formLogin()
.loginPage("/login")
.permitAll();
}
}
Controller:
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.GetMapping;
@Controller
public class LoginController {
@GetMapping("/login")
public String login() {
return "login";
}
}
Replacing "login" above with the HTML below also shows the error:
<h1> Login page </h1>
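Note that the return value of a plain @Controller method is interpreted as a view name, so returning raw HTML like this only works if the method is also annotated with @ResponseBody. A minimal sketch:
@GetMapping("/login")
@ResponseBody
public String login() {
return "<h1>Login page</h1>";
}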
Dependencies in pom.xml:
<dependencies>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-data-jpa</artifactId>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-security</artifactId>
</dependency>
<dependency>
<groupId>org.springframework.ldap</groupId>
<artifactId>spring-ldap-core</artifactId>
</dependency>
<dependency>
<groupId>org.springframework.security</groupId>
<artifactId>spring-security-ldap</artifactId>
</dependency>
<dependency>
<groupId>com.unboundid</groupId>
<artifactId>unboundid-ldapsdk</artifactId>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-web</artifactId>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-thymeleaf</artifactId>
</dependency>
<dependency>
<groupId>mysql</groupId>
<artifactId>mysql-connector-java</artifactId>
</dependency>
<dependency>
<groupId>com.h2database</groupId>
<artifactId>h2</artifactId>
<scope>runtime</scope>
<version>1.4.193</version>
</dependency>
<dependency>
<groupId>org.projectlombok</groupId>
<artifactId>lombok</artifactId>
<optional>true</optional>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-test</artifactId>
<scope>test</scope>
<exclusions>
<exclusion>
<groupId>org.junit.vintage</groupId>
<artifactId>junit-vintage-engine</artifactId>
</exclusion>
</exclusions>
</dependency>
</dependencies>
login.html is located in templates.
Error message:
javax.servlet.ServletException: Circular view path [login]: would dispatch back to the current handler URL [/login] again. Check your ViewResolver setup! (Hint: This may be the result of an unspecified view, due to default view name generation.)
I have managed to resolve this. The main issue was that my artifactId was also named login, which voided every other use of the "login" context, including login.html. I modified my controller as below, and after renaming my HTML file to login2.html it worked.
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.GetMapping;
@Controller
public class LoginController {
@GetMapping("/login")
public String login2() {
return "login2";
}
}
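An alternative that avoids a controller method entirely is to register the view mapping directly. A hedged sketch, assuming spring-boot-starter-thymeleaf resolves the view name "login2" to templates/login2.html:
import org.springframework.context.annotation.Configuration;
import org.springframework.web.servlet.config.annotation.ViewControllerRegistry;
import org.springframework.web.servlet.config.annotation.WebMvcConfigurer;
@Configuration
public class MvcConfig implements WebMvcConfigurer {
@Override
public void addViewControllers(ViewControllerRegistry registry) {
// map /login straight to the template, sidestepping view-name/handler collisions
registry.addViewController("/login").setViewName("login2");
}
}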

Exception while loading data from MySQL in Spark

I am getting the below exception while running Spark code to fetch data from MySQL. Can someone please help?
The code is below:
private static final String MYSQL_CONNECTION_URL = "jdbc:mysql://localhost:3306/company";
private static final String MYSQL_USERNAME = "test";
private static final String MYSQL_PWD = "test123";
private static final SparkSession sparkSession =
SparkSession.builder().master("local[*]").appName("Spark2JdbcDs")
.config("spark.sql.warehouse.dir", "file:///tmp/tmp_warehouse")
.getOrCreate();
public static void main(String[] args) {
//JDBC connection properties
final Properties connectionProperties = new Properties();
connectionProperties.put("user", MYSQL_USERNAME);
connectionProperties.put("password", MYSQL_PWD);
Dataset<Row> jdbcDF = sparkSession.sql("SELECT * FROM emp");
List<Row> employeeFullNameRows = jdbcDF.collectAsList();
for (Row employeeFullNameRow : employeeFullNameRows) {
LOGGER.info(employeeFullNameRow);
}
16/09/23 13:17:55 INFO internal.SharedState: Warehouse path is 'file:///tmp/tmp_warehouse'.
16/09/23 13:17:55 INFO execution.SparkSqlParser: Parsing command: SELECT * FROM emp
Exception in thread "main" java.lang.UnsupportedOperationException: Not implemented by the DistributedFileSystem FileSystem implementation
at org.apache.hadoop.fs.FileSystem.getScheme(FileSystem.java:217)
at org.apache.hadoop.fs.FileSystem.loadFileSystems(FileSystem.java:2624)
at org.apache.hadoop.fs.FileSystem.loadFileSystems(FileSystem.java:2624)
at org.apache.hadoop.fs.FileSystem.getFileSystemClass(FileSystem.java:2634)
at org.apache.hadoop.fs.FileSystem.createFileSystem(FileSystem.java:2651)
at org.apache.hadoop.fs.FileSystem.access$200(FileSystem.java:92)
at org.apache.hadoop.fs.FileSystem$Cache.getInternal(FileSystem.java:2687)
at org.apache.hadoop.fs.FileSystem$Cache.get(FileSystem.java:2669)
at org.apache.hadoop.fs.FileSystem.get(FileSystem.java:371)
at org.apache.hadoop.fs.Path.getFileSystem(Path.java:295)
at org.apache.spark.sql.catalyst.catalog.SessionCatalog.makeQualifiedPath(SessionCatalog.scala:115)
at org.apache.spark.sql.catalyst.catalog.SessionCatalog.createDatabase(SessionCatalog.scala:145)
at org.apache.spark.sql.catalyst.catalog.SessionCatalog.<init>(SessionCatalog.scala:89)
at org.apache.spark.sql.internal.SessionState.catalog$lzycompute(SessionState.scala:95)
at org.apache.spark.sql.internal.SessionState.catalog(SessionState.scala:95)
at org.apache.spark.sql.internal.SessionState$$anon$1.<init>(SessionState.scala:112)
at org.apache.spark.sql.internal.SessionState.analyzer$lzycompute(SessionState.scala:112)
at org.apache.spark.sql.internal.SessionState.analyzer(SessionState.scala:111)
at org.apache.spark.sql.execution.QueryExecution.assertAnalyzed(QueryExecution.scala:49)
at org.apache.spark.sql.Dataset$.ofRows(Dataset.scala:64)
at org.apache.spark.sql.SparkSession.baseRelationToDataFrame(SparkSession.scala:382)
at org.apache.spark.sql.DataFrameReader.jdbc(DataFrameReader.scala:238)
at org.apache.spark.sql.DataFrameReader.jdbc(DataFrameReader.scala:194)
at sparksql.sparksql1.main(sparksql1.java:40)
Below is the pom file:
<!-- Hadoop Mapreduce Client Core -->
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-mapreduce-client-core</artifactId>
<version>2.7.1</version>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-common</artifactId>
<version>2.7.1</version>
</dependency>
<!-- Hadoop Core -->
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-core</artifactId>
<version>1.2.1</version>
</dependency>
<!-- Spark -->
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-core_2.10</artifactId>
<version>2.0.0</version>
</dependency>
<!-- Spark SQL -->
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-sql_2.10</artifactId>
<version>2.0.0</version>
</dependency>
<!-- https://mvnrepository.com/artifact/mysql/mysql-connector-java -->
<dependency>
<groupId>mysql</groupId>
<artifactId>mysql-connector-java</artifactId>
<version>5.1.20</version>
</dependency>
You have added both hadoop-core and hadoop-common in your pom.xml.
Remove hadoop-core and try again.
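Note also that the snippet as posted builds connectionProperties but never uses them, while the stack trace ends in DataFrameReader.jdbc, so the actual read presumably looked something like the sketch below; sparkSession.sql("SELECT * FROM emp") on its own queries Spark's catalog, not MySQL:
Dataset<Row> jdbcDF = sparkSession.read()
.jdbc(MYSQL_CONNECTION_URL, "emp", connectionProperties);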

org.apache.spark.sql.catalyst.TableIdentifier cannot be resolved error in SnappySQLJob

I have a compile-time error trying to write a SnappySQLJob. Am I missing a dependency?
The error message is:
The type org.apache.spark.sql.catalyst.TableIdentifier cannot be resolved. It is indirectly referenced from required .class files
@Override
public Object runJob(Object sparkContext, Config jobConfig) {
SnappyContext snappyContext = (SnappyContext)sparkContext;
String fileResource = "data.csv";
DataFrame dataFrame = snappyContext.read()
.format("com.databricks.spark.csv")
.option("header", "true")
.option("inferSchema", "true")
.load(fileResource);
// Compile-Time error is on this line
dataFrame.write().insertInto("example_table_col");
return null;
}
Here are my pom.xml dependencies:
<dependency>
<groupId>io.snappydata</groupId>
<artifactId>snappy-core_2.10</artifactId>
<version>0.2.1-PREVIEW</version>
</dependency>
<dependency>
<groupId>io.snappydata</groupId>
<artifactId>snappy-tools_2.10</artifactId>
<version>0.2.1-PREVIEW</version>
<exclusions>
<exclusion>
<artifactId>jdk.tools</artifactId>
<groupId>jdk.tools</groupId>
</exclusion>
<exclusion>
<artifactId>logback-classic</artifactId>
<groupId>ch.qos.logback</groupId>
</exclusion>
</exclusions>
</dependency>
This old release seems to be missing the spark-catalyst dependency. I suggest upgrading to the 0.5 release instead (snappy-tools is now called snappy-cluster); the SnappyData cluster should also be upgraded to 0.5.
For the 0.2.1 release, the following should correct the problem:
<dependency>
<groupId>io.snappydata</groupId>
<artifactId>snappy-spark-catalyst_2.10</artifactId>
<version>1.6.0-BETA</version>
</dependency>
<dependency>
<groupId>io.snappydata</groupId>
<artifactId>snappy-spark-sql_2.10</artifactId>
<version>1.6.0-BETA</version>
</dependency>

Table not created with hbm2ddl.auto=update in Hibernate 5

The database table is NOT auto-created by the <property name="hbm2ddl.auto">update</property> setting in hibernate.cfg.xml, with the following combination:
Java 8 + Tomcat 8 + MySQL + Hibernate 5
Java version:
java version "1.8.0_45"
Java(TM) SE Runtime Environment (build 1.8.0_45-b14)
Java HotSpot(TM) 64-Bit Server VM (build 25.45-b02, mixed mode)
MySQL version:
mysql Ver 14.14 Distrib 5.6.16, for osx10.7 (x86_64) using EditLine wrapper
Tomcat version:
apache-tomcat-8.0.22
pom.xml snippets:
<dependency>
<groupId>org.eclipse.persistence</groupId>
<artifactId>javax.persistence</artifactId>
<version>2.1.0</version>
</dependency>
<dependency>
<groupId>javax.transaction</groupId>
<artifactId>jta</artifactId>
<version>1.1</version>
</dependency>
<dependency>
<groupId>org.hibernate.javax.persistence</groupId>
<artifactId>hibernate-jpa-2.1-api</artifactId>
<version>1.0.0.Final</version>
</dependency>
<dependency>
<groupId>org.hibernate</groupId>
<artifactId>hibernate-core</artifactId>
<version>5.0.3.Final</version>
</dependency>
<dependency>
<groupId>org.hibernate</groupId>
<artifactId>hibernate-entitymanager</artifactId>
<version>5.0.3.Final</version>
</dependency>
<dependency>
<groupId>org.hibernate</groupId>
<artifactId>hibernate-c3p0</artifactId>
<version>5.0.3.Final</version>
</dependency>
Entity class:
package com.test.entity;
import java.util.Date;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.Id;
import javax.persistence.Table;
import javax.persistence.Temporal;
import javax.persistence.TemporalType;
import org.hibernate.annotations.GenericGenerator;
@Entity
@Table( name = "EVENTS" )
public class Event {
@Id
@GeneratedValue(generator="increment")
@GenericGenerator(name="increment", strategy = "increment")
private Long id;
private String title;
@Temporal(TemporalType.TIMESTAMP)
@Column(name = "EVENT_DATE")
private Date date;
public Event() {}
public Long getId() { return id; }
private void setId(Long id) { this.id = id; }
public Date getDate() { return date; }
public void setDate(Date date) { this.date = date; }
public String getTitle() { return title; }
public void setTitle(String title) { this.title = title; }
}
Hibernate SessionFactory initialization:
String resource = "hibernate.cfg.xml";
Configuration configuration = new Configuration();
configuration.configure(resource);
ServiceRegistry serviceRegistry = new StandardServiceRegistryBuilder().applySettings(configuration.getProperties()).build();
SessionFactory sessionFactoryCore = configuration.buildSessionFactory(serviceRegistry);
hibernate.cfg.xml:
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE hibernate-configuration PUBLIC
"-//Hibernate/Hibernate Configuration DTD 3.0//EN"
"http://hibernate.sourceforge.net/hibernate-configuration-3.0.dtd">
<hibernate-configuration>
<session-factory>
<property name="dialect">org.hibernate.dialect.MySQLDialect</property>
<property name="connection.driver_class">com.mysql.jdbc.Driver</property>
<property name="connection.url">jdbc:mysql://localhost:3306/test</property>
<property name="connection.username">mysqluser</property>
<property name="connection.password">******</property>
<property name="hbm2ddl.auto">update</property>
<property name="hibernate.max_fetch_depth">3</property>
<mapping class="com.test.entity.Event" />
</session-factory>
</hibernate-configuration>
HOWEVER, the table WAS created in the MySQL database with the following combination:
Java 8 + Tomcat 8 + MySQL + Hibernate 4
Everything is the same as above, except that in pom.xml I used Hibernate 4 instead of 5:
<dependency>
<groupId>org.eclipse.persistence</groupId>
<artifactId>javax.persistence</artifactId>
<version>2.1.0</version>
</dependency>
<dependency>
<groupId>javax.transaction</groupId>
<artifactId>jta</artifactId>
<version>1.1</version>
</dependency>
<dependency>
<groupId>org.hibernate.javax.persistence</groupId>
<artifactId>hibernate-jpa-2.1-api</artifactId>
<version>1.0.0.Final</version>
</dependency>
<dependency>
<groupId>org.hibernate</groupId>
<artifactId>hibernate-core</artifactId>
<version>4.3.11.Final</version>
</dependency>
<dependency>
<groupId>org.hibernate</groupId>
<artifactId>hibernate-entitymanager</artifactId>
<version>4.3.11.Final</version>
</dependency>
<dependency>
<groupId>org.hibernate</groupId>
<artifactId>hibernate-c3p0</artifactId>
<version>4.3.11.Final</version>
</dependency>
What am I missing in Hibernate 5?
We faced a similar problem after upgrading to Hibernate 5.x and this was solved by changing the dialect to org.hibernate.dialect.MySQL5Dialect.
Add <property name="javax.persistence.schema-generation.database.action">create</property> to your Hibernate config file.
Try something like this in the SessionFactory initialization:
import org.hibernate.boot.Metadata;
import org.hibernate.boot.MetadataSources;
import org.hibernate.boot.registry.StandardServiceRegistry;
import org.hibernate.boot.registry.StandardServiceRegistryBuilder;
StandardServiceRegistry standardRegistry = new StandardServiceRegistryBuilder().configure(configFile).build();
Metadata metadata = new MetadataSources(standardRegistry).getMetadataBuilder().build();
sessionFactory = metadata.getSessionFactoryBuilder().build();
return sessionFactory;
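The reason this helps: in Hibernate 5, building a registry with applySettings(configuration.getProperties()) and then calling configuration.buildSessionFactory(serviceRegistry) reportedly no longer propagates settings such as hbm2ddl.auto the way 4.x did, so the schema update silently never runs; the Metadata-based bootstrap reads those settings from hibernate.cfg.xml directly.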