I am trying to configure multiple databases with Spring Boot and HikariCP. I am getting:
Caused by: org.springframework.beans.factory.NoUniqueBeanDefinitionException: No qualifying bean of type [javax.sql.DataSource] is defined: expected single matching bean but found 2: hikari_primary,hikari_secondary
For reference, I am attaching my Spring Boot datasource configuration files.
Please don't read too much into the primary/secondary naming convention (the names don't represent priorities); my requirement is to have two connection pools for two different databases.
Any help is appreciated.
1. application.properties
spring.datasource.dataSourceClassName=com.microsoft.sqlserver.jdbc.SQLServerDataSource
primary.spring.datasource.url=jdbc:sqlserver://xxx.xxx.xxx.xxx:1433;DatabaseName=training
primary.spring.datasource.username=training
primary.spring.datasource.password=training
primary.spring.datasource.poolName=hikari_primary
primary.spring.datasource.maximumPoolSize=5
primary.spring.datasource.minimumIdle=3
primary.spring.datasource.maxLifetime=2000000
primary.spring.datasource.connectionTimeout=30000
primary.spring.datasource.idleTimeout=30000
primary.spring.datasource.pool-prepared-statements=true
primary.spring.datasource.max-open-prepared-statements=250
secondary.spring.datasource.url=jdbc:sqlserver://xxx.xxx.xxx.xxx:1433;DatabaseName=dev_xxxxx_core_v3
secondary.spring.datasource.username=developer
secondary.spring.datasource.password=Developer
secondary.spring.datasource.poolName=hikari_secondary
secondary.spring.datasource.maximumPoolSize=50
secondary.spring.datasource.minimumIdle=30
secondary.spring.datasource.maxLifetime=2000000
secondary.spring.datasource.connectionTimeout=30000
secondary.spring.datasource.idleTimeout=30000
secondary.spring.datasource.pool-prepared-statements=true
secondary.spring.datasource.max-open-prepared-statements=300
2. PrimaryDataSourceConfig.java
@Configuration
public class PrimaryDataSourceConfig {

    @Value("${primary.spring.datasource.username}")
    private String user;
    @Value("${primary.spring.datasource.password}")
    private String password;
    @Value("${primary.spring.datasource.url}")
    private String dataSourceUrl;
    @Value("${spring.datasource.dataSourceClassName}")
    private String dataSourceClassName;
    @Value("${primary.spring.datasource.poolName}")
    private String poolName;
    @Value("${primary.spring.datasource.connectionTimeout}")
    private int connectionTimeout;
    @Value("${primary.spring.datasource.maxLifetime}")
    private int maxLifetime;
    @Value("${primary.spring.datasource.maximumPoolSize}")
    private int maximumPoolSize;
    @Value("${primary.spring.datasource.minimumIdle}")
    private int minimumIdle;
    @Value("${primary.spring.datasource.idleTimeout}")
    private int idleTimeout;

    @Bean(name = "hikari_primary")
    public HikariDataSource getHikariDataSourcePrimary() {
        Properties dsProps = new Properties();
        dsProps.put("url", dataSourceUrl);
        dsProps.put("user", user);
        dsProps.put("password", password);

        Properties configProps = new Properties();
        configProps.put("dataSourceClassName", dataSourceClassName);
        configProps.put("poolName", poolName);
        configProps.put("maximumPoolSize", maximumPoolSize);
        configProps.put("minimumIdle", minimumIdle);
        configProps.put("maxLifetime", maxLifetime);
        configProps.put("connectionTimeout", connectionTimeout);
        configProps.put("idleTimeout", idleTimeout);
        configProps.put("dataSourceProperties", dsProps);

        HikariConfig hc = new HikariConfig(configProps);
        HikariDataSource ds = new HikariDataSource(hc);
        return ds;
    }
}
3. SecondaryDataSourceConfig.java
@Configuration
public class SecondaryDataSourceConfig {

    @Value("${secondary.spring.datasource.username}")
    private String user;
    @Value("${secondary.spring.datasource.password}")
    private String password;
    @Value("${secondary.spring.datasource.url}")
    private String dataSourceUrl;
    @Value("${spring.datasource.dataSourceClassName}")
    private String dataSourceClassName;
    @Value("${secondary.spring.datasource.poolName}")
    private String poolName;
    @Value("${secondary.spring.datasource.connectionTimeout}")
    private int connectionTimeout;
    @Value("${secondary.spring.datasource.maxLifetime}")
    private int maxLifetime;
    @Value("${secondary.spring.datasource.maximumPoolSize}")
    private int maximumPoolSize;
    @Value("${secondary.spring.datasource.minimumIdle}")
    private int minimumIdle;
    @Value("${secondary.spring.datasource.idleTimeout}")
    private int idleTimeout;

    @Bean(name = "hikari_secondary")
    public HikariDataSource getHikariDataSourceSecondary() {
        Properties dsProps = new Properties();
        dsProps.put("url", dataSourceUrl);
        dsProps.put("user", user);
        dsProps.put("password", password);

        Properties configProps = new Properties();
        configProps.put("dataSourceClassName", dataSourceClassName);
        configProps.put("poolName", poolName);
        configProps.put("maximumPoolSize", maximumPoolSize);
        configProps.put("minimumIdle", minimumIdle);
        configProps.put("maxLifetime", maxLifetime);
        configProps.put("connectionTimeout", connectionTimeout);
        configProps.put("idleTimeout", idleTimeout);
        configProps.put("dataSourceProperties", dsProps);

        HikariConfig hc = new HikariConfig(configProps);
        HikariDataSource ds = new HikariDataSource(hc);
        return ds;
    }
}
4. Application.java
@SpringBootApplication
@ComponentScan("com.xxxx.springsql2o")
@EnableAutoConfiguration
public class Application {

    public static void main(String[] args) {
        SpringApplication.run(Application.class, args);
    }

    @Autowired
    @Qualifier("hikari_primary")
    DataSource hikariDataSourcePrimary;

    @Autowired
    @Qualifier("hikari_secondary")
    DataSource hikariDataSourceSecondary;

    @Bean(name = "primary_db")
    public Sql2o getPrimarySql2o() {
        return new Sql2o(hikariDataSourcePrimary);
    }

    @Bean(name = "secondary_db")
    public Sql2o getSecondarySql2o() {
        return new Sql2o(hikariDataSourceSecondary);
    }
}
Spring Boot is auto-configuring your application via @EnableAutoConfiguration (note that this annotation is already included in the composed @SpringBootApplication annotation). So my guess would be that you have some dependency that Spring is trying to auto-configure (e.g. JPA) which needs a single DataSource. If you can live with this, you can add @Primary to one of your DataSource bean provider methods in order to satisfy that dependency.
So, for instance:
@Bean(name = "hikari_primary")
@Primary
public HikariDataSource getHikariDataSourcePrimary() {...
Even if this works, it would be better to disable the auto-configuration you don't actually use (e.g. JPA, or whatever Spring Boot is trying to auto-configure) and configure everything manually to suit your application's needs. Having two databases is certainly a custom configuration and does not conform to Spring Boot's easy out-of-the-box approach.
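For example, if nothing else in the application needs the auto-configured DataSource, a minimal sketch of switching that auto-configuration off could look like the following (which classes to exclude depends on what is actually on your classpath):

import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.boot.autoconfigure.jdbc.DataSourceAutoConfiguration;
import org.springframework.boot.autoconfigure.jdbc.DataSourceTransactionManagerAutoConfiguration;

// Excluding the DataSource-related auto-configuration stops Spring Boot from
// looking for a single "default" DataSource, so the two Hikari pools can
// coexist without either of them having to be marked @Primary.
@SpringBootApplication(exclude = {
        DataSourceAutoConfiguration.class,
        DataSourceTransactionManagerAutoConfiguration.class })
public class Application {

    public static void main(String[] args) {
        SpringApplication.run(Application.class, args);
    }
}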
Related
I'm trying to read from DB2 with a stored procedure (SP) that returns records based on a page size and start page, among other input parameters. My goal, however, is to request all the records in one call and load another table in MySQL using Spring Batch. It seems straightforward enough, but when I run the job below it returns the correct number of records as per the SP, yet the records are skewed and duplicated, as if the transactions are not working properly. The reader uses one @Primary DataSource (DS) for the DB2 read, and another DS for the JpaItemWriter destination (LocalContainerEntityManagerFactoryBean).
My understanding is that both DB2 and MySQL return a ResultSet, so I don't know why I am forced to add these lines to avoid Invalid Parameter Type or cursor position errors:
reader.setVerifyCursorPosition(false);
reader.setRefCursorPosition(0);
I am reading from DB2 and then inserting into a MySQL table using the supplied AccountCashRowMapper. It's a strange situation that almost works; the listener doesn't report any errors.
Spring Batch is supposed to manage the transactions for me, so what configuration or code is missing to make this work?
@EnableBatchProcessing
public class LoadAccountCashTableJob {

    private static final Logger logger = LoggerFactory.getLogger(LoadAccountCashTableJob.class);

    @Autowired
    ApplicationContext context;

    @Autowired
    public AccountCashRepository repo;

    @Autowired
    public EntityManager em;

    @Autowired
    private JobBuilderFactory jobBuilderFactory;

    @Autowired
    private StepBuilderFactory stepBuilderFactory;

    @Autowired
    private AccountCashProcessor accountCashProcessor;

    @Autowired
    private TaskExecutor taskExecutor;

    @Autowired
    @Qualifier("originDataSource")
    protected DataSource dataSource;

    @Qualifier(value = "AccountCashJob")
    @Bean
    public Job AccountCashJob() throws Exception {
        return this.jobBuilderFactory.get("AccountCashJob")
                .start(this.accountCashStep())
                .build();
    }

    @Bean
    public Step accountCashStep() throws Exception {
        return this.stepBuilderFactory.get("accountCashStep")
                //.transactionManager(transactionManager)
                .<String, String>chunk(500)
                .reader(this.cashSPReader(dataSource))
                .processor(accountCashProcessor)
                .writer(this.accountCashItemWriter())
                .taskExecutor(taskExecutor)
                .listener(new ItemFailureLoggerListener())
                .build();
    }

    @Bean(destroyMethod = "")
    @StepScope
    public StoredProcedureItemReader cashSPReader(DataSource dataSource) throws Exception {
        StoredProcedureItemReader reader = new StoredProcedureItemReader();
        SqlParameter[] parameters = new SqlParameter[7];
        parameters[0] = new SqlParameter("DIV", Types.VARCHAR);
        parameters[1] = new SqlParameter("PageDebut", Types.SMALLINT);
        parameters[2] = new SqlParameter("NbParPage", Types.SMALLINT);
        parameters[3] = new SqlInOutParameter("O_NB_PAGES", Types.SMALLINT);
        parameters[4] = new SqlInOutParameter("O_RC", Types.INTEGER);
        parameters[5] = new SqlInOutParameter("O_RC_DSC", Types.VARCHAR);
        parameters[6] = new SqlReturnResultSet(ACCOUNTCASH_RESULT_SET, new AccountCashRowMapper());
        reader.setDataSource(dataSource);
        reader.setProcedureName("VMDTSTSP.db2cdb_List_Positions_Encaisse");
        reader.setRowMapper(new AccountCashRowMapper()); // for the output records
        reader.setParameters(parameters);
        reader.setPreparedStatementSetter(new SPParamSetter());
        reader.afterPropertiesSet();
        logger.info("reader.getSql()>>>>>>>>>>>>>>" + reader.getSql());
        reader.setVerifyCursorPosition(false);
        reader.setRefCursorPosition(0);
        return reader;
    }

    @Bean(destroyMethod = "")
    @StepScope
    public JpaItemWriter<AccountCash> accountCashItemWriter() {
        EntityManagerFactory emf = (EntityManagerFactory) context.getBean("entityManagerFactory");
        em.createNativeQuery("TRUNCATE TABLE purefacts.account_cash").executeUpdate();
        JpaItemWriter<AccountCash> accountCashJpaItemWriter = new JpaItemWriter<>();
        accountCashJpaItemWriter.setEntityManagerFactory(emf);
        accountCashJpaItemWriter.setUsePersist(true);
        return accountCashJpaItemWriter;
    }

    public static class SPParamSetter implements PreparedStatementSetter {
        @Override
        public void setValues(PreparedStatement ps) throws SQLException {
            AS400JDBCCallableStatement eventCallableSt = (AS400JDBCCallableStatement) ps;
            eventCallableSt.setString(1, ACCOUNTCASH_DIV);
            eventCallableSt.setInt(2, ACCOUNTCASH_START_PAGE);
            eventCallableSt.setInt(3, ACCOUNTCASH_PAGE_SIZE);
            eventCallableSt.registerOutParameter(4, Types.SMALLINT);
            eventCallableSt.setInt(4, ACCOUNTCASH_O_NB_PAGES);
            eventCallableSt.registerOutParameter(5, Types.INTEGER);
            eventCallableSt.setInt(5, ACCOUNTCASH_O_ERROR_CODE);
            eventCallableSt.registerOutParameter(6, Types.VARCHAR);
            eventCallableSt.setString(6, ACCOUNTCASH_O_ERROR_DESCRIPTION);
            eventCallableSt.getResultSet();
        }
    }

    public class ItemFailureLoggerListener extends ItemListenerSupport {
        private Log logger = LogFactory.getLog("item.error");

        public void onReadError(Exception e) {
            logger.error("Encountered error on read", e);
        }

        public void onWriteError(Exception ex, List items) {
            logger.error("Encountered error on write", ex);
        }
    }
}
I have the following code:
@Data
@Validated
@ConfigurationProperties
public class Keys {

    private final Key key = new Key();

    @Data
    @Validated
    @ConfigurationProperties(prefix = "key")
    public class Key {

        private final Client client = new Client();
        private final IntentToken intentToken = new IntentToken();
        private final Intent intent = new Intent();
        private final OAuth oauth = new OAuth();
        private final ResourceToken resourceToken = new ResourceToken();

        @Valid @NotNull private String authorization;
        @Valid @NotNull private String bearer;
        ...
    }
}
That class represents a properties file such as:
key.authorization=Authorization
key.bearer=Bearer
..
As I can have different sources for the properties (properties file, MongoDB, etc.), I have a client that inherits from Keys, as follows:
Properties file source
@Component
@Configuration
@Primary
@PropertySource("classpath:${product}-keys.${env}.properties")
//@JsonAutoDetect(fieldVisibility = Visibility.ANY)
public class CustomerKeysProperties extends Keys {
}
Mongo source
@Data
@EqualsAndHashCode(callSuper = true)
@Component
//@Primary
@Document(collection = "customerKeys")
public class CustomerKeysMongo extends Keys {

    @Id
    private String id;
}
I just select the source I want to use by annotating the class with @Primary. In the example above, CustomerKeysProperties is the active source.
All this works fine.
The issue I have is when I try to convert an instance of CustomerKeysProperties into JSON, as in the code below:
@SpringBootApplication
public class ConverterUtil {

    public static void main(String[] args) throws Exception {
        SpringApplication.run(ConverterUtil.class, args);
    }

    @Component
    class CustomerInitializer implements CommandLineRunner {

        @Autowired
        private Keys k;

        private final ObjectMapper mapper = new ObjectMapper();

        @Override
        public void run(String... args) throws Exception {
            mapper.setVisibility(PropertyAccessor.FIELD, Visibility.ANY);
            //mapper.configure(SerializationFeature.FAIL_ON_EMPTY_BEANS, false);
            String jsonInString = mapper.writeValueAsString(k);
            System.out.println(jsonInString);
        }
    }
}
While k contains all the properties set, the conversion fails:
Caused by: com.fasterxml.jackson.databind.exc.InvalidDefinitionException: No serializer found for class org.springframework.context.annotation.ConfigurationClassEnhancer$BeanMethodInterceptor and no properties discovered to create BeanSerializer (to avoid exception, disable SerializationFeature.FAIL_ON_EMPTY_BEANS) (through reference chain: x.client.customer.properties.CustomerKeysProperties$$EnhancerBySpringCGLIB$$eda308bd["CGLIB$CALLBACK_0"]->org.springframework.aop.framework.CglibAopProxy$DynamicAdvisedInterceptor["advised"]->org.springframework.aop.framework.ProxyFactory["targetSource"]->org.springframework.aop.target.SingletonTargetSource["target"]->x.client.customer.properties.CustomerKeysProperties$$EnhancerBySpringCGLIB$$4fd6c568["CGLIB$CALLBACK_0"])
at com.fasterxml.jackson.databind.exc.InvalidDefinitionException.from(InvalidDefinitionException.java:77)
at com.fasterxml.jackson.databind.SerializerProvider.reportBadDefinition(SerializerProvider.java:1191)
And if I uncomment
mapper.configure(SerializationFeature.FAIL_ON_EMPTY_BEANS, false)
as suggested in the logs, I get an infinite loop in Jackson, causing a StackOverflowError:
at com.fasterxml.jackson.databind.ser.BeanSerializer.serialize(BeanSerializer.java:155)
at com.fasterxml.jackson.databind.ser.BeanPropertyWriter.serializeAsField(BeanPropertyWriter.java:727)
at com.fasterxml.jackson.databind.ser.std.BeanSerializerBase.serializeFields(BeanSerializerBase.java:719)
at com.fasterxml.jackson.databind.ser.BeanSerializer.serialize(BeanSerializer.java:155)
at com.fasterxml.jackson.databind.ser.impl.IndexedListSerializer.serializeContents(IndexedListSerializer.java:119)
at com.fasterxml.jackson.databind.ser.impl.IndexedListSerializer.serialize(IndexedListSerializer.java:79)
at com.fasterxml.jackson.databind.ser.impl.IndexedListSerializer.serialize(IndexedListSerializer.java:18)
at com.fasterxml.jackson.databind.ser.BeanPropertyWriter.serializeAsField(BeanPropertyWriter.java:727)
at com.fasterxml.jackson.databind.ser.std.BeanSerializerBase.serializeFields(BeanSerializerBase.java:719)
at com.fasterxml.jackson.databind.ser.BeanSerializer.serialize(BeanSerializer.java:155)
..
Questions
In the end, I just want to provide a utility class that can convert a properties file into a JSON format that will be stored in MongoDB.
How can I solve this problem?
Without going through the object above, how can I transform a properties file into JSON?
Can I save an arbitrary Java bean in MongoDB, with the conversion to JSON done automagically?
The answer to any of the 3 questions above would be helpful.
Notes
Note that I use Lombok; I'm not sure if this is the problem.
Another guess is that I'm trying to serialize a Spring-managed bean, and the proxy it involves prevents Jackson from doing the serialization. If so, what could be the workaround?
Thanks!
So I found the problem:
Jackson can't process a Spring-managed (proxied) bean.
The workaround was:
try (InputStream input = getClass().getClassLoader().getResourceAsStream("foo.properties")) {
    JavaPropsMapper mapper = new JavaPropsMapper();
    Keys keys = mapper.readValue(input, Keys.class);
    ObjectWriter ow = new ObjectMapper().writer().withDefaultPrettyPrinter();
    String res = ow.writeValueAsString(keys);
    System.out.println(res);
} catch (IOException e) {
    e.printStackTrace();
}
where Keys was the Spring-managed bean I was injecting.
JavaPropsMapper comes from:
<dependency>
    <groupId>com.fasterxml.jackson.dataformat</groupId>
    <artifactId>jackson-dataformat-properties</artifactId>
</dependency>
I am new to JUnit and have been trying to use Mockito and PowerMockito to write some test cases for my code, but I am facing an issue.
Class Code:
public class Example implements Callable<Void> {

    int startIndex;
    int endIndex;
    ConnectionPool connPool;
    Properties properties;

    public Example(int start, int end, ConnectionPool connPool, Properties properties) {
        this.startIndex = start;
        this.endIndex = end;
        this.connPool = connPool;
        this.properties = properties;
    }

    @Override
    public Void call() throws Exception {
        long startTime = System.currentTimeMillis();
        try {
            List<String> listInput = new ArrayList<>();
            Service service = new Service(connPool, properties, startIndex, endIndex);
            service.getMethod(listInput);
            .
            .
            .
JUnit code:
@RunWith(PowerMockRunner.class)
@PrepareForTest()
public class ExampleTest {

    @Mock
    private ConnectionPool connectionPool;

    @Mock
    private Properties properties;

    @Mock
    private Service service = new Service(connectionPool, properties, 1, 1);

    @Mock
    private Connection connection;

    @Mock
    private Statement statement;

    @Mock
    private ResultSet resultSet;

    @InjectMocks
    private Example example = new Example(1, 1, connectionPool, properties);

    @Test
    public void testCall() throws Exception {
        List<String> listInput = new ArrayList<>();
        listInput.add("data1");
        when(service.getMethod(listInput)).thenReturn(listInput);
        example.call();
    }
}
Question: How do I mock the Service class and its getMethod call?
Explanation: The Service class has a method, getMethod, which interacts with the DB. Since I am not able to mock this method, the real code runs and I then have to mock all the objects used inside getMethod (connection, resultSet, etc.), otherwise it throws a NullPointerException.
Please help me understand what I am doing wrong and, if possible, provide guidance on how I should approach JUnit tests for this kind of method call.
Mockito alone won't help you mock an object that is created with new Service(...) inside the method under test.
Instead you need to use PowerMock.expectNew:
Service mockService = PowerMock.createMock(Service.class);
PowerMock.expectNew(Service.class, connectionPool, properties, 1, 1)
.andReturn(mockService);
PowerMock.replay(mockService);
For PowerMockito there is an equivalent:
PowerMockito.whenNew(Service.class)
.withArguments(connectionPool, properties, 1, 1)
.thenReturn(mockService);
Please check this article.
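Putting that together for the posted test, a rough sketch might look like the following. The key detail is that the class performing the new Service(...) call (Example) has to be listed in @PrepareForTest, which is currently empty in the question; the stubbing of getMethod assumes the List return type implied by the question's own when(...) call.

import static org.mockito.Mockito.*;

import java.util.ArrayList;
import java.util.List;
import java.util.Properties;

import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mock;
import org.powermock.api.mockito.PowerMockito;
import org.powermock.core.classloader.annotations.PrepareForTest;
import org.powermock.modules.junit4.PowerMockRunner;

@RunWith(PowerMockRunner.class)
@PrepareForTest(Example.class) // the class that calls "new Service(...)" must be prepared
public class ExampleTest {

    @Mock
    private ConnectionPool connectionPool;

    @Mock
    private Properties properties;

    @Test
    public void testCall() throws Exception {
        List<String> listInput = new ArrayList<>();
        listInput.add("data1");

        // Intercept the constructor call made inside Example.call()
        Service mockService = PowerMockito.mock(Service.class);
        PowerMockito.whenNew(Service.class)
                .withArguments(connectionPool, properties, 1, 1)
                .thenReturn(mockService);
        when(mockService.getMethod(anyList())).thenReturn(listInput);

        Example example = new Example(1, 1, connectionPool, properties);
        example.call();

        // The mocked collaborator was used instead of a real Service
        verify(mockService).getMethod(anyList());
    }
}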
Spring Boot: 1.3.2.RELEASE
Vaadin : 7.6.3
Mysql: mysql-java-connector-5.1.38
HikariCP: 2.4.3
So I created a database table with only id and name columns, and a Vaadin Table component to show its contents. Up to this point everything is fine. Then I tried to filter the table with a Vaadin Filter: it works if the connection pool is com.vaadin.data.util.sqlcontainer.connection.SimpleJDBCConnectionPool, but it does not if it is com.vaadin.data.util.sqlcontainer.connection.J2EEConnectionPool backed by HikariCP as the data source.
DatabaseConfig.java
@Configuration
@EnableTransactionManagement
public class DatabaseConfig {

    @Bean
    public HikariConfig hikariConfig() {
        HikariConfig config = new HikariConfig();
        config.setDriverClassName("com.mysql.jdbc.Driver");
        config.setJdbcUrl("jdbc:mysql://localhost:3306/test?autoReconnect=true&useSSL=false");
        config.setUsername("root");
        config.setPassword("root");
        config.setMaximumPoolSize(20);
        config.setPoolName("connectionPool");
        return config;
    }

    @Bean
    public DataSource dataSource() {
        HikariDataSource dataSource = new HikariDataSource(hikariConfig());
        return dataSource;
    }
}
ApplicationUI.java
@Autowired
private J2EEConnectionPool connectionPool;

@Override
protected void init(VaadinRequest arg0) {
    // does not work when the J2EEConnectionPool is backed by HikariCP
    TableQuery tq = new TableQuery("medecine", connectionPool);

    // works if I use a SimpleJDBCConnectionPool instead:
    JDBCConnectionPool pool = new SimpleJDBCConnectionPool("com.mysql.jdbc.Driver",
            "jdbc:mysql://localhost:3306/test?autoReconnect=true&useSSL=false", "root", "root");
    // TableQuery tq = new TableQuery("medecine", pool);

    container = new SQLContainer(tq);
    table = new Table();
    table.setContainerDataSource(container);
    table.setWidth(100, Unit.PERCENTAGE);
    mainLayout.addComponent(table);

    Button addFilter = new Button("filter", new ClickListener() {
        private static final long serialVersionUID = -7071683058766115266L;

        @Override
        public void buttonClick(ClickEvent arg0) {
            Filter filter = new SimpleStringFilter("name", "medecine2", true, false);
            container.addContainerFilter(filter);
        }
    });
    mainLayout.addComponent(addFilter);
}
The reason I chose HikariCP as the data source is better performance, and I'm not sure the Vaadin connection pool would perform better than HikariCP. Or is the filtering function limited to the Vaadin connection pools?
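As an aside, the question doesn't show how the autowired J2EEConnectionPool bean is created. A minimal sketch of wrapping the HikariCP-backed DataSource, assuming the J2EEConnectionPool(DataSource) constructor from Vaadin 7's SQLContainer and an illustrative ConnectionPoolConfig class name, would be something like:

import javax.sql.DataSource;

import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

import com.vaadin.data.util.sqlcontainer.connection.J2EEConnectionPool;

@Configuration
public class ConnectionPoolConfig {

    // Wraps the HikariCP-backed DataSource bean from DatabaseConfig so that
    // Vaadin's TableQuery/SQLContainer can obtain connections from it.
    @Bean
    public J2EEConnectionPool connectionPool(DataSource dataSource) {
        return new J2EEConnectionPool(dataSource);
    }
}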
I'm trying to run a simple test to check values in a properties file, which I've saved in the src/test/resources folder of my Maven project, but the JUnit test is failing. My test picks up the file OK, but it doesn't return the expected values; the file doesn't look like it's getting loaded. Has anyone else had a similar issue? My code and test are as follows:
My Application Context File:
<bean id="myProps" class="org.springframework.beans.factory.config.PropertiesFactoryBean">
    <property name="location" value="classpath:test.properties"/>
</bean>
My Code:
@Resource(name = "myProps")
private Properties myProps;

@Value("#{myProps['totalNumberOfChanges']}")
private String totalNumberOfChangesStr;

@Value("#{myProps['delayTime']}")
private String delayTimeStr;

public void parseAttributesFromConfigFile() {
    String methodName = "parsePropertyAttributesFromConfigFile";
    try {
        totalNumberOfChanges = Integer.parseInt(totalNumberOfChangesStr);
        delayTime = Integer.parseInt(delayTimeStr);
        numEntriesToIterateThru = (totalNumberOfChanges / delayTime);
    } catch (NumberFormatException nfe) {
        LOGGER.error(methodName, "", "Number Format Exception Occured" + nfe.getMessage());
    }
}
My JUnit test:
@RunWith(SpringJUnit4ClassRunner.class)
@ContextConfiguration(locations = { "classpath:META-INF/spring/Testpu.xml" })
public class ConfigPropertiesTest {

    private final int NUM_ENTRIES_TO_ITERATE_THRU = 100;
    private final int TOTAL_NUMBER_OF_CHANGES = 100000;
    private final int DELAY_TIME = 1000;

    private ConfigProperties configProperties;

    @Before
    public void setUp() throws Exception {
        configProperties = new ConfigProperties();
    }

    @Test
    public final void testParseAttributesFromConfigFileIsCalled() {
        configProperties.parseAttributesFromConfigFile();
        int numEntriesToIterateOver = configProperties.getNumEntriesToIterateThru();
        assertEquals(numEntriesToIterateOver, NUM_ENTRIES_TO_ITERATE_THRU);
        int numberOfChanges = configProperties.getTotalNumberOfChanges();
        assertEquals(numberOfChanges, TOTAL_NUMBER_OF_CHANGES);
        int delayTime = configProperties.getDelayTime();
        assertEquals(delayTime, DELAY_TIME);
    }
}
You are creating the ConfigProperties instance yourself in your @Before method. If you want Spring to populate values based on the annotations, the bean must be created as part of the Spring context. If you have a ConfigProperties bean in your Spring context, load that instance into your test using @Autowired.
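A minimal sketch of the adjusted test (assuming a ConfigProperties bean is declared in Testpu.xml or picked up by component scanning) might look like this:

import static org.junit.Assert.assertEquals;

import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;

@RunWith(SpringJUnit4ClassRunner.class)
@ContextConfiguration(locations = { "classpath:META-INF/spring/Testpu.xml" })
public class ConfigPropertiesTest {

    private final int NUM_ENTRIES_TO_ITERATE_THRU = 100;

    // Let Spring create and inject the bean so that the @Resource/@Value
    // annotations are actually processed against test.properties.
    @Autowired
    private ConfigProperties configProperties;

    @Test
    public void testParseAttributesFromConfigFileIsCalled() {
        configProperties.parseAttributesFromConfigFile();
        assertEquals(NUM_ENTRIES_TO_ITERATE_THRU, configProperties.getNumEntriesToIterateThru());
    }
}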