Spring Data R2DBC is not auto-creating tables in MySQL [duplicate]

I am creating a quick project using R2DBC and H2 to familiarize myself with this new reactive stuff. I made a repository that extends ReactiveCrudRepository, and all is well with the world, as long as I use the DatabaseClient to issue a CREATE TABLE statement that matches my entity first...
I understand Spring Data R2DBC is not as fully featured as Spring Data JPA (yet?), but is there currently a way to generate the schema from the entity classes?
Thanks

No, there is currently no way to generate the schema from entities with Spring Data R2DBC.
I'm using it in a project with a Postgres DB, and managing database migrations is complicated, but I managed to wire in Flyway with the synchronous Postgres JDBC driver (Flyway doesn't work with reactive drivers yet) at startup to handle schema migrations.
You still have to write your own CREATE TABLE statements, which shouldn't be that hard. In a simple project you could even turn your entities into JPA entities, let Hibernate generate the schema, and then copy-paste it into a migration file in your R2DBC project.
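For reference, that Flyway wiring can be a single bean that runs migrations over a blocking JDBC connection at startup while the rest of the app stays reactive. A minimal sketch, assuming a plain JDBC URL is available next to the R2DBC one (the app.flyway.* property names and the FlywayConfig class are made up for illustration):

import org.flywaydb.core.Flyway;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

@Configuration
public class FlywayConfig {

    // initMethod = "migrate" runs the migrations once the bean is created
    @Bean(initMethod = "migrate")
    public Flyway flyway(
            @Value("${app.flyway.url}") String url, // e.g. jdbc:postgresql://host:5432/db
            @Value("${app.flyway.user}") String user,
            @Value("${app.flyway.password}") String password) {
        return Flyway.configure()
                .dataSource(url, user, password)
                .load();
    }
}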

It is possible for tests and for production.
In production, make sure your user has no permission to change the schema, otherwise you may delete tables by mistake! Alternatively, use a migration tool like Flyway.
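On Postgres, for example, locking the application user out of DDL can be as simple as the following (a hedged sketch; the app_user role and the public schema are illustrative):

-- revoke DDL so the app user cannot create or drop tables
REVOKE CREATE ON SCHEMA public FROM app_user;
-- keep ordinary DML working
GRANT SELECT, INSERT, UPDATE, DELETE ON ALL TABLES IN SCHEMA public TO app_user;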
You need to put your schema.sql in src/main/resources and add the relevant property:
spring.r2dbc.initialization-mode=always
I use H2 for tests and Postgres for production.
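For illustration, a minimal schema.sql could look like this (a sketch reusing the tasks table that appears later in this thread; dialect differences between H2 and Postgres may require tweaks):

-- src/main/resources/schema.sql
CREATE TABLE IF NOT EXISTS tasks (
    id SERIAL PRIMARY KEY,
    lastname VARCHAR(255) NOT NULL
);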
I use Gradle, and the driver dependencies are:
implementation 'org.springframework.boot.experimental:spring-boot-actuator-autoconfigure-r2dbc'
runtimeOnly 'com.h2database:h2'
runtimeOnly 'io.r2dbc:r2dbc-h2'
runtimeOnly 'io.r2dbc:r2dbc-postgresql'
runtimeOnly 'org.postgresql:postgresql'
testImplementation 'org.springframework.boot.experimental:spring-boot-test-autoconfigure-r2dbc'
The BOM version is
dependencyManagement {
    imports {
        mavenBom 'org.springframework.boot.experimental:spring-boot-bom-r2dbc:0.1.0.M3'
    }
}

That's how I solved this problem:
Controller:
@PostMapping(MAP + PATH_DDL_PROC_DB) // PATH_DDL_PROC_DB = "/database/{db}/{schema}/{table}"
public Flux<Object> createDbByDb(
        @PathVariable("db") String db,
        @PathVariable("schema") String schema,
        @PathVariable("table") String table) {
    return ddlProcService.createDbByDb(db, schema, table);
}
Service:
public Flux<Object> createDbByDb(String db, String schema, String table) {
    return ddl.createDbByDb(db, schema, table);
}
Repository:
@Autowired
PostgresqlConnectionConfiguration.Builder connConfig;

public Flux<Object> createDbByDb(String db, String schema, String table) {
    return createDb(db).thenMany(
            Mono.from(connFactory(connConfig.database(db)).create())
                .flatMapMany(connection ->
                        Flux.from(connection
                                .createBatch()
                                .add(sqlCreateSchema(db))
                                .add(sqlCreateTable(db, table))
                                .add(sqlPopulateTable(db, table))
                                .execute())));
}
private Mono<Void> createDb(String db) {
    PostgresqlConnectionFactory connectionFactory = connFactory(connConfig);
    DatabaseClient ddl = DatabaseClient.create(connectionFactory);
    return ddl
            .execute(sqlCreateDb(db))
            .then();
}
Connection Class:
@Slf4j
@Configuration
@EnableR2dbcRepositories
public class Connection extends AbstractR2dbcConfiguration {
    /*
     **********************************************
     * Spring Data JDBC:
     * DDL: does not support JPA.
     *
     * R2DBC
     * DDL:
     * - does not support JPA
     * - to achieve DDL, use R2DBC's DatabaseClient
     *
     * DML:
     * - uses R2dbcRepositories
     * - R2dbcRepositories is different from
     *   R2DBC's DatabaseClient
     **********************************************
     */
    @Bean
    public PostgresqlConnectionConfiguration.Builder connectionConfig() {
        return PostgresqlConnectionConfiguration
                .builder()
                .host("db-r2dbc")
                .port(5432)
                .username("root")
                .password("root");
    }

    @Bean
    public PostgresqlConnectionFactory connectionFactory() {
        return new PostgresqlConnectionFactory(connectionConfig().build());
    }
}
DDL Scripts:
@Getter
@NoArgsConstructor(access = AccessLevel.PRIVATE)
public final class DDLScripts {

    public static final String SQL_GET_TASK = "select * from tasks";

    public static String sqlCreateDb(String db) {
        String sql = "create database %1$s;";
        String[] sql1OrderedParams = quotify(new String[]{db});
        return format(sql, (Object[]) sql1OrderedParams);
    }

    public static String sqlCreateSchema(String schema) {
        String sql = "create schema if not exists %1$s;";
        String[] sql1OrderedParams = quotify(new String[]{schema});
        return format(sql, (Object[]) sql1OrderedParams);
    }

    public static String sqlCreateTable(String schema, String table) {
        String sql1 = "create table %1$s.%2$s " +
                "(id serial not null constraint tasks_pk primary key, " +
                "lastname varchar not null); ";
        String[] sql1OrderedParams = quotify(new String[]{schema, table});
        String sql1Final = format(sql1, (Object[]) sql1OrderedParams);
        String sql2 = "alter table %1$s.%2$s owner to root; ";
        String[] sql2OrderedParams = quotify(new String[]{schema, table});
        String sql2Final = format(sql2, (Object[]) sql2OrderedParams);
        return sql1Final + sql2Final;
    }

    public static String sqlPopulateTable(String schema, String table) {
        String sql = "insert into %1$s.%2$s values (1, 'schema-table-%3$s');";
        String[] sql1OrderedParams = quotify(new String[]{schema, table, schema});
        return format(sql, (Object[]) sql1OrderedParams);
    }

    private static String[] quotify(String[] stringArray) {
        String[] returnArray = new String[stringArray.length];
        for (int i = 0; i < stringArray.length; i++) {
            returnArray[i] = "\"" + stringArray[i] + "\"";
        }
        return returnArray;
    }
}

It is actually possible to load a schema by defining a specific class in this way:
import io.r2dbc.spi.ConnectionFactory
import org.springframework.context.annotation.Bean
import org.springframework.context.annotation.Configuration
import org.springframework.core.io.ClassPathResource
import org.springframework.data.r2dbc.repository.config.EnableR2dbcRepositories
import org.springframework.r2dbc.connection.init.ConnectionFactoryInitializer
import org.springframework.r2dbc.connection.init.ResourceDatabasePopulator
@Configuration
@EnableR2dbcRepositories
class DbConfig {
    @Bean
    fun initializer(connectionFactory: ConnectionFactory): ConnectionFactoryInitializer {
        val initializer = ConnectionFactoryInitializer()
        initializer.setConnectionFactory(connectionFactory)
        initializer.setDatabasePopulator(
            ResourceDatabasePopulator(
                ClassPathResource("schema.sql")
            )
        )
        return initializer
    }
}
Note that IntelliJ reports the error "Could not autowire. No beans of 'ConnectionFactory' type found", but it is actually a false positive, so ignore it and rebuild your project.
The schema.sql file has to be placed in the resources folder.

Related

Spring Boot SQL JPA not using correct replica

I have an application where I am trying to distribute reads and writes between two replicas. For some reason JPA is only using my read replica, not the write replica. The write replica is the primary replica. The result is that when I use JPA to try to write data, I get an 'UPDATE command denied' error, because it is using the read-only datasource. I have tried using my own annotation and the @Transactional annotation. Both annotations are intercepted via AOP with the correct datasource, but JPA will not use it.
FYI, Spring JDBC works correctly via the custom annotation. This is strictly a JPA issue. Below is some code:
My AOP class:
@Aspect
@Order(20)
@Component
public class RouteDataSourceInterceptor {

    @Around("@annotation(com.kenect.db.common.annotations.UseDataSource) && execution(* *(..))")
    public Object proceed(ProceedingJoinPoint pjp) throws Throwable {
        try {
            MethodSignature signature = (MethodSignature) pjp.getSignature();
            Method method = signature.getMethod();
            UseDataSource annotation = method.getAnnotation(UseDataSource.class);
            RoutingDataSource.setDataSourceName(annotation.value());
            return pjp.proceed();
        } finally {
            RoutingDataSource.resetDataSource();
        }
    }

    @Around("@annotation(transactional)")
    public Object proceed(ProceedingJoinPoint proceedingJoinPoint, Transactional transactional) throws Throwable {
        try {
            if (transactional.readOnly()) {
                RoutingDataSource.setDataSourceName(SQL_READ_REPLICA);
                Klogger.info("Routing database call to the read replica");
            } else {
                RoutingDataSource.setDataSourceName(SQL_MASTER_REPLICA);
                Klogger.info("Routing database call to the primary replica");
            }
            return proceedingJoinPoint.proceed();
        } finally {
            RoutingDataSource.resetDataSource();
        }
    }
}
My RoutingDataSource class:
public class RoutingDataSource extends AbstractRoutingDataSource {
    private static final ThreadLocal<String> currentDataSourceName = new ThreadLocal<>();

    public static synchronized void setDataSourceName(String name) {
        currentDataSourceName.set(name);
    }

    public static synchronized void resetDataSource() {
        currentDataSourceName.remove();
    }

    @Override
    protected Object determineCurrentLookupKey() {
        return currentDataSourceName.get();
    }
}
AbstractDynamicDataSourceConfig
public abstract class AbstractDynamicDataSourceConfig {
    private final ConfigurableEnvironment environment;

    public AbstractDynamicDataSourceConfig(ConfigurableEnvironment environment) {
        this.environment = environment;
    }

    protected DataSource getRoutingDataSource() {
        Map<String, String> props = DBConfigurationUtils.getAllPropertiesStartingWith("spring.datasource", environment);
        List<String> dataSourceNames = DBConfigurationUtils.getDataSourceNames(props.keySet());
        RoutingDataSource routingDataSource = new RoutingDataSource();
        Map<Object, Object> dataSources = new HashMap<>();
        DataSource masterDataSource = null;
        for (String name : dataSourceNames) {
            DataSource dataSource = getDataSource("spring.datasource." + name);
            dataSources.put(name, dataSource);
            if (masterDataSource == null && name.toLowerCase().contains("master")) {
                masterDataSource = dataSource;
            }
        }
        if (dataSources.isEmpty()) {
            throw new KenectInvalidParameterException("No datasources found.");
        }
        routingDataSource.setTargetDataSources(dataSources);
        if (masterDataSource == null) {
            masterDataSource = (DataSource) dataSources.get(dataSourceNames.get(0));
        }
        routingDataSource.setDefaultTargetDataSource(masterDataSource);
        return routingDataSource;
    }

    protected DataSource getDataSource(String prefix) {
        HikariConfig hikariConfig = new HikariConfig();
        hikariConfig.setJdbcUrl(environment.getProperty(prefix + ".jdbcUrl"));
        hikariConfig.setUsername(environment.getProperty(prefix + ".username"));
        hikariConfig.setPassword(environment.getProperty(prefix + ".password"));
        return new HikariDataSource(hikariConfig);
    }
}
application.yaml
spring:
  datasource:
    master:
      jdbcUrl: jdbc:mysql://my-main-replica
      username: some-user
      password: some-password
    read-replica:
      jdbcUrl: jdbc:mysql://my-read-replica
      username: another-user
      password: another-password
If I use the annotation with JdbcTemplate, it works as expected:
THIS WORKS:
// Uses the main replica, as none is specified
public Message insertMessage(Message message) {
    String sql = "INSERT INTO message(" +
            " `conversationId`," +
            " `body`)" +
            " VALUE (" +
            " :conversationId," +
            " :body" +
            ")";
    MapSqlParameterSource parameters = new MapSqlParameterSource();
    parameters.addValue("conversationId", message.getConversationId());
    parameters.addValue("body", message.getBody());
    namedJdbcTemplate.update(sql, parameters);
    return message; // added so the method matches its declared return type
}

// Uses the read replica
@UseDataSource(SQL_READ_REPLICA)
public List<Message> getMessage(long id) {
    MapSqlParameterSource parameters = new MapSqlParameterSource();
    parameters.addValue("id", id);
    String sql = "SELECT " +
            " conversationId," +
            " body" +
            " FROM message" +
            " WHERE id = :id";
    return namedJdbcTemplate.query(sql, parameters, new BeanPropertyRowMapper<>(Message.class));
}
If I use a JPA interface it always uses the read replica:
THIS FAILS:
@Repository
public interface MessageJpaRepository extends JpaRepository<MessageEntity, Long> {

    // Should use the main replica but always uses the read replica
    @Modifying
    @Query(value =
            "UPDATE clarioMessage SET" +
            " body = :body" +
            " WHERE id = :id" +
            " AND organizationId = :organizationId",
            nativeQuery = true)
    @Transactional
    int updateMessageBodyByIdAndOrganizationId(@Param("body") String body, @Param("id") long id, @Param("organizationId") long organizationId);
}
So I just get the error below when I try to use the main replica. I have tried using the @UseDataSource annotation, and AOP does actually intercept it, but it still uses the read replica.
java.sql.SQLSyntaxErrorException: UPDATE command denied to user 'read-replica-user'#'read replica IP' for table 'message'
What am I missing?
When you use @UseDataSource it works, which seems to rule out any issues with the implementation of the aspect.
When you use @Transactional, it uses the secondary replica regardless of your AOP being invoked. My suspicion is that the TransactionInterceptor created by Spring is invoked before your RouteDataSourceInterceptor. You can try the following:
Put a breakpoint in your AOP method, as well as in org.springframework.transaction.interceptor.TransactionInterceptor.invoke, to see which one is invoked first. You want your interceptor to be invoked first.
If your interceptor is not invoked first, I would modify it to have the highest precedence, as follows:
@Aspect
@Order(Ordered.HIGHEST_PRECEDENCE)
@Component
public class RouteDataSourceInterceptor {
I still don't understand how you are telling the TransactionInterceptor to choose the DataSource you set in RouteDataSourceInterceptor. I have not used a multi-tenant setup, but I recently came across a question I helped to solve where the solution implements AbstractDataSourceBasedMultiTenantConnectionProviderImpl, so I hope you have something similar: Not able to switch database after defining Spring AOP
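If ordering does turn out to be the culprit, the relative precedence can also be made explicit on both sides; a hedged sketch, since the original answer does not show this (lower order values run first, i.e. outermost):

import org.springframework.context.annotation.Configuration;
import org.springframework.transaction.annotation.EnableTransactionManagement;

// Sketch: give the transaction advice a higher (later) order than the
// routing aspect's @Order(20), so RouteDataSourceInterceptor runs first.
@Configuration
@EnableTransactionManagement(order = 100)
public class TransactionConfig {
}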

Switch from JsonStringType to JsonBinaryType when the project uses both MySQL and PostgreSQL

I have a problem with a JSON column when it's necessary to switch from PostgreSQL to MariaDB/MySQL.
I use Spring Boot + JPA + Hibernate + hibernate-types-52.
The table I want to map is like this:
CREATE TABLE atable(
...
acolumn JSON,
...
);
OK, it works for PostgreSQL and for MariaDB/MySQL.
The problem is when I want to deploy an application that switches easily from one to the other, because the correct hibernate-types-52 implementations for PostgreSQL and MySQL/MariaDB are different.
This works on MySQL/MariaDB:
@Entity
@Table(name = "atable")
@TypeDef(name = "json", typeClass = JsonStringType.class)
public class Atable {
    ...
    @Type(type = "json")
    @Column(name = "acolumn", columnDefinition = "json")
    private JsonNode acolumn;
    ...
}
This works on PostgreSQL:
@Entity
@Table(name = "atable")
@TypeDef(name = "json", typeClass = JsonBinaryType.class)
public class Atable {
    ...
    @Type(type = "json")
    @Column(name = "acolumn", columnDefinition = "json")
    private JsonNode acolumn;
    ...
}
Any kind of solution to switch from JsonBinaryType to JsonStringType (or any other way to solve this) is appreciated.
With the Hypersistence Utils project, you can just use JsonType, which works with PostgreSQL, MySQL, Oracle, SQL Server, and H2.
So, use JsonType instead of JsonBinaryType or JsonStringType:
@Entity
@Table(name = "atable")
@TypeDef(name = "json", typeClass = JsonType.class)
public class Atable {
    @Type(type = "json")
    @Column(name = "acolumn", columnDefinition = "json")
    private JsonNode acolumn;
}
That's it!
There are some crazy things you can do, with the limitation that this only works for specific types and columns.
First, to replace the static @TypeDef with a dynamic mapping, you can use a HibernatePropertiesCustomizer to add a TypeContributorList:
@Configuration
public class HibernateConfig implements HibernatePropertiesCustomizer {

    // spring.jpa.database-platform holds a Hibernate Dialect class name
    @Value("${spring.jpa.database-platform:}")
    private Class<? extends Dialect> dialectClass;

    @Override
    public void customize(Map<String, Object> hibernateProperties) {
        AbstractHibernateType<Object> jsonType;
        if (dialectClass != null && PostgreSQL92Dialect.class.isAssignableFrom(dialectClass)) {
            jsonType = new JsonBinaryType(Atable.class);
        } else {
            jsonType = new JsonStringType(Atable.class);
        }
        hibernateProperties.put(EntityManagerFactoryBuilderImpl.TYPE_CONTRIBUTORS,
                (TypeContributorList) () -> List.of(
                        (TypeContributor) (TypeContributions typeContributions, ServiceRegistry serviceRegistry) ->
                                typeContributions.contributeType(jsonType, "myType")));
    }
}
So this is limited to Atable.class now, and I have named this custom JSON type 'myType', i.e. you annotate your property with @Type(type = "myType").
I'm using the configured Dialect here, but in my application I'm checking the active profiles for DB-specific profiles.
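That profile-based check could look something like this (a sketch, not the author's code; the "postgres" profile name is hypothetical):

import java.util.Arrays;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.core.env.Environment;

// field and helper to drop into the HibernateConfig above
@Autowired
private Environment environment;

private boolean isPostgresProfileActive() {
    // true when the app runs with a DB-specific "postgres" profile
    return Arrays.asList(environment.getActiveProfiles()).contains("postgres");
}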
Also note that TypeContributions.contributeType(BasicType, String...) is deprecated since Hibernate 5.3. I haven't looked into the new mechanism yet.
So that covers the @Type part, but if you want to use Hibernate schema generation, you'll still need the @Column(columnDefinition = "... bit, so Hibernate knows which column type to use.
This is where it starts feeling a bit yucky. We can register an Integrator to manipulate the mapping metadata:
hibernateProperties.put(EntityManagerFactoryBuilderImpl.INTEGRATOR_PROVIDER,
        (IntegratorProvider) () -> Collections.singletonList(JsonColumnMappingIntegrator.INSTANCE));
As a demo, I'm only checking for PostgreSQL, and I'm applying the dynamic columnDefinition only to a specific column in a specific entity:
public class JsonColumnMappingIntegrator implements Integrator {

    public static final JsonColumnMappingIntegrator INSTANCE =
            new JsonColumnMappingIntegrator();

    @Override
    public void integrate(
            Metadata metadata,
            SessionFactoryImplementor sessionFactory,
            SessionFactoryServiceRegistry serviceRegistry) {
        Database database = metadata.getDatabase();
        if (PostgreSQL92Dialect.class.isAssignableFrom(database.getDialect().getClass())) {
            Column acolumn =
                    ((Column) metadata.getEntityBinding(Atable.class.getName()).getProperty("acolumn").getColumnIterator().next());
            acolumn.setSqlType("json");
        }
    }

    @Override
    public void disintegrate(SessionFactoryImplementor sessionFactory, SessionFactoryServiceRegistry serviceRegistry) {
    }
}
metadata.getEntityBindings() would give you all Entity Bindings, over which you can iterate and then iterate over the properties. This seems quite inefficient though.
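That iteration might look like this (a sketch; PersistentClass and Property come from org.hibernate.mapping, and the filtering logic is up to you):

// iterate all entity bindings instead of hard-coding one entity
for (PersistentClass entityBinding : metadata.getEntityBindings()) {
    Iterator<?> properties = entityBinding.getPropertyIterator();
    while (properties.hasNext()) {
        Property property = (Property) properties.next();
        // inspect the property's columns here and call setSqlType where appropriate
    }
}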
I'm also not sure whether you can set things like 'IS JSON' constraints etc., so a custom create script would be better.

How to connect to Oracle and retrieve data

I am new to Java and the Play Framework. I have Play running, but I am unable to retrieve data from Oracle. I added the /lib folder and the ojdbc6.jar, and Play deployed successfully. I know Play is connected to my DB, as it complained when I had an incorrect config. The config is below, in application.conf:
db.default.driver=oracle.jdbc.driver.OracleDriver
db.default.url="jdbc:oracle:thin:@hostname:1521:SOMESID"
db.default.user=someuser
db.default.password=somepassword
All the above seems OK. Now, in Application.java, I am trying to fetch data from the DB. I do not want to use Hibernate or any ORM layers. Application.java is below. I added the method getMetaData to talk to Oracle, but it does not work; perhaps I have not written the method properly or have not imported the relevant libraries. A working .java file would be very helpful. If I can manage to retrieve data from Oracle, great; otherwise I hope I don't have to go the Spring Framework route. Any help would be greatly appreciated. Thanks
package controllers;

import play.*;
import play.mvc.*;
import views.html.*;
import play.db.*;

import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;

public class Application extends Controller {

    public static Result index() {
        //return ok(index.render("Hello Worldxxx"));
        //me: the below is the default return
        return redirect(routes.Application.tasks());
    }

    public static Result tasks() {
        return TODO;
    }

    public static Result newTask() {
        return TODO;
    }

    public static Result deleteTask(Long id) {
        return TODO;
    }

    public static Result getMetaData() {
        try {
            Connection connection = DB.getConnection();
            try {
                ResultSet resultSet = connection.prepareStatement("SELECT * FROM sometable").executeQuery();
                // read the metadata before closing the connection
                ResultSetMetaData metaData = resultSet.getMetaData();
                return ok(metaData.toString());
            } finally {
                connection.close();
            }
        } catch (SQLException e) {
            return internalServerError(e.getMessage());
        }
    }
}

Hibernate export to CSV

I want to export a query result to an Excel or CSV file.
I am using Hibernate and Struts.
Is there any query like 'INTO OUTFILE' which can directly export the result to a specified location?
In a MySQL database, the 'INTO OUTFILE' query works fine, but through Hibernate it is not working.
I tried using native SQL, but it gives the error 'couldn't execute bulk manipulation query', and I cannot solve that.
I am using a MySQL database.
If you are writing a web app and using Spring, you can do it by writing the data to an output stream.
Write a simple class to construct your response:
public class CsvResponse {
    private final String filename;
    private final List<YourPojo> records;

    public CsvResponse(List<YourPojo> records, String filename) {
        this.records = records;
        this.filename = filename;
    }

    public String getFilename() {
        return filename;
    }

    public List<YourPojo> getRecords() {
        return records;
    }
}
Now write a message converter to write them to an output stream
public class CsvMessageConverter extends AbstractHttpMessageConverter<CsvResponse> {
    public static final MediaType MEDIA_TYPE = new MediaType("text", "csv", Charset.forName("UTF-8"));

    public CsvMessageConverter() {
        super(MEDIA_TYPE);
    }

    @Override
    protected boolean supports(Class<?> clazz) {
        return CsvResponse.class.equals(clazz);
    }

    @Override
    protected void writeInternal(CsvResponse response, HttpOutputMessage output) throws IOException {
        output.getHeaders().setContentType(MEDIA_TYPE);
        output.getHeaders().set("Content-Disposition", "attachment; filename=\"" + response.getFilename() + "\"");
        OutputStream out = output.getBody();
        CsvWriter writer = new CsvWriter(new OutputStreamWriter(out), '\u0009');
        List<YourPojo> allRecords = response.getRecords();
        // start at 0 so the first record is not skipped
        for (int i = 0; i < allRecords.size(); i++) {
            YourPojo aReq = allRecords.get(i);
            writer.write(aReq.toString());
        }
        writer.close();
    }

    // required by AbstractHttpMessageConverter; this converter only writes responses
    @Override
    protected CsvResponse readInternal(Class<? extends CsvResponse> clazz, HttpInputMessage input) throws IOException {
        throw new UnsupportedOperationException("Reading CSV requests is not supported");
    }
}
Add this Message converter to your app context config file
<mvc:annotation-driven>
    <mvc:message-converters register-defaults="true">
        <bean class="com.yourpackage.CsvMessageConverter"/>
    </mvc:message-converters>
</mvc:annotation-driven>
Finally, the controller will look like this:
@RequestMapping(value = "/csvData", method = RequestMethod.GET, produces = "text/csv")
@ResponseBody
public CsvResponse getFullData(HttpSession session) throws IOException {
    // get data
    List<YourPojo> allRecords = yourService.getData();
    return new CsvResponse(allRecords, "yourData.csv");
}
I've found a similar way using JAX RS here.
But the bottom line is that you'll have to use a REST mechanism to get the data into the output stream if you want to do it the proper way. If your only goal is to get the data into a file, you can simply fetch your data into a list and then write it to a file.
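That simpler route could be as small as this (a sketch; YourPojo and a toString() that yields one CSV row are assumptions):

import java.io.IOException;
import java.io.PrintWriter;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.List;

public class SimpleCsvDump {
    // writes one line per record to the target file
    public static void dump(List<YourPojo> records, Path target) throws IOException {
        try (PrintWriter out = new PrintWriter(Files.newBufferedWriter(target))) {
            for (YourPojo record : records) {
                out.println(record.toString());
            }
        }
    }
}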

How to use a custom MySQL query from my Liferay custom portlet?

I am using Liferay and developing my custom portlet. Now I want to use a custom query to retrieve some data from multiple tables, with joins etc.
I have googled my problem but can't find a simple, step-by-step explanation, so can anyone guide me or point me to a tutorial on creating a custom SQL query for my custom portlet?
After the 4th step, I built my service in Eclipse and it reported success. Two files were created in the service/persistence package, named AdvertiseFinder.java and AdvertiseFinderUtil.java. But when I try to access the method getAd_DisplayforReports with AdvertiseFinderUtil.getAd_DisplayforReports("any string argument"), I get an error that there is no such method in AdvertiseFinderUtil.
I have rebuilt the service after updating my AdvertiseFinderImpl method, but it's not working.
This is my AdvertiseFinderImpl class:
package emenu.advertise.database.service.persistence;
import com.liferay.portal.service.persistence.impl.BasePersistenceImpl;
import emenu.advertise.database.model.ad_display;
import emenu.advertise.database.model.advertise;
import emenu.advertise.database.model.impl.ad_displayImpl;
import java.util.List;
import com.liferay.portal.SystemException;
import com.liferay.portal.kernel.dao.orm.QueryPos;
import com.liferay.portal.kernel.dao.orm.SQLQuery;
import com.liferay.portal.kernel.dao.orm.Session;
import com.liferay.util.dao.orm.CustomSQLUtil;
public class AdvertiseFinderImpl extends BasePersistenceImpl<ad_display> implements AdvertiseFinder {

    public void getall() {
    }

    // the name of the query
    public static String GET_ADVERTISE = AdvertiseFinderImpl.class.getName()
            + ".getAdvertise";

    // the method which will be called from the ServiceImpl class
    public List<ad_display> getAd_DisplayforReports(String pattern) throws SystemException {
        Session session = null;
        try {
            // open a new Hibernate session
            session = openSession();
            // pull out our query from the custom-sql XML, created earlier
            String sql = CustomSQLUtil.get(GET_ADVERTISE);
            // create a SQLQuery object
            SQLQuery q = session.createSQLQuery(sql);
            // map the entity in the query to its fully qualified Java class;
            // this has to be the Hibernate table name
            q.addEntity("a_ad_display", ad_displayImpl.class);
            // get a query position instance
            QueryPos qPos = QueryPos.getInstance(q);
            // fill in the "?" value of the custom query,
            // just like forming a prepared statement
            qPos.add(pattern);
            // execute the query and return a list from the db
            return (List<ad_display>) q.list();
            /*
            // use this block if you want to return the no. of rows (count)
            int rows = 0;
            Iterator<Long> itr = q.list().iterator();
            if (itr.hasNext()) {
                Long count = itr.next();
                if (count != null) {
                    rows = count.intValue();
                }
            }
            return rows;
            */
        } catch (Exception e) {
            throw new SystemException(e);
        } finally {
            closeSession(session);
        }
    }
}
My default-ext.xml is the following:
<?xml version="1.0"?>
<custom-sql>
    <sql file="custom-sql/emenu.xml" />
</custom-sql>
My emenu.xml is here:
<custom-sql>
    <sql id="emenu.advertise.database.service.persistence.AdvertiseFinderImpl.getAd_DisplayforReports">
        <![CDATA[
            SELECT
                *
            FROM
                a_ad_display
        ]]>
    </sql>
</custom-sql>
Change:
return (List<ad_display>) q.list();
to:
return (List<ad_display>) QueryUtil.list(q, getDialect(), -1, -1);
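Passing -1, -1 (Liferay's QueryUtil.ALL_POS) returns the full result set rather than a paged range, and getDialect() lets Liferay adapt the query to the underlying database.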
Following are the steps to write custom query / finder methods in Liferay:
Create a new finder called EntityFinderImpl.java in the /generated/service/persistence directory.
Run 'build-service' on the project.
ServiceBuilder auto-generates the following two extra files: EntityFinder.java and EntityFinderUtil.java.
Now open the EntityFinderImpl.java file and let this class extend BasePersistenceImpl and implement EntityFinder. (This assumes that the entity (table name) is defined in service.xml and the other required classes are also auto-generated by ServiceBuilder.)
Now add the required custom method to EntityFinderImpl.java and build the service again to distribute this method to the Util classes.
The custom method can be created using Liferay's DynamicQuery API or an SQL query, as follows:
public List<Entity> getCustomDataFromFinder(String parameter) throws SystemException {
    Session session = null;
    StringBuilder queryString = new StringBuilder();
    try {
        session = openSession();
        // build your query here and conditionally append parameter value(s)
        queryString.append(" Write your query here. ");
        SQLQuery query = session.createSQLQuery(queryString.toString());
        query.addEntity("EntityName", EntityImpl.class);
        return (List<Entity>) QueryUtil.list(query, getDialect(), 0, -1);
    } catch (Exception e) {
        throw new SystemException(e);
    } finally {
        if (session != null) {
            closeSession(session);
        }
    }
}