Solr Java Index Searching - mysql

I'm new to Solr. I'm trying to search data from MySQL using the Solr index search method; when I search for a single value, it shows multiple records related to the index,
for eg:
Company table:-
Id company_name company_id company_emp
1 Xx 1 Xx
2 yy 2 Yy
empDetail:-
Id company_id company_empA emp_city emp_mobile
1 1 dd ss 32455
2 1 cc cc 344444
3 1 zz zz 56778998
Here I have used a OneToMany relationship between the Company table and the Employee Detail table. When I search for ss in emp_city, it gives all the data related to that company_id instead of only the row containing ss.
Current result show like this
id company_id company_name company_empA emp_city
1 1 Xx dd ss
2 1 Xx cc cc
3 1 Xx zz zz
The above result shows every emp_city value related to that company_id, but I want to get only the row containing ss.
I have configured Data-config.xml Solr configuration file as follow
<document name="company1">
<entity name="companys" pk="id" query="SELECT * FROM company" deltaImportQuery="select * from company where id='${dataimporter.delta.id}'" >
<field column="company_id" name="id" />
<field column=" company_name " name=" company_name " />
<field column=" company_emp " name=" company_emp " />
<field column="company_id" name="companyId"/>
<entity name="companyDetails" pk="id " query="select * from company_detail where company_id='${companys.company_id}' ORDER BY '${companys.company_id}'"
parentDeltaQuery="select id from company_detail where company_id=${companys.company_id}">
<field column="company_id" name="id"/>
<field column="company_id" name="com_companyId"/>
<field column="company_empA" name="com_companyemp"/>
<field column=" emp_city " name=" emp_city" />
<field column=" emp_mobile " name=" emp_mobile "/>
</entity>
</entity>
I think the problem is in this query, but I couldn't figure it out. Kindly help me resolve it — thanks in advance.

Related

Solr not returning all documents after importing with the Data Handler

I have a Solr 8.7.0 installation and I'm using the Data Handler importer plugin via a MySQLi connection.
I have four entities declared:
<dataConfig>
<dataSource type="JdbcDataSource"
driver="com.mysql.jdbc.Driver"
url="jdbc:mysql://localhost:3306/hmsscot_bassculture"
user="myuser"
password="mypw"/>
<document>
<entity name="author" query="select id,type,firstname,surname,biographical_info,extrainfo from bassculture_author">
<field column="id" name="id"/>
<field column="type" name="type"/>
<field column="firstname" name="firstname"/>
<field column="surname" name="surname"/>
<field column="biographical_info" name="biographical_info"/>
<field column="extrainfo" name="extrainfo"/>
</entity>
<entity name="source" query="select id,type,short_title,full_title,publisher,author_id,orientation,variants from bassculture_source">
<field column="id" name="id"/>
<field column="type" name="type"/>
<field column="short_title" name="short_title"/>
<field column="full_title" name="full_title"/>
<field column="publisher" name="publisher"/>
<field column="author_id" name="author_id"/>
<entity name="author" query="SELECT s.*, CONCAT(ba.firstname, ' ', ba.surname) AS author FROM bassculture_source s, bassculture_author ba WHERE s.id=${source.id} AND s.author_id = ba.id;">
<field column="author" name="author"/>
</entity>
<field column="description" name="description"/>
<field column="orientation" name="orientation"/>
<field column="variants" name="variants"/>
</entity>
<entity name="copy" query="select id,type,folder,source_id,item_notes,seller,library,shelfmark,pagination,dimensions from bassculture_item">
<field column="id" name="id"/>
<field column="type" name="type"/>
<field column="folder" name="folder"/>
<field column="source_id" name="source_id"/>
<entity name="source_title" query="select id,short_title from bassculture_source where id=${copy.source_id}">
<field column="short_title" name="source_title"/>
</entity>
<entity name="source_author" query="SELECT bt.*, CONCAT(ba.firstname, ' ', ba.surname) AS source_author FROM bassculture_tune bt, bassculture_item c, bassculture_source s, bassculture_author ba WHERE c.id=${copy.id} AND c.source_id = s.id AND s.author_id = ba.id;">
<field column="source_author" name="source_author"/>
</entity>
<field column="item_notes" name="item_notes"/>
<field column="seller" name="seller"/>
<field column="library" name="library"/>
<field column="shelfmark" name="shelfmark"/>
<field column="paginations" name="pagination"/>
<field column="dimensions" name="dimension"/>
</entity>
<entity name="tune" query="select id,type,name,start_page,alternate_spellings,item_id from bassculture_tune">
<field column="id" name="id"/>
<field column="type" name="type"/>
<field column="name" name="name"/>
<entity name="source_title" query="select s.* FROM bassculture_source s, bassculture_item c, bassculture_tune bt where bt.id=${tune.id} AND c.source_id = s.id AND bt.item_id = c.id">
<field column="short_title" name="source_title"/>
</entity>
<entity name="tune_author" query="SELECT bt.*, CONCAT(ba.firstname, ' ', ba.surname, ' ', ba.extrainfo) AS tune_author FROM bassculture_tune bt, bassculture_item c, bassculture_source s, bassculture_author ba WHERE bt.id=${tune.id} AND bt.item_id = c.id AND c.source_id = s.id AND s.author_id = ba.id;">
<field column="tune_author" name="tune_author" />
</entity>
<field column="start_page" name="start_page"/>
<field column="alternate_spellings" name="alternate_spellings"/>
<field column="item_id" name="item_id"/>
</entity>
</document>
</dataConfig>
Now, I'm experiencing something which doesn't make sense to me. If I run the data importer leaving the 'entity' drop-down blank (i.e. import all entities):
I get:
Indexing completed. Added/Updated: 2357 documents. Deleted 0 documents. (Duration: 13s)
This is the correct number of documents (authors+sources+copies+tunes). Nevertheless, when I query the database I only get 1938 documents:
"responseHeader":{
"status":0,
"QTime":103,
"params":{
"q":"*:*",
"_":"1609335106436"}},
"response":{"numFound":1938,"start":0,"numFoundExact":true,"docs":[
{
[...]
These are only the tunes (the last entity in the configuration file above). I also see this in the dashboard:
If on the other hand I select the entities one by one (e.g. author etc...):
the plugin correctly imports the author, source, and copy entities (each time the *:* query reflects the documents imported). Once I get to the fourth entity (tune), though, the index apparently 'forgets' about the previous three entities — although after running it, the plugin reports 'documents deleted: 0' — and the *:* query goes back to only 1938 documents found (i.e. only tunes).
There's no error message in the logs. What am I missing?
PARTIAL SOLUTION
I managed to add a prefix to the id in order to differentiate the four different entities, so that unique IDs don't get overwritten, e.g.:
SELECT name,start_page,alternate_spellings,item_id, CONCAT('tune_', id) AS id, 'tune' as type FROM bassculture_tune;
Nevertheless, I need the database id (without the prefix) of the current tune, in this case, for some later comparison, eg:
<entity name="tune_author" query="SELECT bt.*, CONCAT(ba.firstname, ' ', ba.surname, ' ', ba.extrainfo) AS tune_author FROM bassculture_tune bt, bassculture_item c, bassculture_source s, bassculture_author ba WHERE bt.id=${tune.id} AND bt.item_id = c.id AND c.source_id = s.id AND s.author_id = ba.id;">
<field column="tune_author" name="tune_author" />
</entity>
Since ${tune.id} now has a prefix the whole query doesn't do what I need any more. Is there a way to strip the prefix locally?
Edit 2
The query
<entity name="tune_author" query="select s.* FROM bassculture_source s, bassculture_item c, bassculture_tune bt WHERE bt.id=REPLACE(${tune.id}, 'tune_', '') AND c.source_id = s.id AND bt.item_id = c.id;">
throws an error (unable to execute query) on importing data on Solr.
This is the error in the Solr log:
Caused by: java.sql.SQLSyntaxErrorException: You have an error in your SQL syntax; check the manual that corresponds to your MySQL server version for the right syntax to use near 'SELECT REPLACE(tune_1, 'tune_', ''), AND c.source_id = s.id AND bt.item_id = c.i' at line 1
PS
Something like
select item_id FROM bassculture_tune bt WHERE bt.id= (SELECT REPLACE('tune_1', 'tune_', ''));
on MySQL console works just fine.
Introducing variables
I'm trying my luck with a variable now:
<entity name="this_tune_id" query="SET #this_tune_id = REPLACE('${tune.id}','tune_','');">
</entity>
<entity name="source_title" query="select s.* FROM bassculture_source s, bassculture_item c, bassculture_tune bt WHERE c.source_id = s.id AND bt.item_id = c.id AND bt.id = ${this_tune_id};">
<field column="short_title" name="source_title"/>
</entity>
This gives me a
org.apache.solr.handler.dataimport.DataImportHandlerException: java.lang.ArrayIndexOutOfBoundsException: Index -1 out of bounds for length 1
error.
FINAL SOLUTION
I am storing the database ID as this_tune_id, and the Solr id (with the prefix) as id, so that I can use this_tune_id for my queries while still storing a prefixed id in Solr:
<entity name="tune" query="SELECT name,start_page,alternate_spellings,item_id, id AS this_tune_id, CONCAT('tune_', id) AS id, 'tune' as type FROM bassculture_tune;">
<field column="name" name="name"/>
<entity name="source_title" query="select s.* FROM bassculture_source s, bassculture_item c, bassculture_tune bt WHERE c.source_id = s.id AND bt.item_id = c.id AND bt.id = ${tune.this_tune_id};">
The screenshot containing data from your import reveals the reason: maxDocs shows that 2357 documents have been imported, but 419 of them have been marked as deleted. Your unique key field (usually id) has overlap between the documents you're importing, resulting in the newer documents overwriting the older ones.
419 documents have been overwritten by documents imported later because of overlapping ids.
You can solve this by prepending the entity type to your ids (there is no need for the ids to be numeric) - the easiest way is to prefix it in your SQL:
SELECT CONCAT('tune_', id) AS id, .. FROM ..
SELECT CONCAT('author_', id) AS id, .. FROM ..
... repeating for each source ..
That way the id for an author will be author_1 and will not overwrite tune_1 as it would otherwise, where both would have 1 as their ids.

Generate a transaction report with all the database tables in Unicentapos

I am trying to generate a report in uniCenta oPOS 4.2.2. The report is created in Jasper, but there is a corresponding .bs file. There is only one SQL query, and for my report I need multiple queries in the jrxml file.
This is the sales_extendedcashregisterlog.bs file that have only one SQL query with the report.setSentence().
report = new com.openbravo.pos.reports.PanelReportBean();
report.setTitleKey("Menu.ExtendedCashRegisterLog");
report.setReport("/com/openbravo/reports/sales_extendedcashregisterlog");
report.setResourceBundle("com/openbravo/reports /sales_extendedcashregisterlog_messages");
report.setSentence("SELECT " +
"tickets.TICKETID AS TICKET_NO, " +
"receipts.DATENEW AS TICKET_DATE, " +
"people.NAME AS PERSON, " +
"payments.PAYMENT AS PAYMENT, " +
"payments.NOTES, " +
"payments.TOTAL AS MONEY, " +
"payments.TENDERED " +
"FROM ((tickets tickets " +
"LEFT OUTER JOIN people people ON (tickets.PERSON = people.ID)) " +
"RIGHT OUTER JOIN receipts receipts ON (receipts.ID = tickets.ID)) " +
"LEFT OUTER JOIN payments payments ON (receipts.ID = payments.RECEIPT) " +
"WHERE ?(QBF_FILTER) " +
"ORDER BY TICKET_DATE ASC");
report.addParameter("receipts.DATENEW");
report.addParameter("receipts.DATENEW");
paramdates = new com.openbravo.pos.reports.JParamsDatesInterval();
paramdates.setStartDate(com.openbravo.beans.DateUtils.getToday());
// JG - 8 Jan 14 paramdates.setEndDate(com.openbravo.beans.DateUtils.getToday());
paramdates.setEndDate(com.openbravo.beans.DateUtils.getTodayMinutes());
report.addQBFFilter(paramdates);
report.addField("TICKET_NO", com.openbravo.data.loader.Datas.STRING);
report.addField("TICKET_DATE", com.openbravo.data.loader.Datas.TIMESTAMP);
report.addField("PERSON", com.openbravo.data.loader.Datas.STRING);
report.addField("PAYMENT", com.openbravo.data.loader.Datas.STRING);
report.addField("NOTES", com.openbravo.data.loader.Datas.STRING);
report.addField("MONEY", com.openbravo.data.loader.Datas.DOUBLE);
report.addField("TENDERED", com.openbravo.data.loader.Datas.DOUBLE);
return report;
Now we have a jrxml file. I can edit this file using the palette feature, but it is the SQL query that gives me the fields so that I can use them. Here is the query that the jrxml file already has.
<queryString>
<![CDATA[SELECT
tickets.TICKETID AS TICKET_NO,
receipts.DATENEW AS TICKET_DATE,
payments.TOTAL AS MONEY,
people.NAME AS PERSON,
payments.PAYMENT AS PAYMENT
FROM receipts
LEFT JOIN tickets ON receipts.ID = tickets.ID
LEFT JOIN payments ON receipts.ID = payments.RECEIPT
LEFT JOIN people ON tickets.PERSON = PERSON.ID
ORDER BY tickets.TICKETID]]>
</queryString>
<field name="TICKET_NO" class="java.lang.String"/>
<field name="TICKET_DATE" class="java.util.Date"/>
<field name="PERSON" class="java.lang.String"/>
<field name="PAYMENT" class="java.lang.String"/>
<field name="NOTES" class="java.lang.String"/>
<field name="MONEY" class="java.lang.Double"/>
<field name="TENDERED" class="java.lang.Double"/>
The fields are already inside. Now I want to add more than one SQL query — how exactly can I do that?
Basically I am trying to generate a transaction log in unicentaopos with all the needed data.

Quantity in stock in stock locations in Exact Online

Using the following query, I found that for items that have a stock location, there are multiple rows returned from the REST API StockLocations of Exact Online:
select spn.item_code_attr || '-' || spn.warehouse_code_attr || '-' || stn.code key
, itm.itemgroupcode
, itm.itemgroupdescription
, spn.item_code_attr
, spn.item_description
, spn.currentquantity
, spn.planning_in
, spn.planning_out
, spn.currentquantity + spn.planning_in - spn.planning_out plannedquantity
, -1 bestelniveau /* out of scope */
, itm.costpricestandard costprijs
, itm.costpricestandard * spn.currentquantity stockvalue
, spn.warehouse_code_attr
, stn.code locatie
, itm.unitcode UOM
, itm.id
, whe.id
, sln.stock
, sln.itemid
, sln.warehouse
, stn.id
from exactonlinexml..StockPositions spn
join exactonlinerest..items itm
on itm.code = spn.item_code_attr
and itm.code = 'LE-10242'
and itm.isstockitem = 1
join exactonlinerest..warehouses whe
on whe.code = spn.warehouse_code_attr
left
outer
join exactonlinerest..stocklocations sln
on sln.itemid = itm.id
and sln.stock != 0
and sln.warehouse = whe.id
left
outer
join storagelocations stn
on stn.id = sln.storagelocation
and stn.warehouse = sln.warehouse
--
-- Filter out no stock nor planned.
--
where ( spn.currentquantity !=0
or
spn.planning_in != 0
or
spn.planning_out != 0
)
and spn.item_code_attr = 'LE-10242'
order
by key
For example, for this item, there are 10 StockLocations. When I sum the field Stock, it returns the stock quantity found in StockPositions. However, it seems that every transaction creates an additional StockLocation entry.
I would expect StockLocation to contain per location in stock the total amount to be found there.
EDIT
The StockLocations API is described in https://start.exactonline.nl/api/v1/{division}/logistics/$metadata as:
<EntityType Name="StockLocation">
<Key>
<PropertyRef Name="ItemID"/>
</Key>
<Property Name="ItemID" Type="Edm.Guid" Nullable="false"/>
<Property Name="Warehouse" Type="Edm.Guid" Nullable="true"/>
<Property Name="WarehouseCode" Type="Edm.String" Nullable="true"/>
<Property Name="WarehouseDescription" Type="Edm.String" Nullable="true"/>
<Property Name="Stock" Type="Edm.Double" Nullable="true"/>
<Property Name="StorageLocation" Type="Edm.Guid" Nullable="true"/>
<Property Name="StorageLocationCode" Type="Edm.String" Nullable="true"/>
<Property Name="StorageLocationDescription" Type="Edm.String" Nullable="true"/>
</EntityType>
Somehow it is not documented at https://start.exactonline.nl/docs/HlpRestAPIResources.aspx
What am I doing wrong?
Discussed question on Hackathon with engineer. This is as the StockLocation API works; the naming does not optimally reflect the contents, but this is intended behaviour.
With a select field, sum(stock) from stocklocations group by field, you can get the right information.
To improve join performance, it is recommended to use an inline view for this such as select ... from table1 join table2 ... join ( select field, sum(stock) from stocklocations group by field).

SSIS Package - From One Table to Infinite Tables Depending on Data

I have a simple requirement. I have a table with Product Names and their Count. I want to create a SSIS package to extract data from one table to infinite tables based on Product Name.
If I have 10 products in the table, then the SSIS package should create 10 tables dynamically, with one product in each table.
Table Name : Products
ProductName , QuantitySold
ABC 10
xyz 15
Testing 25
Table Name : ABC
ProductName , QuantitySold
ABC 10
Table Name : XYZ
ProductName , QuantitySold
xyz 15
Table Name : Testing
ProductName , QuantitySold
Testing 25
Conceptually, you're looking at something like
The concept is that you will identify all the product names in the table and perform 2 tasks on each row: Create the target table, if needed. Run a query against your source for that one row and load it into the table.
Variables
I have 6 variables declared
Query_TableCreateBase is a big string that formatted looks like
IF NOT EXISTS
(
SELECT
*
FROM
sys.tables AS T
WHERE
T.name = '<Table/>'
)
BEGIN
CREATE TABLE dbo.<Table/>
(
ProductName varchar(30) NOT NULL
, QuantitySold int NOT NULL
);
END
I have expressions on Query_Source, Query_TableCreate and TargetTable
Query_Source expression
"SELECT ProductName, QuantitySold FROM (
VALUES
('ABC', 10)
, ('xyz', 15)
, ('Testing', 25)
) Products(ProductName, QuantitySold) WHERE ProductName = '" + #[User::ProductName] + "'"
Query_TableCreate expression
replace(#[User::Query_TableCreateBase], "<Table/>", #[User::ProductName])
TargetTable expression
"[dbo].[" +#[User::ProductName] + "]"
SQL Get Rows
I simulate your Products table with a query. I load those results into a variable named RS_Product.
SELECT
ProductName
FROM
(
VALUES
('ABC', 10)
, ('xyz', 15)
, ('Testing', 25)
) Products(ProductName, QuantitySold);
FELC Shred Results
I use a Foreach Loop Container, set to process an ADO Result set and parse out the 0th column into our ProductName variable
SQL Create Table if needed
This is a query that gets evaluated out to something like
IF NOT EXISTS
(
SELECT
*
FROM
sys.tables AS T
WHERE
T.name = 'Foo'
)
BEGIN
CREATE TABLE dbo.Foo
(
ProductName varchar(30) NOT NULL
, QuantitySold int NOT NULL
);
END
DFT Load Table
I have this set as DelayValidation = true as the table may not exist right up until it gets the signal to start.
Again, simulating your Products table, my query looks like
SELECT ProductName, QuantitySold FROM (
VALUES
('ABC', 10)
, ('xyz', 15)
, ('Testing', 25)
) Products(ProductName, QuantitySold) WHERE ProductName = 'Foo'
Wrapup
Strictly speaking, the data flow is not required. It could all be done through your Execute SQL Task if we pulled back all the columns in our source query.
Biml implementation
Biml, the Business Intelligence Markup Language, describes the platform for business intelligence. Here, we're going to use it to describe the ETL. BIDS Helper is a free add-on for Visual Studio/BIDS/SSDT that addresses a host of shortcomings with it. Specifically, we're going to use the ability to transform a Biml file describing ETL into an SSIS package. This has the added benefit of providing you a mechanism for being able to generate exactly the solution I'm describing versus clicking through many tedious dialogue boxes.
The following code assumes you have a default instance on your local machine and that within tempdb, you have a table called Foo.
use tempdb;
GO
CREATE TABLE dbo.Foo
(
ProductName varchar(30) NOT NULL
, QuantitySold int NOT NULL
);
Save the following script into a .biml file which when you add to your SSIS project will show up under the Miscellaneous virtual folder. Right click, choose Generate SSIS Package and it should create a package called so_27320726
<Biml xmlns="http://schemas.varigence.com/biml.xsd">
<Connections>
<OleDbConnection Name="tempdb" ConnectionString="Data Source=localhost;Initial Catalog=tempdb;Provider=SQLNCLI10.1;Integrated Security=SSPI;" />
</Connections>
<Packages>
<Package Name="so_27320726" ConstraintMode="Parallel" >
<Variables>
<Variable Name="ProductName" DataType="String">Foo</Variable>
<Variable Name="Query_Source" DataType="String" EvaluateAsExpression="true">"SELECT ProductName, QuantitySold FROM (
VALUES
('ABC', 10)
, ('xyz', 15)
, ('Testing', 25)
) Products(ProductName, QuantitySold) WHERE ProductName = '" + #[User::ProductName] + "'"</Variable>
<Variable Name="Query_TableCreate" DataType="String" EvaluateAsExpression="true"><![CDATA[replace(#[User::Query_TableCreateBase], "<Table/>", #[User::ProductName])]]></Variable>
<Variable Name="Query_TableCreateBase" DataType="String" ><![CDATA[IF NOT EXISTS
(
SELECT
*
FROM
sys.tables AS T
WHERE
T.name = '<Table/>'
)
BEGIN
CREATE TABLE dbo.<Table/>
(
ProductName varchar(30) NOT NULL
, QuantitySold int NOT NULL
);
END]]></Variable>
<Variable Name="RS_Product" DataType="Object" />
<Variable Name="TargetTable" DataType="String" EvaluateAsExpression="true">"[dbo].[" +#[User::ProductName] + "]"</Variable>
</Variables>
<Tasks>
<ExecuteSQL Name="SQL Get Rows" ConnectionName="tempdb" ResultSet="Full">
<Variables>
<Variable Name="Variable" DataType="Int32" IncludeInDebugDump="Include">0</Variable>
</Variables>
<Results>
<Result Name="0" VariableName="User.RS_Product" />
</Results>
<DirectInput>SELECT
*
FROM
(
VALUES
('ABC', 10)
, ('xyz', 15)
, ('Testing', 25)
) Products(ProductName, QuantitySold);</DirectInput>
</ExecuteSQL>
<ForEachAdoLoop Name="FELC Shred Results" ConstraintMode="Linear" SourceVariableName="User.RS_Product">
<PrecedenceConstraints>
<Inputs>
<Input OutputPathName="SQL Get Rows.Output" SsisName="Constraint" />
</Inputs>
</PrecedenceConstraints>
<Tasks>
<ExecuteSQL Name="SQL Create Table if needed" ConnectionName="tempdb">
<VariableInput VariableName="User.Query_TableCreate" />
</ExecuteSQL>
<Dataflow Name="DFT Load Table" DelayValidation="true">
<Transformations>
<OleDbSource Name="OLE_SRC Get Data" DefaultCodePage="1252" ConnectionName="tempdb">
<VariableInput VariableName="User.Query_Source" />
</OleDbSource>
<OleDbDestination Name="OLE_DST Save data" ConnectionName="tempdb" >
<TableFromVariableOutput VariableName="User.TargetTable" />
<Columns>
<Column SourceColumn="ProductName" TargetColumn="ProductName" />
<Column SourceColumn="QuantitySold" TargetColumn="QuantitySold" />
</Columns>
</OleDbDestination>
</Transformations>
</Dataflow>
</Tasks>
<VariableMappings>
<VariableMapping Name="0" VariableName="User.ProductName" />
</VariableMappings>
</ForEachAdoLoop>
</Tasks>
<Connections>
<Connection ConnectionName="tempdb" />
</Connections>
</Package>
</Packages>
</Biml>

Hibernate composite key with an auto increment field

Here is one table having an ID that I want to auto-increment, and two more columns that act as a composite key.
ID ST_NAME EDU_ID
1 A E1
2 As E1
3 Af E1
4 B E2
5 Ba E2
ID will be unique across the whole table
ST_NAME and EDU_ID will form the composite key
How can I build this table using Hibernate?
Entity
Table.java
public class Table {
private Long id;
private Composite composite;
// setters & getters
}
Composite.java
public class Composite{
private String stName;
private Long eduId;
// setters & getters
}
Table.hbm.xml
<id name="id" type="java.lang.Long">
<column name="id" />
<generator class="increment" />
</id>
<composite-id name="composite" class="com.test.Composite">
<key-property name="stName" type="java.lang.String">
<column name="ST_NAME" />
</key-property>
<key-property name="eduId" type="java.lang.Long">
<column name="EDU_ID" />
</key-property>
</composite-id>