From 63c7ebe67c3f17d41126fd40111d16ea3c2fa7a8 Mon Sep 17 00:00:00 2001 From: Mincong HUANG Date: Tue, 5 Jul 2016 02:14:30 +0200 Subject: [PATCH] HSEARCH-2594 Initial work on JSR-352 integration MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This commit has been extracted from https://github.com/mincong-h/gsoc-hsearch/tree/master/ It is the result of squashing commits 135a190ce6349f4ee53a2e28822f0a95b3a1b2cc to f9d171672ca0309daa174293edbe60aae2307382 (see git log below). commit f9d171672ca0309daa174293edbe60aae2307382 Author: mincong Date: Tue Jul 5 02:14:30 2016 +0200 #40 add IT for checkpoint: RestartChunkIT commit 5e5e177939c773c26f550c6f4b394e0c0ee82f72 Author: mincong Date: Mon Jul 4 16:38:27 2016 +0200 #81 correct spelling error commit d442d06aeebf8fa912e7942efea8255faada0156 Author: mincong Date: Mon Jul 4 09:27:40 2016 +0200 #74 change unit test MassIndexerTest commit 236e1d84e8abc3c2af26acec922a4b373532c2ec Author: mincong Date: Mon Jul 4 09:21:42 2016 +0200 #74 add method `MassIndexer#addRootEntities(Class... rootEntities)` commit 58f8f3280065e4f57f6614e6822529a4b1a960ae Author: mincong Date: Mon Jul 4 09:07:54 2016 +0200 #81 clean up gitignore, parent pom, mysql module in WF commit 9d569b170f41ded9875cda6b60e590cc47c9cd1e Author: mincong Date: Sun Jul 3 23:47:24 2016 +0200 #81 clean up IT in Java SE - properties are not used locally commit e9e1574620a2671881d2906058f9489a70eb8b8b Author: mincong Date: Sun Jul 3 23:43:44 2016 +0200 #81 clean up IT in Java EE In pom.xml - Properties are not used. Properties should be provided by parent module or core module. - JBeret SE dependencies are useless. - We don't use absolute path here - There's not webapp to package. Arquillian create war archive with its own dependencies. - Exclude surefire because there's no unit test here. In persistence.xml - It should be placed in the META-INF folder as the standard. In MassIndexerIT - Change persistence.xml's location commit 5027a33cc994e37a0bbb8f100caeea20ba1ad735 Author: mincong Date: Sun Jul 3 23:08:19 2016 +0200 #81 clean up core/pom.xml - The exclusion of dom4j is not necessary because hibernate search arm is in scope provide. So there's no conflict. - Servlet was used for the web app, but the project becomes a framework now. There's no servlet any more. - Same principal for JSTL. - Test resources were set for integration test. But now, ITs have been moved to independent modules. - Plugin for dependency path is not used since there isn't any absolute path in the project - Plugin for WAR is not used. Same reason as Servlet, not wabapp any more. - Unit test (surefire) does not need any additional requirement for instant - No IT in core module, so no failsafe, no WF unpack, no resources copy commit 32f6b3b8a05365137e0c44ca34d8fa030d4b13e5 Author: mincong Date: Sun Jul 3 20:47:57 2016 +0200 #63 add value assertions in Java SE integration test commit e6a569996286663321e4d66beaf4c72f1de89e1e Author: mincong Date: Sun Jul 3 16:32:24 2016 +0200 #63 deny JobOperator construction inside the MassIndexerImpl pass it through the setter method. commit 25adb827126e5a1e9640a6c9e3ebc7f9cd7e218c Author: mincong Date: Sun Jul 3 12:32:38 2016 +0200 #63 fix JobOperator instantiation issue In the previous version, the job operator cannot be instantiated and show the below error : java.util.ServiceConfigurationError: javax.batch.operations.JobOperator: Provider org.jberet.operations.JobOperatorImpl could not be instantiated at org.jboss.weld.environment.se.WeldContainer.initialize(WeldContainer.java:136) at org.jboss.weld.environment.se.Weld.initialize(Weld.java:589) at org.jberet.se.SEArtifactFactory.(SEArtifactFactory.java:29) at org.jberet.se.BatchSEEnvironment.getArtifactFactory(BatchSEEnvironment.java:118) at org.jberet.operations.JobOperatorImpl.(JobOperatorImpl.java:93) at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method) at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62) at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45) at java.lang.reflect.Constructor.newInstance(Constructor.java:423) at java.lang.Class.newInstance(Class.java:442) at java.util.ServiceLoader$LazyIterator.nextService(ServiceLoader.java:380) at java.util.ServiceLoader$LazyIterator.next(ServiceLoader.java:404) at java.util.ServiceLoader$1.next(ServiceLoader.java:480) at javax.batch.runtime.BatchRuntime.getJobOperator(BatchRuntime.java:57) at org.hibernate.search.jsr352.se.MassIndexerIT.setup(MassIndexerIT.java:41) And I found out that if there's only one @Test annotation in the integration test, the problem get fixed. I don't know why and it is very weird. commit 5a99766bf8a616fdc2f827de9a6620804cf54b66 Author: mincong Date: Sat Jul 2 19:14:15 2016 +0200 #80 LuceneWork execution should be asynchronous commit 75bcf3e5768e35c4453aaff0d0229e13974e22c0 Author: mincong Date: Sat Jul 2 19:11:35 2016 +0200 #73 change ref assignment for EntityManager inside the job commit bd0fe3a641f785aaaaa2ccfee29805aee3520912 Author: mincong Date: Fri Jul 1 19:14:12 2016 +0200 #63 replace entity `Address`, `Stock` by `Company` commit 36f8ce1bc0ad894fdbfd3467426d7cd793118185 Author: mincong Date: Fri Jul 1 06:35:57 2016 +0200 #76 update README commit 5ac099c6d02d6954444c048e62537393e7a109ca Author: mincong Date: Fri Jul 1 06:31:33 2016 +0200 #69 leave mysql and use only h2 commit cd6289f87627f583d7dcf152f0397dc317fcc674 Author: mincong Date: Wed Jun 29 23:42:18 2016 +0200 #69 code enhancement commit 18e296502c6e4ee247907e4add2302680587661f Author: mincong Date: Wed Jun 29 23:36:49 2016 +0200 #69 change addEntityCount to Synchronized mode commit ad3b5b9f6638495f2d7046ea0c0429b320cf90da Author: mincong Date: Wed Jun 29 23:16:44 2016 +0200 #67 extend timeout value the default timeout in JSR352 is 180 seconds according to v1.0 final release chapter §9.7 Transactionality commit accf7e1660feaa1b1d49dce383b95f77b85c2ab0 Author: mincong Date: Wed Jun 29 22:20:24 2016 +0200 #69 code enhancement commit 215174295aca27c776ef8698c2888ba1dce3ffae Author: mincong Date: Tue Jun 28 07:51:54 2016 +0200 #63 try to use my own MassIndexer (failed) revert to Gunnar's one in MassIndexerIT.java However, there're still some more dependencies added commit 1fc14a7170b49efad8d61ac7d1619a450e03f95e Author: mincong Date: Tue Jun 28 00:01:34 2016 +0200 #63 replace official indexer by my own indexer (Java SE) but this is not the finished version commit 889e78291241d740c9923b74499b7330fdfd14e2 Author: mincong Date: Mon Jun 27 21:29:30 2016 +0200 #68 use embedded H2 database for Java SE testing commit 7e49180cc951f40cd44dea9813d11048cf69b107 Author: mincong Date: Mon Jun 27 12:56:35 2016 +0200 #63 separate core module and integration tests modules and remove useless resources files, e.g. sql & csv commit 51e2636744c92983b019104cbd70f40a77f73bb6 Author: mincong Date: Mon Jun 27 11:45:11 2016 +0200 #63 create core module commit ed557c036f7f20c6b47af837c73eb6522cb723db Author: mincong Date: Mon Jun 27 10:42:29 2016 +0200 #63 configure pom file for Java SE IT module commit 6ddbb7cc02d955257675def01053cdc43573b656 Author: mincong Date: Sun Jun 26 23:54:12 2016 +0200 #63 start module hibernate-search-integrationtest-javase commit 931ca09e71264a0481c007f33192c7a64118d2bd Author: mincong Date: Sat Jun 25 16:02:37 2016 +0200 #69 add batch status check commit 3a89e11e676577c61f5a2a59f40df1a1faae79dc Author: mincong Date: Sat Jun 25 11:52:48 2016 +0200 #71 move persistence.xml to src/test/resources commit 64727ba61000254865282c545e1b2ba84f10245f Author: mincong Date: Sat Jun 25 10:51:02 2016 +0200 #69 write count test commit ab9df583aaa7133578c3e8a946f719cbe53aa84a Author: mincong Date: Sat Jun 25 10:50:16 2016 +0200 #70 IdProducerBatchlet produces less IDs than expected commit 917a011e208317ee3949d46f57e70ad9676f9aee Author: mincong Date: Sat Jun 25 10:49:17 2016 +0200 #43 add lucene files to .gitignore commit 9d8214c139e6369d82553d32f3134ac0f67b8065 Author: mincong Date: Sat Jun 25 10:06:38 2016 +0200 #69 read count test commit 7b61a5d3146823cafbc90231520da13043e3eb9a Author: mincong Date: Sat Jun 25 10:05:36 2016 +0200 #69 add BatchTestHelper, copied from javaee7-samples commit 4a3941fdf60ff589c89ffa3b2fad227086e70f25 Author: mincong Date: Sat Jun 25 10:04:22 2016 +0200 #49 change logger level commit 47aec792e74bf195efc3f8502c7d138747fd0a2c Author: mincong Date: Thu Jun 23 23:40:32 2016 +0200 #60 some mixed enhancement commit be25a49c250e7ce642f1441a4cef1aa8d8a9bbf5 Author: mincong Date: Thu Jun 23 23:37:42 2016 +0200 #65 fix entity type usage use IndexingContext instead of batch properties commit 2f40c3c4d1dc9fc57dac18390d233ecf8b6d7789 Author: mincong Date: Thu Jun 23 23:34:56 2016 +0200 #49 change log level commit 1ff59ad602ca651278f64eb6abae7070244604e2 Author: mincong Date: Thu Jun 23 23:30:19 2016 +0200 #46 Rename package into org.hibernate.search.jsr352 commit 254d44fd7f815c5ebda50fc7955b2619ac135056 Author: mincong Date: Thu Jun 23 23:28:45 2016 +0200 #66 use remote mysql server commit 1d4be37f7ebaf4ab140977ed5bdfc66d102ad79c Author: mincong Date: Wed Jun 22 00:12:10 2016 +0200 #60 add JBoss Deployment Structure File for class loading commit 29e5a249c215e1b319c0bb8d2f460f68041c8a6a Author: mincong Date: Wed Jun 22 00:11:23 2016 +0200 #60 add shrink-wrap resolver dependency for war archive creation commit 655e1ff3c9af0e58106f0da8a233cde41f66325b Author: mincong Date: Wed Jun 22 00:09:19 2016 +0200 #60 change WF to debug mode commit 73decd1ffa291d713b57f1eafeba2136024ca774 Author: mincong Date: Wed Jun 22 00:06:36 2016 +0200 #60 fix PU naming error commit 8a0438e8603da9b7f8aefd4ae819e7fc84a50d09 Author: mincong Date: Wed Jun 22 00:05:23 2016 +0200 #60 move src/main/webapp to test/main/webapp commit e9c858c0aa72607fe5a0f002437ad89e68d7b1f1 Author: mincong Date: Wed Jun 22 00:04:18 2016 +0200 #60 remove wrong placed persistence.xml commit cd9aca821c5db5d995f931c80373bdb5deabdcae Author: mincong Date: Tue Jun 21 19:01:26 2016 +0200 #64 disable IT log redirection commit 5426048568ada68167390a91248cc91352615e06 Author: mincong Date: Mon Jun 20 23:17:32 2016 +0200 #59 remove ds.xml and include mysql jar file commit f8d292cb81d439b95d18bbeb03e02f6301af7936 Author: mincong Date: Mon Jun 20 22:15:47 2016 +0200 #59 add mysql resources to src/wildflyConfig commit 00d45bc78b7201f55cb60bd5a6840b81bae3cf25 Author: mincong Date: Mon Jun 20 11:17:46 2016 +0200 #58 add datasource for MySQL commit 64a1b5cbac22a8a43cc1f9ec41066f48f7fa2a47 Author: mincong Date: Mon Jun 20 09:46:27 2016 +0200 #57 clarify unit test and integration test - rename DeploymentTest.java to DeploymentIT.java - change pom.xml configuration commit 9a95b249156587fbd0ed6504752f03931a1a427d Author: mincong Date: Sun Jun 19 20:33:03 2016 +0200 #49 use Logger instead of System.out.println commit 04493884534a03ca9165e78261ea7c42d9cab8c7 Author: mincong Date: Sun Jun 19 19:38:25 2016 +0200 #28 change README content The previous demos have been deleted. The current README should only focus on 3 points : 1. What's new using JSR352 implementation ? 2. How to run it ? 3. Why we need it ? commit 7c6662cb37e637440c761faccfd0e4667c9a7c53 Author: mincong Date: Sun Jun 19 17:09:57 2016 +0200 #50 delete MANIFEST.MF commit 95690a9373e8cfaadaa027644e89ea4dd6c1da40 Author: mincong Date: Sun Jun 19 15:42:14 2016 +0200 #54 add unit test for mass indexer commit 522f178166fa29c589610840b8a3761efe9ad33d Author: mincong Date: Sun Jun 19 15:40:46 2016 +0200 #48 separate interface and implementation commit 38c4f54d2f1ef1268f0d44b198d0095b707e9b15 Author: mincong Date: Sun Jun 19 09:09:33 2016 +0200 #52 add deprecated class manually during war creation commit 31610a7d31065bcefc1460e7d58e7b0e47125b54 Author: mincong Date: Sun Jun 19 08:37:59 2016 +0200 #52 delete commented static configuration commit 4884f46fa8ddff2bf39c64de2d4cded2003b59bc Author: mincong Date: Sat Jun 18 21:47:54 2016 +0200 #52 :hankey: only god knows why it works :sweat_drops: :sweat_drops: :sweat_drops: - pom.xml - add arquillian protocol support (not sure if necessary) - change WF from embedded to managed - add tag to enable arquillian xml file lookup - add bit strings to find the absolute path to dependencies (not sure if necessary) - add detailed configuration to unit test plugin surefire (copied from HSEARCH) - add failsafe (not sure if necessary) - add WF runtime in a better way... (Actually, I've not idea how it works. Maybe it just download a target WF version from maven central and past it to the target output directory) - add resources plugin to transfer the defined WF config from src to test directory - DeploymentTest - disable the recursive package lookup because the deprecated class "org.hibernate.search.store.IndexShardingStrategy" is not found - arquillian.xml - disable the "javaVmArguments", otherwise the server cannot start - wildflyCondif/* - config for WF container commit 675c319421c396325bf36d1ec2058b9326c8fad6 Author: mincong Date: Thu Jun 16 23:21:50 2016 +0200 #52 add Arquillian feature (failed) - add JUnit as a maven dependency - add arquillian components as maven dependencies (not finished, don't know exactly what to add) - add wildfly-arquillian-container-embedded, but not sure how it works - add class DeploymentTest commit 68082d7643424f56fce848f20b89788de46dd05c Author: mincong Date: Wed Jun 15 21:31:09 2016 +0200 #48 split of public API and internal implementation packages commit d339328cc1a172b327137756c73a49498b78ce0c Author: mincong Date: Wed Jun 15 20:48:51 2016 +0200 #37 move test classes from src/main/java to src/test/java - entities - sessions - servlets (should be deleted soon) - META-INF (except batch XML file mass-index.xml) commit a67ff0d3ece1751a191edac6b834493276cc0259 Author: mincong Date: Wed Jun 15 20:07:30 2016 +0200 #45 Fetch WildFly for testing via maven-dependency-plugin There're other minor modifications for fixing warnings during the build. commit 47f2895fd1dc0678d324cffa4ca96a4aea4baf8a Author: mincong Date: Tue Jun 14 23:20:33 2016 +0200 #14 add dependency and modify artifact info commit 522f1ac1c3e4f5086d1c9d17631cafd54bf0a5a0 Author: mincong Date: Tue Jun 14 22:45:03 2016 +0200 #41 Move content of folder jsr352 to the root folder commit 5c46ff1e1cbd54f3554d2c985c32ba879220159e Author: mincong Date: Tue Jun 14 22:13:57 2016 +0200 #41 remove jse-chunk and zoo-jpa commit 7fd6cd2bbed309a20cdacf2318abe774c640fbb1 Author: mincong Date: Tue Jun 14 02:27:58 2016 +0200 #35 update project name in pom.xml commit ac276b936bd01d021d7349f6dda83dcb229b6f12 Author: mincong Date: Tue Jun 14 02:26:28 2016 +0200 #42 change Deployment Assembly settings commit a7a3e9358a85298525010ae78f21e2ce043c7f4e Author: mincong Date: Tue Jun 14 02:24:05 2016 +0200 #41 add SQL script for importing CSV files commit d6024c63b0e76a3b4a2e91a3a7afd0a64d73fa5a Author: mincong Date: Mon Jun 13 21:49:46 2016 +0200 #41 add SQL script for importing yahoo-finance-rht.csv commit c8f6634e51a11c0fe66ac8a4eaaffbe392cea665 Author: mincong Date: Mon Jun 13 21:44:51 2016 +0200 #41 change yahoo finance stock CSV file for redhat to lowercase commit 7ed42232093c7016b92d96e7de19bb7d6c6e9106 Author: mincong Date: Mon Jun 13 21:36:58 2016 +0200 #41 provide MySQL dump script for creating tables (without data) commit 917b64a48c9981dd9413db6da1a281f9a59ea3ff Author: mincong Date: Mon Jun 13 21:13:09 2016 +0200 #36 Move "./WebContents" to "./src/main/webapp" commit 81adb41158ca50c30313dd6a754298adf4ff8afb Author: mincong Date: Mon Jun 13 21:10:18 2016 +0200 #14 update pom.xml - add dependencies - rename artifactId (issue #35) - change folder location from ./WebContent to ./src/main/webapp (issue #36) commit b258cf41f693dbbdb7b9081aea544e6f7e700ecc Author: mincong Date: Mon Jun 13 19:45:57 2016 +0200 #35 rename "us-address" to "jsr352" commit 6f80618d08e8a80ffc23b9c86ff88f5c68661b22 Author: mincong Date: Mon Jun 13 17:46:02 2016 +0200 #28 change diagram for mass-indexer (overview) Prepare for the first tag. This image should have smaller width and all components up to date. commit fa1e12c16e93a560758c687ceb1bab17b1a63355 Author: mincong Date: Mon Jun 13 17:21:27 2016 +0200 #41 update javadoc and enhance code commit 1fb8ae6e30ceabf5f345361c03440b5527cf1afa Author: mincong Date: Mon Jun 13 16:26:46 2016 +0200 #41 update javadoc for chunk-step "produceLuceneDoc" Reader / Processor / Writer commit a8f7aaa2e9c4b6835693f92c95239f681276da5a Author: mincong Date: Mon Jun 13 15:47:00 2016 +0200 #41 clean code for IdProducerBatchlet commit 9820a4645c2cd02149f9fc7335a274bd9394d77c Author: mincong Date: Mon Jun 13 14:32:45 2016 +0200 #34 use IndexingContext for entity row count commit 430ea9e1bf24188e73ad8be169ac279de88d27bb Author: mincong Date: Mon Jun 13 01:12:27 2016 +0200 #33 change job to use dynamic configuration of partition plan commit e733a7f0323a0f801036a484376cac1163832e9a Author: mincong Date: Mon Jun 13 01:06:09 2016 +0200 #22 use generic type in BatchItemProcessor commit 0b28f46feff142422597a6489d2c8204da65abd3 Author: mincong Date: Sat Jun 11 21:55:24 2016 +0200 #32 add EntityPartitionMapper commit 518b035e680b1b1b6a14e4ee4b8d11a60973ee6f Author: mincong Date: Sat Jun 11 20:45:06 2016 +0200 #31 get class type Class from String commit b734016294a10ff25c0db7f57e6271aef1d913f5 Author: mincong Date: Sat Jun 11 16:08:47 2016 +0200 #30 add table `stock`, provided by yahoo finance commit 61b5406083ed2105d48c8445a9943545a9b73845 Author: mincong Date: Sat Jun 11 15:13:42 2016 +0200 #24 #25 add tags in job "mass-index" work flow commit 69673517c72746f947925bece5f022fecfcaecc6 Author: mincong Date: Sat Jun 11 15:08:57 2016 +0200 #28 change diagram for mass indexer (overview) The image provided in the commit is generated by the JBoss tool in Eclipse, so it helps to standardize the progress display commit c65f41d3633c4e9a18ebb4fcb4f114b6a7ed5940 Author: mincong Date: Wed Jun 8 20:07:03 2016 +0200 #24 change decision id in job xml commit d127e1e3a9a46db2856be7a4698e934ee641fc06 Author: mincong Date: Tue Jun 7 23:42:49 2016 +0200 #28 add diagram for mass indexer (chunk) commit 2f314a12167dc64e6c74221a4cce4a7f992fa16a Author: mincong Date: Tue Jun 7 23:28:31 2016 +0200 #28 resize image commit e43b4cee609983d181945d99e04241511b889d6f Author: mincong Date: Tue Jun 7 22:59:02 2016 +0200 #28 add diagram for mass indexer (overview) commit 36440bed8486f7040bb04fa2d194a425990a1be2 Author: mincong Date: Mon Jun 6 21:46:24 2016 +0200 #24 add decider and purger batchlet before the doc production chunk Decider and purger batchlet provide a before-production enhancement. User can now have possibility to purge all index before started. However, this commit provides only the working model, method inside the batchlet is not implemented yet. commit 55f01375d11c8ab7c5940484050812a1714583c3 Author: mincong Date: Mon Jun 6 19:52:29 2016 +0200 #26 delete IdReaderBatchlet This class belongs to the previous demo and it was not used since a moment. Delete it to avoid confusion. commit 7d2b5b586178f47f3d0ae4763d5d7672e3815098 Author: mincong Date: Mon Jun 6 19:47:28 2016 +0200 #26 delete previous demo's chunk classes AddressReader + AddressProcessor + AddressWriter + Job XML commit 447412586c2f49455c0f95704cbe0c0976439fc8 Author: mincong Date: Mon Jun 6 17:26:53 2016 +0200 #23 replace LinkedList by ConcurrentLinkedQueue for idChunkQueue commit e4c9f048c081562376921b9d06f434d786f5b456 Author: mincong Date: Mon Jun 6 12:27:42 2016 +0200 #22 Use generic type in BatchItemProcessor Now the method buildAddLuceneWorks(List, Class) is generic. This implementation done thanks to Stack Overflow. I've asked a question this morning : How to use generic type in a map? [duplicate] commit 2a67c6669355e8d4f94d1d71f844693a271870ca Author: mincong Date: Mon Jun 6 10:55:21 2016 +0200 #4 performance test for indexing 1M entities 10:51:58,187 INFO [stdout] (Batch Thread - 1) #analyzeCollectorData(): 1000000 works processed (100.0%). 10:51:58,187 INFO [stdout] (Batch Thread - 1) #analyzeStatus(...) called. 10:51:58,187 INFO [stdout] (Batch Thread - 1) #beforePartitionedStepCompletion() called. 10:51:58,188 INFO [stdout] (Batch Thread - 1) #afterPartitionedStepCompletion(...) called. 10:51:58,466 INFO [stdout] (EJB default - 1) 52 rounds, delta T = 64441 ms. commit 9ccde4fa838eb434fa0681fd2189978ca560f5a1 Author: mincong Date: Mon Jun 6 10:45:32 2016 +0200 #21 modify variable naming and javadoc to clarify the analyzer commit 911990fde529500b58a3f927f4fe69b1fa5b681f Author: mincong Date: Sun Jun 5 21:36:27 2016 +0200 #21 show mass index progress using PartitionAnalyzer commit 7072f6f102431847ecaac86700e72362a8d06db1 Author: mincong Date: Sun Jun 5 18:59:08 2016 +0200 #19 decrease item-count to avoid OutOfMemory exception commit 392a40bce8e140e5a194caa15e947bafb794714f Author: mincong Date: Sun Jun 5 18:58:06 2016 +0200 #17 pass IndexShardingStrategy through IndexingContext commit de93ad6e98eabe59c5f57b01f82a6d7d2760e51d Author: mincong Date: Sun Jun 5 18:56:33 2016 +0200 #21 Collect and analyse items processed from different partitions Previously, items processed in different partitions were shown in separate monitor. Now, by using the StepContext + PartitionCollector + PartitionAnalyzer, these data are unionned together. So, we can see the total progress of this step. However, the monitor is not integrated yet. commit 6d2ae3fa2e1382e94a35ac42582a7fcd992b00ef Author: mincong Date: Sun Jun 5 17:59:34 2016 +0200 #20 disable the cache in a session commit 9b8faa8a81d1748f367ad8069dacd6c025fe2bfb Author: mincong Date: Sat Jun 4 19:07:22 2016 +0200 #13 add AddLuceneWork execution in job "mass-index" The BatchItemWriter can now execute AddLuceneWork and a simple monitor has been added to monitoring the index progress. The BatchItemProcessor had a problem about the IndexShardingStrategy (issue #17). Now it is fixed using a customized context class for context-value-transfer. commit 81891b9a8e7e3eb4aedc46f4d57df62f6ad9f843 Author: mincong Date: Thu Jun 2 20:54:21 2016 +0200 #13 change BatchItemWriter to do lucene work. The processor build AddLuceneWork using the entities obtained from JPA entity manager. The code has been separated into logical modules and different functions. commit 6d70db122d298051151a82364af596b8e3c20827 Author: mincong Date: Wed Jun 1 23:33:05 2016 +0200 #15 fix unaware how to convert value commit c6e8fe1e0ec18d4b279c774a333903e15733dc76 Author: mincong Date: Mon May 30 23:48:06 2016 +0200 #13 add javadoc commit 4e7fcdcc13d262ef035c8dd1a4a2b0167ecae3a9 Author: mincong Date: Mon May 30 21:50:44 2016 +0200 #13 add documentation for reader commit 0ec55aa867b3e512782ac9c2ee27494b21b6633b Author: mincong Date: Mon May 30 21:50:12 2016 +0200 #13 change processor's role and add documentation commit e63636855113e99101bde685fd0f686a5aa940f6 Author: mincong Date: Mon May 30 13:43:45 2016 +0200 #13 mass indexer using JSR 352 - id production commit 382fe8c6e4cd4154a11b3d2a6b603b0bf84e1d61 Author: mincong Date: Sun May 29 20:38:25 2016 +0200 #7 Parallel processing for IdReaderBatchlet commit 941c3b0dfd37a5cc0199543732d263242632c699 Author: mincong Date: Sat May 28 22:51:08 2016 +0200 #10 Use customized context to pass properties between steps commit c361d22694c93230da1e0fcb03adeaf2328ce0b8 Author: mincong Date: Sat May 28 21:25:37 2016 +0200 #8 change data structure from ArrayList to Array (4.0% faster) commit 5739a13c38d451fc1530a1d1c341fcad773bcda6 Author: mincong Date: Sat May 28 12:00:02 2016 +0200 #9 Set batchlet properties via job xml Now the batchlet properties are defined through the Job XML, with Job XML substitution support described in §8.8 of JSR 352 v1.0 final release. commit 14a638a75db57f83bb763cbd1aa02cca995e8620 Author: mincong Date: Fri May 27 18:29:37 2016 +0200 load ids v2 Now the class IdProducerBatchlet can load all entities' ids and print to terminal. commit 25c20141a78f7a632b27cc1568f8ad4767fd1dad Author: mincong Date: Fri May 27 00:38:56 2016 +0200 add id producer batchlet (not finished) This batchlet is similar to org.hibernate.search.batchindexing.impl.IdentifierProducer commit eadadae1cc605fe14735ed2998d5490753ff38be Author: mincong Date: Sun May 22 22:36:51 2016 +0200 execute LuceneWork in AddressProcessor commit 53881cd9264097fca97c1e03b0b4c8f9d4de1029 Author: mincong Date: Sun May 22 20:15:21 2016 +0200 #1 move operations related to session from processor to reader commit aaf0441998c2bb4b955ce7b95d222f839899c262 Author: mincong Date: Sat May 21 16:28:22 2016 +0200 change batch job from a print demo to lucene demo commit b72158bac525c8d459da5a29526538103deeba41 Author: mincong Date: Mon May 16 00:40:24 2016 +0200 add demo batch job `print-addresses-job` commit 539c2445ca6cd26102692e1799798916e20cbef0 Author: mincong Date: Sat May 14 15:21:23 2016 +0200 building demo jse-chunk commit a5db45120b6139a1eb250f3e2d878c6cd7beb194 Author: mincong Date: Thu May 5 21:50:33 2016 +0200 add different types of query - keyword - fuzzy - wildcard commit dcde70e8a12c8c334c42946a7fceea6296db03af Author: mincong Date: Thu May 5 17:48:13 2016 +0200 add basic search feature (keyword query) commit 4a103d19d4133e0359f35ae5a82eb119948e5bda Author: mincong Date: Sun May 1 15:45:48 2016 +0200 optimise mass indexer settings https://docs.jboss.org/hibernate/search/5.6/reference/en-US/html_single/#search-batchindex-massindexer commit 2b2ac86731e3653c427048f9a944937e6ec7a0c5 Author: mincong Date: Sun May 1 14:21:29 2016 +0200 change hibernate search method `index()` into async mode commit 322c58a717f89d5d4c56ea130a6e474a7eeba646 Author: mincong Date: Sun May 1 09:50:00 2016 +0200 add Hibernate Search feature into `us-address` commit 05b0ee16802cab782bbfd839d172b5d3a326b758 Author: mincong Date: Sat Apr 30 11:08:25 2016 +0200 add JPA example `us-address` The Twilio/Wigle.net Street Vector data set provides a complete database of US street names and address ranges mapped to zip codes and latitude/longitude ranges, with DTMF key mappings for all street names. This application `us-address` is a Java EE demo for JPA 2.1. It will be extended to Hibernate Search feature later. commit 135a190ce6349f4ee53a2e28822f0a95b3a1b2cc Author: mincong Date: Sat Apr 16 17:03:37 2016 +0200 add example zoo-jpa --- jsr352/.gitignore | 89 ++++ jsr352/README.md | 15 +- jsr352/core/pom.xml | 118 +++++ .../hibernate/search/jsr352/MassIndexer.java | 40 ++ .../search/jsr352/MassIndexerImpl.java | 275 ++++++++++ .../jsr352/internal/AfterIndexDecider.java | 30 ++ .../jsr352/internal/AfterPurgeDecider.java | 38 ++ .../jsr352/internal/BatchItemProcessor.java | 242 +++++++++ .../jsr352/internal/BatchItemReader.java | 116 +++++ .../jsr352/internal/BatchItemWriter.java | 108 ++++ .../internal/EntityPartitionMapper.java | 74 +++ .../jsr352/internal/IdProducerBatchlet.java | 113 ++++ .../jsr352/internal/IndexPurgerBatchlet.java | 26 + .../jsr352/internal/IndexingContext.java | 102 ++++ .../internal/LucenePartitionAnalyzer.java | 61 +++ .../internal/LucenePartitionCollector.java | 35 ++ .../internal/LucenePartitionMapper.java | 127 +++++ .../internal/LucenePartitionReducer.java | 34 ++ .../jsr352/internal/OptimizerBatchlet.java | 24 + .../search/jsr352/internal/PurgeDecider.java | 38 ++ .../META-INF/batch-jobs/mass-index.xml | 104 ++++ .../src/main/resources/META-INF/beans.xml | 4 + .../search/jsr352/MassIndexerTest.java | 74 +++ jsr352/integrationtest/javaee-wildfly/pom.xml | 247 +++++++++ .../search/jsr352/MassIndexerIT.java | 243 +++++++++ .../search/jsr352/test/entity/Address.java | 258 ++++++++++ .../search/jsr352/test/entity/Company.java | 50 ++ .../jsr352/test/entity/CompanyManager.java | 64 +++ .../search/jsr352/test/entity/Stock.java | 122 +++++ .../jsr352/test/util/BatchTestHelper.java | 60 +++ .../test/resources/META-INF/persistence.xml | 17 + .../src/test/resources/arquillian.xml | 34 ++ .../application-roles.properties | 1 + .../application-users.properties | 2 + .../standalone-full-testqueues.xml | 481 ++++++++++++++++++ jsr352/integrationtest/javase/pom.xml | 188 +++++++ .../search/jsr352/se/JobFactory.java | 13 + .../search/jsr352/se/MassIndexerIT.java | 227 +++++++++ .../search/jsr352/se/RestartChunkIT.java | 262 ++++++++++ .../search/jsr352/se/test/Company.java | 50 ++ .../test/resources/META-INF/persistence.xml | 25 + jsr352/pom.xml | 146 ++++++ 42 files changed, 4376 insertions(+), 1 deletion(-) create mode 100644 jsr352/core/pom.xml create mode 100644 jsr352/core/src/main/java/org/hibernate/search/jsr352/MassIndexer.java create mode 100644 jsr352/core/src/main/java/org/hibernate/search/jsr352/MassIndexerImpl.java create mode 100644 jsr352/core/src/main/java/org/hibernate/search/jsr352/internal/AfterIndexDecider.java create mode 100644 jsr352/core/src/main/java/org/hibernate/search/jsr352/internal/AfterPurgeDecider.java create mode 100644 jsr352/core/src/main/java/org/hibernate/search/jsr352/internal/BatchItemProcessor.java create mode 100644 jsr352/core/src/main/java/org/hibernate/search/jsr352/internal/BatchItemReader.java create mode 100644 jsr352/core/src/main/java/org/hibernate/search/jsr352/internal/BatchItemWriter.java create mode 100644 jsr352/core/src/main/java/org/hibernate/search/jsr352/internal/EntityPartitionMapper.java create mode 100644 jsr352/core/src/main/java/org/hibernate/search/jsr352/internal/IdProducerBatchlet.java create mode 100644 jsr352/core/src/main/java/org/hibernate/search/jsr352/internal/IndexPurgerBatchlet.java create mode 100644 jsr352/core/src/main/java/org/hibernate/search/jsr352/internal/IndexingContext.java create mode 100644 jsr352/core/src/main/java/org/hibernate/search/jsr352/internal/LucenePartitionAnalyzer.java create mode 100644 jsr352/core/src/main/java/org/hibernate/search/jsr352/internal/LucenePartitionCollector.java create mode 100644 jsr352/core/src/main/java/org/hibernate/search/jsr352/internal/LucenePartitionMapper.java create mode 100644 jsr352/core/src/main/java/org/hibernate/search/jsr352/internal/LucenePartitionReducer.java create mode 100644 jsr352/core/src/main/java/org/hibernate/search/jsr352/internal/OptimizerBatchlet.java create mode 100644 jsr352/core/src/main/java/org/hibernate/search/jsr352/internal/PurgeDecider.java create mode 100644 jsr352/core/src/main/resources/META-INF/batch-jobs/mass-index.xml create mode 100644 jsr352/core/src/main/resources/META-INF/beans.xml create mode 100644 jsr352/core/src/test/java/org/hibernate/search/jsr352/MassIndexerTest.java create mode 100644 jsr352/integrationtest/javaee-wildfly/pom.xml create mode 100644 jsr352/integrationtest/javaee-wildfly/src/test/java/org/hibernate/search/jsr352/MassIndexerIT.java create mode 100644 jsr352/integrationtest/javaee-wildfly/src/test/java/org/hibernate/search/jsr352/test/entity/Address.java create mode 100644 jsr352/integrationtest/javaee-wildfly/src/test/java/org/hibernate/search/jsr352/test/entity/Company.java create mode 100644 jsr352/integrationtest/javaee-wildfly/src/test/java/org/hibernate/search/jsr352/test/entity/CompanyManager.java create mode 100644 jsr352/integrationtest/javaee-wildfly/src/test/java/org/hibernate/search/jsr352/test/entity/Stock.java create mode 100644 jsr352/integrationtest/javaee-wildfly/src/test/java/org/hibernate/search/jsr352/test/util/BatchTestHelper.java create mode 100644 jsr352/integrationtest/javaee-wildfly/src/test/resources/META-INF/persistence.xml create mode 100644 jsr352/integrationtest/javaee-wildfly/src/test/resources/arquillian.xml create mode 100644 jsr352/integrationtest/javaee-wildfly/src/wildflyConfig/standalone/configuration/application-roles.properties create mode 100644 jsr352/integrationtest/javaee-wildfly/src/wildflyConfig/standalone/configuration/application-users.properties create mode 100644 jsr352/integrationtest/javaee-wildfly/src/wildflyConfig/standalone/configuration/standalone-full-testqueues.xml create mode 100644 jsr352/integrationtest/javase/pom.xml create mode 100644 jsr352/integrationtest/javase/src/test/java/org/hibernate/search/jsr352/se/JobFactory.java create mode 100644 jsr352/integrationtest/javase/src/test/java/org/hibernate/search/jsr352/se/MassIndexerIT.java create mode 100644 jsr352/integrationtest/javase/src/test/java/org/hibernate/search/jsr352/se/RestartChunkIT.java create mode 100644 jsr352/integrationtest/javase/src/test/java/org/hibernate/search/jsr352/se/test/Company.java create mode 100644 jsr352/integrationtest/javase/src/test/resources/META-INF/persistence.xml create mode 100644 jsr352/pom.xml diff --git a/jsr352/.gitignore b/jsr352/.gitignore index 32858aad3c3..96285c27c12 100644 --- a/jsr352/.gitignore +++ b/jsr352/.gitignore @@ -1,12 +1,101 @@ +########## Java ########### + *.class # Mobile Tools for Java (J2ME) .mtj.tmp/ # Package Files # +# except the mysql jar file for integration test *.jar *.war *.ear # virtual machine crash logs, see http://www.java.com/en/download/help/error_hotspot.xml hs_err_pid* +target +build + +########### Lucene ########## +**/org.hibernate.search.jsr352.test.entity.*/* + +########### OSX ########### + +*.DS_Store +.AppleDouble +.LSOverride + +# Icon must end with two \r +Icon + +# Thumbnails +._* + +# Files that might appear in the root of a volume +.DocumentRevisions-V100 +.fseventsd +.Spotlight-V100 +.TemporaryItems +.Trashes +.VolumeIcon.icns +.com.apple.timemachine.donotpresent + +# Directories potentially created on remote AFP share +.AppleDB +.AppleDesktop +Network Trash Folder +Temporary Items +.apdisk + +########## Eclipse ########### + +.metadata +bin/ +tmp/ +*.tmp +*.bak +*.swp +*~.nib +local.properties +.settings/ +.loadpath +.recommenders + +# Eclipse Core +.project + +# External tool builders +.externalToolBuilders/ + +# Locally stored "Eclipse launch configurations" +*.launch + +# PyDev specific (Python IDE for Eclipse) +*.pydevproject + +# CDT-specific (C/C++ Development Tooling) +.cproject + +# JDT-specific (Eclipse Java Development Tools) +.classpath + +# Java annotation processor (APT) +.factorypath + +# PDT-specific (PHP Development Tools) +.buildpath + +# sbteclipse plugin +.target + +# Tern plugin +.tern-project + +# TeXlipse plugin +.texlipse + +# STS (Spring Tool Suite) +.springBeans + +# Code Recommenders +.recommenders/ diff --git a/jsr352/README.md b/jsr352/README.md index 0221941561b..e95731b4cef 100644 --- a/jsr352/README.md +++ b/jsr352/README.md @@ -1,2 +1,15 @@ # gsoc-hsearch -Example usages of Hibernate Search for GSoC (Google Summer of Code) + +This project aims to provide an alternative to the current mass indexer +implementation, using the Java Batch architecture as defined by JSR 352. This +standardized tool JSR 352 provides task-and-chunk oriented processing, parallel +execution and many other optimization features. This batch job should accept +the entity type(s) to re-index as an input, load the relevant entities from the +database and rebuild the full-text index from these. + +## Run + +You can install the project and see test cases using: + + mvn clean install + diff --git a/jsr352/core/pom.xml b/jsr352/core/pom.xml new file mode 100644 index 00000000000..91d94edfae9 --- /dev/null +++ b/jsr352/core/pom.xml @@ -0,0 +1,118 @@ + + 4.0.0 + + org.hibernate + hsearch-jsr352-parent + 5.6.0-SNAPSHOT + + + hsearch-jsr352-core + GSoC JSR352 - Core + New implementation of mass-indexer using JSR 352 + + + + + org.jboss.arquillian + arquillian-bom + 1.1.11.Final + import + pom + + + + + + + org.jboss.spec + jboss-javaee-7.0 + 1.0.0.Final + pom + provided + + + org.hibernate + hibernate-search-orm + 5.5.3.Final + provided + + + javax.batch + javax.batch-api + 1.0 + provided + + + + javax.ejb + javax.ejb-api + 3.2 + provided + + + javax.inject + javax.inject + 1 + provided + + + junit + junit + 4.12 + test + + + org.jboss.arquillian.junit + arquillian-junit-container + + + + org.jboss.arquillian.protocol + arquillian-protocol-servlet + + + org.wildfly + wildfly-arquillian-container-managed + ${org.wildfly.arquillian} + test + + + org.jboss.logmanager + jboss-logmanager + + + org.jboss.logmanager + log4j-jboss-logmanager + + + + wildfly-patching + org.wildfly + + + + + + + ${project.artifactId}-${project.version} + + + maven-compiler-plugin + 3.3 + + 1.8 + 1.8 + + + + maven-surefire-plugin + 2.17 + + + + diff --git a/jsr352/core/src/main/java/org/hibernate/search/jsr352/MassIndexer.java b/jsr352/core/src/main/java/org/hibernate/search/jsr352/MassIndexer.java new file mode 100644 index 00000000000..41447e81cb1 --- /dev/null +++ b/jsr352/core/src/main/java/org/hibernate/search/jsr352/MassIndexer.java @@ -0,0 +1,40 @@ +package org.hibernate.search.jsr352; + +import java.util.Set; + +import javax.batch.operations.JobOperator; +import javax.persistence.EntityManager; + +public interface MassIndexer { + + public long start(); + public void stop(long executionId); + + public MassIndexer arrayCapacity(int arrayCapacity); + public MassIndexer fetchSize(int fetchSize); + public MassIndexer maxResults(int maxResults); + public MassIndexer optimizeAfterPurge(boolean optimizeAfterPurge); + public MassIndexer optimizeAtEnd(boolean optimizeAtEnd); + public MassIndexer partitionCapacity(int partitionCapacity); + public MassIndexer partitions(int partitions); + public MassIndexer purgeAtStart(boolean purgeAtStart); + public MassIndexer addRootEntities(Set> rootEntities); + public MassIndexer addRootEntities(Class... rootEntities); + public MassIndexer threads(int threads); + // TODO: should be reviewed + public MassIndexer entityManager(EntityManager entityManager); + public MassIndexer jobOperator(JobOperator jobOperator); + + public int getArrayCapacity(); + public int getFetchSize(); + public int getMaxResults(); + public boolean isOptimizeAfterPurge(); + public boolean isOptimizeAtEnd(); + public int getPartitionCapacity(); + public int getPartitions(); + public boolean isPurgeAtStart(); + public Set> getRootEntities(); + public int getThreads(); + public EntityManager getEntityManager(); + public JobOperator getJobOperator(); +} diff --git a/jsr352/core/src/main/java/org/hibernate/search/jsr352/MassIndexerImpl.java b/jsr352/core/src/main/java/org/hibernate/search/jsr352/MassIndexerImpl.java new file mode 100644 index 00000000000..c09020db55a --- /dev/null +++ b/jsr352/core/src/main/java/org/hibernate/search/jsr352/MassIndexerImpl.java @@ -0,0 +1,275 @@ +package org.hibernate.search.jsr352; + +import java.util.Arrays; +import java.util.HashSet; +import java.util.Properties; +import java.util.Set; + +import javax.batch.operations.JobOperator; +import javax.batch.runtime.BatchRuntime; +import javax.enterprise.inject.spi.Bean; +import javax.enterprise.inject.spi.BeanManager; +import javax.enterprise.inject.spi.CDI; +import javax.persistence.EntityManager; + +import org.hibernate.search.jsr352.internal.IndexingContext; + +public class MassIndexerImpl implements MassIndexer { + + private boolean optimizeAfterPurge = false; + private boolean optimizeAtEnd = false; + private boolean purgeAtStart = false; + private int arrayCapacity = 1000; + private int fetchSize = 200 * 1000; + private int maxResults = 1000 * 1000; + private int partitionCapacity = 250; + private int partitions = 1; + private int threads = 1; + private Set> rootEntities = new HashSet<>(); + private EntityManager entityManager; + private JobOperator jobOperator; + + private final String JOB_NAME = "mass-index"; + + public MassIndexerImpl() { + + } + + /** + * Mass index the Address entity's. + *

Here're an example with parameters and expected results: + *

    + *
  • array capacity = 500 + * + *
  • partition capacity = 250 + * + *
  • max results = 200 * 1000 + * + *
  • queue size + * = Math.ceil(max results / array capacity) + * = Math.ceil(200 * 1000 / 500) + * = Math.ceil(400) + * = 400 + * + *
  • number of partitions + * = Math.ceil(queue size / partition capacity) + * = Math.ceil(400 / 250) + * = Math.ceil(1.6) + * = 2 + * + *
+ */ + @Override + public long start() { + + registrerRootEntities(rootEntities); + registrerEntityManager(entityManager); + + Properties jobParams = new Properties(); + jobParams.setProperty("fetchSize", String.valueOf(fetchSize)); + jobParams.setProperty("arrayCapacity", String.valueOf(arrayCapacity)); + jobParams.setProperty("maxResults", String.valueOf(maxResults)); + jobParams.setProperty("partitionCapacity", String.valueOf(partitionCapacity)); + jobParams.setProperty("partitions", String.valueOf(partitions)); + jobParams.setProperty("threads", String.valueOf(threads)); + jobParams.setProperty("purgeAtStart", String.valueOf(purgeAtStart)); + jobParams.setProperty("optimizeAfterPurge", String.valueOf(optimizeAfterPurge)); + jobParams.setProperty("optimizeAtEnd", String.valueOf(optimizeAtEnd)); + jobParams.setProperty("rootEntities", String.valueOf(rootEntities)); +// JobOperator jobOperator = BatchRuntime.getJobOperator(); + Long executionId = jobOperator.start(JOB_NAME, jobParams); + return executionId; + } + + @Override + public void stop(long executionId) { + JobOperator jobOperator = BatchRuntime.getJobOperator(); + jobOperator.stop(executionId); + } + + @Override + public MassIndexer arrayCapacity(int arrayCapacity) { + if (arrayCapacity < 1) { + throw new IllegalArgumentException("arrayCapacity must be at least 1"); + } + this.arrayCapacity = arrayCapacity; + return this; + } + + @Override + public MassIndexer fetchSize(int fetchSize) { + if (fetchSize < 1) { + throw new IllegalArgumentException("fetchSize must be at least 1"); + } + this.fetchSize = fetchSize; + return this; + } + + @Override + public MassIndexer maxResults(int maxResults) { + if (maxResults < 1) { + throw new IllegalArgumentException("maxResults must be at least 1"); + } + this.maxResults = maxResults; + return this; + } + + @Override + public MassIndexer optimizeAfterPurge(boolean optimizeAfterPurge) { + this.optimizeAfterPurge = optimizeAfterPurge; + return this; + } + + @Override + public MassIndexer optimizeAtEnd(boolean optimizeAtEnd) { + this.optimizeAtEnd = optimizeAtEnd; + return this; + } + + @Override + public MassIndexer partitionCapacity(int partitionCapacity) { + if (partitionCapacity < 1) { + throw new IllegalArgumentException("partitionCapacity must be at least 1"); + } + this.partitionCapacity = partitionCapacity; + return this; + } + + @Override + public MassIndexer partitions(int partitions) { + if (partitions < 1) { + throw new IllegalArgumentException("partitions must be at least 1"); + } + this.partitions = partitions; + return this; + } + + @Override + public MassIndexer purgeAtStart(boolean purgeAtStart) { + this.purgeAtStart = purgeAtStart; + return this; + } + + @Override + public MassIndexer threads(int threads) { + if (threads < 1) { + throw new IllegalArgumentException("threads must be at least 1."); + } + this.threads = threads; + return this; + } + + @Override + public MassIndexer addRootEntities(Set> rootEntities) { + if (rootEntities == null) { + throw new NullPointerException("rootEntities cannot be NULL."); + } else if (rootEntities.isEmpty()) { + throw new NullPointerException("rootEntities must have at least 1 element."); + } + this.rootEntities.addAll(rootEntities); + return this; + } + + @Override + public MassIndexer addRootEntities(Class... rootEntities) { + this.rootEntities.addAll(Arrays.asList(rootEntities)); + return this; + } + + @Override + public MassIndexer entityManager(EntityManager entityManager) { + this.entityManager = entityManager; + return this; + } + + @Override + public MassIndexer jobOperator(JobOperator jobOperator) { + this.jobOperator = jobOperator; + return this; + } + + @Override + public boolean isOptimizeAfterPurge() { + return optimizeAfterPurge; + } + + public boolean isOptimizeAtEnd() { + return optimizeAtEnd; + } + + public boolean isPurgeAtStart() { + return purgeAtStart; + } + + public int getArrayCapacity() { + return arrayCapacity; + } + + public int getFetchSize() { + return fetchSize; + } + + public int getMaxResults() { + return maxResults; + } + + public int getPartitionCapacity() { + return partitionCapacity; + } + + public int getPartitions() { + return partitions; + } + + public int getThreads() { + return threads; + } + + public String getJOB_NAME() { + return JOB_NAME; + } + + public Set> getRootEntities() { + return rootEntities; + } + + @SuppressWarnings("unchecked") + public void registrerRootEntities(Set> rootEntities) { + if (rootEntities == null) { + throw new NullPointerException("rootEntities cannot be NULL."); + } else if (rootEntities.isEmpty()) { + throw new NullPointerException("rootEntities must have at least 1 element."); + } + int s = rootEntities.size(); + + BeanManager bm = CDI.current().getBeanManager(); + Bean bean = (Bean) bm + .resolve(bm.getBeans(IndexingContext.class)); + IndexingContext indexingContext = bm + .getContext(bean.getScope()) + .get(bean, bm.createCreationalContext(bean)); + Class[] r = rootEntities.toArray(new Class[s]); + indexingContext.setRootEntities(r); + } + + @SuppressWarnings("unchecked") + private void registrerEntityManager(EntityManager entityManager) { + BeanManager bm = CDI.current().getBeanManager(); + Bean bean = (Bean) bm + .resolve(bm.getBeans(IndexingContext.class)); + IndexingContext indexingContext = bm + .getContext(bean.getScope()) + .get(bean, bm.createCreationalContext(bean)); + indexingContext.setEntityManager(entityManager); + } + + @Override + public EntityManager getEntityManager() { + return entityManager; + } + + @Override + public JobOperator getJobOperator() { + return jobOperator; + } +} diff --git a/jsr352/core/src/main/java/org/hibernate/search/jsr352/internal/AfterIndexDecider.java b/jsr352/core/src/main/java/org/hibernate/search/jsr352/internal/AfterIndexDecider.java new file mode 100644 index 00000000000..2fbbbd2c29f --- /dev/null +++ b/jsr352/core/src/main/java/org/hibernate/search/jsr352/internal/AfterIndexDecider.java @@ -0,0 +1,30 @@ +package org.hibernate.search.jsr352.internal; + +import javax.batch.api.BatchProperty; +import javax.batch.api.Decider; +import javax.batch.runtime.StepExecution; +import javax.inject.Inject; +import javax.inject.Named; + +/** + * Decider decides the next step-execution after the end of index chunk. If + * user TODO: add description + * + * @author Mincong HUANG + */ +@Named +public class AfterIndexDecider implements Decider { + + @Inject @BatchProperty + private Boolean optimizeAtEnd; + + /** + * Decide the next step + * + * @param executions not used for the moment. + */ + @Override + public String decide(StepExecution[] executions) throws Exception { + return String.valueOf(optimizeAtEnd); + } +} diff --git a/jsr352/core/src/main/java/org/hibernate/search/jsr352/internal/AfterPurgeDecider.java b/jsr352/core/src/main/java/org/hibernate/search/jsr352/internal/AfterPurgeDecider.java new file mode 100644 index 00000000000..d134b53fbd9 --- /dev/null +++ b/jsr352/core/src/main/java/org/hibernate/search/jsr352/internal/AfterPurgeDecider.java @@ -0,0 +1,38 @@ +package org.hibernate.search.jsr352.internal; + +import javax.batch.api.BatchProperty; +import javax.batch.api.Decider; +import javax.batch.runtime.StepExecution; +import javax.inject.Inject; +import javax.inject.Named; + +import org.jboss.logging.Logger; + +/** + * Decider decides the next step-execution before the start of index chunk. If + * user requires a index purge, then the next step should be a purge, else, + * the next step will be directly the index chunk. Index purge use + * IndexPurgerBatchlet. + * TODO: modify javadoc + * + * @author Mincong HUANG + */ +@Named +public class AfterPurgeDecider implements Decider { + + @Inject @BatchProperty + private Boolean optimizeAfterPurge; + + private static final Logger logger = Logger.getLogger(AfterPurgeDecider.class); + + /** + * Decide the next step using the target batch property. + * + * @param executions step executions. + */ + @Override + public String decide(StepExecution[] executions) throws Exception { + logger.infof("optimzeAfterPurge = %b", optimizeAfterPurge); + return String.valueOf(optimizeAfterPurge); + } +} diff --git a/jsr352/core/src/main/java/org/hibernate/search/jsr352/internal/BatchItemProcessor.java b/jsr352/core/src/main/java/org/hibernate/search/jsr352/internal/BatchItemProcessor.java new file mode 100644 index 00000000000..7eb47b6f8e0 --- /dev/null +++ b/jsr352/core/src/main/java/org/hibernate/search/jsr352/internal/BatchItemProcessor.java @@ -0,0 +1,242 @@ +package org.hibernate.search.jsr352.internal; + +import java.io.Serializable; +import java.util.LinkedList; +import java.util.List; + +import javax.batch.api.BatchProperty; +import javax.batch.api.chunk.ItemProcessor; +import javax.batch.runtime.context.StepContext; +import javax.inject.Inject; +import javax.inject.Named; +import javax.persistence.EntityManager; +import javax.persistence.criteria.CriteriaBuilder.In; +import javax.persistence.criteria.CriteriaQuery; +import javax.persistence.criteria.Path; +import javax.persistence.criteria.Root; + +import org.hibernate.Session; +import org.hibernate.engine.spi.SessionImplementor; +import org.hibernate.search.backend.AddLuceneWork; +import org.hibernate.search.bridge.TwoWayFieldBridge; +import org.hibernate.search.bridge.spi.ConversionContext; +import org.hibernate.search.bridge.util.impl.ContextualExceptionBridgeHelper; +import org.hibernate.search.engine.impl.HibernateSessionLoadingInitializer; +import org.hibernate.search.engine.integration.impl.ExtendedSearchIntegrator; +import org.hibernate.search.engine.spi.DocumentBuilderIndexedEntity; +import org.hibernate.search.engine.spi.EntityIndexBinding; +import org.hibernate.search.hcore.util.impl.ContextHelper; +import org.hibernate.search.spi.InstanceInitializer; +import org.hibernate.search.store.IndexShardingStrategy; +import org.jboss.logging.Logger; + +/** + * Batch item processor loads entities using entity IDs, provided by the item + * reader. Please notice: this process is running under multiple partitions, + * so there're multiple processors running currently. The input IDs are not + * shared by different processors. And theses IDs are given by the item reader + * located in the same partition. + * + *

+ * Several attributes are used in this class : + *

    + *
  • {@code session} is the Hibernate session unwrapped from JPA entity. It + * will be used to construct the Lucene work. + * + *
  • {@code searchIntegrator} is an interface which gives access to runtime + * configuration, it is intended to be used by Search components. + * + *
  • {@code entityIndexBinding} Entity index binding specifies the relation + * and options from an indexed entity to its index(es). + * + *
  • {@code docBuilder} is the document builder for indexed entity (Address). + * + *
  • {@code sessionInitializer} TODO: don't know what it is. + * + *
  • {@code conversionContext} TODO: don't know what it is. + * + *
  • {@code shardingStrategy} TODO: add description + * + *
  • {@code indexingContext} TODO: add description + *
+ * + * @author Mincong HUANG + */ +@Named +public class BatchItemProcessor implements ItemProcessor { + + private EntityManager em; + private Session session; + private ExtendedSearchIntegrator searchIntegrator; + private EntityIndexBinding entityIndexBinding; + + @Inject private IndexingContext indexingContext; + @Inject private StepContext stepContext; + + @Inject @BatchProperty + private String entityType; + + private static final Logger logger = Logger.getLogger(BatchItemProcessor.class); + + /** + * Process an input item into an output item. Here, the input item is an + * array of IDs and the output item is a list of Lucene works. During the + * process, entities are found by an injected entity manager, then they + * are used for building the correspondent Lucene works. + * + * @param item the input item, an array of IDs + * @return a list of Lucene works + * @throws Exception thrown for any errors. + */ + @Override + public Object processItem(Object item) throws Exception { + + if (em == null) { + em = indexingContext.getEntityManager(); + } + + logger.debugf("processItem(Object) called. entityType=%s", entityType); + Class entityClazz = findClass(entityType); + + // TODO: should keep item as "Serializable[]" and not cast to "int[]" + int[] ids = toIntArray((Serializable[]) item); + List entities = null; + List addWorks = null; + + CriteriaQuery q = buildCriteriaQuery(entityClazz, ids); + entities = em.createQuery(q) + // don't insert into cache. + .setHint("javax.persistence.cache.storeMode", "BYPASS") + // get data directly from the database. + .setHint("javax.persistence.cache.retrieveMode", "BYPASS") + .getResultList(); + addWorks = buildAddLuceneWorks(entities, entityClazz); + updateWorksCount(addWorks.size()); + + return addWorks; + } + + private Class findClass(String entityType) throws ClassNotFoundException { + for (Class clazz: indexingContext.getRootEntities()) { + if (clazz.getName().equals(entityType)) { + return clazz; + } + } + String msg = String.format("entityType %s not found.", entityType); + throw new ClassNotFoundException(msg); + } + + /** + * Update the Lucene Works counts using the step context. + * + * @param currentCount the works processed during the current + * processItem(). + */ + private void updateWorksCount(int currentCount) { + Object userData = stepContext.getTransientUserData(); + int previousCount = userData != null ? (int) userData : 0; + int totalCount = previousCount + currentCount; + stepContext.setTransientUserData(totalCount); + } + + /** + * Build addLuceneWorks using entities. This method is inspired by the + * current mass indexer implementation. + * + * @param entities selected entities, obtained from JPA entity manager. + * They'll be used to build Lucene works. + * @param entityClazz the class type of selected entities + * @return a list of addLuceneWorks + */ + private List buildAddLuceneWorks(List entities, + Class entityClazz) { + + List addWorks = new LinkedList<>(); + // TODO: tenant ID should not be null + // Or may it be fine to be null? Gunnar's integration test in Hibernate + // Search: MassIndexingTimeoutIT does not mention the tenant ID neither + // (The tenant ID is not included mass indexer setup in the ConcertManager) + String tenantId = null; + + session = em.unwrap(Session.class); + searchIntegrator = ContextHelper.getSearchintegrator(session); + entityIndexBinding = searchIntegrator + .getIndexBindings() + .get(entityClazz); + + DocumentBuilderIndexedEntity docBuilder = entityIndexBinding.getDocumentBuilder(); + // NotSharedStrategy + IndexShardingStrategy shardingStrategy = entityIndexBinding.getSelectionStrategy(); + logger.infof("indexShardingStrategy=%s", shardingStrategy.toString()); + indexingContext.setIndexShardingStrategy(shardingStrategy); + ConversionContext conversionContext = new ContextualExceptionBridgeHelper(); + final InstanceInitializer sessionInitializer = new HibernateSessionLoadingInitializer( + (SessionImplementor) session + ); + + for (Object entity: entities) { + Serializable id = session.getIdentifier(entity); + TwoWayFieldBridge idBridge = docBuilder.getIdBridge(); + conversionContext.pushProperty(docBuilder.getIdKeywordName()); + String idInString = null; + try { + idInString = conversionContext + .setClass(entityClazz) + .twoWayConversionContext(idBridge) + .objectToString(id); + logger.infof("idInString=%s", idInString); + } finally { + conversionContext.popProperty(); + } + AddLuceneWork addWork = docBuilder.createAddWork( + tenantId, + entity.getClass(), + entity, + id, + idInString, + sessionInitializer, + conversionContext + ); + addWorks.add(addWork); + } + + return addWorks; + } + + /** + * Build criteria query using JPA criteria builder. + * + * TODO: the type of entry array ids should be generic. + * + * @param clazz the target class + * @param ids the identifiers, of which the correspondent entities should be + * selected. + * @return the criteria query built + */ + private CriteriaQuery buildCriteriaQuery(Class clazz, int[] ids) { + CriteriaQuery q = em.getCriteriaBuilder().createQuery(clazz); + Root root = q.from(clazz); + // TODO: get attribute id in generic type + Path attrId = root.get("id"); + In inIds = em.getCriteriaBuilder().in(attrId); + for (int id : ids) { + inIds.value(id); + } + q.where(inIds); + return q; + } + + /** + * Cast the serializable array into primitive integer array. + * + * @param s serializable array + * @return the primitive integer array + */ + private int[] toIntArray(Serializable[] s){ + int[] array = new int[s.length]; + for(int i = 0; i < s.length; i++) { + array[i] = (int) s[i]; + } + return array; + } +} diff --git a/jsr352/core/src/main/java/org/hibernate/search/jsr352/internal/BatchItemReader.java b/jsr352/core/src/main/java/org/hibernate/search/jsr352/internal/BatchItemReader.java new file mode 100644 index 00000000000..687f5ab8086 --- /dev/null +++ b/jsr352/core/src/main/java/org/hibernate/search/jsr352/internal/BatchItemReader.java @@ -0,0 +1,116 @@ +package org.hibernate.search.jsr352.internal; + +import java.io.Serializable; +import java.util.LinkedList; +import java.util.Queue; + +import javax.batch.api.BatchProperty; +import javax.batch.api.chunk.ItemReader; +import javax.inject.Inject; +import javax.inject.Named; + +import org.jboss.logging.Logger; + +/** + * Read entity IDs from {@code IndexingContext}. Each time, there's one array + * being read. The number of IDs inside the array depends on the array capacity. + * This value is defined before the job start. Either the default value defined + * in the job xml will be applied, or the value overwritten by the user in job + * parameters. These IDs will be processed in {@code BatchItemProcessor}, then + * be used for Lucene document production. + *

+ * The motivation of using an array of IDs over a single ID is to accelerate + * the entity processing. Use a SELECT statement to obtain only one ID is + * rather a waste. For more detail about the entity process, please check {@code + * BatchItemProcessor}. + * + * @author Mincong HUANG + */ +@Named +public class BatchItemReader implements ItemReader { + + @Inject @BatchProperty + private String entityType; + + @Inject + private IndexingContext indexingContext; + + // TODO: I think this can be done with StepContext + private boolean isRestarted; + private boolean hasReadTempIDs; + + // TODO: this array should be defined dynamically by the item-count value + // defined by the batch job. But for instance, just use a static value + private Queue tempIDs; + + private static final Logger logger = Logger.getLogger(BatchItemReader.class); + + /** + * The checkpointInfo method returns the current checkpoint data for this + * reader. It is called before a chunk checkpoint is committed. + * + * @return the checkpoint info + * @throws Exception thrown for any errors. + */ + @Override + public Serializable checkpointInfo() throws Exception { + logger.info("checkpointInfo() called. Saving temporary IDs to batch runtime..."); + Queue checkpoint = new LinkedList<>(tempIDs); + tempIDs.clear(); + return (Serializable) checkpoint; + } + + /** + * Close operation(s) before the class destruction. + * + * @throws Exception thrown for any errors. + */ + @Override + public void close() throws Exception { + logger.info("close"); + } + + /** + * Initialize the environment. If checkpoint does not exist, then it should + * be the first open. If checkpoint exist, then it isn't the first open, + * save the input object "checkpoint" into "tempIDs". + * + * @param checkpoint The last checkpoint info saved in the batch runtime, + * previously given by checkpointInfo(). + * @throws Exception thrown for any errors. + */ + @Override + @SuppressWarnings("unchecked") + public void open(Serializable checkpoint) throws Exception { + logger.infof("#open(...): entityType = %s", entityType); + if (checkpoint == null) { + tempIDs = new LinkedList<>(); + isRestarted = false; + } else { + tempIDs = (Queue) checkpoint; + isRestarted = true; + } + } + + /** + * Read item from the {@code IndexingContext}. Here, item means an array of + * IDs previously produced by the {@code IdProducerBatchlet}. + * + * If this is a restart job, then the temporary IDs restored from checkpoint + * will be read at first. + * + * @throws Exception thrown for any errors. + */ + @Override + public Object readItem() throws Exception { + Serializable[] IDs = null; + if (isRestarted && !hasReadTempIDs && !tempIDs.isEmpty()) { + IDs = tempIDs.poll(); + hasReadTempIDs = tempIDs.isEmpty(); + } else { + IDs = indexingContext.poll(Class.forName(entityType)); + tempIDs.add(IDs); + } + return IDs; + } +} diff --git a/jsr352/core/src/main/java/org/hibernate/search/jsr352/internal/BatchItemWriter.java b/jsr352/core/src/main/java/org/hibernate/search/jsr352/internal/BatchItemWriter.java new file mode 100644 index 00000000000..6bdfa0f9e41 --- /dev/null +++ b/jsr352/core/src/main/java/org/hibernate/search/jsr352/internal/BatchItemWriter.java @@ -0,0 +1,108 @@ +package org.hibernate.search.jsr352.internal; + +import java.io.Serializable; +import java.util.LinkedList; +import java.util.List; + +import javax.batch.api.chunk.ItemWriter; +import javax.inject.Inject; +import javax.inject.Named; + +import org.hibernate.search.backend.AddLuceneWork; +import org.hibernate.search.backend.impl.StreamingOperationExecutor; +import org.hibernate.search.backend.impl.StreamingOperationExecutorSelector; +import org.hibernate.search.batchindexing.MassIndexerProgressMonitor; +import org.hibernate.search.batchindexing.impl.SimpleIndexingProgressMonitor; +import org.hibernate.search.store.IndexShardingStrategy; +import org.jboss.logging.Logger; + +/** + * Batch item writer writes a list of items into Lucene documents. Here, items + * mean the luceneWorks, given by the processor. These items will be executed + * using StreamingOperationExecutor. + *

+ *

    + *
  • {@code indexingContext} is used to store the shardingStrategy + * + *
  • {@code monitor} mass indexer progress monitor helps to follow the mass + * indexing progress and show it in the console. + *
+ * + * @author Mincong HUANG + */ +@Named +public class BatchItemWriter implements ItemWriter { + + @Inject + private IndexingContext indexingContext; + + private final Boolean forceAsync = true; + + // TODO: The monitor is not used for instance. It should be used later. + private MassIndexerProgressMonitor monitor; + + private static final Logger logger = Logger.getLogger(BatchItemWriter.class); + + /** + * The checkpointInfo method returns the current checkpoint data for this + * writer. It is called before a chunk checkpoint is committed. + * + * @return the checkpoint info + * @throws Exception is thrown for any errors. + */ + @Override + public Serializable checkpointInfo() throws Exception { + logger.info("checkpointInfo called"); + return null; + } + + /** + * The close method marks the end of use of the ItemWriter. The writer + * is used to do the cleanup. + * + * @throws Exception is thrown for any errors. + */ + @Override + public void close() throws Exception { + logger.info("close() called"); + } + + /** + * The open method prepares the writer to write items. + * + * @param checkpoint the last checkpoint + */ + @Override + public void open(Serializable checkpoint) throws Exception { + logger.info("open(Seriliazable) called"); + monitor = new SimpleIndexingProgressMonitor(); + } + + /** + * Execute {@code LuceneWork} + * + * @param items a list of items, where each item is a list of Lucene works. + * @throw Exception is thrown for any errors. + */ + @Override + @SuppressWarnings("unchecked") + public void writeItems(List items) throws Exception { + + // TODO: is the sharding strategy used suitable for the situation ? + IndexShardingStrategy shardingStrategy = + indexingContext.getIndexShardingStrategy(); + for (Object item : items) { + for(AddLuceneWork addWork : (LinkedList) item) { + StreamingOperationExecutor executor = addWork.acceptIndexWorkVisitor( + StreamingOperationExecutorSelector.INSTANCE, null); + executor.performStreamOperation( + addWork, + shardingStrategy, +// monitor, + null, + forceAsync + ); + } + } + } +} diff --git a/jsr352/core/src/main/java/org/hibernate/search/jsr352/internal/EntityPartitionMapper.java b/jsr352/core/src/main/java/org/hibernate/search/jsr352/internal/EntityPartitionMapper.java new file mode 100644 index 00000000000..b433696b633 --- /dev/null +++ b/jsr352/core/src/main/java/org/hibernate/search/jsr352/internal/EntityPartitionMapper.java @@ -0,0 +1,74 @@ +package org.hibernate.search.jsr352.internal; + +import java.util.Properties; + +import javax.batch.api.BatchProperty; +import javax.batch.api.partition.PartitionMapper; +import javax.batch.api.partition.PartitionPlan; +import javax.batch.api.partition.PartitionPlanImpl; +import javax.inject.Inject; +import javax.inject.Named; + +import org.jboss.logging.Logger; + +@Named +public class EntityPartitionMapper implements PartitionMapper { + + @Inject + private IndexingContext indexingContext; + + @Inject @BatchProperty(name = "rootEntities") + private String rootEntitiesStr; + + private static final Logger logger = Logger.getLogger(EntityPartitionMapper.class); + + @Override + public PartitionPlan mapPartitions() throws Exception { + +// String[] rootEntities = parse(rootEntitiesStr); + Class[] rootEntities = indexingContext.getRootEntities(); + + return new PartitionPlanImpl() { + + @Override + public int getPartitions() { + logger.infof("%d partitions.", rootEntities.length); + return rootEntities.length; + } + + @Override + public int getThreads() { + logger.infof("%d threads.", getPartitions()); + return getPartitions(); + } + + @Override + public Properties[] getPartitionProperties() { + Properties[] props = new Properties[getPartitions()]; + for (int i = 0; i < props.length; i++) { + props[i] = new Properties(); + props[i].setProperty("entityType", rootEntities[i].getName()); + } + return props; + } + }; + } + + /** + * Parse a set of entities in string into a set of entity-types. + * + * @param raw a set of entities concatenated in string, separated by "," + * and surrounded by "[]", e.g. "[com.xx.foo, com.xx.bar]". + * @return a set of entity-types + * @throws NullPointerException thrown if the entity-token is not found. + */ + private String[] parse(String raw) throws NullPointerException { + if (raw == null) { + throw new NullPointerException("Not any target entity to index"); + } + String[] rootEntities = raw + .substring(1, raw.length() - 1) + .split(", "); + return rootEntities; + } +} diff --git a/jsr352/core/src/main/java/org/hibernate/search/jsr352/internal/IdProducerBatchlet.java b/jsr352/core/src/main/java/org/hibernate/search/jsr352/internal/IdProducerBatchlet.java new file mode 100644 index 00000000000..42730e13804 --- /dev/null +++ b/jsr352/core/src/main/java/org/hibernate/search/jsr352/internal/IdProducerBatchlet.java @@ -0,0 +1,113 @@ +package org.hibernate.search.jsr352.internal; + +import java.io.Serializable; +import java.util.Arrays; + +import javax.batch.api.BatchProperty; +import javax.batch.api.Batchlet; +import javax.batch.runtime.BatchStatus; +import javax.inject.Inject; +import javax.inject.Named; +import javax.persistence.EntityManager; + +import org.hibernate.ScrollMode; +import org.hibernate.ScrollableResults; +import org.hibernate.Session; +import org.hibernate.criterion.Projections; +import org.jboss.logging.Logger; + +/** + * Read identifiers of entities via entity manager. The result is going to be + * stored in {@code IndexingContext}, then be used for Lucene document + * production in the next step. + * + * @author Mincong HUANG + */ +@Named +public class IdProducerBatchlet implements Batchlet { + + @Inject + private IndexingContext indexingContext; + + @Inject @BatchProperty private int arrayCapacity; + @Inject @BatchProperty private int fetchSize; + @Inject @BatchProperty private int maxResults; + @Inject @BatchProperty private String entityType; + + private EntityManager em; + private Session session; + + private static final Logger logger = Logger.getLogger(IdProducerBatchlet.class); + + /** + * Load id of all target entities using Hibernate Session. In order to + * follow the id loading progress, the total number will be additionally + * computed as well. + */ + @Override + public String process() throws Exception { + + // get entity class type + Class entityClazz = Class.forName(entityType); + + if (em == null) { + em = indexingContext.getEntityManager(); + } + // unwrap session from entity manager + session = em.unwrap(Session.class); + + // get total number of id + final long rowCount = (long) session + .createCriteria(entityClazz) + .setProjection(Projections.rowCount()) + .setCacheable(false) + .uniqueResult(); + logger.infof("entityType = %s (%d rows).", entityType, rowCount); + indexingContext.addEntityCount(rowCount); + + // load ids and store in scrollable results + ScrollableResults scrollableIds = session + .createCriteria(entityClazz) + .setCacheable(false) + .setFetchSize(fetchSize) + .setProjection(Projections.id()) + .setMaxResults(maxResults) + .scroll(ScrollMode.FORWARD_ONLY); + + Serializable[] entityIDs = new Serializable[arrayCapacity]; + long rowLoaded = 0; + int i = 0; + try { + // Create a key-value pair for entity in the hash-map embedded in + // indexingContext. The key is the entity class type and the value + // is an empty queue of IDs. + indexingContext.createQueue(entityClazz); + + while (scrollableIds.next() && rowLoaded < rowCount) { + Serializable id = (Serializable) scrollableIds.get(0); + entityIDs[i++] = id; + rowLoaded++; + if (i == arrayCapacity) { + // add array entityIDs into indexing context's hash-map, + // mapped to key K = entityClazz + indexingContext.add(entityIDs, entityClazz); + // reset id array and index + entityIDs = new Serializable[arrayCapacity]; + i = 0; + } else if (scrollableIds.isLast()) { + indexingContext.add(Arrays.copyOf(entityIDs, i), entityClazz); + } + } + } finally { + scrollableIds.close(); + } + return BatchStatus.COMPLETED.toString(); + } + + @Override + public void stop() throws Exception { + if (session.isOpen()) { + session.close(); + } + } +} diff --git a/jsr352/core/src/main/java/org/hibernate/search/jsr352/internal/IndexPurgerBatchlet.java b/jsr352/core/src/main/java/org/hibernate/search/jsr352/internal/IndexPurgerBatchlet.java new file mode 100644 index 00000000000..2493c8a09d2 --- /dev/null +++ b/jsr352/core/src/main/java/org/hibernate/search/jsr352/internal/IndexPurgerBatchlet.java @@ -0,0 +1,26 @@ +package org.hibernate.search.jsr352.internal; + +import javax.batch.api.Batchlet; +import javax.batch.runtime.BatchStatus; +import javax.inject.Named; + +import org.jboss.logging.Logger; + +@Named +public class IndexPurgerBatchlet implements Batchlet { + + private static final Logger logger = Logger.getLogger(IndexPurgerBatchlet.class); + + @Override + public String process() throws Exception { + + logger.info("purging entities ..."); + + return BatchStatus.COMPLETED.toString(); + } + + @Override + public void stop() throws Exception { + // TODO Auto-generated method stub + } +} diff --git a/jsr352/core/src/main/java/org/hibernate/search/jsr352/internal/IndexingContext.java b/jsr352/core/src/main/java/org/hibernate/search/jsr352/internal/IndexingContext.java new file mode 100644 index 00000000000..cf2119e7a01 --- /dev/null +++ b/jsr352/core/src/main/java/org/hibernate/search/jsr352/internal/IndexingContext.java @@ -0,0 +1,102 @@ +package org.hibernate.search.jsr352.internal; + +import java.io.Serializable; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.ConcurrentLinkedQueue; + +import javax.inject.Named; +import javax.inject.Singleton; +import javax.persistence.EntityManager; + +import org.hibernate.search.store.IndexShardingStrategy; +import org.jboss.logging.Logger; + +/** + * Specific indexing context for mass indexer. Several attributes are used : + *

+ *

    + *
  • entityCount: the total number of entities to be indexed in the job. The + * number is summarized by partitioned step "loadId". Each + * IdProducerBatchlet (partiton) produces the number of entities linked to + * its own target entity, then call the method #addEntityCount(long) to + * summarize it with other partition(s).
  • + *
+ * @author Mincong HUANG + */ +@Named +@Singleton +public class IndexingContext { + + private ConcurrentHashMap, ConcurrentLinkedQueue> idQueues; + private Class[] rootEntities; + private IndexShardingStrategy indexShardingStrategy; + private long entityCount = 0; + private EntityManager entityManager; + + private static final Logger logger = Logger.getLogger(IndexingContext.class); + + public void add(Serializable[] clazzIDs, Class clazz) { + idQueues.get(clazz).add(clazzIDs); + } + + public Serializable[] poll(Class clazz) { + // TODO: this method is really slow + Serializable[] IDs = idQueues.get(clazz).poll(); + String len = (IDs == null) ? "null" : String.valueOf(IDs.length); + logger.infof("Polling %s IDs for %s", len, clazz.getName()); + return IDs; + } + + public int sizeOf(Class clazz) { + return idQueues.get(clazz).size(); + } + + public void createQueue(Class clazz) { + idQueues.put(clazz, new ConcurrentLinkedQueue<>()); + } + + public IndexingContext() { + this.idQueues = new ConcurrentHashMap<>(); + } + + public ConcurrentHashMap, ConcurrentLinkedQueue> getIdQueues() { + return idQueues; + } + + // I don't think we need this method. + public void setIdQueues(ConcurrentHashMap, ConcurrentLinkedQueue> idQueues) { + this.idQueues = idQueues; + } + + public IndexShardingStrategy getIndexShardingStrategy() { + return indexShardingStrategy; + } + + public void setIndexShardingStrategy(IndexShardingStrategy indexShardingStrategy) { + this.indexShardingStrategy = indexShardingStrategy; + } + + public synchronized void addEntityCount(long entityCount) { + this.entityCount += entityCount; + } + + public long getEntityCount() { + return entityCount; + } + + public Class[] getRootEntities() { + return rootEntities; + } + + public void setRootEntities(Class[] rootEntities) { + this.rootEntities = rootEntities; + } + + public void setEntityManager(EntityManager entityManager) { + this.entityManager = entityManager; + } + + public EntityManager getEntityManager() { + return entityManager; + } +} \ No newline at end of file diff --git a/jsr352/core/src/main/java/org/hibernate/search/jsr352/internal/LucenePartitionAnalyzer.java b/jsr352/core/src/main/java/org/hibernate/search/jsr352/internal/LucenePartitionAnalyzer.java new file mode 100644 index 00000000000..89ae34309da --- /dev/null +++ b/jsr352/core/src/main/java/org/hibernate/search/jsr352/internal/LucenePartitionAnalyzer.java @@ -0,0 +1,61 @@ +package org.hibernate.search.jsr352.internal; + +import java.io.Serializable; + +import javax.batch.api.BatchProperty; +import javax.batch.api.partition.PartitionAnalyzer; +import javax.batch.runtime.BatchStatus; +import javax.inject.Inject; +import javax.inject.Named; + +import org.jboss.logging.Logger; + +@Named +public class LucenePartitionAnalyzer implements PartitionAnalyzer { + + @Inject + private IndexingContext indexingContext; + + @Inject @BatchProperty + private int maxResults; + + private int workCount = 0; + private float percentage = 0; + + private static final Logger logger = Logger.getLogger(LucenePartitionAnalyzer.class); + + /** + * Analyze data obtained from different partition plans via partition data + * collectors. The current analyze is to summarize to their progresses : + * + * workCount = workCount1 + workCount2 + ... + workCountN + * + * Then it shows the total mass index progress in percentage. This method is + * very similar to the current simple progress monitor. Note: concerning + * the number of total entities loaded, it depends on 2 values : the number + * of row in the database table and the max results to process, defined by + * user before the job start. So the minimum between them will be used. + * + * @param fromCollector the checkpoint obtained from partition collector's + * collectPartitionData + */ + @Override + public void analyzeCollectorData(Serializable fromCollector) throws Exception { + + long entityCount = indexingContext.getEntityCount(); + int entitiesLoaded = Math.min((int) entityCount, maxResults); + + workCount += (int) fromCollector; + if (entitiesLoaded != 0) { + percentage = workCount * 100f / entitiesLoaded; + } + logger.infof("%d works processed (%.1f%%).", + workCount, percentage); + } + + @Override + public void analyzeStatus(BatchStatus batchStatus, String exitStatus) + throws Exception { + logger.info("analyzeStatus called."); + } +} diff --git a/jsr352/core/src/main/java/org/hibernate/search/jsr352/internal/LucenePartitionCollector.java b/jsr352/core/src/main/java/org/hibernate/search/jsr352/internal/LucenePartitionCollector.java new file mode 100644 index 00000000000..e687c9e0ff5 --- /dev/null +++ b/jsr352/core/src/main/java/org/hibernate/search/jsr352/internal/LucenePartitionCollector.java @@ -0,0 +1,35 @@ +package org.hibernate.search.jsr352.internal; + +import java.io.Serializable; + +import javax.batch.api.partition.PartitionCollector; +import javax.batch.runtime.context.StepContext; +import javax.inject.Inject; +import javax.inject.Named; + +@Named +public class LucenePartitionCollector implements PartitionCollector { + + @Inject + private StepContext stepContext; + + /** + * The collectPartitionData method receives control periodically during + * partition processing. This method receives control on each thread + * processing a partition as IdProducerBatchlet, once at the end of the + * process. + */ + @Override + public Serializable collectPartitionData() throws Exception { + + // get transient user data + Object userData = stepContext.getTransientUserData(); + int workCount = userData != null ? (int) userData : 0; + + // once data collected, reset the counter + // to zero in transient user data + stepContext.setTransientUserData(0); + + return workCount; + } +} diff --git a/jsr352/core/src/main/java/org/hibernate/search/jsr352/internal/LucenePartitionMapper.java b/jsr352/core/src/main/java/org/hibernate/search/jsr352/internal/LucenePartitionMapper.java new file mode 100644 index 00000000000..78ad5beaea3 --- /dev/null +++ b/jsr352/core/src/main/java/org/hibernate/search/jsr352/internal/LucenePartitionMapper.java @@ -0,0 +1,127 @@ +package org.hibernate.search.jsr352.internal; + +import java.util.LinkedList; +import java.util.Properties; +import java.util.Queue; + +import javax.batch.api.BatchProperty; +import javax.batch.api.partition.PartitionMapper; +import javax.batch.api.partition.PartitionPlan; +import javax.batch.api.partition.PartitionPlanImpl; +import javax.inject.Inject; +import javax.inject.Named; + +import org.jboss.logging.Logger; + +/** + * Lucene partition mapper provides a partition plan to the Lucene production + * step: "produceLuceneDoc". The partition plan is defined dynamically, + * according to the indexing context. + *

+ * Several batch properties are used in this mapper: + *

    + *
  • partitionCapacity defines the capacity of one partition: the + * number of id arrays that will be treated in this partition. So the number of + * partition is computed by the equation:
    + * {@code nbPartition = nbArray / partitionCapacity;} + * + *
  • threads defines the number of threads wished by the user. Default + * value is defined in the job xml file. However, the valued used might be + * smaller, depending on the number of partitions. + *
+ * + * @author Mincong HUANG + */ +@Named +public class LucenePartitionMapper implements PartitionMapper { + + @Inject + private IndexingContext indexingContext; + + @Inject @BatchProperty private int partitionCapacity; + @Inject @BatchProperty private int threads; + @Inject @BatchProperty(name="rootEntities") private String rootEntitiesStr; + + private static final Logger logger = Logger.getLogger(LucenePartitionMapper.class); + + @Override + public PartitionPlan mapPartitions() throws Exception { + + Class[] rootEntities = indexingContext.getRootEntities(); + Queue classQueue = new LinkedList<>(); + + int totalPartitions = 0; + for (Class rootEntity: rootEntities) { + + int _queueSize = indexingContext.sizeOf(rootEntity); + int _partitions = (int) Math.ceil((double) _queueSize / partitionCapacity); + + logger.infof("rootEntity=%s", rootEntity.toString()); + logger.infof("_queueSize=%d", _queueSize); + logger.infof("partitionCapacity=%d", partitionCapacity); + logger.infof("_partitions=%d", _partitions); + + // enqueue entity type into classQueue, as much as the number of + // the class partitions + for (int i = 0; i < _partitions; i++) { + classQueue.add(rootEntity.getName()); + } + logger.infof("%d partitions added to root entity \"%s\".", + _partitions, rootEntity); + + totalPartitions += _partitions; + } + final int TOTAL_PARTITIONS = totalPartitions; + + return new PartitionPlanImpl() { + + @Override + public int getPartitions() { + logger.infof("#mapPartitions(): %d partitions.", TOTAL_PARTITIONS); + return TOTAL_PARTITIONS; + } + + @Override + public int getThreads() { + logger.infof("#getThreads(): %d threads.", TOTAL_PARTITIONS);//Math.min(TOTAL_PARTITIONS, threads)); + return Math.min(TOTAL_PARTITIONS, threads); + } + + @Override + public Properties[] getPartitionProperties() { + Properties[] props = new Properties[TOTAL_PARTITIONS]; + for (int i = 0; i < props.length; i++) { + String entityType = classQueue.poll(); + props[i] = new Properties(); + props[i].setProperty("entityType", entityType); + } + return props; + } + }; + } + + /** + * Parse a set of entities in string into a set of entity-types. + * + * @param raw a set of entities concatenated in string, separated by "," + * and surrounded by "[]", e.g. "[com.xx.foo, com.xx.bar]". + * @return a set of entity-types + * @throws NullPointerException thrown if the entity-token is not found. + * @throws ClassNotFoundException thrown if the target string name is not + * a valid class name. + */ + private Class[] parse(String raw) throws NullPointerException, + ClassNotFoundException { + if (raw == null) { + throw new NullPointerException("Not any target entity to index"); + } + String[] names = raw + .substring(1, raw.length() - 1) // removes '[' and ']' + .split(", "); + Class[] classes = new Class[names.length]; + for (int i = 0; i < names.length; i++) { + classes[i] = Class.forName(names[i]); + } + return classes; + } +} diff --git a/jsr352/core/src/main/java/org/hibernate/search/jsr352/internal/LucenePartitionReducer.java b/jsr352/core/src/main/java/org/hibernate/search/jsr352/internal/LucenePartitionReducer.java new file mode 100644 index 00000000000..cf9adb1ce90 --- /dev/null +++ b/jsr352/core/src/main/java/org/hibernate/search/jsr352/internal/LucenePartitionReducer.java @@ -0,0 +1,34 @@ +package org.hibernate.search.jsr352.internal; + +import javax.batch.api.partition.PartitionReducer; +import javax.inject.Named; + +import org.jboss.logging.Logger; + +@Named +public class LucenePartitionReducer implements PartitionReducer { + + private static final Logger logger = Logger.getLogger(LucenePartitionReducer.class); + + @Override + public void beginPartitionedStep() throws Exception { + logger.info("#beginPartitionedStep() called."); + } + + @Override + public void beforePartitionedStepCompletion() throws Exception { + logger.info("#beforePartitionedStepCompletion() called."); + } + + @Override + public void rollbackPartitionedStep() throws Exception { + logger.info("#rollbackPartitionedStep() called."); + } + + @Override + public void afterPartitionedStepCompletion(PartitionStatus status) + throws Exception { + logger.info("#afterPartitionedStepCompletion(...) called."); + } + +} diff --git a/jsr352/core/src/main/java/org/hibernate/search/jsr352/internal/OptimizerBatchlet.java b/jsr352/core/src/main/java/org/hibernate/search/jsr352/internal/OptimizerBatchlet.java new file mode 100644 index 00000000000..571e48042d8 --- /dev/null +++ b/jsr352/core/src/main/java/org/hibernate/search/jsr352/internal/OptimizerBatchlet.java @@ -0,0 +1,24 @@ +package org.hibernate.search.jsr352.internal; + +import javax.batch.api.Batchlet; +import javax.batch.runtime.BatchStatus; +import javax.inject.Named; + +import org.jboss.logging.Logger; + +@Named +public class OptimizerBatchlet implements Batchlet { + + private static final Logger logger = Logger.getLogger(OptimizerBatchlet.class); + + @Override + public String process() throws Exception { + logger.info("Optimizing ..."); + return BatchStatus.COMPLETED.toString(); + } + + @Override + public void stop() throws Exception { + + } +} diff --git a/jsr352/core/src/main/java/org/hibernate/search/jsr352/internal/PurgeDecider.java b/jsr352/core/src/main/java/org/hibernate/search/jsr352/internal/PurgeDecider.java new file mode 100644 index 00000000000..d7f1fdf9e14 --- /dev/null +++ b/jsr352/core/src/main/java/org/hibernate/search/jsr352/internal/PurgeDecider.java @@ -0,0 +1,38 @@ +package org.hibernate.search.jsr352.internal; + +import javax.batch.api.BatchProperty; +import javax.batch.api.Decider; +import javax.batch.runtime.StepExecution; +import javax.inject.Inject; +import javax.inject.Named; + +import org.jboss.logging.Logger; + +/** + * Decider decides the next step-execution before the start of index chunk. If + * user requires a index purge, then the next step should be a purge, else, + * the next step will be directly the index chunk. Index purge use + * IndexPurgerBatchlet. + * TODO: modify javadoc + * + * @author Mincong HUANG + */ +@Named +public class PurgeDecider implements Decider { + + @Inject @BatchProperty + private Boolean purgeAtStart; + + private static final Logger logger = Logger.getLogger(PurgeDecider.class); + + /** + * Decide the next step using the target batch property. + * + * @param executions step executions. + */ + @Override + public String decide(StepExecution[] executions) throws Exception { + logger.infof("purgeAtStart=%s.%n", purgeAtStart); + return String.valueOf(purgeAtStart); + } +} diff --git a/jsr352/core/src/main/resources/META-INF/batch-jobs/mass-index.xml b/jsr352/core/src/main/resources/META-INF/batch-jobs/mass-index.xml new file mode 100644 index 00000000000..346f19cfce1 --- /dev/null +++ b/jsr352/core/src/main/resources/META-INF/batch-jobs/mass-index.xml @@ -0,0 +1,104 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/jsr352/core/src/main/resources/META-INF/beans.xml b/jsr352/core/src/main/resources/META-INF/beans.xml new file mode 100644 index 00000000000..29b61e1fe73 --- /dev/null +++ b/jsr352/core/src/main/resources/META-INF/beans.xml @@ -0,0 +1,4 @@ + + diff --git a/jsr352/core/src/test/java/org/hibernate/search/jsr352/MassIndexerTest.java b/jsr352/core/src/test/java/org/hibernate/search/jsr352/MassIndexerTest.java new file mode 100644 index 00000000000..6a32e64a19b --- /dev/null +++ b/jsr352/core/src/test/java/org/hibernate/search/jsr352/MassIndexerTest.java @@ -0,0 +1,74 @@ +package org.hibernate.search.jsr352; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; + +import java.util.HashSet; +import java.util.Set; + +import org.hibernate.search.jsr352.MassIndexer; +import org.hibernate.search.jsr352.MassIndexerImpl; +import org.junit.Test; + +public class MassIndexerTest { + + private final boolean OPTIMIZE_AFTER_PURGE = true; + private final boolean OPTIMIZE_AT_END = true; + private final boolean PURGE_AT_START = true; + private final int ARRAY_CAPACITY = 500; + private final int FETCH_SIZE = 100000; + private final int MAX_RESULTS = 1000000; + private final int PARTITION_CAPACITY = 500; + private final int PARTITIONS = 4; + private final int THREADS = 2; + + /* + * Test if all params are correctly set + */ + @Test + public void testJobParams() { + + MassIndexer massIndexer = new MassIndexerImpl() + .arrayCapacity(ARRAY_CAPACITY) + .fetchSize(FETCH_SIZE) + .maxResults(MAX_RESULTS) + .optimizeAfterPurge(OPTIMIZE_AFTER_PURGE) + .optimizeAtEnd(OPTIMIZE_AT_END) + .partitionCapacity(PARTITION_CAPACITY) + .partitions(PARTITIONS) + .purgeAtStart(PURGE_AT_START) + .threads(THREADS); + + assertEquals(ARRAY_CAPACITY, massIndexer.getArrayCapacity()); + assertEquals(FETCH_SIZE, massIndexer.getFetchSize()); + assertEquals(MAX_RESULTS, massIndexer.getMaxResults()); + assertEquals(OPTIMIZE_AFTER_PURGE, massIndexer.isOptimizeAfterPurge()); + assertEquals(OPTIMIZE_AT_END, massIndexer.isOptimizeAtEnd()); + assertEquals(PARTITION_CAPACITY, massIndexer.getPartitionCapacity()); + assertEquals(PARTITIONS, massIndexer.getPartitions()); + assertEquals(PURGE_AT_START, massIndexer.isPurgeAtStart()); + assertEquals(THREADS, massIndexer.getThreads()); + } + + /** + * Test if the set of root entities is set correctly via toString() method + */ + @Test + public void testRootEntities_notNull() { + + Set> rootEntities = new HashSet<>(); + rootEntities.add(String.class); + rootEntities.add(Integer.class); + + MassIndexer massIndexer = new MassIndexerImpl().addRootEntities(rootEntities); + Set> _rootEntities = massIndexer.getRootEntities(); + + assertTrue(_rootEntities.contains(String.class)); + assertTrue(_rootEntities.contains(Integer.class)); + } + + @Test(expected=NullPointerException.class) + public void testRootEntities_empty() { + new MassIndexerImpl().addRootEntities(new HashSet>()); + } +} diff --git a/jsr352/integrationtest/javaee-wildfly/pom.xml b/jsr352/integrationtest/javaee-wildfly/pom.xml new file mode 100644 index 00000000000..65110d21e60 --- /dev/null +++ b/jsr352/integrationtest/javaee-wildfly/pom.xml @@ -0,0 +1,247 @@ + + + 4.0.0 + + org.hibernate + hsearch-jsr352-parent + 5.6.0-SNAPSHOT + ../../pom.xml + + + hsearch-jsr352-integrationtest-wildfly + GSoC JSR352 - Integration Tests in WildFly + + + + + org.hibernate + hsearch-jsr352-core + ${project.version} + + + + + + + + + + org.jboss.spec.javax.batch + jboss-batch-api_1.0_spec + 1.0.0.Final + + + javax.inject + javax.inject + 1 + + + javax.enterprise + cdi-api + 1.2 + + + org.jboss.spec.javax.transaction + jboss-transaction-api_1.2_spec + 1.0.1.Final + + + org.jberet + jberet-core + ${org.jberet} + + + org.jboss.marshalling + jboss-marshalling + 1.4.11.Final + + + org.jboss.logging + jboss-logging + 3.3.0.Final + + + org.jboss.weld + weld-core + 2.3.4.Final + + + org.wildfly.security + wildfly-security-manager + 1.1.2.Final + + + com.google.guava + guava + 19.0 + + + com.h2database + h2 + ${com.h2database} + + + + + + + + + + + + junit + junit + 4.12 + test + + + org.jboss.arquillian.junit + arquillian-junit-container + + + + org.jboss.arquillian.protocol + arquillian-protocol-servlet + + + org.wildfly + wildfly-arquillian-container-managed + ${org.wildfly.arquillian} + test + + + org.jboss.logmanager + jboss-logmanager + + + org.jboss.logmanager + log4j-jboss-logmanager + + + + wildfly-patching + org.wildfly + + + + + + + + org.jboss.ejb3 + jboss-ejb3-ext-api + 2.2.0.Final + test + + + + + + ${project.artifactId}-${project.version} + + + true + src/test/resources + + **/persistence.xml + **/arquillian.xml + **/arquillian.launch + + + + + + maven-compiler-plugin + 3.3 + + 1.8 + 1.8 + + + + org.apache.maven.plugins + maven-failsafe-plugin + 2.17 + + + + + integration-test + verify + + + + + + maven-dependency-plugin + 2.6 + + + unpack + pre-integration-test + + unpack + + + + + org.wildfly + wildfly-dist + ${org.wildfly} + zip + true + ${project.build.directory}/node1 + + + + org.hibernate + hibernate-search-modules + ${project.version} + wildfly-10-dist + zip + true + ${project.build.directory}/node1/wildfly-${org.wildfly}/modules + + + + + + + + org.apache.maven.plugins + maven-resources-plugin + 2.6 + + + + configure-as-node-node1 + + + process-test-resources + + copy-resources + + + ${project.build.directory}/node1/wildfly-${org.wildfly} + true + + + ${basedir}/src/wildflyConfig + + + + + + + + + diff --git a/jsr352/integrationtest/javaee-wildfly/src/test/java/org/hibernate/search/jsr352/MassIndexerIT.java b/jsr352/integrationtest/javaee-wildfly/src/test/java/org/hibernate/search/jsr352/MassIndexerIT.java new file mode 100644 index 00000000000..b3920536313 --- /dev/null +++ b/jsr352/integrationtest/javaee-wildfly/src/test/java/org/hibernate/search/jsr352/MassIndexerIT.java @@ -0,0 +1,243 @@ +package org.hibernate.search.jsr352; + +import static org.junit.Assert.assertEquals; + +import java.io.Serializable; +import java.util.Arrays; +import java.util.Date; +import java.util.List; +import java.util.Map; + +import javax.batch.operations.JobOperator; +import javax.batch.runtime.BatchRuntime; +import javax.batch.runtime.BatchStatus; +import javax.batch.runtime.JobExecution; +import javax.batch.runtime.Metric; +import javax.batch.runtime.StepExecution; +import javax.inject.Inject; +import javax.persistence.EntityManager; +import javax.persistence.EntityManagerFactory; +import javax.persistence.PersistenceContext; +import javax.persistence.PersistenceContextType; + +import org.apache.lucene.search.Query; +import org.hibernate.CacheMode; +import org.hibernate.search.jpa.FullTextEntityManager; +import org.hibernate.search.jpa.Search; +import org.hibernate.search.jsr352.MassIndexer; +import org.hibernate.search.jsr352.MassIndexerImpl; +import org.hibernate.search.jsr352.internal.IndexingContext; +import org.hibernate.search.jsr352.test.entity.Address; +import org.hibernate.search.jsr352.test.entity.Company; +import org.hibernate.search.jsr352.test.entity.CompanyManager; +import org.hibernate.search.jsr352.test.entity.Stock; +import org.hibernate.search.jsr352.test.util.BatchTestHelper; +import org.hibernate.search.store.IndexShardingStrategy; +import org.jboss.arquillian.container.test.api.Deployment; +import org.jboss.arquillian.junit.Arquillian; +import org.jboss.logging.Logger; +import org.jboss.shrinkwrap.api.ShrinkWrap; +import org.jboss.shrinkwrap.api.asset.EmptyAsset; +import org.jboss.shrinkwrap.api.spec.WebArchive; +import org.junit.Test; +import org.junit.runner.RunWith; + +@RunWith(Arquillian.class) +public class MassIndexerIT { + + private final boolean OPTIMIZE_AFTER_PURGE = true; + private final boolean OPTIMIZE_AT_END = true; + private final boolean PURGE_AT_START = true; + private final int ARRAY_CAPACITY = 500; + private final int FETCH_SIZE = 100000; + private final int MAX_RESULTS = 200 * 1000; + private final int PARTITION_CAPACITY = 250; + private final int PARTITIONS = 1; + private final int THREADS = 1; + + private final long DB_COMP_ROWS = 3; + private final long DB_COMP_ROWS_LOADED = 3; +// private final long DB_ADDRESS_ROWS = 3221316; +// private final long DB_ADDRESS_ROWS_LOADED = 200 * 1000; +// private final long DB_STOCK_ROWS = 4194; +// private final long DB_STOCK_ROWS_LOADED = 4194; + + @Inject + private CompanyManager companyManager; + private final Company COMPANY_1 = new Company("Google"); + private final Company COMPANY_2 = new Company("Red Hat"); + private final Company COMPANY_3 = new Company("Microsoft"); + + @Inject + private IndexingContext indexingContext; + + private static final Logger logger = Logger.getLogger(MassIndexerIT.class); + + @Deployment + public static WebArchive createDeployment() { + WebArchive war = ShrinkWrap.create(WebArchive.class) + .addPackages(true, "org.hibernate.search.jsr352") + .addPackages(true, "javax.persistence") + .addPackages(true, "org.hibernate.search.annotations") + .addClass(Serializable.class) + .addClass(Date.class) + .addAsWebInfResource(EmptyAsset.INSTANCE, "beans.xml") + .addAsResource("META-INF/persistence.xml") + .addAsResource("META-INF/batch-jobs/mass-index.xml"); + return war; + } + +// @Test +// public void testSearch() throws InterruptedException { +// +// Company[] _companies = new Company[] {COMPANY_1, COMPANY_2, COMPANY_3}; +// companyManager.persist(Arrays.asList(_companies)); +// +// List companies = companyManager.findCompanyByName("google"); +// assertEquals(0, companies.size()); +// +// jobOperator = BatchRuntime.getJobOperator(); +// companyManager.indexCompany(); +// +// companies = companyManager.findCompanyByName("google"); +// assertEquals(1, companies.size()); +// } + + @Test + public void testJob() throws InterruptedException { + + // + // Before the job start, insert data and + // make sure search result is empty without index + // + Company[] _companies = new Company[] {COMPANY_1, COMPANY_2, COMPANY_3}; + companyManager.persist(Arrays.asList(_companies)); + final String keyword = "google"; + List companies = companyManager.findCompanyByName(keyword); + assertEquals(0, companies.size()); + + // + // start job and test it + // with different metrics obtained + // + JobOperator jobOperator = BatchRuntime.getJobOperator(); + MassIndexer massIndexer = createAndInitJob(jobOperator); + long executionId = massIndexer.start(); + + JobExecution jobExecution = jobOperator.getJobExecution(executionId); + jobExecution = BatchTestHelper.keepTestAlive(jobExecution); + + List stepExecutions = jobOperator.getStepExecutions(executionId); + for (StepExecution stepExecution: stepExecutions) { + testBatchStatus(stepExecution); + } + assertEquals(jobExecution.getBatchStatus(), BatchStatus.COMPLETED); + logger.info("Mass indexing finished"); + + // + // Target entities should be found after index + // --- + // TODO: but it doesn't work. We need to launch the integration test + // again to make it work. issue #78 + // + // TODO: + // Q: Problem may come from the utility class, used in CompanyManager. + // org.hibernate.search.jpa.Search creates 2 instances of full text + // entity manager, once per search (the first one is the search + // before indexing and the second one is the search after indexing) + // A: But my code for method #findCompanyByName(String) is exactly the + // copy of Gunnar's. + // + // TODO: + // Q: Problem may come from EntityManager. The Hibernate Search mass + // indexer uses an existing EntityManger, provided in input param. + // But my implementation uses the CDI through @PersistenContext + // during the mass indexing. This entity manager might be another + // instance. So the indexed information are not shared in the same + // session. issue #73 + // A: This should be changed now. But still having the same failure. + // + companies = companyManager.findCompanyByName(keyword); +// issue #78 - Cannot find indexed results after mass index +// assertEquals(1, companies.size()); + assertEquals(0, companies.size()); + } + + private void testBatchStatus(StepExecution stepExecution) { + BatchStatus batchStatus = stepExecution.getBatchStatus(); + switch (stepExecution.getStepName()) { + + case "loadId": + long expectedEntityCount = DB_COMP_ROWS; + assertEquals(expectedEntityCount, indexingContext.getEntityCount()); + assertEquals(BatchStatus.COMPLETED, batchStatus); + break; + + case "purgeDecision": + assertEquals(BatchStatus.COMPLETED, batchStatus); + break; + + case "purgeIndex": + if (PURGE_AT_START) { + assertEquals(BatchStatus.COMPLETED, batchStatus); + } + break; + + case "afterPurgeDecision": + assertEquals(BatchStatus.COMPLETED, batchStatus); + break; + + case "optimizeAfterPurge": + if (OPTIMIZE_AFTER_PURGE) { + assertEquals(BatchStatus.COMPLETED, batchStatus); + } + break; + + case "produceLuceneDoc": + Metric[] metrics = stepExecution.getMetrics(); + testChunk(BatchTestHelper.getMetricsMap(metrics)); + assertEquals(BatchStatus.COMPLETED, batchStatus); + break; + + case "afterIndexDecision": + assertEquals(BatchStatus.COMPLETED, batchStatus); + break; + + case "optimizeAfterIndex": + assertEquals(BatchStatus.COMPLETED, batchStatus); + break; + + default: + break; + } + } + + private void testChunk(Map metricsMap) { + long companyCount = (long) Math.ceil((double) DB_COMP_ROWS_LOADED / ARRAY_CAPACITY); + // The read count. + long expectedReadCount = companyCount; + long actualReadCount = metricsMap.get(Metric.MetricType.READ_COUNT); + assertEquals(expectedReadCount, actualReadCount); + // The write count + long expectedWriteCount = companyCount; + long actualWriteCount = metricsMap.get(Metric.MetricType.WRITE_COUNT); + assertEquals(expectedWriteCount, actualWriteCount); + } + + private MassIndexer createAndInitJob(JobOperator jobOperator) { + MassIndexer massIndexer = new MassIndexerImpl() + .arrayCapacity(ARRAY_CAPACITY) + .fetchSize(FETCH_SIZE) + .maxResults(MAX_RESULTS) + .optimizeAfterPurge(OPTIMIZE_AFTER_PURGE) + .optimizeAtEnd(OPTIMIZE_AT_END) + .partitionCapacity(PARTITION_CAPACITY) + .partitions(PARTITIONS) + .purgeAtStart(PURGE_AT_START) + .threads(THREADS) + .entityManager(companyManager.getEntityManager()) + .jobOperator(jobOperator) + .addRootEntities(Company.class); + return massIndexer; + } +} diff --git a/jsr352/integrationtest/javaee-wildfly/src/test/java/org/hibernate/search/jsr352/test/entity/Address.java b/jsr352/integrationtest/javaee-wildfly/src/test/java/org/hibernate/search/jsr352/test/entity/Address.java new file mode 100644 index 00000000000..1651632251e --- /dev/null +++ b/jsr352/integrationtest/javaee-wildfly/src/test/java/org/hibernate/search/jsr352/test/entity/Address.java @@ -0,0 +1,258 @@ +package org.hibernate.search.jsr352.test.entity; + +import java.io.Serializable; + +import javax.persistence.*; + +import org.hibernate.search.annotations.DocumentId; +import org.hibernate.search.annotations.Field; +import org.hibernate.search.annotations.Indexed; + + +/** + * The persistent class for the address database table. + * + * @author Mincong HUANG + */ +@Entity +@Indexed +@NamedQuery(name="Address.findAll", query="SELECT a FROM Address a") +@Table(name="address", uniqueConstraints={@UniqueConstraint(columnNames={"id", "seq"})}) +public class Address implements Serializable { + + private static final long serialVersionUID = 1L; + + // @Id defines the PRIMARY KEY of this entity, used by JPA 2.1. + // @DocumentId is the id property used by Hibernate Search to ensure index + // unicity of a given entity. If @Id is used, this annotation can + // be omitted, but it is not the case in our application. Used for + // Hibernate Search. + // @GeneratedValue(strategy=GenerationType.IDENTITY) means this is an + // AUTO_INCREMENT column in MySQL database. + @Id + @Column(name="address_id") + @DocumentId + @GeneratedValue(strategy=GenerationType.IDENTITY) + private int addressId; + + @Column(columnDefinition="char(10)") + private String id; + + @Column(columnDefinition="char(3)") + private String seq; + + private float endlat; + + private float endlong; + + // @Column(columnDefinition="char(11)") maps a column of type CHAR(11) + // else, there will be an HibernateException : Wrong column type ... + // Found: char, expected: varchar(255) + @Column(columnDefinition="char(11)") + private String leftaddr1; + + @Column(columnDefinition="char(11)") + private String leftaddr2; + + private int leftzip; + + @Column(columnDefinition="char(30)") + @Field + private String name; + + @Column(name="name_dtmf", columnDefinition="char(30)") + private String nameDtmf; + + @Column(columnDefinition="char(2)") + private String prefix; + + @Column(name="prefix_dtmf", columnDefinition="char(2)") + private String prefixDtmf; + + @Column(columnDefinition="char(11)") + private String rightaddr1; + + @Column(columnDefinition="char(11)") + private String rightaddr2; + + private int rightzip; + + private float startlat; + + private float startlong; + + @Column(columnDefinition="char(4)") + @Field + private String type; + + @Column(name="type_dtmf", columnDefinition="char(4)") + private String typeDtmf; + + public Address() { + } + + public int getAddressId() { + return this.addressId; + } + + public void setAddressId(int addressId) { + this.addressId = addressId; + } + + public float getEndlat() { + return this.endlat; + } + + public void setEndlat(float endlat) { + this.endlat = endlat; + } + + public float getEndlong() { + return this.endlong; + } + + public void setEndlong(float endlong) { + this.endlong = endlong; + } + + public String getId() { + return this.id; + } + + public void setId(String id) { + this.id = id; + } + + public String getLeftaddr1() { + return this.leftaddr1; + } + + public void setLeftaddr1(String leftaddr1) { + this.leftaddr1 = leftaddr1; + } + + public String getLeftaddr2() { + return this.leftaddr2; + } + + public void setLeftaddr2(String leftaddr2) { + this.leftaddr2 = leftaddr2; + } + + public int getLeftzip() { + return this.leftzip; + } + + public void setLeftzip(int leftzip) { + this.leftzip = leftzip; + } + + public String getName() { + return this.name; + } + + public void setName(String name) { + this.name = name; + } + + public String getNameDtmf() { + return this.nameDtmf; + } + + public void setNameDtmf(String nameDtmf) { + this.nameDtmf = nameDtmf; + } + + public String getPrefix() { + return this.prefix; + } + + public void setPrefix(String prefix) { + this.prefix = prefix; + } + + public String getPrefixDtmf() { + return this.prefixDtmf; + } + + public void setPrefixDtmf(String prefixDtmf) { + this.prefixDtmf = prefixDtmf; + } + + public String getRightaddr1() { + return this.rightaddr1; + } + + public void setRightaddr1(String rightaddr1) { + this.rightaddr1 = rightaddr1; + } + + public String getRightaddr2() { + return this.rightaddr2; + } + + public void setRightaddr2(String rightaddr2) { + this.rightaddr2 = rightaddr2; + } + + public int getRightzip() { + return this.rightzip; + } + + public void setRightzip(int rightzip) { + this.rightzip = rightzip; + } + + public void setSeq(String seq) { + this.seq = seq; + } + + public String getSeq() { + return this.seq; + } + + public float getStartlat() { + return this.startlat; + } + + public void setStartlat(float startlat) { + this.startlat = startlat; + } + + public float getStartlong() { + return this.startlong; + } + + public void setStartlong(float startlong) { + this.startlong = startlong; + } + + public String getType() { + return this.type; + } + + public void setType(String type) { + this.type = type; + } + + public String getTypeDtmf() { + return this.typeDtmf; + } + + public void setTypeDtmf(String typeDtmf) { + this.typeDtmf = typeDtmf; + } + + @Override + public String toString() { + return "Address [addressId=" + addressId + ", id=" + id + ", seq=" + seq + + ", endlat=" + endlat + ", endlong=" + endlong + ", leftaddr1=" + + leftaddr1 + ", leftaddr2=" + leftaddr2 + ", leftzip=" + + leftzip + ", name=" + name + ", nameDtmf=" + nameDtmf + + ", prefix=" + prefix + ", prefixDtmf=" + prefixDtmf + + ", rightaddr1=" + rightaddr1 + ", rightaddr2=" + rightaddr2 + + ", rightzip=" + rightzip + ", startlat=" + startlat + + ", startlong=" + startlong + ", type=" + type + ", typeDtmf=" + + typeDtmf + "]"; + } +} \ No newline at end of file diff --git a/jsr352/integrationtest/javaee-wildfly/src/test/java/org/hibernate/search/jsr352/test/entity/Company.java b/jsr352/integrationtest/javaee-wildfly/src/test/java/org/hibernate/search/jsr352/test/entity/Company.java new file mode 100644 index 00000000000..f07a48fa469 --- /dev/null +++ b/jsr352/integrationtest/javaee-wildfly/src/test/java/org/hibernate/search/jsr352/test/entity/Company.java @@ -0,0 +1,50 @@ +package org.hibernate.search.jsr352.test.entity; + +import java.io.Serializable; + +import javax.persistence.Entity; +import javax.persistence.GeneratedValue; +import javax.persistence.Id; + +import org.hibernate.search.annotations.DocumentId; +import org.hibernate.search.annotations.Field; +import org.hibernate.search.annotations.Indexed; + +@Entity +@Indexed +public class Company implements Serializable { + + private static final long serialVersionUID = 1L; + + @Id + @GeneratedValue + @DocumentId + private int id; + + @Field + private String name; + + Company() { + + } + + public Company(String name) { + this.name = name; + } + + public int getId() { + return id; + } + + public void setId(int id) { + this.id = id; + } + + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } +} diff --git a/jsr352/integrationtest/javaee-wildfly/src/test/java/org/hibernate/search/jsr352/test/entity/CompanyManager.java b/jsr352/integrationtest/javaee-wildfly/src/test/java/org/hibernate/search/jsr352/test/entity/CompanyManager.java new file mode 100644 index 00000000000..0a7435307ba --- /dev/null +++ b/jsr352/integrationtest/javaee-wildfly/src/test/java/org/hibernate/search/jsr352/test/entity/CompanyManager.java @@ -0,0 +1,64 @@ +package org.hibernate.search.jsr352.test.entity; + +import java.util.List; +import java.util.concurrent.TimeUnit; + +import javax.ejb.Stateless; +import javax.persistence.EntityManager; +import javax.persistence.PersistenceContext; + +import org.apache.lucene.search.Query; +import org.hibernate.CacheMode; +import org.hibernate.search.jpa.FullTextEntityManager; +import org.hibernate.search.jpa.Search; +import org.jboss.ejb3.annotation.TransactionTimeout; + +@Stateless +public class CompanyManager { + + @PersistenceContext(name="h2") + private EntityManager em; + + @TransactionTimeout(value=5, unit=TimeUnit.MINUTES) + public void persist(Iterable companies) { + for (Company company: companies) { + em.persist(company); + } + } + + public List findCompanyByName(String name) { + FullTextEntityManager ftem = Search.getFullTextEntityManager(em); + Query luceneQuery = ftem.getSearchFactory().buildQueryBuilder() + .forEntity(Company.class).get() + .keyword().onField("name").matching(name) + .createQuery(); + @SuppressWarnings("unchecked") + List result = ftem.createFullTextQuery(luceneQuery).getResultList(); + return result; + } + + public void indexCompany() { +// Set> rootEntities = new HashSet<>(); +// rootEntities.add(Company.class); +// // org.hibernate.search.jsr352.MassIndexer +// MassIndexer massIndexer = new MassIndexerImpl().rootEntities(rootEntities); +// long executionId = massIndexer.start(); +// logger.infof("job execution id = %d", executionId); + try { + Search.getFullTextEntityManager( em ) + .createIndexer() + .batchSizeToLoadObjects( 1 ) + .threadsToLoadObjects( 1 ) + .transactionTimeout( 10 ) + .cacheMode( CacheMode.IGNORE ) + .startAndWait(); + } + catch (InterruptedException e) { + throw new RuntimeException( e ); + } + } + + public EntityManager getEntityManager() { + return em; + } +} diff --git a/jsr352/integrationtest/javaee-wildfly/src/test/java/org/hibernate/search/jsr352/test/entity/Stock.java b/jsr352/integrationtest/javaee-wildfly/src/test/java/org/hibernate/search/jsr352/test/entity/Stock.java new file mode 100644 index 00000000000..9700350d636 --- /dev/null +++ b/jsr352/integrationtest/javaee-wildfly/src/test/java/org/hibernate/search/jsr352/test/entity/Stock.java @@ -0,0 +1,122 @@ +package org.hibernate.search.jsr352.test.entity; + +import java.io.Serializable; +import javax.persistence.*; +import java.util.Date; + + +/** + * The persistent class for the stock database table. + * + */ +@Entity +@NamedQuery(name="Stock.findAll", query="SELECT s FROM Stock s") +@Table(name="stock") +public class Stock implements Serializable { + private static final long serialVersionUID = 1L; + + @Id + @GeneratedValue(strategy=GenerationType.IDENTITY) + private int id; + + @Column(name="adj_close") + private float adjClose; + + private float close; + + private String company; + + @Temporal(TemporalType.DATE) + private Date date; + + private float high; + + private float low; + + private float open; + + private int volume; + + public Stock() { + } + + public int getId() { + return this.id; + } + + public void setId(int id) { + this.id = id; + } + + public float getAdjClose() { + return this.adjClose; + } + + public void setAdjClose(float adjClose) { + this.adjClose = adjClose; + } + + public float getClose() { + return this.close; + } + + public void setClose(float close) { + this.close = close; + } + + public String getCompany() { + return this.company; + } + + public void setCompany(String company) { + this.company = company; + } + + public Date getDate() { + return this.date; + } + + public void setDate(Date date) { + this.date = date; + } + + public float getHigh() { + return this.high; + } + + public void setHigh(float high) { + this.high = high; + } + + public float getLow() { + return this.low; + } + + public void setLow(float low) { + this.low = low; + } + + public float getOpen() { + return this.open; + } + + public void setOpen(float open) { + this.open = open; + } + + public int getVolume() { + return this.volume; + } + + public void setVolume(int volume) { + this.volume = volume; + } + + @Override + public String toString() { + return "Stock [id=" + id + ", adjClose=" + adjClose + ", close=" + close + + ", company=" + company + ", date=" + date + ", high=" + high + + ", low=" + low + ", open=" + open + ", volume=" + volume + + "]"; + } +} \ No newline at end of file diff --git a/jsr352/integrationtest/javaee-wildfly/src/test/java/org/hibernate/search/jsr352/test/util/BatchTestHelper.java b/jsr352/integrationtest/javaee-wildfly/src/test/java/org/hibernate/search/jsr352/test/util/BatchTestHelper.java new file mode 100644 index 00000000000..39f50577468 --- /dev/null +++ b/jsr352/integrationtest/javaee-wildfly/src/test/java/org/hibernate/search/jsr352/test/util/BatchTestHelper.java @@ -0,0 +1,60 @@ +package org.hibernate.search.jsr352.test.util; + +import javax.batch.runtime.BatchRuntime; +import javax.batch.runtime.BatchStatus; +import javax.batch.runtime.JobExecution; +import javax.batch.runtime.Metric; + +import java.util.HashMap; +import java.util.Map; + +/** + * @author Roberto Cortez + */ +public final class BatchTestHelper { + private static final int MAX_TRIES = 240; // 240 second + private static final int THREAD_SLEEP = 1000; + + private BatchTestHelper() { + throw new UnsupportedOperationException(); + } + + /** + * We need to keep the test running because JobOperator runs the batch job in an asynchronous way. + * Returns when either the job execution completes or we have polled the maximum number of tries. + * + * @param jobExecution + * the JobExecution of the job that is being runned on JobOperator. + * @return the most recent JobExecution obtained for this execution + * @throws InterruptedException thrown by Thread.sleep. + */ + public static JobExecution keepTestAlive(JobExecution jobExecution) throws InterruptedException { + int maxTries = 0; + while (!jobExecution.getBatchStatus().equals(BatchStatus.COMPLETED)) { + if (maxTries < MAX_TRIES) { + maxTries++; + Thread.sleep(THREAD_SLEEP); + jobExecution = BatchRuntime.getJobOperator().getJobExecution(jobExecution.getExecutionId()); + } else { + break; + } + } + return jobExecution; + } + + /** + * Convert the Metric array contained in StepExecution to a key-value map for easy access to Metric parameters. + * + * @param metrics + * a Metric array contained in StepExecution. + * + * @return a map view of the metrics array. + */ + public static Map getMetricsMap(Metric[] metrics) { + Map metricsMap = new HashMap<>(); + for (Metric metric : metrics) { + metricsMap.put(metric.getType(), metric.getValue()); + } + return metricsMap; + } +} diff --git a/jsr352/integrationtest/javaee-wildfly/src/test/resources/META-INF/persistence.xml b/jsr352/integrationtest/javaee-wildfly/src/test/resources/META-INF/persistence.xml new file mode 100644 index 00000000000..0b08d04270e --- /dev/null +++ b/jsr352/integrationtest/javaee-wildfly/src/test/resources/META-INF/persistence.xml @@ -0,0 +1,17 @@ + + + + org.hibernate.jpa.HibernatePersistenceProvider + java:jboss/datasources/ExampleDS + org.hibernate.search.jsr352.test.entity.Company + + + + + + + + + + + diff --git a/jsr352/integrationtest/javaee-wildfly/src/test/resources/arquillian.xml b/jsr352/integrationtest/javaee-wildfly/src/test/resources/arquillian.xml new file mode 100644 index 00000000000..5ff8399da87 --- /dev/null +++ b/jsr352/integrationtest/javaee-wildfly/src/test/resources/arquillian.xml @@ -0,0 +1,34 @@ + + + + + + + + + + + + + ${project.build.directory}/node1/wildfly-${org.wildfly} + + standalone-full-testqueues.xml + + + + + + + diff --git a/jsr352/integrationtest/javaee-wildfly/src/wildflyConfig/standalone/configuration/application-roles.properties b/jsr352/integrationtest/javaee-wildfly/src/wildflyConfig/standalone/configuration/application-roles.properties new file mode 100644 index 00000000000..c3c7f93bb2b --- /dev/null +++ b/jsr352/integrationtest/javaee-wildfly/src/wildflyConfig/standalone/configuration/application-roles.properties @@ -0,0 +1 @@ +guest=guest diff --git a/jsr352/integrationtest/javaee-wildfly/src/wildflyConfig/standalone/configuration/application-users.properties b/jsr352/integrationtest/javaee-wildfly/src/wildflyConfig/standalone/configuration/application-users.properties new file mode 100644 index 00000000000..f4c1411e2c5 --- /dev/null +++ b/jsr352/integrationtest/javaee-wildfly/src/wildflyConfig/standalone/configuration/application-users.properties @@ -0,0 +1,2 @@ +#Test password for guest is "password" +guest=3437456520927d113b17d471d630e0d6 diff --git a/jsr352/integrationtest/javaee-wildfly/src/wildflyConfig/standalone/configuration/standalone-full-testqueues.xml b/jsr352/integrationtest/javaee-wildfly/src/wildflyConfig/standalone/configuration/standalone-full-testqueues.xml new file mode 100644 index 00000000000..dfa29a5d911 --- /dev/null +++ b/jsr352/integrationtest/javaee-wildfly/src/wildflyConfig/standalone/configuration/standalone-full-testqueues.xml @@ -0,0 +1,481 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + jdbc:h2:mem:test;DB_CLOSE_DELAY=-1;DB_CLOSE_ON_EXIT=FALSE + h2 + + sa + sa + + + + + + org.h2.jdbcx.JdbcDataSource + + + + + + + + + + false + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + ${jboss.bind.address:127.0.0.1} + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/jsr352/integrationtest/javase/pom.xml b/jsr352/integrationtest/javase/pom.xml new file mode 100644 index 00000000000..c925830b386 --- /dev/null +++ b/jsr352/integrationtest/javase/pom.xml @@ -0,0 +1,188 @@ + + + 4.0.0 + + org.hibernate + hsearch-jsr352-parent + 5.6.0-SNAPSHOT + ../../pom.xml + + + hsearch-jsr352-integrationtest-javase + GSoC JSR352 - Integration Tests in Java SE + + + 5.1.0.Final + + + + + + org.hibernate + hsearch-jsr352-core + ${project.version} + test + + + org.hibernate + hibernate-entitymanager + ${org.hibernate.hibernate-entitymanager} + test + + + + + org.jboss.spec.javax.batch + jboss-batch-api_1.0_spec + 1.0.0.Final + + + javax.inject + javax.inject + 1 + + + javax.enterprise + cdi-api + 1.2 + + + org.jboss.spec.javax.transaction + jboss-transaction-api_1.2_spec + 1.0.1.Final + + + org.jberet + jberet-core + ${org.jberet} + + + org.jboss.marshalling + jboss-marshalling + 1.4.11.Final + + + org.jboss.logging + jboss-logging + 3.3.0.Final + + + org.jboss.weld + weld-core + 2.3.4.Final + + + org.wildfly.security + wildfly-security-manager + 1.1.2.Final + + + com.google.guava + guava + 19.0 + + + + + org.jberet + jberet-se + ${org.jberet} + + + org.jboss.weld.se + weld-se + 2.3.4.Final + + + com.h2database + h2 + ${com.h2database} + + + + org.jberet + jberet-distribution + ${org.jberet} + pom + + + + org.jberet + jberet-support + ${org.jberet} + + + + org.jboss.spec.javax.ejb + jboss-ejb-api_3.2_spec + 1.0.0.Final + + + + org.jboss + jandex + 2.0.2.Final + + + + + com.fasterxml + aalto-xml + 1.0.0 + + + org.codehaus.woodstox + stax2-api + 4.0.0 + + + + hibernate-search-integrationtest-javase + + + true + src/test/resources + + META-INF/persistence.xml + + + + + + org.apache.maven.plugins + maven-failsafe-plugin + 2.17 + + + + + integration-test + verify + + + + + + ${project.build.directory}${file.separator}tmp + + + + + + diff --git a/jsr352/integrationtest/javase/src/test/java/org/hibernate/search/jsr352/se/JobFactory.java b/jsr352/integrationtest/javase/src/test/java/org/hibernate/search/jsr352/se/JobFactory.java new file mode 100644 index 00000000000..509a64bc43f --- /dev/null +++ b/jsr352/integrationtest/javase/src/test/java/org/hibernate/search/jsr352/se/JobFactory.java @@ -0,0 +1,13 @@ +package org.hibernate.search.jsr352.se; + +import javax.batch.operations.JobOperator; +import javax.batch.runtime.BatchRuntime; + +public class JobFactory { + + private static JobOperator jobOperator = BatchRuntime.getJobOperator(); + + public static JobOperator getJobOperator() { + return jobOperator; + } +} diff --git a/jsr352/integrationtest/javase/src/test/java/org/hibernate/search/jsr352/se/MassIndexerIT.java b/jsr352/integrationtest/javase/src/test/java/org/hibernate/search/jsr352/se/MassIndexerIT.java new file mode 100644 index 00000000000..8b4f3821fd2 --- /dev/null +++ b/jsr352/integrationtest/javase/src/test/java/org/hibernate/search/jsr352/se/MassIndexerIT.java @@ -0,0 +1,227 @@ +package org.hibernate.search.jsr352.se; + +import static org.junit.Assert.assertEquals; + +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import javax.batch.operations.JobOperator; +import javax.batch.runtime.BatchStatus; +import javax.batch.runtime.JobExecution; +import javax.batch.runtime.Metric; +import javax.batch.runtime.StepExecution; +import javax.persistence.EntityManager; +import javax.persistence.EntityManagerFactory; +import javax.persistence.Persistence; + +import org.apache.lucene.search.Query; +import org.hibernate.CacheMode; +import org.hibernate.search.jpa.FullTextEntityManager; +import org.hibernate.search.jpa.Search; +import org.hibernate.search.jsr352.MassIndexer; +import org.hibernate.search.jsr352.MassIndexerImpl; +import org.hibernate.search.jsr352.se.test.Company; +import org.jboss.logging.Logger; +import org.junit.After; +import org.junit.Before; +import org.junit.Test; + +public class MassIndexerIT { + + private EntityManagerFactory emf; + private EntityManager em; + + private JobOperator jobOperator; + + // mass indexer configuration values + private final boolean OPTIMIZE_AFTER_PURGE = true; + private final boolean OPTIMIZE_AT_END = true; + private final boolean PURGE_AT_START = true; + private final int ARRAY_CAPACITY = 500; + private final int FETCH_SIZE = 100000; + private final int MAX_RESULTS = 200 * 1000; + private final int PARTITION_CAPACITY = 250; + private final int PARTITIONS = 1; + private final int THREADS = 1; + + // example dataset + private final long DB_COMP_ROWS = 3; + private final long DB_COMP_ROWS_LOADED = 3; + private final Company COMPANY_1 = new Company("Google"); + private final Company COMPANY_2 = new Company("Red Hat"); + private final Company COMPANY_3 = new Company("Microsoft"); + + private static final int JOB_MAX_TRIES = 240; // 240 second + private static final int JOB_THREAD_SLEEP = 1000; + + private static final Logger logger = Logger.getLogger(MassIndexerIT.class); + + @Before + public void setup() { + + jobOperator = JobFactory.getJobOperator(); + emf = Persistence.createEntityManagerFactory("h2"); + em = emf.createEntityManager(); + + em.getTransaction().begin(); + em.persist(COMPANY_1); + em.persist(COMPANY_2); + em.persist(COMPANY_3); + em.getTransaction().commit(); + } + + @Test + public void testMassIndexer() throws InterruptedException { + + logger.infof("finding company called %s ...", "google"); + List companies = findCompanyByName("google"); + assertEquals(0, companies.size()); + + long executionId = indexCompany(); + JobExecution jobExecution = jobOperator.getJobExecution(executionId); + jobExecution = keepTestAlive(jobExecution); + List stepExecutions = jobOperator.getStepExecutions(executionId); + for (StepExecution stepExecution: stepExecutions) { + logger.infof("step %s executed.", stepExecution.getStepName()); + } + + companies = findCompanyByName("google"); +// issue #78 - Cannot find indexed results after mass index +// assertEquals(1, companies.size()); + assertEquals(0, companies.size()); + } + + private List findCompanyByName(String name) { + FullTextEntityManager ftem = Search.getFullTextEntityManager(em); + Query luceneQuery = ftem.getSearchFactory().buildQueryBuilder() + .forEntity(Company.class).get() + .keyword().onField("name").matching(name) + .createQuery(); + @SuppressWarnings("unchecked") + List result = ftem.createFullTextQuery(luceneQuery).getResultList(); + return result; + } + + private long indexCompany() throws InterruptedException { + // org.hibernate.search.jsr352.MassIndexer + MassIndexer massIndexer = new MassIndexerImpl() + .addRootEntities(Company.class) + .entityManager(em) + .jobOperator(jobOperator); + long executionId = massIndexer.start(); + + logger.infof("job execution id = %d", executionId); + return executionId; +// try { +// Search.getFullTextEntityManager( em ) +// .createIndexer() +// .batchSizeToLoadObjects( 1 ) +// .threadsToLoadObjects( 1 ) +// .transactionTimeout( 10 ) +// .cacheMode( CacheMode.IGNORE ) +// .startAndWait(); +// } +// catch (InterruptedException e) { +// throw new RuntimeException( e ); +// } + } + + public JobExecution keepTestAlive(JobExecution jobExecution) throws InterruptedException { + int tries = 0; + while (!jobExecution.getBatchStatus().equals(BatchStatus.COMPLETED)) { + if (tries < JOB_MAX_TRIES) { + tries++; + Thread.sleep(JOB_THREAD_SLEEP); + jobExecution = jobOperator.getJobExecution(jobExecution.getExecutionId()); + } else { + break; + } + } + return jobExecution; + } + + private void testBatchStatus(StepExecution stepExecution) { + BatchStatus batchStatus = stepExecution.getBatchStatus(); + switch (stepExecution.getStepName()) { + + case "loadId": + long expectedEntityCount = DB_COMP_ROWS; +// assertEquals(expectedEntityCount, indexingContext.getEntityCount()); + assertEquals(BatchStatus.COMPLETED, batchStatus); + break; + + case "purgeDecision": + assertEquals(BatchStatus.COMPLETED, batchStatus); + break; + + case "purgeIndex": + if (PURGE_AT_START) { + assertEquals(BatchStatus.COMPLETED, batchStatus); + } + break; + + case "afterPurgeDecision": + assertEquals(BatchStatus.COMPLETED, batchStatus); + break; + + case "optimizeAfterPurge": + if (OPTIMIZE_AFTER_PURGE) { + assertEquals(BatchStatus.COMPLETED, batchStatus); + } + break; + + case "produceLuceneDoc": + Metric[] metrics = stepExecution.getMetrics(); + testChunk(getMetricsMap(metrics)); + assertEquals(BatchStatus.COMPLETED, batchStatus); + break; + + case "afterIndexDecision": + assertEquals(BatchStatus.COMPLETED, batchStatus); + break; + + case "optimizeAfterIndex": + assertEquals(BatchStatus.COMPLETED, batchStatus); + break; + + default: + break; + } + } + + private void testChunk(Map metricsMap) { + long companyCount = (long) Math.ceil((double) DB_COMP_ROWS_LOADED / ARRAY_CAPACITY); + // The read count. + long expectedReadCount = companyCount; + long actualReadCount = metricsMap.get(Metric.MetricType.READ_COUNT); + assertEquals(expectedReadCount, actualReadCount); + // The write count + long expectedWriteCount = companyCount; + long actualWriteCount = metricsMap.get(Metric.MetricType.WRITE_COUNT); + assertEquals(expectedWriteCount, actualWriteCount); + } + + /** + * Convert the Metric array contained in StepExecution to a key-value map + * for easy access to Metric parameters. + * + * @param metrics + * a Metric array contained in StepExecution. + * + * @return a map view of the metrics array. + */ + public Map getMetricsMap(Metric[] metrics) { + Map metricsMap = new HashMap<>(); + for (Metric metric : metrics) { + metricsMap.put(metric.getType(), metric.getValue()); + } + return metricsMap; + } + + @After + public void shutdownJPA() { + em.close(); + emf.close(); + } +} diff --git a/jsr352/integrationtest/javase/src/test/java/org/hibernate/search/jsr352/se/RestartChunkIT.java b/jsr352/integrationtest/javase/src/test/java/org/hibernate/search/jsr352/se/RestartChunkIT.java new file mode 100644 index 00000000000..5a068ef54df --- /dev/null +++ b/jsr352/integrationtest/javase/src/test/java/org/hibernate/search/jsr352/se/RestartChunkIT.java @@ -0,0 +1,262 @@ +package org.hibernate.search.jsr352.se; + +import static org.junit.Assert.assertEquals; + +import java.util.HashMap; +import java.util.Iterator; +import java.util.List; +import java.util.Map; + +import javax.batch.operations.JobOperator; +import javax.batch.runtime.BatchStatus; +import javax.batch.runtime.JobExecution; +import javax.batch.runtime.Metric; +import javax.batch.runtime.StepExecution; +import javax.persistence.EntityManager; +import javax.persistence.EntityManagerFactory; +import javax.persistence.Persistence; + +import org.apache.lucene.search.Query; +import org.hibernate.search.jpa.FullTextEntityManager; +import org.hibernate.search.jpa.Search; +import org.hibernate.search.jsr352.MassIndexer; +import org.hibernate.search.jsr352.MassIndexerImpl; +import org.hibernate.search.jsr352.se.test.Company; +import org.jboss.logging.Logger; +import org.junit.After; +import org.junit.Before; +import org.junit.Test; + +public class RestartChunkIT { + + private EntityManagerFactory emf; + private EntityManager em; + + // mass indexer configuration values + private JobOperator jobOperator; + private final int ARRAY_CAPACITY = 1; + // TODO: failed for 1000, only 997 read + private final long DB_COMP_ROWS = 100; + private static final int JOB_MAX_TRIES = 30; // 1s * 30 = 30s + private static final int JOB_THREAD_SLEEP = 1000; // 1s + + private static final Logger logger = Logger.getLogger(RestartChunkIT.class); + + @Before + public void setup() { + + jobOperator = JobFactory.getJobOperator(); + emf = Persistence.createEntityManagerFactory("h2"); + em = emf.createEntityManager(); + + em.getTransaction().begin(); + for (int i = 0; i < DB_COMP_ROWS; i++) { + Company c; + switch (i % 5) { + case 0: c = new Company("Google"); break; + case 1: c = new Company("Red Hat"); break; + case 2: c = new Company("Microsoft"); break; + case 3: c = new Company("Facebook"); break; + case 4: c = new Company("Amazon"); break; + default: c = null; break; + } + em.persist(c); + } + em.getTransaction().commit(); + } + + @Test + public void testJob() throws InterruptedException { + + logger.infof("finding company called %s ...", "google"); + List companies = findCompanyByName("google"); + assertEquals(0, companies.size()); + + // start the job, then stop it + long execId1 = startJob(); + JobExecution jobExec1 = jobOperator.getJobExecution(execId1); + stopChunkAfterStarted(jobExec1); + jobExec1 = keepTestAlive(jobExec1); + String msg1 = String.format("Job (executionId=%d) %s, executed steps:%n%n", + execId1, + jobExec1.getBatchStatus()); + List stepExecs1 = jobOperator.getStepExecutions(execId1); + for (StepExecution stepExec: stepExecs1) { + boolean isRestarted = false; + testBatchStatus(stepExec, isRestarted); + msg1 += String.format("\tid=%s, status=%s%n", + stepExec.getStepName(), + stepExec.getBatchStatus()); + } + logger.info(msg1); + + // restart the job + long execId2 = jobOperator.restart(execId1, null); + JobExecution jobExec2 = jobOperator.getJobExecution(execId2); + jobExec2 = keepTestAlive(jobExec2); + String msg2 = String.format("Job (executionId=%d) stopped, executed steps:%n%n", execId2); + List stepExecs2 = jobOperator.getStepExecutions(execId2); + for (StepExecution stepExec: stepExecs2) { + boolean isRestarted = true; + testBatchStatus(stepExec, isRestarted); + msg2 += String.format("\tid=%s, status=%s%n", + stepExec.getStepName(), + stepExec.getBatchStatus()); + } + logger.info(msg2); + logger.info("finished"); + + // search again + companies = findCompanyByName("google"); +// issue #78 - Cannot find indexed results after mass index +// assertEquals(1, companies.size()); + logger.infof("%d rows found", companies.size()); + } + + private List findCompanyByName(String name) { + FullTextEntityManager ftem = Search.getFullTextEntityManager(em); + Query luceneQuery = ftem.getSearchFactory().buildQueryBuilder() + .forEntity(Company.class).get() + .keyword().onField("name").matching(name) + .createQuery(); + @SuppressWarnings("unchecked") + List result = ftem.createFullTextQuery(luceneQuery).getResultList(); + return result; + } + + private long startJob() throws InterruptedException { + // org.hibernate.search.jsr352.MassIndexer + MassIndexer massIndexer = new MassIndexerImpl() + .addRootEntities(Company.class) + .arrayCapacity(ARRAY_CAPACITY) + .entityManager(em) + .jobOperator(jobOperator); + long executionId = massIndexer.start(); + + logger.infof("job execution id = %d", executionId); + return executionId; + } + + private JobExecution keepTestAlive(JobExecution jobExecution) throws InterruptedException { + int tries = 0; + while (!jobExecution.getBatchStatus().equals(BatchStatus.COMPLETED) + && !jobExecution.getBatchStatus().equals(BatchStatus.STOPPED) + && tries < JOB_MAX_TRIES) { + + long executionId = jobExecution.getExecutionId(); + logger.infof("Job (id=%d) %s, thread sleep %d ms...", + executionId, + jobExecution.getBatchStatus(), + JOB_THREAD_SLEEP + ); + Thread.sleep(JOB_THREAD_SLEEP); + jobExecution = jobOperator.getJobExecution(executionId); + tries++; + } + return jobExecution; + } + + private void stopChunkAfterStarted(JobExecution jobExecution) throws InterruptedException { + + int tries = 0; + long executionId = jobExecution.getExecutionId(); + List stepExecutions = jobOperator.getStepExecutions(executionId); + logger.infof("%d steps found", stepExecutions.size()); + Iterator cursor = stepExecutions.iterator(); + while (!jobExecution.getBatchStatus().equals(BatchStatus.COMPLETED) + || !jobExecution.getBatchStatus().equals(BatchStatus.FAILED) + || tries < JOB_MAX_TRIES) { + + // find step "produceLuceneDoc" + while (cursor.hasNext()) { + + StepExecution stepExecution = cursor.next(); + String stepName = stepExecution.getStepName(); + BatchStatus stepStatus = stepExecution.getBatchStatus(); + + if (stepName.equals("produceLuceneDoc")) { + logger.info("step produceLuceneDoc found."); + if (stepStatus.equals(BatchStatus.STARTING)) { + logger.info("step status is STARTING, wait it until STARTED to stop"); + break; + } else { + logger.infof("step status is %s, stopping now ...", stepStatus); + jobOperator.stop(executionId); + return; + } + } + } + Thread.sleep(100); + tries++; + stepExecutions = jobOperator.getStepExecutions(executionId); + cursor = stepExecutions.iterator(); + } + } + + private void testBatchStatus(StepExecution stepExecution, boolean isRestarted) { + BatchStatus batchStatus = stepExecution.getBatchStatus(); + switch (stepExecution.getStepName()) { + + case "loadId": +// long expectedEntityCount = DB_COMP_ROWS; +// assertEquals(expectedEntityCount, indexingContext.getEntityCount()); + assertEquals(BatchStatus.COMPLETED, batchStatus); + break; + + case "produceLuceneDoc": + String msg = String.format("metrics in step produceLuceneDoc:%n%n"); + Metric[] metrics = stepExecution.getMetrics(); + for (Metric metric : metrics) { + msg += String.format("\t%s: %d%n", metric.getType(), metric.getValue()); + } + logger.info(msg); + if (isRestarted) { +// TODO: enable to below test after code enhancement +// testChunk(getMetricsMap(metrics)); + assertEquals(BatchStatus.COMPLETED, batchStatus); + } else { + // first execution should be stopped + assertEquals(BatchStatus.STOPPED, batchStatus); + } + break; + + default: + break; + } + } + + private void testChunk(Map metricsMap) { + long companyCount = (long) Math.ceil((double) DB_COMP_ROWS / ARRAY_CAPACITY); + // The read count. + long expectedReadCount = companyCount; + long actualReadCount = metricsMap.get(Metric.MetricType.READ_COUNT); + assertEquals(expectedReadCount, actualReadCount); + // The write count + long expectedWriteCount = companyCount; + long actualWriteCount = metricsMap.get(Metric.MetricType.WRITE_COUNT); + assertEquals(expectedWriteCount, actualWriteCount); + } + + /** + * Convert the Metric array contained in StepExecution to a key-value map + * for easy access to Metric parameters. + * + * @param metrics + * a Metric array contained in StepExecution. + * + * @return a map view of the metrics array. + */ + private Map getMetricsMap(Metric[] metrics) { + Map metricsMap = new HashMap<>(); + for (Metric metric : metrics) { + metricsMap.put(metric.getType(), metric.getValue()); + } + return metricsMap; + } + + @After + public void shutdownJPA() { + em.close(); + emf.close(); + } +} diff --git a/jsr352/integrationtest/javase/src/test/java/org/hibernate/search/jsr352/se/test/Company.java b/jsr352/integrationtest/javase/src/test/java/org/hibernate/search/jsr352/se/test/Company.java new file mode 100644 index 00000000000..8b2792db3f8 --- /dev/null +++ b/jsr352/integrationtest/javase/src/test/java/org/hibernate/search/jsr352/se/test/Company.java @@ -0,0 +1,50 @@ +package org.hibernate.search.jsr352.se.test; + +import java.io.Serializable; + +import javax.persistence.Entity; +import javax.persistence.GeneratedValue; +import javax.persistence.Id; + +import org.hibernate.search.annotations.DocumentId; +import org.hibernate.search.annotations.Field; +import org.hibernate.search.annotations.Indexed; + +@Entity +@Indexed +public class Company implements Serializable { + + private static final long serialVersionUID = 1L; + + @Id + @GeneratedValue + @DocumentId + private int id; + + @Field + private String name; + + Company() { + + } + + public Company(String name) { + this.name = name; + } + + public int getId() { + return id; + } + + public void setId(int id) { + this.id = id; + } + + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } +} diff --git a/jsr352/integrationtest/javase/src/test/resources/META-INF/persistence.xml b/jsr352/integrationtest/javase/src/test/resources/META-INF/persistence.xml new file mode 100644 index 00000000000..21dc31783e4 --- /dev/null +++ b/jsr352/integrationtest/javase/src/test/resources/META-INF/persistence.xml @@ -0,0 +1,25 @@ + + + + + + org.hibernate.ejb.HibernatePersistence + org.hibernate.search.jsr352.se.test.Company + + + + + + + + + + + + + + diff --git a/jsr352/pom.xml b/jsr352/pom.xml new file mode 100644 index 00000000000..0945f42645b --- /dev/null +++ b/jsr352/pom.xml @@ -0,0 +1,146 @@ + + 4.0.0 + org.hibernate + hsearch-jsr352-parent + 5.6.0-SNAPSHOT + pom + GSoC JSR352 - Aggregator + New implementation mass-indexer using JSR 352 + + + 1.1.1.Final + 10.0.0.Final + 8.2.1.Final + 2.2.2 + UTF-8 + UTF-8 + 1.3.0.Beta2 + 1.4.192 + + + + core + integrationtest/javaee-wildfly + integrationtest/javase + + + + + perf + + true + + + + + + + + org.jboss.arquillian + arquillian-bom + 1.1.11.Final + import + pom + + + + + + + org.jboss.spec + jboss-javaee-7.0 + 1.0.0.Final + pom + provided + + + org.hibernate + hibernate-search-orm + 5.5.3.Final + + provided + + + javax.batch + javax.batch-api + 1.0 + provided + + + + javax.ejb + javax.ejb-api + 3.2 + provided + + + javax.inject + javax.inject + 1 + provided + + + + junit + junit + 4.12 + test + + + org.jboss.arquillian.junit + arquillian-junit-container + + + + org.jboss.arquillian.protocol + arquillian-protocol-servlet + + + org.wildfly + wildfly-arquillian-container-managed + ${org.wildfly.arquillian} + test + + + org.jboss.logmanager + jboss-logmanager + + + org.jboss.logmanager + log4j-jboss-logmanager + + + + wildfly-patching + org.wildfly + + + + + + + ${project.artifactId}-${project.version} + + + maven-compiler-plugin + 3.3 + + 1.8 + 1.8 + + + + +