Skip to content

Commit

Permalink
#25 add the afterChunk enhancement
Browse files Browse the repository at this point in the history
  • Loading branch information
mincong-h committed Jul 17, 2016
1 parent 960a8ad commit 3afd079
Show file tree
Hide file tree
Showing 3 changed files with 87 additions and 2 deletions.
@@ -0,0 +1,76 @@
/*
* Hibernate Search, full-text search for your domain model
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later
* See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
*/
package org.hibernate.search.jsr352.internal.steps.afterChunk;

import java.util.Set;

import javax.batch.api.BatchProperty;
import javax.batch.api.Batchlet;
import javax.batch.runtime.context.JobContext;
import javax.inject.Inject;
import javax.inject.Named;
import javax.naming.InitialContext;
import javax.persistence.EntityManager;

import org.hibernate.Session;
import org.hibernate.search.backend.spi.BatchBackend;
import org.hibernate.search.hcore.util.impl.ContextHelper;
import org.hibernate.search.jsr352.internal.JobContextData;
import org.jboss.logging.Logger;

/**
* Enhancements after the chunk step "produceLuceneDoc" (lucene document
* production)
*
* @author Mincong Huang
*/
@Named
public class AfterChunkBatchlet implements Batchlet {

	private static final Logger logger = Logger.getLogger( AfterChunkBatchlet.class );

	private final JobContext jobContext;

	@Inject
	@BatchProperty
	private String persistenceUnitName;

	/**
	 * Whether the mass indexer should optimize the index after the chunk step.
	 * <p>
	 * JSR-352 batch properties are injected as {@code String} (spec section
	 * 9.3.1); injecting straight into a {@code boolean} field is non-portable
	 * and may silently leave the field {@code false} on spec-compliant
	 * implementations. The value is therefore kept as a {@code String} and
	 * parsed explicitly in {@link #process()}.
	 */
	@Inject
	@BatchProperty
	private String optimizeAtEnd;

	/**
	 * Creates the batchlet with the container-provided job context.
	 *
	 * @param jobContext the JSR-352 job context, used to retrieve the
	 *        transient job-level data set up by earlier steps
	 */
	@Inject
	public AfterChunkBatchlet(JobContext jobContext) {
		this.jobContext = jobContext;
	}

	/**
	 * Optimizes (and flushes) the indexes of all targeted entity classes if
	 * the {@code optimizeAtEnd} job parameter is {@code true}; otherwise does
	 * nothing.
	 *
	 * @return always {@code null} (no step exit status override)
	 * @throws Exception if the JNDI lookup of the entity manager fails or the
	 *         backend optimization/flush fails
	 */
	@Override
	public String process() throws Exception {

		if ( Boolean.parseBoolean( this.optimizeAtEnd ) ) {

			// Look up the container-managed entity manager and obtain the
			// batch backend bound to its underlying Hibernate session.
			// The EM is JNDI/container-managed, so it is not closed here.
			logger.info( "looking up entity manager and batch backend ..." );
			String path = "java:comp/env/" + persistenceUnitName;
			EntityManager em = (EntityManager) InitialContext.doLookup( path );
			Session session = em.unwrap( Session.class );
			final BatchBackend backend = ContextHelper
					.getSearchintegrator( session )
					.makeBatchBackend( null );

			logger.info( "optimizing all entities ..." );
			// The targeted entity classes were placed in the job-scoped
			// transient user data by an earlier step of this job.
			JobContextData jobData = (JobContextData) jobContext.getTransientUserData();
			Set<Class<?>> targetedClasses = jobData.getEntityClazzSet();
			backend.optimize( targetedClasses );
			backend.flush( targetedClasses );
		}
		return null;
	}

	/**
	 * No-op: this batchlet's work is short-lived and not interruptible, so
	 * there is nothing to cancel on a stop request.
	 */
	@Override
	public void stop() throws Exception {
		// intentionally empty
	}
}
11 changes: 10 additions & 1 deletion core/src/main/resources/META-INF/batch-jobs/mass-index.xml
Expand Up @@ -27,7 +27,7 @@
</batchlet>
</step>

<step id="produceLuceneDoc">
<step id="produceLuceneDoc" next="afterChunk">
<chunk checkpoint-policy="custom">
<reader ref="itemReader">
<properties>
Expand Down Expand Up @@ -69,4 +69,13 @@
<reducer ref="partitionReducer" />
</partition>
</step>

<step id="afterChunk">
<batchlet ref="afterChunkBatchlet">
<properties>
<property name="optimizeAtEnd" value="#{jobParameters['optimizeAtEnd']}" />
<property name="persistenceUnitName" value="#{jobParameters['persistenceUnitName']}" />
</properties>
</batchlet>
</step>
</job>
Expand Up @@ -49,7 +49,7 @@ public class MassIndexerIT {
private static final Logger logger = Logger.getLogger( MassIndexerIT.class );

private final boolean JOB_OPTIMIZE_AFTER_PURGE = true;
private final boolean JOB_OPTIMIZE_AT_END = false;
private final boolean JOB_OPTIMIZE_AT_END = true;
private final boolean JOB_PURGE_AT_START = true;
private final int JOB_FETCH_SIZE = 100 * 1000;
private final int JOB_MAX_RESULTS = 200 * 1000;
Expand Down

0 comments on commit 3afd079

Please sign in to comment.