Skip to content

Commit

Permalink
HSEARCH-3269 Integrate Search 6 workspace for admin operations
Browse files Browse the repository at this point in the history
  • Loading branch information
fax4ever committed Oct 15, 2020
1 parent b37411c commit d3cd9d2
Show file tree
Hide file tree
Showing 2 changed files with 11 additions and 21 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -15,13 +15,12 @@
import javax.inject.Inject;
import javax.persistence.EntityManagerFactory;

import org.hibernate.Session;
import org.hibernate.search.batch.jsr352.logging.impl.Log;
import org.hibernate.search.batch.jsr352.massindexing.MassIndexingJobParameters;
import org.hibernate.search.batch.jsr352.massindexing.MassIndexingJobParameters.Defaults;
import org.hibernate.search.batch.jsr352.massindexing.impl.JobContextData;
import org.hibernate.search.batch.jsr352.massindexing.impl.util.PersistenceUtil;
import org.hibernate.search.batch.jsr352.massindexing.impl.util.SerializationUtil;
import org.hibernate.search.mapper.orm.Search;
import org.hibernate.search.util.common.logging.impl.LoggerFactory;

/**
Expand All @@ -44,8 +43,6 @@ public class AfterChunkBatchlet extends AbstractBatchlet {
@BatchProperty(name = MassIndexingJobParameters.TENANT_ID)
private String tenantId;

private Session session;

@Override
public String process() throws Exception {
boolean optimizeOnFinish = SerializationUtil.parseBooleanParameterOptional(
Expand All @@ -57,14 +54,8 @@ public String process() throws Exception {

JobContextData jobData = (JobContextData) jobContext.getTransientUserData();
EntityManagerFactory emf = jobData.getEntityManagerFactory();
session = PersistenceUtil.openSession( emf, tenantId );
// TODO HSEARCH-3269 merge segments
Search.mapping( emf ).scope( Object.class ).workspace( tenantId ).mergeSegments();
}
return null;
}

// JSR-352 batchlet lifecycle callback: invoked by the batch runtime when the job
// receives a stop request, to release resources held by process().
@Override
public void stop() throws Exception {
// NOTE(review): 'session' is only assigned inside process(); if the runtime calls
// stop() before process() has opened it, this throws NullPointerException — a null
// check would be safer. (This commit removes the field entirely, making it moot.)
session.close();
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -16,13 +16,13 @@
import javax.inject.Inject;
import javax.persistence.EntityManagerFactory;

import org.hibernate.Session;
import org.hibernate.search.batch.jsr352.logging.impl.Log;
import org.hibernate.search.batch.jsr352.massindexing.MassIndexingJobParameters;
import org.hibernate.search.batch.jsr352.massindexing.MassIndexingJobParameters.Defaults;
import org.hibernate.search.batch.jsr352.massindexing.impl.JobContextData;
import org.hibernate.search.batch.jsr352.massindexing.impl.util.PersistenceUtil;
import org.hibernate.search.batch.jsr352.massindexing.impl.util.SerializationUtil;
import org.hibernate.search.mapper.orm.Search;
import org.hibernate.search.mapper.orm.work.SearchWorkspace;
import org.hibernate.search.util.common.logging.impl.LoggerFactory;

/**
Expand Down Expand Up @@ -61,16 +61,15 @@ public String process() throws Exception {
if ( purgeAllOnStart ) {
JobContextData jobData = (JobContextData) jobContext.getTransientUserData();
EntityManagerFactory emf = jobData.getEntityManagerFactory();
try ( Session session = PersistenceUtil.openSession( emf, tenantId ) ) {
// TODO HSEARCH-3269 purge
SearchWorkspace workspace = Search.mapping( emf ).scope( Object.class ).workspace( tenantId );
workspace.purge();

// This is necessary because the batchlet is not executed inside a transaction
// TODO HSEARCH-3269 flush
// This is necessary because the batchlet is not executed inside a transaction
workspace.flush();

if ( optimizeAfterPurge ) {
log.startOptimization();
// TODO HSEARCH-3269 merge segments
}
if ( optimizeAfterPurge ) {
log.startOptimization();
workspace.mergeSegments();
}
}
return null;
Expand Down

0 comments on commit d3cd9d2

Please sign in to comment.