From 072b947411d9c0131d68f2999fc1d059dbfc2ebc Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Yoann=20Rodi=C3=A8re?=
Date: Tue, 5 May 2020 14:53:17 +0200
Subject: [PATCH] HSEARCH-3589 Test the Lucene backend

---
 .../HibernateSearchNormalizerWrapper.java     |  2 +-
 .../lucene/analysis/impl/TokenizerChain.java  |  2 +-
 .../backend/lucene/LuceneBackendIT.java       | 80 +++++++++++++++++++
 3 files changed, 82 insertions(+), 2 deletions(-)
 create mode 100644 integrationtest/backend/lucene/src/test/java/org/hibernate/search/integrationtest/backend/lucene/LuceneBackendIT.java

diff --git a/backend/lucene/src/main/java/org/hibernate/search/backend/lucene/analysis/impl/HibernateSearchNormalizerWrapper.java b/backend/lucene/src/main/java/org/hibernate/search/backend/lucene/analysis/impl/HibernateSearchNormalizerWrapper.java
index 69635489c08..1e848a2f496 100644
--- a/backend/lucene/src/main/java/org/hibernate/search/backend/lucene/analysis/impl/HibernateSearchNormalizerWrapper.java
+++ b/backend/lucene/src/main/java/org/hibernate/search/backend/lucene/analysis/impl/HibernateSearchNormalizerWrapper.java
@@ -10,7 +10,7 @@
 
 import org.apache.lucene.analysis.AnalyzerWrapper;
 
-final class HibernateSearchNormalizerWrapper extends AnalyzerWrapper {
+public final class HibernateSearchNormalizerWrapper extends AnalyzerWrapper {
 
 	private final String normalizerName;
 
diff --git a/backend/lucene/src/main/java/org/hibernate/search/backend/lucene/analysis/impl/TokenizerChain.java b/backend/lucene/src/main/java/org/hibernate/search/backend/lucene/analysis/impl/TokenizerChain.java
index 70642ec5cc6..69ae9fbb947 100644
--- a/backend/lucene/src/main/java/org/hibernate/search/backend/lucene/analysis/impl/TokenizerChain.java
+++ b/backend/lucene/src/main/java/org/hibernate/search/backend/lucene/analysis/impl/TokenizerChain.java
@@ -18,7 +18,7 @@
 /**
  * Inspired by Apache Solr's org.apache.solr.analysis.TokenizerChain.TokenizerChain
  */
-final class TokenizerChain extends Analyzer {
+public final class TokenizerChain extends Analyzer {
 
 	private final CharFilterFactory[] charFilters;
 	private final TokenizerFactory tokenizer;
diff --git a/integrationtest/backend/lucene/src/test/java/org/hibernate/search/integrationtest/backend/lucene/LuceneBackendIT.java b/integrationtest/backend/lucene/src/test/java/org/hibernate/search/integrationtest/backend/lucene/LuceneBackendIT.java
new file mode 100644
index 00000000000..226decf0786
--- /dev/null
+++ b/integrationtest/backend/lucene/src/test/java/org/hibernate/search/integrationtest/backend/lucene/LuceneBackendIT.java
@@ -0,0 +1,80 @@
+/*
+ * Hibernate Search, full-text search for your domain model
+ *
+ * License: GNU Lesser General Public License (LGPL), version 2.1 or later
+ * See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
+ */
+package org.hibernate.search.integrationtest.backend.lucene;
+
+import static org.assertj.core.api.Assertions.assertThat;
+
+import org.hibernate.search.backend.lucene.LuceneBackend;
+import org.hibernate.search.backend.lucene.analysis.impl.HibernateSearchNormalizerWrapper;
+import org.hibernate.search.backend.lucene.analysis.impl.TokenizerChain;
+import org.hibernate.search.engine.common.spi.SearchIntegration;
+import org.hibernate.search.integrationtest.backend.tck.testsupport.configuration.DefaultAnalysisDefinitions;
+import org.hibernate.search.integrationtest.backend.tck.testsupport.util.rule.SearchSetupHelper;
+import org.hibernate.search.util.impl.integrationtest.mapper.stub.StubMappedIndex;
+import org.hibernate.search.util.impl.integrationtest.mapper.stub.StubMappingSchemaManagementStrategy;
+import org.hibernate.search.util.impl.test.annotation.TestForIssue;
+
+import org.junit.BeforeClass;
+import org.junit.ClassRule;
+import org.junit.Test;
+
+import org.apache.lucene.analysis.standard.StandardAnalyzer;
+
+public class LuceneBackendIT {
+
+	private static final String BACKEND_NAME = "MyBackend";
+
+	@ClassRule
+	public static final SearchSetupHelper setupHelper = new SearchSetupHelper();
+
+	private static final StubMappedIndex index = StubMappedIndex.withoutFields( "MainIndex" );
+
+	private static LuceneBackend backend;
+
+	@BeforeClass
+	public static void setup() {
+		SearchIntegration integration = setupHelper.start( BACKEND_NAME ).withIndex( index )
+				.withSchemaManagement( StubMappingSchemaManagementStrategy.NONE )
+				.setup();
+		backend = integration.getBackend( BACKEND_NAME ).unwrap( LuceneBackend.class );
+	}
+
+	@Test
+	@TestForIssue(jiraKey = "HSEARCH-3589")
+	public void analyzer() {
+		assertThat( backend.analyzer( DefaultAnalysisDefinitions.ANALYZER_STANDARD_ENGLISH.name ) )
+				.isNotEmpty()
+				.containsInstanceOf( StandardAnalyzer.class );
+		assertThat( backend.analyzer( DefaultAnalysisDefinitions.ANALYZER_NGRAM.name ) )
+				.isNotEmpty()
+				.containsInstanceOf( TokenizerChain.class );
+	}
+
+	@Test
+	@TestForIssue(jiraKey = "HSEARCH-3589")
+	public void analyzer_missing() {
+		assertThat( backend.analyzer( "unknown" ) ).isEmpty();
+		// Normalizers are not analyzers
+		assertThat( backend.analyzer( DefaultAnalysisDefinitions.NORMALIZER_LOWERCASE.name ) ).isEmpty();
+	}
+
+	@Test
+	@TestForIssue(jiraKey = "HSEARCH-3589")
+	public void normalizer() {
+		assertThat( backend.normalizer( DefaultAnalysisDefinitions.NORMALIZER_LOWERCASE.name ) )
+				.isNotEmpty()
+				.containsInstanceOf( HibernateSearchNormalizerWrapper.class );
+	}
+
+	@Test
+	@TestForIssue(jiraKey = "HSEARCH-3589")
+	public void normalizer_missing() {
+		assertThat( backend.normalizer( "unknown" ) ).isEmpty();
+		// Analyzers are not normalizers
+		assertThat( backend.normalizer( DefaultAnalysisDefinitions.ANALYZER_STANDARD_ENGLISH.name ) ).isEmpty();
+	}
+}
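
Note (not part of the patch): a minimal sketch of how the API exercised by the new test might be consumed from application code, assuming a LuceneBackend instance has already been obtained, for example through unwrap( LuceneBackend.class ) as in the test's setup. The class name AnalyzerInspectionSketch, the printTokens helper and the "field" string are hypothetical; only backend.analyzer( String ) returning a java.util.Optional is taken from the assertions above, and the token-stream loop is plain Lucene API.

import java.io.IOException;

import org.hibernate.search.backend.lucene.LuceneBackend;

import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;

// Hypothetical helper, not part of Hibernate Search or of this patch.
public final class AnalyzerInspectionSketch {

	private AnalyzerInspectionSketch() {
	}

	// Looks up an analyzer by name on the backend and prints the tokens it produces for the given text.
	// backend.analyzer( name ) returns an empty Optional when no analyzer was registered under that name,
	// which is what the analyzer_missing() test above asserts.
	public static void printTokens(LuceneBackend backend, String analyzerName, String text) throws IOException {
		Analyzer analyzer = backend.analyzer( analyzerName ).orElse( null );
		if ( analyzer == null ) {
			System.out.println( "No analyzer named '" + analyzerName + "'" );
			return;
		}
		// Standard Lucene token-stream consumption loop; the field name only matters for analyzers that vary per field.
		try ( TokenStream stream = analyzer.tokenStream( "field", text ) ) {
			CharTermAttribute term = stream.addAttribute( CharTermAttribute.class );
			stream.reset();
			while ( stream.incrementToken() ) {
				System.out.println( term.toString() );
			}
			stream.end();
		}
	}
}

A call such as printTokens( backend, DefaultAnalysisDefinitions.ANALYZER_STANDARD_ENGLISH.name, "Some text to tokenize" ) would then print one token per line.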