Skip to content
Browse files

[feature] facets on nodes search

svn path=/branches/rangeindex/; revision=18754
  • Loading branch information...
1 parent 904c1a1 commit 1603b0e61019a5150efd01a2d7e46cd2720e7699 @shabanovd shabanovd committed Jul 22, 2013
View
2 extensions/indexes/lucene/src/org/exist/indexing/lucene/LuceneIndexWorker.java
@@ -1375,7 +1375,7 @@ public float getScore() {
return score;
}
- private void setScore(float score) {
+ protected void setScore(float score) {
this.score = score;
}
View
1 extensions/indexes/lucene/src/org/exist/indexing/lucene/MarkableTokenFilter.java
@@ -2,7 +2,6 @@
import org.apache.lucene.analysis.TokenFilter;
import org.apache.lucene.analysis.TokenStream;
-import org.apache.lucene.analysis.Token;
import java.util.List;
import java.util.LinkedList;
View
220 extensions/indexes/lucene/src/org/exist/indexing/lucene/QueryDocuments.java
@@ -22,50 +22,19 @@
package org.exist.indexing.lucene;
import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.HashSet;
import java.util.List;
import java.util.Properties;
-import java.util.Set;
import org.apache.lucene.analysis.Analyzer;
-import org.apache.lucene.facet.encoding.DGapVInt8IntDecoder;
-import org.apache.lucene.facet.params.CategoryListParams;
import org.apache.lucene.facet.params.FacetSearchParams;
-import org.apache.lucene.facet.params.CategoryListParams.OrdinalPolicy;
-import org.apache.lucene.facet.search.CountingFacetsAggregator;
-import org.apache.lucene.facet.search.FacetArrays;
-import org.apache.lucene.facet.search.FacetRequest;
import org.apache.lucene.facet.search.FacetResult;
-import org.apache.lucene.facet.search.FacetResultNode;
-import org.apache.lucene.facet.search.FacetResultsHandler;
-import org.apache.lucene.facet.search.FacetsAggregator;
-import org.apache.lucene.facet.search.FastCountingFacetsAggregator;
-import org.apache.lucene.facet.search.FloatFacetResultsHandler;
-import org.apache.lucene.facet.search.IntFacetResultsHandler;
-import org.apache.lucene.facet.search.TopKFacetResultsHandler;
-import org.apache.lucene.facet.search.TopKInEachNodeHandler;
-import org.apache.lucene.facet.search.FacetRequest.FacetArraysSource;
-import org.apache.lucene.facet.search.FacetRequest.ResultMode;
-import org.apache.lucene.facet.search.FacetRequest.SortOrder;
-import org.apache.lucene.facet.search.FacetsCollector.MatchingDocs;
-import org.apache.lucene.facet.taxonomy.ParallelTaxonomyArrays;
import org.apache.lucene.facet.taxonomy.TaxonomyReader;
-import org.apache.lucene.facet.util.PartitionsUtils;
-import org.apache.lucene.index.AtomicReader;
-import org.apache.lucene.index.AtomicReaderContext;
-import org.apache.lucene.index.NumericDocValues;
import org.apache.lucene.queryparser.classic.ParseException;
import org.apache.lucene.queryparser.classic.QueryParser;
-import org.apache.lucene.search.Collector;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
-import org.apache.lucene.search.Scorer;
import org.apache.lucene.util.ArrayUtil;
-import org.apache.lucene.util.FixedBitSet;
import org.exist.Database;
-import org.exist.dom.DefaultDocumentSet;
import org.exist.dom.DocumentImpl;
import org.exist.dom.DocumentSet;
import org.exist.dom.QName;
@@ -78,13 +47,9 @@
*/
public class QueryDocuments {
- public interface SearchCallback {
- public void found(DocumentImpl document, float score);
- }
-
public static List<FacetResult> query(LuceneIndexWorker worker, DocumentSet docs,
Query query, FacetSearchParams searchParams,
- SearchCallback callback)
+ SearchCallback<DocumentImpl> callback)
throws IOException, ParseException, TerminatedException {
final LuceneIndex index = worker.index;
@@ -106,7 +71,7 @@
public static List<FacetResult> query(LuceneIndexWorker worker, DocumentSet docs,
List<QName> qnames, String queryStr, FacetSearchParams searchParams, Properties options,
- SearchCallback callback) throws IOException, ParseException,
+ SearchCallback<DocumentImpl> callback) throws IOException, ParseException,
TerminatedException {
qnames = worker.getDefinedIndexes(qnames);
@@ -145,86 +110,21 @@
}
}
- private static class DocumentHitCollector extends Collector {
-
- private Scorer scorer;
+ private static class DocumentHitCollector extends QueryFacetCollector {
- private AtomicReaderContext context;
- private AtomicReader reader;
- private NumericDocValues docIdValues;
-
- private final DocumentSet docs;
- private final SearchCallback callback;
-
- protected final List<MatchingDocs> matchingDocs = new ArrayList<MatchingDocs>();
- protected final FacetArrays facetArrays;
-
- protected final TaxonomyReader taxonomyReader;
- protected final FacetSearchParams searchParams;
-
- private int totalHits;
- private FixedBitSet bits;
- private float[] scores;
-
- private DefaultDocumentSet docbits;
- //private FixedBitSet docbits;
+ private final SearchCallback<DocumentImpl> callback;
private DocumentHitCollector(
final DocumentSet docs,
- final SearchCallback callback,
+ final SearchCallback<DocumentImpl> callback,
final FacetSearchParams searchParams,
final TaxonomyReader taxonomyReader) {
- this.docs = docs;
- this.callback = callback;
+ super(docs, searchParams, taxonomyReader);
- this.searchParams = searchParams;
- this.taxonomyReader = taxonomyReader;
-
-// this.facetArrays = new FacetArrays(taxonomyReader.getSize());
-
- this.facetArrays = new FacetArrays(
- PartitionsUtils.partitionSize(searchParams.indexingParams, taxonomyReader));
-
- docbits = new DefaultDocumentSet(1031);//docs.getDocumentCount());
- //docbits = new FixedBitSet(docs.getDocumentCount());
-
- }
-
- @Override
- public void setScorer(Scorer scorer) throws IOException {
- this.scorer = scorer;
- }
-
- @Override
- public void setNextReader(AtomicReaderContext atomicReaderContext)
- throws IOException {
- reader = atomicReaderContext.reader();
- docIdValues = reader.getNumericDocValues(LuceneUtil.FIELD_DOC_ID);
-
- if (bits != null) {
- matchingDocs.add(new MatchingDocs(context, bits, totalHits, scores));
- }
- bits = new FixedBitSet(reader.maxDoc());
- totalHits = 0;
- scores = new float[64]; // some initial size
- context = atomicReaderContext;
- }
-
- protected void finish() {
- if (bits != null) {
- matchingDocs.add(new MatchingDocs(this.context, bits, totalHits, scores));
- bits = null;
- scores = null;
- context = null;
- }
- }
-
- @Override
- public boolean acceptsDocsOutOfOrder() {
- return false;
+ this.callback = callback;
}
@Override
@@ -261,111 +161,5 @@ public void collect(int doc) {
e.printStackTrace();
}
}
-
- private boolean verifySearchParams(FacetSearchParams fsp) {
- // verify that all category lists were encoded with DGapVInt
- for (FacetRequest fr : fsp.facetRequests) {
- CategoryListParams clp = fsp.indexingParams.getCategoryListParams(fr.categoryPath);
- if (clp.createEncoder().createMatchingDecoder().getClass() != DGapVInt8IntDecoder.class) {
- return false;
- }
- }
-
- return true;
- }
-
- private FacetsAggregator getAggregator() {
- if (verifySearchParams(searchParams)) {
- return new FastCountingFacetsAggregator();
- } else {
- return new CountingFacetsAggregator();
- }
- }
-
- private Set<CategoryListParams> getCategoryLists() {
- if (searchParams.indexingParams.getAllCategoryListParams().size() == 1) {
- return Collections.singleton(searchParams.indexingParams.getCategoryListParams(null));
- }
-
- HashSet<CategoryListParams> clps = new HashSet<CategoryListParams>();
- for (FacetRequest fr : searchParams.facetRequests) {
- clps.add(searchParams.indexingParams.getCategoryListParams(fr.categoryPath));
- }
- return clps;
- }
-
- private FacetResultsHandler createFacetResultsHandler(FacetRequest fr) {
- if (fr.getDepth() == 1 && fr.getSortOrder() == SortOrder.DESCENDING) {
- FacetArraysSource fas = fr.getFacetArraysSource();
- if (fas == FacetArraysSource.INT) {
- return new IntFacetResultsHandler(taxonomyReader, fr, facetArrays);
- }
-
- if (fas == FacetArraysSource.FLOAT) {
- return new FloatFacetResultsHandler(taxonomyReader, fr, facetArrays);
- }
- }
-
- if (fr.getResultMode() == ResultMode.PER_NODE_IN_TREE) {
- return new TopKInEachNodeHandler(taxonomyReader, fr, facetArrays);
- }
- return new TopKFacetResultsHandler(taxonomyReader, fr, facetArrays);
- }
-
- private static FacetResult emptyResult(int ordinal, FacetRequest fr) {
- FacetResultNode root = new FacetResultNode(ordinal, 0);
- root.label = fr.categoryPath;
- return new FacetResult(fr, root, 0);
- }
-
- List<FacetResult> facetResults = null;
-
- public List<FacetResult> getFacetResults() throws IOException {
- if (facetResults == null) {
- finish();
- facetResults = accumulate();
- }
- return facetResults;
- }
-
- private List<FacetResult> accumulate() throws IOException {
-
- // aggregate facets per category list (usually only one category list)
- FacetsAggregator aggregator = getAggregator();
- for (CategoryListParams clp : getCategoryLists()) {
- for (MatchingDocs md : matchingDocs) {
- aggregator.aggregate(md, clp, facetArrays);
- }
- }
-
- ParallelTaxonomyArrays arrays = taxonomyReader.getParallelTaxonomyArrays();
-
- // compute top-K
- final int[] children = arrays.children();
- final int[] siblings = arrays.siblings();
- List<FacetResult> res = new ArrayList<FacetResult>();
- for (FacetRequest fr : searchParams.facetRequests) {
- int rootOrd = taxonomyReader.getOrdinal(fr.categoryPath);
- // category does not exist
- if (rootOrd == TaxonomyReader.INVALID_ORDINAL) {
- // Add empty FacetResult
- res.add(emptyResult(rootOrd, fr));
- continue;
- }
- CategoryListParams clp = searchParams.indexingParams.getCategoryListParams(fr.categoryPath);
- // someone might ask to aggregate ROOT category
- if (fr.categoryPath.length > 0) {
- OrdinalPolicy ordinalPolicy = clp.getOrdinalPolicy(fr.categoryPath.components[0]);
- if (ordinalPolicy == OrdinalPolicy.NO_PARENTS) {
- // rollup values
- aggregator.rollupValues(fr, rootOrd, children, siblings, facetArrays);
- }
- }
-
- FacetResultsHandler frh = createFacetResultsHandler(fr);
- res.add(frh.compute());
- }
- return res;
- }
}
}
View
254 extensions/indexes/lucene/src/org/exist/indexing/lucene/QueryFacetCollector.java
@@ -0,0 +1,254 @@
+/*
+ * eXist Open Source Native XML Database
+ * Copyright (C) 2013 The eXist Project
+ * http://exist-db.org
+ *
+ * This program is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public License
+ * as published by the Free Software Foundation; either version 2
+ * of the License, or (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with this library; if not, write to the Free Software
+ * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
+ *
+ * $Id$
+ */
+package org.exist.indexing.lucene;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Set;
+
+import org.apache.lucene.facet.encoding.DGapVInt8IntDecoder;
+import org.apache.lucene.facet.params.CategoryListParams;
+import org.apache.lucene.facet.params.FacetSearchParams;
+import org.apache.lucene.facet.params.CategoryListParams.OrdinalPolicy;
+import org.apache.lucene.facet.search.CountingFacetsAggregator;
+import org.apache.lucene.facet.search.FacetArrays;
+import org.apache.lucene.facet.search.FacetRequest;
+import org.apache.lucene.facet.search.FacetResult;
+import org.apache.lucene.facet.search.FacetResultNode;
+import org.apache.lucene.facet.search.FacetResultsHandler;
+import org.apache.lucene.facet.search.FacetsAggregator;
+import org.apache.lucene.facet.search.FastCountingFacetsAggregator;
+import org.apache.lucene.facet.search.FloatFacetResultsHandler;
+import org.apache.lucene.facet.search.IntFacetResultsHandler;
+import org.apache.lucene.facet.search.TopKFacetResultsHandler;
+import org.apache.lucene.facet.search.TopKInEachNodeHandler;
+import org.apache.lucene.facet.search.FacetRequest.FacetArraysSource;
+import org.apache.lucene.facet.search.FacetRequest.ResultMode;
+import org.apache.lucene.facet.search.FacetRequest.SortOrder;
+import org.apache.lucene.facet.search.FacetsCollector.MatchingDocs;
+import org.apache.lucene.facet.taxonomy.ParallelTaxonomyArrays;
+import org.apache.lucene.facet.taxonomy.TaxonomyReader;
+import org.apache.lucene.facet.util.PartitionsUtils;
+import org.apache.lucene.index.AtomicReader;
+import org.apache.lucene.index.AtomicReaderContext;
+import org.apache.lucene.index.NumericDocValues;
+import org.apache.lucene.search.Collector;
+import org.apache.lucene.search.Scorer;
+import org.apache.lucene.util.FixedBitSet;
+import org.exist.dom.DefaultDocumentSet;
+import org.exist.dom.DocumentSet;
+
+/**
+ * @author <a href="mailto:shabanovd@gmail.com">Dmitriy Shabanov</a>
+ *
+ */
+public abstract class QueryFacetCollector extends Collector {
+
+    // Scorer for the current segment; supplied by Lucene via setScorer()
+    // before any collect() calls for that segment.
+    protected Scorer scorer;
+
+    // Per-segment state: the reader context, its reader, and the numeric
+    // doc-values field holding eXist's stored document id (FIELD_DOC_ID).
+    protected AtomicReaderContext context;
+    protected AtomicReader reader;
+    protected NumericDocValues docIdValues;
+
+    // Document set the query is restricted to; subclasses use it in collect().
+    protected final DocumentSet docs;
+
+    // One MatchingDocs per finished segment; consumed by accumulate().
+    protected final List<MatchingDocs> matchingDocs = new ArrayList<MatchingDocs>();
+    protected final FacetArrays facetArrays;
+
+    protected final TaxonomyReader taxonomyReader;
+    protected final FacetSearchParams searchParams;
+
+    // Per-segment hit bookkeeping; flushed into matchingDocs when the
+    // segment changes (setNextReader) or collection ends (finish).
+    protected int totalHits;
+    protected FixedBitSet bits;
+    protected float[] scores;
+
+    // Documents already accounted for, across segments. Used by subclasses
+    // to avoid counting the same document more than once for facets.
+    protected DefaultDocumentSet docbits;
+    //private FixedBitSet docbits;
+
+    protected QueryFacetCollector(
+        final DocumentSet docs,
+
+        final FacetSearchParams searchParams,
+
+        final TaxonomyReader taxonomyReader) {
+
+        this.docs = docs;
+
+        this.searchParams = searchParams;
+        this.taxonomyReader = taxonomyReader;
+
+//        this.facetArrays = new FacetArrays(taxonomyReader.getSize());
+        // Size the facet arrays by partition size rather than the full
+        // taxonomy size (supports partitioned category lists).
+        this.facetArrays = new FacetArrays(
+            PartitionsUtils.partitionSize(searchParams.indexingParams, taxonomyReader));
+
+        // 1031 is a fixed initial capacity (prime); the commented-out
+        // alternative sized it from docs.getDocumentCount().
+        docbits = new DefaultDocumentSet(1031);//docs.getDocumentCount());
+        //docbits = new FixedBitSet(docs.getDocumentCount());
+
+    }
+
+    @Override
+    public void setScorer(Scorer scorer) throws IOException {
+        this.scorer = scorer;
+    }
+
+    // Called by Lucene on each segment transition. Flushes the previous
+    // segment's hits into matchingDocs (note: the MatchingDocs is built
+    // with the PREVIOUS context; `context` is only reassigned at the end),
+    // then resets the per-segment state for the new segment.
+    @Override
+    public void setNextReader(AtomicReaderContext atomicReaderContext)
+            throws IOException {
+        reader = atomicReaderContext.reader();
+        docIdValues = reader.getNumericDocValues(LuceneUtil.FIELD_DOC_ID);
+
+        if (bits != null) {
+            matchingDocs.add(new MatchingDocs(context, bits, totalHits, scores));
+        }
+        bits = new FixedBitSet(reader.maxDoc());
+        totalHits = 0;
+        scores = new float[64]; // some initial size
+        context = atomicReaderContext;
+    }
+
+    // Flushes the last segment's hits. Idempotent: nulls `bits` so a second
+    // call is a no-op. Invoked lazily from getFacetResults().
+    protected void finish() {
+        if (bits != null) {
+            matchingDocs.add(new MatchingDocs(this.context, bits, totalHits, scores));
+            bits = null;
+            scores = null;
+            context = null;
+        }
+    }
+
+    @Override
+    public boolean acceptsDocsOutOfOrder() {
+        return false;
+    }
+
+    // Subclasses decide what to do with each matching Lucene doc id
+    // (resolve to a document or node and report it via a callback).
+    @Override
+    public abstract void collect(int doc);
+
+    private boolean verifySearchParams(FacetSearchParams fsp) {
+        // verify that all category lists were encoded with DGapVInt
+        for (FacetRequest fr : fsp.facetRequests) {
+            CategoryListParams clp = fsp.indexingParams.getCategoryListParams(fr.categoryPath);
+            if (clp.createEncoder().createMatchingDecoder().getClass() != DGapVInt8IntDecoder.class) {
+                return false;
+            }
+        }
+
+        return true;
+    }
+
+    // Fast aggregator is only valid for DGapVInt-encoded category lists;
+    // otherwise fall back to the generic counting aggregator.
+    private FacetsAggregator getAggregator() {
+        if (verifySearchParams(searchParams)) {
+            return new FastCountingFacetsAggregator();
+        } else {
+            return new CountingFacetsAggregator();
+        }
+    }
+
+    // Collects the distinct category-list params referenced by the facet
+    // requests (usually a single default list).
+    private Set<CategoryListParams> getCategoryLists() {
+        if (searchParams.indexingParams.getAllCategoryListParams().size() == 1) {
+            return Collections.singleton(searchParams.indexingParams.getCategoryListParams(null));
+        }
+
+        HashSet<CategoryListParams> clps = new HashSet<CategoryListParams>();
+        for (FacetRequest fr : searchParams.facetRequests) {
+            clps.add(searchParams.indexingParams.getCategoryListParams(fr.categoryPath));
+        }
+        return clps;
+    }
+
+    // Picks the results handler best matching the request: specialized
+    // int/float top-K handlers for flat descending requests, per-node
+    // handler for tree results, generic top-K otherwise.
+    private FacetResultsHandler createFacetResultsHandler(FacetRequest fr) {
+        if (fr.getDepth() == 1 && fr.getSortOrder() == SortOrder.DESCENDING) {
+            FacetArraysSource fas = fr.getFacetArraysSource();
+            if (fas == FacetArraysSource.INT) {
+                return new IntFacetResultsHandler(taxonomyReader, fr, facetArrays);
+            }
+
+            if (fas == FacetArraysSource.FLOAT) {
+                return new FloatFacetResultsHandler(taxonomyReader, fr, facetArrays);
+            }
+        }
+
+        if (fr.getResultMode() == ResultMode.PER_NODE_IN_TREE) {
+            return new TopKInEachNodeHandler(taxonomyReader, fr, facetArrays);
+        }
+        return new TopKFacetResultsHandler(taxonomyReader, fr, facetArrays);
+    }
+
+    // Placeholder result for a category path absent from the taxonomy.
+    private static FacetResult emptyResult(int ordinal, FacetRequest fr) {
+        FacetResultNode root = new FacetResultNode(ordinal, 0);
+        root.label = fr.categoryPath;
+        return new FacetResult(fr, root, 0);
+    }
+
+    // Cached results; computed once on first getFacetResults() call.
+    List<FacetResult> facetResults = null;
+
+    /**
+     * Finishes collection and computes (once) the facet results for all
+     * requests in searchParams. Subsequent calls return the cached list.
+     */
+    public List<FacetResult> getFacetResults() throws IOException {
+        if (facetResults == null) {
+            finish();
+            facetResults = accumulate();
+        }
+        return facetResults;
+    }
+
+    private List<FacetResult> accumulate() throws IOException {
+
+        // aggregate facets per category list (usually only one category list)
+        FacetsAggregator aggregator = getAggregator();
+        for (CategoryListParams clp : getCategoryLists()) {
+            for (MatchingDocs md : matchingDocs) {
+                aggregator.aggregate(md, clp, facetArrays);
+            }
+        }
+
+        ParallelTaxonomyArrays arrays = taxonomyReader.getParallelTaxonomyArrays();
+
+        // compute top-K
+        final int[] children = arrays.children();
+        final int[] siblings = arrays.siblings();
+        List<FacetResult> res = new ArrayList<FacetResult>();
+        for (FacetRequest fr : searchParams.facetRequests) {
+            int rootOrd = taxonomyReader.getOrdinal(fr.categoryPath);
+            // category does not exist
+            if (rootOrd == TaxonomyReader.INVALID_ORDINAL) {
+                // Add empty FacetResult
+                res.add(emptyResult(rootOrd, fr));
+                continue;
+            }
+            CategoryListParams clp = searchParams.indexingParams.getCategoryListParams(fr.categoryPath);
+            // someone might ask to aggregate ROOT category
+            if (fr.categoryPath.length > 0) {
+                OrdinalPolicy ordinalPolicy = clp.getOrdinalPolicy(fr.categoryPath.components[0]);
+                if (ordinalPolicy == OrdinalPolicy.NO_PARENTS) {
+                    // rollup values
+                    aggregator.rollupValues(fr, rootOrd, children, siblings, facetArrays);
+                }
+            }
+
+            FacetResultsHandler frh = createFacetResultsHandler(fr);
+            res.add(frh.compute());
+        }
+        return res;
+    }
+}
+
View
222 extensions/indexes/lucene/src/org/exist/indexing/lucene/QueryNodes.java
@@ -0,0 +1,222 @@
+/*
+ * eXist Open Source Native XML Database
+ * Copyright (C) 2013 The eXist Project
+ * http://exist-db.org
+ *
+ * This program is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public License
+ * as published by the Free Software Foundation; either version 2
+ * of the License, or (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with this library; if not, write to the Free Software
+ * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
+ *
+ * $Id$
+ */
+package org.exist.indexing.lucene;
+
+import java.io.IOException;
+import java.util.List;
+import java.util.Properties;
+
+import org.apache.lucene.analysis.Analyzer;
+import org.apache.lucene.facet.params.FacetSearchParams;
+import org.apache.lucene.facet.search.FacetResult;
+import org.apache.lucene.facet.taxonomy.TaxonomyReader;
+import org.apache.lucene.index.AtomicReaderContext;
+import org.apache.lucene.index.BinaryDocValues;
+import org.apache.lucene.queryparser.classic.ParseException;
+import org.apache.lucene.queryparser.classic.QueryParser;
+import org.apache.lucene.search.IndexSearcher;
+import org.apache.lucene.search.Query;
+import org.apache.lucene.util.ArrayUtil;
+import org.apache.lucene.util.BytesRef;
+import org.exist.Database;
+import org.exist.dom.DocumentImpl;
+import org.exist.dom.DocumentSet;
+import org.exist.dom.NodeProxy;
+import org.exist.dom.QName;
+import org.exist.indexing.lucene.LuceneIndexWorker.LuceneMatch;
+import org.exist.numbering.NodeId;
+import org.exist.storage.DBBroker;
+import org.exist.storage.ElementValue;
+import org.exist.util.ByteConversion;
+import org.exist.xquery.TerminatedException;
+import org.w3c.dom.Node;
+
+/**
+ * @author <a href="mailto:shabanovd@gmail.com">Dmitriy Shabanov</a>
+ *
+ */
+public class QueryNodes {
+
+    /**
+     * Runs a pre-parsed Lucene query restricted to {@code docs}, reporting
+     * every matching node to {@code callback} as a {@link NodeProxy} and
+     * returning the facet results accumulated over the matches.
+     *
+     * @param qname     element/attribute QName used to type reported nodes
+     *                  (may be null — node type is then left unset)
+     * @param contextId context id attached to each LuceneMatch
+     */
+    public static List<FacetResult> query(LuceneIndexWorker worker, QName qname, int contextId, DocumentSet docs,
+            Query query, FacetSearchParams searchParams,
+            SearchCallback<NodeProxy> callback)
+            throws IOException, ParseException, TerminatedException {
+
+        final LuceneIndex index = worker.index;
+
+        final Database db = index.getBrokerPool();
+
+        IndexSearcher searcher = null;
+        try {
+            searcher = index.getSearcher();
+            final TaxonomyReader taxonomyReader = index.getTaxonomyReader();
+
+            DocumentHitCollector collector = new DocumentHitCollector(db, worker, query, qname, contextId, docs, callback, searchParams, taxonomyReader);
+
+            searcher.search(query, collector);
+
+            return collector.getFacetResults();
+        } finally {
+            // always return the searcher, even if search/facet computation threw
+            index.releaseSearcher(searcher);
+        }
+    }
+
+    /**
+     * Parses {@code queryStr} once per indexed QName (each QName maps to its
+     * own Lucene field/analyzer) and runs all resulting queries through a
+     * single collector, so facet counts and callbacks accumulate across
+     * all fields.
+     */
+    public static List<FacetResult> query(LuceneIndexWorker worker, DocumentSet docs,
+            List<QName> qnames, int contextId, String queryStr, FacetSearchParams searchParams, Properties options,
+            SearchCallback<NodeProxy> callback) throws IOException, ParseException,
+            TerminatedException {
+
+        qnames = worker.getDefinedIndexes(qnames);
+
+        final LuceneIndex index = worker.index;
+
+        final Database db = index.getBrokerPool();
+
+        DBBroker broker = db.getActiveBroker();
+
+        IndexSearcher searcher = null;
+        try {
+            searcher = index.getSearcher();
+            final TaxonomyReader taxonomyReader = index.getTaxonomyReader();
+
+            // qname/query start out null and are set before each search below
+            DocumentHitCollector collector = new DocumentHitCollector(db, worker, null, null, contextId, docs, callback, searchParams, taxonomyReader);
+
+            for (QName qname : qnames) {
+
+                String field = LuceneUtil.encodeQName(qname, db.getSymbols());
+
+                Analyzer analyzer = worker.getAnalyzer(null, qname, broker, docs);
+
+                QueryParser parser = new QueryParser(LuceneIndex.LUCENE_VERSION_IN_USE, field, analyzer);
+
+                worker.setOptions(options, parser);
+
+                Query query = parser.parse(queryStr);
+
+                // mutate the collector's current qname/query so matches
+                // collected during this search are labeled correctly
+                collector.qname = qname;
+                collector.query = query;
+
+                searcher.search(query, collector);
+            }
+
+            return collector.getFacetResults();
+        } finally {
+            index.releaseSearcher(searcher);
+        }
+    }
+
+    /**
+     * Collector resolving each Lucene hit to an eXist node: reads the stored
+     * document id and node id doc-values, builds a NodeProxy with a
+     * LuceneMatch attached, and reports it to the callback. Facet/score
+     * bookkeeping (bits/scores/docbits) is inherited from
+     * QueryFacetCollector and only updated for the FIRST hit of each
+     * document; the callback itself fires for every matching node.
+     */
+    private static class DocumentHitCollector extends QueryFacetCollector {
+
+        // per-segment binary doc-values holding the stored node id
+        private BinaryDocValues nodeIdValues;
+
+        // reusable scratch buffer for reading node-id bytes
+        private final byte[] buf = new byte[1024];
+
+        private final Database db;
+        private final LuceneIndexWorker worker;
+        // current query/qname; mutated between searches by the multi-QName
+        // query() method above
+        private Query query;
+
+        private QName qname;
+        private final int contextId;
+
+        private final SearchCallback<NodeProxy> callback;
+
+        private DocumentHitCollector(
+
+                final Database db,
+                final LuceneIndexWorker worker,
+                final Query query,
+
+                final QName qname,
+                final int contextId,
+
+                final DocumentSet docs,
+                final SearchCallback<NodeProxy> callback,
+
+                final FacetSearchParams searchParams,
+
+                final TaxonomyReader taxonomyReader) {
+
+            super(docs, searchParams, taxonomyReader);
+
+            this.db = db;
+            this.worker = worker;
+            this.query = query;
+
+            this.qname = qname;
+            this.contextId = contextId;
+
+            this.callback = callback;
+        }
+
+        @Override
+        public void setNextReader(AtomicReaderContext atomicReaderContext) throws IOException {
+            super.setNextReader(atomicReaderContext);
+            nodeIdValues = this.reader.getBinaryDocValues(LuceneUtil.FIELD_NODE_ID);
+        }
+
+        @Override
+        public void collect(int doc) {
+            try {
+                float score = scorer.score();
+                int docId = (int) this.docIdValues.get(doc);
+
+                DocumentImpl storedDocument = docs.getDoc(docId);
+                if (storedDocument == null)
+                    return;
+
+                // facet/score accounting only once per document
+                if (!docbits.contains(docId)) {
+                    docbits.add(storedDocument);
+
+                    bits.set(doc);
+                    if (totalHits >= scores.length) {
+                        float[] newScores = new float[ArrayUtil.oversize(totalHits + 1, 4)];
+                        System.arraycopy(scores, 0, newScores, 0, totalHits);
+                        scores = newScores;
+                    }
+                    scores[totalHits] = score;
+                    totalHits++;
+                }
+
+                // XXX: understand: check permissions here? No, it may slowdown, better to check final set
+
+                // decode the node id: first 2 bytes = unit count, rest = id data
+                BytesRef ref = new BytesRef(buf);
+                this.nodeIdValues.get(doc, ref);
+                int units = ByteConversion.byteToShort(ref.bytes, ref.offset);
+                NodeId nodeId = db.getNodeFactory().createFromData(units, ref.bytes, ref.offset + 2);
+                //LOG.info("doc: " + docId + "; node: " + nodeId.toString() + "; units: " + units);
+
+                NodeProxy storedNode = new NodeProxy(storedDocument, nodeId);
+                if (qname != null)
+                    storedNode.setNodeType(qname.getNameType() == ElementValue.ATTRIBUTE ? Node.ATTRIBUTE_NODE : Node.ELEMENT_NODE);
+
+                LuceneMatch match = worker. new LuceneMatch(contextId, nodeId, query);
+                match.setScore(score);
+                storedNode.addMatch(match);
+                callback.found(storedNode, score);
+                //resultSet.add(storedNode, sizeHint);
+
+            } catch (IOException e) {
+                // NOTE(review): exception is swallowed after printing — the hit is
+                // silently dropped; consider propagating or logging properly
+                e.printStackTrace();
+            }
+        }
+    }
+}
View
214 extensions/indexes/lucene/test/src/org/exist/indexing/lucene/FacetAbstractTest.java
@@ -0,0 +1,214 @@
+package org.exist.indexing.lucene;
+
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.fail;
+
+import java.io.File;
+import java.util.*;
+import java.util.Map.Entry;
+
+import org.exist.Indexer;
+import org.exist.TestUtils;
+import org.exist.collections.Collection;
+import org.exist.collections.CollectionConfigurationManager;
+import org.exist.collections.IndexInfo;
+import org.exist.dom.DefaultDocumentSet;
+import org.exist.dom.DocumentImpl;
+import org.exist.dom.DocumentSet;
+import org.exist.dom.MutableDocumentSet;
+import org.exist.storage.BrokerPool;
+import org.exist.storage.DBBroker;
+import org.exist.storage.md.MetaData;
+import org.exist.storage.md.Metas;
+import org.exist.storage.txn.TransactionManager;
+import org.exist.storage.txn.Txn;
+import org.exist.test.TestConstants;
+import org.exist.util.Configuration;
+import org.exist.util.ConfigurationHelper;
+import org.exist.xmldb.XmldbURI;
+import org.junit.After;
+import org.junit.AfterClass;
+import org.junit.Before;
+import org.junit.BeforeClass;
+
+/**
+ * Base class for facet index tests: boots a single BrokerPool for the test
+ * class, creates/removes the test collection around each test, and provides
+ * a helper to store configured resources with metadata attached.
+ */
+public class FacetAbstractTest {
+
+    protected static BrokerPool db;
+    protected static Collection root;
+    // value of PROPERTY_PRESERVE_WS_MIXED_CONTENT saved in setup(),
+    // restored in cleanup()
+    protected Boolean savedConfig;
+
+    /**
+     * Stores the given collection configuration (if non-null) and all
+     * resources into the test collection inside one transaction, attaching
+     * each resource's metadata, and returns the set of stored documents.
+     * Fails the test on any error.
+     */
+    protected DocumentSet configureAndStore(String configuration, Resource[] resources) {
+        DBBroker broker = null;
+        TransactionManager transact = null;
+        Txn transaction = null;
+        MutableDocumentSet docs = new DefaultDocumentSet();
+        try {
+            broker = db.get(db.getSecurityManager().getSystemSubject());
+            assertNotNull(broker);
+            transact = db.getTransactionManager();
+            assertNotNull(transact);
+            transaction = transact.beginTransaction();
+            assertNotNull(transaction);
+
+            MetaData md = MetaData.get();
+            assertNotNull(md);
+
+            if (configuration != null) {
+                CollectionConfigurationManager mgr = db.getConfigurationManager();
+                mgr.addConfiguration(transaction, broker, root, configuration);
+            }
+
+            for (Resource resource : resources) {
+                IndexInfo info = root.validateXMLResource(transaction, broker,
+                        XmldbURI.create(resource.docName), resource.data);
+                assertNotNull(info);
+
+                // Attach metadata before storing. (A former "docs != null"
+                // guard here was dead code: docs is always initialized above.)
+                Metas docMD = md.getMetas(info.getDocument());
+                if (docMD == null) {
+                    docMD = md.addMetas(info.getDocument());
+                }
+                assertNotNull(docMD);
+
+                for (Entry<String, String> entry : resource.metas.entrySet()) {
+                    docMD.put(entry.getKey(), entry.getValue());
+                }
+
+                root.store(transaction, broker, info, resource.data, false);
+
+                docs.add(info.getDocument());
+            }
+
+            transact.commit(transaction);
+        } catch (Exception e) {
+            if (transact != null)
+                transact.abort(transaction);
+            e.printStackTrace();
+            fail(e.getMessage());
+        } finally {
+            db.release(broker);
+        }
+
+        return docs;
+    }
+
+    /**
+     * Creates the test collection and switches on whitespace preservation
+     * for mixed content, remembering the previous setting.
+     */
+    @Before
+    public void setup() {
+        DBBroker broker = null;
+        TransactionManager transact = null;
+        Txn transaction = null;
+        try {
+            broker = db.get(db.getSecurityManager().getSystemSubject());
+            assertNotNull(broker);
+            transact = db.getTransactionManager();
+            assertNotNull(transact);
+            transaction = transact.beginTransaction();
+            assertNotNull(transaction);
+
+            root = broker.getOrCreateCollection(transaction, TestConstants.TEST_COLLECTION_URI);
+            assertNotNull(root);
+            broker.saveCollection(transaction, root);
+
+            transact.commit(transaction);
+
+            Configuration config = BrokerPool.getInstance().getConfiguration();
+            savedConfig = (Boolean) config.getProperty(Indexer.PROPERTY_PRESERVE_WS_MIXED_CONTENT);
+            config.setProperty(Indexer.PROPERTY_PRESERVE_WS_MIXED_CONTENT, Boolean.TRUE);
+        } catch (Exception e) {
+            if (transact != null)
+                transact.abort(transaction);
+            e.printStackTrace();
+            fail(e.getMessage());
+        } finally {
+            if (db != null)
+                db.release(broker);
+        }
+    }
+
+    /**
+     * Removes the configuration and test collections and restores the
+     * whitespace-preservation setting saved in setup().
+     */
+    @After
+    public void cleanup() {
+        BrokerPool pool = null;
+        DBBroker broker = null;
+        TransactionManager transact = null;
+        Txn transaction = null;
+        try {
+            pool = BrokerPool.getInstance();
+            assertNotNull(pool);
+            broker = pool.get(pool.getSecurityManager().getSystemSubject());
+            assertNotNull(broker);
+            transact = pool.getTransactionManager();
+            assertNotNull(transact);
+            transaction = transact.beginTransaction();
+            assertNotNull(transaction);
+
+            Collection collConfig = broker.getOrCreateCollection(transaction,
+                    XmldbURI.create(XmldbURI.CONFIG_COLLECTION + "/db"));
+            assertNotNull(collConfig);
+            broker.removeCollection(transaction, collConfig);
+
+            if (root != null) {
+                assertNotNull(root);
+                broker.removeCollection(transaction, root);
+            }
+            transact.commit(transaction);
+
+            Configuration config = BrokerPool.getInstance().getConfiguration();
+            config.setProperty(Indexer.PROPERTY_PRESERVE_WS_MIXED_CONTENT, savedConfig);
+        } catch (Exception e) {
+            if (transact != null)
+                transact.abort(transaction);
+            e.printStackTrace();
+            fail(e.getMessage());
+        } finally {
+            if (pool != null) pool.release(broker);
+        }
+    }
+
+    /** Boots the BrokerPool once for the whole test class. */
+    @BeforeClass
+    public static void startDB() {
+        try {
+            File confFile = ConfigurationHelper.lookup("conf.xml");
+            Configuration config = new Configuration(confFile.getAbsolutePath());
+            config.setProperty(Indexer.PROPERTY_SUPPRESS_WHITESPACE, "none");
+            config.setProperty(Indexer.PRESERVE_WS_MIXED_CONTENT_ATTRIBUTE, Boolean.TRUE);
+            BrokerPool.configure(1, 5, config);
+            db = BrokerPool.getInstance();
+            assertNotNull(db);
+        } catch (Exception e) {
+            e.printStackTrace();
+            fail(e.getMessage());
+        }
+    }
+
+    /** Wipes the database and stops all broker pools after the class. */
+    @AfterClass
+    public static void stopDB() {
+        TestUtils.cleanupDB();
+        BrokerPool.stopAll(false);
+        db = null;
+        root = null;
+    }
+
+    /** A test resource: document name, XML content, and metadata entries. */
+    protected class Resource {
+        final String docName;
+        final String data;
+        final Map<String, String> metas;
+
+        Resource(String docName, String data, Map<String, String> metas) {
+            this.docName = docName;
+            this.data = data;
+            this.metas = metas;
+        }
+    }
+
+    /** Callback counting the documents reported by a facet query. */
+    protected class CountDocuments implements SearchCallback<DocumentImpl> {
+
+        int count = 0;
+
+        @Override
+        public void found(DocumentImpl document, float score) {
+            count++;
+        }
+
+    }
+}
+
View
1,325 extensions/indexes/lucene/test/src/org/exist/indexing/lucene/FacetIndexTest.java
@@ -4,10 +4,7 @@
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.fail;
-import java.io.File;
-import java.io.StringReader;
import java.util.*;
-import java.util.Map.Entry;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.facet.params.FacetSearchParams;
@@ -17,80 +14,19 @@
import org.apache.lucene.facet.taxonomy.CategoryPath;
import org.apache.lucene.queryparser.classic.QueryParser;
import org.apache.lucene.search.Query;
-import org.exist.Indexer;
-import org.exist.TestUtils;
-import org.exist.collections.Collection;
-import org.exist.collections.CollectionConfigurationManager;
-import org.exist.collections.IndexInfo;
-import org.exist.dom.DefaultDocumentSet;
-import org.exist.dom.DocumentImpl;
import org.exist.dom.DocumentSet;
-import org.exist.dom.MutableDocumentSet;
import org.exist.dom.QName;
-import org.exist.indexing.Index;
-import org.exist.indexing.IndexWorker;
-import org.exist.indexing.OrderedValuesIndex;
-import org.exist.indexing.QNamedKeysIndex;
-import org.exist.indexing.lucene.QueryDocuments.SearchCallback;
-import org.exist.security.xacml.AccessContext;
-import org.exist.storage.BrokerPool;
import org.exist.storage.DBBroker;
-import org.exist.storage.ElementValue;
-import org.exist.storage.md.MetaData;
-import org.exist.storage.md.Metas;
-import org.exist.storage.txn.TransactionManager;
-import org.exist.storage.txn.Txn;
-import org.exist.test.TestConstants;
-import org.exist.util.Configuration;
-import org.exist.util.ConfigurationHelper;
-import org.exist.util.MimeTable;
-import org.exist.util.MimeType;
-import org.exist.util.Occurrences;
-import org.exist.xmldb.XmldbURI;
-import org.exist.xquery.XQuery;
-import org.exist.xquery.XQueryContext;
-import org.exist.xquery.CompiledXQuery;
-import org.exist.xquery.value.Sequence;
-import org.exist.xupdate.Modification;
-import org.exist.xupdate.XUpdateProcessor;
-import org.junit.After;
-import org.junit.AfterClass;
-import org.junit.Before;
-import org.junit.BeforeClass;
import org.junit.Test;
-import org.xml.sax.InputSource;
-public class FacetIndexTest {
+public class FacetIndexTest extends FacetAbstractTest {
protected static String XUPDATE_START =
"<xu:modifications version=\"1.0\" xmlns:xu=\"http://www.xmldb.org/xupdate\">";
protected static String XUPDATE_END =
"</xu:modifications>";
- private static String XML1 =
- "<section>" +
- " <head>The title in big letters</head>" +
- " <p rend=\"center\">A simple paragraph with <hi>just</hi> text in it.</p>" +
- " <p rend=\"right\">paragraphs with <span>mix</span><span>ed</span> content are <span>danger</span>ous.</p>" +
- "</section>";
-
- private static String XML2 =
- "<test>" +
- " <item id='1' attr='attribute'><description>Chair</description></item>" +
- " <item id='2'><description>Table</description>\n<condition>good</condition></item>" +
- " <item id='3'><description>Cabinet</description>\n<condition>bad</condition></item>" +
- "</test>";
-
- private static String XML3 =
- "<section>" +
- " <head>TITLE IN UPPERCASE LETTERS</head>" +
- " <p>UPPERCASE PARAGRAPH</p>" +
- "</section>";
-
- private static String XML4 =
- "<test><a>A X</a><b><c>B X</c> C</b></test>";
-
private static String XML5 =
"<article>" +
" <head>The <b>title</b>of it</head>" +
@@ -101,82 +37,6 @@
" <p>Another simple paragraph.</p>" +
"</article>";
- private static String XML6 =
- "<a>" +
- " <b>AAA</b>" +
- " <c>AAA</c>" +
- " <b>AAA</b>" +
- "</a>";
-
- private static String XML7 =
- "<section>" +
- " <head>Query Test</head>" +
- " <p>Eine wunderbare Heiterkeit hat meine ganze Seele eingenommen, gleich den " +
- " süßen Frühlingsmorgen, die ich mit ganzem Herzen genieße. Ich bin allein und " +
- " freue mich meines Lebens in dieser Gegend, die für solche Seelen geschaffen " +
- " ist wie die meine. Ich bin so glücklich, mein Bester, so ganz in dem Gefühle " +
- " von ruhigem Dasein versunken, daß meine Kunst darunter leidet.</p>" +
- "</section>";
-
- private static String COLLECTION_CONFIG1 =
- "<collection xmlns=\"http://exist-db.org/collection-config/1.0\">" +
- " <index>" +
- " <fulltext default=\"none\">" +
- " </fulltext>" +
- " <lucene>" +
- " <analyzer class=\"org.apache.lucene.analysis.core.SimpleAnalyzer\"/>" +
- " <text match=\"/section/p\"/>" +
- " <text qname=\"head\"/>" +
- " <text qname=\"@rend\"/>" +
- " <text qname=\"hi\"/>" +
- " <text qname=\"LINE\"/>" +
- " </lucene>" +
- " </index>" +
- "</collection>";
-
- private static String COLLECTION_CONFIG2 =
- "<collection xmlns=\"http://exist-db.org/collection-config/1.0\">" +
- " <index>" +
- " <fulltext default=\"none\">" +
- " <create qname=\"item\"/>" +
- " <create qname=\"description\"/>" +
- " <create qname=\"condition\"/>" +
- " <create qname=\"@attr\"/>" +
- " </fulltext>" +
- " <lucene>" +
- " <text qname=\"item\"/>" +
- " <text match=\"//description\"/>" +
- " <text qname=\"condition\"/>" +
- " <text qname=\"@attr\"/>" +
- " </lucene>" +
- " </index>" +
- "</collection>";
-
- private static String COLLECTION_CONFIG3 =
- "<collection xmlns=\"http://exist-db.org/collection-config/1.0\">" +
- " <index>" +
- " <fulltext default=\"none\">" +
- " </fulltext>" +
- " <lucene>" +
- " <analyzer id=\"whitespace\" class=\"org.apache.lucene.analysis.core.WhitespaceAnalyzer\"/>" +
- " <text match=\"/section/head\" analyzer=\"whitespace\"/>" +
- " <text match=\"//p\"/>" +
- " </lucene>" +
- " </index>" +
- "</collection>";
-
- private static String COLLECTION_CONFIG4 =
- "<collection xmlns=\"http://exist-db.org/collection-config/1.0\">" +
- " <index>" +
- " <fulltext default=\"none\">" +
- " </fulltext>" +
- " <lucene>" +
- " <text match=\"/test/a\"/>" +
- " <text match=\"/test/b/*\"/>" +
- " </lucene>" +
- " </index>" +
- "</collection>";
-
private static String COLLECTION_CONFIG5 =
"<collection xmlns=\"http://exist-db.org/collection-config/1.0\">" +
" <index xmlns:tei=\"http://www.tei-c.org/ns/1.0\">" +
@@ -198,18 +58,6 @@
" </index>" +
"</collection>";
- private static String COLLECTION_CONFIG6 =
- "<collection xmlns=\"http://exist-db.org/collection-config/1.0\">" +
- " <index xmlns:tei=\"http://www.tei-c.org/ns/1.0\">" +
- " <fulltext default=\"none\" attributes=\"no\">" +
- " </fulltext>" +
- " <lucene>" +
- " <text qname=\"b\"/>" +
- " <text qname=\"c\" boost=\"2.0\"/>" +
- " </lucene>" +
- " </index>" +
- "</collection>";
-
private static Map<String, String> metas1 = new HashMap<String, String>();
static {
metas1.put("status", "draft");
@@ -220,93 +68,6 @@
metas2.put("status", "final");
}
- private static BrokerPool db;
- private static Collection root;
- private Boolean savedConfig;
-
- @Test
- public void simpleQueries() {
- System.out.println("Test simple queries ...");
- DocumentSet docs = configureAndStore(COLLECTION_CONFIG1, XML1, "test.xml");
- DBBroker broker = null;
- try {
- broker = db.get(db.getSecurityManager().getSystemSubject());
- assertNotNull(broker);
-
- checkIndex(docs, broker, new QName[] { new QName("head", "") }, "title", 1);
- Occurrences[] o = checkIndex(docs, broker, new QName[]{new QName("p", "")}, "with", 1);
- assertEquals(2, o[0].getOccurrences());
- checkIndex(docs, broker, new QName[] { new QName("hi", "") }, "just", 1);
- checkIndex(docs, broker, null, "in", 1);
-
- QName attrQN = new QName("rend", "");
- attrQN.setNameType(ElementValue.ATTRIBUTE);
- checkIndex(docs, broker, new QName[] { attrQN }, null, 2);
- checkIndex(docs, broker, new QName[] { attrQN }, "center", 1);
- checkIndex(docs, broker, new QName[] { attrQN }, "right", 1);
-
- XQuery xquery = broker.getXQueryService();
- assertNotNull(xquery);
- Sequence seq = xquery.execute("/section[ft:query(p, 'content')]", null, AccessContext.TEST);
- assertNotNull(seq);
- assertEquals(1, seq.getItemCount());
-
- seq = xquery.execute("/section[ft:query(p/@rend, 'center')]", null, AccessContext.TEST);
- assertNotNull(seq);
- assertEquals(1, seq.getItemCount());
-
- seq = xquery.execute("/section[ft:query(hi, 'just')]", null, AccessContext.TEST);
- assertNotNull(seq);
- assertEquals(0, seq.getItemCount());
-
- seq = xquery.execute("/section[ft:query(p/*, 'just')]", null, AccessContext.TEST);
- assertNotNull(seq);
- assertEquals(1, seq.getItemCount());
-
- seq = xquery.execute("/section[ft:query(head/*, 'just')]", null, AccessContext.TEST);
- assertNotNull(seq);
- assertEquals(0, seq.getItemCount());
- System.out.println("Test PASSED.");
- } catch (Exception e) {
- e.printStackTrace();
- fail(e.getMessage());
- } finally {
- db.release(broker);
- }
- }
-
- @Test
- public void configuration() {
- DocumentSet docs = configureAndStore(COLLECTION_CONFIG4, XML4, "test.xml");
- DBBroker broker = null;
- try {
- broker = db.get(db.getSecurityManager().getSystemSubject());
- assertNotNull(broker);
-
- checkIndex(docs, broker, new QName[] { new QName("a", "") }, "x", 1);
- checkIndex(docs, broker, new QName[] { new QName("c", "") }, "x", 1);
-
- XQuery xquery = broker.getXQueryService();
- assertNotNull(xquery);
- Sequence seq = xquery.execute("/test[ft:query(a, 'x')]", null, AccessContext.TEST);
- assertNotNull(seq);
- assertEquals(1, seq.getItemCount());
-
- seq = xquery.execute("/test[ft:query(.//c, 'x')]", null, AccessContext.TEST);
- assertNotNull(seq);
- assertEquals(1, seq.getItemCount());
-
- seq = xquery.execute("/test[ft:query(b, 'x')]", null, AccessContext.TEST);
- assertNotNull(seq);
- assertEquals(0, seq.getItemCount());
- } catch (Exception e) {
- e.printStackTrace();
- fail(e.getMessage());
- } finally {
- db.release(broker);
- }
- }
-
private void checkFacet(List<FacetResult> facets) {
assertEquals(1, facets.size());
@@ -326,7 +87,6 @@ private void checkFacet(List<FacetResult> facets) {
node = subResults.get(1);
assertEquals(1.0, node.value, 0.0001);
assertEquals("status/draft", node.label.toString());
-
}
@Test
@@ -358,7 +118,7 @@ public void inlineAndIgnore() {
// new CountFacetRequest(new CategoryPath("Author"), 10)
);
- Count cb = new Count();
+ CountDocuments cb = new CountDocuments();
List<QName> qnames = new ArrayList<QName>();
qnames.add(new QName("head", ""));
@@ -470,1086 +230,5 @@ public void inlineAndIgnore() {
db.release(broker);
}
}
-
- @Test
- public void boosts() {
- configureAndStore(COLLECTION_CONFIG6, XML6, "test.xml");
- DBBroker broker = null;
- try {
- broker = db.get(db.getSecurityManager().getSystemSubject());
- assertNotNull(broker);
-
- XQuery xquery = broker.getXQueryService();
- assertNotNull(xquery);
- Sequence seq = xquery.execute("for $a in ft:query((//b|//c), 'AAA') " +
- "order by ft:score($a) descending return $a/local-name(.)", null, AccessContext.TEST);
- assertNotNull(seq);
- assertEquals(3, seq.getItemCount());
- assertEquals("c", seq.getStringValue());
- } catch (Exception e) {
- e.printStackTrace();
- fail(e.getMessage());
- } finally {
- db.release(broker);
- }
- }
-
- @Test
- public void queryTranslation() {
- configureAndStore(COLLECTION_CONFIG1, XML7, "test.xml");
- DBBroker broker = null;
- try {
- broker = db.get(db.getSecurityManager().getSystemSubject());
- assertNotNull(broker);
-
- XQuery xquery = broker.getXQueryService();
- assertNotNull(xquery);
-
- XQueryContext context = new XQueryContext(broker.getBrokerPool(), AccessContext.TEST);
- CompiledXQuery compiled = xquery.compile(context, "declare variable $q external; " +
- "ft:query(//p, util:parse($q)/query)");
-
- context.declareVariable("q", "<query><term>heiterkeit</term></query>");
- Sequence seq = xquery.execute(compiled, null);
- assertNotNull(seq);
- assertEquals(1, seq.getItemCount());
-
- context.declareVariable("q",
- "<query>" +
- " <bool>" +
- " <term>heiterkeit</term><term>blablabla</term>" +
- " </bool>" +
- "</query>");
- seq = xquery.execute(compiled, null);
- assertNotNull(seq);
- assertEquals(1, seq.getItemCount());
-
- context.declareVariable("q",
- "<query>" +
- " <bool>" +
- " <term occur='should'>heiterkeit</term><term occur='should'>blablabla</term>" +
- " </bool>" +
- "</query>");
- seq = xquery.execute(compiled, null);
- assertNotNull(seq);
- assertEquals(1, seq.getItemCount());
-
- context.declareVariable("q",
- "<query>" +
- " <bool>" +
- " <term occur='must'>heiterkeit</term><term occur='must'>blablabla</term>" +
- " </bool>" +
- "</query>");
- seq = xquery.execute(compiled, null);
- assertNotNull(seq);
- assertEquals(0, seq.getItemCount());
-
- context.declareVariable("q",
- "<query>" +
- " <bool>" +
- " <term occur='must'>heiterkeit</term><term occur='not'>herzen</term>" +
- " </bool>" +
- "</query>");
- seq = xquery.execute(compiled, null);
- assertNotNull(seq);
- assertEquals(0, seq.getItemCount());
-
- context.declareVariable("q",
- "<query>" +
- " <bool>" +
- " <phrase occur='must'>wunderbare heiterkeit</phrase><term occur='must'>herzen</term>" +
- " </bool>" +
- "</query>");
- seq = xquery.execute(compiled, null);
- assertNotNull(seq);
- assertEquals(1, seq.getItemCount());
-
- context.declareVariable("q",
- "<query>" +
- " <phrase slop='5'>heiterkeit seele eingenommen</phrase>" +
- "</query>");
- seq = xquery.execute(compiled, null);
- assertNotNull(seq);
- assertEquals(1, seq.getItemCount());
-
- // phrase with wildcards
- context.declareVariable("q",
- "<query>" +
- " <phrase slop='5'><term>heiter*</term><term>se?nnnle*</term></phrase>" +
- "</query>");
- seq = xquery.execute(compiled, null);
- assertNotNull(seq);
- assertEquals(1, seq.getItemCount());
-
- context.declareVariable("q",
- "<query>" +
- " <wildcard>?eiter*</wildcard>" +
- "</query>");
- seq = xquery.execute(compiled, null);
- assertNotNull(seq);
- assertEquals(1, seq.getItemCount());
-
- context.declareVariable("q",
- "<query>" +
- " <fuzzy max-edits='2'>selee</fuzzy>" +
- "</query>");
- seq = xquery.execute(compiled, null);
- assertNotNull(seq);
- assertEquals(1, seq.getItemCount());
-
- context.declareVariable("q",
- "<query>" +
- " <bool>" +
- " <fuzzy occur='must' max-edits='2'>selee</fuzzy>" +
- " <wildcard occur='should'>bla*</wildcard>" +
- " </bool>" +
- "</query>");
- seq = xquery.execute(compiled, null);
- assertNotNull(seq);
- assertEquals(1, seq.getItemCount());
-
- context.declareVariable("q",
- "<query>" +
- " <regex>heit.*keit</regex>" +
- "</query>");
- seq = xquery.execute(compiled, null);
- assertNotNull(seq);
- assertEquals(1, seq.getItemCount());
-
- context.declareVariable("q",
- "<query>" +
- " <phrase><term>wunderbare</term><regex>heit.*keit</regex></phrase>" +
- "</query>");
- seq = xquery.execute(compiled, null);
- assertNotNull(seq);
- assertEquals(1, seq.getItemCount());
- } catch (Exception e) {
- e.printStackTrace();
- fail(e.getMessage());
- } finally {
- db.release(broker);
- }
- }
-
- @Test
- public void analyzers() {
- DocumentSet docs = configureAndStore(COLLECTION_CONFIG3, XML3, "test.xml");
- DBBroker broker = null;
- try {
- broker = db.get(db.getSecurityManager().getSystemSubject());
- assertNotNull(broker);
-
- checkIndex(docs, broker, new QName[] { new QName("head", "") }, "TITLE", 1);
- checkIndex(docs, broker, new QName[] { new QName("p", "") }, "uppercase", 1);
-
- XQuery xquery = broker.getXQueryService();
- assertNotNull(xquery);
- Sequence seq = xquery.execute("/section[ft:query(p, 'UPPERCASE')]", null, AccessContext.TEST);
- assertNotNull(seq);
- assertEquals(1, seq.getItemCount());
-
- seq = xquery.execute("/section[ft:query(head, 'TITLE')]", null, AccessContext.TEST);
- assertNotNull(seq);
- assertEquals(1, seq.getItemCount());
-
- seq = xquery.execute("/section[ft:query(head, 'title')]", null, AccessContext.TEST);
- assertNotNull(seq);
- assertEquals(0, seq.getItemCount());
- } catch (Exception e) {
- e.printStackTrace();
- fail(e.getMessage());
- } finally {
- db.release(broker);
- }
- }
-
- @Test
- public void dropSingleDoc() {
- System.out.println("Test removal of single document ...");
- DocumentSet docs = configureAndStore(COLLECTION_CONFIG1, XML1, "dropDocument.xml");
- DBBroker broker = null;
- TransactionManager transact = null;
- Txn transaction = null;
- try {
- broker = db.get(db.getSecurityManager().getSystemSubject());
- assertNotNull(broker);
- transact = db.getTransactionManager();
- assertNotNull(transact);
- transaction = transact.beginTransaction();
- assertNotNull(transaction);
-
- System.out.println("Removing document dropDocument.xml");
- root.removeXMLResource(transaction, broker, XmldbURI.create("dropDocument.xml"));
- transact.commit(transaction);
-
- checkIndex(docs, broker, null, null, 0);
-
- System.out.println("Test PASSED.");
- } catch (Exception e) {
- if (transact != null)
- transact.abort(transaction);
- e.printStackTrace();
- fail(e.getMessage());
- } finally {
- db.release(broker);
- }
- }
-
- @Test
- public void dropDocuments() {
- System.out.println("Test removal of multiple documents ...");
- configureAndStore(COLLECTION_CONFIG1, "samples/shakespeare");
- DBBroker broker = null;
- TransactionManager transact = null;
- Txn transaction = null;
- try {
- broker = db.get(db.getSecurityManager().getSystemSubject());
- assertNotNull(broker);
- transact = db.getTransactionManager();
- assertNotNull(transact);
- transaction = transact.beginTransaction();
- assertNotNull(transaction);
-
- XQuery xquery = broker.getXQueryService();
- assertNotNull(xquery);
- Sequence seq = xquery.execute("//LINE[ft:query(., 'bark')]", null, AccessContext.TEST);
- assertNotNull(seq);
- assertEquals(6, seq.getItemCount());
-
- System.out.println("Removing document r_and_j.xml");
- root.removeXMLResource(transaction, broker, XmldbURI.create("r_and_j.xml"));
- transact.commit(transaction);
-
- seq = xquery.execute("//LINE[ft:query(., 'bark')]", null, AccessContext.TEST);
- assertNotNull(seq);
- assertEquals(3, seq.getItemCount());
-
- transaction = transact.beginTransaction();
- assertNotNull(transaction);
- System.out.println("Removing document hamlet.xml");
- root.removeXMLResource(transaction, broker, XmldbURI.create("hamlet.xml"));
- transact.commit(transaction);
-
- seq = xquery.execute("//LINE[ft:query(., 'bark')]", null, AccessContext.TEST);
- assertNotNull(seq);
- assertEquals(1, seq.getItemCount());
-
- System.out.println("Test PASSED.");
- } catch (Exception e) {
- if (transact != null)
- transact.abort(transaction);
- e.printStackTrace();
- fail(e.getMessage());
- } finally {
- db.release(broker);
- }
- }
-
- @Test
- public void removeCollection() {
- System.out.println("Test removal of collection ...");
- DocumentSet docs = configureAndStore(COLLECTION_CONFIG1, "samples/shakespeare");
- DBBroker broker = null;
- TransactionManager transact = null;
- Txn transaction = null;
- try {
- broker = db.get(db.getSecurityManager().getSystemSubject());
- assertNotNull(broker);
- transact = db.getTransactionManager();
- assertNotNull(transact);
- transaction = transact.beginTransaction();
- assertNotNull(transaction);
-
- XQuery xquery = broker.getXQueryService();
- assertNotNull(xquery);
- Sequence seq = xquery.execute("//SPEECH[ft:query(LINE, 'love')]", null, AccessContext.TEST);
- assertNotNull(seq);
- System.out.println("Found: " + seq.getItemCount());
- assertEquals(166, seq.getItemCount());
-
- System.out.println("Removing collection");
- broker.removeCollection(transaction, root);
-
- root = broker.getOrCreateCollection(transaction, TestConstants.TEST_COLLECTION_URI);
- assertNotNull(root);
- broker.saveCollection(transaction, root);
-
- transact.commit(transaction);
-
- root = null;
-
- checkIndex(docs, broker, null, null, 0);
-
- System.out.println("Test PASSED.");
- } catch (Exception e) {
- if (transact != null)
- transact.abort(transaction);
- e.printStackTrace();
- fail(e.getMessage());
- } finally {
- db.release(broker);
- }
- }
-
- @Test
- public void reindex() {
- DocumentSet docs = configureAndStore(COLLECTION_CONFIG1, XML1, "dropDocument.xml");
- DBBroker broker = null;
- try {
- broker = db.get(db.getSecurityManager().getSystemSubject());
- assertNotNull(broker);
-
- broker.reindexCollection(TestConstants.TEST_COLLECTION_URI);
-
- checkIndex(docs, broker, new QName[] { new QName("head", "") }, "title", 1);
- Occurrences[] o = checkIndex(docs, broker, new QName[]{new QName("p", "")}, "with", 1);
- assertEquals(2, o[0].getOccurrences());
- checkIndex(docs, broker, new QName[] { new QName("hi", "") }, "just", 1);
- checkIndex(docs, broker, null, "in", 1);
-
- QName attrQN = new QName("rend", "");
- attrQN.setNameType(ElementValue.ATTRIBUTE);
- checkIndex(docs, broker, new QName[] { attrQN }, null, 2);
- checkIndex(docs, broker, new QName[] { attrQN }, "center", 1);
- } catch (Exception e) {
- e.printStackTrace();
- fail(e.getMessage());
- } finally {
- db.release(broker);
- }
- }
-
- /**
- * Remove nodes from different levels of the tree and check if the index is
- * correctly updated.
- */
- @Test
- public void xupdateRemove() {
- DocumentSet docs = configureAndStore(COLLECTION_CONFIG2, XML2, "xupdate.xml");
- DBBroker broker = null;
- TransactionManager transact = null;
- Txn transaction = null;
- try {
- broker = db.get(db.getSecurityManager().getSystemSubject());
- transact = db.getTransactionManager();
- transaction = transact.beginTransaction();
-
- checkIndex(docs, broker, new QName[] { new QName("description", "") }, "chair", 1);
- checkIndex(docs, broker, new QName[] { new QName("item", "") }, null, 5);
- checkIndex(docs, broker, new QName[] { new QName("condition", "") }, null, 2);
-
- XQuery xquery = broker.getXQueryService();
- assertNotNull(xquery);
- Sequence seq = xquery.execute("//item[ft:query(description, 'chair')]", null, AccessContext.TEST);
- assertNotNull(seq);
- assertEquals(1, seq.getItemCount());
-
- XUpdateProcessor proc = new XUpdateProcessor(broker, docs, AccessContext.TEST);
- assertNotNull(proc);
- proc.setBroker(broker);
- proc.setDocumentSet(docs);
- String xupdate =
- XUPDATE_START +
- " <xu:remove select=\"//item[@id='2']/condition\"/>" +
- XUPDATE_END;
- Modification[] modifications = proc.parse(new InputSource(new StringReader(xupdate)));
- assertNotNull(modifications);
- modifications[0].process(transaction);
- proc.reset();
-
- checkIndex(docs, broker, new QName[] { new QName("condition", "") }, null, 1);
- checkIndex(docs, broker, new QName[] { new QName("item", "") }, null, 4);
- checkIndex(docs, broker, new QName[] { new QName("condition", "") }, "good", 0);
- checkIndex(docs, broker, new QName[] { new QName("item", "") }, "good", 0);
- Occurrences o[] = checkIndex(docs, broker, new QName[] { new QName("description", "") }, "table", 1);
- assertEquals("table", o[0].getTerm());
- o = checkIndex(docs, broker, new QName[] { new QName("description", "") }, "cabinet", 1);
- assertEquals("cabinet", o[0].getTerm());
- o = checkIndex(docs, broker, new QName[] { new QName("item", "") }, "table", 1);
- assertEquals("table", o[0].getTerm());
- o = checkIndex(docs, broker, new QName[] { new QName("item", "") }, "cabinet", 1);
- assertEquals("cabinet", o[0].getTerm());
-
- proc.setBroker(broker);
- proc.setDocumentSet(docs);
- xupdate =
- XUPDATE_START +
- " <xu:remove select=\"//item[@id='3']/description/text()\"/>" +
- XUPDATE_END;
- modifications = proc.parse(new InputSource(new StringReader(xupdate)));
- assertNotNull(modifications);
- modifications[0].process(transaction);
- proc.reset();
-
- proc.setBroker(broker);
- proc.setDocumentSet(docs);
- xupdate =
- XUPDATE_START +
- " <xu:remove select=\"//item[@id='1']\"/>" +
- XUPDATE_END;
- modifications = proc.parse(new InputSource(new StringReader(xupdate)));
- assertNotNull(modifications);
- modifications[0].process(transaction);
- proc.reset();
-
- o = checkIndex(docs, broker, new QName[] { new QName("description", "") }, null, 1);
- assertEquals("table", o[0].getTerm());
- checkIndex(docs, broker, new QName[] { new QName("description", "") }, "chair", 0);
- checkIndex(docs, broker, new QName[] { new QName("item", "") }, "chair", 0);
-
- transact.commit(transaction);
- } catch (Exception e) {
- if (transact != null)
- transact.abort(transaction);
- e.printStackTrace();
- fail(e.getMessage());
- } finally {
- if (db != null) {
- db.release(broker);
- }
- }
- }
-
- /**
- * Remove nodes from different levels of the tree and check if the index is
- * correctly updated.
- */
- @Test
- public void xupdateInsert() {
- DocumentSet docs = configureAndStore(COLLECTION_CONFIG2, XML2, "xupdate.xml");
- DBBroker broker = null;
- TransactionManager transact = null;
- Txn transaction = null;
- try {
- broker = db.get(db.getSecurityManager().getSystemSubject());
- transact = db.getTransactionManager();
- transaction = transact.beginTransaction();
-
- Occurrences occur[] = checkIndex(docs, broker, new QName[] { new QName("description", "") }, "chair", 1);
- assertEquals("chair", occur[0].getTerm());
- checkIndex(docs, broker, new QName[] { new QName("item", "") }, null, 5);
-
- XQuery xquery = broker.getXQueryService();
- assertNotNull(xquery);
- Sequence seq = xquery.execute("//item[ft:query(description, 'chair')]", null, AccessContext.TEST);
- assertNotNull(seq);
- assertEquals(1, seq.getItemCount());
-
- // Append to root node
- XUpdateProcessor proc = new XUpdateProcessor(broker, docs, AccessContext.TEST);
- assertNotNull(proc);
- proc.setBroker(broker);
- proc.setDocumentSet(docs);
- String xupdate =
- XUPDATE_START +
- " <xu:append select=\"/test\">" +
- " <item id='4'><description>Armchair</description> <condition>bad</condition></item>" +
- " </xu:append>" +
- XUPDATE_END;
- Modification[] modifications = proc.parse(new InputSource(new StringReader(xupdate)));
- assertNotNull(modifications);
- modifications[0].process(transaction);
- proc.reset();
-
- Occurrences o[] = checkIndex(docs, broker, new QName[] { new QName("condition", "") }, null, 2);
- System.out.println("prices: " + o.length);
- for (int i = 0; i < o.length; i++) {
- System.out.println("occurance: " + o[i].getTerm() + ": " + o[i].getOccurrences());
- }
- checkIndex(docs, broker, new QName[] { new QName("description", "") }, null, 4);
- checkIndex(docs, broker, new QName[] { new QName("item", "") }, null, 6);
-
- o = checkIndex(docs, broker, new QName[] { new QName("condition", "") }, "bad", 1);
- assertEquals("bad", o[0].getTerm());
- o = checkIndex(docs, broker, new QName[] { new QName("description", "") }, "armchair", 1);
- assertEquals("armchair", o[0].getTerm());
- o = checkIndex(docs, broker, new QName[] { new QName("item", "") }, "bad", 1);
- assertEquals("bad", o[0].getTerm());
- o = checkIndex(docs, broker, new QName[] { new QName("item", "") }, "armchair", 1);
- assertEquals("armchair", o[0].getTerm());
-
- // Insert before top element
- proc.setBroker(broker);
- proc.setDocumentSet(docs);
- xupdate =
- XUPDATE_START +
- " <xu:insert-before select=\"//item[@id = '1']\">" +
- " <item id='0'><description>Wheelchair</description> <condition>poor</condition></item>" +
- " </xu:insert-before>" +
- XUPDATE_END;
- modifications = proc.parse(new InputSource(new StringReader(xupdate)));
- assertNotNull(modifications);
- modifications[0].process(transaction);
- proc.reset();
-
- checkIndex(docs, broker, new QName[] { new QName("condition", "") }, null, 3);
- checkIndex(docs, broker, new QName[] { new QName("description", "") }, null, 5);
- checkIndex(docs, broker, new QName[] { new QName("item", "") }, null, 8);
-
- o = checkIndex(docs, broker, new QName[] { new QName("condition", "") }, "poor", 1);
- assertEquals("poor", o[0].getTerm());
- o = checkIndex(docs, broker, new QName[] { new QName("description", "") }, "wheelchair", 1);
- assertEquals("wheelchair", o[0].getTerm());
- o = checkIndex(docs, broker, new QName[] { new QName("item", "") }, "poor", 1);
- assertEquals("poor", o[0].getTerm());
- o = checkIndex(docs, broker, new QName[] { new QName("item", "") }, "wheelchair", 1);
- assertEquals("wheelchair", o[0].getTerm());
-
- // Insert after element
- proc.setBroker(broker);
- proc.setDocumentSet(docs);
- xupdate =
- XUPDATE_START +
- " <xu:insert-after select=\"//item[@id = '1']\">" +
- " <item id='1.1'><description>refrigerator</description> <condition>perfect</condition></item>" +
- " </xu:insert-after>" +
- XUPDATE_END;
- modifications = proc.parse(new InputSource(new StringReader(xupdate)));
- assertNotNull(modifications);
- modifications[0].process(transaction);
- proc.reset();
-
- checkIndex(docs, broker, new QName[] { new QName("condition", "") }, null, 4);
- checkIndex(docs, broker, new QName[] { new QName("description", "") }, null, 6);
- checkIndex(docs, broker, new QName[] { new QName("item", "") }, null, 10);
-
- o = checkIndex(docs, broker, new QName[] { new QName("condition", "") }, "perfect", 1);
- assertEquals("perfect", o[0].getTerm());
- o = checkIndex(docs, broker, new QName[] { new QName("description", "") }, "refrigerator", 1);
- assertEquals("refrigerator", o[0].getTerm());
- o = checkIndex(docs, broker, new QName[] { new QName("item", "") }, "perfect", 1);
- assertEquals("perfect", o[0].getTerm());
- o = checkIndex(docs, broker, new QName[] { new QName("item", "") }, "refrigerator", 1);
- assertEquals("refrigerator", o[0].getTerm());
-
- proc.setBroker(broker);
- proc.setDocumentSet(docs);
- xupdate =
- XUPDATE_START +
- " <xu:insert-after select=\"//item[@id = '1']/description\">" +
- " <condition>average</condition>" +
- " </xu:insert-after>" +
- XUPDATE_END;
- modifications = proc.parse(new InputSource(new StringReader(xupdate)));
- assertNotNull(modifications);
- modifications[0].process(transaction);
- proc.reset();
-
- checkIndex(docs, broker, new QName[] { new QName("condition", "") }, null, 5);
- checkIndex(docs, broker, new QName[] { new QName("item", "") }, null, 11);
- o = checkIndex(docs, broker, new QName[] { new QName("condition", "") }, "average", 1);
- assertEquals("average", o[0].getTerm());
- o = checkIndex(docs, broker, new QName[] { new QName("item", "") }, "average", 1);
- assertEquals("average", o[0].getTerm());
-
- // Insert before nested element
- proc.setBroker(broker);
- proc.setDocumentSet(docs);
- xupdate =
- XUPDATE_START +
- " <xu:insert-before select=\"//item[@id = '1']/description\">" +
- " <condition>awesome</condition>" +
- " </xu:insert-before>" +
- XUPDATE_END;
- modifications = proc.parse(new InputSource(new StringReader(xupdate)));
- assertNotNull(modifications);
- modifications[0].process(transaction);
- proc.reset();
-
- checkIndex(docs, broker, new QName[] { new QName("condition", "") }, null, 6);
- checkIndex(docs, broker, new QName[] { new QName("item", "") }, null, 12);
- o = checkIndex(docs, broker, new QName[] { new QName("condition", "") }, "awesome", 1);
- assertEquals("awesome", o[0].getTerm());
- o = checkIndex(docs, broker, new QName[] { new QName("item", "") }, "awesome", 1);
- assertEquals("awesome", o[0].getTerm());
-
- // Overwrite attribute
- proc.setBroker(broker);
- proc.setDocumentSet(docs);
- xupdate =
- XUPDATE_START +
- " <xu:append select=\"//item[@id = '1']\">" +
- " <xu:attribute name=\"attr\">abc</xu:attribute>" +
- " </xu:append>" +
- XUPDATE_END;
- modifications = proc.parse(new InputSource(new StringReader(xupdate)));
- assertNotNull(modifications);
- modifications[0].process(transaction);
- proc.reset();
-
- QName qnattr[] = { new QName("attr", "", "") };
- qnattr[0].setNameType(ElementValue.ATTRIBUTE);
- o = checkIndex(docs, broker, qnattr, null, 1);
- assertEquals("abc", o[0].getTerm());
- checkIndex(docs, broker, qnattr, "attribute", 0);
-
- transact.commit(transaction);
- } catch (Exception e) {
- if (transact != null)
- transact.abort(transaction);
- e.printStackTrace();
- fail(e.getMessage());
- } finally {
- if (db != null) {
- db.release(broker);
- }
- }
- }
-
- // Verifies that XUpdate <xu:update> operations keep the Lucene index in sync:
- // updating element content, a text node, and an attribute value must each
- // remove the old terms from the index and add the new ones.
- @Test
- public void xupdateUpdate() {
- DocumentSet docs = configureAndStore(COLLECTION_CONFIG2, XML2, "xupdate.xml");
- DBBroker broker = null;
- TransactionManager transact = null;
- Txn transaction = null;
- try {
- broker = db.get(db.getSecurityManager().getSystemSubject());
- transact = db.getTransactionManager();
- transaction = transact.beginTransaction();
-
- // Baseline: "chair" is indexed once under <description> before any update.
- // NOTE(review): checkIndex(docs, broker, qnames, term, n) is assumed to
- // assert n index occurrences and return them — confirm against the helper.
- Occurrences occur[] = checkIndex(docs, broker, new QName[] { new QName("description", "") }, "chair", 1);
- assertEquals("chair", occur[0].getTerm());
- checkIndex(docs, broker, new QName[] { new QName("item", "") }, null, 5);
-
- // Sanity check that a full-text query finds the baseline term.
- XQuery xquery = broker.getXQueryService();
- assertNotNull(xquery);
- Sequence seq = xquery.execute("//item[ft:query(description, 'chair')]", null, AccessContext.TEST);
- assertNotNull(seq);
- assertEquals(1, seq.getItemCount());
-
- // Update element content
- XUpdateProcessor proc = new XUpdateProcessor(broker, docs, AccessContext.TEST);
- assertNotNull(proc);
- proc.setBroker(broker);
- proc.setDocumentSet(docs);
- String xupdate =
- XUPDATE_START +
- " <xu:update select=\"//item[@id = '1']/description\">wardrobe</xu:update>" +
- XUPDATE_END;
- Modification[] modifications = proc.parse(new InputSource(new StringReader(xupdate)));
- assertNotNull(modifications);
- modifications[0].process(transaction);
- proc.reset();
-
- // "chair" must be gone and "wardrobe" indexed in its place.
- checkIndex(docs, broker, new QName[] { new QName("description", "") }, null, 3);
- checkIndex(docs, broker, new QName[] { new QName("item", "") }, null, 5);
- checkIndex(docs, broker, new QName[] { new QName("description", "") }, "chair", 0);
- checkIndex(docs, broker, new QName[] { new QName("item", "") }, "chair", 0);
- Occurrences o[] = checkIndex(docs, broker, new QName[] { new QName("description", "") }, "wardrobe", 1);
- assertEquals("wardrobe", o[0].getTerm());
-
- // Update text node
- proc.setBroker(broker);
- proc.setDocumentSet(docs);
- xupdate =
- XUPDATE_START +
- " <xu:update select=\"//item[@id = '1']/description/text()\">Wheelchair</xu:update>" +
- XUPDATE_END;
- modifications = proc.parse(new InputSource(new StringReader(xupdate)));
- assertNotNull(modifications);
- modifications[0].process(transaction);
- proc.reset();
-
- // Term is checked lower-cased ("wheelchair"): the analyzer normalizes case.
- checkIndex(docs, broker, new QName[] { new QName("description", "") }, null, 3);
- checkIndex(docs, broker, new QName[] { new QName("item", "") }, null, 5);
- checkIndex(docs, broker, new QName[] { new QName("description", "") }, "wardrobe", 0);
- checkIndex(docs, broker, new QName[] { new QName("item", "") }, "wardrobe", 0);
- o = checkIndex(docs, broker, new QName[] { new QName("description", "") }, "wheelchair", 1);
- assertEquals("wheelchair", o[0].getTerm());
-
- // Update attribute value
- proc.setBroker(broker);
- proc.setDocumentSet(docs);
- xupdate =
- XUPDATE_START +
- " <xu:update select=\"//item[@id = '1']/@attr\">abc</xu:update>" +
- XUPDATE_END;
- modifications = proc.parse(new InputSource(new StringReader(xupdate)));
- assertNotNull(modifications);
- modifications[0].process(transaction);
- proc.reset();
-
- // Attribute lookups need the QName flagged as an attribute name.
- QName qnattr[] = { new QName("attr", "", "") };
- qnattr[0].setNameType(ElementValue.ATTRIBUTE);
- o = checkIndex(docs, broker, qnattr, null, 1);
- assertEquals("abc", o[0].getTerm());
- checkIndex(docs, broker, qnattr, "attribute", 0);
-
- transact.commit(transaction);
- } catch (Exception e) {
- if (transact != null)
- transact.abort(transaction);
- e.printStackTrace();
- fail(e.getMessage());
- } finally {
- if (db != null) {
- db.release(broker);
- }
- }
- }
-
- // Verifies that XUpdate <xu:replace> keeps the Lucene index in sync when a
- // whole element (and later one of its children) is replaced: terms from the
- // removed subtree must disappear and terms from the new subtree be indexed.
- @Test
- public void xupdateReplace() {
- DocumentSet docs = configureAndStore(COLLECTION_CONFIG2, XML2, "xupdate.xml");
- DBBroker broker = null;
- TransactionManager transact = null;
- Txn transaction = null;
- try {
- broker = db.get(db.getSecurityManager().getSystemSubject());
- transact = db.getTransactionManager();
- transaction = transact.beginTransaction();
-
- // Baseline: "chair" indexed once under <description>; 5 <item> entries.
- Occurrences occur[] = checkIndex(docs, broker, new QName[] { new QName("description", "") }, "chair", 1);
- assertEquals("chair", occur[0].getTerm());
- checkIndex(docs, broker, new QName[] { new QName("item", "") }, null, 5);
-
- XQuery xquery = broker.getXQueryService();
- assertNotNull(xquery);
- Sequence seq = xquery.execute("//item[ft:query(description, 'chair')]", null, AccessContext.TEST);
- assertNotNull(seq);
- assertEquals(1, seq.getItemCount());
-
- // Replace the whole item[@id='1'] with a new item (id='4').
- XUpdateProcessor proc = new XUpdateProcessor(broker, docs, AccessContext.TEST);
- assertNotNull(proc);
- proc.setBroker(broker);
- proc.setDocumentSet(docs);
- String xupdate =
- XUPDATE_START +
- "<xu:replace select=\"//item[@id = '1']\">" +
- "<item id='4'><description>Wheelchair</description> <condition>poor</condition></item>" +
- "</xu:replace>" +
- XUPDATE_END;
- Modification[] modifications = proc.parse(new InputSource(new StringReader(xupdate)));
- assertNotNull(modifications);
- modifications[0].process(transaction);
- proc.reset();
-
- // Old subtree terms gone; new terms ("wheelchair", "poor") indexed on both
- // the child elements and the enclosing <item>. Terms are lower-cased.
- checkIndex(docs, broker, new QName[] { new QName("description", "") }, null, 3);
- checkIndex(docs, broker, new QName[] { new QName("condition", "") }, null, 3);
- checkIndex(docs, broker, new QName[] { new QName("item", "") }, null, 6);
- checkIndex(docs, broker, new QName[] { new QName("description", "") }, "chair", 0);
- checkIndex(docs, broker, new QName[] { new QName("item", "") }, "chair", 0);
- Occurrences o[] = checkIndex(docs, broker, new QName[] { new QName("description", "") }, "wheelchair", 1);
- assertEquals("wheelchair", o[0].getTerm());
- o = checkIndex(docs, broker, new QName[] { new QName("condition", "") }, "poor", 1);
- assertEquals("poor", o[0].getTerm());
- o = checkIndex(docs, broker, new QName[] { new QName("item", "") }, "wheelchair", 1);
- assertEquals("wheelchair", o[0].getTerm());
- o = checkIndex(docs, broker, new QName[] { new QName("item", "") }, "poor", 1);
- assertEquals("poor", o[0].getTerm());
-
- // Now replace just the <description> child of the new item.
- proc.setBroker(broker);
- proc.setDocumentSet(docs);
- xupdate =
- XUPDATE_START +
- "<xu:replace select=\"//item[@id = '4']/description\">" +
- "<description>Armchair</description>" +
- "</xu:replace>" +
- XUPDATE_END;
- modifications = proc.parse(new InputSource(new StringReader(xupdate)));
- assertNotNull(modifications);
- modifications[0].process(transaction);
- proc.reset();
-
- // "wheelchair" replaced by "armchair" throughout.
- checkIndex(docs, broker, new QName[] { new QName("description", "") }, null, 3);
- checkIndex(docs, broker, new QName[] { new QName("item", "") }, null, 6);
- checkIndex(docs, broker, new QName[] { new QName("description", "") }, "wheelchair", 0);
- checkIndex(docs, broker, new QName[] { new QName("item", "") }, "wheelchair", 0);
- o = checkIndex(docs, broker, new QName[] { new QName("description", "") }, "armchair", 1);
- assertEquals("armchair", o[0].getTerm());
- o = checkIndex(docs, broker, new QName[] { new QName("item", "") }, "armchair", 1);
- assertEquals("armchair", o[0].getTerm());
-
- transact.commit(transaction);
- } catch (Exception e) {
- if (transact != null)
- transact.abort(transaction);
- e.printStackTrace();
- fail(e.getMessage());
- } finally {
- if (db != null) {
- db.release(broker);
- }
- }
- }
-
- // Test fixture helper: optionally installs a collection configuration, then
- // stores a single XML string as document `docName` in the `root` collection.
- // Returns a DocumentSet holding the stored document. Any failure aborts the
- // transaction and fails the test via fail(e.getMessage()).
- // configuration - collection.xconf content, or null to skip configuration.
- // data          - the XML document content to store.
- // docName       - target document name within the root collection.
- private DocumentSet configureAndStore(String configuration, String data, String docName) {
- DBBroker broker = null;
- TransactionManager transact = null;
- Txn transaction = null;
- MutableDocumentSet docs = new DefaultDocumentSet();
- try {
- broker = db.get(db.getSecurityManager().getSystemSubject());
- assertNotNull(broker);
- transact = db.getTransactionManager();
- assertNotNull(transact);
- transaction = transact.beginTransaction();
- assertNotNull(transaction);
-
- if (configuration != null) {
- CollectionConfigurationManager mgr = db.getConfigurationManager();
- mgr.addConfiguration(transaction, broker, root, configuration);
- }
-
- // Two-phase store: validate first, then persist the same content.
- IndexInfo info = root.validateXMLResource(transaction, broker, XmldbURI.create(docName), data);
- assertNotNull(info);
- root.store(transaction, broker, info, data, false);
-
- docs.add(info.getDocument());
- transact.commit(transaction);
- } catch (Exception e) {
- if (transact != null)
- transact.abort(transaction);
- e.printStackTrace();
- fail(e.getMessage());
- } finally {
- db.release(broker);
- }
- return docs;
- }
-
- // Test fixture helper: optionally installs a collection configuration, then
- // stores every XML file found in `directory` (per MimeTable detection) into
- // the `root` collection. Returns a DocumentSet of the stored documents.
- // NOTE(review): File.listFiles() returns null if `directory` does not exist
- // or is not a directory — the loop below would then NPE (caught by the
- // generic catch and reported as a test failure); verify callers always pass
- // an existing directory.
- private DocumentSet configureAndStore(String configuration, String directory) {
- DBBroker broker = null;
- TransactionManager transact = null;
- Txn transaction = null;
- MutableDocumentSet docs = new DefaultDocumentSet();
- try {
- broker = db.get(db.getSecurityManager().getSystemSubject());
- assertNotNull(broker);
- transact = db.getTransactionManager();
- assertNotNull(transact);
- transaction = transact.beginTransaction();
- assertNotNull(transaction);
-
- if (configuration != null) {
- CollectionConfigurationManager mgr = db.getConfigurationManager();
- mgr.addConfiguration(transaction, broker, root, configuration);
- }
-
- File file = new File(directory);
- File[] files = file.listFiles();
- MimeTable mimeTab = MimeTable.getInstance();
- for (int j = 0; j < files.length; j++) {
- MimeType mime = mimeTab.getContentTypeFor(files[j].getName());
- if(mime != null && mime.isXMLType()) {
- System.out.println("Storing document " + files[j].getName());
- // A fresh InputSource is needed for the second read: validation
- // consumes the first one.
- InputSource is = new InputSource(files[j].getAbsolutePath());
- IndexInfo info =
- root.validateXMLResource(transaction, broker, XmldbURI.create(files[j].getName()), is);
- assertNotNull(info);
- is = new InputSource(files[j].getAbsolutePath());
- root.store(transaction, broker, info, is, false);
- docs.add(info.getDocument());
- }
- }
- transact.commit(transaction);
- } catch (Exception e) {
- if (transact != null)
- transact.abort(transaction);
- e.printStackTrace();
- fail(e.getMessage());
- } finally {
- db.release(broker);
- }
- return docs;
- }
-
- private DocumentSet configureAndStore(String configuration, Resource[] resources) {
- DBBroker broker = null;
- TransactionManager transact = null;
- Txn transaction = null;
- MutableDocumentSet docs = new DefaultDocumentSet();
- try {
- broker = db.get(db.getSecurityManager().getSystemSubject());
- assertNotNull(broker);
- transact = db.getTransactionManager();
- assertNotNull(transact);
- transaction = transact.beginTransaction();
- assertNotNull(transaction);
-
- MetaData md = MetaData.get();
- assertNotNull(md);
-
- if (configuration != null) {
- CollectionConfigurationManager mgr = db.getConfigurationManager();
- mgr.addConfiguration(transaction, broker, root, configuration);
- }
-
- for (Resource resource : resources) {
- IndexInfo info = root.validateXMLResource(transaction, broker, XmldbURI.create(resource.docName), resource.data);
- assertNotNull(info);
-
- if (docs != null) {
- Metas docMD = md.getMetas(info.getDocument());
- if (docMD == null) {
- docMD = md.addMetas(info.getDocument());
- }
- assertNotNull(docMD);
-
- for (Entry<String, String> entry : resource.metas.entrySet()) {
- docMD.put(entry.getKey(), entry.getValue());
- }
- }
-
- root.store(transaction, broker, info, resource.data, false);
-