Merge branch 'solandra' of git://github.com/tjake/Solandra into solandra

commit 68bfd91f92e8241b963eb36b65ddeb32263cc38c (2 parents: 3a589dd, 8b044cf)
@ceocoder authored
src/lucandra/IndexReader.java (67 changed lines)
@@ -26,6 +26,8 @@
import java.util.concurrent.ConcurrentMap;
import lucandra.cluster.CassandraIndexManager;
+import lucandra.serializers.thrift.DocumentMetadata;
+import lucandra.serializers.thrift.ThriftTerm;
import com.google.common.collect.MapMaker;
@@ -34,19 +36,19 @@
import org.apache.cassandra.utils.ByteBufferUtil;
import org.apache.log4j.Logger;
import org.apache.lucene.analysis.SimpleAnalyzer;
-import org.apache.lucene.document.Document;
-import org.apache.lucene.document.Field;
-import org.apache.lucene.document.FieldSelector;
+import org.apache.lucene.analysis.tokenattributes.TypeAttribute;
+import org.apache.lucene.document.*;
import org.apache.lucene.document.Field.Index;
import org.apache.lucene.document.Field.Store;
import org.apache.lucene.index.*;
+import org.apache.lucene.index.IndexWriter.MaxFieldLength;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.TermFreqVector;
-import org.apache.lucene.index.IndexWriter.MaxFieldLength;
import org.apache.lucene.search.Similarity;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.LockObtainFailedException;
import org.apache.lucene.store.RAMDirectory;
+import org.apache.lucene.util.NumericUtils;
import org.apache.lucene.util.OpenBitSet;
import solandra.SolandraFieldSelector;
@@ -311,49 +313,30 @@ public Document document(int docNum, FieldSelector selector) throws CorruptIndex
continue;
}
- byte[] value;
- ByteBuffer v = ByteBuffer.wrap(CassandraUtils.decompress(ByteBufferUtil.getArray(col.value())));
- int vlimit = v.limit() - v.position();
-
- if (v.get(v.limit() - 1) != Byte.MAX_VALUE && v.get(v.limit() - 1) != Byte.MIN_VALUE)
+ DocumentMetadata dm = lucandra.IndexWriter.fromBytesUsingThrift(col.value());
+
+ for(ThriftTerm term : dm.getTerms())
{
- throw new CorruptIndexException("Lucandra field is not properly encoded: " + docNum + "("
- + fieldName + ")");
-
- }
- else if (v.get(v.limit() - 1) == Byte.MAX_VALUE)
- { // Binary
- value = new byte[vlimit - 1];
- ByteBufferUtil.arrayCopy(v, v.position(), value, 0, vlimit - 1);
-
- field = new Field(fieldName, value, Store.YES);
- cacheDoc.add(field);
- }
- else if (v.get(v.limit() - 1) == Byte.MIN_VALUE)
- { // String
- value = new byte[vlimit - 1];
- ByteBufferUtil.arrayCopy(v, v.position(), value, 0, vlimit - 1);
-
- // Check for multi-fields
- String fieldString = new String(value, "UTF-8");
-
- if (fieldString.indexOf(CassandraUtils.delimeter) >= 0)
+ Fieldable f = null;
+
+ if( term.isSetLongVal() )
+ {
+ f = new NumericField(term.getField()).setLongValue(term.getLongVal());
+ }
+ else if(term.isSetIs_binary())
{
- StringTokenizer tok = new StringTokenizer(fieldString, CassandraUtils.delimeter);
- while (tok.hasMoreTokens())
- {
- field = new Field(fieldName, tok.nextToken(), Store.YES, Index.ANALYZED);
- cacheDoc.add(field);
- }
+ if(term.is_binary)
+ f = new Field(term.getField(), term.getText());
+ else
+ f = new Field(term.getField(), new String(term.getText()), Store.YES, Index.ANALYZED);
}
else
- {
+ throw new RuntimeException("Malformed term");
+
+ cacheDoc.add(f);
- field = new Field(fieldName, fieldString, Store.YES, Index.ANALYZED);
- cacheDoc.add(field);
- }
- }
- }
+ }
+ }
}
// Mark the required doc
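
Stored columns no longer use the trailing MAX_VALUE/MIN_VALUE flag byte and delimiter-joined multi-values; each column now holds a Thrift-serialized DocumentMetadata. Below is a minimal standalone sketch of that decode path, assuming the accessors shown in this diff; the class and method names (StoredFieldDecoder, addStoredFields) are hypothetical and not part of the commit.

    import java.nio.ByteBuffer;

    import lucandra.serializers.thrift.DocumentMetadata;
    import lucandra.serializers.thrift.ThriftTerm;

    import org.apache.lucene.document.Document;
    import org.apache.lucene.document.Field;
    import org.apache.lucene.document.Field.Index;
    import org.apache.lucene.document.Field.Store;
    import org.apache.lucene.document.Fieldable;
    import org.apache.lucene.document.NumericField;

    class StoredFieldDecoder {
        // Rebuild Lucene fields from one stored column value (a Thrift-encoded DocumentMetadata).
        static void addStoredFields(ByteBuffer columnValue, Document cacheDoc) throws Exception {
            DocumentMetadata dm = lucandra.IndexWriter.fromBytesUsingThrift(columnValue);

            for (ThriftTerm term : dm.getTerms()) {
                Fieldable f;
                if (term.isSetLongVal()) {
                    // numeric fields round-trip through the new longVal slot
                    f = new NumericField(term.getField()).setLongValue(term.getLongVal());
                } else if (term.isSetIs_binary() && term.isIs_binary()) {
                    // raw binary payloads are stored as-is
                    f = new Field(term.getField(), term.getText());
                } else {
                    // everything else is treated as UTF-8 text
                    f = new Field(term.getField(), new String(term.getText(), "UTF-8"),
                            Store.YES, Index.ANALYZED);
                }
                cacheDoc.add(f);
            }
        }
    }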
src/lucandra/IndexWriter.java (133 changed lines)
@@ -19,9 +19,7 @@
*/
package lucandra;
-import java.io.IOException;
-import java.io.StringReader;
-import java.io.UnsupportedEncodingException;
+import java.io.*;
import java.nio.ByteBuffer;
import java.util.*;
import java.util.concurrent.ConcurrentMap;
@@ -44,22 +42,19 @@
import org.apache.log4j.Logger;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.TokenStream;
-import org.apache.lucene.analysis.tokenattributes.OffsetAttribute;
-import org.apache.lucene.analysis.tokenattributes.PositionIncrementAttribute;
-import org.apache.lucene.analysis.tokenattributes.TermAttribute;
+import org.apache.lucene.analysis.tokenattributes.*;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Fieldable;
+import org.apache.lucene.document.NumericField;
import org.apache.lucene.index.CorruptIndexException;
import org.apache.lucene.index.FieldInvertState;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.*;
-import org.apache.thrift.TDeserializer;
import org.apache.thrift.TException;
import org.apache.thrift.TSerializer;
import org.apache.thrift.protocol.TBinaryProtocol;
import org.apache.thrift.protocol.TProtocol;
import org.apache.thrift.protocol.TProtocolFactory;
-import org.apache.thrift.protocol.TBinaryProtocol.Factory;
import org.apache.thrift.transport.TMemoryInputTransport;
import org.apache.thrift.transport.TTransport;
@@ -89,8 +84,8 @@ public void addDocument(String indexName, Document doc, Analyzer analyzer, int d
ByteBuffer indexTermsKey = CassandraUtils.hashKeyBytes(indexNameBytes, CassandraUtils.delimeterBytes, "terms"
.getBytes("UTF-8"));
- List<ThriftTerm> allIndexedTerms = new ArrayList<ThriftTerm>();
- Map<String, byte[]> fieldCache = new HashMap<String, byte[]>(1024);
+ DocumentMetadata allIndexedTerms = new DocumentMetadata();
+ Map<String, DocumentMetadata> fieldCache = new HashMap<String, DocumentMetadata>(1024);
// By default we don't handle indexSharding
// We round robin replace the index
@@ -99,21 +94,27 @@ public void addDocument(String indexName, Document doc, Analyzer analyzer, int d
ByteBuffer docId = ByteBuffer.wrap(CassandraUtils.writeVInt(docNumber));
int position = 0;
- for (Fieldable field : (List<Fieldable>) doc.getFields())
+ for (Fieldable field : doc.getFields())
{
ThriftTerm firstTerm = null;
+
// Indexed field
if (field.isIndexed() && field.isTokenized())
{
-
TokenStream tokens = field.tokenStreamValue();
if (tokens == null)
{
- tokens = analyzer.tokenStream(field.name(), new StringReader(field.stringValue()));
+ Reader tokReader = field.readerValue();
+
+ if (tokReader == null)
+ tokReader = new StringReader(field.stringValue());
+
+ tokens = analyzer.reusableTokenStream(field.name(), tokReader);
}
+
// collect term information per field
Map<Term, Map<ByteBuffer, List<Number>>> allTermInformation = new HashMap<Term, Map<ByteBuffer, List<Number>>>();
@@ -138,11 +139,12 @@ public void addDocument(String indexName, Document doc, Analyzer analyzer, int d
// positions
PositionIncrementAttribute posIncrAttribute = null;
if (field.isStorePositionWithTermVector())
- posIncrAttribute = (PositionIncrementAttribute) tokens
- .addAttribute(PositionIncrementAttribute.class);
-
- TermAttribute termAttribute = (TermAttribute) tokens.addAttribute(TermAttribute.class);
+ posIncrAttribute = (PositionIncrementAttribute) tokens.addAttribute(PositionIncrementAttribute.class);
+ //term as string
+ CharTermAttribute termAttribute = (CharTermAttribute) tokens.addAttribute(CharTermAttribute.class);
+
+
// store normalizations of field per term per document rather
// than per field.
// this adds more to write but less to read on other side
@@ -151,14 +153,14 @@ public void addDocument(String indexName, Document doc, Analyzer analyzer, int d
while (tokens.incrementToken())
{
tokensInField++;
- Term term = new Term(field.name(), termAttribute.term());
-
- ThriftTerm tterm = new ThriftTerm(field.name(), termAttribute.term());
+ Term term = new Term(field.name(), termAttribute.toString());
+
+ ThriftTerm tterm = new ThriftTerm(term.field()).setText(ByteBuffer.wrap(term.text().getBytes("UTF-8"))).setIs_binary(false);
if(firstTerm == null)
firstTerm = tterm;
- allIndexedTerms.add(tterm);
+ allIndexedTerms.addToTerms(tterm);
// fetch all collected information for this term
Map<ByteBuffer, List<Number>> termInfo = allTermInformation.get(term);
@@ -227,7 +229,7 @@ public void addDocument(String indexName, Document doc, Analyzer analyzer, int d
invertState.setLength(tokensInField);
final float norm = similarity.computeNorm(field.name(), invertState);
- bnorm.add(Similarity.encodeNorm(norm));
+ bnorm.add(Similarity.getDefault().encodeNormValue(norm));
}
for (Map.Entry<Term, Map<ByteBuffer, List<Number>>> term : allTermInformation.entrySet())
@@ -259,12 +261,12 @@ public void addDocument(String indexName, Document doc, Analyzer analyzer, int d
// Untokenized fields go in without a termPosition
if (field.isIndexed() && !field.isTokenized())
{
- ThriftTerm tterm = new ThriftTerm(field.name(), field.stringValue());
+ ThriftTerm tterm = new ThriftTerm(field.name()).setText(ByteBuffer.wrap(field.stringValue().getBytes("UTF-8"))).setIs_binary(false);
if(firstTerm == null)
firstTerm = tterm;
- allIndexedTerms.add(tterm);
+ allIndexedTerms.addToTerms(tterm);
ByteBuffer key = CassandraUtils.hashKeyBytes(indexName.getBytes("UTF-8"),
CassandraUtils.delimeterBytes, field.name().getBytes("UTF-8"), CassandraUtils.delimeterBytes,
@@ -284,60 +286,49 @@ public void addDocument(String indexName, Document doc, Analyzer analyzer, int d
// Stores each field as a column under this doc key
if (field.isStored())
- {
-
- byte[] _value = field.isBinary() ? field.getBinaryValue() : field.stringValue().getBytes("UTF-8");
-
- // first byte flags if binary or not
- byte[] value = new byte[_value.length + 1];
- System.arraycopy(_value, 0, value, 0, _value.length);
-
- value[value.length - 1] = (byte) (field.isBinary() ? Byte.MAX_VALUE : Byte.MIN_VALUE);
-
- // logic to handle multiple fields w/ same name
- byte[] currentValue = fieldCache.get(field.name());
- if (currentValue == null)
+ {
+ ThriftTerm tt = new ThriftTerm(field.name());
+
+ if (field instanceof NumericField)
{
- fieldCache.put(field.name(), value);
+ Number n = ((NumericField) field).getNumericValue();
+ tt.setLongVal(n.longValue());
}
- else
+
+ byte[] value = field.isBinary() ? field.getBinaryValue() : field.stringValue().getBytes("UTF-8");
+ tt.setText(ByteBuffer.wrap(value)).setIs_binary(field.isBinary());
+
+
+ // logic to handle multiple fields w/ same name
+ DocumentMetadata currentValue = fieldCache.get(field.name());
+ if (currentValue == null)
{
-
- // append new data
- byte[] newValue = new byte[currentValue.length + CassandraUtils.delimeterBytes.length
- + value.length - 1];
- System.arraycopy(currentValue, 0, newValue, 0, currentValue.length - 1);
- System.arraycopy(CassandraUtils.delimeterBytes, 0, newValue, currentValue.length - 1,
- CassandraUtils.delimeterBytes.length);
- System.arraycopy(value, 0, newValue,
- currentValue.length + CassandraUtils.delimeterBytes.length - 1, value.length);
-
- fieldCache.put(field.name(), newValue);
+ currentValue = new DocumentMetadata();
+ fieldCache.put(field.name(), currentValue);
}
+
+ currentValue.addToTerms(tt);
}
//Store for field cache
if(firstTerm != null)
- {
-
+ {
ByteBuffer fieldCacheKey = CassandraUtils.hashKeyBytes(indexNameBytes, CassandraUtils.delimeterBytes, firstTerm.field.getBytes());
- CassandraUtils.addMutations(workingMutations, CassandraUtils.fieldCacheColumnFamily, CassandraUtils.writeVInt(docNumber), fieldCacheKey, firstTerm.text.getBytes("UTF-8"));
+ CassandraUtils.addMutations(workingMutations, CassandraUtils.fieldCacheColumnFamily, CassandraUtils.writeVInt(docNumber), fieldCacheKey, firstTerm.text);
if(logger.isDebugEnabled())
logger.debug(indexName+" - firstTerm: "+ByteBufferUtil.string(fieldCacheKey));
-
- }
-
+ }
}
ByteBuffer key = CassandraUtils.hashKeyBytes(indexNameBytes, CassandraUtils.delimeterBytes,
Integer.toHexString(docNumber).getBytes("UTF-8"));
// Store each field as a column under this docId
- for (Map.Entry<String, byte[]> field : fieldCache.entrySet())
+ for (Map.Entry<String, DocumentMetadata> field : fieldCache.entrySet())
{
CassandraUtils.addMutations(workingMutations, CassandraUtils.docColumnFamily, field.getKey().getBytes(
- "UTF-8"), key, CassandraUtils.compress(field.getValue()));
+ "UTF-8"), key, toBytesUsingThrift(field.getValue()));
}
// Finally, Store meta-data so we can delete this document
@@ -451,26 +442,26 @@ private void deleteLucandraDocument(String indexName, int docNumber, boolean aut
if (metaCol == null)
return;
- List<Term> terms = fromBytesUsingThrift(metaCol.value());
+ DocumentMetadata terms = fromBytesUsingThrift(metaCol.value());
Set<String> fields = new HashSet<String>();
- for (Term term : terms)
+ for (ThriftTerm term : terms.getTerms())
{
//remove from field cache
- if(!fields.contains(term.field()))
+ if(!fields.contains(term.getField()))
{
- ByteBuffer fieldCacheKey = CassandraUtils.hashKeyBytes(indexNameBytes, CassandraUtils.delimeterBytes, term.field().getBytes());
+ ByteBuffer fieldCacheKey = CassandraUtils.hashKeyBytes(indexNameBytes, CassandraUtils.delimeterBytes, term.getField().getBytes());
CassandraUtils.addMutations(workingMutations, CassandraUtils.fieldCacheColumnFamily, CassandraUtils.writeVInt(docNumber), fieldCacheKey, (ByteBuffer) null);
- fields.add(term.field());
+ fields.add(term.getField());
}
try
{
- key = CassandraUtils.hashKeyBytes(indexNameBytes, CassandraUtils.delimeterBytes, term.field()
- .getBytes("UTF-8"), CassandraUtils.delimeterBytes, term.text().getBytes("UTF-8"));
+ key = CassandraUtils.hashKeyBytes(indexNameBytes, CassandraUtils.delimeterBytes, term.getField()
+ .getBytes("UTF-8"), CassandraUtils.delimeterBytes, term.getText());
}
catch (UnsupportedEncodingException e)
{
@@ -599,9 +590,8 @@ private void appendMutations(String indexName, Map<ByteBuffer, RowMutation> muta
}
/** Write all terms to bytes using thrift serialization */
- public static ByteBuffer toBytesUsingThrift(List<ThriftTerm> allTerms) throws IOException
+ public static ByteBuffer toBytesUsingThrift(DocumentMetadata data) throws IOException
{
- DocumentMetadata data = new DocumentMetadata(allTerms);
try
{
@@ -614,7 +604,7 @@ public static ByteBuffer toBytesUsingThrift(List<ThriftTerm> allTerms) throws IO
}
/** Read the object from bytes string. */
- public static List<Term> fromBytesUsingThrift(ByteBuffer data) throws IOException
+ public static DocumentMetadata fromBytesUsingThrift(ByteBuffer data) throws IOException
{
DocumentMetadata docMeta = new DocumentMetadata();
@@ -632,11 +622,6 @@ public static ByteBuffer toBytesUsingThrift(List<ThriftTerm> allTerms) throws IO
throw new IOException(e);
}
- List<Term> terms = new ArrayList<Term>(docMeta.terms.size());
- for(ThriftTerm term : docMeta.terms)
- {
- terms.add(new Term(term.field, term.text));
- }
- return terms;
+ return docMeta;
}
}
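
With this change, toBytesUsingThrift and fromBytesUsingThrift serialize a whole DocumentMetadata instead of a List<Term>. A self-contained round-trip sketch using Thrift's TSerializer/TDeserializer with the binary protocol follows; it is illustrative only (DocumentMetadataRoundTrip is a hypothetical class, and the committed helpers use their own protocol/transport plumbing).

    import java.nio.ByteBuffer;

    import lucandra.serializers.thrift.DocumentMetadata;
    import lucandra.serializers.thrift.ThriftTerm;

    import org.apache.thrift.TDeserializer;
    import org.apache.thrift.TException;
    import org.apache.thrift.TSerializer;
    import org.apache.thrift.protocol.TBinaryProtocol;

    class DocumentMetadataRoundTrip {
        public static void main(String[] args) throws TException {
            DocumentMetadata meta = new DocumentMetadata();
            meta.addToTerms(new ThriftTerm("title")
                    .setText(ByteBuffer.wrap("hello world".getBytes()))
                    .setIs_binary(false));
            meta.addToTerms(new ThriftTerm("price").setLongVal(42L));

            // serialize the struct to bytes
            byte[] bytes = new TSerializer(new TBinaryProtocol.Factory()).serialize(meta);

            // deserialize into a fresh struct and verify the terms survived
            DocumentMetadata copy = new DocumentMetadata();
            new TDeserializer(new TBinaryProtocol.Factory()).deserialize(copy, bytes);

            assert copy.getTerms().size() == 2;
            assert copy.getTerms().get(1).isSetLongVal();
        }
    }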
src/lucandra/TermFreqVector.java (14 changed lines)
@@ -27,6 +27,7 @@
import java.util.Arrays;
import java.util.List;
+import lucandra.serializers.thrift.DocumentMetadata;
import lucandra.serializers.thrift.ThriftTerm;
import org.apache.cassandra.db.ReadCommand;
@@ -82,25 +83,24 @@ public TermFreqVector(String indexName, String field, int docI)
return; // this docId is missing
}
- List<Term> allTerms;
+
- allTerms = IndexWriter.fromBytesUsingThrift(rows.get(0).cf.getColumn(
+ DocumentMetadata allTerms = IndexWriter.fromBytesUsingThrift(rows.get(0).cf.getColumn(
CassandraUtils.documentMetaFieldBytes).value());
List<ReadCommand> readCommands = new ArrayList<ReadCommand>();
- for (Term t : allTerms)
+ for (ThriftTerm t : allTerms.getTerms())
{
-
// skip the ones not of this field
- if (!t.field().equals(field))
+ if (!t.getField().equals(field))
continue;
// add to multiget params
try
{
- key = CassandraUtils.hashKeyBytes(indexName.getBytes("UTF-8"), CassandraUtils.delimeterBytes, t.field()
- .getBytes("UTF-8"), CassandraUtils.delimeterBytes, t.text().getBytes("UTF-8"));
+ key = CassandraUtils.hashKeyBytes(indexName.getBytes("UTF-8"), CassandraUtils.delimeterBytes, t.getField()
+ .getBytes("UTF-8"), CassandraUtils.delimeterBytes, t.getText());
}
catch (UnsupportedEncodingException e)
{
thrift/gen-java/lucandra/serializers/thrift/DocumentMetadata.java (54 changed lines)
@@ -21,21 +21,15 @@
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-import org.apache.thrift.*;
-import org.apache.thrift.async.*;
-import org.apache.thrift.meta_data.*;
-import org.apache.thrift.transport.*;
-import org.apache.thrift.protocol.*;
+public class DocumentMetadata implements org.apache.thrift.TBase<DocumentMetadata, DocumentMetadata._Fields>, java.io.Serializable, Cloneable {
+ private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("DocumentMetadata");
-public class DocumentMetadata implements TBase<DocumentMetadata, DocumentMetadata._Fields>, java.io.Serializable, Cloneable {
- private static final TStruct STRUCT_DESC = new TStruct("DocumentMetadata");
-
- private static final TField TERMS_FIELD_DESC = new TField("terms", TType.LIST, (short)1);
+ private static final org.apache.thrift.protocol.TField TERMS_FIELD_DESC = new org.apache.thrift.protocol.TField("terms", org.apache.thrift.protocol.TType.LIST, (short)1);
public List<ThriftTerm> terms;
/** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
- public enum _Fields implements TFieldIdEnum {
+ public enum _Fields implements org.apache.thrift.TFieldIdEnum {
TERMS((short)1, "terms");
private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
@@ -94,14 +88,14 @@ public String getFieldName() {
// isset id assignments
- public static final Map<_Fields, FieldMetaData> metaDataMap;
+ public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
static {
- Map<_Fields, FieldMetaData> tmpMap = new EnumMap<_Fields, FieldMetaData>(_Fields.class);
- tmpMap.put(_Fields.TERMS, new FieldMetaData("terms", TFieldRequirementType.REQUIRED,
- new ListMetaData(TType.LIST,
- new StructMetaData(TType.STRUCT, ThriftTerm.class))));
+ Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
+ tmpMap.put(_Fields.TERMS, new org.apache.thrift.meta_data.FieldMetaData("terms", org.apache.thrift.TFieldRequirementType.REQUIRED,
+ new org.apache.thrift.meta_data.ListMetaData(org.apache.thrift.protocol.TType.LIST,
+ new org.apache.thrift.meta_data.StructMetaData(org.apache.thrift.protocol.TType.STRUCT, ThriftTerm.class))));
metaDataMap = Collections.unmodifiableMap(tmpMap);
- FieldMetaData.addStructMetaDataMap(DocumentMetadata.class, metaDataMap);
+ org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(DocumentMetadata.class, metaDataMap);
}
public DocumentMetadata() {
@@ -164,7 +158,7 @@ public void unsetTerms() {
this.terms = null;
}
- /** Returns true if field terms is set (has been asigned a value) and false otherwise */
+ /** Returns true if field terms is set (has been assigned a value) and false otherwise */
public boolean isSetTerms() {
return this.terms != null;
}
@@ -197,7 +191,7 @@ public Object getFieldValue(_Fields field) {
throw new IllegalStateException();
}
- /** Returns true if field corresponding to fieldID is set (has been asigned a value) and false otherwise */
+ /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
public boolean isSet(_Fields field) {
if (field == null) {
throw new IllegalArgumentException();
@@ -260,7 +254,7 @@ public int compareTo(DocumentMetadata other) {
return lastComparison;
}
if (isSetTerms()) {
- lastComparison = TBaseHelper.compareTo(this.terms, typedOther.terms);
+ lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.terms, typedOther.terms);
if (lastComparison != 0) {
return lastComparison;
}
@@ -272,20 +266,20 @@ public _Fields fieldForId(int fieldId) {
return _Fields.findByThriftId(fieldId);
}
- public void read(TProtocol iprot) throws TException {
- TField field;
+ public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
+ org.apache.thrift.protocol.TField field;
iprot.readStructBegin();
while (true)
{
field = iprot.readFieldBegin();
- if (field.type == TType.STOP) {
+ if (field.type == org.apache.thrift.protocol.TType.STOP) {
break;
}
switch (field.id) {
case 1: // TERMS
- if (field.type == TType.LIST) {
+ if (field.type == org.apache.thrift.protocol.TType.LIST) {
{
- TList _list0 = iprot.readListBegin();
+ org.apache.thrift.protocol.TList _list0 = iprot.readListBegin();
this.terms = new ArrayList<ThriftTerm>(_list0.size);
for (int _i1 = 0; _i1 < _list0.size; ++_i1)
{
@@ -297,11 +291,11 @@ public void read(TProtocol iprot) throws TException {
iprot.readListEnd();
}
} else {
- TProtocolUtil.skip(iprot, field.type);
+ org.apache.thrift.protocol.TProtocolUtil.skip(iprot, field.type);
}
break;
default:
- TProtocolUtil.skip(iprot, field.type);
+ org.apache.thrift.protocol.TProtocolUtil.skip(iprot, field.type);
}
iprot.readFieldEnd();
}
@@ -311,14 +305,14 @@ public void read(TProtocol iprot) throws TException {
validate();
}
- public void write(TProtocol oprot) throws TException {
+ public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
validate();
oprot.writeStructBegin(STRUCT_DESC);
if (this.terms != null) {
oprot.writeFieldBegin(TERMS_FIELD_DESC);
{
- oprot.writeListBegin(new TList(TType.STRUCT, this.terms.size()));
+ oprot.writeListBegin(new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRUCT, this.terms.size()));
for (ThriftTerm _iter3 : this.terms)
{
_iter3.write(oprot);
@@ -347,10 +341,10 @@ public String toString() {
return sb.toString();
}
- public void validate() throws TException {
+ public void validate() throws org.apache.thrift.TException {
// check for required fields
if (terms == null) {
- throw new TProtocolException("Required field 'terms' was not present! Struct: " + toString());
+ throw new org.apache.thrift.protocol.TProtocolException("Required field 'terms' was not present! Struct: " + toString());
}
}
thrift/gen-java/lucandra/serializers/thrift/ThriftTerm.java (301 changed lines)
@@ -21,28 +21,28 @@
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-import org.apache.thrift.*;
-import org.apache.thrift.async.*;
-import org.apache.thrift.meta_data.*;
-import org.apache.thrift.transport.*;
-import org.apache.thrift.protocol.*;
-
/**
* Term Information..
*/
-public class ThriftTerm implements TBase<ThriftTerm, ThriftTerm._Fields>, java.io.Serializable, Cloneable {
- private static final TStruct STRUCT_DESC = new TStruct("ThriftTerm");
+public class ThriftTerm implements org.apache.thrift.TBase<ThriftTerm, ThriftTerm._Fields>, java.io.Serializable, Cloneable {
+ private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("ThriftTerm");
- private static final TField FIELD_FIELD_DESC = new TField("field", TType.STRING, (short)1);
- private static final TField TEXT_FIELD_DESC = new TField("text", TType.STRING, (short)2);
+ private static final org.apache.thrift.protocol.TField FIELD_FIELD_DESC = new org.apache.thrift.protocol.TField("field", org.apache.thrift.protocol.TType.STRING, (short)1);
+ private static final org.apache.thrift.protocol.TField TEXT_FIELD_DESC = new org.apache.thrift.protocol.TField("text", org.apache.thrift.protocol.TType.STRING, (short)2);
+ private static final org.apache.thrift.protocol.TField IS_BINARY_FIELD_DESC = new org.apache.thrift.protocol.TField("is_binary", org.apache.thrift.protocol.TType.BOOL, (short)3);
+ private static final org.apache.thrift.protocol.TField LONG_VAL_FIELD_DESC = new org.apache.thrift.protocol.TField("longVal", org.apache.thrift.protocol.TType.I64, (short)4);
public String field;
- public String text;
+ public ByteBuffer text;
+ public boolean is_binary;
+ public long longVal;
/** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
- public enum _Fields implements TFieldIdEnum {
+ public enum _Fields implements org.apache.thrift.TFieldIdEnum {
FIELD((short)1, "field"),
- TEXT((short)2, "text");
+ TEXT((short)2, "text"),
+ IS_BINARY((short)3, "is_binary"),
+ LONG_VAL((short)4, "longVal");
private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
@@ -61,6 +61,10 @@ public static _Fields findByThriftId(int fieldId) {
return FIELD;
case 2: // TEXT
return TEXT;
+ case 3: // IS_BINARY
+ return IS_BINARY;
+ case 4: // LONG_VAL
+ return LONG_VAL;
default:
return null;
}
@@ -101,40 +105,50 @@ public String getFieldName() {
}
// isset id assignments
+ private static final int __IS_BINARY_ISSET_ID = 0;
+ private static final int __LONGVAL_ISSET_ID = 1;
+ private BitSet __isset_bit_vector = new BitSet(2);
- public static final Map<_Fields, FieldMetaData> metaDataMap;
+ public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
static {
- Map<_Fields, FieldMetaData> tmpMap = new EnumMap<_Fields, FieldMetaData>(_Fields.class);
- tmpMap.put(_Fields.FIELD, new FieldMetaData("field", TFieldRequirementType.REQUIRED,
- new FieldValueMetaData(TType.STRING)));
- tmpMap.put(_Fields.TEXT, new FieldMetaData("text", TFieldRequirementType.REQUIRED,
- new FieldValueMetaData(TType.STRING)));
+ Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
+ tmpMap.put(_Fields.FIELD, new org.apache.thrift.meta_data.FieldMetaData("field", org.apache.thrift.TFieldRequirementType.REQUIRED,
+ new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
+ tmpMap.put(_Fields.TEXT, new org.apache.thrift.meta_data.FieldMetaData("text", org.apache.thrift.TFieldRequirementType.OPTIONAL,
+ new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING , true)));
+ tmpMap.put(_Fields.IS_BINARY, new org.apache.thrift.meta_data.FieldMetaData("is_binary", org.apache.thrift.TFieldRequirementType.OPTIONAL,
+ new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.BOOL)));
+ tmpMap.put(_Fields.LONG_VAL, new org.apache.thrift.meta_data.FieldMetaData("longVal", org.apache.thrift.TFieldRequirementType.OPTIONAL,
+ new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.I64)));
metaDataMap = Collections.unmodifiableMap(tmpMap);
- FieldMetaData.addStructMetaDataMap(ThriftTerm.class, metaDataMap);
+ org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(ThriftTerm.class, metaDataMap);
}
public ThriftTerm() {
}
public ThriftTerm(
- String field,
- String text)
+ String field)
{
this();
this.field = field;
- this.text = text;
}
/**
* Performs a deep copy on <i>other</i>.
*/
public ThriftTerm(ThriftTerm other) {
+ __isset_bit_vector.clear();
+ __isset_bit_vector.or(other.__isset_bit_vector);
if (other.isSetField()) {
this.field = other.field;
}
if (other.isSetText()) {
- this.text = other.text;
+ this.text = org.apache.thrift.TBaseHelper.copyBinary(other.text);
+;
}
+ this.is_binary = other.is_binary;
+ this.longVal = other.longVal;
}
public ThriftTerm deepCopy() {
@@ -145,6 +159,10 @@ public ThriftTerm deepCopy() {
public void clear() {
this.field = null;
this.text = null;
+ setIs_binaryIsSet(false);
+ this.is_binary = false;
+ setLongValIsSet(false);
+ this.longVal = 0;
}
public String getField() {
@@ -160,7 +178,7 @@ public void unsetField() {
this.field = null;
}
- /** Returns true if field field is set (has been asigned a value) and false otherwise */
+ /** Returns true if field field is set (has been assigned a value) and false otherwise */
public boolean isSetField() {
return this.field != null;
}
@@ -171,11 +189,21 @@ public void setFieldIsSet(boolean value) {
}
}
- public String getText() {
- return this.text;
+ public byte[] getText() {
+ setText(org.apache.thrift.TBaseHelper.rightSize(text));
+ return text == null ? null : text.array();
+ }
+
+ public ByteBuffer bufferForText() {
+ return text;
}
- public ThriftTerm setText(String text) {
+ public ThriftTerm setText(byte[] text) {
+ setText(text == null ? (ByteBuffer)null : ByteBuffer.wrap(text));
+ return this;
+ }
+
+ public ThriftTerm setText(ByteBuffer text) {
this.text = text;
return this;
}
@@ -184,7 +212,7 @@ public void unsetText() {
this.text = null;
}
- /** Returns true if field text is set (has been asigned a value) and false otherwise */
+ /** Returns true if field text is set (has been assigned a value) and false otherwise */
public boolean isSetText() {
return this.text != null;
}
@@ -195,6 +223,52 @@ public void setTextIsSet(boolean value) {
}
}
+ public boolean isIs_binary() {
+ return this.is_binary;
+ }
+
+ public ThriftTerm setIs_binary(boolean is_binary) {
+ this.is_binary = is_binary;
+ setIs_binaryIsSet(true);
+ return this;
+ }
+
+ public void unsetIs_binary() {
+ __isset_bit_vector.clear(__IS_BINARY_ISSET_ID);
+ }
+
+ /** Returns true if field is_binary is set (has been assigned a value) and false otherwise */
+ public boolean isSetIs_binary() {
+ return __isset_bit_vector.get(__IS_BINARY_ISSET_ID);
+ }
+
+ public void setIs_binaryIsSet(boolean value) {
+ __isset_bit_vector.set(__IS_BINARY_ISSET_ID, value);
+ }
+
+ public long getLongVal() {
+ return this.longVal;
+ }
+
+ public ThriftTerm setLongVal(long longVal) {
+ this.longVal = longVal;
+ setLongValIsSet(true);
+ return this;
+ }
+
+ public void unsetLongVal() {
+ __isset_bit_vector.clear(__LONGVAL_ISSET_ID);
+ }
+
+ /** Returns true if field longVal is set (has been assigned a value) and false otherwise */
+ public boolean isSetLongVal() {
+ return __isset_bit_vector.get(__LONGVAL_ISSET_ID);
+ }
+
+ public void setLongValIsSet(boolean value) {
+ __isset_bit_vector.set(__LONGVAL_ISSET_ID, value);
+ }
+
public void setFieldValue(_Fields field, Object value) {
switch (field) {
case FIELD:
@@ -209,7 +283,23 @@ public void setFieldValue(_Fields field, Object value) {
if (value == null) {
unsetText();
} else {
- setText((String)value);
+ setText((ByteBuffer)value);
+ }
+ break;
+
+ case IS_BINARY:
+ if (value == null) {
+ unsetIs_binary();
+ } else {
+ setIs_binary((Boolean)value);
+ }
+ break;
+
+ case LONG_VAL:
+ if (value == null) {
+ unsetLongVal();
+ } else {
+ setLongVal((Long)value);
}
break;
@@ -224,11 +314,17 @@ public Object getFieldValue(_Fields field) {
case TEXT:
return getText();
+ case IS_BINARY:
+ return new Boolean(isIs_binary());
+
+ case LONG_VAL:
+ return new Long(getLongVal());
+
}
throw new IllegalStateException();
}
- /** Returns true if field corresponding to fieldID is set (has been asigned a value) and false otherwise */
+ /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
public boolean isSet(_Fields field) {
if (field == null) {
throw new IllegalArgumentException();
@@ -239,6 +335,10 @@ public boolean isSet(_Fields field) {
return isSetField();
case TEXT:
return isSetText();
+ case IS_BINARY:
+ return isSetIs_binary();
+ case LONG_VAL:
+ return isSetLongVal();
}
throw new IllegalStateException();
}
@@ -274,6 +374,24 @@ public boolean equals(ThriftTerm that) {
return false;
}
+ boolean this_present_is_binary = true && this.isSetIs_binary();
+ boolean that_present_is_binary = true && that.isSetIs_binary();
+ if (this_present_is_binary || that_present_is_binary) {
+ if (!(this_present_is_binary && that_present_is_binary))
+ return false;
+ if (this.is_binary != that.is_binary)
+ return false;
+ }
+
+ boolean this_present_longVal = true && this.isSetLongVal();
+ boolean that_present_longVal = true && that.isSetLongVal();
+ if (this_present_longVal || that_present_longVal) {
+ if (!(this_present_longVal && that_present_longVal))
+ return false;
+ if (this.longVal != that.longVal)
+ return false;
+ }
+
return true;
}
@@ -291,6 +409,16 @@ public int hashCode() {
if (present_text)
builder.append(text);
+ boolean present_is_binary = true && (isSetIs_binary());
+ builder.append(present_is_binary);
+ if (present_is_binary)
+ builder.append(is_binary);
+
+ boolean present_longVal = true && (isSetLongVal());
+ builder.append(present_longVal);
+ if (present_longVal)
+ builder.append(longVal);
+
return builder.toHashCode();
}
@@ -307,7 +435,7 @@ public int compareTo(ThriftTerm other) {
return lastComparison;
}
if (isSetField()) {
- lastComparison = TBaseHelper.compareTo(this.field, typedOther.field);
+ lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.field, typedOther.field);
if (lastComparison != 0) {
return lastComparison;
}
@@ -317,7 +445,27 @@ public int compareTo(ThriftTerm other) {
return lastComparison;
}
if (isSetText()) {
- lastComparison = TBaseHelper.compareTo(this.text, typedOther.text);
+ lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.text, typedOther.text);
+ if (lastComparison != 0) {
+ return lastComparison;
+ }
+ }
+ lastComparison = Boolean.valueOf(isSetIs_binary()).compareTo(typedOther.isSetIs_binary());
+ if (lastComparison != 0) {
+ return lastComparison;
+ }
+ if (isSetIs_binary()) {
+ lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.is_binary, typedOther.is_binary);
+ if (lastComparison != 0) {
+ return lastComparison;
+ }
+ }
+ lastComparison = Boolean.valueOf(isSetLongVal()).compareTo(typedOther.isSetLongVal());
+ if (lastComparison != 0) {
+ return lastComparison;
+ }
+ if (isSetLongVal()) {
+ lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.longVal, typedOther.longVal);
if (lastComparison != 0) {
return lastComparison;
}
@@ -329,32 +477,48 @@ public _Fields fieldForId(int fieldId) {
return _Fields.findByThriftId(fieldId);
}
- public void read(TProtocol iprot) throws TException {
- TField field;
+ public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
+ org.apache.thrift.protocol.TField field;
iprot.readStructBegin();
while (true)
{
field = iprot.readFieldBegin();
- if (field.type == TType.STOP) {
+ if (field.type == org.apache.thrift.protocol.TType.STOP) {
break;
}
switch (field.id) {
case 1: // FIELD
- if (field.type == TType.STRING) {
+ if (field.type == org.apache.thrift.protocol.TType.STRING) {
this.field = iprot.readString();
} else {
- TProtocolUtil.skip(iprot, field.type);
+ org.apache.thrift.protocol.TProtocolUtil.skip(iprot, field.type);
}
break;
case 2: // TEXT
- if (field.type == TType.STRING) {
- this.text = iprot.readString();
+ if (field.type == org.apache.thrift.protocol.TType.STRING) {
+ this.text = iprot.readBinary();
+ } else {
+ org.apache.thrift.protocol.TProtocolUtil.skip(iprot, field.type);
+ }
+ break;
+ case 3: // IS_BINARY
+ if (field.type == org.apache.thrift.protocol.TType.BOOL) {
+ this.is_binary = iprot.readBool();
+ setIs_binaryIsSet(true);
} else {
- TProtocolUtil.skip(iprot, field.type);
+ org.apache.thrift.protocol.TProtocolUtil.skip(iprot, field.type);
+ }
+ break;
+ case 4: // LONG_VAL
+ if (field.type == org.apache.thrift.protocol.TType.I64) {
+ this.longVal = iprot.readI64();
+ setLongValIsSet(true);
+ } else {
+ org.apache.thrift.protocol.TProtocolUtil.skip(iprot, field.type);
}
break;
default:
- TProtocolUtil.skip(iprot, field.type);
+ org.apache.thrift.protocol.TProtocolUtil.skip(iprot, field.type);
}
iprot.readFieldEnd();
}
@@ -364,7 +528,7 @@ public void read(TProtocol iprot) throws TException {
validate();
}
- public void write(TProtocol oprot) throws TException {
+ public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
validate();
oprot.writeStructBegin(STRUCT_DESC);
@@ -374,8 +538,20 @@ public void write(TProtocol oprot) throws TException {
oprot.writeFieldEnd();
}
if (this.text != null) {
- oprot.writeFieldBegin(TEXT_FIELD_DESC);
- oprot.writeString(this.text);
+ if (isSetText()) {
+ oprot.writeFieldBegin(TEXT_FIELD_DESC);
+ oprot.writeBinary(this.text);
+ oprot.writeFieldEnd();
+ }
+ }
+ if (isSetIs_binary()) {
+ oprot.writeFieldBegin(IS_BINARY_FIELD_DESC);
+ oprot.writeBool(this.is_binary);
+ oprot.writeFieldEnd();
+ }
+ if (isSetLongVal()) {
+ oprot.writeFieldBegin(LONG_VAL_FIELD_DESC);
+ oprot.writeI64(this.longVal);
oprot.writeFieldEnd();
}
oprot.writeFieldStop();
@@ -394,25 +570,36 @@ public String toString() {
sb.append(this.field);
}
first = false;
- if (!first) sb.append(", ");
- sb.append("text:");
- if (this.text == null) {
- sb.append("null");
- } else {
- sb.append(this.text);
+ if (isSetText()) {
+ if (!first) sb.append(", ");
+ sb.append("text:");
+ if (this.text == null) {
+ sb.append("null");
+ } else {
+ org.apache.thrift.TBaseHelper.toString(this.text, sb);
+ }
+ first = false;
+ }
+ if (isSetIs_binary()) {
+ if (!first) sb.append(", ");
+ sb.append("is_binary:");
+ sb.append(this.is_binary);
+ first = false;
+ }
+ if (isSetLongVal()) {
+ if (!first) sb.append(", ");
+ sb.append("longVal:");
+ sb.append(this.longVal);
+ first = false;
}
- first = false;
sb.append(")");
return sb.toString();
}
- public void validate() throws TException {
+ public void validate() throws org.apache.thrift.TException {
// check for required fields
if (field == null) {
- throw new TProtocolException("Required field 'field' was not present! Struct: " + toString());
- }
- if (text == null) {
- throw new TProtocolException("Required field 'text' was not present! Struct: " + toString());
+ throw new org.apache.thrift.protocol.TProtocolException("Required field 'field' was not present! Struct: " + toString());
}
}
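
The regenerated ThriftTerm now carries its payload in optional slots: binary-capable text, an is_binary flag, and a longVal for numeric fields. A short sketch of the isSet semantics the read path branches on (standalone illustration; ThriftTermFieldsDemo is hypothetical, the accessors are those generated above):

    import java.nio.ByteBuffer;

    import lucandra.serializers.thrift.ThriftTerm;

    class ThriftTermFieldsDemo {
        public static void main(String[] args) {
            // numeric value: only longVal is populated
            ThriftTerm numeric = new ThriftTerm("price").setLongVal(1999L);

            // text value: text bytes plus an explicit is_binary = false
            ThriftTerm text = new ThriftTerm("title")
                    .setText(ByteBuffer.wrap("solandra".getBytes()))
                    .setIs_binary(false);

            assert numeric.isSetLongVal() && !numeric.isSetText();
            assert text.isSetText() && !text.isSetLongVal();
            assert !text.isIs_binary(); // plain UTF-8 text, not raw bytes
        }
    }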
thrift/lucandra.thrift (6 changed lines)
@@ -1,11 +1,15 @@
namespace java lucandra.serializers.thrift
+
+
/**
* Term Information..
*/
struct ThriftTerm {
1: required string field,
- 2: required string text
+ 2: optional binary text,
+ 3: optional bool is_binary,
+ 4: optional i64 longVal
}
struct DocumentMetadata {