Skip to content
This repository has been archived by the owner on Feb 27, 2023. It is now read-only.

Commit

Permalink
make hector build for cassandra beta1 changes (see r1094102 and r1094628)
Browse files Browse the repository at this point in the history

object-mapper tests still fail because schema can no longer be defined in cassandra.yaml (see cassandra/conf/schema-sample.txt)
  • Loading branch information
michaelsembwever committed Apr 29, 2011
1 parent b7947de commit c2aeed0
Show file tree
Hide file tree
Showing 11 changed files with 225 additions and 195 deletions.
108 changes: 54 additions & 54 deletions core/pom.xml
Expand Up @@ -104,70 +104,70 @@
</dependency>
<dependency>
<groupId>org.apache.cassandra</groupId>
<artifactId>apache-cassandra</artifactId>
<version>0.8.0-20110415</version>
<artifactId>cassandra-all</artifactId>
<version>0.8.0-beta1</version>
</dependency>
<dependency>
<groupId>org.apache.cassandra</groupId>
<artifactId>apache-cassandra-thrift</artifactId>
<version>0.8.0-20110415</version>
<artifactId>cassandra-thrift</artifactId>
<version>0.8.0-beta1</version>
</dependency>
<dependency>
<groupId>org.apache.cassandra.deps</groupId>
<groupId>org.apache.thrift</groupId>
<artifactId>libthrift</artifactId>
<version>0.6.0</version>
<version>0.6.1</version>
</dependency>
<dependency>
<groupId>org.yaml</groupId>
<artifactId>snakeyaml</artifactId>
<version>1.6</version>
<dependency>
<groupId>org.yaml</groupId>
<artifactId>snakeyaml</artifactId>
<version>1.6</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>com.google.guava</groupId>
<artifactId>guava</artifactId>
<version>r08</version>
</dependency>
<dependency>
<groupId>commons-collections</groupId>
<artifactId>commons-collections</artifactId>
<version>3.2.1</version>
</dependency>
<dependency>
<groupId>com.google.guava</groupId>
<artifactId>guava</artifactId>
<version>r08</version>
</dependency>
<dependency>
<groupId>commons-collections</groupId>
<artifactId>commons-collections</artifactId>
<version>3.2.1</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.cassandra.deps</groupId>
<artifactId>avro</artifactId>
<version>1.4.0-cassandra-1</version>
</dependency>
<dependency>
<groupId>org.apache.cassandra.deps</groupId>
<artifactId>avro</artifactId>
<version>1.4.0-cassandra-1</version>
<scope>test</scope>
<exclusions>
<exclusion>
<artifactId>netty</artifactId>
<groupId>org.jboss.netty</groupId>
</exclusion>
<exclusion>
<artifactId>paranamer</artifactId>
<groupId>com.thoughtworks.paranamer</groupId>
</exclusion>
<exclusion>
<artifactId>paranamer-ant</artifactId>
<groupId>com.thoughtworks.paranamer</groupId>
</exclusion>
<exclusion>
<artifactId>velocity</artifactId>
<groupId>org.apache.velocity</groupId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.antlr</groupId>
<artifactId>antlr</artifactId>
<exclusions>
<exclusion>
<artifactId>netty</artifactId>
<groupId>org.jboss.netty</groupId>
</exclusion>
<exclusion>
<artifactId>paranamer</artifactId>
<groupId>com.thoughtworks.paranamer</groupId>
</exclusion>
<exclusion>
<artifactId>paranamer-ant</artifactId>
<groupId>com.thoughtworks.paranamer</groupId>
</exclusion>
<exclusion>
<artifactId>velocity</artifactId>
<groupId>org.apache.velocity</groupId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.antlr</groupId>
<artifactId>antlr</artifactId>
<version>3.1.3</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>com.googlecode.concurrentlinkedhashmap</groupId>
<artifactId>concurrentlinkedhashmap-lru</artifactId>
<version>1.1</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>com.googlecode.concurrentlinkedhashmap</groupId>
<artifactId>concurrentlinkedhashmap-lru</artifactId>
<version>1.1</version>
<scope>test</scope>
</dependency>
<dependency>
Expand Down Expand Up @@ -232,7 +232,7 @@
<groupId>org.mockito</groupId>
<artifactId>mockito-all</artifactId>
<scope>test</scope>
</dependency>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
Expand Down
31 changes: 16 additions & 15 deletions core/src/main/java/me/prettyprint/cassandra/model/HColumnImpl.java
Expand Up @@ -33,8 +33,9 @@ public HColumnImpl(N name, V value, long clock, Serializer<N> nameSerializer,
notNull(name, "name is null");
notNull(value, "value is null");

this.column = new Column(nameSerializer.toByteBuffer(name),
valueSerializer.toByteBuffer(value), clock);
this.column = new Column(nameSerializer.toByteBuffer(name));
this.column.setValue(valueSerializer.toByteBuffer(value));
this.column.setTimestamp(clock);
}

public HColumnImpl(Column thriftColumn, Serializer<N> nameSerializer,
Expand Down Expand Up @@ -78,7 +79,7 @@ public HColumn<N,V> setClock(long clock) {
}

/**
* Set the time-to-live value for this column in seconds.
* Set the time-to-live value for this column in seconds.
* The server will mark this column as deleted once the number of seconds has elapsed.
*/
@Override
Expand All @@ -93,15 +94,15 @@ public int getTtl() {
}

@Override
public N getName() {
public N getName() {
return column.isSetName() ? nameSerializer.fromByteBuffer(column.name.duplicate()) : null;
}

@Override
public V getValue() {
public V getValue() {
return column.isSetValue() ? valueSerializer.fromByteBuffer(column.value.duplicate()) : null;
}
}


@Override
public long getClock() {
Expand All @@ -126,23 +127,23 @@ public Serializer<N> getNameSerializer() {
@Override
public Serializer<V> getValueSerializer() {
return valueSerializer;
}
}

@Override
public ByteBuffer getNameBytes() {
public ByteBuffer getNameBytes() {
return column.isSetName() ? column.name.duplicate() : null;
}

@Override
public ByteBuffer getValueBytes() {
public ByteBuffer getValueBytes() {
return column.isSetValue() ? column.value.duplicate() : null;
}

/**
* Clear value, timestamp and ttl (the latter two set to '0') leaving only the column name
*/
@Override
public HColumn<N,V> clear() {
public HColumn<N,V> clear() {
column.value = null;
column.timestamp = 0;
column.ttl = 0;
Expand All @@ -151,8 +152,8 @@ public HColumn<N,V> clear() {
column.setValueIsSet(false);
return this;
}



@Override
public HColumn<N, V> apply(V value, long clock, int ttl) {
Expand All @@ -166,7 +167,7 @@ public HColumn<N, V> apply(Column c) {
this.column = c;
return this;
}

@Override
public String toString() {
return String.format("HColumn(%s=%s)",getName(), getValue());
Expand Down
Expand Up @@ -57,7 +57,7 @@ public class KeyspaceServiceImpl implements KeyspaceService {
private CassandraHost cassandraHost;

private final FailoverPolicy failoverPolicy;

private final Map<String, String> credentials;

public KeyspaceServiceImpl(String keyspaceName,
Expand Down Expand Up @@ -228,17 +228,17 @@ public List<Column> execute(Cassandra.Client cassandra) throws HectorException {
operateWithFailover(op);
return op.getResult();
}

@Override
public List<Column> getSlice(String key, ColumnParent columnParent, SlicePredicate predicate)
throws HectorException {
return getSlice(StringSerializer.get().toByteBuffer(key), columnParent, predicate);
}

@Override
public List<CounterColumn> getCounterSlice(final ByteBuffer key, final ColumnParent columnParent,
final SlicePredicate predicate) throws HectorException {
Operation<List<CounterColumn>> op =
Operation<List<CounterColumn>> op =
new Operation<List<CounterColumn>>(OperationType.READ, failoverPolicy, keyspaceName, credentials) {

@Override
Expand All @@ -258,7 +258,7 @@ public List<CounterColumn> execute(Cassandra.Client cassandra) throws HectorExce
// Inconsistency
throw new HectorException("Regular Column is part of the set of Counter Column");
}

}
return result;
} catch (Exception e) {
Expand Down Expand Up @@ -394,9 +394,9 @@ public Void execute(Cassandra.Client cassandra) throws HectorException {
};
operateWithFailover(op);
}

@Override
public void addCounter(final ByteBuffer key, final ColumnParent columnParent, final CounterColumn counterColumn)
public void addCounter(final ByteBuffer key, final ColumnParent columnParent, final CounterColumn counterColumn)
throws HectorException {
Operation<Void> op = new Operation<Void>(OperationType.WRITE, failoverPolicy, keyspaceName, credentials) {

Expand All @@ -412,7 +412,7 @@ public Void execute(Cassandra.Client cassandra) throws HectorException {
};
operateWithFailover(op);
}

@Override
public void addCounter(String key, ColumnParent columnParent, CounterColumn counterColumn) throws HectorException {
addCounter(StringSerializer.get().toByteBuffer(key), columnParent, counterColumn);
Expand All @@ -425,7 +425,9 @@ public void insert(String key, ColumnPath columnPath, ByteBuffer value) throws H
if (columnPath.isSetSuper_column()) {
columnParent.setSuper_column(columnPath.getSuper_column());
}
Column column = new Column(ByteBuffer.wrap(columnPath.getColumn()), value, connectionManager.createClock());
Column column = new Column(ByteBuffer.wrap(columnPath.getColumn()));
column.setValue(value);
column.setTimestamp(connectionManager.createClock());
insert(StringSerializer.get().toByteBuffer(key), columnParent, column);
}

Expand All @@ -436,7 +438,9 @@ public void insert(String key, ColumnPath columnPath, ByteBuffer value, long tim
if (columnPath.isSetSuper_column()) {
columnParent.setSuper_column(columnPath.getSuper_column());
}
Column column = new Column(ByteBuffer.wrap(columnPath.getColumn()), value, timestamp);
Column column = new Column(ByteBuffer.wrap(columnPath.getColumn()));
column.setValue(value);
column.setTimestamp(timestamp);
insert(StringSerializer.get().toByteBuffer(key), columnParent, column);
}

Expand Down Expand Up @@ -624,7 +628,7 @@ public Void execute(Cassandra.Client cassandra) throws HectorException {
};
operateWithFailover(op);
}

@Override
public void removeCounter(final ByteBuffer key, final ColumnPath columnPath) throws HectorException {
Operation<Void> op = new Operation<Void>(OperationType.WRITE, failoverPolicy, keyspaceName, credentials) {
Expand All @@ -641,7 +645,7 @@ public Void execute(Cassandra.Client cassandra) throws HectorException {
};
operateWithFailover(op);
}

@Override
public void removeCounter(String key, ColumnPath columnPath) throws HectorException {
removeCounter(StringSerializer.get().toByteBuffer(key), columnPath);
Expand Down Expand Up @@ -694,7 +698,7 @@ public Column execute(Cassandra.Client cassandra) throws HectorException {
}
return op.getResult();
}

@Override
public CounterColumn getCounter(final ByteBuffer key, final ColumnPath columnPath) throws HectorException {
Operation<CounterColumn> op = new Operation<CounterColumn>(OperationType.READ, failoverPolicy, keyspaceName, credentials) {
Expand All @@ -703,7 +707,7 @@ public CounterColumn getCounter(final ByteBuffer key, final ColumnPath columnPat
public CounterColumn execute(Cassandra.Client cassandra) throws HectorException {
ColumnOrSuperColumn cosc;
try {
cosc = cassandra.get(key, columnPath, getThriftCl(OperationType.READ));
cosc = cassandra.get(key, columnPath, getThriftCl(OperationType.READ));
} catch (NotFoundException e) {
setException(xtrans.translate(e));
return null;
Expand All @@ -720,7 +724,7 @@ public CounterColumn execute(Cassandra.Client cassandra) throws HectorException
}
return op.getResult();
}

@Override
public CounterColumn getCounter(String key, ColumnPath columnPath) throws HectorException {
return getCounter(StringSerializer.get().toByteBuffer(key), columnPath);
Expand Down
Expand Up @@ -22,30 +22,39 @@
public class ColumnSliceTest {
StringSerializer se = StringSerializer.get();
LongSerializer le = LongSerializer.get();


@Test
public void testConstruction() {
  // An empty thrift column list yields an empty slice.
  List<Column> tColumns = new ArrayList<Column>();
  ColumnSlice<String, Long> slice = new ColumnSliceImpl<String, Long>(tColumns, se, le);
  Assert.assertTrue(slice.getColumns().isEmpty());

  // Cassandra 0.8.0-beta1 removed the (name, value, timestamp) Column
  // constructor; value and timestamp must now be applied via setters.
  // (The stale three-argument constructor calls left over from the diff
  // paste have been removed.)
  Column column = new Column(ByteBuffer.wrap(new byte[]{}));
  column.setValue(ByteBuffer.wrap(new byte[]{}));
  column.setTimestamp(0L);
  tColumns.add(column);
  slice = new ColumnSliceImpl<String, Long>(tColumns, se, le);
  Assert.assertEquals(1, slice.getColumns().size());

  // A named column must be retrievable by name with its deserialized value.
  tColumns = new ArrayList<Column>();
  column = new Column(se.toByteBuffer("1"));
  column.setValue(le.toByteBuffer(1L));
  column.setTimestamp(0L);
  tColumns.add(column);
  slice = new ColumnSliceImpl<String, Long>(tColumns, se, le);
  Assert.assertEquals((Long) 1L, slice.getColumnByName("1").getValue());
}

@Test
public void testMultiCallOnByteBuffer() {
List<Column> tColumns = new ArrayList<Column>();
tColumns.add(new Column(se.toByteBuffer("1"), ByteBuffer.wrap("colvalue".getBytes()), 0L));
Column column = new Column(se.toByteBuffer("1"));
column.setValue(ByteBuffer.wrap("colvalue".getBytes()));
column.setTimestamp(0L);
tColumns.add(column);
ColumnSlice<String, ByteBuffer> slice = new ColumnSliceImpl<String, ByteBuffer>(tColumns, se, ByteBufferSerializer.get());

ByteBuffer value = slice.getColumnByName("1").getValue();
Assert.assertEquals("colvalue", se.fromByteBuffer(value));
value.rewind();
Expand Down

0 comments on commit c2aeed0

Please sign in to comment.