Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
12 changes: 6 additions & 6 deletions pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -29,12 +29,12 @@
<project.type>multi</project.type>
<dist.id>spring-data-cassandra</dist.id>
<springdata.commons>1.13.0.BUILD-SNAPSHOT</springdata.commons>
<cassandra-unit.version>2.1.9.2</cassandra-unit.version>
<cassandra-unit.version>3.0.0.1</cassandra-unit.version>
<el.version>1.0</el.version>
<failsafe.version>2.16</failsafe.version>
<jamm.version>0.3.1</jamm.version>
<cassandra.version>2.1.11</cassandra.version>
<cassandra-driver-dse.version>2.1.7.1</cassandra-driver-dse.version>
<cassandra.version>3.0.0</cassandra.version>
<cassandra-driver-dse.version>3.0.0-rc1</cassandra-driver-dse.version>
</properties>

<developers>
Expand Down Expand Up @@ -126,7 +126,7 @@
<dependency>
<groupId>org.xerial.snappy</groupId>
<artifactId>snappy-java</artifactId>
<version>1.1.0.1</version>
<version>1.1.2.1</version>
<scope>test</scope>
</dependency>

Expand Down Expand Up @@ -158,8 +158,8 @@
<artifactId>cassandra-driver-core</artifactId>
</exclusion>
<exclusion>
<artifactId>slf4j-log4j12</artifactId>
<groupId>org.slf4j</groupId>
<artifactId>logback-core</artifactId>
<groupId>ch.qos.logback</groupId>
</exclusion>
</exclusions>
<scope>test</scope>
Expand Down
4 changes: 2 additions & 2 deletions spring-cql/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -82,8 +82,8 @@
<scope>test</scope>
<exclusions>
<exclusion>
<artifactId>slf4j-log4j12</artifactId>
<groupId>org.slf4j</groupId>
<artifactId>logback-core</artifactId>
<groupId>ch.qos.logback</groupId>
</exclusion>
<exclusion>
<artifactId>guava</artifactId>
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -55,6 +55,7 @@
import org.springframework.util.Assert;

import com.datastax.driver.core.BoundStatement;
import com.datastax.driver.core.CodecRegistry;
import com.datastax.driver.core.ColumnDefinitions;
import com.datastax.driver.core.ColumnDefinitions.Definition;
import com.datastax.driver.core.Host;
Expand Down Expand Up @@ -571,7 +572,7 @@ protected Object firstColumnToObject(Row row) {
if (cols.size() == 0) {
return null;
}
return cols.getType(0).deserialize(row.getBytesUnsafe(0), ProtocolVersion.NEWEST_SUPPORTED);
return CodecRegistry.DEFAULT_INSTANCE.codecFor(cols.getType(0)).deserialize(row.getBytesUnsafe(0), ProtocolVersion.NEWEST_SUPPORTED);
}

/**
Expand All @@ -588,7 +589,7 @@ protected Map<String, Object> toMap(Row row) {

for (Definition def : cols.asList()) {
String name = def.getName();
map.put(name, def.getType().deserialize(row.getBytesUnsafe(name), ProtocolVersion.NEWEST_SUPPORTED));
map.put(name, CodecRegistry.DEFAULT_INSTANCE.codecFor(def.getType()).deserialize(row.getBytesUnsafe(name), ProtocolVersion.NEWEST_SUPPORTED));
}

return map;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,7 @@

import org.springframework.core.convert.converter.Converter;

import com.datastax.driver.core.CodecRegistry;
import com.datastax.driver.core.ColumnDefinitions;
import com.datastax.driver.core.ProtocolVersion;
import com.datastax.driver.core.Row;
Expand All @@ -24,7 +25,7 @@ public List<Object> convert(Row row) {

for (Definition def : cols.asList()) {
String name = def.getName();
list.add(row.isNull(name) ? null : def.getType().deserialize(
list.add(row.isNull(name) ? null : CodecRegistry.DEFAULT_INSTANCE.codecFor(def.getType()).deserialize(
row.getBytesUnsafe(name), ProtocolVersion.NEWEST_SUPPORTED));
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,7 @@

import org.springframework.core.convert.converter.Converter;

import com.datastax.driver.core.CodecRegistry;
import com.datastax.driver.core.ColumnDefinitions;
import com.datastax.driver.core.ColumnDefinitions.Definition;
import com.datastax.driver.core.ProtocolVersion;
Expand All @@ -27,8 +28,8 @@ public Map<String, Object> convert(Row row) {
String name = def.getName();
map.put(
name,
row.isNull(name) ? null : def.getType().deserialize(row.getBytesUnsafe(name),
ProtocolVersion.NEWEST_SUPPORTED));
row.isNull(name) ? null : CodecRegistry.DEFAULT_INSTANCE.codecFor(def.getType())
.deserialize(row.getBytesUnsafe(name), ProtocolVersion.NEWEST_SUPPORTED));
}

return map;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,7 @@
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.datastax.driver.core.Cluster;
import com.datastax.driver.core.Host;
import com.datastax.driver.core.Host.StateListener;

Expand Down Expand Up @@ -49,8 +50,13 @@ public void onRemove(Host host) {
}

@Override
public void onSuspected(Host host) {
log.info("Host Suspected: " + host.getAddress());
public void onRegister(Cluster cluster) {
log.info("Cluster registered: " + cluster.getClusterName());
}

@Override
public void onUnregister(Cluster cluster) {
log.info("Cluster unregistered: " + cluster.getClusterName());
}

}
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,7 @@
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.datastax.driver.core.Cluster;
import com.datastax.driver.core.Host;
import com.datastax.driver.core.LatencyTracker;
import com.datastax.driver.core.Statement;
Expand All @@ -34,5 +35,13 @@ public class TestLatencyTracker implements LatencyTracker {
public void update(Host host, Statement statement, Exception exception, long newLatencyNanos) {
LOG.info("Latency Tracker: " + host.getAddress() + ", " + newLatencyNanos + " nanoseconds.");
}

@Override
public void onRegister(Cluster cluster) {
}

@Override
public void onUnregister(Cluster cluster) {
}

}
Original file line number Diff line number Diff line change
Expand Up @@ -20,23 +20,28 @@

import org.springframework.cassandra.core.keyspace.IndexDescriptor;

import com.datastax.driver.core.ColumnMetadata.IndexMetadata;
import com.datastax.driver.core.IndexMetadata;
import com.datastax.driver.core.Session;
import com.datastax.driver.core.TableMetadata;

public class CqlIndexSpecificationAssertions {

public static double DELTA = 1e-6; // delta for comparisons of doubles

public static void assertIndex(IndexDescriptor expected, String keyspace, Session session) {
IndexMetadata imd = session.getCluster().getMetadata().getKeyspace(keyspace.toLowerCase())
.getTable(expected.getTableName().toCql()).getColumn(expected.getColumnName().toCql()).getIndex();
TableMetadata tableMetadata = session.getCluster().getMetadata().getKeyspace(keyspace.toLowerCase())
.getTable(expected.getTableName().toCql());

IndexMetadata imd = tableMetadata.getIndex(expected.getName().toCql());

assertEquals(expected.getName(), imd.getName());
assertEquals(expected.getName(), imd == null ? null : imd.getName());
}

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Please note: with Cassandra 3 and above, the column name is no longer mapped to the index. This is covered by the DataStax upgrade guide. The index name is now the best way to retrieve the index.

Does this potentially break backwards compatibility with Cassandra v2? I didn't try.

Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

This is test code, so this isn't really a problem. It's possible to use Cassandra Driver V3 with Cassandra v2 (tested against 2.2.5).

public static void assertNoIndex(IndexDescriptor expected, String keyspace, Session session) {
IndexMetadata imd = session.getCluster().getMetadata().getKeyspace(keyspace.toLowerCase())
.getTable(expected.getTableName().toCql()).getColumn(expected.getColumnName().toCql()).getIndex();
TableMetadata tableMetadata = session.getCluster().getMetadata().getKeyspace(keyspace.toLowerCase())
.getTable(expected.getTableName().toCql());

IndexMetadata imd = tableMetadata.getIndex(expected.getName().toCql());

assertNull(imd);
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -32,7 +32,7 @@
import com.datastax.driver.core.ColumnMetadata;
import com.datastax.driver.core.Session;
import com.datastax.driver.core.TableMetadata;
import com.datastax.driver.core.TableMetadata.Options;
import com.datastax.driver.core.TableOptionsMetadata;

public class CqlTableSpecificationAssertions {

Expand Down Expand Up @@ -66,7 +66,7 @@ public static void assertPrimaryKeyColumns(TableDescriptor expected, TableMetada
assertColumns(expected.getPrimaryKeyColumns(), actual.getPrimaryKey());
}

public static void assertOptions(Map<String, Object> expected, Options actual) {
public static void assertOptions(Map<String, Object> expected, TableOptionsMetadata actual) {

for (String key : expected.keySet()) {

Expand Down Expand Up @@ -139,7 +139,7 @@ public static TableOption getTableOptionFor(String key) {
}

@SuppressWarnings("unchecked")
public static <T> T getOptionFor(TableOption option, Class<?> type, Options options) {
public static <T> T getOptionFor(TableOption option, Class<?> type, TableOptionsMetadata options) {
switch (option) {
case BLOOM_FILTER_FP_CHANCE:
return (T) (Double) options.getBloomFilterFalsePositiveChance();
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -47,11 +47,11 @@ public void tearDown() throws Exception {
@Test
public void configuredProtocolVersionShouldBeSet() throws Exception {

cassandraCqlClusterFactoryBean.setProtocolVersion(ProtocolVersion.V2);
cassandraCqlClusterFactoryBean.setProtocolVersion(ProtocolVersion.V4);
cassandraCqlClusterFactoryBean.setPort(CASSANDRA_NATIVE_PORT);
cassandraCqlClusterFactoryBean.afterPropertiesSet();

assertEquals(ProtocolVersion.V2, getProtocolVersionEnum(cassandraCqlClusterFactoryBean));
assertEquals(ProtocolVersion.V4, getProtocolVersionEnum(cassandraCqlClusterFactoryBean));
}

@Test
Expand All @@ -68,6 +68,6 @@ private ProtocolVersion getProtocolVersionEnum(CassandraCqlClusterFactoryBean ca

// initialize connection factory
cassandraCqlClusterFactoryBean.getObject().init();
return cassandraCqlClusterFactoryBean.getObject().getConfiguration().getProtocolOptions().getProtocolVersionEnum();
return cassandraCqlClusterFactoryBean.getObject().getConfiguration().getProtocolOptions().getProtocolVersion();
}
}
3 changes: 3 additions & 0 deletions spring-cql/src/test/resources/spring-cassandra.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -108,6 +108,9 @@ partitioner: org.apache.cassandra.dht.Murmur3Partitioner
data_file_directories:
- target/embeddedCassandra/data

hints_directory:
- target/embeddedCassandra/hints

# commit log
commitlog_directory: target/embeddedCassandra/commitlog

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@

import org.springframework.cassandra.core.cql.CqlIdentifier;

import com.datastax.driver.core.CodecRegistry;
import com.datastax.driver.core.ColumnDefinitions;
import com.datastax.driver.core.DataType;
import com.datastax.driver.core.Row;
Expand Down Expand Up @@ -50,15 +51,15 @@ public Object get(int i) {

List<DataType> collectionTypes = type.getTypeArguments();
if (collectionTypes.size() == 2) {
return row.getMap(i, collectionTypes.get(0).asJavaClass(), collectionTypes.get(1).asJavaClass());
return row.getMap(i, CodecRegistry.DEFAULT_INSTANCE.codecFor(collectionTypes.get(0)).getJavaType().getRawType(), CodecRegistry.DEFAULT_INSTANCE.codecFor(collectionTypes.get(1)).getJavaType().getRawType());
}

if (type.equals(DataType.list(collectionTypes.get(0)))) {
return row.getList(i, collectionTypes.get(0).asJavaClass());
return row.getList(i, CodecRegistry.DEFAULT_INSTANCE.codecFor(collectionTypes.get(0)).getJavaType().getRawType());
}

if (type.equals(DataType.set(collectionTypes.get(0)))) {
return row.getSet(i, collectionTypes.get(0).asJavaClass());
return row.getSet(i, CodecRegistry.DEFAULT_INSTANCE.codecFor(collectionTypes.get(0)).getJavaType().getRawType());
}

throw new IllegalStateException("Unknown Collection type encountered. Valid collections are Set, List and Map.");
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -46,6 +46,7 @@
import org.springframework.util.Assert;
import org.springframework.util.ClassUtils;

import com.datastax.driver.core.CodecRegistry;
import com.datastax.driver.core.Row;
import com.datastax.driver.core.querybuilder.Delete.Where;
import com.datastax.driver.core.querybuilder.Insert;
Expand Down Expand Up @@ -240,7 +241,7 @@ protected void writeInsertFromWrapper(final ConvertingPropertyAccessor accessor,
public void doWithPersistentProperty(CassandraPersistentProperty prop) {

Object value = accessor.getProperty(prop,
prop.isCompositePrimaryKey() ? prop.getType() : prop.getDataType().asJavaClass());
prop.isCompositePrimaryKey() ? prop.getType() : CodecRegistry.DEFAULT_INSTANCE.codecFor(prop.getDataType()).getJavaType().getRawType());

if (log.isDebugEnabled()) {
log.debug("doWithProperties Property.type {}, Property.value {}", prop.getType().getName(), value);
Expand Down Expand Up @@ -279,7 +280,7 @@ protected void writeUpdateFromWrapper(final ConvertingPropertyAccessor accessor,
public void doWithPersistentProperty(CassandraPersistentProperty prop) {

Object value = accessor.getProperty(prop,
prop.isCompositePrimaryKey() ? prop.getType() : prop.getDataType().asJavaClass());
prop.isCompositePrimaryKey() ? prop.getType() : CodecRegistry.DEFAULT_INSTANCE.codecFor(prop.getDataType()).getJavaType().getRawType());

if (prop.isCompositePrimaryKey()) {
CassandraPersistentEntity<?> keyEntity = prop.getCompositePrimaryKeyEntity();
Expand Down Expand Up @@ -312,7 +313,7 @@ protected void writeDeleteWhereFromWrapper(final ConvertingPropertyAccessor acce
@Override
public void doWithPersistentProperty(CassandraPersistentProperty prop) {

Object value = accessor.getProperty(prop, prop.getDataType().asJavaClass());
Object value = accessor.getProperty(prop, CodecRegistry.DEFAULT_INSTANCE.codecFor(prop.getDataType()).getJavaType().getRawType());
where.and(QueryBuilder.eq(prop.getColumnName().toCql(), value));
}
});
Expand Down Expand Up @@ -374,7 +375,7 @@ public Object getId(Object object, CassandraPersistentEntity<?> entity) {
CassandraPersistentProperty idProperty = entity.getIdProperty();
if (idProperty != null) {
return wrapper.getProperty(entity.getIdProperty(),
idProperty.isCompositePrimaryKey() ? idProperty.getType() : idProperty.getDataType().asJavaClass());
idProperty.isCompositePrimaryKey() ? idProperty.getType() : CodecRegistry.DEFAULT_INSTANCE.codecFor(idProperty.getDataType()).getJavaType().getRawType());
}

// if the class doesn't have an id property, then it's using MapId
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,7 @@
import org.springframework.expression.PropertyAccessor;
import org.springframework.expression.TypedValue;

import com.datastax.driver.core.CodecRegistry;
import com.datastax.driver.core.DataType;
import com.datastax.driver.core.ProtocolVersion;
import com.datastax.driver.core.Row;
Expand Down Expand Up @@ -52,7 +53,7 @@ public TypedValue read(EvaluationContext context, Object target, String name) {
}
DataType columnType = row.getColumnDefinitions().getType(name);
ByteBuffer bytes = row.getBytes(name);
Object object = columnType.deserialize(bytes, ProtocolVersion.NEWEST_SUPPORTED);
Object object = CodecRegistry.DEFAULT_INSTANCE.codecFor(columnType).deserialize(bytes, ProtocolVersion.NEWEST_SUPPORTED);
return new TypedValue(object);
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -188,30 +188,22 @@ public DataType getDataType() {
private DataType getDataTypeFor(CassandraType annotation) {

DataType.Name type = annotation.type();
switch (type) {

if (type.isCollection()) {
switch (type) {
case MAP:
ensureTypeArguments(annotation.typeArguments().length, 2);
return DataType.map(getDataTypeFor(annotation.typeArguments()[0]),
getDataTypeFor(annotation.typeArguments()[1]));

case MAP:
ensureTypeArguments(annotation.typeArguments().length, 2);
return DataType.map(getDataTypeFor(annotation.typeArguments()[0]),
getDataTypeFor(annotation.typeArguments()[1]));
case LIST:
ensureTypeArguments(annotation.typeArguments().length, 1);
return DataType.list(getDataTypeFor(annotation.typeArguments()[0]));

case LIST:
ensureTypeArguments(annotation.typeArguments().length, 1);
return DataType.list(getDataTypeFor(annotation.typeArguments()[0]));

case SET:
ensureTypeArguments(annotation.typeArguments().length, 1);
return DataType.set(getDataTypeFor(annotation.typeArguments()[0]));

default:
throw new InvalidDataAccessApiUsageException(String.format(
"unknown multivalued DataType [%s] for property [%s] in entity [%s]", type, getType(), getOwner()
.getName()));
}
} else {
case SET:
ensureTypeArguments(annotation.typeArguments().length, 1);
return DataType.set(getDataTypeFor(annotation.typeArguments()[0]));

default:
return CassandraSimpleTypeHolder.getDataTypeFor(type);
}
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -26,6 +26,7 @@
import org.springframework.data.mapping.model.SimpleTypeHolder;
import org.springframework.data.util.TypeInformation;

import com.datastax.driver.core.CodecRegistry;
import com.datastax.driver.core.DataType;

/**
Expand Down Expand Up @@ -60,7 +61,7 @@ public class CassandraSimpleTypeHolder extends SimpleTypeHolder {

for (DataType dataType : DataType.allPrimitiveTypes()) {

Class<?> javaClass = dataType.asJavaClass();
Class<?> javaClass = CodecRegistry.DEFAULT_INSTANCE.codecFor(dataType).getJavaType().getRawType();
simpleTypes.add(javaClass);

dataTypesByJavaClass.put(javaClass, dataType);
Expand Down