
preliminary support for virtual keyspaces

1 parent 79be5d0 · commit 77ecfd398f979761266a3a6b87cb237430324d97 · @edanuff committed Jan 20, 2011
core/.gitignore (15 lines changed)
@@ -0,0 +1,15 @@
+.idea
+.DS_Store
+hector.iml
+releases
+target
+tmp
+bin
+.classpath
+.project
+.settings
+out
+*.ipr
+*.iws
+*.iml
+.springBeans
core/src/main/java/me/prettyprint/cassandra/model/ExecutingKeyspace.java (56 lines changed)
@@ -17,72 +17,82 @@
/**
* Thread Safe
+ *
* @author Ran Tavory
* @author zznate
*/
public class ExecutingKeyspace implements Keyspace {
- private static final Map<String, String> EMPTY_CREDENTIALS = Collections.emptyMap();
+ private static final Map<String, String> EMPTY_CREDENTIALS = Collections
+ .emptyMap();
- private ConsistencyLevelPolicy consistencyLevelPolicy;
- private FailoverPolicy failoverPolicy;
+ protected ConsistencyLevelPolicy consistencyLevelPolicy;
+ protected FailoverPolicy failoverPolicy;
- private final HConnectionManager connectionManager;
- private final String keyspace;
- private final Map<String, String> credentials;
+ protected final HConnectionManager connectionManager;
+ protected final String keyspace;
+ protected final Map<String, String> credentials;
private final ExceptionsTranslator exceptionTranslator;
- public ExecutingKeyspace(String keyspace, HConnectionManager connectionManager,
- ConsistencyLevelPolicy consistencyLevelPolicy, FailoverPolicy failoverPolicy) {
- this(keyspace, connectionManager, consistencyLevelPolicy, failoverPolicy, EMPTY_CREDENTIALS);
+ public ExecutingKeyspace(String keyspace,
+ HConnectionManager connectionManager,
+ ConsistencyLevelPolicy consistencyLevelPolicy,
+ FailoverPolicy failoverPolicy) {
+ this(keyspace, connectionManager, consistencyLevelPolicy, failoverPolicy,
+ EMPTY_CREDENTIALS);
}
-
- public ExecutingKeyspace(String keyspace, HConnectionManager connectionManager,
- ConsistencyLevelPolicy consistencyLevelPolicy, FailoverPolicy failoverPolicy,
- Map<String, String> credentials) {
+
+ public ExecutingKeyspace(String keyspace,
+ HConnectionManager connectionManager,
+ ConsistencyLevelPolicy consistencyLevelPolicy,
+ FailoverPolicy failoverPolicy, Map<String, String> credentials) {
Assert.noneNull(consistencyLevelPolicy, connectionManager);
this.keyspace = keyspace;
this.connectionManager = connectionManager;
this.consistencyLevelPolicy = consistencyLevelPolicy;
this.failoverPolicy = failoverPolicy;
this.credentials = credentials;
// TODO make this plug-able
- this.exceptionTranslator = new ExceptionsTranslatorImpl();
+ exceptionTranslator = new ExceptionsTranslatorImpl();
}
@Override
public void setConsistencyLevelPolicy(ConsistencyLevelPolicy cp) {
// TODO remove this method
- this.consistencyLevelPolicy = cp;
+ consistencyLevelPolicy = cp;
}
@Override
public String toString() {
- return "ExecutingKeyspace(" + keyspace +"," + connectionManager + ")";
+ return "ExecutingKeyspace(" + keyspace + "," + connectionManager + ")";
}
@Override
public long createClock() {
return connectionManager.createClock();
}
- public <T> ExecutionResult<T> doExecute(KeyspaceOperationCallback<T> koc) throws HectorException {
+ public <T> ExecutionResult<T> doExecute(KeyspaceOperationCallback<T> koc)
+ throws HectorException {
KeyspaceService ks = null;
try {
- ks = new KeyspaceServiceImpl(keyspace, consistencyLevelPolicy, connectionManager, failoverPolicy, credentials);
+ ks = new KeyspaceServiceImpl(keyspace, consistencyLevelPolicy,
+ connectionManager, failoverPolicy, credentials);
return koc.doInKeyspaceAndMeasure(ks);
} finally {
if (ks != null) {
- //connectionManager.releaseClient(ks.getClient());
+ // connectionManager.releaseClient(ks.getClient());
}
}
}
-
- public <T> ExecutionResult<T> doExecuteOperation(Operation<T> operation) throws HectorException {
- operation.applyConnectionParams(keyspace,consistencyLevelPolicy,failoverPolicy,credentials);
+
+ public <T> ExecutionResult<T> doExecuteOperation(Operation<T> operation)
+ throws HectorException {
+ operation.applyConnectionParams(keyspace, consistencyLevelPolicy,
+ failoverPolicy, credentials);
connectionManager.operateWithFailover(operation);
return operation.getExecutionResult();
}
-
+
public ExceptionsTranslator getExceptionsTranslator() {
return exceptionTranslator;
}
core/src/main/java/me/prettyprint/cassandra/model/ExecutingPrefixedKeyspace.java (55 lines changed)
@@ -0,0 +1,55 @@
+package me.prettyprint.cassandra.model;
+
+import java.util.Map;
+
+import me.prettyprint.cassandra.connection.HConnectionManager;
+import me.prettyprint.cassandra.service.FailoverPolicy;
+import me.prettyprint.cassandra.service.KeyspaceService;
+import me.prettyprint.cassandra.service.PrefixedKeyspaceServiceImpl;
+import me.prettyprint.hector.api.ConsistencyLevelPolicy;
+import me.prettyprint.hector.api.Serializer;
+import me.prettyprint.hector.api.exceptions.HectorException;
+
+public class ExecutingPrefixedKeyspace<E> extends ExecutingKeyspace {
+
+ E keyPrefix;
+ Serializer<E> keyPrefixSerializer;
+
+ public ExecutingPrefixedKeyspace(String keyspace, E keyPrefix,
+ Serializer<E> keyPrefixSerializer, HConnectionManager connectionManager,
+ ConsistencyLevelPolicy consistencyLevelPolicy,
+ FailoverPolicy failoverPolicy) {
+ super(keyspace, connectionManager, consistencyLevelPolicy, failoverPolicy);
+
+ this.keyPrefix = keyPrefix;
+ this.keyPrefixSerializer = keyPrefixSerializer;
+ }
+
+ public ExecutingPrefixedKeyspace(String keyspace, E keyPrefix,
+ Serializer<E> keyPrefixSerializer, HConnectionManager connectionManager,
+ ConsistencyLevelPolicy consistencyLevelPolicy,
+ FailoverPolicy failoverPolicy, Map<String, String> credentials) {
+ super(keyspace, connectionManager, consistencyLevelPolicy, failoverPolicy,
+ credentials);
+
+ this.keyPrefix = keyPrefix;
+ this.keyPrefixSerializer = keyPrefixSerializer;
+ }
+
+ @Override
+ public <T> ExecutionResult<T> doExecute(KeyspaceOperationCallback<T> koc)
+ throws HectorException {
+ KeyspaceService ks = null;
+ try {
+ ks = new PrefixedKeyspaceServiceImpl(keyspace, keyPrefix,
+ keyPrefixSerializer, consistencyLevelPolicy, connectionManager,
+ failoverPolicy, credentials);
+ return koc.doInKeyspaceAndMeasure(ks);
+ } finally {
+ if (ks != null) {
+ // connectionManager.releaseClient(ks.getClient());
+ }
+ }
+ }
+
+}
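
ExecutingPrefixedKeyspace overrides doExecute so that each callback receives a PrefixedKeyspaceServiceImpl instead of the plain KeyspaceServiceImpl, which makes every low-level operation see prefixed row keys. A minimal construction sketch follows; it assumes a Cluster obtained elsewhere and uses "Keyspace1" as a placeholder physical keyspace name. In practice the HFactory.createPrefixedKeyspace(...) overloads added later in this commit wrap this constructor.

    // Sketch only; assumes the hector imports used in the file above.
    UUID tenantId = UUID.randomUUID();                  // acts as the virtual-keyspace prefix
    Keyspace virtualKeyspace = new ExecutingPrefixedKeyspace<UUID>(
        "Keyspace1", tenantId, new UUIDSerializer(),    // placeholder keyspace name
        cluster.getConnectionManager(),
        HFactory.createDefaultConsistencyLevelPolicy(),
        FailoverPolicy.ON_FAIL_TRY_ALL_AVAILABLE);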
core/src/main/java/me/prettyprint/cassandra/serializers/PrefixedSerializer.java (50 lines changed)
@@ -24,6 +24,9 @@ public PrefixedSerializer(P prefix, Serializer<P> prefixSerializer,
@Override
public ByteBuffer toByteBuffer(S s) {
+ if (s == null) {
+ return null;
+ }
ByteBuffer sb = suffixSerializer.toByteBuffer(s);
sb.rewind();
@@ -39,16 +42,59 @@ public ByteBuffer toByteBuffer(S s) {
@Override
public S fromByteBuffer(ByteBuffer bytes) {
+ if ((bytes == null) || !bytes.hasArray()) {
+ return null;
+ }
+
bytes = bytes.duplicate();
bytes.rewind();
- P p = prefixSerializer.fromByteBuffer(bytes);
- if (!prefix.equals(p)) {
+ if (compareByteArrays(prefixBytes.array(), prefixBytes.arrayOffset()
+ + prefixBytes.position(), prefixBytes.remaining(), bytes.array(),
+ bytes.arrayOffset() + bytes.position(), prefixBytes.remaining()) != 0) {
throw new HectorSerializationException("Unexpected prefix value");
}
bytes.position(prefixBytes.remaining());
S s = suffixSerializer.fromByteBuffer(bytes);
return s;
}
+
+ private static int compareByteArrays(byte[] bytes1, int offset1, int len1,
+ byte[] bytes2, int offset2, int len2) {
+ if (null == bytes1) {
+ if (null == bytes2) {
+ return 0;
+ } else {
+ return -1;
+ }
+ }
+ if (null == bytes2) {
+ return 1;
+ }
+
+ if (len1 < 0) {
+ len1 = bytes1.length - offset1;
+ }
+ if (len2 < 0) {
+ len2 = bytes2.length - offset2;
+ }
+
+ int minLength = Math.min(len1, len2);
+ for (int i = 0; i < minLength; i++) {
+ int i1 = offset1 + i;
+ int i2 = offset2 + i;
+ if (bytes1[i1] == bytes2[i2]) {
+ continue;
+ }
+ // compare non-equal bytes as unsigned
+ return (bytes1[i1] & 0xFF) < (bytes2[i2] & 0xFF) ? -1 : 1;
+ }
+ if (len1 == len2) {
+ return 0;
+ } else {
+ return (len1 < len2) ? -1 : 1;
+ }
+ }
+
}
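
With these changes PrefixedSerializer passes null through untouched in both directions and validates the stored prefix by comparing raw unsigned bytes instead of deserializing it. Its round-trip behaviour, exercised by the new PrefixedSerializerTest at the end of this commit, looks roughly like this (a sketch, assuming the usual hector serializer imports):

    UUID tenantId = UUID.randomUUID();
    PrefixedSerializer<UUID, String> ps = new PrefixedSerializer<UUID, String>(
        tenantId, new UUIDSerializer(), new StringSerializer());

    ByteBuffer onWire = ps.toByteBuffer("row-1"); // serialized prefix followed by the suffix bytes
    String back = ps.fromByteBuffer(onWire);      // verifies the prefix, returns "row-1"

    // Deserializing a buffer written under a different prefix throws
    // HectorSerializationException ("Unexpected prefix value"); null maps to null.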
core/src/main/java/me/prettyprint/cassandra/service/PrefixedKeyspaceServiceImpl.java (246 lines changed)
@@ -0,0 +1,246 @@
+package me.prettyprint.cassandra.service;
+
+import java.nio.ByteBuffer;
+import java.util.List;
+import java.util.Map;
+
+import me.prettyprint.cassandra.connection.HConnectionManager;
+import me.prettyprint.cassandra.serializers.ByteBufferSerializer;
+import me.prettyprint.cassandra.serializers.PrefixedSerializer;
+import me.prettyprint.cassandra.serializers.StringSerializer;
+import me.prettyprint.hector.api.ConsistencyLevelPolicy;
+import me.prettyprint.hector.api.HConsistencyLevel;
+import me.prettyprint.hector.api.Serializer;
+import me.prettyprint.hector.api.exceptions.HectorException;
+import me.prettyprint.hector.api.exceptions.HectorTransportException;
+
+import org.apache.cassandra.thrift.Column;
+import org.apache.cassandra.thrift.ColumnParent;
+import org.apache.cassandra.thrift.ColumnPath;
+import org.apache.cassandra.thrift.IndexClause;
+import org.apache.cassandra.thrift.KeyRange;
+import org.apache.cassandra.thrift.Mutation;
+import org.apache.cassandra.thrift.SlicePredicate;
+import org.apache.cassandra.thrift.SuperColumn;
+
+public class PrefixedKeyspaceServiceImpl extends KeyspaceServiceImpl {
+
+ ByteBuffer prefixBytes;
+ PrefixedSerializer<ByteBuffer, ByteBuffer> ps;
+ ByteBufferSerializer be = new ByteBufferSerializer();
+ StringSerializer se = new StringSerializer();
+
+ public <E> PrefixedKeyspaceServiceImpl(String keyspaceName, E keyPrefix,
+ Serializer<E> keyPrefixSerializer,
+ ConsistencyLevelPolicy consistencyLevel,
+ HConnectionManager connectionManager, FailoverPolicy failoverPolicy)
+ throws HectorTransportException {
+ super(keyspaceName, consistencyLevel, connectionManager, failoverPolicy);
+
+ prefixBytes = keyPrefixSerializer.toByteBuffer(keyPrefix);
+ ps = new PrefixedSerializer<ByteBuffer, ByteBuffer>(prefixBytes, be, be);
+ }
+
+ public <E> PrefixedKeyspaceServiceImpl(String keyspaceName, E keyPrefix,
+ Serializer<E> keyPrefixSerializer,
+ ConsistencyLevelPolicy consistencyLevel,
+ HConnectionManager connectionManager, FailoverPolicy failoverPolicy,
+ Map<String, String> credentials) throws HectorTransportException {
+ super(keyspaceName, consistencyLevel, connectionManager, failoverPolicy,
+ credentials);
+
+ prefixBytes = keyPrefixSerializer.toByteBuffer(keyPrefix);
+ ps = new PrefixedSerializer<ByteBuffer, ByteBuffer>(prefixBytes, be, be);
+ }
+
+ public KeyRange toCassandra(KeyRange from) {
+ KeyRange to = new KeyRange();
+ to.count = from.count;
+ to.end_token = from.end_token;
+ to.end_key = ps.toByteBuffer(from.end_key);
+ to.start_token = from.start_token;
+ to.start_key = ps.toByteBuffer(from.start_key);
+ return to;
+ }
+
+ public KeyRange fromCassandra(KeyRange from) {
+ KeyRange to = new KeyRange();
+ to.count = from.count;
+ to.end_token = from.end_token;
+ to.end_key = ps.fromByteBuffer(from.end_key);
+ to.start_token = from.start_token;
+ to.start_key = ps.fromByteBuffer(from.start_key);
+ return to;
+ }
+
+ @Override
+ public void batchMutate(
+ Map<ByteBuffer, Map<String, List<Mutation>>> mutationMap)
+ throws HectorException {
+
+ super.batchMutate(ps.toBytesMap(mutationMap));
+ }
+
+ @Override
+ public void batchMutate(BatchMutation batchMutate) throws HectorException {
+
+ super.batchMutate(batchMutate);
+ }
+
+ @Override
+ public int getCount(ByteBuffer key, ColumnParent columnParent,
+ SlicePredicate predicate) throws HectorException {
+
+ return super.getCount(ps.toByteBuffer(key), columnParent, predicate);
+ }
+
+ @Override
+ public CassandraHost getCassandraHost() {
+
+ return super.getCassandraHost();
+ }
+
+ @Override
+ public Map<ByteBuffer, List<Column>> getRangeSlices(
+ ColumnParent columnParent, SlicePredicate predicate, KeyRange keyRange)
+ throws HectorException {
+
+ return ps.fromBytesMap(super.getRangeSlices(columnParent, predicate,
+ toCassandra(keyRange)));
+ }
+
+ @Override
+ public Map<ByteBuffer, List<SuperColumn>> getSuperRangeSlices(
+ ColumnParent columnParent, SlicePredicate predicate, KeyRange keyRange)
+ throws HectorException {
+
+ return ps.fromBytesMap(super.getSuperRangeSlices(columnParent, predicate,
+ toCassandra(keyRange)));
+ }
+
+ @Override
+ public List<Column> getSlice(ByteBuffer key, ColumnParent columnParent,
+ SlicePredicate predicate) throws HectorException {
+
+ return super.getSlice(ps.toByteBuffer(key), columnParent, predicate);
+ }
+
+ @Override
+ public SuperColumn getSuperColumn(ByteBuffer key, ColumnPath columnPath)
+ throws HectorException {
+
+ return super.getSuperColumn(ps.toByteBuffer(key), columnPath);
+ }
+
+ @Override
+ public SuperColumn getSuperColumn(ByteBuffer key, ColumnPath columnPath,
+ boolean reversed, int size) throws HectorException {
+
+ return super.getSuperColumn(ps.toByteBuffer(key), columnPath, reversed,
+ size);
+ }
+
+ @Override
+ public List<SuperColumn> getSuperSlice(ByteBuffer key,
+ ColumnParent columnParent, SlicePredicate predicate)
+ throws HectorException {
+
+ return super.getSuperSlice(ps.toByteBuffer(key), columnParent, predicate);
+ }
+
+ @Override
+ public void insert(ByteBuffer key, ColumnParent columnParent, Column column)
+ throws HectorException {
+
+ super.insert(ps.toByteBuffer(key), columnParent, column);
+ }
+
+ @Override
+ public Map<ByteBuffer, List<Column>> multigetSlice(List<ByteBuffer> keys,
+ ColumnParent columnParent, SlicePredicate predicate)
+ throws HectorException {
+
+ return super.multigetSlice(ps.toBytesList(keys), columnParent, predicate);
+ }
+
+ @Override
+ public Map<ByteBuffer, SuperColumn> multigetSuperColumn(
+ List<ByteBuffer> keys, ColumnPath columnPath) throws HectorException {
+
+ return super.multigetSuperColumn(ps.toBytesList(keys), columnPath);
+ }
+
+ @Override
+ public Map<ByteBuffer, SuperColumn> multigetSuperColumn(
+ List<ByteBuffer> keys, ColumnPath columnPath, boolean reversed, int size)
+ throws HectorException {
+
+ return super.multigetSuperColumn(ps.toBytesList(keys), columnPath,
+ reversed, size);
+ }
+
+ @Override
+ public Map<ByteBuffer, List<SuperColumn>> multigetSuperSlice(
+ List<ByteBuffer> keys, ColumnParent columnParent, SlicePredicate predicate)
+ throws HectorException {
+
+ return super.multigetSuperSlice(ps.toBytesList(keys), columnParent,
+ predicate);
+ }
+
+ @Override
+ public Map<ByteBuffer, List<Column>> getIndexedSlices(
+ ColumnParent columnParent, IndexClause indexClause,
+ SlicePredicate predicate) throws HectorException {
+
+ return super.getIndexedSlices(columnParent, indexClause, predicate);
+ }
+
+ @Override
+ public void remove(ByteBuffer key, ColumnPath columnPath) {
+
+ super.remove(ps.toByteBuffer(key), columnPath);
+ }
+
+ @Override
+ public Map<ByteBuffer, Integer> multigetCount(List<ByteBuffer> keys,
+ ColumnParent columnParent, SlicePredicate slicePredicate)
+ throws HectorException {
+
+ return super.multigetCount(ps.toBytesList(keys), columnParent,
+ slicePredicate);
+ }
+
+ @Override
+ public void remove(ByteBuffer key, ColumnPath columnPath, long timestamp)
+ throws HectorException {
+
+ super.remove(ps.toByteBuffer(key), columnPath, timestamp);
+ }
+
+ @Override
+ public String getName() {
+
+ return super.getName();
+ }
+
+ @Override
+ public Column getColumn(ByteBuffer key, ColumnPath columnPath)
+ throws HectorException {
+
+ return super.getColumn(ps.toByteBuffer(key), columnPath);
+ }
+
+ @Override
+ public HConsistencyLevel getConsistencyLevel(OperationType operationType) {
+
+ return super.getConsistencyLevel(operationType);
+ }
+
+ @Override
+ public String toString() {
+
+ return super.toString();
+ }
+
+}
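
PrefixedKeyspaceServiceImpl is a thin key-translating decorator over KeyspaceServiceImpl: outbound row keys, key lists, mutation maps, and KeyRange bounds are wrapped with ps.toByteBuffer / toBytesList / toBytesMap / toCassandra before delegating to super, and row-keyed results are unwrapped again with ps.fromBytesMap / fromCassandra. The effect on a single key, as a sketch (ps is the PrefixedSerializer<ByteBuffer, ByteBuffer> field built in the constructors above):

    StringSerializer se = new StringSerializer();
    ByteBuffer appKey = se.toByteBuffer("row-1");    // key as the application sees it
    ByteBuffer storedKey = ps.toByteBuffer(appKey);  // prefix bytes + "row-1" bytes on the wire

    // Rows written through keyspaces with different prefixes therefore never
    // collide, even though they share the same physical column family.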
core/src/main/java/me/prettyprint/hector/api/factory/HFactory.java (459 lines changed)
@@ -5,6 +5,7 @@
import java.util.Map;
import me.prettyprint.cassandra.model.ExecutingKeyspace;
+import me.prettyprint.cassandra.model.ExecutingPrefixedKeyspace;
import me.prettyprint.cassandra.model.HColumnImpl;
import me.prettyprint.cassandra.model.HSuperColumnImpl;
import me.prettyprint.cassandra.model.IndexedSlicesQuery;
@@ -26,7 +27,6 @@
import me.prettyprint.cassandra.model.thrift.ThriftSuperCountQuery;
import me.prettyprint.cassandra.model.thrift.ThriftSuperSliceQuery;
import me.prettyprint.cassandra.serializers.StringSerializer;
-import me.prettyprint.cassandra.service.CassandraHost;
import me.prettyprint.cassandra.service.CassandraHostConfigurator;
import me.prettyprint.cassandra.service.FailoverPolicy;
import me.prettyprint.cassandra.service.ThriftCfDef;
@@ -63,65 +63,67 @@
import me.prettyprint.hector.api.query.SuperColumnQuery;
import me.prettyprint.hector.api.query.SuperCountQuery;
import me.prettyprint.hector.api.query.SuperSliceQuery;
+
/**
- * A convenience class with bunch of factory static methods to help create a mutator,
- * queries etc.
- *
+ * A convenience class with bunch of factory static methods to help create a
+ * mutator, queries etc.
+ *
* @author Ran
* @author zznate
*/
public final class HFactory {
private static final Map<String, Cluster> clusters = new HashMap<String, Cluster>();
- private static final ConsistencyLevelPolicy DEFAULT_CONSISTENCY_LEVEL_POLICY =
- new QuorumAllConsistencyLevelPolicy();
+ private static final ConsistencyLevelPolicy DEFAULT_CONSISTENCY_LEVEL_POLICY = new QuorumAllConsistencyLevelPolicy();
public static Cluster getCluster(String clusterName) {
return clusters.get(clusterName);
}
-
+
/**
- * Method tries to create a Cluster instance for an
- * existing Cassandra cluster. If another class already
- * called getOrCreateCluster, the factory returns the
- * cached instance. If the instance doesn't exist in
- * memory, a new ThriftCluster is created and cached.
- *
+ * Method tries to create a Cluster instance for an existing Cassandra
+ * cluster. If another class already called getOrCreateCluster, the factory
+ * returns the cached instance. If the instance doesn't exist in memory, a new
+ * ThriftCluster is created and cached.
+ *
* Example usage for a default installation of Cassandra.
- *
- * String clusterName = "Test Cluster";
- * String host = "localhost:9160";
+ *
+ * String clusterName = "Test Cluster"; String host = "localhost:9160";
* Cluster cluster = HFactory.getOrCreateCluster(clusterName, host);
- *
- * Note the host should be the hostname and port number.
- * It is preferable to use the hostname instead of the IP
- * address.
*
- * @param clusterName The cluster name. This is an identifying string for the cluster, e.g.
- * "production" or "test" etc. Clusters will be created on demand per each unique clusterName key.
- * @param hostIp host:ip format string
+ * Note the host should be the hostname and port number. It is preferable to
+ * use the hostname instead of the IP address.
+ *
+ * @param clusterName
+ * The cluster name. This is an identifying string for the cluster,
+ * e.g. "production" or "test" etc. Clusters will be created on
+ * demand per each unique clusterName key.
+ * @param hostIp
+ * host:ip format string
* @return
*/
- public static Cluster getOrCreateCluster(String clusterName, String hostIp) {
- return getOrCreateCluster(clusterName, new CassandraHostConfigurator(hostIp));
+ public static Cluster getOrCreateCluster(String clusterName, String hostIp) {
+ return getOrCreateCluster(clusterName,
+ new CassandraHostConfigurator(hostIp));
}
/**
- * Method tries to create a Cluster instance for an
- * existing Cassandra cluster. If another class already
- * called getOrCreateCluster, the factory returns the
- * cached instance. If the instance doesn't exist in
- * memory, a new ThriftCluster is created and cached.
- *
+ * Method tries to create a Cluster instance for an existing Cassandra
+ * cluster. If another class already called getOrCreateCluster, the factory
+ * returns the cached instance. If the instance doesn't exist in memory, a new
+ * ThriftCluster is created and cached.
+ *
* Example usage for a default installation of Cassandra.
- *
- * String clusterName = "Test Cluster";
- * String host = "localhost:9160";
- * Cluster cluster = HFactory.getOrCreateCluster(clusterName, new CassandraHostConfigurator(host));
- *
- * @param clusterName The cluster name. This is an identifying string for the cluster, e.g.
- * "production" or "test" etc. Clusters will be created on demand per each unique clusterName key.
+ *
+ * String clusterName = "Test Cluster"; String host = "localhost:9160";
+ * Cluster cluster = HFactory.getOrCreateCluster(clusterName, new
+ * CassandraHostConfigurator(host));
+ *
+ * @param clusterName
+ * The cluster name. This is an identifying string for the cluster,
+ * e.g. "production" or "test" etc. Clusters will be created on
+ * demand per each unique clusterName key.
* @param cassandraHostConfigurator
*/
public static Cluster getOrCreateCluster(String clusterName,
@@ -137,221 +139,307 @@ public static Cluster getOrCreateCluster(String clusterName,
}
/**
- * Method looks in the cache for the cluster by name. If
- * none exists, a new ThriftCluster instance is created.
- *
- * @param clusterName The cluster name. This is an identifying string for the cluster, e.g.
- * "production" or "test" etc. Clusters will be created on demand per each unique clusterName key.
+ * Method looks in the cache for the cluster by name. If none exists, a new
+ * ThriftCluster instance is created.
+ *
+ * @param clusterName
+ * The cluster name. This is an identifying string for the cluster,
+ * e.g. "production" or "test" etc. Clusters will be created on
+ * demand per each unique clusterName key.
* @param cassandraHostConfigurator
*
*/
- public static Cluster createCluster(String clusterName, CassandraHostConfigurator cassandraHostConfigurator) {
- return clusters.get(clusterName) == null ? new ThriftCluster(clusterName, cassandraHostConfigurator) : clusters.get(clusterName);
+ public static Cluster createCluster(String clusterName,
+ CassandraHostConfigurator cassandraHostConfigurator) {
+ return clusters.get(clusterName) == null ? new ThriftCluster(clusterName,
+ cassandraHostConfigurator) : clusters.get(clusterName);
}
/**
- * Method looks in the cache for the cluster by name. If
- * none exists, a new ThriftCluster instance is created.
- *
- * @param clusterName The cluster name. This is an identifying string for the cluster, e.g.
- * "production" or "test" etc. Clusters will be created on demand per each unique clusterName key.
+ * Method looks in the cache for the cluster by name. If none exists, a new
+ * ThriftCluster instance is created.
+ *
+ * @param clusterName
+ * The cluster name. This is an identifying string for the cluster,
+ * e.g. "production" or "test" etc. Clusters will be created on
+ * demand per each unique clusterName key.
* @param cassandraHostConfigurator
* @param credentials
*/
- public static Cluster createCluster(String clusterName, CassandraHostConfigurator cassandraHostConfigurator, Map<String, String> credentials) {
- return clusters.get(clusterName) == null ? new ThriftCluster(clusterName, cassandraHostConfigurator, credentials) : clusters.get(clusterName);
+ public static Cluster createCluster(String clusterName,
+ CassandraHostConfigurator cassandraHostConfigurator,
+ Map<String, String> credentials) {
+ return clusters.get(clusterName) == null ? new ThriftCluster(clusterName,
+ cassandraHostConfigurator, credentials) : clusters.get(clusterName);
}
/**
* Creates a Keyspace with the default consistency level policy.
+ *
* @param keyspace
* @param cluster
* @return
*/
public static Keyspace createKeyspace(String keyspace, Cluster cluster) {
return createKeyspace(keyspace, cluster,
- createDefaultConsistencyLevelPolicy(), FailoverPolicy.ON_FAIL_TRY_ALL_AVAILABLE);
+ createDefaultConsistencyLevelPolicy(),
+ FailoverPolicy.ON_FAIL_TRY_ALL_AVAILABLE);
}
public static Keyspace createKeyspace(String keyspace, Cluster cluster,
ConsistencyLevelPolicy consistencyLevelPolicy) {
- return createKeyspace(keyspace, cluster,
- consistencyLevelPolicy, FailoverPolicy.ON_FAIL_TRY_ALL_AVAILABLE);
+ return createKeyspace(keyspace, cluster, consistencyLevelPolicy,
+ FailoverPolicy.ON_FAIL_TRY_ALL_AVAILABLE);
}
public static Keyspace createKeyspace(String keyspace, Cluster cluster,
- ConsistencyLevelPolicy consistencyLevelPolicy, FailoverPolicy failoverPolicy) {
- return new ExecutingKeyspace(keyspace, cluster.getConnectionManager(), consistencyLevelPolicy, failoverPolicy, cluster.getCredentials());
+ ConsistencyLevelPolicy consistencyLevelPolicy,
+ FailoverPolicy failoverPolicy) {
+ return new ExecutingKeyspace(keyspace, cluster.getConnectionManager(),
+ consistencyLevelPolicy, failoverPolicy, cluster.getCredentials());
}
public static Keyspace createKeyspace(String keyspace, Cluster cluster,
- ConsistencyLevelPolicy consistencyLevelPolicy, FailoverPolicy failoverPolicy,
- Map<String, String> credentials) {
- return new ExecutingKeyspace(keyspace, cluster.getConnectionManager(), consistencyLevelPolicy, failoverPolicy, credentials);
+ ConsistencyLevelPolicy consistencyLevelPolicy,
+ FailoverPolicy failoverPolicy, Map<String, String> credentials) {
+ return new ExecutingKeyspace(keyspace, cluster.getConnectionManager(),
+ consistencyLevelPolicy, failoverPolicy, credentials);
+ }
+
+ public static <E> Keyspace createPrefixedKeyspace(String keyspace,
+ E keyPrefix, Serializer<E> keyPrefixSerializer, Cluster cluster) {
+ return createPrefixedKeyspace(keyspace, keyPrefix, keyPrefixSerializer,
+ cluster, createDefaultConsistencyLevelPolicy(),
+ FailoverPolicy.ON_FAIL_TRY_ALL_AVAILABLE);
+ }
+
+ public static <E> Keyspace createPrefixedKeyspace(String keyspace,
+ E keyPrefix, Serializer<E> keyPrefixSerializer, Cluster cluster,
+ ConsistencyLevelPolicy consistencyLevelPolicy) {
+ return createPrefixedKeyspace(keyspace, keyPrefix, keyPrefixSerializer,
+ cluster, consistencyLevelPolicy,
+ FailoverPolicy.ON_FAIL_TRY_ALL_AVAILABLE);
+ }
+
+ public static <E> Keyspace createPrefixedKeyspace(String keyspace,
+ E keyPrefix, Serializer<E> keyPrefixSerializer, Cluster cluster,
+ ConsistencyLevelPolicy consistencyLevelPolicy,
+ FailoverPolicy failoverPolicy) {
+ return new ExecutingPrefixedKeyspace<E>(keyspace, keyPrefix,
+ keyPrefixSerializer, cluster.getConnectionManager(),
+ consistencyLevelPolicy, failoverPolicy, cluster.getCredentials());
+ }
+
+ public static <E> Keyspace createPrefixedKeyspace(String keyspace,
+ E keyPrefix, Serializer<E> keyPrefixSerializer, Cluster cluster,
+ ConsistencyLevelPolicy consistencyLevelPolicy,
+ FailoverPolicy failoverPolicy, Map<String, String> credentials) {
+ return new ExecutingPrefixedKeyspace<E>(keyspace, keyPrefix,
+ keyPrefixSerializer, cluster.getConnectionManager(),
+ consistencyLevelPolicy, failoverPolicy, credentials);
}
public static ConsistencyLevelPolicy createDefaultConsistencyLevelPolicy() {
return DEFAULT_CONSISTENCY_LEVEL_POLICY;
}
- public static <K,N,V> Mutator<K> createMutator(Keyspace keyspace, Serializer<K> keySerializer) {
+ public static <K, N, V> Mutator<K> createMutator(Keyspace keyspace,
+ Serializer<K> keySerializer) {
return new MutatorImpl<K>(keyspace, keySerializer);
}
- public static <K,N,V> ColumnQuery<K,N,V> createColumnQuery(Keyspace keyspace, Serializer<K> keySerializer,
+ public static <K, N, V> ColumnQuery<K, N, V> createColumnQuery(
+ Keyspace keyspace, Serializer<K> keySerializer,
Serializer<N> nameSerializer, Serializer<V> valueSerializer) {
- return new ThriftColumnQuery<K,N,V>(keyspace, keySerializer, nameSerializer, valueSerializer);
+ return new ThriftColumnQuery<K, N, V>(keyspace, keySerializer,
+ nameSerializer, valueSerializer);
}
public static <K, N> CountQuery<K, N> createCountQuery(Keyspace keyspace,
Serializer<K> keySerializer, Serializer<N> nameSerializer) {
return new ThriftCountQuery<K, N>(keyspace, keySerializer, nameSerializer);
}
- public static <K,SN> SuperCountQuery<K,SN> createSuperCountQuery(Keyspace keyspace,
- Serializer<K> keySerializer, Serializer<SN> superNameSerializer) {
- return new ThriftSuperCountQuery<K,SN>(keyspace, keySerializer, superNameSerializer);
+ public static <K, SN> SuperCountQuery<K, SN> createSuperCountQuery(
+ Keyspace keyspace, Serializer<K> keySerializer,
+ Serializer<SN> superNameSerializer) {
+ return new ThriftSuperCountQuery<K, SN>(keyspace, keySerializer,
+ superNameSerializer);
}
- public static <K,SN,N> SubCountQuery<K,SN,N> createSubCountQuery(Keyspace keyspace,
- Serializer<K> keySerializer, Serializer<SN> superNameSerializer, Serializer<N> nameSerializer) {
- return new ThriftSubCountQuery<K,SN,N>(keyspace, keySerializer, superNameSerializer, nameSerializer);
+ public static <K, SN, N> SubCountQuery<K, SN, N> createSubCountQuery(
+ Keyspace keyspace, Serializer<K> keySerializer,
+ Serializer<SN> superNameSerializer, Serializer<N> nameSerializer) {
+ return new ThriftSubCountQuery<K, SN, N>(keyspace, keySerializer,
+ superNameSerializer, nameSerializer);
}
- public static ColumnQuery<String, String, String> createStringColumnQuery(Keyspace keyspace) {
+ public static ColumnQuery<String, String, String> createStringColumnQuery(
+ Keyspace keyspace) {
StringSerializer se = StringSerializer.get();
return createColumnQuery(keyspace, se, se, se);
}
- public static <K,SN,N,V> SuperColumnQuery<K,SN,N,V> createSuperColumnQuery(Keyspace keyspace,
- Serializer<K> keySerializer, Serializer<SN> sNameSerializer, Serializer<N> nameSerializer, Serializer<V> valueSerializer) {
- return new ThriftSuperColumnQuery<K,SN, N, V>(keyspace, keySerializer, sNameSerializer, nameSerializer, valueSerializer);
+ public static <K, SN, N, V> SuperColumnQuery<K, SN, N, V> createSuperColumnQuery(
+ Keyspace keyspace, Serializer<K> keySerializer,
+ Serializer<SN> sNameSerializer, Serializer<N> nameSerializer,
+ Serializer<V> valueSerializer) {
+ return new ThriftSuperColumnQuery<K, SN, N, V>(keyspace, keySerializer,
+ sNameSerializer, nameSerializer, valueSerializer);
}
-
- public static <K,N,V> MultigetSliceQuery<K,N,V> createMultigetSliceQuery(
- Keyspace keyspace, Serializer<K> keySerializer, Serializer<N> nameSerializer, Serializer<V> valueSerializer) {
- return new ThriftMultigetSliceQuery<K,N,V>(keyspace, keySerializer, nameSerializer, valueSerializer);
+ public static <K, N, V> MultigetSliceQuery<K, N, V> createMultigetSliceQuery(
+ Keyspace keyspace, Serializer<K> keySerializer,
+ Serializer<N> nameSerializer, Serializer<V> valueSerializer) {
+ return new ThriftMultigetSliceQuery<K, N, V>(keyspace, keySerializer,
+ nameSerializer, valueSerializer);
}
- public static <K, SN, N, V> SubColumnQuery<K, SN, N, V> createSubColumnQuery(Keyspace keyspace,
- Serializer<K> keySerializer, Serializer<SN> sNameSerializer, Serializer<N> nameSerializer,
+ public static <K, SN, N, V> SubColumnQuery<K, SN, N, V> createSubColumnQuery(
+ Keyspace keyspace, Serializer<K> keySerializer,
+ Serializer<SN> sNameSerializer, Serializer<N> nameSerializer,
Serializer<V> valueSerializer) {
- return new ThriftSubColumnQuery<K, SN, N, V>(keyspace, keySerializer, sNameSerializer, nameSerializer,
- valueSerializer);
+ return new ThriftSubColumnQuery<K, SN, N, V>(keyspace, keySerializer,
+ sNameSerializer, nameSerializer, valueSerializer);
}
- public static <K,SN,N,V> MultigetSuperSliceQuery<K,SN,N,V> createMultigetSuperSliceQuery(
- Keyspace keyspace, Serializer<K> keySerializer, Serializer<SN> sNameSerializer, Serializer<N> nameSerializer, Serializer<V> valueSerializer) {
- return new ThriftMultigetSuperSliceQuery<K,SN,N,V>(keyspace, keySerializer, sNameSerializer, nameSerializer, valueSerializer);
+ public static <K, SN, N, V> MultigetSuperSliceQuery<K, SN, N, V> createMultigetSuperSliceQuery(
+ Keyspace keyspace, Serializer<K> keySerializer,
+ Serializer<SN> sNameSerializer, Serializer<N> nameSerializer,
+ Serializer<V> valueSerializer) {
+ return new ThriftMultigetSuperSliceQuery<K, SN, N, V>(keyspace,
+ keySerializer, sNameSerializer, nameSerializer, valueSerializer);
}
- public static <K,SN,N,V> MultigetSubSliceQuery<K,SN,N,V> createMultigetSubSliceQuery(
- Keyspace keyspace, Serializer<K> keySerializer, Serializer<SN> sNameSerializer, Serializer<N> nameSerializer, Serializer<V> valueSerializer) {
- return new ThriftMultigetSubSliceQuery<K,SN,N,V>(keyspace, keySerializer, sNameSerializer, nameSerializer, valueSerializer);
+ public static <K, SN, N, V> MultigetSubSliceQuery<K, SN, N, V> createMultigetSubSliceQuery(
+ Keyspace keyspace, Serializer<K> keySerializer,
+ Serializer<SN> sNameSerializer, Serializer<N> nameSerializer,
+ Serializer<V> valueSerializer) {
+ return new ThriftMultigetSubSliceQuery<K, SN, N, V>(keyspace,
+ keySerializer, sNameSerializer, nameSerializer, valueSerializer);
}
- public static <K,N,V> RangeSlicesQuery<K,N,V> createRangeSlicesQuery(
- Keyspace keyspace, Serializer<K> keySerializer, Serializer<N> nameSerializer, Serializer<V> valueSerializer) {
- return new ThriftRangeSlicesQuery<K,N,V>(keyspace, keySerializer, nameSerializer, valueSerializer);
+ public static <K, N, V> RangeSlicesQuery<K, N, V> createRangeSlicesQuery(
+ Keyspace keyspace, Serializer<K> keySerializer,
+ Serializer<N> nameSerializer, Serializer<V> valueSerializer) {
+ return new ThriftRangeSlicesQuery<K, N, V>(keyspace, keySerializer,
+ nameSerializer, valueSerializer);
}
- public static <K,SN,N,V> RangeSuperSlicesQuery<K,SN,N,V> createRangeSuperSlicesQuery(
- Keyspace keyspace, Serializer<K> keySerializer, Serializer<SN> sNameSerializer, Serializer<N> nameSerializer,
+ public static <K, SN, N, V> RangeSuperSlicesQuery<K, SN, N, V> createRangeSuperSlicesQuery(
+ Keyspace keyspace, Serializer<K> keySerializer,
+ Serializer<SN> sNameSerializer, Serializer<N> nameSerializer,
Serializer<V> valueSerializer) {
- return new ThriftRangeSuperSlicesQuery<K,SN,N,V>(keyspace, keySerializer, sNameSerializer, nameSerializer, valueSerializer);
+ return new ThriftRangeSuperSlicesQuery<K, SN, N, V>(keyspace,
+ keySerializer, sNameSerializer, nameSerializer, valueSerializer);
}
- public static <K,N,V> IndexedSlicesQuery<K, N, V> createIndexedSlicesQuery(
- Keyspace keyspace, Serializer<K> keySerializer, Serializer<N> nameSerializer,
- Serializer<V> valueSerializer) {
- return new IndexedSlicesQuery<K, N, V>(keyspace, keySerializer, nameSerializer, valueSerializer);
+ public static <K, N, V> IndexedSlicesQuery<K, N, V> createIndexedSlicesQuery(
+ Keyspace keyspace, Serializer<K> keySerializer,
+ Serializer<N> nameSerializer, Serializer<V> valueSerializer) {
+ return new IndexedSlicesQuery<K, N, V>(keyspace, keySerializer,
+ nameSerializer, valueSerializer);
}
- public static <K,SN,N,V> RangeSubSlicesQuery<K,SN,N,V> createRangeSubSlicesQuery(
- Keyspace keyspace, Serializer<K> keySerializer, Serializer<SN> sNameSerializer, Serializer<N> nameSerializer,
+ public static <K, SN, N, V> RangeSubSlicesQuery<K, SN, N, V> createRangeSubSlicesQuery(
+ Keyspace keyspace, Serializer<K> keySerializer,
+ Serializer<SN> sNameSerializer, Serializer<N> nameSerializer,
Serializer<V> valueSerializer) {
- return new ThriftRangeSubSlicesQuery<K,SN,N,V>(keyspace, keySerializer, sNameSerializer, nameSerializer, valueSerializer);
+ return new ThriftRangeSubSlicesQuery<K, SN, N, V>(keyspace, keySerializer,
+ sNameSerializer, nameSerializer, valueSerializer);
}
- public static <K,N,V> SliceQuery<K,N,V> createSliceQuery(
- Keyspace keyspace, Serializer<K> keySerializer, Serializer<N> nameSerializer, Serializer<V> valueSerializer) {
- return new ThriftSliceQuery<K,N,V>(keyspace, keySerializer, nameSerializer, valueSerializer);
+ public static <K, N, V> SliceQuery<K, N, V> createSliceQuery(
+ Keyspace keyspace, Serializer<K> keySerializer,
+ Serializer<N> nameSerializer, Serializer<V> valueSerializer) {
+ return new ThriftSliceQuery<K, N, V>(keyspace, keySerializer,
+ nameSerializer, valueSerializer);
}
- public static <K,SN,N,V> SubSliceQuery<K,SN,N,V> createSubSliceQuery(
- Keyspace keyspace, Serializer<K> keySerializer, Serializer<SN> sNameSerializer, Serializer<N> nameSerializer,
+ public static <K, SN, N, V> SubSliceQuery<K, SN, N, V> createSubSliceQuery(
+ Keyspace keyspace, Serializer<K> keySerializer,
+ Serializer<SN> sNameSerializer, Serializer<N> nameSerializer,
Serializer<V> valueSerializer) {
- return new ThriftSubSliceQuery<K,SN,N,V>(keyspace, keySerializer, sNameSerializer, nameSerializer, valueSerializer);
+ return new ThriftSubSliceQuery<K, SN, N, V>(keyspace, keySerializer,
+ sNameSerializer, nameSerializer, valueSerializer);
}
- public static <K,SN,N,V> SuperSliceQuery<K,SN,N,V> createSuperSliceQuery(
- Keyspace keyspace, Serializer<K> keySerializer, Serializer<SN> sNameSerializer, Serializer<N> nameSerializer,
+ public static <K, SN, N, V> SuperSliceQuery<K, SN, N, V> createSuperSliceQuery(
+ Keyspace keyspace, Serializer<K> keySerializer,
+ Serializer<SN> sNameSerializer, Serializer<N> nameSerializer,
Serializer<V> valueSerializer) {
- return new ThriftSuperSliceQuery<K,SN,N,V>(keyspace, keySerializer, sNameSerializer, nameSerializer, valueSerializer);
+ return new ThriftSuperSliceQuery<K, SN, N, V>(keyspace, keySerializer,
+ sNameSerializer, nameSerializer, valueSerializer);
}
/**
* createSuperColumn accepts a variable number of column arguments
- * @param name supercolumn name
+ *
+ * @param name
+ * supercolumn name
* @param columns
* @param superNameSerializer
* @param nameSerializer
* @param valueSerializer
* @return
*/
- public static <SN,N,V> HSuperColumn<SN, N, V> createSuperColumn(SN name, List<HColumn<N,V>> columns,
- Serializer<SN> superNameSerializer, Serializer<N> nameSerializer, Serializer<V> valueSerializer) {
- return new HSuperColumnImpl<SN, N, V>(name, columns, createClock(), superNameSerializer,
- nameSerializer, valueSerializer);
+ public static <SN, N, V> HSuperColumn<SN, N, V> createSuperColumn(SN name,
+ List<HColumn<N, V>> columns, Serializer<SN> superNameSerializer,
+ Serializer<N> nameSerializer, Serializer<V> valueSerializer) {
+ return new HSuperColumnImpl<SN, N, V>(name, columns, createClock(),
+ superNameSerializer, nameSerializer, valueSerializer);
}
- public static <SN,N,V> HSuperColumn<SN,N,V> createSuperColumn(SN name, List<HColumn<N,V>> columns,
- long clock, Serializer<SN> superNameSerializer, Serializer<N> nameSerializer,
+ public static <SN, N, V> HSuperColumn<SN, N, V> createSuperColumn(SN name,
+ List<HColumn<N, V>> columns, long clock,
+ Serializer<SN> superNameSerializer, Serializer<N> nameSerializer,
Serializer<V> valueSerializer) {
- return new HSuperColumnImpl<SN, N, V>(name, columns, clock, superNameSerializer, nameSerializer,
- valueSerializer);
+ return new HSuperColumnImpl<SN, N, V>(name, columns, clock,
+ superNameSerializer, nameSerializer, valueSerializer);
}
- public static <N,V> HColumn<N,V> createColumn(N name, V value, long clock,
+ public static <N, V> HColumn<N, V> createColumn(N name, V value, long clock,
Serializer<N> nameSerializer, Serializer<V> valueSerializer) {
- return new HColumnImpl<N, V>(name, value, clock, nameSerializer, valueSerializer);
+ return new HColumnImpl<N, V>(name, value, clock, nameSerializer,
+ valueSerializer);
}
/**
* Creates a column with the clock of now.
*/
- public static <N,V> HColumn<N, V> createColumn(N name, V value,
+ public static <N, V> HColumn<N, V> createColumn(N name, V value,
Serializer<N> nameSerializer, Serializer<V> valueSerializer) {
- return new HColumnImpl<N, V>(name, value, createClock(), nameSerializer, valueSerializer);
+ return new HColumnImpl<N, V>(name, value, createClock(), nameSerializer,
+ valueSerializer);
}
/**
- * Convienience method for creating a column with a String name and String value
+ * Convienience method for creating a column with a String name and String
+ * value
*/
- public static HColumn<String, String> createStringColumn(String name, String value) {
+ public static HColumn<String, String> createStringColumn(String name,
+ String value) {
StringSerializer se = StringSerializer.get();
return createColumn(name, value, se, se);
}
/**
- * Creates a clock of now with the default clock resolution (micorosec) as defined in
- * {@link CassandraHostConfigurator}.
- * Notice that this is a convenient method.
- * Be aware that there might be multiple {@link CassandraHostConfigurator} each of them with
- * different clock resolutions, in which case the result of {@link HFactory.createClock} will not be
- * consistent. {@link Keyspace.createClock()} should be used instead.
+ * Creates a clock of now with the default clock resolution (micorosec) as
+ * defined in {@link CassandraHostConfigurator}. Notice that this is a
+ * convenient method. Be aware that there might be multiple
+ * {@link CassandraHostConfigurator} each of them with different clock
+ * resolutions, in which case the result of {@link HFactory.createClock} will
+ * not be consistent. {@link Keyspace.createClock()} should be used instead.
*/
public static long createClock() {
return CassandraHostConfigurator.DEF_CLOCK_RESOLUTION.createClock();
}
/**
- * Use createKeyspaceDefinition to add a new Keyspace to cluster.
- * Example:
- *
- * String testKeyspace = "testKeyspace";
- * KeyspaceDefinition newKeyspace = HFactory.createKeyspaceDefinition(testKeyspace);
+ * Use createKeyspaceDefinition to add a new Keyspace to cluster. Example:
+ *
+ * String testKeyspace = "testKeyspace"; KeyspaceDefinition newKeyspace =
+ * HFactory.createKeyspaceDefinition(testKeyspace);
* cluster.addKeyspace(newKeyspace);
*
* @param keyspace
@@ -361,86 +449,93 @@ public static KeyspaceDefinition createKeyspaceDefinition(String keyspace) {
}
/**
- * Use createKeyspaceDefinition to add a new Keyspace to cluster.
- * Example:
- *
- * String testKeyspace = "testKeyspace";
- * KeyspaceDefinition newKeyspace = HFactory.createKeyspaceDefinition(testKeyspace);
+ * Use createKeyspaceDefinition to add a new Keyspace to cluster. Example:
+ *
+ * String testKeyspace = "testKeyspace"; KeyspaceDefinition newKeyspace =
+ * HFactory.createKeyspaceDefinition(testKeyspace);
* cluster.addKeyspace(newKeyspace);
*
* @param keyspace
- * @param strategyClass - example: org.apache.cassandra.locator.SimpleStrategy.class.getName()
- * @param replicationFactor - http://wiki.apache.org/cassandra/Operations
+ * @param strategyClass
+ * - example:
+ * org.apache.cassandra.locator.SimpleStrategy.class.getName()
+ * @param replicationFactor
+ * - http://wiki.apache.org/cassandra/Operations
*/
- public static KeyspaceDefinition createKeyspaceDefinition(String keyspaceName, String strategyClass, int replicationFactor,
+ public static KeyspaceDefinition createKeyspaceDefinition(
+ String keyspaceName, String strategyClass, int replicationFactor,
List<ColumnFamilyDefinition> cfDefs) {
- return new ThriftKsDef(keyspaceName, strategyClass, replicationFactor, cfDefs);
+ return new ThriftKsDef(keyspaceName, strategyClass, replicationFactor,
+ cfDefs);
}
/**
* Create a column family for a given keyspace without comparator type.
- * Example:
- * String keyspace = "testKeyspace";
- * String column1 = "testcolumn";
- * ColumnFamilyDefinition columnFamily1 = HFactory.createColumnFamilyDefinition(keyspace, column1);
- * List<ColumnFamilyDefinition> columns = new ArrayList<ColumnFamilyDefinition>();
- * columns.add(columnFamily1);
- * KeyspaceDefinition testKeyspace = HFactory.createKeyspaceDefinition(keyspace,
- * org.apache.cassandra.locator.SimpleStrategy.class.getName(),
- * 1,
- * columns);
- * cluster.addKeyspace(testKeyspace);
+ * Example: String keyspace = "testKeyspace"; String column1 = "testcolumn";
+ * ColumnFamilyDefinition columnFamily1 =
+ * HFactory.createColumnFamilyDefinition(keyspace, column1);
+ * List<ColumnFamilyDefinition> columns = new
+ * ArrayList<ColumnFamilyDefinition>(); columns.add(columnFamily1);
+ * KeyspaceDefinition testKeyspace =
+ * HFactory.createKeyspaceDefinition(keyspace,
+ * org.apache.cassandra.locator.SimpleStrategy.class.getName(), 1, columns);
+ * cluster.addKeyspace(testKeyspace);
*
* @param keyspace
* @param columnFamilyName
*/
- public static ColumnFamilyDefinition createColumnFamilyDefinition(String keyspace, String cfName) {
+ public static ColumnFamilyDefinition createColumnFamilyDefinition(
+ String keyspace, String cfName) {
return new ThriftCfDef(keyspace, cfName);
}
/**
* Create a column family for a given keyspace without comparator type.
- * Example:
- * String keyspace = "testKeyspace";
- * String column1 = "testcolumn";
- * ColumnFamilyDefinition columnFamily1 = HFactory.createColumnFamilyDefinition(keyspace, column1, ComparatorType.UTF8TYPE);
- * List<ColumnFamilyDefinition> columns = new ArrayList<ColumnFamilyDefinition>();
- * columns.add(columnFamily1);
- * KeyspaceDefinition testKeyspace = HFactory.createKeyspaceDefinition(keyspace,
- * org.apache.cassandra.locator.SimpleStrategy.class.getName(),
- * 1,
- * columns);
- * cluster.addKeyspace(testKeyspace);
+ * Example: String keyspace = "testKeyspace"; String column1 = "testcolumn";
+ * ColumnFamilyDefinition columnFamily1 =
+ * HFactory.createColumnFamilyDefinition(keyspace, column1,
+ * ComparatorType.UTF8TYPE); List<ColumnFamilyDefinition> columns = new
+ * ArrayList<ColumnFamilyDefinition>(); columns.add(columnFamily1);
+ * KeyspaceDefinition testKeyspace =
+ * HFactory.createKeyspaceDefinition(keyspace,
+ * org.apache.cassandra.locator.SimpleStrategy.class.getName(), 1, columns);
+ * cluster.addKeyspace(testKeyspace);
*
* @param keyspace
* @param columnFamilyName
* @param comparatorType
*/
- public static ColumnFamilyDefinition createColumnFamilyDefinition(String keyspace, String cfName, ComparatorType comparatorType) {
+ public static ColumnFamilyDefinition createColumnFamilyDefinition(
+ String keyspace, String cfName, ComparatorType comparatorType) {
return new ThriftCfDef(keyspace, cfName, comparatorType);
}
- public static ColumnFamilyDefinition createColumnFamilyDefinition(String keyspace, String cfName, ComparatorType comparatorType, List<ColumnDefinition> columnMetadata) {
+ public static ColumnFamilyDefinition createColumnFamilyDefinition(
+ String keyspace, String cfName, ComparatorType comparatorType,
+ List<ColumnDefinition> columnMetadata) {
return new ThriftCfDef(keyspace, cfName, comparatorType, columnMetadata);
}
/**
- * Create a clock resolution based on <code>clockResolutionName</code> which has to match any of the constants defined
- * at {@link ClockResolution}
- * @param clockResolutionName type of clock resolution to create
+ * Create a clock resolution based on <code>clockResolutionName</code> which
+ * has to match any of the constants defined at {@link ClockResolution}
+ *
+ * @param clockResolutionName
+ * type of clock resolution to create
* @return a ClockResolution
*/
public static ClockResolution createClockResolution(String clockResolutionName) {
- if (clockResolutionName.equals(ClockResolution.SECONDS)) {
- return new SecondsClockResolution();
- } else if (clockResolutionName.equals(ClockResolution.MILLISECONDS)) {
- return new MillisecondsClockResolution();
- } else if (clockResolutionName.equals(ClockResolution.MICROSECONDS)) {
- return new MicrosecondsClockResolution();
- } else if (clockResolutionName.equals(ClockResolution.MICROSECONDS_SYNC)) {
- return new MicrosecondsSyncClockResolution();
- }
- throw new RuntimeException(String.format("Unsupported clock resolution: %s", clockResolutionName));
+ if (clockResolutionName.equals(ClockResolution.SECONDS)) {
+ return new SecondsClockResolution();
+ } else if (clockResolutionName.equals(ClockResolution.MILLISECONDS)) {
+ return new MillisecondsClockResolution();
+ } else if (clockResolutionName.equals(ClockResolution.MICROSECONDS)) {
+ return new MicrosecondsClockResolution();
+ } else if (clockResolutionName.equals(ClockResolution.MICROSECONDS_SYNC)) {
+ return new MicrosecondsSyncClockResolution();
+ }
+ throw new RuntimeException(String.format(
+ "Unsupported clock resolution: %s", clockResolutionName));
}
}
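
The new createPrefixedKeyspace overloads are the public entry point for virtual keyspaces. An end-to-end usage sketch follows; the cluster name, host, keyspace, and column family are placeholders, and Mutator.insert is the standard Hector mutator call rather than anything added by this commit.

    // Placeholder names throughout: "Test Cluster", "localhost:9160", "Keyspace1", "Standard1".
    Cluster cluster = HFactory.getOrCreateCluster("Test Cluster", "localhost:9160");

    UUID tenantId = UUID.randomUUID();           // one prefix per tenant / virtual keyspace
    Keyspace virtualKs = HFactory.createPrefixedKeyspace(
        "Keyspace1", tenantId, new UUIDSerializer(), cluster);

    // Used exactly like an ordinary Keyspace; row keys are transparently
    // prefixed with the serialized tenant id before reaching Cassandra.
    Mutator<String> mutator = HFactory.createMutator(virtualKs, StringSerializer.get());
    mutator.insert("row-1", "Standard1", HFactory.createStringColumn("name", "value"));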
core/src/test/java/me/prettyprint/cassandra/serializers/PrefixedSerializerTest.java (63 lines changed)
@@ -0,0 +1,63 @@
+package me.prettyprint.cassandra.serializers;
+
+import java.io.UnsupportedEncodingException;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.UUID;
+
+import org.junit.Assert;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.junit.runners.Parameterized;
+import org.junit.runners.Parameterized.Parameters;
+
+@RunWith(value = Parameterized.class)
+public class PrefixedSerializerTest {
+
+ private final String str;
+
+ public PrefixedSerializerTest(String str) {
+ this.str = str;
+ }
+
+ public static UUID prefixUUID = UUID.randomUUID();
+
+ @Parameters
+ public static Collection<Object[]> data() throws UnsupportedEncodingException {
+ Object[][] data = new Object[][] { { "" }, { null }, { "123" }, { "QWER" },
+ { "!@#$#$^%&^*fdghdfghdfgh%^&*" },
+ { new String("\u05E9".getBytes(), "utf-8") } };
+ return Arrays.asList(data);
+ }
+
+ @Test
+ public void test() {
+ UUIDSerializer ue = new UUIDSerializer();
+ StringSerializer se = new StringSerializer();
+ PrefixedSerializer<UUID, String> pe = new PrefixedSerializer<UUID, String>(
+ prefixUUID, ue, se);
+ Assert.assertEquals(str, pe.fromByteBuffer(pe.toByteBuffer(str)));
+ }
+
+ @Test
+ public void testBadPrefix() {
+ if (str == null) {
+ // null serialization is always null,
+ // so no prefix comparison takes place to test
+ return;
+ }
+ UUIDSerializer ue = new UUIDSerializer();
+ StringSerializer se = new StringSerializer();
+ PrefixedSerializer<UUID, String> pe1 = new PrefixedSerializer<UUID, String>(
+ prefixUUID, ue, se);
+ UUID testUUID = UUID.randomUUID();
+ Assert.assertNotSame(prefixUUID, testUUID);
+ PrefixedSerializer<UUID, String> pe2 = new PrefixedSerializer<UUID, String>(
+ testUUID, ue, se);
+ try {
+ pe2.fromByteBuffer(pe1.toByteBuffer(str));
+ Assert.fail("Different prefixes should fail comparison");
+ } catch (Exception e) {
+ }
+ }
+}
