
Commit

Merge pull request #152 from apigee/USERGRID-488
Usergrid 488
zznate committed Aug 6, 2012
2 parents 64c9c8e + 1da0acc commit c805d24
Showing 5 changed files with 223 additions and 231 deletions.
80 changes: 15 additions & 65 deletions core/src/main/java/org/usergrid/persistence/Results.java
@@ -18,8 +18,10 @@
import static org.apache.commons.codec.binary.Base64.encodeBase64URLSafeString;
import static org.usergrid.persistence.SimpleEntityRef.ref;
import static org.usergrid.utils.ClassUtils.cast;
import static org.usergrid.utils.CompositeUtils.setEqualityFlag;
import static org.usergrid.utils.ConversionUtils.bytes;

import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.LinkedHashMap;
@@ -32,14 +34,20 @@

import javax.xml.bind.annotation.XmlRootElement;

import me.prettyprint.hector.api.beans.DynamicComposite;
import me.prettyprint.hector.api.beans.AbstractComposite.ComponentEquality;

import org.codehaus.jackson.map.annotate.JsonSerialize;
import org.codehaus.jackson.map.annotate.JsonSerialize.Inclusion;
import org.usergrid.persistence.query.ir.QuerySlice.RangeValue;
import org.usergrid.utils.MapUtils;
import org.usergrid.utils.StringUtils;

@XmlRootElement
public class Results implements Iterable<Entity> {

private static final String EMPTY = "";

public enum Level {
IDS, REFS, CORE_PROPERTIES, ALL_PROPERTIES, LINKED_PROPERTIES
}
@@ -1084,6 +1092,13 @@ public void setCursorToLastResult() {
}
}

/**
* Set the cursor to the empty value. This signifies that no more results can be found for this range.
*/
public void setCursorMax() {
cursor = EMPTY;
}

public void setCursor(String cursor) {
this.cursor = cursor;
}
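
A note on the new sentinel: setCursorMax(), added in the hunk above, stores the empty string rather than a real resume token. The sketch below is hypothetical caller code, not part of this commit; fetchPage() and handle() are assumed helpers, and it presumes the usual getCursor() accessor on Results. It only illustrates how a paging loop could treat the empty cursor as "range exhausted".

// Hypothetical paging loop (not part of this commit). Assumes a fetchPage(String)
// helper that runs one query slice and returns a Results page, plus the existing
// iterator (Results implements Iterable<Entity>) and an assumed getCursor() accessor.
import org.usergrid.persistence.Entity;
import org.usergrid.persistence.Results;

public abstract class PagingSketch {

    // Assumed helper: execute the query for the given cursor and return one page.
    protected abstract Results fetchPage(String cursor);

    // Assumed per-entity callback.
    protected abstract void handle(Entity entity);

    public void processAll() {
        String cursor = null;
        do {
            Results page = fetchPage(cursor);
            for (Entity entity : page) {       // Results implements Iterable<Entity>
                handle(entity);
            }
            cursor = page.getCursor();
            // setCursorMax() stores the empty string, signalling the range is exhausted,
            // so an empty (or null) cursor ends the loop instead of issuing another query.
        } while (cursor != null && cursor.length() > 0);
    }
}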
@@ -1164,69 +1179,4 @@ public void mergeEntitiesWithMetadata() {
}
}

/*
* private void updateIndex(UUID id, String name, Object value) { Map<UUID,
* Entity> map = getEntitiesMap(); Entity entity = null; if (map != null) {
* entity = map.get(id); } // Map<String, Map<Object, List<UUID>>>
* metadataValueToIds; // Map<String, Map<Object, List<Entity>>>
* metadataValueToEntities; if (metadataValueToIds == null) {
* metadataValueToIds = new LinkedHashMap<String, Map<Object, Set<UUID>>>();
* metadataValueToEntities = new LinkedHashMap<String, Map<Object,
* List<Entity>>>(); } Map<Object, Set<UUID>> valueToIds =
* metadataValueToIds.get(name); Map<Object, List<Entity>> valueToEntities =
* metadataValueToEntities .get(name); if (valueToIds == null) { valueToIds
* = new LinkedHashMap<Object, Set<UUID>>(); metadataValueToIds.put(name,
* valueToIds); valueToEntities = new LinkedHashMap<Object, List<Entity>>();
* metadataValueToEntities.put(name, valueToEntities); } Set<UUID> idSet =
* valueToIds.get(value); List<Entity> eList = valueToEntities.get(value);
* if (idSet == null) { idSet = new LinkedHashSet<UUID>();
* valueToIds.put(value, idSet); eList = new ArrayList<Entity>();
* valueToEntities.put(value, eList); } if (!idSet.contains(id)) {
* idSet.add(id); if (entity != null) { eList.add(entity); } }
*
* }
*
* public List<Object> getValuesForMetadata(String name) { Map<Object,
* Set<UUID>> valueToIds = metadataValueToIds.get(name); if (valueToIds ==
* null) { return null; } if (valueToIds.size() > 0) { return new
* ArrayList<Object>(valueToIds.keySet()); } return null; }
*
* public List<Entity> getEntitiesForMetadata(String name, Object value) {
* Map<Object, List<Entity>> valueToEntities = metadataValueToEntities
* .get(name); if (valueToEntities == null) { return null; } List<Entity>
* eList = valueToEntities.get(value); return eList; }
*
* public List<UUID> getIdsForMetadata(String name, Object value) {
* Map<Object, Set<UUID>> valueToIds = metadataValueToIds.get(name); if
* (valueToIds == null) { return null; } Set<UUID> idSet =
* valueToIds.get(value); if ((idSet != null) && (idSet.size() > 0)) {
* List<UUID> idList = new ArrayList<UUID>(idSet); return idList; } return
* null; }
*/
/*
* public Map<String, Map<String, List<UUID>>>
* getConnectionTypeAndEntityTypeToEntityIdMap() { if
* (connectionTypeAndEntityTypeToEntityIdMap != null) { return
* connectionTypeAndEntityTypeToEntityIdMap; } if (connections != null) {
* connectionTypeAndEntityTypeToEntityIdMap = new LinkedHashMap<String,
* Map<String, List<UUID>>>(); for (Connection connection : connections) {
* MapUtils.addMapMapList( connectionTypeAndEntityTypeToEntityIdMap,
* connection.getConnectionType(), connection.getType(),
* connection.getId()); } } return connectionTypeAndEntityTypeToEntityIdMap;
* }
*
* public Map<String, Map<String, List<Entity>>>
* getConnectionTypeAndEntityTypeToEntityMap() { if
* (connectionTypeAndEntityTypeToEntityMap != null) { return
* connectionTypeAndEntityTypeToEntityMap; } if (connections != null) {
* getEntitiesMap(); connectionTypeAndEntityTypeToEntityMap = new
* LinkedHashMap<String, Map<String, List<Entity>>>(); if (entitiesMap !=
* null) { for (Connection connection : connections) { Entity entity =
* entitiesMap.get(connection .getConnectedEntity().getId()); if (entity !=
* null) { MapUtils.addMapMapList( connectionTypeAndEntityTypeToEntityMap,
* connection.getConnectionType(), connection
* .getConnectedEntity().getType(), entity); } } } } return
* connectionTypeAndEntityTypeToEntityMap; }
*/

}
@@ -80,7 +80,8 @@ public class QueryProcessor {
private String entityType;
private CollectionInfo collectionInfo;

public QueryProcessor(Query query, CollectionInfo collectionInfo) throws PersistenceException {
public QueryProcessor(Query query, CollectionInfo collectionInfo)
throws PersistenceException {
sortCache = new SortCache(query.getSortPredicates());
cursorCache = new CursorCache(query.getCursor());
rootOperand = query.getRootOperand();
@@ -204,9 +205,8 @@ private class TreeEvaluator implements QueryVisitor {
// stack for nodes that will be used to construct the tree and create
// objects
private Stack<QueryNode> nodes = new Stack<QueryNode>();

private Schema schema = getDefaultSchema();

private Schema schema = getDefaultSchema();

private int contextCount = -1;

@@ -326,8 +326,7 @@ public void visit(ContainsOperand op) throws NoFullTextIndexException {

String propertyName = op.getProperty().getValue();

if (!schema.isPropertyFulltextIndexed(entityType,
propertyName)) {
if (!schema.isPropertyFulltextIndexed(entityType, propertyName)) {
throw new NoFullTextIndexException(entityType, propertyName);
}

@@ -514,7 +513,8 @@ private SliceNode newSliceNode() {
* @param child
*/
private void createNewSlice(Operand child) {
if (child instanceof EqualityOperand || child instanceof AndOperand || child instanceof ContainsOperand) {
if (child instanceof EqualityOperand || child instanceof AndOperand
|| child instanceof ContainsOperand) {
newSliceNode();
}

@@ -538,8 +538,10 @@ private String appendSuffix(String str, String suffix) {
}

private void checkIndexed(String propertyName) throws NoIndexException {

if (!schema.isPropertyIndexed(entityType, propertyName) && collectionInfo != null && !collectionInfo.isSubkeyProperty(propertyName)) {

if (!schema.isPropertyIndexed(entityType, propertyName)
&& collectionInfo != null
&& !collectionInfo.isSubkeyProperty(propertyName)) {
throw new NoIndexException(entityType, propertyName);
}
}
@@ -574,12 +576,17 @@ private CursorCache(String cursorString) {

String[] parts = split(c, ':');

if (parts.length == 2 && isNotBlank(parts[1])) {
if (parts.length >= 1) {

int hashCode = parseInt(parts[0]);

ByteBuffer cursorBytes = ByteBuffer
.wrap(decodeBase64(parts[1]));
ByteBuffer cursorBytes = null;

if (parts.length == 2) {
cursorBytes = ByteBuffer.wrap(decodeBase64(parts[1]));
} else {
cursorBytes = ByteBuffer.allocate(0);
}

cursors.put(hashCode, cursorBytes);
}
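
The relaxed parsing above is the consumer side of that sentinel: a cursor component carrying only a slice hash (no Base64 payload) is no longer discarded; it is mapped to an empty ByteBuffer. Below is a minimal, self-contained sketch of that parse rule under stated assumptions: it uses String.split and Integer.parseInt in place of the commons-lang split helper, the Base64 decode is the same commons-codec call the project already imports, and the hash value is purely illustrative.

import java.nio.ByteBuffer;
import org.apache.commons.codec.binary.Base64;

// Minimal sketch of the relaxed cursor-component parsing (illustrative, not project code).
public class CursorParseSketch {
    public static void main(String[] args) {
        String component = "1881275249";        // illustrative slice hash with no payload
        String[] parts = component.split(":");

        if (parts.length >= 1) {
            int hashCode = Integer.parseInt(parts[0]);
            ByteBuffer cursorBytes;
            if (parts.length == 2) {
                // Real resume point: decode the Base64 payload.
                cursorBytes = ByteBuffer.wrap(Base64.decodeBase64(parts[1]));
            } else {
                // Bare hash: keep an empty buffer, matching the exhausted-range sentinel.
                cursorBytes = ByteBuffer.allocate(0);
            }
            System.out.println(hashCode + " -> " + cursorBytes.remaining() + " byte(s)");
        }
    }
}

Run with a bare hash, the sketch prints a zero-length buffer; given "hash:payload" it would wrap the decoded bytes, mirroring the two branches in the hunk above.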
