HHH-1775
pb00068 authored and sebersole committed Oct 26, 2012
1 parent e4fbe2f commit 7cecc68
Showing 9 changed files with 119 additions and 50 deletions.
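For context: this commit stops scanning every collection entry in the persistence context when assembling a collection batch, and instead keeps dedicated per-persister queues in BatchFetchQueue, mirroring what was already done for entity keys. The new registration and removal paths fire only for collections mapped with a batch size greater than one. A minimal mapping that would exercise them (a sketch with hypothetical Customer/Order entities, annotation-style configuration):

import java.util.Set;
import javax.persistence.Entity;
import javax.persistence.Id;
import javax.persistence.ManyToOne;
import javax.persistence.OneToMany;
import org.hibernate.annotations.BatchSize;

@Entity
public class Customer {
	@Id
	private Long id;

	// persister.getBatchSize() returns 16 for this role, so the lazy collection
	// is registered with the BatchFetchQueue as soon as it enters the
	// persistence context uninitialized, and unregistered once it is
	// initialized or evicted.
	@OneToMany(mappedBy = "customer")
	@BatchSize(size = 16)
	private Set<Order> orders;
}

@Entity
class Order {
	@Id
	private Long id;

	@ManyToOne
	private Customer customer;
}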
@@ -896,6 +896,9 @@ private Serializable getLoadedCollectionOwnerIdOrNull(CollectionEntry ce) {
public void addUninitializedCollection(CollectionPersister persister, PersistentCollection collection, Serializable id) {
CollectionEntry ce = new CollectionEntry(collection, persister, id, flushing);
addCollection(collection, ce, id);
if (persister.getBatchSize() > 1) {
batchFetchQueue.addBatchLoadableCollection(collection, ce);
}
}

/**
@@ -251,7 +251,12 @@ private void endLoadingCollection(LoadingCollectionEntry lce, CollectionPersiste
}
else {
ce.postInitialize( lce.getCollection() );
// if (ce.getLoadedPersister().getBatchSize() > 1) { // not the best place for doing this, moved into ce.postInitialize
// getLoadContext().getPersistenceContext().getBatchFetchQueue().removeBatchLoadableCollection(ce);
// }
}



boolean addToCache = hasNoQueuedAdds && // there were no queued additions
persister.hasCache() && // and the role has a cache
Expand Down
Expand Up @@ -27,15 +27,19 @@
import java.util.HashMap;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.LinkedHashSet;
import java.util.Map;
import java.util.Map.Entry;

import org.hibernate.EntityMode;
import org.hibernate.cache.spi.CacheKey;
import org.hibernate.collection.spi.PersistentCollection;
import org.hibernate.internal.CoreMessageLogger;
import org.hibernate.internal.util.MarkerObject;
import org.hibernate.internal.util.collections.IdentityMap;
import org.hibernate.persister.collection.CollectionPersister;
import org.hibernate.persister.entity.EntityPersister;
import org.jboss.logging.Logger;

/**
* Tracks entity and collection keys that are available for batch
@@ -46,19 +50,21 @@
*/
public class BatchFetchQueue {

public static final Object MARKER = new MarkerObject( "MARKER" );
private static final CoreMessageLogger LOG = Logger.getMessageLogger( CoreMessageLogger.class, BatchFetchQueue.class.getName() );

/**
 * Defines a sequence of {@link EntityKey} elements that are currently
 * eligible for batch-fetching.
 * <p/>
 * Even though this is a map, we only use the keys. A map was chosen in order to
 * utilize a {@link LinkedHashMap} to maintain sequencing as well as uniqueness.
 * <p/>
 * TODO : this would be better as a SequencedReferenceSet, but no such beast exists!
 */
private final Map batchLoadableEntityKeys = new LinkedHashMap(8);
private final Map <String,LinkedHashSet<EntityKey>> batchLoadableEntityKeys = new HashMap <String,LinkedHashSet<EntityKey>>(8);

/**
 * Cannot use PersistentCollection as the key, because PersistentSet.hashCode() would force initialization immediately.
 */
private final Map <CollectionPersister, LinkedHashMap <CollectionEntry, PersistentCollection>> batchLoadableCollections = new HashMap <CollectionPersister, LinkedHashMap <CollectionEntry, PersistentCollection>>(8);
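// Spelling out the constraint noted above (an assumption about wrapper internals,
// not text from this commit): PersistentSet implements hashCode() by delegating
// to the wrapped java.util.Set after forcing a read(), so hashing an
// uninitialized collection would trigger exactly the initialization this queue
// is meant to defer. CollectionEntry does not override hashCode(), so identity
// semantics apply and registration stays lazy; map values are never hashed.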

/**
* A map of {@link SubselectFetch subselect-fetch descriptors} keyed by the
@@ -85,6 +91,7 @@ public BatchFetchQueue(PersistenceContext context) {
*/
public void clear() {
batchLoadableEntityKeys.clear();
batchLoadableCollections.clear();
subselectsByEntityKey.clear();
}

@@ -140,17 +147,54 @@ public void clearSubselects() {
*/
public void addBatchLoadableEntityKey(EntityKey key) {
if ( key.isBatchLoadable() ) {
batchLoadableEntityKeys.put( key, MARKER );
LinkedHashSet<EntityKey> set = batchLoadableEntityKeys.get( key.getEntityName());
if (set == null) {
set = new LinkedHashSet<EntityKey>(8);
batchLoadableEntityKeys.put( key.getEntityName(), set);
}
set.add(key);
}
}


/**
 * After evicting, deleting, or loading an entity we no longer need to
 * batch-fetch it, so remove it from the queue if necessary.
 */
public void removeBatchLoadableEntityKey(EntityKey key) {
if ( key.isBatchLoadable() ) batchLoadableEntityKeys.remove(key);
if ( key.isBatchLoadable() ) {
LinkedHashSet<EntityKey> set = batchLoadableEntityKeys.get( key.getEntityName());
if (set != null) {
set.remove(key);
}
}
}


/**
 * If a CollectionEntry represents a batch-loadable collection, add
 * it to the queue.
 */
public void addBatchLoadableCollection(PersistentCollection collection, CollectionEntry ce) {
LinkedHashMap<CollectionEntry, PersistentCollection> map = batchLoadableCollections.get( ce.getLoadedPersister());
if (map == null) {
map = new LinkedHashMap<CollectionEntry, PersistentCollection>(8);
batchLoadableCollections.put( ce.getLoadedPersister(), map);
}
map.put(ce, collection);
}

/**
 * After a collection has been initialized or evicted, we no longer need
 * to batch-fetch it, so remove it from the queue if necessary.
 */
public void removeBatchLoadableCollection(CollectionEntry ce) {
LinkedHashMap<CollectionEntry, PersistentCollection> map = batchLoadableCollections.get( ce.getLoadedPersister());
if (map != null) {
map.remove(ce);
}
}

/**
@@ -171,46 +215,46 @@ public Serializable[] getCollectionBatch(
//int count = 0;
int end = -1;
boolean checkForEnd = false;
// this only works because collection entries are kept in a sequenced
// map by persistence context (maybe we should do like entities and
// keep a separate sequences set...)

for ( Map.Entry<PersistentCollection,CollectionEntry> me :
IdentityMap.concurrentEntries( (Map<PersistentCollection,CollectionEntry>) context.getCollectionEntries() )) {

CollectionEntry ce = me.getValue();
PersistentCollection collection = me.getKey();
if ( !collection.wasInitialized() && ce.getLoadedPersister() == collectionPersister ) {

if ( checkForEnd && i == end ) {
return keys; //the first key found after the given key
}

//if ( end == -1 && count > batchSize*10 ) return keys; //try out ten batches, max

final boolean isEqual = collectionPersister.getKeyType().isEqual(
id,
ce.getLoadedKey(),
collectionPersister.getFactory()
);

if ( isEqual ) {
end = i;
//checkForEnd = false;
}
else if ( !isCached( ce.getLoadedKey(), collectionPersister ) ) {
keys[i++] = ce.getLoadedKey();
//count++;
}

if ( i == batchSize ) {
i = 1; //end of array, start filling again from start
if ( end != -1 ) {
checkForEnd = true;
LinkedHashMap<CollectionEntry, PersistentCollection> map = batchLoadableCollections.get(collectionPersister);
if (map != null) {
for (Entry<CollectionEntry, PersistentCollection> me : map.entrySet()) {
CollectionEntry ce = me.getKey();
PersistentCollection collection = me.getValue();
if ( !collection.wasInitialized() ) { // should always be true

if ( checkForEnd && i == end ) {
return keys; //the first key found after the given key
}

//if ( end == -1 && count > batchSize*10 ) return keys; //try out ten batches, max

final boolean isEqual = collectionPersister.getKeyType().isEqual(
id,
ce.getLoadedKey(),
collectionPersister.getFactory()
);

if ( isEqual ) {
end = i;
//checkForEnd = false;
}
else if ( !isCached( ce.getLoadedKey(), collectionPersister ) ) {
keys[i++] = ce.getLoadedKey();
//count++;
}

if ( i == batchSize ) {
i = 1; //end of array, start filling again from start
if ( end != -1 ) {
checkForEnd = true;
}
}
}
else {
LOG.warn("Encountered initialized collection in BatchFetchQueue, this should not happen.");
}

}

}
return keys; //we ran out of keys to try
}
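Both getCollectionBatch() above and getEntityBatch() below share the same ring-filling strategy, which is easy to miss in the diff. A reduced standalone sketch over plain keys (illustrative only, not code from the commit; the real methods also skip keys found in the second-level cache): slot 0 holds the key that triggered the load, keys registered after it are preferred, and filling wraps around to overwrite earlier picks until the walk comes back to the first key that followed the trigger.

import java.io.Serializable;
import java.util.LinkedHashSet;

class BatchSelectionSketch {
	static Serializable[] selectBatch(LinkedHashSet<Serializable> queued, Serializable id, int batchSize) {
		Serializable[] keys = new Serializable[batchSize];
		keys[0] = id;             // slot 0: the key that triggered the load
		int i = 1;
		int end = -1;
		boolean checkForEnd = false;
		for (Serializable key : queued) {
			if (checkForEnd && i == end) {
				return keys;      // batch now holds the keys right after the trigger
			}
			if (key.equals(id)) {
				end = i;          // found the trigger key; remember the next slot
			}
			else {
				keys[i++] = key;  // (the real code also consults the cache here)
			}
			if (i == batchSize) {
				i = 1;            // wrap: keep slot 0, refill from slot 1
				if (end != -1) {
					checkForEnd = true;
				}
			}
		}
		return keys;              // ran out of keys to try
	}
}

With registration order [A, B, C, D, E], trigger key C, and a batch size of 3, this returns [C, D, E]: the keys queued immediately after C are fetched together with it.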
@@ -236,10 +280,9 @@ public Serializable[] getEntityBatch(
int end = -1;
boolean checkForEnd = false;

Iterator iter = batchLoadableEntityKeys.keySet().iterator();
while ( iter.hasNext() ) {
EntityKey key = (EntityKey) iter.next();
if ( key.getEntityName().equals( persister.getEntityName() ) ) { //TODO: this needn't exclude subclasses...
LinkedHashSet<EntityKey> set = batchLoadableEntityKeys.get( persister.getEntityName() ); //TODO: this needn't exclude subclasses...
if (set != null) {
for (EntityKey key : set) {
if ( checkForEnd && i == end ) {
//the first id found after the given id
return ids;
@@ -34,6 +34,7 @@
import org.hibernate.AssertionFailure;
import org.hibernate.HibernateException;
import org.hibernate.MappingException;
import org.hibernate.collection.internal.AbstractPersistentCollection;
import org.hibernate.collection.spi.PersistentCollection;
import org.hibernate.internal.CoreMessageLogger;
import org.hibernate.persister.collection.CollectionPersister;
@@ -215,6 +216,9 @@ public void postInitialize(PersistentCollection collection) throws HibernateExce
collection.getSnapshot( getLoadedPersister() ) :
null;
collection.setSnapshot(loadedKey, role, snapshot);
if (getLoadedPersister().getBatchSize() > 1) {
((AbstractPersistentCollection) collection).getSession().getPersistenceContext().getBatchFetchQueue().removeBatchLoadableCollection(this);
}
}

/**
@@ -83,6 +83,9 @@ private void evictCollection(PersistentCollection collection) {
ce.getLoadedKey(),
getSession() ) );
}
if (ce.getLoadedPersister() != null && ce.getLoadedPersister().getBatchSize() > 1) {
getSession().getPersistenceContext().getBatchFetchQueue().removeBatchLoadableCollection(ce);
}
if ( ce.getLoadedPersister() != null && ce.getLoadedKey() != null ) {
//TODO: is this 100% correct?
getSession().getPersistenceContext().getCollectionsByKey().remove(
@@ -1913,6 +1913,10 @@ public CollectionInitializer getInitializer() {
return initializer;
}

public int getBatchSize() {
return batchSize;
}

private class StandardOrderByAliasResolver implements OrderByAliasResolver {
private final String rootAlias;

@@ -1933,4 +1937,5 @@ public String resolveTableAlias(String columnReference) {
}

public abstract FilterAliasGenerator getFilterAliasGenerator(final String rootAlias);

}
@@ -307,4 +307,5 @@ public void insertRows(
public boolean indexExists(Serializable key, Object index, SessionImplementor session);
public boolean elementExists(Serializable key, Object element, SessionImplementor session);
public Object getElementByIndex(Serializable key, Object index, SessionImplementor session, Object owner);
public int getBatchSize();
}
@@ -169,7 +169,7 @@ private void scheduleBatchLoadIfNeeded(Serializable id, SessionImplementor sessi
if ( uniqueKeyPropertyName == null && id != null ) {
final EntityPersister persister = session.getFactory().getEntityPersister( getAssociatedEntityName() );
final EntityKey entityKey = session.generateEntityKey( id, persister );
if ( !session.getPersistenceContext().containsEntity( entityKey ) ) {
if ( entityKey.isBatchLoadable() && !session.getPersistenceContext().containsEntity( entityKey ) ) {
session.getPersistenceContext().getBatchFetchQueue().addBatchLoadableEntityKey( entityKey );
}
}
@@ -802,5 +802,10 @@ public boolean elementExists(Serializable key, Object element, SessionImplemento
public Object getElementByIndex(Serializable key, Object index, SessionImplementor session, Object owner) {
return null; //To change body of implemented methods use File | Settings | File Templates.
}

@Override
public int getBatchSize() {
return 0;
}
}
}
