From afc99ac42ccf26c6bc873dfea3bd361c9c970650 Mon Sep 17 00:00:00 2001
From: Martijn van Groningen
Date: Thu, 13 Sep 2012 17:33:16 +0200
Subject: [PATCH] Added id cache size to nodes stats api

Also added bloom cache size to the node *rest* api
Closes #2264
---
 .../common/trove/ExtTObjectIntHasMap.java       |  4 ++
 .../elasticsearch/index/cache/CacheStats.java   | 37 +++++++++++++++--
 .../elasticsearch/index/cache/IndexCache.java   |  4 +-
 .../elasticsearch/index/cache/id/IdCache.java   |  3 ++
 .../index/cache/id/IdReaderCache.java           |  2 +
 .../index/cache/id/IdReaderTypeCache.java       |  4 ++
 .../index/cache/id/simple/SimpleIdCache.java    |  8 ++++
 .../cache/id/simple/SimpleIdReaderCache.java    |  8 ++++
 .../id/simple/SimpleIdReaderTypeCache.java      | 40 +++++++++++++++++++
 9 files changed, 104 insertions(+), 6 deletions(-)

diff --git a/src/main/java/org/elasticsearch/common/trove/ExtTObjectIntHasMap.java b/src/main/java/org/elasticsearch/common/trove/ExtTObjectIntHasMap.java
index 1538479bd3b85..41422876a2515 100644
--- a/src/main/java/org/elasticsearch/common/trove/ExtTObjectIntHasMap.java
+++ b/src/main/java/org/elasticsearch/common/trove/ExtTObjectIntHasMap.java
@@ -48,4 +48,8 @@ public T key(T key) {
         int index = index(key);
         return index < 0 ? null : (T) _set[index];
     }
+
+    public int _valuesSize() {
+        return _values.length;
+    }
 }
diff --git a/src/main/java/org/elasticsearch/index/cache/CacheStats.java b/src/main/java/org/elasticsearch/index/cache/CacheStats.java
index e5c7e7eda10af..03cc05db41978 100644
--- a/src/main/java/org/elasticsearch/index/cache/CacheStats.java
+++ b/src/main/java/org/elasticsearch/index/cache/CacheStats.java
@@ -37,20 +37,22 @@ public class CacheStats implements Streamable, ToXContent {
     long fieldEvictions;
     long filterEvictions;
     long filterCount;
-    long fieldSize = 0;
-    long filterSize = 0;
-    long bloomSize = 0;
+    long fieldSize;
+    long filterSize;
+    long bloomSize;
+    long idCacheSize;
 
     public CacheStats() {
     }
 
-    public CacheStats(long fieldEvictions, long filterEvictions, long fieldSize, long filterSize, long filterCount, long bloomSize) {
+    public CacheStats(long fieldEvictions, long filterEvictions, long fieldSize, long filterSize, long filterCount, long bloomSize, long idCacheSize) {
         this.fieldEvictions = fieldEvictions;
         this.filterEvictions = filterEvictions;
         this.fieldSize = fieldSize;
         this.filterSize = filterSize;
         this.filterCount = filterCount;
         this.bloomSize = bloomSize;
+        this.idCacheSize = idCacheSize;
     }
 
     public void add(CacheStats stats) {
@@ -60,6 +62,7 @@ public void add(CacheStats stats) {
         this.filterSize += stats.filterSize;
         this.filterCount += stats.filterCount;
         this.bloomSize += stats.bloomSize;
+        this.idCacheSize += stats.idCacheSize;
     }
 
     public long fieldEvictions() {
@@ -142,6 +145,22 @@ public ByteSizeValue getBloomSize() {
         return bloomSize();
     }
 
+    public long idCacheSizeInBytes() {
+        return idCacheSize;
+    }
+
+    public long getIdCacheSizeInBytes() {
+        return idCacheSizeInBytes();
+    }
+
+    public ByteSizeValue idCacheSize() {
+        return new ByteSizeValue(idCacheSize);
+    }
+
+    public ByteSizeValue getIdCacheSize() {
+        return idCacheSize();
+    }
+
     @Override
     public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
         builder.startObject(Fields.CACHE);
@@ -152,6 +171,10 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws
         builder.field(Fields.FILTER_EVICTIONS, filterEvictions);
         builder.field(Fields.FILTER_SIZE, filterSize().toString());
         builder.field(Fields.FILTER_SIZE_IN_BYTES, filterSize);
+        builder.field(Fields.BLOOM_SIZE, bloomSize().toString());
+        builder.field(Fields.BLOOM_SIZE_IN_BYTES, bloomSize);
+        builder.field(Fields.ID_CACHE_SIZE, idCacheSize().toString());
+        builder.field(Fields.ID_CACHE_SIZE_IN_BYTES, idCacheSize);
         builder.endObject();
         return builder;
     }
@@ -165,6 +188,10 @@ static final class Fields {
         static final XContentBuilderString FILTER_COUNT = new XContentBuilderString("filter_count");
         static final XContentBuilderString FILTER_SIZE = new XContentBuilderString("filter_size");
         static final XContentBuilderString FILTER_SIZE_IN_BYTES = new XContentBuilderString("filter_size_in_bytes");
+        static final XContentBuilderString BLOOM_SIZE = new XContentBuilderString("bloom_size");
+        static final XContentBuilderString BLOOM_SIZE_IN_BYTES = new XContentBuilderString("bloom_size_in_bytes");
+        static final XContentBuilderString ID_CACHE_SIZE = new XContentBuilderString("id_cache_size");
+        static final XContentBuilderString ID_CACHE_SIZE_IN_BYTES = new XContentBuilderString("id_cache_size_in_bytes");
     }
 
     public static CacheStats readCacheStats(StreamInput in) throws IOException {
@@ -181,6 +208,7 @@ public void readFrom(StreamInput in) throws IOException {
         filterSize = in.readVLong();
         filterCount = in.readVLong();
         bloomSize = in.readVLong();
+        idCacheSize = in.readVLong();
     }
 
     @Override
@@ -191,5 +219,6 @@ public void writeTo(StreamOutput out) throws IOException {
         out.writeVLong(filterSize);
         out.writeVLong(filterCount);
         out.writeVLong(bloomSize);
+        out.writeVLong(idCacheSize);
     }
 }
\ No newline at end of file
diff --git a/src/main/java/org/elasticsearch/index/cache/IndexCache.java b/src/main/java/org/elasticsearch/index/cache/IndexCache.java
index de39944c2c578..8350a69bbbc79 100644
--- a/src/main/java/org/elasticsearch/index/cache/IndexCache.java
+++ b/src/main/java/org/elasticsearch/index/cache/IndexCache.java
@@ -85,7 +85,7 @@ public void setClusterService(@Nullable ClusterService clusterService) {
 
     public synchronized void invalidateCache() {
         FilterCache.EntriesStats filterEntriesStats = filterCache.entriesStats();
-        latestCacheStats = new CacheStats(fieldDataCache.evictions(), filterCache.evictions(), fieldDataCache.sizeInBytes(), filterEntriesStats.sizeInBytes, filterEntriesStats.count, bloomCache.sizeInBytes());
+        latestCacheStats = new CacheStats(fieldDataCache.evictions(), filterCache.evictions(), fieldDataCache.sizeInBytes(), filterEntriesStats.sizeInBytes, filterEntriesStats.count, bloomCache.sizeInBytes(), idCache.sizeInBytes());
         latestCacheStatsTimestamp = System.currentTimeMillis();
     }
 
@@ -93,7 +93,7 @@ public synchronized CacheStats stats() {
         long timestamp = System.currentTimeMillis();
         if ((timestamp - latestCacheStatsTimestamp) > refreshInterval.millis()) {
             FilterCache.EntriesStats filterEntriesStats = filterCache.entriesStats();
-            latestCacheStats = new CacheStats(fieldDataCache.evictions(), filterCache.evictions(), fieldDataCache.sizeInBytes(), filterEntriesStats.sizeInBytes, filterEntriesStats.count, bloomCache.sizeInBytes());
+            latestCacheStats = new CacheStats(fieldDataCache.evictions(), filterCache.evictions(), fieldDataCache.sizeInBytes(), filterEntriesStats.sizeInBytes, filterEntriesStats.count, bloomCache.sizeInBytes(), idCache.sizeInBytes());
             latestCacheStatsTimestamp = timestamp;
         }
         return latestCacheStats;
diff --git a/src/main/java/org/elasticsearch/index/cache/id/IdCache.java b/src/main/java/org/elasticsearch/index/cache/id/IdCache.java
index ec659b042a3f0..f563b6ed15280 100644
--- a/src/main/java/org/elasticsearch/index/cache/id/IdCache.java
+++ b/src/main/java/org/elasticsearch/index/cache/id/IdCache.java
@@ -35,4 +35,7 @@ public interface IdCache extends IndexComponent, CloseableComponent, Iterable> builders, HashedBytesArray idAsBytes) { HashedBytesArray finalIdAsBytes; // go over and see if we can reuse this id
diff --git a/src/main/java/org/elasticsearch/index/cache/id/simple/SimpleIdReaderCache.java b/src/main/java/org/elasticsearch/index/cache/id/simple/SimpleIdReaderCache.java
index f6e23dda7a074..5d6d5d1abdc08 100644
--- a/src/main/java/org/elasticsearch/index/cache/id/simple/SimpleIdReaderCache.java
+++ b/src/main/java/org/elasticsearch/index/cache/id/simple/SimpleIdReaderCache.java
@@ -66,6 +66,14 @@ public int docById(String type, HashedBytesArray id) {
         return -1;
     }
 
+    public long sizeInBytes() {
+        long sizeInBytes = 0;
+        for (SimpleIdReaderTypeCache readerTypeCache : types.values()) {
+            sizeInBytes += readerTypeCache.sizeInBytes();
+        }
+        return sizeInBytes;
+    }
+
     /**
      * Returns an already stored instance if exists, if not, returns null;
      */
diff --git a/src/main/java/org/elasticsearch/index/cache/id/simple/SimpleIdReaderTypeCache.java b/src/main/java/org/elasticsearch/index/cache/id/simple/SimpleIdReaderTypeCache.java
index 1cbe8f069cead..083e79f3b5180 100644
--- a/src/main/java/org/elasticsearch/index/cache/id/simple/SimpleIdReaderTypeCache.java
+++ b/src/main/java/org/elasticsearch/index/cache/id/simple/SimpleIdReaderTypeCache.java
@@ -19,6 +19,8 @@
 
 package org.elasticsearch.index.cache.id.simple;
 
+import gnu.trove.impl.hash.TObjectHash;
+import org.elasticsearch.common.RamUsage;
 import org.elasticsearch.common.bytes.HashedBytesArray;
 import org.elasticsearch.common.trove.ExtTObjectIntHasMap;
 import org.elasticsearch.index.cache.id.IdReaderTypeCache;
@@ -38,6 +40,8 @@ public class SimpleIdReaderTypeCache implements IdReaderTypeCache {
 
     private final int[] parentIdsOrdinals;
 
+    private long sizeInBytes = -1;
+
     public SimpleIdReaderTypeCache(String type, ExtTObjectIntHasMap idToDoc,
                                    HashedBytesArray[] docIdToId, HashedBytesArray[] parentIdsValues, int[] parentIdsOrdinals) {
         this.type = type;
@@ -64,10 +68,46 @@ public HashedBytesArray idByDoc(int docId) {
         return docIdToId[docId];
     }
 
+    public long sizeInBytes() {
+        if (sizeInBytes == -1) {
+            sizeInBytes = computeSizeInBytes();
+        }
+        return sizeInBytes;
+    }
+
     /**
      * Returns an already stored instance if exists, if not, returns null;
      */
     public HashedBytesArray canReuse(HashedBytesArray id) {
         return idToDoc.key(id);
     }
+
+    long computeSizeInBytes() {
+        long sizeInBytes = 0;
+        // Ignore type field
+        //  sizeInBytes += ((type.length() * RamUsage.NUM_BYTES_CHAR) + (3 * RamUsage.NUM_BYTES_INT)) + RamUsage.NUM_BYTES_OBJECT_HEADER;
+        sizeInBytes += RamUsage.NUM_BYTES_ARRAY_HEADER + (idToDoc._valuesSize() * RamUsage.NUM_BYTES_INT);
+        for (Object o : idToDoc._set) {
+            if (o == TObjectHash.FREE || o == TObjectHash.REMOVED) {
+                sizeInBytes += RamUsage.NUM_BYTES_OBJECT_REF;
+            } else {
+                HashedBytesArray bytesArray = (HashedBytesArray) o;
+                sizeInBytes += RamUsage.NUM_BYTES_OBJECT_HEADER + (bytesArray.length() + RamUsage.NUM_BYTES_INT);
+            }
+        }
+
+        // The docIdToId array contains references to idToDoc for this segment or other segments, so we can use OBJECT_REF
+        sizeInBytes += RamUsage.NUM_BYTES_ARRAY_HEADER + (RamUsage.NUM_BYTES_OBJECT_REF * docIdToId.length);
+        for (HashedBytesArray bytesArray : parentIdsValues) {
+            if (bytesArray == null) {
+                sizeInBytes += RamUsage.NUM_BYTES_OBJECT_REF;
+            } else {
+                sizeInBytes += RamUsage.NUM_BYTES_OBJECT_HEADER + (bytesArray.length() + RamUsage.NUM_BYTES_INT);
+            }
+        }
+        sizeInBytes += RamUsage.NUM_BYTES_ARRAY_HEADER + (RamUsage.NUM_BYTES_INT * parentIdsOrdinals.length);
+
+        return sizeInBytes;
+    }
+
 }
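
For context, a minimal self-contained sketch of the shallow-size accounting that SimpleIdReaderTypeCache.computeSizeInBytes() performs above: the int[] slot array of the id hash costs an array header plus one int per slot, each occupied slot in the object table costs an object header plus the id bytes plus its cached int hash, free/removed slots cost one reference, and docIdToId costs only references because the ids it points to are already counted. The constants below are assumed 64-bit JVM values standing in for org.elasticsearch.common.RamUsage, and the class and method names (IdCacheSizeSketch, estimate) are hypothetical.

// Illustration only; constants are assumptions, not the RamUsage definitions.
public class IdCacheSizeSketch {

    static final int NUM_BYTES_OBJECT_HEADER = 16; // assumed
    static final int NUM_BYTES_ARRAY_HEADER = 24;  // assumed
    static final int NUM_BYTES_OBJECT_REF = 8;     // assumed
    static final int NUM_BYTES_INT = 4;

    /**
     * Mirrors the accounting for one segment: idLengths holds the byte length
     * of each cached id, hashCapacity is the capacity of the open-addressed
     * hash (idToDoc._valuesSize()), and maxDoc is the docIdToId array length.
     */
    static long estimate(int[] idLengths, int hashCapacity, int maxDoc) {
        long size = 0;
        // int[] _values: one int per hash slot
        size += NUM_BYTES_ARRAY_HEADER + (long) hashCapacity * NUM_BYTES_INT;
        // Object[] _set: occupied slots cost header + id bytes + cached hash,
        // the remaining (free/removed) slots cost a reference each
        for (int len : idLengths) {
            size += NUM_BYTES_OBJECT_HEADER + len + NUM_BYTES_INT;
        }
        size += (long) (hashCapacity - idLengths.length) * NUM_BYTES_OBJECT_REF;
        // docIdToId: references to ids that were already counted above
        size += NUM_BYTES_ARRAY_HEADER + (long) maxDoc * NUM_BYTES_OBJECT_REF;
        return size;
    }

    public static void main(String[] args) {
        // e.g. three ids of roughly 22 bytes in an 8-slot hash over a 3-doc segment
        System.out.println(estimate(new int[]{22, 22, 23}, 8, 3) + " bytes (approx.)");
    }
}

The parentIdsValues and parentIdsOrdinals arrays in the patch follow the same pattern (array header plus per-entry payloads or references), and the resulting total is computed once per reader and then reused through the memoized sizeInBytes() method.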