Skip to content

Commit

Permalink
Added id cache size to nodes stats api
Browse files Browse the repository at this point in the history
Also added bloom cache size to the node *rest* api
Closes #2264
  • Loading branch information
martijnvg committed Sep 19, 2012
1 parent 86e88a8 commit afc99ac
Show file tree
Hide file tree
Showing 9 changed files with 104 additions and 6 deletions.
Expand Up @@ -48,4 +48,8 @@ public T key(T key) {
int index = index(key);
return index < 0 ? null : (T) _set[index];
}

/**
 * Exposes the length of the internal trove {@code _values} slot array.
 * NOTE(review): this is the table's total slot capacity (including free/removed
 * slots), not the number of live entries — suitable only for memory-footprint
 * estimation, e.g. by SimpleIdReaderTypeCache.computeSizeInBytes().
 */
public int _valuesSize() {
    return _values.length;
}
}
37 changes: 33 additions & 4 deletions src/main/java/org/elasticsearch/index/cache/CacheStats.java
Expand Up @@ -37,20 +37,22 @@ public class CacheStats implements Streamable, ToXContent {
long fieldEvictions;
long filterEvictions;
long filterCount;
long fieldSize = 0;
long filterSize = 0;
long bloomSize = 0;
long fieldSize;
long filterSize;
long bloomSize;
long idCacheSize;

/**
 * No-arg constructor; presumably required for Streamable deserialization —
 * fields are populated afterwards via {@code readFrom(StreamInput)}
 * (see {@code readCacheStats}).
 */
public CacheStats() {
}

public CacheStats(long fieldEvictions, long filterEvictions, long fieldSize, long filterSize, long filterCount, long bloomSize) {
/**
 * Builds a fully-populated stats snapshot.
 *
 * @param fieldEvictions  number of field data cache evictions
 * @param filterEvictions number of filter cache evictions
 * @param fieldSize       field data cache size in bytes
 * @param filterSize      filter cache size in bytes
 * @param filterCount     number of entries in the filter cache
 * @param bloomSize       bloom filter cache size in bytes
 * @param idCacheSize     id cache size in bytes (new in this change)
 */
public CacheStats(long fieldEvictions, long filterEvictions, long fieldSize, long filterSize, long filterCount, long bloomSize, long idCacheSize) {
    this.fieldEvictions = fieldEvictions;
    this.filterEvictions = filterEvictions;
    this.fieldSize = fieldSize;
    this.filterSize = filterSize;
    this.filterCount = filterCount;
    this.bloomSize = bloomSize;
    this.idCacheSize = idCacheSize;
}

public void add(CacheStats stats) {
Expand All @@ -60,6 +62,7 @@ public void add(CacheStats stats) {
this.filterSize += stats.filterSize;
this.filterCount += stats.filterCount;
this.bloomSize += stats.bloomSize;
this.idCacheSize += stats.idCacheSize;
}

public long fieldEvictions() {
Expand Down Expand Up @@ -142,6 +145,22 @@ public ByteSizeValue getBloomSize() {
return bloomSize();
}

/**
 * @return the id cache size as a raw byte count
 */
public long idCacheSizeInBytes() {
    return idCacheSize;
}

/**
 * Bean-style alias for {@link #idCacheSizeInBytes()}.
 *
 * @return the id cache size as a raw byte count
 */
public long getIdCacheSizeInBytes() {
    return this.idCacheSize;
}

/**
 * @return the id cache size wrapped in a {@link ByteSizeValue} for
 *         human-readable rendering (e.g. "12.4mb")
 */
public ByteSizeValue idCacheSize() {
    return new ByteSizeValue(idCacheSize);
}

/**
 * Bean-style alias for {@link #idCacheSize()}.
 *
 * @return the id cache size wrapped in a {@link ByteSizeValue}
 */
public ByteSizeValue getIdCacheSize() {
    // BUG FIX: previously delegated to bloomSize(), so the id-cache getter
    // reported the bloom filter cache size (copy-paste error from
    // getBloomSize() above).
    return idCacheSize();
}

@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject(Fields.CACHE);
Expand All @@ -152,6 +171,10 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws
builder.field(Fields.FILTER_EVICTIONS, filterEvictions);
builder.field(Fields.FILTER_SIZE, filterSize().toString());
builder.field(Fields.FILTER_SIZE_IN_BYTES, filterSize);
builder.field(Fields.BLOOM_SIZE, bloomSize().toString());
builder.field(Fields.BLOOM_SIZE_IN_BYTES, bloomSize);
builder.field(Fields.ID_CACHE_SIZE, idCacheSize().toString());
builder.field(Fields.ID_CACHE_SIZE_IN_BYTES, idCacheSize);
builder.endObject();
return builder;
}
Expand All @@ -165,6 +188,10 @@ static final class Fields {
static final XContentBuilderString FILTER_COUNT = new XContentBuilderString("filter_count");
static final XContentBuilderString FILTER_SIZE = new XContentBuilderString("filter_size");
static final XContentBuilderString FILTER_SIZE_IN_BYTES = new XContentBuilderString("filter_size_in_bytes");
static final XContentBuilderString BLOOM_SIZE = new XContentBuilderString("bloom_size");
static final XContentBuilderString BLOOM_SIZE_IN_BYTES = new XContentBuilderString("bloom_size_in_bytes");
static final XContentBuilderString ID_CACHE_SIZE = new XContentBuilderString("id_cache_size");
static final XContentBuilderString ID_CACHE_SIZE_IN_BYTES = new XContentBuilderString("id_cache_size_in_bytes");
}

public static CacheStats readCacheStats(StreamInput in) throws IOException {
Expand All @@ -181,6 +208,7 @@ public void readFrom(StreamInput in) throws IOException {
filterSize = in.readVLong();
filterCount = in.readVLong();
bloomSize = in.readVLong();
idCacheSize = in.readVLong();
}

@Override
Expand All @@ -191,5 +219,6 @@ public void writeTo(StreamOutput out) throws IOException {
out.writeVLong(filterSize);
out.writeVLong(filterCount);
out.writeVLong(bloomSize);
out.writeVLong(idCacheSize);
}
}
4 changes: 2 additions & 2 deletions src/main/java/org/elasticsearch/index/cache/IndexCache.java
Expand Up @@ -85,15 +85,15 @@ public void setClusterService(@Nullable ClusterService clusterService) {

/**
 * Forces an immediate rebuild of the cached {@link CacheStats} snapshot and
 * resets the refresh timestamp. Synchronized with {@code stats()} so readers
 * never observe a half-updated snapshot/timestamp pair.
 */
public synchronized void invalidateCache() {
    FilterCache.EntriesStats filterEntriesStats = filterCache.entriesStats();
    // Dead store removed: latestCacheStats was assigned twice in a row, first
    // with the stale 6-arg construction (missing idCache.sizeInBytes()) and
    // then immediately overwritten. Keep only the complete construction.
    latestCacheStats = new CacheStats(fieldDataCache.evictions(), filterCache.evictions(), fieldDataCache.sizeInBytes(), filterEntriesStats.sizeInBytes, filterEntriesStats.count, bloomCache.sizeInBytes(), idCache.sizeInBytes());
    latestCacheStatsTimestamp = System.currentTimeMillis();
}

public synchronized CacheStats stats() {
long timestamp = System.currentTimeMillis();
if ((timestamp - latestCacheStatsTimestamp) > refreshInterval.millis()) {
FilterCache.EntriesStats filterEntriesStats = filterCache.entriesStats();
latestCacheStats = new CacheStats(fieldDataCache.evictions(), filterCache.evictions(), fieldDataCache.sizeInBytes(), filterEntriesStats.sizeInBytes, filterEntriesStats.count, bloomCache.sizeInBytes());
latestCacheStats = new CacheStats(fieldDataCache.evictions(), filterCache.evictions(), fieldDataCache.sizeInBytes(), filterEntriesStats.sizeInBytes, filterEntriesStats.count, bloomCache.sizeInBytes(), idCache.sizeInBytes());
latestCacheStatsTimestamp = timestamp;
}
return latestCacheStats;
Expand Down
3 changes: 3 additions & 0 deletions src/main/java/org/elasticsearch/index/cache/id/IdCache.java
Expand Up @@ -35,4 +35,7 @@ public interface IdCache extends IndexComponent, CloseableComponent, Iterable<Id
void refresh(IndexReader[] readers) throws Exception;

IdReaderCache reader(IndexReader reader);

long sizeInBytes();

}
Expand Up @@ -33,4 +33,6 @@ public interface IdReaderCache {
HashedBytesArray parentIdByDoc(String type, int docId);

int docById(String type, HashedBytesArray id);

long sizeInBytes();
}
Expand Up @@ -44,4 +44,8 @@ public interface IdReaderTypeCache {
*/
HashedBytesArray idByDoc(int docId);

/**
* @return The size in bytes for this particular instance
*/
long sizeInBytes();
}
Expand Up @@ -218,6 +218,14 @@ public void refresh(IndexReader[] readers) throws Exception {
}
}

/**
 * Sums the estimated memory footprint of every per-reader id cache held by
 * this index-level cache.
 *
 * @return total size in bytes across all reader caches
 */
public long sizeInBytes() {
    long total = 0;
    for (SimpleIdReaderCache readerCache : idReaders.values()) {
        total += readerCache.sizeInBytes();
    }
    return total;
}

private HashedBytesArray checkIfCanReuse(Map<Object, Map<String, TypeBuilder>> builders, HashedBytesArray idAsBytes) {
HashedBytesArray finalIdAsBytes;
// go over and see if we can reuse this id
Expand Down
Expand Up @@ -66,6 +66,14 @@ public int docById(String type, HashedBytesArray id) {
return -1;
}

/**
 * Sums the estimated memory footprint of all per-type caches belonging to
 * this reader's id cache.
 *
 * @return total size in bytes across all type caches
 */
public long sizeInBytes() {
    long total = 0;
    for (SimpleIdReaderTypeCache typeCache : types.values()) {
        total += typeCache.sizeInBytes();
    }
    return total;
}

/**
* Returns an already stored instance if exists, if not, returns null;
*/
Expand Down
Expand Up @@ -19,6 +19,8 @@

package org.elasticsearch.index.cache.id.simple;

import gnu.trove.impl.hash.TObjectHash;
import org.elasticsearch.common.RamUsage;
import org.elasticsearch.common.bytes.HashedBytesArray;
import org.elasticsearch.common.trove.ExtTObjectIntHasMap;
import org.elasticsearch.index.cache.id.IdReaderTypeCache;
Expand All @@ -38,6 +40,8 @@ public class SimpleIdReaderTypeCache implements IdReaderTypeCache {

private final int[] parentIdsOrdinals;

private long sizeInBytes = -1;

public SimpleIdReaderTypeCache(String type, ExtTObjectIntHasMap<HashedBytesArray> idToDoc, HashedBytesArray[] docIdToId,
HashedBytesArray[] parentIdsValues, int[] parentIdsOrdinals) {
this.type = type;
Expand All @@ -64,10 +68,46 @@ public HashedBytesArray idByDoc(int docId) {
return docIdToId[docId];
}

/**
 * Returns the estimated size in bytes of this type cache, computed lazily on
 * first access and memoized thereafter (-1 is the "not yet computed" sentinel).
 * NOTE(review): the lazy init is not synchronized; concurrent first calls may
 * both run computeSizeInBytes(). The computation looks idempotent so the only
 * cost is redundant work — confirm callers tolerate this.
 */
public long sizeInBytes() {
    if (sizeInBytes == -1) {
        sizeInBytes = computeSizeInBytes();
    }
    return sizeInBytes;
}

/**
* Returns an already stored instance if exists, if not, returns null;
*/
/**
 * Returns the canonical, already-stored instance equal to the given id, or
 * null if this cache does not hold it — letting callers share one
 * HashedBytesArray instance instead of keeping duplicate copies.
 */
public HashedBytesArray canReuse(HashedBytesArray id) {
    return idToDoc.key(id);
}

/**
 * Estimates the retained heap size of this type cache: the idToDoc open-hash
 * map (keys + int values), the docIdToId reference array, the parentIdsValues
 * array with its owned byte arrays, and the parentIdsOrdinals int array.
 * This is an approximation built from RamUsage constants, not an exact
 * measurement.
 */
long computeSizeInBytes() {
    long sizeInBytes = 0;
    // Ignore type field
    // sizeInBytes += ((type.length() * RamUsage.NUM_BYTES_CHAR) + (3 * RamUsage.NUM_BYTES_INT)) + RamUsage.NUM_BYTES_OBJECT_HEADER;
    // idToDoc's int-value array: one int per slot (capacity, not live count).
    sizeInBytes += RamUsage.NUM_BYTES_ARRAY_HEADER + (idToDoc._valuesSize() * RamUsage.NUM_BYTES_INT);
    for (Object o : idToDoc._set) {
        if (o == TObjectHash.FREE || o == TObjectHash.REMOVED) {
            // Empty/tombstone slot: only the slot reference itself.
            sizeInBytes += RamUsage.NUM_BYTES_OBJECT_REF;
        } else {
            // Live key: object header plus payload bytes plus the cached hash int.
            // NOTE(review): the slot's own reference is not counted here (it is
            // for FREE/REMOVED slots above) — possible slight underestimate; confirm intended.
            HashedBytesArray bytesArray = (HashedBytesArray) o;
            sizeInBytes += RamUsage.NUM_BYTES_OBJECT_HEADER + (bytesArray.length() + RamUsage.NUM_BYTES_INT);
        }
    }

    // The docIdToId array contains references to idToDoc for this segment or other segments, so we can use OBJECT_REF
    sizeInBytes += RamUsage.NUM_BYTES_ARRAY_HEADER + (RamUsage.NUM_BYTES_OBJECT_REF * docIdToId.length);
    // parentIdsValues owns its byte arrays, so count payloads for non-null entries.
    for (HashedBytesArray bytesArray : parentIdsValues) {
        if (bytesArray == null) {
            sizeInBytes += RamUsage.NUM_BYTES_OBJECT_REF;
        } else {
            sizeInBytes += RamUsage.NUM_BYTES_OBJECT_HEADER + (bytesArray.length() + RamUsage.NUM_BYTES_INT);
        }
    }
    sizeInBytes += RamUsage.NUM_BYTES_ARRAY_HEADER + (RamUsage.NUM_BYTES_INT * parentIdsOrdinals.length);

    return sizeInBytes;
}

}

0 comments on commit afc99ac

Please sign in to comment.