diff --git a/sample/Benchmark.java b/sample/Benchmark.java
index e9a9a7ae..665c3e44 100644
--- a/sample/Benchmark.java
+++ b/sample/Benchmark.java
@@ -4,7 +4,10 @@
import java.util.Random;
import com.fasterxml.jackson.databind.JsonNode;
+import com.maxmind.db.CHMCache;
import com.maxmind.db.InvalidDatabaseException;
+import com.maxmind.db.NoCache;
+import com.maxmind.db.NodeCache;
import com.maxmind.db.Reader;
import com.maxmind.db.Reader.FileMode;
@@ -17,14 +20,19 @@ public class Benchmark {
public static void main(String[] args) throws IOException, InvalidDatabaseException {
File file = new File(args.length > 0 ? args[0] : "GeoLite2-City.mmdb");
- loop("Warming up", file, WARMUPS);
- loop("Benchmarking", file, BENCHMARKS);
+ System.out.println("No caching");
+ loop("Warming up", file, WARMUPS, new NoCache());
+ loop("Benchmarking", file, BENCHMARKS, new NoCache());
+
+ System.out.println("With caching");
+ loop("Warming up", file, WARMUPS, new CHMCache());
+ loop("Benchmarking", file, BENCHMARKS, new CHMCache());
}
- private static void loop(String msg, File file, int loops) throws IOException {
+ private static void loop(String msg, File file, int loops, NodeCache cache) throws IOException {
System.out.println(msg);
for (int i = 0; i < loops; i++) {
- Reader r = new Reader(file, FileMode.MEMORY_MAPPED);
+ Reader r = new Reader(file, FileMode.MEMORY_MAPPED, cache);
bench(r, COUNT, i);
}
System.out.println();
diff --git a/src/main/java/com/maxmind/db/CHMCache.java b/src/main/java/com/maxmind/db/CHMCache.java
new file mode 100644
index 00000000..983cf09e
--- /dev/null
+++ b/src/main/java/com/maxmind/db/CHMCache.java
@@ -0,0 +1,51 @@
+package com.maxmind.db;
+
+import java.io.IOException;
+import java.util.concurrent.ConcurrentHashMap;
+
+import com.fasterxml.jackson.databind.JsonNode;
+import com.fasterxml.jackson.databind.node.ContainerNode;
+
+/**
+ * A simplistic cache using a {@link ConcurrentHashMap}. There's no eviction
+ * policy; it just fills up until reaching the specified capacity (or
+ * close enough at least, bounds check is not atomic :)
+ */
+public class CHMCache implements NodeCache {
+
+ private static final int DEFAULT_CAPACITY = 4096;
+
+ private final int capacity;
+ private final ConcurrentHashMap<Integer, JsonNode> cache;
+ private volatile boolean cacheFull = false;
+
+ public CHMCache() {
+ this(DEFAULT_CAPACITY);
+ }
+
+ public CHMCache(int capacity) {
+ this.capacity = capacity;
+ this.cache = new ConcurrentHashMap<Integer, JsonNode>(capacity);
+ }
+
+ @Override
+ public JsonNode get(int key, Loader loader) throws IOException {
+ Integer k = key;
+ JsonNode value = cache.get(k);
+ if (value == null) {
+ value = loader.load(key);
+ if (!cacheFull) {
+ if (cache.size() < capacity) {
+ cache.put(k, value);
+ } else {
+ cacheFull = true;
+ }
+ }
+ }
+ if (value instanceof ContainerNode) {
+ value = value.deepCopy();
+ }
+ return value;
+ }
+
+}
diff --git a/src/main/java/com/maxmind/db/Decoder.java b/src/main/java/com/maxmind/db/Decoder.java
index 68a17dbf..34863a29 100644
--- a/src/main/java/com/maxmind/db/Decoder.java
+++ b/src/main/java/com/maxmind/db/Decoder.java
@@ -28,6 +28,8 @@ final class Decoder {
// constructor to set this
boolean POINTER_TEST_HACK = false;
+ private final NodeCache cache;
+
private final long pointerBase;
private final CharsetDecoder utfDecoder = UTF_8.newDecoder();
@@ -77,13 +79,26 @@ void setOffset(int offset) {
this.offset = offset;
}
+ @Override
+ public String toString() {
+ return "Result[" + offset + " " + node.getNodeType() + " " + node.asText() + "]";
+ }
+
}
- Decoder(ByteBuffer buffer, long pointerBase) {
+ Decoder(NodeCache cache, ByteBuffer buffer, long pointerBase) {
+ this.cache = cache;
this.pointerBase = pointerBase;
this.buffer = buffer;
}
+ private final NodeCache.Loader cacheLoader = new NodeCache.Loader() {
+ @Override
+ public JsonNode load(int key) throws IOException {
+ return decode(key).getNode();
+ }
+ };
+
Result decode(int offset) throws IOException {
if (offset >= this.buffer.capacity()) {
throw new InvalidDatabaseException(
@@ -112,9 +127,9 @@ Result decode(int offset) throws IOException {
return new Result(new LongNode(pointer), newOffset);
}
- Result result = this.decode((int) pointer);
- result.setOffset(newOffset);
- return result;
+ int targetOffset = (int) pointer;
+ JsonNode node = cache.get(targetOffset, cacheLoader);
+ return new Result(node, newOffset);
}
if (type.equals(Type.EXTENDED)) {
diff --git a/src/main/java/com/maxmind/db/NoCache.java b/src/main/java/com/maxmind/db/NoCache.java
new file mode 100644
index 00000000..176747a3
--- /dev/null
+++ b/src/main/java/com/maxmind/db/NoCache.java
@@ -0,0 +1,17 @@
+package com.maxmind.db;
+
+import java.io.IOException;
+
+import com.fasterxml.jackson.databind.JsonNode;
+
+/**
+ * A no-op cache.
+ */
+public class NoCache implements NodeCache {
+
+ @Override
+ public JsonNode get(int key, Loader loader) throws IOException {
+ return loader.load(key);
+ }
+
+}
diff --git a/src/main/java/com/maxmind/db/NodeCache.java b/src/main/java/com/maxmind/db/NodeCache.java
new file mode 100644
index 00000000..f1c73635
--- /dev/null
+++ b/src/main/java/com/maxmind/db/NodeCache.java
@@ -0,0 +1,15 @@
+package com.maxmind.db;
+
+import java.io.IOException;
+
+import com.fasterxml.jackson.databind.JsonNode;
+
+public interface NodeCache {
+
+ public interface Loader {
+ JsonNode load(int key) throws IOException;
+ }
+
+ public JsonNode get(int key, Loader loader) throws IOException;
+
+}
diff --git a/src/main/java/com/maxmind/db/Reader.java b/src/main/java/com/maxmind/db/Reader.java
index 0c4048c5..004e3268 100644
--- a/src/main/java/com/maxmind/db/Reader.java
+++ b/src/main/java/com/maxmind/db/Reader.java
@@ -20,9 +20,12 @@ public final class Reader implements Closeable {
(byte) 0xCD, (byte) 0xEF, 'M', 'a', 'x', 'M', 'i', 'n', 'd', '.',
'c', 'o', 'm'};
+ private static final NodeCache NO_CACHE = new NoCache();
+
private final int ipV4Start;
private final Metadata metadata;
private final AtomicReference<BufferHolder> bufferHolderReference;
+ private final NodeCache cache;
/**
* The file mode to use when opening a MaxMind DB.
@@ -41,47 +44,93 @@ public enum FileMode {
}
/**
- * Constructs a Reader for the MaxMind DB format. The file passed to it must
- * be a valid MaxMind DB file such as a GeoIP2 database file.
+ * Constructs a Reader for the MaxMind DB format, with no caching. The file
+ * passed to it must be a valid MaxMind DB file such as a GeoIP2 database
+ * file.
*
* @param database the MaxMind DB file to use.
* @throws IOException if there is an error opening or reading from the file.
*/
public Reader(File database) throws IOException {
- this(database, FileMode.MEMORY_MAPPED);
+ this(database, NO_CACHE);
}
/**
- * Constructs a Reader as if in mode {@link FileMode#MEMORY}, without using
- * a File instance.
+ * Constructs a Reader for the MaxMind DB format, with the specified backing
+ * cache. The file passed to it must be a valid MaxMind DB file such as a
+ * GeoIP2 database file.
+ *
+ * @param database the MaxMind DB file to use.
+ * @param cache backing cache instance
+ * @throws IOException if there is an error opening or reading from the file.
+ */
+ public Reader(File database, NodeCache cache) throws IOException {
+ this(database, FileMode.MEMORY_MAPPED, cache);
+ }
+
+ /**
+ * Constructs a Reader with no caching, as if in mode
+ * {@link FileMode#MEMORY}, without using a File instance.
*
* @param source the InputStream that contains the MaxMind DB file.
* @throws IOException if there is an error reading from the Stream.
*/
public Reader(InputStream source) throws IOException {
- this(new BufferHolder(source), "");
+ this(source, NO_CACHE);
}
/**
- * Constructs a Reader for the MaxMind DB format. The file passed to it must
- * be a valid MaxMind DB file such as a GeoIP2 database file.
+ * Constructs a Reader with the specified backing cache, as if in mode
+ * {@link FileMode#MEMORY}, without using a File instance.
+ *
+ * @param source the InputStream that contains the MaxMind DB file.
+ * @param cache backing cache instance
+ * @throws IOException if there is an error reading from the Stream.
+ */
+ public Reader(InputStream source, NodeCache cache) throws IOException {
+ this(new BufferHolder(source), "", cache);
+ }
+
+ /**
+ * Constructs a Reader for the MaxMind DB format, with no caching. The file
+ * passed to it must be a valid MaxMind DB file such as a GeoIP2 database
+ * file.
*
* @param database the MaxMind DB file to use.
* @param fileMode the mode to open the file with.
* @throws IOException if there is an error opening or reading from the file.
*/
public Reader(File database, FileMode fileMode) throws IOException {
- this(new BufferHolder(database, fileMode), database.getName());
+ this(database, fileMode, NO_CACHE);
+ }
+
+ /**
+ * Constructs a Reader for the MaxMind DB format, with the specified backing
+ * cache. The file passed to it must be a valid MaxMind DB file such as a
+ * GeoIP2 database file.
+ *
+ * @param database the MaxMind DB file to use.
+ * @param fileMode the mode to open the file with.
+ * @param cache backing cache instance
+ * @throws IOException if there is an error opening or reading from the file.
+ */
+ public Reader(File database, FileMode fileMode, NodeCache cache) throws IOException {
+ this(new BufferHolder(database, fileMode), database.getName(), cache);
}
- private Reader(BufferHolder bufferHolder, String name) throws IOException {
+ private Reader(BufferHolder bufferHolder, String name, NodeCache cache) throws IOException {
this.bufferHolderReference = new AtomicReference<BufferHolder>(
bufferHolder);
+ if (cache == null) {
+ throw new NullPointerException("Cache cannot be null");
+ }
+ this.cache = cache;
+
ByteBuffer buffer = bufferHolder.get();
int start = this.findMetadataStart(buffer, name);
- Decoder metadataDecoder = new Decoder(buffer, start);
+ Decoder metadataDecoder = new Decoder(this.cache, buffer, start);
this.metadata = new Metadata(metadataDecoder.decode(start).getNode());
this.ipV4Start = this.findIpV4StartNode(buffer);
@@ -200,8 +249,8 @@ private JsonNode resolveDataPointer(ByteBuffer buffer, int pointer)
// We only want the data from the decoder, not the offset where it was
// found.
- Decoder decoder = new Decoder(buffer, this.metadata.getSearchTreeSize()
- + DATA_SECTION_SEPARATOR_SIZE);
+ Decoder decoder = new Decoder(this.cache, buffer,
+ this.metadata.getSearchTreeSize() + DATA_SECTION_SEPARATOR_SIZE);
return decoder.decode(resolved).getNode();
}
diff --git a/src/test/java/com/maxmind/db/DecoderTest.java b/src/test/java/com/maxmind/db/DecoderTest.java
index 7ce266a7..85554637 100644
--- a/src/test/java/com/maxmind/db/DecoderTest.java
+++ b/src/test/java/com/maxmind/db/DecoderTest.java
@@ -414,6 +414,8 @@ public void testArrays() throws IOException {
private static <T> void testTypeDecoding(Decoder.Type type, Map<T, byte[]> tests)
throws IOException {
+ NodeCache cache = new CHMCache();
+
for (Map.Entry<T, byte[]> entry : tests.entrySet()) {
T expect = entry.getKey();
byte[] input = entry.getValue();
@@ -423,7 +425,7 @@ private static void testTypeDecoding(Decoder.Type type, Map tests
MappedByteBuffer mmap = fc.map(MapMode.READ_ONLY, 0, fc.size());
try {
- Decoder decoder = new Decoder(mmap, 0);
+ Decoder decoder = new Decoder(cache, mmap, 0);
decoder.POINTER_TEST_HACK = true;
// XXX - this could be streamlined
diff --git a/src/test/java/com/maxmind/db/PointerTest.java b/src/test/java/com/maxmind/db/PointerTest.java
index c7ccde33..f1d5d320 100644
--- a/src/test/java/com/maxmind/db/PointerTest.java
+++ b/src/test/java/com/maxmind/db/PointerTest.java
@@ -20,7 +20,7 @@ public void testWithPointers() throws
File file = new File(PointerTest.class.getResource(
"/maxmind-db/test-data/maps-with-pointers.raw").toURI());
BufferHolder ptf = new BufferHolder(file, FileMode.MEMORY);
- Decoder decoder = new Decoder(ptf.get(), 0);
+ Decoder decoder = new Decoder(new NoCache(), ptf.get(), 0);
ObjectMapper om = new ObjectMapper();