diff --git a/src/main/java/com/maxmind/db/Buffer.java b/src/main/java/com/maxmind/db/Buffer.java
new file mode 100644
index 00000000..eef66f73
--- /dev/null
+++ b/src/main/java/com/maxmind/db/Buffer.java
@@ -0,0 +1,117 @@
+package com.maxmind.db;
+
+import java.io.IOException;
+import java.nio.channels.FileChannel;
+import java.nio.charset.CharacterCodingException;
+import java.nio.charset.CharsetDecoder;
+
+/**
+ * A generic buffer abstraction that supports sequential and random access
+ * to binary data. Implementations may be backed by a single {@link
+ * java.nio.ByteBuffer} or multiple buffers for larger capacities.
+ *
+ * <p>This interface is designed to provide a long-based API while
+ * remaining compatible with the limitations of underlying storage.
+ */
+interface Buffer {
+ /**
+ * Returns the total capacity of this buffer in bytes.
+ *
+ * @return the capacity
+ */
+ long capacity();
+
+ /**
+ * Returns the current position of this buffer.
+ *
+ * @return the position
+ */
+ long position();
+
+ /**
+ * Sets the buffer's position.
+ *
+ * @param newPosition the new position
+ * @return this buffer
+ */
+ Buffer position(long newPosition);
+
+ /**
+ * Returns the current limit of this buffer.
+ *
+ * @return the limit
+ */
+ long limit();
+
+ /**
+ * Sets the buffer's limit.
+ *
+ * @param newLimit the new limit
+ * @return this buffer
+ */
+ Buffer limit(long newLimit);
+
+ /**
+ * Reads the next byte at the current position and advances the position.
+ *
+ * @return the byte value
+ */
+ byte get();
+
+ /**
+ * Reads bytes into the given array and advances the position.
+ *
+ * @param dst the destination array
+ * @return this buffer
+ */
+ Buffer get(byte[] dst);
+
+ /**
+ * Reads a byte at the given absolute index without changing the position.
+ *
+ * @param index the index to read from
+ * @return the byte value
+ */
+ byte get(long index);
+
+ /**
+ * Reads the next 8 bytes as a double and advances the position.
+ *
+ * @return the double value
+ */
+ double getDouble();
+
+ /**
+ * Reads the next 4 bytes as a float and advances the position.
+ *
+ * @return the float value
+ */
+ float getFloat();
+
+ /**
+ * Creates a new buffer that shares the same content but has independent
+ * position, limit, and mark values.
+ *
+ * @return a duplicate buffer
+ */
+ Buffer duplicate();
+
+ /**
+ * Reads data from the given channel into this buffer starting at the
+ * current position.
+ *
+ * @param channel the file channel
+ * @return the number of bytes read
+ * @throws IOException if an I/O error occurs
+ */
+ long readFrom(FileChannel channel) throws IOException;
+
+ /**
+ * Decodes the buffer's content into a string using the given decoder.
+ *
+ * @param decoder the charset decoder
+ * @return the decoded string
+ * @throws CharacterCodingException if decoding fails
+ */
+ String decode(CharsetDecoder decoder) throws CharacterCodingException;
+}
diff --git a/src/main/java/com/maxmind/db/BufferHolder.java b/src/main/java/com/maxmind/db/BufferHolder.java
index dbbe8aa5..c0ad56cf 100644
--- a/src/main/java/com/maxmind/db/BufferHolder.java
+++ b/src/main/java/com/maxmind/db/BufferHolder.java
@@ -1,34 +1,46 @@
package com.maxmind.db;
import com.maxmind.db.Reader.FileMode;
-import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.RandomAccessFile;
import java.nio.ByteBuffer;
import java.nio.channels.FileChannel;
-import java.nio.channels.FileChannel.MapMode;
+import java.util.ArrayList;
+import java.util.List;
final class BufferHolder {
// DO NOT PASS OUTSIDE THIS CLASS. Doing so will remove thread safety.
- private final ByteBuffer buffer;
+ private final Buffer buffer;
BufferHolder(File database, FileMode mode) throws IOException {
- try (
- final RandomAccessFile file = new RandomAccessFile(database, "r");
- final FileChannel channel = file.getChannel()
- ) {
+ this(database, mode, MultiBuffer.DEFAULT_CHUNK_SIZE);
+ }
+
+ BufferHolder(File database, FileMode mode, int chunkSize) throws IOException {
+ try (RandomAccessFile file = new RandomAccessFile(database, "r");
+ FileChannel channel = file.getChannel()) {
+ long size = channel.size();
if (mode == FileMode.MEMORY) {
- final ByteBuffer buf = ByteBuffer.wrap(new byte[(int) channel.size()]);
- if (channel.read(buf) != buf.capacity()) {
+ Buffer buf;
+ if (size <= chunkSize) {
+ buf = new SingleBuffer(size);
+ } else {
+ buf = new MultiBuffer(size);
+ }
+ if (buf.readFrom(channel) != buf.capacity()) {
throw new IOException("Unable to read "
- + database.getName()
- + " into memory. Unexpected end of stream.");
+ + database.getName()
+ + " into memory. Unexpected end of stream.");
}
- this.buffer = buf.asReadOnlyBuffer();
+ this.buffer = buf;
} else {
- this.buffer = channel.map(MapMode.READ_ONLY, 0, channel.size()).asReadOnlyBuffer();
+ if (size <= chunkSize) {
+ this.buffer = SingleBuffer.mapFromChannel(channel);
+ } else {
+ this.buffer = MultiBuffer.mapFromChannel(channel);
+ }
}
}
}
@@ -41,23 +53,44 @@ final class BufferHolder {
* @throws NullPointerException if you provide a NULL InputStream
*/
BufferHolder(InputStream stream) throws IOException {
+ this(stream, MultiBuffer.DEFAULT_CHUNK_SIZE);
+ }
+
+ BufferHolder(InputStream stream, int chunkSize) throws IOException {
if (null == stream) {
throw new NullPointerException("Unable to use a NULL InputStream");
}
- final ByteArrayOutputStream baos = new ByteArrayOutputStream();
- final byte[] bytes = new byte[16 * 1024];
- int br;
- while (-1 != (br = stream.read(bytes))) {
- baos.write(bytes, 0, br);
+ List<ByteBuffer> chunks = new ArrayList<>();
+ long total = 0;
+ byte[] tmp = new byte[chunkSize];
+ int read;
+
+ while (-1 != (read = stream.read(tmp))) {
+ ByteBuffer chunk = ByteBuffer.allocate(read);
+ chunk.put(tmp, 0, read);
+ chunk.flip();
+ chunks.add(chunk);
+ total += read;
+ }
+
+ if (total <= chunkSize) {
+ byte[] data = new byte[(int) total];
+ int pos = 0;
+ for (ByteBuffer chunk : chunks) {
+ System.arraycopy(chunk.array(), 0, data, pos, chunk.capacity());
+ pos += chunk.capacity();
+ }
+ this.buffer = SingleBuffer.wrap(data);
+ } else {
+ this.buffer = new MultiBuffer(chunks.toArray(new ByteBuffer[0]), chunkSize);
}
- this.buffer = ByteBuffer.wrap(baos.toByteArray()).asReadOnlyBuffer();
}
/*
- * Returns a duplicate of the underlying ByteBuffer. The returned ByteBuffer
+ * Returns a duplicate of the underlying Buffer. The returned Buffer
* should not be shared between threads.
*/
- ByteBuffer get() {
+ Buffer get() {
// The Java API docs for buffer state:
//
// Buffers are not safe for use by multiple concurrent threads. If a buffer is to be
@@ -70,7 +103,7 @@ ByteBuffer get() {
// * https://github.com/maxmind/MaxMind-DB-Reader-java/issues/65
// * https://github.com/maxmind/MaxMind-DB-Reader-java/pull/69
//
- // Given that we are not modifying the original ByteBuffer in any way and all currently
+ // Given that we are not modifying the original Buffer in any way and all currently
// known and most reasonably imaginable implementations of duplicate() only do read
// operations on the original buffer object, the risk of not synchronizing this call seems
// relatively low and worth taking for the performance benefit when lookups are being done
diff --git a/src/main/java/com/maxmind/db/CacheKey.java b/src/main/java/com/maxmind/db/CacheKey.java
index 3a2c0d46..85c4c522 100644
--- a/src/main/java/com/maxmind/db/CacheKey.java
+++ b/src/main/java/com/maxmind/db/CacheKey.java
@@ -10,5 +10,5 @@
* @param cls the class of the value
* @param type the type of the value
*/
-public record CacheKey<T>(int offset, Class<T> cls, java.lang.reflect.Type type) {
+public record CacheKey<T>(long offset, Class<T> cls, java.lang.reflect.Type type) {
}
diff --git a/src/main/java/com/maxmind/db/CtrlData.java b/src/main/java/com/maxmind/db/CtrlData.java
index ff9d0ce9..4c3f5622 100644
--- a/src/main/java/com/maxmind/db/CtrlData.java
+++ b/src/main/java/com/maxmind/db/CtrlData.java
@@ -1,4 +1,4 @@
package com.maxmind.db;
-record CtrlData(Type type, int ctrlByte, int offset, int size) {
+record CtrlData(Type type, int ctrlByte, long offset, int size) {
}
diff --git a/src/main/java/com/maxmind/db/Decoder.java b/src/main/java/com/maxmind/db/Decoder.java
index c1c7446d..48dc24ce 100644
--- a/src/main/java/com/maxmind/db/Decoder.java
+++ b/src/main/java/com/maxmind/db/Decoder.java
@@ -34,11 +34,11 @@ class Decoder {
private final CharsetDecoder utfDecoder = UTF_8.newDecoder();
- private final ByteBuffer buffer;
+ private final Buffer buffer;
private final ConcurrentHashMap<Class<?>, CachedConstructor<?>> constructors;
- Decoder(NodeCache cache, ByteBuffer buffer, long pointerBase) {
+ Decoder(NodeCache cache, Buffer buffer, long pointerBase) {
this(
cache,
buffer,
@@ -49,7 +49,7 @@ class Decoder {
Decoder(
NodeCache cache,
- ByteBuffer buffer,
+ Buffer buffer,
long pointerBase,
ConcurrentHashMap<Class<?>, CachedConstructor<?>> constructors
) {
@@ -61,7 +61,7 @@ class Decoder {
private final NodeCache.Loader cacheLoader = this::decode;
- <T> T decode(int offset, Class<T> cls) throws IOException {
+ <T> T decode(long offset, Class<T> cls) throws IOException {
if (offset >= this.buffer.capacity()) {
throw new InvalidDatabaseException(
"The MaxMind DB file's data section contains bad data: "
@@ -73,7 +73,7 @@ T decode(int offset, Class cls) throws IOException {
}
private DecodedValue decode(CacheKey key) throws IOException {
- int offset = key.offset();
+ long offset = key.offset();
if (offset >= this.buffer.capacity()) {
throw new InvalidDatabaseException(
"The MaxMind DB file's data section contains bad data: "
@@ -132,8 +132,8 @@ private DecodedValue decode(Class cls, java.lang.reflect.Type genericType
DecodedValue decodePointer(long pointer, Class<?> cls, java.lang.reflect.Type genericType)
throws IOException {
- int targetOffset = (int) pointer;
- int position = buffer.position();
+ long targetOffset = pointer;
+ long position = buffer.position();
CacheKey<?> key = new CacheKey<>(targetOffset, cls, genericType);
DecodedValue o = cache.get(key, cacheLoader);
@@ -185,10 +185,10 @@ private Object decodeByType(
}
}
- private String decodeString(int size) throws CharacterCodingException {
- int oldLimit = buffer.limit();
+ private String decodeString(long size) throws CharacterCodingException {
+ long oldLimit = buffer.limit();
buffer.limit(buffer.position() + size);
- String s = utfDecoder.decode(buffer).toString();
+ String s = buffer.decode(utfDecoder);
buffer.limit(oldLimit);
return s;
}
@@ -202,9 +202,13 @@ private int decodeInt32(int size) {
}
private long decodeLong(int size) {
- long integer = 0;
+ return Decoder.decodeLong(this.buffer, 0, size);
+ }
+
+ static long decodeLong(Buffer buffer, int base, int size) {
+ long integer = base;
for (int i = 0; i < size; i++) {
- integer = (integer << 8) | (this.buffer.get() & 0xFF);
+ integer = (integer << 8) | (buffer.get() & 0xFF);
}
return integer;
}
@@ -221,7 +225,7 @@ private int decodeInteger(int base, int size) {
return Decoder.decodeInteger(this.buffer, base, size);
}
- static int decodeInteger(ByteBuffer buffer, int base, int size) {
+ static int decodeInteger(Buffer buffer, int base, int size) {
int integer = base;
for (int i = 0; i < size; i++) {
integer = (integer << 8) | (buffer.get() & 0xFF);
@@ -412,7 +416,7 @@ private Object decodeMapIntoObject(int size, Class cls)
Integer parameterIndex = parameterIndexes.get(key);
if (parameterIndex == null) {
- int offset = this.nextValueOffset(this.buffer.position(), 1);
+ long offset = this.nextValueOffset(this.buffer.position(), 1);
this.buffer.position(offset);
continue;
}
@@ -485,7 +489,7 @@ private static String getParameterName(
+ " is not annotated with MaxMindDbParameter.");
}
- private int nextValueOffset(int offset, int numberToSkip)
+ private long nextValueOffset(long offset, int numberToSkip)
throws InvalidDatabaseException {
if (numberToSkip == 0) {
return offset;
@@ -518,7 +522,7 @@ private int nextValueOffset(int offset, int numberToSkip)
return nextValueOffset(offset, numberToSkip - 1);
}
- private CtrlData getCtrlData(int offset)
+ private CtrlData getCtrlData(long offset)
throws InvalidDatabaseException {
if (offset >= this.buffer.capacity()) {
throw new InvalidDatabaseException(
@@ -566,7 +570,7 @@ private byte[] getByteArray(int length) {
return Decoder.getByteArray(this.buffer, length);
}
- private static byte[] getByteArray(ByteBuffer buffer, int length) {
+ private static byte[] getByteArray(Buffer buffer, int length) {
byte[] bytes = new byte[length];
buffer.get(bytes);
return bytes;
diff --git a/src/main/java/com/maxmind/db/Metadata.java b/src/main/java/com/maxmind/db/Metadata.java
index 3c31f469..aa2d733d 100644
--- a/src/main/java/com/maxmind/db/Metadata.java
+++ b/src/main/java/com/maxmind/db/Metadata.java
@@ -24,11 +24,11 @@ public final class Metadata {
private final int nodeByteSize;
- private final int nodeCount;
+ private final long nodeCount;
private final int recordSize;
- private final int searchTreeSize;
+ private final long searchTreeSize;
/**
* Constructs a {@code Metadata} object.
@@ -71,7 +71,7 @@ public Metadata(
this.languages = languages;
this.description = description;
this.ipVersion = ipVersion;
- this.nodeCount = (int) nodeCount;
+ this.nodeCount = nodeCount;
this.recordSize = recordSize;
this.nodeByteSize = this.recordSize / 4;
@@ -140,7 +140,7 @@ int getNodeByteSize() {
/**
* @return the number of nodes in the search tree.
*/
- int getNodeCount() {
+ long getNodeCount() {
return this.nodeCount;
}
@@ -155,7 +155,7 @@ int getRecordSize() {
/**
* @return the searchTreeSize
*/
- int getSearchTreeSize() {
+ long getSearchTreeSize() {
return this.searchTreeSize;
}
diff --git a/src/main/java/com/maxmind/db/MultiBuffer.java b/src/main/java/com/maxmind/db/MultiBuffer.java
new file mode 100644
index 00000000..9113a7d5
--- /dev/null
+++ b/src/main/java/com/maxmind/db/MultiBuffer.java
@@ -0,0 +1,359 @@
+package com.maxmind.db;
+
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.nio.CharBuffer;
+import java.nio.channels.FileChannel;
+import java.nio.charset.CharacterCodingException;
+import java.nio.charset.CharsetDecoder;
+import java.nio.charset.CoderResult;
+
+/**
+ * A {@link Buffer} implementation backed by multiple {@link ByteBuffer}s,
+ * allowing support for capacities larger than {@link Integer#MAX_VALUE}.
+ *
+ * This implementation virtually concatenates several
+ * {@link ByteBuffer}s (each up to {@link Integer#MAX_VALUE}) and maintains
+ * a single logical position and limit across them.
+ *
+ * <p>Use this when working with databases/files that may exceed 2GB.
+ */
+class MultiBuffer implements Buffer {
+
+ /** Default maximum size per underlying chunk. */
+ static final int DEFAULT_CHUNK_SIZE = Integer.MAX_VALUE - 8;
+
+ final ByteBuffer[] buffers;
+ private final int chunkSize;
+ private final long capacity;
+
+ private long position = 0;
+ private long limit;
+
+ /**
+ * Creates a new {@code MultiBuffer} with the given capacity, backed by
+ * heap-allocated {@link ByteBuffer}s.
+ *
+ * @param capacity the total capacity in bytes
+ */
+ public MultiBuffer(long capacity) {
+ this(capacity, DEFAULT_CHUNK_SIZE);
+ }
+
+ /**
+ * Creates a new {@code MultiBuffer} backed by the given
+ * {@link ByteBuffer}s.
+ *
+ * <p>The total capacity and limit are set to the sum of the
+ * buffer capacities.
+ *
+ * @param buffers the backing buffers (cloned into an internal array)
+ * @param chunkSize the size of each buffer chunk
+ */
+ MultiBuffer(ByteBuffer[] buffers, int chunkSize) {
+ for (int i = 0; i < buffers.length; i++) {
+ ByteBuffer chunk = buffers[i];
+ if (chunk.capacity() == chunkSize) {
+ continue;
+ }
+ if (i == buffers.length - 1) {
+ // The last chunk can have a different size
+ continue;
+ }
+ throw new IllegalArgumentException("Chunk at index " + i
+ + " does not match expected chunk size");
+ }
+
+ this.buffers = buffers.clone();
+ long capacity = 0;
+ for (ByteBuffer buffer : buffers) {
+ capacity += buffer.capacity();
+ }
+ this.capacity = capacity;
+ this.limit = capacity;
+ this.chunkSize = chunkSize;
+ }
+
+ /**
+ * Creates a new {@code MultiBuffer} with the given capacity, backed by
+ * heap-allocated {@link ByteBuffer}s with the given chunk size.
+ *
+ * @param capacity the total capacity in bytes
+ * @param chunkSize the size of each buffer chunk
+ */
+ MultiBuffer(long capacity, int chunkSize) {
+ if (capacity <= 0) {
+ throw new IllegalArgumentException("Capacity must be positive");
+ }
+ this.capacity = capacity;
+ this.limit = capacity;
+ this.chunkSize = chunkSize;
+
+ int fullChunks = (int) (capacity / chunkSize);
+ int remainder = (int) (capacity % chunkSize);
+ int totalChunks = fullChunks + (remainder > 0 ? 1 : 0);
+
+ this.buffers = new ByteBuffer[totalChunks];
+
+ for (int i = 0; i < fullChunks; i++) {
+ buffers[i] = ByteBuffer.allocate(chunkSize);
+ }
+ if (remainder > 0) {
+ buffers[totalChunks - 1] = ByteBuffer.allocate(remainder);
+ }
+ }
+
+ /** {@inheritDoc} */
+ @Override
+ public long capacity() {
+ return capacity;
+ }
+
+ /** {@inheritDoc} */
+ @Override
+ public long position() {
+ return position;
+ }
+
+ /** {@inheritDoc} */
+ @Override
+ public Buffer position(long newPosition) {
+ if (newPosition < 0 || newPosition > limit) {
+ throw new IllegalArgumentException("Invalid position: " + newPosition);
+ }
+ this.position = newPosition;
+ return this;
+ }
+
+ /** {@inheritDoc} */
+ @Override
+ public long limit() {
+ return limit;
+ }
+
+ /** {@inheritDoc} */
+ @Override
+ public Buffer limit(long newLimit) {
+ if (newLimit < 0 || newLimit > capacity) {
+ throw new IllegalArgumentException("Invalid limit: " + newLimit);
+ }
+ this.limit = newLimit;
+ if (position > limit) {
+ position = limit;
+ }
+ return this;
+ }
+
+ /** {@inheritDoc} */
+ @Override
+ public byte get() {
+ byte value = get(position);
+ position++;
+ return value;
+ }
+
+ /** {@inheritDoc} */
+ @Override
+ public Buffer get(byte[] dst) {
+ if (position + dst.length > limit) {
+ throw new IndexOutOfBoundsException(
+ "Read exceeds limit: position=" + position
+ + ", length=" + dst.length
+ + ", limit=" + limit
+ );
+ }
+ long pos = position;
+ int offset = 0;
+ int length = dst.length;
+ while (length > 0) {
+ int bufIndex = (int) (pos / this.chunkSize);
+ int bufOffset = (int) (pos % this.chunkSize);
+ ByteBuffer buf = buffers[bufIndex];
+ buf.position(bufOffset);
+ int toRead = Math.min(buf.remaining(), length);
+ buf.get(dst, offset, toRead);
+ pos += toRead;
+ offset += toRead;
+ length -= toRead;
+ }
+ position = pos;
+ return this;
+ }
+
+ /** {@inheritDoc} */
+ @Override
+ public byte get(long index) {
+ if (index < 0 || index >= limit) {
+ throw new IndexOutOfBoundsException("Index: " + index);
+ }
+ int bufIndex = (int) (index / this.chunkSize);
+ int offset = (int) (index % this.chunkSize);
+ return buffers[bufIndex].get(offset);
+ }
+
+ /** {@inheritDoc} */
+ @Override
+ public double getDouble() {
+ int bufIndex = (int) (position / this.chunkSize);
+ int off = (int) (position % this.chunkSize);
+ ByteBuffer buf = buffers[bufIndex];
+ buf.position(off);
+ if (buf.remaining() >= 8) {
+ double value = buf.getDouble();
+ position += 8;
+ return value;
+ } else {
+ byte[] eight = new byte[8];
+ get(eight);
+ return ByteBuffer.wrap(eight).getDouble();
+ }
+ }
+
+ /** {@inheritDoc} */
+ @Override
+ public float getFloat() {
+ int bufIndex = (int) (position / this.chunkSize);
+ int off = (int) (position % this.chunkSize);
+ ByteBuffer buf = buffers[bufIndex];
+ buf.position(off);
+ if (buf.remaining() >= 4) {
+ float value = buf.getFloat();
+ position += 4;
+ return value;
+ } else {
+ byte[] four = new byte[4];
+ get(four);
+ return ByteBuffer.wrap(four).getFloat();
+ }
+ }
+
+ /** {@inheritDoc} */
+ @Override
+ public Buffer duplicate() {
+ ByteBuffer[] duplicatedBuffers = new ByteBuffer[buffers.length];
+ for (int i = 0; i < buffers.length; i++) {
+ duplicatedBuffers[i] = buffers[i].duplicate();
+ }
+ MultiBuffer copy = new MultiBuffer(duplicatedBuffers, chunkSize);
+ copy.position = this.position;
+ copy.limit = this.limit;
+ return copy;
+ }
+
+ /** {@inheritDoc} */
+ @Override
+ public long readFrom(FileChannel channel) throws IOException {
+ return this.readFrom(channel, this.chunkSize);
+ }
+
+ /**
+ * Reads data from the given channel into this buffer starting at the
+ * current position.
+ *
+ * @param channel the file channel
+ * @param chunkSize the chunk size to use for positioning reads
+ * @return the number of bytes read
+ * @throws IOException if an I/O error occurs
+ */
+ long readFrom(FileChannel channel, int chunkSize) throws IOException {
+ long totalRead = 0;
+ long pos = position;
+ for (int i = (int) (pos / chunkSize); i < buffers.length; i++) {
+ ByteBuffer buf = buffers[i];
+ buf.position((int) (pos % chunkSize));
+ int read = channel.read(buf);
+ if (read == -1) {
+ break;
+ }
+ totalRead += read;
+ pos += read;
+ if (pos >= limit) {
+ break;
+ }
+ }
+ position = pos;
+ return totalRead;
+ }
+
+ /** {@inheritDoc} */
+ @Override
+ public String decode(CharsetDecoder decoder)
+ throws CharacterCodingException {
+ return this.decode(decoder, Integer.MAX_VALUE);
+ }
+
+ String decode(CharsetDecoder decoder, int maxCharBufferSize)
+ throws CharacterCodingException {
+ long remainingBytes = limit - position;
+
+ // Cannot allocate more than maxCharBufferSize for CharBuffer
+ if (remainingBytes > maxCharBufferSize) {
+ throw new IllegalStateException(
+ "Decoding region too large to fit in a CharBuffer: " + remainingBytes
+ );
+ }
+
+ CharBuffer out = CharBuffer.allocate((int) remainingBytes);
+ long pos = position;
+
+ while (remainingBytes > 0) {
+ // Locate which underlying buffer we are in
+ int bufIndex = (int) (pos / this.chunkSize);
+ int bufOffset = (int) (pos % this.chunkSize);
+
+ ByteBuffer srcView = buffers[bufIndex].duplicate();
+ srcView.position(bufOffset);
+
+ int toRead = (int) Math.min(srcView.remaining(), remainingBytes);
+ srcView.limit(bufOffset + toRead);
+
+ CoderResult result = decoder.decode(srcView, out, false);
+ if (result.isError()) {
+ result.throwException();
+ }
+
+ pos += toRead;
+ remainingBytes -= toRead;
+ }
+
+ // Update this MultiBuffer’s logical position
+ this.position = pos;
+
+ out.flip();
+ return out.toString();
+ }
+
+ /**
+ * Creates a read-only {@code MultiBuffer} by memory-mapping the given
+ * {@link FileChannel}.
+ *
+ * @param channel the file channel to map
+ * @return a new {@code MultiBuffer} backed by memory-mapped segments
+ * @throws IOException if an I/O error occurs
+ */
+ public static MultiBuffer mapFromChannel(FileChannel channel) throws IOException {
+ long size = channel.size();
+ if (size <= 0) {
+ throw new IllegalArgumentException("File channel has no data");
+ }
+
+ int fullChunks = (int) (size / DEFAULT_CHUNK_SIZE);
+ int remainder = (int) (size % DEFAULT_CHUNK_SIZE);
+ int totalChunks = fullChunks + (remainder > 0 ? 1 : 0);
+
+ ByteBuffer[] buffers = new ByteBuffer[totalChunks];
+ long remaining = size;
+
+ for (int i = 0; i < totalChunks; i++) {
+ long chunkPos = (long) i * DEFAULT_CHUNK_SIZE;
+ long chunkSize = Math.min(DEFAULT_CHUNK_SIZE, remaining);
+ buffers[i] = channel.map(
+ FileChannel.MapMode.READ_ONLY,
+ chunkPos,
+ chunkSize
+ );
+ remaining -= chunkSize;
+ }
+ return new MultiBuffer(buffers, DEFAULT_CHUNK_SIZE);
+ }
+}
\ No newline at end of file
diff --git a/src/main/java/com/maxmind/db/Networks.java b/src/main/java/com/maxmind/db/Networks.java
index da6a0056..cc77af25 100644
--- a/src/main/java/com/maxmind/db/Networks.java
+++ b/src/main/java/com/maxmind/db/Networks.java
@@ -19,7 +19,7 @@ public final class Networks implements Iterator> {
private final Stack<NetworkNode> nodes;
private NetworkNode lastNode;
private final boolean includeAliasedNetworks;
- private final ByteBuffer buffer; /* Stores the buffer for Next() calls */
+ private final Buffer buffer; /* Stores the buffer for Next() calls */
private final Class<T> typeParameterClass;
/**
@@ -154,7 +154,7 @@ public boolean hasNext() {
ipRight[node.prefix >> 3] |= 1 << (7 - (node.prefix % 8));
try {
- int rightPointer = this.reader.readNode(this.buffer, node.pointer, 1);
+ long rightPointer = this.reader.readNode(this.buffer, node.pointer, 1);
node.prefix++;
this.nodes.push(new NetworkNode(ipRight, node.prefix, rightPointer));
@@ -173,7 +173,7 @@ static class NetworkNode {
/** The prefix of the node. */
public int prefix;
/** The node number. */
- public int pointer;
+ public long pointer;
/**
* Constructs a network node for internal use.
@@ -182,7 +182,7 @@ static class NetworkNode {
* @param prefix The prefix of the node.
* @param pointer The node number
*/
- NetworkNode(byte[] ip, int prefix, int pointer) {
+ NetworkNode(byte[] ip, int prefix, long pointer) {
this.ip = ip;
this.prefix = prefix;
this.pointer = pointer;
diff --git a/src/main/java/com/maxmind/db/Reader.java b/src/main/java/com/maxmind/db/Reader.java
index c9e41591..2e6b1007 100644
--- a/src/main/java/com/maxmind/db/Reader.java
+++ b/src/main/java/com/maxmind/db/Reader.java
@@ -22,7 +22,7 @@ public final class Reader implements Closeable {
(byte) 0xCD, (byte) 0xEF, 'M', 'a', 'x', 'M', 'i', 'n', 'd', '.',
'c', 'o', 'm'};
- private final int ipV4Start;
+ private final long ipV4Start;
private final Metadata metadata;
private final AtomicReference<BufferHolder> bufferHolderReference;
private final NodeCache cache;
@@ -56,6 +56,14 @@ public Reader(File database) throws IOException {
this(database, NoCache.getInstance());
}
+ Reader(File database, int chunkSize) throws IOException {
+ this(
+ new BufferHolder(database, FileMode.MEMORY_MAPPED, chunkSize),
+ database.getName(),
+ NoCache.getInstance()
+ );
+ }
+
/**
* Constructs a Reader for the MaxMind DB format, with the specified backing
* cache. The file passed to it must be a valid MaxMind DB file such as a
@@ -69,6 +77,14 @@ public Reader(File database, NodeCache cache) throws IOException {
this(database, FileMode.MEMORY_MAPPED, cache);
}
+ Reader(File database, NodeCache cache, int chunkSize) throws IOException {
+ this(
+ new BufferHolder(database, FileMode.MEMORY_MAPPED, chunkSize),
+ database.getName(),
+ cache
+ );
+ }
+
/**
* Constructs a Reader with no caching, as if in mode
     * {@link FileMode#MEMORY}, without using a <code>File</code> instance.
@@ -80,6 +96,10 @@ public Reader(InputStream source) throws IOException {
this(source, NoCache.getInstance());
}
+ Reader(InputStream source, int chunkSize) throws IOException {
+ this(source, NoCache.getInstance(), chunkSize);
+ }
+
/**
* Constructs a Reader with the specified backing cache, as if in mode
     * {@link FileMode#MEMORY}, without using a <code>File</code> instance.
@@ -89,7 +109,11 @@ public Reader(InputStream source) throws IOException {
* @throws IOException if there is an error reading from the Stream.
*/
public Reader(InputStream source, NodeCache cache) throws IOException {
- this(new BufferHolder(source), "", cache);
+ this(source, cache, MultiBuffer.DEFAULT_CHUNK_SIZE);
+ }
+
+ Reader(InputStream source, NodeCache cache, int chunkSize) throws IOException {
+ this(new BufferHolder(source, chunkSize), "", cache);
}
/**
@@ -128,8 +152,8 @@ private Reader(BufferHolder bufferHolder, String name, NodeCache cache) throws I
}
this.cache = cache;
- ByteBuffer buffer = bufferHolder.get();
- int start = this.findMetadataStart(buffer, name);
+ Buffer buffer = bufferHolder.get();
+ long start = this.findMetadataStart(buffer, name);
Decoder metadataDecoder = new Decoder(this.cache, buffer, start);
this.metadata = metadataDecoder.decode(start, Metadata.class);
@@ -152,7 +176,7 @@ public T get(InetAddress ipAddress, Class cls) throws IOException {
return getRecord(ipAddress, cls).getData();
}
- int getIpv4Start() {
+ long getIpv4Start() {
return this.ipV4Start;
}
@@ -171,13 +195,13 @@ public DatabaseRecord getRecord(InetAddress ipAddress, Class cls)
byte[] rawAddress = ipAddress.getAddress();
- int[] traverseResult = traverseTree(rawAddress, rawAddress.length * 8);
+ long[] traverseResult = traverseTree(rawAddress, rawAddress.length * 8);
- int pl = traverseResult[1];
- int record = traverseResult[0];
+ long record = traverseResult[0];
+ int pl = (int) traverseResult[1];
- int nodeCount = this.metadata.getNodeCount();
- ByteBuffer buffer = this.getBufferHolder().get();
+ long nodeCount = this.metadata.getNodeCount();
+ Buffer buffer = this.getBufferHolder().get();
T dataRecord = null;
if (record > nodeCount) {
// record is a data pointer
@@ -253,7 +277,7 @@ BufferHolder getBufferHolder() throws ClosedDatabaseException {
return bufferHolder;
}
- private int startNode(int bitLength) {
+ private long startNode(int bitLength) {
// Check if we are looking up an IPv4 address in an IPv6 tree. If this
// is the case, we can skip over the first 96 nodes.
if (this.metadata.getIpVersion() == 6 && bitLength == 32) {
@@ -264,13 +288,13 @@ private int startNode(int bitLength) {
return 0;
}
- private int findIpV4StartNode(ByteBuffer buffer)
+ private long findIpV4StartNode(Buffer buffer)
throws InvalidDatabaseException {
if (this.metadata.getIpVersion() == 4) {
return 0;
}
- int node = 0;
+ long node = 0;
for (int i = 0; i < 96 && node < this.metadata.getNodeCount(); i++) {
node = this.readNode(buffer, node, 0);
}
@@ -319,9 +343,9 @@ public Networks networksWithin(
prefixLength += 96;
}
- int[] traverseResult = this.traverseTree(ipBytes, prefixLength);
- int node = traverseResult[0];
- int prefix = traverseResult[1];
+ long[] traverseResult = this.traverseTree(ipBytes, prefixLength);
+ long node = traverseResult[0];
+ int prefix = (int) traverseResult[1];
return new Networks<>(this, includeAliasedNetworks,
new Networks.NetworkNode[] {new Networks.NetworkNode(ipBytes, prefix, node)},
@@ -335,12 +359,12 @@ public Networks networksWithin(
* @param bitCount The prefix.
* @return int[]
*/
- private int[] traverseTree(byte[] ip, int bitCount)
+ private long[] traverseTree(byte[] ip, int bitCount)
throws ClosedDatabaseException, InvalidDatabaseException {
- ByteBuffer buffer = this.getBufferHolder().get();
+ Buffer buffer = this.getBufferHolder().get();
int bitLength = ip.length * 8;
- int record = this.startNode(bitLength);
- int nodeCount = this.metadata.getNodeCount();
+ long record = this.startNode(bitLength);
+ long nodeCount = this.metadata.getNodeCount();
int i = 0;
for (; i < bitCount && record < nodeCount; i++) {
@@ -352,20 +376,20 @@ int record = this.startNode(bitLength);
record = this.readNode(buffer, record, bit);
}
- return new int[]{record, i};
+ return new long[]{record, i};
}
- int readNode(ByteBuffer buffer, int nodeNumber, int index)
+ long readNode(Buffer buffer, long nodeNumber, int index)
throws InvalidDatabaseException {
// index is the index of the record within the node, which
// can either be 0 or 1.
- int baseOffset = nodeNumber * this.metadata.getNodeByteSize();
+ long baseOffset = nodeNumber * this.metadata.getNodeByteSize();
switch (this.metadata.getRecordSize()) {
case 24:
// For a 24 bit record, each record is 3 bytes.
- buffer.position(baseOffset + index * 3);
- return Decoder.decodeInteger(buffer, 0, 3);
+ buffer.position(baseOffset + (long) index * 3);
+ return Decoder.decodeLong(buffer, 0, 3);
case 28:
int middle = buffer.get(baseOffset + 3);
@@ -377,11 +401,11 @@ int readNode(ByteBuffer buffer, int nodeNumber, int index)
// We get the most significant byte of the second record.
middle = 0x0F & middle;
}
- buffer.position(baseOffset + index * 4);
- return Decoder.decodeInteger(buffer, middle, 3);
+ buffer.position(baseOffset + (long) index * 4);
+ return Decoder.decodeLong(buffer, middle, 3);
case 32:
- buffer.position(baseOffset + index * 4);
- return Decoder.decodeInteger(buffer, 0, 4);
+ buffer.position(baseOffset + (long) index * 4);
+ return Decoder.decodeLong(buffer, 0, 4);
default:
throw new InvalidDatabaseException("Unknown record size: "
+ this.metadata.getRecordSize());
@@ -389,11 +413,11 @@ int readNode(ByteBuffer buffer, int nodeNumber, int index)
}
T resolveDataPointer(
- ByteBuffer buffer,
- int pointer,
+ Buffer buffer,
+ long pointer,
Class cls
) throws IOException {
- int resolved = (pointer - this.metadata.getNodeCount())
+ long resolved = (pointer - this.metadata.getNodeCount())
+ this.metadata.getSearchTreeSize();
if (resolved >= buffer.capacity()) {
@@ -421,12 +445,12 @@ T resolveDataPointer(
* are much faster algorithms (e.g., Boyer-Moore) for this if speed is ever
* an issue, but I suspect it won't be.
*/
- private int findMetadataStart(ByteBuffer buffer, String databaseName)
+ private long findMetadataStart(Buffer buffer, String databaseName)
throws InvalidDatabaseException {
- int fileSize = buffer.capacity();
+ long fileSize = buffer.capacity();
FILE:
- for (int i = 0; i < fileSize - METADATA_START_MARKER.length + 1; i++) {
+ for (long i = 0; i < fileSize - METADATA_START_MARKER.length + 1; i++) {
for (int j = 0; j < METADATA_START_MARKER.length; j++) {
byte b = buffer.get(fileSize - i - j - 1);
if (b != METADATA_START_MARKER[METADATA_START_MARKER.length - j
diff --git a/src/main/java/com/maxmind/db/SingleBuffer.java b/src/main/java/com/maxmind/db/SingleBuffer.java
new file mode 100644
index 00000000..89c95980
--- /dev/null
+++ b/src/main/java/com/maxmind/db/SingleBuffer.java
@@ -0,0 +1,150 @@
+package com.maxmind.db;
+
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.nio.channels.FileChannel;
+import java.nio.channels.FileChannel.MapMode;
+import java.nio.charset.CharacterCodingException;
+import java.nio.charset.CharsetDecoder;
+
+/**
+ * A {@link Buffer} implementation backed by a single {@link ByteBuffer}.
+ *
+ * This implementation is limited to capacities up to
+ * {@link Integer#MAX_VALUE}, as {@link ByteBuffer} cannot exceed that size.
+ */
+class SingleBuffer implements Buffer {
+
+ private final ByteBuffer buffer;
+
+ /**
+ * Creates a new {@code SingleBuffer} with the given capacity.
+ *
+ * @param capacity the capacity in bytes (must be <= Integer.MAX_VALUE)
+ * @throws IllegalArgumentException if the capacity exceeds
+ * {@link Integer#MAX_VALUE}
+ */
+ public SingleBuffer(long capacity) {
+ if (capacity > Integer.MAX_VALUE) {
+ throw new IllegalArgumentException(
+ "SingleBuffer cannot exceed Integer.MAX_VALUE capacity"
+ );
+ }
+ this.buffer = ByteBuffer.allocate((int) capacity);
+ }
+
+ /**
+ * Creates a new {@code SingleBuffer} wrapping the given {@link ByteBuffer}.
+ *
+ * @param buffer the underlying buffer
+ */
+ private SingleBuffer(ByteBuffer buffer) {
+ this.buffer = buffer;
+ }
+
+ /** {@inheritDoc} */
+ @Override
+ public long capacity() {
+ return buffer.capacity();
+ }
+
+ /** {@inheritDoc} */
+ @Override
+ public long position() {
+ return buffer.position();
+ }
+
+ /** {@inheritDoc} */
+ @Override
+ public SingleBuffer position(long newPosition) {
+ buffer.position((int) newPosition);
+ return this;
+ }
+
+ /** {@inheritDoc} */
+ @Override
+ public long limit() {
+ return buffer.limit();
+ }
+
+ /** {@inheritDoc} */
+ @Override
+ public SingleBuffer limit(long newLimit) {
+ buffer.limit((int) newLimit);
+ return this;
+ }
+
+ /** {@inheritDoc} */
+ @Override
+ public byte get() {
+ return buffer.get();
+ }
+
+ /** {@inheritDoc} */
+ @Override
+ public SingleBuffer get(byte[] dst) {
+ buffer.get(dst);
+ return this;
+ }
+
+ /** {@inheritDoc} */
+ @Override
+ public byte get(long index) {
+ return buffer.get((int) index);
+ }
+
+ /** {@inheritDoc} */
+ @Override
+ public double getDouble() {
+ return buffer.getDouble();
+ }
+
+ /** {@inheritDoc} */
+ @Override
+ public float getFloat() {
+ return buffer.getFloat();
+ }
+
+ /** {@inheritDoc} */
+ @Override
+ public SingleBuffer duplicate() {
+ return new SingleBuffer(this.buffer.duplicate());
+ }
+
+ /** {@inheritDoc} */
+ @Override
+ public long readFrom(FileChannel channel) throws IOException {
+ return channel.read(buffer);
+ }
+
+ /** {@inheritDoc} */
+ @Override
+ public String decode(CharsetDecoder decoder)
+ throws CharacterCodingException {
+ return decoder.decode(buffer).toString();
+ }
+
+ /**
+ * Wraps the given byte array in a new {@code SingleBuffer}.
+ *
+ * @param array the byte array to wrap
+ * @return a new {@code SingleBuffer} backed by the array
+ */
+ public static SingleBuffer wrap(byte[] array) {
+ return new SingleBuffer(ByteBuffer.wrap(array));
+ }
+
+ /**
+ * Creates a read-only {@code SingleBuffer} by memory-mapping the given
+ * {@link FileChannel}.
+ *
+ * @param channel the file channel to map
+ * @return a new read-only {@code SingleBuffer}
+ * @throws IOException if an I/O error occurs
+ */
+ public static SingleBuffer mapFromChannel(FileChannel channel)
+ throws IOException {
+ ByteBuffer buffer = channel.map(MapMode.READ_ONLY, 0, channel.size());
+ return new SingleBuffer(buffer.asReadOnlyBuffer());
+ }
+}
diff --git a/src/test/java/com/maxmind/db/DecoderTest.java b/src/test/java/com/maxmind/db/DecoderTest.java
index 92d6f3b1..5e02c347 100644
--- a/src/test/java/com/maxmind/db/DecoderTest.java
+++ b/src/test/java/com/maxmind/db/DecoderTest.java
@@ -8,7 +8,6 @@
import java.io.IOException;
import java.math.BigInteger;
-import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.HashMap;
@@ -399,7 +398,7 @@ public void testArrays() throws IOException {
@Test
public void testInvalidControlByte() {
- ByteBuffer buffer = ByteBuffer.wrap(new byte[] {0x0, 0xF});
+ SingleBuffer buffer = SingleBuffer.wrap(new byte[] {0x0, 0xF});
Decoder decoder = new Decoder(new CHMCache(), buffer, 0);
InvalidDatabaseException ex = assertThrows(
@@ -418,7 +417,7 @@ private static void testTypeDecoding(Type type, Map tests)
byte[] input = entry.getValue();
String desc = "decoded " + type.name() + " - " + expect;
- ByteBuffer buffer = ByteBuffer.wrap(input);
+ SingleBuffer buffer = SingleBuffer.wrap(input);
Decoder decoder = new TestDecoder(cache, buffer, 0);
diff --git a/src/test/java/com/maxmind/db/MultiBufferTest.java b/src/test/java/com/maxmind/db/MultiBufferTest.java
new file mode 100644
index 00000000..caf85f21
--- /dev/null
+++ b/src/test/java/com/maxmind/db/MultiBufferTest.java
@@ -0,0 +1,340 @@
+package com.maxmind.db;
+
+import static org.junit.jupiter.api.Assertions.*;
+
+import java.io.IOException;
+import java.io.RandomAccessFile;
+import java.nio.ByteBuffer;
+import java.nio.channels.FileChannel;
+import java.nio.charset.CharacterCodingException;
+import java.nio.charset.StandardCharsets;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.nio.file.StandardOpenOption;
+
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.io.TempDir;
+
+public class MultiBufferTest {
+ static MultiBuffer createBuffer(int chunkSize) {
+ try {
+ Path tmpFile = Files.createTempFile("test-data", ".bin");
+ byte[] data = new byte[]{
+ // uint16: 500
+ (byte) 0xa2, 0x1, (byte) 0xf4,
+
+ // uint32: 10872
+ (byte) 0xc2, 0x2a, 0x78,
+
+ // int32: 500
+ 0x2, 0x1, 0x1, (byte) 0xf4,
+
+ // boolean: true
+ 0x1, 0x7,
+
+ // double: 3.14159265359
+ 0x68, 0x40, 0x9, 0x21, (byte) 0xFB, 0x54, 0x44, 0x2E, (byte) 0xEA,
+
+ // float: 3.14f
+ 0x40, 0x48, (byte) 0xF5, (byte) 0xC3,
+
+ // string: "123"
+ 0x43, 0x31, 0x32, 0x33,
+
+ // pointer: 3017
+ 0x28, 0x3, (byte) 0xc9,
+
+ // array: ["Foo", "人"]
+ 0x2, 0x4,
+ 0x43, 0x46, 0x6f, 0x6f, // "Foo"
+ 0x43, (byte) 0xe4, (byte) 0xba, (byte) 0xba, // "人"
+
+ // map: {"en": "Foo", "zh": "人"}
+ (byte) 0xe2,
+ 0x42, 0x65, 0x6e, // "en"
+ 0x43, 0x46, 0x6f, 0x6f, // "Foo"
+ 0x42, 0x7a, 0x68, // "zh"
+ 0x43, (byte) 0xe4, (byte) 0xba, (byte) 0xba, // "人"
+
+ // uint64: large value (2^16 - 1)
+ 0x2, 0x2, (byte) 0xff, (byte) 0xff,
+
+ // longer string: "123456789012345678901234567"
+ 0x5b, 0x31, 0x32, 0x33, 0x34, 0x35, 0x36, 0x37, 0x38, 0x39, 0x30,
+ 0x31, 0x32, 0x33, 0x34, 0x35, 0x36, 0x37, 0x38, 0x39, 0x30,
+ 0x31, 0x32, 0x33, 0x34, 0x35, 0x36, 0x37
+ };
+ Files.write(tmpFile, data);
+
+ try (RandomAccessFile file = new RandomAccessFile(tmpFile.toFile(), "r");
+ FileChannel channel = file.getChannel()) {
+
+ MultiBuffer buffer = new MultiBuffer(channel.size(), chunkSize);
+
+ buffer.readFrom(channel, chunkSize);
+ buffer.position(0);
+ buffer.limit(channel.size());
+
+ return buffer;
+ }
+ } catch (IOException e) {
+ fail("Could not create test buffer: " + e.getMessage());
+ return null;
+ }
+ }
+
+ @Test
+ public void testPositionSetter() {
+ MultiBuffer buffer = new MultiBuffer(1000);
+ buffer.position(500);
+ assertEquals(500, buffer.position());
+ }
+
+ @Test
+ public void testPositionSetterInvalidNegative() {
+ MultiBuffer buffer = new MultiBuffer(1000);
+ assertThrows(IllegalArgumentException.class, () -> buffer.position(-1));
+ }
+
+ @Test
+ public void testPositionSetterExceedsLimit() {
+ MultiBuffer buffer = new MultiBuffer(1000);
+ buffer.limit(500);
+ assertThrows(IllegalArgumentException.class, () -> buffer.position(600));
+ }
+
+ @Test
+ public void testLimitSetter() {
+ MultiBuffer buffer = new MultiBuffer(1000);
+ buffer.limit(500);
+ assertEquals(500, buffer.limit());
+ }
+
+ @Test
+ public void testLimitSetterInvalidNegative() {
+ MultiBuffer buffer = new MultiBuffer(1000);
+ assertThrows(IllegalArgumentException.class, () -> buffer.limit(-1));
+ }
+
+ @Test
+ public void testLimitSetterExceedsCapacity() {
+ MultiBuffer buffer = new MultiBuffer(1000);
+ assertThrows(IllegalArgumentException.class, () -> buffer.limit(1001));
+ }
+
+ @Test
+ public void testLimitSetterAdjustsPosition() {
+ MultiBuffer buffer = new MultiBuffer(1000);
+ buffer.position(800);
+ buffer.limit(500);
+ assertEquals(500, buffer.position());
+ }
+
+ @Test
+ public void testGetByIndex() {
+ MultiBuffer buffer = createBuffer(24);
+ assertEquals(0x2a, buffer.get(4));
+ assertEquals(0x1, buffer.get(10));
+ }
+
+ @Test
+ public void testGetByIndexOutOfBounds() {
+ MultiBuffer buffer = createBuffer(24);
+ buffer.limit(50);
+ assertThrows(IndexOutOfBoundsException.class, () -> buffer.get(50));
+ assertThrows(IndexOutOfBoundsException.class, () -> buffer.get(-1));
+ }
+
+ @Test
+ public void testGetSingleByte() {
+ MultiBuffer buffer = createBuffer(24);
+ assertEquals((byte) 0xa2, buffer.get());
+ assertEquals(1, buffer.position());
+ }
+
+ @Test
+ public void testGetByteArray() {
+ MultiBuffer buffer = createBuffer(24);
+ byte[] dst = new byte[10];
+ buffer.position(32);
+ buffer.get(dst);
+ byte[] expectedBytes = new byte[]{
+ 0x2, 0x4,
+ 0x43, 0x46, 0x6f, 0x6f,
+ 0x43, (byte) 0xe4, (byte) 0xba, (byte) 0xba};
+ assertArrayEquals(expectedBytes, dst);
+ assertEquals(42, buffer.position());
+ }
+
+ @Test
+ public void testGetByteArrayExceedsLimit() {
+ MultiBuffer buffer = new MultiBuffer(100);
+ buffer.limit(5);
+ byte[] dst = new byte[10];
+ assertThrows(IndexOutOfBoundsException.class, () -> buffer.get(dst));
+ }
+
+ @Test
+ public void testGetByteArrayAcrossChunks() {
+ MultiBuffer buffer = createBuffer(35);
+ byte[] dst = new byte[10];
+ buffer.position(32);
+ buffer.get(dst);
+ byte[] expectedBytes = new byte[]{
+ 0x2, 0x4,
+ 0x43, 0x46, 0x6f, 0x6f,
+ 0x43, (byte) 0xe4, (byte) 0xba, (byte) 0xba};
+ assertArrayEquals(expectedBytes, dst);
+ assertEquals(42, buffer.position());
+ }
+
+ @Test
+ public void testGetDouble() {
+ MultiBuffer buffer = createBuffer(24);
+ buffer.position(13);
+ assertEquals(3.14159265359, buffer.getDouble());
+ assertEquals(21, buffer.position());
+ }
+
+ @Test
+ public void testGetDoubleAcrossChunks() {
+ MultiBuffer buffer = createBuffer(16);
+ buffer.position(13);
+ assertEquals(3.14159265359, buffer.getDouble());
+ assertEquals(21, buffer.position());
+ }
+
+ @Test
+ public void testGetFloat() {
+ MultiBuffer buffer = createBuffer(26);
+ buffer.position(21);
+ assertEquals(3.14f, buffer.getFloat());
+ assertEquals(25, buffer.position());
+ }
+
+ @Test
+ public void testGetFloatAcrossChunks() {
+ MultiBuffer buffer = createBuffer(22);
+ buffer.position(21);
+ assertEquals(3.14f, buffer.getFloat());
+ assertEquals(25, buffer.position());
+ }
+
+ @Test
+ public void testDuplicate() {
+ MultiBuffer original = new MultiBuffer(1000);
+ original.position(100);
+ original.limit(800);
+
+ MultiBuffer duplicate = (MultiBuffer) original.duplicate();
+
+ assertEquals(original.capacity(), duplicate.capacity());
+ assertEquals(original.position(), duplicate.position());
+ assertEquals(original.limit(), duplicate.limit());
+ assertEquals(original.buffers.length, duplicate.buffers.length);
+
+ duplicate.position(200);
+ assertEquals(100, original.position());
+ assertEquals(200, duplicate.position());
+ }
+
+ @Test
+ public void testWrapValidChunks() {
+ ByteBuffer[] chunks = new ByteBuffer[] {
+ ByteBuffer.allocate(8),
+ ByteBuffer.allocate(3)
+ };
+
+ MultiBuffer buffer = new MultiBuffer(chunks, 8);
+ assertEquals(11, buffer.capacity());
+ }
+
+ @Test
+ public void testWrapInvalidChunkSize() {
+ ByteBuffer[] chunks = new ByteBuffer[] {
+ ByteBuffer.allocate(3),
+ ByteBuffer.allocate(8)
+ };
+
+ assertThrows(IllegalArgumentException.class, () -> new MultiBuffer(chunks, 8));
+ }
+
+ @Test
+ public void testReadFromFileChannel(@TempDir Path tempDir) throws IOException {
+ // Create test file
+ Path testFile = tempDir.resolve("test.dat");
+ byte[] testData = new byte[]{
+ (byte) 0xa2, 0x1, (byte) 0xf4,
+ (byte) 0xc2, 0x2a, 0x78,
+ 0x2, 0x1, 0x1, (byte) 0xf4,
+ 0x1, 0x7,
+ 0x68, 0x40, 0x9, 0x21, (byte) 0xFB, 0x54, 0x44, 0x2E, (byte) 0xEA,
+ };
+ Files.write(testFile, testData);
+
+ try (FileChannel channel = FileChannel.open(testFile, StandardOpenOption.READ)) {
+ MultiBuffer buffer = new MultiBuffer(testData.length);
+ long bytesRead = buffer.readFrom(channel);
+ assertEquals(21, bytesRead);
+ assertEquals(21, buffer.position());
+ }
+ }
+
+ @Test
+ public void testMapFromChannel(@TempDir Path tempDir) throws IOException {
+ // Create test file
+ Path testFile = tempDir.resolve("test.dat");
+ byte[] testData = new byte[]{
+ (byte) 0xa2, 0x1, (byte) 0xf4,
+ (byte) 0xc2, 0x2a, 0x78,
+ 0x2, 0x1, 0x1, (byte) 0xf4,
+ 0x1, 0x7,
+ 0x68, 0x40, 0x9, 0x21, (byte) 0xFB, 0x54, 0x44, 0x2E, (byte) 0xEA,
+ };
+ Files.write(testFile, testData);
+
+ try (FileChannel channel = FileChannel.open(testFile, StandardOpenOption.READ)) {
+ MultiBuffer buffer = MultiBuffer.mapFromChannel(channel);
+ assertEquals(21, buffer.capacity());
+ }
+ }
+
+ @Test
+ public void testMapFromEmptyChannel(@TempDir Path tempDir) throws IOException {
+ Path emptyFile = tempDir.resolve("empty.dat");
+ Files.createFile(emptyFile);
+
+ try (FileChannel channel = FileChannel.open(emptyFile, StandardOpenOption.READ)) {
+ assertThrows(IllegalArgumentException.class, () -> MultiBuffer.mapFromChannel(channel));
+ }
+ }
+
+ @Test
+ public void testDecodeString() throws CharacterCodingException {
+ MultiBuffer buffer = createBuffer(22);
+ buffer.position(26);
+ buffer.limit(29);
+ String result = buffer.decode(StandardCharsets.UTF_8.newDecoder());
+ assertEquals("123", result);
+ assertEquals(29, buffer.position());
+ }
+
+ @Test
+ public void testDecodeStringTooLarge() {
+ MultiBuffer buffer = createBuffer(65);
+ buffer.position(62);
+ buffer.limit(89);
+ assertThrows(IllegalStateException.class, () ->
+ buffer.decode(StandardCharsets.UTF_8.newDecoder(), 20));
+ }
+
+ @Test
+ public void testDecodeAcrossChunks() throws CharacterCodingException {
+ MultiBuffer buffer = createBuffer(65);
+ buffer.position(62);
+ buffer.limit(89);
+ String result = buffer.decode(StandardCharsets.UTF_8.newDecoder());
+ assertEquals("123456789012345678901234567", result);
+ assertEquals(89, buffer.position());
+ }
+}
diff --git a/src/test/java/com/maxmind/db/MultiThreadedTest.java b/src/test/java/com/maxmind/db/MultiThreadedTest.java
index 2a6e192b..22852af2 100644
--- a/src/test/java/com/maxmind/db/MultiThreadedTest.java
+++ b/src/test/java/com/maxmind/db/MultiThreadedTest.java
@@ -29,7 +29,7 @@ public void multipleMmapOpens() throws InterruptedException,
@Test
public void streamThreadTest() throws IOException, InterruptedException,
ExecutionException {
- try (Reader reader = new Reader(ReaderTest.getStream("MaxMind-DB-test-decoder.mmdb"))) {
+ try (Reader reader = new Reader(ReaderTest.getStream("MaxMind-DB-test-decoder.mmdb"), 2048)) {
MultiThreadedTest.threadTest(reader);
}
}
diff --git a/src/test/java/com/maxmind/db/ReaderTest.java b/src/test/java/com/maxmind/db/ReaderTest.java
index d2ff0904..98dfda03 100644
--- a/src/test/java/com/maxmind/db/ReaderTest.java
+++ b/src/test/java/com/maxmind/db/ReaderTest.java
@@ -27,9 +27,13 @@
import java.util.Map;
import java.util.Vector;
import java.util.concurrent.ConcurrentHashMap;
+import java.util.stream.IntStream;
+
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
+import org.junit.jupiter.params.ParameterizedTest;
+import org.junit.jupiter.params.provider.MethodSource;
public class ReaderTest {
private Reader testReader;
@@ -46,12 +50,27 @@ public void teardownReader() throws IOException {
}
}
- @Test
- public void test() throws IOException {
+ static IntStream chunkSizes() {
+ int[] sizes = new int[] {
+ 512,
+ 2048,
+ // The default chunk size of the MultiBuffer is close to max int, that causes
+ // some issues when running tests in CI as we try to allocate some byte arrays
+ // that are too big to fit in the heap.
+ // We use half of that just to be sure nothing breaks, but big enough that we
+ // ensure SingleBuffer is tested too using the test MMDBs.
+ MultiBuffer.DEFAULT_CHUNK_SIZE / 4,
+ };
+ return IntStream.of(sizes);
+ }
+
+ @ParameterizedTest
+ @MethodSource("chunkSizes")
+ public void test(int chunkSize) throws IOException {
for (long recordSize : new long[] {24, 28, 32}) {
for (int ipVersion : new int[] {4, 6}) {
File file = getFile("MaxMind-DB-test-ipv" + ipVersion + "-" + recordSize + ".mmdb");
- try (Reader reader = new Reader(file)) {
+ try (Reader reader = new Reader(file, chunkSize)) {
this.testMetadata(reader, ipVersion, recordSize);
if (ipVersion == 4) {
this.testIpV4(reader, file);
@@ -78,13 +97,14 @@ static class GetRecordTest {
}
}
- @Test
- public void testNetworks() throws IOException, InvalidDatabaseException, InvalidNetworkException {
+ @ParameterizedTest
+ @MethodSource("chunkSizes")
+ public void testNetworks(int chunkSize) throws IOException, InvalidDatabaseException, InvalidNetworkException {
for (long recordSize : new long[] {24, 28, 32}) {
for (int ipVersion : new int[] {4, 6}) {
File file = getFile("MaxMind-DB-test-ipv" + ipVersion + "-" + recordSize + ".mmdb");
- Reader reader = new Reader(file);
+ Reader reader = new Reader(file, chunkSize);
var networks = reader.networks(false, Map.class);
while(networks.hasNext()) {
@@ -105,10 +125,11 @@ public void testNetworks() throws IOException, InvalidDatabaseException, Invalid
}
}
- @Test
- public void testNetworksWithInvalidSearchTree() throws IOException, InvalidNetworkException{
+ @ParameterizedTest
+ @MethodSource("chunkSizes")
+ public void testNetworksWithInvalidSearchTree(int chunkSize) throws IOException, InvalidNetworkException{
File file = getFile("MaxMind-DB-test-broken-search-tree-24.mmdb");
- Reader reader = new Reader(file);
+ Reader reader = new Reader(file, chunkSize);
var networks = reader.networks(false, Map.class);
@@ -328,12 +349,13 @@ public networkTest(String network, int prefix,String database, String[] expecte
)
};
- @Test
- public void testNetworksWithin() throws IOException, InvalidNetworkException{
+ @ParameterizedTest
+ @MethodSource("chunkSizes")
+ public void testNetworksWithin(int chunkSize) throws IOException, InvalidNetworkException{
for(networkTest test : tests){
for(int recordSize : new int[]{24, 28, 32}){
File file = getFile("MaxMind-DB-test-"+test.database+"-"+recordSize+".mmdb");
- Reader reader = new Reader(file);
+ Reader reader = new Reader(file, chunkSize);
InetAddress address = InetAddress.getByName(test.network);
Network network = new Network(address, test.prefix);
@@ -367,11 +389,12 @@ public void testNetworksWithin() throws IOException, InvalidNetworkException{
)
};
- @Test
- public void testGeoIPNetworksWithin() throws IOException, InvalidNetworkException{
+ @ParameterizedTest
+ @MethodSource("chunkSizes")
+ public void testGeoIPNetworksWithin(int chunkSize) throws IOException, InvalidNetworkException{
for (networkTest test : geoipTests){
File file = getFile(test.database);
- Reader reader = new Reader(file);
+ Reader reader = new Reader(file, chunkSize);
InetAddress address = InetAddress.getByName(test.network);
Network network = new Network(address, test.prefix);
@@ -390,8 +413,9 @@ public void testGeoIPNetworksWithin() throws IOException, InvalidNetworkExceptio
}
}
- @Test
- public void testGetRecord() throws IOException {
+ @ParameterizedTest
+ @MethodSource("chunkSizes")
+ public void testGetRecord(int chunkSize) throws IOException {
GetRecordTest[] mapTests = {
new GetRecordTest("1.1.1.1", "MaxMind-DB-test-ipv6-32.mmdb", "1.0.0.0/8", false),
new GetRecordTest("::1:ffff:ffff", "MaxMind-DB-test-ipv6-24.mmdb",
@@ -407,7 +431,7 @@ public void testGetRecord() throws IOException {
"0:0:0:0:0:0:101:100/120", true),
};
for (GetRecordTest test : mapTests) {
- try (Reader reader = new Reader(test.db)) {
+ try (Reader reader = new Reader(test.db, chunkSize)) {
DatabaseRecord> record = reader.getRecord(test.ip, Map.class);
assertEquals(test.network, record.getNetwork().toString());
@@ -431,7 +455,7 @@ public void testGetRecord() throws IOException {
"8000:0:0:0:0:0:0:0/1", false)
};
for (GetRecordTest test : stringTests) {
- try (Reader reader = new Reader(test.db)) {
+ try (Reader reader = new Reader(test.db, chunkSize)) {
var record = reader.getRecord(test.ip, String.class);
assertEquals(test.network, record.getNetwork().toString());
@@ -445,21 +469,24 @@ var record = reader.getRecord(test.ip, String.class);
}
}
- @Test
- public void testMetadataPointers() throws IOException {
- Reader reader = new Reader(getFile("MaxMind-DB-test-metadata-pointers.mmdb"));
+ @ParameterizedTest
+ @MethodSource("chunkSizes")
+ public void testMetadataPointers(int chunkSize) throws IOException {
+ Reader reader = new Reader(getFile("MaxMind-DB-test-metadata-pointers.mmdb"), chunkSize);
assertEquals("Lots of pointers in metadata", reader.getMetadata().getDatabaseType());
}
- @Test
- public void testNoIpV4SearchTreeFile() throws IOException {
- this.testReader = new Reader(getFile("MaxMind-DB-no-ipv4-search-tree.mmdb"));
+ @ParameterizedTest
+ @MethodSource("chunkSizes")
+ public void testNoIpV4SearchTreeFile(int chunkSize) throws IOException {
+ this.testReader = new Reader(getFile("MaxMind-DB-no-ipv4-search-tree.mmdb"), chunkSize);
this.testNoIpV4SearchTree(this.testReader);
}
- @Test
- public void testNoIpV4SearchTreeStream() throws IOException {
- this.testReader = new Reader(getStream("MaxMind-DB-no-ipv4-search-tree.mmdb"));
+ @ParameterizedTest
+ @MethodSource("chunkSizes")
+ public void testNoIpV4SearchTreeStream(int chunkSize) throws IOException {
+ this.testReader = new Reader(getStream("MaxMind-DB-no-ipv4-search-tree.mmdb"), chunkSize);
this.testNoIpV4SearchTree(this.testReader);
}
@@ -469,27 +496,30 @@ private void testNoIpV4SearchTree(Reader reader) throws IOException {
assertEquals("::0/64", reader.get(InetAddress.getByName("192.1.1.1"), String.class));
}
- @Test
- public void testDecodingTypesFile() throws IOException {
- this.testReader = new Reader(getFile("MaxMind-DB-test-decoder.mmdb"));
+ @ParameterizedTest
+ @MethodSource("chunkSizes")
+ public void testDecodingTypesFile(int chunkSize) throws IOException {
+ this.testReader = new Reader(getFile("MaxMind-DB-test-decoder.mmdb"), chunkSize);
this.testDecodingTypes(this.testReader, true);
this.testDecodingTypesIntoModelObject(this.testReader, true);
this.testDecodingTypesIntoModelObjectBoxed(this.testReader, true);
this.testDecodingTypesIntoModelWithList(this.testReader);
}
- @Test
- public void testDecodingTypesStream() throws IOException {
- this.testReader = new Reader(getStream("MaxMind-DB-test-decoder.mmdb"));
+ @ParameterizedTest
+ @MethodSource("chunkSizes")
+ public void testDecodingTypesStream(int chunkSize) throws IOException {
+ this.testReader = new Reader(getStream("MaxMind-DB-test-decoder.mmdb"), chunkSize);
this.testDecodingTypes(this.testReader, true);
this.testDecodingTypesIntoModelObject(this.testReader, true);
this.testDecodingTypesIntoModelObjectBoxed(this.testReader, true);
this.testDecodingTypesIntoModelWithList(this.testReader);
}
- @Test
- public void testDecodingTypesPointerDecoderFile() throws IOException {
- this.testReader = new Reader(getFile("MaxMind-DB-test-pointer-decoder.mmdb"));
+ @ParameterizedTest
+ @MethodSource("chunkSizes")
+ public void testDecodingTypesPointerDecoderFile(int chunkSize) throws IOException {
+ this.testReader = new Reader(getFile("MaxMind-DB-test-pointer-decoder.mmdb"), chunkSize);
this.testDecodingTypes(this.testReader, false);
this.testDecodingTypesIntoModelObject(this.testReader, false);
this.testDecodingTypesIntoModelObjectBoxed(this.testReader, false);
@@ -799,16 +829,18 @@ public TestModelList(
}
}
- @Test
- public void testZerosFile() throws IOException {
- this.testReader = new Reader(getFile("MaxMind-DB-test-decoder.mmdb"));
+ @ParameterizedTest
+ @MethodSource("chunkSizes")
+ public void testZerosFile(int chunkSize) throws IOException {
+ this.testReader = new Reader(getFile("MaxMind-DB-test-decoder.mmdb"), chunkSize);
this.testZeros(this.testReader);
this.testZerosModelObject(this.testReader);
}
- @Test
- public void testZerosStream() throws IOException {
- this.testReader = new Reader(getFile("MaxMind-DB-test-decoder.mmdb"));
+ @ParameterizedTest
+ @MethodSource("chunkSizes")
+ public void testZerosStream(int chunkSize) throws IOException {
+ this.testReader = new Reader(getFile("MaxMind-DB-test-decoder.mmdb"), chunkSize);
this.testZeros(this.testReader);
this.testZerosModelObject(this.testReader);
}
@@ -860,9 +892,10 @@ private void testZerosModelObject(Reader reader) throws IOException {
assertEquals(BigInteger.ZERO, model.uint128Field);
}
- @Test
- public void testDecodeSubdivisions() throws IOException {
- this.testReader = new Reader(getFile("GeoIP2-City-Test.mmdb"));
+ @ParameterizedTest
+ @MethodSource("chunkSizes")
+ public void testDecodeSubdivisions(int chunkSize) throws IOException {
+ this.testReader = new Reader(getFile("GeoIP2-City-Test.mmdb"), chunkSize);
TestModelSubdivisions model = this.testReader.get(
InetAddress.getByName("2.125.160.216"),
@@ -898,9 +931,10 @@ public TestModelSubdivision(
}
}
- @Test
- public void testDecodeWrongTypeWithConstructorException() throws IOException {
- this.testReader = new Reader(getFile("GeoIP2-City-Test.mmdb"));
+ @ParameterizedTest
+ @MethodSource("chunkSizes")
+ public void testDecodeWrongTypeWithConstructorException(int chunkSize) throws IOException {
+ this.testReader = new Reader(getFile("GeoIP2-City-Test.mmdb"), chunkSize);
DeserializationException ex = assertThrows(DeserializationException.class,
() -> this.testReader.get(InetAddress.getByName("2.125.160.216"),
TestModelSubdivisionsWithUnknownException.class));
@@ -921,18 +955,20 @@ public TestModelSubdivisionsWithUnknownException(
}
}
- @Test
- public void testDecodeWrongTypeWithWrongArguments() throws IOException {
- this.testReader = new Reader(getFile("GeoIP2-City-Test.mmdb"));
+ @ParameterizedTest
+ @MethodSource("chunkSizes")
+ public void testDecodeWrongTypeWithWrongArguments(int chunkSize) throws IOException {
+ this.testReader = new Reader(getFile("GeoIP2-City-Test.mmdb"), chunkSize);
DeserializationException ex = assertThrows(DeserializationException.class,
() -> this.testReader.get(InetAddress.getByName("2.125.160.216"),
TestWrongModelSubdivisions.class));
assertThat(ex.getMessage(), containsString("Error getting record for IP"));
}
- @Test
- public void testDecodeWithDataTypeMismatchInModel() throws IOException {
- this.testReader = new Reader(getFile("GeoIP2-City-Test.mmdb"));
+ @ParameterizedTest
+ @MethodSource("chunkSizes")
+ public void testDecodeWithDataTypeMismatchInModel(int chunkSize) throws IOException {
+ this.testReader = new Reader(getFile("GeoIP2-City-Test.mmdb"), chunkSize);
DeserializationException ex = assertThrows(DeserializationException.class,
() -> this.testReader.get(InetAddress.getByName("2.125.160.216"),
TestDataTypeMismatchInModel.class));
@@ -953,9 +989,10 @@ public TestConstructorMismatchModel(
}
}
- @Test
- public void testDecodeWithDataTypeMismatchInModelAndNullValue() throws IOException {
- this.testReader = new Reader(getFile("MaxMind-DB-test-decoder.mmdb"));
+ @ParameterizedTest
+ @MethodSource("chunkSizes")
+ public void testDecodeWithDataTypeMismatchInModelAndNullValue(int chunkSize) throws IOException {
+ this.testReader = new Reader(getFile("MaxMind-DB-test-decoder.mmdb"), chunkSize);
DeserializationException ex = assertThrows(DeserializationException.class,
() -> this.testReader.get(
@@ -1002,9 +1039,10 @@ public TestDataTypeMismatchInModel(
}
}
- @Test
- public void testDecodeConcurrentHashMap() throws IOException {
- this.testReader = new Reader(getFile("GeoIP2-City-Test.mmdb"));
+ @ParameterizedTest
+ @MethodSource("chunkSizes")
+ public void testDecodeConcurrentHashMap(int chunkSize) throws IOException {
+ this.testReader = new Reader(getFile("GeoIP2-City-Test.mmdb"), chunkSize);
var m = this.testReader.get(
InetAddress.getByName("2.125.160.216"),
@@ -1019,9 +1057,10 @@ public void testDecodeConcurrentHashMap() throws IOException {
assertEquals("ENG", isoCode);
}
- @Test
- public void testDecodeVector() throws IOException {
- this.testReader = new Reader(getFile("MaxMind-DB-test-decoder.mmdb"));
+ @ParameterizedTest
+ @MethodSource("chunkSizes")
+ public void testDecodeVector(int chunkSize) throws IOException {
+ this.testReader = new Reader(getFile("MaxMind-DB-test-decoder.mmdb"), chunkSize);
TestModelVector model = this.testReader.get(
InetAddress.getByName("::1.1.1.0"),
@@ -1047,13 +1086,15 @@ public TestModelVector(
}
// Test that we cache differently depending on more than the offset.
- @Test
- public void testCacheWithDifferentModels() throws IOException {
+ @ParameterizedTest
+ @MethodSource("chunkSizes")
+ public void testCacheWithDifferentModels(int chunkSize) throws IOException {
NodeCache cache = new CHMCache();
this.testReader = new Reader(
getFile("MaxMind-DB-test-decoder.mmdb"),
- cache
+ cache,
+ chunkSize
);
TestModelA modelA = this.testReader.get(
@@ -1132,15 +1173,17 @@ public TestModelCacheKey(List a, List b) {
}
}
- @Test
- public void testBrokenDatabaseFile() throws IOException {
- this.testReader = new Reader(getFile("GeoIP2-City-Test-Broken-Double-Format.mmdb"));
+ @ParameterizedTest
+ @MethodSource("chunkSizes")
+ public void testBrokenDatabaseFile(int chunkSize) throws IOException {
+ this.testReader = new Reader(getFile("GeoIP2-City-Test-Broken-Double-Format.mmdb"), chunkSize);
this.testBrokenDatabase(this.testReader);
}
- @Test
- public void testBrokenDatabaseStream() throws IOException {
- this.testReader = new Reader(getStream("GeoIP2-City-Test-Broken-Double-Format.mmdb"));
+ @ParameterizedTest
+ @MethodSource("chunkSizes")
+ public void testBrokenDatabaseStream(int chunkSize) throws IOException {
+ this.testReader = new Reader(getStream("GeoIP2-City-Test-Broken-Double-Format.mmdb"), chunkSize);
this.testBrokenDatabase(this.testReader);
}
@@ -1152,15 +1195,17 @@ private void testBrokenDatabase(Reader reader) {
containsString("The MaxMind DB file's data section contains bad data"));
}
- @Test
- public void testBrokenSearchTreePointerFile() throws IOException {
- this.testReader = new Reader(getFile("MaxMind-DB-test-broken-pointers-24.mmdb"));
+ @ParameterizedTest
+ @MethodSource("chunkSizes")
+ public void testBrokenSearchTreePointerFile(int chunkSize) throws IOException {
+ this.testReader = new Reader(getFile("MaxMind-DB-test-broken-pointers-24.mmdb"), chunkSize);
this.testBrokenSearchTreePointer(this.testReader);
}
- @Test
- public void testBrokenSearchTreePointerStream() throws IOException {
- this.testReader = new Reader(getStream("MaxMind-DB-test-broken-pointers-24.mmdb"));
+ @ParameterizedTest
+ @MethodSource("chunkSizes")
+ public void testBrokenSearchTreePointerStream(int chunkSize) throws IOException {
+ this.testReader = new Reader(getStream("MaxMind-DB-test-broken-pointers-24.mmdb"), chunkSize);
this.testBrokenSearchTreePointer(this.testReader);
}
@@ -1170,15 +1215,17 @@ private void testBrokenSearchTreePointer(Reader reader) {
assertThat(ex.getMessage(), containsString("The MaxMind DB file's search tree is corrupt"));
}
- @Test
- public void testBrokenDataPointerFile() throws IOException {
- this.testReader = new Reader(getFile("MaxMind-DB-test-broken-pointers-24.mmdb"));
+ @ParameterizedTest
+ @MethodSource("chunkSizes")
+ public void testBrokenDataPointerFile(int chunkSize) throws IOException {
+ this.testReader = new Reader(getFile("MaxMind-DB-test-broken-pointers-24.mmdb"), chunkSize);
this.testBrokenDataPointer(this.testReader);
}
- @Test
- public void testBrokenDataPointerStream() throws IOException {
- this.testReader = new Reader(getStream("MaxMind-DB-test-broken-pointers-24.mmdb"));
+ @ParameterizedTest
+ @MethodSource("chunkSizes")
+ public void testBrokenDataPointerStream(int chunkSize) throws IOException {
+ this.testReader = new Reader(getStream("MaxMind-DB-test-broken-pointers-24.mmdb"), chunkSize);
this.testBrokenDataPointer(this.testReader);
}
@@ -1189,9 +1236,10 @@ private void testBrokenDataPointer(Reader reader) {
containsString("The MaxMind DB file's data section contains bad data"));
}
- @Test
- public void testClosedReaderThrowsException() throws IOException {
- Reader reader = new Reader(getFile("MaxMind-DB-test-decoder.mmdb"));
+ @ParameterizedTest
+ @MethodSource("chunkSizes")
+ public void testClosedReaderThrowsException(int chunkSize) throws IOException {
+ Reader reader = new Reader(getFile("MaxMind-DB-test-decoder.mmdb"), chunkSize);
reader.close();
ClosedDatabaseException ex = assertThrows(ClosedDatabaseException.class,
@@ -1199,9 +1247,10 @@ public void testClosedReaderThrowsException() throws IOException {
assertEquals("The MaxMind DB has been closed.", ex.getMessage());
}
- @Test
- public void voidTestMapKeyIsString() throws IOException {
- this.testReader = new Reader(getFile("GeoIP2-City-Test.mmdb"));
+ @ParameterizedTest
+ @MethodSource("chunkSizes")
+ public void voidTestMapKeyIsString(int chunkSize) throws IOException {
+ this.testReader = new Reader(getFile("GeoIP2-City-Test.mmdb"), chunkSize);
DeserializationException ex = assertThrows(
DeserializationException.class,
diff --git a/src/test/java/com/maxmind/db/TestDecoder.java b/src/test/java/com/maxmind/db/TestDecoder.java
index 445749bb..99bf9896 100644
--- a/src/test/java/com/maxmind/db/TestDecoder.java
+++ b/src/test/java/com/maxmind/db/TestDecoder.java
@@ -5,7 +5,7 @@
final class TestDecoder extends Decoder {
- TestDecoder(NodeCache cache, ByteBuffer buffer, long pointerBase) {
+ TestDecoder(NodeCache cache, Buffer buffer, long pointerBase) {
super(cache, buffer, pointerBase);
}