Add LZF safe encoder in LZFCompressor #7466

Closed · wants to merge 1 commit
LZFCompressedStreamOutput.java

@@ -22,7 +22,6 @@
 import com.ning.compress.BufferRecycler;
 import com.ning.compress.lzf.ChunkEncoder;
 import com.ning.compress.lzf.LZFChunk;
-import com.ning.compress.lzf.util.ChunkEncoderFactory;
 import org.elasticsearch.common.compress.CompressedStreamOutput;
 import org.elasticsearch.common.io.stream.StreamOutput;

@@ -35,12 +34,12 @@ public class LZFCompressedStreamOutput extends CompressedStreamOutput<LZFCompres
     private final BufferRecycler recycler;
     private final ChunkEncoder encoder;

-    public LZFCompressedStreamOutput(StreamOutput out) throws IOException {
+    public LZFCompressedStreamOutput(StreamOutput out, ChunkEncoder encoder) throws IOException {
         super(out, LZFCompressorContext.INSTANCE);
         this.recycler = BufferRecycler.instance();
         this.uncompressed = this.recycler.allocOutputBuffer(LZFChunk.MAX_CHUNK_LEN);
         this.uncompressedLength = LZFChunk.MAX_CHUNK_LEN;
-        this.encoder = ChunkEncoderFactory.safeInstance();
+        this.encoder = encoder;
     }

     @Override
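With this change LZFCompressedStreamOutput no longer creates its own encoder; the caller decides which ChunkEncoder to use and passes it in (LZFCompressor hands over its shared safe instance, see the next file). Below is a minimal usage sketch, not code from this PR: it assumes BytesStreamOutput as the concrete StreamOutput and the usual StreamOutput#writeBytes/close semantics.

import com.ning.compress.lzf.ChunkEncoder;
import com.ning.compress.lzf.util.ChunkEncoderFactory;
import org.elasticsearch.common.compress.lzf.LZFCompressedStreamOutput;
import org.elasticsearch.common.io.stream.BytesStreamOutput;

import java.nio.charset.StandardCharsets;

public class LZFStreamOutputSketch {
    public static void main(String[] args) throws Exception {
        // The caller now owns the encoder; safeInstance() avoids the sun.misc.Unsafe-based variant.
        ChunkEncoder encoder = ChunkEncoderFactory.safeInstance();
        BytesStreamOutput target = new BytesStreamOutput();    // assumed concrete StreamOutput
        LZFCompressedStreamOutput out = new LZFCompressedStreamOutput(target, encoder);

        byte[] payload = "hello lzf".getBytes(StandardCharsets.UTF_8);
        out.writeBytes(payload, 0, payload.length);
        out.close();                                           // flushes the final LZF chunk

        System.out.println("compressed length: " + target.bytes().length());
    }
}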
LZFCompressor.java

@@ -20,9 +20,11 @@
 package org.elasticsearch.common.compress.lzf;

 import com.ning.compress.lzf.ChunkDecoder;
+import com.ning.compress.lzf.ChunkEncoder;
 import com.ning.compress.lzf.LZFChunk;
 import com.ning.compress.lzf.LZFEncoder;
 import com.ning.compress.lzf.util.ChunkDecoderFactory;
+import com.ning.compress.lzf.util.ChunkEncoderFactory;
 import org.apache.lucene.store.IndexInput;
 import org.apache.lucene.util.Constants;
 import org.elasticsearch.common.bytes.BytesReference;
@@ -46,11 +48,16 @@ public class LZFCompressor implements Compressor {

     public static final String TYPE = "lzf";

+    private ChunkEncoder encoder;
+
     private ChunkDecoder decoder;

     public LZFCompressor() {
+        this.encoder = ChunkEncoderFactory.safeInstance();
         this.decoder = ChunkDecoderFactory.safeInstance();
-        Loggers.getLogger(LZFCompressor.class).debug("using [{}] decoder", this.decoder.getClass().getSimpleName());
+        Loggers.getLogger(LZFCompressor.class).debug("using encoder [{}] and decoder [{}]",
+            this.encoder.getClass().getSimpleName(),
+            this.decoder.getClass().getSimpleName());
     }

     @Override
@@ -110,7 +117,7 @@ public byte[] uncompress(byte[] data, int offset, int length) throws IOException

     @Override
     public byte[] compress(byte[] data, int offset, int length) throws IOException {
-        return LZFEncoder.encode(data, offset, length);
+        return LZFEncoder.encode(encoder, data, offset, length);
     }

     @Override
@@ -120,7 +127,7 @@ public CompressedStreamInput streamInput(StreamInput in) throws IOException {

     @Override
     public CompressedStreamOutput streamOutput(StreamOutput out) throws IOException {
-        return new LZFCompressedStreamOutput(out);
+        return new LZFCompressedStreamOutput(out, encoder);
     }

     @Override
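The one-shot compress path now goes through the same shared safe encoder: LZFEncoder.encode(encoder, data, offset, length) compresses with the ChunkEncoder created once in the constructor instead of letting the static helper pick an encoder implementation per call. Below is a standalone round-trip sketch of that call shape, again not from this PR; the decode side is illustrative only and assumes ChunkDecoder#decode(byte[]), which this change does not touch.

import com.ning.compress.lzf.ChunkDecoder;
import com.ning.compress.lzf.ChunkEncoder;
import com.ning.compress.lzf.LZFEncoder;
import com.ning.compress.lzf.util.ChunkDecoderFactory;
import com.ning.compress.lzf.util.ChunkEncoderFactory;

import java.nio.charset.StandardCharsets;
import java.util.Arrays;

public class SafeLzfRoundTripSketch {
    public static void main(String[] args) throws Exception {
        // Same factories LZFCompressor now uses in its constructor.
        ChunkEncoder encoder = ChunkEncoderFactory.safeInstance();
        ChunkDecoder decoder = ChunkDecoderFactory.safeInstance();

        byte[] data = "the quick brown fox jumps over the lazy dog".getBytes(StandardCharsets.UTF_8);

        // Same call shape as the new LZFCompressor#compress(byte[], int, int).
        byte[] compressed = LZFEncoder.encode(encoder, data, 0, data.length);

        // Assumed ChunkDecoder API for the reverse direction.
        byte[] restored = decoder.decode(compressed);

        System.out.println("round trip ok: " + Arrays.equals(data, restored));
    }
}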