diff --git a/pinot-common/pom.xml b/pinot-common/pom.xml
index 6d47cc65821..a90cc050db2 100644
--- a/pinot-common/pom.xml
+++ b/pinot-common/pom.xml
@@ -139,6 +139,10 @@
org.xerial.snappy
snappy-java
+
+ com.github.luben
+ zstd-jni
+
org.apache.logging.log4j
log4j-slf4j-impl
diff --git a/pinot-common/src/test/java/org/apache/pinot/common/utils/config/TableConfigSerDeTest.java b/pinot-common/src/test/java/org/apache/pinot/common/utils/config/TableConfigSerDeTest.java
index 435c182d012..01afcd3f8a5 100644
--- a/pinot-common/src/test/java/org/apache/pinot/common/utils/config/TableConfigSerDeTest.java
+++ b/pinot-common/src/test/java/org/apache/pinot/common/utils/config/TableConfigSerDeTest.java
@@ -228,8 +228,8 @@ public void testSerDe()
properties.put("foo", "bar");
properties.put("foobar", "potato");
List fieldConfigList = Arrays.asList(
- new FieldConfig("column1", FieldConfig.EncodingType.DICTIONARY, FieldConfig.IndexType.INVERTED, properties),
- new FieldConfig("column2", null, null, null));
+ new FieldConfig("column1", FieldConfig.EncodingType.DICTIONARY, FieldConfig.IndexType.INVERTED, null, properties),
+ new FieldConfig("column2", null, null, null, null));
TableConfig tableConfig = tableConfigBuilder.setFieldConfigList(fieldConfigList).build();
checkFieldConfig(tableConfig);
diff --git a/pinot-core/src/test/java/org/apache/pinot/core/util/TableConfigUtilsTest.java b/pinot-core/src/test/java/org/apache/pinot/core/util/TableConfigUtilsTest.java
index 995cb9f9705..675c180777b 100644
--- a/pinot-core/src/test/java/org/apache/pinot/core/util/TableConfigUtilsTest.java
+++ b/pinot-core/src/test/java/org/apache/pinot/core/util/TableConfigUtilsTest.java
@@ -641,7 +641,7 @@ public void testValidateFieldConfig() {
try {
FieldConfig fieldConfig =
- new FieldConfig("myCol1", FieldConfig.EncodingType.DICTIONARY, FieldConfig.IndexType.FST, null);
+ new FieldConfig("myCol1", FieldConfig.EncodingType.DICTIONARY, FieldConfig.IndexType.FST, null, null);
tableConfig.setFieldConfigList(Arrays.asList(fieldConfig));
TableConfigUtils.validate(tableConfig, schema);
Assert.fail("Should fail for with conflicting encoding type of myCol1");
@@ -654,7 +654,7 @@ public void testValidateFieldConfig() {
.setNoDictionaryColumns(Arrays.asList("myCol1")).build();
try {
FieldConfig fieldConfig =
- new FieldConfig("myCol1", FieldConfig.EncodingType.RAW, FieldConfig.IndexType.FST, null);
+ new FieldConfig("myCol1", FieldConfig.EncodingType.RAW, FieldConfig.IndexType.FST, null, null);
tableConfig.setFieldConfigList(Arrays.asList(fieldConfig));
TableConfigUtils.validate(tableConfig, schema);
Assert.fail("Should fail since FST index is enabled on RAW encoding type");
@@ -665,7 +665,7 @@ public void testValidateFieldConfig() {
tableConfig = new TableConfigBuilder(TableType.OFFLINE).setTableName(TABLE_NAME).build();
try {
FieldConfig fieldConfig =
- new FieldConfig("myCol2", FieldConfig.EncodingType.DICTIONARY, FieldConfig.IndexType.FST, null);
+ new FieldConfig("myCol2", FieldConfig.EncodingType.DICTIONARY, FieldConfig.IndexType.FST, null, null);
tableConfig.setFieldConfigList(Arrays.asList(fieldConfig));
TableConfigUtils.validate(tableConfig, schema);
Assert.fail("Should fail since FST index is enabled on multi value column");
@@ -676,7 +676,7 @@ public void testValidateFieldConfig() {
tableConfig = new TableConfigBuilder(TableType.OFFLINE).setTableName(TABLE_NAME).build();
try {
FieldConfig fieldConfig =
- new FieldConfig("intCol", FieldConfig.EncodingType.DICTIONARY, FieldConfig.IndexType.FST, null);
+ new FieldConfig("intCol", FieldConfig.EncodingType.DICTIONARY, FieldConfig.IndexType.FST, null, null);
tableConfig.setFieldConfigList(Arrays.asList(fieldConfig));
TableConfigUtils.validate(tableConfig, schema);
Assert.fail("Should fail since FST index is enabled on non String column");
@@ -688,7 +688,7 @@ public void testValidateFieldConfig() {
.setNoDictionaryColumns(Arrays.asList("myCol2", "intCol")).build();
try {
FieldConfig fieldConfig =
- new FieldConfig("myCol2", FieldConfig.EncodingType.RAW, FieldConfig.IndexType.TEXT, null);
+ new FieldConfig("myCol2", FieldConfig.EncodingType.RAW, FieldConfig.IndexType.TEXT, null, null);
tableConfig.setFieldConfigList(Arrays.asList(fieldConfig));
TableConfigUtils.validate(tableConfig, schema);
Assert.fail("Should fail since TEXT index is enabled on multi value column");
@@ -700,7 +700,7 @@ public void testValidateFieldConfig() {
.setNoDictionaryColumns(Arrays.asList("myCol2", "intCol")).build();
try {
FieldConfig fieldConfig =
- new FieldConfig("intCol", FieldConfig.EncodingType.RAW, FieldConfig.IndexType.TEXT, null);
+ new FieldConfig("intCol", FieldConfig.EncodingType.RAW, FieldConfig.IndexType.TEXT, null, null);
tableConfig.setFieldConfigList(Arrays.asList(fieldConfig));
TableConfigUtils.validate(tableConfig, schema);
Assert.fail("Should fail since TEXT index is enabled on non String column");
@@ -712,7 +712,7 @@ public void testValidateFieldConfig() {
.setNoDictionaryColumns(Arrays.asList("myCol1")).build();
try {
FieldConfig fieldConfig =
- new FieldConfig("myCol21", FieldConfig.EncodingType.RAW, FieldConfig.IndexType.FST, null);
+ new FieldConfig("myCol21", FieldConfig.EncodingType.RAW, FieldConfig.IndexType.FST, null, null);
tableConfig.setFieldConfigList(Arrays.asList(fieldConfig));
TableConfigUtils.validate(tableConfig, schema);
Assert.fail("Should fail since field name is not present in schema");
@@ -720,6 +720,28 @@ public void testValidateFieldConfig() {
Assert.assertEquals(e.getMessage(),
"Column Name myCol21 defined in field config list must be a valid column defined in the schema");
}
+
+ tableConfig = new TableConfigBuilder(TableType.OFFLINE).setTableName(TABLE_NAME).build();
+ try {
+ FieldConfig fieldConfig =
+ new FieldConfig("intCol", FieldConfig.EncodingType.DICTIONARY, null, FieldConfig.CompressionCodec.SNAPPY, null);
+ tableConfig.setFieldConfigList(Arrays.asList(fieldConfig));
+ TableConfigUtils.validate(tableConfig, schema);
+ Assert.fail("Should fail since dictionary encoding does not support compression codec snappy");
+ } catch (Exception e) {
+ Assert.assertEquals(e.getMessage(), "Set compression codec to null for dictionary encoding type");
+ }
+
+ tableConfig = new TableConfigBuilder(TableType.OFFLINE).setTableName(TABLE_NAME).build();
+ try {
+ FieldConfig fieldConfig =
+ new FieldConfig("intCol", FieldConfig.EncodingType.DICTIONARY, null, FieldConfig.CompressionCodec.ZSTANDARD, null);
+ tableConfig.setFieldConfigList(Arrays.asList(fieldConfig));
+ TableConfigUtils.validate(tableConfig, schema);
+ Assert.fail("Should fail since dictionary encoding does not support compression codec zstandard");
+ } catch (Exception e) {
+ Assert.assertEquals(e.getMessage(), "Set compression codec to null for dictionary encoding type");
+ }
}
@Test
@@ -888,7 +910,7 @@ public void testValidateIndexingConfig() {
// expected
}
- FieldConfig fieldConfig = new FieldConfig("myCol2", null, null, null);
+ FieldConfig fieldConfig = new FieldConfig("myCol2", null, null, null, null);
tableConfig = new TableConfigBuilder(TableType.OFFLINE).setTableName(TABLE_NAME)
.setFieldConfigList(Arrays.asList(fieldConfig)).build();
try {
diff --git a/pinot-core/src/test/java/org/apache/pinot/queries/FSTBasedRegexpLikeQueriesTest.java b/pinot-core/src/test/java/org/apache/pinot/queries/FSTBasedRegexpLikeQueriesTest.java
index 012059cba54..7155844f6bb 100644
--- a/pinot-core/src/test/java/org/apache/pinot/queries/FSTBasedRegexpLikeQueriesTest.java
+++ b/pinot-core/src/test/java/org/apache/pinot/queries/FSTBasedRegexpLikeQueriesTest.java
@@ -156,8 +156,8 @@ private void buildSegment()
List rows = createTestData(NUM_ROWS);
List fieldConfigs = new ArrayList<>();
fieldConfigs
- .add(new FieldConfig(DOMAIN_NAMES_COL, FieldConfig.EncodingType.DICTIONARY, FieldConfig.IndexType.FST, null));
- fieldConfigs.add(new FieldConfig(URL_COL, FieldConfig.EncodingType.DICTIONARY, FieldConfig.IndexType.FST, null));
+ .add(new FieldConfig(DOMAIN_NAMES_COL, FieldConfig.EncodingType.DICTIONARY, FieldConfig.IndexType.FST, null, null));
+ fieldConfigs.add(new FieldConfig(URL_COL, FieldConfig.EncodingType.DICTIONARY, FieldConfig.IndexType.FST, null, null));
TableConfig tableConfig = new TableConfigBuilder(TableType.OFFLINE).setTableName(TABLE_NAME)
.setInvertedIndexColumns(Arrays.asList(DOMAIN_NAMES_COL)).setFieldConfigList(fieldConfigs).build();
diff --git a/pinot-core/src/test/java/org/apache/pinot/queries/H3IndexQueriesTest.java b/pinot-core/src/test/java/org/apache/pinot/queries/H3IndexQueriesTest.java
index 075606cabb7..741c0e36bf1 100644
--- a/pinot-core/src/test/java/org/apache/pinot/queries/H3IndexQueriesTest.java
+++ b/pinot-core/src/test/java/org/apache/pinot/queries/H3IndexQueriesTest.java
@@ -72,7 +72,7 @@ public class H3IndexQueriesTest extends BaseQueriesTest {
private static final TableConfig TABLE_CONFIG = new TableConfigBuilder(TableType.OFFLINE).setTableName(RAW_TABLE_NAME)
.setFieldConfigList(Collections.singletonList(
new FieldConfig(H3_INDEX_COLUMN, FieldConfig.EncodingType.DICTIONARY, FieldConfig.IndexType.H3,
- H3_INDEX_PROPERTIES))).build();
+ null, H3_INDEX_PROPERTIES))).build();
private IndexSegment _indexSegment;
diff --git a/pinot-core/src/test/java/org/apache/pinot/queries/NoDictionaryCompressionQueriesTest.java b/pinot-core/src/test/java/org/apache/pinot/queries/NoDictionaryCompressionQueriesTest.java
new file mode 100644
index 00000000000..0b76bf7191d
--- /dev/null
+++ b/pinot-core/src/test/java/org/apache/pinot/queries/NoDictionaryCompressionQueriesTest.java
@@ -0,0 +1,375 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.pinot.queries;
+
+import java.io.File;
+import java.io.Serializable;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Random;
+import java.util.Set;
+import org.apache.commons.io.FileUtils;
+import org.apache.commons.lang3.RandomStringUtils;
+import org.apache.commons.lang3.RandomUtils;
+import org.apache.pinot.core.operator.blocks.IntermediateResultsBlock;
+import org.apache.pinot.core.operator.query.SelectionOnlyOperator;
+import org.apache.pinot.segment.local.indexsegment.immutable.ImmutableSegmentLoader;
+import org.apache.pinot.segment.local.segment.creator.impl.SegmentIndexCreationDriverImpl;
+import org.apache.pinot.segment.local.segment.index.loader.IndexLoadingConfig;
+import org.apache.pinot.segment.local.segment.readers.GenericRowRecordReader;
+import org.apache.pinot.segment.spi.ImmutableSegment;
+import org.apache.pinot.segment.spi.IndexSegment;
+import org.apache.pinot.segment.spi.creator.SegmentGeneratorConfig;
+import org.apache.pinot.spi.config.table.FieldConfig;
+import org.apache.pinot.spi.config.table.TableConfig;
+import org.apache.pinot.spi.config.table.TableType;
+import org.apache.pinot.spi.data.FieldSpec;
+import org.apache.pinot.spi.data.Schema;
+import org.apache.pinot.spi.data.readers.GenericRow;
+import org.apache.pinot.spi.data.readers.RecordReader;
+import org.apache.pinot.spi.utils.builder.TableConfigBuilder;
+import org.testng.Assert;
+import org.testng.annotations.AfterClass;
+import org.testng.annotations.BeforeClass;
+import org.testng.annotations.Test;
+
+
+/**
+ * Functional tests for compression type feature.
+ * The tests use three kinds of input data
+ * (1) string
+ * (2) integer
+ * (3) long
+ */
+public class NoDictionaryCompressionQueriesTest extends BaseQueriesTest {
+ private static final File INDEX_DIR = new File(FileUtils.getTempDirectory(), "CompressionCodecQueriesTest");
+ private static final String TABLE_NAME = "MyTable";
+ private static final String SEGMENT_NAME = "testSegment";
+
+ private static final String SNAPPY_STRING = "SNAPPY_STRING";
+ private static final String PASS_THROUGH_STRING = "PASS_THROUGH_STRING";
+ private static final String ZSTANDARD_STRING = "ZSTANDARD_STRING";
+
+ private static final String SNAPPY_LONG = "SNAPPY_LONG";
+ private static final String PASS_THROUGH_LONG = "PASS_THROUGH_LONG";
+ private static final String ZSTANDARD_LONG = "ZSTANDARD_LONG";
+
+ private static final String SNAPPY_INTEGER = "SNAPPY_INTEGER";
+ private static final String PASS_THROUGH_INTEGER = "PASS_THROUGH_INTEGER";
+ private static final String ZSTANDARD_INTEGER = "ZSTANDARD_INTEGER";
+
+ private static final List RAW_SNAPPY_INDEX_COLUMNS = Arrays
+ .asList(SNAPPY_STRING, SNAPPY_LONG, SNAPPY_INTEGER);
+
+ private static final List RAW_ZSTANDARD_INDEX_COLUMNS = Arrays
+ .asList(ZSTANDARD_STRING, ZSTANDARD_LONG, ZSTANDARD_INTEGER);
+
+ private static final List RAW_PASS_THROUGH_INDEX_COLUMNS = Arrays
+ .asList(PASS_THROUGH_STRING, PASS_THROUGH_LONG, PASS_THROUGH_INTEGER);
+
+ private final List _rows = new ArrayList<>();
+
+ private IndexSegment _indexSegment;
+ private List _indexSegments;
+ private List rows;
+
+ // No implicit filter: each test supplies its own WHERE clause (or none) in the query string.
+ @Override
+ protected String getFilter() {
+ return "";
+ }
+
+ // Single segment queried by the per-segment test path; populated once in setUp().
+ @Override
+ protected IndexSegment getIndexSegment() {
+ return _indexSegment;
+ }
+
+ // Segment list for the inter-segment (server-level) test path; setUp() registers the
+ // same segment twice to simulate multiple segments without extra build cost.
+ @Override
+ protected List getIndexSegments() {
+ return _indexSegments;
+ }
+
+ /**
+ * Builds one segment with raw (no-dictionary) columns covering all three compression
+ * codecs, then reloads it with an IndexLoadingConfig that marks those same columns as
+ * no-dictionary so the loader exercises the compressed forward-index read path.
+ */
+ @BeforeClass
+ public void setUp()
+ throws Exception {
+ FileUtils.deleteQuietly(INDEX_DIR);
+
+ buildSegment();
+
+ IndexLoadingConfig indexLoadingConfig = new IndexLoadingConfig();
+ Set indexColumns = new HashSet<>();
+ indexColumns.addAll(RAW_SNAPPY_INDEX_COLUMNS);
+ indexColumns.addAll(RAW_PASS_THROUGH_INDEX_COLUMNS);
+ indexColumns.addAll(RAW_ZSTANDARD_INDEX_COLUMNS);
+
+ indexLoadingConfig.getNoDictionaryColumns().addAll(indexColumns);
+ ImmutableSegment immutableSegment =
+ ImmutableSegmentLoader.load(new File(INDEX_DIR, SEGMENT_NAME), indexLoadingConfig);
+ _indexSegment = immutableSegment;
+ // Same segment registered twice: the inter-segment query path just needs >1 entry.
+ _indexSegments = Arrays.asList(immutableSegment, immutableSegment);
+ }
+
+ // Releases segment resources before removing the on-disk index directory.
+ @AfterClass
+ public void tearDown() {
+ _indexSegment.destroy();
+ FileUtils.deleteQuietly(INDEX_DIR);
+ }
+
+ /**
+ * Creates the test segment: nine single-value raw columns (string/int/long, one per
+ * codec), each declared no-dictionary with an explicit per-column compression codec
+ * via FieldConfig, then runs the standard segment creation driver over the generated rows.
+ */
+ private void buildSegment()
+ throws Exception {
+ rows = createTestData();
+
+ // NOTE(review): generic type parameters (e.g. List<FieldConfig>) appear stripped in this
+ // patch text — confirm against the applied file.
+ List fieldConfigs = new ArrayList<>(RAW_SNAPPY_INDEX_COLUMNS.size() + RAW_ZSTANDARD_INDEX_COLUMNS.size() + RAW_PASS_THROUGH_INDEX_COLUMNS.size());
+ for (String indexColumn : RAW_SNAPPY_INDEX_COLUMNS) {
+ fieldConfigs
+ .add(new FieldConfig(indexColumn, FieldConfig.EncodingType.RAW, null, FieldConfig.CompressionCodec.SNAPPY, null));
+ }
+
+ for (String indexColumn : RAW_ZSTANDARD_INDEX_COLUMNS) {
+ fieldConfigs
+ .add(new FieldConfig(indexColumn, FieldConfig.EncodingType.RAW, null, FieldConfig.CompressionCodec.ZSTANDARD, null));
+ }
+
+ for (String indexColumn : RAW_PASS_THROUGH_INDEX_COLUMNS) {
+ fieldConfigs
+ .add(new FieldConfig(indexColumn, FieldConfig.EncodingType.RAW, null, FieldConfig.CompressionCodec.PASS_THROUGH, null));
+ }
+
+ // NOTE(review): underscore prefix is the project's *field* naming convention; as a local
+ // this would conventionally be plain noDictionaryColumns.
+ List _noDictionaryColumns = new ArrayList<>();
+ _noDictionaryColumns.addAll(RAW_SNAPPY_INDEX_COLUMNS);
+ _noDictionaryColumns.addAll(RAW_ZSTANDARD_INDEX_COLUMNS);
+ _noDictionaryColumns.addAll(RAW_PASS_THROUGH_INDEX_COLUMNS);
+
+ TableConfig tableConfig = new TableConfigBuilder(TableType.OFFLINE).setTableName(TABLE_NAME)
+ .setNoDictionaryColumns(_noDictionaryColumns)
+ .setFieldConfigList(fieldConfigs).build();
+ Schema schema = new Schema.SchemaBuilder().setSchemaName(TABLE_NAME)
+ .addSingleValueDimension(SNAPPY_STRING, FieldSpec.DataType.STRING)
+ .addSingleValueDimension(PASS_THROUGH_STRING, FieldSpec.DataType.STRING)
+ .addSingleValueDimension(ZSTANDARD_STRING, FieldSpec.DataType.STRING)
+ .addSingleValueDimension(SNAPPY_INTEGER, FieldSpec.DataType.INT)
+ .addSingleValueDimension(ZSTANDARD_INTEGER, FieldSpec.DataType.INT)
+ .addSingleValueDimension(PASS_THROUGH_INTEGER, FieldSpec.DataType.INT)
+ .addSingleValueDimension(SNAPPY_LONG, FieldSpec.DataType.LONG)
+ .addSingleValueDimension(ZSTANDARD_LONG, FieldSpec.DataType.LONG)
+ .addSingleValueDimension(PASS_THROUGH_LONG, FieldSpec.DataType.LONG)
+ .build();
+ SegmentGeneratorConfig config = new SegmentGeneratorConfig(tableConfig, schema);
+ config.setOutDir(INDEX_DIR.getPath());
+ config.setTableName(TABLE_NAME);
+ config.setSegmentName(SEGMENT_NAME);
+ SegmentIndexCreationDriverImpl driver = new SegmentIndexCreationDriverImpl();
+ try (RecordReader recordReader = new GenericRowRecordReader(rows)) {
+ driver.init(config, recordReader);
+ driver.build();
+ }
+ }
+
+ /**
+ * Generates 1000 rows. Every 10th row carries fixed sentinel values
+ * ("hello_world_123" / 1001 / 1001L) so filter tests have a known match set; the rest
+ * are random. Each (string, int, long) triple is written to all three codec columns of
+ * its type, so per-codec results can be compared against identical data.
+ */
+ private List createTestData()
+ throws Exception {
+ List rows = new ArrayList<>();
+
+ //Generate random data
+ int rowLength = 1000;
+ Random random = new Random();
+ String[] tempStringRows = new String[rowLength];
+ Integer[] tempIntRows = new Integer[rowLength];
+ Long[] tempLongRows = new Long[rowLength];
+
+ for (int i = 0; i < rowLength; i++) {
+ //Adding a fixed value to check for filter queries
+ if(i % 10 == 0) {
+ tempStringRows[i] = "hello_world_123";
+ // 1001 exceeds the random range [0, rowLength), so "> 1000" filters match only sentinels.
+ tempIntRows[i] = 1001;
+ tempLongRows[i] = 1001L;
+ }
+ else {
+ // Random alphanumeric string of length 0-99; ints/longs drawn from [0, rowLength).
+ tempStringRows[i] = RandomStringUtils.random(random.nextInt(100), true, true);
+ tempIntRows[i] = RandomUtils.nextInt(0, rowLength);
+ tempLongRows[i] = RandomUtils.nextLong(0, rowLength);
+ }
+
+ }
+
+ for (int i = 0; i < rowLength; i++) {
+ GenericRow row = new GenericRow();
+ row.putValue(SNAPPY_STRING, tempStringRows[i]);
+ row.putValue(ZSTANDARD_STRING, tempStringRows[i]);
+ row.putValue(PASS_THROUGH_STRING, tempStringRows[i]);
+ row.putValue(SNAPPY_INTEGER, tempIntRows[i]);
+ row.putValue(ZSTANDARD_INTEGER, tempIntRows[i]);
+ row.putValue(PASS_THROUGH_INTEGER, tempIntRows[i]);
+ row.putValue(SNAPPY_LONG, tempLongRows[i]);
+ row.putValue(ZSTANDARD_LONG, tempLongRows[i]);
+ row.putValue(PASS_THROUGH_LONG, tempLongRows[i]);
+ rows.add(row);
+ }
+ return rows;
+ }
+
+ /**
+ * Selects all nine raw columns (every codec x every type) and checks each of the 1000
+ * rows round-trips through its compressed forward index unchanged.
+ */
+ @Test
+ public void testQueriesWithCompressionCodec()
+ throws Exception {
+
+ String query =
+ "SELECT SNAPPY_STRING, ZSTANDARD_STRING, PASS_THROUGH_STRING, SNAPPY_INTEGER, ZSTANDARD_INTEGER, PASS_THROUGH_INTEGER, "
+ + "SNAPPY_LONG, ZSTANDARD_LONG, PASS_THROUGH_LONG FROM MyTable LIMIT 1000";
+ ArrayList expected = new ArrayList<>();
+
+ // Expected result is the generated data itself, column order matching the SELECT list.
+ for(GenericRow row: rows) {
+ expected.add(new Serializable[]{
+ String.valueOf(row.getValue(SNAPPY_STRING)), String.valueOf(row.getValue(ZSTANDARD_STRING)), String.valueOf(row.getValue(PASS_THROUGH_STRING)),
+ (Integer) row.getValue(SNAPPY_INTEGER), (Integer) row.getValue(ZSTANDARD_INTEGER), (Integer) row.getValue(PASS_THROUGH_INTEGER),
+ (Long) row.getValue(SNAPPY_LONG), (Long)row.getValue(ZSTANDARD_LONG), (Long) row.getValue(PASS_THROUGH_LONG),
+ });
+ }
+ testSelectQueryHelper(query, expected.size(), expected);
+ }
+
+ /**
+ * Range filter over a ZSTANDARD-compressed raw integer column: "> 1000" matches only the
+ * fixed sentinel value 1001 (random values are below 1000 — see createTestData()).
+ */
+ @Test
+ public void testZstandardIntegerFilterQueriesWithCompressionCodec()
+ throws Exception {
+
+ String query =
+ "SELECT ZSTANDARD_INTEGER FROM MyTable "
+ + "WHERE ZSTANDARD_INTEGER > 1000 LIMIT 1000";
+ ArrayList expected = new ArrayList<>();
+
+ // Recompute the expected match set from the generated rows with the same predicate.
+ for(GenericRow row: rows) {
+ int value = (Integer) row.getValue(ZSTANDARD_INTEGER);
+ if(value > 1000) {
+ expected.add(new Serializable[]{value});
+ }
+ }
+ testSelectQueryHelper(query, expected.size(), expected);
+ }
+
+ /**
+ * Range filter over a SNAPPY-compressed raw integer column; expected matches are
+ * recomputed from the generated rows with the same "> 100" predicate.
+ */
+ @Test
+ public void testSnappyIntegerFilterQueriesWithCompressionCodec()
+ throws Exception {
+
+ String query =
+ "SELECT SNAPPY_INTEGER FROM MyTable "
+ + "WHERE SNAPPY_INTEGER > 100 LIMIT 1000";
+ ArrayList expected = new ArrayList<>();
+
+ for(GenericRow row: rows) {
+ int value = (Integer) row.getValue(SNAPPY_INTEGER);
+ if(value > 100) {
+ expected.add(new Serializable[]{value});
+ }
+ }
+ testSelectQueryHelper(query, expected.size(), expected);
+ }
+
+ /**
+ * Range filter over a PASS_THROUGH (uncompressed raw) integer column; expected matches
+ * are recomputed from the generated rows with the same "> 100" predicate.
+ */
+ @Test
+ public void testPassThroughIntegerFilterQueriesWithCompressionCodec()
+ throws Exception {
+
+ String query =
+ "SELECT PASS_THROUGH_INTEGER FROM MyTable "
+ + "WHERE PASS_THROUGH_INTEGER > 100 LIMIT 1000";
+ ArrayList expected = new ArrayList<>();
+
+ for(GenericRow row: rows) {
+ int value = (Integer) row.getValue(PASS_THROUGH_INTEGER);
+ if(value > 100) {
+ expected.add(new Serializable[]{value});
+ }
+ }
+ testSelectQueryHelper(query, expected.size(), expected);
+ }
+
+ /**
+ * Equality filter over a ZSTANDARD-compressed raw string column; matches exactly the
+ * sentinel rows carrying "hello_world_123" (every 10th row — see createTestData()).
+ */
+ @Test
+ public void testZstandardStringFilterQueriesWithCompressionCodec()
+ throws Exception {
+ String query =
+ "SELECT ZSTANDARD_STRING FROM MyTable WHERE ZSTANDARD_STRING = 'hello_world_123' LIMIT 1000";
+ ArrayList expected = new ArrayList<>();
+
+ for(GenericRow row: rows) {
+ String value = String.valueOf(row.getValue(ZSTANDARD_STRING));
+ if(value.equals("hello_world_123")) {
+ expected.add(new Serializable[]{value});
+ }
+ }
+ testSelectQueryHelper(query, expected.size(), expected);
+ }
+
+ /**
+ * Equality filter over a SNAPPY-compressed raw string column; matches exactly the
+ * sentinel rows carrying "hello_world_123" (every 10th row — see createTestData()).
+ */
+ @Test
+ public void testSnappyStringFilterQueriesWithCompressionCodec()
+ throws Exception {
+ String query =
+ "SELECT SNAPPY_STRING FROM MyTable WHERE SNAPPY_STRING = 'hello_world_123' LIMIT 1000";
+ ArrayList expected = new ArrayList<>();
+
+ for(GenericRow row: rows) {
+ String value = String.valueOf(row.getValue(SNAPPY_STRING));
+ if(value.equals("hello_world_123")) {
+ expected.add(new Serializable[]{value});
+ }
+ }
+ testSelectQueryHelper(query, expected.size(), expected);
+ }
+
+ /*
+ * Helper methods for tests
+ */
+ private void testSelectQueryHelper(String query, int expectedResultSize, List expectedResults)
+ throws Exception {
+ SelectionOnlyOperator operator = getOperatorForSqlQuery(query);
+ IntermediateResultsBlock operatorResult = operator.nextBlock();
+ List
+
+ com.github.luben
+ zstd-jni
+ ${zstd-jni.version}
+
org.apache.commons
commons-compress