[SPARK-44805][SQL] getBytes/getShorts/getInts/etc. should work in a column vector that has a dictionary #42850

Closed · wants to merge 6 commits
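Before the diff itself, a minimal sketch (not part of this change set, mirroring the new tests further down) of the behavior the patch addresses: the batch getters on a writable column vector used to assert that no dictionary was set, so a dictionary-encoded vector could only be read row by row; with this change the batch getters fall back to decoding each row through the dictionary.

import org.apache.spark.sql.execution.columnar.ColumnDictionary
import org.apache.spark.sql.execution.vectorized.OnHeapColumnVector
import org.apache.spark.sql.types.IntegerType

// Dictionary-encoded vector: row ids 0..2 point at dictionary entries 7, 8, 9.
val vector = new OnHeapColumnVector(3, IntegerType)
vector.setDictionary(new ColumnDictionary(Array[Int](7, 8, 9)))
vector.reserveDictionaryIds(3)
vector.getDictionaryIds.putInt(0, 0)
vector.getDictionaryIds.putInt(1, 1)
vector.getDictionaryIds.putInt(2, 2)

// Single-row reads already decoded through the dictionary.
assert(vector.getInt(1) == 8)
// Batch reads previously tripped `assert(dictionary == null)` (or copied the raw,
// undecoded backing array when assertions were disabled); they now decode per row.
assert(vector.getInts(0, 3).sameElements(Array(7, 8, 9)))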
@@ -22,6 +22,8 @@
 public final class ColumnDictionary implements Dictionary {
   private int[] intDictionary;
   private long[] longDictionary;
+  private float[] floatDictionary;
+  private double[] doubleDictionary;

   public ColumnDictionary(int[] dictionary) {
     this.intDictionary = dictionary;
@@ -31,6 +33,14 @@ public ColumnDictionary(long[] dictionary) {
     this.longDictionary = dictionary;
   }
+
+  public ColumnDictionary(float[] dictionary) {
+    this.floatDictionary = dictionary;
+  }
+
+  public ColumnDictionary(double[] dictionary) {
+    this.doubleDictionary = dictionary;
+  }

   @Override
   public int decodeToInt(int id) {
     return intDictionary[id];
@@ -42,14 +52,10 @@ public long decodeToLong(int id) {
   }

   @Override
-  public float decodeToFloat(int id) {
-    throw new UnsupportedOperationException("Dictionary encoding does not support float");
-  }
+  public float decodeToFloat(int id) { return floatDictionary[id]; }

   @Override
-  public double decodeToDouble(int id) {
-    throw new UnsupportedOperationException("Dictionary encoding does not support double");
-  }
+  public double decodeToDouble(int id) { return doubleDictionary[id]; }

   @Override
   public byte[] decodeToBinary(int id) {
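As a quick illustration (not part of the diff, example values made up for the sketch): the new constructors let float and double dictionaries be built and decoded directly, where decodeToFloat/decodeToDouble previously threw UnsupportedOperationException.

import org.apache.spark.sql.execution.columnar.ColumnDictionary

// Previously only int[] and long[] dictionaries could be constructed.
val floatDict = new ColumnDictionary(Array[Float](0.5f, 1.5f))
val doubleDict = new ColumnDictionary(Array[Double](2.5d, 3.5d))

assert(floatDict.decodeToFloat(1) == 1.5f)    // reads floatDictionary[1]
assert(doubleDict.decodeToDouble(0) == 2.5d)  // reads doubleDictionary[0]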
(next changed file: the off-heap column vector implementation, inferred from the raw-address Platform.copyMemory reads)
@@ -213,9 +213,14 @@ public byte getByte(int rowId) {

   @Override
   public byte[] getBytes(int rowId, int count) {
-    assert(dictionary == null);
     byte[] array = new byte[count];
-    Platform.copyMemory(null, data + rowId, array, Platform.BYTE_ARRAY_OFFSET, count);
+    if (dictionary == null) {
+      Platform.copyMemory(null, data + rowId, array, Platform.BYTE_ARRAY_OFFSET, count);
+    } else {
+      for (int i = 0; i < count; i++) {
+        array[i] = getByte(rowId + i);
+      }
+    }
     return array;
   }

@@ -269,9 +274,14 @@ public short getShort(int rowId) {

   @Override
   public short[] getShorts(int rowId, int count) {
-    assert(dictionary == null);
     short[] array = new short[count];
-    Platform.copyMemory(null, data + rowId * 2L, array, Platform.SHORT_ARRAY_OFFSET, count * 2L);
+    if (dictionary == null) {
+      Platform.copyMemory(null, data + rowId * 2L, array, Platform.SHORT_ARRAY_OFFSET, count * 2L);
+    } else {
+      for (int i = 0; i < count; i++) {
+        array[i] = getShort(rowId + i);
+      }
+    }
     return array;
   }

@@ -330,9 +340,14 @@ public int getInt(int rowId) {

   @Override
   public int[] getInts(int rowId, int count) {
-    assert(dictionary == null);
     int[] array = new int[count];
-    Platform.copyMemory(null, data + rowId * 4L, array, Platform.INT_ARRAY_OFFSET, count * 4L);
+    if (dictionary == null) {
+      Platform.copyMemory(null, data + rowId * 4L, array, Platform.INT_ARRAY_OFFSET, count * 4L);
+    } else {
+      for (int i = 0; i < count; i++) {
+        array[i] = getInt(rowId + i);
+      }
+    }
     return array;
   }

@@ -403,9 +418,14 @@ public long getLong(int rowId) {

   @Override
   public long[] getLongs(int rowId, int count) {
-    assert(dictionary == null);
     long[] array = new long[count];
-    Platform.copyMemory(null, data + rowId * 8L, array, Platform.LONG_ARRAY_OFFSET, count * 8L);
+    if (dictionary == null) {
+      Platform.copyMemory(null, data + rowId * 8L, array, Platform.LONG_ARRAY_OFFSET, count * 8L);
+    } else {
+      for (int i = 0; i < count; i++) {
+        array[i] = getLong(rowId + i);
+      }
+    }
     return array;
   }

@@ -462,9 +482,14 @@ public float getFloat(int rowId) {

   @Override
   public float[] getFloats(int rowId, int count) {
-    assert(dictionary == null);
     float[] array = new float[count];
-    Platform.copyMemory(null, data + rowId * 4L, array, Platform.FLOAT_ARRAY_OFFSET, count * 4L);
+    if (dictionary == null) {
+      Platform.copyMemory(null, data + rowId * 4L, array, Platform.FLOAT_ARRAY_OFFSET, count * 4L);
+    } else {
+      for (int i = 0; i < count; i++) {
+        array[i] = getFloat(rowId + i);
+      }
+    }
     return array;
   }

@@ -522,9 +547,15 @@ public double getDouble(int rowId) {

   @Override
   public double[] getDoubles(int rowId, int count) {
-    assert(dictionary == null);
     double[] array = new double[count];
-    Platform.copyMemory(null, data + rowId * 8L, array, Platform.DOUBLE_ARRAY_OFFSET, count * 8L);
+    if (dictionary == null) {
+      Platform.copyMemory(null, data + rowId * 8L, array, Platform.DOUBLE_ARRAY_OFFSET,
+        count * 8L);
+    } else {
+      for (int i = 0; i < count; i++) {
+        array[i] = getDouble(rowId + i);
+      }
+    }
     return array;
   }

(next changed file: the on-heap column vector implementation, inferred from the System.arraycopy calls over Java backing arrays)
@@ -211,9 +211,14 @@ public byte getByte(int rowId) {

   @Override
   public byte[] getBytes(int rowId, int count) {
-    assert(dictionary == null);
     byte[] array = new byte[count];
-    System.arraycopy(byteData, rowId, array, 0, count);
+    if (dictionary == null) {
+      System.arraycopy(byteData, rowId, array, 0, count);
+    } else {
+      for (int i = 0; i < count; i++) {
+        array[i] = getByte(rowId + i);
+      }
+    }
     return array;
   }

@@ -266,9 +271,14 @@ public short getShort(int rowId) {

   @Override
   public short[] getShorts(int rowId, int count) {
-    assert(dictionary == null);
     short[] array = new short[count];
-    System.arraycopy(shortData, rowId, array, 0, count);
+    if (dictionary == null) {
+      System.arraycopy(shortData, rowId, array, 0, count);
+    } else {
+      for (int i = 0; i < count; i++) {
+        array[i] = getShort(rowId + i);
+      }
+    }
     return array;
   }

@@ -322,9 +332,14 @@ public int getInt(int rowId) {

   @Override
   public int[] getInts(int rowId, int count) {
-    assert(dictionary == null);
     int[] array = new int[count];
-    System.arraycopy(intData, rowId, array, 0, count);
+    if (dictionary == null) {
+      System.arraycopy(intData, rowId, array, 0, count);
+    } else {
+      for (int i = 0; i < count; i++) {
+        array[i] = getInt(rowId + i);
+      }
+    }
     return array;
   }

@@ -389,9 +404,14 @@ public long getLong(int rowId) {

   @Override
   public long[] getLongs(int rowId, int count) {
-    assert(dictionary == null);
     long[] array = new long[count];
-    System.arraycopy(longData, rowId, array, 0, count);
+    if (dictionary == null) {
+      System.arraycopy(longData, rowId, array, 0, count);
+    } else {
+      for (int i = 0; i < count; i++) {
+        array[i] = getLong(rowId + i);
+      }
+    }
     return array;
   }

@@ -441,9 +461,14 @@ public float getFloat(int rowId) {

   @Override
   public float[] getFloats(int rowId, int count) {
-    assert(dictionary == null);
     float[] array = new float[count];
-    System.arraycopy(floatData, rowId, array, 0, count);
+    if (dictionary == null) {
+      System.arraycopy(floatData, rowId, array, 0, count);
+    } else {
+      for (int i = 0; i < count; i++) {
+        array[i] = getFloat(rowId + i);
+      }
+    }
     return array;
   }

@@ -495,9 +520,14 @@ public double getDouble(int rowId) {

   @Override
   public double[] getDoubles(int rowId, int count) {
-    assert(dictionary == null);
     double[] array = new double[count];
-    System.arraycopy(doubleData, rowId, array, 0, count);
+    if (dictionary == null) {
+      System.arraycopy(doubleData, rowId, array, 0, count);
+    } else {
+      for (int i = 0; i < count; i++) {
+        array[i] = getDouble(rowId + i);
+      }
+    }
     return array;
   }

(next changed file)
@@ -1108,6 +1108,16 @@ abstract class ParquetQuerySuite extends QueryTest with ParquetTest with SharedS
      checkAnswer(sql("select * from tbl"), expected)
    }
  }

  test("SPARK-44805: cast of struct with two arrays") {
    withTable("tbl") {
      sql("create table tbl (value struct<f1:array<int>,f2:array<int>>) using parquet")
      sql("insert into tbl values (named_struct('f1', array(1, 2, 3), 'f2', array(1, 1, 2)))")
      val df = sql("select cast(value as struct<f1:array<double>,f2:array<int>>) AS value from tbl")
      val expected = Row(Row(Array(1.0d, 2.0d, 3.0d), Array(1, 1, 2))) :: Nil
      checkAnswer(df, expected)
    }
  }
}

class ParquetV1QuerySuite extends ParquetQuerySuite {
(next changed file)
@@ -20,7 +20,7 @@ package org.apache.spark.sql.execution.vectorized
import org.apache.spark.SparkFunSuite
import org.apache.spark.sql.catalyst.expressions.SpecificInternalRow
import org.apache.spark.sql.catalyst.plans.SQLHelper
-import org.apache.spark.sql.execution.columnar.ColumnAccessor
+import org.apache.spark.sql.execution.columnar.{ColumnAccessor, ColumnDictionary}
import org.apache.spark.sql.execution.columnar.compression.ColumnBuilderHelper
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.types._
@@ -397,6 +397,84 @@ class ColumnVectorSuite extends SparkFunSuite with SQLHelper {
assert(testVector.getStruct(1).get(1, DoubleType) === 5.67)
}

testVectors("SPARK-44805: getInts with dictionary", 3, IntegerType) { testVector =>
val dict = new ColumnDictionary(Array[Int](7, 8, 9))
testVector.setDictionary(dict)
testVector.reserveDictionaryIds(3)
testVector.getDictionaryIds.putInt(0, 0)
testVector.getDictionaryIds.putInt(1, 1)
testVector.getDictionaryIds.putInt(2, 2)

assert(testVector.getInts(0, 3)(0) == 7)
assert(testVector.getInts(0, 3)(1) == 8)
assert(testVector.getInts(0, 3)(2) == 9)
}

testVectors("SPARK-44805: getShorts with dictionary", 3, ShortType) { testVector =>
val dict = new ColumnDictionary(Array[Int](7, 8, 9))
testVector.setDictionary(dict)
testVector.reserveDictionaryIds(3)
testVector.getDictionaryIds.putInt(0, 0)
testVector.getDictionaryIds.putInt(1, 1)
testVector.getDictionaryIds.putInt(2, 2)

assert(testVector.getShorts(0, 3)(0) == 7)
assert(testVector.getShorts(0, 3)(1) == 8)
assert(testVector.getShorts(0, 3)(2) == 9)
}

testVectors("SPARK-44805: getBytes with dictionary", 3, ByteType) { testVector =>
val dict = new ColumnDictionary(Array[Int](7, 8, 9))
testVector.setDictionary(dict)
testVector.reserveDictionaryIds(3)
testVector.getDictionaryIds.putInt(0, 0)
testVector.getDictionaryIds.putInt(1, 1)
testVector.getDictionaryIds.putInt(2, 2)

assert(testVector.getBytes(0, 3)(0) == 7)
assert(testVector.getBytes(0, 3)(1) == 8)
assert(testVector.getBytes(0, 3)(2) == 9)
}

testVectors("SPARK-44805: getLongs with dictionary", 3, LongType) { testVector =>
val dict = new ColumnDictionary(Array[Long](2147483648L, 2147483649L, 2147483650L))
testVector.setDictionary(dict)
testVector.reserveDictionaryIds(3)
testVector.getDictionaryIds.putInt(0, 0)
testVector.getDictionaryIds.putInt(1, 1)
testVector.getDictionaryIds.putInt(2, 2)

assert(testVector.getLongs(0, 3)(0) == 2147483648L)
assert(testVector.getLongs(0, 3)(1) == 2147483649L)
assert(testVector.getLongs(0, 3)(2) == 2147483650L)
}

testVectors("SPARK-44805: getFloats with dictionary", 3, FloatType) { testVector =>
val dict = new ColumnDictionary(Array[Float](0.1f, 0.2f, 0.3f))
testVector.setDictionary(dict)
testVector.reserveDictionaryIds(3)
testVector.getDictionaryIds.putInt(0, 0)
testVector.getDictionaryIds.putInt(1, 1)
testVector.getDictionaryIds.putInt(2, 2)

assert(testVector.getFloats(0, 3)(0) == 0.1f)
assert(testVector.getFloats(0, 3)(1) == 0.2f)
assert(testVector.getFloats(0, 3)(2) == 0.3f)
}

testVectors("SPARK-44805: getDoubles with dictionary", 3, DoubleType) { testVector =>
val dict = new ColumnDictionary(Array[Double](1342.17727d, 1342.17728d, 1342.17729d))
testVector.setDictionary(dict)
testVector.reserveDictionaryIds(3)
testVector.getDictionaryIds.putInt(0, 0)
testVector.getDictionaryIds.putInt(1, 1)
testVector.getDictionaryIds.putInt(2, 2)

assert(testVector.getDoubles(0, 3)(0) == 1342.17727d)
assert(testVector.getDoubles(0, 3)(1) == 1342.17728d)
assert(testVector.getDoubles(0, 3)(2) == 1342.17729d)
}

test("[SPARK-22092] off-heap column vector reallocation corrupts array data") {
withVector(new OffHeapColumnVector(8, arrayType)) { testVector =>
val data = testVector.arrayData()