From d994330a53de0f680d386b56c1db3d5abbb0747c Mon Sep 17 00:00:00 2001 From: Liang-Chi Hsieh Date: Sun, 5 Jul 2015 12:07:50 +0800 Subject: [PATCH] Minor fix for CatalystSchemaConverter. --- .../main/scala/org/apache/spark/sql/SQLConf.scala | 2 +- .../spark/sql/parquet/CatalystSchemaConverter.scala | 12 ++++++------ 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/sql/core/src/main/scala/org/apache/spark/sql/SQLConf.scala b/sql/core/src/main/scala/org/apache/spark/sql/SQLConf.scala index 2c258b6ee399c..6005d35f015a9 100644 --- a/sql/core/src/main/scala/org/apache/spark/sql/SQLConf.scala +++ b/sql/core/src/main/scala/org/apache/spark/sql/SQLConf.scala @@ -273,7 +273,7 @@ private[spark] object SQLConf { val PARQUET_FOLLOW_PARQUET_FORMAT_SPEC = booleanConf( key = "spark.sql.parquet.followParquetFormatSpec", defaultValue = Some(false), - doc = "Wether to stick to Parquet format specification when converting Parquet schema to " + + doc = "Whether to stick to Parquet format specification when converting Parquet schema to " + "Spark SQL schema and vice versa.
Sticks to the specification if set to true; falls back " + "to compatible mode if set to false.", isPublic = false) diff --git a/sql/core/src/main/scala/org/apache/spark/sql/parquet/CatalystSchemaConverter.scala b/sql/core/src/main/scala/org/apache/spark/sql/parquet/CatalystSchemaConverter.scala index 2be7c64612cd2..4ab274ec17a02 100644 --- a/sql/core/src/main/scala/org/apache/spark/sql/parquet/CatalystSchemaConverter.scala +++ b/sql/core/src/main/scala/org/apache/spark/sql/parquet/CatalystSchemaConverter.scala @@ -142,7 +142,7 @@ private[parquet] class CatalystSchemaConverter( DecimalType(precision, scale) } - field.getPrimitiveTypeName match { + typeName match { case BOOLEAN => BooleanType case FLOAT => FloatType @@ -150,7 +150,7 @@ private[parquet] class CatalystSchemaConverter( case DOUBLE => DoubleType case INT32 => - field.getOriginalType match { + originalType match { case INT_8 => ByteType case INT_16 => ShortType case INT_32 | null => IntegerType @@ -161,7 +161,7 @@ private[parquet] class CatalystSchemaConverter( } case INT64 => - field.getOriginalType match { + originalType match { case INT_64 | null => LongType case DECIMAL => makeDecimalType(maxPrecisionForBytes(8)) case TIMESTAMP_MILLIS => typeNotImplemented() @@ -176,7 +176,7 @@ private[parquet] class CatalystSchemaConverter( TimestampType case BINARY => - field.getOriginalType match { + originalType match { case UTF8 | ENUM => StringType case null if assumeBinaryIsString => StringType case null => BinaryType @@ -185,7 +185,7 @@ private[parquet] class CatalystSchemaConverter( case FIXED_LEN_BYTE_ARRAY => - field.getOriginalType match { + originalType match { case DECIMAL => makeDecimalType(maxPrecisionForBytes(field.getTypeLength)) case INTERVAL => typeNotImplemented() case _ => illegalType() @@ -261,7 +261,7 @@ private[parquet] class CatalystSchemaConverter( // Here we implement Parquet LIST backwards-compatibility rules.
// See: https://github.com/apache/parquet-format/blob/master/LogicalTypes.md#backward-compatibility-rules // scalastyle:on - private def isElementType(repeatedType: Type, parentName: String) = { + private def isElementType(repeatedType: Type, parentName: String): Boolean = { { // For legacy 2-level list types with primitive element type, e.g.: //