@@ -37,6 +37,7 @@ import org.apache.spark.sql.catalyst.encoders.AgnosticEncoder
import org.apache.spark.sql.catalyst.encoders.AgnosticEncoders._
import org.apache.spark.sql.catalyst.expressions._
import org.apache.spark.sql.catalyst.expressions.objects._
import org.apache.spark.sql.catalyst.types.{PhysicalBinaryType, PhysicalIntegerType, PhysicalLongType}
import org.apache.spark.sql.catalyst.util.{ArrayBasedMapData, ArrayData, MapData}
import org.apache.spark.sql.errors.QueryExecutionErrors
import org.apache.spark.sql.types._
@@ -728,10 +729,10 @@ object ScalaReflection extends ScalaReflection {
FloatType -> classOf[Float],
DoubleType -> classOf[Double],
StringType -> classOf[UTF8String],
DateType -> classOf[DateType.InternalType],
TimestampType -> classOf[TimestampType.InternalType],
TimestampNTZType -> classOf[TimestampNTZType.InternalType],
BinaryType -> classOf[BinaryType.InternalType],
DateType -> classOf[PhysicalIntegerType.InternalType],
TimestampType -> classOf[PhysicalLongType.InternalType],
TimestampNTZType -> classOf[PhysicalLongType.InternalType],
BinaryType -> classOf[PhysicalBinaryType.InternalType],
CalendarIntervalType -> classOf[CalendarInterval]
)

@@ -751,8 +752,8 @@ object ScalaReflection extends ScalaReflection {
def dataTypeJavaClass(dt: DataType): Class[_] = {
dt match {
case _: DecimalType => classOf[Decimal]
case it: DayTimeIntervalType => classOf[it.InternalType]
case it: YearMonthIntervalType => classOf[it.InternalType]
case _: DayTimeIntervalType => classOf[PhysicalLongType.InternalType]
case _: YearMonthIntervalType => classOf[PhysicalIntegerType.InternalType]
case _: StructType => classOf[InternalRow]
case _: ArrayType => classOf[ArrayData]
case _: MapType => classOf[MapData]
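With the per-logical-type InternalType alias gone, the class lookup above resolves each logical type through its physical counterpart: DateType via PhysicalIntegerType (Int), TimestampType and TimestampNTZType via PhysicalLongType (Long), BinaryType via PhysicalBinaryType (Array[Byte]). A minimal, self-contained sketch of that wiring, using toy names rather than Spark's actual classes:

// Toy stand-ins for PhysicalDataType and friends; illustrative only, not Spark's API.
object PhysicalTypeSketch {
  sealed trait PhysicalLike { type InternalType }
  object PhysIntLike    extends PhysicalLike { type InternalType = Int }
  object PhysLongLike   extends PhysicalLike { type InternalType = Long }
  object PhysBinaryLike extends PhysicalLike { type InternalType = Array[Byte] }

  // Logical types no longer carry an InternalType; the JVM class is taken from the physical side.
  val javaClassFor: Map[String, Class[_]] = Map(
    "date"          -> classOf[PhysIntLike.InternalType],     // int
    "timestamp"     -> classOf[PhysLongLike.InternalType],    // long
    "timestamp_ntz" -> classOf[PhysLongLike.InternalType],    // long
    "binary"        -> classOf[PhysBinaryLike.InternalType]   // byte[]
  )

  def main(args: Array[String]): Unit =
    javaClassFor.foreach { case (name, cls) => println(s"$name -> $cls") }
}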
@@ -140,7 +140,8 @@ case class ApproxCountDistinctForIntervals(
// convert the value into a double value for searching in the double array
val doubleValue = child.dataType match {
case n: NumericType =>
PhysicalNumericType.numeric(n).toDouble(value.asInstanceOf[n.InternalType])
PhysicalNumericType.numeric(n)
.toDouble(value.asInstanceOf[PhysicalNumericType#InternalType])
case _: DateType | _: YearMonthIntervalType =>
value.asInstanceOf[Int].toDouble
case TimestampType | TimestampNTZType | _: DayTimeIntervalType =>
@@ -180,7 +180,8 @@ case class ApproximatePercentile(
case TimestampType | TimestampNTZType | _: DayTimeIntervalType =>
value.asInstanceOf[Long].toDouble
case n: NumericType =>
PhysicalNumericType.numeric(n).toDouble(value.asInstanceOf[n.InternalType])
PhysicalNumericType.numeric(n)
.toDouble(value.asInstanceOf[PhysicalNumericType#InternalType])
case other: DataType =>
throw QueryExecutionErrors.dataTypeUnexpectedError(other)
}
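In both aggregates the cast target changes from the path-dependent n.InternalType, which no longer exists on the logical NumericType, to the type projection PhysicalNumericType#InternalType carried by the physical type. A self-contained sketch of why that projection lines up with the Numeric instance returned for the type (toy stand-ins, not Spark's classes):

// Illustrative re-creation of the PhysicalNumericType.numeric(...) pattern; names are hypothetical.
object NumericCastSketch {
  sealed trait PhysNum { type InternalType; def num: Numeric[InternalType] }
  object PhysInt  extends PhysNum { type InternalType = Int;  val num = implicitly[Numeric[Int]]  }
  object PhysLong extends PhysNum { type InternalType = Long; val num = implicitly[Numeric[Long]] }

  // Mirrors the pattern above: the result is typed with the projection PhysNum#InternalType,
  // so callers cast the untyped value to the same projection before handing it over.
  def numeric(pt: PhysNum): Numeric[PhysNum#InternalType] =
    pt.num.asInstanceOf[Numeric[PhysNum#InternalType]]

  def toDouble(pt: PhysNum, value: Any): Double =
    numeric(pt).toDouble(value.asInstanceOf[PhysNum#InternalType])

  def main(args: Array[String]): Unit = {
    println(toDouble(PhysInt, 3))    // 3.0
    println(toDouble(PhysLong, 42L)) // 42.0
  }
}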
@@ -17,8 +17,6 @@

package org.apache.spark.sql.types

import scala.reflect.runtime.universe.TypeTag

import org.apache.spark.annotation.Stable
import org.apache.spark.sql.catalyst.expressions.Expression
import org.apache.spark.sql.errors.QueryExecutionErrors
@@ -121,10 +119,7 @@ protected[sql] object AnyDataType extends AbstractDataType with Serializable {
/**
* An internal type used to represent everything that is not null, UDTs, arrays, structs, and maps.
*/
protected[sql] abstract class AtomicType extends DataType {
private[sql] type InternalType
private[sql] val tag: TypeTag[InternalType]
}
protected[sql] abstract class AtomicType extends DataType

object AtomicType {
/**
@@ -17,29 +17,15 @@

package org.apache.spark.sql.types

import scala.reflect.runtime.universe.typeTag

import org.apache.spark.annotation.Stable
import org.apache.spark.sql.catalyst.types.{PhysicalBinaryType, PhysicalDataType}
import org.apache.spark.unsafe.types.ByteArray

/**
* The data type representing `Array[Byte]` values.
* Please use the singleton `DataTypes.BinaryType`.
*/
@Stable
class BinaryType private() extends AtomicType {
// The companion object and this class is separated so the companion object also subclasses
// this type. Otherwise, the companion object would be of type "BinaryType$" in byte code.
// Defined with a private constructor so the companion object is the only possible instantiation.

private[sql] type InternalType = Array[Byte]

@transient private[sql] lazy val tag = typeTag[InternalType]

private[sql] val ordering =
(x: Array[Byte], y: Array[Byte]) => ByteArray.compareBinary(x, y)

/**
* The default size of a value of the BinaryType is 100 bytes.
*/
@@ -17,9 +17,6 @@

package org.apache.spark.sql.types

import scala.math.Ordering
import scala.reflect.runtime.universe.typeTag

import org.apache.spark.annotation.Stable
import org.apache.spark.sql.catalyst.types.{PhysicalBooleanType, PhysicalDataType}

@@ -30,13 +27,6 @@ import org.apache.spark.sql.catalyst.types.{PhysicalBooleanType, PhysicalDataType}
*/
@Stable
class BooleanType private() extends AtomicType {
// The companion object and this class is separated so the companion object also subclasses
// this type. Otherwise, the companion object would be of type "BooleanType$" in byte code.
// Defined with a private constructor so the companion object is the only possible instantiation.
private[sql] type InternalType = Boolean
@transient private[sql] lazy val tag = typeTag[InternalType]
private[sql] val ordering = implicitly[Ordering[InternalType]]

/**
* The default size of a value of the BooleanType is 1 byte.
*/
@@ -17,8 +17,6 @@

package org.apache.spark.sql.types

import scala.reflect.runtime.universe.typeTag

import org.apache.spark.annotation.Stable
import org.apache.spark.sql.catalyst.types.{PhysicalByteType, PhysicalDataType}

@@ -29,12 +27,6 @@ import org.apache.spark.sql.catalyst.types.{PhysicalByteType, PhysicalDataType}
*/
@Stable
class ByteType private() extends IntegralType {
// The companion object and this class is separated so the companion object also subclasses
// this type. Otherwise, the companion object would be of type "ByteType$" in byte code.
// Defined with a private constructor so the companion object is the only possible instantiation.
private[sql] type InternalType = Byte
@transient private[sql] lazy val tag = typeTag[InternalType]

/**
* The default size of a value of the ByteType is 1 byte.
*/
@@ -17,21 +17,13 @@

package org.apache.spark.sql.types

import scala.math.Ordering
import scala.reflect.runtime.universe.typeTag

import org.apache.spark.annotation.Experimental
import org.apache.spark.sql.catalyst.types.{PhysicalDataType, PhysicalStringType}
import org.apache.spark.unsafe.types.UTF8String

@Experimental
case class CharType(length: Int) extends AtomicType {
require(length >= 0, "The length of char type cannot be negative.")

private[sql] type InternalType = UTF8String
@transient private[sql] lazy val tag = typeTag[InternalType]
private[sql] val ordering = implicitly[Ordering[InternalType]]

override def defaultSize: Int = length
private[sql] override def physicalDataType: PhysicalDataType = PhysicalStringType
override def typeName: String = s"char($length)"
@@ -17,8 +17,6 @@

package org.apache.spark.sql.types

import scala.reflect.runtime.universe.typeTag

import org.apache.spark.annotation.Stable
import org.apache.spark.sql.catalyst.types.{PhysicalDataType, PhysicalIntegerType}

@@ -31,14 +29,6 @@ import org.apache.spark.sql.catalyst.types.{PhysicalDataType, PhysicalIntegerType}
*/
@Stable
class DateType private() extends DatetimeType {
/**
* Internally, a date is stored as a simple incrementing count of days
* where day 0 is 1970-01-01. Negative numbers represent earlier days.
*/
private[sql] type InternalType = Int

@transient private[sql] lazy val tag = typeTag[InternalType]

/**
* The default size of a value of the DateType is 4 bytes.
*/
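The encoding spelled out in the removed scaladoc is unchanged: a date is still physically an Int counting days since 1970-01-01 (day 0), negative for earlier dates; the Int alias simply comes from PhysicalIntegerType now. A quick, Spark-independent sanity check of that encoding with java.time:

import java.time.LocalDate

// Days-since-epoch encoding used for DateType values; plain JDK code, no Spark dependency.
object DateEncodingSketch {
  def main(args: Array[String]): Unit = {
    println(LocalDate.ofEpochDay(0L))      // 1970-01-01 (day 0)
    println(LocalDate.ofEpochDay(-1L))     // 1969-12-31 (negative days are earlier)
    println(LocalDate.ofEpochDay(19723L))  // 2024-01-01
    println(LocalDate.of(2000, 1, 1).toEpochDay)  // 10957
  }
}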
@@ -17,8 +17,6 @@

package org.apache.spark.sql.types

import scala.reflect.runtime.universe.typeTag

import org.apache.spark.annotation.Unstable
import org.apache.spark.sql.catalyst.types.{PhysicalDataType, PhysicalLongType}
import org.apache.spark.sql.errors.QueryCompilationErrors
@@ -43,15 +41,6 @@ import org.apache.spark.sql.types.DayTimeIntervalType.fieldToString
*/
@Unstable
case class DayTimeIntervalType(startField: Byte, endField: Byte) extends AnsiIntervalType {
/**
* Internally, values of day-time intervals are stored in `Long` values as amount of time in terms
* of microseconds that are calculated by the formula:
* -/+ (24*60*60 * DAY + 60*60 * HOUR + 60 * MINUTE + SECOND) * 1000000
*/
private[sql] type InternalType = Long

@transient private[sql] lazy val tag = typeTag[InternalType]

/**
* The day-time interval type has constant precision. A value of the type always occupies 8 bytes.
* The DAY field is constrained by the upper bound 106751991 to fit to `Long`.
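The formula in the removed scaladoc still describes the physical layout, now tied to PhysicalLongType: a day-time interval is one signed Long holding -/+ (24*60*60 * DAY + 60*60 * HOUR + 60 * MINUTE + SECOND) * 1000000 microseconds. A small worked example of that arithmetic (plain Scala, whole seconds only for brevity; not Spark's own helper):

// Encodes a day-time interval as signed microseconds, per the formula above.
object DayTimeIntervalEncodingSketch {
  def toMicros(negative: Boolean, days: Long, hours: Long, minutes: Long, seconds: Long): Long = {
    val magnitude = (days * 24 * 60 * 60 + hours * 60 * 60 + minutes * 60 + seconds) * 1000000L
    if (negative) -magnitude else magnitude
  }

  def main(args: Array[String]): Unit = {
    println(toMicros(negative = false, 1, 0, 0, 0))   // 86400000000   (1 day)
    println(toMicros(negative = false, 0, 2, 30, 0))  // 9000000000    (2 hours 30 minutes)
    println(toMicros(negative = true, 1, 2, 0, 0))    // -93600000000  (minus 1 day 2 hours)
  }
}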
@@ -20,7 +20,6 @@ package org.apache.spark.sql.types
import java.util.Locale

import scala.annotation.tailrec
import scala.reflect.runtime.universe.typeTag

import org.apache.spark.annotation.Stable
import org.apache.spark.sql.catalyst.expressions.{Expression, Literal}
@@ -59,9 +58,6 @@ case class DecimalType(precision: Int, scale: Int) extends FractionalType {
def this(precision: Int) = this(precision, 0)
def this() = this(10)

private[sql] type InternalType = Decimal
@transient private[sql] lazy val tag = typeTag[InternalType]

override def typeName: String = s"decimal($precision,$scale)"

override def toString: String = s"DecimalType($precision,$scale)"
@@ -17,7 +17,6 @@

package org.apache.spark.sql.types

import scala.reflect.runtime.universe.typeTag
import scala.util.Try

import org.apache.spark.annotation.Stable
@@ -30,12 +29,6 @@ import org.apache.spark.sql.catalyst.types.{PhysicalDataType, PhysicalDoubleType}
*/
@Stable
class DoubleType private() extends FractionalType {
// The companion object and this class is separated so the companion object also subclasses
// this type. Otherwise, the companion object would be of type "DoubleType$" in byte code.
// Defined with a private constructor so the companion object is the only possible instantiation.
private[sql] type InternalType = Double
@transient private[sql] lazy val tag = typeTag[InternalType]

/**
* The default size of a value of the DoubleType is 8 bytes.
*/
@@ -17,7 +17,6 @@

package org.apache.spark.sql.types

import scala.reflect.runtime.universe.typeTag
import scala.util.Try

import org.apache.spark.annotation.Stable
@@ -30,12 +29,6 @@ import org.apache.spark.sql.catalyst.types.{PhysicalDataType, PhysicalFloatType}
*/
@Stable
class FloatType private() extends FractionalType {
// The companion object and this class is separated so the companion object also subclasses
// this type. Otherwise, the companion object would be of type "FloatType$" in byte code.
// Defined with a private constructor so the companion object is the only possible instantiation.
private[sql] type InternalType = Float
@transient private[sql] lazy val tag = typeTag[InternalType]

/**
* The default size of a value of the FloatType is 4 bytes.
*/
@@ -17,8 +17,6 @@

package org.apache.spark.sql.types

import scala.reflect.runtime.universe.typeTag

import org.apache.spark.annotation.Stable
import org.apache.spark.sql.catalyst.types.{PhysicalDataType, PhysicalIntegerType}

@@ -29,12 +27,6 @@ import org.apache.spark.sql.catalyst.types.{PhysicalDataType, PhysicalIntegerType}
*/
@Stable
class IntegerType private() extends IntegralType {
// The companion object and this class is separated so the companion object also subclasses
// this type. Otherwise, the companion object would be of type "IntegerType$" in byte code.
// Defined with a private constructor so the companion object is the only possible instantiation.
private[sql] type InternalType = Int
@transient private[sql] lazy val tag = typeTag[InternalType]

/**
* The default size of a value of the IntegerType is 4 bytes.
*/
@@ -17,8 +17,6 @@

package org.apache.spark.sql.types

import scala.reflect.runtime.universe.typeTag

import org.apache.spark.annotation.Stable
import org.apache.spark.sql.catalyst.types.{PhysicalDataType, PhysicalLongType}

@@ -29,12 +27,6 @@ import org.apache.spark.sql.catalyst.types.{PhysicalDataType, PhysicalLongType}
*/
@Stable
class LongType private() extends IntegralType {
// The companion object and this class is separated so the companion object also subclasses
// this type. Otherwise, the companion object would be of type "LongType$" in byte code.
// Defined with a private constructor so the companion object is the only possible instantiation.
private[sql] type InternalType = Long
@transient private[sql] lazy val tag = typeTag[InternalType]

/**
* The default size of a value of the LongType is 8 bytes.
*/
@@ -17,8 +17,6 @@

package org.apache.spark.sql.types

import scala.reflect.runtime.universe.typeTag

import org.apache.spark.annotation.Stable
import org.apache.spark.sql.catalyst.types.{PhysicalDataType, PhysicalShortType}

@@ -29,12 +27,6 @@ import org.apache.spark.sql.catalyst.types.{PhysicalDataType, PhysicalShortType}
*/
@Stable
class ShortType private() extends IntegralType {
// The companion object and this class is separated so the companion object also subclasses
// this type. Otherwise, the companion object would be of type "ShortType$" in byte code.
// Defined with a private constructor so the companion object is the only possible instantiation.
private[sql] type InternalType = Short
@transient private[sql] lazy val tag = typeTag[InternalType]

/**
* The default size of a value of the ShortType is 2 bytes.
*/
@@ -17,12 +17,8 @@

package org.apache.spark.sql.types

import scala.math.Ordering
import scala.reflect.runtime.universe.typeTag

import org.apache.spark.annotation.Stable
import org.apache.spark.sql.catalyst.types.{PhysicalDataType, PhysicalStringType}
import org.apache.spark.unsafe.types.UTF8String

/**
* The data type representing `String` values. Please use the singleton `DataTypes.StringType`.
@@ -31,13 +27,6 @@ import org.apache.spark.unsafe.types.UTF8String
*/
@Stable
class StringType private() extends AtomicType {
// The companion object and this class is separated so the companion object also subclasses
// this type. Otherwise, the companion object would be of type "StringType$" in byte code.
// Defined with a private constructor so the companion object is the only possible instantiation.
private[sql] type InternalType = UTF8String
@transient private[sql] lazy val tag = typeTag[InternalType]
private[sql] val ordering = implicitly[Ordering[InternalType]]

/**
* The default size of a value of the StringType is 20 bytes.
*/
@@ -17,8 +17,6 @@

package org.apache.spark.sql.types

import scala.reflect.runtime.universe.typeTag

import org.apache.spark.annotation.Unstable
import org.apache.spark.sql.catalyst.types.{PhysicalDataType, PhysicalLongType}

@@ -33,14 +31,6 @@ import org.apache.spark.sql.catalyst.types.{PhysicalDataType, PhysicalLongType}
*/
@Unstable
class TimestampNTZType private() extends DatetimeType {
/**
* Internally, a timestamp is stored as the number of microseconds from
* the epoch of 1970-01-01T00:00:00.000000(Unix system time zero)
*/
private[sql] type InternalType = Long

@transient private[sql] lazy val tag = typeTag[InternalType]

/**
* The default size of a value of the TimestampNTZType is 8 bytes.
*/
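As with DateType, only the location of the alias changes: a TIMESTAMP_NTZ value is still a Long count of microseconds since 1970-01-01T00:00:00.000000, with no time-zone adjustment, and the Long now comes from PhysicalLongType. A quick Spark-independent illustration with java.time:

import java.time.LocalDateTime
import java.time.temporal.ChronoUnit

// Microseconds-since-epoch encoding used for TIMESTAMP_NTZ values; plain JDK code, no Spark dependency.
object TimestampNtzEncodingSketch {
  def main(args: Array[String]): Unit = {
    val epoch = LocalDateTime.of(1970, 1, 1, 0, 0, 0)
    val value = LocalDateTime.of(1970, 1, 2, 0, 0, 1)
    // One day and one second after the epoch, measured without any time zone.
    println(ChronoUnit.MICROS.between(epoch, value))  // 86401000000
  }
}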