Skip to content

Commit

Permalink
remove BaseMutableRow
Browse files Browse the repository at this point in the history
  • Loading branch information
Davies Liu committed Jun 27, 2015
1 parent 6f99a97 commit bdfb78f
Show file tree
Hide file tree
Showing 7 changed files with 64 additions and 237 deletions.

This file was deleted.

Original file line number Diff line number Diff line change
Expand Up @@ -23,16 +23,12 @@
import java.util.HashSet;
import java.util.Set;

import scala.collection.Seq;
import scala.collection.mutable.ArraySeq;

import org.apache.spark.sql.catalyst.InternalRow;
import org.apache.spark.sql.BaseMutableRow;
import org.apache.spark.sql.types.DataType;
import org.apache.spark.sql.types.StructType;
import org.apache.spark.unsafe.types.UTF8String;
import org.apache.spark.unsafe.PlatformDependent;
import org.apache.spark.unsafe.bitset.BitSetMethods;
import org.apache.spark.unsafe.types.UTF8String;

import static org.apache.spark.sql.types.DataTypes.*;

Expand All @@ -52,7 +48,7 @@
*
* Instances of `UnsafeRow` act as pointers to row data stored in this format.
*/
public final class UnsafeRow extends BaseMutableRow {
public final class UnsafeRow extends MutableRow {

private Object baseObject;
private long baseOffset;
Expand Down Expand Up @@ -346,13 +342,4 @@ public InternalRow copy() {
public boolean anyNull() {
return BitSetMethods.anySet(baseObject, baseOffset, bitSetWidthInBytes);
}

@Override
public Seq<Object> toSeq() {
final ArraySeq<Object> values = new ArraySeq<Object>(numFields);
for (int fieldNumber = 0; fieldNumber < numFields; fieldNumber++) {
values.update(fieldNumber, get(fieldNumber));
}
return values;
}
}
41 changes: 25 additions & 16 deletions sql/catalyst/src/main/scala/org/apache/spark/sql/Row.scala
Original file line number Diff line number Diff line change
Expand Up @@ -179,47 +179,47 @@ trait Row extends Serializable {
def get(i: Int): Any = apply(i)

/** Checks whether the value at position i is null. */
def isNullAt(i: Int): Boolean
def isNullAt(i: Int): Boolean = apply(i) == null

/**
* Returns the value at position i as a primitive boolean.
*
* @throws ClassCastException when data type does not match.
* @throws NullPointerException when value is null.
*/
def getBoolean(i: Int): Boolean
def getBoolean(i: Int): Boolean = getAs[Boolean](i)

/**
* Returns the value at position i as a primitive byte.
*
* @throws ClassCastException when data type does not match.
* @throws NullPointerException when value is null.
*/
def getByte(i: Int): Byte
def getByte(i: Int): Byte = getAs[Byte](i)

/**
* Returns the value at position i as a primitive short.
*
* @throws ClassCastException when data type does not match.
* @throws NullPointerException when value is null.
*/
def getShort(i: Int): Short
def getShort(i: Int): Short = getAs[Short](i)

/**
* Returns the value at position i as a primitive int.
*
* @throws ClassCastException when data type does not match.
* @throws NullPointerException when value is null.
*/
def getInt(i: Int): Int
def getInt(i: Int): Int = getAs[Int](i)

/**
* Returns the value at position i as a primitive long.
*
* @throws ClassCastException when data type does not match.
* @throws NullPointerException when value is null.
*/
def getLong(i: Int): Long
def getLong(i: Int): Long = getAs[Long](i)

/**
* Returns the value at position i as a primitive float.
Expand All @@ -228,51 +228,51 @@ trait Row extends Serializable {
* @throws ClassCastException when data type does not match.
* @throws NullPointerException when value is null.
*/
def getFloat(i: Int): Float
def getFloat(i: Int): Float = getAs[Float](i)

/**
* Returns the value at position i as a primitive double.
*
* @throws ClassCastException when data type does not match.
* @throws NullPointerException when value is null.
*/
def getDouble(i: Int): Double
def getDouble(i: Int): Double = getAs[Double](i)

/**
* Returns the value at position i as a String object.
*
* @throws ClassCastException when data type does not match.
* @throws NullPointerException when value is null.
*/
def getString(i: Int): String
def getString(i: Int): String = getAs[String](i)

/**
* Returns the value at position i of decimal type as java.math.BigDecimal.
*
* @throws ClassCastException when data type does not match.
*/
def getDecimal(i: Int): java.math.BigDecimal = apply(i).asInstanceOf[java.math.BigDecimal]
def getDecimal(i: Int): java.math.BigDecimal = getAs[java.math.BigDecimal](i)

/**
* Returns the value at position i of date type as java.sql.Date.
*
* @throws ClassCastException when data type does not match.
*/
def getDate(i: Int): java.sql.Date = apply(i).asInstanceOf[java.sql.Date]
def getDate(i: Int): java.sql.Date = getAs[java.sql.Date](i)

/**
* Returns the value at position i of date type as java.sql.Timestamp.
*
* @throws ClassCastException when data type does not match.
*/
def getTimestamp(i: Int): java.sql.Timestamp = apply(i).asInstanceOf[java.sql.Timestamp]
def getTimestamp(i: Int): java.sql.Timestamp = getAs[java.sql.Timestamp](i)

/**
* Returns the value at position i of array type as a Scala Seq.
*
* @throws ClassCastException when data type does not match.
*/
def getSeq[T](i: Int): Seq[T] = apply(i).asInstanceOf[Seq[T]]
def getSeq[T](i: Int): Seq[T] = getAs[Seq[T]](i)

/**
* Returns the value at position i of array type as [[java.util.List]].
Expand All @@ -288,7 +288,7 @@ trait Row extends Serializable {
*
* @throws ClassCastException when data type does not match.
*/
def getMap[K, V](i: Int): scala.collection.Map[K, V] = apply(i).asInstanceOf[Map[K, V]]
def getMap[K, V](i: Int): scala.collection.Map[K, V] = getAs[Map[K, V]](i)

/**
* Returns the value at position i of array type as a [[java.util.Map]].
Expand Down Expand Up @@ -366,9 +366,18 @@ trait Row extends Serializable {
/* ---------------------- utility methods for Scala ---------------------- */

/**
* Return a Scala Seq representing the row. ELements are placed in the same order in the Seq.
* Return a Scala Seq representing the row. Elements are placed in the same order in the Seq.
*/
def toSeq: Seq[Any]
def toSeq: Seq[Any] = {
val n = length
val values = new Array[Any](n)
var i = 0
while (i < n) {
values.update(i, get(i))
i += 1
}
values.toSeq
}

/** Displays all elements of this sequence in a string (without a separator). */
def mkString: String = toSeq.mkString
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -27,15 +27,6 @@ import org.apache.spark.unsafe.types.UTF8String
*/
abstract class InternalRow extends Row {

// default implementation for codegen (for a Row which does not have those types)
override def getBoolean(i: Int): Boolean = throw new UnsupportedOperationException
override def getByte(i: Int): Byte = throw new UnsupportedOperationException
override def getShort(i: Int): Short = throw new UnsupportedOperationException
override def getInt(i: Int): Int = throw new UnsupportedOperationException
override def getLong(i: Int): Long = throw new UnsupportedOperationException
override def getFloat(i: Int): Float = throw new UnsupportedOperationException
override def getDouble(i: Int): Double = throw new UnsupportedOperationException

// This is only used for tests
override def getString(i: Int): String = getAs[UTF8String](i).toString

Expand All @@ -57,16 +48,6 @@ abstract class InternalRow extends Row {
// A default implementation to change the return type
override def copy(): InternalRow = this
override def apply(i: Int): Any = get(i)
def toSeq(): Seq[Any] = {
val n = length
val values = new Array[Any](n)
var i = 0
while (i < n) {
values.update(i, get(i))
i += 1
}
values.toSeq
}

override def equals(o: Any): Boolean = {
if (!o.isInstanceOf[Row]) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,7 @@ package org.apache.spark.sql.catalyst.expressions.codegen
import org.apache.spark.sql.catalyst.expressions._

// MutableProjection is not accessible in Java
abstract class BaseMutableProjection extends MutableProjection {}
abstract class BaseMutableProjection extends MutableProjection

/**
* Generates byte code that produces a [[MutableRow]] object that can update itself based on a new
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,6 @@

package org.apache.spark.sql.catalyst.expressions.codegen

import org.apache.spark.sql.BaseMutableRow
import org.apache.spark.sql.catalyst.expressions._
import org.apache.spark.sql.types._

Expand Down Expand Up @@ -165,7 +164,7 @@ object GenerateProjection extends CodeGenerator[Seq[Expression], Projection] {
}
}

final class SpecificRow extends ${typeOf[BaseMutableRow]} {
final class SpecificRow extends ${typeOf[MutableRow]} {

$columns

Expand Down
Loading

0 comments on commit bdfb78f

Please sign in to comment.