Skip to content

Commit

Permalink
organize imports
Browse files Browse the repository at this point in the history
  • Loading branch information
mengxr committed Oct 14, 2014
1 parent 1fcbf13 commit c9d7301
Show file tree
Hide file tree
Showing 5 changed files with 14 additions and 19 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -19,23 +19,21 @@ package org.apache.spark.sql.catalyst.types

import java.sql.Timestamp

import org.apache.spark.sql.catalyst.util.Metadata

import scala.math.Numeric.{BigDecimalAsIfIntegral, DoubleAsIfIntegral, FloatAsIfIntegral}
import scala.reflect.ClassTag
import scala.reflect.runtime.universe.{TypeTag, runtimeMirror, typeTag}
import scala.util.parsing.combinator.RegexParsers

import org.json4s.JsonAST.JValue
import org.json4s._
import org.json4s.JsonAST.JValue
import org.json4s.JsonDSL._
import org.json4s.jackson.JsonMethods._

import org.apache.spark.sql.catalyst.ScalaReflectionLock
import org.apache.spark.sql.catalyst.expressions.{Attribute, AttributeReference, Expression}
import org.apache.spark.sql.catalyst.util.Metadata
import org.apache.spark.util.Utils


object DataType {
def fromJson(json: String): DataType = parseDataType(parse(json))

Expand Down Expand Up @@ -380,8 +378,7 @@ case class ArrayType(elementType: DataType, containsNull: Boolean) extends DataT
* @param name The name of this field.
* @param dataType The data type of this field.
* @param nullable Indicates if values of this field can be `null` values.
* @param metadata The metadata of this field, which is a map from string to simple type that can be
* serialized to JSON automatically. The metadata should be preserved during
* @param metadata The metadata of this field. The metadata should be preserved during
*                 transformation if the content of the column is not modified, e.g., in selection.
*/
case class StructField(
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -17,10 +17,10 @@

package org.apache.spark.sql.api.java;

import org.apache.spark.sql.catalyst.util.Metadata;

import java.util.*;

import org.apache.spark.sql.catalyst.util.Metadata;

/**
* The base type of all Spark SQL data types.
*
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -17,22 +17,21 @@

package org.apache.spark.sql.api.java;

import org.apache.spark.sql.catalyst.util.Metadata;

import java.util.Map;

import org.apache.spark.sql.catalyst.util.Metadata;

/**
* A StructField object represents a field in a StructType object.
* A StructField object comprises three fields, {@code String name}, {@code DataType dataType},
* and {@code boolean nullable}. The field of {@code name} is the name of a StructField.
* The field of {@code dataType} specifies the data type of a StructField.
* The field of {@code nullable} specifies if values of a StructField can contain {@code null}
* values.
* The field of {@code metadata} provides extra information of the StructField, which is a map from
* string to simple type that can be serialized to JSON automatically
* The field of {@code metadata} provides extra information of the StructField.
*
* To create a {@link StructField},
* {@link DataType#createStructField(String, DataType, boolean, Map)}
* {@link DataType#createStructField(String, DataType, boolean, Metadata)}
* should be used.
*/
public class StructField {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -17,10 +17,10 @@

package org.apache.spark.sql

import org.apache.spark.sql.catalyst.util.MetadataBuilder
import org.scalatest.FunSuite

import org.apache.spark.sql.catalyst.types.DataType
import org.apache.spark.sql.catalyst.util.MetadataBuilder

class DataTypeSuite extends FunSuite {

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -17,13 +17,12 @@

package org.apache.spark.sql.api.java

import org.apache.spark.sql.catalyst.util.MetadataBuilder
import org.apache.spark.sql.types.util.DataTypeConversions
import org.scalatest.FunSuite

import org.apache.spark.sql.{DataType => SDataType, StructField => SStructField}
import org.apache.spark.sql.{StructType => SStructType}
import DataTypeConversions._
import org.apache.spark.sql.{DataType => SDataType, StructField => SStructField,
StructType => SStructType}
import org.apache.spark.sql.catalyst.util.MetadataBuilder
import org.apache.spark.sql.types.util.DataTypeConversions._

class ScalaSideDataTypeConversionSuite extends FunSuite {

Expand Down

0 comments on commit c9d7301

Please sign in to comment.