Commit dedda56

merge remote

mengxr committed Nov 1, 2014
2 parents 5ef930a + c35203f
Showing 11 changed files with 90 additions and 17 deletions.
@@ -204,6 +204,9 @@ class MetadataBuilder {

   private val map: mutable.Map[String, Any] = mutable.Map.empty
 
+  /** Returns the immutable version of this map. Used for Java interop. */
+  protected def getMap = map.toMap
+
   /** Include the content of an existing [[Metadata]] instance. */
   def withMetadata(metadata: Metadata): this.type = {
     map ++= metadata.map
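For context, a minimal sketch of how this builder is used, assuming the existing putString method that this hunk does not show; withMetadata copies another instance's entries into the builder, and on a key collision the value written last wins:

import org.apache.spark.sql.catalyst.util.{Metadata, MetadataBuilder}

// Build a Metadata instance from simple key/value pairs.
val base: Metadata = new MetadataBuilder()
  .putString("comment", "original")
  .build()

// withMetadata copies base's entries into the builder's mutable map;
// putting the same key afterwards overwrites the copied value.
val merged: Metadata = new MetadataBuilder()
  .withMetadata(base)
  .putString("comment", "overridden")
  .build()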
@@ -19,8 +19,6 @@

 import java.util.*;
 
-import org.apache.spark.sql.catalyst.util.Metadata;
-
 /**
  * The base type of all Spark SQL data types.
  *
@@ -175,7 +173,7 @@ public static StructField createStructField(
    * @see #createStructField(String, DataType, boolean, Metadata)
    */
   public static StructField createStructField(String name, DataType dataType, boolean nullable) {
-    return createStructField(name, dataType, nullable, Metadata.empty());
+    return createStructField(name, dataType, nullable, (new MetadataBuilder()).build());
   }
 
   /**
@@ -207,5 +205,4 @@ public static StructType createStructType(StructField[] fields) {
 
     return new StructType(fields);
   }
-
 }
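A quick sketch (Scala, driving the Java API the way the test suites below do) of what the changed overload preserves: the three-argument factory should be equivalent to passing an explicitly built empty metadata, assuming StructField equality takes metadata into account:

import org.apache.spark.sql.api.java.{DataType => JDataType, MetadataBuilder => JMetadataBuilder}

// The three-argument overload now fills in empty metadata via the builder.
val withDefault = JDataType.createStructField("word", JDataType.StringType, true)
val withEmptyMetadata = JDataType.createStructField(
  "word", JDataType.StringType, true, new JMetadataBuilder().build())

// Both fields carry empty metadata, so they should compare equal.
assert(withDefault == withEmptyMetadata)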
31 changes: 31 additions & 0 deletions sql/core/src/main/java/org/apache/spark/sql/api/java/Metadata.java
@@ -0,0 +1,31 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.spark.sql.api.java;
+
+/**
+ * Metadata is a wrapper over Map[String, Any] that limits the value type to simple ones: Boolean,
+ * Long, Double, String, Metadata, Array[Boolean], Array[Long], Array[Double], Array[String], and
+ * Array[Metadata]. JSON is used for serialization.
+ *
+ * The default constructor is private. Users should use [[MetadataBuilder]].
+ */
+class Metadata extends org.apache.spark.sql.catalyst.util.Metadata {
+  Metadata(scala.collection.immutable.Map<String, Object> map) {
+    super(map);
+  }
+}
28 changes: 28 additions & 0 deletions sql/core/src/main/java/org/apache/spark/sql/api/java/MetadataBuilder.java
@@ -0,0 +1,28 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.spark.sql.api.java;
+
+/**
+ * Builder for [[Metadata]]. If there is a key collision, the latter will overwrite the former.
+ */
+public class MetadataBuilder extends org.apache.spark.sql.catalyst.util.MetadataBuilder {
+  @Override
+  public Metadata build() {
+    return new Metadata(getMap());
+  }
+}
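A small sketch of the design: the Java builder inherits the put methods and the new getMap hook from the catalyst builder and only overrides build() to wrap the same immutable map in the Java Metadata subclass, so, assuming Metadata compares by its underlying map, a Java-built instance should equal a Scala-built one:

import org.apache.spark.sql.api.java.{MetadataBuilder => JMetadataBuilder}
import org.apache.spark.sql.catalyst.util.{Metadata, MetadataBuilder}

// Same entries, built through the Java wrapper and the Scala original.
val fromJava: Metadata = new JMetadataBuilder().putString("k", "v").build()
val fromScala: Metadata = new MetadataBuilder().putString("k", "v").build()

// Both wrap an equal immutable map, so they should compare equal.
assert(fromJava == fromScala)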
@@ -19,8 +19,6 @@

 import java.util.Map;
 
-import org.apache.spark.sql.catalyst.util.Metadata;
-
 /**
  * A StructField object represents a field in a StructType object.
  * A StructField object comprises three fields, {@code String name}, {@code DataType dataType},
@@ -32,7 +32,6 @@ import org.apache.spark.sql.catalyst.expressions._
 import org.apache.spark.sql.catalyst.optimizer.{Optimizer, DefaultOptimizer}
 import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
 import org.apache.spark.sql.catalyst.rules.RuleExecutor
-import org.apache.spark.sql.catalyst.types.DataType
 import org.apache.spark.sql.execution.{SparkStrategies, _}
 import org.apache.spark.sql.json._
 import org.apache.spark.sql.parquet.ParquetRelation
23 changes: 23 additions & 0 deletions sql/core/src/main/scala/org/apache/spark/sql/package.scala
@@ -125,6 +125,9 @@ package object sql {
   @DeveloperApi
   type DataType = catalyst.types.DataType
 
+  @DeveloperApi
+  val DataType = catalyst.types.DataType
+
   /**
    * :: DeveloperApi ::
    *
@@ -414,4 +417,24 @@ package object sql {
    */
   @DeveloperApi
   val StructField = catalyst.types.StructField
+
+  /**
+   * :: DeveloperApi ::
+   *
+   * Metadata is a wrapper over Map[String, Any] that limits the value type to simple ones: Boolean,
+   * Long, Double, String, Metadata, Array[Boolean], Array[Long], Array[Double], Array[String], and
+   * Array[Metadata]. JSON is used for serialization.
+   *
+   * The default constructor is private. Users should use either [[MetadataBuilder]] or
+   * [[Metadata$#fromJson]] to create Metadata instances.
+   *
+   * @param map an immutable map that stores the data
+   */
+  @DeveloperApi
+  type Metadata = catalyst.util.Metadata
+
+  /**
+   * Builder for [[Metadata]]. If there is a key collision, the latter will overwrite the former.
+   */
+  type MetadataBuilder = catalyst.util.MetadataBuilder
 }
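With these aliases exported, Scala users can stay entirely within org.apache.spark.sql. A minimal sketch, assuming the IntegerType alias already exported by this package object and the metadata-carrying StructField constructor from this patch series:

import org.apache.spark.sql._

val md: Metadata = new MetadataBuilder()
  .putString("comment", "age of the person")
  .putLong("max", 150L)
  .build()

// Attach the metadata to a field via the StructField alias above.
val ageField = StructField("age", IntegerType, nullable = true, md)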
@@ -18,7 +18,7 @@
 package org.apache.spark.sql.types.util
 
 import org.apache.spark.sql._
-import org.apache.spark.sql.api.java.{DataType => JDataType, StructField => JStructField}
+import org.apache.spark.sql.api.java.{DataType => JDataType, StructField => JStructField, MetadataBuilder => JMetaDataBuilder}
 
 import scala.collection.JavaConverters._

@@ -32,7 +32,7 @@ protected[sql] object DataTypeConversions {
       scalaStructField.name,
       asJavaDataType(scalaStructField.dataType),
       scalaStructField.nullable,
-      scalaStructField.metadata)
+      (new JMetaDataBuilder).withMetadata(scalaStructField.metadata).build())
   }
 
   /**
@@ -19,9 +19,6 @@ package org.apache.spark.sql

 import org.scalatest.FunSuite
 
-import org.apache.spark.sql.catalyst.types.DataType
-import org.apache.spark.sql.catalyst.util.MetadataBuilder
-
 class DataTypeSuite extends FunSuite {
 
   test("construct an ArrayType") {
@@ -25,8 +25,7 @@ import org.apache.spark.sql.TestData._
 import org.apache.spark.sql.catalyst.errors.TreeNodeException
 import org.apache.spark.sql.catalyst.expressions._
 import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
-import org.apache.spark.sql.execution.joins.BroadcastHashJoin
-import org.apache.spark.sql.catalyst.util.MetadataBuilder
+
 import org.apache.spark.sql.test.TestSQLContext._
 
 class SQLQuerySuite extends QueryTest with BeforeAndAfterAll {
@@ -19,9 +19,7 @@ package org.apache.spark.sql.api.java

 import org.scalatest.FunSuite
 
-import org.apache.spark.sql.{DataType => SDataType, StructField => SStructField,
-  StructType => SStructType}
-import org.apache.spark.sql.catalyst.util.MetadataBuilder
+import org.apache.spark.sql.{DataType => SDataType, StructField => SStructField, StructType => SStructType}
 import org.apache.spark.sql.types.util.DataTypeConversions._
 
 class ScalaSideDataTypeConversionSuite extends FunSuite {