Commit

removed pi and e from python api and dataframe api; added _to_java_column(col) for strlen

tarekbecker committed Jun 30, 2015
1 parent 4d07318 commit 66f0d2b
Showing 2 changed files with 3 additions and 55 deletions.
40 changes: 3 additions & 37 deletions python/pyspark/sql/functions.py
@@ -38,11 +38,9 @@
     'bin',
     'coalesce',
     'countDistinct',
-    'e',
     'explode',
     'md5',
     'monotonicallyIncreasingId',
-    'pi',
     'rand',
     'randn',
     'sha1',
@@ -302,22 +300,6 @@ def countDistinct(col, *cols):
return Column(jc)


-@since(1.5)
-def e():
-    """
-    Returns the float value that is closer than any other to e, the base of the natural
-    logarithms.
-    >>> e = sqlContext.createDataFrame([(1,)], ['a']).select((e()).alias('e')).collect()
-    >>> round(e[0][0], 10)
-    2.7182818285
-    """
-    sc = SparkContext._active_spark_context
-    jc = sc._jvm.functions.e()
-    return Column(jc)


@since(1.4)
def explode(col):
"""Returns a new row for each element in the given array or map.
@@ -373,22 +355,6 @@ def monotonicallyIncreasingId():
return Column(sc._jvm.functions.monotonicallyIncreasingId())


-@since(1.5)
-def pi():
-    """
-    Returns the float value that is closer than any other to pi, the ratio of the circumference
-    of a circle to its diameter.
-    >>> pi = sqlContext.createDataFrame([(1,)], ['a']).select((pi()).alias('pi')).collect()
-    >>> round(pi[0][0], 10)
-    3.1415926536
-    """
-    sc = SparkContext._active_spark_context
-    jc = sc._jvm.functions.pi()
-    return Column(jc)


@since(1.4)
def rand(seed=None):
"""Generates a random column with i.i.d. samples from U[0.0, 1.0].
@@ -462,11 +428,11 @@ def sparkPartitionId():
 def strlen(col):
     """Calculates the length of a string expression.
-    >>> sqlContext.createDataFrame([('ABC',)], ['a']).select(strlen('a').alias('e')).collect()
-    [Row(e=3)]
+    >>> sqlContext.createDataFrame([('ABC',)], ['a']).select(strlen('a').alias('length')).collect()
+    [Row(length=3)]
     """
     sc = SparkContext._active_spark_context
-    return Column(sc._jvm.functions.strlen(col))
+    return Column(sc._jvm.functions.strlen(_to_java_column(col)))


@ignore_unicode_prefix
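A minimal usage sketch of the strlen change above, assuming a live sqlContext as in the doctests and that _to_java_column is the same helper the other wrappers in this module use to convert either a column name or a Column into its JVM counterpart:

# Sketch only: after wrapping the argument in _to_java_column(col),
# strlen should accept both a column name and a Column object.
from pyspark.sql.functions import col, strlen

df = sqlContext.createDataFrame([('ABC',)], ['a'])
df.select(strlen('a').alias('length')).collect()       # column given by name
df.select(strlen(col('a')).alias('length')).collect()  # column given as a Column
# Expected in both cases: [Row(length=3)]
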
18 changes: 0 additions & 18 deletions sql/core/src/main/scala/org/apache/spark/sql/functions.scala
@@ -989,15 +989,6 @@ object functions {
*/
def cosh(columnName: String): Column = cosh(Column(columnName))

-  /**
-   * Returns the double value that is closer than any other to e, the base of the natural
-   * logarithms.
-   *
-   * @group math_funcs
-   * @since 1.5.0
-   */
-  def e(): Column = EulerNumber()

/**
* Computes the exponential of the given value.
*
@@ -1191,15 +1182,6 @@
*/
def log1p(columnName: String): Column = log1p(Column(columnName))

-  /**
-   * Returns the double value that is closer than any other to pi, the ratio of the circumference
-   * of a circle to its diameter.
-   *
-   * @group math_funcs
-   * @since 1.5.0
-   */
-  def pi(): Column = Pi()

/**
* Computes the logarithm of the given column in base 2.
*
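Since e() and pi() are removed from both the Python and DataFrame APIs by this commit, one possible workaround (not part of this commit; a hedged sketch that relies only on the existing lit() function and Python's math module) is to pass the constants as literal columns:

# Sketch only: literal columns carrying the math constants.
import math
from pyspark.sql.functions import lit

df = sqlContext.createDataFrame([(1,)], ['a'])
df.select(lit(math.e).alias('e'), lit(math.pi).alias('pi')).collect()
# Roughly: [Row(e=2.718281828459045, pi=3.141592653589793)]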
