From 7972d3b7f0e18d4ee693cf5dfa9867005e348dda Mon Sep 17 00:00:00 2001
From: Ahir Reddy
Date: Mon, 4 Aug 2014 11:31:43 -0700
Subject: [PATCH] Correctly set JVM HiveContext if it is passed into Python
 HiveContext constructor

---
 python/pyspark/sql.py | 14 ++++++++++++++
 1 file changed, 14 insertions(+)

diff --git a/python/pyspark/sql.py b/python/pyspark/sql.py
index 1a829c6fafe03..633ac6690b6fb 100644
--- a/python/pyspark/sql.py
+++ b/python/pyspark/sql.py
@@ -891,6 +891,8 @@ def __init__(self, sparkContext, sqlContext=None):
         """Create a new SQLContext.
 
         @param sparkContext: The SparkContext to wrap.
+        @param sqlContext: An optional JVM Scala SQLContext. If set, we do not instantiate a new
+        SQLContext in the JVM; instead we make all calls to this object.
 
         >>> srdd = sqlCtx.inferSchema(rdd)
         >>> sqlCtx.inferSchema(srdd) # doctest: +IGNORE_EXCEPTION_DETAIL
@@ -1275,6 +1277,18 @@ class HiveContext(SQLContext):
     It supports running both SQL and HiveQL commands.
     """
 
+    def __init__(self, sparkContext, hiveContext=None):
+        """Create a new HiveContext.
+
+        @param sparkContext: The SparkContext to wrap.
+        @param hiveContext: An optional JVM Scala HiveContext. If set, we do not instantiate a new
+        HiveContext in the JVM; instead we make all calls to this object.
+        """
+        SQLContext.__init__(self, sparkContext)
+
+        if hiveContext:
+            self._scala_HiveContext = hiveContext
+
     @property
     def _ssql_ctx(self):
         try:
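
For reviewers, a minimal usage sketch of the constructor path this patch enables. The variable names and the app name below are illustrative, and it assumes a Spark build with Hive support on the classpath; only the HiveContext(sc, ...) signature comes from the patch itself.

from pyspark import SparkContext
from pyspark.sql import HiveContext

sc = SparkContext(appName="hive-context-wrap")

# Default path: PySpark lazily creates its own JVM HiveContext on first use.
hive_ctx = HiveContext(sc)

# Path fixed by this patch: wrap an already existing JVM Scala HiveContext
# (created here through Py4J purely for illustration). The new constructor
# stores it in self._scala_HiveContext, so subsequent calls go to this
# object instead of instantiating a second HiveContext in the JVM.
jvm_hive_ctx = sc._jvm.org.apache.spark.sql.hive.HiveContext(sc._jsc.sc())
wrapped_ctx = HiveContext(sc, jvm_hive_ctx)
wrapped_ctx.sql("SHOW TABLES")

Keeping the handle on self._scala_HiveContext means the existing _ssql_ctx property can simply reuse it when present, rather than building a fresh JVM HiveContext.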