Skip to content

Commit

Permalink
[SPARK-3458] enable python "with" statements for SparkContext
Browse files Browse the repository at this point in the history
Allow best-practice cleanup code,

try:
  sc = SparkContext()
  app(sc)
finally:
  sc.stop()

to be written using a "with" statement,

with SparkContext() as sc:
  app(sc)
  • Loading branch information
mattf committed Sep 9, 2014
1 parent c419e4f commit 5b4e37c
Show file tree
Hide file tree
Showing 2 changed files with 43 additions and 0 deletions.
14 changes: 14 additions & 0 deletions python/pyspark/context.py
Original file line number Diff line number Diff line change
Expand Up @@ -232,6 +232,20 @@ def _ensure_initialized(cls, instance=None, gateway=None):
else:
SparkContext._active_spark_context = instance

def __enter__(self):
    """
    Enter the runtime context, enabling
    'with SparkContext(...) as sc: app(sc)' syntax.

    Returns this SparkContext instance so it is bound to the
    'as' target of the with statement.
    """
    return self

def __exit__(self, exc_type, exc_value, exc_traceback):
    """
    Exit the runtime context for 'with SparkContext(...) as sc: app' syntax.

    Stops the context unconditionally on exit of the with block.
    Implicitly returns None (falsy), so any exception raised inside the
    with block is propagated to the caller rather than suppressed.

    Parameters are renamed from the original (type, value, trace) to the
    conventional exc_type/exc_value/exc_traceback so the builtin ``type``
    is not shadowed; the interpreter always calls __exit__ positionally,
    so this is caller-compatible.
    """
    self.stop()

@classmethod
def setSystemProperty(cls, key, value):
"""
Expand Down
29 changes: 29 additions & 0 deletions python/pyspark/tests.py
Original file line number Diff line number Diff line change
Expand Up @@ -1254,6 +1254,35 @@ def test_single_script_on_cluster(self):
self.assertIn("[2, 4, 6]", out)


class ContextStopTests(unittest.TestCase):
    """Verify that SparkContext.stop() clears the module-level active
    context, whether called directly or via the context-manager protocol
    ('with' statement)."""

    def test_stop(self):
        # An explicit stop() must clear SparkContext._active_spark_context.
        sc = SparkContext()
        self.assertNotEqual(SparkContext._active_spark_context, None)
        sc.stop()
        self.assertEqual(SparkContext._active_spark_context, None)

    def test_with(self):
        # Leaving the with block normally must stop the context.
        with SparkContext() as sc:
            self.assertNotEqual(SparkContext._active_spark_context, None)
        self.assertEqual(SparkContext._active_spark_context, None)

    def test_with_exception(self):
        # Leaving the with block via an exception must still stop the
        # context, and the exception must propagate out of the block.
        try:
            with SparkContext() as sc:
                self.assertNotEqual(SparkContext._active_spark_context, None)
                raise Exception()
        # Narrowed from a bare 'except:' so SystemExit/KeyboardInterrupt
        # raised during the test run are not silently swallowed.
        except Exception:
            pass
        self.assertEqual(SparkContext._active_spark_context, None)

    def test_with_stop(self):
        # An explicit stop() inside the with block must be harmless even
        # though __exit__ calls stop() again on exit.
        with SparkContext() as sc:
            self.assertNotEqual(SparkContext._active_spark_context, None)
            sc.stop()
        self.assertEqual(SparkContext._active_spark_context, None)


@unittest.skipIf(not _have_scipy, "SciPy not installed")
class SciPyTests(PySparkTestCase):

Expand Down

0 comments on commit 5b4e37c

Please sign in to comment.