Skip to content

Commit

Permalink
Fix tests in Python 2.6
Browse files Browse the repository at this point in the history
  • Loading branch information
Davies Liu committed Feb 3, 2015
1 parent 35ccb9f commit 8dd19a9
Show file tree
Hide file tree
Showing 2 changed files with 14 additions and 8 deletions.
21 changes: 14 additions & 7 deletions python/pyspark/sql_tests.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,10 +20,19 @@
individual modules.
"""
import os
import sys
import pydoc
import shutil
import tempfile
import unittest

# Python 2.6's bundled unittest lacks features these tests rely on
# (e.g. setUpClass/skip support), so fall back to the unittest2 backport there.
if sys.version_info[:2] <= (2, 6):
    try:
        import unittest2 as unittest
    except ImportError:
        # Fix: terminate the message with a newline so it doesn't run into
        # the shell prompt / subsequent output.
        sys.stderr.write('Please install unittest2 to test with Python 2.6 or earlier\n')
        sys.exit(1)
else:
    import unittest

from pyspark.sql import SQLContext, IntegerType, Row, ArrayType, StructType, StructField, \
UserDefinedType, DoubleType
Expand Down Expand Up @@ -83,18 +92,16 @@ def setUpClass(cls):
ReusedPySparkTestCase.setUpClass()
cls.tempdir = tempfile.NamedTemporaryFile(delete=False)
os.unlink(cls.tempdir.name)
cls.sqlCtx = SQLContext(cls.sc)
cls.testData = [Row(key=i, value=str(i)) for i in range(100)]
rdd = cls.sc.parallelize(cls.testData)
cls.df = cls.sqlCtx.inferSchema(rdd)

    @classmethod
    def tearDownClass(cls):
        """Release class-level fixtures: run the parent teardown, then
        remove the temporary directory created in setUpClass."""
        # Parent teardown first, mirroring the setUpClass ordering.
        ReusedPySparkTestCase.tearDownClass()
        # Best-effort removal: ignore_errors covers the case where the
        # path was never (re)created after the unlink in setUpClass.
        shutil.rmtree(cls.tempdir.name, ignore_errors=True)

def setUp(self):
self.sqlCtx = SQLContext(self.sc)
self.testData = [Row(key=i, value=str(i)) for i in range(100)]
rdd = self.sc.parallelize(self.testData)
self.df = self.sqlCtx.inferSchema(rdd)

def test_udf(self):
self.sqlCtx.registerFunction("twoArgs", lambda x, y: len(x) + y, IntegerType())
[row] = self.sqlCtx.sql("SELECT twoArgs('test', 1)").collect()
Expand Down
1 change: 0 additions & 1 deletion python/pyspark/tests.py
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,6 @@
from fileinput import input
from glob import glob
import os
import pydoc
import re
import shutil
import subprocess
Expand Down

0 comments on commit 8dd19a9

Please sign in to comment.