Skip to content

Commit

Permalink
OPS-5046 Fix Spark 3.4.0 tests
Browse files Browse the repository at this point in the history
  • Loading branch information
xuzeng012 committed Jun 22, 2023
1 parent d186b08 commit a6a47bf
Show file tree
Hide file tree
Showing 3 changed files with 14 additions and 4 deletions.
2 changes: 1 addition & 1 deletion tests/integration/base.py
Original file line number Diff line number Diff line change
Expand Up @@ -26,7 +26,7 @@ class SparklyTestSession(SparklySession):
packages = [
'com.datastax.spark:spark-cassandra-connector_2.12:3.2.0',
'org.elasticsearch:elasticsearch-spark-30_2.12:7.17.8',
'org.apache.spark:spark-sql-kafka-0-10_2.12:3.3.1',
'org.apache.spark:spark-sql-kafka-0-10_2.12:{}'.format(os.getenv('spark_version')),
'mysql:mysql-connector-java:8.0.31',
'io.confluent:kafka-avro-serializer:3.0.1',
]
Expand Down
6 changes: 3 additions & 3 deletions tests/integration/test_catalog.py
Original file line number Diff line number Diff line change
Expand Up @@ -108,11 +108,11 @@ def test_rename_table_non_default_db(self):
self.assertTrue(self.spark.catalog_ext.has_table('test_db.test_table'))
self.assertFalse(self.spark.catalog_ext.has_table('test_db.new_test_table'))

self.spark.catalog_ext.rename_table('test_db.test_table', 'new_test_table')
self.spark.catalog_ext.rename_table('test_db.test_table', 'test_db.new_test_table')

self.assertFalse(self.spark.catalog_ext.has_table('test_db.test_table'))
self.assertTrue(self.spark.catalog_ext.has_table('default.new_test_table'))
self.assertEqual(self.spark.table('default.new_test_table').count(), 2)
self.assertTrue(self.spark.catalog_ext.has_table('test_db.new_test_table'))
self.assertEqual(self.spark.table('test_db.new_test_table').count(), 2)

def test_get_table_properties(self):
properties = self.spark.catalog_ext.get_table_properties('test_table')
Expand Down
10 changes: 10 additions & 0 deletions tox.ini
Original file line number Diff line number Diff line change
Expand Up @@ -24,6 +24,8 @@ deps =
-rrequirements_dev.txt
-rrequirements_extras.txt
pyspark==3.2.3
setenv =
spark_version = 3.2.3

[testenv:spark33]
commands = py.test --cov=sparkly --cov-report term-missing tests/integration tests/unit
Expand All @@ -32,6 +34,8 @@ deps =
-rrequirements_dev.txt
-rrequirements_extras.txt
pyspark==3.3.1
setenv =
spark_version = 3.3.1

[testenv:spark34]
commands = py.test --cov=sparkly --cov-report term-missing tests/integration tests/unit
Expand All @@ -40,13 +44,17 @@ deps =
-rrequirements_dev.txt
-rrequirements_extras.txt
pyspark==3.4.0
setenv =
spark_version = 3.4.0

[testenv:no_extras]
commands = py.test tests/no_extras
deps =
-rrequirements.txt
-rrequirements_dev.txt
pyspark==3.3.1
setenv =
spark_version = 3.3.1

[testenv:docs]
commands = sphinx-build -b html docs/source docs/build
Expand All @@ -55,3 +63,5 @@ deps =
-rrequirements_extras.txt
-rrequirements.txt
pyspark==3.3.1
setenv =
spark_version = 3.3.1

0 comments on commit a6a47bf

Please sign in to comment.