Skip to content

Commit

Permalink
Fixing more tests
Browse files Browse the repository at this point in the history
  • Loading branch information
betodealmeida committed Nov 4, 2021
1 parent e0cea70 commit c8c5868
Show file tree
Hide file tree
Showing 11 changed files with 148 additions and 56 deletions.
10 changes: 6 additions & 4 deletions tests/integration_tests/cachekeys/api_tests.py
Original file line number Diff line number Diff line change
Expand Up @@ -22,6 +22,7 @@

from superset.extensions import cache_manager, db
from superset.models.cache import CacheKey
from superset.utils.core import get_example_default_schema
from tests.integration_tests.base_tests import (
SupersetTestCase,
post_assert_metric,
Expand Down Expand Up @@ -93,6 +94,7 @@ def test_invalidate_cache_bad_request(logged_in_admin):


def test_invalidate_existing_caches(logged_in_admin):
schema = get_example_default_schema() or ""
bn = SupersetTestCase.get_birth_names_dataset()

db.session.add(CacheKey(cache_key="cache_key1", datasource_uid="3__druid"))
Expand All @@ -113,25 +115,25 @@ def test_invalidate_existing_caches(logged_in_admin):
{
"datasource_name": "birth_names",
"database_name": "examples",
"schema": "",
"schema": schema,
"datasource_type": "table",
},
{ # table exists, no cache to invalidate
"datasource_name": "energy_usage",
"database_name": "examples",
"schema": "",
"schema": schema,
"datasource_type": "table",
},
{ # table doesn't exist
"datasource_name": "does_not_exist",
"database_name": "examples",
"schema": "",
"schema": schema,
"datasource_type": "table",
},
{ # database doesn't exist
"datasource_name": "birth_names",
"database_name": "does_not_exist",
"schema": "",
"schema": schema,
"datasource_type": "table",
},
{ # database doesn't exist
Expand Down
6 changes: 5 additions & 1 deletion tests/integration_tests/charts/api_tests.py
Original file line number Diff line number Diff line change
Expand Up @@ -56,6 +56,7 @@
AnnotationType,
ChartDataResultFormat,
get_example_database,
get_example_default_schema,
get_main_database,
)

Expand Down Expand Up @@ -541,6 +542,9 @@ def test_update_chart(self):
"""
Chart API: Test update
"""
schema = get_example_default_schema()
full_table_name = f"{schema}.birth_names" if schema else "birth_names"

admin = self.get_user("admin")
gamma = self.get_user("gamma")
birth_names_table_id = SupersetTestCase.get_table(name="birth_names").id
Expand Down Expand Up @@ -575,7 +579,7 @@ def test_update_chart(self):
self.assertEqual(model.cache_timeout, 1000)
self.assertEqual(model.datasource_id, birth_names_table_id)
self.assertEqual(model.datasource_type, "table")
self.assertEqual(model.datasource_name, "birth_names")
self.assertEqual(model.datasource_name, full_table_name)
self.assertIn(model.id, [slice.id for slice in related_dashboard.slices])
db.session.delete(model)
db.session.commit()
Expand Down
41 changes: 26 additions & 15 deletions tests/integration_tests/csv_upload_tests.py
Original file line number Diff line number Diff line change
Expand Up @@ -159,13 +159,16 @@ def upload_columnar(
filename: str, table_name: str, extra: Optional[Dict[str, str]] = None
):
columnar_upload_db_id = get_upload_db().id
schema = utils.get_example_default_schema()
form_data = {
"columnar_file": open(filename, "rb"),
"name": table_name,
"con": columnar_upload_db_id,
"if_exists": "fail",
"index_label": "test_label",
}
if schema:
form_data["schema"] = schema
if extra:
form_data.update(extra)
return get_resp(test_client, "/columnartodatabaseview/form", data=form_data)
Expand Down Expand Up @@ -259,14 +262,18 @@ def test_import_csv_enforced_schema(mock_event_logger):

@mock.patch("superset.db_engine_specs.hive.upload_to_s3", mock_upload_to_s3)
def test_import_csv_explore_database(setup_csv_upload, create_csv_files):
schema = utils.get_example_default_schema()
full_table_name = (
f"{schema}.{CSV_UPLOAD_TABLE_W_EXPLORE}"
if schema
else CSV_UPLOAD_TABLE_W_EXPLORE
)

if utils.backend() == "sqlite":
pytest.skip("Sqlite doesn't support schema / database creation")

resp = upload_csv(CSV_FILENAME1, CSV_UPLOAD_TABLE_W_EXPLORE)
assert (
f'CSV file "{CSV_FILENAME1}" uploaded to table "{CSV_UPLOAD_TABLE_W_EXPLORE}"'
in resp
)
assert f'CSV file "{CSV_FILENAME1}" uploaded to table "{full_table_name}"' in resp
table = SupersetTestCase.get_table(name=CSV_UPLOAD_TABLE_W_EXPLORE)
assert table.database_id == utils.get_example_database().id

Expand All @@ -276,9 +283,9 @@ def test_import_csv_explore_database(setup_csv_upload, create_csv_files):
@mock.patch("superset.db_engine_specs.hive.upload_to_s3", mock_upload_to_s3)
@mock.patch("superset.views.database.views.event_logger.log_with_context")
def test_import_csv(mock_event_logger):
success_msg_f1 = (
f'CSV file "{CSV_FILENAME1}" uploaded to table "{CSV_UPLOAD_TABLE}"'
)
schema = utils.get_example_default_schema()
full_table_name = f"{schema}.{CSV_UPLOAD_TABLE}" if schema else CSV_UPLOAD_TABLE
success_msg_f1 = f'CSV file "{CSV_FILENAME1}" uploaded to table "{full_table_name}"'

test_db = get_upload_db()

Expand All @@ -302,7 +309,7 @@ def test_import_csv(mock_event_logger):
mock_event_logger.assert_called_with(
action="successful_csv_upload",
database=test_db.name,
schema=None,
schema=schema,
table=CSV_UPLOAD_TABLE,
)

Expand Down Expand Up @@ -331,9 +338,7 @@ def test_import_csv(mock_event_logger):

# replace table from file with different schema
resp = upload_csv(CSV_FILENAME2, CSV_UPLOAD_TABLE, extra={"if_exists": "replace"})
success_msg_f2 = (
f'CSV file "{CSV_FILENAME2}" uploaded to table "{CSV_UPLOAD_TABLE}"'
)
success_msg_f2 = f'CSV file "{CSV_FILENAME2}" uploaded to table "{full_table_name}"'
assert success_msg_f2 in resp

table = SupersetTestCase.get_table(name=CSV_UPLOAD_TABLE)
Expand Down Expand Up @@ -423,9 +428,13 @@ def test_import_parquet(mock_event_logger):
if utils.backend() == "hive":
pytest.skip("Hive doesn't allow parquet upload.")

schema = utils.get_example_default_schema()
full_table_name = (
f"{schema}.{PARQUET_UPLOAD_TABLE}" if schema else PARQUET_UPLOAD_TABLE
)
test_db = get_upload_db()

success_msg_f1 = f'Columnar file "[\'{PARQUET_FILENAME1}\']" uploaded to table "{PARQUET_UPLOAD_TABLE}"'
success_msg_f1 = f'Columnar file "[\'{PARQUET_FILENAME1}\']" uploaded to table "{full_table_name}"'

# initial upload with fail mode
resp = upload_columnar(PARQUET_FILENAME1, PARQUET_UPLOAD_TABLE)
Expand All @@ -445,7 +454,7 @@ def test_import_parquet(mock_event_logger):
mock_event_logger.assert_called_with(
action="successful_columnar_upload",
database=test_db.name,
schema=None,
schema=schema,
table=PARQUET_UPLOAD_TABLE,
)

Expand All @@ -458,7 +467,7 @@ def test_import_parquet(mock_event_logger):
assert success_msg_f1 in resp

# make sure only specified column name was read
table = SupersetTestCase.get_table(name=PARQUET_UPLOAD_TABLE)
table = SupersetTestCase.get_table(name=PARQUET_UPLOAD_TABLE, schema=None)
assert "b" not in table.column_names

# upload again with replace mode
Expand All @@ -478,7 +487,9 @@ def test_import_parquet(mock_event_logger):
resp = upload_columnar(
ZIP_FILENAME, PARQUET_UPLOAD_TABLE, extra={"if_exists": "replace"}
)
success_msg_f2 = f'Columnar file "[\'{ZIP_FILENAME}\']" uploaded to table "{PARQUET_UPLOAD_TABLE}"'
success_msg_f2 = (
f'Columnar file "[\'{ZIP_FILENAME}\']" uploaded to table "{full_table_name}"'
)
assert success_msg_f2 in resp

data = (
Expand Down
19 changes: 16 additions & 3 deletions tests/integration_tests/datasets/api_tests.py
Original file line number Diff line number Diff line change
Expand Up @@ -35,7 +35,12 @@
)
from superset.extensions import db, security_manager
from superset.models.core import Database
from superset.utils.core import backend, get_example_database, get_main_database
from superset.utils.core import (
backend,
get_example_database,
get_example_default_schema,
get_main_database,
)
from superset.utils.dict_import_export import export_to_dict
from tests.integration_tests.base_tests import SupersetTestCase
from tests.integration_tests.conftest import CTAS_SCHEMA_NAME
Expand Down Expand Up @@ -134,7 +139,11 @@ def get_energy_usage_dataset():
example_db = get_example_database()
return (
db.session.query(SqlaTable)
.filter_by(database=example_db, table_name="energy_usage")
.filter_by(
database=example_db,
table_name="energy_usage",
schema=get_example_default_schema(),
)
.one()
)

Expand Down Expand Up @@ -243,7 +252,7 @@ def test_get_dataset_item(self):
"main_dttm_col": None,
"offset": 0,
"owners": [],
"schema": None,
"schema": get_example_default_schema(),
"sql": None,
"table_name": "energy_usage",
"template_params": None,
Expand Down Expand Up @@ -477,12 +486,15 @@ def test_create_dataset_validate_uniqueness(self):
"""
Dataset API: Test create dataset validate table uniqueness
"""
schema = get_example_default_schema()
energy_usage_ds = self.get_energy_usage_dataset()
self.login(username="admin")
table_data = {
"database": energy_usage_ds.database_id,
"table_name": energy_usage_ds.table_name,
}
if schema:
table_data["schema"] = schema
rv = self.post_assert_metric("/api/v1/dataset/", table_data, "post")
assert rv.status_code == 422
data = json.loads(rv.data.decode("utf-8"))
Expand Down Expand Up @@ -1446,6 +1458,7 @@ def test_export_dataset_bundle_gamma(self):
# gamma users by default do not have access to this dataset
assert rv.status_code == 404

@unittest.skip("Number of related objects depend on DB")
@pytest.mark.usefixtures("load_birth_names_dashboard_with_slices")
def test_get_dataset_related_objects(self):
"""
Expand Down
4 changes: 2 additions & 2 deletions tests/integration_tests/datasets/commands_tests.py
Original file line number Diff line number Diff line change
Expand Up @@ -30,7 +30,7 @@
from superset.datasets.commands.export import ExportDatasetsCommand
from superset.datasets.commands.importers import v0, v1
from superset.models.core import Database
from superset.utils.core import get_example_database
from superset.utils.core import get_example_database, get_example_default_schema
from tests.integration_tests.base_tests import SupersetTestCase
from tests.integration_tests.fixtures.energy_dashboard import (
load_energy_table_with_slice,
Expand Down Expand Up @@ -152,7 +152,7 @@ def test_export_dataset_command(self, mock_g):
],
"offset": 0,
"params": None,
"schema": None,
"schema": get_example_default_schema(),
"sql": None,
"table_name": "energy_usage",
"template_params": None,
Expand Down
20 changes: 15 additions & 5 deletions tests/integration_tests/datasource_tests.py
Original file line number Diff line number Diff line change
Expand Up @@ -27,7 +27,7 @@
from superset.datasets.commands.exceptions import DatasetNotFoundError
from superset.exceptions import SupersetGenericDBErrorException
from superset.models.core import Database
from superset.utils.core import get_example_database
from superset.utils.core import get_example_database, get_example_default_schema
from tests.integration_tests.base_tests import db_insert_temp_object, SupersetTestCase
from tests.integration_tests.fixtures.birth_names_dashboard import (
load_birth_names_dashboard_with_slices,
Expand All @@ -37,18 +37,21 @@

@contextmanager
def create_test_table_context(database: Database):
schema = get_example_default_schema()
full_table_name = f"{schema}.test_table" if schema else "test_table"

database.get_sqla_engine().execute(
"CREATE TABLE test_table AS SELECT 1 as first, 2 as second"
f"CREATE TABLE IF NOT EXISTS {full_table_name} AS SELECT 1 as first, 2 as second"
)
database.get_sqla_engine().execute(
"INSERT INTO test_table (first, second) VALUES (1, 2)"
f"INSERT INTO {full_table_name} (first, second) VALUES (1, 2)"
)
database.get_sqla_engine().execute(
"INSERT INTO test_table (first, second) VALUES (3, 4)"
f"INSERT INTO {full_table_name} (first, second) VALUES (3, 4)"
)

yield db.session
database.get_sqla_engine().execute("DROP TABLE test_table")
database.get_sqla_engine().execute(f"DROP TABLE {full_table_name}")


class TestDatasource(SupersetTestCase):
Expand All @@ -75,6 +78,7 @@ def test_external_metadata_for_virtual_table(self):
table = SqlaTable(
table_name="dummy_sql_table",
database=get_example_database(),
schema=get_example_default_schema(),
sql="select 123 as intcol, 'abc' as strcol",
)
session.add(table)
Expand Down Expand Up @@ -112,6 +116,7 @@ def test_external_metadata_by_name_for_virtual_table(self):
table = SqlaTable(
table_name="dummy_sql_table",
database=get_example_database(),
schema=get_example_default_schema(),
sql="select 123 as intcol, 'abc' as strcol",
)
session.add(table)
Expand Down Expand Up @@ -141,6 +146,7 @@ def test_external_metadata_by_name_from_sqla_inspector(self):
"datasource_type": "table",
"database_name": example_database.database_name,
"table_name": "test_table",
"schema_name": get_example_default_schema(),
}
)
url = f"/datasource/external_metadata_by_name/?q={params}"
Expand Down Expand Up @@ -188,6 +194,7 @@ def test_external_metadata_for_virtual_table_template_params(self):
table = SqlaTable(
table_name="dummy_sql_table_with_template_params",
database=get_example_database(),
schema=get_example_default_schema(),
sql="select {{ foo }} as intcol",
template_params=json.dumps({"foo": "123"}),
)
Expand All @@ -206,6 +213,7 @@ def test_external_metadata_for_malicious_virtual_table(self):
table = SqlaTable(
table_name="malicious_sql_table",
database=get_example_database(),
schema=get_example_default_schema(),
sql="delete table birth_names",
)
with db_insert_temp_object(table):
Expand All @@ -218,6 +226,7 @@ def test_external_metadata_for_mutistatement_virtual_table(self):
table = SqlaTable(
table_name="multistatement_sql_table",
database=get_example_database(),
schema=get_example_default_schema(),
sql="select 123 as intcol, 'abc' as strcol;"
"select 123 as intcol, 'abc' as strcol",
)
Expand Down Expand Up @@ -269,6 +278,7 @@ def test_save(self):
elif k == "database":
self.assertEqual(resp[k]["id"], datasource_post[k]["id"])
else:
print(k)
self.assertEqual(resp[k], datasource_post[k])

def save_datasource_from_dict(self, datasource_post):
Expand Down
2 changes: 1 addition & 1 deletion tests/integration_tests/fixtures/datasource.py
Original file line number Diff line number Diff line change
Expand Up @@ -30,7 +30,7 @@ def get_datasource_post() -> Dict[str, Any]:
"description": "Adding a DESCRip",
"default_endpoint": "",
"filter_select_enabled": True,
"name": "birth_names",
"name": f"{schema}.birth_names" if schema else "birth_names",
"table_name": "birth_names",
"datasource_name": "birth_names",
"type": "table",
Expand Down
7 changes: 5 additions & 2 deletions tests/integration_tests/fixtures/world_bank_dashboard.py
Original file line number Diff line number Diff line change
Expand Up @@ -29,7 +29,7 @@
from superset.models.core import Database
from superset.models.dashboard import Dashboard
from superset.models.slice import Slice
from superset.utils.core import get_example_database
from superset.utils.core import get_example_database, get_example_default_schema
from tests.integration_tests.dashboard_utils import (
create_dashboard,
create_table_for_dashboard,
Expand Down Expand Up @@ -58,14 +58,17 @@ def _load_data():

with app.app_context():
database = get_example_database()
schema = get_example_default_schema()
df = _get_dataframe(database)
dtype = {
"year": DateTime if database.backend != "presto" else String(255),
"country_code": String(3),
"country_name": String(255),
"region": String(255),
}
table = create_table_for_dashboard(df, table_name, database, dtype)
table = create_table_for_dashboard(
df, table_name, database, dtype, schema=schema
)
slices = _create_world_bank_slices(table)
dash = _create_world_bank_dashboard(table, slices)
slices_ids_to_delete = [slice.id for slice in slices]
Expand Down

0 comments on commit c8c5868

Please sign in to comment.