diff --git a/superset/assets/javascripts/SqlLab/actions.js b/superset/assets/javascripts/SqlLab/actions.js index 6d62436fcbe9..6a01a92026c9 100644 --- a/superset/assets/javascripts/SqlLab/actions.js +++ b/superset/assets/javascripts/SqlLab/actions.js @@ -252,11 +252,12 @@ export function addTable(query, tableName, schemaName) { queryEditorId: query.id, schema: schemaName, name: tableName, + }; + dispatch(mergeTable(Object.assign({}, table, { isMetadataLoading: true, isExtraMetadataLoading: true, expanded: false, - }; - dispatch(mergeTable(table)); + }))); let url = `/superset/table/${query.dbId}/${tableName}/${schemaName}/`; $.get(url, (data) => { @@ -271,15 +272,19 @@ export function addTable(query, tableName, schemaName) { ctas: false, }; // Merge table to tables in state - table = Object.assign({}, table, data, { + const newTable = Object.assign({}, table, data, { expanded: true, isMetadataLoading: false, }); - dispatch(mergeTable(table, dataPreviewQuery)); + dispatch(mergeTable(newTable, dataPreviewQuery)); // Run query to get preview data for table dispatch(runQuery(dataPreviewQuery)); }) .fail(() => { + const newTable = Object.assign({}, table, { + isMetadataLoading: false, + }); + dispatch(mergeTable(newTable)); notify.error('Error occurred while fetching table metadata'); }); @@ -287,6 +292,13 @@ export function addTable(query, tableName, schemaName) { $.get(url, (data) => { table = Object.assign({}, table, data, { isExtraMetadataLoading: false }); dispatch(mergeTable(table)); + }) + .fail(() => { + const newTable = Object.assign({}, table, { + isExtraMetadataLoading: false, + }); + dispatch(mergeTable(newTable)); + notify.error('Error occurred while fetching table metadata'); }); }; } diff --git a/superset/db_engine_specs.py b/superset/db_engine_specs.py index ffcfcec1084e..537ebdfd8222 100644 --- a/superset/db_engine_specs.py +++ b/superset/db_engine_specs.py @@ -540,7 +540,9 @@ def _partition_query( @classmethod def _latest_partition_from_df(cls, df): - 
return df.to_records(index=False)[0][0] + recs = df.to_records(index=False) + if len(recs): + return recs[0][0] @classmethod def latest_partition(cls, table_name, schema, database, show_first=False):