Use json for imports and exports, not pickle (apache#4243)
* make superset imports and exports use json, not pickle

* fix tests
timifasubaa authored and john-bodley committed Jan 24, 2018
1 parent b04a32a commit a4a2f6c
Showing 4 changed files with 81 additions and 19 deletions.
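
The switch replaces pickled Python objects with a JSON document: export wraps each model instance in a one-key dict tagged with its class name (via DashboardEncoder), and import rebuilds the objects from those tags (via decode_dashboards). A rough sketch of the resulting payload shape is shown below; the attributes listed are illustrative, not the full set a real export carries.

    {
        "dashboards": [
            {"__Dashboard__": {"dashboard_title": "Births",
                               "created_on": {"__datetime__": "2018-01-24T00:00:00"},
                               "slices": [{"__Slice__": {"slice_name": "Name Cloud"}}]}}
        ],
        "datasources": [
            {"__SqlaTable__": {"table_name": "birth_names",
                               "columns": [{"__TableColumn__": {"column_name": "ds"}}]}}
        ]
    }
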
7 changes: 3 additions & 4 deletions superset/models/core.py
@@ -9,7 +9,6 @@
import functools
import json
import logging
-import pickle
import textwrap

from flask import escape, g, Markup, request
@@ -395,7 +394,7 @@ def import_obj(cls, dashboard_to_import, import_time=None):
be overridden or just copies over. Slices that belong to this
dashboard will be wired to existing tables. This function can be used
to import/export dashboards between multiple superset instances.
-Audit metadata isn't copies over.
+Audit metadata isn't copied over.
"""
def alter_positions(dashboard, old_to_new_slc_id_dict):
""" Updates slice_ids in the position json.
@@ -533,10 +532,10 @@ def export_dashboards(cls, dashboard_ids):
make_transient(eager_datasource)
eager_datasources.append(eager_datasource)

-return pickle.dumps({
+return json.dumps({
'dashboards': copied_dashboards,
'datasources': eager_datasources,
-})
+}, cls=utils.DashboardEncoder, indent=4)


class Database(Model, AuditMixinNullable, ImportMixin):
50 changes: 50 additions & 0 deletions superset/utils.py
@@ -42,6 +42,7 @@
from sqlalchemy import event, exc, select
from sqlalchemy.types import TEXT, TypeDecorator


logging.getLogger('MARKDOWN').setLevel(logging.INFO)

PY3K = sys.version_info >= (3, 0)
@@ -240,6 +241,55 @@ def dttm_from_timtuple(d):
d.tm_year, d.tm_mon, d.tm_mday, d.tm_hour, d.tm_min, d.tm_sec)


def decode_dashboards(o):
"""
Function to be passed into json.loads obj_hook parameter
Recreates the dashboard object from a json representation.
"""
import superset.models.core as models
from superset.connectors.sqla.models import (
SqlaTable, SqlMetric, TableColumn,
)

if '__Dashboard__' in o:
d = models.Dashboard()
d.__dict__.update(o['__Dashboard__'])
return d
elif '__Slice__' in o:
d = models.Slice()
d.__dict__.update(o['__Slice__'])
return d
elif '__TableColumn__' in o:
d = TableColumn()
d.__dict__.update(o['__TableColumn__'])
return d
elif '__SqlaTable__' in o:
d = SqlaTable()
d.__dict__.update(o['__SqlaTable__'])
return d
elif '__SqlMetric__' in o:
d = SqlMetric()
d.__dict__.update(o['__SqlMetric__'])
return d
elif '__datetime__' in o:
return datetime.strptime(o['__datetime__'], '%Y-%m-%dT%H:%M:%S')
else:
return o


class DashboardEncoder(json.JSONEncoder):
# pylint: disable=E0202
def default(self, o):
try:
vals = {
k: v for k, v in o.__dict__.items() if k != '_sa_instance_state'}
return {'__{}__'.format(o.__class__.__name__): vals}
except Exception:
if type(o) == datetime:
return {'__datetime__': o.replace(microsecond=0).isoformat()}
return json.JSONEncoder.default(self, o)


def parse_human_timedelta(s):
"""
Returns ``datetime.datetime`` from natural language time deltas
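
The pair added above implements a small tagged-JSON convention: DashboardEncoder turns each model into {'__ClassName__': {attribute: value, ...}} and tags datetimes as '__datetime__' strings truncated to whole seconds, while decode_dashboards reverses the mapping when passed as json.loads' object_hook. The standalone sketch below reproduces that round trip without any Superset imports; the Slice class and its attributes here are stand-ins, not Superset's models.

    import json
    from datetime import datetime


    class Slice(object):
        """Stand-in model: a plain object with an attribute __dict__."""
        pass


    class SliceEncoder(json.JSONEncoder):
        def default(self, o):
            if isinstance(o, datetime):
                # Same convention as DashboardEncoder: tag it, drop microseconds.
                return {'__datetime__': o.replace(microsecond=0).isoformat()}
            # Any other object becomes {'__ClassName__': {attribute: value, ...}}.
            return {'__{}__'.format(o.__class__.__name__): vars(o)}


    def decode(o):
        # Mirror of decode_dashboards: rebuild tagged objects, pass plain dicts through.
        if '__Slice__' in o:
            s = Slice()
            s.__dict__.update(o['__Slice__'])
            return s
        if '__datetime__' in o:
            return datetime.strptime(o['__datetime__'], '%Y-%m-%dT%H:%M:%S')
        return o


    slc = Slice()
    slc.slice_name = 'Births'
    slc.created_on = datetime(2018, 1, 24, 12, 0, 5)

    payload = json.dumps({'slices': [slc]}, cls=SliceEncoder, indent=4)
    restored = json.loads(payload, object_hook=decode)['slices'][0]
    assert restored.slice_name == 'Births'
    assert restored.created_on == datetime(2018, 1, 24, 12, 0, 5)
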
10 changes: 4 additions & 6 deletions superset/views/core.py
@@ -8,7 +8,6 @@
import json
import logging
import os
-import pickle
import re
import time
import traceback
@@ -601,7 +600,7 @@ def download_dashboards(self):
ids = request.args.getlist('id')
return Response(
models.Dashboard.export_dashboards(ids),
-headers=generate_download_headers('pickle'),
+headers=generate_download_headers('json'),
mimetype='application/text')
return self.render_template(
'superset/export_dashboards.html',
@@ -1115,15 +1114,14 @@ def explore_json(self, datasource_type, datasource_id):
@has_access
@expose('/import_dashboards', methods=['GET', 'POST'])
def import_dashboards(self):
"""Overrides the dashboards using pickled instances from the file."""
"""Overrides the dashboards using json instances from the file."""
f = request.files.get('file')
if request.method == 'POST' and f:
current_tt = int(time.time())
-data = pickle.load(f)
+data = json.loads(f.stream.read(), object_hook=utils.decode_dashboards)
# TODO: import DRUID datasources
for table in data['datasources']:
-ds_class = ConnectorRegistry.sources.get(table.type)
-ds_class.import_obj(table, import_time=current_tt)
+type(table).import_obj(table, import_time=current_tt)
db.session.commit()
for dashboard in data['dashboards']:
models.Dashboard.import_obj(
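
A consequence of decoding with object_hook, visible in the import view above: decode_dashboards already returns fully typed instances, so the ConnectorRegistry lookup keyed on table.type is no longer needed and type(table).import_obj(...) dispatches to the right class directly. A minimal sketch of that dispatch follows; the class and method are hypothetical stand-ins, not Superset's API.

    class SqlaTable(object):
        """Hypothetical stand-in for a datasource class with its own importer."""

        @classmethod
        def import_obj(cls, obj, import_time=None):
            return '{} imported {} at {}'.format(cls.__name__, obj.table_name, import_time)


    # What json.loads(..., object_hook=decode_dashboards) would hand back:
    table = SqlaTable()
    table.table_name = 'birth_names'

    # The decoded instance already carries its concrete class, so no registry lookup.
    print(type(table).import_obj(table, import_time=1516752000))
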
33 changes: 24 additions & 9 deletions tests/import_export_tests.py
@@ -5,12 +5,11 @@
from __future__ import unicode_literals

import json
-import pickle
import unittest

from sqlalchemy.orm.session import make_transient

-from superset import db
+from superset import db, utils
from superset.connectors.druid.models import (
DruidColumn, DruidDatasource, DruidMetric,
)
@@ -205,13 +204,22 @@ def test_export_1_dashboard(self):
.format(birth_dash.id)
)
resp = self.client.get(export_dash_url)
-exported_dashboards = pickle.loads(resp.data)['dashboards']
+exported_dashboards = json.loads(
+    resp.data.decode('utf-8'),
+    object_hook=utils.decode_dashboards,
+)['dashboards']
self.assert_dash_equals(birth_dash, exported_dashboards[0])
self.assertEquals(
birth_dash.id,
-json.loads(exported_dashboards[0].json_metadata)['remote_id'])
-
-exported_tables = pickle.loads(resp.data)['datasources']
+json.loads(
+    exported_dashboards[0].json_metadata,
+    object_hook=utils.decode_dashboards,
+)['remote_id'])
+
+exported_tables = json.loads(
+    resp.data.decode('utf-8'),
+    object_hook=utils.decode_dashboards,
+)['datasources']
self.assertEquals(1, len(exported_tables))
self.assert_table_equals(
self.get_table_by_name('birth_names'), exported_tables[0])
@@ -223,8 +231,12 @@ def test_export_2_dashboards(self):
'/dashboardmodelview/export_dashboards_form?id={}&id={}&action=go'
.format(birth_dash.id, world_health_dash.id))
resp = self.client.get(export_dash_url)
-exported_dashboards = sorted(pickle.loads(resp.data)['dashboards'],
-    key=lambda d: d.dashboard_title)
+exported_dashboards = sorted(
+    json.loads(
+        resp.data.decode('utf-8'),
+        object_hook=utils.decode_dashboards,
+    )['dashboards'],
+    key=lambda d: d.dashboard_title)
self.assertEquals(2, len(exported_dashboards))
self.assert_dash_equals(birth_dash, exported_dashboards[0])
self.assertEquals(
@@ … @@
)

exported_tables = sorted(
-    pickle.loads(resp.data)['datasources'], key=lambda t: t.table_name)
+    json.loads(
+        resp.data.decode('utf-8'),
+        object_hook=utils.decode_dashboards)['datasources'],
+    key=lambda t: t.table_name)
self.assertEquals(2, len(exported_tables))
self.assert_table_equals(
self.get_table_by_name('birth_names'), exported_tables[0])
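
One small detail in the updated tests: the Flask test client exposes the response body as bytes (resp.data), and json.loads did not accept bytes until Python 3.6, so the tests decode to text before parsing. A minimal illustration, with raw standing in for resp.data:

    import json

    raw = b'{"dashboards": [], "datasources": []}'  # stand-in for resp.data

    # Decoding first keeps json.loads happy on Python 3 versions before 3.6,
    # which reject bytes input.
    data = json.loads(raw.decode('utf-8'))
    assert data['dashboards'] == [] and data['datasources'] == []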
