Fixed issue with URL display when using ArcticDB and maintain column ordering when specifying 'locked' on startup
aschonfeld committed Jun 19, 2023
1 parent b5518d8 commit ffb6557
Showing 16 changed files with 233 additions and 94 deletions.
9 changes: 9 additions & 0 deletions README.md
@@ -769,6 +769,15 @@ global_state.use_arcticdb_store(uri='lmdb:///<path>', library='my_lib')
dtale.show('my_symbol')
```

Or you can set your library using `dtale.show` with a pipe-delimited identifier:
```python
import dtale.global_state as global_state
import dtale

global_state.use_arcticdb_store(uri='lmdb:///<path>')
dtale.show('my_lib|my_symbol')
```

You can also do everything using `dtale.show_arcticdb`:
```python
import dtale
3 changes: 1 addition & 2 deletions dtale/app.py
@@ -35,6 +35,7 @@
import dtale.config as dtale_config
from dtale import dtale
from dtale.cli.clickutils import retrieve_meta_info_and_version, setup_logging
from dtale.dash_application import views as dash_views
from dtale.utils import (
DuplicateDataError,
build_shutdown_url,
@@ -507,8 +508,6 @@ def handle_data_id(_endpoint, values):
auth.setup_auth(app)

with app.app_context():
from .dash_application import views as dash_views

app = dash_views.add_dash(app)
return app

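For context on this hunk: the `dash_views` import moves from inside the `app_context` block up to module level, while the Dash registration itself still runs inside the context. A minimal Flask-only sketch of that pattern — the `add_dash` stub below is a stand-in for dtale's real `dash_application.views.add_dash`, not its implementation:

```python
from flask import Flask

app = Flask(__name__)


def add_dash(flask_app):
    # Stand-in for dtale.dash_application.views.add_dash, which wraps the
    # Flask server with the Dash charting application and returns it.
    return flask_app


# Only the import was hoisted to module level; the registration still
# happens inside the application context, exactly as before.
with app.app_context():
    app = add_dash(app)
```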
9 changes: 8 additions & 1 deletion dtale/cli/loaders/arcticdb_loader.py
@@ -78,7 +78,14 @@ def loader_func(**kwargs):
if symbol is None: # select symbol from the UI
return None

startup(data=symbol)
if library is None and symbol is not None:
raise ValueError(
"When trying to load the symbol, {}, a library must be specified!".format(
symbol
)
)

startup(data="{}|{}".format(library, symbol))
return symbol

if not library:
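A minimal sketch of the guard and identifier format the loader now uses: a symbol can only be loaded once a library is known, and the data id handed to `startup` is the pipe-delimited pair. The helper name below is illustrative, not dtale's API:

```python
def build_arcticdb_data_id(library, symbol):
    """Mirror the loader's new behavior: require a library before loading a symbol."""
    if symbol is None:
        return None  # nothing selected yet; the symbol can still be picked from the UI
    if library is None:
        raise ValueError(
            "When trying to load the symbol, {}, a library must be specified!".format(symbol)
        )
    # D-Tale identifies ArcticDB-backed data by "<library>|<symbol>"
    return "{}|{}".format(library, symbol)


print(build_arcticdb_data_id("my_lib", "my_symbol"))  # my_lib|my_symbol
```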
2 changes: 1 addition & 1 deletion dtale/column_filters.py
@@ -32,7 +32,7 @@ def __init__(self, data_id, column, cfg):
if not dtype:
if global_state.is_arcticdb:
instance = global_state.store.get(data_id)
data, _ = format_data(instance.load_data(row_range=[0, 1]))
data, _ = format_data(instance.base_df)
s = data[column]
else:
s = global_state.get_data(data_id, columns=[column])[column]
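The switch from `load_data(row_range=[0, 1])` to the cached `base_df` works because column dtypes are a property of the schema, so a single representative row is enough to resolve them. A pandas-only sketch (names are illustrative, not dtale's API):

```python
import pandas as pd

# Stand-in for the cached instance.base_df: a single row read once up front.
base_df = pd.DataFrame({"price": [1.5], "volume": [100], "ticker": ["AAPL"]})


def column_dtype(df, column):
    """Resolve a column's dtype from a small representative frame."""
    return df[column].dtype


print(column_dtype(base_df, "price"))   # float64
print(column_dtype(base_df, "volume"))  # int64
```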
16 changes: 12 additions & 4 deletions dtale/dash_application/charts.py
@@ -607,7 +613,13 @@ def build_hoverable(link, msg):
export_png_link,
export_csv_link,
],
style={"position": "absolute", "zIndex": 5, "left": 5, "top": 2},
style={
"position": "absolute",
"zIndex": 5,
"left": 5,
"top": 2,
"height": "100%",
},
)
return html.Div(
[links] + make_list(chart), style={"position": "relative", "height": "100%"}
@@ -706,12 +712,14 @@ def cpg_chunker(charts, columns=2):
return charts

def _formatter(chart):
if hasattr(chart, "style"):
chart.style.pop("height", None)
return html.Div(chart, className="col-md-6")

return [
html.Div([_formatter(c) for c in chunk], className="row pb-3")
html.Div(
[_formatter(c) for c in chunk],
className="row pb-3",
style={"height": "100%"},
)
for chunk in divide_chunks(charts, columns)
]

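For readers unfamiliar with `cpg_chunker`: it lays charts out a few per Bootstrap row, and this hunk additionally strips any fixed `height` from each chart so the row's `height: 100%` takes effect. A plain-Python sketch of the chunking step (dtale's own `divide_chunks` helper is assumed to behave like this):

```python
def divide_chunks(items, n):
    """Yield successive chunks of length n from a list."""
    for i in range(0, len(items), n):
        yield items[i : i + n]


charts = ["chart-1", "chart-2", "chart-3", "chart-4", "chart-5"]
rows = [list(chunk) for chunk in divide_chunks(charts, 2)]
print(rows)  # [['chart-1', 'chart-2'], ['chart-3', 'chart-4'], ['chart-5']]
```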
9 changes: 6 additions & 3 deletions dtale/dash_application/layout/layout.py
@@ -1407,10 +1407,12 @@ def build_slider_counts(df, data_id, query_value):
)
)
slider_counts = {
v * 20: {"label": "{}% ({:,.0f})".format(v * 20, (v * 2) / 10 * record_ct)}
"{}".format(v * 20): {
"label": "{}% ({:,.0f})".format(v * 20, (v * 2) / 10 * record_ct)
}
for v in range(1, 6)
}
slider_counts[100]["style"] = {"white-space": "nowrap"}
slider_counts["100"]["style"] = {"white-space": "nowrap"}
return slider_counts


@@ -2645,7 +2647,8 @@ def show_map_style(show):
id="chart-inputs",
),
dcc.Loading(
html.Div(id="chart-content", style={"max-height": "69vh"}), type="circle"
html.Div(id="chart-content", style={"height": "calc(100vh - 380px"}),
type="circle",
),
dcc.Textarea(id="copy-text", style=dict(position="absolute", left="-110%")),
]
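The mark keys switch from ints to strings, presumably because `dcc.Slider` marks round-trip through JSON, where object keys are always strings; that rationale is an inference, not stated in the commit. A standalone sketch of the new construction:

```python
record_ct = 12345

# Marks keyed by strings ("20", "40", ...) rather than ints, matching how
# they come back from the JSON layer on the front end.
slider_counts = {
    "{}".format(v * 20): {
        "label": "{}% ({:,.0f})".format(v * 20, (v * 2) / 10 * record_ct)
    }
    for v in range(1, 6)
}
slider_counts["100"]["style"] = {"white-space": "nowrap"}
print(slider_counts["20"]["label"])  # 20% (2,469)
```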
2 changes: 1 addition & 1 deletion dtale/dash_application/views.py
@@ -1365,7 +1365,7 @@ def display_page(pathname, search):
settings = global_state.get_settings(params["data_id"]) or {}
return html.Div(
charts_layout(df, settings, **params) + saved_charts.build_layout(),
className="charts-body",
className="charts-body pb-0",
)

custom_geojson.init_callbacks(dash_app)
51 changes: 39 additions & 12 deletions dtale/global_state.py
@@ -4,7 +4,7 @@

from six import PY3

from dtale.utils import dict_merge
from dtale.utils import dict_merge, format_data

try:
from collections.abc import MutableMapping
@@ -55,6 +55,10 @@ def load_data(self):
def rows(self, **kwargs):
return self._rows

@property
def is_large(self):
return False

@property
def data(self):
return self.load_data()
@@ -138,18 +142,29 @@ def settings(self, settings):


class DtaleArcticDBInstance(DtaleInstance):
def __init__(self, data, lib, symbol, parent):
def __init__(self, data, data_id, parent):
super(DtaleArcticDBInstance, self).__init__(data)
self.lib = lib
self.parent = parent
data_id_segs = (data_id or "").split("|")
symbol = data_id_segs[-1]
if len(data_id_segs) > 1:
lib_name = data_id_segs[0]
if not parent.lib or lib_name != parent.lib.name:
parent.update_library(lib_name)
self.lib = parent.lib
self.symbol = symbol
self._rows = 0
if self.lib and self.symbol and self.symbol in parent.symbols:
self._cols = 0
self._base_df = None
if self.lib and self.symbol and self.symbol in self.parent.symbols:
self._rows = self.lib._nvs.get_num_rows(self.symbol)
self._base_df = self.load_data(row_range=[0, 1])
self._cols = len(format_data(self._base_df)[0].columns)

def load_data(self, **kwargs):
from arcticdb.version_store._store import VersionedItem

if not self.lib.has_symbol(self.symbol):
if self.symbol not in self.parent.symbols:
raise ValueError(
"{} does not exist in {}!".format(self.symbol, self.lib.name)
)
@@ -170,6 +185,18 @@ def rows(self, **kwargs):
return len(read_result.frame_data.value.data[0])
return self._rows

@property
def base_df(self):
return self._base_df

@property
def is_large(self):
if self.rows() > LARGE_ARCTICDB:
return True
if self._cols > 50:
return True
return False

@property
def data(self):
return self.load_data()
@@ -207,22 +234,22 @@ def update_library(self, library=None):
return
if library in self._libraries:
self.lib = self.conn[library]
self._db.clear()
self.load_symbols()
if library not in self._symbols:
self.load_symbols()
elif library is not None:
raise ValueError("Library '{}' does not exist!".format(library))

def load_libraries(self):
self._libraries = self.conn.list_libraries()
self._libraries = sorted(self.conn.list_libraries())

@property
def libraries(self):
return self._libraries

def load_symbols(self, library=None):
self._symbols[library or self.lib.name] = (
self.conn[library] if library else self.lib
).list_symbols()
self._symbols[library or self.lib.name] = sorted(
(self.conn[library] if library else self.lib).list_symbols()
)

@property
def symbols(self):
@@ -231,7 +258,7 @@ def symbols(self):
def build_instance(self, data_id, data=None):
if data_id is None:
return DtaleInstance(data)
return DtaleArcticDBInstance(data, self.lib, data_id, self)
return DtaleArcticDBInstance(data, data_id, self)

def get(self, key, **kwargs):
if key is None:
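To summarize the new instance construction in one place, here is a hedged sketch of the identifier parsing and the `is_large` heuristic introduced above. The `LARGE_ARCTICDB` value shown is an assumption (the real constant lives in `dtale.global_state`) and the helper names are illustrative:

```python
LARGE_ARCTICDB = 1_000_000  # assumed threshold, not taken from the commit


def parse_data_id(data_id, current_library=None):
    """Split a D-Tale ArcticDB data id into (library, symbol).

    "my_lib|my_symbol" -> ("my_lib", "my_symbol")
    "my_symbol"        -> (current_library, "my_symbol")
    """
    segs = (data_id or "").split("|")
    symbol = segs[-1]
    library = segs[0] if len(segs) > 1 else current_library
    return library, symbol


def is_large(row_count, col_count):
    """Mirror the new `is_large` property: large row counts or wide frames."""
    return row_count > LARGE_ARCTICDB or col_count > 50


print(parse_data_id("my_lib|my_symbol"))           # ('my_lib', 'my_symbol')
print(parse_data_id("my_symbol", "fallback_lib"))  # ('fallback_lib', 'my_symbol')
print(is_large(2_000_000, 10))                     # True
```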
3 changes: 3 additions & 0 deletions dtale/utils.py
@@ -861,6 +861,9 @@ def format_data(data, inplace=False, drop_index=False):
else:
data = data.reset_index(drop=drop_index)

if drop_index:
index = []

if drop:
if inplace:
data.drop("index", axis=1, errors="ignore", inplace=True)
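A simplified sketch of the behavior this three-line fix produces: when the index is dropped, it must not be reported back as index columns either (presumably so downstream consumers, such as the locked-column ordering mentioned in the commit message, don't act on columns that no longer exist). The function name and exact motivation are inferred, not taken from the diff:

```python
import pandas as pd


def format_data_sketch(data, drop_index=False):
    """Reset the index and report which index columns remain in the frame."""
    index = [str(name) for name in data.index.names if name is not None]
    data = data.reset_index(drop=drop_index)
    if drop_index:
        index = []  # the index no longer exists as columns, so report none
    return data, index


df = pd.DataFrame({"a": [1, 2]}, index=pd.Index(["x", "y"], name="my_index"))
print(format_data_sketch(df, drop_index=False)[1])  # ['my_index']
print(format_data_sketch(df, drop_index=True)[1])   # []
```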
