Fixed issue with URL display when using ArcticDB and maintain column ordering when specifying 'locked' on startup
aschonfeld committed Jun 16, 2023
1 parent b5518d8 commit af3fdfc
Showing 6 changed files with 79 additions and 14 deletions.
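Both fixes revolve around one recurring idiom in the diff below: reordering DataFrame columns so that anything passed via 'locked' comes first, applied everywhere ArcticDB data is loaded fresh. A minimal sketch of that idiom (the helper name is illustrative; dtale inlines the expression rather than defining a function):

import pandas as pd


def order_locked_first(df, locked):
    """Return df with the locked columns first, preserving their given order."""
    return df[locked + [c for c in df.columns if c not in locked]]


df = pd.DataFrame(columns=["a", "b", "c", "d"])
list(order_locked_first(df, ["c", "a"]).columns)  # ['c', 'a', 'b', 'd']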
dtale/dash_application/charts.py (12 additions & 4 deletions)
@@ -607,7 +607,13 @@ def build_hoverable(link, msg):
                export_png_link,
                export_csv_link,
            ],
-            style={"position": "absolute", "zIndex": 5, "left": 5, "top": 2},
+            style={
+                "position": "absolute",
+                "zIndex": 5,
+                "left": 5,
+                "top": 2,
+                "height": "100%",
+            },
        )
        return html.Div(
            [links] + make_list(chart), style={"position": "relative", "height": "100%"}
@@ -706,12 +712,14 @@ def cpg_chunker(charts, columns=2):
        return charts

    def _formatter(chart):
-        if hasattr(chart, "style"):
-            chart.style.pop("height", None)
        return html.Div(chart, className="col-md-6")

    return [
-        html.Div([_formatter(c) for c in chunk], className="row pb-3")
+        html.Div(
+            [_formatter(c) for c in chunk],
+            className="row pb-3",
+            style={"height": "100%"},
+        )
        for chunk in divide_chunks(charts, columns)
    ]

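The net effect in cpg_chunker is that per-chart heights are no longer stripped; instead each bootstrap row is stretched to full height. A self-contained sketch of the resulting pattern, assuming divide_chunks yields successive fixed-size slices and using Dash 2.x's "from dash import html" (dtale itself may import the html components differently):

from dash import html


def divide_chunks(items, n):
    # assumed helper: yield successive n-sized slices of items
    for i in range(0, len(items), n):
        yield items[i:i + n]


def chunk_into_rows(charts, columns=2):
    return [
        html.Div(
            [html.Div(c, className="col-md-6") for c in chunk],
            className="row pb-3",
            style={"height": "100%"},
        )
        for chunk in divide_chunks(charts, columns)
    ]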
dtale/dash_application/layout/layout.py (6 additions & 3 deletions)
@@ -1407,10 +1407,12 @@ def build_slider_counts(df, data_id, query_value):
        )
    )
    slider_counts = {
-        v * 20: {"label": "{}% ({:,.0f})".format(v * 20, (v * 2) / 10 * record_ct)}
+        "{}".format(v * 20): {
+            "label": "{}% ({:,.0f})".format(v * 20, (v * 2) / 10 * record_ct)
+        }
        for v in range(1, 6)
    }
-    slider_counts[100]["style"] = {"white-space": "nowrap"}
+    slider_counts["100"]["style"] = {"white-space": "nowrap"}
    return slider_counts
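The slider marks now use string keys rather than integers, presumably because the marks dict is serialized to the front end (JSON object keys are always strings, so integer keys can fall out of sync with lookups like the "100" style assignment). A plain-Python illustration of the resulting dict for a hypothetical record count:

record_ct = 12345  # hypothetical row count
slider_counts = {
    "{}".format(v * 20): {
        "label": "{}% ({:,.0f})".format(v * 20, (v * 2) / 10 * record_ct)
    }
    for v in range(1, 6)
}
slider_counts["100"]["style"] = {"white-space": "nowrap"}
print(slider_counts["40"]["label"])  # 40% (4,938)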


@@ -2645,7 +2647,8 @@ def show_map_style(show):
            id="chart-inputs",
        ),
        dcc.Loading(
-            html.Div(id="chart-content", style={"max-height": "69vh"}), type="circle"
+            html.Div(id="chart-content", style={"height": "calc(100vh - 380px)"}),
+            type="circle",
        ),
        dcc.Textarea(id="copy-text", style=dict(position="absolute", left="-110%")),
    ]
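The chart area also switches from capping its height at 69% of the viewport to giving it an explicit height derived from the viewport, so children styled with "height": "100%" have a concrete box to fill. The two style dicts side by side (the 380px offset presumably accounts for the chart inputs above the content area):

old_style = {"max-height": "69vh"}             # cap only; children had no height to inherit
new_style = {"height": "calc(100vh - 380px)"}  # explicit height for full-height children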
dtale/dash_application/views.py (1 addition & 1 deletion)
@@ -1365,7 +1365,7 @@ def display_page(pathname, search):
        settings = global_state.get_settings(params["data_id"]) or {}
        return html.Div(
            charts_layout(df, settings, **params) + saved_charts.build_layout(),
-            className="charts-body",
+            className="charts-body pb-0",
        )

    custom_geojson.init_callbacks(dash_app)
dtale/global_state.py (4 additions & 4 deletions)
@@ -213,16 +213,16 @@ def update_library(self, library=None):
            raise ValueError("Library '{}' does not exist!".format(library))

    def load_libraries(self):
-        self._libraries = self.conn.list_libraries()
+        self._libraries = sorted(self.conn.list_libraries())

    @property
    def libraries(self):
        return self._libraries

    def load_symbols(self, library=None):
-        self._symbols[library or self.lib.name] = (
-            self.conn[library] if library else self.lib
-        ).list_symbols()
+        self._symbols[library or self.lib.name] = sorted(
+            (self.conn[library] if library else self.lib).list_symbols()
+        )

    @property
    def symbols(self):
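Sorting at load time means both the library and symbol lists come back in a stable, alphabetical order regardless of what the backing store returns. A minimal sketch of the behaviour, using a stand-in connection object rather than a real ArcticDB Arctic instance:

class FakeConn(object):
    """Stand-in for an ArcticDB connection exposing the two calls used above."""

    def list_libraries(self):
        return ["zoo", "alpha", "mid"]

    def __getitem__(self, library):
        return self

    def list_symbols(self):
        return ["SYM_B", "SYM_A"]


conn = FakeConn()
libraries = sorted(conn.list_libraries())  # ['alpha', 'mid', 'zoo']
symbols = {lib: sorted(conn[lib].list_symbols()) for lib in libraries}
symbols["alpha"]                           # ['SYM_A', 'SYM_B']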
dtale/utils.py (3 additions & 0 deletions)
@@ -861,6 +861,9 @@ def format_data(data, inplace=False, drop_index=False):
        else:
            data = data.reset_index(drop=drop_index)

+    if drop_index:
+        index = []
+
    if drop:
        if inplace:
            data.drop("index", axis=1, errors="ignore", inplace=True)
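format_data returns the (possibly reset) frame along with the index columns it found; the new guard makes sure that when the caller asked for the index to be dropped, no index columns are reported back either. A hedged sketch of the intent only (the real function handles several index shapes):

import pandas as pd

df = pd.DataFrame({"a": [1, 2]}, index=pd.Index([10, 20], name="my_idx"))

# drop_index=False path: the index is promoted to a column and reported back
data, index = df.reset_index(), ["my_idx"]

# drop_index=True path: the index is discarded, so report no index columns
data, index = df.reset_index(drop=True), []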
dtale/views.py (53 additions & 2 deletions)
@@ -427,6 +427,7 @@ def _build_iframe(
            iframe_url = "{}?{}".format(iframe_url, params)
        else:
            iframe_url = "{}?{}".format(iframe_url, url_encode_func()(params))
+
        return IFrame(iframe_url, width=width, height=height)

    def notebook(self, route="/dtale/iframe/", params=None, width="100%", height=475):
@@ -968,7 +969,34 @@ def startup(
        global_state.new_data_inst(data_id)
        instance = global_state.store.get(data_id)
        data = instance.load_data(row_range=[0, 1])
-        ret_data = startup(data=data, data_id=data_id, force_save=False)
+        ret_data = startup(
+            url=url,
+            data=data,
+            data_id=data_id,
+            force_save=False,
+            name=name,
+            context_vars=context_vars,
+            ignore_duplicate=ignore_duplicate,
+            allow_cell_edits=allow_cell_edits,
+            precision=precision,
+            show_columns=show_columns,
+            hide_columns=hide_columns,
+            column_formats=column_formats,
+            nan_display=nan_display,
+            sort=sort,
+            locked=locked,
+            background_mode=background_mode,
+            range_highlights=range_highlights,
+            app_root=app_root,
+            is_proxy=is_proxy,
+            vertical_headers=vertical_headers,
+            hide_shutdown=hide_shutdown,
+            column_edit_options=column_edit_options,
+            auto_hide_empty_columns=auto_hide_empty_columns,
+            highlight_filter=highlight_filter,
+            hide_header_editor=hide_header_editor,
+            lock_header_menu=lock_header_menu,
+        )
        startup_code = (
            "from arcticdb import Arctic\n"
            "from arcticdb.version_store._store import VersionedItem\n\n"
@@ -1105,6 +1133,7 @@ def startup(
    if force_save or (
        global_state.is_arcticdb and not global_state.contains(data_id)
    ):
+        data = data[curr_locked + [c for c in data.columns if c not in curr_locked]]
        global_state.set_data(data_id, data)
        dtypes_data = data
        ranges = None
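Previously the ArcticDB branch re-entered startup() with only data, data_id and force_save, so options such as locked, show_columns or sort passed by the user were silently dropped and the URL was built without them. Forwarding the full argument list is what makes a call like the following behave as expected. This is a hypothetical invocation: the parameter names come from the diff, but the use_arcticdb_store setup call and the "library|symbol" form of data_id are assumptions about how the ArcticDB backend is wired up, not something shown here.

import dtale
import dtale.global_state as global_state

# assumed ArcticDB wiring; adjust uri/library to your store
global_state.use_arcticdb_store(uri="lmdb:///tmp/arcticdb", library="my_lib")

# 'date' and 'ticker' stay pinned as the left-most columns now that
# `locked` survives the internal re-entry into startup()
d = dtale.show(data_id="my_lib|my_symbol", locked=["date", "ticker"])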
@@ -1116,6 +1145,10 @@
                dtypes_data, inplace=inplace, drop_index=drop_index
            )
            ranges = calc_data_ranges(dtypes_data)
+            dtypes_data = dtypes_data[
+                curr_locked
+                + [c for c in dtypes_data.columns if c not in curr_locked]
+            ]
        dtypes_state = build_dtypes_state(
            dtypes_data, global_state.get_dtypes(data_id) or [], ranges=ranges
        )
@@ -2582,6 +2615,7 @@ def get_data(data_id):
        return jsonify({})

    curr_settings = global_state.get_settings(data_id) or {}
+    curr_locked = curr_settings.get("locked", [])
    final_query = build_query(data_id, curr_settings.get("query"))
    highlight_filter = curr_settings.get("highlightFilter") or False
@@ -2610,6 +2644,10 @@
                    data = data.head(export_rows)
                else:
                    data = instance.load_data(row_range=[0, export_rows])
+                data, _ = format_data(data)
+                data = data[
+                    curr_locked + [c for c in data.columns if c not in curr_locked]
+                ]
                results = f.format_dicts(data.itertuples())
                results = [dict_merge({IDX_COL: i}, r) for i, r in enumerate(results)]
            elif query_builder:
@@ -2618,6 +2656,7 @@
                )
                total = len(df)
                df, _ = format_data(df)
+                df = df[curr_locked + [c for c in df.columns if c not in curr_locked]]
                for sub_range in ids:
                    sub_range = list(map(int, sub_range.split("-")))
                    if len(sub_range) == 1:
@@ -2640,6 +2679,7 @@
                df = instance.load_data(**date_range)
                total = len(df)
                df, _ = format_data(df)
+                df = df[curr_locked + [c for c in df.columns if c not in curr_locked]]
                for sub_range in ids:
                    sub_range = list(map(int, sub_range.split("-")))
                    if len(sub_range) == 1:
@@ -2666,6 +2706,10 @@
                            row_range=[sub_range[0], sub_range[0] + 1]
                        )
                        sub_df, _ = format_data(sub_df)
+                        sub_df = sub_df[
+                            curr_locked
+                            + [c for c in sub_df.columns if c not in curr_locked]
+                        ]
                        sub_df = f.format_dicts(sub_df.itertuples())
                        results[sub_range[0]] = dict_merge(
                            {IDX_COL: sub_range[0]}, sub_df[0]
@@ -2676,6 +2720,10 @@
                            row_range=[start, total if end >= total else end + 1]
                        )
                        sub_df, _ = format_data(sub_df)
+                        sub_df = sub_df[
+                            curr_locked
+                            + [c for c in sub_df.columns if c not in curr_locked]
+                        ]
                        sub_df = f.format_dicts(sub_df.itertuples())
                        for i, d in zip(range(start, end + 1), sub_df):
                            results[i] = dict_merge({IDX_COL: i}, d)
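For context, the row-range branches above parse the requested ids (e.g. "5" for a single row, "10-20" for a span) before loading just those rows from ArcticDB. A minimal sketch of that parsing, assuming the same one-number/two-number convention and omitting the clamp against the total row count:

def parse_ranges(ids):
    """Turn ["5", "10-20"] into end-exclusive row_range pairs."""
    ranges = []
    for sub_range in ids:
        parts = list(map(int, sub_range.split("-")))
        if len(parts) == 1:
            ranges.append((parts[0], parts[0] + 1))
        else:
            start, end = parts
            ranges.append((start, end + 1))
    return ranges


parse_ranges(["5", "10-20"])  # [(5, 6), (10, 21)]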
@@ -2687,6 +2735,7 @@
        curr_dtypes = [c["name"] for c in global_state.get_dtypes(data_id)]
        if any(c not in curr_dtypes for c in data.columns):
            data, _ = format_data(data)
+            data = data[curr_locked + [c for c in data.columns if c not in curr_locked]]
            global_state.set_data(data_id, data)
            global_state.set_dtypes(
                data_id,
@@ -4144,10 +4193,12 @@ def load_arcticdb_description():
            zip(description.index.name, description.index.dtype),
        )
    )
+    rows = description.row_count
+
    description_str = (
        "ROWS: {rows:,.0f}\n" "INDEX:\n" "\t- {index}\n" "COLUMNS:\n" "\t- {columns}\n"
    ).format(
-        rows=description.row_count,
+        rows=rows,
        index="\n\t- ".join(index),
        columns="\n\t- ".join(columns),
    )
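For reference, the description block this builds renders like the following (the format string is taken from the hunk above; the values are made up):

description_str = (
    "ROWS: {rows:,.0f}\n" "INDEX:\n" "\t- {index}\n" "COLUMNS:\n" "\t- {columns}\n"
).format(
    rows=1234567,
    index="\n\t- ".join(["date (datetime64[ns])"]),
    columns="\n\t- ".join(["price (float64)", "volume (int64)"]),
)
print(description_str)
# ROWS: 1,234,567
# INDEX:
#     - date (datetime64[ns])
# COLUMNS:
#     - price (float64)
#     - volume (int64)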
