Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
85 changes: 72 additions & 13 deletions app.py
Original file line number Diff line number Diff line change
Expand Up @@ -21,9 +21,6 @@
"https://cdn.jsdelivr.net/gh/AnnMarieW/dash-bootstrap-templates@V1.0.4/dbc.min.css"
)

# GLOBAL VARS
SUMMARY_ALL = None

# APP
app = dash.Dash(
APP_TITLE,
Expand Down Expand Up @@ -184,20 +181,19 @@ def callback_download_table(_, table_data, table_columns):
prevent_initial_call=True,
)
def callback_analyze(_, table_data, table_columns):
global SUMMARY_ALL

button_viz_analysis_disabled = True
button_viz_analysis_outline = True
row_button_download_analysis_style = {"visibility": "hidden"}

try:
dataframe = pyfunc.transform_to_dataframe(table_data, table_columns)
SUMMARY_ALL = pyfunc.generate_summary_all(dataframe, n_days=["16D", "MS", "YS"])
summary_all = pyfunc.generate_summary_all(dataframe, n_days=["16D", "MS", "YS"])
tables = [
pylayoutfunc.create_table_summary(
summary, f"table-analyze-{counter}", deletable=False
)
for counter, summary in enumerate(SUMMARY_ALL)
for counter, summary in enumerate(summary_all)
]

children = pylayoutfunc.create_tabcard_table_layout(tables)
Expand All @@ -218,43 +214,106 @@ def callback_analyze(_, table_data, table_columns):
@app.callback(
    Output("download-analysis-csv", "data"),
    Input("button-download-analysis-csv", "n_clicks"),
    State("table-analyze-0", "data"),
    State("table-analyze-0", "columns"),
    State("table-analyze-1", "data"),
    State("table-analyze-1", "columns"),
    State("table-analyze-2", "data"),
    State("table-analyze-2", "columns"),
    prevent_initial_call=True,
)
def callback_download_results(
    _,
    biweekly_data,
    biweekly_columns,
    monthly_data,
    monthly_columns,
    yearly_data,
    yearly_columns,
):
    """Send the combined analysis results as a CSV download.

    Rebuilds the three summary DataFrames (biweekly, monthly, yearly) from
    the rendered Dash DataTables instead of relying on module-level state,
    concatenates them side by side under top-level column keys, and returns
    a ``dcc.send_data_frame`` payload for the download component.

    Parameters
    ----------
    _ :
        ``n_clicks`` of the download button; only used as the trigger.
    biweekly_data, biweekly_columns :
        ``data`` / ``columns`` props of the "table-analyze-0" DataTable.
    monthly_data, monthly_columns :
        ``data`` / ``columns`` props of the "table-analyze-1" DataTable.
    yearly_data, yearly_columns :
        ``data`` / ``columns`` props of the "table-analyze-2" DataTable.

    Returns
    -------
    dict
        Payload produced by ``dcc.send_data_frame`` for "results.csv".
    """

    summary_all = []
    # Each table round-trips through the DataTable as plain JSON, so numeric
    # coercion is skipped (apply_numeric=False) and the "max_date" columns
    # are re-parsed back into datetimes.
    for data, columns in (
        (biweekly_data, biweekly_columns),
        (monthly_data, monthly_columns),
        (yearly_data, yearly_columns),
    ):
        dataframe = pyfunc.transform_to_dataframe(
            data,
            columns,
            multiindex=True,
            apply_numeric=False,
            parse_dates=["max_date"],
        )
        summary_all.append(dataframe)

    # Side-by-side concat; the keys become the top level of the MultiIndex
    # columns so each period's summary stays distinguishable in the CSV.
    dataframe_all = pd.concat(
        summary_all, axis=1, keys=["Biweekly", "Monthly", "Yearly"]
    )

    return dcc.send_data_frame(dataframe_all.to_csv, "results.csv")


@app.callback(
Output("tab-graph-analysis", "children"),
Input("button-viz-analysis", "n_clicks"),
State("table-analyze-0", "data"),
State("table-analyze-0", "columns"),
State("table-analyze-1", "data"),
State("table-analyze-1", "columns"),
State("table-analyze-2", "data"),
State("table-analyze-2", "columns"),
prevent_initial_call=True,
)
def callback_troubleshoot(_):
def callback_graph_analysis(
_,
biweekly_data,
biweekly_columns,
monthly_data,
monthly_columns,
yearly_data,
yearly_columns,
):
from itertools import product

label_periods = ["Biweekly", "Monthly", "Yearly"]
label_maxsum = ["Max + Sum"]
label_raindry = ["Dry + Rain"]
label_ufunc = label_maxsum + label_raindry

biweekly = (biweekly_data, biweekly_columns)
monthly = (monthly_data, monthly_columns)
yearly = (yearly_data, yearly_columns)

summary_all = []
for summary_period in (biweekly, monthly, yearly):
data, columns = summary_period
dataframe = pyfunc.transform_to_dataframe(
data,
columns,
multiindex=True,
apply_numeric=False,
parse_dates=["max_date"],
)
summary_all.append(dataframe)

graphs_maxsum = [
pyfigure.figure_summary_maxsum(
summary,
title=f"<b>{period}: {title}</b>",
period=period,
subplot_titles=["Max", "Sum"],
)
for summary, title, period in zip(SUMMARY_ALL, label_maxsum * 3, label_periods)
for summary, title, period in zip(summary_all, label_maxsum * 3, label_periods)
]
graphs_raindry = [
pyfigure.figure_summary_raindry(
summary, title=f"<b>{period}: {title}</b>", period=period
)
for summary, title, period in zip(SUMMARY_ALL, label_raindry * 3, label_periods)
for summary, title, period in zip(summary_all, label_raindry * 3, label_periods)
]
graph_maxdate = [pyfigure.figure_summary_maxdate(SUMMARY_ALL)]
graph_maxdate = [pyfigure.figure_summary_maxdate(summary_all)]

all_graphs = graphs_maxsum + graphs_raindry + graph_maxdate
labels = [": ".join(i) for i in product(label_ufunc, label_periods)]
Expand Down
33 changes: 30 additions & 3 deletions pyfunc.py
Original file line number Diff line number Diff line change
Expand Up @@ -58,7 +58,7 @@ def max_date(vector):
if vector.any():
return vector.idxmax().date()
else:
return np.nan
return pd.NaT

def max(vector):
return vector.max()
Expand All @@ -70,7 +70,7 @@ def max(vector):
dataframe, ufunc=ufunc, ufunc_col=ufunc_col, n_days=n_days
)

return summary
return summary.infer_objects()


def generate_summary_all(dataframe, n_days: list = None):
Expand All @@ -84,10 +84,15 @@ def generate_summary_all(dataframe, n_days: list = None):


def transform_to_dataframe(
table_data, table_columns, multiindex: bool = False, apply_numeric: bool = True
table_data,
table_columns,
multiindex: bool = False,
apply_numeric: bool = True,
parse_dates: list = None,
):

dataframe = pd.DataFrame(table_data)

if multiindex is True:
dataframe.columns = pd.MultiIndex.from_tuples(
[item["name"] for item in table_columns]
Expand All @@ -98,9 +103,31 @@ def transform_to_dataframe(
dataframe["DATE"] = pd.to_datetime(dataframe.DATE)
dataframe = dataframe.set_index("DATE").sort_index()

if multiindex is True:
# removing date (index.name) from top level multiindex
dataframe.columns = pd.MultiIndex.from_tuples(dataframe.columns.to_flat_index())

if apply_numeric is True:
dataframe = dataframe.apply(pd.to_numeric, errors="coerce")
else:
dataframe = dataframe.infer_objects()

if parse_dates is not None:
if multiindex:
for col_dates in parse_dates:
col_parsing = [
col_tuple
for col_tuple in dataframe.columns
if col_dates in col_tuple
]
for col_dates in col_parsing:
dataframe[col_dates] = pd.to_datetime(
dataframe[col_dates], errors="coerce"
)
else:
for col_dates in parse_dates:
dataframe[col_dates] = pd.to_datetime(
dataframe[col_dates], errors="coerce"
)

return dataframe