Merge pull request #3 from arnaudmiribel/upgrade-cache
Use cache_data and cache_resource when possible
arnaudmiribel committed Oct 23, 2023
2 parents bd4e1f1 + c16ad10 · commit 14f71c8
Showing 2 changed files with 23 additions and 17 deletions.
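For context: Streamlit 1.18.0 promoted the experimental caching decorators to stable APIs. st.experimental_memo became st.cache_data (for serializable return values such as dataframes) and st.experimental_singleton became st.cache_resource (for shared global objects such as connections or models). A minimal sketch of the rename, assuming Streamlit >= 1.18.0; the decorator names are the real Streamlit API, but load_csv is a hypothetical example, not code from this repo:

    import streamlit as st
    import pandas as pd

    # Before (deprecated since Streamlit 1.18.0):
    # @st.experimental_memo
    # After:
    @st.cache_data
    def load_csv(url: str) -> pd.DataFrame:
        # Re-executes only when `url` changes; each caller receives a
        # copy of the cached DataFrame, so the cached value cannot be
        # mutated by accident.
        return pd.read_csv(url)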
pyproject.toml (4 changes: 2 additions & 2 deletions)
@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"

 [project]
 name = "streamlit_faker"
-version = "0.0.2"
+version = "0.0.3"
 authors = [
   { name="Arnaud Miribel", email="arnaudmiribel@gmail.com" },
 ]
@@ -29,4 +29,4 @@ keywords = [

 [project.urls]
 "Homepage" = "https://github.com/arnaudmiribel/streamlit-faker"
-"Bug Tracker" = "https://github.com/arnaudmiribel/streamlit-faker/issues"
\ No newline at end of file
+"Bug Tracker" = "https://github.com/arnaudmiribel/streamlit-faker/issues"
streamlit_faker/chart.py (36 changes: 21 additions & 15 deletions)
@@ -8,8 +8,14 @@

 from .common import st_command_with_default

+try:
+    from streamlit import cache_data, cache_resource  # streamlit >= 1.18.0
+
+except ImportError:
+    from streamlit import experimental_memo as cache_data, experimental_singleton as cache_resource  # streamlit >= 0.89
+

-@st.experimental_memo
+@cache_data
 def url_to_dataframe(url: str, parse_dates: list = ["date"]) -> pd.DataFrame:
     """Collects a CSV/JSON file from a URL and load it into a dataframe, with appropriate caching (memo)
     Args:
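The try/except added above is a version-compatibility shim: on Streamlit >= 1.18.0 the stable names import directly, while older installs fall back to the experimental decorators under aliases, so every call site below can reference cache_data and cache_resource unconditionally. The same pattern isolated as a standalone sketch (only the streamlit import names are real):

    try:
        # Streamlit >= 1.18.0 ships the stable caching decorators.
        from streamlit import cache_data, cache_resource
    except ImportError:
        # Older Streamlit: alias the experimental equivalents so the
        # module is written against a single set of names.
        from streamlit import experimental_memo as cache_data
        from streamlit import experimental_singleton as cache_resource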
@@ -39,7 +45,7 @@ def url_to_dataframe(url: str, parse_dates: list = ["date"]) -> pd.DataFrame:
     )


-@st.experimental_singleton()
+@cache_resource
 def get_datasets():
     N = 50
     rand = pd.DataFrame()
@@ -78,7 +84,7 @@ def get_datasets():
 datasets = get_datasets()


-@st.experimental_memo
+@cache_data
 def _line_chart():
     st.line_chart(
         data=datasets["stocks"].query("symbol == 'GOOG'"),
@@ -88,7 +94,7 @@ def _line_chart():
     )


-@st.experimental_memo
+@cache_data
 def _multi_line_chart():
     altex.line_chart(
         data=datasets["stocks"],
@@ -111,7 +117,7 @@ def _bar_chart():
     )


-@st.experimental_memo
+@cache_data
 def _hist_chart():
     altex.hist_chart(
         data=datasets["stocks"].assign(price=datasets["stocks"].price.round(0)),
@@ -120,7 +126,7 @@ def _hist_chart():
     )


-@st.experimental_memo
+@cache_data
 def _scatter_chart():
     altex.scatter_chart(
         data=datasets["seattle_weather"],
@@ -131,7 +137,7 @@ def _scatter_chart():
     )


-@st.experimental_memo
+@cache_data
 def _bar_chart_horizontal():
     altex.bar_chart(
         data=datasets["seattle_weather"].head(15),
@@ -141,7 +147,7 @@ def _bar_chart_horizontal():
     )


-@st.experimental_memo
+@cache_data
 def _bar_chart_log():
     altex.bar_chart(
         data=datasets["seattle_weather"],
@@ -155,7 +161,7 @@ def _bar_chart_log():
     )


-@st.experimental_memo
+@cache_data
 def _bar_chart_sorted():
     altex.bar_chart(
         data=datasets["seattle_weather"]
@@ -167,7 +173,7 @@ def _bar_chart_sorted():
     )


-@st.experimental_memo
+@cache_data
 def _time_heatmap_chart():
     altex.hist_chart(
         data=datasets["seattle_weather"],
@@ -181,7 +187,7 @@ def _time_heatmap_chart():
     )


-@st.experimental_memo
+@cache_data
 def _sparkline_chart():
     altex.line_chart(
         data=datasets["stocks"].query("symbol == 'GOOG'"),
@@ -193,7 +199,7 @@ def _sparkline_chart():
     )


-@st.experimental_memo
+@cache_data
 def _sparkbar_chart():
     altex.bar_chart(
         data=datasets["stocks"].query("symbol == 'GOOG'"),
@@ -204,7 +210,7 @@ def _sparkbar_chart():
     )


-@st.experimental_memo
+@cache_data
 def _bar_stacked_chart():
     altex.bar_chart(
         data=datasets["barley"],
@@ -215,7 +221,7 @@ def _bar_stacked_chart():
     )


-@st.experimental_memo
+@cache_data
 def _bar_normalized_chart():
     altex.bar_chart(
         data=datasets["barley"],
@@ -226,7 +232,7 @@ def _bar_normalized_chart():
     )


-@st.experimental_memo
+@cache_data
 def _bar_grouped_chart():
     altex.bar_chart(
         data=datasets["barley"],
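Worth noting across the hunks above: get_datasets moved to @cache_resource while the chart helpers moved to @cache_data, preserving the old singleton/memo split. cache_data serializes the return value and hands each caller its own copy, so one caller's mutations cannot corrupt the cache; cache_resource stores a single shared instance without copying, which suits the datasets dict built once and only read. A small sketch of that split, assuming Streamlit >= 1.18.0 (function names and file paths are illustrative, not from this repo):

    import streamlit as st
    import pandas as pd

    @st.cache_data
    def filtered_stocks(symbol: str) -> pd.DataFrame:
        # Cached per `symbol` argument; each caller receives a copy.
        return pd.read_csv("stocks.csv").query("symbol == @symbol")

    @st.cache_resource
    def demo_datasets() -> dict:
        # One shared object, never copied; treat its contents as
        # read-only across reruns and sessions.
        return {"stocks": pd.read_csv("stocks.csv")}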
