Skip to content

Commit

Permalink
review release changes (#597)
Browse files Browse the repository at this point in the history
  • Loading branch information
dbrakenhoff committed May 12, 2023
1 parent c4f4c2d commit a7b11cc
Show file tree
Hide file tree
Showing 5 changed files with 66 additions and 73 deletions.
2 changes: 1 addition & 1 deletion doc/about/courses.rst
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@ Courses

Every now and then (on-site) Pastas courses and workshops are held. Please check the `GitHub Discussion <https://github
.com/pastas/pastas/discussions>`_ page for announcements of any public Pastas courses to be held. We can also provide
private courses on request. To organize such a workshop please contact of the following.
private courses on request. To organize such a workshop please contact one of the following:

- For courses in the Netherlands, please contact Artesia (info[AT]artesia-water.nl)
- For international courses, please contact Raoul Collenteur (Raoul.Collenteur[AT]eawag.ch)
59 changes: 21 additions & 38 deletions doc/benchmarks/autocorrelation.ipynb

Large diffs are not rendered by default.

51 changes: 25 additions & 26 deletions doc/benchmarks/noisemodel.ipynb

Large diffs are not rendered by default.

25 changes: 18 additions & 7 deletions pastas/modelcompare.py
Original file line number Diff line number Diff line change
Expand Up @@ -317,13 +317,16 @@ def get_metrics(
"""
if models is None:
models = self.models
modelnames = self.modelnames
else:
modelnames = [iml.name for iml in models]

metrics = concat(
[ml.stats.summary(stats=metric_selection) for ml in models],
axis=1,
sort=False,
)
metrics.columns = self.modelnames
metrics.columns = modelnames
metrics.index.name = None

return metrics
Expand Down Expand Up @@ -352,9 +355,12 @@ def get_parameters(
"""
if models is None:
models = self.models
modelnames = self.modelnames
else:
modelnames = [iml.name for iml in models]

params = concat([ml.parameters[param_col] for ml in models], axis=1, sort=False)
params.columns = self.modelnames
params.columns = modelnames

if param_selection:
sel = np.array([])
Expand All @@ -378,11 +384,14 @@ def get_diagnostics(
"""
if models is None:
models = self.models
modelnames = self.modelnames
else:
modelnames = [iml.name for iml in models]

diags = DataFrame(index=self.modelnames)
diags = DataFrame(index=modelnames)
for i, ml in enumerate(models):
mldiag = ml.stats.diagnostics()
diags.loc[self.modelnames[i], mldiag.index] = mldiag[diag_col].values
diags.loc[modelnames[i], mldiag.index] = mldiag[diag_col].values

return diags.transpose()

Expand Down Expand Up @@ -811,7 +820,7 @@ def plot_table_metrics(
index={met: f"\N{GREEK CAPITAL LETTER DELTA}{met.upper()}"}
)
if "rsq" in metrics.index:
metrics = metrics.rename(index={"rsq": f"R\N{SUPERSCRIPT TWO}"})
metrics = metrics.rename(index={"rsq": "R\N{SUPERSCRIPT TWO}"})

        # add separate column with parameter names
metrics.loc[:, "Metrics"] = metrics.index
Expand Down Expand Up @@ -930,9 +939,11 @@ def plot(
if legend and not axn.startswith("rf"):
if legend_kwargs is None:
legend_kwargs = {}
_, l = self.axes[axn].get_legend_handles_labels()
_, labels = self.axes[axn].get_legend_handles_labels()
self.axes[axn].legend(
ncol=legend_kwargs.pop("ncol", max([int(np.ceil(len(l))), 4])),
ncol=legend_kwargs.pop(
"ncol", max([int(np.ceil(len(labels))), 4])
),
loc=legend_kwargs.pop("loc", (0, 1)),
frameon=legend_kwargs.pop("frameon", False),
markerscale=legend_kwargs.pop("markerscale", 1.0),
Expand Down
2 changes: 1 addition & 1 deletion pastas/version.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@

logger = logging.getLogger(__name__)

__version__ = "1.0.1"
__version__ = "1.1.0"


def check_numba_scipy() -> bool:
Expand Down

0 comments on commit a7b11cc

Please sign in to comment.