
Commit

Merge pull request #237 from tobac-project/hotfix
Version 1.4.1 release
w-k-jones committed Feb 2, 2023
2 parents 49c4c27 + 3fefe10 commit 09b176b
Showing 10 changed files with 202 additions and 86 deletions.
5 changes: 5 additions & 0 deletions .zenodo.json
@@ -55,6 +55,11 @@
"affiliation": "Leibniz Institute for Tropospheric Research, Leipzig (Germany)",
"orcid": "0000-0002-5350-1445"
},
{
"name": "Lettl, Kolya",
"affiliation": "Leibniz Institute for Tropospheric Research, Leipzig (Germany)",
"orcid": "0000-0002-4524-8152"
},
{
"name": "Raut, Bhupendra A.",
"affiliation": "Northwestern-Argonne Institute of Science and Engineering, Argonne National Laboratory",
11 changes: 11 additions & 0 deletions CHANGELOG.md
@@ -1,5 +1,16 @@
### Tobac Changelog

_**Version 1.4.1:**_

**Bug fixes**

- Fixed a bug in predictive tracking that duplicated column names when the input dataset already has coordinates named x and/or y (a minimal sketch of the collision follows at the end of this entry) [#217](https://github.com/tobac-project/tobac/pull/217)
- Set the extrapolate parameter to 0 in the example notebooks to prevent a not-implemented error [#217](https://github.com/tobac-project/tobac/pull/217)

**Documentation**

- Regenerated the example notebooks so that they are up to date with the current version [#233](https://github.com/tobac-project/tobac/pull/233)
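
As a minimal illustration of the column-name collision fixed by [#217](https://github.com/tobac-project/tobac/pull/217), the sketch below uses plain pandas with made-up values; only the column names (`hdim_1`, `hdim_2`, `x`, `y`) are taken from tobac.

```python
import pandas as pd

# Stand-in for a tobac feature dataframe whose input data already carry
# "x" and "y" coordinate columns (values are made up for illustration).
features = pd.DataFrame(
    {"frame": [0, 0], "hdim_1": [10.0, 20.0], "hdim_2": [5.0, 15.0],
     "y": [1000.0, 2000.0], "x": [500.0, 1500.0]}
)

# Renaming the pixel indices straight to "y"/"x" for trackpy, as the
# predictive tracker effectively did before 1.4.1, collides with the
# existing coordinate columns and leaves duplicate column names behind.
renamed = features.rename(columns={"hdim_1": "y", "hdim_2": "x"})
print(renamed.columns.tolist())            # ['frame', 'y', 'x', 'y', 'x']
assert renamed.columns.duplicated().any()  # duplicated names are the bug
```

The new assertions added to `tobac/tests/test_tracking.py` in this release guard against exactly this, checking that the tracked output contains no duplicated column names and only gains a `cell` column.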

_**Version 1.4.0:**_

**Enhancements**

2 changed files not shown: large diffs are not rendered by default.

36 changes: 18 additions & 18 deletions examples/Example_Precip_Tracking/Example_Precip_Tracking.ipynb

Large diffs are not rendered by default.

90 changes: 77 additions & 13 deletions examples/Example_Updraft_Tracking/Example_Updraft_Tracking.ipynb

Large diffs are not rendered by default.

2 changes: 1 addition & 1 deletion tobac/__init__.py
@@ -77,4 +77,4 @@
from . import merge_split

# Set version number
__version__ = "1.4.0"
__version__ = "1.4.1"
33 changes: 33 additions & 0 deletions tobac/tests/test_feature_detection.py
@@ -194,3 +194,36 @@ def test_feature_detection_threshold_sort(test_threshs, target):
target=target,
)
assert_frame_equal(fd_output_first, fd_output_test)


def test_feature_detection_coords():
"""Tests that the output features dataframe contains all the coords of the input iris cube"""
test_dset_size = (50, 50)
test_hdim_1_pt = 20.0
test_hdim_2_pt = 20.0
test_hdim_1_sz = 5
test_hdim_2_sz = 5
test_amp = 2
test_min_num = 2

test_data = np.zeros(test_dset_size)
test_data = tbtest.make_feature_blob(
test_data,
test_hdim_1_pt,
test_hdim_2_pt,
h1_size=test_hdim_1_sz,
h2_size=test_hdim_2_sz,
amplitude=test_amp,
)
test_data_iris = tbtest.make_dataset_from_arr(test_data, data_type="iris")
fd_output_first = feat_detect.feature_detection_multithreshold_timestep(
test_data_iris,
0,
threshold=[1, 2, 3],
n_min_threshold=test_min_num,
dxy=1,
target="maximum",
)

for coord in test_data_iris.coords():
assert coord.name() in fd_output_first
12 changes: 12 additions & 0 deletions tobac/tests/test_tracking.py
@@ -49,6 +49,14 @@ def test_linking_trackpy():
expected_out_feature.sort_index(axis=1), actual_out_feature.sort_index(axis=1)
)

# Check that no duplicate column names were produced and that linking only adds the "cell" column; all other columns match the input features
assert len(actual_out_feature.columns.tolist()) == len(
set(actual_out_feature.columns.tolist())
)
assert set(actual_out_feature.columns.tolist()) - set(
test_feature.columns.tolist()
) == {"cell"}


@pytest.mark.parametrize(
"max_trackpy, max_tobac, adaptive_step, adaptive_stop",
@@ -142,6 +150,10 @@ def test_trackpy_predict():

assert_frame_equal(expected_output.sort_index(), output.sort_index())

# Check that no duplicate column names were produced and that linking only adds the "cell" column; all other columns match the input features
assert len(output.columns.tolist()) == len(set(output.columns.tolist()))
assert set(output.columns.tolist()) - set(features.columns.tolist()) == {"cell"}


def test_tracking_extrapolation():
"""Tests the extrapolation capabilities of tracking.
19 changes: 12 additions & 7 deletions tobac/tracking.py
@@ -3,18 +3,18 @@
The individual features and associated area/volumes identified in
each timestep have to be linked into trajectories to analyse
the time evolution of their properties for a better understanding of
the underlying physical processes.
The implementations are structured in a way that allows for the future
addition of more complex tracking methods recording a more complex
network of relationships between features at different points in
time.
References
----------
.. Heikenfeld, M., Marinescu, P. J., Christensen, M.,
Watson-Parris, D., Senf, F., van den Heever, S. C.
& Stier, P. (2019). tobac 1.2: towards a flexible
framework for tracking and analysis of clouds in
diverse datasets. Geoscientific Model Development,
12(11), 4551-4570.
"""
@@ -233,12 +233,12 @@ def linking_trackpy(
tp.linking.Linker.MAX_SUB_NET_SIZE_ADAPTIVE = subnetwork_size

# deep copy to preserve features field:
features_linking = deepcopy(features)
features = deepcopy(features)

if method_linking == "random":
# link features into trajectories:
trajectories_unfiltered = tp.link(
features_linking,
features,
search_range=search_range,
memory=memory,
t_column="frame",
@@ -251,6 +251,9 @@
elif method_linking == "predict":

# avoid setting pos_columns by renaming to trackpy's default column names, to work around a trackpy bug
features.rename(
columns={"y": "__temp_y_coord", "x": "__temp_x_coord"}, inplace=True
)
features.rename(columns={"hdim_1": "y", "hdim_2": "x"}, inplace=True)

# generate list of features as input for df_link_iter to avoid bug in df_link
@@ -280,7 +283,9 @@ def linking_trackpy(
trajectories_unfiltered.rename(
columns={"y": "hdim_1", "x": "hdim_2"}, inplace=True
)
features.rename(columns={"y": "hdim_1", "x": "hdim_2"}, inplace=True)
trajectories_unfiltered.rename(
columns={"__temp_y_coord": "y", "__temp_x_coord": "x"}, inplace=True
)
else:
raise ValueError("method_linking unknown")

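
To make the rename bookkeeping in the `predict` branch above easier to follow, here is a small self-contained sketch of the same pattern in plain pandas (not tobac's implementation): the trackpy linking step is replaced by a dummy `cell` assignment so the example stays runnable, and the temporary column names mirror the diff above.

```python
import pandas as pd


def link_with_safe_renames(features: pd.DataFrame) -> pd.DataFrame:
    """Sketch of the 1.4.1 rename bookkeeping around predictive linking."""
    work = features.copy(deep=True)

    # Stash any pre-existing coordinate columns under temporary names so the
    # next rename cannot produce duplicate column labels.
    work = work.rename(columns={"y": "__temp_y_coord", "x": "__temp_x_coord"})

    # Expose the pixel indices under trackpy's default position column names.
    work = work.rename(columns={"hdim_1": "y", "hdim_2": "x"})

    # Placeholder for trackpy's predictive linking; tag every feature with a
    # dummy cell id so the sketch runs without trackpy.
    trajectories = work.copy()
    trajectories["cell"] = range(1, len(trajectories) + 1)

    # Undo the renames on the output: pixel indices back to hdim_1/hdim_2,
    # then the stashed coordinates back to their original names.
    trajectories = trajectories.rename(columns={"y": "hdim_1", "x": "hdim_2"})
    return trajectories.rename(
        columns={"__temp_y_coord": "y", "__temp_x_coord": "x"}
    )


features = pd.DataFrame(
    {"frame": [0, 1], "hdim_1": [10.0, 11.0], "hdim_2": [5.0, 6.0],
     "y": [1000.0, 1100.0], "x": [500.0, 600.0]}
)
out = link_with_safe_renames(features)
assert not out.columns.duplicated().any()
assert set(out.columns) - set(features.columns) == {"cell"}
```

In tobac itself this happens inside `linking_trackpy(..., method_linking="predict")`; the regenerated example notebooks also pass `extrapolate=0`, since extrapolation is the code path behind the not-implemented error noted in the changelog.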
