diff --git a/.travis.yml b/.travis.yml
index 8bf1a29383..7ae0af3a30 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -112,11 +112,15 @@ install:
- python setup.py --quiet install
- # JUST FOR NOW : Install latest version of iris-grib.
+ # Install latest version of iris-grib.
+ # ONLY for Python 3, as Iris grib tests now require iris-grib >= v0.15.
# TODO : remove when iris doesn't do an integration test requiring iris-grib.
- - if [[ "${TEST_MINIMAL}" != true && ${PYTHON_VERSION} == 2* ]]; then
- conda install --quiet -n ${ENV_NAME} python-ecmwf_grib;
- pip install git+https://github.com/SciTools/iris-grib.git@v0.11.0;
+ # test against the latest version of python-eccodes.
+ # Conda-forge versioning is out of order (0.9.* is later than 2.12.*).
+ - >
+ if [[ "${TEST_MINIMAL}" != true && "${PYTHON_VERSION}" == 3* ]]; then
+ conda install --quiet -n ${ENV_NAME} python-eccodes">=0.9.1, <2";
+ conda install --quiet -n ${ENV_NAME} --no-deps iris-grib;
fi
script:
@@ -127,11 +131,11 @@ script:
- >
if [[ ${TEST_TARGET} == 'default' ]]; then
export IRIS_REPO_DIR=${INSTALL_DIR};
- python -m iris.tests.runner --default-tests --system-tests --print-failed-images;
+ python -m iris.tests.runner --default-tests --system-tests;
fi
- if [[ ${TEST_TARGET} == 'example' ]]; then
- python -m iris.tests.runner --example-tests --print-failed-images;
+ python -m iris.tests.runner --example-tests;
fi
# A call to check "whatsnew" contributions are valid, because the Iris test
@@ -164,12 +168,18 @@ script:
fi
# Split the organisation out of the slug. See https://stackoverflow.com/a/5257398/741316 for description.
- - ORG=(${TRAVIS_REPO_SLUG//\// })
+ # NOTE: a *separate* "export" command appears to be necessary here : A command of the
+ # form "export ORG=.." failed to define ORG for the following command (?!)
+ - >
+ ORG=$(echo ${TRAVIS_REPO_SLUG} | cut -d/ -f1);
+ export ORG
+
+ - echo "Travis job context ORG=${ORG}; TRAVIS_EVENT_TYPE=${TRAVIS_EVENT_TYPE}; PUSH_BUILT_DOCS=${PUSH_BUILT_DOCS}"
# When we merge a change to SciTools/iris, we can push docs to github pages.
# At present, only the Python 3.7 "doctest" job does this.
# Results appear at https://scitools-docs.github.io/iris/<>/index.html
- - if [[ ${ORG} == "SciTools" && ${TRAVIS_EVENT_TYPE} == 'push' && ${PUSH_BUILT_DOCS} == 'true' ]]; then
+ - if [[ "${ORG}" == 'SciTools' && "${TRAVIS_EVENT_TYPE}" == 'push' && "${PUSH_BUILT_DOCS}" == 'true' ]]; then
cd ${INSTALL_DIR};
pip install doctr;
doctr deploy --deploy-repo SciTools-docs/iris --built-docs docs/iris/build/html
diff --git a/docs/iris/example_code/Meteorology/COP_maps.py b/docs/iris/example_code/Meteorology/COP_maps.py
index aa5049feb9..a84f550004 100644
--- a/docs/iris/example_code/Meteorology/COP_maps.py
+++ b/docs/iris/example_code/Meteorology/COP_maps.py
@@ -103,7 +103,7 @@ def main():
# Add the first subplot showing the E1 scenario
plt.subplot(121)
- plt.title('HadGEM2 E1 Scenario', fontsize=10)
+ plt.title('HadGEM2 E1 Scenario', fontsize=10)
iplt.contourf(delta_e1, levels, colors=colors, extend='both')
plt.gca().coastlines()
# get the current axes' subplot for use later on
@@ -111,7 +111,7 @@ def main():
# Add the second subplot showing the A1B scenario
plt.subplot(122)
- plt.title('HadGEM2 A1B-Image Scenario', fontsize=10)
+ plt.title('HadGEM2 A1B-Image Scenario', fontsize=10)
contour_result = iplt.contourf(delta_a1b, levels, colors=colors,
extend='both')
plt.gca().coastlines()
@@ -131,8 +131,7 @@ def main():
width = left - first_plot_left + width
# Add axes to the figure, to place the colour bar
- colorbar_axes = fig.add_axes([first_plot_left, bottom + 0.07,
- width, 0.03])
+ colorbar_axes = fig.add_axes([first_plot_left, 0.18, width, 0.03])
# Add the colour bar
cbar = plt.colorbar(contour_result, colorbar_axes,
diff --git a/docs/iris/src/_templates/index.html b/docs/iris/src/_templates/index.html
index e9f9b16111..391bce68d9 100644
--- a/docs/iris/src/_templates/index.html
+++ b/docs/iris/src/_templates/index.html
@@ -134,7 +134,7 @@
extra information on specific technical issues
- Iris v2.3
+ Iris v2.4
A powerful, format-agnostic, community-driven Python library for analysing and
diff --git a/docs/iris/src/conf.py b/docs/iris/src/conf.py
index 6cdfe634c4..d1cf00aa58 100644
--- a/docs/iris/src/conf.py
+++ b/docs/iris/src/conf.py
@@ -1,4 +1,4 @@
-# (C) British Crown Copyright 2010 - 2018, Met Office
+# (C) British Crown Copyright 2010 - 2020, Met Office
#
# This file is part of Iris.
#
@@ -64,7 +64,6 @@
'sphinx.ext.imgmath',
'sphinx.ext.intersphinx',
'matplotlib.sphinxext.mathmpl',
- 'matplotlib.sphinxext.only_directives',
'matplotlib.sphinxext.plot_directive',
# better class documentation
diff --git a/docs/iris/src/userguide/cube_maths.rst b/docs/iris/src/userguide/cube_maths.rst
index 6c6f846bc3..8fe6eb12d5 100644
--- a/docs/iris/src/userguide/cube_maths.rst
+++ b/docs/iris/src/userguide/cube_maths.rst
@@ -205,12 +205,12 @@ Finally, the cube we have created needs to be given a suitable name::
The result could now be plotted using the guidance provided in the
:doc:`plotting_a_cube` section.
-.. htmlonly::
+.. only:: html
A very similar example to this can be found in
:doc:`/examples/Meteorology/deriving_phenomena`.
-.. latexonly::
+.. only:: latex
A very similar example to this can be found in the examples section,
with the title "Deriving Exner Pressure and Air Temperature".
diff --git a/docs/iris/src/userguide/index.rst b/docs/iris/src/userguide/index.rst
index 8c0b24bec3..4fb7b62155 100644
--- a/docs/iris/src/userguide/index.rst
+++ b/docs/iris/src/userguide/index.rst
@@ -13,7 +13,7 @@ fully before experimenting with your own data files.
Much of the content has supplementary links to the reference documentation; you will not need to follow these
links in order to understand the guide but they may serve as a useful reference for future exploration.
-.. htmlonly::
+.. only:: html
Since later pages depend on earlier ones, try reading this user guide sequentially using the ``next`` and ``previous`` links.
diff --git a/docs/iris/src/userguide/loading_iris_cubes.rst b/docs/iris/src/userguide/loading_iris_cubes.rst
index 2cb3b9b259..bf50acc614 100644
--- a/docs/iris/src/userguide/loading_iris_cubes.rst
+++ b/docs/iris/src/userguide/loading_iris_cubes.rst
@@ -166,18 +166,36 @@ As we have seen, loading the following file creates several Cubes::
cubes = iris.load(filename)
Specifying a name as a constraint argument to :py:func:`iris.load` will mean
-only cubes with a matching :meth:`name <iris.cube.Cube.name>`
+only cubes with matching :meth:`name <iris.cube.Cube.name>`
will be returned::
filename = iris.sample_data_path('uk_hires.pp')
- cubes = iris.load(filename, 'specific_humidity')
+ cubes = iris.load(filename, 'surface_altitude')
-To constrain the load to multiple distinct constraints, a list of constraints
+Note that, the provided name will match against either the standard name,
+long name, NetCDF variable name or STASH metadata of a cube. Therefore, the
+previous example using the ``surface_altitude`` standard name constraint can
+also be achieved using the STASH value of ``m01s00i033``::
+
+ filename = iris.sample_data_path('uk_hires.pp')
+ cubes = iris.load(filename, 'm01s00i033')
+
+If further specific name constraint control is required i.e., to constrain
+against a combination of standard name, long name, NetCDF variable name and/or
+STASH metadata, consider using the :class:`iris.NameConstraint`. For example,
+to constrain against both a standard name of ``surface_altitude`` **and** a STASH
+of ``m01s00i033``::
+
+ filename = iris.sample_data_path('uk_hires.pp')
+ constraint = iris.NameConstraint(standard_name='surface_altitude', STASH='m01s00i033')
+ cubes = iris.load(filename, constraint)
+
+To constrain the load to multiple distinct constraints, a list of constraints
can be provided. This is equivalent to running load once for each constraint
but is likely to be more efficient::
filename = iris.sample_data_path('uk_hires.pp')
- cubes = iris.load(filename, ['air_potential_temperature', 'specific_humidity'])
+ cubes = iris.load(filename, ['air_potential_temperature', 'surface_altitude'])
The :class:`iris.Constraint` class can be used to restrict coordinate values
on load. For example, to constrain the load to match
diff --git a/docs/iris/src/whatsnew/2.4.rst b/docs/iris/src/whatsnew/2.4.rst
new file mode 100644
index 0000000000..d82cfd10fa
--- /dev/null
+++ b/docs/iris/src/whatsnew/2.4.rst
@@ -0,0 +1,49 @@
+What's New in Iris 2.4.0
+************************
+
+:Release: 2.4.0
+:Date: 2020-01-15
+
+This document explains the new/changed features of Iris in version 2.4.0
+(:doc:`View all changes <index>`.)
+
+
+Iris 2.4.0 Features
+===================
+
+.. admonition:: Last python 2 version of Iris
+
+ Iris 2.4 is a final extra release of Iris 2, which back-ports specific desired features from
+ Iris 3 (not yet released).
+
+ The purpose of this is both to support early adoption of certain newer features,
+ and to provide a final release for Python 2.
+
+ The next release of Iris will be version 3.0 : a major-version release which
+ introduces breaking API and behavioural changes, and only supports Python 3.
+
+* :class:`iris.coord_systems.Geostationary` can now accept creation arguments of
+ `false_easting=None` or `false_northing=None`, equivalent to values of 0.
+ Previously these kwargs could be omitted, but could not be set to `None`.
+ This also enables loading of netcdf data on a Geostationary grid, where either of these
+ keys is not present as a grid-mapping variable property : Previously, loading any
+ such data caused an exception.
+* The area weights used when performing area weighted regridding with :class:`iris.analysis.AreaWeighted`
+ are now cached.
+ This allows a significant speedup when regridding multiple similar cubes, by repeatedly using
+ a `'regridder' object <../iris/iris/analysis.html?highlight=regridder#iris.analysis.AreaWeighted.regridder>`_
+ which you created first.
+* Name constraint matching against cubes during loading or extracting has been relaxed from strictly matching
+ against the :meth:`~iris.cube.Cube.name`, to matching against either the
+ ``standard_name``, ``long_name``, NetCDF ``var_name``, or ``STASH`` attributes metadata of a cube.
+* Cubes and coordinates now have a new ``names`` property that contains a tuple of the
+ ``standard_name``, ``long_name``, NetCDF ``var_name``, and ``STASH`` attributes metadata.
+* The :class:`~iris.NameConstraint` provides richer name constraint matching when loading or extracting
+ against cubes, by supporting a constraint against any combination of
+ ``standard_name``, ``long_name``, NetCDF ``var_name`` and ``STASH``
+ from the attributes dictionary of a :class:`~iris.cube.Cube`.
+
+
+Iris 2.4.0 Dependency Updates
+=============================
+* Iris is now able to use the latest version of matplotlib.
diff --git a/docs/iris/src/whatsnew/index.rst b/docs/iris/src/whatsnew/index.rst
index a4a472de53..fffcd406a1 100644
--- a/docs/iris/src/whatsnew/index.rst
+++ b/docs/iris/src/whatsnew/index.rst
@@ -9,6 +9,7 @@ Iris versions.
.. toctree::
:maxdepth: 2
+ 2.4.rst
2.3.rst
2.2.rst
2.1.rst
diff --git a/lib/iris/__init__.py b/lib/iris/__init__.py
index a07ab27d8b..30e1c66818 100644
--- a/lib/iris/__init__.py
+++ b/lib/iris/__init__.py
@@ -1,4 +1,4 @@
-# (C) British Crown Copyright 2010 - 2019, Met Office
+# (C) British Crown Copyright 2010 - 2020, Met Office
#
# This file is part of Iris.
#
@@ -121,17 +121,29 @@ def callback(cube, field, filename):
# Iris revision.
-__version__ = '2.3.0'
+__version__ = '2.4.0rc0'
# Restrict the names imported when using "from iris import *"
-__all__ = ['load', 'load_cube', 'load_cubes', 'load_raw',
- 'save', 'Constraint', 'AttributeConstraint', 'sample_data_path',
- 'site_configuration', 'Future', 'FUTURE',
- 'IrisDeprecation']
+__all__ = [
+ "load",
+ "load_cube",
+ "load_cubes",
+ "load_raw",
+ "save",
+ "Constraint",
+ "AttributeConstraint",
+ "NameConstraint",
+ "sample_data_path",
+ "site_configuration",
+ "Future",
+ "FUTURE",
+ "IrisDeprecation",
+]
Constraint = iris._constraints.Constraint
AttributeConstraint = iris._constraints.AttributeConstraint
+NameConstraint = iris._constraints.NameConstraint
class Future(threading.local):
diff --git a/lib/iris/_constraints.py b/lib/iris/_constraints.py
index 18b7fb1f54..a757a80e21 100644
--- a/lib/iris/_constraints.py
+++ b/lib/iris/_constraints.py
@@ -1,4 +1,4 @@
-# (C) British Crown Copyright 2010 - 2019, Met Office
+# (C) British Crown Copyright 2010 - 2020, Met Office
#
# This file is part of Iris.
#
@@ -52,7 +52,8 @@ def __init__(self, name=None, cube_func=None, coord_values=None, **kwargs):
Args:
* name: string or None
- If a string, it is used as the name to match against Cube.name().
+ If a string, it is used as the name to match against the
+ `~iris.cube.Cube.names` property.
* cube_func: callable or None
If a callable, it must accept a Cube as its first and only argument
and return either True or False.
@@ -140,7 +141,9 @@ def _coordless_match(self, cube):
"""
match = True
if self._name:
- match = self._name == cube.name()
+ # Require to also check against cube.name() for the fallback
+ # "unknown" default case, when there is no name metadata available.
+ match = self._name in cube.names or self._name == cube.name()
if match and self._cube_func:
match = self._cube_func(cube)
return match
@@ -477,4 +480,104 @@ def _cube_func(self, cube):
return match
def __repr__(self):
- return 'AttributeConstraint(%r)' % self._attributes
+ return "AttributeConstraint(%r)" % self._attributes
+
+
+class NameConstraint(Constraint):
+ """Provides a simple Cube name based :class:`Constraint`."""
+
+ def __init__(
+ self,
+ standard_name="none",
+ long_name="none",
+ var_name="none",
+ STASH="none",
+ ):
+ """
+ Provides a simple Cube name based :class:`Constraint`, which matches
+ against each of the names provided, which may be either standard name,
+ long name, NetCDF variable name and/or the STASH from the attributes
+ dictionary.
+
+ The name constraint will only succeed if *all* of the provided names
+ match.
+
+ Kwargs:
+ * standard_name:
+ A string or callable representing the standard name to match
+ against.
+ * long_name:
+ A string or callable representing the long name to match against.
+ * var_name:
+ A string or callable representing the NetCDF variable name to match
+ against.
+ * STASH:
+ A string or callable representing the UM STASH code to match
+ against.
+
+ .. note::
+ The default value of each of the keyword arguments is the string
+ "none", rather than the singleton None, as None may be a legitimate
+            value to be matched against; e.g., to constrain against all
+            cubes where the standard_name is not set, use standard_name=None.
+
+ Returns:
+ * Boolean
+
+ Example usage::
+
+ iris.NameConstraint(long_name='air temp', var_name=None)
+
+ iris.NameConstraint(long_name=lambda name: 'temp' in name)
+
+ iris.NameConstraint(standard_name='air_temperature',
+ STASH=lambda stash: stash.item == 203)
+
+ """
+ self.standard_name = standard_name
+ self.long_name = long_name
+ self.var_name = var_name
+ self.STASH = STASH
+ self._names = ("standard_name", "long_name", "var_name", "STASH")
+ Constraint.__init__(self, cube_func=self._cube_func)
+
+ def _cube_func(self, cube):
+ def matcher(target, value):
+ if callable(value):
+ result = False
+ if target is not None:
+ #
+ # Don't pass None through into the callable. Users should
+ # use the "name=None" pattern instead. Otherwise, users
+ # will need to explicitly handle the None case, which is
+ # unnecessary and pretty darn ugly e.g.,
+ #
+ # lambda name: name is not None and name.startswith('ick')
+ #
+ result = value(target)
+ else:
+ result = value == target
+ return result
+
+ match = True
+ for name in self._names:
+ expected = getattr(self, name)
+ if expected != "none":
+ if name == "STASH":
+ actual = cube.attributes.get(name)
+ else:
+ actual = getattr(cube, name)
+ match = matcher(actual, expected)
+                # Make this a short-circuit match.
+ if match is False:
+ break
+
+ return match
+
+ def __repr__(self):
+ names = []
+ for name in self._names:
+ value = getattr(self, name)
+ if value != "none":
+ names.append("{}={!r}".format(name, value))
+ return "{}({})".format(self.__class__.__name__, ", ".join(names))
diff --git a/lib/iris/_cube_coord_common.py b/lib/iris/_cube_coord_common.py
index 6225b6f64c..9c75a815d2 100644
--- a/lib/iris/_cube_coord_common.py
+++ b/lib/iris/_cube_coord_common.py
@@ -1,4 +1,4 @@
-# (C) British Crown Copyright 2010 - 2019, Met Office
+# (C) British Crown Copyright 2010 - 2020, Met Office
#
# This file is part of Iris.
#
@@ -19,6 +19,8 @@
from six.moves import (filter, input, map, range, zip) # noqa
import six
+
+from collections import namedtuple
import re
import string
@@ -31,6 +33,30 @@
_TOKEN_PARSE = re.compile(r'''^[a-zA-Z0-9][\w\.\+\-@]*$''')
+class Names(
+ namedtuple("Names", ["standard_name", "long_name", "var_name", "STASH"])
+):
+ """
+ Immutable container for name metadata.
+
+ Args:
+
+ * standard_name:
+ A string representing the CF Conventions and Metadata standard name, or
+ None.
+ * long_name:
+        A string representing the CF Conventions and Metadata long name, or
+        None.
+ * var_name:
+ A string representing the associated NetCDF variable name, or None.
+ * STASH:
+ A string representing the `~iris.fileformats.pp.STASH` code, or None.
+
+ """
+
+ __slots__ = ()
+
+
def get_valid_standard_name(name):
# Standard names are optionally followed by a standard name
# modifier, separated by one or more blank spaces
@@ -177,6 +203,22 @@ def _check(item):
return result
+ @property
+ def names(self):
+ """
+ A tuple containing all of the metadata names. This includes the
+ standard name, long name, NetCDF variable name, and attributes
+ STASH name.
+
+ """
+ standard_name = self.standard_name
+ long_name = self.long_name
+ var_name = self.var_name
+ stash_name = self.attributes.get("STASH")
+ if stash_name is not None:
+ stash_name = str(stash_name)
+ return Names(standard_name, long_name, var_name, stash_name)
+
def rename(self, name):
"""
Changes the human-readable name.
diff --git a/lib/iris/analysis/__init__.py b/lib/iris/analysis/__init__.py
index 8c63ccb28d..34d8345fdd 100644
--- a/lib/iris/analysis/__init__.py
+++ b/lib/iris/analysis/__init__.py
@@ -1,4 +1,4 @@
-# (C) British Crown Copyright 2010 - 2019, Met Office
+# (C) British Crown Copyright 2010 - 2020, Met Office
#
# This file is part of Iris.
#
@@ -1080,7 +1080,7 @@ def _percentile(data, axis, percent, fast_percentile_method=False,
# Perform the percentile calculation.
if fast_percentile_method:
msg = 'Cannot use fast np.percentile method with masked array.'
- if ma.isMaskedArray(data):
+ if ma.is_masked(data):
raise TypeError(msg)
result = np.percentile(data, percent, axis=-1)
result = result.T
@@ -1090,6 +1090,8 @@ def _percentile(data, axis, percent, fast_percentile_method=False,
**kwargs)
if not ma.isMaskedArray(data) and not ma.is_masked(result):
result = np.asarray(result)
+ else:
+ result = ma.MaskedArray(result)
# Ensure to unflatten any leading dimensions.
if shape:
diff --git a/lib/iris/analysis/_area_weighted.py b/lib/iris/analysis/_area_weighted.py
index 2c484e4626..6e69b59b8e 100644
--- a/lib/iris/analysis/_area_weighted.py
+++ b/lib/iris/analysis/_area_weighted.py
@@ -1,4 +1,4 @@
-# (C) British Crown Copyright 2014 - 2016, Met Office
+# (C) British Crown Copyright 2014 - 2020, Met Office
#
# This file is part of Iris.
#
@@ -55,36 +55,38 @@ def __init__(self, src_grid_cube, target_grid_cube, mdtol=1):
.. Note::
- Both sourge and target cubes must have an XY grid defined by
+ Both source and target cubes must have an XY grid defined by
separate X and Y dimensions with dimension coordinates.
All of the XY dimension coordinates must also be bounded, and have
the same cooordinate system.
"""
- # Snapshot the state of the cubes to ensure that the regridder is
- # impervious to external changes to the original source cubes.
+ # Snapshot the state of the source cube to ensure that the regridder is
+ # impervious to external changes to the original cubes.
self._src_grid = snapshot_grid(src_grid_cube)
- self._target_grid = snapshot_grid(target_grid_cube)
+
# Missing data tolerance.
if not (0 <= mdtol <= 1):
msg = 'Value for mdtol must be in range 0 - 1, got {}.'
raise ValueError(msg.format(mdtol))
self._mdtol = mdtol
- # The need for an actual Cube is an implementation quirk caused by the
- # current usage of the experimental regrid function.
- self._target_grid_cube_cache = None
-
- @property
- def _target_grid_cube(self):
- if self._target_grid_cube_cache is None:
- x, y = self._target_grid
- data = np.empty((y.points.size, x.points.size))
- cube = iris.cube.Cube(data)
- cube.add_dim_coord(y, 0)
- cube.add_dim_coord(x, 1)
- self._target_grid_cube_cache = cube
- return self._target_grid_cube_cache
+ # Store regridding information
+ _regrid_info =\
+ eregrid._regrid_area_weighted_rectilinear_src_and_grid__prepare(
+ src_grid_cube, target_grid_cube
+ )
+ (
+ src_x,
+ src_y,
+ src_x_dim,
+ src_y_dim,
+ self.grid_x,
+ self.grid_y,
+ self.meshgrid_x,
+ self.meshgrid_y,
+ self.weights_info,
+ ) = _regrid_info
def __call__(self, cube):
"""
@@ -106,8 +108,25 @@ def __call__(self, cube):
area-weighted regridding.
"""
- if get_xy_dim_coords(cube) != self._src_grid:
- raise ValueError('The given cube is not defined on the same '
- 'source grid as this regridder.')
- return eregrid.regrid_area_weighted_rectilinear_src_and_grid(
- cube, self._target_grid_cube, mdtol=self._mdtol)
+ src_x, src_y = get_xy_dim_coords(cube)
+ if (src_x, src_y) != self._src_grid:
+ raise ValueError(
+ "The given cube is not defined on the same "
+ "source grid as this regridder."
+ )
+ src_x_dim = cube.coord_dims(src_x)[0]
+ src_y_dim = cube.coord_dims(src_y)[0]
+ _regrid_info = (
+ src_x,
+ src_y,
+ src_x_dim,
+ src_y_dim,
+ self.grid_x,
+ self.grid_y,
+ self.meshgrid_x,
+ self.meshgrid_y,
+ self.weights_info,
+ )
+ return eregrid._regrid_area_weighted_rectilinear_src_and_grid__perform(
+ cube, _regrid_info, mdtol=self._mdtol
+ )
diff --git a/lib/iris/analysis/_regrid.py b/lib/iris/analysis/_regrid.py
index f80d797642..44f91f9b82 100644
--- a/lib/iris/analysis/_regrid.py
+++ b/lib/iris/analysis/_regrid.py
@@ -1,4 +1,4 @@
-# (C) British Crown Copyright 2014 - 2019, Met Office
+# (C) British Crown Copyright 2014 - 2020, Met Office
#
# This file is part of Iris.
#
@@ -829,9 +829,9 @@ def _create_cube(data, src, x_dim, y_dim, src_x_coord, src_y_coord,
def copy_coords(src_coords, add_method):
for coord in src_coords:
dims = src.coord_dims(coord)
- if coord is src_x_coord:
+ if coord == src_x_coord:
coord = grid_x_coord
- elif coord is src_y_coord:
+ elif coord == src_y_coord:
coord = grid_y_coord
elif x_dim in dims or y_dim in dims:
continue
diff --git a/lib/iris/coord_systems.py b/lib/iris/coord_systems.py
index eab49e36de..90d1cd47ff 100644
--- a/lib/iris/coord_systems.py
+++ b/lib/iris/coord_systems.py
@@ -1,4 +1,4 @@
-# (C) British Crown Copyright 2010 - 2019, Met Office
+# (C) British Crown Copyright 2010 - 2020, Met Office
#
# This file is part of Iris.
#
@@ -647,10 +647,16 @@ class Geostationary(CoordSystem):
grid_mapping_name = 'geostationary'
- def __init__(self, latitude_of_projection_origin,
- longitude_of_projection_origin,
- perspective_point_height, sweep_angle_axis, false_easting=0,
- false_northing=0, ellipsoid=None):
+ def __init__(
+ self,
+ latitude_of_projection_origin,
+ longitude_of_projection_origin,
+ perspective_point_height,
+ sweep_angle_axis,
+ false_easting=None,
+ false_northing=None,
+ ellipsoid=None,
+ ):
"""
Constructs a Geostationary coord system.
@@ -697,9 +703,13 @@ def __init__(self, latitude_of_projection_origin,
self.perspective_point_height = float(perspective_point_height)
#: X offset from planar origin in metres.
+ if false_easting is None:
+ false_easting = 0
self.false_easting = float(false_easting)
#: Y offset from planar origin in metres.
+ if false_northing is None:
+ false_northing = 0
self.false_northing = float(false_northing)
#: The axis along which the satellite instrument sweeps - 'x' or 'y'.
diff --git a/lib/iris/experimental/regrid.py b/lib/iris/experimental/regrid.py
index edcff3d3c4..d35501b313 100644
--- a/lib/iris/experimental/regrid.py
+++ b/lib/iris/experimental/regrid.py
@@ -1,4 +1,4 @@
-# (C) British Crown Copyright 2013 - 2019, Met Office
+# (C) British Crown Copyright 2013 - 2020, Met Office
#
# This file is part of Iris.
#
@@ -22,7 +22,6 @@
from six.moves import (filter, input, map, range, zip) # noqa
import six
-from collections import namedtuple
import copy
import functools
import warnings
@@ -47,11 +46,6 @@
from iris.util import _meshgrid
-_Version = namedtuple('Version', ('major', 'minor', 'micro'))
-_NP_VERSION = _Version(*(int(val) for val in
- np.version.version.split('.') if val.isdigit()))
-
-
def _get_xy_coords(cube):
"""
Return the x and y coordinates from a cube.
@@ -405,11 +399,7 @@ def _weighted_mean_with_mdtol(data, weights, axis=None, mdtol=0):
return res
-def _regrid_area_weighted_array(src_data, x_dim, y_dim,
- src_x_bounds, src_y_bounds,
- grid_x_bounds, grid_y_bounds,
- grid_x_decreasing, grid_y_decreasing,
- area_func, circular=False, mdtol=0):
+def _regrid_area_weighted_array(src_data, x_dim, y_dim, weights_info, mdtol=0):
"""
Regrid the given data from its source grid to a new grid using
an area weighted mean to determine the resulting data values.
@@ -428,30 +418,12 @@ def _regrid_area_weighted_array(src_data, x_dim, y_dim,
The X dimension within `src_data`.
* y_dim:
The Y dimension within `src_data`.
- * src_x_bounds:
- A NumPy array of bounds along the X axis defining the source grid.
- * src_y_bounds:
- A NumPy array of bounds along the Y axis defining the source grid.
- * grid_x_bounds:
- A NumPy array of bounds along the X axis defining the new grid.
- * grid_y_bounds:
- A NumPy array of bounds along the Y axis defining the new grid.
- * grid_x_decreasing:
- Boolean indicating whether the X coordinate of the new grid is
- in descending order.
- * grid_y_decreasing:
- Boolean indicating whether the Y coordinate of the new grid is
- in descending order.
- * area_func:
- A function that returns an (p, q) array of weights given an (p, 2)
- shaped array of Y bounds and an (q, 2) shaped array of X bounds.
+ * weights_info:
+ The area weights information to be used for area-weighted
+ regridding.
Kwargs:
- * circular:
- A boolean indicating whether the `src_x_bounds` are periodic. Default
- is False.
-
* mdtol:
Tolerance of missing data. The value returned in each element of the
returned array will be masked if the fraction of missing data exceeds
@@ -467,164 +439,149 @@ def _regrid_area_weighted_array(src_data, x_dim, y_dim,
grid.
"""
- # Create empty data array to match the new grid.
- # Note that dtype is not preserved and that the array is
- # masked to allow for regions that do not overlap.
+ (
+ cached_x_indices,
+ cached_y_indices,
+ max_x_indices,
+ max_y_indices,
+ cached_weights,
+ ) = weights_info
+
+ # Ensure we have x_dim and y_dim.
+ x_dim_orig = x_dim
+ y_dim_orig = y_dim
+ if y_dim is None:
+ src_data = np.expand_dims(src_data, axis=src_data.ndim)
+ y_dim = src_data.ndim - 1
+ if x_dim is None:
+ src_data = np.expand_dims(src_data, axis=src_data.ndim)
+ x_dim = src_data.ndim - 1
+ # Move y_dim and x_dim to last dimensions
+ if not x_dim == src_data.ndim - 1:
+ src_data = np.moveaxis(src_data, x_dim, -1)
+ if not y_dim == src_data.ndim - 2:
+ if x_dim < y_dim:
+ # note: y_dim was shifted along by one position when
+ # x_dim was moved to the last dimension
+ src_data = np.moveaxis(src_data, y_dim - 1, -2)
+ elif x_dim > y_dim:
+ src_data = np.moveaxis(src_data, y_dim, -2)
+ x_dim = src_data.ndim - 1
+ y_dim = src_data.ndim - 2
+
+ # Create empty "pre-averaging" data array that will enable the
+ # src_data values corresponding to a given target grid point
+ # to be stacked per point.
+ # Note that dtype is not preserved and that the array mask
+ # allows for regions that do not overlap.
new_shape = list(src_data.shape)
- if x_dim is not None:
- new_shape[x_dim] = grid_x_bounds.shape[0]
- if y_dim is not None:
- new_shape[y_dim] = grid_y_bounds.shape[0]
-
+ new_shape[x_dim] = len(cached_x_indices)
+ new_shape[y_dim] = len(cached_y_indices)
+ num_target_pts = len(cached_y_indices) * len(cached_x_indices)
+ src_areas_shape = list(src_data.shape)
+ src_areas_shape[y_dim] = max_y_indices
+ src_areas_shape[x_dim] = max_x_indices
+ src_areas_shape += [num_target_pts]
# Use input cube dtype or convert values to the smallest possible float
# dtype when necessary.
dtype = np.promote_types(src_data.dtype, np.float16)
+ # Create empty arrays to hold src_data per target point, and weights
+ src_area_datas = np.zeros(src_areas_shape, dtype=np.float64)
+ src_area_weights = np.zeros(
+ list((max_y_indices, max_x_indices, num_target_pts))
+ )
# Flag to indicate whether the original data was a masked array.
- src_masked = ma.isMaskedArray(src_data)
+ src_masked = src_data.mask.any() if ma.isMaskedArray(src_data) else False
if src_masked:
- new_data = ma.zeros(new_shape, fill_value=src_data.fill_value,
- dtype=dtype)
+ src_area_masks = np.full(src_areas_shape, True, dtype=np.bool)
else:
- new_data = ma.zeros(new_shape, dtype=dtype)
- # Assign to mask to explode it, allowing indexed assignment.
- new_data.mask = False
-
- indices = [slice(None)] * new_data.ndim
-
- # Determine which grid bounds are within src extent.
- y_within_bounds = _within_bounds(src_y_bounds, grid_y_bounds,
- grid_y_decreasing)
- x_within_bounds = _within_bounds(src_x_bounds, grid_x_bounds,
- grid_x_decreasing)
-
- # Cache which src_bounds are within grid bounds
- cached_x_bounds = []
- cached_x_indices = []
- for (x_0, x_1) in grid_x_bounds:
- if grid_x_decreasing:
- x_0, x_1 = x_1, x_0
- x_bounds, x_indices = _cropped_bounds(src_x_bounds, x_0, x_1)
- cached_x_bounds.append(x_bounds)
- cached_x_indices.append(x_indices)
-
- # Simple for loop approach.
- for j, (y_0, y_1) in enumerate(grid_y_bounds):
- # Reverse lower and upper if dest grid is decreasing.
- if grid_y_decreasing:
- y_0, y_1 = y_1, y_0
- y_bounds, y_indices = _cropped_bounds(src_y_bounds, y_0, y_1)
- for i, (x_0, x_1) in enumerate(grid_x_bounds):
- # Reverse lower and upper if dest grid is decreasing.
- if grid_x_decreasing:
- x_0, x_1 = x_1, x_0
- x_bounds = cached_x_bounds[i]
- x_indices = cached_x_indices[i]
-
- # Determine whether to mask element i, j based on overlap with
- # src.
- # If x_0 > x_1 then we want [0]->x_1 and x_0->[0] + mod in the case
- # of wrapped longitudes. However if the src grid is not global
- # (i.e. circular) this new cell would include a region outside of
- # the extent of the src grid and should therefore be masked.
- outside_extent = x_0 > x_1 and not circular
- if (outside_extent or not y_within_bounds[j] or not
- x_within_bounds[i]):
- # Mask out element(s) in new_data
- if x_dim is not None:
- indices[x_dim] = i
- if y_dim is not None:
- indices[y_dim] = j
- new_data[tuple(indices)] = ma.masked
+ new_data_mask = np.full(new_shape, False, dtype=np.bool)
+
+ # Axes of data over which the weighted mean is calculated.
+ axis = (y_dim, x_dim)
+
+ # Stack the src_area data and weights for each target point
+ target_pt_ji = -1
+ for j, y_indices in enumerate(cached_y_indices):
+ for i, x_indices in enumerate(cached_x_indices):
+ target_pt_ji += 1
+ # Determine whether to mask element i, j based on whether
+ # there are valid weights.
+ weights = cached_weights[j][i]
+ if isinstance(weights, bool) and not weights:
+ if not src_masked:
+ # Cheat! Fill the data with zeros and weights as one.
+ # The weighted average result will be the same, but
+ # we avoid dividing by zero.
+ src_area_weights[..., target_pt_ji] = 1
+ new_data_mask[..., j, i] = True
else:
# Calculate weighted mean of data points.
# Slice out relevant data (this may or may not be a view()
# depending on x_indices being a slice or not).
- if x_dim is not None:
- indices[x_dim] = x_indices
- if y_dim is not None:
- indices[y_dim] = y_indices
- if isinstance(x_indices, tuple) and \
- isinstance(y_indices, tuple):
- raise RuntimeError('Cannot handle split bounds '
- 'in both x and y.')
- data = src_data[tuple(indices)]
-
- # Calculate weights based on areas of cropped bounds.
- weights = area_func(y_bounds, x_bounds)
-
- # Numpy 1.7 allows the axis keyword arg to be a tuple.
- # If the version of NumPy is less than 1.7 manipulate the axes
- # of the data so the x and y dimensions can be flattened.
- if _NP_VERSION.minor < 7:
- if y_dim is not None and x_dim is not None:
- flattened_shape = list(data.shape)
- if y_dim > x_dim:
- data = np.rollaxis(data, y_dim, data.ndim)
- data = np.rollaxis(data, x_dim, data.ndim)
- del flattened_shape[y_dim]
- del flattened_shape[x_dim]
- else:
- data = np.rollaxis(data, x_dim, data.ndim)
- data = np.rollaxis(data, y_dim, data.ndim)
- del flattened_shape[x_dim]
- del flattened_shape[y_dim]
- weights = weights.T
- flattened_shape.append(-1)
- data = data.reshape(*flattened_shape)
- elif y_dim is not None:
- flattened_shape = list(data.shape)
- del flattened_shape[y_dim]
- flattened_shape.append(-1)
- data = data.swapaxes(y_dim, -1).reshape(
- *flattened_shape)
- elif x_dim is not None:
- flattened_shape = list(data.shape)
- del flattened_shape[x_dim]
- flattened_shape.append(-1)
- data = data.swapaxes(x_dim, -1).reshape(
- *flattened_shape)
- weights = weights.ravel()
- axis = -1
- else:
- # Transpose weights to match dim ordering in data.
- weights_shape_y = weights.shape[0]
- weights_shape_x = weights.shape[1]
- if x_dim is not None and y_dim is not None and \
- x_dim < y_dim:
- weights = weights.T
- # Broadcast the weights array to allow numpy's ma.average
- # to be called.
- weights_padded_shape = [1] * data.ndim
- axes = []
- if y_dim is not None:
- weights_padded_shape[y_dim] = weights_shape_y
- axes.append(y_dim)
- if x_dim is not None:
- weights_padded_shape[x_dim] = weights_shape_x
- axes.append(x_dim)
- # Assign new shape to raise error on copy.
- weights.shape = weights_padded_shape
- # Broadcast weights to match shape of data.
- _, weights = np.broadcast_arrays(data, weights)
- # Axes of data over which the weighted mean is calculated.
- axis = tuple(axes)
-
- # Calculate weighted mean taking into account missing data.
- new_data_pt = _weighted_mean_with_mdtol(
- data, weights=weights, axis=axis, mdtol=mdtol)
-
- # Insert data (and mask) values into new array.
- if x_dim is not None:
- indices[x_dim] = i
- if y_dim is not None:
- indices[y_dim] = j
- new_data[tuple(indices)] = new_data_pt
-
- # Remove new mask if original data was not masked
- # and no values in the new array are masked.
- if not src_masked and not new_data.mask.any():
- new_data = new_data.data
+ data = src_data[..., y_indices, x_indices]
+ len_x = data.shape[-1]
+ len_y = data.shape[-2]
+ src_area_datas[..., 0:len_y, 0:len_x, target_pt_ji] = data
+ src_area_weights[0:len_y, 0:len_x, target_pt_ji] = weights
+ if src_masked:
+ src_area_masks[
+ ..., 0:len_y, 0:len_x, target_pt_ji
+ ] = data.mask
+
+ # Broadcast the weights array to allow numpy's ma.average
+ # to be called.
+ # Assign new shape to raise error on copy.
+ src_area_weights.shape = src_area_datas.shape[-3:]
+ # Broadcast weights to match shape of data.
+ _, src_area_weights = np.broadcast_arrays(src_area_datas, src_area_weights)
+
+ # Mask the data points
+ if src_masked:
+ src_area_datas = np.ma.array(src_area_datas, mask=src_area_masks)
+
+ # Calculate weighted mean taking into account missing data.
+ new_data = _weighted_mean_with_mdtol(
+ src_area_datas, weights=src_area_weights, axis=axis, mdtol=mdtol
+ )
+ new_data = new_data.reshape(new_shape)
+ if src_masked:
+ new_data_mask = new_data.mask
+
+ # Mask the data if originally masked or if the result has masked points
+ if ma.isMaskedArray(src_data):
+ new_data = ma.array(
+ new_data,
+ mask=new_data_mask,
+ fill_value=src_data.fill_value,
+ dtype=dtype,
+ )
+ elif new_data_mask.any():
+ new_data = ma.array(new_data, mask=new_data_mask, dtype=dtype)
+ else:
+ new_data = new_data.astype(dtype)
+
+ # Restore data to original form
+ if x_dim_orig is None and y_dim_orig is None:
+ new_data = np.squeeze(new_data, axis=x_dim)
+ new_data = np.squeeze(new_data, axis=y_dim)
+ elif y_dim_orig is None:
+ new_data = np.squeeze(new_data, axis=y_dim)
+ new_data = np.moveaxis(new_data, -1, x_dim_orig)
+ elif x_dim_orig is None:
+ new_data = np.squeeze(new_data, axis=x_dim)
+ new_data = np.moveaxis(new_data, -1, y_dim_orig)
+ elif x_dim_orig < y_dim_orig:
+ # move the x_dim back first, so that the y_dim will
+ # then be moved to its original position
+ new_data = np.moveaxis(new_data, -1, x_dim_orig)
+ new_data = np.moveaxis(new_data, -1, y_dim_orig)
+ else:
+ # move the y_dim back first, so that the x_dim will
+ # then be moved to its original position
+ new_data = np.moveaxis(new_data, -2, y_dim_orig)
+ new_data = np.moveaxis(new_data, -1, x_dim_orig)
return new_data
@@ -671,6 +628,25 @@ def regrid_area_weighted_rectilinear_src_and_grid(src_cube, grid_cube,
Returns:
A new :class:`iris.cube.Cube` instance.
+ """
+ regrid_info = _regrid_area_weighted_rectilinear_src_and_grid__prepare(
+ src_cube, grid_cube
+ )
+ result = _regrid_area_weighted_rectilinear_src_and_grid__perform(
+ src_cube, regrid_info, mdtol
+ )
+ return result
+
+
+def _regrid_area_weighted_rectilinear_src_and_grid__prepare(
+ src_cube, grid_cube
+):
+ """
+ First (setup) part of 'regrid_area_weighted_rectilinear_src_and_grid'.
+
+ Check inputs and calculate related info. The 'regrid info' returned
+ can be re-used over many 2d slices.
+
"""
# Get the 1d monotonic (or scalar) src and grid coordinates.
src_x, src_y = _get_xy_coords(src_cube)
@@ -727,6 +703,9 @@ def regrid_area_weighted_rectilinear_src_and_grid(src_cube, grid_cube,
grid_x_bounds = _get_bounds_in_units(grid_x, x_units, dtype)
grid_y_bounds = _get_bounds_in_units(grid_y, y_units, dtype)
+ # Create 2d meshgrids as required by _create_cube func.
+ meshgrid_x, meshgrid_y = _meshgrid(grid_x.points, grid_y.points)
+
# Determine whether target grid bounds are decreasing. This must
# be determined prior to wrap_lons being called.
grid_x_decreasing = grid_x_bounds[-1, 0] < grid_x_bounds[0, 0]
@@ -753,17 +732,185 @@ def regrid_area_weighted_rectilinear_src_and_grid(src_cube, grid_cube,
else:
area_func = _cartesian_area
+ def _calculate_regrid_area_weighted_weights(
+ src_x_bounds,
+ src_y_bounds,
+ grid_x_bounds,
+ grid_y_bounds,
+ grid_x_decreasing,
+ grid_y_decreasing,
+ area_func,
+ circular=False,
+ ):
+ """
+ Compute the area weights used for area-weighted regridding.
+
+ Args:
+
+ * src_x_bounds:
+ A NumPy array of bounds along the X axis defining the source grid.
+ * src_y_bounds:
+ A NumPy array of bounds along the Y axis defining the source grid.
+ * grid_x_bounds:
+ A NumPy array of bounds along the X axis defining the new grid.
+ * grid_y_bounds:
+ A NumPy array of bounds along the Y axis defining the new grid.
+ * grid_x_decreasing:
+ Boolean indicating whether the X coordinate of the new grid is
+ in descending order.
+ * grid_y_decreasing:
+ Boolean indicating whether the Y coordinate of the new grid is
+ in descending order.
+ * area_func:
+ A function that returns an (p, q) array of weights given an (p, 2)
+ shaped array of Y bounds and an (q, 2) shaped array of X bounds.
+
+ Kwargs:
+
+ * circular:
+ A boolean indicating whether the `src_x_bounds` are periodic.
+ Default is False.
+
+ Returns:
+ The area weights to be used for area-weighted regridding.
+
+ """
+ # Determine which grid bounds are within src extent.
+ y_within_bounds = _within_bounds(
+ src_y_bounds, grid_y_bounds, grid_y_decreasing
+ )
+ x_within_bounds = _within_bounds(
+ src_x_bounds, grid_x_bounds, grid_x_decreasing
+ )
+
+ # Cache which src_bounds are within grid bounds
+ cached_x_bounds = []
+ cached_x_indices = []
+ max_x_indices = 0
+ for (x_0, x_1) in grid_x_bounds:
+ if grid_x_decreasing:
+ x_0, x_1 = x_1, x_0
+ x_bounds, x_indices = _cropped_bounds(src_x_bounds, x_0, x_1)
+ cached_x_bounds.append(x_bounds)
+ cached_x_indices.append(x_indices)
+ # Keep record of the largest slice
+ if isinstance(x_indices, slice):
+ x_indices_size = np.sum(x_indices.stop - x_indices.start)
+ else: # is tuple of indices
+ x_indices_size = len(x_indices)
+ if x_indices_size > max_x_indices:
+ max_x_indices = x_indices_size
+
+ # Cache which y src_bounds areas and weights are within grid bounds
+ cached_y_indices = []
+ cached_weights = []
+ max_y_indices = 0
+ for j, (y_0, y_1) in enumerate(grid_y_bounds):
+ # Reverse lower and upper if dest grid is decreasing.
+ if grid_y_decreasing:
+ y_0, y_1 = y_1, y_0
+ y_bounds, y_indices = _cropped_bounds(src_y_bounds, y_0, y_1)
+ cached_y_indices.append(y_indices)
+ # Keep record of the largest slice
+ if isinstance(y_indices, slice):
+ y_indices_size = np.sum(y_indices.stop - y_indices.start)
+ else: # is tuple of indices
+ y_indices_size = len(y_indices)
+ if y_indices_size > max_y_indices:
+ max_y_indices = y_indices_size
+
+ weights_i = []
+ for i, (x_0, x_1) in enumerate(grid_x_bounds):
+ # Reverse lower and upper if dest grid is decreasing.
+ if grid_x_decreasing:
+ x_0, x_1 = x_1, x_0
+ x_bounds = cached_x_bounds[i]
+ x_indices = cached_x_indices[i]
+
+ # Determine whether element i, j overlaps with src and hence
+ # an area weight should be computed.
+ # If x_0 > x_1 then we want [0]->x_1 and x_0->[0] + mod in
+ # the case of wrapped longitudes. However if the src grid is
+ # not global (i.e. circular) this new cell would include a
+ # region outside of the extent of the src grid and the
+ # weight is therefore invalid.
+ outside_extent = x_0 > x_1 and not circular
+ if (
+ outside_extent or
+ not y_within_bounds[j] or
+ not x_within_bounds[i]
+ ):
+ weights = False
+ else:
+ # Calculate weights based on areas of cropped bounds.
+ if isinstance(x_indices, tuple) and isinstance(
+ y_indices, tuple
+ ):
+ raise RuntimeError(
+ "Cannot handle split bounds " "in both x and y."
+ )
+ weights = area_func(y_bounds, x_bounds)
+ weights_i.append(weights)
+ cached_weights.append(weights_i)
+ return (
+ tuple(cached_x_indices),
+ tuple(cached_y_indices),
+ max_x_indices,
+ max_y_indices,
+ tuple(cached_weights),
+ )
+
+ weights_info = _calculate_regrid_area_weighted_weights(
+ src_x_bounds,
+ src_y_bounds,
+ grid_x_bounds,
+ grid_y_bounds,
+ grid_x_decreasing,
+ grid_y_decreasing,
+ area_func,
+ circular,
+ )
+
+ return (
+ src_x,
+ src_y,
+ src_x_dim,
+ src_y_dim,
+ grid_x,
+ grid_y,
+ meshgrid_x,
+ meshgrid_y,
+ weights_info,
+ )
+
+
+def _regrid_area_weighted_rectilinear_src_and_grid__perform(
+ src_cube, regrid_info, mdtol
+):
+ """
+ Second (regrid) part of 'regrid_area_weighted_rectilinear_src_and_grid'.
+
+ Perform the prepared regrid calculation on a single 2d cube.
+
+ """
+ (
+ src_x,
+ src_y,
+ src_x_dim,
+ src_y_dim,
+ grid_x,
+ grid_y,
+ meshgrid_x,
+ meshgrid_y,
+ weights_info,
+ ) = regrid_info
+
# Calculate new data array for regridded cube.
- new_data = _regrid_area_weighted_array(src_cube.data, src_x_dim, src_y_dim,
- src_x_bounds, src_y_bounds,
- grid_x_bounds, grid_y_bounds,
- grid_x_decreasing,
- grid_y_decreasing,
- area_func, circular, mdtol)
+ new_data = _regrid_area_weighted_array(
+ src_cube.data, src_x_dim, src_y_dim, weights_info, mdtol,
+ )
# Wrap up the data as a Cube.
- # Create 2d meshgrids as required by _create_cube func.
- meshgrid_x, meshgrid_y = _meshgrid(grid_x.points, grid_y.points)
regrid_callback = RectilinearRegridder._regrid
new_cube = RectilinearRegridder._create_cube(new_data, src_cube,
src_x_dim, src_y_dim,
diff --git a/lib/iris/tests/__init__.py b/lib/iris/tests/__init__.py
index 0ce09b6ef1..dbb88b45d3 100644
--- a/lib/iris/tests/__init__.py
+++ b/lib/iris/tests/__init__.py
@@ -1,4 +1,4 @@
-# (C) British Crown Copyright 2010 - 2019, Met Office
+# (C) British Crown Copyright 2010 - 2020, Met Office
#
# This file is part of Iris.
#
@@ -1210,9 +1210,10 @@ class MyPlotTests(test.GraphicsTest):
'which is not available.')
-skip_sample_data = unittest.skipIf(not SAMPLE_DATA_AVAILABLE,
- ('Test(s) require "iris-sample-data", '
- 'which is not available.'))
+skip_sample_data = unittest.skipIf(
+ not SAMPLE_DATA_AVAILABLE,
+ ('Test(s) require "iris-sample-data", ' "which is not available."),
+)
skip_nc_time_axis = unittest.skipIf(
diff --git a/lib/iris/tests/experimental/regrid/test_regrid_area_weighted_rectilinear_src_and_grid.py b/lib/iris/tests/experimental/regrid/test_regrid_area_weighted_rectilinear_src_and_grid.py
index cce1c0e671..8c32029246 100644
--- a/lib/iris/tests/experimental/regrid/test_regrid_area_weighted_rectilinear_src_and_grid.py
+++ b/lib/iris/tests/experimental/regrid/test_regrid_area_weighted_rectilinear_src_and_grid.py
@@ -1,4 +1,4 @@
-# (C) British Crown Copyright 2013 - 2018, Met Office
+# (C) British Crown Copyright 2013 - 2020, Met Office
#
# This file is part of Iris.
#
@@ -327,19 +327,36 @@ def test_regrid_latlon_reduced_res(self):
res = regrid_area_weighted(src, dest)
self.assertCMLApproxData(res, RESULT_DIR + ('latlonreduced.cml',))
- def test_regrid_transposed(self):
- src = self.simple_cube.copy()
- dest = _subsampled_grid(src, 2, 3)
- # Transpose src so that the coords are not y, x ordered.
- src.transpose()
+ def test_regrid_reorder_axis(self):
+ src = self.realistic_cube[0, :4, :3, :2]
+ z = src.coord("model_level_number")
+ lat = src.coord("grid_latitude")
+ lon = src.coord("grid_longitude")
+ dest = _resampled_grid(self.realistic_cube[0, 0, :3, :2], 3, 3)
res = regrid_area_weighted(src, dest)
- self.assertCMLApproxData(res, RESULT_DIR + ('trasposed.cml',))
- # Using original and transposing the result should give the
- # same answer.
- src = self.simple_cube.copy()
+ self.assertArrayShapeStats(src, (4, 3, 2), 288.08868, 0.008262919)
+ self.assertArrayShapeStats(res, (4, 9, 6), 288.08865, 0.00826281)
+ # Reshape src so that the coords are ordered [x, z, y],
+ # the mean and std statistics should be the same
+ data = np.moveaxis(src.data.copy(), 2, 0)
+ src = iris.cube.Cube(data)
+ src.add_dim_coord(lat, 2)
+ src.add_dim_coord(z, 1)
+ src.add_dim_coord(lon, 0)
res = regrid_area_weighted(src, dest)
- res.transpose()
- self.assertCMLApproxData(res, RESULT_DIR + ('trasposed.cml',))
+ self.assertArrayShapeStats(src, (2, 4, 3), 288.08868, 0.008262919)
+ self.assertArrayShapeStats(res, (6, 4, 9), 288.08865, 0.00826281)
+ # Reshape src so that the coords are ordered [y, x, z],
+ # the mean and std statistics should be the same
+ data = np.moveaxis(src.data.copy(), 2, 0)
+ src = iris.cube.Cube(data)
+ src.add_dim_coord(z, 2)
+ src.add_dim_coord(lon, 1)
+ src.add_dim_coord(lat, 0)
+ dest = _resampled_grid(self.realistic_cube[0, 0, :3, :2], 3, 3)
+ res = regrid_area_weighted(src, dest)
+ self.assertArrayShapeStats(src, (3, 2, 4), 288.08868, 0.008262919)
+ self.assertArrayShapeStats(res, (9, 6, 4), 288.08865, 0.00826281)
def test_regrid_lon_to_half_res(self):
src = self.simple_cube
@@ -370,13 +387,22 @@ def test_hybrid_height(self):
def test_missing_data(self):
src = self.simple_cube.copy()
- src.data = ma.masked_array(src.data)
+ src.data = ma.masked_array(src.data, fill_value=999)
src.data[1, 2] = ma.masked
dest = _resampled_grid(self.simple_cube, 2.3, 2.4)
res = regrid_area_weighted(src, dest)
mask = np.zeros((7, 9), bool)
mask[slice(2, 5), slice(4, 7)] = True
self.assertArrayEqual(res.data.mask, mask)
+ self.assertArrayEqual(res.data.fill_value, 999)
+
+ def test_masked_data_all_false(self):
+ src = self.simple_cube.copy()
+ src.data = ma.masked_array(src.data, mask=False, fill_value=999)
+ dest = _resampled_grid(self.simple_cube, 2.3, 2.4)
+ res = regrid_area_weighted(src, dest)
+ self.assertArrayEqual(res.data.mask, False)
+ self.assertArrayEqual(res.data.fill_value, 999)
def test_no_x_overlap(self):
src = self.simple_cube
@@ -436,8 +462,19 @@ def test_cross_section(self):
dest.add_dim_coord(lon, 1)
dest.add_aux_coord(src.coord('grid_latitude').copy(), None)
res = regrid_area_weighted(src, dest)
- self.assertCMLApproxData(res, RESULT_DIR +
- ('const_lat_cross_section.cml',))
+ self.assertCMLApproxData(
+ res, RESULT_DIR + ("const_lat_cross_section.cml",)
+ )
+ # Constant latitude, data order [x, z]
+ # Using original and transposing the result should give the
+ # same answer.
+ src.transpose()
+ dest.transpose()
+ res = regrid_area_weighted(src, dest)
+ res.transpose()
+ self.assertCMLApproxData(
+ res, RESULT_DIR + ("const_lat_cross_section.cml",)
+ )
# Constant longitude
src = self.realistic_cube[0, :, :, 10]
@@ -449,8 +486,19 @@ def test_cross_section(self):
dest.add_dim_coord(lat, 1)
dest.add_aux_coord(src.coord('grid_longitude').copy(), None)
res = regrid_area_weighted(src, dest)
- self.assertCMLApproxData(res, RESULT_DIR +
- ('const_lon_cross_section.cml',))
+ self.assertCMLApproxData(
+ res, RESULT_DIR + ("const_lon_cross_section.cml",)
+ )
+ # Constant longitude, data order [y, z]
+ # Using original and transposing the result should give the
+ # same answer.
+ src.transpose()
+ dest.transpose()
+ res = regrid_area_weighted(src, dest)
+ res.transpose()
+ self.assertCMLApproxData(
+ res, RESULT_DIR + ("const_lon_cross_section.cml",)
+ )
def test_scalar_source_cube(self):
src = self.simple_cube[1, 2]
diff --git a/lib/iris/tests/integration/test_grib2.py b/lib/iris/tests/integration/test_grib2.py
index 71e80147b9..5f0a8d5177 100644
--- a/lib/iris/tests/integration/test_grib2.py
+++ b/lib/iris/tests/integration/test_grib2.py
@@ -1,4 +1,4 @@
-# (C) British Crown Copyright 2014 - 2017, Met Office
+# (C) British Crown Copyright 2014 - 2020, Met Office
#
# This file is part of Iris.
#
@@ -38,6 +38,10 @@
if tests.GRIB_AVAILABLE:
from iris_grib import load_pairs_from_fields
from iris_grib.message import GribMessage
+ try:
+ from iris_grib.grib_phenom_translation import GRIBCode
+ except ImportError:
+ GRIBCode = None
@tests.skip_data
@@ -146,7 +150,9 @@ def test_save_load(self):
fpcoord = DimCoord(24, 'forecast_period', units=Unit('hours'))
cube.add_aux_coord(tcoord)
cube.add_aux_coord(fpcoord)
- cube.attributes['WMO_constituent_type'] = 0
+ cube.attributes["WMO_constituent_type"] = 0
+ if GRIBCode is not None:
+ cube.attributes["GRIB_PARAM"] = GRIBCode("GRIB2:d000c014n000")
with self.temp_filename('test_grib_pdt40.grib2') as temp_file_path:
save(cube, temp_file_path)
@@ -218,9 +224,14 @@ def test_save_load(self):
self.assertEqual(test_cube.shape, (744, 744))
self.assertEqual(test_cube.cell_methods, ())
- # Check no cube attributes on the re-loaded cube.
- # Note: this does *not* match the original, but is as expected.
- self.assertEqual(cube_loaded_from_saved.attributes, {})
+ if GRIBCode is not None:
+ # Python3 only --> iris-grib version >= 0.15
+ # Check only the GRIB_PARAM attribute exists on the re-loaded cube.
+ # Note: this does *not* match the original, but is as expected.
+ self.assertEqual(
+ cube_loaded_from_saved.attributes,
+ {"GRIB_PARAM": GRIBCode("GRIB2:d000c003n001")},
+ )
# Now remaining to check: coordinates + data...
diff --git a/lib/iris/tests/integration/test_grib_load.py b/lib/iris/tests/integration/test_grib_load.py
index 2cf18154fc..7e5aaf7234 100644
--- a/lib/iris/tests/integration/test_grib_load.py
+++ b/lib/iris/tests/integration/test_grib_load.py
@@ -1,4 +1,4 @@
-# (C) British Crown Copyright 2010 - 2017, Met Office
+# (C) British Crown Copyright 2010 - 2020, Met Office
#
# This file is part of Iris.
#
diff --git a/lib/iris/tests/integration/test_netcdf.py b/lib/iris/tests/integration/test_netcdf.py
index 4894de292c..2033cf76a6 100644
--- a/lib/iris/tests/integration/test_netcdf.py
+++ b/lib/iris/tests/integration/test_netcdf.py
@@ -1,4 +1,4 @@
-# (C) British Crown Copyright 2014 - 2019, Met Office
+# (C) British Crown Copyright 2014 - 2020, Met Office
#
# This file is part of Iris.
#
@@ -27,7 +27,9 @@
from contextlib import contextmanager
from itertools import repeat
import os.path
+from os.path import join as path_join
import shutil
+from subprocess import check_call
import tempfile
import warnings
@@ -535,5 +537,79 @@ def test_standard_name_roundtrip(self):
self.assertEqual(detection_limit_cube.standard_name, standard_name)
+class TestLoadMinimalGeostationary(tests.IrisTest):
+ """
+ Check we can load data with a geostationary grid-mapping, even when the
+ 'false_easting' and 'false_northing' properties are missing.
+
+ """
+
+ _geostationary_problem_cdl = """
+netcdf geostationary_problem_case {
+dimensions:
+ y = 2 ;
+ x = 3 ;
+variables:
+ short radiance(y, x) ;
+ radiance:standard_name = "toa_outgoing_radiance_per_unit_wavelength" ;
+ radiance:units = "W m-2 sr-1 um-1" ;
+ radiance:coordinates = "y x" ;
+ radiance:grid_mapping = "imager_grid_mapping" ;
+ short y(y) ;
+ y:units = "rad" ;
+ y:axis = "Y" ;
+ y:long_name = "fixed grid projection y-coordinate" ;
+ y:standard_name = "projection_y_coordinate" ;
+ short x(x) ;
+ x:units = "rad" ;
+ x:axis = "X" ;
+ x:long_name = "fixed grid projection x-coordinate" ;
+ x:standard_name = "projection_x_coordinate" ;
+ int imager_grid_mapping ;
+ imager_grid_mapping:grid_mapping_name = "geostationary" ;
+ imager_grid_mapping:perspective_point_height = 35786023. ;
+ imager_grid_mapping:semi_major_axis = 6378137. ;
+ imager_grid_mapping:semi_minor_axis = 6356752.31414 ;
+ imager_grid_mapping:latitude_of_projection_origin = 0. ;
+ imager_grid_mapping:longitude_of_projection_origin = -75. ;
+ imager_grid_mapping:sweep_angle_axis = "x" ;
+
+data:
+
+ // coord values, just so these can be dim-coords
+ y = 0, 1 ;
+ x = 0, 1, 2 ;
+
+}
+"""
+
+ @classmethod
+ def setUpClass(cls):
+ # Create a temp directory for transient test files.
+ cls.temp_dir = tempfile.mkdtemp()
+ cls.path_test_cdl = path_join(cls.temp_dir, "geos_problem.cdl")
+ cls.path_test_nc = path_join(cls.temp_dir, "geos_problem.nc")
+ # Create a reference file from the CDL text.
+ with open(cls.path_test_cdl, "w") as f_out:
+ f_out.write(cls._geostationary_problem_cdl)
+ # Call 'ncgen' to make an actual netCDF file from the CDL.
+ command = "ncgen -o {} {}".format(cls.path_test_nc, cls.path_test_cdl)
+ check_call(command, shell=True)
+
+ @classmethod
+ def tearDownClass(cls):
+ # Destroy the temp directory.
+ shutil.rmtree(cls.temp_dir)
+
+ def test_geostationary_no_false_offsets(self):
+ # Check we can load test data and coord system properties are correct.
+ cube = iris.load_cube(self.path_test_nc)
+ # Check the coord system properties has the correct default properties.
+ cs = cube.coord_system()
+ self.assertIsInstance(cs, iris.coord_systems.Geostationary)
+ self.assertEqual(cs.false_easting, 0.0)
+ self.assertEqual(cs.false_northing, 0.0)
+
+
if __name__ == "__main__":
tests.main()
diff --git a/lib/iris/tests/integration/test_pickle.py b/lib/iris/tests/integration/test_pickle.py
index a6506ea91e..2153457818 100644
--- a/lib/iris/tests/integration/test_pickle.py
+++ b/lib/iris/tests/integration/test_pickle.py
@@ -1,4 +1,4 @@
-# (C) British Crown Copyright 2014 - 2017, Met Office
+# (C) British Crown Copyright 2014 - 2020, Met Office
#
# This file is part of Iris.
#
@@ -23,6 +23,7 @@
# importing anything else.
import iris.tests as tests
+import unittest
import six.moves.cPickle as pickle
import iris
@@ -31,57 +32,82 @@
from iris_grib.message import GribMessage
-@tests.skip_data
-@tests.skip_grib
-class TestGribMessage(tests.IrisTest):
- def test(self):
- # Check that a GribMessage pickles without errors.
- path = tests.get_data_path(('GRIB', 'fp_units', 'hours.grib2'))
- messages = GribMessage.messages_from_filename(path)
- message = next(messages)
+class Common(object):
+ def pickle_cube(self, protocol):
+ # Ensure that data proxies are pickleable.
+ cube = iris.load(self.path)[0]
with self.temp_filename('.pkl') as filename:
with open(filename, 'wb') as f:
- pickle.dump(message, f)
+ pickle.dump(cube, f, protocol)
+ with open(filename, 'rb') as f:
+ ncube = pickle.load(f)
+ self.assertEqual(ncube, cube)
- def test_data(self):
- # Check that GribMessage.data pickles without errors.
- path = tests.get_data_path(('GRIB', 'fp_units', 'hours.grib2'))
- messages = GribMessage.messages_from_filename(path)
- message = next(messages)
- with self.temp_filename('.pkl') as filename:
- with open(filename, 'wb') as f:
- pickle.dump(message.data, f)
+ def test_protocol_0(self):
+ self.pickle_cube(0)
+ def test_protocol_1(self):
+ self.pickle_cube(1)
-class Common(object):
- # Ensure that data proxies are pickleable.
- def pickle_cube(self, path):
- cube = iris.load(path)[0]
+ def test_protocol_2(self):
+ self.pickle_cube(2)
+
+
+@tests.skip_data
+@tests.skip_grib
+class TestGribMessage(Common, tests.IrisTest):
+ def setUp(self):
+ self.path = tests.get_data_path(('GRIB', 'fp_units', 'hours.grib2'))
+
+ def pickle_obj(self, obj):
with self.temp_filename('.pkl') as filename:
with open(filename, 'wb') as f:
- pickle.dump(cube, f)
+ pickle.dump(obj, f)
+
+ # These probably "ought" to work, but currently fail.
+ # see https://github.com/SciTools/iris/pull/2608
+ @unittest.expectedFailure
+ def test_protocol_0(self):
+ super(TestGribMessage, self).test_protocol_0()
+
+ @unittest.expectedFailure
+ def test_protocol_1(self):
+ super(TestGribMessage, self).test_protocol_1()
+
+ @unittest.expectedFailure
+ def test_protocol_2(self):
+ super(TestGribMessage, self).test_protocol_2()
+
+ def test(self):
+ # Check that a GribMessage pickles without errors.
+ messages = GribMessage.messages_from_filename(self.path)
+ obj = next(messages)
+ self.pickle_obj(obj)
+
+ def test_data(self):
+ # Check that GribMessage.data pickles without errors.
+ messages = GribMessage.messages_from_filename(self.path)
+ obj = next(messages).data
+ self.pickle_obj(obj)
@tests.skip_data
class test_netcdf(Common, tests.IrisTest):
- def test(self):
- path = tests.get_data_path(('NetCDF', 'global', 'xyt',
- 'SMALL_hires_wind_u_for_ipcc4.nc'))
- self.pickle_cube(path)
+ def setUp(self):
+ self.path = tests.get_data_path(('NetCDF', 'global', 'xyt',
+ 'SMALL_hires_wind_u_for_ipcc4.nc'))
@tests.skip_data
class test_pp(Common, tests.IrisTest):
- def test(self):
- path = tests.get_data_path(('PP', 'aPPglob1', 'global.pp'))
- self.pickle_cube(path)
+ def setUp(self):
+ self.path = tests.get_data_path(('PP', 'aPPglob1', 'global.pp'))
@tests.skip_data
class test_ff(Common, tests.IrisTest):
- def test(self):
- path = tests.get_data_path(('FF', 'n48_multi_field'))
- self.pickle_cube(path)
+ def setUp(self):
+ self.path = tests.get_data_path(('FF', 'n48_multi_field'))
if __name__ == '__main__':
diff --git a/lib/iris/tests/results/experimental/regrid/regrid_area_weighted_rectilinear_src_and_grid/trasposed.cml b/lib/iris/tests/results/experimental/regrid/regrid_area_weighted_rectilinear_src_and_grid/trasposed.cml
deleted file mode 100644
index 9aecbfc388..0000000000
--- a/lib/iris/tests/results/experimental/regrid/regrid_area_weighted_rectilinear_src_and_grid/trasposed.cml
+++ /dev/null
@@ -1,20 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/lib/iris/tests/results/experimental/regrid/regrid_area_weighted_rectilinear_src_and_grid/trasposed.data.0.json b/lib/iris/tests/results/experimental/regrid/regrid_area_weighted_rectilinear_src_and_grid/trasposed.data.0.json
deleted file mode 100644
index 0525283f0a..0000000000
--- a/lib/iris/tests/results/experimental/regrid/regrid_area_weighted_rectilinear_src_and_grid/trasposed.data.0.json
+++ /dev/null
@@ -1 +0,0 @@
-{"std": 1.0, "min": 4.499593812507495, "max": 6.499593812507495, "shape": [2], "masked": false, "mean": 5.499593812507495}
\ No newline at end of file
diff --git a/lib/iris/tests/results/grib_load/3_layer.cml b/lib/iris/tests/results/grib_load/3_layer.cml
index 24f24ed777..76cc41a04a 100644
--- a/lib/iris/tests/results/grib_load/3_layer.cml
+++ b/lib/iris/tests/results/grib_load/3_layer.cml
@@ -1,6 +1,9 @@
+
+
+
@@ -31,6 +34,9 @@
+
+
+
@@ -82,6 +88,9 @@
+
+
+
diff --git a/lib/iris/tests/results/grib_load/earth_shape_0.cml b/lib/iris/tests/results/grib_load/earth_shape_0.cml
index 1e1e491d58..bb51db3201 100644
--- a/lib/iris/tests/results/grib_load/earth_shape_0.cml
+++ b/lib/iris/tests/results/grib_load/earth_shape_0.cml
@@ -1,6 +1,9 @@
+
+
+
diff --git a/lib/iris/tests/results/grib_load/earth_shape_1.cml b/lib/iris/tests/results/grib_load/earth_shape_1.cml
index dd409ebb20..774e9921b5 100644
--- a/lib/iris/tests/results/grib_load/earth_shape_1.cml
+++ b/lib/iris/tests/results/grib_load/earth_shape_1.cml
@@ -1,6 +1,9 @@
+
+
+
diff --git a/lib/iris/tests/results/grib_load/earth_shape_2.cml b/lib/iris/tests/results/grib_load/earth_shape_2.cml
index 0e3a4a14ea..3ff9ccccb5 100644
--- a/lib/iris/tests/results/grib_load/earth_shape_2.cml
+++ b/lib/iris/tests/results/grib_load/earth_shape_2.cml
@@ -1,6 +1,9 @@
+
+
+
diff --git a/lib/iris/tests/results/grib_load/earth_shape_3.cml b/lib/iris/tests/results/grib_load/earth_shape_3.cml
index 0213c4a4a0..47d11467ee 100644
--- a/lib/iris/tests/results/grib_load/earth_shape_3.cml
+++ b/lib/iris/tests/results/grib_load/earth_shape_3.cml
@@ -1,6 +1,9 @@
+
+
+
diff --git a/lib/iris/tests/results/grib_load/earth_shape_4.cml b/lib/iris/tests/results/grib_load/earth_shape_4.cml
index 2573e867d1..e6aa14e45a 100644
--- a/lib/iris/tests/results/grib_load/earth_shape_4.cml
+++ b/lib/iris/tests/results/grib_load/earth_shape_4.cml
@@ -1,6 +1,9 @@
+
+
+
diff --git a/lib/iris/tests/results/grib_load/earth_shape_5.cml b/lib/iris/tests/results/grib_load/earth_shape_5.cml
index 56462c684b..1257c9c2ad 100644
--- a/lib/iris/tests/results/grib_load/earth_shape_5.cml
+++ b/lib/iris/tests/results/grib_load/earth_shape_5.cml
@@ -1,6 +1,9 @@
+
+
+
diff --git a/lib/iris/tests/results/grib_load/earth_shape_6.cml b/lib/iris/tests/results/grib_load/earth_shape_6.cml
index 1ad54d1f77..eb96657104 100644
--- a/lib/iris/tests/results/grib_load/earth_shape_6.cml
+++ b/lib/iris/tests/results/grib_load/earth_shape_6.cml
@@ -1,6 +1,9 @@
+
+
+
diff --git a/lib/iris/tests/results/grib_load/earth_shape_7.cml b/lib/iris/tests/results/grib_load/earth_shape_7.cml
index cea76b2739..d27ce04a4c 100644
--- a/lib/iris/tests/results/grib_load/earth_shape_7.cml
+++ b/lib/iris/tests/results/grib_load/earth_shape_7.cml
@@ -1,6 +1,9 @@
+
+
+
diff --git a/lib/iris/tests/results/grib_load/ineg_jneg.cml b/lib/iris/tests/results/grib_load/ineg_jneg.cml
index 344fbbacf2..a7d7741092 100644
--- a/lib/iris/tests/results/grib_load/ineg_jneg.cml
+++ b/lib/iris/tests/results/grib_load/ineg_jneg.cml
@@ -1,6 +1,9 @@
+
+
+
diff --git a/lib/iris/tests/results/grib_load/ineg_jpos.cml b/lib/iris/tests/results/grib_load/ineg_jpos.cml
index 14967e6a88..f578fceadb 100644
--- a/lib/iris/tests/results/grib_load/ineg_jpos.cml
+++ b/lib/iris/tests/results/grib_load/ineg_jpos.cml
@@ -1,6 +1,9 @@
+
+
+
diff --git a/lib/iris/tests/results/grib_load/ipos_jneg.cml b/lib/iris/tests/results/grib_load/ipos_jneg.cml
index 1e1e491d58..bb51db3201 100644
--- a/lib/iris/tests/results/grib_load/ipos_jneg.cml
+++ b/lib/iris/tests/results/grib_load/ipos_jneg.cml
@@ -1,6 +1,9 @@
+
+
+
diff --git a/lib/iris/tests/results/grib_load/ipos_jpos.cml b/lib/iris/tests/results/grib_load/ipos_jpos.cml
index 373d8fc475..4dc6d7f980 100644
--- a/lib/iris/tests/results/grib_load/ipos_jpos.cml
+++ b/lib/iris/tests/results/grib_load/ipos_jpos.cml
@@ -1,6 +1,9 @@
+
+
+
diff --git a/lib/iris/tests/results/grib_load/lambert_grib2.cml b/lib/iris/tests/results/grib_load/lambert_grib2.cml
index e8b3f1c4c6..dc938f0aca 100644
--- a/lib/iris/tests/results/grib_load/lambert_grib2.cml
+++ b/lib/iris/tests/results/grib_load/lambert_grib2.cml
@@ -1,6 +1,9 @@
+
+
+
diff --git a/lib/iris/tests/results/grib_load/missing_values_grib2.cml b/lib/iris/tests/results/grib_load/missing_values_grib2.cml
index b090d56a92..c4c0d81915 100644
--- a/lib/iris/tests/results/grib_load/missing_values_grib2.cml
+++ b/lib/iris/tests/results/grib_load/missing_values_grib2.cml
@@ -1,6 +1,9 @@
+
+
+
diff --git a/lib/iris/tests/results/grib_load/reduced_gg_grib2.cml b/lib/iris/tests/results/grib_load/reduced_gg_grib2.cml
index f34938ce3f..fa3ba45e3d 100644
--- a/lib/iris/tests/results/grib_load/reduced_gg_grib2.cml
+++ b/lib/iris/tests/results/grib_load/reduced_gg_grib2.cml
@@ -2,6 +2,7 @@
+
diff --git a/lib/iris/tests/results/grib_load/regular_gg_grib2.cml b/lib/iris/tests/results/grib_load/regular_gg_grib2.cml
index 20230aee0f..14213c1602 100644
--- a/lib/iris/tests/results/grib_load/regular_gg_grib2.cml
+++ b/lib/iris/tests/results/grib_load/regular_gg_grib2.cml
@@ -1,6 +1,9 @@
+
+
+
diff --git a/lib/iris/tests/results/grib_load/time_bound_grib2.cml b/lib/iris/tests/results/grib_load/time_bound_grib2.cml
index 1e1e491d58..bb51db3201 100644
--- a/lib/iris/tests/results/grib_load/time_bound_grib2.cml
+++ b/lib/iris/tests/results/grib_load/time_bound_grib2.cml
@@ -1,6 +1,9 @@
+
+
+
diff --git a/lib/iris/tests/results/imagerepo.json b/lib/iris/tests/results/imagerepo.json
index 3a5f0ab17d..e22b147de6 100644
--- a/lib/iris/tests/results/imagerepo.json
+++ b/lib/iris/tests/results/imagerepo.json
@@ -5,12 +5,14 @@
],
"example_tests.test_COP_maps.TestCOPMaps.test_cop_maps.0": [
"https://scitools.github.io/test-iris-imagehash/images/v4/ea9138db95668524913e6ac168997e85957e917e876396b96a81b5ce3c496935.png",
- "https://scitools.github.io/test-iris-imagehash/images/v4/ea9130db95668524913c6ac178995b0d956e917ec76396b96a853dcf94696935.png"
+ "https://scitools.github.io/test-iris-imagehash/images/v4/ea9130db95668524913c6ac178995b0d956e917ec76396b96a853dcf94696935.png",
+ "https://scitools.github.io/test-iris-imagehash/images/v4/ea9130db95668524913e6ac168991f0d956e917ec76396b96a853dcf94796931.png"
],
"example_tests.test_SOI_filtering.TestSOIFiltering.test_soi_filtering.0": [
"https://scitools.github.io/test-iris-imagehash/images/v4/fac460b9c17b78723e05a5a9954edaf062332799954e9ca5c63b9a52d24e5a95.png",
"https://scitools.github.io/test-iris-imagehash/images/v4/fa8460b9c17b78723e05a5a9954edaf062333799954e9ca5c63b9a52d24e4a9d.png",
- "https://scitools.github.io/test-iris-imagehash/images/v4/fa167295c5e0696a3c17a58c9568da536233da19994cdab487739b4b9b444eb5.png"
+ "https://scitools.github.io/test-iris-imagehash/images/v4/fa167295c5e0696a3c17a58c9568da536233da19994cdab487739b4b9b444eb5.png",
+ "https://scitools.github.io/test-iris-imagehash/images/v4/fa56f295c5e0694a3c17a58d95e8da536233da99984c5af4c6739b4a9a444eb4.png"
],
"example_tests.test_TEC.TestTEC.test_TEC.0": [
"https://scitools.github.io/test-iris-imagehash/images/v4/e1a561b69b1a9a42846e9a49c7596e3cce6c907b3a83c17e1b8239b3e4f33bc4.png",
@@ -25,11 +27,13 @@
"https://scitools.github.io/test-iris-imagehash/images/v4/9f8260536bd28e1320739437b5f437b0a51d66f4cc5d08fcd00fdb1c93fcb21c.png",
"https://scitools.github.io/test-iris-imagehash/images/v4/9f8260536bd28e1320739437b5f437b0a51d66f4cc7c09f4d00fdb1c93fcb21c.png",
"https://scitools.github.io/test-iris-imagehash/images/v4/9f8a60536bd28e1320739437b5f437b0a53d66f4cc5c08f4d00fdb1c93fcb21c.png",
- "https://scitools.github.io/test-iris-imagehash/images/v4/9fc060f462a08f07203ebc77a1f36707e61f4e38d8f7d08a910197fc877cec58.png"
+ "https://scitools.github.io/test-iris-imagehash/images/v4/9fc060f462a08f07203ebc77a1f36707e61f4e38d8f7d08a910197fc877cec58.png",
+ "https://scitools.github.io/test-iris-imagehash/images/v4/97c160f462a88f07203ebc77a1e36707e61f4e38d8f3d08a910597fc877cec58.png"
],
"example_tests.test_atlantic_profiles.TestAtlanticProfiles.test_atlantic_profiles.1": [
"https://scitools.github.io/test-iris-imagehash/images/v4/a6eaa57e6e81ddf999311ba3b3775e20845d5889c199673b4e22a4675e8ca11c.png",
- "https://scitools.github.io/test-iris-imagehash/images/v4/eeea64dd6ea8cd99991f1322b3761e06845718d89995b3131f32a4765ec2a1cd.png"
+ "https://scitools.github.io/test-iris-imagehash/images/v4/eeea64dd6ea8cd99991f1322b3761e06845718d89995b3131f32a4765ec2a1cd.png",
+ "https://scitools.github.io/test-iris-imagehash/images/v4/eeea64dd6ea8cd99991d1322b3741e2684571cd89995b3131f32a4765ee2a1cc.png"
],
"example_tests.test_coriolis_plot.TestCoriolisPlot.test_coriolis_plot.0": [
"https://scitools.github.io/test-iris-imagehash/images/v4/e78665de9a699659e55e9965886979966986c5e63e98c19e3a256679e1981a24.png",
@@ -252,11 +256,13 @@
],
"iris.tests.test_mapping.TestLowLevel.test_simple.0": [
"https://scitools.github.io/test-iris-imagehash/images/v4/eae0943295154bcc844e6c314fb093ce7bc7c4b3a4307bc4916f3f316ed2b4ce.png",
- "https://scitools.github.io/test-iris-imagehash/images/v4/faa0e55c855fdce7857a1ab16a85a50c3ea1e55e856658a5c11837096e8fe17a.png"
+ "https://scitools.github.io/test-iris-imagehash/images/v4/faa0e55c855fdce7857a1ab16a85a50c3ea1e55e856658a5c11837096e8fe17a.png",
+ "https://scitools.github.io/test-iris-imagehash/images/v4/faa0e55c855fdce7857a1ab16a85a50c36a1e55e854658b5c13837096e8fe17a.png"
],
"iris.tests.test_mapping.TestMappingSubRegion.test_simple.0": [
"https://scitools.github.io/test-iris-imagehash/images/v4/bd913e01d07ee07e926e87876f8196c1e0d36967393c1f181e2c3cb8b0f960d7.png",
- "https://scitools.github.io/test-iris-imagehash/images/v4/b9913d90c66eca6ec66ec2f3689195b6cf5b2f00392cb3496695621d34db6c92.png"
+ "https://scitools.github.io/test-iris-imagehash/images/v4/b9913d90c66eca6ec66ec2f3689195b6cf5b2f00392cb3496695621d34db6c92.png",
+ "https://scitools.github.io/test-iris-imagehash/images/v4/b9913d90c66eca6ec66ec2f3689195b6cf5a2f003924b3496695e21db4db6c92.png"
],
"iris.tests.test_mapping.TestUnmappable.test_simple.0": [
"https://scitools.github.io/test-iris-imagehash/images/v4/fe818d6ac17e5a958d7ab12b9d677615986e666dc4f20dea7281d98833889b22.png",
@@ -393,31 +399,37 @@
],
"iris.tests.test_plot.TestContour.test_tx.0": [
"https://scitools.github.io/test-iris-imagehash/images/v4/cff8a55f7a15b55a7817854ad007a5e8c04f3ce8c04f3e2ac4706ab295b37a96.png",
- "https://scitools.github.io/test-iris-imagehash/images/v4/eaece0173d17951fbd03974a914964e8c04a72e8c1531ee1cc746bb293973ecd.png"
+ "https://scitools.github.io/test-iris-imagehash/images/v4/eaece0173d17951fbd03974a914964e8c04a72e8c1531ee1cc746bb293973ecd.png",
+ "https://scitools.github.io/test-iris-imagehash/images/v4/eeece0173c07951fbd038748914964e8c14e72e9c1531ee1cc746bb293973ecd.png"
],
"iris.tests.test_plot.TestContour.test_ty.0": [
"https://scitools.github.io/test-iris-imagehash/images/v4/8bfc815e78018597fc019b65b425d121955e7eda854b7d6a80db7eb481b72b61.png",
- "https://scitools.github.io/test-iris-imagehash/images/v4/ebfa8553fc01b15ab4044a269546caa5956b7e9bc0b97f2cc2d62d360b363b49.png"
+ "https://scitools.github.io/test-iris-imagehash/images/v4/ebfa8553fc01b15ab4044a269546caa5956b7e9bc0b97f2cc2d62d360b363b49.png",
+ "https://scitools.github.io/test-iris-imagehash/images/v4/ebfa8553fc01b15af4055a069546caa5954b7e9bc0f97d2cc2d62d360b362b49.png"
],
"iris.tests.test_plot.TestContour.test_tz.0": [
"https://scitools.github.io/test-iris-imagehash/images/v4/8bfe81ff780185fff800955ad4027e00d517d400855f7e0085ff7e8085ff6aed.png",
"https://scitools.github.io/test-iris-imagehash/images/v4/8bfe81ff780085fff800855fd4027e00d517d400855f7e0085ff7e8085ff6aed.png",
"https://scitools.github.io/test-iris-imagehash/images/v4/8bfe817ffc00855ef0007e81d4027e80815fd56a03ff7a8085ff3aa883ff6aa5.png",
- "https://scitools.github.io/test-iris-imagehash/images/v4/8bff817ffc00857ef0007a81d4027e80815fd56a03ff7a8085ff3aa881ff6aa5.png"
+ "https://scitools.github.io/test-iris-imagehash/images/v4/8bff817ffc00857ef0007a81d4027e80815fd56a03ff7a8085ff3aa881ff6aa5.png",
+ "https://scitools.github.io/test-iris-imagehash/images/v4/8bfe805ffc00857ef0007a01d4027e80815fd56a83ff7a8085ff3aaa03ff6af5.png"
],
"iris.tests.test_plot.TestContour.test_yx.0": [
"https://scitools.github.io/test-iris-imagehash/images/v4/fa56c3cc34e891b1c9a91c36c5a170e3c71b3e5993a784e492c49b4ecec76393.png",
- "https://scitools.github.io/test-iris-imagehash/images/v4/e85e36cb95b199999765cd3694b06478c7396329958434c2cecb6c6d69ce1b92.png"
+ "https://scitools.github.io/test-iris-imagehash/images/v4/e85e36cb95b199999765cd3694b06478c7396329958434c2cecb6c6d69ce1b92.png",
+ "https://scitools.github.io/test-iris-imagehash/images/v4/e85e36cb95a19999876d4d3694b06c78c7396329958434c2cecb6c6d69ce3b92.png"
],
"iris.tests.test_plot.TestContour.test_zx.0": [
"https://scitools.github.io/test-iris-imagehash/images/v4/8bfe857f7a01a56afa05854ad015bd00d015d50a90577e80857f7ea0857f7abf.png",
"https://scitools.github.io/test-iris-imagehash/images/v4/affe815ffc008554f8007e01d0027e808557d5ea815f7ea0817f2fea817d2aff.png",
- "https://scitools.github.io/test-iris-imagehash/images/v4/affe805ffc008554f8007e01d0027e808557d5ea815f7ea0817f2eea817f2bff.png"
+ "https://scitools.github.io/test-iris-imagehash/images/v4/affe805ffc008554f8007e01d0027e808557d5ea815f7ea0817f2eea817f2bff.png",
+ "https://scitools.github.io/test-iris-imagehash/images/v4/affe8057fc00855cf8007e01d0027e808557d5ea815f7ea0817f2fea815f2bff.png"
],
"iris.tests.test_plot.TestContour.test_zy.0": [
"https://scitools.github.io/test-iris-imagehash/images/v4/8bff81ff7a0195fcf8019578d4027e00d550d402857c7e0185fe7a8385fe6aaf.png",
"https://scitools.github.io/test-iris-imagehash/images/v4/abff857ff8018578f8017a80d4027e00855ec42a81fe7a8185fe6a8f85fe6ab7.png",
- "https://scitools.github.io/test-iris-imagehash/images/v4/abff817ff8018578fc017a80d4027e00855ec42a81fe7a8185fe7a8f85fe6ab5.png"
+ "https://scitools.github.io/test-iris-imagehash/images/v4/abff817ff8018578fc017a80d4027e00855ec42a81fe7a8185fe7a8f85fe6ab5.png",
+ "https://scitools.github.io/test-iris-imagehash/images/v4/abff817ff801857afc017a80d4027e00855ec42a81fe7a8185fe6a8f05fe2abf.png"
],
"iris.tests.test_plot.TestContourf.test_tx.0": [
"https://scitools.github.io/test-iris-imagehash/images/v4/faa562ed68569d52857abd12953a8f12951f64e0d30f3ac96a4d6a696ee06a32.png",
@@ -624,7 +636,8 @@
"https://scitools.github.io/test-iris-imagehash/images/v4/abffd5ae2a15cdb6b10178d7d4082e57d7290906f685814277b1dc88724cfd26.png",
"https://scitools.github.io/test-iris-imagehash/images/v4/abffd5ae2a15c9b6a10178d7d4082c57d7290906f6c58942f7b1dc88724cfd26.png",
"https://scitools.github.io/test-iris-imagehash/images/v4/abffd4a02a01cc84f10078d7d4082c77d73909ded6ef816273bd9c98725cdd26.png",
- "https://scitools.github.io/test-iris-imagehash/images/v4/87fc9d8a7e054d83f5067bc1c1423471927ba73c8d9f864e09a1a7b358c8276f.png"
+ "https://scitools.github.io/test-iris-imagehash/images/v4/87fc9d8a7e054d83f5067bc1c1423471927ba73c8d9f864e09a1a7b358c8276f.png",
+ "https://scitools.github.io/test-iris-imagehash/images/v4/87fc9d8b7e044d81f5037bd4c14324749279a73e8d9d864f09e4a7b348dc2769.png"
],
"iris.tests.test_plot.TestPlot.test_x.0": [
"https://scitools.github.io/test-iris-imagehash/images/v4/8ffe95297e87c74a6a059158f89c3d6ed0536597c0387836d0f87866d0697097.png",
@@ -669,11 +682,13 @@
"iris.tests.test_plot.TestPlotCoordinatesGiven.test_tx.2": [
"https://scitools.github.io/test-iris-imagehash/images/v4/8bf88f457a03b5307e16b561f007b53ed067217ac1786afec0f570bf8178681a.png",
"https://scitools.github.io/test-iris-imagehash/images/v4/8bf98f057a03b5307e16b561f007b53ad067217ac1786afec0f570bf8178685a.png",
- "https://scitools.github.io/test-iris-imagehash/images/v4/eafdcec9bc219530b696a56694c2852a95656b7b81986acdc0e516adad186eda.png"
+ "https://scitools.github.io/test-iris-imagehash/images/v4/eafdcec9bc219530b696a56694c2852a95656b7b81986acdc0e516adad186eda.png",
+ "https://scitools.github.io/test-iris-imagehash/images/v4/eafdcec9f4219530b696a56694c3852a95656b7b85986acdc06516adad186e9a.png"
],
"iris.tests.test_plot.TestPlotCoordinatesGiven.test_tx.3": [
"https://scitools.github.io/test-iris-imagehash/images/v4/8ffe8f367e05952afe05a50b980ded4bd05d69c2c1fb71c1c06272f4d0a06af4.png",
- "https://scitools.github.io/test-iris-imagehash/images/v4/aff24ab7bd05952fbd0f950f914fcd48c47860f3e1b9329094266e345a850f6c.png"
+ "https://scitools.github.io/test-iris-imagehash/images/v4/aff24ab7bd05952fbd0f950f914fcd48c47860f3e1b9329094266e345a850f6c.png",
+ "https://scitools.github.io/test-iris-imagehash/images/v4/aff24ab7fd05952dbd0f950f914fcd40c47868f3e1b9329094266e345a850f6c.png"
],
"iris.tests.test_plot.TestPlotCoordinatesGiven.test_tx.4": [
"https://scitools.github.io/test-iris-imagehash/images/v4/aa953d0f85fab50fd0f2956a7a1785fafa176877d00f68f1d02c60f2f008d0f0.png",
@@ -688,7 +703,8 @@
],
"iris.tests.test_plot.TestPlotCoordinatesGiven.test_x.0": [
"https://scitools.github.io/test-iris-imagehash/images/v4/aeb8b5095a87cd60386592d9ec97ad6dd23ca4f6d0797827f0096216c1f878e6.png",
- "https://scitools.github.io/test-iris-imagehash/images/v4/affa950ddb13c03634359ad8a4c80f26911f26f3c06e0ff3f4007b4285fd6e72.png"
+ "https://scitools.github.io/test-iris-imagehash/images/v4/affa950ddb13c03634359ad8a4c80f26911f26f3c06e0ff3f4007b4285fd6e72.png",
+ "https://scitools.github.io/test-iris-imagehash/images/v4/afea950ddb13c03e34359ad8a4c86f24913f2693807e3ff1f4087b4285fd28f2.png"
],
"iris.tests.test_plot.TestPlotCoordinatesGiven.test_y.0": [
"https://scitools.github.io/test-iris-imagehash/images/v4/8fea97194f07c9c830d79169ce16269f91097af6c47861f6d0796076d0797a16.png",
@@ -707,11 +723,13 @@
"iris.tests.test_plot.TestPlotCoordinatesGiven.test_yx.2": [
"https://scitools.github.io/test-iris-imagehash/images/v4/8ffcc65767039740bc069d9ad00b8dadd03f52f181dd347a847a62ff81e8626c.png",
"https://scitools.github.io/test-iris-imagehash/images/v4/8ffcc65777039740bc069d9ad00b8dadd03d52f181dd707a847a62ff81e8626c.png",
- "https://scitools.github.io/test-iris-imagehash/images/v4/ebffca44f502b36498309c9b940999add1bb62bba784374acc5a6a246acc6b65.png"
+ "https://scitools.github.io/test-iris-imagehash/images/v4/ebffca44f502b36498309c9b940999add1bb62bba784374acc5a6a246acc6b65.png",
+ "https://scitools.github.io/test-iris-imagehash/images/v4/ebfeca44f102b3649c309c9b940d19add1bb63b3a7843e4acc5a6aa56acc6b64.png"
],
"iris.tests.test_plot.TestPlotCoordinatesGiven.test_yx.3": [
"https://scitools.github.io/test-iris-imagehash/images/v4/ea5649c434ac92e5d9c9361b95b39c38c3835a5ec6d966ced34c633099ace5a5.png",
- "https://scitools.github.io/test-iris-imagehash/images/v4/e85a6b6c96a597a591c9949b94b61b69c7926b5bccce66646b3869b831a52c26.png"
+ "https://scitools.github.io/test-iris-imagehash/images/v4/e85a6b6c96a597a591c9949b94b61b69c7926b5bccce66646b3869b831a52c26.png",
+ "https://scitools.github.io/test-iris-imagehash/images/v4/e85e6b6c86a595a791c9349b94b71b69c7926b5bccca66646b1869b831a52ca6.png"
],
"iris.tests.test_plot.TestPlotCoordinatesGiven.test_yx.4": [
"https://scitools.github.io/test-iris-imagehash/images/v4/ad2f6d2dd2d09295c3c0c7d13c1bc6d23d2c696de0e53c3ac393daf6d205c2c4.png",
@@ -736,11 +754,13 @@
"iris.tests.test_plot.TestPlotCoordinatesGiven.test_zx.2": [
"https://scitools.github.io/test-iris-imagehash/images/v4/8fe82f047c018c83bc01bc5af01fd1bcd15a327c847860fdc57a69beb0be68bd.png",
"https://scitools.github.io/test-iris-imagehash/images/v4/8fe82f047c018c83bc01bc5af01fd1bcd15a32fd847860fdc57269beb0be689d.png",
- "https://scitools.github.io/test-iris-imagehash/images/v4/8bedcf25bc03a4929c103a5bf03fdbbc81cb364d86e46da70f86899b3a0f6cc0.png"
+ "https://scitools.github.io/test-iris-imagehash/images/v4/8bedcf25bc03a4929c103a5bf03fdbbc81cb364d86e46da70f86899b3a0f6cc0.png",
+ "https://scitools.github.io/test-iris-imagehash/images/v4/cbedcd25bc02a4929c103a5bf03fdbbc81cb364d84e46da70f86899b3a0f6ec1.png"
],
"iris.tests.test_plot.TestPlotCoordinatesGiven.test_zx.3": [
"https://scitools.github.io/test-iris-imagehash/images/v4/cee8953a7a15856978579696d03d672cc49a6e5a842d3d2cc0b66bd1c2ea39f1.png",
- "https://scitools.github.io/test-iris-imagehash/images/v4/aee1f93a63168569b852d697913d632485ca2e43952d3bbcc2b66bd1426b3c71.png"
+ "https://scitools.github.io/test-iris-imagehash/images/v4/aee1f93a63168569b852d697913d632485ca2e43952d3bbcc2b66bd1426b3c71.png",
+ "https://scitools.github.io/test-iris-imagehash/images/v4/aee1793a6b168569b852d697913c622cc5ca2e4b952d3bb4c2b66bd1426b3c71.png"
],
"iris.tests.test_plot.TestPlotCoordinatesGiven.test_zx.4": [
"https://scitools.github.io/test-iris-imagehash/images/v4/ee953f0591ea3f07914a95fa7e07d1fa68156a15d07c6a3dd038c0fef000d0fa.png",
@@ -794,7 +814,8 @@
"https://scitools.github.io/test-iris-imagehash/images/v4/a3ffd5ae7f51efb6200378d7d4082c17d7280906d6e58962db31d800da6cdd26.png",
"https://scitools.github.io/test-iris-imagehash/images/v4/a3ffd4ae7f55efbe200178d7d4082c17d7280906d6e58962df319800da6cdd26.png",
"https://scitools.github.io/test-iris-imagehash/images/v4/a3ffd4827f51ef94200078d7c4082c57d739095ed6ed8962db759808da6cdd26.png",
- "https://scitools.github.io/test-iris-imagehash/images/v4/82fd958a7e006f9ba0077bc5c9462c759873dd3c8d8f826699a187b358c82f67.png"
+ "https://scitools.github.io/test-iris-imagehash/images/v4/82fd958a7e006f9ba0077bc5c9462c759873dd3c8d8f826699a187b358c82f67.png",
+ "https://scitools.github.io/test-iris-imagehash/images/v4/82fe958b7e046f89a0033bd4d9632c74d8799d3e8d8d826789e487b348dc2f69.png"
],
"iris.tests.test_plot.TestQuickplotPlot.test_x.0": [
"https://scitools.github.io/test-iris-imagehash/images/v4/83ffb5097e84c54a621799d8601d9966d213cd67c039d876d078d866d869d8f7.png",
@@ -831,7 +852,8 @@
"iris.tests.test_quickplot.TestLabels.test_contour.0": [
"https://scitools.github.io/test-iris-imagehash/images/v4/a3fd956a7a01a5ee321fc96666919b6ec15fdca593600d2586785a259dfa5a01.png",
"https://scitools.github.io/test-iris-imagehash/images/v4/a3fd956a7a01a5ee3217c9e66691996ec15fdca593680d2586785a259dfa5a01.png",
- "https://scitools.github.io/test-iris-imagehash/images/v4/a7fd95da7a01654a3217c962e4819a56c96f3c8593624da584da3b658db662db.png"
+ "https://scitools.github.io/test-iris-imagehash/images/v4/a7fd95da7a01654a3217c962e4819a56c96f3c8593624da584da3b658db662db.png",
+ "https://scitools.github.io/test-iris-imagehash/images/v4/a7fd955a7a016d1a3217c962e4819a56c96f3c859b624d2584de3a6999b662db.png"
],
"iris.tests.test_quickplot.TestLabels.test_contour.1": [
"https://scitools.github.io/test-iris-imagehash/images/v4/faa12bc1954ef43fc0bf9f02854a4ee48548c17a5ab5c17e7a0d7875a17e3a81.png",
@@ -855,11 +877,13 @@
],
"iris.tests.test_quickplot.TestLabels.test_map.0": [
"https://scitools.github.io/test-iris-imagehash/images/v4/ea5e618434ac36e5c1c9369b95b39c38c3a39a4fcee19a6e9b64cb609925cd25.png",
- "https://scitools.github.io/test-iris-imagehash/images/v4/e85a636c86a597a593c9b49b94b79969c396c95bccc69a64db30d9b039a52c26.png"
+ "https://scitools.github.io/test-iris-imagehash/images/v4/e85a636c86a597a593c9b49b94b79969c396c95bccc69a64db30d9b039a52c26.png",
+ "https://scitools.github.io/test-iris-imagehash/images/v4/e85a636c86a597a791c9349b94b79969c396c95bccc69a64db38c9b039a58ca6.png"
],
"iris.tests.test_quickplot.TestLabels.test_map.1": [
"https://scitools.github.io/test-iris-imagehash/images/v4/ea5e618434ac36e5c1c9369b95b39c38c3a39a4ecef19a6e9b64cb609925cd25.png",
- "https://scitools.github.io/test-iris-imagehash/images/v4/e85a636c86a597a593c9b49b94b79969c396c95bccc69a64db30d9b039a52c26.png"
+ "https://scitools.github.io/test-iris-imagehash/images/v4/e85a636c86a597a593c9b49b94b79969c396c95bccc69a64db30d9b039a52c26.png",
+ "https://scitools.github.io/test-iris-imagehash/images/v4/e85a636c86a597a791c9349b94b79969c396c95bccc69a64db38c9b039a58ca6.png"
],
"iris.tests.test_quickplot.TestLabels.test_pcolor.0": [
"https://scitools.github.io/test-iris-imagehash/images/v4/bb423d4e94a5c6b9c15adaadc1fb6a469c8de43a3e07904e5f016b57984e1ea1.png",
@@ -884,13 +908,15 @@
"iris.tests.test_quickplot.TestQuickplotCoordinatesGiven.test_tx.2": [
"https://scitools.github.io/test-iris-imagehash/images/v4/8aff878b7f00953062179561f087953ad167997a80784a7fc1e5d86d9978485f.png",
"https://scitools.github.io/test-iris-imagehash/images/v4/8aff878b7f80953860179561f087953ad167997a80784a7fc1e5d86d9978485b.png",
- "https://scitools.github.io/test-iris-imagehash/images/v4/eafdc6c9f720953030968d6795d28d6a95674b7b81304aedc9e51cad8d186c9a.png"
+ "https://scitools.github.io/test-iris-imagehash/images/v4/eafdc6c9f720953030968d6795d28d6a95674b7b81304aedc9e51cad8d186c9a.png",
+ "https://scitools.github.io/test-iris-imagehash/images/v4/eafdc6c9f720943030968d67d5d28d6e95674b7b81304aedc9651cad8d186c9a.png"
],
"iris.tests.test_quickplot.TestQuickplotCoordinatesGiven.test_tx.3": [
"https://scitools.github.io/test-iris-imagehash/images/v4/82ff8db67f94952e76159d6bb01dcd629059c962c1fbd9c1c062da74d820ca74.png",
"https://scitools.github.io/test-iris-imagehash/images/v4/82be8db67f95952e761d9d6bb01dcd628059c962c1fbd9e1c072da64d060ca74.png",
"https://scitools.github.io/test-iris-imagehash/images/v4/82fe8db67f95952e76159d6bb01dcd629059c962c1fbd9e1c072da64d020ca74.png",
- "https://scitools.github.io/test-iris-imagehash/images/v4/a2ff6a967f00952eb40d9d0f900fcd62c47069f3d1f93a909c266e34d8a56f68.png"
+ "https://scitools.github.io/test-iris-imagehash/images/v4/a2ff6a967f00952eb40d9d0f900fcd62c47069f3d1f93a909c266e34d8a56f68.png",
+ "https://scitools.github.io/test-iris-imagehash/images/v4/a2ff4b967f00950eb40d9d0f900fcd62d470e9f2c1f93a909c266e34d8a56f6c.png"
],
"iris.tests.test_quickplot.TestQuickplotCoordinatesGiven.test_tx.4": [
"https://scitools.github.io/test-iris-imagehash/images/v4/aa97b70ff5f0970f20b2956a6a17957af805da71d06f5a75d02cd870d800d8f2.png",
@@ -905,7 +931,8 @@
"iris.tests.test_quickplot.TestQuickplotCoordinatesGiven.test_x.0": [
"https://scitools.github.io/test-iris-imagehash/images/v4/a6ffb5097e84cde2224598d1649f8d6cd2388c76d0799867d009da76c9f8d866.png",
"https://scitools.github.io/test-iris-imagehash/images/v4/a6bfb5097f84cde2224599d1649f8d6cd2388c76d0799867d009da76c1f8d866.png",
- "https://scitools.github.io/test-iris-imagehash/images/v4/a6fbb50cfbd0c036203598dce4c88d26d32f8cf3886e1df3dc047b4289ec6e72.png"
+ "https://scitools.github.io/test-iris-imagehash/images/v4/a6fbb50cfbd0c036203598dce4c88d26d32f8cf3886e1df3dc047b4289ec6e72.png",
+ "https://scitools.github.io/test-iris-imagehash/images/v4/a6fb958dfb50c03e203598dca4c9cd26933f9cb3886e1df1dc047b4289ec2e72.png"
],
"iris.tests.test_quickplot.TestQuickplotCoordinatesGiven.test_y.0": [
"https://scitools.github.io/test-iris-imagehash/images/v4/a7ff978b7f00c9c830d7992166179e969509d866c478d964d079c876d869da26.png",
@@ -923,11 +950,13 @@
"iris.tests.test_quickplot.TestQuickplotCoordinatesGiven.test_yx.2": [
"https://scitools.github.io/test-iris-imagehash/images/v4/afffe6d67700958636179d92e019992dd039daf5817d987a807a48e499684a6d.png",
"https://scitools.github.io/test-iris-imagehash/images/v4/aeffe6d67780958636179d92e019892dd139daf5815d987a807a48e699684a6d.png",
- "https://scitools.github.io/test-iris-imagehash/images/v4/eaff6ad4f74ab16490109c9b942999add1b74bb785a41d4acd526a254acc6365.png"
+ "https://scitools.github.io/test-iris-imagehash/images/v4/eaff6ad4f74ab16490109c9b942999add1b74bb785a41d4acd526a254acc6365.png",
+ "https://scitools.github.io/test-iris-imagehash/images/v4/aaffead4f7cab16490109c9b946d99add1b34bb385a41c4acd526a254acc6365.png"
],
"iris.tests.test_quickplot.TestQuickplotCoordinatesGiven.test_yx.3": [
"https://scitools.github.io/test-iris-imagehash/images/v4/ea5e618434ac36e5c1c9369b95b39c38c3a39a4fcee19a6e9b64cb609925cd25.png",
- "https://scitools.github.io/test-iris-imagehash/images/v4/e85a636c86a597a593c9b49b94b79969c396c95bccc69a64db30d9b039a52c26.png"
+ "https://scitools.github.io/test-iris-imagehash/images/v4/e85a636c86a597a593c9b49b94b79969c396c95bccc69a64db30d9b039a52c26.png",
+ "https://scitools.github.io/test-iris-imagehash/images/v4/e85a636c86a597a791c9349b94b79969c396c95bccc69a64db38c9b039a58ca6.png"
],
"iris.tests.test_quickplot.TestQuickplotCoordinatesGiven.test_yx.4": [
"https://scitools.github.io/test-iris-imagehash/images/v4/ad2f6d2fd2d09295c2d1c3d33c1bc2d67d2c696ce0653c3ac2b1d976da05c2c4.png",
@@ -952,12 +981,14 @@
"https://scitools.github.io/test-iris-imagehash/images/v4/87ed2f867f008d8220179852f01fd9bed1789a6c847cc877c46ac972987ec8fd.png",
"https://scitools.github.io/test-iris-imagehash/images/v4/87ed2f067f008d8220179852f01fd9bed1789a6c847cc877c468c9f6987ec8fd.png",
"https://scitools.github.io/test-iris-imagehash/images/v4/87ed2f067f008d8220179c52f01fd9bed1789a6c847cc877c560c976987ec8fd.png",
- "https://scitools.github.io/test-iris-imagehash/images/v4/a3eded05fe11a492b000985af07fdbb4d1e3366d8c644da79fa68993180f6ec1.png"
+ "https://scitools.github.io/test-iris-imagehash/images/v4/a3eded05fe11a492b000985af07fdbb4d1e3366d8c644da79fa68993180f6ec1.png",
+ "https://scitools.github.io/test-iris-imagehash/images/v4/a3eded04ff11a492b000985af07fdbb4d1eb366d8c644da79fa68993180f6e81.png"
],
"iris.tests.test_quickplot.TestQuickplotCoordinatesGiven.test_zx.3": [
"https://scitools.github.io/test-iris-imagehash/images/v4/a2f9b5ba7600a56962df9e96f01dc926c498cc46847f9d6cd0244bf19a6b19f1.png",
"https://scitools.github.io/test-iris-imagehash/images/v4/a2f9b5ba7600856962df9e96f01dcd26c498cc46847f9d6cd0244bf19a6b1975.png",
- "https://scitools.github.io/test-iris-imagehash/images/v4/aef9f93a770085e9205fd696d13c4b2485ca1a43952f1934daa66bd1ca6b3c71.png"
+ "https://scitools.github.io/test-iris-imagehash/images/v4/aef9f93a770085e9205fd696d13c4b2485ca1a43952f1934daa66bd1ca6b3c71.png",
+ "https://scitools.github.io/test-iris-imagehash/images/v4/aef9793a770085e9205fd696d03ccb2485ca1e43952f1934daa66bd1ca6b3c71.png"
],
"iris.tests.test_quickplot.TestQuickplotCoordinatesGiven.test_zx.4": [
"https://scitools.github.io/test-iris-imagehash/images/v4/ae953f87d5e82d86801f91ee6e1591fe7e117876c07d6877d068d878d800d07a.png",
diff --git a/lib/iris/tests/results/integration/grib2/TestDRT3/grid_complex_spatial_differencing.cml b/lib/iris/tests/results/integration/grib2/TestDRT3/grid_complex_spatial_differencing.cml
index b15c6a4308..2cfe06f8f6 100644
--- a/lib/iris/tests/results/integration/grib2/TestDRT3/grid_complex_spatial_differencing.cml
+++ b/lib/iris/tests/results/integration/grib2/TestDRT3/grid_complex_spatial_differencing.cml
@@ -1,6 +1,9 @@
+
+
+
diff --git a/lib/iris/tests/results/integration/grib2/TestGDT30/lambert.cml b/lib/iris/tests/results/integration/grib2/TestGDT30/lambert.cml
index a33d0b04ba..215a0de88d 100644
--- a/lib/iris/tests/results/integration/grib2/TestGDT30/lambert.cml
+++ b/lib/iris/tests/results/integration/grib2/TestGDT30/lambert.cml
@@ -1,6 +1,9 @@
+
+
+
diff --git a/lib/iris/tests/results/integration/grib2/TestGDT40/reduced.cml b/lib/iris/tests/results/integration/grib2/TestGDT40/reduced.cml
index f2ca666998..3a963b3203 100644
--- a/lib/iris/tests/results/integration/grib2/TestGDT40/reduced.cml
+++ b/lib/iris/tests/results/integration/grib2/TestGDT40/reduced.cml
@@ -2,6 +2,7 @@
+
diff --git a/lib/iris/tests/results/integration/grib2/TestGDT40/regular.cml b/lib/iris/tests/results/integration/grib2/TestGDT40/regular.cml
index fb6445b8b1..e5eea0fc7c 100644
--- a/lib/iris/tests/results/integration/grib2/TestGDT40/regular.cml
+++ b/lib/iris/tests/results/integration/grib2/TestGDT40/regular.cml
@@ -1,6 +1,9 @@
+
+
+
diff --git a/lib/iris/tests/results/integration/grib2/TestImport/gdt1.cml b/lib/iris/tests/results/integration/grib2/TestImport/gdt1.cml
index d3cc6b4732..d304d8a843 100644
--- a/lib/iris/tests/results/integration/grib2/TestImport/gdt1.cml
+++ b/lib/iris/tests/results/integration/grib2/TestImport/gdt1.cml
@@ -1,6 +1,9 @@
+
+
+
diff --git a/lib/iris/tests/results/integration/grib2/TestImport/gdt90_with_bitmap.cml b/lib/iris/tests/results/integration/grib2/TestImport/gdt90_with_bitmap.cml
index 9f950b5e1f..3118f86823 100644
--- a/lib/iris/tests/results/integration/grib2/TestImport/gdt90_with_bitmap.cml
+++ b/lib/iris/tests/results/integration/grib2/TestImport/gdt90_with_bitmap.cml
@@ -1,6 +1,9 @@
+
+
+
diff --git a/lib/iris/tests/results/integration/name_grib/NAMEII/0_TRACER_AIR_CONCENTRATION.cml b/lib/iris/tests/results/integration/name_grib/NAMEII/0_TRACER_AIR_CONCENTRATION.cml
index 4d0fddbba5..b0daf50907 100644
--- a/lib/iris/tests/results/integration/name_grib/NAMEII/0_TRACER_AIR_CONCENTRATION.cml
+++ b/lib/iris/tests/results/integration/name_grib/NAMEII/0_TRACER_AIR_CONCENTRATION.cml
@@ -1,6 +1,9 @@
+
+
+
diff --git a/lib/iris/tests/results/integration/name_grib/NAMEII/1_TRACER_DOSAGE.cml b/lib/iris/tests/results/integration/name_grib/NAMEII/1_TRACER_DOSAGE.cml
index fd61a67eb6..aef4988ce6 100644
--- a/lib/iris/tests/results/integration/name_grib/NAMEII/1_TRACER_DOSAGE.cml
+++ b/lib/iris/tests/results/integration/name_grib/NAMEII/1_TRACER_DOSAGE.cml
@@ -1,6 +1,9 @@
+
+
+
diff --git a/lib/iris/tests/results/integration/name_grib/NAMEII/2_TRACER_WET_DEPOSITION.cml b/lib/iris/tests/results/integration/name_grib/NAMEII/2_TRACER_WET_DEPOSITION.cml
deleted file mode 100644
index 029aa022ea..0000000000
--- a/lib/iris/tests/results/integration/name_grib/NAMEII/2_TRACER_WET_DEPOSITION.cml
+++ /dev/null
@@ -1,31 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/lib/iris/tests/results/integration/name_grib/NAMEII/3_TRACER_DRY_DEPOSITION.cml b/lib/iris/tests/results/integration/name_grib/NAMEII/3_TRACER_DRY_DEPOSITION.cml
index 429d9db2ff..5787c19643 100644
--- a/lib/iris/tests/results/integration/name_grib/NAMEII/3_TRACER_DRY_DEPOSITION.cml
+++ b/lib/iris/tests/results/integration/name_grib/NAMEII/3_TRACER_DRY_DEPOSITION.cml
@@ -1,6 +1,9 @@
+
+
+
diff --git a/lib/iris/tests/results/integration/name_grib/NAMEII/4_TRACER_TOTAL_DEPOSITION.cml b/lib/iris/tests/results/integration/name_grib/NAMEII/4_TRACER_TOTAL_DEPOSITION.cml
index 429d9db2ff..5787c19643 100644
--- a/lib/iris/tests/results/integration/name_grib/NAMEII/4_TRACER_TOTAL_DEPOSITION.cml
+++ b/lib/iris/tests/results/integration/name_grib/NAMEII/4_TRACER_TOTAL_DEPOSITION.cml
@@ -1,6 +1,9 @@
+
+
+
diff --git a/lib/iris/tests/results/integration/name_grib/NAMEIII/0_TRACER_AIR_CONCENTRATION.cml b/lib/iris/tests/results/integration/name_grib/NAMEIII/0_TRACER_AIR_CONCENTRATION.cml
index 8412a4f814..1a31427de0 100644
--- a/lib/iris/tests/results/integration/name_grib/NAMEIII/0_TRACER_AIR_CONCENTRATION.cml
+++ b/lib/iris/tests/results/integration/name_grib/NAMEIII/0_TRACER_AIR_CONCENTRATION.cml
@@ -1,6 +1,9 @@
+
+
+
diff --git a/lib/iris/tests/results/integration/name_grib/NAMEIII/1_TRACER_AIR_CONCENTRATION.cml b/lib/iris/tests/results/integration/name_grib/NAMEIII/1_TRACER_AIR_CONCENTRATION.cml
index 590e8ef463..7007836e62 100644
--- a/lib/iris/tests/results/integration/name_grib/NAMEIII/1_TRACER_AIR_CONCENTRATION.cml
+++ b/lib/iris/tests/results/integration/name_grib/NAMEIII/1_TRACER_AIR_CONCENTRATION.cml
@@ -1,6 +1,9 @@
+
+
+
diff --git a/lib/iris/tests/results/integration/name_grib/NAMEIII/2_TRACER_DRY_DEPOSITION.cml b/lib/iris/tests/results/integration/name_grib/NAMEIII/2_TRACER_DRY_DEPOSITION.cml
index d3edb03a56..850ef89ed2 100644
--- a/lib/iris/tests/results/integration/name_grib/NAMEIII/2_TRACER_DRY_DEPOSITION.cml
+++ b/lib/iris/tests/results/integration/name_grib/NAMEIII/2_TRACER_DRY_DEPOSITION.cml
@@ -1,6 +1,9 @@
+
+
+
diff --git a/lib/iris/tests/results/integration/name_grib/NAMEIII/3_TRACER_WET_DEPOSITION.cml b/lib/iris/tests/results/integration/name_grib/NAMEIII/3_TRACER_WET_DEPOSITION.cml
index 3e8d62ef3d..ade4cea92d 100644
--- a/lib/iris/tests/results/integration/name_grib/NAMEIII/3_TRACER_WET_DEPOSITION.cml
+++ b/lib/iris/tests/results/integration/name_grib/NAMEIII/3_TRACER_WET_DEPOSITION.cml
@@ -1,6 +1,9 @@
+
+
+
diff --git a/lib/iris/tests/results/integration/name_grib/NAMEIII/4_TRACER_DEPOSITION.cml b/lib/iris/tests/results/integration/name_grib/NAMEIII/4_TRACER_DEPOSITION.cml
index 586aaa6b56..088b622c46 100644
--- a/lib/iris/tests/results/integration/name_grib/NAMEIII/4_TRACER_DEPOSITION.cml
+++ b/lib/iris/tests/results/integration/name_grib/NAMEIII/4_TRACER_DEPOSITION.cml
@@ -1,6 +1,9 @@
+
+
+
diff --git a/lib/iris/tests/results/system/supported_filetype_.grib2.cml b/lib/iris/tests/results/system/supported_filetype_.grib2.cml
index c230684fbd..f334b13863 100644
--- a/lib/iris/tests/results/system/supported_filetype_.grib2.cml
+++ b/lib/iris/tests/results/system/supported_filetype_.grib2.cml
@@ -1,6 +1,9 @@
+
+
+
diff --git a/lib/iris/tests/results/uri_callback/grib_global.cml b/lib/iris/tests/results/uri_callback/grib_global.cml
index a7a23e7235..aef0310a96 100644
--- a/lib/iris/tests/results/uri_callback/grib_global.cml
+++ b/lib/iris/tests/results/uri_callback/grib_global.cml
@@ -1,6 +1,9 @@
+
+
+
diff --git a/lib/iris/tests/runner/_runner.py b/lib/iris/tests/runner/_runner.py
index 340923f370..ac5749a65b 100644
--- a/lib/iris/tests/runner/_runner.py
+++ b/lib/iris/tests/runner/_runner.py
@@ -1,4 +1,4 @@
-# (C) British Crown Copyright 2010 - 2017, Met Office
+# (C) British Crown Copyright 2010 - 2020, Met Office
#
# This file is part of Iris.
#
@@ -29,41 +29,6 @@
import sys
-def failed_images_html():
- """
- Generates HTML which shows the image failures side-by-side
- when viewed in a web browser.
- """
- from iris.tests.idiff import step_over_diffs
-
- data_uri_template = '
'
-
- def image_as_base64(fname):
- with open(fname, "rb") as fh:
- return fh.read().encode("base64").replace("\n", "")
-
- html = ['', '', '']
- rdir = os.path.join(os.path.dirname(__file__), os.path.pardir,
- 'result_image_comparison')
- if not os.access(rdir, os.W_OK):
- rdir = os.path.join(os.getcwd(), 'iris_image_test_output')
-
- for expected, actual, diff in step_over_diffs(rdir, 'similar', False):
- expected_html = data_uri_template.format(
- alt='expected', img=image_as_base64(expected))
- actual_html = data_uri_template.format(
- alt='actual', img=image_as_base64(actual))
- diff_html = data_uri_template.format(
- alt='diff', img=image_as_base64(diff))
-
- html.extend([expected, '
',
- expected_html, actual_html, diff_html,
- '
'])
-
- html.extend(['', ''])
- return '\n'.join(html)
-
-
# NOTE: Do not inherit from object as distutils does not like it.
class TestRunner():
"""Run the Iris tests under nose and multiprocessor for performance"""
@@ -84,12 +49,9 @@ class TestRunner():
('num-processors=', 'p', 'The number of processors used for running '
'the tests.'),
('create-missing', 'm', 'Create missing test result files.'),
- ('print-failed-images', 'f', 'Print HTML encoded version of failed '
- 'images.'),
]
boolean_options = ['no-data', 'system-tests', 'stop', 'example-tests',
- 'default-tests', 'coding-tests', 'create-missing',
- 'print-failed-images']
+ 'default-tests', 'coding-tests', 'create-missing']
def initialize_options(self):
self.no_data = False
@@ -100,7 +62,6 @@ def initialize_options(self):
self.coding_tests = False
self.num_processors = None
self.create_missing = False
- self.print_failed_images = False
def finalize_options(self):
# These enviroment variables will be propagated to all the
@@ -185,6 +146,4 @@ def run(self):
# word Mixin.
result &= nose.run(argv=args)
if result is False:
- if self.print_failed_images:
- print(failed_images_html())
exit(1)
diff --git a/lib/iris/tests/test_analysis.py b/lib/iris/tests/test_analysis.py
index e07848a3d8..56f5737862 100644
--- a/lib/iris/tests/test_analysis.py
+++ b/lib/iris/tests/test_analysis.py
@@ -1,4 +1,4 @@
-# (C) British Crown Copyright 2010 - 2018, Met Office
+# (C) British Crown Copyright 2010 - 2020, Met Office
#
# This file is part of Iris.
#
@@ -411,10 +411,12 @@ class TestAggregators(tests.IrisTest):
def _check_collapsed_percentile(self, cube, percents, collapse_coord,
expected_result, CML_filename=None,
**kwargs):
+ cube_data_type = type(cube.data)
expected_result = np.array(expected_result, dtype=np.float32)
result = cube.collapsed(collapse_coord, iris.analysis.PERCENTILE,
percent=percents, **kwargs)
np.testing.assert_array_almost_equal(result.data, expected_result)
+ self.assertEqual(type(result.data), cube_data_type)
if CML_filename is not None:
self.assertCML(result, ('analysis', CML_filename), checksum=False)
@@ -422,6 +424,7 @@ def _check_percentile(self, data, axis, percents, expected_result,
**kwargs):
result = iris.analysis._percentile(data, axis, percents, **kwargs)
np.testing.assert_array_almost_equal(result, expected_result)
+ self.assertEqual(type(result), type(expected_result))
def test_percentile_1d_25_percent(self):
cube = tests.stock.simple_1d()
@@ -445,6 +448,13 @@ def test_fast_percentile_1d_75_percent(self):
cube, 75, 'foo', 7.5, fast_percentile_method=True,
CML_filename='third_quartile_foo_1d_fast_percentile.cml')
+ def test_fast_percentile_1d_75_percent_masked_type_no_mask(self):
+ cube = tests.stock.simple_1d()
+ cube.data = ma.MaskedArray(cube.data)
+ self._check_collapsed_percentile(
+ cube, 75, 'foo', 7.5, fast_percentile_method=True,
+ CML_filename='third_quartile_foo_1d_fast_percentile.cml')
+
def test_percentile_2d_single_coord(self):
cube = tests.stock.simple_2d()
self._check_collapsed_percentile(
@@ -469,6 +479,20 @@ def test_fast_percentile_2d_two_coords(self):
cube, 25, ['foo', 'bar'], [2.75], fast_percentile_method=True,
CML_filename='first_quartile_foo_bar_2d_fast_percentile.cml')
+ def test_fast_percentile_2d_single_coord_masked_type_no_mask(self):
+ cube = tests.stock.simple_2d()
+ cube.data = ma.MaskedArray(cube.data)
+ self._check_collapsed_percentile(
+ cube, 25, 'foo', [0.75, 4.75, 8.75], fast_percentile_method=True,
+ CML_filename='first_quartile_foo_2d_fast_percentile.cml')
+
+ def test_fast_percentile_2d_two_coords_masked_type_no_mask(self):
+ cube = tests.stock.simple_2d()
+ cube.data = ma.MaskedArray(cube.data)
+ self._check_collapsed_percentile(
+ cube, 25, ['foo', 'bar'], [2.75], fast_percentile_method=True,
+ CML_filename='first_quartile_foo_bar_2d_fast_percentile.cml')
+
def test_percentile_3d(self):
array_3d = np.arange(24, dtype=np.int32).reshape((2, 3, 4))
expected_result = np.array([[6., 7., 8., 9.],
@@ -503,6 +527,16 @@ def test_fast_percentile_3d_axis_one(self):
self._check_percentile(array_3d, 1, 50, expected_result,
fast_percentile_method=True)
+ def test_fast_percentile_3d_axis_one_masked_type_no_mask(self):
+ array_3d = np.arange(24, dtype=np.int32).reshape((2, 3, 4))
+ array_3d = np.ma.MaskedArray(array_3d)
+ expected_result = ma.MaskedArray([[4., 5., 6., 7.],
+ [16., 17., 18., 19.]],
+ dtype=np.float32)
+
+ self._check_percentile(array_3d, 1, 50, expected_result,
+ fast_percentile_method=True)
+
def test_percentile_3d_axis_two(self):
array_3d = np.arange(24, dtype=np.int32).reshape((2, 3, 4))
expected_result = np.array([[1.5, 5.5, 9.5],
@@ -520,6 +554,16 @@ def test_fast_percentile_3d_axis_two(self):
self._check_percentile(array_3d, 2, 50, expected_result,
fast_percentile_method=True)
+ def test_fast_percentile_3d_axis_two_masked_type_no_mask(self):
+ array_3d = np.arange(24, dtype=np.int32).reshape((2, 3, 4))
+ array_3d = ma.MaskedArray(array_3d)
+ expected_result = ma.MaskedArray([[1.5, 5.5, 9.5],
+ [13.5, 17.5, 21.5]],
+ dtype=np.float32)
+
+ self._check_percentile(array_3d, 2, 50, expected_result,
+ fast_percentile_method=True)
+
def test_percentile_3d_masked(self):
cube = tests.stock.simple_3d_mask()
expected_result = [[12., 13., 14., 15.],
@@ -530,7 +574,7 @@ def test_percentile_3d_masked(self):
cube, 75, 'wibble', expected_result,
CML_filename='last_quartile_foo_3d_masked.cml')
- def test_fast_percentile_3d_masked(self):
+ def test_fast_percentile_3d_masked_type_masked(self):
cube = tests.stock.simple_3d_mask()
msg = 'Cannot use fast np.percentile method with masked array.'
diff --git a/lib/iris/tests/test_constraints.py b/lib/iris/tests/test_constraints.py
index fd74bf8e53..deb3c86622 100644
--- a/lib/iris/tests/test_constraints.py
+++ b/lib/iris/tests/test_constraints.py
@@ -1,4 +1,4 @@
-# (C) British Crown Copyright 2010 - 2017, Met Office
+# (C) British Crown Copyright 2010 - 2020, Met Office
#
# This file is part of Iris.
#
@@ -29,6 +29,7 @@
import datetime
import iris
+from iris import AttributeConstraint, NameConstraint
import iris.tests.stock as stock
@@ -282,6 +283,289 @@ def load_match(self, files, constraints):
return cubes
+@tests.skip_data
+class TestCubeExtract__names(TestMixin, tests.IrisTest):
+ def setUp(self):
+ fname = iris.sample_data_path("atlantic_profiles.nc")
+ self.cubes = iris.load(fname)
+ TestMixin.setUp(self)
+ cube = iris.load_cube(self.theta_path)
+ # Expected names...
+ self.standard_name = "air_potential_temperature"
+ self.long_name = "AIR POTENTIAL TEMPERATURE"
+ self.var_name = "apt"
+ self.stash = "m01s00i004"
+ # Configure missing names...
+ cube.long_name = self.long_name
+ cube.var_name = self.var_name
+ # Add this cube to the mix...
+ self.cubes.append(cube)
+ self.index = len(self.cubes) - 1
+
+ def test_standard_name(self):
+ constraint = iris.Constraint(self.standard_name)
+ result = self.cubes.extract(constraint, strict=True)
+ self.assertIsNotNone(result)
+ self.assertEqual(result.standard_name, self.standard_name)
+
+ def test_long_name(self):
+ constraint = iris.Constraint(self.long_name)
+ result = self.cubes.extract(constraint, strict=True)
+ self.assertIsNotNone(result)
+ self.assertEqual(result.long_name, self.long_name)
+
+ def test_var_name(self):
+ constraint = iris.Constraint(self.var_name)
+ result = self.cubes.extract(constraint, strict=True)
+ self.assertIsNotNone(result)
+ self.assertEqual(result.var_name, self.var_name)
+
+ def test_stash(self):
+ constraint = iris.Constraint(self.stash)
+ result = self.cubes.extract(constraint, strict=True)
+ self.assertIsNotNone(result)
+ self.assertEqual(str(result.attributes["STASH"]), self.stash)
+
+ def test_unknown(self):
+ cube = self.cubes[self.index]
+ # Clear the cube metadata.
+ cube.standard_name = None
+ cube.long_name = None
+ cube.var_name = None
+ cube.attributes = None
+ # Extract the unknown cube.
+ constraint = iris.Constraint("unknown")
+ result = self.cubes.extract(constraint, strict=True)
+ self.assertIsNotNone(result)
+ self.assertEqual(result.name(), "unknown")
+
+
+@tests.skip_data
+class TestCubeExtract__name_constraint(TestMixin, tests.IrisTest):
+ def setUp(self):
+ fname = iris.sample_data_path("atlantic_profiles.nc")
+ self.cubes = iris.load(fname)
+ TestMixin.setUp(self)
+ cube = iris.load_cube(self.theta_path)
+ # Expected names...
+ self.standard_name = "air_potential_temperature"
+ self.long_name = "air potential temperature"
+ self.var_name = "apt"
+ self.stash = "m01s00i004"
+ # Configure missing names...
+ cube.long_name = self.long_name
+ cube.var_name = self.var_name
+ # Add this cube to the mix...
+ self.cubes.append(cube)
+ self.index = len(self.cubes) - 1
+
+ def test_standard_name(self):
+ # No match.
+ constraint = NameConstraint(standard_name="wibble")
+ result = self.cubes.extract(constraint)
+ self.assertFalse(result)
+
+ # Match.
+ constraint = NameConstraint(standard_name=self.standard_name)
+ result = self.cubes.extract(constraint, strict=True)
+ self.assertIsNotNone(result)
+ self.assertEqual(result.standard_name, self.standard_name)
+
+ # Match - callable.
+ kwargs = dict(standard_name=lambda item: item.startswith("air_pot"))
+ constraint = NameConstraint(**kwargs)
+ result = self.cubes.extract(constraint, strict=True)
+ self.assertIsNotNone(result)
+ self.assertEqual(result.standard_name, self.standard_name)
+
+ def test_standard_name__None(self):
+ cube = self.cubes[self.index]
+ cube.standard_name = None
+ constraint = NameConstraint(
+ standard_name=None, long_name=self.long_name
+ )
+ result = self.cubes.extract(constraint, strict=True)
+ self.assertIsNotNone(result)
+ self.assertIsNone(result.standard_name)
+ self.assertEqual(result.long_name, self.long_name)
+
+ def test_long_name(self):
+ # No match.
+ constraint = NameConstraint(long_name="wibble")
+ result = self.cubes.extract(constraint)
+ self.assertFalse(result)
+
+ # Match.
+ constraint = NameConstraint(long_name=self.long_name)
+ result = self.cubes.extract(constraint, strict=True)
+ self.assertIsNotNone(result)
+ self.assertEqual(result.long_name, self.long_name)
+
+ # Match - callable.
+ kwargs = dict(
+ long_name=lambda item: item is not None
+ and item.startswith("air pot")
+ )
+ constraint = NameConstraint(**kwargs)
+ result = self.cubes.extract(constraint, strict=True)
+ self.assertIsNotNone(result)
+ self.assertEqual(result.long_name, self.long_name)
+
+ def test_long_name__None(self):
+ cube = self.cubes[self.index]
+ cube.long_name = None
+ constraint = NameConstraint(
+ standard_name=self.standard_name, long_name=None
+ )
+ result = self.cubes.extract(constraint, strict=True)
+ self.assertIsNotNone(result)
+ self.assertEqual(result.standard_name, self.standard_name)
+ self.assertIsNone(result.long_name)
+
+ def test_var_name(self):
+ # No match.
+ constraint = NameConstraint(var_name="wibble")
+ result = self.cubes.extract(constraint)
+ self.assertFalse(result)
+
+ # Match.
+ constraint = NameConstraint(var_name=self.var_name)
+ result = self.cubes.extract(constraint, strict=True)
+ self.assertIsNotNone(result)
+ self.assertEqual(result.var_name, self.var_name)
+
+ # Match - callable.
+ kwargs = dict(var_name=lambda item: item.startswith("ap"))
+ constraint = NameConstraint(**kwargs)
+ result = self.cubes.extract(constraint, strict=True)
+ self.assertIsNotNone(result)
+ self.assertEqual(result.var_name, self.var_name)
+
+ def test_var_name__None(self):
+ cube = self.cubes[self.index]
+ cube.var_name = None
+ constraint = NameConstraint(
+ standard_name=self.standard_name, var_name=None
+ )
+ result = self.cubes.extract(constraint, strict=True)
+ self.assertIsNotNone(result)
+ self.assertEqual(result.standard_name, self.standard_name)
+ self.assertIsNone(result.var_name)
+
+ def test_stash(self):
+ # No match.
+ constraint = NameConstraint(STASH="m01s00i444")
+ result = self.cubes.extract(constraint)
+ self.assertFalse(result)
+
+ # Match.
+ constraint = NameConstraint(STASH=self.stash)
+ result = self.cubes.extract(constraint, strict=True)
+ self.assertIsNotNone(result)
+ self.assertEqual(str(result.attributes["STASH"]), self.stash)
+
+ # Match - callable.
+ kwargs = dict(STASH=lambda stash: stash.item == 4)
+ constraint = NameConstraint(**kwargs)
+ result = self.cubes.extract(constraint, strict=True)
+ self.assertIsNotNone(result)
+
+ def test_stash__None(self):
+ cube = self.cubes[self.index]
+ del cube.attributes["STASH"]
+ constraint = NameConstraint(
+ standard_name=self.standard_name, STASH=None
+ )
+ result = self.cubes.extract(constraint, strict=True)
+ self.assertIsNotNone(result)
+ self.assertEqual(result.standard_name, self.standard_name)
+ self.assertIsNone(result.attributes.get("STASH"))
+
+ def test_compound(self):
+ # Match.
+ constraint = NameConstraint(
+ standard_name=self.standard_name, long_name=self.long_name
+ )
+ result = self.cubes.extract(constraint, strict=True)
+ self.assertIsNotNone(result)
+ self.assertEqual(result.standard_name, self.standard_name)
+
+ # No match - var_name.
+ constraint = NameConstraint(
+ standard_name=self.standard_name,
+ long_name=self.long_name,
+ var_name="wibble",
+ )
+ result = self.cubes.extract(constraint)
+ self.assertFalse(result)
+
+ # Match.
+ constraint = NameConstraint(
+ standard_name=self.standard_name,
+ long_name=self.long_name,
+ var_name=self.var_name,
+ )
+ result = self.cubes.extract(constraint, strict=True)
+ self.assertIsNotNone(result)
+ self.assertEqual(result.standard_name, self.standard_name)
+ self.assertEqual(result.long_name, self.long_name)
+ self.assertEqual(result.var_name, self.var_name)
+
+ # No match - STASH.
+ constraint = NameConstraint(
+ standard_name=self.standard_name,
+ long_name=self.long_name,
+ var_name=self.var_name,
+ STASH="m01s00i444",
+ )
+ result = self.cubes.extract(constraint)
+ self.assertFalse(result)
+
+ # Match.
+ constraint = NameConstraint(
+ standard_name=self.standard_name,
+ long_name=self.long_name,
+ var_name=self.var_name,
+ STASH=self.stash,
+ )
+ result = self.cubes.extract(constraint, strict=True)
+ self.assertIsNotNone(result)
+ self.assertEqual(result.standard_name, self.standard_name)
+ self.assertEqual(result.long_name, self.long_name)
+ self.assertEqual(result.var_name, self.var_name)
+ self.assertEqual(result.var_name, self.var_name)
+
+ # No match - standard_name.
+ constraint = NameConstraint(
+ standard_name="wibble",
+ long_name=self.long_name,
+ var_name=self.var_name,
+ STASH=self.stash,
+ )
+ result = self.cubes.extract(constraint)
+ self.assertFalse(result)
+
+ def test_unknown(self):
+ # No match.
+ constraint = NameConstraint(None, None, None, None)
+ result = self.cubes.extract(constraint)
+ self.assertFalse(result)
+
+ # Match.
+ cube = self.cubes[self.index]
+ cube.standard_name = None
+ cube.long_name = None
+ cube.var_name = None
+ cube.attributes = None
+ constraint = NameConstraint(None, None, None, None)
+ result = self.cubes.extract(constraint, strict=True)
+ self.assertIsNotNone(result)
+ self.assertIsNone(result.standard_name)
+ self.assertIsNone(result.long_name)
+ self.assertIsNone(result.var_name)
+ self.assertIsNone(result.attributes.get("STASH"))
+
+
@tests.skip_data
class TestCubeExtract(TestMixin, tests.IrisTest):
def setUp(self):
@@ -289,27 +573,36 @@ def setUp(self):
self.cube = iris.load_cube(self.theta_path)
def test_attribute_constraint(self):
- # there is no my_attribute attribute on the cube, so ensure it returns None
- cube = self.cube.extract(iris.AttributeConstraint(my_attribute='foobar'))
+ # There is no my_attribute on the cube, so ensure it returns None.
+ constraint = AttributeConstraint(my_attribute="foobar")
+ cube = self.cube.extract(constraint)
self.assertIsNone(cube)
orig_cube = self.cube
# add an attribute to the cubes
orig_cube.attributes['my_attribute'] = 'foobar'
- cube = orig_cube.extract(iris.AttributeConstraint(my_attribute='foobar'))
- self.assertCML(cube, ('constrained_load', 'attribute_constraint.cml'))
+ constraint = AttributeConstraint(my_attribute="foobar")
+ cube = orig_cube.extract(constraint)
+ self.assertCML(cube, ("constrained_load", "attribute_constraint.cml"))
- cube = orig_cube.extract(iris.AttributeConstraint(my_attribute='not me'))
+ constraint = AttributeConstraint(my_attribute="not me")
+ cube = orig_cube.extract(constraint)
self.assertIsNone(cube)
- cube = orig_cube.extract(iris.AttributeConstraint(my_attribute=lambda val: val.startswith('foo')))
- self.assertCML(cube, ('constrained_load', 'attribute_constraint.cml'))
+ kwargs = dict(my_attribute=lambda val: val.startswith("foo"))
+ constraint = AttributeConstraint(**kwargs)
+ cube = orig_cube.extract(constraint)
+ self.assertCML(cube, ("constrained_load", "attribute_constraint.cml"))
- cube = orig_cube.extract(iris.AttributeConstraint(my_attribute=lambda val: not val.startswith('foo')))
+ kwargs = dict(my_attribute=lambda val: not val.startswith("foo"))
+ constraint = AttributeConstraint(**kwargs)
+ cube = orig_cube.extract(constraint)
self.assertIsNone(cube)
- cube = orig_cube.extract(iris.AttributeConstraint(my_non_existant_attribute='hello world'))
+ kwargs = dict(my_non_existant_attribute="hello world")
+ constraint = AttributeConstraint(**kwargs)
+ cube = orig_cube.extract(constraint)
self.assertIsNone(cube)
def test_standard_name(self):
@@ -329,7 +622,7 @@ def test_empty_data(self):
cube = self.cube.extract(self.level_10).extract(self.level_10)
self.assertTrue(cube.has_lazy_data())
- def test_non_existant_coordinate(self):
+ def test_non_existent_coordinate(self):
# Check the behaviour when a constraint is given for a coordinate which does not exist/span a dimension
self.assertEqual(self.cube[0, :, :].extract(self.level_10), None)
diff --git a/lib/iris/tests/test_grib_load_translations.py b/lib/iris/tests/test_grib_load_translations.py
index dbb6bc8e1f..411ea5b352 100644
--- a/lib/iris/tests/test_grib_load_translations.py
+++ b/lib/iris/tests/test_grib_load_translations.py
@@ -1,4 +1,4 @@
-# (C) British Crown Copyright 2010 - 2017, Met Office
+# (C) British Crown Copyright 2010 - 2020, Met Office
#
# This file is part of Iris.
#
@@ -60,7 +60,7 @@ def _mock_gribapi_fetch(message, key):
if key in message:
return message[key]
else:
- raise _mock_gribapi.GribInternalError
+ raise _mock_gribapi.errors.GribInternalError
def _mock_gribapi__grib_is_missing(grib_message, keyname):
@@ -83,13 +83,13 @@ def _mock_gribapi__grib_get_native_type(grib_message, keyname):
"""
if keyname in grib_message:
return type(grib_message[keyname])
- raise _mock_gribapi.GribInternalError(keyname)
+ raise _mock_gribapi.errors.GribInternalError(keyname)
if tests.GRIB_AVAILABLE:
# Construct a mock object to mimic the gribapi for GribWrapper testing.
_mock_gribapi = mock.Mock(spec=gribapi)
- _mock_gribapi.GribInternalError = Exception
+ _mock_gribapi.errors.GribInternalError = Exception
_mock_gribapi.grib_get_long = mock.Mock(side_effect=_mock_gribapi_fetch)
_mock_gribapi.grib_get_string = mock.Mock(side_effect=_mock_gribapi_fetch)
diff --git a/lib/iris/tests/test_grib_save.py b/lib/iris/tests/test_grib_save.py
index 0befc8d56a..0367902290 100644
--- a/lib/iris/tests/test_grib_save.py
+++ b/lib/iris/tests/test_grib_save.py
@@ -1,4 +1,4 @@
-# (C) British Crown Copyright 2010 - 2017, Met Office
+# (C) British Crown Copyright 2010 - 2020, Met Office
#
# This file is part of Iris.
#
@@ -56,6 +56,10 @@ def test_latlon_forecast_plev(self):
'shapeOfTheEarth': (0, 1),
'scaledValueOfRadiusOfSphericalEarth': (MDI,
6367470),
+ 'scaledValueOfEarthMajorAxis': (MDI, 0),
+ 'scaleFactorOfEarthMajorAxis': (MDI, 0),
+ 'scaledValueOfEarthMinorAxis': (MDI, 0),
+ 'scaleFactorOfEarthMinorAxis': (MDI, 0),
'typeOfGeneratingProcess': (0, 255),
'generatingProcessIdentifier': (128, 255),
}
@@ -76,6 +80,10 @@ def test_rotated_latlon(self):
'shapeOfTheEarth': (0, 1),
'scaledValueOfRadiusOfSphericalEarth': (MDI,
6367470),
+ 'scaledValueOfEarthMajorAxis': (MDI, 0),
+ 'scaleFactorOfEarthMajorAxis': (MDI, 0),
+ 'scaledValueOfEarthMinorAxis': (MDI, 0),
+ 'scaleFactorOfEarthMinorAxis': (MDI, 0),
'longitudeOfLastGridPoint': (392109982, 32106370),
'latitudeOfLastGridPoint': (19419996, 19419285),
'typeOfGeneratingProcess': (0, 255),
@@ -97,6 +105,10 @@ def test_time_mean(self):
'shapeOfTheEarth': (0, 1),
'scaledValueOfRadiusOfSphericalEarth': (MDI,
6367470),
+ 'scaledValueOfEarthMajorAxis': (MDI, 0),
+ 'scaleFactorOfEarthMajorAxis': (MDI, 0),
+ 'scaledValueOfEarthMinorAxis': (MDI, 0),
+ 'scaleFactorOfEarthMinorAxis': (MDI, 0),
'longitudeOfLastGridPoint': (356249908, 356249809),
'latitudeOfLastGridPoint': (-89999938, -89999944),
'typeOfGeneratingProcess': (0, 255),
diff --git a/lib/iris/tests/unit/analysis/area_weighted/test_AreaWeightedRegridder.py b/lib/iris/tests/unit/analysis/area_weighted/test_AreaWeightedRegridder.py
index 3c28ca6134..f6d4332623 100644
--- a/lib/iris/tests/unit/analysis/area_weighted/test_AreaWeightedRegridder.py
+++ b/lib/iris/tests/unit/analysis/area_weighted/test_AreaWeightedRegridder.py
@@ -1,4 +1,4 @@
-# (C) British Crown Copyright 2014 - 2015, Met Office
+# (C) British Crown Copyright 2014 - 2020, Met Office
#
# This file is part of Iris.
#
@@ -25,6 +25,7 @@
# Import iris.tests first so that some things can be initialised before
# importing anything else.
import iris.tests as tests
+import iris.experimental.regrid as eregrid
import numpy as np
@@ -43,11 +44,13 @@ def cube(self, x, y):
lon = DimCoord(x, 'longitude', units='degrees')
cube.add_dim_coord(lat, 0)
cube.add_dim_coord(lon, 1)
+ cube.coord("latitude").guess_bounds()
+ cube.coord("longitude").guess_bounds()
return cube
def grids(self):
src = self.cube(np.linspace(20, 30, 3), np.linspace(10, 25, 4))
- target = self.cube(np.linspace(6, 18, 8), np.linspace(11, 22, 9))
+ target = self.cube(np.linspace(22, 28, 8), np.linspace(11, 22, 9))
return src, target
def extract_grid(self, cube):
@@ -55,31 +58,53 @@ def extract_grid(self, cube):
def check_mdtol(self, mdtol=None):
src_grid, target_grid = self.grids()
- if mdtol is None:
- regridder = AreaWeightedRegridder(src_grid, target_grid)
- mdtol = 1
- else:
- regridder = AreaWeightedRegridder(src_grid, target_grid,
- mdtol=mdtol)
-
- # Make a new cube to regrid with different data so we can
- # distinguish between regridding the original src grid
- # definition cube and the cube passed to the regridder.
- src = src_grid.copy()
- src.data += 10
-
- with mock.patch('iris.experimental.regrid.'
- 'regrid_area_weighted_rectilinear_src_and_grid',
- return_value=mock.sentinel.result) as regrid:
- result = regridder(src)
-
- self.assertEqual(regrid.call_count, 1)
- _, args, kwargs = regrid.mock_calls[0]
+ # Get _regrid_info result
+ _regrid_info =\
+ eregrid._regrid_area_weighted_rectilinear_src_and_grid__prepare(
+ src_grid, target_grid
+ )
+ self.assertEqual(len(_regrid_info), 9)
+ with mock.patch(
+ "iris.experimental.regrid."
+ "_regrid_area_weighted_rectilinear_src_and_grid__prepare",
+ return_value=_regrid_info,
+ ) as prepare:
+ with mock.patch(
+ "iris.experimental.regrid."
+ "_regrid_area_weighted_rectilinear_src_and_grid__perform",
+ return_value=mock.sentinel.result,
+ ) as perform:
+ # Setup the regridder
+ if mdtol is None:
+ regridder = AreaWeightedRegridder(src_grid, target_grid)
+ mdtol = 1
+ else:
+ regridder = AreaWeightedRegridder(
+ src_grid, target_grid, mdtol=mdtol
+ )
+ # Now regrid the source cube
+ src = src_grid
+ result = regridder(src)
+ # Make a new cube to regrid with different data so we can
+ # distinguish between regridding the original src grid
+ # definition cube and the cube passed to the regridder.
+ src = src_grid.copy()
+ src.data += 10
+ result = regridder(src)
+
+ # Prepare:
+ self.assertEqual(prepare.call_count, 1)
+ _, args, kwargs = prepare.mock_calls[0]
+ self.assertEqual(
+ self.extract_grid(args[1]), self.extract_grid(target_grid)
+ )
+
+ # Perform:
+ self.assertEqual(perform.call_count, 2)
+ _, args, kwargs = perform.mock_calls[1]
self.assertEqual(args[0], src)
- self.assertEqual(self.extract_grid(args[1]),
- self.extract_grid(target_grid))
- self.assertEqual(kwargs, {'mdtol': mdtol})
+ self.assertEqual(kwargs, {"mdtol": mdtol})
self.assertIs(result, mock.sentinel.result)
def test_default(self):
@@ -111,6 +136,129 @@ def test_mismatched_src_coord_systems(self):
with self.assertRaises(ValueError):
AreaWeightedRegridder(src, target)
+ def test_src_and_target_are_the_same(self):
+ src = self.cube(np.linspace(20, 30, 3), np.linspace(10, 25, 4))
+ target = self.cube(np.linspace(20, 30, 3), np.linspace(10, 25, 4))
+ regridder = AreaWeightedRegridder(src, target)
+ result = regridder(src)
+ self.assertArrayAllClose(result.data, target.data)
+
+ def test_multiple_src_on_same_grid(self):
+ coord_names = ["latitude", "longitude"]
+ src1 = self.cube(np.linspace(20, 32, 4), np.linspace(10, 22, 4))
+ src2 = self.cube(np.linspace(20, 32, 4), np.linspace(10, 22, 4))
+ src2.data *= 4
+ self.assertArrayEqual(src1.data * 4, src2.data)
+ for name in coord_names:
+ # Remove coords system and units so it is no longer spherical.
+ src1.coord(name).coord_system = None
+ src1.coord(name).units = None
+ src2.coord(name).coord_system = None
+ src2.coord(name).units = None
+
+ target = self.cube(np.linspace(20, 32, 2), np.linspace(10, 22, 2))
+ # Ensure the bounds of the target cover the same range as the
+ # source.
+ target_lat_bounds = np.column_stack(
+ (
+ src1.coord("latitude").bounds[[0, 1], [0, 1]],
+ src1.coord("latitude").bounds[[2, 3], [0, 1]],
+ )
+ )
+ target.coord("latitude").bounds = target_lat_bounds
+ target_lon_bounds = np.column_stack(
+ (
+ src1.coord("longitude").bounds[[0, 1], [0, 1]],
+ src1.coord("longitude").bounds[[2, 3], [0, 1]],
+ )
+ )
+ target.coord("longitude").bounds = target_lon_bounds
+ for name in coord_names:
+ # Remove coords system and units so it is no longer spherical.
+ target.coord(name).coord_system = None
+ target.coord(name).units = None
+
+ regridder = AreaWeightedRegridder(src1, target)
+ result1 = regridder(src1)
+ result2 = regridder(src2)
+
+ reference1 = self.cube(np.linspace(20, 32, 2), np.linspace(10, 22, 2))
+ reference1.data = np.array(
+ [
+ [np.mean(src1.data[0:2, 0:2]), np.mean(src1.data[0:2, 2:4])],
+ [np.mean(src1.data[2:4, 0:2]), np.mean(src1.data[2:4, 2:4])],
+ ]
+ )
+ reference1.coord("latitude").bounds = target_lat_bounds
+ reference1.coord("longitude").bounds = target_lon_bounds
+
+ reference2 = self.cube(np.linspace(20, 32, 2), np.linspace(10, 22, 2))
+ reference2.data = np.array(
+ [
+ [np.mean(src2.data[0:2, 0:2]), np.mean(src2.data[0:2, 2:4])],
+ [np.mean(src2.data[2:4, 0:2]), np.mean(src2.data[2:4, 2:4])],
+ ]
+ )
+ reference2.coord("latitude").bounds = target_lat_bounds
+ reference2.coord("longitude").bounds = target_lon_bounds
+
+ for name in coord_names:
+ # Remove coords system and units so it is no longer spherical.
+ reference1.coord(name).coord_system = None
+ reference1.coord(name).units = None
+ reference2.coord(name).coord_system = None
+ reference2.coord(name).units = None
+
+ # Compare the cubes rather than just the data.
+ self.assertEqual(result1, reference1)
+ self.assertEqual(result2, reference2)
+
+ def test_src_data_different_dims(self):
+ src, target = self.grids()
+ regridder = AreaWeightedRegridder(src, target)
+ result = regridder(src)
+ expected_mean, expected_std = 4.772097735195653, 2.211698479817678
+ self.assertArrayShapeStats(result, (9, 8), expected_mean, expected_std)
+ # New source cube with additional "levels" dimension
+ # Each level has identical x-y data so the mean and std stats remain
+ # identical when x, y and z dims are reordered
+ levels = DimCoord(np.arange(5), "model_level_number")
+ lat = src.coord("latitude")
+ lon = src.coord("longitude")
+ data = np.repeat(src.data[np.newaxis, ...], 5, axis=0)
+ src = Cube(data)
+ src.add_dim_coord(levels, 0)
+ src.add_dim_coord(lat, 1)
+ src.add_dim_coord(lon, 2)
+ result = regridder(src)
+ self.assertArrayShapeStats(
+ result, (5, 9, 8), expected_mean, expected_std
+ )
+ # Check data with dims in different order
+ # Reshape src so that the coords are ordered [x, z, y],
+ # the mean and std statistics should be the same
+ data = np.moveaxis(src.data.copy(), 2, 0)
+ src = Cube(data)
+ src.add_dim_coord(lon, 0)
+ src.add_dim_coord(levels, 1)
+ src.add_dim_coord(lat, 2)
+ result = regridder(src)
+ self.assertArrayShapeStats(
+ result, (8, 5, 9), expected_mean, expected_std
+ )
+ # Check data with dims in different order
+ # Reshape src so that the coords are ordered [y, x, z],
+ # the mean and std statistics should be the same
+ data = np.moveaxis(src.data.copy(), 2, 0)
+ src = Cube(data)
+ src.add_dim_coord(lat, 0)
+ src.add_dim_coord(lon, 1)
+ src.add_dim_coord(levels, 2)
+ result = regridder(src)
+ self.assertArrayShapeStats(
+ result, (9, 8, 5), expected_mean, expected_std
+ )
+
if __name__ == '__main__':
tests.main()
diff --git a/lib/iris/tests/unit/constraints/__init__.py b/lib/iris/tests/unit/constraints/__init__.py
new file mode 100644
index 0000000000..e325fde367
--- /dev/null
+++ b/lib/iris/tests/unit/constraints/__init__.py
@@ -0,0 +1,20 @@
+# (C) British Crown Copyright 2020, Met Office
+#
+# This file is part of Iris.
+#
+# Iris is free software: you can redistribute it and/or modify it under
+# the terms of the GNU Lesser General Public License as published by the
+# Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Iris is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with Iris. If not, see <http://www.gnu.org/licenses/>.
+"""Unit tests for the :mod:`iris._constraints` module."""
+
+from __future__ import (absolute_import, division, print_function)
+from six.moves import (filter, input, map, range, zip) # noqa
diff --git a/lib/iris/tests/unit/constraints/test_NameConstraint.py b/lib/iris/tests/unit/constraints/test_NameConstraint.py
new file mode 100644
index 0000000000..9160605a2a
--- /dev/null
+++ b/lib/iris/tests/unit/constraints/test_NameConstraint.py
@@ -0,0 +1,243 @@
+# (C) British Crown Copyright 2020, Met Office
+#
+# This file is part of Iris.
+#
+# Iris is free software: you can redistribute it and/or modify it under
+# the terms of the GNU Lesser General Public License as published by the
+# Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Iris is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with Iris. If not, see <http://www.gnu.org/licenses/>.
+"""Unit tests for the `iris._constraints.NameConstraint` class."""
+
+from __future__ import (absolute_import, division, print_function)
+from six.moves import (filter, input, map, range, zip) # noqa
+
+# Import iris.tests first so that some things can be initialised before
+# importing anything else.
+import iris.tests as tests
+
+from iris.tests import mock
+
+from iris._constraints import NameConstraint
+
+
+class Test___init__(tests.IrisTest):
+ def setUp(self):
+ self.default = "none"
+
+ def test_default(self):
+ constraint = NameConstraint()
+ self.assertEqual(constraint.standard_name, self.default)
+ self.assertEqual(constraint.long_name, self.default)
+ self.assertEqual(constraint.var_name, self.default)
+ self.assertEqual(constraint.STASH, self.default)
+
+ def test_standard_name(self):
+ standard_name = mock.sentinel.standard_name
+ constraint = NameConstraint(standard_name=standard_name)
+ self.assertEqual(constraint.standard_name, standard_name)
+ constraint = NameConstraint(standard_name=standard_name)
+ self.assertEqual(constraint.standard_name, standard_name)
+
+ def test_long_name(self):
+ long_name = mock.sentinel.long_name
+ constraint = NameConstraint(long_name=long_name)
+ self.assertEqual(constraint.standard_name, self.default)
+ self.assertEqual(constraint.long_name, long_name)
+ constraint = NameConstraint(standard_name=None, long_name=long_name)
+ self.assertIsNone(constraint.standard_name)
+ self.assertEqual(constraint.long_name, long_name)
+
+ def test_var_name(self):
+ var_name = mock.sentinel.var_name
+ constraint = NameConstraint(var_name=var_name)
+ self.assertEqual(constraint.standard_name, self.default)
+ self.assertEqual(constraint.long_name, self.default)
+ self.assertEqual(constraint.var_name, var_name)
+ constraint = NameConstraint(
+ standard_name=None, long_name=None, var_name=var_name
+ )
+ self.assertIsNone(constraint.standard_name)
+ self.assertIsNone(constraint.long_name)
+ self.assertEqual(constraint.var_name, var_name)
+
+ def test_STASH(self):
+ STASH = mock.sentinel.STASH
+ constraint = NameConstraint(STASH=STASH)
+ self.assertEqual(constraint.standard_name, self.default)
+ self.assertEqual(constraint.long_name, self.default)
+ self.assertEqual(constraint.var_name, self.default)
+ self.assertEqual(constraint.STASH, STASH)
+ constraint = NameConstraint(
+ standard_name=None, long_name=None, var_name=None, STASH=STASH
+ )
+ self.assertIsNone(constraint.standard_name)
+ self.assertIsNone(constraint.long_name)
+ self.assertIsNone(constraint.var_name)
+ self.assertEqual(constraint.STASH, STASH)
+
+
+class Test__cube_func(tests.IrisTest):
+ def setUp(self):
+ self.standard_name = mock.sentinel.standard_name
+ self.long_name = mock.sentinel.long_name
+ self.var_name = mock.sentinel.var_name
+ self.STASH = mock.sentinel.STASH
+ self.cube = mock.Mock(
+ standard_name=self.standard_name,
+ long_name=self.long_name,
+ var_name=self.var_name,
+ attributes=dict(STASH=self.STASH),
+ )
+
+ def test_standard_name(self):
+ # Match.
+ constraint = NameConstraint(standard_name=self.standard_name)
+ self.assertTrue(constraint._cube_func(self.cube))
+ # Match.
+ constraint = NameConstraint(standard_name=self.standard_name)
+ self.assertTrue(constraint._cube_func(self.cube))
+ # No match.
+ constraint = NameConstraint(standard_name="wibble")
+ self.assertFalse(constraint._cube_func(self.cube))
+ # No match.
+ constraint = NameConstraint(standard_name="wibble")
+ self.assertFalse(constraint._cube_func(self.cube))
+
+ def test_long_name(self):
+ # Match.
+ constraint = NameConstraint(long_name=self.long_name)
+ self.assertTrue(constraint._cube_func(self.cube))
+ # Match.
+ constraint = NameConstraint(
+ standard_name=self.standard_name, long_name=self.long_name
+ )
+ self.assertTrue(constraint._cube_func(self.cube))
+ # No match.
+ constraint = NameConstraint(long_name=None)
+ self.assertFalse(constraint._cube_func(self.cube))
+ # No match.
+ constraint = NameConstraint(
+ standard_name=None, long_name=self.long_name
+ )
+ self.assertFalse(constraint._cube_func(self.cube))
+
+ def test_var_name(self):
+ # Match.
+ constraint = NameConstraint(var_name=self.var_name)
+ self.assertTrue(constraint._cube_func(self.cube))
+ # Match.
+ constraint = NameConstraint(
+ standard_name=self.standard_name,
+ long_name=self.long_name,
+ var_name=self.var_name,
+ )
+ self.assertTrue(constraint._cube_func(self.cube))
+ # No match.
+ constraint = NameConstraint(var_name=None)
+ self.assertFalse(constraint._cube_func(self.cube))
+ # No match.
+ constraint = NameConstraint(
+ standard_name=None, long_name=None, var_name=self.var_name
+ )
+ self.assertFalse(constraint._cube_func(self.cube))
+
+ def test_STASH(self):
+ # Match.
+ constraint = NameConstraint(STASH=self.STASH)
+ self.assertTrue(constraint._cube_func(self.cube))
+ # Match.
+ constraint = NameConstraint(
+ standard_name=self.standard_name,
+ long_name=self.long_name,
+ var_name=self.var_name,
+ STASH=self.STASH,
+ )
+ self.assertTrue(constraint._cube_func(self.cube))
+ # No match.
+ constraint = NameConstraint(STASH=None)
+ self.assertFalse(constraint._cube_func(self.cube))
+ # No match.
+ constraint = NameConstraint(
+ standard_name=None, long_name=None, var_name=None, STASH=self.STASH
+ )
+ self.assertFalse(constraint._cube_func(self.cube))
+
+
+class Test___repr__(tests.IrisTest):
+ def setUp(self):
+ self.standard_name = mock.sentinel.standard_name
+ self.long_name = mock.sentinel.long_name
+ self.var_name = mock.sentinel.var_name
+ self.STASH = mock.sentinel.STASH
+ self.msg = "NameConstraint({})"
+ self.f_standard_name = "standard_name={!r}".format(self.standard_name)
+ self.f_long_name = "long_name={!r}".format(self.long_name)
+ self.f_var_name = "var_name={!r}".format(self.var_name)
+ self.f_STASH = "STASH={!r}".format(self.STASH)
+
+ def test(self):
+ constraint = NameConstraint()
+ expected = self.msg.format("")
+ self.assertEqual(repr(constraint), expected)
+
+ def test_standard_name(self):
+ constraint = NameConstraint(standard_name=self.standard_name)
+ expected = self.msg.format(self.f_standard_name)
+ self.assertEqual(repr(constraint), expected)
+
+ def test_long_name(self):
+ constraint = NameConstraint(long_name=self.long_name)
+ expected = self.msg.format(self.f_long_name)
+ self.assertEqual(repr(constraint), expected)
+ constraint = NameConstraint(
+ standard_name=self.standard_name, long_name=self.long_name
+ )
+ args = "{}, {}".format(self.f_standard_name, self.f_long_name)
+ expected = self.msg.format(args)
+ self.assertEqual(repr(constraint), expected)
+
+ def test_var_name(self):
+ constraint = NameConstraint(var_name=self.var_name)
+ expected = self.msg.format(self.f_var_name)
+ self.assertEqual(repr(constraint), expected)
+ constraint = NameConstraint(
+ standard_name=self.standard_name,
+ long_name=self.long_name,
+ var_name=self.var_name,
+ )
+ args = "{}, {}, {}".format(
+ self.f_standard_name, self.f_long_name, self.f_var_name
+ )
+ expected = self.msg.format(args)
+ self.assertEqual(repr(constraint), expected)
+
+ def test_STASH(self):
+ constraint = NameConstraint(STASH=self.STASH)
+ expected = self.msg.format(self.f_STASH)
+ self.assertEqual(repr(constraint), expected)
+ constraint = NameConstraint(
+ standard_name=self.standard_name,
+ long_name=self.long_name,
+ var_name=self.var_name,
+ STASH=self.STASH,
+ )
+ args = "{}, {}, {}, {}".format(
+ self.f_standard_name,
+ self.f_long_name,
+ self.f_var_name,
+ self.f_STASH,
+ )
+ expected = self.msg.format(args)
+ self.assertEqual(repr(constraint), expected)
+
+
+if __name__ == "__main__":
+ tests.main()
diff --git a/lib/iris/tests/unit/cube_coord_common/test_CFVariableMixin.py b/lib/iris/tests/unit/cube_coord_common/test_CFVariableMixin.py
index 9366bb848a..ddbe537122 100644
--- a/lib/iris/tests/unit/cube_coord_common/test_CFVariableMixin.py
+++ b/lib/iris/tests/unit/cube_coord_common/test_CFVariableMixin.py
@@ -1,4 +1,4 @@
-# (C) British Crown Copyright 2019, Met Office
+# (C) British Crown Copyright 2020, Met Office
#
# This file is part of Iris.
#
@@ -145,6 +145,52 @@ def test_fail_token_default(self):
self.cf_var.name(default='_nope', token=True)
+class Test_names(tests.IrisTest):
+ def setUp(self):
+ self.cf_var = CFVariableMixin()
+ self.cf_var.standard_name = None
+ self.cf_var.long_name = None
+ self.cf_var.var_name = None
+ self.cf_var.attributes = dict()
+
+ def test_standard_name(self):
+ standard_name = "air_temperature"
+ self.cf_var.standard_name = standard_name
+ expected = (standard_name, None, None, None)
+ result = self.cf_var.names
+ self.assertEqual(expected, result)
+ self.assertEqual(result.standard_name, standard_name)
+
+ def test_long_name(self):
+ long_name = "air temperature"
+ self.cf_var.long_name = long_name
+ expected = (None, long_name, None, None)
+ result = self.cf_var.names
+ self.assertEqual(expected, result)
+ self.assertEqual(result.long_name, long_name)
+
+ def test_var_name(self):
+ var_name = "atemp"
+ self.cf_var.var_name = var_name
+ expected = (None, None, var_name, None)
+ result = self.cf_var.names
+ self.assertEqual(expected, result)
+ self.assertEqual(result.var_name, var_name)
+
+ def test_STASH(self):
+ stash = "m01s16i203"
+ self.cf_var.attributes = dict(STASH=stash)
+ expected = (None, None, None, stash)
+ result = self.cf_var.names
+ self.assertEqual(expected, result)
+ self.assertEqual(result.STASH, stash)
+
+ def test_None(self):
+ expected = (None, None, None, None)
+ result = self.cf_var.names
+ self.assertEqual(expected, result)
+
+
class Test_standard_name__setter(tests.IrisTest):
def test_valid_standard_name(self):
cf_var = CFVariableMixin()
diff --git a/lib/iris/tests/unit/fileformats/pyke_rules/compiled_krb/fc_rules_cf_fc/test_build_geostationary_coordinate_system.py b/lib/iris/tests/unit/fileformats/pyke_rules/compiled_krb/fc_rules_cf_fc/test_build_geostationary_coordinate_system.py
index c530538b90..67145fe4b0 100644
--- a/lib/iris/tests/unit/fileformats/pyke_rules/compiled_krb/fc_rules_cf_fc/test_build_geostationary_coordinate_system.py
+++ b/lib/iris/tests/unit/fileformats/pyke_rules/compiled_krb/fc_rules_cf_fc/test_build_geostationary_coordinate_system.py
@@ -1,4 +1,4 @@
-# (C) British Crown Copyright 2019, Met Office
+# (C) British Crown Copyright 2020, Met Office
#
# This file is part of Iris.
#
@@ -35,43 +35,44 @@
class TestBuildGeostationaryCoordinateSystem(tests.IrisTest):
- def _test(self, inverse_flattening=False):
+ def _test(self, inverse_flattening=False, replace_props=None,
+ remove_props=None):
"""
Generic test that can check vertical perspective validity with or
without inverse flattening.
"""
- cf_grid_var_kwargs = {
- 'spec': [],
+ # Make a dictionary of the non-ellipsoid properties to be added to
+ # both a test coord-system, and a test grid-mapping cf_var.
+ non_ellipsoid_kwargs = {
'latitude_of_projection_origin': 0.0,
'longitude_of_projection_origin': 2.0,
'perspective_point_height': 2000000.0,
'sweep_angle_axis': 'x',
'false_easting': 100.0,
- 'false_northing': 200.0,
- 'semi_major_axis': 6377563.396}
+ 'false_northing': 200.0}
+ # Make specified adjustments to the non-ellipsoid properties.
+ if remove_props:
+ for key in remove_props:
+ non_ellipsoid_kwargs.pop(key, None)
+ if replace_props:
+ for key, value in replace_props.items():
+ non_ellipsoid_kwargs[key] = value
+
+ # Make a dictionary of ellipsoid properties, to be added to both a test
+ # ellipsoid and the grid-mapping cf_var.
ellipsoid_kwargs = {'semi_major_axis': 6377563.396}
if inverse_flattening:
ellipsoid_kwargs['inverse_flattening'] = 299.3249646
else:
ellipsoid_kwargs['semi_minor_axis'] = 6356256.909
- cf_grid_var_kwargs.update(ellipsoid_kwargs)
-
- cf_grid_var = mock.Mock(**cf_grid_var_kwargs)
- ellipsoid = iris.coord_systems.GeogCS(**ellipsoid_kwargs)
+ cf_grid_var_kwargs = non_ellipsoid_kwargs.copy()
+ cf_grid_var_kwargs.update(ellipsoid_kwargs)
+ cf_grid_var = mock.Mock(spec=[], **cf_grid_var_kwargs)
cs = build_geostationary_coordinate_system(None, cf_grid_var)
- expected = Geostationary(
- latitude_of_projection_origin=cf_grid_var.
- latitude_of_projection_origin,
- longitude_of_projection_origin=cf_grid_var.
- longitude_of_projection_origin,
- perspective_point_height=cf_grid_var.perspective_point_height,
- sweep_angle_axis=cf_grid_var.sweep_angle_axis,
- false_easting=cf_grid_var.false_easting,
- false_northing=cf_grid_var.false_northing,
- ellipsoid=ellipsoid)
-
+ ellipsoid = iris.coord_systems.GeogCS(**ellipsoid_kwargs)
+ expected = Geostationary(ellipsoid=ellipsoid, **non_ellipsoid_kwargs)
self.assertEqual(cs, expected)
def test_valid(self):
@@ -79,3 +80,14 @@ def test_valid(self):
def test_inverse_flattening(self):
self._test(inverse_flattening=True)
+
+ def test_false_offsets_missing(self):
+ self._test(remove_props=['false_easting', 'false_northing'])
+
+ def test_false_offsets_none(self):
+ self._test(replace_props={'false_easting': None,
+ 'false_northing': None})
+
+
+if __name__ == "__main__":
+ tests.main()
diff --git a/requirements/core.txt b/requirements/core.txt
index 48ee3f1dfd..64453dcb27 100644
--- a/requirements/core.txt
+++ b/requirements/core.txt
@@ -8,7 +8,7 @@ cartopy
cf-units>=2
cftime
dask[array]>=1.2.0 #conda: dask>=1.2.0
-matplotlib>=2,<3
+matplotlib
netcdf4
numpy>=1.14
scipy
diff --git a/requirements/extensions.txt b/requirements/extensions.txt
index 0f3d0e0379..8e8e847f88 100644
--- a/requirements/extensions.txt
+++ b/requirements/extensions.txt
@@ -5,5 +5,5 @@
# struggle. To install these extensions, ensure iris[core] has been installed
# first.
-iris_grib;python_version<"3" #conda:
+iris-grib #conda:
gdal
diff --git a/requirements/test.txt b/requirements/test.txt
index 38d7410a9b..34719a2d04 100644
--- a/requirements/test.txt
+++ b/requirements/test.txt
@@ -5,5 +5,6 @@ mock
nose
pep8
filelock
+pillow<7
imagehash>=4.0
requests