deprecate compat & encoding (#2703)
* deprecate compat & encoding

* stacklevel

* whatsnew

* imports

* merge conflicts

* remove deprecations

* removal date
max-sixty committed Feb 1, 2019
1 parent 4923039 commit d634f64
Showing 7 changed files with 40 additions and 37 deletions.
3 changes: 2 additions & 1 deletion .github/stale.yml
@@ -28,7 +28,8 @@ staleLabel: stale
# Comment to post when marking as stale. Set to `false` to disable
markComment: |
In order to maintain a list of currently relevant issues, we mark issues as stale after a period of inactivity
- If this issue remains relevant, please comment here; otherwise it will be marked as closed automatically
+ If this issue remains relevant, please comment here or remove the `stale` label; otherwise it will be marked as closed automatically
# Comment to post when removing the stale label.
# unmarkComment: >
4 changes: 4 additions & 0 deletions doc/whats-new.rst
@@ -24,6 +24,10 @@ Breaking changes
- Remove support for Python 2. This is the first version of xarray that is
Python 3 only. (:issue:`1876`).
By `Joe Hamman <https://github.com/jhamman>`_.
+ - The `compat` argument to `Dataset` and the `encoding` argument to
+   `DataArray` are deprecated and will be removed in a future release.
+   (:issue:`1188`)
+   By `Maximilian Roos <https://github.com/max-sixty>`_.

Enhancements
~~~~~~~~~~~~
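To illustrate the migration this entry describes, here is a minimal sketch (illustrative only, not part of the commit; assumes xarray at this release with NumPy installed) of the deprecated `encoding` pattern and its replacements:

    import numpy as np
    import xarray as xr

    # Deprecated: passing `encoding` to the constructor now emits a
    # FutureWarning and is slated for removal in 0.13.
    # da = xr.DataArray(np.zeros(3), dims='x', encoding={'dtype': 'int16'})

    # Replacement 1: set the attribute directly after construction.
    da = xr.DataArray(np.zeros(3), dims='x')
    da.encoding = {'dtype': 'int16'}

    # Replacement 2: supply encoding at write time (needs a netCDF backend):
    # da.to_dataset(name='var').to_netcdf(
    #     'out.nc', encoding={'var': {'dtype': 'int16'}})
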
2 changes: 1 addition & 1 deletion xarray/core/alignment.py
@@ -495,7 +495,7 @@ def _broadcast_array(array):
coords = OrderedDict(array.coords)
coords.update(common_coords)
return DataArray(data, coords, data.dims, name=array.name,
-                      attrs=array.attrs, encoding=array.encoding)
+                      attrs=array.attrs)

def _broadcast_dataset(ds):
data_vars = OrderedDict(
25 changes: 13 additions & 12 deletions xarray/core/dataarray.py
@@ -12,15 +12,13 @@
from .alignment import align, reindex_like_indexers
from .common import AbstractArray, DataWithCoords
from .coordinates import (
-     DataArrayCoordinates, LevelCoordinatesSource,
-     assert_coordinate_consistent, remap_label_indexers)
+     DataArrayCoordinates, LevelCoordinatesSource, assert_coordinate_consistent,
+     remap_label_indexers)
from .dataset import Dataset, merge_indexes, split_indexes
from .formatting import format_item
- from .indexes import default_indexes, Indexes
+ from .indexes import Indexes, default_indexes
from .options import OPTIONS
- from .utils import (
-     _check_inplace, decode_numpy_dict_values, either_dict_or_kwargs,
-     ensure_us_time_resolution)
+ from .utils import _check_inplace, either_dict_or_kwargs
from .variable import (
IndexVariable, Variable, as_compatible_data, as_variable,
assert_unique_multiindex_level_names)
@@ -192,13 +190,16 @@ def __init__(self, data, coords=None, dims=None, name=None,
attrs : dict_like or None, optional
Attributes to assign to the new instance. By default, an empty
attribute dictionary is initialized.
- encoding : dict_like or None, optional
-     Dictionary specifying how to encode this array's data into a
-     serialized format like netCDF4. Currently used keys (for netCDF)
-     include '_FillValue', 'scale_factor', 'add_offset', 'dtype',
-     'units' and 'calendar' (the later two only for datetime arrays).
-     Unrecognized keys are ignored.
+ encoding : deprecated
"""

+ if encoding is not None:
+     warnings.warn(
+         'The `encoding` argument to `DataArray` is deprecated and '
+         'will be removed in 0.13. '
+         'Instead, specify the encoding when writing to disk or '
+         'set the `encoding` attribute directly.',
+         FutureWarning, stacklevel=2)
if fastpath:
variable = data
assert dims is None
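A quick sketch (hypothetical usage, not part of the commit; assumes xarray at this commit) of how the new warning surfaces; `stacklevel=2` attributes it to the caller's constructor call rather than to xarray internals:

    import warnings

    import numpy as np
    import xarray as xr

    with warnings.catch_warnings(record=True) as caught:
        warnings.simplefilter('always')
        # The deprecated keyword still works for now, but warns.
        xr.DataArray(np.arange(3), dims='x', encoding={'dtype': 'int16'})

    # Exactly the deprecation we expect, pointed at the line above.
    assert any(issubclass(w.category, FutureWarning) for w in caught)
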
33 changes: 17 additions & 16 deletions xarray/core/dataset.py
@@ -13,16 +13,16 @@
import xarray as xr

from . import (
-     alignment, dtypes, duck_array_ops, formatting, groupby,
-     indexing, ops, pdcompat, resample, rolling, utils)
+     alignment, dtypes, duck_array_ops, formatting, groupby, indexing, ops,
+     pdcompat, resample, rolling, utils)
from ..coding.cftimeindex import _parse_array_of_cftime_strings
from .alignment import align
from .common import (
ALL_DIMS, DataWithCoords, ImplementsDatasetReduce,
_contains_datetime_like_objects)
from .coordinates import (
-     DatasetCoordinates, LevelCoordinatesSource,
-     assert_coordinate_consistent, remap_label_indexers)
+     DatasetCoordinates, LevelCoordinatesSource, assert_coordinate_consistent,
+     remap_label_indexers)
from .indexes import Indexes, default_indexes
from .merge import (
dataset_merge_method, dataset_update_method, merge_data_and_coords,
@@ -31,8 +31,8 @@
from .pycompat import dask_array_type
from .utils import (
Frozen, SortedKeysDict, _check_inplace, datetime_to_numeric,
-     decode_numpy_dict_values, either_dict_or_kwargs, ensure_us_time_resolution,
-     hashable, maybe_wrap_array)
+     decode_numpy_dict_values, either_dict_or_kwargs, hashable,
+     maybe_wrap_array)
from .variable import IndexVariable, Variable, as_variable, broadcast_variables

# list of attributes of pd.DatetimeIndex that are ndarrays of time info
@@ -324,7 +324,7 @@ class Dataset(Mapping, ImplementsDatasetReduce, DataWithCoords):
_resample_cls = resample.DatasetResample

def __init__(self, data_vars=None, coords=None, attrs=None,
-              compat='broadcast_equals'):
+              compat=None):
"""To load data from a file or file-like object, use the `open_dataset`
function.
@@ -348,16 +348,17 @@ def __init__(self, data_vars=None, coords=None, attrs=None,
name.
attrs : dict-like, optional
Global attributes to save on this dataset.
- compat : {'broadcast_equals', 'equals', 'identical'}, optional
-     String indicating how to compare variables of the same name for
-     potential conflicts when initializing this dataset:
-     - 'broadcast_equals': all values must be equal when variables are
-       broadcast against each other to ensure common dimensions.
-     - 'equals': all values and dimensions must be the same.
-     - 'identical': all values, dimensions and attributes must be the
-       same.
+ compat : deprecated
"""

+ if compat is not None:
+     warnings.warn(
+         'The `compat` argument to Dataset is deprecated and will be '
+         'removed in 0.13. '
+         'Instead, use `merge` to control how variables are combined.',
+         FutureWarning, stacklevel=2)
+ else:
+     compat = 'broadcast_equals'
self._variables = OrderedDict()
self._coord_names = set()
self._dims = {}
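A short sketch (illustrative, assuming xarray at this commit) of the replacement the warning points to: rather than `Dataset(..., compat=...)`, combine objects with `xarray.merge`, which retains a `compat` argument:

    import xarray as xr

    # Previously: Dataset(data, compat='equals')
    # Now: merge explicitly and choose the comparison there.
    ds = xr.merge(
        [xr.Dataset({'a': ('x', [1, 2, 3])}),
         xr.Dataset({'a': ('x', [1, 2, 3]), 'b': 0})],
        compat='equals',  # same-name variables must be equal
    )
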
4 changes: 2 additions & 2 deletions xarray/tests/test_dataarray.py
@@ -258,7 +258,7 @@ def test_constructor(self):
expected = Dataset({None: (['x', 'y'], data, {'bar': 2})})[None]
assert_identical(expected, actual)

- actual = DataArray(data, dims=['x', 'y'], encoding={'bar': 2})
+ actual = DataArray(data, dims=['x', 'y'])
expected = Dataset({None: (['x', 'y'], data, {}, {'bar': 2})})[None]
assert_identical(expected, actual)

@@ -296,7 +296,7 @@ def test_constructor_from_self_described(self):
expected = DataArray(data,
coords={'x': ['a', 'b'], 'y': [-1, -2]},
dims=['x', 'y'], name='foobar',
-                        attrs={'bar': 2}, encoding={'foo': 3})
+                        attrs={'bar': 2})
actual = DataArray(expected)
assert_identical(expected, actual)

6 changes: 1 addition & 5 deletions xarray/tests/test_dataset.py
@@ -1,10 +1,10 @@
# -*- coding: utf-8 -*-
+ import pickle
import sys
import warnings
from collections import OrderedDict
from copy import copy, deepcopy
from io import StringIO
- import pickle
from textwrap import dedent

import numpy as np
@@ -354,13 +354,9 @@ def test_constructor_pandas_single(self):
def test_constructor_compat(self):
data = OrderedDict([('x', DataArray(0, coords={'y': 1})),
('y', ('z', [1, 1, 1]))])
- with pytest.raises(MergeError):
-     Dataset(data, compat='equals')
expected = Dataset({'x': 0}, {'y': ('z', [1, 1, 1])})
actual = Dataset(data)
assert_identical(expected, actual)
- actual = Dataset(data, compat='broadcast_equals')
- assert_identical(expected, actual)

data = OrderedDict([('y', ('z', [1, 1, 1])),
('x', DataArray(0, coords={'y': 1}))])
