remove DataArray and Dataset constructor deprecations for 0.15 (#3560)
* remove 0.15 deprecations

* whatsnew
max-sixty authored and dcherian committed Jan 17, 2020
1 parent 3955c37 commit 46dfb77
Showing 3 changed files with 7 additions and 28 deletions.
5 changes: 5 additions & 0 deletions doc/whats-new.rst
@@ -22,6 +22,11 @@ v0.15.0 (unreleased)
 Breaking changes
 ~~~~~~~~~~~~~~~~
 
+- Remove the ``encoding`` kwarg from ``DataArray`` and the ``compat`` kwarg from
+  ``Dataset``, both deprecated since 0.12 (:pull:`3650`). Instead, specify the
+  encoding when writing to disk or set the ``encoding`` attribute directly, and
+  use ``merge`` to control how variables are combined.
+  By `Maximilian Roos <https://github.com/max-sixty>`_
 
 New Features
 ~~~~~~~~~~~~
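
For readers migrating their own code, here is a minimal sketch of the first replacement named in the entry above, specifying the encoding at write time (assuming xarray >= 0.15 with a netCDF backend; the array and file names are illustrative only):

    import numpy as np
    import xarray as xr

    da = xr.DataArray(np.arange(4.0), dims="x", name="temperature")

    # The constructor no longer accepts ``encoding=``; supply the encoding when
    # writing to disk instead (the dtype choice here is illustrative).
    da.to_netcdf("temperature.nc", encoding={"temperature": {"dtype": "float32"}})
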
16 changes: 1 addition & 15 deletions xarray/core/dataarray.py
@@ -267,8 +267,6 @@ def __init__(
         dims: Union[Hashable, Sequence[Hashable], None] = None,
         name: Hashable = None,
         attrs: Mapping = None,
-        # deprecated parameters
-        encoding=None,
         # internal parameters
         indexes: Dict[Hashable, pd.Index] = None,
         fastpath: bool = False,
@@ -313,20 +311,10 @@ def __init__(
             Attributes to assign to the new instance. By default, an empty
             attribute dictionary is initialized.
         """
-        if encoding is not None:
-            warnings.warn(
-                "The `encoding` argument to `DataArray` is deprecated, and . "
-                "will be removed in 0.15. "
-                "Instead, specify the encoding when writing to disk or "
-                "set the `encoding` attribute directly.",
-                FutureWarning,
-                stacklevel=2,
-            )
         if fastpath:
             variable = data
             assert dims is None
             assert attrs is None
-            assert encoding is None
         else:
             # try to fill in arguments from data if they weren't supplied
             if coords is None:
@@ -348,13 +336,11 @@ def __init__(
                 name = getattr(data, "name", None)
             if attrs is None and not isinstance(data, PANDAS_TYPES):
                 attrs = getattr(data, "attrs", None)
-            if encoding is None:
-                encoding = getattr(data, "encoding", None)
 
             data = _check_data_shape(data, coords, dims)
             data = as_compatible_data(data)
             coords, dims = _infer_coords_and_dims(data.shape, coords, dims)
-            variable = Variable(dims, data, attrs, encoding, fastpath=True)
+            variable = Variable(dims, data, attrs, fastpath=True)
             indexes = dict(
                 _extract_indexes_from_coords(coords)
             )  # needed for to_dataset
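
The removed warning above also describes the other migration path: setting the ``encoding`` attribute directly after construction. A minimal sketch, assuming xarray >= 0.15 (the encoding keys shown are illustrative netCDF4 options):

    import numpy as np
    import xarray as xr

    da = xr.DataArray(np.zeros((2, 3)), dims=("y", "x"))

    # ``encoding`` is no longer a constructor argument; assign the attribute instead.
    da.encoding = {"zlib": True, "complevel": 4}
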
14 changes: 1 addition & 13 deletions xarray/core/dataset.py
@@ -463,7 +463,6 @@ def __init__(
         data_vars: Mapping[Hashable, Any] = None,
         coords: Mapping[Hashable, Any] = None,
         attrs: Mapping[Hashable, Any] = None,
-        compat=None,
     ):
         """To load data from a file or file-like object, use the `open_dataset`
         function.
@@ -513,18 +512,7 @@ def __init__(
         attrs : dict-like, optional
             Global attributes to save on this dataset.
-        compat : deprecated
         """
-        if compat is not None:
-            warnings.warn(
-                "The `compat` argument to Dataset is deprecated and will be "
-                "removed in 0.15."
-                "Instead, use `merge` to control how variables are combined",
-                FutureWarning,
-                stacklevel=2,
-            )
-        else:
-            compat = "broadcast_equals"
 
         # TODO(shoyer): expose indexes as a public argument in __init__
 
@@ -544,7 +532,7 @@ def __init__(
             coords = coords.variables
 
         variables, coord_names, dims, indexes = merge_data_and_coords(
-            data_vars, coords, compat=compat
+            data_vars, coords, compat="broadcast_equals"
         )
 
         self._attrs = dict(attrs) if attrs is not None else None
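
For the removed ``compat`` kwarg, the deleted warning points to ``merge`` as the way to control how variables are combined. A minimal sketch under that suggestion, assuming xarray >= 0.15 (the two datasets are invented for the example):

    import xarray as xr

    ds1 = xr.Dataset({"a": ("x", [1, 2, 3])})
    ds2 = xr.Dataset({"b": ("x", [4, 5, 6])})

    # Dataset(...) no longer takes ``compat``; pass it to merge instead.
    combined = xr.merge([ds1, ds2], compat="broadcast_equals")
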
