From c9b53bd68a418591b7b30bc2e5c139803647f127 Mon Sep 17 00:00:00 2001 From: Keewis Date: Tue, 12 Nov 2019 19:28:51 +0100 Subject: [PATCH 01/23] silence sphinx warnings --- xarray/backends/api.py | 2 ++ xarray/core/alignment.py | 2 +- xarray/core/combine.py | 2 ++ xarray/core/common.py | 8 +++++--- xarray/core/concat.py | 1 + xarray/core/dataarray.py | 2 +- xarray/core/dataset.py | 9 ++++++--- xarray/core/groupby.py | 2 ++ 8 files changed, 20 insertions(+), 8 deletions(-) diff --git a/xarray/backends/api.py b/xarray/backends/api.py index d23594fc675..3232a24e780 100644 --- a/xarray/backends/api.py +++ b/xarray/backends/api.py @@ -761,6 +761,7 @@ def open_mfdataset( 'no_conflicts', 'override'}, optional String indicating how to compare variables of the same name for potential conflicts when merging: + * 'broadcast_equals': all values must be equal when variables are broadcast against each other to ensure common dimensions. * 'equals': all values and dimensions must be the same. @@ -770,6 +771,7 @@ def open_mfdataset( must be equal. The returned dataset then contains the combination of all non-null values. * 'override': skip comparing and pick variable from first dataset + preprocess : callable, optional If provided, call this function on each dataset prior to concatenation. You can find the file-name from which each dataset was loaded in diff --git a/xarray/core/alignment.py b/xarray/core/alignment.py index 41ff5a3b32d..b820d215d2f 100644 --- a/xarray/core/alignment.py +++ b/xarray/core/alignment.py @@ -108,7 +108,7 @@ def align( Returns ------- - aligned : same as *objects + aligned : same as `*objects` Tuple of objects with aligned coordinates. Raises diff --git a/xarray/core/combine.py b/xarray/core/combine.py index 3308dcef285..b9db30a9f92 100644 --- a/xarray/core/combine.py +++ b/xarray/core/combine.py @@ -531,6 +531,7 @@ def combine_by_coords( * 'all': All data variables will be concatenated. * list of str: The listed data variables will be concatenated, in addition to the 'minimal' data variables. + If objects are DataArrays, `data_vars` must be 'all'. coords : {'minimal', 'different', 'all' or list of str}, optional As per the 'data_vars' kwarg, but for coordinate variables. @@ -747,6 +748,7 @@ def auto_combine( 'no_conflicts', 'override'}, optional String indicating how to compare variables of the same name for potential conflicts: + - 'broadcast_equals': all values must be equal when variables are broadcast against each other to ensure common dimensions. - 'equals': all values and dimensions must be the same. diff --git a/xarray/core/common.py b/xarray/core/common.py index d372115ea57..d84adcad0a4 100644 --- a/xarray/core/common.py +++ b/xarray/core/common.py @@ -463,7 +463,7 @@ def assign_coords(self, coords=None, **coords_kwargs): def assign_attrs(self, *args, **kwargs): """Assign new attrs to this object. - Returns a new object equivalent to self.attrs.update(*args, **kwargs). + Returns a new object equivalent to ``self.attrs.update(*args, **kwargs)``. Parameters ---------- @@ -490,7 +490,7 @@ def pipe( **kwargs, ) -> T: """ - Apply func(self, *args, **kwargs) + Apply ``func(self, *args, **kwargs)`` This method replicates the pandas method of the same name. @@ -819,6 +819,7 @@ def rolling_exp( ---------- window : A single mapping from a dimension name to window value, optional + dim : str Name of the dimension to create the rolling exponential window along (e.g., `time`). 
@@ -857,6 +858,7 @@ def coarsen( ---------- dim: dict, optional Mapping from the dimension name to the window size. + dim : str Name of the dimension to create the rolling iterator along (e.g., `time`). @@ -867,7 +869,7 @@ def coarsen( multiple of the window size. If 'trim', the excess entries are dropped. If 'pad', NA will be padded. side : 'left' or 'right' or mapping from dimension to 'left' or 'right' - coord_func: function (name) that is applied to the coordintes, + coord_func : function (name) that is applied to the coordintes, or a mapping from coordinate name to function (name). Returns diff --git a/xarray/core/concat.py b/xarray/core/concat.py index 5b4fc078236..5ccbfa3f2b4 100644 --- a/xarray/core/concat.py +++ b/xarray/core/concat.py @@ -45,6 +45,7 @@ def concat( * 'all': All data variables will be concatenated. * list of str: The listed data variables will be concatenated, in addition to the 'minimal' data variables. + If objects are DataArrays, data_vars must be 'all'. coords : {'minimal', 'different', 'all' or list of str}, optional These coordinate variables will be concatenated together: diff --git a/xarray/core/dataarray.py b/xarray/core/dataarray.py index 5e164f420c8..089f048257f 100644 --- a/xarray/core/dataarray.py +++ b/xarray/core/dataarray.py @@ -2916,7 +2916,7 @@ def quantile( is a scalar. If multiple percentiles are given, first axis of the result corresponds to the quantile and a quantile dimension is added to the return array. The other dimensions are the - dimensions that remain after the reduction of the array. + dimensions that remain after the reduction of the array. See Also -------- diff --git a/xarray/core/dataset.py b/xarray/core/dataset.py index dc5a315e72a..f6a512c0ff4 100644 --- a/xarray/core/dataset.py +++ b/xarray/core/dataset.py @@ -2108,7 +2108,7 @@ def thin( indexers: Union[Mapping[Hashable, int], int] = None, **indexers_kwargs: Any, ) -> "Dataset": - """Returns a new dataset with each array indexed along every `n`th + """Returns a new dataset with each array indexed along every `n`-th value for the specified dimension(s) Parameters @@ -3467,6 +3467,7 @@ def merge( 'no_conflicts'}, optional String indicating how to compare variables of the same name for potential conflicts: + - 'broadcast_equals': all values must be equal when variables are broadcast against each other to ensure common dimensions. - 'equals': all values and dimensions must be the same. @@ -3475,6 +3476,7 @@ def merge( - 'no_conflicts': only values which are not null in both datasets must be equal. The returned dataset then contains the combination of all non-null values. + join : {'outer', 'inner', 'left', 'right', 'exact'}, optional Method for joining ``self`` and ``other`` along shared dimensions: @@ -3615,7 +3617,7 @@ def drop_sel(self, labels=None, *, errors="raise", **labels_kwargs): in the dataset. If 'ignore', any given labels that are in the dataset are dropped and no error is raised. **labels_kwargs : {dim: label, ...}, optional - The keyword arguments form of ``dim`` and ``labels` + The keyword arguments form of ``dim`` and ``labels`` Returns ------- @@ -3907,6 +3909,7 @@ def interpolate_na( method : {'linear', 'nearest', 'zero', 'slinear', 'quadratic', 'cubic', 'polynomial', 'barycentric', 'krog', 'pchip', 'spline'}, optional + String indicating which method to use for interpolation: - 'linear': linear interpolation (Default). 
Additional keyword @@ -5218,7 +5221,7 @@ def integrate(self, coord, datetime_unit=None): datetime_unit Can be specify the unit if datetime coordinate is used. One of {'Y', 'M', 'W', 'D', 'h', 'm', 's', 'ms', 'us', 'ns', 'ps', 'fs', - 'as'} + 'as'} Returns ------- diff --git a/xarray/core/groupby.py b/xarray/core/groupby.py index 8ae65d9b9df..00925bad6bf 100644 --- a/xarray/core/groupby.py +++ b/xarray/core/groupby.py @@ -677,11 +677,13 @@ def map(self, func, shortcut=False, args=(), **kwargs): Callable to apply to each array. shortcut : bool, optional Whether or not to shortcut evaluation under the assumptions that: + (1) The action of `func` does not depend on any of the array metadata (attributes or coordinates) but only on the data and dimensions. (2) The action of `func` creates arrays with homogeneous metadata, that is, with the same dimensions and attributes. + If these conditions are satisfied `shortcut` provides significant speedup. This should be the case for many common groupby operations (e.g., applying numpy ufuncs). From 5d9d263e40e1f67910cbefaf96d46a91c560b8b5 Mon Sep 17 00:00:00 2001 From: Keewis Date: Tue, 12 Nov 2019 20:55:20 +0100 Subject: [PATCH 02/23] silence more sphinx warnings --- doc/whats-new.rst | 2 +- xarray/backends/api.py | 2 +- xarray/core/dataarray.py | 13 +++++++------ xarray/core/dataset.py | 4 ++-- xarray/core/groupby.py | 4 ++-- xarray/plot/plot.py | 2 +- 6 files changed, 14 insertions(+), 13 deletions(-) diff --git a/doc/whats-new.rst b/doc/whats-new.rst index 96f0ba9a4a6..54aaa4b7595 100644 --- a/doc/whats-new.rst +++ b/doc/whats-new.rst @@ -229,7 +229,7 @@ Documentation (pull:`3331`, pull:`3331`). By `Justus Magin `_. - Add examples for :py:meth:`align`, :py:meth:`merge`, :py:meth:`combine_by_coords`, :py:meth:`full_like`, :py:meth:`zeros_like`, :py:meth:`ones_like`, :py:meth:`Dataset.pipe`, - :py:meth:`Dataset.assign`, :py:meth:`Dataset.reindex`, :py:meth:`Dataset.fillna` (pull:`3328`). + :py:meth:`Dataset.assign`, :py:meth:`Dataset.reindex`, :py:meth:`Dataset.fillna` (:pull:`3328`). By `Anderson Banihirwe `_. - Fixed documentation to clean up an unwanted file created in ``ipython`` example (:pull:`3353`). By `Gregory Gundersen `_. diff --git a/xarray/backends/api.py b/xarray/backends/api.py index 3232a24e780..3dec3995e97 100644 --- a/xarray/backends/api.py +++ b/xarray/backends/api.py @@ -735,7 +735,7 @@ def open_mfdataset( Parameters ---------- paths : str or sequence - Either a string glob in the form "path/to/my/files/*.nc" or an explicit list of + Either a string glob in the form ``"path/to/my/files/*.nc"`` or an explicit list of files to open. Paths can be given as strings or as pathlib Paths. If concatenation along more than one dimension is desired, then ``paths`` must be a nested list-of-lists (see ``manual_combine`` for details). (A string glob will diff --git a/xarray/core/dataarray.py b/xarray/core/dataarray.py index 089f048257f..b906056c922 100644 --- a/xarray/core/dataarray.py +++ b/xarray/core/dataarray.py @@ -1314,7 +1314,7 @@ def interp( values. kwargs: dictionary Additional keyword passed to scipy's interpolator. - **coords_kwarg : {dim: coordinate, ...}, optional + ``**coords_kwarg`` : {dim: coordinate, ...}, optional The keyword arguments form of ``coords``. One of coords or coords_kwargs must be provided. @@ -2028,9 +2028,10 @@ def interpolate_na( ---------- dim : str Specifies the dimension along which to interpolate. 
- method : {'linear', 'nearest', 'zero', 'slinear', 'quadratic', 'cubic', - 'polynomial', 'barycentric', 'krog', 'pchip', - 'spline', 'akima'}, optional + method : {'linear', 'nearest', 'zero', 'slinear', 'quadratic', + 'cubic', 'polynomial', 'barycentric', 'krog', 'pchip', + 'spline', 'akima'}, optional + String indicating which method to use for interpolation: - 'linear': linear interpolation (Default). Additional keyword @@ -3041,8 +3042,8 @@ def integrate( Coordinate(s) used for the integration. datetime_unit: str, optional Can be used to specify the unit if datetime coordinate is used. - One of {'Y', 'M', 'W', 'D', 'h', 'm', 's', 'ms', 'us', 'ns', - 'ps', 'fs', 'as'} + One of {'Y', 'M', 'W', 'D', 'h', 'm', 's', 'ms', 'us', 'ns', 'ps', + 'fs', 'as'} Returns ------- diff --git a/xarray/core/dataset.py b/xarray/core/dataset.py index f6a512c0ff4..1bba6e23cba 100644 --- a/xarray/core/dataset.py +++ b/xarray/core/dataset.py @@ -1499,7 +1499,7 @@ def to_netcdf( Nested dictionary with variable names as keys and dictionaries of variable specific encodings as values, e.g., ``{'my_variable': {'dtype': 'int16', 'scale_factor': 0.1, - 'zlib': True}, ...}`` + 'zlib': True}, ...}`` The `h5netcdf` engine supports both the NetCDF4-style compression encoding parameters ``{'zlib': True, 'complevel': 9}`` and the h5py @@ -2117,7 +2117,7 @@ def thin( A dict with keys matching dimensions and integer values `n` or a single integer `n` applied over all dimensions. One of indexers or indexers_kwargs must be provided. - **indexers_kwargs : {dim: n, ...}, optional + ``**indexers_kwargs`` : {dim: n, ...}, optional The keyword arguments form of ``indexers``. One of indexers or indexers_kwargs must be provided. diff --git a/xarray/core/groupby.py b/xarray/core/groupby.py index 00925bad6bf..8d6ede16141 100644 --- a/xarray/core/groupby.py +++ b/xarray/core/groupby.py @@ -687,9 +687,9 @@ def map(self, func, shortcut=False, args=(), **kwargs): If these conditions are satisfied `shortcut` provides significant speedup. This should be the case for many common groupby operations (e.g., applying numpy ufuncs). - args : tuple, optional + ``*args`` : tuple, optional Positional arguments passed to `func`. - **kwargs + ``**kwargs`` Used to call `func(ar, **kwargs)` for each array `ar`. Returns diff --git a/xarray/plot/plot.py b/xarray/plot/plot.py index 5c754c3f49b..16a4943627e 100644 --- a/xarray/plot/plot.py +++ b/xarray/plot/plot.py @@ -269,7 +269,7 @@ def line( if None, use the default for the matplotlib function. add_legend : boolean, optional Add legend with y axis coordinates (2D inputs only). - *args, **kwargs : optional + ``*args``, ``**kwargs`` : optional Additional arguments to matplotlib.pyplot.plot """ # Handle facetgrids first From fb559c04ee959e84fe7c1e3d4c0081a81682fb6d Mon Sep 17 00:00:00 2001 From: Keewis Date: Wed, 13 Nov 2019 00:29:49 +0100 Subject: [PATCH 03/23] fix some references --- xarray/backends/api.py | 4 ++-- xarray/core/common.py | 2 +- xarray/core/computation.py | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/xarray/backends/api.py b/xarray/backends/api.py index 3dec3995e97..8da213d6adf 100644 --- a/xarray/backends/api.py +++ b/xarray/backends/api.py @@ -729,7 +729,7 @@ def open_mfdataset( ``combine_by_coords`` and ``combine_nested``. By default the old (now deprecated) ``auto_combine`` will be used, please specify either ``combine='by_coords'`` or ``combine='nested'`` in future. Requires dask to be installed. See documentation for - details on dask [1]. 
Attributes from the first dataset file are used for the + details on dask [1]_. Attributes from the first dataset file are used for the combined dataset. Parameters @@ -745,7 +745,7 @@ def open_mfdataset( In general, these should divide the dimensions of each dataset. If int, chunk each dimension by ``chunks``. By default, chunks will be chosen to load entire input files into memory at once. This has a major impact on performance: please - see the full documentation for more details [2]. + see the full documentation for more details [2]_. concat_dim : str, or list of str, DataArray, Index or None, optional Dimensions to concatenate files along. You only need to provide this argument if any of the dimensions along which you want to concatenate is not a dimension diff --git a/xarray/core/common.py b/xarray/core/common.py index d84adcad0a4..f2d40e2ca1f 100644 --- a/xarray/core/common.py +++ b/xarray/core/common.py @@ -932,7 +932,7 @@ def resample( Parameters ---------- indexer : {dim: freq}, optional - Mapping from the dimension name to resample frequency. The + Mapping from the dimension name to resample frequency [1]_. The dimension must be datetime-like. skipna : bool, optional Whether to skip missing values when aggregating in downsampling. diff --git a/xarray/core/computation.py b/xarray/core/computation.py index bb5ab07d8dd..f8e4914e57b 100644 --- a/xarray/core/computation.py +++ b/xarray/core/computation.py @@ -947,7 +947,7 @@ def earth_mover_distance(first_samples, appropriately for use in `apply`. You may find helper functions such as numpy.broadcast_arrays helpful in writing your function. `apply_ufunc` also works well with numba's vectorize and guvectorize. Further explanation with - examples are provided in the xarray documentation [3]. + examples are provided in the xarray documentation [3]_. See also -------- From 192eebd2983f4b4706aec73c27e9efc1248832cb Mon Sep 17 00:00:00 2001 From: Keewis Date: Wed, 13 Nov 2019 00:56:10 +0100 Subject: [PATCH 04/23] fix the docstrings of Dataset reduce methods --- xarray/core/common.py | 16 ++++++++++++---- 1 file changed, 12 insertions(+), 4 deletions(-) diff --git a/xarray/core/common.py b/xarray/core/common.py index f2d40e2ca1f..9fb5f1e75f5 100644 --- a/xarray/core/common.py +++ b/xarray/core/common.py @@ -100,15 +100,23 @@ def wrapped_func(self, dim=None, **kwargs): # type: ignore return wrapped_func - _reduce_extra_args_docstring = """dim : str or sequence of str, optional + _reduce_extra_args_docstring = dedent( + """ + dim : str or sequence of str, optional Dimension(s) over which to apply `{name}`. By default `{name}` is - applied over all dimensions.""" + applied over all dimensions. + """ + ).strip() - _cum_extra_args_docstring = """dim : str or sequence of str, optional + _cum_extra_args_docstring = dedent( + """ + dim : str or sequence of str, optional Dimension over which to apply `{name}`. axis : int or sequence of int, optional Axis over which to apply `{name}`. Only one of the 'dim' - and 'axis' arguments can be supplied.""" + and 'axis' arguments can be supplied. 
+ """ + ).strip() class AbstractArray(ImplementsArrayReduce): From 4b93534421e8d8ff25b8d9ff1667f1eb325ac1da Mon Sep 17 00:00:00 2001 From: Keewis Date: Wed, 13 Nov 2019 22:25:36 +0100 Subject: [PATCH 05/23] mark the orphaned files as such --- doc/README.rst | 2 ++ doc/api-hidden.rst | 2 ++ 2 files changed, 4 insertions(+) diff --git a/doc/README.rst b/doc/README.rst index af7bc96092c..0579f85d85f 100644 --- a/doc/README.rst +++ b/doc/README.rst @@ -1,3 +1,5 @@ +:orphan: + xarray ------ diff --git a/doc/api-hidden.rst b/doc/api-hidden.rst index 8f82b30a442..4cd2ba0cc7f 100644 --- a/doc/api-hidden.rst +++ b/doc/api-hidden.rst @@ -2,6 +2,8 @@ .. This extra page is a work around for sphinx not having any support for .. hiding an autosummary table. +:orphan: + .. currentmodule:: xarray .. autosummary:: From 9b242197b02fa08f605966874d97f7240b4b9ea8 Mon Sep 17 00:00:00 2001 From: Keewis Date: Thu, 14 Nov 2019 01:14:00 +0100 Subject: [PATCH 06/23] silence some nit-picky warnings --- doc/combining.rst | 6 +++--- doc/computation.rst | 4 ++-- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/doc/combining.rst b/doc/combining.rst index 4593d410d23..05b7f2efc50 100644 --- a/doc/combining.rst +++ b/doc/combining.rst @@ -255,11 +255,11 @@ Combining along multiple dimensions ``combine_nested``. For combining many objects along multiple dimensions xarray provides -:py:func:`~xarray.combine_nested`` and :py:func:`~xarray.combine_by_coords`. These +:py:func:`~xarray.combine_nested` and :py:func:`~xarray.combine_by_coords`. These functions use a combination of ``concat`` and ``merge`` across different variables to combine many objects into one. -:py:func:`~xarray.combine_nested`` requires specifying the order in which the +:py:func:`~xarray.combine_nested` requires specifying the order in which the objects should be combined, while :py:func:`~xarray.combine_by_coords` attempts to infer this ordering automatically from the coordinates in the data. @@ -310,4 +310,4 @@ These functions can be used by :py:func:`~xarray.open_mfdataset` to open many files as one dataset. The particular function used is specified by setting the argument ``'combine'`` to ``'by_coords'`` or ``'nested'``. This is useful for situations where your data is split across many files in multiple locations, -which have some known relationship between one another. \ No newline at end of file +which have some known relationship between one another. diff --git a/doc/computation.rst b/doc/computation.rst index 663c546be20..0d01c294cf6 100644 --- a/doc/computation.rst +++ b/doc/computation.rst @@ -322,8 +322,8 @@ Broadcasting by dimension name ``DataArray`` objects are automatically align themselves ("broadcasting" in the numpy parlance) by dimension name instead of axis order. With xarray, you do not need to transpose arrays or insert dimensions of length 1 to get array -operations to work, as commonly done in numpy with :py:func:`np.reshape` or -:py:const:`np.newaxis`. +operations to work, as commonly done in numpy with :py:func:`numpy.reshape` or +:py:const:`numpy.newaxis`. This is best illustrated by a few examples. 
Consider two one-dimensional arrays with different sizes aligned along different dimensions: From 939c60d671e2aa66bc0e4db537092c4512d53be8 Mon Sep 17 00:00:00 2001 From: Keewis Date: Sun, 17 Nov 2019 01:36:05 +0100 Subject: [PATCH 07/23] convert all references to xray to double backtick quoted text --- doc/whats-new.rst | 126 +++++++++++++++++++++++----------------------- 1 file changed, 62 insertions(+), 64 deletions(-) diff --git a/doc/whats-new.rst b/doc/whats-new.rst index e5213c7e82f..be41df84d3a 100644 --- a/doc/whats-new.rst +++ b/doc/whats-new.rst @@ -2566,8 +2566,8 @@ Enhancements similar to what the command line utility ``ncdump -h`` produces (:issue:`1150`). By `Joe Hamman `_. - Added the ability write unlimited netCDF dimensions with the ``scipy`` and - ``netcdf4`` backends via the new :py:attr:`~xray.Dataset.encoding` attribute - or via the ``unlimited_dims`` argument to :py:meth:`~xray.Dataset.to_netcdf`. + ``netcdf4`` backends via the new ``xray.Dataset.encoding`` attribute + or via the ``unlimited_dims`` argument to ``xray.Dataset.to_netcdf``. By `Joe Hamman `_. - New :py:meth:`~DataArray.quantile` method to calculate quantiles from DataArray objects (:issue:`1187`). @@ -2980,7 +2980,7 @@ recommend switching your import statements to ``import xarray as xr``. Breaking changes ~~~~~~~~~~~~~~~~ -- The internal data model used by :py:class:`~xray.DataArray` has been +- The internal data model used by ``xray.DataArray`` has been rewritten to fix several outstanding issues (:issue:`367`, :issue:`634`, `this stackoverflow report`_). Internally, ``DataArray`` is now implemented in terms of ``._variable`` and ``._coords`` attributes instead of holding @@ -3018,7 +3018,7 @@ Breaking changes * x (x) int64 0 1 2 - It is no longer possible to convert a DataArray to a Dataset with - :py:meth:`xray.DataArray.to_dataset` if it is unnamed. This will now + ``xray.DataArray.to_dataset`` if it is unnamed. This will now raise ``ValueError``. If the array is unnamed, you need to supply the ``name`` argument. @@ -3087,7 +3087,7 @@ Enhancements - Plotting: more control on colormap parameters (:issue:`642`). ``vmin`` and ``vmax`` will not be silently ignored anymore. Setting ``center=False`` prevents automatic selection of a divergent colormap. -- New :py:meth:`~xray.Dataset.shift` and :py:meth:`~xray.Dataset.roll` methods +- New ``xray.Dataset.shift`` and ``xray.Dataset.roll`` methods for shifting/rotating datasets or arrays along a dimension: .. ipython:: python @@ -3103,7 +3103,7 @@ Enhancements index names correspond to the ``dims`` of the ``Dataset``, and its data is aligned. - Passing a :py:class:`pandas.DataFrame` or :py:class:`pandas.Panel` to a Dataset constructor is now permitted. -- New function :py:func:`~xray.broadcast` for explicitly broadcasting +- New function ``xray.broadcast`` for explicitly broadcasting ``DataArray`` and ``Dataset`` objects against each other. For example: .. ipython:: python @@ -3161,7 +3161,7 @@ API Changes ~~~~~~~~~~~ - The handling of colormaps and discrete color lists for 2D plots in - :py:meth:`~xray.DataArray.plot` was changed to provide more compatibility + ``xray.DataArray.plot`` was changed to provide more compatibility with matplotlib's ``contour`` and ``contourf`` functions (:issue:`538`). Now discrete lists of colors should be specified using ``colors`` keyword, rather than ``cmap``. @@ -3169,10 +3169,10 @@ API Changes Enhancements ~~~~~~~~~~~~ -- Faceted plotting through :py:class:`~xray.plot.FacetGrid` and the - :py:meth:`~xray.plot.plot` method. 
See :ref:`plotting.faceting` for more details +- Faceted plotting through ``xray.plot.FacetGrid`` and the + ``xray.plot.plot`` method. See :ref:`plotting.faceting` for more details and examples. -- :py:meth:`~xray.Dataset.sel` and :py:meth:`~xray.Dataset.reindex` now support +- ``xray.Dataset.sel`` and ``xray.Dataset.reindex`` now support the ``tolerance`` argument for controlling nearest-neighbor selection (:issue:`629`): @@ -3189,12 +3189,12 @@ Enhancements * x (x) float64 0.9 1.5 This feature requires pandas v0.17 or newer. -- New ``encoding`` argument in :py:meth:`~xray.Dataset.to_netcdf` for writing +- New ``encoding`` argument in ``xray.Dataset.to_netcdf`` for writing netCDF files with compression, as described in the new documentation section on :ref:`io.netcdf.writing_encoded`. -- Add :py:attr:`~xray.Dataset.real` and :py:attr:`~xray.Dataset.imag` +- Add ``xray.Dataset.real`` and ``xray.Dataset.imag`` attributes to Dataset and DataArray (:issue:`553`). -- More informative error message with :py:meth:`~xray.Dataset.from_dataframe` +- More informative error message with ``xray.Dataset.from_dataframe`` if the frame has duplicate columns. - xray now uses deterministic names for dask arrays it creates or opens from disk. This allows xray users to take advantage of dask's nascent support for @@ -3209,9 +3209,9 @@ Bug fixes - Aggregation functions now correctly skip ``NaN`` for data for ``complex128`` dtype (:issue:`554`). - Fixed indexing 0d arrays with unicode dtype (:issue:`568`). -- :py:meth:`~xray.DataArray.name` and Dataset keys must be a string or None to +- ``xray.DataArray.name`` and Dataset keys must be a string or None to be written to netCDF (:issue:`533`). -- :py:meth:`~xray.DataArray.where` now uses dask instead of numpy if either the +- ``xray.DataArray.where`` now uses dask instead of numpy if either the array or ``other`` is a dask array. Previously, if ``other`` was a numpy array the method was evaluated eagerly. - Global attributes are now handled more consistently when loading remote @@ -3238,15 +3238,15 @@ v0.6.0 (21 August 2015) This release includes numerous bug fixes and enhancements. Highlights include the introduction of a plotting module and the new Dataset and DataArray -methods :py:meth:`~xray.Dataset.isel_points`, :py:meth:`~xray.Dataset.sel_points`, -:py:meth:`~xray.Dataset.where` and :py:meth:`~xray.Dataset.diff`. There are no +methods ``xray.Dataset.isel_points``, ``xray.Dataset.sel_points``, +``xray.Dataset.where`` and ``xray.Dataset.diff``. There are no breaking changes from v0.5.2. Enhancements ~~~~~~~~~~~~ - Plotting methods have been implemented on DataArray objects - :py:meth:`~xray.DataArray.plot` through integration with matplotlib + ``xray.DataArray.plot`` through integration with matplotlib (:issue:`185`). For an introduction, see :ref:`plotting`. - Variables in netCDF files with multiple missing values are now decoded as NaN after issuing a warning if open_dataset is called with mask_and_scale=True. @@ -3255,7 +3255,7 @@ Enhancements - Dataset variables are now written to netCDF files in order of appearance when using the netcdf4 backend (:issue:`479`). -- Added :py:meth:`~xray.Dataset.isel_points` and :py:meth:`~xray.Dataset.sel_points` +- Added ``xray.Dataset.isel_points`` and ``xray.Dataset.sel_points`` to support pointwise indexing of Datasets and DataArrays (:issue:`475`). .. 
ipython:: @@ -3300,7 +3300,7 @@ Enhancements x (points) |S1 'a' 'b' 'g' * points (points) int64 0 1 2 -- New :py:meth:`~xray.Dataset.where` method for masking xray objects according +- New ``xray.Dataset.where`` method for masking xray objects according to some criteria. This works particularly well with multi-dimensional data: .. ipython:: python @@ -3311,11 +3311,10 @@ Enhancements @savefig where_example.png width=4in height=4in ds.distance.where(ds.distance < 100).plot() -- Added new methods :py:meth:`DataArray.diff ` - and :py:meth:`Dataset.diff ` for finite - difference calculations along a given axis. +- Added new methods ``xray.DataArray.diff`` and ``xray.Dataset.diff`` + for finite difference calculations along a given axis. -- New :py:meth:`~xray.DataArray.to_masked_array` convenience method for +- New ``xray.DataArray.to_masked_array`` convenience method for returning a numpy.ma.MaskedArray. .. ipython:: python @@ -3324,7 +3323,7 @@ Enhancements da.where(da < 0.5) da.where(da < 0.5).to_masked_array(copy=True) -- Added new flag "drop_variables" to :py:meth:`~xray.open_dataset` for +- Added new flag "drop_variables" to ``xray.open_dataset`` for excluding variables from being parsed. This may be useful to drop variables with problems or inconsistent values. @@ -3353,7 +3352,7 @@ options for ``xray.concat``. Backwards incompatible changes ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -- The optional arguments ``concat_over`` and ``mode`` in :py:func:`~xray.concat` have +- The optional arguments ``concat_over`` and ``mode`` in ``xray.concat`` have been removed and replaced by ``data_vars`` and ``coords``. The new arguments are both more easily understood and more robustly implemented, and allowed us to fix a bug where ``concat`` accidentally loaded data into memory. If you set values for @@ -3363,16 +3362,16 @@ Backwards incompatible changes Enhancements ~~~~~~~~~~~~ -- :py:func:`~xray.open_mfdataset` now supports a ``preprocess`` argument for +- ``xray.open_mfdataset`` now supports a ``preprocess`` argument for preprocessing datasets prior to concatenaton. This is useful if datasets cannot be otherwise merged automatically, e.g., if the original datasets have conflicting index coordinates (:issue:`443`). -- :py:func:`~xray.open_dataset` and :py:func:`~xray.open_mfdataset` now use a +- ``xray.open_dataset`` and ``xray.open_mfdataset`` now use a global thread lock by default for reading from netCDF files with dask. This avoids possible segmentation faults for reading from netCDF4 files when HDF5 is not configured properly for concurrent access (:issue:`444`). - Added support for serializing arrays of complex numbers with `engine='h5netcdf'`. -- The new :py:func:`~xray.save_mfdataset` function allows for saving multiple +- The new ``xray.save_mfdataset`` function allows for saving multiple datasets to disk simultaneously. This is useful when processing large datasets with dask.array. For example, to save a dataset too big to fit into memory to one file per year, we could write: @@ -3391,7 +3390,7 @@ Bug fixes - Fixed ``min``, ``max``, ``argmin`` and ``argmax`` for arrays with string or unicode types (:issue:`453`). -- :py:func:`~xray.open_dataset` and :py:func:`~xray.open_mfdataset` support +- ``xray.open_dataset`` and ``xray.open_mfdataset`` support supplying chunks as a single integer. - Fixed a bug in serializing scalar datetime variable to netCDF. - Fixed a bug that could occur in serialization of 0-dimensional integer arrays. @@ -3408,9 +3407,9 @@ adds the ``pipe`` method, copied from pandas. 
Enhancements ~~~~~~~~~~~~ -- Added :py:meth:`~xray.Dataset.pipe`, replicating the `new pandas method`_ in version +- Added ``xray.Dataset.pipe``, replicating the `new pandas method`_ in version 0.16.2. See :ref:`transforming datasets` for more details. -- :py:meth:`~xray.Dataset.assign` and :py:meth:`~xray.Dataset.assign_coords` +- ``xray.Dataset.assign`` and ``xray.Dataset.assign_coords`` now assign new variables in sorted (alphabetical) order, mirroring the behavior in pandas. Previously, the order was arbitrary. @@ -3432,7 +3431,7 @@ Highlights The headline feature in this release is experimental support for out-of-core computing (data that doesn't fit into memory) with dask_. This includes a new -top-level function :py:func:`~xray.open_mfdataset` that makes it easy to open +top-level function ``xray.open_mfdataset`` that makes it easy to open a collection of netCDF (using dask) as a single ``xray.Dataset`` object. For more on dask, read the `blog post introducing xray + dask`_ and the new documentation section :doc:`dask`. @@ -3447,7 +3446,7 @@ Backwards incompatible changes ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - The logic used for choosing which variables are concatenated with - :py:func:`~xray.concat` has changed. Previously, by default any variables + ``xray.concat`` has changed. Previously, by default any variables which were equal across a dimension were not concatenated. This lead to some surprising behavior, where the behavior of groupby and concat operations could depend on runtime values (:issue:`268`). For example: @@ -3482,8 +3481,8 @@ Backwards incompatible changes Enhancements ~~~~~~~~~~~~ -- New :py:meth:`~xray.Dataset.to_array` and enhanced - :py:meth:`~xray.DataArray.to_dataset` methods make it easy to switch back +- New ``xray.Dataset.to_array`` and enhanced + ``xray.DataArray.to_dataset`` methods make it easy to switch back and forth between arrays and datasets: .. ipython:: python @@ -3493,7 +3492,7 @@ Enhancements ds.to_array() ds.to_array().to_dataset(dim='variable') -- New :py:meth:`~xray.Dataset.fillna` method to fill missing values, modeled +- New ``xray.Dataset.fillna`` method to fill missing values, modeled off the pandas method of the same name: .. ipython:: python @@ -3505,7 +3504,7 @@ Enhancements index based alignment and broadcasting like standard binary operations. It also can be applied by group, as illustrated in :ref:`fill with climatology`. -- New :py:meth:`~xray.Dataset.assign` and :py:meth:`~xray.Dataset.assign_coords` +- New ``xray.Dataset.assign`` and ``xray.Dataset.assign_coords`` methods patterned off the new :py:meth:`DataFrame.assign ` method in pandas: @@ -3517,8 +3516,8 @@ Enhancements These methods return a new Dataset (or DataArray) with updated data or coordinate variables. -- :py:meth:`~xray.Dataset.sel` now supports the ``method`` parameter, which works - like the paramter of the same name on :py:meth:`~xray.Dataset.reindex`. It +- ``xray.Dataset.sel`` now supports the ``method`` parameter, which works + like the paramter of the same name on ``xray.Dataset.reindex``. It provides a simple interface for doing nearest-neighbor interpolation: .. use verbatim because I can't seem to install pandas 0.16.1 on RTD :( @@ -3555,7 +3554,7 @@ Enhancements - Accessing data from remote datasets now has retrying logic (with exponential backoff) that should make it robust to occasional bad responses from DAP servers. -- You can control the width of the Dataset repr with :py:class:`xray.set_options`. 
+- You can control the width of the Dataset repr with ``xray.set_options``. It can be used either as a context manager, in which case the default is restored outside the context: @@ -3581,7 +3580,7 @@ Deprecations ~~~~~~~~~~~~ - The method ``load_data()`` has been renamed to the more succinct - :py:meth:`~xray.Dataset.load`. + ``xray.Dataset.load``. v0.4.1 (18 March 2015) ---------------------- @@ -3594,7 +3593,7 @@ Enhancements - New documentation sections on :ref:`time-series` and :ref:`combining multiple files`. -- :py:meth:`~xray.Dataset.resample` lets you resample a dataset or data array to +- ``xray.Dataset.resample`` lets you resample a dataset or data array to a new temporal resolution. The syntax is the `same as pandas`_, except you need to supply the time dimension explicitly: @@ -3637,7 +3636,7 @@ Enhancements array.resample('1D', dim='time', how='first') -- :py:meth:`~xray.Dataset.swap_dims` allows for easily swapping one dimension +- ``xray.Dataset.swap_dims`` allows for easily swapping one dimension out for another: .. ipython:: python @@ -3647,7 +3646,7 @@ Enhancements ds.swap_dims({'x': 'y'}) This was possible in earlier versions of xray, but required some contortions. -- :py:func:`~xray.open_dataset` and :py:meth:`~xray.Dataset.to_netcdf` now +- ``xray.open_dataset`` and ``xray.Dataset.to_netcdf`` now accept an ``engine`` argument to explicitly select which underlying library (netcdf4 or scipy) is used for reading/writing a netCDF file. @@ -3682,7 +3681,7 @@ Breaking changes - We now automatically align index labels in arithmetic, dataset construction, merging and updating. This means the need for manually invoking methods like - :py:func:`~xray.align` and :py:meth:`~xray.Dataset.reindex_like` should be + ``xray.align`` and ``xray.Dataset.reindex_like`` should be vastly reduced. :ref:`For arithmetic`, we align @@ -3734,7 +3733,7 @@ Breaking changes (a + b).coords This functionality can be controlled through the ``compat`` option, which - has also been added to the :py:class:`~xray.Dataset` constructor. + has also been added to the ``xray.Dataset`` constructor. - Datetime shortcuts such as ``'time.month'`` now return a ``DataArray`` with the name ``'month'``, not ``'time.month'`` (:issue:`345`). This makes it easier to index the resulting arrays when they are used with ``groupby``: @@ -3772,7 +3771,7 @@ Breaking changes Enhancements ~~~~~~~~~~~~ -- Support for :py:meth:`~xray.Dataset.reindex` with a fill method. This +- Support for ``xray.Dataset.reindex`` with a fill method. This provides a useful shortcut for upsampling: .. ipython:: python @@ -3786,16 +3785,15 @@ Enhancements - Use functions that return generic ndarrays with DataArray.groupby.apply and Dataset.apply (:issue:`327` and :issue:`329`). Thanks Jeff Gerard! - Consolidated the functionality of ``dumps`` (writing a dataset to a netCDF3 - bytestring) into :py:meth:`~xray.Dataset.to_netcdf` (:issue:`333`). -- :py:meth:`~xray.Dataset.to_netcdf` now supports writing to groups in netCDF4 + bytestring) into ``xray.Dataset.to_netcdf`` (:issue:`333`). +- ``xray.Dataset.to_netcdf`` now supports writing to groups in netCDF4 files (:issue:`333`). It also finally has a full docstring -- you should read it! -- :py:func:`~xray.open_dataset` and :py:meth:`~xray.Dataset.to_netcdf` now +- ``xray.open_dataset`` and ``xray.Dataset.to_netcdf`` now work on netCDF3 files when netcdf4-python is not installed as long as scipy is available (:issue:`333`). 
-- The new :py:meth:`Dataset.drop ` and - :py:meth:`DataArray.drop ` methods makes it easy to drop - explicitly listed variables or index labels: +- The new ``xray.Dataset.drop`` and ``xray.DataArray.drop`` methods + makes it easy to drop explicitly listed variables or index labels: .. ipython:: python :okwarning: @@ -3808,7 +3806,7 @@ Enhancements arr = xray.DataArray([1, 2, 3], coords=[('x', list('abc'))]) arr.drop(['a', 'c'], dim='x') -- :py:meth:`~xray.Dataset.broadcast_equals` has been added to correspond to +- ``xray.Dataset.broadcast_equals`` has been added to correspond to the new ``compat`` option. - Long attributes are now truncated at 500 characters when printing a dataset (:issue:`338`). This should make things more convenient for working with @@ -3834,8 +3832,8 @@ Deprecations ~~~~~~~~~~~~ - ``dump`` and ``dumps`` have been deprecated in favor of - :py:meth:`~xray.Dataset.to_netcdf`. -- ``drop_vars`` has been deprecated in favor of :py:meth:`~xray.Dataset.drop`. + ``xray.Dataset.to_netcdf``. +- ``drop_vars`` has been deprecated in favor of ``xray.Dataset.drop``. Future plans ~~~~~~~~~~~~ @@ -3965,10 +3963,10 @@ backwards incompatible changes. New features ~~~~~~~~~~~~ -- Added :py:meth:`~xray.Dataset.count` and :py:meth:`~xray.Dataset.dropna` +- Added ``xray.Dataset.count`` and ``xray.Dataset.dropna`` methods, copied from pandas, for working with missing values (:issue:`247`, :issue:`58`). -- Added :py:meth:`DataArray.to_pandas ` for +- Added ``xray.DataArray.to_pandas`` for converting a data array into the pandas object with the same dimensionality (1D to Series, 2D to DataFrame, etc.) (:issue:`255`). - Support for reading gzipped netCDF3 files (:issue:`239`). @@ -4001,7 +3999,7 @@ New features of arrays of metadata that describe the grid on which the points in "variable" arrays lie. They are preserved (when unambiguous) even though mathematical operations. -- **Dataset math** :py:class:`~xray.Dataset` objects now support all arithmetic +- **Dataset math** ``xray.Dataset`` objects now support all arithmetic operations directly. Dataset-array operations map across all dataset variables; dataset-dataset operations act on each pair of variables with the same name. @@ -4017,7 +4015,7 @@ Backwards incompatible changes - ``Dataset.__eq__`` and ``Dataset.__ne__`` are now element-wise operations instead of comparing all values to obtain a single boolean. Use the method - :py:meth:`~xray.Dataset.equals` instead. + ``xray.Dataset.equals`` instead. Deprecations ~~~~~~~~~~~~ @@ -4026,7 +4024,7 @@ Deprecations - ``Dataset.select_vars`` deprecated: index a ``Dataset`` with a list of variable names instead. - ``DataArray.select_vars`` and ``DataArray.drop_vars`` deprecated: use - :py:meth:`~xray.DataArray.reset_coords` instead. + ``xray.DataArray.reset_coords`` instead. v0.2 (14 August 2014) --------------------- @@ -4038,14 +4036,14 @@ fixes. Here are the highlights: possible to create a DataArray without using a Dataset. This is highlighted in the refreshed :doc:`tutorial`. - You can perform aggregation operations like ``mean`` directly on - :py:class:`~xray.Dataset` objects, thanks to Joe Hamman. These aggregation + ``xray.Dataset`` objects, thanks to Joe Hamman. These aggregation methods also worked on grouped datasets. - xray now works on Python 2.6, thanks to Anna Kuznetsova. 
- A number of methods and attributes were given more sensible (usually shorter) names: ``labeled`` -> ``sel``, ``indexed`` -> ``isel``, ``select`` -> ``select_vars``, ``unselect`` -> ``drop_vars``, ``dimensions`` -> ``dims``, ``coordinates`` -> ``coords``, ``attributes`` -> ``attrs``. -- New :py:meth:`~xray.Dataset.load_data` and :py:meth:`~xray.Dataset.close` +- New ``xray.Dataset.load_data`` and ``xray.Dataset.close`` methods for datasets facilitate lower level of control of data loaded from disk. From 11709105292dc1d87e1d98349200c6916c31b6ed Mon Sep 17 00:00:00 2001 From: Keewis Date: Sun, 17 Nov 2019 18:04:11 +0100 Subject: [PATCH 08/23] silence more warnings in whats-new.rst --- doc/whats-new.rst | 38 +++++++++++++++++++------------------- 1 file changed, 19 insertions(+), 19 deletions(-) diff --git a/doc/whats-new.rst b/doc/whats-new.rst index be41df84d3a..378ac4715d2 100644 --- a/doc/whats-new.rst +++ b/doc/whats-new.rst @@ -413,7 +413,7 @@ Enhancements when the user passes invalid arguments (:issue:`3176`). By `Gregory Gundersen `_. -- :py:func:`filter_by_attrs` now filters the coordinates as well as the variables. +- :py:meth:`Dataset.filter_by_attrs` now filters the coordinates as well as the variables. By `Spencer Jones `_. Bug fixes @@ -1117,7 +1117,7 @@ Enhancements (:issue:`2230`) By `Keisuke Fujii `_. -- :py:meth:`plot()` now accepts the kwargs +- :py:func:`~plot.plot()` now accepts the kwargs ``xscale, yscale, xlim, ylim, xticks, yticks`` just like Pandas. Also ``xincrease=False, yincrease=False`` now use matplotlib's axis inverting methods instead of setting limits. By `Deepak Cherian `_. (:issue:`2224`) @@ -1440,7 +1440,7 @@ Bug fixes - ``ValueError`` is raised when coordinates with the wrong size are assigned to a :py:class:`DataArray`. (:issue:`2112`) By `Keisuke Fujii `_. -- Fixed a bug in :py:meth:`~xarary.DatasArray.rolling` with bottleneck. Also, +- Fixed a bug in :py:meth:`~xarray.DataArray.rolling` with bottleneck. Also, fixed a bug in rolling an integer dask array. (:issue:`2113`) By `Keisuke Fujii `_. - Fixed a bug where `keep_attrs=True` flag was neglected if @@ -1629,7 +1629,7 @@ Enhancements 1D coordinate (e.g. time) and a 2D coordinate (e.g. depth as a function of time) (:issue:`1737`). By `Deepak Cherian `_. -- :py:func:`~plot()` rotates x-axis ticks if x-axis is time. +- :py:func:`~plot.plot()` rotates x-axis ticks if x-axis is time. By `Deepak Cherian `_. - :py:func:`~plot.line()` can draw multiple lines if provided with a 2D variable. @@ -2550,7 +2550,7 @@ Enhancements raising an error (:issue:`1082`). By `Stephan Hoyer `_. - Options for axes sharing between subplots are exposed to - :py:class:`FacetGrid` and :py:func:`~xarray.plot.plot`, so axes + :py:class:`~xarray.plot.FacetGrid` and :py:func:`~xarray.plot.plot`, so axes sharing can be disabled for polar plots. By `Bas Hoonhout `_. - New utility functions :py:func:`~xarray.testing.assert_equal`, @@ -2646,10 +2646,9 @@ Bug fixes Performance improvements ~~~~~~~~~~~~~~~~~~~~~~~~ -- :py:meth:`~xarray.Dataset.isel_points` and - :py:meth:`~xarray.Dataset.sel_points` now use vectorised indexing in numpy - and dask (:issue:`1161`), which can result in several orders of magnitude - speedup. +- ``xarray.Dataset.isel_points`` and ``xarray.Dataset.sel_points`` now + use vectorised indexing in numpy and dask (:issue:`1161`), which can + result in several orders of magnitude speedup. By `Jonathan Chambers `_. .. 
_whats-new.0.8.2: @@ -2758,16 +2757,17 @@ Enhancements any number of ``Dataset`` and/or ``DataArray`` variables. See :ref:`merge` for more details. By `Stephan Hoyer `_. -- DataArray and Dataset method :py:meth:`resample` now supports the +- :py:meth:`DataArray.resample` and :py:meth:`Dataset.resample` now support the ``keep_attrs=False`` option that determines whether variable and dataset attributes are retained in the resampled object. By `Jeremy McGibbon `_. -- Better multi-index support in DataArray and Dataset :py:meth:`sel` and - :py:meth:`loc` methods, which now behave more closely to pandas and which - also accept dictionaries for indexing based on given level names and labels - (see :ref:`multi-level indexing`). By - `Benoit Bovy `_. +- Better multi-index support in :py:meth:`DataArray.sel`, + :py:meth:`DataArray.loc`, :py:meth:`Dataset.sel` and + :py:meth:`Dataset.loc`, which now behave more closely to pandas and + which also accept dictionaries for indexing based on given level names + and labels (see :ref:`multi-level indexing`). + By `Benoit Bovy `_. - New (experimental) decorators :py:func:`~xarray.register_dataset_accessor` and :py:func:`~xarray.register_dataarray_accessor` for registering custom xarray @@ -2783,7 +2783,7 @@ Enhancements allowing more control on the colorbar (:issue:`872`). By `Fabien Maussion `_. -- New Dataset method :py:meth:`filter_by_attrs`, akin to +- New Dataset method :py:meth:`Dataset.filter_by_attrs`, akin to ``netCDF4.Dataset.get_variables_by_attributes``, to easily filter data variables using its attributes. `Filipe Fernandes `_. @@ -2910,7 +2910,7 @@ Enhancements - Numerical operations now return empty objects on no overlapping labels rather than raising ``ValueError`` (:issue:`739`). -- :py:class:`~pd.Series` is now supported as valid input to the ``Dataset`` +- :py:class:`~pandas.Series` is now supported as valid input to the ``Dataset`` constructor (:issue:`740`). Bug fixes @@ -2929,7 +2929,7 @@ Bug fixes reindexing leads to NaN values (:issue:`738`). - ``Dataset.rename`` and ``DataArray.rename`` support the old and new names being the same (:issue:`724`). -- Fix :py:meth:`~xarray.Dataset.from_dataset` for DataFrames with Categorical +- Fix :py:meth:`~xarray.Dataset.from_dataframe` for DataFrames with Categorical column and a MultiIndex index (:issue:`737`). - Fixes to ensure xarray works properly after the upcoming pandas v0.18 and NumPy v1.11 releases. @@ -3101,7 +3101,7 @@ Enhancements moves both data and coordinates. - Assigning a ``pandas`` object directly as a ``Dataset`` variable is now permitted. Its index names correspond to the ``dims`` of the ``Dataset``, and its data is aligned. -- Passing a :py:class:`pandas.DataFrame` or :py:class:`pandas.Panel` to a Dataset constructor +- Passing a :py:class:`pandas.DataFrame` or ``pandas.Panel`` to a Dataset constructor is now permitted. - New function ``xray.broadcast`` for explicitly broadcasting ``DataArray`` and ``Dataset`` objects against each other. 
For example: From 4f8d0f1a439a839e28992ed57b0f0399616f90ce Mon Sep 17 00:00:00 2001 From: Keewis Date: Sun, 17 Nov 2019 18:40:03 +0100 Subject: [PATCH 09/23] require a whatsnew format of Name --- doc/whats-new.rst | 46 +++++++++++++++++++++++----------------------- 1 file changed, 23 insertions(+), 23 deletions(-) diff --git a/doc/whats-new.rst b/doc/whats-new.rst index 378ac4715d2..4fde41a06ec 100644 --- a/doc/whats-new.rst +++ b/doc/whats-new.rst @@ -112,9 +112,9 @@ Bug fixes In addition, the ``allow_lazy`` kwarg to ``reduce`` is deprecated. By `Deepak Cherian `_. - Fix :py:meth:`GroupBy.reduce` when reducing over multiple dimensions. - (:issue:`3402`). By `Deepak Cherian `_ + (:issue:`3402`). By `Deepak Cherian `_ - Allow appending datetime and bool data variables to zarr stores. - (:issue:`3480`). By `Akihiro Matsukawa `_. + (:issue:`3480`). By `Akihiro Matsukawa `_. Documentation ~~~~~~~~~~~~~ @@ -234,9 +234,9 @@ Bug fixes (:issue:`3317`). By `Guido Imperiale `_. - Line plots with the ``x`` or ``y`` argument set to a 1D non-dimensional coord now plot the correct data for 2D DataArrays - (:issue:`3334`). By `Tom Nicholas `_. + (:issue:`3334`). By `Tom Nicholas `_. - Make :py:func:`~xarray.concat` more robust when merging variables present in some datasets but - not others (:issue:`508`). By `Deepak Cherian `_. + not others (:issue:`508`). By `Deepak Cherian `_. - The default behaviour of reducing across all dimensions for :py:class:`~xarray.core.groupby.DataArrayGroupBy` objects has now been properly removed as was done for :py:class:`~xarray.core.groupby.DatasetGroupBy` in 0.13.0 (:issue:`3337`). @@ -244,18 +244,18 @@ Bug fixes Also raise nicer error message when no groups are created (:issue:`1764`). By `Deepak Cherian `_. - Fix error in concatenating unlabeled dimensions (:pull:`3362`). - By `Deepak Cherian `_. + By `Deepak Cherian `_. - Warn if the ``dim`` kwarg is passed to rolling operations. This is redundant since a dimension is specified when the :py:class:`DatasetRolling` or :py:class:`DataArrayRolling` object is created. - (:pull:`3362`). By `Deepak Cherian `_. + (:pull:`3362`). By `Deepak Cherian `_. Documentation ~~~~~~~~~~~~~ - Created a glossary of important xarray terms (:issue:`2410`, :pull:`3352`). - By `Gregory Gundersen `_. + By `Gregory Gundersen `_. - Created a "How do I..." section (:ref:`howdoi`) for solutions to common questions. (:pull:`3357`). - By `Deepak Cherian `_. + By `Deepak Cherian `_. - Add examples for :py:meth:`Dataset.swap_dims` and :py:meth:`DataArray.swap_dims` (pull:`3331`, pull:`3331`). By `Justus Magin `_. - Add examples for :py:meth:`align`, :py:meth:`merge`, :py:meth:`combine_by_coords`, @@ -263,7 +263,7 @@ Documentation :py:meth:`Dataset.assign`, :py:meth:`Dataset.reindex`, :py:meth:`Dataset.fillna` (:pull:`3328`). By `Anderson Banihirwe `_. - Fixed documentation to clean up an unwanted file created in ``ipython`` example - (:pull:`3353`). By `Gregory Gundersen `_. + (:pull:`3353`). By `Gregory Gundersen `_. .. _whats-new.0.13.0: @@ -353,7 +353,7 @@ New functions/methods - Added :py:meth:`DataArray.broadcast_like` and :py:meth:`Dataset.broadcast_like`. By `Deepak Cherian `_ and `David Mertz - `_. + `_. - Dataset plotting API for visualizing dependencies between two DataArrays! Currently only :py:meth:`Dataset.plot.scatter` is implemented. @@ -406,7 +406,7 @@ Enhancements labels by using both ``dim`` and ``labels`` or using a :py:class:`~xarray.core.coordinates.DataArrayCoordinates` object are deprecated (:issue:`2910`). 
- By `Gregory Gundersen `_. + By `Gregory Gundersen `_. - Added examples of :py:meth:`Dataset.set_index` and :py:meth:`DataArray.set_index`, as well are more specific error messages @@ -440,7 +440,7 @@ Bug fixes By `Hasan Ahmad `_. - Fixed bug in ``combine_by_coords()`` causing a `ValueError` if the input had an unused dimension with coordinates which were not monotonic (:issue:`3150`). - By `Tom Nicholas `_. + By `Tom Nicholas `_. - Fixed crash when applying ``distributed.Client.compute()`` to a DataArray (:issue:`3171`). By `Guido Imperiale `_. - Better error message when using groupby on an empty DataArray (:issue:`3037`). @@ -464,7 +464,7 @@ Documentation - Fixed documentation to clean up unwanted files created in ``ipython`` examples (:issue:`3227`). - By `Gregory Gundersen `_. + By `Gregory Gundersen `_. .. _whats-new.0.12.3: @@ -534,7 +534,7 @@ New functions/methods To avoid FutureWarnings switch to using ``combine_nested`` or ``combine_by_coords``, (or set the ``combine`` argument in ``open_mfdataset``). (:issue:`2159`) - By `Tom Nicholas `_. + By `Tom Nicholas `_. - :py:meth:`~xarray.DataArray.rolling_exp` and :py:meth:`~xarray.Dataset.rolling_exp` added, similar to pandas' @@ -580,7 +580,7 @@ Enhancements to existing functionality :py:meth:`DataArray.groupby_bins`, and :py:meth:`DataArray.resample` now accept a keyword argument ``restore_coord_dims`` which keeps the order of the dimensions of multi-dimensional coordinates intact (:issue:`1856`). - By `Peter Hausamann `_. + By `Peter Hausamann `_. - Clean up Python 2 compatibility in code (:issue:`2950`) By `Guido Imperiale `_. - Better warning message when supplying invalid objects to ``xr.merge`` @@ -781,7 +781,7 @@ Bug fixes `Spencer Clark `_. - Line plots with the ``x`` argument set to a non-dimensional coord now plot the correct data for 1D DataArrays. - (:issue:`2725`). By `Tom Nicholas `_. + (:issue:`2725`). By `Tom Nicholas `_. - Subtracting a scalar ``cftime.datetime`` object from a :py:class:`CFTimeIndex` now results in a :py:class:`pandas.TimedeltaIndex` instead of raising a ``TypeError`` (:issue:`2671`). By `Spencer Clark @@ -797,12 +797,12 @@ Bug fixes By `Yohai Bar-Sinai `_. - Fixed error when trying to reduce a DataArray using a function which does not require an axis argument. (:issue:`2768`) - By `Tom Nicholas `_. + By `Tom Nicholas `_. - Concatenating a sequence of :py:class:`~xarray.DataArray` with varying names sets the name of the output array to ``None``, instead of the name of the first input array. If the names are the same it sets the name to that, instead to the name of the first DataArray in the list as it did before. - (:issue:`2775`). By `Tom Nicholas `_. + (:issue:`2775`). By `Tom Nicholas `_. - Per `CF conventions `_, @@ -822,7 +822,7 @@ Bug fixes (e.g. '2000-01-01T00:00:00-05:00') no longer raises an error (:issue:`2649`). By `Spencer Clark `_. - Fixed performance regression with ``open_mfdataset`` (:issue:`2662`). - By `Tom Nicholas `_. + By `Tom Nicholas `_. - Fixed supplying an explicit dimension in the ``concat_dim`` argument to to ``open_mfdataset`` (:issue:`2647`). By `Ben Root `_. @@ -887,13 +887,13 @@ Enhancements but were not explicitly closed. This is mostly useful for debugging; we recommend enabling it in your test suites if you use xarray for IO. By `Stephan Hoyer `_ -- Support Dask ``HighLevelGraphs`` by `Matthew Rocklin `_. +- Support Dask ``HighLevelGraphs`` by `Matthew Rocklin `_. 
- :py:meth:`DataArray.resample` and :py:meth:`Dataset.resample` now supports the ``loffset`` kwarg just like Pandas. By `Deepak Cherian `_ - Datasets are now guaranteed to have a ``'source'`` encoding, so the source file name is always stored (:issue:`2550`). - By `Tom Nicholas `_. + By `Tom Nicholas `_. - The ``apply`` methods for ``DatasetGroupBy``, ``DataArrayGroupBy``, ``DatasetResample`` and ``DataArrayResample`` now support passing positional arguments to the applied function as a tuple to the ``args`` argument. @@ -1015,7 +1015,7 @@ Enhancements dataset and dataarray attrs upon operations. The option is set with ``xarray.set_options(keep_attrs=True)``, and the default is to use the old behaviour. - By `Tom Nicholas `_. + By `Tom Nicholas `_. - Added a new backend for the GRIB file format based on ECMWF *cfgrib* python driver and *ecCodes* C-library. (:issue:`2475`) By `Alessandro Amici `_, @@ -1071,7 +1071,7 @@ Bug fixes CFTimeIndex is now allowed (:issue:`2484`). By `Spencer Clark `_. - Avoid use of Dask's deprecated ``get=`` parameter in tests - by `Matthew Rocklin `_. + by `Matthew Rocklin `_. - An ``OverflowError`` is now accurately raised and caught during the encoding process if a reference date is used that is so distant that the dates must be encoded using cftime rather than NumPy (:issue:`2272`). From 7c4211e2695592bb2061701f091066cc40b2bd3e Mon Sep 17 00:00:00 2001 From: Keewis Date: Sun, 17 Nov 2019 22:44:00 +0100 Subject: [PATCH 10/23] rename the second cf conventions link --- doc/whats-new.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/whats-new.rst b/doc/whats-new.rst index 4fde41a06ec..13a8cfcc149 100644 --- a/doc/whats-new.rst +++ b/doc/whats-new.rst @@ -804,7 +804,7 @@ Bug fixes instead to the name of the first DataArray in the list as it did before. (:issue:`2775`). By `Tom Nicholas `_. -- Per `CF conventions +- Per the `CF conventions section on calendars `_, specifying ``'standard'`` as the calendar type in :py:meth:`~xarray.cftime_range` now correctly refers to the ``'gregorian'`` From cee59e6418308474eeeb4d805912466bd857aae4 Mon Sep 17 00:00:00 2001 From: Keewis Date: Sun, 17 Nov 2019 22:45:54 +0100 Subject: [PATCH 11/23] silence more sphinx warnings --- doc/whats-new.rst | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/doc/whats-new.rst b/doc/whats-new.rst index 13a8cfcc149..4c7f2d34487 100644 --- a/doc/whats-new.rst +++ b/doc/whats-new.rst @@ -317,7 +317,7 @@ Breaking changes - :py:meth:`DataArray.to_dataset` requires ``name`` to be passed as a kwarg (previously ambiguous positional arguments were deprecated) - Reindexing with variables of a different dimension now raise an error (previously deprecated) -- :py:func:`~xarray.broadcast_array` is removed (previously deprecated in favor of +- ``xarray.broadcast_array`` is removed (previously deprecated in favor of :py:func:`~xarray.broadcast`) - :py:meth:`Variable.expand_dims` is removed (previously deprecated in favor of :py:meth:`Variable.set_dims`) @@ -402,7 +402,7 @@ Enhancements with ``engine="h5netcdf"``. It is passed to :py:func:`h5netcdf.File`. By `Ulrich Herter `_. -- :py:meth:`~xarray.Dataset.drop` now supports keyword arguments; dropping index +- ``xarray.Dataset.drop`` now supports keyword arguments; dropping index labels by using both ``dim`` and ``labels`` or using a :py:class:`~xarray.core.coordinates.DataArrayCoordinates` object are deprecated (:issue:`2910`). 
@@ -585,7 +585,7 @@ Enhancements to existing functionality By `Guido Imperiale `_. - Better warning message when supplying invalid objects to ``xr.merge`` (:issue:`2948`). By `Mathias Hauser `_. -- Add ``errors`` keyword argument to :py:meth:`Dataset.drop` and :py:meth:`Dataset.drop_dims` +- Add ``errors`` keyword argument to ``Dataset.drop`` and :py:meth:`Dataset.drop_dims` that allows ignoring errors if a passed label or dimension is not in the dataset (:issue:`2994`). By `Andrew Ross `_. @@ -1184,7 +1184,7 @@ Bug fixes - Follow up the renamings in dask; from dask.ghost to dask.overlap By `Keisuke Fujii `_. -- Now :py:func:`xr.apply_ufunc` raises a ValueError when the size of +- Now :py:func:`~xarray.apply_ufunc` raises a ValueError when the size of ``input_core_dims`` is inconsistent with the number of arguments. (:issue:`2341`) By `Keisuke Fujii `_. @@ -1477,7 +1477,7 @@ Enhancements supplied list, returning a bool array. See :ref:`selecting values with isin` for full details. Similar to the ``np.isin`` function. By `Maximilian Roos `_. -- Some speed improvement to construct :py:class:`~xarray.DataArrayRolling` +- Some speed improvement to construct :py:class:`~xarray.core.rolling.DataArrayRolling` object (:issue:`1993`) By `Keisuke Fujii `_. - Handle variables with different values for ``missing_value`` and @@ -1557,8 +1557,8 @@ Enhancements NumPy. By `Stephan Hoyer `_. - Improve :py:func:`~xarray.DataArray.rolling` logic. - :py:func:`~xarray.DataArrayRolling` object now supports - :py:func:`~xarray.DataArrayRolling.construct` method that returns a view + :py:func:`~xarray.core.rolling.DataArrayRolling` object now supports + :py:func:`~xarray.core.rolling.DataArrayRolling.construct` method that returns a view of the DataArray / Dataset object with the rolling-window dimension added to the last axis. This enables more flexible operation, such as strided rolling, windowed rolling, ND-rolling, short-time FFT and convolution. @@ -1904,7 +1904,7 @@ Enhancements concatenated array/dataset (:issue:`1521`). By `Guido Imperiale `_. -- Speed-up (x 100) of :py:func:`~xarray.conventions.decode_cf_datetime`. +- Speed-up (x 100) of ``xarray.conventions.decode_cf_datetime``. By `Christian Chwala `_. **IO related improvements** From 562567b250cc6686d1d3c54dff44cb229c6d10d2 Mon Sep 17 00:00:00 2001 From: Keewis Date: Mon, 18 Nov 2019 01:03:13 +0100 Subject: [PATCH 12/23] get interpolate_na docstrings in sync with master --- xarray/core/dataarray.py | 4 +--- xarray/core/dataset.py | 4 +--- 2 files changed, 2 insertions(+), 6 deletions(-) diff --git a/xarray/core/dataarray.py b/xarray/core/dataarray.py index 69f79e43dc9..a8d7c0df35c 100644 --- a/xarray/core/dataarray.py +++ b/xarray/core/dataarray.py @@ -2033,9 +2033,7 @@ def interpolate_na( ---------- dim : str Specifies the dimension along which to interpolate. - method : {'linear', 'nearest', 'zero', 'slinear', 'quadratic', - 'cubic', 'polynomial', 'barycentric', 'krog', 'pchip', - 'spline', 'akima'}, optional + method : str, optional String indicating which method to use for interpolation: - 'linear': linear interpolation (Default). Additional keyword diff --git a/xarray/core/dataset.py b/xarray/core/dataset.py index b73004f9ef5..e21741722c3 100644 --- a/xarray/core/dataset.py +++ b/xarray/core/dataset.py @@ -3917,9 +3917,7 @@ def interpolate_na( dim : str Specifies the dimension along which to interpolate. 
- method : {'linear', 'nearest', 'zero', 'slinear', 'quadratic', 'cubic', - 'polynomial', 'barycentric', 'krog', 'pchip', - 'spline'}, optional + method : str, optional String indicating which method to use for interpolation: - 'linear': linear interpolation (Default). Additional keyword From 6e223eae295ed4af035c4936ae9a906cd89eefc6 Mon Sep 17 00:00:00 2001 From: Keewis Date: Mon, 18 Nov 2019 01:03:57 +0100 Subject: [PATCH 13/23] fix sphinx warnings for interpolate_na docstrings --- xarray/core/dataarray.py | 2 ++ xarray/core/dataset.py | 2 ++ 2 files changed, 4 insertions(+) diff --git a/xarray/core/dataarray.py b/xarray/core/dataarray.py index a8d7c0df35c..02495e3bff1 100644 --- a/xarray/core/dataarray.py +++ b/xarray/core/dataarray.py @@ -2044,6 +2044,7 @@ def interpolate_na( provided. - 'barycentric', 'krog', 'pchip', 'spline', 'akima': use their respective :py:class:`scipy.interpolate` classes. + use_coordinate : bool, str, default True Specifies which index to use as the x values in the interpolation formulated as `y = f(x)`. If False, values are treated as if @@ -2063,6 +2064,7 @@ def interpolate_na( - a string that is valid input for pandas.to_timedelta - a :py:class:`numpy.timedelta64` object - a :py:class:`pandas.Timedelta` object + Otherwise, ``max_gap`` must be an int or a float. Use of ``max_gap`` with unlabeled dimensions has not been implemented yet. Gap length is defined as the difference between coordinate values at the first data point after a gap and the last value diff --git a/xarray/core/dataset.py b/xarray/core/dataset.py index e21741722c3..09d8acb9213 100644 --- a/xarray/core/dataset.py +++ b/xarray/core/dataset.py @@ -3928,6 +3928,7 @@ def interpolate_na( provided. - 'barycentric', 'krog', 'pchip', 'spline', 'akima': use their respective :py:class:`scipy.interpolate` classes. + use_coordinate : bool, str, default True Specifies which index to use as the x values in the interpolation formulated as `y = f(x)`. If False, values are treated as if @@ -3947,6 +3948,7 @@ def interpolate_na( - a string that is valid input for pandas.to_timedelta - a :py:class:`numpy.timedelta64` object - a :py:class:`pandas.Timedelta` object + Otherwise, ``max_gap`` must be an int or a float. Use of ``max_gap`` with unlabeled dimensions has not been implemented yet. Gap length is defined as the difference between coordinate values at the first data point after a gap and the last value From c8559e440af302c599fa938f1a8a3244c6554011 Mon Sep 17 00:00:00 2001 From: Keewis Date: Mon, 18 Nov 2019 02:51:06 +0100 Subject: [PATCH 14/23] update references to old documentation sections --- doc/whats-new.rst | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/doc/whats-new.rst b/doc/whats-new.rst index 4c7f2d34487..c10b4cf7b88 100644 --- a/doc/whats-new.rst +++ b/doc/whats-new.rst @@ -1267,7 +1267,7 @@ Enhancements - :py:meth:`~xarray.DataArray.interp` and :py:meth:`~xarray.Dataset.interp` methods are newly added. - See :ref:`interpolating values with interp` for the detail. + See :ref:`interp` for the detail. (:issue:`2079`) By `Keisuke Fujii `_. @@ -1384,7 +1384,7 @@ non-standard calendars used in climate modeling. Documentation ~~~~~~~~~~~~~ -- New FAQ entry, :ref:`faq.other_projects`. +- New FAQ entry, :ref:`related-projects`. By `Deepak Cherian `_. - :ref:`assigning_values` now includes examples on how to select and assign values to a :py:class:`~xarray.DataArray` with ``.loc``. 
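A hedged sketch of ``interpolate_na`` with the ``max_gap`` argument documented
in the hunks above (hypothetical values; assumes a release in which
``max_gap`` is available)::

    import numpy as np
    import xarray as xr

    da = xr.DataArray(
        [0.0, np.nan, 2.0, np.nan, np.nan, np.nan, 6.0],
        dims="x",
        coords={"x": np.arange(7)},
    )

    # linear interpolation along the coordinate values of "x"
    filled = da.interpolate_na(dim="x", method="linear")

    # only fill gaps whose length, measured in coordinate units, is <= max_gap
    partly_filled = da.interpolate_na(dim="x", method="linear", max_gap=2)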
@@ -3251,7 +3251,7 @@ Enhancements - Variables in netCDF files with multiple missing values are now decoded as NaN after issuing a warning if open_dataset is called with mask_and_scale=True. - We clarified our rules for when the result from an xray operation is a copy - vs. a view (see :ref:`copies vs views` for more details). + vs. a view (see :ref:`copies_vs_views` for more details). - Dataset variables are now written to netCDF files in order of appearance when using the netcdf4 backend (:issue:`479`). @@ -4034,7 +4034,7 @@ fixes. Here are the highlights: - There is now a direct constructor for ``DataArray`` objects, which makes it possible to create a DataArray without using a Dataset. This is highlighted - in the refreshed :doc:`tutorial`. + in the refreshed ``tutorial``. - You can perform aggregation operations like ``mean`` directly on ``xray.Dataset`` objects, thanks to Joe Hamman. These aggregation methods also worked on grouped datasets. From f2cf661adbd9ce52b4c1ca59d974eeb9e2f631ed Mon Sep 17 00:00:00 2001 From: Keewis Date: Mon, 18 Nov 2019 02:51:44 +0100 Subject: [PATCH 15/23] cut the link to h5netcdf.File --- doc/whats-new.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/whats-new.rst b/doc/whats-new.rst index c10b4cf7b88..5a81896f2fa 100644 --- a/doc/whats-new.rst +++ b/doc/whats-new.rst @@ -399,7 +399,7 @@ Enhancements By `Gerardo Rivera `_. - :py:func:`~xarray.Dataset.to_netcdf()` now supports the ``invalid_netcdf`` kwarg when used - with ``engine="h5netcdf"``. It is passed to :py:func:`h5netcdf.File`. + with ``engine="h5netcdf"``. It is passed to ``h5netcdf.File``. By `Ulrich Herter `_. - ``xarray.Dataset.drop`` now supports keyword arguments; dropping index From 58d243fd43d0181d38af363e31c8c2d9739e1067 Mon Sep 17 00:00:00 2001 From: Keewis Date: Mon, 18 Nov 2019 18:06:53 +0100 Subject: [PATCH 16/23] use the correct reference types for numpy --- doc/computation.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/doc/computation.rst b/doc/computation.rst index cf24d7d01f8..1ac30f55ee7 100644 --- a/doc/computation.rst +++ b/doc/computation.rst @@ -326,7 +326,7 @@ Broadcasting by dimension name the numpy parlance) by dimension name instead of axis order. With xarray, you do not need to transpose arrays or insert dimensions of length 1 to get array operations to work, as commonly done in numpy with :py:func:`numpy.reshape` or -:py:const:`numpy.newaxis`. +:py:data:`numpy.newaxis`. This is best illustrated by a few examples. Consider two one-dimensional arrays with different sizes aligned along different dimensions: @@ -566,7 +566,7 @@ to set ``axis=-1``. As an example, here is how we would wrap Because ``apply_ufunc`` follows a standard convention for ufuncs, it plays nicely with tools for building vectorized functions, like -:func:`numpy.broadcast_arrays` and :func:`numpy.vectorize`. For high performance +:py:func:`numpy.broadcast_arrays` and :py:class:`numpy.vectorize`. For high performance needs, consider using Numba's :doc:`vectorize and guvectorize `. 
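The broadcasting-by-name and ``apply_ufunc`` behaviour discussed in the
``computation.rst`` hunk above can be sketched as follows (hypothetical
arrays)::

    import numpy as np
    import xarray as xr

    a = xr.DataArray(np.arange(3.0), dims="x")
    b = xr.DataArray(np.arange(4.0), dims="y")

    # alignment happens by dimension name, no reshape or np.newaxis needed
    print((a * b).dims)  # ('x', 'y')

    def magnitude(u, v):
        # an ordinary NumPy function that follows ufunc broadcasting rules
        return np.sqrt(u ** 2 + v ** 2)

    # apply_ufunc wraps the NumPy function while keeping the xarray labels
    print(xr.apply_ufunc(magnitude, a, b).dims)  # ('x', 'y')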
In addition to wrapping functions, ``apply_ufunc`` can automatically parallelize From 0112190ef5dfd19474ed324d679104d02a3b9669 Mon Sep 17 00:00:00 2001 From: Keewis Date: Mon, 18 Nov 2019 18:07:55 +0100 Subject: [PATCH 17/23] update the reference to atop (dask renamed it to blockwise) --- doc/dask.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/dask.rst b/doc/dask.rst index 11f378aa376..ed99ffaa896 100644 --- a/doc/dask.rst +++ b/doc/dask.rst @@ -285,7 +285,7 @@ automate `embarrassingly parallel `__ "map" type operations where a function written for processing NumPy arrays should be repeatedly applied to xarray objects containing Dask arrays. It works similarly to -:py:func:`dask.array.map_blocks` and :py:func:`dask.array.atop`, but without +:py:func:`dask.array.map_blocks` and :py:func:`dask.array.blockwise`, but without requiring an intermediate layer of abstraction. For the best performance when using Dask's multi-threaded scheduler, wrap a From 935f68c9254536f0683714fa7c2f72a5ac419907 Mon Sep 17 00:00:00 2001 From: Keewis Date: Mon, 18 Nov 2019 18:23:33 +0100 Subject: [PATCH 18/23] rewrite numpy docstrings --- xarray/ufuncs.py | 36 +++++++++++++++++++++++++++++++++++- 1 file changed, 35 insertions(+), 1 deletion(-) diff --git a/xarray/ufuncs.py b/xarray/ufuncs.py index 0f6fc3b1334..0d24b7af322 100644 --- a/xarray/ufuncs.py +++ b/xarray/ufuncs.py @@ -13,6 +13,7 @@ Once NumPy 1.10 comes out with support for overriding ufuncs, this module will hopefully no longer be necessary. """ +import textwrap import warnings as _warnings import numpy as _np @@ -78,10 +79,43 @@ def __call__(self, *args, **kwargs): return res +def _skip_signature(doc): + if doc.startswith(name): + signature_end = doc.find("\n\n") + doc = doc[signature_end + 2 :] + + return doc + + +def _remove_unused_reference_labels(doc): + max_references = 5 + for num in range(max_references): + label = f".. [{num}]" + reference = f"[{num}]_" + index = f"{num}. " + + if label not in doc or reference in doc: + continue + + doc = doc.replace(label, index) + + return doc + + +def _dedent(doc): + if not isinstance(doc, str): + return doc + + return textwrap.dedent(doc) + + def _create_op(name): func = _UFuncDispatcher(name) func.__name__ = name - doc = getattr(_np, name).__doc__ + doc = _remove_unused_reference_labels( + _skip_signature(_dedent(getattr(_np, name).__doc__)) + ) + func.__doc__ = ( "xarray specific variant of numpy.%s. Handles " "xarray.Dataset, xarray.DataArray, xarray.Variable, " From c61d19ba03a5d7e3d5b0ef0f0d0bdd01af4aac3e Mon Sep 17 00:00:00 2001 From: Keewis Date: Mon, 18 Nov 2019 19:02:18 +0100 Subject: [PATCH 19/23] guard against non-str documentation --- xarray/ufuncs.py | 12 +++++++++--- 1 file changed, 9 insertions(+), 3 deletions(-) diff --git a/xarray/ufuncs.py b/xarray/ufuncs.py index 0d24b7af322..44931051707 100644 --- a/xarray/ufuncs.py +++ b/xarray/ufuncs.py @@ -80,6 +80,9 @@ def __call__(self, *args, **kwargs): def _skip_signature(doc): + if not isinstance(doc, str): + return doc + if doc.startswith(name): signature_end = doc.find("\n\n") doc = doc[signature_end + 2 :] @@ -88,6 +91,9 @@ def _skip_signature(doc): def _remove_unused_reference_labels(doc): + if not isinstance(doc, str): + return doc + max_references = 5 for num in range(max_references): label = f".. 
[{num}]" @@ -112,9 +118,9 @@ def _dedent(doc): def _create_op(name): func = _UFuncDispatcher(name) func.__name__ = name - doc = _remove_unused_reference_labels( - _skip_signature(_dedent(getattr(_np, name).__doc__)) - ) + doc = getattr(_np, name).__doc__ + + doc = _remove_unused_reference_labels(_skip_signature(_dedent(doc))) func.__doc__ = ( "xarray specific variant of numpy.%s. Handles " From bdc8594089dc0f0d553978c01a288dcc6b2a3672 Mon Sep 17 00:00:00 2001 From: Keewis Date: Mon, 18 Nov 2019 19:34:32 +0100 Subject: [PATCH 20/23] pass name to skip_signature --- xarray/ufuncs.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/xarray/ufuncs.py b/xarray/ufuncs.py index 44931051707..ae2c5c574b6 100644 --- a/xarray/ufuncs.py +++ b/xarray/ufuncs.py @@ -79,7 +79,7 @@ def __call__(self, *args, **kwargs): return res -def _skip_signature(doc): +def _skip_signature(doc, name): if not isinstance(doc, str): return doc @@ -120,7 +120,7 @@ def _create_op(name): func.__name__ = name doc = getattr(_np, name).__doc__ - doc = _remove_unused_reference_labels(_skip_signature(_dedent(doc))) + doc = _remove_unused_reference_labels(_skip_signature(_dedent(doc), name)) func.__doc__ = ( "xarray specific variant of numpy.%s. Handles " From a6453d2a9b0954097de92acb1389c64494cfd676 Mon Sep 17 00:00:00 2001 From: Keewis Date: Mon, 18 Nov 2019 23:21:27 +0100 Subject: [PATCH 21/23] remove links to pandas.Panel --- doc/data-structures.rst | 6 +++--- doc/pandas.rst | 2 +- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/doc/data-structures.rst b/doc/data-structures.rst index 93cdc7e9765..d5c8fa961f7 100644 --- a/doc/data-structures.rst +++ b/doc/data-structures.rst @@ -45,7 +45,7 @@ Creating a DataArray The :py:class:`~xarray.DataArray` constructor takes: - ``data``: a multi-dimensional array of values (e.g., a numpy ndarray, - :py:class:`~pandas.Series`, :py:class:`~pandas.DataFrame` or :py:class:`~pandas.Panel`) + :py:class:`~pandas.Series`, :py:class:`~pandas.DataFrame` or ``pandas.Panel``) - ``coords``: a list or dictionary of coordinates. If a list, it should be a list of tuples where the first element is the dimension name and the second element is the corresponding coordinate array_like object. @@ -125,7 +125,7 @@ As a dictionary with coords across multiple dimensions: If you create a ``DataArray`` by supplying a pandas :py:class:`~pandas.Series`, :py:class:`~pandas.DataFrame` or -:py:class:`~pandas.Panel`, any non-specified arguments in the +``pandas.Panel``, any non-specified arguments in the ``DataArray`` constructor will be filled in from the pandas object: .. ipython:: python @@ -301,7 +301,7 @@ names, and its data is aligned to any existing dimensions. You can also create an dataset from: -- A :py:class:`pandas.DataFrame` or :py:class:`pandas.Panel` along its columns and items +- A :py:class:`pandas.DataFrame` or ``pandas.Panel`` along its columns and items respectively, by passing it into the :py:class:`~xarray.Dataset` directly - A :py:class:`pandas.DataFrame` with :py:meth:`Dataset.from_dataframe `, which will additionally handle MultiIndexes See :ref:`pandas` diff --git a/doc/pandas.rst b/doc/pandas.rst index 4f3088b4c34..72abf6609f6 100644 --- a/doc/pandas.rst +++ b/doc/pandas.rst @@ -112,7 +112,7 @@ automatically stacking them into a ``MultiIndex``. 
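A small sketch of building a ``DataArray`` from a pandas object, as described
in the ``data-structures.rst`` hunk above (hypothetical data)::

    import pandas as pd
    import xarray as xr

    series = pd.Series(
        [1, 2, 3],
        index=pd.Index(["a", "b", "c"], name="letters"),
        name="counts",
    )

    # unspecified dims, coords and name are filled in from the pandas object
    da = xr.DataArray(series)
    print(da.dims)   # ('letters',)
    print(da.name)   # 'counts'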
:py:meth:`DataArray.to_pandas() ` is a shortcut that lets you convert a DataArray directly into a pandas object with the same dimensionality (i.e., a 1D array is converted to a :py:class:`~pandas.Series`, -2D to :py:class:`~pandas.DataFrame` and 3D to :py:class:`~pandas.Panel`): +2D to :py:class:`~pandas.DataFrame` and 3D to ``pandas.Panel``): .. ipython:: python From a220c7a14ef9ad9b7145e5336c86df6df32ad5d7 Mon Sep 17 00:00:00 2001 From: Keewis Date: Mon, 18 Nov 2019 23:22:37 +0100 Subject: [PATCH 22/23] convince sphinx to create pages astype and groupby().quantile --- doc/api-hidden.rst | 3 +++ 1 file changed, 3 insertions(+) diff --git a/doc/api-hidden.rst b/doc/api-hidden.rst index 4cd2ba0cc7f..027c732697f 100644 --- a/doc/api-hidden.rst +++ b/doc/api-hidden.rst @@ -32,9 +32,11 @@ core.groupby.DatasetGroupBy.first core.groupby.DatasetGroupBy.last core.groupby.DatasetGroupBy.fillna + core.groupby.DatasetGroupBy.quantile core.groupby.DatasetGroupBy.where Dataset.argsort + Dataset.astype Dataset.clip Dataset.conj Dataset.conjugate @@ -73,6 +75,7 @@ core.groupby.DataArrayGroupBy.first core.groupby.DataArrayGroupBy.last core.groupby.DataArrayGroupBy.fillna + core.groupby.DataArrayGroupBy.quantile core.groupby.DataArrayGroupBy.where DataArray.argsort From 17fe69df46465e8b904db791ff12dfdef95b84a8 Mon Sep 17 00:00:00 2001 From: Keewis Date: Tue, 19 Nov 2019 01:43:43 +0100 Subject: [PATCH 23/23] more warnings --- xarray/coding/cftimeindex.py | 2 +- xarray/core/dataarray.py | 2 +- xarray/core/groupby.py | 1 + 3 files changed, 3 insertions(+), 2 deletions(-) diff --git a/xarray/coding/cftimeindex.py b/xarray/coding/cftimeindex.py index 559c5e16287..4005d4fbf6d 100644 --- a/xarray/coding/cftimeindex.py +++ b/xarray/coding/cftimeindex.py @@ -506,7 +506,7 @@ def strftime(self, date_format): Returns ------- - Index + pandas.Index Index of formatted strings Examples diff --git a/xarray/core/dataarray.py b/xarray/core/dataarray.py index 02495e3bff1..1205362ad91 100644 --- a/xarray/core/dataarray.py +++ b/xarray/core/dataarray.py @@ -239,7 +239,7 @@ class DataArray(AbstractArray, DataWithCoords): ---------- dims : tuple Dimension names associated with this array. - values : np.ndarray + values : numpy.ndarray Access or modify DataArray values as a numpy array. coords : dict-like Dictionary of DataArray objects that label values along each dimension. diff --git a/xarray/core/groupby.py b/xarray/core/groupby.py index 536cc2100c4..ec752721781 100644 --- a/xarray/core/groupby.py +++ b/xarray/core/groupby.py @@ -573,6 +573,7 @@ def quantile(self, q, dim=None, interpolation="linear", keep_attrs=None): This optional parameter specifies the interpolation method to use when the desired quantile lies between two data points ``i < j``: + * linear: ``i + (j - i) * fraction``, where ``fraction`` is the fractional part of the index surrounded by ``i`` and ``j``.
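For reference, a hedged sketch of the groupby ``quantile`` method whose
documentation pages are added above (hypothetical data)::

    import numpy as np
    import xarray as xr

    da = xr.DataArray(
        np.arange(6.0),
        dims="x",
        coords={"label": ("x", ["a", "a", "b", "b", "b", "a"])},
    )

    # per-group median, using the default 'linear' interpolation
    print(da.groupby("label").quantile(0.5))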