diff --git a/changelog/507.breaking.rst b/changelog/507.breaking.rst
index ccb1e7ca..e202f63a 100644
--- a/changelog/507.breaking.rst
+++ b/changelog/507.breaking.rst
@@ -1,9 +1,16 @@
-The minimum supported versions of dependencies and Python have been updated, this release requires:
-* Python 3.11
-* asdf 2.15 (and plugin version bumps)
-* dask 2023.2
-* matplotlib 3.7
-* ndcube 2.1
-* numpy 1.25
-* parfive 2.1
-* sunpy 5.0
+This release of ``dkist`` includes both scheduled bumps to our dependencies and a special bump of gwcs to version 0.24, along with its associated dependencies.
+The upgrade to gwcs 0.24 fixes the inverse transform (world to pixel) for VISP WCSs.
+
+* Python >= 3.11
+* dask >= 2023.2
+* gwcs >= 0.24
+* matplotlib >= 3.7
+* ndcube >= 2.1
+* numpy >= 1.25
+* parfive >= 2.1
+* sunpy >= 5.0.7
+* asdf >= 3.3
+* asdf-astropy >= 0.5
+* asdf-coordinates-schemas >= 0.3
+* asdf-transform-schemas >= 0.5
+* asdf-wcs-schemas >= 0.4
diff --git a/dkist/conftest.py b/dkist/conftest.py
index 4e466c47..62c0f005 100644
--- a/dkist/conftest.py
+++ b/dkist/conftest.py
@@ -421,8 +421,9 @@ def croppable_visp_dataset(tmp_path_factory):


 @pytest.fixture(scope="session")
-def croppable_cryo_dataset():
+def croppable_cryo_dataset(tmp_path_factory):
+    cryodir = tmp_path_factory.mktemp("data")
     with gzip.open(Path(rootdir) / "croppable_cryo.asdf.gz", mode="rb") as gfo:
-        with open(rootdir / "croppable_cryo.asdf", mode="wb") as afo:
+        with open(cryodir / "croppable_cryo.asdf", mode="wb") as afo:
             afo.write(gfo.read())
-    return load_dataset(Path(rootdir) / "croppable_cryo.asdf")
+    return load_dataset(cryodir / "croppable_cryo.asdf")
diff --git a/dkist/dataset/loader.py b/dkist/dataset/loader.py
index bb3ec04b..6d20ca86 100644
--- a/dkist/dataset/loader.py
+++ b/dkist/dataset/loader.py
@@ -22,12 +22,6 @@
 ASDF_FILENAME_PATTERN = r"^(?P<instrument>[A-Z-]+)_L1_(?P<timestamp>\d{8}T\d{6})_(?P<datasetid>[A-Z]{5,})(?P<suffix>_user_tools|_metadata)?.asdf$"


-def asdf_open_memory_mapping_kwarg(memmap: bool) -> dict:
-    if asdf.__version__ > "3.1.0":
-        return {"memmap": memmap}
-    return {"copy_arrays": not memmap}
-
-
 @singledispatch
 def load_dataset(target):
     """
@@ -241,7 +235,7 @@ def _load_from_asdf(filepath):
     try:
         with importlib_resources.as_file(importlib_resources.files("dkist.io") / "level_1_dataset_schema.yaml") as schema_path:
             with asdf.open(filepath, custom_schema=schema_path.as_posix(),
-                           lazy_load=False, **asdf_open_memory_mapping_kwarg(memmap=False)) as ff:
+                           lazy_load=False, memmap=False) as ff:
                 ds = ff.tree["dataset"]
                 ds.meta["history"] = ff.tree["history"]
                 if isinstance(ds, TiledDataset):
diff --git a/dkist/dataset/tests/test_crop_dataset.py b/dkist/dataset/tests/test_crop_dataset.py
index 20a24d30..f0504e5e 100644
--- a/dkist/dataset/tests/test_crop_dataset.py
+++ b/dkist/dataset/tests/test_crop_dataset.py
@@ -1,3 +1,4 @@
+import numpy as np
 import pytest

 import astropy.units as u
@@ -7,6 +8,13 @@
 gwcs = pytest.importorskip("gwcs", "0.22.2a1.dev2")


+def assert_skycoord_allclose(coord1, coord2):
+    assert coord1.is_equivalent_frame(coord2)
+    data1 = coord1.frame.cartesian.xyz
+    data2 = coord2.frame.cartesian.xyz
+    assert np.allclose(data1, data2)
+
+
 def test_crop_visp_by_only_stokes(croppable_visp_dataset):

     cropped = croppable_visp_dataset.crop([
@@ -52,11 +60,11 @@ def test_crop_visp_by_time(croppable_visp_dataset):
     orig_coords = croppable_visp_dataset.axis_world_coords()
     cropped_coords = cropped.axis_world_coords()

-    assert (cropped_coords[0][0] == orig_coords[0][200]).all()
-    assert (cropped_coords[0][-1] == orig_coords[0][400]).all()
-    assert (cropped_coords[1] == orig_coords[1]).all()
-    assert (cropped_coords[2] == orig_coords[2][200:401]).all()
-    assert (cropped_coords[3] == orig_coords[3]).all()
+    assert_skycoord_allclose(cropped_coords[0][0], orig_coords[0][200])
+    assert_skycoord_allclose(cropped_coords[0][-1], orig_coords[0][400])
+    assert np.allclose(cropped_coords[1], orig_coords[1])
+    assert np.allclose(cropped_coords[2].jd, orig_coords[2][200:401].jd)
+    assert np.allclose(cropped_coords[3], orig_coords[3])


 def test_crop_visp_by_lonlat(croppable_visp_dataset):
@@ -90,11 +98,11 @@ def test_crop_visp_by_lonlat(croppable_visp_dataset):
     orig_coords = croppable_visp_dataset.axis_world_coords()
     cropped_coords = cropped.axis_world_coords()

-    assert (cropped_coords[0][0] == orig_coords[0][200][500:1001]).all()
-    assert (cropped_coords[0][-1] == orig_coords[0][600][500:1001]).all()
-    assert (cropped_coords[1] == orig_coords[1]).all()
-    assert (cropped_coords[2] == orig_coords[2][200:601]).all()
-    assert (cropped_coords[3] == orig_coords[3]).all()
+    assert_skycoord_allclose(cropped_coords[0][0], orig_coords[0][200][500:1001])
+    assert_skycoord_allclose(cropped_coords[0][-1], orig_coords[0][600][500:1001])
+    assert np.allclose(cropped_coords[1], orig_coords[1])
+    assert np.allclose(cropped_coords[2].jd, orig_coords[2][200:601].jd)
+    assert np.allclose(cropped_coords[3], orig_coords[3])


 def test_crop_cryo_by_only_stokes(croppable_cryo_dataset):
@@ -144,12 +152,12 @@ def test_crop_cryo_by_time(croppable_cryo_dataset):
     cropped_coords = cropped.axis_world_coords()

     # Whole coordinate array is too large to compare, so check just the edges
-    assert (cropped_coords[0][0, 0, 0, :] == orig_coords[0][0, 0, 0, :]).all()
-    assert (cropped_coords[0][0, 0, -1, :] == orig_coords[0][0, 0, -1, :]).all()
-    assert (cropped_coords[0][0, 0, :, 0] == orig_coords[0][0, 0, :, 0]).all()
-    assert (cropped_coords[0][0, 0, :, -1] == orig_coords[0][0, 0, :, -1]).all()
-    assert (cropped_coords[1] == orig_coords[1][:2, :2]).all()
-    assert (cropped_coords[2] == orig_coords[2]).all()
+    assert_skycoord_allclose(cropped_coords[0][0, 0, 0, :], orig_coords[0][0, 0, 0, :])
+    assert_skycoord_allclose(cropped_coords[0][0, 0, -1, :], orig_coords[0][0, 0, -1, :])
+    assert_skycoord_allclose(cropped_coords[0][0, 0, :, 0], orig_coords[0][0, 0, :, 0])
+    assert_skycoord_allclose(cropped_coords[0][0, 0, :, -1], orig_coords[0][0, 0, :, -1])
+    assert np.allclose(cropped_coords[1].jd, orig_coords[1][:2, :2].jd)
+    assert np.allclose(cropped_coords[2], orig_coords[2])


 def test_crop_cryo_by_only_lonlat(croppable_cryo_dataset):
@@ -180,6 +188,6 @@ def test_crop_cryo_by_only_lonlat(croppable_cryo_dataset):
     orig_coords = croppable_cryo_dataset.axis_world_coords()
     cropped_coords = cropped.axis_world_coords()

-    assert (cropped_coords[0][0, 0] == orig_coords[0][0, 0, :201, :201]).all()
-    assert (cropped_coords[1] == orig_coords[1]).all()
-    assert (cropped_coords[2] == orig_coords[2]).all()
+    assert_skycoord_allclose(cropped_coords[0][0, 0], orig_coords[0][0, 0, :201, :201])
+    assert np.allclose(cropped_coords[1].jd, orig_coords[1].jd)
+    assert np.allclose(cropped_coords[2], orig_coords[2])
diff --git a/dkist/dataset/tests/test_tiled_dataset.py b/dkist/dataset/tests/test_tiled_dataset.py
index 101ce7c2..c9548427 100644
--- a/dkist/dataset/tests/test_tiled_dataset.py
+++ b/dkist/dataset/tests/test_tiled_dataset.py
@@ -1,3 +1,4 @@
+import re
 import copy

 import matplotlib.pyplot as plt
@@ -145,6 +146,29 @@ def test_tileddataset_plot_limit_swapping(swap_tile_limits):
     return plt.gcf()

+@pytest.mark.remote_data
+def test_tileddataset_plot_non2d_sliceindex():
+    from dkist.data.sample import VBI_AJQWW
+    ds = load_dataset(VBI_AJQWW)
+
+    newtiles = []
+    for tile in ds.flat:
+        newtiles.append(tile.rebin((1, 8, 8), operation=np.sum))
+    # ndcube 2.3.0 introduced a deepcopy in rebin, which broke our dataset validation
+    # https://github.com/sunpy/ndcube/issues/815
+    for tile in newtiles:
+        tile.meta["inventory"] = ds.inventory
+    ds = TiledDataset(np.array(newtiles).reshape(ds.shape), meta={"inventory": newtiles[0].inventory})
+
+    already_sliced_ds = ds.slice_tiles[0]
+
+    fig = plt.figure(figsize=(12, 15))
+    with pytest.warns(DKISTUserWarning,
+                      match="The metadata ASDF file that produced this dataset is out of date and will result in "
+                            "incorrect plots. Please re-download the metadata ASDF file."):
+        with pytest.raises(ValueError, match=re.escape("Applying slice '(0,)' to this dataset resulted in a 1 "
+                                                       "dimensional dataset, you should pass a slice which results in a 2D dataset for each tile.")):
+            already_sliced_ds.plot(0, figure=fig)
+
+
 @pytest.mark.accept_cli_tiled_dataset
 def test_repr(simple_tiled_dataset):
diff --git a/dkist/dataset/tiled_dataset.py b/dkist/dataset/tiled_dataset.py
index 2d04bb51..37e3fc09 100644
--- a/dkist/dataset/tiled_dataset.py
+++ b/dkist/dataset/tiled_dataset.py
@@ -5,6 +5,7 @@
 but not representable in a single NDCube derived object as the array data
 are not contiguous in the spatial dimensions (due to overlaps and offsets).
 """
+import types
 import warnings
 from typing import Literal
 from textwrap import dedent
@@ -195,7 +196,7 @@ def plot(self, slice_index, share_zscale=False, figure=None, swap_tile_limits: L
         slice_index : `int`, sequence of `int`s or `numpy.s_`
             Object representing a slice which will reduce each component dataset of
             the TiledDataset to a 2D image. This is passed to
-            ``TiledDataset.slice_tiles``
+            `.TiledDataset.slice_tiles`; if each tile is already 2D, pass ``slice_index=...``.
         share_zscale : `bool`
             Determines whether the color scale of the plots should be calculated
             independently (``False``) or shared across all plots (``True``).
@@ -217,14 +218,21 @@
                           "will result in incorrect plots. Please re-download the metadata ASDF file.",
                           DKISTUserWarning)

-        if isinstance(slice_index, int):
+        if isinstance(slice_index, (int, slice, types.EllipsisType)):
             slice_index = (slice_index,)
+
         vmin, vmax = np.inf, 0
         if figure is None:
             figure = plt.gcf()

         sliced_dataset = self.slice_tiles[slice_index]
+        # This can change to just .shape once we support ndcube >= 2.3
+        if (nd_sliced := len(sliced_dataset.flat[0].data.shape)) != 2:
+            raise ValueError(
+                f"Applying slice '{slice_index}' to this dataset resulted in a {nd_sliced} "
+                "dimensional dataset, you should pass a slice which results in a 2D dataset for each tile."
+            )
         dataset_ncols, dataset_nrows = sliced_dataset.shape
         gridspec = GridSpec(nrows=dataset_nrows, ncols=dataset_ncols, figure=figure)
         for col in range(dataset_ncols):
diff --git a/dkist/io/asdf/tests/test_dataset.py b/dkist/io/asdf/tests/test_dataset.py
index 4d0d843e..d461d14c 100644
--- a/dkist/io/asdf/tests/test_dataset.py
+++ b/dkist/io/asdf/tests/test_dataset.py
@@ -102,7 +102,7 @@ def test_asdf_tags(dataset, tmp_path):
 def test_save_dataset_with_file_schema(tagobj, tmpdir):
     tree = {"dataset": tagobj}
     with importlib_resources.as_file(importlib_resources.files("dkist.io") / "level_1_dataset_schema.yaml") as schema_path:
-        with asdf.AsdfFile(tree, custom_schema=schema_path.as_posix()) as afile:
+        with asdf.AsdfFile(tree, custom_schema=schema_path) as afile:
             afile.validate()
             # it seems that asdf 4.0 does not validate the custom schema on write?
             afile.write_to(Path(tmpdir / "test.asdf"))
diff --git a/docs/whatsnew/1.10.rst b/docs/whatsnew/1.10.rst
new file mode 100644
index 00000000..2db60999
--- /dev/null
+++ b/docs/whatsnew/1.10.rst
@@ -0,0 +1,68 @@
+.. _dkist:whatsnew:1.10:
+
+*****
+v1.10
+*****
+
+ASDF File Updates
+=================
+
+This release of ``dkist`` includes compatibility with new metadata ASDF files generated after 2025/02/07.
+
+.. warning::
+
+   You should re-download all ASDF files when upgrading to this version of ``dkist``, particularly for VBI data.
+
+These ASDF files change the ordering of the tiles inside a `.TiledDataset` object to correctly account for the difference between column-major ordering (in FITS files) and row-major ordering (in numpy); this release of ``dkist`` updates the `.TiledDataset.plot` method to plot these tiles in the expected order.
+Note that with new metadata ASDF files the indexing of specific tiles will have changed.
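+
+For example, to plot the first image of each tile of the VBI sample dataset (a minimal sketch; the sample data and the slice index are purely illustrative):
+
+.. code-block:: python
+
+    from dkist import load_dataset
+    from dkist.data.sample import VBI_AJQWW
+
+    ds = load_dataset(VBI_AJQWW)
+    # slice_index reduces each component tile to a 2D image before plotting
+    ds.plot(0, share_zscale=True)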
+
+
+Dependency Updates
+==================
+
+This release of ``dkist`` includes both scheduled bumps to our dependencies and a special bump of gwcs to version 0.24, along with its associated dependencies.
+The upgrade to gwcs 0.24 fixes the inverse transform (world to pixel) for VISP WCSs.
+
+* Python >= 3.11
+* dask >= 2023.2
+* gwcs >= 0.24
+* matplotlib >= 3.7
+* ndcube >= 2.1
+* numpy >= 1.25
+* parfive >= 2.1
+* sunpy >= 5.0.7
+* asdf >= 3.3
+* asdf-astropy >= 0.5
+* asdf-coordinates-schemas >= 0.3
+* asdf-transform-schemas >= 0.5
+* asdf-wcs-schemas >= 0.4
+
+ASDF File History
+=================
+
+History data from the metadata ASDF files is now accessible via ``Dataset.meta["history"]`` and ``TiledDataset.meta["history"]``.
+This history dictionary includes both entries recording the versions of the packages used to write the file and any explicitly added history entries.
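+
+For example, to inspect the history of a loaded dataset (a minimal sketch, again using the VBI sample dataset for illustration):
+
+.. code-block:: python
+
+    from dkist import load_dataset
+    from dkist.data.sample import VBI_AJQWW
+
+    # Load the dataset from its metadata ASDF file
+    ds = load_dataset(VBI_AJQWW)
+
+    # A dict recording package versions used to write the file plus any explicit entries
+    print(ds.meta["history"])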
- "asdf>=2.15.0", - "asdf-astropy>=0.4.0", - "asdf-coordinates-schemas>=0.2.0", + "asdf>=3.3.0", # Required by gwcs 0.24 + "asdf-astropy>=0.5.0", # Required by gwcs 0.24 + "asdf-coordinates-schemas>=0.3.0", # required by wcs-schemas 0.4 "asdf-standard>=1.1.0", - "asdf-transform-schemas>=0.4.0", - "asdf-wcs-schemas>=0.3.0", - "astropy>=5.3", + "asdf-transform-schemas>=0.5.0", # required by wcs-schemas 0.4 + "asdf-wcs-schemas>=0.4.0", # required by gwcs 0.24 + "astropy>=6.0", # required by gwcs 0.24 "dask[array]>=2023.2.0", "globus-sdk>=3.0", - "gwcs>=0.19.0", + "gwcs>=0.24.0", # Inverse transform fix "matplotlib>=3.7", "ndcube[plotting,reproject]>=2.1", "numpy>=1.25", "parfive[ftp]>=2.1", "platformdirs>=3.0", - "sunpy[net,asdf]>=5.0", + "sunpy[net,asdf]>=5.0.7", "tqdm>=4.65", ] dynamic = ["version"] diff --git a/pytest.ini b/pytest.ini index 59426483..9911833f 100644 --- a/pytest.ini +++ b/pytest.ini @@ -69,3 +69,4 @@ filterwarnings = # Oldestdeps below here ignore:pkg_resources is deprecated as an API.*:DeprecationWarning ignore:Deprecated call to .*pkg_resources\.declare_namespace.*mpl_toolkits.*:DeprecationWarning + ignore:Extension .*sunpy-1\.0\.0:asdf.exceptions.AsdfManifestURIMismatchWarning diff --git a/tox.ini b/tox.ini index a7ec5353..cbc94054 100644 --- a/tox.ini +++ b/tox.ini @@ -59,7 +59,8 @@ extras = tests commands_pre = oldestdeps: minimum_dependencies dkist --filename requirements-min.txt - oldestdeps: pip install -r requirements-min.txt cryptography<42 jsonschema==4.0.1 + # We need to pin down the cryptography transitive dependency because of globus + oldestdeps: pip install -r requirements-min.txt cryptography<42 figure: python -c "from dkist.data.sample import download_all_sample_data; download_all_sample_data()" pip freeze --all --no-input commands =