From 4eb6cf0a703e3c8f6455d0b5237ae571a1a1eb60 Mon Sep 17 00:00:00 2001 From: Stuart Mumford Date: Wed, 5 Feb 2025 15:08:19 +0000 Subject: [PATCH 01/16] Bump gwcs to first supported version with inverse support --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 686a698a..4bfe92b2 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -32,7 +32,7 @@ dependencies = [ "astropy>=5.3", "dask[array]>=2023.2.0", "globus-sdk>=3.0", - "gwcs>=0.19.0", + "gwcs>=0.24.0", # Inverse transform fix "matplotlib>=3.7", "ndcube[plotting,reproject]>=2.1", "numpy>=1.25", From feb83fd3224965b67f2107c60e2b3288e5eea141 Mon Sep 17 00:00:00 2001 From: Stuart Mumford Date: Wed, 5 Feb 2025 15:09:17 +0000 Subject: [PATCH 02/16] add changelog --- changelog/519.breaking.rst | 1 + 1 file changed, 1 insertion(+) create mode 100644 changelog/519.breaking.rst diff --git a/changelog/519.breaking.rst b/changelog/519.breaking.rst new file mode 100644 index 00000000..d0d0841f --- /dev/null +++ b/changelog/519.breaking.rst @@ -0,0 +1 @@ +The minimum supported gwcs version is now 0.24 which is the first version with support for VISP inverse transforms (world to pixel). From 15f7659b93d8c72775a9514b111ca0ff1b91c2c2 Mon Sep 17 00:00:00 2001 From: Stuart Mumford Date: Wed, 5 Feb 2025 15:14:42 +0000 Subject: [PATCH 03/16] Also need to bump other things --- changelog/519.breaking.rst | 8 +++++++- pyproject.toml | 12 ++++++------ pytest.ini | 1 + 3 files changed, 14 insertions(+), 7 deletions(-) diff --git a/changelog/519.breaking.rst b/changelog/519.breaking.rst index d0d0841f..97ee5b3b 100644 --- a/changelog/519.breaking.rst +++ b/changelog/519.breaking.rst @@ -1 +1,7 @@ -The minimum supported gwcs version is now 0.24 which is the first version with support for VISP inverse transforms (world to pixel). +The minimum supported gwcs version is now 0.24 which is the first version with support for VISP inverse transforms (world to pixel). 
This required the following additional dependency bumps: + +* asdf >= 3.3 +* asdf-astropy >= 0.5 +* asdf-coordinate-schemas >= 0.3 +* asdf-transform-schemas >= 0.5 +* asdf-wcs-schemas >= 0.4 diff --git a/pyproject.toml b/pyproject.toml index 4bfe92b2..5162b588 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -23,13 +23,13 @@ dependencies = [ # Some of these schema dependencies are minimums because we generated asdf # files with them unpinned so they are now required to read generated asdf # files. - "asdf>=2.15.0", - "asdf-astropy>=0.4.0", - "asdf-coordinates-schemas>=0.2.0", + "asdf>=3.3.0", # Required by gwcs 0.24 + "asdf-astropy>=0.5.0", # Required by gwcs 0.24 + "asdf-coordinates-schemas>=0.3.0", # required by wcs-schemas 0.4 "asdf-standard>=1.1.0", - "asdf-transform-schemas>=0.4.0", - "asdf-wcs-schemas>=0.3.0", - "astropy>=5.3", + "asdf-transform-schemas>=0.5.0", # required by wcs-schemas 0.4 + "asdf-wcs-schemas>=0.4.0", # required by gwcs 0.24 + "astropy>=6.0", # required by gwcs 0.24 "dask[array]>=2023.2.0", "globus-sdk>=3.0", "gwcs>=0.24.0", # Inverse transform fix diff --git a/pytest.ini b/pytest.ini index 59426483..9911833f 100644 --- a/pytest.ini +++ b/pytest.ini @@ -69,3 +69,4 @@ filterwarnings = # Oldestdeps below here ignore:pkg_resources is deprecated as an API.*:DeprecationWarning ignore:Deprecated call to .*pkg_resources\.declare_namespace.*mpl_toolkits.*:DeprecationWarning + ignore:Extension .*sunpy-1\.0\.0:asdf.exceptions.AsdfManifestURIMismatchWarning From 7b198030223f7d2766d830cdf0c1c9afd410aaf0 Mon Sep 17 00:00:00 2001 From: Stuart Mumford Date: Wed, 5 Feb 2025 15:55:14 +0000 Subject: [PATCH 04/16] Remove compatibility code for old asdf --- dkist/dataset/loader.py | 8 +------- 1 file changed, 1 insertion(+), 7 deletions(-) diff --git a/dkist/dataset/loader.py b/dkist/dataset/loader.py index bb3ec04b..6d20ca86 100644 --- a/dkist/dataset/loader.py +++ b/dkist/dataset/loader.py @@ -22,12 +22,6 @@ ASDF_FILENAME_PATTERN = 
r"^(?P[A-Z-]+)_L1_(?P\d{8}T\d{6})_(?P[A-Z]{5,})(?P_user_tools|_metadata)?.asdf$" -def asdf_open_memory_mapping_kwarg(memmap: bool) -> dict: - if asdf.__version__ > "3.1.0": - return {"memmap": memmap} - return {"copy_arrays": not memmap} - - @singledispatch def load_dataset(target): """ @@ -241,7 +235,7 @@ def _load_from_asdf(filepath): try: with importlib_resources.as_file(importlib_resources.files("dkist.io") / "level_1_dataset_schema.yaml") as schema_path: with asdf.open(filepath, custom_schema=schema_path.as_posix(), - lazy_load=False, **asdf_open_memory_mapping_kwarg(memmap=False)) as ff: + lazy_load=False, memmap=False) as ff: ds = ff.tree["dataset"] ds.meta["history"] = ff.tree["history"] if isinstance(ds, TiledDataset): From 7b92e00bfca0bb6be6f8e9105df39e9ac81082c8 Mon Sep 17 00:00:00 2001 From: Stuart Mumford Date: Wed, 5 Feb 2025 16:05:18 +0000 Subject: [PATCH 05/16] fix syntax in old changelog --- changelog/507.breaking.rst | 1 + 1 file changed, 1 insertion(+) diff --git a/changelog/507.breaking.rst b/changelog/507.breaking.rst index ccb1e7ca..3eb9f081 100644 --- a/changelog/507.breaking.rst +++ b/changelog/507.breaking.rst @@ -1,4 +1,5 @@ The minimum supported versions of dependencies and Python have been updated, this release requires: + * Python 3.11 * asdf 2.15 (and plugin version bumps) * dask 2023.2 From c60fa19613595e8a886cd84a2c5eae8f2f7278c5 Mon Sep 17 00:00:00 2001 From: Stuart Mumford Date: Wed, 5 Feb 2025 18:00:13 +0000 Subject: [PATCH 06/16] More sunpy issues --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 5162b588..0a59ee50 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -38,7 +38,7 @@ dependencies = [ "numpy>=1.25", "parfive[ftp]>=2.1", "platformdirs>=3.0", - "sunpy[net,asdf]>=5.0", + "sunpy[net,asdf]>=5.0.7", "tqdm>=4.65", ] dynamic = ["version"] From 98d9baed39802c48fbc5c19abf1de1f3268e356a Mon Sep 17 00:00:00 2001 From: Stuart Mumford Date: Thu, 6 Feb 2025 
09:37:08 +0000 Subject: [PATCH 07/16] things that go bump in the night --- tox.ini | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/tox.ini b/tox.ini index a7ec5353..cbc94054 100644 --- a/tox.ini +++ b/tox.ini @@ -59,7 +59,8 @@ extras = tests commands_pre = oldestdeps: minimum_dependencies dkist --filename requirements-min.txt - oldestdeps: pip install -r requirements-min.txt cryptography<42 jsonschema==4.0.1 + # We need to pin down the cryptography transitive dependency because of globus + oldestdeps: pip install -r requirements-min.txt cryptography<42 figure: python -c "from dkist.data.sample import download_all_sample_data; download_all_sample_data()" pip freeze --all --no-input commands = From ebd5023d765dcd3ad881757b40068c3ee5fed63f Mon Sep 17 00:00:00 2001 From: Stuart Mumford Date: Thu, 6 Feb 2025 09:45:46 +0000 Subject: [PATCH 08/16] Don't write temp file to repo dir --- dkist/conftest.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/dkist/conftest.py b/dkist/conftest.py index 4e466c47..62c0f005 100644 --- a/dkist/conftest.py +++ b/dkist/conftest.py @@ -421,8 +421,9 @@ def croppable_visp_dataset(tmp_path_factory): @pytest.fixture(scope="session") -def croppable_cryo_dataset(): +def croppable_cryo_dataset(tmp_path_factory): + cryodir = tmp_path_factory.mktemp("data") with gzip.open(Path(rootdir) / "croppable_cryo.asdf.gz", mode="rb") as gfo: - with open(rootdir / "croppable_cryo.asdf", mode="wb") as afo: + with open(cryodir / "croppable_cryo.asdf", mode="wb") as afo: afo.write(gfo.read()) - return load_dataset(Path(rootdir) / "croppable_cryo.asdf") + return load_dataset(cryodir / "croppable_cryo.asdf") From ce3a86041b9bfa33599bb42bdad61a10f5fdd33b Mon Sep 17 00:00:00 2001 From: Stuart Mumford Date: Thu, 6 Feb 2025 12:13:24 +0000 Subject: [PATCH 09/16] Add whatsnew --- changelog/507.breaking.rst | 24 +++++++++++++-------- changelog/519.breaking.rst | 7 ------- docs/whatsnew/1.10.rst | 43 
++++++++++++++++++++++++++++++++++++++ docs/whatsnew/index.rst | 1 + 4 files changed, 59 insertions(+), 16 deletions(-) delete mode 100644 changelog/519.breaking.rst create mode 100644 docs/whatsnew/1.10.rst diff --git a/changelog/507.breaking.rst b/changelog/507.breaking.rst index 3eb9f081..e202f63a 100644 --- a/changelog/507.breaking.rst +++ b/changelog/507.breaking.rst @@ -1,10 +1,16 @@ -The minimum supported versions of dependencies and Python have been updated, this release requires: +This release of ``dkist`` includes both scheduled bumps to our dependencies as well as a special bump of gwcs to version 0.24, and associated dependencies. +The version upgrade to gwcs 0.24 fixes the inverse transform (world to pixel) for VISP WCSs. -* Python 3.11 -* asdf 2.15 (and plugin version bumps) -* dask 2023.2 -* matplotlib 3.7 -* ndcube 2.1 -* numpy 1.25 -* parfive 2.1 -* sunpy 5.0 +* Python >= 3.11 +* dask >= 2023.2 +* gwcs >= 0.24 +* matplotlib >= 3.7 +* ndcube >= 2.1 +* numpy >= 1.25 +* parfive >= 2.1 +* sunpy >= 5.0.7 +* asdf >= 3.3 +* asdf-astropy >= 0.5 +* asdf-coordinate-schemas >= 0.3 +* asdf-transform-schemas >= 0.5 +* asdf-wcs-schemas >= 0.4 diff --git a/changelog/519.breaking.rst b/changelog/519.breaking.rst deleted file mode 100644 index 97ee5b3b..00000000 --- a/changelog/519.breaking.rst +++ /dev/null @@ -1,7 +0,0 @@ -The minimum supported gwcs version is now 0.24 which is the first version with support for VISP inverse transforms (world to pixel). This required the following additional dependency bumps: - -* asdf >= 3.3 -* asdf-astropy >= 0.5 -* asdf-coordinate-schemas >= 0.3 -* asdf-transform-schemas >= 0.5 -* asdf-wcs-schemas >= 0.4 diff --git a/docs/whatsnew/1.10.rst b/docs/whatsnew/1.10.rst new file mode 100644 index 00000000..bcf15636 --- /dev/null +++ b/docs/whatsnew/1.10.rst @@ -0,0 +1,43 @@ +.. 
_dkist:whatsnew:1.10: + +***** +v1.10 +***** + +ASDF File Updates +================= + +This release of ``dkist`` includes compatibility with new metadata ASDF files generated after 2025/02/07. + +.. warning:: + + You should re-download ASDF files when upgrading to this version of ``dkist``, particularly for VBI data. + +These ASDF files change the ordering of the tiles inside a `.TiledDataset` object to correctly account for the differences between column-major (in FITS files) and row-major ordering (in numpy), this release of ``dkist`` updates the `.TiledDataset.plot` method to correctly plot these tiles in the expected order. +It should be noted that with new metadata ASDF files the indexing of specific tiles will have changed. + + +Dependency Updates +================== + +This release of ``dkist`` includes both scheduled bumps to our dependencies as well as a special bump of gwcs to version 0.24, and associated dependencies. +The version upgrade to gwcs 0.24 fixes the inverse transform (world to pixel) for VISP WCSs. + +* Python >= 3.11 +* dask >= 2023.2 +* gwcs >= 0.24 +* matplotlib >= 3.7 +* ndcube >= 2.1 +* numpy >= 1.25 +* parfive >= 2.1 +* sunpy >= 5.0.7 +* asdf >= 3.3 +* asdf-astropy >= 0.5 +* asdf-coordinates-schemas >= 0.3 +* asdf-transform-schemas >= 0.5 +* asdf-wcs-schemas >= 0.4 + +ASDF File History +================= + +History data from the metadata ASDF files is now accessible via ``Dataset.meta["history"]`` and ``TiledDataset.meta["history"]``, this history dictionary includes both entries relating to versions of packages used when writing the file and explicitly added history entries. diff --git a/docs/whatsnew/index.rst b/docs/whatsnew/index.rst index 1b10aa84..43ab10b4 100644 --- a/docs/whatsnew/index.rst +++ b/docs/whatsnew/index.rst @@ -7,6 +7,7 @@ Release History .. 
toctree:: :maxdepth: 1 + 1.10 1.1 1.0 changelog From c9f7c6cc09db571f3a7831fee1cc256d55faadfe Mon Sep 17 00:00:00 2001 From: Stuart Mumford Date: Thu, 6 Feb 2025 15:40:12 +0000 Subject: [PATCH 10/16] Let's see if I can actually fix this --- dkist/dataset/tests/test_crop_dataset.py | 46 ++++++++++++++---------- dkist/io/asdf/tests/test_dataset.py | 2 +- 2 files changed, 28 insertions(+), 20 deletions(-) diff --git a/dkist/dataset/tests/test_crop_dataset.py b/dkist/dataset/tests/test_crop_dataset.py index 20a24d30..f0504e5e 100644 --- a/dkist/dataset/tests/test_crop_dataset.py +++ b/dkist/dataset/tests/test_crop_dataset.py @@ -1,3 +1,4 @@ +import numpy as np import pytest import astropy.units as u @@ -7,6 +8,13 @@ gwcs = pytest.importorskip("gwcs", "0.22.2a1.dev2") +def assert_skycoord_allclose(coord1, coord2): + assert coord1.is_equivalent_frame(coord2) + data1 = coord1.frame.cartesian.xyz + data2 = coord2.frame.cartesian.xyz + assert np.allclose(data1, data2) + + def test_crop_visp_by_only_stokes(croppable_visp_dataset): cropped = croppable_visp_dataset.crop([ @@ -52,11 +60,11 @@ def test_crop_visp_by_time(croppable_visp_dataset): orig_coords = croppable_visp_dataset.axis_world_coords() cropped_coords = cropped.axis_world_coords() - assert (cropped_coords[0][0] == orig_coords[0][200]).all() - assert (cropped_coords[0][-1] == orig_coords[0][400]).all() - assert (cropped_coords[1] == orig_coords[1]).all() - assert (cropped_coords[2] == orig_coords[2][200:401]).all() - assert (cropped_coords[3] == orig_coords[3]).all() + assert_skycoord_allclose(cropped_coords[0][0], orig_coords[0][200]) + assert_skycoord_allclose(cropped_coords[0][-1], orig_coords[0][400]) + assert np.allclose(cropped_coords[1], orig_coords[1]) + assert np.allclose(cropped_coords[2].jd, orig_coords[2][200:401].jd) + assert np.allclose(cropped_coords[3], orig_coords[3]) def test_crop_visp_by_lonlat(croppable_visp_dataset): @@ -90,11 +98,11 @@ def test_crop_visp_by_lonlat(croppable_visp_dataset): 
orig_coords = croppable_visp_dataset.axis_world_coords() cropped_coords = cropped.axis_world_coords() - assert (cropped_coords[0][0] == orig_coords[0][200][500:1001]).all() - assert (cropped_coords[0][-1] == orig_coords[0][600][500:1001]).all() - assert (cropped_coords[1] == orig_coords[1]).all() - assert (cropped_coords[2] == orig_coords[2][200:601]).all() - assert (cropped_coords[3] == orig_coords[3]).all() + assert_skycoord_allclose(cropped_coords[0][0], orig_coords[0][200][500:1001]) + assert_skycoord_allclose(cropped_coords[0][-1], orig_coords[0][600][500:1001]) + assert np.allclose(cropped_coords[1], orig_coords[1]) + assert np.allclose(cropped_coords[2].jd, orig_coords[2][200:601].jd) + assert np.allclose(cropped_coords[3], orig_coords[3]) def test_crop_cryo_by_only_stokes(croppable_cryo_dataset): @@ -144,12 +152,12 @@ def test_crop_cryo_by_time(croppable_cryo_dataset): cropped_coords = cropped.axis_world_coords() # Whole coordinate array is too large to compare, so check just the edges - assert (cropped_coords[0][0, 0, 0, :] == orig_coords[0][0, 0, 0, :]).all() - assert (cropped_coords[0][0, 0, -1, :] == orig_coords[0][0, 0, -1, :]).all() - assert (cropped_coords[0][0, 0, :, 0] == orig_coords[0][0, 0, :, 0]).all() - assert (cropped_coords[0][0, 0, :, -1] == orig_coords[0][0, 0, :, -1]).all() - assert (cropped_coords[1] == orig_coords[1][:2, :2]).all() - assert (cropped_coords[2] == orig_coords[2]).all() + assert_skycoord_allclose(cropped_coords[0][0, 0, 0, :], orig_coords[0][0, 0, 0, :]) + assert_skycoord_allclose(cropped_coords[0][0, 0, -1, :], orig_coords[0][0, 0, -1, :]) + assert_skycoord_allclose(cropped_coords[0][0, 0, :, 0], orig_coords[0][0, 0, :, 0]) + assert_skycoord_allclose(cropped_coords[0][0, 0, :, -1], orig_coords[0][0, 0, :, -1]) + assert np.allclose(cropped_coords[1].jd, orig_coords[1][:2, :2].jd) + assert np.allclose(cropped_coords[2], orig_coords[2]) def test_crop_cryo_by_only_lonlat(croppable_cryo_dataset): @@ -180,6 +188,6 @@ def 
test_crop_cryo_by_only_lonlat(croppable_cryo_dataset): orig_coords = croppable_cryo_dataset.axis_world_coords() cropped_coords = cropped.axis_world_coords() - assert (cropped_coords[0][0, 0] == orig_coords[0][0, 0, :201, :201]).all() - assert (cropped_coords[1] == orig_coords[1]).all() - assert (cropped_coords[2] == orig_coords[2]).all() + assert_skycoord_allclose(cropped_coords[0][0, 0], orig_coords[0][0, 0, :201, :201]) + assert np.allclose(cropped_coords[1].jd, orig_coords[1].jd) + assert np.allclose(cropped_coords[2], orig_coords[2]) diff --git a/dkist/io/asdf/tests/test_dataset.py b/dkist/io/asdf/tests/test_dataset.py index 4d0d843e..d461d14c 100644 --- a/dkist/io/asdf/tests/test_dataset.py +++ b/dkist/io/asdf/tests/test_dataset.py @@ -102,7 +102,7 @@ def test_asdf_tags(dataset, tmp_path): def test_save_dataset_with_file_schema(tagobj, tmpdir): tree = {"dataset": tagobj} with importlib_resources.as_file(importlib_resources.files("dkist.io") / "level_1_dataset_schema.yaml") as schema_path: - with asdf.AsdfFile(tree, custom_schema=schema_path.as_posix()) as afile: + with asdf.AsdfFile(tree, custom_schema=schema_path) as afile: afile.validate() # it seems that asdf 4.0 does not validate the custom schema on write? afile.write_to(Path(tmpdir / "test.asdf")) From 6cf818d8203c8f6498561ca75d6861886998fbe6 Mon Sep 17 00:00:00 2001 From: Stuart Mumford Date: Thu, 6 Feb 2025 16:46:49 +0000 Subject: [PATCH 11/16] Explicitly allow for no-slicing operations. This supports `np.s_[None]` `...` `np.s_[...]` etc --- dkist/dataset/tiled_dataset.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/dkist/dataset/tiled_dataset.py b/dkist/dataset/tiled_dataset.py index 129b78c9..0f113d77 100644 --- a/dkist/dataset/tiled_dataset.py +++ b/dkist/dataset/tiled_dataset.py @@ -5,6 +5,7 @@ but not representable in a single NDCube derived object as the array data are not contiguous in the spatial dimensions (due to overlaps and offsets). 
""" +import types import warnings from textwrap import dedent from collections.abc import Collection @@ -193,7 +194,7 @@ def plot(self, slice_index, share_zscale=False, figure=None, **kwargs): slice_index : `int`, sequence of `int`s or `numpy.s_` Object representing a slice which will reduce each component dataset of the TiledDataset to a 2D image. This is passed to - ``TiledDataset.slice_tiles`` + ``TiledDataset.slice_tiles``, if each tile is already 2D pass ``slice_index=...`. share_zscale : `bool` Determines whether the color scale of the plots should be calculated independently (``False``) or shared across all plots (``True``). @@ -202,7 +203,7 @@ def plot(self, slice_index, share_zscale=False, figure=None, **kwargs): A figure to use for the plot. If not specified the current pyplot figure will be used, or a new one created. """ - if isinstance(slice_index, int): + if isinstance(slice_index, (int, slice, types.EllipsisType)): slice_index = (slice_index,) vmin, vmax = np.inf, 0 From c81e2b2b705884061cde12b397f8ed489ce70006 Mon Sep 17 00:00:00 2001 From: Stuart Mumford Date: Thu, 6 Feb 2025 16:57:55 +0000 Subject: [PATCH 12/16] raise in plot if slices results in a dataset which is not 2D --- dkist/dataset/tiled_dataset.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/dkist/dataset/tiled_dataset.py b/dkist/dataset/tiled_dataset.py index 0da17fb0..51e4f540 100644 --- a/dkist/dataset/tiled_dataset.py +++ b/dkist/dataset/tiled_dataset.py @@ -227,6 +227,11 @@ def plot(self, slice_index, share_zscale=False, figure=None, swap_tile_limits: L figure = plt.gcf() sliced_dataset = self.slice_tiles[slice_index] + if (nd_sliced := len(sliced_dataset.flat[0].shape)) != 2: + raise ValueError( + f"Applying slice '{slice_index}' to this dataset resulted in a {nd_sliced} " + "dimensional dataset, you should pass a slice which results in a 2D dataset for each tile." 
+ ) dataset_ncols, dataset_nrows = sliced_dataset.shape gridspec = GridSpec(nrows=dataset_nrows, ncols=dataset_ncols, figure=figure) for col in range(dataset_ncols): From 3ccf3b861ae5aa167b3c5eb899ad044c742cdf44 Mon Sep 17 00:00:00 2001 From: Stuart Mumford Date: Thu, 6 Feb 2025 17:02:13 +0000 Subject: [PATCH 13/16] doc fixes --- dkist/dataset/tiled_dataset.py | 2 +- docs/whatsnew/1.10.rst | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/dkist/dataset/tiled_dataset.py b/dkist/dataset/tiled_dataset.py index 51e4f540..a82aeeb7 100644 --- a/dkist/dataset/tiled_dataset.py +++ b/dkist/dataset/tiled_dataset.py @@ -196,7 +196,7 @@ def plot(self, slice_index, share_zscale=False, figure=None, swap_tile_limits: L slice_index : `int`, sequence of `int`s or `numpy.s_` Object representing a slice which will reduce each component dataset of the TiledDataset to a 2D image. This is passed to - ``TiledDataset.slice_tiles``, if each tile is already 2D pass ``slice_index=...`. + `.TiledDataset.slice_tiles`, if each tile is already 2D pass ``slice_index=...``. share_zscale : `bool` Determines whether the color scale of the plots should be calculated independently (``False``) or shared across all plots (``True``). diff --git a/docs/whatsnew/1.10.rst b/docs/whatsnew/1.10.rst index bcf15636..2db60999 100644 --- a/docs/whatsnew/1.10.rst +++ b/docs/whatsnew/1.10.rst @@ -11,9 +11,9 @@ This release of ``dkist`` includes compatibility with new metadata ASDF files ge .. warning:: - You should re-download ASDF files when upgrading to this version of ``dkist``, particularly for VBI data. + You should re-download all ASDF files when upgrading to this version of ``dkist``, particularly for VBI data. 
-These ASDF files change the ordering of the tiles inside a `.TiledDataset` object to correctly account for the differences between column-major (in FITS files) and row-major ordering (in numpy), this release of ``dkist`` updates the `.TiledDataset.plot` method to correctly plot these tiles in the expected order. +These ASDF files change the ordering of the tiles inside a `.TiledDataset` object to correctly account for the differences between column-major (in FITS files) and row-major ordering (in numpy); this release of ``dkist`` updates the `.TiledDataset.plot` method to correctly plot these tiles in the expected order. It should be noted that with new metadata ASDF files the indexing of specific tiles will have changed. From aba17acf91ea11d7ceda82cbedfbb81fae83341f Mon Sep 17 00:00:00 2001 From: Arthur Eigenbrot Date: Thu, 6 Feb 2025 15:08:38 -0700 Subject: [PATCH 14/16] Add test coverage for new error raised by bad `.plot` slices --- dkist/dataset/tests/test_tiled_dataset.py | 23 +++++++++++++++++++++++ 1 file changed, 23 insertions(+) diff --git a/dkist/dataset/tests/test_tiled_dataset.py b/dkist/dataset/tests/test_tiled_dataset.py index 101ce7c2..44dcd821 100644 --- a/dkist/dataset/tests/test_tiled_dataset.py +++ b/dkist/dataset/tests/test_tiled_dataset.py @@ -1,3 +1,4 @@ +import re import copy import matplotlib.pyplot as plt @@ -145,6 +146,28 @@ def test_tileddataset_plot_limit_swapping(swap_tile_limits): return plt.gcf() +def test_tileddataset_plot_non2d_sliceindex(): + from dkist.data.sample import VBI_AJQWW + ds = load_dataset(VBI_AJQWW) + + newtiles = [] + for tile in ds.flat: + newtiles.append(tile.rebin((1, 8, 8), operation=np.sum)) + # ndcube 2.3.0 introduced a deepcopy for rebin, this broke our dataset validation + # https://github.com/sunpy/ndcube/issues/815 + for tile in newtiles: + tile.meta["inventory"] = ds.inventory + ds = TiledDataset(np.array(newtiles).reshape(ds.shape), meta={"inventory": newtiles[0].inventory}) + + already_sliced_ds = 
ds.slice_tiles[0] + + fig = plt.figure(figsize=(12, 15)) + with pytest.warns(DKISTUserWarning, + match="The metadata ASDF file that produced this dataset is out of date and will result in " + "incorrect plots. Please re-download the metadata ASDF file."): + with pytest.raises(ValueError, match=re.escape("Applying slice '(0,)' to this dataset resulted in a 1 " + "dimensional dataset, you should pass a slice which results in a 2D dataset for each tile.")): + already_sliced_ds.plot(0, figure=fig) @pytest.mark.accept_cli_tiled_dataset def test_repr(simple_tiled_dataset): From 090a155282a05a759028545a3424cd96fe946042 Mon Sep 17 00:00:00 2001 From: Arthur Eigenbrot Date: Thu, 6 Feb 2025 15:20:48 -0700 Subject: [PATCH 15/16] Need to mark as using remote data? --- dkist/dataset/tests/test_tiled_dataset.py | 1 + 1 file changed, 1 insertion(+) diff --git a/dkist/dataset/tests/test_tiled_dataset.py b/dkist/dataset/tests/test_tiled_dataset.py index 44dcd821..c9548427 100644 --- a/dkist/dataset/tests/test_tiled_dataset.py +++ b/dkist/dataset/tests/test_tiled_dataset.py @@ -146,6 +146,7 @@ def test_tileddataset_plot_limit_swapping(swap_tile_limits): return plt.gcf() +@pytest.mark.remote_data def test_tileddataset_plot_non2d_sliceindex(): from dkist.data.sample import VBI_AJQWW ds = load_dataset(VBI_AJQWW) From 408b9f679130ba0f06409781b49a37123a00d729 Mon Sep 17 00:00:00 2001 From: Drew Leonard Date: Fri, 7 Feb 2025 14:21:58 +0000 Subject: [PATCH 16/16] NDCube fail --- dkist/dataset/tiled_dataset.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/dkist/dataset/tiled_dataset.py b/dkist/dataset/tiled_dataset.py index a82aeeb7..37e3fc09 100644 --- a/dkist/dataset/tiled_dataset.py +++ b/dkist/dataset/tiled_dataset.py @@ -227,7 +227,8 @@ def plot(self, slice_index, share_zscale=False, figure=None, swap_tile_limits: L figure = plt.gcf() sliced_dataset = self.slice_tiles[slice_index] - if (nd_sliced := len(sliced_dataset.flat[0].shape)) != 2: + # This can change to 
just .shape once we support ndcube >= 2.3 + if (nd_sliced := len(sliced_dataset.flat[0].data.shape)) != 2: raise ValueError( f"Applying slice '{slice_index}' to this dataset resulted in a {nd_sliced} " "dimensional dataset, you should pass a slice which results in a 2D dataset for each tile."