Bump gwcs version to 0.24 #519

Merged: 18 commits, Feb 7, 2025
25 changes: 16 additions & 9 deletions changelog/507.breaking.rst
@@ -1,9 +1,16 @@
The minimum supported versions of dependencies and Python have been updated, this release requires:
* Python 3.11
* asdf 2.15 (and plugin version bumps)
* dask 2023.2
* matplotlib 3.7
* ndcube 2.1
* numpy 1.25
* parfive 2.1
* sunpy 5.0
This release of ``dkist`` includes both scheduled bumps to our dependencies and a special bump of gwcs to version 0.24, along with its associated dependencies.
The version upgrade to gwcs 0.24 fixes the inverse transform (world to pixel) for VISP WCSs.

* Python >= 3.11
* dask >= 2023.2
* gwcs >= 0.24
* matplotlib >= 3.7
* ndcube >= 2.1
* numpy >= 1.25
* parfive >= 2.1
* sunpy >= 5.0.7
* asdf >= 3.3
* asdf-astropy >= 0.5
* asdf-coordinate-schemas >= 0.3
* asdf-transform-schemas >= 0.5
* asdf-wcs-schemas >= 0.4
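
The practical effect of the gwcs bump is that world-to-pixel now round-trips for VISP data, which the changelog notes was previously broken. A minimal sketch, assuming a locally downloaded VISP metadata ASDF file (the filename below is hypothetical, following the level 1 naming pattern):

    from dkist import load_dataset

    # Hypothetical local VISP metadata ASDF file.
    ds = load_dataset("VISP_L1_20221024T212807_AAAAA_user_tools.asdf")

    # With gwcs >= 0.24 the inverse transform (world -> pixel) round-trips:
    world = ds.wcs.pixel_to_world(0, 0, 0, 0)
    pixel = ds.wcs.world_to_pixel(*world)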
7 changes: 4 additions & 3 deletions dkist/conftest.py
@@ -421,8 +421,9 @@ def croppable_visp_dataset(tmp_path_factory):


@pytest.fixture(scope="session")
def croppable_cryo_dataset():
def croppable_cryo_dataset(tmp_path_factory):
    cryodir = tmp_path_factory.mktemp("data")
    with gzip.open(Path(rootdir) / "croppable_cryo.asdf.gz", mode="rb") as gfo:
        with open(rootdir / "croppable_cryo.asdf", mode="wb") as afo:
        with open(cryodir / "croppable_cryo.asdf", mode="wb") as afo:
            afo.write(gfo.read())
    return load_dataset(Path(rootdir) / "croppable_cryo.asdf")
    return load_dataset(cryodir / "croppable_cryo.asdf")
8 changes: 1 addition & 7 deletions dkist/dataset/loader.py
@@ -22,12 +22,6 @@
ASDF_FILENAME_PATTERN = r"^(?P<instrument>[A-Z-]+)_L1_(?P<timestamp>\d{8}T\d{6})_(?P<datasetid>[A-Z]{5,})(?P<suffix>_user_tools|_metadata)?.asdf$"


def asdf_open_memory_mapping_kwarg(memmap: bool) -> dict:
    if asdf.__version__ > "3.1.0":
        return {"memmap": memmap}
    return {"copy_arrays": not memmap}


@singledispatch
def load_dataset(target):
    """
@@ -241,7 +235,7 @@ def _load_from_asdf(filepath):
    try:
        with importlib_resources.as_file(importlib_resources.files("dkist.io") / "level_1_dataset_schema.yaml") as schema_path:
            with asdf.open(filepath, custom_schema=schema_path.as_posix(),
                           lazy_load=False, **asdf_open_memory_mapping_kwarg(memmap=False)) as ff:
                           lazy_load=False, memmap=False) as ff:
                ds = ff.tree["dataset"]
                ds.meta["history"] = ff.tree["history"]
                if isinstance(ds, TiledDataset):
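
With asdf >= 3.3 as the new floor, the removed version shim is unnecessary: the ``memmap`` keyword is always available (per the shim itself, ``memmap=False`` was spelled ``copy_arrays=True`` on older asdf). A minimal sketch of the simplified call, with a hypothetical file path:

    import asdf

    # asdf >= 3.3 always accepts ``memmap``, so no version check is needed.
    with asdf.open("dataset.asdf", lazy_load=False, memmap=False) as ff:
        tree = ff.tree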
46 changes: 27 additions & 19 deletions dkist/dataset/tests/test_crop_dataset.py
@@ -1,3 +1,4 @@
import numpy as np
import pytest

import astropy.units as u
@@ -7,6 +8,13 @@
gwcs = pytest.importorskip("gwcs", "0.22.2a1.dev2")


def assert_skycoord_allclose(coord1, coord2):
    assert coord1.is_equivalent_frame(coord2)
    data1 = coord1.frame.cartesian.xyz
    data2 = coord2.frame.cartesian.xyz
    assert np.allclose(data1, data2)


def test_crop_visp_by_only_stokes(croppable_visp_dataset):

    cropped = croppable_visp_dataset.crop([
@@ -52,11 +60,11 @@ def test_crop_visp_by_time(croppable_visp_dataset):

    orig_coords = croppable_visp_dataset.axis_world_coords()
    cropped_coords = cropped.axis_world_coords()
    assert (cropped_coords[0][0] == orig_coords[0][200]).all()
    assert (cropped_coords[0][-1] == orig_coords[0][400]).all()
    assert (cropped_coords[1] == orig_coords[1]).all()
    assert (cropped_coords[2] == orig_coords[2][200:401]).all()
    assert (cropped_coords[3] == orig_coords[3]).all()
    assert_skycoord_allclose(cropped_coords[0][0], orig_coords[0][200])
    assert_skycoord_allclose(cropped_coords[0][-1], orig_coords[0][400])
    assert np.allclose(cropped_coords[1], orig_coords[1])
    assert np.allclose(cropped_coords[2].jd, orig_coords[2][200:401].jd)
    assert np.allclose(cropped_coords[3], orig_coords[3])


def test_crop_visp_by_lonlat(croppable_visp_dataset):
@@ -90,11 +98,11 @@ def test_crop_visp_by_lonlat(croppable_visp_dataset):

    orig_coords = croppable_visp_dataset.axis_world_coords()
    cropped_coords = cropped.axis_world_coords()
    assert (cropped_coords[0][0] == orig_coords[0][200][500:1001]).all()
    assert (cropped_coords[0][-1] == orig_coords[0][600][500:1001]).all()
    assert (cropped_coords[1] == orig_coords[1]).all()
    assert (cropped_coords[2] == orig_coords[2][200:601]).all()
    assert (cropped_coords[3] == orig_coords[3]).all()
    assert_skycoord_allclose(cropped_coords[0][0], orig_coords[0][200][500:1001])
    assert_skycoord_allclose(cropped_coords[0][-1], orig_coords[0][600][500:1001])
    assert np.allclose(cropped_coords[1], orig_coords[1])
    assert np.allclose(cropped_coords[2].jd, orig_coords[2][200:601].jd)
    assert np.allclose(cropped_coords[3], orig_coords[3])


def test_crop_cryo_by_only_stokes(croppable_cryo_dataset):
@@ -144,12 +152,12 @@ def test_crop_cryo_by_time(croppable_cryo_dataset):
    cropped_coords = cropped.axis_world_coords()

    # Whole coordinate array is too large to compare, so check just the edges
    assert (cropped_coords[0][0, 0, 0, :] == orig_coords[0][0, 0, 0, :]).all()
    assert (cropped_coords[0][0, 0, -1, :] == orig_coords[0][0, 0, -1, :]).all()
    assert (cropped_coords[0][0, 0, :, 0] == orig_coords[0][0, 0, :, 0]).all()
    assert (cropped_coords[0][0, 0, :, -1] == orig_coords[0][0, 0, :, -1]).all()
    assert (cropped_coords[1] == orig_coords[1][:2, :2]).all()
    assert (cropped_coords[2] == orig_coords[2]).all()
    assert_skycoord_allclose(cropped_coords[0][0, 0, 0, :], orig_coords[0][0, 0, 0, :])
    assert_skycoord_allclose(cropped_coords[0][0, 0, -1, :], orig_coords[0][0, 0, -1, :])
    assert_skycoord_allclose(cropped_coords[0][0, 0, :, 0], orig_coords[0][0, 0, :, 0])
    assert_skycoord_allclose(cropped_coords[0][0, 0, :, -1], orig_coords[0][0, 0, :, -1])
    assert np.allclose(cropped_coords[1].jd, orig_coords[1][:2, :2].jd)
    assert np.allclose(cropped_coords[2], orig_coords[2])


def test_crop_cryo_by_only_lonlat(croppable_cryo_dataset):
@@ -180,6 +188,6 @@ def test_crop_cryo_by_only_lonlat(croppable_cryo_dataset):
    orig_coords = croppable_cryo_dataset.axis_world_coords()
    cropped_coords = cropped.axis_world_coords()

    assert (cropped_coords[0][0, 0] == orig_coords[0][0, 0, :201, :201]).all()
    assert (cropped_coords[1] == orig_coords[1]).all()
    assert (cropped_coords[2] == orig_coords[2]).all()
    assert_skycoord_allclose(cropped_coords[0][0, 0], orig_coords[0][0, 0, :201, :201])
    assert np.allclose(cropped_coords[1].jd, orig_coords[1].jd)
    assert np.allclose(cropped_coords[2], orig_coords[2])
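
The move from exact ``==`` comparisons to ``assert_skycoord_allclose`` and ``np.allclose`` reflects that the gwcs 0.24 transforms evidently reproduce coordinates only to floating-point precision rather than bit-for-bit. A self-contained sketch of the same comparison pattern:

    import numpy as np
    import astropy.units as u
    from astropy.coordinates import SkyCoord

    a = SkyCoord(10 * u.deg, 20 * u.deg)
    b = SkyCoord((10 + 1e-13) * u.deg, 20 * u.deg)  # differs only by float noise

    assert not (a == b)  # exact equality is too strict
    assert np.allclose(a.cartesian.xyz, b.cartesian.xyz)  # agrees within tolerance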
24 changes: 24 additions & 0 deletions dkist/dataset/tests/test_tiled_dataset.py
@@ -1,3 +1,4 @@
import re
import copy

import matplotlib.pyplot as plt
@@ -145,6 +146,29 @@

    return plt.gcf()


@pytest.mark.remote_data
def test_tileddataset_plot_non2d_sliceindex():
    from dkist.data.sample import VBI_AJQWW
    ds = load_dataset(VBI_AJQWW)

    newtiles = []
    for tile in ds.flat:
        newtiles.append(tile.rebin((1, 8, 8), operation=np.sum))

    # ndcube 2.3.0 introduced a deepcopy for rebin, which broke our dataset validation
    # https://github.com/sunpy/ndcube/issues/815
    for tile in newtiles:
        tile.meta["inventory"] = ds.inventory
    ds = TiledDataset(np.array(newtiles).reshape(ds.shape), meta={"inventory": newtiles[0].inventory})

    already_sliced_ds = ds.slice_tiles[0]

    fig = plt.figure(figsize=(12, 15))
    with pytest.warns(DKISTUserWarning,
                      match="The metadata ASDF file that produced this dataset is out of date and will result in "
                            "incorrect plots. Please re-download the metadata ASDF file."):
        with pytest.raises(ValueError, match=re.escape("Applying slice '(0,)' to this dataset resulted in a 1 "
                                                       "dimensional dataset, you should pass a slice which results in a 2D dataset for each tile.")):
            already_sliced_ds.plot(0, figure=fig)

@pytest.mark.accept_cli_tiled_dataset
def test_repr(simple_tiled_dataset):
12 changes: 10 additions & 2 deletions dkist/dataset/tiled_dataset.py
@@ -5,6 +5,7 @@
but not representable in a single NDCube derived object as the array data are
not contiguous in the spatial dimensions (due to overlaps and offsets).
"""
import types
import warnings
from typing import Literal
from textwrap import dedent
@@ -195,7 +196,7 @@
        slice_index : `int`, sequence of `int`s or `numpy.s_`
            Object representing a slice which will reduce each component dataset
            of the TiledDataset to a 2D image. This is passed to
            ``TiledDataset.slice_tiles``
            `.TiledDataset.slice_tiles`; if each tile is already 2D, pass ``slice_index=...``.
        share_zscale : `bool`
            Determines whether the color scale of the plots should be calculated
            independently (``False``) or shared across all plots (``True``).
@@ -217,14 +218,21 @@
                          "will result in incorrect plots. Please re-download the metadata ASDF file.",
                          DKISTUserWarning)

        if isinstance(slice_index, int):
        if isinstance(slice_index, (int, slice, types.EllipsisType)):
            slice_index = (slice_index,)

        vmin, vmax = np.inf, 0

        if figure is None:
            figure = plt.gcf()

        sliced_dataset = self.slice_tiles[slice_index]
        # This can change to just .shape once we support ndcube >= 2.3
        if (nd_sliced := len(sliced_dataset.flat[0].data.shape)) != 2:
            raise ValueError(
                f"Applying slice '{slice_index}' to this dataset resulted in a {nd_sliced} "
                "dimensional dataset, you should pass a slice which results in a 2D dataset for each tile."
            )
        dataset_ncols, dataset_nrows = sliced_dataset.shape
        gridspec = GridSpec(nrows=dataset_nrows, ncols=dataset_ncols, figure=figure)
        for col in range(dataset_ncols):
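
With the widened type check, ``slice_index`` may now be an ``int``, a ``slice`` or an Ellipsis. A usage sketch with the VBI sample data used in the test above (the sample file may trigger the out-of-date ASDF warning mentioned there):

    import matplotlib.pyplot as plt
    from dkist import load_dataset
    from dkist.data.sample import VBI_AJQWW  # downloads the sample data on first use

    ds = load_dataset(VBI_AJQWW)
    ds.plot(0)  # an int reduces each 3D tile to a 2D image
    plt.show()

    already_2d = ds.slice_tiles[0]  # tiles are now already 2D
    already_2d.plot(...)  # Ellipsis passes each tile through unchanged
    plt.show()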
2 changes: 1 addition & 1 deletion dkist/io/asdf/tests/test_dataset.py
@@ -102,7 +102,7 @@ def test_asdf_tags(dataset, tmp_path):
def test_save_dataset_with_file_schema(tagobj, tmpdir):
    tree = {"dataset": tagobj}
    with importlib_resources.as_file(importlib_resources.files("dkist.io") / "level_1_dataset_schema.yaml") as schema_path:
        with asdf.AsdfFile(tree, custom_schema=schema_path.as_posix()) as afile:
        with asdf.AsdfFile(tree, custom_schema=schema_path) as afile:
            afile.validate()  # it seems that asdf 4.0 does not validate the custom schema on write?
            afile.write_to(Path(tmpdir / "test.asdf"))

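
The change above relies on asdf (>= 3.3) accepting a path-like object for ``custom_schema``, so the ``.as_posix()`` conversion can be dropped. A sketch of the pattern, with ``tagobj`` standing in for a tagged Dataset as in the test:

    import asdf
    import importlib.resources as importlib_resources

    tree = {"dataset": tagobj}  # ``tagobj``: a tagged Dataset, as in the test above
    with importlib_resources.as_file(importlib_resources.files("dkist.io") / "level_1_dataset_schema.yaml") as schema_path:
        with asdf.AsdfFile(tree, custom_schema=schema_path) as afile:
            afile.validate()  # asdf 4.0 appears not to validate the custom schema on write
            afile.write_to("test.asdf")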
43 changes: 43 additions & 0 deletions docs/whatsnew/1.10.rst
@@ -0,0 +1,43 @@
.. _dkist:whatsnew:1.10:

*****
v1.10
*****

ASDF File Updates
=================

This release of ``dkist`` includes compatibility with new metadata ASDF files generated after 2025/02/07.

.. warning::

You should re-download all ASDF files when upgrading to this version of ``dkist``, particularly for VBI data.

These ASDF files change the ordering of the tiles inside a `.TiledDataset` object to correctly account for the difference between column-major ordering (in FITS files) and row-major ordering (in numpy); this release of ``dkist`` updates the `.TiledDataset.plot` method to plot these tiles in the expected order.
Note that with new metadata ASDF files the indexing of specific tiles will have changed.


Dependency Updates
==================

This release of ``dkist`` includes both scheduled bumps to our dependencies and a special bump of gwcs to version 0.24, along with its associated dependencies.
The version upgrade to gwcs 0.24 fixes the inverse transform (world to pixel) for VISP WCSs.

* Python >= 3.11
* dask >= 2023.2
* gwcs >= 0.24
* matplotlib >= 3.7
* ndcube >= 2.1
* numpy >= 1.25
* parfive >= 2.1
* sunpy >= 5.0.7
* asdf >= 3.3
* asdf-astropy >= 0.5
* asdf-coordinate-schemas >= 0.3
* asdf-transform-schemas >= 0.5
* asdf-wcs-schemas >= 0.4

ASDF File History
=================

History data from the metadata ASDF files is now accessible via ``Dataset.meta["history"]`` and ``TiledDataset.meta["history"]``. This history dictionary includes both entries relating to the versions of packages used when writing the file and explicitly added history entries.
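
A short sketch of reading the new history metadata (the filename is hypothetical, and the exact contents of the dictionary depend on what the file records):

    from dkist import load_dataset

    ds = load_dataset("VISP_L1_20221024T212807_AAAAA_user_tools.asdf")  # hypothetical file
    history = ds.meta["history"]
    print(history)  # package version entries plus any explicitly added entries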
1 change: 1 addition & 0 deletions docs/whatsnew/index.rst
@@ -7,6 +7,7 @@ Release History
.. toctree::
   :maxdepth: 1

   1.10
   1.1
   1.0
   changelog
16 changes: 8 additions & 8 deletions pyproject.toml
@@ -23,22 +23,22 @@ dependencies = [
    # Some of these schema dependencies are minimums because we generated asdf
    # files with them unpinned so they are now required to read generated asdf
    # files.
    "asdf>=2.15.0",
    "asdf-astropy>=0.4.0",
    "asdf-coordinates-schemas>=0.2.0",
    "asdf>=3.3.0",  # Required by gwcs 0.24
    "asdf-astropy>=0.5.0",  # Required by gwcs 0.24
    "asdf-coordinates-schemas>=0.3.0",  # required by wcs-schemas 0.4
    "asdf-standard>=1.1.0",
    "asdf-transform-schemas>=0.4.0",
    "asdf-wcs-schemas>=0.3.0",
    "astropy>=5.3",
    "asdf-transform-schemas>=0.5.0",  # required by wcs-schemas 0.4
    "asdf-wcs-schemas>=0.4.0",  # required by gwcs 0.24
    "astropy>=6.0",  # required by gwcs 0.24
    "dask[array]>=2023.2.0",
    "globus-sdk>=3.0",
    "gwcs>=0.19.0",
    "gwcs>=0.24.0",  # Inverse transform fix
    "matplotlib>=3.7",
    "ndcube[plotting,reproject]>=2.1",
    "numpy>=1.25",
    "parfive[ftp]>=2.1",
    "platformdirs>=3.0",
    "sunpy[net,asdf]>=5.0",
    "sunpy[net,asdf]>=5.0.7",
    "tqdm>=4.65",
]
dynamic = ["version"]
1 change: 1 addition & 0 deletions pytest.ini
@@ -69,3 +69,4 @@ filterwarnings =
    # Oldestdeps below here
    ignore:pkg_resources is deprecated as an API.*:DeprecationWarning
    ignore:Deprecated call to .*pkg_resources\.declare_namespace.*mpl_toolkits.*:DeprecationWarning
    ignore:Extension .*sunpy-1\.0\.0:asdf.exceptions.AsdfManifestURIMismatchWarning
3 changes: 2 additions & 1 deletion tox.ini
@@ -59,7 +59,8 @@ extras =
    tests
commands_pre =
    oldestdeps: minimum_dependencies dkist --filename requirements-min.txt
    oldestdeps: pip install -r requirements-min.txt cryptography<42 jsonschema==4.0.1
    # We need to pin down the cryptography transitive dependency because of globus
    oldestdeps: pip install -r requirements-min.txt cryptography<42
    figure: python -c "from dkist.data.sample import download_all_sample_data; download_all_sample_data()"
    pip freeze --all --no-input
commands =