diff --git a/changelog/24.bugfix.rst b/changelog/24.bugfix.rst new file mode 100644 index 00000000..4d5ccb24 --- /dev/null +++ b/changelog/24.bugfix.rst @@ -0,0 +1 @@ +Fix a number of bugs in dataset generation and WCS slicing. diff --git a/dkist/asdf_maker/generator.py b/dkist/asdf_maker/generator.py index 73846f32..01d05cf7 100644 --- a/dkist/asdf_maker/generator.py +++ b/dkist/asdf_maker/generator.py @@ -25,21 +25,14 @@ def headers_from_filenames(filenames, hdu=0): """ - A generator to get the headers from filenames. + Get the headers from a list of filenames as a list of `dict` objects. """ - return [fits.getheader(fname, ext=hdu) for fname in filenames] + return [dict(fits.getheader(fname, ext=hdu)) for fname in filenames] def table_from_headers(headers): - h0 = headers[0] + return Table(rows=headers, names=list(headers[0].keys())) - t = Table(list(zip(h0.values())), names=list(h0.keys())) - for h in headers[1:]: - t.add_row(h) - - return t - - -def validate_headers(headers): +def validate_headers(table_headers): """ - Given a bunch of headers, validate that they form a coherent set. - This function also adds the headers to a list as they are read from the file. + Given a table of headers, validate that they form a coherent set. @@ -55,7 +48,7 @@ def validate_headers(headers): - out_headers : `list` - A list of headers. + table_headers : `astropy.table.Table` + The validated table of headers. """ - t = table_from_headers(headers) + t = table_headers """ Let's do roughly the minimal amount of verification here. @@ -80,7 +73,7 @@ def validate_headers(headers): if not all(col == col[0]): raise ValueError(f"The {col.name} values did not all match:\n {col}") - return headers + return table_headers def build_pixel_frame(header): @@ -201,7 +194,7 @@ def slice_for_n(self): naxes = self.header['DEAXES'] ss = [0] * naxes ss[i] = slice(None) - return ss + return ss[::-1] @property def slice_headers(self): @@ -322,16 +315,18 @@ def gwcs_from_headers(headers): output_frame=world_frame) -def sorter_DINDEX(headers): +def make_sorted_table(headers, filenames): """ - A sorting function based on the values of DINDEX in the header. + Return an `astropy.table.Table` instance with the rows sorted into dataset order by the DINDEX keys. """ - t = table_from_headers(headers) + theaders = table_from_headers(headers) + theaders['filenames'] = filenames + theaders['headers'] = headers dataset_axes = headers[0]['DNAXIS'] array_axes = headers[0]['DAAXES'] keys = [f'DINDEX{k}' for k in range(dataset_axes, array_axes, -1)] - t = np.array(t[keys]) - return np.argsort(t, order=keys) + t = np.array(theaders[keys]) + return theaders[np.argsort(t, order=keys)] def asdf_tree_from_filenames(filenames, hdu=0, relative_to=None): @@ -350,34 +345,30 @@ def asdf_tree_from_filenames(filenames, hdu=0, relative_to=None): - # headers is an iterator + # headers is a list of dicts, one per file headers = headers_from_filenames(filenames, hdu=hdu) - # headers is a now list - headers = validate_headers(headers) + table_headers = make_sorted_table(headers, filenames) - sort_inds = sorter_DINDEX(headers) - - sort_heads = ((head, sort_inds[i]) for i, head in enumerate(headers)) - heads = sorted(sort_heads, key=lambda h: h[1]) - headers = [head[0] for head in heads] + validate_headers(table_headers) - # Sort the filenames into DS order. + # Sort the filenames into dataset order.
- sorted_filenames = np.array(filenames)[sort_inds] + sorted_filenames = np.array(table_headers['filenames']) + sorted_headers = np.array(table_headers['headers']) # Get the array shape shape = tuple((headers[0][f'DNAXIS{n}'] for n in range(headers[0]['DNAXIS'], headers[0]['DAAXES'], -1))) # References from filenames - reference_array = references_from_filenames(sorted_filenames, array_shape=shape, + reference_array = references_from_filenames(sorted_filenames, sorted_headers, array_shape=shape, hdu_index=hdu, relative_to=relative_to) tree = {'dataset': reference_array, - 'gwcs': gwcs_from_headers(headers)} + 'gwcs': gwcs_from_headers(sorted_headers)} # TODO: Write a schema for the tree. return tree -def dataset_from_fits(path, asdf_filename, hdu=0, relative_to=None): +def dataset_from_fits(path, asdf_filename, hdu=0, relative_to=None, **kwargs): """ Given a path containing FITS files, write an asdf file in the same path. @@ -392,6 +383,9 @@ def dataset_from_fits(path, asdf_filename, hdu=0, relative_to=None): hdu : `int` The HDU to read from the FITS files. + kwargs + Additional kwargs are passed to `asdf.AsdfFile.write_to`. + """ path = pathlib.Path(path) @@ -400,4 +394,4 @@ def dataset_from_fits(path, asdf_filename, hdu=0, relative_to=None): tree = asdf_tree_from_filenames(list(files), hdu=hdu, relative_to=relative_to) with asdf.AsdfFile(tree) as afile: - afile.write_to(str(path/asdf_filename)) + afile.write_to(str(path/asdf_filename), **kwargs) diff --git a/dkist/asdf_maker/helpers.py b/dkist/asdf_maker/helpers.py index a16ddef9..76db2372 100644 --- a/dkist/asdf_maker/helpers.py +++ b/dkist/asdf_maker/helpers.py @@ -4,7 +4,6 @@ import asdf import astropy.units as u -from astropy.io import fits from astropy.time import Time from gwcs.lookup_table import LookupTable from astropy.modeling.models import (Shift, Linear1D, Multiply, Pix2Sky_TAN, @@ -16,7 +15,7 @@ 'spatial_model_from_quantity', 'spatial_model_from_header', 'references_from_filenames'] -def references_from_filenames(filenames, array_shape, hdu_index=0, relative_to=None): +def references_from_filenames(filenames, headers, array_shape, hdu_index=0, relative_to=None): """ Given an array of paths to FITS files, create a set of nested lists of `asdf.external_reference.ExternalArrayReference` objects with the same @@ -28,6 +27,9 @@ def references_from_filenames(filenames, array_shape, hdu_index=0, relative_to=N filenames : `numpy.ndarray` An array of filenames, in numpy order for the output array (i.e. ``.flat``) + headers : `list` + A list of headers, one per file. + array_shape : `tuple` The desired output shape of the reference array. (i.e. the shape of the data minus the HDU dimensions.)
@@ -45,20 +47,17 @@ def references_from_filenames(filenames, array_shape, hdu_index=0, relative_to=N raise ValueError(f"An incorrect number of filenames ({filenames.size})" f" supplied for array_shape ({array_shape})") - for i, filepath in enumerate(filenames.flat): - with fits.open(filepath) as hdul: - hdu = hdul[hdu_index] - dtype = BITPIX2DTYPE[hdu.header['BITPIX']] - # hdu.shape is already in Python order - shape = tuple(hdu.shape) + for i, (filepath, head) in enumerate(zip(filenames.flat, headers.flat)): + dtype = BITPIX2DTYPE[head['BITPIX']] + shape = tuple([int(head[f"NAXIS{a}"]) for a in range(head["NAXIS"], 0, -1)]) - # Convert paths to relative paths - relative_path = filepath - if relative_to: - relative_path = os.path.relpath(filepath, relative_to) + # Convert paths to relative paths + relative_path = filepath + if relative_to: + relative_path = os.path.relpath(filepath, str(relative_to)) - reference_array.flat[i] = ExternalArrayReference( - relative_path, hdu_index, dtype, shape) + reference_array.flat[i] = ExternalArrayReference( + relative_path, hdu_index, dtype, shape) return reference_array.tolist() @@ -160,6 +159,7 @@ def time_model_from_date_obs(date_obs, date_bgn=None): intercept = 0 * u.s return linear_time_model(cadence=slope, reference_val=intercept) else: + print(f"creating tabular temporal axis. ddeltas: {ddelta}") return LookupTable(deltas.to(u.s)) @@ -180,6 +180,7 @@ def spectral_model_from_framewave(framewav): slope = ddeltas[0] return linear_spectral_model(slope, wave_bgn) else: + print(f"creating tabular wavelength axis. ddeltas: {ddeltas}") return LookupTable(framewav) diff --git a/dkist/asdf_maker/tests/test_generator.py b/dkist/asdf_maker/tests/test_generator.py index 5e4b1e06..1754d273 100644 --- a/dkist/asdf_maker/tests/test_generator.py +++ b/dkist/asdf_maker/tests/test_generator.py @@ -9,7 +9,8 @@ from dkist.dataset import Dataset from dkist.asdf_maker.generator import (validate_headers, dataset_from_fits, gwcs_from_headers, - headers_from_filenames, asdf_tree_from_filenames) + headers_from_filenames, asdf_tree_from_filenames, + table_from_headers) @pytest.fixture @@ -36,8 +37,9 @@ def test_frames(transform_builder): def test_input_name_ordering(wcs): # Check the ordering of the input and output frames - allowed_pixel_names = (('spatial x', 'spatial y', 'wavelength position', 'scan number', 'stokes'), - ('wavelength', 'slit position', 'raster position', 'scan number', 'stokes')) + allowed_pixel_names = (('spatial x', 'spatial y', 'wavelength position', 'scan number', + 'stokes'), ('wavelength', 'slit position', 'raster position', + 'scan number', 'stokes')) assert wcs.input_frame.axes_names in allowed_pixel_names @@ -72,12 +74,12 @@ def test_validator(header_filenames): headers = headers_from_filenames(header_filenames) headers[10]['NAXIS'] = 5 with pytest.raises(ValueError) as excinfo: - validate_headers(headers) + validate_headers(table_from_headers(headers)) assert "NAXIS" in str(excinfo) def test_make_asdf(header_filenames, tmpdir): path = pathlib.Path(header_filenames[0]) dataset_from_fits(path.parent, "test.asdf") - assert (path.parent/"test.asdf").exists() + assert (path.parent / "test.asdf").exists() assert isinstance(Dataset.from_directory(str(path.parent)), Dataset) diff --git a/dkist/asdf_maker/tests/test_helpers.py b/dkist/asdf_maker/tests/test_helpers.py index 98fd6cdb..ce087b08 100644 --- a/dkist/asdf_maker/tests/test_helpers.py +++ b/dkist/asdf_maker/tests/test_helpers.py @@ -1,6 +1,7 @@ import os import pytest +import numpy as np import 
asdf import astropy.units as u @@ -13,12 +14,13 @@ from dkist.asdf_maker.helpers import (make_asdf, linear_time_model, linear_spectral_model, time_model_from_date_obs, references_from_filenames, spatial_model_from_header, spectral_model_from_framewave) -from dkist.asdf_maker.generator import asdf_tree_from_filenames +from dkist.asdf_maker.generator import asdf_tree_from_filenames, headers_from_filenames def test_references_from_filesnames_shape_error(header_filenames): + headers = headers_from_filenames(header_filenames, hdu=0) with pytest.raises(ValueError) as exc: - references_from_filenames(header_filenames, [2, 3]) + references_from_filenames(header_filenames, headers, [2, 3]) assert "incorrect number" in str(exc) assert "2, 3" in str(exc) @@ -26,8 +28,10 @@ def test_references_from_filesnames_shape_error(header_filenames): def test_references_from_filenames(header_filenames): + headers = headers_from_filenames(header_filenames, hdu=0) base = os.path.split(header_filenames[0])[0] - refs = references_from_filenames(header_filenames, (len(header_filenames),), relative_to=base) + refs = references_from_filenames(header_filenames, np.array(headers, dtype=object), + (len(header_filenames),), relative_to=base) for ref in refs: assert base not in ref.fileuri diff --git a/dkist/data/test/5d_gwcs.asdf b/dkist/data/test/5d_gwcs.asdf new file mode 100644 index 00000000..0fdf0650 --- /dev/null +++ b/dkist/data/test/5d_gwcs.asdf @@ -0,0 +1,136 @@ +#ASDF 1.0.0 +#ASDF_STANDARD 1.3.0 +%YAML 1.1 +%TAG ! tag:stsci.edu:asdf/ +--- !core/asdf-1.1.0 +asdf_library: !core/software-1.0.0 {author: Space Telescope Science Institute, homepage: 'http://github.com/spacetelescope/asdf', + name: asdf, version: 2.3.0.dev1648} +history: + extensions: + - !core/extension_metadata-1.0.0 + extension_class: gwcs.extension.GWCSExtension + software: {name: gwcs, version: 0.10.dev399} + - !core/extension_metadata-1.0.0 + extension_class: astropy.io.misc.asdf.extension.AstropyExtension + software: {name: astropy, version: 3.2.dev23413} + - !core/extension_metadata-1.0.0 + extension_class: asdf.extension.BuiltinExtension + software: {name: asdf, version: 2.3.0.dev1648} + - !core/extension_metadata-1.0.0 + extension_class: astropy.io.misc.asdf.extension.AstropyAsdfExtension + software: {name: astropy, version: 3.2.dev23413} +gwcs: ! + name: '' + steps: + - ! + frame: ! 
+ axes_names: [spatial x, spatial y, wavelength position, scan number, stokes] + axes_order: [0, 1, 2, 3, 4] + axes_type: [SPATIAL, SPATIAL, SPECTRAL, TEMPORAL, STOKES] + name: pixel + naxes: 5 + unit: [!unit/unit-1.0.0 pixel, !unit/unit-1.0.0 pixel, !unit/unit-1.0.0 pixel, + !unit/unit-1.0.0 pixel, !unit/unit-1.0.0 pixel] + transform: !transform/concatenate-1.1.0 + forward: + - !transform/concatenate-1.1.0 + forward: + - !transform/concatenate-1.1.0 + forward: + - !transform/compose-1.1.0 + forward: + - !transform/compose-1.1.0 + forward: + - !transform/compose-1.1.0 + forward: + - !transform/compose-1.1.0 + forward: + - !transform/concatenate-1.1.0 + forward: + - !transform/shift-1.2.0 + offset: !unit/quantity-1.1.0 {unit: !unit/unit-1.0.0 pixel, + value: -490.0} + - !transform/shift-1.2.0 + offset: !unit/quantity-1.1.0 {unit: !unit/unit-1.0.0 pixel, + value: -483.0} + - !transform/concatenate-1.1.0 + forward: + - !transform/multiplyscale-1.0.0 + factor: !unit/quantity-1.1.0 {unit: !unit/unit-1.0.0 arcsec + pixel-1, value: 0.06} + - !transform/multiplyscale-1.0.0 + factor: !unit/quantity-1.1.0 {unit: !unit/unit-1.0.0 arcsec + pixel-1, value: 0.06} + - !transform/affine-1.2.0 + matrix: !unit/quantity-1.1.0 + unit: !unit/unit-1.0.0 arcsec + value: !core/ndarray-1.0.0 + data: + - [0.9804184378390942, -0.1969255868219009] + - [0.1969255868219009, 0.9804184378390942] + datatype: float64 + shape: [2, 2] + translation: !unit/quantity-1.1.0 + unit: !unit/unit-1.0.0 arcsec + value: !core/ndarray-1.0.0 + data: [0.0, 0.0] + datatype: float64 + shape: [2] + - !transform/gnomonic-1.1.0 {direction: pix2sky} + - !transform/rotate3d-1.2.0 + phi: !unit/quantity-1.1.0 {unit: !unit/unit-1.0.0 arcsec, value: -933.4107287336818} + psi: !unit/quantity-1.1.0 {unit: !unit/unit-1.0.0 deg, value: 180.0} + theta: !unit/quantity-1.1.0 {unit: !unit/unit-1.0.0 arcsec, value: 851.4832366363328} + - !transform/linear1d-1.0.0 + intercept: !unit/quantity-1.1.0 {unit: !unit/unit-1.0.0 nm, value: 854.1105} + slope: !unit/quantity-1.1.0 {unit: !unit/unit-1.0.0 nm pixel-1, value: 0.010499999999979082} + - !transform/linear1d-1.0.0 + intercept: !unit/quantity-1.1.0 {unit: !unit/unit-1.0.0 s, value: 0.0} + slope: !unit/quantity-1.1.0 {unit: !unit/unit-1.0.0 pixel-1 s, value: 467.53299999999456} + - ! + lookup_table: !unit/quantity-1.1.0 + unit: !unit/unit-1.0.0 pixel + value: !core/ndarray-1.0.0 + data: [0.0, 1.0, 2.0, 3.0] + datatype: float64 + shape: [4] + - ! + frame: ! + frames: + - ! + axes_names: [longitude, latitude] + name: helioprojective + reference_frame: ! + frame_attributes: + observer: ! + data: ! + components: + distance: !unit/quantity-1.1.0 {unit: !unit/unit-1.0.0 AU, value: 1.0025005045375186} + lat: ! {unit: !unit/unit-1.0.0 deg, + value: 6.898593959064842} + lon: ! + unit: !unit/unit-1.0.0 deg + value: 0.0 + wrap_angle: ! { + unit: !unit/unit-1.0.0 deg, value: 180.0} + type: SphericalRepresentation + frame_attributes: {obstime: !time/time-1.1.0 '2022-09-27T02:16:18.414'} + obstime: !time/time-1.1.0 2022-09-27T02:16:18.414 + rsun: !unit/quantity-1.1.0 {unit: !unit/unit-1.0.0 km, value: 695508.0} + unit: [!unit/unit-1.0.0 arcsec, !unit/unit-1.0.0 arcsec] + - ! + axes_names: [wavelength] + axes_order: [2] + name: wavelength + unit: [!unit/unit-1.0.0 nm] + - ! + axes_names: [time] + axes_order: [3] + name: time + reference_time: !time/time-1.1.0 2022-09-27T02:16:18.414 + unit: [!unit/unit-1.0.0 s] + - ! + axes_order: [4] + name: stokes + name: CompositeFrame +... 
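Note: the 5d_gwcs.asdf fixture above is consumed by the new slicer tests later in this diff. A minimal sketch of that usage, with the load pattern and the new ``(wcs, missing_axes)`` return value taken from those tests (the variable names and the final print are illustrative only, not part of the change set):

    import asdf
    import astropy.units as u

    from dkist.data.test import rootdir
    from dkist.wcs.slicer import GWCSSlicer

    # Load the 5D GWCS stored in the new test fixture.
    with asdf.open(str(rootdir / "5d_gwcs.asdf")) as f:
        gwcs_5d = f.tree['gwcs']

    # GWCSSlicer.__getitem__ now returns the sliced WCS *and* the
    # missing-axes bookkeeping list.
    wcs, missing_axes = GWCSSlicer(gwcs_5d, pixel_order=False)[0, 0, :, 0, 0]

    # Per test_5d_both_spatial, only the spectral input remains:
    # pixel 0 maps to ~854.1105 nm.
    print(wcs(0 * u.pix))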
diff --git a/dkist/data/test/EIT/eit_2004-03-01T00:00:10.515000.asdf b/dkist/data/test/EIT/eit_2004-03-01T00:00:10.515000.asdf index 4b677312..0394cfb9 100644 Binary files a/dkist/data/test/EIT/eit_2004-03-01T00:00:10.515000.asdf and b/dkist/data/test/EIT/eit_2004-03-01T00:00:10.515000.asdf differ diff --git a/dkist/data/test/__init__.py b/dkist/data/test/__init__.py index a03884ee..8955d481 100644 --- a/dkist/data/test/__init__.py +++ b/dkist/data/test/__init__.py @@ -1,5 +1,6 @@ import os +import pathlib import dkist -rootdir = os.path.join(os.path.dirname(dkist.__file__), "data", "test") +rootdir = pathlib.Path(os.path.join(os.path.dirname(dkist.__file__), "data", "test")) diff --git a/dkist/dataset/dataset.py b/dkist/dataset/dataset.py index c9a83216..82ab06e4 100644 --- a/dkist/dataset/dataset.py +++ b/dkist/dataset/dataset.py @@ -22,8 +22,63 @@ class Dataset(DatasetSlicingMixin, DatasetPlotMixin, NDCubeABC): The base class for DKIST datasets. This class is backed by `dask.array.Array` and `gwcs.wcs.WCS` objects. + + Parameters + ---------- + data : `numpy.ndarray` + The array holding the actual data in this object. + + wcs : `gwcs.wcs.WCS` + The WCS object containing the axes' information. + + uncertainty : any type, optional + Uncertainty in the dataset. Should have an attribute uncertainty_type + that defines what kind of uncertainty is stored, for example "std" + for standard deviation or "var" for variance. A metaclass defining + such an interface is NDUncertainty - but isn’t mandatory. If the uncertainty + has no such attribute the uncertainty is stored as UnknownUncertainty. + Defaults to None. + + mask : any type, optional + Mask for the dataset. Masks should follow the numpy convention + that valid data points are marked by False and invalid ones with True. + Defaults to None. + + meta : dict-like object, optional + Additional meta information about the dataset. If no meta is provided + an empty collections.OrderedDict is created. Default is None. + + unit : Unit-like or str, optional + Unit for the dataset. Strings that can be converted to a Unit are allowed. + Default is None. + + copy : bool, optional + Indicates whether to save the arguments as copy. True copies every attribute + before saving it while False tries to save every parameter as reference. + Note however that it is not always possible to save the input as reference. + Default is False. + + missing_axis : `list` of `bool` + Designates which axes in the WCS object do not have a corresponding axis in the data. + True means the axis is "missing", False means the axis corresponds to a data axis. + Ordering corresponds to the axis ordering in the WCS object, i.e. the reverse of the data. + For example, say the data's y-axis corresponds to latitude and its x-axis corresponds + to wavelength. In order to convert the y-axis to latitude, the WCS must contain + a "missing" longitude axis, as longitude and latitude are not separable.
""" + def __init__(self, data, uncertainty=None, mask=None, wcs=None, + meta=None, unit=None, copy=False, missing_axis=None): + + super().__init__(data, uncertainty, mask, wcs, meta, unit, copy) + + if self.wcs and missing_axis is None: + self.missing_axis = [False]*self.wcs.forward_transform.n_outputs + else: + self.missing_axis = missing_axis + + self.array_container = None + @classmethod def from_directory(cls, directory): """ @@ -64,7 +119,9 @@ def from_asdf(cls, filepath): wcs = asdf_tree['gwcs'] - return cls(data, wcs=wcs) + cls = cls(data, wcs=wcs) + cls.array_container = array_container + return cls @property def pixel_axes_names(self): diff --git a/dkist/dataset/mixins.py b/dkist/dataset/mixins.py index 93ca1f2c..fda92e25 100644 --- a/dkist/dataset/mixins.py +++ b/dkist/dataset/mixins.py @@ -14,13 +14,36 @@ class DatasetSlicingMixin(NDSlicingMixin): """ A class to override the wcs slicing behavior of `astropy.nddata.mixins.NDSlicingMixin`. """ + def _slice(self, item): + """ + Construct a set of keyword arguments to initialise a new (sliced) + instance of the class. This method is called in + `astropy.nddata.mixins.NDSlicingMixin.__getitem__`. + + This method extends the `~astropy.nddata.mixins.NDSlicingMixin` method + to add support for ``missing_axis`` and ``extra_coords`` and overwrites + the astropy handling of wcs slicing. + """ + kwargs = super()._slice(item) + + wcs, missing_axis = self._slice_wcs_missing_axes(item) + kwargs['wcs'] = wcs + kwargs['missing_axis'] = missing_axis + + return kwargs + + # Implement this to stop it throwing a warning. def _slice_wcs(self, item): + return + + def _slice_wcs_missing_axes(self, item): if self.wcs is None: - return None + return None, None if isinstance(self.wcs, gwcs.WCS): # Reverse the item so the pixel slice matches the cartesian WCS - return GWCSSlicer(self.wcs, copy=True, pixel_order=True)[item] - return self.wcs[item] + slicer = GWCSSlicer(self.wcs, copy=True, pixel_order=True) + return slicer[item] + return self.wcs[item], None class DatasetPlotMixin(NDCubePlotMixin): # pragma: no cover diff --git a/dkist/dataset/tests/test_dataset.py b/dkist/dataset/tests/test_dataset.py index c1a75c32..c7904733 100644 --- a/dkist/dataset/tests/test_dataset.py +++ b/dkist/dataset/tests/test_dataset.py @@ -172,7 +172,7 @@ def test_load_from_directory(): def test_from_directory_no_asdf(): with pytest.raises(ValueError) as e: - Dataset.from_directory(rootdir) + Dataset.from_directory(rootdir/"notadirectory") assert "No asdf file found" in str(e) @@ -188,12 +188,6 @@ def test_no_wcs_slice(dataset): assert ds.wcs is None -def test_random_wcs_slice(dataset): - dataset._wcs = "aslkdjalsjdkls" - ds = dataset[3] - assert ds.wcs == "k" - - def test_crop_few_slices(dataset_4d): sds = dataset_4d[0, 0] assert len(sds.wcs.input_frame.axes_order) diff --git a/dkist/io/fits.py b/dkist/io/fits.py index 9e6b9197..6db21e74 100644 --- a/dkist/io/fits.py +++ b/dkist/io/fits.py @@ -127,7 +127,7 @@ def _read_fits_array(self): """ Make sure we cache the header while we have the file open. 
""" - with fits.open(self.absolute_uri, memmap=True, do_not_scale_image_data=False) as hdul: + with fits.open(self.absolute_uri, memmap=True, do_not_scale_image_data=False, mode="denywrite") as hdul: hdul.verify('fix') hdu = hdul[self.fitsarray.target] if not self._fits_header: diff --git a/dkist/tests/generate_eit_test_dataset.py b/dkist/tests/generate_eit_test_dataset.py index 98b3eea4..ac6d6106 100644 --- a/dkist/tests/generate_eit_test_dataset.py +++ b/dkist/tests/generate_eit_test_dataset.py @@ -123,11 +123,13 @@ def main(): hcubemodel = spatial & timemodel sky_frame = cf.CelestialFrame(axes_order=(0, 1), name='helioprojective', - reference_frame=smap0.coordinate_frame) + reference_frame=smap0.coordinate_frame, + axes_names=("helioprojective longitude", "helioprojective latitude")) time_frame = cf.TemporalFrame(axes_order=(2, ), unit=u.s, - reference_time=Time(time_coords[0])) + reference_time=Time(time_coords[0]), + axes_names=("time",)) - sky_frame = cf.CompositeFrame([sky_frame, time_frame]) + sky_frame = cf.CompositeFrame([sky_frame, time_frame], name="world") detector_frame = cf.CoordinateFrame(name="detector", naxes=3, axes_order=(0, 1, 2), axes_type=("pixel", "pixel", "pixel"), diff --git a/dkist/wcs/slicer.py b/dkist/wcs/slicer.py index 1eec0ad1..e733139a 100644 --- a/dkist/wcs/slicer.py +++ b/dkist/wcs/slicer.py @@ -16,12 +16,13 @@ class FixedInputs(Model): _name = "FixedInputs" + def __init__(self, input_specification): self.input_specification = input_specification @property def inputs(self): - return tuple(f"n{i}" for i in range(len(self.input_specification)) if not self.input_specification[i]) + return tuple(f"n{i}" for i in range(len(self.input_specification)) if self.input_specification[i] is None) @property def outputs(self): @@ -43,7 +44,7 @@ def evaluate(self, *inputs): shape_arr = np.zeros(shape) for finp in self.input_specification: - if finp: + if finp is not None: if not shape: outputs.append(finp) # pragma: no cover else: @@ -55,10 +56,7 @@ def evaluate(self, *inputs): @property def inverse(self): - m = Identity(1) - for i in range(1, len(self.input_specification)): - m &= Identity(1) - return m + return Identity(len(self.input_specification)) class GWCSSlicer: @@ -95,9 +93,9 @@ def _get_frames(self): Return a list of frames which comprise the output frame. """ if hasattr(self.gwcs.output_frame, "frames"): - frames = self.gwcs.output_frame.frames + frames = deepcopy(self.gwcs.output_frame.frames) else: - frames = (self.gwcs.output_frame,) + frames = (deepcopy(self.gwcs.output_frame),) return frames def _get_coupled_axes(self): @@ -153,7 +151,24 @@ def _new_output_frame(self, axes): frames = list(frames) for axis in axes: drop_frame = axes_map[axis] - frames.remove(drop_frame) + # If we are removing coupled axes we might have already removed the frame + if drop_frame in frames: + frames.remove(drop_frame) + + # We now need to reindex the axes_order of all the frames to account + # for any removed axes. + for i, frame in enumerate(frames): + if i == 0: + start = i + else: + axes_order = frames[i-1].axes_order + start = axes_order[-1] + # Start can either be an int or a list/tuple here. + if not isinstance(start, int): + start = start[-1] # pragma: no cover # I can't work out how to hit this. + # Increment start for the next frame. 
+ start += 1 + frame._axes_order = tuple(range(start, start+frame.naxes)) if len(frames) == 1: return frames[0] @@ -229,14 +244,7 @@ def _sanitize(self, item): else: return item - def __getitem__(self, item): - """ - Once the item is sanitized, we fix the parameter if the item is an integer, - shift if the start is set on the slice or - do nothing to the axis otherwise. - """ - item = self._sanitize(item) - + def _convert_item_to_models(self, item, drop_all_non_separable): inputs = [] prepend = [] axes_to_drop = [] @@ -252,6 +260,8 @@ def __getitem__(self, item): if isinstance(ax, int): if self.separable[i]: axes_to_drop.append(i) + elif not self.separable[i] and drop_all_non_separable: + axes_to_drop.append(i) else: inputs.append(ax*input_units[i]) prepend.append(Identity(1)) @@ -262,13 +272,45 @@ def __getitem__(self, item): inputs.append(None) prepend.append(Identity(1)) + return inputs, prepend, axes_to_drop + + def __getitem__(self, item): + """ + Once the item is sanitized, we fix the parameter if the item is an integer, + shift if the start is set on the slice, or + do nothing to the axis otherwise. + """ + item = self._sanitize(item) + + drop_all_non_separable = all(isinstance(ax, int) for i, ax in enumerate(item) if not self.separable[i]) + + inputs, prepend, axes_to_drop = self._convert_item_to_models(item, drop_all_non_separable) + + missing_axes = [i is not None for i in inputs] + if self.pixel_order: + missing_axes = missing_axes[::-1] + model = self.gwcs.forward_transform axes_to_drop.sort(reverse=True) + skip = False + for drop_ax in axes_to_drop: + # If we are removing non-separable axes then we need to skip all + # but the first non-separable axis. + + # TODO: This assumes there is only one set of non-separable + # axes in the WCS. If there were more than one set of + # non-separable axes this would break.
+ if skip: + continue + skip = not self.separable[drop_ax] if drop_all_non_separable else skip + inp = model._tree.inputs[drop_ax] - trees = remove_input_frame(model._tree, inp) + trees = remove_input_frame(model._tree, inp, + remove_coupled_trees=drop_all_non_separable) model = re_model_trees(trees) + if not all([isinstance(a, Identity) for a in prepend]): model = self._list_to_compound(prepend) | model @@ -284,4 +326,4 @@ def __getitem__(self, item): # Update the gwcs self.gwcs._initialize_wcs(model, new_in_frame, new_out_frame) - return self.gwcs + return self.gwcs, missing_axes diff --git a/dkist/wcs/tests/test_slicer.py b/dkist/wcs/tests/test_slicer.py index 23732b9d..f4a15e40 100644 --- a/dkist/wcs/tests/test_slicer.py +++ b/dkist/wcs/tests/test_slicer.py @@ -1,20 +1,28 @@ -import pytest - import numpy as np +import pytest +import asdf import astropy.units as u import gwcs.coordinate_frames as cf from gwcs import WCS +from astropy.time import Time from astropy.coordinates import SkyCoord from astropy.modeling.models import Identity from sunpy.coordinates.frames import Helioprojective from dkist.conftest import spatial_like +from dkist.data.test import rootdir from dkist.wcs.slicer import GWCSSlicer - # Some fixtures used in this file are defined in conftest.py + +@pytest.fixture +def gwcs_5d(): + with asdf.open(str(rootdir / "5d_gwcs.asdf")) as f: + return f.tree['gwcs'] + + @pytest.fixture def gwcs_3d(): detector_frame = cf.CoordinateFrame( @@ -57,6 +65,11 @@ def slicer_3d(): return GWCSSlicer(gwcs_3d(), pixel_order=False) +@pytest.fixture +def slicer_5d(): + return GWCSSlicer(gwcs_5d(), pixel_order=False) + + def test_slicer_init(gwcs_3d, gwcs_1d): for gwcs in (gwcs_1d, gwcs_3d): slc = GWCSSlicer(gwcs) @@ -77,7 +90,6 @@ def test_get_axes_map(slicer_3d): def test_new_output_frame(slicer_1d, slicer_3d): - assert slicer_1d._new_output_frame(tuple()) is slicer_1d.gwcs.output_frame assert slicer_3d._new_output_frame(tuple()) is not slicer_1d.gwcs.output_frame new_frame = slicer_3d._new_output_frame(tuple()) @@ -89,45 +101,50 @@ def test_new_output_frame(slicer_1d, slicer_3d): def test_simple_slices(slicer_3d): - sl = slicer_3d[:, :, :] + sl, _ = slicer_3d[:, :, :] assert sl is slicer_3d.gwcs - outs = sl(10*u.pix, 10*u.pix, 10*u.pix) + outs = sl(10 * u.pix, 10 * u.pix, 10 * u.pix) assert len(outs) == 3 + sl.invert(*outs) def test_simple_slices2(slicer_3d): - sl = slicer_3d[:, :, 0] + sl, _ = slicer_3d[:, :, 0] assert sl.forward_transform.n_inputs == 2 assert isinstance(sl.output_frame, cf.CoordinateFrame) - outs = sl(10*u.pix, 10*u.pix) + outs = sl(10 * u.pix, 10 * u.pix) assert len(outs) == 2 + sl.invert(*outs) def test_simple_slices3(slicer_3d): - sl = slicer_3d[:, 10, :] + sl, _ = slicer_3d[:, 10, :] assert sl.forward_transform.n_inputs == 2 assert sl.forward_transform.n_outputs == 3 assert isinstance(sl.output_frame, cf.CompositeFrame) - outs = sl(10*u.pix, 10*u.pix) + outs = sl(10 * u.pix, 10 * u.pix) assert len(outs) == 3 + sl.invert(*outs) def test_simple_slices4(slicer_3d): - sl = slicer_3d[10] + sl, _ = slicer_3d[10] assert sl.forward_transform.n_inputs == 2 assert sl.forward_transform.n_outputs == 3 assert isinstance(sl.output_frame, cf.CompositeFrame) - outs = sl(10*u.pix, 10*u.pix) + outs = sl(10 * u.pix, 10 * u.pix) assert len(outs) == 3 + sl.invert(*outs) def test_simple_slices5(slicer_3d): - sl = slicer_3d[10:] + sl, _ = slicer_3d[10:] assert sl.forward_transform.n_inputs == 3 assert sl.forward_transform.n_outputs == 3 assert isinstance(sl.output_frame, cf.CompositeFrame) 
- outs = sl(10*u.pix, 10*u.pix, 10*u.pix) + outs = sl(10 * u.pix, 10 * u.pix, 10 * u.pix) assert len(outs) == 3 + sl.invert(*outs) def test_error_step(slicer_3d): @@ -141,7 +158,7 @@ def test_error_type(slicer_3d): def test_roundtrip(slicer_3d): - wcs = slicer_3d[10:, 10, 10] + wcs, _ = slicer_3d[10:, 10, 10] w = wcs(10 * u.pix, with_units=True) assert isinstance(w, SkyCoord) p = wcs.invert(w, with_units=True) @@ -152,9 +169,56 @@ def test_array_call(slicer_3d): """ Test that FixedInputs works with array inputs. """ - inp = [np.linspace(0, 10)*u.pix]*3 + inp = [np.linspace(0, 10) * u.pix] * 3 # Sanity check. slicer_3d.gwcs(*inp) - wcs2 = slicer_3d[10] + wcs2, _ = slicer_3d[10] x, y, z = wcs2(*inp[1:]) + + +# - Slice out both spatial dimensions + + +def test_5d_both_spatial(slicer_5d): + wcs, _ = slicer_5d[0, 0, :, 0, 0] + assert wcs.forward_transform.n_inputs == 1 + assert u.allclose(wcs((0, 1, 2) * u.pix, with_units=True), + [854.1105, 854.121, 854.1315] * u.nm) + + +# - Slice out all of the stokes axis (Table) + + +def test_5d_stokes_None(slicer_5d): + wcs, _ = slicer_5d[0, 0, 0, 0, :] + assert wcs.forward_transform.n_inputs == 1 + stokes = wcs(range(4) * u.pix, with_units=True) + assert isinstance(stokes, np.ndarray) + assert list(stokes) == ['I', 'Q', 'U', 'V'] + + +# - Slice out a range in the stokes axis (Table) + + +def test_5d_stokes_range(slicer_5d): + wcs, _ = slicer_5d[0, 0, 0, 0, 1:3] + assert wcs.forward_transform.n_inputs == 1 + stokes = wcs(range(3) * u.pix, with_units=True) + assert isinstance(stokes, np.ndarray) + assert list(stokes) == ['Q', 'U', 'V'] + + +# - Slice out one of the coupled axis + + +def test_5d_spatial_split(slicer_5d): + wcs, _ = slicer_5d[0, :, 0, :, 0] + assert wcs.forward_transform.n_inputs == 2 + assert wcs.forward_transform.n_outputs == 3 + coords = wcs(0 * u.pix, 0 * u.pix, with_units=True) + assert isinstance(coords[0], SkyCoord) + assert u.allclose(coords[0].Tx, -956.528 * u.arcsec) + assert u.allclose(coords[0].Ty, 817.281 * u.arcsec) + assert isinstance(coords[1], Time) + np.testing.assert_allclose(coords[1].jd, 2459849.5946575697) diff --git a/ez_setup.py b/ez_setup.py deleted file mode 100644 index 800c31ef..00000000 --- a/ez_setup.py +++ /dev/null @@ -1,414 +0,0 @@ -#!/usr/bin/env python - -""" -Setuptools bootstrapping installer. - -Maintained at https://github.com/pypa/setuptools/tree/bootstrap. - -Run this script to install or upgrade setuptools. - -This method is DEPRECATED. Check https://github.com/pypa/setuptools/issues/581 for more details. -""" - -import os -import shutil -import sys -import tempfile -import zipfile -import optparse -import subprocess -import platform -import textwrap -import contextlib - -from distutils import log - -try: - from urllib.request import urlopen -except ImportError: - from urllib2 import urlopen - -try: - from site import USER_SITE -except ImportError: - USER_SITE = None - -# 33.1.1 is the last version that supports setuptools self upgrade/installation. -DEFAULT_VERSION = "33.1.1" -DEFAULT_URL = "https://pypi.io/packages/source/s/setuptools/" -DEFAULT_SAVE_DIR = os.curdir -DEFAULT_DEPRECATION_MESSAGE = "ez_setup.py is deprecated and when using it setuptools will be pinned to {0} since it's the last version that supports setuptools self upgrade/installation, check https://github.com/pypa/setuptools/issues/581 for more info; use pip to install setuptools" - -MEANINGFUL_INVALID_ZIP_ERR_MSG = 'Maybe {0} is corrupted, delete it and try again.' 
- -log.warn(DEFAULT_DEPRECATION_MESSAGE.format(DEFAULT_VERSION)) - - -def _python_cmd(*args): - """ - Execute a command. - - Return True if the command succeeded. - """ - args = (sys.executable,) + args - return subprocess.call(args) == 0 - - -def _install(archive_filename, install_args=()): - """Install Setuptools.""" - with archive_context(archive_filename): - # installing - log.warn('Installing Setuptools') - if not _python_cmd('setup.py', 'install', *install_args): - log.warn('Something went wrong during the installation.') - log.warn('See the error message above.') - # exitcode will be 2 - return 2 - - -def _build_egg(egg, archive_filename, to_dir): - """Build Setuptools egg.""" - with archive_context(archive_filename): - # building an egg - log.warn('Building a Setuptools egg in %s', to_dir) - _python_cmd('setup.py', '-q', 'bdist_egg', '--dist-dir', to_dir) - # returning the result - log.warn(egg) - if not os.path.exists(egg): - raise IOError('Could not build the egg.') - - -class ContextualZipFile(zipfile.ZipFile): - - """Supplement ZipFile class to support context manager for Python 2.6.""" - - def __enter__(self): - return self - - def __exit__(self, type, value, traceback): - self.close() - - def __new__(cls, *args, **kwargs): - """Construct a ZipFile or ContextualZipFile as appropriate.""" - if hasattr(zipfile.ZipFile, '__exit__'): - return zipfile.ZipFile(*args, **kwargs) - return super(ContextualZipFile, cls).__new__(cls) - - -@contextlib.contextmanager -def archive_context(filename): - """ - Unzip filename to a temporary directory, set to the cwd. - - The unzipped target is cleaned up after. - """ - tmpdir = tempfile.mkdtemp() - log.warn('Extracting in %s', tmpdir) - old_wd = os.getcwd() - try: - os.chdir(tmpdir) - try: - with ContextualZipFile(filename) as archive: - archive.extractall() - except zipfile.BadZipfile as err: - if not err.args: - err.args = ('', ) - err.args = err.args + ( - MEANINGFUL_INVALID_ZIP_ERR_MSG.format(filename), - ) - raise - - # going in the directory - subdir = os.path.join(tmpdir, os.listdir(tmpdir)[0]) - os.chdir(subdir) - log.warn('Now working in %s', subdir) - yield - - finally: - os.chdir(old_wd) - shutil.rmtree(tmpdir) - - -def _do_download(version, download_base, to_dir, download_delay): - """Download Setuptools.""" - py_desig = 'py{sys.version_info[0]}.{sys.version_info[1]}'.format(sys=sys) - tp = 'setuptools-{version}-{py_desig}.egg' - egg = os.path.join(to_dir, tp.format(**locals())) - if not os.path.exists(egg): - archive = download_setuptools(version, download_base, - to_dir, download_delay) - _build_egg(egg, archive, to_dir) - sys.path.insert(0, egg) - - # Remove previously-imported pkg_resources if present (see - # https://bitbucket.org/pypa/setuptools/pull-request/7/ for details). - if 'pkg_resources' in sys.modules: - _unload_pkg_resources() - - import setuptools - setuptools.bootstrap_install_from = egg - - -def use_setuptools( - version=DEFAULT_VERSION, download_base=DEFAULT_URL, - to_dir=DEFAULT_SAVE_DIR, download_delay=15): - """ - Ensure that a setuptools version is installed. - - Return None. Raise SystemExit if the requested version - or later cannot be installed. - """ - to_dir = os.path.abspath(to_dir) - - # prior to importing, capture the module state for - # representative modules. 
- rep_modules = 'pkg_resources', 'setuptools' - imported = set(sys.modules).intersection(rep_modules) - - try: - import pkg_resources - pkg_resources.require("setuptools>=" + version) - # a suitable version is already installed - return - except ImportError: - # pkg_resources not available; setuptools is not installed; download - pass - except pkg_resources.DistributionNotFound: - # no version of setuptools was found; allow download - pass - except pkg_resources.VersionConflict as VC_err: - if imported: - _conflict_bail(VC_err, version) - - # otherwise, unload pkg_resources to allow the downloaded version to - # take precedence. - del pkg_resources - _unload_pkg_resources() - - return _do_download(version, download_base, to_dir, download_delay) - - -def _conflict_bail(VC_err, version): - """ - Setuptools was imported prior to invocation, so it is - unsafe to unload it. Bail out. - """ - conflict_tmpl = textwrap.dedent(""" - The required version of setuptools (>={version}) is not available, - and can't be installed while this script is running. Please - install a more recent version first, using - 'easy_install -U setuptools'. - - (Currently using {VC_err.args[0]!r}) - """) - msg = conflict_tmpl.format(**locals()) - sys.stderr.write(msg) - sys.exit(2) - - -def _unload_pkg_resources(): - sys.meta_path = [ - importer - for importer in sys.meta_path - if importer.__class__.__module__ != 'pkg_resources.extern' - ] - del_modules = [ - name for name in sys.modules - if name.startswith('pkg_resources') - ] - for mod_name in del_modules: - del sys.modules[mod_name] - - -def _clean_check(cmd, target): - """ - Run the command to download target. - - If the command fails, clean up before re-raising the error. - """ - try: - subprocess.check_call(cmd) - except subprocess.CalledProcessError: - if os.access(target, os.F_OK): - os.unlink(target) - raise - - -def download_file_powershell(url, target): - """ - Download the file at url to target using Powershell. - - Powershell will validate trust. - Raise an exception if the command cannot complete. 
- """ - target = os.path.abspath(target) - ps_cmd = ( - "[System.Net.WebRequest]::DefaultWebProxy.Credentials = " - "[System.Net.CredentialCache]::DefaultCredentials; " - '(new-object System.Net.WebClient).DownloadFile("%(url)s", "%(target)s")' - % locals() - ) - cmd = [ - 'powershell', - '-Command', - ps_cmd, - ] - _clean_check(cmd, target) - - -def has_powershell(): - """Determine if Powershell is available.""" - if platform.system() != 'Windows': - return False - cmd = ['powershell', '-Command', 'echo test'] - with open(os.path.devnull, 'wb') as devnull: - try: - subprocess.check_call(cmd, stdout=devnull, stderr=devnull) - except Exception: - return False - return True -download_file_powershell.viable = has_powershell - - -def download_file_curl(url, target): - cmd = ['curl', url, '--location', '--silent', '--output', target] - _clean_check(cmd, target) - - -def has_curl(): - cmd = ['curl', '--version'] - with open(os.path.devnull, 'wb') as devnull: - try: - subprocess.check_call(cmd, stdout=devnull, stderr=devnull) - except Exception: - return False - return True -download_file_curl.viable = has_curl - - -def download_file_wget(url, target): - cmd = ['wget', url, '--quiet', '--output-document', target] - _clean_check(cmd, target) - - -def has_wget(): - cmd = ['wget', '--version'] - with open(os.path.devnull, 'wb') as devnull: - try: - subprocess.check_call(cmd, stdout=devnull, stderr=devnull) - except Exception: - return False - return True -download_file_wget.viable = has_wget - - -def download_file_insecure(url, target): - """Use Python to download the file, without connection authentication.""" - src = urlopen(url) - try: - # Read all the data in one block. - data = src.read() - finally: - src.close() - - # Write all the data in one block to avoid creating a partial file. - with open(target, "wb") as dst: - dst.write(data) -download_file_insecure.viable = lambda: True - - -def get_best_downloader(): - downloaders = ( - download_file_powershell, - download_file_curl, - download_file_wget, - download_file_insecure, - ) - viable_downloaders = (dl for dl in downloaders if dl.viable()) - return next(viable_downloaders, None) - - -def download_setuptools( - version=DEFAULT_VERSION, download_base=DEFAULT_URL, - to_dir=DEFAULT_SAVE_DIR, delay=15, - downloader_factory=get_best_downloader): - """ - Download setuptools from a specified location and return its filename. - - `version` should be a valid setuptools version number that is available - as an sdist for download under the `download_base` URL (which should end - with a '/'). `to_dir` is the directory where the egg will be downloaded. - `delay` is the number of seconds to pause before an actual download - attempt. - - ``downloader_factory`` should be a function taking no arguments and - returning a function for downloading a URL to a target. - """ - # making sure we use the absolute path - to_dir = os.path.abspath(to_dir) - zip_name = "setuptools-%s.zip" % version - url = download_base + zip_name - saveto = os.path.join(to_dir, zip_name) - if not os.path.exists(saveto): # Avoid repeated downloads - log.warn("Downloading %s", url) - downloader = downloader_factory() - downloader(url, saveto) - return os.path.realpath(saveto) - - -def _build_install_args(options): - """ - Build the arguments to 'python setup.py install' on the setuptools package. - - Returns list of command line arguments. 
- """ - return ['--user'] if options.user_install else [] - - -def _parse_args(): - """Parse the command line for options.""" - parser = optparse.OptionParser() - parser.add_option( - '--user', dest='user_install', action='store_true', default=False, - help='install in user site package') - parser.add_option( - '--download-base', dest='download_base', metavar="URL", - default=DEFAULT_URL, - help='alternative URL from where to download the setuptools package') - parser.add_option( - '--insecure', dest='downloader_factory', action='store_const', - const=lambda: download_file_insecure, default=get_best_downloader, - help='Use internal, non-validating downloader' - ) - parser.add_option( - '--version', help="Specify which version to download", - default=DEFAULT_VERSION, - ) - parser.add_option( - '--to-dir', - help="Directory to save (and re-use) package", - default=DEFAULT_SAVE_DIR, - ) - options, args = parser.parse_args() - # positional arguments are ignored - return options - - -def _download_args(options): - """Return args for download_setuptools function from cmdline args.""" - return dict( - version=options.version, - download_base=options.download_base, - downloader_factory=options.downloader_factory, - to_dir=options.to_dir, - ) - - -def main(): - """Install or upgrade setuptools and EasyInstall.""" - options = _parse_args() - archive = download_setuptools(**_download_args(options)) - return _install(archive, _build_install_args(options)) - -if __name__ == '__main__': - sys.exit(main()) diff --git a/requirements.txt b/requirements.txt index 2893689d..3950f4cc 100644 --- a/requirements.txt +++ b/requirements.txt @@ -10,11 +10,11 @@ scipy matplotlib # ndcube -asdf +# asdf # git deps git+https://github.com/astropy/astropy -# git+https://github.com/spacetelescope/asdf +git+https://github.com/spacetelescope/asdf git+https://github.com/Cadair/gwcs@dkist git+https://github.com/Cadair/sunpy@asdf_tags git+https://github.com/sunpy/ndcube diff --git a/setup.cfg b/setup.cfg index 3193f79f..94405dd2 100644 --- a/setup.cfg +++ b/setup.cfg @@ -14,7 +14,8 @@ show-response = 1 [tool:pytest] minversion = 3.0 -norecursedirs = build docs/_build +testpaths = "dkist" "docs" +norecursedirs = ".tox" "build" "docs[\/]_build" "docs[\/]generated" "*.egg-info" "astropy_helpers" "examples" doctest_plus = enabled addopts = -p no:warnings @@ -39,6 +40,7 @@ exclude = extern,sphinx,*parsetab.py [flake8] max-line-length = 100 +ignore = I100,I102,I103,I104,I201 [yapf] column_limit = 100 @@ -55,7 +57,7 @@ edit_on_github = True github_project = DKISTDC/dkist # install_requires should be formatted as a comma-separated list, e.g.: # install_requires = astropy, scipy, matplotlib -install_requires = astropy +install_requires = astropy, dask[array] # version should be PEP386 compatible (http://www.python.org/dev/peps/pep-0386) version = 0.1.dev0 # Note: you will also need to change this in your package's __init__.py @@ -75,4 +77,37 @@ known_astro=astropy,sunpy,gwcs,asdf,ndcube multi_line_output=0 balanced_wrapping=True include_trailing_comma=false -length_sort=True \ No newline at end of file +length_sort=True + +[coverage:run] +source = dkist/ +omit = + dkist/_dkist_init* + dkist/conftest* + dkist/cython_version* + dkist/setup_package* + dkist/*/setup_package* + dkist/*/*/setup_package* + dkist/tests/* + dkist/*/tests/* + dkist/*/*/tests/* + dkist/version* + +[coverage:report] +exclude_lines = + # Have to re-enable the standard pragma + pragma: no cover + + # Don't complain about packages we have installed 
+ except ImportError + + # Don't complain if tests don't hit assertions + raise AssertionError + raise NotImplementedError + + # Don't complain about script hooks + def main\(.*\): + + # Ignore branches that don't pertain to this version of Python + pragma: py{ignore_python_version} +
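Note: as an end-to-end illustration of the behaviour the dataset and slicing changes in this diff combine to provide, a rough sketch along the lines of the tests above (the directory path is hypothetical; ``Dataset.from_directory`` and the ``missing_axis`` attribute are taken from the changes themselves):

    from dkist.dataset import Dataset

    # Load a dataset from a directory containing FITS files and the asdf
    # file written by dataset_from_fits.
    ds = Dataset.from_directory("/path/to/dataset")  # hypothetical path

    # Slicing re-slices the gwcs via GWCSSlicer and records which WCS axes
    # were dropped or fixed in the new missing_axis attribute.
    sub = ds[0]
    print(sub.wcs.input_frame.axes_order)
    print(sub.missing_axis)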