Skip to content

Commit

Permalink
Merge pull request #68 from Cadair/post_ndcube2_cleanup
Browse files Browse the repository at this point in the history
NDCube 2.0 Support
  • Loading branch information
Cadair authored Jan 7, 2020
2 parents 044a2f2 + f2e6d01 commit 296c879
Show file tree
Hide file tree
Showing 22 changed files with 462 additions and 225 deletions.
32 changes: 14 additions & 18 deletions azure-pipelines.yml
Original file line number Diff line number Diff line change
Expand Up @@ -9,10 +9,10 @@ variables:
# as a service connection (if it has not been already).
resources:
repositories:
- repository: sunpy
- repository: OpenAstronomy
type: github
endpoint: DKISTDC
name: sunpy/azure-pipelines-template
name: OpenAstronomy/azure-pipelines-templates
ref: master

trigger:
Expand All @@ -25,25 +25,21 @@ trigger:
# - '*post*'

jobs:
- template: run-tox-env.yml@sunpy
- template: run-tox-env.yml@OpenAstronomy
parameters:
name: Linux_37
os: linux
tox: py37 --
coverage: codecov
envs:
- macos: py36
name: macos_36

# - template: run-tox-env.yml@sunpy
# parameters:
# name: Windows_37
# os: windows
# tox: py37 --
- linux: py37
  name: linux_37

# NOTE: was "linus" — a typo. The template selects the OS by this key,
# and the publish job's dependsOn list references linux_38.
- linux: py38
  name: linux_38

- template: run-tox-env.yml@sunpy
parameters:
name: macOS_36
os: macos
tox: py36 --

- ${{ if startsWith(variables['Build.SourceBranch'], 'refs/tags/') }}:
- template: publish-pypi.yml@sunpy
- template: publish.yml@OpenAstronomy
parameters:
dependsOn: [Linux_37,Windows_37,macOS_36]
dependsOn: [linux_37, linux_38, macos_36]
1 change: 1 addition & 0 deletions changelog/68.bugfix.rst
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
Add support for dask 2+ and make that the minimum version
48 changes: 30 additions & 18 deletions dkist/asdf_maker/generator.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,15 +9,15 @@
import gwcs
import gwcs.coordinate_frames as cf
from astropy.io import fits
from astropy.modeling.models import Tabular1D
from astropy.table import Table
from astropy.time import Time
from gwcs.lookup_table import LookupTable
from sunpy.coordinates import Helioprojective
from sunpy.time import parse_time

from dkist.asdf_maker.helpers import (linear_spectral_model, references_from_filenames,
spatial_model_from_header, spectral_model_from_framewave,
time_model_from_date_obs)
from dkist.asdf_maker.helpers import (generate_lookup_table, linear_spectral_model,
references_from_filenames, spatial_model_from_header,
spectral_model_from_framewave, time_model_from_date_obs)
from dkist.dataset import Dataset
from dkist.io.array_containers import DaskFITSArrayContainer
from dkist.io.fits import AstropyFITSLoader
Expand Down Expand Up @@ -226,8 +226,7 @@ def make_stokes(self):
"""
name = self.header[f'DWNAME{self.n}']
self._frames.append(cf.StokesFrame(axes_order=(self._i,), name=name))
self._transforms.append(LookupTable([0, 1, 2, 3] * u.pixel))

self._transforms.append(generate_lookup_table([0, 1, 2, 3] * u.one, interpolation='nearest'))
self._i += 1

def make_temporal(self):
Expand Down Expand Up @@ -260,10 +259,13 @@ def make_spatial(self):
axes_names = [(self.header[f'DWNAME{nn}'].rsplit(' ')[1]) for nn in (self.n, self._n(i+1))]

obstime = Time(self.header['DATE-BGN'])
axes_types = ["lat" if "LT" in self.axes_types[i] else "lon", "lon" if "LN" in self.axes_types[i] else "lat"]
self._frames.append(cf.CelestialFrame(axes_order=(i, i+1), name=name,
reference_frame=Helioprojective(obstime=obstime),
axes_names=axes_names,
unit=self.get_units(self._i, self._i+1)))
unit=self.get_units(self._i, self._i+1),
axis_physical_types=(f"custom:pos.helioprojective.{axes_types[0]}",
f"custom:pos.helioprojective.{axes_types[1]}")))

self._transforms.append(spatial_model_from_header(self.header))

Expand Down Expand Up @@ -341,6 +343,21 @@ def make_sorted_table(headers, filenames):
return theaders[np.argsort(t, order=keys)]


def _preprocess_headers(headers, filenames):
    """
    Sort and validate the headers for a dataset.

    Returns the header table (with the ``headers`` and ``filenames``
    columns removed) along with the filenames and headers as arrays
    reordered into dataset (DS) order.
    """
    sorted_table = make_sorted_table(headers, filenames)
    validate_headers(sorted_table)

    # Capture the DS-ordered columns before stripping them from the table.
    ordered_filenames = np.array(sorted_table['filenames'])
    ordered_headers = np.array(sorted_table['headers'])
    sorted_table.remove_columns(["headers", "filenames"])

    return sorted_table, ordered_filenames, ordered_headers



def asdf_tree_from_filenames(filenames, asdf_filename, inventory=None, hdu=0,
relative_to=None, extra_inventory=None):
"""
Expand All @@ -355,24 +372,19 @@ def asdf_tree_from_filenames(filenames, asdf_filename, inventory=None, hdu=0,
hdu : `int`
The HDU to read from the FITS files.
"""
# In case filenames is a generator we cast to list.
filenames = list(filenames)

# headers is an iterator
headers = headers_from_filenames(filenames, hdu=hdu)

table_headers = make_sorted_table(headers, filenames)

validate_headers(table_headers)
table_headers, sorted_filenames, sorted_headers = _preprocess_headers(headers, filenames)

if not inventory:
inventory = generate_datset_inventory_from_headers(table_headers, asdf_filename)
if extra_inventory:
inventory.update(extra_inventory)

# Sort the filenames into DS order.
sorted_filenames = np.array(table_headers['filenames'])
sorted_headers = np.array(table_headers['headers'])

table_headers.remove_columns(["headers", "filenames"])

# Get the array shape
shape = tuple((headers[0][f'DNAXIS{n}'] for n in range(headers[0]['DNAXIS'],
headers[0]['DAAXES'], -1)))
Expand All @@ -381,7 +393,7 @@ def asdf_tree_from_filenames(filenames, asdf_filename, inventory=None, hdu=0,
hdu_index=hdu, relative_to=relative_to)

array_container = DaskFITSArrayContainer(reference_array, loader=AstropyFITSLoader)
ds = Dataset(array_container.array, gwcs_from_headers(sorted_headers), meta=inventory, header_table=table_headers)
ds = Dataset(array_container.array, gwcs_from_headers(sorted_headers), meta=inventory, headers=table_headers)

ds._array_container = array_container

Expand Down Expand Up @@ -501,7 +513,7 @@ def generate_datset_inventory_from_headers(headers, asdf_name):
constants = {
'frame_count': len(headers),
'bucket': 'data',
'asdf_object_key': asdf_name
'asdf_object_key': str(asdf_name)
}

output = {}
Expand Down
25 changes: 20 additions & 5 deletions dkist/asdf_maker/helpers.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,9 +7,8 @@
from asdf.tags.core.external_reference import ExternalArrayReference
from astropy.io.fits.hdu.base import BITPIX2DTYPE
from astropy.modeling.models import (AffineTransformation2D, Linear1D, Multiply,
Pix2Sky_TAN, RotateNative2Celestial, Shift)
Pix2Sky_TAN, RotateNative2Celestial, Shift, Tabular1D)
from astropy.time import Time
from gwcs.lookup_table import LookupTable

__all__ = ['make_asdf', 'time_model_from_date_obs', 'linear_time_model', 'linear_spectral_model',
'spatial_model_from_quantity', 'spatial_model_from_header', 'references_from_filenames']
Expand Down Expand Up @@ -134,7 +133,23 @@ def linear_time_model(cadence: u.s, reference_val: u.s = 0*u.s):
"""
if not reference_val:
reference_val = 0 * cadence.unit
return Linear1D(slope=cadence/(1*u.pix), intercept=reference_val)
return Linear1D(slope=cadence / (1 * u.pix), intercept=reference_val)


def generate_lookup_table(lookup_table, interpolation='linear', points_unit=u.pix, **kwargs):
    """
    Build a `~astropy.modeling.models.Tabular1D` model from a Quantity.

    The tabular points are the integer indices of ``lookup_table``
    expressed in ``points_unit``.  Extra keyword arguments are forwarded
    to `~astropy.modeling.models.Tabular1D` and override the defaults
    (``bounds_error=False``, ``fill_value=nan``, ``method=interpolation``).
    """
    if not isinstance(lookup_table, u.Quantity):
        raise TypeError("lookup_table must be a Quantity.")

    # One tabular point per element of the lookup table.
    index_points = np.arange(lookup_table.size) * points_unit

    # Defaults first, so explicit keyword arguments win the merge.
    table_kwargs = dict(bounds_error=False, fill_value=np.nan, method=interpolation)
    table_kwargs.update(kwargs)

    return Tabular1D(index_points, lookup_table, **table_kwargs)


def time_model_from_date_obs(date_obs, date_bgn=None):
Expand All @@ -160,7 +175,7 @@ def time_model_from_date_obs(date_obs, date_bgn=None):
return linear_time_model(cadence=slope, reference_val=intercept)
else:
print(f"creating tabular temporal axis. ddeltas: {ddelta}")
return LookupTable(deltas.to(u.s))
return generate_lookup_table(deltas.to(u.s))


def spectral_model_from_framewave(framewav):
Expand All @@ -181,7 +196,7 @@ def spectral_model_from_framewave(framewav):
return linear_spectral_model(slope, wave_bgn)
else:
print(f"creating tabular wavelength axis. ddeltas: {ddeltas}")
return LookupTable(framewav)
return generate_lookup_table(framewav)


def make_asdf(filename, *, dataset, **kwargs):
Expand Down
21 changes: 19 additions & 2 deletions dkist/asdf_maker/tests/test_generator.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,6 @@
import gwcs
import gwcs.coordinate_frames as cf
from astropy.modeling import Model, models
from gwcs.lookup_table import LookupTable

from dkist.asdf_maker.generator import (asdf_tree_from_filenames, dataset_from_fits,
gwcs_from_headers, headers_from_filenames,
Expand Down Expand Up @@ -62,7 +61,25 @@ def test_transform_models(wcs):
sms = wcs.forward_transform._leaflist
smtypes = [type(m) for m in sms]
assert sum(mt is models.Linear1D for mt in smtypes) == 2
assert sum(mt is LookupTable for mt in smtypes) == 1
assert sum(mt is models.Tabular1D for mt in smtypes) == 1


def test_array_container_shape(header_filenames):
    """The container is 5D and its shape matches that of its dask array."""
    from dkist.asdf_maker.generator import _preprocess_headers, references_from_filenames
    from dkist.io import DaskFITSArrayContainer, AstropyFITSLoader

    headers = headers_from_filenames(header_filenames, hdu=0)
    _, sorted_filenames, sorted_headers = _preprocess_headers(headers, header_filenames)

    # Dataset axes run from DNAXIS down to (but excluding) DAAXES.
    first = headers[0]
    shape = tuple(first[f'DNAXIS{n}'] for n in range(first['DNAXIS'], first['DAAXES'], -1))

    references = references_from_filenames(sorted_filenames, sorted_headers, array_shape=shape,
                                           hdu_index=0, relative_to=".")
    container = DaskFITSArrayContainer(references, loader=AstropyFITSLoader)

    assert len(container.shape) == 5
    assert container.shape == container.array.shape


def test_asdf_tree(header_filenames):
Expand Down
16 changes: 7 additions & 9 deletions dkist/asdf_maker/tests/test_helpers.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,10 +7,8 @@
import asdf
import astropy.units as u
from astropy.io import fits
from astropy.modeling import Model
from astropy.modeling.models import Linear1D
from astropy.modeling import Model, models
from astropy.time import Time
from gwcs.lookup_table import LookupTable

from dkist.asdf_maker.generator import asdf_tree_from_filenames, headers_from_filenames
from dkist.asdf_maker.helpers import (linear_spectral_model, linear_time_model, make_asdf,
Expand Down Expand Up @@ -56,29 +54,29 @@ def test_spatial_model_fail(header_filenames):

def test_linear_spectral():
lin = linear_spectral_model(10*u.nm, 0*u.nm)
assert isinstance(lin, Linear1D)
assert isinstance(lin, models.Linear1D)
assert u.allclose(lin.slope, 10*u.nm/u.pix)
assert u.allclose(lin.intercept, 0*u.nm)


def test_linear_time():
lin = linear_time_model(10*u.s)
assert isinstance(lin, Linear1D)
assert isinstance(lin, models.Linear1D)
assert u.allclose(lin.slope, 10*u.s/u.pix)
assert u.allclose(lin.intercept, 0*u.s)


def test_time_from_dateobs(header_filenames):
date_obs = [fits.getheader(f)['DATE-OBS'] for f in header_filenames]
time = time_model_from_date_obs(date_obs)
assert isinstance(time, Linear1D)
assert isinstance(time, models.Linear1D)


def test_time_from_dateobs_lookup(header_filenames):
date_obs = [fits.getheader(f)['DATE-OBS'] for f in header_filenames]
date_obs[5] = (Time(date_obs[5]) + 10*u.s).isot
time = time_model_from_date_obs(date_obs)
assert isinstance(time, LookupTable)
assert isinstance(time, models.Tabular1D)


def test_spectral_framewave(header_filenames):
Expand All @@ -92,10 +90,10 @@ def test_spectral_framewave(header_filenames):
framewave = [fits.getheader(h)['FRAMEWAV'] for h in header_filenames]

m = spectral_model_from_framewave(framewave[:nwave])
assert isinstance(m, Linear1D)
assert isinstance(m, models.Linear1D)

m2 = spectral_model_from_framewave(framewave)
assert isinstance(m2, LookupTable)
assert isinstance(m2, models.Tabular1D)


def test_make_asdf(header_filenames, tmpdir):
Expand Down
5 changes: 2 additions & 3 deletions dkist/conftest.py
Original file line number Diff line number Diff line change
Expand Up @@ -89,13 +89,12 @@ def dataset(array, identity_gwcs):
meta = {'bucket': 'data',
'dataset_id': 'test_dataset',
'asdf_object_key': 'test_dataset.asdf'}
ds = Dataset(array, wcs=identity_gwcs, meta=meta, header_table=Table())
ds = Dataset(array, wcs=identity_gwcs, meta=meta, headers=Table())
# Sanity checks
assert ds.data is array
assert ds.wcs is identity_gwcs

ds._array_container = DaskFITSArrayContainer([ExternalArrayReference('test1.fits', 0, 'float', (10, 10)),
ExternalArrayReference('test2.fits', 0, 'float', (10, 10))],
ds._array_container = DaskFITSArrayContainer([ExternalArrayReference('test1.fits', 0, 'float', array.shape)],
loader=AstropyFITSLoader)

return ds
Expand Down
Loading

0 comments on commit 296c879

Please sign in to comment.