diff --git a/.circleci/config.yml b/.circleci/config.yml index b1b230e8..434c6a19 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -2,7 +2,7 @@ apt-run: &apt-install name: Install apt packages command: | sudo apt update - sudo apt install -y graphviz npm node + sudo apt install -y graphviz deps-run: &cihelpers-install name: Install CI Helpers @@ -23,21 +23,13 @@ pip-run: &pip-install . venv/bin/activate pip install -q --user $PIP_DEPENDENCIES -report-status: &report-status - name: Report CI Status - command: | - sudo npm install -g commit-status - if [[ ! -z $CIRCLE_PR_NUMBER ]]; then - commit-status success docs "Your documentation built" $DOCS_URL - fi - version: 2 jobs: build: environment: - - PIP_DEPENDENCIES: "sphinx git+https://bitbucket.org/dkistdc/dkist-sphinx-theme.git sunpy asdf gwcs" + - PIP_DEPENDENCIES: "sphinx git+https://bitbucket.org/dkistdc/dkist-sphinx-theme.git sunpy git+https://github.com/spacetelescope/asdf gwcs dask[complete] astropy ndcube" - MAIN_CMD: "python setup.py" - - SETUP_CMD: "build_docs -W" + - SETUP_CMD: "build_docs -w" docker: - image: circleci/python:3.6 steps: @@ -52,5 +44,3 @@ jobs: - run: name: "Built documentation is available at:" command: DOCS_URL="${CIRCLE_BUILD_URL}/artifacts/${CIRCLE_NODE_INDEX}/${CIRCLE_WORKING_DIRECTORY/#\~/$HOME}/docs/_build/html/index.html"; echo $DOCS_URL - - - run: *report-status diff --git a/.travis.yml b/.travis.yml index 67069e03..1f6f1d0d 100644 --- a/.travis.yml +++ b/.travis.yml @@ -29,11 +29,11 @@ env: # List other runtime dependencies for the package that are available as # conda packages here. - - CONDA_DEPENDENCIES='' + - CONDA_DEPENDENCIES='asdf gwcs' # List other runtime dependencies for the package that are available as # pip packages here. - - PIP_DEPENDENCIES='pytest-astropy' + - PIP_DEPENDENCIES='pytest-astropy git+https://github.com/sunpy/ndcube.git' # Conda packages for affiliated packages are hosted in channel # "astropy" while builds for astropy LTS with recent numpy versions @@ -61,11 +61,6 @@ matrix: - os: linux env: SETUP_CMD='test --coverage' - # Check for sphinx doc build warnings - we do this first because it - # may run for a long time - - os: linux - env: SETUP_CMD='build_docs -w' - # Do a PEP8 test with pycodestyle - os: linux env: MAIN_CMD='pycodestyle dkist --count' SETUP_CMD='' diff --git a/ah_bootstrap.py b/ah_bootstrap.py index 786b8b14..65c38eab 100644 --- a/ah_bootstrap.py +++ b/ah_bootstrap.py @@ -19,9 +19,14 @@ contains an option called ``auto_use`` with a value of ``True``, it will automatically call the main function of this module called `use_astropy_helpers` (see that function's docstring for full details). -Otherwise no further action is taken (however, -``ah_bootstrap.use_astropy_helpers`` may be called manually from within the -setup.py script). +Otherwise no further action is taken and by default the system-installed version +of astropy-helpers will be used (however, ``ah_bootstrap.use_astropy_helpers`` +may be called manually from within the setup.py script). + +This behavior can also be controlled using the ``--auto-use`` and +``--no-auto-use`` command-line flags. For clarity, an alias for +``--no-auto-use`` is ``--use-system-astropy-helpers``, and we recommend using +the latter if needed. 
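+
+For example, to force use of an installed astropy-helpers rather than the
+submodule (the flag is stripped from ``sys.argv`` before the setup commands
+run)::
+
+    python setup.py --use-system-astropy-helpers build_docs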
Additional options in the ``[ah_boostrap]`` section of setup.cfg have the same names as the arguments to `use_astropy_helpers`, and can be used to configure @@ -47,14 +52,7 @@ from configparser import ConfigParser, RawConfigParser -if sys.version_info[0] < 3: - _str_types = (str, unicode) - _text_type = unicode - PY3 = False -else: - _str_types = (str, bytes) - _text_type = str - PY3 = True +_str_types = (str, bytes) # What follows are several import statements meant to deal with install-time @@ -137,7 +135,6 @@ from setuptools import Distribution from setuptools.package_index import PackageIndex -from setuptools.sandbox import run_setup from distutils import log from distutils.debug import DEBUG @@ -146,6 +143,7 @@ # TODO: Maybe enable checking for a specific version of astropy_helpers? DIST_NAME = 'astropy-helpers' PACKAGE_NAME = 'astropy_helpers' +UPPER_VERSION_EXCLUSIVE = None # Defaults for other options DOWNLOAD_IF_NEEDED = True @@ -177,7 +175,7 @@ def __init__(self, path=None, index_url=None, use_git=None, offline=None, if not (isinstance(path, _str_types) or path is False): raise TypeError('path must be a string or False') - if PY3 and not isinstance(path, _text_type): + if not isinstance(path, str): fs_encoding = sys.getfilesystemencoding() path = path.decode(fs_encoding) # path to unicode @@ -287,6 +285,18 @@ def parse_command_line(cls, argv=None): config['offline'] = True argv.remove('--offline') + if '--auto-use' in argv: + config['auto_use'] = True + argv.remove('--auto-use') + + if '--no-auto-use' in argv: + config['auto_use'] = False + argv.remove('--no-auto-use') + + if '--use-system-astropy-helpers' in argv: + config['auto_use'] = False + argv.remove('--use-system-astropy-helpers') + return config def run(self): @@ -464,9 +474,10 @@ def _directory_import(self): # setup.py exists we can generate it setup_py = os.path.join(path, 'setup.py') if os.path.isfile(setup_py): - with _silence(): - run_setup(os.path.join(path, 'setup.py'), - ['egg_info']) + # We use subprocess instead of run_setup from setuptools to + # avoid segmentation faults - see the following for more details: + # https://github.com/cython/cython/issues/2104 + sp.check_output([sys.executable, 'setup.py', 'egg_info'], cwd=path) for dist in pkg_resources.find_distributions(path, True): # There should be only one... @@ -501,16 +512,32 @@ def get_option_dict(self, command_name): if version: req = '{0}=={1}'.format(DIST_NAME, version) else: - req = DIST_NAME + if UPPER_VERSION_EXCLUSIVE is None: + req = DIST_NAME + else: + req = '{0}<{1}'.format(DIST_NAME, UPPER_VERSION_EXCLUSIVE) attrs = {'setup_requires': [req]} + # NOTE: we need to parse the config file (e.g. 
setup.cfg) to make sure + # it honours the options set in the [easy_install] section, and we need + # to explicitly fetch the requirement eggs as setup_requires does not + # get honored in recent versions of setuptools: + # https://github.com/pypa/setuptools/issues/1273 + try: - if DEBUG: - _Distribution(attrs=attrs) - else: - with _silence(): - _Distribution(attrs=attrs) + + context = _verbose if DEBUG else _silence + with context(): + dist = _Distribution(attrs=attrs) + try: + dist.parse_config_files(ignore_option_errors=True) + dist.fetch_build_eggs(req) + except TypeError: + # On older versions of setuptools, ignore_option_errors + # doesn't exist, and the above two lines are not needed + # so we can just continue + pass # If the setup_requires succeeded it will have added the new dist to # the main working_set @@ -791,9 +818,9 @@ def run_cmd(cmd): stdio_encoding = 'latin1' # Unlikely to fail at this point but even then let's be flexible - if not isinstance(stdout, _text_type): + if not isinstance(stdout, str): stdout = stdout.decode(stdio_encoding, 'replace') - if not isinstance(stderr, _text_type): + if not isinstance(stderr, str): stderr = stderr.decode(stdio_encoding, 'replace') return (p.returncode, stdout, stderr) @@ -846,6 +873,10 @@ def flush(self): pass +@contextlib.contextmanager +def _verbose(): + yield + @contextlib.contextmanager def _silence(): """A context manager that silences sys.stdout and sys.stderr.""" diff --git a/astropy_helpers b/astropy_helpers index d23a53f4..09bbe958 160000 --- a/astropy_helpers +++ b/astropy_helpers @@ -1 +1 @@ -Subproject commit d23a53f46dd1c3703e5eee63dca3f53bd18a4e8b +Subproject commit 09bbe9583e70ed1804f1051a3aaca1640d08c6f8 diff --git a/dkist/dataset/dataset.py b/dkist/dataset/dataset.py index 9330f105..ca326322 100644 --- a/dkist/dataset/dataset.py +++ b/dkist/dataset/dataset.py @@ -1,26 +1,39 @@ +import copy import glob import os.path from pathlib import Path +import asdf +import numpy as np + +import astropy.units as u + from ndcube.ndcube import NDCubeBase -class Dataset(NDCubeBase): - """ - Load a DKIST dataset. +from dkist.dataset.mixins import DatasetPlotMixin +from dkist.io import DaskFITSArrayContainer, AstropyFITSLoader - Parameters - ---------- +__all__ = ['Dataset'] + + +class Dataset(DatasetPlotMixin, NDCubeBase): + """ + The base class for DKIST datasets. - directory : `str` - The directory holding the dataset. + This class is backed by `dask.array.Array` and `gwcs.wcs.WCS` objects. """ - def __init__(self, directory): + @classmethod + def from_directory(cls, directory): + """ + Construct a `~dkist.dataset.Dataset` from a directory containing one + asdf file and a collection of FITS files. + """ if not os.path.isdir(directory): raise ValueError("directory argument must be a directory") - self.base_path = Path(directory) - asdf_files = glob.glob(self.base_path / "*.asdf") + base_path = Path(directory) + asdf_files = glob.glob(str(base_path / "*.asdf")) if not asdf_files: raise ValueError("No asdf file found in directory.") @@ -28,21 +41,89 @@ def __init__(self, directory): raise NotImplementedError("Multiple asdf files found in this" " directory. Can't handle this yet.") - self.asdf_file = asdf_files[0] + asdf_file = asdf_files[0] - # super().__init__(self, data, wcs) + with asdf.AsdfFile.open(asdf_file) as ff: + # TODO: without this it segfaults on access + asdf_tree = copy.deepcopy(ff.tree) + pointer_array = np.array(ff.tree['dataset']) - """ - Methods to be implemented. 
-    """
-    def pixel_to_world(self, quantity_axis_list, origin=0):
-        raise NotImplementedError()
+        array_container = DaskFITSArrayContainer(pointer_array, loader=AstropyFITSLoader,
+                                                 basepath=str(base_path))
+
+        data = array_container.array
+
+        wcs = asdf_tree['gwcs']

-    def world_to_pixel(self, quantity_axis_list, origin=0):
+        return cls(data, wcs=wcs)
+
+    def __repr__(self):
+        """
+        Overload the NDData repr because it does not play nice with the dask delayed io.
+        """
+        prefix = self.__class__.__name__ + '('
+        body = str(self.data)
+        return ''.join([prefix, body, ')'])
+
+    def pixel_to_world(self, *quantity_axis_list):
+        """
+        Convert pixel coordinates to world (data) coordinates using
+        `~gwcs.wcs.WCS`.
+
+        Parameters
+        ----------
+        quantity_axis_list : iterable
+            An iterable of `~astropy.units.Quantity` objects in pixel units (``pix``).
+            Note that these quantities must be entered as separate arguments, not as one list.
+
+        Returns
+        -------
+        coord : `tuple`
+            A tuple of arrays containing the output coordinates.
+        """
+        return tuple(self.wcs(*quantity_axis_list, output='numericals_plus'))
+
+    def world_to_pixel(self, *quantity_axis_list):
+        """
+        Convert world (data) coordinates to pixel coordinates using
+        `~gwcs.wcs.WCS.invert`.
+
+        Parameters
+        ----------
+        quantity_axis_list : iterable
+            An iterable of `~astropy.units.Quantity` objects.
+            Note that these quantities must be entered as separate arguments, not as one list.
+
+        Returns
+        -------
+        coord : `tuple`
+            A tuple of arrays containing the output coordinates.
+        """
+        return tuple(self.wcs.invert(*quantity_axis_list, output="numericals_plus"))
+
+    def world_axis_physical_types(self):
         raise NotImplementedError()

+    @property
     def dimensions(self):
-        raise NotImplementedError()
+        """
+        The dimensions of the data as a `~astropy.units.Quantity`.
+        """
+        return u.Quantity(self.data.shape, unit=u.pix)

-    def crop_by_coords(self, lower_left_corner, dimension_widths):
-        raise NotImplementedError()
+    def crop_by_coords(self, min_coord_values, interval_widths):
+        # The docstring is defined in NDDataBase
+
+        n_dim = len(self.dimensions)
+        if not (len(min_coord_values) == len(interval_widths) == n_dim):
+            raise ValueError("min_coord_values and interval_widths must have "
+                             "the same number of elements as the number of data dimensions.")
+        # Convert coords of lower left corner to pixel units.
+        lower_pixels = self.world_to_pixel(*min_coord_values)
+        upper_pixels = self.world_to_pixel(*[min_coord_values[i] + interval_widths[i]
+                                             for i in range(len(min_coord_values))])
+        # Round pixel values to nearest integer.
+        lower_pixels = [int(np.rint(lp.value)) for lp in lower_pixels]
+        upper_pixels = [int(np.rint(up.value)) for up in upper_pixels]
+        item = tuple([slice(lower_pixels[i], upper_pixels[i]) for i in range(n_dim)])
+        return self.data[item]
diff --git a/dkist/dataset/mixins.py b/dkist/dataset/mixins.py
new file mode 100644
index 00000000..04d882a5
--- /dev/null
+++ b/dkist/dataset/mixins.py
@@ -0,0 +1,71 @@
+import matplotlib.pyplot as plt
+
+from sunpy.visualization.imageanimator import ImageAnimator
+from ndcube.mixins import NDCubePlotMixin
+
+__all__ = ['DatasetPlotMixin']
+
+
+class DatasetPlotMixin(NDCubePlotMixin):
+    """
+    Handle plotting operations for Dataset.
+    """
+
+    def _plot_3D_cube(self, image_axes=None, unit_x_axis=None, unit_y_axis=None,
+                      axis_ranges=None, **kwargs):
+        """
+        Plots an interactive visualization of this cube, with sliders to move
+        through the remaining axes while two axes are shown as an image.
+
+        Parameters other than data and wcs are passed to ImageAnimator, which
+        in turn passes them to imshow.
+
+        Parameters
+        ----------
+        image_axes: `list`
+            The two axes that make the image.
+            Like [-1, -2] this implies the cube instance's -1 dimension
+            will be the x-axis and the -2 dimension will be the y-axis.
+
+        unit_x_axis: `astropy.units.Unit`
+            The unit of the x axis.
+
+        unit_y_axis: `astropy.units.Unit`
+            The unit of the y axis.
+
+        axis_ranges: `list` of physical coordinates for the array, or `None`
+            If `None`, array indices will be used for all axes.
+            If a list, it should contain one element for each axis of the numpy array.
+            For the image axes a [min, max] pair should be specified, which will be
+            passed to :func:`matplotlib.pyplot.imshow` as the extent.
+            For the slider axes a [min, max] pair can be specified, or an array the
+            same length as the axis which will provide all values for that slider.
+            If `None` is specified for an axis then the array indices will be used
+            for that axis.
+        """
+        if not image_axes:
+            image_axes = [-1, -2]
+        i = ImageAnimator(self.data, image_axes=image_axes,
+                          axis_ranges=axis_ranges, **kwargs)
+        return i
+
+    def _plot_2D_cube(self, axes=None, image_axes=None, **kwargs):
+        """
+        Plots a 2D image onto the current axes.
+        Keyword arguments are passed on to matplotlib.
+
+        Parameters
+        ----------
+        axes: `astropy.visualization.wcsaxes.core.WCSAxes` or `None`
+            The axes to plot onto. If `None`, the current axes will be used.
+
+        image_axes: `list`
+            The first axis in the WCS object will become the first axis of image_axes and
+            the second axis in the WCS object will become the second axis of image_axes.
+            Default: ['x', 'y']
+        """
+        if not image_axes:
+            image_axes = ['x', 'y']
+        if axes is None:
+            axes = plt.gca()
+        plot = axes.imshow(self.data, **kwargs)
+        return plot
diff --git a/dkist/io/__init__.py b/dkist/io/__init__.py
new file mode 100644
index 00000000..facf55a9
--- /dev/null
+++ b/dkist/io/__init__.py
@@ -0,0 +1,2 @@
+from .fits import BaseFITSLoader, AstropyFITSLoader
+from .dask import BaseFITSArrayContainer, NumpyFITSArrayContainer, DaskFITSArrayContainer
diff --git a/dkist/io/dask.py b/dkist/io/dask.py
index 430b3ea4..4e584717 100644
--- a/dkist/io/dask.py
+++ b/dkist/io/dask.py
@@ -6,8 +6,10 @@
 import numpy as np
 import dask.array as da
+from asdf.tags.core.external_reference import ExternalArrayReference

-from .fits import BaseFITSLoader
+
+__all__ = ['BaseFITSArrayContainer', 'NumpyFITSArrayContainer', 'DaskFITSArrayContainer']


 class BaseFITSArrayContainer(metaclass=abc.ABCMeta):
@@ -16,9 +18,7 @@ class BaseFITSArrayContainer(metaclass=abc.ABCMeta):
     contiguous array type from them.
""" - def __init__(self, reference_array, *, loader): - if not isinstance(loader, BaseFITSLoader): - raise ValueError("loader must be an instance of BaseFITSLoader") + def __init__(self, reference_array, *, loader, **kwargs): reference_array = np.asarray(reference_array, dtype=object) @@ -30,7 +30,7 @@ def __init__(self, reference_array, *, loader): loader_array = np.empty_like(reference_array, dtype=object) for i, ele in enumerate(reference_array.flat): - loader_array.flat[i] = loader(ele) + loader_array.flat[i] = loader(ele, **kwargs) self.loader_array = loader_array def _check_contents(self, reference_array): @@ -38,7 +38,7 @@ def _check_contents(self, reference_array): dtype = reference_array.flat[0].dtype for i, ele in enumerate(reference_array.flat): # TODO: Make these not asserts - # assert isinstance(ele, ExternalArrayReference) + assert isinstance(ele, ExternalArrayReference) assert ele.dtype == dtype assert ele.shape == shape @@ -70,7 +70,7 @@ class DaskFITSArrayContainer(BaseFITSArrayContainer): @property def array(self): - chunk = tuple(self.reference_array.flat[0].shape) + chunk = tuple(self.loader_array.flat[0].shape) aa = map( - partial(da.from_array, chunks=chunk), self.reference_array.flat) + partial(da.from_array, chunks=chunk), self.loader_array.flat) return da.stack(list(aa)).reshape(self.shape) diff --git a/dkist/io/fits.py b/dkist/io/fits.py index 77b04aff..7613de9c 100644 --- a/dkist/io/fits.py +++ b/dkist/io/fits.py @@ -3,15 +3,19 @@ minimise (virtual) memory usage and the number of open files. """ +import os import abc from astropy.io import fits +__all__ = ['BaseFITSLoader', 'AstropyFITSLoader'] + class BaseFITSLoader(metaclass=abc.ABCMeta): - def __init__(self, externalarray): + def __init__(self, externalarray, basepath=None): self.fitsarray = externalarray + self.basepath = basepath # Private cache self._array = None self._fits_header = None @@ -23,14 +27,14 @@ def __repr__(self): # repr alone should not force loading of the data if self._array is None: return "".format( - self.fitsarray.filename, self.fitsarray.shape, self.fitsarray.dtype) + self.fitsarray.fileuri, self.fitsarray.shape, self.fitsarray.dtype) return repr(self._array) def __str__(self): # str alone should not force loading of the data if self._array is None: return "".format( - self.fitsarray.filename, self.fitsarray.shape, self.fitsarray.dtype) + self.fitsarray.fileuri, self.fitsarray.shape, self.fitsarray.dtype) return str(self._array) def __array__(self): @@ -39,6 +43,13 @@ def __array__(self): def __getitem__(self, slc): return self.fits_array[slc] + @property + def absolute_uri(self): + if self.basepath: + return os.path.join(self.basepath, self.fitsarray.fileuri) + else: + return self.fitsarray.fileuri + @property def fits_header(self): if not self._fits_header: @@ -51,8 +62,8 @@ def fits_array(self): This method opens the FITS file and returns a reference to the desired array. """ - if not self._array: - self._array = self._read_array() + if self._array is None: + self._array = self._read_fits_array() return self._array @abc.abstractmethod @@ -78,21 +89,21 @@ def _read_fits_array(self): """ -class AstropyFITSLoader: +class AstropyFITSLoader(BaseFITSLoader): """ A class that resolves FITS references in a lazy way. 
""" def _read_fits_array(self): - with fits.open(self.fitsarray.filename, memmap=True) as hdul: + with fits.open(self.absolute_uri, memmap=True, do_not_scale_image_data=False) as hdul: hdul.verify('fix') - hdu = hdul[self.fitsarray.hdu_index] + hdu = hdul[self.fitsarray.target] if not self._fits_header: self._fits_header = hdu.header return hdu.data def _read_fits_header(self): - with fits.open(self.fitsarray.filename) as hdul: + with fits.open(self.absolute_uri) as hdul: hdul.verify('fix') - hdu = hdul[self.fitsarray.hdu_index] + hdu = hdul[self.fitsarray.target] return hdu.header diff --git a/dkist/tests/generate_aia_dataset.py b/dkist/tests/generate_aia_dataset.py new file mode 100644 index 00000000..907f8db9 --- /dev/null +++ b/dkist/tests/generate_aia_dataset.py @@ -0,0 +1,203 @@ +import os +import glob +from pathlib import Path + +import asdf +import numpy as np +import astropy.units as u +from astropy.io import fits +from astropy.time import Time +from astropy.modeling.models import Shift, AffineTransformation2D, Pix2Sky_TAN, RotateNative2Celestial, Scale + +import gwcs +from gwcs import coordinate_frames as cf +from gwcs.lookup_table import LookupTable + +import sunpy.map +from sunpy.time import parse_time +from sunpy.net import Fido, attrs as a +from sunpy.net.jsoc import JSOCClient + + +def map_to_transform(smap): + # crval1u, crval2u = smap.reference_coordinate.Tx, smap.reference_coordinate.Ty + # cdelt1u, cdelt2u = smap.scale + # pcu = smap.rotation_matrix * u.arcsec + + # # First, shift the reference pixel from FITS (1) to Python (0) indexing + # crpix1, crpix2 = u.Quantity(smap.reference_pixel) - 1 * u.pixel + # # Then FITS WCS uses the negative of this value as the shift. + # shiftu = Shift(-crpix1) & Shift(-crpix2) + + # # Next we define the Affine Transform. + # # This also includes the pixel scale operation by using equivalencies + # scale_e = {a: u.pixel_scale(scale) for a, scale in zip('xy', smap.scale)} + # rotu = AffineTransformation2D(pcu, translation=(0, 0) * u.arcsec) + # rotu.input_units_equivalencies = scale_e + + # # Handle the projection + # tanu = Pix2Sky_TAN() + + # # Rotate from native spherical to celestial spherical coordinates + # skyrotu = RotateNative2Celestial(crval1u, crval2u, 180 * u.deg) + + # # Combine the whole pipeline into one compound model + # transu = shiftu | rotu | tanu | skyrotu + + crpix1u, crpix2u = u.Quantity(smap.reference_pixel)-1*u.pixel + crval1u, crval2u = smap.reference_coordinate.Tx, smap.reference_coordinate.Ty + cdelt1u, cdelt2u = smap.scale + pcu = smap.rotation_matrix * u.arcsec + shiftu = Shift(-crpix1u) & Shift(-crpix2u) + scaleu = Scale(cdelt1u) & Scale(cdelt2u) + rotu = AffineTransformation2D(pcu, translation=(0, 0)*u.arcsec) + tanu = Pix2Sky_TAN() + skyrotu = RotateNative2Celestial(crval1u, crval2u, 180*u.deg) + transu = shiftu | scaleu | rotu | tanu | skyrotu + transu.rename("spatial") + + return transu + + +def references_from_filenames(filename_array, relative_to=None): + """ + Given an array of paths to FITS files create a set of nested lists of + `asdf.external_reference.ExternalArrayReference` objects with the same + shape. 
+ """ + + from astropy.io.fits.hdu.base import BITPIX2DTYPE + from asdf.tags.core.external_reference import ExternalArrayReference + + reference_array = np.empty_like(cube, dtype=object) + for i, filepath in enumerate(filename_array.flat): + with fits.open(filepath) as hdul: + hdu_index = 1 + hdu = hdul[hdu_index] + dtype = BITPIX2DTYPE[hdu.header['BITPIX']] + shape = tuple(reversed(hdu.shape)) + + # Convert paths to relative paths + relative_path = filepath + if relative_to: + relative_path = os.path.relpath(filepath, relative_to) + + reference_array.flat[i] = ExternalArrayReference( + relative_path, hdu_index, dtype, shape) + + return reference_array.tolist() + + +path = Path('~/sunpy/data/jsocflare/').expanduser() +files = glob.glob(str(path / '*.fits')) + +requestid = 'JSOC_20180111_1429' + +if not files: + if requestid: + c = JSOCClient() + filesd = c.get_request( + requestid, path=str(path), overwrite=False).wait() + files = [] + for f in filesd.values(): + files.append(f['path']) + else: + results = Fido.search( + a.jsoc.Time('2017-09-06T12:00:00', '2017-09-06T12:02:00'), + a.jsoc.Series('aia.lev1_euv_12s'), a.jsoc.Segment('image'), + a.jsoc.Notify("stuart@cadair.com")) + + print(results) + + files = Fido.fetch(results, path=str(path)) + +files.sort() +files = np.array(files) + +# For each image get: +# the index +inds = [] +# the time +times = [] +# the dt from the first image +seconds = [] +# the wavelength +waves = [] + +for i, filepath in enumerate(files): + with fits.open(filepath) as hdul: + header = hdul[1].header + time = parse_time(header['DATE-OBS']) + if i == 0: + root_header = header + start_time = time + inds.append(i) + times.append(time) + seconds.append((time - start_time).total_seconds()) + waves.append(header['WAVELNTH']) + +# Construct an array and sort it by wavelength and time +arr = np.array((inds, seconds, waves)).T +sorter = np.lexsort((arr[:, 1], arr[:, 2])) + +# Using this double-sorted array get the list indicies +list_sorter = np.array(arr[sorter][:, 0], dtype=int) + +# Calculate the desired shape of the output array +n_waves = len(list(set(waves))) +shape = (n_waves, len(files) // n_waves) + +# Construct a 2D array of filenames +cube = files[list_sorter].reshape(shape) + +# Extract a list of coordinates in time and wavelength +# this assumes all wavelength images are taken at the same time +time_coords = np.array( + [t.isoformat() for t in times])[list_sorter].reshape(shape)[0, :] +wave_coords = np.array(waves)[list_sorter].reshape(shape)[:, 0] + +smap0 = sunpy.map.Map(files[0]) +spatial = map_to_transform(smap0) + +timemodel = LookupTable(lookup_table=seconds[:shape[1]]*u.s) +wavemodel = LookupTable(lookup_table=waves[:shape[0]]*u.AA) + +hcubemodel = wavemodel & timemodel & spatial + +wave_frame = cf.SpectralFrame(axes_order=(0, ), unit=u.AA) +time_frame = cf.TemporalFrame( + axes_order=(1, ), unit=u.s, reference_time=Time(time_coords[0])) +sky_frame = cf.CelestialFrame(axes_order=(2, 3), name='helioprojective', reference_frame=smap0.coordinate_frame) + +sky_frame = cf.CompositeFrame([wave_frame, time_frame, sky_frame]) + +wcs = gwcs.wcs.WCS(forward_transform=hcubemodel, output_frame=sky_frame) + +print(repr(wcs)) + +print(wcs(*[1*u.pix]*4, output="numericals_plus")) + +ea = references_from_filenames(cube, relative_to=str(path)) + +crpix1u, crpix2u = u.Quantity(smap0.reference_pixel)-1*u.pixel +shiftu = Shift(-crpix1u) & Shift(-crpix2u) +tree = { + 'gwcs': wcs, + 'dataset': ea, +} + +with asdf.AsdfFile(tree) as ff: + # ff.write_to("test.asdf") + 
+    filename = str(path / "aia_{}.asdf".format(time_coords[0]))
+    ff.write_to(filename)
+    print("Saved to : {}".format(filename))
+
+
+# import sys; sys.exit(0)
+
+from dkist.dataset import Dataset
+
+ds = Dataset.from_directory(str(path))
+print(repr(ds))
+print(repr(ds.wcs))
+print(ds.wcs(*[1*u.pix]*4, output="numericals_plus"))
diff --git a/docs/conf.py b/docs/conf.py
index 98ccd3af..5bbe3f13 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -51,6 +51,12 @@
 conf.read([os.path.join(os.path.dirname(__file__), '..', 'setup.cfg')])
 setup_cfg = dict(conf.items('metadata'))

+intersphinx_mapping['sunpy'] = ('http://docs.sunpy.org/en/stable/', None)
+intersphinx_mapping['ndcube'] = ('http://docs.sunpy.org/projects/ndcube/en/stable/', None)
+intersphinx_mapping['gwcs'] = ('http://gwcs.readthedocs.io/en/latest/', None)
+intersphinx_mapping['asdf'] = ('http://asdf.readthedocs.io/en/latest/', None)
+intersphinx_mapping['dask'] = ('http://dask.pydata.org/en/latest/', None)
+
 # -- General configuration ----------------------------------------------------

 # By default, highlight as Python 3.
diff --git a/docs/dataset.rst b/docs/dataset.rst
new file mode 100644
index 00000000..72f3b458
--- /dev/null
+++ b/docs/dataset.rst
@@ -0,0 +1,14 @@
+.. _dkist-dataset:
+
+Dataset Module
+==============
+
+The `~dkist.dataset.Dataset` class is provided as a loader for a whole DKIST
+dataset. Given a directory containing one asdf file and a collection of FITS
+files, the data are loaded lazily into a single dask array with gwcs coordinates.
+
+
+.. automodapi:: dkist.dataset
+
+.. automodapi:: dkist.dataset.mixins
diff --git a/docs/dkist/index.rst b/docs/dkist/index.rst
deleted file mode 100644
index 31fe30b0..00000000
--- a/docs/dkist/index.rst
+++ /dev/null
@@ -1,10 +0,0 @@
-*******************
-dkist Documentation
-*******************
-
-This is the documentation for dkist.
-
-Reference/API
-=============
-
-.. automodapi:: dkist
diff --git a/docs/index.rst b/docs/index.rst
index ec42dcfe..721f4874 100644
--- a/docs/index.rst
+++ b/docs/index.rst
@@ -1,5 +1,5 @@
-Documentation
-=============
+DKIST Package Documentation
+===========================

 This is the documentation for dkist.
 A Python library of tools for obtaining, processing and interacting with DKIST data.
@@ -7,9 +7,6 @@ A Python library of tools for obtaining, processing and interacting with DKIST d
 .. toctree::
   :maxdepth: 2

-  dkist/index.rst
-
-.. note:: The layout of this directory is simply a suggestion. To follow
-          traditional practice, do *not* edit this page, but instead place
-          all documentation for the package inside ``dkist/``.
-          You can follow this practice or choose your own layout.
+  self
+  dataset.rst
+  io.rst
diff --git a/docs/io.rst b/docs/io.rst
new file mode 100644
index 00000000..d873c577
--- /dev/null
+++ b/docs/io.rst
@@ -0,0 +1,4 @@
+IO Module
+=========
+
+.. automodapi:: dkist.io
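
For reference, a minimal end-to-end sketch of the API this diff introduces. It is illustrative only: it assumes a directory like the one written by generate_aia_dataset.py above, containing one asdf file plus the FITS files it references.

    from pathlib import Path

    import astropy.units as u

    from dkist.dataset import Dataset

    # Loading is lazy: constructing the Dataset reads the asdf tree but
    # opens no FITS files until array data are actually accessed.
    path = Path('~/sunpy/data/jsocflare/').expanduser()
    ds = Dataset.from_directory(str(path))

    print(ds.dimensions)  # array shape as a Quantity in pix

    # Forward transform through the gwcs: pixel -> (wavelength, time, sky)
    world = ds.pixel_to_world(*[0 * u.pix] * 4)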