From 34d9ab271128817aafaa1522c3fe65a39082051d Mon Sep 17 00:00:00 2001
From: Stuart Mumford
Date: Sat, 8 Feb 2025 08:04:11 +0000
Subject: [PATCH] Fixes related to sample data updates

---
 .github/workflows/main.yml            | 2 +-
 docs/examples/reproject_vbi_mosaic.md | 2 +-
 tools/update_sample_data.py           | 6 ++++--
 3 files changed, 6 insertions(+), 4 deletions(-)

diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml
index 09de3237..388a6f42 100644
--- a/.github/workflows/main.yml
+++ b/.github/workflows/main.yml
@@ -92,7 +92,7 @@ jobs:
     with:
       python-version: '3.13'
       test_extras: tests
-      test_command: pytest --pyargs dkist -k "not test_fail"
+      test_command: pytest --pyargs dkist -k "not test_fail" --remote-data=none
       # We have to work around a github runner bug here: https://github.com/actions/runner/issues/2788#issuecomment-2145922705
       upload_to_pypi: ${{ startsWith(github.ref || format('{0}{1}', 'refs/tags/', github.event.release.tag_name), 'refs/tags/v') && !endsWith(github.ref || format('{0}{1}', 'refs/tags/', github.event.release.tag_name), '.dev') }}
     secrets:
diff --git a/docs/examples/reproject_vbi_mosaic.md b/docs/examples/reproject_vbi_mosaic.md
index 7fe7db9c..49aa75a4 100644
--- a/docs/examples/reproject_vbi_mosaic.md
+++ b/docs/examples/reproject_vbi_mosaic.md
@@ -44,7 +44,7 @@ If you want to replace this dataset with your own dataset, see {ref}`dkist:howto
 
 Let's load the data with {obj}`dkist.load_dataset`:
 
 ```{code-cell} ipython3
-ds = dkist.load_dataset(VBI_AJQWW / "VBI_L1_20231016T184519_AJQWW.asdf")
+ds = dkist.load_dataset(VBI_AJQWW)
 ds
 ```
diff --git a/tools/update_sample_data.py b/tools/update_sample_data.py
index e03414a3..b007f629 100644
--- a/tools/update_sample_data.py
+++ b/tools/update_sample_data.py
@@ -45,7 +45,7 @@ def main(datasets, working_directory, destination_path="/user_tools_tutorial_dat
 
     for did, props in datasets.items():
         res = Fido.search(a.dkist.Dataset(did))
-        asdf_file = Fido.fetch(res, path=working_directory / "{dataset_id}", progress=False, overwrite=False)
+        asdf_file = Fido.fetch(res, path=working_directory / "{dataset_id}", progress=False, overwrite=True)
 
         ds = dkist.load_dataset(asdf_file)
         if "slice" in props:
@@ -64,10 +64,12 @@ def main(datasets, working_directory, destination_path="/user_tools_tutorial_dat
         [f.unlink() for f in dataset_path.glob("*.mp4")]
         [f.unlink() for f in dataset_path.glob("*.pdf")]
         assert len(list(dataset_path.glob("*.asdf"))) == 1
+        dataset_files = tuple(dataset_path.glob("*"))
 
         sample_filename = working_directory / props["filename"]
         with tarfile.open(sample_filename, mode="w") as tfile:
-            tfile.add(dataset_path, recursive=True)
+            for dfile in dataset_files:
+                tfile.add(dfile, arcname=dfile.name, recursive=False)
 
         sample_files_for_upload.append(sample_filename)
 