From 71245a24c2ffb32e21c5df360d63fa7884bee9a0 Mon Sep 17 00:00:00 2001
From: Charlie Marshak
Date: Mon, 8 Jan 2024 16:20:59 -0800
Subject: [PATCH 1/6] workflows

---
 .github/workflows/distribute.yml     | 37 ++++++++++++++++++++++++++++
 .github/workflows/labeled-pr.yml     | 15 +++++++++++
 .github/workflows/release-github.yml | 16 ++++++++++++
 3 files changed, 68 insertions(+)
 create mode 100644 .github/workflows/distribute.yml
 create mode 100644 .github/workflows/labeled-pr.yml
 create mode 100644 .github/workflows/release-github.yml

diff --git a/.github/workflows/distribute.yml b/.github/workflows/distribute.yml
new file mode 100644
index 0000000..16f0b43
--- /dev/null
+++ b/.github/workflows/distribute.yml
@@ -0,0 +1,37 @@
+name: Distribute to PyPI
+
+on:
+  push:
+    tags:
+      - v*
+
+jobs:
+  distribute:
+    runs-on: ubuntu-latest
+    outputs:
+      SDIST_VERSION: ${{ steps.build.outputs.version }}
+    steps:
+      - uses: actions/checkout@v2
+
+      - uses: actions/setup-python@v1
+        with:
+          python-version: 3.11
+
+      - name: Install dependencies
+        run: |
+          python -m pip install --upgrade pip
+          python -m pip install --upgrade setuptools build "setuptools-scm[toml]"
+
+      - name: Build distribution
+        id: build
+        run: |
+          git fetch origin +refs/tags/*:refs/tags/*
+          export SDIST_VERSION=$(python -m setuptools_scm)
+          echo "::set-output name=version::${SDIST_VERSION}"
+          python -m build
+
+      - name: upload to PyPI.org
+        uses: pypa/gh-action-pypi-publish@release/v1
+        with:
+          user: __token__
+          password: ${{ secrets.PYPI_TOKEN }}
\ No newline at end of file
diff --git a/.github/workflows/labeled-pr.yml b/.github/workflows/labeled-pr.yml
new file mode 100644
index 0000000..a8863a6
--- /dev/null
+++ b/.github/workflows/labeled-pr.yml
@@ -0,0 +1,15 @@
+name: Is PR labeled?
+
+on:
+  pull_request:
+    types:
+      - opened
+      - labeled
+      - unlabeled
+      - synchronize
+    branches:
+      - main
+
+jobs:
+  call-labeled-pr-check-workflow:
+    uses: ASFHyP3/actions/.github/workflows/reusable-labeled-pr-check.yml@v0.8.3
\ No newline at end of file
diff --git a/.github/workflows/release-github.yml b/.github/workflows/release-github.yml
new file mode 100644
index 0000000..183b70e
--- /dev/null
+++ b/.github/workflows/release-github.yml
@@ -0,0 +1,16 @@
+name: Create Release
+
+on:
+  push:
+    tags:
+      - 'v*'
+
+jobs:
+  call-release-workflow:
+    uses: ASFHyP3/actions/.github/workflows/reusable-release.yml@v0.8.3
+    with:
+      release_prefix: tile-stitcher
+      develop_branch: dev
+      sync_pr_label: team-bot
+    secrets:
+      USER_TOKEN: ${{ secrets.ACCESS_GITHUB_TOKEN }}
\ No newline at end of file
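The "Build distribution" step above derives the version to publish from the pushed `v*` tag via `python -m setuptools_scm`. A quick way to preview that version locally, as a sketch rather than part of the patch: it assumes `setuptools-scm` is installed and that it runs from the repository root with tags fetched.

```python
# Preview the version the workflow's "Build distribution" step would compute.
# Assumes setuptools-scm is installed and the repo has at least one v* tag.
from setuptools_scm import get_version

# On a commit tagged v0.0.1 this prints '0.0.1'; between tags it prints a
# dev version such as '0.0.2.dev3+g1a2b3c4'.
print(get_version())
```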
From 479e4961c1a5e91be9099d92d994c82486114336 Mon Sep 17 00:00:00 2001
From: Charlie Marshak
Date: Wed, 10 Jan 2024 08:57:28 -0800
Subject: [PATCH 2/6] ruff 1

---
 environment.yml             |  1 +
 pyproject.toml              | 34 ++++++++++---
 tests/test_stitch_api.py    | 42 +++++++++-------
 tile_stitcher/stitcher.py   | 98 +++++++++++++++++++++----------------
 tile_stitcher/tile_model.py | 18 ++++---
 5 files changed, 120 insertions(+), 73 deletions(-)

diff --git a/environment.yml b/environment.yml
index 5070100..d44a8df 100644
--- a/environment.yml
+++ b/environment.yml
@@ -23,3 +23,4 @@ dependencies:
   - flake8-import-order
   - pytest
   - pytest-cov
+  - ruff
diff --git a/pyproject.toml b/pyproject.toml
index 5d84c83..666bb7a 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -5,10 +5,12 @@ build-backend = 'setuptools.build_meta'
 [project]
 name = 'tile-stitcher'
 requires-python = '>=3.10'
-authors = [{name='Alexander Handwerger, Charlie Marshak, and OPERA Project Science Team'}]
+authors = [
+    { name = 'Alexander Handwerger, Charlie Marshak, and OPERA Project Science Team' },
+]
 description = 'Create rasters from global tiles'
-license = {text = 'Apache 2.0'}
-classifiers=[
+license = { text = 'Apache 2.0' }
+classifiers = [
     'Intended Audience :: Science/Research',
     'License :: OSI Approved :: Apache 2.0',
     'Natural Language :: English',
@@ -30,7 +32,7 @@ dependencies = [
     "numpy",
     "pandas",
     "pandera",
-    "shapely"
+    "shapely",
 ]
 
 [project.optional-dependencies]
@@ -56,9 +58,29 @@ include = ['tile_stitcher']
 exclude = ['notebooks*', 'tests*']
 
 [tool.setuptools.dynamic]
-readme = {file = ['README.md'], content-type = 'text/markdown'}
+readme = { file = ['README.md'], content-type = 'text/markdown' }
 
 [tool.setuptools_scm]
 
 [tool.ruff]
-line-length = 120
\ No newline at end of file
+line-length = 80
+
+# Exclude a variety of commonly ignored directories.
+exclude = [
+    ".eggs",
+    ".git",
+    ".ipynb_checkpoints",
+    ".mypy_cache",
+    ".pytest_cache",
+    ".ruff_cache",
+    ".vscode",
+    "__pypackages__",
+    "_build",
+    "build",
+    "dist",
+    "site-packages",
+]
+indent-width = 4
+
+[tool.ruff.format]
+quote-style="single"
\ No newline at end of file
diff --git a/tests/test_stitch_api.py b/tests/test_stitch_api.py
index 5359161..35edeb0 100644
--- a/tests/test_stitch_api.py
+++ b/tests/test_stitch_api.py
@@ -1,8 +1,12 @@
 import pytest
 
 from tile_stitcher import get_raster_from_tiles
-from tile_stitcher.stitcher import (COP_100_YEARS, HANSEN_MOSAIC_YEARS,
-                                    S1_TEMPORAL_BASELINE_DAYS, SEASONS)
+from tile_stitcher.stitcher import (
+    COP_100_YEARS,
+    HANSEN_MOSAIC_YEARS,
+    S1_TEMPORAL_BASELINE_DAYS,
+    SEASONS,
+)
 
 
 def test_esa_world_cover():
@@ -20,34 +24,36 @@ def test_pekel_water_occ():
     assert len(X.shape) == 3
 
 
-@pytest.mark.parametrize("year",
-                         [HANSEN_MOSAIC_YEARS[k] for k in [0, 2, 4, 6, 10]]
-                         )
+@pytest.mark.parametrize(
+    'year', [HANSEN_MOSAIC_YEARS[k] for k in [0, 2, 4, 6, 10]]
+)
 def test_hansen_datasets(year):
     # Note only getting 1 tile - these are large datasets!
     bounds = [-120.45, 34.85, -120.15, 34.95]
-    X, _ = get_raster_from_tiles(bounds,
-                                 tile_shortname='hansen_annual_mosaic',
-                                 year=year)
+    X, _ = get_raster_from_tiles(
+        bounds, tile_shortname='hansen_annual_mosaic', year=year
+    )
     assert len(X.shape) == 3
 
 
-@pytest.mark.parametrize("season", SEASONS)
-@pytest.mark.parametrize("temporal_baseline_days", S1_TEMPORAL_BASELINE_DAYS)
+@pytest.mark.parametrize('season', SEASONS)
+@pytest.mark.parametrize('temporal_baseline_days', S1_TEMPORAL_BASELINE_DAYS)
 def test_coherence_dataset(season, temporal_baseline_days):
     # Note only getting 1 tile
     bounds = [-120.45, 34.85, -120.15, 34.95]
-    X, _ = get_raster_from_tiles(bounds,
-                                 tile_shortname='s1_coherence_2020',
-                                 season=season,
-                                 temporal_baseline_days=temporal_baseline_days)
+    X, _ = get_raster_from_tiles(
+        bounds,
+        tile_shortname='s1_coherence_2020',
+        season=season,
+        temporal_baseline_days=temporal_baseline_days,
+    )
     assert len(X.shape) == 3
 
 
-@pytest.mark.parametrize("year", COP_100_YEARS)
+@pytest.mark.parametrize('year', COP_100_YEARS)
 def test_cop100_dataset(year: int):
     bounds = [-120.45, 34.85, -120.15, 34.95]
-    X, _ = get_raster_from_tiles(bounds,
-                                 tile_shortname='cop_100_lulc_discrete',
-                                 year=year)
+    X, _ = get_raster_from_tiles(
+        bounds, tile_shortname='cop_100_lulc_discrete', year=year
+    )
     assert len(X.shape) == 3
diff --git a/tile_stitcher/stitcher.py b/tile_stitcher/stitcher.py
index e2301da..67717e7 100644
--- a/tile_stitcher/stitcher.py
+++ b/tile_stitcher/stitcher.py
@@ -18,7 +18,7 @@
     'esa_world_cover_2021': 'esa_world_cover_2021.geojson.zip',
     'hansen_annual_mosaic': 'hansen_landsat_mosaic_2022.geojson.zip',
     's1_coherence_2020': 's1_coherence_2020.geojson.zip',
-    'cop_100_lulc_discrete': 'cop_100m_lulc_discrete_classes.geojson.zip'
+    'cop_100_lulc_discrete': 'cop_100m_lulc_discrete_classes.geojson.zip',
 }
 
 DATASET_SHORTNAMES = list(GEOJSON_DICT.keys())
@@ -42,20 +42,25 @@ def get_all_tile_data(tile_key: str) -> gpd.GeoDataFrame:
 
 
 @lru_cache
-def get_tile_data(tile_key: str,
-                  year: int = None,
-                  season: str = None,
-                  temporal_baseline_days: int = None) -> gpd.GeoDataFrame:
+def get_tile_data(
+    tile_key: str,
+    year: int = None,
+    season: str = None,
+    temporal_baseline_days: int = None,
+) -> gpd.GeoDataFrame:
     # Because tile data is cached - we need to copy it.
     df_tiles = get_all_tile_data(tile_key).copy()
-    if (year is not None):
+    if year is not None:
         if tile_key not in DATASETS_WITH_YEAR:
-            raise NotImplementedError('Year is only supported '
-                                      f'with {DATASETS_WITH_YEAR}')
+            raise NotImplementedError(
+                'Year is only supported ' f'with {DATASETS_WITH_YEAR}'
+            )
         if tile_key == 'hansen_annual_mosaic':
+
             def update_hansen_landsat_mosaic_url_p(url):
                 return update_hansen_landsat_mosaic_url(url, year)
+
             df_tiles.url = df_tiles.url.map(update_hansen_landsat_mosaic_url_p)
         if tile_key == 'cop_100_lulc_discrete':
             df_tiles = df_tiles[df_tiles.year == year].reset_index(drop=True)
@@ -65,12 +70,16 @@ def update_hansen_landsat_mosaic_url_p(url):
 
     if tile_key == 's1_coherence_2020':
         if any([var is None for var in [temporal_baseline_days, season]]):
-            raise ValueError(f'{tile_key} requires season and temporal baseline '
-                             'to be specified')
+            raise ValueError(
+                f'{tile_key} requires season and temporal baseline '
+                'to be specified'
+            )
         if season not in SEASONS:
             raise ValueError(f'season keyword must be in {", ".join(SEASONS)}')
         if temporal_baseline_days not in S1_TEMPORAL_BASELINE_DAYS:
-            raise ValueError(f'temporal_baseline_days must be in {", ".join(S1_TEMPORAL_BASELINE_DAYS)}')
+            raise ValueError(
+                f'temporal_baseline_days must be in {", ".join(S1_TEMPORAL_BASELINE_DAYS)}'
+            )
         ind_season = df_tiles.season == season
         ind_tb = df_tiles.temporal_baseline_days == temporal_baseline_days
         df_tiles = df_tiles[ind_tb & ind_season].reset_index(drop=True)
@@ -88,21 +97,24 @@ def update_hansen_landsat_mosaic_url(url: str, year: int):
     elif year <= 2015:
         # Gets the "last_00N_040W.tif" portion of the url
         url_end = url[-17:]
-        url_updated = ('https://storage.googleapis.com/earthenginepartners-hansen/'
-                       f'GFC{year}/Hansen_GFC{year}_{url_end}')
+        url_updated = (
+            'https://storage.googleapis.com/earthenginepartners-hansen/'
+            f'GFC{year}/Hansen_GFC{year}_{url_end}'
+        )
     else:
         year_diff = CURRENT_HANSEN_YEAR - year
         version_updated = CURRENT_HANSEN_VERSION - year_diff
-        url_updated = url.replace(str(CURRENT_HANSEN_YEAR),
-                                  str(year))
-        url_updated = url_updated.replace(f'v1.{CURRENT_HANSEN_VERSION}',
-                                          f'v1.{version_updated}')
+        url_updated = url.replace(str(CURRENT_HANSEN_YEAR), str(year))
+        url_updated = url_updated.replace(
+            f'v1.{CURRENT_HANSEN_VERSION}', f'v1.{version_updated}'
+        )
 
     return url_updated
 
 
-def get_urls_from_tile_df(extent: list[float],
-                          df_tiles: gpd.GeoDataFrame) -> list[str]:
+def get_urls_from_tile_df(
+    extent: list[float], df_tiles: gpd.GeoDataFrame
+) -> list[str]:
     bbox = box(*extent)
     ind_inter = df_tiles.geometry.intersects(bbox)
     df_subset = df_tiles[ind_inter].reset_index(drop=True)
@@ -110,8 +122,9 @@ def get_urls_from_tile_df(
     return urls
 
 
-def get_additional_tile_metadata(urls: list[str],
-                                 max_tile_tries: int = 10) -> dict:
+def get_additional_tile_metadata(
+    urls: list[str], max_tile_tries: int = 10
+) -> dict:
     """Some tile sets may have missing data when they should not. Ideally we
     can remove said tiles from dataframe. However, in the case of Hansen
     mosaics, these errors seem to be year-to-year e.g. 2017 where the upper left
@@ -125,12 +138,11 @@ def get_additional_tile_metadata(urls: list[str],
         with rasterio.open(url) as ds:
             tags = ds.tags()
             try:
-                cmap = {k+1: ds.colormap(k+1) for k in range(ds.count)}
+                cmap = {k + 1: ds.colormap(k + 1) for k in range(ds.count)}
             # no colormap in existing dataset yields a ValueError in Rasterio
             except ValueError:
                 cmap = {}
-            metadata = {'tags': tags,
-                        'colormap': cmap}
+            metadata = {'tags': tags, 'colormap': cmap}
             break
         # When dataset does not exist with given url
         except RasterioIOError:
@@ -138,26 +150,31 @@ def get_additional_tile_metadata(urls: list[str],
     return metadata
 
 
-def get_raster_from_tiles(extent: list[float],
-                          tile_shortname: str = None,
-                          df_tiles: gpd.GeoDataFrame = None,
-                          year: int = None,
-                          season: str = None,
-                          temporal_baseline_days: int = None) -> tuple:
-
+def get_raster_from_tiles(
+    extent: list[float],
+    tile_shortname: str = None,
+    df_tiles: gpd.GeoDataFrame = None,
+    year: int = None,
+    season: str = None,
+    temporal_baseline_days: int = None,
+) -> tuple:
     if (tile_shortname is None) and (df_tiles is None):
-        raise ValueError('Either "tile_shortname" or '
-                         '"df_tiles" must be provided')
+        raise ValueError(
+            'Either "tile_shortname" or "df_tiles" must be provided'
+        )
 
     if (tile_shortname is not None) and (df_tiles is not None):
-        raise ValueError('"tile_shortname" and "df_tiles" cannot '
-                         'both be provided')
+        raise ValueError(
+            '"tile_shortname" and "df_tiles" cannot both be provided'
+        )
 
     if isinstance(tile_shortname, str):
-        df_tiles = get_tile_data(tile_shortname,
-                                 year=year,
-                                 temporal_baseline_days=temporal_baseline_days,
-                                 season=season)
+        df_tiles = get_tile_data(
+            tile_shortname,
+            year=year,
+            temporal_baseline_days=temporal_baseline_days,
+            season=season,
+        )
 
     df_tiles = TILE_SCHEMA.validate(df_tiles)
 
@@ -165,8 +182,7 @@ def get_raster_from_tiles(extent: list[float],
     tile_metadata = get_additional_tile_metadata(urls)
 
     urls_subset = get_urls_from_tile_df(extent, df_tiles)
-    X_merged, p_merged = merge_tile_datasets_within_extent(urls_subset,
-                                                           extent)
+    X_merged, p_merged = merge_tile_datasets_within_extent(urls_subset, extent)
 
     # Are stored in the profile for provenance
     p_merged.update(**tile_metadata)
diff --git a/tile_stitcher/tile_model.py b/tile_stitcher/tile_model.py
index a2ec4d2..c046737 100644
--- a/tile_stitcher/tile_model.py
+++ b/tile_stitcher/tile_model.py
@@ -1,11 +1,13 @@
 import geopandas as gpd
 from pandera import Column, DataFrameSchema
 
-TILE_SCHEMA = DataFrameSchema({
-    'tile_id': Column(str, required=True),
-    'url': Column(str, required=True),
-    'year': Column(int, required=False),
-    'temporal_baseline_days': Column(int, required=False),
-    'season': Column(str, required=False),
-    'geometry': Column(gpd.array.GeometryDtype, required=True)
-})
+TILE_SCHEMA = DataFrameSchema(
+    {
+        'tile_id': Column(str, required=True),
+        'url': Column(str, required=True),
+        'year': Column(int, required=False),
+        'temporal_baseline_days': Column(int, required=False),
+        'season': Column(str, required=False),
+        'geometry': Column(gpd.array.GeometryDtype, required=True),
+    }
+)
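Patch 2 leaves `tile_model.py` with the pandera schema above, which `get_raster_from_tiles` uses to validate a tile frame before stitching. A minimal sketch of a frame that passes validation; the tile id and URL below are made up for illustration:

```python
# A made-up single-tile frame that satisfies TILE_SCHEMA: 'tile_id', 'url',
# and 'geometry' are required, while 'year', 'temporal_baseline_days', and
# 'season' are optional columns and may be omitted entirely.
import geopandas as gpd
from shapely.geometry import box

from tile_stitcher.tile_model import TILE_SCHEMA

df_tiles = gpd.GeoDataFrame(
    {
        'tile_id': ['N34W120'],  # hypothetical tile id
        'url': ['https://example.com/tiles/N34W120.tif'],  # hypothetical URL
        'geometry': [box(-120.45, 34.85, -120.15, 34.95)],
    }
)
TILE_SCHEMA.validate(df_tiles)  # raises a pandera SchemaError on a bad frame
```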
From fe2baad3f672064155dd1a2fce88b1f3d0a949d6 Mon Sep 17 00:00:00 2001
From: Charlie Marshak
Date: Wed, 10 Jan 2024 09:04:21 -0800
Subject: [PATCH 3/6] workflow and 120 line length

---
 .github/workflows/static-analysis.yml |  3 +++
 pyproject.toml                        |  4 +--
 tests/test_stitch_api.py              | 12 +++------
 tile_stitcher/stitcher.py             | 36 +++++++--------------------
 4 files changed, 17 insertions(+), 38 deletions(-)

diff --git a/.github/workflows/static-analysis.yml b/.github/workflows/static-analysis.yml
index 3ed4d38..e124675 100644
--- a/.github/workflows/static-analysis.yml
+++ b/.github/workflows/static-analysis.yml
@@ -10,3 +10,6 @@ jobs:
 
   call-secrets-analysis-workflow:
     uses: ASFHyP3/actions/.github/workflows/reusable-secrets-analysis.yml@v0.8.3
+
+  call-ruff-workflow:
+    uses: ASFHyP3/actions/.github/workflows/reusable-ruff.yml@v0.9.0
diff --git a/pyproject.toml b/pyproject.toml
index 666bb7a..7832502 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -63,7 +63,7 @@ readme = { file = ['README.md'], content-type = 'text/markdown' }
 [tool.setuptools_scm]
 
 [tool.ruff]
-line-length = 80
+line-length = 120
 
 # Exclude a variety of commonly ignored directories.
 exclude = [
@@ -83,4 +83,4 @@ exclude = [
 indent-width = 4
 
 [tool.ruff.format]
-quote-style="single"
\ No newline at end of file
+quote-style = "single"
diff --git a/tests/test_stitch_api.py b/tests/test_stitch_api.py
index 35edeb0..aeb6beb 100644
--- a/tests/test_stitch_api.py
+++ b/tests/test_stitch_api.py
@@ -24,15 +24,11 @@ def test_pekel_water_occ():
     assert len(X.shape) == 3
 
 
-@pytest.mark.parametrize(
-    'year', [HANSEN_MOSAIC_YEARS[k] for k in [0, 2, 4, 6, 10]]
-)
+@pytest.mark.parametrize('year', [HANSEN_MOSAIC_YEARS[k] for k in [0, 2, 4, 6, 10]])
 def test_hansen_datasets(year):
     # Note only getting 1 tile - these are large datasets!
     bounds = [-120.45, 34.85, -120.15, 34.95]
-    X, _ = get_raster_from_tiles(
-        bounds, tile_shortname='hansen_annual_mosaic', year=year
-    )
+    X, _ = get_raster_from_tiles(bounds, tile_shortname='hansen_annual_mosaic', year=year)
     assert len(X.shape) == 3
 
 
@@ -53,7 +49,5 @@ def test_coherence_dataset(season, temporal_baseline_days):
 @pytest.mark.parametrize('year', COP_100_YEARS)
 def test_cop100_dataset(year: int):
     bounds = [-120.45, 34.85, -120.15, 34.95]
-    X, _ = get_raster_from_tiles(
-        bounds, tile_shortname='cop_100_lulc_discrete', year=year
-    )
+    X, _ = get_raster_from_tiles(bounds, tile_shortname='cop_100_lulc_discrete', year=year)
     assert len(X.shape) == 3
diff --git a/tile_stitcher/stitcher.py b/tile_stitcher/stitcher.py
index 67717e7..0fc1ad1 100644
--- a/tile_stitcher/stitcher.py
+++ b/tile_stitcher/stitcher.py
@@ -53,9 +53,7 @@ def get_tile_data(
 
     if year is not None:
         if tile_key not in DATASETS_WITH_YEAR:
-            raise NotImplementedError(
-                'Year is only supported ' f'with {DATASETS_WITH_YEAR}'
-            )
+            raise NotImplementedError('Year is only supported ' f'with {DATASETS_WITH_YEAR}')
         if tile_key == 'hansen_annual_mosaic':
 
             def update_hansen_landsat_mosaic_url_p(url):
@@ -70,16 +68,11 @@ def update_hansen_landsat_mosaic_url_p(url):
 
     if tile_key == 's1_coherence_2020':
         if any([var is None for var in [temporal_baseline_days, season]]):
-            raise ValueError(
-                f'{tile_key} requires season and temporal baseline '
-                'to be specified'
-            )
+            raise ValueError(f'{tile_key} requires season and temporal baseline ' 'to be specified')
         if season not in SEASONS:
             raise ValueError(f'season keyword must be in {", ".join(SEASONS)}')
         if temporal_baseline_days not in S1_TEMPORAL_BASELINE_DAYS:
-            raise ValueError(
-                f'temporal_baseline_days must be in {", ".join(S1_TEMPORAL_BASELINE_DAYS)}'
-            )
+            raise ValueError(f'temporal_baseline_days must be in {", ".join(S1_TEMPORAL_BASELINE_DAYS)}')
         ind_season = df_tiles.season == season
         ind_tb = df_tiles.temporal_baseline_days == temporal_baseline_days
         df_tiles = df_tiles[ind_tb & ind_season].reset_index(drop=True)
@@ -98,23 +91,18 @@ def update_hansen_landsat_mosaic_url(url: str, year: int):
         # Gets the "last_00N_040W.tif" portion of the url
"last_00N_040W.tif" portion of the url url_end = url[-17:] url_updated = ( - 'https://storage.googleapis.com/earthenginepartners-hansen/' - f'GFC{year}/Hansen_GFC{year}_{url_end}' + 'https://storage.googleapis.com/earthenginepartners-hansen/' f'GFC{year}/Hansen_GFC{year}_{url_end}' ) else: year_diff = CURRENT_HANSEN_YEAR - year version_updated = CURRENT_HANSEN_VERSION - year_diff url_updated = url.replace(str(CURRENT_HANSEN_YEAR), str(year)) - url_updated = url_updated.replace( - f'v1.{CURRENT_HANSEN_VERSION}', f'v1.{version_updated}' - ) + url_updated = url_updated.replace(f'v1.{CURRENT_HANSEN_VERSION}', f'v1.{version_updated}') return url_updated -def get_urls_from_tile_df( - extent: list[float], df_tiles: gpd.GeoDataFrame -) -> list[str]: +def get_urls_from_tile_df(extent: list[float], df_tiles: gpd.GeoDataFrame) -> list[str]: bbox = box(*extent) ind_inter = df_tiles.geometry.intersects(bbox) df_subset = df_tiles[ind_inter].reset_index(drop=True) @@ -122,9 +110,7 @@ def get_urls_from_tile_df( return urls -def get_additional_tile_metadata( - urls: list[str], max_tile_tries: int = 10 -) -> dict: +def get_additional_tile_metadata(urls: list[str], max_tile_tries: int = 10) -> dict: """Some tile sets may have missing data when they should not. Ideally we can remove said tiles from dataframe. However, in the case of Hansen mosiacs, these errors seem to be year-to-year e.g. 2017 where the upper left @@ -159,14 +145,10 @@ def get_raster_from_tiles( temporal_baseline_days: int = None, ) -> tuple: if (tile_shortname is None) and (df_tiles is None): - raise ValueError( - 'Either "tile_shortname" or "df_tiles" must be provided' - ) + raise ValueError('Either "tile_shortname" or "df_tiles" must be provided') if (tile_shortname is not None) and (df_tiles is not None): - raise ValueError( - '"tile_shortname" and "df_tiles" cannot both be provided' - ) + raise ValueError('"tile_shortname" and "df_tiles" cannot both be provided') if isinstance(tile_shortname, str): df_tiles = get_tile_data( From bc79e8f3f938b0a596b5ee5fa6be8acd3465135e Mon Sep 17 00:00:00 2001 From: Charlie Marshak Date: Wed, 10 Jan 2024 09:12:38 -0800 Subject: [PATCH 4/6] init format --- tile_stitcher/__init__.py | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/tile_stitcher/__init__.py b/tile_stitcher/__init__.py index 212d3a3..1beb5d3 100644 --- a/tile_stitcher/__init__.py +++ b/tile_stitcher/__init__.py @@ -6,9 +6,12 @@ __version__ = version(__name__) except PackageNotFoundError: __version__ = None - warnings.warn('package is not installed!\n' - 'Install in editable/develop mode via (from the top of this repo):\n' - ' python -m pip install -e .\n', RuntimeWarning) + warnings.warn( + 'package is not installed!\n' + 'Install in editable/develop mode via (from the top of this repo):\n' + ' python -m pip install -e .\n', + RuntimeWarning, + ) from .stitcher import DATASET_SHORTNAMES, get_raster_from_tiles From 2149358d904a1a5b1b25efcec3e633394653044f Mon Sep 17 00:00:00 2001 From: Charlie Marshak Date: Wed, 10 Jan 2024 09:21:18 -0800 Subject: [PATCH 5/6] changelog --- CHANGELOG.md | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 53b7d07..92602fe 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -7,4 +7,10 @@ and this project adheres to [PEP 440](https://www.python.org/dev/peps/pep-0440/) and uses [Semantic Versioning](https://semver.org/spec/v2.0.0.html). 
 ## [0.0.1]
-* First release with ESA worldcover 2020 and 2021; Pekel water occurence 2021; S1 coherence from 2020; and Hansen annual mosaics. See readme.
\ No newline at end of file
+* First release of `tile_stitcher` with the following datasets (see README for links):
+  - ESA 10m worldcover 2020 and 2021
+  - Pekel 30m water occurrence 2021
+  - S1 coherence from 2020
+  - Hansen annual mosaics 2000, 2013 - present
+  - Cop 100m Landcover 2015-2019
+* Includes workflows for static analysis and integration testing.
\ No newline at end of file

From e887b5b03df7c737d174fd651884e67452d21988 Mon Sep 17 00:00:00 2001
From: Charlie Marshak
Date: Wed, 10 Jan 2024 09:32:21 -0800
Subject: [PATCH 6/6] update token

---
 .github/workflows/release-github.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.github/workflows/release-github.yml b/.github/workflows/release-github.yml
index 183b70e..4e1f4ee 100644
--- a/.github/workflows/release-github.yml
+++ b/.github/workflows/release-github.yml
@@ -13,4 +13,4 @@ jobs:
       develop_branch: dev
       sync_pr_label: team-bot
     secrets:
-      USER_TOKEN: ${{ secrets.ACCESS_GITHUB_TOKEN }}
\ No newline at end of file
+      USER_TOKEN: ${{ secrets.OPERA_PST_GITHUB_TOKEN }}
\ No newline at end of file
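Taken together, the series leaves a formatted, releasable `tile_stitcher` package. A minimal usage sketch of the API the patches exercise: the call signature comes from `tests/test_stitch_api.py`, the output filename is illustrative, and dropping the provenance keys before writing is an assumption about what rasterio's GTiff writer will accept as creation options.

```python
import rasterio

from tile_stitcher import get_raster_from_tiles

# xmin, ymin, xmax, ymax in degrees -- the same extent the tests use.
bounds = [-120.45, 34.85, -120.15, 34.95]

# X is a (bands, height, width) array; p is the merged rasterio profile,
# with the source tiles' tags/colormap folded in for provenance.
X, p = get_raster_from_tiles(bounds, tile_shortname='esa_world_cover_2021')

# Assumption: the provenance entries are not GTiff creation options, so
# remove them before handing the profile to rasterio.open.
tags = p.pop('tags', {})
colormap = p.pop('colormap', {})

with rasterio.open('esa_world_cover_2021.tif', 'w', **p) as ds:
    ds.write(X)
```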