Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Add tests for Xarray io functions #30

Merged
merged 21 commits into from
Dec 14, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
21 commits
Select commit Hold shift + click to select a range
02c3108
removed (commented-out) CONUS/AK search constraint
Jack-Hayes Oct 14, 2024
5f2b7f0
removed CONUS/AK constraint (cryocloud dev instead of local)
Jack-Hayes Oct 15, 2024
e5a70a4
Merge conflict cryocloud
Jack-Hayes Oct 15, 2024
fe707f6
updated CONUS/AK (not sure why my branch didn't have this already) an…
Jack-Hayes Oct 16, 2024
acf9467
WSL build complete
Jack-Hayes Oct 17, 2024
f786a49
Deleted CONUS/AK test and box import
Jack-Hayes Oct 17, 2024
fa66db4
Merge remote-tracking branch 'origin/main' into hayes-dev
Jack-Hayes Nov 8, 2024
ec9a5b8
added shapely make_valid to cascading_search
Jack-Hayes Nov 13, 2024
b2f2e22
Merge branch 'main' of https://github.com/uw-cryo/coincident into hay…
Jack-Hayes Nov 27, 2024
24fdb5c
Return rasters for cop30 and ESA search
Jack-Hayes Nov 27, 2024
0fc08ba
Added ODC dependency and fixed formatting
Jack-Hayes Dec 5, 2024
4b836ec
Merge remote-tracking branch 'origin/hayes-dev' into hayes-dev
Jack-Hayes Dec 5, 2024
3c88b5e
Merge remote-tracking branch 'origin/main' into hayes-dev
Jack-Hayes Dec 10, 2024
2468431
Added test for .io.xarray and synced search main.py
Jack-Hayes Dec 10, 2024
999d960
Cleaned test_xarray and added aoi, large_aoi to init
Jack-Hayes Dec 11, 2024
6b46f48
test_xarray matplotlib import inside test func
Jack-Hayes Dec 12, 2024
3429121
removed depends_on_optional from xarray test
Jack-Hayes Dec 12, 2024
f799324
use conftest.py
scottyhq Dec 13, 2024
19918e8
add back unused import for auth test
scottyhq Dec 13, 2024
6ab0514
streamline environments
scottyhq Dec 14, 2024
b7f271e
streamline environments 2
scottyhq Dec 14, 2024
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4,841 changes: 1,722 additions & 3,119 deletions pixi.lock

Large diffs are not rendered by default.

29 changes: 13 additions & 16 deletions pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -31,39 +31,37 @@ classifiers = [
dynamic = ["version"]

dependencies = [
#"aiohttp>=3.10.6,<4",
"cloudpathlib[s3]>=0.20.0,<0.21",
#"fsspec>=2024.9.0,<2025",
"geopandas>=1.0.1,<2",
"maxar-platform>=1.0.2,<2",
"odc-stac>=0.3.10,<0.4",
"planetary-computer>=1.0.0,<2",
"pyarrow>=18.0.0,<19",
"pystac-client>=0.8.3,<0.9",
"requests>=2.32.3,<3",
"rioxarray>=0.17.0,<0.18",
"stac-asset>=0.4.3,<0.5",
"stac-geoparquet>=0.6.0,<0.7",
"odc-stac>=0.3.10,<0.4",
]

[project.optional-dependencies]
dev = [
"ipykernel>=6.29.5,<7",
"matplotlib>=3.9.4,<4",
"mypy>=1.11.2,<2",
"pre-commit>=3.8.0,<4",
"pylint>=3.3.1,<4",
"pytest >=6",
"pytest-cov >=3",
"sliderule>=4.7.1,<5",
]
docs = [
"folium", # comes w/ geopandas on conda-forge but not pypi
#"furo>=2023.08.17",
"mapclassify",
"matplotlib",
"myst-nb",
"myst_parser>=0.13",
"pydata-sphinx-theme>=0.16.0,<0.17",
#"rpds-py>=0.21.0,<0.22",
"sphinx>=7.0",
"sphinx_autodoc_typehints",
"sphinx_copybutton",
Expand Down Expand Up @@ -211,37 +209,36 @@ docs = { features = ["docs"], solve-group = "default" }
# If a package is listed in project.dependencies but not repeated here, it is installed from pypi
[tool.pixi.dependencies]
python = "<3.13" # https://github.com/stac-utils/stac-geoparquet/issues/81
#aiohttp = "*"
#fsspec = "*"
geopandas = "*"
odc-stac = "*"
planetary-computer = "*"
pystac-client = "*"
requests = "*"
rioxarray = "*"
# stac-asset = "*" # not on conda-forge
#s3fs = "*"
stac-geoparquet = "*"
pyarrow = "*"
# Testing additional dependencies (not in the pypi list above)
jsonschema = ">=4.23.0,<5"
libgdal-arrow-parquet = ">=3.10.0,<4"
odc-stac = "*"
#nbconvert = ">=7.16.4,<8"
#cloudpathlib-s3 = ">=0.20.0,<0.21"
#matplotlib-base = ">=3.9.2,<4"
#sliderule = ">=4.7.1,<5"


[tool.pixi.feature.dev.dependencies]
# NOTE: ipykernel to run notebooks in vscode
ipykernel = ">=6.29.5,<7"
matplotlib = "*"
mypy = "*"
# NOTE: pinned to <4 because with pre-commit = "*" the solver fails: "you require pre-commit>=3.8.0,<4 and pre-commit==4.0.0 ... your requirements are unsatisfiable"
pre-commit = "<4"
pylint = "*"
pytest = "*"
mypy = "*"
# Testing additional dependencies
sliderule = "*"

# NOTE: test new dependencies locally by adding below
#rich = ">=13.8.1,<14" # Optional. convenient for rich.print(dataset)
#xvec = ">=0.3.0,<0.4"
sliderule = ">=4.7.1,<5"
#sliderule = ">=4.7.1,<5"
#matplotlib = ">=3.9.4,<4"

[tool.pixi.pypi-dependencies]
coincident = { path = ".", editable = false }
Expand Down
6 changes: 3 additions & 3 deletions src/coincident/datasets/planetary_computer.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,15 +19,15 @@

@dataclass
class COP30(Dataset):
"""Essential metadata for Copernicus DEM"""
"""Essential metadata and data access for Copernicus DEM"""

alias: str = "cop30"
has_stac_api: bool = True
collections: list[str] = field(default_factory=lambda: ["cop-dem-glo-30"])
search: str = STACAPI
start: str | None = None # NOTE: has 'representative' datetime of 2021-04-22
start: str | None = None # Copernicus DEM has 'representative' datetime: 2021-04-22
end: str | None = None
type: str = "sar"
type: str = "dem"
provider: str = "microsoft"


Expand Down
2 changes: 1 addition & 1 deletion src/coincident/io/xarray.py
Original file line number Diff line number Diff line change
Expand Up @@ -37,7 +37,7 @@ def to_dataset(
bands: list[str] | None = None,
aoi: gpd.GeoDataFrame | None = None,
mask: bool = False,
**kwargs: dict[str, Any],
**kwargs: Any,
) -> xr.DataArray:
"""
Convert a GeoDataFrame to an xarray DataArray using odc.stac
Expand Down
5 changes: 0 additions & 5 deletions tests/__init__.py
Original file line number Diff line number Diff line change
@@ -1,5 +0,0 @@
from __future__ import annotations

# import os
# if not os.environ.get('MAXAR_API_KEY'):
# os.environ['MAXAR_API_KEY'] = 'fake-test-key'
23 changes: 23 additions & 0 deletions tests/conftest.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,23 @@
# ruff: noqa: ARG001
from __future__ import annotations

import geopandas as gpd
import pytest

# import os
# if not os.environ.get('MAXAR_API_KEY'):
# os.environ['MAXAR_API_KEY'] = 'fake-test-key'


@pytest.fixture(scope="package")
def aoi():
    """Small Grand Mesa AOI (11 vertices, 1,361 km^2) shared across the test package."""
    url = "https://raw.githubusercontent.com/SlideRuleEarth/sliderule-python/main/data/grandmesa.geojson"
    return gpd.read_file(url)


@pytest.fixture(scope="package")
def large_aoi():
    """Large Colorado-state AOI (260 vertices, 269,590 km^2) shared across the test package.

    BUG FIX: `scope="package"` was previously a default parameter on the fixture
    function itself, where pytest silently ignores it; it must be passed to the
    `@pytest.fixture` decorator (matching the `aoi` fixture above) for the
    GeoDataFrame to be loaded once per package instead of once per test.
    """
    aoi_url = "https://raw.githubusercontent.com/unitedstates/districts/refs/heads/gh-pages/states/CO/shape.geojson"
    return gpd.read_file(aoi_url)
16 changes: 1 addition & 15 deletions tests/test_search.py
Original file line number Diff line number Diff line change
Expand Up @@ -18,24 +18,10 @@
except: # noqa: E722
not_authenticated = True
maxar_authenticated = pytest.mark.skipif(
not_authenticated, reason="tests for linux only"
not_authenticated, reason="Not authenticated with Maxar API"
)


@pytest.fixture
def aoi():
# 11 vertices, 1,361km^2
aoi_url = "https://raw.githubusercontent.com/SlideRuleEarth/sliderule-python/main/data/grandmesa.geojson"
return gpd.read_file(aoi_url)


@pytest.fixture
def large_aoi():
# 260 vertices, large area 269,590 km^2
aoi_url = "https://raw.githubusercontent.com/unitedstates/districts/refs/heads/gh-pages/states/CO/shape.geojson"
return gpd.read_file(aoi_url)


@typing.no_type_check
def test_no_dataset_specified():
with pytest.raises(
Expand Down
70 changes: 70 additions & 0 deletions tests/test_xarray.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,70 @@
from __future__ import annotations

import pytest
import xarray as xr
from matplotlib.collections import QuadMesh

import coincident
from coincident.io.xarray import plot_esa_worldcover, to_dataset

# Decorate tests requiring internet (slow & flaky)
network = pytest.mark.network


@network
def test_to_dataset_with_cop30(aoi):
    """`to_dataset` on a COP30 search result yields an xarray Dataset with a 'data' variable."""
    results = coincident.search.search(dataset="cop30", intersects=aoi)
    # Coarse ~1km resolution keeps the network transfer small
    dataset = to_dataset(results, aoi=aoi, resolution=0.1).compute()
    assert isinstance(dataset, xr.Dataset), "Expected output to be an xarray Dataset."
    assert "data" in dataset.data_vars, "Expected 'data' variable in the Dataset."


@network
def test_to_dataset_with_worldcover(aoi):
    """`to_dataset` on a 2020 WorldCover search result yields a Dataset with a 'map' variable."""
    results = coincident.search.search(
        dataset="worldcover", intersects=aoi, datetime=["2020"]
    )
    # Only request the 'map' band; ~1km resolution keeps the transfer small
    dataset = to_dataset(results, bands=["map"], aoi=aoi, resolution=0.1).compute()
    assert isinstance(dataset, xr.Dataset), "Expected output to be an xarray Dataset."
    assert "map" in dataset.data_vars, "Expected 'map' variable in the Dataset."


@network
def test_plot_esa_worldcover_valid(aoi):
    """`plot_esa_worldcover` draws at least one pcolormesh for a valid WorldCover dataset."""
    results = coincident.search.search(
        dataset="worldcover", intersects=aoi, datetime=["2021"]
    )
    # plot_esa_worldcover expects the band under the name 'landcover'
    landcover = (
        to_dataset(results, bands=["map"], aoi=aoi, resolution=0.1)  # ~1km
        .compute()
        .rename(map="landcover")
    )
    ax = plot_esa_worldcover(landcover)
    assert ax is not None, "Expected a valid Matplotlib Axes object."
    # pcolormesh artists are QuadMesh children of the Axes:
    # https://matplotlib.org/stable/users/prev_whats_new/whats_new_3.4.0.html
    # https://github.com/matplotlib/matplotlib/blob/main/lib/matplotlib/tests/test_contour.py#L146
    assert any(
        isinstance(child, QuadMesh) for child in ax.get_children()
    ), "Expected at least one pcolormesh object in the plot."
Loading