diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 07353d32c..bcd4456e2 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -49,6 +49,11 @@ jobs:
python -m pip install -e src/titiler/extensions["test,cogeo,stac"]
python -m pytest src/titiler/extensions --cov=titiler.extensions --cov-report=xml --cov-append --cov-report=term-missing
+ - name: Test titiler.xarray
+ run: |
+ python -m pip install -e src/titiler/xarray["test"]
+ python -m pytest src/titiler/xarray --cov=titiler.xarray --cov-report=xml --cov-append --cov-report=term-missing
+
- name: Test titiler.mosaic
run: |
python -m pip install -e src/titiler/mosaic["test"]
diff --git a/.github/workflows/deploy_mkdocs.yml b/.github/workflows/deploy_mkdocs.yml
index 31536e303..a3a2e3db0 100644
--- a/.github/workflows/deploy_mkdocs.yml
+++ b/.github/workflows/deploy_mkdocs.yml
@@ -29,7 +29,7 @@ jobs:
- name: Install dependencies
run: |
python -m pip install --upgrade pip
- python -m pip install src/titiler/core src/titiler/extensions["cogeo,stac"] src/titiler/mosaic src/titiler/application
+ python -m pip install src/titiler/core src/titiler/extensions["cogeo,stac"] src/titiler/xarray src/titiler/mosaic src/titiler/application
python -m pip install -r requirements/requirements-docs.txt
diff --git a/CHANGES.md b/CHANGES.md
index 9265882d0..ed896170a 100644
--- a/CHANGES.md
+++ b/CHANGES.md
@@ -76,6 +76,12 @@
* `/bounds` endpoints now return a `crs: str` attribute in the response
+* update `wmts.xml` template to support multiple layers
+
+* re-order endpoints parameters
+
+* avoid `lat/lon` overflow in `map` viewer
+
### titiler.mosaic
* Rename `reader` attribute to `backend` in `MosaicTilerFactory` **breaking change**
@@ -86,6 +92,8 @@
* Update `cogeo-mosaic` dependency to `>=8.0,<9.0`
+* re-order endpoints parameters
+
### titiler.extensions
* Encode URL for cog_viewer and stac_viewer (author @guillemc23, https://github.com/developmentseed/titiler/pull/961)
diff --git a/README.md b/README.md
index c38adc3d9..5dc5daadc 100644
--- a/README.md
+++ b/README.md
@@ -55,6 +55,7 @@ Starting with version `0.3.0`, the `TiTiler` python module has been split into a
| Package | Version | Description
| ------- | ------- |-------------
[**titiler.core**](https://github.com/developmentseed/titiler/tree/main/src/titiler/core) | [![titiler.core](https://img.shields.io/pypi/v/titiler.core?color=%2334D058&label=pypi)](https://pypi.org/project/titiler.core) | The `Core` package contains libraries to help create a dynamic tiler for COG and STAC
+[**titiler.xarray**](https://github.com/developmentseed/titiler/tree/main/src/titiler/xarray) | [![titiler.xarray](https://img.shields.io/pypi/v/titiler.xarray?color=%2334D058&label=pypi)](https://pypi.org/project/titiler.xarray) | The `xarray` package contains libraries to help create a dynamic tiler for Zarr/NetCDF datasets
[**titiler.extensions**](https://github.com/developmentseed/titiler/tree/main/src/titiler/extensions) | [![titiler.extensions](https://img.shields.io/pypi/v/titiler.extensions?color=%2334D058&label=pypi)](https://pypi.org/project/titiler.extensions) | TiTiler's extensions package. Contains extensions for Tiler Factories.
[**titiler.mosaic**](https://github.com/developmentseed/titiler/tree/main/src/titiler/mosaic) | [![titiler.mosaic](https://img.shields.io/pypi/v/titiler.mosaic?color=%2334D058&label=pypi)](https://pypi.org/project/titiler.mosaic) | The `mosaic` package contains libraries to help create a dynamic tiler for MosaicJSON (adds `cogeo-mosaic` requirement)
[**titiler.application**](https://github.com/developmentseed/titiler/tree/main/src/titiler/application) | [![titiler.application](https://img.shields.io/pypi/v/titiler.application?color=%2334D058&label=pypi)](https://pypi.org/project/titiler.application) | TiTiler's `demo` package. Contains a FastAPI application with full support of COG, STAC and MosaicJSON
@@ -71,6 +72,7 @@ python -m pip install -U pip
python -m pip install titiler.{package}
# e.g.,
# python -m pip install titiler.core
+# python -m pip install titiler.xarray
# python -m pip install titiler.extensions
# python -m pip install titiler.mosaic
# python -m pip install titiler.application (also installs core, extensions and mosaic)
@@ -89,7 +91,7 @@ git clone https://github.com/developmentseed/titiler.git
cd titiler
python -m pip install -U pip
-python -m pip install -e src/titiler/core -e src/titiler/extensions -e src/titiler/mosaic -e src/titiler/application
+python -m pip install -e src/titiler/core -e src/titiler/xarray -e src/titiler/extensions -e src/titiler/mosaic -e src/titiler/application
python -m pip install uvicorn
uvicorn titiler.application.main:app --reload
@@ -125,6 +127,7 @@ Some options can be set via environment variables, see: https://github.com/tiang
src/titiler/ - titiler modules.
├── application/ - Titiler's `Application` package
├── extensions/ - Titiler's `Extensions` package
+ ├── xarray/ - Titiler's `Xarray` package
├── core/ - Titiler's `Core` package
└── mosaic/ - Titiler's `Mosaic` package
```
diff --git a/scripts/publish b/scripts/publish
index 7e548c6ec..1383cd75f 100755
--- a/scripts/publish
+++ b/scripts/publish
@@ -2,6 +2,7 @@
SUBPACKAGE_DIRS=(
"core"
+ "xarray"
"mosaic"
"application"
"extensions"
diff --git a/src/titiler/application/tests/routes/test_cog.py b/src/titiler/application/tests/routes/test_cog.py
index 3b185787b..75c30a022 100644
--- a/src/titiler/application/tests/routes/test_cog.py
+++ b/src/titiler/application/tests/routes/test_cog.py
@@ -64,7 +64,7 @@ def test_wmts(rio, app):
"http://testserver/cog/WebMercatorQuad/WMTSCapabilities.xml?url=https"
in response.content.decode()
)
- assert "Dataset" in response.content.decode()
+ assert "default" in response.content.decode()
assert (
"http://testserver/cog/tiles/WebMercatorQuad/{TileMatrix}/{TileCol}/{TileRow}@1x.png?url=https"
in response.content.decode()
diff --git a/src/titiler/core/titiler/core/dependencies.py b/src/titiler/core/titiler/core/dependencies.py
index 2a66cf3e8..110b677cc 100644
--- a/src/titiler/core/titiler/core/dependencies.py
+++ b/src/titiler/core/titiler/core/dependencies.py
@@ -606,7 +606,7 @@ def CRSParams(
crs: Annotated[
Optional[str],
Query(
- description="Coordinate Reference System`.",
+ description="Coordinate Reference System.",
),
] = None,
) -> Optional[CRS]:
diff --git a/src/titiler/core/titiler/core/factory.py b/src/titiler/core/titiler/core/factory.py
index 823503be6..fff72bd37 100644
--- a/src/titiler/core/titiler/core/factory.py
+++ b/src/titiler/core/titiler/core/factory.py
@@ -82,7 +82,7 @@
Statistics,
StatisticsGeoJSON,
)
-from titiler.core.resources.enums import ImageType, MediaType
+from titiler.core.resources.enums import ImageType
from titiler.core.resources.responses import GeoJSONResponse, JSONResponse, XMLResponse
from titiler.core.routing import EndpointScope
from titiler.core.utils import render_image
@@ -430,13 +430,13 @@ def statistics(self):
)
def statistics(
src_path=Depends(self.path_dependency),
+ reader_params=Depends(self.reader_dependency),
layer_params=Depends(self.layer_dependency),
dataset_params=Depends(self.dataset_dependency),
image_params=Depends(self.img_preview_dependency),
post_process=Depends(self.process_dependency),
stats_params=Depends(self.stats_dependency),
histogram_params=Depends(self.histogram_dependency),
- reader_params=Depends(self.reader_dependency),
env=Depends(self.environment_dependency),
):
"""Get Dataset statistics."""
@@ -475,6 +475,7 @@ def geojson_statistics(
Body(description="GeoJSON Feature or FeatureCollection."),
],
src_path=Depends(self.path_dependency),
+ reader_params=Depends(self.reader_dependency),
coord_crs=Depends(CoordCRSParams),
dst_crs=Depends(DstCRSParams),
layer_params=Depends(self.layer_dependency),
@@ -483,7 +484,6 @@ def geojson_statistics(
post_process=Depends(self.process_dependency),
stats_params=Depends(self.stats_dependency),
histogram_params=Depends(self.histogram_dependency),
- reader_params=Depends(self.reader_dependency),
env=Depends(self.environment_dependency),
):
"""Get Statistics from a geojson feature or featureCollection."""
@@ -578,15 +578,15 @@ def tile(
"Default will be automatically defined if the output image needs a mask (png) or not (jpeg).",
] = None,
src_path=Depends(self.path_dependency),
+ reader_params=Depends(self.reader_dependency),
+ tile_params=Depends(self.tile_dependency),
layer_params=Depends(self.layer_dependency),
dataset_params=Depends(self.dataset_dependency),
- tile_params=Depends(self.tile_dependency),
post_process=Depends(self.process_dependency),
rescale=Depends(self.rescale_dependency),
color_formula=Depends(self.color_formula_dependency),
colormap=Depends(self.colormap_dependency),
render_params=Depends(self.render_dependency),
- reader_params=Depends(self.reader_dependency),
env=Depends(self.environment_dependency),
):
"""Create map tile from a dataset."""
@@ -641,7 +641,6 @@ def tilejson(
description="Identifier selecting one of the TileMatrixSetId supported."
),
],
- src_path=Depends(self.path_dependency),
tile_format: Annotated[
Optional[ImageType],
Query(
@@ -662,15 +661,16 @@ def tilejson(
Optional[int],
Query(description="Overwrite default maxzoom."),
] = None,
+ src_path=Depends(self.path_dependency),
+ reader_params=Depends(self.reader_dependency),
+ tile_params=Depends(self.tile_dependency),
layer_params=Depends(self.layer_dependency),
dataset_params=Depends(self.dataset_dependency),
- tile_params=Depends(self.tile_dependency),
post_process=Depends(self.process_dependency),
rescale=Depends(self.rescale_dependency),
color_formula=Depends(self.color_formula_dependency),
colormap=Depends(self.colormap_dependency),
render_params=Depends(self.render_dependency),
- reader_params=Depends(self.reader_dependency),
env=Depends(self.environment_dependency),
):
"""Return TileJSON document for a dataset."""
@@ -726,7 +726,6 @@ def map_viewer(
description="Identifier selecting one of the TileMatrixSetId supported."
),
],
- src_path=Depends(self.path_dependency),
tile_format: Annotated[
Optional[ImageType],
Query(
@@ -747,15 +746,16 @@ def map_viewer(
Optional[int],
Query(description="Overwrite default maxzoom."),
] = None,
+ src_path=Depends(self.path_dependency),
+ reader_params=Depends(self.reader_dependency),
+ tile_params=Depends(self.tile_dependency),
layer_params=Depends(self.layer_dependency),
dataset_params=Depends(self.dataset_dependency),
- tile_params=Depends(self.tile_dependency),
post_process=Depends(self.process_dependency),
rescale=Depends(self.rescale_dependency),
color_formula=Depends(self.color_formula_dependency),
colormap=Depends(self.colormap_dependency),
render_params=Depends(self.render_dependency),
- reader_params=Depends(self.reader_dependency),
env=Depends(self.environment_dependency),
):
"""Return TileJSON document for a dataset."""
@@ -791,7 +791,6 @@ def wmts(
description="Identifier selecting one of the TileMatrixSetId supported."
),
],
- src_path=Depends(self.path_dependency),
tile_format: Annotated[
ImageType,
Query(description="Output image type. Default is png."),
@@ -816,15 +815,16 @@ def wmts(
description="Use EPSG code, not opengis.net, for the ows:SupportedCRS in the TileMatrixSet (set to True to enable ArcMap compatability)"
),
] = False,
+ src_path=Depends(self.path_dependency),
+ reader_params=Depends(self.reader_dependency),
+ tile_params=Depends(self.tile_dependency),
layer_params=Depends(self.layer_dependency),
dataset_params=Depends(self.dataset_dependency),
- tile_params=Depends(self.tile_dependency),
post_process=Depends(self.process_dependency),
rescale=Depends(self.rescale_dependency),
color_formula=Depends(self.color_formula_dependency),
colormap=Depends(self.colormap_dependency),
render_params=Depends(self.render_dependency),
- reader_params=Depends(self.reader_dependency),
env=Depends(self.environment_dependency),
):
"""OGC WMTS endpoint."""
@@ -853,8 +853,6 @@ def wmts(
for (key, value) in request.query_params._list
if key.lower() not in qs_key_to_remove
]
- if qs:
- tiles_url += f"?{urlencode(qs)}"
tms = self.supported_tms.get(tileMatrixSetId)
with rasterio.Env(**env):
@@ -885,6 +883,16 @@ def wmts(
else:
supported_crs = tms.crs.srs
+ layers = [
+ {
+ "title": src_path if isinstance(src_path, str) else "TiTiler",
+ "name": "default",
+ "tiles_url": tiles_url,
+ "query_string": urlencode(qs, doseq=True) if qs else None,
+ "bounds": bounds,
+ },
+ ]
+
bbox_crs_type = "WGS84BoundingBox"
bbox_crs_uri = "urn:ogc:def:crs:OGC:2:84"
if tms.rasterio_geographic_crs != WGS84_CRS:
@@ -895,18 +903,15 @@ def wmts(
request,
name="wmts.xml",
context={
- "tiles_endpoint": tiles_url,
- "bounds": bounds,
+ "tileMatrixSetId": tms.id,
"tileMatrix": tileMatrix,
- "tms": tms,
"supported_crs": supported_crs,
"bbox_crs_type": bbox_crs_type,
"bbox_crs_uri": bbox_crs_uri,
- "title": src_path if isinstance(src_path, str) else "TiTiler",
- "layer_name": "Dataset",
+ "layers": layers,
"media_type": tile_format.mediatype,
},
- media_type=MediaType.xml.value,
+ media_type="application/xml",
)
############################################################################
@@ -925,10 +930,10 @@ def point(
lon: Annotated[float, Path(description="Longitude")],
lat: Annotated[float, Path(description="Latitude")],
src_path=Depends(self.path_dependency),
+ reader_params=Depends(self.reader_dependency),
coord_crs=Depends(CoordCRSParams),
layer_params=Depends(self.layer_dependency),
dataset_params=Depends(self.dataset_dependency),
- reader_params=Depends(self.reader_dependency),
env=Depends(self.environment_dependency),
):
"""Get Point value for a dataset."""
@@ -963,16 +968,16 @@ def preview(
"Default will be automatically defined if the output image needs a mask (png) or not (jpeg).",
] = None,
src_path=Depends(self.path_dependency),
+ reader_params=Depends(self.reader_dependency),
layer_params=Depends(self.layer_dependency),
- dst_crs=Depends(DstCRSParams),
dataset_params=Depends(self.dataset_dependency),
image_params=Depends(self.img_preview_dependency),
+ dst_crs=Depends(DstCRSParams),
post_process=Depends(self.process_dependency),
rescale=Depends(self.rescale_dependency),
color_formula=Depends(self.color_formula_dependency),
colormap=Depends(self.colormap_dependency),
render_params=Depends(self.render_dependency),
- reader_params=Depends(self.reader_dependency),
env=Depends(self.environment_dependency),
):
"""Create preview of a dataset."""
@@ -1029,17 +1034,17 @@ def bbox_image(
"Default will be automatically defined if the output image needs a mask (png) or not (jpeg).",
] = None,
src_path=Depends(self.path_dependency),
- dst_crs=Depends(DstCRSParams),
- coord_crs=Depends(CoordCRSParams),
+ reader_params=Depends(self.reader_dependency),
layer_params=Depends(self.layer_dependency),
dataset_params=Depends(self.dataset_dependency),
image_params=Depends(self.img_part_dependency),
+ dst_crs=Depends(DstCRSParams),
+ coord_crs=Depends(CoordCRSParams),
post_process=Depends(self.process_dependency),
rescale=Depends(self.rescale_dependency),
color_formula=Depends(self.color_formula_dependency),
colormap=Depends(self.colormap_dependency),
render_params=Depends(self.render_dependency),
- reader_params=Depends(self.reader_dependency),
env=Depends(self.environment_dependency),
):
"""Create image from a bbox."""
@@ -1093,17 +1098,17 @@ def feature_image(
"Default will be automatically defined if the output image needs a mask (png) or not (jpeg).",
] = None,
src_path=Depends(self.path_dependency),
- coord_crs=Depends(CoordCRSParams),
- dst_crs=Depends(DstCRSParams),
+ reader_params=Depends(self.reader_dependency),
layer_params=Depends(self.layer_dependency),
dataset_params=Depends(self.dataset_dependency),
image_params=Depends(self.img_part_dependency),
+ coord_crs=Depends(CoordCRSParams),
+ dst_crs=Depends(DstCRSParams),
post_process=Depends(self.process_dependency),
rescale=Depends(self.rescale_dependency),
color_formula=Depends(self.color_formula_dependency),
colormap=Depends(self.colormap_dependency),
render_params=Depends(self.render_dependency),
- reader_params=Depends(self.reader_dependency),
env=Depends(self.environment_dependency),
):
"""Create image from a geojson feature."""
@@ -1178,8 +1183,8 @@ def info(self):
)
def info(
src_path=Depends(self.path_dependency),
- asset_params=Depends(self.assets_dependency),
reader_params=Depends(self.reader_dependency),
+ asset_params=Depends(self.assets_dependency),
env=Depends(self.environment_dependency),
):
"""Return dataset's basic info or the list of available assets."""
@@ -1201,8 +1206,8 @@ def info(
)
def info_geojson(
src_path=Depends(self.path_dependency),
- asset_params=Depends(self.assets_dependency),
reader_params=Depends(self.reader_dependency),
+ asset_params=Depends(self.assets_dependency),
crs=Depends(CRSParams),
env=Depends(self.environment_dependency),
):
@@ -1266,12 +1271,12 @@ def statistics(self): # noqa: C901
)
def asset_statistics(
src_path=Depends(self.path_dependency),
+ reader_params=Depends(self.reader_dependency),
asset_params=Depends(AssetsBidxParams),
dataset_params=Depends(self.dataset_dependency),
image_params=Depends(self.img_preview_dependency),
stats_params=Depends(self.stats_dependency),
histogram_params=Depends(self.histogram_dependency),
- reader_params=Depends(self.reader_dependency),
env=Depends(self.environment_dependency),
):
"""Per Asset statistics"""
@@ -1301,13 +1306,13 @@ def asset_statistics(
)
def statistics(
src_path=Depends(self.path_dependency),
+ reader_params=Depends(self.reader_dependency),
layer_params=Depends(AssetsBidxExprParamsOptional),
dataset_params=Depends(self.dataset_dependency),
image_params=Depends(self.img_preview_dependency),
post_process=Depends(self.process_dependency),
stats_params=Depends(self.stats_dependency),
histogram_params=Depends(self.histogram_dependency),
- reader_params=Depends(self.reader_dependency),
env=Depends(self.environment_dependency),
):
"""Merged assets statistics."""
@@ -1350,15 +1355,15 @@ def geojson_statistics(
Body(description="GeoJSON Feature or FeatureCollection."),
],
src_path=Depends(self.path_dependency),
- coord_crs=Depends(CoordCRSParams),
- dst_crs=Depends(DstCRSParams),
+ reader_params=Depends(self.reader_dependency),
layer_params=Depends(AssetsBidxExprParamsOptional),
dataset_params=Depends(self.dataset_dependency),
+ coord_crs=Depends(CoordCRSParams),
+ dst_crs=Depends(DstCRSParams),
post_process=Depends(self.process_dependency),
image_params=Depends(self.img_part_dependency),
stats_params=Depends(self.stats_dependency),
histogram_params=Depends(self.histogram_dependency),
- reader_params=Depends(self.reader_dependency),
env=Depends(self.environment_dependency),
):
"""Get Statistics from a geojson feature or featureCollection."""
@@ -1436,8 +1441,8 @@ def info(self):
)
def info(
src_path=Depends(self.path_dependency),
- bands_params=Depends(self.bands_dependency),
reader_params=Depends(self.reader_dependency),
+ bands_params=Depends(self.bands_dependency),
env=Depends(self.environment_dependency),
):
"""Return dataset's basic info."""
@@ -1459,8 +1464,8 @@ def info(
)
def info_geojson(
src_path=Depends(self.path_dependency),
- bands_params=Depends(self.bands_dependency),
reader_params=Depends(self.reader_dependency),
+ bands_params=Depends(self.bands_dependency),
crs=Depends(CRSParams),
env=Depends(self.environment_dependency),
):
@@ -1518,13 +1523,13 @@ def statistics(self): # noqa: C901
)
def statistics(
src_path=Depends(self.path_dependency),
+ reader_params=Depends(self.reader_dependency),
bands_params=Depends(BandsExprParamsOptional),
dataset_params=Depends(self.dataset_dependency),
image_params=Depends(self.img_preview_dependency),
post_process=Depends(self.process_dependency),
stats_params=Depends(self.stats_dependency),
histogram_params=Depends(self.histogram_dependency),
- reader_params=Depends(self.reader_dependency),
env=Depends(self.environment_dependency),
):
"""Get Dataset statistics."""
@@ -1567,15 +1572,15 @@ def geojson_statistics(
Body(description="GeoJSON Feature or FeatureCollection."),
],
src_path=Depends(self.path_dependency),
- coord_crs=Depends(CoordCRSParams),
- dst_crs=Depends(DstCRSParams),
+ reader_params=Depends(self.reader_dependency),
bands_params=Depends(BandsExprParamsOptional),
dataset_params=Depends(self.dataset_dependency),
image_params=Depends(self.img_part_dependency),
+ coord_crs=Depends(CoordCRSParams),
+ dst_crs=Depends(DstCRSParams),
post_process=Depends(self.process_dependency),
stats_params=Depends(self.stats_dependency),
histogram_params=Depends(self.histogram_dependency),
- reader_params=Depends(self.reader_dependency),
env=Depends(self.environment_dependency),
):
"""Get Statistics from a geojson feature or featureCollection."""
@@ -1632,7 +1637,7 @@ def register_routes(self):
responses={
200: {
"content": {
- MediaType.json.value: {},
+ "application/json": {},
},
},
},
@@ -1673,7 +1678,7 @@ async def tilematrixsets(request: Request):
responses={
200: {
"content": {
- MediaType.json.value: {},
+ "application/json": {},
},
},
},
diff --git a/src/titiler/core/titiler/core/templates/map.html b/src/titiler/core/titiler/core/templates/map.html
index ff8f64760..e52c3f73a 100644
--- a/src/titiler/core/titiler/core/templates/map.html
+++ b/src/titiler/core/titiler/core/templates/map.html
@@ -86,20 +86,33 @@
let bounds = [...data.bounds]
+ var left = bounds[0],
+ bottom = bounds[1],
+ right = bounds[2],
+ top = bounds[3];
+
+ if (left < right) {
+ left = Math.max(left, {{ tms.bbox.left }})
+ right = Math.min(right, {{ tms.bbox.right }})
+ }
+ bottom = Math.max(bottom, {{ tms.bbox.bottom }})
+ top = Math.min(top, {{ tms.bbox.top }})
+ console.log(left, bottom, right, top)
+
let geo;
// handle files that span accross dateline
- if (bounds[0] > bounds[2]) {
+ if (left > right) {
geo = {
"type": "FeatureCollection",
"features": [
- bboxPolygon([-180, bounds[1], bounds[2], bounds[3]]),
- bboxPolygon([bounds[0], bounds[1], 180, bounds[3]]),
+ bboxPolygon([-180, bottom, right, top]),
+ bboxPolygon([left, bottom, 180, top]),
]
}
} else {
geo = {
"type": "FeatureCollection",
- "features": [bboxPolygon(bounds)]
+ "features": [bboxPolygon([left, bottom, right, top])]
}
}
console.log(geo)
@@ -110,13 +123,9 @@
map.fitBounds(aoi.getBounds());
// Bounds crossing dateline
- if (bounds[0] > bounds[2]) {
- bounds[0] = bounds[0] - 360
+ if (left > right) {
+ left = right - 360
}
- var left = bounds[0],
- bottom = bounds[1],
- right = bounds[2],
- top = bounds[3];
L.tileLayer(
data.tiles[0], {
diff --git a/src/titiler/core/titiler/core/templates/wmts.xml b/src/titiler/core/titiler/core/templates/wmts.xml
index b86349ad5..2fea0cf39 100644
--- a/src/titiler/core/titiler/core/templates/wmts.xml
+++ b/src/titiler/core/titiler/core/templates/wmts.xml
@@ -33,25 +33,27 @@
+ {% for layer in layers %}
- {{ title }}
- {{ layer_name }}
- {{ title }}
+ {{ layer.title }}
+ {{ layer.name }}
+ {{ layer.name }}
- {{ bounds[0] }} {{ bounds[1] }}
- {{ bounds[2] }} {{ bounds[3] }}
+ {{ layer.bounds[0] }} {{ layer.bounds[1] }}
+ {{ layer.bounds[2] }} {{ layer.bounds[3] }}
{{ media_type }}
- {{ tms.id }}
+ {{ tileMatrixSetId }}
-
+
+ {% endfor %}
- {{ tms.id }}
+ {{ tileMatrixSetId }}
{{ supported_crs }}
{% for item in tileMatrix %}
{{ item | safe }}
diff --git a/src/titiler/extensions/titiler/extensions/wms.py b/src/titiler/extensions/titiler/extensions/wms.py
index 71dd1e206..b63ac0b68 100644
--- a/src/titiler/extensions/titiler/extensions/wms.py
+++ b/src/titiler/extensions/titiler/extensions/wms.py
@@ -284,13 +284,13 @@ def register(self, factory: TilerFactory): # noqa: C901
def wms( # noqa: C901
request: Request,
# vendor (titiler) parameters
+ reader_params=Depends(factory.reader_dependency),
layer_params=Depends(factory.layer_dependency),
dataset_params=Depends(factory.dataset_dependency),
post_process=Depends(factory.process_dependency),
rescale=Depends(RescalingParams),
color_formula=Depends(ColorFormulaParams),
colormap=Depends(factory.colormap_dependency),
- reader_params=Depends(factory.reader_dependency),
env=Depends(factory.environment_dependency),
):
"""Return a WMS query for a single COG.
diff --git a/src/titiler/mosaic/titiler/mosaic/factory.py b/src/titiler/mosaic/titiler/mosaic/factory.py
index 12b7f1d5e..0ec2cc1d7 100644
--- a/src/titiler/mosaic/titiler/mosaic/factory.py
+++ b/src/titiler/mosaic/titiler/mosaic/factory.py
@@ -45,7 +45,7 @@
)
from titiler.core.factory import DEFAULT_TEMPLATES, BaseFactory, img_endpoint_params
from titiler.core.models.mapbox import TileJSON
-from titiler.core.resources.enums import ImageType, MediaType, OptionalHeader
+from titiler.core.resources.enums import ImageType, OptionalHeader
from titiler.core.resources.responses import GeoJSONResponse, JSONResponse, XMLResponse
from titiler.core.utils import render_image
from titiler.mosaic.models.responses import Point
@@ -316,6 +316,8 @@ def tile(
"Default will be automatically defined if the output image needs a mask (png) or not (jpeg).",
] = None,
src_path=Depends(self.path_dependency),
+ backend_params=Depends(self.backend_dependency),
+ reader_params=Depends(self.reader_dependency),
layer_params=Depends(self.layer_dependency),
dataset_params=Depends(self.dataset_dependency),
pixel_selection=Depends(self.pixel_selection_dependency),
@@ -325,8 +327,6 @@ def tile(
color_formula=Depends(self.color_formula_dependency),
colormap=Depends(self.colormap_dependency),
render_params=Depends(self.render_dependency),
- backend_params=Depends(self.backend_dependency),
- reader_params=Depends(self.reader_dependency),
env=Depends(self.environment_dependency),
):
"""Create map tile from a COG."""
@@ -410,7 +410,6 @@ def tilejson(
description="Identifier selecting one of the TileMatrixSetId supported."
),
],
- src_path=Depends(self.path_dependency),
tile_format: Annotated[
Optional[ImageType],
Query(
@@ -431,6 +430,9 @@ def tilejson(
Optional[int],
Query(description="Overwrite default maxzoom."),
] = None,
+ src_path=Depends(self.path_dependency),
+ backend_params=Depends(self.backend_dependency),
+ reader_params=Depends(self.reader_dependency),
layer_params=Depends(self.layer_dependency),
dataset_params=Depends(self.dataset_dependency),
pixel_selection=Depends(self.pixel_selection_dependency),
@@ -440,8 +442,6 @@ def tilejson(
color_formula=Depends(self.color_formula_dependency),
colormap=Depends(self.colormap_dependency),
render_params=Depends(self.render_dependency),
- backend_params=Depends(self.backend_dependency),
- reader_params=Depends(self.reader_dependency),
env=Depends(self.environment_dependency),
):
"""Return TileJSON document for a COG."""
@@ -504,7 +504,6 @@ def map_viewer(
description="Identifier selecting one of the TileMatrixSetId supported."
),
],
- src_path=Depends(self.path_dependency),
tile_format: Annotated[
Optional[ImageType],
Query(
@@ -525,6 +524,9 @@ def map_viewer(
Optional[int],
Query(description="Overwrite default maxzoom."),
] = None,
+ src_path=Depends(self.path_dependency),
+ backend_params=Depends(self.backend_dependency),
+ reader_params=Depends(self.reader_dependency),
layer_params=Depends(self.layer_dependency),
dataset_params=Depends(self.dataset_dependency),
pixel_selection=Depends(self.pixel_selection_dependency),
@@ -534,8 +536,6 @@ def map_viewer(
color_formula=Depends(self.color_formula_dependency),
colormap=Depends(self.colormap_dependency),
render_params=Depends(self.render_dependency),
- backend_params=Depends(self.backend_dependency),
- reader_params=Depends(self.reader_dependency),
env=Depends(self.environment_dependency),
):
"""Return TileJSON document for a dataset."""
@@ -571,7 +571,6 @@ def wmts(
description="Identifier selecting one of the TileMatrixSetId supported."
),
],
- src_path=Depends(self.path_dependency),
tile_format: Annotated[
ImageType,
Query(description="Output image type. Default is png."),
@@ -596,6 +595,9 @@ def wmts(
description="Use EPSG code, not opengis.net, for the ows:SupportedCRS in the TileMatrixSet (set to True to enable ArcMap compatability)"
),
] = False,
+ src_path=Depends(self.path_dependency),
+ backend_params=Depends(self.backend_dependency),
+ reader_params=Depends(self.reader_dependency),
layer_params=Depends(self.layer_dependency),
dataset_params=Depends(self.dataset_dependency),
pixel_selection=Depends(self.pixel_selection_dependency),
@@ -605,8 +607,6 @@ def wmts(
color_formula=Depends(self.color_formula_dependency),
colormap=Depends(self.colormap_dependency),
render_params=Depends(self.render_dependency),
- backend_params=Depends(self.backend_dependency),
- reader_params=Depends(self.reader_dependency),
env=Depends(self.environment_dependency),
):
"""OGC WMTS endpoint."""
@@ -635,8 +635,6 @@ def wmts(
for (key, value) in request.query_params._list
if key.lower() not in qs_key_to_remove
]
- if qs:
- tiles_url += f"?{urlencode(qs)}"
tms = self.supported_tms.get(tileMatrixSetId)
with rasterio.Env(**env):
@@ -671,6 +669,18 @@ def wmts(
else:
supported_crs = tms.crs.srs
+ layers = [
+ {
+ "title": src_path
+ if isinstance(src_path, str)
+ else "TiTiler Mosaic",
+ "name": "default",
+ "tiles_url": tiles_url,
+ "query_string": urlencode(qs, doseq=True) if qs else None,
+ "bounds": bounds,
+ },
+ ]
+
bbox_crs_type = "WGS84BoundingBox"
bbox_crs_uri = "urn:ogc:def:crs:OGC:2:84"
if tms.rasterio_geographic_crs != WGS84_CRS:
@@ -681,20 +691,15 @@ def wmts(
request,
name="wmts.xml",
context={
- "tiles_endpoint": tiles_url,
- "bounds": bounds,
+ "tileMatrixSetId": tms.id,
"tileMatrix": tileMatrix,
- "tms": tms,
"supported_crs": supported_crs,
"bbox_crs_type": bbox_crs_type,
"bbox_crs_uri": bbox_crs_uri,
- "title": src_path
- if isinstance(src_path, str)
- else "TiTiler Mosaic",
- "layer_name": "Mosaic",
+ "layers": layers,
"media_type": tile_format.mediatype,
},
- media_type=MediaType.xml.value,
+ media_type="application/xml",
)
############################################################################
@@ -714,11 +719,11 @@ def point(
lon: Annotated[float, Path(description="Longitude")],
lat: Annotated[float, Path(description="Latitude")],
src_path=Depends(self.path_dependency),
+ backend_params=Depends(self.backend_dependency),
+ reader_params=Depends(self.reader_dependency),
coord_crs=Depends(CoordCRSParams),
layer_params=Depends(self.layer_dependency),
dataset_params=Depends(self.dataset_dependency),
- backend_params=Depends(self.backend_dependency),
- reader_params=Depends(self.reader_dependency),
env=Depends(self.environment_dependency),
):
"""Get Point value for a Mosaic."""
@@ -768,9 +773,9 @@ def assets_for_bbox(
maxx: Annotated[float, Path(description="Bounding box max X")],
maxy: Annotated[float, Path(description="Bounding box max Y")],
src_path=Depends(self.path_dependency),
- coord_crs=Depends(CoordCRSParams),
backend_params=Depends(self.backend_dependency),
reader_params=Depends(self.reader_dependency),
+ coord_crs=Depends(CoordCRSParams),
env=Depends(self.environment_dependency),
):
"""Return a list of assets which overlap a bounding box"""
diff --git a/src/titiler/xarray/README.md b/src/titiler/xarray/README.md
new file mode 100644
index 000000000..143133544
--- /dev/null
+++ b/src/titiler/xarray/README.md
@@ -0,0 +1,46 @@
+## titiler.xarray
+
+Adds support for Xarray Dataset (NetCDF/Zarr) in Titiler.
+
+## Installation
+
+```bash
+$ python -m pip install -U pip
+
+# From Pypi
+$ python -m pip install titiler.xarray
+
+# Or from sources
+$ git clone https://github.com/developmentseed/titiler.git
+$ cd titiler && python -m pip install -e src/titiler/core -e src/titiler/xarray
+```
+
+## How To
+
+```python
+from fastapi import FastAPI
+from titiler.xarray.factory import TilerFactory
+
+# Create a FastAPI application
+app = FastAPI(
+ description="A lightweight tile server for Xarray Datasets (Zarr/NetCDF)",
+)
+
+# Create a set of Xarray dataset endpoints
+endpoint = TilerFactory()
+
+# Register the Xarray endpoints to the application
+app.include_router(endpoint.router)
+```
+
+## Package structure
+
+```
+titiler/
+ └── xarray/
+ ├── tests/ - Tests suite
+ └── titiler/xarray/ - `xarray` namespace package
+ ├── dependencies.py - titiler-xarray dependencies
+ ├── io.py - titiler-xarray Readers
+ └── factory.py - endpoints factory
+```
diff --git a/src/titiler/xarray/pyproject.toml b/src/titiler/xarray/pyproject.toml
new file mode 100644
index 000000000..78747a86d
--- /dev/null
+++ b/src/titiler/xarray/pyproject.toml
@@ -0,0 +1,70 @@
+[project]
+name = "titiler.xarray"
+description = "Xarray plugin for TiTiler."
+readme = "README.md"
+requires-python = ">=3.8"
+authors = [
+ {name = "Vincent Sarago", email = "vincent@developmentseed.com"},
+ {name = "Aimee Barciauskas", email = "aimee@developmentseed.com"},
+]
+license = {text = "MIT"}
+keywords = [
+ "TiTiler",
+ "Xarray",
+ "Zarr",
+ "NetCDF",
+ "HDF",
+]
+classifiers = [
+ "Intended Audience :: Information Technology",
+ "Intended Audience :: Science/Research",
+ "License :: OSI Approved :: MIT License",
+ "Programming Language :: Python :: 3",
+ "Programming Language :: Python :: 3.8",
+ "Programming Language :: Python :: 3.9",
+ "Programming Language :: Python :: 3.10",
+ "Programming Language :: Python :: 3.11",
+ "Programming Language :: Python :: 3.12",
+ "Topic :: Scientific/Engineering :: GIS",
+]
+dynamic = ["version"]
+dependencies = [
+ "titiler.core==0.19.0.dev",
+ "cftime",
+ "h5netcdf",
+ "xarray",
+ "rioxarray",
+ "zarr",
+ "fsspec",
+ "s3fs",
+ "aiohttp",
+ "pandas",
+ "httpx",
+]
+
+[project.optional-dependencies]
+test = [
+ "pytest",
+ "pytest-cov",
+ "pytest-asyncio",
+ "httpx",
+]
+
+[project.urls]
+Homepage = "https://developmentseed.org/titiler/"
+Documentation = "https://developmentseed.org/titiler/"
+Issues = "https://github.com/developmentseed/titiler/issues"
+Source = "https://github.com/developmentseed/titiler"
+Changelog = "https://developmentseed.org/titiler/release-notes/"
+
+[build-system]
+requires = ["pdm-pep517"]
+build-backend = "pdm.pep517.api"
+
+[tool.pdm.version]
+source = "file"
+path = "titiler/xarray/__init__.py"
+
+[tool.pdm.build]
+includes = ["titiler/xarray"]
+excludes = ["tests/", "**/.mypy_cache", "**/.DS_Store"]
diff --git a/src/titiler/xarray/tests/conftest.py b/src/titiler/xarray/tests/conftest.py
new file mode 100644
index 000000000..dc4af91ae
--- /dev/null
+++ b/src/titiler/xarray/tests/conftest.py
@@ -0,0 +1 @@
+"""titiler.xarray test configuration."""
diff --git a/src/titiler/xarray/tests/test_dependencies.py b/src/titiler/xarray/tests/test_dependencies.py
new file mode 100644
index 000000000..6cb44d0a8
--- /dev/null
+++ b/src/titiler/xarray/tests/test_dependencies.py
@@ -0,0 +1,51 @@
+"""test dependencies."""
+
+
+from fastapi import Depends, FastAPI, Path
+from starlette.testclient import TestClient
+from typing_extensions import Annotated
+
+from titiler.xarray import dependencies
+
+
+def test_xarray_tile():
+ """Create App."""
+ app = FastAPI()
+
+ @app.get("/tiles/{z}/{x}/{y}")
+ def tiles(
+ z: Annotated[
+ int,
+ Path(
+ description="Identifier (Z) selecting one of the scales defined in the TileMatrixSet and representing the scaleDenominator the tile.",
+ ),
+ ],
+ x: Annotated[
+ int,
+ Path(
+ description="Column (X) index of the tile on the selected TileMatrix. It cannot exceed the MatrixHeight-1 for the selected TileMatrix.",
+ ),
+ ],
+ y: Annotated[
+ int,
+ Path(
+ description="Row (Y) index of the tile on the selected TileMatrix. It cannot exceed the MatrixWidth-1 for the selected TileMatrix.",
+ ),
+ ],
+ params=Depends(dependencies.CompatXarrayParams),
+ ):
+ """return params."""
+ return params.as_dict()
+
+ with TestClient(app) as client:
+ response = client.get("/tiles/1/2/3")
+ params = response.json()
+ assert params == {}
+
+ response = client.get("/tiles/1/2/3", params={"variable": "yo"})
+ params = response.json()
+ assert params == {"variable": "yo"}
+
+ response = client.get("/tiles/1/2/3", params={"multiscale": True})
+ params = response.json()
+ assert params == {"group": 1}
diff --git a/src/titiler/xarray/tests/test_io_tools.py b/src/titiler/xarray/tests/test_io_tools.py
new file mode 100644
index 000000000..68f62a335
--- /dev/null
+++ b/src/titiler/xarray/tests/test_io_tools.py
@@ -0,0 +1,105 @@
+"""test titiler.xarray.io utility functions."""
+
+from datetime import datetime
+
+import numpy
+import pytest
+import xarray
+
+from titiler.xarray.io import get_variable
+
+
+def test_get_variable():
+ """test io.get_variable."""
+ arr = numpy.arange(0, 33 * 35 * 2).reshape(2, 33, 35)
+ data = xarray.DataArray(
+ arr,
+ dims=("time", "y", "x"),
+ coords={
+ "x": numpy.arange(-170, 180, 10),
+ "y": numpy.arange(-80, 85, 5),
+ "time": [datetime(2022, 1, 1), datetime(2023, 1, 1)],
+ },
+ )
+ data.attrs.update({"valid_min": arr.min(), "valid_max": arr.max()})
+ assert not data.rio.crs
+ assert data.dims == ("time", "y", "x")
+
+ ds = data.to_dataset(name="dataset")
+ da = get_variable(ds, "dataset")
+ assert da.rio.crs
+ assert da.dims == ("y", "x")
+ # Default to the first Time value
+ assert da["time"] == numpy.datetime64("2022-01-01")
+
+ da = get_variable(ds, "dataset", datetime="2023-01-01T01:00:00.000Z")
+ assert da.rio.crs
+ assert da.dims == ("y", "x")
+ assert da["time"] == numpy.datetime64("2023-01-01")
+
+ # Select the Nearest Time
+ da = get_variable(ds, "dataset", datetime="2024-01-01T01:00:00.000Z")
+ assert da.rio.crs
+ assert da.dims == ("y", "x")
+ assert da["time"] == numpy.datetime64("2023-01-01")
+
+ data = data.rename({"y": "Lat", "x": "Lon"})
+ assert data.dims == ("time", "Lat", "Lon")
+ ds = data.to_dataset(name="dataset")
+ da = get_variable(ds, "dataset")
+ assert da.rio.crs
+ assert da.dims == ("y", "x")
+
+ # 4D dataset
+ arr = numpy.arange(0, 33 * 35 * 2).reshape(2, 1, 33, 35)
+ data = xarray.DataArray(
+ arr,
+ dims=("time", "z", "y", "x"),
+ coords={
+ "x": numpy.arange(-170, 180, 10),
+ "y": numpy.arange(-80, 85, 5),
+ "z": [0],
+ "time": [datetime(2022, 1, 1), datetime(2023, 1, 1)],
+ },
+ )
+ ds = data.to_dataset(name="dataset")
+ da = get_variable(ds, "dataset")
+ assert da.rio.crs
+ assert da.dims == ("z", "y", "x")
+
+ # 5D dataset
+ arr = numpy.arange(0, 33 * 35 * 2).reshape(2, 1, 1, 33, 35)
+ data = xarray.DataArray(
+ arr,
+ dims=("time", "universe", "z", "y", "x"),
+ coords={
+ "x": numpy.arange(-170, 180, 10),
+ "y": numpy.arange(-80, 85, 5),
+ "z": [0],
+ "universe": ["somewhere"],
+ "time": [datetime(2022, 1, 1), datetime(2023, 1, 1)],
+ },
+ )
+ ds = data.to_dataset(name="dataset")
+ with pytest.raises(AssertionError):
+ get_variable(ds, "dataset")
+
+ da = get_variable(ds, "dataset", drop_dim="universe=somewhere")
+ assert da.rio.crs
+ assert da.dims == ("z", "y", "x")
+
+    # 3D dataset with non-standard (unrecognized) spatial dimension names
+ arr = numpy.arange(0, 33 * 35 * 2).reshape(2, 33, 35)
+ data = xarray.DataArray(
+ arr,
+ dims=("time", "haut_bas", "gauche_droite"),
+ coords={
+ "gauche_droite": numpy.arange(-170, 180, 10),
+ "haut_bas": numpy.arange(-80, 85, 5),
+ "time": [datetime(2022, 1, 1), datetime(2023, 1, 1)],
+ },
+ )
+
+ ds = data.to_dataset(name="dataset")
+ with pytest.raises(ValueError):
+ da = get_variable(ds, "dataset")
diff --git a/src/titiler/xarray/titiler/xarray/__init__.py b/src/titiler/xarray/titiler/xarray/__init__.py
new file mode 100644
index 000000000..bf0391716
--- /dev/null
+++ b/src/titiler/xarray/titiler/xarray/__init__.py
@@ -0,0 +1,3 @@
+"""titiler.xarray"""
+
+__version__ = "0.19.0.dev"
diff --git a/src/titiler/xarray/titiler/xarray/dependencies.py b/src/titiler/xarray/titiler/xarray/dependencies.py
new file mode 100644
index 000000000..1e49a7810
--- /dev/null
+++ b/src/titiler/xarray/titiler/xarray/dependencies.py
@@ -0,0 +1,211 @@
+"""titiler.xarray dependencies."""
+
+from dataclasses import dataclass
+from typing import Optional, Union
+
+import numpy
+from fastapi import Query
+from rio_tiler.types import RIOResampling, WarpResampling
+from starlette.requests import Request
+from typing_extensions import Annotated
+
+from titiler.core.dependencies import DefaultDependency
+
+
+@dataclass
+class XarrayIOParams(DefaultDependency):
+ """Dataset IO Options."""
+
+ group: Annotated[
+ Optional[int],
+ Query(
+ description="Select a specific zarr group from a zarr hierarchy. Could be associated with a zoom level or dataset."
+ ),
+ ] = None
+
+ reference: Annotated[
+ Optional[bool],
+ Query(
+ title="reference",
+ description="Whether the dataset is a kerchunk reference",
+ ),
+ ] = None
+
+ decode_times: Annotated[
+ Optional[bool],
+ Query(
+ title="decode_times",
+ description="Whether to decode times",
+ ),
+ ] = None
+
+ consolidated: Annotated[
+ Optional[bool],
+ Query(
+ title="consolidated",
+ description="Whether to expect and open zarr store with consolidated metadata",
+ ),
+ ] = None
+
+ # cache_client
+
+
+@dataclass
+class XarrayDsParams(DefaultDependency):
+ """Xarray Dataset Options."""
+
+ variable: Annotated[str, Query(description="Xarray Variable name")]
+
+ drop_dim: Annotated[
+ Optional[str],
+ Query(description="Dimension to drop"),
+ ] = None
+
+ datetime: Annotated[
+ Optional[str], Query(description="Slice of time to read (if available)")
+ ] = None
+
+
+@dataclass
+class XarrayParams(XarrayIOParams, XarrayDsParams):
+ """Xarray Reader dependency."""
+
+ pass
+
+
+@dataclass(init=False)
+class CompatXarrayParams(DefaultDependency):
+ """Custom XarrayParams endpoints.
+
+ This Dependency aims to be used in a tiler where both GDAL/Xarray dataset would be supported.
+ By default `variable` won't be required but when using an Xarray dataset,
+ it would fail without the variable query-parameter set.
+ """
+
+ # File IO Options
+ group: Optional[int] = None
+ reference: Optional[bool] = None
+ decode_times: Optional[bool] = None
+ consolidated: Optional[bool] = None
+
+ # Dataset Options
+ variable: Optional[str] = None
+ drop_dim: Optional[str] = None
+ datetime: Optional[str] = None
+
+ def __init__(
+ self,
+ request: Request,
+ variable: Annotated[
+ Optional[str], Query(description="Xarray Variable name")
+ ] = None,
+ group: Annotated[
+ Optional[int],
+ Query(
+ description="Select a specific zarr group from a zarr hierarchy. Could be associated with a zoom level or dataset."
+ ),
+ ] = None,
+ reference: Annotated[
+ Optional[bool],
+ Query(
+ title="reference",
+ description="Whether the dataset is a kerchunk reference",
+ ),
+ ] = None,
+ decode_times: Annotated[
+ Optional[bool],
+ Query(
+ title="decode_times",
+ description="Whether to decode times",
+ ),
+ ] = None,
+ consolidated: Annotated[
+ Optional[bool],
+ Query(
+ title="consolidated",
+ description="Whether to expect and open zarr store with consolidated metadata",
+ ),
+ ] = None,
+ drop_dim: Annotated[
+ Optional[str],
+ Query(description="Dimension to drop"),
+ ] = None,
+ datetime: Annotated[
+ Optional[str], Query(description="Slice of time to read (if available)")
+ ] = None,
+ ):
+        """Initialize CompatXarrayParams.
+
+        Note: Because we don't want the `z`/`multiscale` handling to appear in the documentation we use a dataclass with a custom `__init__` method.
+        FastAPI will use the `__init__` method but will exclude `Request` from the documentation, making it an invisible dependency.
+        """
+ self.variable = variable
+ self.group = group
+ self.reference = reference
+ self.decode_times = decode_times
+ self.consolidated = consolidated
+ self.drop_dim = drop_dim
+ self.datetime = datetime
+
+ if request.query_params.get("multiscale") and request.path_params.get("z"):
+ self.group = int(request.path_params.get("z"))
+
+
+@dataclass
+class DatasetParams(DefaultDependency):
+ """Low level WarpedVRT Optional parameters."""
+
+ nodata: Annotated[
+ Optional[Union[str, int, float]],
+ Query(
+ title="Nodata value",
+ description="Overwrite internal Nodata value",
+ ),
+ ] = None
+ reproject_method: Annotated[
+ Optional[WarpResampling],
+ Query(
+ alias="reproject",
+ description="WarpKernel resampling algorithm (only used when doing re-projection). Defaults to `nearest`.",
+ ),
+ ] = None
+
+ def __post_init__(self):
+ """Post Init."""
+ if self.nodata is not None:
+ self.nodata = numpy.nan if self.nodata == "nan" else float(self.nodata)
+
+
+@dataclass
+class TileParams(DefaultDependency):
+ """Custom TileParams for Xarray."""
+
+ multiscale: Annotated[
+ Optional[bool],
+ Query(
+ title="multiscale",
+ description="Whether the dataset has multiscale groups (Zoom levels)",
+ ),
+ ] = None
+
+
+# Custom PartFeatureParams which add `resampling`
+@dataclass
+class PartFeatureParams(DefaultDependency):
+ """Common parameters for bbox and feature."""
+
+ max_size: Annotated[Optional[int], "Maximum image size to read onto."] = None
+ height: Annotated[Optional[int], "Force output image height."] = None
+ width: Annotated[Optional[int], "Force output image width."] = None
+ resampling_method: Annotated[
+ Optional[RIOResampling],
+ Query(
+ alias="resampling",
+ description="RasterIO resampling algorithm. Defaults to `nearest`.",
+ ),
+ ] = None
+
+ def __post_init__(self):
+ """Post Init."""
+ if self.width and self.height:
+ self.max_size = None
diff --git a/src/titiler/xarray/titiler/xarray/factory.py b/src/titiler/xarray/titiler/xarray/factory.py
new file mode 100644
index 000000000..1f768f08b
--- /dev/null
+++ b/src/titiler/xarray/titiler/xarray/factory.py
@@ -0,0 +1,364 @@
+"""TiTiler.xarray factory."""
+
+from typing import Callable, List, Literal, Optional, Type, Union
+
+import rasterio
+from attrs import define, field
+from fastapi import Body, Depends, Path, Query
+from geojson_pydantic.features import Feature, FeatureCollection
+from geojson_pydantic.geometries import MultiPolygon, Polygon
+from pydantic import Field
+from rio_tiler.constants import WGS84_CRS
+from rio_tiler.models import Info
+from starlette.responses import Response
+from typing_extensions import Annotated
+
+from titiler.core.dependencies import (
+ CoordCRSParams,
+ CRSParams,
+ DatasetPathParams,
+ DefaultDependency,
+ DstCRSParams,
+ HistogramParams,
+ StatisticsParams,
+)
+from titiler.core.factory import TilerFactory as BaseTilerFactory
+from titiler.core.factory import img_endpoint_params
+from titiler.core.models.responses import InfoGeoJSON, StatisticsGeoJSON
+from titiler.core.resources.enums import ImageType
+from titiler.core.resources.responses import GeoJSONResponse, JSONResponse
+from titiler.core.utils import render_image
+from titiler.xarray.dependencies import (
+ DatasetParams,
+ PartFeatureParams,
+ TileParams,
+ XarrayIOParams,
+ XarrayParams,
+)
+from titiler.xarray.io import Reader
+
+
+@define(kw_only=True)
+class TilerFactory(BaseTilerFactory):
+ """Xarray Tiler Factory."""
+
+ reader: Type[Reader] = Reader
+
+ path_dependency: Callable[..., str] = DatasetPathParams
+
+ reader_dependency: Type[DefaultDependency] = XarrayParams
+
+ # Indexes/Expression Dependencies (Not layer dependencies for Xarray)
+ layer_dependency: Type[DefaultDependency] = DefaultDependency
+
+ # Dataset Options (nodata, reproject)
+ dataset_dependency: Type[DefaultDependency] = DatasetParams
+
+ # Tile/Tilejson/WMTS Dependencies (multiscale option)
+ tile_dependency: Type[TileParams] = TileParams
+
+ # Statistics/Histogram Dependencies
+ stats_dependency: Type[DefaultDependency] = StatisticsParams
+ histogram_dependency: Type[DefaultDependency] = HistogramParams
+
+ img_part_dependency: Type[DefaultDependency] = PartFeatureParams
+
+ # Custom dependency for /variable
+ io_dependency: Type[DefaultDependency] = XarrayIOParams
+
+ add_viewer: bool = True
+ add_part: bool = True
+
+ # remove some attribute from init
+ img_preview_dependency: Type[DefaultDependency] = field(init=False)
+ add_preview: bool = field(init=False)
+
+ def register_routes(self):
+ """Register routes to the router."""
+ self.variables()
+ self.bounds()
+ self.info()
+ self.tile()
+ self.tilejson()
+ self.wmts()
+ self.point()
+ self.statistics()
+
+ if self.add_part:
+ self.part()
+
+ if self.add_viewer:
+ self.map_viewer()
+
+ def variables(self):
+ """Register /variables endpoint."""
+
+ @self.router.get(
+ "/variables",
+ response_model=List[str],
+ responses={200: {"description": "Return Xarray Dataset variables."}},
+ )
+ def variables(
+ src_path=Depends(self.path_dependency),
+ io_params=Depends(self.io_dependency),
+ ):
+ """return available variables."""
+ return self.reader.list_variables(src_path, **io_params.as_dict())
+
+ # Custom /info endpoints (adds `show_times` options)
+ def info(self):
+ """Register /info endpoint."""
+
+ @self.router.get(
+ "/info",
+ response_model=Info,
+ response_model_exclude_none=True,
+ response_class=JSONResponse,
+ responses={200: {"description": "Return dataset's basic info."}},
+ )
+ def info_endpoint(
+ src_path=Depends(self.path_dependency),
+ reader_params=Depends(self.reader_dependency),
+ show_times: Annotated[
+ Optional[bool],
+ Query(description="Show info about the time dimension"),
+ ] = None,
+ env=Depends(self.environment_dependency),
+ ) -> Info:
+ """Return dataset's basic info."""
+ with rasterio.Env(**env):
+ with self.reader(src_path, **reader_params.as_dict()) as src_dst:
+ info = src_dst.info().model_dump()
+ if show_times and "time" in src_dst.input.dims:
+ times = [str(x.data) for x in src_dst.input.time]
+ info["count"] = len(times)
+ info["times"] = times
+
+ return Info(**info)
+
+ @self.router.get(
+ "/info.geojson",
+ response_model=InfoGeoJSON,
+ response_model_exclude_none=True,
+ response_class=GeoJSONResponse,
+ responses={
+ 200: {
+ "content": {"application/geo+json": {}},
+ "description": "Return dataset's basic info as a GeoJSON feature.",
+ }
+ },
+ )
+ def info_geojson(
+ src_path=Depends(self.path_dependency),
+ reader_params=Depends(self.reader_dependency),
+ show_times: Annotated[
+ Optional[bool],
+ Query(description="Show info about the time dimension"),
+ ] = None,
+ crs=Depends(CRSParams),
+ env=Depends(self.environment_dependency),
+ ):
+ """Return dataset's basic info as a GeoJSON feature."""
+ with rasterio.Env(**env):
+ with self.reader(src_path, **reader_params.as_dict()) as src_dst:
+ bounds = src_dst.get_geographic_bounds(crs or WGS84_CRS)
+ if bounds[0] > bounds[2]:
+ pl = Polygon.from_bounds(-180, bounds[1], bounds[2], bounds[3])
+ pr = Polygon.from_bounds(bounds[0], bounds[1], 180, bounds[3])
+ geometry = MultiPolygon(
+ type="MultiPolygon",
+ coordinates=[pl.coordinates, pr.coordinates],
+ )
+ else:
+ geometry = Polygon.from_bounds(*bounds)
+
+ info = src_dst.info().model_dump()
+ if show_times and "time" in src_dst.input.dims:
+ times = [str(x.data) for x in src_dst.input.time]
+ info["count"] = len(times)
+ info["times"] = times
+
+ return Feature(
+ type="Feature",
+ bbox=bounds,
+ geometry=geometry,
+ properties=info,
+ )
+
+ # custom /tiles endpoints (adds `multiscale` options)
+ def tile(self):
+ """Register /tiles endpoint."""
+
+ @self.router.get(r"/tiles/{tileMatrixSetId}/{z}/{x}/{y}", **img_endpoint_params)
+ @self.router.get(
+ r"/tiles/{tileMatrixSetId}/{z}/{x}/{y}.{format}", **img_endpoint_params
+ )
+ @self.router.get(
+ r"/tiles/{tileMatrixSetId}/{z}/{x}/{y}@{scale}x", **img_endpoint_params
+ )
+ @self.router.get(
+ r"/tiles/{tileMatrixSetId}/{z}/{x}/{y}@{scale}x.{format}",
+ **img_endpoint_params,
+ )
+ def tile(
+ z: Annotated[
+ int,
+ Path(
+ description="Identifier (Z) selecting one of the scales defined in the TileMatrixSet and representing the scaleDenominator the tile.",
+ ),
+ ],
+ x: Annotated[
+ int,
+ Path(
+ description="Column (X) index of the tile on the selected TileMatrix. It cannot exceed the MatrixHeight-1 for the selected TileMatrix.",
+ ),
+ ],
+ y: Annotated[
+ int,
+ Path(
+ description="Row (Y) index of the tile on the selected TileMatrix. It cannot exceed the MatrixWidth-1 for the selected TileMatrix.",
+ ),
+ ],
+ tileMatrixSetId: Annotated[
+ Literal[tuple(self.supported_tms.list())],
+ Path(
+ description="Identifier selecting one of the TileMatrixSetId supported."
+ ),
+ ],
+ scale: Annotated[
+ int,
+ Field(
+ gt=0, le=4, description="Tile size scale. 1=256x256, 2=512x512..."
+ ),
+ ] = 1,
+ format: Annotated[
+ ImageType,
+ "Default will be automatically defined if the output image needs a mask (png) or not (jpeg).",
+ ] = None,
+ multiscale: Annotated[
+ Optional[bool],
+ Query(
+ title="multiscale",
+ description="Whether the dataset has multiscale groups (Zoom levels)",
+ ),
+ ] = None,
+ src_path=Depends(self.path_dependency),
+ reader_params=Depends(self.reader_dependency),
+ tile_params=Depends(self.tile_dependency),
+ layer_params=Depends(self.layer_dependency),
+ dataset_params=Depends(self.dataset_dependency),
+ post_process=Depends(self.process_dependency),
+ rescale=Depends(self.rescale_dependency),
+ color_formula=Depends(self.color_formula_dependency),
+ colormap=Depends(self.colormap_dependency),
+ render_params=Depends(self.render_dependency),
+ env=Depends(self.environment_dependency),
+ ):
+ """Create map tile from a dataset."""
+ tms = self.supported_tms.get(tileMatrixSetId)
+
+ reader_options = reader_params.as_dict()
+ if getattr(tile_params, "multiscale", False):
+ reader_options["group"] = z
+
+ with rasterio.Env(**env):
+ with self.reader(src_path, tms=tms, **reader_options) as src_dst:
+ image = src_dst.tile(
+ x,
+ y,
+ z,
+ tilesize=scale * 256,
+ **layer_params.as_dict(),
+ **dataset_params.as_dict(),
+ )
+
+ if post_process:
+ image = post_process(image)
+
+ if rescale:
+ image.rescale(rescale)
+
+ if color_formula:
+ image.apply_color_formula(color_formula)
+
+ content, media_type = render_image(
+ image,
+ output_format=format,
+ colormap=colormap,
+ **render_params.as_dict(),
+ )
+
+ return Response(content, media_type=media_type)
+
+ # custom /statistics endpoints (remove /statistics - GET)
+ def statistics(self):
+ """add statistics endpoints."""
+
+ # POST endpoint
+ @self.router.post(
+ "/statistics",
+ response_model=StatisticsGeoJSON,
+ response_model_exclude_none=True,
+ response_class=GeoJSONResponse,
+ responses={
+ 200: {
+ "content": {"application/geo+json": {}},
+ "description": "Return dataset's statistics from feature or featureCollection.",
+ }
+ },
+ )
+ def geojson_statistics(
+ geojson: Annotated[
+ Union[FeatureCollection, Feature],
+ Body(description="GeoJSON Feature or FeatureCollection."),
+ ],
+ src_path=Depends(self.path_dependency),
+ reader_params=Depends(self.reader_dependency),
+ coord_crs=Depends(CoordCRSParams),
+ dst_crs=Depends(DstCRSParams),
+ layer_params=Depends(self.layer_dependency),
+ dataset_params=Depends(self.dataset_dependency),
+ image_params=Depends(self.img_part_dependency),
+ post_process=Depends(self.process_dependency),
+ stats_params=Depends(self.stats_dependency),
+ histogram_params=Depends(self.histogram_dependency),
+ env=Depends(self.environment_dependency),
+ ):
+ """Get Statistics from a geojson feature or featureCollection."""
+ fc = geojson
+ if isinstance(fc, Feature):
+ fc = FeatureCollection(type="FeatureCollection", features=[geojson])
+
+ with rasterio.Env(**env):
+ with self.reader(src_path, **reader_params.as_dict()) as src_dst:
+ for feature in fc:
+ shape = feature.model_dump(exclude_none=True)
+ image = src_dst.feature(
+ shape,
+ shape_crs=coord_crs or WGS84_CRS,
+ dst_crs=dst_crs,
+ align_bounds_with_dataset=True,
+ **layer_params.as_dict(),
+ **image_params.as_dict(),
+ **dataset_params.as_dict(),
+ )
+
+ # Get the coverage % array
+ coverage_array = image.get_coverage_array(
+ shape,
+ shape_crs=coord_crs or WGS84_CRS,
+ )
+
+ if post_process:
+ image = post_process(image)
+
+ stats = image.statistics(
+ **stats_params.as_dict(),
+ hist_options=histogram_params.as_dict(),
+ coverage=coverage_array,
+ )
+
+ feature.properties = feature.properties or {}
+ feature.properties.update({"statistics": stats})
+
+ return fc.features[0] if isinstance(geojson, Feature) else fc
diff --git a/src/titiler/xarray/titiler/xarray/io.py b/src/titiler/xarray/titiler/xarray/io.py
new file mode 100644
index 000000000..12a39743f
--- /dev/null
+++ b/src/titiler/xarray/titiler/xarray/io.py
@@ -0,0 +1,306 @@
+"""titiler.xarray.io"""
+
+import pickle
+import re
+from typing import Any, Callable, Dict, List, Optional, Protocol
+
+import attr
+import fsspec
+import numpy
+import s3fs
+import xarray
+from morecantile import TileMatrixSet
+from rio_tiler.constants import WEB_MERCATOR_TMS
+from rio_tiler.io.xarray import XarrayReader
+
+
+class CacheClient(Protocol):
+ """CacheClient Protocol."""
+
+ def get(self, key: str) -> bytes:
+ """Get key."""
+ ...
+
+ def set(self, key: str, body: bytes) -> None:
+ """Set key."""
+ ...
+
+
+def parse_protocol(src_path: str, reference: Optional[bool] = False) -> str:
+ """Parse protocol from path."""
+ match = re.match(r"^(s3|https|http)", src_path)
+ protocol = "file"
+ if match:
+ protocol = match.group(0)
+
+ # override protocol if reference
+ if reference:
+ protocol = "reference"
+
+ return protocol
+
+
+def xarray_engine(src_path: str) -> str:
+ """Parse xarray engine from path."""
+ # ".hdf", ".hdf5", ".h5" will be supported once we have tests + expand the type permitted for the group parameter
+ if any(src_path.lower().endswith(ext) for ext in [".nc", ".nc4"]):
+ return "h5netcdf"
+ else:
+ return "zarr"
+
+
+def get_filesystem(
+ src_path: str,
+ protocol: str,
+ xr_engine: str,
+ anon: bool = True,
+):
+ """
+ Get the filesystem for the given source path.
+ """
+ if protocol == "s3":
+ s3_filesystem = s3fs.S3FileSystem()
+ return (
+ s3_filesystem.open(src_path)
+ if xr_engine == "h5netcdf"
+ else s3fs.S3Map(root=src_path, s3=s3_filesystem)
+ )
+
+ elif protocol == "reference":
+ reference_args = {"fo": src_path, "remote_options": {"anon": anon}}
+ return fsspec.filesystem("reference", **reference_args).get_mapper("")
+
+ elif protocol in ["https", "http", "file"]:
+ filesystem = fsspec.filesystem(protocol) # type: ignore
+ return (
+ filesystem.open(src_path)
+ if xr_engine == "h5netcdf"
+ else filesystem.get_mapper(src_path)
+ )
+
+ else:
+ raise ValueError(f"Unsupported protocol: {protocol}")
+
+
+def xarray_open_dataset(
+ src_path: str,
+ group: Optional[Any] = None,
+ reference: Optional[bool] = False,
+ decode_times: Optional[bool] = True,
+ consolidated: Optional[bool] = True,
+ cache_client: Optional[CacheClient] = None,
+) -> xarray.Dataset:
+ """Open dataset."""
+ # Generate cache key and attempt to fetch the dataset from cache
+ if cache_client:
+ cache_key = f"{src_path}_{group}" if group is not None else src_path
+ data_bytes = cache_client.get(cache_key)
+ if data_bytes:
+ return pickle.loads(data_bytes)
+
+ protocol = parse_protocol(src_path, reference=reference)
+ xr_engine = xarray_engine(src_path)
+ file_handler = get_filesystem(src_path, protocol, xr_engine)
+
+ # Arguments for xarray.open_dataset
+ # Default args
+ xr_open_args: Dict[str, Any] = {
+ "decode_coords": "all",
+ "decode_times": decode_times,
+ }
+
+ # Argument if we're opening a datatree
+ if isinstance(group, int):
+ xr_open_args["group"] = group
+
+ # NetCDF arguments
+ if xr_engine == "h5netcdf":
+ xr_open_args["engine"] = "h5netcdf"
+ xr_open_args["lock"] = False
+ else:
+ # Zarr arguments
+ xr_open_args["engine"] = "zarr"
+ xr_open_args["consolidated"] = consolidated
+
+ # Additional arguments when dealing with a reference file.
+ if reference:
+ xr_open_args["consolidated"] = False
+ xr_open_args["backend_kwargs"] = {"consolidated": False}
+
+ ds = xarray.open_dataset(file_handler, **xr_open_args)
+
+ if cache_client:
+ # Serialize the dataset to bytes using pickle
+ data_bytes = pickle.dumps(ds)
+ cache_client.set(cache_key, data_bytes)
+
+ return ds
+
+
+def _arrange_dims(da: xarray.DataArray) -> xarray.DataArray:
+ """Arrange coordinates and time dimensions.
+
+    A rioxarray.exceptions.InvalidDimensionOrder error is raised if the coordinates are not in the correct order: time, y, and x.
+    See: https://github.com/corteva/rioxarray/discussions/674
+
+    We conform to using x and y as the spatial dimension names.
+
+ """
+ if "x" not in da.dims and "y" not in da.dims:
+ try:
+ latitude_var_name = next(
+ x for x in ["lat", "latitude", "LAT", "LATITUDE", "Lat"] if x in da.dims
+ )
+ longitude_var_name = next(
+ x
+ for x in ["lon", "longitude", "LON", "LONGITUDE", "Lon"]
+ if x in da.dims
+ )
+ except StopIteration as e:
+ raise ValueError(f"Couldn't find X/Y dimensions in {da.dims}") from e
+
+ da = da.rename({latitude_var_name: "y", longitude_var_name: "x"})
+
+ if "TIME" in da.dims:
+ da = da.rename({"TIME": "time"})
+
+ if _dims := [d for d in da.dims if d not in ["x", "y"]]:
+ da = da.transpose(*_dims, "y", "x")
+ else:
+ da = da.transpose("y", "x")
+
+ # If min/max values are stored in `valid_range` we add them in `valid_min/valid_max`
+ vmin, vmax = da.attrs.get("valid_min"), da.attrs.get("valid_max")
+ if "valid_range" in da.attrs and not (vmin is not None and vmax is not None):
+ valid_range = da.attrs.get("valid_range")
+ da.attrs.update({"valid_min": valid_range[0], "valid_max": valid_range[1]})
+
+ return da
+
+
+def get_variable(
+ ds: xarray.Dataset,
+ variable: str,
+ datetime: Optional[str] = None,
+ drop_dim: Optional[str] = None,
+) -> xarray.DataArray:
+ """Get Xarray variable as DataArray.
+
+ Args:
+ ds (xarray.Dataset): Xarray Dataset.
+ variable (str): Variable to extract from the Dataset.
+ datetime (str, optional): datetime to select from the DataArray.
+ drop_dim (str, optional): DataArray dimension to drop in form of `{dimension}={value}`.
+
+ Returns:
+ xarray.DataArray: 2D or 3D DataArray.
+
+ """
+ da = ds[variable]
+
+ if drop_dim:
+ dim_to_drop, dim_val = drop_dim.split("=")
+ da = da.sel({dim_to_drop: dim_val}).drop_vars(dim_to_drop)
+
+ da = _arrange_dims(da)
+
+ # Make sure we have a valid CRS
+ crs = da.rio.crs or "epsg:4326"
+ da = da.rio.write_crs(crs)
+
+ if crs == "epsg:4326" and (da.x > 180).any():
+ # Adjust the longitude coordinates to the -180 to 180 range
+ da = da.assign_coords(x=(da.x + 180) % 360 - 180)
+
+ # Sort the dataset by the updated longitude coordinates
+ da = da.sortby(da.x)
+
+ # TODO: Technically we don't have to select the first time, rio-tiler should handle 3D dataset
+ if "time" in da.dims:
+ if datetime:
+ # TODO: handle time interval
+ time_as_str = datetime.split("T")[0]
+ if da["time"].dtype == "O":
+ da["time"] = da["time"].astype("datetime64[ns]")
+
+ da = da.sel(
+ time=numpy.array(time_as_str, dtype=numpy.datetime64), method="nearest"
+ )
+ else:
+ da = da.isel(time=0)
+
+ assert len(da.dims) in [2, 3], "titiler.xarray can only work with 2D or 3D dataset"
+
+ return da
+
+
+@attr.s
+class Reader(XarrayReader):
+ """Reader: Open Zarr file and access DataArray."""
+
+ src_path: str = attr.ib()
+ variable: str = attr.ib()
+
+ # xarray.Dataset options
+ opener: Callable[..., xarray.Dataset] = attr.ib(default=xarray_open_dataset)
+
+ group: Optional[Any] = attr.ib(default=None)
+ reference: bool = attr.ib(default=False)
+ decode_times: bool = attr.ib(default=False)
+ consolidated: Optional[bool] = attr.ib(default=True)
+ cache_client: Optional[CacheClient] = attr.ib(default=None)
+
+ # xarray.DataArray options
+ datetime: Optional[str] = attr.ib(default=None)
+ drop_dim: Optional[str] = attr.ib(default=None)
+
+ tms: TileMatrixSet = attr.ib(default=WEB_MERCATOR_TMS)
+
+ ds: xarray.Dataset = attr.ib(init=False)
+ input: xarray.DataArray = attr.ib(init=False)
+
+ _dims: List = attr.ib(init=False, factory=list)
+
+ def __attrs_post_init__(self):
+ """Set bounds and CRS."""
+ self.ds = self.opener(
+ self.src_path,
+ group=self.group,
+ reference=self.reference,
+ decode_times=self.decode_times,
+ consolidated=self.consolidated,
+ cache_client=self.cache_client,
+ )
+
+ self.input = get_variable(
+ self.ds,
+ self.variable,
+ datetime=self.datetime,
+ drop_dim=self.drop_dim,
+ )
+ super().__attrs_post_init__()
+
+ def close(self):
+ """Close xarray dataset."""
+ self.ds.close()
+
+ def __exit__(self, exc_type, exc_value, traceback):
+ """Support using with Context Managers."""
+ self.close()
+
+ @classmethod
+ def list_variables(
+ cls,
+ src_path: str,
+ group: Optional[Any] = None,
+ reference: Optional[bool] = False,
+ consolidated: Optional[bool] = True,
+ ) -> List[str]:
+ """List available variable in a dataset."""
+ with xarray_open_dataset(
+ src_path,
+ group=group,
+ reference=reference,
+ consolidated=consolidated,
+ ) as ds:
+ return list(ds.data_vars) # type: ignore
diff --git a/src/titiler/xarray/titiler/xarray/main.py b/src/titiler/xarray/titiler/xarray/main.py
new file mode 100644
index 000000000..e23cb5e5d
--- /dev/null
+++ b/src/titiler/xarray/titiler/xarray/main.py
@@ -0,0 +1,30 @@
+"""titiler xarray app."""
+
+from fastapi import FastAPI
+
+from titiler.xarray.factory import TilerFactory
+
+###############################################################################
+
+app = FastAPI(
+ openapi_url="/api",
+ docs_url="/api.html",
+ description="""Xarray based tiles server for MultiDimensional dataset (Zarr/NetCDF).
+
+---
+
+**Documentation**: https://developmentseed.org/titiler/
+
+**Source Code**: https://github.com/developmentseed/titiler
+
+---
+ """,
+)
+
+md = TilerFactory(router_prefix="/md")
+
+app.include_router(
+ md.router,
+ prefix="/md",
+ tags=["Multi Dimensional"],
+)
diff --git a/src/titiler/xarray/titiler/xarray/py.typed b/src/titiler/xarray/titiler/xarray/py.typed
new file mode 100644
index 000000000..e69de29bb