From 2ef99928e670818d2df1743023acfd009c6083e1 Mon Sep 17 00:00:00 2001 From: github-actions Date: Fri, 17 May 2024 07:05:54 +0000 Subject: [PATCH] Deployed ffec7c8 to 1.3.0 with MkDocs 1.6.0 and mike 2.1.1 --- 1.3.0/404.html | 1131 ++ 1.3.0/advanced/custom_search/custom_search.md | 97 + 1.3.0/advanced/custom_search/index.html | 1254 ++ .../custom_tilejson/custom_tilejson.md | 162 + 1.3.0/advanced/custom_tilejson/index.html | 1324 ++ 1.3.0/advanced/metadata/index.html | 1338 ++ 1.3.0/advanced/metadata/metadata.md | 118 + 1.3.0/advanced/searches_list/index.html | 1190 ++ 1.3.0/advanced/searches_list/searches_list.md | 30 + 1.3.0/api/titiler/pgstac/db/db.md | 29 + 1.3.0/api/titiler/pgstac/db/index.html | 1296 ++ .../pgstac/dependencies/dependencies.md | 157 + .../titiler/pgstac/dependencies/index.html | 1660 ++ .../titiler/pgstac/extensions/extensions.md | 33 + .../api/titiler/pgstac/extensions/index.html | 1348 ++ 1.3.0/api/titiler/pgstac/factory/factory.md | 320 + 1.3.0/api/titiler/pgstac/factory/index.html | 1805 ++ 1.3.0/api/titiler/pgstac/model/index.html | 14550 ++++++++++++++++ 1.3.0/api/titiler/pgstac/model/model.md | 5416 ++++++ 1.3.0/api/titiler/pgstac/mosaic/index.html | 2905 +++ 1.3.0/api/titiler/pgstac/mosaic/mosaic.md | 701 + 1.3.0/api/titiler/pgstac/reader/index.html | 2191 +++ 1.3.0/api/titiler/pgstac/reader/reader.md | 361 + 1.3.0/api/titiler/pgstac/settings/index.html | 6055 +++++++ 1.3.0/api/titiler/pgstac/settings/settings.md | 1937 ++ 1.3.0/api/titiler/pgstac/utils/index.html | 1272 ++ 1.3.0/api/titiler/pgstac/utils/utils.md | 18 + 1.3.0/assets/images/favicon.png | Bin 0 -> 1870 bytes .../assets/javascripts/bundle.ebd0bdb7.min.js | 29 + .../javascripts/bundle.ebd0bdb7.min.js.map | 7 + .../javascripts/lunr/min/lunr.ar.min.js | 1 + .../javascripts/lunr/min/lunr.da.min.js | 18 + .../javascripts/lunr/min/lunr.de.min.js | 18 + .../javascripts/lunr/min/lunr.du.min.js | 18 + .../javascripts/lunr/min/lunr.el.min.js | 1 + .../javascripts/lunr/min/lunr.es.min.js | 18 + .../javascripts/lunr/min/lunr.fi.min.js | 18 + .../javascripts/lunr/min/lunr.fr.min.js | 18 + .../javascripts/lunr/min/lunr.he.min.js | 1 + .../javascripts/lunr/min/lunr.hi.min.js | 1 + .../javascripts/lunr/min/lunr.hu.min.js | 18 + .../javascripts/lunr/min/lunr.hy.min.js | 1 + .../javascripts/lunr/min/lunr.it.min.js | 18 + .../javascripts/lunr/min/lunr.ja.min.js | 1 + .../javascripts/lunr/min/lunr.jp.min.js | 1 + .../javascripts/lunr/min/lunr.kn.min.js | 1 + .../javascripts/lunr/min/lunr.ko.min.js | 1 + .../javascripts/lunr/min/lunr.multi.min.js | 1 + .../javascripts/lunr/min/lunr.nl.min.js | 18 + .../javascripts/lunr/min/lunr.no.min.js | 18 + .../javascripts/lunr/min/lunr.pt.min.js | 18 + .../javascripts/lunr/min/lunr.ro.min.js | 18 + .../javascripts/lunr/min/lunr.ru.min.js | 18 + .../javascripts/lunr/min/lunr.sa.min.js | 1 + .../lunr/min/lunr.stemmer.support.min.js | 1 + .../javascripts/lunr/min/lunr.sv.min.js | 18 + .../javascripts/lunr/min/lunr.ta.min.js | 1 + .../javascripts/lunr/min/lunr.te.min.js | 1 + .../javascripts/lunr/min/lunr.th.min.js | 1 + .../javascripts/lunr/min/lunr.tr.min.js | 18 + .../javascripts/lunr/min/lunr.vi.min.js | 1 + .../javascripts/lunr/min/lunr.zh.min.js | 1 + 1.3.0/assets/javascripts/lunr/tinyseg.js | 206 + 1.3.0/assets/javascripts/lunr/wordcut.js | 6708 +++++++ .../workers/search.b8dbb3d2.min.js | 42 + .../workers/search.b8dbb3d2.min.js.map | 7 + .../assets/stylesheets/main.6543a935.min.css | 1 + .../stylesheets/main.6543a935.min.css.map | 1 + .../stylesheets/palette.06af60db.min.css 
| 1 + .../stylesheets/palette.06af60db.min.css.map | 1 + 1.3.0/benchmark.html | 292 + 1.3.0/contributing/contributing.md | 23 + 1.3.0/contributing/index.html | 1184 ++ .../collections_endpoints.md | 502 + .../collections_endpoints/index.html | 1989 +++ 1.3.0/endpoints/index.html | 1172 ++ 1.3.0/endpoints/index.md | 16 + 1.3.0/endpoints/items_endpoints/index.html | 1982 +++ .../items_endpoints/items_endpoints.md | 429 + 1.3.0/endpoints/searches_endpoints/index.html | 2051 +++ .../searches_endpoints/searches_endpoints.md | 520 + 1.3.0/endpoints/tms_endpoints/index.html | 1330 ++ .../endpoints/tms_endpoints/tms_endpoints.md | 77 + 1.3.0/img/favicon.ico | Bin 0 -> 3350 bytes 1.3.0/img/logo.png | Bin 0 -> 5137 bytes 1.3.0/index.html | 1472 ++ 1.3.0/index.md | 118 + 1.3.0/intro/index.html | 1628 ++ 1.3.0/intro/intro.md | 330 + 1.3.0/migrations/v1_migration/index.html | 1617 ++ 1.3.0/migrations/v1_migration/v1_migration.md | 241 + 1.3.0/notebooks/demo/demo.ipynb | 691 + 1.3.0/notebooks/demo/index.html | 2850 +++ 1.3.0/overrides/stylesheets/extra.css | 5 + 1.3.0/release-notes/index.html | 2633 +++ 1.3.0/release-notes/release-notes.md | 556 + 1.3.0/search/search_index.json | 1 + 1.3.0/sitemap.xml | 128 + 1.3.0/sitemap.xml.gz | Bin 0 -> 427 bytes 1.3.0/tiler_factories/index.html | 1520 ++ 1.3.0/tiler_factories/tiler_factories.md | 135 + latest | 2 +- versions.json | 9 +- 103 files changed, 81488 insertions(+), 3 deletions(-) create mode 100644 1.3.0/404.html create mode 100644 1.3.0/advanced/custom_search/custom_search.md create mode 100644 1.3.0/advanced/custom_search/index.html create mode 100644 1.3.0/advanced/custom_tilejson/custom_tilejson.md create mode 100644 1.3.0/advanced/custom_tilejson/index.html create mode 100644 1.3.0/advanced/metadata/index.html create mode 100644 1.3.0/advanced/metadata/metadata.md create mode 100644 1.3.0/advanced/searches_list/index.html create mode 100644 1.3.0/advanced/searches_list/searches_list.md create mode 100644 1.3.0/api/titiler/pgstac/db/db.md create mode 100644 1.3.0/api/titiler/pgstac/db/index.html create mode 100644 1.3.0/api/titiler/pgstac/dependencies/dependencies.md create mode 100644 1.3.0/api/titiler/pgstac/dependencies/index.html create mode 100644 1.3.0/api/titiler/pgstac/extensions/extensions.md create mode 100644 1.3.0/api/titiler/pgstac/extensions/index.html create mode 100644 1.3.0/api/titiler/pgstac/factory/factory.md create mode 100644 1.3.0/api/titiler/pgstac/factory/index.html create mode 100644 1.3.0/api/titiler/pgstac/model/index.html create mode 100644 1.3.0/api/titiler/pgstac/model/model.md create mode 100644 1.3.0/api/titiler/pgstac/mosaic/index.html create mode 100644 1.3.0/api/titiler/pgstac/mosaic/mosaic.md create mode 100644 1.3.0/api/titiler/pgstac/reader/index.html create mode 100644 1.3.0/api/titiler/pgstac/reader/reader.md create mode 100644 1.3.0/api/titiler/pgstac/settings/index.html create mode 100644 1.3.0/api/titiler/pgstac/settings/settings.md create mode 100644 1.3.0/api/titiler/pgstac/utils/index.html create mode 100644 1.3.0/api/titiler/pgstac/utils/utils.md create mode 100644 1.3.0/assets/images/favicon.png create mode 100644 1.3.0/assets/javascripts/bundle.ebd0bdb7.min.js create mode 100644 1.3.0/assets/javascripts/bundle.ebd0bdb7.min.js.map create mode 100644 1.3.0/assets/javascripts/lunr/min/lunr.ar.min.js create mode 100644 1.3.0/assets/javascripts/lunr/min/lunr.da.min.js create mode 100644 1.3.0/assets/javascripts/lunr/min/lunr.de.min.js create mode 100644 1.3.0/assets/javascripts/lunr/min/lunr.du.min.js 
create mode 100644 1.3.0/assets/javascripts/lunr/min/lunr.el.min.js create mode 100644 1.3.0/assets/javascripts/lunr/min/lunr.es.min.js create mode 100644 1.3.0/assets/javascripts/lunr/min/lunr.fi.min.js create mode 100644 1.3.0/assets/javascripts/lunr/min/lunr.fr.min.js create mode 100644 1.3.0/assets/javascripts/lunr/min/lunr.he.min.js create mode 100644 1.3.0/assets/javascripts/lunr/min/lunr.hi.min.js create mode 100644 1.3.0/assets/javascripts/lunr/min/lunr.hu.min.js create mode 100644 1.3.0/assets/javascripts/lunr/min/lunr.hy.min.js create mode 100644 1.3.0/assets/javascripts/lunr/min/lunr.it.min.js create mode 100644 1.3.0/assets/javascripts/lunr/min/lunr.ja.min.js create mode 100644 1.3.0/assets/javascripts/lunr/min/lunr.jp.min.js create mode 100644 1.3.0/assets/javascripts/lunr/min/lunr.kn.min.js create mode 100644 1.3.0/assets/javascripts/lunr/min/lunr.ko.min.js create mode 100644 1.3.0/assets/javascripts/lunr/min/lunr.multi.min.js create mode 100644 1.3.0/assets/javascripts/lunr/min/lunr.nl.min.js create mode 100644 1.3.0/assets/javascripts/lunr/min/lunr.no.min.js create mode 100644 1.3.0/assets/javascripts/lunr/min/lunr.pt.min.js create mode 100644 1.3.0/assets/javascripts/lunr/min/lunr.ro.min.js create mode 100644 1.3.0/assets/javascripts/lunr/min/lunr.ru.min.js create mode 100644 1.3.0/assets/javascripts/lunr/min/lunr.sa.min.js create mode 100644 1.3.0/assets/javascripts/lunr/min/lunr.stemmer.support.min.js create mode 100644 1.3.0/assets/javascripts/lunr/min/lunr.sv.min.js create mode 100644 1.3.0/assets/javascripts/lunr/min/lunr.ta.min.js create mode 100644 1.3.0/assets/javascripts/lunr/min/lunr.te.min.js create mode 100644 1.3.0/assets/javascripts/lunr/min/lunr.th.min.js create mode 100644 1.3.0/assets/javascripts/lunr/min/lunr.tr.min.js create mode 100644 1.3.0/assets/javascripts/lunr/min/lunr.vi.min.js create mode 100644 1.3.0/assets/javascripts/lunr/min/lunr.zh.min.js create mode 100644 1.3.0/assets/javascripts/lunr/tinyseg.js create mode 100644 1.3.0/assets/javascripts/lunr/wordcut.js create mode 100644 1.3.0/assets/javascripts/workers/search.b8dbb3d2.min.js create mode 100644 1.3.0/assets/javascripts/workers/search.b8dbb3d2.min.js.map create mode 100644 1.3.0/assets/stylesheets/main.6543a935.min.css create mode 100644 1.3.0/assets/stylesheets/main.6543a935.min.css.map create mode 100644 1.3.0/assets/stylesheets/palette.06af60db.min.css create mode 100644 1.3.0/assets/stylesheets/palette.06af60db.min.css.map create mode 100644 1.3.0/benchmark.html create mode 100644 1.3.0/contributing/contributing.md create mode 100644 1.3.0/contributing/index.html create mode 100644 1.3.0/endpoints/collections_endpoints/collections_endpoints.md create mode 100644 1.3.0/endpoints/collections_endpoints/index.html create mode 100644 1.3.0/endpoints/index.html create mode 100644 1.3.0/endpoints/index.md create mode 100644 1.3.0/endpoints/items_endpoints/index.html create mode 100644 1.3.0/endpoints/items_endpoints/items_endpoints.md create mode 100644 1.3.0/endpoints/searches_endpoints/index.html create mode 100644 1.3.0/endpoints/searches_endpoints/searches_endpoints.md create mode 100644 1.3.0/endpoints/tms_endpoints/index.html create mode 100644 1.3.0/endpoints/tms_endpoints/tms_endpoints.md create mode 100644 1.3.0/img/favicon.ico create mode 100644 1.3.0/img/logo.png create mode 100644 1.3.0/index.html create mode 100644 1.3.0/index.md create mode 100644 1.3.0/intro/index.html create mode 100644 1.3.0/intro/intro.md create mode 100644 1.3.0/migrations/v1_migration/index.html create 
mode 100644 1.3.0/migrations/v1_migration/v1_migration.md create mode 100644 1.3.0/notebooks/demo/demo.ipynb create mode 100644 1.3.0/notebooks/demo/index.html create mode 100644 1.3.0/overrides/stylesheets/extra.css create mode 100644 1.3.0/release-notes/index.html create mode 100644 1.3.0/release-notes/release-notes.md create mode 100644 1.3.0/search/search_index.json create mode 100644 1.3.0/sitemap.xml create mode 100644 1.3.0/sitemap.xml.gz create mode 100644 1.3.0/tiler_factories/index.html create mode 100644 1.3.0/tiler_factories/tiler_factories.md diff --git a/1.3.0/404.html b/1.3.0/404.html new file mode 100644 index 00000000..55d5ba27 --- /dev/null +++ b/1.3.0/404.html @@ -0,0 +1,1131 @@ + + + + + + + + + + + + + + + + + + + + + TiTiler.PgSTAC + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
diff --git a/1.3.0/advanced/custom_search/custom_search.md b/1.3.0/advanced/custom_search/custom_search.md
new file mode 100644
index 00000000..675b9d07
--- /dev/null
+++ b/1.3.0/advanced/custom_search/custom_search.md
@@ -0,0 +1,97 @@
Even though `TiTiler.PgSTAC` ships a default FastAPI application, it can also
be used as a library when you want to extend or override the default behavior.

Let's look at one such example. Imagine that we use a JSON Web Token (JWT)
based approach for authorization, and every token contains information about
the area a user has access to:

```json
{
  "sub": "1234567890",
  "name": "John Doe",
  "iat": 1516239022,
  "scope": "zone_A"
}
```

We want our application to take this information into account when registering
a search query. It can be done in the following way:

```python
from contextlib import asynccontextmanager
from typing import Tuple

import jwt
from fastapi import FastAPI
from fastapi.security.utils import get_authorization_scheme_param
from starlette.requests import Request

from titiler.pgstac.db import close_db_connection, connect_to_db
from titiler.pgstac.extensions import searchInfoExtension
from titiler.pgstac.factory import MosaicTilerFactory, add_search_register_route
from titiler.pgstac.model import Metadata, PgSTACSearch, RegisterMosaic
from titiler.pgstac.settings import PostgresSettings

postgres_settings = PostgresSettings()


@asynccontextmanager
async def lifespan(app: FastAPI):
    """FastAPI Lifespan."""
    # Create Connection Pool
    await connect_to_db(app, settings=postgres_settings)
    yield
    # Close the Connection Pool
    await close_db_connection(app)


app = FastAPI(lifespan=lifespan)

AREAS = {
    "zone_A": {"type": "Point", "coordinates": [-41.93, -12.76]},
    "zone_B": {"type": "Point", "coordinates": [2.15, 41.39]},
}


def search_factory(request: Request, body: RegisterMosaic) -> Tuple[PgSTACSearch, Metadata]:
    """Restrict the registered search to the area allowed by the JWT `scope` claim."""
    authorization = request.headers.get("Authorization")
    scheme, token = get_authorization_scheme_param(authorization)
    payload = jwt.decode(token, algorithms=["HS256"], key="your-256-bit-secret")

    search = body.dict(exclude_none=True, exclude={"metadata"}, by_alias=True)
    search["filter"] = {
        "op": "and",
        "args": [
            {
                "op": "s_intersects",
                "args": [{"property": "geometry"}, AREAS[payload["scope"]]],
            },
            search["filter"],
        ],
    }

    return PgSTACSearch(**search), body.metadata


mosaic = MosaicTilerFactory(
    extensions=[
        searchInfoExtension(),
    ]
)
app.include_router(mosaic.router)
add_search_register_route(app, search_dependency=search_factory)
```

Checking:

```bash
$ curl -s -X 'POST' \
  'http://localhost:8081/register' \
  -H 'accept: application/json' \
  -H 'Authorization: Bearer eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiIxMjM0NTY3ODkwIiwibmFtZSI6IkpvaG4gRG9lIiwiaWF0IjoxNTE2MjM5MDIyLCJzY29wZSI6InpvbmVfQSJ9.BelzluX7v7kYObix2KSyy1T5gEOQYQn_pyNO5Ri0gWo' \
  -H 'Content-Type: application/json' \
  -d '{"filter":{"op":"and","args":[{"op":"=","args":[{"property":"collection"},"l1"]}]}}' | jq '.id'
"bbc3c8f4c392436f74de6cd0308469f6"

$ curl -X 'GET' \
  'http://localhost:8081/bbc3c8f4c392436f74de6cd0308469f6/info' \
  -H 'accept: application/json'
{"hash":"bbc3c8f4c392436f74de6cd0308469f6","search":{"filter":{"op":"and","args":[{"op":"s_intersects","args":[{"property":"geometry"},{"type":"Point","coordinates":[-41.93,-12.76]}]},{"op":"and","args":[{"op":"=","args":[{"property":"collection"},"l1"]}]}]}},"_where":"(  ( st_intersects(geometry, '0101000020E6100000D7A3703D0AF744C085EB51B81E8529C0'::geometry) and  ( (collection_id = 'l1') )  )  )  ","orderby":"datetime DESC, id DESC","lastused":"2022-02-23T13:00:04.090757+00:00","usecount":3,"metadata":{"type":"mosaic"}}
```
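For completeness, the bearer token in the `curl` call above is just an HS256-signed JWT whose `scope` claim is read by `search_factory`. A minimal sketch of minting such a token with PyJWT — the secret and claim values are placeholders, not anything shipped with `titiler-pgstac`:

```python
# Sketch: mint a token carrying the "scope" claim that search_factory reads.
# Secret and claims are placeholder values for illustration only.
import jwt

token = jwt.encode(
    {
        "sub": "1234567890",
        "name": "John Doe",
        "iat": 1516239022,
        "scope": "zone_A",  # limits registered searches to AREAS["zone_A"]
    },
    key="your-256-bit-secret",
    algorithm="HS256",
)

# Pass it as `Authorization: Bearer <token>` when calling /register.
print(token)
```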
+ + + + + + + + + + \ No newline at end of file diff --git a/1.3.0/advanced/custom_tilejson/custom_tilejson.md b/1.3.0/advanced/custom_tilejson/custom_tilejson.md new file mode 100644 index 00000000..ad0cc5d5 --- /dev/null +++ b/1.3.0/advanced/custom_tilejson/custom_tilejson.md @@ -0,0 +1,162 @@ + +Goal: enable users to select a predefined configuration stored in the mosaic Metadata. + + +```python +import sys +from typing import Optional +from dataclasses import dataclass + +from morecantile import TileMatrixSet +from titiler.core.resources.enums import ImageType +from titiler.core.models.mapbox import TileJSON +from titiler.pgstac import factory as TitilerPgSTACFactory +from titiler.pgstac.dependencies import PgSTACParams +from typing_extensions import Annotated + +from fastapi import Depends, Query + +from starlette.requests import Request + + +@dataclass +class MosaicTilerFactory(TitilerPgSTACFactory.MosaicTilerFactory): + """Custom factory.""" + + def _tilejson_routes(self) -> None: + """Custom TileJSON endpoint.""" + + @self.router.get( + "/tilejson.json", + response_model=TileJSON, + responses={200: {"description": "Return a tilejson"}}, + response_model_exclude_none=True, + ) + @self.router.get( + "/{tileMatrixSetId}/tilejson.json", + response_model=TileJSON, + responses={200: {"description": "Return a tilejson"}}, + response_model_exclude_none=True, + ) + def tilejson( + request: Request, + search_id=Depends(self.path_dependency), + tileMatrixSetId: Annotated[ # type: ignore + Literal[tuple(self.supported_tms.list())], + f"Identifier selecting one of the TileMatrixSetId supported (default: '{self.default_tms}')", + ] = self.default_tms, + layer: Annotated[ + str, + Query(description="Name of default configuration"), + ] = None, + tile_format: Annotated[ + Optional[ImageType], + Query( + description="Default will be automatically defined if the output image needs a mask (png) or not (jpeg).", + ), + ] = None, + tile_scale: Annotated[ + Optional[int], + Query( + gt=0, lt=4, description="Tile size scale. 1=256x256, 2=512x512..." + ), + ] = None, + minzoom: Annotated[ + Optional[int], + Query(description="Overwrite default minzoom."), + ] = None, + maxzoom: Annotated[ + Optional[int], + Query(description="Overwrite default maxzoom."), + ] = None, + layer_params=Depends(self.layer_dependency), + dataset_params=Depends(self.dataset_dependency), + pixel_selection=Depends(self.pixel_selection_dependency), + buffer: Annotated[ + Optional[float], + Query( + gt=0, + title="Tile buffer.", + description="Buffer on each side of the given tile. It must be a multiple of `0.5`. 
Output **tilesize** will be expanded to `tilesize + 2 * buffer` (e.g 0.5 = 257x257, 1.0 = 258x258).", + ), + ] = None, + post_process=Depends(self.process_dependency), + rescale=Depends(self.rescale_dependency), + color_formula: Annotated[ + Optional[str], + Query( + title="Color Formula", + description="rio-color formula (info: https://github.com/mapbox/rio-color)", + ), + ] = None, + colormap=Depends(self.colormap_dependency), + render_params=Depends(self.render_dependency), + pgstac_params: PgSTACParams = Depends(), + backend_params=Depends(self.backend_dependency), + reader_params=Depends(self.reader_dependency), + ): + """Return TileJSON document for a SearchId.""" + with request.app.state.dbpool.connection() as conn: + with conn.cursor(row_factory=class_row(model.Search)) as cursor: + cursor.execute( + "SELECT * FROM searches WHERE hash=%s;", + (search_id,), + ) + search_info = cursor.fetchone() + if not search_info: + raise KeyError(f"search {search_id} not found") + + route_params = { + "search_id": search_info.id, + "z": "{z}", + "x": "{x}", + "y": "{y}", + "tileMatrixSetId": tileMatrixSetId, + } + if tile_scale: + route_params["scale"] = tile_scale + if tile_format: + route_params["format"] = tile_format.value + + tiles_url = self.url_for(request, "tile", **route_params) + + qs_key_to_remove = [ + "tilematrixsetid", + "tile_format", + "tile_scale", + "minzoom", + "maxzoom", + "layer", + ] + qs = [ + (key, value) + for (key, value) in request.query_params._list + if key.lower() not in qs_key_to_remove + ] + + if layer: + config = search_info.metadata.defaults_params.get(layer) + if not config: + raise HTTPException(status_code=404, detail=f"Invalid {layer} configuration.") + + # This assume the default configuration follows the endpoint expected format + # as `"true_color": {"assets": ["B4", "B3", "B2"]}` + qs = QueryParams(config) + + if qs: + tiles_url += f"?{urlencode(qs, doseq=True)}" + + minzoom = _first_value([minzoom, search_info.metadata.minzoom], tms.minzoom) + maxzoom = _first_value([maxzoom, search_info.metadata.maxzoom], tms.maxzoom) + bounds = _first_value( + [search_info.input_search.get("bbox"), search_info.metadata.bounds], + tms.bbox, + ) + return { + "bounds": bounds, + "minzoom": minzoom, + "maxzoom": maxzoom, + "name": search_info.metadata.name or search_info.id, + "tiles": [tiles_url], + } +``` diff --git a/1.3.0/advanced/custom_tilejson/index.html b/1.3.0/advanced/custom_tilejson/index.html new file mode 100644 index 00000000..24384c44 --- /dev/null +++ b/1.3.0/advanced/custom_tilejson/index.html @@ -0,0 +1,1324 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + Custom TileJSON endpoint - TiTiler.PgSTAC + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
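Note that the snippet above leans on names that `titiler.pgstac.factory` imports internally (`model`, `class_row`, `HTTPException`, `QueryParams`, `urlencode`, the `tms` object and the `_first_value` helper), so they must also be available in your module. As a rough, hedged sketch, a `_first_value`-style helper simply returns the first non-`None` entry with a fallback — the implementation below is an illustration, not necessarily the library's exact code:

```python
# Illustrative sketch of a `_first_value`-style helper; not necessarily the
# library's exact implementation.
from typing import Any, Iterable, Optional


def _first_value(values: Iterable[Any], default: Optional[Any] = None) -> Any:
    """Return the first non-None value from `values`, or `default`."""
    return next((v for v in values if v is not None), default)


# User-supplied minzoom wins, then mosaic metadata, then the TMS default.
assert _first_value([None, 4], 0) == 4
assert _first_value([None, None], 0) == 0
```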
+ + + + + + + + + + \ No newline at end of file diff --git a/1.3.0/advanced/metadata/metadata.md b/1.3.0/advanced/metadata/metadata.md new file mode 100644 index 00000000..8bfd6153 --- /dev/null +++ b/1.3.0/advanced/metadata/metadata.md @@ -0,0 +1,118 @@ +`TiTiler-PgSTAC` uses PgSTAC [search](https://github.com/stac-utils/pgstac/blob/3499daa2bfa700ae7bb07503795c169bf2ebafc7/sql/004_search.sql#L907-L915) to host mosaic parameters for performance purposes. To help users we added the possibility to add `metadata` to search entries and in `TiTiler-PgSTAC` we introduced a `non-official` specification to help user storing meaningful information. + +### Specification + +```js +{ + // OPTIONAL. Default: "mosaic" (No other value accepted for now). Describe the `type` of metadata. + "type": "mosaic", + + // OPTIONAL. Default: null. + // The maximum extent of available map tiles. The bounds are represented in WGS:84 + // latitude and longitude values, in the order left, bottom, right, top. + // Values may be integers or floating point numbers. + "bounds": [ -180, -85.05112877980659, 180, 85.0511287798066 ], + + // OPTIONAL. Default: null. + // An integer specifying the minimum zoom level. + "minzoom": 0, + + // OPTIONAL. Default: null. + // An integer specifying the maximum zoom level. MUST be >= minzoom. + "maxzoom": 11, + + // OPTIONAL. Default: null. The name can contain any legal character. + "name": "compositing", + + // OPTIONAL. Default: null. An array of available assets. + "assets": ["image", "cog"], + + // OPTIONAL. Default: null. A set of `defaults` configuration to be forwarded to the /tiles endpoints. + // Note: The defaults block should follow the STAC render extension https://github.com/stac-extensions/render + "defaults": { + "true_color": { + "assets": ["B4", "B3", "B2"], + "color_formula": "Gamma RGB 3.5 Saturation 1.7 Sigmoidal RGB 15 0.35", + }, + "ndvi": { + "expression": "(B4-B3)/(B4+B3)", + "rescale": [[-1, 1]], + "colormap_name": "viridis" + } + } +} +``` + +!!! Important + - When using the `/searches/register` endpoint, `{"type": "mosaic"}` will be set by default + - All metadata fields are optional and custom fields are also allowed. 
+ + +``` +curl -X 'POST' 'http://127.0.0.1:8081/searches/register' -H 'accept: application/json' -H 'Content-Type: application/json' -d '{"filter": {"op": "=", "args": [{"property": "collection"}, "landsat-c2l2-sr"]}, "metadata": {"name": "landsat mosaic"}}' +>> { + "id": "d7fcdefd0457c949ea7a6192bc2c7122", + "links": [ + { + "rel": "metadata", + "type": "application/json", + "href": "http://127.0.0.1:8081/searches/d7fcdefd0457c949ea7a6192bc2c7122/info" + }, + { + "rel": "tilejson", + "type": "application/json", + "href": "http://127.0.0.1:8081/searches/d7fcdefd0457c949ea7a6192bc2c7122/tilejson.json" + } + ] +} + +curl http://127.0.0.1:8081/searches/d7fcdefd0457c949ea7a6192bc2c7122/info | jq '.search.metadata' +>> { + "type": "mosaic", + "name": "landsat mosaic" +} +``` + +``` +curl -X 'POST' 'http://127.0.0.1:8081/searches/register' -H 'accept: application/json' -H 'Content-Type: application/json' -d '{"collections": ["noaa-emergency-response"], "bbox": [-87.0251, 36.0999, -85.4249, 36.2251], "filter-lang": "cql-json", "metadata": {"bounds": [-87.0251, 36.0999, -85.4249, 36.2251], "minzoom": 14, "maxzoom": 18, "assets": ["cog"], "defaults": {"true_color": {"bidx": [1, 2, 3]}}}}' +>> { + "id":"4b0db3dbd1858d54a3a55f84de97d1ca", + "links":[ + { + "rel": "metadata", + "type": "application/json", + "href": "http://127.0.0.1:8081/searches/4b0db3dbd1858d54a3a55f84de97d1ca/info" + }, + { + "rel": "tilejson", + "type": "application/json", + "href": "http://127.0.0.1:8081/searches/4b0db3dbd1858d54a3a55f84de97d1ca/tilejson.json" + } + ] +} + +curl http://127.0.0.1:8081/searches/4b0db3dbd1858d54a3a55f84de97d1ca/info | jq '.search.metadata' +>> { + "type": "mosaic", + "bounds": [ + -87.0251, + 36.0999, + -85.4249, + 36.2251 + ], + "minzoom": 14, + "maxzoom": 18, + "assets": [ + "cog" + ], + "defaults": { + "true_color": { + "bidx": [ + 1, + 2, + 3 + ] + } + } +} +``` diff --git a/1.3.0/advanced/searches_list/index.html b/1.3.0/advanced/searches_list/index.html new file mode 100644 index 00000000..ea738c72 --- /dev/null +++ b/1.3.0/advanced/searches_list/index.html @@ -0,0 +1,1190 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + Mosaic list - TiTiler.PgSTAC + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
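The same registration can be scripted rather than typed as `curl`. A small sketch using the `requests` library against the second example above — the host/port are the documentation's example values; adjust them to your deployment:

```python
# Sketch: register a mosaic with metadata, then read the stored metadata back.
import requests

payload = {
    "collections": ["noaa-emergency-response"],
    "bbox": [-87.0251, 36.0999, -85.4249, 36.2251],
    "filter-lang": "cql-json",
    "metadata": {
        "bounds": [-87.0251, 36.0999, -85.4249, 36.2251],
        "minzoom": 14,
        "maxzoom": 18,
        "assets": ["cog"],
        "defaults": {"true_color": {"bidx": [1, 2, 3]}},
    },
}

resp = requests.post("http://127.0.0.1:8081/searches/register", json=payload)
resp.raise_for_status()
search_id = resp.json()["id"]

info = requests.get(f"http://127.0.0.1:8081/searches/{search_id}/info").json()
print(info["search"]["metadata"])  # {"type": "mosaic", "bounds": [...], ...}
```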
diff --git a/1.3.0/advanced/searches_list/searches_list.md b/1.3.0/advanced/searches_list/searches_list.md
new file mode 100644
index 00000000..3e0331db
--- /dev/null
+++ b/1.3.0/advanced/searches_list/searches_list.md
@@ -0,0 +1,30 @@
Starting with `titiler-pgstac>=0.2.0`, we've added a `/searches/list` endpoint to list all registered mosaics. When a mosaic is added via `/searches/register`, a specific `metadata.type: "mosaic"` is set on the pgstac `search` entry, which the `/searches/list` endpoint then uses to filter the pgstac `searches`.

To keep the mosaic list performant, users might want to alter their PgSTAC database to add an **index**:

```sql
$ psql
postgis=# SET schema 'pgstac';
>> SET

postgis=# CREATE INDEX IF NOT EXISTS searches_mosaic ON searches ((true)) WHERE metadata->>'type'='mosaic';
>> NOTICE:  relation "searches_mosaic" already exists, skipping
>> CREATE INDEX

postgis=# SELECT
    indexname,
    indexdef
FROM
    pg_indexes
WHERE
    tablename = 'searches';

>>     indexname    |                                                         indexdef
>> -----------------+---------------------------------------------------------------------------------------------------------------------------
>>  searches_pkey   | CREATE UNIQUE INDEX searches_pkey ON pgstac.searches USING btree (hash)
>>  searches_mosaic | CREATE INDEX searches_mosaic ON pgstac.searches USING btree ((true)) WHERE ((metadata ->> 'type'::text) = 'mosaic'::text)
```

ref: https://github.com/developmentseed/eoAPI/blob/master/stack/handlers/db_handler.py#L204-L213
diff --git a/1.3.0/api/titiler/pgstac/db/db.md b/1.3.0/api/titiler/pgstac/db/db.md
new file mode 100644
index 00000000..05edad88
--- /dev/null
+++ b/1.3.0/api/titiler/pgstac/db/db.md
@@ -0,0 +1,29 @@
# Module titiler.pgstac.db

Database connection handling.

## Functions

### close_db_connection

```python3
def close_db_connection(
    app: fastapi.applications.FastAPI
) -> None
```

Close Pool.

### connect_to_db

```python3
def connect_to_db(
    app: fastapi.applications.FastAPI,
    settings: Optional[titiler.pgstac.settings.PostgresSettings] = None,
    pool_kwargs: Optional[Dict[str, Any]] = None
) -> None
```

Connect to Database.
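As in the custom-search example earlier, these two functions are normally wired into the FastAPI lifespan so the PgSTAC connection pool lives for the duration of the application. A minimal sketch — the `pool_kwargs` values are illustrative, and `PostgresSettings()` resolves its fields from the environment by default:

```python
# Sketch: open the PgSTAC connection pool at startup, close it at shutdown.
from contextlib import asynccontextmanager

from fastapi import FastAPI
from titiler.pgstac.db import close_db_connection, connect_to_db
from titiler.pgstac.settings import PostgresSettings

postgres_settings = PostgresSettings()  # reads connection info from the environment


@asynccontextmanager
async def lifespan(app: FastAPI):
    # pool_kwargs is forwarded to the underlying psycopg connection pool.
    await connect_to_db(
        app,
        settings=postgres_settings,
        pool_kwargs={"min_size": 1, "max_size": 10},
    )
    yield
    await close_db_connection(app)


app = FastAPI(lifespan=lifespan)
```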
+ + + + + + + + + + \ No newline at end of file diff --git a/1.3.0/api/titiler/pgstac/dependencies/dependencies.md b/1.3.0/api/titiler/pgstac/dependencies/dependencies.md new file mode 100644 index 00000000..d77d50f8 --- /dev/null +++ b/1.3.0/api/titiler/pgstac/dependencies/dependencies.md @@ -0,0 +1,157 @@ +# Module titiler.pgstac.dependencies + +titiler-pgstac dependencies. + +## Variables + +```python3 +cache_config +``` + +```python3 +retry_config +``` + +## Functions + + +### CollectionIdParams + +```python3 +def CollectionIdParams( + request: starlette.requests.Request, + collection_id: typing.Annotated[str, Path(PydanticUndefined)] +) -> str +``` + +collection_id Path Parameter + + +### ItemIdParams + +```python3 +def ItemIdParams( + request: starlette.requests.Request, + collection_id: typing.Annotated[str, Path(PydanticUndefined)], + item_id: typing.Annotated[str, Path(PydanticUndefined)] +) -> pystac.item.Item +``` + +STAC Item dependency. + + +### SearchIdParams + +```python3 +def SearchIdParams( + search_id: typing.Annotated[str, Path(PydanticUndefined)] +) -> str +``` + +search_id + + +### SearchParams + +```python3 +def SearchParams( + body: titiler.pgstac.model.RegisterMosaic +) -> Tuple[titiler.pgstac.model.PgSTACSearch, titiler.pgstac.model.Metadata] +``` + +Search parameters. + + +### TmsTileParams + +```python3 +def TmsTileParams( + z: typing.Annotated[int, Path(PydanticUndefined)], + x: typing.Annotated[int, Path(PydanticUndefined)], + y: typing.Annotated[int, Path(PydanticUndefined)] +) -> morecantile.commons.Tile +``` + +TileMatrixSet Tile parameters. + +## Classes + +### BackendParams + +```python3 +class BackendParams( + request: starlette.requests.Request +) +``` + +backend parameters. + +#### Ancestors (in MRO) + +* titiler.core.dependencies.DefaultDependency + +#### Methods + + +#### keys + +```python3 +def keys( + self +) +``` + +Return Keys. + +### PgSTACParams + +```python3 +class PgSTACParams( + scan_limit: Annotated[Optional[int], Query(PydanticUndefined)] = None, + items_limit: Annotated[Optional[int], Query(PydanticUndefined)] = None, + time_limit: Annotated[Optional[int], Query(PydanticUndefined)] = None, + exitwhenfull: Annotated[Optional[bool], Query(PydanticUndefined)] = None, + skipcovered: Annotated[Optional[bool], Query(PydanticUndefined)] = None +) +``` + +PgSTAC parameters. + +#### Ancestors (in MRO) + +* titiler.core.dependencies.DefaultDependency + +#### Class variables + +```python3 +exitwhenfull +``` + +```python3 +items_limit +``` + +```python3 +scan_limit +``` + +```python3 +skipcovered +``` + +```python3 +time_limit +``` + +#### Methods + + +#### keys + +```python3 +def keys( + self +) +``` + +Return Keys. \ No newline at end of file diff --git a/1.3.0/api/titiler/pgstac/dependencies/index.html b/1.3.0/api/titiler/pgstac/dependencies/index.html new file mode 100644 index 00000000..f09f41c6 --- /dev/null +++ b/1.3.0/api/titiler/pgstac/dependencies/index.html @@ -0,0 +1,1660 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + dependencies - TiTiler.PgSTAC + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
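`PgSTACParams` is a `DefaultDependency`, so each field becomes an optional query parameter (e.g. `?scan_limit=10000&items_limit=100`) and the object can be expanded into backend calls via `keys()`. A hedged sketch of declaring it on a route, mirroring how the custom TileJSON example elsewhere in these docs uses `pgstac_params: PgSTACParams = Depends()`:

```python
# Sketch: surface PgSTAC scan-tuning options as query parameters on a route.
from fastapi import Depends, FastAPI
from titiler.pgstac.dependencies import PgSTACParams

app = FastAPI()


@app.get("/debug/pgstac-params")
def show_params(pgstac_params: PgSTACParams = Depends()):
    """Echo the PgSTAC parameters parsed from the query string."""
    # keys() returns the parameter names, so the dependency can be expanded
    # into keyword arguments when calling the mosaic backend.
    return {key: getattr(pgstac_params, key) for key in pgstac_params.keys()}


# Example request: GET /debug/pgstac-params?scan_limit=10000&items_limit=100
```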
diff --git a/1.3.0/api/titiler/pgstac/extensions/extensions.md b/1.3.0/api/titiler/pgstac/extensions/extensions.md
new file mode 100644
index 00000000..fca869ad
--- /dev/null
+++ b/1.3.0/api/titiler/pgstac/extensions/extensions.md
@@ -0,0 +1,33 @@
# Module titiler.pgstac.extensions

titiler.pgstac extensions.

## Classes

### searchInfoExtension

```python3
class searchInfoExtension()
```

Add /info endpoint

#### Ancestors (in MRO)

* titiler.core.factory.FactoryExtension

#### Methods

#### register

```python3
def register(
    self,
    factory: titiler.pgstac.factory.MosaicTilerFactory
)
```

Register endpoint to the tiler factory.
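Usage follows the pattern from the custom-search example earlier in these docs: instantiate the extension and hand it to the factory, which calls `register()` on it and gains the `/info` endpoint. A minimal sketch:

```python
# Sketch: attach the /info extension to a PgSTAC mosaic tiler factory.
from fastapi import FastAPI
from titiler.pgstac.extensions import searchInfoExtension
from titiler.pgstac.factory import MosaicTilerFactory

app = FastAPI()

mosaic = MosaicTilerFactory(
    extensions=[searchInfoExtension()],  # adds GET .../info to the factory router
)
app.include_router(mosaic.router)
```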
+ + + + + + + + + + \ No newline at end of file diff --git a/1.3.0/api/titiler/pgstac/factory/factory.md b/1.3.0/api/titiler/pgstac/factory/factory.md new file mode 100644 index 00000000..9bdf22a6 --- /dev/null +++ b/1.3.0/api/titiler/pgstac/factory/factory.md @@ -0,0 +1,320 @@ +# Module titiler.pgstac.factory + +Custom MosaicTiler Factory for PgSTAC Mosaic Backend. + +## Variables + +```python3 +DEFAULT_TEMPLATES +``` + +```python3 +MAX_THREADS +``` + +```python3 +MOSAIC_STRICT_ZOOM +``` + +```python3 +MOSAIC_THREADS +``` + +```python3 +WGS84_CRS +``` + +```python3 +img_endpoint_params +``` + +```python3 +jinja2_env +``` + +## Functions + + +### add_search_list_route + +```python3 +def add_search_list_route( + app: fastapi.applications.FastAPI, + *, + prefix: str = '', + tags: Optional[List[str]] = None +) +``` + +Add PgSTAC Search (of type mosaic) listing route. + + +### add_search_register_route + +```python3 +def add_search_register_route( + app: fastapi.applications.FastAPI, + *, + prefix: str = '', + search_dependency: Callable[..., Tuple[titiler.pgstac.model.PgSTACSearch, titiler.pgstac.model.Metadata]] = , + tile_dependencies: Optional[List[Callable]] = None, + tags: Optional[List[str]] = None +) +``` + +add `/register` route + + +### check_query_params + +```python3 +def check_query_params( + *, + dependencies: List[Callable], + query_params: Union[starlette.datastructures.QueryParams, Dict] +) -> None +``` + +Check QueryParams for Query dependency. + +1. `get_dependant` is used to get the query-parameters required by the `callable` +2. we use `request_params_to_args` to construct arguments needed to call the `callable` +3. we call the `callable` and catch any errors + +Important: We assume the `callable` in not a co-routine + +## Classes + +### MosaicTilerFactory + +```python3 +class MosaicTilerFactory( + reader: Type[cogeo_mosaic.backends.base.BaseBackend] = , + router: fastapi.routing.APIRouter = , + path_dependency: Callable[..., str] = , + layer_dependency: Type[titiler.core.dependencies.DefaultDependency] = , + dataset_dependency: Type[titiler.core.dependencies.DefaultDependency] = , + process_dependency: Callable[..., Optional[titiler.core.algorithm.base.BaseAlgorithm]] = .post_process at 0x7f56f66db560>, + rescale_dependency: Callable[..., Optional[List[Tuple[float, ...]]]] = , + color_formula_dependency: Callable[..., Optional[str]] = , + colormap_dependency: Callable[..., Union[Dict[int, Tuple[int, int, int, int]], Sequence[Tuple[Tuple[Union[float, int], Union[float, int]], Tuple[int, int, int, int]]], NoneType]] = .deps at 0x7f56ff3c9da0>, + render_dependency: Type[titiler.core.dependencies.DefaultDependency] = , + reader_dependency: Type[titiler.core.dependencies.DefaultDependency] = , + environment_dependency: Callable[..., Dict] = at 0x7f56f66db380>, + supported_tms: morecantile.defaults.TileMatrixSets = TileMatrixSets(tms={'CDB1GlobalGrid': PosixPath('/opt/hostedtoolcache/Python/3.11.9/x64/lib/python3.11/site-packages/morecantile/data/CDB1GlobalGrid.json'), 'CanadianNAD83_LCC': PosixPath('/opt/hostedtoolcache/Python/3.11.9/x64/lib/python3.11/site-packages/morecantile/data/CanadianNAD83_LCC.json'), 'EuropeanETRS89_LAEAQuad': PosixPath('/opt/hostedtoolcache/Python/3.11.9/x64/lib/python3.11/site-packages/morecantile/data/EuropeanETRS89_LAEAQuad.json'), 'GNOSISGlobalGrid': PosixPath('/opt/hostedtoolcache/Python/3.11.9/x64/lib/python3.11/site-packages/morecantile/data/GNOSISGlobalGrid.json'), 'LINZAntarticaMapTilegrid': 
PosixPath('/opt/hostedtoolcache/Python/3.11.9/x64/lib/python3.11/site-packages/morecantile/data/LINZAntarticaMapTilegrid.json'), 'NZTM2000Quad': PosixPath('/opt/hostedtoolcache/Python/3.11.9/x64/lib/python3.11/site-packages/morecantile/data/NZTM2000Quad.json'), 'UPSAntarcticWGS84Quad': PosixPath('/opt/hostedtoolcache/Python/3.11.9/x64/lib/python3.11/site-packages/morecantile/data/UPSAntarcticWGS84Quad.json'), 'UPSArcticWGS84Quad': PosixPath('/opt/hostedtoolcache/Python/3.11.9/x64/lib/python3.11/site-packages/morecantile/data/UPSArcticWGS84Quad.json'), 'UTM31WGS84Quad': PosixPath('/opt/hostedtoolcache/Python/3.11.9/x64/lib/python3.11/site-packages/morecantile/data/UTM31WGS84Quad.json'), 'WGS1984Quad': PosixPath('/opt/hostedtoolcache/Python/3.11.9/x64/lib/python3.11/site-packages/morecantile/data/WGS1984Quad.json'), 'WebMercatorQuad': , + route_dependencies: List[Tuple[List[titiler.core.routing.EndpointScope], List[fastapi.params.Depends]]] = , + extensions: List[titiler.core.factory.FactoryExtension] = , + templates: starlette.templating.Jinja2Templates = , + stats_dependency: Type[titiler.core.dependencies.DefaultDependency] = , + histogram_dependency: Type[titiler.core.dependencies.DefaultDependency] = , + tile_dependency: Type[titiler.core.dependencies.DefaultDependency] = , + img_part_dependency: Type[titiler.core.dependencies.DefaultDependency] = , + pixel_selection_dependency: Callable[..., rio_tiler.mosaic.methods.base.MosaicMethodBase] = , + pgstac_dependency: Type[titiler.core.dependencies.DefaultDependency] = , + backend_dependency: Type[titiler.core.dependencies.DefaultDependency] = , + add_statistics: bool = False, + add_viewer: bool = False, + add_part: bool = False +) +``` + +Custom MosaicTiler for PgSTAC Mosaic Backend. + +#### Ancestors (in MRO) + +* titiler.core.factory.BaseTilerFactory + +#### Class variables + +```python3 +add_part +``` + +```python3 +add_statistics +``` + +```python3 +add_viewer +``` + +```python3 +backend_dependency +``` + +```python3 +dataset_dependency +``` + +```python3 +default_tms +``` + +```python3 +histogram_dependency +``` + +```python3 +img_part_dependency +``` + +```python3 +layer_dependency +``` + +```python3 +pgstac_dependency +``` + +```python3 +reader +``` + +```python3 +reader_dependency +``` + +```python3 +render_dependency +``` + +```python3 +router_prefix +``` + +```python3 +stats_dependency +``` + +```python3 +supported_tms +``` + +```python3 +templates +``` + +```python3 +tile_dependency +``` + +#### Methods + + +#### add_route_dependencies + +```python3 +def add_route_dependencies( + self, + *, + scopes: List[titiler.core.routing.EndpointScope], + dependencies=typing.List[fastapi.params.Depends] +) +``` + +Add dependencies to routes. + +Allows a developer to add dependencies to a route after the route has been defined. + + +#### color_formula_dependency + +```python3 +def color_formula_dependency( + color_formula: Annotated[Optional[str], Query(PydanticUndefined)] = None +) -> Optional[str] +``` + +ColorFormula Parameter. 
+ + +#### colormap_dependency + +```python3 +def colormap_dependency( + colormap_name: Annotated[Literal['plasma_r', 'pastel2', 'rdylbu_r', 'cividis', 'purples_r', 'twilight_shifted_r', 'brbg_r', 'gist_ncar_r', 'tarn', 'set1_r', 'rainbow_r', 'bupu_r', 'turbid', 'diff', 'turbid_r', 'ice_r', 'ocean', 'accent_r', 'hot', 'reds_r', 'rdylgn_r', 'gist_yarg', 'hsv', 'nipy_spectral_r', 'greens', 'inferno_r', 'ylgnbu', 'prism_r', 'winter', 'set2_r', 'gnuplot2', 'gnuplot2_r', 'set3', 'speed', 'seismic_r', 'cool', 'bwr', 'purples', 'topo_r', 'spring_r', 'blues', 'rdpu_r', 'terrain_r', 'pastel2_r', 'brg_r', 'rain_r', 'binary_r', 'gist_heat', 'tarn_r', 'amp_r', 'topo', 'prgn', 'phase_r', 'gist_ncar', 'blues_r', 'delta_r', 'tab20_r', 'delta', 'solar_r', 'summer_r', 'oranges', 'tempo', 'spectral_r', 'gist_earth', 'gnuplot', 'piyg', 'viridis', 'orrd', 'cubehelix_r', 'phase', 'magma_r', 'viridis_r', 'twilight_r', 'wistia', 'curl', 'cividis_r', 'tab20b', 'gist_rainbow', 'winter_r', 'pastel1_r', 'flag_r', 'ocean_r', 'bugn', 'rdgy', 'rain', 'algae_r', 'wistia_r', 'accent', 'tempo_r', 'afmhot', 'amp', 'rdbu_r', 'puor_r', 'ylorrd', 'pubu_r', 'brbg', 'pink_r', 'greys_r', 'pubugn_r', 'cmrmap', 'flag', 'turbo', 'oxy_r', 'ylorbr_r', 'matter_r', 'twilight', 'deep', 'purd', 'coolwarm', 'gist_rainbow_r', 'spring', 'autumn', 'spectral', 'hot_r', 'coolwarm_r', 'schwarzwald', 'tab10_r', 'speed_r', 'ylgn_r', 'gnuplot_r', 'gist_heat_r', 'rdylbu', 'orrd_r', 'piyg_r', 'balance', 'balance_r', 'pubu', 'pink', 'prgn_r', 'inferno', 'bupu', 'dark2_r', 'deep_r', 'matter', 'jet', 'tab20c_r', 'diff_r', 'nipy_spectral', 'gist_earth_r', 'gist_stern_r', 'haline', 'turbo_r', 'prism', 'purd_r', 'haline_r', 'rplumbo', 'gist_gray', 'greens_r', 'gray', 'algae', 'tab10', 'hsv_r', 'autumn_r', 'rdpu', 'thermal_r', 'oxy', 'cmrmap_r', 'cubehelix', 'ylgnbu_r', 'dense', 'bugn_r', 'gist_stern', 'tab20', 'ylorbr', 'summer', 'rdylgn', 'tab20b_r', 'jet_r', 'paired_r', 'dark2', 'binary', 'twilight_shifted', 'seismic', 'pubugn', 'ylgn', 'rdbu', 'bone', 'tab20c', 'dense_r', 'gnbu', 'set2', 'paired', 'gnbu_r', 'copper_r', 'gist_yarg_r', 'pastel1', 'bwr_r', 'greys', 'puor', 'oranges_r', 'cool_r', 'afmhot_r', 'rdgy_r', 'bone_r', 'thermal', 'ice', 'gray_r', 'copper', 'cfastie', 'curl_r', 'reds', 'brg', 'solar', 'ylorrd_r', 'magma', 'plasma', 'set3_r', 'rainbow', 'terrain', 'gist_gray_r', 'set1'], Query(PydanticUndefined)] = None, + colormap: Annotated[Optional[str], Query(PydanticUndefined)] = None +) +``` + + +#### environment_dependency + +```python3 +def environment_dependency( + +) +``` + + +#### path_dependency + +```python3 +def path_dependency( + url: typing.Annotated[str, Query(PydanticUndefined)] +) -> str +``` + +Create dataset path from args + + +#### pixel_selection_dependency + +```python3 +def pixel_selection_dependency( + pixel_selection: Annotated[Literal['first', 'highest', 'lowest', 'mean', 'median', 'stdev', 'lastbandlow', 'lastbandhight', 'count'], Query(PydanticUndefined)] = 'first' +) -> rio_tiler.mosaic.methods.base.MosaicMethodBase +``` + +Returns the mosaic method used to combine datasets together. + + +#### process_dependency + +```python3 +def process_dependency( + algorithm: Annotated[Literal['hillshade', 'contours', 'normalizedIndex', 'terrarium', 'terrainrgb'], Query(PydanticUndefined)] = None, + algorithm_params: Annotated[Optional[str], Query(PydanticUndefined)] = None +) -> Optional[titiler.core.algorithm.base.BaseAlgorithm] +``` + +Data Post-Processing options. 
+ + +#### register_routes + +```python3 +def register_routes( + self +) -> None +``` + +This Method register routes to the router. + + +#### rescale_dependency + +```python3 +def rescale_dependency( + rescale: Annotated[Optional[List[str]], Query(PydanticUndefined)] = None +) -> Optional[List[Tuple[float, ...]]] +``` + +Min/Max data Rescaling + + +#### url_for + +```python3 +def url_for( + self, + request: starlette.requests.Request, + name: str, + **path_params: Any +) -> str +``` + +Return full url (with prefix) for a specific endpoint. \ No newline at end of file diff --git a/1.3.0/api/titiler/pgstac/factory/index.html b/1.3.0/api/titiler/pgstac/factory/index.html new file mode 100644 index 00000000..920d63f3 --- /dev/null +++ b/1.3.0/api/titiler/pgstac/factory/index.html @@ -0,0 +1,1805 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + factory - TiTiler.PgSTAC + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

Module titiler.pgstac.factory

+

Custom MosaicTiler Factory for PgSTAC Mosaic Backend.

+

Variables

+
DEFAULT_TEMPLATES
+
+
MAX_THREADS
+
+
MOSAIC_STRICT_ZOOM
+
+
MOSAIC_THREADS
+
+
WGS84_CRS
+
+
img_endpoint_params
+
+
jinja2_env
+
+

Functions

+

add_search_list_route

+
def add_search_list_route(
+    app: fastapi.applications.FastAPI,
+    *,
+    prefix: str = '',
+    tags: Optional[List[str]] = None
+)
+
+

Add PgSTAC Search (of type mosaic) listing route.

+

add_search_register_route

+
def add_search_register_route(
+    app: fastapi.applications.FastAPI,
+    *,
+    prefix: str = '',
+    search_dependency: Callable[..., Tuple[titiler.pgstac.model.PgSTACSearch, titiler.pgstac.model.Metadata]] = <function SearchParams at 0x7f56f67a4400>,
+    tile_dependencies: Optional[List[Callable]] = None,
+    tags: Optional[List[str]] = None
+)
+
+

add /register route

+
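As a minimal sketch (the prefix and tags below are illustrative choices, not defaults of these helpers), the two routes above can be attached to an existing FastAPI application:

```python3
from fastapi import FastAPI

from titiler.pgstac.factory import add_search_list_route, add_search_register_route

app = FastAPI()

# POST .../register and GET .../list endpoints; prefix and tags are illustrative.
add_search_register_route(app, prefix="/searches", tags=["STAC Search"])
add_search_list_route(app, prefix="/searches", tags=["STAC Search"])
```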

check_query_params

+
def check_query_params(
+    *,
+    dependencies: List[Callable],
+    query_params: Union[starlette.datastructures.QueryParams, Dict]
+) -> None
+
+

Check QueryParams for Query dependency.

+
  1. get_dependant is used to get the query-parameters required by the callable
  2. we use request_params_to_args to construct arguments needed to call the callable
  3. we call the callable and catch any errors
+

Important: We assume the callable is not a co-routine

+
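A minimal usage sketch, reusing the rescaling dependency documented further down (the query string is made up; the call returns None when the parameters satisfy the dependency and raises otherwise):

```python3
from starlette.datastructures import QueryParams

from titiler.core.dependencies import RescalingParams
from titiler.pgstac.factory import check_query_params

# Validate a raw query string against the rescale dependency before registering a search.
check_query_params(
    dependencies=[RescalingParams],
    query_params=QueryParams("rescale=0,1000"),
)
```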

Classes

+

MosaicTilerFactory

+
class MosaicTilerFactory(
+    reader: Type[cogeo_mosaic.backends.base.BaseBackend] = <class 'titiler.pgstac.mosaic.PGSTACBackend'>,
+    router: fastapi.routing.APIRouter = <factory>,
+    path_dependency: Callable[..., str] = <function DatasetPathParams at 0x7f56ff3c9e40>,
+    layer_dependency: Type[titiler.core.dependencies.DefaultDependency] = <class 'titiler.core.dependencies.AssetsBidxExprParams'>,
+    dataset_dependency: Type[titiler.core.dependencies.DefaultDependency] = <class 'titiler.core.dependencies.DatasetParams'>,
+    process_dependency: Callable[..., Optional[titiler.core.algorithm.base.BaseAlgorithm]] = <function Algorithms.dependency.<locals>.post_process at 0x7f56f66db560>,
+    rescale_dependency: Callable[..., Optional[List[Tuple[float, ...]]]] = <function RescalingParams at 0x7f56f6aa71a0>,
+    color_formula_dependency: Callable[..., Optional[str]] = <function ColorFormulaParams at 0x7f56f68d1da0>,
+    colormap_dependency: Callable[..., Union[Dict[int, Tuple[int, int, int, int]], Sequence[Tuple[Tuple[Union[float, int], Union[float, int]], Tuple[int, int, int, int]]], NoneType]] = <function create_colormap_dependency.<locals>.deps at 0x7f56ff3c9da0>,
+    render_dependency: Type[titiler.core.dependencies.DefaultDependency] = <class 'titiler.core.dependencies.ImageRenderingParams'>,
+    reader_dependency: Type[titiler.core.dependencies.DefaultDependency] = <class 'titiler.core.dependencies.DefaultDependency'>,
+    environment_dependency: Callable[..., Dict] = <function BaseTilerFactory.<lambda> at 0x7f56f66db380>,
+    supported_tms: morecantile.defaults.TileMatrixSets = TileMatrixSets(tms={'CDB1GlobalGrid': PosixPath('/opt/hostedtoolcache/Python/3.11.9/x64/lib/python3.11/site-packages/morecantile/data/CDB1GlobalGrid.json'), 'CanadianNAD83_LCC': PosixPath('/opt/hostedtoolcache/Python/3.11.9/x64/lib/python3.11/site-packages/morecantile/data/CanadianNAD83_LCC.json'), 'EuropeanETRS89_LAEAQuad': PosixPath('/opt/hostedtoolcache/Python/3.11.9/x64/lib/python3.11/site-packages/morecantile/data/EuropeanETRS89_LAEAQuad.json'), 'GNOSISGlobalGrid': PosixPath('/opt/hostedtoolcache/Python/3.11.9/x64/lib/python3.11/site-packages/morecantile/data/GNOSISGlobalGrid.json'), 'LINZAntarticaMapTilegrid': PosixPath('/opt/hostedtoolcache/Python/3.11.9/x64/lib/python3.11/site-packages/morecantile/data/LINZAntarticaMapTilegrid.json'), 'NZTM2000Quad': PosixPath('/opt/hostedtoolcache/Python/3.11.9/x64/lib/python3.11/site-packages/morecantile/data/NZTM2000Quad.json'), 'UPSAntarcticWGS84Quad': PosixPath('/opt/hostedtoolcache/Python/3.11.9/x64/lib/python3.11/site-packages/morecantile/data/UPSAntarcticWGS84Quad.json'), 'UPSArcticWGS84Quad': PosixPath('/opt/hostedtoolcache/Python/3.11.9/x64/lib/python3.11/site-packages/morecantile/data/UPSArcticWGS84Quad.json'), 'UTM31WGS84Quad': PosixPath('/opt/hostedtoolcache/Python/3.11.9/x64/lib/python3.11/site-packages/morecantile/data/UTM31WGS84Quad.json'), 'WGS1984Quad': PosixPath('/opt/hostedtoolcache/Python/3.11.9/x64/lib/python3.11/site-packages/morecantile/data/WGS1984Quad.json'), 'WebMercatorQuad': <TileMatrixSet title='Google Maps Compatible for the World' id='WebMercatorQuad' crs='http://www.opengis.net/def/crs/EPSG/0/3857>, 'WorldCRS84Quad': PosixPath('/opt/hostedtoolcache/Python/3.11.9/x64/lib/python3.11/site-packages/morecantile/data/WorldCRS84Quad.json'), 'WorldMercatorWGS84Quad': PosixPath('/opt/hostedtoolcache/Python/3.11.9/x64/lib/python3.11/site-packages/morecantile/data/WorldMercatorWGS84Quad.json')}),
+    default_tms: Optional[str] = None,
+    router_prefix: str = '',
+    optional_headers: List[titiler.core.resources.enums.OptionalHeader] = <factory>,
+    route_dependencies: List[Tuple[List[titiler.core.routing.EndpointScope], List[fastapi.params.Depends]]] = <factory>,
+    extensions: List[titiler.core.factory.FactoryExtension] = <factory>,
+    templates: starlette.templating.Jinja2Templates = <starlette.templating.Jinja2Templates object at 0x7f56f6626dd0>,
+    stats_dependency: Type[titiler.core.dependencies.DefaultDependency] = <class 'titiler.core.dependencies.StatisticsParams'>,
+    histogram_dependency: Type[titiler.core.dependencies.DefaultDependency] = <class 'titiler.core.dependencies.HistogramParams'>,
+    tile_dependency: Type[titiler.core.dependencies.DefaultDependency] = <class 'titiler.core.dependencies.TileParams'>,
+    img_part_dependency: Type[titiler.core.dependencies.DefaultDependency] = <class 'titiler.core.dependencies.PartFeatureParams'>,
+    pixel_selection_dependency: Callable[..., rio_tiler.mosaic.methods.base.MosaicMethodBase] = <function PixelSelectionParams at 0x7f56f60caca0>,
+    pgstac_dependency: Type[titiler.core.dependencies.DefaultDependency] = <class 'titiler.pgstac.dependencies.PgSTACParams'>,
+    backend_dependency: Type[titiler.core.dependencies.DefaultDependency] = <class 'titiler.pgstac.dependencies.BackendParams'>,
+    add_statistics: bool = False,
+    add_viewer: bool = False,
+    add_part: bool = False
+)
+
+

Custom MosaicTiler for PgSTAC Mosaic Backend.

+
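A minimal wiring example. The prefix, the enabled extras, and the SearchIdParams path dependency (from titiler.pgstac.dependencies, as used by the shipped application) are assumptions about a typical deployment, not requirements of the class:

```python3
from fastapi import FastAPI

from titiler.pgstac.dependencies import SearchIdParams  # assumed path dependency for {search_id}
from titiler.pgstac.factory import MosaicTilerFactory

app = FastAPI()

mosaic = MosaicTilerFactory(
    path_dependency=SearchIdParams,
    router_prefix="/searches/{search_id}",
    add_viewer=True,
    add_statistics=True,
)
app.include_router(mosaic.router, prefix="/searches/{search_id}", tags=["STAC Search"])
```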

Ancestors (in MRO)

+
  • titiler.core.factory.BaseTilerFactory
+

Class variables

+
add_part
+
+
add_statistics
+
+
add_viewer
+
+
backend_dependency
+
+
dataset_dependency
+
+
default_tms
+
+
histogram_dependency
+
+
img_part_dependency
+
+
layer_dependency
+
+
pgstac_dependency
+
+
reader
+
+
reader_dependency
+
+
render_dependency
+
+
router_prefix
+
+
stats_dependency
+
+
supported_tms
+
+
templates
+
+
tile_dependency
+
+

Methods

+

add_route_dependencies

+
def add_route_dependencies(
+    self,
+    *,
+    scopes: List[titiler.core.routing.EndpointScope],
+    dependencies=typing.List[fastapi.params.Depends]
+)
+
+

Add dependencies to routes.

+

Allows a developer to add dependencies to a route after the route has been defined.

+

color_formula_dependency

+
def color_formula_dependency(
+    color_formula: Annotated[Optional[str], Query(PydanticUndefined)] = None
+) -> Optional[str]
+
+

ColorFormula Parameter.

+

colormap_dependency

+
def colormap_dependency(
+    colormap_name: Annotated[Literal['plasma_r', 'pastel2', 'rdylbu_r', 'cividis', 'purples_r', 'twilight_shifted_r', 'brbg_r', 'gist_ncar_r', 'tarn', 'set1_r', 'rainbow_r', 'bupu_r', 'turbid', 'diff', 'turbid_r', 'ice_r', 'ocean', 'accent_r', 'hot', 'reds_r', 'rdylgn_r', 'gist_yarg', 'hsv', 'nipy_spectral_r', 'greens', 'inferno_r', 'ylgnbu', 'prism_r', 'winter', 'set2_r', 'gnuplot2', 'gnuplot2_r', 'set3', 'speed', 'seismic_r', 'cool', 'bwr', 'purples', 'topo_r', 'spring_r', 'blues', 'rdpu_r', 'terrain_r', 'pastel2_r', 'brg_r', 'rain_r', 'binary_r', 'gist_heat', 'tarn_r', 'amp_r', 'topo', 'prgn', 'phase_r', 'gist_ncar', 'blues_r', 'delta_r', 'tab20_r', 'delta', 'solar_r', 'summer_r', 'oranges', 'tempo', 'spectral_r', 'gist_earth', 'gnuplot', 'piyg', 'viridis', 'orrd', 'cubehelix_r', 'phase', 'magma_r', 'viridis_r', 'twilight_r', 'wistia', 'curl', 'cividis_r', 'tab20b', 'gist_rainbow', 'winter_r', 'pastel1_r', 'flag_r', 'ocean_r', 'bugn', 'rdgy', 'rain', 'algae_r', 'wistia_r', 'accent', 'tempo_r', 'afmhot', 'amp', 'rdbu_r', 'puor_r', 'ylorrd', 'pubu_r', 'brbg', 'pink_r', 'greys_r', 'pubugn_r', 'cmrmap', 'flag', 'turbo', 'oxy_r', 'ylorbr_r', 'matter_r', 'twilight', 'deep', 'purd', 'coolwarm', 'gist_rainbow_r', 'spring', 'autumn', 'spectral', 'hot_r', 'coolwarm_r', 'schwarzwald', 'tab10_r', 'speed_r', 'ylgn_r', 'gnuplot_r', 'gist_heat_r', 'rdylbu', 'orrd_r', 'piyg_r', 'balance', 'balance_r', 'pubu', 'pink', 'prgn_r', 'inferno', 'bupu', 'dark2_r', 'deep_r', 'matter', 'jet', 'tab20c_r', 'diff_r', 'nipy_spectral', 'gist_earth_r', 'gist_stern_r', 'haline', 'turbo_r', 'prism', 'purd_r', 'haline_r', 'rplumbo', 'gist_gray', 'greens_r', 'gray', 'algae', 'tab10', 'hsv_r', 'autumn_r', 'rdpu', 'thermal_r', 'oxy', 'cmrmap_r', 'cubehelix', 'ylgnbu_r', 'dense', 'bugn_r', 'gist_stern', 'tab20', 'ylorbr', 'summer', 'rdylgn', 'tab20b_r', 'jet_r', 'paired_r', 'dark2', 'binary', 'twilight_shifted', 'seismic', 'pubugn', 'ylgn', 'rdbu', 'bone', 'tab20c', 'dense_r', 'gnbu', 'set2', 'paired', 'gnbu_r', 'copper_r', 'gist_yarg_r', 'pastel1', 'bwr_r', 'greys', 'puor', 'oranges_r', 'cool_r', 'afmhot_r', 'rdgy_r', 'bone_r', 'thermal', 'ice', 'gray_r', 'copper', 'cfastie', 'curl_r', 'reds', 'brg', 'solar', 'ylorrd_r', 'magma', 'plasma', 'set3_r', 'rainbow', 'terrain', 'gist_gray_r', 'set1'], Query(PydanticUndefined)] = None,
+    colormap: Annotated[Optional[str], Query(PydanticUndefined)] = None
+)
+
+

environment_dependency

+
def environment_dependency(
+
+)
+
+

path_dependency

+
def path_dependency(
+    url: typing.Annotated[str, Query(PydanticUndefined)]
+) -> str
+
+

Create dataset path from args

+

pixel_selection_dependency

+
def pixel_selection_dependency(
+    pixel_selection: Annotated[Literal['first', 'highest', 'lowest', 'mean', 'median', 'stdev', 'lastbandlow', 'lastbandhight', 'count'], Query(PydanticUndefined)] = 'first'
+) -> rio_tiler.mosaic.methods.base.MosaicMethodBase
+
+

Returns the mosaic method used to combine datasets together.

+
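The literal names correspond to the rio-tiler mosaic methods; the mapping below is an illustrative subset, not the dependency's actual lookup table:

```python3
from rio_tiler.mosaic.methods import defaults

# Illustrative subset of the name -> MosaicMethod correspondence.
PIXEL_SELECTION = {
    "first": defaults.FirstMethod,
    "highest": defaults.HighestMethod,
    "mean": defaults.MeanMethod,
    "median": defaults.MedianMethod,
    "count": defaults.CountMethod,
}

method = PIXEL_SELECTION["median"]()  # e.g. what ?pixel_selection=median resolves to
```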

process_dependency

+
def process_dependency(
+    algorithm: Annotated[Literal['hillshade', 'contours', 'normalizedIndex', 'terrarium', 'terrainrgb'], Query(PydanticUndefined)] = None,
+    algorithm_params: Annotated[Optional[str], Query(PydanticUndefined)] = None
+) -> Optional[titiler.core.algorithm.base.BaseAlgorithm]
+
+

Data Post-Processing options.

+
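As a sketch, the algorithm query parameter picks one of the names above and algorithm_params carries its options as a JSON string (the hillshade option names below are assumptions; check the algorithm's model for the real ones):

```python3
import json

# Query parameters one might send alongside a tile request.
params = {
    "algorithm": "hillshade",
    "algorithm_params": json.dumps({"azimuth": 315, "angle_altitude": 45}),  # names assumed
}
```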

register_routes

+
def register_routes(
+    self
+) -> None
+
+

This method registers routes to the router.

+

rescale_dependency

+
def rescale_dependency(
+    rescale: Annotated[Optional[List[str]], Query(PydanticUndefined)] = None
+) -> Optional[List[Tuple[float, ...]]]
+
+

Min/Max data Rescaling

+
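Each repeated rescale value is a comma-separated min,max pair that is turned into a tuple of floats; a small illustration of the resulting shape (inferred from the signature above):

```python3
# ?rescale=0,1000&rescale=0,255 -> [(0.0, 1000.0), (0.0, 255.0)]
rescale = ["0,1000", "0,255"]
parsed = [tuple(map(float, r.split(","))) for r in rescale]
assert parsed == [(0.0, 1000.0), (0.0, 255.0)]
```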

url_for

+
def url_for(
+    self,
+    request: starlette.requests.Request,
+    name: str,
+    **path_params: Any
+) -> str
+
+

Return full url (with prefix) for a specific endpoint.

+ + + + + + + + + + \ No newline at end of file diff --git a/1.3.0/api/titiler/pgstac/model/index.html b/1.3.0/api/titiler/pgstac/model/index.html new file mode 100644 index 00000000..c41ed40f --- /dev/null +++ b/1.3.0/api/titiler/pgstac/model/index.html @@ -0,0 +1,14550 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + model - TiTiler.PgSTAC + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

Module titiler.pgstac.model

+

Titiler.pgstac models.

+

Note: This is mostly a copy of github.com/stac-utils/stac-fastapi/blob/master/stac_fastapi/pgstac/stac_fastapi/pgstac/types/search.py

+

Variables

+
FilterLang
+
+
Operator
+
+

Classes

+

Context

+
class Context(
+    /,
+    **data: 'Any'
+)
+
+

Context Model.

+

Ancestors (in MRO)

+
  • pydantic.main.BaseModel
+

Class variables

+
model_computed_fields
+
+
model_config
+
+
model_fields
+
+

Static methods

+

construct

+
def construct(
+    _fields_set: 'set[str] | None' = None,
+    **values: 'Any'
+) -> 'Model'
+
+

from_orm

+
def from_orm(
+    obj: 'Any'
+) -> 'Model'
+
+

model_construct

+
def model_construct(
+    _fields_set: 'set[str] | None' = None,
+    **values: 'Any'
+) -> 'Model'
+
+

Creates a new instance of the Model class with validated data.

+

Creates a new model setting __dict__ and __pydantic_fields_set__ from trusted or pre-validated data. +Default values are respected, but no other validation is performed.

+
+

Note

+

model_construct() generally respects the model_config.extra setting on the provided model. +That is, if model_config.extra == 'allow', then all extra passed values are added to the model instance's __dict__ +and __pydantic_extra__ fields. If model_config.extra == 'ignore' (the default), then all extra passed values are ignored. +Because no validation is performed with a call to model_construct(), having model_config.extra == 'forbid' does not result in +an error if extra values are passed, but they will be ignored.

+
+

Parameters:

| Name | Type | Description | Default |
| --- | --- | --- | --- |
| _fields_set | None | The set of field names accepted for the Model instance. | None |
| values | None | Trusted or pre-validated data dictionary. | None |

Returns:

| Type | Description |
| --- | --- |
| None | A new instance of the Model class with validated data. |
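A minimal sketch with the Context model (the returned/matched/limit field names are assumptions based on the STAC context object; check the model definition):

```python3
from titiler.pgstac.model import Context

# Skip validation for values we already trust, e.g. numbers coming back from PgSTAC.
ctx = Context.model_construct(returned=10, limit=10, matched=120)
```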

model_json_schema

+
def model_json_schema(
+    by_alias: 'bool' = True,
+    ref_template: 'str' = '#/$defs/{model}',
+    schema_generator: 'type[GenerateJsonSchema]' = <class 'pydantic.json_schema.GenerateJsonSchema'>,
+    mode: 'JsonSchemaMode' = 'validation'
+) -> 'dict[str, Any]'
+
+

Generates a JSON schema for a model class.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
by_aliasNoneWhether to use attribute aliases or not.None
ref_templateNoneThe reference template.None
schema_generatorNoneTo override the logic used to generate the JSON schema, as a subclass of
GenerateJsonSchema with your desired modifications
None
modeNoneThe mode in which to generate the schema.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
NoneThe JSON schema for the given model class.
+

model_parametrized_name

+
def model_parametrized_name(
+    params: 'tuple[type[Any], ...]'
+) -> 'str'
+
+

Compute the class name for parametrizations of generic classes.

+

This method can be overridden to achieve a custom naming scheme for generic BaseModels.

+

Parameters:

+ + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
paramsNoneTuple of types of the class. Given a generic class
Model with 2 type variables and a concrete model Model[str, int],
the value (str, int) would be passed to params.
None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
NoneString representing the new class where params are passed to cls as type variables.
+

Raises:

+ + + + + + + + + + + + + +
TypeDescription
TypeErrorRaised when trying to generate concrete names for non-generic models.
+

model_rebuild

+
def model_rebuild(
+    *,
+    force: 'bool' = False,
+    raise_errors: 'bool' = True,
+    _parent_namespace_depth: 'int' = 2,
+    _types_namespace: 'dict[str, Any] | None' = None
+) -> 'bool | None'
+
+

Try to rebuild the pydantic-core schema for the model.

+

This may be necessary when one of the annotations is a ForwardRef which could not be resolved during +the initial attempt to build the schema, and automatic rebuilding fails.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
forceNoneWhether to force the rebuilding of the model schema, defaults to False.None
raise_errorsNoneWhether to raise errors, defaults to True.None
_parent_namespace_depthNoneThe depth level of the parent namespace, defaults to 2.None
_types_namespaceNoneThe types namespace, defaults to None.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
NoneReturns None if the schema is already "complete" and rebuilding was not required.
If rebuilding was required, returns True if rebuilding was successful, otherwise False.
+

model_validate

+
def model_validate(
+    obj: 'Any',
+    *,
+    strict: 'bool | None' = None,
+    from_attributes: 'bool | None' = None,
+    context: 'dict[str, Any] | None' = None
+) -> 'Model'
+
+

Validate a pydantic model instance.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
objNoneThe object to validate.None
strictNoneWhether to enforce types strictly.None
from_attributesNoneWhether to extract data from object attributes.None
contextNoneAdditional context to pass to the validator.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
NoneThe validated model instance.
+

Raises:

+ + + + + + + + + + + + + +
TypeDescription
ValidationErrorIf the object could not be validated.
+

model_validate_json

+
def model_validate_json(
+    json_data: 'str | bytes | bytearray',
+    *,
+    strict: 'bool | None' = None,
+    context: 'dict[str, Any] | None' = None
+) -> 'Model'
+
+

Usage docs: docs.pydantic.dev/2.7/concepts/json/#json-parsing

+

Validate the given JSON data against the Pydantic model.

+

Parameters:

| Name | Type | Description | Default |
| --- | --- | --- | --- |
| json_data | None | The JSON data to validate. | None |
| strict | None | Whether to enforce types strictly. | None |
| context | None | Extra variables to pass to the validator. | None |

Returns:

| Type | Description |
| --- | --- |
| None | The validated Pydantic model. |

Raises:

| Type | Description |
| --- | --- |
| ValueError | If json_data is not a JSON string. |
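For example, again assuming returned/matched/limit fields on Context:

```python3
from titiler.pgstac.model import Context

ctx = Context.model_validate_json('{"returned": 10, "matched": 120, "limit": 10}')
assert ctx.matched == 120
```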

model_validate_strings

+
def model_validate_strings(
+    obj: 'Any',
+    *,
+    strict: 'bool | None' = None,
+    context: 'dict[str, Any] | None' = None
+) -> 'Model'
+
+

Validate the given object (containing string data) against the Pydantic model.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
objNoneThe object contains string data to validate.None
strictNoneWhether to enforce types strictly.None
contextNoneExtra variables to pass to the validator.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
NoneThe validated Pydantic model.
+

parse_file

+
def parse_file(
+    path: 'str | Path',
+    *,
+    content_type: 'str | None' = None,
+    encoding: 'str' = 'utf8',
+    proto: 'DeprecatedParseProtocol | None' = None,
+    allow_pickle: 'bool' = False
+) -> 'Model'
+
+

parse_obj

+
def parse_obj(
+    obj: 'Any'
+) -> 'Model'
+
+

parse_raw

+
def parse_raw(
+    b: 'str | bytes',
+    *,
+    content_type: 'str | None' = None,
+    encoding: 'str' = 'utf8',
+    proto: 'DeprecatedParseProtocol | None' = None,
+    allow_pickle: 'bool' = False
+) -> 'Model'
+
+

schema

+
def schema(
+    by_alias: 'bool' = True,
+    ref_template: 'str' = '#/$defs/{model}'
+) -> 'typing.Dict[str, Any]'
+
+

schema_json

+
def schema_json(
+    *,
+    by_alias: 'bool' = True,
+    ref_template: 'str' = '#/$defs/{model}',
+    **dumps_kwargs: 'Any'
+) -> 'str'
+
+

update_forward_refs

+
def update_forward_refs(
+    **localns: 'Any'
+) -> 'None'
+
+

validate

+
def validate(
+    value: 'Any'
+) -> 'Model'
+
+

validate_limit

+
def validate_limit(
+    v,
+    info: pydantic_core.core_schema.ValidationInfo
+)
+
+

validate limit.

+

Instance variables

+
model_extra
+
+

Get extra fields set during validation.

+
model_fields_set
+
+

Returns the set of fields that have been explicitly set on this model instance.

+

Methods

+

copy

+
def copy(
+    self: 'Model',
+    *,
+    include: 'AbstractSetIntStr | MappingIntStrAny | None' = None,
+    exclude: 'AbstractSetIntStr | MappingIntStrAny | None' = None,
+    update: 'typing.Dict[str, Any] | None' = None,
+    deep: 'bool' = False
+) -> 'Model'
+
+

Returns a copy of the model.

+
+

Deprecated

+

This method is now deprecated; use model_copy instead.

+
+

If you need include or exclude, use:

+
data = self.model_dump(include=include, exclude=exclude, round_trip=True)
+data = {**data, **(update or {})}
+copied = self.model_validate(data)
+
+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
includeNoneOptional set or mapping specifying which fields to include in the copied model.None
excludeNoneOptional set or mapping specifying which fields to exclude in the copied model.None
updateNoneOptional dictionary of field-value pairs to override field values in the copied model.None
deepNoneIf True, the values of fields that are Pydantic models will be deep-copied.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
NoneA copy of the model with included, excluded and updated fields as specified.
+

dict

+
def dict(
+    self,
+    *,
+    include: 'IncEx' = None,
+    exclude: 'IncEx' = None,
+    by_alias: 'bool' = False,
+    exclude_unset: 'bool' = False,
+    exclude_defaults: 'bool' = False,
+    exclude_none: 'bool' = False
+) -> 'typing.Dict[str, Any]'
+
+

json

+
def json(
+    self,
+    *,
+    include: 'IncEx' = None,
+    exclude: 'IncEx' = None,
+    by_alias: 'bool' = False,
+    exclude_unset: 'bool' = False,
+    exclude_defaults: 'bool' = False,
+    exclude_none: 'bool' = False,
+    encoder: 'typing.Callable[[Any], Any] | None' = PydanticUndefined,
+    models_as_dict: 'bool' = PydanticUndefined,
+    **dumps_kwargs: 'Any'
+) -> 'str'
+
+

model_copy

+
def model_copy(
+    self: 'Model',
+    *,
+    update: 'dict[str, Any] | None' = None,
+    deep: 'bool' = False
+) -> 'Model'
+
+

Usage docs: docs.pydantic.dev/2.7/concepts/serialization/#model_copy

+

Returns a copy of the model.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
updateNoneValues to change/add in the new model. Note: the data is not validated
before creating the new model. You should trust this data.
None
deepNoneSet to True to make a deep copy of the model.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
NoneNew model instance.
+

model_dump

+
def model_dump(
+    self,
+    *,
+    mode: "Literal['json', 'python'] | str" = 'python',
+    include: 'IncEx' = None,
+    exclude: 'IncEx' = None,
+    context: 'dict[str, Any] | None' = None,
+    by_alias: 'bool' = False,
+    exclude_unset: 'bool' = False,
+    exclude_defaults: 'bool' = False,
+    exclude_none: 'bool' = False,
+    round_trip: 'bool' = False,
+    warnings: "bool | Literal['none', 'warn', 'error']" = True,
+    serialize_as_any: 'bool' = False
+) -> 'dict[str, Any]'
+
+

Usage docs: docs.pydantic.dev/2.7/concepts/serialization/#modelmodel_dump

+

Generate a dictionary representation of the model, optionally specifying which fields to include or exclude.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
modeNoneThe mode in which to_python should run.
If mode is 'json', the output will only contain JSON serializable types.
If mode is 'python', the output may contain non-JSON-serializable Python objects.
None
includeNoneA set of fields to include in the output.None
excludeNoneA set of fields to exclude from the output.None
contextNoneAdditional context to pass to the serializer.None
by_aliasNoneWhether to use the field's alias in the dictionary key if defined.None
exclude_unsetNoneWhether to exclude fields that have not been explicitly set.None
exclude_defaultsNoneWhether to exclude fields that are set to their default value.None
exclude_noneNoneWhether to exclude fields that have a value of None.None
round_tripNoneIf True, dumped values should be valid as input for non-idempotent types such as Json[T].None
warningsNoneHow to handle serialization errors. False/"none" ignores them, True/"warn" logs errors,
"error" raises a [PydanticSerializationError][pydantic_core.PydanticSerializationError].
None
serialize_as_anyNoneWhether to serialize fields with duck-typing serialization behavior.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
NoneA dictionary representation of the model.
+
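Continuing the Context example (field names assumed), the mode and exclude flags shape the output dictionary:

```python3
from titiler.pgstac.model import Context

ctx = Context(returned=10, matched=120, limit=10)

ctx.model_dump(exclude_none=True)  # plain Python dict without None fields
ctx.model_dump(mode="json")        # JSON-serializable values only
```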

model_dump_json

+
def model_dump_json(
+    self,
+    *,
+    indent: 'int | None' = None,
+    include: 'IncEx' = None,
+    exclude: 'IncEx' = None,
+    context: 'dict[str, Any] | None' = None,
+    by_alias: 'bool' = False,
+    exclude_unset: 'bool' = False,
+    exclude_defaults: 'bool' = False,
+    exclude_none: 'bool' = False,
+    round_trip: 'bool' = False,
+    warnings: "bool | Literal['none', 'warn', 'error']" = True,
+    serialize_as_any: 'bool' = False
+) -> 'str'
+
+

Usage docs: docs.pydantic.dev/2.7/concepts/serialization/#modelmodel_dump_json

+

Generates a JSON representation of the model using Pydantic's to_json method.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
indentNoneIndentation to use in the JSON output. If None is passed, the output will be compact.None
includeNoneField(s) to include in the JSON output.None
excludeNoneField(s) to exclude from the JSON output.None
contextNoneAdditional context to pass to the serializer.None
by_aliasNoneWhether to serialize using field aliases.None
exclude_unsetNoneWhether to exclude fields that have not been explicitly set.None
exclude_defaultsNoneWhether to exclude fields that are set to their default value.None
exclude_noneNoneWhether to exclude fields that have a value of None.None
round_tripNoneIf True, dumped values should be valid as input for non-idempotent types such as Json[T].None
warningsNoneHow to handle serialization errors. False/"none" ignores them, True/"warn" logs errors,
"error" raises a [PydanticSerializationError][pydantic_core.PydanticSerializationError].
None
serialize_as_anyNoneWhether to serialize fields with duck-typing serialization behavior.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
NoneA JSON string representation of the model.
+

model_post_init

+
def model_post_init(
+    self,
+    _BaseModel__context: 'Any'
+) -> 'None'
+
+

Override this method to perform additional initialization after __init__ and model_construct.

+

This is useful if you want to do some validation that requires the entire model to be initialized.

+
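A small generic sketch of the hook; the Window model is hypothetical and only illustrates when the method runs:

```python3
from pydantic import BaseModel


class Window(BaseModel):  # hypothetical model, not part of titiler.pgstac
    start: int
    end: int

    def model_post_init(self, __context) -> None:
        # Called after __init__/model_construct, once every field is populated.
        if self.end < self.start:
            raise ValueError("end must be >= start")


Window(start=0, end=10)  # ok; Window(start=10, end=0) would raise
```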

Info

+
class Info(
+    /,
+    **data: 'Any'
+)
+
+

Response model for /info endpoint.

+

Ancestors (in MRO)

+
  • pydantic.main.BaseModel
+

Class variables

+
model_computed_fields
+
+
model_config
+
+
model_fields
+
+

Static methods

+

construct

+
def construct(
+    _fields_set: 'set[str] | None' = None,
+    **values: 'Any'
+) -> 'Model'
+
+

from_orm

+
def from_orm(
+    obj: 'Any'
+) -> 'Model'
+
+

model_construct

+
def model_construct(
+    _fields_set: 'set[str] | None' = None,
+    **values: 'Any'
+) -> 'Model'
+
+

Creates a new instance of the Model class with validated data.

+

Creates a new model setting __dict__ and __pydantic_fields_set__ from trusted or pre-validated data. +Default values are respected, but no other validation is performed.

+
+

Note

+

model_construct() generally respects the model_config.extra setting on the provided model. +That is, if model_config.extra == 'allow', then all extra passed values are added to the model instance's __dict__ +and __pydantic_extra__ fields. If model_config.extra == 'ignore' (the default), then all extra passed values are ignored. +Because no validation is performed with a call to model_construct(), having model_config.extra == 'forbid' does not result in +an error if extra values are passed, but they will be ignored.

+
+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
_fields_setNoneThe set of field names accepted for the Model instance.None
valuesNoneTrusted or pre-validated data dictionary.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
NoneA new instance of the Model class with validated data.
+

model_json_schema

+
def model_json_schema(
+    by_alias: 'bool' = True,
+    ref_template: 'str' = '#/$defs/{model}',
+    schema_generator: 'type[GenerateJsonSchema]' = <class 'pydantic.json_schema.GenerateJsonSchema'>,
+    mode: 'JsonSchemaMode' = 'validation'
+) -> 'dict[str, Any]'
+
+

Generates a JSON schema for a model class.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
by_aliasNoneWhether to use attribute aliases or not.None
ref_templateNoneThe reference template.None
schema_generatorNoneTo override the logic used to generate the JSON schema, as a subclass of
GenerateJsonSchema with your desired modifications
None
modeNoneThe mode in which to generate the schema.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
NoneThe JSON schema for the given model class.
+

model_parametrized_name

+
def model_parametrized_name(
+    params: 'tuple[type[Any], ...]'
+) -> 'str'
+
+

Compute the class name for parametrizations of generic classes.

+

This method can be overridden to achieve a custom naming scheme for generic BaseModels.

+

Parameters:

+ + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
paramsNoneTuple of types of the class. Given a generic class
Model with 2 type variables and a concrete model Model[str, int],
the value (str, int) would be passed to params.
None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
NoneString representing the new class where params are passed to cls as type variables.
+

Raises:

+ + + + + + + + + + + + + +
TypeDescription
TypeErrorRaised when trying to generate concrete names for non-generic models.
+

model_rebuild

+
def model_rebuild(
+    *,
+    force: 'bool' = False,
+    raise_errors: 'bool' = True,
+    _parent_namespace_depth: 'int' = 2,
+    _types_namespace: 'dict[str, Any] | None' = None
+) -> 'bool | None'
+
+

Try to rebuild the pydantic-core schema for the model.

+

This may be necessary when one of the annotations is a ForwardRef which could not be resolved during +the initial attempt to build the schema, and automatic rebuilding fails.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
forceNoneWhether to force the rebuilding of the model schema, defaults to False.None
raise_errorsNoneWhether to raise errors, defaults to True.None
_parent_namespace_depthNoneThe depth level of the parent namespace, defaults to 2.None
_types_namespaceNoneThe types namespace, defaults to None.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
NoneReturns None if the schema is already "complete" and rebuilding was not required.
If rebuilding was required, returns True if rebuilding was successful, otherwise False.
+

model_validate

+
def model_validate(
+    obj: 'Any',
+    *,
+    strict: 'bool | None' = None,
+    from_attributes: 'bool | None' = None,
+    context: 'dict[str, Any] | None' = None
+) -> 'Model'
+
+

Validate a pydantic model instance.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
objNoneThe object to validate.None
strictNoneWhether to enforce types strictly.None
from_attributesNoneWhether to extract data from object attributes.None
contextNoneAdditional context to pass to the validator.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
NoneThe validated model instance.
+

Raises:

+ + + + + + + + + + + + + +
TypeDescription
ValidationErrorIf the object could not be validated.
+

model_validate_json

+
def model_validate_json(
+    json_data: 'str | bytes | bytearray',
+    *,
+    strict: 'bool | None' = None,
+    context: 'dict[str, Any] | None' = None
+) -> 'Model'
+
+

Usage docs: docs.pydantic.dev/2.7/concepts/json/#json-parsing

+

Validate the given JSON data against the Pydantic model.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
json_dataNoneThe JSON data to validate.None
strictNoneWhether to enforce types strictly.None
contextNoneExtra variables to pass to the validator.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
NoneThe validated Pydantic model.
+

Raises:

+ + + + + + + + + + + + + +
TypeDescription
ValueErrorIf json_data is not a JSON string.
+

model_validate_strings

+
def model_validate_strings(
+    obj: 'Any',
+    *,
+    strict: 'bool | None' = None,
+    context: 'dict[str, Any] | None' = None
+) -> 'Model'
+
+

Validate the given object (containing string data) against the Pydantic model.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
objNoneThe object contains string data to validate.None
strictNoneWhether to enforce types strictly.None
contextNoneExtra variables to pass to the validator.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
NoneThe validated Pydantic model.
+

parse_file

+
def parse_file(
+    path: 'str | Path',
+    *,
+    content_type: 'str | None' = None,
+    encoding: 'str' = 'utf8',
+    proto: 'DeprecatedParseProtocol | None' = None,
+    allow_pickle: 'bool' = False
+) -> 'Model'
+
+

parse_obj

+
def parse_obj(
+    obj: 'Any'
+) -> 'Model'
+
+

parse_raw

+
def parse_raw(
+    b: 'str | bytes',
+    *,
+    content_type: 'str | None' = None,
+    encoding: 'str' = 'utf8',
+    proto: 'DeprecatedParseProtocol | None' = None,
+    allow_pickle: 'bool' = False
+) -> 'Model'
+
+

schema

+
def schema(
+    by_alias: 'bool' = True,
+    ref_template: 'str' = '#/$defs/{model}'
+) -> 'typing.Dict[str, Any]'
+
+

schema_json

+
def schema_json(
+    *,
+    by_alias: 'bool' = True,
+    ref_template: 'str' = '#/$defs/{model}',
+    **dumps_kwargs: 'Any'
+) -> 'str'
+
+

update_forward_refs

+
def update_forward_refs(
+    **localns: 'Any'
+) -> 'None'
+
+

validate

+
def validate(
+    value: 'Any'
+) -> 'Model'
+
+

Instance variables

+
model_extra
+
+

Get extra fields set during validation.

+
model_fields_set
+
+

Returns the set of fields that have been explicitly set on this model instance.

+

Methods

+

copy

+
def copy(
+    self: 'Model',
+    *,
+    include: 'AbstractSetIntStr | MappingIntStrAny | None' = None,
+    exclude: 'AbstractSetIntStr | MappingIntStrAny | None' = None,
+    update: 'typing.Dict[str, Any] | None' = None,
+    deep: 'bool' = False
+) -> 'Model'
+
+

Returns a copy of the model.

+
+

Deprecated

+

This method is now deprecated; use model_copy instead.

+
+

If you need include or exclude, use:

+
data = self.model_dump(include=include, exclude=exclude, round_trip=True)
+data = {**data, **(update or {})}
+copied = self.model_validate(data)
+
+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
includeNoneOptional set or mapping specifying which fields to include in the copied model.None
excludeNoneOptional set or mapping specifying which fields to exclude in the copied model.None
updateNoneOptional dictionary of field-value pairs to override field values in the copied model.None
deepNoneIf True, the values of fields that are Pydantic models will be deep-copied.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
NoneA copy of the model with included, excluded and updated fields as specified.
+

dict

+
def dict(
+    self,
+    *,
+    include: 'IncEx' = None,
+    exclude: 'IncEx' = None,
+    by_alias: 'bool' = False,
+    exclude_unset: 'bool' = False,
+    exclude_defaults: 'bool' = False,
+    exclude_none: 'bool' = False
+) -> 'typing.Dict[str, Any]'
+
+

json

+
def json(
+    self,
+    *,
+    include: 'IncEx' = None,
+    exclude: 'IncEx' = None,
+    by_alias: 'bool' = False,
+    exclude_unset: 'bool' = False,
+    exclude_defaults: 'bool' = False,
+    exclude_none: 'bool' = False,
+    encoder: 'typing.Callable[[Any], Any] | None' = PydanticUndefined,
+    models_as_dict: 'bool' = PydanticUndefined,
+    **dumps_kwargs: 'Any'
+) -> 'str'
+
+

model_copy

+
def model_copy(
+    self: 'Model',
+    *,
+    update: 'dict[str, Any] | None' = None,
+    deep: 'bool' = False
+) -> 'Model'
+
+

Usage docs: docs.pydantic.dev/2.7/concepts/serialization/#model_copy

+

Returns a copy of the model.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
updateNoneValues to change/add in the new model. Note: the data is not validated
before creating the new model. You should trust this data.
None
deepNoneSet to True to make a deep copy of the model.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
NoneNew model instance.
+

model_dump

+
def model_dump(
+    self,
+    *,
+    mode: "Literal['json', 'python'] | str" = 'python',
+    include: 'IncEx' = None,
+    exclude: 'IncEx' = None,
+    context: 'dict[str, Any] | None' = None,
+    by_alias: 'bool' = False,
+    exclude_unset: 'bool' = False,
+    exclude_defaults: 'bool' = False,
+    exclude_none: 'bool' = False,
+    round_trip: 'bool' = False,
+    warnings: "bool | Literal['none', 'warn', 'error']" = True,
+    serialize_as_any: 'bool' = False
+) -> 'dict[str, Any]'
+
+

Usage docs: docs.pydantic.dev/2.7/concepts/serialization/#modelmodel_dump

+

Generate a dictionary representation of the model, optionally specifying which fields to include or exclude.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
modeNoneThe mode in which to_python should run.
If mode is 'json', the output will only contain JSON serializable types.
If mode is 'python', the output may contain non-JSON-serializable Python objects.
None
includeNoneA set of fields to include in the output.None
excludeNoneA set of fields to exclude from the output.None
contextNoneAdditional context to pass to the serializer.None
by_aliasNoneWhether to use the field's alias in the dictionary key if defined.None
exclude_unsetNoneWhether to exclude fields that have not been explicitly set.None
exclude_defaultsNoneWhether to exclude fields that are set to their default value.None
exclude_noneNoneWhether to exclude fields that have a value of None.None
round_tripNoneIf True, dumped values should be valid as input for non-idempotent types such as Json[T].None
warningsNoneHow to handle serialization errors. False/"none" ignores them, True/"warn" logs errors,
"error" raises a [PydanticSerializationError][pydantic_core.PydanticSerializationError].
None
serialize_as_anyNoneWhether to serialize fields with duck-typing serialization behavior.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
NoneA dictionary representation of the model.
+

model_dump_json

+
def model_dump_json(
+    self,
+    *,
+    indent: 'int | None' = None,
+    include: 'IncEx' = None,
+    exclude: 'IncEx' = None,
+    context: 'dict[str, Any] | None' = None,
+    by_alias: 'bool' = False,
+    exclude_unset: 'bool' = False,
+    exclude_defaults: 'bool' = False,
+    exclude_none: 'bool' = False,
+    round_trip: 'bool' = False,
+    warnings: "bool | Literal['none', 'warn', 'error']" = True,
+    serialize_as_any: 'bool' = False
+) -> 'str'
+
+

Usage docs: docs.pydantic.dev/2.7/concepts/serialization/#modelmodel_dump_json

+

Generates a JSON representation of the model using Pydantic's to_json method.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
indentNoneIndentation to use in the JSON output. If None is passed, the output will be compact.None
includeNoneField(s) to include in the JSON output.None
excludeNoneField(s) to exclude from the JSON output.None
contextNoneAdditional context to pass to the serializer.None
by_aliasNoneWhether to serialize using field aliases.None
exclude_unsetNoneWhether to exclude fields that have not been explicitly set.None
exclude_defaultsNoneWhether to exclude fields that are set to their default value.None
exclude_noneNoneWhether to exclude fields that have a value of None.None
round_tripNoneIf True, dumped values should be valid as input for non-idempotent types such as Json[T].None
warningsNoneHow to handle serialization errors. False/"none" ignores them, True/"warn" logs errors,
"error" raises a [PydanticSerializationError][pydantic_core.PydanticSerializationError].
None
serialize_as_anyNoneWhether to serialize fields with duck-typing serialization behavior.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
NoneA JSON string representation of the model.
+

model_post_init

+
def model_post_init(
+    self,
+    _BaseModel__context: 'Any'
+) -> 'None'
+
+

Override this method to perform additional initialization after __init__ and model_construct.

+

This is useful if you want to do some validation that requires the entire model to be initialized.

+

Infos

+
class Infos(
+    /,
+    **data: 'Any'
+)
+
+

Response model for /list endpoint.

+

Ancestors (in MRO)

+
  • pydantic.main.BaseModel
+

Class variables

+
model_computed_fields
+
+
model_config
+
+
model_fields
+
+

Static methods

+

construct

+
def construct(
+    _fields_set: 'set[str] | None' = None,
+    **values: 'Any'
+) -> 'Model'
+
+

from_orm

+
def from_orm(
+    obj: 'Any'
+) -> 'Model'
+
+

model_construct

+
def model_construct(
+    _fields_set: 'set[str] | None' = None,
+    **values: 'Any'
+) -> 'Model'
+
+

Creates a new instance of the Model class with validated data.

+

Creates a new model setting __dict__ and __pydantic_fields_set__ from trusted or pre-validated data. +Default values are respected, but no other validation is performed.

+
+

Note

+

model_construct() generally respects the model_config.extra setting on the provided model. +That is, if model_config.extra == 'allow', then all extra passed values are added to the model instance's __dict__ +and __pydantic_extra__ fields. If model_config.extra == 'ignore' (the default), then all extra passed values are ignored. +Because no validation is performed with a call to model_construct(), having model_config.extra == 'forbid' does not result in +an error if extra values are passed, but they will be ignored.

+
+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
_fields_setNoneThe set of field names accepted for the Model instance.None
valuesNoneTrusted or pre-validated data dictionary.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
NoneA new instance of the Model class with validated data.
+

model_json_schema

+
def model_json_schema(
+    by_alias: 'bool' = True,
+    ref_template: 'str' = '#/$defs/{model}',
+    schema_generator: 'type[GenerateJsonSchema]' = <class 'pydantic.json_schema.GenerateJsonSchema'>,
+    mode: 'JsonSchemaMode' = 'validation'
+) -> 'dict[str, Any]'
+
+

Generates a JSON schema for a model class.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
by_aliasNoneWhether to use attribute aliases or not.None
ref_templateNoneThe reference template.None
schema_generatorNoneTo override the logic used to generate the JSON schema, as a subclass of
GenerateJsonSchema with your desired modifications
None
modeNoneThe mode in which to generate the schema.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
NoneThe JSON schema for the given model class.
+

model_parametrized_name

+
def model_parametrized_name(
+    params: 'tuple[type[Any], ...]'
+) -> 'str'
+
+

Compute the class name for parametrizations of generic classes.

+

This method can be overridden to achieve a custom naming scheme for generic BaseModels.

+

Parameters:

+ + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
paramsNoneTuple of types of the class. Given a generic class
Model with 2 type variables and a concrete model Model[str, int],
the value (str, int) would be passed to params.
None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
NoneString representing the new class where params are passed to cls as type variables.
+

Raises:

+ + + + + + + + + + + + + +
TypeDescription
TypeErrorRaised when trying to generate concrete names for non-generic models.
+

model_rebuild

+
def model_rebuild(
+    *,
+    force: 'bool' = False,
+    raise_errors: 'bool' = True,
+    _parent_namespace_depth: 'int' = 2,
+    _types_namespace: 'dict[str, Any] | None' = None
+) -> 'bool | None'
+
+

Try to rebuild the pydantic-core schema for the model.

+

This may be necessary when one of the annotations is a ForwardRef which could not be resolved during +the initial attempt to build the schema, and automatic rebuilding fails.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
forceNoneWhether to force the rebuilding of the model schema, defaults to False.None
raise_errorsNoneWhether to raise errors, defaults to True.None
_parent_namespace_depthNoneThe depth level of the parent namespace, defaults to 2.None
_types_namespaceNoneThe types namespace, defaults to None.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
NoneReturns None if the schema is already "complete" and rebuilding was not required.
If rebuilding was required, returns True if rebuilding was successful, otherwise False.
+

model_validate

+
def model_validate(
+    obj: 'Any',
+    *,
+    strict: 'bool | None' = None,
+    from_attributes: 'bool | None' = None,
+    context: 'dict[str, Any] | None' = None
+) -> 'Model'
+
+

Validate a pydantic model instance.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
objNoneThe object to validate.None
strictNoneWhether to enforce types strictly.None
from_attributesNoneWhether to extract data from object attributes.None
contextNoneAdditional context to pass to the validator.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
NoneThe validated model instance.
+

Raises:

+ + + + + + + + + + + + + +
TypeDescription
ValidationErrorIf the object could not be validated.
+

model_validate_json

+
def model_validate_json(
+    json_data: 'str | bytes | bytearray',
+    *,
+    strict: 'bool | None' = None,
+    context: 'dict[str, Any] | None' = None
+) -> 'Model'
+
+

Usage docs: docs.pydantic.dev/2.7/concepts/json/#json-parsing

+

Validate the given JSON data against the Pydantic model.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
json_dataNoneThe JSON data to validate.None
strictNoneWhether to enforce types strictly.None
contextNoneExtra variables to pass to the validator.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
NoneThe validated Pydantic model.
+

Raises:

+ + + + + + + + + + + + + +
TypeDescription
ValueErrorIf json_data is not a JSON string.
+

model_validate_strings

+
def model_validate_strings(
+    obj: 'Any',
+    *,
+    strict: 'bool | None' = None,
+    context: 'dict[str, Any] | None' = None
+) -> 'Model'
+
+

Validate the given object (containing string data) against the Pydantic model.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
objNoneThe object contains string data to validate.None
strictNoneWhether to enforce types strictly.None
contextNoneExtra variables to pass to the validator.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
NoneThe validated Pydantic model.
+

parse_file

+
def parse_file(
+    path: 'str | Path',
+    *,
+    content_type: 'str | None' = None,
+    encoding: 'str' = 'utf8',
+    proto: 'DeprecatedParseProtocol | None' = None,
+    allow_pickle: 'bool' = False
+) -> 'Model'
+
+

parse_obj

+
def parse_obj(
+    obj: 'Any'
+) -> 'Model'
+
+

parse_raw

+
def parse_raw(
+    b: 'str | bytes',
+    *,
+    content_type: 'str | None' = None,
+    encoding: 'str' = 'utf8',
+    proto: 'DeprecatedParseProtocol | None' = None,
+    allow_pickle: 'bool' = False
+) -> 'Model'
+
+

schema

+
def schema(
+    by_alias: 'bool' = True,
+    ref_template: 'str' = '#/$defs/{model}'
+) -> 'typing.Dict[str, Any]'
+
+

schema_json

+
def schema_json(
+    *,
+    by_alias: 'bool' = True,
+    ref_template: 'str' = '#/$defs/{model}',
+    **dumps_kwargs: 'Any'
+) -> 'str'
+
+

update_forward_refs

+
def update_forward_refs(
+    **localns: 'Any'
+) -> 'None'
+
+

validate

+
def validate(
+    value: 'Any'
+) -> 'Model'
+
+

Instance variables

+
model_extra
+
+

Get extra fields set during validation.

+
model_fields_set
+
+

Returns the set of fields that have been explicitly set on this model instance.

+

Methods

+

copy

+
def copy(
+    self: 'Model',
+    *,
+    include: 'AbstractSetIntStr | MappingIntStrAny | None' = None,
+    exclude: 'AbstractSetIntStr | MappingIntStrAny | None' = None,
+    update: 'typing.Dict[str, Any] | None' = None,
+    deep: 'bool' = False
+) -> 'Model'
+
+

Returns a copy of the model.

+
+

Deprecated

+

This method is now deprecated; use model_copy instead.

+
+

If you need include or exclude, use:

+
data = self.model_dump(include=include, exclude=exclude, round_trip=True)
+data = {**data, **(update or {})}
+copied = self.model_validate(data)
+
+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
includeNoneOptional set or mapping specifying which fields to include in the copied model.None
excludeNoneOptional set or mapping specifying which fields to exclude in the copied model.None
updateNoneOptional dictionary of field-value pairs to override field values in the copied model.None
deepNoneIf True, the values of fields that are Pydantic models will be deep-copied.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
NoneA copy of the model with included, excluded and updated fields as specified.
+

dict

+
def dict(
+    self,
+    *,
+    include: 'IncEx' = None,
+    exclude: 'IncEx' = None,
+    by_alias: 'bool' = False,
+    exclude_unset: 'bool' = False,
+    exclude_defaults: 'bool' = False,
+    exclude_none: 'bool' = False
+) -> 'typing.Dict[str, Any]'
+
+

json

+
def json(
+    self,
+    *,
+    include: 'IncEx' = None,
+    exclude: 'IncEx' = None,
+    by_alias: 'bool' = False,
+    exclude_unset: 'bool' = False,
+    exclude_defaults: 'bool' = False,
+    exclude_none: 'bool' = False,
+    encoder: 'typing.Callable[[Any], Any] | None' = PydanticUndefined,
+    models_as_dict: 'bool' = PydanticUndefined,
+    **dumps_kwargs: 'Any'
+) -> 'str'
+
+

model_copy

+
def model_copy(
+    self: 'Model',
+    *,
+    update: 'dict[str, Any] | None' = None,
+    deep: 'bool' = False
+) -> 'Model'
+
+

Usage docs: docs.pydantic.dev/2.7/concepts/serialization/#model_copy

+

Returns a copy of the model.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
updateNoneValues to change/add in the new model. Note: the data is not validated
before creating the new model. You should trust this data.
None
deepNoneSet to True to make a deep copy of the model.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
NoneNew model instance.
+

model_dump

+
def model_dump(
+    self,
+    *,
+    mode: "Literal['json', 'python'] | str" = 'python',
+    include: 'IncEx' = None,
+    exclude: 'IncEx' = None,
+    context: 'dict[str, Any] | None' = None,
+    by_alias: 'bool' = False,
+    exclude_unset: 'bool' = False,
+    exclude_defaults: 'bool' = False,
+    exclude_none: 'bool' = False,
+    round_trip: 'bool' = False,
+    warnings: "bool | Literal['none', 'warn', 'error']" = True,
+    serialize_as_any: 'bool' = False
+) -> 'dict[str, Any]'
+
+

Usage docs: docs.pydantic.dev/2.7/concepts/serialization/#modelmodel_dump

+

Generate a dictionary representation of the model, optionally specifying which fields to include or exclude.

+

Parameters:

| Name | Type | Description | Default |
|---|---|---|---|
| mode | None | The mode in which to_python should run. If mode is 'json', the output will only contain JSON serializable types. If mode is 'python', the output may contain non-JSON-serializable Python objects. | None |
| include | None | A set of fields to include in the output. | None |
| exclude | None | A set of fields to exclude from the output. | None |
| context | None | Additional context to pass to the serializer. | None |
| by_alias | None | Whether to use the field's alias in the dictionary key if defined. | None |
| exclude_unset | None | Whether to exclude fields that have not been explicitly set. | None |
| exclude_defaults | None | Whether to exclude fields that are set to their default value. | None |
| exclude_none | None | Whether to exclude fields that have a value of None. | None |
| round_trip | None | If True, dumped values should be valid as input for non-idempotent types such as Json[T]. | None |
| warnings | None | How to handle serialization errors. False/"none" ignores them, True/"warn" logs errors, "error" raises a [PydanticSerializationError][pydantic_core.PydanticSerializationError]. | None |
| serialize_as_any | None | Whether to serialize fields with duck-typing serialization behavior. | None |

Returns:

| Type | Description |
|---|---|
| None | A dictionary representation of the model. |
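
A minimal, self-contained sketch of `model_dump` (the `Example` model is hypothetical, used only for this snippet):

```python
from pydantic import BaseModel


class Example(BaseModel):
    # hypothetical model, only to illustrate model_dump
    name: str = "default"
    minzoom: int = 0


data = Example(name="mosaic").model_dump(mode="json", exclude_defaults=True)
print(data)  # {'name': 'mosaic'}; fields left at their defaults are omitted
```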

model_dump_json

+
def model_dump_json(
+    self,
+    *,
+    indent: 'int | None' = None,
+    include: 'IncEx' = None,
+    exclude: 'IncEx' = None,
+    context: 'dict[str, Any] | None' = None,
+    by_alias: 'bool' = False,
+    exclude_unset: 'bool' = False,
+    exclude_defaults: 'bool' = False,
+    exclude_none: 'bool' = False,
+    round_trip: 'bool' = False,
+    warnings: "bool | Literal['none', 'warn', 'error']" = True,
+    serialize_as_any: 'bool' = False
+) -> 'str'
+
+

Usage docs: docs.pydantic.dev/2.7/concepts/serialization/#modelmodel_dump_json

+

Generates a JSON representation of the model using Pydantic's to_json method.

+

Parameters:

| Name | Type | Description | Default |
|---|---|---|---|
| indent | None | Indentation to use in the JSON output. If None is passed, the output will be compact. | None |
| include | None | Field(s) to include in the JSON output. | None |
| exclude | None | Field(s) to exclude from the JSON output. | None |
| context | None | Additional context to pass to the serializer. | None |
| by_alias | None | Whether to serialize using field aliases. | None |
| exclude_unset | None | Whether to exclude fields that have not been explicitly set. | None |
| exclude_defaults | None | Whether to exclude fields that are set to their default value. | None |
| exclude_none | None | Whether to exclude fields that have a value of None. | None |
| round_trip | None | If True, dumped values should be valid as input for non-idempotent types such as Json[T]. | None |
| warnings | None | How to handle serialization errors. False/"none" ignores them, True/"warn" logs errors, "error" raises a [PydanticSerializationError][pydantic_core.PydanticSerializationError]. | None |
| serialize_as_any | None | Whether to serialize fields with duck-typing serialization behavior. | None |

Returns:

| Type | Description |
|---|---|
| None | A JSON string representation of the model. |

model_post_init

+
def model_post_init(
+    self,
+    _BaseModel__context: 'Any'
+) -> 'None'
+
+

Override this method to perform additional initialization after __init__ and model_construct.

+

This is useful if you want to do some validation that requires the entire model to be initialized.

+ +
class Link(
+    /,
+    **data: 'Any'
+)
+
+

Link model.

+

Ref: github.com/opengeospatial/ogcapi-tiles/blob/master/openapi/schemas/common-core/link.yaml

+

Code generated using koxudaxi/datamodel-code-generator
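
For illustration only, a minimal sketch of building a `Link`; the values are made up and the field names (`href`, `rel`, `title`) are assumed from the OGC link schema referenced above rather than verified against the generated class:

```python
from titiler.pgstac.model import Link

# hypothetical values; field names assumed from the OGC link schema
link = Link(
    href="https://example.com/searches/abc123/info",
    rel="metadata",
    title="Search metadata",
)
print(link.model_dump_json(exclude_none=True))
```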

+

Ancestors (in MRO)

  • pydantic.main.BaseModel

Class variables

+
model_computed_fields
+
+
model_config
+
+
model_fields
+
+

Static methods

+

construct

+
def construct(
+    _fields_set: 'set[str] | None' = None,
+    **values: 'Any'
+) -> 'Model'
+
+

from_orm

+
def from_orm(
+    obj: 'Any'
+) -> 'Model'
+
+

model_construct

+
def model_construct(
+    _fields_set: 'set[str] | None' = None,
+    **values: 'Any'
+) -> 'Model'
+
+

Creates a new instance of the Model class with validated data.

+

Creates a new model setting __dict__ and __pydantic_fields_set__ from trusted or pre-validated data. Default values are respected, but no other validation is performed.

+
+

Note

+

model_construct() generally respects the model_config.extra setting on the provided model. That is, if model_config.extra == 'allow', then all extra passed values are added to the model instance's __dict__ and __pydantic_extra__ fields. If model_config.extra == 'ignore' (the default), then all extra passed values are ignored. Because no validation is performed with a call to model_construct(), having model_config.extra == 'forbid' does not result in an error if extra values are passed, but they will be ignored.

+
+

Parameters:

| Name | Type | Description | Default |
|---|---|---|---|
| _fields_set | None | The set of field names accepted for the Model instance. | None |
| values | None | Trusted or pre-validated data dictionary. | None |

Returns:

| Type | Description |
|---|---|
| None | A new instance of the Model class with validated data. |
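
A short sketch of `model_construct`, under the assumption that the payload is already trusted (no validation is run; the field names are illustrative):

```python
from titiler.pgstac.model import Link

# trusted, pre-validated payload (hypothetical values)
raw = {"href": "https://example.com/tiles", "rel": "tiles"}
link = Link.model_construct(**raw)
```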

model_json_schema

+
def model_json_schema(
+    by_alias: 'bool' = True,
+    ref_template: 'str' = '#/$defs/{model}',
+    schema_generator: 'type[GenerateJsonSchema]' = <class 'pydantic.json_schema.GenerateJsonSchema'>,
+    mode: 'JsonSchemaMode' = 'validation'
+) -> 'dict[str, Any]'
+
+

Generates a JSON schema for a model class.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
by_aliasNoneWhether to use attribute aliases or not.None
ref_templateNoneThe reference template.None
schema_generatorNoneTo override the logic used to generate the JSON schema, as a subclass of
GenerateJsonSchema with your desired modifications
None
modeNoneThe mode in which to generate the schema.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
NoneThe JSON schema for the given model class.
+

model_parametrized_name

+
def model_parametrized_name(
+    params: 'tuple[type[Any], ...]'
+) -> 'str'
+
+

Compute the class name for parametrizations of generic classes.

+

This method can be overridden to achieve a custom naming scheme for generic BaseModels.

+

Parameters:

+ + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
paramsNoneTuple of types of the class. Given a generic class
Model with 2 type variables and a concrete model Model[str, int],
the value (str, int) would be passed to params.
None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
NoneString representing the new class where params are passed to cls as type variables.
+

Raises:

+ + + + + + + + + + + + + +
TypeDescription
TypeErrorRaised when trying to generate concrete names for non-generic models.
+

model_rebuild

+
def model_rebuild(
+    *,
+    force: 'bool' = False,
+    raise_errors: 'bool' = True,
+    _parent_namespace_depth: 'int' = 2,
+    _types_namespace: 'dict[str, Any] | None' = None
+) -> 'bool | None'
+
+

Try to rebuild the pydantic-core schema for the model.

+

This may be necessary when one of the annotations is a ForwardRef which could not be resolved during the initial attempt to build the schema, and automatic rebuilding fails.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
forceNoneWhether to force the rebuilding of the model schema, defaults to False.None
raise_errorsNoneWhether to raise errors, defaults to True.None
_parent_namespace_depthNoneThe depth level of the parent namespace, defaults to 2.None
_types_namespaceNoneThe types namespace, defaults to None.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
NoneReturns None if the schema is already "complete" and rebuilding was not required.
If rebuilding was required, returns True if rebuilding was successful, otherwise False.
+

model_validate

+
def model_validate(
+    obj: 'Any',
+    *,
+    strict: 'bool | None' = None,
+    from_attributes: 'bool | None' = None,
+    context: 'dict[str, Any] | None' = None
+) -> 'Model'
+
+

Validate a pydantic model instance.

+

Parameters:

| Name | Type | Description | Default |
|---|---|---|---|
| obj | None | The object to validate. | None |
| strict | None | Whether to enforce types strictly. | None |
| from_attributes | None | Whether to extract data from object attributes. | None |
| context | None | Additional context to pass to the validator. | None |

Returns:

| Type | Description |
|---|---|
| None | The validated model instance. |

Raises:

| Type | Description |
|---|---|
| ValidationError | If the object could not be validated. |
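
A minimal sketch of `model_validate`; invalid input would raise a `ValidationError` (the payload below is hypothetical):

```python
from pydantic import ValidationError
from titiler.pgstac.model import Link

try:
    # hypothetical payload
    link = Link.model_validate({"href": "https://example.com/assets", "rel": "assets"})
except ValidationError as err:
    print(err)
```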

model_validate_json

+
def model_validate_json(
+    json_data: 'str | bytes | bytearray',
+    *,
+    strict: 'bool | None' = None,
+    context: 'dict[str, Any] | None' = None
+) -> 'Model'
+
+

Usage docs: docs.pydantic.dev/2.7/concepts/json/#json-parsing

+

Validate the given JSON data against the Pydantic model.

+

Parameters:

| Name | Type | Description | Default |
|---|---|---|---|
| json_data | None | The JSON data to validate. | None |
| strict | None | Whether to enforce types strictly. | None |
| context | None | Extra variables to pass to the validator. | None |

Returns:

| Type | Description |
|---|---|
| None | The validated Pydantic model. |

Raises:

| Type | Description |
|---|---|
| ValueError | If json_data is not a JSON string. |
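
And the JSON counterpart, `model_validate_json`, sketched with a hypothetical payload:

```python
from titiler.pgstac.model import Link

payload = '{"href": "https://example.com/map", "rel": "map"}'
link = Link.model_validate_json(payload)
```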

model_validate_strings

+
def model_validate_strings(
+    obj: 'Any',
+    *,
+    strict: 'bool | None' = None,
+    context: 'dict[str, Any] | None' = None
+) -> 'Model'
+
+

Validate the given object containing string data against the Pydantic model.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
objNoneThe object contains string data to validate.None
strictNoneWhether to enforce types strictly.None
contextNoneExtra variables to pass to the validator.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
NoneThe validated Pydantic model.
+

parse_file

+
def parse_file(
+    path: 'str | Path',
+    *,
+    content_type: 'str | None' = None,
+    encoding: 'str' = 'utf8',
+    proto: 'DeprecatedParseProtocol | None' = None,
+    allow_pickle: 'bool' = False
+) -> 'Model'
+
+

parse_obj

+
def parse_obj(
+    obj: 'Any'
+) -> 'Model'
+
+

parse_raw

+
def parse_raw(
+    b: 'str | bytes',
+    *,
+    content_type: 'str | None' = None,
+    encoding: 'str' = 'utf8',
+    proto: 'DeprecatedParseProtocol | None' = None,
+    allow_pickle: 'bool' = False
+) -> 'Model'
+
+

schema

+
def schema(
+    by_alias: 'bool' = True,
+    ref_template: 'str' = '#/$defs/{model}'
+) -> 'typing.Dict[str, Any]'
+
+

schema_json

+
def schema_json(
+    *,
+    by_alias: 'bool' = True,
+    ref_template: 'str' = '#/$defs/{model}',
+    **dumps_kwargs: 'Any'
+) -> 'str'
+
+

update_forward_refs

+
def update_forward_refs(
+    **localns: 'Any'
+) -> 'None'
+
+

validate

+
def validate(
+    value: 'Any'
+) -> 'Model'
+
+

Instance variables

+
model_extra
+
+

Get extra fields set during validation.

+
model_fields_set
+
+

Returns the set of fields that have been explicitly set on this model instance.

+

Methods

+

copy

+
def copy(
+    self: 'Model',
+    *,
+    include: 'AbstractSetIntStr | MappingIntStrAny | None' = None,
+    exclude: 'AbstractSetIntStr | MappingIntStrAny | None' = None,
+    update: 'typing.Dict[str, Any] | None' = None,
+    deep: 'bool' = False
+) -> 'Model'
+
+

Returns a copy of the model.

+
+

Deprecated

+

This method is now deprecated; use model_copy instead.

+
+

If you need include or exclude, use:

+
data = self.model_dump(include=include, exclude=exclude, round_trip=True)
+data = {**data, **(update or {})}
+copied = self.model_validate(data)
+
+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
includeNoneOptional set or mapping specifying which fields to include in the copied model.None
excludeNoneOptional set or mapping specifying which fields to exclude in the copied model.None
updateNoneOptional dictionary of field-value pairs to override field values in the copied model.None
deepNoneIf True, the values of fields that are Pydantic models will be deep-copied.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
NoneA copy of the model with included, excluded and updated fields as specified.
+

dict

+
def dict(
+    self,
+    *,
+    include: 'IncEx' = None,
+    exclude: 'IncEx' = None,
+    by_alias: 'bool' = False,
+    exclude_unset: 'bool' = False,
+    exclude_defaults: 'bool' = False,
+    exclude_none: 'bool' = False
+) -> 'typing.Dict[str, Any]'
+
+

json

+
def json(
+    self,
+    *,
+    include: 'IncEx' = None,
+    exclude: 'IncEx' = None,
+    by_alias: 'bool' = False,
+    exclude_unset: 'bool' = False,
+    exclude_defaults: 'bool' = False,
+    exclude_none: 'bool' = False,
+    encoder: 'typing.Callable[[Any], Any] | None' = PydanticUndefined,
+    models_as_dict: 'bool' = PydanticUndefined,
+    **dumps_kwargs: 'Any'
+) -> 'str'
+
+

model_copy

+
def model_copy(
+    self: 'Model',
+    *,
+    update: 'dict[str, Any] | None' = None,
+    deep: 'bool' = False
+) -> 'Model'
+
+

Usage docs: docs.pydantic.dev/2.7/concepts/serialization/#model_copy

+

Returns a copy of the model.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
updateNoneValues to change/add in the new model. Note: the data is not validated
before creating the new model. You should trust this data.
None
deepNoneSet to True to make a deep copy of the model.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
NoneNew model instance.
+

model_dump

+
def model_dump(
+    self,
+    *,
+    mode: "Literal['json', 'python'] | str" = 'python',
+    include: 'IncEx' = None,
+    exclude: 'IncEx' = None,
+    context: 'dict[str, Any] | None' = None,
+    by_alias: 'bool' = False,
+    exclude_unset: 'bool' = False,
+    exclude_defaults: 'bool' = False,
+    exclude_none: 'bool' = False,
+    round_trip: 'bool' = False,
+    warnings: "bool | Literal['none', 'warn', 'error']" = True,
+    serialize_as_any: 'bool' = False
+) -> 'dict[str, Any]'
+
+

Usage docs: docs.pydantic.dev/2.7/concepts/serialization/#modelmodel_dump

+

Generate a dictionary representation of the model, optionally specifying which fields to include or exclude.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
modeNoneThe mode in which to_python should run.
If mode is 'json', the output will only contain JSON serializable types.
If mode is 'python', the output may contain non-JSON-serializable Python objects.
None
includeNoneA set of fields to include in the output.None
excludeNoneA set of fields to exclude from the output.None
contextNoneAdditional context to pass to the serializer.None
by_aliasNoneWhether to use the field's alias in the dictionary key if defined.None
exclude_unsetNoneWhether to exclude fields that have not been explicitly set.None
exclude_defaultsNoneWhether to exclude fields that are set to their default value.None
exclude_noneNoneWhether to exclude fields that have a value of None.None
round_tripNoneIf True, dumped values should be valid as input for non-idempotent types such as Json[T].None
warningsNoneHow to handle serialization errors. False/"none" ignores them, True/"warn" logs errors,
"error" raises a [PydanticSerializationError][pydantic_core.PydanticSerializationError].
None
serialize_as_anyNoneWhether to serialize fields with duck-typing serialization behavior.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
NoneA dictionary representation of the model.
+

model_dump_json

+
def model_dump_json(
+    self,
+    *,
+    indent: 'int | None' = None,
+    include: 'IncEx' = None,
+    exclude: 'IncEx' = None,
+    context: 'dict[str, Any] | None' = None,
+    by_alias: 'bool' = False,
+    exclude_unset: 'bool' = False,
+    exclude_defaults: 'bool' = False,
+    exclude_none: 'bool' = False,
+    round_trip: 'bool' = False,
+    warnings: "bool | Literal['none', 'warn', 'error']" = True,
+    serialize_as_any: 'bool' = False
+) -> 'str'
+
+

Usage docs: docs.pydantic.dev/2.7/concepts/serialization/#modelmodel_dump_json

+

Generates a JSON representation of the model using Pydantic's to_json method.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
indentNoneIndentation to use in the JSON output. If None is passed, the output will be compact.None
includeNoneField(s) to include in the JSON output.None
excludeNoneField(s) to exclude from the JSON output.None
contextNoneAdditional context to pass to the serializer.None
by_aliasNoneWhether to serialize using field aliases.None
exclude_unsetNoneWhether to exclude fields that have not been explicitly set.None
exclude_defaultsNoneWhether to exclude fields that are set to their default value.None
exclude_noneNoneWhether to exclude fields that have a value of None.None
round_tripNoneIf True, dumped values should be valid as input for non-idempotent types such as Json[T].None
warningsNoneHow to handle serialization errors. False/"none" ignores them, True/"warn" logs errors,
"error" raises a [PydanticSerializationError][pydantic_core.PydanticSerializationError].
None
serialize_as_anyNoneWhether to serialize fields with duck-typing serialization behavior.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
NoneA JSON string representation of the model.
+

model_post_init

+
def model_post_init(
+    self,
+    _BaseModel__context: 'Any'
+) -> 'None'
+
+

Override this method to perform additional initialization after __init__ and model_construct.

+

This is useful if you want to do some validation that requires the entire model to be initialized.

+

Metadata

+
class Metadata(
+    /,
+    **data: 'Any'
+)
+
+

Metadata Model.
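
A hedged sketch of building a Metadata object; the field names used here (name, minzoom, maxzoom, defaults) are assumptions for illustration and should be checked against the model fields:

```python
from titiler.pgstac.model import Metadata

# illustrative values only; field names are assumptions, not a verified list
meta = Metadata(
    name="My mosaic",
    minzoom=2,
    maxzoom=12,
    defaults={"true_color": {"assets": ["B04", "B03", "B02"]}},
)
print(meta.model_dump(exclude_none=True))
```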

+

Ancestors (in MRO)

  • pydantic.main.BaseModel

Class variables

+
model_computed_fields
+
+
model_config
+
+
model_fields
+
+

Static methods

+

construct

+
def construct(
+    _fields_set: 'set[str] | None' = None,
+    **values: 'Any'
+) -> 'Model'
+
+

from_orm

+
def from_orm(
+    obj: 'Any'
+) -> 'Model'
+
+

model_construct

+
def model_construct(
+    _fields_set: 'set[str] | None' = None,
+    **values: 'Any'
+) -> 'Model'
+
+

Creates a new instance of the Model class with validated data.

+

Creates a new model setting __dict__ and __pydantic_fields_set__ from trusted or pre-validated data. Default values are respected, but no other validation is performed.

+
+

Note

+

model_construct() generally respects the model_config.extra setting on the provided model. That is, if model_config.extra == 'allow', then all extra passed values are added to the model instance's __dict__ and __pydantic_extra__ fields. If model_config.extra == 'ignore' (the default), then all extra passed values are ignored. Because no validation is performed with a call to model_construct(), having model_config.extra == 'forbid' does not result in an error if extra values are passed, but they will be ignored.

+
+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
_fields_setNoneThe set of field names accepted for the Model instance.None
valuesNoneTrusted or pre-validated data dictionary.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
NoneA new instance of the Model class with validated data.
+

model_json_schema

+
def model_json_schema(
+    by_alias: 'bool' = True,
+    ref_template: 'str' = '#/$defs/{model}',
+    schema_generator: 'type[GenerateJsonSchema]' = <class 'pydantic.json_schema.GenerateJsonSchema'>,
+    mode: 'JsonSchemaMode' = 'validation'
+) -> 'dict[str, Any]'
+
+

Generates a JSON schema for a model class.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
by_aliasNoneWhether to use attribute aliases or not.None
ref_templateNoneThe reference template.None
schema_generatorNoneTo override the logic used to generate the JSON schema, as a subclass of
GenerateJsonSchema with your desired modifications
None
modeNoneThe mode in which to generate the schema.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
NoneThe JSON schema for the given model class.
+

model_parametrized_name

+
def model_parametrized_name(
+    params: 'tuple[type[Any], ...]'
+) -> 'str'
+
+

Compute the class name for parametrizations of generic classes.

+

This method can be overridden to achieve a custom naming scheme for generic BaseModels.

+

Parameters:

+ + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
paramsNoneTuple of types of the class. Given a generic class
Model with 2 type variables and a concrete model Model[str, int],
the value (str, int) would be passed to params.
None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
NoneString representing the new class where params are passed to cls as type variables.
+

Raises:

+ + + + + + + + + + + + + +
TypeDescription
TypeErrorRaised when trying to generate concrete names for non-generic models.
+

model_rebuild

+
def model_rebuild(
+    *,
+    force: 'bool' = False,
+    raise_errors: 'bool' = True,
+    _parent_namespace_depth: 'int' = 2,
+    _types_namespace: 'dict[str, Any] | None' = None
+) -> 'bool | None'
+
+

Try to rebuild the pydantic-core schema for the model.

+

This may be necessary when one of the annotations is a ForwardRef which could not be resolved during the initial attempt to build the schema, and automatic rebuilding fails.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
forceNoneWhether to force the rebuilding of the model schema, defaults to False.None
raise_errorsNoneWhether to raise errors, defaults to True.None
_parent_namespace_depthNoneThe depth level of the parent namespace, defaults to 2.None
_types_namespaceNoneThe types namespace, defaults to None.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
NoneReturns None if the schema is already "complete" and rebuilding was not required.
If rebuilding was required, returns True if rebuilding was successful, otherwise False.
+

model_validate

+
def model_validate(
+    obj: 'Any',
+    *,
+    strict: 'bool | None' = None,
+    from_attributes: 'bool | None' = None,
+    context: 'dict[str, Any] | None' = None
+) -> 'Model'
+
+

Validate a pydantic model instance.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
objNoneThe object to validate.None
strictNoneWhether to enforce types strictly.None
from_attributesNoneWhether to extract data from object attributes.None
contextNoneAdditional context to pass to the validator.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
NoneThe validated model instance.
+

Raises:

+ + + + + + + + + + + + + +
TypeDescription
ValidationErrorIf the object could not be validated.
+

model_validate_json

+
def model_validate_json(
+    json_data: 'str | bytes | bytearray',
+    *,
+    strict: 'bool | None' = None,
+    context: 'dict[str, Any] | None' = None
+) -> 'Model'
+
+

Usage docs: docs.pydantic.dev/2.7/concepts/json/#json-parsing

+

Validate the given JSON data against the Pydantic model.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
json_dataNoneThe JSON data to validate.None
strictNoneWhether to enforce types strictly.None
contextNoneExtra variables to pass to the validator.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
NoneThe validated Pydantic model.
+

Raises:

+ + + + + + + + + + + + + +
TypeDescription
ValueErrorIf json_data is not a JSON string.
+

model_validate_strings

+
def model_validate_strings(
+    obj: 'Any',
+    *,
+    strict: 'bool | None' = None,
+    context: 'dict[str, Any] | None' = None
+) -> 'Model'
+
+

Validate the given object containing string data against the Pydantic model.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
objNoneThe object contains string data to validate.None
strictNoneWhether to enforce types strictly.None
contextNoneExtra variables to pass to the validator.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
NoneThe validated Pydantic model.
+

parse_file

+
def parse_file(
+    path: 'str | Path',
+    *,
+    content_type: 'str | None' = None,
+    encoding: 'str' = 'utf8',
+    proto: 'DeprecatedParseProtocol | None' = None,
+    allow_pickle: 'bool' = False
+) -> 'Model'
+
+

parse_obj

+
def parse_obj(
+    obj: 'Any'
+) -> 'Model'
+
+

parse_raw

+
def parse_raw(
+    b: 'str | bytes',
+    *,
+    content_type: 'str | None' = None,
+    encoding: 'str' = 'utf8',
+    proto: 'DeprecatedParseProtocol | None' = None,
+    allow_pickle: 'bool' = False
+) -> 'Model'
+
+

schema

+
def schema(
+    by_alias: 'bool' = True,
+    ref_template: 'str' = '#/$defs/{model}'
+) -> 'typing.Dict[str, Any]'
+
+

schema_json

+
def schema_json(
+    *,
+    by_alias: 'bool' = True,
+    ref_template: 'str' = '#/$defs/{model}',
+    **dumps_kwargs: 'Any'
+) -> 'str'
+
+

update_forward_refs

+
def update_forward_refs(
+    **localns: 'Any'
+) -> 'None'
+
+

validate

+
def validate(
+    value: 'Any'
+) -> 'Model'
+
+

Instance variables

+
defaults_params
+
+

Return defaults in a form compatible with TiTiler dependencies.

+
model_extra
+
+

Get extra fields set during validation.

+
model_fields_set
+
+

Returns the set of fields that have been explicitly set on this model instance.

+

Methods

+

copy

+
def copy(
+    self: 'Model',
+    *,
+    include: 'AbstractSetIntStr | MappingIntStrAny | None' = None,
+    exclude: 'AbstractSetIntStr | MappingIntStrAny | None' = None,
+    update: 'typing.Dict[str, Any] | None' = None,
+    deep: 'bool' = False
+) -> 'Model'
+
+

Returns a copy of the model.

+
+

Deprecated

+

This method is now deprecated; use model_copy instead.

+
+

If you need include or exclude, use:

+
data = self.model_dump(include=include, exclude=exclude, round_trip=True)
+data = {**data, **(update or {})}
+copied = self.model_validate(data)
+
+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
includeNoneOptional set or mapping specifying which fields to include in the copied model.None
excludeNoneOptional set or mapping specifying which fields to exclude in the copied model.None
updateNoneOptional dictionary of field-value pairs to override field values in the copied model.None
deepNoneIf True, the values of fields that are Pydantic models will be deep-copied.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
NoneA copy of the model with included, excluded and updated fields as specified.
+

dict

+
def dict(
+    self,
+    *,
+    include: 'IncEx' = None,
+    exclude: 'IncEx' = None,
+    by_alias: 'bool' = False,
+    exclude_unset: 'bool' = False,
+    exclude_defaults: 'bool' = False,
+    exclude_none: 'bool' = False
+) -> 'typing.Dict[str, Any]'
+
+

json

+
def json(
+    self,
+    *,
+    include: 'IncEx' = None,
+    exclude: 'IncEx' = None,
+    by_alias: 'bool' = False,
+    exclude_unset: 'bool' = False,
+    exclude_defaults: 'bool' = False,
+    exclude_none: 'bool' = False,
+    encoder: 'typing.Callable[[Any], Any] | None' = PydanticUndefined,
+    models_as_dict: 'bool' = PydanticUndefined,
+    **dumps_kwargs: 'Any'
+) -> 'str'
+
+

model_copy

+
def model_copy(
+    self: 'Model',
+    *,
+    update: 'dict[str, Any] | None' = None,
+    deep: 'bool' = False
+) -> 'Model'
+
+

Usage docs: docs.pydantic.dev/2.7/concepts/serialization/#model_copy

+

Returns a copy of the model.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
updateNoneValues to change/add in the new model. Note: the data is not validated
before creating the new model. You should trust this data.
None
deepNoneSet to True to make a deep copy of the model.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
NoneNew model instance.
+

model_dump

+
def model_dump(
+    self,
+    *,
+    mode: "Literal['json', 'python'] | str" = 'python',
+    include: 'IncEx' = None,
+    exclude: 'IncEx' = None,
+    context: 'dict[str, Any] | None' = None,
+    by_alias: 'bool' = False,
+    exclude_unset: 'bool' = False,
+    exclude_defaults: 'bool' = False,
+    exclude_none: 'bool' = False,
+    round_trip: 'bool' = False,
+    warnings: "bool | Literal['none', 'warn', 'error']" = True,
+    serialize_as_any: 'bool' = False
+) -> 'dict[str, Any]'
+
+

Usage docs: docs.pydantic.dev/2.7/concepts/serialization/#modelmodel_dump

+

Generate a dictionary representation of the model, optionally specifying which fields to include or exclude.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
modeNoneThe mode in which to_python should run.
If mode is 'json', the output will only contain JSON serializable types.
If mode is 'python', the output may contain non-JSON-serializable Python objects.
None
includeNoneA set of fields to include in the output.None
excludeNoneA set of fields to exclude from the output.None
contextNoneAdditional context to pass to the serializer.None
by_aliasNoneWhether to use the field's alias in the dictionary key if defined.None
exclude_unsetNoneWhether to exclude fields that have not been explicitly set.None
exclude_defaultsNoneWhether to exclude fields that are set to their default value.None
exclude_noneNoneWhether to exclude fields that have a value of None.None
round_tripNoneIf True, dumped values should be valid as input for non-idempotent types such as Json[T].None
warningsNoneHow to handle serialization errors. False/"none" ignores them, True/"warn" logs errors,
"error" raises a [PydanticSerializationError][pydantic_core.PydanticSerializationError].
None
serialize_as_anyNoneWhether to serialize fields with duck-typing serialization behavior.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
NoneA dictionary representation of the model.
+

model_dump_json

+
def model_dump_json(
+    self,
+    *,
+    indent: 'int | None' = None,
+    include: 'IncEx' = None,
+    exclude: 'IncEx' = None,
+    context: 'dict[str, Any] | None' = None,
+    by_alias: 'bool' = False,
+    exclude_unset: 'bool' = False,
+    exclude_defaults: 'bool' = False,
+    exclude_none: 'bool' = False,
+    round_trip: 'bool' = False,
+    warnings: "bool | Literal['none', 'warn', 'error']" = True,
+    serialize_as_any: 'bool' = False
+) -> 'str'
+
+

Usage docs: docs.pydantic.dev/2.7/concepts/serialization/#modelmodel_dump_json

+

Generates a JSON representation of the model using Pydantic's to_json method.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
indentNoneIndentation to use in the JSON output. If None is passed, the output will be compact.None
includeNoneField(s) to include in the JSON output.None
excludeNoneField(s) to exclude from the JSON output.None
contextNoneAdditional context to pass to the serializer.None
by_aliasNoneWhether to serialize using field aliases.None
exclude_unsetNoneWhether to exclude fields that have not been explicitly set.None
exclude_defaultsNoneWhether to exclude fields that are set to their default value.None
exclude_noneNoneWhether to exclude fields that have a value of None.None
round_tripNoneIf True, dumped values should be valid as input for non-idempotent types such as Json[T].None
warningsNoneHow to handle serialization errors. False/"none" ignores them, True/"warn" logs errors,
"error" raises a [PydanticSerializationError][pydantic_core.PydanticSerializationError].
None
serialize_as_anyNoneWhether to serialize fields with duck-typing serialization behavior.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
NoneA JSON string representation of the model.
+

model_post_init

+
def model_post_init(
+    self,
+    _BaseModel__context: 'Any'
+) -> 'None'
+
+

Override this method to perform additional initialization after __init__ and model_construct.

+

This is useful if you want to do some validation that requires the entire model to be initialized.

+

PgSTACSearch

+
class PgSTACSearch(
+    /,
+    **data: 'Any'
+)
+
+

Search Query model.

+

Notes/Diff with standard model:

- 'fields' is not in the Model because it's defined at the tiler level
- we don't set limit
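
As a sketch, a search can be built directly from keyword arguments; the collection id, bbox and datetime below are made-up values:

```python
from titiler.pgstac.model import PgSTACSearch

search = PgSTACSearch(
    collections=["sentinel-2-l2a"],  # made-up collection id
    bbox=(-10.0, 35.0, 5.0, 45.0),
    datetime="2024-01-01T00:00:00Z/2024-03-31T23:59:59Z",
)
print(search.model_dump_json(exclude_none=True, by_alias=True))
```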

+

Ancestors (in MRO)

  • pydantic.main.BaseModel

Descendants

  • titiler.pgstac.model.RegisterMosaic

Class variables

+
model_computed_fields
+
+
model_config
+
+
model_fields
+
+

Static methods

+

construct

+
def construct(
+    _fields_set: 'set[str] | None' = None,
+    **values: 'Any'
+) -> 'Model'
+
+

from_orm

+
def from_orm(
+    obj: 'Any'
+) -> 'Model'
+
+

model_construct

+
def model_construct(
+    _fields_set: 'set[str] | None' = None,
+    **values: 'Any'
+) -> 'Model'
+
+

Creates a new instance of the Model class with validated data.

+

Creates a new model setting __dict__ and __pydantic_fields_set__ from trusted or pre-validated data. Default values are respected, but no other validation is performed.

+
+

Note

+

model_construct() generally respects the model_config.extra setting on the provided model. That is, if model_config.extra == 'allow', then all extra passed values are added to the model instance's __dict__ and __pydantic_extra__ fields. If model_config.extra == 'ignore' (the default), then all extra passed values are ignored. Because no validation is performed with a call to model_construct(), having model_config.extra == 'forbid' does not result in an error if extra values are passed, but they will be ignored.

+
+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
_fields_setNoneThe set of field names accepted for the Model instance.None
valuesNoneTrusted or pre-validated data dictionary.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
NoneA new instance of the Model class with validated data.
+

model_json_schema

+
def model_json_schema(
+    by_alias: 'bool' = True,
+    ref_template: 'str' = '#/$defs/{model}',
+    schema_generator: 'type[GenerateJsonSchema]' = <class 'pydantic.json_schema.GenerateJsonSchema'>,
+    mode: 'JsonSchemaMode' = 'validation'
+) -> 'dict[str, Any]'
+
+

Generates a JSON schema for a model class.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
by_aliasNoneWhether to use attribute aliases or not.None
ref_templateNoneThe reference template.None
schema_generatorNoneTo override the logic used to generate the JSON schema, as a subclass of
GenerateJsonSchema with your desired modifications
None
modeNoneThe mode in which to generate the schema.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
NoneThe JSON schema for the given model class.
+

model_parametrized_name

+
def model_parametrized_name(
+    params: 'tuple[type[Any], ...]'
+) -> 'str'
+
+

Compute the class name for parametrizations of generic classes.

+

This method can be overridden to achieve a custom naming scheme for generic BaseModels.

+

Parameters:

+ + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
paramsNoneTuple of types of the class. Given a generic class
Model with 2 type variables and a concrete model Model[str, int],
the value (str, int) would be passed to params.
None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
NoneString representing the new class where params are passed to cls as type variables.
+

Raises:

+ + + + + + + + + + + + + +
TypeDescription
TypeErrorRaised when trying to generate concrete names for non-generic models.
+

model_rebuild

+
def model_rebuild(
+    *,
+    force: 'bool' = False,
+    raise_errors: 'bool' = True,
+    _parent_namespace_depth: 'int' = 2,
+    _types_namespace: 'dict[str, Any] | None' = None
+) -> 'bool | None'
+
+

Try to rebuild the pydantic-core schema for the model.

+

This may be necessary when one of the annotations is a ForwardRef which could not be resolved during the initial attempt to build the schema, and automatic rebuilding fails.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
forceNoneWhether to force the rebuilding of the model schema, defaults to False.None
raise_errorsNoneWhether to raise errors, defaults to True.None
_parent_namespace_depthNoneThe depth level of the parent namespace, defaults to 2.None
_types_namespaceNoneThe types namespace, defaults to None.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
NoneReturns None if the schema is already "complete" and rebuilding was not required.
If rebuilding was required, returns True if rebuilding was successful, otherwise False.
+

model_validate

+
def model_validate(
+    obj: 'Any',
+    *,
+    strict: 'bool | None' = None,
+    from_attributes: 'bool | None' = None,
+    context: 'dict[str, Any] | None' = None
+) -> 'Model'
+
+

Validate a pydantic model instance.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
objNoneThe object to validate.None
strictNoneWhether to enforce types strictly.None
from_attributesNoneWhether to extract data from object attributes.None
contextNoneAdditional context to pass to the validator.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
NoneThe validated model instance.
+

Raises:

+ + + + + + + + + + + + + +
TypeDescription
ValidationErrorIf the object could not be validated.
+

model_validate_json

+
def model_validate_json(
+    json_data: 'str | bytes | bytearray',
+    *,
+    strict: 'bool | None' = None,
+    context: 'dict[str, Any] | None' = None
+) -> 'Model'
+
+

Usage docs: docs.pydantic.dev/2.7/concepts/json/#json-parsing

+

Validate the given JSON data against the Pydantic model.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
json_dataNoneThe JSON data to validate.None
strictNoneWhether to enforce types strictly.None
contextNoneExtra variables to pass to the validator.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
NoneThe validated Pydantic model.
+

Raises:

+ + + + + + + + + + + + + +
TypeDescription
ValueErrorIf json_data is not a JSON string.
+

model_validate_strings

+
def model_validate_strings(
+    obj: 'Any',
+    *,
+    strict: 'bool | None' = None,
+    context: 'dict[str, Any] | None' = None
+) -> 'Model'
+
+

Validate the given object containing string data against the Pydantic model.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
objNoneThe object contains string data to validate.None
strictNoneWhether to enforce types strictly.None
contextNoneExtra variables to pass to the validator.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
NoneThe validated Pydantic model.
+

parse_file

+
def parse_file(
+    path: 'str | Path',
+    *,
+    content_type: 'str | None' = None,
+    encoding: 'str' = 'utf8',
+    proto: 'DeprecatedParseProtocol | None' = None,
+    allow_pickle: 'bool' = False
+) -> 'Model'
+
+

parse_obj

+
def parse_obj(
+    obj: 'Any'
+) -> 'Model'
+
+

parse_raw

+
def parse_raw(
+    b: 'str | bytes',
+    *,
+    content_type: 'str | None' = None,
+    encoding: 'str' = 'utf8',
+    proto: 'DeprecatedParseProtocol | None' = None,
+    allow_pickle: 'bool' = False
+) -> 'Model'
+
+

schema

+
def schema(
+    by_alias: 'bool' = True,
+    ref_template: 'str' = '#/$defs/{model}'
+) -> 'typing.Dict[str, Any]'
+
+

schema_json

+
def schema_json(
+    *,
+    by_alias: 'bool' = True,
+    ref_template: 'str' = '#/$defs/{model}',
+    **dumps_kwargs: 'Any'
+) -> 'str'
+
+

update_forward_refs

+
def update_forward_refs(
+    **localns: 'Any'
+) -> 'None'
+
+

validate

+
def validate(
+    value: 'Any'
+) -> 'Model'
+
+

validate_bbox

+
def validate_bbox(
+    v: Union[Tuple[float, float, float, float], Tuple[float, float, float, float, float, float]]
+)
+
+

Validate BBOX.

+

validate_datetime

+
def validate_datetime(
+    v
+)
+
+

Pgstac does not require the base validator for datetime.

+

validate_query_fields

+
def validate_query_fields(
+    values: Dict
+) -> Dict
+
+

Pgstac does not require the base validator for query fields.

+

validate_spatial

+
def validate_spatial(
+    v: Optional[Annotated[Union[geojson_pydantic.geometries.Point, geojson_pydantic.geometries.MultiPoint, geojson_pydantic.geometries.LineString, geojson_pydantic.geometries.MultiLineString, geojson_pydantic.geometries.Polygon, geojson_pydantic.geometries.MultiPolygon, geojson_pydantic.geometries.GeometryCollection], FieldInfo(annotation=NoneType, required=True, discriminator='type')]],
+    info: pydantic_core.core_schema.ValidationInfo
+)
+
+

Make sure bbox is not used with Intersects.
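
A small sketch of what this validator enforces, assuming the failure surfaces as a ValidationError the way pydantic validators normally do:

```python
from pydantic import ValidationError
from titiler.pgstac.model import PgSTACSearch

geom = {"type": "Point", "coordinates": [0.0, 45.0]}
try:
    PgSTACSearch(bbox=(-10.0, 35.0, 5.0, 45.0), intersects=geom)
except ValidationError as err:
    print(err)  # bbox and intersects cannot be combined
```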

+

Instance variables

+
model_extra
+
+

Get extra fields set during validation.

+
model_fields_set
+
+

Returns the set of fields that have been explicitly set on this model instance.

+

Methods

+

copy

+
def copy(
+    self: 'Model',
+    *,
+    include: 'AbstractSetIntStr | MappingIntStrAny | None' = None,
+    exclude: 'AbstractSetIntStr | MappingIntStrAny | None' = None,
+    update: 'typing.Dict[str, Any] | None' = None,
+    deep: 'bool' = False
+) -> 'Model'
+
+

Returns a copy of the model.

+
+

Deprecated

+

This method is now deprecated; use model_copy instead.

+
+

If you need include or exclude, use:

+
data = self.model_dump(include=include, exclude=exclude, round_trip=True)
+data = {**data, **(update or {})}
+copied = self.model_validate(data)
+
+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
includeNoneOptional set or mapping specifying which fields to include in the copied model.None
excludeNoneOptional set or mapping specifying which fields to exclude in the copied model.None
updateNoneOptional dictionary of field-value pairs to override field values in the copied model.None
deepNoneIf True, the values of fields that are Pydantic models will be deep-copied.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
NoneA copy of the model with included, excluded and updated fields as specified.
+

dict

+
def dict(
+    self,
+    *,
+    include: 'IncEx' = None,
+    exclude: 'IncEx' = None,
+    by_alias: 'bool' = False,
+    exclude_unset: 'bool' = False,
+    exclude_defaults: 'bool' = False,
+    exclude_none: 'bool' = False
+) -> 'typing.Dict[str, Any]'
+
+

json

+
def json(
+    self,
+    *,
+    include: 'IncEx' = None,
+    exclude: 'IncEx' = None,
+    by_alias: 'bool' = False,
+    exclude_unset: 'bool' = False,
+    exclude_defaults: 'bool' = False,
+    exclude_none: 'bool' = False,
+    encoder: 'typing.Callable[[Any], Any] | None' = PydanticUndefined,
+    models_as_dict: 'bool' = PydanticUndefined,
+    **dumps_kwargs: 'Any'
+) -> 'str'
+
+

model_copy

+
def model_copy(
+    self: 'Model',
+    *,
+    update: 'dict[str, Any] | None' = None,
+    deep: 'bool' = False
+) -> 'Model'
+
+

Usage docs: docs.pydantic.dev/2.7/concepts/serialization/#model_copy

+

Returns a copy of the model.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
updateNoneValues to change/add in the new model. Note: the data is not validated
before creating the new model. You should trust this data.
None
deepNoneSet to True to make a deep copy of the model.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
NoneNew model instance.
+

model_dump

+
def model_dump(
+    self,
+    *,
+    mode: "Literal['json', 'python'] | str" = 'python',
+    include: 'IncEx' = None,
+    exclude: 'IncEx' = None,
+    context: 'dict[str, Any] | None' = None,
+    by_alias: 'bool' = False,
+    exclude_unset: 'bool' = False,
+    exclude_defaults: 'bool' = False,
+    exclude_none: 'bool' = False,
+    round_trip: 'bool' = False,
+    warnings: "bool | Literal['none', 'warn', 'error']" = True,
+    serialize_as_any: 'bool' = False
+) -> 'dict[str, Any]'
+
+

Usage docs: docs.pydantic.dev/2.7/concepts/serialization/#modelmodel_dump

+

Generate a dictionary representation of the model, optionally specifying which fields to include or exclude.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
modeNoneThe mode in which to_python should run.
If mode is 'json', the output will only contain JSON serializable types.
If mode is 'python', the output may contain non-JSON-serializable Python objects.
None
includeNoneA set of fields to include in the output.None
excludeNoneA set of fields to exclude from the output.None
contextNoneAdditional context to pass to the serializer.None
by_aliasNoneWhether to use the field's alias in the dictionary key if defined.None
exclude_unsetNoneWhether to exclude fields that have not been explicitly set.None
exclude_defaultsNoneWhether to exclude fields that are set to their default value.None
exclude_noneNoneWhether to exclude fields that have a value of None.None
round_tripNoneIf True, dumped values should be valid as input for non-idempotent types such as Json[T].None
warningsNoneHow to handle serialization errors. False/"none" ignores them, True/"warn" logs errors,
"error" raises a [PydanticSerializationError][pydantic_core.PydanticSerializationError].
None
serialize_as_anyNoneWhether to serialize fields with duck-typing serialization behavior.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
NoneA dictionary representation of the model.
+

model_dump_json

+
def model_dump_json(
+    self,
+    *,
+    indent: 'int | None' = None,
+    include: 'IncEx' = None,
+    exclude: 'IncEx' = None,
+    context: 'dict[str, Any] | None' = None,
+    by_alias: 'bool' = False,
+    exclude_unset: 'bool' = False,
+    exclude_defaults: 'bool' = False,
+    exclude_none: 'bool' = False,
+    round_trip: 'bool' = False,
+    warnings: "bool | Literal['none', 'warn', 'error']" = True,
+    serialize_as_any: 'bool' = False
+) -> 'str'
+
+

Usage docs: docs.pydantic.dev/2.7/concepts/serialization/#modelmodel_dump_json

+

Generates a JSON representation of the model using Pydantic's to_json method.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
indentNoneIndentation to use in the JSON output. If None is passed, the output will be compact.None
includeNoneField(s) to include in the JSON output.None
excludeNoneField(s) to exclude from the JSON output.None
contextNoneAdditional context to pass to the serializer.None
by_aliasNoneWhether to serialize using field aliases.None
exclude_unsetNoneWhether to exclude fields that have not been explicitly set.None
exclude_defaultsNoneWhether to exclude fields that are set to their default value.None
exclude_noneNoneWhether to exclude fields that have a value of None.None
round_tripNoneIf True, dumped values should be valid as input for non-idempotent types such as Json[T].None
warningsNoneHow to handle serialization errors. False/"none" ignores them, True/"warn" logs errors,
"error" raises a [PydanticSerializationError][pydantic_core.PydanticSerializationError].
None
serialize_as_anyNoneWhether to serialize fields with duck-typing serialization behavior.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
NoneA JSON string representation of the model.
+

model_post_init

+
def model_post_init(
+    self,
+    _BaseModel__context: 'Any'
+) -> 'None'
+
+

Override this method to perform additional initialization after __init__ and model_construct.

+

This is useful if you want to do some validation that requires the entire model to be initialized.

+

RegisterMosaic

+
class RegisterMosaic(
+    /,
+    **data: 'Any'
+)
+
+

Model of /register endpoint input.
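
A hedged sketch of a /register request body built with this model; the metadata field and its contents are assumptions for illustration (see the Metadata model above):

```python
from titiler.pgstac.model import RegisterMosaic

body = RegisterMosaic(
    collections=["sentinel-2-l2a"],            # made-up collection id
    bbox=(-10.0, 35.0, 5.0, 45.0),
    metadata={"name": "Iberia - Sentinel-2"},  # assumed field, see Metadata above
)
print(body.model_dump(exclude_none=True, by_alias=True))
```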

+

Ancestors (in MRO)

  • titiler.pgstac.model.PgSTACSearch
  • pydantic.main.BaseModel

Class variables

+
model_computed_fields
+
+
model_config
+
+
model_fields
+
+

Static methods

+

construct

+
def construct(
+    _fields_set: 'set[str] | None' = None,
+    **values: 'Any'
+) -> 'Model'
+
+

from_orm

+
def from_orm(
+    obj: 'Any'
+) -> 'Model'
+
+

model_construct

+
def model_construct(
+    _fields_set: 'set[str] | None' = None,
+    **values: 'Any'
+) -> 'Model'
+
+

Creates a new instance of the Model class with validated data.

+

Creates a new model setting __dict__ and __pydantic_fields_set__ from trusted or pre-validated data. Default values are respected, but no other validation is performed.

+
+

Note

+

model_construct() generally respects the model_config.extra setting on the provided model. That is, if model_config.extra == 'allow', then all extra passed values are added to the model instance's __dict__ and __pydantic_extra__ fields. If model_config.extra == 'ignore' (the default), then all extra passed values are ignored. Because no validation is performed with a call to model_construct(), having model_config.extra == 'forbid' does not result in an error if extra values are passed, but they will be ignored.

+
+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
_fields_setNoneThe set of field names accepted for the Model instance.None
valuesNoneTrusted or pre-validated data dictionary.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
NoneA new instance of the Model class with validated data.
+

model_json_schema

+
def model_json_schema(
+    by_alias: 'bool' = True,
+    ref_template: 'str' = '#/$defs/{model}',
+    schema_generator: 'type[GenerateJsonSchema]' = <class 'pydantic.json_schema.GenerateJsonSchema'>,
+    mode: 'JsonSchemaMode' = 'validation'
+) -> 'dict[str, Any]'
+
+

Generates a JSON schema for a model class.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
by_aliasNoneWhether to use attribute aliases or not.None
ref_templateNoneThe reference template.None
schema_generatorNoneTo override the logic used to generate the JSON schema, as a subclass of
GenerateJsonSchema with your desired modifications
None
modeNoneThe mode in which to generate the schema.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
NoneThe JSON schema for the given model class.
+

model_parametrized_name

+
def model_parametrized_name(
+    params: 'tuple[type[Any], ...]'
+) -> 'str'
+
+

Compute the class name for parametrizations of generic classes.

+

This method can be overridden to achieve a custom naming scheme for generic BaseModels.

+

Parameters:

+ + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
paramsNoneTuple of types of the class. Given a generic class
Model with 2 type variables and a concrete model Model[str, int],
the value (str, int) would be passed to params.
None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
NoneString representing the new class where params are passed to cls as type variables.
+

Raises:

+ + + + + + + + + + + + + +
TypeDescription
TypeErrorRaised when trying to generate concrete names for non-generic models.
+

model_rebuild

+
def model_rebuild(
+    *,
+    force: 'bool' = False,
+    raise_errors: 'bool' = True,
+    _parent_namespace_depth: 'int' = 2,
+    _types_namespace: 'dict[str, Any] | None' = None
+) -> 'bool | None'
+
+

Try to rebuild the pydantic-core schema for the model.

+

This may be necessary when one of the annotations is a ForwardRef which could not be resolved during the initial attempt to build the schema, and automatic rebuilding fails.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
forceNoneWhether to force the rebuilding of the model schema, defaults to False.None
raise_errorsNoneWhether to raise errors, defaults to True.None
_parent_namespace_depthNoneThe depth level of the parent namespace, defaults to 2.None
_types_namespaceNoneThe types namespace, defaults to None.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
NoneReturns None if the schema is already "complete" and rebuilding was not required.
If rebuilding was required, returns True if rebuilding was successful, otherwise False.
+

model_validate

+
def model_validate(
+    obj: 'Any',
+    *,
+    strict: 'bool | None' = None,
+    from_attributes: 'bool | None' = None,
+    context: 'dict[str, Any] | None' = None
+) -> 'Model'
+
+

Validate a pydantic model instance.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
objNoneThe object to validate.None
strictNoneWhether to enforce types strictly.None
from_attributesNoneWhether to extract data from object attributes.None
contextNoneAdditional context to pass to the validator.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
NoneThe validated model instance.
+

Raises:

+ + + + + + + + + + + + + +
TypeDescription
ValidationErrorIf the object could not be validated.
+

model_validate_json

+
def model_validate_json(
+    json_data: 'str | bytes | bytearray',
+    *,
+    strict: 'bool | None' = None,
+    context: 'dict[str, Any] | None' = None
+) -> 'Model'
+
+

Usage docs: docs.pydantic.dev/2.7/concepts/json/#json-parsing

+

Validate the given JSON data against the Pydantic model.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
json_dataNoneThe JSON data to validate.None
strictNoneWhether to enforce types strictly.None
contextNoneExtra variables to pass to the validator.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
NoneThe validated Pydantic model.
+

Raises:

+ + + + + + + + + + + + + +
TypeDescription
ValueErrorIf json_data is not a JSON string.
+

model_validate_strings

+
def model_validate_strings(
+    obj: 'Any',
+    *,
+    strict: 'bool | None' = None,
+    context: 'dict[str, Any] | None' = None
+) -> 'Model'
+
+

Validate the given object containing string data against the Pydantic model.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
objNoneThe object contains string data to validate.None
strictNoneWhether to enforce types strictly.None
contextNoneExtra variables to pass to the validator.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
NoneThe validated Pydantic model.
+

parse_file

+
def parse_file(
+    path: 'str | Path',
+    *,
+    content_type: 'str | None' = None,
+    encoding: 'str' = 'utf8',
+    proto: 'DeprecatedParseProtocol | None' = None,
+    allow_pickle: 'bool' = False
+) -> 'Model'
+
+

parse_obj

+
def parse_obj(
+    obj: 'Any'
+) -> 'Model'
+
+

parse_raw

+
def parse_raw(
+    b: 'str | bytes',
+    *,
+    content_type: 'str | None' = None,
+    encoding: 'str' = 'utf8',
+    proto: 'DeprecatedParseProtocol | None' = None,
+    allow_pickle: 'bool' = False
+) -> 'Model'
+
+

schema

+
def schema(
+    by_alias: 'bool' = True,
+    ref_template: 'str' = '#/$defs/{model}'
+) -> 'typing.Dict[str, Any]'
+
+

schema_json

+
def schema_json(
+    *,
+    by_alias: 'bool' = True,
+    ref_template: 'str' = '#/$defs/{model}',
+    **dumps_kwargs: 'Any'
+) -> 'str'
+
+

update_forward_refs

+
def update_forward_refs(
+    **localns: 'Any'
+) -> 'None'
+
+

validate

+
def validate(
+    value: 'Any'
+) -> 'Model'
+
+

validate_bbox

+
def validate_bbox(
+    v: Union[Tuple[float, float, float, float], Tuple[float, float, float, float, float, float]]
+)
+
+

Validate BBOX.

+

validate_datetime

+
def validate_datetime(
+    v
+)
+
+

Pgstac does not require the base validator for datetime.

+

validate_query_fields

+
def validate_query_fields(
+    values: Dict
+) -> Dict
+
+

Pgstac does not require the base validator for query fields.

+

validate_spatial

+
def validate_spatial(
+    v: Optional[Annotated[Union[geojson_pydantic.geometries.Point, geojson_pydantic.geometries.MultiPoint, geojson_pydantic.geometries.LineString, geojson_pydantic.geometries.MultiLineString, geojson_pydantic.geometries.Polygon, geojson_pydantic.geometries.MultiPolygon, geojson_pydantic.geometries.GeometryCollection], FieldInfo(annotation=NoneType, required=True, discriminator='type')]],
+    info: pydantic_core.core_schema.ValidationInfo
+)
+
+

Make sure bbox is not used with Intersects.

+
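A hedged sketch of the rule above (assuming the standard STAC `bbox`/`intersects` field names on this model):

```python
from pydantic import ValidationError
from titiler.pgstac.model import PgSTACSearch  # assumed class for this section

body = {
    "bbox": [0, 0, 10, 10],
    "intersects": {"type": "Point", "coordinates": [5, 5]},
}
try:
    PgSTACSearch.model_validate(body)
except ValidationError as err:
    # Supplying both a bbox and an intersects geometry is rejected.
    print(err.errors()[0]["msg"])
```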

Instance variables

+
model_extra
+
+

Get extra fields set during validation.

+
model_fields_set
+
+

Returns the set of fields that have been explicitly set on this model instance.

+

Methods

+

copy

+
def copy(
+    self: 'Model',
+    *,
+    include: 'AbstractSetIntStr | MappingIntStrAny | None' = None,
+    exclude: 'AbstractSetIntStr | MappingIntStrAny | None' = None,
+    update: 'typing.Dict[str, Any] | None' = None,
+    deep: 'bool' = False
+) -> 'Model'
+
+

Returns a copy of the model.

+
+

Deprecated

+

This method is now deprecated; use model_copy instead.

+
+

If you need include or exclude, use:

+
data = self.model_dump(include=include, exclude=exclude, round_trip=True)
+data = {**data, **(update or {})}
+copied = self.model_validate(data)
+
+

Parameters:

| Name | Type | Description | Default |
|---|---|---|---|
| include | None | Optional set or mapping specifying which fields to include in the copied model. | None |
| exclude | None | Optional set or mapping specifying which fields to exclude in the copied model. | None |
| update | None | Optional dictionary of field-value pairs to override field values in the copied model. | None |
| deep | None | If True, the values of fields that are Pydantic models will be deep-copied. | None |

Returns:

| Type | Description |
|---|---|
| None | A copy of the model with included, excluded and updated fields as specified. |

dict

+
def dict(
+    self,
+    *,
+    include: 'IncEx' = None,
+    exclude: 'IncEx' = None,
+    by_alias: 'bool' = False,
+    exclude_unset: 'bool' = False,
+    exclude_defaults: 'bool' = False,
+    exclude_none: 'bool' = False
+) -> 'typing.Dict[str, Any]'
+
+

json

+
def json(
+    self,
+    *,
+    include: 'IncEx' = None,
+    exclude: 'IncEx' = None,
+    by_alias: 'bool' = False,
+    exclude_unset: 'bool' = False,
+    exclude_defaults: 'bool' = False,
+    exclude_none: 'bool' = False,
+    encoder: 'typing.Callable[[Any], Any] | None' = PydanticUndefined,
+    models_as_dict: 'bool' = PydanticUndefined,
+    **dumps_kwargs: 'Any'
+) -> 'str'
+
+

model_copy

+
def model_copy(
+    self: 'Model',
+    *,
+    update: 'dict[str, Any] | None' = None,
+    deep: 'bool' = False
+) -> 'Model'
+
+

Usage docs: docs.pydantic.dev/2.7/concepts/serialization/#model_copy

+

Returns a copy of the model.

+

Parameters:

| Name | Type | Description | Default |
|---|---|---|---|
| update | None | Values to change/add in the new model. Note: the data is not validated before creating the new model. You should trust this data. | None |
| deep | None | Set to `True` to make a deep copy of the model. | None |

Returns:

| Type | Description |
|---|---|
| None | New model instance. |
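Generic pydantic usage, for illustration (not specific to this module):

```python
from pydantic import BaseModel

class Example(BaseModel):
    name: str
    limit: int = 10

m = Example(name="demo")
m2 = m.model_copy(update={"limit": 50})  # `update` values are NOT re-validated
print(m.limit, m2.limit)  # 10 50
```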

model_dump

+
def model_dump(
+    self,
+    *,
+    mode: "Literal['json', 'python'] | str" = 'python',
+    include: 'IncEx' = None,
+    exclude: 'IncEx' = None,
+    context: 'dict[str, Any] | None' = None,
+    by_alias: 'bool' = False,
+    exclude_unset: 'bool' = False,
+    exclude_defaults: 'bool' = False,
+    exclude_none: 'bool' = False,
+    round_trip: 'bool' = False,
+    warnings: "bool | Literal['none', 'warn', 'error']" = True,
+    serialize_as_any: 'bool' = False
+) -> 'dict[str, Any]'
+
+

Usage docs: docs.pydantic.dev/2.7/concepts/serialization/#modelmodel_dump

+

Generate a dictionary representation of the model, optionally specifying which fields to include or exclude.

+

Parameters:

| Name | Type | Description | Default |
|---|---|---|---|
| mode | None | The mode in which `to_python` should run. If mode is 'json', the output will only contain JSON serializable types. If mode is 'python', the output may contain non-JSON-serializable Python objects. | None |
| include | None | A set of fields to include in the output. | None |
| exclude | None | A set of fields to exclude from the output. | None |
| context | None | Additional context to pass to the serializer. | None |
| by_alias | None | Whether to use the field's alias in the dictionary key if defined. | None |
| exclude_unset | None | Whether to exclude fields that have not been explicitly set. | None |
| exclude_defaults | None | Whether to exclude fields that are set to their default value. | None |
| exclude_none | None | Whether to exclude fields that have a value of `None`. | None |
| round_trip | None | If True, dumped values should be valid as input for non-idempotent types such as Json[T]. | None |
| warnings | None | How to handle serialization errors. False/"none" ignores them, True/"warn" logs errors, "error" raises a [`PydanticSerializationError`][pydantic_core.PydanticSerializationError]. | None |
| serialize_as_any | None | Whether to serialize fields with duck-typing serialization behavior. | None |

Returns:

| Type | Description |
|---|---|
| None | A dictionary representation of the model. |
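A short, generic pydantic illustration of the most common options:

```python
from typing import Optional
from pydantic import BaseModel

class Example(BaseModel):
    name: str
    limit: Optional[int] = None

m = Example(name="demo")
print(m.model_dump())                   # {'name': 'demo', 'limit': None}
print(m.model_dump(exclude_none=True))  # {'name': 'demo'}
print(m.model_dump(mode="json"))        # values restricted to JSON-serializable types
```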

model_dump_json

+
def model_dump_json(
+    self,
+    *,
+    indent: 'int | None' = None,
+    include: 'IncEx' = None,
+    exclude: 'IncEx' = None,
+    context: 'dict[str, Any] | None' = None,
+    by_alias: 'bool' = False,
+    exclude_unset: 'bool' = False,
+    exclude_defaults: 'bool' = False,
+    exclude_none: 'bool' = False,
+    round_trip: 'bool' = False,
+    warnings: "bool | Literal['none', 'warn', 'error']" = True,
+    serialize_as_any: 'bool' = False
+) -> 'str'
+
+

Usage docs: docs.pydantic.dev/2.7/concepts/serialization/#modelmodel_dump_json

+

Generates a JSON representation of the model using Pydantic's to_json method.

+

Parameters:

| Name | Type | Description | Default |
|---|---|---|---|
| indent | None | Indentation to use in the JSON output. If None is passed, the output will be compact. | None |
| include | None | Field(s) to include in the JSON output. | None |
| exclude | None | Field(s) to exclude from the JSON output. | None |
| context | None | Additional context to pass to the serializer. | None |
| by_alias | None | Whether to serialize using field aliases. | None |
| exclude_unset | None | Whether to exclude fields that have not been explicitly set. | None |
| exclude_defaults | None | Whether to exclude fields that are set to their default value. | None |
| exclude_none | None | Whether to exclude fields that have a value of `None`. | None |
| round_trip | None | If True, dumped values should be valid as input for non-idempotent types such as Json[T]. | None |
| warnings | None | How to handle serialization errors. False/"none" ignores them, True/"warn" logs errors, "error" raises a [`PydanticSerializationError`][pydantic_core.PydanticSerializationError]. | None |
| serialize_as_any | None | Whether to serialize fields with duck-typing serialization behavior. | None |

Returns:

| Type | Description |
|---|---|
| None | A JSON string representation of the model. |
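And the JSON-string variant, again as a generic pydantic illustration:

```python
from typing import Optional
from pydantic import BaseModel

class Example(BaseModel):
    name: str
    limit: Optional[int] = None

print(Example(name="demo").model_dump_json(exclude_none=True))  # {"name":"demo"}
```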

model_post_init

+
def model_post_init(
+    self,
+    _BaseModel__context: 'Any'
+) -> 'None'
+
+

Override this method to perform additional initialization after __init__ and model_construct.

+

This is useful if you want to do some validation that requires the entire model to be initialized.

+

RegisterResponse

+
class RegisterResponse(
+    /,
+    **data: 'Any'
+)
+
+

Response model for /register endpoint.

+
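A hedged, client-side sketch of how this response model is typically consumed. The route below and the request/response payloads are assumptions (they depend on how the titiler-pgstac factories are mounted); check the deployed application's OpenAPI documentation for the real contract.

```python
# Hypothetical usage; endpoint path and payload are assumptions.
import httpx
from titiler.pgstac.model import RegisterResponse

resp = httpx.post(
    "http://127.0.0.1:8081/searches/register",  # assumed mount point
    json={"collections": ["my-collection"]},
)
registered = RegisterResponse.model_validate(resp.json())
```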

Ancestors (in MRO)

* pydantic.main.BaseModel

Class variables

+
model_computed_fields
+
+
model_config
+
+
model_fields
+
+

Static methods

+

construct

+
def construct(
+    _fields_set: 'set[str] | None' = None,
+    **values: 'Any'
+) -> 'Model'
+
+

from_orm

+
def from_orm(
+    obj: 'Any'
+) -> 'Model'
+
+

model_construct

+
def model_construct(
+    _fields_set: 'set[str] | None' = None,
+    **values: 'Any'
+) -> 'Model'
+
+

Creates a new instance of the Model class with validated data.

+

Creates a new model setting __dict__ and __pydantic_fields_set__ from trusted or pre-validated data. Default values are respected, but no other validation is performed.

+
+

Note

+

model_construct() generally respects the model_config.extra setting on the provided model. That is, if model_config.extra == 'allow', then all extra passed values are added to the model instance's __dict__ and __pydantic_extra__ fields. If model_config.extra == 'ignore' (the default), then all extra passed values are ignored. Because no validation is performed with a call to model_construct(), having model_config.extra == 'forbid' does not result in an error if extra values are passed, but they will be ignored.

+
+

Parameters:

| Name | Type | Description | Default |
|---|---|---|---|
| _fields_set | None | The set of field names accepted for the Model instance. | None |
| values | None | Trusted or pre-validated data dictionary. | None |

Returns:

| Type | Description |
|---|---|
| None | A new instance of the `Model` class with validated data. |
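Generic pydantic behavior worth keeping in mind: `model_construct` performs no validation at all.

```python
from pydantic import BaseModel

class Example(BaseModel):
    count: int = 0

obj = Example.model_construct(count="not-an-int")  # accepted as-is, no validation
print(obj.count)  # 'not-an-int'
```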

model_json_schema

+
def model_json_schema(
+    by_alias: 'bool' = True,
+    ref_template: 'str' = '#/$defs/{model}',
+    schema_generator: 'type[GenerateJsonSchema]' = <class 'pydantic.json_schema.GenerateJsonSchema'>,
+    mode: 'JsonSchemaMode' = 'validation'
+) -> 'dict[str, Any]'
+
+

Generates a JSON schema for a model class.

+

Parameters:

| Name | Type | Description | Default |
|---|---|---|---|
| by_alias | None | Whether to use attribute aliases or not. | None |
| ref_template | None | The reference template. | None |
| schema_generator | None | To override the logic used to generate the JSON schema, as a subclass of `GenerateJsonSchema` with your desired modifications. | None |
| mode | None | The mode in which to generate the schema. | None |

Returns:

| Type | Description |
|---|---|
| None | The JSON schema for the given model class. |
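A quick illustration of inspecting the generated schema for this model:

```python
from titiler.pgstac.model import RegisterResponse

schema = RegisterResponse.model_json_schema()
# Print the schema title and the declared property names.
print(schema["title"], sorted(schema.get("properties", {})))
```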

model_parametrized_name

+
def model_parametrized_name(
+    params: 'tuple[type[Any], ...]'
+) -> 'str'
+
+

Compute the class name for parametrizations of generic classes.

+

This method can be overridden to achieve a custom naming scheme for generic BaseModels.

+

Parameters:

| Name | Type | Description | Default |
|---|---|---|---|
| params | None | Tuple of types of the class. Given a generic class `Model` with 2 type variables and a concrete model `Model[str, int]`, the value `(str, int)` would be passed to `params`. | None |

Returns:

| Type | Description |
|---|---|
| None | String representing the new class where `params` are passed to `cls` as type variables. |

Raises:

| Type | Description |
|---|---|
| TypeError | Raised when trying to generate concrete names for non-generic models. |
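This only matters for generic models; a generic pydantic illustration:

```python
from typing import Generic, List, TypeVar
from pydantic import BaseModel

T = TypeVar("T")

class Page(BaseModel, Generic[T]):
    items: List[T]

# Pydantic uses model_parametrized_name to name the concrete class:
print(Page[int].__name__)  # "Page[int]"
```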

model_rebuild

+
def model_rebuild(
+    *,
+    force: 'bool' = False,
+    raise_errors: 'bool' = True,
+    _parent_namespace_depth: 'int' = 2,
+    _types_namespace: 'dict[str, Any] | None' = None
+) -> 'bool | None'
+
+

Try to rebuild the pydantic-core schema for the model.

+

This may be necessary when one of the annotations is a ForwardRef which could not be resolved during the initial attempt to build the schema, and automatic rebuilding fails.

+

Parameters:

| Name | Type | Description | Default |
|---|---|---|---|
| force | None | Whether to force the rebuilding of the model schema, defaults to `False`. | None |
| raise_errors | None | Whether to raise errors, defaults to `True`. | None |
| _parent_namespace_depth | None | The depth level of the parent namespace, defaults to 2. | None |
| _types_namespace | None | The types namespace, defaults to `None`. | None |

Returns:

| Type | Description |
|---|---|
| None | Returns `None` if the schema is already "complete" and rebuilding was not required. If rebuilding _was_ required, returns `True` if rebuilding was successful, otherwise `False`. |
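A generic pydantic illustration of when a manual rebuild is needed:

```python
from typing import Optional
from pydantic import BaseModel

class Node(BaseModel):
    value: int
    next: Optional["Node"] = None  # forward reference

# If the forward reference could not be resolved when the class was defined,
# calling model_rebuild() after the target type exists completes the schema.
Node.model_rebuild()
print(Node(value=1, next=Node(value=2)).next.value)  # 2
```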

model_validate

+
def model_validate(
+    obj: 'Any',
+    *,
+    strict: 'bool | None' = None,
+    from_attributes: 'bool | None' = None,
+    context: 'dict[str, Any] | None' = None
+) -> 'Model'
+
+

Validate a pydantic model instance.

+

Parameters:

| Name | Type | Description | Default |
|---|---|---|---|
| obj | None | The object to validate. | None |
| strict | None | Whether to enforce types strictly. | None |
| from_attributes | None | Whether to extract data from object attributes. | None |
| context | None | Additional context to pass to the validator. | None |

Returns:

| Type | Description |
|---|---|
| None | The validated model instance. |

Raises:

| Type | Description |
|---|---|
| ValidationError | If the object could not be validated. |

model_validate_json

+
def model_validate_json(
+    json_data: 'str | bytes | bytearray',
+    *,
+    strict: 'bool | None' = None,
+    context: 'dict[str, Any] | None' = None
+) -> 'Model'
+
+

Usage docs: docs.pydantic.dev/2.7/concepts/json/#json-parsing

+

Validate the given JSON data against the Pydantic model.

+

Parameters:

| Name | Type | Description | Default |
|---|---|---|---|
| json_data | None | The JSON data to validate. | None |
| strict | None | Whether to enforce types strictly. | None |
| context | None | Extra variables to pass to the validator. | None |

Returns:

| Type | Description |
|---|---|
| None | The validated Pydantic model. |

Raises:

| Type | Description |
|---|---|
| ValueError | If `json_data` is not a JSON string. |

model_validate_strings

+
def model_validate_strings(
+    obj: 'Any',
+    *,
+    strict: 'bool | None' = None,
+    context: 'dict[str, Any] | None' = None
+) -> 'Model'
+
+

Validate the given object, which contains string data, against the Pydantic model.

+

Parameters:

| Name | Type | Description | Default |
|---|---|---|---|
| obj | None | The object containing string data to validate. | None |
| strict | None | Whether to enforce types strictly. | None |
| context | None | Extra variables to pass to the validator. | None |

Returns:

| Type | Description |
|---|---|
| None | The validated Pydantic model. |

parse_file

+
def parse_file(
+    path: 'str | Path',
+    *,
+    content_type: 'str | None' = None,
+    encoding: 'str' = 'utf8',
+    proto: 'DeprecatedParseProtocol | None' = None,
+    allow_pickle: 'bool' = False
+) -> 'Model'
+
+

parse_obj

+
def parse_obj(
+    obj: 'Any'
+) -> 'Model'
+
+

parse_raw

+
def parse_raw(
+    b: 'str | bytes',
+    *,
+    content_type: 'str | None' = None,
+    encoding: 'str' = 'utf8',
+    proto: 'DeprecatedParseProtocol | None' = None,
+    allow_pickle: 'bool' = False
+) -> 'Model'
+
+

schema

+
def schema(
+    by_alias: 'bool' = True,
+    ref_template: 'str' = '#/$defs/{model}'
+) -> 'typing.Dict[str, Any]'
+
+

schema_json

+
def schema_json(
+    *,
+    by_alias: 'bool' = True,
+    ref_template: 'str' = '#/$defs/{model}',
+    **dumps_kwargs: 'Any'
+) -> 'str'
+
+

update_forward_refs

+
def update_forward_refs(
+    **localns: 'Any'
+) -> 'None'
+
+

validate

+
def validate(
+    value: 'Any'
+) -> 'Model'
+
+

Instance variables

+
model_extra
+
+

Get extra fields set during validation.

+
model_fields_set
+
+

Returns the set of fields that have been explicitly set on this model instance.

+

Methods

+

copy

+
def copy(
+    self: 'Model',
+    *,
+    include: 'AbstractSetIntStr | MappingIntStrAny | None' = None,
+    exclude: 'AbstractSetIntStr | MappingIntStrAny | None' = None,
+    update: 'typing.Dict[str, Any] | None' = None,
+    deep: 'bool' = False
+) -> 'Model'
+
+

Returns a copy of the model.

+
+

Deprecated

+

This method is now deprecated; use model_copy instead.

+
+

If you need include or exclude, use:

+
data = self.model_dump(include=include, exclude=exclude, round_trip=True)
+data = {**data, **(update or {})}
+copied = self.model_validate(data)
+
+

Parameters:

| Name | Type | Description | Default |
|---|---|---|---|
| include | None | Optional set or mapping specifying which fields to include in the copied model. | None |
| exclude | None | Optional set or mapping specifying which fields to exclude in the copied model. | None |
| update | None | Optional dictionary of field-value pairs to override field values in the copied model. | None |
| deep | None | If True, the values of fields that are Pydantic models will be deep-copied. | None |

Returns:

| Type | Description |
|---|---|
| None | A copy of the model with included, excluded and updated fields as specified. |

dict

+
def dict(
+    self,
+    *,
+    include: 'IncEx' = None,
+    exclude: 'IncEx' = None,
+    by_alias: 'bool' = False,
+    exclude_unset: 'bool' = False,
+    exclude_defaults: 'bool' = False,
+    exclude_none: 'bool' = False
+) -> 'typing.Dict[str, Any]'
+
+

json

+
def json(
+    self,
+    *,
+    include: 'IncEx' = None,
+    exclude: 'IncEx' = None,
+    by_alias: 'bool' = False,
+    exclude_unset: 'bool' = False,
+    exclude_defaults: 'bool' = False,
+    exclude_none: 'bool' = False,
+    encoder: 'typing.Callable[[Any], Any] | None' = PydanticUndefined,
+    models_as_dict: 'bool' = PydanticUndefined,
+    **dumps_kwargs: 'Any'
+) -> 'str'
+
+

model_copy

+
def model_copy(
+    self: 'Model',
+    *,
+    update: 'dict[str, Any] | None' = None,
+    deep: 'bool' = False
+) -> 'Model'
+
+

Usage docs: docs.pydantic.dev/2.7/concepts/serialization/#model_copy

+

Returns a copy of the model.

+

Parameters:

| Name | Type | Description | Default |
|---|---|---|---|
| update | None | Values to change/add in the new model. Note: the data is not validated before creating the new model. You should trust this data. | None |
| deep | None | Set to `True` to make a deep copy of the model. | None |

Returns:

| Type | Description |
|---|---|
| None | New model instance. |

model_dump

+
def model_dump(
+    self,
+    *,
+    mode: "Literal['json', 'python'] | str" = 'python',
+    include: 'IncEx' = None,
+    exclude: 'IncEx' = None,
+    context: 'dict[str, Any] | None' = None,
+    by_alias: 'bool' = False,
+    exclude_unset: 'bool' = False,
+    exclude_defaults: 'bool' = False,
+    exclude_none: 'bool' = False,
+    round_trip: 'bool' = False,
+    warnings: "bool | Literal['none', 'warn', 'error']" = True,
+    serialize_as_any: 'bool' = False
+) -> 'dict[str, Any]'
+
+

Usage docs: docs.pydantic.dev/2.7/concepts/serialization/#modelmodel_dump

+

Generate a dictionary representation of the model, optionally specifying which fields to include or exclude.

+

Parameters:

| Name | Type | Description | Default |
|---|---|---|---|
| mode | None | The mode in which `to_python` should run. If mode is 'json', the output will only contain JSON serializable types. If mode is 'python', the output may contain non-JSON-serializable Python objects. | None |
| include | None | A set of fields to include in the output. | None |
| exclude | None | A set of fields to exclude from the output. | None |
| context | None | Additional context to pass to the serializer. | None |
| by_alias | None | Whether to use the field's alias in the dictionary key if defined. | None |
| exclude_unset | None | Whether to exclude fields that have not been explicitly set. | None |
| exclude_defaults | None | Whether to exclude fields that are set to their default value. | None |
| exclude_none | None | Whether to exclude fields that have a value of `None`. | None |
| round_trip | None | If True, dumped values should be valid as input for non-idempotent types such as Json[T]. | None |
| warnings | None | How to handle serialization errors. False/"none" ignores them, True/"warn" logs errors, "error" raises a [`PydanticSerializationError`][pydantic_core.PydanticSerializationError]. | None |
| serialize_as_any | None | Whether to serialize fields with duck-typing serialization behavior. | None |

Returns:

| Type | Description |
|---|---|
| None | A dictionary representation of the model. |

model_dump_json

+
def model_dump_json(
+    self,
+    *,
+    indent: 'int | None' = None,
+    include: 'IncEx' = None,
+    exclude: 'IncEx' = None,
+    context: 'dict[str, Any] | None' = None,
+    by_alias: 'bool' = False,
+    exclude_unset: 'bool' = False,
+    exclude_defaults: 'bool' = False,
+    exclude_none: 'bool' = False,
+    round_trip: 'bool' = False,
+    warnings: "bool | Literal['none', 'warn', 'error']" = True,
+    serialize_as_any: 'bool' = False
+) -> 'str'
+
+

Usage docs: docs.pydantic.dev/2.7/concepts/serialization/#modelmodel_dump_json

+

Generates a JSON representation of the model using Pydantic's to_json method.

+

Parameters:

| Name | Type | Description | Default |
|---|---|---|---|
| indent | None | Indentation to use in the JSON output. If None is passed, the output will be compact. | None |
| include | None | Field(s) to include in the JSON output. | None |
| exclude | None | Field(s) to exclude from the JSON output. | None |
| context | None | Additional context to pass to the serializer. | None |
| by_alias | None | Whether to serialize using field aliases. | None |
| exclude_unset | None | Whether to exclude fields that have not been explicitly set. | None |
| exclude_defaults | None | Whether to exclude fields that are set to their default value. | None |
| exclude_none | None | Whether to exclude fields that have a value of `None`. | None |
| round_trip | None | If True, dumped values should be valid as input for non-idempotent types such as Json[T]. | None |
| warnings | None | How to handle serialization errors. False/"none" ignores them, True/"warn" logs errors, "error" raises a [`PydanticSerializationError`][pydantic_core.PydanticSerializationError]. | None |
| serialize_as_any | None | Whether to serialize fields with duck-typing serialization behavior. | None |

Returns:

| Type | Description |
|---|---|
| None | A JSON string representation of the model. |

model_post_init

+
def model_post_init(
+    self,
+    _BaseModel__context: 'Any'
+) -> 'None'
+
+

Override this method to perform additional initialization after __init__ and model_construct.

+

This is useful if you want to do some validation that requires the entire model to be initialized.

Search

+
class Search(
+    /,
+    **data: 'Any'
+)
+
+

PgSTAC Search entry.

+

ref: github.com/stac-utils/pgstac/blob/3499daa2bfa700ae7bb07503795c169bf2ebafc7/sql/004_search.sql#L907-L915

+

Ancestors (in MRO)

* pydantic.main.BaseModel

Class variables

+
model_computed_fields
+
+
model_config
+
+
model_fields
+
+

Static methods

+

construct

+
def construct(
+    _fields_set: 'set[str] | None' = None,
+    **values: 'Any'
+) -> 'Model'
+
+

from_orm

+
def from_orm(
+    obj: 'Any'
+) -> 'Model'
+
+

model_construct

+
def model_construct(
+    _fields_set: 'set[str] | None' = None,
+    **values: 'Any'
+) -> 'Model'
+
+

Creates a new instance of the Model class with validated data.

+

Creates a new model setting __dict__ and __pydantic_fields_set__ from trusted or pre-validated data. Default values are respected, but no other validation is performed.

+
+

Note

+

model_construct() generally respects the model_config.extra setting on the provided model. That is, if model_config.extra == 'allow', then all extra passed values are added to the model instance's __dict__ and __pydantic_extra__ fields. If model_config.extra == 'ignore' (the default), then all extra passed values are ignored. Because no validation is performed with a call to model_construct(), having model_config.extra == 'forbid' does not result in an error if extra values are passed, but they will be ignored.

+
+

Parameters:

| Name | Type | Description | Default |
|---|---|---|---|
| _fields_set | None | The set of field names accepted for the Model instance. | None |
| values | None | Trusted or pre-validated data dictionary. | None |

Returns:

| Type | Description |
|---|---|
| None | A new instance of the `Model` class with validated data. |

model_json_schema

+
def model_json_schema(
+    by_alias: 'bool' = True,
+    ref_template: 'str' = '#/$defs/{model}',
+    schema_generator: 'type[GenerateJsonSchema]' = <class 'pydantic.json_schema.GenerateJsonSchema'>,
+    mode: 'JsonSchemaMode' = 'validation'
+) -> 'dict[str, Any]'
+
+

Generates a JSON schema for a model class.

+

Parameters:

| Name | Type | Description | Default |
|---|---|---|---|
| by_alias | None | Whether to use attribute aliases or not. | None |
| ref_template | None | The reference template. | None |
| schema_generator | None | To override the logic used to generate the JSON schema, as a subclass of `GenerateJsonSchema` with your desired modifications. | None |
| mode | None | The mode in which to generate the schema. | None |

Returns:

| Type | Description |
|---|---|
| None | The JSON schema for the given model class. |

model_parametrized_name

+
def model_parametrized_name(
+    params: 'tuple[type[Any], ...]'
+) -> 'str'
+
+

Compute the class name for parametrizations of generic classes.

+

This method can be overridden to achieve a custom naming scheme for generic BaseModels.

+

Parameters:

| Name | Type | Description | Default |
|---|---|---|---|
| params | None | Tuple of types of the class. Given a generic class `Model` with 2 type variables and a concrete model `Model[str, int]`, the value `(str, int)` would be passed to `params`. | None |

Returns:

| Type | Description |
|---|---|
| None | String representing the new class where `params` are passed to `cls` as type variables. |

Raises:

| Type | Description |
|---|---|
| TypeError | Raised when trying to generate concrete names for non-generic models. |

model_rebuild

+
def model_rebuild(
+    *,
+    force: 'bool' = False,
+    raise_errors: 'bool' = True,
+    _parent_namespace_depth: 'int' = 2,
+    _types_namespace: 'dict[str, Any] | None' = None
+) -> 'bool | None'
+
+

Try to rebuild the pydantic-core schema for the model.

+

This may be necessary when one of the annotations is a ForwardRef which could not be resolved during the initial attempt to build the schema, and automatic rebuilding fails.

+

Parameters:

| Name | Type | Description | Default |
|---|---|---|---|
| force | None | Whether to force the rebuilding of the model schema, defaults to `False`. | None |
| raise_errors | None | Whether to raise errors, defaults to `True`. | None |
| _parent_namespace_depth | None | The depth level of the parent namespace, defaults to 2. | None |
| _types_namespace | None | The types namespace, defaults to `None`. | None |

Returns:

| Type | Description |
|---|---|
| None | Returns `None` if the schema is already "complete" and rebuilding was not required. If rebuilding _was_ required, returns `True` if rebuilding was successful, otherwise `False`. |

model_validate

+
def model_validate(
+    obj: 'Any',
+    *,
+    strict: 'bool | None' = None,
+    from_attributes: 'bool | None' = None,
+    context: 'dict[str, Any] | None' = None
+) -> 'Model'
+
+

Validate a pydantic model instance.

+

Parameters:

| Name | Type | Description | Default |
|---|---|---|---|
| obj | None | The object to validate. | None |
| strict | None | Whether to enforce types strictly. | None |
| from_attributes | None | Whether to extract data from object attributes. | None |
| context | None | Additional context to pass to the validator. | None |

Returns:

| Type | Description |
|---|---|
| None | The validated model instance. |

Raises:

| Type | Description |
|---|---|
| ValidationError | If the object could not be validated. |

model_validate_json

+
def model_validate_json(
+    json_data: 'str | bytes | bytearray',
+    *,
+    strict: 'bool | None' = None,
+    context: 'dict[str, Any] | None' = None
+) -> 'Model'
+
+

Usage docs: docs.pydantic.dev/2.7/concepts/json/#json-parsing

+

Validate the given JSON data against the Pydantic model.

+

Parameters:

| Name | Type | Description | Default |
|---|---|---|---|
| json_data | None | The JSON data to validate. | None |
| strict | None | Whether to enforce types strictly. | None |
| context | None | Extra variables to pass to the validator. | None |

Returns:

| Type | Description |
|---|---|
| None | The validated Pydantic model. |

Raises:

| Type | Description |
|---|---|
| ValueError | If `json_data` is not a JSON string. |

model_validate_strings

+
def model_validate_strings(
+    obj: 'Any',
+    *,
+    strict: 'bool | None' = None,
+    context: 'dict[str, Any] | None' = None
+) -> 'Model'
+
+

Validate the given object, which contains string data, against the Pydantic model.

+

Parameters:

| Name | Type | Description | Default |
|---|---|---|---|
| obj | None | The object containing string data to validate. | None |
| strict | None | Whether to enforce types strictly. | None |
| context | None | Extra variables to pass to the validator. | None |

Returns:

| Type | Description |
|---|---|
| None | The validated Pydantic model. |

parse_file

+
def parse_file(
+    path: 'str | Path',
+    *,
+    content_type: 'str | None' = None,
+    encoding: 'str' = 'utf8',
+    proto: 'DeprecatedParseProtocol | None' = None,
+    allow_pickle: 'bool' = False
+) -> 'Model'
+
+

parse_obj

+
def parse_obj(
+    obj: 'Any'
+) -> 'Model'
+
+

parse_raw

+
def parse_raw(
+    b: 'str | bytes',
+    *,
+    content_type: 'str | None' = None,
+    encoding: 'str' = 'utf8',
+    proto: 'DeprecatedParseProtocol | None' = None,
+    allow_pickle: 'bool' = False
+) -> 'Model'
+
+

schema

+
def schema(
+    by_alias: 'bool' = True,
+    ref_template: 'str' = '#/$defs/{model}'
+) -> 'typing.Dict[str, Any]'
+
+

schema_json

+
def schema_json(
+    *,
+    by_alias: 'bool' = True,
+    ref_template: 'str' = '#/$defs/{model}',
+    **dumps_kwargs: 'Any'
+) -> 'str'
+
+

update_forward_refs

+
def update_forward_refs(
+    **localns: 'Any'
+) -> 'None'
+
+

validate

+
def validate(
+    value: 'Any'
+) -> 'Model'
+
+

validate_metadata

+
def validate_metadata(
+    v
+)
+
+

Set SearchType.search when not present in metadata.

+

Instance variables

+
model_extra
+
+

Get extra fields set during validation.

+
model_fields_set
+
+

Returns the set of fields that have been explicitly set on this model instance.

+

Methods

+

copy

+
def copy(
+    self: 'Model',
+    *,
+    include: 'AbstractSetIntStr | MappingIntStrAny | None' = None,
+    exclude: 'AbstractSetIntStr | MappingIntStrAny | None' = None,
+    update: 'typing.Dict[str, Any] | None' = None,
+    deep: 'bool' = False
+) -> 'Model'
+
+

Returns a copy of the model.

+
+

Deprecated

+

This method is now deprecated; use model_copy instead.

+
+

If you need include or exclude, use:

+
data = self.model_dump(include=include, exclude=exclude, round_trip=True)
+data = {**data, **(update or {})}
+copied = self.model_validate(data)
+
+

Parameters:

| Name | Type | Description | Default |
|---|---|---|---|
| include | None | Optional set or mapping specifying which fields to include in the copied model. | None |
| exclude | None | Optional set or mapping specifying which fields to exclude in the copied model. | None |
| update | None | Optional dictionary of field-value pairs to override field values in the copied model. | None |
| deep | None | If True, the values of fields that are Pydantic models will be deep-copied. | None |

Returns:

| Type | Description |
|---|---|
| None | A copy of the model with included, excluded and updated fields as specified. |

dict

+
def dict(
+    self,
+    *,
+    include: 'IncEx' = None,
+    exclude: 'IncEx' = None,
+    by_alias: 'bool' = False,
+    exclude_unset: 'bool' = False,
+    exclude_defaults: 'bool' = False,
+    exclude_none: 'bool' = False
+) -> 'typing.Dict[str, Any]'
+
+

json

+
def json(
+    self,
+    *,
+    include: 'IncEx' = None,
+    exclude: 'IncEx' = None,
+    by_alias: 'bool' = False,
+    exclude_unset: 'bool' = False,
+    exclude_defaults: 'bool' = False,
+    exclude_none: 'bool' = False,
+    encoder: 'typing.Callable[[Any], Any] | None' = PydanticUndefined,
+    models_as_dict: 'bool' = PydanticUndefined,
+    **dumps_kwargs: 'Any'
+) -> 'str'
+
+

model_copy

+
def model_copy(
+    self: 'Model',
+    *,
+    update: 'dict[str, Any] | None' = None,
+    deep: 'bool' = False
+) -> 'Model'
+
+

Usage docs: docs.pydantic.dev/2.7/concepts/serialization/#model_copy

+

Returns a copy of the model.

+

Parameters:

| Name | Type | Description | Default |
|---|---|---|---|
| update | None | Values to change/add in the new model. Note: the data is not validated before creating the new model. You should trust this data. | None |
| deep | None | Set to `True` to make a deep copy of the model. | None |

Returns:

| Type | Description |
|---|---|
| None | New model instance. |

model_dump

+
def model_dump(
+    self,
+    *,
+    mode: "Literal['json', 'python'] | str" = 'python',
+    include: 'IncEx' = None,
+    exclude: 'IncEx' = None,
+    context: 'dict[str, Any] | None' = None,
+    by_alias: 'bool' = False,
+    exclude_unset: 'bool' = False,
+    exclude_defaults: 'bool' = False,
+    exclude_none: 'bool' = False,
+    round_trip: 'bool' = False,
+    warnings: "bool | Literal['none', 'warn', 'error']" = True,
+    serialize_as_any: 'bool' = False
+) -> 'dict[str, Any]'
+
+

Usage docs: docs.pydantic.dev/2.7/concepts/serialization/#modelmodel_dump

+

Generate a dictionary representation of the model, optionally specifying which fields to include or exclude.

+

Parameters:

| Name | Type | Description | Default |
|---|---|---|---|
| mode | None | The mode in which `to_python` should run. If mode is 'json', the output will only contain JSON serializable types. If mode is 'python', the output may contain non-JSON-serializable Python objects. | None |
| include | None | A set of fields to include in the output. | None |
| exclude | None | A set of fields to exclude from the output. | None |
| context | None | Additional context to pass to the serializer. | None |
| by_alias | None | Whether to use the field's alias in the dictionary key if defined. | None |
| exclude_unset | None | Whether to exclude fields that have not been explicitly set. | None |
| exclude_defaults | None | Whether to exclude fields that are set to their default value. | None |
| exclude_none | None | Whether to exclude fields that have a value of `None`. | None |
| round_trip | None | If True, dumped values should be valid as input for non-idempotent types such as Json[T]. | None |
| warnings | None | How to handle serialization errors. False/"none" ignores them, True/"warn" logs errors, "error" raises a [`PydanticSerializationError`][pydantic_core.PydanticSerializationError]. | None |
| serialize_as_any | None | Whether to serialize fields with duck-typing serialization behavior. | None |

Returns:

| Type | Description |
|---|---|
| None | A dictionary representation of the model. |

model_dump_json

+
def model_dump_json(
+    self,
+    *,
+    indent: 'int | None' = None,
+    include: 'IncEx' = None,
+    exclude: 'IncEx' = None,
+    context: 'dict[str, Any] | None' = None,
+    by_alias: 'bool' = False,
+    exclude_unset: 'bool' = False,
+    exclude_defaults: 'bool' = False,
+    exclude_none: 'bool' = False,
+    round_trip: 'bool' = False,
+    warnings: "bool | Literal['none', 'warn', 'error']" = True,
+    serialize_as_any: 'bool' = False
+) -> 'str'
+
+

Usage docs: docs.pydantic.dev/2.7/concepts/serialization/#modelmodel_dump_json

+

Generates a JSON representation of the model using Pydantic's to_json method.

+

Parameters:

| Name | Type | Description | Default |
|---|---|---|---|
| indent | None | Indentation to use in the JSON output. If None is passed, the output will be compact. | None |
| include | None | Field(s) to include in the JSON output. | None |
| exclude | None | Field(s) to exclude from the JSON output. | None |
| context | None | Additional context to pass to the serializer. | None |
| by_alias | None | Whether to serialize using field aliases. | None |
| exclude_unset | None | Whether to exclude fields that have not been explicitly set. | None |
| exclude_defaults | None | Whether to exclude fields that are set to their default value. | None |
| exclude_none | None | Whether to exclude fields that have a value of `None`. | None |
| round_trip | None | If True, dumped values should be valid as input for non-idempotent types such as Json[T]. | None |
| warnings | None | How to handle serialization errors. False/"none" ignores them, True/"warn" logs errors, "error" raises a [`PydanticSerializationError`][pydantic_core.PydanticSerializationError]. | None |
| serialize_as_any | None | Whether to serialize fields with duck-typing serialization behavior. | None |

Returns:

| Type | Description |
|---|---|
| None | A JSON string representation of the model. |

model_post_init

+
def model_post_init(
+    self,
+    _BaseModel__context: 'Any'
+) -> 'None'
+
+

Override this method to perform additional initialization after __init__ and model_construct.

+

This is useful if you want to do some validation that requires the entire model to be initialized.

+ + + + + + + + + + \ No newline at end of file diff --git a/1.3.0/api/titiler/pgstac/model/model.md b/1.3.0/api/titiler/pgstac/model/model.md new file mode 100644 index 00000000..264e7493 --- /dev/null +++ b/1.3.0/api/titiler/pgstac/model/model.md @@ -0,0 +1,5416 @@ +# Module titiler.pgstac.model + +Titiler.pgstac models. + +Note: This is mostly a copy of https://github.com/stac-utils/stac-fastapi/blob/master/stac_fastapi/pgstac/stac_fastapi/pgstac/types/search.py + +## Variables + +```python3 +FilterLang +``` + +```python3 +Operator +``` + +## Classes + +### Context + +```python3 +class Context( + /, + **data: 'Any' +) +``` + +Context Model. + +#### Ancestors (in MRO) + +* pydantic.main.BaseModel + +#### Class variables + +```python3 +model_computed_fields +``` + +```python3 +model_config +``` + +```python3 +model_fields +``` + +#### Static methods + + +#### construct + +```python3 +def construct( + _fields_set: 'set[str] | None' = None, + **values: 'Any' +) -> 'Model' +``` + + +#### from_orm + +```python3 +def from_orm( + obj: 'Any' +) -> 'Model' +``` + + +#### model_construct + +```python3 +def model_construct( + _fields_set: 'set[str] | None' = None, + **values: 'Any' +) -> 'Model' +``` + +Creates a new instance of the `Model` class with validated data. + +Creates a new model setting `__dict__` and `__pydantic_fields_set__` from trusted or pre-validated data. +Default values are respected, but no other validation is performed. + +!!! note + `model_construct()` generally respects the `model_config.extra` setting on the provided model. + That is, if `model_config.extra == 'allow'`, then all extra passed values are added to the model instance's `__dict__` + and `__pydantic_extra__` fields. If `model_config.extra == 'ignore'` (the default), then all extra passed values are ignored. + Because no validation is performed with a call to `model_construct()`, having `model_config.extra == 'forbid'` does not result in + an error if extra values are passed, but they will be ignored. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| _fields_set | None | The set of field names accepted for the Model instance. | None | +| values | None | Trusted or pre-validated data dictionary. | None | + +**Returns:** + +| Type | Description | +|---|---| +| None | A new instance of the `Model` class with validated data. | + + +#### model_json_schema + +```python3 +def model_json_schema( + by_alias: 'bool' = True, + ref_template: 'str' = '#/$defs/{model}', + schema_generator: 'type[GenerateJsonSchema]' = , + mode: 'JsonSchemaMode' = 'validation' +) -> 'dict[str, Any]' +``` + +Generates a JSON schema for a model class. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| by_alias | None | Whether to use attribute aliases or not. | None | +| ref_template | None | The reference template. | None | +| schema_generator | None | To override the logic used to generate the JSON schema, as a subclass of
`GenerateJsonSchema` with your desired modifications | None | +| mode | None | The mode in which to generate the schema. | None | + +**Returns:** + +| Type | Description | +|---|---| +| None | The JSON schema for the given model class. | + + +#### model_parametrized_name + +```python3 +def model_parametrized_name( + params: 'tuple[type[Any], ...]' +) -> 'str' +``` + +Compute the class name for parametrizations of generic classes. + +This method can be overridden to achieve a custom naming scheme for generic BaseModels. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| params | None | Tuple of types of the class. Given a generic class
`Model` with 2 type variables and a concrete model `Model[str, int]`,
the value `(str, int)` would be passed to `params`. | None | + +**Returns:** + +| Type | Description | +|---|---| +| None | String representing the new class where `params` are passed to `cls` as type variables. | + +**Raises:** + +| Type | Description | +|---|---| +| TypeError | Raised when trying to generate concrete names for non-generic models. | + + +#### model_rebuild + +```python3 +def model_rebuild( + *, + force: 'bool' = False, + raise_errors: 'bool' = True, + _parent_namespace_depth: 'int' = 2, + _types_namespace: 'dict[str, Any] | None' = None +) -> 'bool | None' +``` + +Try to rebuild the pydantic-core schema for the model. + +This may be necessary when one of the annotations is a ForwardRef which could not be resolved during +the initial attempt to build the schema, and automatic rebuilding fails. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| force | None | Whether to force the rebuilding of the model schema, defaults to `False`. | None | +| raise_errors | None | Whether to raise errors, defaults to `True`. | None | +| _parent_namespace_depth | None | The depth level of the parent namespace, defaults to 2. | None | +| _types_namespace | None | The types namespace, defaults to `None`. | None | + +**Returns:** + +| Type | Description | +|---|---| +| None | Returns `None` if the schema is already "complete" and rebuilding was not required.
If rebuilding _was_ required, returns `True` if rebuilding was successful, otherwise `False`. | + + +#### model_validate + +```python3 +def model_validate( + obj: 'Any', + *, + strict: 'bool | None' = None, + from_attributes: 'bool | None' = None, + context: 'dict[str, Any] | None' = None +) -> 'Model' +``` + +Validate a pydantic model instance. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| obj | None | The object to validate. | None | +| strict | None | Whether to enforce types strictly. | None | +| from_attributes | None | Whether to extract data from object attributes. | None | +| context | None | Additional context to pass to the validator. | None | + +**Returns:** + +| Type | Description | +|---|---| +| None | The validated model instance. | + +**Raises:** + +| Type | Description | +|---|---| +| ValidationError | If the object could not be validated. | + + +#### model_validate_json + +```python3 +def model_validate_json( + json_data: 'str | bytes | bytearray', + *, + strict: 'bool | None' = None, + context: 'dict[str, Any] | None' = None +) -> 'Model' +``` + +Usage docs: https://docs.pydantic.dev/2.7/concepts/json/#json-parsing + +Validate the given JSON data against the Pydantic model. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| json_data | None | The JSON data to validate. | None | +| strict | None | Whether to enforce types strictly. | None | +| context | None | Extra variables to pass to the validator. | None | + +**Returns:** + +| Type | Description | +|---|---| +| None | The validated Pydantic model. | + +**Raises:** + +| Type | Description | +|---|---| +| ValueError | If `json_data` is not a JSON string. | + + +#### model_validate_strings + +```python3 +def model_validate_strings( + obj: 'Any', + *, + strict: 'bool | None' = None, + context: 'dict[str, Any] | None' = None +) -> 'Model' +``` + +Validate the given object contains string data against the Pydantic model. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| obj | None | The object contains string data to validate. | None | +| strict | None | Whether to enforce types strictly. | None | +| context | None | Extra variables to pass to the validator. | None | + +**Returns:** + +| Type | Description | +|---|---| +| None | The validated Pydantic model. 
| + + +#### parse_file + +```python3 +def parse_file( + path: 'str | Path', + *, + content_type: 'str | None' = None, + encoding: 'str' = 'utf8', + proto: 'DeprecatedParseProtocol | None' = None, + allow_pickle: 'bool' = False +) -> 'Model' +``` + + +#### parse_obj + +```python3 +def parse_obj( + obj: 'Any' +) -> 'Model' +``` + + +#### parse_raw + +```python3 +def parse_raw( + b: 'str | bytes', + *, + content_type: 'str | None' = None, + encoding: 'str' = 'utf8', + proto: 'DeprecatedParseProtocol | None' = None, + allow_pickle: 'bool' = False +) -> 'Model' +``` + + +#### schema + +```python3 +def schema( + by_alias: 'bool' = True, + ref_template: 'str' = '#/$defs/{model}' +) -> 'typing.Dict[str, Any]' +``` + + +#### schema_json + +```python3 +def schema_json( + *, + by_alias: 'bool' = True, + ref_template: 'str' = '#/$defs/{model}', + **dumps_kwargs: 'Any' +) -> 'str' +``` + + +#### update_forward_refs + +```python3 +def update_forward_refs( + **localns: 'Any' +) -> 'None' +``` + + +#### validate + +```python3 +def validate( + value: 'Any' +) -> 'Model' +``` + + +#### validate_limit + +```python3 +def validate_limit( + v, + info: pydantic_core.core_schema.ValidationInfo +) +``` + +validate limit. + +#### Instance variables + +```python3 +model_extra +``` + +Get extra fields set during validation. + +```python3 +model_fields_set +``` + +Returns the set of fields that have been explicitly set on this model instance. + +#### Methods + + +#### copy + +```python3 +def copy( + self: 'Model', + *, + include: 'AbstractSetIntStr | MappingIntStrAny | None' = None, + exclude: 'AbstractSetIntStr | MappingIntStrAny | None' = None, + update: 'typing.Dict[str, Any] | None' = None, + deep: 'bool' = False +) -> 'Model' +``` + +Returns a copy of the model. + +!!! warning "Deprecated" + This method is now deprecated; use `model_copy` instead. + +If you need `include` or `exclude`, use: + +```py +data = self.model_dump(include=include, exclude=exclude, round_trip=True) +data = {**data, **(update or {})} +copied = self.model_validate(data) +``` + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| include | None | Optional set or mapping specifying which fields to include in the copied model. | None | +| exclude | None | Optional set or mapping specifying which fields to exclude in the copied model. | None | +| update | None | Optional dictionary of field-value pairs to override field values in the copied model. | None | +| deep | None | If True, the values of fields that are Pydantic models will be deep-copied. | None | + +**Returns:** + +| Type | Description | +|---|---| +| None | A copy of the model with included, excluded and updated fields as specified. 
| + + +#### dict + +```python3 +def dict( + self, + *, + include: 'IncEx' = None, + exclude: 'IncEx' = None, + by_alias: 'bool' = False, + exclude_unset: 'bool' = False, + exclude_defaults: 'bool' = False, + exclude_none: 'bool' = False +) -> 'typing.Dict[str, Any]' +``` + + +#### json + +```python3 +def json( + self, + *, + include: 'IncEx' = None, + exclude: 'IncEx' = None, + by_alias: 'bool' = False, + exclude_unset: 'bool' = False, + exclude_defaults: 'bool' = False, + exclude_none: 'bool' = False, + encoder: 'typing.Callable[[Any], Any] | None' = PydanticUndefined, + models_as_dict: 'bool' = PydanticUndefined, + **dumps_kwargs: 'Any' +) -> 'str' +``` + + +#### model_copy + +```python3 +def model_copy( + self: 'Model', + *, + update: 'dict[str, Any] | None' = None, + deep: 'bool' = False +) -> 'Model' +``` + +Usage docs: https://docs.pydantic.dev/2.7/concepts/serialization/#model_copy + +Returns a copy of the model. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| update | None | Values to change/add in the new model. Note: the data is not validated
before creating the new model. You should trust this data. | None | +| deep | None | Set to `True` to make a deep copy of the model. | None | + +**Returns:** + +| Type | Description | +|---|---| +| None | New model instance. | + + +#### model_dump + +```python3 +def model_dump( + self, + *, + mode: "Literal['json', 'python'] | str" = 'python', + include: 'IncEx' = None, + exclude: 'IncEx' = None, + context: 'dict[str, Any] | None' = None, + by_alias: 'bool' = False, + exclude_unset: 'bool' = False, + exclude_defaults: 'bool' = False, + exclude_none: 'bool' = False, + round_trip: 'bool' = False, + warnings: "bool | Literal['none', 'warn', 'error']" = True, + serialize_as_any: 'bool' = False +) -> 'dict[str, Any]' +``` + +Usage docs: https://docs.pydantic.dev/2.7/concepts/serialization/#modelmodel_dump + +Generate a dictionary representation of the model, optionally specifying which fields to include or exclude. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| mode | None | The mode in which `to_python` should run.
If mode is 'json', the output will only contain JSON serializable types.
If mode is 'python', the output may contain non-JSON-serializable Python objects. | None | +| include | None | A set of fields to include in the output. | None | +| exclude | None | A set of fields to exclude from the output. | None | +| context | None | Additional context to pass to the serializer. | None | +| by_alias | None | Whether to use the field's alias in the dictionary key if defined. | None | +| exclude_unset | None | Whether to exclude fields that have not been explicitly set. | None | +| exclude_defaults | None | Whether to exclude fields that are set to their default value. | None | +| exclude_none | None | Whether to exclude fields that have a value of `None`. | None | +| round_trip | None | If True, dumped values should be valid as input for non-idempotent types such as Json[T]. | None | +| warnings | None | How to handle serialization errors. False/"none" ignores them, True/"warn" logs errors,
"error" raises a [`PydanticSerializationError`][pydantic_core.PydanticSerializationError]. | None | +| serialize_as_any | None | Whether to serialize fields with duck-typing serialization behavior. | None | + +**Returns:** + +| Type | Description | +|---|---| +| None | A dictionary representation of the model. | + + +#### model_dump_json + +```python3 +def model_dump_json( + self, + *, + indent: 'int | None' = None, + include: 'IncEx' = None, + exclude: 'IncEx' = None, + context: 'dict[str, Any] | None' = None, + by_alias: 'bool' = False, + exclude_unset: 'bool' = False, + exclude_defaults: 'bool' = False, + exclude_none: 'bool' = False, + round_trip: 'bool' = False, + warnings: "bool | Literal['none', 'warn', 'error']" = True, + serialize_as_any: 'bool' = False +) -> 'str' +``` + +Usage docs: https://docs.pydantic.dev/2.7/concepts/serialization/#modelmodel_dump_json + +Generates a JSON representation of the model using Pydantic's `to_json` method. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| indent | None | Indentation to use in the JSON output. If None is passed, the output will be compact. | None | +| include | None | Field(s) to include in the JSON output. | None | +| exclude | None | Field(s) to exclude from the JSON output. | None | +| context | None | Additional context to pass to the serializer. | None | +| by_alias | None | Whether to serialize using field aliases. | None | +| exclude_unset | None | Whether to exclude fields that have not been explicitly set. | None | +| exclude_defaults | None | Whether to exclude fields that are set to their default value. | None | +| exclude_none | None | Whether to exclude fields that have a value of `None`. | None | +| round_trip | None | If True, dumped values should be valid as input for non-idempotent types such as Json[T]. | None | +| warnings | None | How to handle serialization errors. False/"none" ignores them, True/"warn" logs errors,
"error" raises a [`PydanticSerializationError`][pydantic_core.PydanticSerializationError]. | None | +| serialize_as_any | None | Whether to serialize fields with duck-typing serialization behavior. | None | + +**Returns:** + +| Type | Description | +|---|---| +| None | A JSON string representation of the model. | + + +#### model_post_init + +```python3 +def model_post_init( + self, + _BaseModel__context: 'Any' +) -> 'None' +``` + +Override this method to perform additional initialization after `__init__` and `model_construct`. + +This is useful if you want to do some validation that requires the entire model to be initialized. + +### Info + +```python3 +class Info( + /, + **data: 'Any' +) +``` + +Response model for /info endpoint. + +#### Ancestors (in MRO) + +* pydantic.main.BaseModel + +#### Class variables + +```python3 +model_computed_fields +``` + +```python3 +model_config +``` + +```python3 +model_fields +``` + +#### Static methods + + +#### construct + +```python3 +def construct( + _fields_set: 'set[str] | None' = None, + **values: 'Any' +) -> 'Model' +``` + + +#### from_orm + +```python3 +def from_orm( + obj: 'Any' +) -> 'Model' +``` + + +#### model_construct + +```python3 +def model_construct( + _fields_set: 'set[str] | None' = None, + **values: 'Any' +) -> 'Model' +``` + +Creates a new instance of the `Model` class with validated data. + +Creates a new model setting `__dict__` and `__pydantic_fields_set__` from trusted or pre-validated data. +Default values are respected, but no other validation is performed. + +!!! note + `model_construct()` generally respects the `model_config.extra` setting on the provided model. + That is, if `model_config.extra == 'allow'`, then all extra passed values are added to the model instance's `__dict__` + and `__pydantic_extra__` fields. If `model_config.extra == 'ignore'` (the default), then all extra passed values are ignored. + Because no validation is performed with a call to `model_construct()`, having `model_config.extra == 'forbid'` does not result in + an error if extra values are passed, but they will be ignored. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| _fields_set | None | The set of field names accepted for the Model instance. | None | +| values | None | Trusted or pre-validated data dictionary. | None | + +**Returns:** + +| Type | Description | +|---|---| +| None | A new instance of the `Model` class with validated data. | + + +#### model_json_schema + +```python3 +def model_json_schema( + by_alias: 'bool' = True, + ref_template: 'str' = '#/$defs/{model}', + schema_generator: 'type[GenerateJsonSchema]' = , + mode: 'JsonSchemaMode' = 'validation' +) -> 'dict[str, Any]' +``` + +Generates a JSON schema for a model class. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| by_alias | None | Whether to use attribute aliases or not. | None | +| ref_template | None | The reference template. | None | +| schema_generator | None | To override the logic used to generate the JSON schema, as a subclass of
`GenerateJsonSchema` with your desired modifications | None | +| mode | None | The mode in which to generate the schema. | None | + +**Returns:** + +| Type | Description | +|---|---| +| None | The JSON schema for the given model class. | + + +#### model_parametrized_name + +```python3 +def model_parametrized_name( + params: 'tuple[type[Any], ...]' +) -> 'str' +``` + +Compute the class name for parametrizations of generic classes. + +This method can be overridden to achieve a custom naming scheme for generic BaseModels. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| params | None | Tuple of types of the class. Given a generic class
`Model` with 2 type variables and a concrete model `Model[str, int]`,
the value `(str, int)` would be passed to `params`. | None | + +**Returns:** + +| Type | Description | +|---|---| +| None | String representing the new class where `params` are passed to `cls` as type variables. | + +**Raises:** + +| Type | Description | +|---|---| +| TypeError | Raised when trying to generate concrete names for non-generic models. | + + +#### model_rebuild + +```python3 +def model_rebuild( + *, + force: 'bool' = False, + raise_errors: 'bool' = True, + _parent_namespace_depth: 'int' = 2, + _types_namespace: 'dict[str, Any] | None' = None +) -> 'bool | None' +``` + +Try to rebuild the pydantic-core schema for the model. + +This may be necessary when one of the annotations is a ForwardRef which could not be resolved during +the initial attempt to build the schema, and automatic rebuilding fails. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| force | None | Whether to force the rebuilding of the model schema, defaults to `False`. | None | +| raise_errors | None | Whether to raise errors, defaults to `True`. | None | +| _parent_namespace_depth | None | The depth level of the parent namespace, defaults to 2. | None | +| _types_namespace | None | The types namespace, defaults to `None`. | None | + +**Returns:** + +| Type | Description | +|---|---| +| None | Returns `None` if the schema is already "complete" and rebuilding was not required.
If rebuilding _was_ required, returns `True` if rebuilding was successful, otherwise `False`. | + + +#### model_validate + +```python3 +def model_validate( + obj: 'Any', + *, + strict: 'bool | None' = None, + from_attributes: 'bool | None' = None, + context: 'dict[str, Any] | None' = None +) -> 'Model' +``` + +Validate a pydantic model instance. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| obj | None | The object to validate. | None | +| strict | None | Whether to enforce types strictly. | None | +| from_attributes | None | Whether to extract data from object attributes. | None | +| context | None | Additional context to pass to the validator. | None | + +**Returns:** + +| Type | Description | +|---|---| +| None | The validated model instance. | + +**Raises:** + +| Type | Description | +|---|---| +| ValidationError | If the object could not be validated. | + + +#### model_validate_json + +```python3 +def model_validate_json( + json_data: 'str | bytes | bytearray', + *, + strict: 'bool | None' = None, + context: 'dict[str, Any] | None' = None +) -> 'Model' +``` + +Usage docs: https://docs.pydantic.dev/2.7/concepts/json/#json-parsing + +Validate the given JSON data against the Pydantic model. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| json_data | None | The JSON data to validate. | None | +| strict | None | Whether to enforce types strictly. | None | +| context | None | Extra variables to pass to the validator. | None | + +**Returns:** + +| Type | Description | +|---|---| +| None | The validated Pydantic model. | + +**Raises:** + +| Type | Description | +|---|---| +| ValueError | If `json_data` is not a JSON string. | + + +#### model_validate_strings + +```python3 +def model_validate_strings( + obj: 'Any', + *, + strict: 'bool | None' = None, + context: 'dict[str, Any] | None' = None +) -> 'Model' +``` + +Validate the given object contains string data against the Pydantic model. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| obj | None | The object contains string data to validate. | None | +| strict | None | Whether to enforce types strictly. | None | +| context | None | Extra variables to pass to the validator. | None | + +**Returns:** + +| Type | Description | +|---|---| +| None | The validated Pydantic model. | + + +#### parse_file + +```python3 +def parse_file( + path: 'str | Path', + *, + content_type: 'str | None' = None, + encoding: 'str' = 'utf8', + proto: 'DeprecatedParseProtocol | None' = None, + allow_pickle: 'bool' = False +) -> 'Model' +``` + + +#### parse_obj + +```python3 +def parse_obj( + obj: 'Any' +) -> 'Model' +``` + + +#### parse_raw + +```python3 +def parse_raw( + b: 'str | bytes', + *, + content_type: 'str | None' = None, + encoding: 'str' = 'utf8', + proto: 'DeprecatedParseProtocol | None' = None, + allow_pickle: 'bool' = False +) -> 'Model' +``` + + +#### schema + +```python3 +def schema( + by_alias: 'bool' = True, + ref_template: 'str' = '#/$defs/{model}' +) -> 'typing.Dict[str, Any]' +``` + + +#### schema_json + +```python3 +def schema_json( + *, + by_alias: 'bool' = True, + ref_template: 'str' = '#/$defs/{model}', + **dumps_kwargs: 'Any' +) -> 'str' +``` + + +#### update_forward_refs + +```python3 +def update_forward_refs( + **localns: 'Any' +) -> 'None' +``` + + +#### validate + +```python3 +def validate( + value: 'Any' +) -> 'Model' +``` + +#### Instance variables + +```python3 +model_extra +``` + +Get extra fields set during validation. 
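The `model_extra` attribute described above is inherited from pydantic's `BaseModel`, as are the other methods on this page. The sketch below is a minimal illustration on a hypothetical standalone model (not part of titiler.pgstac), assuming pydantic v2; it shows how extra fields surface in `model_extra` and how `model_construct` records only the fields it was given (see `model_fields_set` just below).

```python3
from pydantic import BaseModel, ConfigDict


class ExampleModel(BaseModel):
    """Hypothetical model used only to illustrate the inherited BaseModel API."""

    model_config = ConfigDict(extra="allow")

    name: str


# Extra keys are kept because `extra="allow"`, and show up in `model_extra`.
validated = ExampleModel(name="demo", note="not a declared field")
print(validated.model_extra)         # {'note': 'not a declared field'}

# `model_construct` skips validation and records only the fields it was given.
constructed = ExampleModel.model_construct(name="trusted")
print(constructed.model_fields_set)  # {'name'}
```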
+ +```python3 +model_fields_set +``` + +Returns the set of fields that have been explicitly set on this model instance. + +#### Methods + + +#### copy + +```python3 +def copy( + self: 'Model', + *, + include: 'AbstractSetIntStr | MappingIntStrAny | None' = None, + exclude: 'AbstractSetIntStr | MappingIntStrAny | None' = None, + update: 'typing.Dict[str, Any] | None' = None, + deep: 'bool' = False +) -> 'Model' +``` + +Returns a copy of the model. + +!!! warning "Deprecated" + This method is now deprecated; use `model_copy` instead. + +If you need `include` or `exclude`, use: + +```py +data = self.model_dump(include=include, exclude=exclude, round_trip=True) +data = {**data, **(update or {})} +copied = self.model_validate(data) +``` + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| include | None | Optional set or mapping specifying which fields to include in the copied model. | None | +| exclude | None | Optional set or mapping specifying which fields to exclude in the copied model. | None | +| update | None | Optional dictionary of field-value pairs to override field values in the copied model. | None | +| deep | None | If True, the values of fields that are Pydantic models will be deep-copied. | None | + +**Returns:** + +| Type | Description | +|---|---| +| None | A copy of the model with included, excluded and updated fields as specified. | + + +#### dict + +```python3 +def dict( + self, + *, + include: 'IncEx' = None, + exclude: 'IncEx' = None, + by_alias: 'bool' = False, + exclude_unset: 'bool' = False, + exclude_defaults: 'bool' = False, + exclude_none: 'bool' = False +) -> 'typing.Dict[str, Any]' +``` + + +#### json + +```python3 +def json( + self, + *, + include: 'IncEx' = None, + exclude: 'IncEx' = None, + by_alias: 'bool' = False, + exclude_unset: 'bool' = False, + exclude_defaults: 'bool' = False, + exclude_none: 'bool' = False, + encoder: 'typing.Callable[[Any], Any] | None' = PydanticUndefined, + models_as_dict: 'bool' = PydanticUndefined, + **dumps_kwargs: 'Any' +) -> 'str' +``` + + +#### model_copy + +```python3 +def model_copy( + self: 'Model', + *, + update: 'dict[str, Any] | None' = None, + deep: 'bool' = False +) -> 'Model' +``` + +Usage docs: https://docs.pydantic.dev/2.7/concepts/serialization/#model_copy + +Returns a copy of the model. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| update | None | Values to change/add in the new model. Note: the data is not validated
before creating the new model. You should trust this data. | None | +| deep | None | Set to `True` to make a deep copy of the model. | None | + +**Returns:** + +| Type | Description | +|---|---| +| None | New model instance. | + + +#### model_dump + +```python3 +def model_dump( + self, + *, + mode: "Literal['json', 'python'] | str" = 'python', + include: 'IncEx' = None, + exclude: 'IncEx' = None, + context: 'dict[str, Any] | None' = None, + by_alias: 'bool' = False, + exclude_unset: 'bool' = False, + exclude_defaults: 'bool' = False, + exclude_none: 'bool' = False, + round_trip: 'bool' = False, + warnings: "bool | Literal['none', 'warn', 'error']" = True, + serialize_as_any: 'bool' = False +) -> 'dict[str, Any]' +``` + +Usage docs: https://docs.pydantic.dev/2.7/concepts/serialization/#modelmodel_dump + +Generate a dictionary representation of the model, optionally specifying which fields to include or exclude. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| mode | None | The mode in which `to_python` should run.
If mode is 'json', the output will only contain JSON-serializable types.
If mode is 'python', the output may contain non-JSON-serializable Python objects. | None | +| include | None | A set of fields to include in the output. | None | +| exclude | None | A set of fields to exclude from the output. | None | +| context | None | Additional context to pass to the serializer. | None | +| by_alias | None | Whether to use the field's alias in the dictionary key if defined. | None | +| exclude_unset | None | Whether to exclude fields that have not been explicitly set. | None | +| exclude_defaults | None | Whether to exclude fields that are set to their default value. | None | +| exclude_none | None | Whether to exclude fields that have a value of `None`. | None | +| round_trip | None | If True, dumped values should be valid as input for non-idempotent types such as Json[T]. | None | +| warnings | None | How to handle serialization errors. False/"none" ignores them, True/"warn" logs errors,
"error" raises a [`PydanticSerializationError`][pydantic_core.PydanticSerializationError]. | None | +| serialize_as_any | None | Whether to serialize fields with duck-typing serialization behavior. | None | + +**Returns:** + +| Type | Description | +|---|---| +| None | A dictionary representation of the model. | + + +#### model_dump_json + +```python3 +def model_dump_json( + self, + *, + indent: 'int | None' = None, + include: 'IncEx' = None, + exclude: 'IncEx' = None, + context: 'dict[str, Any] | None' = None, + by_alias: 'bool' = False, + exclude_unset: 'bool' = False, + exclude_defaults: 'bool' = False, + exclude_none: 'bool' = False, + round_trip: 'bool' = False, + warnings: "bool | Literal['none', 'warn', 'error']" = True, + serialize_as_any: 'bool' = False +) -> 'str' +``` + +Usage docs: https://docs.pydantic.dev/2.7/concepts/serialization/#modelmodel_dump_json + +Generates a JSON representation of the model using Pydantic's `to_json` method. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| indent | None | Indentation to use in the JSON output. If None is passed, the output will be compact. | None | +| include | None | Field(s) to include in the JSON output. | None | +| exclude | None | Field(s) to exclude from the JSON output. | None | +| context | None | Additional context to pass to the serializer. | None | +| by_alias | None | Whether to serialize using field aliases. | None | +| exclude_unset | None | Whether to exclude fields that have not been explicitly set. | None | +| exclude_defaults | None | Whether to exclude fields that are set to their default value. | None | +| exclude_none | None | Whether to exclude fields that have a value of `None`. | None | +| round_trip | None | If True, dumped values should be valid as input for non-idempotent types such as Json[T]. | None | +| warnings | None | How to handle serialization errors. False/"none" ignores them, True/"warn" logs errors,
"error" raises a [`PydanticSerializationError`][pydantic_core.PydanticSerializationError]. | None | +| serialize_as_any | None | Whether to serialize fields with duck-typing serialization behavior. | None | + +**Returns:** + +| Type | Description | +|---|---| +| None | A JSON string representation of the model. | + + +#### model_post_init + +```python3 +def model_post_init( + self, + _BaseModel__context: 'Any' +) -> 'None' +``` + +Override this method to perform additional initialization after `__init__` and `model_construct`. + +This is useful if you want to do some validation that requires the entire model to be initialized. + +### Infos + +```python3 +class Infos( + /, + **data: 'Any' +) +``` + +Response model for /list endpoint. + +#### Ancestors (in MRO) + +* pydantic.main.BaseModel + +#### Class variables + +```python3 +model_computed_fields +``` + +```python3 +model_config +``` + +```python3 +model_fields +``` + +#### Static methods + + +#### construct + +```python3 +def construct( + _fields_set: 'set[str] | None' = None, + **values: 'Any' +) -> 'Model' +``` + + +#### from_orm + +```python3 +def from_orm( + obj: 'Any' +) -> 'Model' +``` + + +#### model_construct + +```python3 +def model_construct( + _fields_set: 'set[str] | None' = None, + **values: 'Any' +) -> 'Model' +``` + +Creates a new instance of the `Model` class with validated data. + +Creates a new model setting `__dict__` and `__pydantic_fields_set__` from trusted or pre-validated data. +Default values are respected, but no other validation is performed. + +!!! note + `model_construct()` generally respects the `model_config.extra` setting on the provided model. + That is, if `model_config.extra == 'allow'`, then all extra passed values are added to the model instance's `__dict__` + and `__pydantic_extra__` fields. If `model_config.extra == 'ignore'` (the default), then all extra passed values are ignored. + Because no validation is performed with a call to `model_construct()`, having `model_config.extra == 'forbid'` does not result in + an error if extra values are passed, but they will be ignored. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| _fields_set | None | The set of field names accepted for the Model instance. | None | +| values | None | Trusted or pre-validated data dictionary. | None | + +**Returns:** + +| Type | Description | +|---|---| +| None | A new instance of the `Model` class with validated data. | + + +#### model_json_schema + +```python3 +def model_json_schema( + by_alias: 'bool' = True, + ref_template: 'str' = '#/$defs/{model}', + schema_generator: 'type[GenerateJsonSchema]' = , + mode: 'JsonSchemaMode' = 'validation' +) -> 'dict[str, Any]' +``` + +Generates a JSON schema for a model class. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| by_alias | None | Whether to use attribute aliases or not. | None | +| ref_template | None | The reference template. | None | +| schema_generator | None | To override the logic used to generate the JSON schema, as a subclass of
`GenerateJsonSchema` with your desired modifications | None | +| mode | None | The mode in which to generate the schema. | None | + +**Returns:** + +| Type | Description | +|---|---| +| None | The JSON schema for the given model class. | + + +#### model_parametrized_name + +```python3 +def model_parametrized_name( + params: 'tuple[type[Any], ...]' +) -> 'str' +``` + +Compute the class name for parametrizations of generic classes. + +This method can be overridden to achieve a custom naming scheme for generic BaseModels. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| params | None | Tuple of types of the class. Given a generic class
`Model` with 2 type variables and a concrete model `Model[str, int]`,
the value `(str, int)` would be passed to `params`. | None | + +**Returns:** + +| Type | Description | +|---|---| +| None | String representing the new class where `params` are passed to `cls` as type variables. | + +**Raises:** + +| Type | Description | +|---|---| +| TypeError | Raised when trying to generate concrete names for non-generic models. | + + +#### model_rebuild + +```python3 +def model_rebuild( + *, + force: 'bool' = False, + raise_errors: 'bool' = True, + _parent_namespace_depth: 'int' = 2, + _types_namespace: 'dict[str, Any] | None' = None +) -> 'bool | None' +``` + +Try to rebuild the pydantic-core schema for the model. + +This may be necessary when one of the annotations is a ForwardRef which could not be resolved during +the initial attempt to build the schema, and automatic rebuilding fails. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| force | None | Whether to force the rebuilding of the model schema, defaults to `False`. | None | +| raise_errors | None | Whether to raise errors, defaults to `True`. | None | +| _parent_namespace_depth | None | The depth level of the parent namespace, defaults to 2. | None | +| _types_namespace | None | The types namespace, defaults to `None`. | None | + +**Returns:** + +| Type | Description | +|---|---| +| None | Returns `None` if the schema is already "complete" and rebuilding was not required.
If rebuilding _was_ required, returns `True` if rebuilding was successful, otherwise `False`. | + + +#### model_validate + +```python3 +def model_validate( + obj: 'Any', + *, + strict: 'bool | None' = None, + from_attributes: 'bool | None' = None, + context: 'dict[str, Any] | None' = None +) -> 'Model' +``` + +Validate a pydantic model instance. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| obj | None | The object to validate. | None | +| strict | None | Whether to enforce types strictly. | None | +| from_attributes | None | Whether to extract data from object attributes. | None | +| context | None | Additional context to pass to the validator. | None | + +**Returns:** + +| Type | Description | +|---|---| +| None | The validated model instance. | + +**Raises:** + +| Type | Description | +|---|---| +| ValidationError | If the object could not be validated. | + + +#### model_validate_json + +```python3 +def model_validate_json( + json_data: 'str | bytes | bytearray', + *, + strict: 'bool | None' = None, + context: 'dict[str, Any] | None' = None +) -> 'Model' +``` + +Usage docs: https://docs.pydantic.dev/2.7/concepts/json/#json-parsing + +Validate the given JSON data against the Pydantic model. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| json_data | None | The JSON data to validate. | None | +| strict | None | Whether to enforce types strictly. | None | +| context | None | Extra variables to pass to the validator. | None | + +**Returns:** + +| Type | Description | +|---|---| +| None | The validated Pydantic model. | + +**Raises:** + +| Type | Description | +|---|---| +| ValueError | If `json_data` is not a JSON string. | + + +#### model_validate_strings + +```python3 +def model_validate_strings( + obj: 'Any', + *, + strict: 'bool | None' = None, + context: 'dict[str, Any] | None' = None +) -> 'Model' +``` + +Validate the given object contains string data against the Pydantic model. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| obj | None | The object contains string data to validate. | None | +| strict | None | Whether to enforce types strictly. | None | +| context | None | Extra variables to pass to the validator. | None | + +**Returns:** + +| Type | Description | +|---|---| +| None | The validated Pydantic model. | + + +#### parse_file + +```python3 +def parse_file( + path: 'str | Path', + *, + content_type: 'str | None' = None, + encoding: 'str' = 'utf8', + proto: 'DeprecatedParseProtocol | None' = None, + allow_pickle: 'bool' = False +) -> 'Model' +``` + + +#### parse_obj + +```python3 +def parse_obj( + obj: 'Any' +) -> 'Model' +``` + + +#### parse_raw + +```python3 +def parse_raw( + b: 'str | bytes', + *, + content_type: 'str | None' = None, + encoding: 'str' = 'utf8', + proto: 'DeprecatedParseProtocol | None' = None, + allow_pickle: 'bool' = False +) -> 'Model' +``` + + +#### schema + +```python3 +def schema( + by_alias: 'bool' = True, + ref_template: 'str' = '#/$defs/{model}' +) -> 'typing.Dict[str, Any]' +``` + + +#### schema_json + +```python3 +def schema_json( + *, + by_alias: 'bool' = True, + ref_template: 'str' = '#/$defs/{model}', + **dumps_kwargs: 'Any' +) -> 'str' +``` + + +#### update_forward_refs + +```python3 +def update_forward_refs( + **localns: 'Any' +) -> 'None' +``` + + +#### validate + +```python3 +def validate( + value: 'Any' +) -> 'Model' +``` + +#### Instance variables + +```python3 +model_extra +``` + +Get extra fields set during validation. 
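Responses like `Infos` are typically produced and consumed through the JSON helpers documented here (`model_validate_json`, `model_dump`, `model_dump_json`). The round trip below is a hedged sketch on a small hypothetical model rather than the real `Infos` fields; only the inherited pydantic v2 method calls are the point.

```python3
import json

from pydantic import BaseModel


class ExampleItem(BaseModel):
    """Hypothetical stand-in; the inherited BaseModel methods are what matter here."""

    id: str
    count: int = 0


raw = json.dumps({"id": "abc", "count": 3})

item = ExampleItem.model_validate_json(raw)          # raises ValidationError on bad input
print(item.model_dump())                             # {'id': 'abc', 'count': 3}
print(item.model_dump_json(exclude_defaults=True))   # JSON string, default values omitted
```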
+ +```python3 +model_fields_set +``` + +Returns the set of fields that have been explicitly set on this model instance. + +#### Methods + + +#### copy + +```python3 +def copy( + self: 'Model', + *, + include: 'AbstractSetIntStr | MappingIntStrAny | None' = None, + exclude: 'AbstractSetIntStr | MappingIntStrAny | None' = None, + update: 'typing.Dict[str, Any] | None' = None, + deep: 'bool' = False +) -> 'Model' +``` + +Returns a copy of the model. + +!!! warning "Deprecated" + This method is now deprecated; use `model_copy` instead. + +If you need `include` or `exclude`, use: + +```py +data = self.model_dump(include=include, exclude=exclude, round_trip=True) +data = {**data, **(update or {})} +copied = self.model_validate(data) +``` + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| include | None | Optional set or mapping specifying which fields to include in the copied model. | None | +| exclude | None | Optional set or mapping specifying which fields to exclude in the copied model. | None | +| update | None | Optional dictionary of field-value pairs to override field values in the copied model. | None | +| deep | None | If True, the values of fields that are Pydantic models will be deep-copied. | None | + +**Returns:** + +| Type | Description | +|---|---| +| None | A copy of the model with included, excluded and updated fields as specified. | + + +#### dict + +```python3 +def dict( + self, + *, + include: 'IncEx' = None, + exclude: 'IncEx' = None, + by_alias: 'bool' = False, + exclude_unset: 'bool' = False, + exclude_defaults: 'bool' = False, + exclude_none: 'bool' = False +) -> 'typing.Dict[str, Any]' +``` + + +#### json + +```python3 +def json( + self, + *, + include: 'IncEx' = None, + exclude: 'IncEx' = None, + by_alias: 'bool' = False, + exclude_unset: 'bool' = False, + exclude_defaults: 'bool' = False, + exclude_none: 'bool' = False, + encoder: 'typing.Callable[[Any], Any] | None' = PydanticUndefined, + models_as_dict: 'bool' = PydanticUndefined, + **dumps_kwargs: 'Any' +) -> 'str' +``` + + +#### model_copy + +```python3 +def model_copy( + self: 'Model', + *, + update: 'dict[str, Any] | None' = None, + deep: 'bool' = False +) -> 'Model' +``` + +Usage docs: https://docs.pydantic.dev/2.7/concepts/serialization/#model_copy + +Returns a copy of the model. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| update | None | Values to change/add in the new model. Note: the data is not validated
before creating the new model. You should trust this data. | None | +| deep | None | Set to `True` to make a deep copy of the model. | None | + +**Returns:** + +| Type | Description | +|---|---| +| None | New model instance. | + + +#### model_dump + +```python3 +def model_dump( + self, + *, + mode: "Literal['json', 'python'] | str" = 'python', + include: 'IncEx' = None, + exclude: 'IncEx' = None, + context: 'dict[str, Any] | None' = None, + by_alias: 'bool' = False, + exclude_unset: 'bool' = False, + exclude_defaults: 'bool' = False, + exclude_none: 'bool' = False, + round_trip: 'bool' = False, + warnings: "bool | Literal['none', 'warn', 'error']" = True, + serialize_as_any: 'bool' = False +) -> 'dict[str, Any]' +``` + +Usage docs: https://docs.pydantic.dev/2.7/concepts/serialization/#modelmodel_dump + +Generate a dictionary representation of the model, optionally specifying which fields to include or exclude. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| mode | None | The mode in which `to_python` should run.
If mode is 'json', the output will only contain JSON-serializable types.
If mode is 'python', the output may contain non-JSON-serializable Python objects. | None | +| include | None | A set of fields to include in the output. | None | +| exclude | None | A set of fields to exclude from the output. | None | +| context | None | Additional context to pass to the serializer. | None | +| by_alias | None | Whether to use the field's alias in the dictionary key if defined. | None | +| exclude_unset | None | Whether to exclude fields that have not been explicitly set. | None | +| exclude_defaults | None | Whether to exclude fields that are set to their default value. | None | +| exclude_none | None | Whether to exclude fields that have a value of `None`. | None | +| round_trip | None | If True, dumped values should be valid as input for non-idempotent types such as Json[T]. | None | +| warnings | None | How to handle serialization errors. False/"none" ignores them, True/"warn" logs errors,
"error" raises a [`PydanticSerializationError`][pydantic_core.PydanticSerializationError]. | None | +| serialize_as_any | None | Whether to serialize fields with duck-typing serialization behavior. | None | + +**Returns:** + +| Type | Description | +|---|---| +| None | A dictionary representation of the model. | + + +#### model_dump_json + +```python3 +def model_dump_json( + self, + *, + indent: 'int | None' = None, + include: 'IncEx' = None, + exclude: 'IncEx' = None, + context: 'dict[str, Any] | None' = None, + by_alias: 'bool' = False, + exclude_unset: 'bool' = False, + exclude_defaults: 'bool' = False, + exclude_none: 'bool' = False, + round_trip: 'bool' = False, + warnings: "bool | Literal['none', 'warn', 'error']" = True, + serialize_as_any: 'bool' = False +) -> 'str' +``` + +Usage docs: https://docs.pydantic.dev/2.7/concepts/serialization/#modelmodel_dump_json + +Generates a JSON representation of the model using Pydantic's `to_json` method. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| indent | None | Indentation to use in the JSON output. If None is passed, the output will be compact. | None | +| include | None | Field(s) to include in the JSON output. | None | +| exclude | None | Field(s) to exclude from the JSON output. | None | +| context | None | Additional context to pass to the serializer. | None | +| by_alias | None | Whether to serialize using field aliases. | None | +| exclude_unset | None | Whether to exclude fields that have not been explicitly set. | None | +| exclude_defaults | None | Whether to exclude fields that are set to their default value. | None | +| exclude_none | None | Whether to exclude fields that have a value of `None`. | None | +| round_trip | None | If True, dumped values should be valid as input for non-idempotent types such as Json[T]. | None | +| warnings | None | How to handle serialization errors. False/"none" ignores them, True/"warn" logs errors,
"error" raises a [`PydanticSerializationError`][pydantic_core.PydanticSerializationError]. | None | +| serialize_as_any | None | Whether to serialize fields with duck-typing serialization behavior. | None | + +**Returns:** + +| Type | Description | +|---|---| +| None | A JSON string representation of the model. | + + +#### model_post_init + +```python3 +def model_post_init( + self, + _BaseModel__context: 'Any' +) -> 'None' +``` + +Override this method to perform additional initialization after `__init__` and `model_construct`. + +This is useful if you want to do some validation that requires the entire model to be initialized. + +### Link + +```python3 +class Link( + /, + **data: 'Any' +) +``` + +Link model. + +Ref: https://github.com/opengeospatial/ogcapi-tiles/blob/master/openapi/schemas/common-core/link.yaml + +Code generated using https://github.com/koxudaxi/datamodel-code-generator/ + +#### Ancestors (in MRO) + +* pydantic.main.BaseModel + +#### Class variables + +```python3 +model_computed_fields +``` + +```python3 +model_config +``` + +```python3 +model_fields +``` + +#### Static methods + + +#### construct + +```python3 +def construct( + _fields_set: 'set[str] | None' = None, + **values: 'Any' +) -> 'Model' +``` + + +#### from_orm + +```python3 +def from_orm( + obj: 'Any' +) -> 'Model' +``` + + +#### model_construct + +```python3 +def model_construct( + _fields_set: 'set[str] | None' = None, + **values: 'Any' +) -> 'Model' +``` + +Creates a new instance of the `Model` class with validated data. + +Creates a new model setting `__dict__` and `__pydantic_fields_set__` from trusted or pre-validated data. +Default values are respected, but no other validation is performed. + +!!! note + `model_construct()` generally respects the `model_config.extra` setting on the provided model. + That is, if `model_config.extra == 'allow'`, then all extra passed values are added to the model instance's `__dict__` + and `__pydantic_extra__` fields. If `model_config.extra == 'ignore'` (the default), then all extra passed values are ignored. + Because no validation is performed with a call to `model_construct()`, having `model_config.extra == 'forbid'` does not result in + an error if extra values are passed, but they will be ignored. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| _fields_set | None | The set of field names accepted for the Model instance. | None | +| values | None | Trusted or pre-validated data dictionary. | None | + +**Returns:** + +| Type | Description | +|---|---| +| None | A new instance of the `Model` class with validated data. | + + +#### model_json_schema + +```python3 +def model_json_schema( + by_alias: 'bool' = True, + ref_template: 'str' = '#/$defs/{model}', + schema_generator: 'type[GenerateJsonSchema]' = , + mode: 'JsonSchemaMode' = 'validation' +) -> 'dict[str, Any]' +``` + +Generates a JSON schema for a model class. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| by_alias | None | Whether to use attribute aliases or not. | None | +| ref_template | None | The reference template. | None | +| schema_generator | None | To override the logic used to generate the JSON schema, as a subclass of
`GenerateJsonSchema` with your desired modifications | None | +| mode | None | The mode in which to generate the schema. | None | + +**Returns:** + +| Type | Description | +|---|---| +| None | The JSON schema for the given model class. | + + +#### model_parametrized_name + +```python3 +def model_parametrized_name( + params: 'tuple[type[Any], ...]' +) -> 'str' +``` + +Compute the class name for parametrizations of generic classes. + +This method can be overridden to achieve a custom naming scheme for generic BaseModels. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| params | None | Tuple of types of the class. Given a generic class
`Model` with 2 type variables and a concrete model `Model[str, int]`,
the value `(str, int)` would be passed to `params`. | None | + +**Returns:** + +| Type | Description | +|---|---| +| None | String representing the new class where `params` are passed to `cls` as type variables. | + +**Raises:** + +| Type | Description | +|---|---| +| TypeError | Raised when trying to generate concrete names for non-generic models. | + + +#### model_rebuild + +```python3 +def model_rebuild( + *, + force: 'bool' = False, + raise_errors: 'bool' = True, + _parent_namespace_depth: 'int' = 2, + _types_namespace: 'dict[str, Any] | None' = None +) -> 'bool | None' +``` + +Try to rebuild the pydantic-core schema for the model. + +This may be necessary when one of the annotations is a ForwardRef which could not be resolved during +the initial attempt to build the schema, and automatic rebuilding fails. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| force | None | Whether to force the rebuilding of the model schema, defaults to `False`. | None | +| raise_errors | None | Whether to raise errors, defaults to `True`. | None | +| _parent_namespace_depth | None | The depth level of the parent namespace, defaults to 2. | None | +| _types_namespace | None | The types namespace, defaults to `None`. | None | + +**Returns:** + +| Type | Description | +|---|---| +| None | Returns `None` if the schema is already "complete" and rebuilding was not required.
If rebuilding _was_ required, returns `True` if rebuilding was successful, otherwise `False`. | + + +#### model_validate + +```python3 +def model_validate( + obj: 'Any', + *, + strict: 'bool | None' = None, + from_attributes: 'bool | None' = None, + context: 'dict[str, Any] | None' = None +) -> 'Model' +``` + +Validate a pydantic model instance. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| obj | None | The object to validate. | None | +| strict | None | Whether to enforce types strictly. | None | +| from_attributes | None | Whether to extract data from object attributes. | None | +| context | None | Additional context to pass to the validator. | None | + +**Returns:** + +| Type | Description | +|---|---| +| None | The validated model instance. | + +**Raises:** + +| Type | Description | +|---|---| +| ValidationError | If the object could not be validated. | + + +#### model_validate_json + +```python3 +def model_validate_json( + json_data: 'str | bytes | bytearray', + *, + strict: 'bool | None' = None, + context: 'dict[str, Any] | None' = None +) -> 'Model' +``` + +Usage docs: https://docs.pydantic.dev/2.7/concepts/json/#json-parsing + +Validate the given JSON data against the Pydantic model. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| json_data | None | The JSON data to validate. | None | +| strict | None | Whether to enforce types strictly. | None | +| context | None | Extra variables to pass to the validator. | None | + +**Returns:** + +| Type | Description | +|---|---| +| None | The validated Pydantic model. | + +**Raises:** + +| Type | Description | +|---|---| +| ValueError | If `json_data` is not a JSON string. | + + +#### model_validate_strings + +```python3 +def model_validate_strings( + obj: 'Any', + *, + strict: 'bool | None' = None, + context: 'dict[str, Any] | None' = None +) -> 'Model' +``` + +Validate the given object contains string data against the Pydantic model. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| obj | None | The object contains string data to validate. | None | +| strict | None | Whether to enforce types strictly. | None | +| context | None | Extra variables to pass to the validator. | None | + +**Returns:** + +| Type | Description | +|---|---| +| None | The validated Pydantic model. | + + +#### parse_file + +```python3 +def parse_file( + path: 'str | Path', + *, + content_type: 'str | None' = None, + encoding: 'str' = 'utf8', + proto: 'DeprecatedParseProtocol | None' = None, + allow_pickle: 'bool' = False +) -> 'Model' +``` + + +#### parse_obj + +```python3 +def parse_obj( + obj: 'Any' +) -> 'Model' +``` + + +#### parse_raw + +```python3 +def parse_raw( + b: 'str | bytes', + *, + content_type: 'str | None' = None, + encoding: 'str' = 'utf8', + proto: 'DeprecatedParseProtocol | None' = None, + allow_pickle: 'bool' = False +) -> 'Model' +``` + + +#### schema + +```python3 +def schema( + by_alias: 'bool' = True, + ref_template: 'str' = '#/$defs/{model}' +) -> 'typing.Dict[str, Any]' +``` + + +#### schema_json + +```python3 +def schema_json( + *, + by_alias: 'bool' = True, + ref_template: 'str' = '#/$defs/{model}', + **dumps_kwargs: 'Any' +) -> 'str' +``` + + +#### update_forward_refs + +```python3 +def update_forward_refs( + **localns: 'Any' +) -> 'None' +``` + + +#### validate + +```python3 +def validate( + value: 'Any' +) -> 'Model' +``` + +#### Instance variables + +```python3 +model_extra +``` + +Get extra fields set during validation. 
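Since `Link` follows the OGC API link schema referenced above, a common pattern is validating a plain dict and dumping it back without unset optional fields. A minimal sketch, assuming `href` and `rel` are accepted field names as in that schema (check `Link.model_fields` before relying on the exact names):

```python3
from titiler.pgstac.model import Link

# `href` and `rel` mirror the OGC link schema referenced above; treat the exact
# field names as an assumption and inspect Link.model_fields if unsure.
link = Link.model_validate(
    {"href": "https://example.com/searches/abc/info", "rel": "self"}
)

print(link.model_fields_set)               # fields explicitly provided to validation
print(link.model_dump(exclude_none=True))  # omit fields whose value is None
```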
+ +```python3 +model_fields_set +``` + +Returns the set of fields that have been explicitly set on this model instance. + +#### Methods + + +#### copy + +```python3 +def copy( + self: 'Model', + *, + include: 'AbstractSetIntStr | MappingIntStrAny | None' = None, + exclude: 'AbstractSetIntStr | MappingIntStrAny | None' = None, + update: 'typing.Dict[str, Any] | None' = None, + deep: 'bool' = False +) -> 'Model' +``` + +Returns a copy of the model. + +!!! warning "Deprecated" + This method is now deprecated; use `model_copy` instead. + +If you need `include` or `exclude`, use: + +```py +data = self.model_dump(include=include, exclude=exclude, round_trip=True) +data = {**data, **(update or {})} +copied = self.model_validate(data) +``` + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| include | None | Optional set or mapping specifying which fields to include in the copied model. | None | +| exclude | None | Optional set or mapping specifying which fields to exclude in the copied model. | None | +| update | None | Optional dictionary of field-value pairs to override field values in the copied model. | None | +| deep | None | If True, the values of fields that are Pydantic models will be deep-copied. | None | + +**Returns:** + +| Type | Description | +|---|---| +| None | A copy of the model with included, excluded and updated fields as specified. | + + +#### dict + +```python3 +def dict( + self, + *, + include: 'IncEx' = None, + exclude: 'IncEx' = None, + by_alias: 'bool' = False, + exclude_unset: 'bool' = False, + exclude_defaults: 'bool' = False, + exclude_none: 'bool' = False +) -> 'typing.Dict[str, Any]' +``` + + +#### json + +```python3 +def json( + self, + *, + include: 'IncEx' = None, + exclude: 'IncEx' = None, + by_alias: 'bool' = False, + exclude_unset: 'bool' = False, + exclude_defaults: 'bool' = False, + exclude_none: 'bool' = False, + encoder: 'typing.Callable[[Any], Any] | None' = PydanticUndefined, + models_as_dict: 'bool' = PydanticUndefined, + **dumps_kwargs: 'Any' +) -> 'str' +``` + + +#### model_copy + +```python3 +def model_copy( + self: 'Model', + *, + update: 'dict[str, Any] | None' = None, + deep: 'bool' = False +) -> 'Model' +``` + +Usage docs: https://docs.pydantic.dev/2.7/concepts/serialization/#model_copy + +Returns a copy of the model. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| update | None | Values to change/add in the new model. Note: the data is not validated
before creating the new model. You should trust this data. | None | +| deep | None | Set to `True` to make a deep copy of the model. | None | + +**Returns:** + +| Type | Description | +|---|---| +| None | New model instance. | + + +#### model_dump + +```python3 +def model_dump( + self, + *, + mode: "Literal['json', 'python'] | str" = 'python', + include: 'IncEx' = None, + exclude: 'IncEx' = None, + context: 'dict[str, Any] | None' = None, + by_alias: 'bool' = False, + exclude_unset: 'bool' = False, + exclude_defaults: 'bool' = False, + exclude_none: 'bool' = False, + round_trip: 'bool' = False, + warnings: "bool | Literal['none', 'warn', 'error']" = True, + serialize_as_any: 'bool' = False +) -> 'dict[str, Any]' +``` + +Usage docs: https://docs.pydantic.dev/2.7/concepts/serialization/#modelmodel_dump + +Generate a dictionary representation of the model, optionally specifying which fields to include or exclude. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| mode | None | The mode in which `to_python` should run.
If mode is 'json', the output will only contain JSON-serializable types.
If mode is 'python', the output may contain non-JSON-serializable Python objects. | None | +| include | None | A set of fields to include in the output. | None | +| exclude | None | A set of fields to exclude from the output. | None | +| context | None | Additional context to pass to the serializer. | None | +| by_alias | None | Whether to use the field's alias in the dictionary key if defined. | None | +| exclude_unset | None | Whether to exclude fields that have not been explicitly set. | None | +| exclude_defaults | None | Whether to exclude fields that are set to their default value. | None | +| exclude_none | None | Whether to exclude fields that have a value of `None`. | None | +| round_trip | None | If True, dumped values should be valid as input for non-idempotent types such as Json[T]. | None | +| warnings | None | How to handle serialization errors. False/"none" ignores them, True/"warn" logs errors,
"error" raises a [`PydanticSerializationError`][pydantic_core.PydanticSerializationError]. | None | +| serialize_as_any | None | Whether to serialize fields with duck-typing serialization behavior. | None | + +**Returns:** + +| Type | Description | +|---|---| +| None | A dictionary representation of the model. | + + +#### model_dump_json + +```python3 +def model_dump_json( + self, + *, + indent: 'int | None' = None, + include: 'IncEx' = None, + exclude: 'IncEx' = None, + context: 'dict[str, Any] | None' = None, + by_alias: 'bool' = False, + exclude_unset: 'bool' = False, + exclude_defaults: 'bool' = False, + exclude_none: 'bool' = False, + round_trip: 'bool' = False, + warnings: "bool | Literal['none', 'warn', 'error']" = True, + serialize_as_any: 'bool' = False +) -> 'str' +``` + +Usage docs: https://docs.pydantic.dev/2.7/concepts/serialization/#modelmodel_dump_json + +Generates a JSON representation of the model using Pydantic's `to_json` method. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| indent | None | Indentation to use in the JSON output. If None is passed, the output will be compact. | None | +| include | None | Field(s) to include in the JSON output. | None | +| exclude | None | Field(s) to exclude from the JSON output. | None | +| context | None | Additional context to pass to the serializer. | None | +| by_alias | None | Whether to serialize using field aliases. | None | +| exclude_unset | None | Whether to exclude fields that have not been explicitly set. | None | +| exclude_defaults | None | Whether to exclude fields that are set to their default value. | None | +| exclude_none | None | Whether to exclude fields that have a value of `None`. | None | +| round_trip | None | If True, dumped values should be valid as input for non-idempotent types such as Json[T]. | None | +| warnings | None | How to handle serialization errors. False/"none" ignores them, True/"warn" logs errors,
"error" raises a [`PydanticSerializationError`][pydantic_core.PydanticSerializationError]. | None | +| serialize_as_any | None | Whether to serialize fields with duck-typing serialization behavior. | None | + +**Returns:** + +| Type | Description | +|---|---| +| None | A JSON string representation of the model. | + + +#### model_post_init + +```python3 +def model_post_init( + self, + _BaseModel__context: 'Any' +) -> 'None' +``` + +Override this method to perform additional initialization after `__init__` and `model_construct`. + +This is useful if you want to do some validation that requires the entire model to be initialized. + +### Metadata + +```python3 +class Metadata( + /, + **data: 'Any' +) +``` + +Metadata Model. + +#### Ancestors (in MRO) + +* pydantic.main.BaseModel + +#### Class variables + +```python3 +model_computed_fields +``` + +```python3 +model_config +``` + +```python3 +model_fields +``` + +#### Static methods + + +#### construct + +```python3 +def construct( + _fields_set: 'set[str] | None' = None, + **values: 'Any' +) -> 'Model' +``` + + +#### from_orm + +```python3 +def from_orm( + obj: 'Any' +) -> 'Model' +``` + + +#### model_construct + +```python3 +def model_construct( + _fields_set: 'set[str] | None' = None, + **values: 'Any' +) -> 'Model' +``` + +Creates a new instance of the `Model` class with validated data. + +Creates a new model setting `__dict__` and `__pydantic_fields_set__` from trusted or pre-validated data. +Default values are respected, but no other validation is performed. + +!!! note + `model_construct()` generally respects the `model_config.extra` setting on the provided model. + That is, if `model_config.extra == 'allow'`, then all extra passed values are added to the model instance's `__dict__` + and `__pydantic_extra__` fields. If `model_config.extra == 'ignore'` (the default), then all extra passed values are ignored. + Because no validation is performed with a call to `model_construct()`, having `model_config.extra == 'forbid'` does not result in + an error if extra values are passed, but they will be ignored. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| _fields_set | None | The set of field names accepted for the Model instance. | None | +| values | None | Trusted or pre-validated data dictionary. | None | + +**Returns:** + +| Type | Description | +|---|---| +| None | A new instance of the `Model` class with validated data. | + + +#### model_json_schema + +```python3 +def model_json_schema( + by_alias: 'bool' = True, + ref_template: 'str' = '#/$defs/{model}', + schema_generator: 'type[GenerateJsonSchema]' = , + mode: 'JsonSchemaMode' = 'validation' +) -> 'dict[str, Any]' +``` + +Generates a JSON schema for a model class. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| by_alias | None | Whether to use attribute aliases or not. | None | +| ref_template | None | The reference template. | None | +| schema_generator | None | To override the logic used to generate the JSON schema, as a subclass of
`GenerateJsonSchema` with your desired modifications | None | +| mode | None | The mode in which to generate the schema. | None | + +**Returns:** + +| Type | Description | +|---|---| +| None | The JSON schema for the given model class. | + + +#### model_parametrized_name + +```python3 +def model_parametrized_name( + params: 'tuple[type[Any], ...]' +) -> 'str' +``` + +Compute the class name for parametrizations of generic classes. + +This method can be overridden to achieve a custom naming scheme for generic BaseModels. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| params | None | Tuple of types of the class. Given a generic class
`Model` with 2 type variables and a concrete model `Model[str, int]`,
the value `(str, int)` would be passed to `params`. | None | + +**Returns:** + +| Type | Description | +|---|---| +| None | String representing the new class where `params` are passed to `cls` as type variables. | + +**Raises:** + +| Type | Description | +|---|---| +| TypeError | Raised when trying to generate concrete names for non-generic models. | + + +#### model_rebuild + +```python3 +def model_rebuild( + *, + force: 'bool' = False, + raise_errors: 'bool' = True, + _parent_namespace_depth: 'int' = 2, + _types_namespace: 'dict[str, Any] | None' = None +) -> 'bool | None' +``` + +Try to rebuild the pydantic-core schema for the model. + +This may be necessary when one of the annotations is a ForwardRef which could not be resolved during +the initial attempt to build the schema, and automatic rebuilding fails. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| force | None | Whether to force the rebuilding of the model schema, defaults to `False`. | None | +| raise_errors | None | Whether to raise errors, defaults to `True`. | None | +| _parent_namespace_depth | None | The depth level of the parent namespace, defaults to 2. | None | +| _types_namespace | None | The types namespace, defaults to `None`. | None | + +**Returns:** + +| Type | Description | +|---|---| +| None | Returns `None` if the schema is already "complete" and rebuilding was not required.
If rebuilding _was_ required, returns `True` if rebuilding was successful, otherwise `False`. | + + +#### model_validate + +```python3 +def model_validate( + obj: 'Any', + *, + strict: 'bool | None' = None, + from_attributes: 'bool | None' = None, + context: 'dict[str, Any] | None' = None +) -> 'Model' +``` + +Validate a pydantic model instance. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| obj | None | The object to validate. | None | +| strict | None | Whether to enforce types strictly. | None | +| from_attributes | None | Whether to extract data from object attributes. | None | +| context | None | Additional context to pass to the validator. | None | + +**Returns:** + +| Type | Description | +|---|---| +| None | The validated model instance. | + +**Raises:** + +| Type | Description | +|---|---| +| ValidationError | If the object could not be validated. | + + +#### model_validate_json + +```python3 +def model_validate_json( + json_data: 'str | bytes | bytearray', + *, + strict: 'bool | None' = None, + context: 'dict[str, Any] | None' = None +) -> 'Model' +``` + +Usage docs: https://docs.pydantic.dev/2.7/concepts/json/#json-parsing + +Validate the given JSON data against the Pydantic model. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| json_data | None | The JSON data to validate. | None | +| strict | None | Whether to enforce types strictly. | None | +| context | None | Extra variables to pass to the validator. | None | + +**Returns:** + +| Type | Description | +|---|---| +| None | The validated Pydantic model. | + +**Raises:** + +| Type | Description | +|---|---| +| ValueError | If `json_data` is not a JSON string. | + + +#### model_validate_strings + +```python3 +def model_validate_strings( + obj: 'Any', + *, + strict: 'bool | None' = None, + context: 'dict[str, Any] | None' = None +) -> 'Model' +``` + +Validate the given object contains string data against the Pydantic model. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| obj | None | The object contains string data to validate. | None | +| strict | None | Whether to enforce types strictly. | None | +| context | None | Extra variables to pass to the validator. | None | + +**Returns:** + +| Type | Description | +|---|---| +| None | The validated Pydantic model. 
| + + +#### parse_file + +```python3 +def parse_file( + path: 'str | Path', + *, + content_type: 'str | None' = None, + encoding: 'str' = 'utf8', + proto: 'DeprecatedParseProtocol | None' = None, + allow_pickle: 'bool' = False +) -> 'Model' +``` + + +#### parse_obj + +```python3 +def parse_obj( + obj: 'Any' +) -> 'Model' +``` + + +#### parse_raw + +```python3 +def parse_raw( + b: 'str | bytes', + *, + content_type: 'str | None' = None, + encoding: 'str' = 'utf8', + proto: 'DeprecatedParseProtocol | None' = None, + allow_pickle: 'bool' = False +) -> 'Model' +``` + + +#### schema + +```python3 +def schema( + by_alias: 'bool' = True, + ref_template: 'str' = '#/$defs/{model}' +) -> 'typing.Dict[str, Any]' +``` + + +#### schema_json + +```python3 +def schema_json( + *, + by_alias: 'bool' = True, + ref_template: 'str' = '#/$defs/{model}', + **dumps_kwargs: 'Any' +) -> 'str' +``` + + +#### update_forward_refs + +```python3 +def update_forward_refs( + **localns: 'Any' +) -> 'None' +``` + + +#### validate + +```python3 +def validate( + value: 'Any' +) -> 'Model' +``` + +#### Instance variables + +```python3 +defaults_params +``` + +Return defaults in a form compatible with TiTiler dependencies. + +```python3 +model_extra +``` + +Get extra fields set during validation. + +```python3 +model_fields_set +``` + +Returns the set of fields that have been explicitly set on this model instance. + +#### Methods + + +#### copy + +```python3 +def copy( + self: 'Model', + *, + include: 'AbstractSetIntStr | MappingIntStrAny | None' = None, + exclude: 'AbstractSetIntStr | MappingIntStrAny | None' = None, + update: 'typing.Dict[str, Any] | None' = None, + deep: 'bool' = False +) -> 'Model' +``` + +Returns a copy of the model. + +!!! warning "Deprecated" + This method is now deprecated; use `model_copy` instead. + +If you need `include` or `exclude`, use: + +```py +data = self.model_dump(include=include, exclude=exclude, round_trip=True) +data = {**data, **(update or {})} +copied = self.model_validate(data) +``` + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| include | None | Optional set or mapping specifying which fields to include in the copied model. | None | +| exclude | None | Optional set or mapping specifying which fields to exclude in the copied model. | None | +| update | None | Optional dictionary of field-value pairs to override field values in the copied model. | None | +| deep | None | If True, the values of fields that are Pydantic models will be deep-copied. | None | + +**Returns:** + +| Type | Description | +|---|---| +| None | A copy of the model with included, excluded and updated fields as specified. 
| + + +#### dict + +```python3 +def dict( + self, + *, + include: 'IncEx' = None, + exclude: 'IncEx' = None, + by_alias: 'bool' = False, + exclude_unset: 'bool' = False, + exclude_defaults: 'bool' = False, + exclude_none: 'bool' = False +) -> 'typing.Dict[str, Any]' +``` + + +#### json + +```python3 +def json( + self, + *, + include: 'IncEx' = None, + exclude: 'IncEx' = None, + by_alias: 'bool' = False, + exclude_unset: 'bool' = False, + exclude_defaults: 'bool' = False, + exclude_none: 'bool' = False, + encoder: 'typing.Callable[[Any], Any] | None' = PydanticUndefined, + models_as_dict: 'bool' = PydanticUndefined, + **dumps_kwargs: 'Any' +) -> 'str' +``` + + +#### model_copy + +```python3 +def model_copy( + self: 'Model', + *, + update: 'dict[str, Any] | None' = None, + deep: 'bool' = False +) -> 'Model' +``` + +Usage docs: https://docs.pydantic.dev/2.7/concepts/serialization/#model_copy + +Returns a copy of the model. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| update | None | Values to change/add in the new model. Note: the data is not validated
before creating the new model. You should trust this data. | None | +| deep | None | Set to `True` to make a deep copy of the model. | None | + +**Returns:** + +| Type | Description | +|---|---| +| None | New model instance. | + + +#### model_dump + +```python3 +def model_dump( + self, + *, + mode: "Literal['json', 'python'] | str" = 'python', + include: 'IncEx' = None, + exclude: 'IncEx' = None, + context: 'dict[str, Any] | None' = None, + by_alias: 'bool' = False, + exclude_unset: 'bool' = False, + exclude_defaults: 'bool' = False, + exclude_none: 'bool' = False, + round_trip: 'bool' = False, + warnings: "bool | Literal['none', 'warn', 'error']" = True, + serialize_as_any: 'bool' = False +) -> 'dict[str, Any]' +``` + +Usage docs: https://docs.pydantic.dev/2.7/concepts/serialization/#modelmodel_dump + +Generate a dictionary representation of the model, optionally specifying which fields to include or exclude. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| mode | None | The mode in which `to_python` should run.
If mode is 'json', the output will only contain JSON-serializable types.
If mode is 'python', the output may contain non-JSON-serializable Python objects. | None | +| include | None | A set of fields to include in the output. | None | +| exclude | None | A set of fields to exclude from the output. | None | +| context | None | Additional context to pass to the serializer. | None | +| by_alias | None | Whether to use the field's alias in the dictionary key if defined. | None | +| exclude_unset | None | Whether to exclude fields that have not been explicitly set. | None | +| exclude_defaults | None | Whether to exclude fields that are set to their default value. | None | +| exclude_none | None | Whether to exclude fields that have a value of `None`. | None | +| round_trip | None | If True, dumped values should be valid as input for non-idempotent types such as Json[T]. | None | +| warnings | None | How to handle serialization errors. False/"none" ignores them, True/"warn" logs errors,
"error" raises a [`PydanticSerializationError`][pydantic_core.PydanticSerializationError]. | None | +| serialize_as_any | None | Whether to serialize fields with duck-typing serialization behavior. | None | + +**Returns:** + +| Type | Description | +|---|---| +| None | A dictionary representation of the model. | + + +#### model_dump_json + +```python3 +def model_dump_json( + self, + *, + indent: 'int | None' = None, + include: 'IncEx' = None, + exclude: 'IncEx' = None, + context: 'dict[str, Any] | None' = None, + by_alias: 'bool' = False, + exclude_unset: 'bool' = False, + exclude_defaults: 'bool' = False, + exclude_none: 'bool' = False, + round_trip: 'bool' = False, + warnings: "bool | Literal['none', 'warn', 'error']" = True, + serialize_as_any: 'bool' = False +) -> 'str' +``` + +Usage docs: https://docs.pydantic.dev/2.7/concepts/serialization/#modelmodel_dump_json + +Generates a JSON representation of the model using Pydantic's `to_json` method. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| indent | None | Indentation to use in the JSON output. If None is passed, the output will be compact. | None | +| include | None | Field(s) to include in the JSON output. | None | +| exclude | None | Field(s) to exclude from the JSON output. | None | +| context | None | Additional context to pass to the serializer. | None | +| by_alias | None | Whether to serialize using field aliases. | None | +| exclude_unset | None | Whether to exclude fields that have not been explicitly set. | None | +| exclude_defaults | None | Whether to exclude fields that are set to their default value. | None | +| exclude_none | None | Whether to exclude fields that have a value of `None`. | None | +| round_trip | None | If True, dumped values should be valid as input for non-idempotent types such as Json[T]. | None | +| warnings | None | How to handle serialization errors. False/"none" ignores them, True/"warn" logs errors,
"error" raises a [`PydanticSerializationError`][pydantic_core.PydanticSerializationError]. | None | +| serialize_as_any | None | Whether to serialize fields with duck-typing serialization behavior. | None | + +**Returns:** + +| Type | Description | +|---|---| +| None | A JSON string representation of the model. | + + +#### model_post_init + +```python3 +def model_post_init( + self, + _BaseModel__context: 'Any' +) -> 'None' +``` + +Override this method to perform additional initialization after `__init__` and `model_construct`. + +This is useful if you want to do some validation that requires the entire model to be initialized. + +### PgSTACSearch + +```python3 +class PgSTACSearch( + /, + **data: 'Any' +) +``` + +Search Query model. + +Notes/Diff with standard model: + - 'fields' is not in the Model because it's defined at the tiler level + - we don't set limit + +#### Ancestors (in MRO) + +* pydantic.main.BaseModel + +#### Descendants + +* titiler.pgstac.model.RegisterMosaic + +#### Class variables + +```python3 +model_computed_fields +``` + +```python3 +model_config +``` + +```python3 +model_fields +``` + +#### Static methods + + +#### construct + +```python3 +def construct( + _fields_set: 'set[str] | None' = None, + **values: 'Any' +) -> 'Model' +``` + + +#### from_orm + +```python3 +def from_orm( + obj: 'Any' +) -> 'Model' +``` + + +#### model_construct + +```python3 +def model_construct( + _fields_set: 'set[str] | None' = None, + **values: 'Any' +) -> 'Model' +``` + +Creates a new instance of the `Model` class with validated data. + +Creates a new model setting `__dict__` and `__pydantic_fields_set__` from trusted or pre-validated data. +Default values are respected, but no other validation is performed. + +!!! note + `model_construct()` generally respects the `model_config.extra` setting on the provided model. + That is, if `model_config.extra == 'allow'`, then all extra passed values are added to the model instance's `__dict__` + and `__pydantic_extra__` fields. If `model_config.extra == 'ignore'` (the default), then all extra passed values are ignored. + Because no validation is performed with a call to `model_construct()`, having `model_config.extra == 'forbid'` does not result in + an error if extra values are passed, but they will be ignored. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| _fields_set | None | The set of field names accepted for the Model instance. | None | +| values | None | Trusted or pre-validated data dictionary. | None | + +**Returns:** + +| Type | Description | +|---|---| +| None | A new instance of the `Model` class with validated data. | + + +#### model_json_schema + +```python3 +def model_json_schema( + by_alias: 'bool' = True, + ref_template: 'str' = '#/$defs/{model}', + schema_generator: 'type[GenerateJsonSchema]' = , + mode: 'JsonSchemaMode' = 'validation' +) -> 'dict[str, Any]' +``` + +Generates a JSON schema for a model class. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| by_alias | None | Whether to use attribute aliases or not. | None | +| ref_template | None | The reference template. | None | +| schema_generator | None | To override the logic used to generate the JSON schema, as a subclass of
`GenerateJsonSchema` with your desired modifications | None | +| mode | None | The mode in which to generate the schema. | None | + +**Returns:** + +| Type | Description | +|---|---| +| None | The JSON schema for the given model class. | + + +#### model_parametrized_name + +```python3 +def model_parametrized_name( + params: 'tuple[type[Any], ...]' +) -> 'str' +``` + +Compute the class name for parametrizations of generic classes. + +This method can be overridden to achieve a custom naming scheme for generic BaseModels. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| params | None | Tuple of types of the class. Given a generic class
`Model` with 2 type variables and a concrete model `Model[str, int]`,
the value `(str, int)` would be passed to `params`. | None | + +**Returns:** + +| Type | Description | +|---|---| +| None | String representing the new class where `params` are passed to `cls` as type variables. | + +**Raises:** + +| Type | Description | +|---|---| +| TypeError | Raised when trying to generate concrete names for non-generic models. | + + +#### model_rebuild + +```python3 +def model_rebuild( + *, + force: 'bool' = False, + raise_errors: 'bool' = True, + _parent_namespace_depth: 'int' = 2, + _types_namespace: 'dict[str, Any] | None' = None +) -> 'bool | None' +``` + +Try to rebuild the pydantic-core schema for the model. + +This may be necessary when one of the annotations is a ForwardRef which could not be resolved during +the initial attempt to build the schema, and automatic rebuilding fails. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| force | None | Whether to force the rebuilding of the model schema, defaults to `False`. | None | +| raise_errors | None | Whether to raise errors, defaults to `True`. | None | +| _parent_namespace_depth | None | The depth level of the parent namespace, defaults to 2. | None | +| _types_namespace | None | The types namespace, defaults to `None`. | None | + +**Returns:** + +| Type | Description | +|---|---| +| None | Returns `None` if the schema is already "complete" and rebuilding was not required.
If rebuilding _was_ required, returns `True` if rebuilding was successful, otherwise `False`. | + + +#### model_validate + +```python3 +def model_validate( + obj: 'Any', + *, + strict: 'bool | None' = None, + from_attributes: 'bool | None' = None, + context: 'dict[str, Any] | None' = None +) -> 'Model' +``` + +Validate a pydantic model instance. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| obj | None | The object to validate. | None | +| strict | None | Whether to enforce types strictly. | None | +| from_attributes | None | Whether to extract data from object attributes. | None | +| context | None | Additional context to pass to the validator. | None | + +**Returns:** + +| Type | Description | +|---|---| +| None | The validated model instance. | + +**Raises:** + +| Type | Description | +|---|---| +| ValidationError | If the object could not be validated. | + + +#### model_validate_json + +```python3 +def model_validate_json( + json_data: 'str | bytes | bytearray', + *, + strict: 'bool | None' = None, + context: 'dict[str, Any] | None' = None +) -> 'Model' +``` + +Usage docs: https://docs.pydantic.dev/2.7/concepts/json/#json-parsing + +Validate the given JSON data against the Pydantic model. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| json_data | None | The JSON data to validate. | None | +| strict | None | Whether to enforce types strictly. | None | +| context | None | Extra variables to pass to the validator. | None | + +**Returns:** + +| Type | Description | +|---|---| +| None | The validated Pydantic model. | + +**Raises:** + +| Type | Description | +|---|---| +| ValueError | If `json_data` is not a JSON string. | + + +#### model_validate_strings + +```python3 +def model_validate_strings( + obj: 'Any', + *, + strict: 'bool | None' = None, + context: 'dict[str, Any] | None' = None +) -> 'Model' +``` + +Validate the given object contains string data against the Pydantic model. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| obj | None | The object contains string data to validate. | None | +| strict | None | Whether to enforce types strictly. | None | +| context | None | Extra variables to pass to the validator. | None | + +**Returns:** + +| Type | Description | +|---|---| +| None | The validated Pydantic model. 
| + + +#### parse_file + +```python3 +def parse_file( + path: 'str | Path', + *, + content_type: 'str | None' = None, + encoding: 'str' = 'utf8', + proto: 'DeprecatedParseProtocol | None' = None, + allow_pickle: 'bool' = False +) -> 'Model' +``` + + +#### parse_obj + +```python3 +def parse_obj( + obj: 'Any' +) -> 'Model' +``` + + +#### parse_raw + +```python3 +def parse_raw( + b: 'str | bytes', + *, + content_type: 'str | None' = None, + encoding: 'str' = 'utf8', + proto: 'DeprecatedParseProtocol | None' = None, + allow_pickle: 'bool' = False +) -> 'Model' +``` + + +#### schema + +```python3 +def schema( + by_alias: 'bool' = True, + ref_template: 'str' = '#/$defs/{model}' +) -> 'typing.Dict[str, Any]' +``` + + +#### schema_json + +```python3 +def schema_json( + *, + by_alias: 'bool' = True, + ref_template: 'str' = '#/$defs/{model}', + **dumps_kwargs: 'Any' +) -> 'str' +``` + + +#### update_forward_refs + +```python3 +def update_forward_refs( + **localns: 'Any' +) -> 'None' +``` + + +#### validate + +```python3 +def validate( + value: 'Any' +) -> 'Model' +``` + + +#### validate_bbox + +```python3 +def validate_bbox( + v: Union[Tuple[float, float, float, float], Tuple[float, float, float, float, float, float]] +) +``` + +Validate BBOX. + + +#### validate_datetime + +```python3 +def validate_datetime( + v +) +``` + +Pgstac does not require the base validator for datetime. + + +#### validate_query_fields + +```python3 +def validate_query_fields( + values: Dict +) -> Dict +``` + +Pgstac does not require the base validator for query fields. + + +#### validate_spatial + +```python3 +def validate_spatial( + v: Optional[Annotated[Union[geojson_pydantic.geometries.Point, geojson_pydantic.geometries.MultiPoint, geojson_pydantic.geometries.LineString, geojson_pydantic.geometries.MultiLineString, geojson_pydantic.geometries.Polygon, geojson_pydantic.geometries.MultiPolygon, geojson_pydantic.geometries.GeometryCollection], FieldInfo(annotation=NoneType, required=True, discriminator='type')]], + info: pydantic_core.core_schema.ValidationInfo +) +``` + +Make sure bbox is not used with Intersects. + +#### Instance variables + +```python3 +model_extra +``` + +Get extra fields set during validation. + +```python3 +model_fields_set +``` + +Returns the set of fields that have been explicitly set on this model instance. + +#### Methods + + +#### copy + +```python3 +def copy( + self: 'Model', + *, + include: 'AbstractSetIntStr | MappingIntStrAny | None' = None, + exclude: 'AbstractSetIntStr | MappingIntStrAny | None' = None, + update: 'typing.Dict[str, Any] | None' = None, + deep: 'bool' = False +) -> 'Model' +``` + +Returns a copy of the model. + +!!! warning "Deprecated" + This method is now deprecated; use `model_copy` instead. + +If you need `include` or `exclude`, use: + +```py +data = self.model_dump(include=include, exclude=exclude, round_trip=True) +data = {**data, **(update or {})} +copied = self.model_validate(data) +``` + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| include | None | Optional set or mapping specifying which fields to include in the copied model. | None | +| exclude | None | Optional set or mapping specifying which fields to exclude in the copied model. | None | +| update | None | Optional dictionary of field-value pairs to override field values in the copied model. | None | +| deep | None | If True, the values of fields that are Pydantic models will be deep-copied. 
| None | + +**Returns:** + +| Type | Description | +|---|---| +| None | A copy of the model with included, excluded and updated fields as specified. | + + +#### dict + +```python3 +def dict( + self, + *, + include: 'IncEx' = None, + exclude: 'IncEx' = None, + by_alias: 'bool' = False, + exclude_unset: 'bool' = False, + exclude_defaults: 'bool' = False, + exclude_none: 'bool' = False +) -> 'typing.Dict[str, Any]' +``` + + +#### json + +```python3 +def json( + self, + *, + include: 'IncEx' = None, + exclude: 'IncEx' = None, + by_alias: 'bool' = False, + exclude_unset: 'bool' = False, + exclude_defaults: 'bool' = False, + exclude_none: 'bool' = False, + encoder: 'typing.Callable[[Any], Any] | None' = PydanticUndefined, + models_as_dict: 'bool' = PydanticUndefined, + **dumps_kwargs: 'Any' +) -> 'str' +``` + + +#### model_copy + +```python3 +def model_copy( + self: 'Model', + *, + update: 'dict[str, Any] | None' = None, + deep: 'bool' = False +) -> 'Model' +``` + +Usage docs: https://docs.pydantic.dev/2.7/concepts/serialization/#model_copy + +Returns a copy of the model. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| update | None | Values to change/add in the new model. Note: the data is not validated
before creating the new model. You should trust this data. | None | +| deep | None | Set to `True` to make a deep copy of the model. | None | + +**Returns:** + +| Type | Description | +|---|---| +| None | New model instance. | + + +#### model_dump + +```python3 +def model_dump( + self, + *, + mode: "Literal['json', 'python'] | str" = 'python', + include: 'IncEx' = None, + exclude: 'IncEx' = None, + context: 'dict[str, Any] | None' = None, + by_alias: 'bool' = False, + exclude_unset: 'bool' = False, + exclude_defaults: 'bool' = False, + exclude_none: 'bool' = False, + round_trip: 'bool' = False, + warnings: "bool | Literal['none', 'warn', 'error']" = True, + serialize_as_any: 'bool' = False +) -> 'dict[str, Any]' +``` + +Usage docs: https://docs.pydantic.dev/2.7/concepts/serialization/#modelmodel_dump + +Generate a dictionary representation of the model, optionally specifying which fields to include or exclude. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| mode | None | The mode in which `to_python` should run.
If mode is 'json', the output will only contain JSON serializable types.
If mode is 'python', the output may contain non-JSON-serializable Python objects. | None | +| include | None | A set of fields to include in the output. | None | +| exclude | None | A set of fields to exclude from the output. | None | +| context | None | Additional context to pass to the serializer. | None | +| by_alias | None | Whether to use the field's alias in the dictionary key if defined. | None | +| exclude_unset | None | Whether to exclude fields that have not been explicitly set. | None | +| exclude_defaults | None | Whether to exclude fields that are set to their default value. | None | +| exclude_none | None | Whether to exclude fields that have a value of `None`. | None | +| round_trip | None | If True, dumped values should be valid as input for non-idempotent types such as Json[T]. | None | +| warnings | None | How to handle serialization errors. False/"none" ignores them, True/"warn" logs errors,
"error" raises a [`PydanticSerializationError`][pydantic_core.PydanticSerializationError]. | None | +| serialize_as_any | None | Whether to serialize fields with duck-typing serialization behavior. | None | + +**Returns:** + +| Type | Description | +|---|---| +| None | A dictionary representation of the model. | + + +#### model_dump_json + +```python3 +def model_dump_json( + self, + *, + indent: 'int | None' = None, + include: 'IncEx' = None, + exclude: 'IncEx' = None, + context: 'dict[str, Any] | None' = None, + by_alias: 'bool' = False, + exclude_unset: 'bool' = False, + exclude_defaults: 'bool' = False, + exclude_none: 'bool' = False, + round_trip: 'bool' = False, + warnings: "bool | Literal['none', 'warn', 'error']" = True, + serialize_as_any: 'bool' = False +) -> 'str' +``` + +Usage docs: https://docs.pydantic.dev/2.7/concepts/serialization/#modelmodel_dump_json + +Generates a JSON representation of the model using Pydantic's `to_json` method. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| indent | None | Indentation to use in the JSON output. If None is passed, the output will be compact. | None | +| include | None | Field(s) to include in the JSON output. | None | +| exclude | None | Field(s) to exclude from the JSON output. | None | +| context | None | Additional context to pass to the serializer. | None | +| by_alias | None | Whether to serialize using field aliases. | None | +| exclude_unset | None | Whether to exclude fields that have not been explicitly set. | None | +| exclude_defaults | None | Whether to exclude fields that are set to their default value. | None | +| exclude_none | None | Whether to exclude fields that have a value of `None`. | None | +| round_trip | None | If True, dumped values should be valid as input for non-idempotent types such as Json[T]. | None | +| warnings | None | How to handle serialization errors. False/"none" ignores them, True/"warn" logs errors,
"error" raises a [`PydanticSerializationError`][pydantic_core.PydanticSerializationError]. | None | +| serialize_as_any | None | Whether to serialize fields with duck-typing serialization behavior. | None | + +**Returns:** + +| Type | Description | +|---|---| +| None | A JSON string representation of the model. | + + +#### model_post_init + +```python3 +def model_post_init( + self, + _BaseModel__context: 'Any' +) -> 'None' +``` + +Override this method to perform additional initialization after `__init__` and `model_construct`. + +This is useful if you want to do some validation that requires the entire model to be initialized. + +### RegisterMosaic + +```python3 +class RegisterMosaic( + /, + **data: 'Any' +) +``` + +Model of /register endpoint input. + +#### Ancestors (in MRO) + +* titiler.pgstac.model.PgSTACSearch +* pydantic.main.BaseModel + +#### Class variables + +```python3 +model_computed_fields +``` + +```python3 +model_config +``` + +```python3 +model_fields +``` + +#### Static methods + + +#### construct + +```python3 +def construct( + _fields_set: 'set[str] | None' = None, + **values: 'Any' +) -> 'Model' +``` + + +#### from_orm + +```python3 +def from_orm( + obj: 'Any' +) -> 'Model' +``` + + +#### model_construct + +```python3 +def model_construct( + _fields_set: 'set[str] | None' = None, + **values: 'Any' +) -> 'Model' +``` + +Creates a new instance of the `Model` class with validated data. + +Creates a new model setting `__dict__` and `__pydantic_fields_set__` from trusted or pre-validated data. +Default values are respected, but no other validation is performed. + +!!! note + `model_construct()` generally respects the `model_config.extra` setting on the provided model. + That is, if `model_config.extra == 'allow'`, then all extra passed values are added to the model instance's `__dict__` + and `__pydantic_extra__` fields. If `model_config.extra == 'ignore'` (the default), then all extra passed values are ignored. + Because no validation is performed with a call to `model_construct()`, having `model_config.extra == 'forbid'` does not result in + an error if extra values are passed, but they will be ignored. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| _fields_set | None | The set of field names accepted for the Model instance. | None | +| values | None | Trusted or pre-validated data dictionary. | None | + +**Returns:** + +| Type | Description | +|---|---| +| None | A new instance of the `Model` class with validated data. | + + +#### model_json_schema + +```python3 +def model_json_schema( + by_alias: 'bool' = True, + ref_template: 'str' = '#/$defs/{model}', + schema_generator: 'type[GenerateJsonSchema]' = , + mode: 'JsonSchemaMode' = 'validation' +) -> 'dict[str, Any]' +``` + +Generates a JSON schema for a model class. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| by_alias | None | Whether to use attribute aliases or not. | None | +| ref_template | None | The reference template. | None | +| schema_generator | None | To override the logic used to generate the JSON schema, as a subclass of
`GenerateJsonSchema` with your desired modifications | None | +| mode | None | The mode in which to generate the schema. | None | + +**Returns:** + +| Type | Description | +|---|---| +| None | The JSON schema for the given model class. | + + +#### model_parametrized_name + +```python3 +def model_parametrized_name( + params: 'tuple[type[Any], ...]' +) -> 'str' +``` + +Compute the class name for parametrizations of generic classes. + +This method can be overridden to achieve a custom naming scheme for generic BaseModels. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| params | None | Tuple of types of the class. Given a generic class
`Model` with 2 type variables and a concrete model `Model[str, int]`,
the value `(str, int)` would be passed to `params`. | None | + +**Returns:** + +| Type | Description | +|---|---| +| None | String representing the new class where `params` are passed to `cls` as type variables. | + +**Raises:** + +| Type | Description | +|---|---| +| TypeError | Raised when trying to generate concrete names for non-generic models. | + + +#### model_rebuild + +```python3 +def model_rebuild( + *, + force: 'bool' = False, + raise_errors: 'bool' = True, + _parent_namespace_depth: 'int' = 2, + _types_namespace: 'dict[str, Any] | None' = None +) -> 'bool | None' +``` + +Try to rebuild the pydantic-core schema for the model. + +This may be necessary when one of the annotations is a ForwardRef which could not be resolved during +the initial attempt to build the schema, and automatic rebuilding fails. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| force | None | Whether to force the rebuilding of the model schema, defaults to `False`. | None | +| raise_errors | None | Whether to raise errors, defaults to `True`. | None | +| _parent_namespace_depth | None | The depth level of the parent namespace, defaults to 2. | None | +| _types_namespace | None | The types namespace, defaults to `None`. | None | + +**Returns:** + +| Type | Description | +|---|---| +| None | Returns `None` if the schema is already "complete" and rebuilding was not required.
If rebuilding _was_ required, returns `True` if rebuilding was successful, otherwise `False`. | + + +#### model_validate + +```python3 +def model_validate( + obj: 'Any', + *, + strict: 'bool | None' = None, + from_attributes: 'bool | None' = None, + context: 'dict[str, Any] | None' = None +) -> 'Model' +``` + +Validate a pydantic model instance. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| obj | None | The object to validate. | None | +| strict | None | Whether to enforce types strictly. | None | +| from_attributes | None | Whether to extract data from object attributes. | None | +| context | None | Additional context to pass to the validator. | None | + +**Returns:** + +| Type | Description | +|---|---| +| None | The validated model instance. | + +**Raises:** + +| Type | Description | +|---|---| +| ValidationError | If the object could not be validated. | + + +#### model_validate_json + +```python3 +def model_validate_json( + json_data: 'str | bytes | bytearray', + *, + strict: 'bool | None' = None, + context: 'dict[str, Any] | None' = None +) -> 'Model' +``` + +Usage docs: https://docs.pydantic.dev/2.7/concepts/json/#json-parsing + +Validate the given JSON data against the Pydantic model. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| json_data | None | The JSON data to validate. | None | +| strict | None | Whether to enforce types strictly. | None | +| context | None | Extra variables to pass to the validator. | None | + +**Returns:** + +| Type | Description | +|---|---| +| None | The validated Pydantic model. | + +**Raises:** + +| Type | Description | +|---|---| +| ValueError | If `json_data` is not a JSON string. | + + +#### model_validate_strings + +```python3 +def model_validate_strings( + obj: 'Any', + *, + strict: 'bool | None' = None, + context: 'dict[str, Any] | None' = None +) -> 'Model' +``` + +Validate the given object contains string data against the Pydantic model. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| obj | None | The object contains string data to validate. | None | +| strict | None | Whether to enforce types strictly. | None | +| context | None | Extra variables to pass to the validator. | None | + +**Returns:** + +| Type | Description | +|---|---| +| None | The validated Pydantic model. 
| + + +#### parse_file + +```python3 +def parse_file( + path: 'str | Path', + *, + content_type: 'str | None' = None, + encoding: 'str' = 'utf8', + proto: 'DeprecatedParseProtocol | None' = None, + allow_pickle: 'bool' = False +) -> 'Model' +``` + + +#### parse_obj + +```python3 +def parse_obj( + obj: 'Any' +) -> 'Model' +``` + + +#### parse_raw + +```python3 +def parse_raw( + b: 'str | bytes', + *, + content_type: 'str | None' = None, + encoding: 'str' = 'utf8', + proto: 'DeprecatedParseProtocol | None' = None, + allow_pickle: 'bool' = False +) -> 'Model' +``` + + +#### schema + +```python3 +def schema( + by_alias: 'bool' = True, + ref_template: 'str' = '#/$defs/{model}' +) -> 'typing.Dict[str, Any]' +``` + + +#### schema_json + +```python3 +def schema_json( + *, + by_alias: 'bool' = True, + ref_template: 'str' = '#/$defs/{model}', + **dumps_kwargs: 'Any' +) -> 'str' +``` + + +#### update_forward_refs + +```python3 +def update_forward_refs( + **localns: 'Any' +) -> 'None' +``` + + +#### validate + +```python3 +def validate( + value: 'Any' +) -> 'Model' +``` + + +#### validate_bbox + +```python3 +def validate_bbox( + v: Union[Tuple[float, float, float, float], Tuple[float, float, float, float, float, float]] +) +``` + +Validate BBOX. + + +#### validate_datetime + +```python3 +def validate_datetime( + v +) +``` + +Pgstac does not require the base validator for datetime. + + +#### validate_query_fields + +```python3 +def validate_query_fields( + values: Dict +) -> Dict +``` + +Pgstac does not require the base validator for query fields. + + +#### validate_spatial + +```python3 +def validate_spatial( + v: Optional[Annotated[Union[geojson_pydantic.geometries.Point, geojson_pydantic.geometries.MultiPoint, geojson_pydantic.geometries.LineString, geojson_pydantic.geometries.MultiLineString, geojson_pydantic.geometries.Polygon, geojson_pydantic.geometries.MultiPolygon, geojson_pydantic.geometries.GeometryCollection], FieldInfo(annotation=NoneType, required=True, discriminator='type')]], + info: pydantic_core.core_schema.ValidationInfo +) +``` + +Make sure bbox is not used with Intersects. + +#### Instance variables + +```python3 +model_extra +``` + +Get extra fields set during validation. + +```python3 +model_fields_set +``` + +Returns the set of fields that have been explicitly set on this model instance. + +#### Methods + + +#### copy + +```python3 +def copy( + self: 'Model', + *, + include: 'AbstractSetIntStr | MappingIntStrAny | None' = None, + exclude: 'AbstractSetIntStr | MappingIntStrAny | None' = None, + update: 'typing.Dict[str, Any] | None' = None, + deep: 'bool' = False +) -> 'Model' +``` + +Returns a copy of the model. + +!!! warning "Deprecated" + This method is now deprecated; use `model_copy` instead. + +If you need `include` or `exclude`, use: + +```py +data = self.model_dump(include=include, exclude=exclude, round_trip=True) +data = {**data, **(update or {})} +copied = self.model_validate(data) +``` + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| include | None | Optional set or mapping specifying which fields to include in the copied model. | None | +| exclude | None | Optional set or mapping specifying which fields to exclude in the copied model. | None | +| update | None | Optional dictionary of field-value pairs to override field values in the copied model. | None | +| deep | None | If True, the values of fields that are Pydantic models will be deep-copied. 
| None | + +**Returns:** + +| Type | Description | +|---|---| +| None | A copy of the model with included, excluded and updated fields as specified. | + + +#### dict + +```python3 +def dict( + self, + *, + include: 'IncEx' = None, + exclude: 'IncEx' = None, + by_alias: 'bool' = False, + exclude_unset: 'bool' = False, + exclude_defaults: 'bool' = False, + exclude_none: 'bool' = False +) -> 'typing.Dict[str, Any]' +``` + + +#### json + +```python3 +def json( + self, + *, + include: 'IncEx' = None, + exclude: 'IncEx' = None, + by_alias: 'bool' = False, + exclude_unset: 'bool' = False, + exclude_defaults: 'bool' = False, + exclude_none: 'bool' = False, + encoder: 'typing.Callable[[Any], Any] | None' = PydanticUndefined, + models_as_dict: 'bool' = PydanticUndefined, + **dumps_kwargs: 'Any' +) -> 'str' +``` + + +#### model_copy + +```python3 +def model_copy( + self: 'Model', + *, + update: 'dict[str, Any] | None' = None, + deep: 'bool' = False +) -> 'Model' +``` + +Usage docs: https://docs.pydantic.dev/2.7/concepts/serialization/#model_copy + +Returns a copy of the model. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| update | None | Values to change/add in the new model. Note: the data is not validated
before creating the new model. You should trust this data. | None | +| deep | None | Set to `True` to make a deep copy of the model. | None | + +**Returns:** + +| Type | Description | +|---|---| +| None | New model instance. | + + +#### model_dump + +```python3 +def model_dump( + self, + *, + mode: "Literal['json', 'python'] | str" = 'python', + include: 'IncEx' = None, + exclude: 'IncEx' = None, + context: 'dict[str, Any] | None' = None, + by_alias: 'bool' = False, + exclude_unset: 'bool' = False, + exclude_defaults: 'bool' = False, + exclude_none: 'bool' = False, + round_trip: 'bool' = False, + warnings: "bool | Literal['none', 'warn', 'error']" = True, + serialize_as_any: 'bool' = False +) -> 'dict[str, Any]' +``` + +Usage docs: https://docs.pydantic.dev/2.7/concepts/serialization/#modelmodel_dump + +Generate a dictionary representation of the model, optionally specifying which fields to include or exclude. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| mode | None | The mode in which `to_python` should run.
If mode is 'json', the output will only contain JSON serializable types.
If mode is 'python', the output may contain non-JSON-serializable Python objects. | None | +| include | None | A set of fields to include in the output. | None | +| exclude | None | A set of fields to exclude from the output. | None | +| context | None | Additional context to pass to the serializer. | None | +| by_alias | None | Whether to use the field's alias in the dictionary key if defined. | None | +| exclude_unset | None | Whether to exclude fields that have not been explicitly set. | None | +| exclude_defaults | None | Whether to exclude fields that are set to their default value. | None | +| exclude_none | None | Whether to exclude fields that have a value of `None`. | None | +| round_trip | None | If True, dumped values should be valid as input for non-idempotent types such as Json[T]. | None | +| warnings | None | How to handle serialization errors. False/"none" ignores them, True/"warn" logs errors,
"error" raises a [`PydanticSerializationError`][pydantic_core.PydanticSerializationError]. | None | +| serialize_as_any | None | Whether to serialize fields with duck-typing serialization behavior. | None | + +**Returns:** + +| Type | Description | +|---|---| +| None | A dictionary representation of the model. | + + +#### model_dump_json + +```python3 +def model_dump_json( + self, + *, + indent: 'int | None' = None, + include: 'IncEx' = None, + exclude: 'IncEx' = None, + context: 'dict[str, Any] | None' = None, + by_alias: 'bool' = False, + exclude_unset: 'bool' = False, + exclude_defaults: 'bool' = False, + exclude_none: 'bool' = False, + round_trip: 'bool' = False, + warnings: "bool | Literal['none', 'warn', 'error']" = True, + serialize_as_any: 'bool' = False +) -> 'str' +``` + +Usage docs: https://docs.pydantic.dev/2.7/concepts/serialization/#modelmodel_dump_json + +Generates a JSON representation of the model using Pydantic's `to_json` method. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| indent | None | Indentation to use in the JSON output. If None is passed, the output will be compact. | None | +| include | None | Field(s) to include in the JSON output. | None | +| exclude | None | Field(s) to exclude from the JSON output. | None | +| context | None | Additional context to pass to the serializer. | None | +| by_alias | None | Whether to serialize using field aliases. | None | +| exclude_unset | None | Whether to exclude fields that have not been explicitly set. | None | +| exclude_defaults | None | Whether to exclude fields that are set to their default value. | None | +| exclude_none | None | Whether to exclude fields that have a value of `None`. | None | +| round_trip | None | If True, dumped values should be valid as input for non-idempotent types such as Json[T]. | None | +| warnings | None | How to handle serialization errors. False/"none" ignores them, True/"warn" logs errors,
"error" raises a [`PydanticSerializationError`][pydantic_core.PydanticSerializationError]. | None | +| serialize_as_any | None | Whether to serialize fields with duck-typing serialization behavior. | None | + +**Returns:** + +| Type | Description | +|---|---| +| None | A JSON string representation of the model. | + + +#### model_post_init + +```python3 +def model_post_init( + self, + _BaseModel__context: 'Any' +) -> 'None' +``` + +Override this method to perform additional initialization after `__init__` and `model_construct`. + +This is useful if you want to do some validation that requires the entire model to be initialized. + +### RegisterResponse + +```python3 +class RegisterResponse( + /, + **data: 'Any' +) +``` + +Response model for /register endpoint. + +#### Ancestors (in MRO) + +* pydantic.main.BaseModel + +#### Class variables + +```python3 +model_computed_fields +``` + +```python3 +model_config +``` + +```python3 +model_fields +``` + +#### Static methods + + +#### construct + +```python3 +def construct( + _fields_set: 'set[str] | None' = None, + **values: 'Any' +) -> 'Model' +``` + + +#### from_orm + +```python3 +def from_orm( + obj: 'Any' +) -> 'Model' +``` + + +#### model_construct + +```python3 +def model_construct( + _fields_set: 'set[str] | None' = None, + **values: 'Any' +) -> 'Model' +``` + +Creates a new instance of the `Model` class with validated data. + +Creates a new model setting `__dict__` and `__pydantic_fields_set__` from trusted or pre-validated data. +Default values are respected, but no other validation is performed. + +!!! note + `model_construct()` generally respects the `model_config.extra` setting on the provided model. + That is, if `model_config.extra == 'allow'`, then all extra passed values are added to the model instance's `__dict__` + and `__pydantic_extra__` fields. If `model_config.extra == 'ignore'` (the default), then all extra passed values are ignored. + Because no validation is performed with a call to `model_construct()`, having `model_config.extra == 'forbid'` does not result in + an error if extra values are passed, but they will be ignored. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| _fields_set | None | The set of field names accepted for the Model instance. | None | +| values | None | Trusted or pre-validated data dictionary. | None | + +**Returns:** + +| Type | Description | +|---|---| +| None | A new instance of the `Model` class with validated data. | + + +#### model_json_schema + +```python3 +def model_json_schema( + by_alias: 'bool' = True, + ref_template: 'str' = '#/$defs/{model}', + schema_generator: 'type[GenerateJsonSchema]' = , + mode: 'JsonSchemaMode' = 'validation' +) -> 'dict[str, Any]' +``` + +Generates a JSON schema for a model class. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| by_alias | None | Whether to use attribute aliases or not. | None | +| ref_template | None | The reference template. | None | +| schema_generator | None | To override the logic used to generate the JSON schema, as a subclass of
`GenerateJsonSchema` with your desired modifications | None | +| mode | None | The mode in which to generate the schema. | None | + +**Returns:** + +| Type | Description | +|---|---| +| None | The JSON schema for the given model class. | + + +#### model_parametrized_name + +```python3 +def model_parametrized_name( + params: 'tuple[type[Any], ...]' +) -> 'str' +``` + +Compute the class name for parametrizations of generic classes. + +This method can be overridden to achieve a custom naming scheme for generic BaseModels. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| params | None | Tuple of types of the class. Given a generic class
`Model` with 2 type variables and a concrete model `Model[str, int]`,
the value `(str, int)` would be passed to `params`. | None | + +**Returns:** + +| Type | Description | +|---|---| +| None | String representing the new class where `params` are passed to `cls` as type variables. | + +**Raises:** + +| Type | Description | +|---|---| +| TypeError | Raised when trying to generate concrete names for non-generic models. | + + +#### model_rebuild + +```python3 +def model_rebuild( + *, + force: 'bool' = False, + raise_errors: 'bool' = True, + _parent_namespace_depth: 'int' = 2, + _types_namespace: 'dict[str, Any] | None' = None +) -> 'bool | None' +``` + +Try to rebuild the pydantic-core schema for the model. + +This may be necessary when one of the annotations is a ForwardRef which could not be resolved during +the initial attempt to build the schema, and automatic rebuilding fails. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| force | None | Whether to force the rebuilding of the model schema, defaults to `False`. | None | +| raise_errors | None | Whether to raise errors, defaults to `True`. | None | +| _parent_namespace_depth | None | The depth level of the parent namespace, defaults to 2. | None | +| _types_namespace | None | The types namespace, defaults to `None`. | None | + +**Returns:** + +| Type | Description | +|---|---| +| None | Returns `None` if the schema is already "complete" and rebuilding was not required.
If rebuilding _was_ required, returns `True` if rebuilding was successful, otherwise `False`. | + + +#### model_validate + +```python3 +def model_validate( + obj: 'Any', + *, + strict: 'bool | None' = None, + from_attributes: 'bool | None' = None, + context: 'dict[str, Any] | None' = None +) -> 'Model' +``` + +Validate a pydantic model instance. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| obj | None | The object to validate. | None | +| strict | None | Whether to enforce types strictly. | None | +| from_attributes | None | Whether to extract data from object attributes. | None | +| context | None | Additional context to pass to the validator. | None | + +**Returns:** + +| Type | Description | +|---|---| +| None | The validated model instance. | + +**Raises:** + +| Type | Description | +|---|---| +| ValidationError | If the object could not be validated. | + + +#### model_validate_json + +```python3 +def model_validate_json( + json_data: 'str | bytes | bytearray', + *, + strict: 'bool | None' = None, + context: 'dict[str, Any] | None' = None +) -> 'Model' +``` + +Usage docs: https://docs.pydantic.dev/2.7/concepts/json/#json-parsing + +Validate the given JSON data against the Pydantic model. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| json_data | None | The JSON data to validate. | None | +| strict | None | Whether to enforce types strictly. | None | +| context | None | Extra variables to pass to the validator. | None | + +**Returns:** + +| Type | Description | +|---|---| +| None | The validated Pydantic model. | + +**Raises:** + +| Type | Description | +|---|---| +| ValueError | If `json_data` is not a JSON string. | + + +#### model_validate_strings + +```python3 +def model_validate_strings( + obj: 'Any', + *, + strict: 'bool | None' = None, + context: 'dict[str, Any] | None' = None +) -> 'Model' +``` + +Validate the given object contains string data against the Pydantic model. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| obj | None | The object contains string data to validate. | None | +| strict | None | Whether to enforce types strictly. | None | +| context | None | Extra variables to pass to the validator. | None | + +**Returns:** + +| Type | Description | +|---|---| +| None | The validated Pydantic model. | + + +#### parse_file + +```python3 +def parse_file( + path: 'str | Path', + *, + content_type: 'str | None' = None, + encoding: 'str' = 'utf8', + proto: 'DeprecatedParseProtocol | None' = None, + allow_pickle: 'bool' = False +) -> 'Model' +``` + + +#### parse_obj + +```python3 +def parse_obj( + obj: 'Any' +) -> 'Model' +``` + + +#### parse_raw + +```python3 +def parse_raw( + b: 'str | bytes', + *, + content_type: 'str | None' = None, + encoding: 'str' = 'utf8', + proto: 'DeprecatedParseProtocol | None' = None, + allow_pickle: 'bool' = False +) -> 'Model' +``` + + +#### schema + +```python3 +def schema( + by_alias: 'bool' = True, + ref_template: 'str' = '#/$defs/{model}' +) -> 'typing.Dict[str, Any]' +``` + + +#### schema_json + +```python3 +def schema_json( + *, + by_alias: 'bool' = True, + ref_template: 'str' = '#/$defs/{model}', + **dumps_kwargs: 'Any' +) -> 'str' +``` + + +#### update_forward_refs + +```python3 +def update_forward_refs( + **localns: 'Any' +) -> 'None' +``` + + +#### validate + +```python3 +def validate( + value: 'Any' +) -> 'Model' +``` + +#### Instance variables + +```python3 +model_extra +``` + +Get extra fields set during validation. 
+ +```python3 +model_fields_set +``` + +Returns the set of fields that have been explicitly set on this model instance. + +#### Methods + + +#### copy + +```python3 +def copy( + self: 'Model', + *, + include: 'AbstractSetIntStr | MappingIntStrAny | None' = None, + exclude: 'AbstractSetIntStr | MappingIntStrAny | None' = None, + update: 'typing.Dict[str, Any] | None' = None, + deep: 'bool' = False +) -> 'Model' +``` + +Returns a copy of the model. + +!!! warning "Deprecated" + This method is now deprecated; use `model_copy` instead. + +If you need `include` or `exclude`, use: + +```py +data = self.model_dump(include=include, exclude=exclude, round_trip=True) +data = {**data, **(update or {})} +copied = self.model_validate(data) +``` + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| include | None | Optional set or mapping specifying which fields to include in the copied model. | None | +| exclude | None | Optional set or mapping specifying which fields to exclude in the copied model. | None | +| update | None | Optional dictionary of field-value pairs to override field values in the copied model. | None | +| deep | None | If True, the values of fields that are Pydantic models will be deep-copied. | None | + +**Returns:** + +| Type | Description | +|---|---| +| None | A copy of the model with included, excluded and updated fields as specified. | + + +#### dict + +```python3 +def dict( + self, + *, + include: 'IncEx' = None, + exclude: 'IncEx' = None, + by_alias: 'bool' = False, + exclude_unset: 'bool' = False, + exclude_defaults: 'bool' = False, + exclude_none: 'bool' = False +) -> 'typing.Dict[str, Any]' +``` + + +#### json + +```python3 +def json( + self, + *, + include: 'IncEx' = None, + exclude: 'IncEx' = None, + by_alias: 'bool' = False, + exclude_unset: 'bool' = False, + exclude_defaults: 'bool' = False, + exclude_none: 'bool' = False, + encoder: 'typing.Callable[[Any], Any] | None' = PydanticUndefined, + models_as_dict: 'bool' = PydanticUndefined, + **dumps_kwargs: 'Any' +) -> 'str' +``` + + +#### model_copy + +```python3 +def model_copy( + self: 'Model', + *, + update: 'dict[str, Any] | None' = None, + deep: 'bool' = False +) -> 'Model' +``` + +Usage docs: https://docs.pydantic.dev/2.7/concepts/serialization/#model_copy + +Returns a copy of the model. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| update | None | Values to change/add in the new model. Note: the data is not validated
before creating the new model. You should trust this data. | None | +| deep | None | Set to `True` to make a deep copy of the model. | None | + +**Returns:** + +| Type | Description | +|---|---| +| None | New model instance. | + + +#### model_dump + +```python3 +def model_dump( + self, + *, + mode: "Literal['json', 'python'] | str" = 'python', + include: 'IncEx' = None, + exclude: 'IncEx' = None, + context: 'dict[str, Any] | None' = None, + by_alias: 'bool' = False, + exclude_unset: 'bool' = False, + exclude_defaults: 'bool' = False, + exclude_none: 'bool' = False, + round_trip: 'bool' = False, + warnings: "bool | Literal['none', 'warn', 'error']" = True, + serialize_as_any: 'bool' = False +) -> 'dict[str, Any]' +``` + +Usage docs: https://docs.pydantic.dev/2.7/concepts/serialization/#modelmodel_dump + +Generate a dictionary representation of the model, optionally specifying which fields to include or exclude. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| mode | None | The mode in which `to_python` should run.
If mode is 'json', the output will only contain JSON serializable types.
If mode is 'python', the output may contain non-JSON-serializable Python objects. | None | +| include | None | A set of fields to include in the output. | None | +| exclude | None | A set of fields to exclude from the output. | None | +| context | None | Additional context to pass to the serializer. | None | +| by_alias | None | Whether to use the field's alias in the dictionary key if defined. | None | +| exclude_unset | None | Whether to exclude fields that have not been explicitly set. | None | +| exclude_defaults | None | Whether to exclude fields that are set to their default value. | None | +| exclude_none | None | Whether to exclude fields that have a value of `None`. | None | +| round_trip | None | If True, dumped values should be valid as input for non-idempotent types such as Json[T]. | None | +| warnings | None | How to handle serialization errors. False/"none" ignores them, True/"warn" logs errors,
"error" raises a [`PydanticSerializationError`][pydantic_core.PydanticSerializationError]. | None | +| serialize_as_any | None | Whether to serialize fields with duck-typing serialization behavior. | None | + +**Returns:** + +| Type | Description | +|---|---| +| None | A dictionary representation of the model. | + + +#### model_dump_json + +```python3 +def model_dump_json( + self, + *, + indent: 'int | None' = None, + include: 'IncEx' = None, + exclude: 'IncEx' = None, + context: 'dict[str, Any] | None' = None, + by_alias: 'bool' = False, + exclude_unset: 'bool' = False, + exclude_defaults: 'bool' = False, + exclude_none: 'bool' = False, + round_trip: 'bool' = False, + warnings: "bool | Literal['none', 'warn', 'error']" = True, + serialize_as_any: 'bool' = False +) -> 'str' +``` + +Usage docs: https://docs.pydantic.dev/2.7/concepts/serialization/#modelmodel_dump_json + +Generates a JSON representation of the model using Pydantic's `to_json` method. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| indent | None | Indentation to use in the JSON output. If None is passed, the output will be compact. | None | +| include | None | Field(s) to include in the JSON output. | None | +| exclude | None | Field(s) to exclude from the JSON output. | None | +| context | None | Additional context to pass to the serializer. | None | +| by_alias | None | Whether to serialize using field aliases. | None | +| exclude_unset | None | Whether to exclude fields that have not been explicitly set. | None | +| exclude_defaults | None | Whether to exclude fields that are set to their default value. | None | +| exclude_none | None | Whether to exclude fields that have a value of `None`. | None | +| round_trip | None | If True, dumped values should be valid as input for non-idempotent types such as Json[T]. | None | +| warnings | None | How to handle serialization errors. False/"none" ignores them, True/"warn" logs errors,
"error" raises a [`PydanticSerializationError`][pydantic_core.PydanticSerializationError]. | None | +| serialize_as_any | None | Whether to serialize fields with duck-typing serialization behavior. | None | + +**Returns:** + +| Type | Description | +|---|---| +| None | A JSON string representation of the model. | + + +#### model_post_init + +```python3 +def model_post_init( + self, + _BaseModel__context: 'Any' +) -> 'None' +``` + +Override this method to perform additional initialization after `__init__` and `model_construct`. + +This is useful if you want to do some validation that requires the entire model to be initialized. + +### Search + +```python3 +class Search( + /, + **data: 'Any' +) +``` + +PgSTAC Search entry. + +ref: https://github.com/stac-utils/pgstac/blob/3499daa2bfa700ae7bb07503795c169bf2ebafc7/sql/004_search.sql#L907-L915 + +#### Ancestors (in MRO) + +* pydantic.main.BaseModel + +#### Class variables + +```python3 +model_computed_fields +``` + +```python3 +model_config +``` + +```python3 +model_fields +``` + +#### Static methods + + +#### construct + +```python3 +def construct( + _fields_set: 'set[str] | None' = None, + **values: 'Any' +) -> 'Model' +``` + + +#### from_orm + +```python3 +def from_orm( + obj: 'Any' +) -> 'Model' +``` + + +#### model_construct + +```python3 +def model_construct( + _fields_set: 'set[str] | None' = None, + **values: 'Any' +) -> 'Model' +``` + +Creates a new instance of the `Model` class with validated data. + +Creates a new model setting `__dict__` and `__pydantic_fields_set__` from trusted or pre-validated data. +Default values are respected, but no other validation is performed. + +!!! note + `model_construct()` generally respects the `model_config.extra` setting on the provided model. + That is, if `model_config.extra == 'allow'`, then all extra passed values are added to the model instance's `__dict__` + and `__pydantic_extra__` fields. If `model_config.extra == 'ignore'` (the default), then all extra passed values are ignored. + Because no validation is performed with a call to `model_construct()`, having `model_config.extra == 'forbid'` does not result in + an error if extra values are passed, but they will be ignored. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| _fields_set | None | The set of field names accepted for the Model instance. | None | +| values | None | Trusted or pre-validated data dictionary. | None | + +**Returns:** + +| Type | Description | +|---|---| +| None | A new instance of the `Model` class with validated data. | + + +#### model_json_schema + +```python3 +def model_json_schema( + by_alias: 'bool' = True, + ref_template: 'str' = '#/$defs/{model}', + schema_generator: 'type[GenerateJsonSchema]' = , + mode: 'JsonSchemaMode' = 'validation' +) -> 'dict[str, Any]' +``` + +Generates a JSON schema for a model class. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| by_alias | None | Whether to use attribute aliases or not. | None | +| ref_template | None | The reference template. | None | +| schema_generator | None | To override the logic used to generate the JSON schema, as a subclass of
`GenerateJsonSchema` with your desired modifications | None | +| mode | None | The mode in which to generate the schema. | None | + +**Returns:** + +| Type | Description | +|---|---| +| None | The JSON schema for the given model class. | + + +#### model_parametrized_name + +```python3 +def model_parametrized_name( + params: 'tuple[type[Any], ...]' +) -> 'str' +``` + +Compute the class name for parametrizations of generic classes. + +This method can be overridden to achieve a custom naming scheme for generic BaseModels. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| params | None | Tuple of types of the class. Given a generic class
`Model` with 2 type variables and a concrete model `Model[str, int]`,
the value `(str, int)` would be passed to `params`. | None | + +**Returns:** + +| Type | Description | +|---|---| +| None | String representing the new class where `params` are passed to `cls` as type variables. | + +**Raises:** + +| Type | Description | +|---|---| +| TypeError | Raised when trying to generate concrete names for non-generic models. | + + +#### model_rebuild + +```python3 +def model_rebuild( + *, + force: 'bool' = False, + raise_errors: 'bool' = True, + _parent_namespace_depth: 'int' = 2, + _types_namespace: 'dict[str, Any] | None' = None +) -> 'bool | None' +``` + +Try to rebuild the pydantic-core schema for the model. + +This may be necessary when one of the annotations is a ForwardRef which could not be resolved during +the initial attempt to build the schema, and automatic rebuilding fails. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| force | None | Whether to force the rebuilding of the model schema, defaults to `False`. | None | +| raise_errors | None | Whether to raise errors, defaults to `True`. | None | +| _parent_namespace_depth | None | The depth level of the parent namespace, defaults to 2. | None | +| _types_namespace | None | The types namespace, defaults to `None`. | None | + +**Returns:** + +| Type | Description | +|---|---| +| None | Returns `None` if the schema is already "complete" and rebuilding was not required.
If rebuilding _was_ required, returns `True` if rebuilding was successful, otherwise `False`. | + + +#### model_validate + +```python3 +def model_validate( + obj: 'Any', + *, + strict: 'bool | None' = None, + from_attributes: 'bool | None' = None, + context: 'dict[str, Any] | None' = None +) -> 'Model' +``` + +Validate a pydantic model instance. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| obj | None | The object to validate. | None | +| strict | None | Whether to enforce types strictly. | None | +| from_attributes | None | Whether to extract data from object attributes. | None | +| context | None | Additional context to pass to the validator. | None | + +**Returns:** + +| Type | Description | +|---|---| +| None | The validated model instance. | + +**Raises:** + +| Type | Description | +|---|---| +| ValidationError | If the object could not be validated. | + + +#### model_validate_json + +```python3 +def model_validate_json( + json_data: 'str | bytes | bytearray', + *, + strict: 'bool | None' = None, + context: 'dict[str, Any] | None' = None +) -> 'Model' +``` + +Usage docs: https://docs.pydantic.dev/2.7/concepts/json/#json-parsing + +Validate the given JSON data against the Pydantic model. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| json_data | None | The JSON data to validate. | None | +| strict | None | Whether to enforce types strictly. | None | +| context | None | Extra variables to pass to the validator. | None | + +**Returns:** + +| Type | Description | +|---|---| +| None | The validated Pydantic model. | + +**Raises:** + +| Type | Description | +|---|---| +| ValueError | If `json_data` is not a JSON string. | + + +#### model_validate_strings + +```python3 +def model_validate_strings( + obj: 'Any', + *, + strict: 'bool | None' = None, + context: 'dict[str, Any] | None' = None +) -> 'Model' +``` + +Validate the given object contains string data against the Pydantic model. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| obj | None | The object contains string data to validate. | None | +| strict | None | Whether to enforce types strictly. | None | +| context | None | Extra variables to pass to the validator. | None | + +**Returns:** + +| Type | Description | +|---|---| +| None | The validated Pydantic model. 
| + + +#### parse_file + +```python3 +def parse_file( + path: 'str | Path', + *, + content_type: 'str | None' = None, + encoding: 'str' = 'utf8', + proto: 'DeprecatedParseProtocol | None' = None, + allow_pickle: 'bool' = False +) -> 'Model' +``` + + +#### parse_obj + +```python3 +def parse_obj( + obj: 'Any' +) -> 'Model' +``` + + +#### parse_raw + +```python3 +def parse_raw( + b: 'str | bytes', + *, + content_type: 'str | None' = None, + encoding: 'str' = 'utf8', + proto: 'DeprecatedParseProtocol | None' = None, + allow_pickle: 'bool' = False +) -> 'Model' +``` + + +#### schema + +```python3 +def schema( + by_alias: 'bool' = True, + ref_template: 'str' = '#/$defs/{model}' +) -> 'typing.Dict[str, Any]' +``` + + +#### schema_json + +```python3 +def schema_json( + *, + by_alias: 'bool' = True, + ref_template: 'str' = '#/$defs/{model}', + **dumps_kwargs: 'Any' +) -> 'str' +``` + + +#### update_forward_refs + +```python3 +def update_forward_refs( + **localns: 'Any' +) -> 'None' +``` + + +#### validate + +```python3 +def validate( + value: 'Any' +) -> 'Model' +``` + + +#### validate_metadata + +```python3 +def validate_metadata( + v +) +``` + +Set SearchType.search when not present in metadata. + +#### Instance variables + +```python3 +model_extra +``` + +Get extra fields set during validation. + +```python3 +model_fields_set +``` + +Returns the set of fields that have been explicitly set on this model instance. + +#### Methods + + +#### copy + +```python3 +def copy( + self: 'Model', + *, + include: 'AbstractSetIntStr | MappingIntStrAny | None' = None, + exclude: 'AbstractSetIntStr | MappingIntStrAny | None' = None, + update: 'typing.Dict[str, Any] | None' = None, + deep: 'bool' = False +) -> 'Model' +``` + +Returns a copy of the model. + +!!! warning "Deprecated" + This method is now deprecated; use `model_copy` instead. + +If you need `include` or `exclude`, use: + +```py +data = self.model_dump(include=include, exclude=exclude, round_trip=True) +data = {**data, **(update or {})} +copied = self.model_validate(data) +``` + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| include | None | Optional set or mapping specifying which fields to include in the copied model. | None | +| exclude | None | Optional set or mapping specifying which fields to exclude in the copied model. | None | +| update | None | Optional dictionary of field-value pairs to override field values in the copied model. | None | +| deep | None | If True, the values of fields that are Pydantic models will be deep-copied. | None | + +**Returns:** + +| Type | Description | +|---|---| +| None | A copy of the model with included, excluded and updated fields as specified. 
| + + +#### dict + +```python3 +def dict( + self, + *, + include: 'IncEx' = None, + exclude: 'IncEx' = None, + by_alias: 'bool' = False, + exclude_unset: 'bool' = False, + exclude_defaults: 'bool' = False, + exclude_none: 'bool' = False +) -> 'typing.Dict[str, Any]' +``` + + +#### json + +```python3 +def json( + self, + *, + include: 'IncEx' = None, + exclude: 'IncEx' = None, + by_alias: 'bool' = False, + exclude_unset: 'bool' = False, + exclude_defaults: 'bool' = False, + exclude_none: 'bool' = False, + encoder: 'typing.Callable[[Any], Any] | None' = PydanticUndefined, + models_as_dict: 'bool' = PydanticUndefined, + **dumps_kwargs: 'Any' +) -> 'str' +``` + + +#### model_copy + +```python3 +def model_copy( + self: 'Model', + *, + update: 'dict[str, Any] | None' = None, + deep: 'bool' = False +) -> 'Model' +``` + +Usage docs: https://docs.pydantic.dev/2.7/concepts/serialization/#model_copy + +Returns a copy of the model. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| update | None | Values to change/add in the new model. Note: the data is not validated
before creating the new model. You should trust this data. | None | +| deep | None | Set to `True` to make a deep copy of the model. | None | + +**Returns:** + +| Type | Description | +|---|---| +| None | New model instance. | + + +#### model_dump + +```python3 +def model_dump( + self, + *, + mode: "Literal['json', 'python'] | str" = 'python', + include: 'IncEx' = None, + exclude: 'IncEx' = None, + context: 'dict[str, Any] | None' = None, + by_alias: 'bool' = False, + exclude_unset: 'bool' = False, + exclude_defaults: 'bool' = False, + exclude_none: 'bool' = False, + round_trip: 'bool' = False, + warnings: "bool | Literal['none', 'warn', 'error']" = True, + serialize_as_any: 'bool' = False +) -> 'dict[str, Any]' +``` + +Usage docs: https://docs.pydantic.dev/2.7/concepts/serialization/#modelmodel_dump + +Generate a dictionary representation of the model, optionally specifying which fields to include or exclude. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| mode | None | The mode in which `to_python` should run.
If mode is 'json', the output will only contain JSON serializable types.
If mode is 'python', the output may contain non-JSON-serializable Python objects. | None | +| include | None | A set of fields to include in the output. | None | +| exclude | None | A set of fields to exclude from the output. | None | +| context | None | Additional context to pass to the serializer. | None | +| by_alias | None | Whether to use the field's alias in the dictionary key if defined. | None | +| exclude_unset | None | Whether to exclude fields that have not been explicitly set. | None | +| exclude_defaults | None | Whether to exclude fields that are set to their default value. | None | +| exclude_none | None | Whether to exclude fields that have a value of `None`. | None | +| round_trip | None | If True, dumped values should be valid as input for non-idempotent types such as Json[T]. | None | +| warnings | None | How to handle serialization errors. False/"none" ignores them, True/"warn" logs errors,
"error" raises a [`PydanticSerializationError`][pydantic_core.PydanticSerializationError]. | None | +| serialize_as_any | None | Whether to serialize fields with duck-typing serialization behavior. | None | + +**Returns:** + +| Type | Description | +|---|---| +| None | A dictionary representation of the model. | + + +#### model_dump_json + +```python3 +def model_dump_json( + self, + *, + indent: 'int | None' = None, + include: 'IncEx' = None, + exclude: 'IncEx' = None, + context: 'dict[str, Any] | None' = None, + by_alias: 'bool' = False, + exclude_unset: 'bool' = False, + exclude_defaults: 'bool' = False, + exclude_none: 'bool' = False, + round_trip: 'bool' = False, + warnings: "bool | Literal['none', 'warn', 'error']" = True, + serialize_as_any: 'bool' = False +) -> 'str' +``` + +Usage docs: https://docs.pydantic.dev/2.7/concepts/serialization/#modelmodel_dump_json + +Generates a JSON representation of the model using Pydantic's `to_json` method. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| indent | None | Indentation to use in the JSON output. If None is passed, the output will be compact. | None | +| include | None | Field(s) to include in the JSON output. | None | +| exclude | None | Field(s) to exclude from the JSON output. | None | +| context | None | Additional context to pass to the serializer. | None | +| by_alias | None | Whether to serialize using field aliases. | None | +| exclude_unset | None | Whether to exclude fields that have not been explicitly set. | None | +| exclude_defaults | None | Whether to exclude fields that are set to their default value. | None | +| exclude_none | None | Whether to exclude fields that have a value of `None`. | None | +| round_trip | None | If True, dumped values should be valid as input for non-idempotent types such as Json[T]. | None | +| warnings | None | How to handle serialization errors. False/"none" ignores them, True/"warn" logs errors,
"error" raises a [`PydanticSerializationError`][pydantic_core.PydanticSerializationError]. | None | +| serialize_as_any | None | Whether to serialize fields with duck-typing serialization behavior. | None | + +**Returns:** + +| Type | Description | +|---|---| +| None | A JSON string representation of the model. | + + +#### model_post_init + +```python3 +def model_post_init( + self, + _BaseModel__context: 'Any' +) -> 'None' +``` + +Override this method to perform additional initialization after `__init__` and `model_construct`. + +This is useful if you want to do some validation that requires the entire model to be initialized. \ No newline at end of file diff --git a/1.3.0/api/titiler/pgstac/mosaic/index.html b/1.3.0/api/titiler/pgstac/mosaic/index.html new file mode 100644 index 00000000..51b5f854 --- /dev/null +++ b/1.3.0/api/titiler/pgstac/mosaic/index.html @@ -0,0 +1,2905 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + mosaic - TiTiler.PgSTAC + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

Module titiler.pgstac.mosaic

+

TiTiler.PgSTAC custom Mosaic Backend and Custom STACReader.

+

Variables

+
MAX_THREADS
+
+
WGS84_CRS
+
+
cache_config
+
+
retry_config
+
+

Functions

+

multi_points_pgstac

+
def multi_points_pgstac(
+    asset_list: Sequence[Dict[str, Any]],
+    reader: Callable[..., rio_tiler.models.PointData],
+    *args: Any,
+    threads: int = 20,
+    allowed_exceptions: Optional[Tuple] = None,
+    **kwargs: Any
+) -> Dict
+
+

Merge values returned from tasks.

+

Custom version of rio_tiler.task.multi_values which +uses a constructed item_id as the dict key.

+
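A minimal usage sketch: the reader callable and the item dictionaries below are stand-ins (real ones come from a PgSTAC search), and the exact `"{collection}/{id}"` key format is an assumption based on the description above.

```python3
# Hedged sketch, not taken from the library's own docs.
import numpy
from rio_tiler.models import PointData

from titiler.pgstac.mosaic import multi_points_pgstac


def fake_reader(item, lon, lat, **kwargs) -> PointData:
    # A real reader would open item["assets"][...]["href"] and sample (lon, lat).
    return PointData(numpy.array([42]))


asset_list = [
    {"id": "item-1", "collection": "my-collection", "assets": {}},
    {"id": "item-2", "collection": "my-collection", "assets": {}},
]

values = multi_points_pgstac(asset_list, fake_reader, -61.0, 15.0, threads=2)
# Keys are built from the item metadata (e.g. "my-collection/item-1"),
# values are the PointData objects returned by the reader.
print(list(values))
```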

Classes

+

CustomSTACReader

+
class CustomSTACReader(
+    input: Dict[str, Any],
+    tms: morecantile.models.TileMatrixSet = <TileMatrixSet title='Google Maps Compatible for the World' id='WebMercatorQuad' crs='http://www.opengis.net/def/crs/EPSG/0/3857'>,
+    minzoom: int = NOTHING,
+    maxzoom: int = NOTHING,
+    reader: Type[rio_tiler.io.base.BaseReader] = <class 'rio_tiler.io.rasterio.Reader'>,
+    reader_options: Dict = NOTHING,
+    ctx: Any = <class 'rasterio.env.Env'>
+)
+
+

Simplified STAC Reader.

+

Inputs should be in the form of: +{ + "id": "IAMASTACITEM", + "collection": "mycollection", + "bbox": (0, 0, 10, 10), + "assets": { + "COG": { + "href": "somewhereovertherainbow.io/cog.tif" + } + } +}

+
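A hedged sketch of passing such a dictionary to the reader; the COG href is a placeholder, so the tile read only succeeds against a real, reachable file.

```python3
from titiler.pgstac.mosaic import CustomSTACReader

item = {
    "id": "IAMASTACITEM",
    "collection": "mycollection",
    "bbox": (0, 0, 10, 10),
    "assets": {"COG": {"href": "https://somewhereovertherainbow.io/cog.tif"}},  # placeholder
}

with CustomSTACReader(item) as reader:
    print(reader.assets)                      # asset names found on the item
    img = reader.tile(1, 0, 1, assets="COG")  # rio_tiler.models.ImageData
```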

Ancestors (in MRO)

+
    +
  • rio_tiler.io.base.MultiBaseReader
  • +
  • rio_tiler.io.base.SpatialMixin
  • +
+

Methods

+

feature

+
def feature(
+    self,
+    shape: Dict,
+    assets: Union[Sequence[str], str] = None,
+    expression: Optional[str] = None,
+    asset_indexes: Optional[Dict[str, Union[Sequence[int], int]]] = None,
+    asset_as_band: bool = False,
+    **kwargs: Any
+) -> rio_tiler.models.ImageData
+
+

Read and merge parts defined by geojson feature from multiple assets.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
shapedictValid GeoJSON feature.None
assetssequence of str or strassets to fetch info from.None
expressionstrrio-tiler expression for the asset list (e.g. asset1/asset2+asset3).None
asset_indexesdictBand indexes for each asset (e.g {"asset1": 1, "asset2": (1, 2,)}).None
kwargsoptionalOptions to forward to the self.reader.feature method.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
rio_tiler.models.ImageDataImageData instance with data, mask and tile spatial info.
+

geographic_bounds

+
def geographic_bounds(
+    ...
+)
+
+

Return dataset bounds in geographic_crs.

+

info

+
def info(
+    self,
+    assets: Union[Sequence[str], str] = None,
+    **kwargs: Any
+) -> Dict[str, rio_tiler.models.Info]
+
+

Return metadata from multiple assets.

+

Parameters:

+ + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
assetssequence of str or strassets to fetch info from. Required keyword argument.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
dictMultiple assets info in form of {"asset1": rio_tiler.models.Info}.
+

merged_statistics

+
def merged_statistics(
+    self,
+    assets: Union[Sequence[str], str] = None,
+    expression: Optional[str] = None,
+    asset_indexes: Optional[Dict[str, Union[Sequence[int], int]]] = None,
+    categorical: bool = False,
+    categories: Optional[List[float]] = None,
+    percentiles: Optional[List[int]] = None,
+    hist_options: Optional[Dict] = None,
+    max_size: int = 1024,
+    **kwargs: Any
+) -> Dict[str, rio_tiler.models.BandStatistics]
+
+

Return array statistics for multiple assets.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
assetssequence of str or strassets to fetch info from.None
expressionstrrio-tiler expression for the asset list (e.g. asset1/asset2+asset3).None
asset_indexesdictBand indexes for each asset (e.g {"asset1": 1, "asset2": (1, 2,)}).None
categoricalbooltreat input data as categorical data. Defaults to False.False
categorieslist of numberslist of categories to return value for.None
percentileslist of numberslist of percentile values to calculate. Defaults to [2, 98].[2, 98]
hist_optionsdictOptions to forward to numpy.histogram function.None
max_sizeintLimit the size of the longest dimension of the dataset read, respecting bounds X/Y aspect ratio. Defaults to 1024.1024
kwargsoptionalOptions to forward to the self.preview method.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
Dict[str, rio_tiler.models.BandStatistics]bands statistics.
+

parse_expression

+
def parse_expression(
+    self,
+    expression: str,
+    asset_as_band: bool = False
+) -> Tuple
+
+

Parse rio-tiler band math expression.

+

part

+
def part(
+    self,
+    bbox: Tuple[float, float, float, float],
+    assets: Union[Sequence[str], str] = None,
+    expression: Optional[str] = None,
+    asset_indexes: Optional[Dict[str, Union[Sequence[int], int]]] = None,
+    asset_as_band: bool = False,
+    **kwargs: Any
+) -> rio_tiler.models.ImageData
+
+

Read and merge parts from multiple assets.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
bboxtupleOutput bounds (left, bottom, right, top) in target crs.None
assetssequence of str or strassets to fetch info from.None
expressionstrrio-tiler expression for the asset list (e.g. asset1/asset2+asset3).None
asset_indexesdictBand indexes for each asset (e.g {"asset1": 1, "asset2": (1, 2,)}).None
kwargsoptionalOptions to forward to the self.reader.part method.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
rio_tiler.models.ImageDataImageData instance with data, mask and tile spatial info.
+

point

+
def point(
+    self,
+    lon: float,
+    lat: float,
+    assets: Union[Sequence[str], str] = None,
+    expression: Optional[str] = None,
+    asset_indexes: Optional[Dict[str, Union[Sequence[int], int]]] = None,
+    asset_as_band: bool = False,
+    **kwargs: Any
+) -> rio_tiler.models.PointData
+
+

Read pixel value from multiple assets.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
lonfloatLongitude.None
latfloatLatitude.None
assetssequence of str or strassets to fetch info from.None
expressionstrrio-tiler expression for the asset list (e.g. asset1/asset2+asset3).None
asset_indexesdictBand indexes for each asset (e.g {"asset1": 1, "asset2": (1, 2,)}).None
kwargsoptionalOptions to forward to the self.reader.point method.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
NonePointData
+

preview

+
def preview(
+    self,
+    assets: Union[Sequence[str], str] = None,
+    expression: Optional[str] = None,
+    asset_indexes: Optional[Dict[str, Union[Sequence[int], int]]] = None,
+    asset_as_band: bool = False,
+    **kwargs: Any
+) -> rio_tiler.models.ImageData
+
+

Read and merge previews from multiple assets.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
assetssequence of str or strassets to fetch info from.None
expressionstrrio-tiler expression for the asset list (e.g. asset1/asset2+asset3).None
asset_indexesdictBand indexes for each asset (e.g {"asset1": 1, "asset2": (1, 2,)}).None
kwargsoptionalOptions to forward to the self.reader.preview method.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
rio_tiler.models.ImageDataImageData instance with data, mask and tile spatial info.
+

statistics

+
def statistics(
+    self,
+    assets: Union[Sequence[str], str] = None,
+    asset_indexes: Optional[Dict[str, Union[Sequence[int], int]]] = None,
+    asset_expression: Optional[Dict[str, str]] = None,
+    **kwargs: Any
+) -> Dict[str, Dict[str, rio_tiler.models.BandStatistics]]
+
+

Return array statistics for multiple assets.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
assetssequence of str or strassets to fetch info from.None
asset_indexesdictBand indexes for each asset (e.g {"asset1": 1, "asset2": (1, 2,)}).None
asset_expressiondictrio-tiler expression for each asset (e.g. {"asset1": "b1/b2+b3", "asset2": ...}).None
kwargsoptionalOptions to forward to the self.reader.statistics method.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
dictMultiple assets statistics in form of {"asset1": {"1": rio_tiler.models.BandStatistics, ...}}.
+

tile

+
def tile(
+    self,
+    tile_x: int,
+    tile_y: int,
+    tile_z: int,
+    assets: Union[Sequence[str], str] = None,
+    expression: Optional[str] = None,
+    asset_indexes: Optional[Dict[str, Union[Sequence[int], int]]] = None,
+    asset_as_band: bool = False,
+    **kwargs: Any
+) -> rio_tiler.models.ImageData
+
+

Read and merge Web Map tiles from multiple assets.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
tile_xintTile's horizontal index.None
tile_yintTile's vertical index.None
tile_zintTile's zoom level index.None
assetssequence of str or strassets to fetch info from.None
expressionstrrio-tiler expression for the asset list (e.g. asset1/asset2+asset3).None
asset_indexesdictBand indexes for each asset (e.g {"asset1": 1, "asset2": (1, 2,)}).None
kwargsoptionalOptions to forward to the self.reader.tile method.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
rio_tiler.models.ImageDataImageData instance with data, mask and tile spatial info.
+

tile_exists

+
def tile_exists(
+    self,
+    tile_x: int,
+    tile_y: int,
+    tile_z: int
+) -> bool
+
+

Check if a tile intersects the dataset bounds.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
tile_xintTile's horizontal index.None
tile_yintTile's vertical index.None
tile_zintTile's zoom level index.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
boolTrue if the tile intersects the dataset bounds.
+

PGSTACBackend

+
class PGSTACBackend(
+    input: str,
+    pool: psycopg_pool.pool.ConnectionPool,
+    tms: morecantile.models.TileMatrixSet = <TileMatrixSet title='Google Maps Compatible for the World' id='WebMercatorQuad' crs='http://www.opengis.net/def/crs/EPSG/0/3857'>,
+    minzoom: int = NOTHING,
+    maxzoom: int = NOTHING,
+    reader_options: Dict = NOTHING,
+    bounds: Tuple[float, float, float, float] = (-180, -90, 180, 90),
+    crs: rasterio.crs.CRS = CRS.from_epsg(4326),
+    geographic_crs: rasterio.crs.CRS = CRS.from_epsg(4326)
+)
+
+

PgSTAC Mosaic Backend.
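A hedged usage sketch: the connection string and the search id are placeholders, and in the application the backend is normally opened per request by the mosaic tiler factory rather than by hand.

```python3
from psycopg_pool import ConnectionPool

from titiler.pgstac.mosaic import PGSTACBackend

pool = ConnectionPool(conninfo="postgresql://user:password@localhost:5432/postgis")
search_id = "6d436413d0eed760acc2f6bd16ca77a5"  # placeholder pgstac search hash

with PGSTACBackend(search_id, pool=pool) as backend:
    items = backend.assets_for_tile(268, 187, 9)          # matching items as dicts
    img, ids = backend.tile(268, 187, 9, assets=["cog"])  # mosaicked ImageData
```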

+

Ancestors (in MRO)

+
    +
  • cogeo_mosaic.backends.base.BaseBackend
  • +
  • rio_tiler.io.base.BaseReader
  • +
  • rio_tiler.io.base.SpatialMixin
  • +
+

Instance variables

+
center
+
+

Return center from the mosaic definition.

+
mosaicid
+
+

Return sha224 id of the mosaicjson document.

+
quadkey_zoom
+
+

Return Quadkey zoom property.

+

Methods

+

assets_for_bbox

+
def assets_for_bbox(
+    self,
+    xmin: float,
+    ymin: float,
+    xmax: float,
+    ymax: float,
+    coord_crs: rasterio.crs.CRS = CRS.from_epsg(4326),
+    **kwargs: Any
+) -> List[Dict]
+
+

Retrieve assets for bbox.

+

assets_for_point

+
def assets_for_point(
+    self,
+    lng: float,
+    lat: float,
+    coord_crs: rasterio.crs.CRS = CRS.from_epsg(4326),
+    **kwargs: Any
+) -> List[Dict]
+
+

Retrieve assets for point.

+

assets_for_tile

+
def assets_for_tile(
+    self,
+    x: int,
+    y: int,
+    z: int,
+    **kwargs: Any
+) -> List[Dict]
+
+

Retrieve assets for tile.

+

feature

+
def feature(
+    self,
+    shape: Dict,
+    dst_crs: Optional[rasterio.crs.CRS] = None,
+    shape_crs: rasterio.crs.CRS = CRS.from_epsg(4326),
+    max_size: int = 1024,
+    scan_limit: Optional[int] = None,
+    items_limit: Optional[int] = None,
+    time_limit: Optional[int] = None,
+    exitwhenfull: Optional[bool] = None,
+    skipcovered: Optional[bool] = None,
+    **kwargs: Any
+) -> Tuple[rio_tiler.models.ImageData, List[str]]
+
+

Create an Image from multiple items for a GeoJSON feature.

+

find_quadkeys

+
def find_quadkeys(
+    self,
+    tile: morecantile.commons.Tile,
+    quadkey_zoom: int
+) -> List[str]
+
+

Find quadkeys at the desired zoom for a tile.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
tilemorecantile.TileInput tile to use when searching for quadkeysNone
quadkey_zoomintZoom levelNone
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
listList[str] of quadkeys
+

geographic_bounds

+
def geographic_bounds(
+    ...
+)
+
+

Return dataset bounds in geographic_crs.

+

get_assets

+
def get_assets(
+    *args: Any,
+    **kwargs: Any
+)
+
+

info

+
def info(
+    self,
+    quadkeys: bool = False
+) -> cogeo_mosaic.models.Info
+
+

Mosaic info.

+

part

+
def part(
+    self,
+    bbox: Tuple[float, float, float, float],
+    dst_crs: Optional[rasterio.crs.CRS] = None,
+    bounds_crs: rasterio.crs.CRS = CRS.from_epsg(4326),
+    scan_limit: Optional[int] = None,
+    items_limit: Optional[int] = None,
+    time_limit: Optional[int] = None,
+    exitwhenfull: Optional[bool] = None,
+    skipcovered: Optional[bool] = None,
+    **kwargs: Any
+) -> Tuple[rio_tiler.models.ImageData, List[str]]
+
+

Create an Image from multiple items for a bbox.

+

point

+
def point(
+    self,
+    lon: float,
+    lat: float,
+    coord_crs: rasterio.crs.CRS = CRS.from_epsg(4326),
+    scan_limit: Optional[int] = None,
+    items_limit: Optional[int] = None,
+    time_limit: Optional[int] = None,
+    exitwhenfull: Optional[bool] = None,
+    skipcovered: Optional[bool] = None,
+    **kwargs: Any
+) -> List
+
+

Get Point value from multiple observations.

+

preview

+
def preview(
+    self
+)
+
+

Placeholder for BaseReader.preview.

+

statistics

+
def statistics(
+    self
+)
+
+

Placeholder for BaseReader.statistics.

+

tile

+
def tile(
+    self,
+    tile_x: int,
+    tile_y: int,
+    tile_z: int,
+    scan_limit: Optional[int] = None,
+    items_limit: Optional[int] = None,
+    time_limit: Optional[int] = None,
+    exitwhenfull: Optional[bool] = None,
+    skipcovered: Optional[bool] = None,
+    **kwargs: Any
+) -> Tuple[rio_tiler.models.ImageData, List[str]]
+
+

Get Tile from multiple observations.

+

tile_exists

+
def tile_exists(
+    self,
+    tile_x: int,
+    tile_y: int,
+    tile_z: int
+) -> bool
+
+

Check if a tile intersects the dataset bounds.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
tile_xintTile's horizontal index.None
tile_yintTile's vertical index.None
tile_zintTile's zoom level index.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
boolTrue if the tile intersects the dataset bounds.
+

update

+
def update(
+    self
+) -> None
+
+

We overwrite the default method.

+

write

+
def write(
+    self,
+    overwrite: bool = True
+) -> None
+
+

This method is not used but is required by the abstract class.

+ + + + + + + + + + \ No newline at end of file diff --git a/1.3.0/api/titiler/pgstac/mosaic/mosaic.md b/1.3.0/api/titiler/pgstac/mosaic/mosaic.md new file mode 100644 index 00000000..56413c2c --- /dev/null +++ b/1.3.0/api/titiler/pgstac/mosaic/mosaic.md @@ -0,0 +1,701 @@ +# Module titiler.pgstac.mosaic + +TiTiler.PgSTAC custom Mosaic Backend and Custom STACReader. + +## Variables + +```python3 +MAX_THREADS +``` + +```python3 +WGS84_CRS +``` + +```python3 +cache_config +``` + +```python3 +retry_config +``` + +## Functions + + +### multi_points_pgstac + +```python3 +def multi_points_pgstac( + asset_list: Sequence[Dict[str, Any]], + reader: Callable[..., rio_tiler.models.PointData], + *args: Any, + threads: int = 20, + allowed_exceptions: Optional[Tuple] = None, + **kwargs: Any +) -> Dict +``` + +Merge values returned from tasks. + +Custom version of `rio_tiler.task.multi_values` which +use constructed `item_id` as dict key. + +## Classes + +### CustomSTACReader + +```python3 +class CustomSTACReader( + input: Dict[str, Any], + tms: morecantile.models.TileMatrixSet = , + reader_options: Dict = NOTHING, + ctx: Any = +) +``` + +Simplified STAC Reader. + +Inputs should be in form of: +{ + "id": "IAMASTACITEM", + "collection": "mycollection", + "bbox": (0, 0, 10, 10), + "assets": { + "COG": { + "href": "https://somewhereovertherainbow.io/cog.tif" + } + } +} + +#### Ancestors (in MRO) + +* rio_tiler.io.base.MultiBaseReader +* rio_tiler.io.base.SpatialMixin + +#### Methods + + +#### feature + +```python3 +def feature( + self, + shape: Dict, + assets: Union[Sequence[str], str] = None, + expression: Optional[str] = None, + asset_indexes: Optional[Dict[str, Union[Sequence[int], int]]] = None, + asset_as_band: bool = False, + **kwargs: Any +) -> rio_tiler.models.ImageData +``` + +Read and merge parts defined by geojson feature from multiple assets. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| shape | dict | Valid GeoJSON feature. | None | +| assets | sequence of str or str | assets to fetch info from. | None | +| expression | str | rio-tiler expression for the asset list (e.g. asset1/asset2+asset3). | None | +| asset_indexes | dict | Band indexes for each asset (e.g {"asset1": 1, "asset2": (1, 2,)}). | None | +| kwargs | optional | Options to forward to the `self.reader.feature` method. | None | + +**Returns:** + +| Type | Description | +|---|---| +| rio_tiler.models.ImageData | ImageData instance with data, mask and tile spatial info. | + + +#### geographic_bounds + +```python3 +def geographic_bounds( + ... +) +``` + +Return dataset bounds in geographic_crs. + + +#### info + +```python3 +def info( + self, + assets: Union[Sequence[str], str] = None, + **kwargs: Any +) -> Dict[str, rio_tiler.models.Info] +``` + +Return metadata from multiple assets. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| assets | sequence of str or str | assets to fetch info from. Required keyword argument. | None | + +**Returns:** + +| Type | Description | +|---|---| +| dict | Multiple assets info in form of {"asset1": rio_tile.models.Info}. 
| + + +#### merged_statistics + +```python3 +def merged_statistics( + self, + assets: Union[Sequence[str], str] = None, + expression: Optional[str] = None, + asset_indexes: Optional[Dict[str, Union[Sequence[int], int]]] = None, + categorical: bool = False, + categories: Optional[List[float]] = None, + percentiles: Optional[List[int]] = None, + hist_options: Optional[Dict] = None, + max_size: int = 1024, + **kwargs: Any +) -> Dict[str, rio_tiler.models.BandStatistics] +``` + +Return array statistics for multiple assets. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| assets | sequence of str or str | assets to fetch info from. | None | +| expression | str | rio-tiler expression for the asset list (e.g. asset1/asset2+asset3). | None | +| asset_indexes | dict | Band indexes for each asset (e.g {"asset1": 1, "asset2": (1, 2,)}). | None | +| categorical | bool | treat input data as categorical data. Defaults to False. | False | +| categories | list of numbers | list of categories to return value for. | None | +| percentiles | list of numbers | list of percentile values to calculate. Defaults to `[2, 98]`. | `[2, 98]` | +| hist_options | dict | Options to forward to numpy.histogram function. | None | +| max_size | int | Limit the size of the longest dimension of the dataset read, respecting bounds X/Y aspect ratio. Defaults to 1024. | 1024 | +| kwargs | optional | Options to forward to the `self.preview` method. | None | + +**Returns:** + +| Type | Description | +|---|---| +| Dict[str, rio_tiler.models.BandStatistics] | bands statistics. | + + +#### parse_expression + +```python3 +def parse_expression( + self, + expression: str, + asset_as_band: bool = False +) -> Tuple +``` + +Parse rio-tiler band math expression. + + +#### part + +```python3 +def part( + self, + bbox: Tuple[float, float, float, float], + assets: Union[Sequence[str], str] = None, + expression: Optional[str] = None, + asset_indexes: Optional[Dict[str, Union[Sequence[int], int]]] = None, + asset_as_band: bool = False, + **kwargs: Any +) -> rio_tiler.models.ImageData +``` + +Read and merge parts from multiple assets. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| bbox | tuple | Output bounds (left, bottom, right, top) in target crs. | None | +| assets | sequence of str or str | assets to fetch info from. | None | +| expression | str | rio-tiler expression for the asset list (e.g. asset1/asset2+asset3). | None | +| asset_indexes | dict | Band indexes for each asset (e.g {"asset1": 1, "asset2": (1, 2,)}). | None | +| kwargs | optional | Options to forward to the `self.reader.part` method. | None | + +**Returns:** + +| Type | Description | +|---|---| +| rio_tiler.models.ImageData | ImageData instance with data, mask and tile spatial info. | + + +#### point + +```python3 +def point( + self, + lon: float, + lat: float, + assets: Union[Sequence[str], str] = None, + expression: Optional[str] = None, + asset_indexes: Optional[Dict[str, Union[Sequence[int], int]]] = None, + asset_as_band: bool = False, + **kwargs: Any +) -> rio_tiler.models.PointData +``` + +Read pixel value from multiple assets. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| lon | float | Longitude. | None | +| lat | float | Latitude. | None | +| assets | sequence of str or str | assets to fetch info from. | None | +| expression | str | rio-tiler expression for the asset list (e.g. asset1/asset2+asset3). 
| None | +| asset_indexes | dict | Band indexes for each asset (e.g {"asset1": 1, "asset2": (1, 2,)}). | None | +| kwargs | optional | Options to forward to the `self.reader.point` method. | None | + +**Returns:** + +| Type | Description | +|---|---| +| None | PointData | + + +#### preview + +```python3 +def preview( + self, + assets: Union[Sequence[str], str] = None, + expression: Optional[str] = None, + asset_indexes: Optional[Dict[str, Union[Sequence[int], int]]] = None, + asset_as_band: bool = False, + **kwargs: Any +) -> rio_tiler.models.ImageData +``` + +Read and merge previews from multiple assets. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| assets | sequence of str or str | assets to fetch info from. | None | +| expression | str | rio-tiler expression for the asset list (e.g. asset1/asset2+asset3). | None | +| asset_indexes | dict | Band indexes for each asset (e.g {"asset1": 1, "asset2": (1, 2,)}). | None | +| kwargs | optional | Options to forward to the `self.reader.preview` method. | None | + +**Returns:** + +| Type | Description | +|---|---| +| rio_tiler.models.ImageData | ImageData instance with data, mask and tile spatial info. | + + +#### statistics + +```python3 +def statistics( + self, + assets: Union[Sequence[str], str] = None, + asset_indexes: Optional[Dict[str, Union[Sequence[int], int]]] = None, + asset_expression: Optional[Dict[str, str]] = None, + **kwargs: Any +) -> Dict[str, Dict[str, rio_tiler.models.BandStatistics]] +``` + +Return array statistics for multiple assets. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| assets | sequence of str or str | assets to fetch info from. | None | +| asset_indexes | dict | Band indexes for each asset (e.g {"asset1": 1, "asset2": (1, 2,)}). | None | +| asset_expression | dict | rio-tiler expression for each asset (e.g. {"asset1": "b1/b2+b3", "asset2": ...}). | None | +| kwargs | optional | Options to forward to the `self.reader.statistics` method. | None | + +**Returns:** + +| Type | Description | +|---|---| +| dict | Multiple assets statistics in form of {"asset1": {"1": rio_tiler.models.BandStatistics, ...}}. | + + +#### tile + +```python3 +def tile( + self, + tile_x: int, + tile_y: int, + tile_z: int, + assets: Union[Sequence[str], str] = None, + expression: Optional[str] = None, + asset_indexes: Optional[Dict[str, Union[Sequence[int], int]]] = None, + asset_as_band: bool = False, + **kwargs: Any +) -> rio_tiler.models.ImageData +``` + +Read and merge Wep Map tiles from multiple assets. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| tile_x | int | Tile's horizontal index. | None | +| tile_y | int | Tile's vertical index. | None | +| tile_z | int | Tile's zoom level index. | None | +| assets | sequence of str or str | assets to fetch info from. | None | +| expression | str | rio-tiler expression for the asset list (e.g. asset1/asset2+asset3). | None | +| asset_indexes | dict | Band indexes for each asset (e.g {"asset1": 1, "asset2": (1, 2,)}). | None | +| kwargs | optional | Options to forward to the `self.reader.tile` method. | None | + +**Returns:** + +| Type | Description | +|---|---| +| rio_tiler.models.ImageData | ImageData instance with data, mask and tile spatial info. | + + +#### tile_exists + +```python3 +def tile_exists( + self, + tile_x: int, + tile_y: int, + tile_z: int +) -> bool +``` + +Check if a tile intersects the dataset bounds. 
+ +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| tile_x | int | Tile's horizontal index. | None | +| tile_y | int | Tile's vertical index. | None | +| tile_z | int | Tile's zoom level index. | None | + +**Returns:** + +| Type | Description | +|---|---| +| bool | True if the tile intersects the dataset bounds. | + +### PGSTACBackend + +```python3 +class PGSTACBackend( + input: str, + pool: psycopg_pool.pool.ConnectionPool, + tms: morecantile.models.TileMatrixSet = None +``` + +We overwrite the default method. + + +#### write + +```python3 +def write( + self, + overwrite: bool = True +) -> None +``` + +This method is not used but is required by the abstract class. \ No newline at end of file diff --git a/1.3.0/api/titiler/pgstac/reader/index.html b/1.3.0/api/titiler/pgstac/reader/index.html new file mode 100644 index 00000000..1f3e55ee --- /dev/null +++ b/1.3.0/api/titiler/pgstac/reader/index.html @@ -0,0 +1,2191 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + reader - TiTiler.PgSTAC + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

Module titiler.pgstac.reader

+

Custom STAC reader.

+

Variables

+
DEFAULT_VALID_TYPE
+
+
WGS84_CRS
+
+

Classes

+

PgSTACReader

+
class PgSTACReader(
+    input: pystac.item.Item,
+    tms: morecantile.models.TileMatrixSet = <TileMatrixSet title='Google Maps Compatible for the World' id='WebMercatorQuad' crs='http://www.opengis.net/def/crs/EPSG/0/3857'>,
+    minzoom: int = NOTHING,
+    maxzoom: int = NOTHING,
+    geographic_crs: rasterio.crs.CRS = CRS.from_epsg(4326),
+    include_assets: Optional[Set[str]] = None,
+    exclude_assets: Optional[Set[str]] = None,
+    include_asset_types: Set[str] = {'image/tiff', 'image/tiff; application=geotiff', 'image/tiff; application=geotiff; profile=cloud-optimized', 'image/x.geotiff', 'image/tiff; profile=cloud-optimized; application=geotiff', 'application/x-hdf', 'image/jp2', 'image/vnd.stac.geotiff; cloud-optimized=true', 'application/x-hdf5'},
+    exclude_asset_types: Optional[Set[str]] = None,
+    reader: Type[rio_tiler.io.base.BaseReader] = <class 'rio_tiler.io.rasterio.Reader'>,
+    reader_options: Dict = NOTHING,
+    ctx: Any = <class 'rasterio.env.Env'>
+)
+
+

Custom STAC Reader.

+

Only accepts pystac.Item as input (while rio_tiler.io.STACReader accepts a url or a pystac.Item).
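A hedged usage sketch; the item URL and the "visual" asset name are placeholders (in the application the pystac.Item is built from the PgSTAC database, not fetched from a URL).

```python3
import pystac

from titiler.pgstac.reader import PgSTACReader

item = pystac.Item.from_file("https://stac.example.com/collections/c/items/i")  # placeholder

with PgSTACReader(item) as reader:
    info = reader.info(assets=reader.assets)         # metadata for every valid asset
    img = reader.tile(268, 187, 9, assets="visual")  # placeholder asset name
```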

+

Ancestors (in MRO)

+
    +
  • rio_tiler.io.base.MultiBaseReader
  • +
  • rio_tiler.io.base.SpatialMixin
  • +
+

Methods

+

feature

+
def feature(
+    self,
+    shape: Dict,
+    assets: Union[Sequence[str], str] = None,
+    expression: Optional[str] = None,
+    asset_indexes: Optional[Dict[str, Union[Sequence[int], int]]] = None,
+    asset_as_band: bool = False,
+    **kwargs: Any
+) -> rio_tiler.models.ImageData
+
+

Read and merge parts defined by geojson feature from multiple assets.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
shapedictValid GeoJSON feature.None
assetssequence of str or strassets to fetch info from.None
expressionstrrio-tiler expression for the asset list (e.g. asset1/asset2+asset3).None
asset_indexesdictBand indexes for each asset (e.g {"asset1": 1, "asset2": (1, 2,)}).None
kwargsoptionalOptions to forward to the self.reader.feature method.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
rio_tiler.models.ImageDataImageData instance with data, mask and tile spatial info.
+

geographic_bounds

+
def geographic_bounds(
+    ...
+)
+
+

Return dataset bounds in geographic_crs.

+

info

+
def info(
+    self,
+    assets: Union[Sequence[str], str] = None,
+    **kwargs: Any
+) -> Dict[str, rio_tiler.models.Info]
+
+

Return metadata from multiple assets.

+

Parameters:

+ + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
assetssequence of str or strassets to fetch info from. Required keyword argument.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
dictMultiple assets info in form of {"asset1": rio_tiler.models.Info}.
+

merged_statistics

+
def merged_statistics(
+    self,
+    assets: Union[Sequence[str], str] = None,
+    expression: Optional[str] = None,
+    asset_indexes: Optional[Dict[str, Union[Sequence[int], int]]] = None,
+    categorical: bool = False,
+    categories: Optional[List[float]] = None,
+    percentiles: Optional[List[int]] = None,
+    hist_options: Optional[Dict] = None,
+    max_size: int = 1024,
+    **kwargs: Any
+) -> Dict[str, rio_tiler.models.BandStatistics]
+
+

Return array statistics for multiple assets.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
assetssequence of str or strassets to fetch info from.None
expressionstrrio-tiler expression for the asset list (e.g. asset1/asset2+asset3).None
asset_indexesdictBand indexes for each asset (e.g {"asset1": 1, "asset2": (1, 2,)}).None
categoricalbooltreat input data as categorical data. Defaults to False.False
categorieslist of numberslist of categories to return value for.None
percentileslist of numberslist of percentile values to calculate. Defaults to [2, 98].[2, 98]
hist_optionsdictOptions to forward to numpy.histogram function.None
max_sizeintLimit the size of the longest dimension of the dataset read, respecting bounds X/Y aspect ratio. Defaults to 1024.1024
kwargsoptionalOptions to forward to the self.preview method.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
Dict[str, rio_tiler.models.BandStatistics]bands statistics.
+

parse_expression

+
def parse_expression(
+    self,
+    expression: str,
+    asset_as_band: bool = False
+) -> Tuple
+
+

Parse rio-tiler band math expression.

+

part

+
def part(
+    self,
+    bbox: Tuple[float, float, float, float],
+    assets: Union[Sequence[str], str] = None,
+    expression: Optional[str] = None,
+    asset_indexes: Optional[Dict[str, Union[Sequence[int], int]]] = None,
+    asset_as_band: bool = False,
+    **kwargs: Any
+) -> rio_tiler.models.ImageData
+
+

Read and merge parts from multiple assets.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
bboxtupleOutput bounds (left, bottom, right, top) in target crs.None
assetssequence of str or strassets to fetch info from.None
expressionstrrio-tiler expression for the asset list (e.g. asset1/asset2+asset3).None
asset_indexesdictBand indexes for each asset (e.g {"asset1": 1, "asset2": (1, 2,)}).None
kwargsoptionalOptions to forward to the self.reader.part method.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
rio_tiler.models.ImageDataImageData instance with data, mask and tile spatial info.
+

point

+
def point(
+    self,
+    lon: float,
+    lat: float,
+    assets: Union[Sequence[str], str] = None,
+    expression: Optional[str] = None,
+    asset_indexes: Optional[Dict[str, Union[Sequence[int], int]]] = None,
+    asset_as_band: bool = False,
+    **kwargs: Any
+) -> rio_tiler.models.PointData
+
+

Read pixel value from multiple assets.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
lonfloatLongitude.None
latfloatLatitude.None
assetssequence of str or strassets to fetch info from.None
expressionstrrio-tiler expression for the asset list (e.g. asset1/asset2+asset3).None
asset_indexesdictBand indexes for each asset (e.g {"asset1": 1, "asset2": (1, 2,)}).None
kwargsoptionalOptions to forward to the self.reader.point method.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
NonePointData
+

preview

+
def preview(
+    self,
+    assets: Union[Sequence[str], str] = None,
+    expression: Optional[str] = None,
+    asset_indexes: Optional[Dict[str, Union[Sequence[int], int]]] = None,
+    asset_as_band: bool = False,
+    **kwargs: Any
+) -> rio_tiler.models.ImageData
+
+

Read and merge previews from multiple assets.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
assetssequence of str or strassets to fetch info from.None
expressionstrrio-tiler expression for the asset list (e.g. asset1/asset2+asset3).None
asset_indexesdictBand indexes for each asset (e.g {"asset1": 1, "asset2": (1, 2,)}).None
kwargsoptionalOptions to forward to the self.reader.preview method.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
rio_tiler.models.ImageDataImageData instance with data, mask and tile spatial info.
+

statistics

+
def statistics(
+    self,
+    assets: Union[Sequence[str], str] = None,
+    asset_indexes: Optional[Dict[str, Union[Sequence[int], int]]] = None,
+    asset_expression: Optional[Dict[str, str]] = None,
+    **kwargs: Any
+) -> Dict[str, Dict[str, rio_tiler.models.BandStatistics]]
+
+

Return array statistics for multiple assets.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
assetssequence of str or strassets to fetch info from.None
asset_indexesdictBand indexes for each asset (e.g {"asset1": 1, "asset2": (1, 2,)}).None
asset_expressiondictrio-tiler expression for each asset (e.g. {"asset1": "b1/b2+b3", "asset2": ...}).None
kwargsoptionalOptions to forward to the self.reader.statistics method.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
dictMultiple assets statistics in form of {"asset1": {"1": rio_tiler.models.BandStatistics, ...}}.
+

tile

+
def tile(
+    self,
+    tile_x: int,
+    tile_y: int,
+    tile_z: int,
+    assets: Union[Sequence[str], str] = None,
+    expression: Optional[str] = None,
+    asset_indexes: Optional[Dict[str, Union[Sequence[int], int]]] = None,
+    asset_as_band: bool = False,
+    **kwargs: Any
+) -> rio_tiler.models.ImageData
+
+

Read and merge Web Map tiles from multiple assets.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
tile_xintTile's horizontal index.None
tile_yintTile's vertical index.None
tile_zintTile's zoom level index.None
assetssequence of str or strassets to fetch info from.None
expressionstrrio-tiler expression for the asset list (e.g. asset1/asset2+asset3).None
asset_indexesdictBand indexes for each asset (e.g {"asset1": 1, "asset2": (1, 2,)}).None
kwargsoptionalOptions to forward to the self.reader.tile method.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
rio_tiler.models.ImageDataImageData instance with data, mask and tile spatial info.
+

tile_exists

+
def tile_exists(
+    self,
+    tile_x: int,
+    tile_y: int,
+    tile_z: int
+) -> bool
+
+

Check if a tile intersects the dataset bounds.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
tile_xintTile's horizontal index.None
tile_yintTile's vertical index.None
tile_zintTile's zoom level index.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
boolTrue if the tile intersects the dataset bounds.
+ + + + + + + + + + + + + +
+ + + + + + + + + + \ No newline at end of file diff --git a/1.3.0/api/titiler/pgstac/reader/reader.md b/1.3.0/api/titiler/pgstac/reader/reader.md new file mode 100644 index 00000000..ebe6c389 --- /dev/null +++ b/1.3.0/api/titiler/pgstac/reader/reader.md @@ -0,0 +1,361 @@ +# Module titiler.pgstac.reader + +Custom STAC reader. + +## Variables + +```python3 +DEFAULT_VALID_TYPE +``` + +```python3 +WGS84_CRS +``` + +## Classes + +### PgSTACReader + +```python3 +class PgSTACReader( + input: pystac.item.Item, + tms: morecantile.models.TileMatrixSet = , + reader_options: Dict = NOTHING, + ctx: Any = +) +``` + +Custom STAC Reader. + +Only accept `pystac.Item` as input (while rio_tiler.io.STACReader accepts url or pystac.Item) + +#### Ancestors (in MRO) + +* rio_tiler.io.base.MultiBaseReader +* rio_tiler.io.base.SpatialMixin + +#### Methods + + +#### feature + +```python3 +def feature( + self, + shape: Dict, + assets: Union[Sequence[str], str] = None, + expression: Optional[str] = None, + asset_indexes: Optional[Dict[str, Union[Sequence[int], int]]] = None, + asset_as_band: bool = False, + **kwargs: Any +) -> rio_tiler.models.ImageData +``` + +Read and merge parts defined by geojson feature from multiple assets. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| shape | dict | Valid GeoJSON feature. | None | +| assets | sequence of str or str | assets to fetch info from. | None | +| expression | str | rio-tiler expression for the asset list (e.g. asset1/asset2+asset3). | None | +| asset_indexes | dict | Band indexes for each asset (e.g {"asset1": 1, "asset2": (1, 2,)}). | None | +| kwargs | optional | Options to forward to the `self.reader.feature` method. | None | + +**Returns:** + +| Type | Description | +|---|---| +| rio_tiler.models.ImageData | ImageData instance with data, mask and tile spatial info. | + + +#### geographic_bounds + +```python3 +def geographic_bounds( + ... +) +``` + +Return dataset bounds in geographic_crs. + + +#### info + +```python3 +def info( + self, + assets: Union[Sequence[str], str] = None, + **kwargs: Any +) -> Dict[str, rio_tiler.models.Info] +``` + +Return metadata from multiple assets. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| assets | sequence of str or str | assets to fetch info from. Required keyword argument. | None | + +**Returns:** + +| Type | Description | +|---|---| +| dict | Multiple assets info in form of {"asset1": rio_tile.models.Info}. | + + +#### merged_statistics + +```python3 +def merged_statistics( + self, + assets: Union[Sequence[str], str] = None, + expression: Optional[str] = None, + asset_indexes: Optional[Dict[str, Union[Sequence[int], int]]] = None, + categorical: bool = False, + categories: Optional[List[float]] = None, + percentiles: Optional[List[int]] = None, + hist_options: Optional[Dict] = None, + max_size: int = 1024, + **kwargs: Any +) -> Dict[str, rio_tiler.models.BandStatistics] +``` + +Return array statistics for multiple assets. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| assets | sequence of str or str | assets to fetch info from. | None | +| expression | str | rio-tiler expression for the asset list (e.g. asset1/asset2+asset3). | None | +| asset_indexes | dict | Band indexes for each asset (e.g {"asset1": 1, "asset2": (1, 2,)}). | None | +| categorical | bool | treat input data as categorical data. Defaults to False. | False | +| categories | list of numbers | list of categories to return value for. 
| None | +| percentiles | list of numbers | list of percentile values to calculate. Defaults to `[2, 98]`. | `[2, 98]` | +| hist_options | dict | Options to forward to numpy.histogram function. | None | +| max_size | int | Limit the size of the longest dimension of the dataset read, respecting bounds X/Y aspect ratio. Defaults to 1024. | 1024 | +| kwargs | optional | Options to forward to the `self.preview` method. | None | + +**Returns:** + +| Type | Description | +|---|---| +| Dict[str, rio_tiler.models.BandStatistics] | bands statistics. | + + +#### parse_expression + +```python3 +def parse_expression( + self, + expression: str, + asset_as_band: bool = False +) -> Tuple +``` + +Parse rio-tiler band math expression. + + +#### part + +```python3 +def part( + self, + bbox: Tuple[float, float, float, float], + assets: Union[Sequence[str], str] = None, + expression: Optional[str] = None, + asset_indexes: Optional[Dict[str, Union[Sequence[int], int]]] = None, + asset_as_band: bool = False, + **kwargs: Any +) -> rio_tiler.models.ImageData +``` + +Read and merge parts from multiple assets. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| bbox | tuple | Output bounds (left, bottom, right, top) in target crs. | None | +| assets | sequence of str or str | assets to fetch info from. | None | +| expression | str | rio-tiler expression for the asset list (e.g. asset1/asset2+asset3). | None | +| asset_indexes | dict | Band indexes for each asset (e.g {"asset1": 1, "asset2": (1, 2,)}). | None | +| kwargs | optional | Options to forward to the `self.reader.part` method. | None | + +**Returns:** + +| Type | Description | +|---|---| +| rio_tiler.models.ImageData | ImageData instance with data, mask and tile spatial info. | + + +#### point + +```python3 +def point( + self, + lon: float, + lat: float, + assets: Union[Sequence[str], str] = None, + expression: Optional[str] = None, + asset_indexes: Optional[Dict[str, Union[Sequence[int], int]]] = None, + asset_as_band: bool = False, + **kwargs: Any +) -> rio_tiler.models.PointData +``` + +Read pixel value from multiple assets. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| lon | float | Longitude. | None | +| lat | float | Latitude. | None | +| assets | sequence of str or str | assets to fetch info from. | None | +| expression | str | rio-tiler expression for the asset list (e.g. asset1/asset2+asset3). | None | +| asset_indexes | dict | Band indexes for each asset (e.g {"asset1": 1, "asset2": (1, 2,)}). | None | +| kwargs | optional | Options to forward to the `self.reader.point` method. | None | + +**Returns:** + +| Type | Description | +|---|---| +| None | PointData | + + +#### preview + +```python3 +def preview( + self, + assets: Union[Sequence[str], str] = None, + expression: Optional[str] = None, + asset_indexes: Optional[Dict[str, Union[Sequence[int], int]]] = None, + asset_as_band: bool = False, + **kwargs: Any +) -> rio_tiler.models.ImageData +``` + +Read and merge previews from multiple assets. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| assets | sequence of str or str | assets to fetch info from. | None | +| expression | str | rio-tiler expression for the asset list (e.g. asset1/asset2+asset3). | None | +| asset_indexes | dict | Band indexes for each asset (e.g {"asset1": 1, "asset2": (1, 2,)}). | None | +| kwargs | optional | Options to forward to the `self.reader.preview` method. 
| None | + +**Returns:** + +| Type | Description | +|---|---| +| rio_tiler.models.ImageData | ImageData instance with data, mask and tile spatial info. | + + +#### statistics + +```python3 +def statistics( + self, + assets: Union[Sequence[str], str] = None, + asset_indexes: Optional[Dict[str, Union[Sequence[int], int]]] = None, + asset_expression: Optional[Dict[str, str]] = None, + **kwargs: Any +) -> Dict[str, Dict[str, rio_tiler.models.BandStatistics]] +``` + +Return array statistics for multiple assets. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| assets | sequence of str or str | assets to fetch info from. | None | +| asset_indexes | dict | Band indexes for each asset (e.g {"asset1": 1, "asset2": (1, 2,)}). | None | +| asset_expression | dict | rio-tiler expression for each asset (e.g. {"asset1": "b1/b2+b3", "asset2": ...}). | None | +| kwargs | optional | Options to forward to the `self.reader.statistics` method. | None | + +**Returns:** + +| Type | Description | +|---|---| +| dict | Multiple assets statistics in form of {"asset1": {"1": rio_tiler.models.BandStatistics, ...}}. | + + +#### tile + +```python3 +def tile( + self, + tile_x: int, + tile_y: int, + tile_z: int, + assets: Union[Sequence[str], str] = None, + expression: Optional[str] = None, + asset_indexes: Optional[Dict[str, Union[Sequence[int], int]]] = None, + asset_as_band: bool = False, + **kwargs: Any +) -> rio_tiler.models.ImageData +``` + +Read and merge Wep Map tiles from multiple assets. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| tile_x | int | Tile's horizontal index. | None | +| tile_y | int | Tile's vertical index. | None | +| tile_z | int | Tile's zoom level index. | None | +| assets | sequence of str or str | assets to fetch info from. | None | +| expression | str | rio-tiler expression for the asset list (e.g. asset1/asset2+asset3). | None | +| asset_indexes | dict | Band indexes for each asset (e.g {"asset1": 1, "asset2": (1, 2,)}). | None | +| kwargs | optional | Options to forward to the `self.reader.tile` method. | None | + +**Returns:** + +| Type | Description | +|---|---| +| rio_tiler.models.ImageData | ImageData instance with data, mask and tile spatial info. | + + +#### tile_exists + +```python3 +def tile_exists( + self, + tile_x: int, + tile_y: int, + tile_z: int +) -> bool +``` + +Check if a tile intersects the dataset bounds. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| tile_x | int | Tile's horizontal index. | None | +| tile_y | int | Tile's vertical index. | None | +| tile_z | int | Tile's zoom level index. | None | + +**Returns:** + +| Type | Description | +|---|---| +| bool | True if the tile intersects the dataset bounds. | \ No newline at end of file diff --git a/1.3.0/api/titiler/pgstac/settings/index.html b/1.3.0/api/titiler/pgstac/settings/index.html new file mode 100644 index 00000000..180a16e5 --- /dev/null +++ b/1.3.0/api/titiler/pgstac/settings/index.html @@ -0,0 +1,6055 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + settings - TiTiler.PgSTAC + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

Module titiler.pgstac.settings

+

API settings.

+

Functions

+

RetrySettings

+
def RetrySettings(
+
+) -> titiler.pgstac.settings._RetrySettings
+
+

This function returns a cached instance of the RetrySettings object.
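A minimal sketch of what the caching implies: repeated calls return the very same settings object.

```python3
from titiler.pgstac.settings import RetrySettings

settings = RetrySettings()
assert settings is RetrySettings()  # cached, so the same instance is returned
```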

+

Classes

+

ApiSettings

+
class ApiSettings(
+    __pydantic_self__,
+    _case_sensitive: 'bool | None' = None,
+    _env_prefix: 'str | None' = None,
+    _env_file: 'DotenvType | None' = PosixPath('.'),
+    _env_file_encoding: 'str | None' = None,
+    _env_ignore_empty: 'bool | None' = None,
+    _env_nested_delimiter: 'str | None' = None,
+    _env_parse_none_str: 'str | None' = None,
+    _secrets_dir: 'str | Path | None' = None,
+    **values: 'Any'
+)
+
+

API settings
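A hedged sketch: ApiSettings is a pydantic-settings model, so values can be passed as keyword arguments or read from the environment; the TITILER_PGSTAC_API_ prefix and the `debug` field shown here are assumptions, not confirmed by this page.

```python3
import os

from titiler.pgstac.settings import ApiSettings

os.environ["TITILER_PGSTAC_API_DEBUG"] = "TRUE"  # assumed env prefix/field

settings = ApiSettings()
print(settings.model_dump())
```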

+

Ancestors (in MRO)

+
    +
  • pydantic_settings.main.BaseSettings
  • +
  • pydantic.main.BaseModel
  • +
+

Class variables

+
model_computed_fields
+
+
model_config
+
+
model_fields
+
+

Static methods

+

construct

+
def construct(
+    _fields_set: 'set[str] | None' = None,
+    **values: 'Any'
+) -> 'Model'
+
+

from_orm

+
def from_orm(
+    obj: 'Any'
+) -> 'Model'
+
+

model_construct

+
def model_construct(
+    _fields_set: 'set[str] | None' = None,
+    **values: 'Any'
+) -> 'Model'
+
+

Creates a new instance of the Model class with validated data.

+

Creates a new model setting __dict__ and __pydantic_fields_set__ from trusted or pre-validated data. +Default values are respected, but no other validation is performed.

+
+

Note

+

model_construct() generally respects the model_config.extra setting on the provided model. +That is, if model_config.extra == 'allow', then all extra passed values are added to the model instance's __dict__ +and __pydantic_extra__ fields. If model_config.extra == 'ignore' (the default), then all extra passed values are ignored. +Because no validation is performed with a call to model_construct(), having model_config.extra == 'forbid' does not result in +an error if extra values are passed, but they will be ignored.

+
+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
_fields_setNoneThe set of field names accepted for the Model instance.None
valuesNoneTrusted or pre-validated data dictionary.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
NoneA new instance of the Model class with validated data.
+
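A generic pydantic v2 illustration with a hypothetical `Point` model (not one of the titiler.pgstac classes): `model_construct()` trusts its inputs, while the normal constructor validates them.

```python3
from pydantic import BaseModel


class Point(BaseModel):
    x: int
    y: int


p = Point.model_construct(x="not-an-int", y=2)  # no validation is performed
print(repr(p.x))          # 'not-an-int' is stored as-is
print(Point(x="1", y=2))  # the normal constructor validates/coerces -> x=1 y=2
```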

model_json_schema

+
def model_json_schema(
+    by_alias: 'bool' = True,
+    ref_template: 'str' = '#/$defs/{model}',
+    schema_generator: 'type[GenerateJsonSchema]' = <class 'pydantic.json_schema.GenerateJsonSchema'>,
+    mode: 'JsonSchemaMode' = 'validation'
+) -> 'dict[str, Any]'
+
+

Generates a JSON schema for a model class.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
by_aliasNoneWhether to use attribute aliases or not.None
ref_templateNoneThe reference template.None
schema_generatorNoneTo override the logic used to generate the JSON schema, as a subclass of
GenerateJsonSchema with your desired modifications
None
modeNoneThe mode in which to generate the schema.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
NoneThe JSON schema for the given model class.
+
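A generic pydantic v2 illustration with a hypothetical model:

```python3
import json

from pydantic import BaseModel


class Point(BaseModel):
    x: int
    y: int


print(json.dumps(Point.model_json_schema(), indent=2))
```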

model_parametrized_name

+
def model_parametrized_name(
+    params: 'tuple[type[Any], ...]'
+) -> 'str'
+
+

Compute the class name for parametrizations of generic classes.

+

This method can be overridden to achieve a custom naming scheme for generic BaseModels.

+

Parameters:

| Name | Type | Description | Default |
|---|---|---|---|
| params | None | Tuple of types of the class. Given a generic class Model with 2 type variables and a concrete model Model[str, int], the value (str, int) would be passed to params. | None |
+

Returns:

| Type | Description |
|---|---|
| None | String representing the new class where params are passed to cls as type variables. |
+

Raises:

| Type | Description |
|---|---|
| TypeError | Raised when trying to generate concrete names for non-generic models. |
+

model_rebuild

+
def model_rebuild(
+    *,
+    force: 'bool' = False,
+    raise_errors: 'bool' = True,
+    _parent_namespace_depth: 'int' = 2,
+    _types_namespace: 'dict[str, Any] | None' = None
+) -> 'bool | None'
+
+

Try to rebuild the pydantic-core schema for the model.

+

This may be necessary when one of the annotations is a ForwardRef which could not be resolved during the initial attempt to build the schema, and automatic rebuilding fails.

+

Parameters:

| Name | Type | Description | Default |
|---|---|---|---|
| force | None | Whether to force the rebuilding of the model schema, defaults to False. | None |
| raise_errors | None | Whether to raise errors, defaults to True. | None |
| _parent_namespace_depth | None | The depth level of the parent namespace, defaults to 2. | None |
| _types_namespace | None | The types namespace, defaults to None. | None |
+

Returns:

| Type | Description |
|---|---|
| None | Returns None if the schema is already "complete" and rebuilding was not required. If rebuilding was required, returns True if rebuilding was successful, otherwise False. |
+

model_validate

+
def model_validate(
+    obj: 'Any',
+    *,
+    strict: 'bool | None' = None,
+    from_attributes: 'bool | None' = None,
+    context: 'dict[str, Any] | None' = None
+) -> 'Model'
+
+

Validate a pydantic model instance.

+

Parameters:

| Name | Type | Description | Default |
|---|---|---|---|
| obj | None | The object to validate. | None |
| strict | None | Whether to enforce types strictly. | None |
| from_attributes | None | Whether to extract data from object attributes. | None |
| context | None | Additional context to pass to the validator. | None |
+

Returns:

| Type | Description |
|---|---|
| None | The validated model instance. |
+

Raises:

| Type | Description |
|---|---|
| ValidationError | If the object could not be validated. |
+

model_validate_json

+
def model_validate_json(
+    json_data: 'str | bytes | bytearray',
+    *,
+    strict: 'bool | None' = None,
+    context: 'dict[str, Any] | None' = None
+) -> 'Model'
+
+

Usage docs: docs.pydantic.dev/2.7/concepts/json/#json-parsing

+

Validate the given JSON data against the Pydantic model.

+

Parameters:

| Name | Type | Description | Default |
|---|---|---|---|
| json_data | None | The JSON data to validate. | None |
| strict | None | Whether to enforce types strictly. | None |
| context | None | Extra variables to pass to the validator. | None |
+

Returns:

| Type | Description |
|---|---|
| None | The validated Pydantic model. |
+

Raises:

| Type | Description |
|---|---|
| ValueError | If json_data is not a JSON string. |
+
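For example, a minimal round-trip sketch pairing it with model_dump_json:

```python3
from titiler.pgstac.settings import ApiSettings

payload = ApiSettings().model_dump_json()           # serialize current settings
restored = ApiSettings.model_validate_json(payload)  # validate them back from JSON
```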

model_validate_strings

+
def model_validate_strings(
+    obj: 'Any',
+    *,
+    strict: 'bool | None' = None,
+    context: 'dict[str, Any] | None' = None
+) -> 'Model'
+
+

Validate the given object containing string data against the Pydantic model.

+

Parameters:

| Name | Type | Description | Default |
|---|---|---|---|
| obj | None | The object containing string data to validate. | None |
| strict | None | Whether to enforce types strictly. | None |
| context | None | Extra variables to pass to the validator. | None |
+

Returns:

| Type | Description |
|---|---|
| None | The validated Pydantic model. |
+

parse_cors_origin

+
def parse_cors_origin(
+    v
+)
+
+

Parse CORS origins.
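Illustrative sketch only: a validator like this commonly splits a comma-separated origins string into a list, but the exact return value is not documented on this page and should be verified against the titiler.pgstac source.

```python3
# Hypothetical behaviour; verify against the source before relying on it.
from titiler.pgstac.settings import ApiSettings

origins = ApiSettings.parse_cors_origin("https://a.example, https://b.example")
print(origins)  # e.g. a list of individual origins, if the validator splits on ","
```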

+

parse_file

+
def parse_file(
+    path: 'str | Path',
+    *,
+    content_type: 'str | None' = None,
+    encoding: 'str' = 'utf8',
+    proto: 'DeprecatedParseProtocol | None' = None,
+    allow_pickle: 'bool' = False
+) -> 'Model'
+
+

parse_obj

+
def parse_obj(
+    obj: 'Any'
+) -> 'Model'
+
+

parse_raw

+
def parse_raw(
+    b: 'str | bytes',
+    *,
+    content_type: 'str | None' = None,
+    encoding: 'str' = 'utf8',
+    proto: 'DeprecatedParseProtocol | None' = None,
+    allow_pickle: 'bool' = False
+) -> 'Model'
+
+

schema

+
def schema(
+    by_alias: 'bool' = True,
+    ref_template: 'str' = '#/$defs/{model}'
+) -> 'typing.Dict[str, Any]'
+
+

schema_json

+
def schema_json(
+    *,
+    by_alias: 'bool' = True,
+    ref_template: 'str' = '#/$defs/{model}',
+    **dumps_kwargs: 'Any'
+) -> 'str'
+
+

settings_customise_sources

+
def settings_customise_sources(
+    settings_cls: 'type[BaseSettings]',
+    init_settings: 'PydanticBaseSettingsSource',
+    env_settings: 'PydanticBaseSettingsSource',
+    dotenv_settings: 'PydanticBaseSettingsSource',
+    file_secret_settings: 'PydanticBaseSettingsSource'
+) -> 'tuple[PydanticBaseSettingsSource, ...]'
+
+

Define the sources and their order for loading the settings values.

+

Parameters:

| Name | Type | Description | Default |
|---|---|---|---|
| settings_cls | None | The Settings class. | None |
| init_settings | None | The InitSettingsSource instance. | None |
| env_settings | None | The EnvSettingsSource instance. | None |
| dotenv_settings | None | The DotEnvSettingsSource instance. | None |
| file_secret_settings | None | The SecretsSettingsSource instance. | None |
+

Returns:

| Type | Description |
|---|---|
| None | A tuple containing the sources and their order for loading the settings values. |
+
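A hedged sketch of how a subclass could reorder the sources so environment variables always win; this is generic pydantic-settings usage, not something titiler.pgstac does itself:

```python3
from pydantic_settings import BaseSettings, PydanticBaseSettingsSource


class EnvFirstSettings(BaseSettings):
    """Hypothetical subclass: earlier sources in the returned tuple take priority."""

    @classmethod
    def settings_customise_sources(
        cls,
        settings_cls: type[BaseSettings],
        init_settings: PydanticBaseSettingsSource,
        env_settings: PydanticBaseSettingsSource,
        dotenv_settings: PydanticBaseSettingsSource,
        file_secret_settings: PydanticBaseSettingsSource,
    ) -> tuple[PydanticBaseSettingsSource, ...]:
        # Environment variables first, then init kwargs, .env files, and secrets.
        return env_settings, init_settings, dotenv_settings, file_secret_settings
```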

update_forward_refs

+
def update_forward_refs(
+    **localns: 'Any'
+) -> 'None'
+
+

validate

+
def validate(
+    value: 'Any'
+) -> 'Model'
+
+

Instance variables

+
model_extra
+
+

Get extra fields set during validation.

+
model_fields_set
+
+

Returns the set of fields that have been explicitly set on this model instance.

+

Methods

+

copy

+
def copy(
+    self: 'Model',
+    *,
+    include: 'AbstractSetIntStr | MappingIntStrAny | None' = None,
+    exclude: 'AbstractSetIntStr | MappingIntStrAny | None' = None,
+    update: 'typing.Dict[str, Any] | None' = None,
+    deep: 'bool' = False
+) -> 'Model'
+
+

Returns a copy of the model.

+
+

Deprecated

+

This method is now deprecated; use model_copy instead.

+
+

If you need include or exclude, use:

+
data = self.model_dump(include=include, exclude=exclude, round_trip=True)
+data = {**data, **(update or {})}
+copied = self.model_validate(data)
+
+

Parameters:

| Name | Type | Description | Default |
|---|---|---|---|
| include | None | Optional set or mapping specifying which fields to include in the copied model. | None |
| exclude | None | Optional set or mapping specifying which fields to exclude in the copied model. | None |
| update | None | Optional dictionary of field-value pairs to override field values in the copied model. | None |
| deep | None | If True, the values of fields that are Pydantic models will be deep-copied. | None |
+

Returns:

| Type | Description |
|---|---|
| None | A copy of the model with included, excluded and updated fields as specified. |
+

dict

+
def dict(
+    self,
+    *,
+    include: 'IncEx' = None,
+    exclude: 'IncEx' = None,
+    by_alias: 'bool' = False,
+    exclude_unset: 'bool' = False,
+    exclude_defaults: 'bool' = False,
+    exclude_none: 'bool' = False
+) -> 'typing.Dict[str, Any]'
+
+

json

+
def json(
+    self,
+    *,
+    include: 'IncEx' = None,
+    exclude: 'IncEx' = None,
+    by_alias: 'bool' = False,
+    exclude_unset: 'bool' = False,
+    exclude_defaults: 'bool' = False,
+    exclude_none: 'bool' = False,
+    encoder: 'typing.Callable[[Any], Any] | None' = PydanticUndefined,
+    models_as_dict: 'bool' = PydanticUndefined,
+    **dumps_kwargs: 'Any'
+) -> 'str'
+
+

model_copy

+
def model_copy(
+    self: 'Model',
+    *,
+    update: 'dict[str, Any] | None' = None,
+    deep: 'bool' = False
+) -> 'Model'
+
+

Usage docs: docs.pydantic.dev/2.7/concepts/serialization/#model_copy

+

Returns a copy of the model.

+

Parameters:

| Name | Type | Description | Default |
|---|---|---|---|
| update | None | Values to change/add in the new model. Note: the data is not validated before creating the new model. You should trust this data. | None |
| deep | None | Set to True to make a deep copy of the model. | None |
+

Returns:

| Type | Description |
|---|---|
| None | New model instance. |
+
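A small sketch; note that update values are not re-validated, so validators such as parse_cors_origin will not run on them (the cors_origins field name is an assumption):

```python3
from titiler.pgstac.settings import ApiSettings

settings = ApiSettings()
# "cors_origins" is an assumed field name; update values bypass validation.
tweaked = settings.model_copy(update={"cors_origins": "https://example.com"}, deep=True)
```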

model_dump

+
def model_dump(
+    self,
+    *,
+    mode: "Literal['json', 'python'] | str" = 'python',
+    include: 'IncEx' = None,
+    exclude: 'IncEx' = None,
+    context: 'dict[str, Any] | None' = None,
+    by_alias: 'bool' = False,
+    exclude_unset: 'bool' = False,
+    exclude_defaults: 'bool' = False,
+    exclude_none: 'bool' = False,
+    round_trip: 'bool' = False,
+    warnings: "bool | Literal['none', 'warn', 'error']" = True,
+    serialize_as_any: 'bool' = False
+) -> 'dict[str, Any]'
+
+

Usage docs: docs.pydantic.dev/2.7/concepts/serialization/#modelmodel_dump

+

Generate a dictionary representation of the model, optionally specifying which fields to include or exclude.

+

Parameters:

| Name | Type | Description | Default |
|---|---|---|---|
| mode | None | The mode in which to_python should run. If mode is 'json', the output will only contain JSON serializable types. If mode is 'python', the output may contain non-JSON-serializable Python objects. | None |
| include | None | A set of fields to include in the output. | None |
| exclude | None | A set of fields to exclude from the output. | None |
| context | None | Additional context to pass to the serializer. | None |
| by_alias | None | Whether to use the field's alias in the dictionary key if defined. | None |
| exclude_unset | None | Whether to exclude fields that have not been explicitly set. | None |
| exclude_defaults | None | Whether to exclude fields that are set to their default value. | None |
| exclude_none | None | Whether to exclude fields that have a value of None. | None |
| round_trip | None | If True, dumped values should be valid as input for non-idempotent types such as Json[T]. | None |
| warnings | None | How to handle serialization errors. False/"none" ignores them, True/"warn" logs errors, "error" raises a pydantic_core.PydanticSerializationError. | None |
| serialize_as_any | None | Whether to serialize fields with duck-typing serialization behavior. | None |
+

Returns:

| Type | Description |
|---|---|
| None | A dictionary representation of the model. |
+
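For instance, a small sketch that dumps only explicitly-set fields as JSON-safe values, which can be handy for logging effective configuration overrides:

```python3
from titiler.pgstac.settings import ApiSettings

overrides = ApiSettings().model_dump(mode="json", exclude_unset=True)
```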

model_dump_json

+
def model_dump_json(
+    self,
+    *,
+    indent: 'int | None' = None,
+    include: 'IncEx' = None,
+    exclude: 'IncEx' = None,
+    context: 'dict[str, Any] | None' = None,
+    by_alias: 'bool' = False,
+    exclude_unset: 'bool' = False,
+    exclude_defaults: 'bool' = False,
+    exclude_none: 'bool' = False,
+    round_trip: 'bool' = False,
+    warnings: "bool | Literal['none', 'warn', 'error']" = True,
+    serialize_as_any: 'bool' = False
+) -> 'str'
+
+

Usage docs: docs.pydantic.dev/2.7/concepts/serialization/#modelmodel_dump_json

+

Generates a JSON representation of the model using Pydantic's to_json method.

+

Parameters:

| Name | Type | Description | Default |
|---|---|---|---|
| indent | None | Indentation to use in the JSON output. If None is passed, the output will be compact. | None |
| include | None | Field(s) to include in the JSON output. | None |
| exclude | None | Field(s) to exclude from the JSON output. | None |
| context | None | Additional context to pass to the serializer. | None |
| by_alias | None | Whether to serialize using field aliases. | None |
| exclude_unset | None | Whether to exclude fields that have not been explicitly set. | None |
| exclude_defaults | None | Whether to exclude fields that are set to their default value. | None |
| exclude_none | None | Whether to exclude fields that have a value of None. | None |
| round_trip | None | If True, dumped values should be valid as input for non-idempotent types such as Json[T]. | None |
| warnings | None | How to handle serialization errors. False/"none" ignores them, True/"warn" logs errors, "error" raises a pydantic_core.PydanticSerializationError. | None |
| serialize_as_any | None | Whether to serialize fields with duck-typing serialization behavior. | None |
+

Returns:

| Type | Description |
|---|---|
| None | A JSON string representation of the model. |
+

model_post_init

+
def model_post_init(
+    self,
+    _BaseModel__context: 'Any'
+) -> 'None'
+
+

Override this method to perform additional initialization after __init__ and model_construct.

+

This is useful if you want to do some validation that requires the entire model to be initialized.

+

CacheSettings

+
class CacheSettings(
+    __pydantic_self__,
+    _case_sensitive: 'bool | None' = None,
+    _env_prefix: 'str | None' = None,
+    _env_file: 'DotenvType | None' = PosixPath('.'),
+    _env_file_encoding: 'str | None' = None,
+    _env_ignore_empty: 'bool | None' = None,
+    _env_nested_delimiter: 'str | None' = None,
+    _env_parse_none_str: 'str | None' = None,
+    _secrets_dir: 'str | Path | None' = None,
+    **values: 'Any'
+)
+
+

Cache settings
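Like the other settings classes, CacheSettings is typically driven by environment variables; the check_enable validator documented below suggests a flag that turns caching off entirely. A rough sketch follows, where the TITILER_PGSTAC_CACHE_ prefix and the disable field name are assumptions to verify against the source:

```python3
# Sketch only: prefix and field name are assumptions.
import os

from titiler.pgstac.settings import CacheSettings

os.environ["TITILER_PGSTAC_CACHE_DISABLE"] = "TRUE"

cache_settings = CacheSettings()   # check_enable runs during validation
print(cache_settings.model_dump())
```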

+

Ancestors (in MRO)

+
  • pydantic_settings.main.BaseSettings
  • pydantic.main.BaseModel
+

Class variables

+
model_computed_fields
+
+
model_config
+
+
model_fields
+
+

Static methods

+

construct

+
def construct(
+    _fields_set: 'set[str] | None' = None,
+    **values: 'Any'
+) -> 'Model'
+
+

from_orm

+
def from_orm(
+    obj: 'Any'
+) -> 'Model'
+
+

model_construct

+
def model_construct(
+    _fields_set: 'set[str] | None' = None,
+    **values: 'Any'
+) -> 'Model'
+
+

Creates a new instance of the Model class with validated data.

+

Creates a new model setting __dict__ and __pydantic_fields_set__ from trusted or pre-validated data. +Default values are respected, but no other validation is performed.

+
+

Note

+

model_construct() generally respects the model_config.extra setting on the provided model. +That is, if model_config.extra == 'allow', then all extra passed values are added to the model instance's __dict__ +and __pydantic_extra__ fields. If model_config.extra == 'ignore' (the default), then all extra passed values are ignored. +Because no validation is performed with a call to model_construct(), having model_config.extra == 'forbid' does not result in +an error if extra values are passed, but they will be ignored.

+
+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
_fields_setNoneThe set of field names accepted for the Model instance.None
valuesNoneTrusted or pre-validated data dictionary.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
NoneA new instance of the Model class with validated data.
+

model_json_schema

+
def model_json_schema(
+    by_alias: 'bool' = True,
+    ref_template: 'str' = '#/$defs/{model}',
+    schema_generator: 'type[GenerateJsonSchema]' = <class 'pydantic.json_schema.GenerateJsonSchema'>,
+    mode: 'JsonSchemaMode' = 'validation'
+) -> 'dict[str, Any]'
+
+

Generates a JSON schema for a model class.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
by_aliasNoneWhether to use attribute aliases or not.None
ref_templateNoneThe reference template.None
schema_generatorNoneTo override the logic used to generate the JSON schema, as a subclass of
GenerateJsonSchema with your desired modifications
None
modeNoneThe mode in which to generate the schema.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
NoneThe JSON schema for the given model class.
+

model_parametrized_name

+
def model_parametrized_name(
+    params: 'tuple[type[Any], ...]'
+) -> 'str'
+
+

Compute the class name for parametrizations of generic classes.

+

This method can be overridden to achieve a custom naming scheme for generic BaseModels.

+

Parameters:

+ + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
paramsNoneTuple of types of the class. Given a generic class
Model with 2 type variables and a concrete model Model[str, int],
the value (str, int) would be passed to params.
None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
NoneString representing the new class where params are passed to cls as type variables.
+

Raises:

+ + + + + + + + + + + + + +
TypeDescription
TypeErrorRaised when trying to generate concrete names for non-generic models.
+

model_rebuild

+
def model_rebuild(
+    *,
+    force: 'bool' = False,
+    raise_errors: 'bool' = True,
+    _parent_namespace_depth: 'int' = 2,
+    _types_namespace: 'dict[str, Any] | None' = None
+) -> 'bool | None'
+
+

Try to rebuild the pydantic-core schema for the model.

+

This may be necessary when one of the annotations is a ForwardRef which could not be resolved during +the initial attempt to build the schema, and automatic rebuilding fails.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
forceNoneWhether to force the rebuilding of the model schema, defaults to False.None
raise_errorsNoneWhether to raise errors, defaults to True.None
_parent_namespace_depthNoneThe depth level of the parent namespace, defaults to 2.None
_types_namespaceNoneThe types namespace, defaults to None.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
NoneReturns None if the schema is already "complete" and rebuilding was not required.
If rebuilding was required, returns True if rebuilding was successful, otherwise False.
+

model_validate

+
def model_validate(
+    obj: 'Any',
+    *,
+    strict: 'bool | None' = None,
+    from_attributes: 'bool | None' = None,
+    context: 'dict[str, Any] | None' = None
+) -> 'Model'
+
+

Validate a pydantic model instance.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
objNoneThe object to validate.None
strictNoneWhether to enforce types strictly.None
from_attributesNoneWhether to extract data from object attributes.None
contextNoneAdditional context to pass to the validator.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
NoneThe validated model instance.
+

Raises:

+ + + + + + + + + + + + + +
TypeDescription
ValidationErrorIf the object could not be validated.
+

model_validate_json

+
def model_validate_json(
+    json_data: 'str | bytes | bytearray',
+    *,
+    strict: 'bool | None' = None,
+    context: 'dict[str, Any] | None' = None
+) -> 'Model'
+
+

Usage docs: docs.pydantic.dev/2.7/concepts/json/#json-parsing

+

Validate the given JSON data against the Pydantic model.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
json_dataNoneThe JSON data to validate.None
strictNoneWhether to enforce types strictly.None
contextNoneExtra variables to pass to the validator.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
NoneThe validated Pydantic model.
+

Raises:

+ + + + + + + + + + + + + +
TypeDescription
ValueErrorIf json_data is not a JSON string.
+

model_validate_strings

+
def model_validate_strings(
+    obj: 'Any',
+    *,
+    strict: 'bool | None' = None,
+    context: 'dict[str, Any] | None' = None
+) -> 'Model'
+
+

Validate the given object containing string data against the Pydantic model.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
objNoneThe object contains string data to validate.None
strictNoneWhether to enforce types strictly.None
contextNoneExtra variables to pass to the validator.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
NoneThe validated Pydantic model.
+

parse_file

+
def parse_file(
+    path: 'str | Path',
+    *,
+    content_type: 'str | None' = None,
+    encoding: 'str' = 'utf8',
+    proto: 'DeprecatedParseProtocol | None' = None,
+    allow_pickle: 'bool' = False
+) -> 'Model'
+
+

parse_obj

+
def parse_obj(
+    obj: 'Any'
+) -> 'Model'
+
+

parse_raw

+
def parse_raw(
+    b: 'str | bytes',
+    *,
+    content_type: 'str | None' = None,
+    encoding: 'str' = 'utf8',
+    proto: 'DeprecatedParseProtocol | None' = None,
+    allow_pickle: 'bool' = False
+) -> 'Model'
+
+

schema

+
def schema(
+    by_alias: 'bool' = True,
+    ref_template: 'str' = '#/$defs/{model}'
+) -> 'typing.Dict[str, Any]'
+
+

schema_json

+
def schema_json(
+    *,
+    by_alias: 'bool' = True,
+    ref_template: 'str' = '#/$defs/{model}',
+    **dumps_kwargs: 'Any'
+) -> 'str'
+
+

settings_customise_sources

+
def settings_customise_sources(
+    settings_cls: 'type[BaseSettings]',
+    init_settings: 'PydanticBaseSettingsSource',
+    env_settings: 'PydanticBaseSettingsSource',
+    dotenv_settings: 'PydanticBaseSettingsSource',
+    file_secret_settings: 'PydanticBaseSettingsSource'
+) -> 'tuple[PydanticBaseSettingsSource, ...]'
+
+

Define the sources and their order for loading the settings values.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
settings_clsNoneThe Settings class.None
init_settingsNoneThe InitSettingsSource instance.None
env_settingsNoneThe EnvSettingsSource instance.None
dotenv_settingsNoneThe DotEnvSettingsSource instance.None
file_secret_settingsNoneThe SecretsSettingsSource instance.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
NoneA tuple containing the sources and their order for loading the settings values.
+

update_forward_refs

+
def update_forward_refs(
+    **localns: 'Any'
+) -> 'None'
+
+

validate

+
def validate(
+    value: 'Any'
+) -> 'Model'
+
+

Instance variables

+
model_extra
+
+

Get extra fields set during validation.

+
model_fields_set
+
+

Returns the set of fields that have been explicitly set on this model instance.

+

Methods

+

check_enable

+
def check_enable(
+    self
+)
+
+

Check if cache is disabled.

+

copy

+
def copy(
+    self: 'Model',
+    *,
+    include: 'AbstractSetIntStr | MappingIntStrAny | None' = None,
+    exclude: 'AbstractSetIntStr | MappingIntStrAny | None' = None,
+    update: 'typing.Dict[str, Any] | None' = None,
+    deep: 'bool' = False
+) -> 'Model'
+
+

Returns a copy of the model.

+
+

Deprecated

+

This method is now deprecated; use model_copy instead.

+
+

If you need include or exclude, use:

+
data = self.model_dump(include=include, exclude=exclude, round_trip=True)
+data = {**data, **(update or {})}
+copied = self.model_validate(data)
+
+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
includeNoneOptional set or mapping specifying which fields to include in the copied model.None
excludeNoneOptional set or mapping specifying which fields to exclude in the copied model.None
updateNoneOptional dictionary of field-value pairs to override field values in the copied model.None
deepNoneIf True, the values of fields that are Pydantic models will be deep-copied.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
NoneA copy of the model with included, excluded and updated fields as specified.
+

dict

+
def dict(
+    self,
+    *,
+    include: 'IncEx' = None,
+    exclude: 'IncEx' = None,
+    by_alias: 'bool' = False,
+    exclude_unset: 'bool' = False,
+    exclude_defaults: 'bool' = False,
+    exclude_none: 'bool' = False
+) -> 'typing.Dict[str, Any]'
+
+

json

+
def json(
+    self,
+    *,
+    include: 'IncEx' = None,
+    exclude: 'IncEx' = None,
+    by_alias: 'bool' = False,
+    exclude_unset: 'bool' = False,
+    exclude_defaults: 'bool' = False,
+    exclude_none: 'bool' = False,
+    encoder: 'typing.Callable[[Any], Any] | None' = PydanticUndefined,
+    models_as_dict: 'bool' = PydanticUndefined,
+    **dumps_kwargs: 'Any'
+) -> 'str'
+
+

model_copy

+
def model_copy(
+    self: 'Model',
+    *,
+    update: 'dict[str, Any] | None' = None,
+    deep: 'bool' = False
+) -> 'Model'
+
+

Usage docs: docs.pydantic.dev/2.7/concepts/serialization/#model_copy

+

Returns a copy of the model.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
updateNoneValues to change/add in the new model. Note: the data is not validated
before creating the new model. You should trust this data.
None
deepNoneSet to True to make a deep copy of the model.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
NoneNew model instance.
+

model_dump

+
def model_dump(
+    self,
+    *,
+    mode: "Literal['json', 'python'] | str" = 'python',
+    include: 'IncEx' = None,
+    exclude: 'IncEx' = None,
+    context: 'dict[str, Any] | None' = None,
+    by_alias: 'bool' = False,
+    exclude_unset: 'bool' = False,
+    exclude_defaults: 'bool' = False,
+    exclude_none: 'bool' = False,
+    round_trip: 'bool' = False,
+    warnings: "bool | Literal['none', 'warn', 'error']" = True,
+    serialize_as_any: 'bool' = False
+) -> 'dict[str, Any]'
+
+

Usage docs: docs.pydantic.dev/2.7/concepts/serialization/#modelmodel_dump

+

Generate a dictionary representation of the model, optionally specifying which fields to include or exclude.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
modeNoneThe mode in which to_python should run.
If mode is 'json', the output will only contain JSON serializable types.
If mode is 'python', the output may contain non-JSON-serializable Python objects.
None
includeNoneA set of fields to include in the output.None
excludeNoneA set of fields to exclude from the output.None
contextNoneAdditional context to pass to the serializer.None
by_aliasNoneWhether to use the field's alias in the dictionary key if defined.None
exclude_unsetNoneWhether to exclude fields that have not been explicitly set.None
exclude_defaultsNoneWhether to exclude fields that are set to their default value.None
exclude_noneNoneWhether to exclude fields that have a value of None.None
round_tripNoneIf True, dumped values should be valid as input for non-idempotent types such as Json[T].None
warningsNoneHow to handle serialization errors. False/"none" ignores them, True/"warn" logs errors,
"error" raises a [PydanticSerializationError][pydantic_core.PydanticSerializationError].
None
serialize_as_anyNoneWhether to serialize fields with duck-typing serialization behavior.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
NoneA dictionary representation of the model.
+

model_dump_json

+
def model_dump_json(
+    self,
+    *,
+    indent: 'int | None' = None,
+    include: 'IncEx' = None,
+    exclude: 'IncEx' = None,
+    context: 'dict[str, Any] | None' = None,
+    by_alias: 'bool' = False,
+    exclude_unset: 'bool' = False,
+    exclude_defaults: 'bool' = False,
+    exclude_none: 'bool' = False,
+    round_trip: 'bool' = False,
+    warnings: "bool | Literal['none', 'warn', 'error']" = True,
+    serialize_as_any: 'bool' = False
+) -> 'str'
+
+

Usage docs: docs.pydantic.dev/2.7/concepts/serialization/#modelmodel_dump_json

+

Generates a JSON representation of the model using Pydantic's to_json method.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
indentNoneIndentation to use in the JSON output. If None is passed, the output will be compact.None
includeNoneField(s) to include in the JSON output.None
excludeNoneField(s) to exclude from the JSON output.None
contextNoneAdditional context to pass to the serializer.None
by_aliasNoneWhether to serialize using field aliases.None
exclude_unsetNoneWhether to exclude fields that have not been explicitly set.None
exclude_defaultsNoneWhether to exclude fields that are set to their default value.None
exclude_noneNoneWhether to exclude fields that have a value of None.None
round_tripNoneIf True, dumped values should be valid as input for non-idempotent types such as Json[T].None
warningsNoneHow to handle serialization errors. False/"none" ignores them, True/"warn" logs errors,
"error" raises a [PydanticSerializationError][pydantic_core.PydanticSerializationError].
None
serialize_as_anyNoneWhether to serialize fields with duck-typing serialization behavior.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
NoneA JSON string representation of the model.
+

model_post_init

+
def model_post_init(
+    self,
+    _BaseModel__context: 'Any'
+) -> 'None'
+
+

Override this method to perform additional initialization after __init__ and model_construct.

+

This is useful if you want to do some validation that requires the entire model to be initialized.

+

PostgresSettings

+
class PostgresSettings(
+    __pydantic_self__,
+    _case_sensitive: 'bool | None' = None,
+    _env_prefix: 'str | None' = None,
+    _env_file: 'DotenvType | None' = PosixPath('.'),
+    _env_file_encoding: 'str | None' = None,
+    _env_ignore_empty: 'bool | None' = None,
+    _env_nested_delimiter: 'str | None' = None,
+    _env_parse_none_str: 'str | None' = None,
+    _secrets_dir: 'str | Path | None' = None,
+    **values: 'Any'
+)
+
+

Postgres-specific API settings.

+

Attributes

| Name | Type | Description | Default |
|---|---|---|---|
| postgres_user | None | postgres username. | None |
| postgres_pass | None | postgres password. | None |
| postgres_host | None | database hostname. | None |
| postgres_port | None | database port. | None |
| postgres_dbname | None | database name. | None |
+
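A sketch of configuring the connection from environment variables named after the attributes above; whether an env prefix is applied, and how assemble_db_connection combines the parts into a DSN, should be verified against the source:

```python3
# Sketch only: variable names mirror the documented attributes; adjust as needed.
import os

from titiler.pgstac.settings import PostgresSettings

os.environ.update(
    {
        "POSTGRES_USER": "username",
        "POSTGRES_PASS": "password",
        "POSTGRES_HOST": "localhost",
        "POSTGRES_PORT": "5432",
        "POSTGRES_DBNAME": "postgis",
    }
)

pg_settings = PostgresSettings()  # assemble_db_connection validates/combines the parts
```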

Ancestors (in MRO)

+
  • pydantic_settings.main.BaseSettings
  • pydantic.main.BaseModel
+

Class variables

+
model_computed_fields
+
+
model_config
+
+
model_fields
+
+

Static methods

+

assemble_db_connection

+
def assemble_db_connection(
+    v: Optional[str],
+    info: pydantic_core.core_schema.ValidationInfo
+) -> Any
+
+

Validate database config.

+

construct

+
def construct(
+    _fields_set: 'set[str] | None' = None,
+    **values: 'Any'
+) -> 'Model'
+
+

from_orm

+
def from_orm(
+    obj: 'Any'
+) -> 'Model'
+
+

model_construct

+
def model_construct(
+    _fields_set: 'set[str] | None' = None,
+    **values: 'Any'
+) -> 'Model'
+
+

Creates a new instance of the Model class with validated data.

+

Creates a new model setting __dict__ and __pydantic_fields_set__ from trusted or pre-validated data. +Default values are respected, but no other validation is performed.

+
+

Note

+

model_construct() generally respects the model_config.extra setting on the provided model. +That is, if model_config.extra == 'allow', then all extra passed values are added to the model instance's __dict__ +and __pydantic_extra__ fields. If model_config.extra == 'ignore' (the default), then all extra passed values are ignored. +Because no validation is performed with a call to model_construct(), having model_config.extra == 'forbid' does not result in +an error if extra values are passed, but they will be ignored.

+
+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
_fields_setNoneThe set of field names accepted for the Model instance.None
valuesNoneTrusted or pre-validated data dictionary.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
NoneA new instance of the Model class with validated data.
+

model_json_schema

+
def model_json_schema(
+    by_alias: 'bool' = True,
+    ref_template: 'str' = '#/$defs/{model}',
+    schema_generator: 'type[GenerateJsonSchema]' = <class 'pydantic.json_schema.GenerateJsonSchema'>,
+    mode: 'JsonSchemaMode' = 'validation'
+) -> 'dict[str, Any]'
+
+

Generates a JSON schema for a model class.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
by_aliasNoneWhether to use attribute aliases or not.None
ref_templateNoneThe reference template.None
schema_generatorNoneTo override the logic used to generate the JSON schema, as a subclass of
GenerateJsonSchema with your desired modifications
None
modeNoneThe mode in which to generate the schema.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
NoneThe JSON schema for the given model class.
+

model_parametrized_name

+
def model_parametrized_name(
+    params: 'tuple[type[Any], ...]'
+) -> 'str'
+
+

Compute the class name for parametrizations of generic classes.

+

This method can be overridden to achieve a custom naming scheme for generic BaseModels.

+

Parameters:

+ + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
paramsNoneTuple of types of the class. Given a generic class
Model with 2 type variables and a concrete model Model[str, int],
the value (str, int) would be passed to params.
None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
NoneString representing the new class where params are passed to cls as type variables.
+

Raises:

+ + + + + + + + + + + + + +
TypeDescription
TypeErrorRaised when trying to generate concrete names for non-generic models.
+

model_rebuild

+
def model_rebuild(
+    *,
+    force: 'bool' = False,
+    raise_errors: 'bool' = True,
+    _parent_namespace_depth: 'int' = 2,
+    _types_namespace: 'dict[str, Any] | None' = None
+) -> 'bool | None'
+
+

Try to rebuild the pydantic-core schema for the model.

+

This may be necessary when one of the annotations is a ForwardRef which could not be resolved during +the initial attempt to build the schema, and automatic rebuilding fails.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
forceNoneWhether to force the rebuilding of the model schema, defaults to False.None
raise_errorsNoneWhether to raise errors, defaults to True.None
_parent_namespace_depthNoneThe depth level of the parent namespace, defaults to 2.None
_types_namespaceNoneThe types namespace, defaults to None.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
NoneReturns None if the schema is already "complete" and rebuilding was not required.
If rebuilding was required, returns True if rebuilding was successful, otherwise False.
+

model_validate

+
def model_validate(
+    obj: 'Any',
+    *,
+    strict: 'bool | None' = None,
+    from_attributes: 'bool | None' = None,
+    context: 'dict[str, Any] | None' = None
+) -> 'Model'
+
+

Validate a pydantic model instance.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
objNoneThe object to validate.None
strictNoneWhether to enforce types strictly.None
from_attributesNoneWhether to extract data from object attributes.None
contextNoneAdditional context to pass to the validator.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
NoneThe validated model instance.
+

Raises:

+ + + + + + + + + + + + + +
TypeDescription
ValidationErrorIf the object could not be validated.
+

model_validate_json

+
def model_validate_json(
+    json_data: 'str | bytes | bytearray',
+    *,
+    strict: 'bool | None' = None,
+    context: 'dict[str, Any] | None' = None
+) -> 'Model'
+
+

Usage docs: docs.pydantic.dev/2.7/concepts/json/#json-parsing

+

Validate the given JSON data against the Pydantic model.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
json_dataNoneThe JSON data to validate.None
strictNoneWhether to enforce types strictly.None
contextNoneExtra variables to pass to the validator.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
NoneThe validated Pydantic model.
+

Raises:

+ + + + + + + + + + + + + +
TypeDescription
ValueErrorIf json_data is not a JSON string.
+

model_validate_strings

+
def model_validate_strings(
+    obj: 'Any',
+    *,
+    strict: 'bool | None' = None,
+    context: 'dict[str, Any] | None' = None
+) -> 'Model'
+
+

Validate the given object containing string data against the Pydantic model.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
objNoneThe object contains string data to validate.None
strictNoneWhether to enforce types strictly.None
contextNoneExtra variables to pass to the validator.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
NoneThe validated Pydantic model.
+

parse_file

+
def parse_file(
+    path: 'str | Path',
+    *,
+    content_type: 'str | None' = None,
+    encoding: 'str' = 'utf8',
+    proto: 'DeprecatedParseProtocol | None' = None,
+    allow_pickle: 'bool' = False
+) -> 'Model'
+
+

parse_obj

+
def parse_obj(
+    obj: 'Any'
+) -> 'Model'
+
+

parse_raw

+
def parse_raw(
+    b: 'str | bytes',
+    *,
+    content_type: 'str | None' = None,
+    encoding: 'str' = 'utf8',
+    proto: 'DeprecatedParseProtocol | None' = None,
+    allow_pickle: 'bool' = False
+) -> 'Model'
+
+

schema

+
def schema(
+    by_alias: 'bool' = True,
+    ref_template: 'str' = '#/$defs/{model}'
+) -> 'typing.Dict[str, Any]'
+
+

schema_json

+
def schema_json(
+    *,
+    by_alias: 'bool' = True,
+    ref_template: 'str' = '#/$defs/{model}',
+    **dumps_kwargs: 'Any'
+) -> 'str'
+
+

settings_customise_sources

+
def settings_customise_sources(
+    settings_cls: 'type[BaseSettings]',
+    init_settings: 'PydanticBaseSettingsSource',
+    env_settings: 'PydanticBaseSettingsSource',
+    dotenv_settings: 'PydanticBaseSettingsSource',
+    file_secret_settings: 'PydanticBaseSettingsSource'
+) -> 'tuple[PydanticBaseSettingsSource, ...]'
+
+

Define the sources and their order for loading the settings values.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
settings_clsNoneThe Settings class.None
init_settingsNoneThe InitSettingsSource instance.None
env_settingsNoneThe EnvSettingsSource instance.None
dotenv_settingsNoneThe DotEnvSettingsSource instance.None
file_secret_settingsNoneThe SecretsSettingsSource instance.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
NoneA tuple containing the sources and their order for loading the settings values.
+

update_forward_refs

+
def update_forward_refs(
+    **localns: 'Any'
+) -> 'None'
+
+

validate

+
def validate(
+    value: 'Any'
+) -> 'Model'
+
+

Instance variables

+
model_extra
+
+

Get extra fields set during validation.

+
model_fields_set
+
+

Returns the set of fields that have been explicitly set on this model instance.

+

Methods

+

copy

+
def copy(
+    self: 'Model',
+    *,
+    include: 'AbstractSetIntStr | MappingIntStrAny | None' = None,
+    exclude: 'AbstractSetIntStr | MappingIntStrAny | None' = None,
+    update: 'typing.Dict[str, Any] | None' = None,
+    deep: 'bool' = False
+) -> 'Model'
+
+

Returns a copy of the model.

+
+

Deprecated

+

This method is now deprecated; use model_copy instead.

+
+

If you need include or exclude, use:

+
data = self.model_dump(include=include, exclude=exclude, round_trip=True)
+data = {**data, **(update or {})}
+copied = self.model_validate(data)
+
+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
includeNoneOptional set or mapping specifying which fields to include in the copied model.None
excludeNoneOptional set or mapping specifying which fields to exclude in the copied model.None
updateNoneOptional dictionary of field-value pairs to override field values in the copied model.None
deepNoneIf True, the values of fields that are Pydantic models will be deep-copied.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
NoneA copy of the model with included, excluded and updated fields as specified.
+

dict

+
def dict(
+    self,
+    *,
+    include: 'IncEx' = None,
+    exclude: 'IncEx' = None,
+    by_alias: 'bool' = False,
+    exclude_unset: 'bool' = False,
+    exclude_defaults: 'bool' = False,
+    exclude_none: 'bool' = False
+) -> 'typing.Dict[str, Any]'
+
+

json

+
def json(
+    self,
+    *,
+    include: 'IncEx' = None,
+    exclude: 'IncEx' = None,
+    by_alias: 'bool' = False,
+    exclude_unset: 'bool' = False,
+    exclude_defaults: 'bool' = False,
+    exclude_none: 'bool' = False,
+    encoder: 'typing.Callable[[Any], Any] | None' = PydanticUndefined,
+    models_as_dict: 'bool' = PydanticUndefined,
+    **dumps_kwargs: 'Any'
+) -> 'str'
+
+

model_copy

+
def model_copy(
+    self: 'Model',
+    *,
+    update: 'dict[str, Any] | None' = None,
+    deep: 'bool' = False
+) -> 'Model'
+
+

Usage docs: docs.pydantic.dev/2.7/concepts/serialization/#model_copy

+

Returns a copy of the model.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
updateNoneValues to change/add in the new model. Note: the data is not validated
before creating the new model. You should trust this data.
None
deepNoneSet to True to make a deep copy of the model.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
NoneNew model instance.
+

model_dump

+
def model_dump(
+    self,
+    *,
+    mode: "Literal['json', 'python'] | str" = 'python',
+    include: 'IncEx' = None,
+    exclude: 'IncEx' = None,
+    context: 'dict[str, Any] | None' = None,
+    by_alias: 'bool' = False,
+    exclude_unset: 'bool' = False,
+    exclude_defaults: 'bool' = False,
+    exclude_none: 'bool' = False,
+    round_trip: 'bool' = False,
+    warnings: "bool | Literal['none', 'warn', 'error']" = True,
+    serialize_as_any: 'bool' = False
+) -> 'dict[str, Any]'
+
+

Usage docs: docs.pydantic.dev/2.7/concepts/serialization/#modelmodel_dump

+

Generate a dictionary representation of the model, optionally specifying which fields to include or exclude.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
modeNoneThe mode in which to_python should run.
If mode is 'json', the output will only contain JSON serializable types.
If mode is 'python', the output may contain non-JSON-serializable Python objects.
None
includeNoneA set of fields to include in the output.None
excludeNoneA set of fields to exclude from the output.None
contextNoneAdditional context to pass to the serializer.None
by_aliasNoneWhether to use the field's alias in the dictionary key if defined.None
exclude_unsetNoneWhether to exclude fields that have not been explicitly set.None
exclude_defaultsNoneWhether to exclude fields that are set to their default value.None
exclude_noneNoneWhether to exclude fields that have a value of None.None
round_tripNoneIf True, dumped values should be valid as input for non-idempotent types such as Json[T].None
warningsNoneHow to handle serialization errors. False/"none" ignores them, True/"warn" logs errors,
"error" raises a [PydanticSerializationError][pydantic_core.PydanticSerializationError].
None
serialize_as_anyNoneWhether to serialize fields with duck-typing serialization behavior.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
NoneA dictionary representation of the model.
+

model_dump_json

+
def model_dump_json(
+    self,
+    *,
+    indent: 'int | None' = None,
+    include: 'IncEx' = None,
+    exclude: 'IncEx' = None,
+    context: 'dict[str, Any] | None' = None,
+    by_alias: 'bool' = False,
+    exclude_unset: 'bool' = False,
+    exclude_defaults: 'bool' = False,
+    exclude_none: 'bool' = False,
+    round_trip: 'bool' = False,
+    warnings: "bool | Literal['none', 'warn', 'error']" = True,
+    serialize_as_any: 'bool' = False
+) -> 'str'
+
+

Usage docs: docs.pydantic.dev/2.7/concepts/serialization/#modelmodel_dump_json

+

Generates a JSON representation of the model using Pydantic's to_json method.

+

Parameters:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameTypeDescriptionDefault
indentNoneIndentation to use in the JSON output. If None is passed, the output will be compact.None
includeNoneField(s) to include in the JSON output.None
excludeNoneField(s) to exclude from the JSON output.None
contextNoneAdditional context to pass to the serializer.None
by_aliasNoneWhether to serialize using field aliases.None
exclude_unsetNoneWhether to exclude fields that have not been explicitly set.None
exclude_defaultsNoneWhether to exclude fields that are set to their default value.None
exclude_noneNoneWhether to exclude fields that have a value of None.None
round_tripNoneIf True, dumped values should be valid as input for non-idempotent types such as Json[T].None
warningsNoneHow to handle serialization errors. False/"none" ignores them, True/"warn" logs errors,
"error" raises a [PydanticSerializationError][pydantic_core.PydanticSerializationError].
None
serialize_as_anyNoneWhether to serialize fields with duck-typing serialization behavior.None
+

Returns:

+ + + + + + + + + + + + + +
TypeDescription
NoneA JSON string representation of the model.
+

model_post_init

+
def model_post_init(
+    self,
+    _BaseModel__context: 'Any'
+) -> 'None'
+
+

Override this method to perform additional initialization after __init__ and model_construct.

+

This is useful if you want to do some validation that requires the entire model to be initialized.

+ + + + + + + + + + \ No newline at end of file diff --git a/1.3.0/api/titiler/pgstac/settings/settings.md b/1.3.0/api/titiler/pgstac/settings/settings.md new file mode 100644 index 00000000..ac4e2439 --- /dev/null +++ b/1.3.0/api/titiler/pgstac/settings/settings.md @@ -0,0 +1,1937 @@ +# Module titiler.pgstac.settings + +API settings. + +## Functions + + +### RetrySettings + +```python3 +def RetrySettings( + +) -> titiler.pgstac.settings._RetrySettings +``` + +This function returns a cached instance of the RetrySettings object. + +## Classes + +### ApiSettings + +```python3 +class ApiSettings( + __pydantic_self__, + _case_sensitive: 'bool | None' = None, + _env_prefix: 'str | None' = None, + _env_file: 'DotenvType | None' = PosixPath('.'), + _env_file_encoding: 'str | None' = None, + _env_ignore_empty: 'bool | None' = None, + _env_nested_delimiter: 'str | None' = None, + _env_parse_none_str: 'str | None' = None, + _secrets_dir: 'str | Path | None' = None, + **values: 'Any' +) +``` + +API settings + +#### Ancestors (in MRO) + +* pydantic_settings.main.BaseSettings +* pydantic.main.BaseModel + +#### Class variables + +```python3 +model_computed_fields +``` + +```python3 +model_config +``` + +```python3 +model_fields +``` + +#### Static methods + + +#### construct + +```python3 +def construct( + _fields_set: 'set[str] | None' = None, + **values: 'Any' +) -> 'Model' +``` + + +#### from_orm + +```python3 +def from_orm( + obj: 'Any' +) -> 'Model' +``` + + +#### model_construct + +```python3 +def model_construct( + _fields_set: 'set[str] | None' = None, + **values: 'Any' +) -> 'Model' +``` + +Creates a new instance of the `Model` class with validated data. + +Creates a new model setting `__dict__` and `__pydantic_fields_set__` from trusted or pre-validated data. +Default values are respected, but no other validation is performed. + +!!! note + `model_construct()` generally respects the `model_config.extra` setting on the provided model. + That is, if `model_config.extra == 'allow'`, then all extra passed values are added to the model instance's `__dict__` + and `__pydantic_extra__` fields. If `model_config.extra == 'ignore'` (the default), then all extra passed values are ignored. + Because no validation is performed with a call to `model_construct()`, having `model_config.extra == 'forbid'` does not result in + an error if extra values are passed, but they will be ignored. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| _fields_set | None | The set of field names accepted for the Model instance. | None | +| values | None | Trusted or pre-validated data dictionary. | None | + +**Returns:** + +| Type | Description | +|---|---| +| None | A new instance of the `Model` class with validated data. | + + +#### model_json_schema + +```python3 +def model_json_schema( + by_alias: 'bool' = True, + ref_template: 'str' = '#/$defs/{model}', + schema_generator: 'type[GenerateJsonSchema]' = , + mode: 'JsonSchemaMode' = 'validation' +) -> 'dict[str, Any]' +``` + +Generates a JSON schema for a model class. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| by_alias | None | Whether to use attribute aliases or not. | None | +| ref_template | None | The reference template. | None | +| schema_generator | None | To override the logic used to generate the JSON schema, as a subclass of
`GenerateJsonSchema` with your desired modifications | None | +| mode | None | The mode in which to generate the schema. | None | + +**Returns:** + +| Type | Description | +|---|---| +| None | The JSON schema for the given model class. | + + +#### model_parametrized_name + +```python3 +def model_parametrized_name( + params: 'tuple[type[Any], ...]' +) -> 'str' +``` + +Compute the class name for parametrizations of generic classes. + +This method can be overridden to achieve a custom naming scheme for generic BaseModels. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| params | None | Tuple of types of the class. Given a generic class
`Model` with 2 type variables and a concrete model `Model[str, int]`,
the value `(str, int)` would be passed to `params`. | None | + +**Returns:** + +| Type | Description | +|---|---| +| None | String representing the new class where `params` are passed to `cls` as type variables. | + +**Raises:** + +| Type | Description | +|---|---| +| TypeError | Raised when trying to generate concrete names for non-generic models. | + + +#### model_rebuild + +```python3 +def model_rebuild( + *, + force: 'bool' = False, + raise_errors: 'bool' = True, + _parent_namespace_depth: 'int' = 2, + _types_namespace: 'dict[str, Any] | None' = None +) -> 'bool | None' +``` + +Try to rebuild the pydantic-core schema for the model. + +This may be necessary when one of the annotations is a ForwardRef which could not be resolved during +the initial attempt to build the schema, and automatic rebuilding fails. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| force | None | Whether to force the rebuilding of the model schema, defaults to `False`. | None | +| raise_errors | None | Whether to raise errors, defaults to `True`. | None | +| _parent_namespace_depth | None | The depth level of the parent namespace, defaults to 2. | None | +| _types_namespace | None | The types namespace, defaults to `None`. | None | + +**Returns:** + +| Type | Description | +|---|---| +| None | Returns `None` if the schema is already "complete" and rebuilding was not required.
If rebuilding _was_ required, returns `True` if rebuilding was successful, otherwise `False`. | + + +#### model_validate + +```python3 +def model_validate( + obj: 'Any', + *, + strict: 'bool | None' = None, + from_attributes: 'bool | None' = None, + context: 'dict[str, Any] | None' = None +) -> 'Model' +``` + +Validate a pydantic model instance. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| obj | None | The object to validate. | None | +| strict | None | Whether to enforce types strictly. | None | +| from_attributes | None | Whether to extract data from object attributes. | None | +| context | None | Additional context to pass to the validator. | None | + +**Returns:** + +| Type | Description | +|---|---| +| None | The validated model instance. | + +**Raises:** + +| Type | Description | +|---|---| +| ValidationError | If the object could not be validated. | + + +#### model_validate_json + +```python3 +def model_validate_json( + json_data: 'str | bytes | bytearray', + *, + strict: 'bool | None' = None, + context: 'dict[str, Any] | None' = None +) -> 'Model' +``` + +Usage docs: https://docs.pydantic.dev/2.7/concepts/json/#json-parsing + +Validate the given JSON data against the Pydantic model. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| json_data | None | The JSON data to validate. | None | +| strict | None | Whether to enforce types strictly. | None | +| context | None | Extra variables to pass to the validator. | None | + +**Returns:** + +| Type | Description | +|---|---| +| None | The validated Pydantic model. | + +**Raises:** + +| Type | Description | +|---|---| +| ValueError | If `json_data` is not a JSON string. | + + +#### model_validate_strings + +```python3 +def model_validate_strings( + obj: 'Any', + *, + strict: 'bool | None' = None, + context: 'dict[str, Any] | None' = None +) -> 'Model' +``` + +Validate the given object contains string data against the Pydantic model. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| obj | None | The object contains string data to validate. | None | +| strict | None | Whether to enforce types strictly. | None | +| context | None | Extra variables to pass to the validator. | None | + +**Returns:** + +| Type | Description | +|---|---| +| None | The validated Pydantic model. | + + +#### parse_cors_origin + +```python3 +def parse_cors_origin( + v +) +``` + +Parse CORS origins. 
+ + +#### parse_file + +```python3 +def parse_file( + path: 'str | Path', + *, + content_type: 'str | None' = None, + encoding: 'str' = 'utf8', + proto: 'DeprecatedParseProtocol | None' = None, + allow_pickle: 'bool' = False +) -> 'Model' +``` + + +#### parse_obj + +```python3 +def parse_obj( + obj: 'Any' +) -> 'Model' +``` + + +#### parse_raw + +```python3 +def parse_raw( + b: 'str | bytes', + *, + content_type: 'str | None' = None, + encoding: 'str' = 'utf8', + proto: 'DeprecatedParseProtocol | None' = None, + allow_pickle: 'bool' = False +) -> 'Model' +``` + + +#### schema + +```python3 +def schema( + by_alias: 'bool' = True, + ref_template: 'str' = '#/$defs/{model}' +) -> 'typing.Dict[str, Any]' +``` + + +#### schema_json + +```python3 +def schema_json( + *, + by_alias: 'bool' = True, + ref_template: 'str' = '#/$defs/{model}', + **dumps_kwargs: 'Any' +) -> 'str' +``` + + +#### settings_customise_sources + +```python3 +def settings_customise_sources( + settings_cls: 'type[BaseSettings]', + init_settings: 'PydanticBaseSettingsSource', + env_settings: 'PydanticBaseSettingsSource', + dotenv_settings: 'PydanticBaseSettingsSource', + file_secret_settings: 'PydanticBaseSettingsSource' +) -> 'tuple[PydanticBaseSettingsSource, ...]' +``` + +Define the sources and their order for loading the settings values. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| settings_cls | None | The Settings class. | None | +| init_settings | None | The `InitSettingsSource` instance. | None | +| env_settings | None | The `EnvSettingsSource` instance. | None | +| dotenv_settings | None | The `DotEnvSettingsSource` instance. | None | +| file_secret_settings | None | The `SecretsSettingsSource` instance. | None | + +**Returns:** + +| Type | Description | +|---|---| +| None | A tuple containing the sources and their order for loading the settings values. | + + +#### update_forward_refs + +```python3 +def update_forward_refs( + **localns: 'Any' +) -> 'None' +``` + + +#### validate + +```python3 +def validate( + value: 'Any' +) -> 'Model' +``` + +#### Instance variables + +```python3 +model_extra +``` + +Get extra fields set during validation. + +```python3 +model_fields_set +``` + +Returns the set of fields that have been explicitly set on this model instance. + +#### Methods + + +#### copy + +```python3 +def copy( + self: 'Model', + *, + include: 'AbstractSetIntStr | MappingIntStrAny | None' = None, + exclude: 'AbstractSetIntStr | MappingIntStrAny | None' = None, + update: 'typing.Dict[str, Any] | None' = None, + deep: 'bool' = False +) -> 'Model' +``` + +Returns a copy of the model. + +!!! warning "Deprecated" + This method is now deprecated; use `model_copy` instead. + +If you need `include` or `exclude`, use: + +```py +data = self.model_dump(include=include, exclude=exclude, round_trip=True) +data = {**data, **(update or {})} +copied = self.model_validate(data) +``` + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| include | None | Optional set or mapping specifying which fields to include in the copied model. | None | +| exclude | None | Optional set or mapping specifying which fields to exclude in the copied model. | None | +| update | None | Optional dictionary of field-value pairs to override field values in the copied model. | None | +| deep | None | If True, the values of fields that are Pydantic models will be deep-copied. 
| None | + +**Returns:** + +| Type | Description | +|---|---| +| None | A copy of the model with included, excluded and updated fields as specified. | + + +#### dict + +```python3 +def dict( + self, + *, + include: 'IncEx' = None, + exclude: 'IncEx' = None, + by_alias: 'bool' = False, + exclude_unset: 'bool' = False, + exclude_defaults: 'bool' = False, + exclude_none: 'bool' = False +) -> 'typing.Dict[str, Any]' +``` + + +#### json + +```python3 +def json( + self, + *, + include: 'IncEx' = None, + exclude: 'IncEx' = None, + by_alias: 'bool' = False, + exclude_unset: 'bool' = False, + exclude_defaults: 'bool' = False, + exclude_none: 'bool' = False, + encoder: 'typing.Callable[[Any], Any] | None' = PydanticUndefined, + models_as_dict: 'bool' = PydanticUndefined, + **dumps_kwargs: 'Any' +) -> 'str' +``` + + +#### model_copy + +```python3 +def model_copy( + self: 'Model', + *, + update: 'dict[str, Any] | None' = None, + deep: 'bool' = False +) -> 'Model' +``` + +Usage docs: https://docs.pydantic.dev/2.7/concepts/serialization/#model_copy + +Returns a copy of the model. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| update | None | Values to change/add in the new model. Note: the data is not validated
before creating the new model. You should trust this data. | None | +| deep | None | Set to `True` to make a deep copy of the model. | None | + +**Returns:** + +| Type | Description | +|---|---| +| None | New model instance. | + + +#### model_dump + +```python3 +def model_dump( + self, + *, + mode: "Literal['json', 'python'] | str" = 'python', + include: 'IncEx' = None, + exclude: 'IncEx' = None, + context: 'dict[str, Any] | None' = None, + by_alias: 'bool' = False, + exclude_unset: 'bool' = False, + exclude_defaults: 'bool' = False, + exclude_none: 'bool' = False, + round_trip: 'bool' = False, + warnings: "bool | Literal['none', 'warn', 'error']" = True, + serialize_as_any: 'bool' = False +) -> 'dict[str, Any]' +``` + +Usage docs: https://docs.pydantic.dev/2.7/concepts/serialization/#modelmodel_dump + +Generate a dictionary representation of the model, optionally specifying which fields to include or exclude. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| mode | None | The mode in which `to_python` should run.
If mode is 'json', the output will only contain JSON serializable types.
If mode is 'python', the output may contain non-JSON-serializable Python objects. | None | +| include | None | A set of fields to include in the output. | None | +| exclude | None | A set of fields to exclude from the output. | None | +| context | None | Additional context to pass to the serializer. | None | +| by_alias | None | Whether to use the field's alias in the dictionary key if defined. | None | +| exclude_unset | None | Whether to exclude fields that have not been explicitly set. | None | +| exclude_defaults | None | Whether to exclude fields that are set to their default value. | None | +| exclude_none | None | Whether to exclude fields that have a value of `None`. | None | +| round_trip | None | If True, dumped values should be valid as input for non-idempotent types such as Json[T]. | None | +| warnings | None | How to handle serialization errors. False/"none" ignores them, True/"warn" logs errors,
"error" raises a [`PydanticSerializationError`][pydantic_core.PydanticSerializationError]. | None | +| serialize_as_any | None | Whether to serialize fields with duck-typing serialization behavior. | None | + +**Returns:** + +| Type | Description | +|---|---| +| None | A dictionary representation of the model. | + + +#### model_dump_json + +```python3 +def model_dump_json( + self, + *, + indent: 'int | None' = None, + include: 'IncEx' = None, + exclude: 'IncEx' = None, + context: 'dict[str, Any] | None' = None, + by_alias: 'bool' = False, + exclude_unset: 'bool' = False, + exclude_defaults: 'bool' = False, + exclude_none: 'bool' = False, + round_trip: 'bool' = False, + warnings: "bool | Literal['none', 'warn', 'error']" = True, + serialize_as_any: 'bool' = False +) -> 'str' +``` + +Usage docs: https://docs.pydantic.dev/2.7/concepts/serialization/#modelmodel_dump_json + +Generates a JSON representation of the model using Pydantic's `to_json` method. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| indent | None | Indentation to use in the JSON output. If None is passed, the output will be compact. | None | +| include | None | Field(s) to include in the JSON output. | None | +| exclude | None | Field(s) to exclude from the JSON output. | None | +| context | None | Additional context to pass to the serializer. | None | +| by_alias | None | Whether to serialize using field aliases. | None | +| exclude_unset | None | Whether to exclude fields that have not been explicitly set. | None | +| exclude_defaults | None | Whether to exclude fields that are set to their default value. | None | +| exclude_none | None | Whether to exclude fields that have a value of `None`. | None | +| round_trip | None | If True, dumped values should be valid as input for non-idempotent types such as Json[T]. | None | +| warnings | None | How to handle serialization errors. False/"none" ignores them, True/"warn" logs errors,
"error" raises a [`PydanticSerializationError`][pydantic_core.PydanticSerializationError]. | None | +| serialize_as_any | None | Whether to serialize fields with duck-typing serialization behavior. | None | + +**Returns:** + +| Type | Description | +|---|---| +| None | A JSON string representation of the model. | + + +#### model_post_init + +```python3 +def model_post_init( + self, + _BaseModel__context: 'Any' +) -> 'None' +``` + +Override this method to perform additional initialization after `__init__` and `model_construct`. + +This is useful if you want to do some validation that requires the entire model to be initialized. + +### CacheSettings + +```python3 +class CacheSettings( + __pydantic_self__, + _case_sensitive: 'bool | None' = None, + _env_prefix: 'str | None' = None, + _env_file: 'DotenvType | None' = PosixPath('.'), + _env_file_encoding: 'str | None' = None, + _env_ignore_empty: 'bool | None' = None, + _env_nested_delimiter: 'str | None' = None, + _env_parse_none_str: 'str | None' = None, + _secrets_dir: 'str | Path | None' = None, + **values: 'Any' +) +``` + +Cache settings + +#### Ancestors (in MRO) + +* pydantic_settings.main.BaseSettings +* pydantic.main.BaseModel + +#### Class variables + +```python3 +model_computed_fields +``` + +```python3 +model_config +``` + +```python3 +model_fields +``` + +#### Static methods + + +#### construct + +```python3 +def construct( + _fields_set: 'set[str] | None' = None, + **values: 'Any' +) -> 'Model' +``` + + +#### from_orm + +```python3 +def from_orm( + obj: 'Any' +) -> 'Model' +``` + + +#### model_construct + +```python3 +def model_construct( + _fields_set: 'set[str] | None' = None, + **values: 'Any' +) -> 'Model' +``` + +Creates a new instance of the `Model` class with validated data. + +Creates a new model setting `__dict__` and `__pydantic_fields_set__` from trusted or pre-validated data. +Default values are respected, but no other validation is performed. + +!!! note + `model_construct()` generally respects the `model_config.extra` setting on the provided model. + That is, if `model_config.extra == 'allow'`, then all extra passed values are added to the model instance's `__dict__` + and `__pydantic_extra__` fields. If `model_config.extra == 'ignore'` (the default), then all extra passed values are ignored. + Because no validation is performed with a call to `model_construct()`, having `model_config.extra == 'forbid'` does not result in + an error if extra values are passed, but they will be ignored. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| _fields_set | None | The set of field names accepted for the Model instance. | None | +| values | None | Trusted or pre-validated data dictionary. | None | + +**Returns:** + +| Type | Description | +|---|---| +| None | A new instance of the `Model` class with validated data. | + + +#### model_json_schema + +```python3 +def model_json_schema( + by_alias: 'bool' = True, + ref_template: 'str' = '#/$defs/{model}', + schema_generator: 'type[GenerateJsonSchema]' = , + mode: 'JsonSchemaMode' = 'validation' +) -> 'dict[str, Any]' +``` + +Generates a JSON schema for a model class. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| by_alias | None | Whether to use attribute aliases or not. | None | +| ref_template | None | The reference template. | None | +| schema_generator | None | To override the logic used to generate the JSON schema, as a subclass of
`GenerateJsonSchema` with your desired modifications | None | +| mode | None | The mode in which to generate the schema. | None | + +**Returns:** + +| Type | Description | +|---|---| +| None | The JSON schema for the given model class. | + + +#### model_parametrized_name + +```python3 +def model_parametrized_name( + params: 'tuple[type[Any], ...]' +) -> 'str' +``` + +Compute the class name for parametrizations of generic classes. + +This method can be overridden to achieve a custom naming scheme for generic BaseModels. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| params | None | Tuple of types of the class. Given a generic class
`Model` with 2 type variables and a concrete model `Model[str, int]`,
the value `(str, int)` would be passed to `params`. | None | + +**Returns:** + +| Type | Description | +|---|---| +| None | String representing the new class where `params` are passed to `cls` as type variables. | + +**Raises:** + +| Type | Description | +|---|---| +| TypeError | Raised when trying to generate concrete names for non-generic models. | + + +#### model_rebuild + +```python3 +def model_rebuild( + *, + force: 'bool' = False, + raise_errors: 'bool' = True, + _parent_namespace_depth: 'int' = 2, + _types_namespace: 'dict[str, Any] | None' = None +) -> 'bool | None' +``` + +Try to rebuild the pydantic-core schema for the model. + +This may be necessary when one of the annotations is a ForwardRef which could not be resolved during +the initial attempt to build the schema, and automatic rebuilding fails. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| force | None | Whether to force the rebuilding of the model schema, defaults to `False`. | None | +| raise_errors | None | Whether to raise errors, defaults to `True`. | None | +| _parent_namespace_depth | None | The depth level of the parent namespace, defaults to 2. | None | +| _types_namespace | None | The types namespace, defaults to `None`. | None | + +**Returns:** + +| Type | Description | +|---|---| +| None | Returns `None` if the schema is already "complete" and rebuilding was not required.
If rebuilding _was_ required, returns `True` if rebuilding was successful, otherwise `False`. | + + +#### model_validate + +```python3 +def model_validate( + obj: 'Any', + *, + strict: 'bool | None' = None, + from_attributes: 'bool | None' = None, + context: 'dict[str, Any] | None' = None +) -> 'Model' +``` + +Validate a pydantic model instance. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| obj | None | The object to validate. | None | +| strict | None | Whether to enforce types strictly. | None | +| from_attributes | None | Whether to extract data from object attributes. | None | +| context | None | Additional context to pass to the validator. | None | + +**Returns:** + +| Type | Description | +|---|---| +| None | The validated model instance. | + +**Raises:** + +| Type | Description | +|---|---| +| ValidationError | If the object could not be validated. | + + +#### model_validate_json + +```python3 +def model_validate_json( + json_data: 'str | bytes | bytearray', + *, + strict: 'bool | None' = None, + context: 'dict[str, Any] | None' = None +) -> 'Model' +``` + +Usage docs: https://docs.pydantic.dev/2.7/concepts/json/#json-parsing + +Validate the given JSON data against the Pydantic model. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| json_data | None | The JSON data to validate. | None | +| strict | None | Whether to enforce types strictly. | None | +| context | None | Extra variables to pass to the validator. | None | + +**Returns:** + +| Type | Description | +|---|---| +| None | The validated Pydantic model. | + +**Raises:** + +| Type | Description | +|---|---| +| ValueError | If `json_data` is not a JSON string. | + + +#### model_validate_strings + +```python3 +def model_validate_strings( + obj: 'Any', + *, + strict: 'bool | None' = None, + context: 'dict[str, Any] | None' = None +) -> 'Model' +``` + +Validate the given object contains string data against the Pydantic model. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| obj | None | The object contains string data to validate. | None | +| strict | None | Whether to enforce types strictly. | None | +| context | None | Extra variables to pass to the validator. | None | + +**Returns:** + +| Type | Description | +|---|---| +| None | The validated Pydantic model. 
| + + +#### parse_file + +```python3 +def parse_file( + path: 'str | Path', + *, + content_type: 'str | None' = None, + encoding: 'str' = 'utf8', + proto: 'DeprecatedParseProtocol | None' = None, + allow_pickle: 'bool' = False +) -> 'Model' +``` + + +#### parse_obj + +```python3 +def parse_obj( + obj: 'Any' +) -> 'Model' +``` + + +#### parse_raw + +```python3 +def parse_raw( + b: 'str | bytes', + *, + content_type: 'str | None' = None, + encoding: 'str' = 'utf8', + proto: 'DeprecatedParseProtocol | None' = None, + allow_pickle: 'bool' = False +) -> 'Model' +``` + + +#### schema + +```python3 +def schema( + by_alias: 'bool' = True, + ref_template: 'str' = '#/$defs/{model}' +) -> 'typing.Dict[str, Any]' +``` + + +#### schema_json + +```python3 +def schema_json( + *, + by_alias: 'bool' = True, + ref_template: 'str' = '#/$defs/{model}', + **dumps_kwargs: 'Any' +) -> 'str' +``` + + +#### settings_customise_sources + +```python3 +def settings_customise_sources( + settings_cls: 'type[BaseSettings]', + init_settings: 'PydanticBaseSettingsSource', + env_settings: 'PydanticBaseSettingsSource', + dotenv_settings: 'PydanticBaseSettingsSource', + file_secret_settings: 'PydanticBaseSettingsSource' +) -> 'tuple[PydanticBaseSettingsSource, ...]' +``` + +Define the sources and their order for loading the settings values. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| settings_cls | None | The Settings class. | None | +| init_settings | None | The `InitSettingsSource` instance. | None | +| env_settings | None | The `EnvSettingsSource` instance. | None | +| dotenv_settings | None | The `DotEnvSettingsSource` instance. | None | +| file_secret_settings | None | The `SecretsSettingsSource` instance. | None | + +**Returns:** + +| Type | Description | +|---|---| +| None | A tuple containing the sources and their order for loading the settings values. | + + +#### update_forward_refs + +```python3 +def update_forward_refs( + **localns: 'Any' +) -> 'None' +``` + + +#### validate + +```python3 +def validate( + value: 'Any' +) -> 'Model' +``` + +#### Instance variables + +```python3 +model_extra +``` + +Get extra fields set during validation. + +```python3 +model_fields_set +``` + +Returns the set of fields that have been explicitly set on this model instance. + +#### Methods + + +#### check_enable + +```python3 +def check_enable( + self +) +``` + +Check if cache is disabled. + + +#### copy + +```python3 +def copy( + self: 'Model', + *, + include: 'AbstractSetIntStr | MappingIntStrAny | None' = None, + exclude: 'AbstractSetIntStr | MappingIntStrAny | None' = None, + update: 'typing.Dict[str, Any] | None' = None, + deep: 'bool' = False +) -> 'Model' +``` + +Returns a copy of the model. + +!!! warning "Deprecated" + This method is now deprecated; use `model_copy` instead. + +If you need `include` or `exclude`, use: + +```py +data = self.model_dump(include=include, exclude=exclude, round_trip=True) +data = {**data, **(update or {})} +copied = self.model_validate(data) +``` + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| include | None | Optional set or mapping specifying which fields to include in the copied model. | None | +| exclude | None | Optional set or mapping specifying which fields to exclude in the copied model. | None | +| update | None | Optional dictionary of field-value pairs to override field values in the copied model. | None | +| deep | None | If True, the values of fields that are Pydantic models will be deep-copied. 
| None | + +**Returns:** + +| Type | Description | +|---|---| +| None | A copy of the model with included, excluded and updated fields as specified. | + + +#### dict + +```python3 +def dict( + self, + *, + include: 'IncEx' = None, + exclude: 'IncEx' = None, + by_alias: 'bool' = False, + exclude_unset: 'bool' = False, + exclude_defaults: 'bool' = False, + exclude_none: 'bool' = False +) -> 'typing.Dict[str, Any]' +``` + + +#### json + +```python3 +def json( + self, + *, + include: 'IncEx' = None, + exclude: 'IncEx' = None, + by_alias: 'bool' = False, + exclude_unset: 'bool' = False, + exclude_defaults: 'bool' = False, + exclude_none: 'bool' = False, + encoder: 'typing.Callable[[Any], Any] | None' = PydanticUndefined, + models_as_dict: 'bool' = PydanticUndefined, + **dumps_kwargs: 'Any' +) -> 'str' +``` + + +#### model_copy + +```python3 +def model_copy( + self: 'Model', + *, + update: 'dict[str, Any] | None' = None, + deep: 'bool' = False +) -> 'Model' +``` + +Usage docs: https://docs.pydantic.dev/2.7/concepts/serialization/#model_copy + +Returns a copy of the model. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| update | None | Values to change/add in the new model. Note: the data is not validated
before creating the new model. You should trust this data. | None | +| deep | None | Set to `True` to make a deep copy of the model. | None | + +**Returns:** + +| Type | Description | +|---|---| +| None | New model instance. | + + +#### model_dump + +```python3 +def model_dump( + self, + *, + mode: "Literal['json', 'python'] | str" = 'python', + include: 'IncEx' = None, + exclude: 'IncEx' = None, + context: 'dict[str, Any] | None' = None, + by_alias: 'bool' = False, + exclude_unset: 'bool' = False, + exclude_defaults: 'bool' = False, + exclude_none: 'bool' = False, + round_trip: 'bool' = False, + warnings: "bool | Literal['none', 'warn', 'error']" = True, + serialize_as_any: 'bool' = False +) -> 'dict[str, Any]' +``` + +Usage docs: https://docs.pydantic.dev/2.7/concepts/serialization/#modelmodel_dump + +Generate a dictionary representation of the model, optionally specifying which fields to include or exclude. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| mode | None | The mode in which `to_python` should run.
If mode is 'json', the output will only contain JSON serializable types.
If mode is 'python', the output may contain non-JSON-serializable Python objects. | None | +| include | None | A set of fields to include in the output. | None | +| exclude | None | A set of fields to exclude from the output. | None | +| context | None | Additional context to pass to the serializer. | None | +| by_alias | None | Whether to use the field's alias in the dictionary key if defined. | None | +| exclude_unset | None | Whether to exclude fields that have not been explicitly set. | None | +| exclude_defaults | None | Whether to exclude fields that are set to their default value. | None | +| exclude_none | None | Whether to exclude fields that have a value of `None`. | None | +| round_trip | None | If True, dumped values should be valid as input for non-idempotent types such as Json[T]. | None | +| warnings | None | How to handle serialization errors. False/"none" ignores them, True/"warn" logs errors,
"error" raises a [`PydanticSerializationError`][pydantic_core.PydanticSerializationError]. | None | +| serialize_as_any | None | Whether to serialize fields with duck-typing serialization behavior. | None | + +**Returns:** + +| Type | Description | +|---|---| +| None | A dictionary representation of the model. | + + +#### model_dump_json + +```python3 +def model_dump_json( + self, + *, + indent: 'int | None' = None, + include: 'IncEx' = None, + exclude: 'IncEx' = None, + context: 'dict[str, Any] | None' = None, + by_alias: 'bool' = False, + exclude_unset: 'bool' = False, + exclude_defaults: 'bool' = False, + exclude_none: 'bool' = False, + round_trip: 'bool' = False, + warnings: "bool | Literal['none', 'warn', 'error']" = True, + serialize_as_any: 'bool' = False +) -> 'str' +``` + +Usage docs: https://docs.pydantic.dev/2.7/concepts/serialization/#modelmodel_dump_json + +Generates a JSON representation of the model using Pydantic's `to_json` method. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| indent | None | Indentation to use in the JSON output. If None is passed, the output will be compact. | None | +| include | None | Field(s) to include in the JSON output. | None | +| exclude | None | Field(s) to exclude from the JSON output. | None | +| context | None | Additional context to pass to the serializer. | None | +| by_alias | None | Whether to serialize using field aliases. | None | +| exclude_unset | None | Whether to exclude fields that have not been explicitly set. | None | +| exclude_defaults | None | Whether to exclude fields that are set to their default value. | None | +| exclude_none | None | Whether to exclude fields that have a value of `None`. | None | +| round_trip | None | If True, dumped values should be valid as input for non-idempotent types such as Json[T]. | None | +| warnings | None | How to handle serialization errors. False/"none" ignores them, True/"warn" logs errors,
"error" raises a [`PydanticSerializationError`][pydantic_core.PydanticSerializationError]. | None | +| serialize_as_any | None | Whether to serialize fields with duck-typing serialization behavior. | None | + +**Returns:** + +| Type | Description | +|---|---| +| None | A JSON string representation of the model. | + + +#### model_post_init + +```python3 +def model_post_init( + self, + _BaseModel__context: 'Any' +) -> 'None' +``` + +Override this method to perform additional initialization after `__init__` and `model_construct`. + +This is useful if you want to do some validation that requires the entire model to be initialized. + +### PostgresSettings + +```python3 +class PostgresSettings( + __pydantic_self__, + _case_sensitive: 'bool | None' = None, + _env_prefix: 'str | None' = None, + _env_file: 'DotenvType | None' = PosixPath('.'), + _env_file_encoding: 'str | None' = None, + _env_ignore_empty: 'bool | None' = None, + _env_nested_delimiter: 'str | None' = None, + _env_parse_none_str: 'str | None' = None, + _secrets_dir: 'str | Path | None' = None, + **values: 'Any' +) +``` + +Postgres-specific API settings. + +#### Attributes + +| Name | Type | Description | Default | +|---|---|---|---| +| postgres_user | None | postgres username. | None | +| postgres_pass | None | postgres password. | None | +| postgres_host | None | database hostname. | None | +| postgres_port | None | database port. | None | +| postgres_dbname | None | database name. | None | + +#### Ancestors (in MRO) + +* pydantic_settings.main.BaseSettings +* pydantic.main.BaseModel + +#### Class variables + +```python3 +model_computed_fields +``` + +```python3 +model_config +``` + +```python3 +model_fields +``` + +#### Static methods + + +#### assemble_db_connection + +```python3 +def assemble_db_connection( + v: Optional[str], + info: pydantic_core.core_schema.ValidationInfo +) -> Any +``` + +Validate database config. + + +#### construct + +```python3 +def construct( + _fields_set: 'set[str] | None' = None, + **values: 'Any' +) -> 'Model' +``` + + +#### from_orm + +```python3 +def from_orm( + obj: 'Any' +) -> 'Model' +``` + + +#### model_construct + +```python3 +def model_construct( + _fields_set: 'set[str] | None' = None, + **values: 'Any' +) -> 'Model' +``` + +Creates a new instance of the `Model` class with validated data. + +Creates a new model setting `__dict__` and `__pydantic_fields_set__` from trusted or pre-validated data. +Default values are respected, but no other validation is performed. + +!!! note + `model_construct()` generally respects the `model_config.extra` setting on the provided model. + That is, if `model_config.extra == 'allow'`, then all extra passed values are added to the model instance's `__dict__` + and `__pydantic_extra__` fields. If `model_config.extra == 'ignore'` (the default), then all extra passed values are ignored. + Because no validation is performed with a call to `model_construct()`, having `model_config.extra == 'forbid'` does not result in + an error if extra values are passed, but they will be ignored. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| _fields_set | None | The set of field names accepted for the Model instance. | None | +| values | None | Trusted or pre-validated data dictionary. | None | + +**Returns:** + +| Type | Description | +|---|---| +| None | A new instance of the `Model` class with validated data. 
| + + +#### model_json_schema + +```python3 +def model_json_schema( + by_alias: 'bool' = True, + ref_template: 'str' = '#/$defs/{model}', + schema_generator: 'type[GenerateJsonSchema]' = , + mode: 'JsonSchemaMode' = 'validation' +) -> 'dict[str, Any]' +``` + +Generates a JSON schema for a model class. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| by_alias | None | Whether to use attribute aliases or not. | None | +| ref_template | None | The reference template. | None | +| schema_generator | None | To override the logic used to generate the JSON schema, as a subclass of
`GenerateJsonSchema` with your desired modifications | None | +| mode | None | The mode in which to generate the schema. | None | + +**Returns:** + +| Type | Description | +|---|---| +| None | The JSON schema for the given model class. | + + +#### model_parametrized_name + +```python3 +def model_parametrized_name( + params: 'tuple[type[Any], ...]' +) -> 'str' +``` + +Compute the class name for parametrizations of generic classes. + +This method can be overridden to achieve a custom naming scheme for generic BaseModels. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| params | None | Tuple of types of the class. Given a generic class
`Model` with 2 type variables and a concrete model `Model[str, int]`,
the value `(str, int)` would be passed to `params`. | None | + +**Returns:** + +| Type | Description | +|---|---| +| None | String representing the new class where `params` are passed to `cls` as type variables. | + +**Raises:** + +| Type | Description | +|---|---| +| TypeError | Raised when trying to generate concrete names for non-generic models. | + + +#### model_rebuild + +```python3 +def model_rebuild( + *, + force: 'bool' = False, + raise_errors: 'bool' = True, + _parent_namespace_depth: 'int' = 2, + _types_namespace: 'dict[str, Any] | None' = None +) -> 'bool | None' +``` + +Try to rebuild the pydantic-core schema for the model. + +This may be necessary when one of the annotations is a ForwardRef which could not be resolved during +the initial attempt to build the schema, and automatic rebuilding fails. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| force | None | Whether to force the rebuilding of the model schema, defaults to `False`. | None | +| raise_errors | None | Whether to raise errors, defaults to `True`. | None | +| _parent_namespace_depth | None | The depth level of the parent namespace, defaults to 2. | None | +| _types_namespace | None | The types namespace, defaults to `None`. | None | + +**Returns:** + +| Type | Description | +|---|---| +| None | Returns `None` if the schema is already "complete" and rebuilding was not required.
If rebuilding _was_ required, returns `True` if rebuilding was successful, otherwise `False`. | + + +#### model_validate + +```python3 +def model_validate( + obj: 'Any', + *, + strict: 'bool | None' = None, + from_attributes: 'bool | None' = None, + context: 'dict[str, Any] | None' = None +) -> 'Model' +``` + +Validate a pydantic model instance. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| obj | None | The object to validate. | None | +| strict | None | Whether to enforce types strictly. | None | +| from_attributes | None | Whether to extract data from object attributes. | None | +| context | None | Additional context to pass to the validator. | None | + +**Returns:** + +| Type | Description | +|---|---| +| None | The validated model instance. | + +**Raises:** + +| Type | Description | +|---|---| +| ValidationError | If the object could not be validated. | + + +#### model_validate_json + +```python3 +def model_validate_json( + json_data: 'str | bytes | bytearray', + *, + strict: 'bool | None' = None, + context: 'dict[str, Any] | None' = None +) -> 'Model' +``` + +Usage docs: https://docs.pydantic.dev/2.7/concepts/json/#json-parsing + +Validate the given JSON data against the Pydantic model. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| json_data | None | The JSON data to validate. | None | +| strict | None | Whether to enforce types strictly. | None | +| context | None | Extra variables to pass to the validator. | None | + +**Returns:** + +| Type | Description | +|---|---| +| None | The validated Pydantic model. | + +**Raises:** + +| Type | Description | +|---|---| +| ValueError | If `json_data` is not a JSON string. | + + +#### model_validate_strings + +```python3 +def model_validate_strings( + obj: 'Any', + *, + strict: 'bool | None' = None, + context: 'dict[str, Any] | None' = None +) -> 'Model' +``` + +Validate the given object contains string data against the Pydantic model. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| obj | None | The object contains string data to validate. | None | +| strict | None | Whether to enforce types strictly. | None | +| context | None | Extra variables to pass to the validator. | None | + +**Returns:** + +| Type | Description | +|---|---| +| None | The validated Pydantic model. 
| + + +#### parse_file + +```python3 +def parse_file( + path: 'str | Path', + *, + content_type: 'str | None' = None, + encoding: 'str' = 'utf8', + proto: 'DeprecatedParseProtocol | None' = None, + allow_pickle: 'bool' = False +) -> 'Model' +``` + + +#### parse_obj + +```python3 +def parse_obj( + obj: 'Any' +) -> 'Model' +``` + + +#### parse_raw + +```python3 +def parse_raw( + b: 'str | bytes', + *, + content_type: 'str | None' = None, + encoding: 'str' = 'utf8', + proto: 'DeprecatedParseProtocol | None' = None, + allow_pickle: 'bool' = False +) -> 'Model' +``` + + +#### schema + +```python3 +def schema( + by_alias: 'bool' = True, + ref_template: 'str' = '#/$defs/{model}' +) -> 'typing.Dict[str, Any]' +``` + + +#### schema_json + +```python3 +def schema_json( + *, + by_alias: 'bool' = True, + ref_template: 'str' = '#/$defs/{model}', + **dumps_kwargs: 'Any' +) -> 'str' +``` + + +#### settings_customise_sources + +```python3 +def settings_customise_sources( + settings_cls: 'type[BaseSettings]', + init_settings: 'PydanticBaseSettingsSource', + env_settings: 'PydanticBaseSettingsSource', + dotenv_settings: 'PydanticBaseSettingsSource', + file_secret_settings: 'PydanticBaseSettingsSource' +) -> 'tuple[PydanticBaseSettingsSource, ...]' +``` + +Define the sources and their order for loading the settings values. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| settings_cls | None | The Settings class. | None | +| init_settings | None | The `InitSettingsSource` instance. | None | +| env_settings | None | The `EnvSettingsSource` instance. | None | +| dotenv_settings | None | The `DotEnvSettingsSource` instance. | None | +| file_secret_settings | None | The `SecretsSettingsSource` instance. | None | + +**Returns:** + +| Type | Description | +|---|---| +| None | A tuple containing the sources and their order for loading the settings values. | + + +#### update_forward_refs + +```python3 +def update_forward_refs( + **localns: 'Any' +) -> 'None' +``` + + +#### validate + +```python3 +def validate( + value: 'Any' +) -> 'Model' +``` + +#### Instance variables + +```python3 +model_extra +``` + +Get extra fields set during validation. + +```python3 +model_fields_set +``` + +Returns the set of fields that have been explicitly set on this model instance. + +#### Methods + + +#### copy + +```python3 +def copy( + self: 'Model', + *, + include: 'AbstractSetIntStr | MappingIntStrAny | None' = None, + exclude: 'AbstractSetIntStr | MappingIntStrAny | None' = None, + update: 'typing.Dict[str, Any] | None' = None, + deep: 'bool' = False +) -> 'Model' +``` + +Returns a copy of the model. + +!!! warning "Deprecated" + This method is now deprecated; use `model_copy` instead. + +If you need `include` or `exclude`, use: + +```py +data = self.model_dump(include=include, exclude=exclude, round_trip=True) +data = {**data, **(update or {})} +copied = self.model_validate(data) +``` + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| include | None | Optional set or mapping specifying which fields to include in the copied model. | None | +| exclude | None | Optional set or mapping specifying which fields to exclude in the copied model. | None | +| update | None | Optional dictionary of field-value pairs to override field values in the copied model. | None | +| deep | None | If True, the values of fields that are Pydantic models will be deep-copied. 
| None | + +**Returns:** + +| Type | Description | +|---|---| +| None | A copy of the model with included, excluded and updated fields as specified. | + + +#### dict + +```python3 +def dict( + self, + *, + include: 'IncEx' = None, + exclude: 'IncEx' = None, + by_alias: 'bool' = False, + exclude_unset: 'bool' = False, + exclude_defaults: 'bool' = False, + exclude_none: 'bool' = False +) -> 'typing.Dict[str, Any]' +``` + + +#### json + +```python3 +def json( + self, + *, + include: 'IncEx' = None, + exclude: 'IncEx' = None, + by_alias: 'bool' = False, + exclude_unset: 'bool' = False, + exclude_defaults: 'bool' = False, + exclude_none: 'bool' = False, + encoder: 'typing.Callable[[Any], Any] | None' = PydanticUndefined, + models_as_dict: 'bool' = PydanticUndefined, + **dumps_kwargs: 'Any' +) -> 'str' +``` + + +#### model_copy + +```python3 +def model_copy( + self: 'Model', + *, + update: 'dict[str, Any] | None' = None, + deep: 'bool' = False +) -> 'Model' +``` + +Usage docs: https://docs.pydantic.dev/2.7/concepts/serialization/#model_copy + +Returns a copy of the model. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| update | None | Values to change/add in the new model. Note: the data is not validated
before creating the new model. You should trust this data. | None | +| deep | None | Set to `True` to make a deep copy of the model. | None | + +**Returns:** + +| Type | Description | +|---|---| +| None | New model instance. | + + +#### model_dump + +```python3 +def model_dump( + self, + *, + mode: "Literal['json', 'python'] | str" = 'python', + include: 'IncEx' = None, + exclude: 'IncEx' = None, + context: 'dict[str, Any] | None' = None, + by_alias: 'bool' = False, + exclude_unset: 'bool' = False, + exclude_defaults: 'bool' = False, + exclude_none: 'bool' = False, + round_trip: 'bool' = False, + warnings: "bool | Literal['none', 'warn', 'error']" = True, + serialize_as_any: 'bool' = False +) -> 'dict[str, Any]' +``` + +Usage docs: https://docs.pydantic.dev/2.7/concepts/serialization/#modelmodel_dump + +Generate a dictionary representation of the model, optionally specifying which fields to include or exclude. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| mode | None | The mode in which `to_python` should run.
If mode is 'json', the output will only contain JSON serializable types.
If mode is 'python', the output may contain non-JSON-serializable Python objects. | None | +| include | None | A set of fields to include in the output. | None | +| exclude | None | A set of fields to exclude from the output. | None | +| context | None | Additional context to pass to the serializer. | None | +| by_alias | None | Whether to use the field's alias in the dictionary key if defined. | None | +| exclude_unset | None | Whether to exclude fields that have not been explicitly set. | None | +| exclude_defaults | None | Whether to exclude fields that are set to their default value. | None | +| exclude_none | None | Whether to exclude fields that have a value of `None`. | None | +| round_trip | None | If True, dumped values should be valid as input for non-idempotent types such as Json[T]. | None | +| warnings | None | How to handle serialization errors. False/"none" ignores them, True/"warn" logs errors,
"error" raises a [`PydanticSerializationError`][pydantic_core.PydanticSerializationError]. | None | +| serialize_as_any | None | Whether to serialize fields with duck-typing serialization behavior. | None | + +**Returns:** + +| Type | Description | +|---|---| +| None | A dictionary representation of the model. | + + +#### model_dump_json + +```python3 +def model_dump_json( + self, + *, + indent: 'int | None' = None, + include: 'IncEx' = None, + exclude: 'IncEx' = None, + context: 'dict[str, Any] | None' = None, + by_alias: 'bool' = False, + exclude_unset: 'bool' = False, + exclude_defaults: 'bool' = False, + exclude_none: 'bool' = False, + round_trip: 'bool' = False, + warnings: "bool | Literal['none', 'warn', 'error']" = True, + serialize_as_any: 'bool' = False +) -> 'str' +``` + +Usage docs: https://docs.pydantic.dev/2.7/concepts/serialization/#modelmodel_dump_json + +Generates a JSON representation of the model using Pydantic's `to_json` method. + +**Parameters:** + +| Name | Type | Description | Default | +|---|---|---|---| +| indent | None | Indentation to use in the JSON output. If None is passed, the output will be compact. | None | +| include | None | Field(s) to include in the JSON output. | None | +| exclude | None | Field(s) to exclude from the JSON output. | None | +| context | None | Additional context to pass to the serializer. | None | +| by_alias | None | Whether to serialize using field aliases. | None | +| exclude_unset | None | Whether to exclude fields that have not been explicitly set. | None | +| exclude_defaults | None | Whether to exclude fields that are set to their default value. | None | +| exclude_none | None | Whether to exclude fields that have a value of `None`. | None | +| round_trip | None | If True, dumped values should be valid as input for non-idempotent types such as Json[T]. | None | +| warnings | None | How to handle serialization errors. False/"none" ignores them, True/"warn" logs errors,
"error" raises a [`PydanticSerializationError`][pydantic_core.PydanticSerializationError]. | None | +| serialize_as_any | None | Whether to serialize fields with duck-typing serialization behavior. | None | + +**Returns:** + +| Type | Description | +|---|---| +| None | A JSON string representation of the model. | + + +#### model_post_init + +```python3 +def model_post_init( + self, + _BaseModel__context: 'Any' +) -> 'None' +``` + +Override this method to perform additional initialization after `__init__` and `model_construct`. + +This is useful if you want to do some validation that requires the entire model to be initialized. \ No newline at end of file diff --git a/1.3.0/api/titiler/pgstac/utils/index.html b/1.3.0/api/titiler/pgstac/utils/index.html new file mode 100644 index 00000000..3c3e2efe --- /dev/null +++ b/1.3.0/api/titiler/pgstac/utils/index.html @@ -0,0 +1,1272 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + utils - TiTiler.PgSTAC + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

Module titiler.pgstac.utils

+

titiler.pgstac utilities.

+

Functions

+

retry

+
def retry(
+    tries: int,
+    exceptions: Union[Type[Exception], Sequence[Type[Exception]]] = <class 'Exception'>,
+    delay: float = 0.0
+)
+
+

Retry Decorator
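A minimal usage sketch, assuming the decorator re-invokes the wrapped callable up to `tries` times when one of the given exceptions is raised, waiting `delay` seconds between attempts, and lets the exception propagate once the attempts are exhausted:

```python
import random

from titiler.pgstac.utils import retry


@retry(tries=3, exceptions=ValueError, delay=0.1)
def flaky() -> str:
    """Illustrative function that fails intermittently."""
    if random.random() < 0.5:
        raise ValueError("transient failure")
    return "ok"


print(flaky())  # retried up to 3 times before a ValueError would propagate
```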

diff --git a/1.3.0/api/titiler/pgstac/utils/utils.md b/1.3.0/api/titiler/pgstac/utils/utils.md
new file mode 100644
index 00000000..88016256
--- /dev/null
+++ b/1.3.0/api/titiler/pgstac/utils/utils.md
@@ -0,0 +1,18 @@
+# Module titiler.pgstac.utils
+
+titiler.pgstac utilities.
+
+## Functions
+
+
+### retry
+
+```python3
+def retry(
+    tries: int,
+    exceptions: Union[Type[Exception], Sequence[Type[Exception]]] = <class 'Exception'>,
+    delay: float = 0.0
+)
+```
+
+Retry Decorator
\ No newline at end of file
You can provide an Observable, Promise, ReadableStream, Array, AsyncIterable, or Iterable.")}function Gi(){return typeof Symbol!="function"||!Symbol.iterator?"@@iterator":Symbol.iterator}var er=Gi();function tr(e){return k(e==null?void 0:e[er])}function rr(e){return lo(this,arguments,function(){var r,o,n,i;return Nt(this,function(a){switch(a.label){case 0:r=e.getReader(),a.label=1;case 1:a.trys.push([1,,9,10]),a.label=2;case 2:return[4,nt(r.read())];case 3:return o=a.sent(),n=o.value,i=o.done,i?[4,nt(void 0)]:[3,5];case 4:return[2,a.sent()];case 5:return[4,nt(n)];case 6:return[4,a.sent()];case 7:return a.sent(),[3,2];case 8:return[3,10];case 9:return r.releaseLock(),[7];case 10:return[2]}})})}function or(e){return k(e==null?void 0:e.getReader)}function W(e){if(e instanceof F)return e;if(e!=null){if(Jt(e))return Ji(e);if(xt(e))return Xi(e);if(Gt(e))return Zi(e);if(Xt(e))return Lo(e);if(tr(e))return ea(e);if(or(e))return ta(e)}throw Zt(e)}function Ji(e){return new F(function(t){var r=e[ht]();if(k(r.subscribe))return r.subscribe(t);throw new TypeError("Provided object does not correctly implement Symbol.observable")})}function Xi(e){return new F(function(t){for(var r=0;r=2;return function(o){return o.pipe(e?v(function(n,i){return e(n,i,o)}):le,Te(1),r?Be(t):zo(function(){return new ir}))}}function Fr(e){return e<=0?function(){return M}:y(function(t,r){var o=[];t.subscribe(T(r,function(n){o.push(n),e=2,!0))}function pe(e){e===void 0&&(e={});var t=e.connector,r=t===void 0?function(){return new g}:t,o=e.resetOnError,n=o===void 0?!0:o,i=e.resetOnComplete,a=i===void 0?!0:i,s=e.resetOnRefCountZero,p=s===void 0?!0:s;return function(c){var l,f,u,h=0,w=!1,A=!1,te=function(){f==null||f.unsubscribe(),f=void 0},ie=function(){te(),l=u=void 0,w=A=!1},J=function(){var H=l;ie(),H==null||H.unsubscribe()};return y(function(H,mt){h++,!A&&!w&&te();var ze=u=u!=null?u:r();mt.add(function(){h--,h===0&&!A&&!w&&(f=Wr(J,p))}),ze.subscribe(mt),!l&&h>0&&(l=new at({next:function(Ie){return ze.next(Ie)},error:function(Ie){A=!0,te(),f=Wr(ie,n,Ie),ze.error(Ie)},complete:function(){w=!0,te(),f=Wr(ie,a),ze.complete()}}),W(H).subscribe(l))})(c)}}function Wr(e,t){for(var r=[],o=2;oe.next(document)),e}function $(e,t=document){return Array.from(t.querySelectorAll(e))}function P(e,t=document){let r=fe(e,t);if(typeof r=="undefined")throw new ReferenceError(`Missing element: expected "${e}" to be present`);return r}function fe(e,t=document){return t.querySelector(e)||void 0}function Re(){var e,t,r,o;return(o=(r=(t=(e=document.activeElement)==null?void 0:e.shadowRoot)==null?void 0:t.activeElement)!=null?r:document.activeElement)!=null?o:void 0}var xa=S(d(document.body,"focusin"),d(document.body,"focusout")).pipe(_e(1),Q(void 0),m(()=>Re()||document.body),B(1));function et(e){return xa.pipe(m(t=>e.contains(t)),K())}function kt(e,t){return C(()=>S(d(e,"mouseenter").pipe(m(()=>!0)),d(e,"mouseleave").pipe(m(()=>!1))).pipe(t?Ht(r=>Me(+!r*t)):le,Q(e.matches(":hover"))))}function Bo(e,t){if(typeof t=="string"||typeof t=="number")e.innerHTML+=t.toString();else if(t instanceof Node)e.appendChild(t);else if(Array.isArray(t))for(let r of t)Bo(e,r)}function x(e,t,...r){let o=document.createElement(e);if(t)for(let n of Object.keys(t))typeof t[n]!="undefined"&&(typeof t[n]!="boolean"?o.setAttribute(n,t[n]):o.setAttribute(n,""));for(let n of r)Bo(o,n);return o}function sr(e){if(e>999){let t=+((e-950)%1e3>99);return`${((e+1e-6)/1e3).toFixed(t)}k`}else return e.toString()}function wt(e){let t=x("script",{src:e});return 
C(()=>(document.head.appendChild(t),S(d(t,"load"),d(t,"error").pipe(b(()=>$r(()=>new ReferenceError(`Invalid script: ${e}`))))).pipe(m(()=>{}),L(()=>document.head.removeChild(t)),Te(1))))}var Go=new g,ya=C(()=>typeof ResizeObserver=="undefined"?wt("https://unpkg.com/resize-observer-polyfill"):I(void 0)).pipe(m(()=>new ResizeObserver(e=>e.forEach(t=>Go.next(t)))),b(e=>S(Ke,I(e)).pipe(L(()=>e.disconnect()))),B(1));function ce(e){return{width:e.offsetWidth,height:e.offsetHeight}}function ge(e){let t=e;for(;t.clientWidth===0&&t.parentElement;)t=t.parentElement;return ya.pipe(E(r=>r.observe(t)),b(r=>Go.pipe(v(o=>o.target===t),L(()=>r.unobserve(t)))),m(()=>ce(e)),Q(ce(e)))}function Tt(e){return{width:e.scrollWidth,height:e.scrollHeight}}function cr(e){let t=e.parentElement;for(;t&&(e.scrollWidth<=t.scrollWidth&&e.scrollHeight<=t.scrollHeight);)t=(e=t).parentElement;return t?e:void 0}function Jo(e){let t=[],r=e.parentElement;for(;r;)(e.clientWidth>r.clientWidth||e.clientHeight>r.clientHeight)&&t.push(r),r=(e=r).parentElement;return t.length===0&&t.push(document.documentElement),t}function Ue(e){return{x:e.offsetLeft,y:e.offsetTop}}function Xo(e){let t=e.getBoundingClientRect();return{x:t.x+window.scrollX,y:t.y+window.scrollY}}function Zo(e){return S(d(window,"load"),d(window,"resize")).pipe(Le(0,me),m(()=>Ue(e)),Q(Ue(e)))}function pr(e){return{x:e.scrollLeft,y:e.scrollTop}}function De(e){return S(d(e,"scroll"),d(window,"scroll"),d(window,"resize")).pipe(Le(0,me),m(()=>pr(e)),Q(pr(e)))}var en=new g,Ea=C(()=>I(new IntersectionObserver(e=>{for(let t of e)en.next(t)},{threshold:0}))).pipe(b(e=>S(Ke,I(e)).pipe(L(()=>e.disconnect()))),B(1));function tt(e){return Ea.pipe(E(t=>t.observe(e)),b(t=>en.pipe(v(({target:r})=>r===e),L(()=>t.unobserve(e)),m(({isIntersecting:r})=>r))))}function tn(e,t=16){return De(e).pipe(m(({y:r})=>{let o=ce(e),n=Tt(e);return r>=n.height-o.height-t}),K())}var lr={drawer:P("[data-md-toggle=drawer]"),search:P("[data-md-toggle=search]")};function rn(e){return lr[e].checked}function Je(e,t){lr[e].checked!==t&&lr[e].click()}function Ve(e){let t=lr[e];return d(t,"change").pipe(m(()=>t.checked),Q(t.checked))}function wa(e,t){switch(e.constructor){case HTMLInputElement:return e.type==="radio"?/^Arrow/.test(t):!0;case HTMLSelectElement:case HTMLTextAreaElement:return!0;default:return e.isContentEditable}}function Ta(){return S(d(window,"compositionstart").pipe(m(()=>!0)),d(window,"compositionend").pipe(m(()=>!1))).pipe(Q(!1))}function on(){let e=d(window,"keydown").pipe(v(t=>!(t.metaKey||t.ctrlKey)),m(t=>({mode:rn("search")?"search":"global",type:t.key,claim(){t.preventDefault(),t.stopPropagation()}})),v(({mode:t,type:r})=>{if(t==="global"){let o=Re();if(typeof o!="undefined")return!wa(o,r)}return!0}),pe());return Ta().pipe(b(t=>t?M:e))}function xe(){return new URL(location.href)}function pt(e,t=!1){if(G("navigation.instant")&&!t){let r=x("a",{href:e.href});document.body.appendChild(r),r.click(),r.remove()}else location.href=e.href}function nn(){return new g}function an(){return location.hash.slice(1)}function sn(e){let t=x("a",{href:e});t.addEventListener("click",r=>r.stopPropagation()),t.click()}function Sa(e){return S(d(window,"hashchange"),e).pipe(m(an),Q(an()),v(t=>t.length>0),B(1))}function cn(e){return Sa(e).pipe(m(t=>fe(`[id="${t}"]`)),v(t=>typeof t!="undefined"))}function $t(e){let t=matchMedia(e);return ar(r=>t.addListener(()=>r(t.matches))).pipe(Q(t.matches))}function pn(){let e=matchMedia("print");return 
S(d(window,"beforeprint").pipe(m(()=>!0)),d(window,"afterprint").pipe(m(()=>!1))).pipe(Q(e.matches))}function Nr(e,t){return e.pipe(b(r=>r?t():M))}function zr(e,t){return new F(r=>{let o=new XMLHttpRequest;return o.open("GET",`${e}`),o.responseType="blob",o.addEventListener("load",()=>{o.status>=200&&o.status<300?(r.next(o.response),r.complete()):r.error(new Error(o.statusText))}),o.addEventListener("error",()=>{r.error(new Error("Network error"))}),o.addEventListener("abort",()=>{r.complete()}),typeof(t==null?void 0:t.progress$)!="undefined"&&(o.addEventListener("progress",n=>{var i;if(n.lengthComputable)t.progress$.next(n.loaded/n.total*100);else{let a=(i=o.getResponseHeader("Content-Length"))!=null?i:0;t.progress$.next(n.loaded/+a*100)}}),t.progress$.next(5)),o.send(),()=>o.abort()})}function Ne(e,t){return zr(e,t).pipe(b(r=>r.text()),m(r=>JSON.parse(r)),B(1))}function ln(e,t){let r=new DOMParser;return zr(e,t).pipe(b(o=>o.text()),m(o=>r.parseFromString(o,"text/html")),B(1))}function mn(e,t){let r=new DOMParser;return zr(e,t).pipe(b(o=>o.text()),m(o=>r.parseFromString(o,"text/xml")),B(1))}function fn(){return{x:Math.max(0,scrollX),y:Math.max(0,scrollY)}}function un(){return S(d(window,"scroll",{passive:!0}),d(window,"resize",{passive:!0})).pipe(m(fn),Q(fn()))}function dn(){return{width:innerWidth,height:innerHeight}}function hn(){return d(window,"resize",{passive:!0}).pipe(m(dn),Q(dn()))}function bn(){return z([un(),hn()]).pipe(m(([e,t])=>({offset:e,size:t})),B(1))}function mr(e,{viewport$:t,header$:r}){let o=t.pipe(Z("size")),n=z([o,r]).pipe(m(()=>Ue(e)));return z([r,t,n]).pipe(m(([{height:i},{offset:a,size:s},{x:p,y:c}])=>({offset:{x:a.x-p,y:a.y-c+i},size:s})))}function Oa(e){return d(e,"message",t=>t.data)}function Ma(e){let t=new g;return t.subscribe(r=>e.postMessage(r)),t}function vn(e,t=new Worker(e)){let r=Oa(t),o=Ma(t),n=new g;n.subscribe(o);let i=o.pipe(X(),ne(!0));return n.pipe(X(),Pe(r.pipe(U(i))),pe())}var La=P("#__config"),St=JSON.parse(La.textContent);St.base=`${new URL(St.base,xe())}`;function ye(){return St}function G(e){return St.features.includes(e)}function Ee(e,t){return typeof t!="undefined"?St.translations[e].replace("#",t.toString()):St.translations[e]}function Se(e,t=document){return P(`[data-md-component=${e}]`,t)}function ae(e,t=document){return $(`[data-md-component=${e}]`,t)}function _a(e){let t=P(".md-typeset > :first-child",e);return d(t,"click",{once:!0}).pipe(m(()=>P(".md-typeset",e)),m(r=>({hash:__md_hash(r.innerHTML)})))}function gn(e){if(!G("announce.dismiss")||!e.childElementCount)return M;if(!e.hidden){let t=P(".md-typeset",e);__md_hash(t.innerHTML)===__md_get("__announce")&&(e.hidden=!0)}return C(()=>{let t=new g;return t.subscribe(({hash:r})=>{e.hidden=!0,__md_set("__announce",r)}),_a(e).pipe(E(r=>t.next(r)),L(()=>t.complete()),m(r=>R({ref:e},r)))})}function Aa(e,{target$:t}){return t.pipe(m(r=>({hidden:r!==e})))}function xn(e,t){let r=new g;return r.subscribe(({hidden:o})=>{e.hidden=o}),Aa(e,t).pipe(E(o=>r.next(o)),L(()=>r.complete()),m(o=>R({ref:e},o)))}function Pt(e,t){return t==="inline"?x("div",{class:"md-tooltip md-tooltip--inline",id:e,role:"tooltip"},x("div",{class:"md-tooltip__inner md-typeset"})):x("div",{class:"md-tooltip",id:e,role:"tooltip"},x("div",{class:"md-tooltip__inner md-typeset"}))}function yn(...e){return x("div",{class:"md-tooltip2",role:"tooltip"},x("div",{class:"md-tooltip2__inner md-typeset"},e))}function En(e,t){if(t=t?`${t}_annotation_${e}`:void 0,t){let r=t?`#${t}`:void 0;return 
x("aside",{class:"md-annotation",tabIndex:0},Pt(t),x("a",{href:r,class:"md-annotation__index",tabIndex:-1},x("span",{"data-md-annotation-id":e})))}else return x("aside",{class:"md-annotation",tabIndex:0},Pt(t),x("span",{class:"md-annotation__index",tabIndex:-1},x("span",{"data-md-annotation-id":e})))}function wn(e){return x("button",{class:"md-clipboard md-icon",title:Ee("clipboard.copy"),"data-clipboard-target":`#${e} > code`})}function qr(e,t){let r=t&2,o=t&1,n=Object.keys(e.terms).filter(p=>!e.terms[p]).reduce((p,c)=>[...p,x("del",null,c)," "],[]).slice(0,-1),i=ye(),a=new URL(e.location,i.base);G("search.highlight")&&a.searchParams.set("h",Object.entries(e.terms).filter(([,p])=>p).reduce((p,[c])=>`${p} ${c}`.trim(),""));let{tags:s}=ye();return x("a",{href:`${a}`,class:"md-search-result__link",tabIndex:-1},x("article",{class:"md-search-result__article md-typeset","data-md-score":e.score.toFixed(2)},r>0&&x("div",{class:"md-search-result__icon md-icon"}),r>0&&x("h1",null,e.title),r<=0&&x("h2",null,e.title),o>0&&e.text.length>0&&e.text,e.tags&&e.tags.map(p=>{let c=s?p in s?`md-tag-icon md-tag--${s[p]}`:"md-tag-icon":"";return x("span",{class:`md-tag ${c}`},p)}),o>0&&n.length>0&&x("p",{class:"md-search-result__terms"},Ee("search.result.term.missing"),": ",...n)))}function Tn(e){let t=e[0].score,r=[...e],o=ye(),n=r.findIndex(l=>!`${new URL(l.location,o.base)}`.includes("#")),[i]=r.splice(n,1),a=r.findIndex(l=>l.scoreqr(l,1)),...p.length?[x("details",{class:"md-search-result__more"},x("summary",{tabIndex:-1},x("div",null,p.length>0&&p.length===1?Ee("search.result.more.one"):Ee("search.result.more.other",p.length))),...p.map(l=>qr(l,1)))]:[]];return x("li",{class:"md-search-result__item"},c)}function Sn(e){return x("ul",{class:"md-source__facts"},Object.entries(e).map(([t,r])=>x("li",{class:`md-source__fact md-source__fact--${t}`},typeof r=="number"?sr(r):r)))}function Qr(e){let t=`tabbed-control tabbed-control--${e}`;return x("div",{class:t,hidden:!0},x("button",{class:"tabbed-button",tabIndex:-1,"aria-hidden":"true"}))}function On(e){return x("div",{class:"md-typeset__scrollwrap"},x("div",{class:"md-typeset__table"},e))}function Ca(e){var o;let t=ye(),r=new URL(`../${e.version}/`,t.base);return x("li",{class:"md-version__item"},x("a",{href:`${r}`,class:"md-version__link"},e.title,((o=t.version)==null?void 0:o.alias)&&e.aliases.length&&x("span",{class:"md-version__alias"},e.aliases[0])))}function Mn(e,t){var o;let r=ye();return e=e.filter(n=>{var i;return!((i=n.properties)!=null&&i.hidden)}),x("div",{class:"md-version"},x("button",{class:"md-version__current","aria-label":Ee("select.version")},t.title,((o=r.version)==null?void 0:o.alias)&&t.aliases.length&&x("span",{class:"md-version__alias"},t.aliases[0])),x("ul",{class:"md-version__list"},e.map(Ca)))}var Ha=0;function ka(e){let t=z([et(e),kt(e)]).pipe(m(([o,n])=>o||n),K()),r=C(()=>Jo(e)).pipe(oe(De),ct(1),m(()=>Xo(e)));return t.pipe(Ae(o=>o),b(()=>z([t,r])),m(([o,n])=>({active:o,offset:n})),pe())}function $a(e,t){let{content$:r,viewport$:o}=t,n=`__tooltip2_${Ha++}`;return C(()=>{let i=new g,a=new _r(!1);i.pipe(X(),ne(!1)).subscribe(a);let s=a.pipe(Ht(c=>Me(+!c*250,Hr)),K(),b(c=>c?r:M),E(c=>c.id=n),pe());z([i.pipe(m(({active:c})=>c)),s.pipe(b(c=>kt(c,250)),Q(!1))]).pipe(m(c=>c.some(l=>l))).subscribe(a);let p=a.pipe(v(c=>c),ee(s,o),m(([c,l,{size:f}])=>{let u=e.getBoundingClientRect(),h=u.width/2;if(l.role==="tooltip")return{x:h,y:8+u.height};if(u.y>=f.height/2){let{height:w}=ce(l);return{x:h,y:-16-w}}else return{x:h,y:16+u.height}}));return 
z([s,i,p]).subscribe(([c,{offset:l},f])=>{c.style.setProperty("--md-tooltip-host-x",`${l.x}px`),c.style.setProperty("--md-tooltip-host-y",`${l.y}px`),c.style.setProperty("--md-tooltip-x",`${f.x}px`),c.style.setProperty("--md-tooltip-y",`${f.y}px`),c.classList.toggle("md-tooltip2--top",f.y<0),c.classList.toggle("md-tooltip2--bottom",f.y>=0)}),a.pipe(v(c=>c),ee(s,(c,l)=>l),v(c=>c.role==="tooltip")).subscribe(c=>{let l=ce(P(":scope > *",c));c.style.setProperty("--md-tooltip-width",`${l.width}px`),c.style.setProperty("--md-tooltip-tail","0px")}),a.pipe(K(),be(me),ee(s)).subscribe(([c,l])=>{l.classList.toggle("md-tooltip2--active",c)}),z([a.pipe(v(c=>c)),s]).subscribe(([c,l])=>{l.role==="dialog"?(e.setAttribute("aria-controls",n),e.setAttribute("aria-haspopup","dialog")):e.setAttribute("aria-describedby",n)}),a.pipe(v(c=>!c)).subscribe(()=>{e.removeAttribute("aria-controls"),e.removeAttribute("aria-describedby"),e.removeAttribute("aria-haspopup")}),ka(e).pipe(E(c=>i.next(c)),L(()=>i.complete()),m(c=>R({ref:e},c)))})}function lt(e,{viewport$:t},r=document.body){return $a(e,{content$:new F(o=>{let n=e.title,i=yn(n);return o.next(i),e.removeAttribute("title"),r.append(i),()=>{i.remove(),e.setAttribute("title",n)}}),viewport$:t})}function Pa(e,t){let r=C(()=>z([Zo(e),De(t)])).pipe(m(([{x:o,y:n},i])=>{let{width:a,height:s}=ce(e);return{x:o-i.x+a/2,y:n-i.y+s/2}}));return et(e).pipe(b(o=>r.pipe(m(n=>({active:o,offset:n})),Te(+!o||1/0))))}function Ln(e,t,{target$:r}){let[o,n]=Array.from(e.children);return C(()=>{let i=new g,a=i.pipe(X(),ne(!0));return i.subscribe({next({offset:s}){e.style.setProperty("--md-tooltip-x",`${s.x}px`),e.style.setProperty("--md-tooltip-y",`${s.y}px`)},complete(){e.style.removeProperty("--md-tooltip-x"),e.style.removeProperty("--md-tooltip-y")}}),tt(e).pipe(U(a)).subscribe(s=>{e.toggleAttribute("data-md-visible",s)}),S(i.pipe(v(({active:s})=>s)),i.pipe(_e(250),v(({active:s})=>!s))).subscribe({next({active:s}){s?e.prepend(o):o.remove()},complete(){e.prepend(o)}}),i.pipe(Le(16,me)).subscribe(({active:s})=>{o.classList.toggle("md-tooltip--active",s)}),i.pipe(ct(125,me),v(()=>!!e.offsetParent),m(()=>e.offsetParent.getBoundingClientRect()),m(({x:s})=>s)).subscribe({next(s){s?e.style.setProperty("--md-tooltip-0",`${-s}px`):e.style.removeProperty("--md-tooltip-0")},complete(){e.style.removeProperty("--md-tooltip-0")}}),d(n,"click").pipe(U(a),v(s=>!(s.metaKey||s.ctrlKey))).subscribe(s=>{s.stopPropagation(),s.preventDefault()}),d(n,"mousedown").pipe(U(a),ee(i)).subscribe(([s,{active:p}])=>{var c;if(s.button!==0||s.metaKey||s.ctrlKey)s.preventDefault();else if(p){s.preventDefault();let l=e.parentElement.closest(".md-annotation");l instanceof HTMLElement?l.focus():(c=Re())==null||c.blur()}}),r.pipe(U(a),v(s=>s===o),Ge(125)).subscribe(()=>e.focus()),Pa(e,t).pipe(E(s=>i.next(s)),L(()=>i.complete()),m(s=>R({ref:e},s)))})}function Ra(e){return e.tagName==="CODE"?$(".c, .c1, .cm",e):[e]}function Ia(e){let t=[];for(let r of Ra(e)){let o=[],n=document.createNodeIterator(r,NodeFilter.SHOW_TEXT);for(let i=n.nextNode();i;i=n.nextNode())o.push(i);for(let i of o){let a;for(;a=/(\(\d+\))(!)?/.exec(i.textContent);){let[,s,p]=a;if(typeof p=="undefined"){let c=i.splitText(a.index);i=c.splitText(s.length),t.push(c)}else{i.textContent=s,t.push(i);break}}}}return t}function _n(e,t){t.append(...Array.from(e.childNodes))}function fr(e,t,{target$:r,print$:o}){let n=t.closest("[id]"),i=n==null?void 0:n.id,a=new Map;for(let s of Ia(t)){let[,p]=s.textContent.match(/\((\d+)\)/);fe(`:scope > 
li:nth-child(${p})`,e)&&(a.set(p,En(p,i)),s.replaceWith(a.get(p)))}return a.size===0?M:C(()=>{let s=new g,p=s.pipe(X(),ne(!0)),c=[];for(let[l,f]of a)c.push([P(".md-typeset",f),P(`:scope > li:nth-child(${l})`,e)]);return o.pipe(U(p)).subscribe(l=>{e.hidden=!l,e.classList.toggle("md-annotation-list",l);for(let[f,u]of c)l?_n(f,u):_n(u,f)}),S(...[...a].map(([,l])=>Ln(l,t,{target$:r}))).pipe(L(()=>s.complete()),pe())})}function An(e){if(e.nextElementSibling){let t=e.nextElementSibling;if(t.tagName==="OL")return t;if(t.tagName==="P"&&!t.children.length)return An(t)}}function Cn(e,t){return C(()=>{let r=An(e);return typeof r!="undefined"?fr(r,e,t):M})}var Hn=Vt(Yr());var Fa=0;function kn(e){if(e.nextElementSibling){let t=e.nextElementSibling;if(t.tagName==="OL")return t;if(t.tagName==="P"&&!t.children.length)return kn(t)}}function ja(e){return ge(e).pipe(m(({width:t})=>({scrollable:Tt(e).width>t})),Z("scrollable"))}function $n(e,t){let{matches:r}=matchMedia("(hover)"),o=C(()=>{let n=new g,i=n.pipe(Fr(1));n.subscribe(({scrollable:c})=>{c&&r?e.setAttribute("tabindex","0"):e.removeAttribute("tabindex")});let a=[];if(Hn.default.isSupported()&&(e.closest(".copy")||G("content.code.copy")&&!e.closest(".no-copy"))){let c=e.closest("pre");c.id=`__code_${Fa++}`;let l=wn(c.id);c.insertBefore(l,e),G("content.tooltips")&&a.push(lt(l,{viewport$}))}let s=e.closest(".highlight");if(s instanceof HTMLElement){let c=kn(s);if(typeof c!="undefined"&&(s.classList.contains("annotate")||G("content.code.annotate"))){let l=fr(c,e,t);a.push(ge(s).pipe(U(i),m(({width:f,height:u})=>f&&u),K(),b(f=>f?l:M)))}}return $(":scope > span[id]",e).length&&e.classList.add("md-code__content"),ja(e).pipe(E(c=>n.next(c)),L(()=>n.complete()),m(c=>R({ref:e},c)),Pe(...a))});return G("content.lazy")?tt(e).pipe(v(n=>n),Te(1),b(()=>o)):o}function Wa(e,{target$:t,print$:r}){let o=!0;return S(t.pipe(m(n=>n.closest("details:not([open])")),v(n=>e===n),m(()=>({action:"open",reveal:!0}))),r.pipe(v(n=>n||!o),E(()=>o=e.open),m(n=>({action:n?"open":"close"}))))}function Pn(e,t){return C(()=>{let r=new g;return r.subscribe(({action:o,reveal:n})=>{e.toggleAttribute("open",o==="open"),n&&e.scrollIntoView()}),Wa(e,t).pipe(E(o=>r.next(o)),L(()=>r.complete()),m(o=>R({ref:e},o)))})}var Rn=".node circle,.node ellipse,.node path,.node polygon,.node rect{fill:var(--md-mermaid-node-bg-color);stroke:var(--md-mermaid-node-fg-color)}marker{fill:var(--md-mermaid-edge-color)!important}.edgeLabel .label rect{fill:#0000}.label{color:var(--md-mermaid-label-fg-color);font-family:var(--md-mermaid-font-family)}.label foreignObject{line-height:normal;overflow:visible}.label div .edgeLabel{color:var(--md-mermaid-label-fg-color)}.edgeLabel,.edgeLabel rect,.label div .edgeLabel{background-color:var(--md-mermaid-label-bg-color)}.edgeLabel,.edgeLabel rect{fill:var(--md-mermaid-label-bg-color);color:var(--md-mermaid-edge-color)}.edgePath .path,.flowchart-link{stroke:var(--md-mermaid-edge-color);stroke-width:.05rem}.edgePath .arrowheadPath{fill:var(--md-mermaid-edge-color);stroke:none}.cluster rect{fill:var(--md-default-fg-color--lightest);stroke:var(--md-default-fg-color--lighter)}.cluster span{color:var(--md-mermaid-label-fg-color);font-family:var(--md-mermaid-font-family)}g #flowchart-circleEnd,g #flowchart-circleStart,g #flowchart-crossEnd,g #flowchart-crossStart,g #flowchart-pointEnd,g #flowchart-pointStart{stroke:none}g.classGroup line,g.classGroup rect{fill:var(--md-mermaid-node-bg-color);stroke:var(--md-mermaid-node-fg-color)}g.classGroup 
text{fill:var(--md-mermaid-label-fg-color);font-family:var(--md-mermaid-font-family)}.classLabel .box{fill:var(--md-mermaid-label-bg-color);background-color:var(--md-mermaid-label-bg-color);opacity:1}.classLabel .label{fill:var(--md-mermaid-label-fg-color);font-family:var(--md-mermaid-font-family)}.node .divider{stroke:var(--md-mermaid-node-fg-color)}.relation{stroke:var(--md-mermaid-edge-color)}.cardinality{fill:var(--md-mermaid-label-fg-color);font-family:var(--md-mermaid-font-family)}.cardinality text{fill:inherit!important}defs #classDiagram-compositionEnd,defs #classDiagram-compositionStart,defs #classDiagram-dependencyEnd,defs #classDiagram-dependencyStart,defs #classDiagram-extensionEnd,defs #classDiagram-extensionStart{fill:var(--md-mermaid-edge-color)!important;stroke:var(--md-mermaid-edge-color)!important}defs #classDiagram-aggregationEnd,defs #classDiagram-aggregationStart{fill:var(--md-mermaid-label-bg-color)!important;stroke:var(--md-mermaid-edge-color)!important}g.stateGroup rect{fill:var(--md-mermaid-node-bg-color);stroke:var(--md-mermaid-node-fg-color)}g.stateGroup .state-title{fill:var(--md-mermaid-label-fg-color)!important;font-family:var(--md-mermaid-font-family)}g.stateGroup .composit{fill:var(--md-mermaid-label-bg-color)}.nodeLabel,.nodeLabel p{color:var(--md-mermaid-label-fg-color);font-family:var(--md-mermaid-font-family)}a .nodeLabel{text-decoration:underline}.node circle.state-end,.node circle.state-start,.start-state{fill:var(--md-mermaid-edge-color);stroke:none}.end-state-inner,.end-state-outer{fill:var(--md-mermaid-edge-color)}.end-state-inner,.node circle.state-end{stroke:var(--md-mermaid-label-bg-color)}.transition{stroke:var(--md-mermaid-edge-color)}[id^=state-fork] rect,[id^=state-join] rect{fill:var(--md-mermaid-edge-color)!important;stroke:none!important}.statediagram-cluster.statediagram-cluster .inner{fill:var(--md-default-bg-color)}.statediagram-cluster rect{fill:var(--md-mermaid-node-bg-color);stroke:var(--md-mermaid-node-fg-color)}.statediagram-state rect.divider{fill:var(--md-default-fg-color--lightest);stroke:var(--md-default-fg-color--lighter)}defs #statediagram-barbEnd{stroke:var(--md-mermaid-edge-color)}.attributeBoxEven,.attributeBoxOdd{fill:var(--md-mermaid-node-bg-color);stroke:var(--md-mermaid-node-fg-color)}.entityBox{fill:var(--md-mermaid-label-bg-color);stroke:var(--md-mermaid-node-fg-color)}.entityLabel{fill:var(--md-mermaid-label-fg-color);font-family:var(--md-mermaid-font-family)}.relationshipLabelBox{fill:var(--md-mermaid-label-bg-color);fill-opacity:1;background-color:var(--md-mermaid-label-bg-color);opacity:1}.relationshipLabel{fill:var(--md-mermaid-label-fg-color)}.relationshipLine{stroke:var(--md-mermaid-edge-color)}defs #ONE_OR_MORE_END *,defs #ONE_OR_MORE_START *,defs #ONLY_ONE_END *,defs #ONLY_ONE_START *,defs #ZERO_OR_MORE_END *,defs #ZERO_OR_MORE_START *,defs #ZERO_OR_ONE_END *,defs #ZERO_OR_ONE_START *{stroke:var(--md-mermaid-edge-color)!important}defs #ZERO_OR_MORE_END circle,defs #ZERO_OR_MORE_START circle{fill:var(--md-mermaid-label-bg-color)}.actor{fill:var(--md-mermaid-sequence-actor-bg-color);stroke:var(--md-mermaid-sequence-actor-border-color)}text.actor>tspan{fill:var(--md-mermaid-sequence-actor-fg-color);font-family:var(--md-mermaid-font-family)}line{stroke:var(--md-mermaid-sequence-actor-line-color)}.actor-man circle,.actor-man 
line{fill:var(--md-mermaid-sequence-actorman-bg-color);stroke:var(--md-mermaid-sequence-actorman-line-color)}.messageLine0,.messageLine1{stroke:var(--md-mermaid-sequence-message-line-color)}.note{fill:var(--md-mermaid-sequence-note-bg-color);stroke:var(--md-mermaid-sequence-note-border-color)}.loopText,.loopText>tspan,.messageText,.noteText>tspan{stroke:none;font-family:var(--md-mermaid-font-family)!important}.messageText{fill:var(--md-mermaid-sequence-message-fg-color)}.loopText,.loopText>tspan{fill:var(--md-mermaid-sequence-loop-fg-color)}.noteText>tspan{fill:var(--md-mermaid-sequence-note-fg-color)}#arrowhead path{fill:var(--md-mermaid-sequence-message-line-color);stroke:none}.loopLine{fill:var(--md-mermaid-sequence-loop-bg-color);stroke:var(--md-mermaid-sequence-loop-border-color)}.labelBox{fill:var(--md-mermaid-sequence-label-bg-color);stroke:none}.labelText,.labelText>span{fill:var(--md-mermaid-sequence-label-fg-color);font-family:var(--md-mermaid-font-family)}.sequenceNumber{fill:var(--md-mermaid-sequence-number-fg-color)}rect.rect{fill:var(--md-mermaid-sequence-box-bg-color);stroke:none}rect.rect+text.text{fill:var(--md-mermaid-sequence-box-fg-color)}defs #sequencenumber{fill:var(--md-mermaid-sequence-number-bg-color)!important}";var Br,Da=0;function Va(){return typeof mermaid=="undefined"||mermaid instanceof Element?wt("https://unpkg.com/mermaid@10/dist/mermaid.min.js"):I(void 0)}function In(e){return e.classList.remove("mermaid"),Br||(Br=Va().pipe(E(()=>mermaid.initialize({startOnLoad:!1,themeCSS:Rn,sequence:{actorFontSize:"16px",messageFontSize:"16px",noteFontSize:"16px"}})),m(()=>{}),B(1))),Br.subscribe(()=>ao(this,null,function*(){e.classList.add("mermaid");let t=`__mermaid_${Da++}`,r=x("div",{class:"mermaid"}),o=e.textContent,{svg:n,fn:i}=yield mermaid.render(t,o),a=r.attachShadow({mode:"closed"});a.innerHTML=n,e.replaceWith(r),i==null||i(a)})),Br.pipe(m(()=>({ref:e})))}var Fn=x("table");function jn(e){return e.replaceWith(Fn),Fn.replaceWith(On(e)),I({ref:e})}function Na(e){let t=e.find(r=>r.checked)||e[0];return S(...e.map(r=>d(r,"change").pipe(m(()=>P(`label[for="${r.id}"]`))))).pipe(Q(P(`label[for="${t.id}"]`)),m(r=>({active:r})))}function Wn(e,{viewport$:t,target$:r}){let o=P(".tabbed-labels",e),n=$(":scope > input",e),i=Qr("prev");e.append(i);let a=Qr("next");return e.append(a),C(()=>{let s=new g,p=s.pipe(X(),ne(!0));z([s,ge(e)]).pipe(U(p),Le(1,me)).subscribe({next([{active:c},l]){let f=Ue(c),{width:u}=ce(c);e.style.setProperty("--md-indicator-x",`${f.x}px`),e.style.setProperty("--md-indicator-width",`${u}px`);let h=pr(o);(f.xh.x+l.width)&&o.scrollTo({left:Math.max(0,f.x-16),behavior:"smooth"})},complete(){e.style.removeProperty("--md-indicator-x"),e.style.removeProperty("--md-indicator-width")}}),z([De(o),ge(o)]).pipe(U(p)).subscribe(([c,l])=>{let f=Tt(o);i.hidden=c.x<16,a.hidden=c.x>f.width-l.width-16}),S(d(i,"click").pipe(m(()=>-1)),d(a,"click").pipe(m(()=>1))).pipe(U(p)).subscribe(c=>{let{width:l}=ce(o);o.scrollBy({left:l*c,behavior:"smooth"})}),r.pipe(U(p),v(c=>n.includes(c))).subscribe(c=>c.click()),o.classList.add("tabbed-labels--linked");for(let c of n){let l=P(`label[for="${c.id}"]`);l.replaceChildren(x("a",{href:`#${l.htmlFor}`,tabIndex:-1},...Array.from(l.childNodes))),d(l.firstElementChild,"click").pipe(U(p),v(f=>!(f.metaKey||f.ctrlKey)),E(f=>{f.preventDefault(),f.stopPropagation()})).subscribe(()=>{history.replaceState({},"",`#${l.htmlFor}`),l.click()})}return G("content.tabs.link")&&s.pipe(Ce(1),ee(t)).subscribe(([{active:c},{offset:l}])=>{let 
f=c.innerText.trim();if(c.hasAttribute("data-md-switching"))c.removeAttribute("data-md-switching");else{let u=e.offsetTop-l.y;for(let w of $("[data-tabs]"))for(let A of $(":scope > input",w)){let te=P(`label[for="${A.id}"]`);if(te!==c&&te.innerText.trim()===f){te.setAttribute("data-md-switching",""),A.click();break}}window.scrollTo({top:e.offsetTop-u});let h=__md_get("__tabs")||[];__md_set("__tabs",[...new Set([f,...h])])}}),s.pipe(U(p)).subscribe(()=>{for(let c of $("audio, video",e))c.pause()}),tt(e).pipe(b(()=>Na(n)),E(c=>s.next(c)),L(()=>s.complete()),m(c=>R({ref:e},c)))}).pipe(Qe(se))}function Un(e,{viewport$:t,target$:r,print$:o}){return S(...$(".annotate:not(.highlight)",e).map(n=>Cn(n,{target$:r,print$:o})),...$("pre:not(.mermaid) > code",e).map(n=>$n(n,{target$:r,print$:o})),...$("pre.mermaid",e).map(n=>In(n)),...$("table:not([class])",e).map(n=>jn(n)),...$("details",e).map(n=>Pn(n,{target$:r,print$:o})),...$("[data-tabs]",e).map(n=>Wn(n,{viewport$:t,target$:r})),...$("[title]",e).filter(()=>G("content.tooltips")).map(n=>lt(n,{viewport$:t})))}function za(e,{alert$:t}){return t.pipe(b(r=>S(I(!0),I(!1).pipe(Ge(2e3))).pipe(m(o=>({message:r,active:o})))))}function Dn(e,t){let r=P(".md-typeset",e);return C(()=>{let o=new g;return o.subscribe(({message:n,active:i})=>{e.classList.toggle("md-dialog--active",i),r.textContent=n}),za(e,t).pipe(E(n=>o.next(n)),L(()=>o.complete()),m(n=>R({ref:e},n)))})}var qa=0;function Qa(e,t){document.body.append(e);let{width:r}=ce(e);e.style.setProperty("--md-tooltip-width",`${r}px`),e.remove();let o=cr(t),n=typeof o!="undefined"?De(o):I({x:0,y:0}),i=S(et(t),kt(t)).pipe(K());return z([i,n]).pipe(m(([a,s])=>{let{x:p,y:c}=Ue(t),l=ce(t),f=t.closest("table");return f&&t.parentElement&&(p+=f.offsetLeft+t.parentElement.offsetLeft,c+=f.offsetTop+t.parentElement.offsetTop),{active:a,offset:{x:p-s.x+l.width/2-r/2,y:c-s.y+l.height+8}}}))}function Vn(e){let t=e.title;if(!t.length)return M;let r=`__tooltip_${qa++}`,o=Pt(r,"inline"),n=P(".md-typeset",o);return n.innerHTML=t,C(()=>{let i=new g;return i.subscribe({next({offset:a}){o.style.setProperty("--md-tooltip-x",`${a.x}px`),o.style.setProperty("--md-tooltip-y",`${a.y}px`)},complete(){o.style.removeProperty("--md-tooltip-x"),o.style.removeProperty("--md-tooltip-y")}}),S(i.pipe(v(({active:a})=>a)),i.pipe(_e(250),v(({active:a})=>!a))).subscribe({next({active:a}){a?(e.insertAdjacentElement("afterend",o),e.setAttribute("aria-describedby",r),e.removeAttribute("title")):(o.remove(),e.removeAttribute("aria-describedby"),e.setAttribute("title",t))},complete(){o.remove(),e.removeAttribute("aria-describedby"),e.setAttribute("title",t)}}),i.pipe(Le(16,me)).subscribe(({active:a})=>{o.classList.toggle("md-tooltip--active",a)}),i.pipe(ct(125,me),v(()=>!!e.offsetParent),m(()=>e.offsetParent.getBoundingClientRect()),m(({x:a})=>a)).subscribe({next(a){a?o.style.setProperty("--md-tooltip-0",`${-a}px`):o.style.removeProperty("--md-tooltip-0")},complete(){o.style.removeProperty("--md-tooltip-0")}}),Qa(o,e).pipe(E(a=>i.next(a)),L(()=>i.complete()),m(a=>R({ref:e},a)))}).pipe(Qe(se))}function Ka({viewport$:e}){if(!G("header.autohide"))return I(!1);let t=e.pipe(m(({offset:{y:n}})=>n),Ye(2,1),m(([n,i])=>[nMath.abs(i-n.y)>100),m(([,[n]])=>n),K()),o=Ve("search");return z([e,o]).pipe(m(([{offset:n},i])=>n.y>400&&!i),K(),b(n=>n?r:I(!1)),Q(!1))}function Nn(e,t){return C(()=>z([ge(e),Ka(t)])).pipe(m(([{height:r},o])=>({height:r,hidden:o})),K((r,o)=>r.height===o.height&&r.hidden===o.hidden),B(1))}function zn(e,{header$:t,main$:r}){return C(()=>{let 
o=new g,n=o.pipe(X(),ne(!0));o.pipe(Z("active"),We(t)).subscribe(([{active:a},{hidden:s}])=>{e.classList.toggle("md-header--shadow",a&&!s),e.hidden=s});let i=ue($("[title]",e)).pipe(v(()=>G("content.tooltips")),oe(a=>Vn(a)));return r.subscribe(o),t.pipe(U(n),m(a=>R({ref:e},a)),Pe(i.pipe(U(n))))})}function Ya(e,{viewport$:t,header$:r}){return mr(e,{viewport$:t,header$:r}).pipe(m(({offset:{y:o}})=>{let{height:n}=ce(e);return{active:o>=n}}),Z("active"))}function qn(e,t){return C(()=>{let r=new g;r.subscribe({next({active:n}){e.classList.toggle("md-header__title--active",n)},complete(){e.classList.remove("md-header__title--active")}});let o=fe(".md-content h1");return typeof o=="undefined"?M:Ya(o,t).pipe(E(n=>r.next(n)),L(()=>r.complete()),m(n=>R({ref:e},n)))})}function Qn(e,{viewport$:t,header$:r}){let o=r.pipe(m(({height:i})=>i),K()),n=o.pipe(b(()=>ge(e).pipe(m(({height:i})=>({top:e.offsetTop,bottom:e.offsetTop+i})),Z("bottom"))));return z([o,n,t]).pipe(m(([i,{top:a,bottom:s},{offset:{y:p},size:{height:c}}])=>(c=Math.max(0,c-Math.max(0,a-p,i)-Math.max(0,c+p-s)),{offset:a-i,height:c,active:a-i<=p})),K((i,a)=>i.offset===a.offset&&i.height===a.height&&i.active===a.active))}function Ba(e){let t=__md_get("__palette")||{index:e.findIndex(o=>matchMedia(o.getAttribute("data-md-color-media")).matches)},r=Math.max(0,Math.min(t.index,e.length-1));return I(...e).pipe(oe(o=>d(o,"change").pipe(m(()=>o))),Q(e[r]),m(o=>({index:e.indexOf(o),color:{media:o.getAttribute("data-md-color-media"),scheme:o.getAttribute("data-md-color-scheme"),primary:o.getAttribute("data-md-color-primary"),accent:o.getAttribute("data-md-color-accent")}})),B(1))}function Kn(e){let t=$("input",e),r=x("meta",{name:"theme-color"});document.head.appendChild(r);let o=x("meta",{name:"color-scheme"});document.head.appendChild(o);let n=$t("(prefers-color-scheme: light)");return C(()=>{let i=new g;return i.subscribe(a=>{if(document.body.setAttribute("data-md-color-switching",""),a.color.media==="(prefers-color-scheme)"){let s=matchMedia("(prefers-color-scheme: light)"),p=document.querySelector(s.matches?"[data-md-color-media='(prefers-color-scheme: light)']":"[data-md-color-media='(prefers-color-scheme: dark)']");a.color.scheme=p.getAttribute("data-md-color-scheme"),a.color.primary=p.getAttribute("data-md-color-primary"),a.color.accent=p.getAttribute("data-md-color-accent")}for(let[s,p]of Object.entries(a.color))document.body.setAttribute(`data-md-color-${s}`,p);for(let s=0;sa.key==="Enter"),ee(i,(a,s)=>s)).subscribe(({index:a})=>{a=(a+1)%t.length,t[a].click(),t[a].focus()}),i.pipe(m(()=>{let a=Se("header"),s=window.getComputedStyle(a);return o.content=s.colorScheme,s.backgroundColor.match(/\d+/g).map(p=>(+p).toString(16).padStart(2,"0")).join("")})).subscribe(a=>r.content=`#${a}`),i.pipe(be(se)).subscribe(()=>{document.body.removeAttribute("data-md-color-switching")}),Ba(t).pipe(U(n.pipe(Ce(1))),st(),E(a=>i.next(a)),L(()=>i.complete()),m(a=>R({ref:e},a)))})}function Yn(e,{progress$:t}){return C(()=>{let r=new g;return r.subscribe(({value:o})=>{e.style.setProperty("--md-progress-value",`${o}`)}),t.pipe(E(o=>r.next({value:o})),L(()=>r.complete()),m(o=>({ref:e,value:o})))})}var Gr=Vt(Yr());function Ga(e){e.setAttribute("data-md-copying","");let t=e.closest("[data-copy]"),r=t?t.getAttribute("data-copy"):e.innerText;return e.removeAttribute("data-md-copying"),r.trimEnd()}function Bn({alert$:e}){Gr.default.isSupported()&&new F(t=>{new Gr.default("[data-clipboard-target], 
[data-clipboard-text]",{text:r=>r.getAttribute("data-clipboard-text")||Ga(P(r.getAttribute("data-clipboard-target")))}).on("success",r=>t.next(r))}).pipe(E(t=>{t.trigger.focus()}),m(()=>Ee("clipboard.copied"))).subscribe(e)}function Gn(e,t){return e.protocol=t.protocol,e.hostname=t.hostname,e}function Ja(e,t){let r=new Map;for(let o of $("url",e)){let n=P("loc",o),i=[Gn(new URL(n.textContent),t)];r.set(`${i[0]}`,i);for(let a of $("[rel=alternate]",o)){let s=a.getAttribute("href");s!=null&&i.push(Gn(new URL(s),t))}}return r}function ur(e){return mn(new URL("sitemap.xml",e)).pipe(m(t=>Ja(t,new URL(e))),ve(()=>I(new Map)))}function Xa(e,t){if(!(e.target instanceof Element))return M;let r=e.target.closest("a");if(r===null)return M;if(r.target||e.metaKey||e.ctrlKey)return M;let o=new URL(r.href);return o.search=o.hash="",t.has(`${o}`)?(e.preventDefault(),I(new URL(r.href))):M}function Jn(e){let t=new Map;for(let r of $(":scope > *",e.head))t.set(r.outerHTML,r);return t}function Xn(e){for(let t of $("[href], [src]",e))for(let r of["href","src"]){let o=t.getAttribute(r);if(o&&!/^(?:[a-z]+:)?\/\//i.test(o)){t[r]=t[r];break}}return I(e)}function Za(e){for(let o of["[data-md-component=announce]","[data-md-component=container]","[data-md-component=header-topic]","[data-md-component=outdated]","[data-md-component=logo]","[data-md-component=skip]",...G("navigation.tabs.sticky")?["[data-md-component=tabs]"]:[]]){let n=fe(o),i=fe(o,e);typeof n!="undefined"&&typeof i!="undefined"&&n.replaceWith(i)}let t=Jn(document);for(let[o,n]of Jn(e))t.has(o)?t.delete(o):document.head.appendChild(n);for(let o of t.values()){let n=o.getAttribute("name");n!=="theme-color"&&n!=="color-scheme"&&o.remove()}let r=Se("container");return je($("script",r)).pipe(b(o=>{let n=e.createElement("script");if(o.src){for(let i of o.getAttributeNames())n.setAttribute(i,o.getAttribute(i));return o.replaceWith(n),new F(i=>{n.onload=()=>i.complete()})}else return n.textContent=o.textContent,o.replaceWith(n),M}),X(),ne(document))}function Zn({location$:e,viewport$:t,progress$:r}){let o=ye();if(location.protocol==="file:")return M;let n=ur(o.base);I(document).subscribe(Xn);let i=d(document.body,"click").pipe(We(n),b(([p,c])=>Xa(p,c)),pe()),a=d(window,"popstate").pipe(m(xe),pe());i.pipe(ee(t)).subscribe(([p,{offset:c}])=>{history.replaceState(c,""),history.pushState(null,"",p)}),S(i,a).subscribe(e);let s=e.pipe(Z("pathname"),b(p=>ln(p,{progress$:r}).pipe(ve(()=>(pt(p,!0),M)))),b(Xn),b(Za),pe());return S(s.pipe(ee(e,(p,c)=>c)),s.pipe(b(()=>e),Z("pathname"),b(()=>e),Z("hash")),e.pipe(K((p,c)=>p.pathname===c.pathname&&p.hash===c.hash),b(()=>i),E(()=>history.back()))).subscribe(p=>{var c,l;history.state!==null||!p.hash?window.scrollTo(0,(l=(c=history.state)==null?void 0:c.y)!=null?l:0):(history.scrollRestoration="auto",sn(p.hash),history.scrollRestoration="manual")}),e.subscribe(()=>{history.scrollRestoration="manual"}),d(window,"beforeunload").subscribe(()=>{history.scrollRestoration="auto"}),t.pipe(Z("offset"),_e(100)).subscribe(({offset:p})=>{history.replaceState(p,"")}),s}var ri=Vt(ti());function oi(e){let t=e.separator.split("|").map(n=>n.replace(/(\(\?[!=<][^)]+\))/g,"").length===0?"\uFFFD":n).join("|"),r=new RegExp(t,"img"),o=(n,i,a)=>`${i}${a}`;return n=>{n=n.replace(/[\s*+\-:~^]+/g," ").trim();let i=new RegExp(`(^|${e.separator}|)(${n.replace(/[|\\{}()[\]^$+*?.-]/g,"\\$&").replace(r,"|")})`,"img");return a=>(0,ri.default)(a).replace(i,o).replace(/<\/mark>(\s+)]*>/img,"$1")}}function It(e){return e.type===1}function dr(e){return 
e.type===3}function ni(e,t){let r=vn(e);return S(I(location.protocol!=="file:"),Ve("search")).pipe(Ae(o=>o),b(()=>t)).subscribe(({config:o,docs:n})=>r.next({type:0,data:{config:o,docs:n,options:{suggest:G("search.suggest")}}})),r}function ii({document$:e}){let t=ye(),r=Ne(new URL("../versions.json",t.base)).pipe(ve(()=>M)),o=r.pipe(m(n=>{let[,i]=t.base.match(/([^/]+)\/?$/);return n.find(({version:a,aliases:s})=>a===i||s.includes(i))||n[0]}));r.pipe(m(n=>new Map(n.map(i=>[`${new URL(`../${i.version}/`,t.base)}`,i]))),b(n=>d(document.body,"click").pipe(v(i=>!i.metaKey&&!i.ctrlKey),ee(o),b(([i,a])=>{if(i.target instanceof Element){let s=i.target.closest("a");if(s&&!s.target&&n.has(s.href)){let p=s.href;return!i.target.closest(".md-version")&&n.get(p)===a?M:(i.preventDefault(),I(p))}}return M}),b(i=>ur(new URL(i)).pipe(m(a=>{let p=xe().href.replace(t.base,i);return a.has(p.split("#")[0])?new URL(p):new URL(i)})))))).subscribe(n=>pt(n,!0)),z([r,o]).subscribe(([n,i])=>{P(".md-header__topic").appendChild(Mn(n,i))}),e.pipe(b(()=>o)).subscribe(n=>{var a;let i=__md_get("__outdated",sessionStorage);if(i===null){i=!0;let s=((a=t.version)==null?void 0:a.default)||"latest";Array.isArray(s)||(s=[s]);e:for(let p of s)for(let c of n.aliases.concat(n.version))if(new RegExp(p,"i").test(c)){i=!1;break e}__md_set("__outdated",i,sessionStorage)}if(i)for(let s of ae("outdated"))s.hidden=!1})}function ns(e,{worker$:t}){let{searchParams:r}=xe();r.has("q")&&(Je("search",!0),e.value=r.get("q"),e.focus(),Ve("search").pipe(Ae(i=>!i)).subscribe(()=>{let i=xe();i.searchParams.delete("q"),history.replaceState({},"",`${i}`)}));let o=et(e),n=S(t.pipe(Ae(It)),d(e,"keyup"),o).pipe(m(()=>e.value),K());return z([n,o]).pipe(m(([i,a])=>({value:i,focus:a})),B(1))}function ai(e,{worker$:t}){let r=new g,o=r.pipe(X(),ne(!0));z([t.pipe(Ae(It)),r],(i,a)=>a).pipe(Z("value")).subscribe(({value:i})=>t.next({type:2,data:i})),r.pipe(Z("focus")).subscribe(({focus:i})=>{i&&Je("search",i)}),d(e.form,"reset").pipe(U(o)).subscribe(()=>e.focus());let n=P("header [for=__search]");return d(n,"click").subscribe(()=>e.focus()),ns(e,{worker$:t}).pipe(E(i=>r.next(i)),L(()=>r.complete()),m(i=>R({ref:e},i)),B(1))}function si(e,{worker$:t,query$:r}){let o=new g,n=tn(e.parentElement).pipe(v(Boolean)),i=e.parentElement,a=P(":scope > :first-child",e),s=P(":scope > :last-child",e);Ve("search").subscribe(l=>s.setAttribute("role",l?"list":"presentation")),o.pipe(ee(r),Ur(t.pipe(Ae(It)))).subscribe(([{items:l},{value:f}])=>{switch(l.length){case 0:a.textContent=f.length?Ee("search.result.none"):Ee("search.result.placeholder");break;case 1:a.textContent=Ee("search.result.one");break;default:let u=sr(l.length);a.textContent=Ee("search.result.other",u)}});let p=o.pipe(E(()=>s.innerHTML=""),b(({items:l})=>S(I(...l.slice(0,10)),I(...l.slice(10)).pipe(Ye(4),Vr(n),b(([f])=>f)))),m(Tn),pe());return p.subscribe(l=>s.appendChild(l)),p.pipe(oe(l=>{let f=fe("details",l);return typeof f=="undefined"?M:d(f,"toggle").pipe(U(o),m(()=>f))})).subscribe(l=>{l.open===!1&&l.offsetTop<=i.scrollTop&&i.scrollTo({top:l.offsetTop})}),t.pipe(v(dr),m(({data:l})=>l)).pipe(E(l=>o.next(l)),L(()=>o.complete()),m(l=>R({ref:e},l)))}function is(e,{query$:t}){return t.pipe(m(({value:r})=>{let o=xe();return o.hash="",r=r.replace(/\s+/g,"+").replace(/&/g,"%26").replace(/=/g,"%3D"),o.search=`q=${r}`,{url:o}}))}function ci(e,t){let r=new g,o=r.pipe(X(),ne(!0));return 
r.subscribe(({url:n})=>{e.setAttribute("data-clipboard-text",e.href),e.href=`${n}`}),d(e,"click").pipe(U(o)).subscribe(n=>n.preventDefault()),is(e,t).pipe(E(n=>r.next(n)),L(()=>r.complete()),m(n=>R({ref:e},n)))}function pi(e,{worker$:t,keyboard$:r}){let o=new g,n=Se("search-query"),i=S(d(n,"keydown"),d(n,"focus")).pipe(be(se),m(()=>n.value),K());return o.pipe(We(i),m(([{suggest:s},p])=>{let c=p.split(/([\s-]+)/);if(s!=null&&s.length&&c[c.length-1]){let l=s[s.length-1];l.startsWith(c[c.length-1])&&(c[c.length-1]=l)}else c.length=0;return c})).subscribe(s=>e.innerHTML=s.join("").replace(/\s/g," ")),r.pipe(v(({mode:s})=>s==="search")).subscribe(s=>{switch(s.type){case"ArrowRight":e.innerText.length&&n.selectionStart===n.value.length&&(n.value=e.innerText);break}}),t.pipe(v(dr),m(({data:s})=>s)).pipe(E(s=>o.next(s)),L(()=>o.complete()),m(()=>({ref:e})))}function li(e,{index$:t,keyboard$:r}){let o=ye();try{let n=ni(o.search,t),i=Se("search-query",e),a=Se("search-result",e);d(e,"click").pipe(v(({target:p})=>p instanceof Element&&!!p.closest("a"))).subscribe(()=>Je("search",!1)),r.pipe(v(({mode:p})=>p==="search")).subscribe(p=>{let c=Re();switch(p.type){case"Enter":if(c===i){let l=new Map;for(let f of $(":first-child [href]",a)){let u=f.firstElementChild;l.set(f,parseFloat(u.getAttribute("data-md-score")))}if(l.size){let[[f]]=[...l].sort(([,u],[,h])=>h-u);f.click()}p.claim()}break;case"Escape":case"Tab":Je("search",!1),i.blur();break;case"ArrowUp":case"ArrowDown":if(typeof c=="undefined")i.focus();else{let l=[i,...$(":not(details) > [href], summary, details[open] [href]",a)],f=Math.max(0,(Math.max(0,l.indexOf(c))+l.length+(p.type==="ArrowUp"?-1:1))%l.length);l[f].focus()}p.claim();break;default:i!==Re()&&i.focus()}}),r.pipe(v(({mode:p})=>p==="global")).subscribe(p=>{switch(p.type){case"f":case"s":case"/":i.focus(),i.select(),p.claim();break}});let s=ai(i,{worker$:n});return S(s,si(a,{worker$:n,query$:s})).pipe(Pe(...ae("search-share",e).map(p=>ci(p,{query$:s})),...ae("search-suggest",e).map(p=>pi(p,{worker$:n,keyboard$:r}))))}catch(n){return e.hidden=!0,Ke}}function mi(e,{index$:t,location$:r}){return z([t,r.pipe(Q(xe()),v(o=>!!o.searchParams.get("h")))]).pipe(m(([o,n])=>oi(o.config)(n.searchParams.get("h"))),m(o=>{var a;let n=new Map,i=document.createNodeIterator(e,NodeFilter.SHOW_TEXT);for(let s=i.nextNode();s;s=i.nextNode())if((a=s.parentElement)!=null&&a.offsetHeight){let p=s.textContent,c=o(p);c.length>p.length&&n.set(s,c)}for(let[s,p]of n){let{childNodes:c}=x("span",null,p);s.replaceWith(...Array.from(c))}return{ref:e,nodes:n}}))}function as(e,{viewport$:t,main$:r}){let o=e.closest(".md-grid"),n=o.offsetTop-o.parentElement.offsetTop;return z([r,t]).pipe(m(([{offset:i,height:a},{offset:{y:s}}])=>(a=a+Math.min(n,Math.max(0,s-i))-n,{height:a,locked:s>=i+n})),K((i,a)=>i.height===a.height&&i.locked===a.locked))}function Jr(e,o){var n=o,{header$:t}=n,r=io(n,["header$"]);let i=P(".md-sidebar__scrollwrap",e),{y:a}=Ue(i);return C(()=>{let s=new g,p=s.pipe(X(),ne(!0)),c=s.pipe(Le(0,me));return c.pipe(ee(t)).subscribe({next([{height:l},{height:f}]){i.style.height=`${l-2*a}px`,e.style.top=`${f}px`},complete(){i.style.height="",e.style.top=""}}),c.pipe(Ae()).subscribe(()=>{for(let l of $(".md-nav__link--active[href]",e)){if(!l.clientHeight)continue;let f=l.closest(".md-sidebar__scrollwrap");if(typeof f!="undefined"){let u=l.offsetTop-f.offsetTop,{height:h}=ce(f);f.scrollTo({top:u-h/2})}}}),ue($("label[tabindex]",e)).pipe(oe(l=>d(l,"click").pipe(be(se),m(()=>l),U(p)))).subscribe(l=>{let 
f=P(`[id="${l.htmlFor}"]`);P(`[aria-labelledby="${l.id}"]`).setAttribute("aria-expanded",`${f.checked}`)}),as(e,r).pipe(E(l=>s.next(l)),L(()=>s.complete()),m(l=>R({ref:e},l)))})}function fi(e,t){if(typeof t!="undefined"){let r=`https://api.github.com/repos/${e}/${t}`;return Ct(Ne(`${r}/releases/latest`).pipe(ve(()=>M),m(o=>({version:o.tag_name})),Be({})),Ne(r).pipe(ve(()=>M),m(o=>({stars:o.stargazers_count,forks:o.forks_count})),Be({}))).pipe(m(([o,n])=>R(R({},o),n)))}else{let r=`https://api.github.com/users/${e}`;return Ne(r).pipe(m(o=>({repositories:o.public_repos})),Be({}))}}function ui(e,t){let r=`https://${e}/api/v4/projects/${encodeURIComponent(t)}`;return Ne(r).pipe(ve(()=>M),m(({star_count:o,forks_count:n})=>({stars:o,forks:n})),Be({}))}function di(e){let t=e.match(/^.+github\.com\/([^/]+)\/?([^/]+)?/i);if(t){let[,r,o]=t;return fi(r,o)}if(t=e.match(/^.+?([^/]*gitlab[^/]+)\/(.+?)\/?$/i),t){let[,r,o]=t;return ui(r,o)}return M}var ss;function cs(e){return ss||(ss=C(()=>{let t=__md_get("__source",sessionStorage);if(t)return I(t);if(ae("consent").length){let o=__md_get("__consent");if(!(o&&o.github))return M}return di(e.href).pipe(E(o=>__md_set("__source",o,sessionStorage)))}).pipe(ve(()=>M),v(t=>Object.keys(t).length>0),m(t=>({facts:t})),B(1)))}function hi(e){let t=P(":scope > :last-child",e);return C(()=>{let r=new g;return r.subscribe(({facts:o})=>{t.appendChild(Sn(o)),t.classList.add("md-source__repository--active")}),cs(e).pipe(E(o=>r.next(o)),L(()=>r.complete()),m(o=>R({ref:e},o)))})}function ps(e,{viewport$:t,header$:r}){return ge(document.body).pipe(b(()=>mr(e,{header$:r,viewport$:t})),m(({offset:{y:o}})=>({hidden:o>=10})),Z("hidden"))}function bi(e,t){return C(()=>{let r=new g;return r.subscribe({next({hidden:o}){e.hidden=o},complete(){e.hidden=!1}}),(G("navigation.tabs.sticky")?I({hidden:!1}):ps(e,t)).pipe(E(o=>r.next(o)),L(()=>r.complete()),m(o=>R({ref:e},o)))})}function ls(e,{viewport$:t,header$:r}){let o=new Map,n=$(".md-nav__link",e);for(let s of n){let p=decodeURIComponent(s.hash.substring(1)),c=fe(`[id="${p}"]`);typeof c!="undefined"&&o.set(s,c)}let i=r.pipe(Z("height"),m(({height:s})=>{let p=Se("main"),c=P(":scope > :first-child",p);return s+.8*(c.offsetTop-p.offsetTop)}),pe());return ge(document.body).pipe(Z("height"),b(s=>C(()=>{let p=[];return I([...o].reduce((c,[l,f])=>{for(;p.length&&o.get(p[p.length-1]).tagName>=f.tagName;)p.pop();let u=f.offsetTop;for(;!u&&f.parentElement;)f=f.parentElement,u=f.offsetTop;let h=f.offsetParent;for(;h;h=h.offsetParent)u+=h.offsetTop;return c.set([...p=[...p,l]].reverse(),u)},new Map))}).pipe(m(p=>new Map([...p].sort(([,c],[,l])=>c-l))),We(i),b(([p,c])=>t.pipe(jr(([l,f],{offset:{y:u},size:h})=>{let w=u+h.height>=Math.floor(s.height);for(;f.length;){let[,A]=f[0];if(A-c=u&&!w)f=[l.pop(),...f];else break}return[l,f]},[[],[...p]]),K((l,f)=>l[0]===f[0]&&l[1]===f[1])))))).pipe(m(([s,p])=>({prev:s.map(([c])=>c),next:p.map(([c])=>c)})),Q({prev:[],next:[]}),Ye(2,1),m(([s,p])=>s.prev.length{let i=new g,a=i.pipe(X(),ne(!0));if(i.subscribe(({prev:s,next:p})=>{for(let[c]of p)c.classList.remove("md-nav__link--passed"),c.classList.remove("md-nav__link--active");for(let[c,[l]]of s.entries())l.classList.add("md-nav__link--passed"),l.classList.toggle("md-nav__link--active",c===s.length-1)}),G("toc.follow")){let s=S(t.pipe(_e(1),m(()=>{})),t.pipe(_e(250),m(()=>"smooth")));i.pipe(v(({prev:p})=>p.length>0),We(o.pipe(be(se))),ee(s)).subscribe(([[{prev:p}],c])=>{let[l]=p[p.length-1];if(l.offsetHeight){let f=cr(l);if(typeof f!="undefined"){let 
u=l.offsetTop-f.offsetTop,{height:h}=ce(f);f.scrollTo({top:u-h/2,behavior:c})}}})}return G("navigation.tracking")&&t.pipe(U(a),Z("offset"),_e(250),Ce(1),U(n.pipe(Ce(1))),st({delay:250}),ee(i)).subscribe(([,{prev:s}])=>{let p=xe(),c=s[s.length-1];if(c&&c.length){let[l]=c,{hash:f}=new URL(l.href);p.hash!==f&&(p.hash=f,history.replaceState({},"",`${p}`))}else p.hash="",history.replaceState({},"",`${p}`)}),ls(e,{viewport$:t,header$:r}).pipe(E(s=>i.next(s)),L(()=>i.complete()),m(s=>R({ref:e},s)))})}function ms(e,{viewport$:t,main$:r,target$:o}){let n=t.pipe(m(({offset:{y:a}})=>a),Ye(2,1),m(([a,s])=>a>s&&s>0),K()),i=r.pipe(m(({active:a})=>a));return z([i,n]).pipe(m(([a,s])=>!(a&&s)),K(),U(o.pipe(Ce(1))),ne(!0),st({delay:250}),m(a=>({hidden:a})))}function gi(e,{viewport$:t,header$:r,main$:o,target$:n}){let i=new g,a=i.pipe(X(),ne(!0));return i.subscribe({next({hidden:s}){e.hidden=s,s?(e.setAttribute("tabindex","-1"),e.blur()):e.removeAttribute("tabindex")},complete(){e.style.top="",e.hidden=!0,e.removeAttribute("tabindex")}}),r.pipe(U(a),Z("height")).subscribe(({height:s})=>{e.style.top=`${s+16}px`}),d(e,"click").subscribe(s=>{s.preventDefault(),window.scrollTo({top:0})}),ms(e,{viewport$:t,main$:o,target$:n}).pipe(E(s=>i.next(s)),L(()=>i.complete()),m(s=>R({ref:e},s)))}function xi({document$:e,viewport$:t}){e.pipe(b(()=>$(".md-ellipsis")),oe(r=>tt(r).pipe(U(e.pipe(Ce(1))),v(o=>o),m(()=>r),Te(1))),v(r=>r.offsetWidth{let o=r.innerText,n=r.closest("a")||r;return n.title=o,lt(n,{viewport$:t}).pipe(U(e.pipe(Ce(1))),L(()=>n.removeAttribute("title")))})).subscribe(),e.pipe(b(()=>$(".md-status")),oe(r=>lt(r,{viewport$:t}))).subscribe()}function yi({document$:e,tablet$:t}){e.pipe(b(()=>$(".md-toggle--indeterminate")),E(r=>{r.indeterminate=!0,r.checked=!1}),oe(r=>d(r,"change").pipe(Dr(()=>r.classList.contains("md-toggle--indeterminate")),m(()=>r))),ee(t)).subscribe(([r,o])=>{r.classList.remove("md-toggle--indeterminate"),o&&(r.checked=!1)})}function fs(){return/(iPad|iPhone|iPod)/.test(navigator.userAgent)}function Ei({document$:e}){e.pipe(b(()=>$("[data-md-scrollfix]")),E(t=>t.removeAttribute("data-md-scrollfix")),v(fs),oe(t=>d(t,"touchstart").pipe(m(()=>t)))).subscribe(t=>{let r=t.scrollTop;r===0?t.scrollTop=1:r+t.offsetHeight===t.scrollHeight&&(t.scrollTop=r-1)})}function wi({viewport$:e,tablet$:t}){z([Ve("search"),t]).pipe(m(([r,o])=>r&&!o),b(r=>I(r).pipe(Ge(r?400:100))),ee(e)).subscribe(([r,{offset:{y:o}}])=>{if(r)document.body.setAttribute("data-md-scrolllock",""),document.body.style.top=`-${o}px`;else{let n=-1*parseInt(document.body.style.top,10);document.body.removeAttribute("data-md-scrolllock"),document.body.style.top="",n&&window.scrollTo(0,n)}})}Object.entries||(Object.entries=function(e){let t=[];for(let r of Object.keys(e))t.push([r,e[r]]);return t});Object.values||(Object.values=function(e){let t=[];for(let r of Object.keys(e))t.push(e[r]);return t});typeof Element!="undefined"&&(Element.prototype.scrollTo||(Element.prototype.scrollTo=function(e,t){typeof e=="object"?(this.scrollLeft=e.left,this.scrollTop=e.top):(this.scrollLeft=e,this.scrollTop=t)}),Element.prototype.replaceWith||(Element.prototype.replaceWith=function(...e){let t=this.parentNode;if(t){e.length===0&&t.removeChild(this);for(let r=e.length-1;r>=0;r--){let o=e[r];typeof o=="string"?o=document.createTextNode(o):o.parentNode&&o.parentNode.removeChild(o),r?t.insertBefore(this.previousSibling,o):t.replaceChild(o,this)}}}));function us(){return location.protocol==="file:"?wt(`${new 
URL("search/search_index.js",Xr.base)}`).pipe(m(()=>__index),B(1)):Ne(new URL("search/search_index.json",Xr.base))}document.documentElement.classList.remove("no-js");document.documentElement.classList.add("js");var ot=Yo(),jt=nn(),Ot=cn(jt),Zr=on(),Oe=bn(),hr=$t("(min-width: 960px)"),Si=$t("(min-width: 1220px)"),Oi=pn(),Xr=ye(),Mi=document.forms.namedItem("search")?us():Ke,eo=new g;Bn({alert$:eo});var to=new g;G("navigation.instant")&&Zn({location$:jt,viewport$:Oe,progress$:to}).subscribe(ot);var Ti;((Ti=Xr.version)==null?void 0:Ti.provider)==="mike"&&ii({document$:ot});S(jt,Ot).pipe(Ge(125)).subscribe(()=>{Je("drawer",!1),Je("search",!1)});Zr.pipe(v(({mode:e})=>e==="global")).subscribe(e=>{switch(e.type){case"p":case",":let t=fe("link[rel=prev]");typeof t!="undefined"&&pt(t);break;case"n":case".":let r=fe("link[rel=next]");typeof r!="undefined"&&pt(r);break;case"Enter":let o=Re();o instanceof HTMLLabelElement&&o.click()}});xi({viewport$:Oe,document$:ot});yi({document$:ot,tablet$:hr});Ei({document$:ot});wi({viewport$:Oe,tablet$:hr});var rt=Nn(Se("header"),{viewport$:Oe}),Ft=ot.pipe(m(()=>Se("main")),b(e=>Qn(e,{viewport$:Oe,header$:rt})),B(1)),ds=S(...ae("consent").map(e=>xn(e,{target$:Ot})),...ae("dialog").map(e=>Dn(e,{alert$:eo})),...ae("header").map(e=>zn(e,{viewport$:Oe,header$:rt,main$:Ft})),...ae("palette").map(e=>Kn(e)),...ae("progress").map(e=>Yn(e,{progress$:to})),...ae("search").map(e=>li(e,{index$:Mi,keyboard$:Zr})),...ae("source").map(e=>hi(e))),hs=C(()=>S(...ae("announce").map(e=>gn(e)),...ae("content").map(e=>Un(e,{viewport$:Oe,target$:Ot,print$:Oi})),...ae("content").map(e=>G("search.highlight")?mi(e,{index$:Mi,location$:jt}):M),...ae("header-title").map(e=>qn(e,{viewport$:Oe,header$:rt})),...ae("sidebar").map(e=>e.getAttribute("data-md-type")==="navigation"?Nr(Si,()=>Jr(e,{viewport$:Oe,header$:rt,main$:Ft})):Nr(hr,()=>Jr(e,{viewport$:Oe,header$:rt,main$:Ft}))),...ae("tabs").map(e=>bi(e,{viewport$:Oe,header$:rt})),...ae("toc").map(e=>vi(e,{viewport$:Oe,header$:rt,main$:Ft,target$:Ot})),...ae("top").map(e=>gi(e,{viewport$:Oe,header$:rt,main$:Ft,target$:Ot})))),Li=ot.pipe(b(()=>hs),Pe(ds),B(1));Li.subscribe();window.document$=ot;window.location$=jt;window.target$=Ot;window.keyboard$=Zr;window.viewport$=Oe;window.tablet$=hr;window.screen$=Si;window.print$=Oi;window.alert$=eo;window.progress$=to;window.component$=Li;})(); +//# sourceMappingURL=bundle.ebd0bdb7.min.js.map + diff --git a/1.3.0/assets/javascripts/bundle.ebd0bdb7.min.js.map b/1.3.0/assets/javascripts/bundle.ebd0bdb7.min.js.map new file mode 100644 index 00000000..cec23df7 --- /dev/null +++ b/1.3.0/assets/javascripts/bundle.ebd0bdb7.min.js.map @@ -0,0 +1,7 @@ +{ + "version": 3, + "sources": ["node_modules/focus-visible/dist/focus-visible.js", "node_modules/clipboard/dist/clipboard.js", "node_modules/escape-html/index.js", "src/templates/assets/javascripts/bundle.ts", "node_modules/rxjs/node_modules/tslib/tslib.es6.js", "node_modules/rxjs/src/internal/util/isFunction.ts", "node_modules/rxjs/src/internal/util/createErrorClass.ts", "node_modules/rxjs/src/internal/util/UnsubscriptionError.ts", "node_modules/rxjs/src/internal/util/arrRemove.ts", "node_modules/rxjs/src/internal/Subscription.ts", "node_modules/rxjs/src/internal/config.ts", "node_modules/rxjs/src/internal/scheduler/timeoutProvider.ts", "node_modules/rxjs/src/internal/util/reportUnhandledError.ts", "node_modules/rxjs/src/internal/util/noop.ts", "node_modules/rxjs/src/internal/NotificationFactories.ts", "node_modules/rxjs/src/internal/util/errorContext.ts", 
"node_modules/rxjs/src/internal/Subscriber.ts", "node_modules/rxjs/src/internal/symbol/observable.ts", "node_modules/rxjs/src/internal/util/identity.ts", "node_modules/rxjs/src/internal/util/pipe.ts", "node_modules/rxjs/src/internal/Observable.ts", "node_modules/rxjs/src/internal/util/lift.ts", "node_modules/rxjs/src/internal/operators/OperatorSubscriber.ts", "node_modules/rxjs/src/internal/scheduler/animationFrameProvider.ts", "node_modules/rxjs/src/internal/util/ObjectUnsubscribedError.ts", "node_modules/rxjs/src/internal/Subject.ts", "node_modules/rxjs/src/internal/BehaviorSubject.ts", "node_modules/rxjs/src/internal/scheduler/dateTimestampProvider.ts", "node_modules/rxjs/src/internal/ReplaySubject.ts", "node_modules/rxjs/src/internal/scheduler/Action.ts", "node_modules/rxjs/src/internal/scheduler/intervalProvider.ts", "node_modules/rxjs/src/internal/scheduler/AsyncAction.ts", "node_modules/rxjs/src/internal/Scheduler.ts", "node_modules/rxjs/src/internal/scheduler/AsyncScheduler.ts", "node_modules/rxjs/src/internal/scheduler/async.ts", "node_modules/rxjs/src/internal/scheduler/QueueAction.ts", "node_modules/rxjs/src/internal/scheduler/QueueScheduler.ts", "node_modules/rxjs/src/internal/scheduler/queue.ts", "node_modules/rxjs/src/internal/scheduler/AnimationFrameAction.ts", "node_modules/rxjs/src/internal/scheduler/AnimationFrameScheduler.ts", "node_modules/rxjs/src/internal/scheduler/animationFrame.ts", "node_modules/rxjs/src/internal/observable/empty.ts", "node_modules/rxjs/src/internal/util/isScheduler.ts", "node_modules/rxjs/src/internal/util/args.ts", "node_modules/rxjs/src/internal/util/isArrayLike.ts", "node_modules/rxjs/src/internal/util/isPromise.ts", "node_modules/rxjs/src/internal/util/isInteropObservable.ts", "node_modules/rxjs/src/internal/util/isAsyncIterable.ts", "node_modules/rxjs/src/internal/util/throwUnobservableError.ts", "node_modules/rxjs/src/internal/symbol/iterator.ts", "node_modules/rxjs/src/internal/util/isIterable.ts", "node_modules/rxjs/src/internal/util/isReadableStreamLike.ts", "node_modules/rxjs/src/internal/observable/innerFrom.ts", "node_modules/rxjs/src/internal/util/executeSchedule.ts", "node_modules/rxjs/src/internal/operators/observeOn.ts", "node_modules/rxjs/src/internal/operators/subscribeOn.ts", "node_modules/rxjs/src/internal/scheduled/scheduleObservable.ts", "node_modules/rxjs/src/internal/scheduled/schedulePromise.ts", "node_modules/rxjs/src/internal/scheduled/scheduleArray.ts", "node_modules/rxjs/src/internal/scheduled/scheduleIterable.ts", "node_modules/rxjs/src/internal/scheduled/scheduleAsyncIterable.ts", "node_modules/rxjs/src/internal/scheduled/scheduleReadableStreamLike.ts", "node_modules/rxjs/src/internal/scheduled/scheduled.ts", "node_modules/rxjs/src/internal/observable/from.ts", "node_modules/rxjs/src/internal/observable/of.ts", "node_modules/rxjs/src/internal/observable/throwError.ts", "node_modules/rxjs/src/internal/util/EmptyError.ts", "node_modules/rxjs/src/internal/util/isDate.ts", "node_modules/rxjs/src/internal/operators/map.ts", "node_modules/rxjs/src/internal/util/mapOneOrManyArgs.ts", "node_modules/rxjs/src/internal/util/argsArgArrayOrObject.ts", "node_modules/rxjs/src/internal/util/createObject.ts", "node_modules/rxjs/src/internal/observable/combineLatest.ts", "node_modules/rxjs/src/internal/operators/mergeInternals.ts", "node_modules/rxjs/src/internal/operators/mergeMap.ts", "node_modules/rxjs/src/internal/operators/mergeAll.ts", "node_modules/rxjs/src/internal/operators/concatAll.ts", 
"node_modules/rxjs/src/internal/observable/concat.ts", "node_modules/rxjs/src/internal/observable/defer.ts", "node_modules/rxjs/src/internal/observable/fromEvent.ts", "node_modules/rxjs/src/internal/observable/fromEventPattern.ts", "node_modules/rxjs/src/internal/observable/timer.ts", "node_modules/rxjs/src/internal/observable/merge.ts", "node_modules/rxjs/src/internal/observable/never.ts", "node_modules/rxjs/src/internal/util/argsOrArgArray.ts", "node_modules/rxjs/src/internal/operators/filter.ts", "node_modules/rxjs/src/internal/observable/zip.ts", "node_modules/rxjs/src/internal/operators/audit.ts", "node_modules/rxjs/src/internal/operators/auditTime.ts", "node_modules/rxjs/src/internal/operators/bufferCount.ts", "node_modules/rxjs/src/internal/operators/catchError.ts", "node_modules/rxjs/src/internal/operators/scanInternals.ts", "node_modules/rxjs/src/internal/operators/combineLatest.ts", "node_modules/rxjs/src/internal/operators/combineLatestWith.ts", "node_modules/rxjs/src/internal/operators/debounce.ts", "node_modules/rxjs/src/internal/operators/debounceTime.ts", "node_modules/rxjs/src/internal/operators/defaultIfEmpty.ts", "node_modules/rxjs/src/internal/operators/take.ts", "node_modules/rxjs/src/internal/operators/ignoreElements.ts", "node_modules/rxjs/src/internal/operators/mapTo.ts", "node_modules/rxjs/src/internal/operators/delayWhen.ts", "node_modules/rxjs/src/internal/operators/delay.ts", "node_modules/rxjs/src/internal/operators/distinctUntilChanged.ts", "node_modules/rxjs/src/internal/operators/distinctUntilKeyChanged.ts", "node_modules/rxjs/src/internal/operators/throwIfEmpty.ts", "node_modules/rxjs/src/internal/operators/endWith.ts", "node_modules/rxjs/src/internal/operators/finalize.ts", "node_modules/rxjs/src/internal/operators/first.ts", "node_modules/rxjs/src/internal/operators/takeLast.ts", "node_modules/rxjs/src/internal/operators/merge.ts", "node_modules/rxjs/src/internal/operators/mergeWith.ts", "node_modules/rxjs/src/internal/operators/repeat.ts", "node_modules/rxjs/src/internal/operators/scan.ts", "node_modules/rxjs/src/internal/operators/share.ts", "node_modules/rxjs/src/internal/operators/shareReplay.ts", "node_modules/rxjs/src/internal/operators/skip.ts", "node_modules/rxjs/src/internal/operators/skipUntil.ts", "node_modules/rxjs/src/internal/operators/startWith.ts", "node_modules/rxjs/src/internal/operators/switchMap.ts", "node_modules/rxjs/src/internal/operators/takeUntil.ts", "node_modules/rxjs/src/internal/operators/takeWhile.ts", "node_modules/rxjs/src/internal/operators/tap.ts", "node_modules/rxjs/src/internal/operators/throttle.ts", "node_modules/rxjs/src/internal/operators/throttleTime.ts", "node_modules/rxjs/src/internal/operators/withLatestFrom.ts", "node_modules/rxjs/src/internal/operators/zip.ts", "node_modules/rxjs/src/internal/operators/zipWith.ts", "src/templates/assets/javascripts/browser/document/index.ts", "src/templates/assets/javascripts/browser/element/_/index.ts", "src/templates/assets/javascripts/browser/element/focus/index.ts", "src/templates/assets/javascripts/browser/element/hover/index.ts", "src/templates/assets/javascripts/utilities/h/index.ts", "src/templates/assets/javascripts/utilities/round/index.ts", "src/templates/assets/javascripts/browser/script/index.ts", "src/templates/assets/javascripts/browser/element/size/_/index.ts", "src/templates/assets/javascripts/browser/element/size/content/index.ts", "src/templates/assets/javascripts/browser/element/offset/_/index.ts", 
"src/templates/assets/javascripts/browser/element/offset/content/index.ts", "src/templates/assets/javascripts/browser/element/visibility/index.ts", "src/templates/assets/javascripts/browser/toggle/index.ts", "src/templates/assets/javascripts/browser/keyboard/index.ts", "src/templates/assets/javascripts/browser/location/_/index.ts", "src/templates/assets/javascripts/browser/location/hash/index.ts", "src/templates/assets/javascripts/browser/media/index.ts", "src/templates/assets/javascripts/browser/request/index.ts", "src/templates/assets/javascripts/browser/viewport/offset/index.ts", "src/templates/assets/javascripts/browser/viewport/size/index.ts", "src/templates/assets/javascripts/browser/viewport/_/index.ts", "src/templates/assets/javascripts/browser/viewport/at/index.ts", "src/templates/assets/javascripts/browser/worker/index.ts", "src/templates/assets/javascripts/_/index.ts", "src/templates/assets/javascripts/components/_/index.ts", "src/templates/assets/javascripts/components/announce/index.ts", "src/templates/assets/javascripts/components/consent/index.ts", "src/templates/assets/javascripts/templates/tooltip/index.tsx", "src/templates/assets/javascripts/templates/annotation/index.tsx", "src/templates/assets/javascripts/templates/clipboard/index.tsx", "src/templates/assets/javascripts/templates/search/index.tsx", "src/templates/assets/javascripts/templates/source/index.tsx", "src/templates/assets/javascripts/templates/tabbed/index.tsx", "src/templates/assets/javascripts/templates/table/index.tsx", "src/templates/assets/javascripts/templates/version/index.tsx", "src/templates/assets/javascripts/components/tooltip2/index.ts", "src/templates/assets/javascripts/components/content/annotation/_/index.ts", "src/templates/assets/javascripts/components/content/annotation/list/index.ts", "src/templates/assets/javascripts/components/content/annotation/block/index.ts", "src/templates/assets/javascripts/components/content/code/_/index.ts", "src/templates/assets/javascripts/components/content/details/index.ts", "src/templates/assets/javascripts/components/content/mermaid/index.css", "src/templates/assets/javascripts/components/content/mermaid/index.ts", "src/templates/assets/javascripts/components/content/table/index.ts", "src/templates/assets/javascripts/components/content/tabs/index.ts", "src/templates/assets/javascripts/components/content/_/index.ts", "src/templates/assets/javascripts/components/dialog/index.ts", "src/templates/assets/javascripts/components/tooltip/index.ts", "src/templates/assets/javascripts/components/header/_/index.ts", "src/templates/assets/javascripts/components/header/title/index.ts", "src/templates/assets/javascripts/components/main/index.ts", "src/templates/assets/javascripts/components/palette/index.ts", "src/templates/assets/javascripts/components/progress/index.ts", "src/templates/assets/javascripts/integrations/clipboard/index.ts", "src/templates/assets/javascripts/integrations/sitemap/index.ts", "src/templates/assets/javascripts/integrations/instant/index.ts", "src/templates/assets/javascripts/integrations/search/highlighter/index.ts", "src/templates/assets/javascripts/integrations/search/worker/message/index.ts", "src/templates/assets/javascripts/integrations/search/worker/_/index.ts", "src/templates/assets/javascripts/integrations/version/index.ts", "src/templates/assets/javascripts/components/search/query/index.ts", "src/templates/assets/javascripts/components/search/result/index.ts", "src/templates/assets/javascripts/components/search/share/index.ts", 
"src/templates/assets/javascripts/components/search/suggest/index.ts", "src/templates/assets/javascripts/components/search/_/index.ts", "src/templates/assets/javascripts/components/search/highlight/index.ts", "src/templates/assets/javascripts/components/sidebar/index.ts", "src/templates/assets/javascripts/components/source/facts/github/index.ts", "src/templates/assets/javascripts/components/source/facts/gitlab/index.ts", "src/templates/assets/javascripts/components/source/facts/_/index.ts", "src/templates/assets/javascripts/components/source/_/index.ts", "src/templates/assets/javascripts/components/tabs/index.ts", "src/templates/assets/javascripts/components/toc/index.ts", "src/templates/assets/javascripts/components/top/index.ts", "src/templates/assets/javascripts/patches/ellipsis/index.ts", "src/templates/assets/javascripts/patches/indeterminate/index.ts", "src/templates/assets/javascripts/patches/scrollfix/index.ts", "src/templates/assets/javascripts/patches/scrolllock/index.ts", "src/templates/assets/javascripts/polyfills/index.ts"], + "sourcesContent": ["(function (global, factory) {\n typeof exports === 'object' && typeof module !== 'undefined' ? factory() :\n typeof define === 'function' && define.amd ? define(factory) :\n (factory());\n}(this, (function () { 'use strict';\n\n /**\n * Applies the :focus-visible polyfill at the given scope.\n * A scope in this case is either the top-level Document or a Shadow Root.\n *\n * @param {(Document|ShadowRoot)} scope\n * @see https://github.com/WICG/focus-visible\n */\n function applyFocusVisiblePolyfill(scope) {\n var hadKeyboardEvent = true;\n var hadFocusVisibleRecently = false;\n var hadFocusVisibleRecentlyTimeout = null;\n\n var inputTypesAllowlist = {\n text: true,\n search: true,\n url: true,\n tel: true,\n email: true,\n password: true,\n number: true,\n date: true,\n month: true,\n week: true,\n time: true,\n datetime: true,\n 'datetime-local': true\n };\n\n /**\n * Helper function for legacy browsers and iframes which sometimes focus\n * elements like document, body, and non-interactive SVG.\n * @param {Element} el\n */\n function isValidFocusTarget(el) {\n if (\n el &&\n el !== document &&\n el.nodeName !== 'HTML' &&\n el.nodeName !== 'BODY' &&\n 'classList' in el &&\n 'contains' in el.classList\n ) {\n return true;\n }\n return false;\n }\n\n /**\n * Computes whether the given element should automatically trigger the\n * `focus-visible` class being added, i.e. 
whether it should always match\n * `:focus-visible` when focused.\n * @param {Element} el\n * @return {boolean}\n */\n function focusTriggersKeyboardModality(el) {\n var type = el.type;\n var tagName = el.tagName;\n\n if (tagName === 'INPUT' && inputTypesAllowlist[type] && !el.readOnly) {\n return true;\n }\n\n if (tagName === 'TEXTAREA' && !el.readOnly) {\n return true;\n }\n\n if (el.isContentEditable) {\n return true;\n }\n\n return false;\n }\n\n /**\n * Add the `focus-visible` class to the given element if it was not added by\n * the author.\n * @param {Element} el\n */\n function addFocusVisibleClass(el) {\n if (el.classList.contains('focus-visible')) {\n return;\n }\n el.classList.add('focus-visible');\n el.setAttribute('data-focus-visible-added', '');\n }\n\n /**\n * Remove the `focus-visible` class from the given element if it was not\n * originally added by the author.\n * @param {Element} el\n */\n function removeFocusVisibleClass(el) {\n if (!el.hasAttribute('data-focus-visible-added')) {\n return;\n }\n el.classList.remove('focus-visible');\n el.removeAttribute('data-focus-visible-added');\n }\n\n /**\n * If the most recent user interaction was via the keyboard;\n * and the key press did not include a meta, alt/option, or control key;\n * then the modality is keyboard. Otherwise, the modality is not keyboard.\n * Apply `focus-visible` to any current active element and keep track\n * of our keyboard modality state with `hadKeyboardEvent`.\n * @param {KeyboardEvent} e\n */\n function onKeyDown(e) {\n if (e.metaKey || e.altKey || e.ctrlKey) {\n return;\n }\n\n if (isValidFocusTarget(scope.activeElement)) {\n addFocusVisibleClass(scope.activeElement);\n }\n\n hadKeyboardEvent = true;\n }\n\n /**\n * If at any point a user clicks with a pointing device, ensure that we change\n * the modality away from keyboard.\n * This avoids the situation where a user presses a key on an already focused\n * element, and then clicks on a different element, focusing it with a\n * pointing device, while we still think we're in keyboard modality.\n * @param {Event} e\n */\n function onPointerDown(e) {\n hadKeyboardEvent = false;\n }\n\n /**\n * On `focus`, add the `focus-visible` class to the target if:\n * - the target received focus as a result of keyboard navigation, or\n * - the event target is an element that will likely require interaction\n * via the keyboard (e.g. 
a text box)\n * @param {Event} e\n */\n function onFocus(e) {\n // Prevent IE from focusing the document or HTML element.\n if (!isValidFocusTarget(e.target)) {\n return;\n }\n\n if (hadKeyboardEvent || focusTriggersKeyboardModality(e.target)) {\n addFocusVisibleClass(e.target);\n }\n }\n\n /**\n * On `blur`, remove the `focus-visible` class from the target.\n * @param {Event} e\n */\n function onBlur(e) {\n if (!isValidFocusTarget(e.target)) {\n return;\n }\n\n if (\n e.target.classList.contains('focus-visible') ||\n e.target.hasAttribute('data-focus-visible-added')\n ) {\n // To detect a tab/window switch, we look for a blur event followed\n // rapidly by a visibility change.\n // If we don't see a visibility change within 100ms, it's probably a\n // regular focus change.\n hadFocusVisibleRecently = true;\n window.clearTimeout(hadFocusVisibleRecentlyTimeout);\n hadFocusVisibleRecentlyTimeout = window.setTimeout(function() {\n hadFocusVisibleRecently = false;\n }, 100);\n removeFocusVisibleClass(e.target);\n }\n }\n\n /**\n * If the user changes tabs, keep track of whether or not the previously\n * focused element had .focus-visible.\n * @param {Event} e\n */\n function onVisibilityChange(e) {\n if (document.visibilityState === 'hidden') {\n // If the tab becomes active again, the browser will handle calling focus\n // on the element (Safari actually calls it twice).\n // If this tab change caused a blur on an element with focus-visible,\n // re-apply the class when the user switches back to the tab.\n if (hadFocusVisibleRecently) {\n hadKeyboardEvent = true;\n }\n addInitialPointerMoveListeners();\n }\n }\n\n /**\n * Add a group of listeners to detect usage of any pointing devices.\n * These listeners will be added when the polyfill first loads, and anytime\n * the window is blurred, so that they are active when the window regains\n * focus.\n */\n function addInitialPointerMoveListeners() {\n document.addEventListener('mousemove', onInitialPointerMove);\n document.addEventListener('mousedown', onInitialPointerMove);\n document.addEventListener('mouseup', onInitialPointerMove);\n document.addEventListener('pointermove', onInitialPointerMove);\n document.addEventListener('pointerdown', onInitialPointerMove);\n document.addEventListener('pointerup', onInitialPointerMove);\n document.addEventListener('touchmove', onInitialPointerMove);\n document.addEventListener('touchstart', onInitialPointerMove);\n document.addEventListener('touchend', onInitialPointerMove);\n }\n\n function removeInitialPointerMoveListeners() {\n document.removeEventListener('mousemove', onInitialPointerMove);\n document.removeEventListener('mousedown', onInitialPointerMove);\n document.removeEventListener('mouseup', onInitialPointerMove);\n document.removeEventListener('pointermove', onInitialPointerMove);\n document.removeEventListener('pointerdown', onInitialPointerMove);\n document.removeEventListener('pointerup', onInitialPointerMove);\n document.removeEventListener('touchmove', onInitialPointerMove);\n document.removeEventListener('touchstart', onInitialPointerMove);\n document.removeEventListener('touchend', onInitialPointerMove);\n }\n\n /**\n * When the polfyill first loads, assume the user is in keyboard modality.\n * If any event is received from a pointing device (e.g. 
mouse, pointer,\n * touch), turn off keyboard modality.\n * This accounts for situations where focus enters the page from the URL bar.\n * @param {Event} e\n */\n function onInitialPointerMove(e) {\n // Work around a Safari quirk that fires a mousemove on whenever the\n // window blurs, even if you're tabbing out of the page. \u00AF\\_(\u30C4)_/\u00AF\n if (e.target.nodeName && e.target.nodeName.toLowerCase() === 'html') {\n return;\n }\n\n hadKeyboardEvent = false;\n removeInitialPointerMoveListeners();\n }\n\n // For some kinds of state, we are interested in changes at the global scope\n // only. For example, global pointer input, global key presses and global\n // visibility change should affect the state at every scope:\n document.addEventListener('keydown', onKeyDown, true);\n document.addEventListener('mousedown', onPointerDown, true);\n document.addEventListener('pointerdown', onPointerDown, true);\n document.addEventListener('touchstart', onPointerDown, true);\n document.addEventListener('visibilitychange', onVisibilityChange, true);\n\n addInitialPointerMoveListeners();\n\n // For focus and blur, we specifically care about state changes in the local\n // scope. This is because focus / blur events that originate from within a\n // shadow root are not re-dispatched from the host element if it was already\n // the active element in its own scope:\n scope.addEventListener('focus', onFocus, true);\n scope.addEventListener('blur', onBlur, true);\n\n // We detect that a node is a ShadowRoot by ensuring that it is a\n // DocumentFragment and also has a host property. This check covers native\n // implementation and polyfill implementation transparently. If we only cared\n // about the native implementation, we could just check if the scope was\n // an instance of a ShadowRoot.\n if (scope.nodeType === Node.DOCUMENT_FRAGMENT_NODE && scope.host) {\n // Since a ShadowRoot is a special kind of DocumentFragment, it does not\n // have a root element to add a class to. So, we add this attribute to the\n // host element instead:\n scope.host.setAttribute('data-js-focus-visible', '');\n } else if (scope.nodeType === Node.DOCUMENT_NODE) {\n document.documentElement.classList.add('js-focus-visible');\n document.documentElement.setAttribute('data-js-focus-visible', '');\n }\n }\n\n // It is important to wrap all references to global window and document in\n // these checks to support server-side rendering use cases\n // @see https://github.com/WICG/focus-visible/issues/199\n if (typeof window !== 'undefined' && typeof document !== 'undefined') {\n // Make the polyfill helper globally available. 
This can be used as a signal\n // to interested libraries that wish to coordinate with the polyfill for e.g.,\n // applying the polyfill to a shadow root:\n window.applyFocusVisiblePolyfill = applyFocusVisiblePolyfill;\n\n // Notify interested libraries of the polyfill's presence, in case the\n // polyfill was loaded lazily:\n var event;\n\n try {\n event = new CustomEvent('focus-visible-polyfill-ready');\n } catch (error) {\n // IE11 does not support using CustomEvent as a constructor directly:\n event = document.createEvent('CustomEvent');\n event.initCustomEvent('focus-visible-polyfill-ready', false, false, {});\n }\n\n window.dispatchEvent(event);\n }\n\n if (typeof document !== 'undefined') {\n // Apply the polyfill to the global document, so that no JavaScript\n // coordination is required to use the polyfill in the top-level document:\n applyFocusVisiblePolyfill(document);\n }\n\n})));\n", "/*!\n * clipboard.js v2.0.11\n * https://clipboardjs.com/\n *\n * Licensed MIT \u00A9 Zeno Rocha\n */\n(function webpackUniversalModuleDefinition(root, factory) {\n\tif(typeof exports === 'object' && typeof module === 'object')\n\t\tmodule.exports = factory();\n\telse if(typeof define === 'function' && define.amd)\n\t\tdefine([], factory);\n\telse if(typeof exports === 'object')\n\t\texports[\"ClipboardJS\"] = factory();\n\telse\n\t\troot[\"ClipboardJS\"] = factory();\n})(this, function() {\nreturn /******/ (function() { // webpackBootstrap\n/******/ \tvar __webpack_modules__ = ({\n\n/***/ 686:\n/***/ (function(__unused_webpack_module, __webpack_exports__, __webpack_require__) {\n\n\"use strict\";\n\n// EXPORTS\n__webpack_require__.d(__webpack_exports__, {\n \"default\": function() { return /* binding */ clipboard; }\n});\n\n// EXTERNAL MODULE: ./node_modules/tiny-emitter/index.js\nvar tiny_emitter = __webpack_require__(279);\nvar tiny_emitter_default = /*#__PURE__*/__webpack_require__.n(tiny_emitter);\n// EXTERNAL MODULE: ./node_modules/good-listener/src/listen.js\nvar listen = __webpack_require__(370);\nvar listen_default = /*#__PURE__*/__webpack_require__.n(listen);\n// EXTERNAL MODULE: ./node_modules/select/src/select.js\nvar src_select = __webpack_require__(817);\nvar select_default = /*#__PURE__*/__webpack_require__.n(src_select);\n;// CONCATENATED MODULE: ./src/common/command.js\n/**\n * Executes a given operation type.\n * @param {String} type\n * @return {Boolean}\n */\nfunction command(type) {\n try {\n return document.execCommand(type);\n } catch (err) {\n return false;\n }\n}\n;// CONCATENATED MODULE: ./src/actions/cut.js\n\n\n/**\n * Cut action wrapper.\n * @param {String|HTMLElement} target\n * @return {String}\n */\n\nvar ClipboardActionCut = function ClipboardActionCut(target) {\n var selectedText = select_default()(target);\n command('cut');\n return selectedText;\n};\n\n/* harmony default export */ var actions_cut = (ClipboardActionCut);\n;// CONCATENATED MODULE: ./src/common/create-fake-element.js\n/**\n * Creates a fake textarea element with a value.\n * @param {String} value\n * @return {HTMLElement}\n */\nfunction createFakeElement(value) {\n var isRTL = document.documentElement.getAttribute('dir') === 'rtl';\n var fakeElement = document.createElement('textarea'); // Prevent zooming on iOS\n\n fakeElement.style.fontSize = '12pt'; // Reset box model\n\n fakeElement.style.border = '0';\n fakeElement.style.padding = '0';\n fakeElement.style.margin = '0'; // Move element out of screen horizontally\n\n fakeElement.style.position = 'absolute';\n fakeElement.style[isRTL ? 
'right' : 'left'] = '-9999px'; // Move element to the same position vertically\n\n var yPosition = window.pageYOffset || document.documentElement.scrollTop;\n fakeElement.style.top = \"\".concat(yPosition, \"px\");\n fakeElement.setAttribute('readonly', '');\n fakeElement.value = value;\n return fakeElement;\n}\n;// CONCATENATED MODULE: ./src/actions/copy.js\n\n\n\n/**\n * Create fake copy action wrapper using a fake element.\n * @param {String} target\n * @param {Object} options\n * @return {String}\n */\n\nvar fakeCopyAction = function fakeCopyAction(value, options) {\n var fakeElement = createFakeElement(value);\n options.container.appendChild(fakeElement);\n var selectedText = select_default()(fakeElement);\n command('copy');\n fakeElement.remove();\n return selectedText;\n};\n/**\n * Copy action wrapper.\n * @param {String|HTMLElement} target\n * @param {Object} options\n * @return {String}\n */\n\n\nvar ClipboardActionCopy = function ClipboardActionCopy(target) {\n var options = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {\n container: document.body\n };\n var selectedText = '';\n\n if (typeof target === 'string') {\n selectedText = fakeCopyAction(target, options);\n } else if (target instanceof HTMLInputElement && !['text', 'search', 'url', 'tel', 'password'].includes(target === null || target === void 0 ? void 0 : target.type)) {\n // If input type doesn't support `setSelectionRange`. Simulate it. https://developer.mozilla.org/en-US/docs/Web/API/HTMLInputElement/setSelectionRange\n selectedText = fakeCopyAction(target.value, options);\n } else {\n selectedText = select_default()(target);\n command('copy');\n }\n\n return selectedText;\n};\n\n/* harmony default export */ var actions_copy = (ClipboardActionCopy);\n;// CONCATENATED MODULE: ./src/actions/default.js\nfunction _typeof(obj) { \"@babel/helpers - typeof\"; if (typeof Symbol === \"function\" && typeof Symbol.iterator === \"symbol\") { _typeof = function _typeof(obj) { return typeof obj; }; } else { _typeof = function _typeof(obj) { return obj && typeof Symbol === \"function\" && obj.constructor === Symbol && obj !== Symbol.prototype ? \"symbol\" : typeof obj; }; } return _typeof(obj); }\n\n\n\n/**\n * Inner function which performs selection from either `text` or `target`\n * properties and then executes copy or cut operations.\n * @param {Object} options\n */\n\nvar ClipboardActionDefault = function ClipboardActionDefault() {\n var options = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : {};\n // Defines base properties passed from constructor.\n var _options$action = options.action,\n action = _options$action === void 0 ? 'copy' : _options$action,\n container = options.container,\n target = options.target,\n text = options.text; // Sets the `action` to be performed which can be either 'copy' or 'cut'.\n\n if (action !== 'copy' && action !== 'cut') {\n throw new Error('Invalid \"action\" value, use either \"copy\" or \"cut\"');\n } // Sets the `target` property using an element that will be have its content copied.\n\n\n if (target !== undefined) {\n if (target && _typeof(target) === 'object' && target.nodeType === 1) {\n if (action === 'copy' && target.hasAttribute('disabled')) {\n throw new Error('Invalid \"target\" attribute. Please use \"readonly\" instead of \"disabled\" attribute');\n }\n\n if (action === 'cut' && (target.hasAttribute('readonly') || target.hasAttribute('disabled'))) {\n throw new Error('Invalid \"target\" attribute. 
You can\\'t cut text from elements with \"readonly\" or \"disabled\" attributes');\n }\n } else {\n throw new Error('Invalid \"target\" value, use a valid Element');\n }\n } // Define selection strategy based on `text` property.\n\n\n if (text) {\n return actions_copy(text, {\n container: container\n });\n } // Defines which selection strategy based on `target` property.\n\n\n if (target) {\n return action === 'cut' ? actions_cut(target) : actions_copy(target, {\n container: container\n });\n }\n};\n\n/* harmony default export */ var actions_default = (ClipboardActionDefault);\n;// CONCATENATED MODULE: ./src/clipboard.js\nfunction clipboard_typeof(obj) { \"@babel/helpers - typeof\"; if (typeof Symbol === \"function\" && typeof Symbol.iterator === \"symbol\") { clipboard_typeof = function _typeof(obj) { return typeof obj; }; } else { clipboard_typeof = function _typeof(obj) { return obj && typeof Symbol === \"function\" && obj.constructor === Symbol && obj !== Symbol.prototype ? \"symbol\" : typeof obj; }; } return clipboard_typeof(obj); }\n\nfunction _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError(\"Cannot call a class as a function\"); } }\n\nfunction _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if (\"value\" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } }\n\nfunction _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); return Constructor; }\n\nfunction _inherits(subClass, superClass) { if (typeof superClass !== \"function\" && superClass !== null) { throw new TypeError(\"Super expression must either be null or a function\"); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, writable: true, configurable: true } }); if (superClass) _setPrototypeOf(subClass, superClass); }\n\nfunction _setPrototypeOf(o, p) { _setPrototypeOf = Object.setPrototypeOf || function _setPrototypeOf(o, p) { o.__proto__ = p; return o; }; return _setPrototypeOf(o, p); }\n\nfunction _createSuper(Derived) { var hasNativeReflectConstruct = _isNativeReflectConstruct(); return function _createSuperInternal() { var Super = _getPrototypeOf(Derived), result; if (hasNativeReflectConstruct) { var NewTarget = _getPrototypeOf(this).constructor; result = Reflect.construct(Super, arguments, NewTarget); } else { result = Super.apply(this, arguments); } return _possibleConstructorReturn(this, result); }; }\n\nfunction _possibleConstructorReturn(self, call) { if (call && (clipboard_typeof(call) === \"object\" || typeof call === \"function\")) { return call; } return _assertThisInitialized(self); }\n\nfunction _assertThisInitialized(self) { if (self === void 0) { throw new ReferenceError(\"this hasn't been initialised - super() hasn't been called\"); } return self; }\n\nfunction _isNativeReflectConstruct() { if (typeof Reflect === \"undefined\" || !Reflect.construct) return false; if (Reflect.construct.sham) return false; if (typeof Proxy === \"function\") return true; try { Date.prototype.toString.call(Reflect.construct(Date, [], function () {})); return true; } catch (e) { return false; } }\n\nfunction _getPrototypeOf(o) { _getPrototypeOf = Object.setPrototypeOf ? 
Object.getPrototypeOf : function _getPrototypeOf(o) { return o.__proto__ || Object.getPrototypeOf(o); }; return _getPrototypeOf(o); }\n\n\n\n\n\n\n/**\n * Helper function to retrieve attribute value.\n * @param {String} suffix\n * @param {Element} element\n */\n\nfunction getAttributeValue(suffix, element) {\n var attribute = \"data-clipboard-\".concat(suffix);\n\n if (!element.hasAttribute(attribute)) {\n return;\n }\n\n return element.getAttribute(attribute);\n}\n/**\n * Base class which takes one or more elements, adds event listeners to them,\n * and instantiates a new `ClipboardAction` on each click.\n */\n\n\nvar Clipboard = /*#__PURE__*/function (_Emitter) {\n _inherits(Clipboard, _Emitter);\n\n var _super = _createSuper(Clipboard);\n\n /**\n * @param {String|HTMLElement|HTMLCollection|NodeList} trigger\n * @param {Object} options\n */\n function Clipboard(trigger, options) {\n var _this;\n\n _classCallCheck(this, Clipboard);\n\n _this = _super.call(this);\n\n _this.resolveOptions(options);\n\n _this.listenClick(trigger);\n\n return _this;\n }\n /**\n * Defines if attributes would be resolved using internal setter functions\n * or custom functions that were passed in the constructor.\n * @param {Object} options\n */\n\n\n _createClass(Clipboard, [{\n key: \"resolveOptions\",\n value: function resolveOptions() {\n var options = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : {};\n this.action = typeof options.action === 'function' ? options.action : this.defaultAction;\n this.target = typeof options.target === 'function' ? options.target : this.defaultTarget;\n this.text = typeof options.text === 'function' ? options.text : this.defaultText;\n this.container = clipboard_typeof(options.container) === 'object' ? options.container : document.body;\n }\n /**\n * Adds a click event listener to the passed trigger.\n * @param {String|HTMLElement|HTMLCollection|NodeList} trigger\n */\n\n }, {\n key: \"listenClick\",\n value: function listenClick(trigger) {\n var _this2 = this;\n\n this.listener = listen_default()(trigger, 'click', function (e) {\n return _this2.onClick(e);\n });\n }\n /**\n * Defines a new `ClipboardAction` on each click event.\n * @param {Event} e\n */\n\n }, {\n key: \"onClick\",\n value: function onClick(e) {\n var trigger = e.delegateTarget || e.currentTarget;\n var action = this.action(trigger) || 'copy';\n var text = actions_default({\n action: action,\n container: this.container,\n target: this.target(trigger),\n text: this.text(trigger)\n }); // Fires an event based on the copy operation result.\n\n this.emit(text ? 
'success' : 'error', {\n action: action,\n text: text,\n trigger: trigger,\n clearSelection: function clearSelection() {\n if (trigger) {\n trigger.focus();\n }\n\n window.getSelection().removeAllRanges();\n }\n });\n }\n /**\n * Default `action` lookup function.\n * @param {Element} trigger\n */\n\n }, {\n key: \"defaultAction\",\n value: function defaultAction(trigger) {\n return getAttributeValue('action', trigger);\n }\n /**\n * Default `target` lookup function.\n * @param {Element} trigger\n */\n\n }, {\n key: \"defaultTarget\",\n value: function defaultTarget(trigger) {\n var selector = getAttributeValue('target', trigger);\n\n if (selector) {\n return document.querySelector(selector);\n }\n }\n /**\n * Allow fire programmatically a copy action\n * @param {String|HTMLElement} target\n * @param {Object} options\n * @returns Text copied.\n */\n\n }, {\n key: \"defaultText\",\n\n /**\n * Default `text` lookup function.\n * @param {Element} trigger\n */\n value: function defaultText(trigger) {\n return getAttributeValue('text', trigger);\n }\n /**\n * Destroy lifecycle.\n */\n\n }, {\n key: \"destroy\",\n value: function destroy() {\n this.listener.destroy();\n }\n }], [{\n key: \"copy\",\n value: function copy(target) {\n var options = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {\n container: document.body\n };\n return actions_copy(target, options);\n }\n /**\n * Allow fire programmatically a cut action\n * @param {String|HTMLElement} target\n * @returns Text cutted.\n */\n\n }, {\n key: \"cut\",\n value: function cut(target) {\n return actions_cut(target);\n }\n /**\n * Returns the support of the given action, or all actions if no action is\n * given.\n * @param {String} [action]\n */\n\n }, {\n key: \"isSupported\",\n value: function isSupported() {\n var action = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : ['copy', 'cut'];\n var actions = typeof action === 'string' ? 
[action] : action;\n var support = !!document.queryCommandSupported;\n actions.forEach(function (action) {\n support = support && !!document.queryCommandSupported(action);\n });\n return support;\n }\n }]);\n\n return Clipboard;\n}((tiny_emitter_default()));\n\n/* harmony default export */ var clipboard = (Clipboard);\n\n/***/ }),\n\n/***/ 828:\n/***/ (function(module) {\n\nvar DOCUMENT_NODE_TYPE = 9;\n\n/**\n * A polyfill for Element.matches()\n */\nif (typeof Element !== 'undefined' && !Element.prototype.matches) {\n var proto = Element.prototype;\n\n proto.matches = proto.matchesSelector ||\n proto.mozMatchesSelector ||\n proto.msMatchesSelector ||\n proto.oMatchesSelector ||\n proto.webkitMatchesSelector;\n}\n\n/**\n * Finds the closest parent that matches a selector.\n *\n * @param {Element} element\n * @param {String} selector\n * @return {Function}\n */\nfunction closest (element, selector) {\n while (element && element.nodeType !== DOCUMENT_NODE_TYPE) {\n if (typeof element.matches === 'function' &&\n element.matches(selector)) {\n return element;\n }\n element = element.parentNode;\n }\n}\n\nmodule.exports = closest;\n\n\n/***/ }),\n\n/***/ 438:\n/***/ (function(module, __unused_webpack_exports, __webpack_require__) {\n\nvar closest = __webpack_require__(828);\n\n/**\n * Delegates event to a selector.\n *\n * @param {Element} element\n * @param {String} selector\n * @param {String} type\n * @param {Function} callback\n * @param {Boolean} useCapture\n * @return {Object}\n */\nfunction _delegate(element, selector, type, callback, useCapture) {\n var listenerFn = listener.apply(this, arguments);\n\n element.addEventListener(type, listenerFn, useCapture);\n\n return {\n destroy: function() {\n element.removeEventListener(type, listenerFn, useCapture);\n }\n }\n}\n\n/**\n * Delegates event to a selector.\n *\n * @param {Element|String|Array} [elements]\n * @param {String} selector\n * @param {String} type\n * @param {Function} callback\n * @param {Boolean} useCapture\n * @return {Object}\n */\nfunction delegate(elements, selector, type, callback, useCapture) {\n // Handle the regular Element usage\n if (typeof elements.addEventListener === 'function') {\n return _delegate.apply(null, arguments);\n }\n\n // Handle Element-less usage, it defaults to global delegation\n if (typeof type === 'function') {\n // Use `document` as the first parameter, then apply arguments\n // This is a short way to .unshift `arguments` without running into deoptimizations\n return _delegate.bind(null, document).apply(null, arguments);\n }\n\n // Handle Selector-based usage\n if (typeof elements === 'string') {\n elements = document.querySelectorAll(elements);\n }\n\n // Handle Array-like based usage\n return Array.prototype.map.call(elements, function (element) {\n return _delegate(element, selector, type, callback, useCapture);\n });\n}\n\n/**\n * Finds closest match and invokes callback.\n *\n * @param {Element} element\n * @param {String} selector\n * @param {String} type\n * @param {Function} callback\n * @return {Function}\n */\nfunction listener(element, selector, type, callback) {\n return function(e) {\n e.delegateTarget = closest(e.target, selector);\n\n if (e.delegateTarget) {\n callback.call(element, e);\n }\n }\n}\n\nmodule.exports = delegate;\n\n\n/***/ }),\n\n/***/ 879:\n/***/ (function(__unused_webpack_module, exports) {\n\n/**\n * Check if argument is a HTML element.\n *\n * @param {Object} value\n * @return {Boolean}\n */\nexports.node = function(value) {\n return value !== undefined\n && 
value instanceof HTMLElement\n && value.nodeType === 1;\n};\n\n/**\n * Check if argument is a list of HTML elements.\n *\n * @param {Object} value\n * @return {Boolean}\n */\nexports.nodeList = function(value) {\n var type = Object.prototype.toString.call(value);\n\n return value !== undefined\n && (type === '[object NodeList]' || type === '[object HTMLCollection]')\n && ('length' in value)\n && (value.length === 0 || exports.node(value[0]));\n};\n\n/**\n * Check if argument is a string.\n *\n * @param {Object} value\n * @return {Boolean}\n */\nexports.string = function(value) {\n return typeof value === 'string'\n || value instanceof String;\n};\n\n/**\n * Check if argument is a function.\n *\n * @param {Object} value\n * @return {Boolean}\n */\nexports.fn = function(value) {\n var type = Object.prototype.toString.call(value);\n\n return type === '[object Function]';\n};\n\n\n/***/ }),\n\n/***/ 370:\n/***/ (function(module, __unused_webpack_exports, __webpack_require__) {\n\nvar is = __webpack_require__(879);\nvar delegate = __webpack_require__(438);\n\n/**\n * Validates all params and calls the right\n * listener function based on its target type.\n *\n * @param {String|HTMLElement|HTMLCollection|NodeList} target\n * @param {String} type\n * @param {Function} callback\n * @return {Object}\n */\nfunction listen(target, type, callback) {\n if (!target && !type && !callback) {\n throw new Error('Missing required arguments');\n }\n\n if (!is.string(type)) {\n throw new TypeError('Second argument must be a String');\n }\n\n if (!is.fn(callback)) {\n throw new TypeError('Third argument must be a Function');\n }\n\n if (is.node(target)) {\n return listenNode(target, type, callback);\n }\n else if (is.nodeList(target)) {\n return listenNodeList(target, type, callback);\n }\n else if (is.string(target)) {\n return listenSelector(target, type, callback);\n }\n else {\n throw new TypeError('First argument must be a String, HTMLElement, HTMLCollection, or NodeList');\n }\n}\n\n/**\n * Adds an event listener to a HTML element\n * and returns a remove listener function.\n *\n * @param {HTMLElement} node\n * @param {String} type\n * @param {Function} callback\n * @return {Object}\n */\nfunction listenNode(node, type, callback) {\n node.addEventListener(type, callback);\n\n return {\n destroy: function() {\n node.removeEventListener(type, callback);\n }\n }\n}\n\n/**\n * Add an event listener to a list of HTML elements\n * and returns a remove listener function.\n *\n * @param {NodeList|HTMLCollection} nodeList\n * @param {String} type\n * @param {Function} callback\n * @return {Object}\n */\nfunction listenNodeList(nodeList, type, callback) {\n Array.prototype.forEach.call(nodeList, function(node) {\n node.addEventListener(type, callback);\n });\n\n return {\n destroy: function() {\n Array.prototype.forEach.call(nodeList, function(node) {\n node.removeEventListener(type, callback);\n });\n }\n }\n}\n\n/**\n * Add an event listener to a selector\n * and returns a remove listener function.\n *\n * @param {String} selector\n * @param {String} type\n * @param {Function} callback\n * @return {Object}\n */\nfunction listenSelector(selector, type, callback) {\n return delegate(document.body, selector, type, callback);\n}\n\nmodule.exports = listen;\n\n\n/***/ }),\n\n/***/ 817:\n/***/ (function(module) {\n\nfunction select(element) {\n var selectedText;\n\n if (element.nodeName === 'SELECT') {\n element.focus();\n\n selectedText = element.value;\n }\n else if (element.nodeName === 'INPUT' || element.nodeName 
=== 'TEXTAREA') {\n var isReadOnly = element.hasAttribute('readonly');\n\n if (!isReadOnly) {\n element.setAttribute('readonly', '');\n }\n\n element.select();\n element.setSelectionRange(0, element.value.length);\n\n if (!isReadOnly) {\n element.removeAttribute('readonly');\n }\n\n selectedText = element.value;\n }\n else {\n if (element.hasAttribute('contenteditable')) {\n element.focus();\n }\n\n var selection = window.getSelection();\n var range = document.createRange();\n\n range.selectNodeContents(element);\n selection.removeAllRanges();\n selection.addRange(range);\n\n selectedText = selection.toString();\n }\n\n return selectedText;\n}\n\nmodule.exports = select;\n\n\n/***/ }),\n\n/***/ 279:\n/***/ (function(module) {\n\nfunction E () {\n // Keep this empty so it's easier to inherit from\n // (via https://github.com/lipsmack from https://github.com/scottcorgan/tiny-emitter/issues/3)\n}\n\nE.prototype = {\n on: function (name, callback, ctx) {\n var e = this.e || (this.e = {});\n\n (e[name] || (e[name] = [])).push({\n fn: callback,\n ctx: ctx\n });\n\n return this;\n },\n\n once: function (name, callback, ctx) {\n var self = this;\n function listener () {\n self.off(name, listener);\n callback.apply(ctx, arguments);\n };\n\n listener._ = callback\n return this.on(name, listener, ctx);\n },\n\n emit: function (name) {\n var data = [].slice.call(arguments, 1);\n var evtArr = ((this.e || (this.e = {}))[name] || []).slice();\n var i = 0;\n var len = evtArr.length;\n\n for (i; i < len; i++) {\n evtArr[i].fn.apply(evtArr[i].ctx, data);\n }\n\n return this;\n },\n\n off: function (name, callback) {\n var e = this.e || (this.e = {});\n var evts = e[name];\n var liveEvents = [];\n\n if (evts && callback) {\n for (var i = 0, len = evts.length; i < len; i++) {\n if (evts[i].fn !== callback && evts[i].fn._ !== callback)\n liveEvents.push(evts[i]);\n }\n }\n\n // Remove event from queue to prevent memory leak\n // Suggested by https://github.com/lazd\n // Ref: https://github.com/scottcorgan/tiny-emitter/commit/c6ebfaa9bc973b33d110a84a307742b7cf94c953#commitcomment-5024910\n\n (liveEvents.length)\n ? 
e[name] = liveEvents\n : delete e[name];\n\n return this;\n }\n};\n\nmodule.exports = E;\nmodule.exports.TinyEmitter = E;\n\n\n/***/ })\n\n/******/ \t});\n/************************************************************************/\n/******/ \t// The module cache\n/******/ \tvar __webpack_module_cache__ = {};\n/******/ \t\n/******/ \t// The require function\n/******/ \tfunction __webpack_require__(moduleId) {\n/******/ \t\t// Check if module is in cache\n/******/ \t\tif(__webpack_module_cache__[moduleId]) {\n/******/ \t\t\treturn __webpack_module_cache__[moduleId].exports;\n/******/ \t\t}\n/******/ \t\t// Create a new module (and put it into the cache)\n/******/ \t\tvar module = __webpack_module_cache__[moduleId] = {\n/******/ \t\t\t// no module.id needed\n/******/ \t\t\t// no module.loaded needed\n/******/ \t\t\texports: {}\n/******/ \t\t};\n/******/ \t\n/******/ \t\t// Execute the module function\n/******/ \t\t__webpack_modules__[moduleId](module, module.exports, __webpack_require__);\n/******/ \t\n/******/ \t\t// Return the exports of the module\n/******/ \t\treturn module.exports;\n/******/ \t}\n/******/ \t\n/************************************************************************/\n/******/ \t/* webpack/runtime/compat get default export */\n/******/ \t!function() {\n/******/ \t\t// getDefaultExport function for compatibility with non-harmony modules\n/******/ \t\t__webpack_require__.n = function(module) {\n/******/ \t\t\tvar getter = module && module.__esModule ?\n/******/ \t\t\t\tfunction() { return module['default']; } :\n/******/ \t\t\t\tfunction() { return module; };\n/******/ \t\t\t__webpack_require__.d(getter, { a: getter });\n/******/ \t\t\treturn getter;\n/******/ \t\t};\n/******/ \t}();\n/******/ \t\n/******/ \t/* webpack/runtime/define property getters */\n/******/ \t!function() {\n/******/ \t\t// define getter functions for harmony exports\n/******/ \t\t__webpack_require__.d = function(exports, definition) {\n/******/ \t\t\tfor(var key in definition) {\n/******/ \t\t\t\tif(__webpack_require__.o(definition, key) && !__webpack_require__.o(exports, key)) {\n/******/ \t\t\t\t\tObject.defineProperty(exports, key, { enumerable: true, get: definition[key] });\n/******/ \t\t\t\t}\n/******/ \t\t\t}\n/******/ \t\t};\n/******/ \t}();\n/******/ \t\n/******/ \t/* webpack/runtime/hasOwnProperty shorthand */\n/******/ \t!function() {\n/******/ \t\t__webpack_require__.o = function(obj, prop) { return Object.prototype.hasOwnProperty.call(obj, prop); }\n/******/ \t}();\n/******/ \t\n/************************************************************************/\n/******/ \t// module exports must be returned from runtime so entry inlining is disabled\n/******/ \t// startup\n/******/ \t// Load entry module and return exports\n/******/ \treturn __webpack_require__(686);\n/******/ })()\n.default;\n});", "/*!\n * escape-html\n * Copyright(c) 2012-2013 TJ Holowaychuk\n * Copyright(c) 2015 Andreas Lubbe\n * Copyright(c) 2015 Tiancheng \"Timothy\" Gu\n * MIT Licensed\n */\n\n'use strict';\n\n/**\n * Module variables.\n * @private\n */\n\nvar matchHtmlRegExp = /[\"'&<>]/;\n\n/**\n * Module exports.\n * @public\n */\n\nmodule.exports = escapeHtml;\n\n/**\n * Escape special characters in the given string of html.\n *\n * @param {string} string The string to escape for inserting into HTML\n * @return {string}\n * @public\n */\n\nfunction escapeHtml(string) {\n var str = '' + string;\n var match = matchHtmlRegExp.exec(str);\n\n if (!match) {\n return str;\n }\n\n var escape;\n var html = '';\n var index = 0;\n 
var lastIndex = 0;\n\n for (index = match.index; index < str.length; index++) {\n switch (str.charCodeAt(index)) {\n case 34: // \"\n escape = '"';\n break;\n case 38: // &\n escape = '&';\n break;\n case 39: // '\n escape = ''';\n break;\n case 60: // <\n escape = '<';\n break;\n case 62: // >\n escape = '>';\n break;\n default:\n continue;\n }\n\n if (lastIndex !== index) {\n html += str.substring(lastIndex, index);\n }\n\n lastIndex = index + 1;\n html += escape;\n }\n\n return lastIndex !== index\n ? html + str.substring(lastIndex, index)\n : html;\n}\n", "/*\n * Copyright (c) 2016-2024 Martin Donath \n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\nimport \"focus-visible\"\n\nimport {\n EMPTY,\n NEVER,\n Observable,\n Subject,\n defer,\n delay,\n filter,\n map,\n merge,\n mergeWith,\n shareReplay,\n switchMap\n} from \"rxjs\"\n\nimport { configuration, feature } from \"./_\"\nimport {\n at,\n getActiveElement,\n getOptionalElement,\n requestJSON,\n setLocation,\n setToggle,\n watchDocument,\n watchKeyboard,\n watchLocation,\n watchLocationTarget,\n watchMedia,\n watchPrint,\n watchScript,\n watchViewport\n} from \"./browser\"\nimport {\n getComponentElement,\n getComponentElements,\n mountAnnounce,\n mountBackToTop,\n mountConsent,\n mountContent,\n mountDialog,\n mountHeader,\n mountHeaderTitle,\n mountPalette,\n mountProgress,\n mountSearch,\n mountSearchHiglight,\n mountSidebar,\n mountSource,\n mountTableOfContents,\n mountTabs,\n watchHeader,\n watchMain\n} from \"./components\"\nimport {\n SearchIndex,\n setupClipboardJS,\n setupInstantNavigation,\n setupVersionSelector\n} from \"./integrations\"\nimport {\n patchEllipsis,\n patchIndeterminate,\n patchScrollfix,\n patchScrolllock\n} from \"./patches\"\nimport \"./polyfills\"\n\n/* ----------------------------------------------------------------------------\n * Functions - @todo refactor\n * ------------------------------------------------------------------------- */\n\n/**\n * Fetch search index\n *\n * @returns Search index observable\n */\nfunction fetchSearchIndex(): Observable {\n if (location.protocol === \"file:\") {\n return watchScript(\n `${new URL(\"search/search_index.js\", config.base)}`\n )\n .pipe(\n // @ts-ignore - @todo fix typings\n map(() => __index),\n shareReplay(1)\n )\n } else {\n return requestJSON(\n new URL(\"search/search_index.json\", config.base)\n )\n }\n}\n\n/* ----------------------------------------------------------------------------\n * Application\n * 
------------------------------------------------------------------------- */\n\n/* Yay, JavaScript is available */\ndocument.documentElement.classList.remove(\"no-js\")\ndocument.documentElement.classList.add(\"js\")\n\n/* Set up navigation observables and subjects */\nconst document$ = watchDocument()\nconst location$ = watchLocation()\nconst target$ = watchLocationTarget(location$)\nconst keyboard$ = watchKeyboard()\n\n/* Set up media observables */\nconst viewport$ = watchViewport()\nconst tablet$ = watchMedia(\"(min-width: 960px)\")\nconst screen$ = watchMedia(\"(min-width: 1220px)\")\nconst print$ = watchPrint()\n\n/* Retrieve search index, if search is enabled */\nconst config = configuration()\nconst index$ = document.forms.namedItem(\"search\")\n ? fetchSearchIndex()\n : NEVER\n\n/* Set up Clipboard.js integration */\nconst alert$ = new Subject()\nsetupClipboardJS({ alert$ })\n\n/* Set up progress indicator */\nconst progress$ = new Subject()\n\n/* Set up instant navigation, if enabled */\nif (feature(\"navigation.instant\"))\n setupInstantNavigation({ location$, viewport$, progress$ })\n .subscribe(document$)\n\n/* Set up version selector */\nif (config.version?.provider === \"mike\")\n setupVersionSelector({ document$ })\n\n/* Always close drawer and search on navigation */\nmerge(location$, target$)\n .pipe(\n delay(125)\n )\n .subscribe(() => {\n setToggle(\"drawer\", false)\n setToggle(\"search\", false)\n })\n\n/* Set up global keyboard handlers */\nkeyboard$\n .pipe(\n filter(({ mode }) => mode === \"global\")\n )\n .subscribe(key => {\n switch (key.type) {\n\n /* Go to previous page */\n case \"p\":\n case \",\":\n const prev = getOptionalElement(\"link[rel=prev]\")\n if (typeof prev !== \"undefined\")\n setLocation(prev)\n break\n\n /* Go to next page */\n case \"n\":\n case \".\":\n const next = getOptionalElement(\"link[rel=next]\")\n if (typeof next !== \"undefined\")\n setLocation(next)\n break\n\n /* Expand navigation, see https://bit.ly/3ZjG5io */\n case \"Enter\":\n const active = getActiveElement()\n if (active instanceof HTMLLabelElement)\n active.click()\n }\n })\n\n/* Set up patches */\npatchEllipsis({ viewport$, document$ })\npatchIndeterminate({ document$, tablet$ })\npatchScrollfix({ document$ })\npatchScrolllock({ viewport$, tablet$ })\n\n/* Set up header and main area observable */\nconst header$ = watchHeader(getComponentElement(\"header\"), { viewport$ })\nconst main$ = document$\n .pipe(\n map(() => getComponentElement(\"main\")),\n switchMap(el => watchMain(el, { viewport$, header$ })),\n shareReplay(1)\n )\n\n/* Set up control component observables */\nconst control$ = merge(\n\n /* Consent */\n ...getComponentElements(\"consent\")\n .map(el => mountConsent(el, { target$ })),\n\n /* Dialog */\n ...getComponentElements(\"dialog\")\n .map(el => mountDialog(el, { alert$ })),\n\n /* Header */\n ...getComponentElements(\"header\")\n .map(el => mountHeader(el, { viewport$, header$, main$ })),\n\n /* Color palette */\n ...getComponentElements(\"palette\")\n .map(el => mountPalette(el)),\n\n /* Progress bar */\n ...getComponentElements(\"progress\")\n .map(el => mountProgress(el, { progress$ })),\n\n /* Search */\n ...getComponentElements(\"search\")\n .map(el => mountSearch(el, { index$, keyboard$ })),\n\n /* Repository information */\n ...getComponentElements(\"source\")\n .map(el => mountSource(el))\n)\n\n/* Set up content component observables */\nconst content$ = defer(() => merge(\n\n /* Announcement bar */\n ...getComponentElements(\"announce\")\n 
.map(el => mountAnnounce(el)),\n\n /* Content */\n ...getComponentElements(\"content\")\n .map(el => mountContent(el, { viewport$, target$, print$ })),\n\n /* Search highlighting */\n ...getComponentElements(\"content\")\n .map(el => feature(\"search.highlight\")\n ? mountSearchHiglight(el, { index$, location$ })\n : EMPTY\n ),\n\n /* Header title */\n ...getComponentElements(\"header-title\")\n .map(el => mountHeaderTitle(el, { viewport$, header$ })),\n\n /* Sidebar */\n ...getComponentElements(\"sidebar\")\n .map(el => el.getAttribute(\"data-md-type\") === \"navigation\"\n ? at(screen$, () => mountSidebar(el, { viewport$, header$, main$ }))\n : at(tablet$, () => mountSidebar(el, { viewport$, header$, main$ }))\n ),\n\n /* Navigation tabs */\n ...getComponentElements(\"tabs\")\n .map(el => mountTabs(el, { viewport$, header$ })),\n\n /* Table of contents */\n ...getComponentElements(\"toc\")\n .map(el => mountTableOfContents(el, {\n viewport$, header$, main$, target$\n })),\n\n /* Back-to-top button */\n ...getComponentElements(\"top\")\n .map(el => mountBackToTop(el, { viewport$, header$, main$, target$ }))\n))\n\n/* Set up component observables */\nconst component$ = document$\n .pipe(\n switchMap(() => content$),\n mergeWith(control$),\n shareReplay(1)\n )\n\n/* Subscribe to all components */\ncomponent$.subscribe()\n\n/* ----------------------------------------------------------------------------\n * Exports\n * ------------------------------------------------------------------------- */\n\nwindow.document$ = document$ /* Document observable */\nwindow.location$ = location$ /* Location subject */\nwindow.target$ = target$ /* Location target observable */\nwindow.keyboard$ = keyboard$ /* Keyboard observable */\nwindow.viewport$ = viewport$ /* Viewport observable */\nwindow.tablet$ = tablet$ /* Media tablet observable */\nwindow.screen$ = screen$ /* Media screen observable */\nwindow.print$ = print$ /* Media print observable */\nwindow.alert$ = alert$ /* Alert subject */\nwindow.progress$ = progress$ /* Progress indicator subject */\nwindow.component$ = component$ /* Component observable */\n", "/*! *****************************************************************************\r\nCopyright (c) Microsoft Corporation.\r\n\r\nPermission to use, copy, modify, and/or distribute this software for any\r\npurpose with or without fee is hereby granted.\r\n\r\nTHE SOFTWARE IS PROVIDED \"AS IS\" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH\r\nREGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY\r\nAND FITNESS. 
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,\r\nINDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM\r\nLOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR\r\nOTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR\r\nPERFORMANCE OF THIS SOFTWARE.\r\n***************************************************************************** */\r\n/* global Reflect, Promise */\r\n\r\nvar extendStatics = function(d, b) {\r\n extendStatics = Object.setPrototypeOf ||\r\n ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||\r\n function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; };\r\n return extendStatics(d, b);\r\n};\r\n\r\nexport function __extends(d, b) {\r\n if (typeof b !== \"function\" && b !== null)\r\n throw new TypeError(\"Class extends value \" + String(b) + \" is not a constructor or null\");\r\n extendStatics(d, b);\r\n function __() { this.constructor = d; }\r\n d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());\r\n}\r\n\r\nexport var __assign = function() {\r\n __assign = Object.assign || function __assign(t) {\r\n for (var s, i = 1, n = arguments.length; i < n; i++) {\r\n s = arguments[i];\r\n for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) t[p] = s[p];\r\n }\r\n return t;\r\n }\r\n return __assign.apply(this, arguments);\r\n}\r\n\r\nexport function __rest(s, e) {\r\n var t = {};\r\n for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0)\r\n t[p] = s[p];\r\n if (s != null && typeof Object.getOwnPropertySymbols === \"function\")\r\n for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) {\r\n if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i]))\r\n t[p[i]] = s[p[i]];\r\n }\r\n return t;\r\n}\r\n\r\nexport function __decorate(decorators, target, key, desc) {\r\n var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d;\r\n if (typeof Reflect === \"object\" && typeof Reflect.decorate === \"function\") r = Reflect.decorate(decorators, target, key, desc);\r\n else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r;\r\n return c > 3 && r && Object.defineProperty(target, key, r), r;\r\n}\r\n\r\nexport function __param(paramIndex, decorator) {\r\n return function (target, key) { decorator(target, key, paramIndex); }\r\n}\r\n\r\nexport function __metadata(metadataKey, metadataValue) {\r\n if (typeof Reflect === \"object\" && typeof Reflect.metadata === \"function\") return Reflect.metadata(metadataKey, metadataValue);\r\n}\r\n\r\nexport function __awaiter(thisArg, _arguments, P, generator) {\r\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\r\n return new (P || (P = Promise))(function (resolve, reject) {\r\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\r\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\r\n function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\r\n step((generator = generator.apply(thisArg, _arguments || [])).next());\r\n });\r\n}\r\n\r\nexport function __generator(thisArg, body) {\r\n var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g;\r\n return g = { next: verb(0), \"throw\": verb(1), \"return\": verb(2) }, typeof Symbol === \"function\" && (g[Symbol.iterator] = function() { return this; }), g;\r\n function verb(n) { return function (v) { return step([n, v]); }; }\r\n function step(op) {\r\n if (f) throw new TypeError(\"Generator is already executing.\");\r\n while (_) try {\r\n if (f = 1, y && (t = op[0] & 2 ? y[\"return\"] : op[0] ? y[\"throw\"] || ((t = y[\"return\"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t;\r\n if (y = 0, t) op = [op[0] & 2, t.value];\r\n switch (op[0]) {\r\n case 0: case 1: t = op; break;\r\n case 4: _.label++; return { value: op[1], done: false };\r\n case 5: _.label++; y = op[1]; op = [0]; continue;\r\n case 7: op = _.ops.pop(); _.trys.pop(); continue;\r\n default:\r\n if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; }\r\n if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; }\r\n if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; }\r\n if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; }\r\n if (t[2]) _.ops.pop();\r\n _.trys.pop(); continue;\r\n }\r\n op = body.call(thisArg, _);\r\n } catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; }\r\n if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true };\r\n }\r\n}\r\n\r\nexport var __createBinding = Object.create ? (function(o, m, k, k2) {\r\n if (k2 === undefined) k2 = k;\r\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\r\n}) : (function(o, m, k, k2) {\r\n if (k2 === undefined) k2 = k;\r\n o[k2] = m[k];\r\n});\r\n\r\nexport function __exportStar(m, o) {\r\n for (var p in m) if (p !== \"default\" && !Object.prototype.hasOwnProperty.call(o, p)) __createBinding(o, m, p);\r\n}\r\n\r\nexport function __values(o) {\r\n var s = typeof Symbol === \"function\" && Symbol.iterator, m = s && o[s], i = 0;\r\n if (m) return m.call(o);\r\n if (o && typeof o.length === \"number\") return {\r\n next: function () {\r\n if (o && i >= o.length) o = void 0;\r\n return { value: o && o[i++], done: !o };\r\n }\r\n };\r\n throw new TypeError(s ? 
\"Object is not iterable.\" : \"Symbol.iterator is not defined.\");\r\n}\r\n\r\nexport function __read(o, n) {\r\n var m = typeof Symbol === \"function\" && o[Symbol.iterator];\r\n if (!m) return o;\r\n var i = m.call(o), r, ar = [], e;\r\n try {\r\n while ((n === void 0 || n-- > 0) && !(r = i.next()).done) ar.push(r.value);\r\n }\r\n catch (error) { e = { error: error }; }\r\n finally {\r\n try {\r\n if (r && !r.done && (m = i[\"return\"])) m.call(i);\r\n }\r\n finally { if (e) throw e.error; }\r\n }\r\n return ar;\r\n}\r\n\r\n/** @deprecated */\r\nexport function __spread() {\r\n for (var ar = [], i = 0; i < arguments.length; i++)\r\n ar = ar.concat(__read(arguments[i]));\r\n return ar;\r\n}\r\n\r\n/** @deprecated */\r\nexport function __spreadArrays() {\r\n for (var s = 0, i = 0, il = arguments.length; i < il; i++) s += arguments[i].length;\r\n for (var r = Array(s), k = 0, i = 0; i < il; i++)\r\n for (var a = arguments[i], j = 0, jl = a.length; j < jl; j++, k++)\r\n r[k] = a[j];\r\n return r;\r\n}\r\n\r\nexport function __spreadArray(to, from, pack) {\r\n if (pack || arguments.length === 2) for (var i = 0, l = from.length, ar; i < l; i++) {\r\n if (ar || !(i in from)) {\r\n if (!ar) ar = Array.prototype.slice.call(from, 0, i);\r\n ar[i] = from[i];\r\n }\r\n }\r\n return to.concat(ar || Array.prototype.slice.call(from));\r\n}\r\n\r\nexport function __await(v) {\r\n return this instanceof __await ? (this.v = v, this) : new __await(v);\r\n}\r\n\r\nexport function __asyncGenerator(thisArg, _arguments, generator) {\r\n if (!Symbol.asyncIterator) throw new TypeError(\"Symbol.asyncIterator is not defined.\");\r\n var g = generator.apply(thisArg, _arguments || []), i, q = [];\r\n return i = {}, verb(\"next\"), verb(\"throw\"), verb(\"return\"), i[Symbol.asyncIterator] = function () { return this; }, i;\r\n function verb(n) { if (g[n]) i[n] = function (v) { return new Promise(function (a, b) { q.push([n, v, a, b]) > 1 || resume(n, v); }); }; }\r\n function resume(n, v) { try { step(g[n](v)); } catch (e) { settle(q[0][3], e); } }\r\n function step(r) { r.value instanceof __await ? Promise.resolve(r.value.v).then(fulfill, reject) : settle(q[0][2], r); }\r\n function fulfill(value) { resume(\"next\", value); }\r\n function reject(value) { resume(\"throw\", value); }\r\n function settle(f, v) { if (f(v), q.shift(), q.length) resume(q[0][0], q[0][1]); }\r\n}\r\n\r\nexport function __asyncDelegator(o) {\r\n var i, p;\r\n return i = {}, verb(\"next\"), verb(\"throw\", function (e) { throw e; }), verb(\"return\"), i[Symbol.iterator] = function () { return this; }, i;\r\n function verb(n, f) { i[n] = o[n] ? function (v) { return (p = !p) ? { value: __await(o[n](v)), done: n === \"return\" } : f ? f(v) : v; } : f; }\r\n}\r\n\r\nexport function __asyncValues(o) {\r\n if (!Symbol.asyncIterator) throw new TypeError(\"Symbol.asyncIterator is not defined.\");\r\n var m = o[Symbol.asyncIterator], i;\r\n return m ? m.call(o) : (o = typeof __values === \"function\" ? 
__values(o) : o[Symbol.iterator](), i = {}, verb(\"next\"), verb(\"throw\"), verb(\"return\"), i[Symbol.asyncIterator] = function () { return this; }, i);\r\n function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; }\r\n function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); }\r\n}\r\n\r\nexport function __makeTemplateObject(cooked, raw) {\r\n if (Object.defineProperty) { Object.defineProperty(cooked, \"raw\", { value: raw }); } else { cooked.raw = raw; }\r\n return cooked;\r\n};\r\n\r\nvar __setModuleDefault = Object.create ? (function(o, v) {\r\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\r\n}) : function(o, v) {\r\n o[\"default\"] = v;\r\n};\r\n\r\nexport function __importStar(mod) {\r\n if (mod && mod.__esModule) return mod;\r\n var result = {};\r\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\r\n __setModuleDefault(result, mod);\r\n return result;\r\n}\r\n\r\nexport function __importDefault(mod) {\r\n return (mod && mod.__esModule) ? mod : { default: mod };\r\n}\r\n\r\nexport function __classPrivateFieldGet(receiver, state, kind, f) {\r\n if (kind === \"a\" && !f) throw new TypeError(\"Private accessor was defined without a getter\");\r\n if (typeof state === \"function\" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError(\"Cannot read private member from an object whose class did not declare it\");\r\n return kind === \"m\" ? f : kind === \"a\" ? f.call(receiver) : f ? f.value : state.get(receiver);\r\n}\r\n\r\nexport function __classPrivateFieldSet(receiver, state, value, kind, f) {\r\n if (kind === \"m\") throw new TypeError(\"Private method is not writable\");\r\n if (kind === \"a\" && !f) throw new TypeError(\"Private accessor was defined without a setter\");\r\n if (typeof state === \"function\" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError(\"Cannot write private member to an object whose class did not declare it\");\r\n return (kind === \"a\" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value;\r\n}\r\n", "/**\n * Returns true if the object is a function.\n * @param value The value to check\n */\nexport function isFunction(value: any): value is (...args: any[]) => any {\n return typeof value === 'function';\n}\n", "/**\n * Used to create Error subclasses until the community moves away from ES5.\n *\n * This is because compiling from TypeScript down to ES5 has issues with subclassing Errors\n * as well as other built-in types: https://github.com/Microsoft/TypeScript/issues/12123\n *\n * @param createImpl A factory function to create the actual constructor implementation. 
The returned\n * function should be a named function that calls `_super` internally.\n */\nexport function createErrorClass(createImpl: (_super: any) => any): T {\n const _super = (instance: any) => {\n Error.call(instance);\n instance.stack = new Error().stack;\n };\n\n const ctorFunc = createImpl(_super);\n ctorFunc.prototype = Object.create(Error.prototype);\n ctorFunc.prototype.constructor = ctorFunc;\n return ctorFunc;\n}\n", "import { createErrorClass } from './createErrorClass';\n\nexport interface UnsubscriptionError extends Error {\n readonly errors: any[];\n}\n\nexport interface UnsubscriptionErrorCtor {\n /**\n * @deprecated Internal implementation detail. Do not construct error instances.\n * Cannot be tagged as internal: https://github.com/ReactiveX/rxjs/issues/6269\n */\n new (errors: any[]): UnsubscriptionError;\n}\n\n/**\n * An error thrown when one or more errors have occurred during the\n * `unsubscribe` of a {@link Subscription}.\n */\nexport const UnsubscriptionError: UnsubscriptionErrorCtor = createErrorClass(\n (_super) =>\n function UnsubscriptionErrorImpl(this: any, errors: (Error | string)[]) {\n _super(this);\n this.message = errors\n ? `${errors.length} errors occurred during unsubscription:\n${errors.map((err, i) => `${i + 1}) ${err.toString()}`).join('\\n ')}`\n : '';\n this.name = 'UnsubscriptionError';\n this.errors = errors;\n }\n);\n", "/**\n * Removes an item from an array, mutating it.\n * @param arr The array to remove the item from\n * @param item The item to remove\n */\nexport function arrRemove(arr: T[] | undefined | null, item: T) {\n if (arr) {\n const index = arr.indexOf(item);\n 0 <= index && arr.splice(index, 1);\n }\n}\n", "import { isFunction } from './util/isFunction';\nimport { UnsubscriptionError } from './util/UnsubscriptionError';\nimport { SubscriptionLike, TeardownLogic, Unsubscribable } from './types';\nimport { arrRemove } from './util/arrRemove';\n\n/**\n * Represents a disposable resource, such as the execution of an Observable. A\n * Subscription has one important method, `unsubscribe`, that takes no argument\n * and just disposes the resource held by the subscription.\n *\n * Additionally, subscriptions may be grouped together through the `add()`\n * method, which will attach a child Subscription to the current Subscription.\n * When a Subscription is unsubscribed, all its children (and its grandchildren)\n * will be unsubscribed as well.\n *\n * @class Subscription\n */\nexport class Subscription implements SubscriptionLike {\n /** @nocollapse */\n public static EMPTY = (() => {\n const empty = new Subscription();\n empty.closed = true;\n return empty;\n })();\n\n /**\n * A flag to indicate whether this Subscription has already been unsubscribed.\n */\n public closed = false;\n\n private _parentage: Subscription[] | Subscription | null = null;\n\n /**\n * The list of registered finalizers to execute upon unsubscription. Adding and removing from this\n * list occurs in the {@link #add} and {@link #remove} methods.\n */\n private _finalizers: Exclude[] | null = null;\n\n /**\n * @param initialTeardown A function executed first as part of the finalization\n * process that is kicked off when {@link #unsubscribe} is called.\n */\n constructor(private initialTeardown?: () => void) {}\n\n /**\n * Disposes the resources held by the subscription. 
May, for instance, cancel\n * an ongoing Observable execution or cancel any other type of work that\n * started when the Subscription was created.\n * @return {void}\n */\n unsubscribe(): void {\n let errors: any[] | undefined;\n\n if (!this.closed) {\n this.closed = true;\n\n // Remove this from it's parents.\n const { _parentage } = this;\n if (_parentage) {\n this._parentage = null;\n if (Array.isArray(_parentage)) {\n for (const parent of _parentage) {\n parent.remove(this);\n }\n } else {\n _parentage.remove(this);\n }\n }\n\n const { initialTeardown: initialFinalizer } = this;\n if (isFunction(initialFinalizer)) {\n try {\n initialFinalizer();\n } catch (e) {\n errors = e instanceof UnsubscriptionError ? e.errors : [e];\n }\n }\n\n const { _finalizers } = this;\n if (_finalizers) {\n this._finalizers = null;\n for (const finalizer of _finalizers) {\n try {\n execFinalizer(finalizer);\n } catch (err) {\n errors = errors ?? [];\n if (err instanceof UnsubscriptionError) {\n errors = [...errors, ...err.errors];\n } else {\n errors.push(err);\n }\n }\n }\n }\n\n if (errors) {\n throw new UnsubscriptionError(errors);\n }\n }\n }\n\n /**\n * Adds a finalizer to this subscription, so that finalization will be unsubscribed/called\n * when this subscription is unsubscribed. If this subscription is already {@link #closed},\n * because it has already been unsubscribed, then whatever finalizer is passed to it\n * will automatically be executed (unless the finalizer itself is also a closed subscription).\n *\n * Closed Subscriptions cannot be added as finalizers to any subscription. Adding a closed\n * subscription to a any subscription will result in no operation. (A noop).\n *\n * Adding a subscription to itself, or adding `null` or `undefined` will not perform any\n * operation at all. (A noop).\n *\n * `Subscription` instances that are added to this instance will automatically remove themselves\n * if they are unsubscribed. Functions and {@link Unsubscribable} objects that you wish to remove\n * will need to be removed manually with {@link #remove}\n *\n * @param teardown The finalization logic to add to this subscription.\n */\n add(teardown: TeardownLogic): void {\n // Only add the finalizer if it's not undefined\n // and don't add a subscription to itself.\n if (teardown && teardown !== this) {\n if (this.closed) {\n // If this subscription is already closed,\n // execute whatever finalizer is handed to it automatically.\n execFinalizer(teardown);\n } else {\n if (teardown instanceof Subscription) {\n // We don't add closed subscriptions, and we don't add the same subscription\n // twice. Subscription unsubscribe is idempotent.\n if (teardown.closed || teardown._hasParent(this)) {\n return;\n }\n teardown._addParent(this);\n }\n (this._finalizers = this._finalizers ?? 
[]).push(teardown);\n }\n }\n }\n\n /**\n * Checks to see if a this subscription already has a particular parent.\n * This will signal that this subscription has already been added to the parent in question.\n * @param parent the parent to check for\n */\n private _hasParent(parent: Subscription) {\n const { _parentage } = this;\n return _parentage === parent || (Array.isArray(_parentage) && _parentage.includes(parent));\n }\n\n /**\n * Adds a parent to this subscription so it can be removed from the parent if it\n * unsubscribes on it's own.\n *\n * NOTE: THIS ASSUMES THAT {@link _hasParent} HAS ALREADY BEEN CHECKED.\n * @param parent The parent subscription to add\n */\n private _addParent(parent: Subscription) {\n const { _parentage } = this;\n this._parentage = Array.isArray(_parentage) ? (_parentage.push(parent), _parentage) : _parentage ? [_parentage, parent] : parent;\n }\n\n /**\n * Called on a child when it is removed via {@link #remove}.\n * @param parent The parent to remove\n */\n private _removeParent(parent: Subscription) {\n const { _parentage } = this;\n if (_parentage === parent) {\n this._parentage = null;\n } else if (Array.isArray(_parentage)) {\n arrRemove(_parentage, parent);\n }\n }\n\n /**\n * Removes a finalizer from this subscription that was previously added with the {@link #add} method.\n *\n * Note that `Subscription` instances, when unsubscribed, will automatically remove themselves\n * from every other `Subscription` they have been added to. This means that using the `remove` method\n * is not a common thing and should be used thoughtfully.\n *\n * If you add the same finalizer instance of a function or an unsubscribable object to a `Subscription` instance\n * more than once, you will need to call `remove` the same number of times to remove all instances.\n *\n * All finalizer instances are removed to free up memory upon unsubscription.\n *\n * @param teardown The finalizer to remove from this subscription\n */\n remove(teardown: Exclude): void {\n const { _finalizers } = this;\n _finalizers && arrRemove(_finalizers, teardown);\n\n if (teardown instanceof Subscription) {\n teardown._removeParent(this);\n }\n }\n}\n\nexport const EMPTY_SUBSCRIPTION = Subscription.EMPTY;\n\nexport function isSubscription(value: any): value is Subscription {\n return (\n value instanceof Subscription ||\n (value && 'closed' in value && isFunction(value.remove) && isFunction(value.add) && isFunction(value.unsubscribe))\n );\n}\n\nfunction execFinalizer(finalizer: Unsubscribable | (() => void)) {\n if (isFunction(finalizer)) {\n finalizer();\n } else {\n finalizer.unsubscribe();\n }\n}\n", "import { Subscriber } from './Subscriber';\nimport { ObservableNotification } from './types';\n\n/**\n * The {@link GlobalConfig} object for RxJS. It is used to configure things\n * like how to react on unhandled errors.\n */\nexport const config: GlobalConfig = {\n onUnhandledError: null,\n onStoppedNotification: null,\n Promise: undefined,\n useDeprecatedSynchronousErrorHandling: false,\n useDeprecatedNextContext: false,\n};\n\n/**\n * The global configuration object for RxJS, used to configure things\n * like how to react on unhandled errors. Accessible via {@link config}\n * object.\n */\nexport interface GlobalConfig {\n /**\n * A registration point for unhandled errors from RxJS. These are errors that\n * cannot were not handled by consuming code in the usual subscription path. 
For\n * example, if you have this configured, and you subscribe to an observable without\n * providing an error handler, errors from that subscription will end up here. This\n * will _always_ be called asynchronously on another job in the runtime. This is because\n * we do not want errors thrown in this user-configured handler to interfere with the\n * behavior of the library.\n */\n onUnhandledError: ((err: any) => void) | null;\n\n /**\n * A registration point for notifications that cannot be sent to subscribers because they\n * have completed, errored or have been explicitly unsubscribed. By default, next, complete\n * and error notifications sent to stopped subscribers are noops. However, sometimes callers\n * might want a different behavior. For example, with sources that attempt to report errors\n * to stopped subscribers, a caller can configure RxJS to throw an unhandled error instead.\n * This will _always_ be called asynchronously on another job in the runtime. This is because\n * we do not want errors thrown in this user-configured handler to interfere with the\n * behavior of the library.\n */\n onStoppedNotification: ((notification: ObservableNotification, subscriber: Subscriber) => void) | null;\n\n /**\n * The promise constructor used by default for {@link Observable#toPromise toPromise} and {@link Observable#forEach forEach}\n * methods.\n *\n * @deprecated As of version 8, RxJS will no longer support this sort of injection of a\n * Promise constructor. If you need a Promise implementation other than native promises,\n * please polyfill/patch Promise as you see appropriate. Will be removed in v8.\n */\n Promise?: PromiseConstructorLike;\n\n /**\n * If true, turns on synchronous error rethrowing, which is a deprecated behavior\n * in v6 and higher. This behavior enables bad patterns like wrapping a subscribe\n * call in a try/catch block. It also enables producer interference, a nasty bug\n * where a multicast can be broken for all observers by a downstream consumer with\n * an unhandled error. DO NOT USE THIS FLAG UNLESS IT'S NEEDED TO BUY TIME\n * FOR MIGRATION REASONS.\n *\n * @deprecated As of version 8, RxJS will no longer support synchronous throwing\n * of unhandled errors. All errors will be thrown on a separate call stack to prevent bad\n * behaviors described above. Will be removed in v8.\n */\n useDeprecatedSynchronousErrorHandling: boolean;\n\n /**\n * If true, enables an as-of-yet undocumented feature from v5: The ability to access\n * `unsubscribe()` via `this` context in `next` functions created in observers passed\n * to `subscribe`.\n *\n * This is being removed because the performance was severely problematic, and it could also cause\n * issues when types other than POJOs are passed to subscribe as subscribers, as they will likely have\n * their `this` context overwritten.\n *\n * @deprecated As of version 8, RxJS will no longer support altering the\n * context of next functions provided as part of an observer to Subscribe. Instead,\n * you will have access to a subscription or a signal or token that will allow you to do things like\n * unsubscribe and test closed status. 
Will be removed in v8.\n */\n useDeprecatedNextContext: boolean;\n}\n", "import type { TimerHandle } from './timerHandle';\ntype SetTimeoutFunction = (handler: () => void, timeout?: number, ...args: any[]) => TimerHandle;\ntype ClearTimeoutFunction = (handle: TimerHandle) => void;\n\ninterface TimeoutProvider {\n setTimeout: SetTimeoutFunction;\n clearTimeout: ClearTimeoutFunction;\n delegate:\n | {\n setTimeout: SetTimeoutFunction;\n clearTimeout: ClearTimeoutFunction;\n }\n | undefined;\n}\n\nexport const timeoutProvider: TimeoutProvider = {\n // When accessing the delegate, use the variable rather than `this` so that\n // the functions can be called without being bound to the provider.\n setTimeout(handler: () => void, timeout?: number, ...args) {\n const { delegate } = timeoutProvider;\n if (delegate?.setTimeout) {\n return delegate.setTimeout(handler, timeout, ...args);\n }\n return setTimeout(handler, timeout, ...args);\n },\n clearTimeout(handle) {\n const { delegate } = timeoutProvider;\n return (delegate?.clearTimeout || clearTimeout)(handle as any);\n },\n delegate: undefined,\n};\n", "import { config } from '../config';\nimport { timeoutProvider } from '../scheduler/timeoutProvider';\n\n/**\n * Handles an error on another job either with the user-configured {@link onUnhandledError},\n * or by throwing it on that new job so it can be picked up by `window.onerror`, `process.on('error')`, etc.\n *\n * This should be called whenever there is an error that is out-of-band with the subscription\n * or when an error hits a terminal boundary of the subscription and no error handler was provided.\n *\n * @param err the error to report\n */\nexport function reportUnhandledError(err: any) {\n timeoutProvider.setTimeout(() => {\n const { onUnhandledError } = config;\n if (onUnhandledError) {\n // Execute the user-configured error handler.\n onUnhandledError(err);\n } else {\n // Throw so it is picked up by the runtime's uncaught error mechanism.\n throw err;\n }\n });\n}\n", "/* tslint:disable:no-empty */\nexport function noop() { }\n", "import { CompleteNotification, NextNotification, ErrorNotification } from './types';\n\n/**\n * A completion object optimized for memory use and created to be the\n * same \"shape\" as other notifications in v8.\n * @internal\n */\nexport const COMPLETE_NOTIFICATION = (() => createNotification('C', undefined, undefined) as CompleteNotification)();\n\n/**\n * Internal use only. Creates an optimized error notification that is the same \"shape\"\n * as other notifications.\n * @internal\n */\nexport function errorNotification(error: any): ErrorNotification {\n return createNotification('E', undefined, error) as any;\n}\n\n/**\n * Internal use only. Creates an optimized next notification that is the same \"shape\"\n * as other notifications.\n * @internal\n */\nexport function nextNotification(value: T) {\n return createNotification('N', value, undefined) as NextNotification;\n}\n\n/**\n * Ensures that all notifications created internally have the same \"shape\" in v8.\n *\n * TODO: This is only exported to support a crazy legacy test in `groupBy`.\n * @internal\n */\nexport function createNotification(kind: 'N' | 'E' | 'C', value: any, error: any) {\n return {\n kind,\n value,\n error,\n };\n}\n", "import { config } from '../config';\n\nlet context: { errorThrown: boolean; error: any } | null = null;\n\n/**\n * Handles dealing with errors for super-gross mode. 
Creates a context, in which\n * any synchronously thrown errors will be passed to {@link captureError}. Which\n * will record the error such that it will be rethrown after the call back is complete.\n * TODO: Remove in v8\n * @param cb An immediately executed function.\n */\nexport function errorContext(cb: () => void) {\n if (config.useDeprecatedSynchronousErrorHandling) {\n const isRoot = !context;\n if (isRoot) {\n context = { errorThrown: false, error: null };\n }\n cb();\n if (isRoot) {\n const { errorThrown, error } = context!;\n context = null;\n if (errorThrown) {\n throw error;\n }\n }\n } else {\n // This is the general non-deprecated path for everyone that\n // isn't crazy enough to use super-gross mode (useDeprecatedSynchronousErrorHandling)\n cb();\n }\n}\n\n/**\n * Captures errors only in super-gross mode.\n * @param err the error to capture\n */\nexport function captureError(err: any) {\n if (config.useDeprecatedSynchronousErrorHandling && context) {\n context.errorThrown = true;\n context.error = err;\n }\n}\n", "import { isFunction } from './util/isFunction';\nimport { Observer, ObservableNotification } from './types';\nimport { isSubscription, Subscription } from './Subscription';\nimport { config } from './config';\nimport { reportUnhandledError } from './util/reportUnhandledError';\nimport { noop } from './util/noop';\nimport { nextNotification, errorNotification, COMPLETE_NOTIFICATION } from './NotificationFactories';\nimport { timeoutProvider } from './scheduler/timeoutProvider';\nimport { captureError } from './util/errorContext';\n\n/**\n * Implements the {@link Observer} interface and extends the\n * {@link Subscription} class. While the {@link Observer} is the public API for\n * consuming the values of an {@link Observable}, all Observers get converted to\n * a Subscriber, in order to provide Subscription-like capabilities such as\n * `unsubscribe`. Subscriber is a common type in RxJS, and crucial for\n * implementing operators, but it is rarely used as a public API.\n *\n * @class Subscriber\n */\nexport class Subscriber extends Subscription implements Observer {\n /**\n * A static factory for a Subscriber, given a (potentially partial) definition\n * of an Observer.\n * @param next The `next` callback of an Observer.\n * @param error The `error` callback of an\n * Observer.\n * @param complete The `complete` callback of an\n * Observer.\n * @return A Subscriber wrapping the (partially defined)\n * Observer represented by the given arguments.\n * @nocollapse\n * @deprecated Do not use. Will be removed in v8. There is no replacement for this\n * method, and there is no reason to be creating instances of `Subscriber` directly.\n * If you have a specific use case, please file an issue.\n */\n static create(next?: (x?: T) => void, error?: (e?: any) => void, complete?: () => void): Subscriber {\n return new SafeSubscriber(next, error, complete);\n }\n\n /** @deprecated Internal implementation detail, do not use directly. Will be made internal in v8. */\n protected isStopped: boolean = false;\n /** @deprecated Internal implementation detail, do not use directly. Will be made internal in v8. */\n protected destination: Subscriber | Observer; // this `any` is the escape hatch to erase extra type param (e.g. R)\n\n /**\n * @deprecated Internal implementation detail, do not use directly. Will be made internal in v8.\n * There is no reason to directly create an instance of Subscriber. 
This type is exported for typings reasons.\n */\n constructor(destination?: Subscriber | Observer) {\n super();\n if (destination) {\n this.destination = destination;\n // Automatically chain subscriptions together here.\n // if destination is a Subscription, then it is a Subscriber.\n if (isSubscription(destination)) {\n destination.add(this);\n }\n } else {\n this.destination = EMPTY_OBSERVER;\n }\n }\n\n /**\n * The {@link Observer} callback to receive notifications of type `next` from\n * the Observable, with a value. The Observable may call this method 0 or more\n * times.\n * @param {T} [value] The `next` value.\n * @return {void}\n */\n next(value?: T): void {\n if (this.isStopped) {\n handleStoppedNotification(nextNotification(value), this);\n } else {\n this._next(value!);\n }\n }\n\n /**\n * The {@link Observer} callback to receive notifications of type `error` from\n * the Observable, with an attached `Error`. Notifies the Observer that\n * the Observable has experienced an error condition.\n * @param {any} [err] The `error` exception.\n * @return {void}\n */\n error(err?: any): void {\n if (this.isStopped) {\n handleStoppedNotification(errorNotification(err), this);\n } else {\n this.isStopped = true;\n this._error(err);\n }\n }\n\n /**\n * The {@link Observer} callback to receive a valueless notification of type\n * `complete` from the Observable. Notifies the Observer that the Observable\n * has finished sending push-based notifications.\n * @return {void}\n */\n complete(): void {\n if (this.isStopped) {\n handleStoppedNotification(COMPLETE_NOTIFICATION, this);\n } else {\n this.isStopped = true;\n this._complete();\n }\n }\n\n unsubscribe(): void {\n if (!this.closed) {\n this.isStopped = true;\n super.unsubscribe();\n this.destination = null!;\n }\n }\n\n protected _next(value: T): void {\n this.destination.next(value);\n }\n\n protected _error(err: any): void {\n try {\n this.destination.error(err);\n } finally {\n this.unsubscribe();\n }\n }\n\n protected _complete(): void {\n try {\n this.destination.complete();\n } finally {\n this.unsubscribe();\n }\n }\n}\n\n/**\n * This bind is captured here because we want to be able to have\n * compatibility with monoid libraries that tend to use a method named\n * `bind`. 
In particular, a library called Monio requires this.\n */\nconst _bind = Function.prototype.bind;\n\nfunction bind any>(fn: Fn, thisArg: any): Fn {\n return _bind.call(fn, thisArg);\n}\n\n/**\n * Internal optimization only, DO NOT EXPOSE.\n * @internal\n */\nclass ConsumerObserver implements Observer {\n constructor(private partialObserver: Partial>) {}\n\n next(value: T): void {\n const { partialObserver } = this;\n if (partialObserver.next) {\n try {\n partialObserver.next(value);\n } catch (error) {\n handleUnhandledError(error);\n }\n }\n }\n\n error(err: any): void {\n const { partialObserver } = this;\n if (partialObserver.error) {\n try {\n partialObserver.error(err);\n } catch (error) {\n handleUnhandledError(error);\n }\n } else {\n handleUnhandledError(err);\n }\n }\n\n complete(): void {\n const { partialObserver } = this;\n if (partialObserver.complete) {\n try {\n partialObserver.complete();\n } catch (error) {\n handleUnhandledError(error);\n }\n }\n }\n}\n\nexport class SafeSubscriber extends Subscriber {\n constructor(\n observerOrNext?: Partial> | ((value: T) => void) | null,\n error?: ((e?: any) => void) | null,\n complete?: (() => void) | null\n ) {\n super();\n\n let partialObserver: Partial>;\n if (isFunction(observerOrNext) || !observerOrNext) {\n // The first argument is a function, not an observer. The next\n // two arguments *could* be observers, or they could be empty.\n partialObserver = {\n next: (observerOrNext ?? undefined) as (((value: T) => void) | undefined),\n error: error ?? undefined,\n complete: complete ?? undefined,\n };\n } else {\n // The first argument is a partial observer.\n let context: any;\n if (this && config.useDeprecatedNextContext) {\n // This is a deprecated path that made `this.unsubscribe()` available in\n // next handler functions passed to subscribe. This only exists behind a flag\n // now, as it is *very* slow.\n context = Object.create(observerOrNext);\n context.unsubscribe = () => this.unsubscribe();\n partialObserver = {\n next: observerOrNext.next && bind(observerOrNext.next, context),\n error: observerOrNext.error && bind(observerOrNext.error, context),\n complete: observerOrNext.complete && bind(observerOrNext.complete, context),\n };\n } else {\n // The \"normal\" path. 
Just use the partial observer directly.\n partialObserver = observerOrNext;\n }\n }\n\n // Wrap the partial observer to ensure it's a full observer, and\n // make sure proper error handling is accounted for.\n this.destination = new ConsumerObserver(partialObserver);\n }\n}\n\nfunction handleUnhandledError(error: any) {\n if (config.useDeprecatedSynchronousErrorHandling) {\n captureError(error);\n } else {\n // Ideal path, we report this as an unhandled error,\n // which is thrown on a new call stack.\n reportUnhandledError(error);\n }\n}\n\n/**\n * An error handler used when no error handler was supplied\n * to the SafeSubscriber -- meaning no error handler was supplied\n * do the `subscribe` call on our observable.\n * @param err The error to handle\n */\nfunction defaultErrorHandler(err: any) {\n throw err;\n}\n\n/**\n * A handler for notifications that cannot be sent to a stopped subscriber.\n * @param notification The notification being sent\n * @param subscriber The stopped subscriber\n */\nfunction handleStoppedNotification(notification: ObservableNotification, subscriber: Subscriber) {\n const { onStoppedNotification } = config;\n onStoppedNotification && timeoutProvider.setTimeout(() => onStoppedNotification(notification, subscriber));\n}\n\n/**\n * The observer used as a stub for subscriptions where the user did not\n * pass any arguments to `subscribe`. Comes with the default error handling\n * behavior.\n */\nexport const EMPTY_OBSERVER: Readonly> & { closed: true } = {\n closed: true,\n next: noop,\n error: defaultErrorHandler,\n complete: noop,\n};\n", "/**\n * Symbol.observable or a string \"@@observable\". Used for interop\n *\n * @deprecated We will no longer be exporting this symbol in upcoming versions of RxJS.\n * Instead polyfill and use Symbol.observable directly *or* use https://www.npmjs.com/package/symbol-observable\n */\nexport const observable: string | symbol = (() => (typeof Symbol === 'function' && Symbol.observable) || '@@observable')();\n", "/**\n * This function takes one parameter and just returns it. Simply put,\n * this is like `(x: T): T => x`.\n *\n * ## Examples\n *\n * This is useful in some cases when using things like `mergeMap`\n *\n * ```ts\n * import { interval, take, map, range, mergeMap, identity } from 'rxjs';\n *\n * const source$ = interval(1000).pipe(take(5));\n *\n * const result$ = source$.pipe(\n * map(i => range(i)),\n * mergeMap(identity) // same as mergeMap(x => x)\n * );\n *\n * result$.subscribe({\n * next: console.log\n * });\n * ```\n *\n * Or when you want to selectively apply an operator\n *\n * ```ts\n * import { interval, take, identity } from 'rxjs';\n *\n * const shouldLimit = () => Math.random() < 0.5;\n *\n * const source$ = interval(1000);\n *\n * const result$ = source$.pipe(shouldLimit() ? 
take(5) : identity);\n *\n * result$.subscribe({\n * next: console.log\n * });\n * ```\n *\n * @param x Any value that is returned by this function\n * @returns The value passed as the first parameter to this function\n */\nexport function identity(x: T): T {\n return x;\n}\n", "import { identity } from './identity';\nimport { UnaryFunction } from '../types';\n\nexport function pipe(): typeof identity;\nexport function pipe(fn1: UnaryFunction): UnaryFunction;\nexport function pipe(fn1: UnaryFunction, fn2: UnaryFunction): UnaryFunction;\nexport function pipe(fn1: UnaryFunction, fn2: UnaryFunction, fn3: UnaryFunction): UnaryFunction;\nexport function pipe(\n fn1: UnaryFunction,\n fn2: UnaryFunction,\n fn3: UnaryFunction,\n fn4: UnaryFunction\n): UnaryFunction;\nexport function pipe(\n fn1: UnaryFunction,\n fn2: UnaryFunction,\n fn3: UnaryFunction,\n fn4: UnaryFunction,\n fn5: UnaryFunction\n): UnaryFunction;\nexport function pipe(\n fn1: UnaryFunction,\n fn2: UnaryFunction,\n fn3: UnaryFunction,\n fn4: UnaryFunction,\n fn5: UnaryFunction,\n fn6: UnaryFunction\n): UnaryFunction;\nexport function pipe(\n fn1: UnaryFunction,\n fn2: UnaryFunction,\n fn3: UnaryFunction,\n fn4: UnaryFunction,\n fn5: UnaryFunction,\n fn6: UnaryFunction,\n fn7: UnaryFunction\n): UnaryFunction;\nexport function pipe(\n fn1: UnaryFunction,\n fn2: UnaryFunction,\n fn3: UnaryFunction,\n fn4: UnaryFunction,\n fn5: UnaryFunction,\n fn6: UnaryFunction,\n fn7: UnaryFunction,\n fn8: UnaryFunction\n): UnaryFunction;\nexport function pipe(\n fn1: UnaryFunction,\n fn2: UnaryFunction,\n fn3: UnaryFunction,\n fn4: UnaryFunction,\n fn5: UnaryFunction,\n fn6: UnaryFunction,\n fn7: UnaryFunction,\n fn8: UnaryFunction,\n fn9: UnaryFunction\n): UnaryFunction;\nexport function pipe(\n fn1: UnaryFunction,\n fn2: UnaryFunction,\n fn3: UnaryFunction,\n fn4: UnaryFunction,\n fn5: UnaryFunction,\n fn6: UnaryFunction,\n fn7: UnaryFunction,\n fn8: UnaryFunction,\n fn9: UnaryFunction,\n ...fns: UnaryFunction[]\n): UnaryFunction;\n\n/**\n * pipe() can be called on one or more functions, each of which can take one argument (\"UnaryFunction\")\n * and uses it to return a value.\n * It returns a function that takes one argument, passes it to the first UnaryFunction, and then\n * passes the result to the next one, passes that result to the next one, and so on. \n */\nexport function pipe(...fns: Array>): UnaryFunction {\n return pipeFromArray(fns);\n}\n\n/** @internal */\nexport function pipeFromArray(fns: Array>): UnaryFunction {\n if (fns.length === 0) {\n return identity as UnaryFunction;\n }\n\n if (fns.length === 1) {\n return fns[0];\n }\n\n return function piped(input: T): R {\n return fns.reduce((prev: any, fn: UnaryFunction) => fn(prev), input as any);\n };\n}\n", "import { Operator } from './Operator';\nimport { SafeSubscriber, Subscriber } from './Subscriber';\nimport { isSubscription, Subscription } from './Subscription';\nimport { TeardownLogic, OperatorFunction, Subscribable, Observer } from './types';\nimport { observable as Symbol_observable } from './symbol/observable';\nimport { pipeFromArray } from './util/pipe';\nimport { config } from './config';\nimport { isFunction } from './util/isFunction';\nimport { errorContext } from './util/errorContext';\n\n/**\n * A representation of any set of values over any amount of time. This is the most basic building block\n * of RxJS.\n *\n * @class Observable\n */\nexport class Observable implements Subscribable {\n /**\n * @deprecated Internal implementation detail, do not use directly. 
Will be made internal in v8.\n */\n source: Observable | undefined;\n\n /**\n * @deprecated Internal implementation detail, do not use directly. Will be made internal in v8.\n */\n operator: Operator | undefined;\n\n /**\n * @constructor\n * @param {Function} subscribe the function that is called when the Observable is\n * initially subscribed to. This function is given a Subscriber, to which new values\n * can be `next`ed, or an `error` method can be called to raise an error, or\n * `complete` can be called to notify of a successful completion.\n */\n constructor(subscribe?: (this: Observable, subscriber: Subscriber) => TeardownLogic) {\n if (subscribe) {\n this._subscribe = subscribe;\n }\n }\n\n // HACK: Since TypeScript inherits static properties too, we have to\n // fight against TypeScript here so Subject can have a different static create signature\n /**\n * Creates a new Observable by calling the Observable constructor\n * @owner Observable\n * @method create\n * @param {Function} subscribe? the subscriber function to be passed to the Observable constructor\n * @return {Observable} a new observable\n * @nocollapse\n * @deprecated Use `new Observable()` instead. Will be removed in v8.\n */\n static create: (...args: any[]) => any = (subscribe?: (subscriber: Subscriber) => TeardownLogic) => {\n return new Observable(subscribe);\n };\n\n /**\n * Creates a new Observable, with this Observable instance as the source, and the passed\n * operator defined as the new observable's operator.\n * @method lift\n * @param operator the operator defining the operation to take on the observable\n * @return a new observable with the Operator applied\n * @deprecated Internal implementation detail, do not use directly. Will be made internal in v8.\n * If you have implemented an operator using `lift`, it is recommended that you create an\n * operator by simply returning `new Observable()` directly. See \"Creating new operators from\n * scratch\" section here: https://rxjs.dev/guide/operators\n */\n lift(operator?: Operator): Observable {\n const observable = new Observable();\n observable.source = this;\n observable.operator = operator;\n return observable;\n }\n\n subscribe(observerOrNext?: Partial> | ((value: T) => void)): Subscription;\n /** @deprecated Instead of passing separate callback arguments, use an observer argument. Signatures taking separate callback arguments will be removed in v8. Details: https://rxjs.dev/deprecations/subscribe-arguments */\n subscribe(next?: ((value: T) => void) | null, error?: ((error: any) => void) | null, complete?: (() => void) | null): Subscription;\n /**\n * Invokes an execution of an Observable and registers Observer handlers for notifications it will emit.\n *\n * Use it when you have all these Observables, but still nothing is happening.\n *\n * `subscribe` is not a regular operator, but a method that calls Observable's internal `subscribe` function. It\n * might be for example a function that you passed to Observable's constructor, but most of the time it is\n * a library implementation, which defines what will be emitted by an Observable, and when it be will emitted. This means\n * that calling `subscribe` is actually the moment when Observable starts its work, not when it is created, as it is often\n * the thought.\n *\n * Apart from starting the execution of an Observable, this method allows you to listen for values\n * that an Observable emits, as well as for when it completes or errors. 
You can achieve this in two\n * of the following ways.\n *\n * The first way is creating an object that implements {@link Observer} interface. It should have methods\n * defined by that interface, but note that it should be just a regular JavaScript object, which you can create\n * yourself in any way you want (ES6 class, classic function constructor, object literal etc.). In particular, do\n * not attempt to use any RxJS implementation details to create Observers - you don't need them. Remember also\n * that your object does not have to implement all methods. If you find yourself creating a method that doesn't\n * do anything, you can simply omit it. Note however, if the `error` method is not provided and an error happens,\n * it will be thrown asynchronously. Errors thrown asynchronously cannot be caught using `try`/`catch`. Instead,\n * use the {@link onUnhandledError} configuration option or use a runtime handler (like `window.onerror` or\n * `process.on('error)`) to be notified of unhandled errors. Because of this, it's recommended that you provide\n * an `error` method to avoid missing thrown errors.\n *\n * The second way is to give up on Observer object altogether and simply provide callback functions in place of its methods.\n * This means you can provide three functions as arguments to `subscribe`, where the first function is equivalent\n * of a `next` method, the second of an `error` method and the third of a `complete` method. Just as in case of an Observer,\n * if you do not need to listen for something, you can omit a function by passing `undefined` or `null`,\n * since `subscribe` recognizes these functions by where they were placed in function call. When it comes\n * to the `error` function, as with an Observer, if not provided, errors emitted by an Observable will be thrown asynchronously.\n *\n * You can, however, subscribe with no parameters at all. This may be the case where you're not interested in terminal events\n * and you also handled emissions internally by using operators (e.g. using `tap`).\n *\n * Whichever style of calling `subscribe` you use, in both cases it returns a Subscription object.\n * This object allows you to call `unsubscribe` on it, which in turn will stop the work that an Observable does and will clean\n * up all resources that an Observable used. Note that cancelling a subscription will not call `complete` callback\n * provided to `subscribe` function, which is reserved for a regular completion signal that comes from an Observable.\n *\n * Remember that callbacks provided to `subscribe` are not guaranteed to be called asynchronously.\n * It is an Observable itself that decides when these functions will be called. For example {@link of}\n * by default emits all its values synchronously. 
Always check documentation for how given Observable\n * will behave when subscribed and if its default behavior can be modified with a `scheduler`.\n *\n * #### Examples\n *\n * Subscribe with an {@link guide/observer Observer}\n *\n * ```ts\n * import { of } from 'rxjs';\n *\n * const sumObserver = {\n * sum: 0,\n * next(value) {\n * console.log('Adding: ' + value);\n * this.sum = this.sum + value;\n * },\n * error() {\n * // We actually could just remove this method,\n * // since we do not really care about errors right now.\n * },\n * complete() {\n * console.log('Sum equals: ' + this.sum);\n * }\n * };\n *\n * of(1, 2, 3) // Synchronously emits 1, 2, 3 and then completes.\n * .subscribe(sumObserver);\n *\n * // Logs:\n * // 'Adding: 1'\n * // 'Adding: 2'\n * // 'Adding: 3'\n * // 'Sum equals: 6'\n * ```\n *\n * Subscribe with functions ({@link deprecations/subscribe-arguments deprecated})\n *\n * ```ts\n * import { of } from 'rxjs'\n *\n * let sum = 0;\n *\n * of(1, 2, 3).subscribe(\n * value => {\n * console.log('Adding: ' + value);\n * sum = sum + value;\n * },\n * undefined,\n * () => console.log('Sum equals: ' + sum)\n * );\n *\n * // Logs:\n * // 'Adding: 1'\n * // 'Adding: 2'\n * // 'Adding: 3'\n * // 'Sum equals: 6'\n * ```\n *\n * Cancel a subscription\n *\n * ```ts\n * import { interval } from 'rxjs';\n *\n * const subscription = interval(1000).subscribe({\n * next(num) {\n * console.log(num)\n * },\n * complete() {\n * // Will not be called, even when cancelling subscription.\n * console.log('completed!');\n * }\n * });\n *\n * setTimeout(() => {\n * subscription.unsubscribe();\n * console.log('unsubscribed!');\n * }, 2500);\n *\n * // Logs:\n * // 0 after 1s\n * // 1 after 2s\n * // 'unsubscribed!' after 2.5s\n * ```\n *\n * @param {Observer|Function} observerOrNext (optional) Either an observer with methods to be called,\n * or the first of three possible handlers, which is the handler for each value emitted from the subscribed\n * Observable.\n * @param {Function} error (optional) A handler for a terminal event resulting from an error. If no error handler is provided,\n * the error will be thrown asynchronously as unhandled.\n * @param {Function} complete (optional) A handler for a terminal event resulting from successful completion.\n * @return {Subscription} a subscription reference to the registered handlers\n * @method subscribe\n */\n subscribe(\n observerOrNext?: Partial> | ((value: T) => void) | null,\n error?: ((error: any) => void) | null,\n complete?: (() => void) | null\n ): Subscription {\n const subscriber = isSubscriber(observerOrNext) ? observerOrNext : new SafeSubscriber(observerOrNext, error, complete);\n\n errorContext(() => {\n const { operator, source } = this;\n subscriber.add(\n operator\n ? // We're dealing with a subscription in the\n // operator chain to one of our lifted operators.\n operator.call(subscriber, source)\n : source\n ? // If `source` has a value, but `operator` does not, something that\n // had intimate knowledge of our API, like our `Subject`, must have\n // set it. 
We're going to just call `_subscribe` directly.\n this._subscribe(subscriber)\n : // In all other cases, we're likely wrapping a user-provided initializer\n // function, so we need to catch errors and handle them appropriately.\n this._trySubscribe(subscriber)\n );\n });\n\n return subscriber;\n }\n\n /** @internal */\n protected _trySubscribe(sink: Subscriber): TeardownLogic {\n try {\n return this._subscribe(sink);\n } catch (err) {\n // We don't need to return anything in this case,\n // because it's just going to try to `add()` to a subscription\n // above.\n sink.error(err);\n }\n }\n\n /**\n * Used as a NON-CANCELLABLE means of subscribing to an observable, for use with\n * APIs that expect promises, like `async/await`. You cannot unsubscribe from this.\n *\n * **WARNING**: Only use this with observables you *know* will complete. If the source\n * observable does not complete, you will end up with a promise that is hung up, and\n * potentially all of the state of an async function hanging out in memory. To avoid\n * this situation, look into adding something like {@link timeout}, {@link take},\n * {@link takeWhile}, or {@link takeUntil} amongst others.\n *\n * #### Example\n *\n * ```ts\n * import { interval, take } from 'rxjs';\n *\n * const source$ = interval(1000).pipe(take(4));\n *\n * async function getTotal() {\n * let total = 0;\n *\n * await source$.forEach(value => {\n * total += value;\n * console.log('observable -> ' + value);\n * });\n *\n * return total;\n * }\n *\n * getTotal().then(\n * total => console.log('Total: ' + total)\n * );\n *\n * // Expected:\n * // 'observable -> 0'\n * // 'observable -> 1'\n * // 'observable -> 2'\n * // 'observable -> 3'\n * // 'Total: 6'\n * ```\n *\n * @param next a handler for each value emitted by the observable\n * @return a promise that either resolves on observable completion or\n * rejects with the handled error\n */\n forEach(next: (value: T) => void): Promise;\n\n /**\n * @param next a handler for each value emitted by the observable\n * @param promiseCtor a constructor function used to instantiate the Promise\n * @return a promise that either resolves on observable completion or\n * rejects with the handled error\n * @deprecated Passing a Promise constructor will no longer be available\n * in upcoming versions of RxJS. This is because it adds weight to the library, for very\n * little benefit. If you need this functionality, it is recommended that you either\n * polyfill Promise, or you create an adapter to convert the returned native promise\n * to whatever promise implementation you wanted. 
Will be removed in v8.\n */\n forEach(next: (value: T) => void, promiseCtor: PromiseConstructorLike): Promise;\n\n forEach(next: (value: T) => void, promiseCtor?: PromiseConstructorLike): Promise {\n promiseCtor = getPromiseCtor(promiseCtor);\n\n return new promiseCtor((resolve, reject) => {\n const subscriber = new SafeSubscriber({\n next: (value) => {\n try {\n next(value);\n } catch (err) {\n reject(err);\n subscriber.unsubscribe();\n }\n },\n error: reject,\n complete: resolve,\n });\n this.subscribe(subscriber);\n }) as Promise;\n }\n\n /** @internal */\n protected _subscribe(subscriber: Subscriber): TeardownLogic {\n return this.source?.subscribe(subscriber);\n }\n\n /**\n * An interop point defined by the es7-observable spec https://github.com/zenparsing/es-observable\n * @method Symbol.observable\n * @return {Observable} this instance of the observable\n */\n [Symbol_observable]() {\n return this;\n }\n\n /* tslint:disable:max-line-length */\n pipe(): Observable;\n pipe(op1: OperatorFunction): Observable;\n pipe(op1: OperatorFunction, op2: OperatorFunction): Observable;\n pipe(op1: OperatorFunction, op2: OperatorFunction, op3: OperatorFunction): Observable;\n pipe(\n op1: OperatorFunction,\n op2: OperatorFunction,\n op3: OperatorFunction,\n op4: OperatorFunction\n ): Observable;\n pipe(\n op1: OperatorFunction,\n op2: OperatorFunction,\n op3: OperatorFunction,\n op4: OperatorFunction,\n op5: OperatorFunction\n ): Observable;\n pipe(\n op1: OperatorFunction,\n op2: OperatorFunction,\n op3: OperatorFunction,\n op4: OperatorFunction,\n op5: OperatorFunction,\n op6: OperatorFunction\n ): Observable;\n pipe(\n op1: OperatorFunction,\n op2: OperatorFunction,\n op3: OperatorFunction,\n op4: OperatorFunction,\n op5: OperatorFunction,\n op6: OperatorFunction,\n op7: OperatorFunction\n ): Observable;\n pipe(\n op1: OperatorFunction,\n op2: OperatorFunction,\n op3: OperatorFunction,\n op4: OperatorFunction,\n op5: OperatorFunction,\n op6: OperatorFunction,\n op7: OperatorFunction,\n op8: OperatorFunction\n ): Observable;\n pipe(\n op1: OperatorFunction,\n op2: OperatorFunction,\n op3: OperatorFunction,\n op4: OperatorFunction,\n op5: OperatorFunction,\n op6: OperatorFunction,\n op7: OperatorFunction,\n op8: OperatorFunction,\n op9: OperatorFunction\n ): Observable;\n pipe(\n op1: OperatorFunction,\n op2: OperatorFunction,\n op3: OperatorFunction,\n op4: OperatorFunction,\n op5: OperatorFunction,\n op6: OperatorFunction,\n op7: OperatorFunction,\n op8: OperatorFunction,\n op9: OperatorFunction,\n ...operations: OperatorFunction[]\n ): Observable;\n /* tslint:enable:max-line-length */\n\n /**\n * Used to stitch together functional operators into a chain.\n * @method pipe\n * @return {Observable} the Observable result of all of the operators having\n * been called in the order they were passed in.\n *\n * ## Example\n *\n * ```ts\n * import { interval, filter, map, scan } from 'rxjs';\n *\n * interval(1000)\n * .pipe(\n * filter(x => x % 2 === 0),\n * map(x => x + x),\n * scan((acc, x) => acc + x)\n * )\n * .subscribe(x => console.log(x));\n * ```\n */\n pipe(...operations: OperatorFunction[]): Observable {\n return pipeFromArray(operations)(this);\n }\n\n /* tslint:disable:max-line-length */\n /** @deprecated Replaced with {@link firstValueFrom} and {@link lastValueFrom}. Will be removed in v8. Details: https://rxjs.dev/deprecations/to-promise */\n toPromise(): Promise;\n /** @deprecated Replaced with {@link firstValueFrom} and {@link lastValueFrom}. Will be removed in v8. 
Details: https://rxjs.dev/deprecations/to-promise */\n toPromise(PromiseCtor: typeof Promise): Promise;\n /** @deprecated Replaced with {@link firstValueFrom} and {@link lastValueFrom}. Will be removed in v8. Details: https://rxjs.dev/deprecations/to-promise */\n toPromise(PromiseCtor: PromiseConstructorLike): Promise;\n /* tslint:enable:max-line-length */\n\n /**\n * Subscribe to this Observable and get a Promise resolving on\n * `complete` with the last emission (if any).\n *\n * **WARNING**: Only use this with observables you *know* will complete. If the source\n * observable does not complete, you will end up with a promise that is hung up, and\n * potentially all of the state of an async function hanging out in memory. To avoid\n * this situation, look into adding something like {@link timeout}, {@link take},\n * {@link takeWhile}, or {@link takeUntil} amongst others.\n *\n * @method toPromise\n * @param [promiseCtor] a constructor function used to instantiate\n * the Promise\n * @return A Promise that resolves with the last value emit, or\n * rejects on an error. If there were no emissions, Promise\n * resolves with undefined.\n * @deprecated Replaced with {@link firstValueFrom} and {@link lastValueFrom}. Will be removed in v8. Details: https://rxjs.dev/deprecations/to-promise\n */\n toPromise(promiseCtor?: PromiseConstructorLike): Promise {\n promiseCtor = getPromiseCtor(promiseCtor);\n\n return new promiseCtor((resolve, reject) => {\n let value: T | undefined;\n this.subscribe(\n (x: T) => (value = x),\n (err: any) => reject(err),\n () => resolve(value)\n );\n }) as Promise;\n }\n}\n\n/**\n * Decides between a passed promise constructor from consuming code,\n * A default configured promise constructor, and the native promise\n * constructor and returns it. If nothing can be found, it will throw\n * an error.\n * @param promiseCtor The optional promise constructor to passed by consuming code\n */\nfunction getPromiseCtor(promiseCtor: PromiseConstructorLike | undefined) {\n return promiseCtor ?? config.Promise ?? Promise;\n}\n\nfunction isObserver(value: any): value is Observer {\n return value && isFunction(value.next) && isFunction(value.error) && isFunction(value.complete);\n}\n\nfunction isSubscriber(value: any): value is Subscriber {\n return (value && value instanceof Subscriber) || (isObserver(value) && isSubscription(value));\n}\n", "import { Observable } from '../Observable';\nimport { Subscriber } from '../Subscriber';\nimport { OperatorFunction } from '../types';\nimport { isFunction } from './isFunction';\n\n/**\n * Used to determine if an object is an Observable with a lift function.\n */\nexport function hasLift(source: any): source is { lift: InstanceType['lift'] } {\n return isFunction(source?.lift);\n}\n\n/**\n * Creates an `OperatorFunction`. 
Used to define operators throughout the library in a concise way.\n * @param init The logic to connect the liftedSource to the subscriber at the moment of subscription.\n */\nexport function operate(\n init: (liftedSource: Observable, subscriber: Subscriber) => (() => void) | void\n): OperatorFunction {\n return (source: Observable) => {\n if (hasLift(source)) {\n return source.lift(function (this: Subscriber, liftedSource: Observable) {\n try {\n return init(liftedSource, this);\n } catch (err) {\n this.error(err);\n }\n });\n }\n throw new TypeError('Unable to lift unknown Observable type');\n };\n}\n", "import { Subscriber } from '../Subscriber';\n\n/**\n * Creates an instance of an `OperatorSubscriber`.\n * @param destination The downstream subscriber.\n * @param onNext Handles next values, only called if this subscriber is not stopped or closed. Any\n * error that occurs in this function is caught and sent to the `error` method of this subscriber.\n * @param onError Handles errors from the subscription, any errors that occur in this handler are caught\n * and send to the `destination` error handler.\n * @param onComplete Handles completion notification from the subscription. Any errors that occur in\n * this handler are sent to the `destination` error handler.\n * @param onFinalize Additional teardown logic here. This will only be called on teardown if the\n * subscriber itself is not already closed. This is called after all other teardown logic is executed.\n */\nexport function createOperatorSubscriber(\n destination: Subscriber,\n onNext?: (value: T) => void,\n onComplete?: () => void,\n onError?: (err: any) => void,\n onFinalize?: () => void\n): Subscriber {\n return new OperatorSubscriber(destination, onNext, onComplete, onError, onFinalize);\n}\n\n/**\n * A generic helper for allowing operators to be created with a Subscriber and\n * use closures to capture necessary state from the operator function itself.\n */\nexport class OperatorSubscriber extends Subscriber {\n /**\n * Creates an instance of an `OperatorSubscriber`.\n * @param destination The downstream subscriber.\n * @param onNext Handles next values, only called if this subscriber is not stopped or closed. Any\n * error that occurs in this function is caught and sent to the `error` method of this subscriber.\n * @param onError Handles errors from the subscription, any errors that occur in this handler are caught\n * and send to the `destination` error handler.\n * @param onComplete Handles completion notification from the subscription. Any errors that occur in\n * this handler are sent to the `destination` error handler.\n * @param onFinalize Additional finalization logic here. This will only be called on finalization if the\n * subscriber itself is not already closed. This is called after all other finalization logic is executed.\n * @param shouldUnsubscribe An optional check to see if an unsubscribe call should truly unsubscribe.\n * NOTE: This currently **ONLY** exists to support the strange behavior of {@link groupBy}, where unsubscription\n * to the resulting observable does not actually disconnect from the source if there are active subscriptions\n * to any grouped observable. 
(DO NOT EXPOSE OR USE EXTERNALLY!!!)\n */\n constructor(\n destination: Subscriber,\n onNext?: (value: T) => void,\n onComplete?: () => void,\n onError?: (err: any) => void,\n private onFinalize?: () => void,\n private shouldUnsubscribe?: () => boolean\n ) {\n // It's important - for performance reasons - that all of this class's\n // members are initialized and that they are always initialized in the same\n // order. This will ensure that all OperatorSubscriber instances have the\n // same hidden class in V8. This, in turn, will help keep the number of\n // hidden classes involved in property accesses within the base class as\n // low as possible. If the number of hidden classes involved exceeds four,\n // the property accesses will become megamorphic and performance penalties\n // will be incurred - i.e. inline caches won't be used.\n //\n // The reasons for ensuring all instances have the same hidden class are\n // further discussed in this blog post from Benedikt Meurer:\n // https://benediktmeurer.de/2018/03/23/impact-of-polymorphism-on-component-based-frameworks-like-react/\n super(destination);\n this._next = onNext\n ? function (this: OperatorSubscriber, value: T) {\n try {\n onNext(value);\n } catch (err) {\n destination.error(err);\n }\n }\n : super._next;\n this._error = onError\n ? function (this: OperatorSubscriber, err: any) {\n try {\n onError(err);\n } catch (err) {\n // Send any errors that occur down stream.\n destination.error(err);\n } finally {\n // Ensure finalization.\n this.unsubscribe();\n }\n }\n : super._error;\n this._complete = onComplete\n ? function (this: OperatorSubscriber) {\n try {\n onComplete();\n } catch (err) {\n // Send any errors that occur down stream.\n destination.error(err);\n } finally {\n // Ensure finalization.\n this.unsubscribe();\n }\n }\n : super._complete;\n }\n\n unsubscribe() {\n if (!this.shouldUnsubscribe || this.shouldUnsubscribe()) {\n const { closed } = this;\n super.unsubscribe();\n // Execute additional teardown if we have any and we didn't already do so.\n !closed && this.onFinalize?.();\n }\n }\n}\n", "import { Subscription } from '../Subscription';\n\ninterface AnimationFrameProvider {\n schedule(callback: FrameRequestCallback): Subscription;\n requestAnimationFrame: typeof requestAnimationFrame;\n cancelAnimationFrame: typeof cancelAnimationFrame;\n delegate:\n | {\n requestAnimationFrame: typeof requestAnimationFrame;\n cancelAnimationFrame: typeof cancelAnimationFrame;\n }\n | undefined;\n}\n\nexport const animationFrameProvider: AnimationFrameProvider = {\n // When accessing the delegate, use the variable rather than `this` so that\n // the functions can be called without being bound to the provider.\n schedule(callback) {\n let request = requestAnimationFrame;\n let cancel: typeof cancelAnimationFrame | undefined = cancelAnimationFrame;\n const { delegate } = animationFrameProvider;\n if (delegate) {\n request = delegate.requestAnimationFrame;\n cancel = delegate.cancelAnimationFrame;\n }\n const handle = request((timestamp) => {\n // Clear the cancel function. 
The request has been fulfilled, so\n // attempting to cancel the request upon unsubscription would be\n // pointless.\n cancel = undefined;\n callback(timestamp);\n });\n return new Subscription(() => cancel?.(handle));\n },\n requestAnimationFrame(...args) {\n const { delegate } = animationFrameProvider;\n return (delegate?.requestAnimationFrame || requestAnimationFrame)(...args);\n },\n cancelAnimationFrame(...args) {\n const { delegate } = animationFrameProvider;\n return (delegate?.cancelAnimationFrame || cancelAnimationFrame)(...args);\n },\n delegate: undefined,\n};\n", "import { createErrorClass } from './createErrorClass';\n\nexport interface ObjectUnsubscribedError extends Error {}\n\nexport interface ObjectUnsubscribedErrorCtor {\n /**\n * @deprecated Internal implementation detail. Do not construct error instances.\n * Cannot be tagged as internal: https://github.com/ReactiveX/rxjs/issues/6269\n */\n new (): ObjectUnsubscribedError;\n}\n\n/**\n * An error thrown when an action is invalid because the object has been\n * unsubscribed.\n *\n * @see {@link Subject}\n * @see {@link BehaviorSubject}\n *\n * @class ObjectUnsubscribedError\n */\nexport const ObjectUnsubscribedError: ObjectUnsubscribedErrorCtor = createErrorClass(\n (_super) =>\n function ObjectUnsubscribedErrorImpl(this: any) {\n _super(this);\n this.name = 'ObjectUnsubscribedError';\n this.message = 'object unsubscribed';\n }\n);\n", "import { Operator } from './Operator';\nimport { Observable } from './Observable';\nimport { Subscriber } from './Subscriber';\nimport { Subscription, EMPTY_SUBSCRIPTION } from './Subscription';\nimport { Observer, SubscriptionLike, TeardownLogic } from './types';\nimport { ObjectUnsubscribedError } from './util/ObjectUnsubscribedError';\nimport { arrRemove } from './util/arrRemove';\nimport { errorContext } from './util/errorContext';\n\n/**\n * A Subject is a special type of Observable that allows values to be\n * multicasted to many Observers. Subjects are like EventEmitters.\n *\n * Every Subject is an Observable and an Observer. You can subscribe to a\n * Subject, and you can call next to feed values as well as error and complete.\n */\nexport class Subject extends Observable implements SubscriptionLike {\n closed = false;\n\n private currentObservers: Observer[] | null = null;\n\n /** @deprecated Internal implementation detail, do not use directly. Will be made internal in v8. */\n observers: Observer[] = [];\n /** @deprecated Internal implementation detail, do not use directly. Will be made internal in v8. */\n isStopped = false;\n /** @deprecated Internal implementation detail, do not use directly. Will be made internal in v8. */\n hasError = false;\n /** @deprecated Internal implementation detail, do not use directly. Will be made internal in v8. */\n thrownError: any = null;\n\n /**\n * Creates a \"subject\" by basically gluing an observer to an observable.\n *\n * @nocollapse\n * @deprecated Recommended you do not use. Will be removed at some point in the future. Plans for replacement still under discussion.\n */\n static create: (...args: any[]) => any = (destination: Observer, source: Observable): AnonymousSubject => {\n return new AnonymousSubject(destination, source);\n };\n\n constructor() {\n // NOTE: This must be here to obscure Observable's constructor.\n super();\n }\n\n /** @deprecated Internal implementation detail, do not use directly. Will be made internal in v8. 
*/\n lift(operator: Operator): Observable {\n const subject = new AnonymousSubject(this, this);\n subject.operator = operator as any;\n return subject as any;\n }\n\n /** @internal */\n protected _throwIfClosed() {\n if (this.closed) {\n throw new ObjectUnsubscribedError();\n }\n }\n\n next(value: T) {\n errorContext(() => {\n this._throwIfClosed();\n if (!this.isStopped) {\n if (!this.currentObservers) {\n this.currentObservers = Array.from(this.observers);\n }\n for (const observer of this.currentObservers) {\n observer.next(value);\n }\n }\n });\n }\n\n error(err: any) {\n errorContext(() => {\n this._throwIfClosed();\n if (!this.isStopped) {\n this.hasError = this.isStopped = true;\n this.thrownError = err;\n const { observers } = this;\n while (observers.length) {\n observers.shift()!.error(err);\n }\n }\n });\n }\n\n complete() {\n errorContext(() => {\n this._throwIfClosed();\n if (!this.isStopped) {\n this.isStopped = true;\n const { observers } = this;\n while (observers.length) {\n observers.shift()!.complete();\n }\n }\n });\n }\n\n unsubscribe() {\n this.isStopped = this.closed = true;\n this.observers = this.currentObservers = null!;\n }\n\n get observed() {\n return this.observers?.length > 0;\n }\n\n /** @internal */\n protected _trySubscribe(subscriber: Subscriber): TeardownLogic {\n this._throwIfClosed();\n return super._trySubscribe(subscriber);\n }\n\n /** @internal */\n protected _subscribe(subscriber: Subscriber): Subscription {\n this._throwIfClosed();\n this._checkFinalizedStatuses(subscriber);\n return this._innerSubscribe(subscriber);\n }\n\n /** @internal */\n protected _innerSubscribe(subscriber: Subscriber) {\n const { hasError, isStopped, observers } = this;\n if (hasError || isStopped) {\n return EMPTY_SUBSCRIPTION;\n }\n this.currentObservers = null;\n observers.push(subscriber);\n return new Subscription(() => {\n this.currentObservers = null;\n arrRemove(observers, subscriber);\n });\n }\n\n /** @internal */\n protected _checkFinalizedStatuses(subscriber: Subscriber) {\n const { hasError, thrownError, isStopped } = this;\n if (hasError) {\n subscriber.error(thrownError);\n } else if (isStopped) {\n subscriber.complete();\n }\n }\n\n /**\n * Creates a new Observable with this Subject as the source. You can do this\n * to create custom Observer-side logic of the Subject and conceal it from\n * code that uses the Observable.\n * @return {Observable} Observable that the Subject casts to\n */\n asObservable(): Observable {\n const observable: any = new Observable();\n observable.source = this;\n return observable;\n }\n}\n\n/**\n * @class AnonymousSubject\n */\nexport class AnonymousSubject extends Subject {\n constructor(\n /** @deprecated Internal implementation detail, do not use directly. Will be made internal in v8. */\n public destination?: Observer,\n source?: Observable\n ) {\n super();\n this.source = source;\n }\n\n next(value: T) {\n this.destination?.next?.(value);\n }\n\n error(err: any) {\n this.destination?.error?.(err);\n }\n\n complete() {\n this.destination?.complete?.();\n }\n\n /** @internal */\n protected _subscribe(subscriber: Subscriber): Subscription {\n return this.source?.subscribe(subscriber) ?? 
EMPTY_SUBSCRIPTION;\n }\n}\n", "import { Subject } from './Subject';\nimport { Subscriber } from './Subscriber';\nimport { Subscription } from './Subscription';\n\n/**\n * A variant of Subject that requires an initial value and emits its current\n * value whenever it is subscribed to.\n *\n * @class BehaviorSubject\n */\nexport class BehaviorSubject extends Subject {\n constructor(private _value: T) {\n super();\n }\n\n get value(): T {\n return this.getValue();\n }\n\n /** @internal */\n protected _subscribe(subscriber: Subscriber): Subscription {\n const subscription = super._subscribe(subscriber);\n !subscription.closed && subscriber.next(this._value);\n return subscription;\n }\n\n getValue(): T {\n const { hasError, thrownError, _value } = this;\n if (hasError) {\n throw thrownError;\n }\n this._throwIfClosed();\n return _value;\n }\n\n next(value: T): void {\n super.next((this._value = value));\n }\n}\n", "import { TimestampProvider } from '../types';\n\ninterface DateTimestampProvider extends TimestampProvider {\n delegate: TimestampProvider | undefined;\n}\n\nexport const dateTimestampProvider: DateTimestampProvider = {\n now() {\n // Use the variable rather than `this` so that the function can be called\n // without being bound to the provider.\n return (dateTimestampProvider.delegate || Date).now();\n },\n delegate: undefined,\n};\n", "import { Subject } from './Subject';\nimport { TimestampProvider } from './types';\nimport { Subscriber } from './Subscriber';\nimport { Subscription } from './Subscription';\nimport { dateTimestampProvider } from './scheduler/dateTimestampProvider';\n\n/**\n * A variant of {@link Subject} that \"replays\" old values to new subscribers by emitting them when they first subscribe.\n *\n * `ReplaySubject` has an internal buffer that will store a specified number of values that it has observed. Like `Subject`,\n * `ReplaySubject` \"observes\" values by having them passed to its `next` method. When it observes a value, it will store that\n * value for a time determined by the configuration of the `ReplaySubject`, as passed to its constructor.\n *\n * When a new subscriber subscribes to the `ReplaySubject` instance, it will synchronously emit all values in its buffer in\n * a First-In-First-Out (FIFO) manner. The `ReplaySubject` will also complete, if it has observed completion; and it will\n * error if it has observed an error.\n *\n * There are two main configuration items to be concerned with:\n *\n * 1. `bufferSize` - This will determine how many items are stored in the buffer, defaults to infinite.\n * 2. `windowTime` - The amount of time to hold a value in the buffer before removing it from the buffer.\n *\n * Both configurations may exist simultaneously. So if you would like to buffer a maximum of 3 values, as long as the values\n * are less than 2 seconds old, you could do so with a `new ReplaySubject(3, 2000)`.\n *\n * ### Differences with BehaviorSubject\n *\n * `BehaviorSubject` is similar to `new ReplaySubject(1)`, with a couple of exceptions:\n *\n * 1. `BehaviorSubject` comes \"primed\" with a single value upon construction.\n * 2. 
`ReplaySubject` will replay values, even after observing an error, where `BehaviorSubject` will not.\n *\n * @see {@link Subject}\n * @see {@link BehaviorSubject}\n * @see {@link shareReplay}\n */\nexport class ReplaySubject extends Subject {\n private _buffer: (T | number)[] = [];\n private _infiniteTimeWindow = true;\n\n /**\n * @param bufferSize The size of the buffer to replay on subscription\n * @param windowTime The amount of time the buffered items will stay buffered\n * @param timestampProvider An object with a `now()` method that provides the current timestamp. This is used to\n * calculate the amount of time something has been buffered.\n */\n constructor(\n private _bufferSize = Infinity,\n private _windowTime = Infinity,\n private _timestampProvider: TimestampProvider = dateTimestampProvider\n ) {\n super();\n this._infiniteTimeWindow = _windowTime === Infinity;\n this._bufferSize = Math.max(1, _bufferSize);\n this._windowTime = Math.max(1, _windowTime);\n }\n\n next(value: T): void {\n const { isStopped, _buffer, _infiniteTimeWindow, _timestampProvider, _windowTime } = this;\n if (!isStopped) {\n _buffer.push(value);\n !_infiniteTimeWindow && _buffer.push(_timestampProvider.now() + _windowTime);\n }\n this._trimBuffer();\n super.next(value);\n }\n\n /** @internal */\n protected _subscribe(subscriber: Subscriber): Subscription {\n this._throwIfClosed();\n this._trimBuffer();\n\n const subscription = this._innerSubscribe(subscriber);\n\n const { _infiniteTimeWindow, _buffer } = this;\n // We use a copy here, so reentrant code does not mutate our array while we're\n // emitting it to a new subscriber.\n const copy = _buffer.slice();\n for (let i = 0; i < copy.length && !subscriber.closed; i += _infiniteTimeWindow ? 1 : 2) {\n subscriber.next(copy[i] as T);\n }\n\n this._checkFinalizedStatuses(subscriber);\n\n return subscription;\n }\n\n private _trimBuffer() {\n const { _bufferSize, _timestampProvider, _buffer, _infiniteTimeWindow } = this;\n // If we don't have an infinite buffer size, and we're over the length,\n // use splice to truncate the old buffer values off. Note that we have to\n // double the size for instances where we're not using an infinite time window\n // because we're storing the values and the timestamps in the same array.\n const adjustedBufferSize = (_infiniteTimeWindow ? 1 : 2) * _bufferSize;\n _bufferSize < Infinity && adjustedBufferSize < _buffer.length && _buffer.splice(0, _buffer.length - adjustedBufferSize);\n\n // Now, if we're not in an infinite time window, remove all values where the time is\n // older than what is allowed.\n if (!_infiniteTimeWindow) {\n const now = _timestampProvider.now();\n let last = 0;\n // Search the array for the first timestamp that isn't expired and\n // truncate the buffer up to that point.\n for (let i = 1; i < _buffer.length && (_buffer[i] as number) <= now; i += 2) {\n last = i;\n }\n last && _buffer.splice(0, last + 1);\n }\n }\n}\n", "import { Scheduler } from '../Scheduler';\nimport { Subscription } from '../Subscription';\nimport { SchedulerAction } from '../types';\n\n/**\n * A unit of work to be executed in a `scheduler`. 
An action is typically\n * created from within a {@link SchedulerLike} and an RxJS user does not need to concern\n * themselves about creating and manipulating an Action.\n *\n * ```ts\n * class Action extends Subscription {\n * new (scheduler: Scheduler, work: (state?: T) => void);\n * schedule(state?: T, delay: number = 0): Subscription;\n * }\n * ```\n *\n * @class Action\n */\nexport class Action extends Subscription {\n constructor(scheduler: Scheduler, work: (this: SchedulerAction, state?: T) => void) {\n super();\n }\n /**\n * Schedules this action on its parent {@link SchedulerLike} for execution. May be passed\n * some context object, `state`. May happen at some point in the future,\n * according to the `delay` parameter, if specified.\n * @param {T} [state] Some contextual data that the `work` function uses when\n * called by the Scheduler.\n * @param {number} [delay] Time to wait before executing the work, where the\n * time unit is implicit and defined by the Scheduler.\n * @return {void}\n */\n public schedule(state?: T, delay: number = 0): Subscription {\n return this;\n }\n}\n", "import type { TimerHandle } from './timerHandle';\ntype SetIntervalFunction = (handler: () => void, timeout?: number, ...args: any[]) => TimerHandle;\ntype ClearIntervalFunction = (handle: TimerHandle) => void;\n\ninterface IntervalProvider {\n setInterval: SetIntervalFunction;\n clearInterval: ClearIntervalFunction;\n delegate:\n | {\n setInterval: SetIntervalFunction;\n clearInterval: ClearIntervalFunction;\n }\n | undefined;\n}\n\nexport const intervalProvider: IntervalProvider = {\n // When accessing the delegate, use the variable rather than `this` so that\n // the functions can be called without being bound to the provider.\n setInterval(handler: () => void, timeout?: number, ...args) {\n const { delegate } = intervalProvider;\n if (delegate?.setInterval) {\n return delegate.setInterval(handler, timeout, ...args);\n }\n return setInterval(handler, timeout, ...args);\n },\n clearInterval(handle) {\n const { delegate } = intervalProvider;\n return (delegate?.clearInterval || clearInterval)(handle as any);\n },\n delegate: undefined,\n};\n", "import { Action } from './Action';\nimport { SchedulerAction } from '../types';\nimport { Subscription } from '../Subscription';\nimport { AsyncScheduler } from './AsyncScheduler';\nimport { intervalProvider } from './intervalProvider';\nimport { arrRemove } from '../util/arrRemove';\nimport { TimerHandle } from './timerHandle';\n\nexport class AsyncAction extends Action {\n public id: TimerHandle | undefined;\n public state?: T;\n // @ts-ignore: Property has no initializer and is not definitely assigned\n public delay: number;\n protected pending: boolean = false;\n\n constructor(protected scheduler: AsyncScheduler, protected work: (this: SchedulerAction, state?: T) => void) {\n super(scheduler, work);\n }\n\n public schedule(state?: T, delay: number = 0): Subscription {\n if (this.closed) {\n return this;\n }\n\n // Always replace the current state with the new state.\n this.state = state;\n\n const id = this.id;\n const scheduler = this.scheduler;\n\n //\n // Important implementation note:\n //\n // Actions only execute once by default, unless rescheduled from within the\n // scheduled callback. 
This allows us to implement single and repeat\n // actions via the same code path, without adding API surface area, as well\n // as mimic traditional recursion but across asynchronous boundaries.\n //\n // However, JS runtimes and timers distinguish between intervals achieved by\n // serial `setTimeout` calls vs. a single `setInterval` call. An interval of\n // serial `setTimeout` calls can be individually delayed, which delays\n // scheduling the next `setTimeout`, and so on. `setInterval` attempts to\n // guarantee the interval callback will be invoked more precisely to the\n // interval period, regardless of load.\n //\n // Therefore, we use `setInterval` to schedule single and repeat actions.\n // If the action reschedules itself with the same delay, the interval is not\n // canceled. If the action doesn't reschedule, or reschedules with a\n // different delay, the interval will be canceled after scheduled callback\n // execution.\n //\n if (id != null) {\n this.id = this.recycleAsyncId(scheduler, id, delay);\n }\n\n // Set the pending flag indicating that this action has been scheduled, or\n // has recursively rescheduled itself.\n this.pending = true;\n\n this.delay = delay;\n // If this action has already an async Id, don't request a new one.\n this.id = this.id ?? this.requestAsyncId(scheduler, this.id, delay);\n\n return this;\n }\n\n protected requestAsyncId(scheduler: AsyncScheduler, _id?: TimerHandle, delay: number = 0): TimerHandle {\n return intervalProvider.setInterval(scheduler.flush.bind(scheduler, this), delay);\n }\n\n protected recycleAsyncId(_scheduler: AsyncScheduler, id?: TimerHandle, delay: number | null = 0): TimerHandle | undefined {\n // If this action is rescheduled with the same delay time, don't clear the interval id.\n if (delay != null && this.delay === delay && this.pending === false) {\n return id;\n }\n // Otherwise, if the action's delay time is different from the current delay,\n // or the action has been rescheduled before it's executed, clear the interval id\n if (id != null) {\n intervalProvider.clearInterval(id);\n }\n\n return undefined;\n }\n\n /**\n * Immediately executes this action and the `work` it contains.\n * @return {any}\n */\n public execute(state: T, delay: number): any {\n if (this.closed) {\n return new Error('executing a cancelled action');\n }\n\n this.pending = false;\n const error = this._execute(state, delay);\n if (error) {\n return error;\n } else if (this.pending === false && this.id != null) {\n // Dequeue if the action didn't reschedule itself. Don't call\n // unsubscribe(), because the action could reschedule later.\n // For example:\n // ```\n // scheduler.schedule(function doWork(counter) {\n // /* ... I'm a busy worker bee ... */\n // var originalAction = this;\n // /* wait 100ms before rescheduling the action */\n // setTimeout(function () {\n // originalAction.schedule(counter + 1);\n // }, 100);\n // }, 1000);\n // ```\n this.id = this.recycleAsyncId(this.scheduler, this.id, null);\n }\n }\n\n protected _execute(state: T, _delay: number): any {\n let errored: boolean = false;\n let errorValue: any;\n try {\n this.work(state);\n } catch (e) {\n errored = true;\n // HACK: Since code elsewhere is relying on the \"truthiness\" of the\n // return here, we can't have it return \"\" or 0 or false.\n // TODO: Clean this up when we refactor schedulers mid-version-8 or so.\n errorValue = e ? 
e : new Error('Scheduled action threw falsy error');\n }\n if (errored) {\n this.unsubscribe();\n return errorValue;\n }\n }\n\n unsubscribe() {\n if (!this.closed) {\n const { id, scheduler } = this;\n const { actions } = scheduler;\n\n this.work = this.state = this.scheduler = null!;\n this.pending = false;\n\n arrRemove(actions, this);\n if (id != null) {\n this.id = this.recycleAsyncId(scheduler, id, null);\n }\n\n this.delay = null!;\n super.unsubscribe();\n }\n }\n}\n", "import { Action } from './scheduler/Action';\nimport { Subscription } from './Subscription';\nimport { SchedulerLike, SchedulerAction } from './types';\nimport { dateTimestampProvider } from './scheduler/dateTimestampProvider';\n\n/**\n * An execution context and a data structure to order tasks and schedule their\n * execution. Provides a notion of (potentially virtual) time, through the\n * `now()` getter method.\n *\n * Each unit of work in a Scheduler is called an `Action`.\n *\n * ```ts\n * class Scheduler {\n * now(): number;\n * schedule(work, delay?, state?): Subscription;\n * }\n * ```\n *\n * @class Scheduler\n * @deprecated Scheduler is an internal implementation detail of RxJS, and\n * should not be used directly. Rather, create your own class and implement\n * {@link SchedulerLike}. Will be made internal in v8.\n */\nexport class Scheduler implements SchedulerLike {\n public static now: () => number = dateTimestampProvider.now;\n\n constructor(private schedulerActionCtor: typeof Action, now: () => number = Scheduler.now) {\n this.now = now;\n }\n\n /**\n * A getter method that returns a number representing the current time\n * (at the time this function was called) according to the scheduler's own\n * internal clock.\n * @return {number} A number that represents the current time. May or may not\n * have a relation to wall-clock time. May or may not refer to a time unit\n * (e.g. milliseconds).\n */\n public now: () => number;\n\n /**\n * Schedules a function, `work`, for execution. May happen at some point in\n * the future, according to the `delay` parameter, if specified. 
May be passed\n * some context object, `state`, which will be passed to the `work` function.\n *\n * The given arguments will be processed an stored as an Action object in a\n * queue of actions.\n *\n * @param {function(state: ?T): ?Subscription} work A function representing a\n * task, or some unit of work to be executed by the Scheduler.\n * @param {number} [delay] Time to wait before executing the work, where the\n * time unit is implicit and defined by the Scheduler itself.\n * @param {T} [state] Some contextual data that the `work` function uses when\n * called by the Scheduler.\n * @return {Subscription} A subscription in order to be able to unsubscribe\n * the scheduled work.\n */\n public schedule(work: (this: SchedulerAction, state?: T) => void, delay: number = 0, state?: T): Subscription {\n return new this.schedulerActionCtor(this, work).schedule(state, delay);\n }\n}\n", "import { Scheduler } from '../Scheduler';\nimport { Action } from './Action';\nimport { AsyncAction } from './AsyncAction';\nimport { TimerHandle } from './timerHandle';\n\nexport class AsyncScheduler extends Scheduler {\n public actions: Array> = [];\n /**\n * A flag to indicate whether the Scheduler is currently executing a batch of\n * queued actions.\n * @type {boolean}\n * @internal\n */\n public _active: boolean = false;\n /**\n * An internal ID used to track the latest asynchronous task such as those\n * coming from `setTimeout`, `setInterval`, `requestAnimationFrame`, and\n * others.\n * @type {any}\n * @internal\n */\n public _scheduled: TimerHandle | undefined;\n\n constructor(SchedulerAction: typeof Action, now: () => number = Scheduler.now) {\n super(SchedulerAction, now);\n }\n\n public flush(action: AsyncAction): void {\n const { actions } = this;\n\n if (this._active) {\n actions.push(action);\n return;\n }\n\n let error: any;\n this._active = true;\n\n do {\n if ((error = action.execute(action.state, action.delay))) {\n break;\n }\n } while ((action = actions.shift()!)); // exhaust the scheduler queue\n\n this._active = false;\n\n if (error) {\n while ((action = actions.shift()!)) {\n action.unsubscribe();\n }\n throw error;\n }\n }\n}\n", "import { AsyncAction } from './AsyncAction';\nimport { AsyncScheduler } from './AsyncScheduler';\n\n/**\n *\n * Async Scheduler\n *\n * Schedule task as if you used setTimeout(task, duration)\n *\n * `async` scheduler schedules tasks asynchronously, by putting them on the JavaScript\n * event loop queue. 
It is best used to delay tasks in time or to schedule tasks repeating\n * in intervals.\n *\n * If you just want to \"defer\" task, that is to perform it right after currently\n * executing synchronous code ends (commonly achieved by `setTimeout(deferredTask, 0)`),\n * better choice will be the {@link asapScheduler} scheduler.\n *\n * ## Examples\n * Use async scheduler to delay task\n * ```ts\n * import { asyncScheduler } from 'rxjs';\n *\n * const task = () => console.log('it works!');\n *\n * asyncScheduler.schedule(task, 2000);\n *\n * // After 2 seconds logs:\n * // \"it works!\"\n * ```\n *\n * Use async scheduler to repeat task in intervals\n * ```ts\n * import { asyncScheduler } from 'rxjs';\n *\n * function task(state) {\n * console.log(state);\n * this.schedule(state + 1, 1000); // `this` references currently executing Action,\n * // which we reschedule with new state and delay\n * }\n *\n * asyncScheduler.schedule(task, 3000, 0);\n *\n * // Logs:\n * // 0 after 3s\n * // 1 after 4s\n * // 2 after 5s\n * // 3 after 6s\n * ```\n */\n\nexport const asyncScheduler = new AsyncScheduler(AsyncAction);\n\n/**\n * @deprecated Renamed to {@link asyncScheduler}. Will be removed in v8.\n */\nexport const async = asyncScheduler;\n", "import { AsyncAction } from './AsyncAction';\nimport { Subscription } from '../Subscription';\nimport { QueueScheduler } from './QueueScheduler';\nimport { SchedulerAction } from '../types';\nimport { TimerHandle } from './timerHandle';\n\nexport class QueueAction extends AsyncAction {\n constructor(protected scheduler: QueueScheduler, protected work: (this: SchedulerAction, state?: T) => void) {\n super(scheduler, work);\n }\n\n public schedule(state?: T, delay: number = 0): Subscription {\n if (delay > 0) {\n return super.schedule(state, delay);\n }\n this.delay = delay;\n this.state = state;\n this.scheduler.flush(this);\n return this;\n }\n\n public execute(state: T, delay: number): any {\n return delay > 0 || this.closed ? super.execute(state, delay) : this._execute(state, delay);\n }\n\n protected requestAsyncId(scheduler: QueueScheduler, id?: TimerHandle, delay: number = 0): TimerHandle {\n // If delay exists and is greater than 0, or if the delay is null (the\n // action wasn't rescheduled) but was originally scheduled as an async\n // action, then recycle as an async action.\n\n if ((delay != null && delay > 0) || (delay == null && this.delay > 0)) {\n return super.requestAsyncId(scheduler, id, delay);\n }\n\n // Otherwise flush the scheduler starting with this action.\n scheduler.flush(this);\n\n // HACK: In the past, this was returning `void`. However, `void` isn't a valid\n // `TimerHandle`, and generally the return value here isn't really used. So the\n // compromise is to return `0` which is both \"falsy\" and a valid `TimerHandle`,\n // as opposed to refactoring every other instanceo of `requestAsyncId`.\n return 0;\n }\n}\n", "import { AsyncScheduler } from './AsyncScheduler';\n\nexport class QueueScheduler extends AsyncScheduler {\n}\n", "import { QueueAction } from './QueueAction';\nimport { QueueScheduler } from './QueueScheduler';\n\n/**\n *\n * Queue Scheduler\n *\n * Put every next task on a queue, instead of executing it immediately\n *\n * `queue` scheduler, when used with delay, behaves the same as {@link asyncScheduler} scheduler.\n *\n * When used without delay, it schedules given task synchronously - executes it right when\n * it is scheduled. 
However when called recursively, that is when inside the scheduled task,\n * another task is scheduled with queue scheduler, instead of executing immediately as well,\n * that task will be put on a queue and wait for current one to finish.\n *\n * This means that when you execute task with `queue` scheduler, you are sure it will end\n * before any other task scheduled with that scheduler will start.\n *\n * ## Examples\n * Schedule recursively first, then do something\n * ```ts\n * import { queueScheduler } from 'rxjs';\n *\n * queueScheduler.schedule(() => {\n * queueScheduler.schedule(() => console.log('second')); // will not happen now, but will be put on a queue\n *\n * console.log('first');\n * });\n *\n * // Logs:\n * // \"first\"\n * // \"second\"\n * ```\n *\n * Reschedule itself recursively\n * ```ts\n * import { queueScheduler } from 'rxjs';\n *\n * queueScheduler.schedule(function(state) {\n * if (state !== 0) {\n * console.log('before', state);\n * this.schedule(state - 1); // `this` references currently executing Action,\n * // which we reschedule with new state\n * console.log('after', state);\n * }\n * }, 0, 3);\n *\n * // In scheduler that runs recursively, you would expect:\n * // \"before\", 3\n * // \"before\", 2\n * // \"before\", 1\n * // \"after\", 1\n * // \"after\", 2\n * // \"after\", 3\n *\n * // But with queue it logs:\n * // \"before\", 3\n * // \"after\", 3\n * // \"before\", 2\n * // \"after\", 2\n * // \"before\", 1\n * // \"after\", 1\n * ```\n */\n\nexport const queueScheduler = new QueueScheduler(QueueAction);\n\n/**\n * @deprecated Renamed to {@link queueScheduler}. Will be removed in v8.\n */\nexport const queue = queueScheduler;\n", "import { AsyncAction } from './AsyncAction';\nimport { AnimationFrameScheduler } from './AnimationFrameScheduler';\nimport { SchedulerAction } from '../types';\nimport { animationFrameProvider } from './animationFrameProvider';\nimport { TimerHandle } from './timerHandle';\n\nexport class AnimationFrameAction extends AsyncAction {\n constructor(protected scheduler: AnimationFrameScheduler, protected work: (this: SchedulerAction, state?: T) => void) {\n super(scheduler, work);\n }\n\n protected requestAsyncId(scheduler: AnimationFrameScheduler, id?: TimerHandle, delay: number = 0): TimerHandle {\n // If delay is greater than 0, request as an async action.\n if (delay !== null && delay > 0) {\n return super.requestAsyncId(scheduler, id, delay);\n }\n // Push the action to the end of the scheduler queue.\n scheduler.actions.push(this);\n // If an animation frame has already been requested, don't request another\n // one. If an animation frame hasn't been requested yet, request one. Return\n // the current animation frame request id.\n return scheduler._scheduled || (scheduler._scheduled = animationFrameProvider.requestAnimationFrame(() => scheduler.flush(undefined)));\n }\n\n protected recycleAsyncId(scheduler: AnimationFrameScheduler, id?: TimerHandle, delay: number = 0): TimerHandle | undefined {\n // If delay exists and is greater than 0, or if the delay is null (the\n // action wasn't rescheduled) but was originally scheduled as an async\n // action, then recycle as an async action.\n if (delay != null ? 
delay > 0 : this.delay > 0) {\n return super.recycleAsyncId(scheduler, id, delay);\n }\n // If the scheduler queue has no remaining actions with the same async id,\n // cancel the requested animation frame and set the scheduled flag to\n // undefined so the next AnimationFrameAction will request its own.\n const { actions } = scheduler;\n if (id != null && actions[actions.length - 1]?.id !== id) {\n animationFrameProvider.cancelAnimationFrame(id as number);\n scheduler._scheduled = undefined;\n }\n // Return undefined so the action knows to request a new async id if it's rescheduled.\n return undefined;\n }\n}\n", "import { AsyncAction } from './AsyncAction';\nimport { AsyncScheduler } from './AsyncScheduler';\n\nexport class AnimationFrameScheduler extends AsyncScheduler {\n public flush(action?: AsyncAction): void {\n this._active = true;\n // The async id that effects a call to flush is stored in _scheduled.\n // Before executing an action, it's necessary to check the action's async\n // id to determine whether it's supposed to be executed in the current\n // flush.\n // Previous implementations of this method used a count to determine this,\n // but that was unsound, as actions that are unsubscribed - i.e. cancelled -\n // are removed from the actions array and that can shift actions that are\n // scheduled to be executed in a subsequent flush into positions at which\n // they are executed within the current flush.\n const flushId = this._scheduled;\n this._scheduled = undefined;\n\n const { actions } = this;\n let error: any;\n action = action || actions.shift()!;\n\n do {\n if ((error = action.execute(action.state, action.delay))) {\n break;\n }\n } while ((action = actions[0]) && action.id === flushId && actions.shift());\n\n this._active = false;\n\n if (error) {\n while ((action = actions[0]) && action.id === flushId && actions.shift()) {\n action.unsubscribe();\n }\n throw error;\n }\n }\n}\n", "import { AnimationFrameAction } from './AnimationFrameAction';\nimport { AnimationFrameScheduler } from './AnimationFrameScheduler';\n\n/**\n *\n * Animation Frame Scheduler\n *\n * Perform task when `window.requestAnimationFrame` would fire\n *\n * When `animationFrame` scheduler is used with delay, it will fall back to {@link asyncScheduler} scheduler\n * behaviour.\n *\n * Without delay, `animationFrame` scheduler can be used to create smooth browser animations.\n * It makes sure scheduled task will happen just before next browser content repaint,\n * thus performing animations as efficiently as possible.\n *\n * ## Example\n * Schedule div height animation\n * ```ts\n * // html:
\n * import { animationFrameScheduler } from 'rxjs';\n *\n * const div = document.querySelector('div');\n *\n * animationFrameScheduler.schedule(function(height) {\n * div.style.height = height + \"px\";\n *\n * this.schedule(height + 1); // `this` references currently executing Action,\n * // which we reschedule with new state\n * }, 0, 0);\n *\n * // You will see a div element growing in height\n * ```\n */\n\nexport const animationFrameScheduler = new AnimationFrameScheduler(AnimationFrameAction);\n\n/**\n * @deprecated Renamed to {@link animationFrameScheduler}. Will be removed in v8.\n */\nexport const animationFrame = animationFrameScheduler;\n", "import { Observable } from '../Observable';\nimport { SchedulerLike } from '../types';\n\n/**\n * A simple Observable that emits no items to the Observer and immediately\n * emits a complete notification.\n *\n * Just emits 'complete', and nothing else.\n *\n * ![](empty.png)\n *\n * A simple Observable that only emits the complete notification. It can be used\n * for composing with other Observables, such as in a {@link mergeMap}.\n *\n * ## Examples\n *\n * Log complete notification\n *\n * ```ts\n * import { EMPTY } from 'rxjs';\n *\n * EMPTY.subscribe({\n * next: () => console.log('Next'),\n * complete: () => console.log('Complete!')\n * });\n *\n * // Outputs\n * // Complete!\n * ```\n *\n * Emit the number 7, then complete\n *\n * ```ts\n * import { EMPTY, startWith } from 'rxjs';\n *\n * const result = EMPTY.pipe(startWith(7));\n * result.subscribe(x => console.log(x));\n *\n * // Outputs\n * // 7\n * ```\n *\n * Map and flatten only odd numbers to the sequence `'a'`, `'b'`, `'c'`\n *\n * ```ts\n * import { interval, mergeMap, of, EMPTY } from 'rxjs';\n *\n * const interval$ = interval(1000);\n * const result = interval$.pipe(\n * mergeMap(x => x % 2 === 1 ? of('a', 'b', 'c') : EMPTY),\n * );\n * result.subscribe(x => console.log(x));\n *\n * // Results in the following to the console:\n * // x is equal to the count on the interval, e.g. (0, 1, 2, 3, ...)\n * // x will occur every 1000ms\n * // if x % 2 is equal to 1, print a, b, c (each on its own)\n * // if x % 2 is not equal to 1, nothing will be output\n * ```\n *\n * @see {@link Observable}\n * @see {@link NEVER}\n * @see {@link of}\n * @see {@link throwError}\n */\nexport const EMPTY = new Observable((subscriber) => subscriber.complete());\n\n/**\n * @param scheduler A {@link SchedulerLike} to use for scheduling\n * the emission of the complete notification.\n * @deprecated Replaced with the {@link EMPTY} constant or {@link scheduled} (e.g. `scheduled([], scheduler)`). Will be removed in v8.\n */\nexport function empty(scheduler?: SchedulerLike) {\n return scheduler ? emptyScheduled(scheduler) : EMPTY;\n}\n\nfunction emptyScheduled(scheduler: SchedulerLike) {\n return new Observable((subscriber) => scheduler.schedule(() => subscriber.complete()));\n}\n", "import { SchedulerLike } from '../types';\nimport { isFunction } from './isFunction';\n\nexport function isScheduler(value: any): value is SchedulerLike {\n return value && isFunction(value.schedule);\n}\n", "import { SchedulerLike } from '../types';\nimport { isFunction } from './isFunction';\nimport { isScheduler } from './isScheduler';\n\nfunction last(arr: T[]): T | undefined {\n return arr[arr.length - 1];\n}\n\nexport function popResultSelector(args: any[]): ((...args: unknown[]) => unknown) | undefined {\n return isFunction(last(args)) ? 
args.pop() : undefined;\n}\n\nexport function popScheduler(args: any[]): SchedulerLike | undefined {\n return isScheduler(last(args)) ? args.pop() : undefined;\n}\n\nexport function popNumber(args: any[], defaultValue: number): number {\n return typeof last(args) === 'number' ? args.pop()! : defaultValue;\n}\n", "export const isArrayLike = ((x: any): x is ArrayLike => x && typeof x.length === 'number' && typeof x !== 'function');", "import { isFunction } from \"./isFunction\";\n\n/**\n * Tests to see if the object is \"thennable\".\n * @param value the object to test\n */\nexport function isPromise(value: any): value is PromiseLike {\n return isFunction(value?.then);\n}\n", "import { InteropObservable } from '../types';\nimport { observable as Symbol_observable } from '../symbol/observable';\nimport { isFunction } from './isFunction';\n\n/** Identifies an input as being Observable (but not necessary an Rx Observable) */\nexport function isInteropObservable(input: any): input is InteropObservable {\n return isFunction(input[Symbol_observable]);\n}\n", "import { isFunction } from './isFunction';\n\nexport function isAsyncIterable(obj: any): obj is AsyncIterable {\n return Symbol.asyncIterator && isFunction(obj?.[Symbol.asyncIterator]);\n}\n", "/**\n * Creates the TypeError to throw if an invalid object is passed to `from` or `scheduled`.\n * @param input The object that was passed.\n */\nexport function createInvalidObservableTypeError(input: any) {\n // TODO: We should create error codes that can be looked up, so this can be less verbose.\n return new TypeError(\n `You provided ${\n input !== null && typeof input === 'object' ? 'an invalid object' : `'${input}'`\n } where a stream was expected. You can provide an Observable, Promise, ReadableStream, Array, AsyncIterable, or Iterable.`\n );\n}\n", "export function getSymbolIterator(): symbol {\n if (typeof Symbol !== 'function' || !Symbol.iterator) {\n return '@@iterator' as any;\n }\n\n return Symbol.iterator;\n}\n\nexport const iterator = getSymbolIterator();\n", "import { iterator as Symbol_iterator } from '../symbol/iterator';\nimport { isFunction } from './isFunction';\n\n/** Identifies an input as being an Iterable */\nexport function isIterable(input: any): input is Iterable {\n return isFunction(input?.[Symbol_iterator]);\n}\n", "import { ReadableStreamLike } from '../types';\nimport { isFunction } from './isFunction';\n\nexport async function* readableStreamLikeToAsyncGenerator(readableStream: ReadableStreamLike): AsyncGenerator {\n const reader = readableStream.getReader();\n try {\n while (true) {\n const { value, done } = await reader.read();\n if (done) {\n return;\n }\n yield value!;\n }\n } finally {\n reader.releaseLock();\n }\n}\n\nexport function isReadableStreamLike(obj: any): obj is ReadableStreamLike {\n // We don't want to use instanceof checks because they would return\n // false for instances from another Realm, like an " + ], + "text/plain": [ + "" + ] + }, + "execution_count": 3, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "# bounds of the noaa-eri-nashville2020.json items\n", + "bounds = (-87.0251, 36.0999, -85.4249, 36.2251)\n", + "\n", + "poly = Polygon.from_bounds(*bounds)\n", + "geojson = Feature(type=\"Feature\", geometry=poly, properties=None).dict(exclude_none=True)\n", + "\n", + "m = Map(\n", + " tiles=\"OpenStreetMap\",\n", + " location=((bounds[1] + bounds[3]) / 2,(bounds[0] + bounds[2]) / 2),\n", + " zoom_start=8\n", + ")\n", + "\n", + "geo_json = GeoJson(\n", + " 
data=geojson,\n", + " style_function=lambda x: {\n", + " 'opacity': 1, 'dashArray': '1', 'fillOpacity': 0, 'weight': 1\n", + " },\n", + ")\n", + "geo_json.add_to(m)\n", + "m" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Register Search query" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "{'searchid': '6d436413d0eed760acc2f6bd16ca77a5', 'links': [{'rel': 'metadata', 'type': 'application/json', 'href': 'http://127.0.0.1:8081/mosaic/6d436413d0eed760acc2f6bd16ca77a5/info'}, {'rel': 'tilejson', 'type': 'application/json', 'href': 'http://127.0.0.1:8081/mosaic/6d436413d0eed760acc2f6bd16ca77a5/tilejson.json'}]}\n" + ] + } + ], + "source": [ + "search_request = {\n", + " # Filter collection\n", + " \"collections\": [\"noaa-emergency-response\"],\n", + " # limit bounds of the known items (note: the bbox will also be used in the tilejson response)\n", + " \"bbox\": bounds,\n", + " \"filter-lang\": \"cql-json\",\n", + "}\n", + "\n", + "response = httpx.post(\n", + " f\"{endpoint}/mosaic/register\", json=search_request,\n", + ").json()\n", + "print(response)\n", + "\n", + "searchid = response[\"id\"]" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Show list of Mosaics" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "['b2b4e952ae7a8dd69cd11d595b398945', '6d436413d0eed760acc2f6bd16ca77a5']\n" + ] + } + ], + "source": [ + "response = httpx.get(f\"{endpoint}/mosaic/list\").json()\n", + "print(\n", + " [\n", + " search[\"search\"][\"hash\"]\n", + " for search in response[\"searches\"]\n", + " ]\n", + ")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Get Search Metadata" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "{'search': {'hash': '6d436413d0eed760acc2f6bd16ca77a5', 'search': {'bbox': [-87.0251, 36.0999, -85.4249, 36.2251], 'collections': ['noaa-emergency-response'], 'filter-lang': 'cql-json'}, '_where': \"collection = ANY ('{noaa-emergency-response}') AND st_intersects(geometry, '0103000020E610000001000000050000004BC8073D9BC155C0696FF085C90C42404BC8073D9BC155C0302AA913D01C42408104C58F315B55C0302AA913D01C42408104C58F315B55C0696FF085C90C42404BC8073D9BC155C0696FF085C90C4240')\", 'orderby': 'datetime DESC, id DESC', 'lastused': '2023-05-24T10:09:15.184690+00:00', 'usecount': 1, 'metadata': {'type': 'mosaic'}}, 'links': [{'rel': 'self', 'type': 'application/json', 'href': 'http://127.0.0.1:8081/mosaic/6d436413d0eed760acc2f6bd16ca77a5/info'}, {'rel': 'tilejson', 'type': 'application/json', 'href': 'http://127.0.0.1:8081/mosaic/6d436413d0eed760acc2f6bd16ca77a5/tilejson.json'}]}\n" + ] + } + ], + "source": [ + "info_response = httpx.get(f\"{endpoint}/mosaic/{searchid}/info\").json()\n", + "print(info_response)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Get TileJSON\n", + "\n", + "Note: to return a valid tilejson document you'll need to pass either the `assets` or `expression` option." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 7, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "{'tilejson': '2.2.0', 'name': '6d436413d0eed760acc2f6bd16ca77a5', 'version': '1.0.0', 'scheme': 'xyz', 'tiles': ['http://127.0.0.1:8081/mosaic/6d436413d0eed760acc2f6bd16ca77a5/tiles/WebMercatorQuad/{z}/{x}/{y}@1x?assets=cog'], 'minzoom': 0, 'maxzoom': 24, 'bounds': [-87.0251, 36.0999, -85.4249, 36.2251], 'center': [-86.225, 36.162499999999994, 0]}\n" + ] + } + ], + "source": [ + "tj_response = httpx.get(f\"{endpoint}/mosaic/{searchid}/tilejson.json?assets=cog\").json()\n", + "print(tj_response)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Load tiles" + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "metadata": { + "scrolled": true + }, + "outputs": [ + { + "data": { + "text/html": [ + "
Make this Notebook Trusted to load map: File -> Trust Notebook
" + ], + "text/plain": [ + "" + ] + }, + "execution_count": 8, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "m = Map(\n", + " location=((bounds[1] + bounds[3]) / 2,(bounds[0] + bounds[2]) / 2),\n", + " zoom_start=14\n", + ")\n", + "\n", + "geo_json = GeoJson(\n", + " data=geojson,\n", + " style_function=lambda x: {\n", + " 'opacity': 1, 'dashArray': '1', 'fillOpacity': 0, 'weight': 1\n", + " },\n", + ")\n", + "geo_json.add_to(m)\n", + "\n", + "aod_layer = TileLayer(\n", + " tiles=tj_response[\"tiles\"][0],\n", + " attr=\"Mosaic\",\n", + " min_zoom=14,\n", + " max_zoom=18,\n", + " max_native_zoom=18,\n", + ")\n", + "aod_layer.add_to(m)\n", + "m" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Register a Mosaic with Metadata" + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "{'searchid': '4b0db3dbd1858d54a3a55f84de97d1ca', 'links': [{'rel': 'metadata', 'type': 'application/json', 'href': 'http://127.0.0.1:8081/mosaic/4b0db3dbd1858d54a3a55f84de97d1ca/info'}, {'rel': 'tilejson', 'type': 'application/json', 'href': 'http://127.0.0.1:8081/mosaic/4b0db3dbd1858d54a3a55f84de97d1ca/tilejson.json'}]}\n" + ] + } + ], + "source": [ + "search_request = {\n", + " # Filter collection\n", + " \"collections\": [\"noaa-emergency-response\"],\n", + " # limit bounds of the known items (note: the bbox will also be used in the tilejson response)\n", + " \"bbox\": bounds,\n", + " \"filter-lang\": \"cql-json\",\n", + " \"metadata\": {\n", + " \"bounds\": [-87.0251, 36.0999, -85.4249, 36.2251], # This is redondant because it's in the bbox filter\n", + " \"minzoom\": 14,\n", + " \"maxzoom\": 18,\n", + " \"assets\": [\"cog\"],\n", + " \"defaults\": {\n", + " \"true_color\": {\n", + " \"bidx\": [1, 2, 3],\n", + " },\n", + " },\n", + " },\n", + "}\n", + "\n", + "response = httpx.post(\n", + " f\"{endpoint}/mosaic/register\", json=search_request,\n", + ").json()\n", + "print(response)\n", + "\n", + "searchid = response[\"id\"]" + ] + }, + { + "cell_type": "code", + "execution_count": 10, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "{'tilejson': '2.2.0', 'name': '4b0db3dbd1858d54a3a55f84de97d1ca', 'version': '1.0.0', 'scheme': 'xyz', 'tiles': ['http://127.0.0.1:8081/mosaic/4b0db3dbd1858d54a3a55f84de97d1ca/tiles/WebMercatorQuad/{z}/{x}/{y}@1x?assets=cog'], 'minzoom': 14, 'maxzoom': 18, 'bounds': [-87.0251, 36.0999, -85.4249, 36.2251], 'center': [-86.225, 36.162499999999994, 14]}\n" + ] + } + ], + "source": [ + "tj_response = httpx.get(f\"{endpoint}/mosaic/{searchid}/tilejson.json?assets=cog\").json()\n", + "print(tj_response)" + ] + }, + { + "cell_type": "code", + "execution_count": 11, + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
Make this Notebook Trusted to load map: File -> Trust Notebook
" + ], + "text/plain": [ + "" + ] + }, + "execution_count": 11, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "m = Map(\n", + " location=((bounds[1] + bounds[3]) / 2,(bounds[0] + bounds[2]) / 2),\n", + " zoom_start=14\n", + ")\n", + "\n", + "geo_json = GeoJson(\n", + " data=geojson,\n", + " style_function=lambda x: {\n", + " 'opacity': 1, 'dashArray': '1', 'fillOpacity': 0, 'weight': 1\n", + " },\n", + ")\n", + "geo_json.add_to(m)\n", + "\n", + "aod_layer = TileLayer(\n", + " tiles=tj_response[\"tiles\"][0],\n", + " attr=\"Mosaic\",\n", + " min_zoom=tj_response[\"minzoom\"],\n", + " max_zoom=tj_response[\"maxzoom\"],\n", + " max_native_zoom=tj_response[\"maxzoom\"], \n", + ")\n", + "aod_layer.add_to(m)\n", + "m" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.9.13" + }, + "vscode": { + "interpreter": { + "hash": "2590a9e34ee6c8bdce5141410f2a072bbabd2a859a8a48acdaa85720923a90ef" + } + } + }, + "nbformat": 4, + "nbformat_minor": 4 +} diff --git a/1.3.0/notebooks/demo/index.html b/1.3.0/notebooks/demo/index.html new file mode 100644 index 00000000..b8197c0e --- /dev/null +++ b/1.3.0/notebooks/demo/index.html @@ -0,0 +1,2850 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + demo - TiTiler.PgSTAC + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

demo

+ + + + + + + + + + \ No newline at end of file diff --git a/1.3.0/overrides/stylesheets/extra.css b/1.3.0/overrides/stylesheets/extra.css new file mode 100644 index 00000000..594ac072 --- /dev/null +++ b/1.3.0/overrides/stylesheets/extra.css @@ -0,0 +1,5 @@ +:root { + --md-primary-fg-color: rgb(61, 121, 153); + --md-primary-fg-color--light: rgb(61, 121, 153); + --md-primary-fg-color--dark: rgb(61, 121, 153); + } diff --git a/1.3.0/release-notes/index.html b/1.3.0/release-notes/index.html new file mode 100644 index 00000000..72eaa055 --- /dev/null +++ b/1.3.0/release-notes/index.html @@ -0,0 +1,2633 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + Release notes - TiTiler.PgSTAC + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

Release Notes

+

1.3.0 (2024-05-17)

+
    +
  • update titiler requirement to >=0.18.0,<0.19
  • +
  • Add /colorMaps endpoints to the application (a hedged usage sketch follows this list)
  • +
  • +

    Deprecation remove default WebMercatorQuad tile matrix set in /tiles, /tilejson.json, /map, /WMTSCapabilities.xml and /assets endpoints

    +
    # Before
    +/tiles/{z}/{x}/{y}
    +/tilejson.json
    +/map
    +/WMTSCapabilities.xml
    +
    +# Now
    +/tiles/WebMercatorQuad/{z}/{x}/{y}
    +/WebMercatorQuad/tilejson.json
    +/WebMercatorQuad/map
    +/WebMercatorQuad/WMTSCapabilities.xml
    +
    +
  • +
  • +

    update titiler.pgstac.model.Link to match the OGC specification

    +
  • +
  • use {tileMatrixSetId} in templated URL links
  • +
  • add support for render and item-assets STAC Collection extensions for the STAC Collections info and wmts endpoints
  • +
  • add /info endpoint to the STAC Collections endpoints
  • +
  • add /collections and /collections/{collection_id} endpoints when TITILER_PGSTAC_API_DEBUG=TRUE
  • +
  • +

    Expect the Metadata.defaults configurations to follow the STAC render extension (stac-extensions/render)

    +
        // before
    +    "blue": {
    +        "rescale": ["0,100"],
    +        "assets": "b1",
    +    }
    +
    +    // now
    +    "blue": {
    +        "rescale": [[0, 100]],
    +        "assets": ["b1"],
    +    }
    +
    +
  • +
+
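A minimal usage sketch for the new colormap routes, assuming a local deployment on port 8081 and the /colorMaps and /colorMaps/{colorMapId} paths exposed by titiler's colormap factory; the viridis name is only an example:

```python
import httpx

endpoint = "http://127.0.0.1:8081"

# list every registered colormap name
print(httpx.get(f"{endpoint}/colorMaps").json())

# fetch the definition of a single colormap
print(httpx.get(f"{endpoint}/colorMaps/viridis").json())
```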

1.2.3 (2024-03-25)

+
    +
  • add python 3.12 support
  • +
  • Add extra="ignore" option to CacheSettings to fix a pydantic issue when using a .env file (an illustrative sketch follows this list)
  • +
+
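A minimal sketch of what such a settings class can look like with pydantic-settings; the field names and the TITILER_PGSTAC_CACHE_ prefix are illustrative assumptions, not the actual titiler.pgstac.settings.CacheSettings definition:

```python
from pydantic_settings import BaseSettings, SettingsConfigDict


class CacheSettings(BaseSettings):
    """Illustrative cache settings; field names are placeholders."""

    ttl: int = 300
    maxsize: int = 512
    disable: bool = False

    model_config = SettingsConfigDict(
        env_prefix="TITILER_PGSTAC_CACHE_",  # assumed prefix, check titiler.pgstac.settings
        env_file=".env",
        extra="ignore",  # unknown keys in the .env file no longer raise a ValidationError
    )
```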

1.2.2 (2024-02-21)

+
    +
  • enable passing ConnectionPool kwargs option in titiler.pgstac.db.connect_to_db function (author @smohiudd, stac-utils/titiler-pgstac!155)
  • +
+

1.2.1 (2024-01-19)

+
    +
  • fix invalid url parsing in HTML responses
  • +
+

1.2.0 (2024-01-17)

+
    +
  • update titiler requirement to >=0.17.0,<0.18
  • +
  • use new align_bounds_with_dataset=True rio-tiler option in GeoJSON statistics methods for more precise calculation
  • +
+

1.1.0 (2024-01-10)

+
    +
  • update titiler requirement to >=0.16.0,<0.17
  • +
  • use morecantile TileMatrixSet.cellSize property instead of deprecated/private TileMatrixSet._resolution method (author @hrodmn, stac-utils/titiler-pgstac!148)
  • +
  • add /point/{lon},{lat} endpoint in MosaicTilerFactory (co-author @hrodmn, stac-utils/titiler-pgstac!150); a request sketch follows this list
  • +
+
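A minimal request sketch for the new point endpoint, assuming a search already registered under /searches/{search_id} and a "cog" asset; the identifier and coordinates are placeholders:

```python
import httpx

endpoint = "http://127.0.0.1:8081"
search_id = "5a1b82d38d53a5d200273cbada886bd7"  # placeholder search identifier

resp = httpx.get(
    f"{endpoint}/searches/{search_id}/point/-86.22,36.16",
    params={"assets": "cog"},
)
print(resp.json())
```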

1.0.0 (2023-12-12)

+
    +
  • no change since 1.0.0a4
  • +
+

1.0.0a4 (2023-11-10)

+
    +
  • add algorithm options for /statistics [POST] endpoints (back-ported from 0.8.1); a request sketch follows this list
  • +
+
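A minimal request sketch for the algorithm option, assuming a registered search and a "cog" asset; the feature geometry and the "hillshade" algorithm name are placeholders:

```python
import httpx

endpoint = "http://127.0.0.1:8081"
search_id = "5a1b82d38d53a5d200273cbada886bd7"  # placeholder search identifier

feature = {
    "type": "Feature",
    "properties": {},
    "geometry": {
        "type": "Polygon",
        "coordinates": [[
            [-87.0, 36.1], [-87.0, 36.2], [-86.9, 36.2], [-86.9, 36.1], [-87.0, 36.1],
        ]],
    },
}

resp = httpx.post(
    f"{endpoint}/searches/{search_id}/statistics",
    params={"assets": "cog", "algorithm": "hillshade"},  # algorithm name is illustrative
    json=feature,
)
print(resp.json())
```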

1.0.0a3 (2023-11-03)

+
    +
  • remove reverse option in PGSTACBackend mosaic backend. Reverse item order should be achieved with STAC search sortby.
  • +
+

1.0.0a2 (2023-11-02)

+
    +
  • update titiler's dependency to >=0.15.2,<0.16
  • +
  • rename dependencies.TileParams to dependencies.TmsTileParams
  • +
+

1.0.0a1 (2023-10-20)

+
    +
  • rename dependencies.ItemPathParams to ItemIdParams breaking change
  • +
+

1.0.0a0 (2023-10-20)

+
    +
  • +

    add pgstac_dependency attribute in MosaicTilerFactory (defaults to dependencies.PgSTACParams)

    +
  • +
  • +

    add database's pool check in startup event

    +
  • +
  • +

    add metadata layers links in mosaic's /info response for TileJSON, map and wmts endpoint links

    +
  • +
  • +

    add CollectionIdParams dependency to retrieve a SearchId for a CollectionId

    +
  • +
  • +

    add /collections/{collection_id} virtual mosaic endpoints

    +
  • +
  • +

    update endpoints Tags (STAC Search, STAC Collection, STAC Item)

    +
  • +
+

Endpoint breaking changes

+
    +
  • +

    move PgSTAC Search Virtual Mosaic's endpoints from /mosaic to /searches

    +
  • +
  • +

    in model.RegisterResponse (model used in /register endpoint) rename searchid by id

    +
    # before
    +resp = httpx.post("/mosaic/register", body={"collections": ["my-collection"], "filter-lang": "cql-json"})
    +assert resp.json()["searchid"]
    +
    +# now
    +resp = httpx.post("/searches/register", body={"collections": ["my-collection"], "filter-lang": "cql-json"})
    +assert resp.json()["id"]
    +
    +
  • +
+

API breaking changes

+
    +
  • +

    rename dependencies.PathParams to dependencies.SearchIdParams

    +
  • +
  • +

    rename searchid path parameter to search_id in SearchIdParams

    +
  • +
  • +

    move check_query_params methods outside MosaicTilerFactory class

    +
  • +
  • +

    make path_dependency a required input to MosaicTilerFactory class

    +
    # before
    +app = FastAPI()
    +mosaic = MosaicTilerFactory(...)
    +app.include_router(mosaic.router)
    +
    +# now
    +app = FastAPI()
    +mosaic = MosaicTilerFactory(
    +    ...,
    +    path_dependency=lambda: "aaaaaaaaaaaaaa"
    +)
    +app.include_router(mosaic.router)
    +
    +
  • +
  • +

    remove /{search_id} prefix in MosaicTilerFactory routes. Now use parameter injection from global prefix

    +
    # Before
    +app = FastAPI()
    +mosaic = MosaicTilerFactory(
    +    ...,
    +    router_prefix="/mosaics"
    +)
    +app.include_router(mosaic.router, prefix="/mosaics")
    +
    +# Now
    +app = FastAPI()
    +mosaic = MosaicTilerFactory(
    +    ...
    +    router_prefix="/mosaics/{search_id}"
    +)
    +app.include_router(mosaic.router, prefix="/mosaics/{search_id}")
    +
    +
  • +
  • +

    move /info endpoint outside the MosaicTilerFactory to its own extension (titiler.pgstac.extension.searchInfoExtension)

    +
    # Before
    +app = FastAPI()
    +mosaic = MosaicTilerFactory(...)
    +app.include_router(mosaic.router)
    +
    +# Now
    +app = FastAPI()
    +mosaic = MosaicTilerFactory(
    +    ...
    +    extensions=[
    +        searchInfoExtension(),
    +    ]
    +)
    +app.include_router(mosaic.router)
    +
    +
  • +
  • +

    move /register and /list endpoint creation outside the MosaicTilerFactory class

    +
    # before
    +from titiler.pgstac.factory import MosaicTilerFactory
    +
    +mosaic = MosaicTilerFactory(
    +    ...,
    +    router_prefix="/{search_id}",
    +)
    +app.include_router(mosaic.router, prefix="/{search_id}")
    +
    +# Now
    +from titiler.pgstac.factory import (
    +    MosaicTilerFactory,
    +    add_search_register_route,
    +    add_mosaic_register_route,
    +)
    +
    +mosaic = MosaicTilerFactory(
    +    ...,
    +    router_prefix="/{search_id}",
    +)
    +app.include_router(mosaic.router, prefix="/{search_id}")
    +
    +# add /register endpoint
    +add_search_register_route(
    +    app,
    +    # any dependency we want to validate
    +    # when creating the tilejson/map links
    +    tile_dependencies=[
    +        mosaic.layer_dependency,
    +        mosaic.dataset_dependency,
    +        mosaic.pixel_selection_dependency,
    +        mosaic.process_dependency,
    +        mosaic.rescale_dependency,
    +        mosaic.colormap_dependency,
    +        mosaic.render_dependency,
    +        mosaic.pgstac_dependency,
    +        mosaic.reader_dependency,
    +        mosaic.backend_dependency,
    +    ],
    +)
    +# add /list endpoint
    +add_search_list_route(app)
    +
    +
  • +
+

0.8.3 (2024-02-21)

+
    +
  • enable passing ConnectionPool kwargs option in titiler.pgstac.db.connect_to_db function (author @smohiudd, #155) [backported from 1.2.2]; a startup sketch follows this list
  • +
+
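A minimal sketch of forwarding pool options at startup; the pool_kwargs keyword name is an assumption here, check the connect_to_db signature in the installed version:

```python
from fastapi import FastAPI
from titiler.pgstac.db import connect_to_db

app = FastAPI()


@app.on_event("startup")
async def startup_event() -> None:
    # "pool_kwargs" is assumed; the dict is forwarded to psycopg_pool.ConnectionPool
    await connect_to_db(app, pool_kwargs={"min_size": 1, "max_size": 10})
```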

0.8.2 (2024-01-23)

+
    +
  • update rio-tiler version to >6.3.0 (defined in titiler>=0.17)
  • +
  • use new align_bounds_with_dataset=True rio-tiler option in GeoJSON statistics methods for more precise calculation [backported from 1.2.0]
  • +
  • use morecantile TileMatrixSet.cellSize property instead of deprecated/private TileMatrixSet._resolution method [backported from 1.1.0]
  • +
+

0.8.1 (2023-11-10)

+
    +
  • add algorithm options for /statistics [POST] endpoints
  • +
+

0.8.0 (2023-10-06)

+
    +
  • update titiler requirement to >=0.15.0,<0.16
  • +
  • remove max_size default for mosaic's /statistics [POST] endpoint breaking change
  • +
  • add /bbox and /feature [POST] optional endpoints (a request sketch follows this list)
  • +
  • add img_part_dependency attribute in MosaicTilerFactory (defaults to titiler.code.dependencies.PartFeatureParams)
  • +
+
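A minimal request sketch for the two optional part endpoints, assuming the 0.8.x /mosaic prefix, a registered search and a "cog" asset; the bbox values and the feature geometry are placeholders:

```python
import httpx

endpoint = "http://127.0.0.1:8081"
searchid = "5a1b82d38d53a5d200273cbada886bd7"  # placeholder search identifier

# image for a bounding box (minx,miny,maxx,maxy)
bbox_img = httpx.get(
    f"{endpoint}/mosaic/{searchid}/bbox/-87.0,36.1,-86.9,36.2.png",
    params={"assets": "cog"},
)

# image clipped to a GeoJSON Feature geometry
feature = {
    "type": "Feature",
    "properties": {},
    "geometry": {
        "type": "Polygon",
        "coordinates": [[
            [-87.0, 36.1], [-87.0, 36.2], [-86.9, 36.2], [-86.9, 36.1], [-87.0, 36.1],
        ]],
    },
}
feature_img = httpx.post(
    f"{endpoint}/mosaic/{searchid}/feature.png",
    params={"assets": "cog"},
    json=feature,
)
```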

0.7.0 (2023-09-28)

+
    +
  • update requirements to switch to pydantic~=2.0
  • +
  • pydantic>=2.4,<3.0
  • +
  • pydantic-settings~=2.0
  • +
  • geojson-pydantic~=1.0
  • +
  • +

    cogeo-mosaic>=7.0,<8.0

    +
  • +
  • +

    update titiler requirement to >=0.14.0,<0.15

    +
      +
    • +

      replace - by _ in query parameters

      +
        +
      • coord-crs -> coord_crs
      • +
      • dst-crs -> dst_crs
      • +
      +
    • +
    +
  • +
+

0.6.0 (2023-09-18)

+
    +
  • add tilejson URL links for layers defined in mosaic's metadata in /mosaic/register and /mosaic/{mosaic_id}/info response
  • +
  • support multiple layers in /mosaic/{mosaic_id}/WMTSCapabilities.xml endpoint created from mosaic's metadata
  • +
+

breaking change

+
    +
  • +

    In /mosaic/WMTSCapabilities.xml we removed the query-parameters related to the tile endpoint (which are forwarded), so ?assets= is no longer required. +The endpoint will still raise an error if there are no layers in the mosaic metadata and no required tile parameters are passed.

    +
    # before
    +response = httpx.get("/mosaic/{mosaic_id}/WMTSCapabilities.xml")
    +assert response.status_code == 400
    +
    +response = httpx.get("/mosaic/{mosaic_id}/WMTSCapabilities.xml?assets=cog")
    +assert response.status_code == 200
    +
    +# now
    +# If the mosaic has `defaults` layers set in the metadata
    +# we will construct a WMTS document with multiple layers, so no need for the user to pass any `assets=`
    +response = httpx.get("/mosaic/{mosaic_id}/WMTSCapabilities.xml")
    +assert response.status_code == 200
    +with rasterio.open(io.BytesIO(response.content)) as src:
    +    assert src.profile["driver"] == "WMTS"
    +    assert len(src.subdatasets) == 2
    +
    +# If the user pass any valid `tile` parameters, an additional layer will be added to the one from the metadata
    +response = httpx.get("/mosaic/{mosaic_id}/WMTSCapabilities.xml?assets=cog")
    +assert response.status_code == 200
    +with rasterio.open(io.BytesIO(response.content)) as src:
    +    assert src.profile["driver"] == "WMTS"
    +    assert len(src.subdatasets) == 3
    +
    +
  • +
+

0.5.1 (2023-08-03)

+
    +
  • add python-dotenv requirement
  • +
+

0.5.0 (2023-07-20)

+
    +
  • update titiler requirement to >=0.12.0,<0.13
  • +
  • use Annotated Type for Query/Path parameters
  • +
  • re-order endpoints in MosaicTilerFactory to avoid conflicts between tiles and assets endpoints
  • +
  • remove stac-pydantic dependency
  • +
  • add optional root_path setting to specify a url path prefix to use when running the app behind a reverse proxy
  • +
  • add landing page /
  • +
  • use lifespan option instead of deprecated @app.on_event method to initiate/close DB connection
  • +
+

breaking changes

+
    +
  • remove deprecated /{searchid}/{z}/{x}/{y}/assets endpoints
  • +
  • use /api and /api.html for documentation (instead of /openapi.json and /docs)
  • +
  • replace Enum's with Literal types
  • +
  • replace variable TileMatrixSetId by tileMatrixSetId
  • +
  • add pixel_selection_dependency attribute to the MosaicTilerFactory
  • +
+

0.4.1 (2023-06-21)

+
    +
  • update titiler requirement to >=0.11.7
  • +
  • fix /map endpoint template name
  • +
  • rename add_map_viewer to add_viewer option in MosaicTilerFactory for consistency with titiler's options
  • +
+

0.4.0 (2023-05-22)

+
    +
  • remove deprecated /tiles/{searchid}/... endpoints (replaced with /{searchid}/tiles/...)
  • +
  • deprecate /{searchid}/{z}/{x}/{y}/assets endpoints and add /{searchid}/tiles/{z}/{x}/{y}/assets
  • +
  • update minimum titiler requirement to >=0.11.6
  • +
  • remove timing headers
  • +
  • add strict_zoom option (controlled with the MOSAIC_STRICT_ZOOM environment variable) to raise (or not) an error when fetching tiles outside the mosaic min/max zoom range
  • +
+

0.3.3 (2023-04-27)

+
    +
  • update python packaging/build system to pdm-pep517
  • +
  • use Ruff for lint
  • +
  • add retry mechanism on Database connection issues for PGSTACBackend.get_assets() and get_stac_item methods (back ported from 0.2.4)
  • +
+

0.3.2 (2023-03-14)

+
    +
  • update titiler requirement to 0.10.2
  • +
  • fix maximum version of FastAPI to 0.92 (to avoid breaking change of starlette >0.25)
  • +
+

0.3.1 (2022-12-16)

+
    +
  • update Type information for dependencies.get_stac_item (back ported from 0.2.2)
  • +
+

0.3.0 (2022-12-16)

+

breaking changes

+
    +
  • +

    Use /collections/{collection_id}/items/{item_id} prefix for Item endpoint. +

    # Before
    +{endpoint}/stac/info?collection=collection1&item=item1
    +
    +# Now
    +{endpoint}/collections/collection1/items/item1/info
    +

    +
  • +
  • +

    Change tile url path parameter order from /tiles/{searchid}/{TileMatrixSetId}/{z}/{x}/{y} to /{searchid}/tiles/{TileMatrixSetId}/{z}/{x}/{y} +

    # Before
    +{endpoint}/mosaic/tiles/20200307aC0853900w361030/0/0/0
    +
    +# Now
    +{endpoint}/mosaic/20200307aC0853900w361030/tiles/0/0/0
    +

    +
  • +
+

0.2.4 (2023-04-27)

+
    +
  • add retry mechanism on Database connection issues for PGSTACBackend.get_assets() and get_stac_item methods
  • +
+

0.2.3 (2023-03-14)

+
    +
  • fix maximum version of FastAPI to 0.92 (to avoid breaking change of starlette >0.25)
  • +
+

0.2.2 (2022-12-16)

+
    +
  • update Type information for dependencies.get_stac_item
  • +
+

0.2.1 (2022-12-15)

+
    +
  • update titiler requirement to >=0.10.1,<0.11 and fix /map endpoint (to accept multiple TMS)
  • +
+

0.2.0 (2022-12-13)

+
    +
  • add python 3.10 and 3.11 support
  • +
  • update to rio-tiler 4.1
  • +
  • add /{searchid}/map endpoint to the MosaicTilerFactory (added when add_map_viewer is set to True)
  • +
  • add /{searchid}/WMTSCapabilities.xml OGC WMTS endpoint to the MosaicTilerFactory
  • +
  • add /list to the MosaicTilerFactory to list available mosaics (added when add_mosaic_list is set to True)
  • +
+

breaking changes

+
    +
  • remove python 3.7 support
  • +
  • update titiler requirement to >=0.10.0
  • +
  • replace connection_string with database_url in settings.PostgresSettings. We can now directly set the DATABASE_URL environment variable (a sketch follows this list).
  • +
+
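A minimal sketch, assuming a standard PgSTAC connection string; the credentials and host are placeholders:

```python
import os

# point the application at the PgSTAC database with a single variable
os.environ["DATABASE_URL"] = "postgresql://username:password@database:5432/postgis"

from titiler.pgstac.settings import PostgresSettings

settings = PostgresSettings()  # picks up DATABASE_URL from the environment
print(settings.database_url)
```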

Frontend changes

+
    +
  • remove asset_expression (Mosaic and Item)
  • +
  • histogram band names are prefixed with b (e.g. b1) (Mosaic and Item) (ref: cogeo/rio-tiler v4 migration guide)
  • +
  • expressions for STAC have to be in the form {asset}_b{band_name} (e.g. red_b1/green_b1) (Mosaic and Item) (ref: cogeo/rio-tiler v4 migration guide)
  • +
  • added asset_as_band option to force expressions to be in the form {asset} (e.g. red/green) (Mosaic and Item)
  • +
  • expression's bands should now be delimited with ; (previously , was accepted) (Mosaic and Item)
  • +
  • point output model now includes band_names (Item)
  • +
  • added algorithm options
  • +
+

0.1.0 (2022-06-27)

+
    +
  • update titiler.core and titiler.mosaic requirement to 0.7
  • +
  • add MosaicTilerFactory._tilejson_routes method to register TileJSON routes
  • +
  • raise cogeo_mosaic.errors.MosaicNotFoundError when SearchId is not found in pgstac.searches table
  • +
+

breaking changes

+
    +
  • move version definition in titiler.pgstac.__version__
  • +
  • remove unused fetch_options in titiler.pgstac.reader.PgSTACReader
  • +
+

0.1.0a10 (2022-05-16) Pre-Release

+
    +
  • update titiler version and add reader_dependency and backend_dependency in endpoint factory.
  • +
+

0.1.0.a9 (2022-05-05) Pre-Release

+
    +
  • remove LRU cache on all settings classes to enable support for manually providing settings via keyword arguments and to minimize lines of code (author @alukach, stac-utils/titiler-pgstac!54)
  • +
+

0.1.0.a8 (2022-05-02) Pre-Release

+
    +
  • Insert mosaic metadata min/max zoom and bounds in tilejson (stac-utils/titiler-pgstac!51)
  • +
  • allow users to optionally provide PostgresSettings to the connect_to_db() function in the event that they want to customize how their DB credentials are populated (author @alukach, stac-utils/titiler-pgstac!53)
  • +
+

0.1.0.a7 (2022-04-05) Pre-Release

+
    +
  • add feature() method to PGSTACBackend mosaic backend
  • +
  • add /statistics endpoint to return statistics given a GeoJSON feature or featureCollection
  • +
  • add collection in allowed returned fields
  • +
  • switch to pgstac.search to get the STAC Item in titiler.pgstac.dependencies.get_stac_item (stac-utils/titiler-pgstac!50)
  • +
+

0.1.0.a6 (2022-03-14) Pre-Release

+
    +
  • move dependencies to titiler.pgstac.dependencies
  • +
  • add /stac endpoints to work with PgSTAC items
  • +
+

breaking changes

+
    +
  • add /mosaic prefix to the PgSTAC mosaic endpoints
  • +
+

0.1.0.a5 (2022-03-03) Pre-Release

+
    +
  • Add search_dependency to allow customization of the PgSTAC Search query (author @drnextgis, stac-utils/titiler-pgstac!41)
  • +
  • Add PgSTAC Search entries model (stac-utils/titiler-pgstac!43)
  • +
  • Add Metadata specification (stac-utils/titiler-pgstac!38)
  • +
+

breaking changes

+
    +
  • update titiler.core and titiler.mosaic requirement to >=0.5
  • +
  • When registering a search to PgSTAC with the /register endpoint, a default metadata {"type": "mosaic"} will be set.
  • +
  • Renamed titiler.pgstac.models to titiler.pgstac.model
  • +
  • Renamed titiler.pgstac.models.SearchQuery to titiler.pgstac.model.PgSTACSearch (and removed metadata)
  • +
  • +

    output response for /register endpoint: +

    // before
    +{
    +    "searchid": "...",
    +    "metadata": "http://endpoint/.../info",
    +    "tiles": "http://endpoint/.../tilejson.json",
    +}
    +
    +// now
    +{
    +    "searchid": "...",
    +    "links": [
    +        {
    +            "rel": "info",
    +            "href": "http://endpoint/.../info",
    +            "type": "application/json",
    +        },
    +        {
    +            "rel": "tilejson",
    +            "href": "http://endpoint/.../tilejson.json",
    +            "type": "application/json",
    +        }
    +    ]
    +}
    +

    +
  • +
  • +

    output response for /info endpoint: +

    // before
    +{
    +    "hash": "...",
    +    "search": {},
    +    "_where": "...",
    +    ...
    +}
    +
    +// now
    +{
    +    "search": {
    +        "hash": "...",
    +        "search": {},
    +        "_where": "...",
    +        ...
    +    },
    +    "links": [
    +        {
    +            "rel": "self",
    +            "href": "http://endpoint/.../info",
    +            "type": "application/json",
    +        },
    +        {
    +            "rel": "tilejson",
    +            "href": "http://endpoint/.../tilejson.json",
    +            "type": "application/json",
    +        }
    +    ]
    +}
    +

    +
  • +
+

0.1.0.a4 (2022-02-07) Pre-Release

+
    +
  • add tile buffer option to match rio-tiler tile options (stac-utils/titiler-pgstac!31)
  • +
+

0.1.0.a3 (2021-12-15) Pre-Release

+
    +
  • Forward TMS to the STAC Reader (allow multiple TMS) (stac-utils/titiler-pgstac!28)
  • +
+

0.1.0.a2 (2021-12-13) Pre-Release

+
    +
  • Switch to psycopg3
  • +
  • add filter-lang in Search model to support newer PgSTAC (with CQL-2)
  • +
  • add metadata in Search model to allow forwarding metadata to the search query entry in PgSTAC
  • +
+

breaking changes

+
    +
  • Unify reader/writer db pools to request.app.state.dbpool
  • +
  • rename PostgresSettings.db_max_inactive_conn_lifetime to PostgresSettings.max_idle
  • +
  • remove PostgresSettings().reader_connection_string and PostgresSettings().writer_connection_string. Replaced with PostgresSettings().connection_string
  • +
  • update titiler requirement (>= 0.4)
  • +
+

0.1.0.a1 (2021-09-15) Pre-Release

+
    +
  • Surface PgSTAC options (scan_limit, items_limit, time_limit, exitwhenfull and skipcovered) in Tile endpoints (a request sketch follows this list)
  • +
+
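A minimal request sketch showing these options as query parameters on a tile request (using the current endpoint layout rather than the 0.1.0-era one); the values are placeholders and the parameter descriptions are paraphrased from PgSTAC:

```python
import httpx

endpoint = "http://127.0.0.1:8081"
searchid = "5a1b82d38d53a5d200273cbada886bd7"  # placeholder search identifier

resp = httpx.get(
    f"{endpoint}/searches/{searchid}/tiles/WebMercatorQuad/15/8589/12849.png",
    params={
        "assets": "cog",
        "scan_limit": 10000,   # max number of items to scan while building the tile
        "items_limit": 100,    # max number of items to use for the tile
        "time_limit": 5,       # search time limit in seconds
        "exitwhenfull": False, # stop as soon as the tile geometry is fully covered
        "skipcovered": False,  # skip items fully covered by previously returned items
    },
)
```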

breaking changes

+
    +
  • remove psycopg2 requirements to avoid conflict with psycopg2-binary (stac-utils/titiler-pgstac!15)
  • +
+

0.1.0.a0 (2021-09-06) Pre-Release

+

Initial release

+ + + + + + + + + + + + + +
+ + + + + + + + + + \ No newline at end of file diff --git a/1.3.0/release-notes/release-notes.md b/1.3.0/release-notes/release-notes.md new file mode 100644 index 00000000..8abb5b6e --- /dev/null +++ b/1.3.0/release-notes/release-notes.md @@ -0,0 +1,556 @@ +# Release Notes + +## 1.3.0 (2024-05-17) + +* update titiler requirement to `>=0.18.0,<0.19` +* Add `/colorMaps` endpoints to the application +* **Deprecation** remove default `WebMercatorQuad` tile matrix set in `/tiles`, `/tilesjson.json`, `/map`, `/WMTSCapabilities.xml` and `/assets` endpoints + + ``` + # Before + /tiles/{z}/{x}/{y} + /tilejson.json + /map + /WMTSCapabilities.xml + + # Now + /tiles/WebMercatorQuad/{z}/{x}/{y} + /WebMercatorQuad/tilejson.json + /WebMercatorQuad/map + /WebMercatorQuad/WMTSCapabilities.xml + ``` + +* update `titiler.pgstac.model.Link` to match the OGC specification +* use `{tileMatrixSetId}` in templated URL links +* add support for [`render`](https://github.com/stac-extensions/render) and [`item-assets`](https://github.com/stac-extensions/item-assets) STAC Collection extensions for the `STAC Collections` *info* and *wmts* endpoints +* add `/info` endpoint to the `STAC Collections` endpoints +* add `/collections` and `/collections/{collection_id}` endpoints when `TITILER_PGSTAC_API_DEBUG=TRUE` +* Expect the `Metadata.defaults` configurations to follow the STAC `render` extension (https://github.com/stac-extensions/render) + + ```json + // before + "blue": { + "rescale": ["0,100"], + "assets": "b1", + } + + // now + "blue": { + "rescale": [[0, 100]], + "assets": ["b1"], + } + ``` + +## 1.2.3 (2024-03-25) + +* add python 3.12 support +* Add `extra="ignore"` option `CacheSettings` to fix pydantic issue when using `.env` file + +## 1.2.2 (2024-02-21) + +* enable passing `ConnectionPool` *kwargs* option in `titiler.pgstac.db.connect_to_db` function (author @smohiudd, https://github.com/stac-utils/titiler-pgstac/pull/155) + +## 1.2.1 (2024-01-19) + +* fix invalid url parsing in HTML responses + +## 1.2.0 (2024-01-17) + +* update titiler requirement to `>=0.17.0,<0.18` +* use new `align_bounds_with_dataset=True` rio-tiler option in GeoJSON statistics methods for more precise calculation + +## 1.1.0 (2024-01-10) + +* update titiler requirement to `>=0.16.0,<0.17` +* use morecantile `TileMatrixSet.cellSize` property instead of deprecated/private `TileMatrixSet._resolution` method (author @hrodmn, https://github.com/stac-utils/titiler-pgstac/pull/148) +* add `/point/{lon},{lat}` endpoint in `MosaicTilerFactory` (co-author @hrodmn, https://github.com/stac-utils/titiler-pgstac/pull/150) + +## 1.0.0 (2023-12-12) + +* no change since `1.0.0a4` + +## 1.0.0a4 (2023-11-10) + +* add `algorithm` options for `/statistics [POST]` endpoints (back-ported from 0.8.1) + +## 1.0.0a3 (2023-11-03) + +* remove `reverse` option in `PGSTACBackend` mosaic backend. Reverse item order should be achieved with STAC search sortby. 
+ +## 1.0.0a2 (2023-11-02) + +* update titiler's dependency to `>=0.15.2,<0.16` +* rename `dependencies.TileParams` to `dependencies.TmsTileParams` + +## 1.0.0a1 (2023-10-20) + +* rename `dependencies.ItemPathParams` to `ItemIdParams` **breaking change** + +## 1.0.0a0 (2023-10-20) + +* add `pgstac_dependency` attribute in `MosaicTilerFactory` (defaults to `dependencies.PgSTACParams`) + +* add database's `pool` check in startup event + +* add *metadata layers* links in mosaic's `/info` response for TileJSON, map and wmts endpoint links + +* add `CollectionIdParams` dependency to retrieve a SearchId for a CollectionId + +* add `/collections/{collection_id}` virtual mosaic endpoints + +* update endpoints Tags (`STAC Search`, `STAC Collection`, `STAC Item`) + +### Endpoint breaking changes + +* move PgSTAC Search Virtual Mosaic's endpoints from `/mosaic` to `/searches` + +* in `model.RegisterResponse` (model used in `/register` endpoint) rename `searchid` by `id` + + ```python + # before + resp = httpx.post("/mosaic/register", body={"collections": ["my-collection"], "filter-lang": "cql-json"}) + assert resp.json()["searchid"] + + # now + resp = httpx.post("/searches/register", body={"collections": ["my-collection"], "filter-lang": "cql-json"}) + assert resp.json()["id"] + ``` + +### API breaking changes + +* rename `dependencies.PathParams` to `dependencies.SearchIdParams` + +* rename `searchid` path parameter to `search_id` in `SearchIdParams` + +* move `check_query_params` methods outside `MosaicTilerFactory` class + +* make `path_dependency` a required input to `MosaicTilerFactory` class + + ```python + # before + app = FastAPI() + mosaic = MosaicTilerFactory(...) + app.include_router(mosaic.router) + + # now + app = FastAPI() + mosaic = MosaicTilerFactory( + ..., + path_dependency=lambda: "aaaaaaaaaaaaaa" + ) + app.include_router(mosaic.router) + ``` + +* remove `/{search_id}` prefix in `MosaicTilerFactory` routes. Now use parameter injection from global prefix + + ```python + # Before + app = FastAPI() + mosaic = MosaicTilerFactory( + ..., + router_prefix="/mosaics" + ) + app.include_router(mosaic.router, prefix="/mosaics") + + # Now + app = FastAPI() + mosaic = MosaicTilerFactory( + ... + router_prefix="/mosaics/{search_id}" + ) + app.include_router(mosaic.router, prefix="/mosaics/{search_id}") + ``` + +* move `/info` endpoint outside the `MosaicTilerFactory` to its own *extension* (`titiler.pgstac.extension.searchInfoExtension`) + + ```python + # Before + app = FastAPI() + mosaic = MosaicTilerFactory(...) + app.include_router(mosaic.router) + + # Now + app = FastAPI() + mosaic = MosaicTilerFactory( + ... 
+ extensions=[ + searchInfoExtension(), + ] + ) + app.include_router(mosaic.router) + ``` + +* move `/register` and `/list` endpoint creation outside the `MosaicTilerFactory` class + + ```python + # before + from titiler.pgstac.factory import MosaicTilerFactory + + mosaic = MosaicTilerFactory( + ..., + router_prefix="/{search_id}", + ) + app.include_router(mosaic.router, prefix="/{search_id}") + + # Now + from titiler.pgstac.factory import ( + MosaicTilerFactory, + add_search_register_route, + add_mosaic_register_route, + ) + + mosaic = MosaicTilerFactory( + ..., + router_prefix="/{search_id}", + ) + app.include_router(mosaic.router, prefix="/{search_id}") + + # add /register endpoint + add_search_register_route( + app, + # any dependency we want to validate + # when creating the tilejson/map links + tile_dependencies=[ + mosaic.layer_dependency, + mosaic.dataset_dependency, + mosaic.pixel_selection_dependency, + mosaic.process_dependency, + mosaic.rescale_dependency, + mosaic.colormap_dependency, + mosaic.render_dependency, + mosaic.pgstac_dependency, + mosaic.reader_dependency, + mosaic.backend_dependency, + ], + ) + # add /list endpoint + add_search_list_route(app) + ``` + +## 0.8.3 (2024-02-21) + +* enable passing `ConnectionPool` kwargs option in `titiler.pgstac.db.connect_to_db` function (author @smohiudd, #155) [backported from 1.2.2] + +## 0.8.2 (2024-01-23) + +* update rio-tiler version to `>6.3.0` (defined in `titiler>=0.17`) +* use new `align_bounds_with_dataset=True` rio-tiler option in GeoJSON statistics methods for more precise calculation [backported from 1.2.0] +* use morecantile `TileMatrixSet.cellSize` property instead of deprecated/private TileMatrixSet._resolution method [backported from 1.1.0] + +## 0.8.1 (2023-11-10) + +* add `algorithm` options for `/statistics [POST]` endpoints + +## 0.8.0 (2023-10-06) + +* update titiler requirement to `>=0.15.0,<0.16` +* remove `max_size` default for mosaic's `/statistics [POST]` endpoint **breaking change** +* add `/bbox` and `/feature [POST]` optional endpoints +* add `img_part_dependency` attribute in `MosaicTilerFactory` (defaults to `titiler.code.dependencies.PartFeatureParams`) + +## 0.7.0 (2023-09-28) + +* update requirements to switch to pydantic~=2.0 + - pydantic>=2.4,<3.0 + - pydantic-settings~=2.0 + - geojson-pydantic~=1.0 + - cogeo-mosaic>=7.0,<8.0 + +* update titiler requirement to `>=0.14.0,<0.15` + + - replace `-` by `_` in query parameters + + * coord-crs -> coord_crs + * dst-crs -> dst_crs + +## 0.6.0 (2023-09-18) + +* add `tilejson` URL links for `layers` defined in mosaic's metadata in `/mosaic/register` and `/mosaic/{mosaic_id}/info` response +* support multiple `layers` in `/mosaic/{mosaic_id}/WMTSCapabilities.xml` endpoint created from mosaic's metadata + +**breaking change** + +* In `/mosaic/WMTSCapabilities.xml` we removed the query-parameters related to the `tile` endpoint (which are forwarded) so `?assets=` is no more required. +The endpoint will still raise an error if there are no `layers` in the mosaic metadata and no required tile's parameters are passed. 
+ + ```python + # before + response = httpx.get("/mosaic/{mosaic_id}/WMTSCapabilities.xml") + assert response.status_code == 400 + + response = httpx.get("/mosaic/{mosaic_id}/WMTSCapabilities.xml?assets=cog") + assert response.status_code == 200 + + # now + # If the mosaic has `defaults` layers set in the metadata + # we will construct a WMTS document with multiple layers, so no need for the user to pass any `assets=` + response = httpx.get("/mosaic/{mosaic_id}/WMTSCapabilities.xml") + assert response.status_code == 200 + with rasterio.open(io.BytesIO(response.content)) as src: + assert src.profile["driver"] == "WMTS" + assert len(src.subdatasets) == 2 + + # If the user pass any valid `tile` parameters, an additional layer will be added to the one from the metadata + response = httpx.get("/mosaic/{mosaic_id}/WMTSCapabilities.xml?assets=cog") + assert response.status_code == 200 + with rasterio.open(io.BytesIO(response.content)) as src: + assert src.profile["driver"] == "WMTS" + assert len(src.subdatasets) == 3 + ``` + +## 0.5.1 (2023-08-03) + +* add `python-dotenv` requirement + +## 0.5.0 (2023-07-20) + +* update `titiler` requirement to `>=0.12.0,<0.13` +* use `Annotated` Type for Query/Path parameters +* re-order endpoints in `MosaicTilerFactory` to avoid conflicts between `tiles` and `assets` endpoints +* remove `stac-pydantic` dependency +* add optional `root_path` setting to specify a url path prefix to use when running the app behind a reverse proxy +* add landing page `/` +* use `lifespan` option instead of deprecated `@app.on_event` method to initiate/close DB connection + +**breaking changes** + +* remove deprecated `/{searchid}/{z}/{x}/{y}/assets` endpoints +* use /api and /api.html for documentation (instead of /openapi.json and /docs) +* replace Enum's with `Literal` types +* replace variable `TileMatrixSetId` by `tileMatrixSetId` +* add `pixel_selection_dependency` attribute to the `MosaicTilerFactory` + +## 0.4.1 (2023-06-21) + +* update `titiler` requirement to `>=0.11.7` +* fix `/map` endpoint template name +* rename `add_map_viewer` to `add_viewer` option in `MosaicTilerFactory` for consistency with `titiler's` options + +## 0.4.0 (2023-05-22) + +* remove deprecated `/tiles/{searchid}/...` endpoints (replaced with `/{searchid}/tiles/...`) +* depreciate `/{searchid}/{z}/{x}/{y}/assets` endpoints and add `/{searchid}/tiles/{z}/{x}/{y}/assets` +* update minimum titiler requirement to `>=0.11.6` +* remove timing headers +* add `strict_zoom` option (controled with `MOSAIC_STRICT_ZOOM` environment variable) to raise (or not) error when fetching tile outside mosaic min/max zoom range + +## 0.3.3 (2023-04-27) + +* update python packaging/build system to `pdm-pep517` +* use `Ruff` for lint +* add retry mechanism on Database connection issues for `PGSTACBackend.get_assets()` and `get_stac_item` methods (back ported from 0.2.4) + +## 0.3.2 (2023-03-14) + +* update titiler requirement to `0.10.2` +* fix maximum version of FastAPI to 0.92 (to avoid breaking change of starlette >0.25) + +## 0.3.1 (2022-12-16) + +* update Type information for `dependencies.get_stac_item` (back ported from 0.2.2) + +## 0.3.0 (2022-12-16) + +**breaking changes** + +* Use `/collections/{collection_id}/items/{item_id}` prefix for **Item** endpoint. 
+ ``` + # Before + {endpoint}/stac/info?collection=collection1&item=item1 + + # Now + {endpoint}/collections/collection1/items/item1/info + ``` + +* Change tile url path parameter order from `/tiles/{searchid}/{TileMatrixSetId}/{z}/{x}/{y}` to `/{searchid}/tiles/{TileMatrixSetId}/{z}/{x}/{y}` + ``` + # Before + {endpoint}/mosaic/tiles/20200307aC0853900w361030/0/0/0 + + # Now + {endpoint}/mosaic/20200307aC0853900w361030/tiles/0/0/0 + ``` +## 0.2.4 (2023-04-27) + +* add retry mechanism on Database connection issues for `PGSTACBackend.get_assets()` and `get_stac_item` methods + +## 0.2.3 (2023-03-14) + +* fix maximum version of FastAPI to 0.92 (to avoid breaking change of starlette >0.25) + +## 0.2.2 (2022-12-16) + +* update Type information for `dependencies.get_stac_item` + +## 0.2.1 (2022-12-15) + +* update titiler requirement to `>=0.10.1,<0.11` and fix `/map` endpoint (to accept multiple TMS) + +## 0.2.0 (2022-12-13) + +* add python 3.10 and 3.11 support +* update to rio-tiler 4.1 +* add `/{searchid}/map` endpoint to the `MosaicTilerFactory` (added when `add_map_viewer` is set to `True`) +* add `/{searchid}/WMTSCapabilities.xml` OGC WMTS endpoint to the `MosaicTilerFactory` +* add `/list` to the `MosaicTilerFactory` to list available mosaics (added when `add_mosaic_list` is set to `True`) + +**breaking changes** + +* remove python 3.7 support +* update titiler requirement to `>=0.10.0` +* replace `connection_string` by `database_url` in `settings.PostgresSettings`. We can now directly set `DATABASE_URL` environment variable. + +#### Frontend changes + +- remove `asset_expression` (Mosaic and Item) +- histogram band names are prefixed with `b` (e.g `b1`) (Mosaic and Item) (ref: https://github.com/cogeotiff/rio-tiler/blob/main/docs/src/v4_migration.md#band-names) +- expression for STAC have to be in form of `{asset}_b{band_name}` (e.g `red_b1/green_b1`) (Mosaic and Item) (ref: https://github.com/cogeotiff/rio-tiler/blob/main/docs/src/v4_migration.md#multibasereader-expressions) +- added `asset_as_band` option to force expression to be in form of `{asset}` (e.g `red/green`) (Mosaic and Item) +- expression's band should now be delimited with `;` (previously `,` was accepted) (Mosaic and Item) +- point output model to include band_names (Item) +- added `algorithm` options + +## 0.1.0 (2022-06-27) + +* update `titiler.core` and `titiler.mosaic` requirement to `0.7` +* add `MosaicTilerFactory._tilejson_routes` method to register `TileJSON` routes +* raise `cogeo_mosaic.errors.MosaicNotFoundError` when SearchId is not found in *pgstac.searches* table + +**breaking changes** + +* move version definition in `titiler.pgstac.__version__` +* remove unused `fetch_options` in `titiler.pgstac.reader.PgSTACReader` + +## 0.1.0a10 (2022-05-16) Pre-Release + +* update `titiler` version and add `reader_dependency` and `backend_dependency` in endpoint factory. 
+ +## 0.1.0.a9 (2022-05-05) Pre-Release + +* remove LRU cache on all settings classes to enable support for manually providing settings via keyword arguments and to minimize lines of code (author @alukach, https://github.com/stac-utils/titiler-pgstac/pull/54) + +## 0.1.0.a8 (2022-05-02) Pre-Release + +* Insert mosaic metadata `min/max zoom` and `bounds` in tilejson (https://github.com/stac-utils/titiler-pgstac/pull/51) +* allow users the ability to optionally provide `PostgresSettings` to `connect_to_db()` function in the event that they want to customize how their DB credentials are populated (author @alukach, https://github.com/stac-utils/titiler-pgstac/pull/53) + +## 0.1.0.a7 (2022-04-05) Pre-Release + +* add `feature()` method to `PGSTACBackend` mosaic backend +* add `/statistics` endpoint to return statistics given a GeoJSON feature or featureCollection +* add `collection` in allowed returned fields +* switch to `pgstac.search` to get the STAC Item in `titiler.pgstac.dependencies.get_stac_item` (https://github.com/stac-utils/titiler-pgstac/pull/50) + +## 0.1.0.a6 (2022-03-14) Pre-Release + +* move dependencies to `titiler.pgstac.dependencies` +* add `/stac` endpoints to work with PgSTAC items + +**breaking changes** + +* add `/mosaic` prefix to the PgSTAC mosaic endpoints + +## 0.1.0.a5 (2022-03-03) Pre-Release + +* Add `search_dependency` to allow customization of the PgSTAC Search query (Author @drnextgis, https://github.com/stac-utils/titiler-pgstac/pull/41) +* Add PgSTAC Search entries model (https://github.com/stac-utils/titiler-pgstac/pull/43) +* Add `Metadata` specification (https://github.com/stac-utils/titiler-pgstac/pull/38) + +**breaking changes** + +* update `titiler.core` and `titiler.mosaic` requirement to `>=0.5` +* When registering a `search` to PgSTAC with the `/register` endpoint, a default metadata `{"type": "mosaic"}` will be set. +* Renamed `titiler.pgstac.models` to `titiler.pgstac.model` +* Renamed `titiler.pgstac.models.SearchQuery` to `titiler.pgstac.model.PgSTACSearch` (and removed `metadata`) +* output response for `/register` endpoint: +```js +// before +{ + "searchid": "...", + "metadata": "http://endpoint/.../info", + "tiles": "http://endpoint/.../tilejson.json", +} + +// now +{ + "searchid": "...", + "links": [ + { + "rel": "info", + "href": "http://endpoint/.../info", + "type": "application/json", + }, + { + "rel": "tilejson", + "href": "http://endpoint/.../tilejson.json", + "type": "application/json", + } + ] +} +``` + +* output response for `/info` endpoint: +```js +// before +{ + "hash": "...", + "search": {}, + "_where": "...", + ... +} + +// now +{ + "search": { + "hash": "...", + "search": {}, + "_where": "...", + ... 
+ }, + "links": [ + { + "rel": "self", + "href": "http://endpoint/.../info", + "type": "application/json", + }, + { + "rel": "tilejson", + "href": "http://endpoint/.../tilejson.json", + "type": "application/json", + } + ] +} +``` + +## 0.1.0.a4 (2022-02-07) Pre-Release + +* add tile `buffer` option to match rio-tiler tile options (https://github.com/stac-utils/titiler-pgstac/pull/31) + +## 0.1.0.a3 (2021-12-15) Pre-Release + +* Forward TMS to the STAC Reader (allow multiple TMS) (https://github.com/stac-utils/titiler-pgstac/pull/28) + +## 0.1.0.a2 (2021-12-13) Pre-Release + +* Switch to **psycopg3** +* add `filter-lang` in Search model to support newer PgSTAC (with CQL-2) +* add `metadata` in Search model to allow forwarding metadata to the search query entry in PgSTAC + +**breaking changes** + +* Unify *reader/writer* db pools to `request.app.state.dbpool` +* rename `PostgresSettings.db_max_inactive_conn_lifetime` to `PostgresSettings.max_idle` +* remove `PostgresSettings().reader_connection_string` and `PostgresSettings().writer_connection_string`. Replaced with `PostgresSettings().connection_string` +* update titiler requirement (>= 0.4) + +## 0.1.0.a1 (2021-09-15) Pre-Release + +* Surface PgSTAC options (`scan_limit`, `items_limit`, `time_limit`, `exitwhenfull` and `skipcovered`) in Tile endpoints + +**breaking changes** + +* remove `psycopg2` requirements to avoid conflict with `psycopg2-binary` (https://github.com/stac-utils/titiler-pgstac/pull/15) + +## 0.1.0.a0 (2021-09-06) Pre-Release + +Initial release diff --git a/1.3.0/search/search_index.json b/1.3.0/search/search_index.json new file mode 100644 index 00000000..cf7361c1 --- /dev/null +++ b/1.3.0/search/search_index.json @@ -0,0 +1 @@ +{"config":{"lang":["en"],"separator":"[\\s\\-]+","pipeline":["stopWordFilter"]},"docs":[{"location":"","title":"Home","text":"

Connect PgSTAC and TiTiler.

Documentation: stac-utils.github.io/titiler-pgstac/

Source Code: stac-utils/titiler-pgstac

TiTiler-PgSTAC is a TiTiler extension that connects to a PgSTAC database to create dynamic mosaics based on search queries.

"},{"location":"#installation","title":"Installation","text":"

To install from PyPI and run:

# Make sure to have pip up to date\n$ python -m pip install -U pip\n\n# Install `psycopg` or `psycopg[\"binary\"]` or `psycopg[\"c\"]`\n$ python -m pip install psycopg[\"binary\"]\n\n$ python -m pip install titiler.pgstac\n

To install from sources and run for development:

$ git clone https://github.com/stac-utils/titiler-pgstac.git\n$ cd titiler-pgstac\n$ python -m pip install -e .\n
"},{"location":"#pgstac-version","title":"PgSTAC version","text":"

titiler.pgstac depends on pgstac >=0.3.4 (github.com/stac-utils/pgstac/blob/main/CHANGELOG.md#v034).

"},{"location":"#psycopg-requirement","title":"psycopg requirement","text":"

titiler.pgstac depends on the psycopg library. Because there are three ways of installing this package (psycopg, psycopg[\"c\"], or psycopg[\"binary\"]), the user must install it separately from titiler.pgstac.

  • psycopg: no wheel, pure python implementation. It requires the libpq installed in the system.
  • psycopg[\"binary\"]: binary wheel distribution (shipped with libpq) of the psycopg package and is simpler for development. It requires development packages installed on the client machine.
  • psycopg[\"c\"]: a C (faster) implementation of the libpq wrapper. It requires the libpq installed in the system.

psycopg[c] or psycopg are generally recommended for production use.

In titiler.pgstac setup.py, we have added three options to let users choose which psycopg install to use:

  • pip install titiler.pgstac[\"psycopg\"]: pure python
  • pip install titiler.pgstac[\"psycopg-c\"]: use the C wrapper (requires development packages installed on the client machine)
  • pip install titiler.pgstac[\"psycopg-binary\"]: binary wheels
"},{"location":"#launch","title":"Launch","text":"

You'll need to have POSTGRES_USER, POSTGRES_PASS, POSTGRES_DBNAME, POSTGRES_HOST, POSTGRES_PORT variables set in your environment pointing to your Postgres database where pgstac has been installed.

export POSTGRES_USER=username\nexport POSTGRES_PASS=password\nexport POSTGRES_DBNAME=postgis\nexport POSTGRES_HOST=database\nexport POSTGRES_PORT=5432\n
$ pip install uvicorn\n$ uvicorn titiler.pgstac.main:app --reload\n
"},{"location":"#using-docker","title":"Using Docker","text":"
$ git clone https://github.com/stac-utils/titiler-pgstac.git\n$ cd titiler-pgstac\n$ docker-compose up --build tiler\n

It runs titiler.pgstac using Gunicorn web server. To run Uvicorn based version:

$ docker-compose up --build tiler-uvicorn\n
"},{"location":"#contribution-development","title":"Contribution & Development","text":"

See CONTRIBUTING.md

"},{"location":"#license","title":"License","text":"

See LICENSE

"},{"location":"#authors","title":"Authors","text":"

See contributors for a listing of individual contributors.

"},{"location":"#changes","title":"Changes","text":"

See CHANGES.md.

"},{"location":"contributing/","title":"Development - Contributing","text":"

Issues and pull requests are more than welcome: github.com/stac-utils/titiler-pgstac/issues

dev install

$ git clone https://github.com/stac-utils/titiler-pgstac.git\n$ cd titiler\n$ pip install pre-commit -e .[\"dev,test\"]\n

You can then run the tests with the following command:

python -m pytest --cov titiler.pgstac --cov-report term-missing\n

This repo is set to use pre-commit to run isort, flake8, pydocstring, black (\"uncompromising Python code formatter\") and mypy when committing new code.

$ pre-commit install\n
"},{"location":"intro/","title":"Intro","text":"

TiTiler.PgSTAC is a TiTiler extension that creates dynamic tilers connected to PgSTAC databases.

By default the main application (titiler.pgstac.main.app) provides three sets of endpoints:

  • /searches/{search_id}: Dynamic mosaic tiler based on PgSTAC Search Query

  • /collections/{collection_id}: Dynamic mosaic tiler based on STAC Collection

  • /collections/{collection_id}/items/{item_id}: Dynamic tiler for single STAC item (stored in PgSTAC)

"},{"location":"intro/#stac-searches-searchessearch_id","title":"STAC Searches - /searches/{search_id}","text":""},{"location":"intro/#register-a-pgstac-search-request","title":"Register a PgSTAC Search request","text":"

Important

In TiTiler.PgSTAC a STAC Search Query is equivalent to a Virtual Mosaic and a PgSTAC Search Hash is equivalent to a Mosaic Identifier.

Before being able to create Map Tiles, the user needs to register a Search Query within the PgSTAC database (in the searches table). By default, TiTiler.PgSTAC has a /searches/register (POST) endpoint which will:

  • validate the search query (based on the STAC API specification item-search)

  • send the search query to the postgres database using the search_query PgSTAC function

  • return a PgSTAC Search hash

Example

curl -X 'POST' 'http://127.0.0.1:8081/searches/register' \\\n  -H 'accept: application/json' \\\n  -H 'Content-Type: application/json' \\\n  -d '{\"collections\":[\"landsat-c2l2-sr\"], \"bbox\":[-123.75,34.30714385628804,-118.125,38.82259097617712], \"filter-lang\": \"cql-json\"}' | jq\n\n>> {\n  \"id\": \"5a1b82d38d53a5d200273cbada886bd7\",\n  \"links\": [\n    {\n      \"rel\": \"metadata\",\n      \"type\": \"application/json\",\n      \"href\": \"http://127.0.0.1:8081/searches/5a1b82d38d53a5d200273cbada886bd7/info\"\n    },\n    {\n      \"rel\": \"tilejson\",\n      \"type\": \"application/json\",\n      \"href\": \"http://127.0.0.1:8081/searches/5a1b82d38d53a5d200273cbada886bd7/tilejson.json\"\n    }\n  ]\n}\n\n# Or using CQL-2\ncurl -X 'POST' 'http://127.0.0.1:8081/searches/register' \\\n  -H 'accept: application/json' \\\n  -H 'Content-Type: application/json' \\\n  -d '{\"filter\": {\"op\": \"and\", \"args\": [{\"op\": \"=\", \"args\": [{\"property\": \"collection\"}, \"landsat-c2l2-sr\"]}, {\"op\": \"s_intersects\", \"args\": [{\"property\": \"geometry\"}, {\"coordinates\": [[[-123.75, 34.30714385628804], [-123.75, 38.82259097617712], [-118.125, 38.82259097617712], [-118.125, 34.30714385628804], [-123.75, 34.30714385628804]]], \"type\": \"Polygon\"}]}]}}' | jq\n\n>> {\n  \"id\": \"5063721f06957d6b2320326d82e90d1e\",\n  \"links\": [\n    {\n      \"rel\": \"metadata\",\n      \"type\": \"application/json\",\n      \"href\": \"http://127.0.0.1:8081/searches/5063721f06957d6b2320326d82e90d1e/info\"\n    },\n    {\n      \"rel\": \"tilejson\",\n      \"type\": \"application/json\",\n      \"href\": \"http://127.0.0.1:8081/searches/5063721f06957d6b2320326d82e90d1e/tilejson.json\"\n    }\n  ]\n}\n
curl http://127.0.0.1:8081/searches/5063721f06957d6b2320326d82e90d1e/info | jq\n\n>> {\n  \"search\": {\n    \"hash\": \"5063721f06957d6b2320326d82e90d1e\",  # <-- this is the PgSTAC Hash = search/mosaic identifier\n    \"search\": {  # <-- Summary of the search request\n      \"filter\": {  # <-- this is CQL2 filter associated with the search\n        \"op\": \"and\",\n        \"args\": [\n          {\n            \"op\": \"=\",\n            \"args\": [\n              {\n                \"property\": \"collection\"\n              },\n              \"landsat-c2l2-sr\"\n            ]\n          },\n          {\n            \"op\": \"s_intersects\",\n            \"args\": [\n              {\n                \"property\": \"geometry\"\n              },\n              {\n                \"type\": \"Polygon\",\n                \"coordinates\": [\n                  [\n                    [\n                      -123.75,\n                      34.30714385628804\n                    ],\n                    [\n                      -123.75,\n                      38.82259097617712\n                    ],\n                    [\n                      -118.125,\n                      38.82259097617712\n                    ],\n                    [\n                      -118.125,\n                      34.30714385628804\n                    ],\n                    [\n                      -123.75,\n                      34.30714385628804\n                    ]\n                  ]\n                ]\n              }\n            ]\n          }\n        ]\n      }\n    },\n    \"_where\": \"(  ( (collection_id = 'landsat-c2l2-sr') and st_intersects(geometry, '0103000020E610000001000000050000000000000000F05EC055F6687D502741400000000000F05EC02D553EA94A6943400000000000885DC02D553EA94A6943400000000000885DC055F6687D502741400000000000F05EC055F6687D50274140'::geometry) )  )  \",  # <-- internal pgstac WHERE expression\n    \"orderby\": \"datetime DESC, id DESC\",\n    \"lastused\": \"2022-03-03T11:44:55.878504+00:00\",  # <-- internal pgstac variable\n    \"usecount\": 2,  # <-- internal pgstac variable\n    \"metadata\": {  # <-- titiler-pgstac Mosaic Metadata\n      \"type\": \"mosaic\"  # <-- when we use the `/searches/register` endpoint, titiler-pgstac will add `type=mosaic` to the metadata\n    }\n  },\n  \"links\": [\n    {\n      \"rel\": \"self\",\n      \"type\": \"application/json\",\n      \"href\": \"http://127.0.0.1:8081/searches/5063721f06957d6b2320326d82e90d1e/info\"\n    },\n    {\n      \"rel\": \"tilejson\",\n      \"type\": \"application/json\",\n      \"href\": \"http://127.0.0.1:8081/searches/5063721f06957d6b2320326d82e90d1e/tilejson.json\"\n    }\n  ]\n}\n
"},{"location":"intro/#mosaic-metadata","title":"Mosaic Metadata","text":"

In addition to the search query, a user can pass metadata, which will be saved in the postgres table.

curl -X 'POST' 'http://127.0.0.1:8081/searches/register' \\\n  -H 'accept: application/json' \\\n  -H 'Content-Type: application/json' \\\n  -d '{\"collections\":[\"landsat-c2l2-sr\"], \"bbox\":[-123.75,34.30714385628804,-118.125,38.82259097617712], \"filter-lang\": \"cql-json\", \"metadata\": {\"minzoom\": 8, \"maxzoom\": 13, \"assets\": [\"B04\", \"B03\", \"B02\"], \"defaults\": {\"true_color\": {\"assets\": [\"B04\", \"B03\", \"B02\"], \"color_formula\": \"Gamma RGB 3.5 Saturation 1.7 Sigmoidal RGB 15 0.35\"}}}}' | jq\n\n>> {\n  \"id\": \"f31d7de8a5ddfa3a80b9a9dd06378db1\",\n  \"links\": [\n    {\n      \"rel\": \"metadata\",\n      \"type\": \"application/json\",\n      \"href\": \"http://127.0.0.1:8081/searches/f31d7de8a5ddfa3a80b9a9dd06378db1/info\"\n    },\n    {\n      \"rel\": \"tilejson\",\n      \"type\": \"application/json\",\n      \"href\": \"http://127.0.0.1:8081/searches/f31d7de8a5ddfa3a80b9a9dd06378db1/tilejson.json\"\n    }\n  ]\n}\n\ncurl http://127.0.0.1:8081/searches/f31d7de8a5ddfa3a80b9a9dd06378db1/info | jq '.search.metadata'\n>> {\n  \"type\": \"mosaic\",\n  \"minzoom\": 8,\n  \"maxzoom\": 13,\n  \"assets\": [\n    \"B04\",\n    \"B03\",\n    \"B02\"\n  ],\n  \"defaults\": {\n    \"true_color\": {\n      \"assets\": [\n        \"B04\",\n        \"B03\",\n        \"B02\"\n      ],\n      \"color_formula\": \"Gamma RGB 3.5 Saturation 1.7 Sigmoidal RGB 15 0.35\"\n    }\n  }\n}\n
"},{"location":"intro/#fetch-mosaic-tiles","title":"Fetch mosaic Tiles","text":"

When we have an id we can call the dynamic tiler and ask for Map Tiles.

How it works

On each Tile request, the tiler API calls the PgSTAC geometrysearch function with the id and the Tile geometry to get the list of STAC Items (code). Then, based on the assets parameter, the tiler constructs the tile image (code).

Important

Because Tiles will be created from STAC Items, we HAVE TO pass the assets={stac asset} option to the tile endpoint to tell the tiler which STAC assets have to be used.

See full list of options

Example

curl 'http://127.0.0.1:8081/searches/f1ed59f0a6ad91ed80ae79b7b52bc707/tiles/8/40/102.png?assets=B01&rescale=0,16000 > 8-40-102.png\n
"},{"location":"intro/#stac-collection-collectionscollection_id","title":"STAC Collection - /collections/{collection_id}","text":"

No need for the user to register search queries for those endpoints. The tiler will automatically register a search query (collection={collection_id}).

example

curl http://127.0.0.1:8081/collections/my-collection/tilejson.json?assets=data\n{\n  \"tilejson\": \"2.2.0\",\n  \"name\": \"Mosaic for 'my-collection' Collection\",\n  \"version\": \"1.0.0\",\n  \"scheme\": \"xyz\",\n  \"tiles\": [\n    \"http://127.0.0.1:8081/collections/my-collection/tiles/WebMercatorQuad/{z}/{x}/{y}?assets=data\"\n  ],\n  \"minzoom\": 0,\n  \"maxzoom\": 24,\n  \"bounds\": [\n    -180,\n    -90,\n    180,\n    90\n  ],\n  \"center\": [\n    0,\n    0,\n    0\n  ]\n}\n
"},{"location":"intro/#stac-item-collectionscollection_iditemsitem_id","title":"STAC Item - /collections/{collection_id}/items/{item_id}","text":"

titiler-pgstac can also be used to access individual items stored in the PgSTAC database. By default the titiler-pgstac application will have a set of /collections/{collection_id}/items/{item_id}/... endpoints. The endpoints are created using titiler.core.factory.MultiBaseTilerFactory but use a custom path_dependency with collection_id and item_id path parameters instead of the STAC url as a query parameter.

example

curl http://127.0.0.1:8081/collections/world/items/world_20000_5000/info | jq\n{\n  \"asset\": {\n    \"bounds\": [\n      153.5000000000667,\n      -76.83333333336668,\n      179.8333333334053,\n      6.4999999999833165\n    ],\n    \"minzoom\": 3,\n    \"maxzoom\": 6,\n    \"band_metadata\": [\n      [\n        \"b1\",\n        {}\n      ],\n      [\n        \"b2\",\n        {}\n      ],\n      [\n        \"b3\",\n        {}\n      ]\n    ],\n    \"band_descriptions\": [\n      [\n        \"b1\",\n        \"\"\n      ],\n      [\n        \"b2\",\n        \"\"\n      ],\n      [\n        \"b3\",\n        \"\"\n      ]\n    ],\n    \"dtype\": \"uint8\",\n    \"nodata_type\": \"None\",\n    \"colorinterp\": [\n      \"red\",\n      \"green\",\n      \"blue\"\n    ],\n    \"driver\": \"GTiff\",\n    \"count\": 3,\n    \"width\": 1580,\n    \"height\": 5000,\n    \"overviews\": [\n      2,\n      4,\n      8,\n      16\n    ]\n  }\n}\n

See full list of endpoints

"},{"location":"release-notes/","title":"Release Notes","text":""},{"location":"release-notes/#130-2024-05-17","title":"1.3.0 (2024-05-17)","text":"
  • update titiler requirement to >=0.18.0,<0.19
  • Add /colorMaps endpoints to the application
  • Deprecation remove default WebMercatorQuad tile matrix set in /tiles, /tilejson.json, /map, /WMTSCapabilities.xml and /assets endpoints

    # Before\n/tiles/{z}/{x}/{y}\n/tilejson.json\n/map\n/WMTSCapabilities.xml\n\n# Now\n/tiles/WebMercatorQuad/{z}/{x}/{y}\n/WebMercatorQuad/tilejson.json\n/WebMercatorQuad/map\n/WebMercatorQuad/WMTSCapabilities.xml\n
  • update titiler.pgstac.model.Link to match the OGC specification

  • use {tileMatrixSetId} in templated URL links
  • add support for render and item-assets STAC Collection extensions for the STAC Collections info and wmts endpoints
  • add /info endpoint to the STAC Collections endpoints
  • add /collections and /collections/{collection_id} endpoints when TITILER_PGSTAC_API_DEBUG=TRUE
  • Expect the Metadata.defaults configurations to follow the STAC render extension (stac-extensions/render)

        // before\n    \"blue\": {\n        \"rescale\": [\"0,100\"],\n        \"assets\": \"b1\",\n    }\n\n    // now\n    \"blue\": {\n        \"rescale\": [[0, 100]],\n        \"assets\": [\"b1\"],\n    }\n
"},{"location":"release-notes/#123-2024-03-25","title":"1.2.3 (2024-03-25)","text":"
  • add python 3.12 support
  • Add extra=\"ignore\" option to CacheSettings to fix a pydantic issue when using a .env file
"},{"location":"release-notes/#122-2024-02-21","title":"1.2.2 (2024-02-21)","text":"
  • enable passing ConnectionPool kwargs option in titiler.pgstac.db.connect_to_db function (author @smohiudd, stac-utils/titiler-pgstac!155)
"},{"location":"release-notes/#121-2024-01-19","title":"1.2.1 (2024-01-19)","text":"
  • fix invalid url parsing in HTML responses
"},{"location":"release-notes/#120-2024-01-17","title":"1.2.0 (2024-01-17)","text":"
  • update titiler requirement to >=0.17.0,<0.18
  • use new align_bounds_with_dataset=True rio-tiler option in GeoJSON statistics methods for more precise calculation
"},{"location":"release-notes/#110-2024-01-10","title":"1.1.0 (2024-01-10)","text":"
  • update titiler requirement to >=0.16.0,<0.17
  • use morecantile TileMatrixSet.cellSize property instead of deprecated/private TileMatrixSet._resolution method (author @hrodmn, stac-utils/titiler-pgstac!148)
  • add /point/{lon},{lat} endpoint in MosaicTilerFactory (co-author @hrodmn, stac-utils/titiler-pgstac!150)
"},{"location":"release-notes/#100-2023-12-12","title":"1.0.0 (2023-12-12)","text":"
  • no change since 1.0.0a4
"},{"location":"release-notes/#100a4-2023-11-10","title":"1.0.0a4 (2023-11-10)","text":"
  • add algorithm options for /statistics [POST] endpoints (back-ported from 0.8.1)
"},{"location":"release-notes/#100a3-2023-11-03","title":"1.0.0a3 (2023-11-03)","text":"
  • remove reverse option in PGSTACBackend mosaic backend. Reverse item order should be achieved with STAC search sortby.
"},{"location":"release-notes/#100a2-2023-11-02","title":"1.0.0a2 (2023-11-02)","text":"
  • update titiler's dependency to >=0.15.2,<0.16
  • rename dependencies.TileParams to dependencies.TmsTileParams
"},{"location":"release-notes/#100a1-2023-10-20","title":"1.0.0a1 (2023-10-20)","text":"
  • rename dependencies.ItemPathParams to ItemIdParams breaking change
"},{"location":"release-notes/#100a0-2023-10-20","title":"1.0.0a0 (2023-10-20)","text":"
  • add pgstac_dependency attribute in MosaicTilerFactory (defaults to dependencies.PgSTACParams)

  • add a database pool check in the startup event

  • add metadata layer links (to the TileJSON, map and WMTS endpoints) in the mosaic's /info response

  • add CollectionIdParams dependency to retrieve a SearchId for a CollectionId

  • add /collections/{collection_id} virtual mosaic endpoints

  • update endpoints Tags (STAC Search, STAC Collection, STAC Item)

"},{"location":"release-notes/#endpoint-breaking-changes","title":"Endpoint breaking changes","text":"
  • move PgSTAC Search Virtual Mosaic's endpoints from /mosaic to /searches

  • in model.RegisterResponse (model used in the /register endpoint) rename searchid to id

    # before\nresp = httpx.post(\"/mosaic/register\", json={\"collections\": [\"my-collection\"], \"filter-lang\": \"cql-json\"})\nassert resp.json()[\"searchid\"]\n\n# now\nresp = httpx.post(\"/searches/register\", json={\"collections\": [\"my-collection\"], \"filter-lang\": \"cql-json\"})\nassert resp.json()[\"id\"]\n
"},{"location":"release-notes/#api-breaking-changes","title":"API breaking changes","text":"
  • rename dependencies.PathParams to dependencies.SearchIdParams

  • rename searchid path parameter to search_id in SearchIdParams

  • move check_query_params methods outside MosaicTilerFactory class

  • make path_dependency a required input to MosaicTilerFactory class

    # before\napp = FastAPI()\nmosaic = MosaicTilerFactory(...)\napp.include_router(mosaic.router)\n\n# now\napp = FastAPI()\nmosaic = MosaicTilerFactory(\n    ...,\n    path_dependency=lambda: \"aaaaaaaaaaaaaa\"\n)\napp.include_router(mosaic.router)\n
  • remove /{search_id} prefix in MosaicTilerFactory routes. Now use parameter injection from global prefix

    # Before\napp = FastAPI()\nmosaic = MosaicTilerFactory(\n    ...,\n    router_prefix=\"/mosaics\"\n)\napp.include_router(mosaic.router, prefix=\"/mosaics\")\n\n# Now\napp = FastAPI()\nmosaic = MosaicTilerFactory(\n    ...\n    router_prefix=\"/mosaics/{search_id}\"\n)\napp.include_router(mosaic.router, prefix=\"/mosaics/{search_id}\")\n
  • move /info endpoint outside the MosaicTilerFactory to its own extension (titiler.pgstac.extension.searchInfoExtension)

    # Before\napp = FastAPI()\nmosaic = MosaicTilerFactory(...)\napp.include_router(mosaic.router)\n\n# Now\napp = FastAPI()\nmosaic = MosaicTilerFactory(\n    ...\n    extensions=[\n        searchInfoExtension(),\n    ]\n)\napp.include_router(mosaic.router)\n
  • move /register and /list endpoint creation outside the MosaicTilerFactory class

    # before\nfrom titiler.pgstac.factory import MosaicTilerFactory\n\nmosaic = MosaicTilerFactory(\n    ...,\n    router_prefix=\"/{search_id}\",\n)\napp.include_router(mosaic.router, prefix=\"/{search_id}\")\n\n# Now\nfrom titiler.pgstac.factory import (\n    MosaicTilerFactory,\n    add_search_register_route,\n    add_search_list_route,\n)\n\nmosaic = MosaicTilerFactory(\n    ...,\n    router_prefix=\"/{search_id}\",\n)\napp.include_router(mosaic.router, prefix=\"/{search_id}\")\n\n# add /register endpoint\nadd_search_register_route(\n    app,\n    # any dependency we want to validate\n    # when creating the tilejson/map links\n    tile_dependencies=[\n        mosaic.layer_dependency,\n        mosaic.dataset_dependency,\n        mosaic.pixel_selection_dependency,\n        mosaic.process_dependency,\n        mosaic.rescale_dependency,\n        mosaic.colormap_dependency,\n        mosaic.render_dependency,\n        mosaic.pgstac_dependency,\n        mosaic.reader_dependency,\n        mosaic.backend_dependency,\n    ],\n)\n# add /list endpoint\nadd_search_list_route(app)\n
"},{"location":"release-notes/#083-2024-02-21","title":"0.8.3 (2024-02-21)","text":"
  • enable passing ConnectionPool kwargs option in titiler.pgstac.db.connect_to_db function (author @smohiudd, #155) [backported from 1.2.2]
"},{"location":"release-notes/#082-2024-01-23","title":"0.8.2 (2024-01-23)","text":"
  • update rio-tiler version to >6.3.0 (defined in titiler>=0.17)
  • use new align_bounds_with_dataset=True rio-tiler option in GeoJSON statistics methods for more precise calculation [backported from 1.2.0]
  • use morecantile TileMatrixSet.cellSize property instead of deprecated/private TileMatrixSet._resolution method [backported from 1.1.0]
"},{"location":"release-notes/#081-2023-11-10","title":"0.8.1 (2023-11-10)","text":"
  • add algorithm options for /statistics [POST] endpoints
"},{"location":"release-notes/#080-2023-10-06","title":"0.8.0 (2023-10-06)","text":"
  • update titiler requirement to >=0.15.0,<0.16
  • remove max_size default for mosaic's /statistics [POST] endpoint breaking change
  • add /bbox and /feature [POST] optional endpoints
  • add img_part_dependency attribute in MosaicTilerFactory (defaults to titiler.core.dependencies.PartFeatureParams)
"},{"location":"release-notes/#070-2023-09-28","title":"0.7.0 (2023-09-28)","text":"
  • update requirements to switch to pydantic~=2.0
  • pydantic>=2.4,<3.0
  • pydantic-settings~=2.0
  • geojson-pydantic~=1.0
  • cogeo-mosaic>=7.0,<8.0

  • update titiler requirement to >=0.14.0,<0.15

    • replace - by _ in query parameters

      • coord-crs -> coord_crs
      • dst-crs -> dst_crs
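
For illustration, an existing request would be updated like this (the endpoint path and parameter values are placeholders, only the query-parameter names changed):

    # before\n...?assets=cog&coord-crs=EPSG:4326&dst-crs=EPSG:3857\n\n# now\n...?assets=cog&coord_crs=EPSG:4326&dst_crs=EPSG:3857\n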
"},{"location":"release-notes/#060-2023-09-18","title":"0.6.0 (2023-09-18)","text":"
  • add tilejson URL links for layers defined in mosaic's metadata in /mosaic/register and /mosaic/{mosaic_id}/info response
  • support multiple layers in /mosaic/{mosaic_id}/WMTSCapabilities.xml endpoint created from mosaic's metadata

breaking change

  • In /mosaic/WMTSCapabilities.xml we removed the query parameters related to the tile endpoint (which are forwarded), so ?assets= is no longer required. The endpoint will still raise an error if the mosaic metadata has no layers and no tile parameters are passed.

    # before\nresponse = httpx.get(\"/mosaic/{mosaic_id}/WMTSCapabilities.xml\")\nassert response.status_code == 400\n\nresponse = httpx.get(\"/mosaic/{mosaic_id}/WMTSCapabilities.xml?assets=cog\")\nassert response.status_code == 200\n\n# now\n# If the mosaic has `defaults` layers set in the metadata\n# we will construct a WMTS document with multiple layers, so no need for the user to pass any `assets=`\nresponse = httpx.get(\"/mosaic/{mosaic_id}/WMTSCapabilities.xml\")\nassert response.status_code == 200\nwith rasterio.open(io.BytesIO(response.content)) as src:\n    assert src.profile[\"driver\"] == \"WMTS\"\n    assert len(src.subdatasets) == 2\n\n# If the user pass any valid `tile` parameters, an additional layer will be added to the one from the metadata\nresponse = httpx.get(\"/mosaic/{mosaic_id}/WMTSCapabilities.xml?assets=cog\")\nassert response.status_code == 200\nwith rasterio.open(io.BytesIO(response.content)) as src:\n    assert src.profile[\"driver\"] == \"WMTS\"\n    assert len(src.subdatasets) == 3\n
"},{"location":"release-notes/#051-2023-08-03","title":"0.5.1 (2023-08-03)","text":"
  • add python-dotenv requirement
"},{"location":"release-notes/#050-2023-07-20","title":"0.5.0 (2023-07-20)","text":"
  • update titiler requirement to >=0.12.0,<0.13
  • use Annotated Type for Query/Path parameters
  • re-order endpoints in MosaicTilerFactory to avoid conflicts between tiles and assets endpoints
  • remove stac-pydantic dependency
  • add optional root_path setting to specify a url path prefix to use when running the app behind a reverse proxy
  • add landing page /
  • use lifespan option instead of deprecated @app.on_event method to initiate/close DB connection

breaking changes

  • remove deprecated /{searchid}/{z}/{x}/{y}/assets endpoints
  • use /api and /api.html for documentation (instead of /openapi.json and /docs)
  • replace Enum's with Literal types
  • replace variable TileMatrixSetId by tileMatrixSetId
  • add pixel_selection_dependency attribute to the MosaicTilerFactory
"},{"location":"release-notes/#041-2023-06-21","title":"0.4.1 (2023-06-21)","text":"
  • update titiler requirement to >=0.11.7
  • fix /map endpoint template name
  • rename add_map_viewer to add_viewer option in MosaicTilerFactory for consistency with titiler's options
"},{"location":"release-notes/#040-2023-05-22","title":"0.4.0 (2023-05-22)","text":"
  • remove deprecated /tiles/{searchid}/... endpoints (replaced with /{searchid}/tiles/...)
  • deprecate /{searchid}/{z}/{x}/{y}/assets endpoints and add /{searchid}/tiles/{z}/{x}/{y}/assets
  • update minimum titiler requirement to >=0.11.6
  • remove timing headers
  • add strict_zoom option (controlled with the MOSAIC_STRICT_ZOOM environment variable) to raise (or not) an error when fetching a tile outside the mosaic min/max zoom range
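
For example, strict zoom checking can be turned on via the environment (illustrative value only):

    export MOSAIC_STRICT_ZOOM=TRUE\n# tile requests outside the mosaic's min/max zoom range will now return an error\n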
"},{"location":"release-notes/#033-2023-04-27","title":"0.3.3 (2023-04-27)","text":"
  • update python packaging/build system to pdm-pep517
  • use Ruff for lint
  • add retry mechanism on Database connection issues for PGSTACBackend.get_assets() and get_stac_item methods (back ported from 0.2.4)
"},{"location":"release-notes/#032-2023-03-14","title":"0.3.2 (2023-03-14)","text":"
  • update titiler requirement to 0.10.2
  • fix maximum version of FastAPI to 0.92 (to avoid breaking change of starlette >0.25)
"},{"location":"release-notes/#031-2022-12-16","title":"0.3.1 (2022-12-16)","text":"
  • update Type information for dependencies.get_stac_item (back ported from 0.2.2)
"},{"location":"release-notes/#030-2022-12-16","title":"0.3.0 (2022-12-16)","text":"

breaking changes

  • Use /collections/{collection_id}/items/{item_id} prefix for Item endpoint.

    # Before\n{endpoint}/stac/info?collection=collection1&item=item1\n\n# Now\n{endpoint}/collections/collection1/items/item1/info\n

  • Change tile url path parameter order from /tiles/{searchid}/{TileMatrixSetId}/{z}/{x}/{y} to /{searchid}/tiles/{TileMatrixSetId}/{z}/{x}/{y}

    # Before\n{endpoint}/mosaic/tiles/20200307aC0853900w361030/0/0/0\n\n# Now\n{endpoint}/mosaic/20200307aC0853900w361030/tiles/0/0/0\n

"},{"location":"release-notes/#024-2023-04-27","title":"0.2.4 (2023-04-27)","text":"
  • add retry mechanism on Database connection issues for PGSTACBackend.get_assets() and get_stac_item methods
"},{"location":"release-notes/#023-2023-03-14","title":"0.2.3 (2023-03-14)","text":"
  • fix maximum version of FastAPI to 0.92 (to avoid breaking change of starlette >0.25)
"},{"location":"release-notes/#022-2022-12-16","title":"0.2.2 (2022-12-16)","text":"
  • update Type information for dependencies.get_stac_item
"},{"location":"release-notes/#021-2022-12-15","title":"0.2.1 (2022-12-15)","text":"
  • update titiler requirement to >=0.10.1,<0.11 and fix /map endpoint (to accept multiple TMS)
"},{"location":"release-notes/#020-2022-12-13","title":"0.2.0 (2022-12-13)","text":"
  • add python 3.10 and 3.11 support
  • update to rio-tiler 4.1
  • add /{searchid}/map endpoint to the MosaicTilerFactory (added when add_map_viewer is set to True)
  • add /{searchid}/WMTSCapabilities.xml OGC WMTS endpoint to the MosaicTilerFactory
  • add /list to the MosaicTilerFactory to list available mosaics (added when add_mosaic_list is set to True)

breaking changes

  • remove python 3.7 support
  • update titiler requirement to >=0.10.0
  • replace connection_string with database_url in settings.PostgresSettings. The DATABASE_URL environment variable can now be set directly.
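
A minimal sketch of the new configuration (the connection string is a placeholder, adapt it to your database):

    # assumes the DATABASE_URL environment variable is set, e.g.\n# export DATABASE_URL=postgresql://username:password@localhost:5439/postgis\nfrom titiler.pgstac.settings import PostgresSettings\n\nsettings = PostgresSettings()\nassert settings.database_url\n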
"},{"location":"release-notes/#frontend-changes","title":"Frontend changes","text":"
  • remove asset_expression (Mosaic and Item)
  • histogram band names are prefixed with b (e.g b1) (Mosaic and Item) (ref: github.com/cogeotiff/rio-tiler/blob/main/docs/src/v4_migration.md#band-names)
  • expressions for STAC have to be in the form {asset}_b{band_name} (e.g. red_b1/green_b1) (Mosaic and Item) (ref: github.com/cogeotiff/rio-tiler/blob/main/docs/src/v4_migration.md#multibasereader-expressions)
  • added asset_as_band option to force expressions to be in the form {asset} (e.g. red/green) (Mosaic and Item)
  • expression's band should now be delimited with ; (previously , was accepted) (Mosaic and Item)
  • point output model to include band_names (Item)
  • added algorithm options
"},{"location":"release-notes/#010-2022-06-27","title":"0.1.0 (2022-06-27)","text":"
  • update titiler.core and titiler.mosaic requirement to 0.7
  • add MosaicTilerFactory._tilejson_routes method to register TileJSON routes
  • raise cogeo_mosaic.errors.MosaicNotFoundError when SearchId is not found in pgstac.searches table

breaking changes

  • move version definition to titiler.pgstac.__version__
  • remove unused fetch_options in titiler.pgstac.reader.PgSTACReader
"},{"location":"release-notes/#010a10-2022-05-16-pre-release","title":"0.1.0a10 (2022-05-16) Pre-Release","text":"
  • update titiler version and add reader_dependency and backend_dependency in endpoint factory.
"},{"location":"release-notes/#010a9-2022-05-05-pre-release","title":"0.1.0.a9 (2022-05-05) Pre-Release","text":"
  • remove LRU cache on all settings classes to enable support for manually providing settings via keyword arguments and to minimize lines of code (author @alukach, stac-utils/titiler-pgstac!54)
"},{"location":"release-notes/#010a8-2022-05-02-pre-release","title":"0.1.0.a8 (2022-05-02) Pre-Release","text":"
  • Insert mosaic metadata min/max zoom and bounds in tilejson (stac-utils/titiler-pgstac!51)
  • allow users to optionally provide PostgresSettings to the connect_to_db() function in case they want to customize how their DB credentials are populated (author @alukach, stac-utils/titiler-pgstac!53)
"},{"location":"release-notes/#010a7-2022-04-05-pre-release","title":"0.1.0.a7 (2022-04-05) Pre-Release","text":"
  • add feature() method to PGSTACBackend mosaic backend
  • add /statistics endpoint to return statistics given a GeoJSON feature or featureCollection
  • add collection in allowed returned fields
  • switch to pgstac.search to get the STAC Item in titiler.pgstac.dependencies.get_stac_item (stac-utils/titiler-pgstac!50)
"},{"location":"release-notes/#010a6-2022-03-14-pre-release","title":"0.1.0.a6 (2022-03-14) Pre-Release","text":"
  • move dependencies to titiler.pgstac.dependencies
  • add /stac endpoints to work with PgSTAC items

breaking changes

  • add /mosaic prefix to the PgSTAC mosaic endpoints
"},{"location":"release-notes/#010a5-2022-03-03-pre-release","title":"0.1.0.a5 (2022-03-03) Pre-Release","text":"
  • Add search_dependency to allow customization of the PgSTAC Search query (Author @drnextgis, stac-utils/titiler-pgstac!41)
  • Add PgSTAC Search entries model (stac-utils/titiler-pgstac!43)
  • Add Metadata specification (stac-utils/titiler-pgstac!38)

breaking changes

  • update titiler.core and titiler.mosaic requirement to >=0.5
  • When registering a search to PgSTAC with the /register endpoint, a default metadata {\"type\": \"mosaic\"} will be set.
  • Renamed titiler.pgstac.models to titiler.pgstac.model
  • Renamed titiler.pgstac.models.SearchQuery to titiler.pgstac.model.PgSTACSearch (and removed metadata)
  • output response for /register endpoint:

    // before\n{\n    \"searchid\": \"...\",\n    \"metadata\": \"http://endpoint/.../info\",\n    \"tiles\": \"http://endpoint/.../tilejson.json\",\n}\n\n// now\n{\n    \"searchid\": \"...\",\n    \"links\": [\n        {\n            \"rel\": \"info\",\n            \"href\": \"http://endpoint/.../info\",\n            \"type\": \"application/json\",\n        },\n        {\n            \"rel\": \"tilejson\",\n            \"href\": \"http://endpoint/.../tilejson.json\",\n            \"type\": \"application/json\",\n        }\n    ]\n}\n

  • output response for /info endpoint:

    // before\n{\n    \"hash\": \"...\",\n    \"search\": {},\n    \"_where\": \"...\",\n    ...\n}\n\n// now\n{\n    \"search\": {\n        \"hash\": \"...\",\n        \"search\": {},\n        \"_where\": \"...\",\n        ...\n    },\n    \"links\": [\n        {\n            \"rel\": \"self\",\n            \"href\": \"http://endpoint/.../info\",\n            \"type\": \"application/json\",\n        },\n        {\n            \"rel\": \"tilejson\",\n            \"href\": \"http://endpoint/.../tilejson.json\",\n            \"type\": \"application/json\",\n        }\n    ]\n}\n

"},{"location":"release-notes/#010a4-2022-02-07-pre-release","title":"0.1.0.a4 (2022-02-07) Pre-Release","text":"
  • add tile buffer option to match rio-tiler tile options (stac-utils/titiler-pgstac!31)
"},{"location":"release-notes/#010a3-2021-12-15-pre-release","title":"0.1.0.a3 (2021-12-15) Pre-Release","text":"
  • Forward TMS to the STAC Reader (allow multiple TMS) (stac-utils/titiler-pgstac!28)
"},{"location":"release-notes/#010a2-2021-12-13-pre-release","title":"0.1.0.a2 (2021-12-13) Pre-Release","text":"
  • Switch to psycopg3
  • add filter-lang in Search model to support newer PgSTAC (with CQL-2)
  • add metadata in Search model to allow forwarding metadata to the search query entry in PgSTAC

breaking changes

  • Unify reader/writer db pools to request.app.state.dbpool
  • rename PostgresSettings.db_max_inactive_conn_lifetime to PostgresSettings.max_idle
  • remove PostgresSettings().reader_connection_string and PostgresSettings().writer_connection_string. Replaced with PostgresSettings().connection_string
  • update titiler requirement (>= 0.4)
"},{"location":"release-notes/#010a1-2021-09-15-pre-release","title":"0.1.0.a1 (2021-09-15) Pre-Release","text":"
  • Surface PgSTAC options (scan_limit, items_limit, time_limit, exitwhenfull and skipcovered) in Tile endpoints

breaking changes

  • remove psycopg2 requirements to avoid conflict with psycopg2-binary (stac-utils/titiler-pgstac!15)
"},{"location":"release-notes/#010a0-2021-09-06-pre-release","title":"0.1.0.a0 (2021-09-06) Pre-Release","text":"

Initial release

"},{"location":"tiler_factories/","title":"Tiler Factories","text":""},{"location":"tiler_factories/#mosaics-titilerpgstacfactorymosaictilerfactory","title":"Mosaics: titiler.pgstac.factory.MosaicTilerFactory","text":"

TiTiler.PgSTAC provides a MosaicTilerFactory, a helper that creates a FastAPI router (fastapi.APIRouter) with a minimal set of endpoints.

# Minimal PgSTAC Mosaic Application\nfrom contextlib import asynccontextmanager\n\nfrom fastapi import FastAPI\nfrom titiler.pgstac.db import close_db_connection, connect_to_db\nfrom titiler.pgstac.factory import MosaicTilerFactory\n\n@asynccontextmanager\nasync def lifespan(app: FastAPI):\n    \"\"\"FastAPI Lifespan.\"\"\"\n    # Create Connection Pool\n    await connect_to_db(app)\n    yield\n    # Close the Connection Pool\n    await close_db_connection(app)\n\n\napp = FastAPI(lifespan=lifespan)\n\nmosaic = MosaicTilerFactory(\n    path_dependency=lambda: \"aaaaaaaaaaaaaaaaaaaaa\",\n)\napp.include_router(mosaic.router)\n

Important

The MosaicTilerFactory requires a path_dependency, which should be a Callable that returns a search_id (PgSTAC Search Hash).

For the /searches/{search_id} endpoints the path_dependency is set to titiler.pgstac.dependencies.SearchIdParams, and for the /collections/{collection_id} endpoints it is set to titiler.pgstac.dependencies.CollectionIdParams.
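
A minimal sketch (assumed wiring, reusing the lifespan function from the example above) of how the two dependencies map to the two sets of endpoints:

from fastapi import FastAPI\nfrom titiler.pgstac.dependencies import CollectionIdParams, SearchIdParams\nfrom titiler.pgstac.factory import MosaicTilerFactory\n\napp = FastAPI(lifespan=lifespan)  # `lifespan` as defined in the example above\n\nsearches = MosaicTilerFactory(\n    path_dependency=SearchIdParams,\n    router_prefix=\"/searches/{search_id}\",\n)\napp.include_router(searches.router, prefix=\"/searches/{search_id}\")\n\ncollections = MosaicTilerFactory(\n    path_dependency=CollectionIdParams,\n    router_prefix=\"/collections/{collection_id}\",\n)\napp.include_router(collections.router, prefix=\"/collections/{collection_id}\")\n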


Method URL Output Description GET /{lon},{lat}/assets JSON Return a list of assets which overlap a given point GET /tiles[/{TileMatrixSetId}]/{z}/{x}/{y}/assets JSON Return a list of assets which overlap a given tile GET /tiles[/{TileMatrixSetId}]/{z}/{x}/{y}[@{scale}x][.{format}] image/bin Create a web map tile image for a search query and a tile index GET [/{TileMatrixSetId}]/tilejson.json JSON (TileJSON) Return a Mapbox TileJSON document GET [/{TileMatrixSetId}]/WMTSCapabilities.xml XML Return OGC WMTS Get Capabilities GET [/{TileMatrixSetId}]/map HTML Simple map viewer OPTIONAL POST /statistics GeoJSON (Statistics) Return statistics for geojson features OPTIONAL GET /bbox/{minx},{miny},{maxx},{maxy}[/{width}x{height}].{format} image/bin Create an image from part of a dataset OPTIONAL POST /feature[/{width}x{height}][.{format}] image/bin Create an image from a GeoJSON feature OPTIONAL GET /point/{lon},{lat} JSON (Point) Return pixel values from assets intersecting with a given point"},{"location":"tiler_factories/#extensions","title":"Extensions","text":""},{"location":"tiler_factories/#searchinfoextension","title":"searchInfoExtension","text":"Method URL Output Description GET /info JSON (Infos) Return list of Search entries with Mosaic type OPTIONAL
app = FastAPI()\nmosaic = MosaicTilerFactory(\n    path_dependency=lambda: \"aaaaaaaaaaaaaaaaaaaaa\",\n    extensions=[\n        searchInfoExtension(),\n    ],\n)\napp.include_router(mosaic.router)\n
"},{"location":"tiler_factories/#register-and-list","title":"register and list","text":"Method URL Output Description POST /register JSON (Register) Register Search query OPTIONAL GET /list JSON (Info) Return Search query infos OPTIONAL
app = FastAPI()\nmosaic = MosaicTilerFactory(\n    path_dependency=lambda: \"aaaaaaaaaaaaaaaaaaaaa\",\n)\napp.include_router(mosaic.router)\n\nadd_search_register_route(app)\nadd_search_list_route(app)\n
"},{"location":"tiler_factories/#items-titilercorefactorymultibasetilerfactory","title":"Items: titiler.core.factory.MultiBaseTilerFactory","text":"

For the single STAC item endpoints we use TiTiler's MultiBaseTilerFactory with a custom path_dependency that uses item_id and collection_id path parameters (instead of the default url query parameter).

This custom path_dependency will connect to PgSTAC directly to fetch the STAC Item and pass it to a custom Reader.

# Minimal PgSTAC Item Application\nfrom contextlib import asynccontextmanager\n\nfrom fastapi import FastAPI\n\nfrom titiler.core.factory import MultiBaseTilerFactory\n\nfrom titiler.pgstac.db import close_db_connection, connect_to_db\nfrom titiler.pgstac.dependencies import ItemIdParams\nfrom titiler.pgstac.reader import PgSTACReader\n\n\n@asynccontextmanager\nasync def lifespan(app: FastAPI):\n    \"\"\"FastAPI Lifespan.\"\"\"\n    # Create Connection Pool\n    await connect_to_db(app)\n    yield\n    # Close the Connection Pool\n    await close_db_connection(app)\n\n\napp = FastAPI(lifespan=lifespan)\n\nitem = MultiBaseTilerFactory(\n    reader=PgSTACReader,\n    path_dependency=ItemIdParams,\n    router_prefix=\"/collections/{collection_id}/items/{item_id}\",\n)\napp.include_router(item.router, prefix=\"/collections/{collection_id}/items/{item_id}\")\n
"},{"location":"advanced/custom_search/","title":"Custom search model","text":"

Even though TiTiler.PgSTAC includes a default FastAPI application, it can also be used as a library if you want to extend or override the default behavior.

Let's look at one such example. Imagine that we use a JSON Web Token (JWT) based approach for authorization, and every token contains information about the area a user has access to:

{\n  \"sub\": \"1234567890\",\n  \"name\": \"John Doe\",\n  \"iat\": 1516239022,\n  \"scope\": \"zone_A\"\n}\n

We want our application to take this information into account while registering a search query. It can be done in the following way:

from contextlib import asynccontextmanager\n\nfrom typing import Tuple\nimport json\nimport jwt\nfrom fastapi import FastAPI\nfrom fastapi.security.utils import get_authorization_scheme_param\nfrom starlette.requests import Request\nfrom titiler.pgstac.dependencies import SearchIdParams\nfrom titiler.pgstac.factory import MosaicTilerFactory, add_search_register_route\nfrom titiler.pgstac.model import RegisterMosaic, Metadata, PgSTACSearch\nfrom titiler.pgstac.db import close_db_connection, connect_to_db\nfrom titiler.pgstac.extensions import searchInfoExtension\n\n\n@asynccontextmanager\nasync def lifespan(app: FastAPI):\n    \"\"\"FastAPI Lifespan.\"\"\"\n    # Create Connection Pool\n    await connect_to_db(app)\n    yield\n    # Close the Connection Pool\n    await close_db_connection(app)\n\napp = FastAPI(lifespan=lifespan)\n\nAREAS = {\n    \"zone_A\": {\"type\": \"Point\", \"coordinates\": [-41.93, -12.76]},\n    \"zone_B\": {\"type\": \"Point\", \"coordinates\": [2.15, 41.39]},\n}\n\n\ndef search_factory(request: Request, body: RegisterMosaic) -> Tuple[PgSTACSearch, Metadata]:\n    authorization = request.headers.get(\"Authorization\")\n    scheme, token = get_authorization_scheme_param(authorization)\n    payload = jwt.decode(token, algorithms=[\"HS256\"], key=\"your-256-bit-secret\")\n\n    search = body.dict(exclude_none=True, exclude={\"metadata\"}, by_alias=True)\n    search[\"filter\"] = {\n        \"op\": \"and\",\n        \"args\": [\n            {\n                \"op\": \"s_intersects\",\n                \"args\": [{\"property\": \"geometry\"}, AREAS[payload[\"scope\"]]],\n            },\n            search[\"filter\"],\n        ],\n    }\n\n    return PgSTACSearch(**search), body.metadata\n\n\nmosaic = MosaicTilerFactory(\n    path_dependency=SearchIdParams,\n    router_prefix=\"/{search_id}\",\n    extensions=[\n        searchInfoExtension(),\n    ],\n)\napp.include_router(mosaic.router, prefix=\"/{search_id}\")\nadd_search_register_route(app, search_dependency=search_factory)\n

Checking:

$ curl -s -X 'POST' \\\n  'http://localhost:8081/register' \\\n  -H 'accept: application/json' \\\n  -H 'Authorization: Bearer eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiIxMjM0NTY3ODkwIiwibmFtZSI6IkpvaG4gRG9lIiwiaWF0IjoxNTE2MjM5MDIyLCJzY29wZSI6InpvbmVfQSJ9.BelzluX7v7kYObix2KSyy1T5gEOQYQn_pyNO5Ri0gWo' \\\n  -H 'Content-Type: application/json' \\\n  -d '{\"filter\":{\"op\":\"and\",\"args\":[{\"op\":\"=\",\"args\":[{\"property\":\"collection\"},\"l1\"]}]}}' | jq '.id'\n\"bbc3c8f4c392436f74de6cd0308469f6\"\n\n$ curl -X 'GET' \\\n  'http://localhost:8081/bbc3c8f4c392436f74de6cd0308469f6/info' \\\n  -H 'accept: application/json'\n{\"hash\":\"bbc3c8f4c392436f74de6cd0308469f6\",\"search\":{\"filter\":{\"op\":\"and\",\"args\":[{\"op\":\"s_intersects\",\"args\":[{\"property\":\"geometry\"},{\"type\":\"Point\",\"coordinates\":[-41.93,-12.76]}]},{\"op\":\"and\",\"args\":[{\"op\":\"=\",\"args\":[{\"property\":\"collection\"},\"l1\"]}]}]}},\"_where\":\"(  ( st_intersects(geometry, '0101000020E6100000D7A3703D0AF744C085EB51B81E8529C0'::geometry) and  ( (collection_id = 'l1') )  )  )  \",\"orderby\":\"datetime DESC, id DESC\",\"lastused\":\"2022-02-23T13:00:04.090757+00:00\",\"usecount\":3,\"metadata\":{\"type\":\"mosaic\"}}\n
"},{"location":"advanced/custom_tilejson/","title":"Custom TileJSON endpoint","text":"

Goal: enable users to select a predefined configuration stored in the mosaic Metadata.

import sys\nfrom typing import Optional\nfrom dataclasses import dataclass\n\nfrom morecantile import TileMatrixSet\nfrom titiler.core.resources.enums import ImageType\nfrom titiler.core.models.mapbox import TileJSON\nfrom titiler.pgstac import factory as TitilerPgSTACFactory\nfrom titiler.pgstac.dependencies import PgSTACParams\nfrom typing_extensions import Annotated\n\nfrom fastapi import Depends, Query\n\nfrom starlette.requests import Request\n\n\n@dataclass\nclass MosaicTilerFactory(TitilerPgSTACFactory.MosaicTilerFactory):\n    \"\"\"Custom factory.\"\"\"\n\n    def _tilejson_routes(self) -> None:\n        \"\"\"Custom TileJSON endpoint.\"\"\"\n\n        @self.router.get(\n            \"/tilejson.json\",\n            response_model=TileJSON,\n            responses={200: {\"description\": \"Return a tilejson\"}},\n            response_model_exclude_none=True,\n        )\n        @self.router.get(\n            \"/{tileMatrixSetId}/tilejson.json\",\n            response_model=TileJSON,\n            responses={200: {\"description\": \"Return a tilejson\"}},\n            response_model_exclude_none=True,\n        )\n        def tilejson(\n            request: Request,\n            search_id=Depends(self.path_dependency),\n            tileMatrixSetId: Annotated[  # type: ignore\n                Literal[tuple(self.supported_tms.list())],\n                f\"Identifier selecting one of the TileMatrixSetId supported (default: '{self.default_tms}')\",\n            ] = self.default_tms,\n            layer: Annotated[\n                str,\n                Query(description=\"Name of default configuration\"),\n            ] = None,\n            tile_format: Annotated[\n                Optional[ImageType],\n                Query(\n                    description=\"Default will be automatically defined if the output image needs a mask (png) or not (jpeg).\",\n                ),\n            ] = None,\n            tile_scale: Annotated[\n                Optional[int],\n                Query(\n                    gt=0, lt=4, description=\"Tile size scale. 1=256x256, 2=512x512...\"\n                ),\n            ] = None,\n            minzoom: Annotated[\n                Optional[int],\n                Query(description=\"Overwrite default minzoom.\"),\n            ] = None,\n            maxzoom: Annotated[\n                Optional[int],\n                Query(description=\"Overwrite default maxzoom.\"),\n            ] = None,\n            layer_params=Depends(self.layer_dependency),\n            dataset_params=Depends(self.dataset_dependency),\n            pixel_selection=Depends(self.pixel_selection_dependency),\n            buffer: Annotated[\n                Optional[float],\n                Query(\n                    gt=0,\n                    title=\"Tile buffer.\",\n                    description=\"Buffer on each side of the given tile. It must be a multiple of `0.5`. 
Output **tilesize** will be expanded to `tilesize + 2 * buffer` (e.g 0.5 = 257x257, 1.0 = 258x258).\",\n                ),\n            ] = None,\n            post_process=Depends(self.process_dependency),\n            rescale=Depends(self.rescale_dependency),\n            color_formula: Annotated[\n                Optional[str],\n                Query(\n                    title=\"Color Formula\",\n                    description=\"rio-color formula (info: https://github.com/mapbox/rio-color)\",\n                ),\n            ] = None,\n            colormap=Depends(self.colormap_dependency),\n            render_params=Depends(self.render_dependency),\n            pgstac_params: PgSTACParams = Depends(),\n            backend_params=Depends(self.backend_dependency),\n            reader_params=Depends(self.reader_dependency),\n        ):\n            \"\"\"Return TileJSON document for a SearchId.\"\"\"\n            with request.app.state.dbpool.connection() as conn:\n                with conn.cursor(row_factory=class_row(model.Search)) as cursor:\n                    cursor.execute(\n                        \"SELECT * FROM searches WHERE hash=%s;\",\n                        (search_id,),\n                    )\n                    search_info = cursor.fetchone()\n                    if not search_info:\n                        raise KeyError(f\"search {search_id} not found\")\n\n            route_params = {\n                \"search_id\": search_info.id,\n                \"z\": \"{z}\",\n                \"x\": \"{x}\",\n                \"y\": \"{y}\",\n                \"tileMatrixSetId\": tileMatrixSetId,\n            }\n            if tile_scale:\n                route_params[\"scale\"] = tile_scale\n            if tile_format:\n                route_params[\"format\"] = tile_format.value\n\n            tiles_url = self.url_for(request, \"tile\", **route_params)\n\n            qs_key_to_remove = [\n                \"tilematrixsetid\",\n                \"tile_format\",\n                \"tile_scale\",\n                \"minzoom\",\n                \"maxzoom\",\n                \"layer\",\n            ]\n            qs = [\n                (key, value)\n                for (key, value) in request.query_params._list\n                if key.lower() not in qs_key_to_remove\n            ]\n\n            if layer:\n                config = search_info.metadata.defaults_params.get(layer)\n                if not config:\n                    raise HTTPException(status_code=404, detail=f\"Invalid {layer} configuration.\")\n\n                # This assume the default configuration follows the endpoint expected format\n                # as `\"true_color\": {\"assets\": [\"B4\", \"B3\", \"B2\"]}`\n                qs = QueryParams(config)\n\n            if qs:\n                tiles_url += f\"?{urlencode(qs, doseq=True)}\"\n\n            minzoom = _first_value([minzoom, search_info.metadata.minzoom], tms.minzoom)\n            maxzoom = _first_value([maxzoom, search_info.metadata.maxzoom], tms.maxzoom)\n            bounds = _first_value(\n                [search_info.input_search.get(\"bbox\"), search_info.metadata.bounds],\n                tms.bbox,\n            )\n            return {\n                \"bounds\": bounds,\n                \"minzoom\": minzoom,\n                \"maxzoom\": maxzoom,\n                \"name\": search_info.metadata.name or search_info.id,\n                \"tiles\": [tiles_url],\n            }\n
"},{"location":"advanced/metadata/","title":"Mosaic metadata specification","text":"

TiTiler-PgSTAC uses PgSTAC searches to host mosaic parameters for performance purposes. Metadata can be attached to search entries, and TiTiler-PgSTAC introduces a non-official specification to help users store meaningful information there.

"},{"location":"advanced/metadata/#specification","title":"Specification","text":"
{\n    // OPTIONAL. Default: \"mosaic\" (No other value accepted for now). Describe the `type` of metadata.\n    \"type\": \"mosaic\",\n\n    // OPTIONAL. Default: null.\n    // The maximum extent of available map tiles. The bounds are represented in WGS:84\n    // latitude and longitude values, in the order left, bottom, right, top.\n    // Values may be integers or floating point numbers.\n    \"bounds\": [ -180, -85.05112877980659, 180, 85.0511287798066 ],\n\n    // OPTIONAL. Default: null.\n    // An integer specifying the minimum zoom level.\n    \"minzoom\": 0,\n\n    // OPTIONAL. Default: null.\n    // An integer specifying the maximum zoom level. MUST be >= minzoom.\n    \"maxzoom\": 11,\n\n    // OPTIONAL. Default: null. The name can contain any legal character.\n    \"name\": \"compositing\",\n\n    // OPTIONAL. Default: null. An array of available assets.\n    \"assets\": [\"image\", \"cog\"],\n\n    // OPTIONAL. Default: null. A set of `defaults` configuration to be forwarded to the /tiles endpoints.\n    // Note: The defaults block should follow the STAC render extension https://github.com/stac-extensions/render\n    \"defaults\": {\n        \"true_color\": {\n            \"assets\": [\"B4\", \"B3\", \"B2\"],\n            \"color_formula\": \"Gamma RGB 3.5 Saturation 1.7 Sigmoidal RGB 15 0.35\",\n        },\n        \"ndvi\": {\n            \"expression\": \"(B4-B3)/(B4+B3)\",\n            \"rescale\": [[-1, 1]],\n            \"colormap_name\": \"viridis\"\n        }\n    }\n}\n

Important

  • When using the /searches/register endpoint, {\"type\": \"mosaic\"} will be set by default
  • All metadata fields are optional and custom fields are also allowed.
curl -X 'POST' 'http://127.0.0.1:8081/searches/register' -H 'accept: application/json' -H 'Content-Type: application/json' -d '{\"filter\": {\"op\": \"=\", \"args\": [{\"property\": \"collection\"}, \"landsat-c2l2-sr\"]}, \"metadata\": {\"name\": \"landsat mosaic\"}}'\n>> {\n  \"id\": \"d7fcdefd0457c949ea7a6192bc2c7122\",\n  \"links\": [\n    {\n      \"rel\": \"metadata\",\n      \"type\": \"application/json\",\n      \"href\": \"http://127.0.0.1:8081/searches/d7fcdefd0457c949ea7a6192bc2c7122/info\"\n    },\n    {\n      \"rel\": \"tilejson\",\n      \"type\": \"application/json\",\n      \"href\": \"http://127.0.0.1:8081/searches/d7fcdefd0457c949ea7a6192bc2c7122/tilejson.json\"\n    }\n  ]\n}\n\ncurl http://127.0.0.1:8081/searches/d7fcdefd0457c949ea7a6192bc2c7122/info | jq '.search.metadata'\n>> {\n  \"type\": \"mosaic\",\n  \"name\": \"landsat mosaic\"\n}\n
curl -X 'POST' 'http://127.0.0.1:8081/searches/register' -H 'accept: application/json' -H 'Content-Type: application/json' -d '{\"collections\": [\"noaa-emergency-response\"], \"bbox\": [-87.0251, 36.0999, -85.4249, 36.2251], \"filter-lang\": \"cql-json\", \"metadata\": {\"bounds\": [-87.0251, 36.0999, -85.4249, 36.2251], \"minzoom\": 14, \"maxzoom\": 18, \"assets\": [\"cog\"], \"defaults\": {\"true_color\": {\"bidx\": [1, 2, 3]}}}}'\n>> {\n  \"id\":\"4b0db3dbd1858d54a3a55f84de97d1ca\",\n  \"links\":[\n    {\n      \"rel\": \"metadata\",\n      \"type\": \"application/json\",\n      \"href\": \"http://127.0.0.1:8081/searches/4b0db3dbd1858d54a3a55f84de97d1ca/info\"\n    },\n    {\n      \"rel\": \"tilejson\",\n      \"type\": \"application/json\",\n      \"href\": \"http://127.0.0.1:8081/searches/4b0db3dbd1858d54a3a55f84de97d1ca/tilejson.json\"\n    }\n  ]\n}\n\ncurl http://127.0.0.1:8081/searches/4b0db3dbd1858d54a3a55f84de97d1ca/info | jq '.search.metadata'\n>> {\n  \"type\": \"mosaic\",\n  \"bounds\": [\n    -87.0251,\n    36.0999,\n    -85.4249,\n    36.2251\n  ],\n  \"minzoom\": 14,\n  \"maxzoom\": 18,\n  \"assets\": [\n    \"cog\"\n  ],\n  \"defaults\": {\n    \"true_color\": {\n      \"bidx\": [\n        1,\n        2,\n        3\n      ]\n    }\n  }\n}\n
"},{"location":"advanced/searches_list/","title":"Mosaic list","text":"

Starting with titiler-pgstac>=0.2.0, a /searches/list endpoint lists all registered mosaics. When a mosaic is registered via /searches/register, a specific metadata.type: \"mosaic\" is added to the pgstac search entry, which the /searches/list endpoint then uses to filter the pgstac searches.
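
For example (illustrative host and output, the context values depend on how many mosaics are registered):

curl http://127.0.0.1:8081/searches/list | jq '.context'\n>> {\n  \"returned\": 1,\n  \"limit\": 10,\n  \"matched\": 1\n}\n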

In order to make the mosaic list performant, users might want to alter their PgSTAC database to add an index:

$ psql\npostgis=# SET schema 'pgstac';\n>> SET\n\npostgis=# CREATE INDEX IF NOT EXISTS searches_mosaic ON searches ((true)) WHERE metadata->>'type'='mosaic';\n>> NOTICE:  relation \"searches_mosaic\" already exists, skipping\n>> CREATE INDEX\n\npostgis=# SELECT\n    indexname,\n    indexdef\nFROM\n    pg_indexes\nWHERE\n    tablename = 'searches';\n\n>>     indexname    |                                                         indexdef\n>> -----------------+---------------------------------------------------------------------------------------------------------------------------\n>>  searches_pkey   | CREATE UNIQUE INDEX searches_pkey ON pgstac.searches USING btree (hash)\n>>  searches_mosaic | CREATE INDEX searches_mosaic ON pgstac.searches USING btree ((true)) WHERE ((metadata ->> 'type'::text) = 'mosaic'::text)\n

ref: github.com/developmentseed/eoAPI/blob/master/stack/handlers/db_handler.py#L204-L213

"},{"location":"api/titiler/pgstac/db/","title":"Module titiler.pgstac.db","text":"

Database connection handling.

"},{"location":"api/titiler/pgstac/db/#functions","title":"Functions","text":""},{"location":"api/titiler/pgstac/db/#close_db_connection","title":"close_db_connection","text":"
def close_db_connection(\n    app: fastapi.applications.FastAPI\n) -> None\n

Close Pool.

"},{"location":"api/titiler/pgstac/db/#connect_to_db","title":"connect_to_db","text":"
def connect_to_db(\n    app: fastapi.applications.FastAPI,\n    settings: Optional[titiler.pgstac.settings.PostgresSettings] = None,\n    pool_kwargs: Optional[Dict[str, Any]] = None\n) -> None\n

Connect to Database.
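
A minimal sketch of forwarding psycopg connection-pool options through pool_kwargs (the pool sizes shown are assumptions, tune them for your deployment):

from contextlib import asynccontextmanager\n\nfrom fastapi import FastAPI\n\nfrom titiler.pgstac.db import close_db_connection, connect_to_db\n\n\n@asynccontextmanager\nasync def lifespan(app: FastAPI):\n    # `pool_kwargs` is forwarded to the psycopg_pool.ConnectionPool constructor\n    await connect_to_db(app, pool_kwargs={\"min_size\": 1, \"max_size\": 10})\n    yield\n    await close_db_connection(app)\n\n\napp = FastAPI(lifespan=lifespan)\n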

"},{"location":"api/titiler/pgstac/dependencies/","title":"Module titiler.pgstac.dependencies","text":"

titiler-pgstac dependencies.

"},{"location":"api/titiler/pgstac/dependencies/#variables","title":"Variables","text":"
cache_config\n
retry_config\n
"},{"location":"api/titiler/pgstac/dependencies/#functions","title":"Functions","text":""},{"location":"api/titiler/pgstac/dependencies/#collectionidparams","title":"CollectionIdParams","text":"
def CollectionIdParams(\n    request: starlette.requests.Request,\n    collection_id: typing.Annotated[str, Path(PydanticUndefined)]\n) -> str\n

collection_id Path Parameter

"},{"location":"api/titiler/pgstac/dependencies/#itemidparams","title":"ItemIdParams","text":"
def ItemIdParams(\n    request: starlette.requests.Request,\n    collection_id: typing.Annotated[str, Path(PydanticUndefined)],\n    item_id: typing.Annotated[str, Path(PydanticUndefined)]\n) -> pystac.item.Item\n

STAC Item dependency.

"},{"location":"api/titiler/pgstac/dependencies/#searchidparams","title":"SearchIdParams","text":"
def SearchIdParams(\n    search_id: typing.Annotated[str, Path(PydanticUndefined)]\n) -> str\n

search_id

"},{"location":"api/titiler/pgstac/dependencies/#searchparams","title":"SearchParams","text":"
def SearchParams(\n    body: titiler.pgstac.model.RegisterMosaic\n) -> Tuple[titiler.pgstac.model.PgSTACSearch, titiler.pgstac.model.Metadata]\n

Search parameters.

"},{"location":"api/titiler/pgstac/dependencies/#tmstileparams","title":"TmsTileParams","text":"
def TmsTileParams(\n    z: typing.Annotated[int, Path(PydanticUndefined)],\n    x: typing.Annotated[int, Path(PydanticUndefined)],\n    y: typing.Annotated[int, Path(PydanticUndefined)]\n) -> morecantile.commons.Tile\n

TileMatrixSet Tile parameters.
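
An illustrative direct call (the tile indexes are arbitrary):

from titiler.pgstac.dependencies import TmsTileParams\n\ntile = TmsTileParams(z=8, x=87, y=48)\nprint(tile)  # Tile(x=87, y=48, z=8)\n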

"},{"location":"api/titiler/pgstac/dependencies/#classes","title":"Classes","text":""},{"location":"api/titiler/pgstac/dependencies/#backendparams","title":"BackendParams","text":"
class BackendParams(\n    request: starlette.requests.Request\n)\n

backend parameters.

"},{"location":"api/titiler/pgstac/dependencies/#ancestors-in-mro","title":"Ancestors (in MRO)","text":"
  • titiler.core.dependencies.DefaultDependency
"},{"location":"api/titiler/pgstac/dependencies/#methods","title":"Methods","text":""},{"location":"api/titiler/pgstac/dependencies/#keys","title":"keys","text":"
def keys(\n    self\n)\n

Return Keys.

"},{"location":"api/titiler/pgstac/dependencies/#pgstacparams","title":"PgSTACParams","text":"
class PgSTACParams(\n    scan_limit: Annotated[Optional[int], Query(PydanticUndefined)] = None,\n    items_limit: Annotated[Optional[int], Query(PydanticUndefined)] = None,\n    time_limit: Annotated[Optional[int], Query(PydanticUndefined)] = None,\n    exitwhenfull: Annotated[Optional[bool], Query(PydanticUndefined)] = None,\n    skipcovered: Annotated[Optional[bool], Query(PydanticUndefined)] = None\n)\n

PgSTAC parameters.
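
These options map directly to query parameters on the mosaic tile endpoints, e.g. (hypothetical search_id, tile indexes, assets and limits):

curl \"http://127.0.0.1:8081/searches/{search_id}/tiles/WebMercatorQuad/15/8589/12849.png?assets=cog&scan_limit=100&items_limit=5&skipcovered=false\"\n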

"},{"location":"api/titiler/pgstac/dependencies/#ancestors-in-mro_1","title":"Ancestors (in MRO)","text":"
  • titiler.core.dependencies.DefaultDependency
"},{"location":"api/titiler/pgstac/dependencies/#class-variables","title":"Class variables","text":"
exitwhenfull\n
items_limit\n
scan_limit\n
skipcovered\n
time_limit\n
"},{"location":"api/titiler/pgstac/dependencies/#methods_1","title":"Methods","text":""},{"location":"api/titiler/pgstac/dependencies/#keys_1","title":"keys","text":"
def keys(\n    self\n)\n

Return Keys.

"},{"location":"api/titiler/pgstac/extensions/","title":"Module titiler.pgstac.extensions","text":"

titiler.pgstac extensions.

"},{"location":"api/titiler/pgstac/extensions/#classes","title":"Classes","text":""},{"location":"api/titiler/pgstac/extensions/#searchinfoextension","title":"searchInfoExtension","text":"
class searchInfoExtension(\n\n)\n

Add /info endpoint

"},{"location":"api/titiler/pgstac/extensions/#ancestors-in-mro","title":"Ancestors (in MRO)","text":"
  • titiler.core.factory.FactoryExtension
"},{"location":"api/titiler/pgstac/extensions/#methods","title":"Methods","text":""},{"location":"api/titiler/pgstac/extensions/#register","title":"register","text":"
def register(\n    self,\n    factory: titiler.pgstac.factory.MosaicTilerFactory\n)\n

Register endpoint to the tiler factory.

"},{"location":"api/titiler/pgstac/factory/","title":"Module titiler.pgstac.factory","text":"

Custom MosaicTiler Factory for PgSTAC Mosaic Backend.

"},{"location":"api/titiler/pgstac/factory/#variables","title":"Variables","text":"
DEFAULT_TEMPLATES\n
MAX_THREADS\n
MOSAIC_STRICT_ZOOM\n
MOSAIC_THREADS\n
WGS84_CRS\n
img_endpoint_params\n
jinja2_env\n
"},{"location":"api/titiler/pgstac/factory/#functions","title":"Functions","text":""},{"location":"api/titiler/pgstac/factory/#add_search_list_route","title":"add_search_list_route","text":"
def add_search_list_route(\n    app: fastapi.applications.FastAPI,\n    *,\n    prefix: str = '',\n    tags: Optional[List[str]] = None\n)\n

Add PgSTAC Search (of type mosaic) listing route.

"},{"location":"api/titiler/pgstac/factory/#add_search_register_route","title":"add_search_register_route","text":"
def add_search_register_route(\n    app: fastapi.applications.FastAPI,\n    *,\n    prefix: str = '',\n    search_dependency: Callable[..., Tuple[titiler.pgstac.model.PgSTACSearch, titiler.pgstac.model.Metadata]] = <function SearchParams at 0x7f56f67a4400>,\n    tile_dependencies: Optional[List[Callable]] = None,\n    tags: Optional[List[str]] = None\n)\n

add /register route

"},{"location":"api/titiler/pgstac/factory/#check_query_params","title":"check_query_params","text":"
def check_query_params(\n    *,\n    dependencies: List[Callable],\n    query_params: Union[starlette.datastructures.QueryParams, Dict]\n) -> None\n

Check QueryParams for Query dependency.

  1. get_dependant is used to get the query-parameters required by the callable
  2. we use request_params_to_args to construct arguments needed to call the callable
  3. we call the callable and catch any errors

Important: We assume the callable is not a co-routine
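
A minimal usage sketch (the dependency and query parameters are assumptions picked for illustration):

from starlette.datastructures import QueryParams\n\nfrom titiler.core.dependencies import AssetsBidxExprParams\nfrom titiler.pgstac.factory import check_query_params\n\n# raises an HTTP error if the parameters do not satisfy the dependency\ncheck_query_params(\n    dependencies=[AssetsBidxExprParams],\n    query_params=QueryParams({\"assets\": \"cog\"}),\n)\n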

"},{"location":"api/titiler/pgstac/factory/#classes","title":"Classes","text":""},{"location":"api/titiler/pgstac/factory/#mosaictilerfactory","title":"MosaicTilerFactory","text":"
class MosaicTilerFactory(\n    reader: Type[cogeo_mosaic.backends.base.BaseBackend] = <class 'titiler.pgstac.mosaic.PGSTACBackend'>,\n    router: fastapi.routing.APIRouter = <factory>,\n    path_dependency: Callable[..., str] = <function DatasetPathParams at 0x7f56ff3c9e40>,\n    layer_dependency: Type[titiler.core.dependencies.DefaultDependency] = <class 'titiler.core.dependencies.AssetsBidxExprParams'>,\n    dataset_dependency: Type[titiler.core.dependencies.DefaultDependency] = <class 'titiler.core.dependencies.DatasetParams'>,\n    process_dependency: Callable[..., Optional[titiler.core.algorithm.base.BaseAlgorithm]] = <function Algorithms.dependency.<locals>.post_process at 0x7f56f66db560>,\n    rescale_dependency: Callable[..., Optional[List[Tuple[float, ...]]]] = <function RescalingParams at 0x7f56f6aa71a0>,\n    color_formula_dependency: Callable[..., Optional[str]] = <function ColorFormulaParams at 0x7f56f68d1da0>,\n    colormap_dependency: Callable[..., Union[Dict[int, Tuple[int, int, int, int]], Sequence[Tuple[Tuple[Union[float, int], Union[float, int]], Tuple[int, int, int, int]]], NoneType]] = <function create_colormap_dependency.<locals>.deps at 0x7f56ff3c9da0>,\n    render_dependency: Type[titiler.core.dependencies.DefaultDependency] = <class 'titiler.core.dependencies.ImageRenderingParams'>,\n    reader_dependency: Type[titiler.core.dependencies.DefaultDependency] = <class 'titiler.core.dependencies.DefaultDependency'>,\n    environment_dependency: Callable[..., Dict] = <function BaseTilerFactory.<lambda> at 0x7f56f66db380>,\n    supported_tms: morecantile.defaults.TileMatrixSets = TileMatrixSets(tms={'CDB1GlobalGrid': PosixPath('/opt/hostedtoolcache/Python/3.11.9/x64/lib/python3.11/site-packages/morecantile/data/CDB1GlobalGrid.json'), 'CanadianNAD83_LCC': PosixPath('/opt/hostedtoolcache/Python/3.11.9/x64/lib/python3.11/site-packages/morecantile/data/CanadianNAD83_LCC.json'), 'EuropeanETRS89_LAEAQuad': PosixPath('/opt/hostedtoolcache/Python/3.11.9/x64/lib/python3.11/site-packages/morecantile/data/EuropeanETRS89_LAEAQuad.json'), 'GNOSISGlobalGrid': PosixPath('/opt/hostedtoolcache/Python/3.11.9/x64/lib/python3.11/site-packages/morecantile/data/GNOSISGlobalGrid.json'), 'LINZAntarticaMapTilegrid': PosixPath('/opt/hostedtoolcache/Python/3.11.9/x64/lib/python3.11/site-packages/morecantile/data/LINZAntarticaMapTilegrid.json'), 'NZTM2000Quad': PosixPath('/opt/hostedtoolcache/Python/3.11.9/x64/lib/python3.11/site-packages/morecantile/data/NZTM2000Quad.json'), 'UPSAntarcticWGS84Quad': PosixPath('/opt/hostedtoolcache/Python/3.11.9/x64/lib/python3.11/site-packages/morecantile/data/UPSAntarcticWGS84Quad.json'), 'UPSArcticWGS84Quad': PosixPath('/opt/hostedtoolcache/Python/3.11.9/x64/lib/python3.11/site-packages/morecantile/data/UPSArcticWGS84Quad.json'), 'UTM31WGS84Quad': PosixPath('/opt/hostedtoolcache/Python/3.11.9/x64/lib/python3.11/site-packages/morecantile/data/UTM31WGS84Quad.json'), 'WGS1984Quad': PosixPath('/opt/hostedtoolcache/Python/3.11.9/x64/lib/python3.11/site-packages/morecantile/data/WGS1984Quad.json'), 'WebMercatorQuad': <TileMatrixSet title='Google Maps Compatible for the World' id='WebMercatorQuad' crs='http://www.opengis.net/def/crs/EPSG/0/3857>, 'WorldCRS84Quad': PosixPath('/opt/hostedtoolcache/Python/3.11.9/x64/lib/python3.11/site-packages/morecantile/data/WorldCRS84Quad.json'), 'WorldMercatorWGS84Quad': PosixPath('/opt/hostedtoolcache/Python/3.11.9/x64/lib/python3.11/site-packages/morecantile/data/WorldMercatorWGS84Quad.json')}),\n    default_tms: Optional[str] = 
None,\n    router_prefix: str = '',\n    optional_headers: List[titiler.core.resources.enums.OptionalHeader] = <factory>,\n    route_dependencies: List[Tuple[List[titiler.core.routing.EndpointScope], List[fastapi.params.Depends]]] = <factory>,\n    extensions: List[titiler.core.factory.FactoryExtension] = <factory>,\n    templates: starlette.templating.Jinja2Templates = <starlette.templating.Jinja2Templates object at 0x7f56f6626dd0>,\n    stats_dependency: Type[titiler.core.dependencies.DefaultDependency] = <class 'titiler.core.dependencies.StatisticsParams'>,\n    histogram_dependency: Type[titiler.core.dependencies.DefaultDependency] = <class 'titiler.core.dependencies.HistogramParams'>,\n    tile_dependency: Type[titiler.core.dependencies.DefaultDependency] = <class 'titiler.core.dependencies.TileParams'>,\n    img_part_dependency: Type[titiler.core.dependencies.DefaultDependency] = <class 'titiler.core.dependencies.PartFeatureParams'>,\n    pixel_selection_dependency: Callable[..., rio_tiler.mosaic.methods.base.MosaicMethodBase] = <function PixelSelectionParams at 0x7f56f60caca0>,\n    pgstac_dependency: Type[titiler.core.dependencies.DefaultDependency] = <class 'titiler.pgstac.dependencies.PgSTACParams'>,\n    backend_dependency: Type[titiler.core.dependencies.DefaultDependency] = <class 'titiler.pgstac.dependencies.BackendParams'>,\n    add_statistics: bool = False,\n    add_viewer: bool = False,\n    add_part: bool = False\n)\n

Custom MosaicTiler for PgSTAC Mosaic Backend.

"},{"location":"api/titiler/pgstac/factory/#ancestors-in-mro","title":"Ancestors (in MRO)","text":"
  • titiler.core.factory.BaseTilerFactory
"},{"location":"api/titiler/pgstac/factory/#class-variables","title":"Class variables","text":"
add_part\n
add_statistics\n
add_viewer\n
backend_dependency\n
dataset_dependency\n
default_tms\n
histogram_dependency\n
img_part_dependency\n
layer_dependency\n
pgstac_dependency\n
reader\n
reader_dependency\n
render_dependency\n
router_prefix\n
stats_dependency\n
supported_tms\n
templates\n
tile_dependency\n
"},{"location":"api/titiler/pgstac/factory/#methods","title":"Methods","text":""},{"location":"api/titiler/pgstac/factory/#add_route_dependencies","title":"add_route_dependencies","text":"
def add_route_dependencies(\n    self,\n    *,\n    scopes: List[titiler.core.routing.EndpointScope],\n    dependencies=typing.List[fastapi.params.Depends]\n)\n

Add dependencies to routes.

Allows a developer to add dependencies to a route after the route has been defined.

"},{"location":"api/titiler/pgstac/factory/#color_formula_dependency","title":"color_formula_dependency","text":"
def color_formula_dependency(\n    color_formula: Annotated[Optional[str], Query(PydanticUndefined)] = None\n) -> Optional[str]\n

ColorFormula Parameter.

"},{"location":"api/titiler/pgstac/factory/#colormap_dependency","title":"colormap_dependency","text":"
def colormap_dependency(\n    colormap_name: Annotated[Literal['plasma_r', 'pastel2', 'rdylbu_r', 'cividis', 'purples_r', 'twilight_shifted_r', 'brbg_r', 'gist_ncar_r', 'tarn', 'set1_r', 'rainbow_r', 'bupu_r', 'turbid', 'diff', 'turbid_r', 'ice_r', 'ocean', 'accent_r', 'hot', 'reds_r', 'rdylgn_r', 'gist_yarg', 'hsv', 'nipy_spectral_r', 'greens', 'inferno_r', 'ylgnbu', 'prism_r', 'winter', 'set2_r', 'gnuplot2', 'gnuplot2_r', 'set3', 'speed', 'seismic_r', 'cool', 'bwr', 'purples', 'topo_r', 'spring_r', 'blues', 'rdpu_r', 'terrain_r', 'pastel2_r', 'brg_r', 'rain_r', 'binary_r', 'gist_heat', 'tarn_r', 'amp_r', 'topo', 'prgn', 'phase_r', 'gist_ncar', 'blues_r', 'delta_r', 'tab20_r', 'delta', 'solar_r', 'summer_r', 'oranges', 'tempo', 'spectral_r', 'gist_earth', 'gnuplot', 'piyg', 'viridis', 'orrd', 'cubehelix_r', 'phase', 'magma_r', 'viridis_r', 'twilight_r', 'wistia', 'curl', 'cividis_r', 'tab20b', 'gist_rainbow', 'winter_r', 'pastel1_r', 'flag_r', 'ocean_r', 'bugn', 'rdgy', 'rain', 'algae_r', 'wistia_r', 'accent', 'tempo_r', 'afmhot', 'amp', 'rdbu_r', 'puor_r', 'ylorrd', 'pubu_r', 'brbg', 'pink_r', 'greys_r', 'pubugn_r', 'cmrmap', 'flag', 'turbo', 'oxy_r', 'ylorbr_r', 'matter_r', 'twilight', 'deep', 'purd', 'coolwarm', 'gist_rainbow_r', 'spring', 'autumn', 'spectral', 'hot_r', 'coolwarm_r', 'schwarzwald', 'tab10_r', 'speed_r', 'ylgn_r', 'gnuplot_r', 'gist_heat_r', 'rdylbu', 'orrd_r', 'piyg_r', 'balance', 'balance_r', 'pubu', 'pink', 'prgn_r', 'inferno', 'bupu', 'dark2_r', 'deep_r', 'matter', 'jet', 'tab20c_r', 'diff_r', 'nipy_spectral', 'gist_earth_r', 'gist_stern_r', 'haline', 'turbo_r', 'prism', 'purd_r', 'haline_r', 'rplumbo', 'gist_gray', 'greens_r', 'gray', 'algae', 'tab10', 'hsv_r', 'autumn_r', 'rdpu', 'thermal_r', 'oxy', 'cmrmap_r', 'cubehelix', 'ylgnbu_r', 'dense', 'bugn_r', 'gist_stern', 'tab20', 'ylorbr', 'summer', 'rdylgn', 'tab20b_r', 'jet_r', 'paired_r', 'dark2', 'binary', 'twilight_shifted', 'seismic', 'pubugn', 'ylgn', 'rdbu', 'bone', 'tab20c', 'dense_r', 'gnbu', 'set2', 'paired', 'gnbu_r', 'copper_r', 'gist_yarg_r', 'pastel1', 'bwr_r', 'greys', 'puor', 'oranges_r', 'cool_r', 'afmhot_r', 'rdgy_r', 'bone_r', 'thermal', 'ice', 'gray_r', 'copper', 'cfastie', 'curl_r', 'reds', 'brg', 'solar', 'ylorrd_r', 'magma', 'plasma', 'set3_r', 'rainbow', 'terrain', 'gist_gray_r', 'set1'], Query(PydanticUndefined)] = None,\n    colormap: Annotated[Optional[str], Query(PydanticUndefined)] = None\n)\n
"},{"location":"api/titiler/pgstac/factory/#environment_dependency","title":"environment_dependency","text":"
def environment_dependency(\n\n)\n
"},{"location":"api/titiler/pgstac/factory/#path_dependency","title":"path_dependency","text":"
def path_dependency(\n    url: typing.Annotated[str, Query(PydanticUndefined)]\n) -> str\n

Create dataset path from args

"},{"location":"api/titiler/pgstac/factory/#pixel_selection_dependency","title":"pixel_selection_dependency","text":"
def pixel_selection_dependency(\n    pixel_selection: Annotated[Literal['first', 'highest', 'lowest', 'mean', 'median', 'stdev', 'lastbandlow', 'lastbandhight', 'count'], Query(PydanticUndefined)] = 'first'\n) -> rio_tiler.mosaic.methods.base.MosaicMethodBase\n

Returns the mosaic method used to combine datasets together.
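
A short sketch: the query value selects one of rio-tiler's mosaic methods (here 'mean'), returned as a MosaicMethodBase instance:

from rio_tiler.mosaic.methods.base import MosaicMethodBase\n\nmethod = pixel_selection_dependency(pixel_selection='mean')\nassert isinstance(method, MosaicMethodBase)\n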

"},{"location":"api/titiler/pgstac/factory/#process_dependency","title":"process_dependency","text":"
def process_dependency(\n    algorithm: Annotated[Literal['hillshade', 'contours', 'normalizedIndex', 'terrarium', 'terrainrgb'], Query(PydanticUndefined)] = None,\n    algorithm_params: Annotated[Optional[str], Query(PydanticUndefined)] = None\n) -> Optional[titiler.core.algorithm.base.BaseAlgorithm]\n

Data Post-Processing options.
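
For example, an algorithm can be selected by name with its options passed as a JSON-encoded string; the azimuth option name is assumed here, not confirmed by this page:

import json\n\nalgo = process_dependency(\n    algorithm='hillshade',\n    algorithm_params=json.dumps({'azimuth': 315}),  # assumed option name\n)\n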

"},{"location":"api/titiler/pgstac/factory/#register_routes","title":"register_routes","text":"
def register_routes(\n    self\n) -> None\n

This method registers routes to the router.
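
register_routes is normally called for you when the factory is instantiated; a minimal sketch of mounting the resulting router (the prefix is illustrative):

from fastapi import FastAPI\nfrom titiler.pgstac.factory import MosaicTilerFactory\n\napp = FastAPI()\nmosaic = MosaicTilerFactory()  # register_routes() runs during factory initialization\napp.include_router(mosaic.router, prefix='/searches')  # illustrative prefix\n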

"},{"location":"api/titiler/pgstac/factory/#rescale_dependency","title":"rescale_dependency","text":"
def rescale_dependency(\n    rescale: Annotated[Optional[List[str]], Query(PydanticUndefined)] = None\n) -> Optional[List[Tuple[float, ...]]]\n

Min/Max data Rescaling
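
For instance, each 'min,max' string should be parsed into a tuple of floats, one per band:

rescale = rescale_dependency(rescale=['0,1000'])\nassert rescale == [(0.0, 1000.0)]\n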

"},{"location":"api/titiler/pgstac/factory/#url_for","title":"url_for","text":"
def url_for(\n    self,\n    request: starlette.requests.Request,\n    name: str,\n    **path_params: Any\n) -> str\n

Return full url (with prefix) for a specific endpoint.

"},{"location":"api/titiler/pgstac/model/","title":"Module titiler.pgstac.model","text":"

Titiler.pgstac models.

Note: This is mostly a copy of github.com/stac-utils/stac-fastapi/blob/master/stac_fastapi/pgstac/stac_fastapi/pgstac/types/search.py

"},{"location":"api/titiler/pgstac/model/#variables","title":"Variables","text":"
FilterLang\n
Operator\n
"},{"location":"api/titiler/pgstac/model/#classes","title":"Classes","text":""},{"location":"api/titiler/pgstac/model/#context","title":"Context","text":"
class Context(\n    /,\n    **data: 'Any'\n)\n

Context Model.
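
A rough sketch of constructing the model; the field names (returned, limit, matched) are assumed from the STAC context object rather than documented on this page:

ctx = Context(returned=10, limit=10, matched=124)  # assumed field names\nprint(ctx.model_dump(exclude_none=True))\n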

"},{"location":"api/titiler/pgstac/model/#ancestors-in-mro","title":"Ancestors (in MRO)","text":"
  • pydantic.main.BaseModel
"},{"location":"api/titiler/pgstac/model/#class-variables","title":"Class variables","text":"
model_computed_fields\n
model_config\n
model_fields\n
"},{"location":"api/titiler/pgstac/model/#static-methods","title":"Static methods","text":""},{"location":"api/titiler/pgstac/model/#construct","title":"construct","text":"
def construct(\n    _fields_set: 'set[str] | None' = None,\n    **values: 'Any'\n) -> 'Model'\n
"},{"location":"api/titiler/pgstac/model/#from_orm","title":"from_orm","text":"
def from_orm(\n    obj: 'Any'\n) -> 'Model'\n
"},{"location":"api/titiler/pgstac/model/#model_construct","title":"model_construct","text":"
def model_construct(\n    _fields_set: 'set[str] | None' = None,\n    **values: 'Any'\n) -> 'Model'\n

Creates a new instance of the Model class with validated data.

Creates a new model setting __dict__ and __pydantic_fields_set__ from trusted or pre-validated data. Default values are respected, but no other validation is performed.

Note

model_construct() generally respects the model_config.extra setting on the provided model. That is, if model_config.extra == 'allow', then all extra passed values are added to the model instance's __dict__ and __pydantic_extra__ fields. If model_config.extra == 'ignore' (the default), then all extra passed values are ignored. Because no validation is performed with a call to model_construct(), having model_config.extra == 'forbid' does not result in an error if extra values are passed, but they will be ignored.

Parameters:

Name Type Description Default _fields_set None The set of field names accepted for the Model instance. None values None Trusted or pre-validated data dictionary. None

Returns:

Type Description None A new instance of the Model class with validated data."},{"location":"api/titiler/pgstac/model/#model_json_schema","title":"model_json_schema","text":"
def model_json_schema(\n    by_alias: 'bool' = True,\n    ref_template: 'str' = '#/$defs/{model}',\n    schema_generator: 'type[GenerateJsonSchema]' = <class 'pydantic.json_schema.GenerateJsonSchema'>,\n    mode: 'JsonSchemaMode' = 'validation'\n) -> 'dict[str, Any]'\n

Generates a JSON schema for a model class.

Parameters:

Name Type Description Default by_alias None Whether to use attribute aliases or not. None ref_template None The reference template. None schema_generator None To override the logic used to generate the JSON schema, as a subclass of GenerateJsonSchema with your desired modifications None mode None The mode in which to generate the schema. None

Returns:

Type Description None The JSON schema for the given model class."},{"location":"api/titiler/pgstac/model/#model_parametrized_name","title":"model_parametrized_name","text":"
def model_parametrized_name(\n    params: 'tuple[type[Any], ...]'\n) -> 'str'\n

Compute the class name for parametrizations of generic classes.

This method can be overridden to achieve a custom naming scheme for generic BaseModels.

Parameters:

Name Type Description Default params None Tuple of types of the class. Given a generic class Model with 2 type variables and a concrete model Model[str, int], the value (str, int) would be passed to params. None

Returns:

Type Description None String representing the new class where params are passed to cls as type variables.

Raises:

Type Description TypeError Raised when trying to generate concrete names for non-generic models."},{"location":"api/titiler/pgstac/model/#model_rebuild","title":"model_rebuild","text":"
def model_rebuild(\n    *,\n    force: 'bool' = False,\n    raise_errors: 'bool' = True,\n    _parent_namespace_depth: 'int' = 2,\n    _types_namespace: 'dict[str, Any] | None' = None\n) -> 'bool | None'\n

Try to rebuild the pydantic-core schema for the model.

This may be necessary when one of the annotations is a ForwardRef which could not be resolved during the initial attempt to build the schema, and automatic rebuilding fails.

Parameters:

Name Type Description Default force None Whether to force the rebuilding of the model schema, defaults to False. None raise_errors None Whether to raise errors, defaults to True. None _parent_namespace_depth None The depth level of the parent namespace, defaults to 2. None _types_namespace None The types namespace, defaults to None. None

Returns:

Type Description None Returns None if the schema is already \"complete\" and rebuilding was not required. If rebuilding was required, returns True if rebuilding was successful, otherwise False."},{"location":"api/titiler/pgstac/model/#model_validate","title":"model_validate","text":"
def model_validate(\n    obj: 'Any',\n    *,\n    strict: 'bool | None' = None,\n    from_attributes: 'bool | None' = None,\n    context: 'dict[str, Any] | None' = None\n) -> 'Model'\n

Validate a pydantic model instance.

Parameters:

Name Type Description Default obj None The object to validate. None strict None Whether to enforce types strictly. None from_attributes None Whether to extract data from object attributes. None context None Additional context to pass to the validator. None

Returns:

Type Description None The validated model instance.

Raises:

Type Description ValidationError If the object could not be validated."},{"location":"api/titiler/pgstac/model/#model_validate_json","title":"model_validate_json","text":"
def model_validate_json(\n    json_data: 'str | bytes | bytearray',\n    *,\n    strict: 'bool | None' = None,\n    context: 'dict[str, Any] | None' = None\n) -> 'Model'\n

Usage docs: docs.pydantic.dev/2.7/concepts/json/#json-parsing

Validate the given JSON data against the Pydantic model.

Parameters:

Name Type Description Default json_data None The JSON data to validate. None strict None Whether to enforce types strictly. None context None Extra variables to pass to the validator. None

Returns:

Type Description None The validated Pydantic model.

Raises:

Type Description ValueError If json_data is not a JSON string."},{"location":"api/titiler/pgstac/model/#model_validate_strings","title":"model_validate_strings","text":"
def model_validate_strings(\n    obj: 'Any',\n    *,\n    strict: 'bool | None' = None,\n    context: 'dict[str, Any] | None' = None\n) -> 'Model'\n

Validate the given object, which contains string data, against the Pydantic model.

Parameters:

Name Type Description Default obj None The object containing string data to validate. None strict None Whether to enforce types strictly. None context None Extra variables to pass to the validator. None

Returns:

Type Description None The validated Pydantic model."},{"location":"api/titiler/pgstac/model/#parse_file","title":"parse_file","text":"
def parse_file(\n    path: 'str | Path',\n    *,\n    content_type: 'str | None' = None,\n    encoding: 'str' = 'utf8',\n    proto: 'DeprecatedParseProtocol | None' = None,\n    allow_pickle: 'bool' = False\n) -> 'Model'\n
"},{"location":"api/titiler/pgstac/model/#parse_obj","title":"parse_obj","text":"
def parse_obj(\n    obj: 'Any'\n) -> 'Model'\n
"},{"location":"api/titiler/pgstac/model/#parse_raw","title":"parse_raw","text":"
def parse_raw(\n    b: 'str | bytes',\n    *,\n    content_type: 'str | None' = None,\n    encoding: 'str' = 'utf8',\n    proto: 'DeprecatedParseProtocol | None' = None,\n    allow_pickle: 'bool' = False\n) -> 'Model'\n
"},{"location":"api/titiler/pgstac/model/#schema","title":"schema","text":"
def schema(\n    by_alias: 'bool' = True,\n    ref_template: 'str' = '#/$defs/{model}'\n) -> 'typing.Dict[str, Any]'\n
"},{"location":"api/titiler/pgstac/model/#schema_json","title":"schema_json","text":"
def schema_json(\n    *,\n    by_alias: 'bool' = True,\n    ref_template: 'str' = '#/$defs/{model}',\n    **dumps_kwargs: 'Any'\n) -> 'str'\n
"},{"location":"api/titiler/pgstac/model/#update_forward_refs","title":"update_forward_refs","text":"
def update_forward_refs(\n    **localns: 'Any'\n) -> 'None'\n
"},{"location":"api/titiler/pgstac/model/#validate","title":"validate","text":"
def validate(\n    value: 'Any'\n) -> 'Model'\n
"},{"location":"api/titiler/pgstac/model/#validate_limit","title":"validate_limit","text":"
def validate_limit(\n    v,\n    info: pydantic_core.core_schema.ValidationInfo\n)\n

validate limit.

"},{"location":"api/titiler/pgstac/model/#instance-variables","title":"Instance variables","text":"
model_extra\n

Get extra fields set during validation.

model_fields_set\n

Returns the set of fields that have been explicitly set on this model instance.

"},{"location":"api/titiler/pgstac/model/#methods","title":"Methods","text":""},{"location":"api/titiler/pgstac/model/#copy","title":"copy","text":"
def copy(\n    self: 'Model',\n    *,\n    include: 'AbstractSetIntStr | MappingIntStrAny | None' = None,\n    exclude: 'AbstractSetIntStr | MappingIntStrAny | None' = None,\n    update: 'typing.Dict[str, Any] | None' = None,\n    deep: 'bool' = False\n) -> 'Model'\n

Returns a copy of the model.

Deprecated

This method is now deprecated; use model_copy instead.

If you need include or exclude, use:

data = self.model_dump(include=include, exclude=exclude, round_trip=True)\ndata = {**data, **(update or {})}\ncopied = self.model_validate(data)\n

Parameters:

Name Type Description Default include None Optional set or mapping specifying which fields to include in the copied model. None exclude None Optional set or mapping specifying which fields to exclude in the copied model. None update None Optional dictionary of field-value pairs to override field values in the copied model. None deep None If True, the values of fields that are Pydantic models will be deep-copied. None

Returns:

Type Description None A copy of the model with included, excluded and updated fields as specified."},{"location":"api/titiler/pgstac/model/#dict","title":"dict","text":"
def dict(\n    self,\n    *,\n    include: 'IncEx' = None,\n    exclude: 'IncEx' = None,\n    by_alias: 'bool' = False,\n    exclude_unset: 'bool' = False,\n    exclude_defaults: 'bool' = False,\n    exclude_none: 'bool' = False\n) -> 'typing.Dict[str, Any]'\n
"},{"location":"api/titiler/pgstac/model/#json","title":"json","text":"
def json(\n    self,\n    *,\n    include: 'IncEx' = None,\n    exclude: 'IncEx' = None,\n    by_alias: 'bool' = False,\n    exclude_unset: 'bool' = False,\n    exclude_defaults: 'bool' = False,\n    exclude_none: 'bool' = False,\n    encoder: 'typing.Callable[[Any], Any] | None' = PydanticUndefined,\n    models_as_dict: 'bool' = PydanticUndefined,\n    **dumps_kwargs: 'Any'\n) -> 'str'\n
"},{"location":"api/titiler/pgstac/model/#model_copy","title":"model_copy","text":"
def model_copy(\n    self: 'Model',\n    *,\n    update: 'dict[str, Any] | None' = None,\n    deep: 'bool' = False\n) -> 'Model'\n

Usage docs: docs.pydantic.dev/2.7/concepts/serialization/#model_copy

Returns a copy of the model.

Parameters:

Name Type Description Default update None Values to change/add in the new model. Note: the data is not validated before creating the new model. You should trust this data. None deep None Set to True to make a deep copy of the model. None

Returns:

Type Description None New model instance."},{"location":"api/titiler/pgstac/model/#model_dump","title":"model_dump","text":"
def model_dump(\n    self,\n    *,\n    mode: \"Literal['json', 'python'] | str\" = 'python',\n    include: 'IncEx' = None,\n    exclude: 'IncEx' = None,\n    context: 'dict[str, Any] | None' = None,\n    by_alias: 'bool' = False,\n    exclude_unset: 'bool' = False,\n    exclude_defaults: 'bool' = False,\n    exclude_none: 'bool' = False,\n    round_trip: 'bool' = False,\n    warnings: \"bool | Literal['none', 'warn', 'error']\" = True,\n    serialize_as_any: 'bool' = False\n) -> 'dict[str, Any]'\n

Usage docs: docs.pydantic.dev/2.7/concepts/serialization/#modelmodel_dump

Generate a dictionary representation of the model, optionally specifying which fields to include or exclude.

Parameters:

Name Type Description Default mode None The mode in which to_python should run.If mode is 'json', the output will only contain JSON serializable types.If mode is 'python', the output may contain non-JSON-serializable Python objects. None include None A set of fields to include in the output. None exclude None A set of fields to exclude from the output. None context None Additional context to pass to the serializer. None by_alias None Whether to use the field's alias in the dictionary key if defined. None exclude_unset None Whether to exclude fields that have not been explicitly set. None exclude_defaults None Whether to exclude fields that are set to their default value. None exclude_none None Whether to exclude fields that have a value of None. None round_trip None If True, dumped values should be valid as input for non-idempotent types such as Json[T]. None warnings None How to handle serialization errors. False/\"none\" ignores them, True/\"warn\" logs errors,\"error\" raises a [PydanticSerializationError][pydantic_core.PydanticSerializationError]. None serialize_as_any None Whether to serialize fields with duck-typing serialization behavior. None

Returns:

Type Description None A dictionary representation of the model."},{"location":"api/titiler/pgstac/model/#model_dump_json","title":"model_dump_json","text":"
def model_dump_json(\n    self,\n    *,\n    indent: 'int | None' = None,\n    include: 'IncEx' = None,\n    exclude: 'IncEx' = None,\n    context: 'dict[str, Any] | None' = None,\n    by_alias: 'bool' = False,\n    exclude_unset: 'bool' = False,\n    exclude_defaults: 'bool' = False,\n    exclude_none: 'bool' = False,\n    round_trip: 'bool' = False,\n    warnings: \"bool | Literal['none', 'warn', 'error']\" = True,\n    serialize_as_any: 'bool' = False\n) -> 'str'\n

Usage docs: docs.pydantic.dev/2.7/concepts/serialization/#modelmodel_dump_json

Generates a JSON representation of the model using Pydantic's to_json method.

Parameters:

Name Type Description Default indent None Indentation to use in the JSON output. If None is passed, the output will be compact. None include None Field(s) to include in the JSON output. None exclude None Field(s) to exclude from the JSON output. None context None Additional context to pass to the serializer. None by_alias None Whether to serialize using field aliases. None exclude_unset None Whether to exclude fields that have not been explicitly set. None exclude_defaults None Whether to exclude fields that are set to their default value. None exclude_none None Whether to exclude fields that have a value of None. None round_trip None If True, dumped values should be valid as input for non-idempotent types such as Json[T]. None warnings None How to handle serialization errors. False/\"none\" ignores them, True/\"warn\" logs errors,\"error\" raises a [PydanticSerializationError][pydantic_core.PydanticSerializationError]. None serialize_as_any None Whether to serialize fields with duck-typing serialization behavior. None

Returns:

Type Description None A JSON string representation of the model."},{"location":"api/titiler/pgstac/model/#model_post_init","title":"model_post_init","text":"
def model_post_init(\n    self,\n    _BaseModel__context: 'Any'\n) -> 'None'\n

Override this method to perform additional initialization after __init__ and model_construct.

This is useful if you want to do some validation that requires the entire model to be initialized.
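
A generic Pydantic v2 sketch (the BBox model is purely illustrative, not part of titiler.pgstac) of overriding model_post_init to run a whole-model check:

from pydantic import BaseModel\n\nclass BBox(BaseModel):  # illustrative model\n    minx: float\n    maxx: float\n\n    def model_post_init(self, __context) -> None:\n        # runs after __init__ and model_construct, once every field is set\n        if self.minx > self.maxx:\n            raise ValueError('minx must be <= maxx')\n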

"},{"location":"api/titiler/pgstac/model/#info","title":"Info","text":"
class Info(\n    /,\n    **data: 'Any'\n)\n

Response model for /info endpoint.
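
As an illustration only (host, prefix and search id are assumed), a client can re-validate the /info payload with this model:

import httpx\n\nsearch_id = 'REGISTERED-SEARCH-ID'  # placeholder\nresp = httpx.get(f'http://127.0.0.1:8000/searches/{search_id}/info')  # hypothetical deployment\ninfo = Info.model_validate(resp.json())\n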

"},{"location":"api/titiler/pgstac/model/#ancestors-in-mro_1","title":"Ancestors (in MRO)","text":"
  • pydantic.main.BaseModel
"},{"location":"api/titiler/pgstac/model/#class-variables_1","title":"Class variables","text":"
model_computed_fields\n
model_config\n
model_fields\n
"},{"location":"api/titiler/pgstac/model/#static-methods_1","title":"Static methods","text":""},{"location":"api/titiler/pgstac/model/#construct_1","title":"construct","text":"
def construct(\n    _fields_set: 'set[str] | None' = None,\n    **values: 'Any'\n) -> 'Model'\n
"},{"location":"api/titiler/pgstac/model/#from_orm_1","title":"from_orm","text":"
def from_orm(\n    obj: 'Any'\n) -> 'Model'\n
"},{"location":"api/titiler/pgstac/model/#model_construct_1","title":"model_construct","text":"
def model_construct(\n    _fields_set: 'set[str] | None' = None,\n    **values: 'Any'\n) -> 'Model'\n

Creates a new instance of the Model class with validated data.

Creates a new model setting __dict__ and __pydantic_fields_set__ from trusted or pre-validated data. Default values are respected, but no other validation is performed.

Note

model_construct() generally respects the model_config.extra setting on the provided model. That is, if model_config.extra == 'allow', then all extra passed values are added to the model instance's __dict__ and __pydantic_extra__ fields. If model_config.extra == 'ignore' (the default), then all extra passed values are ignored. Because no validation is performed with a call to model_construct(), having model_config.extra == 'forbid' does not result in an error if extra values are passed, but they will be ignored.

Parameters:

Name Type Description Default _fields_set None The set of field names accepted for the Model instance. None values None Trusted or pre-validated data dictionary. None

Returns:

Type Description None A new instance of the Model class with validated data."},{"location":"api/titiler/pgstac/model/#model_json_schema_1","title":"model_json_schema","text":"
def model_json_schema(\n    by_alias: 'bool' = True,\n    ref_template: 'str' = '#/$defs/{model}',\n    schema_generator: 'type[GenerateJsonSchema]' = <class 'pydantic.json_schema.GenerateJsonSchema'>,\n    mode: 'JsonSchemaMode' = 'validation'\n) -> 'dict[str, Any]'\n

Generates a JSON schema for a model class.

Parameters:

Name Type Description Default by_alias None Whether to use attribute aliases or not. None ref_template None The reference template. None schema_generator None To override the logic used to generate the JSON schema, as a subclass of GenerateJsonSchema with your desired modifications None mode None The mode in which to generate the schema. None

Returns:

Type Description None The JSON schema for the given model class."},{"location":"api/titiler/pgstac/model/#model_parametrized_name_1","title":"model_parametrized_name","text":"
def model_parametrized_name(\n    params: 'tuple[type[Any], ...]'\n) -> 'str'\n

Compute the class name for parametrizations of generic classes.

This method can be overridden to achieve a custom naming scheme for generic BaseModels.

Parameters:

Name Type Description Default params None Tuple of types of the class. Given a generic class Model with 2 type variables and a concrete model Model[str, int], the value (str, int) would be passed to params. None

Returns:

Type Description None String representing the new class where params are passed to cls as type variables.

Raises:

Type Description TypeError Raised when trying to generate concrete names for non-generic models."},{"location":"api/titiler/pgstac/model/#model_rebuild_1","title":"model_rebuild","text":"
def model_rebuild(\n    *,\n    force: 'bool' = False,\n    raise_errors: 'bool' = True,\n    _parent_namespace_depth: 'int' = 2,\n    _types_namespace: 'dict[str, Any] | None' = None\n) -> 'bool | None'\n

Try to rebuild the pydantic-core schema for the model.

This may be necessary when one of the annotations is a ForwardRef which could not be resolved during the initial attempt to build the schema, and automatic rebuilding fails.

Parameters:

Name Type Description Default force None Whether to force the rebuilding of the model schema, defaults to False. None raise_errors None Whether to raise errors, defaults to True. None _parent_namespace_depth None The depth level of the parent namespace, defaults to 2. None _types_namespace None The types namespace, defaults to None. None

Returns:

Type Description None Returns None if the schema is already \"complete\" and rebuilding was not required. If rebuilding was required, returns True if rebuilding was successful, otherwise False."},{"location":"api/titiler/pgstac/model/#model_validate_1","title":"model_validate","text":"
def model_validate(\n    obj: 'Any',\n    *,\n    strict: 'bool | None' = None,\n    from_attributes: 'bool | None' = None,\n    context: 'dict[str, Any] | None' = None\n) -> 'Model'\n

Validate a pydantic model instance.

Parameters:

Name Type Description Default obj None The object to validate. None strict None Whether to enforce types strictly. None from_attributes None Whether to extract data from object attributes. None context None Additional context to pass to the validator. None

Returns:

Type Description None The validated model instance.

Raises:

Type Description ValidationError If the object could not be validated."},{"location":"api/titiler/pgstac/model/#model_validate_json_1","title":"model_validate_json","text":"
def model_validate_json(\n    json_data: 'str | bytes | bytearray',\n    *,\n    strict: 'bool | None' = None,\n    context: 'dict[str, Any] | None' = None\n) -> 'Model'\n

Usage docs: docs.pydantic.dev/2.7/concepts/json/#json-parsing

Validate the given JSON data against the Pydantic model.

Parameters:

Name Type Description Default json_data None The JSON data to validate. None strict None Whether to enforce types strictly. None context None Extra variables to pass to the validator. None

Returns:

Type Description None The validated Pydantic model.

Raises:

Type Description ValueError If json_data is not a JSON string."},{"location":"api/titiler/pgstac/model/#model_validate_strings_1","title":"model_validate_strings","text":"
def model_validate_strings(\n    obj: 'Any',\n    *,\n    strict: 'bool | None' = None,\n    context: 'dict[str, Any] | None' = None\n) -> 'Model'\n

Validate the given object, which contains string data, against the Pydantic model.

Parameters:

Name Type Description Default obj None The object containing string data to validate. None strict None Whether to enforce types strictly. None context None Extra variables to pass to the validator. None

Returns:

Type Description None The validated Pydantic model."},{"location":"api/titiler/pgstac/model/#parse_file_1","title":"parse_file","text":"
def parse_file(\n    path: 'str | Path',\n    *,\n    content_type: 'str | None' = None,\n    encoding: 'str' = 'utf8',\n    proto: 'DeprecatedParseProtocol | None' = None,\n    allow_pickle: 'bool' = False\n) -> 'Model'\n
"},{"location":"api/titiler/pgstac/model/#parse_obj_1","title":"parse_obj","text":"
def parse_obj(\n    obj: 'Any'\n) -> 'Model'\n
"},{"location":"api/titiler/pgstac/model/#parse_raw_1","title":"parse_raw","text":"
def parse_raw(\n    b: 'str | bytes',\n    *,\n    content_type: 'str | None' = None,\n    encoding: 'str' = 'utf8',\n    proto: 'DeprecatedParseProtocol | None' = None,\n    allow_pickle: 'bool' = False\n) -> 'Model'\n
"},{"location":"api/titiler/pgstac/model/#schema_1","title":"schema","text":"
def schema(\n    by_alias: 'bool' = True,\n    ref_template: 'str' = '#/$defs/{model}'\n) -> 'typing.Dict[str, Any]'\n
"},{"location":"api/titiler/pgstac/model/#schema_json_1","title":"schema_json","text":"
def schema_json(\n    *,\n    by_alias: 'bool' = True,\n    ref_template: 'str' = '#/$defs/{model}',\n    **dumps_kwargs: 'Any'\n) -> 'str'\n
"},{"location":"api/titiler/pgstac/model/#update_forward_refs_1","title":"update_forward_refs","text":"
def update_forward_refs(\n    **localns: 'Any'\n) -> 'None'\n
"},{"location":"api/titiler/pgstac/model/#validate_1","title":"validate","text":"
def validate(\n    value: 'Any'\n) -> 'Model'\n
"},{"location":"api/titiler/pgstac/model/#instance-variables_1","title":"Instance variables","text":"
model_extra\n

Get extra fields set during validation.

model_fields_set\n

Returns the set of fields that have been explicitly set on this model instance.

"},{"location":"api/titiler/pgstac/model/#methods_1","title":"Methods","text":""},{"location":"api/titiler/pgstac/model/#copy_1","title":"copy","text":"
def copy(\n    self: 'Model',\n    *,\n    include: 'AbstractSetIntStr | MappingIntStrAny | None' = None,\n    exclude: 'AbstractSetIntStr | MappingIntStrAny | None' = None,\n    update: 'typing.Dict[str, Any] | None' = None,\n    deep: 'bool' = False\n) -> 'Model'\n

Returns a copy of the model.

Deprecated

This method is now deprecated; use model_copy instead.

If you need include or exclude, use:

data = self.model_dump(include=include, exclude=exclude, round_trip=True)\ndata = {**data, **(update or {})}\ncopied = self.model_validate(data)\n

Parameters:

Name Type Description Default include None Optional set or mapping specifying which fields to include in the copied model. None exclude None Optional set or mapping specifying which fields to exclude in the copied model. None update None Optional dictionary of field-value pairs to override field values in the copied model. None deep None If True, the values of fields that are Pydantic models will be deep-copied. None

Returns:

Type Description None A copy of the model with included, excluded and updated fields as specified."},{"location":"api/titiler/pgstac/model/#dict_1","title":"dict","text":"
def dict(\n    self,\n    *,\n    include: 'IncEx' = None,\n    exclude: 'IncEx' = None,\n    by_alias: 'bool' = False,\n    exclude_unset: 'bool' = False,\n    exclude_defaults: 'bool' = False,\n    exclude_none: 'bool' = False\n) -> 'typing.Dict[str, Any]'\n
"},{"location":"api/titiler/pgstac/model/#json_1","title":"json","text":"
def json(\n    self,\n    *,\n    include: 'IncEx' = None,\n    exclude: 'IncEx' = None,\n    by_alias: 'bool' = False,\n    exclude_unset: 'bool' = False,\n    exclude_defaults: 'bool' = False,\n    exclude_none: 'bool' = False,\n    encoder: 'typing.Callable[[Any], Any] | None' = PydanticUndefined,\n    models_as_dict: 'bool' = PydanticUndefined,\n    **dumps_kwargs: 'Any'\n) -> 'str'\n
"},{"location":"api/titiler/pgstac/model/#model_copy_1","title":"model_copy","text":"
def model_copy(\n    self: 'Model',\n    *,\n    update: 'dict[str, Any] | None' = None,\n    deep: 'bool' = False\n) -> 'Model'\n

Usage docs: docs.pydantic.dev/2.7/concepts/serialization/#model_copy

Returns a copy of the model.

Parameters:

Name Type Description Default update None Values to change/add in the new model. Note: the data is not validated before creating the new model. You should trust this data. None deep None Set to True to make a deep copy of the model. None

Returns:

Type Description None New model instance."},{"location":"api/titiler/pgstac/model/#model_dump_1","title":"model_dump","text":"
def model_dump(\n    self,\n    *,\n    mode: \"Literal['json', 'python'] | str\" = 'python',\n    include: 'IncEx' = None,\n    exclude: 'IncEx' = None,\n    context: 'dict[str, Any] | None' = None,\n    by_alias: 'bool' = False,\n    exclude_unset: 'bool' = False,\n    exclude_defaults: 'bool' = False,\n    exclude_none: 'bool' = False,\n    round_trip: 'bool' = False,\n    warnings: \"bool | Literal['none', 'warn', 'error']\" = True,\n    serialize_as_any: 'bool' = False\n) -> 'dict[str, Any]'\n

Usage docs: docs.pydantic.dev/2.7/concepts/serialization/#modelmodel_dump

Generate a dictionary representation of the model, optionally specifying which fields to include or exclude.

Parameters:

Name Type Description Default mode None The mode in which to_python should run.If mode is 'json', the output will only contain JSON serializable types.If mode is 'python', the output may contain non-JSON-serializable Python objects. None include None A set of fields to include in the output. None exclude None A set of fields to exclude from the output. None context None Additional context to pass to the serializer. None by_alias None Whether to use the field's alias in the dictionary key if defined. None exclude_unset None Whether to exclude fields that have not been explicitly set. None exclude_defaults None Whether to exclude fields that are set to their default value. None exclude_none None Whether to exclude fields that have a value of None. None round_trip None If True, dumped values should be valid as input for non-idempotent types such as Json[T]. None warnings None How to handle serialization errors. False/\"none\" ignores them, True/\"warn\" logs errors,\"error\" raises a [PydanticSerializationError][pydantic_core.PydanticSerializationError]. None serialize_as_any None Whether to serialize fields with duck-typing serialization behavior. None

Returns:

Type Description None A dictionary representation of the model."},{"location":"api/titiler/pgstac/model/#model_dump_json_1","title":"model_dump_json","text":"
def model_dump_json(\n    self,\n    *,\n    indent: 'int | None' = None,\n    include: 'IncEx' = None,\n    exclude: 'IncEx' = None,\n    context: 'dict[str, Any] | None' = None,\n    by_alias: 'bool' = False,\n    exclude_unset: 'bool' = False,\n    exclude_defaults: 'bool' = False,\n    exclude_none: 'bool' = False,\n    round_trip: 'bool' = False,\n    warnings: \"bool | Literal['none', 'warn', 'error']\" = True,\n    serialize_as_any: 'bool' = False\n) -> 'str'\n

Usage docs: docs.pydantic.dev/2.7/concepts/serialization/#modelmodel_dump_json

Generates a JSON representation of the model using Pydantic's to_json method.

Parameters:

Name Type Description Default indent None Indentation to use in the JSON output. If None is passed, the output will be compact. None include None Field(s) to include in the JSON output. None exclude None Field(s) to exclude from the JSON output. None context None Additional context to pass to the serializer. None by_alias None Whether to serialize using field aliases. None exclude_unset None Whether to exclude fields that have not been explicitly set. None exclude_defaults None Whether to exclude fields that are set to their default value. None exclude_none None Whether to exclude fields that have a value of None. None round_trip None If True, dumped values should be valid as input for non-idempotent types such as Json[T]. None warnings None How to handle serialization errors. False/\"none\" ignores them, True/\"warn\" logs errors,\"error\" raises a [PydanticSerializationError][pydantic_core.PydanticSerializationError]. None serialize_as_any None Whether to serialize fields with duck-typing serialization behavior. None

Returns:

Type Description None A JSON string representation of the model."},{"location":"api/titiler/pgstac/model/#model_post_init_1","title":"model_post_init","text":"
def model_post_init(\n    self,\n    _BaseModel__context: 'Any'\n) -> 'None'\n

Override this method to perform additional initialization after __init__ and model_construct.

This is useful if you want to do some validation that requires the entire model to be initialized.

"},{"location":"api/titiler/pgstac/model/#infos","title":"Infos","text":"
class Infos(\n    /,\n    **data: 'Any'\n)\n

Response model for /list endpoint.
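
Similarly (deployment URL and prefix assumed), the /list payload can be re-validated into Infos:

import httpx\n\nresp = httpx.get('http://127.0.0.1:8000/searches/list')  # hypothetical deployment\nsearches = Infos.model_validate(resp.json())\n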

"},{"location":"api/titiler/pgstac/model/#ancestors-in-mro_2","title":"Ancestors (in MRO)","text":"
  • pydantic.main.BaseModel
"},{"location":"api/titiler/pgstac/model/#class-variables_2","title":"Class variables","text":"
model_computed_fields\n
model_config\n
model_fields\n
"},{"location":"api/titiler/pgstac/model/#static-methods_2","title":"Static methods","text":""},{"location":"api/titiler/pgstac/model/#construct_2","title":"construct","text":"
def construct(\n    _fields_set: 'set[str] | None' = None,\n    **values: 'Any'\n) -> 'Model'\n
"},{"location":"api/titiler/pgstac/model/#from_orm_2","title":"from_orm","text":"
def from_orm(\n    obj: 'Any'\n) -> 'Model'\n
"},{"location":"api/titiler/pgstac/model/#model_construct_2","title":"model_construct","text":"
def model_construct(\n    _fields_set: 'set[str] | None' = None,\n    **values: 'Any'\n) -> 'Model'\n

Creates a new instance of the Model class with validated data.

Creates a new model setting __dict__ and __pydantic_fields_set__ from trusted or pre-validated data. Default values are respected, but no other validation is performed.

Note

model_construct() generally respects the model_config.extra setting on the provided model. That is, if model_config.extra == 'allow', then all extra passed values are added to the model instance's __dict__ and __pydantic_extra__ fields. If model_config.extra == 'ignore' (the default), then all extra passed values are ignored. Because no validation is performed with a call to model_construct(), having model_config.extra == 'forbid' does not result in an error if extra values are passed, but they will be ignored.

Parameters:

Name Type Description Default _fields_set None The set of field names accepted for the Model instance. None values None Trusted or pre-validated data dictionary. None

Returns:

Type Description None A new instance of the Model class with validated data."},{"location":"api/titiler/pgstac/model/#model_json_schema_2","title":"model_json_schema","text":"
def model_json_schema(\n    by_alias: 'bool' = True,\n    ref_template: 'str' = '#/$defs/{model}',\n    schema_generator: 'type[GenerateJsonSchema]' = <class 'pydantic.json_schema.GenerateJsonSchema'>,\n    mode: 'JsonSchemaMode' = 'validation'\n) -> 'dict[str, Any]'\n

Generates a JSON schema for a model class.

Parameters:

Name Type Description Default by_alias None Whether to use attribute aliases or not. None ref_template None The reference template. None schema_generator None To override the logic used to generate the JSON schema, as a subclass of GenerateJsonSchema with your desired modifications None mode None The mode in which to generate the schema. None

Returns:

Type Description None The JSON schema for the given model class."},{"location":"api/titiler/pgstac/model/#model_parametrized_name_2","title":"model_parametrized_name","text":"
def model_parametrized_name(\n    params: 'tuple[type[Any], ...]'\n) -> 'str'\n

Compute the class name for parametrizations of generic classes.

This method can be overridden to achieve a custom naming scheme for generic BaseModels.

Parameters:

Name Type Description Default params None Tuple of types of the class. Given a generic class Model with 2 type variables and a concrete model Model[str, int], the value (str, int) would be passed to params. None

Returns:

Type Description None String representing the new class where params are passed to cls as type variables.

Raises:

Type Description TypeError Raised when trying to generate concrete names for non-generic models."},{"location":"api/titiler/pgstac/model/#model_rebuild_2","title":"model_rebuild","text":"
def model_rebuild(\n    *,\n    force: 'bool' = False,\n    raise_errors: 'bool' = True,\n    _parent_namespace_depth: 'int' = 2,\n    _types_namespace: 'dict[str, Any] | None' = None\n) -> 'bool | None'\n

Try to rebuild the pydantic-core schema for the model.

This may be necessary when one of the annotations is a ForwardRef which could not be resolved during the initial attempt to build the schema, and automatic rebuilding fails.

Parameters:

Name Type Description Default force None Whether to force the rebuilding of the model schema, defaults to False. None raise_errors None Whether to raise errors, defaults to True. None _parent_namespace_depth None The depth level of the parent namespace, defaults to 2. None _types_namespace None The types namespace, defaults to None. None

Returns:

Type Description None Returns None if the schema is already \"complete\" and rebuilding was not required. If rebuilding was required, returns True if rebuilding was successful, otherwise False."},{"location":"api/titiler/pgstac/model/#model_validate_2","title":"model_validate","text":"
def model_validate(\n    obj: 'Any',\n    *,\n    strict: 'bool | None' = None,\n    from_attributes: 'bool | None' = None,\n    context: 'dict[str, Any] | None' = None\n) -> 'Model'\n

Validate a pydantic model instance.

Parameters:

Name Type Description Default obj None The object to validate. None strict None Whether to enforce types strictly. None from_attributes None Whether to extract data from object attributes. None context None Additional context to pass to the validator. None

Returns:

Type Description None The validated model instance.

Raises:

Type Description ValidationError If the object could not be validated."},{"location":"api/titiler/pgstac/model/#model_validate_json_2","title":"model_validate_json","text":"
def model_validate_json(\n    json_data: 'str | bytes | bytearray',\n    *,\n    strict: 'bool | None' = None,\n    context: 'dict[str, Any] | None' = None\n) -> 'Model'\n

Usage docs: docs.pydantic.dev/2.7/concepts/json/#json-parsing

Validate the given JSON data against the Pydantic model.

Parameters:

Name Type Description Default json_data None The JSON data to validate. None strict None Whether to enforce types strictly. None context None Extra variables to pass to the validator. None

Returns:

Type Description None The validated Pydantic model.

Raises:

Type Description ValueError If json_data is not a JSON string."},{"location":"api/titiler/pgstac/model/#model_validate_strings_2","title":"model_validate_strings","text":"
def model_validate_strings(\n    obj: 'Any',\n    *,\n    strict: 'bool | None' = None,\n    context: 'dict[str, Any] | None' = None\n) -> 'Model'\n

Validate the given object, which contains string data, against the Pydantic model.

Parameters:

Name Type Description Default obj None The object containing string data to validate. None strict None Whether to enforce types strictly. None context None Extra variables to pass to the validator. None

Returns:

Type Description None The validated Pydantic model."},{"location":"api/titiler/pgstac/model/#parse_file_2","title":"parse_file","text":"
def parse_file(\n    path: 'str | Path',\n    *,\n    content_type: 'str | None' = None,\n    encoding: 'str' = 'utf8',\n    proto: 'DeprecatedParseProtocol | None' = None,\n    allow_pickle: 'bool' = False\n) -> 'Model'\n
"},{"location":"api/titiler/pgstac/model/#parse_obj_2","title":"parse_obj","text":"
def parse_obj(\n    obj: 'Any'\n) -> 'Model'\n
"},{"location":"api/titiler/pgstac/model/#parse_raw_2","title":"parse_raw","text":"
def parse_raw(\n    b: 'str | bytes',\n    *,\n    content_type: 'str | None' = None,\n    encoding: 'str' = 'utf8',\n    proto: 'DeprecatedParseProtocol | None' = None,\n    allow_pickle: 'bool' = False\n) -> 'Model'\n
"},{"location":"api/titiler/pgstac/model/#schema_2","title":"schema","text":"
def schema(\n    by_alias: 'bool' = True,\n    ref_template: 'str' = '#/$defs/{model}'\n) -> 'typing.Dict[str, Any]'\n
"},{"location":"api/titiler/pgstac/model/#schema_json_2","title":"schema_json","text":"
def schema_json(\n    *,\n    by_alias: 'bool' = True,\n    ref_template: 'str' = '#/$defs/{model}',\n    **dumps_kwargs: 'Any'\n) -> 'str'\n
"},{"location":"api/titiler/pgstac/model/#update_forward_refs_2","title":"update_forward_refs","text":"
def update_forward_refs(\n    **localns: 'Any'\n) -> 'None'\n
"},{"location":"api/titiler/pgstac/model/#validate_2","title":"validate","text":"
def validate(\n    value: 'Any'\n) -> 'Model'\n
"},{"location":"api/titiler/pgstac/model/#instance-variables_2","title":"Instance variables","text":"
model_extra\n

Get extra fields set during validation.

model_fields_set\n

Returns the set of fields that have been explicitly set on this model instance.

"},{"location":"api/titiler/pgstac/model/#methods_2","title":"Methods","text":""},{"location":"api/titiler/pgstac/model/#copy_2","title":"copy","text":"
def copy(\n    self: 'Model',\n    *,\n    include: 'AbstractSetIntStr | MappingIntStrAny | None' = None,\n    exclude: 'AbstractSetIntStr | MappingIntStrAny | None' = None,\n    update: 'typing.Dict[str, Any] | None' = None,\n    deep: 'bool' = False\n) -> 'Model'\n

Returns a copy of the model.

Deprecated

This method is now deprecated; use model_copy instead.

If you need include or exclude, use:

data = self.model_dump(include=include, exclude=exclude, round_trip=True)\ndata = {**data, **(update or {})}\ncopied = self.model_validate(data)\n

Parameters:

Name Type Description Default include None Optional set or mapping specifying which fields to include in the copied model. None exclude None Optional set or mapping specifying which fields to exclude in the copied model. None update None Optional dictionary of field-value pairs to override field values in the copied model. None deep None If True, the values of fields that are Pydantic models will be deep-copied. None

Returns:

Type Description None A copy of the model with included, excluded and updated fields as specified."},{"location":"api/titiler/pgstac/model/#dict_2","title":"dict","text":"
def dict(\n    self,\n    *,\n    include: 'IncEx' = None,\n    exclude: 'IncEx' = None,\n    by_alias: 'bool' = False,\n    exclude_unset: 'bool' = False,\n    exclude_defaults: 'bool' = False,\n    exclude_none: 'bool' = False\n) -> 'typing.Dict[str, Any]'\n
"},{"location":"api/titiler/pgstac/model/#json_2","title":"json","text":"
def json(\n    self,\n    *,\n    include: 'IncEx' = None,\n    exclude: 'IncEx' = None,\n    by_alias: 'bool' = False,\n    exclude_unset: 'bool' = False,\n    exclude_defaults: 'bool' = False,\n    exclude_none: 'bool' = False,\n    encoder: 'typing.Callable[[Any], Any] | None' = PydanticUndefined,\n    models_as_dict: 'bool' = PydanticUndefined,\n    **dumps_kwargs: 'Any'\n) -> 'str'\n
"},{"location":"api/titiler/pgstac/model/#model_copy_2","title":"model_copy","text":"
def model_copy(\n    self: 'Model',\n    *,\n    update: 'dict[str, Any] | None' = None,\n    deep: 'bool' = False\n) -> 'Model'\n

Usage docs: docs.pydantic.dev/2.7/concepts/serialization/#model_copy

Returns a copy of the model.

Parameters:

Name Type Description Default update None Values to change/add in the new model. Note: the data is not validated before creating the new model. You should trust this data. None deep None Set to True to make a deep copy of the model. None

Returns:

Type Description None New model instance."},{"location":"api/titiler/pgstac/model/#model_dump_2","title":"model_dump","text":"
def model_dump(\n    self,\n    *,\n    mode: \"Literal['json', 'python'] | str\" = 'python',\n    include: 'IncEx' = None,\n    exclude: 'IncEx' = None,\n    context: 'dict[str, Any] | None' = None,\n    by_alias: 'bool' = False,\n    exclude_unset: 'bool' = False,\n    exclude_defaults: 'bool' = False,\n    exclude_none: 'bool' = False,\n    round_trip: 'bool' = False,\n    warnings: \"bool | Literal['none', 'warn', 'error']\" = True,\n    serialize_as_any: 'bool' = False\n) -> 'dict[str, Any]'\n

Usage docs: docs.pydantic.dev/2.7/concepts/serialization/#modelmodel_dump

Generate a dictionary representation of the model, optionally specifying which fields to include or exclude.

Parameters:

Name Type Description Default mode None The mode in which to_python should run.If mode is 'json', the output will only contain JSON serializable types.If mode is 'python', the output may contain non-JSON-serializable Python objects. None include None A set of fields to include in the output. None exclude None A set of fields to exclude from the output. None context None Additional context to pass to the serializer. None by_alias None Whether to use the field's alias in the dictionary key if defined. None exclude_unset None Whether to exclude fields that have not been explicitly set. None exclude_defaults None Whether to exclude fields that are set to their default value. None exclude_none None Whether to exclude fields that have a value of None. None round_trip None If True, dumped values should be valid as input for non-idempotent types such as Json[T]. None warnings None How to handle serialization errors. False/\"none\" ignores them, True/\"warn\" logs errors,\"error\" raises a [PydanticSerializationError][pydantic_core.PydanticSerializationError]. None serialize_as_any None Whether to serialize fields with duck-typing serialization behavior. None

Returns:

Type Description None A dictionary representation of the model."},{"location":"api/titiler/pgstac/model/#model_dump_json_2","title":"model_dump_json","text":"
def model_dump_json(\n    self,\n    *,\n    indent: 'int | None' = None,\n    include: 'IncEx' = None,\n    exclude: 'IncEx' = None,\n    context: 'dict[str, Any] | None' = None,\n    by_alias: 'bool' = False,\n    exclude_unset: 'bool' = False,\n    exclude_defaults: 'bool' = False,\n    exclude_none: 'bool' = False,\n    round_trip: 'bool' = False,\n    warnings: \"bool | Literal['none', 'warn', 'error']\" = True,\n    serialize_as_any: 'bool' = False\n) -> 'str'\n

Usage docs: docs.pydantic.dev/2.7/concepts/serialization/#modelmodel_dump_json

Generates a JSON representation of the model using Pydantic's to_json method.

Parameters:

Name Type Description Default indent None Indentation to use in the JSON output. If None is passed, the output will be compact. None include None Field(s) to include in the JSON output. None exclude None Field(s) to exclude from the JSON output. None context None Additional context to pass to the serializer. None by_alias None Whether to serialize using field aliases. None exclude_unset None Whether to exclude fields that have not been explicitly set. None exclude_defaults None Whether to exclude fields that are set to their default value. None exclude_none None Whether to exclude fields that have a value of None. None round_trip None If True, dumped values should be valid as input for non-idempotent types such as Json[T]. None warnings None How to handle serialization errors. False/\"none\" ignores them, True/\"warn\" logs errors,\"error\" raises a [PydanticSerializationError][pydantic_core.PydanticSerializationError]. None serialize_as_any None Whether to serialize fields with duck-typing serialization behavior. None

Returns:

Type Description None A JSON string representation of the model."},{"location":"api/titiler/pgstac/model/#model_post_init_2","title":"model_post_init","text":"
def model_post_init(\n    self,\n    _BaseModel__context: 'Any'\n) -> 'None'\n

Override this method to perform additional initialization after __init__ and model_construct.

This is useful if you want to do some validation that requires the entire model to be initialized.

"},{"location":"api/titiler/pgstac/model/#link","title":"Link","text":"
class Link(\n    /,\n    **data: 'Any'\n)\n

Link model.

Ref: github.com/opengeospatial/ogcapi-tiles/blob/master/openapi/schemas/common-core/link.yaml

Code generated using koxudaxi/datamodel-code-generator
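
A small hedged example of building a link by hand; href, rel and title follow the OGC common-core link schema referenced above:

link = Link(\n    href='https://example.com/searches/xyz/tilejson.json',  # hypothetical target\n    rel='tilejson',\n    title='TileJSON document',\n)\nprint(link.model_dump_json(exclude_none=True))\n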

"},{"location":"api/titiler/pgstac/model/#ancestors-in-mro_3","title":"Ancestors (in MRO)","text":"
  • pydantic.main.BaseModel
"},{"location":"api/titiler/pgstac/model/#class-variables_3","title":"Class variables","text":"
model_computed_fields\n
model_config\n
model_fields\n
"},{"location":"api/titiler/pgstac/model/#static-methods_3","title":"Static methods","text":""},{"location":"api/titiler/pgstac/model/#construct_3","title":"construct","text":"
def construct(\n    _fields_set: 'set[str] | None' = None,\n    **values: 'Any'\n) -> 'Model'\n
"},{"location":"api/titiler/pgstac/model/#from_orm_3","title":"from_orm","text":"
def from_orm(\n    obj: 'Any'\n) -> 'Model'\n
"},{"location":"api/titiler/pgstac/model/#model_construct_3","title":"model_construct","text":"
def model_construct(\n    _fields_set: 'set[str] | None' = None,\n    **values: 'Any'\n) -> 'Model'\n

Creates a new instance of the Model class with validated data.

Creates a new model setting __dict__ and __pydantic_fields_set__ from trusted or pre-validated data. Default values are respected, but no other validation is performed.

Note

model_construct() generally respects the model_config.extra setting on the provided model. That is, if model_config.extra == 'allow', then all extra passed values are added to the model instance's __dict__ and __pydantic_extra__ fields. If model_config.extra == 'ignore' (the default), then all extra passed values are ignored. Because no validation is performed with a call to model_construct(), having model_config.extra == 'forbid' does not result in an error if extra values are passed, but they will be ignored.

Parameters:

Name Type Description Default _fields_set None The set of field names accepted for the Model instance. None values None Trusted or pre-validated data dictionary. None

Returns:

Type Description None A new instance of the Model class with validated data."},{"location":"api/titiler/pgstac/model/#model_json_schema_3","title":"model_json_schema","text":"
def model_json_schema(\n    by_alias: 'bool' = True,\n    ref_template: 'str' = '#/$defs/{model}',\n    schema_generator: 'type[GenerateJsonSchema]' = <class 'pydantic.json_schema.GenerateJsonSchema'>,\n    mode: 'JsonSchemaMode' = 'validation'\n) -> 'dict[str, Any]'\n

Generates a JSON schema for a model class.

Parameters:

Name Type Description Default by_alias None Whether to use attribute aliases or not. None ref_template None The reference template. None schema_generator None To override the logic used to generate the JSON schema, as a subclass of GenerateJsonSchema with your desired modifications None mode None The mode in which to generate the schema. None

Returns:

Type Description None The JSON schema for the given model class."},{"location":"api/titiler/pgstac/model/#model_parametrized_name_3","title":"model_parametrized_name","text":"
def model_parametrized_name(\n    params: 'tuple[type[Any], ...]'\n) -> 'str'\n

Compute the class name for parametrizations of generic classes.

This method can be overridden to achieve a custom naming scheme for generic BaseModels.

Parameters:

Name Type Description Default params None Tuple of types of the class. Given a generic class Model with 2 type variables and a concrete model Model[str, int], the value (str, int) would be passed to params. None

Returns:

Type Description None String representing the new class where params are passed to cls as type variables.

Raises:

Type Description TypeError Raised when trying to generate concrete names for non-generic models."},{"location":"api/titiler/pgstac/model/#model_rebuild_3","title":"model_rebuild","text":"
def model_rebuild(\n    *,\n    force: 'bool' = False,\n    raise_errors: 'bool' = True,\n    _parent_namespace_depth: 'int' = 2,\n    _types_namespace: 'dict[str, Any] | None' = None\n) -> 'bool | None'\n

Try to rebuild the pydantic-core schema for the model.

This may be necessary when one of the annotations is a ForwardRef which could not be resolved during the initial attempt to build the schema, and automatic rebuilding fails.

Parameters:

Name Type Description Default force None Whether to force the rebuilding of the model schema, defaults to False. None raise_errors None Whether to raise errors, defaults to True. None _parent_namespace_depth None The depth level of the parent namespace, defaults to 2. None _types_namespace None The types namespace, defaults to None. None

Returns:

Type Description None Returns None if the schema is already \"complete\" and rebuilding was not required. If rebuilding was required, returns True if rebuilding was successful, otherwise False."},{"location":"api/titiler/pgstac/model/#model_validate_3","title":"model_validate","text":"
def model_validate(\n    obj: 'Any',\n    *,\n    strict: 'bool | None' = None,\n    from_attributes: 'bool | None' = None,\n    context: 'dict[str, Any] | None' = None\n) -> 'Model'\n

Validate a pydantic model instance.

Parameters:

Name Type Description Default obj None The object to validate. None strict None Whether to enforce types strictly. None from_attributes None Whether to extract data from object attributes. None context None Additional context to pass to the validator. None

Returns:

Type Description None The validated model instance.

Raises:

Type Description ValidationError If the object could not be validated."},{"location":"api/titiler/pgstac/model/#model_validate_json_3","title":"model_validate_json","text":"
def model_validate_json(\n    json_data: 'str | bytes | bytearray',\n    *,\n    strict: 'bool | None' = None,\n    context: 'dict[str, Any] | None' = None\n) -> 'Model'\n

Usage docs: docs.pydantic.dev/2.7/concepts/json/#json-parsing

Validate the given JSON data against the Pydantic model.

Parameters:

Name Type Description Default json_data None The JSON data to validate. None strict None Whether to enforce types strictly. None context None Extra variables to pass to the validator. None

Returns:

Type Description None The validated Pydantic model.

Raises:

Type Description ValueError If json_data is not a JSON string."},{"location":"api/titiler/pgstac/model/#model_validate_strings_3","title":"model_validate_strings","text":"
def model_validate_strings(\n    obj: 'Any',\n    *,\n    strict: 'bool | None' = None,\n    context: 'dict[str, Any] | None' = None\n) -> 'Model'\n

Validate the given object, which contains string data, against the Pydantic model.

Parameters:

Name Type Description Default obj None The object containing string data to validate. None strict None Whether to enforce types strictly. None context None Extra variables to pass to the validator. None

Returns:

Type Description None The validated Pydantic model."},{"location":"api/titiler/pgstac/model/#parse_file_3","title":"parse_file","text":"
def parse_file(\n    path: 'str | Path',\n    *,\n    content_type: 'str | None' = None,\n    encoding: 'str' = 'utf8',\n    proto: 'DeprecatedParseProtocol | None' = None,\n    allow_pickle: 'bool' = False\n) -> 'Model'\n
"},{"location":"api/titiler/pgstac/model/#parse_obj_3","title":"parse_obj","text":"
def parse_obj(\n    obj: 'Any'\n) -> 'Model'\n
"},{"location":"api/titiler/pgstac/model/#parse_raw_3","title":"parse_raw","text":"
def parse_raw(\n    b: 'str | bytes',\n    *,\n    content_type: 'str | None' = None,\n    encoding: 'str' = 'utf8',\n    proto: 'DeprecatedParseProtocol | None' = None,\n    allow_pickle: 'bool' = False\n) -> 'Model'\n
"},{"location":"api/titiler/pgstac/model/#schema_3","title":"schema","text":"
def schema(\n    by_alias: 'bool' = True,\n    ref_template: 'str' = '#/$defs/{model}'\n) -> 'typing.Dict[str, Any]'\n
"},{"location":"api/titiler/pgstac/model/#schema_json_3","title":"schema_json","text":"
def schema_json(\n    *,\n    by_alias: 'bool' = True,\n    ref_template: 'str' = '#/$defs/{model}',\n    **dumps_kwargs: 'Any'\n) -> 'str'\n
"},{"location":"api/titiler/pgstac/model/#update_forward_refs_3","title":"update_forward_refs","text":"
def update_forward_refs(\n    **localns: 'Any'\n) -> 'None'\n
"},{"location":"api/titiler/pgstac/model/#validate_3","title":"validate","text":"
def validate(\n    value: 'Any'\n) -> 'Model'\n
"},{"location":"api/titiler/pgstac/model/#instance-variables_3","title":"Instance variables","text":"
model_extra\n

Get extra fields set during validation.

model_fields_set\n

Returns the set of fields that have been explicitly set on this model instance.
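For illustration, a minimal sketch (using a toy Pydantic model rather than one from titiler.pgstac) of how these two properties behave:

from pydantic import BaseModel, ConfigDict

class Example(BaseModel):
    # extra="allow" so unknown keys are kept and exposed through model_extra
    model_config = ConfigDict(extra="allow")
    name: str = "default"

m = Example(name="custom", other="kept")
print(m.model_extra)                   # {'other': 'kept'} -- extra fields captured at validation
print("name" in m.model_fields_set)    # True -- 'name' was explicitly passed
print(Example().model_fields_set)      # set() -- nothing was explicitly set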

"},{"location":"api/titiler/pgstac/model/#methods_3","title":"Methods","text":""},{"location":"api/titiler/pgstac/model/#copy_3","title":"copy","text":"
def copy(\n    self: 'Model',\n    *,\n    include: 'AbstractSetIntStr | MappingIntStrAny | None' = None,\n    exclude: 'AbstractSetIntStr | MappingIntStrAny | None' = None,\n    update: 'typing.Dict[str, Any] | None' = None,\n    deep: 'bool' = False\n) -> 'Model'\n

Returns a copy of the model.

Deprecated

This method is now deprecated; use model_copy instead.

If you need include or exclude, use:

data = self.model_dump(include=include, exclude=exclude, round_trip=True)\ndata = {**data, **(update or {})}\ncopied = self.model_validate(data)\n

Parameters:

Name Type Description Default include None Optional set or mapping specifying which fields to include in the copied model. None exclude None Optional set or mapping specifying which fields to exclude in the copied model. None update None Optional dictionary of field-value pairs to override field values in the copied model. None deep None If True, the values of fields that are Pydantic models will be deep-copied. None

Returns:

Type Description None A copy of the model with included, excluded and updated fields as specified."},{"location":"api/titiler/pgstac/model/#dict_3","title":"dict","text":"
def dict(\n    self,\n    *,\n    include: 'IncEx' = None,\n    exclude: 'IncEx' = None,\n    by_alias: 'bool' = False,\n    exclude_unset: 'bool' = False,\n    exclude_defaults: 'bool' = False,\n    exclude_none: 'bool' = False\n) -> 'typing.Dict[str, Any]'\n
"},{"location":"api/titiler/pgstac/model/#json_3","title":"json","text":"
def json(\n    self,\n    *,\n    include: 'IncEx' = None,\n    exclude: 'IncEx' = None,\n    by_alias: 'bool' = False,\n    exclude_unset: 'bool' = False,\n    exclude_defaults: 'bool' = False,\n    exclude_none: 'bool' = False,\n    encoder: 'typing.Callable[[Any], Any] | None' = PydanticUndefined,\n    models_as_dict: 'bool' = PydanticUndefined,\n    **dumps_kwargs: 'Any'\n) -> 'str'\n
"},{"location":"api/titiler/pgstac/model/#model_copy_3","title":"model_copy","text":"
def model_copy(\n    self: 'Model',\n    *,\n    update: 'dict[str, Any] | None' = None,\n    deep: 'bool' = False\n) -> 'Model'\n

Usage docs: docs.pydantic.dev/2.7/concepts/serialization/#model_copy

Returns a copy of the model.

Parameters:

Name Type Description Default update None Values to change/add in the new model. Note: the data is not validated before creating the new model. You should trust this data. None deep None Set to True to make a deep copy of the model. None

Returns:

Type Description None New model instance."},{"location":"api/titiler/pgstac/model/#model_dump_3","title":"model_dump","text":"
def model_dump(\n    self,\n    *,\n    mode: \"Literal['json', 'python'] | str\" = 'python',\n    include: 'IncEx' = None,\n    exclude: 'IncEx' = None,\n    context: 'dict[str, Any] | None' = None,\n    by_alias: 'bool' = False,\n    exclude_unset: 'bool' = False,\n    exclude_defaults: 'bool' = False,\n    exclude_none: 'bool' = False,\n    round_trip: 'bool' = False,\n    warnings: \"bool | Literal['none', 'warn', 'error']\" = True,\n    serialize_as_any: 'bool' = False\n) -> 'dict[str, Any]'\n

Usage docs: docs.pydantic.dev/2.7/concepts/serialization/#modelmodel_dump

Generate a dictionary representation of the model, optionally specifying which fields to include or exclude.

Parameters:

Name Type Description Default mode None The mode in which to_python should run. If mode is 'json', the output will only contain JSON serializable types. If mode is 'python', the output may contain non-JSON-serializable Python objects. None include None A set of fields to include in the output. None exclude None A set of fields to exclude from the output. None context None Additional context to pass to the serializer. None by_alias None Whether to use the field's alias in the dictionary key if defined. None exclude_unset None Whether to exclude fields that have not been explicitly set. None exclude_defaults None Whether to exclude fields that are set to their default value. None exclude_none None Whether to exclude fields that have a value of None. None round_trip None If True, dumped values should be valid as input for non-idempotent types such as Json[T]. None warnings None How to handle serialization errors. False/\"none\" ignores them, True/\"warn\" logs errors, \"error\" raises a [PydanticSerializationError][pydantic_core.PydanticSerializationError]. None serialize_as_any None Whether to serialize fields with duck-typing serialization behavior. None

Returns:

Type Description None A dictionary representation of the model."},{"location":"api/titiler/pgstac/model/#model_dump_json_3","title":"model_dump_json","text":"
def model_dump_json(\n    self,\n    *,\n    indent: 'int | None' = None,\n    include: 'IncEx' = None,\n    exclude: 'IncEx' = None,\n    context: 'dict[str, Any] | None' = None,\n    by_alias: 'bool' = False,\n    exclude_unset: 'bool' = False,\n    exclude_defaults: 'bool' = False,\n    exclude_none: 'bool' = False,\n    round_trip: 'bool' = False,\n    warnings: \"bool | Literal['none', 'warn', 'error']\" = True,\n    serialize_as_any: 'bool' = False\n) -> 'str'\n

Usage docs: docs.pydantic.dev/2.7/concepts/serialization/#modelmodel_dump_json

Generates a JSON representation of the model using Pydantic's to_json method.

Parameters:

Name Type Description Default indent None Indentation to use in the JSON output. If None is passed, the output will be compact. None include None Field(s) to include in the JSON output. None exclude None Field(s) to exclude from the JSON output. None context None Additional context to pass to the serializer. None by_alias None Whether to serialize using field aliases. None exclude_unset None Whether to exclude fields that have not been explicitly set. None exclude_defaults None Whether to exclude fields that are set to their default value. None exclude_none None Whether to exclude fields that have a value of None. None round_trip None If True, dumped values should be valid as input for non-idempotent types such as Json[T]. None warnings None How to handle serialization errors. False/\"none\" ignores them, True/\"warn\" logs errors, \"error\" raises a [PydanticSerializationError][pydantic_core.PydanticSerializationError]. None serialize_as_any None Whether to serialize fields with duck-typing serialization behavior. None

Returns:

Type Description None A JSON string representation of the model."},{"location":"api/titiler/pgstac/model/#model_post_init_3","title":"model_post_init","text":"
def model_post_init(\n    self,\n    _BaseModel__context: 'Any'\n) -> 'None'\n

Override this method to perform additional initialization after __init__ and model_construct.

This is useful if you want to do some validation that requires the entire model to be initialized.
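As a minimal sketch (a toy model, not part of titiler.pgstac) of how such an override looks:

from pydantic import BaseModel

class Bounds(BaseModel):
    minx: float
    maxx: float

    def model_post_init(self, __context) -> None:
        # Runs once all fields are populated, so cross-field checks are possible here.
        if self.minx > self.maxx:
            raise ValueError("minx must be <= maxx")

Bounds(minx=0.0, maxx=10.0)    # ok
# Bounds(minx=10.0, maxx=0.0)  # would raise ValueError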

"},{"location":"api/titiler/pgstac/model/#metadata","title":"Metadata","text":"
class Metadata(\n    /,\n    **data: 'Any'\n)\n

Metadata Model.
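A rough sketch of constructing this model when describing a registered search; the field names used here (name, minzoom, maxzoom, defaults) are assumptions, so check the class definition in titiler.pgstac.model for the authoritative schema:

from titiler.pgstac.model import Metadata

meta = Metadata(
    name="Sentinel-2 mosaic",   # assumed: human-readable name for the search
    minzoom=8,                  # assumed: zoom hints for the tiler
    maxzoom=14,
    defaults={"true_color": {"assets": ["B04", "B03", "B02"]}},  # assumed: named render presets
)
print(meta.model_dump(exclude_none=True))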

"},{"location":"api/titiler/pgstac/model/#ancestors-in-mro_4","title":"Ancestors (in MRO)","text":"
  • pydantic.main.BaseModel
"},{"location":"api/titiler/pgstac/model/#class-variables_4","title":"Class variables","text":"
model_computed_fields\n
model_config\n
model_fields\n
"},{"location":"api/titiler/pgstac/model/#static-methods_4","title":"Static methods","text":""},{"location":"api/titiler/pgstac/model/#construct_4","title":"construct","text":"
def construct(\n    _fields_set: 'set[str] | None' = None,\n    **values: 'Any'\n) -> 'Model'\n
"},{"location":"api/titiler/pgstac/model/#from_orm_4","title":"from_orm","text":"
def from_orm(\n    obj: 'Any'\n) -> 'Model'\n
"},{"location":"api/titiler/pgstac/model/#model_construct_4","title":"model_construct","text":"
def model_construct(\n    _fields_set: 'set[str] | None' = None,\n    **values: 'Any'\n) -> 'Model'\n

Creates a new instance of the Model class with validated data.

Creates a new model setting __dict__ and __pydantic_fields_set__ from trusted or pre-validated data. Default values are respected, but no other validation is performed.

Note

model_construct() generally respects the model_config.extra setting on the provided model. That is, if model_config.extra == 'allow', then all extra passed values are added to the model instance's __dict__ and __pydantic_extra__ fields. If model_config.extra == 'ignore' (the default), then all extra passed values are ignored. Because no validation is performed with a call to model_construct(), having model_config.extra == 'forbid' does not result in an error if extra values are passed, but they will be ignored.

Parameters:

Name Type Description Default _fields_set None The set of field names accepted for the Model instance. None values None Trusted or pre-validated data dictionary. None

Returns:

Type Description None A new instance of the Model class with validated data."},{"location":"api/titiler/pgstac/model/#model_json_schema_4","title":"model_json_schema","text":"
def model_json_schema(\n    by_alias: 'bool' = True,\n    ref_template: 'str' = '#/$defs/{model}',\n    schema_generator: 'type[GenerateJsonSchema]' = <class 'pydantic.json_schema.GenerateJsonSchema'>,\n    mode: 'JsonSchemaMode' = 'validation'\n) -> 'dict[str, Any]'\n

Generates a JSON schema for a model class.

Parameters:

Name Type Description Default by_alias None Whether to use attribute aliases or not. None ref_template None The reference template. None schema_generator None To override the logic used to generate the JSON schema, as a subclass of GenerateJsonSchema with your desired modifications. None mode None The mode in which to generate the schema. None

Returns:

Type Description None The JSON schema for the given model class."},{"location":"api/titiler/pgstac/model/#model_parametrized_name_4","title":"model_parametrized_name","text":"
def model_parametrized_name(\n    params: 'tuple[type[Any], ...]'\n) -> 'str'\n

Compute the class name for parametrizations of generic classes.

This method can be overridden to achieve a custom naming scheme for generic BaseModels.

Parameters:

Name Type Description Default params None Tuple of types of the class. Given a generic class Model with 2 type variables and a concrete model Model[str, int], the value (str, int) would be passed to params. None

Returns:

Type Description None String representing the new class where params are passed to cls as type variables.

Raises:

Type Description TypeError Raised when trying to generate concrete names for non-generic models."},{"location":"api/titiler/pgstac/model/#model_rebuild_4","title":"model_rebuild","text":"
def model_rebuild(\n    *,\n    force: 'bool' = False,\n    raise_errors: 'bool' = True,\n    _parent_namespace_depth: 'int' = 2,\n    _types_namespace: 'dict[str, Any] | None' = None\n) -> 'bool | None'\n

Try to rebuild the pydantic-core schema for the model.

This may be necessary when one of the annotations is a ForwardRef which could not be resolved during the initial attempt to build the schema, and automatic rebuilding fails.

Parameters:

Name Type Description Default force None Whether to force the rebuilding of the model schema, defaults to False. None raise_errors None Whether to raise errors, defaults to True. None _parent_namespace_depth None The depth level of the parent namespace, defaults to 2. None _types_namespace None The types namespace, defaults to None. None

Returns:

Type Description None Returns None if the schema is already \"complete\" and rebuilding was not required. If rebuilding was required, returns True if rebuilding was successful, otherwise False."},{"location":"api/titiler/pgstac/model/#model_validate_4","title":"model_validate","text":"
def model_validate(\n    obj: 'Any',\n    *,\n    strict: 'bool | None' = None,\n    from_attributes: 'bool | None' = None,\n    context: 'dict[str, Any] | None' = None\n) -> 'Model'\n

Validate a pydantic model instance.

Parameters:

Name Type Description Default obj None The object to validate. None strict None Whether to enforce types strictly. None from_attributes None Whether to extract data from object attributes. None context None Additional context to pass to the validator. None

Returns:

Type Description None The validated model instance.

Raises:

Type Description ValidationError If the object could not be validated."},{"location":"api/titiler/pgstac/model/#model_validate_json_4","title":"model_validate_json","text":"
def model_validate_json(\n    json_data: 'str | bytes | bytearray',\n    *,\n    strict: 'bool | None' = None,\n    context: 'dict[str, Any] | None' = None\n) -> 'Model'\n

Usage docs: docs.pydantic.dev/2.7/concepts/json/#json-parsing

Validate the given JSON data against the Pydantic model.

Parameters:

Name Type Description Default json_data None The JSON data to validate. None strict None Whether to enforce types strictly. None context None Extra variables to pass to the validator. None

Returns:

Type Description None The validated Pydantic model.

Raises:

Type Description ValueError If json_data is not a JSON string."},{"location":"api/titiler/pgstac/model/#model_validate_strings_4","title":"model_validate_strings","text":"
def model_validate_strings(\n    obj: 'Any',\n    *,\n    strict: 'bool | None' = None,\n    context: 'dict[str, Any] | None' = None\n) -> 'Model'\n

Validate the given object, which contains string data, against the Pydantic model.

Parameters:

Name Type Description Default obj None The object containing string data to validate. None strict None Whether to enforce types strictly. None context None Extra variables to pass to the validator. None

Returns:

Type Description None The validated Pydantic model."},{"location":"api/titiler/pgstac/model/#parse_file_4","title":"parse_file","text":"
def parse_file(\n    path: 'str | Path',\n    *,\n    content_type: 'str | None' = None,\n    encoding: 'str' = 'utf8',\n    proto: 'DeprecatedParseProtocol | None' = None,\n    allow_pickle: 'bool' = False\n) -> 'Model'\n
"},{"location":"api/titiler/pgstac/model/#parse_obj_4","title":"parse_obj","text":"
def parse_obj(\n    obj: 'Any'\n) -> 'Model'\n
"},{"location":"api/titiler/pgstac/model/#parse_raw_4","title":"parse_raw","text":"
def parse_raw(\n    b: 'str | bytes',\n    *,\n    content_type: 'str | None' = None,\n    encoding: 'str' = 'utf8',\n    proto: 'DeprecatedParseProtocol | None' = None,\n    allow_pickle: 'bool' = False\n) -> 'Model'\n
"},{"location":"api/titiler/pgstac/model/#schema_4","title":"schema","text":"
def schema(\n    by_alias: 'bool' = True,\n    ref_template: 'str' = '#/$defs/{model}'\n) -> 'typing.Dict[str, Any]'\n
"},{"location":"api/titiler/pgstac/model/#schema_json_4","title":"schema_json","text":"
def schema_json(\n    *,\n    by_alias: 'bool' = True,\n    ref_template: 'str' = '#/$defs/{model}',\n    **dumps_kwargs: 'Any'\n) -> 'str'\n
"},{"location":"api/titiler/pgstac/model/#update_forward_refs_4","title":"update_forward_refs","text":"
def update_forward_refs(\n    **localns: 'Any'\n) -> 'None'\n
"},{"location":"api/titiler/pgstac/model/#validate_4","title":"validate","text":"
def validate(\n    value: 'Any'\n) -> 'Model'\n
"},{"location":"api/titiler/pgstac/model/#instance-variables_4","title":"Instance variables","text":"
defaults_params\n

Return defaults in a form compatible with TiTiler dependencies.

model_extra\n

Get extra fields set during validation.

model_fields_set\n

Returns the set of fields that have been explicitly set on this model instance.

"},{"location":"api/titiler/pgstac/model/#methods_4","title":"Methods","text":""},{"location":"api/titiler/pgstac/model/#copy_4","title":"copy","text":"
def copy(\n    self: 'Model',\n    *,\n    include: 'AbstractSetIntStr | MappingIntStrAny | None' = None,\n    exclude: 'AbstractSetIntStr | MappingIntStrAny | None' = None,\n    update: 'typing.Dict[str, Any] | None' = None,\n    deep: 'bool' = False\n) -> 'Model'\n

Returns a copy of the model.

Deprecated

This method is now deprecated; use model_copy instead.

If you need include or exclude, use:

data = self.model_dump(include=include, exclude=exclude, round_trip=True)\ndata = {**data, **(update or {})}\ncopied = self.model_validate(data)\n

Parameters:

Name Type Description Default include None Optional set or mapping specifying which fields to include in the copied model. None exclude None Optional set or mapping specifying which fields to exclude in the copied model. None update None Optional dictionary of field-value pairs to override field values in the copied model. None deep None If True, the values of fields that are Pydantic models will be deep-copied. None

Returns:

Type Description None A copy of the model with included, excluded and updated fields as specified."},{"location":"api/titiler/pgstac/model/#dict_4","title":"dict","text":"
def dict(\n    self,\n    *,\n    include: 'IncEx' = None,\n    exclude: 'IncEx' = None,\n    by_alias: 'bool' = False,\n    exclude_unset: 'bool' = False,\n    exclude_defaults: 'bool' = False,\n    exclude_none: 'bool' = False\n) -> 'typing.Dict[str, Any]'\n
"},{"location":"api/titiler/pgstac/model/#json_4","title":"json","text":"
def json(\n    self,\n    *,\n    include: 'IncEx' = None,\n    exclude: 'IncEx' = None,\n    by_alias: 'bool' = False,\n    exclude_unset: 'bool' = False,\n    exclude_defaults: 'bool' = False,\n    exclude_none: 'bool' = False,\n    encoder: 'typing.Callable[[Any], Any] | None' = PydanticUndefined,\n    models_as_dict: 'bool' = PydanticUndefined,\n    **dumps_kwargs: 'Any'\n) -> 'str'\n
"},{"location":"api/titiler/pgstac/model/#model_copy_4","title":"model_copy","text":"
def model_copy(\n    self: 'Model',\n    *,\n    update: 'dict[str, Any] | None' = None,\n    deep: 'bool' = False\n) -> 'Model'\n

Usage docs: docs.pydantic.dev/2.7/concepts/serialization/#model_copy

Returns a copy of the model.

Parameters:

Name Type Description Default update None Values to change/add in the new model. Note: the data is not validated before creating the new model. You should trust this data. None deep None Set to True to make a deep copy of the model. None

Returns:

Type Description None New model instance."},{"location":"api/titiler/pgstac/model/#model_dump_4","title":"model_dump","text":"
def model_dump(\n    self,\n    *,\n    mode: \"Literal['json', 'python'] | str\" = 'python',\n    include: 'IncEx' = None,\n    exclude: 'IncEx' = None,\n    context: 'dict[str, Any] | None' = None,\n    by_alias: 'bool' = False,\n    exclude_unset: 'bool' = False,\n    exclude_defaults: 'bool' = False,\n    exclude_none: 'bool' = False,\n    round_trip: 'bool' = False,\n    warnings: \"bool | Literal['none', 'warn', 'error']\" = True,\n    serialize_as_any: 'bool' = False\n) -> 'dict[str, Any]'\n

Usage docs: docs.pydantic.dev/2.7/concepts/serialization/#modelmodel_dump

Generate a dictionary representation of the model, optionally specifying which fields to include or exclude.

Parameters:

Name Type Description Default mode None The mode in which to_python should run. If mode is 'json', the output will only contain JSON serializable types. If mode is 'python', the output may contain non-JSON-serializable Python objects. None include None A set of fields to include in the output. None exclude None A set of fields to exclude from the output. None context None Additional context to pass to the serializer. None by_alias None Whether to use the field's alias in the dictionary key if defined. None exclude_unset None Whether to exclude fields that have not been explicitly set. None exclude_defaults None Whether to exclude fields that are set to their default value. None exclude_none None Whether to exclude fields that have a value of None. None round_trip None If True, dumped values should be valid as input for non-idempotent types such as Json[T]. None warnings None How to handle serialization errors. False/\"none\" ignores them, True/\"warn\" logs errors, \"error\" raises a [PydanticSerializationError][pydantic_core.PydanticSerializationError]. None serialize_as_any None Whether to serialize fields with duck-typing serialization behavior. None

Returns:

Type Description None A dictionary representation of the model."},{"location":"api/titiler/pgstac/model/#model_dump_json_4","title":"model_dump_json","text":"
def model_dump_json(\n    self,\n    *,\n    indent: 'int | None' = None,\n    include: 'IncEx' = None,\n    exclude: 'IncEx' = None,\n    context: 'dict[str, Any] | None' = None,\n    by_alias: 'bool' = False,\n    exclude_unset: 'bool' = False,\n    exclude_defaults: 'bool' = False,\n    exclude_none: 'bool' = False,\n    round_trip: 'bool' = False,\n    warnings: \"bool | Literal['none', 'warn', 'error']\" = True,\n    serialize_as_any: 'bool' = False\n) -> 'str'\n

Usage docs: docs.pydantic.dev/2.7/concepts/serialization/#modelmodel_dump_json

Generates a JSON representation of the model using Pydantic's to_json method.

Parameters:

Name Type Description Default indent None Indentation to use in the JSON output. If None is passed, the output will be compact. None include None Field(s) to include in the JSON output. None exclude None Field(s) to exclude from the JSON output. None context None Additional context to pass to the serializer. None by_alias None Whether to serialize using field aliases. None exclude_unset None Whether to exclude fields that have not been explicitly set. None exclude_defaults None Whether to exclude fields that are set to their default value. None exclude_none None Whether to exclude fields that have a value of None. None round_trip None If True, dumped values should be valid as input for non-idempotent types such as Json[T]. None warnings None How to handle serialization errors. False/\"none\" ignores them, True/\"warn\" logs errors, \"error\" raises a [PydanticSerializationError][pydantic_core.PydanticSerializationError]. None serialize_as_any None Whether to serialize fields with duck-typing serialization behavior. None

Returns:

Type Description None A JSON string representation of the model."},{"location":"api/titiler/pgstac/model/#model_post_init_4","title":"model_post_init","text":"
def model_post_init(\n    self,\n    _BaseModel__context: 'Any'\n) -> 'None'\n

Override this method to perform additional initialization after __init__ and model_construct.

This is useful if you want to do some validation that requires the entire model to be initialized.

"},{"location":"api/titiler/pgstac/model/#pgstacsearch","title":"PgSTACSearch","text":"
class PgSTACSearch(\n    /,\n    **data: 'Any'\n)\n

Search Query model.

Notes/Diff with the standard search model: 'fields' is not in this model because it is defined at the tiler level, and we don't set 'limit'.
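A hedged sketch of building a search with standard STAC search fields (collections, bbox, datetime); 'fields' and 'limit' are intentionally absent, as noted above:

from titiler.pgstac.model import PgSTACSearch

search = PgSTACSearch(
    collections=["sentinel-2-l2a"],
    bbox=(-10.0, 35.0, 5.0, 45.0),
    datetime="2024-01-01T00:00:00Z/2024-02-01T00:00:00Z",
)
# by_alias=True keeps any aliased field names when serializing for pgstac
print(search.model_dump_json(exclude_none=True, by_alias=True))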

"},{"location":"api/titiler/pgstac/model/#ancestors-in-mro_5","title":"Ancestors (in MRO)","text":"
  • pydantic.main.BaseModel
"},{"location":"api/titiler/pgstac/model/#descendants","title":"Descendants","text":"
  • titiler.pgstac.model.RegisterMosaic
"},{"location":"api/titiler/pgstac/model/#class-variables_5","title":"Class variables","text":"
model_computed_fields\n
model_config\n
model_fields\n
"},{"location":"api/titiler/pgstac/model/#static-methods_5","title":"Static methods","text":""},{"location":"api/titiler/pgstac/model/#construct_5","title":"construct","text":"
def construct(\n    _fields_set: 'set[str] | None' = None,\n    **values: 'Any'\n) -> 'Model'\n
"},{"location":"api/titiler/pgstac/model/#from_orm_5","title":"from_orm","text":"
def from_orm(\n    obj: 'Any'\n) -> 'Model'\n
"},{"location":"api/titiler/pgstac/model/#model_construct_5","title":"model_construct","text":"
def model_construct(\n    _fields_set: 'set[str] | None' = None,\n    **values: 'Any'\n) -> 'Model'\n

Creates a new instance of the Model class with validated data.

Creates a new model setting __dict__ and __pydantic_fields_set__ from trusted or pre-validated data. Default values are respected, but no other validation is performed.

Note

model_construct() generally respects the model_config.extra setting on the provided model. That is, if model_config.extra == 'allow', then all extra passed values are added to the model instance's __dict__ and __pydantic_extra__ fields. If model_config.extra == 'ignore' (the default), then all extra passed values are ignored. Because no validation is performed with a call to model_construct(), having model_config.extra == 'forbid' does not result in an error if extra values are passed, but they will be ignored.

Parameters:

Name Type Description Default _fields_set None The set of field names accepted for the Model instance. None values None Trusted or pre-validated data dictionary. None

Returns:

Type Description None A new instance of the Model class with validated data."},{"location":"api/titiler/pgstac/model/#model_json_schema_5","title":"model_json_schema","text":"
def model_json_schema(\n    by_alias: 'bool' = True,\n    ref_template: 'str' = '#/$defs/{model}',\n    schema_generator: 'type[GenerateJsonSchema]' = <class 'pydantic.json_schema.GenerateJsonSchema'>,\n    mode: 'JsonSchemaMode' = 'validation'\n) -> 'dict[str, Any]'\n

Generates a JSON schema for a model class.

Parameters:

Name Type Description Default by_alias None Whether to use attribute aliases or not. None ref_template None The reference template. None schema_generator None To override the logic used to generate the JSON schema, as a subclass of GenerateJsonSchema with your desired modifications. None mode None The mode in which to generate the schema. None

Returns:

Type Description None The JSON schema for the given model class."},{"location":"api/titiler/pgstac/model/#model_parametrized_name_5","title":"model_parametrized_name","text":"
def model_parametrized_name(\n    params: 'tuple[type[Any], ...]'\n) -> 'str'\n

Compute the class name for parametrizations of generic classes.

This method can be overridden to achieve a custom naming scheme for generic BaseModels.

Parameters:

Name Type Description Default params None Tuple of types of the class. Given a generic class Model with 2 type variables and a concrete model Model[str, int], the value (str, int) would be passed to params. None

Returns:

Type Description None String representing the new class where params are passed to cls as type variables.

Raises:

Type Description TypeError Raised when trying to generate concrete names for non-generic models."},{"location":"api/titiler/pgstac/model/#model_rebuild_5","title":"model_rebuild","text":"
def model_rebuild(\n    *,\n    force: 'bool' = False,\n    raise_errors: 'bool' = True,\n    _parent_namespace_depth: 'int' = 2,\n    _types_namespace: 'dict[str, Any] | None' = None\n) -> 'bool | None'\n

Try to rebuild the pydantic-core schema for the model.

This may be necessary when one of the annotations is a ForwardRef which could not be resolved during the initial attempt to build the schema, and automatic rebuilding fails.

Parameters:

Name Type Description Default force None Whether to force the rebuilding of the model schema, defaults to False. None raise_errors None Whether to raise errors, defaults to True. None _parent_namespace_depth None The depth level of the parent namespace, defaults to 2. None _types_namespace None The types namespace, defaults to None. None

Returns:

Type Description None Returns None if the schema is already \"complete\" and rebuilding was not required. If rebuilding was required, returns True if rebuilding was successful, otherwise False."},{"location":"api/titiler/pgstac/model/#model_validate_5","title":"model_validate","text":"
def model_validate(\n    obj: 'Any',\n    *,\n    strict: 'bool | None' = None,\n    from_attributes: 'bool | None' = None,\n    context: 'dict[str, Any] | None' = None\n) -> 'Model'\n

Validate a pydantic model instance.

Parameters:

Name Type Description Default obj None The object to validate. None strict None Whether to enforce types strictly. None from_attributes None Whether to extract data from object attributes. None context None Additional context to pass to the validator. None

Returns:

Type Description None The validated model instance.

Raises:

Type Description ValidationError If the object could not be validated."},{"location":"api/titiler/pgstac/model/#model_validate_json_5","title":"model_validate_json","text":"
def model_validate_json(\n    json_data: 'str | bytes | bytearray',\n    *,\n    strict: 'bool | None' = None,\n    context: 'dict[str, Any] | None' = None\n) -> 'Model'\n

Usage docs: docs.pydantic.dev/2.7/concepts/json/#json-parsing

Validate the given JSON data against the Pydantic model.

Parameters:

Name Type Description Default json_data None The JSON data to validate. None strict None Whether to enforce types strictly. None context None Extra variables to pass to the validator. None

Returns:

Type Description None The validated Pydantic model.

Raises:

Type Description ValueError If json_data is not a JSON string."},{"location":"api/titiler/pgstac/model/#model_validate_strings_5","title":"model_validate_strings","text":"
def model_validate_strings(\n    obj: 'Any',\n    *,\n    strict: 'bool | None' = None,\n    context: 'dict[str, Any] | None' = None\n) -> 'Model'\n

Validate the given object, which contains string data, against the Pydantic model.

Parameters:

Name Type Description Default obj None The object containing string data to validate. None strict None Whether to enforce types strictly. None context None Extra variables to pass to the validator. None

Returns:

Type Description None The validated Pydantic model."},{"location":"api/titiler/pgstac/model/#parse_file_5","title":"parse_file","text":"
def parse_file(\n    path: 'str | Path',\n    *,\n    content_type: 'str | None' = None,\n    encoding: 'str' = 'utf8',\n    proto: 'DeprecatedParseProtocol | None' = None,\n    allow_pickle: 'bool' = False\n) -> 'Model'\n
"},{"location":"api/titiler/pgstac/model/#parse_obj_5","title":"parse_obj","text":"
def parse_obj(\n    obj: 'Any'\n) -> 'Model'\n
"},{"location":"api/titiler/pgstac/model/#parse_raw_5","title":"parse_raw","text":"
def parse_raw(\n    b: 'str | bytes',\n    *,\n    content_type: 'str | None' = None,\n    encoding: 'str' = 'utf8',\n    proto: 'DeprecatedParseProtocol | None' = None,\n    allow_pickle: 'bool' = False\n) -> 'Model'\n
"},{"location":"api/titiler/pgstac/model/#schema_5","title":"schema","text":"
def schema(\n    by_alias: 'bool' = True,\n    ref_template: 'str' = '#/$defs/{model}'\n) -> 'typing.Dict[str, Any]'\n
"},{"location":"api/titiler/pgstac/model/#schema_json_5","title":"schema_json","text":"
def schema_json(\n    *,\n    by_alias: 'bool' = True,\n    ref_template: 'str' = '#/$defs/{model}',\n    **dumps_kwargs: 'Any'\n) -> 'str'\n
"},{"location":"api/titiler/pgstac/model/#update_forward_refs_5","title":"update_forward_refs","text":"
def update_forward_refs(\n    **localns: 'Any'\n) -> 'None'\n
"},{"location":"api/titiler/pgstac/model/#validate_5","title":"validate","text":"
def validate(\n    value: 'Any'\n) -> 'Model'\n
"},{"location":"api/titiler/pgstac/model/#validate_bbox","title":"validate_bbox","text":"
def validate_bbox(\n    v: Union[Tuple[float, float, float, float], Tuple[float, float, float, float, float, float]]\n)\n

Validate BBOX.
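Per the signature above, both 2D (4-value) and 3D (6-value) bounding boxes are accepted; a small sketch:

from titiler.pgstac.model import PgSTACSearch

PgSTACSearch(bbox=(-180.0, -90.0, 180.0, 90.0))               # 2D: minx, miny, maxx, maxy
PgSTACSearch(bbox=(-180.0, -90.0, 0.0, 180.0, 90.0, 1000.0))  # 3D: adds min/max elevation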

"},{"location":"api/titiler/pgstac/model/#validate_datetime","title":"validate_datetime","text":"
def validate_datetime(\n    v\n)\n

Pgstac does not require the base validator for datetime.

"},{"location":"api/titiler/pgstac/model/#validate_query_fields","title":"validate_query_fields","text":"
def validate_query_fields(\n    values: Dict\n) -> Dict\n

Pgstac does not require the base validator for query fields.

"},{"location":"api/titiler/pgstac/model/#validate_spatial","title":"validate_spatial","text":"
def validate_spatial(\n    v: Optional[Annotated[Union[geojson_pydantic.geometries.Point, geojson_pydantic.geometries.MultiPoint, geojson_pydantic.geometries.LineString, geojson_pydantic.geometries.MultiLineString, geojson_pydantic.geometries.Polygon, geojson_pydantic.geometries.MultiPolygon, geojson_pydantic.geometries.GeometryCollection], FieldInfo(annotation=NoneType, required=True, discriminator='type')]],\n    info: pydantic_core.core_schema.ValidationInfo\n)\n

Make sure bbox is not used with Intersects.
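In other words, a search that supplies both bbox and intersects should fail validation; a hedged sketch:

from geojson_pydantic import Point
from pydantic import ValidationError
from titiler.pgstac.model import PgSTACSearch

try:
    PgSTACSearch(
        bbox=(-10.0, 35.0, 5.0, 45.0),
        intersects=Point(type="Point", coordinates=(0.0, 40.0)),
    )
except ValidationError as err:
    # bbox and intersects are mutually exclusive in a STAC search
    print(err)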

"},{"location":"api/titiler/pgstac/model/#instance-variables_5","title":"Instance variables","text":"
model_extra\n

Get extra fields set during validation.

model_fields_set\n

Returns the set of fields that have been explicitly set on this model instance.

"},{"location":"api/titiler/pgstac/model/#methods_5","title":"Methods","text":""},{"location":"api/titiler/pgstac/model/#copy_5","title":"copy","text":"
def copy(\n    self: 'Model',\n    *,\n    include: 'AbstractSetIntStr | MappingIntStrAny | None' = None,\n    exclude: 'AbstractSetIntStr | MappingIntStrAny | None' = None,\n    update: 'typing.Dict[str, Any] | None' = None,\n    deep: 'bool' = False\n) -> 'Model'\n

Returns a copy of the model.

Deprecated

This method is now deprecated; use model_copy instead.

If you need include or exclude, use:

data = self.model_dump(include=include, exclude=exclude, round_trip=True)\ndata = {**data, **(update or {})}\ncopied = self.model_validate(data)\n

Parameters:

Name Type Description Default include None Optional set or mapping specifying which fields to include in the copied model. None exclude None Optional set or mapping specifying which fields to exclude in the copied model. None update None Optional dictionary of field-value pairs to override field values in the copied model. None deep None If True, the values of fields that are Pydantic models will be deep-copied. None

Returns:

Type Description None A copy of the model with included, excluded and updated fields as specified."},{"location":"api/titiler/pgstac/model/#dict_5","title":"dict","text":"
def dict(\n    self,\n    *,\n    include: 'IncEx' = None,\n    exclude: 'IncEx' = None,\n    by_alias: 'bool' = False,\n    exclude_unset: 'bool' = False,\n    exclude_defaults: 'bool' = False,\n    exclude_none: 'bool' = False\n) -> 'typing.Dict[str, Any]'\n
"},{"location":"api/titiler/pgstac/model/#json_5","title":"json","text":"
def json(\n    self,\n    *,\n    include: 'IncEx' = None,\n    exclude: 'IncEx' = None,\n    by_alias: 'bool' = False,\n    exclude_unset: 'bool' = False,\n    exclude_defaults: 'bool' = False,\n    exclude_none: 'bool' = False,\n    encoder: 'typing.Callable[[Any], Any] | None' = PydanticUndefined,\n    models_as_dict: 'bool' = PydanticUndefined,\n    **dumps_kwargs: 'Any'\n) -> 'str'\n
"},{"location":"api/titiler/pgstac/model/#model_copy_5","title":"model_copy","text":"
def model_copy(\n    self: 'Model',\n    *,\n    update: 'dict[str, Any] | None' = None,\n    deep: 'bool' = False\n) -> 'Model'\n

Usage docs: docs.pydantic.dev/2.7/concepts/serialization/#model_copy

Returns a copy of the model.

Parameters:

Name Type Description Default update None Values to change/add in the new model. Note: the data is not validated before creating the new model. You should trust this data. None deep None Set to True to make a deep copy of the model. None

Returns:

Type Description None New model instance."},{"location":"api/titiler/pgstac/model/#model_dump_5","title":"model_dump","text":"
def model_dump(\n    self,\n    *,\n    mode: \"Literal['json', 'python'] | str\" = 'python',\n    include: 'IncEx' = None,\n    exclude: 'IncEx' = None,\n    context: 'dict[str, Any] | None' = None,\n    by_alias: 'bool' = False,\n    exclude_unset: 'bool' = False,\n    exclude_defaults: 'bool' = False,\n    exclude_none: 'bool' = False,\n    round_trip: 'bool' = False,\n    warnings: \"bool | Literal['none', 'warn', 'error']\" = True,\n    serialize_as_any: 'bool' = False\n) -> 'dict[str, Any]'\n

Usage docs: docs.pydantic.dev/2.7/concepts/serialization/#modelmodel_dump

Generate a dictionary representation of the model, optionally specifying which fields to include or exclude.

Parameters:

Name Type Description Default mode None The mode in which to_python should run. If mode is 'json', the output will only contain JSON serializable types. If mode is 'python', the output may contain non-JSON-serializable Python objects. None include None A set of fields to include in the output. None exclude None A set of fields to exclude from the output. None context None Additional context to pass to the serializer. None by_alias None Whether to use the field's alias in the dictionary key if defined. None exclude_unset None Whether to exclude fields that have not been explicitly set. None exclude_defaults None Whether to exclude fields that are set to their default value. None exclude_none None Whether to exclude fields that have a value of None. None round_trip None If True, dumped values should be valid as input for non-idempotent types such as Json[T]. None warnings None How to handle serialization errors. False/\"none\" ignores them, True/\"warn\" logs errors, \"error\" raises a [PydanticSerializationError][pydantic_core.PydanticSerializationError]. None serialize_as_any None Whether to serialize fields with duck-typing serialization behavior. None

Returns:

Type Description None A dictionary representation of the model."},{"location":"api/titiler/pgstac/model/#model_dump_json_5","title":"model_dump_json","text":"
def model_dump_json(\n    self,\n    *,\n    indent: 'int | None' = None,\n    include: 'IncEx' = None,\n    exclude: 'IncEx' = None,\n    context: 'dict[str, Any] | None' = None,\n    by_alias: 'bool' = False,\n    exclude_unset: 'bool' = False,\n    exclude_defaults: 'bool' = False,\n    exclude_none: 'bool' = False,\n    round_trip: 'bool' = False,\n    warnings: \"bool | Literal['none', 'warn', 'error']\" = True,\n    serialize_as_any: 'bool' = False\n) -> 'str'\n

Usage docs: docs.pydantic.dev/2.7/concepts/serialization/#modelmodel_dump_json

Generates a JSON representation of the model using Pydantic's to_json method.

Parameters:

Name Type Description Default indent None Indentation to use in the JSON output. If None is passed, the output will be compact. None include None Field(s) to include in the JSON output. None exclude None Field(s) to exclude from the JSON output. None context None Additional context to pass to the serializer. None by_alias None Whether to serialize using field aliases. None exclude_unset None Whether to exclude fields that have not been explicitly set. None exclude_defaults None Whether to exclude fields that are set to their default value. None exclude_none None Whether to exclude fields that have a value of None. None round_trip None If True, dumped values should be valid as input for non-idempotent types such as Json[T]. None warnings None How to handle serialization errors. False/\"none\" ignores them, True/\"warn\" logs errors, \"error\" raises a [PydanticSerializationError][pydantic_core.PydanticSerializationError]. None serialize_as_any None Whether to serialize fields with duck-typing serialization behavior. None

Returns:

Type Description None A JSON string representation of the model."},{"location":"api/titiler/pgstac/model/#model_post_init_5","title":"model_post_init","text":"
def model_post_init(\n    self,\n    _BaseModel__context: 'Any'\n) -> 'None'\n

Override this method to perform additional initialization after __init__ and model_construct.

This is useful if you want to do some validation that requires the entire model to be initialized.

"},{"location":"api/titiler/pgstac/model/#registermosaic","title":"RegisterMosaic","text":"
class RegisterMosaic(\n    /,\n    **data: 'Any'\n)\n

Model of /register endpoint input.
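In practice this is the JSON body a client POSTs to the register endpoint; a hypothetical sketch using httpx, where the URL and mount path (/searches/register) are assumptions about how the application is deployed:

import httpx

resp = httpx.post(
    "http://127.0.0.1:8081/searches/register",   # hypothetical deployment URL and path
    json={
        "collections": ["sentinel-2-l2a"],
        "bbox": [-10.0, 35.0, 5.0, 45.0],
        "metadata": {"name": "demo mosaic"},      # assumed optional Metadata payload
    },
)
resp.raise_for_status()
print(resp.json())   # the registered search id and links (see RegisterResponse below)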

"},{"location":"api/titiler/pgstac/model/#ancestors-in-mro_6","title":"Ancestors (in MRO)","text":"
  • titiler.pgstac.model.PgSTACSearch
  • pydantic.main.BaseModel
"},{"location":"api/titiler/pgstac/model/#class-variables_6","title":"Class variables","text":"
model_computed_fields\n
model_config\n
model_fields\n
"},{"location":"api/titiler/pgstac/model/#static-methods_6","title":"Static methods","text":""},{"location":"api/titiler/pgstac/model/#construct_6","title":"construct","text":"
def construct(\n    _fields_set: 'set[str] | None' = None,\n    **values: 'Any'\n) -> 'Model'\n
"},{"location":"api/titiler/pgstac/model/#from_orm_6","title":"from_orm","text":"
def from_orm(\n    obj: 'Any'\n) -> 'Model'\n
"},{"location":"api/titiler/pgstac/model/#model_construct_6","title":"model_construct","text":"
def model_construct(\n    _fields_set: 'set[str] | None' = None,\n    **values: 'Any'\n) -> 'Model'\n

Creates a new instance of the Model class with validated data.

Creates a new model setting __dict__ and __pydantic_fields_set__ from trusted or pre-validated data. Default values are respected, but no other validation is performed.

Note

model_construct() generally respects the model_config.extra setting on the provided model. That is, if model_config.extra == 'allow', then all extra passed values are added to the model instance's __dict__ and __pydantic_extra__ fields. If model_config.extra == 'ignore' (the default), then all extra passed values are ignored. Because no validation is performed with a call to model_construct(), having model_config.extra == 'forbid' does not result in an error if extra values are passed, but they will be ignored.

Parameters:

Name Type Description Default _fields_set None The set of field names accepted for the Model instance. None values None Trusted or pre-validated data dictionary. None

Returns:

Type Description None A new instance of the Model class with validated data."},{"location":"api/titiler/pgstac/model/#model_json_schema_6","title":"model_json_schema","text":"
def model_json_schema(\n    by_alias: 'bool' = True,\n    ref_template: 'str' = '#/$defs/{model}',\n    schema_generator: 'type[GenerateJsonSchema]' = <class 'pydantic.json_schema.GenerateJsonSchema'>,\n    mode: 'JsonSchemaMode' = 'validation'\n) -> 'dict[str, Any]'\n

Generates a JSON schema for a model class.

Parameters:

Name Type Description Default by_alias None Whether to use attribute aliases or not. None ref_template None The reference template. None schema_generator None To override the logic used to generate the JSON schema, as a subclass of GenerateJsonSchema with your desired modifications. None mode None The mode in which to generate the schema. None

Returns:

Type Description None The JSON schema for the given model class."},{"location":"api/titiler/pgstac/model/#model_parametrized_name_6","title":"model_parametrized_name","text":"
def model_parametrized_name(\n    params: 'tuple[type[Any], ...]'\n) -> 'str'\n

Compute the class name for parametrizations of generic classes.

This method can be overridden to achieve a custom naming scheme for generic BaseModels.

Parameters:

Name Type Description Default params None Tuple of types of the class. Given a generic class Model with 2 type variables and a concrete model Model[str, int], the value (str, int) would be passed to params. None

Returns:

Type Description None String representing the new class where params are passed to cls as type variables.

Raises:

Type Description TypeError Raised when trying to generate concrete names for non-generic models."},{"location":"api/titiler/pgstac/model/#model_rebuild_6","title":"model_rebuild","text":"
def model_rebuild(\n    *,\n    force: 'bool' = False,\n    raise_errors: 'bool' = True,\n    _parent_namespace_depth: 'int' = 2,\n    _types_namespace: 'dict[str, Any] | None' = None\n) -> 'bool | None'\n

Try to rebuild the pydantic-core schema for the model.

This may be necessary when one of the annotations is a ForwardRef which could not be resolved during the initial attempt to build the schema, and automatic rebuilding fails.

Parameters:

Name Type Description Default force None Whether to force the rebuilding of the model schema, defaults to False. None raise_errors None Whether to raise errors, defaults to True. None _parent_namespace_depth None The depth level of the parent namespace, defaults to 2. None _types_namespace None The types namespace, defaults to None. None

Returns:

Type Description None Returns None if the schema is already \"complete\" and rebuilding was not required. If rebuilding was required, returns True if rebuilding was successful, otherwise False."},{"location":"api/titiler/pgstac/model/#model_validate_6","title":"model_validate","text":"
def model_validate(\n    obj: 'Any',\n    *,\n    strict: 'bool | None' = None,\n    from_attributes: 'bool | None' = None,\n    context: 'dict[str, Any] | None' = None\n) -> 'Model'\n

Validate a pydantic model instance.

Parameters:

Name Type Description Default obj None The object to validate. None strict None Whether to enforce types strictly. None from_attributes None Whether to extract data from object attributes. None context None Additional context to pass to the validator. None

Returns:

Type Description None The validated model instance.

Raises:

Type Description ValidationError If the object could not be validated."},{"location":"api/titiler/pgstac/model/#model_validate_json_6","title":"model_validate_json","text":"
def model_validate_json(\n    json_data: 'str | bytes | bytearray',\n    *,\n    strict: 'bool | None' = None,\n    context: 'dict[str, Any] | None' = None\n) -> 'Model'\n

Usage docs: docs.pydantic.dev/2.7/concepts/json/#json-parsing

Validate the given JSON data against the Pydantic model.

Parameters:

Name Type Description Default json_data None The JSON data to validate. None strict None Whether to enforce types strictly. None context None Extra variables to pass to the validator. None

Returns:

Type Description None The validated Pydantic model.

Raises:

Type Description ValueError If json_data is not a JSON string."},{"location":"api/titiler/pgstac/model/#model_validate_strings_6","title":"model_validate_strings","text":"
def model_validate_strings(\n    obj: 'Any',\n    *,\n    strict: 'bool | None' = None,\n    context: 'dict[str, Any] | None' = None\n) -> 'Model'\n

Validate the given object, which contains string data, against the Pydantic model.

Parameters:

Name Type Description Default obj None The object containing string data to validate. None strict None Whether to enforce types strictly. None context None Extra variables to pass to the validator. None

Returns:

Type Description None The validated Pydantic model."},{"location":"api/titiler/pgstac/model/#parse_file_6","title":"parse_file","text":"
def parse_file(\n    path: 'str | Path',\n    *,\n    content_type: 'str | None' = None,\n    encoding: 'str' = 'utf8',\n    proto: 'DeprecatedParseProtocol | None' = None,\n    allow_pickle: 'bool' = False\n) -> 'Model'\n
"},{"location":"api/titiler/pgstac/model/#parse_obj_6","title":"parse_obj","text":"
def parse_obj(\n    obj: 'Any'\n) -> 'Model'\n
"},{"location":"api/titiler/pgstac/model/#parse_raw_6","title":"parse_raw","text":"
def parse_raw(\n    b: 'str | bytes',\n    *,\n    content_type: 'str | None' = None,\n    encoding: 'str' = 'utf8',\n    proto: 'DeprecatedParseProtocol | None' = None,\n    allow_pickle: 'bool' = False\n) -> 'Model'\n
"},{"location":"api/titiler/pgstac/model/#schema_6","title":"schema","text":"
def schema(\n    by_alias: 'bool' = True,\n    ref_template: 'str' = '#/$defs/{model}'\n) -> 'typing.Dict[str, Any]'\n
"},{"location":"api/titiler/pgstac/model/#schema_json_6","title":"schema_json","text":"
def schema_json(\n    *,\n    by_alias: 'bool' = True,\n    ref_template: 'str' = '#/$defs/{model}',\n    **dumps_kwargs: 'Any'\n) -> 'str'\n
"},{"location":"api/titiler/pgstac/model/#update_forward_refs_6","title":"update_forward_refs","text":"
def update_forward_refs(\n    **localns: 'Any'\n) -> 'None'\n
"},{"location":"api/titiler/pgstac/model/#validate_6","title":"validate","text":"
def validate(\n    value: 'Any'\n) -> 'Model'\n
"},{"location":"api/titiler/pgstac/model/#validate_bbox_1","title":"validate_bbox","text":"
def validate_bbox(\n    v: Union[Tuple[float, float, float, float], Tuple[float, float, float, float, float, float]]\n)\n

Validate BBOX.

"},{"location":"api/titiler/pgstac/model/#validate_datetime_1","title":"validate_datetime","text":"
def validate_datetime(\n    v\n)\n

Pgstac does not require the base validator for datetime.

"},{"location":"api/titiler/pgstac/model/#validate_query_fields_1","title":"validate_query_fields","text":"
def validate_query_fields(\n    values: Dict\n) -> Dict\n

Pgstac does not require the base validator for query fields.

"},{"location":"api/titiler/pgstac/model/#validate_spatial_1","title":"validate_spatial","text":"
def validate_spatial(\n    v: Optional[Annotated[Union[geojson_pydantic.geometries.Point, geojson_pydantic.geometries.MultiPoint, geojson_pydantic.geometries.LineString, geojson_pydantic.geometries.MultiLineString, geojson_pydantic.geometries.Polygon, geojson_pydantic.geometries.MultiPolygon, geojson_pydantic.geometries.GeometryCollection], FieldInfo(annotation=NoneType, required=True, discriminator='type')]],\n    info: pydantic_core.core_schema.ValidationInfo\n)\n

Make sure bbox is not used with Intersects.

"},{"location":"api/titiler/pgstac/model/#instance-variables_6","title":"Instance variables","text":"
model_extra\n

Get extra fields set during validation.

model_fields_set\n

Returns the set of fields that have been explicitly set on this model instance.

"},{"location":"api/titiler/pgstac/model/#methods_6","title":"Methods","text":""},{"location":"api/titiler/pgstac/model/#copy_6","title":"copy","text":"
def copy(\n    self: 'Model',\n    *,\n    include: 'AbstractSetIntStr | MappingIntStrAny | None' = None,\n    exclude: 'AbstractSetIntStr | MappingIntStrAny | None' = None,\n    update: 'typing.Dict[str, Any] | None' = None,\n    deep: 'bool' = False\n) -> 'Model'\n

Returns a copy of the model.

Deprecated

This method is now deprecated; use model_copy instead.

If you need include or exclude, use:

data = self.model_dump(include=include, exclude=exclude, round_trip=True)\ndata = {**data, **(update or {})}\ncopied = self.model_validate(data)\n

Parameters:

Name Type Description Default include None Optional set or mapping specifying which fields to include in the copied model. None exclude None Optional set or mapping specifying which fields to exclude in the copied model. None update None Optional dictionary of field-value pairs to override field values in the copied model. None deep None If True, the values of fields that are Pydantic models will be deep-copied. None

Returns:

Type Description None A copy of the model with included, excluded and updated fields as specified."},{"location":"api/titiler/pgstac/model/#dict_6","title":"dict","text":"
def dict(\n    self,\n    *,\n    include: 'IncEx' = None,\n    exclude: 'IncEx' = None,\n    by_alias: 'bool' = False,\n    exclude_unset: 'bool' = False,\n    exclude_defaults: 'bool' = False,\n    exclude_none: 'bool' = False\n) -> 'typing.Dict[str, Any]'\n
"},{"location":"api/titiler/pgstac/model/#json_6","title":"json","text":"
def json(\n    self,\n    *,\n    include: 'IncEx' = None,\n    exclude: 'IncEx' = None,\n    by_alias: 'bool' = False,\n    exclude_unset: 'bool' = False,\n    exclude_defaults: 'bool' = False,\n    exclude_none: 'bool' = False,\n    encoder: 'typing.Callable[[Any], Any] | None' = PydanticUndefined,\n    models_as_dict: 'bool' = PydanticUndefined,\n    **dumps_kwargs: 'Any'\n) -> 'str'\n
"},{"location":"api/titiler/pgstac/model/#model_copy_6","title":"model_copy","text":"
def model_copy(\n    self: 'Model',\n    *,\n    update: 'dict[str, Any] | None' = None,\n    deep: 'bool' = False\n) -> 'Model'\n

Usage docs: docs.pydantic.dev/2.7/concepts/serialization/#model_copy

Returns a copy of the model.

Parameters:

Name Type Description Default update None Values to change/add in the new model. Note: the data is not validated before creating the new model. You should trust this data. None deep None Set to True to make a deep copy of the model. None

Returns:

Type Description None New model instance."},{"location":"api/titiler/pgstac/model/#model_dump_6","title":"model_dump","text":"
def model_dump(\n    self,\n    *,\n    mode: \"Literal['json', 'python'] | str\" = 'python',\n    include: 'IncEx' = None,\n    exclude: 'IncEx' = None,\n    context: 'dict[str, Any] | None' = None,\n    by_alias: 'bool' = False,\n    exclude_unset: 'bool' = False,\n    exclude_defaults: 'bool' = False,\n    exclude_none: 'bool' = False,\n    round_trip: 'bool' = False,\n    warnings: \"bool | Literal['none', 'warn', 'error']\" = True,\n    serialize_as_any: 'bool' = False\n) -> 'dict[str, Any]'\n

Usage docs: docs.pydantic.dev/2.7/concepts/serialization/#modelmodel_dump

Generate a dictionary representation of the model, optionally specifying which fields to include or exclude.

Parameters:

Name Type Description Default mode None The mode in which to_python should run. If mode is 'json', the output will only contain JSON serializable types. If mode is 'python', the output may contain non-JSON-serializable Python objects. None include None A set of fields to include in the output. None exclude None A set of fields to exclude from the output. None context None Additional context to pass to the serializer. None by_alias None Whether to use the field's alias in the dictionary key if defined. None exclude_unset None Whether to exclude fields that have not been explicitly set. None exclude_defaults None Whether to exclude fields that are set to their default value. None exclude_none None Whether to exclude fields that have a value of None. None round_trip None If True, dumped values should be valid as input for non-idempotent types such as Json[T]. None warnings None How to handle serialization errors. False/\"none\" ignores them, True/\"warn\" logs errors, \"error\" raises a [PydanticSerializationError][pydantic_core.PydanticSerializationError]. None serialize_as_any None Whether to serialize fields with duck-typing serialization behavior. None

Returns:

Type Description None A dictionary representation of the model."},{"location":"api/titiler/pgstac/model/#model_dump_json_6","title":"model_dump_json","text":"
def model_dump_json(\n    self,\n    *,\n    indent: 'int | None' = None,\n    include: 'IncEx' = None,\n    exclude: 'IncEx' = None,\n    context: 'dict[str, Any] | None' = None,\n    by_alias: 'bool' = False,\n    exclude_unset: 'bool' = False,\n    exclude_defaults: 'bool' = False,\n    exclude_none: 'bool' = False,\n    round_trip: 'bool' = False,\n    warnings: \"bool | Literal['none', 'warn', 'error']\" = True,\n    serialize_as_any: 'bool' = False\n) -> 'str'\n

Usage docs: docs.pydantic.dev/2.7/concepts/serialization/#modelmodel_dump_json

Generates a JSON representation of the model using Pydantic's to_json method.

Parameters:

Name Type Description Default indent None Indentation to use in the JSON output. If None is passed, the output will be compact. None include None Field(s) to include in the JSON output. None exclude None Field(s) to exclude from the JSON output. None context None Additional context to pass to the serializer. None by_alias None Whether to serialize using field aliases. None exclude_unset None Whether to exclude fields that have not been explicitly set. None exclude_defaults None Whether to exclude fields that are set to their default value. None exclude_none None Whether to exclude fields that have a value of None. None round_trip None If True, dumped values should be valid as input for non-idempotent types such as Json[T]. None warnings None How to handle serialization errors. False/\"none\" ignores them, True/\"warn\" logs errors, \"error\" raises a [PydanticSerializationError][pydantic_core.PydanticSerializationError]. None serialize_as_any None Whether to serialize fields with duck-typing serialization behavior. None

Returns:

Type Description None A JSON string representation of the model."},{"location":"api/titiler/pgstac/model/#model_post_init_6","title":"model_post_init","text":"
def model_post_init(\n    self,\n    _BaseModel__context: 'Any'\n) -> 'None'\n

Override this method to perform additional initialization after __init__ and model_construct.

This is useful if you want to do some validation that requires the entire model to be initialized.

"},{"location":"api/titiler/pgstac/model/#registerresponse","title":"RegisterResponse","text":"
class RegisterResponse(\n    /,\n    **data: 'Any'\n)\n

Response model for /register endpoint.

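A hedged usage sketch; the payload keys below are assumptions made for illustration, and the actual contract is whatever the /register endpoint returns:

from titiler.pgstac import model

# `payload` stands in for the JSON body returned by the /register endpoint (assumed keys)
payload = {"id": "6d436413d0eed760acc2f6bd2ef24e9b", "links": []}
registered = model.RegisterResponse.model_validate(payload)
print(registered.model_dump_json(exclude_none=True))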
"},{"location":"api/titiler/pgstac/model/#ancestors-in-mro_7","title":"Ancestors (in MRO)","text":"
  • pydantic.main.BaseModel
"},{"location":"api/titiler/pgstac/model/#class-variables_7","title":"Class variables","text":"
model_computed_fields\n
model_config\n
model_fields\n
"},{"location":"api/titiler/pgstac/model/#static-methods_7","title":"Static methods","text":""},{"location":"api/titiler/pgstac/model/#construct_7","title":"construct","text":"
def construct(\n    _fields_set: 'set[str] | None' = None,\n    **values: 'Any'\n) -> 'Model'\n
"},{"location":"api/titiler/pgstac/model/#from_orm_7","title":"from_orm","text":"
def from_orm(\n    obj: 'Any'\n) -> 'Model'\n
"},{"location":"api/titiler/pgstac/model/#model_construct_7","title":"model_construct","text":"
def model_construct(\n    _fields_set: 'set[str] | None' = None,\n    **values: 'Any'\n) -> 'Model'\n

Creates a new instance of the Model class with validated data.

Creates a new model setting __dict__ and __pydantic_fields_set__ from trusted or pre-validated data. Default values are respected, but no other validation is performed.

Note

model_construct() generally respects the model_config.extra setting on the provided model. That is, if model_config.extra == 'allow', then all extra passed values are added to the model instance's __dict__ and __pydantic_extra__ fields. If model_config.extra == 'ignore' (the default), then all extra passed values are ignored. Because no validation is performed with a call to model_construct(), having model_config.extra == 'forbid' does not result in an error if extra values are passed, but they will be ignored.

Parameters:

Name Type Description Default _fields_set None The set of field names accepted for the Model instance. None values None Trusted or pre-validated data dictionary. None

Returns:

Type Description None A new instance of the Model class with validated data."},{"location":"api/titiler/pgstac/model/#model_json_schema_7","title":"model_json_schema","text":"
def model_json_schema(\n    by_alias: 'bool' = True,\n    ref_template: 'str' = '#/$defs/{model}',\n    schema_generator: 'type[GenerateJsonSchema]' = <class 'pydantic.json_schema.GenerateJsonSchema'>,\n    mode: 'JsonSchemaMode' = 'validation'\n) -> 'dict[str, Any]'\n

Generates a JSON schema for a model class.

Parameters:

Name Type Description Default by_alias None Whether to use attribute aliases or not. None ref_template None The reference template. None schema_generator None To override the logic used to generate the JSON schema, as a subclass of GenerateJsonSchema with your desired modifications. None mode None The mode in which to generate the schema. None

Returns:

Type Description None The JSON schema for the given model class."},{"location":"api/titiler/pgstac/model/#model_parametrized_name_7","title":"model_parametrized_name","text":"
def model_parametrized_name(\n    params: 'tuple[type[Any], ...]'\n) -> 'str'\n

Compute the class name for parametrizations of generic classes.

This method can be overridden to achieve a custom naming scheme for generic BaseModels.

Parameters:

Name Type Description Default params None Tuple of types of the class. Given a generic class Model with 2 type variables and a concrete model Model[str, int], the value (str, int) would be passed to params. None

Returns:

Type Description None String representing the new class where params are passed to cls as type variables.

Raises:

Type Description TypeError Raised when trying to generate concrete names for non-generic models."},{"location":"api/titiler/pgstac/model/#model_rebuild_7","title":"model_rebuild","text":"
def model_rebuild(\n    *,\n    force: 'bool' = False,\n    raise_errors: 'bool' = True,\n    _parent_namespace_depth: 'int' = 2,\n    _types_namespace: 'dict[str, Any] | None' = None\n) -> 'bool | None'\n

Try to rebuild the pydantic-core schema for the model.

This may be necessary when one of the annotations is a ForwardRef which could not be resolved during the initial attempt to build the schema, and automatic rebuilding fails.

Parameters:

Name Type Description Default force None Whether to force the rebuilding of the model schema, defaults to False. None raise_errors None Whether to raise errors, defaults to True. None _parent_namespace_depth None The depth level of the parent namespace, defaults to 2. None _types_namespace None The types namespace, defaults to None. None

Returns:

Type Description None Returns None if the schema is already \"complete\" and rebuilding was not required. If rebuilding was required, returns True if rebuilding was successful, otherwise False."},{"location":"api/titiler/pgstac/model/#model_validate_7","title":"model_validate","text":"
def model_validate(\n    obj: 'Any',\n    *,\n    strict: 'bool | None' = None,\n    from_attributes: 'bool | None' = None,\n    context: 'dict[str, Any] | None' = None\n) -> 'Model'\n

Validate a pydantic model instance.

Parameters:

Name Type Description Default obj None The object to validate. None strict None Whether to enforce types strictly. None from_attributes None Whether to extract data from object attributes. None context None Additional context to pass to the validator. None

Returns:

Type Description None The validated model instance.

Raises:

Type Description ValidationError If the object could not be validated."},{"location":"api/titiler/pgstac/model/#model_validate_json_7","title":"model_validate_json","text":"
def model_validate_json(\n    json_data: 'str | bytes | bytearray',\n    *,\n    strict: 'bool | None' = None,\n    context: 'dict[str, Any] | None' = None\n) -> 'Model'\n

Usage docs: docs.pydantic.dev/2.7/concepts/json/#json-parsing

Validate the given JSON data against the Pydantic model.

Parameters:

Name Type Description Default json_data None The JSON data to validate. None strict None Whether to enforce types strictly. None context None Extra variables to pass to the validator. None

Returns:

Type Description None The validated Pydantic model.

Raises:

Type Description ValueError If json_data is not a JSON string."},{"location":"api/titiler/pgstac/model/#model_validate_strings_7","title":"model_validate_strings","text":"
def model_validate_strings(\n    obj: 'Any',\n    *,\n    strict: 'bool | None' = None,\n    context: 'dict[str, Any] | None' = None\n) -> 'Model'\n

Validate the given object containing string data against the Pydantic model.

Parameters:

Name Type Description Default obj None The object containing string data to validate. None strict None Whether to enforce types strictly. None context None Extra variables to pass to the validator. None

Returns:

Type Description None The validated Pydantic model."},{"location":"api/titiler/pgstac/model/#parse_file_7","title":"parse_file","text":"
def parse_file(\n    path: 'str | Path',\n    *,\n    content_type: 'str | None' = None,\n    encoding: 'str' = 'utf8',\n    proto: 'DeprecatedParseProtocol | None' = None,\n    allow_pickle: 'bool' = False\n) -> 'Model'\n
"},{"location":"api/titiler/pgstac/model/#parse_obj_7","title":"parse_obj","text":"
def parse_obj(\n    obj: 'Any'\n) -> 'Model'\n
"},{"location":"api/titiler/pgstac/model/#parse_raw_7","title":"parse_raw","text":"
def parse_raw(\n    b: 'str | bytes',\n    *,\n    content_type: 'str | None' = None,\n    encoding: 'str' = 'utf8',\n    proto: 'DeprecatedParseProtocol | None' = None,\n    allow_pickle: 'bool' = False\n) -> 'Model'\n
"},{"location":"api/titiler/pgstac/model/#schema_7","title":"schema","text":"
def schema(\n    by_alias: 'bool' = True,\n    ref_template: 'str' = '#/$defs/{model}'\n) -> 'typing.Dict[str, Any]'\n
"},{"location":"api/titiler/pgstac/model/#schema_json_7","title":"schema_json","text":"
def schema_json(\n    *,\n    by_alias: 'bool' = True,\n    ref_template: 'str' = '#/$defs/{model}',\n    **dumps_kwargs: 'Any'\n) -> 'str'\n
"},{"location":"api/titiler/pgstac/model/#update_forward_refs_7","title":"update_forward_refs","text":"
def update_forward_refs(\n    **localns: 'Any'\n) -> 'None'\n
"},{"location":"api/titiler/pgstac/model/#validate_7","title":"validate","text":"
def validate(\n    value: 'Any'\n) -> 'Model'\n
"},{"location":"api/titiler/pgstac/model/#instance-variables_7","title":"Instance variables","text":"
model_extra\n

Get extra fields set during validation.

model_fields_set\n

Returns the set of fields that have been explicitly set on this model instance.

"},{"location":"api/titiler/pgstac/model/#methods_7","title":"Methods","text":""},{"location":"api/titiler/pgstac/model/#copy_7","title":"copy","text":"
def copy(\n    self: 'Model',\n    *,\n    include: 'AbstractSetIntStr | MappingIntStrAny | None' = None,\n    exclude: 'AbstractSetIntStr | MappingIntStrAny | None' = None,\n    update: 'typing.Dict[str, Any] | None' = None,\n    deep: 'bool' = False\n) -> 'Model'\n

Returns a copy of the model.

Deprecated

This method is now deprecated; use model_copy instead.

If you need include or exclude, use:

data = self.model_dump(include=include, exclude=exclude, round_trip=True)\ndata = {**data, **(update or {})}\ncopied = self.model_validate(data)\n

Parameters:

Name Type Description Default include None Optional set or mapping specifying which fields to include in the copied model. None exclude None Optional set or mapping specifying which fields to exclude in the copied model. None update None Optional dictionary of field-value pairs to override field values in the copied model. None deep None If True, the values of fields that are Pydantic models will be deep-copied. None

Returns:

Type Description None A copy of the model with included, excluded and updated fields as specified."},{"location":"api/titiler/pgstac/model/#dict_7","title":"dict","text":"
def dict(\n    self,\n    *,\n    include: 'IncEx' = None,\n    exclude: 'IncEx' = None,\n    by_alias: 'bool' = False,\n    exclude_unset: 'bool' = False,\n    exclude_defaults: 'bool' = False,\n    exclude_none: 'bool' = False\n) -> 'typing.Dict[str, Any]'\n
"},{"location":"api/titiler/pgstac/model/#json_7","title":"json","text":"
def json(\n    self,\n    *,\n    include: 'IncEx' = None,\n    exclude: 'IncEx' = None,\n    by_alias: 'bool' = False,\n    exclude_unset: 'bool' = False,\n    exclude_defaults: 'bool' = False,\n    exclude_none: 'bool' = False,\n    encoder: 'typing.Callable[[Any], Any] | None' = PydanticUndefined,\n    models_as_dict: 'bool' = PydanticUndefined,\n    **dumps_kwargs: 'Any'\n) -> 'str'\n
"},{"location":"api/titiler/pgstac/model/#model_copy_7","title":"model_copy","text":"
def model_copy(\n    self: 'Model',\n    *,\n    update: 'dict[str, Any] | None' = None,\n    deep: 'bool' = False\n) -> 'Model'\n

Usage docs: docs.pydantic.dev/2.7/concepts/serialization/#model_copy

Returns a copy of the model.

Parameters:

Name Type Description Default update None Values to change/add in the new model. Note: the data is not validated before creating the new model. You should trust this data. None deep None Set to True to make a deep copy of the model. None

Returns:

Type Description None New model instance."},{"location":"api/titiler/pgstac/model/#model_dump_7","title":"model_dump","text":"
def model_dump(\n    self,\n    *,\n    mode: \"Literal['json', 'python'] | str\" = 'python',\n    include: 'IncEx' = None,\n    exclude: 'IncEx' = None,\n    context: 'dict[str, Any] | None' = None,\n    by_alias: 'bool' = False,\n    exclude_unset: 'bool' = False,\n    exclude_defaults: 'bool' = False,\n    exclude_none: 'bool' = False,\n    round_trip: 'bool' = False,\n    warnings: \"bool | Literal['none', 'warn', 'error']\" = True,\n    serialize_as_any: 'bool' = False\n) -> 'dict[str, Any]'\n

Usage docs: docs.pydantic.dev/2.7/concepts/serialization/#modelmodel_dump

Generate a dictionary representation of the model, optionally specifying which fields to include or exclude.

Parameters:

Name Type Description Default mode None The mode in which to_python should run.If mode is 'json', the output will only contain JSON serializable types.If mode is 'python', the output may contain non-JSON-serializable Python objects. None include None A set of fields to include in the output. None exclude None A set of fields to exclude from the output. None context None Additional context to pass to the serializer. None by_alias None Whether to use the field's alias in the dictionary key if defined. None exclude_unset None Whether to exclude fields that have not been explicitly set. None exclude_defaults None Whether to exclude fields that are set to their default value. None exclude_none None Whether to exclude fields that have a value of None. None round_trip None If True, dumped values should be valid as input for non-idempotent types such as Json[T]. None warnings None How to handle serialization errors. False/\"none\" ignores them, True/\"warn\" logs errors,\"error\" raises a [PydanticSerializationError][pydantic_core.PydanticSerializationError]. None serialize_as_any None Whether to serialize fields with duck-typing serialization behavior. None

Returns:

Type Description None A dictionary representation of the model."},{"location":"api/titiler/pgstac/model/#model_dump_json_7","title":"model_dump_json","text":"
def model_dump_json(\n    self,\n    *,\n    indent: 'int | None' = None,\n    include: 'IncEx' = None,\n    exclude: 'IncEx' = None,\n    context: 'dict[str, Any] | None' = None,\n    by_alias: 'bool' = False,\n    exclude_unset: 'bool' = False,\n    exclude_defaults: 'bool' = False,\n    exclude_none: 'bool' = False,\n    round_trip: 'bool' = False,\n    warnings: \"bool | Literal['none', 'warn', 'error']\" = True,\n    serialize_as_any: 'bool' = False\n) -> 'str'\n

Usage docs: docs.pydantic.dev/2.7/concepts/serialization/#modelmodel_dump_json

Generates a JSON representation of the model using Pydantic's to_json method.

Parameters:

Name Type Description Default indent None Indentation to use in the JSON output. If None is passed, the output will be compact. None include None Field(s) to include in the JSON output. None exclude None Field(s) to exclude from the JSON output. None context None Additional context to pass to the serializer. None by_alias None Whether to serialize using field aliases. None exclude_unset None Whether to exclude fields that have not been explicitly set. None exclude_defaults None Whether to exclude fields that are set to their default value. None exclude_none None Whether to exclude fields that have a value of None. None round_trip None If True, dumped values should be valid as input for non-idempotent types such as Json[T]. None warnings None How to handle serialization errors. False/\"none\" ignores them, True/\"warn\" logs errors,\"error\" raises a [PydanticSerializationError][pydantic_core.PydanticSerializationError]. None serialize_as_any None Whether to serialize fields with duck-typing serialization behavior. None

Returns:

Type Description None A JSON string representation of the model."},{"location":"api/titiler/pgstac/model/#model_post_init_7","title":"model_post_init","text":"
def model_post_init(\n    self,\n    _BaseModel__context: 'Any'\n) -> 'None'\n

Override this method to perform additional initialization after __init__ and model_construct.

This is useful if you want to do some validation that requires the entire model to be initialized.

"},{"location":"api/titiler/pgstac/model/#search","title":"Search","text":"
class Search(\n    /,\n    **data: 'Any'\n)\n

PgSTAC Search entry.

ref: github.com/stac-utils/pgstac/blob/3499daa2bfa700ae7bb07503795c169bf2ebafc7/sql/004_search.sql#L907-L915

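A small sketch for inspecting which fields (and aliases) this model expects, without asserting them here:

from titiler.pgstac import model

# list the properties that pgstac search entries are validated against
schema = model.Search.model_json_schema(by_alias=True)
print(sorted(schema.get("properties", {})))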
"},{"location":"api/titiler/pgstac/model/#ancestors-in-mro_8","title":"Ancestors (in MRO)","text":"
  • pydantic.main.BaseModel
"},{"location":"api/titiler/pgstac/model/#class-variables_8","title":"Class variables","text":"
model_computed_fields\n
model_config\n
model_fields\n
"},{"location":"api/titiler/pgstac/model/#static-methods_8","title":"Static methods","text":""},{"location":"api/titiler/pgstac/model/#construct_8","title":"construct","text":"
def construct(\n    _fields_set: 'set[str] | None' = None,\n    **values: 'Any'\n) -> 'Model'\n
"},{"location":"api/titiler/pgstac/model/#from_orm_8","title":"from_orm","text":"
def from_orm(\n    obj: 'Any'\n) -> 'Model'\n
"},{"location":"api/titiler/pgstac/model/#model_construct_8","title":"model_construct","text":"
def model_construct(\n    _fields_set: 'set[str] | None' = None,\n    **values: 'Any'\n) -> 'Model'\n

Creates a new instance of the Model class with validated data.

Creates a new model setting __dict__ and __pydantic_fields_set__ from trusted or pre-validated data. Default values are respected, but no other validation is performed.

Note

model_construct() generally respects the model_config.extra setting on the provided model. That is, if model_config.extra == 'allow', then all extra passed values are added to the model instance's __dict__ and __pydantic_extra__ fields. If model_config.extra == 'ignore' (the default), then all extra passed values are ignored. Because no validation is performed with a call to model_construct(), having model_config.extra == 'forbid' does not result in an error if extra values are passed, but they will be ignored.

Parameters:

Name Type Description Default _fields_set None The set of field names accepted for the Model instance. None values None Trusted or pre-validated data dictionary. None

Returns:

Type Description None A new instance of the Model class with validated data."},{"location":"api/titiler/pgstac/model/#model_json_schema_8","title":"model_json_schema","text":"
def model_json_schema(\n    by_alias: 'bool' = True,\n    ref_template: 'str' = '#/$defs/{model}',\n    schema_generator: 'type[GenerateJsonSchema]' = <class 'pydantic.json_schema.GenerateJsonSchema'>,\n    mode: 'JsonSchemaMode' = 'validation'\n) -> 'dict[str, Any]'\n

Generates a JSON schema for a model class.

Parameters:

Name Type Description Default by_alias None Whether to use attribute aliases or not. None ref_template None The reference template. None schema_generator None To override the logic used to generate the JSON schema, as a subclass of GenerateJsonSchema with your desired modifications. None mode None The mode in which to generate the schema. None

Returns:

Type Description None The JSON schema for the given model class."},{"location":"api/titiler/pgstac/model/#model_parametrized_name_8","title":"model_parametrized_name","text":"
def model_parametrized_name(\n    params: 'tuple[type[Any], ...]'\n) -> 'str'\n

Compute the class name for parametrizations of generic classes.

This method can be overridden to achieve a custom naming scheme for generic BaseModels.

Parameters:

Name Type Description Default params None Tuple of types of the class. Given a generic class Model with 2 type variables and a concrete model Model[str, int], the value (str, int) would be passed to params. None

Returns:

Type Description None String representing the new class where params are passed to cls as type variables.

Raises:

Type Description TypeError Raised when trying to generate concrete names for non-generic models."},{"location":"api/titiler/pgstac/model/#model_rebuild_8","title":"model_rebuild","text":"
def model_rebuild(\n    *,\n    force: 'bool' = False,\n    raise_errors: 'bool' = True,\n    _parent_namespace_depth: 'int' = 2,\n    _types_namespace: 'dict[str, Any] | None' = None\n) -> 'bool | None'\n

Try to rebuild the pydantic-core schema for the model.

This may be necessary when one of the annotations is a ForwardRef which could not be resolved during the initial attempt to build the schema, and automatic rebuilding fails.

Parameters:

Name Type Description Default force None Whether to force the rebuilding of the model schema, defaults to False. None raise_errors None Whether to raise errors, defaults to True. None _parent_namespace_depth None The depth level of the parent namespace, defaults to 2. None _types_namespace None The types namespace, defaults to None. None

Returns:

Type Description None Returns None if the schema is already \"complete\" and rebuilding was not required. If rebuilding was required, returns True if rebuilding was successful, otherwise False."},{"location":"api/titiler/pgstac/model/#model_validate_8","title":"model_validate","text":"
def model_validate(\n    obj: 'Any',\n    *,\n    strict: 'bool | None' = None,\n    from_attributes: 'bool | None' = None,\n    context: 'dict[str, Any] | None' = None\n) -> 'Model'\n

Validate a pydantic model instance.

Parameters:

Name Type Description Default obj None The object to validate. None strict None Whether to enforce types strictly. None from_attributes None Whether to extract data from object attributes. None context None Additional context to pass to the validator. None

Returns:

Type Description None The validated model instance.

Raises:

Type Description ValidationError If the object could not be validated."},{"location":"api/titiler/pgstac/model/#model_validate_json_8","title":"model_validate_json","text":"
def model_validate_json(\n    json_data: 'str | bytes | bytearray',\n    *,\n    strict: 'bool | None' = None,\n    context: 'dict[str, Any] | None' = None\n) -> 'Model'\n

Usage docs: docs.pydantic.dev/2.7/concepts/json/#json-parsing

Validate the given JSON data against the Pydantic model.

Parameters:

Name Type Description Default json_data None The JSON data to validate. None strict None Whether to enforce types strictly. None context None Extra variables to pass to the validator. None

Returns:

Type Description None The validated Pydantic model.

Raises:

Type Description ValueError If json_data is not a JSON string."},{"location":"api/titiler/pgstac/model/#model_validate_strings_8","title":"model_validate_strings","text":"
def model_validate_strings(\n    obj: 'Any',\n    *,\n    strict: 'bool | None' = None,\n    context: 'dict[str, Any] | None' = None\n) -> 'Model'\n

Validate the given object containing string data against the Pydantic model.

Parameters:

Name Type Description Default obj None The object containing string data to validate. None strict None Whether to enforce types strictly. None context None Extra variables to pass to the validator. None

Returns:

Type Description None The validated Pydantic model."},{"location":"api/titiler/pgstac/model/#parse_file_8","title":"parse_file","text":"
def parse_file(\n    path: 'str | Path',\n    *,\n    content_type: 'str | None' = None,\n    encoding: 'str' = 'utf8',\n    proto: 'DeprecatedParseProtocol | None' = None,\n    allow_pickle: 'bool' = False\n) -> 'Model'\n
"},{"location":"api/titiler/pgstac/model/#parse_obj_8","title":"parse_obj","text":"
def parse_obj(\n    obj: 'Any'\n) -> 'Model'\n
"},{"location":"api/titiler/pgstac/model/#parse_raw_8","title":"parse_raw","text":"
def parse_raw(\n    b: 'str | bytes',\n    *,\n    content_type: 'str | None' = None,\n    encoding: 'str' = 'utf8',\n    proto: 'DeprecatedParseProtocol | None' = None,\n    allow_pickle: 'bool' = False\n) -> 'Model'\n
"},{"location":"api/titiler/pgstac/model/#schema_8","title":"schema","text":"
def schema(\n    by_alias: 'bool' = True,\n    ref_template: 'str' = '#/$defs/{model}'\n) -> 'typing.Dict[str, Any]'\n
"},{"location":"api/titiler/pgstac/model/#schema_json_8","title":"schema_json","text":"
def schema_json(\n    *,\n    by_alias: 'bool' = True,\n    ref_template: 'str' = '#/$defs/{model}',\n    **dumps_kwargs: 'Any'\n) -> 'str'\n
"},{"location":"api/titiler/pgstac/model/#update_forward_refs_8","title":"update_forward_refs","text":"
def update_forward_refs(\n    **localns: 'Any'\n) -> 'None'\n
"},{"location":"api/titiler/pgstac/model/#validate_8","title":"validate","text":"
def validate(\n    value: 'Any'\n) -> 'Model'\n
"},{"location":"api/titiler/pgstac/model/#validate_metadata","title":"validate_metadata","text":"
def validate_metadata(\n    v\n)\n

Set SearchType.search when not present in metadata.

"},{"location":"api/titiler/pgstac/model/#instance-variables_8","title":"Instance variables","text":"
model_extra\n

Get extra fields set during validation.

model_fields_set\n

Returns the set of fields that have been explicitly set on this model instance.

"},{"location":"api/titiler/pgstac/model/#methods_8","title":"Methods","text":""},{"location":"api/titiler/pgstac/model/#copy_8","title":"copy","text":"
def copy(\n    self: 'Model',\n    *,\n    include: 'AbstractSetIntStr | MappingIntStrAny | None' = None,\n    exclude: 'AbstractSetIntStr | MappingIntStrAny | None' = None,\n    update: 'typing.Dict[str, Any] | None' = None,\n    deep: 'bool' = False\n) -> 'Model'\n

Returns a copy of the model.

Deprecated

This method is now deprecated; use model_copy instead.

If you need include or exclude, use:

data = self.model_dump(include=include, exclude=exclude, round_trip=True)\ndata = {**data, **(update or {})}\ncopied = self.model_validate(data)\n

Parameters:

Name Type Description Default include None Optional set or mapping specifying which fields to include in the copied model. None exclude None Optional set or mapping specifying which fields to exclude in the copied model. None update None Optional dictionary of field-value pairs to override field values in the copied model. None deep None If True, the values of fields that are Pydantic models will be deep-copied. None

Returns:

Type Description None A copy of the model with included, excluded and updated fields as specified."},{"location":"api/titiler/pgstac/model/#dict_8","title":"dict","text":"
def dict(\n    self,\n    *,\n    include: 'IncEx' = None,\n    exclude: 'IncEx' = None,\n    by_alias: 'bool' = False,\n    exclude_unset: 'bool' = False,\n    exclude_defaults: 'bool' = False,\n    exclude_none: 'bool' = False\n) -> 'typing.Dict[str, Any]'\n
"},{"location":"api/titiler/pgstac/model/#json_8","title":"json","text":"
def json(\n    self,\n    *,\n    include: 'IncEx' = None,\n    exclude: 'IncEx' = None,\n    by_alias: 'bool' = False,\n    exclude_unset: 'bool' = False,\n    exclude_defaults: 'bool' = False,\n    exclude_none: 'bool' = False,\n    encoder: 'typing.Callable[[Any], Any] | None' = PydanticUndefined,\n    models_as_dict: 'bool' = PydanticUndefined,\n    **dumps_kwargs: 'Any'\n) -> 'str'\n
"},{"location":"api/titiler/pgstac/model/#model_copy_8","title":"model_copy","text":"
def model_copy(\n    self: 'Model',\n    *,\n    update: 'dict[str, Any] | None' = None,\n    deep: 'bool' = False\n) -> 'Model'\n

Usage docs: docs.pydantic.dev/2.7/concepts/serialization/#model_copy

Returns a copy of the model.

Parameters:

Name Type Description Default update None Values to change/add in the new model. Note: the data is not validated before creating the new model. You should trust this data. None deep None Set to True to make a deep copy of the model. None

Returns:

Type Description None New model instance."},{"location":"api/titiler/pgstac/model/#model_dump_8","title":"model_dump","text":"
def model_dump(\n    self,\n    *,\n    mode: \"Literal['json', 'python'] | str\" = 'python',\n    include: 'IncEx' = None,\n    exclude: 'IncEx' = None,\n    context: 'dict[str, Any] | None' = None,\n    by_alias: 'bool' = False,\n    exclude_unset: 'bool' = False,\n    exclude_defaults: 'bool' = False,\n    exclude_none: 'bool' = False,\n    round_trip: 'bool' = False,\n    warnings: \"bool | Literal['none', 'warn', 'error']\" = True,\n    serialize_as_any: 'bool' = False\n) -> 'dict[str, Any]'\n

Usage docs: docs.pydantic.dev/2.7/concepts/serialization/#modelmodel_dump

Generate a dictionary representation of the model, optionally specifying which fields to include or exclude.

Parameters:

Name Type Description Default mode None The mode in which to_python should run.If mode is 'json', the output will only contain JSON serializable types.If mode is 'python', the output may contain non-JSON-serializable Python objects. None include None A set of fields to include in the output. None exclude None A set of fields to exclude from the output. None context None Additional context to pass to the serializer. None by_alias None Whether to use the field's alias in the dictionary key if defined. None exclude_unset None Whether to exclude fields that have not been explicitly set. None exclude_defaults None Whether to exclude fields that are set to their default value. None exclude_none None Whether to exclude fields that have a value of None. None round_trip None If True, dumped values should be valid as input for non-idempotent types such as Json[T]. None warnings None How to handle serialization errors. False/\"none\" ignores them, True/\"warn\" logs errors,\"error\" raises a [PydanticSerializationError][pydantic_core.PydanticSerializationError]. None serialize_as_any None Whether to serialize fields with duck-typing serialization behavior. None

Returns:

Type Description None A dictionary representation of the model."},{"location":"api/titiler/pgstac/model/#model_dump_json_8","title":"model_dump_json","text":"
def model_dump_json(\n    self,\n    *,\n    indent: 'int | None' = None,\n    include: 'IncEx' = None,\n    exclude: 'IncEx' = None,\n    context: 'dict[str, Any] | None' = None,\n    by_alias: 'bool' = False,\n    exclude_unset: 'bool' = False,\n    exclude_defaults: 'bool' = False,\n    exclude_none: 'bool' = False,\n    round_trip: 'bool' = False,\n    warnings: \"bool | Literal['none', 'warn', 'error']\" = True,\n    serialize_as_any: 'bool' = False\n) -> 'str'\n

Usage docs: docs.pydantic.dev/2.7/concepts/serialization/#modelmodel_dump_json

Generates a JSON representation of the model using Pydantic's to_json method.

Parameters:

Name Type Description Default indent None Indentation to use in the JSON output. If None is passed, the output will be compact. None include None Field(s) to include in the JSON output. None exclude None Field(s) to exclude from the JSON output. None context None Additional context to pass to the serializer. None by_alias None Whether to serialize using field aliases. None exclude_unset None Whether to exclude fields that have not been explicitly set. None exclude_defaults None Whether to exclude fields that are set to their default value. None exclude_none None Whether to exclude fields that have a value of None. None round_trip None If True, dumped values should be valid as input for non-idempotent types such as Json[T]. None warnings None How to handle serialization errors. False/\"none\" ignores them, True/\"warn\" logs errors,\"error\" raises a [PydanticSerializationError][pydantic_core.PydanticSerializationError]. None serialize_as_any None Whether to serialize fields with duck-typing serialization behavior. None

Returns:

Type Description None A JSON string representation of the model."},{"location":"api/titiler/pgstac/model/#model_post_init_8","title":"model_post_init","text":"
def model_post_init(\n    self,\n    _BaseModel__context: 'Any'\n) -> 'None'\n

Override this method to perform additional initialization after __init__ and model_construct.

This is useful if you want to do some validation that requires the entire model to be initialized.

"},{"location":"api/titiler/pgstac/mosaic/","title":"Module titiler.pgstac.mosaic","text":"

TiTiler.PgSTAC custom Mosaic Backend and Custom STACReader.

"},{"location":"api/titiler/pgstac/mosaic/#variables","title":"Variables","text":"
MAX_THREADS\n
WGS84_CRS\n
cache_config\n
retry_config\n
"},{"location":"api/titiler/pgstac/mosaic/#functions","title":"Functions","text":""},{"location":"api/titiler/pgstac/mosaic/#multi_points_pgstac","title":"multi_points_pgstac","text":"
def multi_points_pgstac(\n    asset_list: Sequence[Dict[str, Any]],\n    reader: Callable[..., rio_tiler.models.PointData],\n    *args: Any,\n    threads: int = 20,\n    allowed_exceptions: Optional[Tuple] = None,\n    **kwargs: Any\n) -> Dict\n

Merge values returned from tasks.

Custom version of rio_tiler.task.multi_values which uses the constructed item_id as the dict key.

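A hedged call-pattern sketch, assuming the reader callable receives one pgstac item dictionary followed by the forwarded positional arguments (the item, href, coordinates and asset name below are placeholders):

from titiler.pgstac.mosaic import CustomSTACReader, multi_points_pgstac

def _point_reader(item, lon, lat, **kwargs):
    # one pgstac item dict in, one rio_tiler PointData out
    with CustomSTACReader(item) as src:
        return src.point(lon, lat, **kwargs)

items = [
    {
        "id": "item-1",
        "collection": "my-collection",
        "bbox": (-88.0, 35.0, -86.0, 37.0),
        "assets": {"cog": {"href": "https://example.com/item-1.tif"}},  # placeholder href
    },
]

values = multi_points_pgstac(items, _point_reader, -87.0, 36.0, threads=4, assets=["cog"])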
"},{"location":"api/titiler/pgstac/mosaic/#classes","title":"Classes","text":""},{"location":"api/titiler/pgstac/mosaic/#customstacreader","title":"CustomSTACReader","text":"
class CustomSTACReader(\n    input: Dict[str, Any],\n    tms: morecantile.models.TileMatrixSet = <TileMatrixSet title='Google Maps Compatible for the World' id='WebMercatorQuad' crs='http://www.opengis.net/def/crs/EPSG/0/3857>,\n    minzoom: int = NOTHING,\n    maxzoom: int = NOTHING,\n    reader: Type[rio_tiler.io.base.BaseReader] = <class 'rio_tiler.io.rasterio.Reader'>,\n    reader_options: Dict = NOTHING,\n    ctx: Any = <class 'rasterio.env.Env'>\n)\n

Simplified STAC Reader.

Inputs should be in the form of: { \"id\": \"IAMASTACITEM\", \"collection\": \"mycollection\", \"bbox\": (0, 0, 10, 10), \"assets\": { \"COG\": { \"href\": \"somewhereovertherainbow.io/cog.tif\" } } }

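A minimal sketch using the input shape described above (the href is a placeholder, so actual reads require a real COG):

from titiler.pgstac.mosaic import CustomSTACReader

item = {
    "id": "IAMASTACITEM",
    "collection": "mycollection",
    "bbox": (0, 0, 10, 10),
    "assets": {"COG": {"href": "https://example.com/cog.tif"}},  # placeholder href
}

with CustomSTACReader(item) as src:
    print(src.bounds)
    img = src.preview(assets=["COG"], max_size=256)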
"},{"location":"api/titiler/pgstac/mosaic/#ancestors-in-mro","title":"Ancestors (in MRO)","text":"
  • rio_tiler.io.base.MultiBaseReader
  • rio_tiler.io.base.SpatialMixin
"},{"location":"api/titiler/pgstac/mosaic/#methods","title":"Methods","text":""},{"location":"api/titiler/pgstac/mosaic/#feature","title":"feature","text":"
def feature(\n    self,\n    shape: Dict,\n    assets: Union[Sequence[str], str] = None,\n    expression: Optional[str] = None,\n    asset_indexes: Optional[Dict[str, Union[Sequence[int], int]]] = None,\n    asset_as_band: bool = False,\n    **kwargs: Any\n) -> rio_tiler.models.ImageData\n

Read and merge parts defined by geojson feature from multiple assets.

Parameters:

Name Type Description Default shape dict Valid GeoJSON feature. None assets sequence of str or str assets to fetch info from. None expression str rio-tiler expression for the asset list (e.g. asset1/asset2+asset3). None asset_indexes dict Band indexes for each asset (e.g {\"asset1\": 1, \"asset2\": (1, 2,)}). None kwargs optional Options to forward to the self.reader.feature method. None

Returns:

Type Description rio_tiler.models.ImageData ImageData instance with data, mask and tile spatial info."},{"location":"api/titiler/pgstac/mosaic/#geographic_bounds","title":"geographic_bounds","text":"
def geographic_bounds(\n    ...\n)\n

Return dataset bounds in geographic_crs.

"},{"location":"api/titiler/pgstac/mosaic/#info","title":"info","text":"
def info(\n    self,\n    assets: Union[Sequence[str], str] = None,\n    **kwargs: Any\n) -> Dict[str, rio_tiler.models.Info]\n

Return metadata from multiple assets.

Parameters:

Name Type Description Default assets sequence of str or str assets to fetch info from. Required keyword argument. None

Returns:

Type Description dict Multiple assets info in form of {\"asset1\": rio_tile.models.Info}."},{"location":"api/titiler/pgstac/mosaic/#merged_statistics","title":"merged_statistics","text":"
def merged_statistics(\n    self,\n    assets: Union[Sequence[str], str] = None,\n    expression: Optional[str] = None,\n    asset_indexes: Optional[Dict[str, Union[Sequence[int], int]]] = None,\n    categorical: bool = False,\n    categories: Optional[List[float]] = None,\n    percentiles: Optional[List[int]] = None,\n    hist_options: Optional[Dict] = None,\n    max_size: int = 1024,\n    **kwargs: Any\n) -> Dict[str, rio_tiler.models.BandStatistics]\n

Return array statistics for multiple assets.

Parameters:

Name Type Description Default assets sequence of str or str assets to fetch info from. None expression str rio-tiler expression for the asset list (e.g. asset1/asset2+asset3). None asset_indexes dict Band indexes for each asset (e.g {\"asset1\": 1, \"asset2\": (1, 2,)}). None categorical bool treat input data as categorical data. Defaults to False. False categories list of numbers list of categories to return value for. None percentiles list of numbers list of percentile values to calculate. Defaults to [2, 98]. [2, 98] hist_options dict Options to forward to numpy.histogram function. None max_size int Limit the size of the longest dimension of the dataset read, respecting bounds X/Y aspect ratio. Defaults to 1024. 1024 kwargs optional Options to forward to the self.preview method. None

Returns:

Type Description Dict[str, rio_tiler.models.BandStatistics] bands statistics."},{"location":"api/titiler/pgstac/mosaic/#parse_expression","title":"parse_expression","text":"
def parse_expression(\n    self,\n    expression: str,\n    asset_as_band: bool = False\n) -> Tuple\n

Parse rio-tiler band math expression.

"},{"location":"api/titiler/pgstac/mosaic/#part","title":"part","text":"
def part(\n    self,\n    bbox: Tuple[float, float, float, float],\n    assets: Union[Sequence[str], str] = None,\n    expression: Optional[str] = None,\n    asset_indexes: Optional[Dict[str, Union[Sequence[int], int]]] = None,\n    asset_as_band: bool = False,\n    **kwargs: Any\n) -> rio_tiler.models.ImageData\n

Read and merge parts from multiple assets.

Parameters:

Name Type Description Default bbox tuple Output bounds (left, bottom, right, top) in target crs. None assets sequence of str or str assets to fetch info from. None expression str rio-tiler expression for the asset list (e.g. asset1/asset2+asset3). None asset_indexes dict Band indexes for each asset (e.g {\"asset1\": 1, \"asset2\": (1, 2,)}). None kwargs optional Options to forward to the self.reader.part method. None

Returns:

Type Description rio_tiler.models.ImageData ImageData instance with data, mask and tile spatial info."},{"location":"api/titiler/pgstac/mosaic/#point","title":"point","text":"
def point(\n    self,\n    lon: float,\n    lat: float,\n    assets: Union[Sequence[str], str] = None,\n    expression: Optional[str] = None,\n    asset_indexes: Optional[Dict[str, Union[Sequence[int], int]]] = None,\n    asset_as_band: bool = False,\n    **kwargs: Any\n) -> rio_tiler.models.PointData\n

Read pixel value from multiple assets.

Parameters:

Name Type Description Default lon float Longitude. None lat float Latitude. None assets sequence of str or str assets to fetch info from. None expression str rio-tiler expression for the asset list (e.g. asset1/asset2+asset3). None asset_indexes dict Band indexes for each asset (e.g {\"asset1\": 1, \"asset2\": (1, 2,)}). None kwargs optional Options to forward to the self.reader.point method. None

Returns:

Type Description None PointData"},{"location":"api/titiler/pgstac/mosaic/#preview","title":"preview","text":"
def preview(\n    self,\n    assets: Union[Sequence[str], str] = None,\n    expression: Optional[str] = None,\n    asset_indexes: Optional[Dict[str, Union[Sequence[int], int]]] = None,\n    asset_as_band: bool = False,\n    **kwargs: Any\n) -> rio_tiler.models.ImageData\n

Read and merge previews from multiple assets.

Parameters:

Name Type Description Default assets sequence of str or str assets to fetch info from. None expression str rio-tiler expression for the asset list (e.g. asset1/asset2+asset3). None asset_indexes dict Band indexes for each asset (e.g {\"asset1\": 1, \"asset2\": (1, 2,)}). None kwargs optional Options to forward to the self.reader.preview method. None

Returns:

Type Description rio_tiler.models.ImageData ImageData instance with data, mask and tile spatial info."},{"location":"api/titiler/pgstac/mosaic/#statistics","title":"statistics","text":"
def statistics(\n    self,\n    assets: Union[Sequence[str], str] = None,\n    asset_indexes: Optional[Dict[str, Union[Sequence[int], int]]] = None,\n    asset_expression: Optional[Dict[str, str]] = None,\n    **kwargs: Any\n) -> Dict[str, Dict[str, rio_tiler.models.BandStatistics]]\n

Return array statistics for multiple assets.

Parameters:

Name Type Description Default assets sequence of str or str assets to fetch info from. None asset_indexes dict Band indexes for each asset (e.g {\"asset1\": 1, \"asset2\": (1, 2,)}). None asset_expression dict rio-tiler expression for each asset (e.g. {\"asset1\": \"b1/b2+b3\", \"asset2\": ...}). None kwargs optional Options to forward to the self.reader.statistics method. None

Returns:

Type Description dict Multiple assets statistics in form of {\"asset1\": {\"1\": rio_tiler.models.BandStatistics, ...}}."},{"location":"api/titiler/pgstac/mosaic/#tile","title":"tile","text":"
def tile(\n    self,\n    tile_x: int,\n    tile_y: int,\n    tile_z: int,\n    assets: Union[Sequence[str], str] = None,\n    expression: Optional[str] = None,\n    asset_indexes: Optional[Dict[str, Union[Sequence[int], int]]] = None,\n    asset_as_band: bool = False,\n    **kwargs: Any\n) -> rio_tiler.models.ImageData\n

Read and merge Web Map tiles from multiple assets.

Parameters:

Name Type Description Default tile_x int Tile's horizontal index. None tile_y int Tile's vertical index. None tile_z int Tile's zoom level index. None assets sequence of str or str assets to fetch info from. None expression str rio-tiler expression for the asset list (e.g. asset1/asset2+asset3). None asset_indexes dict Band indexes for each asset (e.g {\"asset1\": 1, \"asset2\": (1, 2,)}). None kwargs optional Options to forward to the self.reader.tile method. None

Returns:

Type Description rio_tiler.models.ImageData ImageData instance with data, mask and tile spatial info."},{"location":"api/titiler/pgstac/mosaic/#tile_exists","title":"tile_exists","text":"
def tile_exists(\n    self,\n    tile_x: int,\n    tile_y: int,\n    tile_z: int\n) -> bool\n

Check if a tile intersects the dataset bounds.

Parameters:

Name Type Description Default tile_x int Tile's horizontal index. None tile_y int Tile's vertical index. None tile_z int Tile's zoom level index. None

Returns:

Type Description bool True if the tile intersects the dataset bounds."},{"location":"api/titiler/pgstac/mosaic/#pgstacbackend","title":"PGSTACBackend","text":"
class PGSTACBackend(\n    input: str,\n    pool: psycopg_pool.pool.ConnectionPool,\n    tms: morecantile.models.TileMatrixSet = <TileMatrixSet title='Google Maps Compatible for the World' id='WebMercatorQuad' crs='http://www.opengis.net/def/crs/EPSG/0/3857>,\n    minzoom: int = NOTHING,\n    maxzoom: int = NOTHING,\n    reader_options: Dict = NOTHING,\n    bounds: Tuple[float, float, float, float] = (-180, -90, 180, 90),\n    crs: rasterio.crs.CRS = CRS.from_epsg(4326),\n    geographic_crs: rasterio.crs.CRS = CRS.from_epsg(4326)\n)\n

PgSTAC Mosaic Backend.

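A hedged sketch of driving the backend with a registered pgstac search id; the DSN, search hash, tile indices and asset name are placeholders:

from psycopg_pool import ConnectionPool
from titiler.pgstac.mosaic import PGSTACBackend

pool = ConnectionPool(conninfo="postgresql://user:password@localhost:5432/postgis")

# `input` is the pgstac search id (hash) obtained when the search was registered
with PGSTACBackend("6d436413d0eed760acc2f6bd2ef24e9b", pool=pool) as backend:
    items = backend.assets_for_tile(268, 389, 10)
    img, used_assets = backend.tile(268, 389, 10, assets=["cog"])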
"},{"location":"api/titiler/pgstac/mosaic/#ancestors-in-mro_1","title":"Ancestors (in MRO)","text":"
  • cogeo_mosaic.backends.base.BaseBackend
  • rio_tiler.io.base.BaseReader
  • rio_tiler.io.base.SpatialMixin
"},{"location":"api/titiler/pgstac/mosaic/#instance-variables","title":"Instance variables","text":"
center\n

Return center from the mosaic definition.

mosaicid\n

Return sha224 id of the mosaicjson document.

quadkey_zoom\n

Return Quadkey zoom property.

"},{"location":"api/titiler/pgstac/mosaic/#methods_1","title":"Methods","text":""},{"location":"api/titiler/pgstac/mosaic/#assets_for_bbox","title":"assets_for_bbox","text":"
def assets_for_bbox(\n    self,\n    xmin: float,\n    ymin: float,\n    xmax: float,\n    ymax: float,\n    coord_crs: rasterio.crs.CRS = CRS.from_epsg(4326),\n    **kwargs: Any\n) -> List[Dict]\n

Retrieve assets for bbox.

"},{"location":"api/titiler/pgstac/mosaic/#assets_for_point","title":"assets_for_point","text":"
def assets_for_point(\n    self,\n    lng: float,\n    lat: float,\n    coord_crs: rasterio.crs.CRS = CRS.from_epsg(4326),\n    **kwargs: Any\n) -> List[Dict]\n

Retrieve assets for point.

"},{"location":"api/titiler/pgstac/mosaic/#assets_for_tile","title":"assets_for_tile","text":"
def assets_for_tile(\n    self,\n    x: int,\n    y: int,\n    z: int,\n    **kwargs: Any\n) -> List[Dict]\n

Retrieve assets for tile.

"},{"location":"api/titiler/pgstac/mosaic/#feature_1","title":"feature","text":"
def feature(\n    self,\n    shape: Dict,\n    dst_crs: Optional[rasterio.crs.CRS] = None,\n    shape_crs: rasterio.crs.CRS = CRS.from_epsg(4326),\n    max_size: int = 1024,\n    scan_limit: Optional[int] = None,\n    items_limit: Optional[int] = None,\n    time_limit: Optional[int] = None,\n    exitwhenfull: Optional[bool] = None,\n    skipcovered: Optional[bool] = None,\n    **kwargs: Any\n) -> Tuple[rio_tiler.models.ImageData, List[str]]\n

Create an Image from multiple items for a GeoJSON feature.

"},{"location":"api/titiler/pgstac/mosaic/#find_quadkeys","title":"find_quadkeys","text":"
def find_quadkeys(\n    self,\n    tile: morecantile.commons.Tile,\n    quadkey_zoom: int\n) -> List[str]\n

Find quadkeys at the desired zoom for a tile.

Parameters:

Name Type Description Default tile morecantile.Tile Input tile to use when searching for quadkeys None quadkey_zoom int Zoom level None

Returns:

Type Description list List[str] of quadkeys"},{"location":"api/titiler/pgstac/mosaic/#geographic_bounds_1","title":"geographic_bounds","text":"
def geographic_bounds(\n    ...\n)\n

Return dataset bounds in geographic_crs.

"},{"location":"api/titiler/pgstac/mosaic/#get_assets","title":"get_assets","text":"
def get_assets(\n    *args: Any,\n    **kwargs: Any\n)\n
"},{"location":"api/titiler/pgstac/mosaic/#info_1","title":"info","text":"
def info(\n    self,\n    quadkeys: bool = False\n) -> cogeo_mosaic.models.Info\n

Mosaic info.

"},{"location":"api/titiler/pgstac/mosaic/#part_1","title":"part","text":"
def part(\n    self,\n    bbox: Tuple[float, float, float, float],\n    dst_crs: Optional[rasterio.crs.CRS] = None,\n    bounds_crs: rasterio.crs.CRS = CRS.from_epsg(4326),\n    scan_limit: Optional[int] = None,\n    items_limit: Optional[int] = None,\n    time_limit: Optional[int] = None,\n    exitwhenfull: Optional[bool] = None,\n    skipcovered: Optional[bool] = None,\n    **kwargs: Any\n) -> Tuple[rio_tiler.models.ImageData, List[str]]\n

Create an Image from multiple items for a bbox.

"},{"location":"api/titiler/pgstac/mosaic/#point_1","title":"point","text":"
def point(\n    self,\n    lon: float,\n    lat: float,\n    coord_crs: rasterio.crs.CRS = CRS.from_epsg(4326),\n    scan_limit: Optional[int] = None,\n    items_limit: Optional[int] = None,\n    time_limit: Optional[int] = None,\n    exitwhenfull: Optional[bool] = None,\n    skipcovered: Optional[bool] = None,\n    **kwargs: Any\n) -> List\n

Get Point value from multiple observations.

"},{"location":"api/titiler/pgstac/mosaic/#preview_1","title":"preview","text":"
def preview(\n    self\n)\n

Placeholder for BaseReader.preview.

"},{"location":"api/titiler/pgstac/mosaic/#statistics_1","title":"statistics","text":"
def statistics(\n    self\n)\n

Placeholder for BaseReader.statistics.

"},{"location":"api/titiler/pgstac/mosaic/#tile_1","title":"tile","text":"
def tile(\n    self,\n    tile_x: int,\n    tile_y: int,\n    tile_z: int,\n    scan_limit: Optional[int] = None,\n    items_limit: Optional[int] = None,\n    time_limit: Optional[int] = None,\n    exitwhenfull: Optional[bool] = None,\n    skipcovered: Optional[bool] = None,\n    **kwargs: Any\n) -> Tuple[rio_tiler.models.ImageData, List[str]]\n

Get Tile from multiple observations.

"},{"location":"api/titiler/pgstac/mosaic/#tile_exists_1","title":"tile_exists","text":"
def tile_exists(\n    self,\n    tile_x: int,\n    tile_y: int,\n    tile_z: int\n) -> bool\n

Check if a tile intersects the dataset bounds.

Parameters:

Name Type Description Default tile_x int Tile's horizontal index. None tile_y int Tile's vertical index. None tile_z int Tile's zoom level index. None

Returns:

Type Description bool True if the tile intersects the dataset bounds."},{"location":"api/titiler/pgstac/mosaic/#update","title":"update","text":"
def update(\n    self\n) -> None\n

We override the default method.

"},{"location":"api/titiler/pgstac/mosaic/#write","title":"write","text":"
def write(\n    self,\n    overwrite: bool = True\n) -> None\n

This method is not used but is required by the abstract class.

"},{"location":"api/titiler/pgstac/reader/","title":"Module titiler.pgstac.reader","text":"

Custom STAC reader.

"},{"location":"api/titiler/pgstac/reader/#variables","title":"Variables","text":"
DEFAULT_VALID_TYPE\n
WGS84_CRS\n
"},{"location":"api/titiler/pgstac/reader/#classes","title":"Classes","text":""},{"location":"api/titiler/pgstac/reader/#pgstacreader","title":"PgSTACReader","text":"
class PgSTACReader(\n    input: pystac.item.Item,\n    tms: morecantile.models.TileMatrixSet = <TileMatrixSet title='Google Maps Compatible for the World' id='WebMercatorQuad' crs='http://www.opengis.net/def/crs/EPSG/0/3857>,\n    minzoom: int = NOTHING,\n    maxzoom: int = NOTHING,\n    geographic_crs: rasterio.crs.CRS = CRS.from_epsg(4326),\n    include_assets: Optional[Set[str]] = None,\n    exclude_assets: Optional[Set[str]] = None,\n    include_asset_types: Set[str] = {'image/tiff', 'image/tiff; application=geotiff', 'image/tiff; application=geotiff; profile=cloud-optimized', 'image/x.geotiff', 'image/tiff; profile=cloud-optimized; application=geotiff', 'application/x-hdf', 'image/jp2', 'image/vnd.stac.geotiff; cloud-optimized=true', 'application/x-hdf5'},\n    exclude_asset_types: Optional[Set[str]] = None,\n    reader: Type[rio_tiler.io.base.BaseReader] = <class 'rio_tiler.io.rasterio.Reader'>,\n    reader_options: Dict = NOTHING,\n    ctx: Any = <class 'rasterio.env.Env'>\n)\n

Custom STAC Reader.

Only accepts a pystac.Item as input (while rio_tiler.io.STACReader accepts either a url or a pystac.Item).

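A short sketch, assuming an item already loaded with pystac (the path, tile indices and asset name are placeholders):

import pystac
from titiler.pgstac.reader import PgSTACReader

item = pystac.Item.from_file("item.json")  # placeholder path

with PgSTACReader(item) as src:
    info = src.info(assets=["cog"])
    img = src.tile(268, 389, 10, assets=["cog"])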
"},{"location":"api/titiler/pgstac/reader/#ancestors-in-mro","title":"Ancestors (in MRO)","text":"
  • rio_tiler.io.base.MultiBaseReader
  • rio_tiler.io.base.SpatialMixin
"},{"location":"api/titiler/pgstac/reader/#methods","title":"Methods","text":""},{"location":"api/titiler/pgstac/reader/#feature","title":"feature","text":"
def feature(\n    self,\n    shape: Dict,\n    assets: Union[Sequence[str], str] = None,\n    expression: Optional[str] = None,\n    asset_indexes: Optional[Dict[str, Union[Sequence[int], int]]] = None,\n    asset_as_band: bool = False,\n    **kwargs: Any\n) -> rio_tiler.models.ImageData\n

Read and merge parts defined by geojson feature from multiple assets.

Parameters:

Name Type Description Default shape dict Valid GeoJSON feature. None assets sequence of str or str assets to fetch info from. None expression str rio-tiler expression for the asset list (e.g. asset1/asset2+asset3). None asset_indexes dict Band indexes for each asset (e.g {\"asset1\": 1, \"asset2\": (1, 2,)}). None kwargs optional Options to forward to the self.reader.feature method. None

Returns:

Type Description rio_tiler.models.ImageData ImageData instance with data, mask and tile spatial info."},{"location":"api/titiler/pgstac/reader/#geographic_bounds","title":"geographic_bounds","text":"
def geographic_bounds(\n    ...\n)\n

Return dataset bounds in geographic_crs.

"},{"location":"api/titiler/pgstac/reader/#info","title":"info","text":"
def info(\n    self,\n    assets: Union[Sequence[str], str] = None,\n    **kwargs: Any\n) -> Dict[str, rio_tiler.models.Info]\n

Return metadata from multiple assets.

Parameters:

Name Type Description Default assets sequence of str or str assets to fetch info from. Required keyword argument. None

Returns:

Type Description dict Multiple assets info in form of {\"asset1\": rio_tile.models.Info}."},{"location":"api/titiler/pgstac/reader/#merged_statistics","title":"merged_statistics","text":"
def merged_statistics(\n    self,\n    assets: Union[Sequence[str], str] = None,\n    expression: Optional[str] = None,\n    asset_indexes: Optional[Dict[str, Union[Sequence[int], int]]] = None,\n    categorical: bool = False,\n    categories: Optional[List[float]] = None,\n    percentiles: Optional[List[int]] = None,\n    hist_options: Optional[Dict] = None,\n    max_size: int = 1024,\n    **kwargs: Any\n) -> Dict[str, rio_tiler.models.BandStatistics]\n

Return array statistics for multiple assets.

Parameters:

Name Type Description Default assets sequence of str or str assets to fetch info from. None expression str rio-tiler expression for the asset list (e.g. asset1/asset2+asset3). None asset_indexes dict Band indexes for each asset (e.g {\"asset1\": 1, \"asset2\": (1, 2,)}). None categorical bool treat input data as categorical data. Defaults to False. False categories list of numbers list of categories to return value for. None percentiles list of numbers list of percentile values to calculate. Defaults to [2, 98]. [2, 98] hist_options dict Options to forward to numpy.histogram function. None max_size int Limit the size of the longest dimension of the dataset read, respecting bounds X/Y aspect ratio. Defaults to 1024. 1024 kwargs optional Options to forward to the self.preview method. None

Returns:

Type Description Dict[str, rio_tiler.models.BandStatistics] bands statistics."},{"location":"api/titiler/pgstac/reader/#parse_expression","title":"parse_expression","text":"
def parse_expression(\n    self,\n    expression: str,\n    asset_as_band: bool = False\n) -> Tuple\n

Parse rio-tiler band math expression.

"},{"location":"api/titiler/pgstac/reader/#part","title":"part","text":"
def part(\n    self,\n    bbox: Tuple[float, float, float, float],\n    assets: Union[Sequence[str], str] = None,\n    expression: Optional[str] = None,\n    asset_indexes: Optional[Dict[str, Union[Sequence[int], int]]] = None,\n    asset_as_band: bool = False,\n    **kwargs: Any\n) -> rio_tiler.models.ImageData\n

Read and merge parts from multiple assets.

Parameters:

Name Type Description Default bbox tuple Output bounds (left, bottom, right, top) in target crs. None assets sequence of str or str assets to fetch info from. None expression str rio-tiler expression for the asset list (e.g. asset1/asset2+asset3). None asset_indexes dict Band indexes for each asset (e.g {\"asset1\": 1, \"asset2\": (1, 2,)}). None kwargs optional Options to forward to the self.reader.part method. None

Returns:

Type Description rio_tiler.models.ImageData ImageData instance with data, mask and tile spatial info."},{"location":"api/titiler/pgstac/reader/#point","title":"point","text":"
def point(\n    self,\n    lon: float,\n    lat: float,\n    assets: Union[Sequence[str], str] = None,\n    expression: Optional[str] = None,\n    asset_indexes: Optional[Dict[str, Union[Sequence[int], int]]] = None,\n    asset_as_band: bool = False,\n    **kwargs: Any\n) -> rio_tiler.models.PointData\n

Read pixel value from multiple assets.

Parameters:

Name Type Description Default lon float Longitude. None lat float Latitude. None assets sequence of str or str assets to fetch info from. None expression str rio-tiler expression for the asset list (e.g. asset1/asset2+asset3). None asset_indexes dict Band indexes for each asset (e.g {\"asset1\": 1, \"asset2\": (1, 2,)}). None kwargs optional Options to forward to the self.reader.point method. None

Returns:

Type Description None PointData"},{"location":"api/titiler/pgstac/reader/#preview","title":"preview","text":"
def preview(\n    self,\n    assets: Union[Sequence[str], str] = None,\n    expression: Optional[str] = None,\n    asset_indexes: Optional[Dict[str, Union[Sequence[int], int]]] = None,\n    asset_as_band: bool = False,\n    **kwargs: Any\n) -> rio_tiler.models.ImageData\n

Read and merge previews from multiple assets.

Parameters:

Name Type Description Default assets sequence of str or str assets to fetch info from. None expression str rio-tiler expression for the asset list (e.g. asset1/asset2+asset3). None asset_indexes dict Band indexes for each asset (e.g {\"asset1\": 1, \"asset2\": (1, 2,)}). None kwargs optional Options to forward to the self.reader.preview method. None

Returns:

Type Description rio_tiler.models.ImageData ImageData instance with data, mask and tile spatial info."},{"location":"api/titiler/pgstac/reader/#statistics","title":"statistics","text":"
def statistics(\n    self,\n    assets: Union[Sequence[str], str] = None,\n    asset_indexes: Optional[Dict[str, Union[Sequence[int], int]]] = None,\n    asset_expression: Optional[Dict[str, str]] = None,\n    **kwargs: Any\n) -> Dict[str, Dict[str, rio_tiler.models.BandStatistics]]\n

Return array statistics for multiple assets.

Parameters:

Name Type Description Default assets sequence of str or str assets to fetch info from. None asset_indexes dict Band indexes for each asset (e.g {\"asset1\": 1, \"asset2\": (1, 2,)}). None asset_expression dict rio-tiler expression for each asset (e.g. {\"asset1\": \"b1/b2+b3\", \"asset2\": ...}). None kwargs optional Options to forward to the self.reader.statistics method. None

Returns:

Type Description dict Multiple assets statistics in form of {\"asset1\": {\"1\": rio_tiler.models.BandStatistics, ...}}."},{"location":"api/titiler/pgstac/reader/#tile","title":"tile","text":"
def tile(\n    self,\n    tile_x: int,\n    tile_y: int,\n    tile_z: int,\n    assets: Union[Sequence[str], str] = None,\n    expression: Optional[str] = None,\n    asset_indexes: Optional[Dict[str, Union[Sequence[int], int]]] = None,\n    asset_as_band: bool = False,\n    **kwargs: Any\n) -> rio_tiler.models.ImageData\n

Read and merge Web Map tiles from multiple assets.

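A sketch rendering one map tile to PNG, guarded with tile_exists (documented below); tile indices and asset names are illustrative:

if src.tile_exists(536, 363, 10):\n    img = src.tile(536, 363, 10, assets=['red', 'green', 'blue'])\n    png = img.render(img_format='PNG')\n
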
Parameters:

Name Type Description Default tile_x int Tile's horizontal index. None tile_y int Tile's vertical index. None tile_z int Tile's zoom level index. None assets sequence of str or str assets to fetch info from. None expression str rio-tiler expression for the asset list (e.g. asset1/asset2+asset3). None asset_indexes dict Band indexes for each asset (e.g {\"asset1\": 1, \"asset2\": (1, 2,)}). None kwargs optional Options to forward to the self.reader.tile method. None

Returns:

Type Description rio_tiler.models.ImageData ImageData instance with data, mask and tile spatial info."},{"location":"api/titiler/pgstac/reader/#tile_exists","title":"tile_exists","text":"
def tile_exists(\n    self,\n    tile_x: int,\n    tile_y: int,\n    tile_z: int\n) -> bool\n

Check if a tile intersects the dataset bounds.

Parameters:

Name Type Description Default tile_x int Tile's horizontal index. None tile_y int Tile's vertical index. None tile_z int Tile's zoom level index. None

Returns:

Type Description bool True if the tile intersects the dataset bounds."},{"location":"api/titiler/pgstac/settings/","title":"Module titiler.pgstac.settings","text":"

API settings.

"},{"location":"api/titiler/pgstac/settings/#functions","title":"Functions","text":""},{"location":"api/titiler/pgstac/settings/#retrysettings","title":"RetrySettings","text":"
def RetrySettings(\n\n) -> titiler.pgstac.settings._RetrySettings\n

This function returns a cached instance of the RetrySettings object.

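Because the instance is cached, repeated calls return the same object:

from titiler.pgstac.settings import RetrySettings\n\nassert RetrySettings() is RetrySettings()\n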
"},{"location":"api/titiler/pgstac/settings/#classes","title":"Classes","text":""},{"location":"api/titiler/pgstac/settings/#apisettings","title":"ApiSettings","text":"
class ApiSettings(\n    __pydantic_self__,\n    _case_sensitive: 'bool | None' = None,\n    _env_prefix: 'str | None' = None,\n    _env_file: 'DotenvType | None' = PosixPath('.'),\n    _env_file_encoding: 'str | None' = None,\n    _env_ignore_empty: 'bool | None' = None,\n    _env_nested_delimiter: 'str | None' = None,\n    _env_parse_none_str: 'str | None' = None,\n    _secrets_dir: 'str | Path | None' = None,\n    **values: 'Any'\n)\n

API settings

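As with any pydantic-settings class, values can be passed as keyword arguments or read from the environment (or a .env file). A sketch; the TITILER_PGSTAC_API_ environment prefix is an assumption, check model_config for the actual prefix:

import os\n\nos.environ['TITILER_PGSTAC_API_CORS_ORIGINS'] = 'https://a.example,https://b.example'\nsettings = ApiSettings()\nprint(settings.cors_origins)  # handled by the parse_cors_origin validator documented below\n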
"},{"location":"api/titiler/pgstac/settings/#ancestors-in-mro","title":"Ancestors (in MRO)","text":"
  • pydantic_settings.main.BaseSettings
  • pydantic.main.BaseModel
"},{"location":"api/titiler/pgstac/settings/#class-variables","title":"Class variables","text":"
model_computed_fields\n
model_config\n
model_fields\n
"},{"location":"api/titiler/pgstac/settings/#static-methods","title":"Static methods","text":""},{"location":"api/titiler/pgstac/settings/#construct","title":"construct","text":"
def construct(\n    _fields_set: 'set[str] | None' = None,\n    **values: 'Any'\n) -> 'Model'\n
"},{"location":"api/titiler/pgstac/settings/#from_orm","title":"from_orm","text":"
def from_orm(\n    obj: 'Any'\n) -> 'Model'\n
"},{"location":"api/titiler/pgstac/settings/#model_construct","title":"model_construct","text":"
def model_construct(\n    _fields_set: 'set[str] | None' = None,\n    **values: 'Any'\n) -> 'Model'\n

Creates a new instance of the Model class with validated data.

Creates a new model setting __dict__ and __pydantic_fields_set__ from trusted or pre-validated data. Default values are respected, but no other validation is performed.

Note

model_construct() generally respects the model_config.extra setting on the provided model. That is, if model_config.extra == 'allow', then all extra passed values are added to the model instance's __dict__ and __pydantic_extra__ fields. If model_config.extra == 'ignore' (the default), then all extra passed values are ignored. Because no validation is performed with a call to model_construct(), having model_config.extra == 'forbid' does not result in an error if extra values are passed, but they will be ignored.

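A generic Pydantic sketch (not specific to this settings class) showing that model_construct skips validation:

from pydantic import BaseModel\n\nclass P(BaseModel):\n    x: int\n\np = P.model_construct(x='not an int')  # accepted as-is: no validation is performed\n
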
Parameters:

Name Type Description Default _fields_set None The set of field names accepted for the Model instance. None values None Trusted or pre-validated data dictionary. None

Returns:

Type Description None A new instance of the Model class with validated data."},{"location":"api/titiler/pgstac/settings/#model_json_schema","title":"model_json_schema","text":"
def model_json_schema(\n    by_alias: 'bool' = True,\n    ref_template: 'str' = '#/$defs/{model}',\n    schema_generator: 'type[GenerateJsonSchema]' = <class 'pydantic.json_schema.GenerateJsonSchema'>,\n    mode: 'JsonSchemaMode' = 'validation'\n) -> 'dict[str, Any]'\n

Generates a JSON schema for a model class.

Parameters:

Name Type Description Default by_alias None Whether to use attribute aliases or not. None ref_template None The reference template. None schema_generator None To override the logic used to generate the JSON schema, as a subclass of GenerateJsonSchema with your desired modifications. None mode None The mode in which to generate the schema. None

Returns:

Type Description None The JSON schema for the given model class."},{"location":"api/titiler/pgstac/settings/#model_parametrized_name","title":"model_parametrized_name","text":"
def model_parametrized_name(\n    params: 'tuple[type[Any], ...]'\n) -> 'str'\n

Compute the class name for parametrizations of generic classes.

This method can be overridden to achieve a custom naming scheme for generic BaseModels.

Parameters:

Name Type Description Default params None Tuple of types of the class. Given a generic class Model with 2 type variables and a concrete model Model[str, int], the value (str, int) would be passed to params. None

Returns:

Type Description None String representing the new class where params are passed to cls as type variables.

Raises:

Type Description TypeError Raised when trying to generate concrete names for non-generic models."},{"location":"api/titiler/pgstac/settings/#model_rebuild","title":"model_rebuild","text":"
def model_rebuild(\n    *,\n    force: 'bool' = False,\n    raise_errors: 'bool' = True,\n    _parent_namespace_depth: 'int' = 2,\n    _types_namespace: 'dict[str, Any] | None' = None\n) -> 'bool | None'\n

Try to rebuild the pydantic-core schema for the model.

This may be necessary when one of the annotations is a ForwardRef which could not be resolved during the initial attempt to build the schema, and automatic rebuilding fails.

Parameters:

Name Type Description Default force None Whether to force the rebuilding of the model schema, defaults to False. None raise_errors None Whether to raise errors, defaults to True. None _parent_namespace_depth None The depth level of the parent namespace, defaults to 2. None _types_namespace None The types namespace, defaults to None. None

Returns:

Type Description None Returns None if the schema is already \"complete\" and rebuilding was not required. If rebuilding was required, returns True if rebuilding was successful, otherwise False."},{"location":"api/titiler/pgstac/settings/#model_validate","title":"model_validate","text":"
def model_validate(\n    obj: 'Any',\n    *,\n    strict: 'bool | None' = None,\n    from_attributes: 'bool | None' = None,\n    context: 'dict[str, Any] | None' = None\n) -> 'Model'\n

Validate a pydantic model instance.

Parameters:

Name Type Description Default obj None The object to validate. None strict None Whether to enforce types strictly. None from_attributes None Whether to extract data from object attributes. None context None Additional context to pass to the validator. None

Returns:

Type Description None The validated model instance.

Raises:

Type Description ValidationError If the object could not be validated."},{"location":"api/titiler/pgstac/settings/#model_validate_json","title":"model_validate_json","text":"
def model_validate_json(\n    json_data: 'str | bytes | bytearray',\n    *,\n    strict: 'bool | None' = None,\n    context: 'dict[str, Any] | None' = None\n) -> 'Model'\n

Usage docs: docs.pydantic.dev/2.7/concepts/json/#json-parsing

Validate the given JSON data against the Pydantic model.

Parameters:

Name Type Description Default json_data None The JSON data to validate. None strict None Whether to enforce types strictly. None context None Extra variables to pass to the validator. None

Returns:

Type Description None The validated Pydantic model.

Raises:

Type Description ValueError If json_data is not a JSON string."},{"location":"api/titiler/pgstac/settings/#model_validate_strings","title":"model_validate_strings","text":"
def model_validate_strings(\n    obj: 'Any',\n    *,\n    strict: 'bool | None' = None,\n    context: 'dict[str, Any] | None' = None\n) -> 'Model'\n

Validate the given object containing string data against the Pydantic model.

Parameters:

Name Type Description Default obj None The object containing string data to validate. None strict None Whether to enforce types strictly. None context None Extra variables to pass to the validator. None

Returns:

Type Description None The validated Pydantic model."},{"location":"api/titiler/pgstac/settings/#parse_cors_origin","title":"parse_cors_origin","text":"
def parse_cors_origin(\n    v\n)\n

Parse CORS origins.

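This validator is what allows cors_origins to be supplied as a single comma separated string; the exact splitting behaviour is assumed from the usual titiler convention:

settings = ApiSettings(cors_origins='https://a.example, https://b.example')\n# settings.cors_origins then holds the individual origins\n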
"},{"location":"api/titiler/pgstac/settings/#parse_file","title":"parse_file","text":"
def parse_file(\n    path: 'str | Path',\n    *,\n    content_type: 'str | None' = None,\n    encoding: 'str' = 'utf8',\n    proto: 'DeprecatedParseProtocol | None' = None,\n    allow_pickle: 'bool' = False\n) -> 'Model'\n
"},{"location":"api/titiler/pgstac/settings/#parse_obj","title":"parse_obj","text":"
def parse_obj(\n    obj: 'Any'\n) -> 'Model'\n
"},{"location":"api/titiler/pgstac/settings/#parse_raw","title":"parse_raw","text":"
def parse_raw(\n    b: 'str | bytes',\n    *,\n    content_type: 'str | None' = None,\n    encoding: 'str' = 'utf8',\n    proto: 'DeprecatedParseProtocol | None' = None,\n    allow_pickle: 'bool' = False\n) -> 'Model'\n
"},{"location":"api/titiler/pgstac/settings/#schema","title":"schema","text":"
def schema(\n    by_alias: 'bool' = True,\n    ref_template: 'str' = '#/$defs/{model}'\n) -> 'typing.Dict[str, Any]'\n
"},{"location":"api/titiler/pgstac/settings/#schema_json","title":"schema_json","text":"
def schema_json(\n    *,\n    by_alias: 'bool' = True,\n    ref_template: 'str' = '#/$defs/{model}',\n    **dumps_kwargs: 'Any'\n) -> 'str'\n
"},{"location":"api/titiler/pgstac/settings/#settings_customise_sources","title":"settings_customise_sources","text":"
def settings_customise_sources(\n    settings_cls: 'type[BaseSettings]',\n    init_settings: 'PydanticBaseSettingsSource',\n    env_settings: 'PydanticBaseSettingsSource',\n    dotenv_settings: 'PydanticBaseSettingsSource',\n    file_secret_settings: 'PydanticBaseSettingsSource'\n) -> 'tuple[PydanticBaseSettingsSource, ...]'\n

Define the sources and their order for loading the settings values.

Parameters:

Name Type Description Default settings_cls None The Settings class. None init_settings None The InitSettingsSource instance. None env_settings None The EnvSettingsSource instance. None dotenv_settings None The DotEnvSettingsSource instance. None file_secret_settings None The SecretsSettingsSource instance. None

Returns:

Type Description None A tuple containing the sources and their order for loading the settings values."},{"location":"api/titiler/pgstac/settings/#update_forward_refs","title":"update_forward_refs","text":"
def update_forward_refs(\n    **localns: 'Any'\n) -> 'None'\n
"},{"location":"api/titiler/pgstac/settings/#validate","title":"validate","text":"
def validate(\n    value: 'Any'\n) -> 'Model'\n
"},{"location":"api/titiler/pgstac/settings/#instance-variables","title":"Instance variables","text":"
model_extra\n

Get extra fields set during validation.

model_fields_set\n

Returns the set of fields that have been explicitly set on this model instance.

"},{"location":"api/titiler/pgstac/settings/#methods","title":"Methods","text":""},{"location":"api/titiler/pgstac/settings/#copy","title":"copy","text":"
def copy(\n    self: 'Model',\n    *,\n    include: 'AbstractSetIntStr | MappingIntStrAny | None' = None,\n    exclude: 'AbstractSetIntStr | MappingIntStrAny | None' = None,\n    update: 'typing.Dict[str, Any] | None' = None,\n    deep: 'bool' = False\n) -> 'Model'\n

Returns a copy of the model.

Deprecated

This method is now deprecated; use model_copy instead.

If you need include or exclude, use:

data = self.model_dump(include=include, exclude=exclude, round_trip=True)\ndata = {**data, **(update or {})}\ncopied = self.model_validate(data)\n

Parameters:

Name Type Description Default include None Optional set or mapping specifying which fields to include in the copied model. None exclude None Optional set or mapping specifying which fields to exclude in the copied model. None update None Optional dictionary of field-value pairs to override field values in the copied model. None deep None If True, the values of fields that are Pydantic models will be deep-copied. None

Returns:

Type Description None A copy of the model with included, excluded and updated fields as specified."},{"location":"api/titiler/pgstac/settings/#dict","title":"dict","text":"
def dict(\n    self,\n    *,\n    include: 'IncEx' = None,\n    exclude: 'IncEx' = None,\n    by_alias: 'bool' = False,\n    exclude_unset: 'bool' = False,\n    exclude_defaults: 'bool' = False,\n    exclude_none: 'bool' = False\n) -> 'typing.Dict[str, Any]'\n
"},{"location":"api/titiler/pgstac/settings/#json","title":"json","text":"
def json(\n    self,\n    *,\n    include: 'IncEx' = None,\n    exclude: 'IncEx' = None,\n    by_alias: 'bool' = False,\n    exclude_unset: 'bool' = False,\n    exclude_defaults: 'bool' = False,\n    exclude_none: 'bool' = False,\n    encoder: 'typing.Callable[[Any], Any] | None' = PydanticUndefined,\n    models_as_dict: 'bool' = PydanticUndefined,\n    **dumps_kwargs: 'Any'\n) -> 'str'\n
"},{"location":"api/titiler/pgstac/settings/#model_copy","title":"model_copy","text":"
def model_copy(\n    self: 'Model',\n    *,\n    update: 'dict[str, Any] | None' = None,\n    deep: 'bool' = False\n) -> 'Model'\n

Usage docs: docs.pydantic.dev/2.7/concepts/serialization/#model_copy

Returns a copy of the model.

Parameters:

Name Type Description Default update None Values to change/add in the new model. Note: the data is not validated before creating the new model. You should trust this data. None deep None Set to True to make a deep copy of the model. None

Returns:

Type Description None New model instance."},{"location":"api/titiler/pgstac/settings/#model_dump","title":"model_dump","text":"
def model_dump(\n    self,\n    *,\n    mode: \"Literal['json', 'python'] | str\" = 'python',\n    include: 'IncEx' = None,\n    exclude: 'IncEx' = None,\n    context: 'dict[str, Any] | None' = None,\n    by_alias: 'bool' = False,\n    exclude_unset: 'bool' = False,\n    exclude_defaults: 'bool' = False,\n    exclude_none: 'bool' = False,\n    round_trip: 'bool' = False,\n    warnings: \"bool | Literal['none', 'warn', 'error']\" = True,\n    serialize_as_any: 'bool' = False\n) -> 'dict[str, Any]'\n

Usage docs: docs.pydantic.dev/2.7/concepts/serialization/#modelmodel_dump

Generate a dictionary representation of the model, optionally specifying which fields to include or exclude.

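A generic Pydantic sketch of the include/exclude behaviour (not specific to this settings class):

from pydantic import BaseModel\n\nclass P(BaseModel):\n    x: int = 1\n    y: int = 2\n\nP().model_dump(exclude={'y'})  # {'x': 1}\n
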
Parameters:

Name Type Description Default mode None The mode in which to_python should run. If mode is 'json', the output will only contain JSON serializable types. If mode is 'python', the output may contain non-JSON-serializable Python objects. None include None A set of fields to include in the output. None exclude None A set of fields to exclude from the output. None context None Additional context to pass to the serializer. None by_alias None Whether to use the field's alias in the dictionary key if defined. None exclude_unset None Whether to exclude fields that have not been explicitly set. None exclude_defaults None Whether to exclude fields that are set to their default value. None exclude_none None Whether to exclude fields that have a value of None. None round_trip None If True, dumped values should be valid as input for non-idempotent types such as Json[T]. None warnings None How to handle serialization errors. False/\"none\" ignores them, True/\"warn\" logs errors, \"error\" raises a [PydanticSerializationError][pydantic_core.PydanticSerializationError]. None serialize_as_any None Whether to serialize fields with duck-typing serialization behavior. None

Returns:

Type Description None A dictionary representation of the model."},{"location":"api/titiler/pgstac/settings/#model_dump_json","title":"model_dump_json","text":"
def model_dump_json(\n    self,\n    *,\n    indent: 'int | None' = None,\n    include: 'IncEx' = None,\n    exclude: 'IncEx' = None,\n    context: 'dict[str, Any] | None' = None,\n    by_alias: 'bool' = False,\n    exclude_unset: 'bool' = False,\n    exclude_defaults: 'bool' = False,\n    exclude_none: 'bool' = False,\n    round_trip: 'bool' = False,\n    warnings: \"bool | Literal['none', 'warn', 'error']\" = True,\n    serialize_as_any: 'bool' = False\n) -> 'str'\n

Usage docs: docs.pydantic.dev/2.7/concepts/serialization/#modelmodel_dump_json

Generates a JSON representation of the model using Pydantic's to_json method.

Parameters:

Name Type Description Default indent None Indentation to use in the JSON output. If None is passed, the output will be compact. None include None Field(s) to include in the JSON output. None exclude None Field(s) to exclude from the JSON output. None context None Additional context to pass to the serializer. None by_alias None Whether to serialize using field aliases. None exclude_unset None Whether to exclude fields that have not been explicitly set. None exclude_defaults None Whether to exclude fields that are set to their default value. None exclude_none None Whether to exclude fields that have a value of None. None round_trip None If True, dumped values should be valid as input for non-idempotent types such as Json[T]. None warnings None How to handle serialization errors. False/\"none\" ignores them, True/\"warn\" logs errors, \"error\" raises a [PydanticSerializationError][pydantic_core.PydanticSerializationError]. None serialize_as_any None Whether to serialize fields with duck-typing serialization behavior. None

Returns:

Type Description None A JSON string representation of the model."},{"location":"api/titiler/pgstac/settings/#model_post_init","title":"model_post_init","text":"
def model_post_init(\n    self,\n    _BaseModel__context: 'Any'\n) -> 'None'\n

Override this method to perform additional initialization after __init__ and model_construct.

This is useful if you want to do some validation that requires the entire model to be initialized.

"},{"location":"api/titiler/pgstac/settings/#cachesettings","title":"CacheSettings","text":"
class CacheSettings(\n    __pydantic_self__,\n    _case_sensitive: 'bool | None' = None,\n    _env_prefix: 'str | None' = None,\n    _env_file: 'DotenvType | None' = PosixPath('.'),\n    _env_file_encoding: 'str | None' = None,\n    _env_ignore_empty: 'bool | None' = None,\n    _env_nested_delimiter: 'str | None' = None,\n    _env_parse_none_str: 'str | None' = None,\n    _secrets_dir: 'str | Path | None' = None,\n    **values: 'Any'\n)\n

Cache settings

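Cache settings are typically provided through environment variables. A sketch; the TITILER_PGSTAC_CACHE_ prefix and the ttl field name are assumptions, check model_config and model_fields for the real names:

import os\n\nos.environ['TITILER_PGSTAC_CACHE_TTL'] = '300'\ncache_settings = CacheSettings()\n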
"},{"location":"api/titiler/pgstac/settings/#ancestors-in-mro_1","title":"Ancestors (in MRO)","text":"
  • pydantic_settings.main.BaseSettings
  • pydantic.main.BaseModel
"},{"location":"api/titiler/pgstac/settings/#class-variables_1","title":"Class variables","text":"
model_computed_fields\n
model_config\n
model_fields\n
"},{"location":"api/titiler/pgstac/settings/#static-methods_1","title":"Static methods","text":""},{"location":"api/titiler/pgstac/settings/#construct_1","title":"construct","text":"
def construct(\n    _fields_set: 'set[str] | None' = None,\n    **values: 'Any'\n) -> 'Model'\n
"},{"location":"api/titiler/pgstac/settings/#from_orm_1","title":"from_orm","text":"
def from_orm(\n    obj: 'Any'\n) -> 'Model'\n
"},{"location":"api/titiler/pgstac/settings/#model_construct_1","title":"model_construct","text":"
def model_construct(\n    _fields_set: 'set[str] | None' = None,\n    **values: 'Any'\n) -> 'Model'\n

Creates a new instance of the Model class with validated data.

Creates a new model setting __dict__ and __pydantic_fields_set__ from trusted or pre-validated data. Default values are respected, but no other validation is performed.

Note

model_construct() generally respects the model_config.extra setting on the provided model. That is, if model_config.extra == 'allow', then all extra passed values are added to the model instance's __dict__ and __pydantic_extra__ fields. If model_config.extra == 'ignore' (the default), then all extra passed values are ignored. Because no validation is performed with a call to model_construct(), having model_config.extra == 'forbid' does not result in an error if extra values are passed, but they will be ignored.

Parameters:

Name Type Description Default _fields_set None The set of field names accepted for the Model instance. None values None Trusted or pre-validated data dictionary. None

Returns:

Type Description None A new instance of the Model class with validated data."},{"location":"api/titiler/pgstac/settings/#model_json_schema_1","title":"model_json_schema","text":"
def model_json_schema(\n    by_alias: 'bool' = True,\n    ref_template: 'str' = '#/$defs/{model}',\n    schema_generator: 'type[GenerateJsonSchema]' = <class 'pydantic.json_schema.GenerateJsonSchema'>,\n    mode: 'JsonSchemaMode' = 'validation'\n) -> 'dict[str, Any]'\n

Generates a JSON schema for a model class.

Parameters:

Name Type Description Default by_alias None Whether to use attribute aliases or not. None ref_template None The reference template. None schema_generator None To override the logic used to generate the JSON schema, as a subclass of GenerateJsonSchema with your desired modifications. None mode None The mode in which to generate the schema. None

Returns:

Type Description None The JSON schema for the given model class."},{"location":"api/titiler/pgstac/settings/#model_parametrized_name_1","title":"model_parametrized_name","text":"
def model_parametrized_name(\n    params: 'tuple[type[Any], ...]'\n) -> 'str'\n

Compute the class name for parametrizations of generic classes.

This method can be overridden to achieve a custom naming scheme for generic BaseModels.

Parameters:

Name Type Description Default params None Tuple of types of the class. Given a generic class Model with 2 type variables and a concrete model Model[str, int], the value (str, int) would be passed to params. None

Returns:

Type Description None String representing the new class where params are passed to cls as type variables.

Raises:

Type Description TypeError Raised when trying to generate concrete names for non-generic models."},{"location":"api/titiler/pgstac/settings/#model_rebuild_1","title":"model_rebuild","text":"
def model_rebuild(\n    *,\n    force: 'bool' = False,\n    raise_errors: 'bool' = True,\n    _parent_namespace_depth: 'int' = 2,\n    _types_namespace: 'dict[str, Any] | None' = None\n) -> 'bool | None'\n

Try to rebuild the pydantic-core schema for the model.

This may be necessary when one of the annotations is a ForwardRef which could not be resolved during the initial attempt to build the schema, and automatic rebuilding fails.

Parameters:

Name Type Description Default force None Whether to force the rebuilding of the model schema, defaults to False. None raise_errors None Whether to raise errors, defaults to True. None _parent_namespace_depth None The depth level of the parent namespace, defaults to 2. None _types_namespace None The types namespace, defaults to None. None

Returns:

Type Description None Returns None if the schema is already \"complete\" and rebuilding was not required. If rebuilding was required, returns True if rebuilding was successful, otherwise False."},{"location":"api/titiler/pgstac/settings/#model_validate_1","title":"model_validate","text":"
def model_validate(\n    obj: 'Any',\n    *,\n    strict: 'bool | None' = None,\n    from_attributes: 'bool | None' = None,\n    context: 'dict[str, Any] | None' = None\n) -> 'Model'\n

Validate a pydantic model instance.

Parameters:

Name Type Description Default obj None The object to validate. None strict None Whether to enforce types strictly. None from_attributes None Whether to extract data from object attributes. None context None Additional context to pass to the validator. None

Returns:

Type Description None The validated model instance.

Raises:

Type Description ValidationError If the object could not be validated."},{"location":"api/titiler/pgstac/settings/#model_validate_json_1","title":"model_validate_json","text":"
def model_validate_json(\n    json_data: 'str | bytes | bytearray',\n    *,\n    strict: 'bool | None' = None,\n    context: 'dict[str, Any] | None' = None\n) -> 'Model'\n

Usage docs: docs.pydantic.dev/2.7/concepts/json/#json-parsing

Validate the given JSON data against the Pydantic model.

Parameters:

Name Type Description Default json_data None The JSON data to validate. None strict None Whether to enforce types strictly. None context None Extra variables to pass to the validator. None

Returns:

Type Description None The validated Pydantic model.

Raises:

Type Description ValueError If json_data is not a JSON string."},{"location":"api/titiler/pgstac/settings/#model_validate_strings_1","title":"model_validate_strings","text":"
def model_validate_strings(\n    obj: 'Any',\n    *,\n    strict: 'bool | None' = None,\n    context: 'dict[str, Any] | None' = None\n) -> 'Model'\n

Validate the given object containing string data against the Pydantic model.

Parameters:

Name Type Description Default obj None The object containing string data to validate. None strict None Whether to enforce types strictly. None context None Extra variables to pass to the validator. None

Returns:

Type Description None The validated Pydantic model."},{"location":"api/titiler/pgstac/settings/#parse_file_1","title":"parse_file","text":"
def parse_file(\n    path: 'str | Path',\n    *,\n    content_type: 'str | None' = None,\n    encoding: 'str' = 'utf8',\n    proto: 'DeprecatedParseProtocol | None' = None,\n    allow_pickle: 'bool' = False\n) -> 'Model'\n
"},{"location":"api/titiler/pgstac/settings/#parse_obj_1","title":"parse_obj","text":"
def parse_obj(\n    obj: 'Any'\n) -> 'Model'\n
"},{"location":"api/titiler/pgstac/settings/#parse_raw_1","title":"parse_raw","text":"
def parse_raw(\n    b: 'str | bytes',\n    *,\n    content_type: 'str | None' = None,\n    encoding: 'str' = 'utf8',\n    proto: 'DeprecatedParseProtocol | None' = None,\n    allow_pickle: 'bool' = False\n) -> 'Model'\n
"},{"location":"api/titiler/pgstac/settings/#schema_1","title":"schema","text":"
def schema(\n    by_alias: 'bool' = True,\n    ref_template: 'str' = '#/$defs/{model}'\n) -> 'typing.Dict[str, Any]'\n
"},{"location":"api/titiler/pgstac/settings/#schema_json_1","title":"schema_json","text":"
def schema_json(\n    *,\n    by_alias: 'bool' = True,\n    ref_template: 'str' = '#/$defs/{model}',\n    **dumps_kwargs: 'Any'\n) -> 'str'\n
"},{"location":"api/titiler/pgstac/settings/#settings_customise_sources_1","title":"settings_customise_sources","text":"
def settings_customise_sources(\n    settings_cls: 'type[BaseSettings]',\n    init_settings: 'PydanticBaseSettingsSource',\n    env_settings: 'PydanticBaseSettingsSource',\n    dotenv_settings: 'PydanticBaseSettingsSource',\n    file_secret_settings: 'PydanticBaseSettingsSource'\n) -> 'tuple[PydanticBaseSettingsSource, ...]'\n

Define the sources and their order for loading the settings values.

Parameters:

Name Type Description Default settings_cls None The Settings class. None init_settings None The InitSettingsSource instance. None env_settings None The EnvSettingsSource instance. None dotenv_settings None The DotEnvSettingsSource instance. None file_secret_settings None The SecretsSettingsSource instance. None

Returns:

Type Description None A tuple containing the sources and their order for loading the settings values."},{"location":"api/titiler/pgstac/settings/#update_forward_refs_1","title":"update_forward_refs","text":"
def update_forward_refs(\n    **localns: 'Any'\n) -> 'None'\n
"},{"location":"api/titiler/pgstac/settings/#validate_1","title":"validate","text":"
def validate(\n    value: 'Any'\n) -> 'Model'\n
"},{"location":"api/titiler/pgstac/settings/#instance-variables_1","title":"Instance variables","text":"
model_extra\n

Get extra fields set during validation.

model_fields_set\n

Returns the set of fields that have been explicitly set on this model instance.

"},{"location":"api/titiler/pgstac/settings/#methods_1","title":"Methods","text":""},{"location":"api/titiler/pgstac/settings/#check_enable","title":"check_enable","text":"
def check_enable(\n    self\n)\n

Check if cache is disabled.

"},{"location":"api/titiler/pgstac/settings/#copy_1","title":"copy","text":"
def copy(\n    self: 'Model',\n    *,\n    include: 'AbstractSetIntStr | MappingIntStrAny | None' = None,\n    exclude: 'AbstractSetIntStr | MappingIntStrAny | None' = None,\n    update: 'typing.Dict[str, Any] | None' = None,\n    deep: 'bool' = False\n) -> 'Model'\n

Returns a copy of the model.

Deprecated

This method is now deprecated; use model_copy instead.

If you need include or exclude, use:

data = self.model_dump(include=include, exclude=exclude, round_trip=True)\ndata = {**data, **(update or {})}\ncopied = self.model_validate(data)\n

Parameters:

Name Type Description Default include None Optional set or mapping specifying which fields to include in the copied model. None exclude None Optional set or mapping specifying which fields to exclude in the copied model. None update None Optional dictionary of field-value pairs to override field values in the copied model. None deep None If True, the values of fields that are Pydantic models will be deep-copied. None

Returns:

Type Description None A copy of the model with included, excluded and updated fields as specified."},{"location":"api/titiler/pgstac/settings/#dict_1","title":"dict","text":"
def dict(\n    self,\n    *,\n    include: 'IncEx' = None,\n    exclude: 'IncEx' = None,\n    by_alias: 'bool' = False,\n    exclude_unset: 'bool' = False,\n    exclude_defaults: 'bool' = False,\n    exclude_none: 'bool' = False\n) -> 'typing.Dict[str, Any]'\n
"},{"location":"api/titiler/pgstac/settings/#json_1","title":"json","text":"
def json(\n    self,\n    *,\n    include: 'IncEx' = None,\n    exclude: 'IncEx' = None,\n    by_alias: 'bool' = False,\n    exclude_unset: 'bool' = False,\n    exclude_defaults: 'bool' = False,\n    exclude_none: 'bool' = False,\n    encoder: 'typing.Callable[[Any], Any] | None' = PydanticUndefined,\n    models_as_dict: 'bool' = PydanticUndefined,\n    **dumps_kwargs: 'Any'\n) -> 'str'\n
"},{"location":"api/titiler/pgstac/settings/#model_copy_1","title":"model_copy","text":"
def model_copy(\n    self: 'Model',\n    *,\n    update: 'dict[str, Any] | None' = None,\n    deep: 'bool' = False\n) -> 'Model'\n

Usage docs: docs.pydantic.dev/2.7/concepts/serialization/#model_copy

Returns a copy of the model.

Parameters:

Name Type Description Default update None Values to change/add in the new model. Note: the data is not validated before creating the new model. You should trust this data. None deep None Set to True to make a deep copy of the model. None

Returns:

Type Description None New model instance."},{"location":"api/titiler/pgstac/settings/#model_dump_1","title":"model_dump","text":"
def model_dump(\n    self,\n    *,\n    mode: \"Literal['json', 'python'] | str\" = 'python',\n    include: 'IncEx' = None,\n    exclude: 'IncEx' = None,\n    context: 'dict[str, Any] | None' = None,\n    by_alias: 'bool' = False,\n    exclude_unset: 'bool' = False,\n    exclude_defaults: 'bool' = False,\n    exclude_none: 'bool' = False,\n    round_trip: 'bool' = False,\n    warnings: \"bool | Literal['none', 'warn', 'error']\" = True,\n    serialize_as_any: 'bool' = False\n) -> 'dict[str, Any]'\n

Usage docs: docs.pydantic.dev/2.7/concepts/serialization/#modelmodel_dump

Generate a dictionary representation of the model, optionally specifying which fields to include or exclude.

Parameters:

Name Type Description Default mode None The mode in which to_python should run. If mode is 'json', the output will only contain JSON serializable types. If mode is 'python', the output may contain non-JSON-serializable Python objects. None include None A set of fields to include in the output. None exclude None A set of fields to exclude from the output. None context None Additional context to pass to the serializer. None by_alias None Whether to use the field's alias in the dictionary key if defined. None exclude_unset None Whether to exclude fields that have not been explicitly set. None exclude_defaults None Whether to exclude fields that are set to their default value. None exclude_none None Whether to exclude fields that have a value of None. None round_trip None If True, dumped values should be valid as input for non-idempotent types such as Json[T]. None warnings None How to handle serialization errors. False/\"none\" ignores them, True/\"warn\" logs errors, \"error\" raises a [PydanticSerializationError][pydantic_core.PydanticSerializationError]. None serialize_as_any None Whether to serialize fields with duck-typing serialization behavior. None

Returns:

Type Description None A dictionary representation of the model."},{"location":"api/titiler/pgstac/settings/#model_dump_json_1","title":"model_dump_json","text":"
def model_dump_json(\n    self,\n    *,\n    indent: 'int | None' = None,\n    include: 'IncEx' = None,\n    exclude: 'IncEx' = None,\n    context: 'dict[str, Any] | None' = None,\n    by_alias: 'bool' = False,\n    exclude_unset: 'bool' = False,\n    exclude_defaults: 'bool' = False,\n    exclude_none: 'bool' = False,\n    round_trip: 'bool' = False,\n    warnings: \"bool | Literal['none', 'warn', 'error']\" = True,\n    serialize_as_any: 'bool' = False\n) -> 'str'\n

Usage docs: docs.pydantic.dev/2.7/concepts/serialization/#modelmodel_dump_json

Generates a JSON representation of the model using Pydantic's to_json method.

Parameters:

Name Type Description Default indent None Indentation to use in the JSON output. If None is passed, the output will be compact. None include None Field(s) to include in the JSON output. None exclude None Field(s) to exclude from the JSON output. None context None Additional context to pass to the serializer. None by_alias None Whether to serialize using field aliases. None exclude_unset None Whether to exclude fields that have not been explicitly set. None exclude_defaults None Whether to exclude fields that are set to their default value. None exclude_none None Whether to exclude fields that have a value of None. None round_trip None If True, dumped values should be valid as input for non-idempotent types such as Json[T]. None warnings None How to handle serialization errors. False/\"none\" ignores them, True/\"warn\" logs errors, \"error\" raises a [PydanticSerializationError][pydantic_core.PydanticSerializationError]. None serialize_as_any None Whether to serialize fields with duck-typing serialization behavior. None

Returns:

Type Description None A JSON string representation of the model."},{"location":"api/titiler/pgstac/settings/#model_post_init_1","title":"model_post_init","text":"
def model_post_init(\n    self,\n    _BaseModel__context: 'Any'\n) -> 'None'\n

Override this method to perform additional initialization after __init__ and model_construct.

This is useful if you want to do some validation that requires the entire model to be initialized.

"},{"location":"api/titiler/pgstac/settings/#postgressettings","title":"PostgresSettings","text":"
class PostgresSettings(\n    __pydantic_self__,\n    _case_sensitive: 'bool | None' = None,\n    _env_prefix: 'str | None' = None,\n    _env_file: 'DotenvType | None' = PosixPath('.'),\n    _env_file_encoding: 'str | None' = None,\n    _env_ignore_empty: 'bool | None' = None,\n    _env_nested_delimiter: 'str | None' = None,\n    _env_parse_none_str: 'str | None' = None,\n    _secrets_dir: 'str | Path | None' = None,\n    **values: 'Any'\n)\n

Postgres-specific API settings.

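A configuration sketch; the field names come from the attribute table below, and reading them from upper-case environment variables is standard pydantic-settings behaviour:

import os\n\nos.environ.update(\n    {\n        'POSTGRES_USER': 'username',\n        'POSTGRES_PASS': 'password',\n        'POSTGRES_HOST': 'localhost',\n        'POSTGRES_PORT': '5432',\n        'POSTGRES_DBNAME': 'postgis',\n    }\n)\npg_settings = PostgresSettings()  # assemble_db_connection (documented below) validates the database config built from these values\n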
"},{"location":"api/titiler/pgstac/settings/#attributes","title":"Attributes","text":"Name Type Description Default postgres_user None postgres username. None postgres_pass None postgres password. None postgres_host None database hostname. None postgres_port None database port. None postgres_dbname None database name. None"},{"location":"api/titiler/pgstac/settings/#ancestors-in-mro_2","title":"Ancestors (in MRO)","text":"
  • pydantic_settings.main.BaseSettings
  • pydantic.main.BaseModel
"},{"location":"api/titiler/pgstac/settings/#class-variables_2","title":"Class variables","text":"
model_computed_fields\n
model_config\n
model_fields\n
"},{"location":"api/titiler/pgstac/settings/#static-methods_2","title":"Static methods","text":""},{"location":"api/titiler/pgstac/settings/#assemble_db_connection","title":"assemble_db_connection","text":"
def assemble_db_connection(\n    v: Optional[str],\n    info: pydantic_core.core_schema.ValidationInfo\n) -> Any\n

Validate database config.

"},{"location":"api/titiler/pgstac/settings/#construct_2","title":"construct","text":"
def construct(\n    _fields_set: 'set[str] | None' = None,\n    **values: 'Any'\n) -> 'Model'\n
"},{"location":"api/titiler/pgstac/settings/#from_orm_2","title":"from_orm","text":"
def from_orm(\n    obj: 'Any'\n) -> 'Model'\n
"},{"location":"api/titiler/pgstac/settings/#model_construct_2","title":"model_construct","text":"
def model_construct(\n    _fields_set: 'set[str] | None' = None,\n    **values: 'Any'\n) -> 'Model'\n

Creates a new instance of the Model class with validated data.

Creates a new model setting __dict__ and __pydantic_fields_set__ from trusted or pre-validated data. Default values are respected, but no other validation is performed.

Note

model_construct() generally respects the model_config.extra setting on the provided model. That is, if model_config.extra == 'allow', then all extra passed values are added to the model instance's __dict__ and __pydantic_extra__ fields. If model_config.extra == 'ignore' (the default), then all extra passed values are ignored. Because no validation is performed with a call to model_construct(), having model_config.extra == 'forbid' does not result in an error if extra values are passed, but they will be ignored.

Parameters:

Name Type Description Default _fields_set None The set of field names accepted for the Model instance. None values None Trusted or pre-validated data dictionary. None

Returns:

Type Description None A new instance of the Model class with validated data."},{"location":"api/titiler/pgstac/settings/#model_json_schema_2","title":"model_json_schema","text":"
def model_json_schema(\n    by_alias: 'bool' = True,\n    ref_template: 'str' = '#/$defs/{model}',\n    schema_generator: 'type[GenerateJsonSchema]' = <class 'pydantic.json_schema.GenerateJsonSchema'>,\n    mode: 'JsonSchemaMode' = 'validation'\n) -> 'dict[str, Any]'\n

Generates a JSON schema for a model class.

Parameters:

Name Type Description Default by_alias None Whether to use attribute aliases or not. None ref_template None The reference template. None schema_generator None To override the logic used to generate the JSON schema, as a subclass of GenerateJsonSchema with your desired modifications. None mode None The mode in which to generate the schema. None

Returns:

Type Description None The JSON schema for the given model class."},{"location":"api/titiler/pgstac/settings/#model_parametrized_name_2","title":"model_parametrized_name","text":"
def model_parametrized_name(\n    params: 'tuple[type[Any], ...]'\n) -> 'str'\n

Compute the class name for parametrizations of generic classes.

This method can be overridden to achieve a custom naming scheme for generic BaseModels.

Parameters:

Name Type Description Default params None Tuple of types of the class. Given a generic class Model with 2 type variables and a concrete model Model[str, int], the value (str, int) would be passed to params. None

Returns:

Type Description None String representing the new class where params are passed to cls as type variables.

Raises:

Type Description TypeError Raised when trying to generate concrete names for non-generic models."},{"location":"api/titiler/pgstac/settings/#model_rebuild_2","title":"model_rebuild","text":"
def model_rebuild(\n    *,\n    force: 'bool' = False,\n    raise_errors: 'bool' = True,\n    _parent_namespace_depth: 'int' = 2,\n    _types_namespace: 'dict[str, Any] | None' = None\n) -> 'bool | None'\n

Try to rebuild the pydantic-core schema for the model.

This may be necessary when one of the annotations is a ForwardRef which could not be resolved during the initial attempt to build the schema, and automatic rebuilding fails.

Parameters:

Name Type Description Default force None Whether to force the rebuilding of the model schema, defaults to False. None raise_errors None Whether to raise errors, defaults to True. None _parent_namespace_depth None The depth level of the parent namespace, defaults to 2. None _types_namespace None The types namespace, defaults to None. None

Returns:

Type Description None Returns None if the schema is already \"complete\" and rebuilding was not required. If rebuilding was required, returns True if rebuilding was successful, otherwise False."},{"location":"api/titiler/pgstac/settings/#model_validate_2","title":"model_validate","text":"
def model_validate(\n    obj: 'Any',\n    *,\n    strict: 'bool | None' = None,\n    from_attributes: 'bool | None' = None,\n    context: 'dict[str, Any] | None' = None\n) -> 'Model'\n

Validate a pydantic model instance.

Parameters:

Name Type Description Default obj None The object to validate. None strict None Whether to enforce types strictly. None from_attributes None Whether to extract data from object attributes. None context None Additional context to pass to the validator. None

Returns:

Type Description None The validated model instance.

Raises:

Type Description ValidationError If the object could not be validated."},{"location":"api/titiler/pgstac/settings/#model_validate_json_2","title":"model_validate_json","text":"
def model_validate_json(\n    json_data: 'str | bytes | bytearray',\n    *,\n    strict: 'bool | None' = None,\n    context: 'dict[str, Any] | None' = None\n) -> 'Model'\n

Usage docs: docs.pydantic.dev/2.7/concepts/json/#json-parsing

Validate the given JSON data against the Pydantic model.

Parameters:

Name Type Description Default json_data None The JSON data to validate. None strict None Whether to enforce types strictly. None context None Extra variables to pass to the validator. None

Returns:

Type Description None The validated Pydantic model.

Raises:

Type Description ValueError If json_data is not a JSON string."},{"location":"api/titiler/pgstac/settings/#model_validate_strings_2","title":"model_validate_strings","text":"
def model_validate_strings(\n    obj: 'Any',\n    *,\n    strict: 'bool | None' = None,\n    context: 'dict[str, Any] | None' = None\n) -> 'Model'\n

Validate the given object containing string data against the Pydantic model.

Parameters:

Name Type Description Default obj None The object containing string data to validate. None strict None Whether to enforce types strictly. None context None Extra variables to pass to the validator. None

Returns:

Type Description None The validated Pydantic model."},{"location":"api/titiler/pgstac/settings/#parse_file_2","title":"parse_file","text":"
def parse_file(\n    path: 'str | Path',\n    *,\n    content_type: 'str | None' = None,\n    encoding: 'str' = 'utf8',\n    proto: 'DeprecatedParseProtocol | None' = None,\n    allow_pickle: 'bool' = False\n) -> 'Model'\n
"},{"location":"api/titiler/pgstac/settings/#parse_obj_2","title":"parse_obj","text":"
def parse_obj(\n    obj: 'Any'\n) -> 'Model'\n
"},{"location":"api/titiler/pgstac/settings/#parse_raw_2","title":"parse_raw","text":"
def parse_raw(\n    b: 'str | bytes',\n    *,\n    content_type: 'str | None' = None,\n    encoding: 'str' = 'utf8',\n    proto: 'DeprecatedParseProtocol | None' = None,\n    allow_pickle: 'bool' = False\n) -> 'Model'\n
"},{"location":"api/titiler/pgstac/settings/#schema_2","title":"schema","text":"
def schema(\n    by_alias: 'bool' = True,\n    ref_template: 'str' = '#/$defs/{model}'\n) -> 'typing.Dict[str, Any]'\n
"},{"location":"api/titiler/pgstac/settings/#schema_json_2","title":"schema_json","text":"
def schema_json(\n    *,\n    by_alias: 'bool' = True,\n    ref_template: 'str' = '#/$defs/{model}',\n    **dumps_kwargs: 'Any'\n) -> 'str'\n
"},{"location":"api/titiler/pgstac/settings/#settings_customise_sources_2","title":"settings_customise_sources","text":"
def settings_customise_sources(\n    settings_cls: 'type[BaseSettings]',\n    init_settings: 'PydanticBaseSettingsSource',\n    env_settings: 'PydanticBaseSettingsSource',\n    dotenv_settings: 'PydanticBaseSettingsSource',\n    file_secret_settings: 'PydanticBaseSettingsSource'\n) -> 'tuple[PydanticBaseSettingsSource, ...]'\n

Define the sources and their order for loading the settings values.

Parameters:

Name Type Description Default settings_cls None The Settings class. None init_settings None The InitSettingsSource instance. None env_settings None The EnvSettingsSource instance. None dotenv_settings None The DotEnvSettingsSource instance. None file_secret_settings None The SecretsSettingsSource instance. None

Returns:

Type Description None A tuple containing the sources and their order for loading the settings values."},{"location":"api/titiler/pgstac/settings/#update_forward_refs_2","title":"update_forward_refs","text":"
def update_forward_refs(\n    **localns: 'Any'\n) -> 'None'\n
"},{"location":"api/titiler/pgstac/settings/#validate_2","title":"validate","text":"
def validate(\n    value: 'Any'\n) -> 'Model'\n
"},{"location":"api/titiler/pgstac/settings/#instance-variables_2","title":"Instance variables","text":"
model_extra\n

Get extra fields set during validation.

model_fields_set\n

Returns the set of fields that have been explicitly set on this model instance.

"},{"location":"api/titiler/pgstac/settings/#methods_2","title":"Methods","text":""},{"location":"api/titiler/pgstac/settings/#copy_2","title":"copy","text":"
def copy(\n    self: 'Model',\n    *,\n    include: 'AbstractSetIntStr | MappingIntStrAny | None' = None,\n    exclude: 'AbstractSetIntStr | MappingIntStrAny | None' = None,\n    update: 'typing.Dict[str, Any] | None' = None,\n    deep: 'bool' = False\n) -> 'Model'\n

Returns a copy of the model.

Deprecated

This method is now deprecated; use model_copy instead.

If you need include or exclude, use:

data = self.model_dump(include=include, exclude=exclude, round_trip=True)\ndata = {**data, **(update or {})}\ncopied = self.model_validate(data)\n

Parameters:

Name Type Description Default include None Optional set or mapping specifying which fields to include in the copied model. None exclude None Optional set or mapping specifying which fields to exclude in the copied model. None update None Optional dictionary of field-value pairs to override field values in the copied model. None deep None If True, the values of fields that are Pydantic models will be deep-copied. None

Returns:

Type Description None A copy of the model with included, excluded and updated fields as specified."},{"location":"api/titiler/pgstac/settings/#dict_2","title":"dict","text":"
def dict(\n    self,\n    *,\n    include: 'IncEx' = None,\n    exclude: 'IncEx' = None,\n    by_alias: 'bool' = False,\n    exclude_unset: 'bool' = False,\n    exclude_defaults: 'bool' = False,\n    exclude_none: 'bool' = False\n) -> 'typing.Dict[str, Any]'\n
"},{"location":"api/titiler/pgstac/settings/#json_2","title":"json","text":"
def json(\n    self,\n    *,\n    include: 'IncEx' = None,\n    exclude: 'IncEx' = None,\n    by_alias: 'bool' = False,\n    exclude_unset: 'bool' = False,\n    exclude_defaults: 'bool' = False,\n    exclude_none: 'bool' = False,\n    encoder: 'typing.Callable[[Any], Any] | None' = PydanticUndefined,\n    models_as_dict: 'bool' = PydanticUndefined,\n    **dumps_kwargs: 'Any'\n) -> 'str'\n
"},{"location":"api/titiler/pgstac/settings/#model_copy_2","title":"model_copy","text":"
def model_copy(\n    self: 'Model',\n    *,\n    update: 'dict[str, Any] | None' = None,\n    deep: 'bool' = False\n) -> 'Model'\n

Usage docs: docs.pydantic.dev/2.7/concepts/serialization/#model_copy

Returns a copy of the model.

Parameters:

Name Type Description Default update None Values to change/add in the new model. Note: the data is not validated before creating the new model. You should trust this data. None deep None Set to True to make a deep copy of the model. None

Returns:

Type Description None New model instance."},{"location":"api/titiler/pgstac/settings/#model_dump_2","title":"model_dump","text":"
def model_dump(\n    self,\n    *,\n    mode: \"Literal['json', 'python'] | str\" = 'python',\n    include: 'IncEx' = None,\n    exclude: 'IncEx' = None,\n    context: 'dict[str, Any] | None' = None,\n    by_alias: 'bool' = False,\n    exclude_unset: 'bool' = False,\n    exclude_defaults: 'bool' = False,\n    exclude_none: 'bool' = False,\n    round_trip: 'bool' = False,\n    warnings: \"bool | Literal['none', 'warn', 'error']\" = True,\n    serialize_as_any: 'bool' = False\n) -> 'dict[str, Any]'\n

Usage docs: docs.pydantic.dev/2.7/concepts/serialization/#modelmodel_dump

Generate a dictionary representation of the model, optionally specifying which fields to include or exclude.

Parameters:

Name Type Description Default mode None The mode in which to_python should run. If mode is 'json', the output will only contain JSON serializable types. If mode is 'python', the output may contain non-JSON-serializable Python objects. None include None A set of fields to include in the output. None exclude None A set of fields to exclude from the output. None context None Additional context to pass to the serializer. None by_alias None Whether to use the field's alias in the dictionary key if defined. None exclude_unset None Whether to exclude fields that have not been explicitly set. None exclude_defaults None Whether to exclude fields that are set to their default value. None exclude_none None Whether to exclude fields that have a value of None. None round_trip None If True, dumped values should be valid as input for non-idempotent types such as Json[T]. None warnings None How to handle serialization errors. False/\"none\" ignores them, True/\"warn\" logs errors, \"error\" raises a [PydanticSerializationError][pydantic_core.PydanticSerializationError]. None serialize_as_any None Whether to serialize fields with duck-typing serialization behavior. None

Returns:

Type Description None A dictionary representation of the model."},{"location":"api/titiler/pgstac/settings/#model_dump_json_2","title":"model_dump_json","text":"
def model_dump_json(\n    self,\n    *,\n    indent: 'int | None' = None,\n    include: 'IncEx' = None,\n    exclude: 'IncEx' = None,\n    context: 'dict[str, Any] | None' = None,\n    by_alias: 'bool' = False,\n    exclude_unset: 'bool' = False,\n    exclude_defaults: 'bool' = False,\n    exclude_none: 'bool' = False,\n    round_trip: 'bool' = False,\n    warnings: \"bool | Literal['none', 'warn', 'error']\" = True,\n    serialize_as_any: 'bool' = False\n) -> 'str'\n

Usage docs: docs.pydantic.dev/2.7/concepts/serialization/#modelmodel_dump_json

Generates a JSON representation of the model using Pydantic's to_json method.

Parameters:

Name Type Description Default indent None Indentation to use in the JSON output. If None is passed, the output will be compact. None include None Field(s) to include in the JSON output. None exclude None Field(s) to exclude from the JSON output. None context None Additional context to pass to the serializer. None by_alias None Whether to serialize using field aliases. None exclude_unset None Whether to exclude fields that have not been explicitly set. None exclude_defaults None Whether to exclude fields that are set to their default value. None exclude_none None Whether to exclude fields that have a value of None. None round_trip None If True, dumped values should be valid as input for non-idempotent types such as Json[T]. None warnings None How to handle serialization errors. False/\"none\" ignores them, True/\"warn\" logs errors, \"error\" raises a [PydanticSerializationError][pydantic_core.PydanticSerializationError]. None serialize_as_any None Whether to serialize fields with duck-typing serialization behavior. None

Returns:

Type Description None A JSON string representation of the model."},{"location":"api/titiler/pgstac/settings/#model_post_init_2","title":"model_post_init","text":"
def model_post_init(\n    self,\n    _BaseModel__context: 'Any'\n) -> 'None'\n

Override this method to perform additional initialization after __init__ and model_construct.

This is useful if you want to do some validation that requires the entire model to be initialized.

"},{"location":"api/titiler/pgstac/utils/","title":"Module titiler.pgstac.utils","text":"

titiler.pgstac utilities.

"},{"location":"api/titiler/pgstac/utils/#functions","title":"Functions","text":""},{"location":"api/titiler/pgstac/utils/#retry","title":"retry","text":"
def retry(\n    tries: int,\n    exceptions: Union[Type[Exception], Sequence[Type[Exception]]] = <class 'Exception'>,\n    delay: float = 0.0\n)\n

Retry Decorator

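Usage sketch as a decorator; the wrapped function and the chosen arguments are illustrative:

from titiler.pgstac.utils import retry\n\n@retry(tries=3, exceptions=(ConnectionError,), delay=0.5)\ndef fetch_metadata():\n    ...\n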
"},{"location":"endpoints/","title":"Endpoints","text":"

By default the main application (titiler.pgstac.main.app) provides four sets of endpoints:

  • Searches: Dynamic mosaic tiler based on PgSTAC Search Query
  • Collections: Dynamic mosaic tiler based on STAC Collection
  • Items: Dynamic tiler for single STAC item (stored in PgSTAC)
  • TileMatrixSet: Available TileMatrixSets for the service
"},{"location":"endpoints/collections_endpoints/","title":"Collections","text":""},{"location":"endpoints/collections_endpoints/#stac-collections-endpoints","title":"STAC Collections endpoints","text":"Method URL Output Description GET /collections/{collection_id}/{lon},{lat}/assets JSON Return a list of assets which overlap a given point GET /collections/{collection_id}/tiles[/{TileMatrixSetId}]/{z}/{x}/{Y}/assets JSON Return a list of assets which overlap a given tile GET /collections/{collection_id}/tiles[/{TileMatrixSetId}]/{z}/{x}/{y}[@{scale}x][.{format}] image/bin Create a web map tile image for a collection and a tile index GET /collections/{collection_id}[/{TileMatrixSetId}]/tilejson.json JSON (TileJSON) Return a Mapbox TileJSON document GET /collections/{collection_id}[/{TileMatrixSetId}]/WMTSCapabilities.xml XML return OGC WMTS Get Capabilities GET /collections/{collection_id}[/{TileMatrixSetId}]/map HTML simple map viewer POST /collections/{collection_id}/statistics GeoJSON (Statistics) Return statistics for geojson features GET /collections/{collection_id}/bbox/{minx},{miny},{maxx},{maxy}[/{width}x{height}].{format} image/bin Create an image from part of a dataset POST /collections/{collection_id}/feature[/{width}x{height}][.{format}] image/bin Create an image from a GeoJSON feature GET /collections/{collection_id}/point/{lon},{lat} JSON (Point) Return pixel values from assets intersecting with a given point GET /collections/{collection_id}/info JSON ([Info][info_model]) Return Search query infos from collection_id"},{"location":"endpoints/collections_endpoints/#tiles","title":"Tiles","text":"

:endpoint:/collections/{collection_id}/tiles[/{TileMatrixSetId}]/{z}/{x}/{y}[@{scale}x][.{format}]

  • PathParams:

    • collection_id: STAC Collection Identifier.
    • TileMatrixSetId: TileMatrixSet name, default is WebMercatorQuad. OPTIONAL
    • z: Tile's zoom level.
    • x: Tile's column.
    • y: Tile's row.
    • scale: Tile size scale, default is set to 1 (256x256). OPTIONAL
    • format: Output image format, default is set to None and will be either JPEG or PNG depending on masked value. OPTIONAL
  • QueryParams:

    • assets (array[str]): asset names.
    • expression (str): rio-tiler's math expression with asset names (e.g Asset1_b1/Asset2_b1).
    • asset_as_band (bool): tell rio-tiler that each asset is a 1 band dataset, so expression Asset1/Asset2 can be passed.
    • asset_bidx (array[str]): Per asset band index (e.g Asset1|1;2;3).
    • nodata: Overwrite internal Nodata value. OPTIONAL
    • unscale (bool): Apply dataset internal Scale/Offset.
    • resampling (str): RasterIO resampling algorithm. Defaults to nearest.
    • reproject (str): WarpKernel resampling algorithm (only used when doing re-projection). Defaults to nearest.
    • algorithm (str): Custom algorithm name (e.g hillshade).
    • algorithm_params (str): JSON encoded algorithm parameters.
    • rescale (array[str]): Comma (',') delimited Min,Max range (e.g rescale=0,1000, rescale=0,1000&rescale=0,3000&rescale=0,2000).
    • color_formula (str): rio-color formula.
    • colormap (str): JSON encoded custom Colormap.
    • colormap_name (str): rio-tiler color map name.
    • return_mask (bool): Add mask to the output data. Default is True.
    • buffer (float): Buffer on each side of the given tile. It must be a multiple of 0.5. Output tilesize will be expanded to tilesize + 2 * buffer (e.g 0.5 = 257x257, 1.0 = 258x258).
    • padding (int): Padding to apply to each tile edge. Helps reduce resampling artefacts along edges. Defaults to 0
    • pixel_selection (str): Pixel selection method (cogeotiff.github.io/rio-tiler/mosaic/).
    • scan_limit (int): Return as soon as we scan N items, Default is 10,000 in PgSTAC.
    • items_limit (int): Return as soon as we have N items per geometry, Default is 100 in PgSTAC.
    • time_limit (int): Return after N seconds to avoid long requests, Default is 5sec in PgSTAC.
    • exitwhenfull (bool): Return as soon as the geometry is fully covered, Default is True in PgSTAC.
    • skipcovered (bool): Skip any items that would show up completely under the previous items, Default is True in PgSTAC.

Important

assets OR expression is required

Example:

  • https://myendpoint/collections/my-collection/tiles/1/2/3?assets=B01
  • https://myendpoint/collections/my-collection/tiles/1/2/3.jpg?assets=B01
  • https://myendpoint/collections/my-collection/tiles/WorldCRS84Quad/1/2/3@2x.png?assets=B01&assets=B02&assets=B03
  • https://myendpoint/collections/my-collection/tiles/WorldCRS84Quad/1/2/3?assets=B01&rescale=0,1000&colormap_name=cfastie
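
A minimal Python sketch of fetching one mosaic tile (hostname, collection id, asset name and tile index are placeholders):

```python
import httpx

endpoint = "https://myendpoint"   # placeholder deployment
collection = "my-collection"      # placeholder collection id
z, x, y = 8, 87, 48               # any tile index inside the mosaic bounds

resp = httpx.get(
    f"{endpoint}/collections/{collection}/tiles/WebMercatorQuad/{z}/{x}/{y}@1x.png",
    params={"assets": "B01", "rescale": "0,1000", "colormap_name": "viridis"},
    timeout=30,
)
resp.raise_for_status()

with open("tile.png", "wb") as f:
    f.write(resp.content)
```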
"},{"location":"endpoints/collections_endpoints/#tilesjson","title":"TilesJSON","text":"

:endpoint:/collections/{collection_id}[/{TileMatrixSetId}]/tilejson.json

  • PathParams:

    • collection_id: STAC Collection Identifier.
    • TileMatrixSetId: TileMatrixSet name, default is WebMercatorQuad. OPTIONAL
  • QueryParams:

    • tile_format: Output image format, default is set to None and will be either JPEG or PNG depending on masked value.
    • tile_scale: Tile size scale, default is set to 1 (256x256). OPTIONAL
    • minzoom: Overwrite default minzoom. OPTIONAL
    • maxzoom: Overwrite default maxzoom. OPTIONAL
    • assets (array[str]): asset names.
    • expression (str): rio-tiler's math expression with asset names (e.g Asset1_b1/Asset2_b1).
    • asset_as_band (bool): tell rio-tiler that each asset is a 1 band dataset, so expression Asset1/Asset2 can be passed.
    • asset_bidx (array[str]): Per asset band index (e.g Asset1|1;2;3).
    • nodata (str, int, float): Overwrite internal Nodata value.
    • unscale (bool): Apply dataset internal Scale/Offset.
    • resampling (str): RasterIO resampling algorithm. Defaults to nearest.
    • reproject (str): WarpKernel resampling algorithm (only used when doing re-projection). Defaults to nearest.
    • algorithm (str): Custom algorithm name (e.g hillshade).
    • algorithm_params (str): JSON encoded algorithm parameters.
    • rescale (array[str]): Comma (',') delimited Min,Max range (e.g rescale=0,1000, rescale=0,1000&rescale=0,3000&rescale=0,2000).
    • color_formula (str): rio-color formula.
    • colormap (str): JSON encoded custom Colormap.
    • colormap_name (str): rio-tiler color map name.
    • return_mask (bool): Add mask to the output data. Default is True.
    • buffer (float): Buffer on each side of the given tile. It must be a multiple of 0.5. Output tilesize will be expanded to tilesize + 2 * buffer (e.g 0.5 = 257x257, 1.0 = 258x258).
    • padding (int): Padding to apply to each tile edge. Helps reduce resampling artefacts along edges. Defaults to 0
    • pixel_selection (str): Pixel selection method (cogeotiff.github.io/rio-tiler/mosaic/).
    • scan_limit (int): Return as soon as we scan N items, Default is 10,000 in PgSTAC.
    • items_limit (int): Return as soon as we have N items per geometry, Default is 100 in PgSTAC.
    • time_limit (int): Return after N seconds to avoid long requests, Default is 5sec in PgSTAC.
    • exitwhenfull (bool): Return as soon as the geometry is fully covered, Default is True in PgSTAC.
    • skipcovered (bool): Skip any items that would show up completely under the previous items, Default is True in PgSTAC.

Important

assets OR expression is required

Example:

  • https://myendpoint/collections/my-collection/tilejson.json?assets=B01
  • https://myendpoint/collections/my-collection/tilejson.json?assets=B01&tile_format=png
  • https://myendpoint/collections/my-collection/WorldCRS84Quad/tilejson.json?assets=B01&tile_scale=2
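
A short sketch of reading the TileJSON document and extracting the templated tile URL (host, collection and asset are placeholders):

```python
import httpx

tj = httpx.get(
    "https://myendpoint/collections/my-collection/WebMercatorQuad/tilejson.json",
    params={"assets": "B01", "minzoom": 6, "maxzoom": 12},
).json()

# Templated XYZ URL to hand to a web map client, plus the zoom range and bounds.
print(tj["tiles"][0])
print(tj["minzoom"], tj["maxzoom"], tj["bounds"])
```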
"},{"location":"endpoints/collections_endpoints/#wmts","title":"WMTS","text":"

:endpoint:/collections/{collection_id}[/{TileMatrixSetId}]/WMTSCapabilities.xml

  • PathParams:

    • collection_id: STAC Collection Identifier.
    • TileMatrixSetId: TileMatrixSet name, default is WebMercatorQuad. OPTIONAL
  • QueryParams:

    • tile_format: Output image format, default is set to PNG.
    • tile_scale: Tile size scale, default is set to 1 (256x256). OPTIONAL
    • minzoom: Overwrite default minzoom. OPTIONAL
    • maxzoom: Overwrite default maxzoom. OPTIONAL

Important

Additional query parameters will be forwarded to the tile URL. If the collection has no default mosaic metadata, assets OR expression will be required.

Example:

  • https://myendpoint/collections/my-collection/WMTSCapabilities.xml?assets=B01
  • https://myendpoint/collections/my-collection/WMTSCapabilities.xml?assets=B01&tile_format=png
  • https://myendpoint/collections/my-collection/WorldCRS84Quad/WMTSCapabilities.xml?assets=B01&tile_scale=2
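
A small sketch of fetching the capabilities document; the extra query parameters are sent so they can be forwarded to the embedded tile URLs (host, collection and asset are placeholders):

```python
import httpx

xml = httpx.get(
    "https://myendpoint/collections/my-collection/WMTSCapabilities.xml",
    params={"assets": "B01", "tile_format": "png", "rescale": "0,1000"},
).text

with open("capabilities.xml", "w") as f:
    f.write(xml)
```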
"},{"location":"endpoints/collections_endpoints/#assets","title":"Assets","text":"

:endpoint:/collections/{collection_id}/tiles[/{TileMatrixSetId}]/{z}/{x}/{y}/assets

  • PathParams:

    • collection_id: STAC Collection Identifier.
    • TileMatrixSetId: TileMatrixSet name, default is WebMercatorQuad. OPTIONAL
    • z: Tile's zoom level.
    • x: Tile's column.
    • y: Tile's row.
  • QueryParams:

    • scan_limit (int): Return as soon as we scan N items, Default is 10,000 in PgSTAC.
    • items_limit (int): Return as soon as we have N items per geometry, Default is 100 in PgSTAC.
    • time_limit (int): Return after N seconds to avoid long requests, Default is 5sec in PgSTAC.
    • exitwhenfull (bool): Return as soon as the geometry is fully covered, Default is True in PgSTAC.
    • skipcovered (bool): Skip any items that would show up completely under the previous items, Default is True in PgSTAC.

Example:

  • https://myendpoint/collections/my-collection/tiles/0/0/0/assets

:endpoint:/collections/{collection_id}/{lon},{lat}/assets

  • PathParams:

    • collection_id: STAC Collection Identifier.
    • lon: Longitude (in WGS84 CRS).
    • lat: Latitude (in WGS84 CRS).
  • QueryParams:

    • scan_limit (int): Return as soon as we scan N items, Default is 10,000 in PgSTAC.
    • items_limit (int): Return as soon as we have N items per geometry, Default is 100 in PgSTAC.
    • time_limit (int): Return after N seconds to avoid long requests, Default is 5sec in PgSTAC.
    • exitwhenfull (bool): Return as soon as the geometry is fully covered, Default is True in PgSTAC.
    • skipcovered (bool): Skip any items that would show up completely under the previous items, Default is True in PgSTAC.

Example:

  • https://myendpoint/collections/my-collection/0.0,0.0/assets
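
A sketch of listing the items whose assets intersect a point (host, collection and coordinates are placeholders; the response is expected to be a list of STAC items):

```python
import httpx

lon, lat = 92.2, 20.8  # WGS84
items = httpx.get(
    f"https://myendpoint/collections/my-collection/{lon},{lat}/assets",
    params={"items_limit": 5},
).json()

for item in items:
    print(item["id"], list(item["assets"]))
```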
"},{"location":"endpoints/collections_endpoints/#statistics","title":"Statistics","text":"

:endpoint:/collections/{collection_id}/statistics - [POST]

  • Body:

    • feature (JSON): A valid GeoJSON feature or FeatureCollection
  • PathParams:

    • collection_id: STAC Collection Identifier.
  • QueryParams:

    • assets (array[str]): asset names.
    • expression (str): rio-tiler's math expression with asset names (e.g Asset1_b1/Asset2_b1).
    • asset_as_band (bool): tell rio-tiler that each asset is a 1 band dataset, so expression Asset1/Asset2 can be passed.
    • asset_bidx (array[str]): Per asset band index (e.g Asset1|1;2;3).
    • coord_crs (str): Coordinate Reference System of the input geometry. Default to epsg:4326.
    • dst_crs (str): Output Coordinate Reference System. Default to coord_crs.
    • max_size (int): Max image size from which to calculate statistics.
    • height (int): Force image height from which to calculate statistics.
    • width (int): Force image width from which to calculate statistics.
    • nodata: Overwrite internal Nodata value. OPTIONAL
    • unscale (bool): Apply dataset internal Scale/Offset.
    • resampling (str): RasterIO resampling algorithm. Defaults to nearest.
    • reproject (str): WarpKernel resampling algorithm (only used when doing re-projection). Defaults to nearest.
    • algorithm (str): Custom algorithm name (e.g hillshade).
    • algorithm_params (str): JSON encoded algorithm parameters.
    • categorical (bool): Return statistics for categorical dataset, default is false.
    • c (array[float]): Pixels values for categories.
    • p (array[int]): Percentile values.
    • histogram_bins (str): Histogram bins.
    • histogram_range (str): Comma (',') delimited Min,Max histogram bounds.
    • pixel_selection (str): Pixel selection method (cogeotiff.github.io/rio-tiler/mosaic/).
    • scan_limit (int): Return as soon as we scan N items, Default is 10,000 in PgSTAC.
    • items_limit (int): Return as soon as we have N items per geometry, Default is 100 in PgSTAC.
    • time_limit (int): Return after N seconds to avoid long requests, Default is 5sec in PgSTAC.
    • exitwhenfull (bool): Return as soon as the geometry is fully covered, Default is True in PgSTAC.
    • skipcovered (bool): Skip any items that would show up completely under the previous items, Default is True in PgSTAC.

Important

if height and width are provided max_size will be ignored.

Example:

  • https://myendpoint/collections/my-collection/statistics?assets=B01
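
A sketch of posting a GeoJSON feature to the statistics endpoint (host, collection, asset and coordinates are placeholders; the statistics are expected under the returned feature's properties):

```python
import httpx

feature = {
    "type": "Feature",
    "properties": {},
    "geometry": {
        "type": "Polygon",
        "coordinates": [
            [[92.0, 20.0], [92.5, 20.0], [92.5, 20.5], [92.0, 20.5], [92.0, 20.0]]
        ],
    },
}

resp = httpx.post(
    "https://myendpoint/collections/my-collection/statistics",
    params={"assets": "B01", "max_size": 1024},
    json=feature,
    timeout=60,
)
resp.raise_for_status()
print(resp.json()["properties"]["statistics"])
```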
"},{"location":"endpoints/collections_endpoints/#bboxfeature","title":"BBOX/Feature","text":"

:endpoint:/collections/{collection_id}/bbox/{minx},{miny},{maxx},{maxy}.{format}

:endpoint:/collections/{collection_id}/bbox/{minx},{miny},{maxx},{maxy}/{width}x{height}.{format}

  • PathParams:

    • collection_id: STAC Collection Identifier.
    • minx,miny,maxx,maxy (str): Comma (',') delimited bounding box in WGS84.
    • format (str): Output image format.
    • height (int): Force output image height.
    • width (int): Force output image width.
  • QueryParams:

    • assets (array[str]): asset names.
    • expression (str): rio-tiler's math expression with asset names (e.g Asset1_b1/Asset2_b1).
    • asset_as_band (bool): tell rio-tiler that each asset is a 1 band dataset, so expression Asset1/Asset2 can be passed.
    • asset_bidx (array[str]): Per asset band index (e.g Asset1|1;2;3).
    • coord_crs (str): Coordinate Reference System of the input coordinates. Default to epsg:4326.
    • dst_crs (str): Output Coordinate Reference System. Default to coord_crs.
    • max_size (int): Max image size.
    • nodata: Overwrite internal Nodata value. OPTIONAL
    • unscale (bool): Apply dataset internal Scale/Offset.
    • resampling (str): RasterIO resampling algorithm. Defaults to nearest.
    • reproject (str): WarpKernel resampling algorithm (only used when doing re-projection). Defaults to nearest.
    • algorithm (str): Custom algorithm name (e.g hillshade).
    • algorithm_params (str): JSON encoded algorithm parameters.
    • rescale (array[str]): Comma (',') delimited Min,Max range (e.g rescale=0,1000, rescale=0,1000&rescale=0,3000&rescale=0,2000).
    • color_formula (str): rio-color formula.
    • colormap (str): JSON encoded custom Colormap.
    • colormap_name (str): rio-tiler color map name.
    • return_mask (bool): Add mask to the output data. Default is True.
    • pixel_selection (str): Pixel selection method (cogeotiff.github.io/rio-tiler/mosaic/).
    • scan_limit (int): Return as soon as we scan N items, Default is 10,000 in PgSTAC.
    • items_limit (int): Return as soon as we have N items per geometry, Default is 100 in PgSTAC.
    • time_limit (int): Return after N seconds to avoid long requests, Default is 5sec in PgSTAC.
    • exitwhenfull (bool): Return as soon as the geometry is fully covered, Default is True in PgSTAC.
    • skipcovered (bool): Skip any items that would show up completely under the previous items, Default is True in PgSTAC.

Important

if height and width are provided max_size will be ignored.

Example:

  • https://myendpoint/collections/my-collection/bbox/0,0,10,10.png?assets=B01
  • https://myendpoint/collections/my-collection/bbox/0,0,10,10/400x300.png?assets=B01
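
A sketch of requesting a fixed-size image for a bounding box (host, collection, asset and bbox are placeholders):

```python
import httpx

resp = httpx.get(
    "https://myendpoint/collections/my-collection/bbox/91.8,20.0,92.9,21.6/512x512.png",
    params={"assets": "B01", "rescale": "0,1000"},
    timeout=60,
)
resp.raise_for_status()
with open("bbox.png", "wb") as f:
    f.write(resp.content)
```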

:endpoint:/collections/{collection_id}/feature[/{width}x{height}][.{format}] - [POST]

  • Body:

    • feature (JSON): A valid GeoJSON feature (Polygon or MultiPolygon)
  • PathParams:

    • collection_id: STAC Collection Identifier.
    • height (int): Force output image height. Optional
    • width (int): Force output image width. Optional
    • format (str): Output image format, default is set to None and will be either JPEG or PNG depending on masked value. Optional
  • QueryParams:

    • assets (array[str]): asset names.
    • expression (str): rio-tiler's math expression with asset names (e.g Asset1_b1/Asset2_b1).
    • asset_as_band (bool): tell rio-tiler that each asset is a 1 band dataset, so expression Asset1/Asset2 can be passed.
    • asset_bidx (array[str]): Per asset band index (e.g Asset1|1;2;3).
    • coord_crs (str): Coordinate Reference System of the input geometry. Default to epsg:4326.
    • dst_crs (str): Output Coordinate Reference System. Default to coord_crs.
    • max_size (int): Max image size.
    • nodata: Overwrite internal Nodata value. OPTIONAL
    • unscale (bool): Apply dataset internal Scale/Offset.
    • resampling (str): RasterIO resampling algorithm. Defaults to nearest.
    • reproject (str): WarpKernel resampling algorithm (only used when doing re-projection). Defaults to nearest.
    • algorithm (str): Custom algorithm name (e.g hillshade).
    • algorithm_params (str): JSON encoded algorithm parameters.
    • rescale (array[str]): Comma (',') delimited Min,Max range (e.g rescale=0,1000, rescale=0,1000&rescale=0,3000&rescale=0,2000).
    • color_formula (str): rio-color formula.
    • colormap (str): JSON encoded custom Colormap.
    • colormap_name (str): rio-tiler color map name.
    • return_mask (bool): Add mask to the output data. Default is True.
    • pixel_selection (str): Pixel selection method (cogeotiff.github.io/rio-tiler/mosaic/).
    • scan_limit (int): Return as soon as we scan N items, Default is 10,000 in PgSTAC.
    • items_limit (int): Return as soon as we have N items per geometry, Default is 100 in PgSTAC.
    • time_limit (int): Return after N seconds to avoid long requests, Default is 5sec in PgSTAC.
    • exitwhenfull (bool): Return as soon as the geometry is fully covered, Default is True in PgSTAC.
    • skipcovered (bool): Skip any items that would show up completely under the previous items, Default is True in PgSTAC.

Important

if height and width are provided max_size will be ignored.

Example:

  • https://myendpoint/collections/my-collection/feature?assets=B01
  • https://myendpoint/collections/my-collection/feature.png?assets=B01
  • https://myendpoint/collections/my-collection/feature/100x100.png?assets=B01
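
A sketch of posting a polygon to get a clipped image back (identifiers and coordinates are placeholders; with no {format} in the path, the service is expected to pick JPEG or PNG based on the mask):

```python
import httpx

feature = {
    "type": "Feature",
    "properties": {},
    "geometry": {
        "type": "Polygon",
        "coordinates": [
            [[92.0, 20.0], [92.5, 20.0], [92.5, 20.5], [92.0, 20.5], [92.0, 20.0]]
        ],
    },
}

resp = httpx.post(
    "https://myendpoint/collections/my-collection/feature",
    params={"assets": "B01", "max_size": 1024},
    json=feature,
)
resp.raise_for_status()
ext = "png" if resp.headers["content-type"] == "image/png" else "jpg"
with open(f"clip.{ext}", "wb") as f:
    f.write(resp.content)
```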
"},{"location":"endpoints/collections_endpoints/#point","title":"Point","text":"

:endpoint:/collections/{collection_id}/point/{lon},{lat}

  • PathParams:

    • collection_id: STAC Collection Identifier.
    • lon: Longitude (in coord-crs, defaults to WGS84).
    • lat: Latitude (in coord-crs, defaults to WGS84).
  • QueryParams:

    • assets (array[str]): asset names.
    • expression (str): rio-tiler's math expression with asset names (e.g Asset1_b1/Asset2_b1).
    • asset_as_band (bool): tell rio-tiler that each asset is a 1 band dataset, so expression Asset1/Asset2 can be passed.
    • asset_bidx (array[str]): Per asset band index (e.g Asset1|1;2;3).
    • coord_crs (str): Coordinate Reference System of the input geometry. Default to epsg:4326.
    • nodata (str, int, float): Overwrite internal Nodata value.
    • unscale (bool): Apply dataset internal Scale/Offset.
    • resampling (str): RasterIO resampling algorithm. Defaults to nearest.
    • reproject (str): WarpKernel resampling algorithm (only used when doing re-projection). Defaults to nearest.
    • scan_limit (int): Return as soon as we scan N items, Default is 10,000 in PgSTAC.
    • items_limit (int): Return as soon as we have N items per geometry, Default is 100 in PgSTAC.
    • time_limit (int): Return after N seconds to avoid long requests, Default is 5sec in PgSTAC.
    • exitwhenfull (bool): Return as soon as the geometry is fully covered, Default is True in PgSTAC.
    • skipcovered (bool): Skip any items that would show up completely under the previous items, Default is True in PgSTAC.

Important

assets OR expression is required

Example:

  • https://myendpoint/collections/my-collection/point/0,0?assets=B01
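
A sketch of reading pixel values at a point from the mosaic (identifiers and coordinates are placeholders; response field names may vary slightly between versions):

```python
import httpx

pt = httpx.get(
    "https://myendpoint/collections/my-collection/point/92.2,20.8",
    params={"assets": "B01"},
).json()

print(pt["coordinates"])
print(pt["values"])  # pixel values gathered from the assets intersecting the point
```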
"},{"location":"endpoints/collections_endpoints/#collection-search-infos","title":"Collection Search infos","text":"

:endpoint:/collections/{collection_id}/info - [GET]

  • PathParams:
    • collection_id: STAC Collection Identifier.

Example:

  • https://myendpoint/collections/my-collection/info
```
curl 'http://myendpoint/collections/my-collection/info' | jq
>> {
  "search": {
    "hash": "37c6ebb942cc5393a9eb408ad8431f62",
    "search": {
      "collections": ["my-collection"]
    },
    "_where": "collection = ANY ('{my-collection}') ",
    "orderby": "datetime DESC, id DESC",
    "lastused": "2024-05-17T06:44:45.980518Z",
    "usecount": 1,
    "metadata": {
      "type": "mosaic",
      "bounds": [91.831615, 19.982078842323997, 92.97426268500965, 21.666101],
      "name": "Mosaic for 'my-collection' Collection",
      "assets": ["visual", "data-mask", "ms_analytic", "pan_analytic"],
      "defaults": {
        "color": {
          "assets": ["visual"],
          "colormap": {"1": [0, 0, 0, 255], "1000": [255, 255, 255, 255]},
          "asset_bidx": ["visual|1"]
        },
        "visual": {
          "assets": ["visual"],
          "maxzoom": 22,
          "minzoom": 8,
          "asset_bidx": ["visual|1,2,3"]
        },
        "visualr": {
          "assets": ["visual"],
          "rescale": [[0, 100]],
          "asset_bidx": ["visual|1"]
        }
      }
    }
  },
  "links": [
    {"href": "http://myendpoint/collections/my-collection/info", "rel": "self", "title": "Mosaic metadata"},
    {"href": "http://myendpoint/collections/my-collection/{tileMatrixSetId}/tilejson.json", "rel": "tilejson", "templated": true, "title": "TileJSON link (Template URL)."},
    {"href": "http://myendpoint/collections/my-collection/{tileMatrixSetId}/tilejson.json?colormap=%7B%221%22%3A+%5B0%2C+0%2C+0%2C+255%5D%2C+%221000%22%3A+%5B255%2C+255%2C+255%2C+255%5D%7D&assets=visual&asset_bidx=visual%7C1", "rel": "tilejson", "templated": true, "title": "TileJSON link for `color` layer (Template URL)."},
    {"href": "http://myendpoint/collections/my-collection/{tileMatrixSetId}/tilejson.json?maxzoom=22&minzoom=8&assets=visual&asset_bidx=visual%7C1%2C2%2C3", "rel": "tilejson", "templated": true, "title": "TileJSON link for `visual` layer (Template URL)."},
    {"href": "http://myendpoint/collections/my-collection/{tileMatrixSetId}/tilejson.json?rescale=0%2C100&assets=visual&asset_bidx=visual%7C1", "rel": "tilejson", "templated": true, "title": "TileJSON link for `visualr` layer (Template URL)."},
    {"href": "http://myendpoint/collections/my-collection/{tileMatrixSetId}/map", "rel": "map", "templated": true, "title": "Map viewer link (Template URL)."},
    {"href": "http://myendpoint/collections/my-collection/{tileMatrixSetId}/map?colormap=%7B%221%22%3A+%5B0%2C+0%2C+0%2C+255%5D%2C+%221000%22%3A+%5B255%2C+255%2C+255%2C+255%5D%7D&assets=visual&asset_bidx=visual%7C1", "rel": "map", "templated": true, "title": "Map viewer link for `color` layer (Template URL)."},
    {"href": "http://myendpoint/collections/my-collection/{tileMatrixSetId}/map?maxzoom=22&minzoom=8&assets=visual&asset_bidx=visual%7C1%2C2%2C3", "rel": "map", "templated": true, "title": "Map viewer link for `visual` layer (Template URL)."},
    {"href": "http://myendpoint/collections/my-collection/{tileMatrixSetId}/map?rescale=0%2C100&assets=visual&asset_bidx=visual%7C1", "rel": "map", "templated": true, "title": "Map viewer link for `visualr` layer (Template URL)."},
    {"href": "http://myendpoint/collections/my-collection/{tileMatrixSetId}/WMTSCapabilities.xml", "rel": "wmts", "templated": true, "title": "WMTS link (Template URL)"}
  ]
}
```
"},{"location":"endpoints/items_endpoints/","title":"Items","text":""},{"location":"endpoints/items_endpoints/#stac-items-endpoints","title":"STAC Items endpoints","text":"

The Item endpoints are created using TiTiler's MultiBaseTilerFactory

| Method | URL | Output | Description |
|--------|-----|--------|-------------|
| GET | /collections/{collection_id}/items/{item_id}/bounds | JSON (Bounds) | return dataset's bounds |
| GET | /collections/{collection_id}/items/{item_id}/assets | JSON | return the list of available assets |
| GET | /collections/{collection_id}/items/{item_id}/info | JSON (Info) | return assets basic info |
| GET | /collections/{collection_id}/items/{item_id}/info.geojson | GeoJSON (InfoGeoJSON) | return assets basic info as a GeoJSON feature |
| GET | /collections/{collection_id}/items/{item_id}/asset_statistics | JSON (Statistics) | return per asset statistics |
| GET | /collections/{collection_id}/items/{item_id}/statistics | JSON (Statistics) | return assets statistics (merged) |
| POST | /collections/{collection_id}/items/{item_id}/statistics | GeoJSON (Statistics) | return assets statistics for a GeoJSON (merged) |
| GET | /collections/{collection_id}/items/{item_id}/tiles[/{TileMatrixSetId}]/{z}/{x}/{y}[@{scale}x][.{format}] | image/bin | create a web map tile image from assets |
| GET | /collections/{collection_id}/items/{item_id}[/{TileMatrixSetId}]/tilejson.json | JSON (TileJSON) | return a Mapbox TileJSON document |
| GET | /collections/{collection_id}/items/{item_id}[/{TileMatrixSetId}]/WMTSCapabilities.xml | XML | return OGC WMTS Get Capabilities |
| GET | /collections/{collection_id}/items/{item_id}[/{TileMatrixSetId}]/map | HTML | simple map viewer |
| GET | /collections/{collection_id}/items/{item_id}/point/{lon},{lat} | JSON (Point) | return pixel values from assets |
| GET | /collections/{collection_id}/items/{item_id}/preview[.{format}] | image/bin | create a preview image from assets |
| GET | /collections/{collection_id}/items/{item_id}/bbox/{minx},{miny},{maxx},{maxy}[/{width}x{height}].{format} | image/bin | create an image from part of assets |
| POST | /collections/{collection_id}/items/{item_id}/feature[/{width}x{height}][.{format}] | image/bin | create an image from a geojson feature intersecting assets |

"},{"location":"endpoints/items_endpoints/#tiles","title":"Tiles","text":"

:endpoint:/collections/{collection_id}/items/{item_id}/tiles[/{TileMatrixSetId}]/{z}/{x}/{y}[@{scale}x][.{format}]

  • PathParams:

    • collection_id (str): STAC Collection Identifier.
    • item_id (str): STAC Item Identifier.
    • TileMatrixSetId (str): TileMatrixSet name, default is WebMercatorQuad. Optional
    • z (int): TMS tile's zoom level.
    • x (int): TMS tile's column.
    • y (int): TMS tile's row.
    • scale (int): Tile size scale, default is set to 1 (256x256). Optional
    • format (str): Output image format, default is set to None and will be either JPEG or PNG depending on masked value. Optional
  • QueryParams:

    • assets (array[str]): asset names.
    • expression (str): rio-tiler's math expression with asset names (e.g Asset1_b1/Asset2_b1).
    • asset_as_band (bool): tell rio-tiler that each asset is a 1 band dataset, so expression Asset1/Asset2 can be passed.
    • asset_bidx (array[str]): Per asset band index (e.g Asset1|1;2;3).
    • nodata (str, int, float): Overwrite internal Nodata value.
    • unscale (bool): Apply dataset internal Scale/Offset.
    • resampling (str): RasterIO resampling algorithm. Defaults to nearest.
    • reproject (str): WarpKernel resampling algorithm (only used when doing re-projection). Defaults to nearest.
    • algorithm (str): Custom algorithm name (e.g hillshade).
    • algorithm_params (str): JSON encoded algorithm parameters.
    • rescale (array[str]): Comma (',') delimited Min,Max range (e.g rescale=0,1000, rescale=0,1000&rescale=0,3000&rescale=0,2000).
    • color_formula (str): rio-color formula.
    • colormap (str): JSON encoded custom Colormap.
    • colormap_name (str): rio-tiler color map name.
    • return_mask (bool): Add mask to the output data. Default is True.
    • buffer (float): Buffer on each side of the given tile. It must be a multiple of 0.5. Output tilesize will be expanded to tilesize + 2 * buffer (e.g 0.5 = 257x257, 1.0 = 258x258).
    • padding (int): Padding to apply to each tile edge. Helps reduce resampling artefacts along edges. Defaults to 0

Important

assets OR expression is required

Example:

  • https://myendpoint/collections/mycollection/items/oneitem/tiles/1/2/3?assets=B01&assets=B00
  • https://myendpoint/collections/mycollection/items/oneitem/tiles/1/2/3.jpg?assets=B01
  • https://myendpoint/collections/mycollection/items/oneitem/tiles/WorldCRS84Quad/1/2/3@2x.png?assets=B01
  • https://myendpoint/collections/mycollection/items/oneitem/tiles/WorldCRS84Quad/1/2/3?expression=B01/B02&rescale=0,1000&colormap_name=cfastie&asset_as_band=True
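
A sketch of a band-math tile request against a single item, mirroring the expression/asset_as_band example above (identifiers, asset names and tile index are placeholders):

```python
import httpx

resp = httpx.get(
    "https://myendpoint/collections/mycollection/items/oneitem/tiles/WebMercatorQuad/9/175/96.png",
    params={
        "expression": "(B01-B02)/(B01+B02)",  # normalized difference across two assets
        "asset_as_band": True,                 # treat each asset as a single band
        "rescale": "-1,1",
        "colormap_name": "viridis",
    },
    timeout=30,
)
resp.raise_for_status()
with open("index_tile.png", "wb") as f:
    f.write(resp.content)
```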
"},{"location":"endpoints/items_endpoints/#preview","title":"Preview","text":"

:endpoint:/collections/{collection_id}/items/{item_id}/preview[.{format}]

  • PathParams:

    • collection_id (str): STAC Collection Identifier.
    • item_id (str): STAC Item Identifier.
    • format: Output image format, default is set to None and will be either JPEG or PNG depending on masked value. Optional
  • QueryParams:

    • assets (array[str]): asset names.
    • expression (str): rio-tiler's math expression with asset names (e.g Asset1_b1/Asset2_b1).
    • asset_as_band (bool): tell rio-tiler that each asset is a 1 band dataset, so expression Asset1/Asset2 can be passed.
    • asset_bidx (array[str]): Per asset band index (e.g Asset1|1;2;3).
    • max_size (int): Max image size, default is 1024.
    • height (int): Force output image height.
    • width (int): Force output image width.
    • dst_crs (str): Output Coordinate Reference System. Default to dataset's CRS.
    • nodata (str, int, float): Overwrite internal Nodata value.
    • unscale (bool): Apply dataset internal Scale/Offset.
    • algorithm (str): Custom algorithm name (e.g hillshade).
    • algorithm_params (str): JSON encoded algorithm parameters.
    • resampling (str): RasterIO resampling algorithm. Defaults to nearest.
    • reproject (str): WarpKernel resampling algorithm (only used when doing re-projection). Defaults to nearest.
    • rescale (array[str]): Comma (',') delimited Min,Max range (e.g rescale=0,1000, rescale=0,1000&rescale=0,3000&rescale=0,2000).
    • color_formula (str): rio-color formula.
    • colormap (str): JSON encoded custom Colormap.
    • colormap_name (str): rio-tiler color map name.
    • return_mask (bool): Add mask to the output data. Default is True.

Important

  • assets OR expression is required

  • if height and width are provided max_size will be ignored.

Example:

  • https://myendpoint/collections/mycollection/items/oneitem/preview?assets=B01
  • https://myendpoint/collections/mycollection/items/oneitem/preview.jpg?assets=B01
  • https://myendpoint/collections/mycollection/items/oneitem/preview?assets=B01&rescale=0,1000&colormap_name=cfastie
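
A sketch showing the height/width override mentioned above (identifiers are placeholders; max_size is ignored once both dimensions are given):

```python
import httpx

resp = httpx.get(
    "https://myendpoint/collections/mycollection/items/oneitem/preview.png",
    params={"assets": "B01", "width": 256, "height": 256, "rescale": "0,1000"},
)
resp.raise_for_status()
with open("preview.png", "wb") as f:
    f.write(resp.content)
```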
"},{"location":"endpoints/items_endpoints/#bboxfeature","title":"BBOX/Feature","text":"

:endpoint:/collections/{collection_id}/items/{item_id}/bbox/{minx},{miny},{maxx},{maxy}.{format}

:endpoint:/collections/{collection_id}/items/{item_id}/bbox/{minx},{miny},{maxx},{maxy}/{width}x{height}.{format}

  • PathParams:

    • collection_id (str): STAC Collection Identifier.
    • item_id (str): STAC Item Identifier.
    • minx,miny,maxx,maxy (str): Comma (',') delimited bounding box in WGS84.
    • height (int): Force output image height. Optional
    • width (int): Force output image width. Optional
    • format (str): Output image format, default is set to None and will be either JPEG or PNG depending on masked value. Optional
  • QueryParams:

    • assets (array[str]): asset names.
    • expression (str): rio-tiler's math expression with asset names (e.g Asset1_b1/Asset2_b1).
    • asset_as_band (bool): tell rio-tiler that each asset is a 1 band dataset, so expression Asset1/Asset2 can be passed.
    • asset_bidx (array[str]): Per asset band index (e.g Asset1|1;2;3).
    • max_size (int): Max image size.
    • coord_crs (str): Coordinate Reference System of the input coordinates. Default to epsg:4326.
    • dst_crs (str): Output Coordinate Reference System. Default to coord_crs.
    • nodata (str, int, float): Overwrite internal Nodata value.
    • unscale (bool): Apply dataset internal Scale/Offset.
    • algorithm (str): Custom algorithm name (e.g hillshade).
    • algorithm_params (str): JSON encoded algorithm parameters.
    • resampling (str): RasterIO resampling algorithm. Defaults to nearest.
    • reproject (str): WarpKernel resampling algorithm (only used when doing re-projection). Defaults to nearest.
    • rescale (array[str]): Comma (',') delimited Min,Max range (e.g rescale=0,1000, rescale=0,1000&rescale=0,3000&rescale=0,2000).
    • color_formula (str): rio-color formula.
    • colormap (str): JSON encoded custom Colormap.
    • colormap_name (str): rio-tiler color map name.
    • return_mask (bool): Add mask to the output data. Default is True.

Important

  • assets OR expression is required

  • if height and width are provided max_size will be ignored.

Example:

  • https://myendpoint/collections/mycollection/items/oneitem/bbox/0,0,10,10.png?assets=B01
  • https://myendpoint/collections/mycollection/items/oneitem/bbox/0,0,10,10.png?assets=B01&rescale=0,1000&colormap_name=cfastie

:endpoint:/collections/{collection_id}/items/{item_id}/feature[/{width}x{height}][.{format}] - [POST]

  • Body:

    • feature (JSON): A valid GeoJSON feature (Polygon or MultiPolygon)
  • PathParams:

    • collection_id (str): STAC Collection Identifier.
    • item_id (str): STAC Item Identifier.
    • height (int): Force output image height. Optional
    • width (int): Force output image width. Optional
    • format (str): Output image format, default is set to None and will be either JPEG or PNG depending on masked value. Optional
  • QueryParams:

    • assets (array[str]): asset names.
    • expression (str): rio-tiler's math expression with asset names (e.g Asset1_b1/Asset2_b1).
    • asset_as_band (bool): tell rio-tiler that each asset is a 1 band dataset, so expression Asset1/Asset2 can be passed.
    • asset_bidx (array[str]): Per asset band index (e.g Asset1|1;2;3).
    • max_size (int): Max image size.
    • coord_crs (str): Coordinate Reference System of the input coordinates. Default to epsg:4326.
    • dst_crs (str): Output Coordinate Reference System. Default to coord_crs.
    • nodata (str, int, float): Overwrite internal Nodata value.
    • unscale (bool): Apply dataset internal Scale/Offset.
    • resampling (str): RasterIO resampling algorithm. Defaults to nearest.
    • reproject (str): WarpKernel resampling algorithm (only used when doing re-projection). Defaults to nearest.
    • algorithm (str): Custom algorithm name (e.g hillshade).
    • algorithm_params (str): JSON encoded algorithm parameters.
    • rescale (array[str]): Comma (',') delimited Min,Max range (e.g rescale=0,1000, rescale=0,1000&rescale=0,3000&rescale=0,2000).
    • color_formula (str): rio-color formula.
    • colormap (str): JSON encoded custom Colormap.
    • colormap_name (str): rio-tiler color map name.
    • return_mask (bool): Add mask to the output data. Default is True.

Important

  • assets OR expression is required

  • if height and width are provided max_size will be ignored.

Example:

  • https://myendpoint/collections/mycollection/items/oneitem/feature?assets=B01
  • https://myendpoint/collections/mycollection/items/oneitem/feature.png?assets=B01
  • https://myendpoint/collections/mycollection/items/oneitem/feature/100x100.png?assets=B01&rescale=0,1000&colormap_name=cfastie
"},{"location":"endpoints/items_endpoints/#point","title":"Point","text":"

:endpoint:/collections/{collection_id}/items/{item_id}/point/{lon},{lat}

  • PathParams:

    • collection_id (str): STAC Collection Identifier.
    • item_id (str): STAC Item Identifier.
    • lon,lat (str): Comma (',') delimited point Longitude and Latitude (in WGS84).
  • QueryParams:

    • assets (array[str]): asset names.
    • expression (str): rio-tiler's math expression with asset names (e.g Asset1_b1/Asset2_b1).
    • asset_as_band (bool): tell rio-tiler that each asset is a 1 band dataset, so expression Asset1/Asset2 can be passed.
    • asset_bidx (array[str]): Per asset band index (e.g Asset1|1;2;3).
    • nodata (str, int, float): Overwrite internal Nodata value.
    • unscale (bool): Apply dataset internal Scale/Offset.
    • resampling (str): RasterIO resampling algorithm. Defaults to nearest.
    • reproject (str): WarpKernel resampling algorithm (only used when doing re-projection). Defaults to nearest.

Important

assets OR expression is required

Example:

  • https://myendpoint/collections/mycollection/items/oneitem/point/0,0?assets=B01
"},{"location":"endpoints/items_endpoints/#tilesjson","title":"TilesJSON","text":"

:endpoint:/collections/{collection_id}/items/{item_id}[/{TileMatrixSetId}]/tilejson.json - Return a TileJSON document

  • PathParams:

    • collection_id (str): STAC Collection Identifier.
    • item_id (str): STAC Item Identifier.
    • TileMatrixSetId: TileMatrixSet name, default is WebMercatorQuad.
  • QueryParams:

    • assets (array[str]): asset names.
    • expression (str): rio-tiler's math expression with asset names (e.g Asset1_b1/Asset2_b1).
    • asset_as_band (bool): tell rio-tiler that each asset is a 1 band dataset, so expression Asset1/Asset2 can be passed.
    • asset_bidx (array[str]): Per asset band index (e.g Asset1|1;2;3).
    • tile_format (str): Output image format, default is set to None and will be either JPEG or PNG depending on masked value.
    • tile_scale (int): Tile size scale, default is set to 1 (256x256).
    • minzoom (int): Overwrite default minzoom.
    • maxzoom (int): Overwrite default maxzoom.
    • nodata (str, int, float): Overwrite internal Nodata value.
    • unscale (bool): Apply dataset internal Scale/Offset.
    • resampling (str): RasterIO resampling algorithm. Defaults to nearest.
    • reproject (str): WarpKernel resampling algorithm (only used when doing re-projection). Defaults to nearest.
    • algorithm (str): Custom algorithm name (e.g hillshade).
    • algorithm_params (str): JSON encoded algorithm parameters.
    • rescale (array[str]): Comma (',') delimited Min,Max range (e.g rescale=0,1000, rescale=0,1000&rescale=0,3000&rescale=0,2000).
    • color_formula (str): rio-color formula.
    • colormap (str): JSON encoded custom Colormap.
    • colormap_name (str): rio-tiler color map name.
    • return_mask (bool): Add mask to the output data. Default is True.
    • buffer (float): Buffer on each side of the given tile. It must be a multiple of 0.5. Output tilesize will be expanded to tilesize + 2 * buffer (e.g 0.5 = 257x257, 1.0 = 258x258).
    • padding (int): Padding to apply to each tile edge. Helps reduce resampling artefacts along edges. Defaults to 0

Important

assets OR expression is required

Example:

  • https://myendpoint/collections/mycollection/items/oneitem/tilejson.json?assets=B01
  • https://myendpoint/collections/mycollection/items/oneitem/tilejson.json?assets=B01&tile_format=png
  • https://myendpoint/collections/mycollection/items/oneitem/WorldCRS84Quad/tilejson.json?tile_scale=2&expression=B01/B02&asset_as_band=True
"},{"location":"endpoints/items_endpoints/#bounds","title":"Bounds","text":"

:endpoint:/collections/{collection_id}/items/{item_id}/bounds - Return the bounds of the STAC item.

  • PathParams:
    • collection_id (str): STAC Collection Identifier.
    • item_id (str): STAC Item Identifier.

Example:

  • https://myendpoint/collections/mycollection/items/oneitem/bounds
"},{"location":"endpoints/items_endpoints/#info","title":"Info","text":"

:endpoint:/collections/{collection_id}/items/{item_id}/info - Return basic info on STAC item's COG.

  • PathParams:

    • collection_id (str): STAC Collection Identifier.
    • item_id (str): STAC Item Identifier.
  • QueryParams:

    • assets (array[str]): asset names. Default to all available assets.

Example:

  • https://myendpoint/collections/mycollection/items/oneitem/info?assets=B01

:endpoint:/collections/{collection_id}/items/{item_id}/info.geojson - Return basic info on STAC item's COG as a GeoJSON feature

  • PathParams:

    • collection_id (str): STAC Collection Identifier.
    • item_id (str): STAC Item Identifier.
  • QueryParams:

    • collection (str): STAC Collection Identifier. Required
    • item (str): STAC Item Identifier. Required
    • assets (array[str]): asset names. Default to all available assets.

Example:

  • https://myendpoint/collections/mycollection/items/oneitem/info.geojson?assets=B01

"},{"location":"endpoints/items_endpoints/#available-assets","title":"Available Assets","text":"

:endpoint:/collections/{collection_id}/items/{item_id}/assets - Return a list of available assets

  • PathParams:
    • collection_id (str): STAC Collection Identifier.
    • item_id (str): STAC Item Identifier.

Example:

  • https://myendpoint/collections/mycollection/items/oneitem/assets
"},{"location":"endpoints/items_endpoints/#statistics","title":"Statistics","text":"

:endpoint:/collections/{collection_id}/items/{item_id}/asset_statistics - [GET]

  • PathParams:

    • collection_id (str): STAC Collection Identifier.
    • item_id (str): STAC Item Identifier.
  • QueryParams:

    • collection (str): STAC Collection Identifier. Required
    • item (str): STAC Item Identifier. Required
    • assets (array[str]): asset names. Default to all available assets.
    • asset_bidx (array[str]): Per asset band index (e.g Asset1|1;2;3).
    • asset_expression (array[str]): Per asset band math expression (e.g Asset1|b1*b2).
    • max_size (int): Max image size from which to calculate statistics, default is 1024.
    • height (int): Force image height from which to calculate statistics.
    • width (int): Force image width from which to calculate statistics.
    • nodata (str, int, float): Overwrite internal Nodata value.
    • unscale (bool): Apply dataset internal Scale/Offset.
    • resampling (str): RasterIO resampling algorithm. Defaults to nearest.
    • categorical (bool): Return statistics for categorical dataset, default is false.
    • c (array[float]): Pixels values for categories.
    • p (array[int]): Percentile values.
    • histogram_bins (str): Histogram bins.
    • histogram_range (str): Comma (',') delimited Min,Max histogram bounds

Example:

  • https://myendpoint/collections/mycollection/items/oneitem/asset_statistics?assets=B01&categorical=true&c=1&c=2&c=3&p=2&p=98
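
A sketch of the categorical/percentile parameters, using repeated query keys (identifiers and category values are placeholders):

```python
import httpx

stats = httpx.get(
    "https://myendpoint/collections/mycollection/items/oneitem/asset_statistics",
    params=[
        ("assets", "B01"),
        ("categorical", "true"),
        ("c", 1), ("c", 2), ("c", 3),  # pixel values treated as categories
        ("p", 2), ("p", 98),           # percentiles to report
    ],
).json()

print(stats)  # statistics keyed per asset
```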

:endpoint:/collections/{collection_id}/items/{item_id}/statistics - [GET]

  • PathParams:

    • collection_id (str): STAC Collection Identifier.
    • item_id (str): STAC Item Identifier.
  • QueryParams:

    • collection (str): STAC Collection Identifier. Required
    • item (str): STAC Item Identifier. Required
    • assets (array[str]): asset names.
    • expression (str): rio-tiler's math expression with asset names (e.g Asset1_b1/Asset2_b1).
    • asset_as_band (bool): tell rio-tiler that each asset is a 1 band dataset, so expression Asset1/Asset2 can be passed.
    • asset_bidx (array[str]): Per asset band index (e.g Asset1|1;2;3).
    • max_size (int): Max image size from which to calculate statistics, default is 1024.
    • height (int): Force image height from which to calculate statistics.
    • width (int): Force image width from which to calculate statistics.
    • nodata (str, int, float): Overwrite internal Nodata value.
    • unscale (bool): Apply dataset internal Scale/Offset.
    • resampling (str): RasterIO resampling algorithm. Defaults to nearest.
    • categorical (bool): Return statistics for categorical dataset, default is false.
    • c (array[float]): Pixels values for categories.
    • p (array[int]): Percentile values.
    • histogram_bins (str): Histogram bins.
    • histogram_range (str): Comma (',') delimited Min,Max histogram bounds

Example:

  • https://myendpoint/collections/mycollection/items/oneitem/statistics?assets=B01&categorical=true&c=1&c=2&c=3&p=2&p=98

:endpoint:/collections/{collection_id}/items/{item_id}/statistics - [POST]

  • Body:

    • feature (JSON): A valid GeoJSON feature or FeatureCollection
  • PathParams:

    • collection_id (str): STAC Collection Identifier.
    • item_id (str): STAC Item Identifier.
  • QueryParams:

    • assets (array[str]): asset names.
    • expression (str): rio-tiler's math expression with asset names (e.g Asset1_b1/Asset2_b1).
    • asset_as_band (bool): tell rio-tiler that each asset is a 1 band dataset, so expression Asset1/Asset2 can be passed.
    • asset_bidx (array[str]): Per asset band index (e.g Asset1|1;2;3).
    • max_size (int): Max image size from which to calculate statistics.
    • height (int): Force image height from which to calculate statistics.
    • width (int): Force image width from which to calculate statistics.
    • coord_crs (str): Coordinate Reference System of the input coordinates. Default to epsg:4326.
    • dst_crs (str): Output Coordinate Reference System. Default to coord_crs.
    • nodata (str, int, float): Overwrite internal Nodata value.
    • unscale (bool): Apply dataset internal Scale/Offset.
    • resampling (str): RasterIO resampling algorithm. Defaults to nearest.
    • reproject (str): WarpKernel resampling algorithm (only used when doing re-projection). Defaults to nearest.
    • categorical (bool): Return statistics for categorical dataset, default is false.
    • c (array[float]): Pixels values for categories.
    • p (array[int]): Percentile values.
    • histogram_bins (str): Histogram bins.
    • histogram_range (str): Comma (',') delimited Min,Max histogram bounds

Example:

  • https://myendpoint/collections/mycollection/items/oneitem/statistics?assets=B01&categorical=true&c=1&c=2&c=3&p=2&p=98
"},{"location":"endpoints/searches_endpoints/","title":"Searches","text":""},{"location":"endpoints/searches_endpoints/#stac-searches-endpoints","title":"STAC Searches endpoints","text":"Method URL Output Description GET /searches/{search_id}/{lon},{lat}/assets JSON Return a list of assets which overlap a given point GET /searches/{search_id}/tiles[/{TileMatrixSetId}]/{z}/{x}/{Y}/assets JSON Return a list of assets which overlap a given tile GET /searches/{search_id}/tiles[/{TileMatrixSetId}]/{z}/{x}/{y}[@{scale}x][.{format}] image/bin Create a web map tile image for a search query and a tile index GET /searches/{search_id}[/{TileMatrixSetId}]/tilejson.json JSON (TileJSON) Return a Mapbox TileJSON document GET /searches/{search_id}[/{TileMatrixSetId}]/WMTSCapabilities.xml XML return OGC WMTS Get Capabilities GET /searches/{search_id}[/{TileMatrixSetId}]/map HTML simple map viewer POST /searches/{search_id}/statistics GeoJSON (Statistics) Return statistics for geojson features GET /searches/{search_id}/bbox/{minx},{miny},{maxx},{maxy}[/{width}x{height}].{format} image/bin Create an image from part of a dataset POST /searches/{search_id}/feature[/{width}x{height}][.{format}] image/bin Create an image from a GeoJSON feature GET /searches/{search_id}/point/{lon},{lat} JSON (Point) Return pixel values from assets intersecting with a given point GET /searches/{search_id}/info JSON (Info) Return Search query infos POST /searches/register JSON (Register) Register Search query GET /searches/list JSON (Infos) Return list of Search entries with Mosaic type"},{"location":"endpoints/searches_endpoints/#tiles","title":"Tiles","text":"

:endpoint:/searches/{search_id}/tiles[/{TileMatrixSetId}]/{z}/{x}/{y}[@{scale}x][.{format}]

  • PathParams:

    • search_id: PgSTAC Search Identifier (Hash).
    • TileMatrixSetId: TileMatrixSet name, default is WebMercatorQuad. OPTIONAL
    • z: Tile's zoom level.
    • x: Tile's column.
    • y: Tile's row.
    • scale: Tile size scale, default is set to 1 (256x256). OPTIONAL
    • format: Output image format, default is set to None and will be either JPEG or PNG depending on masked value. OPTIONAL
  • QueryParams:

    • assets (array[str]): asset names.
    • expression (str): rio-tiler's math expression with asset names (e.g Asset1_b1/Asset2_b1).
    • asset_as_band (bool): tell rio-tiler that each asset is a 1 band dataset, so expression Asset1/Asset2 can be passed.
    • asset_bidx (array[str]): Per asset band index (e.g Asset1|1;2;3).
    • nodata: Overwrite internal Nodata value. OPTIONAL
    • unscale (bool): Apply dataset internal Scale/Offset.
    • resampling (str): RasterIO resampling algorithm. Defaults to nearest.
    • reproject (str): WarpKernel resampling algorithm (only used when doing re-projection). Defaults to nearest.
    • algorithm (str): Custom algorithm name (e.g hillshade).
    • algorithm_params (str): JSON encoded algorithm parameters.
    • rescale (array[str]): Comma (',') delimited Min,Max range (e.g rescale=0,1000, rescale=0,1000&rescale=0,3000&rescale=0,2000).
    • color_formula (str): rio-color formula.
    • colormap (str): JSON encoded custom Colormap.
    • colormap_name (str): rio-tiler color map name.
    • return_mask (bool): Add mask to the output data. Default is True.
    • buffer (float): Buffer on each side of the given tile. It must be a multiple of 0.5. Output tilesize will be expanded to tilesize + 2 * buffer (e.g 0.5 = 257x257, 1.0 = 258x258).
    • padding (int): Padding to apply to each tile edge. Helps reduce resampling artefacts along edges. Defaults to 0
    • pixel_selection (str): Pixel selection method (cogeotiff.github.io/rio-tiler/mosaic/).
    • scan_limit (int): Return as soon as we scan N items, Default is 10,000 in PgSTAC.
    • items_limit (int): Return as soon as we have N items per geometry, Default is 100 in PgSTAC.
    • time_limit (int): Return after N seconds to avoid long requests, Default is 5sec in PgSTAC.
    • exitwhenfull (bool): Return as soon as the geometry is fully covered, Default is True in PgSTAC.
    • skipcovered (bool): Skip any items that would show up completely under the previous items, Default is True in PgSTAC.

Important

assets OR expression is required

Example:

  • https://myendpoint/searches/f1ed59f0a6ad91ed80ae79b7b52bc707/tiles/1/2/3?assets=B01
  • https://myendpoint/searches/f1ed59f0a6ad91ed80ae79b7b52bc707/tiles/1/2/3.jpg?assets=B01
  • https://myendpoint/searches/f1ed59f0a6ad91ed80ae79b7b52bc707/tiles/WorldCRS84Quad/1/2/3@2x.png?assets=B01&assets=B02&assets=B03
  • https://myendpoint/searches/f1ed59f0a6ad91ed80ae79b7b52bc707/tiles/WorldCRS84Quad/1/2/3?assets=B01&rescale=0,1000&colormap_name=cfastie
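
A sketch that registers a search and then requests a tile with the returned hash (host, collection, datetime filter and tile index are placeholders; the hash is assumed here to come back as id in the register response):

```python
import httpx

registered = httpx.post(
    "https://myendpoint/searches/register",
    json={"collections": ["my-collection"], "datetime": "2024-01-01T00:00:00Z/.."},
).json()
search_id = registered["id"]  # assumption: recent releases return the search hash as `id`

tile = httpx.get(
    f"https://myendpoint/searches/{search_id}/tiles/WebMercatorQuad/8/87/48.png",
    params={"assets": "B01", "rescale": "0,1000"},
)
tile.raise_for_status()
with open("search_tile.png", "wb") as f:
    f.write(tile.content)
```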
"},{"location":"endpoints/searches_endpoints/#tilesjson","title":"TilesJSON","text":"

:endpoint:/searches/{search_id}[/{TileMatrixSetId}]/tilejson.json

  • PathParams:

    • search_id: PgSTAC Search Identifier (Hash).
    • TileMatrixSetId: TileMatrixSet name, default is WebMercatorQuad. OPTIONAL
  • QueryParams:

    • tile_format: Output image format, default is set to None and will be either JPEG or PNG depending on masked value.
    • tile_scale: Tile size scale, default is set to 1 (256x256). OPTIONAL
    • minzoom: Overwrite default minzoom. OPTIONAL
    • maxzoom: Overwrite default maxzoom. OPTIONAL
    • assets (array[str]): asset names.
    • expression (str): rio-tiler's math expression with asset names (e.g Asset1_b1/Asset2_b1).
    • asset_as_band (bool): tell rio-tiler that each asset is a 1 band dataset, so expression Asset1/Asset2 can be passed.
    • asset_bidx (array[str]): Per asset band index (e.g Asset1|1;2;3).
    • nodata (str, int, float): Overwrite internal Nodata value.
    • unscale (bool): Apply dataset internal Scale/Offset.
    • resampling (str): RasterIO resampling algorithm. Defaults to nearest.
    • reproject (str): WarpKernel resampling algorithm (only used when doing re-projection). Defaults to nearest.
    • algorithm (str): Custom algorithm name (e.g hillshade).
    • algorithm_params (str): JSON encoded algorithm parameters.
    • rescale (array[str]): Comma (',') delimited Min,Max range (e.g rescale=0,1000, rescale=0,1000&rescale=0,3000&rescale=0,2000).
    • color_formula (str): rio-color formula.
    • colormap (str): JSON encoded custom Colormap.
    • colormap_name (str): rio-tiler color map name.
    • return_mask (bool): Add mask to the output data. Default is True.
    • buffer (float): Buffer on each side of the given tile. It must be a multiple of 0.5. Output tilesize will be expanded to tilesize + 2 * buffer (e.g 0.5 = 257x257, 1.0 = 258x258).
    • padding (int): Padding to apply to each tile edge. Helps reduce resampling artefacts along edges. Defaults to 0
    • pixel_selection (str): Pixel selection method (cogeotiff.github.io/rio-tiler/mosaic/).
    • scan_limit (int): Return as soon as we scan N items, Default is 10,000 in PgSTAC.
    • items_limit (int): Return as soon as we have N items per geometry, Default is 100 in PgSTAC.
    • time_limit (int): Return after N seconds to avoid long requests, Default is 5sec in PgSTAC.
    • exitwhenfull (bool): Return as soon as the geometry is fully covered, Default is True in PgSTAC.
    • skipcovered (bool): Skip any items that would show up completely under the previous items, Default is True in PgSTAC.

Important

assets OR expression is required

Example:

  • https://myendpoint/searches/f1ed59f0a6ad91ed80ae79b7b52bc707/tilejson.json?assets=B01
  • https://myendpoint/searches/f1ed59f0a6ad91ed80ae79b7b52bc707/tilejson.json?assets=B01&tile_format=png
  • https://myendpoint/searches/f1ed59f0a6ad91ed80ae79b7b52bc707/WorldCRS84Quad/tilejson.json?assets=B01&tile_scale=2
"},{"location":"endpoints/searches_endpoints/#wmts","title":"WMTS","text":"

:endpoint:/searches/{search_id}[/{TileMatrixSetId}]/WMTSCapabilities.xml

  • PathParams:

    • search_id: PgSTAC Search Identifier (Hash).
    • TileMatrixSetId: TileMatrixSet name, default is WebMercatorQuad. OPTIONAL
  • QueryParams:

    • tile_format: Output image format, default is set to PNG.
    • tile_scale: Tile size scale, default is set to 1 (256x256). OPTIONAL
    • minzoom: Overwrite default minzoom. OPTIONAL
    • maxzoom: Overwrite default maxzoom. OPTIONAL

Important

Additional query parameters will be forwarded to the tile URL. If the search has no default mosaic metadata, assets OR expression will be required.

Example:

  • https://myendpoint/searches/f1ed59f0a6ad91ed80ae79b7b52bc707/WMTSCapabilities.xml?assets=B01
  • https://myendpoint/searches/f1ed59f0a6ad91ed80ae79b7b52bc707/WMTSCapabilities.xml?assets=B01&tile_format=png
  • https://myendpoint/searches/f1ed59f0a6ad91ed80ae79b7b52bc707/WorldCRS84Quad/WMTSCapabilities.xml?assets=B01&tile_scale=2
"},{"location":"endpoints/searches_endpoints/#assets","title":"Assets","text":"

:endpoint:/searches/{search_id}/tiles[/{TileMatrixSetId}]/{z}/{x}/{y}/assets

  • PathParams:

    • search_id: PgSTAC Search Identifier (Hash).
    • TileMatrixSetId: TileMatrixSet name, default is WebMercatorQuad. OPTIONAL
    • z: Tile's zoom level.
    • x: Tile's column.
    • y: Tile's row.
  • QueryParams:

    • scan_limit (int): Return as soon as we scan N items, Default is 10,000 in PgSTAC.
    • items_limit (int): Return as soon as we have N items per geometry, Default is 100 in PgSTAC.
    • time_limit (int): Return after N seconds to avoid long requests, Default is 5sec in PgSTAC.
    • exitwhenfull (bool): Return as soon as the geometry is fully covered, Default is True in PgSTAC.
    • skipcovered (bool): Skip any items that would show up completely under the previous items, Default is True in PgSTAC.

Example:

  • https://myendpoint/searches/f1ed59f0a6ad91ed80ae79b7b52bc707/tiles/0/0/0/assets

:endpoint:/searches/{search_id}/{lon},{lat}/assets

  • PathParams:

    • search_id: PgSTAC Search Identifier (Hash).
    • lon: Longitude (in WGS84 CRS).
    • lat: Latitude (in WGS84 CRS).
  • QueryParams:

    • scan_limit (int): Return as soon as we scan N items, Default is 10,000 in PgSTAC.
    • items_limit (int): Return as soon as we have N items per geometry, Default is 100 in PgSTAC.
    • time_limit (int): Return after N seconds to avoid long requests, Default is 5sec in PgSTAC.
    • exitwhenfull (bool): Return as soon as the geometry is fully covered, Default is True in PgSTAC.
    • skipcovered (bool): Skip any items that would show up completely under the previous items, Default is True in PgSTAC.

Example:

  • https://myendpoint/searches/f1ed59f0a6ad91ed80ae79b7b52bc707/0.0,0.0/assets
"},{"location":"endpoints/searches_endpoints/#statistics","title":"Statistics","text":"

:endpoint:/searches/{search_id}/statistics - [POST]

  • Body:

    • feature (JSON): A valid GeoJSON feature or FeatureCollection
  • PathParams:

    • search_id: PgSTAC Search Identifier (Hash).
  • QueryParams:

    • assets (array[str]): asset names.
    • expression (str): rio-tiler's math expression with asset names (e.g Asset1_b1/Asset2_b1).
    • asset_as_band (bool): tell rio-tiler that each asset is a 1 band dataset, so expression Asset1/Asset2 can be passed.
    • asset_bidx (array[str]): Per asset band index (e.g Asset1|1;2;3).
    • coord_crs (str): Coordinate Reference System of the input geometry. Default to epsg:4326.
    • dst_crs (str): Output Coordinate Reference System. Default to coord_crs.
    • max_size (int): Max image size from which to calculate statistics.
    • height (int): Force image height from which to calculate statistics.
    • width (int): Force image width from which to calculate statistics.
    • nodata: Overwrite internal Nodata value. OPTIONAL
    • unscale (bool): Apply dataset internal Scale/Offset.
    • resampling (str): RasterIO resampling algorithm. Defaults to nearest.
    • reproject (str): WarpKernel resampling algorithm (only used when doing re-projection). Defaults to nearest.
    • algorithm (str): Custom algorithm name (e.g hillshade).
    • algorithm_params (str): JSON encoded algorithm parameters.
    • categorical (bool): Return statistics for a categorical dataset. Defaults to false.
    • c (array[float]): Pixel values for categories.
    • p (array[int]): Percentile values.
    • histogram_bins (str): Histogram bins.
    • histogram_range (str): Comma (',') delimited Min,Max histogram bounds.
    • pixel_selection (str): Pixel selection method (cogeotiff.github.io/rio-tiler/mosaic/).
    • scan_limit (int): Return as soon as N items have been scanned. Defaults to 10,000 in PgSTAC.
    • items_limit (int): Return as soon as N items per geometry have been found. Defaults to 100 in PgSTAC.
    • time_limit (int): Return after N seconds to avoid long requests. Defaults to 5 seconds in PgSTAC.
    • exitwhenfull (bool): Return as soon as the geometry is fully covered. Defaults to True in PgSTAC.
    • skipcovered (bool): Skip any items that would be completely hidden by previously returned items. Defaults to True in PgSTAC.

Important

If height and width are provided, max_size will be ignored.

Example:

  • https://myendpoint/searches/f1ed59f0a6ad91ed80ae79b7b52bc707/statistics?assets=B01
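
Since this is a POST endpoint, the GeoJSON goes in the request body while the raster options stay in the query string. A minimal Python sketch with httpx, assuming an asset named B01 and a purely illustrative polygon (the response is a GeoJSON document with the computed statistics attached):

import httpx\n\nsearch_id = \"f1ed59f0a6ad91ed80ae79b7b52bc707\"\n\n# any valid GeoJSON Feature; this polygon is only an example\nfeature = {\n    \"type\": \"Feature\",\n    \"geometry\": {\n        \"type\": \"Polygon\",\n        \"coordinates\": [[[0, 0], [10, 0], [10, 10], [0, 10], [0, 0]]],\n    },\n    \"properties\": {},\n}\n\nresp = httpx.post(\n    f\"https://myendpoint/searches/{search_id}/statistics\",\n    params={\"assets\": \"B01\", \"max_size\": 1024},\n    json=feature,\n)\nresp.raise_for_status()\nprint(resp.json())\n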
"},{"location":"endpoints/searches_endpoints/#bboxfeature","title":"BBOX/Feature","text":"

:endpoint:/searches/{search_id}/bbox/{minx},{miny},{maxx},{maxy}.{format}

:endpoint:/searches/{search_id}/bbox/{minx},{miny},{maxx},{maxy}/{width}x{height}.{format}

  • PathParams:

    • search_id: PgSTAC Search Identifier (Hash).
    • minx,miny,maxx,maxy (str): Comma (',') delimited bounding box in WGS84.
    • format (str): Output image format.
    • height (int): Force output image height.
    • width (int): Force output image width.
  • QueryParams:

    • assets (array[str]): asset names.
    • expression (str): rio-tiler's math expression with asset names (e.g Asset1_b1/Asset2_b1).
    • asset_as_band (bool): tell rio-tiler that each asset is a 1 band dataset, so expression Asset1/Asset2 can be passed.
    • asset_bidx (array[str]): Per asset band index (e.g Asset1|1;2;3).
    • coord_crs (str): Coordinate Reference System of the input coordinates. Default to epsg:4326.
    • dst_crs (str): Output Coordinate Reference System. Default to coord_crs.
    • max_size (int): Max image size.
    • nodata: Overwrite internal Nodata value. OPTIONAL
    • unscale (bool): Apply dataset internal Scale/Offset.
    • resampling (str): RasterIO resampling algorithm. Defaults to nearest.
    • reproject (str): WarpKernel resampling algorithm (only used when doing re-projection). Defaults to nearest.
    • algorithm (str): Custom algorithm name (e.g hillshade).
    • algorithm_params (str): JSON encoded algorithm parameters.
    • rescale (array[str]): Comma (',') delimited Min,Max range (e.g rescale=0,1000, rescale=0,1000&rescale=0,3000&rescale=0,2000).
    • color_formula (str): rio-color formula.
    • colormap (str): JSON encoded custom Colormap.
    • colormap_name (str): rio-tiler color map name.
    • return_mask (bool): Add mask to the output data. Default is True.
    • pixel_selection (str): Pixel selection method (cogeotiff.github.io/rio-tiler/mosaic/).
    • scan_limit (int): Return as soon as N items have been scanned. Defaults to 10,000 in PgSTAC.
    • items_limit (int): Return as soon as N items per geometry have been found. Defaults to 100 in PgSTAC.
    • time_limit (int): Return after N seconds to avoid long requests. Defaults to 5 seconds in PgSTAC.
    • exitwhenfull (bool): Return as soon as the geometry is fully covered. Defaults to True in PgSTAC.
    • skipcovered (bool): Skip any items that would be completely hidden by previously returned items. Defaults to True in PgSTAC.

Important

If height and width are provided, max_size will be ignored.

Example:

  • https://myendpoint/searches/f1ed59f0a6ad91ed80ae79b7b52bc707/bbox/0,0,10,10.png?assets=B01
  • https://myendpoint/searches/f1ed59f0a6ad91ed80ae79b7b52bc707/bbox/0,0,10,10/400x300.png?assets=B01
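
Because the output is an image, the response body can be written directly to a file; for example (the rescale range is only illustrative):

curl 'https://myendpoint/searches/f1ed59f0a6ad91ed80ae79b7b52bc707/bbox/0,0,10,10/400x300.png?assets=B01&rescale=0,1000' -o bbox.png\n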

:endpoint:/searches/{search_id}/feature[/{width}x{height}][.{format}] - [POST]

  • Body:

    • feature (JSON): A valid GeoJSON feature (Polygon or MultiPolygon)
  • PathParams:

    • search_id: PgSTAC Search Identifier (Hash).
    • height (int): Force output image height. Optional
    • width (int): Force output image width. Optional
    • format (str): Output image format. Defaults to None, in which case the output will be JPEG or PNG depending on the presence of masked values. Optional
  • QueryParams:

    • assets (array[str]): asset names.
    • expression (str): rio-tiler's math expression with asset names (e.g Asset1_b1/Asset2_b1).
    • asset_as_band (bool): tell rio-tiler that each asset is a 1 band dataset, so expression Asset1/Asset2 can be passed.
    • asset_bidx (array[str]): Per asset band index (e.g Asset1|1;2;3).
    • coord_crs (str): Coordinate Reference System of the input geometry. Default to epsg:4326.
    • dst_crs (str): Output Coordinate Reference System. Default to coord_crs.
    • max_size (int): Max image size.
    • nodata: Overwrite internal Nodata value. OPTIONAL
    • unscale (bool): Apply dataset internal Scale/Offset.
    • resampling (str): RasterIO resampling algorithm. Defaults to nearest.
    • reproject (str): WarpKernel resampling algorithm (only used when doing re-projection). Defaults to nearest.
    • algorithm (str): Custom algorithm name (e.g hillshade).
    • algorithm_params (str): JSON encoded algorithm parameters.
    • rescale (array[str]): Comma (',') delimited Min,Max range (e.g rescale=0,1000, rescale=0,1000&rescale=0,3000&rescale=0,2000).
    • color_formula (str): rio-color formula.
    • colormap (str): JSON encoded custom Colormap.
    • colormap_name (str): rio-tiler color map name.
    • return_mask (bool): Add mask to the output data. Default is True.
    • pixel_selection (str): Pixel selection method (cogeotiff.github.io/rio-tiler/mosaic/).
    • scan_limit (int): Return as soon as N items have been scanned. Defaults to 10,000 in PgSTAC.
    • items_limit (int): Return as soon as N items per geometry have been found. Defaults to 100 in PgSTAC.
    • time_limit (int): Return after N seconds to avoid long requests. Defaults to 5 seconds in PgSTAC.
    • exitwhenfull (bool): Return as soon as the geometry is fully covered. Defaults to True in PgSTAC.
    • skipcovered (bool): Skip any items that would be completely hidden by previously returned items. Defaults to True in PgSTAC.

Important

If height and width are provided, max_size will be ignored.

Example:

  • https://myendpoint/searches/f1ed59f0a6ad91ed80ae79b7b52bc707/feature?assets=B01
  • https://myendpoint/searches/f1ed59f0a6ad91ed80ae79b7b52bc707/feature.png?assets=B01
  • https://myendpoint/searches/f1ed59f0a6ad91ed80ae79b7b52bc707/feature/100x100.png?assets=B01
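
As with the statistics endpoint, the GeoJSON feature is sent as the request body; a small httpx sketch (the polygon and asset name are placeholders) that saves the returned image to disk:

import httpx\n\nfeature = {\n    \"type\": \"Feature\",\n    \"geometry\": {\n        \"type\": \"Polygon\",\n        \"coordinates\": [[[0, 0], [10, 0], [10, 10], [0, 10], [0, 0]]],\n    },\n    \"properties\": {},\n}\n\nresp = httpx.post(\n    \"https://myendpoint/searches/f1ed59f0a6ad91ed80ae79b7b52bc707/feature/256x256.png\",\n    params={\"assets\": \"B01\"},\n    json=feature,\n)\nresp.raise_for_status()\n\n# the response body is the encoded image itself\nwith open(\"feature.png\", \"wb\") as f:\n    f.write(resp.content)\n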
"},{"location":"endpoints/searches_endpoints/#point","title":"Point","text":"

:endpoint:/searches/{search_id}/point/{lon},{lat}

  • PathParams:

    • search_id: PgSTAC Search Identifier (Hash).
    • lon: Longitude (in coord-crs, defaults to WGS84).
    • lat: Latitude (in coord-crs, defaults to WGS84).
  • QueryParams:

    • assets (array[str]): asset names.
    • expression (str): rio-tiler's math expression with asset names (e.g Asset1_b1/Asset2_b1).
    • asset_as_band (bool): tell rio-tiler that each asset is a 1 band dataset, so expression Asset1/Asset2 can be passed.
    • asset_bidx (array[str]): Per asset band index (e.g Asset1|1;2;3).
    • coord_crs (str): Coordinate Reference System of the input geometry. Default to epsg:4326.
    • nodata (str, int, float): Overwrite internal Nodata value.
    • unscale (bool): Apply dataset internal Scale/Offset.
    • resampling (str): RasterIO resampling algorithm. Defaults to nearest.
    • reproject (str): WarpKernel resampling algorithm (only used when doing re-projection). Defaults to nearest.
    • scan_limit (int): Return as soon as N items have been scanned. Defaults to 10,000 in PgSTAC.
    • items_limit (int): Return as soon as N items per geometry have been found. Defaults to 100 in PgSTAC.
    • time_limit (int): Return after N seconds to avoid long requests. Defaults to 5 seconds in PgSTAC.
    • exitwhenfull (bool): Return as soon as the geometry is fully covered. Defaults to True in PgSTAC.
    • skipcovered (bool): Skip any items that would be completely hidden by previously returned items. Defaults to True in PgSTAC.

Important

assets OR expression is required

Example:

  • https://myendpoint/searches/f1ed59f0a6ad91ed80ae79b7b52bc707/point/0,0?assets=B01
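
The endpoint also accepts an expression instead of plain assets. Note that a + sign in the expression must be URL-encoded as %2B when passed in the query string; the asset names below (B04, B08) are placeholders:

curl 'https://myendpoint/searches/f1ed59f0a6ad91ed80ae79b7b52bc707/point/0,0?expression=(B08-B04)/(B08%2BB04)&asset_as_band=true' | jq\n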
"},{"location":"endpoints/searches_endpoints/#register-a-search-request","title":"Register a Search Request","text":"

:endpoint:/searches/register - [POST]

  • Body (a combination of Search+Metadata): A JSON body composed of a valid STAC Search query (see: github.com/radiantearth/stac-api-spec/tree/master/item-search) and Mosaic's metadata.
// titiler-pgstac search body example\n{\n  // STAC search query\n  \"collections\": [\n    \"string\"\n  ],\n  \"ids\": [\n    \"string\"\n  ],\n  \"bbox\": [\n    \"number\",\n    \"number\",\n    \"number\",\n    \"number\"\n  ],\n  \"intersects\": {\n    \"type\": \"Point\",\n    \"coordinates\": [\n      \"number\",\n      \"number\"\n    ]\n  },\n  \"query\": {\n    \"additionalProp1\": {},\n    \"additionalProp2\": {},\n    \"additionalProp3\": {}\n  },\n  \"filter\": {},\n  \"datetime\": \"string\",\n  \"sortby\": \"string\",\n  \"filter-lang\": \"cql-json\",\n  // titiler-pgstac mosaic metadata\n  \"metadata\": {\n    \"type\": \"mosaic\",\n    \"bounds\": [\n      \"number\",\n      \"number\",\n      \"number\",\n      \"number\"\n    ],\n    \"minzoom\": \"number\",\n    \"maxzoom\": \"number\",\n    \"name\": \"string\",\n    \"assets\": [\n      \"string\",\n      \"string\",\n    ],\n    \"defaults\": {}\n  }\n}\n

Important

In titiler-pgstac we extended the regular STAC search to add a metadata entry. Metadata defaults to {\"type\": \"mosaic\"}.

Example:

  • https://myendpoint/searches/register
curl -X 'POST' 'http://127.0.0.1:8081/searches/register' -H 'accept: application/json' -H 'Content-Type: application/json' -d '{\"collections\":[\"landsat-c2l2-sr\"], \"bbox\":[-123.75,34.30714385628804,-118.125,38.82259097617712], \"filter-lang\": \"cql-json\"}' | jq\n>> {\n  \"id\": \"5a1b82d38d53a5d200273cbada886bd7\",\n  \"links\": [\n    {\n      \"rel\": \"metadata\",\n      \"type\": \"application/json\",\n      \"href\": \"http://127.0.0.1:8081/searches/5a1b82d38d53a5d200273cbada886bd7/info\"\n    },\n    {\n      \"rel\": \"tilejson\",\n      \"type\": \"application/json\",\n      \"href\": \"http://127.0.0.1:8081/searches/5a1b82d38d53a5d200273cbada886bd7/tilejson.json\"\n    }\n  ]\n}\n\n# or using CQL2\ncurl -X 'POST' 'http://127.0.0.1:8081/searches/register' -H 'accept: application/json' -H 'Content-Type: application/json' -d '{\"filter\": {\"op\": \"=\", \"args\": [{\"property\": \"collection\"}, \"landsat-c2l2-sr\"]}}'\n\n# or using CQL2 with metadata\ncurl -X 'POST' 'http://127.0.0.1:8081/searches/register' -H 'accept: application/json' -H 'Content-Type: application/json' -d '{\"filter\": {\"op\": \"=\", \"args\": [{\"property\": \"collection\"}, \"landsat-c2l2-sr\"]}, \"metadata\": {\"name\": \"landsat mosaic\"}}'\n
"},{"location":"endpoints/searches_endpoints/#search-infos","title":"Search infos","text":"

:endpoint:/searches/{search_id}/info - [GET]

  • PathParams:
    • search_id: PgSTAC Search Identifier (Hash).

Example:

  • https://myendpoint/searches/5a1b82d38d53a5d200273cbada886bd7/info
curl 'http://127.0.0.1:8081/searches/5a1b82d38d53a5d200273cbada886bd7/info' | jq\n>> {\n  \"search\": {\n    \"hash\": \"5a1b82d38d53a5d200273cbada886bd7\",\n    \"search\": {\n      \"bbox\": [\n        -123.75,\n        34.30714385628804,\n        -118.125,\n        38.82259097617712\n      ],\n      \"collections\": [\n        \"landsat-c2l2-sr\"\n      ],\n      \"filter-lang\": \"cql-json\"\n    },\n    \"_where\": \"(  TRUE  )  AND collection_id = ANY ('{landsat-c2l2-sr}')  AND geometry && '0103000020E610000001000000050000000000000000F05EC055F6687D502741400000000000F05EC02D553EA94A6943400000000000885DC02D553EA94A6943400000000000885DC055F6687D502741400000000000F05EC055F6687D50274140' \",\n    \"orderby\": \"datetime DESC, id DESC\",\n    \"lastused\": \"2022-03-03T11:42:07.213313+00:00\",\n    \"usecount\": 2,\n    \"metadata\": {\n      \"type\": \"mosaic\"\n    }\n  },\n  \"links\": [\n    {\n      \"rel\": \"self\",\n      \"type\": \"application/json\",\n      \"href\": \"http://127.0.0.1:8081/searches/5a1b82d38d53a5d200273cbada886bd7/info\"\n    },\n    {\n      \"rel\": \"tilejson\",\n      \"type\": \"application/json\",\n      \"href\": \"http://127.0.0.1:8081/searches/5a1b82d38d53a5d200273cbada886bd7/tilejson.json\"\n    }\n  ]\n}\n
"},{"location":"endpoints/searches_endpoints/#list-searches","title":"List Searches","text":"

:endpoint:/searches/list - [GET]

  • QueryParams:
    • limit (int): Page size limit. Defaults to 10.
    • offset (int): Page offset.
    • sortby (str): Sort the searches by Metadata properties (ascending (default) or descending (-)).

Important

Additional query parameters (of the form PROP=VALUE) will be treated as property filters.

Example:

  • https://myendpoint/searches/list
  • https://myendpoint/searches/list?limit=100
  • https://myendpoint/searches/list?limit=10&offset=10 (page 2)
  • https://myendpoint/searches/list?data=noaa (only show mosaics with metadata.data == noaa)
  • https://myendpoint/searches/list?sortby=lastused (sort mosaics by the lastused PgSTAC search property)
  • https://myendpoint/searches/list?sortby=-prop (sort mosaics (descending) by metadata.prop values)
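
A small Python sketch that pages through all registered searches using the limit and offset parameters; it only relies on the searches array shown in the examples above, and the endpoint base URL is a placeholder:

import httpx\n\nendpoint = \"https://myendpoint\"\nlimit = 50\noffset = 0\n\nwhile True:\n    page = httpx.get(\n        f\"{endpoint}/searches/list\",\n        params={\"limit\": limit, \"offset\": offset},\n    ).json()\n\n    searches = page[\"searches\"]\n    if not searches:\n        # empty page: we have seen everything\n        break\n\n    for entry in searches:\n        print(entry[\"search\"][\"hash\"], entry[\"search\"][\"metadata\"])\n\n    offset += limit\n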
"},{"location":"endpoints/tms_endpoints/","title":"TileMatrixSet","text":"Method URL Output Description GET /tileMatrixSets JSON (TMS list) return the list of supported TileMatrixSet GET /tileMatrixSets/{TileMatrixSetId} JSON (TileMatrixSet) return the TileMatrixSet JSON document"},{"location":"endpoints/tms_endpoints/#list-tms","title":"List TMS","text":"

:endpoint:/tileMatrixSets - Get the list of supported TileMatrixSet

curl https://myendpoint/tileMatrixSets | jq\n>> {\n  \"tileMatrixSets\": [\n    {\n      \"id\": \"LINZAntarticaMapTilegrid\",\n      \"title\": \"LINZ Antarctic Map Tile Grid (Ross Sea Region)\",\n      \"links\": [\n        {\n          \"href\": \"https://myendpoint/tileMatrixSets/LINZAntarticaMapTilegrid\",\n          \"rel\": \"item\",\n          \"type\": \"application/json\"\n        }\n      ]\n    },\n    ...\n  ]\n}\n
"},{"location":"endpoints/tms_endpoints/#get-tms-info","title":"Get TMS info","text":"

:endpoint:/tileMatrixSets/{TileMatrixSetId} - Get the TileMatrixSet JSON document

  • PathParams:
    • TileMatrixSetId: TileMatrixSet name
curl http://127.0.0.1:8000/tileMatrixSets/WebMercatorQuad | jq\n>> {\n  \"type\": \"TileMatrixSetType\",\n  \"title\": \"Google Maps Compatible for the World\",\n  \"identifier\": \"WebMercatorQuad\",\n  \"supportedCRS\": \"http://www.opengis.net/def/crs/EPSG/0/3857\",\n  \"wellKnownScaleSet\": \"http://www.opengis.net/def/wkss/OGC/1.0/GoogleMapsCompatible\",\n  \"boundingBox\": {\n    \"type\": \"BoundingBoxType\",\n    \"crs\": \"http://www.opengis.net/def/crs/EPSG/0/3857\",\n    \"lowerCorner\": [\n      -20037508.3427892,\n      -20037508.3427892\n    ],\n    \"upperCorner\": [\n      20037508.3427892,\n      20037508.3427892\n    ]\n  },\n  \"tileMatrix\": [\n    {\n      \"type\": \"TileMatrixType\",\n      \"identifier\": \"0\",\n      \"scaleDenominator\": 559082264.028717,\n      \"topLeftCorner\": [\n        -20037508.3427892,\n        20037508.3427892\n      ],\n      \"tileWidth\": 256,\n      \"tileHeight\": 256,\n      \"matrixWidth\": 1,\n      \"matrixHeight\": 1\n    },\n    ...\n
"},{"location":"migrations/v1_migration/","title":"v0.8 -> v1.0","text":"

titiler-pgstac version 1.0 introduced many breaking changes. This document aims to help with migrating your code and client application to use titiler-pgstac~=1.0.

"},{"location":"migrations/v1_migration/#endpoints","title":"Endpoints","text":""},{"location":"migrations/v1_migration/#new-collectionscollection_id-endpoints","title":"New /collections/{collection_id} endpoints","text":"

With this new set of endpoints, there is no longer any need to register a PgSTAC Search in advance: the tiler registers it dynamically at request time.

# Simplified version of the CollectionIdParams\n# https://github.com/stac-utils/titiler-pgstac/blob/7da390e42d3abaace5ca9a7172c799289e4cacf7/titiler/pgstac/dependencies.py#L37-L91\ndef CollectionIdParams(\n    request: Request,\n    collection_id: Annotated[\n        str,\n        Path(description=\"STAC Collection Identifier\"),\n    ],\n) -> str:\n    \"\"\"collection_id Path Parameter\"\"\"\n    search = model.PgSTACSearch(collections=[collection_id])\n\n    with request.app.state.dbpool.connection() as conn:\n        with conn.cursor(row_factory=class_row(model.Search)) as cursor:\n\n            metadata = model.Metadata(\n                name=f\"Mosaic for '{collection_id}' Collection\",\n            )\n            cursor.execute(\n                \"SELECT * FROM search_query(%s, _metadata => %s);\",\n                (\n                    search.model_dump_json(by_alias=True, exclude_none=True),\n                    metadata.model_dump_json(exclude_none=True),\n                ),\n            )\n            search_info = cursor.fetchone()\n\n    return search_info.id\n
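
Assuming the application mounts these endpoints under the default /collections/{collection_id} prefix, a mosaic for a whole collection can then be requested directly, for example a TileJSON document for the noaa-emergency-response collection used elsewhere in this documentation:

curl 'http://127.0.0.1:8081/collections/noaa-emergency-response/WebMercatorQuad/tilejson.json?assets=cog' | jq\n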
"},{"location":"migrations/v1_migration/#mosaicsearchid-searchessearch_id","title":"/mosaic/{searchid} -> /searches/{search_id}","text":"

We chose to rename the prefix of the PgSTAC Searches endpoints from /mosaic to /searches to match the collections and items endpoint prefixes.

Note: We also renamed searchid to search_id but this should be seamless for users.

# before\nresp = httpx.get(\"/mosaic/{{ searchid }}/info\")\n\n# now\nresp = httpx.get(\"/searches/{{ search_id }}/info\")\n

Important

You can change the prefix of the MosaicTilerFactory's endpoints, so you can easily revert this change in your own application.

from fastapi import FastAPI\n\nfrom titiler.pgstac.factory import (\n    MosaicTilerFactory,\n    add_search_list_route,\n    add_search_register_route,\n)\nfrom titiler.pgstac.dependencies import SearchIdParams\nfrom titiler.pgstac.extensions import searchInfoExtension\n\napp = FastAPI()\n\n# STAC Search Endpoints\nsearches = MosaicTilerFactory(\n    path_dependency=SearchIdParams,\n    router_prefix=\"/mosaic/{search_id}\",\n    add_statistics=True,\n    add_viewer=True,\n    add_part=True,\n    extensions=[\n        searchInfoExtension(),\n    ],\n)\napp.include_router(\n    searches.router, tags=[\"STAC Search\"], prefix=\"/mosaic/{search_id}\"\n)\nadd_search_register_route(\n    app,\n    prefix=\"/mosaic\",\n    tile_dependencies=[\n        searches.layer_dependency,\n        searches.dataset_dependency,\n        searches.pixel_selection_dependency,\n        searches.process_dependency,\n        searches.rescale_dependency,\n        searches.colormap_dependency,\n        searches.render_dependency,\n        searches.pgstac_dependency,\n        searches.reader_dependency,\n        searches.backend_dependency,\n    ],\n    tags=[\"STAC Search\"],\n)\nadd_search_list_route(app, prefix=\"/mosaic\", tags=[\"STAC Search\"])\n
"},{"location":"migrations/v1_migration/#searchid-id","title":"searchid -> id","text":"

In titiler.pgstac.model.RegisterResponse, the model used by the /register endpoint, we renamed searchid to id.

# before\nresp = httpx.post(\"/mosaic/register\", body={\"collections\": [\"my-collection\"], \"filter-lang\": \"cql-json\"})\nassert resp.json()[\"searchid\"]\n\n# now\nresp = httpx.post(\"/searches/register\", body={\"collections\": [\"my-collection\"], \"filter-lang\": \"cql-json\"})\nassert resp.json()[\"id\"]\n
"},{"location":"migrations/v1_migration/#api","title":"API","text":""},{"location":"migrations/v1_migration/#pathparams-itempathparams-searchidparams-itemidparams","title":"PathParams / ItemPathParams -> SearchIdParams / ItemIdParams","text":"

We renamed both the PathParams and ItemPathParams classes to SearchIdParams and ItemIdParams to better match the CollectionIdParams dependency.

"},{"location":"migrations/v1_migration/#remove-search_id-prefix-in-mosaictilerfactory","title":"remove /{search_id} prefix in MosaicTilerFactory","text":"

In order to re-use the MosaicTilerFactory for collections we had to remove the /{search_id} prefix which was hardcoded in each endpoint route. It now has to be added to the router_prefix.

# Before\napp = FastAPI()\nmosaic = MosaicTilerFactory(\n    ...,\n    router_prefix=\"/mosaics\"\n)\napp.include_router(mosaic.router, prefix=\"/mosaics\")\n\n# Now\napp = FastAPI()\nmosaic = MosaicTilerFactory(\n    ...\n    router_prefix=\"/mosaics/{search_id}\"\n)\napp.include_router(mosaic.router, prefix=\"/mosaics/{search_id}\")\n
"},{"location":"migrations/v1_migration/#path_dependency-required-input-for-mosaictilerfactory-class","title":"path_dependency required input for MosaicTilerFactory class","text":"

With the introduction of the collections endpoints, and because we removed the default {search_id} prefix, we cannot default to SearchIdParams for the path_dependency (the dependency which passes the PgSTAC search identifier to the Mosaic Reader). It is therefore a required attribute when initializing the endpoints.

# before\napp = FastAPI()\nmosaic = MosaicTilerFactory(...)\napp.include_router(mosaic.router)\n\n# now\napp = FastAPI()\nmosaic = MosaicTilerFactory(\n    ...,\n    path_dependency=lambda: \"aaaaaaaaaaaaaa\"\n)\napp.include_router(mosaic.router)\n
"},{"location":"migrations/v1_migration/#searchinfoextension","title":"searchInfoExtension","text":"

We moved the MosaicTilerFactory info endpoint outside the class to its own extension.

# Before\napp = FastAPI()\nmosaic = MosaicTilerFactory(...)\napp.include_router(mosaic.router)\n\n# Now\napp = FastAPI()\nmosaic = MosaicTilerFactory(\n    ...\n    extensions=[\n        searchInfoExtension(),\n    ]\n)\napp.include_router(mosaic.router)\n
"},{"location":"migrations/v1_migration/#register-and-list-endpoints","title":"Register and List endpoints","text":"

We moved the creation of the /register and /list endpoints outside the MosaicTilerFactory class because they are not usable for the collection endpoints and do not need the /{search_id} prefix.

# before\nfrom titiler.pgstac.factory import MosaicTilerFactory\n\nmosaic = MosaicTilerFactory(\n    ...,\n    router_prefix=\"/{search_id}\",\n)\napp.include_router(mosaic.router, prefix=\"/{search_id}\")\n\n# Now\nfrom titiler.pgstac.factory import (\n    MosaicTilerFactory,\n    add_search_register_route,\n    add_search_list_route,\n)\n\nmosaic = MosaicTilerFactory(\n    ...,\n    router_prefix=\"/{search_id}\",\n)\napp.include_router(mosaic.router, prefix=\"/{search_id}\")\n\n# add /register endpoint\nadd_search_register_route(\n    app,\n    # any dependency we want to validate\n    # when creating the tilejson/map links\n    tile_dependencies=[\n        mosaic.layer_dependency,\n        mosaic.dataset_dependency,\n        mosaic.pixel_selection_dependency,\n        mosaic.process_dependency,\n        mosaic.rescale_dependency,\n        mosaic.colormap_dependency,\n        mosaic.render_dependency,\n        mosaic.pgstac_dependency,\n        mosaic.reader_dependency,\n        mosaic.backend_dependency,\n    ],\n)\n# add /list endpoint\nadd_search_list_route(app)\n
"},{"location":"notebooks/demo/","title":"demo","text":"In\u00a0[1]: Copied!
import json\nimport httpx\nfrom folium import Map, TileLayer, GeoJson\n\nfrom geojson_pydantic import Feature, Polygon\n\nendpoint = \"http://127.0.0.1:8081\"\n\nprint(httpx.get(f\"{endpoint}/healthz\").json())\n
import json import httpx from folium import Map, TileLayer, GeoJson from geojson_pydantic import Feature, Polygon endpoint = \"http://127.0.0.1:8081\" print(httpx.get(f\"{endpoint}/healthz\").json())
{'database_online': True}\n
In\u00a0[3]: Copied!
# bounds of the noaa-eri-nashville2020.json items\nbounds = (-87.0251, 36.0999, -85.4249, 36.2251)\n\npoly = Polygon.from_bounds(*bounds)\ngeojson = Feature(type=\"Feature\", geometry=poly, properties=None).dict(exclude_none=True)\n\nm = Map(\n    tiles=\"OpenStreetMap\",\n    location=((bounds[1] + bounds[3]) / 2,(bounds[0] + bounds[2]) / 2),\n    zoom_start=8\n)\n\ngeo_json = GeoJson(\n    data=geojson,\n    style_function=lambda x: {\n        'opacity': 1, 'dashArray': '1', 'fillOpacity': 0, 'weight': 1\n    },\n)\ngeo_json.add_to(m)\nm\n
# bounds of the noaa-eri-nashville2020.json items bounds = (-87.0251, 36.0999, -85.4249, 36.2251) poly = Polygon.from_bounds(*bounds) geojson = Feature(type=\"Feature\", geometry=poly, properties=None).dict(exclude_none=True) m = Map( tiles=\"OpenStreetMap\", location=((bounds[1] + bounds[3]) / 2,(bounds[0] + bounds[2]) / 2), zoom_start=8 ) geo_json = GeoJson( data=geojson, style_function=lambda x: { 'opacity': 1, 'dashArray': '1', 'fillOpacity': 0, 'weight': 1 }, ) geo_json.add_to(m) m Out[3]: Make this Notebook Trusted to load map: File -> Trust Notebook In\u00a0[4]: Copied!
search_request = {\n    # Filter collection\n    \"collections\": [\"noaa-emergency-response\"],\n    # limit bounds of the known items (note: the bbox will also be used in the tilejson response)\n    \"bbox\": bounds,\n    \"filter-lang\": \"cql-json\",\n}\n\nresponse = httpx.post(\n    f\"{endpoint}/mosaic/register\", json=search_request,\n).json()\nprint(response)\n\nsearchid = response[\"id\"]\n
search_request = { # Filter collection \"collections\": [\"noaa-emergency-response\"], # limit bounds of the known items (note: the bbox will also be used in the tilejson response) \"bbox\": bounds, \"filter-lang\": \"cql-json\", } response = httpx.post( f\"{endpoint}/mosaic/register\", json=search_request, ).json() print(response) searchid = response[\"id\"]
{'searchid': '6d436413d0eed760acc2f6bd16ca77a5', 'links': [{'rel': 'metadata', 'type': 'application/json', 'href': 'http://127.0.0.1:8081/mosaic/6d436413d0eed760acc2f6bd16ca77a5/info'}, {'rel': 'tilejson', 'type': 'application/json', 'href': 'http://127.0.0.1:8081/mosaic/6d436413d0eed760acc2f6bd16ca77a5/tilejson.json'}]}\n
In\u00a0[5]: Copied!
response = httpx.get(f\"{endpoint}/mosaic/list\").json()\nprint(\n    [\n        search[\"search\"][\"hash\"]\n        for search in response[\"searches\"]\n    ]\n)\n
response = httpx.get(f\"{endpoint}/mosaic/list\").json() print( [ search[\"search\"][\"hash\"] for search in response[\"searches\"] ] )
['b2b4e952ae7a8dd69cd11d595b398945', '6d436413d0eed760acc2f6bd16ca77a5']\n
In\u00a0[6]: Copied!
info_response = httpx.get(f\"{endpoint}/mosaic/{searchid}/info\").json()\nprint(info_response)\n
info_response = httpx.get(f\"{endpoint}/mosaic/{searchid}/info\").json() print(info_response)
{'search': {'hash': '6d436413d0eed760acc2f6bd16ca77a5', 'search': {'bbox': [-87.0251, 36.0999, -85.4249, 36.2251], 'collections': ['noaa-emergency-response'], 'filter-lang': 'cql-json'}, '_where': \"collection = ANY ('{noaa-emergency-response}')  AND st_intersects(geometry, '0103000020E610000001000000050000004BC8073D9BC155C0696FF085C90C42404BC8073D9BC155C0302AA913D01C42408104C58F315B55C0302AA913D01C42408104C58F315B55C0696FF085C90C42404BC8073D9BC155C0696FF085C90C4240')\", 'orderby': 'datetime DESC, id DESC', 'lastused': '2023-05-24T10:09:15.184690+00:00', 'usecount': 1, 'metadata': {'type': 'mosaic'}}, 'links': [{'rel': 'self', 'type': 'application/json', 'href': 'http://127.0.0.1:8081/mosaic/6d436413d0eed760acc2f6bd16ca77a5/info'}, {'rel': 'tilejson', 'type': 'application/json', 'href': 'http://127.0.0.1:8081/mosaic/6d436413d0eed760acc2f6bd16ca77a5/tilejson.json'}]}\n
In\u00a0[7]: Copied!
tj_response = httpx.get(f\"{endpoint}/mosaic/{searchid}/tilejson.json?assets=cog\").json()\nprint(tj_response)\n
tj_response = httpx.get(f\"{endpoint}/mosaic/{searchid}/tilejson.json?assets=cog\").json() print(tj_response)
{'tilejson': '2.2.0', 'name': '6d436413d0eed760acc2f6bd16ca77a5', 'version': '1.0.0', 'scheme': 'xyz', 'tiles': ['http://127.0.0.1:8081/mosaic/6d436413d0eed760acc2f6bd16ca77a5/tiles/WebMercatorQuad/{z}/{x}/{y}@1x?assets=cog'], 'minzoom': 0, 'maxzoom': 24, 'bounds': [-87.0251, 36.0999, -85.4249, 36.2251], 'center': [-86.225, 36.162499999999994, 0]}\n
In\u00a0[8]: Copied!
m = Map(\n    location=((bounds[1] + bounds[3]) / 2,(bounds[0] + bounds[2]) / 2),\n    zoom_start=14\n)\n\ngeo_json = GeoJson(\n    data=geojson,\n    style_function=lambda x: {\n        'opacity': 1, 'dashArray': '1', 'fillOpacity': 0, 'weight': 1\n    },\n)\ngeo_json.add_to(m)\n\naod_layer = TileLayer(\n    tiles=tj_response[\"tiles\"][0],\n    attr=\"Mosaic\",\n    min_zoom=14,\n    max_zoom=18,\n    max_native_zoom=18,\n)\naod_layer.add_to(m)\nm\n
m = Map( location=((bounds[1] + bounds[3]) / 2,(bounds[0] + bounds[2]) / 2), zoom_start=14 ) geo_json = GeoJson( data=geojson, style_function=lambda x: { 'opacity': 1, 'dashArray': '1', 'fillOpacity': 0, 'weight': 1 }, ) geo_json.add_to(m) aod_layer = TileLayer( tiles=tj_response[\"tiles\"][0], attr=\"Mosaic\", min_zoom=14, max_zoom=18, max_native_zoom=18, ) aod_layer.add_to(m) m Out[8]: Make this Notebook Trusted to load map: File -> Trust Notebook In\u00a0[9]: Copied!
search_request = {\n    # Filter collection\n    \"collections\": [\"noaa-emergency-response\"],\n    # limit bounds of the known items (note: the bbox will also be used in the tilejson response)\n    \"bbox\": bounds,\n    \"filter-lang\": \"cql-json\",\n    \"metadata\": {\n        \"bounds\": [-87.0251, 36.0999, -85.4249, 36.2251],  # This is redondant because it's in the bbox filter\n        \"minzoom\": 14,\n        \"maxzoom\": 18,\n        \"assets\": [\"cog\"],\n        \"defaults\": {\n            \"true_color\": {\n                \"bidx\": [1, 2, 3],\n            },\n        },\n    },\n}\n\nresponse = httpx.post(\n    f\"{endpoint}/mosaic/register\", json=search_request,\n).json()\nprint(response)\n\nsearchid = response[\"id\"]\n
search_request = { # Filter collection \"collections\": [\"noaa-emergency-response\"], # limit bounds of the known items (note: the bbox will also be used in the tilejson response) \"bbox\": bounds, \"filter-lang\": \"cql-json\", \"metadata\": { \"bounds\": [-87.0251, 36.0999, -85.4249, 36.2251], # This is redondant because it's in the bbox filter \"minzoom\": 14, \"maxzoom\": 18, \"assets\": [\"cog\"], \"defaults\": { \"true_color\": { \"bidx\": [1, 2, 3], }, }, }, } response = httpx.post( f\"{endpoint}/mosaic/register\", json=search_request, ).json() print(response) searchid = response[\"id\"]
{'searchid': '4b0db3dbd1858d54a3a55f84de97d1ca', 'links': [{'rel': 'metadata', 'type': 'application/json', 'href': 'http://127.0.0.1:8081/mosaic/4b0db3dbd1858d54a3a55f84de97d1ca/info'}, {'rel': 'tilejson', 'type': 'application/json', 'href': 'http://127.0.0.1:8081/mosaic/4b0db3dbd1858d54a3a55f84de97d1ca/tilejson.json'}]}\n
In\u00a0[10]: Copied!
tj_response = httpx.get(f\"{endpoint}/mosaic/{searchid}/tilejson.json?assets=cog\").json()\nprint(tj_response)\n
tj_response = httpx.get(f\"{endpoint}/mosaic/{searchid}/tilejson.json?assets=cog\").json() print(tj_response)
{'tilejson': '2.2.0', 'name': '4b0db3dbd1858d54a3a55f84de97d1ca', 'version': '1.0.0', 'scheme': 'xyz', 'tiles': ['http://127.0.0.1:8081/mosaic/4b0db3dbd1858d54a3a55f84de97d1ca/tiles/WebMercatorQuad/{z}/{x}/{y}@1x?assets=cog'], 'minzoom': 14, 'maxzoom': 18, 'bounds': [-87.0251, 36.0999, -85.4249, 36.2251], 'center': [-86.225, 36.162499999999994, 14]}\n
In\u00a0[11]: Copied!
m = Map(\n    location=((bounds[1] + bounds[3]) / 2,(bounds[0] + bounds[2]) / 2),\n    zoom_start=14\n)\n\ngeo_json = GeoJson(\n    data=geojson,\n    style_function=lambda x: {\n        'opacity': 1, 'dashArray': '1', 'fillOpacity': 0, 'weight': 1\n    },\n)\ngeo_json.add_to(m)\n\naod_layer = TileLayer(\n    tiles=tj_response[\"tiles\"][0],\n    attr=\"Mosaic\",\n    min_zoom=tj_response[\"minzoom\"],\n    max_zoom=tj_response[\"maxzoom\"],\n    max_native_zoom=tj_response[\"maxzoom\"],    \n)\naod_layer.add_to(m)\nm\n
m = Map( location=((bounds[1] + bounds[3]) / 2,(bounds[0] + bounds[2]) / 2), zoom_start=14 ) geo_json = GeoJson( data=geojson, style_function=lambda x: { 'opacity': 1, 'dashArray': '1', 'fillOpacity': 0, 'weight': 1 }, ) geo_json.add_to(m) aod_layer = TileLayer( tiles=tj_response[\"tiles\"][0], attr=\"Mosaic\", min_zoom=tj_response[\"minzoom\"], max_zoom=tj_response[\"maxzoom\"], max_native_zoom=tj_response[\"maxzoom\"], ) aod_layer.add_to(m) m Out[11]: Make this Notebook Trusted to load map: File -> Trust Notebook In\u00a0[\u00a0]: Copied!
\n
"},{"location":"notebooks/demo/#titilerpgstac-demo","title":"titiler.PgSTAC Demo\u00b6","text":"

This Notebook aims to show the different features provided by the titiler.pgstac application.

In order to run this demo you'll need to have a PgSTAC database and the titiler.pgstac application running. The easiest way to launch them is to use the repo's docker-compose.yml.

docker-compose up tiler\n
"},{"location":"notebooks/demo/#python-requirements","title":"Python requirements\u00b6","text":"
pip install httpx folium pypgstac psycopg psycopg-pool geojson-pydantic\n
"},{"location":"notebooks/demo/#populate-the-pgstac-db-with-data","title":"Populate the PgSTAC db with data\u00b6","text":"
$ pypgstac load collections tests/fixtures/noaa-emergency-response.json --dsn postgresql://username:password@localhost:5439/postgis --method insert \n$ pypgstac load items tests/fixtures/noaa-eri-nashville2020.json --dsn postgresql://username:password@localhost:5439/postgis --method insert\n
"},{"location":"notebooks/demo/#register-search-query","title":"Register Search query\u00b6","text":""},{"location":"notebooks/demo/#show-list-of-mosaics","title":"Show list of Mosaics\u00b6","text":""},{"location":"notebooks/demo/#get-search-metadata","title":"Get Search Metadata\u00b6","text":""},{"location":"notebooks/demo/#get-tilejson","title":"Get TileJSON\u00b6","text":"

Note: to return a valid tilejson document you'll need to pass either the assets or expression option.

"},{"location":"notebooks/demo/#load-tiles","title":"Load tiles\u00b6","text":""},{"location":"notebooks/demo/#register-a-mosaic-with-metadata","title":"Register a Mosaic with Metadata\u00b6","text":""}]} \ No newline at end of file diff --git a/1.3.0/sitemap.xml b/1.3.0/sitemap.xml new file mode 100644 index 00000000..03aa87c3 --- /dev/null +++ b/1.3.0/sitemap.xml @@ -0,0 +1,128 @@ + + + + https://stac-utils.github.io/titiler-pgstac/1.3.0/ + 2024-05-17 + daily + + + https://stac-utils.github.io/titiler-pgstac/1.3.0/contributing/ + 2024-05-17 + daily + + + https://stac-utils.github.io/titiler-pgstac/1.3.0/intro/ + 2024-05-17 + daily + + + https://stac-utils.github.io/titiler-pgstac/1.3.0/release-notes/ + 2024-05-17 + daily + + + https://stac-utils.github.io/titiler-pgstac/1.3.0/tiler_factories/ + 2024-05-17 + daily + + + https://stac-utils.github.io/titiler-pgstac/1.3.0/advanced/custom_search/ + 2024-05-17 + daily + + + https://stac-utils.github.io/titiler-pgstac/1.3.0/advanced/custom_tilejson/ + 2024-05-17 + daily + + + https://stac-utils.github.io/titiler-pgstac/1.3.0/advanced/metadata/ + 2024-05-17 + daily + + + https://stac-utils.github.io/titiler-pgstac/1.3.0/advanced/searches_list/ + 2024-05-17 + daily + + + https://stac-utils.github.io/titiler-pgstac/1.3.0/api/titiler/pgstac/db/ + 2024-05-17 + daily + + + https://stac-utils.github.io/titiler-pgstac/1.3.0/api/titiler/pgstac/dependencies/ + 2024-05-17 + daily + + + https://stac-utils.github.io/titiler-pgstac/1.3.0/api/titiler/pgstac/extensions/ + 2024-05-17 + daily + + + https://stac-utils.github.io/titiler-pgstac/1.3.0/api/titiler/pgstac/factory/ + 2024-05-17 + daily + + + https://stac-utils.github.io/titiler-pgstac/1.3.0/api/titiler/pgstac/model/ + 2024-05-17 + daily + + + https://stac-utils.github.io/titiler-pgstac/1.3.0/api/titiler/pgstac/mosaic/ + 2024-05-17 + daily + + + https://stac-utils.github.io/titiler-pgstac/1.3.0/api/titiler/pgstac/reader/ + 2024-05-17 + daily + + + https://stac-utils.github.io/titiler-pgstac/1.3.0/api/titiler/pgstac/settings/ + 2024-05-17 + daily + + + https://stac-utils.github.io/titiler-pgstac/1.3.0/api/titiler/pgstac/utils/ + 2024-05-17 + daily + + + https://stac-utils.github.io/titiler-pgstac/1.3.0/endpoints/ + 2024-05-17 + daily + + + https://stac-utils.github.io/titiler-pgstac/1.3.0/endpoints/collections_endpoints/ + 2024-05-17 + daily + + + https://stac-utils.github.io/titiler-pgstac/1.3.0/endpoints/items_endpoints/ + 2024-05-17 + daily + + + https://stac-utils.github.io/titiler-pgstac/1.3.0/endpoints/searches_endpoints/ + 2024-05-17 + daily + + + https://stac-utils.github.io/titiler-pgstac/1.3.0/endpoints/tms_endpoints/ + 2024-05-17 + daily + + + https://stac-utils.github.io/titiler-pgstac/1.3.0/migrations/v1_migration/ + 2024-05-17 + daily + + + https://stac-utils.github.io/titiler-pgstac/1.3.0/notebooks/demo/ + 2024-05-17 + daily + + \ No newline at end of file diff --git a/1.3.0/sitemap.xml.gz b/1.3.0/sitemap.xml.gz new file mode 100644 index 0000000000000000000000000000000000000000..a48e45cb1882a826f5cbfdd75bf606757d6c9f85 GIT binary patch literal 427 zcmV;c0aX4UiwFn+o`mj zMj>$_TS!>!e!cw?+EtZyohCUKh~rrQpCH(#-W*4p?IED?Zk;d7D$l?vuZ`>1`TN^z z@sw|xMZF6afwJiAaGeiC(~A&?!=Xe&Fv+xOUsMmp@-ezx zY&ULdkgkVqfUibNV^6iXv7GgZtH8gJs=T9My6EeAPP$E$?Kue!U_l}j&J!Rf;j^jT zN<}^xUecxBOQ%2!wL|h_iy(sD!w4j`B*{|z`P~6*twCZ%FqxkPaaqtE zcL12c=~w`8`yl;Bfg^!SRSkELbA~uEr--7!vS&bP6my6GQpXu@h}esy!kYWhp7(KL zh{<8{u|;4iT6t?hQA!FebClF!vxm6We#|+D>!TP=ALM)<#D3XcXPgjH-`{!viLrPc V|KZQYe2Mv|`34t<8%8@5003Zn%C-Oi literal 0 HcmV?d00001 
diff --git a/1.3.0/tiler_factories/index.html b/1.3.0/tiler_factories/index.html new file mode 100644 index 00000000..3d11a4ea --- /dev/null +++ b/1.3.0/tiler_factories/index.html @@ -0,0 +1,1520 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + Tiler Factories - TiTiler.PgSTAC + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ +
+ + + + + + + + +
+ + +
+ +
+ + + + + + +
+
+ + + +
+
+
+ + + + + +
+
+
+ + + +
+ +
+ + + +
+
+ + + + + + + +

Tiler Factories

+ +

Mosaics: titiler.pgstac.factory.MosaicTilerFactory

+

TiTiler.PgSTAC provides a MosaicTilerFactory factory, a helper function that creates a FastAPI router (fastapi.APIRouter) with a minimal set of endpoints.

+
# Minimal PgSTAC Mosaic Application
+from contextlib import asynccontextmanager
+
+from fastapi import FastAPI
+from titiler.pgstac.db import close_db_connection, connect_to_db
+from titiler.pgstac.factory import MosaicTilerFactory
+
+@asynccontextmanager
+async def lifespan(app: FastAPI):
+    """FastAPI Lifespan."""
+    # Create Connection Pool
+    await connect_to_db(app)
+    yield
+    # Close the Connection Pool
+    await close_db_connection(app)
+
+
+app = FastAPI(lifespan=lifespan)
+
+mosaic = MosaicTilerFactory(
+    path_dependency=lambda: "aaaaaaaaaaaaaaaaaaaaa",
+)
+app.include_router(mosaic.router)
+
+
+

Important

+

The MosaicTilerFactory requires a path_dependency, which should be a Callable that returns a search_id (PgSTAC Search Hash).

+

For the /searches/{search_id} endpoints the path_dependency is set to titiler.pgstac.dependencies.SearchIdParams, and for the /collections/{collection_id} endpoints it is set to titiler.pgstac.dependencies.CollectionIdParams.

+
+

`

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
MethodURLOutputDescription
GET/{lon},{lat}/assetsJSONReturn a list of assets which overlap a given point
GET/tiles[/{TileMatrixSetId}]/{z}/{x}/{y}/assetsJSONReturn a list of assets which overlap a given tile
GET/tiles[/{TileMatrixSetId}]/{z}/{x}/{y}[@{scale}x][.{format}]image/binCreate a web map tile image for a search query and a tile index
GET[/{TileMatrixSetId}]/tilejson.jsonJSON (TileJSON)Return a Mapbox TileJSON document
GET[/{TileMatrixSetId}]/WMTSCapabilities.xmlXMLReturn OGC WMTS Get Capabilities
GET[/{TileMatrixSetId}]/mapHTMLSimple map viewer OPTIONAL
POST/statisticsGeoJSON (Statistics)Return statistics for geojson features OPTIONAL
GET/bbox/{minx},{miny},{maxx},{maxy}[/{width}x{height}].{format}image/binCreate an image from part of a dataset OPTIONAL
POST/feature[/{width}x{height}][.{format}]image/binCreate an image from a GeoJSON feature OPTIONAL
GET/point/{lon},{lat}JSON (Point)Return pixel values from assets intersecting with a given point
+

Extensions

+

searchInfoExtension

+ + + + + + + + + + + + + + + + + +
MethodURLOutputDescription
GET/infoJSON (Infos)Return list of Search entries with Mosaic type OPTIONAL
+
app = FastAPI()
+mosaic = MosaicTilerFactory(
+    path_dependency=lambda: "aaaaaaaaaaaaaaaaaaaaa",
+    extensions=[
+        searchInfoExtension(),
+    ],
+)
+app.include_router(mosaic.router)
+
+

register and list

+ + + + + + + + + + + + + + + + + + + + + + + +
MethodURLOutputDescription
POST/registerJSON (Register)Register Search query OPTIONAL
GET/listJSON (Info)Return Search query infos OPTIONAL
+
app = FastAPI()
+mosaic = MosaicTilerFactory(
+    path_dependency=lambda: "aaaaaaaaaaaaaaaaaaaaa",
+)
+app.include_router(mosaic.router)
+
+add_search_register_route(app)
+add_search_list_route(app)
+
+

Items: titiler.core.factory.MultiBaseTilerFactory

+

For the single STAC item endpoints we use TiTiler's MultiBaseTilerFactory with a custom path_dependency to use the item_id and collection_id path parameters (instead of the default url query param).

+

This custom path_dependency will connect to PgSTAC directly to fetch the STAC Item and pass it to a custom Reader.

+
# Minimal PgSTAC Item Application
+from contextlib import asynccontextmanager
+
+from fastapi import FastAPI
+
+from titiler.core.factory import MultiBaseTilerFactory
+
+from titiler.pgstac.db import close_db_connection, connect_to_db
+from titiler.pgstac.dependencies import ItemPathParams
+from titiler.pgstac.reader import PgSTACReader
+
+
+@asynccontextmanager
+async def lifespan(app: FastAPI):
+    """FastAPI Lifespan."""
+    # Create Connection Pool
+    await connect_to_db(app)
+    yield
+    # Close the Connection Pool
+    await close_db_connection(app)
+
+
+app = FastAPI(lifespan=lifespan)
+
+item = MultiBaseTilerFactory(
+    reader=PgSTACReader,
+    path_dependency=ItemPathParams,
+    router_prefix="/collections/{collection_id}/items/{item_id}",
+)
+app.include_router(item.router, prefix="/collections/{collection_id}/items/{item_id}")
+
+ + + + + + + + + + + + + +
+
+ + + +
+ +
+ + + +
+
+
+
+ + + + + + + + + + \ No newline at end of file diff --git a/1.3.0/tiler_factories/tiler_factories.md b/1.3.0/tiler_factories/tiler_factories.md new file mode 100644 index 00000000..fa54bc5d --- /dev/null +++ b/1.3.0/tiler_factories/tiler_factories.md @@ -0,0 +1,135 @@ + + +## Mosaics: `titiler.pgstac.factory.MosaicTilerFactory` + +`TiTiler.PgSTAC` provides a `MosaicTilerFactory` factory which is an helper functions to create FastAPI router (`fastapi.APIRouter`) with a minimal set of endpoints. + +```python +# Minimal PgSTAC Mosaic Application +from contextlib import asynccontextmanager + +from fastapi import FastAPI +from titiler.pgstac.db import close_db_connection, connect_to_db +from titiler.pgstac.factory import MosaicTilerFactory + +@asynccontextmanager +async def lifespan(app: FastAPI): + """FastAPI Lifespan.""" + # Create Connection Pool + await connect_to_db(app) + yield + # Close the Connection Pool + await close_db_connection(app) + + +app = FastAPI(lifespan=lifespan) + +mosaic = MosaicTilerFactory( + path_dependency=lambda: "aaaaaaaaaaaaaaaaaaaaa", +) +app.include_router(mosaic.router) +``` + +!!! Important + + The `MosaicTilerFactory` requires a `path_dependency`, which should be a `Callable` that return a *search_id* (PgSTAC Search Hash). + + For the `/searches/{search_id}` endpoints the `path_dependency` is set to `titiler.pgstac.dependencies.SearchIdParams` and to `titiler.pgstac.dependencies.CollectionIdParams` for the `/collections/{collection_id}` endpoints. +` + + +| Method | URL | Output | Description +| ------ | ---------------------------------------------------------------------------|---------------------------------------- |-------------- +| `GET` | `/{lon},{lat}/assets` | JSON | Return a list of assets which overlap a given point +| `GET` | `/tiles[/{TileMatrixSetId}]/{z}/{x}/{Y}/assets` | JSON | Return a list of assets which overlap a given tile +| `GET` | `/tiles[/{TileMatrixSetId}]/{z}/{x}/{y}[@{scale}x][.{format}]` | image/bin | Create a web map tile image for a search query and a tile index +| `GET` | `[/{TileMatrixSetId}]/tilejson.json` | JSON ([TileJSON][tilejson_model]) | Return a Mapbox TileJSON document +| `GET` | `[/{TileMatrixSetId}]/WMTSCapabilities.xml` | XML | Return OGC WMTS Get Capabilities +| `GET` | `[/{TileMatrixSetId}]/map` | HTML | Simple map viewer **OPTIONAL** +| `POST` | `/statistics` | GeoJSON ([Statistics][statitics_model]) | Return statistics for geojson features **OPTIONAL** +| `GET` | `/bbox/{minx},{miny},{maxx},{maxy}[/{width}x{height}].{format}`| image/bin | Create an image from part of a dataset **OPTIONAL** +| `POST` | `/feature[/{width}x{height}][.{format}]` | image/bin | Create an image from a GeoJSON feature **OPTIONAL** +| `GET` | `/point/{lon}x{lat}` | JSON ([Point][point_model]) | Return pixel values from assets intersecting with a given point + +### Extensions + +#### `searchInfoExtension` + +| Method | URL | Output | Description +| ------ | ---------------------------------------------------------------------------|---------------------------------------- |-------------- +| `GET` | `/info` | JSON ([Infos][infos_model]) | Return list of **Search** entries with `Mosaic` type **OPTIONAL** + +```python +app = FastAPI() +mosaic = MosaicTilerFactory( + path_dependency=lambda: "aaaaaaaaaaaaaaaaaaaaa", + extensions=[ + searchInfoExtension(), + ], +) +app.include_router(mosaic.router) +``` + +#### `register and list` + +| Method | URL | Output | Description +| ------ | 
---------------------------------------------------------------------------|---------------------------------------- |-------------- +| `POST` | `/register` | JSON ([Register][register_model]) | Register **Search** query **OPTIONAL** +| `GET` | `/list` | JSON ([Info][info_model]) | Return **Search** query infos **OPTIONAL** + +```python +app = FastAPI() +mosaic = MosaicTilerFactory( + path_dependency=lambda: "aaaaaaaaaaaaaaaaaaaaa", +) +app.include_router(mosaic.router) + +add_search_register_route(app) +add_search_list_route(app) +``` + +## Items: `titiler.core.factory.MultiBaseTilerFactory` + +For the `single STAC item` endpoints we use TiTiler's [MultiBaseTilerFactory](https://developmentseed.org/titiler/advanced/tiler_factories/#titilercorefactorymultibasetilerfactory) with a custom [`path_dependency`]() to use `item_id` and `collection_id` path parameter (instead of the default `url` query param). + +This custom `path_dependency` will connect to PgSTAC directly to fetch the STAC Item and pass it to a custom [Reader](https://github.com/stac-utils/titiler-pgstac/blob/d777eca04770622982121daa2df42d429e8c244d/titiler/pgstac/reader.py#L17-L25). + +```python +# Minimal PgSTAC Item Application +from contextlib import asynccontextmanager + +from fastapi import FastAPI + +from titiler.core.factory import MultiBaseTilerFactory + +from titiler.pgstac.db import close_db_connection, connect_to_db +from titiler.pgstac.dependencies import ItemPathParams +from titiler.pgstac.reader import PgSTACReader + + +@asynccontextmanager +async def lifespan(app: FastAPI): + """FastAPI Lifespan.""" + # Create Connection Pool + await connect_to_db(app) + yield + # Close the Connection Pool + await close_db_connection(app) + + +app = FastAPI(lifespan=lifespan) + +item = MultiBaseTilerFactory( + reader=PgSTACReader, + path_dependency=ItemPathParams, + router_prefix="/collections/{collection_id}/items/{item_id}", +) +app.include_router(item.router, prefix="/collections/{collection_id}/items/{item_id}") +``` + +[tilejson_model]: https://github.com/developmentseed/titiler/blob/2335048a407f17127099cbbc6c14e1328852d619/src/titiler/core/titiler/core/models/mapbox.py#L16-L38 +[info_model]: https://github.com/stac-utils/titiler-pgstac/blob/047315da8851a974660032ca45f219db2c3a8d54/titiler/pgstac/model.py#L236-L240 +[infos_model]: https://github.com/stac-utils/titiler-pgstac/blob/4f569fee1946f853be9b9149cb4dd2fd5c62b110/titiler/pgstac/model.py#L260-L265 +[register_model]: https://github.com/stac-utils/titiler-pgstac/blob/047315da8851a974660032ca45f219db2c3a8d54/titiler/pgstac/model.py#L229-L233 +[statitics_model]: https://github.com/developmentseed/titiler/blob/17cdff2f0ddf08dbd9a47c2140b13c4bbcc30b6d/src/titiler/core/titiler/core/models/responses.py#L49-L52 +[point_model]: https://github.com/developmentseed/titiler/blob/e396959e7f818909a5494301a809b5f795aa202e/src/titiler/mosaic/titiler/mosaic/models/responses.py#L8-L17 diff --git a/latest b/latest index e2cac26c..589268e6 120000 --- a/latest +++ b/latest @@ -1 +1 @@ -1.2.3 \ No newline at end of file +1.3.0 \ No newline at end of file diff --git a/versions.json b/versions.json index 7180cf58..a0178f6c 100644 --- a/versions.json +++ b/versions.json @@ -1,11 +1,16 @@ [ { - "version": "1.2.3", - "title": "1.2.3", + "version": "1.3.0", + "title": "1.3.0", "aliases": [ "latest" ] }, + { + "version": "1.2.3", + "title": "1.2.3", + "aliases": [] + }, { "version": "1.2.2", "title": "1.2.2",