From b0e8a6d27f1a222af86fc763ac874c4761acd13b Mon Sep 17 00:00:00 2001
From: Talley Lambert
Date: Sun, 19 Dec 2021 14:52:05 -0500
Subject: [PATCH] pull out importexport model (#72)

---
 npe2/manifest/_bases.py | 113 ++++++++++++++++++++++++++++++++++++++++
 npe2/manifest/schema.py |  89 +++----------------------------
 2 files changed, 119 insertions(+), 83 deletions(-)
 create mode 100644 npe2/manifest/_bases.py

diff --git a/npe2/manifest/_bases.py b/npe2/manifest/_bases.py
new file mode 100644
index 00000000..52d38f35
--- /dev/null
+++ b/npe2/manifest/_bases.py
@@ -0,0 +1,113 @@
+import json
+from contextlib import contextmanager
+from pathlib import Path
+from typing import Callable, Dict, Optional, Union
+
+import pytomlpp as toml
+import yaml
+from pydantic import BaseModel, PrivateAttr
+
+
+class ImportExportModel(BaseModel):
+    """Model mixin/base class that provides read/write from toml/yaml/json.
+
+    To force the inclusion of a given field in the exported toml/yaml use:
+
+        class MyModel(ImportExportModel):
+            some_field: str = Field(..., always_export=True)
+    """
+
+    _source_file: Optional[Path] = PrivateAttr(None)
+
+    def toml(self, pyproject=False, **kwargs) -> str:
+        """Generate serialized `toml` string for this model.
+
+        Parameters
+        ----------
+        pyproject : bool, optional
+            If `True`, output will be in pyproject format, with all data under
+            `tool.napari`, by default `False`.
+        **kwargs
+            passed to `BaseModel.json()`
+        """
+        d = self._serialized_data(**kwargs)
+        if pyproject:
+            d = {"tool": {"napari": d}}
+        return toml.dumps(d)
+
+    def yaml(self, **kwargs) -> str:
+        """Generate serialized `yaml` string for this model.
+
+        Parameters
+        ----------
+        **kwargs
+            passed to `BaseModel.json()`
+        """
+        return yaml.safe_dump(self._serialized_data(**kwargs), sort_keys=False)
+
+    @classmethod
+    def from_file(cls, path: Union[Path, str]) -> "ImportExportModel":
+        """Parse model from a metadata file.
+
+        Parameters
+        ----------
+        path : Path or str
+            Path to file. Must have extension {'.json', '.yaml', '.yml', '.toml'}
+
+        Returns
+        -------
+        object
+            The parsed model.
+
+        Raises
+        ------
+        FileNotFoundError
+            If `path` does not exist.
+        ValueError
+            If the file extension is not in {'.json', '.yaml', '.yml', '.toml'}
+        """
+        path = Path(path).expanduser().absolute().resolve()
+        if not path.exists():
+            raise FileNotFoundError(f"File not found: {path}")
+
+        loader: Callable
+        if path.suffix.lower() == ".json":
+            loader = json.load
+        elif path.suffix.lower() == ".toml":
+            loader = toml.load
+        elif path.suffix.lower() in (".yaml", ".yml"):
+            loader = yaml.safe_load
+        else:
+            raise ValueError(f"unrecognized file extension: {path}")
+
+        with open(path) as f:
+            data = loader(f) or {}
+
+        if path.name == "pyproject.toml":
+            data = data["tool"]["napari"]
+
+        obj = cls(**data)
+        obj._source_file = Path(path).expanduser().absolute().resolve()
+        return obj
+
+    def _serialized_data(self, **kwargs):
+        """Return serialized data, using json encoders for all outputs."""
+        kwargs.setdefault("exclude_unset", True)
+        with self._required_export_fields_set():
+            return json.loads(self.json(**kwargs))
+
+    @contextmanager
+    def _required_export_fields_set(self):
+        fields = self.__fields__.items()
+        required = {k for k, v in fields if v.field_info.extra.get("always_export")}
+
+        was_there: Dict[str, bool] = {}
+        for f in required:
+            was_there[f] = f in self.__fields_set__
+            self.__fields_set__.add(f)
+        try:
+            yield
+        finally:
+            for f in required:
+                if not was_there.get(f):
+                    self.__fields_set__.discard(f)
diff --git a/npe2/manifest/schema.py b/npe2/manifest/schema.py
index abee6042..4affe43b 100644
--- a/npe2/manifest/schema.py
+++ b/npe2/manifest/schema.py
@@ -1,6 +1,5 @@
 from __future__ import annotations
 
-import json
 import re
 import sys
 from contextlib import contextmanager
@@ -8,21 +7,12 @@
 from logging import getLogger
 from pathlib import Path
 from textwrap import dedent
-from typing import (
-    TYPE_CHECKING,
-    Callable,
-    Iterator,
-    NamedTuple,
-    Optional,
-    Sequence,
-    Union,
-)
-
-import pytomlpp as toml
-import yaml
-from pydantic import BaseModel, Extra, Field, ValidationError, root_validator, validator
+from typing import TYPE_CHECKING, Iterator, NamedTuple, Optional, Sequence, Union
+
+from pydantic import Extra, Field, ValidationError, root_validator, validator
 
 from . import _validators
+from ._bases import ImportExportModel
 from .contributions import ContributionPoints
 from .package_metadata import PackageMetadata
 from .utils import Version
@@ -50,7 +40,7 @@ class DiscoverResults(NamedTuple):
     error: Optional[Exception]
 
 
-class PluginManifest(BaseModel):
+class PluginManifest(ImportExportModel):
 
     # VS Code uses <publisher>.<name> as a unique ID for the extension
    # should this just be the package name ... not the module name? (yes)
@@ -87,6 +77,7 @@ class PluginManifest(BaseModel):
         SCHEMA_VERSION,
         description="A SemVer compatible version string matching the napari plugin "
         "schema version that the plugin is compatible with.",
+        always_export=True,
     )
 
     # TODO:
@@ -117,7 +108,6 @@ class PluginManifest(BaseModel):
 
     contributions: Optional[ContributionPoints]
 
-    _manifest_file: Optional[Path] = None
     package_metadata: Optional[PackageMetadata] = None
 
     @property
@@ -180,17 +170,6 @@ def _validate_root(cls, values: dict) -> dict:
 
         return values
 
-    def toml(self, pyproject=False) -> str:
-        with _schema_version_in_fields_set(self):
-            d = json.loads(self.json(exclude_unset=True))
-            if pyproject:
-                d = {"tool": {"napari": d}}
-            return toml.dumps(d)
-
-    def yaml(self) -> str:
-        with _schema_version_in_fields_set(self):
-            return yaml.safe_dump(json.loads(self.json(exclude_unset=True)))
-
     @classmethod
     def from_distribution(cls, name: str) -> PluginManifest:
         """Return PluginManifest given a distribution (package) name.
@@ -226,51 +205,6 @@ def from_distribution(cls, name: str) -> PluginManifest:
             "Distribution {name!r} exists but does not provide a napari manifest"
         )
 
-    @classmethod
-    def from_file(cls, path: Union[Path, str]) -> PluginManifest:
-        """Parse PluginManifest from a specific file.
-
-        Parameters
-        ----------
-        path : Path or str
-            Path to a manifest. Must have extension {'.json', '.yaml', '.yml', '.toml'}
-
-        Returns
-        -------
-        PluginManifest
-            The parsed manifest.
-
-        Raises
-        ------
-        FileNotFoundError
-            If `path` does not exist.
-        ValueError
-            If the file extension is not in {'.json', '.yaml', '.yml', '.toml'}
-        """
-        path = Path(path).expanduser().absolute().resolve()
-        if not path.exists():
-            raise FileNotFoundError(f"File not found: {path}")
-
-        loader: Callable
-        if path.suffix.lower() == ".json":
-            loader = json.load
-        elif path.suffix.lower() == ".toml":
-            loader = toml.load
-        elif path.suffix.lower() in (".yaml", ".yml"):
-            loader = yaml.safe_load
-        else:
-            raise ValueError(f"unrecognized file extension: {path}")
-
-        with open(path) as f:
-            data = loader(f) or {}
-
-        if path.name == "pyproject.toml":
-            data = data["tool"]["napari"]
-
-        mf = cls(**data)
-        mf._manifest_file = path
-        return mf
-
     class Config:
         underscore_attrs_are_private = True
         extra = Extra.forbid
@@ -423,16 +357,5 @@ def _temporary_path_additions(paths: Sequence[Union[str, Path]] = ()):
             sys.path.remove(str(p))
 
 
-@contextmanager
-def _schema_version_in_fields_set(manifest: PluginManifest):
-    was_there = "schema_version" in manifest.__fields_set__
-    manifest.__fields_set__.add("schema_version")
-    try:
-        yield
-    finally:
-        if not was_there:
-            manifest.__fields_set__.discard("schema_version")
-
-
 if __name__ == "__main__":
     print(PluginManifest.schema_json())
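
A minimal usage sketch of the extracted base class, for reviewers. It assumes this patch is applied (so `npe2.manifest._bases.ImportExportModel` is importable) and that pydantic (v1), pytomlpp, and pyyaml are installed. The `PluginInfo` model, its fields, and the file name below are hypothetical, invented only to exercise `always_export`, `toml()`, `yaml()`, and `from_file()`:

    from pathlib import Path
    from tempfile import TemporaryDirectory

    from pydantic import Field

    from npe2.manifest._bases import ImportExportModel


    class PluginInfo(ImportExportModel):
        # hypothetical model, used only to exercise the new base class
        schema_version: str = Field("0.1.0", always_export=True)  # forced into exports
        name: str
        display_name: str = ""


    info = PluginInfo(name="my-plugin")

    # exclude_unset=True is the default, but schema_version is still exported
    # because its field declares always_export=True.
    print(info.toml())
    print(info.toml(pyproject=True))  # same data nested under [tool.napari]
    print(info.yaml())

    # Round-trip through a file: from_file() picks a loader from the extension
    # and records where the data came from in the private _source_file attribute.
    with TemporaryDirectory() as tmp:
        path = Path(tmp) / "plugin.yaml"
        path.write_text(info.yaml())
        loaded = PluginInfo.from_file(path)
        assert loaded.name == "my-plugin"
        print(loaded._source_file)

Since `PluginManifest` now subclasses `ImportExportModel`, its public `from_file()`, `toml()`, and `yaml()` entry points keep working through inheritance; the old `_manifest_file` attribute is replaced by the base class's `_source_file`.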