Update docs and remove the ability for resolvers to load schema (#18)
* Update docs and remove the ability for resolvers to load schema

* Removing old full example and adding tests for others
rmyers authored Dec 29, 2023
1 parent c5b8453 commit 3df005c
Showing 85 changed files with 359 additions and 3,430 deletions.
6 changes: 3 additions & 3 deletions Makefile
@@ -24,7 +24,7 @@ VIRTUAL_ENV ?= venv
PYTHON_MODULES := $(shell find . -name '*.py')
DOCKER_COMPOSE := $(shell which docker-compose)

export TWINE_NON_INTERACTIVE=1
export HATCH_INDEX_USER = __token__

.SILENT: help
.PHONY: setup docs clean
@@ -77,8 +77,8 @@ docs: setup ## Build the documentation
$(VIRTUAL_ENV)/bin/sphinx-build -a docs docs/_build

publish-test: setup ## Publish the library to test pypi
$(VIRTUAL_ENV)/bin/python setup.py sdist bdist_wheel
$(VIRTUAL_ENV)/bin/python -m twine upload --repository-url https://test.pypi.org/legacy/ dist/*
$(VIRTUAL_ENV)/bin/hatch build -t sdist -t wheel
$(VIRTUAL_ENV)/bin/hatch publish --repo https://test.pypi.org/legacy/ dist/*

publish: setup ## Publish the library to pypi
$(VIRTUAL_ENV)/bin/python setup.py sdist bdist_wheel
5 changes: 3 additions & 2 deletions cannula/__init__.py
@@ -5,7 +5,7 @@
)
from .errors import format_errors
from .utils import gql
from .schema import build_and_extend_schema
from .schema import build_and_extend_schema, load_schema

__all__ = [
"API",
@@ -14,6 +14,7 @@
"format_errors",
"gql",
"build_and_extend_schema",
"load_schema",
]

__VERSION__ = "0.0.4"
__VERSION__ = "0.10.0"
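
With this release, load_schema is re-exported from the package root alongside build_and_extend_schema. A minimal sketch of the new helpers, assuming (as the cannula/api.py diff below suggests) that load_schema accepts a directory path and returns parsed DocumentNode objects; the "app/schema" directory is illustrative:

    import pathlib

    from cannula import build_and_extend_schema, load_schema

    # "app/schema" is an illustrative directory of .graphql files.
    documents = load_schema(pathlib.Path("app/schema"))
    schema = build_and_extend_schema(documents)
    print(schema.type_map.keys())
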
170 changes: 71 additions & 99 deletions cannula/api.py
@@ -11,7 +11,7 @@
import functools
import logging
import inspect
import os
import pathlib
import typing

from graphql import (
@@ -27,7 +27,6 @@
)

from .context import Context
from .helpers import get_root_path
from .schema import (
build_and_extend_schema,
fix_abstract_resolve_type,
@@ -44,104 +43,68 @@ class ParseResults(typing.NamedTuple):


class Resolver:
"""Resolver Registry
"""
Resolver Registry
-----------------
This class is a helper to organize your project as it grows. It allows you
to put your resolver modules and schema in different packages. For example::
app/
api.py # `api = cannula.API(__name__)`
api.py # `api = cannula.API(args)`
resolvers/
subpackage/
app.py # `app = cannula.Resolver(__name__)`
schema/
myschema.graphql
books.py # `books = cannula.Resolver(args)`
movies.py # `movies = cannula.Resolver(args)`
You then register resolvers and dataloaders in the same way:
You then register resolvers and dataloaders in the same way::
resolvers/books.py::
app.py:
import cannula
app = cannula.Resolver(__name__)
books = cannula.Resolver()
@app.resolver('Query')
def my_query(source, info):
@books.resolver('Query', 'books')
def get_books(source, info, args):
return 'Hello'
api.py:
resolvers/movies.py::
import cannula
from resolvers.subpackage.app import app
movies = cannula.Resolver()
@movies.resolver('Query', 'movies')
def get_movies(source, info, args):
return 'Hello'
app/api.py::
import cannula
from resolvers.books import books
from resolvers.movies import movies
api = cannula.API(schema=SCHEMA)
api.include_resolver(books)
api.include_resolver(movies)
api = cannula.API(__name__)
api.register_resolver(app)
:param name: The import name of the resolver, typically `__name__`
:param schema: GraphQL Schema for this resolver.
:param schema_directory: Directory name to search for schema files.
:param query_directory: Directory name to search for query docs.
"""

# Allow sub-resolvers to apply a base schema before applying custom schema.
base_schema: typing.Dict[str, DocumentNode] = {}
registry: typing.Dict[str, dict]
datasources: typing.Dict[str, typing.Any]
_schema_dir: str

def __init__(
self,
name: str,
schema: typing.List[typing.Union[str, DocumentNode]] = [],
schema_directory: str = "schema",
query_directory: str = "queries",
):
self.registry = collections.defaultdict(dict)
self.datasources = {}
self._schema_directory = schema_directory
self._query_directory = query_directory
self.root_dir = get_root_path(name)
self._schema = schema

@property
def schema_directory(self):
if not hasattr(self, "_schema_dir"):
if os.path.isabs(self._schema_directory):
setattr(self, "_schema_dir", self._schema_directory)
setattr(
self, "_schema_dir", os.path.join(self.root_dir, self._schema_directory)
)
return self._schema_dir

def find_schema(self) -> typing.List[DocumentNode]:
schemas: typing.List[DocumentNode] = []
if os.path.isdir(self.schema_directory):
LOG.debug(f"Searching {self.schema_directory} for schema.")
schemas = load_schema(self.schema_directory)

for schema in self._schema:
schemas.append(maybe_parse(schema))

return schemas

@property
def query_directory(self) -> str:
if not hasattr(self, "_query_dir"):
if os.path.isabs(self._query_directory):
self._query_dir: str = self._query_directory
self._query_dir = os.path.join(self.root_dir, self._query_directory)
return self._query_dir

@functools.lru_cache(maxsize=128)
def load_query(self, query_name: str) -> DocumentNode:
path = os.path.join(self.query_directory, f"{query_name}.graphql")
assert os.path.isfile(path), f"No query found for {query_name}"

with open(path, "r") as query:
return parse(query.read())

def resolver(self, type_name: str = "Query") -> typing.Any:
def resolver(self, type_name: str, field_name: str) -> typing.Any:
def decorator(function):
self.registry[type_name][function.__name__] = function
self.registry[type_name][field_name] = function

return decorator

@@ -153,13 +116,19 @@ def decorator(klass):


class API(Resolver):
"""Cannula API
"""
:param schema: GraphQL Schema for this resolver.
:param context: Context class to hold shared state, added to GraphQLResolveInfo object.
:param middleware: List of middleware to enable.
Cannula API
-----------
Your entry point into the fun filled world of graphql. Just dive right in::
import cannula
api = cannula.API(__name__, schema='''
api = cannula.API(schema='''
extend type Query {
hello(who: String): String
}
@@ -170,41 +139,44 @@ def hello(who):
return f'Hello {who}!'
"""

_schema: typing.Union[str, DocumentNode, pathlib.Path]
_resolvers: typing.List[Resolver]

def __init__(
self,
*args,
resolvers: typing.List[Resolver] = [],
context: typing.Any = Context,
schema: typing.Union[str, DocumentNode, pathlib.Path],
context: typing.Optional[Context] = None,
middleware: typing.List[typing.Any] = [],
**kwargs,
):
super().__init__(*args, **kwargs)
self._context = context
self._resolvers = resolvers
super().__init__(**kwargs)
self._context = context or Context
self._resolvers = []
self._schema = schema
self.middleware = middleware

def include_resolver(self, resolver: Resolver):
self._merge_registry(resolver.registry)
self.datasources.update(resolver.datasources)

def _find_schema(self) -> typing.List[DocumentNode]:
schemas: typing.List[DocumentNode] = []

if isinstance(self._schema, pathlib.Path):
schemas.extend(load_schema(self._schema))
else:
schemas.append(maybe_parse(self._schema))

return schemas

@property
def schema(self) -> GraphQLSchema:
if not hasattr(self, "_full_schema"):
self._full_schema = self._build_schema()
return self._full_schema

def _all_schema(self) -> typing.Iterator[DocumentNode]:
for document_node in self.find_schema():
yield document_node

for resolver in self._resolvers:
self._merge_registry(resolver.registry)
self.base_schema.update(resolver.base_schema)
self.datasources.update(resolver.datasources)
for document_node in resolver.find_schema():
yield document_node

for document_node in self.base_schema.values():
yield document_node

def _build_schema(self) -> GraphQLSchema:
schema = build_and_extend_schema(self._all_schema())
schema = build_and_extend_schema(self._find_schema())

schema_validation_errors = validate_schema(schema)
if schema_validation_errors:
@@ -238,7 +210,7 @@ def decorator(klass):
return decorator

def get_context(self, request):
context = self._context(request)
context = self._context.init(request)
# Initialize the datasources with a copy of the context without
# any of the datasource attributes set. It may work just fine but
# if you change the order the code may stop working. So discourage
@@ -253,12 +225,12 @@ def _merge_registry(self, registry: dict):
self.registry[type_name].update(value)

@functools.lru_cache(maxsize=128)
def validate(self, document: DocumentNode) -> typing.List[GraphQLError]:
def _validate(self, document: DocumentNode) -> typing.List[GraphQLError]:
"""Validate the document against the schema and store results in lru_cache."""
return validate(self.schema, document)

@functools.lru_cache(maxsize=128)
def parse_document(self, document: str) -> ParseResults:
def _parse_document(self, document: str) -> ParseResults:
"""Parse and store the document in lru_cache."""
try:
document_ast = parse(document)
@@ -278,11 +250,11 @@ async def call(
web framework that is synchronous use the `call_sync` method.
"""
if isinstance(document, str):
document, errors = self.parse_document(document)
document, errors = self._parse_document(document)
if errors:
return ExecutionResult(data=None, errors=errors)

if validation_errors := self.validate(document):
if validation_errors := self._validate(document):
return ExecutionResult(data=None, errors=validation_errors)

context = self.get_context(request)
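
The api.py changes above replace name-based registration (cannula.API(__name__), register_resolver) with an explicit schema argument and include_resolver. A minimal sketch of the new flow, pieced together from the updated docstrings and hunks above; the SCHEMA text, the resolver body, and the exact call() arguments are assumptions, not code from the repository:

    import asyncio

    import cannula

    SCHEMA = """
    type Query {
        books: [String]
    }
    """

    books = cannula.Resolver()


    # Resolvers are registered against an explicit type name and field name,
    # matching the new resolver(type_name, field_name) signature above.
    @books.resolver("Query", "books")
    async def get_books(source, info):
        return ["Example Book"]


    api = cannula.API(schema=SCHEMA)
    api.include_resolver(books)

    # The request argument name and the shape of call() are inferred from the
    # hunks above, so treat this invocation as illustrative.
    result = asyncio.run(api.call("{ books }", request=None))
    print(result.data)
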
4 changes: 4 additions & 0 deletions cannula/context.py
@@ -11,5 +11,9 @@ class Context:
def __init__(self, request: typing.Any):
self.request = self.handle_request(request)

@classmethod
def init(cls, request: typing.Any):
return cls(request)

def handle_request(self, request: typing.Any) -> typing.Any:
return request
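
The new init() classmethod gives the API a single hook for constructing the request context. A small sketch of a custom context, assuming only the Context behaviour shown in this diff; the user attribute is purely illustrative:

    import typing

    from cannula.context import Context


    class MyContext(Context):
        def handle_request(self, request: typing.Any) -> typing.Any:
            # Stash whatever the resolvers need; `user` is illustrative only.
            self.user = getattr(request, "user", None)
            return request


    context = MyContext.init(request=None)
    print(context.request, context.user)
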
2 changes: 2 additions & 0 deletions cannula/contrib/asgi.py
@@ -4,6 +4,8 @@


class GraphQLPayload(pydantic.BaseModel):
"""Model representing a GraphQL request body."""

query: str
variables: typing.Optional[typing.Dict[str, typing.Any]] = None
operation: typing.Optional[str] = None
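
GraphQLPayload mirrors the standard GraphQL HTTP request body. A quick sketch of validating an incoming body with it; the JSON sample is illustrative, not from the repository:

    import json

    from cannula.contrib.asgi import GraphQLPayload

    body = '{"query": "{ books }", "variables": {"limit": 10}}'
    payload = GraphQLPayload(**json.loads(body))
    print(payload.query, payload.variables, payload.operation)
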
26 changes: 0 additions & 26 deletions cannula/helpers.py

This file was deleted.

5 changes: 1 addition & 4 deletions cannula/middleware/mocks.py
@@ -312,10 +312,7 @@ def get_mocks_from_headers(self, context: typing.Any) -> dict:
return {}

async def run_next(self, _next, _resource, _info, **kwargs):
if inspect.isawaitable(_next):
results = await _next(_resource, _info, **kwargs)
else:
results = _next(_resource, _info, **kwargs)
results = _next(_resource, _info, **kwargs)

if inspect.isawaitable(results):
return await results
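
The mocks middleware now calls the next resolver directly and only awaits the result when it is awaitable, which handles both sync and async resolvers without ever awaiting a plain function. A standalone sketch of that pattern; the function name and arguments simply mirror run_next above:

    import inspect


    async def run_next(_next, _resource, _info, **kwargs):
        # Call the resolver unconditionally; await only if it returned an awaitable.
        results = _next(_resource, _info, **kwargs)
        if inspect.isawaitable(results):
            return await results
        return results
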