From c27d432a71e0ec66d090a2b0a6655f1d56421f37 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Marius=20R=C3=A4sener?= <931919+elmcrest@users.noreply.github.com>
Date: Sun, 26 Jan 2025 10:36:21 +0100
Subject: [PATCH] Fix type hints and formatting (#4)

* fix type hints and formatting

* Remove `noqa: ...`

---------

Co-authored-by: Tomas R.
---
 pyjsx/__init__.py  |  2 +-
 pyjsx/codecs.py    | 12 ++++++++++--
 pyjsx/jsx.py       |  1 -
 pyjsx/tokenizer.py |  4 ++--
 4 files changed, 13 insertions(+), 6 deletions(-)

diff --git a/pyjsx/__init__.py b/pyjsx/__init__.py
index 6a07285..3cf829e 100644
--- a/pyjsx/__init__.py
+++ b/pyjsx/__init__.py
@@ -4,4 +4,4 @@
 
 __version__ = "0.1.0"
 
-__all__ = ["register_jsx", "transpile", "jsx", "JSX", "JSXComponent"]
+__all__ = ["JSX", "JSXComponent", "jsx", "register_jsx", "transpile"]
diff --git a/pyjsx/codecs.py b/pyjsx/codecs.py
index c4dec69..4fcbda7 100644
--- a/pyjsx/codecs.py
+++ b/pyjsx/codecs.py
@@ -1,11 +1,19 @@
+from __future__ import annotations
+
 import codecs
 import encodings
+from typing import TYPE_CHECKING
+
+
+if TYPE_CHECKING:
+    from _typeshed import ReadableBuffer
 
 from pyjsx.transpiler import transpile
 
 
-def pyjsx_decode(input: memoryview, errors: str = "strict") -> tuple[str, int]:  # noqa: A002, ARG001
-    return transpile(bytes(input).decode("utf-8")), len(input)
+def pyjsx_decode(input: ReadableBuffer, errors: str = "strict") -> tuple[str, int]:  # noqa: A002, ARG001
+    byte_content = bytes(input)
+    return transpile(byte_content.decode("utf-8")), len(byte_content)
 
 
 def pyjsx_search_function(encoding: str) -> codecs.CodecInfo | None:
diff --git a/pyjsx/jsx.py b/pyjsx/jsx.py
index 47de8bd..87422f8 100644
--- a/pyjsx/jsx.py
+++ b/pyjsx/jsx.py
@@ -1,6 +1,5 @@
 from __future__ import annotations
 
-import sys
 from typing import Any, Protocol, TypeAlias
 
 from pyjsx.elements import is_void_element
diff --git a/pyjsx/tokenizer.py b/pyjsx/tokenizer.py
index 3ecbf5f..15631c3 100644
--- a/pyjsx/tokenizer.py
+++ b/pyjsx/tokenizer.py
@@ -216,7 +216,7 @@ def tokenize_jsx(self) -> Generator[Token, None, None]:  # noqa: C901, PLR0912,
             yield Token(TokenType.JSX_TEXT, text, self.curr, self.curr + len(text))
             self.curr += len(text)
         else:
-            msg = f"Unexpected token {self.source[self.curr:]}"
+            msg = f"Unexpected token {self.source[self.curr :]}"
             raise TokenizerError(msg)
 
     def tokenize_py(self) -> Generator[Token, None, None]:  # noqa: C901, PLR0912, PLR0915
@@ -386,6 +386,6 @@ def tokenize_fstring(self) -> Generator[Token, None, None]:
             else:
                 break
         if not middle:
-            msg = f"Unexpected token {self.source[self.curr:]}"
+            msg = f"Unexpected token {self.source[self.curr :]}"
             raise TokenizerError(msg)
         yield Token(TokenType.FSTRING_MIDDLE, middle, self.curr, self.curr + len(middle))
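
Why the `pyjsx_decode` change matters: a codec's decode function must return the number of *bytes* it consumed, but `len()` on an arbitrary buffer counts elements, not bytes, so the two only coincide for byte-sized buffers such as `bytes` or `bytearray`. Converting the input to `bytes` once and measuring that makes the count correct for anything satisfying the buffer protocol, which is exactly what the widened `ReadableBuffer` annotation admits. (`_typeshed` exists only for type checkers, hence the `if TYPE_CHECKING:` guard around its import.) A minimal stdlib-only sketch of the element-vs-byte distinction; the `array` typecode here is chosen purely for illustration:

    import array

    # Three 4-byte unsigned integers: a buffer of 3 elements spanning 12 bytes.
    buf = memoryview(array.array("I", [1, 2, 3]))

    print(len(buf))         # 3  -- element count; what `len(input)` returned before
    print(len(bytes(buf)))  # 12 -- byte count; what the patched code returns

In practice the source-decoding path likely only ever handed this codec byte-sized buffers, so the old code happened to work; the new signature and length calculation make the function correct for the full range of inputs its type now advertises.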