Skip to content

Commit

Permalink
Fix type hints and formatting (#4)
Browse files Browse the repository at this point in the history
* fix type hints and formatting

* Remove `noqa: ...`

---------

Co-authored-by: Tomas R. <[email protected]>
  • Loading branch information
elmcrest and tomasr8 authored Jan 26, 2025
1 parent 5468c6b commit c27d432
Show file tree
Hide file tree
Showing 4 changed files with 13 additions and 6 deletions.
2 changes: 1 addition & 1 deletion pyjsx/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,4 +4,4 @@


__version__ = "0.1.0"
__all__ = ["register_jsx", "transpile", "jsx", "JSX", "JSXComponent"]
__all__ = ["JSX", "JSXComponent", "jsx", "register_jsx", "transpile"]
12 changes: 10 additions & 2 deletions pyjsx/codecs.py
Original file line number Diff line number Diff line change
@@ -1,11 +1,19 @@
from __future__ import annotations

import codecs
import encodings
from typing import TYPE_CHECKING


if TYPE_CHECKING:
from _typeshed import ReadableBuffer

from pyjsx.transpiler import transpile


def pyjsx_decode(input: memoryview, errors: str = "strict") -> tuple[str, int]:  # noqa: A002, ARG001
    # Codec decode entry point: transpile the UTF-8 JSX source held in `input`
    # and report how many input bytes were consumed (the whole buffer).
    # NOTE(review): typed as `memoryview`, but codec machinery may pass other
    # buffer types — `len(input)` then counts elements, not bytes; the
    # `ReadableBuffer` rewrite below in this commit addresses exactly that.
    return transpile(bytes(input).decode("utf-8")), len(input)
def pyjsx_decode(input: ReadableBuffer, errors: str = "strict") -> tuple[str, int]:  # noqa: A002, ARG001
    """Decode a buffer of UTF-8 JSX source into transpiled Python code.

    Returns the codec-protocol pair ``(decoded_text, bytes_consumed)``.
    The buffer is materialized as ``bytes`` first so that the consumed
    length is measured in bytes regardless of the buffer's element type.
    """
    raw = bytes(input)
    transpiled = transpile(raw.decode("utf-8"))
    return transpiled, len(raw)


def pyjsx_search_function(encoding: str) -> codecs.CodecInfo | None:
Expand Down
1 change: 0 additions & 1 deletion pyjsx/jsx.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,5 @@
from __future__ import annotations

import sys
from typing import Any, Protocol, TypeAlias

from pyjsx.elements import is_void_element
Expand Down
4 changes: 2 additions & 2 deletions pyjsx/tokenizer.py
Original file line number Diff line number Diff line change
Expand Up @@ -216,7 +216,7 @@ def tokenize_jsx(self) -> Generator[Token, None, None]: # noqa: C901, PLR0912,
yield Token(TokenType.JSX_TEXT, text, self.curr, self.curr + len(text))
self.curr += len(text)
else:
msg = f"Unexpected token {self.source[self.curr:]}"
msg = f"Unexpected token {self.source[self.curr :]}"
raise TokenizerError(msg)

def tokenize_py(self) -> Generator[Token, None, None]: # noqa: C901, PLR0912, PLR0915
Expand Down Expand Up @@ -386,6 +386,6 @@ def tokenize_fstring(self) -> Generator[Token, None, None]:
else:
break
if not middle:
msg = f"Unexpected token {self.source[self.curr:]}"
msg = f"Unexpected token {self.source[self.curr :]}"
raise TokenizerError(msg)
yield Token(TokenType.FSTRING_MIDDLE, middle, self.curr, self.curr + len(middle))

0 comments on commit c27d432

Please sign in to comment.