diff --git a/poetry.lock b/poetry.lock
index d8c064149..4ca56bd69 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -1,4 +1,4 @@
-# This file is automatically @generated by Poetry and should not be changed by hand.
+# This file is automatically @generated by Poetry 1.4.0 and should not be changed by hand.
 
 [[package]]
 name = "alabaster"
@@ -2334,7 +2334,7 @@ files = [
 ]
 
 [package.dependencies]
-greenlet = {version = "!=0.4.17", markers = "python_version >= \"3\" and (platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\")"}
+greenlet = {version = "!=0.4.17", markers = "python_version >= \"3\" and platform_machine == \"aarch64\" or python_version >= \"3\" and platform_machine == \"ppc64le\" or python_version >= \"3\" and platform_machine == \"x86_64\" or python_version >= \"3\" and platform_machine == \"amd64\" or python_version >= \"3\" and platform_machine == \"AMD64\" or python_version >= \"3\" and platform_machine == \"win32\" or python_version >= \"3\" and platform_machine == \"WIN32\""}
 importlib-metadata = {version = "*", markers = "python_version < \"3.8\""}
 
 [package.extras]
@@ -2687,7 +2687,7 @@ docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker
 testing = ["flake8 (<5)", "func-timeout", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)"]
 
 [extras]
-docs = ["sphinx", "furo", "sphinx-copybutton", "myst-parser", "sphinx-autobuild", "sphinx-reredirects"]
+docs = ["furo", "myst-parser", "sphinx", "sphinx-autobuild", "sphinx-copybutton", "sphinx-reredirects"]
 s3 = ["fs-s3fs"]
 testing = ["pytest", "pytest-durations"]
 
diff --git a/pyproject.toml b/pyproject.toml
index b744052a8..255c176d8 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -243,6 +243,7 @@ select = [
     "T10", # flake8-debugger
     "ISC", # flake8-implicit-str-concat
     "ICN", # flake8-import-conventions
+    "G",   # flake8-logging-format
     "INP", # flake8-no-pep420
     "PIE", # flake8-pie
     "T20", # flake8-print
diff --git a/samples/sample_tap_google_analytics/ga_tap_stream.py b/samples/sample_tap_google_analytics/ga_tap_stream.py
index 0a99a7fc9..605365b5d 100644
--- a/samples/sample_tap_google_analytics/ga_tap_stream.py
+++ b/samples/sample_tap_google_analytics/ga_tap_stream.py
@@ -67,12 +67,14 @@ def prepare_request_payload(
     def parse_response(self, response) -> Iterable[dict]:
         """Parse Google Analytics API response into individual records."""
         self.logger.info(
-            f"Received raw Google Analytics query response: {response.json()}",
+            "Received raw Google Analytics query response: %s",
+            response.json(),
         )
         report_data = response.json().get("reports", [{}])[0].get("data")
         if not report_data:
             self.logger.info(
-                f"Received empty Google Analytics query response: {response.json()}",
+                "Received empty Google Analytics query response: %s",
+                response.json(),
             )
         for total in report_data["totals"]:
             yield {"totals": total["values"]}
diff --git a/samples/sample_target_csv/csv_target_sink.py b/samples/sample_target_csv/csv_target_sink.py
index 6986d414c..f06c3163d 100644
--- a/samples/sample_target_csv/csv_target_sink.py
+++ b/samples/sample_target_csv/csv_target_sink.py
@@ -32,7 +32,7 @@ def process_batch(self, context: dict) -> None:
         openmode = "a"
         outpath = self.target_filepath.absolute()
         if not outpath.is_file():
-            self.logger.info(f"Writing to new file: {outpath}")
+            self.logger.info("Writing to new file: %s", outpath)
             newfile = True
             openmode = "w"
         with outpath.open(openmode, newline="\n", encoding="utf-8") as csvfile:
diff --git a/singer_sdk/helpers/_typing.py b/singer_sdk/helpers/_typing.py
index 9d9f9c49f..e4097919d 100644
--- a/singer_sdk/helpers/_typing.py
+++ b/singer_sdk/helpers/_typing.py
@@ -200,11 +200,11 @@ def handle_invalid_timestamp_in_record(
         f"field '{':'.join(key_breadcrumb)}'."
     )
     if treatment == DatetimeErrorTreatmentEnum.MAX:
-        logger.warning(f"{msg}. Replacing with MAX value.\n{ex}\n")
+        logger.warning("%s. Replacing with MAX value.\n%s\n", msg, ex)
         return _MAX_TIMESTAMP if datelike_typename != "time" else _MAX_TIME
 
     if treatment == DatetimeErrorTreatmentEnum.NULL:
-        logger.warning(f"{msg}. Replacing with NULL.\n{ex}\n")
+        logger.warning("%s. Replacing with NULL.\n%s\n", msg, ex)
         return None
 
     raise ValueError(msg)
@@ -326,8 +326,10 @@ def _warn_unmapped_properties(
     logger: logging.Logger,
 ):
     logger.warning(
-        f"Properties {property_names} were present in the '{stream_name}' stream but "
+        "Properties %s were present in the '%s' stream but "
         "not found in catalog schema. Ignoring.",
+        property_names,
+        stream_name,
     )
 
 
diff --git a/singer_sdk/mapper.py b/singer_sdk/mapper.py
index 289075df5..31e3cba04 100644
--- a/singer_sdk/mapper.py
+++ b/singer_sdk/mapper.py
@@ -343,7 +343,7 @@ def _eval(
                 f"Failed to evaluate simpleeval expressions {expr}.",
             ) from ex
 
-        logging.debug(f"Eval result: {expr} = {result}")
+        logging.debug("Eval result: %s = %s", expr, result)
 
         return result
 
@@ -405,22 +405,28 @@ def _init_functions_and_schema(  # noqa: PLR0912, PLR0915
         include_by_default = True
         if stream_map and MAPPER_FILTER_OPTION in stream_map:
             filter_rule = stream_map.pop(MAPPER_FILTER_OPTION)
-            logging.info(f"Found '{self.stream_alias}' filter rule: {filter_rule}")
+            logging.info(
+                "Found '%s' filter rule: %s",
+                self.stream_alias,
+                filter_rule,
+            )
 
         if stream_map and MAPPER_KEY_PROPERTIES_OPTION in stream_map:
             self.transformed_key_properties: list[str] = stream_map.pop(
                 MAPPER_KEY_PROPERTIES_OPTION,
             )
             logging.info(
-                f"Found stream map override for '{self.stream_alias}' key properties: "
-                f"{str(self.transformed_key_properties)}",
+                "Found stream map override for '%s' key properties: %s",
+                self.stream_alias,
+                self.transformed_key_properties,
             )
 
         if stream_map and MAPPER_ELSE_OPTION in stream_map:
             if stream_map[MAPPER_ELSE_OPTION] in {None, NULL_STRING}:
                 logging.info(
-                    f"Detected `{MAPPER_ELSE_OPTION}=None` rule. "
+                    "Detected `%s=None` rule. "
                     "Unmapped, non-key properties will be excluded from output.",
+                    MAPPER_ELSE_OPTION,
                 )
                 include_by_default = False
             else:
@@ -496,8 +502,9 @@ def _inner(record: dict) -> bool:
                 property_name=None,
             )
             logging.debug(
-                f"Filter result for '{filter_rule}' "
-                "in '{self.name}' stream: {filter_result}",
+                "Filter result for '%s' in '{self.name}' stream: %s",
+                filter_rule,
+                filter_result,
             )
             if not filter_result:
                 logging.debug("Excluding record due to filter.")
@@ -588,8 +595,9 @@ def __init__(
         if MAPPER_ELSE_OPTION in self.stream_maps_dict:
             if self.stream_maps_dict[MAPPER_ELSE_OPTION] in {None, NULL_STRING}:
                 logging.info(
-                    f"Found '{MAPPER_ELSE_OPTION}=None' default mapper. "
+                    "Found '%s=None' default mapper. "
                     "Unmapped streams will be excluded from output.",
+                    MAPPER_ELSE_OPTION,
                 )
                 self.default_mapper_type = RemoveRecordTransform
                 self.stream_maps_dict.pop(MAPPER_ELSE_OPTION)
@@ -600,8 +608,9 @@ def __init__(
                 )
         else:
             logging.debug(
-                f"Operator '{MAPPER_ELSE_OPTION}=None' was not found. "
+                "Operator '%s=None' was not found. "
                 "Unmapped streams will be included in output.",
+                MAPPER_ELSE_OPTION,
             )
         for stream_map_key, stream_def in self.stream_maps_dict.items():
             if stream_map_key.startswith("__"):
@@ -694,7 +703,7 @@ def register_raw_stream_schema(  # noqa: PLR0912
                     key_properties=None,
                     flattening_options=self.flattening_options,
                 )
-                logging.info(f"Set null tansform as default for '{stream_name}'")
+                logging.info("Set null transform as default for '%s'", stream_name)
                 continue
 
             if not isinstance(stream_def, dict):
diff --git a/singer_sdk/plugin_base.py b/singer_sdk/plugin_base.py
index dd71c7d8a..c93227c26 100644
--- a/singer_sdk/plugin_base.py
+++ b/singer_sdk/plugin_base.py
@@ -262,7 +262,8 @@ def _validate_config(
         if config_jsonschema:
             self.append_builtin_config(config_jsonschema)
             self.logger.debug(
-                f"Validating config using jsonschema: {config_jsonschema}",
+                "Validating config using jsonschema: %s",
+                config_jsonschema,
             )
             validator = JSONSchemaValidator(config_jsonschema)
             errors = [e.message for e in validator.iter_errors(self._config)]
diff --git a/singer_sdk/sinks/core.py b/singer_sdk/sinks/core.py
index b90a296f8..7ca1f70bc 100644
--- a/singer_sdk/sinks/core.py
+++ b/singer_sdk/sinks/core.py
@@ -63,7 +63,10 @@ def __init__(
         self._config = dict(target.config)
         self._pending_batch: dict | None = None
         self.stream_name = stream_name
-        self.logger.info(f"Initializing target sink for stream '{stream_name}'...")
+        self.logger.info(
+            "Initializing target sink for stream '%s'...",
+            stream_name,
+        )
         self.schema = schema
         if self.include_sdc_metadata_properties:
             self._add_sdc_metadata_to_schema()
diff --git a/singer_sdk/streams/core.py b/singer_sdk/streams/core.py
index 101ff7ef6..296043d69 100644
--- a/singer_sdk/streams/core.py
+++ b/singer_sdk/streams/core.py
@@ -223,8 +223,8 @@ def stream_maps(self) -> list[StreamMap]:
             )
         else:
             self.logger.info(
-                f"No custom mapper provided for '{self.name}'. "
-                "Using SameRecordTransform.",
+                "No custom mapper provided for '%s'. Using SameRecordTransform.",
+                self.name,
             )
             self._stream_maps = [
                 SameRecordTransform(
@@ -1175,7 +1175,7 @@ def sync(self, context: dict | None = None) -> None:
         msg = f"Beginning {self.replication_method.lower()} sync of '{self.name}'"
         if context:
             msg += f" with context: {context}"
-        self.logger.info(f"{msg}...")
+        self.logger.info("%s...", msg)
 
         # Use a replication signpost, if available
         signpost = self.get_replication_key_signpost(context)
diff --git a/singer_sdk/streams/graphql.py b/singer_sdk/streams/graphql.py
index 37e54043d..b4023b6b8 100644
--- a/singer_sdk/streams/graphql.py
+++ b/singer_sdk/streams/graphql.py
@@ -76,5 +76,5 @@ def prepare_request_payload(
             "query": (" ".join([line.strip() for line in query.splitlines()])),
             "variables": params,
         }
-        self.logger.debug(f"Attempting query:\n{query}")
+        self.logger.debug("Attempting query:\n%s", query)
         return request_data
diff --git a/singer_sdk/streams/rest.py b/singer_sdk/streams/rest.py
index ab8637f00..fe83bea36 100644
--- a/singer_sdk/streams/rest.py
+++ b/singer_sdk/streams/rest.py
@@ -641,9 +641,10 @@ def backoff_handler(self, details: Details) -> None:
                 https://github.com/litl/backoff#event-handlers
         """
         logging.error(
-            "Backing off {wait:0.1f} seconds after {tries} tries "
-            "calling function {target} with args {args} and kwargs "
-            "{kwargs}".format(**details),
+            "Backing off %(wait)0.2f seconds after %(tries)d tries "
+            "calling function %(target)s with args %(args)s and kwargs "
+            "%(kwargs)s",
+            details,
         )
 
     def backoff_runtime(
diff --git a/singer_sdk/tap_base.py b/singer_sdk/tap_base.py
index e97127e9b..4be13ed29 100644
--- a/singer_sdk/tap_base.py
+++ b/singer_sdk/tap_base.py
@@ -223,9 +223,11 @@ def run_sync_dry_run(
         for stream in streams:
             if stream.parent_stream_type:
                 self.logger.debug(
-                    f"Child stream '{type(stream).__name__}' should be called by "
-                    f"parent stream '{stream.parent_stream_type.__name__}'. "
+                    "Child stream '%s' should be called by "
+                    "parent stream '%s'. "
                     "Skipping direct invocation.",
+                    type(stream).__name__,
+                    stream.parent_stream_type.__name__,
                 )
                 continue
             with contextlib.suppress(
@@ -326,7 +328,9 @@ def load_streams(self) -> list[Stream]:
                 for stream in streams:
                     parent.child_streams.append(stream)
                     self.logger.info(
-                        f"Added '{stream.name}' as child stream to '{parent.name}'",
+                        "Added '%s' as child stream to '%s'",
+                        stream.name,
+                        parent.name,
                     )
 
         streams = [stream for streams in streams_by_type.values() for stream in streams]
@@ -381,10 +385,13 @@ def _set_compatible_replication_methods(self) -> None:
             for descendent in stream.descendent_streams:
                 if descendent.selected and descendent.ignore_parent_replication_key:
                     self.logger.warning(
-                        f"Stream descendent '{descendent.name}' is selected and "
-                        f"its parent '{stream.name}' does not use inclusive "
-                        f"replication keys. "
-                        f"Forcing full table replication for '{stream.name}'.",
+                        "Stream descendent '%s' is selected and "
+                        "its parent '%s' does not use inclusive "
+                        "replication keys. "
+                        "Forcing full table replication for '%s'.",
+                        descendent.name,
+                        stream.name,
+                        stream.name,
                     )
                     stream.replication_key = None
                     stream.forced_replication_method = "FULL_TABLE"
@@ -399,14 +406,16 @@ def sync_all(self) -> None:
         stream: Stream
         for stream in self.streams.values():
            if not stream.selected and not stream.has_selected_descendents:
-                self.logger.info(f"Skipping deselected stream '{stream.name}'.")
+                self.logger.info("Skipping deselected stream '%s'.", stream.name)
                 continue
 
             if stream.parent_stream_type:
                 self.logger.debug(
-                    f"Child stream '{type(stream).__name__}' is expected to be called "
-                    f"by parent stream '{stream.parent_stream_type.__name__}'. "
+                    "Child stream '%s' is expected to be called "
+                    "by parent stream '%s'. "
                     "Skipping direct invocation.",
+                    type(stream).__name__,
+                    stream.parent_stream_type.__name__,
                 )
                 continue
 
diff --git a/singer_sdk/target_base.py b/singer_sdk/target_base.py
index 20c72e2d5..c4fb9c879 100644
--- a/singer_sdk/target_base.py
+++ b/singer_sdk/target_base.py
@@ -173,8 +173,10 @@ def get_sink(
             or existing_sink.key_properties != key_properties
         ):
             self.logger.info(
-                f"Schema or key properties for '{stream_name}' stream have changed. "
-                f"Initializing a new '{stream_name}' sink...",
+                "Schema or key properties for '%s' stream have changed. "
+                "Initializing a new '%s' sink...",
+                stream_name,
+                stream_name,
             )
             self._sinks_to_clear.append(self._sinks_active.pop(stream_name))
             return self.add_sink(stream_name, schema, key_properties)
@@ -236,7 +238,7 @@ def add_sink(
         Returns:
             A new sink for the stream.
         """
-        self.logger.info(f"Initializing '{self.name}' target sink...")
+        self.logger.info("Initializing '%s' target sink...", self.name)
         sink_class = self.get_sink_class(stream_name=stream_name)
         sink = sink_class(
             target=self,
@@ -270,8 +272,9 @@ def _handle_max_record_age(self) -> None:
         """Check if _MAX_RECORD_AGE_IN_MINUTES reached, and if so trigger drain."""
         if self._max_record_age_in_minutes > self._MAX_RECORD_AGE_IN_MINUTES:
             self.logger.info(
-                "One or more records have exceeded the max age of "
-                f"{self._MAX_RECORD_AGE_IN_MINUTES} minutes. Draining all sinks.",
+                "One or more records have exceeded the max age of %d minutes. "
+                "Draining all sinks.",
+                self._MAX_RECORD_AGE_IN_MINUTES,
             )
             self.drain_all()
 
@@ -284,16 +287,19 @@ def _process_lines(self, file_input: IO[str]) -> Counter[str]:
         Returns:
             A counter object for the processed lines.
         """
-        self.logger.info(f"Target '{self.name}' is listening for input from tap.")
+        self.logger.info("Target '%s' is listening for input from tap.", self.name)
         counter = super()._process_lines(file_input)
 
         line_count = sum(counter.values())
         self.logger.info(
-            f"Target '{self.name}' completed reading {line_count} lines of input "
-            f"({counter[SingerMessageType.RECORD]} records, "
-            f"({counter[SingerMessageType.BATCH]} batch manifests, "
-            f"{counter[SingerMessageType.STATE]} state messages).",
+            "Target '%s' completed reading %d lines of input "
+            "(%d records, %d batch manifests, %d state messages).",
+            self.name,
+            line_count,
+            counter[SingerMessageType.RECORD],
+            counter[SingerMessageType.BATCH],
+            counter[SingerMessageType.STATE],
         )
 
         return counter
 
@@ -338,7 +344,8 @@ def _process_record_message(self, message_dict: dict) -> None:
 
             if sink.is_full:
                 self.logger.info(
-                    f"Target sink for '{sink.stream_name}' is full. Draining...",
+                    "Target sink for '%s' is full. Draining...",
+                    sink.stream_name,
                 )
                 self.drain_one(sink)
 
@@ -361,23 +368,25 @@ def _process_schema_message(self, message_dict: dict) -> None:
             do_registration = True
         elif self.mapper.stream_maps[stream_name][0].raw_schema != schema:
             self.logger.info(
-                f"Schema has changed for stream '{stream_name}'. "
+                "Schema has changed for stream '%s'. "
                 "Mapping definitions will be reset.",
+                stream_name,
             )
             do_registration = True
         elif (
             self.mapper.stream_maps[stream_name][0].raw_key_properties
             != key_properties
         ):
             self.logger.info(
-                f"Key properties have changed for stream '{stream_name}'. "
+                "Key properties have changed for stream '%s'. "
                 "Mapping definitions will be reset.",
+                stream_name,
             )
             do_registration = True
 
         if not do_registration:
             self.logger.debug(
-                f"No changes detected in SCHEMA message for stream '{stream_name}'. "
-                "Ignoring.",
+                "No changes detected in SCHEMA message for stream '%s'. Ignoring.",
+                stream_name,
             )
             return
 
@@ -501,7 +510,7 @@ def _write_state_message(self, state: dict) -> None:
             state: TODO
         """
         state_json = json.dumps(state)
-        self.logger.info(f"Emitting completed target state {state_json}")
+        self.logger.info("Emitting completed target state %s", state_json)
         sys.stdout.write(f"{state_json}\n")
         sys.stdout.flush()
 
diff --git a/tests/cookiecutters/test_cookiecutter.py b/tests/cookiecutters/test_cookiecutter.py
index ae5f1f33d..8c44802ae 100644
--- a/tests/cookiecutters/test_cookiecutter.py
+++ b/tests/cookiecutters/test_cookiecutter.py
@@ -66,7 +66,7 @@ def test_cookiecutter(outdir: str, cookiecutter_dir: Path, cookiecutter_input: d
             mypy_out = api.run([filepath, "--config", str(Path(outdir) / Path("tox.ini"))])
             mypy_msg = str(mypy_out[0])
             if not mypy_msg.startswith("Success:"):
-                logging.exception(f"MyPy validation failed: {mypy_msg}")
+                logging.exception("MyPy validation failed: %s", mypy_msg)
                 assert not mypy_msg, f"MyPy validation failed for file {filepath}"
             report = style_guide_strict.check_files([filepath])
             errors = report.get_statistics("E")
diff --git a/tests/core/test_mapper.py b/tests/core/test_mapper.py
index 2f9b7ad42..036d7586a 100644
--- a/tests/core/test_mapper.py
+++ b/tests/core/test_mapper.py
@@ -377,7 +377,7 @@ def _test_transform(
     for stream_name, stream in sample_stream.items():
         for stream_map in mapper.stream_maps[stream_name]:
             if isinstance(stream_map, RemoveRecordTransform):
-                logging.info(f"Skipping ignored stream '{stream_name}'")
+                logging.info("Skipping ignored stream '%s'", stream_name)
                 continue
 
             assert (
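Note: the pattern applied throughout this diff is the deferred logging style enforced by the "G" (flake8-logging-format) rules enabled in pyproject.toml above: the format string and its arguments are passed to the logger separately instead of being pre-rendered with an f-string. A minimal before/after sketch (the logger and the stream name value below are illustrative only, not taken from the SDK):

    import logging

    logger = logging.getLogger(__name__)
    stream_name = "users"  # hypothetical value, for illustration only

    # Flagged by flake8-logging-format: the f-string is rendered eagerly,
    # even when the INFO level is disabled for this logger.
    logger.info(f"Initializing target sink for stream '{stream_name}'...")

    # Preferred: interpolation is deferred until a handler actually emits
    # the record, and the raw arguments stay attached to the LogRecord.
    logger.info("Initializing target sink for stream '%s'...", stream_name)

For mapping-style placeholders such as %(wait)0.2f in the backoff_handler hunk, the standard logging module expects a single dict passed as the sole argument (it is used as the interpolation mapping), rather than one positional argument per placeholder.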