chore: Enable G (flake8-logging-format) Ruff checks (#1621)
edgarrmondragon authored Apr 18, 2023
1 parent 47f272a commit 182d363
Showing 15 changed files with 97 additions and 56 deletions.
6 changes: 3 additions & 3 deletions poetry.lock

Some generated files are not rendered by default.

1 change: 1 addition & 0 deletions pyproject.toml
@@ -243,6 +243,7 @@ select = [
     "T10", # flake8-debugger
     "ISC", # flake8-implicit-str-concat
     "ICN", # flake8-import-conventions
+    "G", # flake8-logging-format
     "INP", # flake8-no-pep420
     "PIE", # flake8-pie
     "T20", # flake8-print
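For context on the rule family being enabled: flake8-logging-format flags logging calls that build their message eagerly — f-strings (G004), str.format() (G001), % interpolation (G002), and string concatenation (G003) — and the fix applied throughout this commit is to pass a %-style format string plus arguments, so interpolation only happens when a record is actually emitted. A minimal sketch of the before/after pattern (illustrative names, not code from this repository):

import logging

logger = logging.getLogger(__name__)
record_count = 42  # hypothetical value, for illustration only

# Flagged by G004: the f-string is rendered even when INFO is disabled.
logger.info(f"Loaded {record_count} records")

# Preferred: lazy %-style formatting; the argument is interpolated by the
# logging framework only when the record is emitted.
logger.info("Loaded %d records", record_count)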
6 changes: 4 additions & 2 deletions samples/sample_tap_google_analytics/ga_tap_stream.py
@@ -67,12 +67,14 @@ def prepare_request_payload(
     def parse_response(self, response) -> Iterable[dict]:
         """Parse Google Analytics API response into individual records."""
         self.logger.info(
-            f"Received raw Google Analytics query response: {response.json()}",
+            "Received raw Google Analytics query response: %s",
+            response.json(),
         )
         report_data = response.json().get("reports", [{}])[0].get("data")
         if not report_data:
             self.logger.info(
-                f"Received empty Google Analytics query response: {response.json()}",
+                "Received empty Google Analytics query response: %s",
+                response.json(),
             )
         for total in report_data["totals"]:
             yield {"totals": total["values"]}
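One general caveat, not something this diff changes: lazy %-formatting defers building the message string, but arguments such as response.json() are still evaluated at call time. When an argument is expensive to compute, the call can be guarded explicitly; a small sketch under that assumption (hypothetical helper, not part of the tap):

import logging

logger = logging.getLogger(__name__)

def log_raw_response(response) -> None:  # illustrative only
    # Parse the payload only if the record would actually be emitted.
    if logger.isEnabledFor(logging.INFO):
        logger.info("Received raw query response: %s", response.json())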
2 changes: 1 addition & 1 deletion samples/sample_target_csv/csv_target_sink.py
@@ -32,7 +32,7 @@ def process_batch(self, context: dict) -> None:
         openmode = "a"
         outpath = self.target_filepath.absolute()
         if not outpath.is_file():
-            self.logger.info(f"Writing to new file: {outpath}")
+            self.logger.info("Writing to new file: %s", outpath)
             newfile = True
             openmode = "w"
         with outpath.open(openmode, newline="\n", encoding="utf-8") as csvfile:
8 changes: 5 additions & 3 deletions singer_sdk/helpers/_typing.py
@@ -200,11 +200,11 @@ def handle_invalid_timestamp_in_record(
         f"field '{':'.join(key_breadcrumb)}'."
     )
     if treatment == DatetimeErrorTreatmentEnum.MAX:
-        logger.warning(f"{msg}. Replacing with MAX value.\n{ex}\n")
+        logger.warning("%s. Replacing with MAX value.\n%s\n", msg, ex)
         return _MAX_TIMESTAMP if datelike_typename != "time" else _MAX_TIME

     if treatment == DatetimeErrorTreatmentEnum.NULL:
-        logger.warning(f"{msg}. Replacing with NULL.\n{ex}\n")
+        logger.warning("%s. Replacing with NULL.\n%s\n", msg, ex)
         return None

     raise ValueError(msg)
@@ -326,8 +326,10 @@ def _warn_unmapped_properties(
     logger: logging.Logger,
 ):
     logger.warning(
-        f"Properties {property_names} were present in the '{stream_name}' stream but "
+        "Properties %s were present in the '%s' stream but "
         "not found in catalog schema. Ignoring.",
+        property_names,
+        stream_name,
     )

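A related note on the same rule family (general guidance, not a change made in this commit): when an exception is logged from an except block, the G rules also police exc_info usage — G201 prefers logger.exception(...) over logger.error(..., exc_info=True), and G202 flags a redundant exc_info argument. A brief sketch:

import logging

logger = logging.getLogger(__name__)

try:
    int("not-a-number")  # illustrative failure
except ValueError:
    # logger.exception logs at ERROR level and attaches the current
    # traceback automatically, satisfying G201.
    logger.exception("Could not parse value")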
29 changes: 19 additions & 10 deletions singer_sdk/mapper.py
@@ -343,7 +343,7 @@ def _eval(
                 f"Failed to evaluate simpleeval expressions {expr}.",
             ) from ex

-        logging.debug(f"Eval result: {expr} = {result}")
+        logging.debug("Eval result: %s = %s", expr, result)

         return result

@@ -405,22 +405,28 @@ def _init_functions_and_schema( # noqa: PLR0912, PLR0915
         include_by_default = True
         if stream_map and MAPPER_FILTER_OPTION in stream_map:
             filter_rule = stream_map.pop(MAPPER_FILTER_OPTION)
-            logging.info(f"Found '{self.stream_alias}' filter rule: {filter_rule}")
+            logging.info(
+                "Found '%s' filter rule: %s",
+                self.stream_alias,
+                filter_rule,
+            )

         if stream_map and MAPPER_KEY_PROPERTIES_OPTION in stream_map:
             self.transformed_key_properties: list[str] = stream_map.pop(
                 MAPPER_KEY_PROPERTIES_OPTION,
             )
             logging.info(
-                f"Found stream map override for '{self.stream_alias}' key properties: "
-                f"{str(self.transformed_key_properties)}",
+                "Found stream map override for '%s' key properties: %s",
+                self.stream_alias,
+                self.transformed_key_properties,
             )

         if stream_map and MAPPER_ELSE_OPTION in stream_map:
             if stream_map[MAPPER_ELSE_OPTION] in {None, NULL_STRING}:
                 logging.info(
-                    f"Detected `{MAPPER_ELSE_OPTION}=None` rule. "
+                    "Detected `%s=None` rule. "
                     "Unmapped, non-key properties will be excluded from output.",
+                    MAPPER_ELSE_OPTION,
                 )
                 include_by_default = False
             else:
@@ -496,8 +502,9 @@ def _inner(record: dict) -> bool:
                 property_name=None,
             )
             logging.debug(
-                f"Filter result for '{filter_rule}' "
-                "in '{self.name}' stream: {filter_result}",
+                "Filter result for '%s' in '%s' stream: %s",
+                filter_rule,
+                self.name,
+                filter_result,
             )
             if not filter_result:
                 logging.debug("Excluding record due to filter.")
@@ -588,8 +595,9 @@ def __init__(
         if MAPPER_ELSE_OPTION in self.stream_maps_dict:
             if self.stream_maps_dict[MAPPER_ELSE_OPTION] in {None, NULL_STRING}:
                 logging.info(
-                    f"Found '{MAPPER_ELSE_OPTION}=None' default mapper. "
+                    "Found '%s=None' default mapper. "
                     "Unmapped streams will be excluded from output.",
+                    MAPPER_ELSE_OPTION,
                 )
                 self.default_mapper_type = RemoveRecordTransform
                 self.stream_maps_dict.pop(MAPPER_ELSE_OPTION)
@@ -600,8 +608,9 @@
                 )
             else:
                 logging.debug(
-                    f"Operator '{MAPPER_ELSE_OPTION}=None' was not found. "
+                    "Operator '%s=None' was not found. "
                     "Unmapped streams will be included in output.",
+                    MAPPER_ELSE_OPTION,
                 )
         for stream_map_key, stream_def in self.stream_maps_dict.items():
             if stream_map_key.startswith("__"):
@@ -694,7 +703,7 @@ def register_raw_stream_schema( # noqa: PLR0912
                 key_properties=None,
                 flattening_options=self.flattening_options,
             )
-            logging.info(f"Set null transform as default for '{stream_name}'")
+            logging.info("Set null transform as default for '%s'", stream_name)
             continue

         if not isinstance(stream_def, dict):
3 changes: 2 additions & 1 deletion singer_sdk/plugin_base.py
@@ -262,7 +262,8 @@ def _validate_config(
         if config_jsonschema:
             self.append_builtin_config(config_jsonschema)
             self.logger.debug(
-                f"Validating config using jsonschema: {config_jsonschema}",
+                "Validating config using jsonschema: %s",
+                config_jsonschema,
             )
             validator = JSONSchemaValidator(config_jsonschema)
             errors = [e.message for e in validator.iter_errors(self._config)]
5 changes: 4 additions & 1 deletion singer_sdk/sinks/core.py
@@ -63,7 +63,10 @@ def __init__(
         self._config = dict(target.config)
         self._pending_batch: dict | None = None
         self.stream_name = stream_name
-        self.logger.info(f"Initializing target sink for stream '{stream_name}'...")
+        self.logger.info(
+            "Initializing target sink for stream '%s'...",
+            stream_name,
+        )
         self.schema = schema
         if self.include_sdc_metadata_properties:
             self._add_sdc_metadata_to_schema()
6 changes: 3 additions & 3 deletions singer_sdk/streams/core.py
@@ -223,8 +223,8 @@ def stream_maps(self) -> list[StreamMap]:
                 )
             else:
                 self.logger.info(
-                    f"No custom mapper provided for '{self.name}'. "
-                    "Using SameRecordTransform.",
+                    "No custom mapper provided for '%s'. Using SameRecordTransform.",
+                    self.name,
                 )
                 self._stream_maps = [
                     SameRecordTransform(
@@ -1175,7 +1175,7 @@ def sync(self, context: dict | None = None) -> None:
         msg = f"Beginning {self.replication_method.lower()} sync of '{self.name}'"
         if context:
             msg += f" with context: {context}"
-        self.logger.info(f"{msg}...")
+        self.logger.info("%s...", msg)

         # Use a replication signpost, if available
         signpost = self.get_replication_key_signpost(context)
2 changes: 1 addition & 1 deletion singer_sdk/streams/graphql.py
@@ -76,5 +76,5 @@ def prepare_request_payload(
             "query": (" ".join([line.strip() for line in query.splitlines()])),
             "variables": params,
         }
-        self.logger.debug(f"Attempting query:\n{query}")
+        self.logger.debug("Attempting query:\n%s", query)
         return request_data
11 changes: 8 additions & 3 deletions singer_sdk/streams/rest.py
@@ -641,9 +641,14 @@ def backoff_handler(self, details: Details) -> None:
             https://github.com/litl/backoff#event-handlers
         """
         logging.error(
-            "Backing off {wait:0.1f} seconds after {tries} tries "
-            "calling function {target} with args {args} and kwargs "
-            "{kwargs}".format(**details),
+            "Backing off %(wait)0.2f seconds after %(tries)d tries "
+            "calling function %(target)s with args %(args)s and kwargs "
+            "%(kwargs)s",
+            details,
         )

     def backoff_runtime(
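Because the backoff message uses named %(key)s placeholders, the standard library expects a single mapping as the lone logging argument — a single dict passed after the format string becomes the substitution mapping — whereas passing the values positionally would fail to format. A minimal sketch with an illustrative dict standing in for backoff's invocation details:

import logging

details = {  # illustrative stand-in for backoff's Details dict
    "wait": 1.5,
    "tries": 3,
    "target": "request_records",
    "args": (),
    "kwargs": {},
}

logging.error(
    "Backing off %(wait)0.2f seconds after %(tries)d tries "
    "calling function %(target)s with args %(args)s and kwargs %(kwargs)s",
    details,  # one mapping argument satisfies the named placeholders
)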
29 changes: 19 additions & 10 deletions singer_sdk/tap_base.py
@@ -223,9 +223,11 @@ def run_sync_dry_run(
         for stream in streams:
             if stream.parent_stream_type:
                 self.logger.debug(
-                    f"Child stream '{type(stream).__name__}' should be called by "
-                    f"parent stream '{stream.parent_stream_type.__name__}'. "
+                    "Child stream '%s' should be called by "
+                    "parent stream '%s'. "
                     "Skipping direct invocation.",
+                    type(stream).__name__,
+                    stream.parent_stream_type.__name__,
                 )
                 continue
             with contextlib.suppress(
@@ -326,7 +328,9 @@ def load_streams(self) -> list[Stream]:
             for stream in streams:
                 parent.child_streams.append(stream)
                 self.logger.info(
-                    f"Added '{stream.name}' as child stream to '{parent.name}'",
+                    "Added '%s' as child stream to '%s'",
+                    stream.name,
+                    parent.name,
                 )

         streams = [stream for streams in streams_by_type.values() for stream in streams]
@@ -381,10 +385,13 @@ def _set_compatible_replication_methods(self) -> None:
             for descendent in stream.descendent_streams:
                 if descendent.selected and descendent.ignore_parent_replication_key:
                     self.logger.warning(
-                        f"Stream descendent '{descendent.name}' is selected and "
-                        f"its parent '{stream.name}' does not use inclusive "
-                        f"replication keys. "
-                        f"Forcing full table replication for '{stream.name}'.",
+                        "Stream descendent '%s' is selected and "
+                        "its parent '%s' does not use inclusive "
+                        "replication keys. "
+                        "Forcing full table replication for '%s'.",
+                        descendent.name,
+                        stream.name,
+                        stream.name,
                     )
                     stream.replication_key = None
                     stream.forced_replication_method = "FULL_TABLE"
@@ -399,14 +406,16 @@ def sync_all(self) -> None:
         stream: Stream
         for stream in self.streams.values():
             if not stream.selected and not stream.has_selected_descendents:
-                self.logger.info(f"Skipping deselected stream '{stream.name}'.")
+                self.logger.info("Skipping deselected stream '%s'.", stream.name)
                 continue

             if stream.parent_stream_type:
                 self.logger.debug(
-                    f"Child stream '{type(stream).__name__}' is expected to be called "
-                    f"by parent stream '{stream.parent_stream_type.__name__}'. "
+                    "Child stream '%s' is expected to be called "
+                    "by parent stream '%s'. "
                     "Skipping direct invocation.",
+                    type(stream).__name__,
+                    stream.parent_stream_type.__name__,
                 )
                 continue

(Diffs for the remaining changed files are not rendered here.)