From 3addeb0700fc42994a310ba4d5d5249ad24c54e9 Mon Sep 17 00:00:00 2001
From: Harshal Sheth
Date: Wed, 23 Nov 2022 17:40:55 -0500
Subject: [PATCH] build(ingest): support flake8 6.0.0 (#6540)

---
 .../src/datahub/ingestion/source/bigquery_v2/lineage.py       | 1 -
 .../src/datahub/ingestion/source/bigquery_v2/usage.py         | 4 +---
 .../src/datahub/ingestion/source/confluent_schema_registry.py | 3 ++-
 .../datahub_provider/operators/datahub_operation_operator.py  | 1 -
 4 files changed, 3 insertions(+), 6 deletions(-)

diff --git a/metadata-ingestion/src/datahub/ingestion/source/bigquery_v2/lineage.py b/metadata-ingestion/src/datahub/ingestion/source/bigquery_v2/lineage.py
index 0ee1269f3298ad..16505f4d27dc91 100644
--- a/metadata-ingestion/src/datahub/ingestion/source/bigquery_v2/lineage.py
+++ b/metadata-ingestion/src/datahub/ingestion/source/bigquery_v2/lineage.py
@@ -631,7 +631,6 @@ def get_upstream_lineage_info(
         return None
 
     def test_capability(self, project_id: str) -> None:
-        lineage_metadata: Dict[str, Set[str]]
         if self.config.use_exported_bigquery_audit_metadata:
             bigquery_client: BigQueryClient = BigQueryClient(project=project_id)
             entries = self._get_exported_bigquery_audit_metadata(
diff --git a/metadata-ingestion/src/datahub/ingestion/source/bigquery_v2/usage.py b/metadata-ingestion/src/datahub/ingestion/source/bigquery_v2/usage.py
index 7227c209f8a632..97915586e8cbf1 100644
--- a/metadata-ingestion/src/datahub/ingestion/source/bigquery_v2/usage.py
+++ b/metadata-ingestion/src/datahub/ingestion/source/bigquery_v2/usage.py
@@ -4,7 +4,7 @@
 import time
 from dataclasses import dataclass
 from datetime import datetime
-from typing import Any, Dict, Iterable, List, MutableMapping, Optional, Set, Union, cast
+from typing import Any, Dict, Iterable, List, MutableMapping, Optional, Union, cast
 
 import cachetools
 from google.cloud.bigquery import Client as BigQueryClient
@@ -443,7 +443,6 @@ def _extract_operational_meta(
     ) -> Optional[OperationalDataMeta]:
         # If we don't have Query object that means this is a queryless read operation or a read operation which was not executed as JOB
         # https://cloud.google.com/bigquery/docs/reference/auditlogs/rest/Shared.Types/BigQueryAuditMetadata.TableDataRead.Reason/
-        operation_meta: OperationalDataMeta
         if not event.query_event and event.read_event:
             return OperationalDataMeta(
                 statement_type=OperationTypeClass.CUSTOM,
@@ -839,7 +838,6 @@ def _get_parsed_bigquery_log_events(
         )
 
     def test_capability(self, project_id: str) -> None:
-        lineage_metadata: Dict[str, Set[str]]
         for entry in self._get_parsed_bigquery_log_events(project_id, limit=1):
             logger.debug(f"Connection test got one {entry}")
             return
diff --git a/metadata-ingestion/src/datahub/ingestion/source/confluent_schema_registry.py b/metadata-ingestion/src/datahub/ingestion/source/confluent_schema_registry.py
index e2f2e586df1965..41009440796ba0 100644
--- a/metadata-ingestion/src/datahub/ingestion/source/confluent_schema_registry.py
+++ b/metadata-ingestion/src/datahub/ingestion/source/confluent_schema_registry.py
@@ -141,7 +141,8 @@ def get_schemas_from_confluent_ref_protobuf(
         if schema_seen is None:
             schema_seen = set()
 
-        for schema_ref in schema.references:  # type: SchemaReference
+        schema_ref: SchemaReference
+        for schema_ref in schema.references:
             ref_subject: str = schema_ref["subject"]
             if ref_subject in schema_seen:
                 continue
diff --git a/metadata-ingestion/src/datahub_provider/operators/datahub_operation_operator.py b/metadata-ingestion/src/datahub_provider/operators/datahub_operation_operator.py
index 905b7f71e23ca5..e5e45c2bf46940 100644
--- a/metadata-ingestion/src/datahub_provider/operators/datahub_operation_operator.py
+++ b/metadata-ingestion/src/datahub_provider/operators/datahub_operation_operator.py
@@ -83,7 +83,6 @@ def execute(self, context: Any) -> bool:
         else:
             raise Exception(f"urn parameter has invalid type {type(self.urn)}")
 
-        partition: Optional[str]
         for urn in urns:
             self.log.info(f"Checking if dataset {self.urn} is ready to be consumed")
             ret = self.circuit_breaker.is_circuit_breaker_active(