diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 93469c5b03266..f15fa94686370 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -192,7 +192,7 @@ repos:
         name: Run pydocstyle
         args:
           - --convention=pep257
-          - --add-ignore=D100,D102,D104,D105,D106,D107,D200,D202,D204,D205,D207,D208,D210,D400,D401
+          - --add-ignore=D100,D102,D104,D105,D106,D107,D200,D202,D204,D205,D207,D208,D400,D401
         exclude: ^tests/.*\.py$|^scripts/.*\.py$|^dev|^backport_packages|^kubernetes_tests
   - repo: local
     hooks:
diff --git a/airflow/api/auth/backend/kerberos_auth.py b/airflow/api/auth/backend/kerberos_auth.py
index ea9afa712dd5d..dbb54d59a0287 100644
--- a/airflow/api/auth/backend/kerberos_auth.py
+++ b/airflow/api/auth/backend/kerberos_auth.py
@@ -60,7 +60,7 @@


 class KerberosService:  # pylint: disable=too-few-public-methods
-    """Class to keep information about the Kerberos Service initialized """
+    """Class to keep information about the Kerberos Service initialized"""

     def __init__(self):
         self.service_name = None
diff --git a/airflow/api_connexion/schemas/config_schema.py b/airflow/api_connexion/schemas/config_schema.py
index e1665a7fbaae6..bb7be9cd33dfc 100644
--- a/airflow/api_connexion/schemas/config_schema.py
+++ b/airflow/api_connexion/schemas/config_schema.py
@@ -21,36 +21,36 @@


 class ConfigOptionSchema(Schema):
-    """ Config Option Schema """
+    """Config Option Schema"""

     key = fields.String(required=True)
     value = fields.String(required=True)


 class ConfigOption(NamedTuple):
-    """ Config option """
+    """Config option"""

     key: str
     value: str


 class ConfigSectionSchema(Schema):
-    """ Config Section Schema"""
+    """Config Section Schema"""

     name = fields.String(required=True)
     options = fields.List(fields.Nested(ConfigOptionSchema))


 class ConfigSection(NamedTuple):
-    """ List of config options within a section """
+    """List of config options within a section"""

     name: str
     options: List[ConfigOption]


 class ConfigSchema(Schema):
-    """ Config Schema"""
+    """Config Schema"""

     sections = fields.List(fields.Nested(ConfigSectionSchema))


 class Config(NamedTuple):
-    """ List of config sections with their options """
+    """List of config sections with their options"""

     sections: List[ConfigSection]
diff --git a/airflow/api_connexion/schemas/connection_schema.py b/airflow/api_connexion/schemas/connection_schema.py
index 9c2eba368b6aa..e1d0a78cadc52 100644
--- a/airflow/api_connexion/schemas/connection_schema.py
+++ b/airflow/api_connexion/schemas/connection_schema.py
@@ -29,7 +29,7 @@ class ConnectionCollectionItemSchema(SQLAlchemySchema):
     """

     class Meta:
-        """ Meta """
+        """Meta"""
         model = Connection

     connection_id = auto_field('conn_id', required=True)
@@ -50,13 +50,13 @@ class ConnectionSchema(ConnectionCollectionItemSchema):  # pylint: disable=too-m


 class ConnectionCollection(NamedTuple):
-    """ List of Connections with meta"""
+    """List of Connections with meta"""

     connections: List[Connection]
     total_entries: int


 class ConnectionCollectionSchema(Schema):
-    """ Connection Collection Schema"""
+    """Connection Collection Schema"""

     connections = fields.List(fields.Nested(ConnectionCollectionItemSchema))
     total_entries = fields.Int()
diff --git a/airflow/api_connexion/schemas/dag_run_schema.py b/airflow/api_connexion/schemas/dag_run_schema.py
index cef827ab7ddfd..f13d3d787f015 100644
--- a/airflow/api_connexion/schemas/dag_run_schema.py
+++ b/airflow/api_connexion/schemas/dag_run_schema.py
@@ -29,7 +29,7 @@


 class ConfObject(fields.Field):
-    """ The conf field"""
+    """The conf field"""

     def _serialize(self, value, attr, obj, **kwargs):
         if not value:
             return {}
@@ -47,7 +47,7 @@ class DAGRunSchema(SQLAlchemySchema):
     """

     class Meta:
-        """ Meta """
+        """Meta"""
         model = DagRun
         dateformat = "iso"
@@ -90,10 +90,10 @@ class DAGRunCollectionSchema(Schema):


 class DagRunsBatchFormSchema(Schema):
-    """ Schema to validate and deserialize the Form(request payload) submitted to DagRun Batch endpoint"""
+    """Schema to validate and deserialize the Form(request payload) submitted to DagRun Batch endpoint"""

     class Meta:
-        """ Meta """
+        """Meta"""
         datetimeformat = 'iso'
         strict = True
diff --git a/airflow/api_connexion/schemas/enum_schemas.py b/airflow/api_connexion/schemas/enum_schemas.py
index f0a38cd1261d6..8e7280a877a00 100644
--- a/airflow/api_connexion/schemas/enum_schemas.py
+++ b/airflow/api_connexion/schemas/enum_schemas.py
@@ -21,7 +21,7 @@


 class DagStateField(fields.String):
-    """ Schema for DagState Enum"""
+    """Schema for DagState Enum"""

     def __init__(self, **metadata):
         super().__init__(**metadata)
         self.validators = (
diff --git a/airflow/api_connexion/schemas/error_schema.py b/airflow/api_connexion/schemas/error_schema.py
index 3fee9a9f73600..d08ce519d3aa4 100644
--- a/airflow/api_connexion/schemas/error_schema.py
+++ b/airflow/api_connexion/schemas/error_schema.py
@@ -44,7 +44,7 @@ class ImportErrorCollection(NamedTuple):


 class ImportErrorCollectionSchema(Schema):
-    """ Import error collection schema """
+    """Import error collection schema"""

     import_errors = fields.List(fields.Nested(ImportErrorSchema))
     total_entries = fields.Int()
diff --git a/airflow/api_connexion/schemas/event_log_schema.py b/airflow/api_connexion/schemas/event_log_schema.py
index 0753a8a104a44..c0b1bb280be9a 100644
--- a/airflow/api_connexion/schemas/event_log_schema.py
+++ b/airflow/api_connexion/schemas/event_log_schema.py
@@ -24,10 +24,10 @@


 class EventLogSchema(SQLAlchemySchema):
-    """ Event log schema """
+    """Event log schema"""

     class Meta:
-        """ Meta """
+        """Meta"""
         model = Log

     id = auto_field(data_key='event_log_id', dump_only=True)
@@ -41,13 +41,13 @@ class Meta:


 class EventLogCollection(NamedTuple):
-    """ List of import errors with metadata """
+    """List of import errors with metadata"""

     event_logs: List[Log]
     total_entries: int


 class EventLogCollectionSchema(Schema):
-    """ EventLog Collection Schema """
+    """EventLog Collection Schema"""

     event_logs = fields.List(fields.Nested(EventLogSchema))
     total_entries = fields.Int()
diff --git a/airflow/api_connexion/schemas/health_schema.py b/airflow/api_connexion/schemas/health_schema.py
index c0cbebb7ed627..bccfc0ae0fb03 100644
--- a/airflow/api_connexion/schemas/health_schema.py
+++ b/airflow/api_connexion/schemas/health_schema.py
@@ -28,12 +28,12 @@ class MetaDatabaseInfoSchema(BaseInfoSchema):


 class SchedulerInfoSchema(BaseInfoSchema):
-    """ Schema for Metadatabase info"""
+    """Schema for Metadatabase info"""

     latest_scheduler_heartbeat = fields.String(dump_only=True)


 class HeathInfoSchema(Schema):
-    """ Schema for the Health endpoint """
+    """Schema for the Health endpoint"""

     metadatabase = fields.Nested(MetaDatabaseInfoSchema)
     scheduler = fields.Nested(SchedulerInfoSchema)
diff --git a/airflow/api_connexion/schemas/log_schema.py b/airflow/api_connexion/schemas/log_schema.py
index e59416fe4e0bc..b9b7817dc3b30 100644
--- a/airflow/api_connexion/schemas/log_schema.py
+++ b/airflow/api_connexion/schemas/log_schema.py
@@ -20,14 +20,14 @@


 class LogsSchema(Schema):
-    """ Schema for logs """
+    """Schema for logs"""

     content = fields.Str()
     continuation_token = fields.Str()


 class LogResponseObject(NamedTuple):
-    """ Log Response Object """
+    """Log Response Object"""

     content: str
     continuation_token: str
diff --git a/airflow/api_connexion/schemas/variable_schema.py b/airflow/api_connexion/schemas/variable_schema.py
index c0c7bd1585dcd..4c73f8c36e3d3 100644
--- a/airflow/api_connexion/schemas/variable_schema.py
+++ b/airflow/api_connexion/schemas/variable_schema.py
@@ -19,13 +19,13 @@


 class VariableSchema(Schema):
-    """ Variable Schema """
+    """Variable Schema"""

     key = fields.String(required=True)
     value = fields.String(attribute="val", required=True)


 class VariableCollectionSchema(Schema):
-    """ Variable Collection Schema """
+    """Variable Collection Schema"""

     variables = fields.List(fields.Nested(VariableSchema))
     total_entries = fields.Int()
diff --git a/airflow/api_connexion/schemas/xcom_schema.py b/airflow/api_connexion/schemas/xcom_schema.py
index 9d06c1b05ac0f..268732534851f 100644
--- a/airflow/api_connexion/schemas/xcom_schema.py
+++ b/airflow/api_connexion/schemas/xcom_schema.py
@@ -28,7 +28,7 @@ class XComCollectionItemSchema(SQLAlchemySchema):
     """

     class Meta:
-        """ Meta """
+        """Meta"""
         model = XCom

     key = auto_field()
@@ -47,13 +47,13 @@ class XComSchema(XComCollectionItemSchema):


 class XComCollection(NamedTuple):
-    """ List of XComs with meta"""
+    """List of XComs with meta"""

     xcom_entries: List[XCom]
     total_entries: int


 class XComCollectionSchema(Schema):
-    """ XCom Collection Schema"""
+    """XCom Collection Schema"""

     xcom_entries = fields.List(fields.Nested(XComCollectionItemSchema))
     total_entries = fields.Int()
diff --git a/airflow/cli/commands/legacy_commands.py b/airflow/cli/commands/legacy_commands.py
index b2ca64ae08c27..aa0789c05c035 100644
--- a/airflow/cli/commands/legacy_commands.py
+++ b/airflow/cli/commands/legacy_commands.py
@@ -49,7 +49,7 @@


 def check_legacy_command(action, value):
-    """ Checks command value and raise error if value is in removed command """
+    """Checks command value and raise error if value is in removed command"""
     new_command = COMMAND_MAP.get(value)
     if new_command is not None:
         msg = f"`airflow {value}` command, has been removed, please use `airflow {new_command}`"
diff --git a/airflow/models/base.py b/airflow/models/base.py
index 9774de0eb3395..dc1953439ecf1 100644
--- a/airflow/models/base.py
+++ b/airflow/models/base.py
@@ -37,11 +37,11 @@

 # used for typing
 class Operator:
-    """ Class just used for Typing """
+    """Class just used for Typing"""


 def get_id_collation_args():
-    """ Get SQLAlchemy args to use for COLLATION """
+    """Get SQLAlchemy args to use for COLLATION"""
     collation = conf.get('core', 'sql_engine_collation_for_ids', fallback=None)
     if collation:
         return {'collation': collation}
diff --git a/airflow/models/baseoperator.py b/airflow/models/baseoperator.py
index 5deeadc61378e..352ebcda791c2 100644
--- a/airflow/models/baseoperator.py
+++ b/airflow/models/baseoperator.py
@@ -1299,7 +1299,7 @@ def xcom_pull(

     @cached_property
     def extra_links(self) -> List[str]:
-        """@property: extra links for the task. """
+        """@property: extra links for the task"""
         return list(set(self.operator_extra_link_dict.keys())
                     .union(self.global_operator_extra_link_dict.keys()))
diff --git a/airflow/models/connection.py b/airflow/models/connection.py
index 17658f70b54d7..4bedc22ad94b5 100644
--- a/airflow/models/connection.py
+++ b/airflow/models/connection.py
@@ -308,7 +308,7 @@ def extra(cls):  # pylint: disable=no-self-argument
                               descriptor=property(cls.get_extra, cls.set_extra))

     def rotate_fernet_key(self):
-        """Encrypts data with a new key. See: :ref:`security/fernet`. """
+        """Encrypts data with a new key. See: :ref:`security/fernet`"""
         fernet = get_fernet()
         if self._password and self.is_encrypted:
             self._password = fernet.rotate(self._password.encode('utf-8')).decode()
diff --git a/airflow/models/dagrun.py b/airflow/models/dagrun.py
index 736da96f018db..ff043a65fb90f 100644
--- a/airflow/models/dagrun.py
+++ b/airflow/models/dagrun.py
@@ -523,7 +523,7 @@ def is_backfill(self):
     @classmethod
     @provide_session
     def get_latest_runs(cls, session=None):
-        """Returns the latest DagRun for each DAG. """
+        """Returns the latest DagRun for each DAG"""
         subquery = (
             session
             .query(
diff --git a/airflow/models/pool.py b/airflow/models/pool.py
index f351fd593eacf..2a61a7520c479 100644
--- a/airflow/models/pool.py
+++ b/airflow/models/pool.py
@@ -30,7 +30,7 @@


 class PoolStats(TypedDict):
-    """ Dictionary containing Pool Stats """
+    """Dictionary containing Pool Stats"""

     total: int
     running: int
     queued: int
diff --git a/airflow/models/renderedtifields.py b/airflow/models/renderedtifields.py
index e4f1f00ba0ed6..515fb68756655 100644
--- a/airflow/models/renderedtifields.py
+++ b/airflow/models/renderedtifields.py
@@ -15,7 +15,7 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
-"""Save Rendered Template Fields """
+"""Save Rendered Template Fields"""
 from typing import Optional

 import sqlalchemy_jsonfield
diff --git a/airflow/models/skipmixin.py b/airflow/models/skipmixin.py
index 0ad2eb882cd90..ba911da9ae729 100644
--- a/airflow/models/skipmixin.py
+++ b/airflow/models/skipmixin.py
@@ -35,8 +35,7 @@


 class SkipMixin(LoggingMixin):
-    """ A Mixin to skip Tasks Instances """
-
+    """A Mixin to skip Tasks Instances"""
     def _set_state_to_skipped(self, dag_run, execution_date, tasks, session):
         """
         Used internally to set state of task instances to skipped from the same dag run.
diff --git a/airflow/models/taskinstance.py b/airflow/models/taskinstance.py
index e0c465e9ed19d..0165614431d3b 100644
--- a/airflow/models/taskinstance.py
+++ b/airflow/models/taskinstance.py
@@ -268,7 +268,7 @@ def __init__(self, task, execution_date: datetime, state: Optional[str] = None):

     @reconstructor
     def init_on_load(self):
-        """ Initialize the attributes that aren't stored in the DB. """
+        """Initialize the attributes that aren't stored in the DB"""
         self.test_mode = False  # can be changed when calling 'run'

     @property
diff --git a/airflow/models/variable.py b/airflow/models/variable.py
index a306dcf5a81be..a9222c4468c22 100644
--- a/airflow/models/variable.py
+++ b/airflow/models/variable.py
@@ -177,7 +177,7 @@ def delete(cls, key: str, session: Session = None) -> int:
         return session.query(cls).filter(cls.key == key).delete()

     def rotate_fernet_key(self):
-        """ Rotate Fernet Key """
+        """Rotate Fernet Key"""
         fernet = get_fernet()
         if self._val and self.is_encrypted:
             self._val = fernet.rotate(self._val.encode('utf-8')).decode()
diff --git a/airflow/providers/apache/hive/hooks/hive.py b/airflow/providers/apache/hive/hooks/hive.py
index 8e577a31b1cdf..677ba3fe5e65e 100644
--- a/airflow/providers/apache/hive/hooks/hive.py
+++ b/airflow/providers/apache/hive/hooks/hive.py
@@ -491,7 +491,7 @@ def kill(self) -> None:


 class HiveMetastoreHook(BaseHook):
-    """ Wrapper to interact with the Hive Metastore"""
+    """Wrapper to interact with the Hive Metastore"""

     # java short max val
     MAX_PART_COUNT = 32767
diff --git a/airflow/providers/apache/spark/operators/spark_sql.py b/airflow/providers/apache/spark/operators/spark_sql.py
index 59a1001a582f1..fec7f6f15d3f5 100644
--- a/airflow/providers/apache/spark/operators/spark_sql.py
+++ b/airflow/providers/apache/spark/operators/spark_sql.py
@@ -109,7 +109,7 @@ def on_kill(self) -> None:
             self._hook.kill()

     def _get_hook(self) -> SparkSqlHook:
-        """ Get SparkSqlHook """
+        """Get SparkSqlHook"""
         return SparkSqlHook(sql=self._sql,
                             conf=self._conf,
                             conn_id=self._conn_id,
diff --git a/airflow/providers/elasticsearch/log/es_task_handler.py b/airflow/providers/elasticsearch/log/es_task_handler.py
index 44432b9ef9360..6ff2ef37e0802 100644
--- a/airflow/providers/elasticsearch/log/es_task_handler.py
+++ b/airflow/providers/elasticsearch/log/es_task_handler.py
@@ -281,7 +281,7 @@ def close(self) -> None:

     @property
     def log_name(self) -> str:
-        """ The log name"""
+        """The log name"""
         return self.LOG_NAME

     def get_external_log_url(self, task_instance: TaskInstance, try_number: int) -> str:
diff --git a/airflow/providers/facebook/ads/hooks/ads.py b/airflow/providers/facebook/ads/hooks/ads.py
index c8f01aba0b482..1318cb24a40ba 100644
--- a/airflow/providers/facebook/ads/hooks/ads.py
+++ b/airflow/providers/facebook/ads/hooks/ads.py
@@ -72,7 +72,7 @@ def __init__(
             "account_id"]

     def _get_service(self) -> FacebookAdsApi:
-        """ Returns Facebook Ads Client using a service account"""
+        """Returns Facebook Ads Client using a service account"""
         config = self.facebook_ads_config
         return FacebookAdsApi.init(app_id=config["app_id"],
                                    app_secret=config["app_secret"],
diff --git a/airflow/providers/google/cloud/hooks/bigquery.py b/airflow/providers/google/cloud/hooks/bigquery.py
index 59885f2c209f4..6133c11ff9732 100644
--- a/airflow/providers/google/cloud/hooks/bigquery.py
+++ b/airflow/providers/google/cloud/hooks/bigquery.py
@@ -2240,17 +2240,17 @@ def __init__(self, *args, **kwargs) -> None:
         self._kwargs = kwargs

     def close(self) -> None:  # noqa: D403
-        """BigQueryConnection does not have anything to close. """
+        """BigQueryConnection does not have anything to close"""

     def commit(self) -> None:  # noqa: D403
-        """BigQueryConnection does not support transactions. """
+        """BigQueryConnection does not support transactions"""

     def cursor(self) -> "BigQueryCursor":  # noqa: D403
-        """Return a new :py:class:`Cursor` object using the connection. """
+        """Return a new :py:class:`Cursor` object using the connection"""
         return BigQueryCursor(*self._args, **self._kwargs)

     def rollback(self) -> NoReturn:  # noqa: D403
-        """BigQueryConnection does not have transactions """
+        """BigQueryConnection does not have transactions"""
         raise NotImplementedError(
             "BigQueryConnection does not have transactions")
@@ -2584,15 +2584,15 @@ def __init__(

     @property
     def description(self) -> NoReturn:
-        """ The schema description method is not currently implemented. """
+        """The schema description method is not currently implemented"""
         raise NotImplementedError

     def close(self) -> None:
-        """ By default, do nothing """
+        """By default, do nothing"""

     @property
     def rowcount(self) -> int:
-        """ By default, return -1 to indicate that this is not supported. """
+        """By default, return -1 to indicate that this is not supported"""
         return -1

     def execute(self, operation: str, parameters: Optional[Dict] = None) -> None:
@@ -2623,14 +2623,14 @@ def executemany(self, operation: str, seq_of_parameters: List) -> None:
             self.execute(operation, parameters)

     def flush_results(self) -> None:
-        """ Flush results related cursor attributes. """
+        """Flush results related cursor attributes"""
         self.page_token = None
         self.job_id = None
         self.all_pages_loaded = False
         self.buffer = []

     def fetchone(self) -> Union[List, None]:
-        """ Fetch the next row of a query result set. """
+        """Fetch the next row of a query result set"""
         # pylint: disable=not-callable
         return self.next()
@@ -2712,24 +2712,24 @@ def fetchall(self) -> List[List]:
         return result

     def get_arraysize(self) -> int:
-        """ Specifies the number of rows to fetch at a time with .fetchmany() """
+        """Specifies the number of rows to fetch at a time with .fetchmany()"""
         return self.buffersize or 1

     def set_arraysize(self, arraysize: int) -> None:
-        """ Specifies the number of rows to fetch at a time with .fetchmany() """
+        """Specifies the number of rows to fetch at a time with .fetchmany()"""
         self.buffersize = arraysize

     arraysize = property(get_arraysize, set_arraysize)

     def setinputsizes(self, sizes: Any) -> None:
-        """ Does nothing by default """
+        """Does nothing by default"""

     def setoutputsize(self, size: Any, column: Any = None) -> None:
-        """ Does nothing by default """
+        """Does nothing by default"""


 def _bind_parameters(operation: str, parameters: Dict) -> str:
-    """ Helper method that binds parameters to a SQL query. """
+    """Helper method that binds parameters to a SQL query"""
     # inspired by MySQL Python Connector (conversion.py)
     string_parameters = {}  # type Dict[str, str]
     for (name, value) in parameters.items():
@@ -2743,7 +2743,7 @@ def _bind_parameters(operation: str, parameters: Dict) -> str:


 def _escape(s: str) -> str:
-    """ Helper method that escapes parameters to a SQL query. """
+    """Helper method that escapes parameters to a SQL query"""
     e = s
     e = e.replace('\\', '\\\\')
     e = e.replace('\n', '\\n')
@@ -2853,7 +2853,7 @@ def _cleanse_time_partitioning(


 def _validate_value(key: Any, value: Any, expected_type: Type) -> None:
-    """ Function to check expected type and raise error if type is not correct. """
+    """Function to check expected type and raise error if type is not correct"""
     if not isinstance(value, expected_type):
         raise TypeError("{} argument must have a type {} not {}".format(
             key, expected_type, type(value)))
diff --git a/airflow/providers/google/cloud/hooks/kms.py b/airflow/providers/google/cloud/hooks/kms.py
index 2ab10f6e7de04..1ecc70678d340 100644
--- a/airflow/providers/google/cloud/hooks/kms.py
+++ b/airflow/providers/google/cloud/hooks/kms.py
@@ -31,12 +31,12 @@


 def _b64encode(s: bytes) -> str:
-    """ Base 64 encodes a bytes object to a string """
+    """Base 64 encodes a bytes object to a string"""
     return base64.b64encode(s).decode("ascii")


 def _b64decode(s: str) -> bytes:
-    """ Base 64 decodes a string to bytes. """
+    """Base 64 decodes a string to bytes"""
     return base64.b64decode(s.encode("utf-8"))
diff --git a/airflow/settings.py b/airflow/settings.py
index 4ab6738208cbf..f6a9c4a54d364 100644
--- a/airflow/settings.py
+++ b/airflow/settings.py
@@ -139,7 +139,7 @@ def pod_mutation_hook(pod):  # pylint: disable=unused-argument

 # pylint: disable=global-statement
 def configure_vars():
-    """ Configure Global Variables from airflow.cfg"""
+    """Configure Global Variables from airflow.cfg"""
     global SQL_ALCHEMY_CONN
     global DAGS_FOLDER
     global PLUGINS_FOLDER
@@ -154,7 +154,7 @@ def configure_vars():


 def configure_orm(disable_connection_pool=False):
-    """ Configure ORM using SQLAlchemy"""
+    """Configure ORM using SQLAlchemy"""
     log.debug("Setting up DB connection pool (PID %s)", os.getpid())
     global engine
     global Session
@@ -222,7 +222,7 @@ def configure_orm(disable_connection_pool=False):


 def dispose_orm():
-    """ Properly close pooled database connections """
+    """Properly close pooled database connections"""
     log.debug("Disposing DB connection pool (PID %s)", os.getpid())
     global engine
     global Session
@@ -236,7 +236,7 @@ def dispose_orm():


 def configure_adapters():
-    """ Register Adapters and DB Converters """
+    """Register Adapters and DB Converters"""
     from pendulum import DateTime as Pendulum
     try:
         from sqlite3 import register_adapter
@@ -256,7 +256,7 @@ def configure_adapters():


 def validate_session():
-    """ Validate ORM Session """
+    """Validate ORM Session"""
     worker_precheck = conf.getboolean('core', 'worker_precheck', fallback=False)
     if not worker_precheck:
         return True
@@ -300,7 +300,7 @@ def prepare_syspath():


 def import_local_settings():
-    """ Import airflow_local_settings.py files to allow overriding any configs in settings.py file """
+    """Import airflow_local_settings.py files to allow overriding any configs in settings.py file"""
     try:  # pylint: disable=too-many-nested-blocks
         import airflow_local_settings
@@ -318,7 +318,7 @@ def import_local_settings():


 def initialize():
-    """ Initialize Airflow with all the settings from this file """
+    """Initialize Airflow with all the settings from this file"""
     configure_vars()
     prepare_syspath()
     import_local_settings()
diff --git a/airflow/stats.py b/airflow/stats.py
index f2abc014075e3..026a30809b171 100644
--- a/airflow/stats.py
+++ b/airflow/stats.py
@@ -127,7 +127,7 @@ def __init__(self, allow_list=None):
             self.allow_list = None

     def test(self, stat):
-        """ Test if stat is in the Allow List """
+        """Test if stat is in the Allow List"""
         if self.allow_list is not None:
             return stat.strip().lower().startswith(self.allow_list)
         else:
diff --git a/airflow/utils/log/log_reader.py b/airflow/utils/log/log_reader.py
index 6aab9e6e055d8..dbfe476e18dde 100644
--- a/airflow/utils/log/log_reader.py
+++ b/airflow/utils/log/log_reader.py
@@ -27,7 +27,7 @@


 class TaskLogReader:
-    """ Task log reader"""
+    """Task log reader"""

     def read_log_chunks(self, ti: TaskInstance, try_number: Optional[int],
                         metadata) -> Tuple[List[str], Dict[str, Any]]:
diff --git a/airflow/www/api/experimental/endpoints.py b/airflow/www/api/experimental/endpoints.py
index 023ab72e58a4c..6805ed729f69a 100644
--- a/airflow/www/api/experimental/endpoints.py
+++ b/airflow/www/api/experimental/endpoints.py
@@ -171,14 +171,14 @@ def dag_runs(dag_id):
 @api_experimental.route('/test', methods=['GET'])
 @requires_authentication
 def test():
-    """ Test endpoint to check authentication """
+    """Test endpoint to check authentication"""
     return jsonify(status='OK')


 @api_experimental.route('/info', methods=['GET'])
 @requires_authentication
 def info():
-    """ Get Airflow Version """
+    """Get Airflow Version"""
     return jsonify(version=version)


@@ -198,7 +198,7 @@ def get_dag_code(dag_id):
 @api_experimental.route('/dags/<string:dag_id>/tasks/<string:task_id>', methods=['GET'])
 @requires_authentication
 def task_info(dag_id, task_id):
-    """Returns a JSON with a task's public instance variables. """
+    """Returns a JSON with a task's public instance variables"""
     try:
         t_info = get_task(dag_id, task_id)
     except AirflowException as err:
@@ -320,7 +320,7 @@ def dag_run_status(dag_id, execution_date):
 @api_experimental.route('/latest_runs', methods=['GET'])
 @requires_authentication
 def latest_dag_runs():
-    """Returns the latest DagRun for each DAG formatted for the UI. """
+    """Returns the latest DagRun for each DAG formatted for the UI"""
     from airflow.models import DagRun
     dagruns = DagRun.get_latest_runs()
     payload = []
@@ -402,7 +402,7 @@ def delete_pool(name):
                         methods=['GET'])
 @requires_authentication
 def get_lineage(dag_id: str, execution_date: str):
-    """ Get Lineage details for a DagRun """
+    """Get Lineage details for a DagRun"""
     # Convert string datetime into actual datetime
     try:
         execution_dt = timezone.parse(execution_date)
diff --git a/airflow/www/utils.py b/airflow/www/utils.py
index f81de136f26ad..efcb518ac2807 100644
--- a/airflow/www/utils.py
+++ b/airflow/www/utils.py
@@ -57,7 +57,7 @@ def get_sensitive_variables_fields():


 def should_hide_value_for_key(key_name):
-    """Returns True if hide_sensitive_variable_fields is True, else False """
+    """Returns True if hide_sensitive_variable_fields is True, else False"""
     # It is possible via importing variables from file that a key is empty.
     if key_name:
         config_set = conf.getboolean('admin', 'hide_sensitive_variable_fields')
diff --git a/airflow/www/views.py b/airflow/www/views.py
index 129c0edcb1d73..260af26bca847 100644
--- a/airflow/www/views.py
+++ b/airflow/www/views.py
@@ -96,7 +96,7 @@ def get_safe_url(url):


 def get_date_time_num_runs_dag_runs_form_data(www_request, session, dag):
-    """Get Execution Data, Base Date & Number of runs from a Request """
+    """Get Execution Data, Base Date & Number of runs from a Request"""
     date_time = www_request.args.get('execution_date')
     if date_time:
         date_time = timezone.parse(date_time)
diff --git a/dev/airflow-pr b/dev/airflow-pr
index ef24836a0b76a..916604c24f69b 100755
--- a/dev/airflow-pr
+++ b/dev/airflow-pr
@@ -460,7 +460,7 @@ def fix_version_from_branch(branch, versions):


 def register(username, password):
-    """ Use this function to register a JIRA account in your OS' keyring """
+    """Use this function to register a JIRA account in your OS' keyring"""
     keyring.set_password('airflow-pr', 'username', username)
     keyring.set_password('airflow-pr', 'password', password)
diff --git a/docs/exts/docroles.py b/docs/exts/docroles.py
index ee10d4ef50481..58346ab0f6adc 100644
--- a/docs/exts/docroles.py
+++ b/docs/exts/docroles.py
@@ -25,7 +25,7 @@


 class RoleException(Exception):
-    """Exception for roles extension """
+    """Exception for roles extension"""


 def get_template_field(env, fullname):
diff --git a/tests/dags/test_task_view_type_check.py b/tests/dags/test_task_view_type_check.py
index fa04b5e0f5b3f..6f4585ba0258a 100644
--- a/tests/dags/test_task_view_type_check.py
+++ b/tests/dags/test_task_view_type_check.py
@@ -38,11 +38,11 @@ class CallableClass:
     Class that is callable.
     """
     def __call__(self):
-        """ A __call__ method """
+        """A __call__ method"""


 def a_function(_, __):
-    """ A function with two args """
+    """A function with two args"""


 partial_function = functools.partial(a_function, arg_x=1)
diff --git a/tests/operators/test_sql.py b/tests/operators/test_sql.py
index 57208473edb5d..5e076169abc10 100644
--- a/tests/operators/test_sql.py
+++ b/tests/operators/test_sql.py
@@ -356,7 +356,7 @@ def tearDown(self):
             session.query(TI).delete()

     def test_unsupported_conn_type(self):
-        """ Check if BranchSQLOperator throws an exception for unsupported connection type """
+        """Check if BranchSQLOperator throws an exception for unsupported connection type"""
         op = BranchSQLOperator(
             task_id="make_choice",
             conn_id="redis_default",
@@ -371,7 +371,7 @@ def test_unsupported_conn_type(self):
             end_date=DEFAULT_DATE, ignore_ti_state=True)

     def test_invalid_conn(self):
-        """ Check if BranchSQLOperator throws an exception for invalid connection """
+        """Check if BranchSQLOperator throws an exception for invalid connection"""
         op = BranchSQLOperator(
             task_id="make_choice",
             conn_id="invalid_connection",
@@ -386,7 +386,7 @@ def test_invalid_conn(self):
             end_date=DEFAULT_DATE, ignore_ti_state=True)

     def test_invalid_follow_task_true(self):
-        """ Check if BranchSQLOperator throws an exception for invalid connection """
+        """Check if BranchSQLOperator throws an exception for invalid connection"""
         op = BranchSQLOperator(
             task_id="make_choice",
             conn_id="invalid_connection",
@@ -401,7 +401,7 @@ def test_invalid_follow_task_true(self):
             end_date=DEFAULT_DATE, ignore_ti_state=True)

     def test_invalid_follow_task_false(self):
-        """ Check if BranchSQLOperator throws an exception for invalid connection """
+        """Check if BranchSQLOperator throws an exception for invalid connection"""
         op = BranchSQLOperator(
task_id="make_choice", conn_id="invalid_connection", @@ -417,7 +417,7 @@ def test_invalid_follow_task_false(self): @pytest.mark.backend("mysql") def test_sql_branch_operator_mysql(self): - """ Check if BranchSQLOperator works with backend """ + """Check if BranchSQLOperator works with backend """ branch_op = BranchSQLOperator( task_id="make_choice", conn_id="mysql_default", @@ -432,7 +432,7 @@ def test_sql_branch_operator_mysql(self): @pytest.mark.backend("postgres") def test_sql_branch_operator_postgres(self): - """ Check if BranchSQLOperator works with backend """ + """Check if BranchSQLOperator works with backend """ branch_op = BranchSQLOperator( task_id="make_choice", conn_id="postgres_default", @@ -447,7 +447,7 @@ def test_sql_branch_operator_postgres(self): @mock.patch("airflow.operators.sql.BaseHook") def test_branch_single_value_with_dag_run(self, mock_hook): - """ Check BranchSQLOperator branch operation """ + """Check BranchSQLOperator branch operation """ branch_op = BranchSQLOperator( task_id="make_choice", conn_id="mysql_default", @@ -490,7 +490,7 @@ def test_branch_single_value_with_dag_run(self, mock_hook): @mock.patch("airflow.operators.sql.BaseHook") def test_branch_true_with_dag_run(self, mock_hook): - """ Check BranchSQLOperator branch operation """ + """Check BranchSQLOperator branch operation """ branch_op = BranchSQLOperator( task_id="make_choice", conn_id="mysql_default", @@ -534,7 +534,7 @@ def test_branch_true_with_dag_run(self, mock_hook): @mock.patch("airflow.operators.sql.BaseHook") def test_branch_false_with_dag_run(self, mock_hook): - """ Check BranchSQLOperator branch operation """ + """Check BranchSQLOperator branch operation """ branch_op = BranchSQLOperator( task_id="make_choice", conn_id="mysql_default", @@ -578,7 +578,7 @@ def test_branch_false_with_dag_run(self, mock_hook): @mock.patch("airflow.operators.sql.BaseHook") def test_branch_list_with_dag_run(self, mock_hook): - """ Checks if the BranchSQLOperator supports branching off to a list of tasks.""" + """Checks if the BranchSQLOperator supports branching off to a list of tasks.""" branch_op = BranchSQLOperator( task_id="make_choice", conn_id="mysql_default", @@ -624,7 +624,7 @@ def test_branch_list_with_dag_run(self, mock_hook): @mock.patch("airflow.operators.sql.BaseHook") def test_invalid_query_result_with_dag_run(self, mock_hook): - """ Check BranchSQLOperator branch operation """ + """Check BranchSQLOperator branch operation """ branch_op = BranchSQLOperator( task_id="make_choice", conn_id="mysql_default", @@ -657,7 +657,7 @@ def test_invalid_query_result_with_dag_run(self, mock_hook): @mock.patch("airflow.operators.sql.BaseHook") def test_with_skip_in_branch_downstream_dependencies(self, mock_hook): - """ Test SQL Branch with skipping all downstream dependencies """ + """Test SQL Branch with skipping all downstream dependencies """ branch_op = BranchSQLOperator( task_id="make_choice", conn_id="mysql_default", @@ -701,7 +701,7 @@ def test_with_skip_in_branch_downstream_dependencies(self, mock_hook): @mock.patch("airflow.operators.sql.BaseHook") def test_with_skip_in_branch_downstream_dependencies2(self, mock_hook): - """ Test skipping downstream dependency for false condition""" + """Test skipping downstream dependency for false condition""" branch_op = BranchSQLOperator( task_id="make_choice", conn_id="mysql_default", diff --git a/tests/providers/jenkins/hooks/test_jenkins.py b/tests/providers/jenkins/hooks/test_jenkins.py index 13da3c5fc5ffd..044ac46be7a08 100644 --- 
a/tests/providers/jenkins/hooks/test_jenkins.py +++ b/tests/providers/jenkins/hooks/test_jenkins.py @@ -26,7 +26,7 @@ class TestJenkinsHook(unittest.TestCase): @mock.patch('airflow.hooks.base_hook.BaseHook.get_connection') def test_client_created_default_http(self, get_connection_mock): - """ tests `init` method to validate http client creation when all parameters are passed """ + """tests `init` method to validate http client creation when all parameters are passed """ default_connection_id = 'jenkins_default' connection_host = 'http://test.com' @@ -43,7 +43,7 @@ def test_client_created_default_http(self, get_connection_mock): @mock.patch('airflow.hooks.base_hook.BaseHook.get_connection') def test_client_created_default_https(self, get_connection_mock): - """ tests `init` method to validate https client creation when all + """tests `init` method to validate https client creation when all parameters are passed """ default_connection_id = 'jenkins_default' diff --git a/tests/providers/slack/hooks/test_slack.py b/tests/providers/slack/hooks/test_slack.py index 93349483cebd3..3c7bade6990d2 100644 --- a/tests/providers/slack/hooks/test_slack.py +++ b/tests/providers/slack/hooks/test_slack.py @@ -28,7 +28,7 @@ class TestSlackHook(unittest.TestCase): def test_get_token_with_token_only(self): - """ tests `__get_token` method when only token is provided """ + """tests `__get_token` method when only token is provided """ # Given test_token = 'test_token' test_conn_id = None @@ -44,7 +44,7 @@ def test_get_token_with_token_only(self): @mock.patch('airflow.providers.slack.hooks.slack.WebClient') @mock.patch('airflow.providers.slack.hooks.slack.SlackHook.get_connection') def test_get_token_with_valid_slack_conn_id_only(self, get_connection_mock, mock_slack_client): - """ tests `__get_token` method when only connection is provided """ + """tests `__get_token` method when only connection is provided """ # Given test_token = None test_conn_id = 'x' @@ -64,7 +64,7 @@ def test_get_token_with_valid_slack_conn_id_only(self, get_connection_mock, mock @mock.patch('airflow.providers.slack.hooks.slack.SlackHook.get_connection') def test_get_token_with_no_password_slack_conn_id_only(self, get_connection_mock): - """ tests `__get_token` method when only connection is provided """ + """tests `__get_token` method when only connection is provided """ # Mock conn = mock.Mock() @@ -76,7 +76,7 @@ def test_get_token_with_no_password_slack_conn_id_only(self, get_connection_mock @mock.patch('airflow.providers.slack.hooks.slack.SlackHook.get_connection') def test_get_token_with_empty_password_slack_conn_id_only(self, get_connection_mock): - """ tests `__get_token` method when only connection is provided """ + """tests `__get_token` method when only connection is provided """ # Mock get_connection_mock.return_value = mock.Mock(password=None) @@ -85,7 +85,7 @@ def test_get_token_with_empty_password_slack_conn_id_only(self, get_connection_m self.assertRaises(AirflowException, SlackHook, token=None, slack_conn_id='x') def test_get_token_with_token_and_slack_conn_id(self): - """ tests `__get_token` method when both arguments are provided """ + """tests `__get_token` method when both arguments are provided """ # Given test_token = 'test_token' test_conn_id = 'x' @@ -99,7 +99,7 @@ def test_get_token_with_token_and_slack_conn_id(self): self.assertEqual(output, expected) def test_get_token_with_out_token_nor_slack_conn_id(self): - """ tests `__get_token` method when no arguments are provided """ + """tests `__get_token` method when 
no arguments are provided """ self.assertRaises(AirflowException, SlackHook, token=None, slack_conn_id=None) diff --git a/tests/providers/yandex/hooks/test_yandex.py b/tests/providers/yandex/hooks/test_yandex.py index 3c1613b11d2cb..c9493b4d95bb9 100644 --- a/tests/providers/yandex/hooks/test_yandex.py +++ b/tests/providers/yandex/hooks/test_yandex.py @@ -29,7 +29,7 @@ class TestYandexHook(unittest.TestCase): @mock.patch('airflow.providers.yandex.hooks.yandex.YandexCloudBaseHook._get_credentials') def test_client_created_without_exceptions(self, get_credentials_mock, get_connection_mock): - """ tests `init` method to validate client creation when all parameters are passed """ + """tests `init` method to validate client creation when all parameters are passed """ # Inputs to constructor default_folder_id = 'test_id' @@ -49,7 +49,7 @@ def test_client_created_without_exceptions(self, get_credentials_mock, @mock.patch('airflow.hooks.base_hook.BaseHook.get_connection') def test_get_credentials_raise_exception(self, get_connection_mock): - """ tests 'get_credentials' method raising exception if none of the required fields are passed.""" + """tests 'get_credentials' method raising exception if none of the required fields are passed.""" # Inputs to constructor default_folder_id = 'test_id' diff --git a/tests/serialization/test_dag_serialization.py b/tests/serialization/test_dag_serialization.py index 152a3c210c4bf..948ca99f9d93e 100644 --- a/tests/serialization/test_dag_serialization.py +++ b/tests/serialization/test_dag_serialization.py @@ -135,7 +135,7 @@ def make_simple_dag(): def make_user_defined_macro_filter_dag(): - """ Make DAGs with user defined macros and filters using locally defined methods. + """Make DAGs with user defined macros and filters using locally defined methods. For Webserver, we do not include ``user_defined_macros`` & ``user_defined_filters``.