From 2f2066b43bd8cba8c73e4ec392c67d30d626c2c5 Mon Sep 17 00:00:00 2001
From: Nate Robinson
Date: Wed, 15 Jan 2025 15:15:32 -0500
Subject: [PATCH] Rename to submit_job_timeout

---
 .../airflow/providers/amazon/aws/operators/batch.py | 10 +++++-----
 providers/tests/amazon/aws/operators/test_batch.py  |  6 +++---
 2 files changed, 8 insertions(+), 8 deletions(-)

diff --git a/providers/src/airflow/providers/amazon/aws/operators/batch.py b/providers/src/airflow/providers/amazon/aws/operators/batch.py
index b7303ea779198..e69508d89319f 100644
--- a/providers/src/airflow/providers/amazon/aws/operators/batch.py
+++ b/providers/src/airflow/providers/amazon/aws/operators/batch.py
@@ -95,7 +95,7 @@ class BatchOperator(BaseOperator):
         If it is an array job, only the logs of the first task will be printed.
     :param awslogs_fetch_interval: The interval with which cloudwatch logs are to be fetched, 30 sec.
     :param poll_interval: (Deferrable mode only) Time in seconds to wait between polling.
-    :param batch_execution_timeout: Execution timeout in seconds for submitted batch job.
+    :param submit_job_timeout: Execution timeout in seconds for submitted batch job.
 
     .. note::
         Any custom waiters must return a waiter for these calls:
@@ -185,7 +185,7 @@ def __init__(
         poll_interval: int = 30,
         awslogs_enabled: bool = False,
         awslogs_fetch_interval: timedelta = timedelta(seconds=30),
-        batch_execution_timeout: int | None = None,
+        submit_job_timeout: int | None = None,
         **kwargs,
     ) -> None:
         BaseOperator.__init__(self, **kwargs)
@@ -210,7 +210,7 @@ def __init__(
         self.poll_interval = poll_interval
         self.awslogs_enabled = awslogs_enabled
         self.awslogs_fetch_interval = awslogs_fetch_interval
-        self.batch_execution_timeout = batch_execution_timeout
+        self.submit_job_timeout = submit_job_timeout
 
         # params for hook
         self.max_retries = max_retries
@@ -318,8 +318,8 @@ def submit_job(self, context: Context):
             "schedulingPriorityOverride": self.scheduling_priority_override,
         }
 
-        if self.batch_execution_timeout:
-            args["timeout"] = {"attemptDurationSeconds": self.batch_execution_timeout}
+        if self.submit_job_timeout:
+            args["timeout"] = {"attemptDurationSeconds": self.submit_job_timeout}
 
         try:
             response = self.hook.client.submit_job(**trim_none_values(args))
diff --git a/providers/tests/amazon/aws/operators/test_batch.py b/providers/tests/amazon/aws/operators/test_batch.py
index 8ea964c0594ed..c1b1d847b7d91 100644
--- a/providers/tests/amazon/aws/operators/test_batch.py
+++ b/providers/tests/amazon/aws/operators/test_batch.py
@@ -70,7 +70,7 @@ def setup_method(self, _, get_client_type_mock):
             aws_conn_id="airflow_test",
             region_name="eu-west-1",
             tags={},
-            batch_execution_timeout=3600,
+            submit_job_timeout=3600,
         )
         self.client_mock = self.get_client_type_mock.return_value
         # We're mocking all actual AWS calls and don't need a connection. This
@@ -110,7 +110,7 @@ def test_init(self):
         assert self.batch.hook.client == self.client_mock
         assert self.batch.tags == {}
         assert self.batch.wait_for_completion is True
-        assert self.batch.batch_execution_timeout == 3600
+        assert self.batch.submit_job_timeout == 3600
 
         self.get_client_type_mock.assert_called_once_with(region_name="eu-west-1")
 
@@ -143,7 +143,7 @@ def test_init_defaults(self):
         assert issubclass(type(batch_job.hook.client), botocore.client.BaseClient)
         assert batch_job.tags == {}
         assert batch_job.wait_for_completion is True
-        assert batch_job.batch_execution_timeout is None
+        assert batch_job.submit_job_timeout is None
 
     def test_template_fields_overrides(self):
         assert self.batch.template_fields == (
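
Note (not part of the patch): a minimal usage sketch of the renamed parameter in a DAG task, assuming the standard BatchOperator keyword arguments; the job name, queue, and definition values below are placeholders.

    # Illustrative sketch only: demonstrates passing submit_job_timeout,
    # which this patch forwards to submit_job as
    # timeout={"attemptDurationSeconds": <value>}.
    from airflow.providers.amazon.aws.operators.batch import BatchOperator

    submit_batch_job = BatchOperator(
        task_id="submit_batch_job",
        job_name="example-job",            # placeholder
        job_queue="example-queue",         # placeholder
        job_definition="example-job-def",  # placeholder
        region_name="eu-west-1",
        # Previously named batch_execution_timeout; the job is terminated by
        # AWS Batch if it runs longer than this many seconds.
        submit_job_timeout=3600,
    )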