diff --git a/UPDATING.md b/UPDATING.md
index 0755f799b6997..65cea7fbc4a8c 100644
--- a/UPDATING.md
+++ b/UPDATING.md
@@ -57,6 +57,16 @@ https://developers.google.com/style/inclusive-documentation
 -->
 
+### Remove airflow.utils.file.TemporaryDirectory
+
+Since Airflow dropped support for Python < 3.5, there is no need for this custom
+implementation of `TemporaryDirectory`: the same functionality is provided by
+`tempfile.TemporaryDirectory`.
+
+Instead of `from airflow.utils.file import TemporaryDirectory`, users should now
+use `from tempfile import TemporaryDirectory`. Both context managers provide the same
+interface, so no additional changes should be required.
+
 ### Change python3 as Dataflow Hooks/Operators default interpreter
 
 Now the `py_interpreter` argument for DataFlow Hooks/Operators has been changed from python2 to python3.
diff --git a/airflow/contrib/operators/oracle_to_azure_data_lake_transfer.py b/airflow/contrib/operators/oracle_to_azure_data_lake_transfer.py
index 8f816dda78058..fc1ba24cf99b7 100644
--- a/airflow/contrib/operators/oracle_to_azure_data_lake_transfer.py
+++ b/airflow/contrib/operators/oracle_to_azure_data_lake_transfer.py
@@ -18,6 +18,7 @@
 # under the License.
 
 import os
+from tempfile import TemporaryDirectory
 
 import unicodecsv as csv
 
@@ -25,7 +26,6 @@
 from airflow.hooks.oracle_hook import OracleHook
 from airflow.models import BaseOperator
 from airflow.utils.decorators import apply_defaults
-from airflow.utils.file import TemporaryDirectory
 
 
 class OracleToAzureDataLakeTransfer(BaseOperator):
diff --git a/airflow/contrib/sensors/bash_sensor.py b/airflow/contrib/sensors/bash_sensor.py
index 5aaa2dfb87f8d..b0701f07e790f 100644
--- a/airflow/contrib/sensors/bash_sensor.py
+++ b/airflow/contrib/sensors/bash_sensor.py
@@ -19,11 +19,10 @@
 
 import os
 from subprocess import PIPE, STDOUT, Popen
-from tempfile import NamedTemporaryFile, gettempdir
+from tempfile import NamedTemporaryFile, TemporaryDirectory, gettempdir
 
 from airflow.sensors.base_sensor_operator import BaseSensorOperator
 from airflow.utils.decorators import apply_defaults
-from airflow.utils.file import TemporaryDirectory
 
 
 class BashSensor(BaseSensorOperator):
diff --git a/airflow/hooks/hive_hooks.py b/airflow/hooks/hive_hooks.py
index 4a636c5e5239d..67b84ff8d5859 100644
--- a/airflow/hooks/hive_hooks.py
+++ b/airflow/hooks/hive_hooks.py
@@ -24,7 +24,7 @@
 import subprocess
 import time
 from collections import OrderedDict
-from tempfile import NamedTemporaryFile
+from tempfile import NamedTemporaryFile, TemporaryDirectory
 
 import unicodecsv as csv
 
@@ -32,7 +32,6 @@
 from airflow.exceptions import AirflowException
 from airflow.hooks.base_hook import BaseHook
 from airflow.security import utils
-from airflow.utils.file import TemporaryDirectory
 from airflow.utils.helpers import as_flattened_list
 from airflow.utils.operator_helpers import AIRFLOW_VAR_NAME_FORMAT_MAPPING
diff --git a/airflow/hooks/pig_hook.py b/airflow/hooks/pig_hook.py
index 2b4d015bac027..7eaf536c328ba 100644
--- a/airflow/hooks/pig_hook.py
+++ b/airflow/hooks/pig_hook.py
@@ -18,11 +18,10 @@
 # under the License.
 import subprocess
-from tempfile import NamedTemporaryFile
+from tempfile import NamedTemporaryFile, TemporaryDirectory
 
 from airflow.exceptions import AirflowException
 from airflow.hooks.base_hook import BaseHook
-from airflow.utils.file import TemporaryDirectory
 
 
 class PigCliHook(BaseHook):
diff --git a/airflow/operators/bash_operator.py b/airflow/operators/bash_operator.py
index 67ffbee37c40e..8e782b2b4b5c5 100644
--- a/airflow/operators/bash_operator.py
+++ b/airflow/operators/bash_operator.py
@@ -21,13 +21,12 @@
 import os
 import signal
 from subprocess import PIPE, STDOUT, Popen
-from tempfile import gettempdir
+from tempfile import TemporaryDirectory, gettempdir
 from typing import Dict, Optional
 
 from airflow.exceptions import AirflowException
 from airflow.models import BaseOperator
 from airflow.utils.decorators import apply_defaults
-from airflow.utils.file import TemporaryDirectory
 from airflow.utils.operator_helpers import context_to_airflow_vars
diff --git a/airflow/operators/docker_operator.py b/airflow/operators/docker_operator.py
index 7e530385c3fe7..86f575e0be4ff 100644
--- a/airflow/operators/docker_operator.py
+++ b/airflow/operators/docker_operator.py
@@ -21,6 +21,7 @@
 """
 import ast
 import json
+from tempfile import TemporaryDirectory
 from typing import Dict, Iterable, List, Optional, Union
 
 from docker import APIClient, tls
@@ -29,7 +30,6 @@
 from airflow.hooks.docker_hook import DockerHook
 from airflow.models import BaseOperator
 from airflow.utils.decorators import apply_defaults
-from airflow.utils.file import TemporaryDirectory
 
 
 # pylint: disable=too-many-instance-attributes
diff --git a/airflow/operators/python_operator.py b/airflow/operators/python_operator.py
index bb82428ceac13..b0a937d2a5ce7 100644
--- a/airflow/operators/python_operator.py
+++ b/airflow/operators/python_operator.py
@@ -25,6 +25,7 @@
 import types
 from inspect import signature
 from itertools import islice
+from tempfile import TemporaryDirectory
 from textwrap import dedent
 from typing import Callable, Dict, Iterable, List, Optional
 
@@ -33,7 +34,6 @@
 from airflow.exceptions import AirflowException
 from airflow.models import BaseOperator, SkipMixin
 from airflow.utils.decorators import apply_defaults
-from airflow.utils.file import TemporaryDirectory
 from airflow.utils.operator_helpers import context_to_airflow_vars
diff --git a/airflow/operators/s3_to_hive_operator.py b/airflow/operators/s3_to_hive_operator.py
index ee602ce6f8e1d..8de181e533924 100644
--- a/airflow/operators/s3_to_hive_operator.py
+++ b/airflow/operators/s3_to_hive_operator.py
@@ -25,7 +25,7 @@
 import gzip
 import os
 import tempfile
-from tempfile import NamedTemporaryFile
+from tempfile import NamedTemporaryFile, TemporaryDirectory
 from typing import Dict, Optional, Union
 
 from airflow.exceptions import AirflowException
@@ -34,7 +34,6 @@
 from airflow.providers.amazon.aws.hooks.s3 import S3Hook
 from airflow.utils.compression import uncompress_file
 from airflow.utils.decorators import apply_defaults
-from airflow.utils.file import TemporaryDirectory
 
 
 class S3ToHiveTransfer(BaseOperator): # pylint: disable=too-many-instance-attributes
diff --git a/airflow/utils/file.py b/airflow/utils/file.py
index b72f44a0f1d9e..8012792117cd2 100644
--- a/airflow/utils/file.py
+++ b/airflow/utils/file.py
@@ -17,30 +17,22 @@
 # specific language governing permissions and limitations
 # under the License.
-import errno
 import os
 import re
-import shutil
 import zipfile
-from contextlib import contextmanager
-from tempfile import mkdtemp
 from typing import Dict, List, Optional, Pattern
 
 from airflow import LoggingMixin, conf
 
 
-@contextmanager
-def TemporaryDirectory(suffix='', prefix=None, dir=None):
-    name = mkdtemp(suffix=suffix, prefix=prefix, dir=dir)
-    try:
-        yield name
-    finally:
-        try:
-            shutil.rmtree(name)
-        except OSError as e:
-            # ENOENT - no such file or directory
-            if e.errno != errno.ENOENT:
-                raise e
+def TemporaryDirectory(*args, **kwargs):
+    import warnings
+    from tempfile import TemporaryDirectory as TmpDir
+    warnings.warn(
+        "This function is deprecated. Please use `tempfile.TemporaryDirectory`",
+        DeprecationWarning, stacklevel=2
+    )
+    return TmpDir(*args, **kwargs)
 
 
 def mkdirs(path, mode):
diff --git a/tests/contrib/operators/test_oracle_to_azure_data_lake_transfer.py b/tests/contrib/operators/test_oracle_to_azure_data_lake_transfer.py
index 1b6551869de4d..6d1981e1bcf57 100644
--- a/tests/contrib/operators/test_oracle_to_azure_data_lake_transfer.py
+++ b/tests/contrib/operators/test_oracle_to_azure_data_lake_transfer.py
@@ -19,13 +19,13 @@
 
 import os
 import unittest
+from tempfile import TemporaryDirectory
 
 import mock
 import unicodecsv as csv
 from mock import MagicMock
 
 from airflow.contrib.operators.oracle_to_azure_data_lake_transfer import OracleToAzureDataLakeTransfer
-from airflow.utils.file import TemporaryDirectory
 
 
 class TestOracleToAzureDataLakeTransfer(unittest.TestCase):
diff --git a/tests/gcp/operators/test_cloud_build_system_helper.py b/tests/gcp/operators/test_cloud_build_system_helper.py
index e7a075fd856d5..6e1c7b726fd6b 100755
--- a/tests/gcp/operators/test_cloud_build_system_helper.py
+++ b/tests/gcp/operators/test_cloud_build_system_helper.py
@@ -22,9 +22,9 @@
 """
 import argparse
 import os
+from tempfile import TemporaryDirectory
 from urllib.parse import urlparse
 
-from airflow.utils.file import TemporaryDirectory
 from tests.contrib.utils.logging_command_executor import LoggingCommandExecutor
 from tests.gcp.utils.gcp_authenticator import GCP_CLOUD_BUILD_KEY, GcpAuthenticator
diff --git a/tests/operators/test_docker_operator.py b/tests/operators/test_docker_operator.py
index 541f84eecb04d..03357e922f6a0 100644
--- a/tests/operators/test_docker_operator.py
+++ b/tests/operators/test_docker_operator.py
@@ -33,11 +33,11 @@
 
 
 class TestDockerOperator(unittest.TestCase):
-    @mock.patch('airflow.utils.file.mkdtemp')
+    @mock.patch('airflow.operators.docker_operator.TemporaryDirectory')
     @mock.patch('airflow.operators.docker_operator.APIClient')
-    def test_execute(self, client_class_mock, mkdtemp_mock):
+    def test_execute(self, client_class_mock, tempdir_mock):
         host_config = mock.Mock()
-        mkdtemp_mock.return_value = '/mkdtemp'
+        tempdir_mock.return_value.__enter__.return_value = '/mkdtemp'
 
         client_mock = mock.Mock(spec=APIClient)
         client_mock.create_container.return_value = {'Id': 'some_id'}
@@ -82,7 +82,7 @@ def test_execute(self, client_class_mock, mkdtemp_mock):
             auto_remove=False,
             dns=None,
             dns_search=None)
-        mkdtemp_mock.assert_called_once_with(dir='/host/airflow', prefix='airflowtmp', suffix='')
+        tempdir_mock.assert_called_once_with(dir='/host/airflow', prefix='airflowtmp')
 
         client_mock.images.assert_called_once_with(name='ubuntu:latest')
         client_mock.attach.assert_called_once_with(container='some_id', stdout=True, stderr=True,
                                                    stream=True)
diff --git a/tests/providers/google/cloud/operators/test_dataproc_operator_system_helper.py b/tests/providers/google/cloud/operators/test_dataproc_operator_system_helper.py
index d74ce85a7d89a..b1916090736d5 100644
--- a/tests/providers/google/cloud/operators/test_dataproc_operator_system_helper.py
+++ b/tests/providers/google/cloud/operators/test_dataproc_operator_system_helper.py
@@ -18,8 +18,8 @@
 # specific language governing permissions and limitations
 # under the License.
 import os
+from tempfile import TemporaryDirectory
 
-from airflow.utils.file import TemporaryDirectory
 from tests.contrib.utils.logging_command_executor import LoggingCommandExecutor
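For reference, the user-facing migration described in the UPDATING.md hunk above amounts to swapping the import: both context managers yield the path of a temporary directory and remove it on exit. A minimal sketch follows; the `prefix` value and the body of the `with` block are illustrative only, not taken from this diff:

```python
# Sketch only: the drop-in replacement described in UPDATING.md.

# Before (removed by this change):
#     from airflow.utils.file import TemporaryDirectory
# After:
from tempfile import TemporaryDirectory

with TemporaryDirectory(prefix='airflowtmp') as tmp_dir:
    # tmp_dir is the path to a freshly created directory; it is deleted
    # automatically when the block exits, matching the removed helper.
    print(tmp_dir)
```

Note that the shim kept in `airflow/utils/file.py` leaves the old import path working for now, but it emits a `DeprecationWarning` pointing callers at `tempfile.TemporaryDirectory`.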