diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index f08907b70dfc4..befff35c08564 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -264,6 +264,8 @@ metastore_browser/templates/.*\\.html$|.*\\.jinja2" (?x) ^airflow/providers/apache/cassandra/hooks/cassandra.py$| ^airflow/providers/apache/hive/operators/hive_stats.py$| + ^airflow/providers/apache/hive/PROVIDERS_CHANGES_.*| + ^airflow/providers/apache/hive/README.md$| ^tests/providers/apache/cassandra/hooks/test_cassandra.py - id: consistent-pylint language: pygrep diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst index 7ae2e9836151d..b3e9dfa539493 100644 --- a/CONTRIBUTING.rst +++ b/CONTRIBUTING.rst @@ -542,8 +542,71 @@ We support the following types of tests: For details on running different types of Airflow tests, see `TESTING.rst <TESTING.rst>`_. + +Naming Conventions for provider packages +======================================== + +In Airflow 2.0 we standardized and enforced naming for provider packages, modules and classes. +These rules (introduced as AIP-21) are enforced by automated checks that verify that the naming +conventions are followed. Here is a brief summary of the rules; for a detailed discussion see +`AIP-21: Changes in import paths <https://cwiki.apache.org/confluence/display/AIRFLOW/AIP-21%3A+Changes+in+import+paths>`_. + +The rules are as follows: + +* Provider packages are all placed in 'airflow.providers' + +* Providers are usually direct sub-packages of the 'airflow.providers' package but in some cases they can be + further split into sub-packages (for example the 'apache' package has 'cassandra', 'druid', ... providers) out + of which several different provider packages are produced (apache.cassandra, apache.druid). This is the + case when the providers are connected under a common umbrella but very loosely coupled on the code level. + +* In some cases the package can have sub-packages but they are all delivered as a single provider + package (for example the 'google' package contains 'ads', 'cloud' etc. sub-packages). This is the case when + the providers are connected under a common umbrella and are also tightly coupled on the code level. + +* Typical structure of a provider package: + * example_dags -> example DAGs are stored here (used for documentation and System Tests) + * hooks -> hooks are stored here + * operators -> operators are stored here + * sensors -> sensors are stored here + * secrets -> secret backends are stored here + * transfers -> transfer operators are stored here + +* Module names do not contain the words "hooks", "operators" etc. The right type comes from + the package. For example the 'hooks.datastore' module contains the DataStore hook and 'operators.datastore' + contains DataStore operators. + +* Class names contain 'Operator', 'Hook', 'Sensor' - for example DataStoreHook, DataStoreExportOperator + +* Operator name usually follows the convention: ``<Subject><Action><Entity>Operator`` + (BigQueryExecuteQueryOperator is a good example) + +* Transfer Operators are those that actively push data from one service/provider and send it to another + service (might be for the same or another provider). This usually involves two hooks. The convention + for those is ``<Source>To<Destination>Operator``. They are not named *TransferOperator nor *Transfer + (see the illustrative sketch below). + +* Operators that use an external service to perform the transfer (for example CloudDataTransferService + operators) are not placed in the "transfers" package and do not have to follow the naming convention for + transfer operators.
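+
+For illustration only, here is a minimal sketch of a hypothetical transfer operator that follows the
+conventions above (the module path, class name, and behavior below are made up for this example and
+are not real Airflow code):
+
+.. code-block:: python
+
+    # Hypothetical module: airflow/providers/mysql/transfers/s3_to_mysql.py
+    # It lives in the "transfers" package of the target provider (mysql), and the
+    # module name does not repeat the words "operators" or "transfers".
+    from airflow.models import BaseOperator
+
+
+    class S3ToMySqlOperator(BaseOperator):
+        """Named <Source>To<Destination>Operator, with no *Transfer* suffix."""
+
+        def execute(self, context):
+            # A transfer operator typically combines two hooks: one reading from
+            # the source service and one writing to the target service.
+            ...
+
+The matching tests would live in a parallel package, for example
+tests/providers/mysql/transfers/test_s3_to_mysql.py.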
+ +* It is often debatable where to put transfer operators but we agreed to the following criteria: + + * We use "maintainability" of the operators as the main criterion - so the transfer operator + should be kept at the provider which has the highest "interest" in the transfer operator + + * For Cloud Providers or Service providers that usually means that the transfer operators + should land at the "target" side of the transfer + +* Secret Backend name follows the convention: ``<SecretEngine>Backend``. + +* Tests are grouped in parallel packages under the "tests.providers" top level package. Module name is usually + ``test_<object_to_test>.py``. + +* System tests (not yet fully automated, but allowing e2e testing of a particular provider) are + named with the _system.py suffix. + Metadata Database Updates -============================== +========================= When developing features, you may need to persist information to the metadata database. Airflow has `Alembic `__ built-in @@ -623,7 +686,7 @@ could get a reproducible build. See the `Yarn docs Generate Bundled Files with yarn ----------------------------------- +-------------------------------- To parse and generate bundled files for Airflow, run either of the following commands: @@ -910,6 +973,7 @@ You can join the channels via links at the `Airflow Community page `_ for: * checking out old but still valuable issues that are not on Github yet * mentioning the JIRA issue number in the title of the related PR you would like to open on Github + **IMPORTANT** We don't create new issues on JIRA anymore. The reason we still look at JIRA issues is that there are valuable tickets inside it. However, each new PR should be created on `Github issues `_ as stated in `Contribution Workflow Example `_ diff --git a/UPDATING.md b/UPDATING.md index 8de906470f9e8..01d69f0f160a9 100644 --- a/UPDATING.md +++ b/UPDATING.md @@ -912,7 +912,7 @@ The following table shows changes in import paths. |airflow.contrib.operators.dataproc_operator.DataprocWorkflowTemplateInstantiateOperator |airflow.providers.google.cloud.operators.dataproc.DataprocInstantiateWorkflowTemplateOperator | |airflow.contrib.operators.datastore_export_operator.DatastoreExportOperator |airflow.providers.google.cloud.operators.datastore.DatastoreExportOperator | |airflow.contrib.operators.datastore_import_operator.DatastoreImportOperator |airflow.providers.google.cloud.operators.datastore.DatastoreImportOperator | -|airflow.contrib.operators.file_to_gcs.FileToGoogleCloudStorageOperator |airflow.providers.google.cloud.operators.local_to_gcs.FileToGoogleCloudStorageOperator | +|airflow.contrib.operators.file_to_gcs.FileToGoogleCloudStorageOperator |airflow.providers.google.cloud.transfers.local_to_gcs.LocalFilesystemToGCSOperator | |airflow.contrib.operators.gcp_bigtable_operator.BigtableClusterUpdateOperator |airflow.providers.google.cloud.operators.bigtable.BigtableUpdateClusterOperator | |airflow.contrib.operators.gcp_bigtable_operator.BigtableInstanceCreateOperator |airflow.providers.google.cloud.operators.bigtable.BigtableCreateInstanceOperator | |airflow.contrib.operators.gcp_bigtable_operator.BigtableInstanceDeleteOperator |airflow.providers.google.cloud.operators.bigtable.BigtableDeleteInstanceOperator | @@ -1006,7 +1006,7 @@ The following table shows changes in import paths.
|airflow.contrib.operators.gcs_acl_operator.GoogleCloudStorageBucketCreateAclEntryOperator |airflow.providers.google.cloud.operators.gcs.GCSBucketCreateAclEntryOperator | |airflow.contrib.operators.gcs_acl_operator.GoogleCloudStorageObjectCreateAclEntryOperator |airflow.providers.google.cloud.operators.gcs.GCSObjectCreateAclEntryOperator | |airflow.contrib.operators.gcs_delete_operator.GoogleCloudStorageDeleteOperator |airflow.providers.google.cloud.operators.gcs.GCSDeleteObjectsOperator | -|airflow.contrib.operators.gcs_download_operator.GoogleCloudStorageDownloadOperator |airflow.providers.google.cloud.operators.gcs.GCSToLocalOperator | +|airflow.contrib.operators.gcs_download_operator.GoogleCloudStorageDownloadOperator |airflow.providers.google.cloud.transfers.gcs_to_local.GCSToLocalFilesystemOperator | |airflow.contrib.operators.gcs_list_operator.GoogleCloudStorageListOperator |airflow.providers.google.cloud.operators.gcs.GCSListObjectsOperator | |airflow.contrib.operators.gcs_operator.GoogleCloudStorageCreateBucketOperator |airflow.providers.google.cloud.operators.gcs.GCSCreateBucketOperator | |airflow.contrib.operators.gcs_to_bq.GoogleCloudStorageToBigQueryOperator |airflow.operators.gcs_to_bq.GoogleCloudStorageToBigQueryOperator | diff --git a/airflow/contrib/operators/adls_to_gcs.py b/airflow/contrib/operators/adls_to_gcs.py index c0f3b6a305c6d..f3def746fd91a 100644 --- a/airflow/contrib/operators/adls_to_gcs.py +++ b/airflow/contrib/operators/adls_to_gcs.py @@ -15,14 +15,14 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -"""This module is deprecated. Please use `airflow.providers.google.cloud.operators.adls_to_gcs`.""" +"""This module is deprecated. Please use `airflow.providers.google.cloud.transfers.adls_to_gcs`.""" import warnings -from airflow.providers.google.cloud.operators.adls_to_gcs import ADLSToGCSOperator +from airflow.providers.google.cloud.transfers.adls_to_gcs import ADLSToGCSOperator warnings.warn( - "This module is deprecated. Please use `airflow.providers.google.cloud.operators.adls_to_gcs`.", + "This module is deprecated. Please use `airflow.providers.google.cloud.transfers.adls_to_gcs`.", DeprecationWarning, stacklevel=2 ) @@ -30,13 +30,13 @@ class AdlsToGoogleCloudStorageOperator(ADLSToGCSOperator): """ This class is deprecated. - Please use `airflow.providers.google.cloud.operators.adls_to_gcs.ADLSToGCSOperator`. + Please use `airflow.providers.google.cloud.transfers.adls_to_gcs.ADLSToGCSOperator`. """ def __init__(self, *args, **kwargs): warnings.warn( """This class is deprecated. - Please use `airflow.providers.google.cloud.operators.adls_to_gcs.ADLSToGCSOperator`.""", + Please use `airflow.providers.google.cloud.transfers.adls_to_gcs.ADLSToGCSOperator`.""", DeprecationWarning, stacklevel=2 ) super().__init__(*args, **kwargs) diff --git a/airflow/contrib/operators/bigquery_to_bigquery.py b/airflow/contrib/operators/bigquery_to_bigquery.py index e985c478f79a3..e585ccdf04a00 100644 --- a/airflow/contrib/operators/bigquery_to_bigquery.py +++ b/airflow/contrib/operators/bigquery_to_bigquery.py @@ -15,14 +15,14 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -"""This module is deprecated.
Please use `airflow.providers.google.cloud.transfers.bigquery_to_bigquery`.""" import warnings # pylint: disable=unused-import -from airflow.providers.google.cloud.operators.bigquery_to_bigquery import BigQueryToBigQueryOperator # noqa +from airflow.providers.google.cloud.transfers.bigquery_to_bigquery import BigQueryToBigQueryOperator # noqa warnings.warn( - "This module is deprecated. Please use `airflow.providers.google.cloud.operators.bigquery_to_bigquery`.", + "This module is deprecated. Please use `airflow.providers.google.cloud.transfers.bigquery_to_bigquery`.", DeprecationWarning, stacklevel=2 ) diff --git a/airflow/contrib/operators/bigquery_to_gcs.py b/airflow/contrib/operators/bigquery_to_gcs.py index f8f718d31892b..cd3b57172f7ac 100644 --- a/airflow/contrib/operators/bigquery_to_gcs.py +++ b/airflow/contrib/operators/bigquery_to_gcs.py @@ -15,14 +15,14 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -"""This module is deprecated. Please use `airflow.providers.google.cloud.operators.bigquery_to_gcs`.""" +"""This module is deprecated. Please use `airflow.providers.google.cloud.transfers.bigquery_to_gcs`.""" import warnings -from airflow.providers.google.cloud.operators.bigquery_to_gcs import BigQueryToGCSOperator +from airflow.providers.google.cloud.transfers.bigquery_to_gcs import BigQueryToGCSOperator warnings.warn( - "This module is deprecated. Please use `airflow.providers.google.cloud.operators.bigquery_to_gcs`.", + "This module is deprecated. Please use `airflow.providers.google.cloud.transfers.bigquery_to_gcs`.", DeprecationWarning, stacklevel=2 ) @@ -30,13 +30,13 @@ class BigQueryToCloudStorageOperator(BigQueryToGCSOperator): """ This class is deprecated. - Please use `airflow.providers.google.cloud.operators.bigquery_to_gcs.BigQueryToGCSOperator`. + Please use `airflow.providers.google.cloud.transfers.bigquery_to_gcs.BigQueryToGCSOperator`. """ def __init__(self, *args, **kwargs): warnings.warn( """This class is deprecated. - Please use `airflow.providers.google.cloud.operators.bigquery_to_gcs.BigQueryToGCSOperator`.""", + Please use `airflow.providers.google.cloud.transfers.bigquery_to_gcs.BigQueryToGCSOperator`.""", DeprecationWarning, stacklevel=2 ) super().__init__(*args, **kwargs) diff --git a/airflow/contrib/operators/bigquery_to_mysql_operator.py b/airflow/contrib/operators/bigquery_to_mysql_operator.py index e9bf5763be937..37fec73ac92b3 100644 --- a/airflow/contrib/operators/bigquery_to_mysql_operator.py +++ b/airflow/contrib/operators/bigquery_to_mysql_operator.py @@ -15,14 +15,14 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -"""This module is deprecated. Please use `airflow.providers.google.cloud.operators.bigquery_to_mysql`.""" +"""This module is deprecated. Please use `airflow.providers.google.cloud.transfers.bigquery_to_mysql`.""" import warnings # pylint: disable=unused-import -from airflow.providers.google.cloud.operators.bigquery_to_mysql import BigQueryToMySqlOperator # noqa +from airflow.providers.google.cloud.transfers.bigquery_to_mysql import BigQueryToMySqlOperator # noqa warnings.warn( - "This module is deprecated. Please use `airflow.providers.google.cloud.operators.bigquery_to_mysql`.", + "This module is deprecated. 
Please use `airflow.providers.google.cloud.transfers.bigquery_to_mysql`.", DeprecationWarning, stacklevel=2 ) diff --git a/airflow/contrib/operators/cassandra_to_gcs.py b/airflow/contrib/operators/cassandra_to_gcs.py index 3e32006e5cdec..c36fd5f4c51da 100644 --- a/airflow/contrib/operators/cassandra_to_gcs.py +++ b/airflow/contrib/operators/cassandra_to_gcs.py @@ -16,15 +16,15 @@ # specific language governing permissions and limitations # under the License. """ -This module is deprecated. Please use `airflow.providers.google.cloud.operators.cassandra_to_gcs`. +This module is deprecated. Please use `airflow.providers.google.cloud.transfers.cassandra_to_gcs`. """ import warnings -from airflow.providers.google.cloud.operators.cassandra_to_gcs import CassandraToGCSOperator +from airflow.providers.google.cloud.transfers.cassandra_to_gcs import CassandraToGCSOperator warnings.warn( - "This module is deprecated. Please use `airflow.providers.google.cloud.operators.cassandra_to_gcs`.", + "This module is deprecated. Please use `airflow.providers.google.cloud.transfers.cassandra_to_gcs`.", DeprecationWarning, stacklevel=2 ) @@ -32,13 +32,13 @@ class CassandraToGoogleCloudStorageOperator(CassandraToGCSOperator): """ This class is deprecated. - Please use `airflow.providers.google.cloud.operators.cassandra_to_gcs.CassandraToGCSOperator`. + Please use `airflow.providers.google.cloud.transfers.cassandra_to_gcs.CassandraToGCSOperator`. """ def __init__(self, *args, **kwargs): warnings.warn( """This class is deprecated. - Please use `airflow.providers.google.cloud.operators.cassandra_to_gcs.CassandraToGCSOperator`.""", + Please use `airflow.providers.google.cloud.transfers.cassandra_to_gcs.CassandraToGCSOperator`.""", DeprecationWarning, stacklevel=2 ) super().__init__(*args, **kwargs) diff --git a/airflow/contrib/operators/dynamodb_to_s3.py b/airflow/contrib/operators/dynamodb_to_s3.py index 41b298129195a..748a69dc92263 100644 --- a/airflow/contrib/operators/dynamodb_to_s3.py +++ b/airflow/contrib/operators/dynamodb_to_s3.py @@ -15,14 +15,14 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -"""This module is deprecated. Please use `airflow.providers.amazon.aws.operators.dynamodb_to_s3`.""" +"""This module is deprecated. Please use `airflow.providers.amazon.aws.transfers.dynamodb_to_s3`.""" import warnings # pylint: disable=unused-import -from airflow.providers.amazon.aws.operators.dynamodb_to_s3 import DynamoDBToS3Operator # noqa +from airflow.providers.amazon.aws.transfers.dynamodb_to_s3 import DynamoDBToS3Operator # noqa warnings.warn( - "This module is deprecated. Please use `airflow.providers.amazon.aws.operators.dynamodb_to_s3`.", + "This module is deprecated. Please use `airflow.providers.amazon.aws.transfers.dynamodb_to_s3`.", DeprecationWarning, stacklevel=2 ) diff --git a/airflow/contrib/operators/file_to_gcs.py b/airflow/contrib/operators/file_to_gcs.py index 6bf4986390c3e..be7db23df5c1a 100644 --- a/airflow/contrib/operators/file_to_gcs.py +++ b/airflow/contrib/operators/file_to_gcs.py @@ -16,15 +16,15 @@ # specific language governing permissions and limitations # under the License. """ -This module is deprecated. Please use `airflow.providers.google.cloud.operators.local_to_gcs`. +This module is deprecated. Please use `airflow.providers.google.cloud.transfers.local_to_gcs`. 
""" import warnings -from airflow.providers.google.cloud.operators.local_to_gcs import LocalFilesystemToGCSOperator +from airflow.providers.google.cloud.transfers.local_to_gcs import LocalFilesystemToGCSOperator warnings.warn( - "This module is deprecated. Please use `airflow.providers.google.cloud.operators.local_to_gcs`,", + "This module is deprecated. Please use `airflow.providers.google.cloud.transfers.local_to_gcs`,", DeprecationWarning, stacklevel=2 ) @@ -32,14 +32,14 @@ class FileToGoogleCloudStorageOperator(LocalFilesystemToGCSOperator): """ This class is deprecated. - Please use `airflow.providers.google.cloud.operators.local_to_gcs.LocalFilesystemToGCSOperator`. + Please use `airflow.providers.google.cloud.transfers.local_to_gcs.LocalFilesystemToGCSOperator`. """ def __init__(self, *args, **kwargs): warnings.warn( """This class is deprecated. Please use - `airflow.providers.google.cloud.operators.local_to_gcs.LocalFilesystemToGCSOperator`.""", + `airflow.providers.google.cloud.transfers.local_to_gcs.LocalFilesystemToGCSOperator`.""", DeprecationWarning, stacklevel=2 ) super().__init__(*args, **kwargs) diff --git a/airflow/contrib/operators/file_to_wasb.py b/airflow/contrib/operators/file_to_wasb.py index b7368da7494a3..8ed0da71336ca 100644 --- a/airflow/contrib/operators/file_to_wasb.py +++ b/airflow/contrib/operators/file_to_wasb.py @@ -15,14 +15,14 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -"""This module is deprecated. Please use `airflow.providers.microsoft.azure.operators.file_to_wasb`.""" +"""This module is deprecated. Please use `airflow.providers.microsoft.azure.transfers.file_to_wasb`.""" import warnings # pylint: disable=unused-import -from airflow.providers.microsoft.azure.operators.file_to_wasb import FileToWasbOperator # noqa +from airflow.providers.microsoft.azure.transfers.file_to_wasb import FileToWasbOperator # noqa warnings.warn( - "This module is deprecated. Please use `airflow.providers.microsoft.azure.operators.file_to_wasb`.", + "This module is deprecated. Please use `airflow.providers.microsoft.azure.transfers.file_to_wasb`.", DeprecationWarning, stacklevel=2 ) diff --git a/airflow/contrib/operators/gcp_transfer_operator.py b/airflow/contrib/operators/gcp_transfer_operator.py index 70f81b817acf4..0d9c5d0c63b2a 100644 --- a/airflow/contrib/operators/gcp_transfer_operator.py +++ b/airflow/contrib/operators/gcp_transfer_operator.py @@ -180,8 +180,8 @@ def __init__(self, *args, **kwargs): class GoogleCloudStorageToGoogleCloudStorageTransferOperator(CloudDataTransferServiceGCSToGCSOperator): """ This class is deprecated. - Please use `airflow.providers.google.cloud.operators.data_transfe - r.CloudDataTransferServiceGCSToGCSOperator`. + Please use `airflow.providers.google.cloud.operators.data_transfer + .CloudDataTransferServiceGCSToGCSOperator`. """ def __init__(self, *args, **kwargs): diff --git a/airflow/contrib/operators/gcs_download_operator.py b/airflow/contrib/operators/gcs_download_operator.py index 687f84ca02361..6103e6c6a2035 100644 --- a/airflow/contrib/operators/gcs_download_operator.py +++ b/airflow/contrib/operators/gcs_download_operator.py @@ -21,7 +21,7 @@ import warnings -from airflow.providers.google.cloud.operators.gcs import GCSToLocalOperator +from airflow.providers.google.cloud.transfers.gcs_to_local import GCSToLocalFilesystemOperator warnings.warn( "This module is deprecated. 
Please use `airflow.providers.google.cloud.operators.gcs`.", @@ -29,16 +29,16 @@ ) -class GoogleCloudStorageDownloadOperator(GCSToLocalOperator): +class GoogleCloudStorageDownloadOperator(GCSToLocalFilesystemOperator): """ This class is deprecated. - Please use `airflow.providers.google.cloud.operators.gcs.GCSToLocalOperator`. + Please use `airflow.providers.google.cloud.transfers.gcs_to_local.GCSToLocalFilesystemOperator`. """ def __init__(self, *args, **kwargs): warnings.warn( """This class is deprecated. - Please use `airflow.providers.google.cloud.operators.gcs.GCSToLocalOperator`.""", + Please use `airflow.providers.google.cloud.transfers.gcs_to_local.GCSToLocalFilesystemOperator`.""", DeprecationWarning, stacklevel=2 ) super().__init__(*args, **kwargs) diff --git a/airflow/contrib/operators/gcs_to_bq.py b/airflow/contrib/operators/gcs_to_bq.py index 333e5aa1aab77..99c339e3547ba 100644 --- a/airflow/contrib/operators/gcs_to_bq.py +++ b/airflow/contrib/operators/gcs_to_bq.py @@ -16,15 +16,15 @@ # specific language governing permissions and limitations # under the License. """ -This module is deprecated. Please use `airflow.providers.google.cloud.operators.gcs_to_bigquery`. +This module is deprecated. Please use `airflow.providers.google.cloud.transfers.gcs_to_bigquery`. """ import warnings -from airflow.providers.google.cloud.operators.gcs_to_bigquery import GCSToBigQueryOperator +from airflow.providers.google.cloud.transfers.gcs_to_bigquery import GCSToBigQueryOperator warnings.warn( - "This module is deprecated. Please use `airflow.providers.google.cloud.operators.gcs_to_bigquery`.", + "This module is deprecated. Please use `airflow.providers.google.cloud.transfers.gcs_to_bigquery`.", DeprecationWarning, stacklevel=2 ) @@ -32,13 +32,13 @@ class GoogleCloudStorageToBigQueryOperator(GCSToBigQueryOperator): """ This class is deprecated. - Please use `airflow.providers.google.cloud.operators.gcs_to_bq.GCSToBigQueryOperator`. + Please use `airflow.providers.google.cloud.transfers.gcs_to_bigquery.GCSToBigQueryOperator`. """ def __init__(self, *args, **kwargs): warnings.warn( """This class is deprecated. - Please use `airflow.providers.google.cloud.operators.gcs_to_bq.GCSToBigQueryOperator`.""", + Please use `airflow.providers.google.cloud.transfers.gcs_to_bigquery.GCSToBigQueryOperator`.""", DeprecationWarning, stacklevel=2 ) super().__init__(*args, **kwargs) diff --git a/airflow/contrib/operators/gcs_to_gcs.py b/airflow/contrib/operators/gcs_to_gcs.py index 309353f57c728..4f6b4fb956510 100644 --- a/airflow/contrib/operators/gcs_to_gcs.py +++ b/airflow/contrib/operators/gcs_to_gcs.py @@ -16,15 +16,15 @@ # specific language governing permissions and limitations # under the License. """ -This module is deprecated. Please use `airflow.providers.google.cloud.operators.gcs_to_gcs`. +This module is deprecated. Please use `airflow.providers.google.cloud.transfers.gcs_to_gcs`. """ import warnings -from airflow.providers.google.cloud.operators.gcs_to_gcs import GCSToGCSOperator +from airflow.providers.google.cloud.transfers.gcs_to_gcs import GCSToGCSOperator warnings.warn( - "This module is deprecated. Please use `airflow.providers.google.cloud.operators.gcs_to_gcs`.", + "This module is deprecated. Please use `airflow.providers.google.cloud.transfers.gcs_to_gcs`.", DeprecationWarning, stacklevel=2 ) @@ -32,13 +32,13 @@ class GoogleCloudStorageToGoogleCloudStorageOperator(GCSToGCSOperator): """ This class is deprecated. - Please use `airflow.providers.google.cloud.operators.gcs_to_gcs.GCSToGCSOperator`.
+ Please use `airflow.providers.google.cloud.transfers.gcs_to_gcs.GCSToGCSOperator`. """ def __init__(self, *args, **kwargs): warnings.warn( """This class is deprecated. - Please use `airflow.providers.google.cloud.operators.gcs_to_gcs.GCSToGCSOperator`.""", + Please use `airflow.providers.google.cloud.transfers.gcs_to_gcs.GCSToGCSOperator`.""", DeprecationWarning, stacklevel=2 ) super().__init__(*args, **kwargs) diff --git a/airflow/contrib/operators/gcs_to_gdrive_operator.py b/airflow/contrib/operators/gcs_to_gdrive_operator.py index c7415919245bc..6ec7462d950cf 100644 --- a/airflow/contrib/operators/gcs_to_gdrive_operator.py +++ b/airflow/contrib/operators/gcs_to_gdrive_operator.py @@ -15,15 +15,15 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -"""This module is deprecated. Please use `airflow.providers.google.suite.operators.gcs_to_gdrive`.""" +"""This module is deprecated. Please use `airflow.providers.google.suite.transfers.gcs_to_gdrive`.""" import warnings # pylint: disable=unused-import -from airflow.providers.google.suite.operators.gcs_to_gdrive import GCSToGoogleDriveOperator # noqa +from airflow.providers.google.suite.transfers.gcs_to_gdrive import GCSToGoogleDriveOperator # noqa warnings.warn( "This module is deprecated. " - "Please use `airflow.providers.google.suite.operators.gcs_to_gdrive.", + "Please use `airflow.providers.google.suite.transfers.gcs_to_gdrive`.", DeprecationWarning, stacklevel=2 ) diff --git a/airflow/contrib/operators/gcs_to_s3.py b/airflow/contrib/operators/gcs_to_s3.py index e80bc9697f88c..593be1fbcf7f8 100644 --- a/airflow/contrib/operators/gcs_to_s3.py +++ b/airflow/contrib/operators/gcs_to_s3.py @@ -16,27 +16,27 @@ # specific language governing permissions and limitations # under the License. """ -This module is deprecated. Please use `airflow.providers.amazon.aws.operators.gcs_to_s3`. +This module is deprecated. Please use `airflow.providers.amazon.aws.transfers.gcs_to_s3`. """ import warnings -from airflow.providers.amazon.aws.operators.gcs_to_s3 import GCSToS3Operator +from airflow.providers.amazon.aws.transfers.gcs_to_s3 import GCSToS3Operator warnings.warn( - "This module is deprecated. Please use `airflow.providers.amazon.aws.operators.gcs_to_s3`.", + "This module is deprecated. Please use `airflow.providers.amazon.aws.transfers.gcs_to_s3`.", DeprecationWarning, stacklevel=2 ) class GoogleCloudStorageToS3Operator(GCSToS3Operator): """ - This class is deprecated. Please use `airflow.providers.amazon.aws.operators.gcs_to_s3.GCSToS3Operator`. + This class is deprecated. Please use `airflow.providers.amazon.aws.transfers.gcs_to_s3.GCSToS3Operator`. """ def __init__(self, *args, **kwargs): warnings.warn( "This class is deprecated. " - "Please use `airflow.providers.amazon.aws.operators.gcs_to_s3.GCSToS3Operator`.", + "Please use `airflow.providers.amazon.aws.transfers.gcs_to_s3.GCSToS3Operator`.", DeprecationWarning, stacklevel=2 ) super().__init__(*args, **kwargs) diff --git a/airflow/contrib/operators/hive_to_dynamodb.py b/airflow/contrib/operators/hive_to_dynamodb.py index 80fe6c3b75fa1..6784680272273 100644 --- a/airflow/contrib/operators/hive_to_dynamodb.py +++ b/airflow/contrib/operators/hive_to_dynamodb.py @@ -15,14 +15,14 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -"""This module is deprecated.
Please use `airflow.providers.amazon.aws.operators.hive_to_dynamodb`.""" +"""This module is deprecated. Please use `airflow.providers.amazon.aws.transfers.hive_to_dynamodb`.""" import warnings # pylint: disable=unused-import -from airflow.providers.amazon.aws.operators.hive_to_dynamodb import HiveToDynamoDBTransferOperator # noqa +from airflow.providers.amazon.aws.transfers.hive_to_dynamodb import HiveToDynamoDBOperator # noqa warnings.warn( - "This module is deprecated. Please use `airflow.providers.amazon.aws.operators.hive_to_dynamodb`.", + "This module is deprecated. Please use `airflow.providers.amazon.aws.transfers.hive_to_dynamodb`.", DeprecationWarning, stacklevel=2 ) diff --git a/airflow/contrib/operators/imap_attachment_to_s3_operator.py b/airflow/contrib/operators/imap_attachment_to_s3_operator.py index 6f0975e93c4b3..597d6beba0cd3 100644 --- a/airflow/contrib/operators/imap_attachment_to_s3_operator.py +++ b/airflow/contrib/operators/imap_attachment_to_s3_operator.py @@ -15,14 +15,14 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -"""This module is deprecated. Please use `airflow.providers.amazon.aws.operators.imap_attachment_to_s3`.""" +"""This module is deprecated. Please use `airflow.providers.amazon.aws.transfers.imap_attachment_to_s3`.""" import warnings # pylint: disable=unused-import -from airflow.providers.amazon.aws.operators.imap_attachment_to_s3 import ImapAttachmentToS3Operator # noqa +from airflow.providers.amazon.aws.transfers.imap_attachment_to_s3 import ImapAttachmentToS3Operator # noqa warnings.warn( - "This module is deprecated. Please use `airflow.providers.amazon.aws.operators.imap_attachment_to_s3`.", + "This module is deprecated. Please use `airflow.providers.amazon.aws.transfers.imap_attachment_to_s3`.", DeprecationWarning, stacklevel=2 ) diff --git a/airflow/contrib/operators/mongo_to_s3.py b/airflow/contrib/operators/mongo_to_s3.py index 20e33cda6acb1..82449ee5d7a8a 100644 --- a/airflow/contrib/operators/mongo_to_s3.py +++ b/airflow/contrib/operators/mongo_to_s3.py @@ -15,14 +15,14 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -"""This module is deprecated. Please use `airflow.providers.amazon.aws.operators.mongo_to_s3`.""" +"""This module is deprecated. Please use `airflow.providers.amazon.aws.transfers.mongo_to_s3`.""" import warnings # pylint: disable=unused-import -from airflow.providers.amazon.aws.operators.mongo_to_s3 import MongoToS3Operator # noqa +from airflow.providers.amazon.aws.transfers.mongo_to_s3 import MongoToS3Operator # noqa warnings.warn( - "This module is deprecated. Please use `airflow.providers.amazon.aws.operators.mongo_to_s3`.", + "This module is deprecated. Please use `airflow.providers.amazon.aws.transfers.mongo_to_s3`.", DeprecationWarning, stacklevel=2 ) diff --git a/airflow/contrib/operators/mssql_to_gcs.py b/airflow/contrib/operators/mssql_to_gcs.py index 14698f7492799..20be043ebe6e7 100644 --- a/airflow/contrib/operators/mssql_to_gcs.py +++ b/airflow/contrib/operators/mssql_to_gcs.py @@ -15,14 +15,14 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -"""This module is deprecated. Please use `airflow.providers.google.cloud.operators.mssql_to_gcs`.""" +"""This module is deprecated. 
Please use `airflow.providers.google.cloud.transfers.mssql_to_gcs`.""" import warnings -from airflow.providers.google.cloud.operators.mssql_to_gcs import MSSQLToGCSOperator +from airflow.providers.google.cloud.transfers.mssql_to_gcs import MSSQLToGCSOperator warnings.warn( - "This module is deprecated. Please use `airflow.providers.google.cloud.operators.mssql_to_gcs`.", + "This module is deprecated. Please use `airflow.providers.google.cloud.transfers.mssql_to_gcs`.", DeprecationWarning, stacklevel=2 ) @@ -30,13 +30,13 @@ class MsSqlToGoogleCloudStorageOperator(MSSQLToGCSOperator): """ This class is deprecated. - Please use `airflow.providers.google.cloud.operators.mssql_to_gcs.MSSQLToGCSOperator`. + Please use `airflow.providers.google.cloud.transfers.mssql_to_gcs.MSSQLToGCSOperator`. """ def __init__(self, *args, **kwargs): warnings.warn( """This class is deprecated. - Please use `airflow.providers.google.cloud.operators.mssql_to_gcs.MSSQLToGCSOperator`.""", + Please use `airflow.providers.google.cloud.transfers.mssql_to_gcs.MSSQLToGCSOperator`.""", DeprecationWarning, stacklevel=2 ) super().__init__(*args, **kwargs) diff --git a/airflow/contrib/operators/mysql_to_gcs.py b/airflow/contrib/operators/mysql_to_gcs.py index 4935efe97da86..25206a17c4f9a 100644 --- a/airflow/contrib/operators/mysql_to_gcs.py +++ b/airflow/contrib/operators/mysql_to_gcs.py @@ -15,14 +15,14 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -"""This module is deprecated. Please use `airflow.providers.google.cloud.operators.mysql_to_gcs`.""" +"""This module is deprecated. Please use `airflow.providers.google.cloud.transfers.mysql_to_gcs`.""" import warnings -from airflow.providers.google.cloud.operators.mysql_to_gcs import MySQLToGCSOperator +from airflow.providers.google.cloud.transfers.mysql_to_gcs import MySQLToGCSOperator warnings.warn( - "This module is deprecated. Please use `airflow.providers.google.cloud.operators.mysql_to_gcs`.", + "This module is deprecated. Please use `airflow.providers.google.cloud.transfers.mysql_to_gcs`.", DeprecationWarning, stacklevel=2 ) @@ -30,13 +30,13 @@ class MySqlToGoogleCloudStorageOperator(MySQLToGCSOperator): """ This class is deprecated. - Please use `airflow.providers.google.cloud.operators.mysql_to_gcs.MySQLToGCSOperator`. + Please use `airflow.providers.google.cloud.transfers.mysql_to_gcs.MySQLToGCSOperator`. """ def __init__(self, *args, **kwargs): warnings.warn( """This class is deprecated. - Please use `airflow.providers.google.cloud.operators.mysql_to_gcs.MySQLToGCSOperator`.""", + Please use `airflow.providers.google.cloud.transfers.mysql_to_gcs.MySQLToGCSOperator`.""", DeprecationWarning, stacklevel=2 ) super().__init__(*args, **kwargs) diff --git a/airflow/contrib/operators/oracle_to_azure_data_lake_transfer.py b/airflow/contrib/operators/oracle_to_azure_data_lake_transfer.py index 9f4e55400fba4..2cdca434975dd 100644 --- a/airflow/contrib/operators/oracle_to_azure_data_lake_transfer.py +++ b/airflow/contrib/operators/oracle_to_azure_data_lake_transfer.py @@ -17,18 +17,18 @@ # under the License. """ This module is deprecated. -Please use `airflow.providers.microsoft.azure.operators.oracle_to_azure_data_lake_transfer`. +Please use `airflow.providers.microsoft.azure.transfers.oracle_to_azure_data_lake`. 
""" import warnings # pylint: disable=unused-import -from airflow.providers.microsoft.azure.operators.oracle_to_azure_data_lake_transfer import ( # noqa - OracleToAzureDataLakeTransferOperator, +from airflow.providers.microsoft.azure.transfers.oracle_to_azure_data_lake import ( # noqa + OracleToAzureDataLakeOperator, ) warnings.warn( "This module is deprecated. " - "Please use `airflow.providers.microsoft.azure.operators.oracle_to_azure_data_lake_transfer`.", + "Please use `airflow.providers.microsoft.azure.transfers.oracle_to_azure_data_lake`.", DeprecationWarning, stacklevel=2 ) diff --git a/airflow/contrib/operators/oracle_to_oracle_transfer.py b/airflow/contrib/operators/oracle_to_oracle_transfer.py index 12e7b21832019..f1749bcf1827c 100644 --- a/airflow/contrib/operators/oracle_to_oracle_transfer.py +++ b/airflow/contrib/operators/oracle_to_oracle_transfer.py @@ -15,30 +15,30 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -"""This module is deprecated. Please use `airflow.providers.oracle.operators.oracle_to_oracle_transfer`.""" +"""This module is deprecated. Please use `airflow.providers.oracle.transfers.oracle_to_oracle`.""" import warnings # pylint: disable=unused-import -from airflow.providers.oracle.operators.oracle_to_oracle_transfer import OracleToOracleTransferOperator +from airflow.providers.oracle.transfers.oracle_to_oracle import OracleToOracleOperator warnings.warn( - "This module is deprecated. Please use `airflow.providers.oracle.operators.oracle_to_oracle_transfer`.", + "This module is deprecated. Please use `airflow.providers.oracle.transfers.oracle_to_oracle`.", DeprecationWarning, stacklevel=2 ) -class OracleToOracleTransfer(OracleToOracleTransferOperator): +class OracleToOracleTransfer(OracleToOracleOperator): """ This class is deprecated. Please use: - `airflow.providers.oracle.operators.oracle_to_oracle_transfer.OracleToOracleTransferOperator`.""" + `airflow.providers.oracle.transfers.oracle_to_oracle.OracleToOracleOperator`.""" def __init__(self, *args, **kwargs): warnings.warn( """This class is deprecated. Please use - `airflow.providers.oracle.operators.oracle_to_oracle_transfer.OracleToOracleTransferOperator`.""", + `airflow.providers.oracle.transfers.oracle_to_oracle.OracleToOracleOperator`.""", DeprecationWarning, stacklevel=2 ) super().__init__(*args, **kwargs) diff --git a/airflow/contrib/operators/postgres_to_gcs_operator.py b/airflow/contrib/operators/postgres_to_gcs_operator.py index e36def34b59c6..9ad3b3136329c 100644 --- a/airflow/contrib/operators/postgres_to_gcs_operator.py +++ b/airflow/contrib/operators/postgres_to_gcs_operator.py @@ -15,14 +15,14 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -"""This module is deprecated. Please use `airflow.providers.google.cloud.operators.postgres_to_gcs`.""" +"""This module is deprecated. Please use `airflow.providers.google.cloud.transfers.postgres_to_gcs`.""" import warnings -from airflow.providers.google.cloud.operators.postgres_to_gcs import PostgresToGCSOperator +from airflow.providers.google.cloud.transfers.postgres_to_gcs import PostgresToGCSOperator warnings.warn( - "This module is deprecated. Please use `airflow.providers.google.cloud.operators.postgres_to_gcs`.", + "This module is deprecated. 
Please use `airflow.providers.google.cloud.transfers.postgres_to_gcs`.", DeprecationWarning, stacklevel=2 ) @@ -30,12 +30,12 @@ class PostgresToGoogleCloudStorageOperator(PostgresToGCSOperator): """ This class is deprecated. - Please use `airflow.providers.google.cloud.operators.postgres_to_gcs.PostgresToGCSOperator`. + Please use `airflow.providers.google.cloud.transfers.postgres_to_gcs.PostgresToGCSOperator`. """ def __init__(self, *args, **kwargs): warnings.warn( """This class is deprecated. - Please use `airflow.providers.google.cloud.operators.postgres_to_gcs.PostgresToGCSOperator`.""", + Please use `airflow.providers.google.cloud.transfers.postgres_to_gcs.PostgresToGCSOperator`.""", DeprecationWarning, stacklevel=2 ) super().__init__(*args, **kwargs) diff --git a/airflow/contrib/operators/s3_to_gcs_operator.py b/airflow/contrib/operators/s3_to_gcs_operator.py index cedeaf3ffb7db..7a43f761c5402 100644 --- a/airflow/contrib/operators/s3_to_gcs_operator.py +++ b/airflow/contrib/operators/s3_to_gcs_operator.py @@ -15,14 +15,14 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -"""This module is deprecated. Please use `airflow.providers.google.cloud.operators.s3_to_gcs`.""" +"""This module is deprecated. Please use `airflow.providers.google.cloud.transfers.s3_to_gcs`.""" import warnings # pylint: disable=unused-import -from airflow.providers.google.cloud.operators.s3_to_gcs import S3ToGCSOperator # noqa +from airflow.providers.google.cloud.transfers.s3_to_gcs import S3ToGCSOperator # noqa warnings.warn( - "This module is deprecated. Please use `airflow.providers.google.cloud.operators.s3_to_gcs`.", + "This module is deprecated. Please use `airflow.providers.google.cloud.transfers.s3_to_gcs`.", DeprecationWarning, stacklevel=2 ) diff --git a/airflow/contrib/operators/s3_to_sftp_operator.py b/airflow/contrib/operators/s3_to_sftp_operator.py index 3cc7d938b944b..b247ce53816e2 100644 --- a/airflow/contrib/operators/s3_to_sftp_operator.py +++ b/airflow/contrib/operators/s3_to_sftp_operator.py @@ -15,14 +15,14 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -"""This module is deprecated. Please use `airflow.providers.amazon.aws.operators.s3_to_sftp`.""" +"""This module is deprecated. Please use `airflow.providers.amazon.aws.transfers.s3_to_sftp`.""" import warnings # pylint: disable=unused-import -from airflow.providers.amazon.aws.operators.s3_to_sftp import S3ToSFTPOperator # noqa +from airflow.providers.amazon.aws.transfers.s3_to_sftp import S3ToSFTPOperator # noqa warnings.warn( - "This module is deprecated. Please use `airflow.providers.amazon.aws.operators.s3_to_sftp`.", + "This module is deprecated. Please use `airflow.providers.amazon.aws.transfers.s3_to_sftp`.", DeprecationWarning, stacklevel=2 ) diff --git a/airflow/contrib/operators/sftp_to_s3_operator.py b/airflow/contrib/operators/sftp_to_s3_operator.py index 94b218f97e3a6..817880c14c706 100644 --- a/airflow/contrib/operators/sftp_to_s3_operator.py +++ b/airflow/contrib/operators/sftp_to_s3_operator.py @@ -15,14 +15,14 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -"""This module is deprecated. Please use `airflow.providers.amazon.aws.operators.sftp_to_s3`.""" +"""This module is deprecated. 
Please use `airflow.providers.amazon.aws.transfers.sftp_to_s3`.""" import warnings # pylint: disable=unused-import -from airflow.providers.amazon.aws.operators.sftp_to_s3 import SFTPToS3Operator # noqa +from airflow.providers.amazon.aws.transfers.sftp_to_s3 import SFTPToS3Operator # noqa warnings.warn( - "This module is deprecated. Please use `airflow.providers.amazon.aws.operators.sftp_to_s3`.", + "This module is deprecated. Please use `airflow.providers.amazon.aws.transfers.sftp_to_s3`.", DeprecationWarning, stacklevel=2 ) diff --git a/airflow/contrib/operators/sql_to_gcs.py b/airflow/contrib/operators/sql_to_gcs.py index d7f176226ae2a..2206ba97e16ad 100644 --- a/airflow/contrib/operators/sql_to_gcs.py +++ b/airflow/contrib/operators/sql_to_gcs.py @@ -15,14 +15,14 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -"""This module is deprecated. Please use `airflow.providers.google.cloud.operators.sql_to_gcs`.""" +"""This module is deprecated. Please use `airflow.providers.google.cloud.transfers.sql_to_gcs`.""" import warnings -from airflow.providers.google.cloud.operators.sql_to_gcs import BaseSQLToGCSOperator +from airflow.providers.google.cloud.transfers.sql_to_gcs import BaseSQLToGCSOperator warnings.warn( - "This module is deprecated. Please use `airflow.providers.google.cloud.operators.sql_to_gcs`.", + "This module is deprecated. Please use `airflow.providers.google.cloud.transfers.sql_to_gcs`.", DeprecationWarning, stacklevel=2 ) @@ -30,13 +30,13 @@ class BaseSQLToGoogleCloudStorageOperator(BaseSQLToGCSOperator): """ This class is deprecated. - Please use `airflow.providers.google.cloud.operators.sql_to_gcs.BaseSQLToGCSOperator`. + Please use `airflow.providers.google.cloud.transfers.sql_to_gcs.BaseSQLToGCSOperator`. """ def __init__(self, *args, **kwargs): warnings.warn( """This class is deprecated. - Please use `airflow.providers.google.cloud.operators.sql_to_gcs.BaseSQLToGCSOperator`.""", + Please use `airflow.providers.google.cloud.transfers.sql_to_gcs.BaseSQLToGCSOperator`.""", DeprecationWarning, stacklevel=2 ) super().__init__(*args, **kwargs) diff --git a/airflow/contrib/operators/vertica_to_hive.py b/airflow/contrib/operators/vertica_to_hive.py index 6492c631967d1..4c6acbdfd10f1 100644 --- a/airflow/contrib/operators/vertica_to_hive.py +++ b/airflow/contrib/operators/vertica_to_hive.py @@ -15,29 +15,29 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -"""This module is deprecated. Please use `airflow.providers.apache.hive.operators.vertica_to_hive`.""" +"""This module is deprecated. Please use `airflow.providers.apache.hive.transfers.vertica_to_hive`.""" import warnings -from airflow.providers.apache.hive.operators.vertica_to_hive import VerticaToHiveTransferOperator +from airflow.providers.apache.hive.transfers.vertica_to_hive import VerticaToHiveOperator warnings.warn( - "This module is deprecated. Please use `airflow.providers.apache.hive.operators.vertica_to_hive`.", + "This module is deprecated. Please use `airflow.providers.apache.hive.transfers.vertica_to_hive`.", DeprecationWarning, stacklevel=2 ) -class VerticaToHiveTransfer(VerticaToHiveTransferOperator): +class VerticaToHiveTransfer(VerticaToHiveOperator): """ This class is deprecated. 
Please use: - `airflow.providers.apache.hive.operators.vertica_to_hive.VerticaToHiveTransferOperator`.""" + `airflow.providers.apache.hive.transfers.vertica_to_hive.VerticaToHiveOperator`.""" def __init__(self, *args, **kwargs): warnings.warn( """This class is deprecated. Please use - `airflow.providers.apache.hive.operators.vertica_to_hive.VerticaToHiveTransferOperator`.""", + `airflow.providers.apache.hive.transfers.vertica_to_hive.VerticaToHiveOperator`.""", DeprecationWarning, stacklevel=2 ) super().__init__(*args, **kwargs) diff --git a/airflow/contrib/operators/vertica_to_mysql.py b/airflow/contrib/operators/vertica_to_mysql.py index efd6d64fd8564..9f28561c594d6 100644 --- a/airflow/contrib/operators/vertica_to_mysql.py +++ b/airflow/contrib/operators/vertica_to_mysql.py @@ -15,30 +15,30 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -"""This module is deprecated. Please use `airflow.providers.mysql.operators.vertica_to_mysql`.""" +"""This module is deprecated. Please use `airflow.providers.mysql.transfers.vertica_to_mysql`.""" import warnings # pylint: disable=unused-import -from airflow.providers.mysql.operators.vertica_to_mysql import VerticaToMySqlTransferOperator +from airflow.providers.mysql.transfers.vertica_to_mysql import VerticaToMySqlOperator warnings.warn( - "This module is deprecated. Please use `airflow.providers.mysql.operators.vertica_to_mysql`.", + "This module is deprecated. Please use `airflow.providers.mysql.transfers.vertica_to_mysql`.", DeprecationWarning, stacklevel=2 ) -class VerticaToMySqlTransfer(VerticaToMySqlTransferOperator): +class VerticaToMySqlTransfer(VerticaToMySqlOperator): """ This class is deprecated. Please use: - `airflow.providers.mysql.operators.vertica_to_mysql.VerticaToMySqlTransferOperator`.""" + `airflow.providers.mysql.transfers.vertica_to_mysql.VerticaToMySqlOperator`.""" def __init__(self, *args, **kwargs): warnings.warn( """This class is deprecated. Please use - `airflow.providers.mysql.operators.vertica_to_mysql.VerticaToMySqlTransferOperator`.""", + `airflow.providers.mysql.transfers.vertica_to_mysql.VerticaToMySqlOperator`.""", DeprecationWarning, stacklevel=2 ) super().__init__(*args, **kwargs) diff --git a/airflow/operators/gcs_to_s3.py b/airflow/operators/gcs_to_s3.py index 6dc7f7fb30f64..19affc7c48553 100644 --- a/airflow/operators/gcs_to_s3.py +++ b/airflow/operators/gcs_to_s3.py @@ -15,14 +15,14 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -"""This module is deprecated. Please use `airflow.providers.amazon.aws.operators.gcs_to_s3`.""" +"""This module is deprecated. Please use `airflow.providers.amazon.aws.transfers.gcs_to_s3`.""" import warnings # pylint: disable=unused-import -from airflow.providers.amazon.aws.operators.gcs_to_s3 import GCSToS3Operator # noqa +from airflow.providers.amazon.aws.transfers.gcs_to_s3 import GCSToS3Operator # noqa warnings.warn( - "This module is deprecated. Please use `airflow.providers.amazon.aws.operators.gcs_to_s3`.", + "This module is deprecated. 
Please use `airflow.providers.amazon.aws.transfers.gcs_to_s3`.", DeprecationWarning, stacklevel=2 ) diff --git a/airflow/operators/google_api_to_s3_transfer.py b/airflow/operators/google_api_to_s3_transfer.py index c1c22a53506d4..a8fbf0b54c74c 100644 --- a/airflow/operators/google_api_to_s3_transfer.py +++ b/airflow/operators/google_api_to_s3_transfer.py @@ -17,32 +17,32 @@ # under the License. """ This module is deprecated. -Please use `airflow.providers.amazon.aws.operators.google_api_to_s3_transfer`. +Please use `airflow.providers.amazon.aws.transfers.google_api_to_s3`. """ import warnings -from airflow.providers.amazon.aws.operators.google_api_to_s3_transfer import GoogleApiToS3TransferOperator +from airflow.providers.amazon.aws.transfers.google_api_to_s3 import GoogleApiToS3Operator warnings.warn( "This module is deprecated. " - "Please use `airflow.providers.amazon.aws.operators.google_api_to_s3_transfer`.", + "Please use `airflow.providers.amazon.aws.transfers.google_api_to_s3`.", DeprecationWarning, stacklevel=2 ) -class GoogleApiToS3Transfer(GoogleApiToS3TransferOperator): +class GoogleApiToS3Transfer(GoogleApiToS3Operator): """ This class is deprecated. Please use: - `airflow.providers.amazon.aws.operators.google_api_to_s3_transfer.GoogleApiToS3TransferOperator`.""" + `airflow.providers.amazon.aws.transfers.google_api_to_s3.GoogleApiToS3Operator`.""" def __init__(self, *args, **kwargs): warnings.warn( """This class is deprecated. Please use - `airflow.providers.amazon.aws.operators.""" + - "google_api_to_s3_transfer.GoogleApiToS3TransferOperator`.", + `airflow.providers.amazon.aws.transfers.""" + + "google_api_to_s3.GoogleApiToS3Operator`.", DeprecationWarning, stacklevel=2 ) super().__init__(*args, **kwargs) diff --git a/airflow/operators/hive_to_druid.py b/airflow/operators/hive_to_druid.py index 1cdd162b66be3..1fd70fb7b7b9f 100644 --- a/airflow/operators/hive_to_druid.py +++ b/airflow/operators/hive_to_druid.py @@ -15,30 +15,30 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -"""This module is deprecated. Please use `airflow.providers.apache.druid.operators.hive_to_druid`.""" +"""This module is deprecated. Please use `airflow.providers.apache.druid.transfers.hive_to_druid`.""" import warnings # pylint: disable=unused-import -from airflow.providers.apache.druid.operators.hive_to_druid import HiveToDruidTransferOperator +from airflow.providers.apache.druid.transfers.hive_to_druid import HiveToDruidOperator warnings.warn( - "This module is deprecated. Please use `airflow.providers.apache.druid.operators.hive_to_druid`.", + "This module is deprecated. Please use `airflow.providers.apache.druid.transfers.hive_to_druid`.", DeprecationWarning, stacklevel=2 ) -class HiveToDruidTransfer(HiveToDruidTransferOperator): +class HiveToDruidTransfer(HiveToDruidOperator): """ This class is deprecated.
Please use - `airflow.providers.apache.druid.operators.hive_to_druid.HiveToDruidTransferOperator`.""", + `airflow.providers.apache.druid.transfers.hive_to_druid.HiveToDruidOperator`.""", DeprecationWarning, stacklevel=2 ) super().__init__(*args, **kwargs) diff --git a/airflow/operators/hive_to_mysql.py b/airflow/operators/hive_to_mysql.py index bca1cf83cff7c..f27a1f89de840 100644 --- a/airflow/operators/hive_to_mysql.py +++ b/airflow/operators/hive_to_mysql.py @@ -15,30 +15,30 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -"""This module is deprecated. Please use `airflow.providers.apache.hive.operators.hive_to_mysql`.""" +"""This module is deprecated. Please use `airflow.providers.apache.hive.transfers.hive_to_mysql`.""" import warnings # pylint: disable=unused-import -from airflow.providers.apache.hive.operators.hive_to_mysql import HiveToMySqlTransferOperator +from airflow.providers.apache.hive.transfers.hive_to_mysql import HiveToMySqlOperator warnings.warn( - "This module is deprecated. Please use `airflow.providers.apache.hive.operators.hive_to_mysql`.", + "This module is deprecated. Please use `airflow.providers.apache.hive.transfers.hive_to_mysql`.", DeprecationWarning, stacklevel=2 ) -class HiveToMySqlTransfer(HiveToMySqlTransferOperator): +class HiveToMySqlTransfer(HiveToMySqlOperator): """ This class is deprecated. Please use: - `airflow.providers.apache.hive.operators.hive_to_mysql.HiveToMySqlTransferOperator`.""" + `airflow.providers.apache.hive.transfers.hive_to_mysql.HiveToMySqlOperator`.""" def __init__(self, *args, **kwargs): warnings.warn( """This class is deprecated. Please use - `airflow.providers.apache.hive.operators.hive_to_mysql.HiveToMySqlTransferOperator`.""", + `airflow.providers.apache.hive.transfers.hive_to_mysql.HiveToMySqlOperator`.""", DeprecationWarning, stacklevel=2 ) super().__init__(*args, **kwargs) diff --git a/airflow/operators/hive_to_samba_operator.py b/airflow/operators/hive_to_samba_operator.py index ba4dc3d904174..d1d1e942e9ef1 100644 --- a/airflow/operators/hive_to_samba_operator.py +++ b/airflow/operators/hive_to_samba_operator.py @@ -15,14 +15,14 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -"""This module is deprecated. Please use `airflow.providers.apache.hive.operators.hive_to_samba`.""" +"""This module is deprecated. Please use `airflow.providers.apache.hive.transfers.hive_to_samba`.""" import warnings # pylint: disable=unused-import -from airflow.providers.apache.hive.operators.hive_to_samba import Hive2SambaOperator # noqa +from airflow.providers.apache.hive.transfers.hive_to_samba import HiveToSambaOperator # noqa warnings.warn( - "This module is deprecated. Please use `airflow.providers.apache.hive.operators.hive_to_samba`.", + "This module is deprecated. Please use `airflow.providers.apache.hive.transfers.hive_to_samba`.", DeprecationWarning, stacklevel=2 ) diff --git a/airflow/operators/mssql_to_hive.py b/airflow/operators/mssql_to_hive.py index 756ce30758488..50a98845fec7b 100644 --- a/airflow/operators/mssql_to_hive.py +++ b/airflow/operators/mssql_to_hive.py @@ -15,29 +15,29 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -"""This module is deprecated. Please use `airflow.providers.apache.hive.operators.mssql_to_hive`.""" +"""This module is deprecated. 
Please use `airflow.providers.apache.hive.transfers.mssql_to_hive`.""" import warnings -from airflow.providers.apache.hive.operators.mssql_to_hive import MsSqlToHiveTransferOperator +from airflow.providers.apache.hive.transfers.mssql_to_hive import MsSqlToHiveOperator warnings.warn( - "This module is deprecated. Please use `airflow.providers.apache.hive.operators.mssql_to_hive`.", + "This module is deprecated. Please use `airflow.providers.apache.hive.transfers.mssql_to_hive`.", DeprecationWarning, stacklevel=2 ) -class MsSqlToHiveTransfer(MsSqlToHiveTransferOperator): +class MsSqlToHiveTransfer(MsSqlToHiveOperator): """ This class is deprecated. Please use: - `airflow.providers.apache.hive.operators.mssql_to_hive.MsSqlToHiveTransferOperator`.""" + `airflow.providers.apache.hive.transfers.mssql_to_hive.MsSqlToHiveOperator`.""" def __init__(self, *args, **kwargs): warnings.warn( """This class is deprecated. Please use - `airflow.providers.apache.hive.operators.mssql_to_hive.MsSqlToHiveTransferOperator`.""", + `airflow.providers.apache.hive.transfers.mssql_to_hive.MsSqlToHiveOperator`.""", DeprecationWarning, stacklevel=2 ) super().__init__(*args, **kwargs) diff --git a/airflow/operators/mysql_to_hive.py b/airflow/operators/mysql_to_hive.py index 5c45075951e77..82b2082aa7ab1 100644 --- a/airflow/operators/mysql_to_hive.py +++ b/airflow/operators/mysql_to_hive.py @@ -15,28 +15,28 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -"""This module is deprecated. Please use `airflow.providers.apache.hive.operators.mysql_to_hive`.""" +"""This module is deprecated. Please use `airflow.providers.apache.hive.transfers.mysql_to_hive`.""" import warnings -from airflow.providers.apache.hive.operators.mysql_to_hive import MySqlToHiveTransferOperator +from airflow.providers.apache.hive.transfers.mysql_to_hive import MySqlToHiveOperator warnings.warn( - "This module is deprecated. Please use `airflow.providers.apache.hive.operators.mysql_to_hive`.", + "This module is deprecated. Please use `airflow.providers.apache.hive.transfers.mysql_to_hive`.", DeprecationWarning, stacklevel=2 ) -class MySqlToHiveTransfer(MySqlToHiveTransferOperator): +class MySqlToHiveTransfer(MySqlToHiveOperator): """ This class is deprecated. - Please use `airflow.providers.apache.hive.operators.mysql_to_hive.MySqlToHiveTransferOperator`. + Please use `airflow.providers.apache.hive.transfers.mysql_to_hive.MySqlToHiveOperator`. """ def __init__(self, *args, **kwargs): warnings.warn( """This class is deprecated. - Please use `airflow.providers.apache.hive.operators.mysql_to_hive.MySqlToHiveTransferOperator`.""", + Please use `airflow.providers.apache.hive.transfers.mysql_to_hive.MySqlToHiveOperator`.""", DeprecationWarning, stacklevel=2 ) super().__init__(*args, **kwargs) diff --git a/airflow/operators/presto_to_mysql.py b/airflow/operators/presto_to_mysql.py index a7b4aee024d58..55e10f27b389e 100644 --- a/airflow/operators/presto_to_mysql.py +++ b/airflow/operators/presto_to_mysql.py @@ -15,30 +15,30 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -"""This module is deprecated. Please use `airflow.providers.mysql.operators.presto_to_mysql`.""" +"""This module is deprecated. 
Please use `airflow.providers.mysql.transfers.presto_to_mysql`.""" import warnings # pylint: disable=unused-import -from airflow.providers.mysql.operators.presto_to_mysql import PrestoToMySqlTransferOperator +from airflow.providers.mysql.transfers.presto_to_mysql import PrestoToMySqlOperator warnings.warn( - "This module is deprecated. Please use `airflow.providers.mysql.operators.presto_to_mysql`.", + "This module is deprecated. Please use `airflow.providers.mysql.transfers.presto_to_mysql`.", DeprecationWarning, stacklevel=2 ) -class PrestoToMySqlTransfer(PrestoToMySqlTransferOperator): +class PrestoToMySqlTransfer(PrestoToMySqlOperator): """ This class is deprecated. Please use: - `airflow.providers.mysql.operators.presto_to_mysql.PrestoToMySqlTransferOperator`.""" + `airflow.providers.mysql.transfers.presto_to_mysql.PrestoToMySqlOperator`.""" def __init__(self, *args, **kwargs): warnings.warn( """This class is deprecated. Please use - `airflow.providers.mysql.operators.presto_to_mysql.PrestoToMySqlTransferOperator`.""", + `airflow.providers.mysql.transfers.presto_to_mysql.PrestoToMySqlOperator`.""", DeprecationWarning, stacklevel=2 ) super().__init__(*args, **kwargs) diff --git a/airflow/operators/redshift_to_s3_operator.py b/airflow/operators/redshift_to_s3_operator.py index 9b4eb3e857749..2b9fcc0128cba 100644 --- a/airflow/operators/redshift_to_s3_operator.py +++ b/airflow/operators/redshift_to_s3_operator.py @@ -15,29 +15,29 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -"""This module is deprecated. Please use `airflow.providers.amazon.aws.operators.redshift_to_s3`.""" +"""This module is deprecated. Please use `airflow.providers.amazon.aws.transfers.redshift_to_s3`.""" import warnings -from airflow.providers.amazon.aws.operators.redshift_to_s3 import RedshiftToS3TransferOperator +from airflow.providers.amazon.aws.transfers.redshift_to_s3 import RedshiftToS3Operator warnings.warn( - "This module is deprecated. Please use `airflow.providers.amazon.aws.operators.redshift_to_s3`.", + "This module is deprecated. Please use `airflow.providers.amazon.aws.transfers.redshift_to_s3`.", DeprecationWarning, stacklevel=2 ) -class RedshiftToS3Transfer(RedshiftToS3TransferOperator): +class RedshiftToS3Transfer(RedshiftToS3Operator): """ This class is deprecated. Please use: - `airflow.providers.amazon.aws.operators.redshift_to_s3.RedshiftToS3TransferOperator`.""" + `airflow.providers.amazon.aws.transfers.redshift_to_s3.RedshiftToS3Operator`.""" def __init__(self, *args, **kwargs): warnings.warn( """This class is deprecated. Please use - `airflow.providers.amazon.aws.operators.redshift_to_s3.RedshiftToS3TransferOperator`.""", + `airflow.providers.amazon.aws.transfers.redshift_to_s3.RedshiftToS3Operator`.""", DeprecationWarning, stacklevel=2 ) super().__init__(*args, **kwargs) diff --git a/airflow/operators/s3_to_hive_operator.py b/airflow/operators/s3_to_hive_operator.py index d353b4e36e629..f1305e90c67f2 100644 --- a/airflow/operators/s3_to_hive_operator.py +++ b/airflow/operators/s3_to_hive_operator.py @@ -15,28 +15,28 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -"""This module is deprecated. Please use `airflow.providers.apache.hive.operators.s3_to_hive`.""" +"""This module is deprecated. 
Please use `airflow.providers.apache.hive.transfers.s3_to_hive`.""" import warnings -from airflow.providers.apache.hive.operators.s3_to_hive import S3ToHiveTransferOperator +from airflow.providers.apache.hive.transfers.s3_to_hive import S3ToHiveOperator warnings.warn( - "This module is deprecated. Please use `airflow.providers.apache.hive.operators.s3_to_hive`.", + "This module is deprecated. Please use `airflow.providers.apache.hive.transfers.s3_to_hive`.", DeprecationWarning, stacklevel=2 ) -class S3ToHiveTransfer(S3ToHiveTransferOperator): +class S3ToHiveTransfer(S3ToHiveOperator): """ This class is deprecated. - Please use `airflow.providers.apache.hive.operators.s3_to_hive.S3ToHiveTransferOperator`. + Please use `airflow.providers.apache.hive.transfers.s3_to_hive.S3ToHiveOperator`. """ def __init__(self, *args, **kwargs): warnings.warn( """This class is deprecated. - Please use `airflow.providers.apache.hive.operators.s3_to_hive.S3ToHiveTransferOperator`.""", + Please use `airflow.providers.apache.hive.transfers.s3_to_hive.S3ToHiveOperator`.""", DeprecationWarning, stacklevel=2 ) super().__init__(*args, **kwargs) diff --git a/airflow/operators/s3_to_redshift_operator.py b/airflow/operators/s3_to_redshift_operator.py index 74d37da84db36..4272ae8e96073 100644 --- a/airflow/operators/s3_to_redshift_operator.py +++ b/airflow/operators/s3_to_redshift_operator.py @@ -19,25 +19,25 @@ import warnings -from airflow.providers.amazon.aws.operators.s3_to_redshift import S3ToRedshiftTransferOperator +from airflow.providers.amazon.aws.transfers.s3_to_redshift import S3ToRedshiftOperator warnings.warn( - "This module is deprecated. Please use `airflow.providers.amazon.aws.operators.s3_to_redshift`.", + "This module is deprecated. Please use `airflow.providers.amazon.aws.transfers.s3_to_redshift`.", DeprecationWarning, stacklevel=2 ) -class S3ToRedshiftTransfer(S3ToRedshiftTransferOperator): +class S3ToRedshiftTransfer(S3ToRedshiftOperator): """ This class is deprecated. Please use: - `airflow.providers.amazon.aws.operators.s3_to_redshift.S3ToRedshiftTransferOperator`.""" + `airflow.providers.amazon.aws.transfers.s3_to_redshift.S3ToRedshiftOperator`.""" def __init__(self, *args, **kwargs): warnings.warn( """This class is deprecated. 
Please use - `airflow.providers.amazon.aws.operators.s3_to_redshift.S3ToRedshiftTransferOperator`.""", + `airflow.providers.amazon.aws.transfers.s3_to_redshift.S3ToRedshiftOperator`.""", DeprecationWarning, stacklevel=2 ) super().__init__(*args, **kwargs) diff --git a/airflow/providers/amazon/PROVIDERS_CHANGES_2020.05.20.md b/airflow/providers/amazon/PROVIDERS_CHANGES_2020.05.20.md index 5caa145281b46..19612589035c4 100644 --- a/airflow/providers/amazon/PROVIDERS_CHANGES_2020.05.20.md +++ b/airflow/providers/amazon/PROVIDERS_CHANGES_2020.05.20.md @@ -4,6 +4,14 @@ | Commit | Committed | Subject | |:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| [58a8ec0e4](https://github.com/apache/airflow/commit/58a8ec0e46f624ee0369dd156dd8fb4f81884a21) | 2020-06-16 | AWSBatchOperator <> ClientHook relation changed to composition (#9306) | +| [a80cd25e8](https://github.com/apache/airflow/commit/a80cd25e8eb7f8b5d89af26cdcd62a5bbe44d65c) | 2020-06-15 | Close/Flush byte stream in s3 hook load_string and load_bytes (#9211) | +| [ffb857403](https://github.com/apache/airflow/commit/ffb85740373f7adb70d28ec7d5a8886380170e5e) | 2020-06-14 | Decrypt secrets from SystemsManagerParameterStoreBackend (#9214) | +| [a69b031f2](https://github.com/apache/airflow/commit/a69b031f20c5a1cd032f9873394374f661811e8f) | 2020-06-10 | Add S3ToRedshift example dag and system test (#8877) | +| [17adcea83](https://github.com/apache/airflow/commit/17adcea835cb7b0cf2d8da0ac7dda5549cfa3e45) | 2020-06-02 | Fix handling of subprocess error handling in s3_file_transform and gcs (#9106) | +| [357e11e0c](https://github.com/apache/airflow/commit/357e11e0cfb4c02833018e073bc4f5e5b52fae4f) | 2020-05-29 | Add Delete/Create S3 bucket operators (#8895) | +| [1ed171bfb](https://github.com/apache/airflow/commit/1ed171bfb265ded8674058bdc425640d25f1f4fc) | 2020-05-28 | Add script_args for S3FileTransformOperator (#9019) | +| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. 
(#8994) | | [1d36b0303](https://github.com/apache/airflow/commit/1d36b0303b8632fce6de78ca4e782ae26ee06fea) | 2020-05-23 | Fix references in docs (#8984) | | [f946f96da](https://github.com/apache/airflow/commit/f946f96da45d8e6101805450d8cab7ccb2774ad0) | 2020-05-23 | Old json boto compat removed from dynamodb_to_s3 operator (#8987) | diff --git a/airflow/providers/amazon/README.md b/airflow/providers/amazon/README.md index 23df486f69a95..466449d9a5ad0 100644 --- a/airflow/providers/amazon/README.md +++ b/airflow/providers/amazon/README.md @@ -33,14 +33,14 @@ Release: 2020.5.20 - [Operators](#operators) - [New operators](#new-operators) - [Moved operators](#moved-operators) + - [Transfer operators](#transfers) + - [Moved transfer operators](#moved-transfers) - [Sensors](#sensors) - [New sensors](#new-sensors) - [Moved sensors](#moved-sensors) - [Hooks](#hooks) - [New hooks](#new-hooks) - [Moved hooks](#moved-hooks) - - [Protocols](#protocols) - - [Moved protocols](#moved-protocols) - [Secrets](#secrets) - [Moved secrets](#moved-secrets) - [Releases](#releases) @@ -114,6 +114,8 @@ All classes in Airflow 2.0 are in `airflow.providers.amazon` package. | [aws.operators.ec2_stop_instance.EC2StopInstanceOperator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/operators/ec2_stop_instance.py) | | [aws.operators.emr_modify_cluster.EmrModifyClusterOperator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/operators/emr_modify_cluster.py) | | [aws.operators.glue.AwsGlueJobOperator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/operators/glue.py) | +| [aws.operators.s3_bucket.S3CreateBucketOperator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/operators/s3_bucket.py) | +| [aws.operators.s3_bucket.S3DeleteBucketOperator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/operators/s3_bucket.py) | | [aws.operators.s3_file_transform.S3FileTransformOperator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/operators/s3_file_transform.py) | @@ -124,22 +126,13 @@ All classes in Airflow 2.0 are in `airflow.providers.amazon` package. 
|:---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|:------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| | [aws.operators.athena.AWSAthenaOperator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/operators/athena.py) | [contrib.operators.aws_athena_operator.AWSAthenaOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/aws_athena_operator.py) | | [aws.operators.batch.AwsBatchOperator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/operators/batch.py) | [contrib.operators.awsbatch_operator.AWSBatchOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/awsbatch_operator.py) | -| [aws.operators.dynamodb_to_s3.DynamoDBToS3Operator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/operators/dynamodb_to_s3.py) | [contrib.operators.dynamodb_to_s3.DynamoDBToS3Operator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/dynamodb_to_s3.py) | | [aws.operators.ecs.ECSOperator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/operators/ecs.py) | [contrib.operators.ecs_operator.ECSOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/ecs_operator.py) | | [aws.operators.emr_add_steps.EmrAddStepsOperator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/operators/emr_add_steps.py) | [contrib.operators.emr_add_steps_operator.EmrAddStepsOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/emr_add_steps_operator.py) | | [aws.operators.emr_create_job_flow.EmrCreateJobFlowOperator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/operators/emr_create_job_flow.py) | [contrib.operators.emr_create_job_flow_operator.EmrCreateJobFlowOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/emr_create_job_flow_operator.py) | | [aws.operators.emr_terminate_job_flow.EmrTerminateJobFlowOperator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/operators/emr_terminate_job_flow.py) | [contrib.operators.emr_terminate_job_flow_operator.EmrTerminateJobFlowOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/emr_terminate_job_flow_operator.py) | -| [aws.operators.gcs_to_s3.GCSToS3Operator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/operators/gcs_to_s3.py) | [operators.gcs_to_s3.GCSToS3Operator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/operators/gcs_to_s3.py) | -| [aws.operators.google_api_to_s3_transfer.GoogleApiToS3TransferOperator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/operators/google_api_to_s3_transfer.py) | [operators.google_api_to_s3_transfer.GoogleApiToS3Transfer](https://github.com/apache/airflow/blob/v1-10-stable/airflow/operators/google_api_to_s3_transfer.py) | -| [aws.operators.hive_to_dynamodb.HiveToDynamoDBTransferOperator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/operators/hive_to_dynamodb.py) | 
[contrib.operators.hive_to_dynamodb.HiveToDynamoDBTransferOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/hive_to_dynamodb.py) | -| [aws.operators.imap_attachment_to_s3.ImapAttachmentToS3Operator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/operators/imap_attachment_to_s3.py) | [contrib.operators.imap_attachment_to_s3_operator.ImapAttachmentToS3Operator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/imap_attachment_to_s3_operator.py) | -| [aws.operators.mongo_to_s3.MongoToS3Operator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/operators/mongo_to_s3.py) | [contrib.operators.mongo_to_s3.MongoToS3Operator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/mongo_to_s3.py) | -| [aws.operators.redshift_to_s3.RedshiftToS3TransferOperator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/operators/redshift_to_s3.py) | [operators.redshift_to_s3_operator.RedshiftToS3Transfer](https://github.com/apache/airflow/blob/v1-10-stable/airflow/operators/redshift_to_s3_operator.py) | | [aws.operators.s3_copy_object.S3CopyObjectOperator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/operators/s3_copy_object.py) | [contrib.operators.s3_copy_object_operator.S3CopyObjectOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/s3_copy_object_operator.py) | | [aws.operators.s3_delete_objects.S3DeleteObjectsOperator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/operators/s3_delete_objects.py) | [contrib.operators.s3_delete_objects_operator.S3DeleteObjectsOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/s3_delete_objects_operator.py) | | [aws.operators.s3_list.S3ListOperator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/operators/s3_list.py) | [contrib.operators.s3_list_operator.S3ListOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/s3_list_operator.py) | -| [aws.operators.s3_to_redshift.S3ToRedshiftTransferOperator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/operators/s3_to_redshift.py) | [operators.s3_to_redshift_operator.S3ToRedshiftTransfer](https://github.com/apache/airflow/blob/v1-10-stable/airflow/operators/s3_to_redshift_operator.py) | -| [aws.operators.s3_to_sftp.S3ToSFTPOperator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/operators/s3_to_sftp.py) | [contrib.operators.s3_to_sftp_operator.S3ToSFTPOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/s3_to_sftp_operator.py) | | [aws.operators.sagemaker_base.SageMakerBaseOperator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/operators/sagemaker_base.py) | [contrib.operators.sagemaker_base_operator.SageMakerBaseOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/sagemaker_base_operator.py) | | [aws.operators.sagemaker_endpoint.SageMakerEndpointOperator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/operators/sagemaker_endpoint.py) | [contrib.operators.sagemaker_endpoint_operator.SageMakerEndpointOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/sagemaker_endpoint_operator.py) | | 
[aws.operators.sagemaker_endpoint_config.SageMakerEndpointConfigOperator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/operators/sagemaker_endpoint_config.py) | [contrib.operators.sagemaker_endpoint_config_operator.SageMakerEndpointConfigOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/sagemaker_endpoint_config_operator.py) | @@ -147,13 +140,33 @@ All classes in Airflow 2.0 are in `airflow.providers.amazon` package. | [aws.operators.sagemaker_training.SageMakerTrainingOperator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/operators/sagemaker_training.py) | [contrib.operators.sagemaker_training_operator.SageMakerTrainingOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/sagemaker_training_operator.py) | | [aws.operators.sagemaker_transform.SageMakerTransformOperator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/operators/sagemaker_transform.py) | [contrib.operators.sagemaker_transform_operator.SageMakerTransformOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/sagemaker_transform_operator.py) | | [aws.operators.sagemaker_tuning.SageMakerTuningOperator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/operators/sagemaker_tuning.py) | [contrib.operators.sagemaker_tuning_operator.SageMakerTuningOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/sagemaker_tuning_operator.py) | -| [aws.operators.sftp_to_s3.SFTPToS3Operator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/operators/sftp_to_s3.py) | [contrib.operators.sftp_to_s3_operator.SFTPToS3Operator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/sftp_to_s3_operator.py) | | [aws.operators.sns.SnsPublishOperator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/operators/sns.py) | [contrib.operators.sns_publish_operator.SnsPublishOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/sns_publish_operator.py) | | [aws.operators.sqs.SQSPublishOperator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/operators/sqs.py) | [contrib.operators.aws_sqs_publish_operator.SQSPublishOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/aws_sqs_publish_operator.py) | + + + +### Moved transfer operators + +| Airflow 2.0 transfers: `airflow.providers.amazon` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | +|:--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|:-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| [aws.transfers.dynamodb_to_s3.DynamoDBToS3Operator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/transfers/dynamodb_to_s3.py) | [contrib.operators.dynamodb_to_s3.DynamoDBToS3Operator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/dynamodb_to_s3.py) | +| [aws.transfers.gcs_to_s3.GCSToS3Operator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/transfers/gcs_to_s3.py) | 
[operators.gcs_to_s3.GCSToS3Operator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/operators/gcs_to_s3.py) | +| [aws.transfers.google_api_to_s3.GoogleApiToS3Operator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/transfers/google_api_to_s3.py) | [operators.google_api_to_s3_transfer.GoogleApiToS3Transfer](https://github.com/apache/airflow/blob/v1-10-stable/airflow/operators/google_api_to_s3_transfer.py) | +| [aws.transfers.hive_to_dynamodb.HiveToDynamoDBOperator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/transfers/hive_to_dynamodb.py) | [contrib.operators.hive_to_dynamodb.HiveToDynamoDBOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/hive_to_dynamodb.py) | +| [aws.transfers.imap_attachment_to_s3.ImapAttachmentToS3Operator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/transfers/imap_attachment_to_s3.py) | [contrib.operators.imap_attachment_to_s3_operator.ImapAttachmentToS3Operator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/imap_attachment_to_s3_operator.py) | +| [aws.transfers.mongo_to_s3.MongoToS3Operator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/transfers/mongo_to_s3.py) | [contrib.operators.mongo_to_s3.MongoToS3Operator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/mongo_to_s3.py) | +| [aws.transfers.redshift_to_s3.RedshiftToS3Operator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/transfers/redshift_to_s3.py) | [operators.redshift_to_s3_operator.RedshiftToS3Transfer](https://github.com/apache/airflow/blob/v1-10-stable/airflow/operators/redshift_to_s3_operator.py) | +| [aws.transfers.s3_to_redshift.S3ToRedshiftOperator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/transfers/s3_to_redshift.py) | [operators.s3_to_redshift_operator.S3ToRedshiftTransfer](https://github.com/apache/airflow/blob/v1-10-stable/airflow/operators/s3_to_redshift_operator.py) | +| [aws.transfers.s3_to_sftp.S3ToSFTPOperator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/transfers/s3_to_sftp.py) | [contrib.operators.s3_to_sftp_operator.S3ToSFTPOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/s3_to_sftp_operator.py) | +| [aws.transfers.sftp_to_s3.SFTPToS3Operator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/transfers/sftp_to_s3.py) | [contrib.operators.sftp_to_s3_operator.SFTPToS3Operator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/sftp_to_s3_operator.py) | + + + + ## Sensors @@ -223,18 +236,6 @@ All classes in Airflow 2.0 are in `airflow.providers.amazon` package. 
-## Protocols - - - -### Moved protocols - -| Airflow 2.0 protocols: `airflow.providers.amazon` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:--------------------------------------------------------------------------------------------------------------------------------------------|:--------------------------------------------------------------------------------------------------------------------------------------------------------| -| [aws.hooks.batch_client.AwsBatchProtocol](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/hooks/batch_client.py) | [contrib.operators.awsbatch_operator.BatchProtocol](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/awsbatch_operator.py) | -| [aws.operators.ecs.ECSProtocol](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/operators/ecs.py) | [contrib.operators.ecs_operator.ECSProtocol](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/ecs_operator.py) | - - ## Secrets @@ -256,6 +257,14 @@ All classes in Airflow 2.0 are in `airflow.providers.amazon` package. | Commit | Committed | Subject | |:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| [58a8ec0e4](https://github.com/apache/airflow/commit/58a8ec0e46f624ee0369dd156dd8fb4f81884a21) | 2020-06-16 | AWSBatchOperator <> ClientHook relation changed to composition (#9306) | +| [a80cd25e8](https://github.com/apache/airflow/commit/a80cd25e8eb7f8b5d89af26cdcd62a5bbe44d65c) | 2020-06-15 | Close/Flush byte stream in s3 hook load_string and load_bytes (#9211) | +| [ffb857403](https://github.com/apache/airflow/commit/ffb85740373f7adb70d28ec7d5a8886380170e5e) | 2020-06-14 | Decrypt secrets from SystemsManagerParameterStoreBackend (#9214) | +| [a69b031f2](https://github.com/apache/airflow/commit/a69b031f20c5a1cd032f9873394374f661811e8f) | 2020-06-10 | Add S3ToRedshift example dag and system test (#8877) | +| [17adcea83](https://github.com/apache/airflow/commit/17adcea835cb7b0cf2d8da0ac7dda5549cfa3e45) | 2020-06-02 | Fix handling of subprocess error handling in s3_file_transform and gcs (#9106) | +| [357e11e0c](https://github.com/apache/airflow/commit/357e11e0cfb4c02833018e073bc4f5e5b52fae4f) | 2020-05-29 | Add Delete/Create S3 bucket operators (#8895) | +| [1ed171bfb](https://github.com/apache/airflow/commit/1ed171bfb265ded8674058bdc425640d25f1f4fc) | 2020-05-28 | Add script_args for S3FileTransformOperator (#9019) | +| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. 
(#8994) | | [1d36b0303](https://github.com/apache/airflow/commit/1d36b0303b8632fce6de78ca4e782ae26ee06fea) | 2020-05-23 | Fix references in docs (#8984) | | [f946f96da](https://github.com/apache/airflow/commit/f946f96da45d8e6101805450d8cab7ccb2774ad0) | 2020-05-23 | Old json boto compat removed from dynamodb_to_s3 operator (#8987) | diff --git a/airflow/providers/amazon/aws/example_dags/example_google_api_to_s3_transfer_advanced.py b/airflow/providers/amazon/aws/example_dags/example_google_api_to_s3_transfer_advanced.py index 6dd9117bcae2d..057f29b115b95 100644 --- a/airflow/providers/amazon/aws/example_dags/example_google_api_to_s3_transfer_advanced.py +++ b/airflow/providers/amazon/aws/example_dags/example_google_api_to_s3_transfer_advanced.py @@ -38,7 +38,7 @@ from airflow import DAG from airflow.operators.dummy_operator import DummyOperator from airflow.operators.python import BranchPythonOperator -from airflow.providers.amazon.aws.operators.google_api_to_s3_transfer import GoogleApiToS3TransferOperator +from airflow.providers.amazon.aws.transfers.google_api_to_s3 import GoogleApiToS3Operator from airflow.utils.dates import days_ago # [START howto_operator_google_api_to_s3_transfer_advanced_env_variables] @@ -79,7 +79,7 @@ def _check_and_transform_video_ids(xcom_key, task_ids, task_instance, **kwargs): tags=['example'] ) as dag: # [START howto_operator_google_api_to_s3_transfer_advanced_task_1] - task_video_ids_to_s3 = GoogleApiToS3TransferOperator( + task_video_ids_to_s3 = GoogleApiToS3Operator( gcp_conn_id=YOUTUBE_CONN_ID, google_api_service_name='youtube', google_api_service_version='v3', @@ -109,7 +109,7 @@ def _check_and_transform_video_ids(xcom_key, task_ids, task_instance, **kwargs): ) # [END howto_operator_google_api_to_s3_transfer_advanced_task_1_1] # [START howto_operator_google_api_to_s3_transfer_advanced_task_2] - task_video_data_to_s3 = GoogleApiToS3TransferOperator( + task_video_data_to_s3 = GoogleApiToS3Operator( gcp_conn_id=YOUTUBE_CONN_ID, google_api_service_name='youtube', google_api_service_version='v3', diff --git a/airflow/providers/amazon/aws/example_dags/example_google_api_to_s3_transfer_basic.py b/airflow/providers/amazon/aws/example_dags/example_google_api_to_s3_transfer_basic.py index f329f1d890e7b..07290ca4c2993 100644 --- a/airflow/providers/amazon/aws/example_dags/example_google_api_to_s3_transfer_basic.py +++ b/airflow/providers/amazon/aws/example_dags/example_google_api_to_s3_transfer_basic.py @@ -23,7 +23,7 @@ from os import getenv from airflow import DAG -from airflow.providers.amazon.aws.operators.google_api_to_s3_transfer import GoogleApiToS3TransferOperator +from airflow.providers.amazon.aws.transfers.google_api_to_s3 import GoogleApiToS3Operator from airflow.utils.dates import days_ago # [START howto_operator_google_api_to_s3_transfer_basic_env_variables] @@ -41,7 +41,7 @@ tags=['example'] ) as dag: # [START howto_operator_google_api_to_s3_transfer_basic_task_1] - task_google_sheets_values_to_s3 = GoogleApiToS3TransferOperator( + task_google_sheets_values_to_s3 = GoogleApiToS3Operator( google_api_service_name='sheets', google_api_service_version='v4', google_api_endpoint_path='sheets.spreadsheets.values.get', diff --git a/airflow/providers/amazon/aws/example_dags/example_imap_attachment_to_s3.py b/airflow/providers/amazon/aws/example_dags/example_imap_attachment_to_s3.py index 7a0d86cefd77a..636d360f4acb5 100644 --- a/airflow/providers/amazon/aws/example_dags/example_imap_attachment_to_s3.py +++ 
b/airflow/providers/amazon/aws/example_dags/example_imap_attachment_to_s3.py @@ -23,7 +23,7 @@ from os import getenv from airflow import DAG -from airflow.providers.amazon.aws.operators.imap_attachment_to_s3 import ImapAttachmentToS3Operator +from airflow.providers.amazon.aws.transfers.imap_attachment_to_s3 import ImapAttachmentToS3Operator from airflow.utils.dates import days_ago # [START howto_operator_imap_attachment_to_s3_env_variables] diff --git a/airflow/providers/amazon/aws/example_dags/example_s3_to_redshift.py b/airflow/providers/amazon/aws/example_dags/example_s3_to_redshift.py index c36d443788738..c9cc2bdbc8445 100644 --- a/airflow/providers/amazon/aws/example_dags/example_s3_to_redshift.py +++ b/airflow/providers/amazon/aws/example_dags/example_s3_to_redshift.py @@ -16,7 +16,7 @@ # under the License. """ -This is an example dag for using `S3ToRedshiftTransferOperator` to copy a S3 key into a Redshift table. +This is an example dag for using `S3ToRedshiftOperator` to copy an S3 key into a Redshift table. """ from os import getenv @@ -24,7 +24,7 @@ from airflow import DAG from airflow.operators.python import PythonOperator from airflow.providers.amazon.aws.hooks.s3 import S3Hook -from airflow.providers.amazon.aws.operators.s3_to_redshift import S3ToRedshiftTransferOperator +from airflow.providers.amazon.aws.transfers.s3_to_redshift import S3ToRedshiftOperator from airflow.providers.postgres.operators.postgres import PostgresOperator from airflow.utils.dates import days_ago @@ -64,7 +64,7 @@ def _remove_sample_data_from_s3(): task_id='setup__create_table' ) # [START howto_operator_s3_to_redshift_task_1] - task_transfer_s3_to_redshift = S3ToRedshiftTransferOperator( + task_transfer_s3_to_redshift = S3ToRedshiftOperator( s3_bucket=S3_BUCKET, s3_key=S3_KEY, schema="PUBLIC", diff --git a/airflow/providers/amazon/aws/hooks/batch_client.py b/airflow/providers/amazon/aws/hooks/batch_client.py index 653854eb53e98..2069bc9c78718 100644 --- a/airflow/providers/amazon/aws/hooks/batch_client.py +++ b/airflow/providers/amazon/aws/hooks/batch_client.py @@ -38,7 +38,8 @@ from airflow.providers.amazon.aws.hooks.base_aws import AwsBaseHook from airflow.typing_compat import Protocol, runtime_checkable -# Add exceptions to pylint for the boto3 protocol only; ideally the boto3 library could provide +# Add exceptions to pylint for the boto3 protocol only; ideally the boto3 library +# could provide # protocols for all their dynamically generated classes (try to migrate this to a PR on botocore). # Note that the use of invalid-name parameters should be restricted to the boto3 mappings only; # all the Airflow wrappers of boto3 clients should not adopt invalid-names to match boto3. diff --git a/airflow/providers/amazon/aws/transfers/__init__.py b/airflow/providers/amazon/aws/transfers/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/airflow/providers/amazon/aws/transfers/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/airflow/providers/amazon/aws/operators/dynamodb_to_s3.py b/airflow/providers/amazon/aws/transfers/dynamodb_to_s3.py similarity index 100% rename from airflow/providers/amazon/aws/operators/dynamodb_to_s3.py rename to airflow/providers/amazon/aws/transfers/dynamodb_to_s3.py diff --git a/airflow/providers/amazon/aws/operators/gcs_to_s3.py b/airflow/providers/amazon/aws/transfers/gcs_to_s3.py similarity index 100% rename from airflow/providers/amazon/aws/operators/gcs_to_s3.py rename to airflow/providers/amazon/aws/transfers/gcs_to_s3.py diff --git a/airflow/providers/amazon/aws/operators/google_api_to_s3_transfer.py b/airflow/providers/amazon/aws/transfers/google_api_to_s3.py similarity index 99% rename from airflow/providers/amazon/aws/operators/google_api_to_s3_transfer.py rename to airflow/providers/amazon/aws/transfers/google_api_to_s3.py index e0f13e5becd7d..e30e39b5d1a0a 100644 --- a/airflow/providers/amazon/aws/operators/google_api_to_s3_transfer.py +++ b/airflow/providers/amazon/aws/transfers/google_api_to_s3.py @@ -29,7 +29,7 @@ from airflow.utils.decorators import apply_defaults -class GoogleApiToS3TransferOperator(BaseOperator): +class GoogleApiToS3Operator(BaseOperator): """ Basic class for transferring data from a Google API endpoint into a S3 Bucket. diff --git a/airflow/providers/amazon/aws/operators/hive_to_dynamodb.py b/airflow/providers/amazon/aws/transfers/hive_to_dynamodb.py similarity index 98% rename from airflow/providers/amazon/aws/operators/hive_to_dynamodb.py rename to airflow/providers/amazon/aws/transfers/hive_to_dynamodb.py index b1b8ab75fda74..882a6fb2ebd38 100644 --- a/airflow/providers/amazon/aws/operators/hive_to_dynamodb.py +++ b/airflow/providers/amazon/aws/transfers/hive_to_dynamodb.py @@ -28,7 +28,7 @@ from airflow.utils.decorators import apply_defaults -class HiveToDynamoDBTransferOperator(BaseOperator): +class HiveToDynamoDBOperator(BaseOperator): """ Moves data from Hive to DynamoDB, note that for now the data is loaded into memory before being pushed to DynamoDB, so this operator should diff --git a/airflow/providers/amazon/aws/operators/imap_attachment_to_s3.py b/airflow/providers/amazon/aws/transfers/imap_attachment_to_s3.py similarity index 100% rename from airflow/providers/amazon/aws/operators/imap_attachment_to_s3.py rename to airflow/providers/amazon/aws/transfers/imap_attachment_to_s3.py diff --git a/airflow/providers/amazon/aws/operators/mongo_to_s3.py b/airflow/providers/amazon/aws/transfers/mongo_to_s3.py similarity index 100% rename from airflow/providers/amazon/aws/operators/mongo_to_s3.py rename to airflow/providers/amazon/aws/transfers/mongo_to_s3.py diff --git a/airflow/providers/amazon/aws/operators/redshift_to_s3.py b/airflow/providers/amazon/aws/transfers/redshift_to_s3.py similarity index 99% rename from airflow/providers/amazon/aws/operators/redshift_to_s3.py rename to airflow/providers/amazon/aws/transfers/redshift_to_s3.py index a456560e0c820..5c85506be55c0 100644 --- a/airflow/providers/amazon/aws/operators/redshift_to_s3.py +++ b/airflow/providers/amazon/aws/transfers/redshift_to_s3.py @@ -26,7 +26,7 @@ 
from airflow.utils.decorators import apply_defaults -class RedshiftToS3TransferOperator(BaseOperator): +class RedshiftToS3Operator(BaseOperator): """ Executes an UNLOAD command to s3 as a CSV with headers diff --git a/airflow/providers/amazon/aws/operators/s3_to_redshift.py b/airflow/providers/amazon/aws/transfers/s3_to_redshift.py similarity index 98% rename from airflow/providers/amazon/aws/operators/s3_to_redshift.py rename to airflow/providers/amazon/aws/transfers/s3_to_redshift.py index 5a32af50c435b..f8d28a15a0930 100644 --- a/airflow/providers/amazon/aws/operators/s3_to_redshift.py +++ b/airflow/providers/amazon/aws/transfers/s3_to_redshift.py @@ -1,4 +1,3 @@ -# # Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information @@ -15,6 +14,7 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. + from typing import List, Optional, Union from airflow.models import BaseOperator @@ -23,7 +23,7 @@ from airflow.utils.decorators import apply_defaults -class S3ToRedshiftTransferOperator(BaseOperator): +class S3ToRedshiftOperator(BaseOperator): """ Executes an COPY command to load files from s3 to Redshift diff --git a/airflow/providers/amazon/aws/operators/s3_to_sftp.py b/airflow/providers/amazon/aws/transfers/s3_to_sftp.py similarity index 100% rename from airflow/providers/amazon/aws/operators/s3_to_sftp.py rename to airflow/providers/amazon/aws/transfers/s3_to_sftp.py diff --git a/airflow/providers/amazon/aws/operators/sftp_to_s3.py b/airflow/providers/amazon/aws/transfers/sftp_to_s3.py similarity index 100% rename from airflow/providers/amazon/aws/operators/sftp_to_s3.py rename to airflow/providers/amazon/aws/transfers/sftp_to_s3.py diff --git a/airflow/providers/apache/cassandra/PROVIDERS_CHANGES_2020.05.20.md b/airflow/providers/apache/cassandra/PROVIDERS_CHANGES_2020.05.20.md index 4ae75e81f66d4..fe4163a81b90c 100644 --- a/airflow/providers/apache/cassandra/PROVIDERS_CHANGES_2020.05.20.md +++ b/airflow/providers/apache/cassandra/PROVIDERS_CHANGES_2020.05.20.md @@ -4,6 +4,7 @@ | Commit | Committed | Subject | |:-----------------------------------------------------------------------------------------------|:------------|:---------------------------------------------------------------------------------| +| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. (#8994) | | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | | [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | diff --git a/airflow/providers/apache/cassandra/README.md b/airflow/providers/apache/cassandra/README.md index 484f2b78252e8..fb2bb65b11982 100644 --- a/airflow/providers/apache/cassandra/README.md +++ b/airflow/providers/apache/cassandra/README.md @@ -71,6 +71,8 @@ All classes in Airflow 2.0 are in `airflow.providers.apache.cassandra` package. 
+ + ## Sensors @@ -105,6 +107,7 @@ All classes in Airflow 2.0 are in `airflow.providers.apache.cassandra` package. | Commit | Committed | Subject | |:-----------------------------------------------------------------------------------------------|:------------|:---------------------------------------------------------------------------------| +| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. (#8994) | | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | | [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | diff --git a/airflow/providers/apache/druid/PROVIDERS_CHANGES_2020.05.20.md b/airflow/providers/apache/druid/PROVIDERS_CHANGES_2020.05.20.md index a2542e2278377..493a62e88a15d 100644 --- a/airflow/providers/apache/druid/PROVIDERS_CHANGES_2020.05.20.md +++ b/airflow/providers/apache/druid/PROVIDERS_CHANGES_2020.05.20.md @@ -4,6 +4,7 @@ | Commit | Committed | Subject | |:-----------------------------------------------------------------------------------------------|:------------|:--------------------------------------------------------------------------------------------| +| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. (#8994) | | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | | [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | diff --git a/airflow/providers/apache/druid/README.md b/airflow/providers/apache/druid/README.md index df7b70cbf535a..f14aaab797f3b 100644 --- a/airflow/providers/apache/druid/README.md +++ b/airflow/providers/apache/druid/README.md @@ -32,6 +32,8 @@ Release: 2020.5.20 - [Provider class summary](#provider-class-summary) - [Operators](#operators) - [Moved operators](#moved-operators) + - [Transfer operators](#transfers) + - [Moved transfer operators](#moved-transfers) - [Hooks](#hooks) - [Moved hooks](#moved-hooks) - [Releases](#releases) @@ -92,11 +94,22 @@ All classes in Airflow 2.0 are in `airflow.providers.apache.druid` package. 
### Moved operators -| Airflow 2.0 operators: `airflow.providers.apache.druid` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:---------------------------------------------------------------------------------------------------------------------------------------------------------------|:---------------------------------------------------------------------------------------------------------------------------------------------------| -| [operators.druid.DruidOperator](https://github.com/apache/airflow/blob/master/airflow/providers/apache/druid/operators/druid.py) | [contrib.operators.druid_operator.DruidOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/druid_operator.py) | -| [operators.druid_check.DruidCheckOperator](https://github.com/apache/airflow/blob/master/airflow/providers/apache/druid/operators/druid_check.py) | [operators.druid_check_operator.DruidCheckOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/operators/druid_check_operator.py) | -| [operators.hive_to_druid.HiveToDruidTransferOperator](https://github.com/apache/airflow/blob/master/airflow/providers/apache/druid/operators/hive_to_druid.py) | [operators.hive_to_druid.HiveToDruidTransfer](https://github.com/apache/airflow/blob/v1-10-stable/airflow/operators/hive_to_druid.py) | +| Airflow 2.0 operators: `airflow.providers.apache.druid` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | +|:--------------------------------------------------------------------------------------------------------------------------------------------------|:---------------------------------------------------------------------------------------------------------------------------------------------------| +| [operators.druid.DruidOperator](https://github.com/apache/airflow/blob/master/airflow/providers/apache/druid/operators/druid.py) | [contrib.operators.druid_operator.DruidOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/druid_operator.py) | +| [operators.druid_check.DruidCheckOperator](https://github.com/apache/airflow/blob/master/airflow/providers/apache/druid/operators/druid_check.py) | [operators.druid_check_operator.DruidCheckOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/operators/druid_check_operator.py) | + + + + + + + +### Moved transfer operators + +| Airflow 2.0 transfers: `airflow.providers.apache.druid` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | +|:-------------------------------------------------------------------------------------------------------------------------------------------------------|:--------------------------------------------------------------------------------------------------------------------------------------| +| [transfers.hive_to_druid.HiveToDruidOperator](https://github.com/apache/airflow/blob/master/airflow/providers/apache/druid/transfers/hive_to_druid.py) | [operators.hive_to_druid.HiveToDruidTransfer](https://github.com/apache/airflow/blob/v1-10-stable/airflow/operators/hive_to_druid.py) | @@ -124,6 +137,7 @@ All classes in Airflow 2.0 are in `airflow.providers.apache.druid` package. 
| Commit | Committed | Subject | |:-----------------------------------------------------------------------------------------------|:------------|:--------------------------------------------------------------------------------------------| +| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. (#8994) | | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | | [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | diff --git a/airflow/providers/apache/druid/transfers/__init__.py b/airflow/providers/apache/druid/transfers/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/airflow/providers/apache/druid/transfers/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
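The pattern in the shim modules earlier in this diff, and in the `hive_to_druid` rename that follows, is uniform: the old module path keeps working but warns, and the old class is a thin subclass of the renamed one. A minimal sketch of what that means for DAG code, assuming the shim retains the 1.10-era class name `HiveToDruidTransfer` (as the deprecation message earlier in this diff indicates):

```python
# Deprecated 1.10-style import path: the shim module still resolves, but it
# emits a DeprecationWarning the first time it is imported.
from airflow.operators.hive_to_druid import HiveToDruidTransfer

# New import path introduced by this change: note the 'transfers' package and
# the plain 'Operator' suffix in place of 'TransferOperator'/'Transfer'.
from airflow.providers.apache.druid.transfers.hive_to_druid import HiveToDruidOperator

# The shim subclasses the renamed operator, so existing DAGs keep running the
# same implementation while their imports are migrated.
assert issubclass(HiveToDruidTransfer, HiveToDruidOperator)
```

Running a DAG file with warnings escalated (`python -W error::DeprecationWarning my_dag.py`) is one way to flush out any remaining old-style imports before upgrading.
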
diff --git a/airflow/providers/apache/druid/operators/hive_to_druid.py b/airflow/providers/apache/druid/transfers/hive_to_druid.py similarity index 99% rename from airflow/providers/apache/druid/operators/hive_to_druid.py rename to airflow/providers/apache/druid/transfers/hive_to_druid.py index d021a88bfd016..e8ba9bc2883a6 100644 --- a/airflow/providers/apache/druid/operators/hive_to_druid.py +++ b/airflow/providers/apache/druid/transfers/hive_to_druid.py @@ -31,7 +31,7 @@ DEFAULT_TARGET_PARTITION_SIZE = 5000000 -class HiveToDruidTransferOperator(BaseOperator): +class HiveToDruidOperator(BaseOperator): """ Moves data from Hive to Druid, [del]note that for now the data is loaded into memory before being pushed to Druid, so this operator should diff --git a/airflow/providers/apache/hdfs/PROVIDERS_CHANGES_2020.05.20.md b/airflow/providers/apache/hdfs/PROVIDERS_CHANGES_2020.05.20.md index 2bb90a15fcd6a..e6ea33ab46acf 100644 --- a/airflow/providers/apache/hdfs/PROVIDERS_CHANGES_2020.05.20.md +++ b/airflow/providers/apache/hdfs/PROVIDERS_CHANGES_2020.05.20.md @@ -4,6 +4,7 @@ | Commit | Committed | Subject | |:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------| +| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. (#8994) | | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | | [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | diff --git a/airflow/providers/apache/hdfs/README.md b/airflow/providers/apache/hdfs/README.md index b215487d78e23..4781ada025c77 100644 --- a/airflow/providers/apache/hdfs/README.md +++ b/airflow/providers/apache/hdfs/README.md @@ -71,6 +71,8 @@ All classes in Airflow 2.0 are in `airflow.providers.apache.hdfs` package. + + ## Sensors @@ -108,6 +110,7 @@ All classes in Airflow 2.0 are in `airflow.providers.apache.hdfs` package. | Commit | Committed | Subject | |:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------| +| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. 
(#8994) | | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | | [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | diff --git a/airflow/providers/apache/hive/PROVIDERS_CHANGES_2020.05.20.md b/airflow/providers/apache/hive/PROVIDERS_CHANGES_2020.05.20.md index 7ae7ae440e619..742055ba0c79b 100644 --- a/airflow/providers/apache/hive/PROVIDERS_CHANGES_2020.05.20.md +++ b/airflow/providers/apache/hive/PROVIDERS_CHANGES_2020.05.20.md @@ -4,6 +4,9 @@ | Commit | Committed | Subject | |:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| [6350fd6eb](https://github.com/apache/airflow/commit/6350fd6ebb9958982cb3fa1d466168fc31708035) | 2020-06-08 | Don't use the term "whitelist" - language matters (#9174) | +| [10796cb7c](https://github.com/apache/airflow/commit/10796cb7ce52c8ac2f68024e531fdda779547bdf) | 2020-06-03 | Remove Hive/Hadoop/Java dependency from unit tests (#9029) | +| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. (#8994) | | [cdb3f2545](https://github.com/apache/airflow/commit/cdb3f25456e49d0199cd7ccd680626dac01c9be6) | 2020-05-26 | All classes in backport providers are now importable in Airflow 1.10 (#8991) | | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | diff --git a/airflow/providers/apache/hive/README.md b/airflow/providers/apache/hive/README.md index 8b2af2cfae996..3e832738cd703 100644 --- a/airflow/providers/apache/hive/README.md +++ b/airflow/providers/apache/hive/README.md @@ -32,6 +32,8 @@ Release: 2020.5.20 - [Provider class summary](#provider-class-summary) - [Operators](#operators) - [Moved operators](#moved-operators) + - [Transfer operators](#transfers) + - [Moved transfer operators](#moved-transfers) - [Sensors](#sensors) - [Moved sensors](#moved-sensors) - [Hooks](#hooks) @@ -66,7 +68,7 @@ For full compatibility and test status of the backport packages check | PIP package | Version required | |:--------------|:-------------------| | hmsclient | >=0.1.0 | -| pyhive | >=0.6.0 | +| pyhive[hive] | >=0.6.0 | ## Cross provider package dependencies @@ -100,16 +102,27 @@ All classes in Airflow 2.0 are in `airflow.providers.apache.hive` package. 
### Moved operators -| Airflow 2.0 operators: `airflow.providers.apache.hive` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:--------------------------------------------------------------------------------------------------------------------------------------------------------------------|:------------------------------------------------------------------------------------------------------------------------------------------------------------| -| [operators.hive.HiveOperator](https://github.com/apache/airflow/blob/master/airflow/providers/apache/hive/operators/hive.py) | [operators.hive_operator.HiveOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/operators/hive_operator.py) | -| [operators.hive_stats.HiveStatsCollectionOperator](https://github.com/apache/airflow/blob/master/airflow/providers/apache/hive/operators/hive_stats.py) | [operators.hive_stats_operator.HiveStatsCollectionOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/operators/hive_stats_operator.py) | -| [operators.hive_to_mysql.HiveToMySqlTransferOperator](https://github.com/apache/airflow/blob/master/airflow/providers/apache/hive/operators/hive_to_mysql.py) | [operators.hive_to_mysql.HiveToMySqlTransfer](https://github.com/apache/airflow/blob/v1-10-stable/airflow/operators/hive_to_mysql.py) | -| [operators.hive_to_samba.Hive2SambaOperator](https://github.com/apache/airflow/blob/master/airflow/providers/apache/hive/operators/hive_to_samba.py) | [operators.hive_to_samba_operator.Hive2SambaOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/operators/hive_to_samba_operator.py) | -| [operators.mssql_to_hive.MsSqlToHiveTransferOperator](https://github.com/apache/airflow/blob/master/airflow/providers/apache/hive/operators/mssql_to_hive.py) | [operators.mssql_to_hive.MsSqlToHiveTransfer](https://github.com/apache/airflow/blob/v1-10-stable/airflow/operators/mssql_to_hive.py) | -| [operators.mysql_to_hive.MySqlToHiveTransferOperator](https://github.com/apache/airflow/blob/master/airflow/providers/apache/hive/operators/mysql_to_hive.py) | [operators.mysql_to_hive.MySqlToHiveTransfer](https://github.com/apache/airflow/blob/v1-10-stable/airflow/operators/mysql_to_hive.py) | -| [operators.s3_to_hive.S3ToHiveTransferOperator](https://github.com/apache/airflow/blob/master/airflow/providers/apache/hive/operators/s3_to_hive.py) | [operators.s3_to_hive_operator.S3ToHiveTransfer](https://github.com/apache/airflow/blob/v1-10-stable/airflow/operators/s3_to_hive_operator.py) | -| [operators.vertica_to_hive.VerticaToHiveTransferOperator](https://github.com/apache/airflow/blob/master/airflow/providers/apache/hive/operators/vertica_to_hive.py) | [contrib.operators.vertica_to_hive.VerticaToHiveTransfer](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/vertica_to_hive.py) | +| Airflow 2.0 operators: `airflow.providers.apache.hive` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | +|:--------------------------------------------------------------------------------------------------------------------------------------------------------|:----------------------------------------------------------------------------------------------------------------------------------------------------------| +| [operators.hive.HiveOperator](https://github.com/apache/airflow/blob/master/airflow/providers/apache/hive/operators/hive.py) | 
[operators.hive_operator.HiveOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/operators/hive_operator.py) | +| [operators.hive_stats.HiveStatsCollectionOperator](https://github.com/apache/airflow/blob/master/airflow/providers/apache/hive/operators/hive_stats.py) | [operators.hive_stats_operator.HiveStatsCollectionOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/operators/hive_stats_operator.py) | + + + + + + + +### Moved transfer operators + +| Airflow 2.0 transfers: `airflow.providers.apache.hive` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | +|:------------------------------------------------------------------------------------------------------------------------------------------------------------|:------------------------------------------------------------------------------------------------------------------------------------------------------------| +| [transfers.hive_to_mysql.HiveToMySqlOperator](https://github.com/apache/airflow/blob/master/airflow/providers/apache/hive/transfers/hive_to_mysql.py) | [operators.hive_to_mysql.HiveToMySqlTransfer](https://github.com/apache/airflow/blob/v1-10-stable/airflow/operators/hive_to_mysql.py) | +| [transfers.hive_to_samba.HiveToSambaOperator](https://github.com/apache/airflow/blob/master/airflow/providers/apache/hive/transfers/hive_to_samba.py) | [operators.hive_to_samba_operator.Hive2SambaOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/operators/hive_to_samba_operator.py) | +| [transfers.mssql_to_hive.MsSqlToHiveOperator](https://github.com/apache/airflow/blob/master/airflow/providers/apache/hive/transfers/mssql_to_hive.py) | [operators.mssql_to_hive.MsSqlToHiveTransfer](https://github.com/apache/airflow/blob/v1-10-stable/airflow/operators/mssql_to_hive.py) | +| [transfers.mysql_to_hive.MySqlToHiveOperator](https://github.com/apache/airflow/blob/master/airflow/providers/apache/hive/transfers/mysql_to_hive.py) | [operators.mysql_to_hive.MySqlToHiveTransfer](https://github.com/apache/airflow/blob/v1-10-stable/airflow/operators/mysql_to_hive.py) | +| [transfers.s3_to_hive.S3ToHiveOperator](https://github.com/apache/airflow/blob/master/airflow/providers/apache/hive/transfers/s3_to_hive.py) | [operators.s3_to_hive_operator.S3ToHiveTransfer](https://github.com/apache/airflow/blob/v1-10-stable/airflow/operators/s3_to_hive_operator.py) | +| [transfers.vertica_to_hive.VerticaToHiveOperator](https://github.com/apache/airflow/blob/master/airflow/providers/apache/hive/transfers/vertica_to_hive.py) | [contrib.operators.vertica_to_hive.VerticaToHiveTransfer](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/vertica_to_hive.py) | @@ -151,6 +164,9 @@ All classes in Airflow 2.0 are in `airflow.providers.apache.hive` package.
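The table above maps each relocated `transfers` module to its Airflow 1.10 location. For DAG code the migration is an import-path swap plus dropping the `Transfer` suffix; a minimal sketch (the `task_id`, `sql`, and `mysql_table` values are illustrative, not taken from this diff):

```python
# Old backport location (removed by this change):
# from airflow.providers.apache.hive.operators.hive_to_mysql import HiveToMySqlTransferOperator

# New location: module moved to `transfers`, class renamed without the Transfer suffix.
from airflow.providers.apache.hive.transfers.hive_to_mysql import HiveToMySqlOperator

hive_to_mysql = HiveToMySqlOperator(
    task_id="hive_to_mysql",          # illustrative task id
    sql="SELECT * FROM hive_source",  # query run against HiveServer2
    mysql_table="target_table",       # destination MySQL table
)
```

The same pattern applies to the other five modules in the table (`hive_to_samba`, `mssql_to_hive`, `mysql_to_hive`, `s3_to_hive`, `vertica_to_hive`).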
| Commit | Committed | Subject | |:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| [6350fd6eb](https://github.com/apache/airflow/commit/6350fd6ebb9958982cb3fa1d466168fc31708035) | 2020-06-08 | Don't use the term "whitelist" - language matters (#9174) | +| [10796cb7c](https://github.com/apache/airflow/commit/10796cb7ce52c8ac2f68024e531fdda779547bdf) | 2020-06-03 | Remove Hive/Hadoop/Java dependency from unit tests (#9029) | +| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. (#8994) | | [cdb3f2545](https://github.com/apache/airflow/commit/cdb3f25456e49d0199cd7ccd680626dac01c9be6) | 2020-05-26 | All classes in backport providers are now importable in Airflow 1.10 (#8991) | | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | diff --git a/airflow/providers/apache/hive/transfers/__init__.py b/airflow/providers/apache/hive/transfers/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/airflow/providers/apache/hive/transfers/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
diff --git a/airflow/providers/apache/hive/operators/hive_to_mysql.py b/airflow/providers/apache/hive/transfers/hive_to_mysql.py similarity index 99% rename from airflow/providers/apache/hive/operators/hive_to_mysql.py rename to airflow/providers/apache/hive/transfers/hive_to_mysql.py index d46c800e8ddc9..524f9b15cd818 100644 --- a/airflow/providers/apache/hive/operators/hive_to_mysql.py +++ b/airflow/providers/apache/hive/transfers/hive_to_mysql.py @@ -29,7 +29,7 @@ from airflow.utils.operator_helpers import context_to_airflow_vars -class HiveToMySqlTransferOperator(BaseOperator): +class HiveToMySqlOperator(BaseOperator): """ Moves data from Hive to MySQL, note that for now the data is loaded into memory before being pushed to MySQL, so this operator should diff --git a/airflow/providers/apache/hive/operators/hive_to_samba.py b/airflow/providers/apache/hive/transfers/hive_to_samba.py similarity index 98% rename from airflow/providers/apache/hive/operators/hive_to_samba.py rename to airflow/providers/apache/hive/transfers/hive_to_samba.py index 4e8f714bef847..4426da115acd2 100644 --- a/airflow/providers/apache/hive/operators/hive_to_samba.py +++ b/airflow/providers/apache/hive/transfers/hive_to_samba.py @@ -29,7 +29,7 @@ from airflow.utils.operator_helpers import context_to_airflow_vars -class Hive2SambaOperator(BaseOperator): +class HiveToSambaOperator(BaseOperator): """ Executes hql code in a specific Hive database and loads the results of the query as a csv to a Samba location. diff --git a/airflow/providers/apache/hive/operators/mssql_to_hive.py b/airflow/providers/apache/hive/transfers/mssql_to_hive.py similarity index 99% rename from airflow/providers/apache/hive/operators/mssql_to_hive.py rename to airflow/providers/apache/hive/transfers/mssql_to_hive.py index fba48db996f27..7af6ec664a947 100644 --- a/airflow/providers/apache/hive/operators/mssql_to_hive.py +++ b/airflow/providers/apache/hive/transfers/mssql_to_hive.py @@ -33,7 +33,7 @@ from airflow.utils.decorators import apply_defaults -class MsSqlToHiveTransferOperator(BaseOperator): +class MsSqlToHiveOperator(BaseOperator): """ Moves data from Microsoft SQL Server to Hive. The operator runs your query against Microsoft SQL Server, stores the file locally diff --git a/airflow/providers/apache/hive/operators/mysql_to_hive.py b/airflow/providers/apache/hive/transfers/mysql_to_hive.py similarity index 99% rename from airflow/providers/apache/hive/operators/mysql_to_hive.py rename to airflow/providers/apache/hive/transfers/mysql_to_hive.py index 1d4461182c2bd..083c2a3535c45 100644 --- a/airflow/providers/apache/hive/operators/mysql_to_hive.py +++ b/airflow/providers/apache/hive/transfers/mysql_to_hive.py @@ -33,7 +33,7 @@ from airflow.utils.decorators import apply_defaults -class MySqlToHiveTransferOperator(BaseOperator): +class MySqlToHiveOperator(BaseOperator): """ Moves data from MySql to Hive. The operator runs your query against MySQL, stores the file locally before loading it into a Hive table. 
diff --git a/airflow/providers/apache/hive/operators/s3_to_hive.py b/airflow/providers/apache/hive/transfers/s3_to_hive.py similarity index 99% rename from airflow/providers/apache/hive/operators/s3_to_hive.py rename to airflow/providers/apache/hive/transfers/s3_to_hive.py index 2e353213ce3a4..032ff5b23edc3 100644 --- a/airflow/providers/apache/hive/operators/s3_to_hive.py +++ b/airflow/providers/apache/hive/transfers/s3_to_hive.py @@ -35,7 +35,7 @@ from airflow.utils.decorators import apply_defaults -class S3ToHiveTransferOperator(BaseOperator): # pylint: disable=too-many-instance-attributes +class S3ToHiveOperator(BaseOperator): # pylint: disable=too-many-instance-attributes """ Moves data from S3 to Hive. The operator downloads a file from S3, stores the file locally before loading it into a Hive table. diff --git a/airflow/providers/apache/hive/operators/vertica_to_hive.py b/airflow/providers/apache/hive/transfers/vertica_to_hive.py similarity index 99% rename from airflow/providers/apache/hive/operators/vertica_to_hive.py rename to airflow/providers/apache/hive/transfers/vertica_to_hive.py index 5bd7314436708..5d1d9beac18b6 100644 --- a/airflow/providers/apache/hive/operators/vertica_to_hive.py +++ b/airflow/providers/apache/hive/transfers/vertica_to_hive.py @@ -31,7 +31,7 @@ from airflow.utils.decorators import apply_defaults -class VerticaToHiveTransferOperator(BaseOperator): +class VerticaToHiveOperator(BaseOperator): """ Moves data from Vertica to Hive. The operator runs your query against Vertica, stores the file locally diff --git a/airflow/providers/apache/livy/PROVIDERS_CHANGES_2020.05.20.md b/airflow/providers/apache/livy/PROVIDERS_CHANGES_2020.05.20.md index eef2cfd2f08b3..384646c2cc8ef 100644 --- a/airflow/providers/apache/livy/PROVIDERS_CHANGES_2020.05.20.md +++ b/airflow/providers/apache/livy/PROVIDERS_CHANGES_2020.05.20.md @@ -4,6 +4,7 @@ | Commit | Committed | Subject | |:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. (#8994) | | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | | [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | diff --git a/airflow/providers/apache/livy/README.md b/airflow/providers/apache/livy/README.md index cf0c3d5539940..96ec7626782fb 100644 --- a/airflow/providers/apache/livy/README.md +++ b/airflow/providers/apache/livy/README.md @@ -94,6 +94,8 @@ All classes in Airflow 2.0 are in `airflow.providers.apache.livy` package. + + ## Sensors @@ -127,6 +129,7 @@ All classes in Airflow 2.0 are in `airflow.providers.apache.livy` package. 
| Commit | Committed | Subject | |:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. (#8994) | | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | | [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | diff --git a/airflow/providers/apache/pig/PROVIDERS_CHANGES_2020.05.20.md b/airflow/providers/apache/pig/PROVIDERS_CHANGES_2020.05.20.md index 7fae213a70bc5..dbc4872d1ed6b 100644 --- a/airflow/providers/apache/pig/PROVIDERS_CHANGES_2020.05.20.md +++ b/airflow/providers/apache/pig/PROVIDERS_CHANGES_2020.05.20.md @@ -4,6 +4,7 @@ | Commit | Committed | Subject | |:-----------------------------------------------------------------------------------------------|:------------|:-----------------------------------------------------------------------------------------| +| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. (#8994) | | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | | [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | diff --git a/airflow/providers/apache/pig/README.md b/airflow/providers/apache/pig/README.md index 03a1d6616e600..7f81852b284d5 100644 --- a/airflow/providers/apache/pig/README.md +++ b/airflow/providers/apache/pig/README.md @@ -77,6 +77,8 @@ All classes in Airflow 2.0 are in `airflow.providers.apache.pig` package. + + ## Hooks @@ -98,6 +100,7 @@ All classes in Airflow 2.0 are in `airflow.providers.apache.pig` package. | Commit | Committed | Subject | |:-----------------------------------------------------------------------------------------------|:------------|:-----------------------------------------------------------------------------------------| +| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. 
(#8994) | | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | | [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | diff --git a/airflow/providers/apache/pinot/PROVIDERS_CHANGES_2020.05.20.md b/airflow/providers/apache/pinot/PROVIDERS_CHANGES_2020.05.20.md index c1f59aa18b7a6..a6b8e6e4ad4bb 100644 --- a/airflow/providers/apache/pinot/PROVIDERS_CHANGES_2020.05.20.md +++ b/airflow/providers/apache/pinot/PROVIDERS_CHANGES_2020.05.20.md @@ -4,6 +4,7 @@ | Commit | Committed | Subject | |:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------| +| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. (#8994) | | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | | [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | diff --git a/airflow/providers/apache/pinot/README.md b/airflow/providers/apache/pinot/README.md index f3f08e062e7d9..d342959540108 100644 --- a/airflow/providers/apache/pinot/README.md +++ b/airflow/providers/apache/pinot/README.md @@ -70,6 +70,8 @@ All classes in Airflow 2.0 are in `airflow.providers.apache.pinot` package. + + ## Hooks @@ -92,6 +94,7 @@ All classes in Airflow 2.0 are in `airflow.providers.apache.pinot` package. | Commit | Committed | Subject | |:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------| +| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. 
(#8994) | | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | | [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | diff --git a/airflow/providers/apache/spark/PROVIDERS_CHANGES_2020.05.20.md b/airflow/providers/apache/spark/PROVIDERS_CHANGES_2020.05.20.md index 8c025a2be7e64..dd49fd349e328 100644 --- a/airflow/providers/apache/spark/PROVIDERS_CHANGES_2020.05.20.md +++ b/airflow/providers/apache/spark/PROVIDERS_CHANGES_2020.05.20.md @@ -4,6 +4,7 @@ | Commit | Committed | Subject | |:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------------------------| +| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. (#8994) | | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | | [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | diff --git a/airflow/providers/apache/spark/README.md b/airflow/providers/apache/spark/README.md index eac1eafa156e7..d9da28b6343fe 100644 --- a/airflow/providers/apache/spark/README.md +++ b/airflow/providers/apache/spark/README.md @@ -86,6 +86,8 @@ All classes in Airflow 2.0 are in `airflow.providers.apache.spark` package. + + ## Hooks @@ -109,6 +111,7 @@ All classes in Airflow 2.0 are in `airflow.providers.apache.spark` package. | Commit | Committed | Subject | |:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------------------------| +| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. 
(#8994) | | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | | [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | diff --git a/airflow/providers/apache/sqoop/PROVIDERS_CHANGES_2020.05.20.md b/airflow/providers/apache/sqoop/PROVIDERS_CHANGES_2020.05.20.md index 26c70b0447f71..dd3d4fd393d01 100644 --- a/airflow/providers/apache/sqoop/PROVIDERS_CHANGES_2020.05.20.md +++ b/airflow/providers/apache/sqoop/PROVIDERS_CHANGES_2020.05.20.md @@ -4,6 +4,7 @@ | Commit | Committed | Subject | |:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------| +| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. (#8994) | | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | | [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | diff --git a/airflow/providers/apache/sqoop/README.md b/airflow/providers/apache/sqoop/README.md index d1d81607e0240..4e902456f36ff 100644 --- a/airflow/providers/apache/sqoop/README.md +++ b/airflow/providers/apache/sqoop/README.md @@ -77,6 +77,8 @@ All classes in Airflow 2.0 are in `airflow.providers.apache.sqoop` package. + + ## Hooks @@ -98,6 +100,7 @@ All classes in Airflow 2.0 are in `airflow.providers.apache.sqoop` package. | Commit | Committed | Subject | |:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------| +| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. 
(#8994) | | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | | [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | diff --git a/airflow/providers/celery/PROVIDERS_CHANGES_2020.05.20.md b/airflow/providers/celery/PROVIDERS_CHANGES_2020.05.20.md index dd184370722d9..9ac9a7b1d9c33 100644 --- a/airflow/providers/celery/PROVIDERS_CHANGES_2020.05.20.md +++ b/airflow/providers/celery/PROVIDERS_CHANGES_2020.05.20.md @@ -4,6 +4,7 @@ | Commit | Committed | Subject | |:-----------------------------------------------------------------------------------------------|:------------|:------------------------------------------------------------------------| +| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. (#8994) | | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | | [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | diff --git a/airflow/providers/celery/README.md b/airflow/providers/celery/README.md index 0520fa7b88933..f77c5360ee2c7 100644 --- a/airflow/providers/celery/README.md +++ b/airflow/providers/celery/README.md @@ -71,6 +71,8 @@ All classes in Airflow 2.0 are in `airflow.providers.celery` package. + + ## Sensors @@ -93,6 +95,7 @@ All classes in Airflow 2.0 are in `airflow.providers.celery` package. | Commit | Committed | Subject | |:-----------------------------------------------------------------------------------------------|:------------|:------------------------------------------------------------------------| +| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. 
(#8994) | | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | | [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | diff --git a/airflow/providers/cloudant/PROVIDERS_CHANGES_2020.05.20.md b/airflow/providers/cloudant/PROVIDERS_CHANGES_2020.05.20.md index e45e43fbe173f..06713f6324dc7 100644 --- a/airflow/providers/cloudant/PROVIDERS_CHANGES_2020.05.20.md +++ b/airflow/providers/cloudant/PROVIDERS_CHANGES_2020.05.20.md @@ -4,6 +4,7 @@ | Commit | Committed | Subject | |:-----------------------------------------------------------------------------------------------|:------------|:----------------------------------------------------------------------------------------| +| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. (#8994) | | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | | [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | diff --git a/airflow/providers/cloudant/README.md b/airflow/providers/cloudant/README.md index 0962864f195aa..9cc3ae1f7a775 100644 --- a/airflow/providers/cloudant/README.md +++ b/airflow/providers/cloudant/README.md @@ -70,6 +70,8 @@ All classes in Airflow 2.0 are in `airflow.providers.cloudant` package. + + ## Hooks @@ -91,6 +93,7 @@ All classes in Airflow 2.0 are in `airflow.providers.cloudant` package. | Commit | Committed | Subject | |:-----------------------------------------------------------------------------------------------|:------------|:----------------------------------------------------------------------------------------| +| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. 
(#8994) | | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | | [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | diff --git a/airflow/providers/databricks/PROVIDERS_CHANGES_2020.05.20.md b/airflow/providers/databricks/PROVIDERS_CHANGES_2020.05.20.md index a15a8d71e1987..8ec7de235efc2 100644 --- a/airflow/providers/databricks/PROVIDERS_CHANGES_2020.05.20.md +++ b/airflow/providers/databricks/PROVIDERS_CHANGES_2020.05.20.md @@ -4,6 +4,7 @@ | Commit | Committed | Subject | |:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. (#8994) | | [f1073381e](https://github.com/apache/airflow/commit/f1073381ed764a218b2502d15ca28a5b326f9f2d) | 2020-05-22 | Add support for spark python and submit tasks in Databricks operator(#8846) | | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | diff --git a/airflow/providers/databricks/README.md b/airflow/providers/databricks/README.md index 9d2cf93bfdd2d..678d141e0e32f 100644 --- a/airflow/providers/databricks/README.md +++ b/airflow/providers/databricks/README.md @@ -85,6 +85,8 @@ All classes in Airflow 2.0 are in `airflow.providers.databricks` package. + + ## Hooks @@ -106,6 +108,7 @@ All classes in Airflow 2.0 are in `airflow.providers.databricks` package. | Commit | Committed | Subject | |:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. 
(#8994) | | [f1073381e](https://github.com/apache/airflow/commit/f1073381ed764a218b2502d15ca28a5b326f9f2d) | 2020-05-22 | Add support for spark python and submit tasks in Databricks operator(#8846) | | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | diff --git a/airflow/providers/datadog/PROVIDERS_CHANGES_2020.05.20.md b/airflow/providers/datadog/PROVIDERS_CHANGES_2020.05.20.md index a9c72ea9ff7c1..9c1eac609e470 100644 --- a/airflow/providers/datadog/PROVIDERS_CHANGES_2020.05.20.md +++ b/airflow/providers/datadog/PROVIDERS_CHANGES_2020.05.20.md @@ -4,6 +4,7 @@ | Commit | Committed | Subject | |:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------| +| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. (#8994) | | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | | [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | diff --git a/airflow/providers/datadog/README.md b/airflow/providers/datadog/README.md index b56c7dfffe1bd..d6ca490419fdf 100644 --- a/airflow/providers/datadog/README.md +++ b/airflow/providers/datadog/README.md @@ -71,6 +71,8 @@ All classes in Airflow 2.0 are in `airflow.providers.datadog` package. + + ## Sensors @@ -104,6 +106,7 @@ All classes in Airflow 2.0 are in `airflow.providers.datadog` package. | Commit | Committed | Subject | |:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------| +| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. 
(#8994) | | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | | [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | diff --git a/airflow/providers/dingding/PROVIDERS_CHANGES_2020.05.20.md b/airflow/providers/dingding/PROVIDERS_CHANGES_2020.05.20.md index 20c96108c1560..58def40bcfac7 100644 --- a/airflow/providers/dingding/PROVIDERS_CHANGES_2020.05.20.md +++ b/airflow/providers/dingding/PROVIDERS_CHANGES_2020.05.20.md @@ -4,6 +4,7 @@ | Commit | Committed | Subject | |:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. (#8994) | | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | | [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | diff --git a/airflow/providers/dingding/README.md b/airflow/providers/dingding/README.md index fcec20852575d..9db9b092cd159 100644 --- a/airflow/providers/dingding/README.md +++ b/airflow/providers/dingding/README.md @@ -93,6 +93,8 @@ All classes in Airflow 2.0 are in `airflow.providers.dingding` package. + + ## Hooks @@ -114,6 +116,7 @@ All classes in Airflow 2.0 are in `airflow.providers.dingding` package. | Commit | Committed | Subject | |:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. 
(#8994) | | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | | [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | diff --git a/airflow/providers/discord/PROVIDERS_CHANGES_2020.05.20.md b/airflow/providers/discord/PROVIDERS_CHANGES_2020.05.20.md index 8100422c1bffc..b32b8c488dbe5 100644 --- a/airflow/providers/discord/PROVIDERS_CHANGES_2020.05.20.md +++ b/airflow/providers/discord/PROVIDERS_CHANGES_2020.05.20.md @@ -4,6 +4,7 @@ | Commit | Committed | Subject | |:-----------------------------------------------------------------------------------------------|:------------|:---------------------------------------------------------------------------| +| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. (#8994) | | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | | [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | diff --git a/airflow/providers/discord/README.md b/airflow/providers/discord/README.md index d5369d2eec894..b64b7d50cda0f 100644 --- a/airflow/providers/discord/README.md +++ b/airflow/providers/discord/README.md @@ -93,6 +93,8 @@ All classes in Airflow 2.0 are in `airflow.providers.discord` package. + + ## Hooks @@ -114,6 +116,7 @@ All classes in Airflow 2.0 are in `airflow.providers.discord` package. | Commit | Committed | Subject | |:-----------------------------------------------------------------------------------------------|:------------|:---------------------------------------------------------------------------| +| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. 
(#8994) | | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | | [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | diff --git a/airflow/providers/docker/PROVIDERS_CHANGES_2020.05.20.md b/airflow/providers/docker/PROVIDERS_CHANGES_2020.05.20.md index f321a1a93c3fc..30de3588258b8 100644 --- a/airflow/providers/docker/PROVIDERS_CHANGES_2020.05.20.md +++ b/airflow/providers/docker/PROVIDERS_CHANGES_2020.05.20.md @@ -4,6 +4,9 @@ | Commit | Committed | Subject | |:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| [4a74cf1a3](https://github.com/apache/airflow/commit/4a74cf1a34cf20e49383f27e7cdc3ae80b9b0cde) | 2020-06-08 | Fix xcom in DockerOperator when auto_remove is used (#9173) | +| [b4b84a193](https://github.com/apache/airflow/commit/b4b84a1933d055a2803b80b990482a7257a203ff) | 2020-06-07 | Add kernel capabilities in DockerOperator(#9142) | +| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. (#8994) | | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | | [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | diff --git a/airflow/providers/docker/README.md b/airflow/providers/docker/README.md index 8d28424e3237e..c24bfb2fd2e97 100644 --- a/airflow/providers/docker/README.md +++ b/airflow/providers/docker/README.md @@ -85,6 +85,8 @@ All classes in Airflow 2.0 are in `airflow.providers.docker` package. + + ## Hooks @@ -106,6 +108,9 @@ All classes in Airflow 2.0 are in `airflow.providers.docker` package. | Commit | Committed | Subject | |:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| [4a74cf1a3](https://github.com/apache/airflow/commit/4a74cf1a34cf20e49383f27e7cdc3ae80b9b0cde) | 2020-06-08 | Fix xcom in DockerOperator when auto_remove is used (#9173) | +| [b4b84a193](https://github.com/apache/airflow/commit/b4b84a1933d055a2803b80b990482a7257a203ff) | 2020-06-07 | Add kernel capabilities in DockerOperator(#9142) | +| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. 
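Two of the Docker commits above change `DockerOperator` behavior rather than just metadata: #9173 makes XCom work when the container is auto-removed, and #9142 lets a task grant extra kernel capabilities to the container. A hedged sketch of combining the two (`cap_add` is my reading of the parameter added by #9142; the image and command are illustrative):

```python
from airflow.providers.docker.operators.docker import DockerOperator

ping_once = DockerOperator(
    task_id="ping_once",
    image="alpine:3.12",          # illustrative image
    command="ping -c 1 8.8.8.8",
    cap_add=["NET_RAW"],          # assumed parameter from #9142: extra kernel capabilities
    auto_remove=True,             # container output pushed to XCom now survives auto-removal (#9173)
    xcom_all=False,               # push only the last line of output
)
```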
(#8994) | | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | | [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | diff --git a/airflow/providers/elasticsearch/PROVIDERS_CHANGES_2020.05.20.md b/airflow/providers/elasticsearch/PROVIDERS_CHANGES_2020.05.20.md index e4da8b02f369c..04b5c896c6cde 100644 --- a/airflow/providers/elasticsearch/PROVIDERS_CHANGES_2020.05.20.md +++ b/airflow/providers/elasticsearch/PROVIDERS_CHANGES_2020.05.20.md @@ -4,6 +4,7 @@ | Commit | Committed | Subject | |:-----------------------------------------------------------------------------------------------|:------------|:------------------------------------------------------------------------| +| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. (#8994) | | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | | [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | diff --git a/airflow/providers/elasticsearch/README.md b/airflow/providers/elasticsearch/README.md index 5e06f3b2a7c9c..83bfa30204a7d 100644 --- a/airflow/providers/elasticsearch/README.md +++ b/airflow/providers/elasticsearch/README.md @@ -63,6 +63,8 @@ All classes in Airflow 2.0 are in `airflow.providers.elasticsearch` package. + + ## Hooks @@ -84,6 +86,7 @@ All classes in Airflow 2.0 are in `airflow.providers.elasticsearch` package. | Commit | Committed | Subject | |:-----------------------------------------------------------------------------------------------|:------------|:------------------------------------------------------------------------| +| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. 
(#8994) | | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | | [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | diff --git a/airflow/providers/email/PROVIDERS_CHANGES_2020.05.20.md b/airflow/providers/email/PROVIDERS_CHANGES_2020.05.20.md index f0e3f1c4c171f..7331213cee265 100644 --- a/airflow/providers/email/PROVIDERS_CHANGES_2020.05.20.md +++ b/airflow/providers/email/PROVIDERS_CHANGES_2020.05.20.md @@ -4,6 +4,7 @@ | Commit | Committed | Subject | |:-----------------------------------------------------------------------------------------------|:------------|:---------------------------------------------------------------------------| +| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. (#8994) | | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | | [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | diff --git a/airflow/providers/email/README.md b/airflow/providers/email/README.md index 5738044853a79..b89030b7b08b4 100644 --- a/airflow/providers/email/README.md +++ b/airflow/providers/email/README.md @@ -79,12 +79,15 @@ All classes in Airflow 2.0 are in `airflow.providers.email` package. + + ## Releases ### Release 2020.5.20 | Commit | Committed | Subject | |:-----------------------------------------------------------------------------------------------|:------------|:---------------------------------------------------------------------------| +| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. 
(#8994) | | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | | [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | diff --git a/airflow/providers/exasol/PROVIDERS_CHANGES_2020.05.20.md b/airflow/providers/exasol/PROVIDERS_CHANGES_2020.05.20.md index b4658cca706f6..3da135237f641 100644 --- a/airflow/providers/exasol/PROVIDERS_CHANGES_2020.05.20.md +++ b/airflow/providers/exasol/PROVIDERS_CHANGES_2020.05.20.md @@ -4,6 +4,7 @@ | Commit | Committed | Subject | |:-----------------------------------------------------------------------------------------------|:------------|:------------------------------------------------------------------------| +| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. (#8994) | | [1d36b0303](https://github.com/apache/airflow/commit/1d36b0303b8632fce6de78ca4e782ae26ee06fea) | 2020-05-23 | Fix references in docs (#8984) | | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | diff --git a/airflow/providers/exasol/README.md b/airflow/providers/exasol/README.md index 608bd0524a9bb..31e00bdfb89d7 100644 --- a/airflow/providers/exasol/README.md +++ b/airflow/providers/exasol/README.md @@ -84,6 +84,8 @@ All classes in Airflow 2.0 are in `airflow.providers.exasol` package. + + ## Hooks @@ -105,6 +107,7 @@ All classes in Airflow 2.0 are in `airflow.providers.exasol` package. | Commit | Committed | Subject | |:-----------------------------------------------------------------------------------------------|:------------|:------------------------------------------------------------------------| +| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. 
(#8994) | | [1d36b0303](https://github.com/apache/airflow/commit/1d36b0303b8632fce6de78ca4e782ae26ee06fea) | 2020-05-23 | Fix references in docs (#8984) | | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | diff --git a/airflow/providers/facebook/PROVIDERS_CHANGES_2020.05.20.md b/airflow/providers/facebook/PROVIDERS_CHANGES_2020.05.20.md index 2960030251984..385aaa8f5888f 100644 --- a/airflow/providers/facebook/PROVIDERS_CHANGES_2020.05.20.md +++ b/airflow/providers/facebook/PROVIDERS_CHANGES_2020.05.20.md @@ -4,6 +4,7 @@ | Commit | Committed | Subject | |:-----------------------------------------------------------------------------------------------|:------------|:------------------------------------------------------------------------| +| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. (#8994) | | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | | [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | diff --git a/airflow/providers/facebook/README.md b/airflow/providers/facebook/README.md index 40c2bb67ecc59..26afa6bddc2a4 100644 --- a/airflow/providers/facebook/README.md +++ b/airflow/providers/facebook/README.md @@ -70,6 +70,8 @@ All classes in Airflow 2.0 are in `airflow.providers.facebook` package. + + ## Hooks @@ -91,6 +93,7 @@ All classes in Airflow 2.0 are in `airflow.providers.facebook` package. | Commit | Committed | Subject | |:-----------------------------------------------------------------------------------------------|:------------|:------------------------------------------------------------------------| +| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. 
(#8994) | | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | | [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | diff --git a/airflow/providers/ftp/PROVIDERS_CHANGES_2020.05.20.md b/airflow/providers/ftp/PROVIDERS_CHANGES_2020.05.20.md index d26e9e92c35d1..06b6365a7403e 100644 --- a/airflow/providers/ftp/PROVIDERS_CHANGES_2020.05.20.md +++ b/airflow/providers/ftp/PROVIDERS_CHANGES_2020.05.20.md @@ -4,6 +4,7 @@ | Commit | Committed | Subject | |:-----------------------------------------------------------------------------------------------|:------------|:---------------------------------------------------------------------------| +| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. (#8994) | | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | | [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | diff --git a/airflow/providers/ftp/README.md b/airflow/providers/ftp/README.md index 3645623492327..b983ddd0f4939 100644 --- a/airflow/providers/ftp/README.md +++ b/airflow/providers/ftp/README.md @@ -64,6 +64,8 @@ All classes in Airflow 2.0 are in `airflow.providers.ftp` package. + + ## Sensors @@ -99,6 +101,7 @@ All classes in Airflow 2.0 are in `airflow.providers.ftp` package. | Commit | Committed | Subject | |:-----------------------------------------------------------------------------------------------|:------------|:---------------------------------------------------------------------------| +| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. 
(#8994) | | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | | [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | diff --git a/airflow/providers/google/PROVIDERS_CHANGES_2020.05.20.md b/airflow/providers/google/PROVIDERS_CHANGES_2020.05.20.md index 4c0012bdf11ef..15320b115e074 100644 --- a/airflow/providers/google/PROVIDERS_CHANGES_2020.05.20.md +++ b/airflow/providers/google/PROVIDERS_CHANGES_2020.05.20.md @@ -4,6 +4,26 @@ | Commit | Committed | Subject | |:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| [639972d99](https://github.com/apache/airflow/commit/639972d995d848b16a3f283576efdbde28b8fdef) | 2020-06-16 | Add support for latest Apache Beam SDK in Dataflow operators (#9323) | +| [1459970b3](https://github.com/apache/airflow/commit/1459970b3b9780e139ce029ae889fd8f69a37bc7) | 2020-06-15 | Rename CloudBuildCreateBuildOperator to CloudBuildCreateOperator (#9314) | +| [431ea3291](https://github.com/apache/airflow/commit/431ea3291c9bf236bccdf8446d753c630ada2b25) | 2020-06-15 | Resolve upstream tasks when template field is XComArg (#8805) | +| [aee6ab94e](https://github.com/apache/airflow/commit/aee6ab94eb956347ad560cfe2673bc6011074513) | 2020-06-15 | Wait for pipeline state in Data Fusion operators (#8954) | +| [fb1c8b83d](https://github.com/apache/airflow/commit/fb1c8b83d400506a16c10e3d6623a913847e5cf5) | 2020-06-10 | Add test for BQ operations using location (#9206) | +| [a26afbfa5](https://github.com/apache/airflow/commit/a26afbfa51b0981ae742c6171938b57a80aace2b) | 2020-06-10 | Make generated job_id more informative in BQ insert_job (#9203) | +| [c41192fa1](https://github.com/apache/airflow/commit/c41192fa1fc5c2b3e7b8414c59f656ab67bbef28) | 2020-06-10 | Upgrade pendulum to latest major version ~2.0 (#9184) | +| [b1c8c5ed5](https://github.com/apache/airflow/commit/b1c8c5ed5bba3a852a5446f3fdd1131b4b22637a) | 2020-06-09 | Allows using private endpoints in GKEStartPodOperator (#9169) | +| [5918efc86](https://github.com/apache/airflow/commit/5918efc86a2217caa641a6ada289eee1c21407f8) | 2020-06-05 | Add 3.8 to the test matrices (#8836) | +| [9bcdadaf7](https://github.com/apache/airflow/commit/9bcdadaf7e6e73d3d2246fbbd32a9f30a1b43ca9) | 2020-06-05 | Add 'main' param to template_fields in DataprocSubmitPySparkJobOperator (#9154) | +| [f56811dff](https://github.com/apache/airflow/commit/f56811dff3af66cbceb0418f11e00507bab58674) | 2020-06-05 | [AIRFLOW-6290] Create guide for GKE operators (#8883) | +| [76962867b](https://github.com/apache/airflow/commit/76962867b5877cf5ffd1b6004453f783c0732ab1) | 2020-06-04 | Fix sql_to_gcs hook gzip of schema_file (#9140) | +| [17adcea83](https://github.com/apache/airflow/commit/17adcea835cb7b0cf2d8da0ac7dda5549cfa3e45) | 2020-06-02 | Fix handling of subprocess error handling in s3_file_transform and gcs (#9106) | +| [789852546](https://github.com/apache/airflow/commit/78985254683c359f7444a7eb5f6ee4967c37d61f) | 2020-06-01 | Add BigQueryInsertJobOperator (#8868) | +| 
[29eb68b90](https://github.com/apache/airflow/commit/29eb68b90b5df692ac322be0939af5e7fa9b71bc) | 2020-05-31 | Create guide for Dataproc Operators (#9037) | +| [886afaf62](https://github.com/apache/airflow/commit/886afaf622602aa97f925bc3ee4fc27aa995c445) | 2020-05-29 | Add example dag and system test for LocalFilesystemToGCSOperator (#9043) | +| [a779c4dfc](https://github.com/apache/airflow/commit/a779c4dfc278d6ece480b012764ea5814dc78dee) | 2020-05-29 | add separate example dags and system tests for GCSToGoogleSheetsOperator (#9066) | +| [ada26be23](https://github.com/apache/airflow/commit/ada26be23c913796c2ae77b91cb7d113dfec75a6) | 2020-05-29 | Add correct description for dst param in LocalFilesystemToGCSOperator (#9055) | +| [81b2761b8](https://github.com/apache/airflow/commit/81b2761b86dae2d21a6ee859d49c08d46fea6def) | 2020-05-29 | add example dag and system test for GoogleSheetsToGCSOperator (#9056) | +| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. (#8994) | | [3994030ea](https://github.com/apache/airflow/commit/3994030ea678727daaf9c2bfed0ca94a096f8d2a) | 2020-05-26 | Refactor BigQuery operators (#8858) | | [cdb3f2545](https://github.com/apache/airflow/commit/cdb3f25456e49d0199cd7ccd680626dac01c9be6) | 2020-05-26 | All classes in backport providers are now importable in Airflow 1.10 (#8991) | diff --git a/airflow/providers/google/README.md b/airflow/providers/google/README.md index ea088443a0c87..c31386130fbd8 100644 --- a/airflow/providers/google/README.md +++ b/airflow/providers/google/README.md @@ -33,6 +33,9 @@ Release: 2020.5.20 - [Operators](#operators) - [New operators](#new-operators) - [Moved operators](#moved-operators) + - [Transfer operators](#transfers) + - [New transfer operators](#new-transfers) + - [Moved transfer operators](#moved-transfers) - [Sensors](#sensors) - [New sensors](#new-sensors) - [Moved sensors](#moved-sensors) @@ -100,12 +103,11 @@ For full compatibility and test status of the backport packages check | google-cloud-speech | >=0.36.3 | | google-cloud-storage | >=1.16 | | google-cloud-tasks | >=1.2.1 | -| google-cloud-texttospeech | >=0.4.0 | +| google-cloud-texttospeech | >=0.4.0,<2 | | google-cloud-translate | >=1.5.0 | | google-cloud-videointelligence | >=1.7.0 | | google-cloud-vision | >=0.35.2 | | grpcio-gcp | >=0.2.2 | -| httplib2 | ~=0.15 | | pandas-gbq | | ## Cross provider package dependencies @@ -145,7 +147,6 @@ All classes in Airflow 2.0 are in `airflow.providers.google` package. 
| New Airflow 2.0 operators: `airflow.providers.google` package | |:-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| | [ads.operators.ads.GoogleAdsListAccountsOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/ads/operators/ads.py) | -| [ads.operators.ads.GoogleAdsToGcsOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/ads/operators/ads.py) | | [cloud.operators.automl.AutoMLBatchPredictOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/automl.py) | | [cloud.operators.automl.AutoMLCreateDatasetOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/automl.py) | | [cloud.operators.automl.AutoMLDeleteDatasetOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/automl.py) | @@ -159,6 +160,7 @@ All classes in Airflow 2.0 are in `airflow.providers.google` package. | [cloud.operators.automl.AutoMLTablesListTableSpecsOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/automl.py) | | [cloud.operators.automl.AutoMLTablesUpdateDatasetOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/automl.py) | | [cloud.operators.automl.AutoMLTrainModelOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/automl.py) | +| [cloud.operators.bigquery.BigQueryInsertJobOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/bigquery.py) | | [cloud.operators.bigquery_dts.BigQueryCreateDataTransferOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/bigquery_dts.py) | | [cloud.operators.bigquery_dts.BigQueryDataTransferServiceStartTransferRunsOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/bigquery_dts.py) | | [cloud.operators.bigquery_dts.BigQueryDeleteDataTransferConfigOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/bigquery_dts.py) | @@ -173,6 +175,8 @@ All classes in Airflow 2.0 are in `airflow.providers.google` package. 
| [cloud.operators.cloud_memorystore.CloudMemorystoreListInstancesOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/cloud_memorystore.py) | | [cloud.operators.cloud_memorystore.CloudMemorystoreScaleInstanceOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/cloud_memorystore.py) | | [cloud.operators.cloud_memorystore.CloudMemorystoreUpdateInstanceOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/cloud_memorystore.py) | +| [cloud.operators.cloud_storage_transfer_service.CloudDataTransferServiceGCSToGCSOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/cloud_storage_transfer_service.py) | +| [cloud.operators.cloud_storage_transfer_service.CloudDataTransferServiceS3ToGCSOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/cloud_storage_transfer_service.py) | | [cloud.operators.datacatalog.CloudDataCatalogCreateEntryGroupOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/datacatalog.py) | | [cloud.operators.datacatalog.CloudDataCatalogCreateEntryOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/datacatalog.py) | | [cloud.operators.datacatalog.CloudDataCatalogCreateTagOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/datacatalog.py) | @@ -206,12 +210,10 @@ All classes in Airflow 2.0 are in `airflow.providers.google` package. | [cloud.operators.datafusion.CloudDataFusionUpdateInstanceOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/datafusion.py) | | [cloud.operators.dataproc.DataprocSubmitJobOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/dataproc.py) | | [cloud.operators.dataproc.DataprocUpdateClusterOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/dataproc.py) | -| [cloud.operators.facebook_ads_to_gcs.FacebookAdsReportToGcsOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/facebook_ads_to_gcs.py) | | [cloud.operators.functions.CloudFunctionInvokeFunctionOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/functions.py) | | [cloud.operators.gcs.GCSDeleteBucketOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/gcs.py) | | [cloud.operators.gcs.GCSFileTransformOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/gcs.py) | -| [cloud.operators.gcs_to_gcs.GCSSynchronizeBucketsOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/gcs_to_gcs.py) | -| [cloud.operators.gcs_to_sftp.GCSToSFTPOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/gcs_to_sftp.py) | +| [cloud.operators.gcs.GCSSynchronizeBucketsOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/gcs.py) | | [cloud.operators.life_sciences.LifeSciencesRunPipelineOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/life_sciences.py) | | 
[cloud.operators.mlengine.MLEngineCreateModelOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/mlengine.py) | | [cloud.operators.mlengine.MLEngineCreateVersionOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/mlengine.py) | @@ -221,10 +223,7 @@ All classes in Airflow 2.0 are in `airflow.providers.google` package. | [cloud.operators.mlengine.MLEngineListVersionsOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/mlengine.py) | | [cloud.operators.mlengine.MLEngineSetDefaultVersionOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/mlengine.py) | | [cloud.operators.mlengine.MLEngineTrainingCancelJobOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/mlengine.py) | -| [cloud.operators.presto_to_gcs.PrestoToGCSOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/presto_to_gcs.py) | | [cloud.operators.pubsub.PubSubPullOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/pubsub.py) | -| [cloud.operators.sftp_to_gcs.SFTPToGCSOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/sftp_to_gcs.py) | -| [cloud.operators.sheets_to_gcs.GoogleSheetsToGCSOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/sheets_to_gcs.py) | | [cloud.operators.stackdriver.StackdriverDeleteAlertOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/stackdriver.py) | | [cloud.operators.stackdriver.StackdriverDeleteNotificationChannelOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/stackdriver.py) | | [cloud.operators.stackdriver.StackdriverDisableAlertPoliciesOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/stackdriver.py) | @@ -248,6 +247,7 @@ All classes in Airflow 2.0 are in `airflow.providers.google` package. | [cloud.operators.tasks.CloudTasksTaskGetOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/tasks.py) | | [cloud.operators.tasks.CloudTasksTaskRunOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/tasks.py) | | [cloud.operators.tasks.CloudTasksTasksListOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/tasks.py) | +| [cloud.operators.vision.CloudVisionAddProductToProductSetOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/vision.py) | | [firebase.operators.firestore.CloudFirestoreExportDatabaseOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/firebase/operators/firestore.py) | | [marketing_platform.operators.analytics.GoogleAnalyticsDataImportUploadOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/marketing_platform/operators/analytics.py) | | [marketing_platform.operators.analytics.GoogleAnalyticsDeletePreviousDataUploadsOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/marketing_platform/operators/analytics.py) | @@ -271,7 +271,6 @@ All classes in Airflow 2.0 are in `airflow.providers.google` package. 
| [marketing_platform.operators.display_video.GoogleDisplayVideo360UploadLineItemsOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/marketing_platform/operators/display_video.py) | | [marketing_platform.operators.search_ads.GoogleSearchAdsDownloadReportOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/marketing_platform/operators/search_ads.py) | | [marketing_platform.operators.search_ads.GoogleSearchAdsInsertReportOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/marketing_platform/operators/search_ads.py) | -| [suite.operators.gcs_to_sheets.GCSToGoogleSheetsOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/suite/operators/gcs_to_sheets.py) | | [suite.operators.sheets.GoogleSheetsCreateSpreadsheetOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/suite/operators/sheets.py) | @@ -280,14 +279,27 @@ All classes in Airflow 2.0 are in `airflow.providers.google` package. | Airflow 2.0 operators: `airflow.providers.google` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | |:---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| [cloud.operators.adls_to_gcs.ADLSToGCSOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/adls_to_gcs.py) | [contrib.operators.adls_to_gcs.AdlsToGoogleCloudStorageOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/adls_to_gcs.py) | +| [cloud.operators.bigquery.BigQueryCheckOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/bigquery.py) | [contrib.operators.bigquery_check_operator.BigQueryCheckOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/bigquery_check_operator.py) | +| [cloud.operators.bigquery.BigQueryCreateEmptyDatasetOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/bigquery.py) | [contrib.operators.bigquery_operator.BigQueryCreateEmptyDatasetOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/bigquery_operator.py) | +| [cloud.operators.bigquery.BigQueryCreateEmptyTableOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/bigquery.py) | [contrib.operators.bigquery_operator.BigQueryCreateEmptyTableOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/bigquery_operator.py) | +| [cloud.operators.bigquery.BigQueryCreateExternalTableOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/bigquery.py) | [contrib.operators.bigquery_operator.BigQueryCreateExternalTableOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/bigquery_operator.py) | +| [cloud.operators.bigquery.BigQueryDeleteDatasetOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/bigquery.py) | 
[contrib.operators.bigquery_operator.BigQueryDeleteDatasetOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/bigquery_operator.py) | +| [cloud.operators.bigquery.BigQueryDeleteTableOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/bigquery.py) | [contrib.operators.bigquery_table_delete_operator.BigQueryTableDeleteOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/bigquery_table_delete_operator.py) | +| [cloud.operators.bigquery.BigQueryExecuteQueryOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/bigquery.py) | [contrib.operators.bigquery_operator.BigQueryOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/bigquery_operator.py) | +| [cloud.operators.bigquery.BigQueryGetDataOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/bigquery.py) | [contrib.operators.bigquery_get_data.BigQueryGetDataOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/bigquery_get_data.py) | +| [cloud.operators.bigquery.BigQueryGetDatasetOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/bigquery.py) | [contrib.operators.bigquery_operator.BigQueryGetDatasetOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/bigquery_operator.py) | +| [cloud.operators.bigquery.BigQueryGetDatasetTablesOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/bigquery.py) | [contrib.operators.bigquery_operator.BigQueryGetDatasetTablesOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/bigquery_operator.py) | +| [cloud.operators.bigquery.BigQueryIntervalCheckOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/bigquery.py) | [contrib.operators.bigquery_check_operator.BigQueryIntervalCheckOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/bigquery_check_operator.py) | +| [cloud.operators.bigquery.BigQueryPatchDatasetOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/bigquery.py) | [contrib.operators.bigquery_operator.BigQueryPatchDatasetOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/bigquery_operator.py) | +| [cloud.operators.bigquery.BigQueryUpdateDatasetOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/bigquery.py) | [contrib.operators.bigquery_operator.BigQueryUpdateDatasetOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/bigquery_operator.py) | +| [cloud.operators.bigquery.BigQueryUpsertTableOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/bigquery.py) | [contrib.operators.bigquery_operator.BigQueryUpsertTableOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/bigquery_operator.py) | +| [cloud.operators.bigquery.BigQueryValueCheckOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/bigquery.py) | 
[contrib.operators.bigquery_check_operator.BigQueryValueCheckOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/bigquery_check_operator.py) | | [cloud.operators.bigtable.BigtableCreateInstanceOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/bigtable.py) | [contrib.operators.gcp_bigtable_operator.BigtableInstanceCreateOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_bigtable_operator.py) | | [cloud.operators.bigtable.BigtableCreateTableOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/bigtable.py) | [contrib.operators.gcp_bigtable_operator.BigtableTableCreateOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_bigtable_operator.py) | | [cloud.operators.bigtable.BigtableDeleteInstanceOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/bigtable.py) | [contrib.operators.gcp_bigtable_operator.BigtableInstanceDeleteOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_bigtable_operator.py) | | [cloud.operators.bigtable.BigtableDeleteTableOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/bigtable.py) | [contrib.operators.gcp_bigtable_operator.BigtableTableDeleteOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_bigtable_operator.py) | | [cloud.operators.bigtable.BigtableUpdateClusterOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/bigtable.py) | [contrib.operators.gcp_bigtable_operator.BigtableClusterUpdateOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_bigtable_operator.py) | -| [cloud.operators.cassandra_to_gcs.CassandraToGCSOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/cassandra_to_gcs.py) | [contrib.operators.cassandra_to_gcs.CassandraToGoogleCloudStorageOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/cassandra_to_gcs.py) | -| [cloud.operators.cloud_build.CloudBuildCreateBuildOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/cloud_build.py) | [contrib.operators.gcp_cloud_build_operator.CloudBuildCreateOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_cloud_build_operator.py) | +| [cloud.operators.cloud_build.CloudBuildCreateBuildOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/cloud_build.py) | [contrib.operators.gcp_cloud_build_operator.CloudBuildCreateBuildOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_cloud_build_operator.py) | | [cloud.operators.cloud_sql.CloudSQLBaseOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/cloud_sql.py) | [contrib.operators.gcp_sql_operator.CloudSqlBaseOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_sql_operator.py) | | [cloud.operators.cloud_sql.CloudSQLCreateInstanceDatabaseOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/cloud_sql.py) | 
[contrib.operators.gcp_sql_operator.CloudSqlInstanceDatabaseCreateOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_sql_operator.py) | | [cloud.operators.cloud_sql.CloudSQLCreateInstanceOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/cloud_sql.py) | [contrib.operators.gcp_sql_operator.CloudSqlInstanceCreateOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_sql_operator.py) | @@ -301,12 +313,10 @@ All classes in Airflow 2.0 are in `airflow.providers.google` package. | [cloud.operators.cloud_storage_transfer_service.CloudDataTransferServiceCancelOperationOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/cloud_storage_transfer_service.py) | [contrib.operators.gcp_transfer_operator.GcpTransferServiceOperationCancelOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_transfer_operator.py) | | [cloud.operators.cloud_storage_transfer_service.CloudDataTransferServiceCreateJobOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/cloud_storage_transfer_service.py) | [contrib.operators.gcp_transfer_operator.GcpTransferServiceJobCreateOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_transfer_operator.py) | | [cloud.operators.cloud_storage_transfer_service.CloudDataTransferServiceDeleteJobOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/cloud_storage_transfer_service.py) | [contrib.operators.gcp_transfer_operator.GcpTransferServiceJobDeleteOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_transfer_operator.py) | -| [cloud.operators.cloud_storage_transfer_service.CloudDataTransferServiceGCSToGCSOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/cloud_storage_transfer_service.py) | [contrib.operators.gcp_transfer_operator.GoogleCloudStorageToGoogleCloudStorageTransferOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_transfer_operator.py) | | [cloud.operators.cloud_storage_transfer_service.CloudDataTransferServiceGetOperationOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/cloud_storage_transfer_service.py) | [contrib.operators.gcp_transfer_operator.GcpTransferServiceOperationGetOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_transfer_operator.py) | | [cloud.operators.cloud_storage_transfer_service.CloudDataTransferServiceListOperationsOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/cloud_storage_transfer_service.py) | [contrib.operators.gcp_transfer_operator.GcpTransferServiceOperationsListOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_transfer_operator.py) | | [cloud.operators.cloud_storage_transfer_service.CloudDataTransferServicePauseOperationOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/cloud_storage_transfer_service.py) | [contrib.operators.gcp_transfer_operator.GcpTransferServiceOperationPauseOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_transfer_operator.py) | | 
[cloud.operators.cloud_storage_transfer_service.CloudDataTransferServiceResumeOperationOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/cloud_storage_transfer_service.py) | [contrib.operators.gcp_transfer_operator.GcpTransferServiceOperationResumeOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_transfer_operator.py) | -| [cloud.operators.cloud_storage_transfer_service.CloudDataTransferServiceS3ToGCSOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/cloud_storage_transfer_service.py) | [contrib.operators.s3_to_gcs_transfer_operator.CloudDataTransferServiceS3ToGCSOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/s3_to_gcs_transfer_operator.py) | | [cloud.operators.cloud_storage_transfer_service.CloudDataTransferServiceUpdateJobOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/cloud_storage_transfer_service.py) | [contrib.operators.gcp_transfer_operator.GcpTransferServiceJobUpdateOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_transfer_operator.py) | | [cloud.operators.compute.ComputeEngineBaseOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/compute.py) | [contrib.operators.gcp_compute_operator.GceBaseOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_compute_operator.py) | | [cloud.operators.compute.ComputeEngineCopyInstanceTemplateOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/compute.py) | [contrib.operators.gcp_compute_operator.GceInstanceTemplateCopyOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_compute_operator.py) | @@ -368,29 +378,22 @@ All classes in Airflow 2.0 are in `airflow.providers.google` package. 
| [cloud.operators.gcs.GCSDeleteObjectsOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/gcs.py) | [contrib.operators.gcs_delete_operator.GoogleCloudStorageDeleteOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcs_delete_operator.py) | | [cloud.operators.gcs.GCSListObjectsOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/gcs.py) | [contrib.operators.gcs_list_operator.GoogleCloudStorageListOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcs_list_operator.py) | | [cloud.operators.gcs.GCSObjectCreateAclEntryOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/gcs.py) | [contrib.operators.gcs_acl_operator.GoogleCloudStorageObjectCreateAclEntryOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcs_acl_operator.py) | -| [cloud.operators.gcs.GCSToLocalOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/gcs.py) | [contrib.operators.gcs_download_operator.GoogleCloudStorageDownloadOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcs_download_operator.py) | -| [cloud.operators.gcs_to_gcs.GCSToGCSOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/gcs_to_gcs.py) | [contrib.operators.gcs_to_gcs.GoogleCloudStorageToGoogleCloudStorageOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcs_to_gcs.py) | | [cloud.operators.kubernetes_engine.GKECreateClusterOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/kubernetes_engine.py) | [contrib.operators.gcp_container_operator.GKEClusterCreateOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_container_operator.py) | | [cloud.operators.kubernetes_engine.GKEDeleteClusterOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/kubernetes_engine.py) | [contrib.operators.gcp_container_operator.GKEClusterDeleteOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_container_operator.py) | | [cloud.operators.kubernetes_engine.GKEStartPodOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/kubernetes_engine.py) | [contrib.operators.gcp_container_operator.GKEPodOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_container_operator.py) | -| [cloud.operators.local_to_gcs.LocalFilesystemToGCSOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/local_to_gcs.py) | [contrib.operators.file_to_gcs.FileToGoogleCloudStorageOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/file_to_gcs.py) | | [cloud.operators.mlengine.MLEngineManageModelOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/mlengine.py) | [contrib.operators.mlengine_operator.MLEngineModelOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/mlengine_operator.py) | | [cloud.operators.mlengine.MLEngineManageVersionOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/mlengine.py) | 
[contrib.operators.mlengine_operator.MLEngineVersionOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/mlengine_operator.py) | | [cloud.operators.mlengine.MLEngineStartBatchPredictionJobOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/mlengine.py) | [contrib.operators.mlengine_operator.MLEngineBatchPredictionOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/mlengine_operator.py) | | [cloud.operators.mlengine.MLEngineStartTrainingJobOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/mlengine.py) | [contrib.operators.mlengine_operator.MLEngineTrainingOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/mlengine_operator.py) | -| [cloud.operators.mssql_to_gcs.MSSQLToGCSOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/mssql_to_gcs.py) | [contrib.operators.mssql_to_gcs.MsSqlToGoogleCloudStorageOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/mssql_to_gcs.py) | -| [cloud.operators.mysql_to_gcs.MySQLToGCSOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/mysql_to_gcs.py) | [contrib.operators.mysql_to_gcs.MySqlToGoogleCloudStorageOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/mysql_to_gcs.py) | | [cloud.operators.natural_language.CloudNaturalLanguageAnalyzeEntitiesOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/natural_language.py) | [contrib.operators.gcp_natural_language_operator.CloudLanguageAnalyzeEntitiesOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_natural_language_operator.py) | | [cloud.operators.natural_language.CloudNaturalLanguageAnalyzeEntitySentimentOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/natural_language.py) | [contrib.operators.gcp_natural_language_operator.CloudLanguageAnalyzeEntitySentimentOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_natural_language_operator.py) | | [cloud.operators.natural_language.CloudNaturalLanguageAnalyzeSentimentOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/natural_language.py) | [contrib.operators.gcp_natural_language_operator.CloudLanguageAnalyzeSentimentOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_natural_language_operator.py) | | [cloud.operators.natural_language.CloudNaturalLanguageClassifyTextOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/natural_language.py) | [contrib.operators.gcp_natural_language_operator.CloudLanguageClassifyTextOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_natural_language_operator.py) | -| [cloud.operators.postgres_to_gcs.PostgresToGCSOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/postgres_to_gcs.py) | [contrib.operators.postgres_to_gcs_operator.PostgresToGoogleCloudStorageOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/postgres_to_gcs_operator.py) | | 
[cloud.operators.pubsub.PubSubCreateSubscriptionOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/pubsub.py) | [contrib.operators.pubsub_operator.PubSubSubscriptionCreateOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/pubsub_operator.py) | | [cloud.operators.pubsub.PubSubCreateTopicOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/pubsub.py) | [contrib.operators.pubsub_operator.PubSubTopicCreateOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/pubsub_operator.py) | | [cloud.operators.pubsub.PubSubDeleteSubscriptionOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/pubsub.py) | [contrib.operators.pubsub_operator.PubSubSubscriptionDeleteOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/pubsub_operator.py) | | [cloud.operators.pubsub.PubSubDeleteTopicOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/pubsub.py) | [contrib.operators.pubsub_operator.PubSubTopicDeleteOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/pubsub_operator.py) | | [cloud.operators.pubsub.PubSubPublishMessageOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/pubsub.py) | [contrib.operators.pubsub_operator.PubSubPublishOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/pubsub_operator.py) | -| [cloud.operators.s3_to_gcs.S3ToGCSOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/s3_to_gcs.py) | [contrib.operators.s3_to_gcs_operator.S3ToGCSOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/s3_to_gcs_operator.py) | | [cloud.operators.spanner.SpannerDeleteDatabaseInstanceOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/spanner.py) | [contrib.operators.gcp_spanner_operator.CloudSpannerInstanceDatabaseDeleteOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_spanner_operator.py) | | [cloud.operators.spanner.SpannerDeleteInstanceOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/spanner.py) | [contrib.operators.gcp_spanner_operator.CloudSpannerInstanceDeleteOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_spanner_operator.py) | | [cloud.operators.spanner.SpannerDeployDatabaseInstanceOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/spanner.py) | [contrib.operators.gcp_spanner_operator.CloudSpannerInstanceDatabaseDeployOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_spanner_operator.py) | @@ -398,14 +401,12 @@ All classes in Airflow 2.0 are in `airflow.providers.google` package. 
| [cloud.operators.spanner.SpannerQueryDatabaseInstanceOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/spanner.py) | [contrib.operators.gcp_spanner_operator.CloudSpannerInstanceDatabaseQueryOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_spanner_operator.py) | | [cloud.operators.spanner.SpannerUpdateDatabaseInstanceOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/spanner.py) | [contrib.operators.gcp_spanner_operator.CloudSpannerInstanceDatabaseUpdateOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_spanner_operator.py) | | [cloud.operators.speech_to_text.CloudSpeechToTextRecognizeSpeechOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/speech_to_text.py) | [contrib.operators.gcp_speech_to_text_operator.GcpSpeechToTextRecognizeSpeechOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_speech_to_text_operator.py) | -| [cloud.operators.sql_to_gcs.BaseSQLToGCSOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/sql_to_gcs.py) | [contrib.operators.sql_to_gcs.BaseSQLToGoogleCloudStorageOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/sql_to_gcs.py) | | [cloud.operators.text_to_speech.CloudTextToSpeechSynthesizeOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/text_to_speech.py) | [contrib.operators.gcp_text_to_speech_operator.GcpTextToSpeechSynthesizeOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_text_to_speech_operator.py) | | [cloud.operators.translate.CloudTranslateTextOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/translate.py) | [contrib.operators.gcp_translate_operator.CloudTranslateTextOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_translate_operator.py) | | [cloud.operators.translate_speech.CloudTranslateSpeechOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/translate_speech.py) | [contrib.operators.gcp_translate_speech_operator.CloudTranslateSpeechOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_translate_speech_operator.py) | | [cloud.operators.video_intelligence.CloudVideoIntelligenceDetectVideoExplicitContentOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/video_intelligence.py) | [contrib.operators.gcp_video_intelligence_operator.CloudVideoIntelligenceDetectVideoExplicitContentOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_video_intelligence_operator.py) | | [cloud.operators.video_intelligence.CloudVideoIntelligenceDetectVideoLabelsOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/video_intelligence.py) | [contrib.operators.gcp_video_intelligence_operator.CloudVideoIntelligenceDetectVideoLabelsOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_video_intelligence_operator.py) | | 
[cloud.operators.video_intelligence.CloudVideoIntelligenceDetectVideoShotsOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/video_intelligence.py) | [contrib.operators.gcp_video_intelligence_operator.CloudVideoIntelligenceDetectVideoShotsOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_video_intelligence_operator.py) | -| [cloud.operators.vision.CloudVisionAddProductToProductSetOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/vision.py) | [contrib.operators.gcp_vision_operator.CloudVisionAddProductToProductSetOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_vision_operator.py) | | [cloud.operators.vision.CloudVisionCreateProductOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/vision.py) | [contrib.operators.gcp_vision_operator.CloudVisionProductCreateOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_vision_operator.py) | | [cloud.operators.vision.CloudVisionCreateProductSetOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/vision.py) | [contrib.operators.gcp_vision_operator.CloudVisionProductSetCreateOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_vision_operator.py) | | [cloud.operators.vision.CloudVisionCreateReferenceImageOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/vision.py) | [contrib.operators.gcp_vision_operator.CloudVisionReferenceImageCreateOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_vision_operator.py) | @@ -421,7 +422,44 @@ All classes in Airflow 2.0 are in `airflow.providers.google` package. 
| [cloud.operators.vision.CloudVisionTextDetectOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/vision.py) | [contrib.operators.gcp_vision_operator.CloudVisionDetectDocumentTextOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_vision_operator.py) | | [cloud.operators.vision.CloudVisionUpdateProductOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/vision.py) | [contrib.operators.gcp_vision_operator.CloudVisionProductUpdateOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_vision_operator.py) | | [cloud.operators.vision.CloudVisionUpdateProductSetOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/vision.py) | [contrib.operators.gcp_vision_operator.CloudVisionProductSetUpdateOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_vision_operator.py) | -| [suite.operators.gcs_to_gdrive.GCSToGoogleDriveOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/suite/operators/gcs_to_gdrive.py) | [contrib.operators.gcs_to_gdrive_operator.GCSToGoogleDriveOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcs_to_gdrive_operator.py) | + + + + + +### New transfer operators + +| New Airflow 2.0 transfers: `airflow.providers.google` package | +|:------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| [ads.transfers.ads_to_gcs.GoogleAdsToGcsOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/ads/transfers/ads_to_gcs.py) | +| [cloud.transfers.facebook_ads_to_gcs.FacebookAdsReportToGcsOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/transfers/facebook_ads_to_gcs.py) | +| [cloud.transfers.gcs_to_local.GCSToLocalFilesystemOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/transfers/gcs_to_local.py) | +| [cloud.transfers.gcs_to_sftp.GCSToSFTPOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/transfers/gcs_to_sftp.py) | +| [cloud.transfers.presto_to_gcs.PrestoToGCSOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/transfers/presto_to_gcs.py) | +| [cloud.transfers.sftp_to_gcs.SFTPToGCSOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/transfers/sftp_to_gcs.py) | +| [cloud.transfers.sheets_to_gcs.GoogleSheetsToGCSOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/transfers/sheets_to_gcs.py) | +| [suite.transfers.gcs_to_sheets.GCSToGoogleSheetsOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/suite/transfers/gcs_to_sheets.py) | + + + +### Moved transfer operators + +| Airflow 2.0 transfers: `airflow.providers.google` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | 
+|:----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|:---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| [cloud.transfers.adls_to_gcs.ADLSToGCSOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/transfers/adls_to_gcs.py) | [contrib.operators.adls_to_gcs.AdlsToGoogleCloudStorageOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/adls_to_gcs.py) | +| [cloud.transfers.bigquery_to_bigquery.BigQueryToBigQueryOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/transfers/bigquery_to_bigquery.py) | [contrib.operators.bigquery_to_bigquery.BigQueryToBigQueryOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/bigquery_to_bigquery.py) | +| [cloud.transfers.bigquery_to_gcs.BigQueryToGCSOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/transfers/bigquery_to_gcs.py) | [contrib.operators.bigquery_to_gcs.BigQueryToCloudStorageOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/bigquery_to_gcs.py) | +| [cloud.transfers.bigquery_to_mysql.BigQueryToMySqlOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/transfers/bigquery_to_mysql.py) | [contrib.operators.bigquery_to_mysql_operator.BigQueryToMySqlOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/bigquery_to_mysql_operator.py) | +| [cloud.transfers.cassandra_to_gcs.CassandraToGCSOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/transfers/cassandra_to_gcs.py) | [contrib.operators.cassandra_to_gcs.CassandraToGoogleCloudStorageOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/cassandra_to_gcs.py) | +| [cloud.transfers.gcs_to_bigquery.GCSToBigQueryOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/transfers/gcs_to_bigquery.py) | [contrib.operators.gcs_to_bq.GoogleCloudStorageToBigQueryOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcs_to_bq.py) | +| [cloud.transfers.gcs_to_gcs.GCSToGCSOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/transfers/gcs_to_gcs.py) | [contrib.operators.gcs_to_gcs.GoogleCloudStorageToGoogleCloudStorageOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcs_to_gcs.py) | +| [cloud.transfers.local_to_gcs.LocalFilesystemToGCSOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/transfers/local_to_gcs.py) | [contrib.operators.file_to_gcs.FileToGoogleCloudStorageOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/file_to_gcs.py) | +| [cloud.transfers.mssql_to_gcs.MSSQLToGCSOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/transfers/mssql_to_gcs.py) | [contrib.operators.mssql_to_gcs.MsSqlToGoogleCloudStorageOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/mssql_to_gcs.py) | +| 
[cloud.transfers.mysql_to_gcs.MySQLToGCSOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/transfers/mysql_to_gcs.py) | [contrib.operators.mysql_to_gcs.MySqlToGoogleCloudStorageOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/mysql_to_gcs.py) | +| [cloud.transfers.postgres_to_gcs.PostgresToGCSOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/transfers/postgres_to_gcs.py) | [contrib.operators.postgres_to_gcs_operator.PostgresToGoogleCloudStorageOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/postgres_to_gcs_operator.py) | +| [cloud.transfers.s3_to_gcs.S3ToGCSOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/transfers/s3_to_gcs.py) | [contrib.operators.s3_to_gcs_operator.S3ToGCSOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/s3_to_gcs_operator.py) | +| [cloud.transfers.sql_to_gcs.BaseSQLToGCSOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/transfers/sql_to_gcs.py) | [contrib.operators.sql_to_gcs.BaseSQLToGoogleCloudStorageOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/sql_to_gcs.py) | +| [suite.transfers.gcs_to_gdrive.GCSToGoogleDriveOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/suite/transfers/gcs_to_gdrive.py) | [contrib.operators.gcs_to_gdrive_operator.GCSToGoogleDriveOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcs_to_gdrive_operator.py) | @@ -444,6 +482,7 @@ All classes in Airflow 2.0 are in `airflow.providers.google` package. | Airflow 2.0 sensors: `airflow.providers.google` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | |:---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|:----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| [cloud.sensors.bigquery.BigQueryTableExistenceSensor](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/sensors/bigquery.py) | [contrib.sensors.bigquery_sensor.BigQueryTableSensor](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/sensors/bigquery_sensor.py) | | [cloud.sensors.bigtable.BigtableTableReplicationCompletedSensor](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/sensors/bigtable.py) | [contrib.operators.gcp_bigtable_operator.BigtableTableWaitForReplicationSensor](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_bigtable_operator.py) | | [cloud.sensors.cloud_storage_transfer_service.CloudDataTransferServiceJobStatusSensor](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/sensors/cloud_storage_transfer_service.py) | [contrib.sensors.gcp_transfer_sensor.GCPTransferServiceWaitForJobStatusSensor](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/sensors/gcp_transfer_sensor.py) | | [cloud.sensors.gcs.GCSObjectExistenceSensor](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/sensors/gcs.py) | 
[contrib.sensors.gcs_sensor.GoogleCloudStorageObjectSensor](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/sensors/gcs_sensor.py) | @@ -482,6 +521,7 @@ All classes in Airflow 2.0 are in `airflow.providers.google` package. | Airflow 2.0 hooks: `airflow.providers.google` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | |:------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|:---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| [cloud.hooks.bigquery.BigQueryHook](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/hooks/bigquery.py) | [contrib.hooks.bigquery_hook.BigQueryHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/bigquery_hook.py) | | [cloud.hooks.bigtable.BigtableHook](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/hooks/bigtable.py) | [contrib.hooks.gcp_bigtable_hook.BigtableHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/gcp_bigtable_hook.py) | | [cloud.hooks.cloud_build.CloudBuildHook](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/hooks/cloud_build.py) | [contrib.hooks.gcp_cloud_build_hook.CloudBuildHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/gcp_cloud_build_hook.py) | | [cloud.hooks.cloud_sql.CloudSQLDatabaseHook](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/hooks/cloud_sql.py) | [contrib.hooks.gcp_sql_hook.CloudSqlDatabaseHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/gcp_sql_hook.py) | @@ -531,6 +571,26 @@ All classes in Airflow 2.0 are in `airflow.providers.google` package. 
| Commit | Committed | Subject | |:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| [639972d99](https://github.com/apache/airflow/commit/639972d995d848b16a3f283576efdbde28b8fdef) | 2020-06-16 | Add support for latest Apache Beam SDK in Dataflow operators (#9323) | +| [1459970b3](https://github.com/apache/airflow/commit/1459970b3b9780e139ce029ae889fd8f69a37bc7) | 2020-06-15 | Rename CloudBuildCreateBuildOperator to CloudBuildCreateOperator (#9314) | +| [431ea3291](https://github.com/apache/airflow/commit/431ea3291c9bf236bccdf8446d753c630ada2b25) | 2020-06-15 | Resolve upstream tasks when template field is XComArg (#8805) | +| [aee6ab94e](https://github.com/apache/airflow/commit/aee6ab94eb956347ad560cfe2673bc6011074513) | 2020-06-15 | Wait for pipeline state in Data Fusion operators (#8954) | +| [fb1c8b83d](https://github.com/apache/airflow/commit/fb1c8b83d400506a16c10e3d6623a913847e5cf5) | 2020-06-10 | Add test for BQ operations using location (#9206) | +| [a26afbfa5](https://github.com/apache/airflow/commit/a26afbfa51b0981ae742c6171938b57a80aace2b) | 2020-06-10 | Make generated job_id more informative in BQ insert_job (#9203) | +| [c41192fa1](https://github.com/apache/airflow/commit/c41192fa1fc5c2b3e7b8414c59f656ab67bbef28) | 2020-06-10 | Upgrade pendulum to latest major version ~2.0 (#9184) | +| [b1c8c5ed5](https://github.com/apache/airflow/commit/b1c8c5ed5bba3a852a5446f3fdd1131b4b22637a) | 2020-06-09 | Allows using private endpoints in GKEStartPodOperator (#9169) | +| [5918efc86](https://github.com/apache/airflow/commit/5918efc86a2217caa641a6ada289eee1c21407f8) | 2020-06-05 | Add 3.8 to the test matrices (#8836) | +| [9bcdadaf7](https://github.com/apache/airflow/commit/9bcdadaf7e6e73d3d2246fbbd32a9f30a1b43ca9) | 2020-06-05 | Add 'main' param to template_fields in DataprocSubmitPySparkJobOperator (#9154) | +| [f56811dff](https://github.com/apache/airflow/commit/f56811dff3af66cbceb0418f11e00507bab58674) | 2020-06-05 | [AIRFLOW-6290] Create guide for GKE operators (#8883) | +| [76962867b](https://github.com/apache/airflow/commit/76962867b5877cf5ffd1b6004453f783c0732ab1) | 2020-06-04 | Fix sql_to_gcs hook gzip of schema_file (#9140) | +| [17adcea83](https://github.com/apache/airflow/commit/17adcea835cb7b0cf2d8da0ac7dda5549cfa3e45) | 2020-06-02 | Fix handling of subprocess error handling in s3_file_transform and gcs (#9106) | +| [789852546](https://github.com/apache/airflow/commit/78985254683c359f7444a7eb5f6ee4967c37d61f) | 2020-06-01 | Add BigQueryInsertJobOperator (#8868) | +| [29eb68b90](https://github.com/apache/airflow/commit/29eb68b90b5df692ac322be0939af5e7fa9b71bc) | 2020-05-31 | Create guide for Dataproc Operators (#9037) | +| [886afaf62](https://github.com/apache/airflow/commit/886afaf622602aa97f925bc3ee4fc27aa995c445) | 2020-05-29 | Add example dag and system test for LocalFilesystemToGCSOperator (#9043) | +| [a779c4dfc](https://github.com/apache/airflow/commit/a779c4dfc278d6ece480b012764ea5814dc78dee) | 2020-05-29 | add separate example dags and system tests for GCSToGoogleSheetsOperator (#9066) | +| [ada26be23](https://github.com/apache/airflow/commit/ada26be23c913796c2ae77b91cb7d113dfec75a6) | 2020-05-29 | Add correct description for dst param in LocalFilesystemToGCSOperator (#9055) | +| 
[81b2761b8](https://github.com/apache/airflow/commit/81b2761b86dae2d21a6ee859d49c08d46fea6def) | 2020-05-29 | add example dag and system test for GoogleSheetsToGCSOperator (#9056) | +| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. (#8994) | | [3994030ea](https://github.com/apache/airflow/commit/3994030ea678727daaf9c2bfed0ca94a096f8d2a) | 2020-05-26 | Refactor BigQuery operators (#8858) | | [cdb3f2545](https://github.com/apache/airflow/commit/cdb3f25456e49d0199cd7ccd680626dac01c9be6) | 2020-05-26 | All classes in backport providers are now importable in Airflow 1.10 (#8991) | diff --git a/airflow/providers/google/ads/example_dags/example_ads.py b/airflow/providers/google/ads/example_dags/example_ads.py index a22738a98923a..f1682e78958a1 100644 --- a/airflow/providers/google/ads/example_dags/example_ads.py +++ b/airflow/providers/google/ads/example_dags/example_ads.py @@ -21,7 +21,8 @@ import os from airflow import models -from airflow.providers.google.ads.operators.ads import GoogleAdsListAccountsOperator, GoogleAdsToGcsOperator +from airflow.providers.google.ads.operators.ads import GoogleAdsListAccountsOperator +from airflow.providers.google.ads.transfers.ads_to_gcs import GoogleAdsToGcsOperator from airflow.utils import dates # [START howto_google_ads_env_variables] diff --git a/airflow/providers/google/ads/operators/ads.py b/airflow/providers/google/ads/operators/ads.py index 8876ecf1fe6c9..950dc1f2095d3 100644 --- a/airflow/providers/google/ads/operators/ads.py +++ b/airflow/providers/google/ads/operators/ads.py @@ -19,9 +19,8 @@ This module contains Google Ad to GCS operators. """ import csv -from operator import attrgetter from tempfile import NamedTemporaryFile -from typing import Dict, List +from typing import Dict from airflow.models import BaseOperator from airflow.providers.google.ads.hooks.ads import GoogleAdsHook @@ -29,99 +28,6 @@ from airflow.utils.decorators import apply_defaults -class GoogleAdsToGcsOperator(BaseOperator): - """ - Fetches the daily results from the Google Ads API for 1-n clients - Converts and saves the data as a temporary CSV file - Uploads the CSV to Google Cloud Storage - - .. seealso:: - For more information on the Google Ads API, take a look at the API docs: - https://developers.google.com/google-ads/api/docs/start - - .. seealso:: - For more information on how to use this operator, take a look at the guide: - :ref:`howto/operator:GoogleAdsToGcsOperator` - - :param client_ids: Google Ads client IDs to query - :type client_ids: List[str] - :param query: Google Ads Query Language API query - :type query: str - :param attributes: List of Google Ads Row attributes to extract - :type attributes: List[str] - :param bucket: The GCS bucket to upload to - :type bucket: str - :param obj: GCS path to save the object. Must be the full file path (ex. `path/to/file.txt`) - :type obj: str - :param gcp_conn_id: Airflow Google Cloud Platform connection ID - :type gcp_conn_id: str - :param google_ads_conn_id: Airflow Google Ads connection ID - :type google_ads_conn_id: str - :param page_size: The number of results per API page request. 
Max 10,000 - :type page_size: int - :param gzip: Option to compress local file or file data for upload - :type gzip: bool - """ - - template_fields = ("client_ids", "query", "attributes", "bucket", "obj") - - @apply_defaults - def __init__( - self, - client_ids: List[str], - query: str, - attributes: List[str], - bucket: str, - obj: str, - gcp_conn_id: str = "google_cloud_default", - google_ads_conn_id: str = "google_ads_default", - page_size: int = 10000, - gzip: bool = False, - *args, - **kwargs, - ) -> None: - super().__init__(*args, **kwargs) - self.client_ids = client_ids - self.query = query - self.attributes = attributes - self.bucket = bucket - self.obj = obj - self.gcp_conn_id = gcp_conn_id - self.google_ads_conn_id = google_ads_conn_id - self.page_size = page_size - self.gzip = gzip - - def execute(self, context: Dict): - service = GoogleAdsHook( - gcp_conn_id=self.gcp_conn_id, - google_ads_conn_id=self.google_ads_conn_id - ) - rows = service.search( - client_ids=self.client_ids, query=self.query, page_size=self.page_size - ) - - try: - getter = attrgetter(*self.attributes) - converted_rows = [getter(row) for row in rows] - except Exception as e: - self.log.error("An error occurred in converting the Google Ad Rows. \n Error %s", e) - raise - - with NamedTemporaryFile("w", suffix=".csv") as csvfile: - writer = csv.writer(csvfile) - writer.writerows(converted_rows) - csvfile.flush() - - hook = GCSHook(gcp_conn_id=self.gcp_conn_id) - hook.upload( - bucket_name=self.bucket, - object_name=self.obj, - filename=csvfile.name, - gzip=self.gzip, - ) - self.log.info("%s uploaded to GCS", self.obj) - - class GoogleAdsListAccountsOperator(BaseOperator): """ Saves list of customers on GCS in form of a csv file. diff --git a/airflow/providers/google/ads/transfers/__init__.py b/airflow/providers/google/ads/transfers/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/airflow/providers/google/ads/transfers/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/airflow/providers/google/ads/transfers/ads_to_gcs.py b/airflow/providers/google/ads/transfers/ads_to_gcs.py new file mode 100644 index 0000000000000..90a57b2713043 --- /dev/null +++ b/airflow/providers/google/ads/transfers/ads_to_gcs.py @@ -0,0 +1,119 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +import csv +from operator import attrgetter +from tempfile import NamedTemporaryFile +from typing import Dict, List + +from airflow.models import BaseOperator +from airflow.providers.google.ads.hooks.ads import GoogleAdsHook +from airflow.providers.google.cloud.hooks.gcs import GCSHook +from airflow.utils.decorators import apply_defaults + + +class GoogleAdsToGcsOperator(BaseOperator): + """ + Fetches the daily results from the Google Ads API for 1-n clients + Converts and saves the data as a temporary CSV file + Uploads the CSV to Google Cloud Storage + + .. seealso:: + For more information on the Google Ads API, take a look at the API docs: + https://developers.google.com/google-ads/api/docs/start + + .. seealso:: + For more information on how to use this operator, take a look at the guide: + :ref:`howto/operator:GoogleAdsToGcsOperator` + + :param client_ids: Google Ads client IDs to query + :type client_ids: List[str] + :param query: Google Ads Query Language API query + :type query: str + :param attributes: List of Google Ads Row attributes to extract + :type attributes: List[str] + :param bucket: The GCS bucket to upload to + :type bucket: str + :param obj: GCS path to save the object. Must be the full file path (ex. `path/to/file.txt`) + :type obj: str + :param gcp_conn_id: Airflow Google Cloud Platform connection ID + :type gcp_conn_id: str + :param google_ads_conn_id: Airflow Google Ads connection ID + :type google_ads_conn_id: str + :param page_size: The number of results per API page request. Max 10,000 + :type page_size: int + :param gzip: Option to compress local file or file data for upload + :type gzip: bool + """ + + template_fields = ("client_ids", "query", "attributes", "bucket", "obj") + + @apply_defaults + def __init__( + self, + client_ids: List[str], + query: str, + attributes: List[str], + bucket: str, + obj: str, + gcp_conn_id: str = "google_cloud_default", + google_ads_conn_id: str = "google_ads_default", + page_size: int = 10000, + gzip: bool = False, + *args, + **kwargs, + ) -> None: + super().__init__(*args, **kwargs) + self.client_ids = client_ids + self.query = query + self.attributes = attributes + self.bucket = bucket + self.obj = obj + self.gcp_conn_id = gcp_conn_id + self.google_ads_conn_id = google_ads_conn_id + self.page_size = page_size + self.gzip = gzip + + def execute(self, context: Dict): + service = GoogleAdsHook( + gcp_conn_id=self.gcp_conn_id, + google_ads_conn_id=self.google_ads_conn_id + ) + rows = service.search( + client_ids=self.client_ids, query=self.query, page_size=self.page_size + ) + + try: + getter = attrgetter(*self.attributes) + converted_rows = [getter(row) for row in rows] + except Exception as e: + self.log.error("An error occurred in converting the Google Ad Rows. 
\n Error %s", e) + raise + + with NamedTemporaryFile("w", suffix=".csv") as csvfile: + writer = csv.writer(csvfile) + writer.writerows(converted_rows) + csvfile.flush() + + hook = GCSHook(gcp_conn_id=self.gcp_conn_id) + hook.upload( + bucket_name=self.bucket, + object_name=self.obj, + filename=csvfile.name, + gzip=self.gzip, + ) + self.log.info("%s uploaded to GCS", self.obj) diff --git a/airflow/providers/google/cloud/example_dags/example_bigquery_to_bigquery.py b/airflow/providers/google/cloud/example_dags/example_bigquery_to_bigquery.py index c075dc282de21..3171d3062cf30 100644 --- a/airflow/providers/google/cloud/example_dags/example_bigquery_to_bigquery.py +++ b/airflow/providers/google/cloud/example_dags/example_bigquery_to_bigquery.py @@ -25,7 +25,7 @@ from airflow.providers.google.cloud.operators.bigquery import ( BigQueryCreateEmptyDatasetOperator, BigQueryCreateEmptyTableOperator, BigQueryDeleteDatasetOperator, ) -from airflow.providers.google.cloud.operators.bigquery_to_bigquery import BigQueryToBigQueryOperator +from airflow.providers.google.cloud.transfers.bigquery_to_bigquery import BigQueryToBigQueryOperator from airflow.utils.dates import days_ago PROJECT_ID = os.environ.get("GCP_PROJECT_ID", "example-project") diff --git a/airflow/providers/google/cloud/example_dags/example_bigquery_to_gcs.py b/airflow/providers/google/cloud/example_dags/example_bigquery_to_gcs.py index 0b6ac88e35930..221138f4a28e9 100644 --- a/airflow/providers/google/cloud/example_dags/example_bigquery_to_gcs.py +++ b/airflow/providers/google/cloud/example_dags/example_bigquery_to_gcs.py @@ -25,7 +25,7 @@ from airflow.providers.google.cloud.operators.bigquery import ( BigQueryCreateEmptyDatasetOperator, BigQueryCreateEmptyTableOperator, BigQueryDeleteDatasetOperator, ) -from airflow.providers.google.cloud.operators.bigquery_to_gcs import BigQueryToGCSOperator +from airflow.providers.google.cloud.transfers.bigquery_to_gcs import BigQueryToGCSOperator from airflow.utils.dates import days_ago PROJECT_ID = os.environ.get("GCP_PROJECT_ID", "example-project") diff --git a/airflow/providers/google/cloud/example_dags/example_bigquery_transfer.py b/airflow/providers/google/cloud/example_dags/example_bigquery_transfer.py index 79cd8de8342ac..4667919190c09 100644 --- a/airflow/providers/google/cloud/example_dags/example_bigquery_transfer.py +++ b/airflow/providers/google/cloud/example_dags/example_bigquery_transfer.py @@ -25,8 +25,8 @@ from airflow.providers.google.cloud.operators.bigquery import ( BigQueryCreateEmptyDatasetOperator, BigQueryCreateEmptyTableOperator, BigQueryDeleteDatasetOperator, ) -from airflow.providers.google.cloud.operators.bigquery_to_bigquery import BigQueryToBigQueryOperator -from airflow.providers.google.cloud.operators.bigquery_to_gcs import BigQueryToGCSOperator +from airflow.providers.google.cloud.transfers.bigquery_to_bigquery import BigQueryToBigQueryOperator +from airflow.providers.google.cloud.transfers.bigquery_to_gcs import BigQueryToGCSOperator from airflow.utils.dates import days_ago PROJECT_ID = os.environ.get("GCP_PROJECT_ID", "example-project") diff --git a/airflow/providers/google/cloud/example_dags/example_dataflow.py b/airflow/providers/google/cloud/example_dags/example_dataflow.py index d05318d76f8d0..c9034cf45fc8b 100644 --- a/airflow/providers/google/cloud/example_dags/example_dataflow.py +++ b/airflow/providers/google/cloud/example_dags/example_dataflow.py @@ -27,7 +27,7 @@ CheckJobRunning, DataflowCreateJavaJobOperator, DataflowCreatePythonJobOperator, 
DataflowTemplatedJobStartOperator, ) -from airflow.providers.google.cloud.operators.gcs import GCSToLocalOperator +from airflow.providers.google.cloud.transfers.gcs_to_local import GCSToLocalFilesystemOperator from airflow.utils.dates import days_ago GCS_TMP = os.environ.get('GCP_DATAFLOW_GCS_TMP', 'gs://test-dataflow-example/temp/') @@ -70,7 +70,7 @@ ) # [END howto_operator_start_java_job] - jar_to_local = GCSToLocalOperator( + jar_to_local = GCSToLocalFilesystemOperator( task_id="jar-to-local", bucket=GCS_JAR_BUCKET_NAME, object_name=GCS_JAR_OBJECT_NAME, diff --git a/airflow/providers/google/cloud/example_dags/example_facebook_ads_to_gcs.py b/airflow/providers/google/cloud/example_dags/example_facebook_ads_to_gcs.py index b23d1559cbf3a..0bebd35a07019 100644 --- a/airflow/providers/google/cloud/example_dags/example_facebook_ads_to_gcs.py +++ b/airflow/providers/google/cloud/example_dags/example_facebook_ads_to_gcs.py @@ -27,9 +27,9 @@ BigQueryCreateEmptyDatasetOperator, BigQueryCreateEmptyTableOperator, BigQueryDeleteDatasetOperator, BigQueryExecuteQueryOperator, ) -from airflow.providers.google.cloud.operators.facebook_ads_to_gcs import FacebookAdsReportToGcsOperator from airflow.providers.google.cloud.operators.gcs import GCSCreateBucketOperator, GCSDeleteBucketOperator -from airflow.providers.google.cloud.operators.gcs_to_bigquery import GCSToBigQueryOperator +from airflow.providers.google.cloud.transfers.facebook_ads_to_gcs import FacebookAdsReportToGcsOperator +from airflow.providers.google.cloud.transfers.gcs_to_bigquery import GCSToBigQueryOperator from airflow.utils.dates import days_ago # [START howto_GCS_env_variables] diff --git a/airflow/providers/google/cloud/example_dags/example_gcs.py b/airflow/providers/google/cloud/example_dags/example_gcs.py index 55e24d69f748f..4cdac3636088e 100644 --- a/airflow/providers/google/cloud/example_dags/example_gcs.py +++ b/airflow/providers/google/cloud/example_dags/example_gcs.py @@ -26,10 +26,11 @@ from airflow.providers.google.cloud.operators.gcs import ( GCSBucketCreateAclEntryOperator, GCSCreateBucketOperator, GCSDeleteBucketOperator, GCSDeleteObjectsOperator, GCSFileTransformOperator, GCSListObjectsOperator, - GCSObjectCreateAclEntryOperator, GCSToLocalOperator, + GCSObjectCreateAclEntryOperator, ) -from airflow.providers.google.cloud.operators.gcs_to_gcs import GCSToGCSOperator -from airflow.providers.google.cloud.operators.local_to_gcs import LocalFilesystemToGCSOperator +from airflow.providers.google.cloud.transfers.gcs_to_gcs import GCSToGCSOperator +from airflow.providers.google.cloud.transfers.gcs_to_local import GCSToLocalFilesystemOperator +from airflow.providers.google.cloud.transfers.local_to_gcs import LocalFilesystemToGCSOperator from airflow.utils.dates import days_ago default_args = {"start_date": days_ago(1)} @@ -106,12 +107,14 @@ ) # [END howto_operator_gcs_object_create_acl_entry_task] - download_file = GCSToLocalOperator( + # [START howto_operator_gcs_download_file_task] + download_file = GCSToLocalFilesystemOperator( task_id="download_file", object_name=BUCKET_FILE_LOCATION, bucket=BUCKET_1, filename=PATH_TO_SAVED_FILE, ) + # [END howto_operator_gcs_download_file_task] copy_file = GCSToGCSOperator( task_id="copy_file", diff --git a/airflow/providers/google/cloud/example_dags/example_gcs_to_bigquery.py b/airflow/providers/google/cloud/example_dags/example_gcs_to_bigquery.py index 33b9c69e1deba..9035f2dcbaff9 100644 --- a/airflow/providers/google/cloud/example_dags/example_gcs_to_bigquery.py +++ 
b/airflow/providers/google/cloud/example_dags/example_gcs_to_bigquery.py @@ -26,7 +26,7 @@ from airflow.providers.google.cloud.operators.bigquery import ( BigQueryCreateEmptyDatasetOperator, BigQueryDeleteDatasetOperator, ) -from airflow.providers.google.cloud.operators.gcs_to_bigquery import GCSToBigQueryOperator +from airflow.providers.google.cloud.transfers.gcs_to_bigquery import GCSToBigQueryOperator from airflow.utils.dates import days_ago DATASET_NAME = os.environ.get("GCP_DATASET_NAME", 'airflow_test') diff --git a/airflow/providers/google/cloud/example_dags/example_gcs_to_gcs.py b/airflow/providers/google/cloud/example_dags/example_gcs_to_gcs.py index 30a542fab5a33..20f0b2b28da4a 100644 --- a/airflow/providers/google/cloud/example_dags/example_gcs_to_gcs.py +++ b/airflow/providers/google/cloud/example_dags/example_gcs_to_gcs.py @@ -22,9 +22,8 @@ import os from airflow import models -from airflow.providers.google.cloud.operators.gcs_to_gcs import ( - GCSSynchronizeBucketsOperator, GCSToGCSOperator, -) +from airflow.providers.google.cloud.operators.gcs import GCSSynchronizeBucketsOperator +from airflow.providers.google.cloud.transfers.gcs_to_gcs import GCSToGCSOperator from airflow.utils.dates import days_ago default_args = {"start_date": days_ago(1)} diff --git a/airflow/providers/google/cloud/example_dags/example_gcs_to_sftp.py b/airflow/providers/google/cloud/example_dags/example_gcs_to_sftp.py index bb2e6e8253a78..d325e9c8e534a 100644 --- a/airflow/providers/google/cloud/example_dags/example_gcs_to_sftp.py +++ b/airflow/providers/google/cloud/example_dags/example_gcs_to_sftp.py @@ -22,7 +22,7 @@ import os from airflow import models -from airflow.providers.google.cloud.operators.gcs_to_sftp import GCSToSFTPOperator +from airflow.providers.google.cloud.transfers.gcs_to_sftp import GCSToSFTPOperator from airflow.utils.dates import days_ago default_args = {"start_date": days_ago(1)} diff --git a/airflow/providers/google/cloud/example_dags/example_local_to_gcs.py b/airflow/providers/google/cloud/example_dags/example_local_to_gcs.py index 5e8d7b08edd67..eb3a8dfc43600 100644 --- a/airflow/providers/google/cloud/example_dags/example_local_to_gcs.py +++ b/airflow/providers/google/cloud/example_dags/example_local_to_gcs.py @@ -19,7 +19,7 @@ import os from airflow import models -from airflow.providers.google.cloud.operators.local_to_gcs import LocalFilesystemToGCSOperator +from airflow.providers.google.cloud.transfers.local_to_gcs import LocalFilesystemToGCSOperator from airflow.utils import dates # [START howto_gcs_environment_variables] diff --git a/airflow/providers/google/cloud/example_dags/example_postgres_to_gcs.py b/airflow/providers/google/cloud/example_dags/example_postgres_to_gcs.py index 81f414fd4308e..d4df9336dd532 100644 --- a/airflow/providers/google/cloud/example_dags/example_postgres_to_gcs.py +++ b/airflow/providers/google/cloud/example_dags/example_postgres_to_gcs.py @@ -19,7 +19,7 @@ Example DAG using PostgresToGoogleCloudStorageOperator. 
""" from airflow import models -from airflow.providers.google.cloud.operators.postgres_to_gcs import PostgresToGCSOperator +from airflow.providers.google.cloud.transfers.postgres_to_gcs import PostgresToGCSOperator from airflow.utils.dates import days_ago GCS_BUCKET = "postgres_to_gcs_example" diff --git a/airflow/providers/google/cloud/example_dags/example_presto_to_gcs.py b/airflow/providers/google/cloud/example_dags/example_presto_to_gcs.py index 385cdc375d146..ca4ae0f6413b1 100644 --- a/airflow/providers/google/cloud/example_dags/example_presto_to_gcs.py +++ b/airflow/providers/google/cloud/example_dags/example_presto_to_gcs.py @@ -26,7 +26,7 @@ BigQueryCreateEmptyDatasetOperator, BigQueryCreateExternalTableOperator, BigQueryDeleteDatasetOperator, BigQueryExecuteQueryOperator, ) -from airflow.providers.google.cloud.operators.presto_to_gcs import PrestoToGCSOperator +from airflow.providers.google.cloud.transfers.presto_to_gcs import PrestoToGCSOperator from airflow.utils.dates import days_ago GCP_PROJECT_ID = os.environ.get("GCP_PROJECT_ID", 'example-project') diff --git a/airflow/providers/google/cloud/example_dags/example_sftp_to_gcs.py b/airflow/providers/google/cloud/example_dags/example_sftp_to_gcs.py index ee3a488df0ab6..9c6f31529a719 100644 --- a/airflow/providers/google/cloud/example_dags/example_sftp_to_gcs.py +++ b/airflow/providers/google/cloud/example_dags/example_sftp_to_gcs.py @@ -22,7 +22,7 @@ import os from airflow import models -from airflow.providers.google.cloud.operators.sftp_to_gcs import SFTPToGCSOperator +from airflow.providers.google.cloud.transfers.sftp_to_gcs import SFTPToGCSOperator from airflow.utils.dates import days_ago default_args = {"start_date": days_ago(1)} diff --git a/airflow/providers/google/cloud/example_dags/example_sheets_to_gcs.py b/airflow/providers/google/cloud/example_dags/example_sheets_to_gcs.py index 18c2d92c4228e..b4ecfae8990ee 100644 --- a/airflow/providers/google/cloud/example_dags/example_sheets_to_gcs.py +++ b/airflow/providers/google/cloud/example_dags/example_sheets_to_gcs.py @@ -19,7 +19,7 @@ import os from airflow import models -from airflow.providers.google.cloud.operators.sheets_to_gcs import GoogleSheetsToGCSOperator +from airflow.providers.google.cloud.transfers.sheets_to_gcs import GoogleSheetsToGCSOperator from airflow.utils.dates import days_ago BUCKET = os.environ.get("GCP_GCS_BUCKET", "test28397yeo") diff --git a/airflow/providers/google/cloud/operators/gcs.py b/airflow/providers/google/cloud/operators/gcs.py index dace60f8c8a34..e76eb3c6a5397 100644 --- a/airflow/providers/google/cloud/operators/gcs.py +++ b/airflow/providers/google/cloud/operators/gcs.py @@ -28,7 +28,6 @@ from airflow.exceptions import AirflowException from airflow.models import BaseOperator -from airflow.models.xcom import MAX_XCOM_SIZE from airflow.providers.google.cloud.hooks.gcs import GCSHook from airflow.utils.decorators import apply_defaults @@ -227,102 +226,6 @@ def execute(self, context): delimiter=self.delimiter) -class GCSToLocalOperator(BaseOperator): - """ - Downloads a file from Google Cloud Storage. - - If a filename is supplied, it writes the file to the specified location, alternatively one can - set the ``store_to_xcom_key`` parameter to True push the file content into xcom. When the file size - exceeds the maximum size for xcom it is recommended to write to a file. - - :param bucket: The Google Cloud Storage bucket where the object is. - Must not contain 'gs://' prefix. 
(templated) - :type bucket: str - :param object: The name of the object to download in the Google cloud - storage bucket. (templated) - :type object: str - :param filename: The file path, including filename, on the local file system (where the - operator is being executed) that the file should be downloaded to. (templated) - If no filename passed, the downloaded data will not be stored on the local file - system. - :type filename: str - :param store_to_xcom_key: If this param is set, the operator will push - the contents of the downloaded file to XCom with the key set in this - parameter. If not set, the downloaded data will not be pushed to XCom. (templated) - :type store_to_xcom_key: str - :param gcp_conn_id: (Optional) The connection ID used to connect to Google Cloud Platform. - :type gcp_conn_id: str - :param google_cloud_storage_conn_id: (Deprecated) The connection ID used to connect to Google Cloud - Platform. This parameter has been deprecated. You should pass the gcp_conn_id parameter instead. - :type google_cloud_storage_conn_id: str - :param delegate_to: The account to impersonate, if any. - For this to work, the service account making the request must have - domain-wide delegation enabled. - :type delegate_to: str - """ - template_fields = ('bucket', 'object', 'filename', 'store_to_xcom_key',) - ui_color = '#f0eee4' - - @apply_defaults - def __init__(self, - bucket: str, - object_name: Optional[str] = None, - filename: Optional[str] = None, - store_to_xcom_key: Optional[str] = None, - gcp_conn_id: str = 'google_cloud_default', - google_cloud_storage_conn_id: Optional[str] = None, - delegate_to: Optional[str] = None, - *args, - **kwargs) -> None: - # To preserve backward compatibility - # TODO: Remove one day - if object_name is None: - if 'object' in kwargs: - object_name = kwargs['object'] - DeprecationWarning("Use 'object_name' instead of 'object'.") - else: - TypeError("__init__() missing 1 required positional argument: 'object_name'") - - if filename is not None and store_to_xcom_key is not None: - raise ValueError("Either filename or store_to_xcom_key can be set") - - if google_cloud_storage_conn_id: - warnings.warn( - "The google_cloud_storage_conn_id parameter has been deprecated. You should pass " - "the gcp_conn_id parameter.", DeprecationWarning, stacklevel=3) - gcp_conn_id = google_cloud_storage_conn_id - - super().__init__(*args, **kwargs) - self.bucket = bucket - self.object = object_name - self.filename = filename - self.store_to_xcom_key = store_to_xcom_key - self.gcp_conn_id = gcp_conn_id - self.delegate_to = delegate_to - - def execute(self, context): - self.log.info('Executing download: %s, %s, %s', self.bucket, - self.object, self.filename) - hook = GCSHook( - google_cloud_storage_conn_id=self.gcp_conn_id, - delegate_to=self.delegate_to - ) - - if self.store_to_xcom_key: - file_bytes = hook.download(bucket_name=self.bucket, - object_name=self.object) - if sys.getsizeof(file_bytes) < MAX_XCOM_SIZE: - context['ti'].xcom_push(key=self.store_to_xcom_key, value=file_bytes) - else: - raise AirflowException( - 'The size of the downloaded file is too large to push to XCom!' 
-                )
-        else:
-            hook.download(bucket_name=self.bucket,
-                          object_name=self.object,
-                          filename=self.filename)
-
-
 class GCSDeleteObjectsOperator(BaseOperator):
     """
     Deletes objects from a Google Cloud Storage bucket, either
@@ -663,3 +566,94 @@ def __init__(self,
     def execute(self, context):
         hook = GCSHook(gcp_conn_id=self.gcp_conn_id)
         hook.delete_bucket(bucket_name=self.bucket_name, force=self.force)
+
+
+class GCSSynchronizeBucketsOperator(BaseOperator):
+    """
+    Synchronizes the contents of two Google Cloud Storage buckets, or of directories within them.
+
+    Parameters ``source_object`` and ``destination_object`` describe the root sync directory. If they
+    are not passed, the entire bucket will be synchronized; they should point to directories.
+
+    .. note::
+        The synchronization of individual files is not supported. Only entire directories can be
+        synchronized.
+
+    .. seealso::
+        For more information on how to use this operator, take a look at the guide:
+        :ref:`howto/operator:GCSSynchronizeBuckets`
+
+    :param source_bucket: The name of the bucket containing the source objects.
+    :type source_bucket: str
+    :param destination_bucket: The name of the bucket containing the destination objects.
+    :type destination_bucket: str
+    :param source_object: The root sync directory in the source bucket.
+    :type source_object: Optional[str]
+    :param destination_object: The root sync directory in the destination bucket.
+    :type destination_object: Optional[str]
+    :param recursive: If True, subdirectories are synchronized as well.
+    :type recursive: bool
+    :param allow_overwrite: If True, a destination file is overwritten when a mismatched file is found.
+        By default, overwriting files is not allowed.
+    :type allow_overwrite: bool
+    :param delete_extra_files: If True, deletes additional files from the destination that are not
+        found in the source. By default, extra files are not deleted.
+
+        .. note::
+            This option can delete data quickly if you specify the wrong source/destination combination.
+
+    :type delete_extra_files: bool
+    """
+
+    template_fields = (
+        'source_bucket',
+        'destination_bucket',
+        'source_object',
+        'destination_object',
+        'recursive',
+        'delete_extra_files',
+        'allow_overwrite',
+        'gcp_conn_id',
+        'delegate_to',
+    )
+
+    @apply_defaults
+    def __init__(
+        self,
+        source_bucket: str,
+        destination_bucket: str,
+        source_object: Optional[str] = None,
+        destination_object: Optional[str] = None,
+        recursive: bool = True,
+        delete_extra_files: bool = False,
+        allow_overwrite: bool = False,
+        gcp_conn_id: str = 'google_cloud_default',
+        delegate_to: Optional[str] = None,
+        *args,
+        **kwargs
+    ) -> None:
+        super().__init__(*args, **kwargs)
+        self.source_bucket = source_bucket
+        self.destination_bucket = destination_bucket
+        self.source_object = source_object
+        self.destination_object = destination_object
+        self.recursive = recursive
+        self.delete_extra_files = delete_extra_files
+        self.allow_overwrite = allow_overwrite
+        self.gcp_conn_id = gcp_conn_id
+        self.delegate_to = delegate_to
+
+    def execute(self, context):
+        hook = GCSHook(
+            google_cloud_storage_conn_id=self.gcp_conn_id,
+            delegate_to=self.delegate_to
+        )
+        hook.sync(
+            source_bucket=self.source_bucket,
+            destination_bucket=self.destination_bucket,
+            source_object=self.source_object,
+            destination_object=self.destination_object,
+            recursive=self.recursive,
+            delete_extra_files=self.delete_extra_files,
+            allow_overwrite=self.allow_overwrite
+        )
diff --git a/airflow/providers/google/cloud/transfers/__init__.py b/airflow/providers/google/cloud/transfers/__init__.py
new file mode 100644
index 0000000000000..13a83393a9124
--- /dev/null
+++ b/airflow/providers/google/cloud/transfers/__init__.py
@@ -0,0 +1,16 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
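For orientation, a minimal sketch of how the relocated GCSSynchronizeBucketsOperator can be wired into a DAG after this change; the DAG id, bucket names, and schedule below are placeholders, not values taken from this changeset:

from airflow import models
from airflow.providers.google.cloud.operators.gcs import GCSSynchronizeBucketsOperator
from airflow.utils.dates import days_ago

with models.DAG(
    "example_gcs_sync",  # placeholder DAG id
    start_date=days_ago(1),
    schedule_interval=None,
) as dag:
    # Mirror the source bucket into the destination bucket. delete_extra_files
    # and allow_overwrite keep their safe defaults, so nothing in the
    # destination is deleted or overwritten.
    sync_buckets = GCSSynchronizeBucketsOperator(
        task_id="sync_buckets",
        source_bucket="source-bucket",            # placeholder
        destination_bucket="destination-bucket",  # placeholder
        recursive=True,
    )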
diff --git a/airflow/providers/google/cloud/operators/adls_to_gcs.py b/airflow/providers/google/cloud/transfers/adls_to_gcs.py similarity index 100% rename from airflow/providers/google/cloud/operators/adls_to_gcs.py rename to airflow/providers/google/cloud/transfers/adls_to_gcs.py diff --git a/airflow/providers/google/cloud/operators/bigquery_to_bigquery.py b/airflow/providers/google/cloud/transfers/bigquery_to_bigquery.py similarity index 100% rename from airflow/providers/google/cloud/operators/bigquery_to_bigquery.py rename to airflow/providers/google/cloud/transfers/bigquery_to_bigquery.py diff --git a/airflow/providers/google/cloud/operators/bigquery_to_gcs.py b/airflow/providers/google/cloud/transfers/bigquery_to_gcs.py similarity index 100% rename from airflow/providers/google/cloud/operators/bigquery_to_gcs.py rename to airflow/providers/google/cloud/transfers/bigquery_to_gcs.py diff --git a/airflow/providers/google/cloud/operators/bigquery_to_mysql.py b/airflow/providers/google/cloud/transfers/bigquery_to_mysql.py similarity index 100% rename from airflow/providers/google/cloud/operators/bigquery_to_mysql.py rename to airflow/providers/google/cloud/transfers/bigquery_to_mysql.py diff --git a/airflow/providers/google/cloud/operators/cassandra_to_gcs.py b/airflow/providers/google/cloud/transfers/cassandra_to_gcs.py similarity index 100% rename from airflow/providers/google/cloud/operators/cassandra_to_gcs.py rename to airflow/providers/google/cloud/transfers/cassandra_to_gcs.py diff --git a/airflow/providers/google/cloud/operators/facebook_ads_to_gcs.py b/airflow/providers/google/cloud/transfers/facebook_ads_to_gcs.py similarity index 100% rename from airflow/providers/google/cloud/operators/facebook_ads_to_gcs.py rename to airflow/providers/google/cloud/transfers/facebook_ads_to_gcs.py diff --git a/airflow/providers/google/cloud/operators/gcs_to_bigquery.py b/airflow/providers/google/cloud/transfers/gcs_to_bigquery.py similarity index 100% rename from airflow/providers/google/cloud/operators/gcs_to_bigquery.py rename to airflow/providers/google/cloud/transfers/gcs_to_bigquery.py diff --git a/airflow/providers/google/cloud/operators/gcs_to_gcs.py b/airflow/providers/google/cloud/transfers/gcs_to_gcs.py similarity index 82% rename from airflow/providers/google/cloud/operators/gcs_to_gcs.py rename to airflow/providers/google/cloud/transfers/gcs_to_gcs.py index 207a535261fcd..a3d6db8636b85 100644 --- a/airflow/providers/google/cloud/operators/gcs_to_gcs.py +++ b/airflow/providers/google/cloud/transfers/gcs_to_gcs.py @@ -19,7 +19,6 @@ This module contains a Google Cloud Storage operator. """ import warnings -from typing import Optional from airflow.exceptions import AirflowException from airflow.models import BaseOperator @@ -342,94 +341,3 @@ def _copy_single_object(self, hook, source_object, destination_object): if self.move_object: hook.delete(self.source_bucket, source_object) - - -class GCSSynchronizeBucketsOperator(BaseOperator): - """ - Synchronizes the contents of the buckets or bucket's directories in the Google Cloud Services. - - Parameters ``source_object`` and ``destination_object`` describe the root sync directory. If they are - not passed, the entire bucket will be synchronized. They should point to directories. - - .. note:: - The synchronization of individual files is not supported. Only entire directories can be - synchronized. - - .. 
seealso:: - For more information on how to use this operator, take a look at the guide: - :ref:`howto/operator:GCSSynchronizeBuckets` - - :param source_bucket: The name of the bucket containing the source objects. - :type source_bucket: str - :param destination_bucket: The name of the bucket containing the destination objects. - :type destination_bucket: str - :param source_object: The root sync directory in the source bucket. - :type source_object: Optional[str] - :param destination_object: The root sync directory in the destination bucket. - :type destination_object: Optional[str] - :param recursive: If True, subdirectories will be considered - :type recursive: bool - :param allow_overwrite: if True, the files will be overwritten if a mismatched file is found. - By default, overwriting files is not allowed - :type allow_overwrite: bool - :param delete_extra_files: if True, deletes additional files from the source that not found in the - destination. By default extra files are not deleted. - - .. note:: - This option can delete data quickly if you specify the wrong source/destination combination. - - :type delete_extra_files: bool - """ - - template_fields = ( - 'source_bucket', - 'destination_bucket', - 'source_object', - 'destination_object', - 'recursive', - 'delete_extra_files', - 'allow_overwrite', - 'gcp_conn_id', - 'delegate_to', - ) - - @apply_defaults - def __init__( - self, - source_bucket: str, - destination_bucket: str, - source_object: Optional[str] = None, - destination_object: Optional[str] = None, - recursive: bool = True, - delete_extra_files: bool = False, - allow_overwrite: bool = False, - gcp_conn_id: str = 'google_cloud_default', - delegate_to: Optional[str] = None, - *args, - **kwargs - ) -> None: - super().__init__(*args, **kwargs) - self.source_bucket = source_bucket - self.destination_bucket = destination_bucket - self.source_object = source_object - self.destination_object = destination_object - self.recursive = recursive - self.delete_extra_files = delete_extra_files - self.allow_overwrite = allow_overwrite - self.gcp_conn_id = gcp_conn_id - self.delegate_to = delegate_to - - def execute(self, context): - hook = GCSHook( - google_cloud_storage_conn_id=self.gcp_conn_id, - delegate_to=self.delegate_to - ) - hook.sync( - source_bucket=self.source_bucket, - destination_bucket=self.destination_bucket, - source_object=self.source_object, - destination_object=self.destination_object, - recursive=self.recursive, - delete_extra_files=self.delete_extra_files, - allow_overwrite=self.allow_overwrite - ) diff --git a/airflow/providers/google/cloud/transfers/gcs_to_local.py b/airflow/providers/google/cloud/transfers/gcs_to_local.py new file mode 100644 index 0000000000000..ffd33157dde92 --- /dev/null +++ b/airflow/providers/google/cloud/transfers/gcs_to_local.py @@ -0,0 +1,122 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. 
See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+import sys
+import warnings
+from typing import Optional
+
+from airflow.exceptions import AirflowException
+from airflow.models import BaseOperator
+from airflow.models.xcom import MAX_XCOM_SIZE
+from airflow.providers.google.cloud.hooks.gcs import GCSHook
+from airflow.utils.decorators import apply_defaults
+
+
+class GCSToLocalFilesystemOperator(BaseOperator):
+    """
+    Downloads a file from Google Cloud Storage.
+
+    If a filename is supplied, the operator writes the file to the specified location; alternatively,
+    one can set the ``store_to_xcom_key`` parameter to push the file content into XCom under that key.
+    When the file size exceeds the maximum size allowed for XCom, it is recommended to write to a file.
+
+    :param bucket: The Google Cloud Storage bucket where the object is.
+        Must not contain 'gs://' prefix. (templated)
+    :type bucket: str
+    :param object_name: The name of the object to download in the Google Cloud
+        Storage bucket. (templated)
+    :type object_name: str
+    :param filename: The file path, including filename, on the local file system (where the
+        operator is being executed) that the file should be downloaded to. (templated)
+        If no filename is passed, the downloaded data will not be stored on the local file
+        system.
+    :type filename: str
+    :param store_to_xcom_key: If this param is set, the operator will push
+        the contents of the downloaded file to XCom with the key set in this
+        parameter. If not set, the downloaded data will not be pushed to XCom. (templated)
+    :type store_to_xcom_key: str
+    :param gcp_conn_id: (Optional) The connection ID used to connect to Google Cloud Platform.
+    :type gcp_conn_id: str
+    :param google_cloud_storage_conn_id: (Deprecated) The connection ID used to connect to Google Cloud
+        Platform. This parameter has been deprecated. You should pass the gcp_conn_id parameter instead.
+    :type google_cloud_storage_conn_id: str
+    :param delegate_to: The account to impersonate, if any.
+        For this to work, the service account making the request must have
+        domain-wide delegation enabled.
+    :type delegate_to: str
+    """
+    template_fields = ('bucket', 'object', 'filename', 'store_to_xcom_key',)
+    ui_color = '#f0eee4'
+
+    @apply_defaults
+    def __init__(self,
+                 bucket: str,
+                 object_name: Optional[str] = None,
+                 filename: Optional[str] = None,
+                 store_to_xcom_key: Optional[str] = None,
+                 gcp_conn_id: str = 'google_cloud_default',
+                 google_cloud_storage_conn_id: Optional[str] = None,
+                 delegate_to: Optional[str] = None,
+                 *args,
+                 **kwargs) -> None:
+        # To preserve backward compatibility
+        # TODO: Remove one day
+        if object_name is None:
+            if 'object' in kwargs:
+                object_name = kwargs['object']
+                warnings.warn("Use 'object_name' instead of 'object'.", DeprecationWarning, stacklevel=2)
+            else:
+                raise TypeError("__init__() missing 1 required positional argument: 'object_name'")
+
+        if filename is not None and store_to_xcom_key is not None:
+            raise ValueError("Either filename or store_to_xcom_key can be set")
+
+        if google_cloud_storage_conn_id:
+            warnings.warn(
+                "The google_cloud_storage_conn_id parameter has been deprecated. 
You should pass " + "the gcp_conn_id parameter.", DeprecationWarning, stacklevel=3) + gcp_conn_id = google_cloud_storage_conn_id + + super().__init__(*args, **kwargs) + self.bucket = bucket + self.object = object_name + self.filename = filename # noqa + self.store_to_xcom_key = store_to_xcom_key # noqa + self.gcp_conn_id = gcp_conn_id + self.delegate_to = delegate_to + + def execute(self, context): + self.log.info('Executing download: %s, %s, %s', self.bucket, + self.object, self.filename) + hook = GCSHook( + google_cloud_storage_conn_id=self.gcp_conn_id, + delegate_to=self.delegate_to + ) + + if self.store_to_xcom_key: + file_bytes = hook.download(bucket_name=self.bucket, + object_name=self.object) + if sys.getsizeof(file_bytes) < MAX_XCOM_SIZE: + context['ti'].xcom_push(key=self.store_to_xcom_key, value=file_bytes) + else: + raise AirflowException( + 'The size of the downloaded file is too large to push to XCom!' + ) + else: + hook.download(bucket_name=self.bucket, + object_name=self.object, + filename=self.filename) diff --git a/airflow/providers/google/cloud/operators/gcs_to_sftp.py b/airflow/providers/google/cloud/transfers/gcs_to_sftp.py similarity index 100% rename from airflow/providers/google/cloud/operators/gcs_to_sftp.py rename to airflow/providers/google/cloud/transfers/gcs_to_sftp.py diff --git a/airflow/providers/google/cloud/operators/local_to_gcs.py b/airflow/providers/google/cloud/transfers/local_to_gcs.py similarity index 100% rename from airflow/providers/google/cloud/operators/local_to_gcs.py rename to airflow/providers/google/cloud/transfers/local_to_gcs.py diff --git a/airflow/providers/google/cloud/operators/mssql_to_gcs.py b/airflow/providers/google/cloud/transfers/mssql_to_gcs.py similarity index 97% rename from airflow/providers/google/cloud/operators/mssql_to_gcs.py rename to airflow/providers/google/cloud/transfers/mssql_to_gcs.py index 4265dbd4896fa..d6d1c806991ca 100644 --- a/airflow/providers/google/cloud/operators/mssql_to_gcs.py +++ b/airflow/providers/google/cloud/transfers/mssql_to_gcs.py @@ -21,7 +21,7 @@ import decimal -from airflow.providers.google.cloud.operators.sql_to_gcs import BaseSQLToGCSOperator +from airflow.providers.google.cloud.transfers.sql_to_gcs import BaseSQLToGCSOperator from airflow.providers.microsoft.mssql.hooks.mssql import MsSqlHook from airflow.utils.decorators import apply_defaults diff --git a/airflow/providers/google/cloud/operators/mysql_to_gcs.py b/airflow/providers/google/cloud/transfers/mysql_to_gcs.py similarity index 98% rename from airflow/providers/google/cloud/operators/mysql_to_gcs.py rename to airflow/providers/google/cloud/transfers/mysql_to_gcs.py index 87fe5d45190aa..8ab753959b685 100644 --- a/airflow/providers/google/cloud/operators/mysql_to_gcs.py +++ b/airflow/providers/google/cloud/transfers/mysql_to_gcs.py @@ -26,7 +26,7 @@ from MySQLdb.constants import FIELD_TYPE -from airflow.providers.google.cloud.operators.sql_to_gcs import BaseSQLToGCSOperator +from airflow.providers.google.cloud.transfers.sql_to_gcs import BaseSQLToGCSOperator from airflow.providers.mysql.hooks.mysql import MySqlHook from airflow.utils.decorators import apply_defaults diff --git a/airflow/providers/google/cloud/operators/postgres_to_gcs.py b/airflow/providers/google/cloud/transfers/postgres_to_gcs.py similarity index 98% rename from airflow/providers/google/cloud/operators/postgres_to_gcs.py rename to airflow/providers/google/cloud/transfers/postgres_to_gcs.py index 2938eeb5799fb..1f0aabf97abe9 100644 --- 
a/airflow/providers/google/cloud/operators/postgres_to_gcs.py +++ b/airflow/providers/google/cloud/transfers/postgres_to_gcs.py @@ -26,7 +26,7 @@ import pendulum -from airflow.providers.google.cloud.operators.sql_to_gcs import BaseSQLToGCSOperator +from airflow.providers.google.cloud.transfers.sql_to_gcs import BaseSQLToGCSOperator from airflow.providers.postgres.hooks.postgres import PostgresHook from airflow.utils.decorators import apply_defaults diff --git a/airflow/providers/google/cloud/operators/presto_to_gcs.py b/airflow/providers/google/cloud/transfers/presto_to_gcs.py similarity index 99% rename from airflow/providers/google/cloud/operators/presto_to_gcs.py rename to airflow/providers/google/cloud/transfers/presto_to_gcs.py index 49c931c45a4a4..855b99a84511c 100644 --- a/airflow/providers/google/cloud/operators/presto_to_gcs.py +++ b/airflow/providers/google/cloud/transfers/presto_to_gcs.py @@ -19,7 +19,7 @@ from prestodb.dbapi import Cursor as PrestoCursor -from airflow.providers.google.cloud.operators.sql_to_gcs import BaseSQLToGCSOperator +from airflow.providers.google.cloud.transfers.sql_to_gcs import BaseSQLToGCSOperator from airflow.providers.presto.hooks.presto import PrestoHook from airflow.utils.decorators import apply_defaults diff --git a/airflow/providers/google/cloud/operators/s3_to_gcs.py b/airflow/providers/google/cloud/transfers/s3_to_gcs.py similarity index 100% rename from airflow/providers/google/cloud/operators/s3_to_gcs.py rename to airflow/providers/google/cloud/transfers/s3_to_gcs.py diff --git a/airflow/providers/google/cloud/operators/sftp_to_gcs.py b/airflow/providers/google/cloud/transfers/sftp_to_gcs.py similarity index 100% rename from airflow/providers/google/cloud/operators/sftp_to_gcs.py rename to airflow/providers/google/cloud/transfers/sftp_to_gcs.py diff --git a/airflow/providers/google/cloud/operators/sheets_to_gcs.py b/airflow/providers/google/cloud/transfers/sheets_to_gcs.py similarity index 100% rename from airflow/providers/google/cloud/operators/sheets_to_gcs.py rename to airflow/providers/google/cloud/transfers/sheets_to_gcs.py diff --git a/airflow/providers/google/cloud/operators/sql_to_gcs.py b/airflow/providers/google/cloud/transfers/sql_to_gcs.py similarity index 100% rename from airflow/providers/google/cloud/operators/sql_to_gcs.py rename to airflow/providers/google/cloud/transfers/sql_to_gcs.py diff --git a/airflow/providers/google/marketing_platform/example_dags/example_display_video.py b/airflow/providers/google/marketing_platform/example_dags/example_display_video.py index 0e8fd845f7400..df4e538eff168 100644 --- a/airflow/providers/google/marketing_platform/example_dags/example_display_video.py +++ b/airflow/providers/google/marketing_platform/example_dags/example_display_video.py @@ -22,7 +22,7 @@ from typing import Dict from airflow import models -from airflow.providers.google.cloud.operators.gcs_to_bigquery import GCSToBigQueryOperator +from airflow.providers.google.cloud.transfers.gcs_to_bigquery import GCSToBigQueryOperator from airflow.providers.google.marketing_platform.hooks.display_video import GoogleDisplayVideo360Hook from airflow.providers.google.marketing_platform.operators.display_video import ( GoogleDisplayVideo360CreateReportOperator, GoogleDisplayVideo360CreateSDFDownloadTaskOperator, diff --git a/airflow/providers/google/suite/example_dags/example_gcs_to_gdrive.py b/airflow/providers/google/suite/example_dags/example_gcs_to_gdrive.py index 565ab4a885f08..0d838d4200770 100644 --- 
a/airflow/providers/google/suite/example_dags/example_gcs_to_gdrive.py +++ b/airflow/providers/google/suite/example_dags/example_gcs_to_gdrive.py @@ -21,7 +21,7 @@ import os from airflow import models -from airflow.providers.google.suite.operators.gcs_to_gdrive import GCSToGoogleDriveOperator +from airflow.providers.google.suite.transfers.gcs_to_gdrive import GCSToGoogleDriveOperator from airflow.utils.dates import days_ago GCS_TO_GDRIVE_BUCKET = os.environ.get("GCS_TO_DRIVE_BUCKET", "example-object") diff --git a/airflow/providers/google/suite/example_dags/example_gcs_to_sheets.py b/airflow/providers/google/suite/example_dags/example_gcs_to_sheets.py index 5aca9517e33d2..52430c295d7ce 100644 --- a/airflow/providers/google/suite/example_dags/example_gcs_to_sheets.py +++ b/airflow/providers/google/suite/example_dags/example_gcs_to_sheets.py @@ -19,8 +19,8 @@ import os from airflow import models -from airflow.providers.google.cloud.operators.sheets_to_gcs import GoogleSheetsToGCSOperator -from airflow.providers.google.suite.operators.gcs_to_sheets import GCSToGoogleSheetsOperator +from airflow.providers.google.cloud.transfers.sheets_to_gcs import GoogleSheetsToGCSOperator +from airflow.providers.google.suite.transfers.gcs_to_sheets import GCSToGoogleSheetsOperator from airflow.utils.dates import days_ago BUCKET = os.environ.get("GCP_GCS_BUCKET", "example-test-bucket3") diff --git a/airflow/providers/google/suite/example_dags/example_sheets.py b/airflow/providers/google/suite/example_dags/example_sheets.py index e5a51b7756c1f..fcb266be75f20 100644 --- a/airflow/providers/google/suite/example_dags/example_sheets.py +++ b/airflow/providers/google/suite/example_dags/example_sheets.py @@ -20,9 +20,9 @@ from airflow import models from airflow.operators.bash import BashOperator -from airflow.providers.google.cloud.operators.sheets_to_gcs import GoogleSheetsToGCSOperator -from airflow.providers.google.suite.operators.gcs_to_sheets import GCSToGoogleSheetsOperator +from airflow.providers.google.cloud.transfers.sheets_to_gcs import GoogleSheetsToGCSOperator from airflow.providers.google.suite.operators.sheets import GoogleSheetsCreateSpreadsheetOperator +from airflow.providers.google.suite.transfers.gcs_to_sheets import GCSToGoogleSheetsOperator from airflow.utils.dates import days_ago GCS_BUCKET = os.environ.get("SHEETS_GCS_BUCKET", "test28397ye") diff --git a/airflow/providers/google/suite/transfers/__init__.py b/airflow/providers/google/suite/transfers/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/airflow/providers/google/suite/transfers/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
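The gcs_to_gdrive.py and gcs_to_sheets.py moves that follow are pure renames (similarity index 100%), so only the import path changes for DAG authors. A sketch of a guarded import that keeps a DAG loadable on either layout during migration; the try/except fallback is an editorial suggestion and assumes the older layout may still be installed in some environments:

try:
    # Post-move layout: transfer operators live under "transfers"
    from airflow.providers.google.suite.transfers.gcs_to_sheets import GCSToGoogleSheetsOperator
except ImportError:
    # Pre-move layout: the same operator under "operators"
    from airflow.providers.google.suite.operators.gcs_to_sheets import GCSToGoogleSheetsOperator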
diff --git a/airflow/providers/google/suite/operators/gcs_to_gdrive.py b/airflow/providers/google/suite/transfers/gcs_to_gdrive.py similarity index 100% rename from airflow/providers/google/suite/operators/gcs_to_gdrive.py rename to airflow/providers/google/suite/transfers/gcs_to_gdrive.py diff --git a/airflow/providers/google/suite/operators/gcs_to_sheets.py b/airflow/providers/google/suite/transfers/gcs_to_sheets.py similarity index 100% rename from airflow/providers/google/suite/operators/gcs_to_sheets.py rename to airflow/providers/google/suite/transfers/gcs_to_sheets.py diff --git a/airflow/providers/grpc/PROVIDERS_CHANGES_2020.05.20.md b/airflow/providers/grpc/PROVIDERS_CHANGES_2020.05.20.md index bef9d4e7c8c81..78a4c7489e907 100644 --- a/airflow/providers/grpc/PROVIDERS_CHANGES_2020.05.20.md +++ b/airflow/providers/grpc/PROVIDERS_CHANGES_2020.05.20.md @@ -4,6 +4,7 @@ | Commit | Committed | Subject | |:-----------------------------------------------------------------------------------------------|:------------|:------------------------------------------------------------------------------| +| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. (#8994) | | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | | [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | diff --git a/airflow/providers/grpc/README.md b/airflow/providers/grpc/README.md index 5e5a4778b429a..121231d03bc86 100644 --- a/airflow/providers/grpc/README.md +++ b/airflow/providers/grpc/README.md @@ -86,6 +86,8 @@ All classes in Airflow 2.0 are in `airflow.providers.grpc` package. + + ## Hooks @@ -107,6 +109,7 @@ All classes in Airflow 2.0 are in `airflow.providers.grpc` package. | Commit | Committed | Subject | |:-----------------------------------------------------------------------------------------------|:------------|:------------------------------------------------------------------------------| +| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. 
(#8994) | | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | | [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | diff --git a/airflow/providers/hashicorp/PROVIDERS_CHANGES_2020.05.20.md b/airflow/providers/hashicorp/PROVIDERS_CHANGES_2020.05.20.md index fa6cd844b5fc1..cd666b6d5f04c 100644 --- a/airflow/providers/hashicorp/PROVIDERS_CHANGES_2020.05.20.md +++ b/airflow/providers/hashicorp/PROVIDERS_CHANGES_2020.05.20.md @@ -4,6 +4,7 @@ | Commit | Committed | Subject | |:-----------------------------------------------------------------------------------------------|:------------|:--------------------------------------------------------------------------| +| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. (#8994) | | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | | [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | diff --git a/airflow/providers/hashicorp/README.md b/airflow/providers/hashicorp/README.md index 8b5bad560af79..69ffdea3f94dd 100644 --- a/airflow/providers/hashicorp/README.md +++ b/airflow/providers/hashicorp/README.md @@ -88,6 +88,8 @@ All classes in Airflow 2.0 are in `airflow.providers.hashicorp` package. + + ## Secrets @@ -107,6 +109,7 @@ All classes in Airflow 2.0 are in `airflow.providers.hashicorp` package. | Commit | Committed | Subject | |:-----------------------------------------------------------------------------------------------|:------------|:--------------------------------------------------------------------------| +| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. 
(#8994) | | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | | [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | diff --git a/airflow/providers/http/PROVIDERS_CHANGES_2020.05.20.md b/airflow/providers/http/PROVIDERS_CHANGES_2020.05.20.md index 47f1073d3d32c..d6130dcbcded5 100644 --- a/airflow/providers/http/PROVIDERS_CHANGES_2020.05.20.md +++ b/airflow/providers/http/PROVIDERS_CHANGES_2020.05.20.md @@ -4,6 +4,7 @@ | Commit | Committed | Subject | |:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. (#8994) | | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | | [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | diff --git a/airflow/providers/http/README.md b/airflow/providers/http/README.md index 498536e42c26a..e205fe9c5e494 100644 --- a/airflow/providers/http/README.md +++ b/airflow/providers/http/README.md @@ -78,6 +78,8 @@ All classes in Airflow 2.0 are in `airflow.providers.http` package. + + ## Sensors @@ -111,6 +113,7 @@ All classes in Airflow 2.0 are in `airflow.providers.http` package. | Commit | Committed | Subject | |:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. 
(#8994) | | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | | [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | diff --git a/airflow/providers/imap/PROVIDERS_CHANGES_2020.05.20.md b/airflow/providers/imap/PROVIDERS_CHANGES_2020.05.20.md index 68bb8bfc516bc..e38a7400283d1 100644 --- a/airflow/providers/imap/PROVIDERS_CHANGES_2020.05.20.md +++ b/airflow/providers/imap/PROVIDERS_CHANGES_2020.05.20.md @@ -4,6 +4,7 @@ | Commit | Committed | Subject | |:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. (#8994) | | [1d36b0303](https://github.com/apache/airflow/commit/1d36b0303b8632fce6de78ca4e782ae26ee06fea) | 2020-05-23 | Fix references in docs (#8984) | | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | diff --git a/airflow/providers/imap/README.md b/airflow/providers/imap/README.md index bf02199f978c5..2f70cbb18b635 100644 --- a/airflow/providers/imap/README.md +++ b/airflow/providers/imap/README.md @@ -64,6 +64,8 @@ All classes in Airflow 2.0 are in `airflow.providers.imap` package. + + ## Sensors @@ -97,6 +99,7 @@ All classes in Airflow 2.0 are in `airflow.providers.imap` package. | Commit | Committed | Subject | |:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. 
(#8994) | | [1d36b0303](https://github.com/apache/airflow/commit/1d36b0303b8632fce6de78ca4e782ae26ee06fea) | 2020-05-23 | Fix references in docs (#8984) | | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | diff --git a/airflow/providers/jdbc/PROVIDERS_CHANGES_2020.05.20.md b/airflow/providers/jdbc/PROVIDERS_CHANGES_2020.05.20.md index f332e61c8cfe2..6c01c42767a81 100644 --- a/airflow/providers/jdbc/PROVIDERS_CHANGES_2020.05.20.md +++ b/airflow/providers/jdbc/PROVIDERS_CHANGES_2020.05.20.md @@ -4,6 +4,7 @@ | Commit | Committed | Subject | |:-----------------------------------------------------------------------------------------------|:------------|:---------------------------------------------------------------------------| +| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. (#8994) | | [1d36b0303](https://github.com/apache/airflow/commit/1d36b0303b8632fce6de78ca4e782ae26ee06fea) | 2020-05-23 | Fix references in docs (#8984) | | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | diff --git a/airflow/providers/jdbc/README.md b/airflow/providers/jdbc/README.md index 544becc943706..3558dc01ce612 100644 --- a/airflow/providers/jdbc/README.md +++ b/airflow/providers/jdbc/README.md @@ -84,6 +84,8 @@ All classes in Airflow 2.0 are in `airflow.providers.jdbc` package. + + ## Hooks @@ -105,6 +107,7 @@ All classes in Airflow 2.0 are in `airflow.providers.jdbc` package. | Commit | Committed | Subject | |:-----------------------------------------------------------------------------------------------|:------------|:---------------------------------------------------------------------------| +| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. 
(#8994) | | [1d36b0303](https://github.com/apache/airflow/commit/1d36b0303b8632fce6de78ca4e782ae26ee06fea) | 2020-05-23 | Fix references in docs (#8984) | | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | diff --git a/airflow/providers/jenkins/PROVIDERS_CHANGES_2020.05.20.md b/airflow/providers/jenkins/PROVIDERS_CHANGES_2020.05.20.md index 7a63064fbb4e6..e21875a88fe8f 100644 --- a/airflow/providers/jenkins/PROVIDERS_CHANGES_2020.05.20.md +++ b/airflow/providers/jenkins/PROVIDERS_CHANGES_2020.05.20.md @@ -4,6 +4,7 @@ | Commit | Committed | Subject | |:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. (#8994) | | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | | [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | diff --git a/airflow/providers/jenkins/README.md b/airflow/providers/jenkins/README.md index 70215b7f313d0..91b92c786723f 100644 --- a/airflow/providers/jenkins/README.md +++ b/airflow/providers/jenkins/README.md @@ -84,6 +84,8 @@ All classes in Airflow 2.0 are in `airflow.providers.jenkins` package. + + ## Hooks @@ -105,6 +107,7 @@ All classes in Airflow 2.0 are in `airflow.providers.jenkins` package. | Commit | Committed | Subject | |:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. 
(#8994) | | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | | [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | diff --git a/airflow/providers/jira/PROVIDERS_CHANGES_2020.05.20.md b/airflow/providers/jira/PROVIDERS_CHANGES_2020.05.20.md index ba65a7f0929a4..f48488c3cb4a9 100644 --- a/airflow/providers/jira/PROVIDERS_CHANGES_2020.05.20.md +++ b/airflow/providers/jira/PROVIDERS_CHANGES_2020.05.20.md @@ -4,6 +4,7 @@ | Commit | Committed | Subject | |:-----------------------------------------------------------------------------------------------|:------------|:------------------------------------------------------------------------| +| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. (#8994) | | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | | [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | diff --git a/airflow/providers/jira/README.md b/airflow/providers/jira/README.md index 93c39513b77c9..d7804457aa52e 100644 --- a/airflow/providers/jira/README.md +++ b/airflow/providers/jira/README.md @@ -85,6 +85,8 @@ All classes in Airflow 2.0 are in `airflow.providers.jira` package. + + ## Sensors @@ -119,6 +121,7 @@ All classes in Airflow 2.0 are in `airflow.providers.jira` package. | Commit | Committed | Subject | |:-----------------------------------------------------------------------------------------------|:------------|:------------------------------------------------------------------------| +| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. 
(#8994) | | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | | [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | diff --git a/airflow/providers/microsoft/azure/PROVIDERS_CHANGES_2020.05.20.md b/airflow/providers/microsoft/azure/PROVIDERS_CHANGES_2020.05.20.md index 029b9eef251a3..8a5afdc0935d4 100644 --- a/airflow/providers/microsoft/azure/PROVIDERS_CHANGES_2020.05.20.md +++ b/airflow/providers/microsoft/azure/PROVIDERS_CHANGES_2020.05.20.md @@ -4,6 +4,7 @@ | Commit | Committed | Subject | |:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. (#8994) | | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | | [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | diff --git a/airflow/providers/microsoft/azure/README.md b/airflow/providers/microsoft/azure/README.md index 81c272448a78a..474afe162d39c 100644 --- a/airflow/providers/microsoft/azure/README.md +++ b/airflow/providers/microsoft/azure/README.md @@ -33,6 +33,8 @@ Release: 2020.5.20 - [Operators](#operators) - [New operators](#new-operators) - [Moved operators](#moved-operators) + - [Transfer operators](#transfers) + - [Moved transfer operators](#moved-transfers) - [Sensors](#sensors) - [New sensors](#new-sensors) - [Moved sensors](#moved-sensors) @@ -71,11 +73,11 @@ For full compatibility and test status of the backport packages check | azure-batch | >=8.0.0 | | azure-cosmos | >=3.0.1,<4 | | azure-datalake-store | >=0.0.45 | -| azure-kusto-data | >=0.0.43 | +| azure-kusto-data | >=0.0.43,<0.1 | | azure-mgmt-containerinstance | >=1.5.0 | | azure-mgmt-datalake-store | >=0.5.0 | | azure-mgmt-resource | >=2.2.0 | -| azure-storage | >=0.34.0,<0.37.0 | +| azure-storage | >=0.34.0, <0.37.0 | | azure-storage-blob | <12.0 | ## Cross provider package dependencies @@ -112,14 +114,25 @@ All classes in Airflow 2.0 are in `airflow.providers.microsoft.azure` package. 
### Moved operators -| Airflow 2.0 operators: `airflow.providers.microsoft.azure` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|:------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| [operators.adls_list.AzureDataLakeStorageListOperator](https://github.com/apache/airflow/blob/master/airflow/providers/microsoft/azure/operators/adls_list.py) | [contrib.operators.adls_list_operator.AzureDataLakeStorageListOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/adls_list_operator.py) | -| [operators.azure_container_instances.AzureContainerInstancesOperator](https://github.com/apache/airflow/blob/master/airflow/providers/microsoft/azure/operators/azure_container_instances.py) | [contrib.operators.azure_container_instances_operator.AzureContainerInstancesOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/azure_container_instances_operator.py) | -| [operators.azure_cosmos.AzureCosmosInsertDocumentOperator](https://github.com/apache/airflow/blob/master/airflow/providers/microsoft/azure/operators/azure_cosmos.py) | [contrib.operators.azure_cosmos_operator.AzureCosmosInsertDocumentOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/azure_cosmos_operator.py) | -| [operators.file_to_wasb.FileToWasbOperator](https://github.com/apache/airflow/blob/master/airflow/providers/microsoft/azure/operators/file_to_wasb.py) | [contrib.operators.file_to_wasb.FileToWasbOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/file_to_wasb.py) | -| [operators.oracle_to_azure_data_lake_transfer.OracleToAzureDataLakeTransferOperator](https://github.com/apache/airflow/blob/master/airflow/providers/microsoft/azure/operators/oracle_to_azure_data_lake_transfer.py) | [contrib.operators.oracle_to_azure_data_lake_transfer.OracleToAzureDataLakeTransferOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/oracle_to_azure_data_lake_transfer.py) | -| [operators.wasb_delete_blob.WasbDeleteBlobOperator](https://github.com/apache/airflow/blob/master/airflow/providers/microsoft/azure/operators/wasb_delete_blob.py) | [contrib.operators.wasb_delete_blob_operator.WasbDeleteBlobOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/wasb_delete_blob_operator.py) | +| Airflow 2.0 operators: `airflow.providers.microsoft.azure` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | +|:----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|:------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| [operators.adls_list.AzureDataLakeStorageListOperator](https://github.com/apache/airflow/blob/master/airflow/providers/microsoft/azure/operators/adls_list.py) | 
[contrib.operators.adls_list_operator.AzureDataLakeStorageListOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/adls_list_operator.py) | +| [operators.azure_container_instances.AzureContainerInstancesOperator](https://github.com/apache/airflow/blob/master/airflow/providers/microsoft/azure/operators/azure_container_instances.py) | [contrib.operators.azure_container_instances_operator.AzureContainerInstancesOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/azure_container_instances_operator.py) | +| [operators.azure_cosmos.AzureCosmosInsertDocumentOperator](https://github.com/apache/airflow/blob/master/airflow/providers/microsoft/azure/operators/azure_cosmos.py) | [contrib.operators.azure_cosmos_operator.AzureCosmosInsertDocumentOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/azure_cosmos_operator.py) | +| [operators.wasb_delete_blob.WasbDeleteBlobOperator](https://github.com/apache/airflow/blob/master/airflow/providers/microsoft/azure/operators/wasb_delete_blob.py) | [contrib.operators.wasb_delete_blob_operator.WasbDeleteBlobOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/wasb_delete_blob_operator.py) | + + + + + + + +### Moved transfer operators + +| Airflow 2.0 transfers: `airflow.providers.microsoft.azure` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | +|:--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|:----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| [transfers.file_to_wasb.FileToWasbOperator](https://github.com/apache/airflow/blob/master/airflow/providers/microsoft/azure/transfers/file_to_wasb.py) | [contrib.operators.file_to_wasb.FileToWasbOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/file_to_wasb.py) | +| [transfers.oracle_to_azure_data_lake.OracleToAzureDataLakeOperator](https://github.com/apache/airflow/blob/master/airflow/providers/microsoft/azure/transfers/oracle_to_azure_data_lake.py) | [contrib.operators.oracle_to_azure_data_lake_transfer.OracleToAzureDataLakeOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/oracle_to_azure_data_lake_transfer.py) | @@ -177,6 +190,7 @@ All classes in Airflow 2.0 are in `airflow.providers.microsoft.azure` package. | Commit | Committed | Subject | |:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. 
(#8994) | | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | | [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | diff --git a/airflow/providers/microsoft/azure/transfers/__init__.py b/airflow/providers/microsoft/azure/transfers/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/airflow/providers/microsoft/azure/transfers/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/airflow/providers/microsoft/azure/operators/file_to_wasb.py b/airflow/providers/microsoft/azure/transfers/file_to_wasb.py similarity index 100% rename from airflow/providers/microsoft/azure/operators/file_to_wasb.py rename to airflow/providers/microsoft/azure/transfers/file_to_wasb.py diff --git a/airflow/providers/microsoft/azure/operators/oracle_to_azure_data_lake_transfer.py b/airflow/providers/microsoft/azure/transfers/oracle_to_azure_data_lake.py similarity index 98% rename from airflow/providers/microsoft/azure/operators/oracle_to_azure_data_lake_transfer.py rename to airflow/providers/microsoft/azure/transfers/oracle_to_azure_data_lake.py index 5f59030d01083..11013d5476f77 100644 --- a/airflow/providers/microsoft/azure/operators/oracle_to_azure_data_lake_transfer.py +++ b/airflow/providers/microsoft/azure/transfers/oracle_to_azure_data_lake.py @@ -27,7 +27,7 @@ from airflow.utils.decorators import apply_defaults -class OracleToAzureDataLakeTransferOperator(BaseOperator): +class OracleToAzureDataLakeOperator(BaseOperator): """ Moves data from Oracle to Azure Data Lake. The operator runs the query against Oracle and stores the file locally before loading it into Azure Data Lake. 
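The two renames above move the Azure transfer modules from `operators/` into the new `transfers/` package, and the class rename drops the `Transfer` suffix (`OracleToAzureDataLakeTransferOperator` becomes `OracleToAzureDataLakeOperator`). A minimal sketch of a DAG using the new import path, assuming Airflow 2.0 with the `microsoft.azure` provider installed; the DAG id, connection id, file path, and container/blob names below are illustrative:

```python
from datetime import datetime

from airflow import DAG
# Airflow 1.10.* location: airflow.contrib.operators.file_to_wasb.FileToWasbOperator
from airflow.providers.microsoft.azure.transfers.file_to_wasb import FileToWasbOperator

with DAG(
    dag_id="file_to_wasb_example",        # illustrative DAG id
    start_date=datetime(2020, 5, 26),
    schedule_interval=None,
) as dag:
    # Uploads a local file to Azure Blob Storage (WASB).
    upload = FileToWasbOperator(
        task_id="upload_report",
        file_path="/tmp/report.csv",      # illustrative local path
        container_name="reports",         # illustrative container
        blob_name="report.csv",
        wasb_conn_id="wasb_default",
    )
```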
diff --git a/airflow/providers/microsoft/mssql/PROVIDERS_CHANGES_2020.05.20.md b/airflow/providers/microsoft/mssql/PROVIDERS_CHANGES_2020.05.20.md index d942c5b842648..aef3d8b442f40 100644 --- a/airflow/providers/microsoft/mssql/PROVIDERS_CHANGES_2020.05.20.md +++ b/airflow/providers/microsoft/mssql/PROVIDERS_CHANGES_2020.05.20.md @@ -4,6 +4,7 @@ | Commit | Committed | Subject | |:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. (#8994) | | [1d36b0303](https://github.com/apache/airflow/commit/1d36b0303b8632fce6de78ca4e782ae26ee06fea) | 2020-05-23 | Fix references in docs (#8984) | | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | diff --git a/airflow/providers/microsoft/mssql/README.md b/airflow/providers/microsoft/mssql/README.md index 32cc3fc9e400f..d76b174eedaca 100644 --- a/airflow/providers/microsoft/mssql/README.md +++ b/airflow/providers/microsoft/mssql/README.md @@ -100,6 +100,8 @@ All classes in Airflow 2.0 are in `airflow.providers.microsoft.mssql` package. + + ## Hooks @@ -121,6 +123,7 @@ All classes in Airflow 2.0 are in `airflow.providers.microsoft.mssql` package. | Commit | Committed | Subject | |:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. 
(#8994) | | [1d36b0303](https://github.com/apache/airflow/commit/1d36b0303b8632fce6de78ca4e782ae26ee06fea) | 2020-05-23 | Fix references in docs (#8984) | | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | diff --git a/airflow/providers/microsoft/winrm/PROVIDERS_CHANGES_2020.05.20.md b/airflow/providers/microsoft/winrm/PROVIDERS_CHANGES_2020.05.20.md index dc9ea5e4d9988..518dcbcdb9aa6 100644 --- a/airflow/providers/microsoft/winrm/PROVIDERS_CHANGES_2020.05.20.md +++ b/airflow/providers/microsoft/winrm/PROVIDERS_CHANGES_2020.05.20.md @@ -4,6 +4,7 @@ | Commit | Committed | Subject | |:-----------------------------------------------------------------------------------------------|:------------|:---------------------------------------------------------------------------------| +| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. (#8994) | | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | | [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | diff --git a/airflow/providers/microsoft/winrm/README.md b/airflow/providers/microsoft/winrm/README.md index d760db80b99ac..036a1fd3ca22c 100644 --- a/airflow/providers/microsoft/winrm/README.md +++ b/airflow/providers/microsoft/winrm/README.md @@ -84,6 +84,8 @@ All classes in Airflow 2.0 are in `airflow.providers.microsoft.winrm` package. + + ## Hooks @@ -105,6 +107,7 @@ All classes in Airflow 2.0 are in `airflow.providers.microsoft.winrm` package. | Commit | Committed | Subject | |:-----------------------------------------------------------------------------------------------|:------------|:---------------------------------------------------------------------------------| +| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. 
(#8994) | | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | | [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | diff --git a/airflow/providers/mongo/PROVIDERS_CHANGES_2020.05.20.md b/airflow/providers/mongo/PROVIDERS_CHANGES_2020.05.20.md index 14192614f44ff..c2162a1016f43 100644 --- a/airflow/providers/mongo/PROVIDERS_CHANGES_2020.05.20.md +++ b/airflow/providers/mongo/PROVIDERS_CHANGES_2020.05.20.md @@ -4,6 +4,7 @@ | Commit | Committed | Subject | |:-----------------------------------------------------------------------------------------------|:------------|:------------------------------------------------------------------------| +| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. (#8994) | | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | | [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | diff --git a/airflow/providers/mongo/README.md b/airflow/providers/mongo/README.md index 13f30422c59bc..870398a8709ed 100644 --- a/airflow/providers/mongo/README.md +++ b/airflow/providers/mongo/README.md @@ -72,6 +72,8 @@ All classes in Airflow 2.0 are in `airflow.providers.mongo` package. + + ## Sensors @@ -105,6 +107,7 @@ All classes in Airflow 2.0 are in `airflow.providers.mongo` package. | Commit | Committed | Subject | |:-----------------------------------------------------------------------------------------------|:------------|:------------------------------------------------------------------------| +| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. 
(#8994) | | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | | [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | diff --git a/airflow/providers/mysql/PROVIDERS_CHANGES_2020.05.20.md b/airflow/providers/mysql/PROVIDERS_CHANGES_2020.05.20.md index 6af033ee74927..5bc13ed89132a 100644 --- a/airflow/providers/mysql/PROVIDERS_CHANGES_2020.05.20.md +++ b/airflow/providers/mysql/PROVIDERS_CHANGES_2020.05.20.md @@ -4,6 +4,7 @@ | Commit | Committed | Subject | |:-----------------------------------------------------------------------------------------------|:------------|:--------------------------------------------------------------------------------------------| +| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. (#8994) | | [1d36b0303](https://github.com/apache/airflow/commit/1d36b0303b8632fce6de78ca4e782ae26ee06fea) | 2020-05-23 | Fix references in docs (#8984) | | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | diff --git a/airflow/providers/mysql/README.md b/airflow/providers/mysql/README.md index fb57a3ac6c577..4d722d3dcbf97 100644 --- a/airflow/providers/mysql/README.md +++ b/airflow/providers/mysql/README.md @@ -31,8 +31,10 @@ Release: 2020.5.20 - [Cross provider package dependencies](#cross-provider-package-dependencies) - [Provider class summary](#provider-class-summary) - [Operators](#operators) - - [New operators](#new-operators) - [Moved operators](#moved-operators) + - [Transfer operators](#transfers) + - [New transfer operators](#new-transfers) + - [Moved transfer operators](#moved-transfers) - [Hooks](#hooks) - [Moved hooks](#moved-hooks) - [Releases](#releases) @@ -92,21 +94,32 @@ All classes in Airflow 2.0 are in `airflow.providers.mysql` package. 
## Operators -### New operators -| New Airflow 2.0 operators: `airflow.providers.mysql` package | -|:--------------------------------------------------------------------------------------------------------------------------------------------------| -| [operators.s3_to_mysql.S3ToMySqlTransferOperator](https://github.com/apache/airflow/blob/master/airflow/providers/mysql/operators/s3_to_mysql.py) | +### Moved operators +| Airflow 2.0 operators: `airflow.providers.mysql` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | +|:--------------------------------------------------------------------------------------------------------------------------|:----------------------------------------------------------------------------------------------------------------------------------| +| [operators.mysql.MySqlOperator](https://github.com/apache/airflow/blob/master/airflow/providers/mysql/operators/mysql.py) | [operators.mysql_operator.MySqlOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/operators/mysql_operator.py) | -### Moved operators -| Airflow 2.0 operators: `airflow.providers.mysql` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:-----------------------------------------------------------------------------------------------------------------------------------------------------------------|:---------------------------------------------------------------------------------------------------------------------------------------------------------------| -| [operators.mysql.MySqlOperator](https://github.com/apache/airflow/blob/master/airflow/providers/mysql/operators/mysql.py) | [operators.mysql_operator.MySqlOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/operators/mysql_operator.py) | -| [operators.presto_to_mysql.PrestoToMySqlTransferOperator](https://github.com/apache/airflow/blob/master/airflow/providers/mysql/operators/presto_to_mysql.py) | [operators.presto_to_mysql.PrestoToMySqlTransfer](https://github.com/apache/airflow/blob/v1-10-stable/airflow/operators/presto_to_mysql.py) | -| [operators.vertica_to_mysql.VerticaToMySqlTransferOperator](https://github.com/apache/airflow/blob/master/airflow/providers/mysql/operators/vertica_to_mysql.py) | [contrib.operators.vertica_to_mysql.VerticaToMySqlTransfer](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/vertica_to_mysql.py) | + + + +### New transfer operators + +| New Airflow 2.0 transfers: `airflow.providers.mysql` package | +|:------------------------------------------------------------------------------------------------------------------------------------------| +| [transfers.s3_to_mysql.S3ToMySqlOperator](https://github.com/apache/airflow/blob/master/airflow/providers/mysql/transfers/s3_to_mysql.py) | + + + +### Moved transfer operators + +| Airflow 2.0 transfers: `airflow.providers.mysql` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | +|:---------------------------------------------------------------------------------------------------------------------------------------------------------|:---------------------------------------------------------------------------------------------------------------------------------------------------------------| +| [transfers.presto_to_mysql.PrestoToMySqlOperator](https://github.com/apache/airflow/blob/master/airflow/providers/mysql/transfers/presto_to_mysql.py) | 
[operators.presto_to_mysql.PrestoToMySqlTransfer](https://github.com/apache/airflow/blob/v1-10-stable/airflow/operators/presto_to_mysql.py) | +| [transfers.vertica_to_mysql.VerticaToMySqlOperator](https://github.com/apache/airflow/blob/master/airflow/providers/mysql/transfers/vertica_to_mysql.py) | [contrib.operators.vertica_to_mysql.VerticaToMySqlTransfer](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/vertica_to_mysql.py) | @@ -133,6 +146,7 @@ All classes in Airflow 2.0 are in `airflow.providers.mysql` package. | Commit | Committed | Subject | |:-----------------------------------------------------------------------------------------------|:------------|:--------------------------------------------------------------------------------------------| +| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. (#8994) | | [1d36b0303](https://github.com/apache/airflow/commit/1d36b0303b8632fce6de78ca4e782ae26ee06fea) | 2020-05-23 | Fix references in docs (#8984) | | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | diff --git a/airflow/providers/mysql/transfers/__init__.py b/airflow/providers/mysql/transfers/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/airflow/providers/mysql/transfers/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
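As the tables above show, the MySQL transfer operators likewise move to the new `airflow.providers.mysql.transfers` package and lose the `Transfer` suffix (for example `S3ToMySqlTransferOperator` becomes `S3ToMySqlOperator`, as the renames below confirm). A minimal sketch of the new-style import, assuming Airflow 2.0 with the `mysql` provider installed and AWS/MySQL connections configured; the S3 key, target table, and connection ids are illustrative:

```python
from datetime import datetime

from airflow import DAG
# Previous location: airflow.providers.mysql.operators.s3_to_mysql.S3ToMySqlTransferOperator
from airflow.providers.mysql.transfers.s3_to_mysql import S3ToMySqlOperator

with DAG(
    dag_id="s3_to_mysql_example",                 # illustrative DAG id
    start_date=datetime(2020, 5, 26),
    schedule_interval=None,
) as dag:
    # Downloads the file from S3 and bulk-loads it into the MySQL table.
    load = S3ToMySqlOperator(
        task_id="load_s3_file_into_mysql",
        s3_source_key="s3://my-bucket/data.csv",  # illustrative S3 key
        mysql_table="my_table",                   # illustrative target table
        mysql_duplicate_key_handling="IGNORE",
        aws_conn_id="aws_default",
        mysql_conn_id="mysql_default",
    )
```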
diff --git a/airflow/providers/mysql/operators/presto_to_mysql.py b/airflow/providers/mysql/transfers/presto_to_mysql.py similarity index 98% rename from airflow/providers/mysql/operators/presto_to_mysql.py rename to airflow/providers/mysql/transfers/presto_to_mysql.py index c727fb9368df9..156ff319d2138 100644 --- a/airflow/providers/mysql/operators/presto_to_mysql.py +++ b/airflow/providers/mysql/transfers/presto_to_mysql.py @@ -23,7 +23,7 @@ from airflow.utils.decorators import apply_defaults -class PrestoToMySqlTransferOperator(BaseOperator): +class PrestoToMySqlOperator(BaseOperator): """ Moves data from Presto to MySQL, note that for now the data is loaded into memory before being pushed to MySQL, so this operator should diff --git a/airflow/providers/mysql/operators/s3_to_mysql.py b/airflow/providers/mysql/transfers/s3_to_mysql.py similarity index 98% rename from airflow/providers/mysql/operators/s3_to_mysql.py rename to airflow/providers/mysql/transfers/s3_to_mysql.py index e8a3415f4cdca..53cb144690abc 100644 --- a/airflow/providers/mysql/operators/s3_to_mysql.py +++ b/airflow/providers/mysql/transfers/s3_to_mysql.py @@ -24,7 +24,7 @@ from airflow.utils.decorators import apply_defaults -class S3ToMySqlTransferOperator(BaseOperator): +class S3ToMySqlOperator(BaseOperator): """ Loads a file from S3 into a MySQL table. diff --git a/airflow/providers/mysql/operators/vertica_to_mysql.py b/airflow/providers/mysql/transfers/vertica_to_mysql.py similarity index 99% rename from airflow/providers/mysql/operators/vertica_to_mysql.py rename to airflow/providers/mysql/transfers/vertica_to_mysql.py index 7e952b86a2ebc..0e6b8a69dbeb2 100644 --- a/airflow/providers/mysql/operators/vertica_to_mysql.py +++ b/airflow/providers/mysql/transfers/vertica_to_mysql.py @@ -28,7 +28,7 @@ from airflow.utils.decorators import apply_defaults -class VerticaToMySqlTransferOperator(BaseOperator): +class VerticaToMySqlOperator(BaseOperator): """ Moves data from Vertica to MySQL. diff --git a/airflow/providers/odbc/PROVIDERS_CHANGES_2020.05.20.md b/airflow/providers/odbc/PROVIDERS_CHANGES_2020.05.20.md index b7c4c761f23c8..658a3d1a22402 100644 --- a/airflow/providers/odbc/PROVIDERS_CHANGES_2020.05.20.md +++ b/airflow/providers/odbc/PROVIDERS_CHANGES_2020.05.20.md @@ -4,6 +4,7 @@ | Commit | Committed | Subject | |:-----------------------------------------------------------------------------------------------|:------------|:------------------------------------------------------------------------| +| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. (#8994) | | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | | [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | diff --git a/airflow/providers/odbc/README.md b/airflow/providers/odbc/README.md index bda579a7d837b..ae79968dee071 100644 --- a/airflow/providers/odbc/README.md +++ b/airflow/providers/odbc/README.md @@ -70,6 +70,8 @@ All classes in Airflow 2.0 are in `airflow.providers.odbc` package. 
+ + ## Hooks @@ -91,6 +93,7 @@ All classes in Airflow 2.0 are in `airflow.providers.odbc` package. | Commit | Committed | Subject | |:-----------------------------------------------------------------------------------------------|:------------|:------------------------------------------------------------------------| +| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. (#8994) | | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | | [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | diff --git a/airflow/providers/openfaas/PROVIDERS_CHANGES_2020.05.20.md b/airflow/providers/openfaas/PROVIDERS_CHANGES_2020.05.20.md index 077e6782f7e13..95d1ad815ce51 100644 --- a/airflow/providers/openfaas/PROVIDERS_CHANGES_2020.05.20.md +++ b/airflow/providers/openfaas/PROVIDERS_CHANGES_2020.05.20.md @@ -4,6 +4,7 @@ | Commit | Committed | Subject | |:-----------------------------------------------------------------------------------------------|:------------|:------------------------------------------------------------------------| +| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. (#8994) | | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | | [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | diff --git a/airflow/providers/openfaas/README.md b/airflow/providers/openfaas/README.md index abcc8e7630143..b44dc990a8b4e 100644 --- a/airflow/providers/openfaas/README.md +++ b/airflow/providers/openfaas/README.md @@ -63,6 +63,8 @@ All classes in Airflow 2.0 are in `airflow.providers.openfaas` package. + + ## Hooks @@ -84,6 +86,7 @@ All classes in Airflow 2.0 are in `airflow.providers.openfaas` package. | Commit | Committed | Subject | |:-----------------------------------------------------------------------------------------------|:------------|:------------------------------------------------------------------------| +| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. 
(#8994) | | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | | [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | diff --git a/airflow/providers/opsgenie/PROVIDERS_CHANGES_2020.05.20.md b/airflow/providers/opsgenie/PROVIDERS_CHANGES_2020.05.20.md index 78bd5ae1c5699..667607afdc3d3 100644 --- a/airflow/providers/opsgenie/PROVIDERS_CHANGES_2020.05.20.md +++ b/airflow/providers/opsgenie/PROVIDERS_CHANGES_2020.05.20.md @@ -4,6 +4,7 @@ | Commit | Committed | Subject | |:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. (#8994) | | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | | [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | diff --git a/airflow/providers/opsgenie/README.md b/airflow/providers/opsgenie/README.md index 8f27ed879d7d4..f41652ff5f16a 100644 --- a/airflow/providers/opsgenie/README.md +++ b/airflow/providers/opsgenie/README.md @@ -93,6 +93,8 @@ All classes in Airflow 2.0 are in `airflow.providers.opsgenie` package. + + ## Hooks @@ -114,6 +116,7 @@ All classes in Airflow 2.0 are in `airflow.providers.opsgenie` package. | Commit | Committed | Subject | |:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. 
(#8994) | | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | | [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | diff --git a/airflow/providers/oracle/PROVIDERS_CHANGES_2020.05.20.md b/airflow/providers/oracle/PROVIDERS_CHANGES_2020.05.20.md index dd65bf53f9520..9a2042252485f 100644 --- a/airflow/providers/oracle/PROVIDERS_CHANGES_2020.05.20.md +++ b/airflow/providers/oracle/PROVIDERS_CHANGES_2020.05.20.md @@ -4,6 +4,7 @@ | Commit | Committed | Subject | |:-----------------------------------------------------------------------------------------------|:------------|:--------------------------------------------------------------------------------------------| +| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. (#8994) | | [1d36b0303](https://github.com/apache/airflow/commit/1d36b0303b8632fce6de78ca4e782ae26ee06fea) | 2020-05-23 | Fix references in docs (#8984) | | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | diff --git a/airflow/providers/oracle/README.md b/airflow/providers/oracle/README.md index 628138322856d..35de449e16a96 100644 --- a/airflow/providers/oracle/README.md +++ b/airflow/providers/oracle/README.md @@ -31,6 +31,8 @@ Release: 2020.5.20 - [Provider class summary](#provider-class-summary) - [Operators](#operators) - [Moved operators](#moved-operators) + - [Transfer operators](#transfers) + - [Moved transfer operators](#moved-transfers) - [Hooks](#hooks) - [Moved hooks](#moved-hooks) - [Releases](#releases) @@ -76,10 +78,21 @@ All classes in Airflow 2.0 are in `airflow.providers.oracle` package. 
### Moved operators -| Airflow 2.0 operators: `airflow.providers.oracle` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|:---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| [operators.oracle.OracleOperator](https://github.com/apache/airflow/blob/master/airflow/providers/oracle/operators/oracle.py) | [operators.oracle_operator.OracleOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/operators/oracle_operator.py) | -| [operators.oracle_to_oracle_transfer.OracleToOracleTransferOperator](https://github.com/apache/airflow/blob/master/airflow/providers/oracle/operators/oracle_to_oracle_transfer.py) | [contrib.operators.oracle_to_oracle_transfer.OracleToOracleTransfer](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/oracle_to_oracle_transfer.py) | +| Airflow 2.0 operators: `airflow.providers.oracle` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | +|:------------------------------------------------------------------------------------------------------------------------------|:-------------------------------------------------------------------------------------------------------------------------------------| +| [operators.oracle.OracleOperator](https://github.com/apache/airflow/blob/master/airflow/providers/oracle/operators/oracle.py) | [operators.oracle_operator.OracleOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/operators/oracle_operator.py) | + + + + + + + +### Moved transfer operators + +| Airflow 2.0 transfers: `airflow.providers.oracle` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | +|:----------------------------------------------------------------------------------------------------------------------------------------------------------|:---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| [transfers.oracle_to_oracle.OracleToOracleOperator](https://github.com/apache/airflow/blob/master/airflow/providers/oracle/transfers/oracle_to_oracle.py) | [contrib.operators.oracle_to_oracle_transfer.OracleToOracleTransfer](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/oracle_to_oracle_transfer.py) | @@ -106,6 +119,7 @@ All classes in Airflow 2.0 are in `airflow.providers.oracle` package. | Commit | Committed | Subject | |:-----------------------------------------------------------------------------------------------|:------------|:--------------------------------------------------------------------------------------------| +| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. 
(#8994) | | [1d36b0303](https://github.com/apache/airflow/commit/1d36b0303b8632fce6de78ca4e782ae26ee06fea) | 2020-05-23 | Fix references in docs (#8984) | | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | diff --git a/airflow/providers/oracle/transfers/__init__.py b/airflow/providers/oracle/transfers/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/airflow/providers/oracle/transfers/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/airflow/providers/oracle/operators/oracle_to_oracle_transfer.py b/airflow/providers/oracle/transfers/oracle_to_oracle.py similarity index 98% rename from airflow/providers/oracle/operators/oracle_to_oracle_transfer.py rename to airflow/providers/oracle/transfers/oracle_to_oracle.py index 7973e4af9a6ca..6f845d3e8f045 100644 --- a/airflow/providers/oracle/operators/oracle_to_oracle_transfer.py +++ b/airflow/providers/oracle/transfers/oracle_to_oracle.py @@ -21,7 +21,7 @@ from airflow.utils.decorators import apply_defaults -class OracleToOracleTransferOperator(BaseOperator): +class OracleToOracleOperator(BaseOperator): """ Moves data from Oracle to Oracle. diff --git a/airflow/providers/pagerduty/PROVIDERS_CHANGES_2020.05.20.md b/airflow/providers/pagerduty/PROVIDERS_CHANGES_2020.05.20.md index eff10ce23aeae..0a2fbd1be7a48 100644 --- a/airflow/providers/pagerduty/PROVIDERS_CHANGES_2020.05.20.md +++ b/airflow/providers/pagerduty/PROVIDERS_CHANGES_2020.05.20.md @@ -4,6 +4,7 @@ | Commit | Committed | Subject | |:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------| +| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. 
(#8994) | | [1d36b0303](https://github.com/apache/airflow/commit/1d36b0303b8632fce6de78ca4e782ae26ee06fea) | 2020-05-23 | Fix references in docs (#8984) | | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | diff --git a/airflow/providers/pagerduty/README.md b/airflow/providers/pagerduty/README.md index 71dd9c87baee7..7f967a9b59291 100644 --- a/airflow/providers/pagerduty/README.md +++ b/airflow/providers/pagerduty/README.md @@ -70,6 +70,8 @@ All classes in Airflow 2.0 are in `airflow.providers.pagerduty` package. + + ## Hooks @@ -91,6 +93,7 @@ All classes in Airflow 2.0 are in `airflow.providers.pagerduty` package. | Commit | Committed | Subject | |:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------| +| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. (#8994) | | [1d36b0303](https://github.com/apache/airflow/commit/1d36b0303b8632fce6de78ca4e782ae26ee06fea) | 2020-05-23 | Fix references in docs (#8984) | | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | diff --git a/airflow/providers/postgres/PROVIDERS_CHANGES_2020.05.20.md b/airflow/providers/postgres/PROVIDERS_CHANGES_2020.05.20.md index 7fdb7aefd2bb0..4acd97ca81862 100644 --- a/airflow/providers/postgres/PROVIDERS_CHANGES_2020.05.20.md +++ b/airflow/providers/postgres/PROVIDERS_CHANGES_2020.05.20.md @@ -4,6 +4,7 @@ | Commit | Committed | Subject | |:-----------------------------------------------------------------------------------------------|:------------|:-----------------------------------------------------------------------------------------| +| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. (#8994) | | [1d36b0303](https://github.com/apache/airflow/commit/1d36b0303b8632fce6de78ca4e782ae26ee06fea) | 2020-05-23 | Fix references in docs (#8984) | | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | diff --git a/airflow/providers/postgres/README.md b/airflow/providers/postgres/README.md index 66b2ca9b62d76..6bdf41337c495 100644 --- a/airflow/providers/postgres/README.md +++ b/airflow/providers/postgres/README.md @@ -100,6 +100,8 @@ All classes in Airflow 2.0 are in `airflow.providers.postgres` package. + + ## Hooks @@ -121,6 +123,7 @@ All classes in Airflow 2.0 are in `airflow.providers.postgres` package. 
| Commit | Committed | Subject | |:-----------------------------------------------------------------------------------------------|:------------|:-----------------------------------------------------------------------------------------| +| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. (#8994) | | [1d36b0303](https://github.com/apache/airflow/commit/1d36b0303b8632fce6de78ca4e782ae26ee06fea) | 2020-05-23 | Fix references in docs (#8984) | | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | diff --git a/airflow/providers/presto/PROVIDERS_CHANGES_2020.05.20.md b/airflow/providers/presto/PROVIDERS_CHANGES_2020.05.20.md index 45bca41010bee..c7a4f7839396c 100644 --- a/airflow/providers/presto/PROVIDERS_CHANGES_2020.05.20.md +++ b/airflow/providers/presto/PROVIDERS_CHANGES_2020.05.20.md @@ -4,6 +4,7 @@ | Commit | Committed | Subject | |:-----------------------------------------------------------------------------------------------|:------------|:------------------------------------------------------------------------| +| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. (#8994) | | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | | [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | diff --git a/airflow/providers/presto/README.md b/airflow/providers/presto/README.md index 689a51b4d038a..dd3ac0dafb061 100644 --- a/airflow/providers/presto/README.md +++ b/airflow/providers/presto/README.md @@ -70,6 +70,8 @@ All classes in Airflow 2.0 are in `airflow.providers.presto` package. + + ## Hooks @@ -91,6 +93,7 @@ All classes in Airflow 2.0 are in `airflow.providers.presto` package. | Commit | Committed | Subject | |:-----------------------------------------------------------------------------------------------|:------------|:------------------------------------------------------------------------| +| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. 
(#8994) | | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | | [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | diff --git a/airflow/providers/qubole/PROVIDERS_CHANGES_2020.05.20.md b/airflow/providers/qubole/PROVIDERS_CHANGES_2020.05.20.md index 7d7c8bdb2a1d8..b44865f73746e 100644 --- a/airflow/providers/qubole/PROVIDERS_CHANGES_2020.05.20.md +++ b/airflow/providers/qubole/PROVIDERS_CHANGES_2020.05.20.md @@ -4,6 +4,7 @@ | Commit | Committed | Subject | |:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. (#8994) | | [1d36b0303](https://github.com/apache/airflow/commit/1d36b0303b8632fce6de78ca4e782ae26ee06fea) | 2020-05-23 | Fix references in docs (#8984) | | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | diff --git a/airflow/providers/qubole/README.md b/airflow/providers/qubole/README.md index eb57f8a124b9f..5d18994237b3f 100644 --- a/airflow/providers/qubole/README.md +++ b/airflow/providers/qubole/README.md @@ -87,6 +87,8 @@ All classes in Airflow 2.0 are in `airflow.providers.qubole` package. + + ## Sensors @@ -123,6 +125,7 @@ All classes in Airflow 2.0 are in `airflow.providers.qubole` package. | Commit | Committed | Subject | |:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. 
(#8994) | | [1d36b0303](https://github.com/apache/airflow/commit/1d36b0303b8632fce6de78ca4e782ae26ee06fea) | 2020-05-23 | Fix references in docs (#8984) | | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | diff --git a/airflow/providers/redis/PROVIDERS_CHANGES_2020.05.20.md b/airflow/providers/redis/PROVIDERS_CHANGES_2020.05.20.md index 131848f03d504..70a410436ba50 100644 --- a/airflow/providers/redis/PROVIDERS_CHANGES_2020.05.20.md +++ b/airflow/providers/redis/PROVIDERS_CHANGES_2020.05.20.md @@ -4,6 +4,7 @@ | Commit | Committed | Subject | |:-----------------------------------------------------------------------------------------------|:------------|:------------------------------------------------------------------------| +| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. (#8994) | | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | | [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | diff --git a/airflow/providers/redis/README.md b/airflow/providers/redis/README.md index 83d92c0cf2517..98bbeb3f315de 100644 --- a/airflow/providers/redis/README.md +++ b/airflow/providers/redis/README.md @@ -85,6 +85,8 @@ All classes in Airflow 2.0 are in `airflow.providers.redis` package. + + ## Sensors @@ -119,6 +121,7 @@ All classes in Airflow 2.0 are in `airflow.providers.redis` package. | Commit | Committed | Subject | |:-----------------------------------------------------------------------------------------------|:------------|:------------------------------------------------------------------------| +| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. 
(#8994) | | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | | [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | diff --git a/airflow/providers/salesforce/PROVIDERS_CHANGES_2020.05.20.md b/airflow/providers/salesforce/PROVIDERS_CHANGES_2020.05.20.md index e3786ff6ed60b..bf54e1f82e49d 100644 --- a/airflow/providers/salesforce/PROVIDERS_CHANGES_2020.05.20.md +++ b/airflow/providers/salesforce/PROVIDERS_CHANGES_2020.05.20.md @@ -4,6 +4,7 @@ | Commit | Committed | Subject | |:-----------------------------------------------------------------------------------------------|:------------|:---------------------------------------------------------------------------------| +| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. (#8994) | | [1d36b0303](https://github.com/apache/airflow/commit/1d36b0303b8632fce6de78ca4e782ae26ee06fea) | 2020-05-23 | Fix references in docs (#8984) | | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | diff --git a/airflow/providers/salesforce/README.md b/airflow/providers/salesforce/README.md index 00307f3374f9d..20bcf6eb06f96 100644 --- a/airflow/providers/salesforce/README.md +++ b/airflow/providers/salesforce/README.md @@ -86,6 +86,8 @@ All classes in Airflow 2.0 are in `airflow.providers.salesforce` package. + + ## Sensors @@ -125,6 +127,7 @@ All classes in Airflow 2.0 are in `airflow.providers.salesforce` package. | Commit | Committed | Subject | |:-----------------------------------------------------------------------------------------------|:------------|:---------------------------------------------------------------------------------| +| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. 
(#8994) | | [1d36b0303](https://github.com/apache/airflow/commit/1d36b0303b8632fce6de78ca4e782ae26ee06fea) | 2020-05-23 | Fix references in docs (#8984) | | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | diff --git a/airflow/providers/samba/PROVIDERS_CHANGES_2020.05.20.md b/airflow/providers/samba/PROVIDERS_CHANGES_2020.05.20.md index 752fed9f67a80..cd65a927d5b6e 100644 --- a/airflow/providers/samba/PROVIDERS_CHANGES_2020.05.20.md +++ b/airflow/providers/samba/PROVIDERS_CHANGES_2020.05.20.md @@ -4,6 +4,7 @@ | Commit | Committed | Subject | |:-----------------------------------------------------------------------------------------------|:------------|:------------------------------------------------------------------------| +| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. (#8994) | | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | | [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | diff --git a/airflow/providers/samba/README.md b/airflow/providers/samba/README.md index 50253ed6176fd..a17a54ed371e2 100644 --- a/airflow/providers/samba/README.md +++ b/airflow/providers/samba/README.md @@ -70,6 +70,8 @@ All classes in Airflow 2.0 are in `airflow.providers.samba` package. + + ## Hooks @@ -91,6 +93,7 @@ All classes in Airflow 2.0 are in `airflow.providers.samba` package. | Commit | Committed | Subject | |:-----------------------------------------------------------------------------------------------|:------------|:------------------------------------------------------------------------| +| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. 
(#8994) | | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | | [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | diff --git a/airflow/providers/segment/PROVIDERS_CHANGES_2020.05.20.md b/airflow/providers/segment/PROVIDERS_CHANGES_2020.05.20.md index a9c72ea9ff7c1..9c1eac609e470 100644 --- a/airflow/providers/segment/PROVIDERS_CHANGES_2020.05.20.md +++ b/airflow/providers/segment/PROVIDERS_CHANGES_2020.05.20.md @@ -4,6 +4,7 @@ | Commit | Committed | Subject | |:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------| +| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. (#8994) | | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | | [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | diff --git a/airflow/providers/segment/README.md b/airflow/providers/segment/README.md index fb5e31ca263b3..66cecd91fbe40 100644 --- a/airflow/providers/segment/README.md +++ b/airflow/providers/segment/README.md @@ -84,6 +84,8 @@ All classes in Airflow 2.0 are in `airflow.providers.segment` package. + + ## Hooks @@ -105,6 +107,7 @@ All classes in Airflow 2.0 are in `airflow.providers.segment` package. | Commit | Committed | Subject | |:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------| +| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. 
(#8994) | | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | | [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | diff --git a/airflow/providers/sftp/PROVIDERS_CHANGES_2020.05.20.md b/airflow/providers/sftp/PROVIDERS_CHANGES_2020.05.20.md index 2cad6334c5c17..a59e463a742d4 100644 --- a/airflow/providers/sftp/PROVIDERS_CHANGES_2020.05.20.md +++ b/airflow/providers/sftp/PROVIDERS_CHANGES_2020.05.20.md @@ -4,6 +4,7 @@ | Commit | Committed | Subject | |:-----------------------------------------------------------------------------------------------|:------------|:---------------------------------------------------------------------------| +| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. (#8994) | | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | | [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | diff --git a/airflow/providers/sftp/README.md b/airflow/providers/sftp/README.md index 2121228b7689a..689c9849d593c 100644 --- a/airflow/providers/sftp/README.md +++ b/airflow/providers/sftp/README.md @@ -103,6 +103,8 @@ All classes in Airflow 2.0 are in `airflow.providers.sftp` package. + + ## Sensors @@ -136,6 +138,7 @@ All classes in Airflow 2.0 are in `airflow.providers.sftp` package. | Commit | Committed | Subject | |:-----------------------------------------------------------------------------------------------|:------------|:---------------------------------------------------------------------------| +| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. 
(#8994) | | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | | [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | diff --git a/airflow/providers/singularity/PROVIDERS_CHANGES_2020.05.20.md b/airflow/providers/singularity/PROVIDERS_CHANGES_2020.05.20.md index d310578e6ed3e..ead7f4d82f23c 100644 --- a/airflow/providers/singularity/PROVIDERS_CHANGES_2020.05.20.md +++ b/airflow/providers/singularity/PROVIDERS_CHANGES_2020.05.20.md @@ -4,6 +4,7 @@ | Commit | Committed | Subject | |:-----------------------------------------------------------------------------------------------|:------------|:---------------------------------------------------------------------------------| +| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. (#8994) | | [e742ef7c7](https://github.com/apache/airflow/commit/e742ef7c704c18bf69b7a7235adb7f75e742f902) | 2020-05-23 | Fix typo in test_project_structure (#8978) | | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | diff --git a/airflow/providers/singularity/README.md b/airflow/providers/singularity/README.md index 9b1d8f8d97af9..81fc5602f649a 100644 --- a/airflow/providers/singularity/README.md +++ b/airflow/providers/singularity/README.md @@ -86,12 +86,15 @@ All classes in Airflow 2.0 are in `airflow.providers.singularity` package. + + ## Releases ### Release 2020.5.20 | Commit | Committed | Subject | |:-----------------------------------------------------------------------------------------------|:------------|:---------------------------------------------------------------------------------| +| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. 
(#8994) | | [e742ef7c7](https://github.com/apache/airflow/commit/e742ef7c704c18bf69b7a7235adb7f75e742f902) | 2020-05-23 | Fix typo in test_project_structure (#8978) | | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | diff --git a/airflow/providers/slack/PROVIDERS_CHANGES_2020.05.20.md b/airflow/providers/slack/PROVIDERS_CHANGES_2020.05.20.md index f55384abf0c90..755504b10e43d 100644 --- a/airflow/providers/slack/PROVIDERS_CHANGES_2020.05.20.md +++ b/airflow/providers/slack/PROVIDERS_CHANGES_2020.05.20.md @@ -4,6 +4,8 @@ | Commit | Committed | Subject | |:-----------------------------------------------------------------------------------------------|:------------|:------------------------------------------------------------------------------| +| [5cf46fad1](https://github.com/apache/airflow/commit/5cf46fad1e0a9cdde213258b2064e16d30d3160e) | 2020-05-29 | Add SlackAPIFileOperator impementing files.upload from Slack API (#9004) | +| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. (#8994) | | [427257c2e](https://github.com/apache/airflow/commit/427257c2e2ffc886ef9f516e9c4d015a4ede9bbd) | 2020-05-24 | Remove defunct code from setup.py (#8982) | | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | diff --git a/airflow/providers/slack/README.md b/airflow/providers/slack/README.md index 36484db9119e7..6f8004cbcb75c 100644 --- a/airflow/providers/slack/README.md +++ b/airflow/providers/slack/README.md @@ -31,6 +31,7 @@ Release: 2020.5.20 - [Cross provider package dependencies](#cross-provider-package-dependencies) - [Provider class summary](#provider-class-summary) - [Operators](#operators) + - [New operators](#new-operators) - [Moved operators](#moved-operators) - [Hooks](#hooks) - [Moved hooks](#moved-hooks) @@ -88,6 +89,12 @@ All classes in Airflow 2.0 are in `airflow.providers.slack` package. ## Operators +### New operators + +| New Airflow 2.0 operators: `airflow.providers.slack` package | +|:---------------------------------------------------------------------------------------------------------------------------------| +| [operators.slack.SlackAPIFileOperator](https://github.com/apache/airflow/blob/master/airflow/providers/slack/operators/slack.py) | + ### Moved operators @@ -102,6 +109,8 @@ All classes in Airflow 2.0 are in `airflow.providers.slack` package. + + ## Hooks @@ -124,6 +133,8 @@ All classes in Airflow 2.0 are in `airflow.providers.slack` package. 
| Commit | Committed | Subject | |:-----------------------------------------------------------------------------------------------|:------------|:------------------------------------------------------------------------------| +| [5cf46fad1](https://github.com/apache/airflow/commit/5cf46fad1e0a9cdde213258b2064e16d30d3160e) | 2020-05-29 | Add SlackAPIFileOperator impementing files.upload from Slack API (#9004) | +| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. (#8994) | | [427257c2e](https://github.com/apache/airflow/commit/427257c2e2ffc886ef9f516e9c4d015a4ede9bbd) | 2020-05-24 | Remove defunct code from setup.py (#8982) | | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | diff --git a/airflow/providers/snowflake/PROVIDERS_CHANGES_2020.05.20.md b/airflow/providers/snowflake/PROVIDERS_CHANGES_2020.05.20.md index 992d41347d32a..a5e944ada4133 100644 --- a/airflow/providers/snowflake/PROVIDERS_CHANGES_2020.05.20.md +++ b/airflow/providers/snowflake/PROVIDERS_CHANGES_2020.05.20.md @@ -4,6 +4,8 @@ | Commit | Committed | Subject | |:-----------------------------------------------------------------------------------------------|:------------|:------------------------------------------------------------------------| +| [1c9374d25](https://github.com/apache/airflow/commit/1c9374d2573483dd66f5c35032e24140864e72c0) | 2020-06-03 | Add snowflake to slack operator (#9023) | +| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. (#8994) | | [1d36b0303](https://github.com/apache/airflow/commit/1d36b0303b8632fce6de78ca4e782ae26ee06fea) | 2020-05-23 | Fix references in docs (#8984) | | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | diff --git a/airflow/providers/snowflake/README.md b/airflow/providers/snowflake/README.md index b800a879a5e45..1dd193a501700 100644 --- a/airflow/providers/snowflake/README.md +++ b/airflow/providers/snowflake/README.md @@ -28,9 +28,12 @@ Release: 2020.5.20 - [Installation](#installation) - [Compatibility](#compatibility) - [PIP requirements](#pip-requirements) +- [Cross provider package dependencies](#cross-provider-package-dependencies) - [Provider class summary](#provider-class-summary) - [Operators](#operators) - [New operators](#new-operators) + - [Transfer operators](#transfers) + - [New transfer operators](#new-transfers) - [Hooks](#hooks) - [New hooks](#new-hooks) - [Releases](#releases) @@ -65,6 +68,21 @@ For full compatibility and test status of the backport packages check | snowflake-connector-python | >=1.5.2 | | snowflake-sqlalchemy | >=1.1.0 | +## Cross provider package dependencies + +Those are dependencies that might be needed in order to use all the features of the package. 
+You need to install the specified backport providers package in order to use them. + +You can install such cross-provider dependencies when installing from PyPI. For example: + +```bash +pip install apache-airflow-backport-providers-snowflake[slack] +``` + +| Dependent package | Extra | +|:-----------------------------------------------------------------------------------------------------------------|:--------| +| [apache-airflow-backport-providers-slack](https://github.com/apache/airflow/tree/master/airflow/providers/slack) | slack | + # Provider class summary All classes in Airflow 2.0 are in `airflow.providers.snowflake` package. @@ -75,10 +93,22 @@ All classes in Airflow 2.0 are in `airflow.providers.snowflake` package. ### New operators -| New Airflow 2.0 operators: `airflow.providers.snowflake` package | -|:------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| [operators.s3_to_snowflake.S3ToSnowflakeTransferOperator](https://github.com/apache/airflow/blob/master/airflow/providers/snowflake/operators/s3_to_snowflake.py) | -| [operators.snowflake.SnowflakeOperator](https://github.com/apache/airflow/blob/master/airflow/providers/snowflake/operators/snowflake.py) | +| New Airflow 2.0 operators: `airflow.providers.snowflake` package | +|:------------------------------------------------------------------------------------------------------------------------------------------| +| [operators.snowflake.SnowflakeOperator](https://github.com/apache/airflow/blob/master/airflow/providers/snowflake/operators/snowflake.py) | + + + + + + + +### New transfer operators + +| New Airflow 2.0 transfers: `airflow.providers.snowflake` package | +|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| [transfers.s3_to_snowflake.S3ToSnowflakeOperator](https://github.com/apache/airflow/blob/master/airflow/providers/snowflake/transfers/s3_to_snowflake.py) | +| [transfers.snowflake_to_slack.SnowflakeToSlackOperator](https://github.com/apache/airflow/blob/master/airflow/providers/snowflake/transfers/snowflake_to_slack.py) | @@ -107,6 +137,8 @@ All classes in Airflow 2.0 are in `airflow.providers.snowflake` package. | Commit | Committed | Subject | |:-----------------------------------------------------------------------------------------------|:------------|:------------------------------------------------------------------------| +| [1c9374d25](https://github.com/apache/airflow/commit/1c9374d2573483dd66f5c35032e24140864e72c0) | 2020-06-03 | Add snowflake to slack operator (#9023) | +| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. 
(#8994) | | [1d36b0303](https://github.com/apache/airflow/commit/1d36b0303b8632fce6de78ca4e782ae26ee06fea) | 2020-05-23 | Fix references in docs (#8984) | | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | diff --git a/airflow/providers/snowflake/example_dags/example_snowflake.py b/airflow/providers/snowflake/example_dags/example_snowflake.py index 944efad7817db..70261019bd555 100644 --- a/airflow/providers/snowflake/example_dags/example_snowflake.py +++ b/airflow/providers/snowflake/example_dags/example_snowflake.py @@ -21,9 +21,9 @@ import os from airflow import DAG -from airflow.providers.snowflake.operators.s3_to_snowflake import S3ToSnowflakeTransferOperator from airflow.providers.snowflake.operators.snowflake import SnowflakeOperator -from airflow.providers.snowflake.operators.snowflake_to_slack import SnowflakeToSlackOperator +from airflow.providers.snowflake.transfers.s3_to_snowflake import S3ToSnowflakeOperator +from airflow.providers.snowflake.transfers.snowflake_to_slack import SnowflakeToSlackOperator from airflow.utils.dates import days_ago SNOWFLAKE_CONN_ID = os.environ.get('SNOWFLAKE_CONN_ID', 'snowflake_default') @@ -76,7 +76,7 @@ dag=dag, ) -copy_into_table = S3ToSnowflakeTransferOperator( +copy_into_table = S3ToSnowflakeOperator( task_id='copy_into_table', snowflake_conn_id=SNOWFLAKE_CONN_ID, s3_keys=[SNOWFLAKE_LOAD_JSON_PATH], diff --git a/airflow/providers/snowflake/transfers/__init__.py b/airflow/providers/snowflake/transfers/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/airflow/providers/snowflake/transfers/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
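The example DAG hunk above already uses the new `transfers` import paths that the renames below introduce. For quick reference, here is a minimal, self-contained sketch of a DAG written against the new paths; it is an illustration only, and the connection ID, S3 key, schema, table, stage, and file format values are hypothetical placeholders, not values taken from this change.

```python
# Sketch only: uses the post-rename import path and class name.
# All operator arguments below are assumed placeholder values.
from airflow import DAG
from airflow.providers.snowflake.transfers.s3_to_snowflake import S3ToSnowflakeOperator
from airflow.utils.dates import days_ago

with DAG("s3_to_snowflake_sketch", start_date=days_ago(1), schedule_interval=None) as dag:
    copy_into_table = S3ToSnowflakeOperator(
        task_id="copy_into_table",
        snowflake_conn_id="snowflake_default",  # assumed connection id
        s3_keys=["data/sample.json"],           # hypothetical S3 key
        schema="public",                        # hypothetical schema
        table="sample_table",                   # hypothetical target table
        stage="sample_stage",                   # hypothetical named stage
        file_format="(type=json)",
    )
```

Note that the new name `S3ToSnowflakeOperator` follows the `*To*Operator` transfer convention enforced later in this diff via `TRANSFERS_PATTERN`, while the old `S3ToSnowflakeTransferOperator` spelling is exactly the form flagged by `WRONG_TRANSFERS_PATTERN`.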
diff --git a/airflow/providers/snowflake/operators/s3_to_snowflake.py b/airflow/providers/snowflake/transfers/s3_to_snowflake.py similarity index 98% rename from airflow/providers/snowflake/operators/s3_to_snowflake.py rename to airflow/providers/snowflake/transfers/s3_to_snowflake.py index 4f2cc816490c2..bbd6192345e7f 100644 --- a/airflow/providers/snowflake/operators/s3_to_snowflake.py +++ b/airflow/providers/snowflake/transfers/s3_to_snowflake.py @@ -25,7 +25,7 @@ from airflow.utils.decorators import apply_defaults -class S3ToSnowflakeTransferOperator(BaseOperator): +class S3ToSnowflakeOperator(BaseOperator): """ Executes an COPY command to load files from s3 to Snowflake diff --git a/airflow/providers/snowflake/operators/snowflake_to_slack.py b/airflow/providers/snowflake/transfers/snowflake_to_slack.py similarity index 100% rename from airflow/providers/snowflake/operators/snowflake_to_slack.py rename to airflow/providers/snowflake/transfers/snowflake_to_slack.py diff --git a/airflow/providers/sqlite/PROVIDERS_CHANGES_2020.05.20.md b/airflow/providers/sqlite/PROVIDERS_CHANGES_2020.05.20.md index f22f50d74a60b..0260584486138 100644 --- a/airflow/providers/sqlite/PROVIDERS_CHANGES_2020.05.20.md +++ b/airflow/providers/sqlite/PROVIDERS_CHANGES_2020.05.20.md @@ -4,6 +4,7 @@ | Commit | Committed | Subject | |:-----------------------------------------------------------------------------------------------|:------------|:------------------------------------------------------------------------| +| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. (#8994) | | [1d36b0303](https://github.com/apache/airflow/commit/1d36b0303b8632fce6de78ca4e782ae26ee06fea) | 2020-05-23 | Fix references in docs (#8984) | | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | diff --git a/airflow/providers/sqlite/README.md b/airflow/providers/sqlite/README.md index 1bfed7c2fe6c6..7b88ffe8c380d 100644 --- a/airflow/providers/sqlite/README.md +++ b/airflow/providers/sqlite/README.md @@ -77,6 +77,8 @@ All classes in Airflow 2.0 are in `airflow.providers.sqlite` package. + + ## Hooks @@ -98,6 +100,7 @@ All classes in Airflow 2.0 are in `airflow.providers.sqlite` package. | Commit | Committed | Subject | |:-----------------------------------------------------------------------------------------------|:------------|:------------------------------------------------------------------------| +| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. 
(#8994) | | [1d36b0303](https://github.com/apache/airflow/commit/1d36b0303b8632fce6de78ca4e782ae26ee06fea) | 2020-05-23 | Fix references in docs (#8984) | | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | diff --git a/airflow/providers/ssh/PROVIDERS_CHANGES_2020.05.20.md b/airflow/providers/ssh/PROVIDERS_CHANGES_2020.05.20.md index e1bfc46e60371..8322cdda3a1a3 100644 --- a/airflow/providers/ssh/PROVIDERS_CHANGES_2020.05.20.md +++ b/airflow/providers/ssh/PROVIDERS_CHANGES_2020.05.20.md @@ -4,6 +4,7 @@ | Commit | Committed | Subject | |:-----------------------------------------------------------------------------------------------|:------------|:---------------------------------------------------------------------------| +| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. (#8994) | | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | | [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | diff --git a/airflow/providers/ssh/README.md b/airflow/providers/ssh/README.md index b936eb0d0ed1e..92bb81c585ec0 100644 --- a/airflow/providers/ssh/README.md +++ b/airflow/providers/ssh/README.md @@ -86,6 +86,8 @@ All classes in Airflow 2.0 are in `airflow.providers.ssh` package. + + ## Hooks @@ -107,6 +109,7 @@ All classes in Airflow 2.0 are in `airflow.providers.ssh` package. | Commit | Committed | Subject | |:-----------------------------------------------------------------------------------------------|:------------|:---------------------------------------------------------------------------| +| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. 
(#8994) | | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | | [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | diff --git a/airflow/providers/vertica/PROVIDERS_CHANGES_2020.05.20.md b/airflow/providers/vertica/PROVIDERS_CHANGES_2020.05.20.md index 14789c60bae89..50c95dfb4b0f9 100644 --- a/airflow/providers/vertica/PROVIDERS_CHANGES_2020.05.20.md +++ b/airflow/providers/vertica/PROVIDERS_CHANGES_2020.05.20.md @@ -4,6 +4,7 @@ | Commit | Committed | Subject | |:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------| +| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. (#8994) | | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | | [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | diff --git a/airflow/providers/vertica/README.md b/airflow/providers/vertica/README.md index 7b4e136aa9306..90c86d9fb9e64 100644 --- a/airflow/providers/vertica/README.md +++ b/airflow/providers/vertica/README.md @@ -84,6 +84,8 @@ All classes in Airflow 2.0 are in `airflow.providers.vertica` package. + + ## Hooks @@ -105,6 +107,7 @@ All classes in Airflow 2.0 are in `airflow.providers.vertica` package. | Commit | Committed | Subject | |:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------| +| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. 
(#8994) | | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | | [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | diff --git a/airflow/providers/yandex/PROVIDERS_CHANGES_2020.05.20.md b/airflow/providers/yandex/PROVIDERS_CHANGES_2020.05.20.md index 6a0606f253021..b1a69b786e428 100644 --- a/airflow/providers/yandex/PROVIDERS_CHANGES_2020.05.20.md +++ b/airflow/providers/yandex/PROVIDERS_CHANGES_2020.05.20.md @@ -4,6 +4,7 @@ | Commit | Committed | Subject | |:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. (#8994) | | [1d36b0303](https://github.com/apache/airflow/commit/1d36b0303b8632fce6de78ca4e782ae26ee06fea) | 2020-05-23 | Fix references in docs (#8984) | | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | diff --git a/airflow/providers/yandex/README.md b/airflow/providers/yandex/README.md index 72b8f2d57aaea..d5ab9f10d83be 100644 --- a/airflow/providers/yandex/README.md +++ b/airflow/providers/yandex/README.md @@ -89,6 +89,8 @@ All classes in Airflow 2.0 are in `airflow.providers.yandex` package. + + ## Hooks @@ -111,6 +113,7 @@ All classes in Airflow 2.0 are in `airflow.providers.yandex` package. | Commit | Committed | Subject | |:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. 
(#8994) | | [1d36b0303](https://github.com/apache/airflow/commit/1d36b0303b8632fce6de78ca4e782ae26ee06fea) | 2020-05-23 | Fix references in docs (#8984) | | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | diff --git a/airflow/providers/zendesk/PROVIDERS_CHANGES_2020.05.20.md b/airflow/providers/zendesk/PROVIDERS_CHANGES_2020.05.20.md index a9c72ea9ff7c1..9c1eac609e470 100644 --- a/airflow/providers/zendesk/PROVIDERS_CHANGES_2020.05.20.md +++ b/airflow/providers/zendesk/PROVIDERS_CHANGES_2020.05.20.md @@ -4,6 +4,7 @@ | Commit | Committed | Subject | |:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------| +| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. (#8994) | | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | | [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | diff --git a/airflow/providers/zendesk/README.md b/airflow/providers/zendesk/README.md index 933b8c2d9cd2d..bc8162dba2838 100644 --- a/airflow/providers/zendesk/README.md +++ b/airflow/providers/zendesk/README.md @@ -70,6 +70,8 @@ All classes in Airflow 2.0 are in `airflow.providers.zendesk` package. + + ## Hooks @@ -91,6 +93,7 @@ All classes in Airflow 2.0 are in `airflow.providers.zendesk` package. | Commit | Committed | Subject | |:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------| +| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. (#8994) | | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | | [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | diff --git a/backport_packages/PROVIDERS_CLASSES_TEMPLATE.md.jinja2 b/backport_packages/PROVIDERS_CLASSES_TEMPLATE.md.jinja2 index 6363e71f892e9..3fa76134ae3ea 100644 --- a/backport_packages/PROVIDERS_CLASSES_TEMPLATE.md.jinja2 +++ b/backport_packages/PROVIDERS_CLASSES_TEMPLATE.md.jinja2 @@ -37,6 +37,20 @@ All classes in Airflow 2.0 are in `{{FULL_PACKAGE_NAME}}` package. 
{% endif %} {% endif %} +{% if NEW_TRANSFERS or MOVED_TRANSFERS %} +{% if NEW_TRANSFERS %} +### New transfer operators + +{{NEW_TRANSFERS_TABLE}} +{% endif %} + +{% if MOVED_TRANSFERS %} +### Moved transfer operators + +{{MOVED_TRANSFERS_TABLE}} +{% endif %} +{% endif %} + {% if NEW_SENSORS or MOVED_SENSORS %} ## Sensors @@ -65,20 +79,7 @@ All classes in Airflow 2.0 are in `{{FULL_PACKAGE_NAME}}` package. {{MOVED_HOOKS_TABLE}} {% endif %} {% endif %} -{% if NEW_PROTOCOLS or MOVED_PROTOCOLS %} -## Protocols - -{% if NEW_PROTOCOLS %} -### New protocols - -{{NEW_PROTOCOLS_TABLE}} -{% endif %} -{% if MOVED_PROTOCOLS %} -### Moved protocols -{{MOVED_PROTOCOLS_TABLE}} -{% endif %} -{% endif %} {% if NEW_SECRETS or MOVED_SECRETS %} ## Secrets diff --git a/backport_packages/PROVIDERS_README_TEMPLATE.md.jinja2 b/backport_packages/PROVIDERS_README_TEMPLATE.md.jinja2 index fecf6a57961c4..ae5a13857097b 100644 --- a/backport_packages/PROVIDERS_README_TEMPLATE.md.jinja2 +++ b/backport_packages/PROVIDERS_README_TEMPLATE.md.jinja2 @@ -42,6 +42,15 @@ Release: {{ RELEASE_NO_LEADING_ZEROS }} - [Moved operators](#moved-operators) {%- endif %} {%- endif %} +{%- if NEW_TRANSFERS or MOVED_TRANSFERS %} + - [Transfer operators](#transfers) + {%- if NEW_TRANSFERS %} + - [New transfer operators](#new-transfers) + {%- endif %} + {%- if MOVED_TRANSFERS %} + - [Moved transfer operators](#moved-transfers) + {%- endif %} +{%- endif %} {%- if NEW_SENSORS or MOVED_SENSORS %} - [Sensors](#sensors) {%- if NEW_SENSORS %} @@ -60,15 +69,6 @@ Release: {{ RELEASE_NO_LEADING_ZEROS }} - [Moved hooks](#moved-hooks) {%- endif %} {%- endif %} -{%- if NEW_PROTOCOLS or MOVED_PROTOCOLS %} - - [Protocols](#protocols) - {%- if NEW_PROTOCOLS %} - - [New protocols](#new-protocols) - {%- endif %} - {%- if MOVED_PROTOCOLS %} - - [Moved protocols](#moved-protocols) - {%- endif %} -{%- endif %} {%- if NEW_SECRETS or MOVED_SECRETS %} - [Secrets](#secrets) {%- if NEW_SECRETS %} diff --git a/backport_packages/refactor_backport_packages.py b/backport_packages/refactor_backport_packages.py index ef971747c7e6f..37a0d0a67e3d2 100755 --- a/backport_packages/refactor_backport_packages.py +++ b/backport_packages/refactor_backport_packages.py @@ -25,7 +25,7 @@ from backport_packages.setup_backport_packages import ( get_source_airflow_folder, get_source_providers_folder, get_target_providers_folder, - get_target_providers_package_folder, is_bigquery_non_dts_module, + get_target_providers_package_folder, ) from bowler import LN, TOKEN, Capture, Filename, Query from fissix.fixer_util import Comma, KeywordArg, Name @@ -46,28 +46,6 @@ def rm_build_dir() -> None: if os.path.isdir(build_dir): rmtree(build_dir) - def ignore_bigquery_files(src: str, names: List[str]) -> List[str]: - """ - Ignore files with bigquery - :param src: source file - :param names: Name of the file - :return: - """ - ignored_names = [] - if any([src.endswith(os.path.sep + class_type) for class_type in CLASS_TYPES]): - ignored_names = [name for name in names - if is_bigquery_non_dts_module(module_name=name)] - if src.endswith(os.path.sep + "example_dags"): - for file_name in names: - file_path = src + os.path.sep + file_name - with open(file_path, "rt") as file: - text = file.read() - if any([f"airflow.providers.google.cloud.{class_type}.bigquery" in text - for class_type in CLASS_TYPES]) or "_to_bigquery" in text: - print(f"Ignoring {file_path}") - ignored_names.append(file_name) - return ignored_names - def ignore_kubernetes_files(src: str, names: List[str]) -> List[str]: ignored_names = 
[] if src.endswith(os.path.sep + "example_dags"): @@ -77,7 +55,7 @@ def ignore_kubernetes_files(src: str, names: List[str]) -> List[str]: return ignored_names def ignore_some_files(src: str, names: List[str]) -> List[str]: - ignored_list = ignore_bigquery_files(src=src, names=names) + ignored_list = [] ignored_list.extend(ignore_kubernetes_files(src=src, names=names)) return ignored_list diff --git a/backport_packages/setup_backport_packages.py b/backport_packages/setup_backport_packages.py index 420be3ad2594b..6c991fa5954f7 100644 --- a/backport_packages/setup_backport_packages.py +++ b/backport_packages/setup_backport_packages.py @@ -36,7 +36,10 @@ from setup import PROVIDERS_REQUIREMENTS from setuptools import Command, find_packages, setup as setuptools_setup -from tests.test_core_to_contrib import HOOK, OPERATOR, PROTOCOLS, SECRETS, SENSOR +from tests.test_core_to_contrib import HOOKS, OPERATORS, SECRETS, SENSORS, TRANSFERS + +# Note - we do not test protocols as they are not really part of the official API of +# Apache Airflow # noinspection DuplicatedCode logger = logging.getLogger(__name__) # noqa @@ -50,6 +53,23 @@ PROVIDERS_PATH = os.path.join(AIRFLOW_PATH, "providers") +OPERATORS_PATTERN = r".*Operator$" +SENSORS_PATTERN = r".*Sensor$" +HOOKS_PATTERN = r".*Hook$" +SECRETS_PATTERN = r".*Backend$" +TRANSFERS_PATTERN = r".*To[A-Z0-9].*Operator$" +WRONG_TRANSFERS_PATTERN = r".*Transfer$|.*TransferOperator$" + +ALL_PATTERNS = { + OPERATORS_PATTERN, + SENSORS_PATTERN, + HOOKS_PATTERN, + SECRETS_PATTERN, + TRANSFERS_PATTERN, + WRONG_TRANSFERS_PATTERN, +} + + def get_source_airflow_folder() -> str: """ Returns source directory for whole airflow (from the main airflow project). @@ -115,11 +135,11 @@ def run(self): DEPENDENCIES_JSON_FILE = os.path.join(PROVIDERS_PATH, "dependencies.json") -MOVED_OPERATORS_DICT = {value[0]: value[1] for value in OPERATOR} -MOVED_SENSORS_DICT = {value[0]: value[1] for value in SENSOR} -MOVED_HOOKS_DICT = {value[0]: value[1] for value in HOOK} -MOVED_PROTOCOLS_DICT = {value[0]: value[1] for value in PROTOCOLS} +MOVED_OPERATORS_DICT = {value[0]: value[1] for value in OPERATORS} +MOVED_SENSORS_DICT = {value[0]: value[1] for value in SENSORS} +MOVED_HOOKS_DICT = {value[0]: value[1] for value in HOOKS} MOVED_SECRETS_DICT = {value[0]: value[1] for value in SECRETS} +MOVED_TRANSFERS_DICT = {value[0]: value[1] for value in TRANSFERS} def get_pip_package_name(provider_package_id: str) -> str: @@ -132,19 +152,6 @@ def get_pip_package_name(provider_package_id: str) -> str: return "apache-airflow-backport-providers-" + provider_package_id.replace(".", "-") -def is_bigquery_non_dts_module(module_name: str) -> bool: - """ - Returns true if the module name indicates this is a bigquery module that should be skipped - for now. - TODO: this method should be removed as soon as BigQuery rewrite is finished. - - :param module_name: name of the module - :return: true if module is a bigquery module (but not bigquery_dts) - """ - return module_name.startswith("bigquery") and "bigquery_dts" not in module_name \ - or "_to_bigquery" in module_name - - def get_long_description(provider_package_id: str) -> str: """ Gets long description of the package. 
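The regex patterns defined above drive the naming checks in the `find_all_classes` hunk that follows: a transfer operator must match `TRANSFERS_PATTERN` and must not match `WRONG_TRANSFERS_PATTERN`. The standalone sketch below (an illustration, not part of this change) shows how a few class names classify under those two patterns:

```python
import re

# Patterns copied verbatim from the setup_backport_packages.py hunk above.
TRANSFERS_PATTERN = r".*To[A-Z0-9].*Operator$"
WRONG_TRANSFERS_PATTERN = r".*Transfer$|.*TransferOperator$"

for class_name in [
    "S3ToSnowflakeOperator",          # new-style transfer name
    "S3ToSnowflakeTransferOperator",  # old-style name that should be flagged
    "OracleToOracleTransfer",         # old-style name that should be flagged
]:
    correctly_named = (
        re.match(TRANSFERS_PATTERN, class_name) is not None
        and re.match(WRONG_TRANSFERS_PATTERN, class_name) is None
    )
    print(f"{class_name}: {'ok' if correctly_named else 'needs renaming'}")

# Expected output:
#   S3ToSnowflakeOperator: ok
#   S3ToSnowflakeTransferOperator: needs renaming
#   OracleToOracleTransfer: needs renaming
```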
@@ -279,7 +286,7 @@ def usage() -> None: print() print(" list-providers-packages - lists all provider packages") print(" list-backportable-packages - lists all packages that are backportable") - print(" update-package-release-notes YYYY.MM.DD [PACKAGES] - updates package release notes") + print(" update-package-release-notes [YYYY.MM.DD] [PACKAGES] - updates package release notes") print(" --version-suffix - adds version suffix to version of the packages.") print() @@ -305,7 +312,7 @@ def is_example_dag(imported_name: str) -> bool: return ".example_dags." in imported_name -def is_from_the_expected_package(the_class: Type, expected_package: str) -> bool: +def is_from_the_expected_base_package(the_class: Type, expected_package: str) -> bool: """ Returns true if the class is from the package expected. :param the_class: the class object @@ -339,55 +346,68 @@ def is_class(the_class: Type) -> bool: return inspect.isclass(the_class) -def is_bigquery_class(imported_name: str) -> bool: +def package_name_matches(the_class: Type, expected_pattern: Optional[str]) -> bool: """ - Returns true if the object passed is a class - :param imported_name: name of the class imported - :return: true if it is a class + In case expected_pattern is set, it checks if the package name matches the pattern. + :param the_class: imported class + :param expected_pattern: the pattern that should match the package + :return: true if the expected_pattern is None or the pattern matches the package """ - return is_bigquery_non_dts_module(module_name=imported_name.split(".")[-2]) + return expected_pattern is None or re.match(expected_pattern, the_class.__module__) -def has_expected_string_in_name(the_class: Type, expected_string: Optional[str]) -> bool: - """ - In case expected_string is different than None then it checks for presence of the string in the - imported_name. - :param the_class: name of the imported object - :param expected_string: string to expect - :return: true if the expected_string is None or the expected string is found in the imported name - """ - return expected_string is None or expected_string in the_class.__module__ - - -def find_all_subclasses(imported_classes: List[str], - expected_package: str, - expected_ancestor: Type, - expected_string: Optional[str] = None, - exclude_class_type=None) -> Set[str]: +def find_all_classes(imported_classes: List[str], + base_package: str, + ancestor_match: Type, + sub_package_pattern_match: str, + expected_class_name_pattern: str, + unexpected_class_name_patterns: Set[str], + exclude_class_type: Type = None, + false_positive_class_names: Optional[Set[str]] = None, + ) -> Tuple[Set[str], List[Tuple[type, str]]]: """ Returns set of classes containing all subclasses in package specified. 
:param imported_classes: classes imported from providers - :param expected_package: full package name where to look for the classes - :param expected_ancestor: type of the object the method looks for - :param expected_string: this string is expected to appear in the package name - :param exclude_class_type: exclude class of this type (Sensor are also Operators so they should be - excluded from the Operator list) - """ - subclasses = set() + :param base_package: base package name where to start looking for the classes + :param sub_package_pattern_match: regexp pattern that the sub-package name is expected to match + :param ancestor_match: type of the object the method looks for + :param expected_class_name_pattern: regexp of class name pattern to expect + :param unexpected_class_name_patterns: set of regexps of class name patterns that are not expected + :param exclude_class_type: exclude class of this type (Sensor are also Operators so + they should be excluded from the list) + :param false_positive_class_names: set of class names that are wrongly recognised as badly named + """ + found_classes: Set[str] = set() + wrong_classes: List[Tuple[type, str]] = [] for imported_name in imported_classes: module, class_name = imported_name.rsplit(".", maxsplit=1) the_class = getattr(importlib.import_module(module), class_name) if is_class(the_class=the_class) \ and not is_example_dag(imported_name=imported_name) \ - and is_from_the_expected_package(the_class=the_class, expected_package=expected_package) \ + and is_from_the_expected_base_package(the_class=the_class, expected_package=base_package) \ and is_imported_from_same_module(the_class=the_class, imported_name=imported_name) \ - and has_expected_string_in_name(the_class=the_class, expected_string=expected_string) \ - and inherits_from(the_class=the_class, expected_ancestor=expected_ancestor) \ + and inherits_from(the_class=the_class, expected_ancestor=ancestor_match) \ and not inherits_from(the_class=the_class, expected_ancestor=exclude_class_type) \ - and not is_bigquery_class(imported_name=imported_name): - subclasses.add(imported_name) - return subclasses + and package_name_matches(the_class=the_class, expected_pattern=sub_package_pattern_match): + + if not false_positive_class_names or class_name not in false_positive_class_names: + if not re.match(expected_class_name_pattern, class_name): + wrong_classes.append( + (the_class, f"The class name {class_name} is wrong. " + f"It should match {expected_class_name_pattern}")) + continue + if unexpected_class_name_patterns: + for unexpected_class_name_pattern in unexpected_class_name_patterns: + if re.match(unexpected_class_name_pattern, class_name): + wrong_classes.append( + (the_class, + f"The class name {class_name} is wrong. 
" + f"It should not match {unexpected_class_name_pattern}")) + continue + found_classes.add(imported_name) + return found_classes, wrong_classes def get_new_and_moved_classes(classes: Set[str], @@ -482,49 +502,94 @@ def convert_moved_objects_to_table(class_dict: Dict[str, str], return tabulate(table, headers=headers, tablefmt="pipe") -def get_package_class_summary(full_package_name: str, imported_classes: List[str]) -> Dict[str, Any]: +def print_wrong_naming(class_type: str, wrong_classes: List[Tuple[type, str]]): + """ + Prints wrong classes of a given type if there are any + :param class_type: type of the class to print + :param wrong_classes: list of wrong classes + """ + if wrong_classes: + print(f"\nThere are wrongly named classes of type {class_type}:\n", file=sys.stderr) + for class_type, message in wrong_classes: + print(f"{class_type}: {message}", file=sys.stderr) + + +def get_package_class_summary(full_package_name: str, imported_classes: List[str]) \ + -> Tuple[Dict[str, Any], int]: """ Gets summary of the package in the form of dictionary containing all types of classes :param full_package_name: full package name :param imported_classes: classes imported from providers - :return: dictionary of objects usable as context for Jinja2 templates + :return: tuple of a dictionary of objects usable as context for Jinja2 templates + and the number of wrongly named classes """ from airflow.secrets import BaseSecretsBackend from airflow.sensors.base_sensor_operator import BaseSensorOperator from airflow.hooks.base_hook import BaseHook from airflow.models.baseoperator import BaseOperator - from typing_extensions import Protocol - operators = find_all_subclasses( + + operators, wrong_operators = find_all_classes( imported_classes=imported_classes, - expected_package=full_package_name, - expected_ancestor=BaseOperator, - expected_string=".operators.", - exclude_class_type=BaseSensorOperator) - sensors = find_all_subclasses( + base_package=full_package_name, + sub_package_pattern_match=r".*\.operators\..*", + ancestor_match=BaseOperator, + expected_class_name_pattern=OPERATORS_PATTERN, + unexpected_class_name_patterns=ALL_PATTERNS - {OPERATORS_PATTERN}, + exclude_class_type=BaseSensorOperator, + false_positive_class_names={ + 'CloudVisionAddProductToProductSetOperator', + 'CloudDataTransferServiceGCSToGCSOperator', + 'CloudDataTransferServiceS3ToGCSOperator', + 'BigQueryCreateDataTransferOperator', + 'CloudTextToSpeechSynthesizeOperator', + 'CloudSpeechToTextRecognizeSpeechOperator', + } + ) sensors, wrong_sensors = find_all_classes( imported_classes=imported_classes, - expected_package=full_package_name, - expected_ancestor=BaseSensorOperator, - expected_string='.sensors.') hooks = find_all_subclasses( + base_package=full_package_name, + sub_package_pattern_match=r".*\.sensors\..*", + ancestor_match=BaseSensorOperator, + expected_class_name_pattern=SENSORS_PATTERN, + unexpected_class_name_patterns=ALL_PATTERNS - {OPERATORS_PATTERN, SENSORS_PATTERN} + ) hooks, wrong_hooks = find_all_classes( imported_classes=imported_classes, - expected_package=full_package_name, - expected_ancestor=BaseHook, - expected_string='.hooks.') protocols = find_all_subclasses( + base_package=full_package_name, + sub_package_pattern_match=r".*\.hooks\..*", + ancestor_match=BaseHook, + expected_class_name_pattern=HOOKS_PATTERN, + unexpected_class_name_patterns=ALL_PATTERNS - {HOOKS_PATTERN} + ) secrets, wrong_secrets = find_all_classes( imported_classes=imported_classes, - expected_package=full_package_name, - 
expected_ancestor=Protocol, + sub_package_pattern_match=r".*\.secrets\..*", + base_package=full_package_name, + ancestor_match=BaseSecretsBackend, + expected_class_name_pattern=SECRETS_PATTERN, + unexpected_class_name_patterns=ALL_PATTERNS - {SECRETS_PATTERN}, ) - secrets = find_all_subclasses( + transfers, wrong_transfers = find_all_classes( imported_classes=imported_classes, - expected_package=full_package_name, - expected_ancestor=BaseSecretsBackend, + base_package=full_package_name, + sub_package_pattern_match=r".*\.transfers\..*", + ancestor_match=BaseOperator, + expected_class_name_pattern=TRANSFERS_PATTERN, + unexpected_class_name_patterns=ALL_PATTERNS - {OPERATORS_PATTERN, TRANSFERS_PATTERN}, ) + print_wrong_naming("Operators", wrong_operators) + print_wrong_naming("Sensors", wrong_sensors) + print_wrong_naming("Hooks", wrong_hooks) + print_wrong_naming("Secrets", wrong_secrets) + print_wrong_naming("Transfers", wrong_transfers) + + num_errors = len(wrong_operators) + len(wrong_sensors) + len(wrong_hooks) + \ + len(wrong_secrets) + len(wrong_transfers) + new_operators, moved_operators = get_new_and_moved_classes(operators, MOVED_OPERATORS_DICT) new_sensors, moved_sensors = get_new_and_moved_classes(sensors, MOVED_SENSORS_DICT) new_hooks, moved_hooks = get_new_and_moved_classes(hooks, MOVED_HOOKS_DICT) - new_protocols, moved_protocols = get_new_and_moved_classes(protocols, MOVED_PROTOCOLS_DICT) new_secrets, moved_secrets = get_new_and_moved_classes(secrets, MOVED_SECRETS_DICT) + new_transfers, moved_transfers = get_new_and_moved_classes(transfers, MOVED_TRANSFERS_DICT) class_summary = { "NEW_OPERATORS": new_operators, "MOVED_OPERATORS": moved_operators, @@ -532,23 +597,22 @@ def get_package_class_summary(full_package_name: str, imported_classes: List[str "MOVED_SENSORS": moved_sensors, "NEW_HOOKS": new_hooks, "MOVED_HOOKS": moved_hooks, - "NEW_PROTOCOLS": new_protocols, - "MOVED_PROTOCOLS": moved_protocols, "NEW_SECRETS": new_secrets, "MOVED_SECRETS": moved_secrets, + "NEW_TRANSFERS": new_transfers, + "MOVED_TRANSFERS": moved_transfers, "OPERATORS": operators, "HOOKS": hooks, "SENSORS": sensors, - "PROTOCOLS": protocols, "SECRETS": secrets, - + "TRANSFERS": transfers, } for from_name, to_name, object_type in [ ("NEW_OPERATORS", "NEW_OPERATORS_TABLE", "operators"), ("NEW_SENSORS", "NEW_SENSORS_TABLE", "sensors"), ("NEW_HOOKS", "NEW_HOOKS_TABLE", "hooks"), - ("NEW_PROTOCOLS", "NEW_PROTOCOLS_TABLE", "protocols"), ("NEW_SECRETS", "NEW_SECRETS_TABLE", "secrets"), + ("NEW_TRANSFERS", "NEW_TRANSFERS_TABLE", "transfers"), ]: class_summary[to_name] = convert_new_classes_to_table(class_summary[from_name], full_package_name, @@ -557,13 +621,13 @@ def get_package_class_summary(full_package_name: str, imported_classes: List[str ("MOVED_OPERATORS", "MOVED_OPERATORS_TABLE", "operators"), ("MOVED_SENSORS", "MOVED_SENSORS_TABLE", "sensors"), ("MOVED_HOOKS", "MOVED_HOOKS_TABLE", "hooks"), - ("MOVED_PROTOCOLS", "MOVED_PROTOCOLS_TABLE", "protocols"), - ("MOVED_SECRETS", "MOVED_SECRETS_TABLE", "protocols"), + ("MOVED_SECRETS", "MOVED_SECRETS_TABLE", "secrets"), + ("MOVED_TRANSFERS", "MOVED_TRANSFERS_TABLE", "transfers"), ]: class_summary[to_name] = convert_moved_objects_to_table(class_summary[from_name], full_package_name, object_type) - return class_summary + return class_summary, num_errors def render_template(template_name: str, context: Dict[str, Any]) -> str: @@ -741,13 +805,11 @@ def get_previous_release_info(previous_release_version: str, def check_if_release_version_ok( past_releases: List[ReleaseInfo], - current_release_version: str, - package_id: 
str) -> Tuple[str, Optional[str]]: + current_release_version: str) -> Tuple[str, Optional[str]]: """ Check if the release version passed is not later than the last release version :param past_releases: all past releases (if there are any) :param current_release_version: release version to check - :param package_id: package id :return: Tuple of current/previous_release (previous might be None if there are no releases) """ previous_release_version = past_releases[0].release_version if past_releases else None @@ -864,8 +926,8 @@ def get_additional_package_info(provider_package_path: str) -> str: "OPERATORS": "Operator", "HOOKS": "Hook", "SENSORS": "Sensor", - "PROTOCOLS": "Protocol", "SECRETS": "Backend", + "TRANSFERS": "Operator", } @@ -920,10 +982,10 @@ def update_release_notes_for_package(provider_package_id: str, current_release_v """ full_package_name = f"airflow.providers.{provider_package_id}" provider_package_path = get_package_path(provider_package_id) - class_summary = get_package_class_summary(full_package_name, imported_classes) + class_summary, num_errors = get_package_class_summary(full_package_name, imported_classes) past_releases = get_all_releases(provider_package_path=provider_package_path) current_release_version, previous_release = check_if_release_version_ok( - past_releases, current_release_version, provider_package_id) + past_releases, current_release_version) cross_providers_dependencies = \ get_cross_provider_dependent_packages(provider_package_id=provider_package_id) previous_release = get_previous_release_info(previous_release_version=previous_release, @@ -982,9 +1044,10 @@ def update_release_notes_for_package(provider_package_id: str, current_release_v finally: os.remove(temp_file_path) total, bad = check_if_classes_are_properly_named(class_summary) + bad = bad + num_errors if bad != 0: print() - print(f"ERROR! There are {bad} classes badly named out of {total} classes for {provider_package_id}") + print(f"ERROR! 
There are {bad} errors out of {total} classes for {provider_package_id}") print() return total, bad diff --git a/docs/autoapi_templates/index.rst b/docs/autoapi_templates/index.rst index ed1a91048c53f..d3d102ab82b20 100644 --- a/docs/autoapi_templates/index.rst +++ b/docs/autoapi_templates/index.rst @@ -70,16 +70,22 @@ All operators are in the following packages: airflow/providers/amazon/aws/sensors/index + airflow/providers/amazon/aws/transfers/index + airflow/providers/apache/cassandra/sensors/index airflow/providers/apache/druid/operators/index + airflow/providers/apache/druid/transfers/index + airflow/providers/apache/hdfs/sensors/index airflow/providers/apache/hive/operators/index airflow/providers/apache/hive/sensors/index + airflow/providers/apache/hive/transfers/index + airflow/providers/apache/livy/operators/index airflow/providers/apache/livy/sensors/index @@ -114,10 +120,14 @@ All operators are in the following packages: airflow/providers/google/ads/operators/index + airflow/providers/google/ads/transfers/index + airflow/providers/google/cloud/operators/index airflow/providers/google/cloud/sensors/index + airflow/providers/google/cloud/transfers/index + airflow/providers/google/firebase/operators/index airflow/providers/google/marketing_platform/operators/index @@ -126,6 +136,8 @@ All operators are in the following packages: airflow/providers/google/suite/operators/index + airflow/providers/google/suite/transfers/index + airflow/providers/grpc/operators/index airflow/providers/http/operators/index @@ -146,6 +158,8 @@ All operators are in the following packages: airflow/providers/microsoft/azure/sensors/index + airflow/providers/microsoft/azure/transfers/index + airflow/providers/microsoft/mssql/operators/index airflow/providers/microsoft/winrm/operators/index @@ -154,10 +168,14 @@ All operators are in the following packages: airflow/providers/mysql/operators/index + airflow/providers/mysql/transfers/index + airflow/providers/opsgenie/operators/index airflow/providers/oracle/operators/index + airflow/providers/oracle/transfers/index + airflow/providers/papermill/operators/index airflow/providers/postgres/operators/index @@ -186,6 +204,8 @@ All operators are in the following packages: airflow/providers/snowflake/operators/index + airflow/providers/snowflake/transfers/index + airflow/providers/sqlite/operators/index airflow/providers/ssh/operators/index diff --git a/docs/build b/docs/build index 7e0720a9e833d..8266243c0034c 100755 --- a/docs/build +++ b/docs/build @@ -168,7 +168,8 @@ def check_class_links_in_operators_and_hooks_ref() -> None: airflow_modules = find_modules() - find_modules(deprecated_only=True) airflow_modules = { - o for o in airflow_modules if any(f".{d}." 
in o for d in + ["operators", "hooks", "sensors", "transfers"]) } missing_modules = airflow_modules - current_modules_in_file diff --git a/docs/concepts.rst b/docs/concepts.rst index 79bc024473880..ca66a404bf916 100644 --- a/docs/concepts.rst +++ b/docs/concepts.rst @@ -278,7 +278,7 @@ Airflow provides operators for many common tasks, including: In addition to these basic building blocks, there are many more specific operators: :class:`~airflow.providers.docker.operators.docker.DockerOperator`, :class:`~airflow.providers.apache.hive.operators.hive.HiveOperator`, :class:`~airflow.providers.amazon.aws.operators.s3_file_transform.S3FileTransformOperator`, -:class:`~airflow.providers.mysql.operators.presto_to_mysql.PrestoToMySqlTransferOperator`, +:class:`~airflow.providers.mysql.transfers.presto_to_mysql.PrestoToMySqlOperator`, :class:`~airflow.providers.slack.operators.slack.SlackAPIOperator`... you get the idea! Operators are only loaded by Airflow if they are assigned to a DAG. diff --git a/docs/howto/define_extra_link.rst b/docs/howto/define_extra_link.rst index ee6506a78cb55..47bb5ed3b7d0b 100644 --- a/docs/howto/define_extra_link.rst +++ b/docs/howto/define_extra_link.rst @@ -66,7 +66,7 @@ You can also add (or override) an extra link to existing operators through an Airflow plugin. For example, the following Airflow plugin will add an Operator Link on all -tasks using :class:`~airflow.providers.amazon.aws.operators.gcs_to_s3.GCSToS3Operator` operator. +tasks using :class:`~airflow.providers.amazon.aws.transfers.gcs_to_s3.GCSToS3Operator` operator. **Adding Operator Links to Existing Operators** ``plugins/extra_link.py``: @@ -75,7 +75,7 @@ tasks using :class:`~airflow.providers.amazon.aws.operators.gcs_to_s3.GCSToS3Ope from airflow.plugins_manager import AirflowPlugin from airflow.models.baseoperator import BaseOperatorLink - from airflow.providers.amazon.aws.operators.gcs_to_s3 import GCSToS3Operator + from airflow.providers.amazon.aws.transfers.gcs_to_s3 import GCSToS3Operator class S3LogLink(BaseOperatorLink): name = 'S3' diff --git a/docs/howto/operator/amazon/aws/google_api_to_s3_transfer.rst b/docs/howto/operator/amazon/aws/google_api_to_s3_transfer.rst index 0c095d5ca091b..ff7f3540e364d 100644 --- a/docs/howto/operator/amazon/aws/google_api_to_s3_transfer.rst +++ b/docs/howto/operator/amazon/aws/google_api_to_s3_transfer.rst @@ -31,7 +31,7 @@ Overview The ``GoogleApiToS3Transfer`` can make requests to any Google API which supports discovery and save its response on S3. Two example_dags are provided which showcase the -:class:`~airflow.providers.amazon.aws.operators.google_api_to_s3_transfer.GoogleApiToS3Transfer` +:class:`~airflow.providers.amazon.aws.transfers.google_api_to_s3.GoogleApiToS3Transfer` in action. - example_google_api_to_s3_transfer_basic.py diff --git a/docs/howto/operator/amazon/aws/imap_attachment_to_s3.rst b/docs/howto/operator/amazon/aws/imap_attachment_to_s3.rst index 94cfea38d91c8..cf28ad5bc4d12 100644 --- a/docs/howto/operator/amazon/aws/imap_attachment_to_s3.rst +++ b/docs/howto/operator/amazon/aws/imap_attachment_to_s3.rst @@ -32,7 +32,7 @@ The ``ImapAttachmentToS3Operator`` can transfer an email attachment via IMAP protocol from a mail server to an S3 Bucket. An example dag ``example_imap_attachment_to_s3.py`` is provided which showcases the -:class:`~airflow.providers.amazon.aws.operators.imap_attachment_to_s3.ImapAttachmentToS3Operator` +:class:`~airflow.providers.amazon.aws.transfers.imap_attachment_to_s3.ImapAttachmentToS3Operator` in action. 
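For DAG authors this rename is purely an import move. A minimal sketch (the attachment name and S3 key below are placeholders, and the two keyword arguments are taken from the operator's documentation):

.. code-block:: python

    # Old location, before this change:
    # from airflow.providers.amazon.aws.operators.imap_attachment_to_s3 import ImapAttachmentToS3Operator
    # New location introduced by this change:
    from airflow.providers.amazon.aws.transfers.imap_attachment_to_s3 import ImapAttachmentToS3Operator

    transfer_attachment = ImapAttachmentToS3Operator(
        task_id="transfer_attachment",
        imap_attachment_name="report.csv",       # placeholder attachment name
        s3_key="imap-attachments/report.csv",    # placeholder S3 key
    )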
example_imap_attachment_to_s3.py diff --git a/docs/howto/operator/amazon/aws/s3_to_redshift.rst b/docs/howto/operator/amazon/aws/s3_to_redshift.rst index 7a634da2838c8..dc5089eedccc7 100644 --- a/docs/howto/operator/amazon/aws/s3_to_redshift.rst +++ b/docs/howto/operator/amazon/aws/s3_to_redshift.rst @@ -16,7 +16,7 @@ under the License. -.. _howto/operator:S3ToRedshiftTransferOperator: +.. _howto/operator:S3ToRedshiftOperator: S3 To Redshift Transfer Operator ================================ @@ -28,10 +28,10 @@ S3 To Redshift Transfer Operator Overview -------- -The ``S3ToRedshiftTransferOperator`` copies data from a S3 Bucket into a Redshift table. +The ``S3ToRedshiftOperator`` copies data from an S3 Bucket into a Redshift table. The example dag provided showcases the -:class:`~airflow.providers.amazon.aws.operators.s3_to_redshift.S3ToRedshiftTransferOperator` +:class:`~airflow.providers.amazon.aws.transfers.s3_to_redshift.S3ToRedshiftOperator` in action. - example_s3_to_redshift.py @@ -42,7 +42,7 @@ example_s3_to_redshift.py Purpose """"""" -This is a basic example dag for using ``S3ToRedshiftTransferOperator`` to copies data from a S3 Bucket into a Redshift table. +This is a basic example dag for using ``S3ToRedshiftOperator`` to copy data from an S3 Bucket into a Redshift table. Environment variables """"""""""""""""""""" diff --git a/docs/howto/operator/gcp/ads.rst b/docs/howto/operator/gcp/ads.rst index 311304bc93b48..ad2c376b5eb8d 100644 --- a/docs/howto/operator/gcp/ads.rst +++ b/docs/howto/operator/gcp/ads.rst @@ -35,7 +35,7 @@ Google Ads to GCS ^^^^^^^^^^^^^^^^^ To query the Google Ads API and generate a CSV report of the results use -:class:`~airflow.providers.google.ads.operators.ads.GoogleAdsToGcsOperator`. +:class:`~airflow.providers.google.ads.transfers.ads_to_gcs.GoogleAdsToGcsOperator`. .. exampleinclude:: ../../../../airflow/providers/google/ads/example_dags/example_ads.py :language: python @@ -44,7 +44,7 @@ To query the Google Ads API and generate a CSV report of the results use :end-before: [END howto_google_ads_to_gcs_operator] Use :ref:`Jinja templating ` with -:template-fields:`airflow.providers.google.ads.operators.ads.GoogleAdsToGcsOperator` +:template-fields:`airflow.providers.google.ads.transfers.ads_to_gcs.GoogleAdsToGcsOperator` parameters which allow you to dynamically determine values. The result is saved to :ref:`XCom `, which allows the result to be used by other operators. @@ -54,7 +54,7 @@ Upload Google Ads Accounts to GCS ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ To upload Google Ads accounts to Google Cloud Storage bucket use the -:class:`~airflow.providers.google.ads.operators.ads.GoogleAdsListAccountsOperator`. +:class:`~airflow.providers.google.ads.transfers.ads_to_gcs.GoogleAdsListAccountsOperator`. .. exampleinclude:: ../../../../airflow/providers/google/ads/example_dags/example_ads.py :language: python @@ -63,6 +63,6 @@ To upload Google Ads accounts to Google Cloud Storage bucket use the :end-before: [END howto_ads_list_accounts_operator] Use :ref:`Jinja templating ` with -:template-fields:`airflow.providers.google.ads.operators.ads.GoogleAdsToGcsOperator` +:template-fields:`airflow.providers.google.ads.transfers.ads_to_gcs.GoogleAdsToGcsOperator` parameters which allow you to dynamically determine values. The result is saved to :ref:`XCom `, which allows the result to be used by other operators. 
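All of the renames above follow the transfer-operator naming rule (``<Source>To<Target>Operator``, never ``*Transfer`` or ``*TransferOperator``) that is enforced by the regex patterns added to ``backport_packages/setup_backport_packages.py`` earlier in this diff. A self-contained sketch of how those patterns classify names (the class names below are only illustrations):

.. code-block:: python

    import re

    # Patterns copied from setup_backport_packages.py above.
    TRANSFERS_PATTERN = r".*To[A-Z0-9].*Operator$"
    WRONG_TRANSFERS_PATTERN = r".*Transfer$|.*TransferOperator$"

    for name in (
        "GCSToS3Operator",               # ok: <Source>To<Target>Operator
        "PrestoToMySqlOperator",         # ok
        "S3ToRedshiftTransferOperator",  # wrong: *TransferOperator suffix
        "GoogleApiToS3Transfer",         # wrong: *Transfer suffix
    ):
        ok = bool(re.match(TRANSFERS_PATTERN, name)) and not re.match(
            WRONG_TRANSFERS_PATTERN, name
        )
        print(f"{name}: {'ok' if ok else 'badly named'}")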
diff --git a/docs/howto/operator/gcp/facebook_ads_to_gcs.rst b/docs/howto/operator/gcp/facebook_ads_to_gcs.rst index a686878c65cdd..7d6e50ba72a59 100644 --- a/docs/howto/operator/gcp/facebook_ads_to_gcs.rst +++ b/docs/howto/operator/gcp/facebook_ads_to_gcs.rst @@ -35,7 +35,7 @@ FacebookAdsReportToGcsOperator ------------------------------ Use the -:class:`~airflow.providers.google.cloud.operators.facebook_ads_to_gcs.FacebookAdsReportToGcsOperator` +:class:`~airflow.providers.google.cloud.transfers.facebook_ads_to_gcs.FacebookAdsReportToGcsOperator` to execute a Facebook ads report fetch and load to GCS. .. exampleinclude:: ../../../../airflow/providers/google/cloud/example_dags/example_facebook_ads_to_gcs.py diff --git a/docs/howto/operator/gcp/gcs.rst b/docs/howto/operator/gcp/gcs.rst index b35659fef78bd..675bbf6ffe5dc 100644 --- a/docs/howto/operator/gcp/gcs.rst +++ b/docs/howto/operator/gcp/gcs.rst @@ -35,7 +35,7 @@ GCSToBigQueryOperator --------------------- Use the -:class:`~airflow.providers.google.cloud.operators.gcs_to_bigquery.GCSToBigQueryOperator` +:class:`~airflow.providers.google.cloud.transfers.gcs_to_bigquery.GCSToBigQueryOperator` to execute a BigQuery load job. .. exampleinclude:: ../../../../airflow/providers/google/cloud/example_dags/example_gcs_to_bigquery.py diff --git a/docs/howto/operator/gcp/gcs_to_gcs.rst b/docs/howto/operator/gcp/gcs_to_gcs.rst index e17bedf5fea38..a23b37ceb5b73 100644 --- a/docs/howto/operator/gcp/gcs_to_gcs.rst +++ b/docs/howto/operator/gcp/gcs_to_gcs.rst @@ -72,7 +72,7 @@ Operators GCSToGCSOperator ~~~~~~~~~~~~~~~~ -:class:`~airflow.providers.google.cloud.operators.gcs_to_gcs.GCSToGCSOperator` allows you to copy +:class:`~airflow.providers.google.cloud.transfers.gcs_to_gcs.GCSToGCSOperator` allows you to copy one or more files within GCS. The files may be copied between two different buckets or within one bucket. The copying always takes place without taking into account the initial state of the destination bucket. @@ -165,7 +165,7 @@ the ``delimiter`` argument apply to moves as well as copies. GCSSynchronizeBuckets ~~~~~~~~~~~~~~~~~~~~~ -The :class:`~airflow.providers.google.cloud.operators.gcs_to_gcs.GCSSynchronizeBuckets` +The :class:`~airflow.providers.google.cloud.transfers.gcs_to_gcs.GCSSynchronizeBuckets` operator checks the initial state of the destination bucket, and then compares it with the source bucket. Based on this, it creates an operation plan that describes which objects should be deleted from the destination bucket, which should be overwritten, and which should be copied. diff --git a/docs/howto/operator/gcp/gcs_to_gdrive.rst b/docs/howto/operator/gcp/gcs_to_gdrive.rst index 61e750184c74b..4c4c43f046ce7 100644 --- a/docs/howto/operator/gcp/gcs_to_gdrive.rst +++ b/docs/howto/operator/gcp/gcs_to_gdrive.rst @@ -41,10 +41,10 @@ Operator ^^^^^^^^ Transfer files between Google Storage and Google Drive is performed with the -:class:`~airflow.providers.google.suite.operators.gcs_to_gdrive.GCSToGoogleDriveOperator` operator. +:class:`~airflow.providers.google.suite.transfers.gcs_to_gdrive.GCSToGoogleDriveOperator` operator. You can use :ref:`Jinja templating ` with -:template-fields:`airflow.providers.google.suite.operators.gcs_to_gdrive.GCSToGoogleDriveOperator` +:template-fields:`airflow.providers.google.suite.transfers.gcs_to_gdrive.GCSToGoogleDriveOperator` parameters which allows you to dynamically determine values. 
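For example, a minimal sketch of such a templated copy (the bucket and object names are placeholders; the parameter names follow the operator's documented template fields):

.. code-block:: python

    from airflow.providers.google.suite.transfers.gcs_to_gdrive import GCSToGoogleDriveOperator

    copy_reports = GCSToGoogleDriveOperator(
        task_id="copy_reports_to_gdrive",
        source_bucket="my-source-bucket",        # placeholder bucket name
        source_object="reports/{{ ds }}/*.csv",  # rendered per execution date
        destination_object="backups/{{ ds }}/",  # placeholder Drive path
    )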
Copy single files diff --git a/docs/howto/operator/gcp/gcs_to_local.rst b/docs/howto/operator/gcp/gcs_to_local.rst new file mode 100644 index 0000000000000..860fc06c2b858 --- /dev/null +++ b/docs/howto/operator/gcp/gcs_to_local.rst @@ -0,0 +1,57 @@ + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + + +Downloads data from Google Cloud Storage to Local Filesystem +============================================================ +`Google Cloud Storage `__ (GCS) is used to store large amounts of data from various applications. +This page shows how to download data from GCS to the local filesystem. + +.. contents:: + :depth: 1 + :local: + + +Prerequisite Tasks +^^^^^^^^^^^^^^^^^^ + +.. include:: _partials/prerequisite_tasks.rst + +.. _howto/operator:GCSToLocalFilesystemOperator: + +GCSToLocalFilesystemOperator +~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +:class:`~airflow.providers.google.cloud.transfers.gcs_to_local.GCSToLocalFilesystemOperator` allows you to download +data from GCS to the local filesystem. + + +Below is an example of using this operator to download a file from GCS. + +.. exampleinclude:: ../../../../airflow/providers/google/cloud/example_dags/example_gcs.py :language: python :dedent: 0 :start-after: [START howto_operator_gcs_download_file_task] :end-before: [END howto_operator_gcs_download_file_task] + + +Reference --------- + +For further information, look at: + +* `Google Cloud Storage Documentation `__ diff --git a/docs/howto/operator/gcp/gcs_to_sftp.rst b/docs/howto/operator/gcp/gcs_to_sftp.rst index 682be89bfe67a..d50f0adc6cfe7 100644 --- a/docs/howto/operator/gcp/gcs_to_sftp.rst +++ b/docs/howto/operator/gcp/gcs_to_sftp.rst @@ -41,10 +41,10 @@ Operator ^^^^^^^^ Transfer files between SFTP and Google Storage is performed with the -:class:`~airflow.providers.google.cloud.operators.gcs_to_sftp.GCSToSFTPOperator` operator. +:class:`~airflow.providers.google.cloud.transfers.gcs_to_sftp.GCSToSFTPOperator` operator. Use :ref:`Jinja templating ` with -:template-fields:`airflow.providers.google.cloud.operators.gcs_to_sftp.GCSToSFTPOperator` +:template-fields:`airflow.providers.google.cloud.transfers.gcs_to_sftp.GCSToSFTPOperator` to define values dynamically. diff --git a/docs/howto/operator/gcp/gcs_to_sheets.rst b/docs/howto/operator/gcp/gcs_to_sheets.rst index fdfd268daded7..2af5042cd8063 100644 --- a/docs/howto/operator/gcp/gcs_to_sheets.rst +++ b/docs/howto/operator/gcp/gcs_to_sheets.rst @@ -40,7 +40,7 @@ Upload data from GCS to Google Sheets ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ To upload data from Google Cloud Storage to Google Spreadsheet you can use the -:class:`~airflow.providers.google.suite.operators.gcs_to_sheets.GCSToGoogleSheetsOperator`. +:class:`~airflow.providers.google.suite.transfers.gcs_to_sheets.GCSToGoogleSheetsOperator`. .. 
exampleinclude:: ../../../../airflow/providers/google/suite/example_dags/example_gcs_to_sheets.py :language: python @@ -49,4 +49,4 @@ To upload data from Google Cloud Storage to Google Spreadsheet you can use the :end-before: [END upload_gcs_to_sheets] You can use :ref:`Jinja templating ` with -:template-fields:`airflow.providers.google.suite.operators.gcs_to_sheets.GCSToGoogleSheetsOperator`. +:template-fields:`airflow.providers.google.suite.transfers.gcs_to_sheets.GCSToGoogleSheetsOperator`. diff --git a/docs/howto/operator/gcp/local_to_gcs.rst b/docs/howto/operator/gcp/local_to_gcs.rst index 098618cf0c03d..a52c7c7d16b20 100644 --- a/docs/howto/operator/gcp/local_to_gcs.rst +++ b/docs/howto/operator/gcp/local_to_gcs.rst @@ -36,7 +36,7 @@ Prerequisite Tasks LocalFileSystemToGCSOperator ~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -:class:`~airflow.providers.google.cloud.operators.local_to_gcs.LocalFilesystemToGCSOperator` allows you to upload +:class:`~airflow.providers.google.cloud.transfers.local_to_gcs.LocalFilesystemToGCSOperator` allows you to upload data from local filesystem to GCS. When you use this operator, you can optionally compress the data being uploaded. diff --git a/docs/howto/operator/gcp/presto_to_gcs.rst b/docs/howto/operator/gcp/presto_to_gcs.rst index d961e3cf48dd2..e6fc95413dec9 100644 --- a/docs/howto/operator/gcp/presto_to_gcs.rst +++ b/docs/howto/operator/gcp/presto_to_gcs.rst @@ -35,7 +35,7 @@ Data transfer ------------- Transfer files between Presto and Google Storage is performed with the -:class:`~airflow.providers.google.cloud.operators.presto_to_gcs.PrestoToGCSOperator` operator. +:class:`~airflow.providers.google.cloud.transfers.presto_to_gcs.PrestoToGCSOperator` operator. This operator has 3 required parameters: @@ -45,7 +45,7 @@ This operator has 3 required parameters: A ``{}`` should be specified in the filename to allow the operator to inject file numbers in cases where the file is split due to size. -All parameters are described in the reference documentation - :class:`~airflow.providers.google.cloud.operators.presto_to_gcs.PrestoToGCSOperator`. +All parameters are described in the reference documentation - :class:`~airflow.providers.google.cloud.transfers.presto_to_gcs.PrestoToGCSOperator`. An example operator call might look like this: diff --git a/docs/howto/operator/gcp/sftp_to_gcs.rst b/docs/howto/operator/gcp/sftp_to_gcs.rst index f93c83304d619..e807e62ca46eb 100644 --- a/docs/howto/operator/gcp/sftp_to_gcs.rst +++ b/docs/howto/operator/gcp/sftp_to_gcs.rst @@ -40,10 +40,10 @@ Operator ^^^^^^^^ Transfer files between SFTP and Google Storage is performed with the -:class:`~airflow.providers.google.cloud.operators.sftp_to_gcs.SFTPToGCSOperator` operator. +:class:`~airflow.providers.google.cloud.transfers.sftp_to_gcs.SFTPToGCSOperator` operator. Use :ref:`Jinja templating ` with -:template-fields:`airflow.providers.google.cloud.operators.sftp_to_gcs.SFTPToGCSOperator` +:template-fields:`airflow.providers.google.cloud.transfers.sftp_to_gcs.SFTPToGCSOperator` to define values dynamically. 
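A minimal sketch of such a templated transfer (paths and bucket are placeholders; the parameter names are as documented for the operator):

.. code-block:: python

    from airflow.providers.google.cloud.transfers.sftp_to_gcs import SFTPToGCSOperator

    upload_file = SFTPToGCSOperator(
        task_id="sftp_to_gcs",
        source_path="/tmp/exports/{{ ds }}/data.csv",  # placeholder SFTP path
        destination_bucket="my-destination-bucket",    # placeholder bucket
        destination_path="exports/{{ ds }}/data.csv",  # placeholder object name
    )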
Copying single files diff --git a/docs/howto/operator/gcp/sheets_to_gcs.rst b/docs/howto/operator/gcp/sheets_to_gcs.rst index b42c796c0aa33..373bb018cd7a9 100644 --- a/docs/howto/operator/gcp/sheets_to_gcs.rst +++ b/docs/howto/operator/gcp/sheets_to_gcs.rst @@ -40,7 +40,7 @@ Upload data from Google Sheets to GCS ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ To upload data from Google Spreadsheet to Google Cloud Storage you can use the -:class:`~airflow.providers.google.cloud.operators.sheets_to_gcs.GoogleSheetsToGCSOperator`. +:class:`~airflow.providers.google.cloud.transfers.sheets_to_gcs.GoogleSheetsToGCSOperator`. .. exampleinclude:: ../../../../airflow/providers/google/cloud/example_dags/example_sheets_to_gcs.py :language: python @@ -49,4 +49,4 @@ To upload data from Google Spreadsheet to Google Cloud Storage you can use the :end-before: [END upload_sheet_to_gcs] You can use :ref:`Jinja templating ` with -:template-fields:`airflow.providers.google.cloud.operators.sheets_to_gcs.GoogleSheetsToGCSOperator`. +:template-fields:`airflow.providers.google.cloud.transfers.sheets_to_gcs.GoogleSheetsToGCSOperator`. diff --git a/docs/installation.rst b/docs/installation.rst index 29a228afe5876..89139438001d5 100644 --- a/docs/installation.rst +++ b/docs/installation.rst @@ -188,55 +188,55 @@ Here's the list of the subpackages and what they enable: **Software:** -+---------------------+-----------------------------------------------------+-----------------------------------------------------------------------------------+ -| subpackage | install command | enables | -+=====================+=====================================================+===================================================================================+ -| async | ``pip install 'apache-airflow[async]'`` | Async worker classes for Gunicorn | -+---------------------+-----------------------------------------------------+-----------------------------------------------------------------------------------+ -| celery | ``pip install 'apache-airflow[celery]'`` | CeleryExecutor | -+---------------------+-----------------------------------------------------+-----------------------------------------------------------------------------------+ -| dask | ``pip install 'apache-airflow[dask]'`` | DaskExecutor | -+---------------------+-----------------------------------------------------+-----------------------------------------------------------------------------------+ -| docker | ``pip install 'apache-airflow[docker]'`` | Docker hooks and operators | -+---------------------+-----------------------------------------------------+-----------------------------------------------------------------------------------+ -| elasticsearch | ``pip install 'apache-airflow[elasticsearch]'`` | Elasticsearch hooks and Log Handler | -+---------------------+-----------------------------------------------------+-----------------------------------------------------------------------------------+ -| exasol | ``pip install 'apache-airflow[exasol]'`` | Exasol hooks and operators | -+---------------------+-----------------------------------------------------+-----------------------------------------------------------------------------------+ -| kubernetes | ``pip install 'apache-airflow[cncf.kubernetes]'`` | Kubernetes Executor and operator | -+---------------------+-----------------------------------------------------+-----------------------------------------------------------------------------------+ -| mongo | ``pip install 'apache-airflow[mongo]'`` | Mongo hooks and 
operators | -+---------------------+-----------------------------------------------------+-----------------------------------------------------------------------------------+ -| mssql (deprecated) | ``pip install 'apache-airflow[microsoft.mssql]'`` | Microsoft SQL Server operators and hook, | -| | | support as an Airflow backend. Uses pymssql. | -| | | Will be replaced by subpackage ``odbc``. | -+---------------------+-----------------------------------------------------+-----------------------------------------------------------------------------------+ -| mysql | ``pip install 'apache-airflow[mysql]'`` | MySQL operators and hook, support as an Airflow | -| | | backend. The version of MySQL server has to be | -| | | 5.6.4+. The exact version upper bound depends | -| | | on version of ``mysqlclient`` package. For | -| | | example, ``mysqlclient`` 1.3.12 can only be | -| | | used with MySQL server 5.6.4 through 5.7. | -+---------------------+-----------------------------------------------------+-----------------------------------------------------------------------------------+ -| odbc | ``pip install 'apache-airflow[odbc]'`` | ODBC data sources including MS SQL Server. Can use MsSqlOperator, | -| | | or as metastore database backend. Uses pyodbc. | -| | | See :ref:`howto/connection/odbc` for more info. | -+---------------------+-----------------------------------------------------+-----------------------------------------------------------------------------------+ -| oracle | ``pip install 'apache-airflow[oracle]'`` | Oracle hooks and operators | -+---------------------+-----------------------------------------------------+-----------------------------------------------------------------------------------+ -| pinot | ``pip install 'apache-airflow[pinot]'`` | Pinot DB hook | -+---------------------+-----------------------------------------------------+-----------------------------------------------------------------------------------+ -| postgres | ``pip install 'apache-airflow[postgres]'`` | PostgreSQL operators and hook, support as an | -| | | Airflow backend | -+---------------------+-----------------------------------------------------+-----------------------------------------------------------------------------------+ -| rabbitmq | ``pip install 'apache-airflow[rabbitmq]'`` | RabbitMQ support as a Celery backend | -+---------------------+-----------------------------------------------------+-----------------------------------------------------------------------------------+ -| redis | ``pip install 'apache-airflow[redis]'`` | Redis hooks and sensors | -+---------------------+-----------------------------------------------------+-----------------------------------------------------------------------------------+ -| samba | ``pip install 'apache-airflow[samba]'`` | :class:`airflow.providers.apache.hive.operators.hive_to_samba.Hive2SambaOperator` | -+---------------------+-----------------------------------------------------+-----------------------------------------------------------------------------------+ -| statsd | ``pip install 'apache-airflow[statsd]'`` | Needed by StatsD metrics | -+---------------------+-----------------------------------------------------+-----------------------------------------------------------------------------------+ ++---------------------+-----------------------------------------------------+------------------------------------------------------------------------------------+ +| subpackage | install command | enables | 
++=====================+=====================================================+====================================================================================+ +| async | ``pip install 'apache-airflow[async]'`` | Async worker classes for Gunicorn | ++---------------------+-----------------------------------------------------+------------------------------------------------------------------------------------+ +| celery | ``pip install 'apache-airflow[celery]'`` | CeleryExecutor | ++---------------------+-----------------------------------------------------+------------------------------------------------------------------------------------+ +| dask | ``pip install 'apache-airflow[dask]'`` | DaskExecutor | ++---------------------+-----------------------------------------------------+------------------------------------------------------------------------------------+ +| docker | ``pip install 'apache-airflow[docker]'`` | Docker hooks and operators | ++---------------------+-----------------------------------------------------+------------------------------------------------------------------------------------+ +| elasticsearch | ``pip install 'apache-airflow[elasticsearch]'`` | Elasticsearch hooks and Log Handler | ++---------------------+-----------------------------------------------------+------------------------------------------------------------------------------------+ +| exasol | ``pip install 'apache-airflow[exasol]'`` | Exasol hooks and operators | ++---------------------+-----------------------------------------------------+------------------------------------------------------------------------------------+ +| kubernetes | ``pip install 'apache-airflow[cncf.kubernetes]'`` | Kubernetes Executor and operator | ++---------------------+-----------------------------------------------------+------------------------------------------------------------------------------------+ +| mongo | ``pip install 'apache-airflow[mongo]'`` | Mongo hooks and operators | ++---------------------+-----------------------------------------------------+------------------------------------------------------------------------------------+ +| mssql (deprecated) | ``pip install 'apache-airflow[microsoft.mssql]'`` | Microsoft SQL Server operators and hook, | +| | | support as an Airflow backend. Uses pymssql. | +| | | Will be replaced by subpackage ``odbc``. | ++---------------------+-----------------------------------------------------+------------------------------------------------------------------------------------+ +| mysql | ``pip install 'apache-airflow[mysql]'`` | MySQL operators and hook, support as an Airflow | +| | | backend. The version of MySQL server has to be | +| | | 5.6.4+. The exact version upper bound depends | +| | | on version of ``mysqlclient`` package. For | +| | | example, ``mysqlclient`` 1.3.12 can only be | +| | | used with MySQL server 5.6.4 through 5.7. | ++---------------------+-----------------------------------------------------+------------------------------------------------------------------------------------+ +| odbc | ``pip install 'apache-airflow[odbc]'`` | ODBC data sources including MS SQL Server. Can use MsSqlOperator, | +| | | or as metastore database backend. Uses pyodbc. | +| | | See :ref:`howto/connection/odbc` for more info. 
| ++---------------------+-----------------------------------------------------+------------------------------------------------------------------------------------+ +| oracle | ``pip install 'apache-airflow[oracle]'`` | Oracle hooks and operators | ++---------------------+-----------------------------------------------------+------------------------------------------------------------------------------------+ +| pinot | ``pip install 'apache-airflow[pinot]'`` | Pinot DB hook | ++---------------------+-----------------------------------------------------+------------------------------------------------------------------------------------+ +| postgres | ``pip install 'apache-airflow[postgres]'`` | PostgreSQL operators and hook, support as an | +| | | Airflow backend | ++---------------------+-----------------------------------------------------+------------------------------------------------------------------------------------+ +| rabbitmq | ``pip install 'apache-airflow[rabbitmq]'`` | RabbitMQ support as a Celery backend | ++---------------------+-----------------------------------------------------+------------------------------------------------------------------------------------+ +| redis | ``pip install 'apache-airflow[redis]'`` | Redis hooks and sensors | ++---------------------+-----------------------------------------------------+------------------------------------------------------------------------------------+ +| samba | ``pip install 'apache-airflow[samba]'`` | :class:`airflow.providers.apache.hive.transfers.hive_to_samba.HiveToSambaOperator` | ++---------------------+-----------------------------------------------------+------------------------------------------------------------------------------------+ +| statsd | ``pip install 'apache-airflow[statsd]'`` | Needed by StatsD metrics | ++---------------------+-----------------------------------------------------+------------------------------------------------------------------------------------+ **Other:** diff --git a/docs/operators-and-hooks-ref.rst b/docs/operators-and-hooks-ref.rst index 46f5f1d013925..01b1401613191 100644 --- a/docs/operators-and-hooks-ref.rst +++ b/docs/operators-and-hooks-ref.rst @@ -213,52 +213,52 @@ Foundation. 
* - `Amazon Simple Storage Service (S3) `_ - `Apache Hive `__ - - - :mod:`airflow.providers.apache.hive.operators.s3_to_hive` + - :mod:`airflow.providers.apache.hive.transfers.s3_to_hive` * - `Amazon Simple Storage Service (S3) `_ - `MySQL `__ - - - :mod:`airflow.providers.mysql.operators.s3_to_mysql` + - :mod:`airflow.providers.mysql.transfers.s3_to_mysql` * - `Apache Cassandra `__ - `Google Cloud Storage (GCS) `__ - - - :mod:`airflow.providers.google.cloud.operators.cassandra_to_gcs` + - :mod:`airflow.providers.google.cloud.transfers.cassandra_to_gcs` * - `Apache Hive `__ - `Amazon DynamoDB `__ - - - :mod:`airflow.providers.amazon.aws.operators.hive_to_dynamodb` + - :mod:`airflow.providers.amazon.aws.transfers.hive_to_dynamodb` * - `Apache Hive `__ - `Apache Druid `__ - - - :mod:`airflow.providers.apache.druid.operators.hive_to_druid` + - :mod:`airflow.providers.apache.druid.transfers.hive_to_druid` * - `Apache Hive `__ - `MySQL `__ - - - :mod:`airflow.providers.apache.hive.operators.hive_to_mysql` + - :mod:`airflow.providers.apache.hive.transfers.hive_to_mysql` * - `Apache Hive `__ - `Samba `__ - - - :mod:`airflow.providers.apache.hive.operators.hive_to_samba` + - :mod:`airflow.providers.apache.hive.transfers.hive_to_samba` * - `Microsoft SQL Server (MSSQL) `__ - `Apache Hive `__ - - - :mod:`airflow.providers.apache.hive.operators.mssql_to_hive` + - :mod:`airflow.providers.apache.hive.transfers.mssql_to_hive` * - `MySQL `__ - `Apache Hive `__ - - - :mod:`airflow.providers.apache.hive.operators.mysql_to_hive` + - :mod:`airflow.providers.apache.hive.transfers.mysql_to_hive` * - `Vertica `__ - `Apache Hive `__ - - - :mod:`airflow.providers.apache.hive.operators.vertica_to_hive` + - :mod:`airflow.providers.apache.hive.transfers.vertica_to_hive` .. _Azure: @@ -335,17 +335,17 @@ These integrations allow you to copy data from/to Microsoft Azure. * - `Azure Data Lake Storage `__ - `Google Cloud Storage (GCS) `__ - - - :mod:`airflow.providers.google.cloud.operators.adls_to_gcs` + - :mod:`airflow.providers.google.cloud.transfers.adls_to_gcs` * - Local - `Azure Blob Storage `__ - - - :mod:`airflow.providers.microsoft.azure.operators.file_to_wasb` + - :mod:`airflow.providers.microsoft.azure.transfers.file_to_wasb` * - `Oracle `__ - `Azure Data Lake Storage `__ - - - :mod:`airflow.providers.microsoft.azure.operators.oracle_to_azure_data_lake_transfer` + - :mod:`airflow.providers.microsoft.azure.transfers.oracle_to_azure_data_lake` .. _AWS: @@ -520,7 +520,7 @@ These integrations allow you to copy data from/to Amazon Web Services. All GCP services :ref:`[1] ` - `Amazon Simple Storage Service (S3) `__ - :doc:`How to use ` - - :mod:`airflow.providers.amazon.aws.operators.google_api_to_s3_transfer` + - :mod:`airflow.providers.amazon.aws.transfers.google_api_to_s3` * - `Amazon DataSync `__ - `Amazon Simple Storage Service (S3) `_ @@ -530,63 +530,63 @@ These integrations allow you to copy data from/to Amazon Web Services. 
* - `Amazon DynamoDB `__ - `Amazon Simple Storage Service (S3) `_ - - - :mod:`airflow.providers.amazon.aws.operators.dynamodb_to_s3` + - :mod:`airflow.providers.amazon.aws.transfers.dynamodb_to_s3` * - `Amazon Redshift `__ - `Amazon Simple Storage Service (S3) `_ - - - :mod:`airflow.providers.amazon.aws.operators.redshift_to_s3` + - :mod:`airflow.providers.amazon.aws.transfers.redshift_to_s3` * - `Amazon Simple Storage Service (S3) `_ - `Amazon Redshift `__ - :doc:`How to use ` - - :mod:`airflow.providers.amazon.aws.operators.s3_to_redshift` + - :mod:`airflow.providers.amazon.aws.transfers.s3_to_redshift` * - `Amazon Simple Storage Service (S3) `_ - `Snowflake `__ - - - :mod:`airflow.providers.snowflake.operators.s3_to_snowflake` + - :mod:`airflow.providers.snowflake.transfers.s3_to_snowflake` * - `Amazon Simple Storage Service (S3) `_ - `Apache Hive `__ - - - :mod:`airflow.providers.apache.hive.operators.s3_to_hive` + - :mod:`airflow.providers.apache.hive.transfers.s3_to_hive` * - `Amazon Simple Storage Service (S3) `__ - `Google Cloud Storage (GCS) `__ - :doc:`How to use ` - - :mod:`airflow.providers.google.cloud.operators.s3_to_gcs`, + - :mod:`airflow.providers.google.cloud.transfers.s3_to_gcs`, :mod:`airflow.providers.google.cloud.operators.cloud_storage_transfer_service` * - `Amazon Simple Storage Service (S3) `_ - `SSH File Transfer Protocol (SFTP) `__ - - - :mod:`airflow.providers.amazon.aws.operators.s3_to_sftp` + - :mod:`airflow.providers.amazon.aws.transfers.s3_to_sftp` * - `Apache Hive `__ - `Amazon DynamoDB `__ - - - :mod:`airflow.providers.amazon.aws.operators.hive_to_dynamodb` + - :mod:`airflow.providers.amazon.aws.transfers.hive_to_dynamodb` * - `Google Cloud Storage (GCS) `__ - `Amazon Simple Storage Service (S3) `__ - - - :mod:`airflow.providers.amazon.aws.operators.gcs_to_s3` + - :mod:`airflow.providers.amazon.aws.transfers.gcs_to_s3` * - `Internet Message Access Protocol (IMAP) `__ - `Amazon Simple Storage Service (S3) `__ - :doc:`How to use ` - - :mod:`airflow.providers.amazon.aws.operators.imap_attachment_to_s3` + - :mod:`airflow.providers.amazon.aws.transfers.imap_attachment_to_s3` * - `MongoDB `__ - `Amazon Simple Storage Service (S3) `__ - - - :mod:`airflow.providers.amazon.aws.operators.mongo_to_s3` + - :mod:`airflow.providers.amazon.aws.transfers.mongo_to_s3` * - `SSH File Transfer Protocol (SFTP) `__ - `Amazon Simple Storage Service (S3) `_ - - - :mod:`airflow.providers.amazon.aws.operators.sftp_to_s3` + - :mod:`airflow.providers.amazon.aws.transfers.sftp_to_s3` :ref:`[1] ` Those discovery-based operators use :class:`~airflow.providers.google.common.hooks.discovery_api.GoogleDiscoveryApiHook` to communicate with Google @@ -850,43 +850,49 @@ These integrations allow you to copy data from/to Google Cloud Platform. 
All services :ref:`[1] ` - `Amazon Simple Storage Service (S3) `__ - :doc:`How to use ` - - :mod:`airflow.providers.amazon.aws.operators.google_api_to_s3_transfer` + - :mod:`airflow.providers.amazon.aws.transfers.google_api_to_s3` * - `Amazon Simple Storage Service (S3) `__ - `Google Cloud Storage (GCS) `__ - :doc:`How to use ` - - :mod:`airflow.providers.google.cloud.operators.s3_to_gcs`, + - :mod:`airflow.providers.google.cloud.transfers.s3_to_gcs`, :mod:`airflow.providers.google.cloud.operators.cloud_storage_transfer_service` * - `Apache Cassandra `__ - `Google Cloud Storage (GCS) `__ - - - :mod:`airflow.providers.google.cloud.operators.cassandra_to_gcs` + - :mod:`airflow.providers.google.cloud.transfers.cassandra_to_gcs` * - `Azure Data Lake Storage `__ - `Google Cloud Storage (GCS) `__ - - - :mod:`airflow.providers.google.cloud.operators.adls_to_gcs` + - :mod:`airflow.providers.google.cloud.transfers.adls_to_gcs` * - `Facebook Ads `__ - `Google Cloud Storage (GCS) `__ - :doc:`How to use ` - - :mod:`airflow.providers.google.cloud.operators.facebook_ads_to_gcs` + - :mod:`airflow.providers.google.cloud.transfers.facebook_ads_to_gcs` + + + * - `Google Ads `__ + - `Google Cloud Storage (GCS) `__ + - :doc:`How to use ` + - :mod:`airflow.providers.google.ads.transfers.ads_to_gcs` * - `Google BigQuery `__ - `MySQL `__ - - - :mod:`airflow.providers.google.cloud.operators.bigquery_to_mysql` + - :mod:`airflow.providers.google.cloud.transfers.bigquery_to_mysql` * - `Google BigQuery `__ - `Google Cloud Storage (GCS) `__ - - - :mod:`airflow.providers.google.cloud.operators.bigquery_to_gcs` + - :mod:`airflow.providers.google.cloud.transfers.bigquery_to_gcs` * - `Google BigQuery `__ - `Google BigQuery `__ - - - :mod:`airflow.providers.google.cloud.operators.bigquery_to_bigquery` + - :mod:`airflow.providers.google.cloud.transfers.bigquery_to_bigquery` * - `Cloud Firestore `__ - `Google Cloud Storage (GCS) `__ @@ -896,74 +902,79 @@ These integrations allow you to copy data from/to Google Cloud Platform. 
* - `Google Cloud Storage (GCS) `__ - `Amazon Simple Storage Service (S3) `__ - - - :mod:`airflow.providers.amazon.aws.operators.gcs_to_s3` + - :mod:`airflow.providers.amazon.aws.transfers.gcs_to_s3` * - `Google Cloud Storage (GCS) `__ - `Google BigQuery `__ - - - :mod:`airflow.providers.google.cloud.operators.gcs_to_bigquery` + - :mod:`airflow.providers.google.cloud.transfers.gcs_to_bigquery` * - `Google Cloud Storage (GCS) `__ - `Google Cloud Storage (GCS) `__ - :doc:`How to use `, :doc:`How to use ` - - :mod:`airflow.providers.google.cloud.operators.gcs_to_gcs`, + - :mod:`airflow.providers.google.cloud.transfers.gcs_to_gcs`, :mod:`airflow.providers.google.cloud.operators.cloud_storage_transfer_service` + * - `Google Cloud Storage (GCS) `__ + - Local + - :doc:`How to use ` + - :mod:`airflow.providers.google.cloud.transfers.gcs_to_local` + * - `Google Cloud Storage (GCS) `__ - `Google Drive `__ - - - :mod:`airflow.providers.google.suite.operators.gcs_to_gdrive` + - :mod:`airflow.providers.google.suite.transfers.gcs_to_gdrive` * - `Google Cloud Storage (GCS) `__ - SFTP - :doc:`How to use ` - - :mod:`airflow.providers.google.cloud.operators.gcs_to_sftp` + - :mod:`airflow.providers.google.cloud.transfers.gcs_to_sftp` * - Local - `Google Cloud Storage (GCS) `__ - :doc:`How to use ` - - :mod:`airflow.providers.google.cloud.operators.local_to_gcs` + - :mod:`airflow.providers.google.cloud.transfers.local_to_gcs` * - `Microsoft SQL Server (MSSQL) `__ - `Google Cloud Storage (GCS) `__ - - - :mod:`airflow.providers.google.cloud.operators.mssql_to_gcs` + - :mod:`airflow.providers.google.cloud.transfers.mssql_to_gcs` * - `MySQL `__ - `Google Cloud Storage (GCS) `__ - - - :mod:`airflow.providers.google.cloud.operators.mysql_to_gcs` + - :mod:`airflow.providers.google.cloud.transfers.mysql_to_gcs` * - `PostgresSQL `__ - `Google Cloud Storage (GCS) `__ - - - :mod:`airflow.providers.google.cloud.operators.postgres_to_gcs` + - :mod:`airflow.providers.google.cloud.transfers.postgres_to_gcs` * - `Presto `__ - `Google Cloud Storage (GCS) `__ - :doc:`How to use ` - - :mod:`airflow.providers.google.cloud.operators.presto_to_gcs` + - :mod:`airflow.providers.google.cloud.transfers.presto_to_gcs` * - SFTP - `Google Cloud Storage (GCS) `__ - :doc:`How to use ` - - :mod:`airflow.providers.google.cloud.operators.sftp_to_gcs` + - :mod:`airflow.providers.google.cloud.transfers.sftp_to_gcs` * - SQL - `Google Cloud Storage (GCS) `__ - - - :mod:`airflow.providers.google.cloud.operators.sql_to_gcs` + - :mod:`airflow.providers.google.cloud.transfers.sql_to_gcs` * - `Google Spreadsheet `__ - `Google Cloud Storage (GCS) `__ - :doc:`How to use ` - - :mod:`airflow.providers.google.cloud.operators.sheets_to_gcs` + - :mod:`airflow.providers.google.cloud.transfers.sheets_to_gcs` * - `Google Cloud Storage (GCS) `__ - `Google Spreadsheet `__ - :doc:`How to use ` - - :mod:`airflow.providers.google.suite.operators.gcs_to_sheets` + - :mod:`airflow.providers.google.suite.transfers.gcs_to_sheets` .. 
_integration:GCP-Discovery: @@ -1219,7 +1230,7 @@ These integrations allow you to perform various operations within various servic - - :mod:`airflow.providers.snowflake.hooks.snowflake` - :mod:`airflow.providers.snowflake.operators.snowflake`, - :mod:`airflow.providers.snowflake.operators.snowflake_to_slack` + :mod:`airflow.providers.snowflake.transfers.snowflake_to_slack` - * - `Vertica `__ @@ -1250,17 +1261,17 @@ These integrations allow you to perform various operations within various servic * - `Google Cloud Storage (GCS) `__ - `Google Drive `__ - :doc:`How to use ` - - :mod:`airflow.providers.google.suite.operators.gcs_to_gdrive` + - :mod:`airflow.providers.google.suite.transfers.gcs_to_gdrive` * - `Vertica `__ - `Apache Hive `__ - - - :mod:`airflow.providers.apache.hive.operators.vertica_to_hive` + - :mod:`airflow.providers.apache.hive.transfers.vertica_to_hive` * - `Vertica `__ - `MySQL `__ - - - :mod:`airflow.providers.mysql.operators.vertica_to_mysql` + - :mod:`airflow.providers.mysql.transfers.vertica_to_mysql` .. _software: @@ -1425,72 +1436,72 @@ These integrations allow you to copy data. * - `Apache Hive `__ - `Samba `__ - - - :mod:`airflow.providers.apache.hive.operators.hive_to_samba` + - :mod:`airflow.providers.apache.hive.transfers.hive_to_samba` * - `BigQuery `__ - `MySQL `__ - - - :mod:`airflow.providers.google.cloud.operators.bigquery_to_mysql` + - :mod:`airflow.providers.google.cloud.transfers.bigquery_to_mysql` * - `Microsoft SQL Server (MSSQL) `__ - `Apache Hive `__ - - - :mod:`airflow.providers.apache.hive.operators.mssql_to_hive` + - :mod:`airflow.providers.apache.hive.transfers.mssql_to_hive` * - `Microsoft SQL Server (MSSQL) `__ - `Google Cloud Storage (GCS) `__ - - - :mod:`airflow.providers.google.cloud.operators.mssql_to_gcs` + - :mod:`airflow.providers.google.cloud.transfers.mssql_to_gcs` * - `MongoDB `__ - `Amazon Simple Storage Service (S3) `__ - - - :mod:`airflow.providers.amazon.aws.operators.mongo_to_s3` + - :mod:`airflow.providers.amazon.aws.transfers.mongo_to_s3` * - `MySQL `__ - `Apache Hive `__ - - - :mod:`airflow.providers.apache.hive.operators.mysql_to_hive` + - :mod:`airflow.providers.apache.hive.transfers.mysql_to_hive` * - `MySQL `__ - `Google Cloud Storage (GCS) `__ - - - :mod:`airflow.providers.google.cloud.operators.mysql_to_gcs` + - :mod:`airflow.providers.google.cloud.transfers.mysql_to_gcs` * - `Oracle `__ - `Azure Data Lake Storage `__ - - - :mod:`airflow.providers.microsoft.azure.operators.oracle_to_azure_data_lake_transfer` + - :mod:`airflow.providers.microsoft.azure.transfers.oracle_to_azure_data_lake` * - `Oracle `__ - `Oracle `__ - - - :mod:`airflow.providers.oracle.operators.oracle_to_oracle_transfer` + - :mod:`airflow.providers.oracle.transfers.oracle_to_oracle` * - `PostgresSQL `__ - `Google Cloud Storage (GCS) `__ - - - :mod:`airflow.providers.google.cloud.operators.postgres_to_gcs` + - :mod:`airflow.providers.google.cloud.transfers.postgres_to_gcs` * - `Presto `__ - `MySQL `__ - - - :mod:`airflow.providers.mysql.operators.presto_to_mysql` + - :mod:`airflow.providers.mysql.transfers.presto_to_mysql` * - SQL - `Google Cloud Storage (GCS) `__ - - - :mod:`airflow.providers.google.cloud.operators.sql_to_gcs` + - :mod:`airflow.providers.google.cloud.transfers.sql_to_gcs` * - `Vertica `__ - `Apache Hive `__ - - - :mod:`airflow.providers.apache.hive.operators.vertica_to_hive` + - :mod:`airflow.providers.apache.hive.transfers.vertica_to_hive` * - `Vertica `__ - `MySQL `__ - - - :mod:`airflow.providers.mysql.operators.vertica_to_mysql` + - 
:mod:`airflow.providers.mysql.transfers.vertica_to_mysql` .. _protocol: @@ -1588,24 +1599,24 @@ These integrations allow you to copy data. * - `Amazon Simple Storage Service (S3) `_ - `SSH File Transfer Protocol (SFTP) `__ - - - :mod:`airflow.providers.amazon.aws.operators.s3_to_sftp` + - :mod:`airflow.providers.amazon.aws.transfers.s3_to_sftp` * - Filesystem - `Azure Blob Storage `__ - - - :mod:`airflow.providers.microsoft.azure.operators.file_to_wasb` + - :mod:`airflow.providers.microsoft.azure.transfers.file_to_wasb` * - Filesystem - `Google Cloud Storage (GCS) `__ - - - :mod:`airflow.providers.google.cloud.operators.local_to_gcs` + - :mod:`airflow.providers.google.cloud.transfers.local_to_gcs` * - `Internet Message Access Protocol (IMAP) `__ - `Amazon Simple Storage Service (S3) `__ - :doc:`How to use ` - - :mod:`airflow.providers.amazon.aws.operators.imap_attachment_to_s3` + - :mod:`airflow.providers.amazon.aws.transfers.imap_attachment_to_s3` * - `SSH File Transfer Protocol (SFTP) `__ - `Amazon Simple Storage Service (S3) `_ - - - :mod:`airflow.providers.amazon.aws.operators.sftp_to_s3` + - :mod:`airflow.providers.amazon.aws.transfers.sftp_to_s3` diff --git a/docs/plugins.rst b/docs/plugins.rst index c67ead9b36485..63719d5d9d099 100644 --- a/docs/plugins.rst +++ b/docs/plugins.rst @@ -161,7 +161,7 @@ definitions in Airflow. from airflow.hooks.base_hook import BaseHook from airflow.models import BaseOperator from airflow.models.baseoperator import BaseOperatorLink - from airflow.providers.amazon.aws.operators.gcs_to_s3 import GCSToS3Operator + from airflow.providers.amazon.aws.transfers.gcs_to_s3 import GCSToS3Operator from airflow.sensors.base_sensor_operator import BaseSensorOperator # Will show up under airflow.hooks.test_plugin.PluginHook diff --git a/requirements/requirements-python3.6.txt b/requirements/requirements-python3.6.txt index fe07f3498eaa9..0a4e142003bce 100644 --- a/requirements/requirements-python3.6.txt +++ b/requirements/requirements-python3.6.txt @@ -15,7 +15,7 @@ Flask==1.1.2 GitPython==3.1.3 HeapDict==1.0.1 JPype1==0.7.5 -JayDeBeApi==1.2.2 +JayDeBeApi==1.2.3 Jinja2==2.10.3 Mako==1.1.3 Markdown==2.6.11 @@ -29,7 +29,7 @@ Pygments==2.6.1 SQLAlchemy-JSONField==0.9.0 SQLAlchemy-Utils==0.36.6 SQLAlchemy==1.3.17 -Sphinx==3.1.0 +Sphinx==3.1.1 Unidecode==1.1.1 WTForms==2.3.1 Werkzeug==0.16.1 @@ -72,9 +72,9 @@ beautifulsoup4==4.7.1 billiard==3.6.3.0 black==19.10b0 blinker==1.4 -boto3==1.14.0 +boto3==1.14.3 boto==2.49.0 -botocore==1.17.0 +botocore==1.17.3 bowler==0.8.0 cached-property==1.5.1 cachetools==4.1.0 @@ -96,7 +96,7 @@ colorlog==4.0.2 connexion==2.7.0 contextvars==2.4 coverage==5.1 -croniter==0.3.32 +croniter==0.3.33 cryptography==2.9.2 curlify==2.2.1 cx-Oracle==7.3.0 @@ -104,7 +104,7 @@ dask==2.18.1 datadog==0.36.0 decorator==4.4.2 defusedxml==0.6.0 -dill==0.3.1.1 +dill==0.3.2 distlib==0.3.0 distributed==2.18.0 dnspython==1.16.0 @@ -135,14 +135,14 @@ funcsigs==1.0.2 future-fstrings==1.2.0 future==0.18.2 gcsfs==0.6.2 -gevent==20.6.0 +gevent==20.6.1 gitdb==4.0.5 google-ads==4.0.0 google-api-core==1.20.0 -google-api-python-client==1.9.2 +google-api-python-client==1.9.3 google-auth-httplib2==0.0.3 google-auth-oauthlib==0.4.1 -google-auth==1.16.1 +google-auth==1.17.2 google-cloud-automl==0.10.0 google-cloud-bigquery-datatransfer==1.0.0 google-cloud-bigquery==1.25.0 @@ -161,7 +161,7 @@ google-cloud-redis==1.0.0 google-cloud-secret-manager==1.0.0 google-cloud-spanner==1.17.0 google-cloud-speech==1.3.2 -google-cloud-storage==1.28.1 +google-cloud-storage==1.29.0 
google-cloud-tasks==1.5.0 google-cloud-texttospeech==1.0.1 google-cloud-translate==2.0.1 @@ -187,7 +187,7 @@ ijson==2.6.1 imagesize==1.2.0 immutables==0.14 importlib-metadata==1.6.1 -importlib-resources==2.0.0 +importlib-resources==2.0.1 inflection==0.5.0 ipdb==0.13.2 ipython-genutils==0.2.0 @@ -220,7 +220,7 @@ mccabe==0.6.1 mock==4.0.2 mongomock==3.19.0 monotonic==1.5 -more-itertools==8.3.0 +more-itertools==8.4.0 moto==1.3.14 msgpack==1.0.0 msrest==0.6.16 @@ -257,7 +257,7 @@ pep562==1.0 pexpect==4.8.0 pickleshare==0.7.5 pinotdb==0.1.1 -pipdeptree==0.13.2 +pipdeptree==1.0.0 pluggy==0.13.1 pre-commit==2.5.1 presto-python-client==0.7.0 @@ -268,7 +268,7 @@ psutil==5.7.0 psycopg2-binary==2.8.5 ptyprocess==0.6.0 py4j==0.10.7 -py==1.8.1 +py==1.8.2 pyOpenSSL==19.1.0 pyarrow==0.17.1 pyasn1-modules==0.2.8 @@ -291,11 +291,11 @@ pypd==1.1.0 pyrsistent==0.16.0 pysftp==0.2.9 pyspark==2.4.6 -pytest-cov==2.9.0 +pytest-cov==2.10.0 pytest-forked==1.1.3 -pytest-instafail==0.4.1.post0 +pytest-instafail==0.4.2 pytest-rerunfailures==9.0 -pytest-timeout==1.3.4 +pytest-timeout==1.4.1 pytest-xdist==1.32.0 pytest==5.4.3 python-daemon==2.1.2 @@ -320,11 +320,11 @@ requests-ntlm==1.1.0 requests-oauthlib==1.1.0 requests-toolbelt==0.9.1 requests==2.23.0 -responses==0.10.14 -rsa==4.0 +responses==0.10.15 +rsa==4.6 s3transfer==0.3.3 sasl==0.2.1 -semver==2.10.1 +semver==2.10.2 sendgrid==6.3.1 sentinels==1.0.0 sentry-sdk==0.14.4 @@ -379,7 +379,7 @@ uritemplate==3.0.1 urllib3==1.25.9 vertica-python==0.10.4 vine==1.3.0 -virtualenv==20.0.21 +virtualenv==20.0.23 watchtower==0.7.3 wcwidth==0.2.4 websocket-client==0.57.0 diff --git a/requirements/requirements-python3.7.txt b/requirements/requirements-python3.7.txt index aa1b97bae5f6f..24487b5cb9cbf 100644 --- a/requirements/requirements-python3.7.txt +++ b/requirements/requirements-python3.7.txt @@ -15,7 +15,7 @@ Flask==1.1.2 GitPython==3.1.3 HeapDict==1.0.1 JPype1==0.7.5 -JayDeBeApi==1.2.2 +JayDeBeApi==1.2.3 Jinja2==2.10.3 Mako==1.1.3 Markdown==2.6.11 @@ -29,7 +29,7 @@ Pygments==2.6.1 SQLAlchemy-JSONField==0.9.0 SQLAlchemy-Utils==0.36.6 SQLAlchemy==1.3.17 -Sphinx==3.1.0 +Sphinx==3.1.1 Unidecode==1.1.1 WTForms==2.3.1 Werkzeug==0.16.1 @@ -45,7 +45,7 @@ apispec==1.3.3 appdirs==1.4.4 argcomplete==1.11.1 asn1crypto==1.3.0 -astroid==2.4.2 +astroid==2.3.3 async-generator==1.10 async-timeout==3.0.1 atlasclient==1.0.0 @@ -72,9 +72,9 @@ beautifulsoup4==4.7.1 billiard==3.6.3.0 black==19.10b0 blinker==1.4 -boto3==1.14.0 +boto3==1.14.3 boto==2.49.0 -botocore==1.17.0 +botocore==1.17.3 bowler==0.8.0 cached-property==1.5.1 cachetools==4.1.0 @@ -95,7 +95,7 @@ colorama==0.4.3 colorlog==4.0.2 connexion==2.7.0 coverage==5.1 -croniter==0.3.32 +croniter==0.3.33 cryptography==2.9.2 curlify==2.2.1 cx-Oracle==7.3.0 @@ -103,7 +103,7 @@ dask==2.18.1 datadog==0.36.0 decorator==4.4.2 defusedxml==0.6.0 -dill==0.3.1.1 +dill==0.3.2 distlib==0.3.0 distributed==2.18.0 dnspython==1.16.0 @@ -134,14 +134,14 @@ funcsigs==1.0.2 future-fstrings==1.2.0 future==0.18.2 gcsfs==0.6.2 -gevent==20.6.0 +gevent==20.6.1 gitdb==4.0.5 google-ads==5.1.0 google-api-core==1.20.0 -google-api-python-client==1.9.2 +google-api-python-client==1.9.3 google-auth-httplib2==0.0.3 google-auth-oauthlib==0.4.1 -google-auth==1.16.1 +google-auth==1.17.2 google-cloud-automl==0.10.0 google-cloud-bigquery-datatransfer==1.0.0 google-cloud-bigquery==1.25.0 @@ -160,7 +160,7 @@ google-cloud-redis==1.0.0 google-cloud-secret-manager==1.0.0 google-cloud-spanner==1.17.0 google-cloud-speech==1.3.2 -google-cloud-storage==1.28.1 +google-cloud-storage==1.29.0 
google-cloud-tasks==1.5.0 google-cloud-texttospeech==1.0.1 google-cloud-translate==2.0.1 @@ -216,7 +216,7 @@ mccabe==0.6.1 mock==4.0.2 mongomock==3.19.0 monotonic==1.5 -more-itertools==8.3.0 +more-itertools==8.4.0 moto==1.3.14 msgpack==1.0.0 msrest==0.6.16 @@ -252,7 +252,7 @@ pendulum==2.1.0 pexpect==4.8.0 pickleshare==0.7.5 pinotdb==0.1.1 -pipdeptree==0.13.2 +pipdeptree==1.0.0 pluggy==0.13.1 pre-commit==2.5.1 presto-python-client==0.7.0 @@ -263,7 +263,7 @@ psutil==5.7.0 psycopg2-binary==2.8.5 ptyprocess==0.6.0 py4j==0.10.7 -py==1.8.1 +py==1.8.2 pyOpenSSL==19.1.0 pyarrow==0.17.1 pyasn1-modules==0.2.8 @@ -286,11 +286,11 @@ pypd==1.1.0 pyrsistent==0.16.0 pysftp==0.2.9 pyspark==2.4.6 -pytest-cov==2.9.0 +pytest-cov==2.10.0 pytest-forked==1.1.3 -pytest-instafail==0.4.1.post0 +pytest-instafail==0.4.2 pytest-rerunfailures==9.0 -pytest-timeout==1.3.4 +pytest-timeout==1.4.1 pytest-xdist==1.32.0 pytest==5.4.3 python-daemon==2.1.2 @@ -315,11 +315,11 @@ requests-ntlm==1.1.0 requests-oauthlib==1.1.0 requests-toolbelt==0.9.1 requests==2.23.0 -responses==0.10.14 -rsa==4.0 +responses==0.10.15 +rsa==4.6 s3transfer==0.3.3 sasl==0.2.1 -semver==2.10.1 +semver==2.10.2 sendgrid==6.3.1 sentinels==1.0.0 sentry-sdk==0.14.4 @@ -373,7 +373,7 @@ uritemplate==3.0.1 urllib3==1.25.9 vertica-python==0.10.4 vine==1.3.0 -virtualenv==20.0.21 +virtualenv==20.0.23 watchtower==0.7.3 wcwidth==0.2.4 websocket-client==0.57.0 diff --git a/requirements/requirements-python3.8.txt b/requirements/requirements-python3.8.txt index 976bf7f4a8d10..4e7317fb52493 100644 --- a/requirements/requirements-python3.8.txt +++ b/requirements/requirements-python3.8.txt @@ -15,7 +15,7 @@ Flask==1.1.2 GitPython==3.1.3 HeapDict==1.0.1 JPype1==0.7.5 -JayDeBeApi==1.2.2 +JayDeBeApi==1.2.3 Jinja2==2.10.3 Mako==1.1.3 Markdown==2.6.11 @@ -29,7 +29,7 @@ Pygments==2.6.1 SQLAlchemy-JSONField==0.9.0 SQLAlchemy-Utils==0.36.6 SQLAlchemy==1.3.17 -Sphinx==3.1.0 +Sphinx==3.1.1 Unidecode==1.1.1 WTForms==2.3.1 Werkzeug==0.16.1 @@ -45,7 +45,7 @@ apispec==1.3.3 appdirs==1.4.4 argcomplete==1.11.1 asn1crypto==1.3.0 -astroid==2.4.2 +astroid==2.3.3 async-generator==1.10 async-timeout==3.0.1 atlasclient==1.0.0 @@ -72,9 +72,9 @@ beautifulsoup4==4.7.1 billiard==3.6.3.0 black==19.10b0 blinker==1.4 -boto3==1.14.0 +boto3==1.14.3 boto==2.49.0 -botocore==1.17.0 +botocore==1.17.3 bowler==0.8.0 cached-property==1.5.1 cachetools==4.1.0 @@ -95,7 +95,7 @@ colorama==0.4.3 colorlog==4.0.2 connexion==2.7.0 coverage==5.1 -croniter==0.3.32 +croniter==0.3.33 cryptography==2.9.2 curlify==2.2.1 cx-Oracle==7.3.0 @@ -103,7 +103,7 @@ dask==2.18.1 datadog==0.36.0 decorator==4.4.2 defusedxml==0.6.0 -dill==0.3.1.1 +dill==0.3.2 distlib==0.3.0 distributed==2.18.0 dnspython==1.16.0 @@ -134,14 +134,14 @@ funcsigs==1.0.2 future-fstrings==1.2.0 future==0.18.2 gcsfs==0.6.2 -gevent==20.6.0 +gevent==20.6.1 gitdb==4.0.5 google-ads==5.1.0 google-api-core==1.20.0 -google-api-python-client==1.9.2 +google-api-python-client==1.9.3 google-auth-httplib2==0.0.3 google-auth-oauthlib==0.4.1 -google-auth==1.16.1 +google-auth==1.17.2 google-cloud-automl==0.10.0 google-cloud-bigquery-datatransfer==1.0.0 google-cloud-bigquery==1.25.0 @@ -160,7 +160,7 @@ google-cloud-redis==1.0.0 google-cloud-secret-manager==1.0.0 google-cloud-spanner==1.17.0 google-cloud-speech==1.3.2 -google-cloud-storage==1.28.1 +google-cloud-storage==1.29.0 google-cloud-tasks==1.5.0 google-cloud-texttospeech==1.0.1 google-cloud-translate==2.0.1 @@ -216,7 +216,7 @@ mccabe==0.6.1 mock==4.0.2 mongomock==3.19.0 monotonic==1.5 -more-itertools==8.3.0 
+more-itertools==8.4.0 moto==1.3.14 msgpack==1.0.0 msrest==0.6.16 @@ -252,7 +252,7 @@ pendulum==2.1.0 pexpect==4.8.0 pickleshare==0.7.5 pinotdb==0.1.1 -pipdeptree==0.13.2 +pipdeptree==1.0.0 pluggy==0.13.1 pre-commit==2.5.1 presto-python-client==0.7.0 @@ -263,7 +263,7 @@ psutil==5.7.0 psycopg2-binary==2.8.5 ptyprocess==0.6.0 py4j==0.10.7 -py==1.8.1 +py==1.8.2 pyOpenSSL==19.1.0 pyarrow==0.17.1 pyasn1-modules==0.2.8 @@ -285,11 +285,11 @@ pypd==1.1.0 pyrsistent==0.16.0 pysftp==0.2.9 pyspark==2.4.6 -pytest-cov==2.9.0 +pytest-cov==2.10.0 pytest-forked==1.1.3 -pytest-instafail==0.4.1.post0 +pytest-instafail==0.4.2 pytest-rerunfailures==9.0 -pytest-timeout==1.3.4 +pytest-timeout==1.4.1 pytest-xdist==1.32.0 pytest==5.4.3 python-daemon==2.1.2 @@ -314,11 +314,11 @@ requests-ntlm==1.1.0 requests-oauthlib==1.1.0 requests-toolbelt==0.9.1 requests==2.23.0 -responses==0.10.14 -rsa==4.1 +responses==0.10.15 +rsa==4.6 s3transfer==0.3.3 sasl==0.2.1 -semver==2.10.1 +semver==2.10.2 sendgrid==6.3.1 sentinels==1.0.0 sentry-sdk==0.14.4 @@ -372,11 +372,11 @@ uritemplate==3.0.1 urllib3==1.25.9 vertica-python==0.10.4 vine==1.3.0 -virtualenv==20.0.21 +virtualenv==20.0.23 watchtower==0.7.3 wcwidth==0.2.4 websocket-client==0.57.0 -wrapt==1.12.1 +wrapt==1.11.2 xmltodict==0.12.0 yamllint==1.23.0 yandexcloud==0.41.0 diff --git a/scripts/ci/in_container/_in_container_utils.sh b/scripts/ci/in_container/_in_container_utils.sh index bb11fc51f6b6f..35f6d622ac694 100644 --- a/scripts/ci/in_container/_in_container_utils.sh +++ b/scripts/ci/in_container/_in_container_utils.sh @@ -97,7 +97,7 @@ function in_container_fix_ownership() { set +o pipefail echo "Fixing ownership of mounted files" sudo find "${AIRFLOW_SOURCES}" -print0 -user root \ - | sudo xargs --null chown "${HOST_USER_ID}.${HOST_GROUP_ID}" --no-dereference >/dev/null 2>&1 + | sudo xargs --null chown "${HOST_USER_ID}.${HOST_GROUP_ID}" --no-dereference || true >/dev/null 2>&1 sudo find "/root/.aws" "/root/.azure" "/root/.config" "/root/.docker" -print0 -user root \ | sudo xargs --null chown "${HOST_USER_ID}.${HOST_GROUP_ID}" --no-dereference || true >/dev/null 2>&1 set -o pipefail diff --git a/tests/providers/amazon/aws/transfers/__init__.py b/tests/providers/amazon/aws/transfers/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/providers/amazon/aws/transfers/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
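The test modules that follow are renamed to mirror the new `transfers` packages, and several classes lose their `Transfer` suffix. A minimal usage sketch under the post-move layout; the module path and the `S3ToRedshiftOperator` name come from the hunks below, while the DAG id, schema, table, bucket, key, and connection ids are illustrative placeholders:

```python
# A minimal sketch of the relocated transfer operator. All values below are
# illustrative placeholders, not values taken from this diff.
from airflow import DAG
from airflow.providers.amazon.aws.transfers.s3_to_redshift import S3ToRedshiftOperator
from airflow.utils.dates import days_ago

with DAG(
    dag_id="example_s3_to_redshift",
    start_date=days_ago(1),
    schedule_interval=None,
) as dag:
    load_table = S3ToRedshiftOperator(
        task_id="s3_to_redshift",
        schema="public",        # target Redshift schema (placeholder)
        table="my_table",       # target Redshift table (placeholder)
        s3_bucket="my-bucket",  # source S3 bucket (placeholder)
        s3_key="data/",         # source key prefix (placeholder)
        copy_options=["CSV"],   # extra COPY options, if any
        redshift_conn_id="redshift_default",
        aws_conn_id="aws_default",
    )
```

Apart from the import path and the class name, the rename hunks below leave call signatures untouched.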
diff --git a/tests/providers/amazon/aws/operators/test_dynamodb_to_s3.py b/tests/providers/amazon/aws/transfers/test_dynamodb_to_s3.py similarity index 92% rename from tests/providers/amazon/aws/operators/test_dynamodb_to_s3.py rename to tests/providers/amazon/aws/transfers/test_dynamodb_to_s3.py index d51231c5b47a5..ee27aa0cfd285 100644 --- a/tests/providers/amazon/aws/operators/test_dynamodb_to_s3.py +++ b/tests/providers/amazon/aws/transfers/test_dynamodb_to_s3.py @@ -20,7 +20,7 @@ import unittest from unittest.mock import MagicMock, patch -from airflow.providers.amazon.aws.operators.dynamodb_to_s3 import DynamoDBToS3Operator +from airflow.providers.amazon.aws.transfers.dynamodb_to_s3 import DynamoDBToS3Operator class DynamodbToS3Test(unittest.TestCase): @@ -34,8 +34,8 @@ def mock_upload_file(self, Filename, Bucket, Key): # pylint: disable=unused-arg for line in lines: self.output_queue.append(json.loads(line)) - @patch('airflow.providers.amazon.aws.operators.dynamodb_to_s3.S3Hook') - @patch('airflow.providers.amazon.aws.operators.dynamodb_to_s3.AwsDynamoDBHook') + @patch('airflow.providers.amazon.aws.transfers.dynamodb_to_s3.S3Hook') + @patch('airflow.providers.amazon.aws.transfers.dynamodb_to_s3.AwsDynamoDBHook') def test_dynamodb_to_s3_success(self, mock_aws_dynamodb_hook, mock_s3_hook): responses = [ { diff --git a/tests/providers/amazon/aws/operators/test_gcs_to_s3.py b/tests/providers/amazon/aws/transfers/test_gcs_to_s3.py similarity index 95% rename from tests/providers/amazon/aws/operators/test_gcs_to_s3.py rename to tests/providers/amazon/aws/transfers/test_gcs_to_s3.py index 97b7668f3fca6..eb5d0582f0bbd 100644 --- a/tests/providers/amazon/aws/operators/test_gcs_to_s3.py +++ b/tests/providers/amazon/aws/transfers/test_gcs_to_s3.py @@ -21,7 +21,7 @@ import mock from airflow.providers.amazon.aws.hooks.s3 import S3Hook -from airflow.providers.amazon.aws.operators.gcs_to_s3 import GCSToS3Operator +from airflow.providers.amazon.aws.transfers.gcs_to_s3 import GCSToS3Operator try: from moto import mock_s3 @@ -41,7 +41,7 @@ class TestGCSToS3Operator(unittest.TestCase): # Test1: incremental behaviour (just some files missing) @mock_s3 @mock.patch('airflow.providers.google.cloud.operators.gcs.GCSHook') - @mock.patch('airflow.providers.amazon.aws.operators.gcs_to_s3.GCSHook') + @mock.patch('airflow.providers.amazon.aws.transfers.gcs_to_s3.GCSHook') def test_execute_incremental(self, mock_hook, mock_hook2): mock_hook.return_value.list.return_value = MOCK_FILES mock_hook.return_value.download.return_value = b"testing" @@ -71,7 +71,7 @@ def test_execute_incremental(self, mock_hook, mock_hook2): # Test2: All the files are already in origin and destination without replace @mock_s3 @mock.patch('airflow.providers.google.cloud.operators.gcs.GCSHook') - @mock.patch('airflow.providers.amazon.aws.operators.gcs_to_s3.GCSHook') + @mock.patch('airflow.providers.amazon.aws.transfers.gcs_to_s3.GCSHook') def test_execute_without_replace(self, mock_hook, mock_hook2): mock_hook.return_value.list.return_value = MOCK_FILES mock_hook.return_value.download.return_value = b"testing" @@ -102,7 +102,7 @@ def test_execute_without_replace(self, mock_hook, mock_hook2): # Test3: There are no files in destination bucket @mock_s3 @mock.patch('airflow.providers.google.cloud.operators.gcs.GCSHook') - @mock.patch('airflow.providers.amazon.aws.operators.gcs_to_s3.GCSHook') + @mock.patch('airflow.providers.amazon.aws.transfers.gcs_to_s3.GCSHook') def test_execute(self, mock_hook, mock_hook2): 
mock_hook.return_value.list.return_value = MOCK_FILES mock_hook.return_value.download.return_value = b"testing" @@ -131,7 +131,7 @@ def test_execute(self, mock_hook, mock_hook2): # Test4: Destination and Origin are in sync but replace all files in destination @mock_s3 @mock.patch('airflow.providers.google.cloud.operators.gcs.GCSHook') - @mock.patch('airflow.providers.amazon.aws.operators.gcs_to_s3.GCSHook') + @mock.patch('airflow.providers.amazon.aws.transfers.gcs_to_s3.GCSHook') def test_execute_with_replace(self, mock_hook, mock_hook2): mock_hook.return_value.list.return_value = MOCK_FILES mock_hook.return_value.download.return_value = b"testing" @@ -162,7 +162,7 @@ def test_execute_with_replace(self, mock_hook, mock_hook2): # Test5: Incremental sync with replace @mock_s3 @mock.patch('airflow.providers.google.cloud.operators.gcs.GCSHook') - @mock.patch('airflow.providers.amazon.aws.operators.gcs_to_s3.GCSHook') + @mock.patch('airflow.providers.amazon.aws.transfers.gcs_to_s3.GCSHook') def test_execute_incremental_with_replace(self, mock_hook, mock_hook2): mock_hook.return_value.list.return_value = MOCK_FILES mock_hook.return_value.download.return_value = b"testing" diff --git a/tests/providers/amazon/aws/operators/test_google_api_to_s3_transfer.py b/tests/providers/amazon/aws/transfers/test_google_api_to_s3.py similarity index 81% rename from tests/providers/amazon/aws/operators/test_google_api_to_s3_transfer.py rename to tests/providers/amazon/aws/transfers/test_google_api_to_s3.py index 8db9c4429a903..0283937d6ea0a 100644 --- a/tests/providers/amazon/aws/operators/test_google_api_to_s3_transfer.py +++ b/tests/providers/amazon/aws/transfers/test_google_api_to_s3.py @@ -22,11 +22,11 @@ from airflow import models from airflow.configuration import load_test_config from airflow.models.xcom import MAX_XCOM_SIZE -from airflow.providers.amazon.aws.operators.google_api_to_s3_transfer import GoogleApiToS3TransferOperator +from airflow.providers.amazon.aws.transfers.google_api_to_s3 import GoogleApiToS3Operator from airflow.utils import db -class TestGoogleApiToS3Transfer(unittest.TestCase): +class TestGoogleApiToS3(unittest.TestCase): def setUp(self): load_test_config() @@ -66,13 +66,13 @@ def setUp(self): 'dag': None } - @patch('airflow.providers.amazon.aws.operators.google_api_to_s3_transfer.GoogleDiscoveryApiHook.query') - @patch('airflow.providers.amazon.aws.operators.google_api_to_s3_transfer.S3Hook.load_string') - @patch('airflow.providers.amazon.aws.operators.google_api_to_s3_transfer.json.dumps') + @patch('airflow.providers.amazon.aws.transfers.google_api_to_s3.GoogleDiscoveryApiHook.query') + @patch('airflow.providers.amazon.aws.transfers.google_api_to_s3.S3Hook.load_string') + @patch('airflow.providers.amazon.aws.transfers.google_api_to_s3.json.dumps') def test_execute(self, mock_json_dumps, mock_s3_hook_load_string, mock_google_api_hook_query): context = {'task_instance': Mock()} - GoogleApiToS3TransferOperator(**self.kwargs).execute(context) + GoogleApiToS3Operator(**self.kwargs).execute(context) mock_google_api_hook_query.assert_called_once_with( endpoint=self.kwargs['google_api_endpoint_path'], @@ -89,9 +89,9 @@ def test_execute(self, mock_json_dumps, mock_s3_hook_load_string, mock_google_ap context['task_instance'].xcom_pull.assert_not_called() context['task_instance'].xcom_push.assert_not_called() - @patch('airflow.providers.amazon.aws.operators.google_api_to_s3_transfer.GoogleDiscoveryApiHook.query') - 
@patch('airflow.providers.amazon.aws.operators.google_api_to_s3_transfer.S3Hook.load_string') - @patch('airflow.providers.amazon.aws.operators.google_api_to_s3_transfer.json.dumps') + @patch('airflow.providers.amazon.aws.transfers.google_api_to_s3.GoogleDiscoveryApiHook.query') + @patch('airflow.providers.amazon.aws.transfers.google_api_to_s3.S3Hook.load_string') + @patch('airflow.providers.amazon.aws.transfers.google_api_to_s3.json.dumps') def test_execute_with_xcom(self, mock_json_dumps, mock_s3_hook_load_string, mock_google_api_hook_query): context = {'task_instance': Mock()} xcom_kwargs = { @@ -101,7 +101,7 @@ def test_execute_with_xcom(self, mock_json_dumps, mock_s3_hook_load_string, mock } context['task_instance'].xcom_pull.return_value = {} - GoogleApiToS3TransferOperator(**self.kwargs, **xcom_kwargs).execute(context) + GoogleApiToS3Operator(**self.kwargs, **xcom_kwargs).execute(context) mock_google_api_hook_query.assert_called_once_with( endpoint=self.kwargs['google_api_endpoint_path'], @@ -124,11 +124,11 @@ def test_execute_with_xcom(self, mock_json_dumps, mock_s3_hook_load_string, mock value=mock_google_api_hook_query.return_value ) - @patch('airflow.providers.amazon.aws.operators.google_api_to_s3_transfer.GoogleDiscoveryApiHook.query') - @patch('airflow.providers.amazon.aws.operators.google_api_to_s3_transfer.S3Hook.load_string') - @patch('airflow.providers.amazon.aws.operators.google_api_to_s3_transfer.json.dumps') + @patch('airflow.providers.amazon.aws.transfers.google_api_to_s3.GoogleDiscoveryApiHook.query') + @patch('airflow.providers.amazon.aws.transfers.google_api_to_s3.S3Hook.load_string') + @patch('airflow.providers.amazon.aws.transfers.google_api_to_s3.json.dumps') @patch( - 'airflow.providers.amazon.aws.operators.google_api_to_s3_transfer.sys.getsizeof', + 'airflow.providers.amazon.aws.transfers.google_api_to_s3.sys.getsizeof', return_value=MAX_XCOM_SIZE ) def test_execute_with_xcom_exceeded_max_xcom_size( @@ -147,7 +147,7 @@ def test_execute_with_xcom_exceeded_max_xcom_size( context['task_instance'].xcom_pull.return_value = {} self.assertRaises(RuntimeError, - GoogleApiToS3TransferOperator(**self.kwargs, **xcom_kwargs).execute, context) + GoogleApiToS3Operator(**self.kwargs, **xcom_kwargs).execute, context) mock_google_api_hook_query.assert_called_once_with( endpoint=self.kwargs['google_api_endpoint_path'], diff --git a/tests/providers/amazon/aws/operators/test_google_api_to_s3_transfer_system.py b/tests/providers/amazon/aws/transfers/test_google_api_to_s3_system.py similarity index 100% rename from tests/providers/amazon/aws/operators/test_google_api_to_s3_transfer_system.py rename to tests/providers/amazon/aws/transfers/test_google_api_to_s3_system.py diff --git a/tests/providers/amazon/aws/operators/test_hive_to_dynamodb.py b/tests/providers/amazon/aws/transfers/test_hive_to_dynamodb.py similarity index 93% rename from tests/providers/amazon/aws/operators/test_hive_to_dynamodb.py rename to tests/providers/amazon/aws/transfers/test_hive_to_dynamodb.py index bb75724e19cd2..b27a9c482029e 100644 --- a/tests/providers/amazon/aws/operators/test_hive_to_dynamodb.py +++ b/tests/providers/amazon/aws/transfers/test_hive_to_dynamodb.py @@ -24,7 +24,7 @@ import pandas as pd -import airflow.providers.amazon.aws.operators.hive_to_dynamodb +import airflow.providers.amazon.aws.transfers.hive_to_dynamodb from airflow.models.dag import DAG from airflow.providers.amazon.aws.hooks.aws_dynamodb import AwsDynamoDBHook @@ -38,7 +38,7 @@ mock_dynamodb2 = None -class 
TestHiveToDynamoDBTransferOperator(unittest.TestCase): +class TestHiveToDynamoDBOperator(unittest.TestCase): def setUp(self): args = {'owner': 'airflow', 'start_date': DEFAULT_DATE} @@ -84,7 +84,7 @@ def test_get_records_with_schema(self, mock_get_pandas_df): } ) - operator = airflow.providers.amazon.aws.operators.hive_to_dynamodb.HiveToDynamoDBTransferOperator( + operator = airflow.providers.amazon.aws.transfers.hive_to_dynamodb.HiveToDynamoDBOperator( sql=self.sql, table_name="test_airflow", task_id='hive_to_dynamodb_check', @@ -124,7 +124,7 @@ def test_pre_process_records_with_schema(self, mock_get_pandas_df): } ) - operator = airflow.providers.amazon.aws.operators.hive_to_dynamodb.HiveToDynamoDBTransferOperator( + operator = airflow.providers.amazon.aws.transfers.hive_to_dynamodb.HiveToDynamoDBOperator( sql=self.sql, table_name='test_airflow', task_id='hive_to_dynamodb_check', diff --git a/tests/providers/amazon/aws/operators/test_imap_attachment_to_s3.py b/tests/providers/amazon/aws/transfers/test_imap_attachment_to_s3.py similarity index 92% rename from tests/providers/amazon/aws/operators/test_imap_attachment_to_s3.py rename to tests/providers/amazon/aws/transfers/test_imap_attachment_to_s3.py index 8cf35df12702f..18e87887d24f6 100644 --- a/tests/providers/amazon/aws/operators/test_imap_attachment_to_s3.py +++ b/tests/providers/amazon/aws/transfers/test_imap_attachment_to_s3.py @@ -19,7 +19,7 @@ import unittest from unittest.mock import patch -from airflow.providers.amazon.aws.operators.imap_attachment_to_s3 import ImapAttachmentToS3Operator +from airflow.providers.amazon.aws.transfers.imap_attachment_to_s3 import ImapAttachmentToS3Operator class TestImapAttachmentToS3Operator(unittest.TestCase): @@ -36,8 +36,8 @@ def setUp(self): dag=None ) - @patch('airflow.providers.amazon.aws.operators.imap_attachment_to_s3.S3Hook') - @patch('airflow.providers.amazon.aws.operators.imap_attachment_to_s3.ImapHook') + @patch('airflow.providers.amazon.aws.transfers.imap_attachment_to_s3.S3Hook') + @patch('airflow.providers.amazon.aws.transfers.imap_attachment_to_s3.ImapHook') def test_execute(self, mock_imap_hook, mock_s3_hook): mock_imap_hook.return_value.__enter__ = mock_imap_hook mock_imap_hook.return_value.retrieve_mail_attachments.return_value = [('test_file', b'Hello World')] diff --git a/tests/providers/amazon/aws/operators/test_imap_attachment_to_s3_system.py b/tests/providers/amazon/aws/transfers/test_imap_attachment_to_s3_system.py similarity index 100% rename from tests/providers/amazon/aws/operators/test_imap_attachment_to_s3_system.py rename to tests/providers/amazon/aws/transfers/test_imap_attachment_to_s3_system.py diff --git a/tests/providers/amazon/aws/operators/test_mongo_to_s3.py b/tests/providers/amazon/aws/transfers/test_mongo_to_s3.py similarity index 95% rename from tests/providers/amazon/aws/operators/test_mongo_to_s3.py rename to tests/providers/amazon/aws/transfers/test_mongo_to_s3.py index ea39888d501f7..81db1746f47d9 100644 --- a/tests/providers/amazon/aws/operators/test_mongo_to_s3.py +++ b/tests/providers/amazon/aws/transfers/test_mongo_to_s3.py @@ -21,7 +21,7 @@ from airflow.models import TaskInstance from airflow.models.dag import DAG -from airflow.providers.amazon.aws.operators.mongo_to_s3 import MongoToS3Operator +from airflow.providers.amazon.aws.transfers.mongo_to_s3 import MongoToS3Operator from airflow.utils import timezone TASK_ID = 'test_mongo_to_s3_operator' @@ -83,8 +83,8 @@ def test_render_template(self): getattr(self.mock_operator, 'mongo_query') ) - 
@mock.patch('airflow.providers.amazon.aws.operators.mongo_to_s3.MongoHook') - @mock.patch('airflow.providers.amazon.aws.operators.mongo_to_s3.S3Hook') + @mock.patch('airflow.providers.amazon.aws.transfers.mongo_to_s3.MongoHook') + @mock.patch('airflow.providers.amazon.aws.transfers.mongo_to_s3.S3Hook') def test_execute(self, mock_s3_hook, mock_mongo_hook): operator = self.mock_operator diff --git a/tests/providers/amazon/aws/operators/test_redshift_to_s3.py b/tests/providers/amazon/aws/transfers/test_redshift_to_s3.py similarity index 95% rename from tests/providers/amazon/aws/operators/test_redshift_to_s3.py rename to tests/providers/amazon/aws/transfers/test_redshift_to_s3.py index f1ee3e2e40905..84783fa307499 100644 --- a/tests/providers/amazon/aws/operators/test_redshift_to_s3.py +++ b/tests/providers/amazon/aws/transfers/test_redshift_to_s3.py @@ -23,7 +23,7 @@ from boto3.session import Session from parameterized import parameterized -from airflow.providers.amazon.aws.operators.redshift_to_s3 import RedshiftToS3TransferOperator +from airflow.providers.amazon.aws.transfers.redshift_to_s3 import RedshiftToS3Operator from tests.test_utils.asserts import assert_equal_ignore_multiple_spaces @@ -45,7 +45,7 @@ def test_execute(self, table_as_file_name, expected_s3_key, mock_run, mock_sessi s3_key = "key" unload_options = ['HEADER', ] - RedshiftToS3TransferOperator( + RedshiftToS3Operator( schema=schema, table=table, s3_bucket=s3_bucket, diff --git a/tests/providers/amazon/aws/operators/test_s3_to_redshift.py b/tests/providers/amazon/aws/transfers/test_s3_to_redshift.py similarity index 94% rename from tests/providers/amazon/aws/operators/test_s3_to_redshift.py rename to tests/providers/amazon/aws/transfers/test_s3_to_redshift.py index 992c1330c0676..32da61d955ef5 100644 --- a/tests/providers/amazon/aws/operators/test_s3_to_redshift.py +++ b/tests/providers/amazon/aws/transfers/test_s3_to_redshift.py @@ -22,7 +22,7 @@ from boto3.session import Session -from airflow.providers.amazon.aws.operators.s3_to_redshift import S3ToRedshiftTransferOperator +from airflow.providers.amazon.aws.transfers.s3_to_redshift import S3ToRedshiftOperator from tests.test_utils.asserts import assert_equal_ignore_multiple_spaces @@ -41,7 +41,7 @@ def test_execute(self, mock_run, mock_session): s3_key = "key" copy_options = "" - op = S3ToRedshiftTransferOperator( + op = S3ToRedshiftOperator( schema=schema, table=table, s3_bucket=s3_bucket, diff --git a/tests/providers/amazon/aws/operators/test_s3_to_redshift_system.py b/tests/providers/amazon/aws/transfers/test_s3_to_redshift_system.py similarity index 100% rename from tests/providers/amazon/aws/operators/test_s3_to_redshift_system.py rename to tests/providers/amazon/aws/transfers/test_s3_to_redshift_system.py diff --git a/tests/providers/amazon/aws/operators/test_s3_to_sftp.py b/tests/providers/amazon/aws/transfers/test_s3_to_sftp.py similarity index 98% rename from tests/providers/amazon/aws/operators/test_s3_to_sftp.py rename to tests/providers/amazon/aws/transfers/test_s3_to_sftp.py index 24eda72cc6b19..de9c9e5e4b005 100644 --- a/tests/providers/amazon/aws/operators/test_s3_to_sftp.py +++ b/tests/providers/amazon/aws/transfers/test_s3_to_sftp.py @@ -22,7 +22,7 @@ from moto import mock_s3 from airflow.models import DAG, TaskInstance -from airflow.providers.amazon.aws.operators.s3_to_sftp import S3ToSFTPOperator +from airflow.providers.amazon.aws.transfers.s3_to_sftp import S3ToSFTPOperator from airflow.providers.ssh.operators.ssh import SSHOperator from 
airflow.utils import timezone from airflow.utils.timezone import datetime diff --git a/tests/providers/amazon/aws/operators/test_sftp_to_s3.py b/tests/providers/amazon/aws/transfers/test_sftp_to_s3.py similarity index 98% rename from tests/providers/amazon/aws/operators/test_sftp_to_s3.py rename to tests/providers/amazon/aws/transfers/test_sftp_to_s3.py index ebd5cdefe3821..3621104afce0c 100644 --- a/tests/providers/amazon/aws/operators/test_sftp_to_s3.py +++ b/tests/providers/amazon/aws/transfers/test_sftp_to_s3.py @@ -23,7 +23,7 @@ from airflow.models import DAG, TaskInstance from airflow.providers.amazon.aws.hooks.s3 import S3Hook -from airflow.providers.amazon.aws.operators.sftp_to_s3 import SFTPToS3Operator +from airflow.providers.amazon.aws.transfers.sftp_to_s3 import SFTPToS3Operator from airflow.providers.ssh.hooks.ssh import SSHHook from airflow.providers.ssh.operators.ssh import SSHOperator from airflow.utils import timezone diff --git a/tests/providers/apache/druid/transfers/__init__.py b/tests/providers/apache/druid/transfers/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/providers/apache/druid/transfers/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/tests/providers/apache/druid/operators/test_hive_to_druid.py b/tests/providers/apache/druid/transfers/test_hive_to_druid.py similarity index 97% rename from tests/providers/apache/druid/operators/test_hive_to_druid.py rename to tests/providers/apache/druid/transfers/test_hive_to_druid.py index 11433e74f9f20..8951fe66997f5 100644 --- a/tests/providers/apache/druid/operators/test_hive_to_druid.py +++ b/tests/providers/apache/druid/transfers/test_hive_to_druid.py @@ -23,7 +23,7 @@ import requests_mock from airflow.models.dag import DAG -from airflow.providers.apache.druid.operators.hive_to_druid import HiveToDruidTransferOperator +from airflow.providers.apache.druid.transfers.hive_to_druid import HiveToDruidOperator class TestDruidHook(unittest.TestCase): @@ -74,7 +74,7 @@ def setUp(self): session.mount('mock', adapter) def test_construct_ingest_query(self): - operator = HiveToDruidTransferOperator( + operator = HiveToDruidOperator( task_id='hive_to_druid', dag=self.dag, **self.hook_config diff --git a/tests/providers/apache/hive/transfers/__init__.py b/tests/providers/apache/hive/transfers/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/providers/apache/hive/transfers/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. 
The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/tests/providers/apache/hive/operators/test_hive_to_mysql.py b/tests/providers/apache/hive/transfers/test_hive_to_mysql.py similarity index 83% rename from tests/providers/apache/hive/operators/test_hive_to_mysql.py rename to tests/providers/apache/hive/transfers/test_hive_to_mysql.py index 25a79d254d4c8..1a437d8f3dc03 100644 --- a/tests/providers/apache/hive/operators/test_hive_to_mysql.py +++ b/tests/providers/apache/hive/transfers/test_hive_to_mysql.py @@ -20,7 +20,7 @@ import unittest from unittest.mock import MagicMock, PropertyMock, patch -from airflow.providers.apache.hive.operators.hive_to_mysql import HiveToMySqlTransferOperator +from airflow.providers.apache.hive.transfers.hive_to_mysql import HiveToMySqlOperator from airflow.utils import timezone from airflow.utils.operator_helpers import context_to_airflow_vars from tests.providers.apache.hive import TestHiveEnvironment @@ -41,10 +41,10 @@ def setUp(self): ) super().setUp() - @patch('airflow.providers.apache.hive.operators.hive_to_mysql.MySqlHook') - @patch('airflow.providers.apache.hive.operators.hive_to_mysql.HiveServer2Hook') + @patch('airflow.providers.apache.hive.transfers.hive_to_mysql.MySqlHook') + @patch('airflow.providers.apache.hive.transfers.hive_to_mysql.HiveServer2Hook') def test_execute(self, mock_hive_hook, mock_mysql_hook): - HiveToMySqlTransferOperator(**self.kwargs).execute(context={}) + HiveToMySqlOperator(**self.kwargs).execute(context={}) mock_hive_hook.assert_called_once_with(hiveserver2_conn_id=self.kwargs['hiveserver2_conn_id']) mock_hive_hook.return_value.get_records.assert_called_once_with('sql', hive_conf={}) @@ -54,33 +54,33 @@ def test_execute(self, mock_hive_hook, mock_mysql_hook): rows=mock_hive_hook.return_value.get_records.return_value ) - @patch('airflow.providers.apache.hive.operators.hive_to_mysql.MySqlHook') - @patch('airflow.providers.apache.hive.operators.hive_to_mysql.HiveServer2Hook') + @patch('airflow.providers.apache.hive.transfers.hive_to_mysql.MySqlHook') + @patch('airflow.providers.apache.hive.transfers.hive_to_mysql.HiveServer2Hook') def test_execute_mysql_preoperator(self, mock_hive_hook, mock_mysql_hook): self.kwargs.update(dict(mysql_preoperator='preoperator')) - HiveToMySqlTransferOperator(**self.kwargs).execute(context={}) + HiveToMySqlOperator(**self.kwargs).execute(context={}) mock_mysql_hook.return_value.run.assert_called_once_with(self.kwargs['mysql_preoperator']) - @patch('airflow.providers.apache.hive.operators.hive_to_mysql.MySqlHook') - @patch('airflow.providers.apache.hive.operators.hive_to_mysql.HiveServer2Hook') + @patch('airflow.providers.apache.hive.transfers.hive_to_mysql.MySqlHook') + @patch('airflow.providers.apache.hive.transfers.hive_to_mysql.HiveServer2Hook') def test_execute_with_mysql_postoperator(self, mock_hive_hook, mock_mysql_hook): self.kwargs.update(dict(mysql_postoperator='postoperator')) - HiveToMySqlTransferOperator(**self.kwargs).execute(context={}) + 
HiveToMySqlOperator(**self.kwargs).execute(context={}) mock_mysql_hook.return_value.run.assert_called_once_with(self.kwargs['mysql_postoperator']) - @patch('airflow.providers.apache.hive.operators.hive_to_mysql.MySqlHook') - @patch('airflow.providers.apache.hive.operators.hive_to_mysql.NamedTemporaryFile') - @patch('airflow.providers.apache.hive.operators.hive_to_mysql.HiveServer2Hook') + @patch('airflow.providers.apache.hive.transfers.hive_to_mysql.MySqlHook') + @patch('airflow.providers.apache.hive.transfers.hive_to_mysql.NamedTemporaryFile') + @patch('airflow.providers.apache.hive.transfers.hive_to_mysql.HiveServer2Hook') def test_execute_bulk_load(self, mock_hive_hook, mock_tmp_file, mock_mysql_hook): type(mock_tmp_file).name = PropertyMock(return_value='tmp_file') context = {} self.kwargs.update(dict(bulk_load=True)) - HiveToMySqlTransferOperator(**self.kwargs).execute(context=context) + HiveToMySqlOperator(**self.kwargs).execute(context=context) mock_tmp_file.assert_called_once_with() mock_hive_hook.return_value.to_csv.assert_called_once_with( @@ -97,7 +97,7 @@ def test_execute_bulk_load(self, mock_hive_hook, mock_tmp_file, mock_mysql_hook) ) mock_tmp_file.return_value.close.assert_called_once_with() - @patch('airflow.providers.apache.hive.operators.hive_to_mysql.MySqlHook') + @patch('airflow.providers.apache.hive.transfers.hive_to_mysql.MySqlHook') def test_execute_with_hive_conf(self, mock_mysql_hook): context = {} mock_hive_hook = MockHiveServer2Hook() @@ -105,9 +105,9 @@ def test_execute_with_hive_conf(self, mock_mysql_hook): self.kwargs.update(dict(hive_conf={'mapreduce.job.queuename': 'fake_queue'})) - with patch('airflow.providers.apache.hive.operators.hive_to_mysql.HiveServer2Hook', + with patch('airflow.providers.apache.hive.transfers.hive_to_mysql.HiveServer2Hook', return_value=mock_hive_hook): - HiveToMySqlTransferOperator(**self.kwargs).execute(context=context) + HiveToMySqlOperator(**self.kwargs).execute(context=context) hive_conf = context_to_airflow_vars(context) hive_conf.update(self.kwargs['hive_conf']) @@ -130,12 +130,12 @@ def test_hive_to_mysql(self): mock_mysql_hook.run = MagicMock() mock_mysql_hook.insert_rows = MagicMock() - with patch('airflow.providers.apache.hive.operators.hive_to_mysql.HiveServer2Hook', + with patch('airflow.providers.apache.hive.transfers.hive_to_mysql.HiveServer2Hook', return_value=mock_hive_hook): - with patch('airflow.providers.apache.hive.operators.hive_to_mysql.MySqlHook', + with patch('airflow.providers.apache.hive.transfers.hive_to_mysql.MySqlHook', return_value=mock_mysql_hook): - op = HiveToMySqlTransferOperator( + op = HiveToMySqlOperator( mysql_conn_id='airflow_db', task_id='hive_to_mysql_check', sql=""" diff --git a/tests/providers/apache/hive/operators/test_hive_to_samba.py b/tests/providers/apache/hive/transfers/test_hive_to_samba.py similarity index 87% rename from tests/providers/apache/hive/operators/test_hive_to_samba.py rename to tests/providers/apache/hive/transfers/test_hive_to_samba.py index f14ae6da7f78a..dbd71f567f51c 100644 --- a/tests/providers/apache/hive/operators/test_hive_to_samba.py +++ b/tests/providers/apache/hive/transfers/test_hive_to_samba.py @@ -19,7 +19,7 @@ import unittest from unittest.mock import MagicMock, Mock, PropertyMock, patch -from airflow.providers.apache.hive.operators.hive_to_samba import Hive2SambaOperator +from airflow.providers.apache.hive.transfers.hive_to_samba import HiveToSambaOperator from airflow.utils.operator_helpers import context_to_airflow_vars from 
tests.providers.apache.hive import DEFAULT_DATE, TestHiveEnvironment from tests.test_utils.mock_hooks import MockHiveServer2Hook, MockSambaHook @@ -37,15 +37,15 @@ def setUp(self): ) super().setUp() - @patch('airflow.providers.apache.hive.operators.hive_to_samba.SambaHook') - @patch('airflow.providers.apache.hive.operators.hive_to_samba.HiveServer2Hook') - @patch('airflow.providers.apache.hive.operators.hive_to_samba.NamedTemporaryFile') + @patch('airflow.providers.apache.hive.transfers.hive_to_samba.SambaHook') + @patch('airflow.providers.apache.hive.transfers.hive_to_samba.HiveServer2Hook') + @patch('airflow.providers.apache.hive.transfers.hive_to_samba.NamedTemporaryFile') def test_execute(self, mock_tmp_file, mock_hive_hook, mock_samba_hook): type(mock_tmp_file).name = PropertyMock(return_value='tmp_file') mock_tmp_file.return_value.__enter__ = Mock(return_value=mock_tmp_file) context = {} - Hive2SambaOperator(**self.kwargs).execute(context) + HiveToSambaOperator(**self.kwargs).execute(context) mock_hive_hook.assert_called_once_with( hiveserver2_conn_id=self.kwargs['hiveserver2_conn_id']) @@ -63,7 +63,7 @@ def test_execute(self, mock_tmp_file, mock_hive_hook, mock_samba_hook): "Skipped because AIRFLOW_RUNALL_TESTS is not set") @patch('tempfile.tempdir', '/tmp/') @patch('tempfile._RandomNameSequence.__next__') - @patch('airflow.providers.apache.hive.operators.hive_to_samba.HiveServer2Hook', + @patch('airflow.providers.apache.hive.transfers.hive_to_samba.HiveServer2Hook', side_effect=MockHiveServer2Hook) def test_hive2samba(self, mock_hive_server_hook, mock_temp_dir): mock_temp_dir.return_value = "tst" @@ -71,10 +71,10 @@ def test_hive2samba(self, mock_hive_server_hook, mock_temp_dir): samba_hook = MockSambaHook(self.kwargs['samba_conn_id']) samba_hook.upload = MagicMock() - with patch('airflow.providers.apache.hive.operators.hive_to_samba.SambaHook', + with patch('airflow.providers.apache.hive.transfers.hive_to_samba.SambaHook', return_value=samba_hook): samba_hook.conn.upload = MagicMock() - op = Hive2SambaOperator( + op = HiveToSambaOperator( task_id='hive2samba_check', samba_conn_id='tableau_samba', hql="SELECT * FROM airflow.static_babynames LIMIT 10000", diff --git a/tests/providers/apache/hive/operators/test_mssql_to_hive.py b/tests/providers/apache/hive/transfers/test_mssql_to_hive.py similarity index 83% rename from tests/providers/apache/hive/operators/test_mssql_to_hive.py rename to tests/providers/apache/hive/transfers/test_mssql_to_hive.py index fb7d2ceb6d185..e6795ea8b34b1 100644 --- a/tests/providers/apache/hive/operators/test_mssql_to_hive.py +++ b/tests/providers/apache/hive/transfers/test_mssql_to_hive.py @@ -26,7 +26,7 @@ if PY38: MsSqlToHiveTransferOperator = None else: - from airflow.providers.apache.hive.operators.mssql_to_hive import MsSqlToHiveTransferOperator + from airflow.providers.apache.hive.transfers.mssql_to_hive import MsSqlToHiveOperator try: import pymssql @@ -48,32 +48,32 @@ def setUp(self): # pylint: disable=c-extension-no-member def test_type_map_binary(self): - mapped_type = MsSqlToHiveTransferOperator( + mapped_type = MsSqlToHiveOperator( **self.kwargs).type_map(pymssql.BINARY.value) # pylint: disable=c-extension-no-member self.assertEqual(mapped_type, 'INT') def test_type_map_decimal(self): - mapped_type = MsSqlToHiveTransferOperator( + mapped_type = MsSqlToHiveOperator( **self.kwargs).type_map(pymssql.DECIMAL.value) # pylint: disable=c-extension-no-member self.assertEqual(mapped_type, 'FLOAT') def test_type_map_number(self): - mapped_type = 
MsSqlToHiveTransferOperator( + mapped_type = MsSqlToHiveOperator( **self.kwargs).type_map(pymssql.NUMBER.value) # pylint: disable=c-extension-no-member self.assertEqual(mapped_type, 'INT') def test_type_map_string(self): - mapped_type = MsSqlToHiveTransferOperator(**self.kwargs).type_map(None) + mapped_type = MsSqlToHiveOperator(**self.kwargs).type_map(None) self.assertEqual(mapped_type, 'STRING') - @patch('airflow.providers.apache.hive.operators.mssql_to_hive.csv') - @patch('airflow.providers.apache.hive.operators.mssql_to_hive.NamedTemporaryFile') - @patch('airflow.providers.apache.hive.operators.mssql_to_hive.MsSqlHook') - @patch('airflow.providers.apache.hive.operators.mssql_to_hive.HiveCliHook') + @patch('airflow.providers.apache.hive.transfers.mssql_to_hive.csv') + @patch('airflow.providers.apache.hive.transfers.mssql_to_hive.NamedTemporaryFile') + @patch('airflow.providers.apache.hive.transfers.mssql_to_hive.MsSqlHook') + @patch('airflow.providers.apache.hive.transfers.mssql_to_hive.HiveCliHook') def test_execute(self, mock_hive_hook, mock_mssql_hook, mock_tmp_file, mock_csv): type(mock_tmp_file).name = PropertyMock(return_value='tmp_file') mock_tmp_file.return_value.__enter__ = Mock(return_value=mock_tmp_file) @@ -81,7 +81,7 @@ def test_execute(self, mock_hive_hook, mock_mssql_hook, mock_tmp_file, mock_csv) mock_mssql_hook_cursor = mock_mssql_hook_get_conn.return_value.cursor.return_value.__enter__ mock_mssql_hook_cursor.return_value.description = [('te', 'st')] - mssql_to_hive_transfer = MsSqlToHiveTransferOperator(**self.kwargs) + mssql_to_hive_transfer = MsSqlToHiveOperator(**self.kwargs) mssql_to_hive_transfer.execute(context={}) mock_mssql_hook_cursor.return_value.execute.assert_called_once_with(mssql_to_hive_transfer.sql) @@ -101,10 +101,10 @@ def test_execute(self, mock_hive_hook, mock_mssql_hook, mock_tmp_file, mock_csv) recreate=mssql_to_hive_transfer.recreate, tblproperties=mssql_to_hive_transfer.tblproperties) - @patch('airflow.providers.apache.hive.operators.mssql_to_hive.csv') - @patch('airflow.providers.apache.hive.operators.mssql_to_hive.NamedTemporaryFile') - @patch('airflow.providers.apache.hive.operators.mssql_to_hive.MsSqlHook') - @patch('airflow.providers.apache.hive.operators.mssql_to_hive.HiveCliHook') + @patch('airflow.providers.apache.hive.transfers.mssql_to_hive.csv') + @patch('airflow.providers.apache.hive.transfers.mssql_to_hive.NamedTemporaryFile') + @patch('airflow.providers.apache.hive.transfers.mssql_to_hive.MsSqlHook') + @patch('airflow.providers.apache.hive.transfers.mssql_to_hive.HiveCliHook') def test_execute_empty_description_field(self, mock_hive_hook, mock_mssql_hook, mock_tmp_file, mock_csv): type(mock_tmp_file).name = PropertyMock(return_value='tmp_file') mock_tmp_file.return_value.__enter__ = Mock(return_value=mock_tmp_file) @@ -112,7 +112,7 @@ def test_execute_empty_description_field(self, mock_hive_hook, mock_mssql_hook, mock_mssql_hook_cursor = mock_mssql_hook_get_conn.return_value.cursor.return_value.__enter__ mock_mssql_hook_cursor.return_value.description = [('', '')] - mssql_to_hive_transfer = MsSqlToHiveTransferOperator(**self.kwargs) + mssql_to_hive_transfer = MsSqlToHiveOperator(**self.kwargs) mssql_to_hive_transfer.execute(context={}) field_dict = OrderedDict() diff --git a/tests/providers/apache/hive/operators/test_mysql_to_hive.py b/tests/providers/apache/hive/transfers/test_mysql_to_hive.py similarity index 98% rename from tests/providers/apache/hive/operators/test_mysql_to_hive.py rename to 
tests/providers/apache/hive/transfers/test_mysql_to_hive.py index 680bc25205843..e5d6ec6fe048f 100644 --- a/tests/providers/apache/hive/operators/test_mysql_to_hive.py +++ b/tests/providers/apache/hive/transfers/test_mysql_to_hive.py @@ -23,7 +23,7 @@ import pytest from airflow.models.dag import DAG -from airflow.providers.apache.hive.operators.mysql_to_hive import MySqlToHiveTransferOperator +from airflow.providers.apache.hive.transfers.mysql_to_hive import MySqlToHiveOperator from airflow.providers.mysql.hooks.mysql import MySqlHook from airflow.utils import timezone from tests.test_utils.mock_hooks import MockHiveServer2Hook @@ -134,7 +134,7 @@ def test_mysql_to_hive(self, mock_popen, mock_temp_dir): with mock.patch.dict('os.environ', self.env_vars): sql = "SELECT * FROM baby_names LIMIT 1000;" - op = MySqlToHiveTransferOperator( + op = MySqlToHiveOperator( task_id='test_m2h', hive_cli_conn_id='hive_cli_default', sql=sql, @@ -172,7 +172,7 @@ def test_mysql_to_hive_partition(self, mock_popen, mock_temp_dir): with mock.patch.dict('os.environ', self.env_vars): sql = "SELECT * FROM baby_names LIMIT 1000;" - op = MySqlToHiveTransferOperator( + op = MySqlToHiveOperator( task_id='test_m2h', hive_cli_conn_id='hive_cli_default', sql=sql, @@ -212,7 +212,7 @@ def test_mysql_to_hive_tblproperties(self, mock_popen, mock_temp_dir): with mock.patch.dict('os.environ', self.env_vars): sql = "SELECT * FROM baby_names LIMIT 1000;" - op = MySqlToHiveTransferOperator( + op = MySqlToHiveOperator( task_id='test_m2h', hive_cli_conn_id='hive_cli_default', sql=sql, @@ -261,7 +261,7 @@ def test_mysql_to_hive_type_conversion(self, mock_load_file): ) """.format(mysql_table)) - op = MySqlToHiveTransferOperator( + op = MySqlToHiveOperator( task_id='test_m2h', hive_cli_conn_id='hive_cli_default', sql="SELECT * FROM {}".format(mysql_table), @@ -318,7 +318,7 @@ def test_mysql_to_hive_verify_csv_special_char(self, mock_popen, mock_temp_dir): with mock.patch.dict('os.environ', self.env_vars): import unicodecsv as csv - op = MySqlToHiveTransferOperator( + op = MySqlToHiveOperator( task_id='test_m2h', hive_cli_conn_id='hive_cli_default', sql="SELECT * FROM {}".format(mysql_table), @@ -410,7 +410,7 @@ def test_mysql_to_hive_verify_loaded_values(self, mock_popen, mock_temp_dir): """.format(mysql_table, *minmax)) with mock.patch.dict('os.environ', self.env_vars): - op = MySqlToHiveTransferOperator( + op = MySqlToHiveOperator( task_id='test_m2h', hive_cli_conn_id='hive_cli_default', sql="SELECT * FROM {}".format(mysql_table), diff --git a/tests/providers/apache/hive/operators/test_s3_to_hive.py b/tests/providers/apache/hive/transfers/test_s3_to_hive.py similarity index 93% rename from tests/providers/apache/hive/operators/test_s3_to_hive.py rename to tests/providers/apache/hive/transfers/test_s3_to_hive.py index d475b25ed1795..67aec2a2330a8 100644 --- a/tests/providers/apache/hive/operators/test_s3_to_hive.py +++ b/tests/providers/apache/hive/transfers/test_s3_to_hive.py @@ -30,7 +30,7 @@ import mock from airflow.exceptions import AirflowException -from airflow.providers.apache.hive.operators.s3_to_hive import S3ToHiveTransferOperator +from airflow.providers.apache.hive.transfers.s3_to_hive import S3ToHiveOperator try: import boto3 @@ -157,18 +157,18 @@ def test_bad_parameters(self): self.kwargs['check_headers'] = True self.kwargs['headers'] = False self.assertRaisesRegex(AirflowException, "To check_headers.*", - S3ToHiveTransferOperator, **self.kwargs) + S3ToHiveOperator, **self.kwargs) def test__get_top_row_as_list(self): 
self.kwargs['delimiter'] = '\t' fn_txt = self._get_fn('.txt', True) - header_list = S3ToHiveTransferOperator(**self.kwargs). \ + header_list = S3ToHiveOperator(**self.kwargs). \ _get_top_row_as_list(fn_txt) self.assertEqual(header_list, ['Sno', 'Some,Text'], msg="Top row from file doesnt matched expected value") self.kwargs['delimiter'] = ',' - header_list = S3ToHiveTransferOperator(**self.kwargs). \ + header_list = S3ToHiveOperator(**self.kwargs). \ _get_top_row_as_list(fn_txt) self.assertEqual(header_list, ['Sno\tSome', 'Text'], msg="Top row from file doesnt matched expected value") @@ -176,20 +176,20 @@ def test__get_top_row_as_list(self): def test__match_headers(self): self.kwargs['field_dict'] = OrderedDict([('Sno', 'BIGINT'), ('Some,Text', 'STRING')]) - self.assertTrue(S3ToHiveTransferOperator(**self.kwargs). + self.assertTrue(S3ToHiveOperator(**self.kwargs). _match_headers(['Sno', 'Some,Text']), msg="Header row doesnt match expected value") # Testing with different column order - self.assertFalse(S3ToHiveTransferOperator(**self.kwargs). + self.assertFalse(S3ToHiveOperator(**self.kwargs). _match_headers(['Some,Text', 'Sno']), msg="Header row doesnt match expected value") # Testing with extra column in header - self.assertFalse(S3ToHiveTransferOperator(**self.kwargs). + self.assertFalse(S3ToHiveOperator(**self.kwargs). _match_headers(['Sno', 'Some,Text', 'ExtraColumn']), msg="Header row doesnt match expected value") def test__delete_top_row_and_compress(self): - s32hive = S3ToHiveTransferOperator(**self.kwargs) + s32hive = S3ToHiveOperator(**self.kwargs) # Testing gz file type fn_txt = self._get_fn('.txt', True) gz_txt_nh = s32hive._delete_top_row_and_compress(fn_txt, @@ -208,7 +208,7 @@ def test__delete_top_row_and_compress(self): @unittest.skipIf(mock is None, 'mock package not present') @unittest.skipIf(mock_s3 is None, 'moto package not present') - @mock.patch('airflow.providers.apache.hive.operators.s3_to_hive.HiveCliHook') + @mock.patch('airflow.providers.apache.hive.transfers.s3_to_hive.HiveCliHook') @mock_s3 def test_execute(self, mock_hiveclihook): conn = boto3.client('s3') @@ -234,12 +234,12 @@ def test_execute(self, mock_hiveclihook): self._check_file_equality(args[0], op_fn, ext), msg='{0} output file not as expected'.format(ext)) # Execute S3ToHiveTransfer - s32hive = S3ToHiveTransferOperator(**self.kwargs) + s32hive = S3ToHiveOperator(**self.kwargs) s32hive.execute(None) @unittest.skipIf(mock is None, 'mock package not present') @unittest.skipIf(mock_s3 is None, 'moto package not present') - @mock.patch('airflow.providers.apache.hive.operators.s3_to_hive.HiveCliHook') + @mock.patch('airflow.providers.apache.hive.transfers.s3_to_hive.HiveCliHook') @mock_s3 def test_execute_with_select_expression(self, mock_hiveclihook): conn = boto3.client('s3') @@ -278,7 +278,7 @@ def test_execute_with_select_expression(self, mock_hiveclihook): with mock.patch('airflow.providers.amazon.aws.hooks.s3.S3Hook.select_key', return_value="") as mock_select_key: # Execute S3ToHiveTransfer - s32hive = S3ToHiveTransferOperator(**self.kwargs) + s32hive = S3ToHiveOperator(**self.kwargs) s32hive.execute(None) mock_select_key.assert_called_once_with( diff --git a/tests/providers/apache/hive/transfers/test_vertica_to_hive.py b/tests/providers/apache/hive/transfers/test_vertica_to_hive.py new file mode 100644 index 0000000000000..a454030743c1b --- /dev/null +++ b/tests/providers/apache/hive/transfers/test_vertica_to_hive.py @@ -0,0 +1,68 @@ +# +# Licensed to the Apache Software Foundation (ASF) under 
one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +import datetime +import unittest +from unittest import mock + +from airflow.models.dag import DAG +from airflow.providers.apache.hive.transfers.vertica_to_hive import VerticaToHiveOperator + + +def mock_get_conn(): + commit_mock = mock.MagicMock( + ) + cursor_mock = mock.MagicMock( + execute=[], + fetchall=[['1', '2', '3']], + description=['a', 'b', 'c'], + iterate=[['1', '2', '3']], + ) + conn_mock = mock.MagicMock( + commit=commit_mock, + cursor=cursor_mock, + ) + return conn_mock + + +class TestVerticaToHiveTransfer(unittest.TestCase): + def setUp(self): + args = { + 'owner': 'airflow', + 'start_date': datetime.datetime(2017, 1, 1) + } + self.dag = DAG('test_dag_id', default_args=args) + + @mock.patch( + 'airflow.providers.apache.hive.transfers.vertica_to_hive.VerticaHook.get_conn', + side_effect=mock_get_conn) + @mock.patch( + 'airflow.providers.apache.hive.transfers.vertica_to_hive.HiveCliHook.load_file') + def test_select_insert_transfer(self, *args): + """ + Test selecting rows from Vertica into memory and + then loading them into Hive. + """ + task = VerticaToHiveOperator( + task_id='test_task_id', + sql='select a, b, c', + hive_table='test_table', + vertica_conn_id='test_vertica_conn_id', + hive_cli_conn_id='hive_cli_default', + dag=self.dag) + task.execute(None) diff --git a/tests/providers/google/ads/operators/test_ads.py b/tests/providers/google/ads/operators/test_ads.py index 5154e66462680..43f07ef87041d 100644 --- a/tests/providers/google/ads/operators/test_ads.py +++ b/tests/providers/google/ads/operators/test_ads.py @@ -16,7 +16,7 @@ # under the License.
from unittest import mock -from airflow.providers.google.ads.operators.ads import GoogleAdsListAccountsOperator, GoogleAdsToGcsOperator +from airflow.providers.google.ads.operators.ads import GoogleAdsListAccountsOperator CLIENT_IDS = ["1111111111", "2222222222"] BUCKET = "gs://test-google-ads-bucket" @@ -38,33 +38,6 @@ google_ads_conn_id = "google_ads_conn_id" -class TestGoogleAdsToGcsOperator: - @mock.patch("airflow.providers.google.ads.operators.ads.GoogleAdsHook") - @mock.patch("airflow.providers.google.ads.operators.ads.GCSHook") - def test_execute(self, mock_gcs_hook, mock_ads_hook): - op = GoogleAdsToGcsOperator( - gcp_conn_id=gcp_conn_id, - google_ads_conn_id=google_ads_conn_id, - client_ids=CLIENT_IDS, - query=QUERY, - attributes=FIELDS_TO_EXTRACT, - obj=GCS_OBJ_PATH, - bucket=BUCKET, - task_id="run_operator", - ) - op.execute({}) - mock_ads_hook.assert_called_once_with( - gcp_conn_id=gcp_conn_id, google_ads_conn_id=google_ads_conn_id - ) - mock_ads_hook.return_value.search.assert_called_once_with( - client_ids=CLIENT_IDS, query=QUERY, page_size=10000 - ) - mock_gcs_hook.assert_called_once_with(gcp_conn_id=gcp_conn_id) - mock_gcs_hook.return_value.upload.assert_called_once_with( - bucket_name=BUCKET, object_name=GCS_OBJ_PATH, filename=mock.ANY, gzip=False - ) - - class TestGoogleAdsListAccountsOperator: @mock.patch("airflow.providers.google.ads.operators.ads.GoogleAdsHook") @mock.patch("airflow.providers.google.ads.operators.ads.GCSHook") diff --git a/tests/providers/google/ads/transfers/__init__.py b/tests/providers/google/ads/transfers/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/providers/google/ads/transfers/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/tests/providers/google/ads/transfers/test_ads_to_gcs.py b/tests/providers/google/ads/transfers/test_ads_to_gcs.py new file mode 100644 index 0000000000000..947f4141e7ba8 --- /dev/null +++ b/tests/providers/google/ads/transfers/test_ads_to_gcs.py @@ -0,0 +1,50 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. 
See the License for the +# specific language governing permissions and limitations +# under the License. + +from unittest import mock + +from airflow.providers.google.ads.transfers.ads_to_gcs import GoogleAdsToGcsOperator +from tests.providers.google.ads.operators.test_ads import ( + BUCKET, CLIENT_IDS, FIELDS_TO_EXTRACT, GCS_OBJ_PATH, QUERY, gcp_conn_id, google_ads_conn_id, +) + + +class TestGoogleAdsToGcsOperator: + @mock.patch("airflow.providers.google.ads.transfers.ads_to_gcs.GoogleAdsHook") + @mock.patch("airflow.providers.google.ads.transfers.ads_to_gcs.GCSHook") + def test_execute(self, mock_gcs_hook, mock_ads_hook): + op = GoogleAdsToGcsOperator( + gcp_conn_id=gcp_conn_id, + google_ads_conn_id=google_ads_conn_id, + client_ids=CLIENT_IDS, + query=QUERY, + attributes=FIELDS_TO_EXTRACT, + obj=GCS_OBJ_PATH, + bucket=BUCKET, + task_id="run_operator", + ) + op.execute({}) + mock_ads_hook.assert_called_once_with( + gcp_conn_id=gcp_conn_id, google_ads_conn_id=google_ads_conn_id + ) + mock_ads_hook.return_value.search.assert_called_once_with( + client_ids=CLIENT_IDS, query=QUERY, page_size=10000 + ) + mock_gcs_hook.assert_called_once_with(gcp_conn_id=gcp_conn_id) + mock_gcs_hook.return_value.upload.assert_called_once_with( + bucket_name=BUCKET, object_name=GCS_OBJ_PATH, filename=mock.ANY, gzip=False + ) diff --git a/tests/providers/google/cloud/operators/test_gcs.py b/tests/providers/google/cloud/operators/test_gcs.py index e73de5faa1d8b..067157ea347d4 100644 --- a/tests/providers/google/cloud/operators/test_gcs.py +++ b/tests/providers/google/cloud/operators/test_gcs.py @@ -23,7 +23,7 @@ from airflow.providers.google.cloud.operators.gcs import ( GCSBucketCreateAclEntryOperator, GCSCreateBucketOperator, GCSDeleteBucketOperator, GCSDeleteObjectsOperator, GCSFileTransformOperator, GCSListObjectsOperator, - GCSObjectCreateAclEntryOperator, GCSToLocalOperator, + GCSObjectCreateAclEntryOperator, GCSSynchronizeBucketsOperator, ) TASK_ID = "test-gcs-operator" @@ -145,22 +145,6 @@ def test_delete_prefix(self, mock_hook): ) -class TestGoogleCloudStorageDownloadOperator(unittest.TestCase): - @mock.patch("airflow.providers.google.cloud.operators.gcs.GCSHook") - def test_execute(self, mock_hook): - operator = GCSToLocalOperator( - task_id=TASK_ID, - bucket=TEST_BUCKET, - object_name=TEST_OBJECT, - filename=LOCAL_FILE_PATH, - ) - - operator.execute(None) - mock_hook.return_value.download.assert_called_once_with( - bucket_name=TEST_BUCKET, object_name=TEST_OBJECT, filename=LOCAL_FILE_PATH - ) - - class TestGoogleCloudStorageListOperator(unittest.TestCase): @mock.patch("airflow.providers.google.cloud.operators.gcs.GCSHook") def test_execute(self, mock_hook): @@ -241,3 +225,35 @@ def test_delete_bucket(self, mock_hook): operator.execute(None) mock_hook.return_value.delete_bucket.assert_called_once_with(bucket_name=TEST_BUCKET, force=True) + + +class TestGoogleCloudStorageSync(unittest.TestCase): + + @mock.patch('airflow.providers.google.cloud.operators.gcs.GCSHook') + def test_execute(self, mock_hook): + task = GCSSynchronizeBucketsOperator( + task_id="task-id", + source_bucket="SOURCE_BUCKET", + destination_bucket="DESTINATION_BUCKET", + source_object="SOURCE_OBJECT", + destination_object="DESTINATION_OBJECT", + recursive=True, + delete_extra_files=True, + allow_overwrite=True, + gcp_conn_id="GCP_CONN_ID", + delegate_to="DELEGATE_TO", + ) + task.execute({}) + mock_hook.assert_called_once_with( + google_cloud_storage_conn_id='GCP_CONN_ID', + delegate_to='DELEGATE_TO' + ) + 
mock_hook.return_value.sync.assert_called_once_with( + source_bucket='SOURCE_BUCKET', + source_object='SOURCE_OBJECT', + destination_bucket='DESTINATION_BUCKET', + destination_object='DESTINATION_OBJECT', + delete_extra_files=True, + recursive=True, + allow_overwrite=True, + ) diff --git a/tests/providers/google/cloud/transfers/__init__.py b/tests/providers/google/cloud/transfers/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/providers/google/cloud/transfers/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/tests/providers/google/cloud/operators/test_adls_to_gcs.py b/tests/providers/google/cloud/transfers/test_adls_to_gcs.py similarity index 94% rename from tests/providers/google/cloud/operators/test_adls_to_gcs.py rename to tests/providers/google/cloud/transfers/test_adls_to_gcs.py index ae16af2074aee..72589a1259e30 100644 --- a/tests/providers/google/cloud/operators/test_adls_to_gcs.py +++ b/tests/providers/google/cloud/transfers/test_adls_to_gcs.py @@ -20,7 +20,7 @@ import mock -from airflow.providers.google.cloud.operators.adls_to_gcs import ADLSToGCSOperator +from airflow.providers.google.cloud.transfers.adls_to_gcs import ADLSToGCSOperator TASK_ID = 'test-adls-gcs-operator' ADLS_PATH_1 = '*' @@ -51,10 +51,10 @@ def test_init(self): self.assertEqual(operator.gcp_conn_id, GCS_CONN_ID) self.assertEqual(operator.azure_data_lake_conn_id, AZURE_CONN_ID) - @mock.patch('airflow.providers.google.cloud.operators.adls_to_gcs.AzureDataLakeHook') + @mock.patch('airflow.providers.google.cloud.transfers.adls_to_gcs.AzureDataLakeHook') @mock.patch('airflow.providers.microsoft.azure.operators.adls_list.AzureDataLakeHook') @mock.patch( - 'airflow.providers.google.cloud.operators.adls_to_gcs.GCSHook') + 'airflow.providers.google.cloud.transfers.adls_to_gcs.GCSHook') def test_execute(self, gcs_mock_hook, adls_one_mock_hook, adls_two_mock_hook): """Test the execute function when the run is successful.""" @@ -100,10 +100,10 @@ def test_execute(self, gcs_mock_hook, adls_one_mock_hook, adls_two_mock_hook): # we expect MOCK_FILES to be uploaded self.assertEqual(sorted(MOCK_FILES), sorted(uploaded_files)) - @mock.patch('airflow.providers.google.cloud.operators.adls_to_gcs.AzureDataLakeHook') + @mock.patch('airflow.providers.google.cloud.transfers.adls_to_gcs.AzureDataLakeHook') @mock.patch('airflow.providers.microsoft.azure.operators.adls_list.AzureDataLakeHook') @mock.patch( - 'airflow.providers.google.cloud.operators.adls_to_gcs.GCSHook') + 'airflow.providers.google.cloud.transfers.adls_to_gcs.GCSHook') def test_execute_with_gzip(self, gcs_mock_hook, adls_one_mock_hook, adls_two_mock_hook): """Test the execute function when the run is successful.""" diff --git 
a/tests/providers/google/cloud/operators/test_bigquery_to_bigquery.py b/tests/providers/google/cloud/transfers/test_bigquery_to_bigquery.py similarity index 95% rename from tests/providers/google/cloud/operators/test_bigquery_to_bigquery.py rename to tests/providers/google/cloud/transfers/test_bigquery_to_bigquery.py index 740d1ad35a6dd..049befdbfcd47 100644 --- a/tests/providers/google/cloud/operators/test_bigquery_to_bigquery.py +++ b/tests/providers/google/cloud/transfers/test_bigquery_to_bigquery.py @@ -20,7 +20,7 @@ import mock -from airflow.providers.google.cloud.operators.bigquery_to_bigquery import BigQueryToBigQueryOperator +from airflow.providers.google.cloud.transfers.bigquery_to_bigquery import BigQueryToBigQueryOperator TASK_ID = 'test-bq-create-table-operator' TEST_DATASET = 'test-dataset' @@ -28,7 +28,7 @@ class TestBigQueryToBigQueryOperator(unittest.TestCase): - @mock.patch('airflow.providers.google.cloud.operators.bigquery_to_bigquery.BigQueryHook') + @mock.patch('airflow.providers.google.cloud.transfers.bigquery_to_bigquery.BigQueryHook') def test_execute(self, mock_hook): source_project_dataset_tables = '{}.{}'.format( TEST_DATASET, TEST_TABLE_ID) diff --git a/tests/providers/google/cloud/operators/test_bigquery_to_bigquery_system.py b/tests/providers/google/cloud/transfers/test_bigquery_to_bigquery_system.py similarity index 100% rename from tests/providers/google/cloud/operators/test_bigquery_to_bigquery_system.py rename to tests/providers/google/cloud/transfers/test_bigquery_to_bigquery_system.py diff --git a/tests/providers/google/cloud/operators/test_bigquery_to_gcs.py b/tests/providers/google/cloud/transfers/test_bigquery_to_gcs.py similarity index 94% rename from tests/providers/google/cloud/operators/test_bigquery_to_gcs.py rename to tests/providers/google/cloud/transfers/test_bigquery_to_gcs.py index d58228ff76674..e95d62365b1f9 100644 --- a/tests/providers/google/cloud/operators/test_bigquery_to_gcs.py +++ b/tests/providers/google/cloud/transfers/test_bigquery_to_gcs.py @@ -20,7 +20,7 @@ import mock -from airflow.providers.google.cloud.operators.bigquery_to_gcs import BigQueryToGCSOperator +from airflow.providers.google.cloud.transfers.bigquery_to_gcs import BigQueryToGCSOperator TASK_ID = 'test-bq-create-table-operator' TEST_DATASET = 'test-dataset' @@ -28,7 +28,7 @@ class TestBigQueryToCloudStorageOperator(unittest.TestCase): - @mock.patch('airflow.providers.google.cloud.operators.bigquery_to_gcs.BigQueryHook') + @mock.patch('airflow.providers.google.cloud.transfers.bigquery_to_gcs.BigQueryHook') def test_execute(self, mock_hook): source_project_dataset_table = '{}.{}'.format( TEST_DATASET, TEST_TABLE_ID) diff --git a/tests/providers/google/cloud/operators/test_bigquery_to_gcs_system.py b/tests/providers/google/cloud/transfers/test_bigquery_to_gcs_system.py similarity index 100% rename from tests/providers/google/cloud/operators/test_bigquery_to_gcs_system.py rename to tests/providers/google/cloud/transfers/test_bigquery_to_gcs_system.py diff --git a/tests/providers/google/cloud/operators/test_bigquery_to_mysql.py b/tests/providers/google/cloud/transfers/test_bigquery_to_mysql.py similarity index 93% rename from tests/providers/google/cloud/operators/test_bigquery_to_mysql.py rename to tests/providers/google/cloud/transfers/test_bigquery_to_mysql.py index 68b1745e5c18b..f21f249a29274 100644 --- a/tests/providers/google/cloud/operators/test_bigquery_to_mysql.py +++ b/tests/providers/google/cloud/transfers/test_bigquery_to_mysql.py @@ -19,7 +19,7 @@ import 
mock -from airflow.providers.google.cloud.operators.bigquery_to_mysql import BigQueryToMySqlOperator +from airflow.providers.google.cloud.transfers.bigquery_to_mysql import BigQueryToMySqlOperator TASK_ID = 'test-bq-create-table-operator' TEST_DATASET = 'test-dataset' @@ -28,7 +28,7 @@ class TestBigQueryToMySqlOperator(unittest.TestCase): - @mock.patch('airflow.providers.google.cloud.operators.bigquery_to_mysql.BigQueryHook') + @mock.patch('airflow.providers.google.cloud.transfers.bigquery_to_mysql.BigQueryHook') def test_execute_good_request_to_bq(self, mock_hook): destination_table = 'table' operator = BigQueryToMySqlOperator( diff --git a/tests/providers/google/cloud/operators/test_cassandra_to_gcs.py b/tests/providers/google/cloud/transfers/test_cassandra_to_gcs.py similarity index 93% rename from tests/providers/google/cloud/operators/test_cassandra_to_gcs.py rename to tests/providers/google/cloud/transfers/test_cassandra_to_gcs.py index 390a9fe650203..7eca09d5a186e 100644 --- a/tests/providers/google/cloud/operators/test_cassandra_to_gcs.py +++ b/tests/providers/google/cloud/transfers/test_cassandra_to_gcs.py @@ -21,17 +21,17 @@ from mock import call -from airflow.providers.google.cloud.operators.cassandra_to_gcs import CassandraToGCSOperator +from airflow.providers.google.cloud.transfers.cassandra_to_gcs import CassandraToGCSOperator TMP_FILE_NAME = "temp-file" class TestCassandraToGCS(unittest.TestCase): - @mock.patch("airflow.providers.google.cloud.operators.cassandra_to_gcs.NamedTemporaryFile") + @mock.patch("airflow.providers.google.cloud.transfers.cassandra_to_gcs.NamedTemporaryFile") @mock.patch( - "airflow.providers.google.cloud.operators.cassandra_to_gcs.GCSHook.upload" + "airflow.providers.google.cloud.transfers.cassandra_to_gcs.GCSHook.upload" ) - @mock.patch("airflow.providers.google.cloud.operators.cassandra_to_gcs.CassandraHook") + @mock.patch("airflow.providers.google.cloud.transfers.cassandra_to_gcs.CassandraHook") def test_execute(self, mock_hook, mock_upload, mock_tempfile): test_bucket = "test-bucket" schema = "schema.json" diff --git a/tests/providers/google/cloud/operators/test_facebook_ads_to_gcs.py b/tests/providers/google/cloud/transfers/test_facebook_ads_to_gcs.py similarity index 93% rename from tests/providers/google/cloud/operators/test_facebook_ads_to_gcs.py rename to tests/providers/google/cloud/transfers/test_facebook_ads_to_gcs.py index a2b8b6b9dade5..fdf18f9372f6a 100644 --- a/tests/providers/google/cloud/operators/test_facebook_ads_to_gcs.py +++ b/tests/providers/google/cloud/transfers/test_facebook_ads_to_gcs.py @@ -16,7 +16,7 @@ # under the License. 
from unittest import mock -from airflow.providers.google.cloud.operators.facebook_ads_to_gcs import FacebookAdsReportToGcsOperator +from airflow.providers.google.cloud.transfers.facebook_ads_to_gcs import FacebookAdsReportToGcsOperator GCS_BUCKET = "airflow_bucket_fb" GCS_OBJ_PATH = "Temp/this_is_my_report_json.json" @@ -47,8 +47,8 @@ class TestFacebookAdsReportToGcsOperator: - @mock.patch("airflow.providers.google.cloud.operators.facebook_ads_to_gcs.FacebookAdsReportingHook") - @mock.patch("airflow.providers.google.cloud.operators.facebook_ads_to_gcs.GCSHook") + @mock.patch("airflow.providers.google.cloud.transfers.facebook_ads_to_gcs.FacebookAdsReportingHook") + @mock.patch("airflow.providers.google.cloud.transfers.facebook_ads_to_gcs.GCSHook") def test_execute(self, mock_gcs_hook, mock_ads_hook): mock_ads_hook.return_value.bulk_facebook_report.return_value = FACEBOOK_RETURN_VALUE op = FacebookAdsReportToGcsOperator(facebook_conn_id=FACEBOOK_ADS_CONN_ID, diff --git a/tests/providers/google/cloud/operators/test_facebook_ads_to_gcs_system.py b/tests/providers/google/cloud/transfers/test_facebook_ads_to_gcs_system.py similarity index 100% rename from tests/providers/google/cloud/operators/test_facebook_ads_to_gcs_system.py rename to tests/providers/google/cloud/transfers/test_facebook_ads_to_gcs_system.py diff --git a/tests/providers/google/cloud/operators/test_gcs_to_bigquery.py b/tests/providers/google/cloud/transfers/test_gcs_to_bigquery.py similarity index 93% rename from tests/providers/google/cloud/operators/test_gcs_to_bigquery.py rename to tests/providers/google/cloud/transfers/test_gcs_to_bigquery.py index dd9a0ba759e39..b327cecdc4e05 100644 --- a/tests/providers/google/cloud/operators/test_gcs_to_bigquery.py +++ b/tests/providers/google/cloud/transfers/test_gcs_to_bigquery.py @@ -20,7 +20,7 @@ import mock -from airflow.providers.google.cloud.operators.gcs_to_bigquery import GCSToBigQueryOperator +from airflow.providers.google.cloud.transfers.gcs_to_bigquery import GCSToBigQueryOperator TASK_ID = 'test-gcs-to-bq-operator' TEST_EXPLICIT_DEST = 'test-project.dataset.table' @@ -31,7 +31,7 @@ class TestGoogleCloudStorageToBigQueryOperator(unittest.TestCase): - @mock.patch('airflow.providers.google.cloud.operators.gcs_to_bigquery.BigQueryHook') + @mock.patch('airflow.providers.google.cloud.transfers.gcs_to_bigquery.BigQueryHook') def test_execute_explicit_project_legacy(self, bq_hook): operator = GCSToBigQueryOperator(task_id=TASK_ID, bucket=TEST_BUCKET, @@ -50,7 +50,7 @@ def test_execute_explicit_project_legacy(self, bq_hook): .execute \ .assert_called_once_with("SELECT MAX(id) FROM [test-project.dataset.table]") - @mock.patch('airflow.providers.google.cloud.operators.gcs_to_bigquery.BigQueryHook') + @mock.patch('airflow.providers.google.cloud.transfers.gcs_to_bigquery.BigQueryHook') def test_execute_explicit_project(self, bq_hook): operator = GCSToBigQueryOperator(task_id=TASK_ID, bucket=TEST_BUCKET, diff --git a/tests/providers/google/cloud/operators/test_gcs_to_bigquery_system.py b/tests/providers/google/cloud/transfers/test_gcs_to_bigquery_system.py similarity index 100% rename from tests/providers/google/cloud/operators/test_gcs_to_bigquery_system.py rename to tests/providers/google/cloud/transfers/test_gcs_to_bigquery_system.py diff --git a/tests/providers/google/cloud/operators/test_gcs_to_gcs.py b/tests/providers/google/cloud/transfers/test_gcs_to_gcs.py similarity index 87% rename from tests/providers/google/cloud/operators/test_gcs_to_gcs.py rename to 
tests/providers/google/cloud/transfers/test_gcs_to_gcs.py index 73815eee7de4c..49c067b6aec53 100644 --- a/tests/providers/google/cloud/operators/test_gcs_to_gcs.py +++ b/tests/providers/google/cloud/transfers/test_gcs_to_gcs.py @@ -22,9 +22,7 @@ import mock from airflow.exceptions import AirflowException -from airflow.providers.google.cloud.operators.gcs_to_gcs import ( - WILDCARD, GCSSynchronizeBucketsOperator, GCSToGCSOperator, -) +from airflow.providers.google.cloud.transfers.gcs_to_gcs import WILDCARD, GCSToGCSOperator TASK_ID = 'test-gcs-to-gcs-operator' TEST_BUCKET = 'test-bucket' @@ -64,7 +62,7 @@ class TestGoogleCloudStorageToCloudStorageOperator(unittest.TestCase): Also tests the destination_object as prefix when the wildcard is used. """ - @mock.patch('airflow.providers.google.cloud.operators.gcs_to_gcs.GCSHook') + @mock.patch('airflow.providers.google.cloud.transfers.gcs_to_gcs.GCSHook') def test_execute_no_prefix(self, mock_hook): operator = GCSToGCSOperator( task_id=TASK_ID, source_bucket=TEST_BUCKET, @@ -76,7 +74,7 @@ def test_execute_no_prefix(self, mock_hook): TEST_BUCKET, prefix="", delimiter="test_object" ) - @mock.patch('airflow.providers.google.cloud.operators.gcs_to_gcs.GCSHook') + @mock.patch('airflow.providers.google.cloud.transfers.gcs_to_gcs.GCSHook') def test_execute_no_suffix(self, mock_hook): operator = GCSToGCSOperator( task_id=TASK_ID, source_bucket=TEST_BUCKET, @@ -88,7 +86,7 @@ def test_execute_no_suffix(self, mock_hook): TEST_BUCKET, prefix="test_object", delimiter="" ) - @mock.patch('airflow.providers.google.cloud.operators.gcs_to_gcs.GCSHook') + @mock.patch('airflow.providers.google.cloud.transfers.gcs_to_gcs.GCSHook') def test_execute_prefix_and_suffix(self, mock_hook): operator = GCSToGCSOperator( task_id=TASK_ID, source_bucket=TEST_BUCKET, @@ -101,7 +99,7 @@ def test_execute_prefix_and_suffix(self, mock_hook): ) # copy with wildcard - @mock.patch('airflow.providers.google.cloud.operators.gcs_to_gcs.GCSHook') + @mock.patch('airflow.providers.google.cloud.transfers.gcs_to_gcs.GCSHook') def test_execute_wildcard_with_destination_object(self, mock_hook): mock_hook.return_value.list.return_value = SOURCE_FILES_LIST operator = GCSToGCSOperator( @@ -119,7 +117,7 @@ def test_execute_wildcard_with_destination_object(self, mock_hook): ] mock_hook.return_value.rewrite.assert_has_calls(mock_calls) - @mock.patch('airflow.providers.google.cloud.operators.gcs_to_gcs.GCSHook') + @mock.patch('airflow.providers.google.cloud.transfers.gcs_to_gcs.GCSHook') def test_execute_wildcard_with_destination_object_retained_prefix(self, mock_hook): mock_hook.return_value.list.return_value = SOURCE_FILES_LIST operator = GCSToGCSOperator( @@ -139,7 +137,7 @@ def test_execute_wildcard_with_destination_object_retained_prefix(self, mock_hoo ] mock_hook.return_value.rewrite.assert_has_calls(mock_calls_retained) - @mock.patch('airflow.providers.google.cloud.operators.gcs_to_gcs.GCSHook') + @mock.patch('airflow.providers.google.cloud.transfers.gcs_to_gcs.GCSHook') def test_execute_wildcard_without_destination_object(self, mock_hook): mock_hook.return_value.list.return_value = SOURCE_FILES_LIST operator = GCSToGCSOperator( @@ -156,7 +154,7 @@ def test_execute_wildcard_without_destination_object(self, mock_hook): ] mock_hook.return_value.rewrite.assert_has_calls(mock_calls_none) - @mock.patch('airflow.providers.google.cloud.operators.gcs_to_gcs.GCSHook') + @mock.patch('airflow.providers.google.cloud.transfers.gcs_to_gcs.GCSHook') def test_execute_wildcard_empty_destination_object(self, 
mock_hook): mock_hook.return_value.list.return_value = SOURCE_FILES_LIST operator = GCSToGCSOperator( @@ -174,7 +172,7 @@ def test_execute_wildcard_empty_destination_object(self, mock_hook): ] mock_hook.return_value.rewrite.assert_has_calls(mock_calls_empty) - @mock.patch('airflow.providers.google.cloud.operators.gcs_to_gcs.GCSHook') + @mock.patch('airflow.providers.google.cloud.transfers.gcs_to_gcs.GCSHook') def test_execute_last_modified_time(self, mock_hook): mock_hook.return_value.list.return_value = SOURCE_FILES_LIST operator = GCSToGCSOperator( @@ -192,7 +190,7 @@ def test_execute_last_modified_time(self, mock_hook): ] mock_hook.return_value.rewrite.assert_has_calls(mock_calls_none) - @mock.patch('airflow.providers.google.cloud.operators.gcs_to_gcs.GCSHook') + @mock.patch('airflow.providers.google.cloud.transfers.gcs_to_gcs.GCSHook') def test_wc_with_last_modified_time_with_all_true_cond(self, mock_hook): mock_hook.return_value.list.return_value = SOURCE_FILES_LIST mock_hook.return_value.is_updated_after.side_effect = [True, True, True] @@ -211,7 +209,7 @@ def test_wc_with_last_modified_time_with_all_true_cond(self, mock_hook): ] mock_hook.return_value.rewrite.assert_has_calls(mock_calls_none) - @mock.patch('airflow.providers.google.cloud.operators.gcs_to_gcs.GCSHook') + @mock.patch('airflow.providers.google.cloud.transfers.gcs_to_gcs.GCSHook') def test_wc_with_last_modified_time_with_one_true_cond(self, mock_hook): mock_hook.return_value.list.return_value = SOURCE_FILES_LIST mock_hook.return_value.is_updated_after.side_effect = [True, False, False] @@ -226,7 +224,7 @@ def test_wc_with_last_modified_time_with_one_true_cond(self, mock_hook): TEST_BUCKET, 'test_object/file1.txt', DESTINATION_BUCKET, 'test_object/file1.txt') - @mock.patch('airflow.providers.google.cloud.operators.gcs_to_gcs.GCSHook') + @mock.patch('airflow.providers.google.cloud.transfers.gcs_to_gcs.GCSHook') def test_wc_with_no_last_modified_time(self, mock_hook): mock_hook.return_value.list.return_value = SOURCE_FILES_LIST operator = GCSToGCSOperator( @@ -244,7 +242,7 @@ def test_wc_with_no_last_modified_time(self, mock_hook): ] mock_hook.return_value.rewrite.assert_has_calls(mock_calls_none) - @mock.patch('airflow.providers.google.cloud.operators.gcs_to_gcs.GCSHook') + @mock.patch('airflow.providers.google.cloud.transfers.gcs_to_gcs.GCSHook') def test_no_prefix_with_last_modified_time_with_true_cond(self, mock_hook): mock_hook.return_value.is_updated_after.return_value = True operator = GCSToGCSOperator( @@ -258,7 +256,7 @@ def test_no_prefix_with_last_modified_time_with_true_cond(self, mock_hook): mock_hook.return_value.rewrite.assert_called_once_with( TEST_BUCKET, 'test_object.txt', DESTINATION_BUCKET, 'test_object.txt') - @mock.patch('airflow.providers.google.cloud.operators.gcs_to_gcs.GCSHook') + @mock.patch('airflow.providers.google.cloud.transfers.gcs_to_gcs.GCSHook') def test_no_prefix_with_maximum_modified_time_with_true_cond(self, mock_hook): mock_hook.return_value.is_updated_before.return_value = True operator = GCSToGCSOperator( @@ -272,7 +270,7 @@ def test_no_prefix_with_maximum_modified_time_with_true_cond(self, mock_hook): mock_hook.return_value.rewrite.assert_called_once_with( TEST_BUCKET, 'test_object.txt', DESTINATION_BUCKET, 'test_object.txt') - @mock.patch('airflow.providers.google.cloud.operators.gcs_to_gcs.GCSHook') + @mock.patch('airflow.providers.google.cloud.transfers.gcs_to_gcs.GCSHook') def test_exe_last_modified_time_and_maximum_modified_time_with_true_cond(self, mock_hook): 
mock_hook.return_value.is_updated_between.return_value = True operator = GCSToGCSOperator( @@ -287,7 +285,7 @@ def test_exe_last_modified_time_and_maximum_modified_time_with_true_cond(self, m mock_hook.return_value.rewrite.assert_called_once_with( TEST_BUCKET, 'test_object.txt', DESTINATION_BUCKET, 'test_object.txt') - @mock.patch('airflow.providers.google.cloud.operators.gcs_to_gcs.GCSHook') + @mock.patch('airflow.providers.google.cloud.transfers.gcs_to_gcs.GCSHook') def test_execute_no_prefix_with_no_last_modified_time(self, mock_hook): operator = GCSToGCSOperator( task_id=TASK_ID, source_bucket=TEST_BUCKET, @@ -300,7 +298,7 @@ def test_execute_no_prefix_with_no_last_modified_time(self, mock_hook): mock_hook.return_value.rewrite.assert_called_once_with( TEST_BUCKET, 'test_object.txt', DESTINATION_BUCKET, 'test_object.txt') - @mock.patch('airflow.providers.google.cloud.operators.gcs_to_gcs.GCSHook') + @mock.patch('airflow.providers.google.cloud.transfers.gcs_to_gcs.GCSHook') def test_no_prefix_with_last_modified_time_with_false_cond(self, mock_hook): mock_hook.return_value.is_updated_after.return_value = False operator = GCSToGCSOperator( @@ -313,7 +311,7 @@ def test_no_prefix_with_last_modified_time_with_false_cond(self, mock_hook): operator.execute(None) mock_hook.return_value.rewrite.assert_not_called() - @mock.patch('airflow.providers.google.cloud.operators.gcs_to_gcs.GCSHook') + @mock.patch('airflow.providers.google.cloud.transfers.gcs_to_gcs.GCSHook') def test_executes_with_is_older_than_with_true_cond(self, mock_hook): mock_hook.return_value.is_older_than.return_value = True operator = GCSToGCSOperator( @@ -329,7 +327,7 @@ def test_executes_with_is_older_than_with_true_cond(self, mock_hook): mock_hook.return_value.rewrite.assert_called_once_with( TEST_BUCKET, 'test_object.txt', DESTINATION_BUCKET, 'test_object.txt') - @mock.patch('airflow.providers.google.cloud.operators.gcs_to_gcs.GCSHook') + @mock.patch('airflow.providers.google.cloud.transfers.gcs_to_gcs.GCSHook') def test_execute_more_than_1_wildcard(self, mock_hook): mock_hook.return_value.list.return_value = SOURCE_FILES_LIST operator = GCSToGCSOperator( @@ -346,7 +344,7 @@ def test_execute_more_than_1_wildcard(self, mock_hook): with self.assertRaisesRegex(AirflowException, error_msg): operator.execute(None) - @mock.patch('airflow.providers.google.cloud.operators.gcs_to_gcs.GCSHook') + @mock.patch('airflow.providers.google.cloud.transfers.gcs_to_gcs.GCSHook') def test_execute_with_empty_destination_bucket(self, mock_hook): mock_hook.return_value.list.return_value = SOURCE_FILES_LIST operator = GCSToGCSOperator( @@ -364,7 +362,7 @@ def test_execute_with_empty_destination_bucket(self, mock_hook): self.assertEqual(operator.destination_bucket, operator.source_bucket) # Tests the use of delimiter and source object as list - @mock.patch('airflow.providers.google.cloud.operators.gcs_to_gcs.GCSHook') + @mock.patch('airflow.providers.google.cloud.transfers.gcs_to_gcs.GCSHook') def test_executes_with_empty_source_objects(self, mock_hook): operator = GCSToGCSOperator( task_id=TASK_ID, source_bucket=TEST_BUCKET, @@ -375,7 +373,7 @@ def test_executes_with_empty_source_objects(self, mock_hook): TEST_BUCKET, prefix='', delimiter=None ) - @mock.patch('airflow.providers.google.cloud.operators.gcs_to_gcs.GCSHook') + @mock.patch('airflow.providers.google.cloud.transfers.gcs_to_gcs.GCSHook') def test_raises_exception_with_two_empty_list_inside_source_objects(self, mock_hook): mock_hook.return_value.list.return_value = SOURCE_OBJECTS_LIST 
operator = GCSToGCSOperator( @@ -386,7 +384,7 @@ def test_raises_exception_with_two_empty_list_inside_source_objects(self, mock_h "You can't have two empty strings inside source_object"): operator.execute(None) - @mock.patch('airflow.providers.google.cloud.operators.gcs_to_gcs.GCSHook') + @mock.patch('airflow.providers.google.cloud.transfers.gcs_to_gcs.GCSHook') def test_executes_with_single_item_in_source_objects(self, mock_hook): operator = GCSToGCSOperator( task_id=TASK_ID, source_bucket=TEST_BUCKET, @@ -396,7 +394,7 @@ def test_executes_with_single_item_in_source_objects(self, mock_hook): TEST_BUCKET, prefix=SOURCE_OBJECTS_SINGLE_FILE[0], delimiter=None ) - @mock.patch('airflow.providers.google.cloud.operators.gcs_to_gcs.GCSHook') + @mock.patch('airflow.providers.google.cloud.transfers.gcs_to_gcs.GCSHook') def test_executes_with_multiple_items_in_source_objects(self, mock_hook): operator = GCSToGCSOperator( task_id=TASK_ID, source_bucket=TEST_BUCKET, @@ -410,7 +408,7 @@ def test_executes_with_multiple_items_in_source_objects(self, mock_hook): any_order=True ) - @mock.patch('airflow.providers.google.cloud.operators.gcs_to_gcs.GCSHook') + @mock.patch('airflow.providers.google.cloud.transfers.gcs_to_gcs.GCSHook') def test_executes_with_a_delimiter(self, mock_hook): operator = GCSToGCSOperator( task_id=TASK_ID, source_bucket=TEST_BUCKET, @@ -421,7 +419,7 @@ def test_executes_with_a_delimiter(self, mock_hook): ) # COPY - @mock.patch('airflow.providers.google.cloud.operators.gcs_to_gcs.GCSHook') + @mock.patch('airflow.providers.google.cloud.transfers.gcs_to_gcs.GCSHook') def test_executes_with_delimiter_and_destination_object(self, mock_hook): mock_hook.return_value.list.return_value = ['test_object/file3.json'] operator = GCSToGCSOperator( @@ -438,7 +436,7 @@ def test_executes_with_delimiter_and_destination_object(self, mock_hook): ] mock_hook.return_value.rewrite.assert_has_calls(mock_calls) - @mock.patch('airflow.providers.google.cloud.operators.gcs_to_gcs.GCSHook') + @mock.patch('airflow.providers.google.cloud.transfers.gcs_to_gcs.GCSHook') def test_executes_with_different_delimiter_and_destination_object(self, mock_hook): mock_hook.return_value.list.return_value = ['test_object/file1.txt', 'test_object/file2.txt'] operator = GCSToGCSOperator( @@ -458,7 +456,7 @@ def test_executes_with_different_delimiter_and_destination_object(self, mock_hoo ] mock_hook.return_value.rewrite.assert_has_calls(mock_calls) - @mock.patch('airflow.providers.google.cloud.operators.gcs_to_gcs.GCSHook') + @mock.patch('airflow.providers.google.cloud.transfers.gcs_to_gcs.GCSHook') def test_executes_with_no_destination_bucket_and_no_destination_object(self, mock_hook): mock_hook.return_value.list.return_value = SOURCE_OBJECTS_LIST operator = GCSToGCSOperator( @@ -474,7 +472,7 @@ def test_executes_with_no_destination_bucket_and_no_destination_object(self, moc TEST_BUCKET, 'test_object/file3.json'), ] mock_hook.return_value.rewrite.assert_has_calls(mock_calls) - @mock.patch('airflow.providers.google.cloud.operators.gcs_to_gcs.GCSHook') + @mock.patch('airflow.providers.google.cloud.transfers.gcs_to_gcs.GCSHook') def test_wc_with_last_modified_time_with_all_true_cond_no_file(self, mock_hook): mock_hook.return_value.list.return_value = SOURCE_OBJECTS_LIST mock_hook.return_value.is_updated_after.side_effect = [True, True, True] @@ -499,35 +497,3 @@ def test_wc_with_last_modified_time_with_all_true_cond_no_file(self, mock_hook): DESTINATION_BUCKET, 'test_object/file3.json' ), ] 
mock_hook.return_value.rewrite.assert_has_calls(mock_calls_none) - - -class TestGoogleCloudStorageSync(unittest.TestCase): - - @mock.patch('airflow.providers.google.cloud.operators.gcs_to_gcs.GCSHook') - def test_execute(self, mock_hook): - task = GCSSynchronizeBucketsOperator( - task_id="task-id", - source_bucket="SOURCE_BUCKET", - destination_bucket="DESTINATION_BUCKET", - source_object="SOURCE_OBJECT", - destination_object="DESTINATION_OBJECT", - recursive=True, - delete_extra_files=True, - allow_overwrite=True, - gcp_conn_id="GCP_CONN_ID", - delegate_to="DELEGATE_TO", - ) - task.execute({}) - mock_hook.assert_called_once_with( - google_cloud_storage_conn_id='GCP_CONN_ID', - delegate_to='DELEGATE_TO' - ) - mock_hook.return_value.sync.assert_called_once_with( - source_bucket='SOURCE_BUCKET', - source_object='SOURCE_OBJECT', - destination_bucket='DESTINATION_BUCKET', - destination_object='DESTINATION_OBJECT', - delete_extra_files=True, - recursive=True, - allow_overwrite=True, - ) diff --git a/tests/providers/google/cloud/operators/test_gcs_to_gcs_system.py b/tests/providers/google/cloud/transfers/test_gcs_to_gcs_system.py similarity index 100% rename from tests/providers/google/cloud/operators/test_gcs_to_gcs_system.py rename to tests/providers/google/cloud/transfers/test_gcs_to_gcs_system.py diff --git a/tests/providers/google/cloud/transfers/test_gcs_to_local.py b/tests/providers/google/cloud/transfers/test_gcs_to_local.py new file mode 100644 index 0000000000000..4763cae0c7603 --- /dev/null +++ b/tests/providers/google/cloud/transfers/test_gcs_to_local.py @@ -0,0 +1,47 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+ +import unittest + +import mock + +from airflow.providers.google.cloud.transfers.gcs_to_local import GCSToLocalFilesystemOperator + +TASK_ID = "test-gcs-operator" +TEST_BUCKET = "test-bucket" +TEST_PROJECT = "test-project" +DELIMITER = ".csv" +PREFIX = "TEST" +MOCK_FILES = ["TEST1.csv", "TEST2.csv", "TEST3.csv"] +TEST_OBJECT = "dir1/test-object" +LOCAL_FILE_PATH = "/home/airflow/gcp/test-object" + + +class TestGoogleCloudStorageDownloadOperator(unittest.TestCase): + @mock.patch("airflow.providers.google.cloud.transfers.gcs_to_local.GCSHook") + def test_execute(self, mock_hook): + operator = GCSToLocalFilesystemOperator( + task_id=TASK_ID, + bucket=TEST_BUCKET, + object_name=TEST_OBJECT, + filename=LOCAL_FILE_PATH, + ) + + operator.execute(None) + mock_hook.return_value.download.assert_called_once_with( + bucket_name=TEST_BUCKET, object_name=TEST_OBJECT, filename=LOCAL_FILE_PATH + ) diff --git a/tests/providers/google/cloud/operators/test_gcs_to_sftp.py b/tests/providers/google/cloud/transfers/test_gcs_to_sftp.py similarity index 90% rename from tests/providers/google/cloud/operators/test_gcs_to_sftp.py rename to tests/providers/google/cloud/transfers/test_gcs_to_sftp.py index 9b5b42eb2830d..5b15b41dac2d9 100644 --- a/tests/providers/google/cloud/operators/test_gcs_to_sftp.py +++ b/tests/providers/google/cloud/transfers/test_gcs_to_sftp.py @@ -23,7 +23,7 @@ import mock from airflow.exceptions import AirflowException -from airflow.providers.google.cloud.operators.gcs_to_sftp import GCSToSFTPOperator +from airflow.providers.google.cloud.transfers.gcs_to_sftp import GCSToSFTPOperator TASK_ID = "test-gcs-to-sftp-operator" GCP_CONN_ID = "GCP_CONN_ID" @@ -46,8 +46,8 @@ # pylint: disable=unused-argument class TestGoogleCloudStorageToSFTPOperator(unittest.TestCase): - @mock.patch("airflow.providers.google.cloud.operators.gcs_to_sftp.GCSHook") - @mock.patch("airflow.providers.google.cloud.operators.gcs_to_sftp.SFTPHook") + @mock.patch("airflow.providers.google.cloud.transfers.gcs_to_sftp.GCSHook") + @mock.patch("airflow.providers.google.cloud.transfers.gcs_to_sftp.SFTPHook") def test_execute_copy_single_file(self, sftp_hook, gcs_hook): task = GCSToSFTPOperator( task_id=TASK_ID, @@ -76,8 +76,8 @@ def test_execute_copy_single_file(self, sftp_hook, gcs_hook): gcs_hook.return_value.delete.assert_not_called() - @mock.patch("airflow.providers.google.cloud.operators.gcs_to_sftp.GCSHook") - @mock.patch("airflow.providers.google.cloud.operators.gcs_to_sftp.SFTPHook") + @mock.patch("airflow.providers.google.cloud.transfers.gcs_to_sftp.GCSHook") + @mock.patch("airflow.providers.google.cloud.transfers.gcs_to_sftp.SFTPHook") def test_execute_move_single_file(self, sftp_hook, gcs_hook): task = GCSToSFTPOperator( task_id=TASK_ID, @@ -108,8 +108,8 @@ def test_execute_move_single_file(self, sftp_hook, gcs_hook): TEST_BUCKET, SOURCE_OBJECT_NO_WILDCARD ) - @mock.patch("airflow.providers.google.cloud.operators.gcs_to_sftp.GCSHook") - @mock.patch("airflow.providers.google.cloud.operators.gcs_to_sftp.SFTPHook") + @mock.patch("airflow.providers.google.cloud.transfers.gcs_to_sftp.GCSHook") + @mock.patch("airflow.providers.google.cloud.transfers.gcs_to_sftp.SFTPHook") def test_execute_copy_with_wildcard(self, sftp_hook, gcs_hook): gcs_hook.return_value.list.return_value = SOURCE_FILES_LIST[:2] operator = GCSToSFTPOperator( @@ -135,8 +135,8 @@ def test_execute_copy_with_wildcard(self, sftp_hook, gcs_hook): self.assertEqual(call_two[1]["bucket_name"], TEST_BUCKET) self.assertEqual(call_two[1]["object_name"], 
"test_object/file2.txt") - @mock.patch("airflow.providers.google.cloud.operators.gcs_to_sftp.GCSHook") - @mock.patch("airflow.providers.google.cloud.operators.gcs_to_sftp.SFTPHook") + @mock.patch("airflow.providers.google.cloud.transfers.gcs_to_sftp.GCSHook") + @mock.patch("airflow.providers.google.cloud.transfers.gcs_to_sftp.SFTPHook") def test_execute_move_with_wildcard(self, sftp_hook, gcs_hook): gcs_hook.return_value.list.return_value = SOURCE_FILES_LIST[:2] operator = GCSToSFTPOperator( @@ -159,8 +159,8 @@ def test_execute_move_with_wildcard(self, sftp_hook, gcs_hook): self.assertEqual(call_one[0], (TEST_BUCKET, "test_object/file1.txt")) self.assertEqual(call_two[0], (TEST_BUCKET, "test_object/file2.txt")) - @mock.patch("airflow.providers.google.cloud.operators.gcs_to_sftp.GCSHook") - @mock.patch("airflow.providers.google.cloud.operators.gcs_to_sftp.SFTPHook") + @mock.patch("airflow.providers.google.cloud.transfers.gcs_to_sftp.GCSHook") + @mock.patch("airflow.providers.google.cloud.transfers.gcs_to_sftp.SFTPHook") def test_execute_more_than_one_wildcard_exception(self, sftp_hook, gcs_hook): gcs_hook.return_value.list.return_value = SOURCE_FILES_LIST[:2] operator = GCSToSFTPOperator( diff --git a/tests/providers/google/cloud/operators/test_gcs_to_sftp_system.py b/tests/providers/google/cloud/transfers/test_gcs_to_sftp_system.py similarity index 100% rename from tests/providers/google/cloud/operators/test_gcs_to_sftp_system.py rename to tests/providers/google/cloud/transfers/test_gcs_to_sftp_system.py diff --git a/tests/providers/google/cloud/operators/test_local_to_gcs.py b/tests/providers/google/cloud/transfers/test_local_to_gcs.py similarity index 95% rename from tests/providers/google/cloud/operators/test_local_to_gcs.py rename to tests/providers/google/cloud/transfers/test_local_to_gcs.py index d8d4f97530889..95bee7449402d 100644 --- a/tests/providers/google/cloud/operators/test_local_to_gcs.py +++ b/tests/providers/google/cloud/transfers/test_local_to_gcs.py @@ -23,7 +23,7 @@ import mock from airflow.models.dag import DAG -from airflow.providers.google.cloud.operators.local_to_gcs import LocalFilesystemToGCSOperator +from airflow.providers.google.cloud.transfers.local_to_gcs import LocalFilesystemToGCSOperator class TestFileToGcsOperator(unittest.TestCase): @@ -55,7 +55,7 @@ def test_init(self): self.assertEqual(operator.mime_type, self._config['mime_type']) self.assertEqual(operator.gzip, self._config['gzip']) - @mock.patch('airflow.providers.google.cloud.operators.local_to_gcs.GCSHook', + @mock.patch('airflow.providers.google.cloud.transfers.local_to_gcs.GCSHook', autospec=True) def test_execute(self, mock_hook): mock_instance = mock_hook.return_value diff --git a/tests/providers/google/cloud/operators/test_local_to_gcs_system.py b/tests/providers/google/cloud/transfers/test_local_to_gcs_system.py similarity index 100% rename from tests/providers/google/cloud/operators/test_local_to_gcs_system.py rename to tests/providers/google/cloud/transfers/test_local_to_gcs_system.py diff --git a/tests/providers/google/cloud/operators/test_mssql_to_gcs.py b/tests/providers/google/cloud/transfers/test_mssql_to_gcs.py similarity index 92% rename from tests/providers/google/cloud/operators/test_mssql_to_gcs.py rename to tests/providers/google/cloud/transfers/test_mssql_to_gcs.py index 08a3e3fcc6849..e23836fd58893 100644 --- a/tests/providers/google/cloud/operators/test_mssql_to_gcs.py +++ b/tests/providers/google/cloud/transfers/test_mssql_to_gcs.py @@ -23,7 +23,7 @@ from airflow import PY38 
if not PY38: - from airflow.providers.google.cloud.operators.mssql_to_gcs import MSSQLToGCSOperator + from airflow.providers.google.cloud.transfers.mssql_to_gcs import MSSQLToGCSOperator TASK_ID = 'test-mssql-to-gcs' MSSQL_CONN_ID = 'mssql_conn_test' @@ -65,8 +65,8 @@ def test_init(self): self.assertEqual(op.bucket, BUCKET) self.assertEqual(op.filename, JSON_FILENAME) - @mock.patch('airflow.providers.google.cloud.operators.mssql_to_gcs.MsSqlHook') - @mock.patch('airflow.providers.google.cloud.operators.sql_to_gcs.GCSHook') + @mock.patch('airflow.providers.google.cloud.transfers.mssql_to_gcs.MsSqlHook') + @mock.patch('airflow.providers.google.cloud.transfers.sql_to_gcs.GCSHook') def test_exec_success_json(self, gcs_hook_mock_class, mssql_hook_mock_class): """Test successful run of execute function for JSON""" op = MSSQLToGCSOperator( @@ -97,8 +97,8 @@ def _assert_upload(bucket, obj, tmp_filename, mime_type=None, gzip=False): mssql_hook_mock_class.assert_called_once_with(mssql_conn_id=MSSQL_CONN_ID) mssql_hook_mock.get_conn().cursor().execute.assert_called_once_with(SQL) - @mock.patch('airflow.providers.google.cloud.operators.mssql_to_gcs.MsSqlHook') - @mock.patch('airflow.providers.google.cloud.operators.sql_to_gcs.GCSHook') + @mock.patch('airflow.providers.google.cloud.transfers.mssql_to_gcs.MsSqlHook') + @mock.patch('airflow.providers.google.cloud.transfers.sql_to_gcs.GCSHook') def test_file_splitting(self, gcs_hook_mock_class, mssql_hook_mock_class): """Test that ndjson is split by approx_max_file_size_bytes param.""" mssql_hook_mock = mssql_hook_mock_class.return_value @@ -128,8 +128,8 @@ def _assert_upload(bucket, obj, tmp_filename, mime_type=None, gzip=False): approx_max_file_size_bytes=len(expected_upload[JSON_FILENAME.format(0)])) op.execute(None) - @mock.patch('airflow.providers.google.cloud.operators.mssql_to_gcs.MsSqlHook') - @mock.patch('airflow.providers.google.cloud.operators.sql_to_gcs.GCSHook') + @mock.patch('airflow.providers.google.cloud.transfers.mssql_to_gcs.MsSqlHook') + @mock.patch('airflow.providers.google.cloud.transfers.sql_to_gcs.GCSHook') def test_schema_file(self, gcs_hook_mock_class, mssql_hook_mock_class): """Test writing schema files.""" mssql_hook_mock = mssql_hook_mock_class.return_value diff --git a/tests/providers/google/cloud/operators/test_mysql_to_gcs.py b/tests/providers/google/cloud/transfers/test_mysql_to_gcs.py similarity index 91% rename from tests/providers/google/cloud/operators/test_mysql_to_gcs.py rename to tests/providers/google/cloud/transfers/test_mysql_to_gcs.py index 03a75d317e340..abd8a18d32f43 100644 --- a/tests/providers/google/cloud/operators/test_mysql_to_gcs.py +++ b/tests/providers/google/cloud/transfers/test_mysql_to_gcs.py @@ -24,7 +24,7 @@ from _mysql_exceptions import ProgrammingError from parameterized import parameterized -from airflow.providers.google.cloud.operators.mysql_to_gcs import MySQLToGCSOperator +from airflow.providers.google.cloud.transfers.mysql_to_gcs import MySQLToGCSOperator TASK_ID = 'test-mysql-to-gcs' MYSQL_CONN_ID = 'mysql_conn_test' @@ -110,8 +110,8 @@ def test_convert_type(self, value, schema_type, expected): op.convert_type(value, schema_type), expected) - @mock.patch('airflow.providers.google.cloud.operators.mysql_to_gcs.MySqlHook') - @mock.patch('airflow.providers.google.cloud.operators.sql_to_gcs.GCSHook') + @mock.patch('airflow.providers.google.cloud.transfers.mysql_to_gcs.MySqlHook') + @mock.patch('airflow.providers.google.cloud.transfers.sql_to_gcs.GCSHook') def test_exec_success_json(self, 
gcs_hook_mock_class, mysql_hook_mock_class): """Test successful run of execute function for JSON""" op = MySQLToGCSOperator( @@ -142,8 +142,8 @@ def _assert_upload(bucket, obj, tmp_filename, mime_type=None, gzip=False): mysql_hook_mock_class.assert_called_once_with(mysql_conn_id=MYSQL_CONN_ID) mysql_hook_mock.get_conn().cursor().execute.assert_called_once_with(SQL) - @mock.patch('airflow.providers.google.cloud.operators.mysql_to_gcs.MySqlHook') - @mock.patch('airflow.providers.google.cloud.operators.sql_to_gcs.GCSHook') + @mock.patch('airflow.providers.google.cloud.transfers.mysql_to_gcs.MySqlHook') + @mock.patch('airflow.providers.google.cloud.transfers.sql_to_gcs.GCSHook') def test_exec_success_csv(self, gcs_hook_mock_class, mysql_hook_mock_class): """Test successful run of execute function for CSV""" op = MySQLToGCSOperator( @@ -175,8 +175,8 @@ def _assert_upload(bucket, obj, tmp_filename, mime_type=None, gzip=False): mysql_hook_mock_class.assert_called_once_with(mysql_conn_id=MYSQL_CONN_ID) mysql_hook_mock.get_conn().cursor().execute.assert_called_once_with(SQL) - @mock.patch('airflow.providers.google.cloud.operators.mysql_to_gcs.MySqlHook') - @mock.patch('airflow.providers.google.cloud.operators.sql_to_gcs.GCSHook') + @mock.patch('airflow.providers.google.cloud.transfers.mysql_to_gcs.MySqlHook') + @mock.patch('airflow.providers.google.cloud.transfers.sql_to_gcs.GCSHook') def test_exec_success_csv_ensure_utc(self, gcs_hook_mock_class, mysql_hook_mock_class): """Test successful run of execute function for CSV""" op = MySQLToGCSOperator( @@ -209,8 +209,8 @@ def _assert_upload(bucket, obj, tmp_filename, mime_type=None, gzip=False): mysql_hook_mock_class.assert_called_once_with(mysql_conn_id=MYSQL_CONN_ID) mysql_hook_mock.get_conn().cursor().execute.assert_has_calls([mock.call(TZ_QUERY), mock.call(SQL)]) - @mock.patch('airflow.providers.google.cloud.operators.mysql_to_gcs.MySqlHook') - @mock.patch('airflow.providers.google.cloud.operators.sql_to_gcs.GCSHook') + @mock.patch('airflow.providers.google.cloud.transfers.mysql_to_gcs.MySqlHook') + @mock.patch('airflow.providers.google.cloud.transfers.sql_to_gcs.GCSHook') def test_exec_success_csv_with_delimiter(self, gcs_hook_mock_class, mysql_hook_mock_class): """Test successful run of execute function for CSV with a field delimiter""" op = MySQLToGCSOperator( @@ -243,8 +243,8 @@ def _assert_upload(bucket, obj, tmp_filename, mime_type=None, gzip=False): mysql_hook_mock_class.assert_called_once_with(mysql_conn_id=MYSQL_CONN_ID) mysql_hook_mock.get_conn().cursor().execute.assert_called_once_with(SQL) - @mock.patch('airflow.providers.google.cloud.operators.mysql_to_gcs.MySqlHook') - @mock.patch('airflow.providers.google.cloud.operators.sql_to_gcs.GCSHook') + @mock.patch('airflow.providers.google.cloud.transfers.mysql_to_gcs.MySqlHook') + @mock.patch('airflow.providers.google.cloud.transfers.sql_to_gcs.GCSHook') def test_file_splitting(self, gcs_hook_mock_class, mysql_hook_mock_class): """Test that ndjson is split by approx_max_file_size_bytes param.""" mysql_hook_mock = mysql_hook_mock_class.return_value @@ -274,8 +274,8 @@ def _assert_upload(bucket, obj, tmp_filename, mime_type=None, gzip=False): approx_max_file_size_bytes=len(expected_upload[JSON_FILENAME.format(0)])) op.execute(None) - @mock.patch('airflow.providers.google.cloud.operators.mysql_to_gcs.MySqlHook') - @mock.patch('airflow.providers.google.cloud.operators.sql_to_gcs.GCSHook') + @mock.patch('airflow.providers.google.cloud.transfers.mysql_to_gcs.MySqlHook') + 
@mock.patch('airflow.providers.google.cloud.transfers.sql_to_gcs.GCSHook') def test_schema_file(self, gcs_hook_mock_class, mysql_hook_mock_class): """Test writing schema files.""" mysql_hook_mock = mysql_hook_mock_class.return_value @@ -303,8 +303,8 @@ def _assert_upload(bucket, obj, tmp_filename, mime_type, gzip): # pylint: disab # once for the file and once for the schema self.assertEqual(2, gcs_hook_mock.upload.call_count) - @mock.patch('airflow.providers.google.cloud.operators.mysql_to_gcs.MySqlHook') - @mock.patch('airflow.providers.google.cloud.operators.sql_to_gcs.GCSHook') + @mock.patch('airflow.providers.google.cloud.transfers.mysql_to_gcs.MySqlHook') + @mock.patch('airflow.providers.google.cloud.transfers.sql_to_gcs.GCSHook') def test_schema_file_with_custom_schema(self, gcs_hook_mock_class, mysql_hook_mock_class): """Test writing schema files with customized schema""" mysql_hook_mock = mysql_hook_mock_class.return_value @@ -333,8 +333,8 @@ def _assert_upload(bucket, obj, tmp_filename, mime_type, gzip): # pylint: disab # once for the file and once for the schema self.assertEqual(2, gcs_hook_mock.upload.call_count) - @mock.patch('airflow.providers.google.cloud.operators.mysql_to_gcs.MySqlHook') - @mock.patch('airflow.providers.google.cloud.operators.sql_to_gcs.GCSHook') + @mock.patch('airflow.providers.google.cloud.transfers.mysql_to_gcs.MySqlHook') + @mock.patch('airflow.providers.google.cloud.transfers.sql_to_gcs.GCSHook') def test_query_with_error(self, mock_gcs_hook, mock_mysql_hook): mock_mysql_hook.return_value.get_conn.\ return_value.cursor.return_value.execute.side_effect = ProgrammingError @@ -347,8 +347,8 @@ def test_query_with_error(self, mock_gcs_hook, mock_mysql_hook): with self.assertRaises(ProgrammingError): op.query() - @mock.patch('airflow.providers.google.cloud.operators.mysql_to_gcs.MySqlHook') - @mock.patch('airflow.providers.google.cloud.operators.sql_to_gcs.GCSHook') + @mock.patch('airflow.providers.google.cloud.transfers.mysql_to_gcs.MySqlHook') + @mock.patch('airflow.providers.google.cloud.transfers.sql_to_gcs.GCSHook') def test_execute_with_query_error(self, mock_gcs_hook, mock_mysql_hook): mock_mysql_hook.return_value.get_conn.\ return_value.cursor.return_value.execute.side_effect = ProgrammingError diff --git a/tests/providers/google/cloud/operators/test_postgres_to_gcs.py b/tests/providers/google/cloud/transfers/test_postgres_to_gcs.py similarity index 95% rename from tests/providers/google/cloud/operators/test_postgres_to_gcs.py rename to tests/providers/google/cloud/transfers/test_postgres_to_gcs.py index 9587676169345..9fc0da5a05ffa 100644 --- a/tests/providers/google/cloud/operators/test_postgres_to_gcs.py +++ b/tests/providers/google/cloud/transfers/test_postgres_to_gcs.py @@ -21,7 +21,7 @@ import pytest from mock import patch -from airflow.providers.google.cloud.operators.postgres_to_gcs import PostgresToGCSOperator +from airflow.providers.google.cloud.transfers.postgres_to_gcs import PostgresToGCSOperator from airflow.providers.postgres.hooks.postgres import PostgresHook TABLES = {'postgres_to_gcs_operator', 'postgres_to_gcs_operator_empty'} @@ -84,7 +84,7 @@ def test_init(self): self.assertEqual(op.bucket, BUCKET) self.assertEqual(op.filename, FILENAME) - @patch('airflow.providers.google.cloud.operators.sql_to_gcs.GCSHook') + @patch('airflow.providers.google.cloud.transfers.sql_to_gcs.GCSHook') def test_exec_success(self, gcs_hook_mock_class): """Test the execute function in case where the run is successful.""" op = PostgresToGCSOperator( @@ 
-108,7 +108,7 @@ def _assert_upload(bucket, obj, tmp_filename, mime_type, gzip): op.execute(None) - @patch('airflow.providers.google.cloud.operators.sql_to_gcs.GCSHook') + @patch('airflow.providers.google.cloud.transfers.sql_to_gcs.GCSHook') def test_file_splitting(self, gcs_hook_mock_class): """Test that ndjson is split by approx_max_file_size_bytes param.""" @@ -135,7 +135,7 @@ def _assert_upload(bucket, obj, tmp_filename, mime_type, gzip): approx_max_file_size_bytes=len(expected_upload[FILENAME.format(0)])) op.execute(None) - @patch('airflow.providers.google.cloud.operators.sql_to_gcs.GCSHook') + @patch('airflow.providers.google.cloud.transfers.sql_to_gcs.GCSHook') def test_schema_file(self, gcs_hook_mock_class): """Test writing schema files.""" diff --git a/tests/providers/google/cloud/operators/test_postgres_to_gcs_system.py b/tests/providers/google/cloud/transfers/test_postgres_to_gcs_system.py similarity index 100% rename from tests/providers/google/cloud/operators/test_postgres_to_gcs_system.py rename to tests/providers/google/cloud/transfers/test_postgres_to_gcs_system.py diff --git a/tests/providers/google/cloud/operators/test_presto_to_gcs.py b/tests/providers/google/cloud/transfers/test_presto_to_gcs.py similarity index 92% rename from tests/providers/google/cloud/operators/test_presto_to_gcs.py rename to tests/providers/google/cloud/transfers/test_presto_to_gcs.py index 526abaa4680e6..64764efab1cec 100644 --- a/tests/providers/google/cloud/operators/test_presto_to_gcs.py +++ b/tests/providers/google/cloud/transfers/test_presto_to_gcs.py @@ -20,7 +20,7 @@ import pytest from mock import patch -from airflow.providers.google.cloud.operators.presto_to_gcs import PrestoToGCSOperator +from airflow.providers.google.cloud.transfers.presto_to_gcs import PrestoToGCSOperator TASK_ID = "test-presto-to-gcs" PRESTO_CONN_ID = "my-presto-conn" @@ -54,8 +54,8 @@ def test_init(self): self.assertEqual(op.bucket, BUCKET) self.assertEqual(op.filename, FILENAME) - @patch("airflow.providers.google.cloud.operators.presto_to_gcs.PrestoHook") - @patch("airflow.providers.google.cloud.operators.sql_to_gcs.GCSHook") + @patch("airflow.providers.google.cloud.transfers.presto_to_gcs.PrestoHook") + @patch("airflow.providers.google.cloud.transfers.sql_to_gcs.GCSHook") def test_save_as_json(self, mock_gcs_hook, mock_presto_hook): def _assert_upload(bucket, obj, tmp_filename, mime_type, gzip): self.assertEqual(BUCKET, bucket) @@ -97,8 +97,8 @@ def _assert_upload(bucket, obj, tmp_filename, mime_type, gzip): mock_gcs_hook.return_value.upload.assert_called() - @patch("airflow.providers.google.cloud.operators.presto_to_gcs.PrestoHook") - @patch("airflow.providers.google.cloud.operators.sql_to_gcs.GCSHook") + @patch("airflow.providers.google.cloud.transfers.presto_to_gcs.PrestoHook") + @patch("airflow.providers.google.cloud.transfers.sql_to_gcs.GCSHook") def test_save_as_json_with_file_splitting(self, mock_gcs_hook, mock_presto_hook): """Test that ndjson is split by approx_max_file_size_bytes param.""" @@ -142,8 +142,8 @@ def _assert_upload(bucket, obj, tmp_filename, mime_type, gzip): mock_gcs_hook.return_value.upload.assert_called() - @patch("airflow.providers.google.cloud.operators.presto_to_gcs.PrestoHook") - @patch("airflow.providers.google.cloud.operators.sql_to_gcs.GCSHook") + @patch("airflow.providers.google.cloud.transfers.presto_to_gcs.PrestoHook") + @patch("airflow.providers.google.cloud.transfers.sql_to_gcs.GCSHook") def test_save_as_json_with_schema_file(self, mock_gcs_hook, mock_presto_hook): """Test 
writing schema files.""" @@ -183,8 +183,8 @@ def _assert_upload(bucket, obj, tmp_filename, mime_type, gzip): # pylint: disab # once for the file and once for the schema self.assertEqual(2, mock_gcs_hook.return_value.upload.call_count) - @patch("airflow.providers.google.cloud.operators.sql_to_gcs.GCSHook") - @patch("airflow.providers.google.cloud.operators.presto_to_gcs.PrestoHook") + @patch("airflow.providers.google.cloud.transfers.sql_to_gcs.GCSHook") + @patch("airflow.providers.google.cloud.transfers.presto_to_gcs.PrestoHook") def test_save_as_csv(self, mock_presto_hook, mock_gcs_hook): def _assert_upload(bucket, obj, tmp_filename, mime_type, gzip): self.assertEqual(BUCKET, bucket) @@ -227,8 +227,8 @@ def _assert_upload(bucket, obj, tmp_filename, mime_type, gzip): mock_presto_hook.assert_called_once_with(presto_conn_id=PRESTO_CONN_ID) mock_gcs_hook.assert_called_once_with(delegate_to=None, gcp_conn_id=GCP_CONN_ID) - @patch("airflow.providers.google.cloud.operators.presto_to_gcs.PrestoHook") - @patch("airflow.providers.google.cloud.operators.sql_to_gcs.GCSHook") + @patch("airflow.providers.google.cloud.transfers.presto_to_gcs.PrestoHook") + @patch("airflow.providers.google.cloud.transfers.sql_to_gcs.GCSHook") def test_save_as_csv_with_file_splitting(self, mock_gcs_hook, mock_presto_hook): """Test that csv is split by approx_max_file_size_bytes param.""" @@ -273,8 +273,8 @@ def _assert_upload(bucket, obj, tmp_filename, mime_type, gzip): mock_gcs_hook.return_value.upload.assert_called() - @patch("airflow.providers.google.cloud.operators.presto_to_gcs.PrestoHook") - @patch("airflow.providers.google.cloud.operators.sql_to_gcs.GCSHook") + @patch("airflow.providers.google.cloud.transfers.presto_to_gcs.PrestoHook") + @patch("airflow.providers.google.cloud.transfers.sql_to_gcs.GCSHook") def test_save_as_csv_with_schema_file(self, mock_gcs_hook, mock_presto_hook): """Test writing schema files.""" diff --git a/tests/providers/google/cloud/operators/test_presto_to_gcs_system.py b/tests/providers/google/cloud/transfers/test_presto_to_gcs_system.py similarity index 100% rename from tests/providers/google/cloud/operators/test_presto_to_gcs_system.py rename to tests/providers/google/cloud/transfers/test_presto_to_gcs_system.py diff --git a/tests/providers/google/cloud/operators/test_s3_to_gcs.py b/tests/providers/google/cloud/transfers/test_s3_to_gcs.py similarity index 93% rename from tests/providers/google/cloud/operators/test_s3_to_gcs.py rename to tests/providers/google/cloud/transfers/test_s3_to_gcs.py index e0eeba9e23d32..4f56c8ea23ed1 100644 --- a/tests/providers/google/cloud/operators/test_s3_to_gcs.py +++ b/tests/providers/google/cloud/transfers/test_s3_to_gcs.py @@ -20,7 +20,7 @@ import mock -from airflow.providers.google.cloud.operators.s3_to_gcs import S3ToGCSOperator +from airflow.providers.google.cloud.transfers.s3_to_gcs import S3ToGCSOperator TASK_ID = 'test-s3-gcs-operator' S3_BUCKET = 'test-bucket' @@ -51,10 +51,10 @@ def test_init(self): self.assertEqual(operator.gcp_conn_id, GCS_CONN_ID) self.assertEqual(operator.dest_gcs, GCS_PATH_PREFIX) - @mock.patch('airflow.providers.google.cloud.operators.s3_to_gcs.S3Hook') + @mock.patch('airflow.providers.google.cloud.transfers.s3_to_gcs.S3Hook') @mock.patch('airflow.providers.amazon.aws.operators.s3_list.S3Hook') @mock.patch( - 'airflow.providers.google.cloud.operators.s3_to_gcs.GCSHook') + 'airflow.providers.google.cloud.transfers.s3_to_gcs.GCSHook') def test_execute(self, gcs_mock_hook, s3_one_mock_hook, s3_two_mock_hook): """Test the 
execute function when the run is successful.""" @@ -86,10 +86,10 @@ def test_execute(self, gcs_mock_hook, s3_one_mock_hook, s3_two_mock_hook): # we expect MOCK_FILES to be uploaded self.assertEqual(sorted(MOCK_FILES), sorted(uploaded_files)) - @mock.patch('airflow.providers.google.cloud.operators.s3_to_gcs.S3Hook') + @mock.patch('airflow.providers.google.cloud.transfers.s3_to_gcs.S3Hook') @mock.patch('airflow.providers.amazon.aws.operators.s3_list.S3Hook') @mock.patch( - 'airflow.providers.google.cloud.operators.s3_to_gcs.GCSHook') + 'airflow.providers.google.cloud.transfers.s3_to_gcs.GCSHook') def test_execute_with_gzip(self, gcs_mock_hook, s3_one_mock_hook, s3_two_mock_hook): """Test the execute function when the run is successful.""" diff --git a/tests/providers/google/cloud/operators/test_sftp_to_gcs.py b/tests/providers/google/cloud/transfers/test_sftp_to_gcs.py similarity index 91% rename from tests/providers/google/cloud/operators/test_sftp_to_gcs.py rename to tests/providers/google/cloud/transfers/test_sftp_to_gcs.py index a46004865e438..d41c84468b70a 100644 --- a/tests/providers/google/cloud/operators/test_sftp_to_gcs.py +++ b/tests/providers/google/cloud/transfers/test_sftp_to_gcs.py @@ -23,7 +23,7 @@ import mock from airflow.exceptions import AirflowException -from airflow.providers.google.cloud.operators.sftp_to_gcs import SFTPToGCSOperator +from airflow.providers.google.cloud.transfers.sftp_to_gcs import SFTPToGCSOperator TASK_ID = "test-gcs-to-sftp-operator" GCP_CONN_ID = "GCP_CONN_ID" @@ -52,8 +52,8 @@ # pylint: disable=unused-argument class TestSFTPToGCSOperator(unittest.TestCase): - @mock.patch("airflow.providers.google.cloud.operators.sftp_to_gcs.GCSHook") - @mock.patch("airflow.providers.google.cloud.operators.sftp_to_gcs.SFTPHook") + @mock.patch("airflow.providers.google.cloud.transfers.sftp_to_gcs.GCSHook") + @mock.patch("airflow.providers.google.cloud.transfers.sftp_to_gcs.SFTPHook") def test_execute_copy_single_file(self, sftp_hook, gcs_hook): task = SFTPToGCSOperator( task_id=TASK_ID, @@ -84,8 +84,8 @@ def test_execute_copy_single_file(self, sftp_hook, gcs_hook): sftp_hook.return_value.delete_file.assert_not_called() - @mock.patch("airflow.providers.google.cloud.operators.sftp_to_gcs.GCSHook") - @mock.patch("airflow.providers.google.cloud.operators.sftp_to_gcs.SFTPHook") + @mock.patch("airflow.providers.google.cloud.transfers.sftp_to_gcs.GCSHook") + @mock.patch("airflow.providers.google.cloud.transfers.sftp_to_gcs.SFTPHook") def test_execute_move_single_file(self, sftp_hook, gcs_hook): task = SFTPToGCSOperator( task_id=TASK_ID, @@ -118,8 +118,8 @@ def test_execute_move_single_file(self, sftp_hook, gcs_hook): SOURCE_OBJECT_NO_WILDCARD ) - @mock.patch("airflow.providers.google.cloud.operators.sftp_to_gcs.GCSHook") - @mock.patch("airflow.providers.google.cloud.operators.sftp_to_gcs.SFTPHook") + @mock.patch("airflow.providers.google.cloud.transfers.sftp_to_gcs.GCSHook") + @mock.patch("airflow.providers.google.cloud.transfers.sftp_to_gcs.SFTPHook") def test_execute_copy_with_wildcard(self, sftp_hook, gcs_hook): sftp_hook.return_value.get_tree_map.return_value = [ ["main_dir/test_object3.json", "main_dir/sub_dir/test_object3.json"], @@ -167,8 +167,8 @@ def test_execute_copy_with_wildcard(self, sftp_hook, gcs_hook): ] ) - @mock.patch("airflow.providers.google.cloud.operators.sftp_to_gcs.GCSHook") - @mock.patch("airflow.providers.google.cloud.operators.sftp_to_gcs.SFTPHook") + @mock.patch("airflow.providers.google.cloud.transfers.sftp_to_gcs.GCSHook") + 
@mock.patch("airflow.providers.google.cloud.transfers.sftp_to_gcs.SFTPHook") def test_execute_move_with_wildcard(self, sftp_hook, gcs_hook): sftp_hook.return_value.get_tree_map.return_value = [ ["main_dir/test_object3.json", "main_dir/sub_dir/test_object3.json"], @@ -196,8 +196,8 @@ def test_execute_move_with_wildcard(self, sftp_hook, gcs_hook): ] ) - @mock.patch("airflow.providers.google.cloud.operators.sftp_to_gcs.GCSHook") - @mock.patch("airflow.providers.google.cloud.operators.sftp_to_gcs.SFTPHook") + @mock.patch("airflow.providers.google.cloud.transfers.sftp_to_gcs.GCSHook") + @mock.patch("airflow.providers.google.cloud.transfers.sftp_to_gcs.SFTPHook") def test_execute_more_than_one_wildcard_exception(self, sftp_hook, gcs_hook): task = SFTPToGCSOperator( task_id=TASK_ID, diff --git a/tests/providers/google/cloud/operators/test_sftp_to_gcs_system.py b/tests/providers/google/cloud/transfers/test_sftp_to_gcs_system.py similarity index 100% rename from tests/providers/google/cloud/operators/test_sftp_to_gcs_system.py rename to tests/providers/google/cloud/transfers/test_sftp_to_gcs_system.py diff --git a/tests/providers/google/cloud/operators/test_sheets_to_gcs.py b/tests/providers/google/cloud/transfers/test_sheets_to_gcs.py similarity index 90% rename from tests/providers/google/cloud/operators/test_sheets_to_gcs.py rename to tests/providers/google/cloud/transfers/test_sheets_to_gcs.py index 7dc947de20d08..a1b8fb0679a79 100644 --- a/tests/providers/google/cloud/operators/test_sheets_to_gcs.py +++ b/tests/providers/google/cloud/transfers/test_sheets_to_gcs.py @@ -17,7 +17,7 @@ import mock -from airflow.providers.google.cloud.operators.sheets_to_gcs import GoogleSheetsToGCSOperator +from airflow.providers.google.cloud.transfers.sheets_to_gcs import GoogleSheetsToGCSOperator RANGE = "test!A:E" FILTER = ["sheet_filter"] @@ -31,8 +31,8 @@ class TestGoogleSheetsToGCSOperator: - @mock.patch("airflow.providers.google.cloud.operators.sheets_to_gcs.csv.writer") - @mock.patch("airflow.providers.google.cloud.operators.sheets_to_gcs.NamedTemporaryFile") + @mock.patch("airflow.providers.google.cloud.transfers.sheets_to_gcs.csv.writer") + @mock.patch("airflow.providers.google.cloud.transfers.sheets_to_gcs.NamedTemporaryFile") def test_upload_data(self, mock_tempfile, mock_writer): filename = "file://97g23r" file_handle = mock.MagicMock() @@ -76,13 +76,13 @@ def test_upload_data(self, mock_tempfile, mock_writer): # Assert path to file is returned assert result == expected_dest_file - @mock.patch("airflow.providers.google.cloud.operators.sheets_to_gcs.GCSHook") - @mock.patch("airflow.providers.google.cloud.operators.sheets_to_gcs.GSheetsHook") + @mock.patch("airflow.providers.google.cloud.transfers.sheets_to_gcs.GCSHook") + @mock.patch("airflow.providers.google.cloud.transfers.sheets_to_gcs.GSheetsHook") @mock.patch( - "airflow.providers.google.cloud.operators.sheets_to_gcs.GoogleSheetsToGCSOperator.xcom_push" + "airflow.providers.google.cloud.transfers.sheets_to_gcs.GoogleSheetsToGCSOperator.xcom_push" ) @mock.patch( - "airflow.providers.google.cloud.operators.sheets_to_gcs.GoogleSheetsToGCSOperator._upload_data" + "airflow.providers.google.cloud.transfers.sheets_to_gcs.GoogleSheetsToGCSOperator._upload_data" ) def test_execute(self, mock_upload_data, mock_xcom, mock_sheet_hook, mock_gcs_hook): context = {} diff --git a/tests/providers/google/cloud/operators/test_sheets_to_gcs_system.py b/tests/providers/google/cloud/transfers/test_sheets_to_gcs_system.py similarity index 100% rename from 
diff --git a/tests/providers/google/suite/transfers/__init__.py b/tests/providers/google/suite/transfers/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/providers/google/suite/transfers/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/tests/providers/google/suite/operators/test_gcs_to_gdrive.py b/tests/providers/google/suite/transfers/test_gcs_to_gdrive.py similarity index 98% rename from tests/providers/google/suite/operators/test_gcs_to_gdrive.py rename to tests/providers/google/suite/transfers/test_gcs_to_gdrive.py index 9991f1febd290..a80b640e28099 100644 --- a/tests/providers/google/suite/operators/test_gcs_to_gdrive.py +++ b/tests/providers/google/suite/transfers/test_gcs_to_gdrive.py @@ -19,9 +19,9 @@ from unittest import mock from airflow.exceptions import AirflowException -from airflow.providers.google.suite.operators.gcs_to_gdrive import GCSToGoogleDriveOperator +from airflow.providers.google.suite.transfers.gcs_to_gdrive import GCSToGoogleDriveOperator -MODULE = "airflow.providers.google.suite.operators.gcs_to_gdrive" +MODULE = "airflow.providers.google.suite.transfers.gcs_to_gdrive" class TestGcsToGDriveOperator(unittest.TestCase): diff --git a/tests/providers/google/suite/operators/test_gcs_to_sheets.py b/tests/providers/google/suite/transfers/test_gcs_to_sheets.py similarity index 87% rename from tests/providers/google/suite/operators/test_gcs_to_sheets.py rename to tests/providers/google/suite/transfers/test_gcs_to_sheets.py index dda4a978957c0..dd1e59ebe8236 100644 --- a/tests/providers/google/suite/operators/test_gcs_to_sheets.py +++ b/tests/providers/google/suite/transfers/test_gcs_to_sheets.py @@ -17,7 +17,7 @@ import mock -from airflow.providers.google.suite.operators.gcs_to_sheets import GCSToGoogleSheetsOperator +from airflow.providers.google.suite.transfers.gcs_to_sheets import GCSToGoogleSheetsOperator GCP_CONN_ID = "test" SPREADSHEET_ID = "1234567890" @@ -27,10 +27,10 @@ class TestGCSToGoogleSheets: - @mock.patch("airflow.providers.google.suite.operators.gcs_to_sheets.GCSHook") - @mock.patch("airflow.providers.google.suite.operators.gcs_to_sheets.GSheetsHook") - @mock.patch("airflow.providers.google.suite.operators.gcs_to_sheets.NamedTemporaryFile") - @mock.patch("airflow.providers.google.suite.operators.gcs_to_sheets.csv.reader") + @mock.patch("airflow.providers.google.suite.transfers.gcs_to_sheets.GCSHook") + @mock.patch("airflow.providers.google.suite.transfers.gcs_to_sheets.GSheetsHook") + @mock.patch("airflow.providers.google.suite.transfers.gcs_to_sheets.NamedTemporaryFile") +
@mock.patch("airflow.providers.google.suite.transfers.gcs_to_sheets.csv.reader") def test_execute(self, mock_reader, mock_tempfile, mock_sheet_hook, mock_gcs_hook): filename = "file://97g23r" file_handle = mock.MagicMock() diff --git a/tests/providers/google/suite/operators/test_gcs_to_sheets_system.py b/tests/providers/google/suite/transfers/test_gcs_to_sheets_system.py similarity index 100% rename from tests/providers/google/suite/operators/test_gcs_to_sheets_system.py rename to tests/providers/google/suite/transfers/test_gcs_to_sheets_system.py diff --git a/tests/providers/microsoft/azure/transfers/__init__.py b/tests/providers/microsoft/azure/transfers/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/providers/microsoft/azure/transfers/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/tests/providers/microsoft/azure/operators/test_file_to_wasb.py b/tests/providers/microsoft/azure/transfers/test_file_to_wasb.py similarity index 95% rename from tests/providers/microsoft/azure/operators/test_file_to_wasb.py rename to tests/providers/microsoft/azure/transfers/test_file_to_wasb.py index 0ad68c8f69348..cefc5f81e4dcb 100644 --- a/tests/providers/microsoft/azure/operators/test_file_to_wasb.py +++ b/tests/providers/microsoft/azure/transfers/test_file_to_wasb.py @@ -23,7 +23,7 @@ import mock from airflow.models.dag import DAG -from airflow.providers.microsoft.azure.operators.file_to_wasb import FileToWasbOperator +from airflow.providers.microsoft.azure.transfers.file_to_wasb import FileToWasbOperator class TestFileToWasbOperator(unittest.TestCase): @@ -65,7 +65,7 @@ def test_init(self): ) self.assertEqual(operator.load_options, {'timeout': 2}) - @mock.patch('airflow.providers.microsoft.azure.operators.file_to_wasb.WasbHook', + @mock.patch('airflow.providers.microsoft.azure.transfers.file_to_wasb.WasbHook', autospec=True) def test_execute(self, mock_hook): mock_instance = mock_hook.return_value diff --git a/tests/providers/microsoft/azure/operators/test_oracle_to_azure_data_lake_transfer.py b/tests/providers/microsoft/azure/transfers/test_oracle_to_azure_data_lake.py similarity index 93% rename from tests/providers/microsoft/azure/operators/test_oracle_to_azure_data_lake_transfer.py rename to tests/providers/microsoft/azure/transfers/test_oracle_to_azure_data_lake.py index 06f6a8c1dc121..5ccb92292dafe 100644 --- a/tests/providers/microsoft/azure/operators/test_oracle_to_azure_data_lake_transfer.py +++ b/tests/providers/microsoft/azure/transfers/test_oracle_to_azure_data_lake.py @@ -24,14 +24,14 @@ import unicodecsv as csv from mock import MagicMock -from airflow.providers.microsoft.azure.operators.oracle_to_azure_data_lake_transfer import ( - OracleToAzureDataLakeTransferOperator, 
+from airflow.providers.microsoft.azure.transfers.oracle_to_azure_data_lake import ( + OracleToAzureDataLakeOperator, ) class TestOracleToAzureDataLakeTransfer(unittest.TestCase): - mock_module_path = 'airflow.providers.microsoft.azure.operators.oracle_to_azure_data_lake_transfer' + mock_module_path = 'airflow.providers.microsoft.azure.transfers.oracle_to_azure_data_lake' def test_write_temp_file(self): task_id = "some_test_id" @@ -52,7 +52,7 @@ def test_write_temp_file(self): mock_cursor.description = cursor_description mock_cursor.__iter__.return_value = cursor_rows - op = OracleToAzureDataLakeTransferOperator( + op = OracleToAzureDataLakeOperator( task_id=task_id, filename=filename, oracle_conn_id=oracle_conn_id, @@ -107,7 +107,7 @@ def test_execute(self, mock_data_lake_hook, mock_oracle_hook): mock_oracle_conn.cursor().return_value = cursor_mock mock_oracle_hook.get_conn().return_value = mock_oracle_conn - op = OracleToAzureDataLakeTransferOperator( + op = OracleToAzureDataLakeOperator( task_id=task_id, filename=filename, oracle_conn_id=oracle_conn_id, diff --git a/tests/providers/mysql/transfers/__init__.py b/tests/providers/mysql/transfers/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/providers/mysql/transfers/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
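The MySQL transfer renames below also drop the redundant `Transfer` infix from class names: `PrestoToMySqlTransferOperator` becomes `PrestoToMySqlOperator`, `S3ToMySqlTransferOperator` becomes `S3ToMySqlOperator`, and `VerticaToMySqlTransferOperator` becomes `VerticaToMySqlOperator`, so that transfer operators are uniformly named `<Source>To<Target>Operator`. For DAG authors the migration is a one-line import change; a hedged sketch, assuming an installation that already ships the new `transfers` modules (the query and table names are illustrative, echoing the test below):

    # Old location, removed by this change:
    #   from airflow.providers.mysql.operators.presto_to_mysql import PrestoToMySqlTransferOperator
    # New location and name:
    from airflow.providers.mysql.transfers.presto_to_mysql import PrestoToMySqlOperator

    load_counts = PrestoToMySqlOperator(
        task_id='presto_to_mysql_check',
        sql="SELECT name, count(*) AS ccount FROM some_table GROUP BY name",  # illustrative query
        mysql_table='name_counts',                       # illustrative target table
        mysql_preoperator='TRUNCATE TABLE name_counts',  # optional statement run before the load
    )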
diff --git a/tests/providers/mysql/operators/test_presto_to_mysql.py b/tests/providers/mysql/transfers/test_presto_to_mysql.py similarity index 83% rename from tests/providers/mysql/operators/test_presto_to_mysql.py rename to tests/providers/mysql/transfers/test_presto_to_mysql.py index 33ead4cac42ad..052f55789f97e 100644 --- a/tests/providers/mysql/operators/test_presto_to_mysql.py +++ b/tests/providers/mysql/transfers/test_presto_to_mysql.py @@ -19,7 +19,7 @@ import unittest from unittest.mock import patch -from airflow.providers.mysql.operators.presto_to_mysql import PrestoToMySqlTransferOperator +from airflow.providers.mysql.transfers.presto_to_mysql import PrestoToMySqlOperator from tests.providers.apache.hive import DEFAULT_DATE, TestHiveEnvironment @@ -33,21 +33,21 @@ def setUp(self): ) super().setUp() - @patch('airflow.providers.mysql.operators.presto_to_mysql.MySqlHook') - @patch('airflow.providers.mysql.operators.presto_to_mysql.PrestoHook') + @patch('airflow.providers.mysql.transfers.presto_to_mysql.MySqlHook') + @patch('airflow.providers.mysql.transfers.presto_to_mysql.PrestoHook') def test_execute(self, mock_presto_hook, mock_mysql_hook): - PrestoToMySqlTransferOperator(**self.kwargs).execute(context={}) + PrestoToMySqlOperator(**self.kwargs).execute(context={}) mock_presto_hook.return_value.get_records.assert_called_once_with(self.kwargs['sql']) mock_mysql_hook.return_value.insert_rows.assert_called_once_with( table=self.kwargs['mysql_table'], rows=mock_presto_hook.return_value.get_records.return_value) - @patch('airflow.providers.mysql.operators.presto_to_mysql.MySqlHook') - @patch('airflow.providers.mysql.operators.presto_to_mysql.PrestoHook') + @patch('airflow.providers.mysql.transfers.presto_to_mysql.MySqlHook') + @patch('airflow.providers.mysql.transfers.presto_to_mysql.PrestoHook') def test_execute_with_mysql_preoperator(self, mock_presto_hook, mock_mysql_hook): self.kwargs.update(dict(mysql_preoperator='mysql_preoperator')) - PrestoToMySqlTransferOperator(**self.kwargs).execute(context={}) + PrestoToMySqlOperator(**self.kwargs).execute(context={}) mock_presto_hook.return_value.get_records.assert_called_once_with(self.kwargs['sql']) mock_mysql_hook.return_value.run.assert_called_once_with(self.kwargs['mysql_preoperator']) @@ -58,7 +58,7 @@ def test_execute_with_mysql_preoperator(self, mock_presto_hook, mock_mysql_hook) 'AIRFLOW_RUNALL_TESTS' not in os.environ, "Skipped because AIRFLOW_RUNALL_TESTS is not set") def test_presto_to_mysql(self): - op = PrestoToMySqlTransferOperator( + op = PrestoToMySqlOperator( task_id='presto_to_mysql_check', sql=""" SELECT name, count(*) as ccount diff --git a/tests/providers/mysql/operators/test_s3_to_mysql.py b/tests/providers/mysql/transfers/test_s3_to_mysql.py similarity index 86% rename from tests/providers/mysql/operators/test_s3_to_mysql.py rename to tests/providers/mysql/transfers/test_s3_to_mysql.py index 2d5b82498e2d0..6b8596beb5925 100644 --- a/tests/providers/mysql/operators/test_s3_to_mysql.py +++ b/tests/providers/mysql/transfers/test_s3_to_mysql.py @@ -21,7 +21,7 @@ from sqlalchemy import or_ from airflow import configuration, models -from airflow.providers.mysql.operators.s3_to_mysql import S3ToMySqlTransferOperator +from airflow.providers.mysql.transfers.s3_to_mysql import S3ToMySqlOperator from airflow.utils import db from airflow.utils.session import create_session @@ -65,11 +65,11 @@ def setUp(self): 'dag': None } - @patch('airflow.providers.mysql.operators.s3_to_mysql.S3Hook.download_file') - 
@patch('airflow.providers.mysql.operators.s3_to_mysql.MySqlHook.bulk_load_custom') - @patch('airflow.providers.mysql.operators.s3_to_mysql.os.remove') + @patch('airflow.providers.mysql.transfers.s3_to_mysql.S3Hook.download_file') + @patch('airflow.providers.mysql.transfers.s3_to_mysql.MySqlHook.bulk_load_custom') + @patch('airflow.providers.mysql.transfers.s3_to_mysql.os.remove') def test_execute(self, mock_remove, mock_bulk_load_custom, mock_download_file): - S3ToMySqlTransferOperator(**self.s3_to_mysql_transfer_kwargs).execute({}) + S3ToMySqlOperator(**self.s3_to_mysql_transfer_kwargs).execute({}) mock_download_file.assert_called_once_with( key=self.s3_to_mysql_transfer_kwargs['s3_source_key'] @@ -82,13 +82,13 @@ def test_execute(self, mock_remove, mock_bulk_load_custom, mock_download_file): ) mock_remove.assert_called_once_with(mock_download_file.return_value) - @patch('airflow.providers.mysql.operators.s3_to_mysql.S3Hook.download_file') - @patch('airflow.providers.mysql.operators.s3_to_mysql.MySqlHook.bulk_load_custom') - @patch('airflow.providers.mysql.operators.s3_to_mysql.os.remove') + @patch('airflow.providers.mysql.transfers.s3_to_mysql.S3Hook.download_file') + @patch('airflow.providers.mysql.transfers.s3_to_mysql.MySqlHook.bulk_load_custom') + @patch('airflow.providers.mysql.transfers.s3_to_mysql.os.remove') def test_execute_exception(self, mock_remove, mock_bulk_load_custom, mock_download_file): mock_bulk_load_custom.side_effect = Exception - self.assertRaises(Exception, S3ToMySqlTransferOperator( + self.assertRaises(Exception, S3ToMySqlOperator( **self.s3_to_mysql_transfer_kwargs).execute, {}) mock_download_file.assert_called_once_with( diff --git a/tests/providers/mysql/operators/test_vertica_to_mysql.py b/tests/providers/mysql/transfers/test_vertica_to_mysql.py similarity index 58% rename from tests/providers/mysql/operators/test_vertica_to_mysql.py rename to tests/providers/mysql/transfers/test_vertica_to_mysql.py index f9e4f696b943b..e8f0171b62dea 100644 --- a/tests/providers/mysql/operators/test_vertica_to_mysql.py +++ b/tests/providers/mysql/transfers/test_vertica_to_mysql.py @@ -21,7 +21,7 @@ from unittest import mock from airflow.models.dag import DAG -from airflow.providers.mysql.operators.vertica_to_mysql import VerticaToMySqlTransferOperator +from airflow.providers.mysql.transfers.vertica_to_mysql import VerticaToMySqlOperator def mock_get_conn(): @@ -49,41 +49,41 @@ def setUp(self): self.dag = DAG('test_dag_id', default_args=args) @mock.patch( - 'airflow.providers.mysql.operators.vertica_to_mysql.VerticaHook.get_conn', side_effect=mock_get_conn) + 'airflow.providers.mysql.transfers.vertica_to_mysql.VerticaHook.get_conn', side_effect=mock_get_conn) @mock.patch( - 'airflow.providers.mysql.operators.vertica_to_mysql.MySqlHook.get_conn', side_effect=mock_get_conn) + 'airflow.providers.mysql.transfers.vertica_to_mysql.MySqlHook.get_conn', side_effect=mock_get_conn) @mock.patch( - 'airflow.providers.mysql.operators.vertica_to_mysql.MySqlHook.insert_rows', return_value=True) + 'airflow.providers.mysql.transfers.vertica_to_mysql.MySqlHook.insert_rows', return_value=True) def test_select_insert_transfer(self, *args): """ Test check selection from vertica into memory and after that inserting into mysql """ - task = VerticaToMySqlTransferOperator(task_id='test_task_id', - sql='select a, b, c', - mysql_table='test_table', - vertica_conn_id='test_vertica_conn_id', - mysql_conn_id='test_mysql_conn_id', - params={}, - bulk_load=False, - dag=self.dag) + task = 
VerticaToMySqlOperator(task_id='test_task_id', + sql='select a, b, c', + mysql_table='test_table', + vertica_conn_id='test_vertica_conn_id', + mysql_conn_id='test_mysql_conn_id', + params={}, + bulk_load=False, + dag=self.dag) task.execute(None) @mock.patch( - 'airflow.providers.mysql.operators.vertica_to_mysql.VerticaHook.get_conn', side_effect=mock_get_conn) + 'airflow.providers.mysql.transfers.vertica_to_mysql.VerticaHook.get_conn', side_effect=mock_get_conn) @mock.patch( - 'airflow.providers.mysql.operators.vertica_to_mysql.MySqlHook.get_conn', side_effect=mock_get_conn) + 'airflow.providers.mysql.transfers.vertica_to_mysql.MySqlHook.get_conn', side_effect=mock_get_conn) def test_select_bulk_insert_transfer(self, *args): """ Test check selection from vertica into temporary file and after that bulk inserting into mysql """ - task = VerticaToMySqlTransferOperator(task_id='test_task_id', - sql='select a, b, c', - mysql_table='test_table', - vertica_conn_id='test_vertica_conn_id', - mysql_conn_id='test_mysql_conn_id', - params={}, - bulk_load=True, - dag=self.dag) + task = VerticaToMySqlOperator(task_id='test_task_id', + sql='select a, b, c', + mysql_table='test_table', + vertica_conn_id='test_vertica_conn_id', + mysql_conn_id='test_mysql_conn_id', + params={}, + bulk_load=True, + dag=self.dag) task.execute(None) diff --git a/tests/providers/oracle/operators/__init__.py b/tests/providers/oracle/transfers/__init__.py similarity index 100% rename from tests/providers/oracle/operators/__init__.py rename to tests/providers/oracle/transfers/__init__.py diff --git a/tests/providers/oracle/operators/test_oracle_to_oracle_transfer.py b/tests/providers/oracle/transfers/test_oracle_to_oracle.py similarity index 94% rename from tests/providers/oracle/operators/test_oracle_to_oracle_transfer.py rename to tests/providers/oracle/transfers/test_oracle_to_oracle.py index 72ee0357f2ed8..296689dccf0be 100644 --- a/tests/providers/oracle/operators/test_oracle_to_oracle_transfer.py +++ b/tests/providers/oracle/transfers/test_oracle_to_oracle.py @@ -21,7 +21,7 @@ from mock import MagicMock -from airflow.providers.oracle.operators.oracle_to_oracle_transfer import OracleToOracleTransferOperator +from airflow.providers.oracle.transfers.oracle_to_oracle import OracleToOracleOperator class TestOracleToOracleTransfer(unittest.TestCase): @@ -47,7 +47,7 @@ def test_execute(): mock_cursor.description.__iter__.return_value = cursor_description mock_cursor.fetchmany.side_effect = [cursor_rows, []] - op = OracleToOracleTransferOperator( + op = OracleToOracleOperator( task_id='copy_data', oracle_destination_conn_id=oracle_destination_conn_id, destination_table=destination_table, diff --git a/tests/providers/snowflake/transfers/__init__.py b/tests/providers/snowflake/transfers/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/providers/snowflake/transfers/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License.
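The Snowflake renames below complete the same pattern, and the tail of this diff (tests/test_core_to_contrib.py) is what keeps the old import locations usable: each (new path, old path) pair is asserted to emit a DeprecationWarning when the old class is instantiated. A minimal, self-contained sketch of that compatibility-shim pattern, using stand-in classes rather than the real Airflow shims (which live under airflow.contrib and airflow.operators):

    import warnings

    class S3ToSnowflakeOperator:  # stand-in for the renamed class
        def __init__(self, **kwargs):
            self.kwargs = kwargs

    class S3ToSnowflakeTransferOperator(S3ToSnowflakeOperator):
        """Stand-in deprecated alias kept at the old location."""
        def __init__(self, **kwargs):
            warnings.warn(
                "This class is deprecated. Please use "
                "`airflow.providers.snowflake.transfers.s3_to_snowflake.S3ToSnowflakeOperator`.",
                DeprecationWarning, stacklevel=2,
            )
            super().__init__(**kwargs)

    # What tests/test_core_to_contrib.py asserts, in spirit:
    with warnings.catch_warnings(record=True) as caught:
        warnings.simplefilter("always")
        S3ToSnowflakeTransferOperator(stage="stage", table="table")
    assert any(issubclass(w.category, DeprecationWarning) for w in caught)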
diff --git a/tests/providers/snowflake/operators/test_s3_to_snowflake.py b/tests/providers/snowflake/transfers/test_s3_to_snowflake.py similarity index 95% rename from tests/providers/snowflake/operators/test_s3_to_snowflake.py rename to tests/providers/snowflake/transfers/test_s3_to_snowflake.py index 6620e47240b8c..21bf0ef888c1b 100644 --- a/tests/providers/snowflake/operators/test_s3_to_snowflake.py +++ b/tests/providers/snowflake/transfers/test_s3_to_snowflake.py @@ -19,7 +19,7 @@ import unittest from unittest import mock -from airflow.providers.snowflake.operators.s3_to_snowflake import S3ToSnowflakeTransferOperator +from airflow.providers.snowflake.transfers.s3_to_snowflake import S3ToSnowflakeOperator from tests.test_utils.asserts import assert_equal_ignore_multiple_spaces @@ -32,7 +32,7 @@ def test_execute(self, mock_run): file_format = 'file_format' schema = 'schema' - S3ToSnowflakeTransferOperator( + S3ToSnowflakeOperator( s3_keys=s3_keys, table=table, stage=stage, @@ -76,7 +76,7 @@ def test_execute_with_columns(self, mock_run): schema = 'schema' columns_array = ['col1', 'col2', 'col3'] - S3ToSnowflakeTransferOperator( + S3ToSnowflakeOperator( s3_keys=s3_keys, table=table, stage=stage, diff --git a/tests/providers/snowflake/operators/test_snowflake_to_slack.py b/tests/providers/snowflake/transfers/test_snowflake_to_slack.py similarity index 94% rename from tests/providers/snowflake/operators/test_snowflake_to_slack.py rename to tests/providers/snowflake/transfers/test_snowflake_to_slack.py index 3099619911249..d4e6b527c3471 100644 --- a/tests/providers/snowflake/operators/test_snowflake_to_slack.py +++ b/tests/providers/snowflake/transfers/test_snowflake_to_slack.py @@ -19,7 +19,7 @@ from unittest import mock from airflow.models import DAG -from airflow.providers.snowflake.operators.snowflake_to_slack import SnowflakeToSlackOperator +from airflow.providers.snowflake.transfers.snowflake_to_slack import SnowflakeToSlackOperator from airflow.utils import timezone TEST_DAG_ID = 'snowflake_to_slack_unit_test' @@ -35,8 +35,8 @@ def _construct_operator(**kwargs): operator = SnowflakeToSlackOperator(task_id=TEST_DAG_ID, **kwargs) return operator - @mock.patch('airflow.providers.snowflake.operators.snowflake_to_slack.SnowflakeHook') - @mock.patch('airflow.providers.snowflake.operators.snowflake_to_slack.SlackWebhookHook') + @mock.patch('airflow.providers.snowflake.transfers.snowflake_to_slack.SnowflakeHook') + @mock.patch('airflow.providers.snowflake.transfers.snowflake_to_slack.SlackWebhookHook') def test_hooks_and_rendering(self, mock_slack_hook_class, mock_snowflake_hook_class): operator_args = { 'snowflake_conn_id': 'snowflake_connection', diff --git a/tests/test_core_to_contrib.py b/tests/test_core_to_contrib.py index 8a16024e6c191..19414871b0aa6 100644 --- a/tests/test_core_to_contrib.py +++ b/tests/test_core_to_contrib.py @@ -24,7 +24,7 @@ from parameterized import parameterized -HOOK = [ +HOOKS = [ ( "airflow.providers.apache.cassandra.hooks.cassandra.CassandraHook", "airflow.contrib.hooks.cassandra_hook.CassandraHook", @@ -424,11 +424,7 @@ ), ] -OPERATOR = [ - ( -
"airflow.providers.google.cloud.operators.adls_to_gcs.ADLSToGCSOperator", - "airflow.contrib.operators.adls_to_gcs.AdlsToGoogleCloudStorageOperator", - ), +OPERATORS = [ ( "airflow.providers.google.cloud.operators.dataflow.DataflowCreateJavaJobOperator", "airflow.contrib.operators.dataflow_operator.DataFlowJavaOperator", @@ -449,10 +445,6 @@ "airflow.providers.google.cloud.operators.datastore.CloudDatastoreImportEntitiesOperator", "airflow.contrib.operators.datastore_import_operator.DatastoreImportOperator", ), - ( - "airflow.providers.google.cloud.operators.local_to_gcs.LocalFilesystemToGCSOperator", - "airflow.contrib.operators.file_to_gcs.FileToGoogleCloudStorageOperator", - ), ( "airflow.providers.google.cloud.operators.bigtable.BigtableUpdateClusterOperator", "airflow.contrib.operators.gcp_bigtable_operator.BigtableClusterUpdateOperator", @@ -690,14 +682,6 @@ "airflow.providers.google.cloud.operators.spanner.SpannerDeployInstanceOperator", "airflow.contrib.operators.gcp_spanner_operator.CloudSpannerInstanceDeployOperator", ), - ( - "airflow.providers.google.cloud.operators.speech_to_text.CloudSpeechToTextRecognizeSpeechOperator", - "airflow.contrib.operators.gcp_speech_to_text_operator.GcpSpeechToTextRecognizeSpeechOperator", - ), - ( - "airflow.providers.google.cloud.operators.text_to_speech.CloudTextToSpeechSynthesizeOperator", - "airflow.contrib.operators.gcp_text_to_speech_operator.GcpTextToSpeechSynthesizeOperator", - ), ( "airflow.providers.google.cloud.operators.cloud_storage_transfer_service" ".CloudDataTransferServiceCreateJobOperator", @@ -743,12 +727,6 @@ "airflow.contrib.operators.gcp_transfer_operator." "GcpTransferServiceOperationsListOperator", ), - ( - "airflow.providers.google.cloud.operators.cloud_storage_transfer_service." - "CloudDataTransferServiceGCSToGCSOperator", - "airflow.contrib.operators.gcp_transfer_operator." - "GoogleCloudStorageToGoogleCloudStorageTransferOperator", - ), ( "airflow.providers.google.cloud.operators.translate.CloudTranslateTextOperator", "airflow.contrib.operators.gcp_translate_operator.CloudTranslateTextOperator", @@ -775,10 +753,6 @@ "airflow.contrib.operators.gcp_video_intelligence_operator." "CloudVideoIntelligenceDetectVideoShotsOperator", ), - ( - "airflow.providers.google.cloud.operators.vision.CloudVisionAddProductToProductSetOperator", - "airflow.contrib.operators.gcp_vision_operator.CloudVisionAddProductToProductSetOperator", - ), ( "airflow.providers.google.cloud.operators.vision.CloudVisionImageAnnotateOperator", "airflow.contrib.operators.gcp_vision_operator.CloudVisionAnnotateImageOperator", @@ -840,18 +814,6 @@ "airflow.contrib.operators.gcp_vision_operator." 
"CloudVisionRemoveProductFromProductSetOperator", ), - ( - "airflow.providers.google.cloud.operators.gcs_to_bigquery.GCSToBigQueryOperator", - "airflow.contrib.operators.gcs_to_bq.GoogleCloudStorageToBigQueryOperator", - ), - ( - "airflow.providers.google.cloud.operators.gcs_to_gcs.GCSToGCSOperator", - "airflow.contrib.operators.gcs_to_gcs.GoogleCloudStorageToGoogleCloudStorageOperator", - ), - ( - "airflow.providers.amazon.aws.operators.gcs_to_s3.GCSToS3Operator", - "airflow.contrib.operators.gcs_to_s3.GoogleCloudStorageToS3Operator", - ), ( "airflow.providers.google.cloud.operators.mlengine.MLEngineStartBatchPredictionJobOperator", "airflow.contrib.operators.mlengine_operator.MLEngineBatchPredictionOperator", @@ -868,19 +830,6 @@ "airflow.providers.google.cloud.operators.mlengine.MLEngineManageVersionOperator", "airflow.contrib.operators.mlengine_operator.MLEngineVersionOperator", ), - ( - "airflow.providers.google.cloud.operators.mssql_to_gcs.MSSQLToGCSOperator", - "airflow.contrib.operators.mssql_to_gcs.MsSqlToGoogleCloudStorageOperator", - ), - ( - "airflow.providers.google.cloud.operators.mysql_to_gcs.MySQLToGCSOperator", - "airflow.contrib.operators.mysql_to_gcs.MySqlToGoogleCloudStorageOperator", - ), - ( - "airflow.providers.google.cloud.operators.postgres_to_gcs.PostgresToGCSOperator", - "airflow.contrib.operators.postgres_to_gcs_operator." - "PostgresToGoogleCloudStorageOperator", - ), ( "airflow.providers.google.cloud.operators.pubsub.PubSubPublishMessageOperator", "airflow.contrib.operators.pubsub_operator.PubSubPublishOperator", @@ -901,10 +850,6 @@ "airflow.providers.google.cloud.operators.pubsub.PubSubDeleteTopicOperator", "airflow.contrib.operators.pubsub_operator.PubSubTopicDeleteOperator", ), - ( - "airflow.providers.google.cloud.operators.sql_to_gcs.BaseSQLToGCSOperator", - "airflow.contrib.operators.sql_to_gcs.BaseSQLToGoogleCloudStorageOperator", - ), ( "airflow.providers.google.cloud." 
"operators.dataproc.DataprocCreateClusterOperator", @@ -1027,18 +972,6 @@ "airflow.providers.google.cloud.operators.bigquery.BigQueryDeleteTableOperator", "airflow.contrib.operators.bigquery_table_delete_operator.BigQueryTableDeleteOperator", ), - ( - "airflow.providers.google.cloud.operators.bigquery_to_bigquery.BigQueryToBigQueryOperator", - "airflow.contrib.operators.bigquery_to_bigquery.BigQueryToBigQueryOperator", - ), - ( - "airflow.providers.google.cloud.operators.bigquery_to_gcs.BigQueryToGCSOperator", - "airflow.contrib.operators.bigquery_to_gcs.BigQueryToCloudStorageOperator", - ), - ( - "airflow.providers.google.cloud.operators.bigquery_to_mysql.BigQueryToMySqlOperator", - "airflow.contrib.operators.bigquery_to_mysql_operator.BigQueryToMySqlOperator", - ), ( "airflow.providers.google.cloud.operators.gcs.GCSBucketCreateAclEntryOperator", "airflow.contrib.operators.gcs_acl_operator.GoogleCloudStorageBucketCreateAclEntryOperator", @@ -1051,10 +984,6 @@ "airflow.providers.google.cloud.operators.gcs.GCSDeleteObjectsOperator", "airflow.contrib.operators.gcs_delete_operator.GoogleCloudStorageDeleteOperator", ), - ( - "airflow.providers.google.cloud.operators.gcs.GCSToLocalOperator", - "airflow.contrib.operators.gcs_download_operator.GoogleCloudStorageDownloadOperator", - ), ( "airflow.providers.google.cloud.operators.gcs.GCSListObjectsOperator", "airflow.contrib.operators.gcs_list_operator.GoogleCloudStorageListOperator", @@ -1111,14 +1040,6 @@ 'airflow.providers.apache.hive.operators.hive.HiveOperator', 'airflow.operators.hive_operator.HiveOperator', ), - ( - 'airflow.providers.apache.hive.operators.mysql_to_hive.MySqlToHiveTransferOperator', - 'airflow.operators.mysql_to_hive.MySqlToHiveTransfer', - ), - ( - 'airflow.providers.apache.hive.operators.s3_to_hive.S3ToHiveTransferOperator', - 'airflow.operators.s3_to_hive_operator.S3ToHiveTransfer', - ), ( 'airflow.providers.apache.hive.operators.hive_stats.HiveStatsCollectionOperator', 'airflow.operators.hive_stats_operator.HiveStatsCollectionOperator', @@ -1352,95 +1273,6 @@ 'airflow.providers.sftp.operators.sftp.SFTPOperator', 'airflow.contrib.operators.sftp_operator.SFTPOperator', ), - ( - 'airflow.providers.amazon.aws.operators.dynamodb_to_s3.DynamoDBToS3Operator', - 'airflow.contrib.operators.dynamodb_to_s3.DynamoDBToS3Operator', - ), - ( - 'airflow.providers.amazon.aws.operators.hive_to_dynamodb.HiveToDynamoDBTransferOperator', - 'airflow.contrib.operators.hive_to_dynamodb.HiveToDynamoDBTransferOperator', - ), - ( - 'airflow.providers.amazon.aws.operators.imap_attachment_to_s3.ImapAttachmentToS3Operator', - 'airflow.contrib.operators.imap_attachment_to_s3_operator.ImapAttachmentToS3Operator', - ), - ( - 'airflow.providers.amazon.aws.operators.mongo_to_s3.MongoToS3Operator', - 'airflow.contrib.operators.mongo_to_s3.MongoToS3Operator', - ), - ( - 'airflow.providers.amazon.aws.operators.s3_to_sftp.S3ToSFTPOperator', - 'airflow.contrib.operators.s3_to_sftp_operator.S3ToSFTPOperator', - ), - ( - 'airflow.providers.amazon.aws.operators.sftp_to_s3.SFTPToS3Operator', - 'airflow.contrib.operators.sftp_to_s3_operator.SFTPToS3Operator', - ), - ( - 'airflow.providers.amazon.aws.operators.gcs_to_s3.GCSToS3Operator', - 'airflow.operators.gcs_to_s3.GCSToS3Operator', - ), - ( - 'airflow.providers.amazon.aws.operators.google_api_to_s3_transfer.GoogleApiToS3TransferOperator', - 'airflow.operators.google_api_to_s3_transfer.GoogleApiToS3Transfer', - ), - ( - 'airflow.providers.amazon.aws.operators.redshift_to_s3.RedshiftToS3TransferOperator', - 
'airflow.operators.redshift_to_s3_operator.RedshiftToS3Transfer', - ), - ( - 'airflow.providers.amazon.aws.operators.s3_to_redshift.S3ToRedshiftTransferOperator', - 'airflow.operators.s3_to_redshift_operator.S3ToRedshiftTransfer', - ), - ( - 'airflow.providers.apache.hive.operators.vertica_to_hive.VerticaToHiveTransferOperator', - 'airflow.contrib.operators.vertica_to_hive.VerticaToHiveTransfer', - ), - ( - 'airflow.providers.apache.druid.operators.hive_to_druid.HiveToDruidTransferOperator', - 'airflow.operators.hive_to_druid.HiveToDruidTransfer', - ), - ( - 'airflow.providers.apache.hive.operators.hive_to_mysql.HiveToMySqlTransferOperator', - 'airflow.operators.hive_to_mysql.HiveToMySqlTransfer', - ), - ( - 'airflow.providers.apache.hive.operators.hive_to_samba.Hive2SambaOperator', - 'airflow.operators.hive_to_samba_operator.Hive2SambaOperator', - ), - ( - 'airflow.providers.apache.hive.operators.mssql_to_hive.MsSqlToHiveTransferOperator', - 'airflow.operators.mssql_to_hive.MsSqlToHiveTransfer', - ), - ( - 'airflow.providers.microsoft.azure.operators.file_to_wasb.FileToWasbOperator', - 'airflow.contrib.operators.file_to_wasb.FileToWasbOperator', - ), - ( - 'airflow.providers.google.suite.operators.gcs_to_gdrive.GCSToGoogleDriveOperator', - 'airflow.contrib.operators.gcs_to_gdrive_operator.GCSToGoogleDriveOperator', - ), - ( - 'airflow.providers.microsoft.azure.operators.oracle_to_azure_data_lake_transfer' - '.OracleToAzureDataLakeTransferOperator', - 'airflow.contrib.operators.oracle_to_azure_data_lake_transfer.OracleToAzureDataLakeTransferOperator', - ), - ( - 'airflow.providers.oracle.operators.oracle_to_oracle_transfer.OracleToOracleTransferOperator', - 'airflow.contrib.operators.oracle_to_oracle_transfer.OracleToOracleTransfer', - ), - ( - 'airflow.providers.google.cloud.operators.s3_to_gcs.S3ToGCSOperator', - 'airflow.contrib.operators.s3_to_gcs_operator.S3ToGCSOperator', - ), - ( - 'airflow.providers.mysql.operators.vertica_to_mysql.VerticaToMySqlTransferOperator', - 'airflow.contrib.operators.vertica_to_mysql.VerticaToMySqlTransfer', - ), - ( - 'airflow.providers.mysql.operators.presto_to_mysql.PrestoToMySqlTransferOperator', - 'airflow.operators.presto_to_mysql.PrestoToMySqlTransfer', - ), ( 'airflow.providers.google.cloud.operators.cloud_sql.CloudSQLBaseOperator', 'airflow.contrib.operators.gcp_sql_operator.CloudSqlBaseOperator' @@ -1481,11 +1313,6 @@ 'airflow.providers.google.cloud.operators.cloud_sql.CloudSQLPatchInstanceDatabaseOperator', 'airflow.contrib.operators.gcp_sql_operator.CloudSqlInstanceDatabasePatchOperator' ), - ( - 'airflow.providers.google.cloud.operators.cloud_storage_transfer_service' - '.CloudDataTransferServiceS3ToGCSOperator', - 'airflow.contrib.operators.s3_to_gcs_transfer_operator.CloudDataTransferServiceS3ToGCSOperator' - ), ( 'airflow.providers.jira.operators.jira.JiraOperator', 'airflow.contrib.operators.jira_operator.JiraOperator', @@ -1495,8 +1322,12 @@ 'airflow.operators.postgres_operator.PostgresOperator', ), ( - 'airflow.providers.google.cloud.operators.cassandra_to_gcs.CassandraToGCSOperator', - 'airflow.contrib.operators.cassandra_to_gcs.CassandraToGoogleCloudStorageOperator', + "airflow.providers.google.cloud.operators.speech_to_text.CloudSpeechToTextRecognizeSpeechOperator", + "airflow.contrib.operators.gcp_speech_to_text_operator.GcpSpeechToTextRecognizeSpeechOperator", + ), + ( + "airflow.providers.google.cloud.operators.text_to_speech.CloudTextToSpeechSynthesizeOperator", + 
"airflow.contrib.operators.gcp_text_to_speech_operator.GcpTextToSpeechSynthesizeOperator", ), ] @@ -1519,7 +1350,7 @@ ), ] -SENSOR = [ +SENSORS = [ ( "airflow.providers.apache.cassandra.sensors.record.CassandraRecordSensor", "airflow.contrib.sensors.cassandra_record_sensor.CassandraRecordSensor", @@ -1733,22 +1564,179 @@ ) ] -PROTOCOLS = [ +TRANSFERS = [ + ( + "airflow.providers.google.cloud.transfers.local_to_gcs.LocalFilesystemToGCSOperator", + "airflow.contrib.operators.file_to_gcs.FileToGoogleCloudStorageOperator", + ), + ( + "airflow.providers.google.cloud.transfers.adls_to_gcs.ADLSToGCSOperator", + "airflow.contrib.operators.adls_to_gcs.AdlsToGoogleCloudStorageOperator", + ), + ( + "airflow.providers.google.cloud.operators.cloud_storage_transfer_service." + "CloudDataTransferServiceGCSToGCSOperator", + "airflow.contrib.operators.gcp_transfer_operator." + "GoogleCloudStorageToGoogleCloudStorageTransferOperator", + ), + ( + "airflow.providers.google.cloud.operators.vision.CloudVisionAddProductToProductSetOperator", + "airflow.contrib.operators.gcp_vision_operator.CloudVisionAddProductToProductSetOperator", + ), + ( + "airflow.providers.google.cloud.transfers.gcs_to_bigquery.GCSToBigQueryOperator", + "airflow.contrib.operators.gcs_to_bq.GoogleCloudStorageToBigQueryOperator", + ), + ( + "airflow.providers.google.cloud.transfers.gcs_to_gcs.GCSToGCSOperator", + "airflow.contrib.operators.gcs_to_gcs.GoogleCloudStorageToGoogleCloudStorageOperator", + ), + ( + "airflow.providers.amazon.aws.transfers.gcs_to_s3.GCSToS3Operator", + "airflow.contrib.operators.gcs_to_s3.GoogleCloudStorageToS3Operator", + ), + ( + "airflow.providers.google.cloud.transfers.mssql_to_gcs.MSSQLToGCSOperator", + "airflow.contrib.operators.mssql_to_gcs.MsSqlToGoogleCloudStorageOperator", + ), ( - "airflow.providers.amazon.aws.hooks.batch_client.AwsBatchProtocol", - "airflow.contrib.operators.awsbatch_operator.BatchProtocol", + "airflow.providers.google.cloud.transfers.mysql_to_gcs.MySQLToGCSOperator", + "airflow.contrib.operators.mysql_to_gcs.MySqlToGoogleCloudStorageOperator", ), ( - 'airflow.providers.amazon.aws.operators.ecs.ECSProtocol', - 'airflow.contrib.operators.ecs_operator.ECSProtocol', + "airflow.providers.google.cloud.transfers.postgres_to_gcs.PostgresToGCSOperator", + "airflow.contrib.operators.postgres_to_gcs_operator." 
+ "PostgresToGoogleCloudStorageOperator", + ), + ( + "airflow.providers.google.cloud.transfers.bigquery_to_bigquery.BigQueryToBigQueryOperator", + "airflow.contrib.operators.bigquery_to_bigquery.BigQueryToBigQueryOperator", + ), + ( + "airflow.providers.google.cloud.transfers.bigquery_to_gcs.BigQueryToGCSOperator", + "airflow.contrib.operators.bigquery_to_gcs.BigQueryToCloudStorageOperator", + ), + ( + "airflow.providers.google.cloud.transfers.bigquery_to_mysql.BigQueryToMySqlOperator", + "airflow.contrib.operators.bigquery_to_mysql_operator.BigQueryToMySqlOperator", + ), + ( + "airflow.providers.google.cloud.transfers.sql_to_gcs.BaseSQLToGCSOperator", + "airflow.contrib.operators.sql_to_gcs.BaseSQLToGoogleCloudStorageOperator", + ), + ( + 'airflow.providers.amazon.aws.transfers.dynamodb_to_s3.DynamoDBToS3Operator', + 'airflow.contrib.operators.dynamodb_to_s3.DynamoDBToS3Operator', + ), + ( + 'airflow.providers.amazon.aws.transfers.hive_to_dynamodb.HiveToDynamoDBOperator', + 'airflow.contrib.operators.hive_to_dynamodb.HiveToDynamoDBOperator', + ), + ( + 'airflow.providers.amazon.aws.transfers.imap_attachment_to_s3.ImapAttachmentToS3Operator', + 'airflow.contrib.operators.imap_attachment_to_s3_operator.ImapAttachmentToS3Operator', + ), + ( + 'airflow.providers.amazon.aws.transfers.mongo_to_s3.MongoToS3Operator', + 'airflow.contrib.operators.mongo_to_s3.MongoToS3Operator', + ), + ( + 'airflow.providers.amazon.aws.transfers.s3_to_sftp.S3ToSFTPOperator', + 'airflow.contrib.operators.s3_to_sftp_operator.S3ToSFTPOperator', + ), + ( + 'airflow.providers.amazon.aws.transfers.sftp_to_s3.SFTPToS3Operator', + 'airflow.contrib.operators.sftp_to_s3_operator.SFTPToS3Operator', + ), + ( + 'airflow.providers.amazon.aws.transfers.gcs_to_s3.GCSToS3Operator', + 'airflow.operators.gcs_to_s3.GCSToS3Operator', + ), + ( + 'airflow.providers.amazon.aws.transfers.google_api_to_s3.GoogleApiToS3Operator', + 'airflow.operators.google_api_to_s3_transfer.GoogleApiToS3Transfer', + ), + ( + 'airflow.providers.amazon.aws.transfers.redshift_to_s3.RedshiftToS3Operator', + 'airflow.operators.redshift_to_s3_operator.RedshiftToS3Transfer', + ), + ( + 'airflow.providers.amazon.aws.transfers.s3_to_redshift.S3ToRedshiftOperator', + 'airflow.operators.s3_to_redshift_operator.S3ToRedshiftTransfer', + ), + ( + 'airflow.providers.apache.hive.transfers.vertica_to_hive.VerticaToHiveOperator', + 'airflow.contrib.operators.vertica_to_hive.VerticaToHiveTransfer', + ), + ( + 'airflow.providers.apache.druid.transfers.hive_to_druid.HiveToDruidOperator', + 'airflow.operators.hive_to_druid.HiveToDruidTransfer', + ), + ( + 'airflow.providers.apache.hive.transfers.hive_to_mysql.HiveToMySqlOperator', + 'airflow.operators.hive_to_mysql.HiveToMySqlTransfer', + ), + ( + 'airflow.providers.apache.hive.transfers.mysql_to_hive.MySqlToHiveOperator', + 'airflow.operators.mysql_to_hive.MySqlToHiveTransfer', + ), + ( + 'airflow.providers.apache.hive.transfers.s3_to_hive.S3ToHiveOperator', + 'airflow.operators.s3_to_hive_operator.S3ToHiveTransfer', + ), + ( + 'airflow.providers.apache.hive.transfers.hive_to_samba.HiveToSambaOperator', + 'airflow.operators.hive_to_samba_operator.HiveToSambaOperator', + ), + ( + 'airflow.providers.apache.hive.transfers.mssql_to_hive.MsSqlToHiveOperator', + 'airflow.operators.mssql_to_hive.MsSqlToHiveTransfer', + ), + ( + 'airflow.providers.microsoft.azure.transfers.file_to_wasb.FileToWasbOperator', + 'airflow.contrib.operators.file_to_wasb.FileToWasbOperator', + ), + ( + 
'airflow.providers.google.suite.transfers.gcs_to_gdrive.GCSToGoogleDriveOperator', + 'airflow.contrib.operators.gcs_to_gdrive_operator.GCSToGoogleDriveOperator', + ), + ( + 'airflow.providers.microsoft.azure.transfers.oracle_to_azure_data_lake' + '.OracleToAzureDataLakeOperator', + 'airflow.contrib.operators.oracle_to_azure_data_lake_transfer.OracleToAzureDataLakeOperator', + ), + ( + 'airflow.providers.oracle.transfers.oracle_to_oracle.OracleToOracleOperator', + 'airflow.contrib.operators.oracle_to_oracle_transfer.OracleToOracleTransfer', + ), + ( + 'airflow.providers.google.cloud.transfers.s3_to_gcs.S3ToGCSOperator', + 'airflow.contrib.operators.s3_to_gcs_operator.S3ToGCSOperator', + ), + ( + 'airflow.providers.mysql.transfers.vertica_to_mysql.VerticaToMySqlOperator', + 'airflow.contrib.operators.vertica_to_mysql.VerticaToMySqlTransfer', + ), + ( + 'airflow.providers.mysql.transfers.presto_to_mysql.PrestoToMySqlOperator', + 'airflow.operators.presto_to_mysql.PrestoToMySqlTransfer', + ), + ( + 'airflow.providers.google.cloud.operators.cloud_storage_transfer_service' + '.CloudDataTransferServiceS3ToGCSOperator', + 'airflow.contrib.operators.s3_to_gcs_transfer_operator.CloudDataTransferServiceS3ToGCSOperator' + ), + ( + 'airflow.providers.google.cloud.transfers.cassandra_to_gcs.CassandraToGCSOperator', + 'airflow.contrib.operators.cassandra_to_gcs.CassandraToGoogleCloudStorageOperator', ), ] -ALL = HOOK + OPERATOR + SECRETS + SENSOR + PROTOCOLS +ALL = HOOKS + OPERATORS + SECRETS + SENSORS + TRANSFERS RENAMED_HOOKS = [ (old_class, new_class) - for old_class, new_class in HOOK + OPERATOR + SECRETS + SENSOR + for old_class, new_class in HOOKS + OPERATORS + SECRETS + SENSORS if old_class.rpartition(".")[2] != new_class.rpartition(".")[2] ] @@ -1799,14 +1787,6 @@ def get_class_from_path(path_to_class, parent=False): return new_class return class_ - @parameterized.expand(PROTOCOLS) - def test_is_protocol_deprecated(self, _, old_module): - deprecation_warning_msg = "This class is deprecated." - old_module_class = self.get_class_from_path(old_module) - with self.assertWarnsRegex(DeprecationWarning, deprecation_warning_msg) as wrn: - self.assertTrue(deprecation_warning_msg, wrn) - old_module_class() - @parameterized.expand(RENAMED_HOOKS) def test_is_class_deprecated(self, new_module, old_module): self.skip_test_with_mssql_in_py38(new_module, old_module) diff --git a/tests/test_project_structure.py b/tests/test_project_structure.py index eee5086c38bca..fa693ff403737 100644 --- a/tests/test_project_structure.py +++ b/tests/test_project_structure.py @@ -32,11 +32,10 @@ 'tests/providers/apache/cassandra/sensors/test_record.py', 'tests/providers/apache/cassandra/sensors/test_table.py', 'tests/providers/apache/hdfs/sensors/test_web_hdfs.py', - 'tests/providers/apache/hive/operators/test_vertica_to_hive.py', 'tests/providers/apache/pig/operators/test_pig.py', 'tests/providers/apache/spark/hooks/test_spark_jdbc_script.py', 'tests/providers/google/cloud/operators/test_datastore.py', - 'tests/providers/google/cloud/operators/test_sql_to_gcs.py', + 'tests/providers/google/cloud/transfers/test_sql_to_gcs.py', 'tests/providers/google/cloud/utils/test_field_sanitizer.py', 'tests/providers/google/cloud/utils/test_field_validator.py', 'tests/providers/google/cloud/utils/test_mlengine_operator_utils.py',