Skip to content

Commit

Permalink
Migrate Google example DAG mssql_to_gcs to new design AIP-47 (#24541)
Browse files Browse the repository at this point in the history
related: #22447, #22430
  • Loading branch information
chenglongyan authored Jul 4, 2022
1 parent e2fd41f commit 37ea530
Show file tree
Hide file tree
Showing 2 changed files with 43 additions and 7 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -36,7 +36,7 @@ data from Microsoft SQL Server database to GCS.

Below is an example of using this operator to upload data to GCS.

.. exampleinclude:: /../../tests/system/providers/google/cloud/gcs/example_mssql_to_gcs.py
:language: python
:start-after: [START howto_operator_mssql_to_gcs]
:end-before: [END howto_operator_mssql_to_gcs]
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -18,27 +18,63 @@
import os
from datetime import datetime

from airflow import models
from airflow.providers.google.cloud.operators.gcs import GCSCreateBucketOperator, GCSDeleteBucketOperator
from airflow.providers.google.cloud.transfers.mssql_to_gcs import MSSQLToGCSOperator
from airflow.utils.trigger_rule import TriggerRule

# Environment-provided identifiers used to build unique, per-run resource names
# (AIP-47 system-test convention).
ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID")
PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT")
DAG_ID = "example_mssql_to_gcs"

# Bucket name is derived from the DAG id and environment id so concurrent
# test runs do not collide on the same GCS bucket.
BUCKET_NAME = f"bucket_{DAG_ID}_{ENV_ID}"

FILENAME = 'test_file'

# NOTE(review): T-SQL batch — selects every row of the Country table in the
# `airflow` database; assumes the `airflow_mssql` connection points at a
# server where that database/table exists.
SQL_QUERY = "USE airflow SELECT * FROM Country;"

with models.DAG(
    DAG_ID,
    schedule_interval='@once',
    start_date=datetime(2021, 1, 1),
    catchup=False,
    tags=['example', 'mssql'],
) as dag:
    # TEST SETUP: create the destination bucket that the transfer writes into.
    create_bucket = GCSCreateBucketOperator(
        task_id="create_bucket", bucket_name=BUCKET_NAME, project_id=PROJECT_ID
    )

    # [START howto_operator_mssql_to_gcs]
    upload_mssql_to_gcs = MSSQLToGCSOperator(
        task_id='mssql_to_gcs',
        mssql_conn_id='airflow_mssql',
        sql=SQL_QUERY,
        bucket=BUCKET_NAME,
        filename=FILENAME,
        export_format='csv',
    )
    # [END howto_operator_mssql_to_gcs]

    # TEST TEARDOWN: always remove the bucket, even when upstream tasks fail,
    # so the system test does not leak GCP resources.
    delete_bucket = GCSDeleteBucketOperator(
        task_id="delete_bucket", bucket_name=BUCKET_NAME, trigger_rule=TriggerRule.ALL_DONE
    )

    (
        # TEST SETUP
        create_bucket
        # TEST BODY
        >> upload_mssql_to_gcs
        # TEST TEARDOWN
        >> delete_bucket
    )

    from tests.system.utils.watcher import watcher

    # This test needs watcher in order to properly mark success/failure
    # when "tearDown" task with trigger rule is part of the DAG
    list(dag.tasks) >> watcher()


from tests.system.utils import get_test_run  # noqa: E402

# Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
test_run = get_test_run(dag)

0 comments on commit 37ea530

Please sign in to comment.