AzureBlobStorageToTeradataOperator¶
AzureBlobStorageToTeradataOperator 的目的是定义涉及从 Azure Blob Storage 到 Teradata 表的 CSV、JSON 和 Parquet 格式数据传输的任务。
使用 AzureBlobStorageToTeradataOperator
将数据从 Azure Blob Storage 传输到 Teradata。此操作符利用 Teradata READ_NOS 功能从 Azure Blob Storage 导入 CSV、JSON 和 Parquet 格式的数据到 Teradata。此操作符直接从对象存储中访问数据,并使用以下 SQL 语句通过 READ_NOS 和 CREATE TABLE AS 功能在数据库中生成永久表。
-- SQL template generated by the operator: READ_NOS reads the files at the
-- object-store LOCATION, and CREATE TABLE AS ... WITH DATA materializes the
-- result as a permanent table in the database.
CREATE MULTISET TABLE multiset_table_name AS (
    SELECT *
    FROM (
        LOCATION='YOUR-OBJECT-STORE-URI'
        AUTHORIZATION=authorization_object
    ) AS d
) WITH DATA;
它有助于从公共和私有对象存储加载数据。对于私有对象存储,可以通过 Teradata 授权数据库对象或在 Airflow 中使用 Azure Blob Storage 连接定义的对象存储登录名和对象存储密钥来授予对对象存储的访问权限。相反,对于从公共对象存储传输数据,不需要任何授权或访问凭据。
Teradata 授权数据库对象访问类型可与
AzureBlobStorageToTeradataOperator
的
teradata_authorization_name
参数一起使用。对象存储访问密钥 ID 和访问密钥访问类型可与
AzureBlobStorageToTeradataOperator
的
azure_conn_id
参数一起使用。
注意
如果定义了这两种访问类型,则 Teradata 授权数据库对象优先。
将数据从公共 Azure Blob Storage 传输到 Teradata¶
以下是如何使用 AzureBlobStorageToTeradataOperator 将 CSV 数据格式从公共 Azure Blob Storage 传输到 Teradata 表:
# Transfer CSV files from a PUBLIC Azure Blob Storage container into a
# Teradata table; public_bucket=True means no access credentials are needed.
transfer_data_csv = AzureBlobStorageToTeradataOperator(
    task_id="transfer_data_blob_to_teradata_csv",
    blob_source_key="/az/akiaxox5jikeotfww4ul.blob.core.windows.net/td-usgs/CSVDATA/09380000/2018/06/",
    public_bucket=True,
    teradata_table="example_blob_teradata_csv",
    teradata_conn_id="teradata_default",
    azure_conn_id="wasb_default",
    trigger_rule="all_done",
)
使用 Azure 连接将数据从私有 Azure Blob Storage 传输到 Teradata¶
以下是如何使用 AzureBlobStorageToTeradataOperator 将 CSV 数据格式从私有 Azure Blob Storage 传输到 Teradata,其中 Azure 凭证在 Airflow 中定义为 Azure Blob Storage 连接:
# Transfer CSV files from a PRIVATE Azure Blob Storage container; access
# credentials are taken from the Airflow connection named by azure_conn_id.
transfer_key_data_csv = AzureBlobStorageToTeradataOperator(
    task_id="transfer_key_data_blob_to_teradata_csv",
    blob_source_key="/az/airflowteradata.blob.core.windows.net/csvdata/",
    teradata_table="example_blob_teradata_csv",
    azure_conn_id="wasb_default",
    teradata_conn_id="teradata_default",
    trigger_rule="all_done",
)
使用 Teradata 授权对象将数据从私有 Azure Blob Storage 传输到 Teradata¶
Teradata 授权数据库对象用于控制谁可以访问外部对象存储。Teradata 授权数据库对象应存在于 Teradata 数据库中,以便在将数据从 Azure Blob Storage 传输到 Teradata 时使用它。请参阅 Teradata 中的外部对象存储身份验证。
以下是如何使用 AzureBlobStorageToTeradataOperator 将 CSV 数据格式从私有 Azure Blob Storage 传输到 Teradata,其中授权数据库对象在 Teradata 中定义:
# Transfer CSV files from a PRIVATE container using a Teradata authorization
# database object (must already exist in the database) instead of an
# Airflow Azure connection.
transfer_auth_data_csv = AzureBlobStorageToTeradataOperator(
    task_id="transfer_auth_data_blob_to_teradata_csv",
    blob_source_key="/az/airflowteradata.blob.core.windows.net/csvdata/",
    teradata_table="example_blob_teradata_csv",
    teradata_authorization_name="azure_authorization",
    teradata_conn_id="teradata_default",
    trigger_rule="all_done",
)
以 CSV 格式将数据从 Azure Blob Storage 传输到 Teradata¶
以下是如何使用 AzureBlobStorageToTeradataOperator 将 CSV 数据格式从 Azure Blob Storage 传输到 Teradata 表:
# CSV-format transfer: source path points at CSV files; the operator creates
# the target Teradata table from them via READ_NOS.
transfer_data_csv = AzureBlobStorageToTeradataOperator(
    task_id="transfer_data_blob_to_teradata_csv",
    blob_source_key="/az/akiaxox5jikeotfww4ul.blob.core.windows.net/td-usgs/CSVDATA/09380000/2018/06/",
    public_bucket=True,
    teradata_table="example_blob_teradata_csv",
    teradata_conn_id="teradata_default",
    azure_conn_id="wasb_default",
    trigger_rule="all_done",
)
以 JSON 格式将数据从 Azure Blob Storage 传输到 Teradata¶
以下是如何使用 AzureBlobStorageToTeradataOperator 将 JSON 数据格式从 Azure Blob Storage 传输到 Teradata 表:
# JSON-format transfer: same operator, source path points at JSON files.
transfer_data_json = AzureBlobStorageToTeradataOperator(
    task_id="transfer_data_blob_to_teradata_json",
    blob_source_key="/az/akiaxox5jikeotfww4ul.blob.core.windows.net/td-usgs/JSONDATA/09380000/2018/06/",
    teradata_table="example_blob_teradata_json",
    public_bucket=True,
    teradata_conn_id="teradata_default",
    azure_conn_id="wasb_default",
    trigger_rule="all_done",
)
以 PARQUET 格式将数据从 Azure Blob Storage 传输到 Teradata¶
以下是如何使用 AzureBlobStorageToTeradataOperator 将 PARQUET 数据格式从 Azure Blob Storage 传输到 Teradata 表:
# PARQUET-format transfer: same operator, source path points at Parquet files.
transfer_data_parquet = AzureBlobStorageToTeradataOperator(
    task_id="transfer_data_blob_to_teradata_parquet",
    blob_source_key="/az/akiaxox5jikeotfww4ul.blob.core.windows.net/td-usgs/PARQUETDATA/09394500/2018/06/",
    teradata_table="example_blob_teradata_parquet",
    public_bucket=True,
    teradata_conn_id="teradata_default",
    trigger_rule="all_done",
)
完整的 AzureBlobStorageToTeradataOperator DAG¶
当我们将所有内容放在一起时,我们的 DAG 应如下所示:
# Complete example DAG: demonstrates AzureBlobStorageToTeradataOperator for
# CSV, JSON and Parquet data, covering three access variants (public bucket,
# Azure connection credentials, Teradata authorization object). Each transfer
# is followed by a row-count check and a cleanup DROP.
ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID")  # system-test environment id (may be None)
DAG_ID = "example_azure_blob_to_teradata_transfer_operator"
CONN_ID = "teradata_default"
with DAG(
    dag_id=DAG_ID,
    start_date=datetime.datetime(2020, 2, 2),
    schedule="@once",  # run once; catchup disabled below
    catchup=False,
    default_args={"teradata_conn_id": CONN_ID},
) as dag:
    # --- Variant 1: public bucket, CSV -> transfer, verify, drop ---
    transfer_data_csv = AzureBlobStorageToTeradataOperator(
        task_id="transfer_data_blob_to_teradata_csv",
        blob_source_key="/az/akiaxox5jikeotfww4ul.blob.core.windows.net/td-usgs/CSVDATA/09380000/2018/06/",
        public_bucket=True,
        teradata_table="example_blob_teradata_csv",
        teradata_conn_id="teradata_default",
        azure_conn_id="wasb_default",
        trigger_rule="all_done",  # run even if upstream tasks failed
    )
    read_data_table_csv = TeradataOperator(
        task_id="read_data_table_csv",
        sql="SELECT count(1) from example_blob_teradata_csv;",
    )
    drop_table_csv = TeradataOperator(
        task_id="drop_table_csv",
        sql="DROP TABLE example_blob_teradata_csv;",
    )
    # --- Variant 2: private container, credentials from the Azure connection ---
    transfer_key_data_csv = AzureBlobStorageToTeradataOperator(
        task_id="transfer_key_data_blob_to_teradata_csv",
        blob_source_key="/az/airflowteradata.blob.core.windows.net/csvdata/",
        teradata_table="example_blob_teradata_csv",
        azure_conn_id="wasb_default",
        teradata_conn_id="teradata_default",
        trigger_rule="all_done",
    )
    read_key_data_table_csv = TeradataOperator(
        task_id="read_key_data_table_csv",
        conn_id=CONN_ID,
        sql="SELECT count(1) from example_blob_teradata_csv;",
    )
    drop_key_table_csv = TeradataOperator(
        task_id="drop_key_table_csv",
        conn_id=CONN_ID,
        sql="DROP TABLE example_blob_teradata_csv;",
    )
    # --- Variant 3: private container via a Teradata authorization object ---
    # Account name/secret are pulled from Airflow Variables via Jinja templating.
    create_azure_authorization = TeradataOperator(
        task_id="create_azure_authorization",
        conn_id=CONN_ID,
        sql="CREATE AUTHORIZATION azure_authorization USER '{{ var.value.get('AZURE_BLOB_ACCOUNTNAME') }}' PASSWORD '{{ var.value.get('AZURE_BLOB_ACCOUNT_SECRET_KEY') }}' ",
    )
    transfer_auth_data_csv = AzureBlobStorageToTeradataOperator(
        task_id="transfer_auth_data_blob_to_teradata_csv",
        blob_source_key="/az/airflowteradata.blob.core.windows.net/csvdata/",
        teradata_table="example_blob_teradata_csv",
        teradata_authorization_name="azure_authorization",
        teradata_conn_id="teradata_default",
        trigger_rule="all_done",
    )
    read_auth_data_table_csv = TeradataOperator(
        task_id="read_auth_data_table_csv",
        conn_id=CONN_ID,
        sql="SELECT count(1) from example_blob_teradata_csv;",
    )
    drop_auth_table_csv = TeradataOperator(
        task_id="drop_auth_table_csv",
        conn_id=CONN_ID,
        sql="DROP TABLE example_blob_teradata_csv;",
    )
    drop_auth = TeradataOperator(
        task_id="drop_auth",
        conn_id=CONN_ID,
        sql="DROP AUTHORIZATION azure_authorization;",
    )
    # --- JSON-format transfer (public bucket) ---
    transfer_data_json = AzureBlobStorageToTeradataOperator(
        task_id="transfer_data_blob_to_teradata_json",
        blob_source_key="/az/akiaxox5jikeotfww4ul.blob.core.windows.net/td-usgs/JSONDATA/09380000/2018/06/",
        teradata_table="example_blob_teradata_json",
        public_bucket=True,
        teradata_conn_id="teradata_default",
        azure_conn_id="wasb_default",
        trigger_rule="all_done",
    )
    read_data_table_json = TeradataOperator(
        task_id="read_data_table_json",
        sql="SELECT count(1) from example_blob_teradata_json;",
    )
    drop_table_json = TeradataOperator(
        task_id="drop_table_json",
        sql="DROP TABLE example_blob_teradata_json;",
    )
    # --- Parquet-format transfer (public bucket) ---
    transfer_data_parquet = AzureBlobStorageToTeradataOperator(
        task_id="transfer_data_blob_to_teradata_parquet",
        blob_source_key="/az/akiaxox5jikeotfww4ul.blob.core.windows.net/td-usgs/PARQUETDATA/09394500/2018/06/",
        teradata_table="example_blob_teradata_parquet",
        public_bucket=True,
        teradata_conn_id="teradata_default",
        trigger_rule="all_done",
    )
    read_data_table_parquet = TeradataOperator(
        task_id="read_data_table_parquet",
        sql="SELECT count(1) from example_blob_teradata_parquet;",
    )
    drop_table_parquet = TeradataOperator(
        task_id="drop_table_parquet",
        sql="DROP TABLE example_blob_teradata_parquet;",
    )
    # Linear task ordering: public transfers first, then the Azure-connection
    # variant, then the authorization-object variant with its setup/teardown.
    (
        transfer_data_csv
        >> transfer_data_json
        >> transfer_data_parquet
        >> read_data_table_csv
        >> read_data_table_json
        >> read_data_table_parquet
        >> drop_table_csv
        >> drop_table_json
        >> drop_table_parquet
        >> transfer_key_data_csv
        >> read_key_data_table_csv
        >> drop_key_table_csv
        >> create_azure_authorization
        >> transfer_auth_data_csv
        >> read_auth_data_table_csv
        >> drop_auth_table_csv
        >> drop_auth
    )