Skip to content

Instantly share code, notes, and snippets.

@bugraoz93
Created November 26, 2022 01:42
Show Gist options
  • Save bugraoz93/d3ee6d2d03d1881de4614d1e7c3b8234 to your computer and use it in GitHub Desktop.
BigQuery Copy Individual Partition with BigQueryToBigQueryOperator
import datetime
from airflow import DAG
from airflow.providers.google.cloud.transfers.bigquery_to_bigquery import BigQueryToBigQueryOperator
# Example DAG: copy a single BigQuery partition between tables using the
# partition-decorator syntax ("table$YYYYMMDD") with BigQueryToBigQueryOperator.
with DAG(
    dag_id="test_dag",
    max_active_runs=1,
    start_date=datetime.datetime(2022, 11, 24),
    schedule_interval="@once",
    catchup=False,
    concurrency=1,  # NOTE(review): renamed to max_active_tasks in Airflow 2.2+ — confirm target version
) as dag:
    project = "test_project"
    dataset = "bugraoz93_test"
    # "$YYYYMMDD" suffix targets one ingestion-time partition of the table.
    source_table = "test_table$20221125"
    destination_table = "test_table_dest$20221124"

    copy_table_to_fact = BigQueryToBigQueryOperator(
        task_id="copy_test",
        gcp_conn_id="gcp_conn",
        source_project_dataset_tables=f"{project}.{dataset}.{source_table}",
        destination_project_dataset_table=f"{project}.{dataset}.{destination_table}",
        # Append to (and create if missing) the destination partition.
        write_disposition="WRITE_APPEND",
        create_disposition="CREATE_IF_NEEDED",
        # dag=dag removed: the `with DAG(...)` context manager already
        # assigns this task to the DAG; passing it again is redundant.
    )
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment