Antonio Cachuan (antoniocachuan)

@antoniocachuan
antoniocachuan / dataflow_pipeline.py
Last active April 27, 2021 03:00
Apache Beam pipeline that reads from BigQuery and writes to BigQuery
# Created by Antonio Cachuan (https://www.linkedin.com/in/antoniocachuan/)
# Example based on https://github.com/apache/beam/blob/master/sdks/python/apache_beam/examples/snippets/snippets.py
import logging
import argparse
import requests
import apache_beam as beam
from apache_beam.io.gcp.internal.clients import bigquery
from datetime import datetime
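The preview stops at the imports. Below is a minimal sketch of how a read-from-BigQuery, write-to-BigQuery pipeline can be assembled with these imports; the project, dataset, table names, query, and schema are placeholders, not the gist's values.

def run(argv=None):
    parser = argparse.ArgumentParser()
    known_args, pipeline_args = parser.parse_known_args(argv)

    # Placeholder destination table and schema.
    output_table = 'my-project:my_dataset.output_table'
    output_schema = 'name:STRING,total:INTEGER'

    with beam.Pipeline(argv=pipeline_args) as p:
        rows = (
            p
            | 'ReadFromBigQuery' >> beam.io.ReadFromBigQuery(
                query='SELECT name, total FROM `my-project.my_dataset.input_table`',
                use_standard_sql=True)
        )
        _ = (
            rows
            | 'WriteToBigQuery' >> beam.io.WriteToBigQuery(
                output_table,
                schema=output_schema,
                create_disposition=beam.io.BigQueryDisposition.CREATE_IF_NEEDED,
                write_disposition=beam.io.BigQueryDisposition.WRITE_APPEND)
        )

if __name__ == '__main__':
    logging.getLogger().setLevel(logging.INFO)
    run()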
@antoniocachuan
antoniocachuan / bigquery_reservation.py
Created December 28, 2020 02:31
BigQuery Reservation API
# More details https://medium.com/@alipazaga07
from google.cloud import bigquery_reservation
import os
os.environ["GOOGLE_APPLICATION_CREDENTIALS"] = r'D:\medium\example-apis\key\key_bqsa.json'  # raw string so the backslashes in the Windows path are kept literally
project_id = 'medium-sandbox'
client = bigquery_reservation.ReservationServiceClient()
parent = f"projects/{project_id}/locations/US"
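The preview ends at the parent path. A hedged continuation showing a common use of the Reservation client: listing the reservations and capacity commitments under that location.

# List reservations (slot pools) under the location.
for reservation in client.list_reservations(parent=parent):
    print(reservation.name, reservation.slot_capacity)

# List capacity commitments (purchased slots) under the same location.
for commitment in client.list_capacity_commitments(parent=parent):
    print(commitment.name, commitment.slot_count, commitment.plan)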
@antoniocachuan
antoniocachuan / bigquery_connection_api.py
Created December 28, 2020 01:27
BigQuery Connection API
from google.cloud import bigquery_connection
import os
os.environ["GOOGLE_APPLICATION_CREDENTIALS"] = r'D:\medium\example-apis\key\key_bqsa.json'
project_id = 'medium-sandbox'
client = bigquery_connection.ConnectionServiceClient()
parent = f"projects/{project_id}/locations/US"
connections = list(client.list_connections(parent=parent))
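A short hedged continuation that prints the connections returned above.

print(f"{len(connections)} connection(s) found under {parent}")
for connection in connections:
    print(connection.name, connection.friendly_name)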
@antoniocachuan
antoniocachuan / bigquery_storage_api-example3.py
Created December 28, 2020 00:35
BigQuery Storage API Example 3: using the Apache Arrow data format
from google.cloud.bigquery_storage import BigQueryReadClient
from google.cloud.bigquery_storage import types
import os
os.environ["GOOGLE_APPLICATION_CREDENTIALS"] = r'D:\medium\example-apis\key\key_bqsa.json'
project_id = 'medium-sandbox'
# Set up the client
client = BigQueryReadClient()
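The preview ends after the client is created. A hedged sketch of an Arrow-format read session; the table below is a public dataset used as a stand-in for the gist's table, and pyarrow must be installed.

# Public table used as a stand-in for the gist's table.
table = "projects/bigquery-public-data/datasets/usa_names/tables/usa_1910_current"

requested_session = types.ReadSession(
    table=table,
    data_format=types.DataFormat.ARROW,
)
session = client.create_read_session(
    parent=f"projects/{project_id}",
    read_session=requested_session,
    max_stream_count=1,
)

# Read the single stream and materialize it as a pyarrow.Table (requires pyarrow).
reader = client.read_rows(session.streams[0].name)
arrow_table = reader.to_arrow(session)
print(arrow_table.num_rows, "rows read as Arrow")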
@antoniocachuan
antoniocachuan / storage-api-example2.py
Created December 28, 2020 00:22
BigQuery Storage API Example 2
from google.cloud.bigquery_storage import BigQueryReadClient
from google.cloud.bigquery_storage import types
import os
os.environ["GOOGLE_APPLICATION_CREDENTIALS"] = r'D:\medium\example-apis\key\key_bqsa.json'
project_id = 'medium-sandbox'
# Set up the client
client = BigQueryReadClient()
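A hedged sketch of the rest of this example: an Avro-format read session with column selection and a row filter. The public table and the filter are placeholders, and fastavro must be installed to iterate rows.

# Public table used as a stand-in for the gist's table.
table = "projects/bigquery-public-data/datasets/usa_names/tables/usa_1910_current"

requested_session = types.ReadSession(
    table=table,
    data_format=types.DataFormat.AVRO,
)
requested_session.read_options.selected_fields = ["name", "number", "state"]
requested_session.read_options.row_restriction = 'state = "WA"'

session = client.create_read_session(
    parent=f"projects/{project_id}",
    read_session=requested_session,
    max_stream_count=1,
)

# Iterate rows from the first stream (requires fastavro for AVRO decoding).
reader = client.read_rows(session.streams[0].name)
for row in reader.rows(session):
    print(row["name"], row["number"])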
@antoniocachuan
antoniocachuan / storage-api.py
Created December 27, 2020 23:53
BigQuery Storage API
# Read the full article https://medium.com/@alipazaga07
import google.auth
from google.cloud import bigquery
from google.cloud import bigquery_storage
import os
os.environ["GOOGLE_APPLICATION_CREDENTIALS"] = r'D:\medium\example-apis\key\key_bqsa.json'
# Create credentials object for both the BigQuery and BigQuery Storage clients
credentials, project_id = google.auth.default(
    scopes=["https://www.googleapis.com/auth/cloud-platform"]
)
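A hedged continuation showing the usual pattern with these two clients: run a query with the BigQuery client and download the result through the Storage API into pandas. The query below is a placeholder against a public dataset, not the gist's query.

# Share the credentials between the BigQuery and BigQuery Storage clients.
bqclient = bigquery.Client(credentials=credentials, project=project_id)
bqstorageclient = bigquery_storage.BigQueryReadClient(credentials=credentials)

# Placeholder query against a public dataset.
query_string = """
    SELECT name, SUM(number) AS total
    FROM `bigquery-public-data.usa_names.usa_1910_current`
    GROUP BY name
    ORDER BY total DESC
    LIMIT 10
"""

# to_dataframe() uses the Storage API client for the download, which is faster for large results.
df = bqclient.query(query_string).result().to_dataframe(bqstorage_client=bqstorageclient)
print(df.head())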
@antoniocachuan
antoniocachuan / running-data-transfer.py
Created December 27, 2020 22:34
BigQuery Data Transfer API
import time
from google.cloud import bigquery_datatransfer_v1
from google.protobuf.timestamp_pb2 import Timestamp
import os
os.environ["GOOGLE_APPLICATION_CREDENTIALS"] = r'D:\medium\example-apis\key\key_bqsa.json'
client = bigquery_datatransfer_v1.DataTransferServiceClient()
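The preview ends after the client is created. A hedged sketch of triggering a manual run of an existing transfer configuration; the config resource name below is a placeholder.

# Placeholder transfer config resource name.
transfer_config_name = "projects/1234567890/locations/us/transferConfigs/your-config-id"

# Request a manual run starting now.
now = time.time()
requested_run_time = Timestamp(seconds=int(now), nanos=int((now - int(now)) * 1e9))
request = bigquery_datatransfer_v1.StartManualTransferRunsRequest(
    parent=transfer_config_name,
    requested_run_time=requested_run_time,
)
response = client.start_manual_transfer_runs(request=request)
for run in response.runs:
    print(run.name, run.state)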
@antoniocachuan
antoniocachuan / bigquery_transfer_api.py
Created December 27, 2020 22:26
Using the BigQuery Data Transfer API
# Read more at https://medium.com/@alipazaga07
# Listing all data transfer configurations
from google.cloud import bigquery_datatransfer
import os
os.environ["GOOGLE_APPLICATION_CREDENTIALS"] = r'D:\medium\example-apis\key\key_bqsa.json'
transfer_client = bigquery_datatransfer.DataTransferServiceClient()
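A hedged continuation that lists the transfer configurations in the project; the project id is the same one used in the other gists.

# Project id as used in the other gists.
project_id = "medium-sandbox"
parent = f"projects/{project_id}"

# List every transfer configuration in the project.
for config in transfer_client.list_transfer_configs(parent=parent):
    print(config.name, config.display_name, config.data_source_id)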
@antoniocachuan
antoniocachuan / bq-to-pandas.py
Created December 26, 2020 03:04
BQ to Pandas
import os
from google.cloud import bigquery
import pandas as pd
# Reference the service account (SA) key file
os.environ["GOOGLE_APPLICATION_CREDENTIALS"] = r'D:\medium\example-apis\key\key_bqsa.json'
# BigQuery client object.
client = bigquery.Client()
# Placeholder query against a public dataset; the gist's own SQL is truncated in this preview.
query = """
    SELECT name, SUM(number) AS total
    FROM `bigquery-public-data.usa_names.usa_1910_current`
    GROUP BY name
    ORDER BY total DESC
    LIMIT 10
"""
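A hedged note on the last step: running the query and pulling the result into a pandas DataFrame.

# Run the query and download the result as a pandas DataFrame.
df = client.query(query).to_dataframe()
print(df.head())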