Skip to content

Instantly share code, notes, and snippets.

@icoloma
Last active November 18, 2019 10:53
Show Gist options
  • Save icoloma/7285b535e27ea6342339 to your computer and use it in GitHub Desktop.
Google Cloud Recipe Book
# List jobs launched in a project (the project ID argument is optional and
# defaults to the configured project). Long-form flags spell out what the
# -j/-n/-a abbreviations mean: --jobs, --max_results, --all (all users' jobs).
bq ls --jobs --all --max_results 10000 project_id > jobs.txt
bq ls --jobs --all --max_results 10000 marine-tractor-567789 > jobs.txt
# Get the details for a given job (the project ID is optional)
# The identifier is <project_id>:<job_id>; job IDs come from the `bq ls -j`
# listing above. --format=prettyjson prints the full job resource (status,
# statistics, configuration) as indented JSON.
bq show --format=prettyjson -j project_id:job_id
bq show --format=prettyjson -j marine-tractor-567789:job_ftnDrpo8JqiJtajBH2AEZxJCLpw
More details: http://stackoverflow.com/questions/31150542/logging-all-bigquery-queries
# Spend: monthly cost per product and SKU, positive totals only.
# Fixes vs. original: the ORDER BY sat inside a subquery, where its order is
# not guaranteed to survive to the outermost result; the extra wrapping
# subquery used only to filter the aggregate is replaced by HAVING; the nested
# unnamed subqueries are replaced by a named CTE.
#standardSQL
WITH line_items AS (
  -- One row per billing line item, tagged with its usage month (YYYY-MM).
  SELECT
    FORMAT_DATETIME('%Y-%m', EXTRACT(DATETIME FROM usage_end_time)) AS month,
    service.description AS product,
    sku.description AS resource_type,
    cost
  FROM `gcp_billing_export`
)
SELECT
  month,
  product,
  resource_type,
  SUM(cost) AS total
FROM line_items
GROUP BY month, product, resource_type
-- Drop zero/negative months (credits can make a SKU net to <= 0).
HAVING total > 0
-- ORDER BY on the outermost query so the ordering is actually guaranteed.
ORDER BY month ASC, total DESC
# Usage volume for a specific service (here: BigQuery), per month and SKU.
# NOTE: this sums usage.amount in usage.unit, not revenue — the original
# heading ("Revenue") was misleading.
# Fixes vs. original: ORDER BY moved to the outermost query (inside a
# subquery its order is not guaranteed); the aggregate filter uses HAVING
# instead of a wrapping subquery; nesting replaced by a named CTE.
#standardSQL
WITH line_items AS (
  -- One BigQuery billing line item per row, tagged with its usage month.
  SELECT
    FORMAT_DATETIME('%Y-%m', EXTRACT(DATETIME FROM usage_end_time)) AS month,
    service.description AS product,
    sku.description AS resource_type,
    usage.amount AS usage_amount,
    usage.unit AS usage_unit
  FROM `gcp_billing_export`
  WHERE service.description = 'BigQuery'
)
SELECT
  month,
  product,
  resource_type,
  SUM(usage_amount) AS total_usage,
  usage_unit
FROM line_items
GROUP BY month, product, resource_type, usage_unit
HAVING total_usage > 0
ORDER BY month ASC, total_usage DESC
# Create a dedicated VPC network to isolate the Datalab servers.
gcloud compute networks create datalab-network \
  --description="Network for Datalab servers"
# Permit inbound SSH (tcp/22) on the Datalab network; without this rule the
# tunnel step below cannot connect.
gcloud compute firewall-rules create datalab-network-allow-ssh \
  --network="datalab-network" \
  --allow=tcp:22 \
  --description="Allow SSH access"
# Start the VM
# Fetch the Datalab container manifest locally so it can be inlined into the
# instance metadata below.
gsutil cp gs://cloud-datalab/server.yaml ./datalab-server.yaml
# NOTE(review): the "container-vm" image family from "google-containers" was
# deprecated in favor of Container-Optimized OS ("cos-cloud") — confirm this
# image still resolves before relying on the recipe.
# --scopes cloud-platform grants the VM's service account access to all
# enabled GCP APIs.
gcloud compute instances create "datalab-instance" \
--network "datalab-network" \
--image-family "container-vm" \
--image-project "google-containers" \
--metadata "google-container-manifest=$(cat datalab-server.yaml)" \
--machine-type "n1-highmem-2" \
--scopes "cloud-platform"
# Open SSH tunnel
# ssh flags passed through gcloud:
#   -N  do not run a remote command (tunnel only; the session just holds the
#       forwarding open)
#   -L localhost:8081:localhost:8080  forward local port 8081 to port 8080 on
#       the instance (where the Datalab container listens)
gcloud compute ssh --quiet \
--ssh-flag="-N" \
--ssh-flag="-L" \
--ssh-flag="localhost:8081:localhost:8080" \
"${USER}@datalab-instance"
# Open http://localhost:8081/tree/datalab
# Assign permissions to the Service Account used by Datalab
# NOTE(review): the command for the permission grant above is missing from
# this recipe — it names a step but shows nothing to run.
# Inspect the cluster autoscaler's status ConfigMap (kube-system namespace)
# to debug why the autoscaler did or did not scale.
kubectl --namespace kube-system get configmap/cluster-autoscaler-status --output yaml
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment