Skip to content

Instantly share code, notes, and snippets.

# Cloud Workflows entry point, triggered via Eventarc with a Cloud Audit Log
# event. It proceeds only when the logged method is Vertex AI's
# CreateTrainingPipeline; any other event ends the workflow immediately.
# NOTE(review): the pasted source had all YAML indentation stripped; the
# nesting below is the standard Workflows structure for this snippet.
main:
  params: [event]
  steps:
    - trainingPipelineCheck:
        switch:
          # End early unless the audit-log payload is a CreateTrainingPipeline call.
          - condition: ${not("google.cloud.aiplatform.ui.PipelineService.CreateTrainingPipeline" == event.data.protoPayload.methodName)}
            next: end
    # proceed further only when we have the CreateTrainingPipeline event
    - run:
        call: googleapis.workflowexecutions.v1.projects.locations.workflows.executions.run
        # NOTE(review): the connector's args (target workflow id, location,
        # argument payload) are cut off in the pasted source — restore them
        # from the original snippet before deploying.
# Cloud Workflows entry point: resolves runtime arguments with defaults and
# then calls the VertexAI_GetTrainingPipeline subworkflow.
# NOTE(review): the pasted source had all YAML indentation stripped; the
# nesting below restores standard Workflows structure.
main:
  params: [args]
  steps:
    - initialize:
        assign:
          # Optional caller-supplied values with fallbacks; the pipeline-id
          # default is a sample value — callers should pass their own.
          - location: ${default(map.get(args,"location"),"us-central1")}
          - trainingPipelineId: ${default(map.get(args,"trainingPipelineId"),"3590189825883373568")}
          # BigQuery dataset that receives the model-history rows.
          - destination_dataset: "vertexai_model_history"
    - getTrainingPipeline:
        call: VertexAI_GetTrainingPipeline
        # NOTE(review): the subworkflow's args are truncated in the pasted
        # source — restore them from the original snippet.
-- List query jobs from the last 24 hours, including BI Engine statistics,
-- from the project-scoped INFORMATION_SCHEMA jobs view in the US region.
-- NOTE(review): `prj_id` is a placeholder — substitute the real project id.
SELECT
  creation_time,
  job_id,
  bi_engine_statistics
FROM
  `prj_id.region-us`.INFORMATION_SCHEMA.JOBS_BY_PROJECT
WHERE
  job_type = "QUERY"
  AND creation_time BETWEEN TIMESTAMP_SUB(CURRENT_TIMESTAMP(), INTERVAL 1 DAY)
                        AND CURRENT_TIMESTAMP()
-- Project the on-demand cost of each query job run on a given day.
-- 5 * bytes / POWER(2,40) converts bytes to TiB and multiplies by a flat
-- rate — presumably the legacy $5/TiB on-demand price; confirm against
-- current BigQuery pricing before relying on the dollar amounts.
DECLARE var_day STRING DEFAULT '2022-10-03';
with t as (
SELECT creation_time,
round(5* (total_bytes_processed/POWER(2,40) ),2) AS processedBytesCostProjection,
round(5* (total_bytes_billed/POWER(2,40) ),2) AS billedBytesCostInUSD
from `prj_id.region-us`.INFORMATION_SCHEMA.JOBS_BY_PROJECT
-- Restrict to query jobs created within the 24 hours starting at var_day.
where creation_time BETWEEN timestamp(var_day) and TIMESTAMP_add(timestamp(var_day), INTERVAL 1 DAY)
and job_type = "QUERY"
)
-- NOTE(review): the outer SELECT over CTE `t` is truncated here in the
-- pasted source — its projection list is missing; restore it from the
-- original snippet before running.
SELECT
-- Per-job on-demand cost projection for one day, most expensive first.
-- 5 * bytes / POWER(2,40) prices each TiB at a flat rate — presumably the
-- legacy $5/TiB on-demand rate; confirm against current BigQuery pricing.
DECLARE var_day STRING DEFAULT '2022-10-03';

SELECT
  creation_time,
  ROUND(5 * (total_bytes_processed / POWER(2, 40)), 2) AS processedBytesCostProjection,
  ROUND(5 * (total_bytes_billed / POWER(2, 40)), 2) AS billedBytesCostInUSD
FROM
  `prj_id.region-us`.INFORMATION_SCHEMA.JOBS_BY_PROJECT
WHERE
  job_type = "QUERY"
  AND creation_time BETWEEN TIMESTAMP(var_day)
                        AND TIMESTAMP_ADD(TIMESTAMP(var_day), INTERVAL 1 DAY)
ORDER BY
  processedBytesCostProjection DESC
# Cloud Build config: step 0 builds the container image; step 1 (truncated in
# the pasted source) minifies it.
# NOTE(review): the pasted source had all YAML indentation stripped; the
# nesting below is the standard cloudbuild.yaml structure.
substitutions:
  # dynamic_substitutions (below) allows ${PROJECT_ID} to expand inside this
  # user-defined substitution value.
  _IMAGE_NAME: "gcr.io/${PROJECT_ID}/myimage"
options:
  dynamic_substitutions: true
steps:
  # Step 0 - Build the container image
  - name: "gcr.io/cloud-builders/docker"
    args: ["build", "-f", "Dockerfile", "-t", "${_IMAGE_NAME}", "."]
  # Step 1 - Minify the container with docker-slim
  - name: "gcr.io/cloud-builders/docker"
    # NOTE(review): this step's args are cut off in the pasted source —
    # restore them from the original snippet.
wrk -t5 -c200 -d300s https://fat-apache-container/php_info.php
Running 5m test @ https://fat-apache-container/php_info.php
5 threads and 200 connections
Thread Stats Avg Stdev Max +/- Stdev
Latency 546.14ms 380.60ms 2.00s 76.86%
Req/Sec 69.65 43.33 350.00 71.01%
103361 requests in 5.00m, 7.86GB read
Socket errors: connect 0, read 1, write 0, timeout 2200
Requests/sec: 344.49
Transfer/sec: 26.83MB
-- Per-query cost projection computed from BigQuery data-access audit logs
-- exported to BigQuery (date-sharded cloudaudit_googleapis_com_data_access_*
-- tables). 5 * bytes / POWER(2,40) prices each TiB at a flat rate —
-- presumably the legacy $5/TiB on-demand price; confirm current pricing.
DECLARE var_day STRING DEFAULT '2022-01-09';
with t AS (
SELECT
protopayload_auditlog.servicedata_v1_bigquery.jobCompletedEvent.job.jobStatistics.createTime,
5* (protopayload_auditlog.servicedata_v1_bigquery .jobCompletedEvent.job.jobStatistics. totalProcessedBytes/POWER(2,40) ) AS processedBytesCostProjection,
5* (protopayload_auditlog.servicedata_v1_bigquery .jobCompletedEvent.job.jobStatistics. totalBilledBytes/POWER(2,40) ) AS billedBytesCostInUSD
FROM
-- <dataset_auditlogs> is a placeholder for the dataset receiving the sink.
`<dataset_auditlogs>.cloudaudit_googleapis_com_data_access_*`
WHERE
-- _TABLE_SUFFIX prunes the date shards scanned; the createTime predicate
-- then filters rows inside the remaining shards.
_TABLE_SUFFIX >= var_day and protopayload_auditlog.servicedata_v1_bigquery.jobCompletedEvent.job.jobStatistics.createTime>=TIMESTAMP(var_day)
-- NOTE(review): CTE `t` is never closed and no outer SELECT follows — this
-- snippet is truncated in the pasted source; restore the remainder from the
-- original before running.
# Workflow fragment: PATCH the BigQuery Reservation API to set BI Engine
# capacity for the current project/location.
# NOTE(review): indentation was stripped when this snippet was pasted — the
# original is nested Workflows YAML (these steps sit under a `steps:` key).
- initialize:
assign:
- project: ${sys.get_env("GOOGLE_CLOUD_PROJECT_NUMBER")}
- location: US
- setBiEngineCapacity:
call: http.patch
args:
url:
${"https://bigqueryreservation.googleapis.com/v1/projects/" + sys.get_env("GOOGLE_CLOUD_PROJECT_NUMBER")
+ "/locations/" + location
# NOTE(review): the URL expression (and the request body / auth args of the
# http.patch call) is cut off here in the pasted source — restore it from
# the original snippet before deploying.
-- Per-query cost projection from BigQuery data-access audit-log sink tables,
-- restricted to completed query jobs since var_day.
-- round(5 * bytes / POWER(2,40), 2) prices each TiB at a flat rate —
-- presumably the legacy $5/TiB on-demand price; confirm current pricing.
DECLARE var_day STRING DEFAULT '2021-09-09';
SELECT
protopayload_auditlog.servicedata_v1_bigquery.jobCompletedEvent.job.jobStatistics.createTime,
round(5* (protopayload_auditlog.servicedata_v1_bigquery.jobCompletedEvent.job.jobStatistics.totalProcessedBytes/POWER(2,40) ),2) AS processedBytesCostProjection,
round(5* (protopayload_auditlog.servicedata_v1_bigquery.jobCompletedEvent.job.jobStatistics.totalBilledBytes/POWER(2,40) ),2) AS billedBytesCostInUSD
FROM
-- Wildcard over the date-sharded audit-log export; <dataset_auditlogs> is a
-- placeholder for the dataset that receives the data-access log sink.
`<dataset_auditlogs>.cloudaudit_googleapis_com_data_access_*`
WHERE
-- _TABLE_SUFFIX prunes the date shards scanned; the createTime predicate
-- then filters rows inside the remaining shards.
_TABLE_SUFFIX >= var_day and protopayload_auditlog.servicedata_v1_bigquery.jobCompletedEvent.job.jobStatistics.createTime>=TIMESTAMP(var_day)
-- Keep only completed query jobs (excludes load/extract/copy events).
AND protopayload_auditlog.servicedata_v1_bigquery.jobCompletedEvent.eventName="query_job_completed"