Forked from mikesparr/gcp-audit-log-sink-bigquery-gcloud.sh
Created November 15, 2022 20:05
Save Joey012345/b96849c8b5dd909becdaecf5d6c3a9b8 to your computer and use it in GitHub Desktop.
Example setting up aggregate log sink for Audit Logs on Google Cloud Platform (GCP) shipping to BigQuery
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
#!/usr/bin/env bash
#####################################################################
# Aggregate audit-log sink demo: ship GCP Audit Logs from a folder
# to a BigQuery dataset in a dedicated security project.
#
# REFERENCES
# - https://cloud.google.com/logging/docs/export/aggregated_sinks
# - https://cloud.google.com/bigquery/docs/datasets#bq
# - https://cloud.google.com/bigquery/docs/access-control-basic-roles
#####################################################################
set -euo pipefail  # fail fast on errors, unset vars, and pipeline failures

export PROJECT_ID="$(gcloud config get-value project)"
export PROJECT_USER="$(gcloud config get-value core/account)"  # set current user
export PROJECT_NUMBER="$(gcloud projects describe "$PROJECT_ID" --format="value(projectNumber)")"
export IDNS="${PROJECT_ID}.svc.id.goog"  # workload identity domain
export GCP_REGION="us-central1"   # CHANGEME (OPT)
export GCP_ZONE="us-central1-a"   # CHANGEME (OPT)
#####################################################################
# FOLDERS (create a folder for experiment)
#####################################################################
# FOLDER must be provided by the caller (created previously, DoiT-specific).
export SANDBOX_FOLDER="${FOLDER:?set FOLDER to the sandbox folder ID}"
export DEMO_FOLDER_NAME="products"
# Capture the new folder's numeric ID directly instead of hard-coding it
# after the fact: the create call returns "folders/<folder-id>", and the
# basename() transform strips the "folders/" prefix.
export DEMO_FOLDER_ID="$(gcloud resource-manager folders create \
  --display-name="$DEMO_FOLDER_NAME" \
  --folder="$SANDBOX_FOLDER" \
  --format="value(name.basename())")"
echo "Created demo folder: $DEMO_FOLDER_ID"
#####################################################################
# SECURITY PROJECT (security-team-only access; audit logs ship here)
#####################################################################
# BILLING must be provided by the caller (created previously).
export BILLING_ACCOUNT_ID="${BILLING:?set BILLING to the billing account ID}"
export SECURITY_PROJECT_ID="example-security"

# Create the security project inside the demo folder and link billing.
gcloud projects create "$SECURITY_PROJECT_ID" \
  --folder "$DEMO_FOLDER_ID"
gcloud beta billing projects link "$SECURITY_PROJECT_ID" \
  --billing-account="$BILLING_ACCOUNT_ID"
gcloud services enable compute.googleapis.com \
  storage.googleapis.com \
  bigquery.googleapis.com \
  --project "$SECURITY_PROJECT_ID"

# Place a lien so the project cannot be deleted accidentally (key project).
gcloud alpha resource-manager liens create \
  --restrictions=resourcemanager.projects.delete \
  --reason="Contains audit logs and sensitive data" \
  --project "$SECURITY_PROJECT_ID"

# Enforce the org policy that prevents public buckets in this project
# ( storage.publicAccessPrevention ).
gcloud resource-manager org-policies enable-enforce \
  --project "$SECURITY_PROJECT_ID" \
  storage.publicAccessPrevention
#####################################################################
# BIGQUERY LOG SINK (aggregate audit logs for all projects in folder)
#####################################################################
export DATASET_ID="audit_logs"
export SINK_NAME="audit-log-sink"
# Match only Cloud Audit Log entries (single quotes keep the inner
# double quotes literal for the Logging filter syntax).
export LOG_FILTER='protoPayload.@type:"type.googleapis.com/google.cloud.audit.AuditLog"'

# Create the BigQuery dataset in the security project.
bq --location=US mk -d \
  --description "Audit log sink" \
  --project_id "$SECURITY_PROJECT_ID" \
  "$DATASET_ID"

# Create an aggregate log sink on the demo folder -> BQ dataset.
# --include-children makes the sink cover every project under the folder.
gcloud logging sinks create "$SINK_NAME" \
  "bigquery.googleapis.com/projects/$SECURITY_PROJECT_ID/datasets/$DATASET_ID" \
  --include-children \
  --folder="$DEMO_FOLDER_ID" \
  --log-filter="$LOG_FILTER"

# The sink writes through a generated service account (writerIdentity,
# e.g. "serviceAccount:..."); grant it BigQuery data editor on the
# security project so it can insert rows.
export SINK_SA="$(gcloud logging sinks describe "$SINK_NAME" \
  --folder "$DEMO_FOLDER_ID" --format="value(writerIdentity)")"
echo "Captured log sink SA: $SINK_SA"
gcloud projects add-iam-policy-binding "$SECURITY_PROJECT_ID" \
  --member="$SINK_SA" --role=roles/bigquery.dataEditor
#####################################################################
# EXAMPLE PROJECTS (will ship logs to sink)
#####################################################################
export EXAMPLE_PROJECT_1="mike-example-project-1"
export EXAMPLE_PROJECT_2="mike-example-project-2"

#######################################
# Create a demo project under the demo folder, link billing, and
# enable the compute API plus one project-specific extra API.
# Globals:   DEMO_FOLDER_ID, BILLING_ACCOUNT_ID (read)
# Arguments: $1 - project ID
#            $2 - extra service to enable (e.g. pubsub.googleapis.com)
#######################################
create_example_project() {
  local project_id=$1
  local extra_service=$2
  gcloud projects create "$project_id" \
    --folder "$DEMO_FOLDER_ID"
  gcloud beta billing projects link "$project_id" \
    --billing-account="$BILLING_ACCOUNT_ID"
  gcloud services enable compute.googleapis.com \
    "$extra_service" \
    --project "$project_id"
}

create_example_project "$EXAMPLE_PROJECT_1" pubsub.googleapis.com
create_example_project "$EXAMPLE_PROJECT_2" storage.googleapis.com
# check BQ dataset and confirm entries appear
#####################################################################
# GENERATE TERRAFORM FOR THE EXAMPLE ABOVE
# - https://registry.terraform.io/providers/hashicorp/google/latest/docs/resources/logging_folder_sink
# - https://registry.terraform.io/providers/hashicorp/google/latest/docs/resources/logging_organization_sink
#####################################################################
gcloud config set project "$SECURITY_PROJECT_ID"
gcloud services enable cloudasset.googleapis.com
# Export the folder's existing resources as Terraform configuration.
gcloud beta resource-config bulk-export \
  --folder="$DEMO_FOLDER_ID" \
  --resource-format=terraform > main.tf
# fetch relevant resources from main.tf file and modularize as needed
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment.