Install the components for GCP Cloud Monitoring metrics export
#!/bin/bash
# Get current project id
export PROJECT_ID=$(gcloud config get-value project)
echo "Current project id: $PROJECT_ID"
# Suppress Python 2 warning in Cloud Shell
mkdir -p ~/.cloudshell
touch ~/.cloudshell/no-python-warning
touch ~/.cloudshell/no-pip-warning
echo "======================="
echo "Clone the solution repo"
git clone https://github.com/GoogleCloudPlatform/stackdriver-metrics-export
cd stackdriver-metrics-export
echo "======================="
echo "Enable the required APIs"
echo "It could take up to a few minutes. Please be patient..."
gcloud services enable compute.googleapis.com \
cloudscheduler.googleapis.com \
cloudfunctions.googleapis.com \
cloudresourcemanager.googleapis.com
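# Optional check (not in the original script): confirm the required APIs now show as enabled.
gcloud services list --enabled | grep -E "compute|cloudscheduler|cloudfunctions|cloudresourcemanager"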
echo "======================="
echo "Create the BigQuery tables"
echo "Create a Dataset and then a table using the schema JSON files"
bq mk metric_export
bq mk --table --time_partitioning_type=DAY metric_export.sd_metrics_export_fin ./bigquery_schemas/bigquery_schema.json
bq mk --table --time_partitioning_type=DAY metric_export.sd_metrics_stats ./bigquery_schemas/bigquery_schema_stats_table.json
bq mk --table metric_export.sd_metrics_params ./bigquery_schemas/bigquery_schema_params_table.json
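# Optional sanity check (not in the original script): the dataset should now contain the three tables.
bq ls metric_export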
echo "======================="
echo "Replace the JSON token in the config.py files"
# Generate a new token and then replace that token in each of the config.py files. The same token is used in the Cloud Scheduler job below.
TOKEN=$(python -c "import uuid; print(uuid.uuid4())")
LIST_PROJECTS_TOKEN=$(python -c "import uuid; print(uuid.uuid4())")
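# Not in the original script: print the generated tokens so they can be recorded;
# the same LIST_PROJECTS_TOKEN is reused for the Cloud Scheduler job created below.
echo "App Engine token: $TOKEN"
echo "list_projects token: $LIST_PROJECTS_TOKEN"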
sed -i s/16b2ecfb-7734-48b9-817d-4ac8bd623c87/$TOKEN/g list_metrics/config.py
sed -i s/16b2ecfb-7734-48b9-817d-4ac8bd623c87/$TOKEN/g get_timeseries/config.py
sed -i s/16b2ecfb-7734-48b9-817d-4ac8bd623c87/$TOKEN/g write_metrics/config.py
sed -i s/16b2ecfb-7734-48b9-817d-4ac8bd623c87/$TOKEN/g list_projects/config.json
sed -ibk "s/99a9ffa8797a629783cb4aa762639e92b098bac5/$LIST_PROJECTS_TOKEN/g" list_projects/config.json
sed -ibk "s/YOUR_PROJECT_ID/$PROJECT_ID/g" list_projects/config.json
echo "======================="
echo "Deploy the App Engine apps"
cd list_metrics
pip install -t lib -r requirements.txt
has_default_svc=$(gcloud app services list 2>/dev/null | grep default)
if [ -z "$has_default_svc" ]
then
# If this is the first App Engine app in the project, deploy list_metrics as the default service.
sed -i '/^service:/d' app.yaml
export LIST_METRICS_URL=https://$PROJECT_ID.appspot.com
echo "y" | gcloud app create --region=us-central 2>&1 >/dev/null
echo "y" | gcloud app deploy
else
# If a default App Engine service already exists, deploy list_metrics as its own service.
export LIST_METRICS_URL=https://list-metrics-dot-$PROJECT_ID.appspot.com
echo "y" | gcloud app deploy
fi
# Now, get the get_timeseries and write_metrics URLs and create the Pub/Sub topics and subscriptions
export GET_TIMESERIES_URL=https://get-timeseries-dot-$PROJECT_ID.appspot.com
export WRITE_METRICS_URL=https://write-metrics-dot-$PROJECT_ID.appspot.com
cd ../get_timeseries
pip install -t lib -r requirements.txt
echo "y" | gcloud app deploy
cd ../write_metrics
pip install -t lib -r requirements.txt
echo "y" | gcloud app deploy
echo "======================="
echo "Create the Pub/Sub topics and subscriptions"
gcloud pubsub topics create metrics_export_start
gcloud pubsub subscriptions create metrics_export_start_sub --topic metrics_export_start \
--ack-deadline=60 --message-retention-duration=10m \
--push-endpoint="$LIST_METRICS_URL/_ah/push-handlers/receive_message"
gcloud pubsub topics create metrics_list
gcloud pubsub subscriptions create metrics_list_sub --topic metrics_list --ack-deadline=60 \
--message-retention-duration=30m \
--push-endpoint="$GET_TIMESERIES_URL/_ah/push-handlers/receive_message"
gcloud pubsub topics create write_metrics
gcloud pubsub subscriptions create write_metrics_sub --topic write_metrics \
--ack-deadline=60 --message-retention-duration=30m \
--push-endpoint="$WRITE_METRICS_URL/_ah/push-handlers/receive_message"
echo "======================="
echo "Create a service account for the list_projects function"
gcloud beta iam service-accounts create \
gce-list-projects \
--description "Used for the function that lists the projects for the GCE Footprint Cloud Function"
export LIST_PROJECTS_SERVICE_ACCOUNT=gce-list-projects@$PROJECT_ID.iam.gserviceaccount.com
echo "======================="
echo "Assign IAM permissions to the service account"
gcloud projects add-iam-policy-binding $PROJECT_ID --member="serviceAccount:$LIST_PROJECTS_SERVICE_ACCOUNT" --role="roles/compute.viewer"
gcloud projects add-iam-policy-binding $PROJECT_ID --member="serviceAccount:$LIST_PROJECTS_SERVICE_ACCOUNT" --role="roles/browser"
gcloud projects add-iam-policy-binding $PROJECT_ID --member="serviceAccount:$LIST_PROJECTS_SERVICE_ACCOUNT" --role="roles/pubsub.publisher"
echo "======================="
echo "Deploy the list_projects function"
cd ../list_projects
gcloud functions deploy list_projects \
--trigger-topic metric_export_get_project_start \
--runtime nodejs10 \
--entry-point list_projects \
--service-account=$LIST_PROJECTS_SERVICE_ACCOUNT
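# Optional check (not in the original script): the function should report status ACTIVE once the deploy finishes.
gcloud functions describe list_projects --format="value(status)"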
echo "======================="
echo "Deploy the Cloud Scheduler job"
# Runs every 5 minutes. Change the schedule if needed
gcloud scheduler jobs create pubsub metric_export \
--schedule "*/5 * * * *" \
--topic metric_export_get_project_start \
--message-body "{ \"token\":\"$LIST_PROJECTS_TOKEN\"}"
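# Optional (not in the original script): trigger the export once immediately rather than waiting for the schedule.
gcloud scheduler jobs run metric_export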
echo "======================="
echo "Done."