Exporting instance count from Stackdriver metrics
"""
Code for exporting instance count for last month
Setup:
pip install --upgrade google-cloud-monitoring
DOC:
https://cloud.google.com/monitoring/docs/reference/libraries#client-libraries-usage-python
Code snippet:
https://cloud.google.com/monitoring/custom-metrics/reading-metrics
"""
import csv
import time
import datetime
from collections import Counter, defaultdict
from google.cloud import monitoring_v3
from google.protobuf.timestamp_pb2 import Timestamp
from google.protobuf.duration_pb2 import Duration
from google.protobuf.json_format import MessageToJson
client = monitoring_v3.MetricServiceClient()
project_name = client.common_project_path('<project_name>')

def get_epoch(d):
    return int((d - datetime.datetime(1970, 1, 1)).total_seconds())
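
# Note: get_epoch treats the naive datetime as a UTC instant, so the
# Timestamp values built from it below are interpreted as UTC.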


def get_day_of_request(date, filter, days=1):
    interval = monitoring_v3.types.TimeInterval(
        start_time=Timestamp(seconds=get_epoch(date)),
        end_time=Timestamp(seconds=get_epoch(
            date + datetime.timedelta(days=days)))
    )
    # https://cloud.google.com/monitoring/api/ref_v3/rpc/google.monitoring.v3
    aggregation = monitoring_v3.types.Aggregation(
        alignment_period=Duration(seconds=60),  # align points into 60-second buckets
        group_by_fields=["resource.labels.\"module_id\""],
        per_series_aligner=monitoring_v3.Aggregation.Aligner.ALIGN_MEAN,
    )
    results = client.list_time_series(
        request=monitoring_v3.ListTimeSeriesRequest(
            name=project_name,
            filter=filter,
            interval=interval,
            aggregation=aggregation,
        )
    )
    date_count = defaultdict(Counter)
    for result in results:
        # For debugging:
        # resource_json = MessageToJson(result.resource)
        module_id = result.resource.labels.get("module_id")
        for point in result.points:
            # Depending on the metric, the value can be an int or a float.
            value = point.value.int64_value or point.value.double_value
            date_count[point.interval.start_time.isoformat()][module_id] += value
    return date_count
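
# get_day_of_request returns a nested mapping keyed by aligned interval start
# time, e.g. (illustrative values only):
#   {"2021-10-19T00:01:00+00:00": Counter({"default": 1.8, "worker": 0.4}), ...}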


def analyse_data(start_date, days):
    billable_day = get_day_of_request(
        start_date,
        filter='metric.type = "appengine.googleapis.com/system/billed_instance_estimate_count" resource.type="gae_app"',
        days=days
    )
    active_day = get_day_of_request(
        start_date,
        filter='metric.type="appengine.googleapis.com/system/instance_count" resource.type="gae_app" metadata.system_labels."state"="ACTIVE"',
        days=days
    )
    total_billable = Counter()
    total_active = Counter()
    with open('instance_count.csv', 'w') as csvfile:
        w = csv.writer(csvfile, delimiter=',',
                       quotechar='"', quoting=csv.QUOTE_MINIMAL)
        w.writerow(['time', 'module_id', 'billable', 'active', 'ratio'])
        for dt, module_count in billable_day.items():
            for module_id, billable_count in module_count.items():
                active_count = active_day.get(dt, {}).get(module_id, 0)
                w.writerow([
                    dt,
                    module_id,
                    billable_count,
                    active_count,
                    100. * active_count / billable_count if billable_count > 0 else None
                ])
                total_billable[module_id] += billable_count
                total_active[module_id] += active_count
    for module_id in total_billable:
        active = total_active[module_id]
        billable = total_billable[module_id]
        if billable <= 0:
            print(f"{module_id} is not billed")
            continue
        ratio = min(100. * active / billable, 100)
        print(f"{module_id}: {active}/{billable} ({ratio:.1f}%)")


if __name__ == "__main__":
    analyse_data(start_date=datetime.datetime(2021, 10, 19), days=1)
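
# The docstring mentions exporting "the last month"; to do that, the same entry
# point can presumably be run over a longer window, e.g. (illustrative dates):
#
#   analyse_data(start_date=datetime.datetime(2021, 10, 1), days=31)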