
@djmitche
Created July 9, 2020 16:27

Run this as python3 gcloud-tail.py FILTER, where FILTER is a Cloud Logging filter expression passed through to gcloud logging read. Each log message is written to stdout as a single line of JSON, so you can process the results with jq.
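For example, using a hypothetical filter for Compute Engine instance logs and pulling out only the text payload (entries without a textPayload will print as null; adjust the jq expression for structured logs):

    python3 gcloud-tail.py 'resource.type="gce_instance"' | jq -r .textPayload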

import sys
import json
import subprocess

# TODO: make this an option
freshness = '1m'
limit = 100


def get_logs():
    """Fetch recent log entries via `gcloud logging read` and return them as a list."""
    global freshness, limit
    args = ['gcloud', 'logging', 'read', *sys.argv[1:],
            '--format', 'json', '--freshness', freshness]
    if limit:
        args.extend(['--limit', str(limit)])
    res = subprocess.run(args, stdout=subprocess.PIPE, check=True)
    # after the first time, only fetch 30s of logs, but an unlimited number
    freshness = '30s'
    limit = None
    return json.loads(res.stdout)


def main():
    seen_insert_ids = []
    while True:
        # each iteration re-invokes gcloud, so this polls roughly as fast as gcloud responds
        logs = get_logs()
        # drop any entries we already printed in a previous iteration
        keepers = []
        for log in logs:
            insert_id = log['insertId']
            if any(insert_id in seen for seen in seen_insert_ids):
                continue
            keepers.append(log)
        for log in sorted(keepers, key=lambda l: l['receiveTimestamp']):
            sys.stdout.write(json.dumps(log) + '\n')
        # stash the set of insertIds we saw, and only keep the last 10 batches
        seen_insert_ids.append(set(l['insertId'] for l in logs))
        seen_insert_ids = seen_insert_ids[-10:]


if __name__ == '__main__':
    main()