Track Steam server statistics
# -*- coding: utf-8 -*-
"""README
**Requirements**
Python 3, `steam` package (!pip install steam). Steam itself is *not* required.
**Usage**
In Python terminal:
python path/steamlog.py --servers="name1 name2" --stats="players map" --savepath="path/logs.csv" --appid=224260 --interval=600
**Arguments**
servers: list of server names to log
stats: list of statistic names to log for each server
savepath: relative/absolute path to write statistics to
appid: id of Steam server app
interval: log frequency in seconds
**Guide**
- `servers` can be one server, and `stats` one statistic.
- `appid` is in the game's store page URL. Steam > Library > click game > Store Page > right click > Copy page URL
> Paste somewhere. Example: https://store.steampowered.com/app/224260/No_More_Room_in_Hell/. `appid=224260`.
- `savepath` best bet is to specify full path, e.g. "D:\server_logs.csv". Path *must* end with ".csv".
- `python steamlog.py --help` for argument defaults (if you don't supply "--name value", these will be used)
- `python path/steamlog.py`: "path" here is the relative or abolute path to the script. Best bet is to "cd" into the directory.
Shift + Right click the script, "Copy as path", paste somewhere. Ex: "D:\stuff\scripts\steamlog.py":
1. Open terminal
2. cd d:
3. cd "stuff\scripts"
4. python steamlog.py
**Possible `stats`**
_ping, _type, protocol, name, map, folder, game, app_id, players, max_players,
bots, server_type, environment, visibility, vac, version, edf, port,
steam_id, keywords, game_id
"""
import os, argparse, csv
from datetime import datetime
from time import sleep

from steam import game_servers as gs


def query_stats(server_name, stats, appid='224260'):
    filter_text = rf'\appid\{appid}\name_match\{server_name}'
    addr = list(gs.query_master(filter_text, max_servers=3))[0]
    query = gs.a2s_info(addr)
    return [query[s] for s in stats]


def collect_stats(data, stats, interval=600, savepath=None, appid='224260'):
    def _save(data, savepath):
        def _format_values(data):
            values = []
            for server_stats in data.values():
                values.extend(list(map(str, server_stats[-1])))
            return values

        if not os.path.isfile(savepath):
            colnames = []
            # server1-stat1 | server1-stat2 | ... | server2-stat1 | server2-stat2
            for server_name in data:
                for stat_name in (*stats, 'time'):
                    colnames.append(server_name + '-' + stat_name)
            with open(savepath, mode='w', newline='') as f:
                writer = csv.writer(f)
                writer.writerow(colnames)  # write column headers

        values = _format_values(data)  # fetch last row
        with open(savepath, mode='a+', newline='') as f:
            writer = csv.writer(f)
            writer.writerow(values)
        print("Logged to", savepath, flush=True)

    def _collect(data, appid):
        def _try_append(data, server_name, appid):
            try:
                data[server_name].append([*query_stats(server_name, stats, appid),
                                           datetime.now()])
            except Exception as e:
                print(f"Failed to query {server_name}; retrying in 10 secs ",
                      flush=True)
                print("Errmsg:", e)
                ti = 0
                while ti < 10:
                    print(end='.', flush=True)
                    sleep(1)
                    ti += 1
                _try_append(data, server_name, appid)

        for server_name in data:
            _try_append(data, server_name, appid)

    newline = not isinstance(savepath, str)
    while True:
        _collect(data, appid)
        _print_to_terminal(data, stats, newline)
        if savepath is not None:
            try:
                _save(data, savepath)
            except Exception as e:
                print("Couldn't save to {}; close file if open".format(savepath),
                      flush=True)
                print("Errmsg:", e)
        sleep(interval)


def _print_to_terminal(data, stats, newline):
    def _print_latest(data, stat_name, stat_idx, max_name_len):
        def _latest_values(data, stat_idx):
            return [str(v[-1][stat_idx]) for v in data.values()]

        names_txt = '(%s)' % ', '.join(list(data.keys()))
        values_txt = '(%s)' % ', '.join(_latest_values(data, stat_idx))
        stat_name = stat_name.ljust(max_name_len)  # for vertical align
        print(end="{}: {} = {}".format(stat_name, names_txt, values_txt),
              flush=True)
        if stat_idx == 0:  # log time only once
            latest_time = list(data.values())[0][-1][-1]
            print(end=" -- (%s)" % latest_time)
        if stat_idx != len(stats) - 1:
            print()  # if more stats to come, newline

    max_name_len = max(map(len, stats))
    for stat_idx, stat_name in enumerate(stats):
        _print_latest(data, stat_name, stat_idx, max_name_len)
    print(end="\n" if newline else " ")


if __name__ == '__main__':
    parser = argparse.ArgumentParser()
    parser.add_argument('--servers', nargs='+', default=['WOOT', '....#2'],
                        help="names of servers to query for player count")
    parser.add_argument('--stats', nargs='+', default=['players', 'map'],
                        help="names of statistics to gather")
    parser.add_argument('-i', '--interval', default=600,
                        help="logging interval, in seconds")
    parser.add_argument('--savepath', default="server_logs.csv",
                        help="path (.csv) to stream data to")
    parser.add_argument('--appid', default='224260',
                        help="game's steam AppID; 224260=NMRiH")
    args = parser.parse_args()

    data = {}
    server_names = args.servers
    stats = args.stats
    # support --servers="name1 name2" (one quoted string) as well as
    # --servers name1 name2
    if isinstance(server_names, list) and len(server_names) == 1:
        server_names = server_names[0].split()
    if isinstance(stats, list) and len(stats) == 1:
        stats = stats[0].split()
    for server_name in server_names:
        data[server_name] = []

    collect_stats(data, stats, int(args.interval), args.savepath, args.appid)
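
For a quick sanity check outside the logger, the two `steam.game_servers` calls that `query_stats` relies on can be run standalone. A minimal sketch, using the same `query_master`/`a2s_info` calls as the script (the server name and printed fields are illustrative; any keys from the "Possible stats" list above work):

from steam import game_servers as gs

# ask the Steam master server for up to 3 addresses matching the filter
filter_text = r'\appid\224260\name_match\WOOT'
addresses = list(gs.query_master(filter_text, max_servers=3))

if addresses:
    info = gs.a2s_info(addresses[0])  # dict keyed by the names under "Possible stats"
    print(info['name'], '|', info['map'], '|', info['players'], '/', info['max_players'])
else:
    print("no servers matched the filter")
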
@OverLordGoldDragon (Author) commented:

Plotting (requires matplotlib):

python plotlog.py --path="C:\server_logs.csv" --smoothing=40

import argparse
import csv
import matplotlib.pyplot as plt
import numpy as np
from matplotlib import dates
from datetime import datetime


def plot_stats(path, smoothing=2, w=1, h=1, fmt='-'):
    def _drop_nonnumeric(values, stat_names):
        drop_cols = []
        for col_idx, col in enumerate(values[0]):  # `col` == column entry
            if not str(col).isdigit():
                drop_cols.append(col_idx)
                print(("NOTE: column {} ({}) is non-numeric, will not plot"
                       ).format(col_idx, stat_names[col_idx]))

        new_values = []
        for row in values:
            new_values.append([])
            for col_idx, col in enumerate(row):
                if col_idx not in drop_cols:
                    new_values[-1].append(col)

        new_stats = [stat for col_idx, stat in enumerate(stat_names)
                     if col_idx not in drop_cols]
        return new_values, new_stats

    def _smooth(x, smoothing=0):
        # average `smoothing` adjacent points, sequentially, to smooth plot
        remainder = len(x) % smoothing
        if remainder == 0:
            # data divides evenly; no leftover chunk to average separately
            return x.reshape(-1, smoothing).mean(axis=1)

        r = x[-remainder:]
        _x = x[:-remainder].reshape(-1, smoothing)
        # remainder averaged by its own number of samples;
        # end-result number of samples is same as with padding
        return np.array([*_x.mean(axis=1), r.mean()])

    def _downsample(times, smoothing=0):
        return np.array([t for i, t in enumerate(times)
                         if (i % smoothing) == 0 or i == 0])

    with open(path, 'r') as f:
        rows = list(csv.reader(f))

    stat_names = rows.pop(0)
    times = [dates.date2num(datetime.strptime(r.pop(-1), '%Y-%m-%d %H:%M:%S.%f'))
             for r in rows]
    values = rows

    data, names = _drop_nonnumeric(values, stat_names)
    data = np.array(data).astype('float32').T

    if smoothing:
        times = _downsample(times, smoothing)
    for x in data:
        if smoothing:
            x = _smooth(x, smoothing)
        plt.plot_date(times, x, fmt=fmt)
    plt.legend(names, fontsize=15)  # label only the (numeric) columns actually plotted
    plt.gcf().set_size_inches(14 * w, 7 * h)
    plt.show()

    print("Averages: ({}) = ({})".format(', '.join(map(str, data.mean(axis=1))),
                                         ', '.join(names)))

if __name__ == '__main__':
    parser = argparse.ArgumentParser()
    parser.add_argument('--path', help="path to .csv log file for plotting")
    parser.add_argument('--smoothing', default=0,
                        help=("plot smoothing factor; will average by this "
                              "number of adjacent points"))
    parser.add_argument('--fmt', default='-',
                        help="plot marker ('-' == line, '.' == dot, 'o', etc)")
    parser.add_argument('--width', default=1,
                        help="plot width scaling factor")
    parser.add_argument('--height', default=1,
                        help="plot height scaling factor")

    args = parser.parse_args()
    if not args.path:
        raise Exception("no path to file provided")
    plot_stats(args.path, int(args.smoothing), float(args.width), float(args.height),
               args.fmt)
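
To make the smoothing scheme above concrete: full chunks of `smoothing` adjacent samples are averaged together, and any leftover samples at the end are averaged by themselves. A tiny self-contained sketch (made-up values, numpy only):

import numpy as np

x = np.array([1., 2., 3., 4., 5., 6., 7.], dtype='float32')
smoothing = 3

remainder = len(x) % smoothing                            # 7 % 3 == 1
full = x[:len(x) - remainder]                             # first 6 samples
chunk_means = full.reshape(-1, smoothing).mean(axis=1)    # [2., 5.]
tail_mean = x[len(x) - remainder:].mean()                 # mean of final sample == 7.0
print(np.append(chunk_means, tail_mean))                  # [2. 5. 7.]
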
