@IGTHORN
Forked from ushkinaz/cygwin-mirror-speed.py
Last active February 16, 2019 20:34
Tests speed of http mirrors of cygwin
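For reference, the script parses mirrors.lst, whose lines are semicolon-separated records that, judging by how the fields are indexed below, look like url;hostname;region;country; only the http:// entries get tested. Two made-up lines in that shape:

http://cygwin.example-mirror.org/cygwin/;cygwin.example-mirror.org;Europe;Germany
ftp://ftp.example.edu/pub/cygwin/;ftp.example.edu;North America;United States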
#!/usr/bin/env python3
"""
Tests the speed of HTTP mirrors of Cygwin.

Downloads the official mirror list, fetches the start of setup.ini from
every http:// mirror in parallel, and prints the five fastest mirrors.
"""
import random
import sys
import time
from multiprocessing import Pool
from urllib.request import urlopen

__author__ = 'Dmitry Sidorenko'

mirrors_url = "http://cygwin.com/mirrors.lst"  # semicolon-separated mirror list
test_file = "/x86_64/setup.ini"                # small file fetched from each mirror
block_sz = 8096                                # bytes to read for the timing test
def test_host(hostentry):
    """Time how long it takes to fetch the first block of setup.ini from one mirror."""
    host = hostentry["host"]
    sys.stdout.flush()
    start_time = time.time()
    try:
        test = urlopen(host[0] + test_file, timeout=5)
        test.read(block_sz)
        time_spent = time.time() - start_time
        hostentry["time"] = time_spent
        print("testing mirror %s ... %s sec" % (host[1], time_spent))
    except IOError:
        # Unreachable or too slow; push the mirror to the bottom of the ranking.
        hostentry["time"] = 9999
        print("testing mirror %s ... timeout" % host[1])
    return hostentry
if __name__ == '__main__':
    # Each entry: {"host": [url, hostname, region, country], "time": seconds}
    mirrors = []

    print("Downloading mirrors list...", end='')
    sys.stdout.flush()
    u = urlopen(mirrors_url)
    for line in u:
        # Lines arrive as bytes, e.g. b"http://...;hostname;region;country\n"
        strline = line.decode("utf-8", "replace").strip()
        host = strline.split(";")
        # Only test http mirrors
        if host[0].startswith("http://"):
            mirrors.append({"host": host, "time": 9999})
    print("done, %d entries" % len(mirrors))

    random.shuffle(mirrors)

    # Probe the mirrors in parallel, roughly one worker per two mirrors.
    num_processes = max(1, len(mirrors) // 2)
    with Pool(processes=num_processes) as pool:
        mirrors = pool.map(test_host, mirrors)

    mirrors = sorted(mirrors, key=lambda entry: entry["time"])

    print("\nTop 5 mirrors\n")
    sys.stdout.flush()
    for mirror in mirrors[:5]:
        host_info = mirror["host"]
        if mirror["time"] < 9999:
            print("%.3f, %14s, %10s, %s"
                  % (mirror["time"], host_info[2], host_info[3], host_info[0]),
                  file=sys.stderr)
IGTHORN commented Aug 20, 2016

Modification of the original script to run the tests in parallel using multiprocessing.Pool.map(). This cuts down the time required to find the fastest servers.
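For anyone skimming, the parallel pattern boils down to something like this minimal sketch; probe() is just a stand-in worker, not the script's actual test_host():

from multiprocessing import Pool
import time

def probe(n):
    # stand-in worker: pretend each probe takes a little while
    time.sleep(0.1)
    return n * n

if __name__ == '__main__':
    with Pool(processes=4) as pool:
        # map() blocks until all workers finish and preserves input order
        results = pool.map(probe, range(10))
    print(results)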
