@kotarou3
Created March 25, 2023 18:21
# Scores Mudfish "advanced mode" nodes since the built-in tool isn't very useful.
# Run with: scrapy runspider mudfish.py
import json
import re
import scrapy
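
# Endpoints to score routes between (presumably the local WAN IP and the
# remote server's IP; replace these with your own values).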
srcIp = "110.175.213.58"
destIp = "124.150.157.158"
srcCountry = "AU"
destCountry = "JP"
blacklist = [
    # Due to congestion or packet loss
    "Sydney - BinaryLane",
    "Sydney - Google",
    "Tokyo - Google",
    "Tokyo - Linode",

    # Due to physical distance
    "Adelaide - ",
    "Melbourne - ",
    "Perth - ",
    "Hokkaido - ",
]
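
# Overall flow:
#   1. index()      - list advanced-mode nodes and keep only src/dest-country ones
#   2. node_ips()   - learn each node's own IP from the ping tool's URLs
#   3. find_paths() - queue pings from every node to srcIp/destIp, plus
#                     source-country -> destination-country node pairs
#   4. ping()       - score each ping result (max RTT plus a jitter penalty)
#   5. closed()     - combine the scores into one- and two-hop paths and print them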
class MudfishSpider(scrapy.Spider):
    name = "Mudfish"
    nodes = {}
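
    # Fetch the ping index page, which lists every advanced-mode node.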
    def start_requests(self):
        yield scrapy.Request(
            "https://ping.mudfish.net/",
            self.index
        )
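
    # Keep only nodes in the source or destination country that aren't blacklisted.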
    def index(self, response):
        for node in response.xpath('//*[@name="nodes[]"]'):
            if node.attrib["location"][:2] not in {srcCountry, destCountry}:
                continue
            for substr in blacklist:
                if substr in node.attrib["location"]:
                    break
            else:
                self.nodes[node.attrib["value"]] = {"name": node.attrib["location"]}
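
        # Start Mudfish's ping tool for the selected nodes; the response embeds
        # "/ping/<node id>/<node ip>/8.8.8.8" URLs that reveal each node's IP.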
        yield scrapy.FormRequest(
            "https://ping.mudfish.net/ping/start/8.8.8.8",
            method="POST",
            formdata={"nodes": ",".join(self.nodes.keys())},
            callback=self.node_ips
        )
    def node_ips(self, response):
        for match in re.findall(r'"/ping/(\d+)/([0-9.]+)/8\.8\.8\.8"', response.text):
            if match[0] not in self.nodes:
                print("Unknown match", match)
                continue
            self.nodes[match[0]]["ip"] = match[1]

        return self.find_paths()
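
    # Queue the ping requests: every node pings srcIp and destIp, and every
    # source-country node additionally pings every destination-country node.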
    def find_paths(self):
        n = 0
        for id, node in self.nodes.items():
            yield self.ping(id, srcIp)
            yield self.ping(id, destIp)
            n += 2

            if node["name"][:2] == srcCountry:
                for node2 in self.nodes.values():
                    if node2["name"][:2] == destCountry:
                        yield self.ping(id, node2["ip"])
                        n += 1

        print("Paths to scan:", n)
    def ping(self, fromId, toIp):
        assert fromId in self.nodes
        node = self.nodes[fromId]
        node.setdefault("score_to", {})

        def parse(response):
            nonlocal node, toIp
            data = json.loads(response.text)
            score = float(data["rtt_max"]) + (float(data["rtt_max"]) - float(data["rtt_min"]))**2
            node["score_to"][toIp] = score

        fromIp = node["ip"]
        return scrapy.Request(
            f"https://ping.mudfish.net/ping/{fromId}/{fromIp}/{toIp}",
            parse
        )
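
    # Once all pings have completed, combine the per-leg scores into
    # single-node (entry + exit) and two-node (entry + internal + exit) paths.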
    def closed(self, reason):
        results = []
        for node1 in self.nodes.values():
            entryScore = node1["score_to"][srcIp]
            exitScore = node1["score_to"][destIp]
            results.append((round(entryScore + exitScore), [node1["name"]]))

            if node1["name"][:2] == srcCountry:
                for node2 in self.nodes.values():
                    if node2["name"][:2] != destCountry:
                        continue
                    intScore = node1["score_to"][node2["ip"]]
                    exitScore = node2["score_to"][destIp]
                    results.append((round(entryScore + intScore + exitScore), [node1["name"], node2["name"]]))

        results.sort()
        for result in reversed(results):
            print(f"{result[0]}: {result[1]}")