@sweeneyde
Last active March 8, 2024 03:56
Monoid homology calculator
"""
Monoid Homology Calculator
written by Dennis Sweeney
Given a monoid M, we can produce a topological space (CW complex) BM,
whose n-cells are n-tuples of non-identity elements of M.
This program computes the integral homology of the space BM.
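Concretely, BM is the bar construction on M: the cellular boundary of an
n-cell (x1, ..., xn) is the usual bar differential
    d(x1, ..., xn) = (x2, ..., xn)
                     + sum_{i=1..n-1} (-1)^i (x1, ..., x_i*x_{i+1}, ..., xn)
                     + (-1)^n (x1, ..., x_{n-1}),
where x_i*x_{i+1} is the product in M and any face containing the identity
is treated as degenerate. This is what boundary() below computes, pushed
down to the essential cells.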
The word problem (deciding whether two words represent the same element)
is undecidable in general,
so we restrict to nice classes of monoid presentations. In particular,
we look at "rewriting systems": an alphabet along with rules of the
form "L->R" (e.g. "xyz->xz"). These take the form a
finite collection of pairs of strings (L, R),
and we will write uLv --> uRv for any strings u and v.
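For example, with the rule "xyz->xz" we have "axyzb" --> "axzb"
(here u = "a" and v = "b").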
A rewriting system is called "Canonical" if it is
(1) Noetherian: No infinite chains w0 --> w1 --> w2 --> ...
(2) Confluent: If a --> b and a --> c then
there is some z with b --> z and c --> z
(3) Normalized: If x->y is a rule then y is irreducible, and
x is not reducible by any rule other than x->y.
Any word in such a system reduces to a unique irreducible form.
This gives rise to a monoid: irreducible words with an operation
given by reducing their concatenation.
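For example, with alphabet "ab" and the single rule "ba->ab", every word
reduces to the form a^m b^n, and the resulting monoid is the free
commutative monoid on two generators (the nat_nat example below).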
In [1], Ken Brown considers such systems and finds that the space BM
deformation retracts onto a smaller cell complex with only finitely
many cells in each dimension. The program below implements Brown's
collapsing scheme to find the finitely many "essential" cells
in each dimension, then computes the cellular homology of this
smaller complex, which will be equal to the homology of M.
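(In this implementation, the essential 1-cells are the single letters, the
essential 2-cells correspond to the rewriting rules, and higher essential
cells are built by overlapping rules; see compute_essentials below.)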
This program requires SageMath to compute homology. To use:
(1) Open a SageMath console or notebook.
(2) Run the following command:
load(URL OR PATH TO THIS FILE PERHAPS USING RAW BUTTON ON GITHUB)
(3) Run the demo with demo(), or try your own:
nat_nat = CRS('ab', [('ba', 'ab')])
print(nat_nat.print_homology(10))
[1] Kenneth S. Brown. "The geometry of rewriting systems: A proof of the Anick-Groves-Squier theorem."
In: Algorithms and classification in combinatorial group theory (Berkeley, CA, 1989), 137-163,
Math. Sci. Res. Inst. Publ., 23, Springer, New York, 1992. MR1230632 (94g:20041), Zbl 0764.20016.
"""
from itertools import permutations
from collections import defaultdict
from functools import lru_cache

class CanonicalRewritingSystem:
    __slots__ = ("alphabet", "rules", "max_rewrites", "prefix_to_rules", "essentials")

    def __init__(self, alphabet, rules, max_rewrites=1000):
        self.alphabet = ''.join(alphabet)
        self.rules = rules
        self.max_rewrites = max_rewrites
        # Assert we're provided correct arguments
        if not isinstance(rules, list):
            raise ValueError("Rules must be a list of pairs of strings")
        for rule in rules:
            if not isinstance(rule, tuple) or len(rule) != 2:
                raise ValueError("Rules must be a list of pairs of strings")
        set_alphabet = set(alphabet)
        if len(set_alphabet) != len(alphabet):
            raise ValueError("duplicate letters in alphabet")
        letters = {c for rule in rules for side in rule for c in side}
        if not (letters <= set_alphabet):
            raise ValueError(f"{letters - set_alphabet} not in alphabet")
        # Assert normalized
        for _, right in rules:
            for left, _ in rules:
                if left in right:
                    raise ValueError(f"right side {right!r} "
                                     f"contains left side {left!r}")
        for (left1, _), (left2, _) in permutations(rules, 2):
            if left1 in left2:
                raise ValueError(f"left side {left2!r} "
                                 f"contains left side {left1!r}")
        for a in alphabet:
            if self.reducible(a):
                raise ValueError(f"Single-letter {a!r} was reducible")
        # Collect proper nonempty prefixes/suffixes
        prefix_to_rules = defaultdict(list)
        suffix_to_rules = defaultdict(list)
        for rule in rules:
            left, _ = rule
            for i in range(1, len(left)):
                prefix, suffix = left[:i], left[i:]
                prefix_to_rules[prefix].append(rule)
                suffix_to_rules[suffix].append(rule)
        self.prefix_to_rules = dict(prefix_to_rules)
        # Tuples (a, b, c, reduce(a+b), reduce(b+c))
        criticals = []
        for middle in prefix_to_rules.keys() & suffix_to_rules.keys():
            right_rules = prefix_to_rules[middle]
            left_rules = suffix_to_rules[middle]
            for (left_before, left_after) in left_rules:
                for (right_before, right_after) in right_rules:
                    assert left_before[-len(middle):] == middle
                    subleft = left_before[:-len(middle)]
                    assert right_before[:len(middle)] == middle
                    subright = right_before[len(middle):]
                    criticals.append((subleft, middle, subright, left_after, right_after))
        # Assert convergent
        for (a, b, c, ab, bc) in criticals:
            ab_c = self.reduce(ab + c)
            a_bc = self.reduce(a + bc)
            if ab_c != a_bc:
                raise ValueError(f"Critical pair {(a,b,c)} did not resolve")
        self.essentials = {0: [()],
                           1: [(a,) for a in alphabet],
                           2: [(left[0], left[1:]) for left, right in rules]}
    def __str__(self):
        rule_strings = [f"{left}->{right}" for left, right in self.rules]
        return f"Monoid( {self.alphabet} : {', '.join(rule_strings)} )"

    def reducible(self, word):
        return any(left in word for left, right in self.rules)

    def irreducible(self, word):
        return all(left not in word for left, right in self.rules)

    def reduce(self, word):
        # Repeatedly apply the rewriting rules until a fixed point is reached.
        word0 = word
        for _ in range(self.max_rewrites + 1):
            old = word
            for left, right in self.rules:
                word = word.replace(left, right)
            if word == old:
                return word
        raise RuntimeError(f"No fixed point was found for {word0} "
                           f"after {self.max_rewrites} iterations")

    def op(self, word1, word2):
        # The monoid operation: concatenate, then reduce to normal form.
        return self.reduce(word1 + word2)
    @lru_cache(maxsize=1000)
    def successor_words(self, last):
        # Words `rest` such that `last + rest` ends in the left side of some
        # rule but becomes irreducible once its final letter is removed.
        result = []
        for i in range(len(last)):
            suffix = last[i:]
            # XXX is there a faster/better way than this?
            for left, right in self.prefix_to_rules.get(suffix, ()):
                assert left.startswith(suffix)
                rest = left[len(suffix):]
                if self.irreducible((last + rest)[:-1]):
                    result.append(rest)
        return tuple(result)

    def compute_essentials(self, up_to_dimension):
        # Each essential n-cell extends an essential (n-1)-cell by a
        # successor word of its last entry.
        for dim in range(2, up_to_dimension + 1):
            if dim in self.essentials:
                continue
            ess = []
            starts = self.essentials[dim-1]
            for start in starts:
                for word in self.successor_words(start[-1]):
                    assert self.classify(start + (word,))[0] == "ESSENTIAL", (start, word)
                    ess.append(start + (word,))
            self.essentials[dim] = ess
    def classify(self, cell):
        # Brown's collapsing scheme: every cell is DEGENERATE (contains the
        # identity), ESSENTIAL, or half of a (REDUNDANT, COLLAPSIBLE) pair;
        # for the latter two we also return the partner cell and the index
        # of the face relating them.
        assert all(self.irreducible(w) for w in cell)
        if len(cell) == 0:
            return ("ESSENTIAL", None)
        if "" in cell:
            return ("DEGENERATE", None)
        if len(cell[0]) > 1:
            collapsible = (cell[0][0], cell[0][1:]) + cell[1:]
            return ("REDUNDANT", (collapsible, 1))
        for i in range(1, len(cell)):
            a, b = cell[i-1], cell[i]
            ab = a + b
            if self.irreducible(ab):
                redundant = cell[:i-1] + (ab,) + cell[i+1:]
                return ("COLLAPSIBLE", (redundant, i))
            if self.reducible(ab[:-1]):
                for j in range(1, len(b)):
                    b1, b2 = b[:j], b[j:]
                    if self.reducible(a + b1):
                        collapsible = cell[:i] + (b1, b2) + cell[i+1:]
                        return ("REDUNDANT", (collapsible, i+1))
                assert False
        return ("ESSENTIAL", None)
    def boundary(self, cell):
        # Boundary of an essential cell in the collapsed complex: take the
        # bar-complex differential, then rewrite each face in terms of
        # essential cells.
        assert self.classify(cell)[0] == "ESSENTIAL"
        if len(cell) <= 1:
            return
        yield from self.essential_representation(cell[1:], (-1)**0)
        for i in range(1, len(cell)):
            q = self.op(cell[i-1], cell[i])
            face = cell[:i-1] + (q,) + cell[i+1:]
            yield from self.essential_representation(face, (-1)**i)
        yield from self.essential_representation(cell[:-1], (-1)**len(cell))

    @lru_cache(maxsize=1000)
    def essential_representation(self, cell, sign=+1):
        # Express sign * cell as a signed sum of essential cells: degenerate
        # and collapsible cells contribute nothing, and a redundant cell is
        # replaced by the other faces of its collapsible partner.
        kind = self.classify(cell)
        if kind[0] == "ESSENTIAL":
            return ((sign, cell),)
        elif kind[0] == "COLLAPSIBLE":
            return ()
        elif kind[0] == "DEGENERATE":
            return ()
        elif kind[0] == "REDUNDANT":
            collapsible, index = kind[1]
            assert len(collapsible) >= 1
            result = []
            for i in range(0, len(collapsible) + 1):
                subsign = sign * (-1)**i * ((-1)**index * -1)
                if i == index:
                    continue
                elif i == 0:
                    result.extend(self.essential_representation(collapsible[1:], subsign))
                elif i == len(collapsible):
                    result.extend(self.essential_representation(collapsible[:-1], subsign))
                else:
                    x = self.op(collapsible[i-1], collapsible[i])
                    face = collapsible[:i-1] + (x,) + collapsible[i+1:]
                    result.extend(self.essential_representation(face, subsign))
            return tuple(result)
        else:
            raise AssertionError
    @lru_cache(maxsize=10)
    def chain_complex(self, up_to_dimension):
        # Integer boundary matrices: matrices[dim] has one column per
        # essential dim-cell and one row per essential (dim-1)-cell.
        self.compute_essentials(up_to_dimension)
        cell_to_index = {}
        for dim, ess_list in self.essentials.items():
            for i, cell in enumerate(ess_list):
                cell_to_index[cell] = i
        matrices = {}
        for dim in range(up_to_dimension, 0, -1):
            m = len(self.essentials[dim - 1])
            n = len(self.essentials[dim])
            M = [[0]*n for _ in range(m)]
            for cell in self.essentials[dim]:
                celli = cell_to_index[cell]
                for coeff, face in self.boundary(cell):
                    M[cell_to_index[face]][celli] += coeff
            matrices[dim] = M
        return matrices

    def SAGE_chain_complex(self, up_to_dimension):
        # local imports so the rest can be run in vanilla Python without SAGE
        from sage.matrix.constructor import Matrix
        from sage.homology.chain_complex import ChainComplex
        matrices = self.chain_complex(up_to_dimension)
        data = {
            -dim: Matrix(matrices[dim],
                         nrows=len(self.essentials[dim-1]),
                         ncols=len(self.essentials[dim]))
            for dim in range(1, up_to_dimension + 1)
        }
        return ChainComplex(data)

    def homology(self, up_to_dimension):
        cc = self.SAGE_chain_complex(up_to_dimension + 1)
        h = cc.homology()
        return {dim: h.get(-dim, 0) for dim in range(0, up_to_dimension + 1)}

    def print_homology(self, up_to_dimension):
        print(self)
        self.compute_essentials(up_to_dimension + 1)
        print("essential cell counts:", [len(ess_list) for ess_list in self.essentials.values()])
        self.chain_complex(up_to_dimension + 1)
        print("computed boundaries.")
        for dim, H_i in self.homology(up_to_dimension).items():
            print(f"H_{dim} = {H_i}")
        print()


CRS = CanonicalRewritingSystem
@sweeneyde (Author)

Example

>>> load("https://gist.githubusercontent.com/sweeneyde/4866faf6ee546b531ca3cd10800b4d60/raw/ff0a2e78c78503b026a6155603a2e88a68638a57/monoid_homology.py")

>>> # 2x2 rectangular band semigroup, with identity.
>>> CRS("xy", [("xx", "x"), ("yy", "y"), ("xyx", "x"), ("yxy", "y")]).print_homology(6)
Monoid( xy : xx->x, yy->y, xyx->x, yxy->y )
essential cell counts: [1, 2, 4, 8, 16, 32, 64, 128]
computed boundaries.
H_0 = Z
H_1 = 0
H_2 = Z
H_3 = 0
H_4 = 0
H_5 = 0
H_6 = 0

>>>  # Natural numbers modulo 5
>>> CRS('x', [("xxxxx", "")]).print_homology(6)
Monoid( x : xxxxx-> )
essential cell counts: [1, 1, 1, 1, 1, 1, 1, 1]
computed boundaries.
H_0 = Z
H_1 = C5
H_2 = 0
H_3 = C5
H_4 = 0
H_5 = C5
H_6 = 0

>>> # Integers
>>> CRS('aA', [("aA", ""), ("Aa", "")]).print_homology(6)
Monoid( aA : aA->, Aa-> )
essential cell counts: [1, 2, 2, 2, 2, 2, 2, 2]
computed boundaries.
H_0 = Z
H_1 = Z
H_2 = 0
H_3 = 0
H_4 = 0
H_5 = 0
H_6 = 0

>>> # Infinite dihedral group
>>> CRS("rRs", [("ss", ""), ("rR", ""), ("Rr", ""), ("sr", "Rs"), ("sR", "rs")]).print_homology(6)
Monoid( rRs : ss->, rR->, Rr->, sr->Rs, sR->rs )
essential cell counts: [1, 3, 5, 7, 9, 11, 13, 15]
computed boundaries.
H_0 = Z
H_1 = C2 x C2
H_2 = 0
H_3 = C2 x C2
H_4 = 0
H_5 = C2 x C2
H_6 = 0

>>> # McDuff's "On the classifying spaces of discrete monoids"
>>> # Gives a construction of a monoid M such that BM
>>> # is any given simplicial complex.
>>> # This is the construction applied to S^2 (boundary of 3-cell).
>>> CRS("aAbBcCxyzw", [
            ("aA", ""), ("Aa", ""), ("bB", ""), ("Bb", ""), ("cC", ""), ("Cc", ""),
            ("ax", "x"), ("Ax", "x"), ("by", "y"), ("By", "y"), ("cz", "z"), ("Cz", "z"),
            # ("abcw", "w"),
            # ("CBAw", "w"),
            ("xx", "x"), ("yy", "y"), ("zz", "z"), ("ww", "w"),
            # Had to add this in ourselves...
            # Otherwise Aabcw --> bcw != Aw <-- Aabcw
            ("bcw", "Aw"), ("BAw", "cw"),
]).print_homology(5)
Monoid( aAbBcCxyzw : aA->, Aa->, bB->, Bb->, cC->, Cc->, ax->x, Ax->x, by->y, By->y, cz->z, Cz->z, xx->x, yy->y, zz->z, ww->w, bcw->Aw, BAw->cw )
essential cell counts: [1, 10, 18, 26, 34, 42, 50]
computed boundaries.
H_0 = Z
H_1 = 0
H_2 = Z
H_3 = 0
H_4 = 0
H_5 = 0

>>> # The same thing as above but for S^3.
>>> # I had to use the Knuth-Bendix algorithm to normalize.
>>> rules = [('00', '0'), ('0V', 'V'), ('11', '1'), ('1U', 'U'), ('1W', 'W'), ('22', '2'), ('2V', 'V'), ('2W', 'W'), ('33', '3'), ('3U', 'U'), ('3X', 'X'), ('44', '4'), ('4V', 'V'), ('4X', 'X'), ('55', 
'5'), ('5W', 'W'), ('5X', 'X'), ('66', '6'), ('6U', 'U'), ('6Y', 'Y'), ('77', '7'), ('7V', 'V'), ('7Y', 'Y'), ('88', '8'), ('8W', 'W'), ('8Y', 'Y'), ('99', '9'), ('9X', 'X'), ('9Y', 'Y'), ('Aa', ''), ('Bb', ''), ('Cc', ''), ('Dd', ''), ('Ee', ''), ('Ff', ''), ('UU', 'U'), ('VV', 'V'), ('WW', 'W'), ('XX', 'X'), ('YY', 'Y'), ('a2', '2'), ('aA', ''), ('b4', '4'), ('bB', ''), ('c5', '5'), ('cC', ''), ('d7', '7'), ('dD', ''), ('e8', '8'), ('eE', ''), ('f9', '9'), ('fF', ''), ('cW', 'W'), ('cX', 'X'), ('0fU', 'fU'), ('fX', 'X'), ('fY', 'Y'), ('F9', '9'), ('dV', 'V'), ('dY', 'Y'), ('bV', 'V'), ('bX', 'X'), ('ad0', 'b0'), ('eW', 'W'), ('eY', 'Y'), ('df6', 'e6'), ('A2', '2'), ('ae1', 'c1'), ('bf3', 'c3'), ('B4', '4'), ('D7', '7'), ('E8', '8'), ('aV', 'V'), ('aW', 'W'), ('C5', '5'), ('CW', 'W'), ('EY', 'Y'), ('CX', 'X'), ('BV', 'V'), ('FX', 'X'), ('FY', 'Y'), ('aeU', 'cU'), ('DV', 'V'), ('DY', 'Y'), ('BX', 'X'), ('EW', 'W'), ('dfU', 'eU'), ('Ab0', 'd0'), ('Ac1', 'e1'), ('AV', 'V'), ('AW', 'W'), ('bfU', 'cU'), ('Bc3', 'f3'), ('De6', 'f6'), ('AcU', 'eU'), ('DeU', 'fU'), ('BcU', 'fU')]
>>> CRS("0123456789ABCDEFUVWXYabcdef", rules).print_homology(4)
Monoid( 0123456789ABCDEFUVWXYabcdef : 00->0, 0V->V, 11->1, 1U->U, 1W->W, 22->2, 2V->V, 2W->W, 33->3, 3U->U, 3X->X, 44->4, 4V->V, 4X->X, 55->5, 5W->W, 5X->X, 66->6, 6U->U, 6Y->Y, 77->7, 7V->V, 7Y->Y, 88->8, 8W->W, 8Y->Y, 99->9, 9X->X, 9Y->Y, Aa->, Bb->, Cc->, Dd->, Ee->, Ff->, UU->U, VV->V, WW->W, XX->X, YY->Y, a2->2, aA->, b4->4, bB->, c5->5, cC->, d7->7, dD->, e8->8, eE->, f9->9, fF->, cW->W, cX->X, 0fU->fU, fX->X, fY->Y, F9->9, dV->V, dY->Y, bV->V, bX->X, ad0->b0, eW->W, eY->Y, df6->e6, A2->2, ae1->c1, bf3->c3, B4->4, D7->7, E8->8, aV->V, aW->W, C5->5, CW->W, EY->Y, CX->X, BV->V, FX->X, FY->Y, aeU->cU, DV->V, DY->Y, BX->X, EW->W, dfU->eU, Ab0->d0, Ac1->e1, AV->V, AW->W, bfU->cU, Bc3->f3, De6->f6, AcU->eU, DeU->fU, BcU->fU )
essential cell counts: [1, 27, 97, 207, 357, 547]
computed boundaries.
H_0 = Z
H_1 = 0
H_2 = 0
H_3 = Z
H_4 = 0
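
A minimal sketch of getting just the boundary matrices in plain Python,
without Sage (this assumes the gist has been saved locally as
monoid_homology.py; only homology()/print_homology() need Sage):

    # plain CPython; no Sage needed for the matrices themselves
    from monoid_homology import CRS

    crs = CRS('ab', [('ba', 'ab')])   # free commutative monoid on a, b
    crs.compute_essentials(3)
    print([len(cells) for cells in crs.essentials.values()])  # essential cell counts
    print(crs.chain_complex(3))       # {dim: boundary matrix as a list of rows}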
