Conformal and projective disk models of the hyperbolic plane
license: mit
height: 720
{
"vertices": [[0.0, 0.0], [0.7, 0.0], [-0.35, 0.60621778], [-0.35, -0.60621778]],
"faces": [[0, 1, 2], [0, 2, 3], [0, 3, 1]]
}
{"vertices": [[0.0000000,0.0000000],[0.5816973,0.0000000],[0.5466167,0.1989522],[0.4456060,0.3739078],[0.2908486,0.5037647],[0.1010106,0.5728601],[-0.1010108,0.5728601],[-0.2908488,0.5037646],[-0.4456062,0.3739077],[-0.5466170,0.1989520],[-0.5816977,-0.0000003],[-0.5466172,-0.1989526],[-0.4456064,-0.3739083],[-0.2908489,-0.5037650],[-0.1010108,-0.5728602],[0.1010106,-0.5728599],[0.2908486,-0.5037643],[0.4456059,-0.3739074],[0.5466167,-0.1989518]],"faces":[[0,1,2],[0,2,3],[0,3,4],[0,4,5],[0,5,6],[0,6,7],[0,7,8],[0,8,9],[0,9,10],[0,10,11],[0,11,12],[0,12,13],[0,13,14],[0,14,15],[0,15,16],[0,16,17],[0,17,18],[0,18,1]]}
/**
* \file hyp-draw.js
* \author Obada Mahdi
* \copyright Copyright (c) 2017 Obada Mahdi
*
* Permission is hereby granted, free of charge, to any person obtaining a
* copy of this software and associated documentation files (the "Software"),
* to deal in the Software without restriction, including without limitation
* the rights to use, copy, modify, merge, publish, distribute, sublicense,
* and/or sell copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
* DEALINGS IN THE SOFTWARE.
*/
class HypEdge {
constructor(id, u, v, f) {
this.id = id;
this.u = u;
this.v = v;
this.faces = [f];
this.boundary = true;
}
add_face(f) {
if (this.faces.indexOf(f) < 0)
this.faces.push(f);
}
//WIP
};
/// Compute geodesic segment between points \p p and \p q in the conformal
/// disk model.
function set_Cedge(e, p, q) { // {{{
var result = hyp_geo_C(p, q), cx = result.center.x, cy = result.center.y;
// zero radius indicates that points p, q are on a diameter
e.C = {r: result.r, cx: cx, cy: cy, px: p.x, py: p.y, qx: q.x, qy: q.y};
} // }}}
function set_Pedge(e, p, q) { // {{{
e.P = {px: p.px, py: p.py, qx: q.px, qy: q.py};
} // }}}
function build_pathspec_Clink(e) { // {{{
var ed = e.C;
if (ed.r == 0) {
var p = d3.path();
p.moveTo(ed.px, ed.py);
p.lineTo(ed.qx, ed.qy);
return p.toString();
} else {
// The "sweep" flag controls whether the arc is to be drawn in clockwise
// (sweep=1) or counter-clockwise direction (sweep=0). We always choose
// the smaller arc ("large-arc" flag set to zero), so we need to determine
// the direction of the geodesic arc from p to q relative to the direction
// normal to the vector (cx, cy), pointing from the origin to the center
// of the circle that contains the geodesic arc.
var sweep = ed.cy*(ed.qx-ed.px)-ed.cx*(ed.qy-ed.py) > 0 ? "1" : "0";
return "M"+ed.px+" "+ed.py+"A"+ed.r+","+ed.r+" 0 0,"+sweep+" "+ed.qx+","+ed.qy;
}
} // }}}
function build_pathspec_Cface(f) { // {{{
var p = "", e = f.edges[0];
var v; // keep track of vertex ID at last position
if (e.v == f.edges[1].u || e.v == f.edges[1].v) {
p += "M"+e.C.px+","+e.C.py;
v = e.u;
} else if (e.u == f.edges[1].v || e.u == f.edges[1].u) {
p += "M"+e.C.qx+","+e.C.qy;
v = e.v;
} else
throw new Error("build_pathspec_Cface: face #"+f.id+" does not have a consecutive edge sequence (edge #0 to #1).");
var k = 1;
_.forEach(f.edges, function (e) {
const ed = e.C;
var px, py, qx, qy;
if (v == e.u) {
px = ed.px; py = ed.py; qx = ed.qx; qy = ed.qy;
v = e.v;
} else if (v == e.v) {
px = ed.qx; py = ed.qy; qx = ed.px; qy = ed.py;
v = e.u;
} else
throw new Error("build_pathspec_Cface: face #"+f.id+" does not have a consecutive edge sequence (edge #"+k+" to #"+(k+1)+").");
if (ed.r == 0) {
p += "L"+qx+","+qy;
} else {
var sweep = ed.cy*(qx-px)-ed.cx*(qy-py) > 0 ? "1" : "0";
p += "A"+ed.r+","+ed.r+" 0 0,"+sweep+" "+qx+","+qy;
}
k++;
});
return p;
} // }}}
function _path_add_Plink(p, e) { // {{{
p.lineTo(e.qx, e.qy);
return p;
} // }}}
function build_pathspec_Plink(e) { // {{{
var p = d3.path(), ed = e.P;
p.moveTo(ed.px, ed.py);
p.lineTo(ed.qx, ed.qy);
return p.toString();
} // }}}
function build_pathspec_Pface(f) { // {{{
var p = d3.path(), e = f.edges[0];
var v; // keep track of vertex ID at last position
if (e.v == f.edges[1].u || e.v == f.edges[1].v) {
p.moveTo(e.P.px, e.P.py);
v = e.u;
} else if (e.u == f.edges[1].v || e.u == f.edges[1].u) {
p.moveTo(e.P.qx, e.P.qy);
v = e.v;
} else
throw new Error("build_pathspec_Pface: face #"+f.id+" does not have a consecutive edge sequence (edge #0 to #1).");
var k = 1;
_.forEach(f.edges, function (e) {
if (v == e.u) {
p.lineTo(e.P.qx, e.P.qy);
v = e.v;
} else if (v == e.v) {
p.lineTo(e.P.px, e.P.py);
v = e.u;
} else
throw new Error("build_pathspec_Pface: face #"+f.id+" does not have a consecutive edge sequence (edge #"+k+" to #"+(k+1)+").");
k++;
});
// FIXME: close path explicitly?
return p.toString();
} // }}}
function get_Ctangent(e, u, v) { // {{{
var px, py, qx, qy;
if (e.u == u) {
px = e.C.px; py = e.C.py; qx = e.C.qx; qy = e.C.qy;
} else {
px = e.C.qx; py = e.C.qy; qx = e.C.px; qy = e.C.py;
}
var dx = qx - px, dy = qy - py;
if (e.C.r == 0.0)
return {x: dx, y: dy};
var tx = -(py - e.C.cy), ty = (px - e.C.cx);
if (tx*dx + ty*dy < 0.0)
return {x: -tx, y: -ty};
else
return {x: tx, y: ty};
} // }}}
function compute_interior_angle(v) { // {{{
if (v.boundary_degree > 2) {
console.log("vertex #"+v.id+" (Error: incident with more than two boundary edges)");
return null;
}
var neighbors = _.filter(g_neighbors[v.id], function (vi) { return g_vertices[vi].boundary_degree > 0; });
if (neighbors.length != 2) {
console.log("Error: vertex #"+v.id+" records two incident boundary edges, found "+neighbors.length+" instead.");
return null;
}
var h0 = g_edgemap[hash_edge(v.id, neighbors[0])],
h1 = g_edgemap[hash_edge(v.id, neighbors[1])];
if (h0 === undefined) {
console.log("Error: unmapped edge ("+v.id+","+neighbors[0]+") incident to boundary vertex.");
return null;
}
if (h1 === undefined) {
console.log("Error: unmapped edge ("+v.id+","+neighbors[1]+") incident to boundary vertex.");
return null;
}
var e0 = g_edges[h0.id], e1 = g_edges[h1.id],
t0 = get_Ctangent(e0, v.id, neighbors[0]),
t1 = get_Ctangent(e1, v.id, neighbors[1]),
l = Math.sqrt((t0.x*t0.x + t0.y*t0.y)*(t1.x*t1.x + t1.y*t1.y)),
dp = (t0.x*t1.x + t0.y*t1.y)/l;
return Math.acos(dp);
} // }}}
// vim:ts=2:sw=2:expandtab
/**
* \file hyp.js
* \author Obada Mahdi
* \copyright Copyright (c) 2017 Obada Mahdi
*
* Permission is hereby granted, free of charge, to any person obtaining a
* copy of this software and associated documentation files (the "Software"),
* to deal in the Software without restriction, including without limitation
* the rights to use, copy, modify, merge, publish, distribute, sublicense,
* and/or sell copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
* DEALINGS IN THE SOFTWARE.
*/
/// Convert hyperboloid coordinates to conformal disk coordinates.
function hyp_HtoC(co) {
return {x: co.x/(1+co.z), y: co.y/(1+co.z)};
}
/// Convert conformal disk coordinates to hyperboloid coordinates.
function hyp_CtoH(co) {
const s = 1 + (co.x**2+co.y**2);
return {x: 2*co.x/s, y: 2*co.y/s, z: (2-s)/s};
}
/// Convert hyperboloid coordinates to projective disk coordinates.
function hyp_HtoP(co) { return {x: co.x/co.z, y: co.y/co.z}; }
/// Convert projective disk coordinates to hyperboloid coordinates.
function hyp_PtoH(co) {
const l = co.x**2 + co.y**2, s = (l >= 1.0 ? 0.0 : Math.sqrt(1 - l));
return {x: co.x/s, y: co.y/s, z: 1/s};
}
/// Convert conformal disk coordinates to projective disk coordinates.
function hyp_CtoP(co) {
const s = 1 + (co.x**2+co.y**2);
return {x: 2*co.x/s, y: 2*co.y/s};
}
/// Convert projective disk coordinates to conformal disk coordinates.
function hyp_PtoC(co) {
const l = co.x**2 + co.y**2, s = 1 + (l >= 1.0 ? 0.0 : Math.sqrt(1 - l));
return {x: co.x/s, y: co.y/s};
}
/// Convert conformal disk coordinates to upper half-plane coordinates.
function hyp_CtoU(co) {
const s = (co.x**2 + (co.y-1)**2);
if (s < 1e-12) // no "abs" -- treat negative values as zero
return {x: Infinity, y: Infinity};
return {x: 2*co.x/s, y: (1-co.x**2-co.y**2)/s};
}
/// Convert upper half-plane coordinates to conformal disk coordinates.
function hyp_UtoC(co) {
if (!isFinite(co.x) || !isFinite(co.y))
return {x: 0.0, y: 1.0};
const s = (co.x**2 + (co.y+1)**2);
return {x: 2*co.x/s, y: (co.x**2+co.y**2-1)/s};
}
/// Convert projective disk coordinates to upper half-plane coordinates.
function hyp_PtoU(co) {
//const s = 1-co.x;
//return {x: 2*co.y/s, y: 2*Math.sqrt(1-co.x**2-co.y**2)/s};
return hyp_CtoU(hyp_PtoC(co));
}
/// Convert upper half-plane coordinates to projective disk coordinates.
function hyp_UtoP(co) {
//const l2 = co.x**2 + co.y**2;
//return {x: 4*co.y/(l2+4), y: (l2-4)/(l2+4)};
return hyp_CtoP(hyp_UtoC(co));
}
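// Illustrative sketch (not part of the original file): the conversions above
// are pairwise inverse, so round-tripping a point of the open unit disk
// through another model should reproduce it up to floating-point error.
// The helper name _check_roundtrip_models is hypothetical.
function _check_roundtrip_models(co) {
  const err = p => Math.hypot(p.x - co.x, p.y - co.y);
  return Math.max(
    err(hyp_PtoC(hyp_CtoP(co))),
    err(hyp_HtoC(hyp_CtoH(co))),
    err(hyp_UtoC(hyp_CtoU(co)))); // expected: ~1e-16
}
// Example: _check_roundtrip_models({x: 0.3, y: -0.4});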
function hyp_geo_C(p, q) {
const EPS = 1e-6;
const det2 = 2.0*(p.x*q.y - q.x*p.y);
// special case: p or q are on a diameter, i.e. vectors p, q collinear
if (Math.abs(det2) < EPS)
return {center: {x: 0.0, y: 0.0}, r: 0.0};
const lp1 = (p.x**2 + p.y**2) + 1.0,
lq1 = (q.x**2 + q.y**2) + 1.0,
c = {x: (q.y*lp1 - p.y*lq1) / det2, y: (p.x*lq1 - q.x*lp1) / det2};
return {center: c, r: Math.sqrt(c.x**2 + c.y**2 - 1)};
}
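// Illustrative check (hypothetical helper, not in the original source): the
// circle returned by hyp_geo_C is orthogonal to the unit circle, i.e.
// cx^2 + cy^2 = 1 + r^2, and passes through both input points.
function _check_geo_C(p, q) {
  const g = hyp_geo_C(p, q);
  if (g.r == 0.0)
    return 0.0; // p, q lie on a diameter; the geodesic is a straight segment
  const ortho = Math.abs(g.center.x**2 + g.center.y**2 - 1 - g.r**2),
    on_p = Math.abs(Math.hypot(p.x - g.center.x, p.y - g.center.y) - g.r),
    on_q = Math.abs(Math.hypot(q.x - g.center.x, q.y - g.center.y) - g.r);
  return Math.max(ortho, on_p, on_q); // expected: close to zero
}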
// Alternative computation with fewer multiplications. This is based on
// parameterizing the line perpendicular to the straight line connecting
// the segment \p p and \p q through its midpoint as
//
// (x(t), y(t)) = (mx, my) + t*(-dy, dx)
//
// where (mx, my) denotes the midpoint (p+q)/2 and (dx, dy) is the difference
// vector (q-p), which means that (-dy, dx) is the perpendicular direction.
// Based on the fact that for the center (cx, cy) and radius r of a circle
// orthogonal to the unit circle we have
//
// cx^2 + cy^2 = 1 + r^2,
//
// we can solve for t by expressing r^2 in terms of t, p and q by realizing
// that the right triangle formed by the midpoint (mx, my), the center
// (cx, cy) and either point p=(px, py) or q=(qx, qy) has sides of length r
// (the hypotenuse), sqrt(dx^2 + dy^2)/2 and t0*sqrt(dx^2 + dy^2), giving
//
// r^2 = (dx^2 + dy^2)*((1/4) + t0^2)
//
// for the solution t0. Writing cx = x(t0) and cy = y(t0) yields
//
// (mx - t0*dy)^2 + (my + t0*dx)^2 = 1 + (dx^2 + dy^2)*((1/4) + t0^2)
//
// Simplifying the left-hand side, we get
//
// (1/4)*((px+qx)^2 + (py+qy)^2) + t0^2*(dx^2 + dy^2) + t0*(px*qy-py*qx)
// ...
// (TODO: finish)
function hyp_geo2_C(p, q) {
const EPS = 1e-6;
const det = (p.x*q.y - q.x*p.y);
// special case: p or q are on a diameter, i.e. vectors p, q collinear
if (Math.abs(det) < EPS)
return {center: {x: 0.0, y: 0.0}, r: 0.0};
const mx = (p.x+q.x)/2, my = (p.y+q.y)/2,
dx = (q.x-p.x)/2, dy = (q.y-p.y)/2,
t = (1 - p.x*q.x - p.y*q.y) / (2*det),
c = {x: mx + t*dy, y: my - t*dx, r: Math.sqrt((0.25 + t**2)*(dx**2 + dy**2))};
return {center: c, r: Math.sqrt(c.x**2 + c.y**2 - 1)};
}
function hyp_geo_U(p, q) {
const EPS = 1e-9,
inf_p = !(isFinite(p.x) && isFinite(p.y)),
inf_q = !(isFinite(q.x) && isFinite(q.y));
if (!inf_p && !inf_q) {
if (Math.abs(q.x - p.x) < EPS)
return {center: {x: (q.x+p.x)/2, y: 0.0}, r: Infinity};
const c = {x: (p.x**2 + p.y**2 - q.x**2 - q.y**2)/(2.0*(p.x-q.x)), y: 0.0};
return {center: c, r: Math.sqrt((p.x-c.x)**2 + p.y**2)};
}
return {center: {x: !inf_p ? p.x : (!inf_q ? q.x : Infinity), y: 0.0}, r: Infinity};
}
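// Examples (illustrative only):
//   hyp_geo_U({x: -1, y: 1}, {x: 1, y: 1})
//     // -> {center: {x: 0, y: 0}, r: sqrt(2)} (semicircle centered on the real line)
//   hyp_geo_U({x: 2, y: 1}, {x: 2, y: 3})
//     // -> {center: {x: 2, y: 0}, r: Infinity} (vertical half-line)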
/// Hyperbolic translation (conformal disk model)
///
/// p: point within unit disk; d: displacement vector
/// We use the following formula from [1]:
/// T(x) = ((1 + 2dx + x^2)d + (1 - d^2)x) / (1 + 2dx + x^2d^2),
/// where d is the point within the unit disk that the origin 0 is translated
/// to, i.e. T(0) = d, x is any point within the unit disk, "ab" denotes the
/// dot product of vectors a and b, and "a^2" the squared length.
///
/// [1]: https://en.wikipedia.org/wiki/Poincar%C3%A9_disk_model#Isometric_Transformations
function hyp_tran_C(p, d) {
//var rpx = p.x*d.x + p.y*d.y,
// rpy = -p.y*d.y + p.y*d.x,
//return {x: tpx*d.x - tpy*d.y, y: tpx*d.y + tpy*d.x};
var lv2 = d.x*d.x + d.y*d.y,
lp2 = p.x*p.x + p.y*p.y,
dpv = d.x*p.x + d.y*p.y,
denom = (1 + 2*dpv + lp2*lv2);
return {
x: ((1 + 2*dpv + lp2)*d.x + (1 - lv2)*p.x) / denom,
y: ((1 + 2*dpv + lp2)*d.y + (1 - lv2)*p.y) / denom};
}
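// Quick sanity checks (illustrative only): with x = 0 the formula reduces to
// T(0) = d, and translating back by -d undoes the translation (left
// cancellation for Moebius addition).
//   hyp_tran_C({x: 0, y: 0}, {x: 0.5, y: 0})
//     // -> {x: 0.5, y: 0}
//   hyp_tran_C(hyp_tran_C({x: 0.2, y: 0.1}, {x: 0.5, y: 0}), {x: -0.5, y: 0})
//     // -> approximately {x: 0.2, y: 0.1}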
/// Hyperbolic translation (projective disk model)
/// FIXME: bad implementation
///
/// p: point within unit disk; d: displacement vector
function hyp_tran_P(p, d) {
throw new Error("FIXME: non-functional implementation");
const ld = Math.sqrt(d.x*d.x + d.y*d.y),
nx = d.x/ld, ny = d.y/ld, // unit direction: [cos(theta), sin(theta)]
hl = Math.acosh(1/Math.sqrt(1-d.x*d.x-d.y*d.y)),
rx = p.x*nx + p.y*ny, ry = -p.x*ny + p.y*nx, // p rotated onto x axis
ty = rx*Math.cosh(hl) + Math.sinh(hl),
tz = rx*Math.sinh(hl) + Math.cosh(hl);
return {x: (rx/tz)*nx - (ty/tz)*ny, y: (rx/tz)*ny + (ty/tz)*nx};
}
/// Hyperbolic translation (upper half-plane model)
///
/// p: point within upper half-plane; d: displacement vector
function hyp_tran_U(p, d) {
throw new Error("FIXME: non-functional implementation");
return {x: p.x, y: p.y};
}
function hyp_tesselation_circumradius_C(p, q) {
if (p <= 0 || q <= 0) {
console.log("Error: hyp_tesselation_circumradius_C: p and q must be positive.");
return 0.0;
}
if ((p-2)*(q-2) <= 4) { // there is no regular tesselation otherwise
console.log("Error: hyp_tesselation_circumradius_C: no regular tesselation ("+p+","+q+") exists.");
return 0.0;
}
const A = (Math.PI / p), B = (Math.PI / q);
return Math.sqrt(Math.cos(A+B)/Math.cos(A-B));
}
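// Example (illustrative): for the order-3 heptagonal tiling (p, q) = (7, 3),
// the Euclidean circumradius of the central tile in the conformal disk is
//   hyp_tesselation_circumradius_C(7, 3)
//     // A = pi/7, B = pi/3 -> sqrt(cos(A+B)/cos(A-B)) ~= 0.30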
function hyp_polygon_from_angles_C(thetas, eps = 1e-9) {
if (_.sum(thetas) >= (thetas.length-2)*Math.PI)
throw new Error("There is no hyperbolic "+thetas.length+"-gon with interior angles ["+_.join(thetas)+"].");
var t = 1.0,
n = thetas.length,
cos_halfthetas = _.map(thetas, th => Math.cos(th / 2)),
sin_halfthetas = _.map(thetas, th => Math.sin(th / 2));
function eval_g(x) {
var ch = Math.cosh(x);
return _.reduce(cos_halfthetas, (r, c) => r + Math.asin(c / ch), -Math.PI);
}
function eval_gprime(x) {
var ch = Math.cosh(x), ch2 = ch*ch;
//return -Math.tanh(x) * _.reduce(cos_halfthetas, (r, c) => r + (c / Math.sqrt(ch2 - c*c)), 0.0);
return -Math.tanh(x) * _.reduce(cos_halfthetas, (r, c) => r + (1.0 / Math.sqrt(ch2/(c*c) - 1.0)), 0.0);
}
var g = eval_g(t), loopmax = 100;
while (Math.abs(g) > eps && loopmax > 0) {
var gp = eval_gprime(t);
t = t - g / gp;
g = eval_g(t);
loopmax--;
}
if (loopmax == 0)
console.log("Warning: maximum number of iterations reached for thetas=["+_.join(thetas)+"], t="+t);
//console.log("Solution: t="+t);
const ch = Math.cosh(t), sh = Math.sinh(t);
var last_alpha = 0.0, alpha = 0.0, sum_alpha = 0.0,
alphas = [], rks = [], vertices = [[0.0, 0.0]];
for (var k = 0; k < n; k++) {
alphas.push(alpha);
last_alpha = alpha;
const sin_alpha = cos_halfthetas[k] / ch;
alpha = Math.asin(sin_alpha);
//var rk = Math.tanh(0.5*Math.asinh(sh / sin_halfthetas[k]));
// The following formula for rk uses the simplification
// tanh(Arsinh(x) / 2) = x / (sqrt(x^2 + 1) + 1)
// and setting x = sinh(R) = sinh(t) / sin(theta/2), where R is the
// hyperbolic length of the side we want to compute in the Poincare disk.
//const sin_ht = sin_halfthetas[k],
// rk = sh / (Math.sqrt(sh*sh + sin_ht*sin_ht) + sin_ht);
//const cosech_r = sin_halfthetas[k] / sh,
// rk = 1.0 / (Math.sqrt(1 + cosech_r*cosech_r) + cosech_r);
const sinh_r = sh / sin_halfthetas[k],
rk = sinh_r / (Math.sqrt(sinh_r*sinh_r + 1) + 1);
//console.log(" r["+k+"] = "+rk.toFixed(5));
sum_alpha += last_alpha+alpha;
vertices.push([rk*Math.cos(sum_alpha), rk*Math.sin(sum_alpha)]);
rks.push(rk);
alphas.push(alpha);
}
alphas[0] = alpha;
sum_alpha += alpha;
return {
vertices: vertices,
faces: _.map(_.range(1, n+1), k => [0, k, 1 + (k % n)]),
alphas: alphas,
t: t,
radii: rks,
scale: 1.0
};
}
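// Usage sketch (illustrative, not from the original file): a right-angled
// hyperbolic pentagon has five interior angles of pi/2, which satisfies
// sum(thetas) < (n-2)*pi. The returned object has the same shape as the mesh
// files above (vertices plus a triangle fan of faces around the center).
//   const pentagon = hyp_polygon_from_angles_C(_.times(5, () => Math.PI / 2));
//   // pentagon.vertices: 6 points (center + 5 corners) in the conformal disk
//   // pentagon.faces:    [[0,1,2], [0,2,3], [0,3,4], [0,4,5], [0,5,1]]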
/// Computes the determinant of a 3x3 matrix in column major format.
function det_mat3(m) {
// For a matrix m stored contiguously in column-major format, we have
// [ m0 m3 m6 ]
// m = [ m1 m4 m7 ]
// [ m2 m5 m8 ]
const m0 = m[0], m1 = m[1], m2 = m[2], m3 = m[3], m4 = m[4], m5 = m[5],
m6 = m[6], m7 = m[7], m8 = m[8];
return m0*(m4*m8 - m5*m7) + m1*(m5*m6 - m3*m8) + m2*(m3*m7 - m4*m6);
//return m0*m4*m8 + m1*m5*m6 + m2*m3*m7 - m2*m4*m6 - m1*m3*m8 - m0*m5*m7;
}
/// Computes the inverse of a 3x3 matrix in column major format using
/// its cofactor matrix.
function inv_mat3(m) {
const m0 = m[0], m1 = m[1], m2 = m[2], m3 = m[3], m4 = m[4], m5 = m[5],
m6 = m[6], m7 = m[7], m8 = m[8],
det = m0*(m4*m8 - m5*m7) + m1*(m5*m6 - m3*m8) + m2*(m3*m7 - m4*m6),
idet = 1/det;
// Return the transpose of the cofactor matrix, scaled by (1/det).
// Note that we return a matrix in column major format!
return [
idet*(m4*m8 - m5*m7), idet*(m2*m7 - m1*m8), idet*(m1*m5 - m2*m4),
idet*(m5*m6 - m3*m8), idet*(m0*m8 - m2*m6), idet*(m2*m3 - m0*m5),
idet*(m3*m7 - m4*m6), idet*(m1*m6 - m0*m7), idet*(m0*m4 - m1*m3)
];
}
/// Returns the cross product of two 3-vectors (right-hand rule).
function cross_vec3(p, q) {
const p0 = p[0], p1 = p[1], p2 = p[2], q0 = q[0], q1 = q[1], q2 = q[2];
return [p1*q2 - p2*q1, p2*q0 - p0*q2, p0*q1 - p1*q0];
}
/// Matrix-vector multiplication of 3x3 matrix \p m (as flat array,
/// column-major format) and 3-vector \p v.
function mul_matvec3(m, v) {
const v0 = v[0], v1 = v[1], v2 = v[2];
return [
m[0]*v0 + m[3]*v1 + m[6]*v2,
m[1]*v0 + m[4]*v1 + m[7]*v2,
m[2]*v0 + m[5]*v1 + m[8]*v2
];
}
/// Matrix-matrix multiplication of 3x3 matrices (col-major).
function mul_matmat3(a, b) {
return mul_matvec3(a, b.slice(0, 3))
.concat(mul_matvec3(a, b.slice(3, 6)))
.concat(mul_matvec3(a, b.slice(6, 9)));
}
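// Illustrative sanity check (hypothetical helper, not in the original source):
// inv_mat3 and mul_matmat3 should compose to the identity for any
// non-singular matrix.
function _check_inv_mat3(m) {
  const id = mul_matmat3(inv_mat3(m), m); // expect [1,0,0, 0,1,0, 0,0,1]
  return Math.max(...id.map((x, i) => Math.abs(x - (i % 4 == 0 ? 1 : 0))));
}
// Example: _check_inv_mat3([2,0,0, 1,3,0, 0,1,1]) // expected: ~1e-16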
/// Computes a projective frame uniquely determining the line through \p p and
/// \p q as well as the point \p p itself, using a construction based on three
/// ideal points and one ultra-ideal point, the pole of the line.
///
/// Two of such frames can then be used to construct projective maps that keep
/// the "light cone" \f$ x^2 + y^2 - z^2 = 0 \f$ invariant, i.e. that are
/// valid hyperbolic motions.
function hyp_segment_frame_P(p, q) {
const EPS = 1e-12;
// J = [1 0 0; 0 1 0; 0 0 -1];
// X3_t = -J*cross([P; 1], [Q; 1]);
const p0 = p[0], p1 = p[1], q0 = q[0], q1 = q[1],
X3_3 = p0*q1 - p1*q0;
var X3 = [q1 - p1, p0 - q0];
if (Math.abs(X3_3) >= EPS)
X3 = [X3[0]/X3_3, X3[1]/X3_3, 1.0];
else {
const l = Math.sqrt(X3[0]*X3[0] + X3[1]*X3[1]);
X3 = [X3[0]/l, X3[1]/l, 0.0];
}
const length_X3p = Math.sqrt(X3[0]*X3[0] + X3[1]*X3[1]),
npq_p0 = X3[0] / length_X3p, npq_p1 = X3[1] / length_X3p,
c_dir = (p1-q1)*npq_p0 + (q0-p0)*npq_p1;
if (Math.abs(c_dir) < EPS)
throw new Error("hyp_Psegment_frame: Cannot decide order of points, p and q too close");
const c_s = (c_dir < 0 ? -1.0 : 1.0),
c_dpq_cos = p0*npq_p0 + p1*npq_p1,
c_dpq_sin = Math.sqrt(1 - c_dpq_cos*c_dpq_cos),
A = [c_dpq_cos*npq_p0 - c_s*c_dpq_sin*npq_p1, c_s*c_dpq_sin*npq_p0 + c_dpq_cos*npq_p1, 1.0],
B = [c_dpq_cos*npq_p0 + c_s*c_dpq_sin*npq_p1, -c_s*c_dpq_sin*npq_p0 + c_dpq_cos*npq_p1, 1.0];
const npx3 = [-X3[1]+p1, X3[0]-p0],
length_npx3 = Math.sqrt(npx3[0]*npx3[0] + npx3[1]*npx3[1]),
npx3_p0 = npx3[0] / length_npx3, npx3_p1 = npx3[1] / length_npx3,
c_dpx3_cos = p0*npx3_p0 + p1*npx3_p1,
c_dpx3_sin = Math.sqrt(1 - c_dpx3_cos*c_dpx3_cos),
X4 = [c_dpx3_cos*npx3_p0 + c_s*c_dpx3_sin*npx3_p1, -c_s*c_dpx3_sin*npx3_p0 + c_dpx3_cos*npx3_p1, 1.0];
return [A, B, X3, X4];
}
/// Computes the deck transformation that takes the line through p1 and q1 to
/// the line through p2 and q2, as well as p1 to p2. If the hyperbolic
/// distance d(p1, q1) is equal to d(p2, q2), then q1 is mapped onto q2,
/// otherwise it will lie somewhere on the geodesic through p2 and q2.
///
/// The arguments p1, q1, p2 and q2 are expected to be 2-element vectors
/// representing coordinates in the projective disk model (Beltrami-Klein
/// model) that has the unit circle as line at infinity.
///
/// (based on my Matlab implementation)
function hyp_decktrans_P(p1, q1, p2, q2) {
const EPS = 1e-9;
const frame1 = hyp_segment_frame_P(p1, q1), frame2 = hyp_segment_frame_P(p2, q2);
// original code in Matlab implementation:
// lambdas1 = inv([A1, B1, X3_1])*X4_1;
// M1 = [A1, B1, X3_1]*diag(lambdas1);
// lambdas2 = inv([A2, B2, X3_2])*X4_2;
// M2 = [A2, B2, X3_2]*diag(lambdas2);
// M = M2*inv(M1);
//
// Note: exchanging X3 and X4
const X = frame1[2], Y = frame2[2],
S1 = [].concat(frame1[0], frame1[1], frame1[3]),
inv_S1 = inv_mat3(S1),
lambdas1 = mul_matvec3(inv_S1, X),
S2 = [].concat(frame2[0], frame2[1], frame2[3]),
inv_S2 = inv_mat3(S2),
lambdas2 = mul_matvec3(inv_S2, Y),
// combine lambdas:
// M = [A2, B2, X3_2] * diag(lambdas2) * inv(diag(lambdas1)) * inv([A1, B1, X3_1]);
// is equal to
// M = [A2, B2, X3_2] * diag(lambdas2./lambdas1) * inv([A1, B1, X3_1]);
l0 = lambdas2[0]/lambdas1[0], l1 = lambdas2[1]/lambdas1[1], l2 = lambdas2[2]/lambdas1[2];
return [].concat(
mul_matvec3(S2, [l0*inv_S1[0], l1*inv_S1[1], l2*inv_S1[2]]),
mul_matvec3(S2, [l0*inv_S1[3], l1*inv_S1[4], l2*inv_S1[5]]),
mul_matvec3(S2, [l0*inv_S1[6], l1*inv_S1[7], l2*inv_S1[8]])
);
}
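// Usage sketch (illustrative; the helper name is hypothetical): the 3x3 matrix
// returned by hyp_decktrans_P acts on homogeneous coordinates [x, y, 1];
// dividing by the last component yields projective-disk coordinates again.
function _apply_decktrans_P(M, p) {
  const h = mul_matvec3(M, [p[0], p[1], 1.0]);
  return [h[0] / h[2], h[1] / h[2]];
}
// Example: both segments below start at the origin and have equal hyperbolic
// length, so by the contract described above q1 should land on q2:
//   const M = hyp_decktrans_P([0, 0], [0.5, 0], [0, 0], [0, 0.5]);
//   _apply_decktrans_P(M, [0.5, 0]) // -> approximately [0, 0.5]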
/// Computes the deck transformation that takes the line through p1 and q1 to
/// the line through p2 and q2, as well as p1 to p2. If the hyperbolic
/// distance d(p1, q1) is equal to d(p2, q2), then q1 is mapped onto q2,
/// otherwise it will lie somewhere on the geodesic through p2 and q2.
///
/// The arguments p1, q1, p2 and q2 are expected to be 2-element vectors
/// representing coordinates in the conformal unit disk model.
function hyp_decktrans_C(p1, q1, p2, q2) {
return [1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0];
}
/// Computes the deck transformation that takes the line through p1 and q1 to
/// the line through p2 and q2, as well as p1 to p2. If the hyperbolic
/// distance d(p1, q1) is equal to d(p2, q2), then q1 is mapped onto q2,
/// otherwise it will lie somewhere on the geodesic through p2 and q2.
///
/// The arguments p1, q1, p2 and q2 are expected to be 2-element vectors
/// representing coordinates in the upper half-plane model.
function hyp_decktrans_U(p1, q1, p2, q2) {
return [1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0];
}
/// Computes the Euclidean translation that takes p1 to p2.
///
/// The arguments p1, q1, p2 and q2 are expected to be 2-element vectors
/// representing coordinates in the Euclidean plane.
function hyp_decktrans_E(p1, q1, p2, q2) {
const EPS = 1e-6;
let dx = p2[0] - p1[0], dy = p2[1] - p1[1],
dqx = q2[0] - q1[0], dqy = q2[1] - q1[1];
if (Math.abs(dx-dqx) > EPS || Math.abs(dy-dqy) > EPS)
console.log("hyp_decktrans_E(): different translation vectors for (p2-p1)=("+dx+","+dy+") and (q2-q1)=("+dqx+","+dqy+")");
return [1.0, 0.0, 0.0, 0.0, 1.0, 0.0, dx, dy, 1.0];
}
<!DOCTYPE html>
<!--
Copyright (c) 2017 Obada Mahdi
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
-->
<meta charset="utf-8">
<style type="text/css">
body {
/*background-color: #444;*/
/*color: #84A;*/
}
.main {
text-align: center;
padding-top: 3em;
}
.uc-container {
display: inline-block;
}
.console {
text-align: left;
overflow: scroll;
width: 100%;
max-height: 15em;
font-family: monospace, sans-serif;
font-size: smaller;
font-style: normal;
}
.console .message {
line-height: 0.2em;
}
.console .timestamp {
font-weight: bold;
color: grey;
width: 15em;
padding-right: 1em;
}
.console .text {
font-style: normal;
}
.console .message.msgcat--info .text {
}
.console .message.msgcat--error .text {
color: red;
font-weight: bold;
}
h1 {
font-family: serif;
font-size: 1.2em;
text-align: center;
}
.menu {
position: fixed;
top: 0;
width: 100%;
}
.toolbar {
display: block;
text-align: left;
padding: 0.5em;
margin-left: 1em;
margin-right: 1em;
width: 100%;
background-color: #eee;
}
.toolbar button {
background-color: #ccf;
font-family: sans-serif;
color: black;
border: none;
padding: 0.5em;
}
.toolbar button>label {
padding: 0;
margin-top: 0;
margin-bottom: 0;
}
.toolbar .button-symbol {
font-weight: bold;
padding-right: 1ex;
}
.toolbar .button-text {
padding: 0;
}
.toolbar .button--toggle {
}
.toolbar .toggle-on {
background-color: #7e7;
}
.toolbar .toggle-off {
background-color: #e55;
}
.toolbar .toggle-disabled {
background-color: #777;
}
.toolbar .button--action {
background-color: #44f;
color: #4ff;
font-weight: bold;
}
.toolbar .button--setting input {
padding: 0 0 0 0.2em;
background-color: white;
border: none;
}
.toolbar .action-disabled {
background-color: #777;
}
.toolbar .button--export>.svg-download-link {
display: none;
}
.vertex--selected {
stroke: red;
/*stroke-width: 1;*/
}
.vertex--dragging {
fill: red;
stroke: red;
/*stroke-width: 1;*/
}
.vertex--drag-handle {
cursor: move;
fill: none;
stroke: none;
}
.face--selected {
stroke: blue;
fill-opacity: 0.3;
}
.face--marked {
stroke: blue;
fill: #77c;
fill-opacity: 0.6;
}
.vertex--brushed {
stroke: red;
fill: pink;
/*stroke-width: 1.5;*/
}
.vertex-info-container {
display: block;
min-height: 2em;
}
.hyperbolic-model .uc-container {
fill: none;
stroke-width: 0.5;
}
.hyperbolic-model .mesh-container {
fill: none;
}
.hyperbolic-model .unit-circle {
fill: #eee;
stroke: black;
stroke-width: 0.5;
}
.hyperbolic-model .vertex-container {
fill: blue;
}
.hyperbolic-model .edge-container {
fill: none;
stroke: black;
stroke-width: 0.2;
}
.hyperbolic-model .altitude {
fill: none;
stroke: green;
stroke-width: 0.2;
}
.hyperbolic-model .ed.bn {
stroke: crimson;
stroke-width: 0.7;
}
.hyperbolic-model .face-container {
stroke: none;
fill: #ddf;
fill-opacity: 0.1;
}
.hyperbolic-model .fa[L="0"] {
fill: #77c;
fill-opacity: 0.25;
}
.hyperbolic-model .fa[L="0"].face--marked {
fill-opacity: 0.8;
}
</style>
<div class="progress-indicator">
</div>
<div class="ui-container">
<div class="menu toolbar">
<datalist id="list--hyperbolic-model">
<option value="conformal">
<option value="projective">
<option value="half-plane">
</datalist>
<button class="button--toggle toggle--vertices" onclick='toggle_vertices()'>vertices</button>
<button class="button--toggle toggle--edges" onclick='toggle_edges()'>edges</button>
<button class="button--toggle toggle--faces" onclick='toggle_faces()'>faces</button>
<!-- button class="button--toggle toggle--altitudes" onclick='toggle_altitudes()'>altitudes</button -->
<button class="button--toggle toggle--dragging" onclick='toggle_dragging()'>dragging</button>
<button class="button--toggle toggle--zooming" onclick='toggle_zooming()'>zoom/pan</button>
<button class="button--toggle toggle--brushing" onclick='toggle_brushing()'>brush</button>
<button class="button--toggle toggle--moving" onclick='toggle_moving()'>translate</button>
<button class="button--toggle toggle--scalestroke" onclick='toggle_scalestroke()'>scale stroke</button>
<button class="button--action action--reconstruct" onclick='action_reconstruct()'>reconstruct</button>
<button class="button--action action--tesselate" onclick='action_tesselate()'>tesselate</button>
<button class="button--action action--load" onclick='document.getElementById("input--mesh-file").click();'>load mesh<input type="file" id="input--mesh-file" name="mesh_file" style="display: none;" onchange='action_load(this);'></button>
<button class="button--setting">
<label for="menu--tesselation-depth">tesselation depth</label>
<input id="menu--tesselation-depth" name="tesselation-depth" type="number"
size="1" value="2" min="1" max="8" style="width: 2em" onchange='g_tesselation_depth = +this.value;'>
</button>
<!--input type="range" name="neighbor-radius" min="1" max="15" step="1" value="1"-->
<!-- button class="button--toggle toggle--linked-zoom" onclick='toggle_linked_zoom("svg#projective-disk")'>link zoom/pan</button -->
</div>
<div class="main">
<div class="uc-container">
<h1>Conformal disk model</h1>
<!--svg id="conformal-disk" class="hyperbolic-model conformal" viewBox="-1.1 -1.1 2.2 2.2"-->
<svg id="conformal-disk" class="hyperbolic-model conformal"></svg>
<div class="vertex-info-container">
</div>
<div class="toolbar">
<button class="button--export" onclick='export_svg("svg#conformal-disk", "conformal.svg", this)'><a class="svg-download-link" href=""></a><span class="button-symbol">&#x2b73;</span>SVG</button>
<button class="button--resetzoom" onclick='reset_zoom(this)'><span class="button-symbol">&#x21bb;</span>reset zoom</button>
<button class="button--close" onclick='close_model(this);'><span class="button-symbol">&#x2716;</span>close</button>
</div>
</div>
<div class="uc-container">
<h1>Projective disk model</h1>
<svg id="projective-disk" class="hyperbolic-model projective"> <!-- viewBox="-1.1 -1.1 2.2 2.2"--></svg>
<div class="vertex-info-container">
</div>
<div class="toolbar">
<button class="button--export" onclick='export_svg("svg#projective-disk", "projective.svg", this)'><a class="svg-download-link" href=""></a><span class="button-symbol">&#x2b73;</span>SVG</button>
<button class="button--resetzoom" onclick='reset_zoom(this)'><span class="button-symbol">&#x21bb;</span>reset zoom</button>
<button class="button--close" onclick='close_model(this);'><span class="button-symbol">&#x2716;</span>close</button>
</div>
</div>
<div class="uc-container">
<h1>Half-plane model</h1>
<svg id="halfplane" class="hyperbolic-model halfplane"> <!-- viewBox="-1.1 -1.1 2.2 2.2"-->
</svg>
<div class="vertex-info-container">
</div>
<div class="toolbar">
<button class="button--export" onclick='export_svg("svg#halfplane", "halfplane.svg", this)'><a class="svg-download-link" href=""></a><span class="button-symbol">&#x2b73;</span>SVG</button>
<button class="button--resetzoom" onclick='reset_zoom(this)'><span class="button-symbol">&#x21bb;</span>reset zoom</button>
<button class="button--close" onclick='close_model(this);'><span class="button-symbol">&#x2716;</span>close</button>
</div>
</div>
<hr>
</div>
</div>
<div class="console">
</div>
<script src="https://d3js.org/d3.v4.min.js"></script>
<script src="https://cdn.jsdelivr.net/lodash/4.17.4/lodash.min.js"></script>
<script src="hyp.js"></script>
<script src="main.js"></script>
{"faces": [[0, 1, 2], [0, 2, 3], [0, 3, 4], [0, 4, 5], [0, 5, 1]], "scale": 2.6, "vertices": [[0.0, 0.0], [1.5, 0.85], [0.0, 2.2], [-1.25, 1.25], [-1.5, 0.0], [1.5, -0.85]]}
/**
* \file main.js
* \author Obada Mahdi
* \copyright Copyright (c) 2017 Obada Mahdi
*
* Permission is hereby granted, free of charge, to any person obtaining a
* copy of this software and associated documentation files (the "Software"),
* to deal in the Software without restriction, including without limitation
* the rights to use, copy, modify, merge, publish, distribute, sublicense,
* and/or sell copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
* DEALINGS IN THE SOFTWARE.
*/
/// Maximum number of vertices for which vertex rendering is enabled initially.
const c_enable_vertices_threshold = 100;
const c_default_vertex_radius = 2.0;
const c_svg_default_width = 400;
const c_svg_default_height = 400;
const c_axis_margin_x = 20;
const c_axis_margin_y = 20;
const c_svg_padding = 5;
const c_hyp_svg_style_keys = new Map([
[".uc-container", {keys: [ "stroke", "stroke-width", "fill", "fill-opacity" ]}],
[".unit-circle", {keys: [ "stroke", "stroke-width", "fill", "fill-opacity" ]}],
[".mesh-container", {keys: [ "stroke", "stroke-width", "fill", "fill-opacity" ]}],
[".face-container", {keys: [ "stroke", "stroke-width", "fill", "fill-opacity" ]}],
[".edge-container", {keys: [ "stroke", "stroke-width", "fill", "fill-opacity" ]}],
[".vertex-container", {keys: [ "stroke", "stroke-width", "fill", "fill-opacity" ]}],
[".vx", {keys: [ "stroke", "stroke-width", "fill", "fill-opacity" ]}],
[".vx.bn", {keys: [ "stroke", "stroke-width", "fill", "fill-opacity" ]}],
[".ed", {keys: [ "stroke", "stroke-width"]}],
[".ed.bn", {keys: [ "stroke", "stroke-width"]}],
[".fa", {keys: [ "stroke", "stroke-width", "fill", "fill-opacity" ], skip: [ ".face--marked", '[L="0"]' ]}],
['.fa[L="0"]', {keys: [ "stroke", "stroke-width", "fill", "fill-opacity" ], skip: [ ".face--marked" ]}],
['.fa[L="0"].face--marked', {keys: [ "stroke", "stroke-width", "fill", "fill-opacity" ]}],
[".face--marked", {keys: [ "stroke", "stroke-width", "fill", "fill-opacity" ], skip: [ '[L="0"]' ]}]
// TODO: .axes-container
]);
// {{{ -- URI parameters
var g_svg_width = +getParameterByName("w"), g_svg_height = +getParameterByName("h");
if (g_svg_width == 0 && g_svg_height != 0)
g_svg_width = g_svg_height;
else if (g_svg_width != 0 && g_svg_height == 0)
g_svg_height = g_svg_width;
else if (g_svg_width == 0 && g_svg_height == 0)
g_svg_width = g_svg_height = 400;
var mesh_uri = getParameterByName("mesh");
if (!mesh_uri)
mesh_uri = "triangle.json";
var g_vertex_radius = +getParameterByName("vr", c_default_vertex_radius);
var g_draw_vertices = !!+getParameterByName("v", 1) && (g_vertex_radius != 0);
var g_draw_edges = !!+getParameterByName("e", 1);
var g_draw_faces = !!+getParameterByName("f", 1);
var g_draw_altitudes = !!+getParameterByName("a", 0);
var g_brushing = !!+getParameterByName("b", 0);
var g_dragging = !!+getParameterByName("d", 0) && g_draw_vertices;
var g_zooming = !!+getParameterByName("z", 0);
var g_linked_zoom = !!+getParameterByName("L", 0);
var g_precision = +getParameterByName("prec", 6);
var g_tclimit = +getParameterByName("tclimit", 0);
var g_tcmindist = +getParameterByName("tcmindist", 0.001);
// }}} -- URI parameters
// {{{ -- Global variables
// Console log
const c_console_maxlines = 1000;
var g_console = [];
var g_view_counter = 0;
var g_tesselation_depth = 2;
// Toggle buttons
var g_toggle = {
vertices: {state: g_draw_vertices, set: hyp_svg_vertices},
edges: {state: g_draw_edges, set: hyp_svg_edges},
faces: {state: g_draw_faces, set: hyp_svg_faces},
altitudes: {state: g_draw_altitudes, set: hyp_svg_altitudes},
dragging: {state: g_dragging, set: hyp_svg_dragging},
zooming: {state: g_zooming, set: hyp_svg_zooming},
brushing: {state: g_brushing, set: hyp_svg_brushing},
moving: {state: false, set: hyp_svg_moving},
scalestroke: {state: false, set: hyp_svg_scalestroke}
};
// svginfo: maps SVG element IDs to objects with the following keys:
// "axis": object containing d3.axis instances (axis.x, axis.y)
// "conformal": boolean; whether coords use the conformal (rather than the projective) model
// "halfplane": boolean; whether coords use the upper half-plane model;
// has precedence over "conformal"!
// "euclidean": boolean; whether coords are Euclidean coordinates;
// has precedence over "conformal" and "projective"!
// "edge_data": edge data array
// "face_data": face data array
// "path_builder_edge": function that generates a SVG path "d" attribute string
// from edge data
// "path_builder_face": function that generates a SVG path "d" attribute string
// from face data
// "vertex_data": vertex data array
// "vertex_transform": function that generates "transform()" string from vertex data
// "quadtree": d3.quadtree instance for locating vertices
// "zoom": d3.zoom instance
var g_svginfo = new Map();
var g_basemesh = {};
var g_pairings = new Map();
var g_vertices = [], g_edges = [], g_faces = [], g_edgemap = new Map(), g_neighbors = new Map();
var g_org_edges = new Map();
var g_C_selected_v = null, g_C_quadtree;
var g_P_selected_v = null, g_P_quadtree;
var g_U_selected_v = null, g_U_quadtree;
var g_E_selected_v = null, g_E_quadtree;
var g_C_zoom = d3.zoom().scaleExtent([1.0, 50]);
var g_P_zoom = d3.zoom().scaleExtent([1.0, 50]);
var g_U_zoom = d3.zoom().scaleExtent([0.1, 50]);
var g_E_zoom = d3.zoom().scaleExtent([0.1, 50]);
var g_state__dragging = false;
// }}} -- Global variables
// getParameterByName(): function taken from
// https://stackoverflow.com/a/901144/4200722
function getParameterByName(name, default_value) { // {{{
name = name.replace(/[\[\]]/g, "\\$&");
let regex = new RegExp("[?&]" + name + "(=([^&#]*)|&|#|$)"),
results = regex.exec(window.location.href);
if (!results)
return (default_value === undefined) ? null : default_value;
return !results[2] ? '' : decodeURIComponent(results[2].replace(/\+/g, " "));
} // }}}
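// Example (illustrative): with a URL like ...?w=600&vr=3 the parameter parsing
// above yields
//   getParameterByName("w")                           // -> "600"
//   getParameterByName("vr", c_default_vertex_radius) // -> "3"
//   getParameterByName("prec", 6)                     // -> 6 (default value)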
function strobj(v) { // {{{
return "{"+_.join(_.map(v, function (v, k) { return k + ":" + v; }), ", ")+"}";
} // }}}
function _fprec(v) {
if (g_precision > 0)
return v.toPrecision(g_precision)
return v;
}
function get_msgcat_class(c) {
if (c == "error")
return "msgcat--error";
return "msgcat--info";
}
function log_update() { // {{{
let con = d3.select(".console"),
msgs = con.selectAll("p").data(g_console);
msgs.exit().remove();
let new_msgs = msgs.enter()
.append("p").attr("class", d => "message "+get_msgcat_class(d.cat));
new_msgs.append("span").attr("class", "timestamp").text(d => d.timestamp);
new_msgs.append("span").attr("class", "text").text(d => d.msg);
con.node().scrollTop = con.node().scrollHeight;
} // }}}
function log_console(msg, category = "info") { // {{{
const ts = new Date();
g_console.push({timestamp: ts.toLocaleTimeString(), msg: msg, cat: category});
if (g_console.length > c_console_maxlines)
g_console.shift();
log_update();
} // }}}
function log_msg(msg, log_to_console = false) { // {{{
log_console(msg);
if (log_to_console)
console.log(msg);
} // }}}
function log_err(msg, log_to_console = false) { // {{{
log_console(msg, "error");
if (log_to_console)
console.log(msg);
} // }}}
/// Returns the scale factor of the linear \p scale, which is the quotient of
/// the lengths of the range interval and the domain interval. It is
/// a *signed* factor, which is negative if the scale reverses orientation.
function linear_scale_factor(scale) { // {{{
let dom = scale.domain(), range = scale.range();
return (range[1]-range[0]) / (dom[1]-dom[0]);
} // }}}
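// Example (illustrative): a y scale that flips orientation yields a negative
// factor:
//   linear_scale_factor(d3.scaleLinear().domain([-1, 1]).range([400, 0]))
//     // -> (0 - 400) / (1 - (-1)) = -200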
/// Compute the extent of data points stored in a quadtree. This is like
/// quadtree.extent() for a fresh quadtree, except that quadtrees do not
/// automatically shrink their extent if points disappear and the bounding box
/// gets smaller.
function compute_quadtree_extent(quadtree) { // {{{
const qX = quadtree.x(), qY = quadtree.y(),
qdata = quadtree.data(), n = qdata.length;
let x0 = +Infinity, y0 = +Infinity, x1 = -Infinity, y1 = -Infinity;
for (let i = 0; i < n; i++) {
const x = qX(qdata[i]), y = qY(qdata[i]);
if (x < x0) x0 = x;
if (y < y0) y0 = y;
if (x > x1) x1 = x;
if (y > y1) y1 = y;
}
return [[x0, y0], [x1, y1]];
} // }}}
/// Hash function for undirected edges identified by a pair of vertices \p u
/// and \p v.
function hash_edge(u, v) { // {{{
return (u < v) ? 2**32 * u + v : 2**32 * v + u;
} // }}}
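// Example (illustrative): the hash is symmetric in its arguments, so both
// orientations of an undirected edge map to the same key:
//   hash_edge(3, 7) == hash_edge(7, 3) // -> true (both equal 3*2**32 + 7)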
/// Set up geodesic segment between points \p p and \p q in the conformal
/// disk model.
function set_Cedge(e, p, q) { // {{{
let result = hyp_geo_C(p, q), cx = result.center.x, cy = result.center.y;
// zero radius indicates that points p, q are on a diameter
e.C = {r: result.r, cx: cx, cy: cy, px: p.x, py: p.y, qx: q.x, qy: q.y};
} // }}}
/// Set up geodesic segment between points \p p and \p q in the projective
/// disk model.
function set_Pedge(e, p, q) { // {{{
e.P = {px: p.px, py: p.py, qx: q.px, qy: q.py};
} // }}}
/// Compute geodesic segment between points \p p and \p q in the upper
/// half-plane model.
function set_Uedge(e, p, q) { // {{{
let result = hyp_geo_U({x: p.ux, y: p.uy}, {x: q.ux, y: q.uy});
// an infinite radius indicates that p, q lie on a vertical half-line (geodesic through the point at infinity)
e.U = {r: result.r, cx: result.center.x, cy: result.center.y, px: p.ux, py: p.uy, qx: q.ux, qy: q.uy};
} // }}}
/// Set up Euclidean geodesic segment between points \p p and \p q.
function set_Eedge(e, p, q) { // {{{
e.E = {px: p.x, py: p.y, qx: q.x, qy: q.y};
} // }}}
/// Adds a face with index \p i, defined by an ordered list of \p indices
/// and sets up related data structures. This includes:
/// - building the set of undirected edges in \p g_edges, which also store
/// pre-computed parameters used by drawing primitives, like the center
/// and radius for the circular arc representing a hyperbolic geodesic in
/// the Poincaré disk model
/// - adjacency lists in \p g_neighbors, which helps with local updates when
/// moving around single vertices
function push_face(euclidean, src_indices, i, label = null, layer = 0, orbit = 0, f_offset = null) { // {{{
if (label === null)
label = i;
if (f_offset === null)
f_offset = _.reduce(g_faces, (s, f) => (s + f.indices.length), 0);
function push_edge(u, v, f, org_id) {
let e;
const h = hash_edge(u, v), ei = g_edges.length;
let cp = g_vertices[u], cq = g_vertices[v];
if (!g_edgemap.has(h)) {
g_edgemap.set(h, {id: ei});
g_org_edges.set(org_id, ei);
e = {id: ei, u: u, v: v, org_id: org_id, layer: layer, orbit: orbit, faces: [f], boundary: true};
cp.boundary_degree++;
cq.boundary_degree++;
if (!euclidean) {
set_Cedge(e, cp, cq);
set_Pedge(e, cp, cq);
set_Uedge(e, cp, cq);
} else
set_Eedge(e, cp, cq);
g_edges.push(e);
} else {
g_org_edges.set(org_id, g_edgemap.get(h).id);
e = g_edges[g_edgemap.get(h).id];
// only clear boundary flag if edges were created for same "copy" of the
// base fundamental region
e.boundary = (orbit != e.orbit);
e.faces.push(f);
if (!e.boundary) {
cp.boundary_degree--;
cq.boundary_degree--;
}
}
// Note: g_neighbors is expected to be initialized with empty lists for
// each vertex ID.
let nu = g_neighbors.get(u), nv = g_neighbors.get(v);
if (nu.indexOf(v) < 0)
nu.push(v);
if (nv.indexOf(u) < 0)
nv.push(u);
return e;
}
let edges = [],
indices = src_indices.slice(),
face = {id: i, label_id: label, layer: layer, indices: indices, edges: edges, marked: false};
g_faces.push(face);
for (let k = 0; k < indices.length; k++ )
edges.push(push_edge(indices[k], indices[(k+1) % indices.length], face, f_offset+k));
} // }}}
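// Usage sketch (illustrative, assuming g_vertices and g_neighbors have already
// been initialized from a mesh file such as triangle.json above): the faces
// array of a hyperbolic mesh is typically fed in as
//   _.forEach(mesh.faces, (indices, i) => push_face(false, indices, i));
// which fills g_faces, g_edges, g_edgemap and the adjacency lists in
// g_neighbors, detecting shared (interior) edges via hash_edge().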
// Note: the following function is not used at the moment.
// TODO: Finish using arcTo() for HTML canvas support.
function _path_add_Clink(p, e, X, Y) { // {{{
// flip direction to always draw smaller arc
if (e.r != 0) {
p.arc(X(e.cx), Y(e.cy), e.r, e.phi, e.psi, e.psi-e.phi > Math.PI);
// TODO: compute point z and use arcTo()
//let zx, zy;
//p.arcTo(X(zx), Y(zy), X(e.qx), Y(e.qy), e.r);
} else
p.lineTo(X(e.qx), Y(e.qy));
return p;
} // }}}
function build_pathspec_Clink(e, X, Y) { // {{{
const ed = e.C, px = ed.px, py = ed.py, qx = ed.qx, qy = ed.qy;
if (!isFinite(ed.r) || ed.r == 0) {
return "M"+_fprec(X(px))+","+_fprec(Y(py))+"L"+_fprec(X(qx))+","+_fprec(Y(qy));
} else {
// The "sweep" flag controls whether the arc is to be drawn in clockwise
// (sweep=1) or counter-clockwise direction (sweep=0). We always choose
// the smaller arc ("large-arc" flag set to zero), so we need to determine
// the direction of the geodesic arc from p to q relative to the direction
// normal to the vector (cx, cy), pointing from the origin to the center
// of the circle that contains the geodesic arc.
const sweep = -ed.cy*(qx-px) + ed.cx*(qy-py) > 0 ? "1" : "0",
scaleX = Math.abs(linear_scale_factor(X)),
scaleY = Math.abs(linear_scale_factor(Y));
return "M"+_fprec(X(px))+","+_fprec(Y(py))+"A"+_fprec(ed.r*scaleX)+","+_fprec(ed.r*scaleY)+" 0 0,"+sweep+" "+_fprec(X(qx))+","+_fprec(Y(qy));
}
} // }}}
function build_pathspec_Cface(f, X, Y) { // {{{
const e = f.edges[0];
let p = "", v = -1; // keep track of vertex ID at last position
if (e.v == f.edges[1].u || e.v == f.edges[1].v) {
p += "M"+_fprec(X(e.C.px))+","+_fprec(Y(e.C.py));
v = e.u;
} else if (e.u == f.edges[1].v || e.u == f.edges[1].u) {
p += "M"+_fprec(X(e.C.qx))+","+_fprec(Y(e.C.qy));
v = e.v;
} else
throw new Error("build_pathspec_Cface: face #"+f.id+" does not have a consecutive edge sequence (edge #0 to #1).");
let k = 1;
_.forEach(f.edges, function (e) {
const ed = e.C;
let px, py, qx, qy;
if (v == e.u) {
px = ed.px; py = ed.py; qx = ed.qx; qy = ed.qy;
v = e.v;
} else if (v == e.v) {
px = ed.qx; py = ed.qy; qx = ed.px; qy = ed.py;
v = e.u;
} else
throw new Error("build_pathspec_Cface: face #"+f.id+" does not have a consecutive edge sequence (edge #"+k+" to #"+(k+1)+").");
if (ed.r == 0) {
p += "L"+_fprec(X(qx))+","+_fprec(Y(qy));
} else {
let sweep = -ed.cy*(qx-px) + ed.cx*(qy-py) > 0 ? "1" : "0",
scaleX = Math.abs(linear_scale_factor(X)),
scaleY = Math.abs(linear_scale_factor(Y));
p += "A"+_fprec(ed.r*scaleX)+","+_fprec(ed.r*scaleY)+" 0 0,"+sweep+" "+_fprec(X(qx))+","+_fprec(Y(qy));
}
k++;
});
return p;
} // }}}
function _path_add_Plink(p, e, X, Y) { // {{{
p.lineTo(X(e.qx), Y(e.qy));
return p;
} // }}}
function build_pathspec_Plink(e, X, Y) { // {{{
const ed = e.P;
return "M"+_fprec(X(ed.px))+","+_fprec(Y(ed.py))+"L"+_fprec(X(ed.qx))+","+_fprec(Y(ed.qy));
} // }}}
function build_pathspec_Pface(f, X, Y) { // {{{
const e = f.edges[0];
let p = "", v = -1;
if (e.v == f.edges[1].u || e.v == f.edges[1].v) {
p = "M"+_fprec(X(e.P.px))+","+_fprec(Y(e.P.py));
v = e.u;
} else if (e.u == f.edges[1].v || e.u == f.edges[1].u) {
p = "M"+_fprec(X(e.P.qx))+","+_fprec(Y(e.P.qy));
v = e.v;
} else
throw new Error("build_pathspec_Pface: face #"+f.id+" does not have a consecutive edge sequence (edge #0 to #1).");
let k = 1;
_.forEach(f.edges, function (e) {
if (v == e.u) {
p += "L"+_fprec(X(e.P.qx))+","+_fprec(Y(e.P.qy));
v = e.v;
} else if (v == e.v) {
p += "L"+_fprec(X(e.P.px))+","+_fprec(Y(e.P.py));
v = e.u;
} else
throw new Error("build_pathspec_Pface: face #"+f.id+" does not have a consecutive edge sequence (edge #"+k+" to #"+(k+1)+").");
k++;
});
return p;
} // }}}
// Basically the same as build_pathspec_Clink, but uses different coordinates
// obtained from e.U instead of e.C.
function build_pathspec_Ulink(e, X, Y) { // {{{
const ed = e.U,
px = X(ed.px), py = Y(ed.py), qx = X(ed.qx), qy = Y(ed.qy);
if (!isFinite(ed.r) || ed.r == 0) {
const infinity_y = Y.range()[1]-1;
let p = "";
if (isFinite(ed.py) && !isFinite(ed.qy))
p = "M"+_fprec(px)+","+_fprec(py)+"L"+_fprec(px)+","+_fprec(infinity_y);
else if (!isFinite(ed.py) && isFinite(ed.qy))
p = "M"+_fprec(qx)+","+_fprec(qy)+"L"+_fprec(qx)+","+_fprec(infinity_y);
return p;
} else {
//console.log("Ulink: r="+ed.r+", p=("+px+","+py+"), q=("+qx+","+qy+")");
const sweep = (qx-px) > 0 ? "1" : "0",
scaleX = Math.abs(linear_scale_factor(X)),
scaleY = Math.abs(linear_scale_factor(Y));
return "M"+_fprec(px)+" "+_fprec(py)+"A"+_fprec(ed.r*scaleX)+","+_fprec(ed.r*scaleY)+" 0 0,"+sweep+" "+_fprec(qx)+","+_fprec(qy);
}
} // }}}
// Basically the same as build_pathspec_Cface, but uses different coordinates
// obtained from e.U instead of e.C.
function build_pathspec_Uface(f, X, Y) { // {{{
const e = f.edges[0],
scaleX = Math.abs(linear_scale_factor(X)),
scaleY = Math.abs(linear_scale_factor(Y)),
infinity_y = Y.range()[1]-1;
// infinity_y is set to a coordinate just outside the range. We use it to fake
// geodesics that extent towards the point at infinity and are represented by
// half-lines parallel to the y-axis. Since SVG does not accept "Infinity" as
// in coordinates, we draw just outside the viewable area and connect
// two vertical geodesics with an (invisible) horizontal line segment as
// needed.
//
// Note: This relies on the scale Y to be set up such that the extent matches
// the displayed area, which means that it needs updating whenever the
// viewport changes. This may not work well if we use SVG transforms for
// scaling and panning as opposed to recomputing coordinates, in which case
// we may require a different mechanism in order to keep up with viewport
// changes.
let p = "",
v = -1, // keep track of vertex ID at last position
at_inf = false; // whether current y coordinate is at infinity
if (e.v == f.edges[1].u || e.v == f.edges[1].v) {
if (isFinite(e.U.py)) {
p = "M"+_fprec(X(e.U.px))+","+_fprec(Y(e.U.py));
} else {
p = "M"+_fprec(X(e.U.px))+","+_fprec(infinity_y);
at_inf = true;
}
v = e.u;
} else if (e.u == f.edges[1].v || e.u == f.edges[1].u) {
if (isFinite(e.U.qy)) {
p += "M"+_fprec(X(e.U.qx))+","+_fprec(Y(e.U.qy));
} else {
p = "M"+_fprec(X(e.U.qx))+","+_fprec(infinity_y);
at_inf = true;
}
v = e.v;
} else
throw new Error("build_pathspec_Uface: face #"+f.id+" does not have a consecutive edge sequence (edge #0 to #1).");
let k = 1;
_.forEach(f.edges, function (e) {
const ed = e.U;
let px, py, qx, qy;
if (v == e.u) {
px = ed.px; py = ed.py; qx = ed.qx; qy = ed.qy;
v = e.v;
} else if (v == e.v) {
px = ed.qx; py = ed.qy; qx = ed.px; qy = ed.py;
v = e.u;
} else
throw new Error("build_pathspec_Uface: face #"+f.id+" does not have a consecutive edge sequence (edge #"+k+" to #"+(k+1)+").");
let x = X(qx);
if (!isFinite(ed.r) || ed.r == 0) {
if (isFinite(qy)) {
// move to proper x coordinate while still producing a closed shape
if (at_inf)
p += " L"+_fprec(x)+","+_fprec(infinity_y);
p += " L"+_fprec(x)+","+_fprec(Y(qy));
at_inf = false;
} else {
// Note: use px because qx may be Infinity. If px is Infinity, too, we have an
// edge connecting the point at infinity with itself, which should not happen.
// We handle it gracefully by simply omitting any line directive.
if (isFinite(px))
p += " L"+_fprec(X(px))+","+_fprec(infinity_y);
at_inf = true;
}
} else {
const sweep = (qx-px) > 0 ? "1" : "0";
p += " A"+_fprec(ed.r*scaleX)+","+_fprec(ed.r*scaleY)+" 0 0,"+sweep+" "+_fprec(X(qx))+","+_fprec(Y(qy));
at_inf = false;
}
k++;
});
return p;
} // }}}
function build_pathspec_Elink(e, X, Y) { // {{{
const ed = e.E;
return "M"+_fprec(X(ed.px))+","+_fprec(Y(ed.py))+"L"+_fprec(X(ed.qx))+","+_fprec(Y(ed.qy));
} // }}}
function build_pathspec_Eface(f, X, Y) { // {{{
const e = f.edges[0];
let p = "", v = -1;
if (e.v == f.edges[1].u || e.v == f.edges[1].v) {
p = "M"+_fprec(X(e.E.px))+","+_fprec(Y(e.E.py));
v = e.u;
} else if (e.u == f.edges[1].v || e.u == f.edges[1].u) {
p = "M"+_fprec(X(e.E.qx))+","+_fprec(Y(e.E.qy));
v = e.v;
} else
throw new Error("build_pathspec_Eface: face #"+f.id+" does not have a consecutive edge sequence (edge #0 to #1).");
let k = 1;
_.forEach(f.edges, function (e) {
if (v == e.u) {
p += "L"+_fprec(X(e.E.qx))+","+_fprec(Y(e.E.qy));
v = e.v;
} else if (v == e.v) {
p += "L"+_fprec(X(e.E.px))+","+_fprec(Y(e.E.py));
v = e.u;
} else
throw new Error("build_pathspec_Eface: face #"+f.id+" does not have a consecutive edge sequence (edge #"+k+" to #"+(k+1)+").");
k++;
});
return p;
} // }}}
function vertex_drag_handler(d) { // {{{
const subject = d3.event.subject,
conformal_mode = subject.conformal,
halfplane_mode = subject.halfplane,
euclidean_mode = subject.euclidean,
X = subject.X, Y = subject.Y;
d = subject.data;
let vertex = d3.selectAll(".vx[vi='"+d.id+"']").classed("vertex--dragging", true);
if (!euclidean_mode)
d3.event.on("drag", dragging).on("end", release);
else
d3.event.on("drag", dragging_euc).on("end", release);
g_state__dragging = true;
d3.selectAll("svg g.face-container").style("pointer-events", "none");
_.forEach(g_svginfo, function (info) { if (info.quadtree) info.quadtree.remove(g_vertices[d.id]); });
function dragging() {
let cursor = d3.mouse(this),
x = X.invert(cursor[0]), y = Y.invert(cursor[1]),
l = Math.sqrt(x*x + y*y);
// Normalize: For halfplane models, ensure we stay on or above the real
// line, and for unit disk models, stay on or within the unit disk.
if (halfplane_mode) {
if (y < 0)
y = 0.0;
} else if (l > 1.0) {
x /= l;
y /= l;
}
vertex.classed("dirty", true);
let c_coord, p_coord, u_coord;
if (conformal_mode && !halfplane_mode) {
c_coord = {x: x, y: y};
p_coord = hyp_CtoP(c_coord);
u_coord = hyp_CtoU(c_coord);
} else if (halfplane_mode) {
u_coord = {x: x, y: y};
c_coord = hyp_UtoC(u_coord);
p_coord = hyp_UtoP(u_coord);
} else { //if (!conformal_mode)
p_coord = {x: x, y: y};
c_coord = hyp_PtoC(p_coord);
u_coord = hyp_PtoU(p_coord);
}
d.x = c_coord.x;
d.y = c_coord.y;
d.px = p_coord.x;
d.py = p_coord.y;
d.ux = u_coord.x;
d.uy = u_coord.y;
const dirty = hyp_update_vertex(d.id, d, false);
dirty.edges.forEach(function (ei) {
d3.selectAll(".ed[ei='"+ei+"']").classed("dirty", true);
//d3.selectAll(".altitude[alt-id='"+ei+"']").classed("dirty", true);
});
dirty.faces.forEach(function (fi) {
d3.selectAll(".fa[fi='"+fi+"']").classed("dirty", true);
});
hyp_svg_update_all();
}
function dragging_euc() {
let cursor = d3.mouse(this),
x = X.invert(cursor[0]), y = Y.invert(cursor[1]),
l = Math.sqrt(x*x + y*y);
vertex.classed("dirty", true);
d.x = x;
d.y = y;
const dirty = hyp_update_vertex(d.id, d, true);
dirty.edges.forEach(function (ei) {
d3.selectAll(".ed[ei='"+ei+"']").classed("dirty", true);
});
dirty.faces.forEach(function (fi) {
d3.selectAll(".fa[fi='"+fi+"']").classed("dirty", true);
});
hyp_svg_update_all();
}
function release() {
d3.event.on("drag", null).on("end", null);
_.forEach(g_svginfo, function (info) {
if (info["quadtree"]) {
// only add points with finite coordinates
if (!info.halfplane || isFinite(info.quadtree.y()(d)))
info.quadtree.add(g_vertices[d.id]);
info.extent = compute_quadtree_extent(info.quadtree);
}
});
vertex.classed("vertex--dragging", false);
g_state__dragging = false;
d3.selectAll("svg g.face-container").style("pointer-events", null);
}
} // }}}
// {{{ -- translation: dragging handler
function translation_drag_start(d) { // {{{
let last_coords_raw = d3.mouse(this),
vertices = {};
const svg_id = d3.select(this.ownerSVGElement).attr("id"),
svg_info = g_svginfo[svg_id],
conformal_mode = svg_info.conformal,
halfplane_mode = svg_info.halfplane,
euclidean_mode = svg_info.euclidean,
X = svg_info.axis.x.scale(),
Y = svg_info.axis.y.scale(),
last_coords = [X.invert(last_coords_raw[0]), Y.invert(last_coords_raw[1])];
// copy original vertex positions
_.each(g_vertices, function (v) { vertices[v.id] = {x: v.x, y: v.y}; });
d3.event.on("drag", move).on("end", release);
d3.selectAll("svg g.face-container").style("pointer-events", "none");
// clear quadtrees and mark all graphical elements as dirty
_.forEach(g_svginfo, function (info) {
if (info.quadtree)
info.quadtree = d3.quadtree().x(info.quadtree.x()).y(info.quadtree.y());
});
let svgs = d3.selectAll("svg.hyperbolic-model"),
s_vertices = svgs.selectAll(".vx"),
s_edges = svgs.selectAll(".ed"),
s_faces = svgs.selectAll(".fa"),
s_altitudes = svgs.selectAll(".altitude");
function move() {
let coords = d3.mouse(this),
dx = X.invert(coords[0])-last_coords[0],
dy = Y.invert(coords[1])-last_coords[1],
l = Math.sqrt(dx**2 + dy**2);
// skip if length of translation vector is close to zero
if (l < 1e-9)
return;
//log_msg("move: (dx,dy)=("+dx+","+dy+"), l="+l);
let dv = {x: dx, y: dy};
_.each(vertices, function (v, k) {
let ov = g_vertices[k];
let op, cp, pp, up;
if (!euclidean_mode) {
if (conformal_mode && !halfplane_mode) {
op = {x: v.x, y: v.y};
cp = hyp_tran_C(op, dv);
pp = hyp_CtoP(cp);
up = hyp_CtoU(cp);
} else if (halfplane_mode) {
op = {x: v.ux, y: v.uy};
up = hyp_tran_U(op, dv);
cp = hyp_UtoC(up);
pp = hyp_UtoP(up);
} else {
op = {x: v.px, y: v.py};
pp = hyp_tran_P(op, dv);
cp = hyp_PtoC(pp);
          up = hyp_PtoU(pp);
}
ov.x = cp.x;
ov.y = cp.y;
ov.px = pp.x;
ov.py = pp.y;
ov.ux = up.x;
ov.uy = up.y;
} else {
//let op = {x: v.ux, y: v.uy},
// ep = hyp_tran_E(op, dv);
ov.x = v.x + dv.x;
ov.y = v.y + dv.y;
}
});
_.each(g_edges, function (e) {
let p = g_vertices[e.u], q = g_vertices[e.v];
if (!euclidean_mode) {
set_Cedge(e, p, q);
set_Pedge(e, p, q);
set_Uedge(e, p, q);
} else
set_Eedge(e, p, q);
});
s_vertices.classed("dirty", true);
s_edges.classed("dirty", true);
s_faces.classed("dirty", true);
s_altitudes.classed("dirty", true);
hyp_svg_update_all();
}
function release() {
d3.event.on("drag", null).on("end", null);
_.each(g_svginfo, function (info) {
if (info.quadtree)
info.quadtree.addAll(g_vertices);
});
d3.selectAll("svg g.face-container").style("pointer-events", null);
}
} // }}}
// }}} -- translation: dragging handler
// {{{ -- brushing: brush start/brush/end handlers
function brush_handler_start(d) { // {{{
} // }}}
function brush_handler_update(d) { // {{{
let svg_id = this.__svg_id,
ext_screen = d3.event.selection,
selected = d3.set();
if (ext_screen) {
let info = g_svginfo[svg_id],
X = info.axis.x.scale(), Y = info.axis.y.scale(),
quadtree = info.quadtree,
get_x = quadtree.x(), get_y = quadtree.y(),
        // Convert the brush selection from screen to data coordinates; the
        // y scale is flipped, so swap the y components to keep ext[0][1] <= ext[1][1].
        ext = [[X.invert(ext_screen[0][0]), Y.invert(ext_screen[1][1])],
               [X.invert(ext_screen[1][0]), Y.invert(ext_screen[0][1])]];
//console.log("brush: extent=["+ext[0][0]+","+ext[0][1]+"]..["+ext[1][0]+","+ext[1][1]+"]");
// Note: quadtree node extents are inclusive.
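    // Returning true from the visit callback prunes that subtree, so internal
    // nodes whose rectangle lies entirely outside the brush extent are skipped.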
quadtree.visit(function (node, ax, ay, bx, by) {
//console.log(" visit: extent=["+ax+","+ay+"]..["+bx+","+by+"]");
if (node.length)
return (ext[0][0] > bx || ext[1][0] <= ax || ext[0][1] > by || ext[1][1] <= ay);
do {
var d = node.data, px = get_x(d), py = get_y(d);
if (ext[0][0] <= px && px < ext[1][0] && ext[0][1] <= py && py < ext[1][1])
selected.add(d.id);
} while (node = node.next);
return false;
});
}
d3.selectAll("svg.hyperbolic-model .vx")
.classed("vertex--brushed", function (d, i) {
return selected.has(d.id);
});
} // }}}
function brush_handler_end(d) {
var svg_id = this.__svg_id;
}
// }}} -- brushing: brush start/brush/end handlers
// {{{ ---- main ----
d3.queue()
.defer(d3.json, mesh_uri)
.await(main);
function setup_UI(model_list) {
const UI_title = {
"conformal": "Conformal disk model",
"projective": "Projective disk model",
"halfplane": "Half-plane model",
"euclidean": "Euclidean plane"
},
UI_id = {
"conformal": "conformal-disk",
"projective": "projective-disk",
"halfplane": "upper-halfplane",
"euclidean": "euclidean-plane"
};
d3.selectAll(".ui-container .main .uc-container").remove();
_.each(model_list, function (model) {
let uic = d3.select(".ui-container .main"),
uidiv = uic.append("div").attr("class", "uc-container");
uidiv.append("h1").text(UI_title[model]);
uidiv.append("svg")
.attr("id", UI_id[model])
.attr("class", ["hyperbolic-model", model].join(" "));
uidiv.append("div").attr("class", "vertex-info-container");
let toolbar = uidiv.append("div").attr("class", "toolbar");
let export_button = toolbar.append("button")
.attr("class", "button--export")
.attr("onclick", 'export_svg("svg#'+UI_id[model]+'", "'+model+'.svg", this)');
export_button.append("a").attr("class", "svg-download-link").attr("href", "");
export_button.append("span").attr("class", "button-symbol").text("\u21e9"); //2b73");
export_button.append("span").attr("class", "button-text").text("SVG");
let resetzoom_button = toolbar.append("button")
.attr("class", "button--resetzoom")
.attr("onclick", 'reset_zoom(this);');
resetzoom_button.append("span").attr("class", "button-symbol").text("\u21bb");
resetzoom_button.append("span").attr("class", "button-text").text("reset zoom");
let close_button = toolbar.append("button")
.attr("class", "button--close")
.attr("onclick", 'close_model(this);');
close_button.append("span").attr("class", "button-symbol").text("\u2716");
close_button.append("span").attr("class", "button-text").text("close");
});
}
function main(error, mesh) {
if (error) {
log_err("Error while loading mesh: "+error.message);
//throw error;
return;
}
if (!mesh.hasOwnProperty("vertices") || !mesh.hasOwnProperty("faces")) {
log_err("JSON meshes need to provide the 'vertices' or 'faces' properties.");
return;
}
log_msg("loading mesh with "+mesh.vertices.length+" vertices and "+mesh.faces.length+" faces");
g_draw_vertices &= !!+getParameterByName("v", 1) && (g_vertex_radius > 0) && (mesh.vertices.length <= c_enable_vertices_threshold);
build_mesh(mesh);
if (mesh.model == "euclidean")
setup_UI(["euclidean"]);
else
setup_UI(["conformal", "projective", "halfplane"]);
d3.selectAll("svg.hyperbolic-model").each(function () {
var svg = d3.select(this),
euclidean_mode = !!svg.classed("euclidean"),
halfplane_mode = !!svg.classed("halfplane"),
conformal_mode = (!!svg.classed("conformal") || halfplane_mode) &&
!svg.classed("projective") && !euclidean_mode,
nmode = euclidean_mode ? 3 : (halfplane_mode ? 2 : (conformal_mode ? 0 : 1));
// ``nmode'':
// 0: Conformal disk
// 1: Projective disk
// 2: Conformal plane (upper half-plane)
// 3: Euclidean plane
hyp_svg_init(this, nmode,
g_vertices, g_edges, g_faces,
[g_C_quadtree, g_P_quadtree, g_U_quadtree, g_E_quadtree][nmode],
[ (d, X, Y) => ("translate("+X(d.x)+","+Y(d.y)+")"),
(d, X, Y) => ("translate("+X(d.px)+","+Y(d.py)+")"),
(d, X, Y) => ((!isFinite(d.ux) || !isFinite(d.uy)) ? null : "translate("+X(d.ux)+","+Y(d.uy)+")"),
(d, X, Y) => ("translate("+X(d.x)+","+Y(d.y)+")")][nmode],
[build_pathspec_Clink, build_pathspec_Plink, build_pathspec_Ulink, build_pathspec_Elink][nmode],
[build_pathspec_Cface, build_pathspec_Pface, build_pathspec_Uface, build_pathspec_Eface][nmode]
);
});
toggle_faces(g_draw_faces, true);
toggle_edges(g_draw_edges, true);
//toggle_altitudes(g_draw_altitudes, true);
toggle_vertices(g_draw_vertices, true);
toggle_brushing(g_brushing, true);
toggle_dragging(g_dragging, true);
toggle_zooming(g_zooming, true);
toggle_moving(false, true);
toggle_scalestroke(false, true);
hyp_svg_update_all();
}
function compute_deck_transformations(pairings) {
let fuchsians = new Map();
pairings.forEach(function (t, s) {
const e1 = g_edges[s], e2 = g_edges[t],
p1 = g_vertices[e1.v], q1 = g_vertices[e1.u],
p2 = g_vertices[e2.u], q2 = g_vertices[e2.v],
euclidean = e1.E !== undefined;
if (!euclidean) {
fuchsians.set(s, {
paired_edge: t,
ptrans: hyp_decktrans_P([p2.px, p2.py], [q2.px, q2.py], [p1.px, p1.py], [q1.px, q1.py]),
ctrans: hyp_decktrans_C([p2.x, p2.y ], [q2.x, q2.y ], [p1.x, p1.y ], [q1.x, q1.y ]),
utrans: hyp_decktrans_U([p2.ux, p2.uy], [q2.ux, q2.uy], [p1.ux, p1.uy], [q1.ux, q1.uy])
});
} else {
fuchsians.set(s, {
paired_edge: t,
etrans: hyp_decktrans_E([p2.x, p2.y ], [q2.x, q2.y ], [p1.x, p1.y ], [q1.x, q1.y ])
});
}
});
return fuchsians;
}
function build_mesh(mesh) {
let scale = +mesh["scale"],
model = mesh["model"];
if (mesh["scale"] === undefined || scale <= 0)
scale = 1.0;
if (model === undefined)
model = "projective";
else if (typeof model === "string" || model instanceof String)
model = model.toLowerCase();
mesh.scale = scale;
mesh.model = model;
const map_from_C = function (v, i) {
var x = v[0]/scale, y = v[1]/scale,
pp = hyp_CtoP({x: x, y: y}),
up = hyp_CtoU({x: x, y: y});
return {id: i, boundary_degree: 0, x: x, y: y, px: pp.x, py: pp.y, ux: up.x, uy: up.y};
};
const map_from_P = function (v, i) {
var px = v[0]/scale, py = v[1]/scale,
pc = hyp_PtoC({x: px, y: py}),
up = hyp_PtoU({x: px, y: py});
return {id: i, boundary_degree: 0, x: pc.x, y: pc.y, px: px, py: py, ux: up.x, uy: up.y};
};
const map_from_U = function (v, i) {
var ux = v[0]/scale, uy = v[1]/scale,
pc = hyp_UtoC({x: ux, y: uy}),
pp = hyp_UtoP({x: ux, y: uy});
return {id: i, boundary_degree: 0, x: pc.x, y: pc.y, px: pp.x, py: pp.y, ux: ux, uy: uy};
};
const map_from_E = function (v, i) {
var x = v[0]/scale, y = v[1]/scale;
return {id: i, boundary_degree: 0, x: x, y: y};
};
const map_func = {
"conformal": map_from_C,
"projective": map_from_P,
"halfplane": map_from_U,
"euclidean": map_from_E
};
let mapper = map_func[model];
if (mapper === undefined) {
log_err("Unable to understand model spec ``"+toString(mapper)+"'', assuming ``conformal'' (other accepted options are ``projective'', ``halfplane'' and ``euclidean'').");
mapper = map_from_C;
}
let vertices = _.map(mesh.vertices, mapper),
neighbors = new Map(),
euclidean_mode = model == "euclidean";
_.each(vertices, v => neighbors.set(v.id, []));
// TODO: quadtrees are built unconditionally; should only build when needed.
g_basemesh = mesh;
g_vertices = vertices;
g_neighbors = neighbors;
g_edges = [];
g_edgemap = new Map();
g_org_edges = new Map();
g_faces = [];
if (!euclidean_mode) {
g_C_quadtree = d3.quadtree(g_vertices, d => d.x, d => d.y);
g_P_quadtree = d3.quadtree(g_vertices, d => d.px, d => d.py);
g_U_quadtree = d3.quadtree(g_vertices, d => d.ux, d => d.uy);
g_E_quadtree = d3.quadtree();
} else {
g_C_quadtree = d3.quadtree();
g_P_quadtree = d3.quadtree();
g_U_quadtree = d3.quadtree();
g_E_quadtree = d3.quadtree(g_vertices, d => d.x, d => d.y);
}
_.forEach(mesh.faces, (f, i) => push_face(euclidean_mode, f, i));
if (mesh["org_pairings"] !== undefined) {
g_pairings = new Map(_.map(mesh["org_pairings"], (v, k) => [g_org_edges.get(+k), g_org_edges.get(v)]));
} else if (mesh["pairings"] !== undefined) {
// turn keys into numbers
g_pairings = new Map(_.map(mesh["pairings"], (v, k) => [+k, v]));
} else
g_pairings = new Map();
if (g_pairings.size == 0) {
// pair each boundary edge with itself if nothing else is specified; this
// results in a 180 degree rotation, mapping each boundary edge onto
// itself, flipping it around
g_pairings = new Map(_.map(_.filter(g_edges, e => e.boundary), e => [e.id, e.id]));
}
g_fuchsians = compute_deck_transformations(g_pairings);
}
function make_triangle_Cequilat(m) {
m = 0.0+m;
if (m <= 0.0 || m > 1.0)
throw new Error("Invalid argument: m must be in (0, 1)");
var s = Math.sqrt(0.75), c = -0.5;
return {faces: [[0, 1, 2]], vertices: [[m, 0.0], [m*c, m*s], [m*c, -m*s]]};
}
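// Example (illustrative): make_triangle_Cequilat(0.7) returns
//   {faces: [[0, 1, 2]],
//    vertices: [[0.7, 0.0], [-0.35, 0.6062...], [-0.35, -0.6062...]]},
// i.e. three points at conformal radius 0.7, spaced 120 degrees apart.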
// }}} ---- main ----
function hyp_get_svgid(svg) { // {{{
var svg_id = svg.attr("id");
if (svg_id === undefined)
throw "Error: undefined zoom behavior for SVG with id='"+svg.attr("id")+"'";
return svg_id;
} // }}}
/// Installs event listeners for zoom/pan behavior.
function hyp_svg_zooming(svg, value) { // {{{
var svg_id = hyp_get_svgid(svg),
info = g_svginfo[svg_id],
zoom = info.zoom;
if (value === undefined)
value = zoom !== undefined;
if (zoom === undefined && value) {
info.zoom = zoom = d3.zoom();
    // Restricted zooming: it does not make sense for half-plane models to
    // show anything below the real line, and it does not make sense for unit
    // disk models to zoom out further than the view showing the whole circle.
if (info.euclidean) {
zoom.extent([
[c_svg_padding+c_axis_margin_x, c_svg_padding],
[+svg.attr("width")-c_svg_padding, +svg.attr("height")-2*c_svg_padding-c_axis_margin_y]
]);
} else if (info.halfplane) {
      // FIXME: properly achieve a limited translate extent that does not
      // scroll past the real line at y=0.
zoom.extent([
[c_svg_padding+c_axis_margin_x, c_svg_padding],
[+svg.attr("width")-c_svg_padding, +svg.attr("height")-2*c_svg_padding-c_axis_margin_y]
]);
zoom.translateExtent([
[-Infinity, -Infinity],
[+Infinity, info.yscale_full(0.0)]
]);
} else {
zoom.scaleExtent([1.0, +Infinity]);
}
}
if (value) {
let axis_group_x = svg.select(".axes-container .axis--x"),
axis_group_y = svg.select(".axes-container .axis--y"),
content = svg.select(".content"),
uc = content.select(".uc-container");
svg.call(zoom.on("zoom", function () {
let info = g_svginfo[svg_id],
tr = d3.event.transform;
axis_group_x.call(info.axis.x.scale(tr.rescaleX(info.xscale_full)));
axis_group_y.call(info.axis.y.scale(tr.rescaleY(info.yscale_full)));
//uc.attr("transform", tr);
svg.selectAll(".axis-component,.mesh-container").classed("dirty", true);
hyp_svg_update(svg);
}));
} else {
// do not reset zoom, user may want to just temporarily disable it so it
// does not interfere with dragging
//svg.call(zoom.transform, d3.zoomIdentity);
//delete g_svginfo[svg_id].zoom;
svg.on(".zoom", null);
}
//This does not work: "zoom" is a behavior, not a zoom transformation!
//if (zoom)
// svg.call(zoom.transform, zoom);
} // }}}
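/// Returns an (unnormalized) tangent direction of edge \p e at vertex \p u in
/// the conformal disk model, pointing towards the other endpoint. For a
/// diameter (e.C.r == 0) this is simply q - p; otherwise it is a tangent of
/// the circle carrying the geodesic at p, i.e. a vector perpendicular to
/// p - center, with its sign chosen so that it points towards q.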
function get_Ctangent(e, u, v) { // {{{
var px, py, qx, qy;
if (e.u == u) {
px = e.C.px; py = e.C.py; qx = e.C.qx; qy = e.C.qy;
} else {
px = e.C.qx; py = e.C.qy; qx = e.C.px; qy = e.C.py;
}
var dx = qx - px, dy = qy - py;
if (e.C.r == 0.0)
return {x: dx, y: dy};
var tx = -(py - e.C.cy), ty = (px - e.C.cx);
if (tx*dx + ty*dy < 0.0)
return {x: -tx, y: -ty};
else
return {x: tx, y: ty};
} // }}}
function get_Etangent(e, u, v) { // {{{
var px, py, qx, qy;
if (e.u == u) {
px = e.E.px; py = e.E.py; qx = e.E.qx; qy = e.E.qy;
} else {
px = e.E.qx; py = e.E.qy; qx = e.E.px; qy = e.E.py;
}
return {x: qx-px, y: qy-py};
} // }}}
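/// Computes the interior angle of the mesh boundary at vertex \p v: among the
/// edges joining \p v to its boundary neighbors, the two on the lowest layer
/// are taken as boundary edges, and the angle between their tangent
/// directions at \p v is returned (acos of the normalized dot product).
/// Returns null and logs an error unless exactly two such edges are found.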
function compute_interior_angle(v) { // {{{
//if (v.boundary_degree > 2) {
// log_err("Error: vertex #"+v.id+" incident with more than two boundary edges.");
// return null;
//}
const neighbors = _.filter(g_neighbors.get(v.id), vi => (g_vertices[vi].boundary_degree > 0)),
spokes = _.map(neighbors, vi => g_edges[g_edgemap.get(hash_edge(v.id, vi)).id]),
min_layer = _.reduce(spokes, (s, e) => Math.min(s, e.layer), Infinity),
candidates = _.filter(spokes, e => (e.layer == min_layer));
if (candidates.length != 2) {
log_err("Error: vertex #"+v.id+" has "+candidates.length+" boundary edges on layer "+min_layer+", expected 2.");
return null;
}
// The following code discerns between Euclidean and hyperbolic models by
// testing whether the ``E'' member of edge ``e0'' exists or not.
let e0 = candidates[0], e1 = candidates[1],
euclidean = e0.E !== undefined,
get_tangent = euclidean ? get_Etangent : get_Ctangent,
t0 = get_tangent(e0, v.id, neighbors[0]),
t1 = get_tangent(e1, v.id, neighbors[1]),
l = Math.sqrt((t0.x*t0.x + t0.y*t0.y)*(t1.x*t1.x + t1.y*t1.y)),
dp = (t0.x*t1.x + t0.y*t1.y)/l;
return Math.acos(dp);
} // }}}
function build_vertex_info(v_info_div, selected_vertices) { // {{{
var verts = v_info_div.selectAll(".vertex-info").data(selected_vertices);
function r2d(r) { return r*(180.0/Math.PI); }
verts.enter()
.append("div").attr("class", "vertex-info")
.merge(verts)
.text(function (d) {
if (d.boundary_degree == 0) {
return "vertex #"+d.id;
}
var theta = compute_interior_angle(d);
if (theta === null)
return "vertex #"+d.id+": bad boundary edges (see log)";
else
return "vertex #"+d.id+": interior angle "+r2d(theta).toFixed(2)+"°";
});
verts.exit().remove();
} // }}}
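/// Recomputes the per-model representations (C/P/U, or E in Euclidean mode)
/// of every edge incident with vertex \p vi after its coordinates in \p d
/// have changed, and returns the ids of adjacent faces and edges that need
/// redrawing as {faces, edges}.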
function hyp_update_vertex(vi, d, euclidean = false) { // {{{
let adj_faces = new Set(), adj_edges = new Set();
_.each(g_neighbors.get(vi), function (v) {
let ei = g_edgemap.get(hash_edge(vi, v)).id, e = g_edges[ei];
for (let k = 0; k < e.faces.length; k++)
adj_faces.add(e.faces[k].id);
adj_edges.add(ei);
var cq = g_vertices[v];
if (e.u == d.id) {
if (!euclidean) {
set_Cedge(e, d, cq);
set_Pedge(e, d, cq);
set_Uedge(e, d, cq);
} else
set_Eedge(e, d, cq);
} else {
if (!euclidean) {
set_Cedge(e, cq, d);
set_Pedge(e, cq, d);
set_Uedge(e, cq, d);
} else
set_Eedge(e, cq, d);
}
});
return {faces: adj_faces, edges: adj_edges};
} // }}}
/// Adds the vertex layer and installs event listeners for selecting vertices.
function hyp_svg_vertices(svg, value) { // {{{
var svg_id = hyp_get_svgid(svg),
info = g_svginfo[svg_id],
vertices = info.vertex_data,
uc = svg.select("g.uc-container"),
mc = svg.select("g.mesh-container"),
vc = mc.select("g.vertex-container");
if (value) {
if (vc.empty())
vc = mc.append("g").attr("class", "vertex-container");
var v_info_div = d3.select("div.vertex-info-container"),
verts_update = vc.selectAll(".vx").data(vertices),
verts = verts_update.enter()
.append("g")
.attr("class", "vx dirty")
.classed("bn", function (d) { return d.boundary_degree > 0; })
.attr("vi", function (d) { return d.id; })
.append("circle").attr("r", g_vertex_radius)
.merge(verts_update);
uc.on("mousemove", function () {
let cursor = d3.mouse(this),
X = info.axis.x.scale(), Y = info.axis.y.scale(),
cx = X.invert(cursor[0]), cy = Y.invert(cursor[1]),
scale_factor = Math.abs(linear_scale_factor(X)),
cp = info.quadtree.find(cx, cy, (3*g_vertex_radius)/scale_factor);
//console.log("mousemove: cursor=("+cursor[0]+","+cursor[1]+") -> ("+cx+","+cy+")");
//verts.classed("vertex--selected", function (d) { return d === cp; });
d3.selectAll(".vx.vertex--selected")
.classed("vertex--selected", false)
.select(".vertex--drag-handle").remove();
if (cp !== undefined) {
d3.selectAll(".vx[vi='"+cp.id+"']")
.classed("vertex--selected", true)
.append("g")
.attr("class", "vertex--drag-handle")
.attr("pointer-events", "all")
.attr("transform", "translate("+(-3*g_vertex_radius)+","+(-3*g_vertex_radius)+")")
.append("rect")
.attr("width", 5*g_vertex_radius).attr("height", 5*g_vertex_radius);
build_vertex_info(v_info_div, [cp]);
} else {
build_vertex_info(v_info_div, []);
}
});
} else {
uc.on("mousemove", null);
vc.remove();
}
} // }}}
function hyp_svg_edges(svg, value) { // {{{
var svg_id = hyp_get_svgid(svg),
info = g_svginfo[svg_id],
mc = svg.select("g.mesh-container"),
ec = mc.select("g.edge-container");
if (value) {
if (ec.empty())
ec = mc.append("g").attr("class", "edge-container");
var edges = ec.selectAll(".ed").data(info.edge_data),
new_edges = edges.enter()
.append("path").attr("class", "ed");
// .append("g").attr("class", "ed");
//new_edges.append("path");
new_edges.merge(edges)
.attr("ei", function (d) { return d.id; })
.classed("dirty", true)
.classed("bn", function (d) { return d.boundary; });
edges.exit().remove();
} else {
ec.remove();
}
} // }}}
function hyp_svg_altitudes(svg, value) { // {{{
var svg_id = hyp_get_svgid(svg),
info = g_svginfo[svg_id];
if (!info.conformal)
return;
var mc = svg.select("g.mesh-container"),
ac = mc.select("g.alt-container");
if (value) {
if (ac.empty())
ac = mc.append("g").attr("class", "alt-container");
var alt_data = _.filter(info.edge_data, function (ed) { return ed.boundary; });
var altitudes = ac.selectAll(".altitude").data(alt_data),
new_altitudes = altitudes.enter()
.append("g").attr("class", "altitude");
new_altitudes.append("path");
new_altitudes.merge(altitudes)
.attr("alt-id", function (d) { return d.id; })
.classed("dirty", true);
altitudes.exit().remove();
} else {
ac.remove();
}
} // }}}
function hyp_svg_faces(svg, value) { // {{{
var svg_id = hyp_get_svgid(svg),
info = g_svginfo[svg_id],
mc = svg.select("g.mesh-container"),
fc = mc.select("g.face-container");
if (value) {
if (fc.empty())
fc = mc.append("g").attr("class", "face-container");
var faces = fc.selectAll(".fa").data(info.face_data),
new_faces = faces.enter()
.append("path").attr("class", "fa");
// .append("g").attr("class", "fa");
//new_faces.append("path");
new_faces.merge(faces)
.attr("fi", d => d.label_id)
.attr("L", d => d.layer)
.classed("dirty", true);
faces.exit().remove();
new_faces.on("mouseover", function() {
var face_id = d3.select(this).attr("fi");
if (face_id)
d3.selectAll("svg.hyperbolic-model .fa[fi='"+face_id+"']").classed("face--selected", true);
})
.on("mouseout", function () {
var face_id = d3.select(this).attr("fi");
if (face_id)
d3.selectAll("svg.hyperbolic-model .fa[fi='"+face_id+"']").classed("face--selected", false);
})
.on("click", function () {
var face_id = d3.select(this).attr("fi");
if (face_id) {
let value = !(g_faces[face_id].marked);
g_faces[face_id].marked = value;
d3.selectAll("svg.hyperbolic-model .fa[fi='"+face_id+"']").classed("face--marked", value);
}
});
} else {
fc.remove();
}
} // }}}
function hyp_svg_brushing(svg, value) { // {{{
var svg_id = hyp_get_svgid(svg),
brush = g_svginfo[svg_id].brush,
uc = svg.select(".uc-container"),
bc = uc.select(".brush-container");
if (value) {
if (bc.empty()) {
bc = uc.append("g").attr("class", "brush-container")
.style("stroke-width", "0.01%");
bc.node().__svg_id = svg_id;
}
bc.call(brush);
} else if (!bc.empty() && bc.node().__brush !== undefined) {
try {
brush.move(bc, null);
//bc.on(".brush", null);
bc.remove();
} catch (err) {
console.log("Warning: ignoring exception during removal of brush layer: "+err.message);
}
}
} // }}}
// {{{ -- generator functions for "drag subject" handlers
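// d3.drag() evaluates the ``subject'' accessor when a gesture starts; if it
// returns null, no drag is started. Each generator below looks up the vertex
// closest to the cursor (within three vertex radii, converted from screen to
// model units) in the view's quadtree and returns it together with its
// coordinates in that view's model and the current axis scales.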
function _hyp_svg_dragsubject_C(info) {
return function () {
let cursor = d3.mouse(this),
X = info.axis.x.scale(), Y = info.axis.y.scale(),
cx = X.invert(cursor[0]), cy = Y.invert(cursor[1]),
result = info.quadtree.find(cx, cy, 3*g_vertex_radius / Math.abs(linear_scale_factor(X)));
return !result ? null : {
x: result.x, y: result.y,
conformal: true, halfplane: false, euclidean: false,
X: X, Y: Y,
data: result
};
};
}
function _hyp_svg_dragsubject_P(info) {
return function () {
let cursor = d3.mouse(this),
X = info.axis.x.scale(), Y = info.axis.y.scale(),
cx = X.invert(cursor[0]), cy = Y.invert(cursor[1]),
result = info.quadtree.find(cx, cy, 3*g_vertex_radius / Math.abs(linear_scale_factor(X)));
return !result ? null : {
x: result.px, y: result.py,
conformal: false, halfplane: false, euclidean: false,
X: X, Y: Y,
data: result
};
};
}
function _hyp_svg_dragsubject_U(info) {
return function () {
let cursor = d3.mouse(this),
X = info.axis.x.scale(), Y = info.axis.y.scale(),
cx = X.invert(cursor[0]), cy = Y.invert(cursor[1]),
result = info.quadtree.find(cx, cy, 3*g_vertex_radius / Math.abs(linear_scale_factor(X)));
return !result ? null : {
x: result.ux, y: result.uy,
conformal: false, halfplane: true, euclidean: false,
X: X, Y: Y,
data: result
};
};
}
function _hyp_svg_dragsubject_E(info) {
return function () {
let cursor = d3.mouse(this),
X = info.axis.x.scale(), Y = info.axis.y.scale(),
cx = X.invert(cursor[0]), cy = Y.invert(cursor[1]),
result = info.quadtree.find(cx, cy, 3*g_vertex_radius / Math.abs(linear_scale_factor(X)));
return !result ? null : {
x: result.x, y: result.y,
conformal: false, halfplane: false, euclidean: true,
X: X, Y: Y,
data: result
};
};
}
// }}} -- generator functions for "drag subject" handlers
function hyp_svg_dragging(svg, value) { // {{{
let svg_id = hyp_get_svgid(svg),
//uc = svg.select("g.uc-container"),
vc = svg.select("g.vertex-container"),
info = g_svginfo[svg_id],
drag_subject_handler = info.euclidean ? _hyp_svg_dragsubject_E(info) : (
info.halfplane ? _hyp_svg_dragsubject_U(info) : (
info.conformal ? _hyp_svg_dragsubject_C(info) : _hyp_svg_dragsubject_P(info))
);
if (value && vc.empty())
throw "Error: hyp_svg_dragging(): svg #"+svg_id+" does not have a vertex-container group";
if (value) {
vc.call(d3.drag()
.on("start", vertex_drag_handler)
.subject(drag_subject_handler)
);
} else {
vc.on(".drag", null);
}
} // }}}
function hyp_svg_moving(svg, value) { // {{{
var svg_id = hyp_get_svgid(svg),
info = g_svginfo[svg_id];
if (!info.conformal)
return;
var mc = svg.select("g.mesh-container"),
fc = mc.select("g.face-container");
if (fc.empty()) {
console.log("Warning: no face container for SVG ID ``"+svg_id+"'', not installing translation listener.");
return;
}
var faces = fc.selectAll(".fa");
if (value) {
faces.call(d3.drag().on("start", translation_drag_start));
} else {
faces.on(".drag", null);
}
} // }}}
function hyp_svg_scalestroke(svg, value) { // {{{
var svg_id = hyp_get_svgid(svg),
info = g_svginfo[svg_id];
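  // Note: currently a no-op; scaling of stroke widths on zoom is not
  // implemented here yet.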
} // }}}
/// Initializes and fills a SVG root element representing a model of
/// hyperbolic space.
function hyp_svg_init(which, nmode, vertices, edges, faces, quadtree, vertex_transform, path_builder_edge, path_builder_face) { // {{{
let svg = d3.select(which);
const svg_id = hyp_get_svgid(svg),
clip_path_id = svg_id+"--content-clip",
svg_width = (g_svg_width > 0 ? g_svg_width : c_svg_default_width),
svg_height = (g_svg_height > 0 ? g_svg_height : c_svg_default_height);
svg.attr("width", svg_width+c_axis_margin_x+2*c_svg_padding);
svg.attr("height", svg_height+c_axis_margin_y+2*c_svg_padding);
// Note: not using viewBox attribute when using scales (d3.scale, d3.axis)
//if (!halfplane)
// svg.attr("viewBox", "-1.1 -1.1 2.2 2.2");
//else
// svg.attr("viewBox", "-5.0 -0.5 5.0 5.0");
let info = g_svginfo[svg_id];
if (info === undefined)
info = g_svginfo[svg_id] = {};
info.clip_path = clip_path_id;
info.conformal = nmode == 0;
info.halfplane = nmode == 2;
info.euclidean = nmode == 3;
info.uc_home_transform = "translate("+(c_axis_margin_x+c_svg_padding)+","+c_svg_padding+")";
// Create linear scales with y axis flipped, set domain conditionally
const range_diff = info.halfplane || info.euclidean ? 0 : svg_width - svg_height,
half_range_diff = range_diff / 2,
range_adjust = range_diff % 2;
let xscale = d3.scaleLinear().range([
Math.max(0, half_range_diff),
Math.min(svg_width, svg_width - half_range_diff)
]),
yscale = d3.scaleLinear().range([
Math.min(svg_height, svg_height + half_range_diff),
Math.max(0, -half_range_diff)
]);
if (!info.halfplane && !info.euclidean) {
xscale.domain([-1.0, 1.0]);
yscale.domain([-1.0, 1.0]);
} else {
    const extent = compute_quadtree_extent(quadtree),
          max_range = Math.max(extent[1][0]-extent[0][0], extent[1][1]) + 1,
          x_mid = (extent[0][0] + extent[1][0]) / 2,
          y_mid = (extent[0][1] + extent[1][1]) / 2,
          y_low = info.halfplane ? Math.max(0, y_mid-max_range/2) : y_mid-max_range/2,
          aspect = (xscale(1)-xscale(0)) / (yscale(0)-yscale(1)),
          xfit = Math.max(1.0, aspect),
          yfit = Math.max(1.0, 1/aspect);
    xscale.domain([x_mid-max_range*xfit/2, x_mid+max_range*xfit/2]);
    yscale.domain([y_low, y_low+max_range*yfit]);
}
info.xscale_full = xscale.copy();
info.yscale_full = yscale.copy();
info.axis = {
x: d3.axisBottom().scale(xscale).ticks(10).tickSize(2),
y: d3.axisLeft().scale(yscale).ticks(10).tickSize(2)
};
info.brush = d3.brush()
.extent(function () {
return [[-1.0, -1.0], [1.0, 1.0]];
})
.handleSize(0.01);
info.brush.on("start", brush_handler_start)
.on("end", brush_handler_end)
.on("brush", brush_handler_update);
info.edge_data = edges;
info.face_data = faces;
info.path_builder_edge = path_builder_edge;
info.path_builder_face = path_builder_face;
info.vertex_data = vertices;
info.vertex_transform = vertex_transform;
info.quadtree = quadtree;
//svg.selectAll(".uc-container,.axes-container").remove();
svg.selectAll("g.content").remove();
let content = svg.append("g")
.attr("class", "content")
.attr("transform", info.uc_home_transform);
svg.select("defs clipPath#"+clip_path_id).remove();
let svg_defs = svg.select("defs");
if (svg_defs.empty())
svg_defs = svg.append("defs");
svg_defs.append("clipPath")
.attr("id", clip_path_id)
.append("rect")
.attr("width", svg_width).attr("height", svg_height);
let clip_area = content.append("g").attr("class", "clip-area");
let uc = clip_area.append("g").attr("class", "uc-container");
if (!info.halfplane && !info.euclidean) {
uc.append("circle")
.attr("class", "unit-circle axis-component")
.attr("transform", "translate("+xscale(0)+","+yscale(0)+")")
.attr("r", Math.abs(xscale(1)-xscale(0)));
} else {
clip_area.style("clip-path", "url(#"+clip_path_id+")");
//content.append("rect").attr("class", "evt-region")
// .attr("width", svg_width).attr("height", svg_height)
// .style("fill", "none").style("stroke", "red")
// .style("pointer-events", "all");
}
uc.append("g").attr("class", "mesh-container");
let axes = content.append("g").attr("class", "axes-container");
if (info.halfplane || info.euclidean) {
axes.append("g")
.attr("class", "axis--x")
.attr("transform", "translate(0,"+svg_height+")")
.call(info.axis.x);
axes.append("g")
.attr("class", "axis--y").call(info.axis.y);
}
g_view_counter++;
return svg;
} // }}}
/// Updates a hyperbolic model SVG by re-building all primitives (vertices,
/// edges, faces...) of the associated mesh that are marked as "dirty."
function hyp_svg_update(svg) { // {{{
let svg_id = hyp_get_svgid(svg),
info = g_svginfo[svg_id],
X = info.axis.x.scale(),
Y = info.axis.y.scale();
// re-order elements to ensure proper drawing order
svg.select(".face-container").raise();
svg.select(".edge-container").raise();
svg.select(".vertex-container").raise();
if (!info.halfplane && !info.euclidean) {
let uc_dirty = !svg.select(".uc-container circle.unit-circle.dirty").empty();
if (uc_dirty) {
let unit_circle = svg.select(".uc-container circle.unit-circle.dirty");
unit_circle.classed("dirty", false)
.attr("transform", "translate("+X(0)+","+Y(0)+")")
.attr("r", Math.abs(X(1)-X(0)));
}
}
svg.selectAll(".axis-component").classed("dirty", false);
const all_mesh = !svg.select(".mesh-container.dirty").empty();
const all_faces = all_mesh || !svg.select(".face-container.dirty").empty();
if (g_draw_faces) {
let faces = info.face_data,
path_builder_face = d => info.path_builder_face(d, X, Y);
try {
svg.selectAll(all_faces ? ".fa" : ".fa.dirty")
.datum(function (d) { return faces[d.id]; })
.classed("dirty", false)
//.select("path")
.attr("d", path_builder_face);
} catch (err) {
log_err("hyp_svg_update(svg_id='"+svg_id+"'): Exception while updating faces: "+err.message, true);
}
if (all_faces)
svg.select(".face-container").classed("dirty", false);
}
const all_edges = all_mesh || !svg.select(".edge-container.dirty").empty();
if (g_draw_edges) {
let edge_data = info.edge_data,
path_builder_edge = d => info.path_builder_edge(d, X, Y);
try {
let edges = svg.selectAll(all_edges ? ".ed" : ".ed.dirty");
//console.log("hyp_svg_update(svg_id='"+svg_id+"'): updating "+edges.size()+" edges")
edges
.datum(function (d) { return edge_data[d.id]; })
.classed("dirty", false)
//.select("path")
.attr("d", path_builder_edge);
} catch (err) {
log_err("hyp_svg_update(svg_id='"+svg_id+"'): Exception while updating edges: "+err.message, true);
}
if (all_edges)
svg.select(".edge-container").classed("dirty", false);
}
if (g_draw_altitudes && info.conformal) {
let edge_data = info.edge_data;
try {
let edges = svg.selectAll(".altitude.dirty");
//console.log("hyp_svg_update(svg_id='"+svg_id+"'): updating "+edges.size()+" edges")
edges
.datum(function (d) { return edge_data[d.id]; })
.classed("dirty", false)
//.select("path")
.attr("d", function (d) { return "M0,0L"+X(d.C.cx)+","+Y(d.C.cy); });
} catch (err) {
log_err("hyp_svg_update(svg_id='"+svg_id+"'): Exception while updating edges: "+err.message, true);
}
}
const all_vertices = all_mesh || !svg.select(".vertex-container.dirty").empty();
if (g_draw_vertices) {
let vertices = info.vertex_data,
vertex_transform = d => info.vertex_transform(d, X, Y);
svg.selectAll(all_vertices ? ".vx" : ".vx.dirty")
.datum(function (d) { return vertices[d.id]; })
.attr("transform", vertex_transform)
.classed("dirty", false);
if (all_vertices)
svg.select(".vertex-container").classed("dirty", false);
}
if (all_mesh)
svg.select(".mesh-container").classed("dirty", false);
} // }}}
/// Convenience function that updates all hyperbolic model SVGs.
function hyp_svg_update_all() { // {{{
d3.selectAll("svg.hyperbolic-model").each(function () { hyp_svg_update(d3.select(this)); });
} // }}}
function export_svg(which, filename, this_) { // {{{
var svg = d3.select(which),
bc = svg.select(".brush-container"),
brush_selection = null;
if (g_brushing) {
brush_selection = d3.brushSelection(bc.node());
hyp_svg_brushing(svg, false);
}
let style = svg.append("style").attr("type", "text/css"),
style_content = "";
c_hyp_svg_style_keys.forEach(function (descr, sel) {
let notspec = descr.skip ? ":not("+descr.skip.join("):not(")+")" : "";
let el = svg.select(sel+notspec);
if (!el.empty()) {
style_content += sel + " {\n";
_.each(descr.keys, function (k) {
let sty = window.getComputedStyle(el.node()),
value = sty.getPropertyValue(k);
if (value)
style_content += " " + k + ": " + value + ";\n"
});
style_content += "}\n";
}
});
//console.log("computed stylesheet:\n" + style_content);
style.text(style_content);
svg.selectAll("style").lower();
var s = new XMLSerializer, content = s.serializeToString(svg.node());
style.remove();
if (g_brushing) {
hyp_svg_brushing(svg, true);
g_svginfo[hyp_get_svgid(svg)].brush.move(bc, brush_selection);
}
var ac = this_.getElementsByClassName("svg-download-link");
if (ac.length == 0)
throw "Error: Missing a.svg-download-link";
var a = ac[0];
a.href = window.URL.createObjectURL(new File([content], filename, {type: "image/svg+xml"}));
a.download = filename;
a.click();
} // }}}
function reset_zoom(self) { // {{{
let container = d3.select(self.parentElement.parentElement);
if (container.empty())
return;
let svg = container.select(".hyperbolic-model");
if (svg.size() == 0) {
log_err("Error: reset_zoom(): cannot figure out SVG element. [Unexpected UI change?]");
return;
}
let svg_id = hyp_get_svgid(svg), info = g_svginfo[svg_id], zoom = info.zoom;
if (zoom)
svg.transition().duration(350).call(zoom.transform, d3.zoomIdentity);
else
svg.select("g.uc-container").attr("transform", null);
} // }}}
function close_model(self) { // {{{
if (g_view_counter == 1) {
log_err("Refusing to close the last remaining view.")
return;
}
let container = d3.select(self.parentElement.parentElement);
if (!container.empty()) {
container.remove();
g_view_counter--;
}
// let svg = d3.select(which);
// if (!svg.empty()) {
// let container = d3.select(svg.node().parent);
// if (!container.empty())
// container.remove();
// }
} // }}}
// {{{ -- Toggle-button action handlers
function toggle_vertices(value, force = false) {
if (toggle_button("vertices", value, force)) {
if (!g_toggle["vertices"].state)
toggle_dragging(false, true);
g_draw_vertices = g_toggle["vertices"].state;
hyp_svg_update_all();
}
}
function toggle_edges(value, force = false) {
if (toggle_button("edges", value, force)) {
g_draw_edges = g_toggle["edges"].state;
hyp_svg_update_all();
}
}
function toggle_faces(value, force = false) {
if (toggle_button("faces", value, force)) {
g_draw_faces = g_toggle["faces"].state;
hyp_svg_update_all();
}
}
function toggle_altitudes(value, force = false) {
if (toggle_button("altitudes", value, force)) {
g_draw_altitudes = g_toggle["altitudes"].state;
hyp_svg_update_all();
}
}
function toggle_dragging(value, force = false) {
if (toggle_button("dragging", value, force)) {
g_dragging = g_toggle["dragging"].state;
hyp_svg_update_all();
}
}
function toggle_zooming(value, force = false) {
if (toggle_button("zooming", value, force)) {
g_zooming = g_toggle["zooming"].state;
hyp_svg_update_all();
}
}
function toggle_brushing(value, force = false) {
if (toggle_button("brushing", value, force)) {
g_brushing = g_toggle["brushing"].state;
hyp_svg_update_all();
}
}
function toggle_moving(value, force = false) {
if (toggle_button("moving", value, force)) {
hyp_svg_update_all();
}
}
function toggle_scalestroke(value, force = false) {
if (toggle_button("scalestroke", value, force)) {
hyp_svg_update_all();
}
}
// }}} -- Toggle-button action handlers
/// Generic action handler for toggle buttons that sets the \p state value in
/// \p g_toggle for the button identified with \p which to \p value, or
/// flips it if \p value is \p undefined. Also sets the appropriate class of
/// the UI element selected by <tt>"button.toggle--"+which</tt> and calls the
/// toggle's \p set handler if the value has changed or \p force evaluates as
/// \p true.
///
/// \return \p true if the value has changed, \p false otherwise.
function toggle_button(which, value, force = false) { // {{{
var toggle_info = g_toggle[which];
if (toggle_info === undefined || toggle_info["state"] === undefined) {
console.log("Error: ignoring unknown toggle button ID ``"+which+"''.");
return;
}
var old_value = toggle_info.state;
if (value === undefined)
value = !old_value;
d3.selectAll("button.toggle--"+which)
.classed("toggle-on", value)
.classed("toggle-off", !value);
g_toggle[which].state = value;
if (!!force || old_value != value) {
d3.selectAll("svg.hyperbolic-model").each(function () {
toggle_info.set(d3.select(this), value);
});
return true;
}
return false;
} // }}}
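// Example (illustrative): toggle_button("edges") flips the "edges" toggle and
// runs its ``set'' handler on every model SVG; toggle_button("edges", true,
// true) forces it on and re-runs the handler even if the state was unchanged.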
/// FIXME: not used/not implemented at the moment
function toggle_linked_zoom(_target, value, force = false) { // {{{
var target = d3.select(_target),
svg_id = hyp_get_svgid(target),
zoom = g_svginfo[svg_id].zoom,
old_value = g_linked_zoom;
if (value === undefined)
value = !old_value;
d3.selectAll("button.toggle--linked-zoom")
.classed("toggle-on", value)
.classed("toggle-off", !value);
g_linked_zoom = value;
if (!!force || old_value != value) {
if (value && zoom === undefined) {
//console.log("[INFO] linking to non-existant zoom, creating default");
hyp_svg_zooming(target, true);
zoom = g_svginfo[svg_id].zoom;
hyp_svg_zooming(target, g_zooming);
}
d3.selectAll("svg.hyperbolic-model").each(function () {
var sid = hyp_get_svgid(d3.select(this));
//console.log("g_svginfo["+sid+"].zoom === zoom: "+(g_svginfo[sid].zoom === zoom));
//console.log("this === target.node(): "+(this === target.node()));
if (value && g_svginfo[sid].zoom !== zoom) {
hyp_svg_zooming(d3.select(this), false);
g_svginfo[sid].zoom = zoom;
hyp_svg_zooming(d3.select(this));
}
});
}
} // }}}
/// Reconstruct the polygon defined by the currently given boundary edges by
/// measuring its interior angles and using the method from Beardon's proof of
/// his theorem 7.16.2 to build a (convex) polygon with the same interior
/// angles.
function action_reconstruct() { // {{{
var n = g_vertices.length,
boundary_vertices = _.filter(g_vertices, function (v) { return v.boundary_degree == 2; }),
thetas = _.map(boundary_vertices, function (v) { return compute_interior_angle(v); }),
polymesh = hyp_polygon_from_angles_C(thetas);
main(null, polymesh);
return polymesh;
} // }}}
/// Use deck transformations computed from side pairings to construct
/// additional "copies" of the fundamental mesh along its orbit.
/// Does nothing if no side-pairing information has been provided with the
/// mesh. Pairings can be given as an object/map under the "pairings" key,
/// mapping each boundary edge index to the index of its paired edge.
///
/// For triangle meshes, edge indices are integers of the form e = 3*k + i,
/// where k is in [0..faces.length-1] and i in {0, 1, 2}. In that case, e is
/// the edge from vertex i to vertex i+1 (mod 3) of face k.
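/// Example (illustrative): for faces [[0, 1, 2], [0, 2, 3]], the edge index
/// e = 3*1 + 1 = 4 denotes the edge from vertex 2 to vertex 3 of face 1.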
function action_tesselate() { // {{{
if (g_pairings === undefined || _.size(g_pairings) == 0) {
log_err("No pairing information available.");
return;
}
function select_E(v) { return {x: v.x, y: v.y }; }
function select_P(v) { return {x: v.px, y: v.py}; }
let euclidean = g_basemesh["model"] == "euclidean",
n = g_basemesh.vertices.length,
// ``nmode'' argument: use Euclidean mode if appropriate, use projective
// transformations otherwise.
select_point = euclidean ? select_E : select_P,
fuchsians = generate_tesselation_transformations(euclidean ? 3 : 1, g_tesselation_depth),
quadtree = d3.quadtree().x(d => d.x).y(d => d.y).addAll(g_vertices),
v_offset = g_vertices.length;
const search_radius = 1e-5;
_.each(fuchsians, function (fuchsian, orbit) {
const M = fuchsian.trans,
//v_offset = g_vertices.length,
f_offset = g_faces.length,
f_layer = fuchsian.hist.length;
let vmap = [];
// transform each vertex and generate new ones if they are sufficiently
// far from any existing vertex
for (let j = 0; j < n; j++) {
// compute new vertex position in the Klein model
const org_v = g_vertices[j],
op = select_point(org_v),
vp = mul_matvec3(M, [op.x, op.y, 1.0]),
vp_x = vp[0]/vp[2], vp_y = vp[1]/vp[2];
let new_vertex = {
id: v_offset,
boundary_degree: 0
};
let vc; // used for quadtree lookups
// FIXME: hackish -- we are duplicating part of the code that builds vertices
// here, separate from the use in build_mesh()
// if (j == 0)
// console.log("fuchsian #"+fi+": center -> ("+vc.x+","+vc.y+"), history: "+fuchsian.hist.join(','));
if (!euclidean) {
vc = hyp_PtoC({x: vp_x, y: vp_y});
let vu = hyp_PtoU({x: vp_x, y: vp_y});
new_vertex.x = vc.x; new_vertex.y = vc.y;
new_vertex.px = vp_x; new_vertex.py = vp_y;
new_vertex.ux = vu.x; new_vertex.uy = vu.y;
} else {
vc = {x: vp_x, y: vp_y};
new_vertex.x = vp_x; new_vertex.y = vp_y;
}
let existing = quadtree.find(vc.x, vc.y, search_radius);
//log_msg("trying to find vertex at ("+new_vertex.x+","+new_vertex.y+") -> "+(!!existing));
if (existing === undefined) {
v_offset++;
vmap.push(new_vertex.id);
g_neighbors.set(new_vertex.id, []);
g_vertices.push(new_vertex);
quadtree.add(new_vertex);
} else {
//log_msg("reusing existing vertex #"+existing.id+" for layer "+f_layer);
vmap.push(existing.id);
}
}
// translate face indices, taking into account that we may re-use old
// vertices found using the quadtree
//const new_faces = _.map(g_basemesh.faces, f => _.map(f, fi => (v_offset+fi)));
const new_faces = _.map(g_basemesh.faces, f => _.map(f, fi => vmap[fi]));
_.each(new_faces, (f, i) => push_face(euclidean, f, f_offset+i, i, f_layer, orbit+1));
});
d3.selectAll("svg.hyperbolic-model").each(function () {
const node = this;
_.each(["vertices", "edges", "faces"], function (which) {
const toggle_info = g_toggle[which];
toggle_info.set(d3.select(node), toggle_info.state);
});
});
hyp_svg_update_all();
} // }}}
/// Use deck transformations computed from side pairings to construct
/// additional "copies" of the fundamental mesh along its orbit, using
/// a combinatorial algorithm due to Dunham [1].
/// Does nothing if no side-pairing information has been provided with the
/// mesh. Pairings can be given as an object/map under the "pairings" key,
/// mapping each boundary edge index to the index of its paired edge.
///
/// For triangle meshes, edge indices are integers of the form e = 3*k + i,
/// where k is in [0..faces.length-1] and i in {0, 1, 2}. In that case, e is
/// the edge from vertex i to vertex i+1 (mod 3) of face k.
///
/// [1] Douglas Dunham, An Algorithm to Generate Repeating Hyperbolic
/// Patterns, 2007.
function action_tesselate_dunham() { // {{{
throw "not implemented";
if (g_pairings === undefined || _.size(g_pairings) == 0) {
log_err("No pairing information available.");
return;
}
let euclidean = g_basemesh["model"] == "euclidean",
n = g_basemesh.vertices.length,
fuchsians = generate_tesselation_transformations(euclidean ? 3 : 1, g_tesselation_depth),
quadtree = d3.quadtree().x(d => d.x).y(d => d.y).addAll(g_vertices),
v_offset = g_vertices.length;
const search_radius = 1e-9;
_.each(fuchsians, function (fuchsian, orbit) {
    const M = fuchsian.trans,
//v_offset = g_vertices.length,
f_offset = g_faces.length,
f_layer = fuchsian.hist.length;
let vmap = [];
// transform each vertex and generate new ones if they are sufficiently
// far from any existing vertex
for (let j = 0; j < n; j++) {
// compute new vertex position in the Klein model
const org_v = g_vertices[j],
vp = mul_matvec3(M, [org_v.px, org_v.py, 1.0]),
vp_x = vp[0]/vp[2], vp_y = vp[1]/vp[2],
vc = hyp_PtoC({x: vp_x, y: vp_y}),
vu = hyp_PtoU({x: vp_x, y: vp_y});
// FIXME: hackish -- we are duplicating part of the code that builds
// vertices here, separate from the use in build_mesh()
//if (j == 0)
// console.log("fuchsian #"+fi+": center -> ("+vc.x+","+vc.y+"), history: "+fuchsian.hist.join(','));
const new_vertex = {
id: v_offset,
boundary_degree: 0,
x: vc.x, y: vc.y,
px: vp_x, py: vp_y,
ux: vu.x, uy: vu.y
}
      let existing = quadtree.find(new_vertex.x, new_vertex.y, search_radius);
if (existing === undefined) {
v_offset++;
vmap.push(new_vertex.id);
g_neighbors.set(new_vertex.id, []);
g_vertices.push(new_vertex);
} else {
vmap.push(existing.id);
}
}
// translate face indices, taking into account that we may re-use old
// vertices found using the quadtree
//const new_faces = _.map(g_basemesh.faces, f => _.map(f, fi => (v_offset+fi)));
const new_faces = _.map(g_basemesh.faces, f => _.map(f, fi => vmap[fi]));
_.each(new_faces, (f, i) => push_face(f, f_offset+i, i, f_layer, orbit+1));
});
d3.selectAll("svg.hyperbolic-model").each(function () {
const node = this;
_.each(["vertices", "edges", "faces"], function (which) {
const toggle_info = g_toggle[which];
toggle_info.set(d3.select(node), toggle_info.state);
});
});
hyp_svg_update_all();
} // }}}
//var g_locator = {};
//var g_centers = [];
//var g_test_centers = [];
/// Computes a set of deck transformations that will (almost) fill the
/// conformal disk model. The algorithm is simple and iteratively concatenates
/// Fuchsian group elements and tests whether the center vertex is translated
/// to a location reasonably different from its image under every
/// transformation built so far. Uses a quadtree for testing against known
/// locations.
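/// \return An array of objects {hist, trans}, where ``hist'' is the sequence
/// of boundary-edge indices whose pairing transformations were composed and
/// ``trans'' is the resulting 3x3 homogeneous matrix (as consumed by
/// mul_matvec3/mul_matmat3).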
function generate_tesselation_transformations(nmode, max_lvl = 3, min_dist = null) { // {{{
const origin = [0.0, 0.0, 1.0];
min_dist = min_dist == null ? g_tcmindist : min_dist;
const tclimit = g_tclimit == 0 ? (nmode == 3 || nmode == 2 ? +Infinity : 1.0) : g_tclimit;
let filtered_mindist = 0, filtered_tclimit = 0;
let centers = [{
x: 0.0, y: 0.0, cw: {x: 0.0, y: 0.0}, hc: origin,
level: 0, hist: [], M: [1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0]
}],
todo = [{vi: 0, v: centers[0], via: 0}],
fuchsians = [],
locator = d3.quadtree()
.extent([[-1.0, -1.0], [1.0, 1.0]])
//.x(v => v.x).y(v => v.y)
.x(v => v.cw.x).y(v => v.cw.y)
.addAll(centers);
g_fuchsians = compute_deck_transformations(g_pairings);
function select_fuchsian(fe) {
return nmode == 0 ? fe.ctrans : (
nmode == 1 ? fe.ptrans : (
nmode == 2 ? fe.utrans :
fe.etrans)
);
}
//g_test_centers = [];
while (todo.length > 0) {
const next = todo.pop(), vi = next.vi, v = next.v,
lv = v.level, hc = v.hc, M0 = v.M;
//console.log("todo: "+vi+": x="+vx+", y="+vy);
if (lv >= max_lvl)
continue;
g_pairings.forEach(function (t, s) {
const M = mul_matmat3(select_fuchsian(g_fuchsians.get(s)), M0),
//w = mul_matvec3(M, [vx, vy, 1.0]),
w = mul_matvec3(M, origin),
wx = w[0]/w[2], wy = w[1]/w[2];
//wx = w[0]/(1+w[2]), wy = w[1]/(1+w[2]);
//console.log("* testing new center ("+wx+","+wy+")");
//let oc;
let cw;
if (nmode != 3) {
//oc = hyp_PtoC({x: hc[0]/hc[2], y: hc[1]/hc[2]});
cw = hyp_PtoC({x: wx, y: wy});
} else {
//oc = {x: hc[0]/hc[2], y: hc[1]/hc[2]};
cw = {x: wx, y: wy};
}
//cw.ox = oc.x; cw.oy = oc.y;
//g_test_centers.push(cw);
let nearest = cw;
if (Math.sqrt(cw.x*cw.x + cw.y*cw.y) < tclimit) {
//nearest = locator.find(wx, wy, min_dist);
nearest = locator.find(cw.x, cw.y, min_dist);
if (nearest === undefined)
filtered_mindist++;
} else
filtered_tclimit++;
if (nearest === undefined) {
cw.valid = true;
var hist = centers[vi].hist.slice(),
new_v = {x: wx, y: wy, cw: cw, hc: w, level: lv+1, hist: hist, M: M};
hist.push(s); // update history (reflected in new_v, which stores a reference)
const new_vi = centers.push(new_v) - 1;
//console.log("- adding level "+(lv+1)+" conformal center #"+new_vi+": (x,y)=("+new_v.x+","+new_v.y+")");
locator.add(new_v);
todo.unshift({vi: new_vi, v: new_v, via: s});
fuchsians.push({hist: hist, trans: M});
} else {
cw.valid = false;
//console.log("! skipping level "+lv+" transition "+s+"->"+t+": found center at ("+nearest.x+","+nearest.y+")");
//console.log("! distance = "+Math.sqrt((nearest.x-wx)^2 + (nearest.y-wy)^2));
}
});
}
if (filtered_tclimit > 0)
log_msg(filtered_tclimit + " transformations filtered due to tclimit="+tclimit);
if (filtered_mindist > 0)
log_msg(filtered_mindist + " transformations filtered due to tcmindist="+min_dist);
//g_locator = locator;
//g_centers = centers;
return fuchsians;
} // }}}
function action_load(file_input) { // {{{
if (file_input.files.length == 0)
return; // ignore action if e.g. the user pressed the "Cancel" button
var reader = new FileReader();
reader.onload = function (evt) {
var mesh = {};
try {
mesh = JSON.parse(evt.target.result);
} catch (err) {
log_err("Caught exeception while trying to parse JSON input: "+err.message);
return;
}
main(null, mesh);
};
reader.readAsText(file_input.files[0]);
} // }}}
// vim:ts=2:sw=2:expandtab
#!/bin/sh
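# Converts a mesh in (C)OFF format, read from the file given as $1 or from
# standard input, to the {"vertices": [...], "faces": [...]} JSON format
# consumed by hyp-draw.js.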
if test -n "$1" -a ! -f "$1"; then echo "Error: unable to read from file $1"; exit 1; fi
cat $1 | awk '
BEGIN {
OFS=""; ORS=""; h=0; v=-1; f=-1; c=3;
print "{\"vertices\":[";
}
END {
print "]}\n";
}
/^C?OFF[4]?/ {
if (h>0) { printf "Error: unexpected header"; exit 1; }
h=2;
sub("^[Cn]*OFF", "");
if (strtonum($0) > 0) c=strtonum($0);
}
/^C?nOFF/ {
if (h>0) next;
h=1;
}
/[0-9]+/ {
if (h == 1) {
c=strtonum($0);
h=2;
next;
} else if (h>1 && v<0) {
v=$1; f=$2; next;
} else if (h == 0) {
print "Error: missing header";
exit 1;
}
}
/[-[:digit:]]+/ {
if (h<2) next;
if (v>0) {
v--;
printf "[%s,%s", $1, $2;
if (c>=3) print ",", $3;
if (c>=4) print ",", $4;
print "]";
if (v>0) print ","; else print "],\"faces\":[";
} else if (f>0) {
f--; n=$1; i=2;
print "[";
while (i<=n+1) {
printf strtonum($i);
i++;
if (i<=n+1) print ",";
};
print "]";
if (f>0) print ",";
}
}'
{"model":"conformal","pairings": {"1": 1, "9": 9, "3": 3, "5": 5, "7": 7}, "vertices": [[0.0, 0.0], [0.39797542678479075, 0.0], [0.12298117022012299, 0.37849712296902005], [-0.32196888361251835, 0.23392408663890296], [-0.3219688836125184, -0.23392408663890285], [0.1229811702201229, -0.37849712296902005]], "faces": [[0, 1, 2], [0, 2, 3], [0, 3, 4], [0, 4, 5], [0, 5, 1]]}
{"faces": [[0, 1, 2], [0, 2, 3], [0, 3, 4], [0, 4, 5], [0, 5, 6], [0, 6, 1]], "vertices": [[0.0, 0.0], [0.7, 0.0], [0.35000000000000003, 0.606217782649107], [-0.3499999999999998, 0.6062177826491071], [-0.7, 8.572527594031472e-17], [-0.3500000000000003, -0.6062177826491069], [0.35000000000000003, -0.606217782649107]]}
{"faces": [[0, 1, 2], [0, 2, 3], [0, 3, 4], [0, 4, 5], [0, 5, 6], [0, 6, 7], [0, 7, 1]], "vertices": [[0.0, 0.0], [0.7, 0.0], [0.4364428613011135, 0.5472820377276209], [-0.15576465376942003, 0.6824495385272765], [-0.6306782075316932, 0.3037186173822907], [-0.6306782075316933, -0.3037186173822906], [-0.1557646537694202, -0.6824495385272765], [0.4364428613011133, -0.5472820377276209]]}
#!/usr/bin/env python
import sys, math, json
EPS = 1e-9
def hyp_tesselation_circumradius_C(p, q):
if p <= 0 or q <= 0:
raise Exception("hyp_tesselation_circumradius_C: p and q must be positive.")
if (p-2)*(q-2) <= 4: # there is no regular tesselation otherwise
raise Exception("hyp_tesselation_circumradius_C: no regular tesselation (%d,%d) exists." % (p,q))
from math import pi, cos, sqrt
A = pi/p
B = pi/q
return sqrt(cos(A+B) / cos(A-B))
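# Example (illustrative): for the default {8,8} tesselation, A = B = pi/8, so
# the circumradius is sqrt(cos(pi/4)/cos(0)) = sqrt(0.7071...) ~ 0.841.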
def trim_float(f):
return (f, 0.0)[abs(f) < EPS]
def build_fan(n):
return [[0, i, 1+(i % n)] for i in range(1, n+1)]
def regular_ngon_corners(r, n):
return [
[trim_float(r*math.cos(2*i*math.pi/n)),
trim_float(r*math.sin(2*i*math.pi/n))]
for i in range(0, n)]
def regular_ngon(r, n):
return {
"vertices": [[0.0, 0.0]]+regular_ngon_corners(r, n),
"faces": build_fan(n),
"pairings": {str(1+2*k): 1+2*k for k in range(0, n)}
}
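# Example (illustrative): regular_ngon(0.5, 3) yields a triangle fan around the
# centre vertex 0 with three corners at radius 0.5, faces
# [[0, 1, 2], [0, 2, 3], [0, 3, 1]] and boundary self-pairings
# {"1": 1, "3": 3, "5": 5}.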
p = q = 8
if len(sys.argv) >= 2:
p = int(sys.argv[1])
if len(sys.argv) >= 3:
q = int(sys.argv[2])
sys.stdout.write(json.dumps(regular_ngon(hyp_tesselation_circumradius_C(p, q), p)))
{
"vertices": [
[0.0, 0.0],
[1.0, 0.0],
[0.9396926207859084, 0.3420201433256687],
[0.766044443118978, 0.6427876096865393],
[0.5000000000000001, 0.8660254037844386],
[0.17364817766693041, 0.984807753012208],
[-0.1736481776669303, 0.984807753012208],
[-0.4999999999999998, 0.8660254037844387],
[-0.7660444431189779, 0.6427876096865395],
[-0.9396926207859083, 0.3420201433256689],
[-1.0, 0.0],
[-0.9396926207859084, -0.34202014332566866],
[-0.7660444431189783, -0.6427876096865389],
[-0.5000000000000004, -0.8660254037844384],
[-0.17364817766693033, -0.984807753012208],
[0.17364817766692997, -0.9848077530122081],
[0.49999999999999933, -0.866025403784439],
[0.7660444431189778, -0.6427876096865396],
[0.9396926207859084, -0.3420201433256686]
],
"faces": [
[0, 1, 2],
[0, 2, 3],
[0, 3, 4],
[0, 4, 5],
[0, 5, 6],
[0, 6, 7],
[0, 7, 8],
[0, 8, 9],
[0, 9, 10],
[0, 10, 11],
[0, 11, 12],
[0, 12, 13],
[0, 13, 14],
[0, 14, 15],
[0, 15, 16],
[0, 16, 17],
[0, 17, 18],
[0, 18, 1]
],
"scale": 1.370906722418348,
"pairings": {
"1": 31,
"3": 21,
"5": 11,
"7": 15,
"9": 17,
"13": 19,
"23": 29,
"25": 33,
"27": 35,
"31": 1,
"21": 3,
"11": 5,
"15": 7,
"17": 9,
"19": 13,
"29": 23,
"33": 25,
"35": 27
}
}
{
"model": "conformal",
"vertices": [
[0.00000,0.00000],
[0.71361,0.12810],
[0.62450,0.36830],
[0.43854,0.63043],
[0.07872,0.76391],
[-0.23320,0.68649],
[-0.45741,0.56251],
[-0.62450,0.36830],
[-0.71361,0.12810],
[-0.71361,-0.12810],
[-0.62450,-0.36830],
[-0.43854,-0.63043],
[-0.07872,-0.76391],
[0.23320,-0.68649],
[0.45741,-0.56251],
[0.62450,-0.36830],
[0.71361,-0.12810]
],
"faces": [
[0,1,2],
[0,2,3],
[0,3,4],
[0,4,5],
[0,5,6],
[0,6,7],
[0,7,8],
[0,8,9],
[0,9,10],
[0,10,11],
[0,11,12],
[0,12,13],
[0,13,14],
[0,14,15],
[0,15,16],
[0,16,1]
],
"pairings": {
"1":27, "27":1, "3":7, "7":3, "5":21, "21":5, "9":25, "25":9, "11":17, "17":11, "13":29, "29":13, "15":31, "31":15, "19":23, "23":19
}
}
{
"model": "conformal",
"vertices": [[-0.4, 0.4], [-0.1, 0.8], [0.7, 0.2]],
"faces": [[0, 1, 2]]
}