@jorgehatccrma
Last active August 29, 2015 14:19
3-freq monomial finding relevant code
{
"metadata": {
"name": "",
"signature": "sha256:36f171473fcd44df1165d76c4710b75f292ad6c01c795dbb5224c9a581163c56"
},
"nbformat": 3,
"nbformat_minor": 0,
"worksheets": [
{
"cells": [
{
"cell_type": "markdown",
"metadata": {},
"source": [
"# 3-freq monomials calculation\n",
"\n",
"Find all 3-freq monomials up to order `N`, given a tolerance `tol`."
]
},
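{
"cell_type": "markdown",
"metadata": {},
"source": [
"A quick worked example of the target relation (illustrative numbers only, not taken from the cells below): with `f_j1 = 1`, `f_j2 = 2` and `f_i = 3`, the exponent triplet `(n1, n2, d) = (1, 1, 1)` is a 3-freq monomial of order `|1| + |1| + |1| = 3`, since `1*1 + 1*2 = 1*3` holds exactly (distance `0 <= tol`)."
]
},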
{
"cell_type": "code",
"collapsed": false,
"input": [
"# Utilities \n",
"\n",
"from __future__ import division\n",
"import numpy as np\n",
"\n",
"\n",
"class MemoizeMutable:\n",
" \"\"\"\n",
" Implement memoization with mutable arguments\n",
" \"\"\"\n",
" def __init__(self, fn):\n",
" self.fn = fn\n",
" self.memo = {}\n",
" def __call__(self, *args, **kwds):\n",
" import cPickle\n",
" str = cPickle.dumps(args, 1)+cPickle.dumps(kwds, 1)\n",
" if not self.memo.has_key(str):\n",
" self.memo[str] = self.fn(*args, **kwds)\n",
"# else:\n",
"# print \"returning memoized calculation\"\n",
"\n",
" return self.memo[str]\n",
"\n",
"\n",
"# got from http://stackoverflow.com/questions/1208118/using-numpy-to-build-an-array-of-all-combinations-of-two-arrays\n",
"# (faster than other alternatives)\n",
"def cartesian(arrays, out=None):\n",
" \"\"\"\n",
" Generate a cartesian product of input arrays.\n",
"\n",
" Parameters\n",
" ----------\n",
" arrays : list of array-like\n",
" 1-D arrays to form the cartesian product of.\n",
" out : ndarray\n",
" Array to place the cartesian product in.\n",
"\n",
" Returns\n",
" -------\n",
" out : ndarray\n",
" 2-D array of shape (M, len(arrays)) containing cartesian products\n",
" formed of input arrays.\n",
"\n",
" Examples\n",
" --------\n",
" >>> cartesian(([1, 2, 3], [4, 5], [6, 7]))\n",
" array([[1, 4, 6],\n",
" [1, 4, 7],\n",
" [1, 5, 6],\n",
" [1, 5, 7],\n",
" [2, 4, 6],\n",
" [2, 4, 7],\n",
" [2, 5, 6],\n",
" [2, 5, 7],\n",
" [3, 4, 6],\n",
" [3, 4, 7],\n",
" [3, 5, 6],\n",
" [3, 5, 7]])\n",
"\n",
" \"\"\"\n",
"\n",
" arrays = [np.asarray(x) for x in arrays]\n",
" dtype = arrays[0].dtype\n",
"\n",
" n = np.prod([x.size for x in arrays])\n",
" if out is None:\n",
" out = np.zeros([n, len(arrays)], dtype=dtype)\n",
"\n",
" m = n / arrays[0].size\n",
" out[:,0] = np.repeat(arrays[0], m)\n",
" if arrays[1:]:\n",
" cartesian(arrays[1:], out=out[0:m,1:])\n",
" for j in xrange(1, arrays[0].size):\n",
" out[j*m:(j+1)*m,1:] = out[0:m,1:]\n",
" return out\n",
"\n",
"\n",
"\n"
],
"language": "python",
"metadata": {},
"outputs": [],
"prompt_number": 1
},
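{
"cell_type": "markdown",
"metadata": {},
"source": [
"A quick sanity check of the utilities above (added for illustration; `slow_double` is just a made-up helper, not used anywhere else). It shows the row layout produced by `cartesian` and that `MemoizeMutable` caches calls keyed on pickled, possibly mutable, arguments."
]
},
{
"cell_type": "code",
"collapsed": false,
"input": [
"prod = cartesian(([1, 2, 3], [4, 5], [6, 7]))\n",
"assert prod.shape == (12, 3)           # 3 * 2 * 2 combinations, one per row\n",
"assert (prod[0] == [1, 4, 6]).all()    # the right-most array varies fastest\n",
"\n",
"@MemoizeMutable\n",
"def slow_double(xs):\n",
"    return [2*x for x in xs]           # lists are unhashable, but pickling them yields a usable cache key\n",
"\n",
"assert slow_double([1, 2, 3]) == [2, 4, 6]\n",
"assert len(slow_double.memo) == 1\n",
"slow_double([1, 2, 3])                 # second call with the same argument...\n",
"assert len(slow_double.memo) == 1      # ...hits the cache instead of adding a new entry"
],
"language": "python",
"metadata": {},
"outputs": []
},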
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## The actual monomial finding functions ..."
]
},
{
"cell_type": "code",
"collapsed": false,
"input": [
"from collections import defaultdict, namedtuple\n",
"\n",
"\n",
"@MemoizeMutable\n",
"def fareySequence(N, k=1):\n",
" \"\"\"\n",
" Generate Farey sequence of order N, less than 1/k\n",
" \"\"\"\n",
" # assert type(N) == int, \"Order (N) must be an integer\"\n",
" a, b = 0, 1\n",
" c, d = 1, N\n",
" seq = [(a,b)]\n",
" while c/d <= 1/k:\n",
" seq.append((c,d))\n",
" tmp = int(math.floor((N+b)/d))\n",
" a, b, c, d = c, d, tmp*c-a, tmp*d-b\n",
" return seq\n",
"\n",
"\n",
"@MemoizeMutable\n",
"def resonanceSequence(N, k):\n",
" \"\"\"\n",
" Compute resonance sequence\n",
"\n",
" Arguments:\n",
" - N (int): Order\n",
" - k (int): denominator of the farey frequency resonances are attached to\n",
" \"\"\"\n",
" a, b = 0, 1\n",
" c, d = k, N-k\n",
" seq = [(a,b)]\n",
" while d >= 0:\n",
" seq.append((c,d))\n",
" tmp = int(math.floor((N+b+a)/(d+c)))\n",
" a, b, c, d = c, d, tmp*c-a, tmp*d-b\n",
" return seq\n",
"\n",
"\n",
"\n",
"def rationalApproximation(points, N, tol=1e-3, lowest_order_only=True):\n",
" \"\"\"\n",
" Arguments:\n",
" points: 2D (L x 2) points to approximate\n",
" N: max order\n",
" \"\"\"\n",
" L,_ = points.shape\n",
"\n",
" # since this solutions assumes a>0, a 'quick' hack to also obtain solutions\n",
" # with a < 0 is to flip the dimensions of the points and explore those\n",
" # solutions as well\n",
" points = np.vstack((points, np.fliplr(points)))\n",
"\n",
" solutions = defaultdict(set)\n",
"\n",
" sequences = {1: set(fareySequence(1))}\n",
" for n in range(2, N+1):\n",
" sequences[n] = set(fareySequence(n)) - sequences[n-1]\n",
"\n",
" for h,k in fareySequence(N,1):\n",
" if 0 in (h,k):\n",
" continue\n",
" # print h,k\n",
" for x,y in resonanceSequence(N, k):\n",
"\n",
" # avoid 0-solutions\n",
" if 0 in (x,y):\n",
" continue\n",
"\n",
" norm = np.sqrt(x**2+y**2)\n",
"\n",
" n = np.array([ y/norm, x/norm]) * np.ones_like(points)\n",
" n[points[:,0] < h/k, 0] *= -1 # points approaching from the left\n",
"\n",
" # nomenclature inspired in http://en.wikipedia.org/wiki/Distance_from_a_point_to_a_line#Vector_formulation\n",
" ap = np.array([h/k, 0]) - points\n",
" apn = np.zeros((1,L))\n",
" d = np.zeros_like(points)\n",
"\n",
" apn = np.sum(n*ap, 1, keepdims=True)\n",
" d = ap - apn*n\n",
"\n",
" ## DON'T RETURN IMMEDIATELY; THERE MIGHT BE OTHER SOLUTIONS OF THE SAME ORDER OR HIGHER\n",
" indices, = np.nonzero(np.sqrt(np.sum(d*d,1)) <= tol)\n",
" for i in indices:\n",
" # print \"h/k:\", h , \"/\", k\n",
" # print \"point:\", points[i,:]\n",
" if points[i,0] >= h/k:\n",
" if i<L:\n",
" # print \"non-flipped >= h/k\"\n",
" solutions[i].add((x,-y, h*x/k))\n",
" # print i, (x,-y, h*x/k)\n",
" elif x*(-y)<0: # only consider solutions where (a,b) have different sign for the \"flipped\" points (the other solutions should have already been found for the non-flipped points)\n",
" # print \"flipped >= h/k\"\n",
" solutions[i-L].add((-y, x, h*x/k))\n",
" # print i-L, (-y, x, h*x/k)\n",
" else:\n",
" if i<L:\n",
" # print \"non-flipped < h/k\"\n",
" solutions[i].add((x, y, h*x/k))\n",
" # print i, (x, y, h*x/k)\n",
" elif x*y>0: # only consider solutions where (a,b) have different sign for the \"flipped\" points (the other solutions should have already been found for the non-flipped points)\n",
" # print \"flipped < h/k\"\n",
" solutions[i-L].add((y, x, h*x/k))\n",
" # print i-L, (y, x, h*x/k)\n",
"\n",
" if lowest_order_only:\n",
" # removed = 0\n",
" for k in solutions:\n",
" # keep lowest order solutions only\n",
" lowest_order = 2*N\n",
" s = set([])\n",
" for sol in solutions[k]:\n",
" K = abs(sol[0])+abs(sol[1])+abs(sol[2])\n",
" if K == lowest_order:\n",
" s.add(sol)\n",
" elif K < lowest_order:\n",
" lowest_order = K\n",
" # if len(s) > 0:\n",
" # print(\"point: ({},{}) -> removing {} for {}\".format(points[k,0], points[k,1], s, sol))\n",
" # removed += len(s)\n",
" s = set([sol])\n",
" solutions[k] = s\n",
" # print(\"Removed {} solutions\".format(removed))\n",
"\n",
" return solutions\n",
"\n",
"\n",
"@MemoizeMutable\n",
"def monomialsForVectors(fj, fi, allow_self_connect=True, N=5, tol=1e-10, lowest_order_only=False):\n",
" \"\"\"\n",
" Arguments:\n",
" fj (np.array_like): frequency vector of the source (j in the paper)\n",
" fi (np.array_like): frequency vector of the target (i in the paper)\n",
" N: max order\n",
" tol: tolerance in the point to line distance calculation\n",
" \"\"\"\n",
" from time import time\n",
" st = time()\n",
"\n",
" # use 32bits (64 can take too much memory)\n",
" fj = np.array([f for f in fj], dtype=np.float32)\n",
" fi = np.array([f for f in fi], dtype=np.float32)\n",
" Fj, Fi = len(fj), len(fi)\n",
"\n",
" cart_idx = cartesian((np.arange(Fj),\n",
" np.arange(Fj),\n",
" np.arange(Fi)))\n",
"\n",
" # we care only when y2 > y1\n",
" cart_idx = cart_idx[cart_idx[:,1]>cart_idx[:,0]]\n",
"\n",
" if not allow_self_connect:\n",
" cart_idx = cart_idx[(cart_idx[:,0] != cart_idx[:,2]) & (cart_idx[:,1] != cart_idx[:,2])]\n",
"\n",
" # actual frequency triplets\n",
" cart = np.vstack((fj[cart_idx[:,0]], fj[cart_idx[:,1]], fi[cart_idx[:,2]])).T\n",
" nr, _ = cart_idx.shape\n",
"\n",
" # sort in order to get a*x+b*y=c with 0<x,y<1\n",
" sorted_idx = np.argsort(cart, axis=1)\n",
" cart.sort()\n",
" print(\"a) Elapsed: {} secs\".format(time() - st))\n",
" all_points = np.zeros((nr, 2), dtype=np.float32)\n",
" all_points[:,0] = cart[:,0] / cart[:,2]\n",
" all_points[:,1] = cart[:,1] / cart[:,2]\n",
" # del cart\n",
" print(\"b) Elapsed: {} secs\".format(time() - st))\n",
"\n",
" redundancy_map = defaultdict(list)\n",
" for i in xrange(all_points.shape[0]):\n",
" redundancy_map[(all_points[i,0],all_points[i,1])].append(i)\n",
" del all_points\n",
" print(\"c) Elapsed: {} secs\".format(time() - st))\n",
"\n",
" points = np.array([[a,b] for a,b in redundancy_map])\n",
" print(\"d) Elapsed: {} secs\".format(time() - st))\n",
"\n",
" exponents = rationalApproximation(points, N, tol=tol, lowest_order_only=lowest_order_only)\n",
" print(\"e) Elapsed: {} secs\".format(time() - st))\n",
"\n",
"\n",
" monomials = [defaultdict(list) for x in fi]\n",
" M = namedtuple('Monomials', ['indices', 'exponents'])\n",
" # FIXME: is there a way to reduce the number of nested loops?\n",
" for k in exponents:\n",
" x, y = points[k,0], points[k,1]\n",
" all_points_idx = redundancy_map[(x,y)]\n",
" sols = exponents[k]\n",
" for a, b, c in sols:\n",
" for idx in all_points_idx:\n",
" j1, j2, i = (cart_idx[idx, 0], cart_idx[idx, 1], cart_idx[idx, 2])\n",
" reordered = (sorted_idx[idx,0], sorted_idx[idx,1], sorted_idx[idx,2])\n",
" if reordered == (0,1,2):\n",
" n1, n2, d = a, b, c\n",
" elif reordered == (0,2,1):\n",
" # n1, d, n2 = -a, b, c\n",
" n1, n2, d = -a, c, b\n",
" elif reordered == (2,0,1):\n",
" # d, n1, n2 = a, -b, c\n",
" n1, n2, d = -b, c, a\n",
" else:\n",
" raise Exception(\"Unimplemented order!\")\n",
" if d < 0:\n",
" n1, n2, d = -n1, -n2, -d\n",
" monomials[i]['j1'] += [j1]\n",
" monomials[i]['j2'] += [j2+len(fj)] # add offset for fast look-up at run time (will use a flattened array)\n",
" monomials[i]['i'] += [i+len(fj)+len(fi)] # add offset for fast look-up at run time (will use a flattened array)\n",
" monomials[i]['n1'] += [n1]\n",
" monomials[i]['n2'] += [n2]\n",
" # monomials[i]['d'] += [d]\n",
" monomials[i]['d'] += [d-1] # small optimization to avoid extra subtraction in every step when actually running the network\n",
"\n",
" # print i, (a,b,c), (n1, n2, d)\n",
"\n",
" print(\"f) Elapsed: {} secs\".format(time() - st))\n",
"\n",
" # make them 2d arrays for speed up\n",
" for i, m in enumerate(monomials):\n",
" I = np.array([m['j1'], m['j2'], m['i']], dtype=np.int).T\n",
" E = np.array([m['n1'], m['n2'], m['d']], dtype=np.int).T\n",
" monomials[i] = M(indices=I, exponents=E)\n",
" print(\"g) Elapsed: {} secs\".format(time() - st))\n",
"\n",
" return monomials"
],
"language": "python",
"metadata": {},
"outputs": [],
"prompt_number": 2
},
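{
"cell_type": "markdown",
"metadata": {},
"source": [
"A small check of the sequence generators (added for illustration, separate from the timing run below): `fareySequence(5)` should reproduce the standard Farey sequence F_5, while `resonanceSequence` is specific to this notebook, so its output is only printed for inspection."
]
},
{
"cell_type": "code",
"collapsed": false,
"input": [
"F5 = [(0,1), (1,5), (1,4), (1,3), (2,5), (1,2), (3,5), (2,3), (3,4), (4,5), (1,1)]\n",
"assert fareySequence(5) == F5\n",
"\n",
"print(resonanceSequence(5, 2))"
],
"language": "python",
"metadata": {},
"outputs": []
},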
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### Tests ..."
]
},
{
"cell_type": "code",
"collapsed": false,
"input": [
"from time import time\n",
"\n",
"fc = 2.0\n",
"num_oscs = 321\n",
"f1 = fc * np.logspace(-2.5, 2.5, num_oscs, base=2)\n",
"f2 = fc * np.logspace(-2.5, 2.5, num_oscs, base=2)\n",
"\n",
"start = time()\n",
"monomials = monomialsForVectors(f1, f2, N=10, tol=1e-10)\n",
"\n",
"print \"Run in {:.2f} seconds\".format(time()-start)\n",
"\n",
"# print monomials"
],
"language": "python",
"metadata": {},
"outputs": [
{
"output_type": "stream",
"stream": "stdout",
"text": [
"a) Elapsed: 4.45098590851 secs\n",
"b) Elapsed: 4.67161798477 secs"
]
},
{
"output_type": "stream",
"stream": "stdout",
"text": [
"\n",
"c) Elapsed: 29.6413450241 secs"
]
},
{
"output_type": "stream",
"stream": "stdout",
"text": [
"\n",
"d) Elapsed: 34.9806830883 secs"
]
},
{
"output_type": "stream",
"stream": "stdout",
"text": [
"\n",
"e) Elapsed: 51.7057318687 secs"
]
},
{
"output_type": "stream",
"stream": "stdout",
"text": [
"\n",
"f) Elapsed: 51.9102458954 secs"
]
},
{
"output_type": "stream",
"stream": "stdout",
"text": [
"\n",
"g) Elapsed: 51.9562869072 secs\n",
"Run in 53.68 seconds"
]
},
{
"output_type": "stream",
"stream": "stdout",
"text": [
"\n"
]
}
],
"prompt_number": 3
},
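{
"cell_type": "markdown",
"metadata": {},
"source": [
"Peek at the structure returned above (added for illustration). Each entry of `monomials` is a namedtuple holding two `(num_monomials x 3)` integer arrays: `indices` with columns `(j1, j2 + len(f1), i + len(f1) + len(f2))` (offsets for look-up in a flattened state array) and `exponents` with columns `(n1, n2, d-1)`."
]
},
{
"cell_type": "code",
"collapsed": false,
"input": [
"m0 = monomials[0]\n",
"print m0.indices.shape, m0.exponents.shape\n",
"print m0.indices[:5]\n",
"print m0.exponents[:5]"
],
"language": "python",
"metadata": {},
"outputs": []
},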
{
"cell_type": "code",
"collapsed": false,
"input": [],
"language": "python",
"metadata": {},
"outputs": []
}
],
"metadata": {}
}
]
}