@cheery
Last active November 10, 2019 13:23
Sympy funnies & Cubic hermite
from sympy import *
# https://people.maths.ox.ac.uk/hitchin/hitchinnotes/Geometry_of_surfaces/Chapter_3_Surfaces_in_R3.pdf
def vec_diff(vector, *args):
    return [diff(axis, *args) for axis in vector]
t, u = symbols('t u')
r = symbols('r', positive=True)
sphere = (r*sin(t)*sin(u), r*cos(t)*sin(u), r*cos(u))
sphere_dt = vec_diff(sphere, t)
sphere_du = vec_diff(sphere, u)
def dot(av, bv):
    return sum(a*b for a, b in zip(av, bv))
E = simplify(dot(sphere_dt, sphere_dt))
F = simplify(dot(sphere_dt, sphere_du))
G = simplify(dot(sphere_du, sphere_du))
print 'E =', E
print 'F =', F
print 'G =', G
v, w = symbols('v w')
equator = (v, pi/2.0)
equator_dv = vec_diff(equator, v)
fff = sqrt(E*equator_dv[0]**2 +
           2*F*equator_dv[0]*equator_dv[1] +
           G*equator_dv[1]**2).subs({t:equator[0], u:equator[1]})
print integrate(fff, (t, 0, 2*pi))
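# As a further check (a small added sketch, using only the E, F, G computed
# above): the same first fundamental form gives the surface area via the
# integral of sqrt(E*G - F**2) over the parameter domain. Since sin(u) >= 0
# on u in [0, pi], the area element reduces to r**2*sin(u) there, and the
# result should come out as 4*pi*r**2.
area_element = sqrt(E*G - F**2)   # r**2*Abs(sin(u)) in general
area = integrate(r**2*sin(u), (t, 0, 2*pi), (u, 0, pi))
print 'area element =', simplify(area_element)
print 'area =', area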
from sympy import *
import random
# Forward differencing provides a quick way
# to interpolate polynomial splines such as
# Bezier or Hermite curves.
# This CAS script helps with setting up a forward
# differencing scheme.
a0, a1, a2, a3, t, h = symbols('a0 a1 a2 a3 t h')
polynomial = a0 + a1*t + a2*t**2 + a3*t**3
# The goal is to get the differencing scheme independent of t.
# With a polynomial it turns into a set of summations.
d1 = simplify(polynomial.subs(t, t+h) - polynomial)
d2 = simplify(d1.subs(t, t+h) - d1)
d3 = simplify(d2.subs(t, t+h) - d2)
# You could recover the derivative from d1 by calculating
# the limit of d1/h as 'h' approaches 0.
print "d1 = {}".format(d1.subs(t, 0))
print "d2 = {}".format(d2.subs(t, 0))
print "d3 = {}".format(d3.subs(t, 0))
# At d3 we no longer have 't' in the equation.
# test!
spline = dict((var, random.random()) for var in [a0, a1, a2, a3])
steps = random.randint(100, 10000)
f0 = polynomial.subs(spline).subs({t: 0.0})
f1 = d1.subs(spline).subs({t: 0.0, h:1.0/steps})
f2 = d2.subs(spline).subs({t: 0.0, h:1.0/steps})
f3 = d3.subs(spline).subs({t: 0.0, h:1.0/steps})
print "f0={} f1={} f2={} f3={}".format(f0,f1,f2,f3)
# All of these values should be plain numbers at this point.
error = 0.0
for i in range(steps):
    x = polynomial.subs(spline).subs({t:1.0*i/steps})
    error += f0-x
    f0 += f1
    f1 += f2
    f2 += f3
print "error({1}) = {0:.32f}".format(error, steps)
from sympy import *
# https://www.rose-hulman.edu/~finn/CCLI/Notes/day09.pdf
a0, a1, a2, a3, t = symbols('a0 a1 a2 a3 t')
polynomial = a0 + a1*t + a2*t**2 + a3*t**3
polynomial_d = diff(polynomial, t)
p0, v0, p1, v1 = symbols('p0 v0 p1 v1')
spline = [
    Eq(p0, polynomial.subs(t, 0)),
    Eq(v0, polynomial_d.subs(t, 0)),
    Eq(p1, polynomial.subs(t, 1)),
    Eq(v1, polynomial_d.subs(t, 1))]
print '\n'.join(map(str, spline))
print 'solution:'
solution = solve(spline, (a0, a1, a2, a3))
for lhs, rhs in solution.items():
    print Eq(lhs, rhs)
cf = polynomial.subs(solution)
print 'c(t) =', cf
for label, var in [('h0', p0), ('h1', v0), ('h2', v1), ('h3', p1)]:
    cf = apart(cf, var)
    coeff = cf.coeff(var)
    cf = simplify(cf - coeff * var)
    print label, '=', coeff
assert cf == 0
# These would also "work", and be quicker. r
# But I wanted to extract coefficients safe rather than sorry.
#print 'h0 =', apart(cf, p0).coeff(p0)
#print 'h1 =', apart(cf, v0).coeff(v0)
#print 'h2 =', apart(cf, v1).coeff(v1)
#print 'h3 =', apart(cf, p1).coeff(p1)
#print 'h0 =', cf.subs({p0:1, v0:0, v1:0, p1:0})
#print 'h1 =', cf.subs({p0:0, v0:1, v1:0, p1:0})
#print 'h2 =', cf.subs({p0:0, v0:0, v1:1, p1:0})
#print 'h3 =', cf.subs({p0:0, v0:0, v1:0, p1:1})
# stdout
# p0 == a0
# v0 == a1
# p1 == a0 + a1 + a2 + a3
# v1 == a1 + 2*a2 + 3*a3
# solution:
# a3 == 2*p0 - 2*p1 + v0 + v1
# a0 == p0
# a2 == -3*p0 + 3*p1 - 2*v0 - v1
# a1 == v0
# c(t) = p0 + t**3*(2*p0 - 2*p1 + v0 + v1) + t**2*(-3*p0 + 3*p1 - 2*v0 - v1) + t*v0
# h0 = 2*t**3 - 3*t**2 + 1
# h1 = t**3 - 2*t**2 + t
# h2 = t**3 - t**2
# h3 = -2*t**3 + 3*t**2
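# The derived basis functions can then be used directly, without sympy, to
# evaluate a Hermite segment. A small added sketch (hermite_point is just an
# illustrative name, using the coefficients printed above with the same
# pairing h0->p0, h1->v0, h2->v1, h3->p1):
def hermite_point(p0, v0, p1, v1, t):
    h0 = 2.0*t**3 - 3.0*t**2 + 1.0   # coefficient of p0
    h1 = t**3 - 2.0*t**2 + t         # coefficient of v0
    h2 = t**3 - t**2                 # coefficient of v1
    h3 = -2.0*t**3 + 3.0*t**2        # coefficient of p1
    return h0*p0 + h1*v0 + h2*v1 + h3*p1

# the endpoints are recovered exactly:
print hermite_point(1.0, 0.5, 3.0, -0.5, 0.0)   # == p0
print hermite_point(1.0, 0.5, 3.0, -0.5, 1.0)   # == p1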
from sympy import *
# A small sample of how to solve an implicit differentiation problem.
# Given an implicit continuous equation, determine coordinates with horizontal tangents.
x, y = symbols('x y', real=True)
f = Eq(0, 8*(x**2 + y**2)**2 - 49*(x**2 - y**2))
# A horizontal tangent occurs where dy/dx = 0, and implicit differentiation
# gives dy/dx = -(d/dx fn)/(d/dy fn), so the x-partial must vanish on the curve.
# We solve this system of equations: 0 = fn, 0 = d/dx fn
for res in solve([f, Eq(0, diff(f.rhs, x))]):
    print (res[x], res[y])
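# As an added sanity-check sketch: at any returned point where the y-partial
# is nonzero, the implicit derivative dy/dx = -(d/dx fn)/(d/dy fn) should be
# zero. For example (7*sqrt(3)/8, 7/8) satisfies both equations above.
# (The origin also satisfies both equations, but there the y-partial vanishes
# too, so it is a singular point rather than a horizontal tangent.)
F = f.rhs
dydx = -diff(F, x) / diff(F, y)
print simplify(dydx.subs({x: sqrt(3)*Rational(7, 8), y: Rational(7, 8)}))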
import sympy
from sympy import *
t = symbols('t')
function = (t, t**2)
print "C(t) =", function
print "sample t=0.0", [axis.subs(t, 0.0) for axis in function]
print "sample t=0.5", [axis.subs(t, 0.5) for axis in function]
print "sample t=1.0", [axis.subs(t, 1.0) for axis in function]
def curvature_2d((xf, yf)):
    x_d1 = diff(xf, t)
    x_d2 = diff(x_d1, t)
    y_d1 = diff(yf, t)
    y_d2 = diff(y_d1, t)
    return (x_d1*y_d2 - y_d1*x_d2) / sqrt(x_d1**2 + y_d1**2)**3.0
kf = curvature_2d(function)
print "k(t) =", kf
print "sample k(0.0) =", kf.subs(t, 0.0)
print "sample k(0.5) =", kf.subs(t, 0.5)
print "sample k(1.0) =", kf.subs(t, 1.0)
mec = Integral(kf, (t, -1.0, +1.0)).evalf()
print "mec =", mec
mvcc = Integral(diff(kf, t), (t, -1.0, +1.0)).evalf()
print "mvcc =", mvcc
x_d = diff(function[0], t)
y_d = diff(function[1], t)
length = Integral(sqrt(x_d**2 + y_d**2), (t, -1.0, +1.0)).evalf()
print "length =", length
print ccode(kf)
print ccode(diff(kf, t))
print ccode(sqrt(x_d**2 + y_d**2))
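# Instead of subs() per sample (or pasting the generated C code into another
# program), the same expressions can be compiled into plain Python callables
# with lambdify. A small added sketch of that route:
k_fn = lambdify(t, kf, 'math')
speed_fn = lambdify(t, sqrt(x_d**2 + y_d**2), 'math')
for i in range(5):
    ti = -1.0 + 2.0*i/4
    print "t={} k={} speed={}".format(ti, k_fn(ti), speed_fn(ti))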
from sympy import *
x = symbols('x')
# Gradient descent would usually operate on
# multiple variables, but here let's demonstrate it
# on something simple.
# With a bigger step it converges faster, but
# too large a step may prevent the algorithm from converging.
step = 0.05
precision = 0.00001
# The sine here gives it more than one point to converge to.
f = (1-x)**2*0.3 + x**4 + sin(x*10)*0.3
f_dx = diff(f, x) # this is the 'gradient':
# the gradient is a vector containing the partial derivative
# with respect to every variable you optimize over.
# Starting guesses
for x_new in [-1.5, 0.0, 0.5, 1.5]:
    x_old = x_new - 2*precision # to ensure the while loop always runs.
    y_old = y_new = f.subs(x, x_new)
    while abs(x_new - x_old) > precision:
        x_old = x_new
        y_old = y_new
        # For every variable and differential, we plug the current
        # values into the differential to determine how the value
        # changes if the variable changes. Then we nudge every
        # variable towards the low point a little bit.
        x_new = x_old - f_dx.subs(x, x_old) * step
        y_new = f.subs(x, x_new)
        if y_old <= y_new:
            print "stopped converging"
            x_new = x_old
            y_new = y_old
            break
    else:
        print 'local minimum at: x={} y={}'.format(x_new, y_new)
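# A small added sketch of the same idea with two variables, since that is the
# more usual setting: the gradient is now the pair of partial derivatives and
# each variable is nudged by its own component. The bowl-shaped function g
# below is just an arbitrary example; step and precision are reused from above.
y = symbols('y')
g = (x - 1)**2 + 2*(y + 0.5)**2
g_dx = diff(g, x)
g_dy = diff(g, y)
xn, yn = 0.0, 0.0
for i in range(200):
    step_x = g_dx.subs({x: xn, y: yn}) * step
    step_y = g_dy.subs({x: xn, y: yn}) * step
    xn -= step_x
    yn -= step_y
    if abs(step_x) < precision and abs(step_y) < precision:
        break
print 'local minimum near: x={} y={} g={}'.format(xn, yn, g.subs({x: xn, y: yn}))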