Skip to content

Instantly share code, notes, and snippets.

@0x0L
Last active January 26, 2024 13:04
Show Gist options
  • Save 0x0L/1d6c75f53b779b059cb034df89948213 to your computer and use it in GitHub Desktop.
Fast moving window regressions
import numba
import numpy as np
@numba.jit(nopython=True, fastmath=True)
def move_regress(X, Y, w, fit_intercept=True):
    """Moving window multi-regressions.

    Solves the least-squares problems
    `Y[t-w+1:t+1] = X[t-w+1:t+1] @ B[t] + A[t]`
    for `B` and `A` for all `t` in `[w-1, len(Y)-1]`.

    Instead of refitting each window from scratch, the inverse Gram matrix
    `K` and the cross-moment matrix `H` of the current window are updated
    with rank-one (Sherman-Morrison) corrections as one sample enters and
    one leaves the window.

    Parameters
    ----------
    X : (T, N) ndarray
        Input array.
    Y : (T, M) ndarray
        Response array.
    w : int
        The number of elements in the moving window.
    fit_intercept : boolean, optional
        Whether to include the intercept `A_t` in the equations.

    Returns
    -------
    B : (T, N, M) ndarray
        The moving coefficients. Rows `0..w-2` are NaN (window not full).
    A : (T, M) ndarray or None
        The moving intercepts if `fit_intercept` is True, otherwise None
        (rows `0..w-2` are NaN as well).
    """
    if fit_intercept:
        # Means of the first window; H and C are the *centered*
        # cross-moment and Gram matrices of that window.
        mX = X[:w].sum(0) / w
        mY = Y[:w].sum(0) / w
        H = (X[:w] - mX).T @ (Y[:w] - mY)
        C = (X[:w] - mX).T @ (X[:w] - mX)
        # Rescale the means by sqrt(w) (mX becomes sum/sqrt(w)) so the
        # centering corrections below are plain outer products with no
        # extra factor of w.
        q = w ** -0.5
        mX /= q
        mY /= q
    else:
        # No intercept: work with raw (uncentered) moments.
        H = X[:w].T @ Y[:w]
        C = X[:w].T @ X[:w]
    # K approximates inv(C); pinv keeps the first step well-defined even if
    # the initial window is rank-deficient. Contiguous layout for numba.
    K = np.ascontiguousarray(np.linalg.pinv(C))
    B = np.full(X.shape + Y.shape[1:], np.nan)
    B[w-1] = K @ H
    if fit_intercept:
        A = np.full(Y.shape, np.nan)
        # mX and mY are scaled by sqrt(w) here; the compensating factor
        # is applied once at the end (`A *= q`).
        A[w-1] = mY - mX @ B[w-1]
    for i in range(w, len(B)):
        x_in, y_in, x_out, y_out = X[i], Y[i], X[i-w], Y[i-w]
        if fit_intercept:
            # Remove the old-mean centering correction (back to raw
            # moments)...
            z = K @ mX
            H += np.outer(mX, mY)
            K -= np.outer(z, z) / (1.0 + mX @ z)
            # ...shift the scaled means by the entering/leaving samples...
            mX += q * (x_in - x_out)
            mY += q * (y_in - y_out)
            # ...then re-apply the centering correction with the new means.
            z = K @ mX
            H -= np.outer(mX, mY)
            K += np.outer(z, z) / (1.0 - mX @ z)
        # Sherman-Morrison update for the sample entering the window...
        z = K @ x_in
        H += np.outer(x_in, y_in)
        K -= np.outer(z, z) / (1.0 + x_in @ z)
        # ...and downdate for the sample leaving it.
        z = K @ x_out
        H -= np.outer(x_out, y_out)
        K += np.outer(z, z) / (1.0 - x_out @ z)
        B[i] = K @ H
        if fit_intercept:
            A[i] = mY - mX @ B[i]
    if not fit_intercept:
        return B, None
    # Undo the sqrt(w) scaling of the means used during the updates.
    A *= q
    return B, A
# --- Demo / sanity check against scikit-learn -------------------------------
# NOTE: this snippet was written for an IPython/Jupyter session; the
# `%timeit` benchmarking lines are IPython magics (invalid in plain Python),
# so they are kept as comments to leave the file importable/runnable.
from sklearn.linear_model import LinearRegression

# Synthetic linear data: Y = X @ B + A + noise.
X = np.random.randn(1000, 5)
B = np.random.randn(5, 3)
A = np.random.randn(1, 3)
err = 0.1 * np.random.randn(1000, 3)
Y = X @ B + A + err

# With intercept: window ending at t = 104 covers rows [5, 105); compare
# against a direct sklearn fit on that same slice.
b, a = move_regress(X, Y, 100)
# %timeit b, a = move_regress(X, Y, 100)
clf = LinearRegression().fit(X[5:105], Y[5:105])
assert np.allclose(clf.coef_, b[99 + 5].T)
assert np.allclose(clf.intercept_, a[99 + 5])

# Without intercept: intercepts slot is None, coefficients still match.
b, _ = move_regress(X, Y, 100, fit_intercept=False)
# %timeit b, _ = move_regress(X, Y, 100, fit_intercept=False)
clf = LinearRegression(fit_intercept=False).fit(X[5:105], Y[5:105])
assert np.allclose(clf.coef_, b[99 + 5].T)
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment