Skip to content

Instantly share code, notes, and snippets.

@suhaskv
Created January 1, 2021 18:13
Show Gist options
  • Save suhaskv/1cd7e23fbd30539f57d948c16bd11251 to your computer and use it in GitHub Desktop.
VSB Power Line Blog - Higuchi FD
def _linear_regression(x, y):
n_times = x.size
sx2 = 0
sx = 0
sy = 0
sxy = 0
for j in range(n_times):
sx2 += x[j] ** 2
sx += x[j]
sxy += x[j] * y[j]
sy += y[j]
den = n_times * sx2 - (sx ** 2)
num = n_times * sxy - sx * sy
slope = num / den
intercept = np.mean(y) - slope * np.mean(x)
return slope, intercept
def _higuchi_fd(x, kmax):
    """Core Higuchi fractal-dimension computation (Higuchi, 1988).

    For each interval ``k`` in ``1..kmax``, builds the mean normalized
    curve length ``L(k)`` over the ``k`` possible starting offsets, then
    estimates the fractal dimension as the slope of ``log(L(k))`` versus
    ``log(1/k)``.

    Parameters
    ----------
    x : np.ndarray
        One-dimensional signal (float64).
    kmax : int
        Maximum interval ``k``.

    Returns
    -------
    float
        Higuchi fractal dimension estimate.
    """
    n_times = x.size
    lk = np.empty(kmax)
    x_reg = np.empty(kmax)
    y_reg = np.empty(kmax)
    for k in range(1, kmax + 1):
        lm = np.empty((k,))
        for m in range(k):
            # Curve length for the subseries starting at offset m,
            # sampled every k points.
            ll = 0
            # Number of usable increments for this (m, k) pair.
            # np.floor replaces the bare `floor` of the original, whose
            # import was not present in this file.
            n_max = int(np.floor((n_times - m - 1) / k))
            for j in range(1, n_max):
                ll += abs(x[m + j * k] - x[m + (j - 1) * k])
            ll /= k
            # Normalization factor from Higuchi's definition.
            ll *= (n_times - 1) / (k * n_max)
            lm[m] = ll
        # Mean of lm over the k starting offsets.
        m_lm = 0
        for m in range(k):
            m_lm += lm[m]
        m_lm /= k
        lk[k - 1] = m_lm
        # Regression points: log(1/k) vs log(L(k)); slope is the FD.
        x_reg[k - 1] = np.log(1. / k)
        y_reg[k - 1] = np.log(m_lm)
    higuchi, _ = _linear_regression(x_reg, y_reg)
    return higuchi
def higuchi_fd(x, kmax=10):
    """Public entry point: Higuchi fractal dimension of a 1-D signal.

    Coerces the input to a float64 NumPy array and the interval bound to
    an ``int`` before delegating to :func:`_higuchi_fd`.

    Parameters
    ----------
    x : array_like
        One-dimensional time series.
    kmax : int, optional
        Maximum interval ``k`` (default 10).

    Returns
    -------
    float
        Higuchi fractal dimension estimate.
    """
    signal = np.asarray(x, dtype=np.float64)
    return _higuchi_fd(signal, int(kmax))
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment