import matplotlib.pyplot as plt
import torch

def laplacian_per(f, dx, dy):
    # 5-point Laplacian with periodic boundary conditions: pad each edge with the
    # opposite edge, then take centered second differences on the interior.
    f_per = torch.cat([f[..., [-1]], f, f[..., [0]]], dim=-1)
    f_per = torch.cat([f_per[..., [-1], :], f_per, f_per[..., [0], :]], dim=-2)
    return ((f_per[..., 2:, 1:-1] + f_per[..., :-2, 1:-1] - 2*f_per[..., 1:-1, 1:-1]) / dx**2
          + (f_per[..., 1:-1, 2:] + f_per[..., 1:-1, :-2] - 2*f_per[..., 1:-1, 1:-1]) / dy**2)

xmin = 0.0
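# Hedged usage sketch (not part of the original gist): check laplacian_per against
# the analytic Laplacian of a doubly periodic field. The grid sizes and the test
# field below are assumptions chosen for illustration.
nx, ny = 64, 64
dx, dy = 2*torch.pi/nx, 2*torch.pi/ny
X, Y = torch.meshgrid(torch.arange(nx, dtype=torch.float64) * dx,
                      torch.arange(ny, dtype=torch.float64) * dy,
                      indexing='ij')
field = torch.sin(X) * torch.cos(Y)
lap_exact = -2 * field                      # Laplacian of sin(x)cos(y) is -2 sin(x)cos(y)
lap_num = laplacian_per(field, dx, dy)
print((lap_num - lap_exact).abs().max())    # second-order truncation error expected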
import torch
import scipy.fftpack
import numpy as np

np.set_printoptions(precision=4, linewidth=200)

N = 8
x = torch.DoubleTensor(N).normal_()
exp_vec_1 = 2 * torch.exp(-1j * torch.pi * torch.arange(N) / (2*N))
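# Hedged continuation sketch (the gist preview is truncated here): the twiddle factor
# exp_vec_1 matches Makhoul's trick for computing the type-II DCT from a plain FFT, so
# one plausible next step is a comparison against scipy.fftpack.dct; this is an assumption.
v = torch.cat([x[0::2], x[1::2].flip(0)])        # even-index samples, then reversed odd-index samples
dct_fft = (exp_vec_1 * torch.fft.fft(v)).real    # y[k] = 2 * Re(exp(-i*pi*k/(2N)) * FFT(v)[k])
dct_ref = scipy.fftpack.dct(x.numpy(), type=2)   # unnormalized type-II DCT for reference
print(np.abs(dct_fft.numpy() - dct_ref).max())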
import numpy as np
import matplotlib.pyplot as plt

x = 1.3
f = np.exp    # test function
fp = np.exp   # its exact derivative (d/dx exp = exp)
errs2, errs4 = [], []
dxs = np.linspace(1e-4, 1e-1, 200)
for dx in dxs:
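    # Hedged continuation (the preview cuts off inside the loop): the names errs2/errs4
    # suggest a comparison of second- and fourth-order centered differences; the formulas
    # and the log-log plot below are assumptions.
    d2 = (f(x + dx) - f(x - dx)) / (2*dx)                                     # 2nd-order centered difference
    d4 = (-f(x + 2*dx) + 8*f(x + dx) - 8*f(x - dx) + f(x - 2*dx)) / (12*dx)   # 4th-order centered difference
    errs2.append(abs(d2 - fp(x)))
    errs4.append(abs(d4 - fp(x)))
plt.loglog(dxs, errs2, label='2nd order')
plt.loglog(dxs, errs4, label='4th order')
plt.legend()
plt.show()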
import torch
import torch.nn.functional as F

def compute_laplace_dst(nx, ny, dx, dy, arr_kwargs):
    """Discrete sine transform of the 2D centered discrete Laplacian operator."""
    x, y = torch.meshgrid(torch.arange(1, nx-1, **arr_kwargs),
                          torch.arange(1, ny-1, **arr_kwargs),
                          indexing='ij')
    return (2*(torch.cos(torch.pi/(nx-1)*x) - 1)/dx**2
          + 2*(torch.cos(torch.pi/(ny-1)*y) - 1)/dy**2)
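# Hedged usage sketch (not in the original gist): per the docstring, the returned grid
# is the DST of the 5-point Laplacian, i.e. its eigenvalues under Dirichlet boundaries,
# as used in DST-based Poisson solvers. The sizes below are illustrative assumptions.
nx, ny, dx, dy = 65, 65, 1.0, 1.0
lap_dst = compute_laplace_dst(nx, ny, dx, dy, {'dtype': torch.float64})
print(lap_dst.shape)   # (nx-2, ny-2): one value per interior grid point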
import torch
import matplotlib.pyplot as plt

inp = torch.FloatTensor(1, 1, 32, 32).uniform_(-1, 1)
plt.imshow(inp[0, 0])
plt.show()

# Gaussian kernel
gauss_ker_7 = torch.FloatTensor(1, 1, 7, 7)
x, y = torch.meshgrid(torch.linspace(-3, 3, 7), torch.linspace(-3, 3, 7), indexing='xy')
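# Hedged continuation (the preview stops at the meshgrid): a plausible next step is to
# fill the 7x7 Gaussian kernel and smooth the image with F.conv2d. The standard
# deviation, normalization, and padding choices are assumptions.
import torch.nn.functional as F
sigma = 1.0
gauss_ker_7[0, 0] = torch.exp(-(x**2 + y**2) / (2 * sigma**2))
gauss_ker_7 /= gauss_ker_7.sum()                    # normalize the kernel to sum to 1
smoothed = F.conv2d(inp, gauss_ker_7, padding=3)    # 'same' output size for a 7x7 kernel
plt.imshow(smoothed[0, 0])
plt.show()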
"""DST I using FFT routines, Louis Thiry | |
Method 1 is 'naive' and used FFTs with twice bigger input signal. | |
Method 2 is more sophisticated and used iRFFT with half the input signal size. | |
The naive method 1 seems however to be more efficient, and JIT compilation is not key. | |
""" | |
import numpy as np | |
import scipy.fftpack | |
import torch | |
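# Hedged sketch of 'method 1' from the docstring (not the gist's actual code): the DST-I
# of a signal can be read off the FFT of its odd extension, which is twice as large.
# The index conventions below are assumptions, checked against scipy.fftpack.dst(type=1).
def dst1_naive(x):
    n = x.shape[-1]
    zero = torch.zeros_like(x[..., :1])
    ext = torch.cat([zero, x, zero, -x.flip(-1)], dim=-1)   # odd extension of length 2*(n+1)
    return -torch.fft.fft(ext, dim=-1).imag[..., 1:n+1]

sig = torch.DoubleTensor(16).normal_()
print(np.abs(dst1_naive(sig).numpy() - scipy.fftpack.dst(sig.numpy(), type=1)).max())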
# 58.4% accuracy with a k-nearest-neighbor classifier on CIFAR.
# Images are whitened and normalized.
import os
import pickle

import numpy as np
from sklearn.neighbors import KNeighborsClassifier

def compute_whitening_op(X, reg=0.1):
    X = X.astype('float64')
    mean = X.mean(axis=0, keepdims=True)
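    # Hedged continuation (the preview stops here): one common way to finish a ZCA-style
    # whitening operator is an eigendecomposition of the covariance, regularized by `reg`.
    # The exact normalization used in the original gist is unknown.
    Xc = X - mean
    cov = Xc.T @ Xc / Xc.shape[0]
    eigvals, eigvecs = np.linalg.eigh(cov)
    whitening = eigvecs @ np.diag(1.0 / np.sqrt(eigvals + reg)) @ eigvecs.T
    return mean, whitening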
"""Python implementation of the Angular Fourier Series descriptors defined in the paper | |
'On representing chemical environments', DOI: https://doi.org/10.1103/PhysRevB.87.184115 | |
""" | |
import argparse | |
import os | |
import numpy as np | |
import scipy | |
import scipy.spatial as spatial | |
from mpl_toolkits.mplot3d import axes3d # noqa: f401 unused import |
import scipy.spatial as spatial
import numpy as np

# 1024 random points in the unit cube
configuration = np.random.rand(1024, 3)
point_tree = spatial.cKDTree(configuration)

r_cut = 0.5
i_atom = 0
neighbors_indices = point_tree.query_ball_point(configuration[i_atom], r_cut)
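# Hedged follow-up (not in the original gist): query_ball_point also returns the query
# atom itself when it belongs to the point set, so it is usually removed before
# computing neighbor distances.
neighbors_indices = [j for j in neighbors_indices if j != i_atom]
rel = configuration[neighbors_indices] - configuration[i_atom]
distances = np.linalg.norm(rel, axis=1)
print(len(neighbors_indices), distances)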
import torch

def sinkhorn_logsumexp(cost_matrix, reg=1e-1, maxiter=30, momentum=0.):
    """Log-domain version of the Sinkhorn distance algorithm (https://arxiv.org/abs/1306.0895).

    Inspired by https://github.com/gpeyre/SinkhornAutoDiff/blob/master/sinkhorn_pointcloud.py .
    """
    m, n = cost_matrix.size()
    mu = torch.FloatTensor(m).fill_(1./m)
    nu = torch.FloatTensor(n).fill_(1./n)
    if torch.cuda.is_available():
        mu, nu = mu.cuda(), nu.cuda()
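    # Hedged continuation (the preview stops here): a standard log-domain Sinkhorn loop,
    # following the SinkhornAutoDiff reference cited above. How `momentum` is used in the
    # original gist is unknown, so it is ignored in this sketch.
    def M(u, v):
        # modified cost in the log domain: (-C + u 1^T + 1 v^T) / reg
        return (-cost_matrix + u.unsqueeze(1) + v.unsqueeze(0)) / reg

    u, v = torch.zeros_like(mu), torch.zeros_like(nu)
    for _ in range(maxiter):
        u = reg * (torch.log(mu) - torch.logsumexp(M(u, v), dim=1)) + u
        v = reg * (torch.log(nu) - torch.logsumexp(M(u, v), dim=0)) + v
    pi = torch.exp(M(u, v))                 # approximate transport plan
    return torch.sum(pi * cost_matrix)      # Sinkhorn cost <pi, C>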