mocquin / matplotlib_add_context_menu.py
Created March 26, 2024 14:56
Add context menu on right-click in matplotlib
%matplotlib qt
import matplotlib.pyplot as plt
from matplotlib.backends.qt_compat import QtCore, QtWidgets
import numpy as np
def on_button_release(event, ax):
    if event.button != 3:  # Only react to right-clicks (button 3).
        return
    if event.inaxes is None:  # Ignore clicks outside the axes.
        return
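The preview stops before the menu is built. A minimal, self-contained sketch of how the handler and its wiring might look; the action names, the QtGui import and the connection code are illustrative assumptions, not the gist's own code:
from matplotlib.backends.qt_compat import QtGui

def on_button_release(event, ax):
    if event.button != 3 or event.inaxes is None:
        return
    menu = QtWidgets.QMenu()
    autoscale_action = menu.addAction("Autoscale")
    grid_action = menu.addAction("Toggle grid")
    # Pop the menu up at the cursor; use menu.exec(...) with Qt6 bindings.
    chosen = menu.exec_(QtGui.QCursor.pos())
    if chosen is autoscale_action:
        ax.autoscale()
    elif chosen is grid_action:
        ax.grid()
    event.canvas.draw_idle()

fig, ax = plt.subplots()
ax.plot(np.random.randn(100))
fig.canvas.mpl_connect("button_release_event", lambda e: on_button_release(e, ax))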
mocquin / quantile.py
Last active February 16, 2024 12:57
Quantile Regressor interaction
%matplotlib ipympl
import numpy as np
import matplotlib.pyplot as plt
import ipywidgets as widgets
from IPython.display import display
def symmetrize_xy(ax, ref="min"):
    x_min, x_max = ax.get_xlim()
    y_min, y_max = ax.get_ylim()
    x_range = x_max - x_min
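The function is cut off by the preview. One plausible completion, assuming ref selects whether the smaller or the larger of the two axis spans is applied to both axes:
def symmetrize_xy(ax, ref="min"):
    x_min, x_max = ax.get_xlim()
    y_min, y_max = ax.get_ylim()
    x_range = x_max - x_min
    y_range = y_max - y_min
    # Assumption: ref="min" shrinks both axes to the smaller span, ref="max" grows them.
    target = min(x_range, y_range) if ref == "min" else max(x_range, y_range)
    x_mid, y_mid = (x_min + x_max) / 2, (y_min + y_max) / 2
    ax.set_xlim(x_mid - target / 2, x_mid + target / 2)
    ax.set_ylim(y_mid - target / 2, y_mid + target / 2)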
mocquin / regression3.py
Created February 12, 2024 19:31
regression3.py
%matplotlib qt
import numpy as np
import pandas as pd
import seaborn as sns
from sklearn.datasets import make_regression
X, y = make_regression(n_samples=100, n_features=1, noise=10, random_state=42)
# Apply a more skewed transformation to the target values (e.g., square root)
y = np.sqrt(np.abs(y)) * X[:, 0]**2
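A likely next step, not shown in the preview, is to visualise the skewed target; this snippet only illustrates that idea and is not taken from the gist:
import matplotlib.pyplot as plt
df = pd.DataFrame({"x": X[:, 0], "y": y})
sns.scatterplot(data=df, x="x", y="y")
plt.title("Skewed synthetic regression target")
plt.show()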
mocquin / regression2.py
Created February 12, 2024 19:30
regression2.py
%matplotlib qt
import numpy as np
import pandas as pd
import seaborn as sns
from sklearn.datasets import make_regression
from sklearn.dummy import DummyRegressor
# Create a synthetic dataset for regression
X, y = make_regression(n_samples=100, n_features=1, noise=10, random_state=42)
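The preview ends at the dataset. A sketch of the kind of baseline comparison the DummyRegressor import suggests; the strategies and plotting choices are illustrative assumptions:
import matplotlib.pyplot as plt
plt.scatter(X[:, 0], y, s=10, alpha=0.5, label="data")
for strategy in ("mean", "median"):
    dummy = DummyRegressor(strategy=strategy).fit(X, y)
    plt.plot(X[:, 0], dummy.predict(X), label=f"DummyRegressor('{strategy}')")
plt.legend()
plt.show()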
mocquin / regression1.py
Created February 12, 2024 19:17
regression1.py
%matplotlib qt
import matplotlib.pyplot as plt
from sklearn.model_selection import train_test_split
from sklearn.datasets import make_regression
from sklearn.dummy import DummyRegressor
# We first create a toy dataset, with 100 samples and a single feature
X, y = make_regression(n_samples=100, n_features=1, noise=10, random_state=0)
# Split the dataset into train/test sets
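A hedged completion of the truncated preview; the test size and baseline strategy are assumptions, not the gist's own values:
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=0)
dummy = DummyRegressor(strategy="mean").fit(X_train, y_train)
print("Baseline R^2 on the test set:", dummy.score(X_test, y_test))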
mocquin / classif1.py
Created February 12, 2024 18:32
classif1.py
%matplotlib qt
from sklearn.dummy import DummyClassifier
import pandas as pd
import seaborn as sns
# Load the penguins dataset from seaborn
penguins_df = sns.load_dataset("penguins")
# Split the dataset into features (X) and target variable (y)
X = penguins_df[['bill_length_mm', 'bill_depth_mm', 'flipper_length_mm', 'body_mass_g']]
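A hedged continuation of the truncated preview; dropping rows with missing measurements is an assumption about how the gist handles NaNs:
penguins_df = penguins_df.dropna(subset=['bill_length_mm', 'bill_depth_mm',
                                         'flipper_length_mm', 'body_mass_g'])
X = penguins_df[['bill_length_mm', 'bill_depth_mm', 'flipper_length_mm', 'body_mass_g']]
y = penguins_df['species']
clf = DummyClassifier(strategy="most_frequent").fit(X, y)
print("Majority-class accuracy:", clf.score(X, y))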
# From the scipy.ndimage.convolve docstring:
# > Each value in result is C_i = \sum_j{I_{i+k-j} W_j}, where
# > W is the `weights` kernel,
# > j is the N-D spatial index over W,
# > I is the `input` and k is the coordinate of the center of
# > W, specified by `origin` in the input parameters.
# Hence the kernel has to be centered when it is padded to the image size.
vpad = (V - N)//2 + N % 2, (V - N)//2
hpad = (H - N)//2 + N % 2, (H - N)//2
kernel_padded_centered = np.pad(kernel, (vpad, hpad))
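PeriodicArray itself is not part of the captured snippets. As an assumption about its role, a minimal stand-in could wrap a 2-D array and tile it when plotting, so that seams at the periodic boundary become visible:
import numpy as np
import matplotlib.pyplot as plt

class PeriodicArray:
    def __init__(self, arr):
        self.arr = np.asarray(arr)

    def plot(self, ax=None):
        ax = plt.gca() if ax is None else ax
        tiled = np.tile(self.arr, (3, 3))  # repeat the array periodically
        ax.imshow(tiled, cmap="gray")
        # Mark the central period so the boundaries are easy to spot.
        v, h = self.arr.shape
        for y in (v, 2 * v):
            ax.axhline(y, color="r", lw=0.5)
        for x in (h, 2 * h):
            ax.axvline(x, color="r", lw=0.5)
        return ax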
# in order to multiply the fourier transforms, we need to pad
# the kernel to the same size as the image
kernel_padded = np.pad(kernel, ((0, V-N), (0, H-N)))
fft_convolved = np.fft.ifft2(np.fft.fft2(img) * np.fft.fft2(kernel_padded))
p_fft = PeriodicArray(fft_convolved.real)
p_fft.plot()
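A sanity check that is not in the captured snippet, assuming the img, kernel and N defined in the neighbouring lines: the FFT product is a circular convolution with the kernel anchored at the top-left corner, so it matches mode='wrap' once the result is shifted back by the kernel centre.
import scipy.ndimage
k = N // 2
wrapped = scipy.ndimage.convolve(img, kernel, mode='wrap')
assert np.allclose(np.roll(fft_convolved.real, (-k, -k), axis=(0, 1)), wrapped)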
N = 15
kernel = np.ones((N, N))/N**2
fig, axes = plt.subplots(2, 2)
p_reflect = PeriodicArray(scipy.ndimage.convolve(img, kernel, mode='reflect'))
p_wrap = PeriodicArray(scipy.ndimage.convolve(img, kernel, mode='wrap'))
p_nearest = PeriodicArray(scipy.ndimage.convolve(img, kernel, mode='nearest'))
p_constant = PeriodicArray(scipy.ndimage.convolve(img, kernel, mode='constant'))
p_reflect.plot(axes[0, 0]); axes[0, 0].set_title("mode='reflect'")
p_wrap.plot(axes[0, 1]); axes[0, 1].set_title("mode='wrap'")
p_nearest.plot(axes[1, 0]); axes[1, 0].set_title("mode='nearest'")
p_constant.plot(axes[1, 1]); axes[1, 1].set_title("mode='constant'")
# V and H must be big enough that the middle of the image looks the same
# whatever the boundary mode, but small enough that the differences at the
# boundaries remain visible.
V, H = 100, 150
img = np.random.randn(V, H)
img += np.linspace(0, 5, V)[..., np.newaxis]      # vertical gradient
img += np.sin(2*np.pi*1/75*np.arange(H))*5        # horizontal sine pattern
d = 3
img[V//2-d:V//2+d, H//2-d:H//2+d] += 5            # small bright square in the centre
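For a quick look at the synthetic test image itself (illustrative only):
import matplotlib.pyplot as plt
plt.imshow(img, cmap="gray")
plt.title("Synthetic test image")
plt.colorbar()
plt.show()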