Julien Jerphanion (jjerphan)

jjerphan / sklearn_19418_bench.py
from sklearn.feature_selection import mutual_info_regression, mutual_info_classif
from sklearn.neighbors import KernelDensity, NearestNeighbors
from .common import Benchmark
from sklearn.datasets import make_classification, make_regression
class RCBenchmarks(Benchmark):
    param_names = ['n', 'd']
    params = (
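The preview above cuts off at the parameter grid. As a rough, standalone sketch of the kind of measurement such a benchmark performs (sizes and names below are illustrative, not taken from the gist):

# Hypothetical sketch: time mutual_info_regression on synthetic data
# for a few problem sizes.
import time

from sklearn.datasets import make_regression
from sklearn.feature_selection import mutual_info_regression

for n, d in [(1_000, 10), (10_000, 10), (10_000, 50)]:
    X, y = make_regression(n_samples=n, n_features=d, random_state=0)
    start = time.perf_counter()
    mutual_info_regression(X, y, random_state=0)
    print(f"n={n}, d={d}: {time.perf_counter() - start:.3f}s")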
jjerphan / mismatch.py
Created Apr 23, 2021
Exploration of scikit-learn#19952
import numpy as np
from scipy.linalg import cho_solve, cholesky
from sklearn.gaussian_process.kernels import RBF
from sklearn.datasets import make_regression
from sklearn.model_selection import train_test_split
kernel = RBF(length_scale=1.0)
X, y = make_regression()
X_train, X_test, y_train, y_test = train_test_split(X, y)
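The preview ends at the train/test split. A hedged continuation of the snippet, assuming the imported cholesky/cho_solve are meant to reproduce the Gaussian-process solve by hand (variable names below are illustrative):

# Hypothetical continuation: solve K @ alpha = y_train via a Cholesky
# factorization of the RBF Gram matrix, then predict on the test split.
K = kernel(X_train)                               # (n_train, n_train) Gram matrix
L = cholesky(K + 1e-10 * np.eye(K.shape[0]), lower=True)
alpha = cho_solve((L, True), y_train)
K_star = kernel(X_test, X_train)                  # cross-covariance
y_pred = K_star @ alpha
print(y_pred.shape, y_pred.dtype)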
jjerphan / binary_tree_bench.py
Created Apr 15, 2021
Simple benchmark for `sklearn.neighbors.BinaryTree`
import numpy as np
from sklearn.neighbors import KDTree, BallTree
from .common import Benchmark
class BinaryTreeStatsBenchmark(Benchmark):
    """
    Base class for BinaryTree benchmarks for removing statistics.
    """
jjerphan / multi_dot_logreg.py
Last active Mar 12, 2021
Benchmark np.linalg.multi_dot on dense matrices for scikit-learn/scikit-learn#19571
import numpy as np
from .common import Benchmark
class MultiDotLogReg(Benchmark):
    """Benchmark np.linalg.multi_dot on dense matrices for #19571.
    """
    param_names = [
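For context, a small self-contained sketch of the comparison #19571 is about, chained np.dot versus np.linalg.multi_dot on dense matrices (shapes below are illustrative):

# Hypothetical sketch: multi_dot picks the cheaper association
# A @ (B @ w) instead of the left-to-right (A @ B) @ w.
import time

import numpy as np

rng = np.random.RandomState(0)
A = rng.rand(2_000, 100)
B = rng.rand(100, 2_000)
w = rng.rand(2_000, 1)

start = time.perf_counter()
chained = A.dot(B).dot(w)
t_chained = time.perf_counter() - start

start = time.perf_counter()
optimized = np.linalg.multi_dot([A, B, w])
t_multi = time.perf_counter() - start

print(f"chained: {t_chained:.4f}s, multi_dot: {t_multi:.4f}s")
np.testing.assert_allclose(chained, optimized)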
jjerphan / binary_tree.py
import numpy as np
from sklearn.neighbors import KDTree, BallTree
from .common import Benchmark
class BinaryTreeBenchmark(Benchmark):
    """
    Base class for BinaryTree benchmarks.
    """
jjerphan / benchmark.py
import gc
import itertools
import time
import numpy as np
import pandas as pd
import seaborn as sns
from matplotlib import pyplot as plt
from sklearn.datasets import make_classification
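The preview shows only the imports. A hedged sketch of the usual shape of such a benchmark script, with a placeholder where the code under test would go:

# Hypothetical sketch: time something over a grid of dataset sizes with
# gc disabled, collect the results in a DataFrame and plot them.
records = []
for n_samples, n_features in itertools.product([1_000, 10_000], [20, 100]):
    X, y = make_classification(n_samples=n_samples, n_features=n_features,
                               random_state=0)
    gc.collect()
    gc.disable()
    start = time.perf_counter()
    # ... call the code under benchmark here ...
    duration = time.perf_counter() - start
    gc.enable()
    records.append(dict(n_samples=n_samples, n_features=n_features,
                        duration=duration))

df = pd.DataFrame(records)
sns.lineplot(data=df, x="n_samples", y="duration", hue="n_features")
plt.show()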
jjerphan / test_system_nakagami_fit.py
Created Jan 21, 2021
Nakagami Loglikelihood tests: fit vs scipy.optimize.root
@pytest.mark.parametrize('nu', [1.6, 2.5, 3.9])
@pytest.mark.parametrize('loc', [25.0, 10, 35])
@pytest.mark.parametrize('scale', [13, 5, 20])
def test_fit(self, nu, loc, scale):
    # Regression test for gh-13396 (21/27 cases failed previously)
    # The first tuple of the parameters' values is discussed in gh-10908
    N = 100
    samples = stats.nakagami.rvs(size=N, nu=nu, loc=loc,
                                 scale=scale, random_state=1337)
    nu_est, loc_est, scale_est = stats.nakagami.fit(samples)
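As a hedged cross-check of the same idea: the gist title mentions scipy.optimize.root, but the standalone sketch below substitutes scipy.optimize.minimize on the negative log-likelihood, which is a related but different way to obtain a numerical MLE to compare against fit:

# Hypothetical sketch: compare stats.nakagami.fit against a direct
# numerical MLE obtained by minimizing the negative log-likelihood.
import numpy as np
from scipy import optimize, stats

samples = stats.nakagami.rvs(nu=2.5, loc=10.0, scale=5.0, size=100,
                             random_state=1337)

def nll(params):
    nu, loc, scale = params
    # Reject parameter values outside the support of the distribution.
    if nu <= 0 or scale <= 0 or loc >= samples.min():
        return np.inf
    return -np.sum(stats.nakagami.logpdf(samples, nu, loc=loc, scale=scale))

res = optimize.minimize(nll, x0=[1.0, samples.min() - 1.0, samples.std()],
                        method="Nelder-Mead")
print("fit:     ", stats.nakagami.fit(samples))
print("minimize:", res.x)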
jjerphan / benchmark.py
Created Jan 14, 2021
sklearn#18850 - benchmark
import gc
import time
import numpy as np
import pandas as pd
from scipy import linalg
"""
A simple benchmark to know the performances of setting `check_finite`
to `False` for `linalg.cholesky`
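A minimal sketch of the comparison the docstring describes, reusing the imports above (matrix size and repetition count are arbitrary):

# Hypothetical sketch: time linalg.cholesky with and without the
# finiteness check on a symmetric positive definite matrix.
rng = np.random.RandomState(0)
A = rng.rand(2_000, 2_000)
spd = A @ A.T + 2_000 * np.eye(2_000)

for check_finite in (True, False):
    gc.collect()
    start = time.perf_counter()
    for _ in range(10):
        linalg.cholesky(spd, lower=True, check_finite=check_finite)
    duration = time.perf_counter() - start
    print(f"check_finite={check_finite}: {duration:.3f}s")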
jjerphan / trace.log
Created Jul 29, 2020
Trace describing ilastik#2169 — Problem when saving ("Object dtype dtype('O') has no native HDF5 equivalent") — Ilastik 1.3.3post3
ERROR 2020-07-29 11:54:39,805 slot 7664 139953135314752 setDirty called
ERROR 2020-07-29 11:54:39,806 slot 7664 139953135314752 self.stype.isConfigured
ERROR 2020-07-29 11:54:39,806 slot 7664 139953135314752 Number of Downstream_slots: 0
ERROR 2020-07-29 11:54:39,806 slot 7664 139953135314752 OpExportSlot.Input []: {_ready : True, NOTREADY : None, shape : (511, 512, 512, 3), axistags : z y x c, original_axistags : z y x c, dtype : <class 'numpy.float32'>, drange : (0.0, 1.0), has_mask : None, _dirty : False, ideal_blockshape : [0, 0, 0, 3], display_mode : 'grayscale', channel_names : ['Gaussian Smoothing (σ=0.3) [0]', 'Gaussian Smoothing (σ=0.3) [1]', 'Gaussian Smoothing (σ=0.3) [2]', 'Gaussian Smoothing (σ=0.3) [3]', 'Gaussian Smoothing (σ=0.7) [0]', 'Gaussian Smoothing (σ=0.7) [1]', 'Gaussian Smoothing (σ=0.7) [2]', 'Gaussian Smoothing (σ=0.7) [3]', 'Gaussian Smoothing (σ=1.0) [0]', 'Gaussian Smoothing (σ=1.0) [1]', 'Gaussian Smoothing (σ=1.0) [2]', 'Gaussian Smoothing (σ=1.0) [3]', 'Gaussian Smoothing (σ=
jjerphan / benchmark.py
import gc
import itertools
import time

import numpy as np
import pandas as pd
import seaborn as sns
from matplotlib import pyplot as plt
from sklearn.calibration import CalibratedClassifierCV
from sklearn.datasets import make_classification
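The preview ends at the imports. A plausible sketch of the timing loop such a script runs, assuming LogisticRegression as the calibrated base estimator (an assumption, not taken from the gist):

# Hypothetical sketch: fit CalibratedClassifierCV for a few dataset
# sizes, record the durations and plot them.
from sklearn.linear_model import LogisticRegression

records = []
for n_samples in [1_000, 5_000, 10_000]:
    X, y = make_classification(n_samples=n_samples, n_features=20,
                               random_state=0)
    gc.collect()
    start = time.perf_counter()
    CalibratedClassifierCV(LogisticRegression(max_iter=1_000), cv=3).fit(X, y)
    duration = time.perf_counter() - start
    records.append(dict(n_samples=n_samples, duration=duration))

df = pd.DataFrame(records)
sns.lineplot(data=df, x="n_samples", y="duration")
plt.show()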