local_regression.py
"""Local regression"""
# Author: Mathieu Blondel <mathieu@mblondel.org>
# License: BSD 3 clause
import numpy as np
from sklearn.base import BaseEstimator, RegressorMixin
from sklearn.metrics.pairwise import pairwise_kernels
from sklearn.linear_model import Ridge
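Only the header and imports are shown above. As a rough sketch of the idea (not necessarily the estimator this file defines), local regression can be implemented by fitting one Ridge model per query point, with sample weights given by a kernel centred on that point; the function name, kernel choice, and default hyperparameters below are illustrative, and the code reuses the imports above.

def local_ridge_predict(X_train, y_train, X_test, gamma=1.0, alpha=1e-3):
    """Predict each query point with a Ridge model weighted by an RBF kernel."""
    y_pred = np.empty(len(X_test))
    for i, x in enumerate(X_test):
        # Similarity of every training point to the current query point.
        weights = pairwise_kernels(X_train, x.reshape(1, -1),
                                   metric="rbf", gamma=gamma).ravel()
        model = Ridge(alpha=alpha)
        model.fit(X_train, y_train, sample_weight=weights)
        y_pred[i] = model.predict(x.reshape(1, -1))[0]
    return y_pred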
echo_server.py
# adapted from http://roscidus.com/desktop/node/413
import socket
import gobject
def server(host, port):
    '''Initialize server and start listening.'''
    sock = socket.socket()
    sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
    sock.bind((host, port))
coroutines.py
def recv_count():
    try:
        while True:
            n = (yield)
            print("T-minus", n)
    except GeneratorExit:
        print("Kaboom!")

def ex1():
    # Drive the coroutine: prime it, send a countdown, then close it.
    r = recv_count()
    next(r)            # advance to the first yield so it can receive values
    for i in range(5, 0, -1):
        r.send(i)      # each send resumes the coroutine with a value
    r.close()          # raises GeneratorExit inside the coroutine -> "Kaboom!"
number_plate_solver.py
#!/usr/bin/env python
"""
Find the arithmetic operations needed to reach TARGET by using all 4 numbers in NUMBERS.
"""
from itertools import permutations, product
NUMBERS = ["3","4","7","8"]
TARGET = 10.0
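Only the constants are shown above. One brute-force approach, sketched here using the permutations/product imports and the constants already defined, is to try every ordering of the numbers together with every choice of operators; this sketch evaluates expressions with Python's usual operator precedence and does not insert parentheses, which the actual script may also explore.

def solve(numbers=NUMBERS, target=TARGET):
    """Print every ordering/operator combination that evaluates to the target."""
    for nums in permutations(numbers):
        for ops in product("+-*/", repeat=len(nums) - 1):
            # Interleave numbers and operators, e.g. "3+4*7-8".
            expr = nums[0] + "".join(op + n for op, n in zip(ops, nums[1:]))
            try:
                value = eval(expr)
            except ZeroDivisionError:
                continue
            if abs(value - target) < 1e-9:
                print(expr, "=", target)

if __name__ == "__main__":
    solve()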
mc_pi.py
"""
Find pi by the Monte-Carlo method.

Area of a circle: pi r^2.
Area of the enclosing square: (2r)^2 = 4 r^2.
Sample points uniformly at random in the square [-1, 1] x [-1, 1].
The proportion of points that fall inside the unit circle is then
pi r^2 / (4 r^2) = pi / 4, so pi is estimated as 4 times that proportion.
"""
from random import random
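A minimal sketch of the estimator described in the docstring, using the random import above (the sample count is an arbitrary choice):

def estimate_pi(n_samples=1000000):
    inside = 0
    for _ in range(n_samples):
        # random() is uniform on [0, 1); scale and shift to [-1, 1).
        x = 2.0 * random() - 1.0
        y = 2.0 * random() - 1.0
        if x * x + y * y <= 1.0:
            inside += 1
    return 4.0 * inside / n_samples

if __name__ == "__main__":
    print(estimate_pi())  # roughly 3.14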
regression_lp.py
# (C) 2011 Mathieu Blondel
# License: BSD 3 clause
import numpy as np
import numpy.linalg as linalg
import pylab as pl
from cvxopt import matrix, solvers
np.random.seed(0)
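The body of the file is not shown. As one concrete way to cast regression as a linear program with cvxopt, here is a sketch of least-absolute-deviations regression; the function name and formulation are illustrative and may differ from what regression_lp.py actually solves. It reuses the imports above.

def lad_regression(X, y):
    """Least absolute deviations: min_w sum_i |y_i - x_i . w|, solved as an LP.

    LP variables z = [w (d coefficients), t (n slacks)].
    Minimize 1^T t subject to  Xw - y <= t  and  y - Xw <= t.
    """
    n, d = X.shape
    c = matrix(np.hstack([np.zeros(d), np.ones(n)]))
    G = matrix(np.vstack([
        np.hstack([X, -np.eye(n)]),    #  Xw - t <=  y
        np.hstack([-X, -np.eye(n)]),   # -Xw - t <= -y
    ]).astype(float))
    h = matrix(np.hstack([y, -y]).astype(float))
    sol = solvers.lp(c, G, h)
    z = np.array(sol["x"]).ravel()
    return z[:d]  # the fitted coefficient vector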
online_variance.py
def online_mean_variance(iterable):
    """Running mean and variance in a single pass (Welford's algorithm)."""
    mN = 0     # number of samples seen so far
    mM = 0.0   # running mean
    mS = 0.0   # running sum of squared deviations from the mean
    for x in iterable:
        mN += 1
        nextM = mM + (x - mM) / mN
        mS += (x - mM) * (x - nextM)
        mM = nextM
    # Population variance; use mS / (mN - 1) for the sample variance.
    variance = mS / mN if mN > 0 else 0.0
    return mM, variance
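A quick sanity check on a small example (the data values are arbitrary; the expected output follows from the population-variance convention used above):

data = [2.0, 4.0, 4.0, 4.0, 5.0, 5.0, 7.0, 9.0]
mean, var = online_mean_variance(data)
print(mean, var)  # 5.0 4.0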
lbfgs_nnls.py
# (C) Mathieu Blondel 2012
# License: BSD 3 clause
import numpy as np
from scipy.optimize import fmin_l_bfgs_b
from sklearn.base import BaseEstimator, RegressorMixin
from sklearn.utils.extmath import safe_sparse_dot
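The estimator class itself is not shown. The core trick suggested by the imports, box-constrained L-BFGS with a lower bound of zero on every coefficient, can be sketched as follows; the function name and the unregularized objective are illustrative, and the sketch uses plain dot products rather than safe_sparse_dot.

def nnls_lbfgs(X, y):
    """Non-negative least squares: min_w 0.5 * ||Xw - y||^2 subject to w >= 0."""
    def objective(w):
        residual = np.dot(X, w) - y
        loss = 0.5 * np.dot(residual, residual)
        grad = np.dot(X.T, residual)
        return loss, grad

    w0 = np.zeros(X.shape[1])
    bounds = [(0, None)] * X.shape[1]  # lower bound 0, no upper bound
    w, _, _ = fmin_l_bfgs_b(objective, w0, bounds=bounds)
    return w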
xref.txt
paper.tex: main manuscript
supp.tex: supplementary material
Cross-referencing
-----------------
We want to cross-reference equations in paper.tex from supp.tex.
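One standard way to do this is the xr package: supp.tex reads paper.tex's .aux file, so \ref in the supplement can resolve labels defined in the main manuscript (compile paper.tex first). A minimal sketch follows; the label name is made up for illustration.

% In the preamble of supp.tex:
\usepackage{xr}
\externaldocument{paper}   % loads paper.aux produced by compiling paper.tex

% Later in supp.tex:
As shown in Eq.~(\ref{eq:main-result}) of the main manuscript, ...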
sparse_multiclass_numba.py
"""
(C) August 2013, Mathieu Blondel
License: BSD 3 clause
This is a Numba-based reimplementation of the block coordinate descent solver
(without line search) described in the paper:
Block Coordinate Descent Algorithms for Large-scale Sparse Multiclass
Classification. Mathieu Blondel, Kazuhiro Seki, and Kuniaki Uehara.
Machine Learning, May 2013.