basic import

from __future__ import print_function
import numpy as np
from spark_config import sc
from pyspark.mllib.linalg import Vectors
from pyspark.mllib.regression import LabeledPoint
from util import RDD_check, LabeledPoint_check
from sklearn.base import BaseEstimator


def perceptron_loss(y, y_pred):
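    # Sketch continuation (the preview stops at the def line above): assuming
    # the standard perceptron loss with labels encoded as -1/+1 -- zero when
    # the prediction has the correct sign, a linear penalty of -y * y_pred
    # otherwise.
    return np.maximum(0.0, -y * y_pred)

# Quick check on plain numpy arrays:
#   perceptron_loss(np.array([1., -1.]), np.array([0.8, 0.5]))  ->  [0., 0.5]
# With the pyspark imports above, the same loss can be mapped over an RDD of
# LabeledPoint, e.g.
#   rdd.map(lambda p: perceptron_loss(p.label, float(w.dot(p.features))))
# where w is a hypothetical weight vector.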
jiumem / data.py
Created December 10, 2015 12:40 — forked from rezoo/data.py
Simple implementation of Generative Adversarial Nets using Chainer
import gzip
import os
import numpy as np
import six
from six.moves.urllib import request
parent = 'http://yann.lecun.com/exdb/mnist'
train_images = 'train-images-idx3-ubyte.gz'
train_labels = 'train-labels-idx1-ubyte.gz'
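The preview stops after the file names. Below is a minimal sketch of the download-and-parse step such a data.py typically performs, using only the constants and imports shown above; the helper names (download, load_images, load_labels) are placeholders rather than the gist's own.

import gzip
import os

import numpy as np
from six.moves.urllib import request

parent = 'http://yann.lecun.com/exdb/mnist'
train_images = 'train-images-idx3-ubyte.gz'
train_labels = 'train-labels-idx1-ubyte.gz'

def download(filename):
    # Fetch one MNIST archive into the working directory if it is missing.
    if not os.path.exists(filename):
        request.urlretrieve('{0}/{1}'.format(parent, filename), filename)
    return filename

def load_images(filename):
    # IDX3 layout: 16-byte header, then one unsigned byte per pixel.
    with gzip.open(download(filename), 'rb') as f:
        data = np.frombuffer(f.read(), dtype=np.uint8, offset=16)
    return data.reshape(-1, 784).astype(np.float32) / 255.0

def load_labels(filename):
    # IDX1 layout: 8-byte header, then one unsigned byte per label.
    with gzip.open(download(filename), 'rb') as f:
        return np.frombuffer(f.read(), dtype=np.uint8, offset=8)

x_train = load_images(train_images)
y_train = load_labels(train_labels)

Reading the IDX headers directly (16 bytes for images, 8 for labels) keeps the loader dependent on nothing beyond numpy and six.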
jiumem / simple_gan.py
Created December 10, 2015 11:39 — forked from Newmu/simple_gan.py
Simple Generative Adversarial Network Demo
import os
import numpy as np
from matplotlib import pyplot as plt
from time import time
from foxhound import activations
from foxhound import updates
from foxhound import inits
from foxhound.theano_utils import floatX, sharedX
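Only the foxhound/Theano imports survive in the preview, so rather than guess at that API, here is a hedged numpy-only illustration of the two objectives such a demo optimizes: the discriminator D is trained to assign high probability to real samples and low probability to generated ones, while the generator G is trained to fool D. The toy G and D below use hand-picked parameters purely to make the losses computable; the actual demo would train small MLPs with foxhound's updates.

import numpy as np

def sigmoid(x):
    return 1.0 / (1.0 + np.exp(-x))

rng = np.random.RandomState(0)
x_real = rng.normal(4.0, 0.5, size=100)   # "real" data: a 1-D Gaussian
z = rng.uniform(0.0, 1.0, size=100)       # noise fed to the generator

def G(z, a=3.0, b=2.5):
    # Hypothetical generator: maps noise toward the data region.
    return a * z + b

def D(x, w=1.0, c=-4.0):
    # Hypothetical discriminator: probability that x is a real sample.
    return sigmoid(w * x + c)

eps = 1e-8
# Discriminator objective: label real samples 1 and generated samples 0.
d_loss = -np.mean(np.log(D(x_real) + eps)) - np.mean(np.log(1.0 - D(G(z)) + eps))
# Non-saturating generator objective: make D call generated samples real.
g_loss = -np.mean(np.log(D(G(z)) + eps))
print('d_loss %.3f  g_loss %.3f' % (d_loss, g_loss))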
jiumem / distcorr.py
Created December 1, 2015 04:20 — forked from satra/distcorr.py
Distance Correlation in Python
from scipy.spatial.distance import pdist, squareform
import numpy as np
from numbapro import jit, float32
def distcorr(X, Y):
""" Compute the distance correlation function
>>> a = [1,2,3,4,5]
>>> b = np.array([1,2,9,4,4])
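The docstring is cut off in the preview. Below is a self-contained sketch of the standard Szekely-Rizzo distance correlation using the same pdist/squareform approach; numbapro's @jit decorator is dropped here (that library has been discontinued), and the details may differ from satra's original.

from scipy.spatial.distance import pdist, squareform
import numpy as np

def distcorr(X, Y):
    """Sample distance correlation of X and Y (Szekely & Rizzo)."""
    X = np.atleast_2d(np.asarray(X, dtype=float))
    Y = np.atleast_2d(np.asarray(Y, dtype=float))
    if X.shape[0] == 1:
        X = X.T
    if Y.shape[0] == 1:
        Y = Y.T
    n = X.shape[0]
    if Y.shape[0] != n:
        raise ValueError('X and Y must have the same number of samples')
    # Pairwise Euclidean distance matrices, double-centered.
    a = squareform(pdist(X))
    b = squareform(pdist(Y))
    A = a - a.mean(axis=0) - a.mean(axis=1)[:, None] + a.mean()
    B = b - b.mean(axis=0) - b.mean(axis=1)[:, None] + b.mean()
    # Squared distance covariance and variances.
    dcov2_xy = (A * B).sum() / float(n * n)
    dcov2_xx = (A * A).sum() / float(n * n)
    dcov2_yy = (B * B).sum() / float(n * n)
    return np.sqrt(dcov2_xy) / np.sqrt(np.sqrt(dcov2_xx * dcov2_yy))

a = [1, 2, 3, 4, 5]
b = np.array([1, 2, 9, 4, 4])
print(distcorr(a, b))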
jiumem / ienn.md
Last active November 5, 2015 15:06
import numpy as np
import matplotlib.pyplot as plt
from sklearn.neighbors import NearestNeighbors


class IENN():
    def __init__(self, n_estimator, sample_size, n_jobs=1):
        self.n_estimator = n_estimator
        self.sample_size = sample_size
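The preview stops inside __init__ and the gist gives no description, so the behaviour below is a guess: an ensemble of n_estimator nearest-neighbour models, each fit on a random subsample of sample_size points, scoring a query by its average distance to the closest retained sample (in the spirit of isolation/ensemble nearest-neighbour methods). Treat the fit and score_samples methods as hypothetical.

import numpy as np
from sklearn.neighbors import NearestNeighbors

class IENN(object):
    def __init__(self, n_estimator, sample_size, n_jobs=1):
        self.n_estimator = n_estimator
        self.sample_size = sample_size
        self.n_jobs = n_jobs
        self.estimators_ = []

    def fit(self, X, random_state=0):
        # Fit each ensemble member on its own random subsample of X.
        rng = np.random.RandomState(random_state)
        self.estimators_ = []
        for _ in range(self.n_estimator):
            idx = rng.choice(X.shape[0], self.sample_size, replace=False)
            nn = NearestNeighbors(n_neighbors=1, n_jobs=self.n_jobs)
            self.estimators_.append(nn.fit(X[idx]))
        return self

    def score_samples(self, X):
        # Average distance to the nearest retained sample across the ensemble;
        # larger values suggest a point lies away from the training data.
        dists = [nn.kneighbors(X, return_distance=True)[0][:, 0]
                 for nn in self.estimators_]
        return np.mean(dists, axis=0)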
jiumem / Callback.md
Last active May 8, 2018 22:45
Keras for deep learning
class LossHistory(Callback):
    def __init__(self, X_train, y_train, layer_index):
        super(LossHistory, self).__init__()
        self.layer_index = layer_index
        # Keep at most 1000 training examples so per-epoch monitoring stays cheap.
        if X_train.shape[0] >= 1000:
            mask = np.random.choice(X_train.shape[0], 1000)
            self.X_train_subset = X_train[mask]
            self.y_train_subset = y_train[mask]
        else:
            self.X_train_subset = X_train
            self.y_train_subset = y_train
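    # Sketch continuation (the preview ends here): a hedged guess at the rest
    # of the callback. on_epoch_end below just re-evaluates the stored subset;
    # whatever the gist actually logs, and how it uses layer_index (presumably
    # to inspect that layer's activations), is not shown above.
    def on_epoch_end(self, epoch, logs=None):
        # self.model is attached by Keras before training begins.
        loss = self.model.evaluate(self.X_train_subset, self.y_train_subset,
                                   verbose=0)
        print('epoch %d, training-subset loss: %s' % (epoch, loss))

# Typical wiring (model, X_train, y_train come from the rest of the script):
#   history = LossHistory(X_train, y_train, layer_index=1)
#   model.fit(X_train, y_train, batch_size=128, epochs=10, callbacks=[history])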