Skip to content

Instantly share code, notes, and snippets.

Avatar

kirk86

View GitHub Profile
View emojione-picker.py
#!/usr/bin/python3
# -*- coding: UTF-8 -*-
#
import os
import subprocess
import time
import socket
import sys
import json
@kirk86
kirk86 / akmtdfgen.py
Created Aug 26, 2017 — forked from timehaven/akmtdfgen.py
kmtdfgen: Keras multithreaded dataframe generator
View akmtdfgen.py
"""akmtdfgen: A Keras multithreaded dataframe generator.
Works with Python 2.7 and Keras 2.x.
For Python 3.x, need to fiddle with the threadsafe generator code.
Test the generator_from_df() functions by running this file:
python akmtdfgen.py
View model.py
from keras.models import Model
from keras.layers import Input, merge, Convolution2D, MaxPooling2D
from keras.layers import UpSampling2D, Reshape, Activation, Dropout
from keras.layers import Deconvolution2D, Dense, Flatten, Input
from keras.layers import Permute
from keras.optimizers import Adam, SGD
from keras import backend as K
def dice_coef(y_true, y_pred):
View gist:f6ef0baabff07d748c3e7c9c0a744ee9
"""
This is a batched LSTM forward and backward pass
"""
import numpy as np
import code
class LSTM:
@staticmethod
def init(input_size, hidden_size, fancy_forget_bias_init = 3):
@kirk86
kirk86 / min-char-rnn.py
Created Aug 25, 2016 — forked from karpathy/min-char-rnn.py
Minimal character-level language model with a Vanilla Recurrent Neural Network, in Python/numpy
View min-char-rnn.py
"""
Minimal character-level Vanilla RNN model. Written by Andrej Karpathy (@karpathy)
BSD License
"""
import numpy as np
# data I/O
# Load the training corpus and derive the character vocabulary from it.
# Using a context manager so the file handle is closed deterministically
# (the original open(...).read() leaked the handle until GC).
with open('input.txt', 'r') as f:  # should be simple plain text file
    data = f.read()
chars = list(set(data))  # unique characters observed in the corpus
# data_size: total character count; vocab_size: number of distinct characters
data_size, vocab_size = len(data), len(chars)
View adam.py
def adam(loss, all_params, learning_rate=0.0002, beta1=0.1, beta2=0.001,
epsilon=1e-8, gamma=1-1e-7):
"""
ADAM update rules
Default values are taken from [Kingma2014]
References:
[Kingma2014] Kingma, Diederik, and Jimmy Ba.
"Adam: A Method for Stochastic Optimization."
arXiv preprint arXiv:1412.6980 (2014).
View adam.py
def adam(loss, all_params, learning_rate=0.001, b1=0.9, b2=0.999, e=1e-8,
gamma=1-1e-8):
"""
ADAM update rules
Default values are taken from [Kingma2014]
References:
[Kingma2014] Kingma, Diederik, and Jimmy Ba.
"Adam: A Method for Stochastic Optimization."
arXiv preprint arXiv:1412.6980 (2014).
@kirk86
kirk86 / adam.py
Created Jun 27, 2016 — forked from Newmu/adam.py
Adam Optimizer
View adam.py
"""
The MIT License (MIT)
Copyright (c) 2015 Alec Radford
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
@kirk86
kirk86 / conv_deconv_variational_autoencoder.py
Created Jun 27, 2016 — forked from Newmu/conv_deconv_variational_autoencoder.py
Prototype code of conv/deconv variational autoencoder, probably not runnable, lots of inter-dependencies with local codebase =/
View conv_deconv_variational_autoencoder.py
import theano
import theano.tensor as T
from theano.sandbox.rng_mrg import MRG_RandomStreams as RandomStreams
from theano.tensor.signal.downsample import max_pool_2d
from theano.tensor.extra_ops import repeat
from theano.sandbox.cuda.dnn import dnn_conv
from time import time
import numpy as np
from matplotlib import pyplot as plt
@kirk86
kirk86 / simple_gan.py
Created Jun 27, 2016 — forked from Newmu/simple_gan.py
Simple Generative Adversarial Network Demo
View simple_gan.py
import os
import numpy as np
from matplotlib import pyplot as plt
from time import time
from foxhound import activations
from foxhound import updates
from foxhound import inits
from foxhound.theano_utils import floatX, sharedX