Tomonari MASADA (tomonari-masada)
@tomonari-masada
tomonari-masada / ms_normal.py
Created March 5, 2013 11:36
Sampling from a normal distribution by Metropolis Monte Carlo
import sys
import math
import random
def invariant_distribution(x):
    # unnormalized target density: zero-mean normal with sigma = 0.3
    return math.exp(-0.5 * x ** 2 / (0.3 * 0.3))

def next_state(x, dx):
    # symmetric proposal: uniform step of half-width dx / 2 around x
    return x + dx * (random.uniform(0.0, 1.0) - 0.5)
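The preview stops before the sampling loop. A minimal sketch of the missing Metropolis step, assuming the standard accept/reject rule with the symmetric proposal above (loop length, step width, and variable names are mine, not necessarily the gist's):

x = 0.0
dx = 1.0
samples = []
for i in range(100000):
    x_new = next_state(x, dx)
    # accept with probability min(1, p(x_new) / p(x)); the proposal is symmetric
    if random.uniform(0.0, 1.0) < invariant_distribution(x_new) / invariant_distribution(x):
        x = x_new
    samples.append(x)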
@tomonari-masada
tomonari-masada / rectm.c
Created March 15, 2013 10:23
A Revised Inference for Correlated Topic Model
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <math.h>
#include <time.h>
#include <omp.h>
#include <xmmintrin.h>
#include <pmmintrin.h>
#!/usr/bin/python
# -*- coding: utf-8 -*-
import os
import time
import random
mail_str = 'your@email.address'
password_str = 'your_password'
@tomonari-masada
tomonari-masada / get_dist.py
Last active December 16, 2015 09:19
Lambert-Andoyer
#!/usr/bin/python
# -*- coding: utf-8 -*-
import sys
import math
DEG2RAD = math.pi / 180.0
# WGS84
dLA_A = 6378137.0  # equatorial radius [m]
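Only the constants survive in the preview. Below is a sketch of the Andoyer-Lambert approximation the title names, assuming WGS84, inputs in degrees, and a result in meters; the function name, the flattening constant dLA_F, and the antipodal guard are my additions:

dLA_F = 1.0 / 298.257223563  # WGS84 flattening

def get_dist(lat1, lon1, lat2, lon2):
    # reduced (parametric) latitudes on the auxiliary sphere
    b1 = math.atan((1.0 - dLA_F) * math.tan(lat1 * DEG2RAD))
    b2 = math.atan((1.0 - dLA_F) * math.tan(lat2 * DEG2RAD))
    # spherical central angle between the two reduced points
    c = (math.sin(b1) * math.sin(b2)
         + math.cos(b1) * math.cos(b2) * math.cos((lon2 - lon1) * DEG2RAD))
    s = math.acos(max(-1.0, min(1.0, c)))
    if s == 0.0:
        return 0.0
    P, Q = (b1 + b2) / 2.0, (b2 - b1) / 2.0
    # Lambert's first-order correction in the flattening
    X = (s - math.sin(s)) * (math.sin(P) * math.cos(Q) / math.cos(s / 2.0)) ** 2
    Y = (s + math.sin(s)) * (math.cos(P) * math.sin(Q) / math.sin(s / 2.0)) ** 2
    return dLA_A * (s - dLA_F / 2.0 * (X + Y))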
@tomonari-masada
tomonari-masada / HMC.py
Created August 16, 2013 10:57
David MacKay. Information Theory, Inference, and Learning Algorithms. Algorithm 30.1.
import numpy as np
import matplotlib
matplotlib.use('agg')
from pylab import *
def findE(x):
    # energy of a strongly correlated 2-D Gaussian (MacKay's demo target)
    return 250.25 * (x[0] * x[0] + x[1] * x[1]) \
        - 449.5 * x[0] * x[1]

def gradE(x):
    # analytic gradient of findE
    return np.array([500.5 * x[0] - 449.5 * x[1],
                     500.5 * x[1] - 449.5 * x[0]])
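The rest of the gist is cut off. A minimal sketch of the leapfrog HMC loop of Algorithm 30.1 using findE and gradE above; the step size, trajectory length, sample count, and function name are assumptions of mine:

def hmc(x0, n_samples=1000, eps=0.055, tau=19):
    x = np.array(x0, dtype=float)
    g, E = gradE(x), findE(x)
    samples = []
    for _ in range(n_samples):
        p = np.random.randn(2)                    # fresh Gaussian momentum
        H = 0.5 * np.dot(p, p) + E                # current Hamiltonian
        xnew, gnew = x.copy(), g.copy()
        for _ in range(tau):                      # leapfrog integration
            p -= 0.5 * eps * gnew
            xnew += eps * p
            gnew = gradE(xnew)
            p -= 0.5 * eps * gnew
        Enew = findE(xnew)
        Hnew = 0.5 * np.dot(p, p) + Enew
        if np.random.rand() < np.exp(H - Hnew):   # Metropolis accept/reject
            x, g, E = xnew, gnew, Enew
        samples.append(x.copy())
    return np.array(samples)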
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <stdint.h>
#include <time.h>
#include <math.h>
#include <omp.h>
#define real float
@tomonari-masada
tomonari-masada / sample_mlp.py
Last active August 29, 2015 14:21
MLP sample code
import sys
import numpy as np
import matplotlib.pyplot as plt
import theano
import theano.tensor as T
import scipy
rng = np.random
rng.seed(0)
class Layer(object):
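    # The gist preview breaks off at the class header. Below is a guessed
    # body in the same Theano style; the initialization scheme and the
    # output() method are my assumptions, not necessarily the original's.
    def __init__(self, n_in, n_out, activation=T.tanh):
        self.W = theano.shared(
            rng.uniform(-0.1, 0.1, (n_in, n_out)).astype(theano.config.floatX))
        self.b = theano.shared(np.zeros(n_out, dtype=theano.config.floatX))
        self.activation = activation
        self.params = [self.W, self.b]   # collected for gradient updates

    def output(self, x):
        # affine transform followed by the nonlinearity
        return self.activation(T.dot(x, self.W) + self.b)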
@tomonari-masada
tomonari-masada / Chainer_MLP.py
Last active August 29, 2015 14:22
MLP sample code (Chainer)
import sys
import numpy as np
from scipy import special
from chainer import cuda, Function, FunctionSet, gradient_check, Variable, optimizers
import chainer.functions as F
def target(args, params):
    # Dirichlet density at `args` with parameter vector `params`
    return np.exp(special.gammaln(np.sum(params))
                  - np.sum(special.gammaln(params))
                  + np.sum((params - 1.) * np.log(args)))
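Only the target density survives in the preview. A minimal MLP in the Chainer 1.x idiom the imports suggest (FunctionSet era); the layer sizes, activation, and optimizer setup are assumptions of mine:

model = FunctionSet(l1=F.Linear(2, 32),
                    l2=F.Linear(32, 1))

def forward(x_data):
    # two-layer perceptron: ReLU hidden layer, linear output
    h = F.relu(model.l1(Variable(x_data)))
    return model.l2(h)

optimizer = optimizers.Adam()
optimizer.setup(model.collect_parameters())  # setup(model) in later 1.x releases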
@tomonari-masada
tomonari-masada / least_squares_1.py
Created May 16, 2016 11:20
The simplest least squares with TensorFlow
import tensorflow as tf
x = tf.placeholder(tf.float32, [None, 1])   # inputs
y_ = tf.placeholder(tf.float32, [None, 1])  # targets
b = tf.Variable(tf.zeros([1]))              # intercept
w = tf.Variable(tf.zeros([1, 1]))           # slope
y = w * x + b                               # the linear model
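The preview ends at the model definition. In the pre-1.x TensorFlow idiom of the imports, the remainder would be a squared loss plus a gradient-descent step, roughly as follows; the learning rate and the synthetic data are placeholders of mine:

import numpy as np
x_data = np.random.rand(100, 1).astype(np.float32)
y_data = 2.0 * x_data + 1.0               # synthetic line to recover

loss = tf.reduce_mean(tf.square(y - y_))  # mean squared error
train_step = tf.train.GradientDescentOptimizer(0.5).minimize(loss)

sess = tf.Session()
sess.run(tf.initialize_all_variables())   # initializer of that TF era
for step in range(200):
    sess.run(train_step, feed_dict={x: x_data, y_: y_data})
print(sess.run([w, b]))                   # should approach [[2.0]] and [1.0]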
@tomonari-masada
tomonari-masada / GAN_normal.py
Created May 18, 2016 14:35
Approximating a normal distribution with a GAN
import tensorflow as tf
import numpy as np
import matplotlib.pyplot as plt
mu, sigma = 5.0, 0.2   # parameters of the target normal distribution
latent_dim = 2

def mlp(input):
    w1 = tf.get_variable("w1", [input.get_shape()[1], 8],
                         initializer=tf.random_normal_initializer())
    b1 = tf.get_variable("b1", [8], initializer=tf.constant_initializer(0.1))
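    # The preview ends here. A guessed continuation of the two-layer MLP
    # used for the generator/discriminator; the hidden width, activation,
    # and output layer are my assumptions, not necessarily the gist's.
    h1 = tf.nn.tanh(tf.matmul(input, w1) + b1)
    w2 = tf.get_variable("w2", [8, 1],
                         initializer=tf.random_normal_initializer())
    b2 = tf.get_variable("b2", [1], initializer=tf.constant_initializer(0.1))
    return tf.matmul(h1, w2) + b2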