Skip to content

Instantly share code, notes, and snippets.

@sbos
sbos / gist:5859327
Created June 25, 2013 15:18
build-error.log
==> Using Homebrew-provided fortran compiler.
This may be changed by setting the FC environment variable.
==> Cloning https://github.com/JuliaLang/julia.git
git --git-dir /Library/Caches/Homebrew/julia--git/.git status -s
Updating /Library/Caches/Homebrew/julia--git
git config remote.origin.url https://github.com/JuliaLang/julia.git
git config remote.origin.fetch +refs/heads/master:refs/remotes/origin/master
git fetch origin
git checkout -f master
Already on 'master'
@sbos
sbos / HMM.jl
Created November 1, 2013 11:25
Hidden Markov Model in Julia
module HMM
using Distributions
import Distributions.rand
import Distributions.fit
immutable HiddenMarkovModel{TP, K}
theta::Vector{TP}
A::Matrix{Float64}
-.13015382 .033825178 -.02594826 -.288166 .04162671 -.06678655 -.22145432 .1749719 .030906504 -.12878266 .08305055 .11668899 -.06319859 -.0777372 .040560413 -.0305699 .17826276 -.10443129 -.00669569 .014138044 -.064837754 -.24928348 .060342744 -.08906554 .080405414 .0045637907 .007919261 -.14719364 .07927967 .011611855 -.0018610754 .08388769 .09033888 .0517999 .020472864 .19127382 .022346925 .06461436 .035737775 .049969383 .006792533 .11398394 -.03975431 -.112482205 -.03677935 .023018813 .044070702 .09208278 -.021259554 .13439591 -.048604295 .058010057 -.051273342 .03115164 .100593045 -.12984459 .10762949 .18540311 -.009627994 .0026174872 -.05229366 -.23992816 -.076821424 .12318988 -.1506341 -.13093792 .0035753904 -.09583123 .067648284 -.09169266 -.09788873 -.1272856 -.1690156 .017811107 -.027340638 .07608332 .10959153 -.027422326 .08745462 -.18536375 .013429028 -.05132068 .04882859 .23418726 -.026919987 .034001417 -.010213713 .035071775 -.038683213 -.092883974 -.056313466 .111097194 -.074547 -.035446037 -.00
@sbos
sbos / SVM.py
Last active October 19, 2022 04:06
Simple linear SVM using quadratic programming
import numpy as np
from cvxopt import matrix, spmatrix
from cvxopt.solvers import qp
from cvxopt import solvers
class LinearSVM():
def __init__(self, C):
    # C: soft-margin penalty weight stored for the QP setup in `fit`.
    # NOTE(review): not validated here; presumably C > 0 — confirm at call sites.
    self.C = C
def fit(self, data, labels):
import cgt
import cgt.nn as nn
import numpy as np
from scipy.stats import norm
def gaussian_density(x, mu, sigma):
    """Symbolic Gaussian pdf: exp(-(x - mu)^2 / (2 sigma^2)) / (sqrt(2 pi) sigma).

    Built from cgt graph ops, so the result is a differentiable expression
    rather than a numeric value.
    """
    # Exponent of the Gaussian, kept in the original operation order.
    residual = x - mu
    exponent = -cgt.square(residual) / 2 / cgt.square(sigma)
    # Scale by the normalization constant sqrt(2 pi) * sigma.
    return cgt.exp(exponent) / cgt.sqrt(2 * np.pi) / sigma

# Scalar variational parameter for the mean, initialized at 0.5.
var_mu = nn.parameter(np.array(0.5))
import cgt
import cgt.nn as nn
import numpy as np
from scipy.stats import norm
def gaussian_density(x, mu, sigma):
    """Return the N(mu, sigma^2) density at `x` as a cgt expression."""
    # Squared deviation scaled by the variance: (x - mu)^2 / (2 sigma^2).
    z = cgt.square(x - mu) / 2 / cgt.square(sigma)
    density = cgt.exp(-z)
    # Divide out sqrt(2 pi) and sigma separately, matching the usual chained form.
    density = density / cgt.sqrt(2 * np.pi)
    return density / sigma

# Scalar variational parameter for the mean, initialized at 0.5.
var_mu = nn.parameter(np.array(0.5))
import cgt
import cgt.nn as nn
import numpy as np
batch = 5000

# Streaming estimate of the mean of `batch` standard-normal draws: after
# iteration i, `mean` holds the running average of the first i + 1 samples.
# NOTE(review): since cgt.randn() is a graph op, this builds a symbolic
# expression of depth `batch` rather than looping over numeric samples —
# presumably intentional for this demo; confirm.
mean = 0.
# `range` instead of the Python-2-only `xrange`: same iteration behavior,
# and the script now also runs under Python 3.
for i in range(batch):
    mean += (cgt.randn() - mean) / (i + 1)
import theano as th
import theano.tensor as T
import lasagne
import numpy as np
from theano.tensor.shared_randomstreams import RandomStreams
from lasagne.nonlinearities import tanh
import matplotlib.pyplot as plt
import sys
# change to True and see what happens
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
function write_word2vec(path::AbstractString, vm::VectorModel, dict::Dictionary)
fout = open(path, "w")
sense_prob = zeros(T(vm))
write(fout, "$(V(vm)) $(T(vm)) $(M(vm))\n")
for v in 1:V(vm)
write(fout, "$(dict.id2word[v])\n")
expected_pi!(sense_prob, vm, v)
for k in 1:T(vm)
if sense_prob[k] < 1e-3 continue end
write(fout, "$k $(sense_prob[k]) ")