Skip to content

Instantly share code, notes, and snippets.

View ryanrhymes's full-sized avatar
🎯
Focusing

Liang Wang ryanrhymes

🎯
Focusing
View GitHub Profile
#require "owl_neural";;
open Owl_neural;;
(* config the neural network *)
(* Builds a 784 -> 300 -> 10 feedforward classifier with the older
   owl_neural API, where the activation is attached to a layer via the
   [~act_typ] label of [add_layer].
   NOTE(review): exact API shape is Owl-version-specific — confirm
   against the owl_neural release in use. *)
let nn = Feedforward.create ();;
(* Hidden layer; weights drawn uniformly from (-0.075, 0.075) —
   presumably an inclusive/exclusive detail of [Init.Uniform]. *)
let l0 = linear 784 300 ~init_typ:Init.(Uniform (-0.075,0.075));;
(* Output layer mapping the 300 hidden units to the 10 classes. *)
let l1 = linear 300 10 ~init_typ:Init.(Uniform (-0.075,0.075));;
Feedforward.add_layer nn l0 ~act_typ:Activation.Tanh;;
Feedforward.add_layer nn l1 ~act_typ:Activation.Softmax;;
(* Print a human-readable summary of the configured network. *)
print nn;;
#require "owl_neural";;
open Owl_neural;;
(* config the neural network *)
(* Same 784 -> 300 -> 10 network as above, but using the labelled
   [~inputs]/[~outputs] API where activations are appended as separate
   steps via [add_activation].
   NOTE(review): this preview looks truncated — the final
   [add_activation nn Activation.Softmax] after [l1] is likely cut off
   (compare the complete owl_neural_test01.ml snippet below). *)
let nn = Feedforward.create ();;
let l0 = linear ~inputs:784 ~outputs:300 ~init_typ:Init.(Uniform (-0.075,0.075));;
let l1 = linear ~inputs:300 ~outputs:10 ~init_typ:Init.(Uniform (-0.075,0.075));;
Feedforward.add_layer nn l0;;
Feedforward.add_activation nn Activation.Tanh;;
Feedforward.add_layer nn l1;;
@ryanrhymes
ryanrhymes / owl_neural_test01.ml
Created April 1, 2017 21:27
Test Owl Neural 01
#require "owl_neural";;
open Owl_neural;;
(* Assemble a 784 -> 300 -> 10 feedforward classifier:
   linear -> tanh -> linear -> softmax. *)
let nn = Feedforward.create ();;
(* Both linear layers draw their initial weights uniformly from
   (-0.075, 0.075); [Init.(...)] locally opens the Init module. *)
let l0 = linear ~init_typ:Init.(Uniform (-0.075,0.075)) ~inputs:784 ~outputs:300;;
let l1 = linear ~init_typ:Init.(Uniform (-0.075,0.075)) ~inputs:300 ~outputs:10;;
(* Layers and their activations are appended in evaluation order. *)
Feedforward.add_layer nn l0;;
Feedforward.add_activation nn Activation.Tanh;;
Feedforward.add_layer nn l1;;
Feedforward.add_activation nn Activation.Softmax;;
@ryanrhymes
ryanrhymes / owl_ext_test01.ml
Last active April 1, 2017 21:30
interoperate between different number types
(* Owl Lib: https://github.com/ryanrhymes/owl *)
open Owl_ext;;
(* Values of several number types wrapped in Owl_ext's unified
   constructors — per the gist's title, these are meant to be mixed in
   arithmetic with one another. *)
let a = F 5.;; (* float number *)
let b = C Complex.({re = 2.; im = 5.});; (* complex number *)
let x = Dense.Matrix.S.uniform 3 3;; (* float32 matrix *)
let y = Dense.Matrix.D.uniform 3 3;; (* float64 matrix *)
let z = Dense.Matrix.Z.uniform 3 3;; (* complex64 matrix *)
(* For each paired row of [x] and [y]: render the MNIST sample, run the
   network on it, print the raw output vector, and print the index of
   its maximum entry as the predicted class. The [y] row itself is not
   used — iteration is pairwise only to keep samples and labels aligned. *)
let test_model nn x y =
  Mat.iter2_rows (fun sample _label ->
    Dataset.print_mnist_image (unpack_mat sample);
    let pred = run_network sample nn |> unpack_mat in
    Owl.Mat.print pred;
    let _, _, cls = Owl.Mat.max_i pred in
    Printf.printf "prediction: %i\n" cls
  ) x y
(* Training fragment (truncated in this preview): loads the MNIST
   training set, then defines one backprop step over the network using
   Owl's Algodiff reverse mode. The fragment ends mid-way through the
   parameter-update loop. *)
let _ =
let x, _, y = Dataset.load_mnist_train_data () in
let backprop nn eta x y =
(* Presumably a fresh tag identifying this reverse-mode pass — confirm
   against Owl's Algodiff documentation. *)
let t = tag () in
(* Mark every weight and bias as a reverse-mode variable for this pass. *)
Array.iter (fun l ->
l.w <- make_reverse l.w t;
l.b <- make_reverse l.b t;
) nn.layers;
(* Cross-entropy of the batch, divided by the row count — i.e. the mean
   loss per sample. *)
let loss = Maths.(cross_entropy y (run_network x nn) / (F (Mat.row_num x |> float_of_int))) in
(* Propagate a unit gradient back from the loss through the graph. *)
reverse_prop (F 1.) loss;
(* Gradient-descent update, w <- w - eta * dw (preview cuts off here). *)
Array.iter (fun l ->
l.w <- Maths.((primal l.w) - (eta * (adjval l.w))) |> primal;
open Owl
open Algodiff.AD
(* One fully-connected layer: weight matrix [w], bias [b], and
   activation function [a]. [w] and [b] are mutable so training code can
   update them in place. *)
type layer = { mutable w : t; mutable b : t; a : t -> t }
(* A network is simply an ordered array of layers. *)
type network = { layers : layer array }
(* Apply one layer: transform [x] by the layer's weights and bias
   ([$@] being Owl's matrix product), then apply its activation. *)
let run_layer x l = l.a Maths.((x $@ l.w) + l.b)

(* Thread the input through every layer of the network, in order. *)
let run_network x nn = nn.layers |> Array.fold_left run_layer x
let l0 = {
@ryanrhymes
ryanrhymes / owl_nn.ml
Created March 10, 2017 15:54
using owl to build a trivial nn using the AD module from scratch
open Owl_algodiff_ad
(* One fully-connected layer: weight matrix [w], bias [b], and
   activation function [a]. [w] and [b] are mutable so training code can
   update them in place. *)
type layer = { mutable w : t; mutable b : t; a : t -> t }
(* A network is simply an ordered array of layers. *)
type network = { layers : layer array }
(* Apply one layer: transform [x] by the layer's weights and bias
   ([$@] being Owl's matrix product), then apply its activation. *)
let run_layer x l = l.a Maths.((x $@ l.w) + l.b)

(* Thread the input through every layer of the network, in order. *)
let run_network x nn = nn.layers |> Array.fold_left run_layer x
@ryanrhymes
ryanrhymes / mnist.ml
Created March 9, 2017 17:26
re-format mnist data set
(* Test neural network on MNIST *)
#require "bitstring";;
#require "bitstring.syntax";;
open Bitstring
(* Header of an MNIST idx file: the [magic] number and the number of
   [items] — TODO(review): confirm field meanings against the idx
   file-format specification. *)
type t = { magic : int; items : int }
(* Hard-coded, machine-specific path to the MNIST test-image file. *)
let dataset = "/Users/liang/owl_dataset/t10k-images-idx3-ubyte"
@ryanrhymes
ryanrhymes / eigen_dsmat.ml
Created January 12, 2017 16:41
Functor of dsmat in Eigen library
(*
* Eigen - an OCaml interface to C++ Eigen library
* Copyright (c) 2016 Liang Wang <liang.wang@cl.cam.ac.uk>
*)
module type MatSig = sig
type elt
type mat