Liang Wang (ryanrhymes)

(* one step of gradient descent: tag the weights, run the network,
   back-propagate the cross-entropy loss, then update every layer *)
let backprop nn eta x y =
  let t = tag () in
  Array.iter (fun l ->
    l.w <- make_reverse l.w t;
    l.b <- make_reverse l.b t;
  ) nn.layers;
  let loss = Maths.(cross_entropy y (run_network x nn) / (F (Mat.row_num x |> float_of_int))) in
  reverse_prop (F 1.) loss;
  Array.iter (fun l ->
    l.w <- Maths.((primal l.w) - (eta * (adjval l.w))) |> primal;
    l.b <- Maths.((primal l.b) - (eta * (adjval l.b))) |> primal;
  ) nn.layers

(* print each test image, the network's output and the predicted class *)
let test_model nn x y =
  Mat.iter2_rows (fun u v ->
    Dataset.print_mnist_image (unpack_mat u);
    let p = run_network u nn |> unpack_mat in
    Owl.Mat.print p;
    Printf.printf "prediction: %i\n" (let _, _, j = Owl.Mat.max_i p in j)
  ) x y

let _ =
  let x, _, y = Dataset.load_mnist_train_data () in
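
The gist preview is cut off at this point; the rest of the file builds the network and runs training. Below is a minimal, hedged sketch of such a driver written only in terms of the two functions above; `nn`, the learning rate and the iteration count are illustrative assumptions, not values from the gist.

```ocaml
(* Hedged sketch of a training driver: repeatedly apply one backprop
   step, then inspect a few predictions. `nn`, `x` and `y` are assumed
   to already be in whatever form backprop/test_model expect; the
   learning rate (0.01) and iteration count are placeholders. *)
let train_and_test nn x y =
  for i = 1 to 100 do
    backprop nn (F 0.01) x y;
    if i mod 10 = 0 then Printf.printf "iteration %i done\n%!" i
  done;
  test_model nn x y
```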
@ryanrhymes
ryanrhymes / owl_ext_test01.ml
Last active April 1, 2017 21:30
interoperate different number types
(* Owl Lib: https://github.com/ryanrhymes/owl *)
open Owl_ext;;
let a = F 5.;; (* float number *)
let b = C Complex.({re = 2.; im = 5.});; (* complex number *)
let x = Dense.Matrix.S.uniform 3 3;; (* float32 matrix *)
let y = Dense.Matrix.D.uniform 3 3;; (* float64 matrix *)
let z = Dense.Matrix.Z.uniform 3 3;; (* complex64 matrix *)
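
The point of this gist is that, once the values are wrapped, they can be combined with ordinary operators. A hedged sketch follows; it assumes Owl_ext lifts (+) and ( * ) over its wrapped types and promotes across precisions, which is not shown in the gist itself.

```ocaml
(* Hedged sketch: mixed-type arithmetic via Owl_ext's lifted operators
   (assumed). The promotion rules (float32 vs float64, real vs complex)
   are assumptions, not taken from the gist. *)
let s = a + a;;   (* float + float *)
let p = a * x;;   (* float scalar with a float32 matrix *)
let q = b * z;;   (* complex scalar with a complex64 matrix *)
let r = x + y;;   (* float32 matrix + float64 matrix, presumably promoted to float64 *)
```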
@ryanrhymes
ryanrhymes / owl_neural_test01.ml
Created April 1, 2017 21:27
Test Owl Neural 01
#require "owl_neural";;
open Owl_neural;;
let nn = Feedforward.create ();;
let l0 = linear ~inputs:784 ~outputs:300 ~init_typ:(Init.Uniform (-0.075,0.075));;
let l1 = linear ~inputs:300 ~outputs:10 ~init_typ:(Init.Uniform (-0.075,0.075));;
Feedforward.add_layer nn l0;;
Feedforward.add_activation nn Activation.Tanh;;
Feedforward.add_layer nn l1;;
Feedforward.add_activation nn Activation.Softmax;;
#require "owl_neural";;
open Owl_neural;;
(* config the neural network *)
let nn = Feedforward.create ();;
let l0 = linear ~inputs:784 ~outputs:300 ~init_typ:Init.(Uniform (-0.075,0.075));;
let l1 = linear ~inputs:300 ~outputs:10 ~init_typ:Init.(Uniform (-0.075,0.075));;
Feedforward.add_layer nn l0;;
Feedforward.add_activation nn Activation.Tanh;;
Feedforward.add_layer nn l1;;
#require "owl_neural";;
open Owl_neural;;
(* config the neural network *)
let nn = Feedforward.create ();;
let l0 = linear 784 300 ~init_typ:Init.(Uniform (-0.075,0.075));;
let l1 = linear 300 10 ~init_typ:Init.(Uniform (-0.075,0.075));;
Feedforward.add_layer nn l0 ~act_typ:Activation.Tanh;;
Feedforward.add_layer nn l1 ~act_typ:Activation.Softmax;;
print nn;;
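
With the network defined, examples from this period typically load MNIST and call a train function. The call below is a hedged sketch; the existence and exact signature of `train` in the Owl_neural API of that time are assumptions, not taken from the gist.

```ocaml
(* Hedged sketch: train the feedforward network on MNIST. `train` and
   its signature are assumptions about the 2017 Owl_neural API. *)
let x, _, y = Owl.Dataset.load_mnist_train_data ();;
train nn x y;;
```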
@ryanrhymes
ryanrhymes / gist:022151ae7d1b8c94363f509532df7a29
Last active May 2, 2017 23:35
reply to boolean indexing ...
```ocaml
Mat.map (min 5.) x;;
```
```ocaml
let x = Mat.uniform 5 5;;
Mat.filter (( < ) 0.5) x;;
...
```
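
The reply is truncated; a small hedged sketch of the same idea follows, emulating boolean-mask assignment with Mat.map and collecting matching indices with Mat.filter. Owl's Mat module is assumed to be in scope as in the snippets above; the 0.5 threshold is illustrative.

```ocaml
(* Hedged sketch: emulate `x[x > 0.5] = 0.` style boolean indexing. *)
let x = Mat.uniform 5 5;;
let y = Mat.map (fun a -> if a > 0.5 then 0. else a) x;;  (* zero out entries above the threshold *)
let i = Mat.filter (fun a -> a > 0.5) x;;                 (* indices of entries above the threshold *)
```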
function f00()
    x = rand(1000, 2000)
    t0 = time()
    LinAlg.LAPACK.gesvd!('A', 'A', x)
    t1 = time()
    @printf "time:\t\t%.8f\n" (t1 - t0)
end
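
The Julia function above times a full SVD of a 1000 x 2000 random matrix. A hedged OCaml counterpart using Owl's Linalg.D.svd is sketched below; the timing via Unix.gettimeofday and the ~thin:false flag (to request the full factorisation, matching 'A','A' above) are assumptions about the intended comparison rather than code from the gist.

```ocaml
(* Hedged sketch: time Owl's SVD on a matrix of the same size as the
   Julia snippet. Requires the unix library for gettimeofday. *)
let f00 () =
  let x = Owl.Mat.uniform 1000 2000 in
  let t0 = Unix.gettimeofday () in
  let _u, _s, _vt = Owl.Linalg.D.svd ~thin:false x in
  let t1 = Unix.gettimeofday () in
  Printf.printf "time:\t\t%.8f\n" (t1 -. t0)
```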
This file has been truncated; only the first few generated stub declarations are shown below.
module CI = Cstubs_internals
external owl_stub_1_LAPACKE_sbdsdc
: int -> char -> char -> int -> _ CI.fatptr -> _ CI.fatptr ->
_ CI.fatptr -> int -> _ CI.fatptr -> int -> _ CI.fatptr -> _ CI.fatptr ->
int = "owl_stub_1_LAPACKE_sbdsdc_byte12" "owl_stub_1_LAPACKE_sbdsdc"
external owl_stub_2_LAPACKE_dbdsdc
: int -> char -> char -> int -> _ CI.fatptr -> _ CI.fatptr ->
_ CI.fatptr -> int -> _ CI.fatptr -> int -> _ CI.fatptr -> _ CI.fatptr ->
@ryanrhymes
ryanrhymes / owl_linalg.ml
Last active June 30, 2017 23:49
owl linalg
let x = Mat.uniform 4 4;;
let x = Mat.symmetric x;;       (* symmetrise so the eigenvalues are real *)
let v, w = Linalg.D.eig x;;     (* v: eigenvectors (columns), w: eigenvalues *)
let w = Dense.Matrix.Z.re w;;   (* keep the real parts *)
let v = Dense.Matrix.Z.re v;;
let v0 = Mat.col v 0;;          (* first eigenvector *)
let a = Mat.(x *@ v0);;
let b = Mat.(v0 *$ w.{0,0});;
Mat.(a =~ b);;                  (* check A * v0 = w0 * v0 (approximately) *)
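
The last three lines check A * v0 = w0 * v0 for the first eigenpair only; the short sketch below extends the same check to every column of v, reusing x, v and w from above.

```ocaml
(* Check A * v_i =~ w_i * v_i for every eigenpair. *)
let checks =
  Array.init (Mat.col_num v) (fun i ->
    let vi = Mat.col v i in
    Mat.(x *@ vi =~ vi *$ w.{0,i})
  );;
```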