#!/usr/bin/env owl
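(* VGG-style convolutional network built with Owl's Neural.S.Graph API
   (gist by @jzstark, last active February 12, 2018). Only the convolutional
   feature-extraction blocks are constructed; the fully connected
   classification head is left commented out below, and the network ends
   with a global max-pooling layer instead. *)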
open Owl
open Owl_types
open Algodiff.S
open Neural
open Neural.S
open Neural.S.Graph
let model h w =
  let nn = input [|h; w; 3|]
    (* block 1 *)
    |> conv2d [|3;3;3;64|] [|1;1|] ~act_typ:Activation.Relu ~padding:SAME
    |> conv2d [|3;3;64;64|] [|1;1|] ~act_typ:Activation.Relu ~padding:SAME
    |> max_pool2d [|2;2|] [|2;2|] ~padding:VALID
    (* block 2 *)
    |> conv2d [|3;3;64;128|] [|1;1|] ~act_typ:Activation.Relu ~padding:SAME
    |> conv2d [|3;3;128;128|] [|1;1|] ~act_typ:Activation.Relu ~padding:SAME
    |> max_pool2d [|2;2|] [|2;2|] ~padding:VALID
    (* block 3 *)
    |> conv2d [|3;3;128;256|] [|1;1|] ~act_typ:Activation.Relu ~padding:SAME
    |> conv2d [|3;3;256;256|] [|1;1|] ~act_typ:Activation.Relu ~padding:SAME
    |> conv2d [|3;3;256;256|] [|1;1|] ~act_typ:Activation.Relu ~padding:SAME
    |> conv2d [|3;3;256;256|] [|1;1|] ~act_typ:Activation.Relu ~padding:SAME
    |> max_pool2d [|2;2|] [|2;2|] ~padding:VALID
    (* block 4 *)
    |> conv2d [|3;3;256;512|] [|1;1|] ~act_typ:Activation.Relu ~padding:SAME
    |> conv2d [|3;3;512;512|] [|1;1|] ~act_typ:Activation.Relu ~padding:SAME
    |> conv2d [|3;3;512;512|] [|1;1|] ~act_typ:Activation.Relu ~padding:SAME
    |> conv2d [|3;3;512;512|] [|1;1|] ~act_typ:Activation.Relu ~padding:SAME
    |> max_pool2d [|2;2|] [|2;2|] ~padding:VALID
    (* block 5 *)
    |> conv2d [|3;3;512;512|] [|1;1|] ~act_typ:Activation.Relu ~padding:SAME
    |> conv2d [|3;3;512;512|] [|1;1|] ~act_typ:Activation.Relu ~padding:SAME
    |> conv2d [|3;3;512;512|] [|1;1|] ~act_typ:Activation.Relu ~padding:SAME
    |> conv2d [|3;3;512;512|] [|1;1|] ~act_typ:Activation.Relu ~padding:SAME
    |> max_pool2d [|2;2|] [|2;2|] ~padding:VALID
    (* classification block *)
    (*
    |> flatten
    |> fully_connected ~act_typ:Activation.Relu 4096
    |> fully_connected ~act_typ:Activation.Relu 4096
    |> fully_connected ~act_typ:Activation.Softmax classes *)
    |> global_max_pool2d (* ~act_typ:Activation.Softmax *)
    |> get_network
  in
  nn
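
(* Minimal usage sketch, not part of the original gist: instantiate the
   network for 224x224 RGB inputs (the conventional VGG input size, assumed
   here) and print its layer structure. Graph.print is assumed to be the
   network pretty-printer exposed by Neural.S.Graph. *)
let _ =
  let nn = model 224 224 in
  Graph.print nn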