Skip to content

Instantly share code, notes, and snippets.

@jw3126
Created October 2, 2020 09:08
Show Gist options
  • Star 0 You must be signed in to star a gist
  • Fork 0 You must be signed in to fork a gist
  • Save jw3126/42f02ddb39842880581fa3151c78050e to your computer and use it in GitHub Desktop.
Save jw3126/42f02ddb39842880581fa3151c78050e to your computer and use it in GitHub Desktop.
Benchmark a small 1-D convolutional network: Keras (TensorFlow, via PyCall) vs. Flux.jl, timing each training epoch on identical random data.
using PyCall
using Flux
"""
    doit_keras(cfg)

Build and train a small 1-D convnet in Keras (TensorFlow, via PyCall) on random
data, timing each training epoch with `@time`.

`cfg` is a named tuple with fields `nexamples`, `nw` (input width), `nc`
(input channels), `nepochs`, and `batch_size`.
"""
function doit_keras(cfg)
    keras = pyimport("tensorflow.keras")
    # Variable-width 1-D input with `cfg.nc` channels (was hard-coded to 1).
    inp = keras.layers.Input((nothing, cfg.nc))
    x = inp
    # Use relu on both conv layers to match the Flux network in `doit_flux`
    # (the original left the Keras activations linear, making the benchmark
    # compare slightly different models).
    x = keras.layers.Conv1D(kernel_size=51, filters=50, activation="relu")(x)
    x = keras.layers.Conv1D(kernel_size=1, filters=1, activation="relu")(x)
    out = x
    m = keras.Model(inputs=inp, outputs=out)
    m.compile(optimizer="adam", loss="mse")
    # Keras layout is (examples, width, channels); a width-51 kernel with no
    # padding shrinks the output width by 50.
    x = randn(Float32, cfg.nexamples, cfg.nw, cfg.nc)
    y = randn(Float32, cfg.nexamples, cfg.nw - 50, 1)
    for _ in 1:cfg.nepochs
        @time m.fit(x, y, batch_size=cfg.batch_size)
    end
    return nothing
end
"""
    doit_flux(cfg)

Build and train the equivalent 1-D convnet in Flux on random data, timing each
call to `Flux.train!` with `@time`.

`cfg` is a named tuple with fields `nexamples`, `nw` (input width), `nc`
(input channels), `nepochs`, and `batch_size`.
"""
function doit_flux(cfg)
    # Flux convolution layout is (width, channels, batch).
    dims_x = (cfg.nw, cfg.nc, cfg.nexamples)
    # The final conv layer has exactly one output filter, so the target always
    # has one channel. (The original used `cfg.nc` here, which only happened to
    # work because the default config has nc == 1.)
    dims_y = (cfg.nw - 50, 1, cfg.nexamples)
    x = randn(Float32, dims_x)
    y = randn(Float32, dims_y)
    data = Flux.Data.DataLoader((x, y), batchsize=cfg.batch_size)
    opt = ADAM()
    net = Chain(
        # Input channels generalized from hard-coded `1 => 50` to `cfg.nc => 50`.
        Conv((51,), cfg.nc => 50, Flux.relu),
        Conv((1,), 50 => 1, Flux.relu),
    )
    # `let net = net` avoids boxing `net` inside the closure.
    the_loss = let net = net
        function _loss(x, y)
            y_pred = net(x)
            # Internal sanity check that the target shape matches the network
            # output (kernel width 51 shrinks the width by 50).
            @assert size(y) == size(y_pred)
            Flux.mse(y_pred, y)
        end
    end
    for _ in 1:cfg.nepochs
        @time Flux.train!(the_loss, params(net), data, opt)
    end
    return nothing
end
# Shared benchmark configuration: 1000 examples of width 10000 with a single
# channel, trained for 3 epochs at batch size 10 in both frameworks.
const cfg = (nexamples=1000, nw=10000, nc=1, nepochs=3, batch_size=10)

# Run the Keras benchmark first, then the Flux benchmark on the same config.
doit_keras(cfg)
doit_flux(cfg)
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment