Skip to content

Instantly share code, notes, and snippets.

Avatar

Jan Weidner jw3126

  • Freiburg, Germany
View GitHub Profile
@jw3126
jw3126 / mwe.py
Last active Nov 20, 2020
pytorch_lightning_ddp_gradient_checkpointing_bug
View mwe.py
# This reproduces a pytorch_lightning issue
# where gradient checkpointing + ddp results in nan loss
#
# * Run with gpus=1 and it works fine.
# * Run with gpus=4 and the loss becomes nan quickly
#
# See also https://forums.pytorchlightning.ai/t/gradient-checkpointing-ddp-nan/398
import torch
from torch import nn
from torch.nn import functional as F
@jw3126
jw3126 / flux_vs_keras.jl
Created Oct 2, 2020
Benchmark 1d convnet keras vs flux
View flux_vs_keras.jl
using PyCall
using Flux
function doit_keras(cfg)
keras = pyimport("tensorflow.keras")
inp = keras.layers.Input((nothing, 1))
x = inp
x = keras.layers.Conv1D(kernel_size=51, filters=50)(x)
x = keras.layers.Conv1D(kernel_size=1, filters=1)(x)
out = x
View minimize_scaled_L1_diff.jl
using Test
using Convex1D
using BenchmarkTools
function minimize_scaled_L1_diff(xs, ys)
# find t::Number that minimizes f(t) = sum(abs, t*xs - ys)
# f is convex with piecewise constant derivative given by
# f'(t) = Σ xi * sign(t*xi - yi)
# One of the points ti := yi/xi must be a minimizer (for some xi !=0. If all xi==0 then f == const anyway)
# Based on remarks of Mathieu Tanneau in slack
@jw3126
jw3126 / conv1d.jl
Created Apr 27, 2020
Julia CUDA 1d nn style large batch convolution
View conv1d.jl
using CUDAnative, CuArrays
macro cushow(ex)
val = gensym("val")
s = string(ex)
quote
$val = $(esc(ex))
CUDAnative.@cuprintln($(Expr(:string, s, " = ", val)))
$val
end
View layered_array.jl
using Revise
using ArgCheck
struct LayeredArray{T, N, L} <: AbstractArray{T,N}
layers::L
function LayeredArray(layers)
@argcheck !isempty(layers)
@argcheck first(layers) isa AbstractArray
l = first(layers)
L = typeof(layers)
View gist:e81cdf65c18610bc66066f06b9b72301
using Makie
using JuAFEM, SparseArrays
using LinearAlgebra
#
# Poisson example from JuAFEM docs
#
grid = generate_grid(Triangle, (20, 20));
dim = 2
@jw3126
jw3126 / advection.jl
Created Aug 5, 2019
advection problems
View advection.jl
# ]add AbstractPlotting
# ]add Makie
using Makie
function step!(u_new, u, o)
# u_new[i] = u[i] + dt*v*(u[i] - u[i-1])/dx
#
for i in reverse(eachindex(u))
s = o.v*o.dt/o.dx
u_new[i] = u[i] + s * (u[i] - get(u, i-1, zero(eltype(u))))
@jw3126
jw3126 / release.jl
Created Apr 28, 2019
Tag and release package to local registry.
View release.jl
using LibGit2
using ArgCheck
using Pkg: TOML
import Pkg
import Registrator
struct Release
package::Module
registry::String
check_master::Bool
View LibGit2.jl
using LibGit2
repo_path = "LibGit2TestRepo"
repo_url = "git@github.com:jw3126/LibGit2TestRepo.git"
rm(repo_path, recursive=true, force=true)
mkpath(path)
# init
repo = try
@info "Cloning repo"
@jw3126
jw3126 / DirichletAnnulusApproxFun.jl
Last active Jan 23, 2019
DirichletAnnulusApproxFun.jl
View DirichletAnnulusApproxFun.jl
using ApproxFun
using LinearAlgebra
using Interact
using Plots
a = 1.; b=5.
Ω = a..b
# Ω = Chebyshev(a..b)
r = Fun(identity, Ω)
Δ_rad = 𝒟^2 + 1.0/r * 𝒟