Skip to content

Instantly share code, notes, and snippets.

@cossio
cossio / fast-latex-input.el
Created November 4, 2022 18:39 — forked from karthink/fast-latex-input.el
Configuration for super-fast LaTeX input using AUCTeX, CDLaTeX, and a bit of YASnippet. See https://karthinks.com/software/latex-input-for-impatient-scholars
;; This elisp code uses use-package, a macro to simplify configuration. It will
;; install it if it's not available, so please edit the following code as
;; appropriate before running it.
;; Note that this file does not define any auto-expanding YaSnippets.
;; Install use-package
;; NOTE(review): `package-install` fails if package archive contents have not
;; been fetched yet; on a fresh Emacs, run `package-refresh-contents` first.
(package-install 'use-package)
;; AucTeX settings - almost no changes
@cossio
cossio / file1
Last active March 16, 2022 16:01
testing gists
hola mundo
@cossio
cossio / softmax_online.jl
Last active December 9, 2021 21:56
Attempt at making an online version of softmax
# Single-pass ("online") softmax accumulator over the first dimension of `x`.
# NOTE(review): this gist preview is truncated — the final combination of
# `max_`/`sum_` into the softmax result, the return value, and the function's
# closing `end` are not visible here; do not treat this as a complete definition.
# Relies on `tail` (presumably `Base.tail`) being in scope — confirm.
function softmax_online(x::AbstractArray) # seems slower than softmax
# Running per-column maximum, initialized to -Inf in the element type of `x`.
max_ = fill(convert(eltype(x), -Inf), 1, tail(size(x))...)
# Running per-column sum of exp(x - max), kept consistent with `max_`.
sum_ = zeros(eltype(x), 1, tail(size(x))...)
for j in CartesianIndices(tail(size(x))), i = 1:size(x,1)
if x[i,j] > max_[1,j]
# A new maximum was found: rescale the accumulated sum to the new reference
# so that `sum_` always holds sum(exp(x[k,j] - max_[1,j])) over seen `k`.
sum_[1,j] *= exp(max_[1,j] - x[i,j])
max_[1,j] = x[i,j]
end
sum_[1,j] += exp(x[i,j] - max_[1,j])
end
@cossio
cossio / columns.jl
Created December 9, 2021 21:53
iterator over columns of high-dimensional array
"""
columns(A)
Returns an array over the columns of `A` (as views). Similar to `eachcol` but
for higher-dimensional arrays. In general column (i,j,k,...) is defined as
`A[:,i,j,k,...]`.
"""
function columns(A::AbstractArray)
[A[:,I] for I in CartesianIndices(Base.tail(axes(A)))]
@cossio
cossio / argmax_first.jl
Created December 9, 2021 21:51
Computes the argmax of `A` over its first `N` dimensions and drops them. By default `N = 1`.
"""
argmax_first(A, Val(N) = Val(1))
argmax of `A` over its first `N` dimensions and drops them. By default `N = 1`.
"""
function argmax_first(A::AbstractArray, ::Val{N} = Val(1)) where {N}
dims = tuplen(Val(N))
argmax_(A; dims=dims)
end
@cossio
cossio / log1msoftmax.jl
Created November 6, 2021 18:55
Computes log(1 - softmax(X)), accurately.
"""
log1msoftmax(X; dims=1)
Computes log(1 - softmax(X)), accurately.
"""
function log1msoftmax(x::AbstractArray; dims=1)
@warn "log1msoftmax can have numerical issues, https://stats.stackexchange.com/questions/469706/log1-softmaxx/469803"
#FIXME: https://stats.stackexchange.com/questions/469706/log1-softmaxx/469803?noredirect=1#comment867691_469803
m = maximum(x; dims=dims)
e = exp.(x .- m)
@cossio
cossio / mylog_rrule.jl
Created October 1, 2021 11:56
Testing rrules
using ChainRulesCore, ChainRulesTestUtils, Test
# Logarithm of `x` for positive inputs; `-Inf` (in the matching float type)
# for zero or negative inputs. Unlike `log`, which throws a `DomainError`
# for negative reals, this is total on the real line.
function mylog(x::Real)
    logabs = log(abs(x))
    x > 0 && return logabs
    return oftype(logabs, -Inf)
end
---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
<ipython-input-3-dc0f2ac96f27> in <module>
15 RBM.fit(all_data[in_train][order_train], weights= all_weights[in_train][order_train], batch_size = batch_size,
16 n_iter = n_iter, l1b = l1b, N_MC = N_MC,
---> 17 decay_after = decay_after, verbose = 0,vverbose=1 )
~/work/PGM/source/rbm.py in fit(self, data, batch_size, learning_rate, extra_params, init, optimizer, batch_norm, CD, N_PT, N_MC, nchains, n_iter, MoI, MoI_h, MoI_tau, PTv, PTh, interpolate_z, degree_interpolate_z, zero_track_RBM, only_sampling, lr_decay, lr_final, decay_after, l1, l1b, l1c, l2, l2_fields, reg_delta, no_fields, weights, adapt_PT, AR_min, adapt_MC, tau_max, update_every, N_PT_max, N_MC_max, from_hidden, learning_rate_multiplier, update_betas, record_acceptance, shuffle_data, epsilon, verbose, vverbose, record, record_interval, data_test, weights_test, l1_custom, l1b_cust
@cossio
cossio / erfcx.nb
Created May 23, 2020 01:30
Scaled complementary error function, erfcx(x), in Mathematica
(* Scaled complementary error function, erfcx(x) = E^(x^2) Erfc[x] (per the
   gist description), expressed via the Hermite function of order -1.
   NOTE(review): relies on the identity HermiteH[-1, x] == (Sqrt[Pi]/2)
   E^(x^2) Erfc[x] — confirm against the Wolfram HermiteH reference. *)
Erfcx[x_] := 2/Sqrt[Pi] HermiteH[-1, x]
# Rejection sampler based on algorithm from Robert (1995)
#
# - Available at http://arxiv.org/abs/0907.4010
#
# Copied this implementation from Distributions.jl, with few modifications
# to make it generic.
using Random, Statistics
# Difference of squares, x² − y², written in factored form.
function Δ2(x, y)
    return (x - y) * (x + y)
end