Code snippets by Dirk Schumacher (dirkschumacher)
from l0bnb import fit_path
from l0bnb import gen_synthetic
X, y, b = gen_synthetic(n=10, p=10, supp_size=10)
sols = fit_path(X, y, lambda_2=0.01, max_nonzeros=1)
print(sols)
local({
  # a quick, just for fun base R implementation of group_by/summarise/`%>%`
  # many edge cases not covered
  # also group_by does not produce a data.frame with the same shape
  # as the input
  group_by <- function(data, ...) {
    exprs <- substitute(list(...))
    grouping_cols <- vapply(exprs[-1], as.character, character(1))
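    # A plausible completion sketch (an assumption; the gist preview is
    # truncated here): keep the split data together with its group keys.
    groups <- split(data, data[, grouping_cols, drop = FALSE], drop = TRUE)
    keys <- do.call(rbind, lapply(groups, function(g) g[1, grouping_cols, drop = FALSE]))
    structure(list(keys = keys, groups = groups), class = "grouped_df_toy")
  }

  summarise <- function(data, ...) {
    # evaluate each summary expression within every group, then bind the keys back on
    exprs <- eval(substitute(alist(...)))
    res <- lapply(data$groups, function(g) {
      as.data.frame(lapply(exprs, function(e) eval(e, g)))
    })
    cbind(data$keys, do.call(rbind, res))
  }

  `%>%` <- function(lhs, rhs) {
    # insert lhs as the first argument of the call on the right-hand side
    rhs <- substitute(rhs)
    eval(as.call(append(as.list(rhs), list(lhs), after = 1L)), parent.frame())
  }

  # usage: mean mpg per number of cylinders
  mtcars %>% group_by(cyl) %>% summarise(avg_mpg = mean(mpg))
})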
library(blake3)
library(digest)
input <- charToRaw(paste0(sample(LETTERS, 1e6, replace = TRUE), collapse = ""))
microbenchmark::microbenchmark(
  blake3 = sodium::bin2hex(blake3_hash_raw(input)),
  sha1 = digest(input, "sha1", serialize = FALSE),
  md5 = digest(input, "md5", serialize = FALSE),
  sha256 = digest(input, "sha256", serialize = FALSE),
  osha1 = openssl::sha1(input)
)
partition <- function(groups_vector, n_shards) {
  stopifnot(is.integer(groups_vector))
  group_sizes <- sort(table(groups_vector), decreasing = TRUE)
  n_groups <- length(group_sizes)
  stopifnot(n_groups > n_shards)
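  # A plausible completion (an assumption; the gist preview is truncated
  # here): greedy bin packing, assigning each group, largest first, to the
  # currently least-loaded shard.
  shard_load <- numeric(n_shards)
  shard_of_group <- integer(n_groups)
  for (g in seq_len(n_groups)) {
    target <- which.min(shard_load)
    shard_of_group[g] <- target
    shard_load[target] <- shard_load[target] + group_sizes[g]
  }
  split(names(group_sizes), shard_of_group)
}

# usage: partition(rep(1:10L, times = 1:10), 3L) spreads 10 groups over 3 shards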
# Build sparse models with filter guards
# This problem arises in network models where you have a variable for each
# pair of nodes. If your graph is not fully connected, you end up creating a
# lot of useless variables when you include all pair combinations in your MIP
# model and then fix the invalid edges to 0.
# Below is a toy example with 1 million potential edges, of which only 36 are
# actually used.
library(rmpk)
is_adjacent <- function(i, j) {
  i < j & j < 10 # a dummy predicate indicating when two nodes are adjacent
}
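# A quick base R sanity check of the filter guard idea (not part of the MIP
# model itself): of the 1000 x 1000 candidate node pairs, only the ones
# passing is_adjacent() need a variable at all.
n <- 1000
sum(outer(seq_len(n), seq_len(n), is_adjacent)) # 36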
# An example of the TSP solved through solver callbacks.
# Follows the formulation of the Gurobi example
# http://examples.gurobi.com/traveling-salesman-problem/
# and the MTZ formulation from the TSP vignette.

# all experimental

library(ggplot2)
suppressPackageStartupMessages(library(dplyr))
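# A minimal sketch of the subtour detection such callbacks rely on (an
# assumption; the solver callback API itself is omitted here). Given the
# successor of each city in a candidate solution, walk each cycle and return
# the shortest one so a violated subtour elimination constraint can be added
# lazily.
shortest_subtour <- function(next_city) {
  unvisited <- seq_along(next_city)
  best <- NULL
  while (length(unvisited) > 0) {
    tour <- current <- unvisited[1]
    repeat {
      current <- next_city[current]
      if (current == tour[1]) break
      tour <- c(tour, current)
    }
    if (is.null(best) || length(tour) < length(best)) best <- tour
    unvisited <- setdiff(unvisited, tour)
  }
  best
}
# two subtours 1-2-3 and 4-5: shortest_subtour(c(2L, 3L, 1L, 5L, 4L)) returns c(4, 5)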
`?` <- function(lhs, rhs) {
  if (missing(rhs)) {
    return(eval(bquote(utils::`?`(.(substitute(lhs))))))
  }
  rhs <- substitute(rhs)
  envir <- parent.frame()
  split_colon <- strsplit(deparse(rhs), ":")
  stopifnot(length(split_colon) == 1L, length(split_colon[[1L]]) == 2L)
  rhs_chr <- split_colon[[1L]]
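  # A plausible completion (an assumption, not the original gist): treat
  # `value ? name:class` as a type-checked assignment into the caller's frame.
  name <- rhs_chr[[1L]]
  expected_class <- rhs_chr[[2L]]
  stopifnot(inherits(lhs, expected_class))
  assign(name, lhs, envir = envir)
  invisible(lhs)
}

# usage under that assumption: `42 ? x:numeric` assigns x <- 42,
# while `"a" ? x:numeric` fails the class check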
library(armacmp)
# code from https://nextjournal.com/wolfv/how-fast-is-r-with-fastr-pythran
# which in turn comes in part from http://www.tylermw.com/throwing-shade/
# Author: Tyler Morgan-Wall

# first the R version

faster_bilinear <- function(Z, x0, y0) {
  i <- floor(x0)
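  # plausible completion sketch (an assumption, not necessarily the original
  # code from the throwing-shade post): standard bilinear interpolation of
  # the grid Z at fractional coordinates (x0, y0)
  j <- floor(y0)
  dx <- x0 - i
  dy <- y0 - j
  Z[i, j] * (1 - dx) * (1 - dy) +
    Z[i + 1, j] * dx * (1 - dy) +
    Z[i, j + 1] * (1 - dx) * dy +
    Z[i + 1, j + 1] * dx * dy
}

# e.g. faster_bilinear(matrix(1:9, 3, 3), 1.5, 1.5) returns 3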
library(armacmp)

# some of julia's microbenchmarks translated to C++
# https://github.com/JuliaLang/Microbenchmarks/blob/master/perf.R

fib_cpp <- armacmp(function(n = type_scalar_int()) {
  fib_rec <- function(nr = type_scalar_int()) {
    if (nr < 2) {
      return(nr, type = type_scalar_int())
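    }
    # plausible continuation (an assumption; the gist preview is truncated
    # here): the usual recursive definition, mirroring the Julia benchmark
    return(fib_rec(nr - 1) + fib_rec(nr - 2), type = type_scalar_int())
  }
  return(fib_rec(n), type = type_scalar_int())
})

# plain R reference for comparison: fib_r(20) == 6765
fib_r <- function(n) if (n < 2) n else fib_r(n - 1) + fib_r(n - 2)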
library(armacmp)
# Arnold, T., Kane, M., & Lewis, B. W. (2019). A Computational Approach to Statistical Learning. CRC Press.
# logistic regression using the Newton-Raphson method
log_reg <- armacmp(function(X, y) {
  beta <- rep.int(0, ncol(X))
  for (i in seq_len(25)) {
    b_old <- beta
    alpha <- X %*% beta
    p <- 1 / (1 + exp(-alpha))
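    # plausible continuation of the Newton-Raphson update (an assumption; the
    # gist preview is truncated here and armacmp support for each operation
    # below is assumed)
    W <- p * (1 - p)
    XtWX <- t(X) %*% (X * as.numeric(W))
    score <- t(X) %*% (y - p)
    beta <- b_old + solve(XtWX, score)
  }
  return(beta)
})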