Skip to content

Instantly share code, notes, and snippets.

Avatar

Gavin Leech g-leech

View GitHub Profile
View Cold_Takes_Tables.ipynb
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
View cold_takes_analysis.ipynb
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
View match_data_extrapolation.py
# You can get your own copy of the MCMC trace: https://github.com/g-leech/masks_v_mandates#run
# but I've included the quantiles in this script for reproducibility.
import numpy as np
sns.set_style("whitegrid")
def exp_reduction(a, x):
    """Average fractional reduction 1 - exp(-a * x) over the samples in x.

    a: scalar rate; x: numpy array of exposure values.
    Returns the mean reduction as a numpy scalar.
    """
    per_sample = 1.0 - np.exp(-a * x)
    return per_sample.mean()
@g-leech
g-leech / spock.py
Last active Apr 10, 2022
Comparing Spock's predictions to a coin flip, yielding a Brier score of 0.57
View spock.py
import numpy as np
# impossible 0
# v unlik 10
# unlik 25
# lik 75
# vv likely 99.5
preds = [
[0, 1],
[0.75, 1],
View reported_vs_excess_death.py
#!/usr/bin/env python
# coding: utf-8
# ## Excess COVID-19 mortality vs reported deaths over time
#
# split bar?
#
# y-axis: deaths
# x-axis: time
#
View pyswip_helpers.py
def handle_utterance_str(text):
    """Build a Prolog ``handle_utterance(1, <text>, Output)`` goal string.

    If *text* is not already quoted (does not start with ' or "), embedded
    double quotes are backslash-escaped and the text is wrapped in double
    quotes so it forms a single well-formed Prolog string.

    BUG FIXED: the original `text.replace('"', '\"')` was a no-op ('\"' is
    just '"' in Python source), and `text.replace("'", '\"')` injected
    unescaped double quotes inside the already-wrapped string, corrupting
    the term. We now escape BEFORE wrapping, and only escape the quote
    character that would actually terminate the wrapper.
    """
    if text[0] != "'" and text[0] != '"':
        # Escape first, then wrap — escaping after wrapping would also
        # mangle the wrapper quotes themselves.
        text = text.replace('"', '\\"')
        text = f'"{text}"'
    return "handle_utterance(1,{},Output)".format(text)
def escape_and_call_prolexa(text) :
libPrefix = "prolexa:"
@g-leech
g-leech / utils.py
Last active May 20, 2020
NLP helpers
View utils.py
#%tensorflow_version 2.x
import pandas as pd
import numpy as np
import re
from nltk import word_tokenize
from nltk.stem import WordNetLemmatizer
from scipy.sparse import hstack
from sklearn.feature_extraction.text import CountVectorizer, TfidfVectorizer
@g-leech
g-leech / school-uni-coactivation.py
Created May 8, 2020
Checking the coactivation of school and uni closures
View school-uni-coactivation.py
import pandas as pd

# Data export: https://www.notion.so/977d5e5be0434bf996704ec361ad621d?v=fe54f89ca9e04ac799af42b39e1efc4b
path = "COVID 19 Containment measures data.csv"
df = pd.read_csv(path)

# Exclude all US rows: state-level entries ("US: ...") and the national
# aggregate ("United States").
is_us = df["Country"].str.contains("US:") | df["Country"].str.contains("United States")
withoutUS = df[~is_us]

# Number of distinct non-US countries in the dataset.
numCountries = withoutUS.Country.unique().shape[0]
View effect_sizes.py
import math

# Assumes a bivariate normal distribution with both variables
# dichotomised into two groups.
def dichotomy_r_to_d(r):
    """Convert a correlation *r* to Cohen's d under dichotomisation."""
    denom = math.sqrt(1.0 - r ** 2)
    return 2.0 * r / denom
# Equation 9
# https://sci-hub.tw/10.1037/1082-989X.11.4.386
def r_to_d(r, n1, n2) :
View fixed_rbf.py
from scipy.spatial.distance import cdist
import numpy as np
import matplotlib.pyplot as plt
# First write a covariance function. e.g. rbf
def radial_basis_kernel(x1, x2, varSigma, lengthScale):
if x2 is None:
d = cdist(x1, x1)
else:
d = cdist(x1, x2)