GitHub gists of Michael A. Alcorn (airalcorn2)

airalcorn2 / minimal_numba_race_condition_example.py
Last active June 9, 2023 13:19
A minimal example showing a race condition in a Numba CUDA kernel.
import numpy as np
import os
import torch
from numba import cuda, int32, njit
@njit((int32[:, :], int32[:, :], int32[:, :], int32[:], int32[:, :], int32[:]))
def loop(keys, key_mask, key2idx, cur_idx, idx2key, idx_counts):
    for row, col in keys:
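
The preview cuts off inside the kernel loop, so below is a hedged, self-contained sketch of the kind of race the gist's title describes, not the gist's own kernel: many CUDA threads increment a shared counter, once without and once with cuda.atomic.add. The unguarded version typically loses updates. It assumes a CUDA-capable GPU and a working Numba install; the block and thread counts are illustrative.

import numpy as np
from numba import cuda

@cuda.jit
def unsafe_increment(counter):
    # Every thread reads, adds one, and writes back: a classic data race.
    counter[0] = counter[0] + 1

@cuda.jit
def atomic_increment(counter):
    # cuda.atomic.add serializes the read-modify-write, so no updates are lost.
    cuda.atomic.add(counter, 0, 1)

threads_per_block = 256
blocks = 256
expected = threads_per_block * blocks

unsafe = cuda.to_device(np.zeros(1, dtype=np.int32))
unsafe_increment[blocks, threads_per_block](unsafe)
print("unsafe:", unsafe.copy_to_host()[0], "expected:", expected)

safe = cuda.to_device(np.zeros(1, dtype=np.int32))
atomic_increment[blocks, threads_per_block](safe)
print("atomic:", safe.copy_to_host()[0], "expected:", expected)
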
airalcorn2 / bad_vae.py
Last active August 7, 2022 14:57
Minimal example demonstrating how a variational autoencoder frequently generates unrealistic samples when optimized to learn a simple 2D bimodal distribution.
# Adapted from: https://github.com/pytorch/examples/blob/main/vae/main.py.
import torch
import torch.utils.data
from torch import nn, optim
from torch.nn import functional as F
class VAE(nn.Module):
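
The preview stops at the class definition. Below is a hedged, compact sketch of the setup the description suggests (it is not the gist's bad_vae.py): a tiny Gaussian VAE trained on a 2D mixture of two well-separated Gaussians, after which samples decoded from the prior can be inspected for points that fall between the modes. Layer sizes, learning rate, and epoch count are illustrative choices.

import torch
from torch import nn, optim

class TinyVAE(nn.Module):
    def __init__(self, latent_dim=2, hidden=64):
        super().__init__()
        self.encoder = nn.Sequential(nn.Linear(2, hidden), nn.ReLU())
        self.mu = nn.Linear(hidden, latent_dim)
        self.logvar = nn.Linear(hidden, latent_dim)
        self.decoder = nn.Sequential(
            nn.Linear(latent_dim, hidden), nn.ReLU(), nn.Linear(hidden, 2)
        )

    def forward(self, x):
        h = self.encoder(x)
        (mu, logvar) = (self.mu(h), self.logvar(h))
        # Reparameterization trick: z = mu + sigma * eps.
        z = mu + torch.exp(0.5 * logvar) * torch.randn_like(mu)
        return self.decoder(z), mu, logvar

# A 2D bimodal target: two Gaussian blobs far apart.
n = 4096
modes = torch.tensor([[-2.0, -2.0], [2.0, 2.0]])
x = modes[torch.randint(0, 2, (n,))] + 0.1 * torch.randn(n, 2)

vae = TinyVAE()
opt = optim.Adam(vae.parameters(), lr=1e-3)
for epoch in range(200):
    (recon, mu, logvar) = vae(x)
    recon_loss = ((recon - x) ** 2).sum(dim=1).mean()
    kld = -0.5 * (1 + logvar - mu ** 2 - logvar.exp()).sum(dim=1).mean()
    loss = recon_loss + kld
    opt.zero_grad()
    loss.backward()
    opt.step()

# Decode samples from the prior; some typically land between the two modes.
with torch.no_grad():
    samples = vae.decoder(torch.randn(1000, 2))
print(samples[:5])
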
airalcorn2 / sentinel2_example.py
Last active February 16, 2022 19:43
Minimal example demonstrating how to pull Sentinel-2 data for a specific location.
# Heavily inspired by: https://geospatial.101workbook.org/ImportingData/ImportingImages.html.
import geopandas as gpd
import numpy as np
import pandas as pd
import requests
import stackstac
from PIL import Image
from satsearch import Search
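
Only the imports survive in the preview. As a hedged sketch of the workflow the description implies (a STAC search via satsearch against the Element 84 Earth Search endpoint, which is what the linked workbook uses), the snippet below finds low-cloud Sentinel-2 scenes over a point and prints the red-band asset URLs. The endpoint, collection name, coordinates, and date range are assumptions, not values taken from the gist.

from satsearch import Search

# A point of interest (lon, lat); purely illustrative coordinates.
(lon, lat) = (-85.48, 32.60)
bbox = [lon - 0.01, lat - 0.01, lon + 0.01, lat + 0.01]

search = Search(
    url="https://earth-search.aws.element84.com/v0",
    bbox=bbox,
    datetime="2021-06-01/2021-08-31",
    collections=["sentinel-s2-l2a-cogs"],
    query={"eo:cloud_cover": {"lt": 10}},
)
items = search.items()
print(f"Found {len(items)} low-cloud scenes.")

# Each item exposes its bands as cloud-optimized GeoTIFF assets.
for item in items:
    print(item.properties["datetime"], item.assets["B04"]["href"])
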
import matplotlib.pyplot as plt
import math
import numpy as np
import pandas as pd
import sklearn.datasets
import time
import torch
from sklearn.linear_model import LinearRegression
from torch import nn, optim
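
This preview appears without its gist header, so its purpose has to be inferred from the imports: scikit-learn's LinearRegression alongside torch.nn and torch.optim suggests fitting the same linear model two ways. The sketch below is only a guess at that comparison, using a synthetic regression dataset and illustrative hyperparameters.

import numpy as np
import sklearn.datasets
import torch
from sklearn.linear_model import LinearRegression
from torch import nn, optim

# Synthetic regression data.
(X, y) = sklearn.datasets.make_regression(
    n_samples=1000, n_features=5, noise=0.1, random_state=0
)

# Closed-form fit with scikit-learn.
sk_model = LinearRegression().fit(X, y)

# Gradient-descent fit of the same model in PyTorch.
X_t = torch.tensor(X, dtype=torch.float32)
y_t = torch.tensor(y, dtype=torch.float32).unsqueeze(1)
torch_model = nn.Linear(5, 1)
opt = optim.Adam(torch_model.parameters(), lr=0.1)
loss_fn = nn.MSELoss()
for step in range(2000):
    opt.zero_grad()
    loss = loss_fn(torch_model(X_t), y_t)
    loss.backward()
    opt.step()

print("sklearn coefs:", sk_model.coef_[:3])
print("torch weights:", torch_model.weight.detach().numpy()[0, :3])
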
airalcorn2 / multimodal_multivariate.py
Last active March 29, 2022 13:30
Demonstrates the issues that can arise when assuming a multivariate normal distribution for a multimodal multivariate distribution. See discussion here --> https://openreview.net/forum?id=sO4tOk2lg9I&noteId=d30Xi6n6BcJ.
import matplotlib.pyplot as plt
import numpy as np
import seaborn as sns
N = 1000
mu = 1
sd = 0.1
# Separate dimensions example.
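
The preview ends right after the constants. Here is a hedged sketch of the failure mode the description names (not the gist's own code): each dimension of the data sits near +mu or -mu, giving four well-separated modes, and a single multivariate normal fit by moment matching then generates samples in regions the data never occupies, including near the origin.

import numpy as np

rng = np.random.default_rng(0)
(N, mu, sd) = (1000, 1, 0.1)

# Each sample sits at one of the four corners (+/-mu, +/-mu): a multimodal 2D distribution.
signs = rng.choice([-1, 1], size=(N, 2))
data = signs * mu + rng.normal(0, sd, size=(N, 2))

# Moment-matched multivariate normal fit.
mean = data.mean(axis=0)
cov = np.cov(data, rowvar=False)
fake = rng.multivariate_normal(mean, cov, size=N)

# Real samples all sit ~sqrt(2)*mu from the origin; the Gaussian's samples fill the middle.
print("real sample norms (min/mean):",
      np.linalg.norm(data, axis=1).min().round(2),
      np.linalg.norm(data, axis=1).mean().round(2))
print("Gaussian sample norms (min/mean):",
      np.linalg.norm(fake, axis=1).min().round(2),
      np.linalg.norm(fake, axis=1).mean().round(2))
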
airalcorn2 / create_covid19_counties_video.py
Created September 26, 2021 16:53
Creates a video showing COVID-19 cases per million in U.S. counties over time.
import cv2
import geopandas
import io
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
import requests
import time
import zipfile
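
Only the imports are shown. The piece the import list most clearly implies is the frame pipeline: render a matplotlib figure to an in-memory PNG, decode it with OpenCV, and append it to a cv2.VideoWriter. The hedged sketch below shows just that loop with placeholder per-frame data rather than the gist's county-level COVID-19 counts; the output file name, resolution, and frame rate are illustrative.

import cv2
import io
import matplotlib.pyplot as plt
import numpy as np

(width, height, fps) = (640, 480, 10)
fourcc = cv2.VideoWriter_fourcc(*"mp4v")
writer = cv2.VideoWriter("frames.mp4", fourcc, fps, (width, height))

for day in range(30):
    # Placeholder data standing in for the per-county values on this day.
    values = np.random.rand(50)

    (fig, ax) = plt.subplots(figsize=(width / 100, height / 100), dpi=100)
    ax.bar(range(len(values)), values)
    ax.set_title(f"Day {day}")

    # Render the figure to PNG bytes and decode into a BGR image for OpenCV.
    buf = io.BytesIO()
    fig.savefig(buf, format="png", dpi=100)
    plt.close(fig)
    frame = cv2.imdecode(np.frombuffer(buf.getvalue(), dtype=np.uint8), cv2.IMREAD_COLOR)
    frame = cv2.resize(frame, (width, height))
    writer.write(frame)

writer.release()
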
airalcorn2 / synthetic.py
Last active June 20, 2021 12:21
Testing out the synthetic control approach used in Dave et al. (2020).
# See: https://matheusfacure.github.io/python-causality-handbook/15-Synthetic-Control.html
# and: http://ftp.iza.org/dp13670.pdf.
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
import seaborn as sns
from datetime import datetime, timedelta
from scipy.optimize import fmin_slsqp
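
The preview cuts off after the imports. The core computation in the linked handbook's synthetic-control chapter, and presumably in this gist, is finding non-negative donor weights that sum to one and reproduce the treated unit's pre-treatment outcomes. A hedged sketch of that step with made-up data (the donor count, period count, and true weights are fabricated for illustration):

import numpy as np
from scipy.optimize import fmin_slsqp

rng = np.random.default_rng(0)

# Pre-treatment outcomes: rows are time periods, columns are donor (control) units.
(periods, donors) = (20, 8)
donor_outcomes = rng.normal(size=(periods, donors)).cumsum(axis=0)
# The "treated" unit is built from two donors so a good synthetic control exists.
treated_outcomes = 0.6 * donor_outcomes[:, 0] + 0.4 * donor_outcomes[:, 1]

def loss(weights):
    # Pre-treatment fit error of the weighted donor combination.
    return np.sqrt(((treated_outcomes - donor_outcomes @ weights) ** 2).mean())

weights = fmin_slsqp(
    loss,
    np.full(donors, 1 / donors),                     # Start from uniform weights.
    f_eqcons=lambda w: np.array([w.sum() - 1]),      # Weights sum to one.
    bounds=[(0.0, 1.0)] * donors,                    # And are non-negative.
    disp=False,
)
print(np.round(weights, 2))  # Should be close to [0.6, 0.4, 0, ...].
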
airalcorn2 / hook_transformer_attn.py
Last active July 15, 2024 19:38
A simple script for extracting the attention weights from a PyTorch Transformer.
# Inspired by: https://towardsdatascience.com/the-one-pytorch-trick-which-you-should-know-2d5e9c1da2ca.
# Monkey patching idea suggested by @kklemon here:
# https://gist.github.com/airalcorn2/50ec06517ce96ecc143503e21fa6cb91?permalink_comment_id=4407423#gistcomment-4407423.
import torch
from torch import nn
def patch_attention(m):
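
The preview stops at patch_attention's signature. Below is a hedged sketch of the overall technique the description and the linked comment point to, not a copy of the gist itself: monkey patch each nn.MultiheadAttention's forward so it returns per-head weights (the encoder layer calls it with need_weights=False by default), then capture them with a forward hook. It assumes PyTorch 1.12+ for the average_attn_weights argument; the model dimensions are illustrative.

import torch
from torch import nn

def patch_attention(m):
    # Wrap the module's forward so attention weights are always returned.
    forward_orig = m.forward

    def wrap(*args, **kwargs):
        kwargs["need_weights"] = True
        kwargs["average_attn_weights"] = False
        return forward_orig(*args, **kwargs)

    m.forward = wrap

class SaveAttentionWeights:
    # Forward hook that stores each attention module's weights.
    def __init__(self):
        self.weights = []

    def __call__(self, module, inputs, outputs):
        # nn.MultiheadAttention returns (attn_output, attn_weights).
        self.weights.append(outputs[1].detach())

encoder_layer = nn.TransformerEncoderLayer(d_model=16, nhead=4, batch_first=True)
encoder = nn.TransformerEncoder(encoder_layer, num_layers=2)

saver = SaveAttentionWeights()
for module in encoder.modules():
    if isinstance(module, nn.MultiheadAttention):
        patch_attention(module)
        module.register_forward_hook(saver)

x = torch.randn(1, 10, 16)
encoder(x)
print([w.shape for w in saver.weights])  # One (1, nhead, 10, 10) tensor per layer.
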
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
import seaborn as sns
csv_url = "https://raw.githubusercontent.com/gradlab/CtTrajectories/main/data/ct_dat_clean.csv"
df = pd.read_csv(csv_url)
person_ids = df["Person.ID"].unique()
(min_x, max_x) = (df["Date.Index"].min(), df["Date.Index"].max())
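
This preview also appears without its gist header. The code pulls the cleaned Ct-trajectory data from gradlab/CtTrajectories and grabs the person IDs and date range, which suggests per-person trajectory plots. A hedged continuation along those lines; the Ct-value column name below is a placeholder, not taken from the gist, so check df.columns for the real one.

import matplotlib.pyplot as plt
import pandas as pd

csv_url = "https://raw.githubusercontent.com/gradlab/CtTrajectories/main/data/ct_dat_clean.csv"
df = pd.read_csv(csv_url)

ct_col = "CT.Mean"  # Placeholder column name; inspect df.columns for the actual Ct column.
(min_x, max_x) = (df["Date.Index"].min(), df["Date.Index"].max())

(fig, ax) = plt.subplots()
for (person_id, person_df) in df.groupby("Person.ID"):
    person_df = person_df.sort_values("Date.Index")
    ax.plot(person_df["Date.Index"], person_df[ct_col], alpha=0.3)

ax.set_xlim(min_x, max_x)
ax.invert_yaxis()  # Lower Ct means more virus, so Ct axes are often flipped.
ax.set_xlabel("Date.Index")
ax.set_ylabel(ct_col)
plt.show()
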
import matplotlib.pyplot as plt
import numpy as np
import seaborn as sns
households = 10000
infected_households_prop = 0.01
persons_per_household = 100
sample_prop = 0.001
samples = int(sample_prop * households * persons_per_household)
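
A third preview without its header: the constants set up a population of 10,000 households with 100 people each, 1% of households infected, and a 0.1% individual sample. One hedged guess at a continuation (not the gist's code): repeatedly draw such samples, under the strong assumption that everyone in an infected household is infected, and look at the spread of the estimated infection rate across trials.

import numpy as np

rng = np.random.default_rng(0)

households = 10000
infected_households_prop = 0.01
persons_per_household = 100
sample_prop = 0.001
samples = int(sample_prop * households * persons_per_household)

estimates = []
for trial in range(200):
    # Mark infected households, then assume every member of one is infected.
    infected_households = rng.random(households) < infected_households_prop
    person_infected = np.repeat(infected_households, persons_per_household)
    # Sample individuals uniformly at random without replacement.
    idx = rng.choice(person_infected.size, size=samples, replace=False)
    estimates.append(person_infected[idx].mean())

estimates = np.array(estimates)
print("expected prevalence:", infected_households_prop)
print("estimate mean / sd:", estimates.mean().round(4), estimates.std().round(4))
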