Skip to content

Instantly share code, notes, and snippets.

View JossWhittle's full-sized avatar
😸

Joss Whittle JossWhittle

😸
View GitHub Profile
import numpy as np
from numba import njit, prange
# Lookup table mapping each byte value (0-255) to its population count (number of
# set bits), for counting matching bits in byte-packed boolean fingerprints.
# Passing dtype= to np.array builds the uint8 array directly instead of creating
# an int64 array first and copying it with .astype().
BIT_COUNT_LOOKUP = np.array([bin(i).count('1') for i in range(256)], dtype=np.uint8)
# NOTE(review): @njit already implies nopython mode — the explicit nopython=True
# is redundant (recent numba versions warn about it); fastmath and parallel look
# intended given prange is imported above.
@njit(fastmath=True, nopython=True, parallel=True)
# NOTE(review): this excerpt is truncated — the docstring below is unterminated
# and the function body is not visible here; skip judging the implementation.
def fast_tanimoto_matrix(fingerprints, progress):
"""
Compute a symmetric Tanimoto similarity matrix over a set of fingerprints of size (N, F//8).
Where N is the number of fingerprints, and F is the length of the boolean fingerprint.
@JossWhittle
JossWhittle / earth-view.py
Last active August 29, 2022 18:06
Simple script to download all of the images on https://earthview.withgoogle.com/ in 1800x1200 resolution.
import os
import json
import urllib.request
from tqdm import tqdm
import multiprocessing
# Fetch the Earth View photo manifest, then tag each entry with the numeric id
# taken from the trailing component of its slug.
with urllib.request.urlopen('https://earthview.withgoogle.com/_api/photos.json') as f:
    photo_entries = json.loads(f.read().decode('utf-8'))

slugs = [entry | {'id': entry['slug'].split('-')[-1]} for entry in photo_entries]
FROM frolvlad/alpine-python3
# Build and install numpy
# The build-only toolchain is grouped under the virtual package name
# ".build-dependencies" so it can be removed again in a single apk command,
# while the runtime libraries (libstdc++, openblas, lapack) stay installed.
RUN apk add --no-cache \
--virtual=.build-dependencies \
g++ gfortran file binutils \
musl-dev python3-dev cython openblas-dev lapack-dev && \
apk add libstdc++ openblas lapack && \
\
pip install --disable-pip-version-check --no-build-isolation --no-cache-dir numpy==1.18.5 && \
# NOTE(review): this RUN instruction is truncated in this excerpt (it ends with
# a line continuation) — presumably the .build-dependencies virtual package is
# removed in the missing part; confirm against the full Dockerfile.
# Keep last N weights and one weight every H hours, discard others
# Checkpoint-pruning pass over saved weight files matching 'style_weights-*.h5'.
# NOTE(review): relies on names defined elsewhere in the file (glob, os, time,
# weight_path, keep_last_n_weights, keep_weights_every_n_hours) — confirm.
old_weight_paths = sorted(list(glob(os.path.join(weight_path, 'style_weights-*.h5'))))
# Iterate over weights from newest to oldest, discard oldest weights if multiple were saved that hour
prev_weight_age = -1
for old_weight_path in reversed(old_weight_paths[:-keep_last_n_weights]):
# Age bucket of this weight file: integer number of keep_weights_every_n_hours-hour
# windows since it was last modified (the // bins files into those windows).
weight_age = int((time() - os.path.getmtime(old_weight_path)) // (60 * 60 * keep_weights_every_n_hours))
# NOTE(review): the loop body appears truncated here — the comparison against
# prev_weight_age and the actual file deletion are not visible in this excerpt.
class LinearWarmUpAndCosineDecay(tf.keras.optimizers.schedules.LearningRateSchedule):
def __init__(self, initial_learning_rate, warmup_steps, total_steps, alpha, name=None):
    """Store the schedule configuration (warm-up then decay; see the class name).

    alpha is the fraction of the initial learning rate kept as the floor, so the
    minimum learning rate is precomputed as initial_learning_rate * alpha.
    """
    super(LinearWarmUpAndCosineDecay, self).__init__(name=name)
    # Plain configuration state; the schedule itself is applied elsewhere.
    self.initial_learning_rate = initial_learning_rate
    self.warmup_steps = warmup_steps
    self.total_steps = total_steps
    self.alpha = alpha
    # Floor of the schedule, derived once from the constructor arguments.
    self.min_learning_rate = initial_learning_rate * alpha
% Preamble fragment for typesetting code listings in the Source Code Pro font.
\usepackage{listings}% code listings environment
\usepackage{textcomp}% extra text symbols (e.g. straight quotes used in listings)
\usepackage[utf8]{inputenc}% source files are UTF-8 encoded
\usepackage[TS1,T1]{fontenc}% font encodings (TS1 companion symbols + T1 text)
\usepackage[english]{babel}% English hyphenation and language conventions
\usepackage{sourcecodepro}% the Source Code Pro monospace font
\usepackage{scrextend}% KOMA-Script helpers (provides \addtokomafont outside KOMA classes)
\addtokomafont{labelinglabel}{\sffamily}% sans-serif labels in labeling lists
\pdfmapfile{=SourceCodePro.map}% register the font map with pdfTeX
import numpy as np
import tensorflow as tf
class LogMetrics(tf.keras.callbacks.Callback):
# Keras callback that records metric values; stores a log directory, the metric
# objects, and a step count at construction time.
# NOTE(review): this excerpt appears truncated — the parameters loss, dataset
# and training are accepted below but never stored in the visible lines;
# confirm against the full file before relying on this constructor.
def __init__(self, log_dir, loss, metrics, steps, dataset, training=False):
super(LogMetrics, self).__init__()
self.log_dir = log_dir
self.metrics = metrics
self.steps = steps
% Listings configuration using Source Code Pro as the listing font.
\usepackage{listings,lstautogobble}% listings + automatic indentation gobbling
\usepackage{sourcecodepro}% Source Code Pro monospace font
\pdfmapfile{=SourceCodePro.map}% register the font map with pdfTeX
% Default listing style.
% NOTE(review): this \lstset appears truncated — the closing brace is not
% visible in this excerpt.
\lstset{
xleftmargin=0.5cm,frame=tlbr,framesep=4pt,framerule=0.5pt,
language=,
upquote=true,
columns=fixed,
tabsize=2,
extendedchars=true,
@JossWhittle
JossWhittle / wales.graph
Created February 16, 2020 17:32
An undirected graph of Welsh constituencies and their neighbour connectivities.
40
Aberavon,6,6,29,32,17,26,33
Aberconwy,3,16,19,3
Alyn and Deeside,4,18,16,15,38
Arfon,3,19,1,39
Blaenau Gwent,5,21,35,24,5,23
Brecon and Radnorshire,8,24,25,14,12,26,17,23,4
Bridgend,3,37,0,29
Caerphilly,6,9,28,21,23,17,30
Cardiff Central,3,10,9,11
class LoggingTensorBoard(tf.keras.callbacks.TensorBoard):
def __init__(self, log_dir):
    """Set up TensorBoard logging: per-epoch scalars, model graph, profile batch 2."""
    # Only the output directory varies per run; the remaining settings are fixed.
    tb_settings = dict(update_freq='epoch', write_graph=True, profile_batch=2)
    super().__init__(log_dir=log_dir, **tb_settings)
def on_epoch_end(self, epoch, logs=None):
# Augment the epoch logs with the current learning rate and the summed
# regularization losses before TensorBoard writes them out.
logs = logs or {}
logs.update({
# Concrete value of the optimizer's learning rate at the end of this epoch.
'learning_rate' : tf.keras.backend.eval(self.model.optimizer.lr),
# Total of the model's regularization loss terms.
# NOTE(review): if self.model.losses is empty, sum(...) is the int 0 —
# confirm tf.keras.backend.eval handles that in the target TF version.
'regularization' : tf.keras.backend.eval(sum(self.model.losses)),
})
# NOTE(review): the excerpt ends here — no super().on_epoch_end(epoch, logs)
# call is visible, so it is unclear from this view how the logs are flushed;
# confirm against the full file.