Skip to content

Instantly share code, notes, and snippets.

View Algomancer's full-sized avatar
💌
Hmu if your gradients are misbehaving, email in profile.

Adam Hibble Algomancer

💌
Hmu if your gradients are misbehaving, email in profile.
  • Mancer Labs
  • Brisbane, AU
View GitHub Profile
import torch
import torch.nn as nn
import torch.nn.functional as F
class SubspaceLinear(nn.Linear):
def forward(self, x: torch.Tensor) -> torch.Tensor:
"""
Forward pass for the BaseSubspaceLinear layer. Calls `subspace_weights` to sample from the subspace
and uses the corresponding weight and bias.
@Algomancer
Algomancer / lr.py
Last active January 3, 2024 16:02
# Training hyperparameters (AdamW-style optimizer settings plus logging /
# checkpoint cadence). Presumably consumed by a separate training script —
# TODO confirm which trainer reads these names.
learning_rate = 4e-4  # peak learning rate (after warmup)
warmup_steps = 2000  # steps of LR warmup before reaching learning_rate
log_step_interval = 1  # log metrics every step
eval_iters = 100  # number of batches averaged per evaluation
save_step_interval = 1000  # checkpoint every N steps
eval_step_interval = 1000  # run evaluation every N steps
weight_decay = 1e-1  # decoupled weight decay coefficient
beta1 = 0.9  # Adam first-moment decay
beta2 = 0.95  # Adam second-moment decay
import torch
from torchdiffeq import odeint
import matplotlib.pyplot as plt
import numpy as np
from PIL import Image
import tqdm
import imageio
import os
# Load target image and preprocess it
@Algomancer
Algomancer / mozaicing.py
Created January 16, 2022 00:12
Spectral augmentation using Mozaicing
# Author Adam Hibble @algomancer
import torch
import torch.nn.functional as F
import torch.nn as nn
import tqdm
def get_padding(padding_type, kernel_size):
    """Compute per-dimension convolution padding.

    Parameters
    ----------
    padding_type : str
        ``'SAME'`` — symmetric padding that preserves spatial size for odd
        kernels at stride 1; ``'VALID'`` — no padding.
    kernel_size : iterable of int
        Kernel size per spatial dimension.

    Returns
    -------
    tuple of int
        Padding amount per spatial dimension.

    Raises
    ------
    ValueError
        If ``padding_type`` is neither ``'SAME'`` nor ``'VALID'``.
    """
    # Explicit check instead of `assert`: asserts are stripped under `python -O`,
    # which would silently let bad padding types through.
    if padding_type not in ('SAME', 'VALID'):
        raise ValueError(
            f"padding_type must be 'SAME' or 'VALID', got {padding_type!r}"
        )
    if padding_type == 'SAME':
        return tuple((k - 1) // 2 for k in kernel_size)
    # 'VALID' previously fell through and returned an implicit (unusable) None;
    # return explicit zero padding per dimension instead.
    return tuple(0 for _ in kernel_size)
@Algomancer
Algomancer / collab.py
Created August 26, 2020 00:25
Colab SSH
# Colab-to-SSH bootstrap (Jupyter notebook cell: `!` lines run in the VM shell).
# Installs an SSH server inside the Colab VM; the downloaded ngrok binary is
# presumably used afterwards to tunnel port 22 out — TODO confirm the follow-up cell.
#
# NOTE(review): security hazards — the root password is hardcoded and
# PermitRootLogin is enabled, so anyone who learns the tunnel address can log
# in as root. Acceptable only on a throwaway Colab VM.
# Install useful stuff
! apt install --yes ssh screen nano htop ranger git > /dev/null
# SSH setting
! echo "root:carbonara" | chpasswd
# NOTE(review): the first redirect uses '>' and overwrites sshd_config wholesale,
# discarding the distro defaults; the later '>>' lines append to the new file.
! echo "PasswordAuthentication yes" > /etc/ssh/sshd_config
! echo "PermitUserEnvironment yes" >> /etc/ssh/sshd_config
! echo "PermitRootLogin yes" >> /etc/ssh/sshd_config
# Restart sshd so the rewritten config takes effect.
! service ssh restart > /dev/null
# Download ngrok
! wget -q -c -nc https://bin.equinox.io/c/4VmDzA7iaHb/ngrok-stable-linux-amd64.zip
#include <stdio.h>
#include <stdlib.h>
#define N 16 /* buffer size */
int main(void) {
char name[N]; /* buffer */
/* prompt user for name */
printf("What's your name? ");
from torch.autograd import Variable
from torch.nn import functional as F
class SELU(nn.Module):
def __init__(self):
import logging
from logging.handlers import RotatingFileHandler
from flask import Flask
app = Flask(__name__)
@app.route('/')
def foo():
    """Demo view: emit a WARNING through the Flask app logger.

    Uses lazy %-style arguments so formatting only happens if the record is
    emitted. The original returned None, which Flask rejects at request time
    ("view function did not return a valid response"); return a body instead.
    """
    app.logger.warning('A warning occurred (%d apples)', 42)
    return 'Logged a warning.'
from IPython.display import Image, display, HTML
def strip_consts(graph_def, max_const_size=32):
"""Strip large constant values from graph_def."""
strip_def = tf.GraphDef()
for n0 in graph_def.node:
n = strip_def.node.add()
n.MergeFrom(n0)
if n.op == 'Const':
{
"Version": "2012-10-17",
"Statement": [
{
"Effect": "Allow",
"Action": [
"s3:PutObject"
],
"Resource": [
"arn:aws:s3:::YOUR_S3_BUCKET_NAME/*"