Skip to content

Instantly share code, notes, and snippets.

View Smerity's full-sized avatar

Stephen Merity Smerity

View GitHub Profile
@Smerity
Smerity / version.bad
Created May 17, 2020 00:31
Difference between high-performing and low-performing autovectorization
 fn main() {
 push  rbp
 mov  rbp, rsp
 push  r15
 push  r14
 push  r12
 push  rbx
 sub  rsp, 3344
 let mut rng = rand::thread_rng();
 call  rand::rngs::thread::thread_rng
@Smerity
Smerity / send_message
Created January 10, 2019 23:18
Send a message using the Slack postMessage API
import requests
AUTH_TOKEN = 'xoxb-...'
channel = '#locked-out'
USER_ID = 'UDAS0J04S' # A user ID obtained by `list_members.py`
text = f'<@{USER_ID}>, Smerity locked himself out'
params = {
'token': AUTH_TOKEN,
@Smerity
Smerity / list_members.py
Created January 10, 2019 10:27
List members of a bot's Slack
import pprint
import requests
AUTH_TOKEN = 'xoxb-...'
params = {'token': AUTH_TOKEN}
r = requests.post('https://slack.com/api/users.list', params=params)
for member in r.json()['members']:
@Smerity
Smerity / post_process.py
Created November 19, 2017 20:55
WikiText: Python 2 post processing used on Moses tokenized input
# encoding=utf8
import sys
reload(sys)
sys.setdefaultencoding('utf8')
import re
number_match_re = re.compile(r'^([0-9]+[,.]?)+$')
number_split_re = re.compile(r'([,.])')
@Smerity
Smerity / failed_logins
Created June 6, 2017 22:09
List of failed SSH logins produced by `egrep -o "invalid user ([^ ]+?) " /var/log/auth.log | cut -d ' ' -f 3 | sort | uniq -c | sort -nk 1`
1 .+?
1 [^
2 0000
2 010101
2 1111
2 1234
2 12345
2 666666
2 adm
2 anna
@Smerity
Smerity / cupy-pytorch-ptx.py
Created May 21, 2017 23:21 — forked from szagoruyko/cupy-pytorch-ptx.py
CuPy example for PyTorch updated to support Python 3
import torch
from cupy.cuda import function
from pynvrtc.compiler import Program
from collections import namedtuple
a = torch.randn(1,4,4).cuda()
b = torch.zeros(a.size()).cuda()
kernel = '''
extern "C"
@Smerity
Smerity / count_wikitext.py
Created February 9, 2017 23:00
Count the number of unique tokens in WikiText-2 and/or WikiText-103
# Count the number of unique tokens in WikiText-2 and/or WikiText-103.
# Tokens are space-delimited; splitting on ' ' can yield empty strings
# (consecutive spaces), so those are filtered out before counting.
vocab = set()
# `with` ensures the token file is closed even if iteration raises
# (the original leaked the handle by iterating a bare open()).
with open('wiki.train.tokens') as f:
    for i, line in enumerate(f):
        words = [x for x in line.split(' ') if x]
        # set.update replaces the original side-effect-only list
        # comprehension, which built and discarded a throwaway list.
        vocab.update(words)
        # Echo the first 10 lines as a sanity check on tokenization.
        if i < 10:
            print(words)
print('Vocab size:', len(vocab))
@Smerity
Smerity / cartpole.py
Last active May 26, 2017 13:47
Script for Cartpole using policy gradient via Chainer, two layer MLP, dropout, and rejection sampling of historical memories
''' Script for Cartpole using policy gradient via Chainer, two layer MLP, dropout, and rejection sampling of historical memories '''
import gym
import numpy as np
import chainer
from chainer import optimizers
from chainer import ChainList, Variable
import chainer.functions as F
@Smerity
Smerity / buggy_cartpole.py
Last active September 2, 2016 00:09
Buggy (but preserved for posterity) script for Cartpole using policy gradient via Chainer, two layer MLP, dropout, and vaguely rejection sampling of historical memories
""" Quick script for Cartpole using policy gradient via Chainer, two layer MLP, dropout, and vaguely rejection sampling of historical memories """
import gym
import numpy as np
import chainer
from chainer import optimizers
from chainer import ChainList, Variable
import chainer.functions as F
@Smerity
Smerity / time_dist.py
Last active September 20, 2015 01:22
Test the difference between Dense and TimeDistributedDense in Keras
from __future__ import print_function
import numpy as np
np.random.seed(1337)
import sys
from keras.utils.test_utils import get_test_data
from keras.models import Sequential
from keras.layers.core import Dense, TimeDistributedDense
from keras.layers.recurrent import GRU