@jboner
jboner / latency.txt
Last active July 30, 2024 02:24
Latency Numbers Every Programmer Should Know
Latency Comparison Numbers (~2012)
----------------------------------
L1 cache reference                          0.5 ns
Branch mispredict                             5 ns
L2 cache reference                            7 ns               14x L1 cache
Mutex lock/unlock                            25 ns
Main memory reference                       100 ns               20x L2 cache, 200x L1 cache
Compress 1K bytes with Zippy              3,000 ns        3 us
Send 1K bytes over 1 Gbps network        10,000 ns       10 us
Read 4K randomly from SSD*              150,000 ns      150 us    ~1GB/sec SSD
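As a quick sanity check on the ratio and microsecond columns, the raw figures above can be dropped into a few lines of Python. This is a throwaway sketch; the dictionary simply restates the numbers in the table.

# Sketch: restate the table's latencies and recompute some of its derived columns.
LATENCY_NS = {
    "L1 cache reference": 0.5,
    "Branch mispredict": 5,
    "L2 cache reference": 7,
    "Mutex lock/unlock": 25,
    "Main memory reference": 100,
    "Compress 1K bytes with Zippy": 3000,
    "Send 1K bytes over 1 Gbps network": 10000,
    "Read 4K randomly from SSD": 150000,
}

# Print each latency in ns and us, mirroring the two numeric columns above.
for name, ns in LATENCY_NS.items():
    print("%-35s %10.1f ns  (%8.3f us)" % (name, ns, ns / 1000.0))

# Ratio column: L2 is ~14x slower than L1, main memory ~200x slower than L1.
print(LATENCY_NS["L2 cache reference"] / LATENCY_NS["L1 cache reference"])     # 14.0
print(LATENCY_NS["Main memory reference"] / LATENCY_NS["L1 cache reference"])  # 200.0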
import theano
import lasagne.layers
from lasagne.layers import Conv2DLayer as C2D
from lasagne.nonlinearities import rectify as relu
from lasagne.layers import NonlinearityLayer as NL
from lasagne.layers import ElemwiseSumLayer as ESL
from lasagne.layers import batch_norm as BN
l_in = lasagne.layers.InputLayer(shape=(None,3,64,64)) # Assume incoming shape is a batch x RGB x W x H image
encoder_stem = C2D(
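The preview breaks off inside the C2D call. Purely as an illustration of how these aliases are usually composed in Lasagne (the filter counts, strides, and padding below are assumptions, not the original gist's values), a stem convolution followed by one batch-normalized residual block could look like this:

# Hypothetical sketch, not the gist's code: all hyperparameters are assumptions.
stem = C2D(
    l_in, num_filters=64, filter_size=(7, 7), stride=(2, 2),
    pad='same', nonlinearity=relu)  # 3x64x64 input -> 64 feature maps at 32x32

def residual_block(incoming, num_filters):
    # Two 3x3 batch-normalized convolutions, an identity shortcut summed in
    # with ElemwiseSumLayer, then a ReLU applied via NonlinearityLayer.
    conv1 = BN(C2D(incoming, num_filters, (3, 3), pad='same', nonlinearity=relu))
    conv2 = BN(C2D(conv1, num_filters, (3, 3), pad='same', nonlinearity=None))
    return NL(ESL([incoming, conv2]), nonlinearity=relu)

encoder = residual_block(stem, 64)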
@jintao-zero
jintao-zero / weibo_search_page.py
Last active February 28, 2020 15:52
Parse Weibo search result pages to extract post content and author information
#!/usr/bin/env python
# -*- coding: UTF-8 -*-
import sys
import urllib
import urllib2
from lxml import etree
import time
import random
import logging
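The preview stops at the imports. As a rough sketch of how such a scraper is commonly wired together in Python 2 (the URL, headers, and XPath expressions below are placeholders, not the gist's actual selectors):

# Illustrative sketch only, not the original gist: fetch one Weibo search
# results page and pull text nodes out with lxml.
def fetch_search_page(keyword, page=1):
    # Build the query string and issue the request with a browser-like UA.
    params = urllib.urlencode({'q': keyword, 'page': page})
    request = urllib2.Request('https://s.weibo.com/weibo?' + params,
                              headers={'User-Agent': 'Mozilla/5.0'})
    return urllib2.urlopen(request, timeout=10).read()

def parse_posts(html):
    tree = etree.HTML(html)
    # Placeholder XPath: real Weibo markup differs and changes over time.
    for node in tree.xpath('//div[@class="content"]'):
        text = ''.join(node.xpath('.//p//text()')).strip()
        author = ''.join(node.xpath('.//a[@class="name"]/text()')).strip()
        if text:
            yield {'author': author, 'text': text}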
@jetnew
jetnew / lstm_autoencoder.py
Last active September 16, 2022 02:49
LSTM Autoencoder using Keras
from keras.layers import LSTM, Dense, RepeatVector, TimeDistributed
from keras.models import Sequential
class LSTM_Autoencoder:
    def __init__(self, optimizer='adam', loss='mse'):
        self.optimizer = optimizer
        self.loss = loss
        self.n_features = 1

    def build_model(self):
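The preview ends here, before the method body. A typical way to fill build_model in with the imported layers looks roughly like this (the layer widths and sequence length are assumptions, not necessarily the gist's values):

    # Sketch of a typical build_model body; sizes and timesteps are assumptions.
    def build_model(self):
        timesteps = 10  # assumed sequence length; the original value is not shown
        model = Sequential()
        # Encoder: collapse the input sequence into one fixed-size vector.
        model.add(LSTM(64, activation='relu',
                       input_shape=(timesteps, self.n_features)))
        # Repeat that vector once per timestep to seed the decoder.
        model.add(RepeatVector(timesteps))
        # Decoder: unroll back into a sequence and project to n_features per step.
        model.add(LSTM(64, activation='relu', return_sequences=True))
        model.add(TimeDistributed(Dense(self.n_features)))
        model.compile(optimizer=self.optimizer, loss=self.loss)
        self.model = model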