A personal diary of DataFrame munging over the years.
Convert a Series datatype to numeric (this will raise an error if the column has non-numeric values).
(h/t @makmanalp)
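A minimal sketch of what that conversion looks like with pandas, assuming a DataFrame df with a placeholder column 'col':

import pandas as pd

df = pd.DataFrame({'col': ['1', '2', '3']})            # placeholder data
df['col'] = pd.to_numeric(df['col'], errors='raise')   # raises if any value is non-numeric
# or, for columns already known to be clean: df['col'] = df['col'].astype(float)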
# dropping a database via pymongo
# (Connection was removed in pymongo 3.x; MongoClient is the current entry point)
from pymongo import MongoClient
c = MongoClient()
c.drop_database('mydatabase')

# drop a collection via pymongo
from pymongo import MongoClient
c = MongoClient()
c['mydatabase'].drop_collection('mycollection')
# the following two lines configure the status line, with the current window highlighted
hardstatus alwayslastline
hardstatus string '%{= kG}[%{G}%H%? %1`%?%{g}][%= %{= kw}%-w%{+b yk} %n*%t%?(%u)%? %{-}%+w %=%{g}][%{B}%m/%d %{W}%C%A%{g}]'

# huge scrollback buffer
defscrollback 5000

# no welcome message
startup_message off
Latency Comparison Numbers (~2012)
----------------------------------
L1 cache reference                           0.5 ns
Branch mispredict                            5   ns
L2 cache reference                           7   ns                      14x L1 cache
Mutex lock/unlock                           25   ns
Main memory reference                      100   ns                      20x L2 cache, 200x L1 cache
Compress 1K bytes with Zippy             3,000   ns        3 us
Send 1K bytes over 1 Gbps network       10,000   ns       10 us
Read 4K randomly from SSD*             150,000   ns      150 us          ~1GB/sec SSD
import boto
import boto.s3
import os.path
import sys

# Fill these in - you get them when you sign up for S3
AWS_ACCESS_KEY_ID = ''
AWS_ACCESS_KEY_SECRET = ''

# Fill in info on data to upload
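The snippet cuts off here. As a rough sketch, the upload itself would typically proceed along these lines with the classic boto API (the bucket name and file path below are placeholders, not from the original):

from boto.s3.key import Key

conn = boto.connect_s3(AWS_ACCESS_KEY_ID, AWS_ACCESS_KEY_SECRET)
bucket = conn.create_bucket('my-example-bucket')        # placeholder bucket name
key = Key(bucket)
key.key = os.path.basename('path/to/local/file')        # placeholder local path
key.set_contents_from_filename('path/to/local/file')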
from __future__ import print_function

import imageio
from PIL import Image
import numpy as np

import keras
from keras.layers import Input, Dense, Conv2D, MaxPooling2D, AveragePooling2D, ZeroPadding2D, Dropout, Flatten, Concatenate, Reshape, Activation
from keras.models import Model
from keras.regularizers import l2
from keras.optimizers import SGD
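These imports point at a convolutional model built with the functional API. A minimal, illustrative sketch of how such layers are typically wired together (the input shape, layer sizes, and optimizer settings are placeholders, not from the original):

inputs = Input(shape=(224, 224, 3))                     # illustrative input shape
x = ZeroPadding2D((3, 3))(inputs)
x = Conv2D(64, (7, 7), strides=(2, 2), activation='relu', kernel_regularizer=l2(0.0002))(x)
x = MaxPooling2D((3, 3), strides=(2, 2))(x)
x = AveragePooling2D((2, 2))(x)
x = Flatten()(x)
x = Dropout(0.4)(x)
outputs = Dense(1000, activation='softmax')(x)          # illustrative class count

model = Model(inputs=inputs, outputs=outputs)
model.compile(optimizer=SGD(lr=0.01, momentum=0.9), loss='categorical_crossentropy')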
from keras.layers import LSTM


class AttentionLSTM(LSTM):
    """LSTM with attention mechanism

    This is an LSTM incorporating an attention mechanism into its hidden states.
    Currently, the context vector calculated from the attended vector is fed
    into the model's internal states, closely following the model by Xu et al.
    (2016, Sec. 3.1.2), using a soft attention model following
    Bahdanau et al. (2014).

    The layer expects two inputs instead of the usual one:
from keras.engine.topology import Layer
from keras import initializations
from keras import backend as K


class Attention(Layer):
    '''Attention operation for temporal data.

    # Input shape
        3D tensor with shape: `(samples, steps, features)`.
    # Output shape
        2D tensor with shape: `(samples, features)`.
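To make the operation concrete, here is a small sketch (my own, not the layer's actual build/call code) of the soft-attention reduction over the time axis that such a layer performs, written against the Keras backend; W, b and u stand in for the layer's learned weights:

def soft_attention(x, W, b, u):
    # x: (samples, steps, features); W: (features, features); b, u: (features,)
    uit = K.tanh(K.dot(x, W) + b)           # hidden representation per step
    ait = K.dot(uit, K.expand_dims(u))      # unnormalized scores, (samples, steps, 1)
    a = K.softmax(K.squeeze(ait, -1))       # attention weights over the steps
    a = K.expand_dims(a)                    # back to (samples, steps, 1)
    return K.sum(x * a, axis=1)             # weighted sum -> (samples, features)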
from keras import backend as K, initializers, regularizers, constraints
from keras.engine.topology import Layer


def dot_product(x, kernel):
    """
    Wrapper for dot product operation, in order to be compatible with both
    Theano and Tensorflow
    Args:
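The body is cut off above. The usual way such a wrapper dispatches on the backend looks roughly like this (a sketch based on the docstring, not necessarily the original body):

def dot_product(x, kernel):
    # TensorFlow needs the 1D kernel expanded to 2D and the result squeezed back;
    # Theano's dot handles the 3D x 1D case directly.
    if K.backend() == 'tensorflow':
        return K.squeeze(K.dot(x, K.expand_dims(kernel)), axis=-1)
    else:
        return K.dot(x, kernel)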
#!/usr/bin/env bash
set -x -e

JUPYTER_PASSWORD=${1:-"myJupyterPassword"}
NOTEBOOK_DIR=${2:-"s3://myS3Bucket/notebooks/"}

# home backup
if [ ! -d /mnt/home_backup ]; then
  sudo mkdir /mnt/home_backup
  sudo cp -a /home/* /mnt/home_backup