
Jaemin Cho (j-min)

@j-min
j-min / srt_example.py
Last active Feb 3, 2019
Korean express train ticket reservation example
View srt_example.py
# pip install SRTpy (https://github.com/dotaitch/SRTpy)
from SRTpy import Srt
from heconvert.converter import h2e
from time import sleep
# Login
EMAIL = '' # email
PW = '' # password
srt = Srt(EMAIL, PW)
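The preview ends right after login. A rough sketch of how a reservation loop might continue is below; the search and reserve calls and their argument formats are assumptions about the SRTpy API, not code taken from the gist:

# Hypothetical continuation: method names and argument formats below are assumptions
# about the SRTpy API, not verified against the library.
DEP, ARR = '수서', '부산'            # Suseo -> Busan
DATE, TIME = '20190201', '080000'

while True:
    trains = srt.search(DEP, ARR, DATE, TIME)   # hypothetical: list available trains
    if trains:
        srt.reserve(trains[0])                  # hypothetical: reserve the first result
        break
    sleep(10)                                   # poll until a seat opens up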
@j-min
j-min / tmux_install.sh
Last active Nov 10, 2017
tmux 2.6 install script (linux)
View tmux_install.sh
TMUX_VERSION=2.6
cd $HOME
# Dependencies
sudo apt install libevent-dev ncurses-dev -y
# Download and unpack the tmux release tarball
wget https://github.com/tmux/tmux/releases/download/$TMUX_VERSION/tmux-$TMUX_VERSION.tar.gz
tar -xvzf tmux-$TMUX_VERSION.tar.gz
# Build and install from source (standard configure/make steps)
cd tmux-$TMUX_VERSION
./configure && make
sudo make install
@j-min
j-min / exp_lr_scheduler.py
Created Jun 25, 2017
learning rate decay in pytorch
View exp_lr_scheduler.py
# http://pytorch.org/tutorials/beginner/transfer_learning_tutorial.html
def exp_lr_scheduler(optimizer, epoch, init_lr=0.001, lr_decay_epoch=7):
    """Decay the learning rate by a factor of 0.1 every lr_decay_epoch epochs."""
    lr = init_lr * (0.1 ** (epoch // lr_decay_epoch))
    if epoch % lr_decay_epoch == 0:
        print('LR is set to {}'.format(lr))
    for param_group in optimizer.param_groups:
        param_group['lr'] = lr
    return optimizer
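For context, a minimal usage sketch follows (the toy model and data are placeholders, not from the gist). In current PyTorch, torch.optim.lr_scheduler.StepLR(optimizer, step_size=7, gamma=0.1) implements the same schedule:

# Usage sketch: decay applied once per epoch (toy model and data, not from the gist)
import torch
import torch.nn as nn
import torch.optim as optim

model = nn.Linear(10, 1)
criterion = nn.MSELoss()
optimizer = optim.SGD(model.parameters(), lr=0.001, momentum=0.9)
x, y = torch.randn(32, 10), torch.randn(32, 1)

for epoch in range(20):
    optimizer = exp_lr_scheduler(optimizer, epoch, init_lr=0.001, lr_decay_epoch=7)
    optimizer.zero_grad()
    loss = criterion(model(x), y)
    loss.backward()
    optimizer.step()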
@j-min
j-min / matplotlib_plot_demo.py
Created Jun 25, 2017
matplotlib configuration
View matplotlib_plot_demo.py
import matplotlib
# font configuration
matplotlib.rc('font', family='NanumGothic', size=22)
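A quick way to check the configuration is to render a plot with Korean labels. This assumes the NanumGothic font is installed on the system; otherwise matplotlib falls back to its default font:

# Minimal check of the font configuration (assumes NanumGothic is installed)
import matplotlib
matplotlib.use('Agg')                      # render to a file, no display needed
matplotlib.rc('font', family='NanumGothic', size=22)
import matplotlib.pyplot as plt

plt.plot([1, 2, 3], [1, 4, 9])
plt.title('한글 제목')                      # "Korean title"
plt.xlabel('x축')                           # "x axis"
plt.savefig('font_test.png')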
View tensorboard_inline.py
from IPython.display import clear_output, Image, display, HTML
import numpy as np
import tensorflow as tf  # TF 1.x graph API (tf.GraphDef)

def strip_consts(graph_def, max_const_size=32):
    """Strip large constant values from graph_def."""
    strip_def = tf.GraphDef()
    for n0 in graph_def.node:
        n = strip_def.node.add()
        n.MergeFrom(n0)
        if n.op == 'Const':
            tensor = n.attr['value'].tensor
            size = len(tensor.tensor_content)
            if size > max_const_size:
                tensor.tensor_content = b'<stripped %d bytes>' % size
    return strip_def
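A small check of what strip_consts does, assuming TF 1.x graph mode (which this snippet targets):

# Usage sketch (TF 1.x graph mode)
g = tf.Graph()
with g.as_default():
    big = tf.constant(np.random.rand(256, 256), name='big_const')
    total = tf.reduce_sum(big)
stripped = strip_consts(g.as_graph_def(), max_const_size=32)
print(len(str(g.as_graph_def())), '->', len(str(stripped)))  # stripped def is far smaller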
@j-min
j-min / backprop.ipynb
Created Mar 1, 2017
Simple backprop implementation in TensorFlow without its optimizer API
View backprop.ipynb
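The notebook itself is not rendered in this preview. As a rough illustration of the idea in the description (computing gradients with tf.gradients and applying the update manually rather than through an optimizer), a minimal TF 1.x sketch follows; it is not necessarily the notebook's own code:

import numpy as np
import tensorflow as tf  # TF 1.x

# Toy linear regression trained without tf.train optimizers
x = tf.placeholder(tf.float32, [None, 1])
y = tf.placeholder(tf.float32, [None, 1])
W = tf.Variable(tf.random_normal([1, 1]))
b = tf.Variable(tf.zeros([1]))
loss = tf.reduce_mean(tf.square(tf.matmul(x, W) + b - y))

lr = 0.1
grad_W, grad_b = tf.gradients(loss, [W, b])   # backprop: gradients of the loss
train_step = tf.group(                        # manual SGD update
    tf.assign_sub(W, lr * grad_W),
    tf.assign_sub(b, lr * grad_b))

data_x = np.random.rand(64, 1).astype(np.float32)
data_y = 3.0 * data_x + 1.0

with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    for _ in range(200):
        _, l = sess.run([train_step, loss], feed_dict={x: data_x, y: data_y})
    print(l, sess.run(W), sess.run(b))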
View .zshrc
ZSH=$HOME/.zsh
HISTFILE=$HOME/.history
HISTSIZE=10000
SAVEHIST=10000
export TERM=xterm-256color
export LANG=en_US.UTF-8
# added by Anaconda3 4.1.1 installer
export PATH=$HOME/anaconda3/bin:$PATH
View convertSize.py
import math

def convertSize(size):
    """Return a file size given in bytes as a human-readable string."""
    if size == 0:
        return '0B'
    units = ("B", "KB", "MB", "GB", "TB", "PB", "EB", "ZB", "YB")
    i = int(math.floor(math.log(size, 1024)))
    p = math.pow(1024, i)
    s = round(size / p, 2)
    return '%s %s' % (s, units[i])
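For example:

print(convertSize(123456789))   # -> '117.74 MB'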
View hangul.py
# -*- coding: utf-8 -*-
class Hangul:
    BASE_CODE = 44032   # U+AC00, the first Hangul syllable '가'
    CHOSUNG = 588       # index stride of each initial consonant (21 vowels * 28 finals)
    JUNGSUNG = 28       # index stride of each vowel (28 possible finals)
    # List of initial consonants (choseong), indices 00-18
    CHOSUNG_LIST = [
        'ㄱ', 'ㄲ', 'ㄴ', 'ㄷ', 'ㄸ', 'ㄹ', 'ㅁ', 'ㅂ', 'ㅃ',
        'ㅅ', 'ㅆ', 'ㅇ', 'ㅈ', 'ㅉ', 'ㅊ', 'ㅋ', 'ㅌ', 'ㅍ', 'ㅎ']
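The class body is cut off in the preview; the constants above support the standard Hangul syllable decomposition, which works as follows (a sketch of the arithmetic, not necessarily the gist's own method):

# Decompose a Hangul syllable into (initial, vowel, final) indices
def decompose(ch):
    code = ord(ch) - Hangul.BASE_CODE
    cho = code // Hangul.CHOSUNG                        # initial consonant index, 0-18
    jung = (code % Hangul.CHOSUNG) // Hangul.JUNGSUNG   # vowel index, 0-20
    jong = code % Hangul.JUNGSUNG                       # final consonant index, 0 means none
    return cho, jung, jong

print(decompose('한'))   # -> (18, 0, 4): ㅎ + ㅏ + ㄴ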
@j-min
j-min / tokenize_dparser.py
Last active Dec 3, 2016
Get tokenized list from dparser
View tokenize_dparser.py
import json
import requests

def tokenize_dparser(text):
    """Query the datanada dependency parser and return its parsed JSON response."""
    dparser_link = 'http://parser.datanada.com/parse?version=1&string='
    url = dparser_link + requests.utils.quote(text)  # URL-encode the query text
    response = requests.get(url)
    return json.loads(response.text)
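A usage example (the parser.datanada.com service may no longer be reachable, and the shape of the returned JSON is not shown in the gist):

result = tokenize_dparser('무궁화 꽃이 피었습니다')   # example Korean sentence
print(json.dumps(result, ensure_ascii=False, indent=2))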