Instantly share code, notes, and snippets.

View basic-rnn.py
# Lab 12 RNN
import tensorflow as tf
import numpy as np
tf.set_random_seed(777) # reproducibility
idx2char = ['h', 'i', 'e', 'l', 'o']
# Teach hello: hihell -> ihello
x_data = [[0, 1, 0, 2, 3, 3]] # hihell
x_one_hot = [[[1, 0, 0, 0, 0], # h 0
[0, 1, 0, 0, 0], # i 1
View inference.sh
python -m bin.infer \
--tasks "
- class: DecodeText
params:
delimiter: '' " \
--model_dir vizmodel \
--model_params "
inference.beam_search.beam_width: 2" \
--input_pipeline "
class: ParallelTextInputPipeline
View nmt_large.yml
model: AttentionSeq2Seq
model_params:
attention.class: seq2seq.decoders.attention.AttentionLayerBahdanau
attention.params:
num_units: 512
bridge.class: seq2seq.models.bridges.ZeroBridge
embedding.dim: 512
encoder.class: seq2seq.encoders.BidirectionalRNNEncoder
encoder.params:
rnn_cell:
View loading_data_file.py
# Example input file:
# $ cat test.csv
# 1,2,3,,5,6,7,8,9
# 0,9,8,7,6,5,4,3,2
# 2,3,4,5,6,7,8,9,0
# 5,4,3,2,16,7,8,9,3
import tensorflow as tf
View webserver_snippet.py
def run_inference():
# tf.reset_default_graph()
with graph.as_default():
saver = tf.train.Saver()
checkpoint_path = loaded_checkpoint_path
if not checkpoint_path:
checkpoint_path = tf.train.latest_checkpoint(model_dir_input)
def session_init_op(_scaffold, sess):
saver.restore(sess, checkpoint_path)
View snippet of webserver.py in data2vis
def run_inference():
# tf.reset_default_graph()
with graph.as_default():
saver = tf.train.Saver()
checkpoint_path = loaded_checkpoint_path
if not checkpoint_path:
checkpoint_path = tf.train.latest_checkpoint(model_dir_input)
def session_init_op(_scaffold, sess):
saver.restore(sess, checkpoint_path)
View create_model_ckpt.data2vis.sh
# Root of the project data; default is the current directory.
# export DATA_DIR=project-directory
export DATA_DIR=.
# Launch training via the project's bin.train entry point.
# --config_paths merges the three YAML configs (model definition,
# training settings, text/BPE metrics); --model_params overrides the
# source/target vocabulary file locations.
# NOTE: no comments may appear between the backslash-continued lines below.
python -m bin.train \
--config_paths="
$DATA_DIR/example_configs/nmt_large.yml,
$DATA_DIR/example_configs/train_seq2seq.yml,
$DATA_DIR/example_configs/text_metrics_bpe.yml" \
--model_params "
vocab_source: $DATA_DIR/sourcedata/vocab.source
vocab_target: $DATA_DIR/sourcedata/vocab.target"
View train_options.json
{
"model_class": "AttentionSeq2Seq",
"model_params": {
"encoder.params": {
"rnn_cell": {
"dropout_input_keep_prob": 0.5,
"num_layers": 2,
"cell_params": {
"num_units": 512
},
View React Lifecycle Methods
Lifecycle: | Update:
Mounting and Unmounting | New Props or State
--------------------------------+-----------------------------------
|
getDefaultProps() | componentWillReceiveProps()*
|
getInitialState() | shouldComponentUpdate()
|
componentWillMount() | componentWillUpdate()
|
View React Lifecycle Methods
Lifecycle: | Update:
Mounting and Unmounting | New Props or State
--------------------------------+-----------------------------------
|
getDefaultProps() | componentWillReceiveProps()*
|
getInitialState() | shouldComponentUpdate()
|
componentWillMount() | componentWillUpdate()
|