Skip to content

Instantly share code, notes, and snippets.

@talolard
talolard / gist:bc4af5aece5c3ac3c2e6
Created March 10, 2016 05:30
Working RGB calibrate example
#include "FastLED.h"
#include <Adafruit_NeoPixel.h>
// NeoPixel strip: 60 pixels, data on pin 8, BRG color order, 800 kHz signal.
// NOTE(review): NEO_BRG (not the usual NEO_GRB) suggests this sketch is for
// calibrating a strip with a non-standard channel order — confirm for your LEDs.
Adafruit_NeoPixel strip = Adafruit_NeoPixel(60, 8, NEO_BRG + NEO_KHZ800);
// One-time Arduino initialization: start the NeoPixel driver and push an
// initial frame so every pixel begins dark.
void setup() {
strip.begin();
strip.show(); // Initialize all pixels to 'off'
}
// FastLED 16-entry RGB palette stored in flash (FL_PROGMEM), named for
// autumn-leaf colors. The four rows are mirrored (reds, oranges/greens,
// oranges/greens again, reds again) — presumably so blended palette lookups
// wrap without a hard seam; confirm against how the sketch indexes it.
extern const TProgmemRGBPalette16 autumnleaves FL_PROGMEM = {
0Xb70c0c, 0Xc71221, 0Xdc0606, 0Xfa250c, // deep reds
0Xfc9105, 0Xfcb108, 0X318d03, 0X9dd523, // oranges and greens
0Xfc9105, 0Xfcb108, 0X318d03, 0X9dd523, // repeat of row 2
0Xb70c0c, 0Xc71221, 0Xdc0606, 0Xfa250c  // repeat of row 1
};
extern const TProgmemRGBPalette16 desert FL_PROGMEM = {
0X000000, 0Xeb6810, 0Xeb8d10, 0Xd35d0e,
0X8b3d09, 0XFFC233, 0X000000, 0Xeb6810,
@talolard
talolard / gru_example.py
Created December 31, 2017 09:23
Example of gru implementation
'''
GRU layer implementation, originally taken from https://github.com/ottokart/punctuator2
'''
class GRULayer(object):
def __init__(self, rng, n_in, n_out, minibatch_size):
super(GRULayer, self).__init__()
# Notation from: An Empirical Exploration of Recurrent Network Architectures
self.n_in = n_in
@talolard
talolard / tensorflow_gru_example.py
Created December 31, 2017 09:27
tensorflow_gru_example.py
# Example: constructing recurrent cells with the TensorFlow 1.x contrib API.
import tensorflow as tf

# Fix: the original did `from tf.contrib.rnn import GRUCell`, but `tf` is a
# module alias, not an importable package path — the real path is
# `tensorflow.contrib.rnn`.
from tensorflow.contrib.rnn import GRUCell

# Fix: `lstm_size` was used but never defined in the original snippet.
lstm_size = 128  # number of hidden units in each recurrent cell

# Fix: the class is spelled GRUCell (not `GruCell`), and its constructor
# requires the `num_units` argument.
cell = GRUCell(lstm_size)
lstm = tf.contrib.rnn.BasicLSTMCell(lstm_size)
@talolard
talolard / padded_batch_example.ipynb
Last active February 8, 2018 18:02
Quick example of padded_batch for medium post
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
'''
Tools to take a directory of txt files and convert them to TF records
'''
from collections import defaultdict, Counter
import numpy as np
import tensorflow as tf

# Special vocabulary tokens; presumably reserved ids in the tokenizer's
# vocabulary — verify against the Preppy class that builds it.
PAD = "<PAD>"  # padding token so batched sequences share one length
EOS = "<EOS>"  # end-of-sequence marker
@talolard
talolard / bibpreppy.py
Created February 15, 2018 18:27
Storing and Extracting TFRecords
class BibPreppy(Preppy):
'''
We'll slightly extend the way we write tfrecords to store the id of the book it came from
'''
def __init__(self,tokenizer_fn):
    """Initialize the base Preppy with *tokenizer_fn* and add an empty
    book_map used to remember which book each record came from."""
    super(BibPreppy,self).__init__(tokenizer_fn)
    # Maps a book id to its book; populated elsewhere — TODO confirm value type.
    self.book_map ={}
def sequence_to_tf_example(self, sequence, book_id):
id_list = self.sentance_to_id_list(sequence)
@talolard
talolard / UsingIterators.py
Last active February 19, 2018 14:04
Example of using dataset and iterators to the train and val
if __name__=="__main__":
#make the iterators and next element op
next_element, training_init_op, validation_init_op = prepare_dataset_iterators(batch_size=32)
...
for epoch in range(1000):
#Initialize the iterator to consume training data
sess.run(training_init_op)
while True:
#As long as the iterator is not empty
@talolard
talolard / prepare_dataset.py
Last active March 8, 2018 14:16
Making a dataset
import tensorflow as tf
from preppy import BibPreppy
def expand(x):
'''
Hack. Because padded_batch doesn't play nice with scalars, we expand the scalar to a vector of length 1
:param x:
:return: