Skip to content

Instantly share code, notes, and snippets.

View joelthchao's full-sized avatar

Joel joelthchao

View GitHub Profile
<head>
<!-- Plotly.js -->
<script src="https://cdn.plot.ly/plotly-latest.min.js"></script>
</head>
<body>
<!-- Plotly chart will be drawn inside this DIV -->
<div id="plot"></div>
<script>
Plotly.d3.csv("record/dht.csv", function(err, rows){
@joelthchao
joelthchao / cron.py
Last active March 18, 2018 11:13
DHT
#!/home/pi/miniconda3/bin/python
from datetime import datetime
import sys
import Adafruit_DHT
def main():
sensor = Adafruit_DHT.AM2302
pin = 4
record_file = '/home/pi/joel/dht_monitor/record/dht.csv' # TODO: change to your own path
import Adafruit_DHT

# One-shot reading from an AM2302 (a.k.a. DHT22) sensor on BCM GPIO pin 4.
sensor_type = Adafruit_DHT.AM2302
data_pin = 4

# Give the sensor up to five attempts, one second apart; on total failure
# read_retry returns (None, None) and the print below still emits a line.
humidity, temperature = Adafruit_DHT.read_retry(
    sensor_type, data_pin, retries=5, delay_seconds=1)

print('{},{}'.format(humidity, temperature))
#!/bin/bash
# Install Miniconda and the Adafruit DHT driver on a Raspberry Pi (armv7l).
# Abort on the first failed command so a failed download/clone/cd doesn't
# let the later steps run in the wrong state or wrong directory.
set -e

# Install Miniconda — fetch the installer over https, not plain http,
# since the downloaded script is executed immediately below.
wget https://repo.continuum.io/miniconda/Miniconda3-latest-Linux-armv7l.sh
bash Miniconda3-latest-Linux-armv7l.sh

# Install Adafruit_Python_DHT from source
git clone https://github.com/adafruit/Adafruit_Python_DHT.git
cd Adafruit_Python_DHT
python setup.py install
@joelthchao
joelthchao / dead_neuron_2.py
Last active September 16, 2017 14:07
Dead neuron
import keras.backend as K
from keras.initializers import Constant
from keras.layers import Input, Dense
from keras.models import Model
from keras.optimizers import SGD

# Initial bias for the ReLU layer; the commented list is the sweep of values
# used in the experiment (gist is titled "Dead neuron" — presumably a
# sufficiently negative bias keeps the ReLU pre-activation below zero).
b_init = -0.5  # [-1, -0.75, -0.5, -0.25, 0, 0.25, 0.5, 0.75, 1]
net_input = Input(shape=(4,))
# Two ReLU units whose bias starts at b_init.
net = Dense(2, activation='relu', bias_initializer=Constant(b_init))(net_input)
# Single sigmoid output for binary classification.
net = Dense(1, activation='sigmoid')(net)
@joelthchao
joelthchao / dead_neuron_1.py
Last active September 16, 2017 13:44
Dead neuron
import numpy as np

# x: 320 samples of 4-dim zero-mean uniform noise in [-0.5, 0.5)
# y: binary label — 1.0 when the sample's mean is positive, else 0.0
x = np.random.rand(320, 4) - 0.5
# Vectorized replacement for the original per-row Python loop: one mean
# over axis 1, one comparison, same float64 (320, 1) result.
y = (np.mean(x, axis=1) > 0).astype(float).reshape(-1, 1)
from keras.callbacks import ProgbarLogger
class ProgbarLoggerVerbose(ProgbarLogger):
    """ProgbarLogger that always shows the progress bar.

    Keras sets the logger's verbosity from fit(verbose=...); this subclass
    re-enables it after the base setup, so the bar is shown even when the
    surrounding fit() call was made with verbose=0.
    """

    def on_train_begin(self, logs=None):
        # Let the base class initialise its bookkeeping first,
        # then force the progress bar on.
        super(ProgbarLoggerVerbose, self).on_train_begin(logs)
        self.verbose = True
# Example usage — `model`, `MyCallback`, and the data variables are defined
# elsewhere in the full gist; only the call pattern is shown here.
log_file = 'path/to/log.txt'  # if you don't want to do logging, just leave the kwargs unfilled
my_callback = MyCallback(test_x, test_y, log_file=log_file, verbose=True)
# fit(verbose=0) silences Keras' own bar; ProgbarLoggerVerbose re-enables it.
model.fit(X, Y, callbacks=[ProgbarLoggerVerbose('samples'), my_callback], verbose=0)

# Second (duplicate) usage snippet from the gist preview, without the
# progress-bar override.
log_file = 'path/to/log.txt'  # if you don't want to do logging, just leave the kwargs unfilled
my_callback = MyCallback(test_x, test_y, log_file=log_file, verbose=True)
model.fit(train_x, train_y, callbacks=[my_callback])
"""
Output:
59264/60000 [============================>.] - ETA: 0s - loss: 0.3520 - acc: 0.8925Epoch 0 acc= 0.9753
60000/60000 [==============================] - 5s - loss: 0.3497 - acc: 0.8932 - val_loss: 0.0818 - val_acc: 0.9753
"""
from keras.callbacks import Callback
import numpy as np
class MyCallback(Callback):
    """Keras callback holding a held-out evaluation set and optional log file.

    Only __init__ is visible in this gist preview; the epoch hooks that use
    these attributes are truncated. (Indentation reconstructed — the scrape
    stripped it.)
    """

    def __init__(self, test_data, test_label, log_file=None, verbose=True):
        # Held-out inputs/targets — presumably evaluated per epoch by hooks
        # not visible in this preview; verify against the full gist.
        self.test_data = test_data
        self.test_label = test_label
        # Accumulated results, filled elsewhere in the class.
        self.result = []
        # NOTE(review): the file handle is opened here but never closed in
        # the visible code — confirm the full class closes it (e.g. in
        # on_train_end).
        self.log = open(log_file, 'w') if log_file else None
        self.verbose = verbose
@joelthchao
joelthchao / demo.py
Last active August 31, 2021 18:02
Keras uses TensorBoard Callback with train_on_batch
import numpy as np
import tensorflow as tf
from keras.callbacks import TensorBoard
from keras.layers import Input, Dense
from keras.models import Model
def write_log(callback, names, logs, batch_no):
for name, value in zip(names, logs):
summary = tf.Summary()