@JohnAllen
Created April 24, 2019 15:41
import numpy as np
import os
import shutil
from PIL import Image
from tensorboardX import SummaryWriter
INFECTED = 'infected'
UNINFECTED = 'uninfected'
train_dir = 'dataset/train_signs'
X_train = os.listdir(train_dir)
SIZE = 64
image_pixels = SIZE * SIZE * 3
reg = 1e-3
log_path = 'tensorboard'
if os.path.isdir(log_path):
    shutil.rmtree(log_path)
os.makedirs(log_path)
writer = SummaryWriter(log_path)
def load_image(file_path):
    # Image.open raises on unreadable files rather than returning None,
    # so no truthiness check is needed here.
    img = Image.open(os.path.join(train_dir, file_path))
    img.load()
    data = np.asarray(img, dtype="int32")
    return data.reshape(-1)
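# Note: load_image assumes every file is already a SIZE x SIZE RGB image
# (64*64*3 = 12288 values). If the raw images vary in size, resize first,
# e.g. img = img.resize((SIZE, SIZE)) before np.asarray.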
def softmax(x):
    # Shift scores by the row-wise max so np.exp cannot overflow.
    exp_scores = np.exp(x - np.max(x, axis=1, keepdims=True))
    probs = exp_scores / np.sum(exp_scores, axis=1, keepdims=True)
    return probs
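# Illustrative check (safe to remove): each softmax row sums to 1, and equal
# scores map to a uniform distribution.
_demo = softmax(np.array([[1.0, 1.0], [2.0, 0.0]]))
assert np.allclose(_demo.sum(axis=1), 1.0) and np.allclose(_demo[0], 0.5)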
def cross_entropy(softmax_values, labels):
    m = labels.shape[0]
    correct_logprobs = -np.log(softmax_values[range(m), labels])
    loss = np.sum(correct_logprobs) / m
    # L2 penalty on both weight matrices, matching the reg gradients below.
    reg_loss = 0.5 * reg * (np.sum(W * W) + np.sum(W2 * W2))
    return loss + reg_loss
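# Baseline to judge the loss against: with two balanced classes, predicting
# uniform probabilities gives a data loss of ln(2) ~= 0.693 (plus the reg
# term), so a trained network should settle well below that.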
learn_rate = .001
batch_size = 32
num_classes = 2
neurons = 300
# He initialization: scale each ReLU layer by sqrt(2 / fan_in).
W = np.random.randn(image_pixels, neurons) * np.sqrt(2 / image_pixels)
b = np.zeros((1, neurons))
W2 = np.random.randn(neurons, num_classes) * np.sqrt(2 / neurons)
b2 = np.zeros((1, num_classes))
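# Size check under these settings: 12288*300 + 300 + 300*2 + 2 comes to
# roughly 3.7M parameters, almost all of them in the first layer.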
for i in range(300000):
    # Take the next mini-batch of filenames, wrapping around at the end
    # of the training set instead of running past it.
    start = (i * batch_size) % len(X_train)
    X_train_mini_files = X_train[start:start + batch_size]
    num_images = len(X_train_mini_files)
    labels = np.zeros(batch_size, dtype=np.int8)
    X = np.zeros((batch_size, image_pixels))
    for x in range(num_images):
        X[x] = load_image(X_train_mini_files[x])
        # Filenames carry the class: check UNINFECTED first, since
        # 'infected' is a substring of 'uninfected'.
        labels[x] = 0 if UNINFECTED in X_train_mini_files[x] else 1
    if X.any():
        # Forward pass: one ReLU hidden layer, then linear class scores.
        hidden_layer = np.maximum(0, np.dot(X, W) + b)
        scores = np.dot(hidden_layer, W2) + b2
        softmax_values = softmax(scores)
        loss = cross_entropy(softmax_values, labels)
        # Backward pass: d(loss)/d(scores) is softmax minus 1 at each true class.
        dscores = softmax_values
        dscores[range(num_images), labels[:num_images]] -= 1
        dscores /= num_images
        dW2 = np.dot(hidden_layer.T, dscores)
        db2 = np.sum(dscores, axis=0, keepdims=True)
        dhidden = np.dot(dscores, W2.T)
        dhidden[hidden_layer <= 0] = 0  # ReLU gradient: zero where the unit was off
        dW = np.dot(X.T, dhidden)
        db = np.sum(dhidden, axis=0, keepdims=True)
        # Add the gradient of the L2 regularization term.
        dW2 += reg * W2
        dW += reg * W
        # update the parameters
        W += -learn_rate * dW
        b += -learn_rate * db
        W2 += -learn_rate * dW2
        b2 += -learn_rate * db2
        writer.add_scalar('Train/Loss', loss, i)
        if i % 10 == 0:
            print("iteration %d: loss %f" % (i, loss))
            # Re-run the forward pass with updated weights to log batch accuracy.
            hidden_layer = np.maximum(0, np.dot(X, W) + b)
            scores = np.dot(hidden_layer, W2) + b2
            predicted_class = np.argmax(scores, axis=1)
            accuracy = np.mean(predicted_class[:num_images] == labels[:num_images])
            writer.add_scalar('Train/Accuracy', accuracy, i)
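To watch the logged curves while the script runs, point TensorBoard at the log directory (this assumes the standard `tensorboard` CLI is installed; it ships with TensorFlow and reads the event files tensorboardX writes):

tensorboard --logdir tensorboard

Then open http://localhost:6006 to see the Train/Loss and Train/Accuracy scalars.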