J. W. Kim (Rsych)

@Rsych
Rsych / mac_tf_metal_test.py
Last active August 12, 2021 14:56
TensorFlow Mac Metal acceleration test
import os
# Suppress TensorFlow info/warning logs; must be set before TensorFlow is imported.
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '2'

import tensorflow as tf
from tensorflow.keras.layers import SimpleRNN
from tensorflow.keras.models import Sequential

# Build a small RNN; with tensorflow-metal installed this runs on the Apple GPU.
model_rnn = Sequential()
model_rnn.add(SimpleRNN(units=20, activation='relu', input_shape=(10, 5)))
model_rnn.summary()  # summary() prints the model itself; wrapping it in print() also prints "None"
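The gist itself only builds a model; as a hedged, optional check (not part of the original gist), you can confirm that the tensorflow-metal plugin actually exposes the Apple GPU by listing the visible devices:

import tensorflow as tf

# On a working tensorflow-macos + tensorflow-metal install this list should contain
# a device such as PhysicalDevice(name='/physical_device:GPU:0', device_type='GPU').
print(tf.config.list_physical_devices('GPU'))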
@Rsych
Rsych / per_AND.py
Created July 29, 2021 03:21
AND gate with perceptron
def AND(x1, x2):
    # Perceptron AND gate: fires only when the weighted sum exceeds the threshold.
    w1 = 0.5
    w2 = 0.5
    theta = 0.7
    if w1 * x1 + w2 * x2 > theta:
        return 1
    else:
        return 0

print(AND(0, 0))  # 0
print(AND(0, 1))  # 0
@Rsych
Rsych / logic_gate.py
Created July 29, 2021 03:23
Logic gates using a perceptron
def NAND(x1, x2):
    w1 = -0.5
    w2 = -0.5
    theta = -0.7
    if w1 * x1 + w2 * x2 > theta:
        return 1
    else:
        return 0
def OR(x1, x2):
    # The gist preview is truncated here; this body is an assumed completion with standard perceptron OR weights, not the author's original code.
    w1 = 0.5
    w2 = 0.5
    theta = 0.2
    if w1 * x1 + w2 * x2 > theta:
        return 1
    else:
        return 0
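As a hypothetical extension that is not in the gist: a single perceptron cannot represent XOR, but composing the gates above does, assuming the AND gate from per_AND.py is also defined:

def XOR(x1, x2):
    # XOR = AND(NAND(a, b), OR(a, b)); stacking perceptrons handles what one layer cannot.
    return AND(NAND(x1, x2), OR(x1, x2))

for a in (0, 1):
    for b in (0, 1):
        print(a, b, XOR(a, b))  # 0 0 0, 0 1 1, 1 0 1, 1 1 0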
@Rsych
Rsych / stepfunc.py
Created July 29, 2021 03:49
neural network step function
import numpy as np
import matplotlib.pyplot as plt

def stepfunc(x):
    return np.where(x <= 0, 0, 1)  # 0 where x <= 0, 1 elsewhere

x = np.arange(-10, 10, 0.1)
y = stepfunc(x)
plt.plot(x, y)
plt.title('Step Function')
plt.show()
@Rsych
Rsych / sigmoid.py
Created July 29, 2021 03:55
neural network sigmoid function
import numpy as np
import matplotlib.pyplot as plt

def sigmoid(x):
    return 1 / (1 + np.exp(-x))

x = np.arange(-10, 10, 0.1)
y = sigmoid(x)
plt.plot(x, y)
plt.title('Sigmoid Function')
plt.show()
@Rsych
Rsych / tanh.py
Created July 29, 2021 03:57
neural network tanh func
import numpy as np
import matplotlib.pyplot as plt

def tanh(x):
    # Equivalent to np.tanh(x), written out from the definition.
    return (np.exp(2 * x) - 1) / (np.exp(2 * x) + 1)

x = np.arange(-10, 10, 0.1)
y = tanh(x)
plt.plot(x, y)
plt.title('tanh Function')
plt.show()
@Rsych
Rsych / relu.py
Created July 29, 2021 03:59
neural network relu func
import numpy as np
import matplotlib.pyplot as plt

def relu(x):
    return np.maximum(0, x)

x = np.arange(-10, 10, 0.1)
y = relu(x)
plt.plot(x, y)
plt.title('ReLU Function')
plt.show()
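Each of the activation gists above draws its own figure; as a small convenience sketch (not part of the original gists, and assuming stepfunc, sigmoid, tanh, and relu from above are in scope), they can be compared on one set of axes:

import numpy as np
import matplotlib.pyplot as plt

x = np.arange(-10, 10, 0.1)
for fn, label in [(stepfunc, 'step'), (sigmoid, 'sigmoid'), (tanh, 'tanh'), (relu, 'ReLU')]:
    plt.plot(x, fn(x), label=label)
plt.legend()
plt.title('Activation Functions')
plt.show()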
@Rsych
Rsych / softmax.py
Created July 29, 2021 04:01
neural network softmax func
import numpy as np

def softmax(x):
    return np.exp(x) / np.sum(np.exp(x))

print(softmax([1, 1, 3, 6, 7]))
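Not in the gist, but a common refinement: np.exp overflows for large inputs, so softmax is usually computed after subtracting the maximum value, which leaves the result unchanged. A sketch with a hypothetical softmax_stable helper:

import numpy as np

def softmax_stable(x):
    # Subtracting max(x) avoids overflow in np.exp without changing the output,
    # because softmax is invariant to adding a constant to every input.
    x = np.asarray(x, dtype=float)
    e = np.exp(x - np.max(x))
    return e / np.sum(e)

print(softmax_stable([1, 1, 3, 6, 7]))  # same result as softmax above
print(softmax_stable([1000, 1001]))     # the naive version overflows here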
@Rsych
Rsych / tf_basic.py
Created July 29, 2021 05:44
tensorflow basic example
import matplotlib.pyplot as plt
import matplotlib.gridspec as gridspec
import tensorflow as tf
import seaborn as sns

sns.set()

x = [1, 2, 3, 4, 5]
y = [2, 3, 4, 5, 6]
w = tf.Variable(0.7)  # initial weight; the closer it starts to the true weight, the faster training converges
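The gist preview ends here. As a hedged sketch of how such an example typically continues (this is not the author's original code), the weight and a bias can be fit to the data with plain gradient descent via tf.GradientTape:

import tensorflow as tf

x = tf.constant([1., 2., 3., 4., 5.])
y = tf.constant([2., 3., 4., 5., 6.])
w = tf.Variable(0.7)
b = tf.Variable(0.0)

learning_rate = 0.05
for step in range(1000):
    with tf.GradientTape() as tape:
        y_pred = w * x + b
        loss = tf.reduce_mean(tf.square(y_pred - y))  # mean squared error
    dw, db = tape.gradient(loss, [w, b])
    w.assign_sub(learning_rate * dw)
    b.assign_sub(learning_rate * db)

print(w.numpy(), b.numpy())  # converges toward w = 1, b = 1, since y = x + 1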
@Rsych
Rsych / SimpleNeuralNetwork_T&L.py
Last active August 9, 2021 05:08
Preparing dataset for our simple neural network
import tensorflow as tf
import numpy as np
import os
from tensorflow import keras

# Network and training hyperparameters
EPOCHS = 200
BATCH_SIZE = 128
VERBOSE = 1
NB_CLASSES = 10  # number of output classes
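The preview stops at the hyperparameters. NB_CLASSES = 10 suggests a ten-class dataset such as MNIST; as a hedged sketch of the dataset preparation the title describes (not necessarily the author's code), the usual Keras loading and preprocessing looks like this:

from tensorflow import keras

NB_CLASSES = 10

# Load MNIST, flatten the 28x28 images, scale pixels to [0, 1],
# and one-hot encode the labels into NB_CLASSES columns.
(x_train, y_train), (x_test, y_test) = keras.datasets.mnist.load_data()
x_train = x_train.reshape(60000, 784).astype('float32') / 255
x_test = x_test.reshape(10000, 784).astype('float32') / 255
y_train = keras.utils.to_categorical(y_train, NB_CLASSES)
y_test = keras.utils.to_categorical(y_test, NB_CLASSES)
print(x_train.shape, y_train.shape)  # (60000, 784) (60000, 10)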