from keras.models import Sequential, Model
from keras.layers import Dense, Input
import numpy as np
import matplotlib.pyplot as plt

# XOR truth table: four input pairs and their targets
X = np.array([[0, 0], [0, 1], [1, 0], [1, 1]])
y = np.array([[0], [1], [1], [0]])
# Sequential model: 2-unit ReLU hidden layer, sigmoid output for binary classification
model = Sequential()
model.add(Dense(2, input_dim=2, activation='relu'))
model.add(Dense(1, activation='sigmoid'))
model.compile(loss='mean_squared_error', optimizer='adam', metrics=['accuracy'])
model.fit(X, y, epochs=500, batch_size=10)
_, accuracy = model.evaluate(X, y)
print('Accuracy: %.2f' % (accuracy * 100))
# Inspect the learned parameters of each layer
for layer in model.layers:
    weights = layer.get_weights()  # list of numpy arrays: [kernel, bias]
    print(weights)
# Predict the class for a new input; predict_classes is no longer available
# in current Keras, so threshold the sigmoid output at 0.5 instead
x_new = np.array([[0, 1]])
predict = (model.predict(x_new) > 0.5).astype(int)
print(predict)
# Another way: the functional API
X = np.array([[0, 0], [0, 1], [1, 0], [1, 1]])
y = np.array([[0], [1], [1], [0]])
print(X.shape)
# Functional API: explicit Input tensor feeding a 3-unit hidden layer
x = Input(shape=(2,))
h = Dense(3, activation='relu')(x)
r = Dense(1, activation='sigmoid')(h)
model = Model(inputs=x, outputs=r)
model.compile(optimizer='adam', loss='mse')
history = model.fit(X, y, batch_size=4, epochs=5, verbose=1, validation_data=(X, y))
# Plot the training loss curve recorded in the History object
plt.plot(history.history['loss'], 'k-o')
plt.show()
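
# As with the Sequential example above, the functional-API model can be
# evaluated and queried directly; a minimal sketch assuming the same X, y,
# and model defined just above (evaluate returns only the loss here, since
# no metrics were passed to compile)
loss = model.evaluate(X, y, verbose=0)
print('MSE loss: %.4f' % loss)

# Threshold the sigmoid outputs at 0.5 to get class predictions
preds = (model.predict(np.array([[0, 1]])) > 0.5).astype(int)
print(preds)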