Polynomial regression experiment with Keras
%matplotlib inline
import numpy as np
import matplotlib.pyplot as plt
from keras.models import Sequential
from keras.layers import Dense
# Generate training data: two random features (x, y) in [0, 10),
# target is x^2 * y + y + 2
num_data = 10000
x_train = np.random.random((num_data, 2)) * 10
x = x_train[:, 0]
y = x_train[:, 1]
y_train = x * x * y + y + 2
y_train = y_train.reshape(num_data, 1)
print(x_train.shape)
print(y_train.shape)

# A single test point: x = 3, y = 4
x_test = np.array([[3, 4]])
model = Sequential()
# Not sure whether this is the right way to do it..
# Dense(100) is a fully-connected layer with 100 hidden units.
model.add(Dense(100, activation='relu', input_dim=2))
model.add(Dense(100, activation='relu'))
model.add(Dense(1, activation='linear'))
model.compile(loss='mean_squared_error', optimizer='adam')
model.fit(x_train, y_train,
          epochs=100,
          batch_size=128)

y_pred = model.predict(x_test, batch_size=32, verbose=1)
print("3^2*4 + 4 + 2 = %r (should be 42)" % y_pred)