Created
August 8, 2017 07:50
-
-
Save tizuo/b9fea69bb9a6d0e019ef6c98ec48c0de to your computer and use it in GitHub Desktop.
Multivariate LSTM with Keras (Kerasで多変量LSTM) — ref: http://qiita.com/tizuo/items/b9af70e8cdc7fb69397f
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
# Rescale every feature column into [0, 1]; the fitted scaler is kept at
# module level so predictions can be mapped back to original units later.
scaler = MinMaxScaler(feature_range=(0, 1))
dataset = scaler.fit_transform(dataset)
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
# Chronological split — the first 67% of rows train the model, the rest test
# it. No shuffling, so the time ordering of the series is preserved.
train_size = int(len(dataset) * 0.67)
test_size = len(dataset) - train_size
train = dataset[:train_size, :]
test = dataset[train_size:, :]
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
def create_dataset(dataset, look_back=1):
    """Build supervised-learning samples from a scaled multivariate series.

    Each sample X[i] has shape (n_features, look_back): for every feature
    column, the window of `look_back` consecutive values starting at row i.
    The target Y[i] is the value of feature column 0 at row i + look_back.

    NOTE(review): the loop stops at len(dataset) - look_back - 1, so the
    last usable window is dropped — looks like an off-by-one inherited from
    the source tutorial; kept as-is to preserve behavior.
    """
    samples, targets = [], []
    n_features = dataset.shape[1]
    for start in range(len(dataset) - look_back - 1):
        window = [dataset[start:start + look_back, col] for col in range(n_features)]
        targets.append(dataset[start + look_back, 0])
        samples.append(window)
    return numpy.array(samples), numpy.array(targets)
# Use the previous 12 time steps of every feature as the input window for
# each sample (predicting the next value of column 0).
look_back = 12
trainX, trainY = create_dataset(train, look_back)
testX, testY = create_dataset(test, look_back)
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
# NOTE(review): this reshape is an identity operation — trainX/testX already
# come out of create_dataset with shape (samples, features, look_back).
# Kept to preserve the original script's behavior exactly.
trainX = trainX.reshape(trainX.shape[0], trainX.shape[1], trainX.shape[2])
testX = testX.reshape(testX.shape[0], testX.shape[1], testX.shape[2])
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
# A small single-layer LSTM regressor: 4 hidden units feeding one linear
# output that predicts the next (scaled) value of feature column 0.
model = Sequential()
# NOTE(review): input_shape is taken from testX; trainX has the same
# (features, look_back) dimensions, so the value is identical — but deriving
# it from the training split would read better. Kept as-is here.
model.add(LSTM(4, input_shape=(testX.shape[1], look_back)))
model.add(Dense(1))
model.compile(loss='mean_squared_error', optimizer='adam')
# batch_size=1 over 1000 epochs is very slow; it follows the source tutorial.
model.fit(trainX, trainY, epochs=1000, batch_size=1, verbose=2)
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
# In-sample and out-of-sample predictions, still in scaled [0, 1] units.
trainPredict = model.predict(trainX)
testPredict = model.predict(testX)
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
# The scaler was fitted on the full feature width, so a 1-value prediction
# must be padded with zeros up to that width before inverse_transform can be
# applied; only column 0 of the result is meaningful.
pad_col = numpy.zeros(dataset.shape[1] - 1)

def pad_array(val):
    """Prepend each value in `val` to a zero row matching the scaler width."""
    rows = []
    for x in val:
        rows.append(numpy.insert(pad_col, 0, x))
    return numpy.array(rows)

trainPredict = scaler.inverse_transform(pad_array(trainPredict))
trainY = scaler.inverse_transform(pad_array(trainY))
testPredict = scaler.inverse_transform(pad_array(testPredict))
testY = scaler.inverse_transform(pad_array(testY))
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
# Report root-mean-squared error in the original (unscaled) units of
# feature column 0 for both splits.
trainScore = math.sqrt(mean_squared_error(trainY[:, 0], trainPredict[:, 0]))
print('Train Score: %.2f RMSE' % trainScore)
testScore = math.sqrt(mean_squared_error(testY[:, 0], testPredict[:, 0]))
print('Test Score: %.2f RMSE' % testScore)
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment