mirror of https://github.com/newnius/YAO-optimizer.git
2020-05-02 15:08:06 +08:00
parent d18488c05f
commit bda8bd6ffc
2 changed files with 35 additions and 37 deletions


@@ -61,20 +61,16 @@ def invert_scale(scaler, X, yhat):
 # fit an LSTM network to training data
 def fit_lstm(train, batch_size2, nb_epoch, neurons):
     print(train)
     X, y = train[:, 0:-1], train[:, -1]
     X = X.reshape(X.shape[0], 1, X.shape[1])
     print(X, y)
     model = Sequential()
     model.add(LSTM(neurons, batch_input_shape=(batch_size2, X.shape[1], X.shape[2]), stateful=True))
     model.add(Dense(1))
-    model.compile(loss='mean_squared_error', optimizer=Adam(lr=0.01))
+    model.compile(loss='mean_squared_error', optimizer='adam')
     for i in range(nb_epoch):
         print("Epoch {}/{}".format(i, nb_epoch))
         model.fit(X, y, epochs=1, batch_size=batch_size2, verbose=0, shuffle=False)
-        #loss, accuracy = model.evaluate(X, y)
-        print(model.evaluate(X, y))
-        #print(loss, accuracy)
+        loss = model.evaluate(X, y)
+        print("Epoch {}/{}, loss = {}".format(i, nb_epoch, loss))
         model.reset_states()
     return model
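
For context, here is a minimal, self-contained sketch of what fit_lstm() looks like after this change. It assumes the standalone Keras imports (Sequential, LSTM, Dense) used elsewhere in the script; the explicit batch_size/verbose arguments to evaluate() and the tiny random training array in the usage example are illustrative assumptions, not part of the commit.

import numpy as np
from keras.models import Sequential
from keras.layers import Dense, LSTM

# fit an LSTM network to training data (post-commit version)
def fit_lstm(train, batch_size2, nb_epoch, neurons):
    # last column is the target, everything before it is the input
    X, y = train[:, 0:-1], train[:, -1]
    # reshape to [samples, timesteps, features] as the LSTM layer expects
    X = X.reshape(X.shape[0], 1, X.shape[1])
    model = Sequential()
    # stateful LSTM: the batch size is fixed in batch_input_shape,
    # so training and evaluation must use the same batch size
    model.add(LSTM(neurons,
                   batch_input_shape=(batch_size2, X.shape[1], X.shape[2]),
                   stateful=True))
    model.add(Dense(1))
    # the commit replaces Adam(lr=0.01) with the default 'adam' optimizer
    model.compile(loss='mean_squared_error', optimizer='adam')
    for i in range(nb_epoch):
        # one epoch per fit() call so the LSTM state can be reset in between
        model.fit(X, y, epochs=1, batch_size=batch_size2, verbose=0, shuffle=False)
        # batch_size is passed explicitly here (an assumption, not in the diff)
        # to keep evaluate() consistent with the stateful batch size
        loss = model.evaluate(X, y, batch_size=batch_size2, verbose=0)
        print("Epoch {}/{}, loss = {}".format(i, nb_epoch, loss))
        model.reset_states()
    return model

# hypothetical usage: 8 rows of (1 feature + 1 target), batch size equal to the sample count
if __name__ == '__main__':
    train = np.random.rand(8, 2)
    fit_lstm(train, batch_size2=8, nb_epoch=3, neurons=4)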