mirror of https://github.com/newnius/YAO-optimizer.git synced 2025-12-13 08:26:43 +00:00
2020-05-02 10:29:32 +08:00
parent e7652959af
commit 294ec8d853
2 changed files with 13 additions and 15 deletions


@@ -59,19 +59,17 @@ def invert_scale(scaler, X, yhat):
 # fit an LSTM network to training data
-def fit_lstm(train, batch_size, nb_epoch, neurons):
-    t = train.shape[0] % batch_size
-    train = train[train.shape[0] - t * batch_size:]
+def fit_lstm(train, batch_size2, nb_epoch, neurons):
     X, y = train[:, 0:-1], train[:, -1]
     X = X.reshape(X.shape[0], 1, X.shape[1])
     model = Sequential()
-    model.add(LSTM(neurons, batch_input_shape=(batch_size, X.shape[1], X.shape[2]), stateful=True))
+    model.add(LSTM(neurons, batch_input_shape=(batch_size2, X.shape[1], X.shape[2]), stateful=True))
     model.add(Dense(1))
     model.compile(loss='mean_squared_error', optimizer='adam')
     for i in range(nb_epoch):
         print("Epoch {}/{}".format(i, nb_epoch))
-        model.fit(X, y, epochs=1, batch_size=batch_size, verbose=0, shuffle=False)
+        model.fit(X, y, epochs=1, batch_size=batch_size2, verbose=0, shuffle=False)
         model.reset_states()
     return model
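
Below is a minimal usage sketch (not part of this commit), assuming the revised fit_lstm above is in scope and that train is a scaled 2-D supervised array whose last column is the target. The array size and the hyperparameter values are illustrative assumptions only; with stateful=True, the number of training rows must be divisible by batch_size2.

import numpy as np

# Hypothetical supervised dataset: 120 rows of (1 lag feature, 1 target).
# Random values stand in for scaled time-series data; not from the repository.
train = np.random.rand(120, 2).astype("float32")

batch_size2 = 4   # 120 % 4 == 0, so the stateful-LSTM batch constraint holds
nb_epoch = 10     # illustrative epoch count
neurons = 4       # illustrative hidden-unit count

model = fit_lstm(train, batch_size2, nb_epoch, neurons)

# Forecast on the last full batch; a stateful model expects inputs
# whose batch dimension matches batch_size2.
X_last = train[-batch_size2:, 0:-1].reshape(batch_size2, 1, train.shape[1] - 1)
yhat = model.predict(X_last, batch_size=batch_size2)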