Wind energy is the kinetic energy of the wind converted into useful energy by wind turbines. This renewable source is widely used because it is a clean alternative to fossil fuels: it produces no greenhouse gases and can be deployed in many locations. It still carries some environmental and social costs, however, such as soil compaction and the noise emitted by the blades. Moreover, generation fluctuates strongly with the wind, which opens the door to Machine Learning models for generation forecasting. This project aims to forecast the wind power generation of a wind turbine located in Germany using historical data from 2011 to the end of 2021.
# Importing Libraries:
import pandas as pd
import numpy as np
import plotly.express as px
import plotly as pl
import matplotlib.pyplot as plt
import seaborn as sns
from sklearn.feature_selection import r_regression
from sklearn.preprocessing import MinMaxScaler, StandardScaler, RobustScaler
from sklearn.ensemble import RandomForestRegressor
from xgboost import XGBRegressor
import tensorflow as tf
from tensorflow.keras.layers import LSTM, Conv1D, MaxPooling1D
from sklearn.metrics import mean_squared_error
import os
from datetime import datetime as dt
# Loading dataset
df = pd.read_csv("data.csv")
# Copy of the dataset:
dado_horarios = df.copy()
# Let's see the first five rows:
dado_horarios.head()
|   | dt | MW |
|---|---|---|
| 0 | 2011-01-01 00:00:00 | 3416.0 |
| 1 | 2011-01-01 00:15:00 | 4755.0 |
| 2 | 2011-01-01 00:30:00 | 4939.0 |
| 3 | 2011-01-01 00:45:00 | 4939.0 |
| 4 | 2011-01-01 01:00:00 | 4998.0 |
# Some information about data type and memory:
dado_horarios.info()
<class 'pandas.core.frame.DataFrame'>
RangeIndex: 385566 entries, 0 to 385565
Data columns (total 2 columns):
 #   Column  Non-Null Count   Dtype
---  ------  --------------   -----
 0   dt      385566 non-null  object
 1   MW      385566 non-null  float64
dtypes: float64(1), object(1)
memory usage: 5.9+ MB
# Changing the type of the Time series column:
dado_horarios['dt'] = pd.to_datetime(dado_horarios['dt'])
# Dt column as index:
dado_horarios.set_index('dt', inplace=True)
# Some descriptive statistics:
dado_horarios.describe()
|   | MW |
|---|---|
| count | 385566.000000 |
| mean | 3183.560481 |
| std | 3045.941591 |
| min | 0.000000 |
| 25% | 905.000000 |
| 50% | 2164.000000 |
| 75% | 4529.877500 |
| max | 16727.500000 |
# Missing data:
dado_horarios.isna().sum()
MW    0
dtype: int64
As we can see above, there is no missing data.
# Aggregating information:
dado_horarios['month'] = dado_horarios.index.month
dado_horarios['year'] = dado_horarios.index.year
group = dado_horarios.groupby(["year", "month"]).mean().reset_index()
# Plotting the monthly average wind power by year:
fig = px.line(data_frame=group, x='month', y='MW', color='year', title="Monthly Average Wind power generation by year")
fig.show("png", width=1200, height=500)
Observations:
# Histograms of the time series (monthly averages) by year:
fig, ax = plt.subplots(ncols=4, nrows=3, sharex=False, sharey=False, figsize=(25, 20))
for i, year in enumerate(range(2011, 2022)):
    row, col = divmod(i, 4)
    sns.histplot(x=group[group['year'] == year]['MW'], kde=True, ax=ax[row, col])
    ax[row, col].set_title("Monthly average Wind power distribution of {}".format(year))
# The twelfth panel has no year to show:
ax[2, 3].set_visible(False)
plt.show()
Observations:
# Setting the time series column as index:
df.set_index('dt', inplace=True)
The class below has a method that transforms a time-series dataset into a supervised-learning dataset: each row holds a window of past values as features and the values that follow it as targets.
# Preprocessing class
class Preprocessamento:
    def timeseries_to_supervised(self, df, n_features, n_target):
        """Slide a window over df: n_features lags as inputs, the next n_target values as outputs."""
        n_linhas = 0
        # Feature columns var(t - n_features), ..., var(t - 1):
        colunas_features = ['var(t - {})'.format(str(i)) for i in range(n_features, 0, -1)]
        # Target columns var(t), var(t + 1), ..., var(t + n_target - 1):
        colunas_target = ['var(t)' if i == 0 else 'var(t + {})'.format(str(i)) for i in range(0, n_target)]
        colunas_total = colunas_features + colunas_target
        lista = []
        while n_linhas <= len(df) - n_target - n_features:
            # Past window and the future values that follow it:
            quantidade_de_features_iteracao = df.iloc[n_linhas:n_linhas + n_features].values
            quantidade_de_target_iteracao = df.iloc[n_linhas + n_features:n_linhas + n_features + n_target].values
            linha = np.concatenate([quantidade_de_features_iteracao, quantidade_de_target_iteracao], axis=0)
            lista.append(linha.reshape(1, -1)[0])
            n_linhas += 1
        df_iter = pd.DataFrame(lista, columns=colunas_total)
        return df_iter
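A quick sanity check on a toy series (the toy frame below is made up for illustration) shows the resulting layout:
# Toy example: 3 lag features predicting 2 steps ahead.
toy = pd.DataFrame({'MW': [1.0, 2.0, 3.0, 4.0, 5.0, 6.0]})
print(Preprocessamento().timeseries_to_supervised(toy, n_features=3, n_target=2))
#    var(t - 3)  var(t - 2)  var(t - 1)  var(t)  var(t + 1)
# 0         1.0         2.0         3.0     4.0         5.0
# 1         2.0         3.0         4.0     5.0         6.0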
# Function that prints several metrics for each model and collects the results:
def metricas(X_test, y_test, models):
    resultados = {}
    for name, model in models.items():
        if name == 'LSTM':
            # A Keras LSTM expects 3-D input (samples, timesteps, features),
            # so each test row is reshaped and predicted individually:
            y_pred = []
            for i in range(len(X_test)):
                X_test_linha = X_test[i, 0:]
                X_test_reshaped = X_test_linha.reshape(1, 1, len(X_test_linha))
                predicoes = model.predict(X_test_reshaped, batch_size=1, verbose=0)
                y_pred.append(predicoes[0][0])
            y_pred = np.array(y_pred)
        else:
            y_pred = model.predict(X_test)
        mse = mean_squared_error(y_test, y_pred)
        rmse = np.sqrt(mse)
        coef_pearson = r_regression(y_pred.reshape(-1, 1), y_test)[0]
        resultados[name] = (mse, rmse, coef_pearson)
        print(f'Model: {name}')
        print(f'Mean squared error: {mse}')
        print(f'Root mean squared error: {rmse}')
        print(f'Pearson coefficient: {coef_pearson}')
        print('###########################################\n')
    return resultados
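The function expects a dict mapping a model name to a fitted model, plus a 1-D target vector for a single horizon. A hypothetical usage, fitting two baselines on the first horizon (rf_model and xgb_model are illustrative names, not defined elsewhere in this notebook):
# Hypothetical example: two baseline models for the first horizon (t):
rf_model = RandomForestRegressor(n_jobs=-1).fit(X_train, y_train[:, 0])
xgb_model = XGBRegressor().fit(X_train, y_train[:, 0])
resultados = metricas(X_test, y_test[:, 0], {'Random Forest': rf_model, 'XGBoost': xgb_model})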
# Splitting into initial train and test datasets:
percentagem_treino_inicial = 0.8
limite_treino_inicial = int(len(df)*percentagem_treino_inicial)
df_train_inicial = df.iloc[0:limite_treino_inicial]
df_test = df.iloc[limite_treino_inicial:]
# Splitting into training and validation datasets:
percentagem_treino = 0.8
limite_treino = int(len(df_train_inicial)*percentagem_treino)
df_train = df_train_inicial.iloc[0:limite_treino]
df_val = df_train_inicial.iloc[limite_treino:]
# Instance of the preprocessing class:
prep_obj = Preprocessamento()
We will use a window of 20 and a horizon of 10: the 20 most recent observations serve as features to predict the next 10 timesteps. Furthermore, we will train a separate model for each timestep ahead, so there will be 10 models per algorithm, as sketched below.
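Concretely, each supervised row produced by timeseries_to_supervised is laid out as follows (a schematic of the column layout, not executable output):
# For n_features=20 and n_target=10, each supervised row is:
#   features: var(t - 20), var(t - 19), ..., var(t - 1)  -> X (20 columns)
#   targets:  var(t), var(t + 1), ..., var(t + 9)        -> y (10 columns)
# Model k (k = 0, ..., 9) is trained on X against the single column y[:, k],
# i.e. k + 1 steps ahead, so each algorithm yields 10 models.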
# Transforming the time series into a supervised problem:
n_features = 20
n_target = 10
df_train_supervised = prep_obj.timeseries_to_supervised(df_train, n_features, n_target)
df_val_supervised = prep_obj.timeseries_to_supervised(df_val, n_features, n_target)
df_test_supervised = prep_obj.timeseries_to_supervised(df_test, n_features, n_target)
# Some descriptive statistics about the features:
df_train_supervised.describe()
|   | var(t - 20) | var(t - 19) | var(t - 18) | var(t - 17) | var(t - 16) | var(t - 15) | var(t - 14) | var(t - 13) | var(t - 12) | var(t - 11) | ... | var(t) | var(t + 1) | var(t + 2) | var(t + 3) | var(t + 4) | var(t + 5) | var(t + 6) | var(t + 7) | var(t + 8) | var(t + 9) |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
| count | 246732.000000 | 246732.000000 | 246732.000000 | 246732.000000 | 246732.000000 | 246732.000000 | 246732.000000 | 246732.000000 | 246732.000000 | 246732.000000 | ... | 246732.000000 | 246732.00000 | 246732.000000 | 246732.000000 | 246732.000000 | 246732.000000 | 246732.000000 | 246732.000000 | 246732.000000 | 246732.000000 |
| mean | 2629.614760 | 2629.621063 | 2629.621858 | 2629.621876 | 2629.622045 | 2629.622477 | 2629.622653 | 2629.622223 | 2629.621149 | 2629.619584 | ... | 2629.571150 | 2629.56493 | 2629.558805 | 2629.552966 | 2629.546922 | 2629.541040 | 2629.534696 | 2629.528298 | 2629.521042 | 2629.513345 |
| std | 2577.736081 | 2577.739905 | 2577.740590 | 2577.740607 | 2577.740760 | 2577.741166 | 2577.741341 | 2577.740898 | 2577.739783 | 2577.738152 | ... | 2577.683505 | 2577.67614 | 2577.668791 | 2577.661796 | 2577.654572 | 2577.647439 | 2577.639970 | 2577.632878 | 2577.624891 | 2577.616327 |
| min | 0.000000 | 0.000000 | 0.000000 | 0.000000 | 0.000000 | 0.000000 | 0.000000 | 0.000000 | 0.000000 | 0.000000 | ... | 0.000000 | 0.00000 | 0.000000 | 0.000000 | 0.000000 | 0.000000 | 0.000000 | 0.000000 | 0.000000 | 0.000000 |
| 25% | 710.000000 | 710.000000 | 710.000000 | 710.000000 | 710.000000 | 710.000000 | 710.000000 | 710.000000 | 710.000000 | 710.000000 | ... | 710.000000 | 710.00000 | 710.000000 | 710.000000 | 710.000000 | 710.000000 | 710.000000 | 710.000000 | 710.000000 | 710.000000 |
| 50% | 1756.000000 | 1756.000000 | 1756.000000 | 1756.000000 | 1756.000000 | 1756.000000 | 1756.000000 | 1756.000000 | 1756.000000 | 1756.000000 | ... | 1756.000000 | 1756.00000 | 1756.000000 | 1756.000000 | 1756.000000 | 1756.000000 | 1756.000000 | 1756.000000 | 1756.000000 | 1756.000000 |
| 75% | 3720.270000 | 3720.385000 | 3720.385000 | 3720.385000 | 3720.385000 | 3720.385000 | 3720.385000 | 3720.385000 | 3720.385000 | 3720.385000 | ... | 3720.385000 | 3720.38500 | 3720.385000 | 3720.385000 | 3720.385000 | 3720.385000 | 3720.385000 | 3720.385000 | 3720.385000 | 3720.385000 |
| max | 14266.070000 | 14266.070000 | 14266.070000 | 14266.070000 | 14266.070000 | 14266.070000 | 14266.070000 | 14266.070000 | 14266.070000 | 14266.070000 | ... | 14266.070000 | 14266.07000 | 14266.070000 | 14266.070000 | 14266.070000 | 14266.070000 | 14266.070000 | 14266.070000 | 14266.070000 | 14266.070000 |

8 rows × 30 columns
Observations:
# Splitting features and targets for the train, validation and test sets:
X_train, y_train = df_train_supervised.values[:, 0:n_features], df_train_supervised.values[:, n_features:]
X_val, y_val = df_val_supervised.values[:, 0:n_features], df_val_supervised.values[:, n_features:]
X_test, y_test = df_test_supervised.values[:, 0:n_features], df_test_supervised.values[:, n_features:]
# Features shape:
print('Feature shapes:')
print(f'Training: {X_train.shape}')
print(f'Validation: {X_val.shape}')
print(f'Test: {X_test.shape}')
print('#########################\n')
# Target shape:
print('Target shapes:')
print(f'Training: {y_train.shape}')
print(f'Validation: {y_val.shape}')
print(f'Test: {y_test.shape}')
Feature shapes:
Training: (246732, 20)
Validation: (61662, 20)
Test: (77085, 20)
#########################

Target shapes:
Training: (246732, 10)
Validation: (61662, 10)
Test: (77085, 10)
Definition: the Standard Scaler standardizes each feature by subtracting the mean and dividing by the standard deviation estimated on the training set.
OBS: the Standard Scaler can perform slightly worse than the other transformations because it assumes that the data is normally distributed; even so, you can still standardize your data.
Mathematical definition:
$X_{new_{i}} = \frac{X_{i} - \hat{\mu}_{i}}{\hat{\sigma}_{i}}$
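As a quick check that the fitted scaler implements this formula (a minimal sketch; StandardScaler exposes the training statistics as mean_ and scale_):
# Manual standardization should match StandardScaler.transform up to float error:
scaler_check = StandardScaler().fit(X_train)
manual = (X_train - scaler_check.mean_) / scaler_check.scale_
assert np.allclose(manual, scaler_check.transform(X_train))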
# Fitting a Standard Scaler object:
std_scaler = StandardScaler()
std_scaler.fit(X_train)
# Transforming all the sets:
X_train_std = std_scaler.transform(X_train)
X_val_std = std_scaler.transform(X_val)
X_test_std = std_scaler.transform(X_test)
# Create nested directories:
def make_directory(path):
try:
os.makedirs(path)
except FileExistsError:
print("File already exists!")
# Directory name:
directory = dt.now().strftime("%Y-%m-%d__%H_%M_%S")
# Function that builds a simple neural network architecture:
def mlp_simples(device):
with tf.device(device):
model = tf.keras.Sequential([
tf.keras.layers.Dense(100, activation='relu', input_shape=X_train.shape[1:]),
tf.keras.layers.Dense(50, activation='relu'),
tf.keras.layers.Dense(25, activation='relu'),
tf.keras.layers.Dense(10, activation='relu'),
tf.keras.layers.Dense(1)
])
model.compile(optimizer='adam', loss='mean_squared_error', metrics=['mse'])
return model
# Making a directory for MLP models:
path_modelos_mlp_simples = "modelos_mlp_simples/{}".format(directory)
make_directory(path_modelos_mlp_simples)
# Training an MLP for each of the 10 horizons:
for target in range(n_target):
# EarlyStopping callback:
earlystopping = tf.keras.callbacks.EarlyStopping(patience=10, restore_best_weights=True)
checkpoint_mlp = tf.keras.callbacks.ModelCheckpoint('{}/model{}_mlp.h5'.format(path_modelos_mlp_simples, target))
model_mlp = mlp_simples('/device:GPU:0')
history_2 = model_mlp.fit(X_train_std, y_train[:, target], epochs=60,
validation_data=(X_val_std, y_val[:, target]), callbacks=[earlystopping, checkpoint_mlp])
File already exists!
Epoch 1/60
7711/7711 [==============================] - 14s 2ms/step - loss: 428830.5938 - mse: 428830.5938 - val_loss: 272890.6562 - val_mse: 272890.6562
c:\Users\caios\AppData\Local\Programs\Python\Python311\Lib\site-packages\keras\src\engine\training.py:3000: UserWarning: You are saving your model as an HDF5 file via `model.save()`. This file format is considered legacy. We recommend using instead the native Keras format, e.g. `model.save('my_model.keras')`.
...
(per-epoch training logs for the ten horizon models truncated; validation MSE grows with the forecast horizon, from about 2.4e5 for the t model to above 1.1e6 for the longest horizons shown)
[==============================] - 9s 1ms/step - loss: 609028.1250 - mse: 609028.1250 - val_loss: 1126155.6250 - val_mse: 1126155.6250 Epoch 19/60 7711/7711 [==============================] - 9s 1ms/step - loss: 609474.3750 - mse: 609474.3750 - val_loss: 1166548.5000 - val_mse: 1166548.5000 Epoch 20/60 7711/7711 [==============================] - 9s 1ms/step - loss: 607584.1875 - mse: 607584.1875 - val_loss: 1137951.5000 - val_mse: 1137951.5000 Epoch 21/60 7711/7711 [==============================] - 9s 1ms/step - loss: 607689.5625 - mse: 607689.5625 - val_loss: 1156014.5000 - val_mse: 1156014.5000 Epoch 22/60 7711/7711 [==============================] - 9s 1ms/step - loss: 607546.6250 - mse: 607546.6250 - val_loss: 1142106.1250 - val_mse: 1142106.1250 Epoch 23/60 7711/7711 [==============================] - 9s 1ms/step - loss: 606734.0000 - mse: 606734.0000 - val_loss: 1128023.5000 - val_mse: 1128023.5000 Epoch 24/60 7711/7711 [==============================] - 9s 1ms/step - loss: 606110.5000 - mse: 606110.5000 - val_loss: 1144863.8750 - val_mse: 1144863.8750 Epoch 25/60 7711/7711 [==============================] - 9s 1ms/step - loss: 604953.2500 - mse: 604953.2500 - val_loss: 1160347.6250 - val_mse: 1160347.6250 Epoch 26/60 7711/7711 [==============================] - 9s 1ms/step - loss: 604546.4375 - mse: 604546.4375 - val_loss: 1121827.3750 - val_mse: 1121827.3750 Epoch 27/60 7711/7711 [==============================] - 9s 1ms/step - loss: 604484.0000 - mse: 604484.0000 - val_loss: 1120987.3750 - val_mse: 1120987.3750 Epoch 28/60 7711/7711 [==============================] - 9s 1ms/step - loss: 604431.8125 - mse: 604431.8125 - val_loss: 1124276.7500 - val_mse: 1124276.7500 Epoch 29/60 7711/7711 [==============================] - 9s 1ms/step - loss: 603637.5000 - mse: 603637.5000 - val_loss: 1128026.8750 - val_mse: 1128026.8750 Epoch 30/60 7711/7711 [==============================] - 11s 1ms/step - loss: 602685.1250 - mse: 602685.1250 - val_loss: 1115265.2500 - val_mse: 1115265.2500 Epoch 31/60 7711/7711 [==============================] - 9s 1ms/step - loss: 602495.5625 - mse: 602495.5625 - val_loss: 1128574.3750 - val_mse: 1128574.3750 Epoch 32/60 7711/7711 [==============================] - 9s 1ms/step - loss: 600988.1875 - mse: 600988.1875 - val_loss: 1152490.8750 - val_mse: 1152490.8750 Epoch 33/60 7711/7711 [==============================] - 10s 1ms/step - loss: 601937.0000 - mse: 601937.0000 - val_loss: 1142351.7500 - val_mse: 1142351.7500 Epoch 34/60 7711/7711 [==============================] - 12s 2ms/step - loss: 601274.7500 - mse: 601274.7500 - val_loss: 1151511.0000 - val_mse: 1151511.0000 Epoch 35/60 7711/7711 [==============================] - 13s 2ms/step - loss: 600328.8125 - mse: 600328.8125 - val_loss: 1110999.6250 - val_mse: 1110999.6250 Epoch 36/60 7711/7711 [==============================] - 9s 1ms/step - loss: 601184.0000 - mse: 601184.0000 - val_loss: 1162446.2500 - val_mse: 1162446.2500 Epoch 37/60 7711/7711 [==============================] - 9s 1ms/step - loss: 599595.7500 - mse: 599595.7500 - val_loss: 1146517.8750 - val_mse: 1146517.8750 Epoch 38/60 7711/7711 [==============================] - 9s 1ms/step - loss: 598719.0000 - mse: 598719.0000 - val_loss: 1106179.5000 - val_mse: 1106179.5000 Epoch 39/60 7711/7711 [==============================] - 9s 1ms/step - loss: 598833.5000 - mse: 598833.5000 - val_loss: 1120472.5000 - val_mse: 1120472.5000 Epoch 40/60 7711/7711 [==============================] - 9s 1ms/step - loss: 599331.8750 - mse: 599331.8750 - 
val_loss: 1122754.2500 - val_mse: 1122754.2500 Epoch 41/60 7711/7711 [==============================] - 9s 1ms/step - loss: 599445.8125 - mse: 599445.8125 - val_loss: 1113701.6250 - val_mse: 1113701.6250 Epoch 42/60 7711/7711 [==============================] - 9s 1ms/step - loss: 598612.6250 - mse: 598612.6250 - val_loss: 1137704.7500 - val_mse: 1137704.7500 Epoch 43/60 7711/7711 [==============================] - 11s 1ms/step - loss: 598125.8125 - mse: 598125.8125 - val_loss: 1116975.2500 - val_mse: 1116975.2500 Epoch 44/60 7711/7711 [==============================] - 10s 1ms/step - loss: 598072.0625 - mse: 598072.0625 - val_loss: 1121959.7500 - val_mse: 1121959.7500 Epoch 45/60 7711/7711 [==============================] - 12s 2ms/step - loss: 597850.9375 - mse: 597850.9375 - val_loss: 1108042.0000 - val_mse: 1108042.0000 Epoch 46/60 7711/7711 [==============================] - 9s 1ms/step - loss: 597401.3750 - mse: 597401.3750 - val_loss: 1112726.6250 - val_mse: 1112726.6250 Epoch 47/60 7711/7711 [==============================] - 9s 1ms/step - loss: 597065.1875 - mse: 597065.1875 - val_loss: 1136495.1250 - val_mse: 1136495.1250 Epoch 48/60 7711/7711 [==============================] - 10s 1ms/step - loss: 596339.3125 - mse: 596339.3125 - val_loss: 1125084.2500 - val_mse: 1125084.2500 Epoch 1/60 7711/7711 [==============================] - 11s 1ms/step - loss: 1132425.3750 - mse: 1132425.3750 - val_loss: 1509432.0000 - val_mse: 1509432.0000 Epoch 2/60 7711/7711 [==============================] - 9s 1ms/step - loss: 777156.0000 - mse: 777156.0000 - val_loss: 1482326.0000 - val_mse: 1482326.0000 Epoch 3/60 7711/7711 [==============================] - 9s 1ms/step - loss: 763858.1875 - mse: 763858.1875 - val_loss: 1458411.1250 - val_mse: 1458411.1250 Epoch 4/60 7711/7711 [==============================] - 9s 1ms/step - loss: 746270.7500 - mse: 746270.7500 - val_loss: 1415960.1250 - val_mse: 1415960.1250 Epoch 5/60 7711/7711 [==============================] - 11s 1ms/step - loss: 737213.4375 - mse: 737213.4375 - val_loss: 1446671.7500 - val_mse: 1446671.7500 Epoch 6/60 7711/7711 [==============================] - 11s 1ms/step - loss: 730479.3125 - mse: 730479.3125 - val_loss: 1400168.7500 - val_mse: 1400168.7500 Epoch 7/60 7711/7711 [==============================] - 9s 1ms/step - loss: 725888.9375 - mse: 725888.9375 - val_loss: 1417895.7500 - val_mse: 1417895.7500 Epoch 8/60 7711/7711 [==============================] - 9s 1ms/step - loss: 721182.6250 - mse: 721182.6250 - val_loss: 1378795.3750 - val_mse: 1378795.3750 Epoch 9/60 7711/7711 [==============================] - 9s 1ms/step - loss: 718407.6250 - mse: 718407.6250 - val_loss: 1426406.6250 - val_mse: 1426406.6250 Epoch 10/60 7711/7711 [==============================] - 9s 1ms/step - loss: 716383.8750 - mse: 716383.8750 - val_loss: 1435143.7500 - val_mse: 1435143.7500 Epoch 11/60 7711/7711 [==============================] - 9s 1ms/step - loss: 715198.0000 - mse: 715198.0000 - val_loss: 1336512.1250 - val_mse: 1336512.1250 Epoch 12/60 7711/7711 [==============================] - 9s 1ms/step - loss: 713517.8750 - mse: 713517.8750 - val_loss: 1429313.6250 - val_mse: 1429313.6250 Epoch 13/60 7711/7711 [==============================] - 9s 1ms/step - loss: 712042.6875 - mse: 712042.6875 - val_loss: 1345133.6250 - val_mse: 1345133.6250 Epoch 14/60 7711/7711 [==============================] - 9s 1ms/step - loss: 711390.2500 - mse: 711390.2500 - val_loss: 1342901.7500 - val_mse: 1342901.7500 Epoch 15/60 7711/7711 
[==============================] - 9s 1ms/step - loss: 709819.3125 - mse: 709819.3125 - val_loss: 1386926.7500 - val_mse: 1386926.7500 Epoch 16/60 7711/7711 [==============================] - 9s 1ms/step - loss: 707437.2500 - mse: 707437.2500 - val_loss: 1405117.1250 - val_mse: 1405117.1250 Epoch 17/60 7711/7711 [==============================] - 9s 1ms/step - loss: 708080.6250 - mse: 708080.6250 - val_loss: 1342979.6250 - val_mse: 1342979.6250 Epoch 18/60 7711/7711 [==============================] - 9s 1ms/step - loss: 706751.9375 - mse: 706751.9375 - val_loss: 1344545.1250 - val_mse: 1344545.1250 Epoch 19/60 7711/7711 [==============================] - 9s 1ms/step - loss: 704926.8125 - mse: 704926.8125 - val_loss: 1361914.0000 - val_mse: 1361914.0000 Epoch 20/60 7711/7711 [==============================] - 9s 1ms/step - loss: 704361.1875 - mse: 704361.1875 - val_loss: 1417299.3750 - val_mse: 1417299.3750 Epoch 21/60 7711/7711 [==============================] - 9s 1ms/step - loss: 703948.7500 - mse: 703948.7500 - val_loss: 1435128.7500 - val_mse: 1435128.7500 Epoch 1/60 7711/7711 [==============================] - 9s 1ms/step - loss: 1181778.1250 - mse: 1181778.1250 - val_loss: 1731819.8750 - val_mse: 1731819.8750 Epoch 2/60 7711/7711 [==============================] - 9s 1ms/step - loss: 890525.5625 - mse: 890525.5625 - val_loss: 1731751.5000 - val_mse: 1731751.5000 Epoch 3/60 7711/7711 [==============================] - 9s 1ms/step - loss: 873378.3750 - mse: 873378.3750 - val_loss: 1655156.0000 - val_mse: 1655156.0000 Epoch 4/60 7711/7711 [==============================] - 9s 1ms/step - loss: 854325.0000 - mse: 854325.0000 - val_loss: 1652154.7500 - val_mse: 1652154.7500 Epoch 5/60 7711/7711 [==============================] - 9s 1ms/step - loss: 843888.6250 - mse: 843888.6250 - val_loss: 1695737.7500 - val_mse: 1695737.7500 Epoch 6/60 7711/7711 [==============================] - 9s 1ms/step - loss: 835854.7500 - mse: 835854.7500 - val_loss: 1600748.8750 - val_mse: 1600748.8750 Epoch 7/60 7711/7711 [==============================] - 9s 1ms/step - loss: 830025.0625 - mse: 830025.0625 - val_loss: 1592590.6250 - val_mse: 1592590.6250 Epoch 8/60 7711/7711 [==============================] - 9s 1ms/step - loss: 826837.4375 - mse: 826837.4375 - val_loss: 1589897.0000 - val_mse: 1589897.0000 Epoch 9/60 7711/7711 [==============================] - 9s 1ms/step - loss: 822391.6250 - mse: 822391.6250 - val_loss: 1566108.3750 - val_mse: 1566108.3750 Epoch 10/60 7711/7711 [==============================] - 9s 1ms/step - loss: 822212.3125 - mse: 822212.3125 - val_loss: 1548259.8750 - val_mse: 1548259.8750 Epoch 11/60 7711/7711 [==============================] - 9s 1ms/step - loss: 819643.1875 - mse: 819643.1875 - val_loss: 1571170.1250 - val_mse: 1571170.1250 Epoch 12/60 7711/7711 [==============================] - 9s 1ms/step - loss: 818968.4375 - mse: 818968.4375 - val_loss: 1558449.3750 - val_mse: 1558449.3750 Epoch 13/60 7711/7711 [==============================] - 9s 1ms/step - loss: 816988.0625 - mse: 816988.0625 - val_loss: 1564328.6250 - val_mse: 1564328.6250 Epoch 14/60 7711/7711 [==============================] - 9s 1ms/step - loss: 815708.3125 - mse: 815708.3125 - val_loss: 1606196.7500 - val_mse: 1606196.7500 Epoch 15/60 7711/7711 [==============================] - 9s 1ms/step - loss: 814335.6875 - mse: 814335.6875 - val_loss: 1560991.7500 - val_mse: 1560991.7500 Epoch 16/60 7711/7711 [==============================] - 9s 1ms/step - loss: 813446.6250 - mse: 813446.6250 - val_loss: 
1599298.8750 - val_mse: 1599298.8750 Epoch 17/60 7711/7711 [==============================] - 9s 1ms/step - loss: 813120.2500 - mse: 813120.2500 - val_loss: 1525991.7500 - val_mse: 1525991.7500 Epoch 18/60 7711/7711 [==============================] - 9s 1ms/step - loss: 811167.9375 - mse: 811167.9375 - val_loss: 1526312.1250 - val_mse: 1526312.1250 Epoch 19/60 7711/7711 [==============================] - 9s 1ms/step - loss: 811342.2500 - mse: 811342.2500 - val_loss: 1542945.2500 - val_mse: 1542945.2500 Epoch 20/60 7711/7711 [==============================] - 9s 1ms/step - loss: 809676.8750 - mse: 809676.8750 - val_loss: 1578484.1250 - val_mse: 1578484.1250 Epoch 21/60 7711/7711 [==============================] - 9s 1ms/step - loss: 808908.6250 - mse: 808908.6250 - val_loss: 1570290.2500 - val_mse: 1570290.2500 Epoch 22/60 7711/7711 [==============================] - 9s 1ms/step - loss: 809047.8750 - mse: 809047.8750 - val_loss: 1523161.5000 - val_mse: 1523161.5000 Epoch 23/60 7711/7711 [==============================] - 9s 1ms/step - loss: 808180.5000 - mse: 808180.5000 - val_loss: 1560078.3750 - val_mse: 1560078.3750 Epoch 24/60 7711/7711 [==============================] - 9s 1ms/step - loss: 807311.0625 - mse: 807311.0625 - val_loss: 1545962.3750 - val_mse: 1545962.3750 Epoch 25/60 7711/7711 [==============================] - 10s 1ms/step - loss: 806318.3750 - mse: 806318.3750 - val_loss: 1525198.6250 - val_mse: 1525198.6250 Epoch 26/60 7711/7711 [==============================] - 9s 1ms/step - loss: 804609.8750 - mse: 804609.8750 - val_loss: 1529203.8750 - val_mse: 1529203.8750 Epoch 27/60 7711/7711 [==============================] - 11s 1ms/step - loss: 804607.8750 - mse: 804607.8750 - val_loss: 1570753.0000 - val_mse: 1570753.0000 Epoch 28/60 7711/7711 [==============================] - 9s 1ms/step - loss: 805101.6875 - mse: 805101.6875 - val_loss: 1542289.6250 - val_mse: 1542289.6250 Epoch 29/60 7711/7711 [==============================] - 9s 1ms/step - loss: 803180.8125 - mse: 803180.8125 - val_loss: 1524336.3750 - val_mse: 1524336.3750 Epoch 30/60 7711/7711 [==============================] - 9s 1ms/step - loss: 803533.3125 - mse: 803533.3125 - val_loss: 1539272.2500 - val_mse: 1539272.2500 Epoch 31/60 7711/7711 [==============================] - 9s 1ms/step - loss: 801790.0000 - mse: 801790.0000 - val_loss: 1530008.5000 - val_mse: 1530008.5000 Epoch 32/60 7711/7711 [==============================] - 9s 1ms/step - loss: 801666.6250 - mse: 801666.6250 - val_loss: 1505929.2500 - val_mse: 1505929.2500 Epoch 33/60 7711/7711 [==============================] - 10s 1ms/step - loss: 801340.0000 - mse: 801340.0000 - val_loss: 1545276.1250 - val_mse: 1545276.1250 Epoch 34/60 7711/7711 [==============================] - 9s 1ms/step - loss: 801959.5625 - mse: 801959.5625 - val_loss: 1515768.3750 - val_mse: 1515768.3750 Epoch 35/60 7711/7711 [==============================] - 9s 1ms/step - loss: 799388.7500 - mse: 799388.7500 - val_loss: 1519302.6250 - val_mse: 1519302.6250 Epoch 36/60 7711/7711 [==============================] - 9s 1ms/step - loss: 798700.5000 - mse: 798700.5000 - val_loss: 1518900.7500 - val_mse: 1518900.7500 Epoch 37/60 7711/7711 [==============================] - 9s 1ms/step - loss: 799982.1875 - mse: 799982.1875 - val_loss: 1556717.6250 - val_mse: 1556717.6250 Epoch 38/60 7711/7711 [==============================] - 9s 1ms/step - loss: 798784.0000 - mse: 798784.0000 - val_loss: 1516352.7500 - val_mse: 1516352.7500 Epoch 39/60 7711/7711 [==============================] - 11s 
1ms/step - loss: 798727.1250 - mse: 798727.1250 - val_loss: 1519625.8750 - val_mse: 1519625.8750 Epoch 40/60 7711/7711 [==============================] - 9s 1ms/step - loss: 796833.0000 - mse: 796833.0000 - val_loss: 1528183.2500 - val_mse: 1528183.2500 Epoch 41/60 7711/7711 [==============================] - 9s 1ms/step - loss: 796501.0625 - mse: 796501.0625 - val_loss: 1512159.7500 - val_mse: 1512159.7500 Epoch 42/60 7711/7711 [==============================] - 9s 1ms/step - loss: 796180.5625 - mse: 796180.5625 - val_loss: 1521572.2500 - val_mse: 1521572.2500 Epoch 1/60 7711/7711 [==============================] - 9s 1ms/step - loss: 1278796.7500 - mse: 1278796.7500 - val_loss: 1949078.0000 - val_mse: 1949078.0000 Epoch 2/60 7711/7711 [==============================] - 9s 1ms/step - loss: 997129.2500 - mse: 997129.2500 - val_loss: 1925164.6250 - val_mse: 1925164.6250 Epoch 3/60 7711/7711 [==============================] - 10s 1ms/step - loss: 976509.5000 - mse: 976509.5000 - val_loss: 1883684.7500 - val_mse: 1883684.7500 Epoch 4/60 7711/7711 [==============================] - 9s 1ms/step - loss: 957808.1875 - mse: 957808.1875 - val_loss: 1901554.7500 - val_mse: 1901554.7500 Epoch 5/60 7711/7711 [==============================] - 9s 1ms/step - loss: 947281.0000 - mse: 947281.0000 - val_loss: 1812553.1250 - val_mse: 1812553.1250 Epoch 6/60 7711/7711 [==============================] - 9s 1ms/step - loss: 940146.9375 - mse: 940146.9375 - val_loss: 1846085.6250 - val_mse: 1846085.6250 Epoch 7/60 7711/7711 [==============================] - 9s 1ms/step - loss: 934947.1250 - mse: 934947.1250 - val_loss: 1797743.3750 - val_mse: 1797743.3750 Epoch 8/60 7711/7711 [==============================] - 9s 1ms/step - loss: 931173.2500 - mse: 931173.2500 - val_loss: 1767478.3750 - val_mse: 1767478.3750 Epoch 9/60 7711/7711 [==============================] - 9s 1ms/step - loss: 927560.5000 - mse: 927560.5000 - val_loss: 1772028.1250 - val_mse: 1772028.1250 Epoch 10/60 7711/7711 [==============================] - 9s 1ms/step - loss: 924397.8125 - mse: 924397.8125 - val_loss: 1786684.2500 - val_mse: 1786684.2500 Epoch 11/60 7711/7711 [==============================] - 9s 1ms/step - loss: 921408.6250 - mse: 921408.6250 - val_loss: 1819296.1250 - val_mse: 1819296.1250 Epoch 12/60 7711/7711 [==============================] - 9s 1ms/step - loss: 922285.3750 - mse: 922285.3750 - val_loss: 1752158.1250 - val_mse: 1752158.1250 Epoch 13/60 7711/7711 [==============================] - 9s 1ms/step - loss: 920026.8750 - mse: 920026.8750 - val_loss: 1828496.6250 - val_mse: 1828496.6250 Epoch 14/60 7711/7711 [==============================] - 9s 1ms/step - loss: 918136.6250 - mse: 918136.6250 - val_loss: 1757702.1250 - val_mse: 1757702.1250 Epoch 15/60 7711/7711 [==============================] - 9s 1ms/step - loss: 918691.1250 - mse: 918691.1250 - val_loss: 2002906.7500 - val_mse: 2002906.7500 Epoch 16/60 7711/7711 [==============================] - 9s 1ms/step - loss: 916749.6250 - mse: 916749.6250 - val_loss: 1817758.7500 - val_mse: 1817758.7500 Epoch 17/60 7711/7711 [==============================] - 9s 1ms/step - loss: 915257.8750 - mse: 915257.8750 - val_loss: 1736416.8750 - val_mse: 1736416.8750 Epoch 18/60 7711/7711 [==============================] - 9s 1ms/step - loss: 913891.8125 - mse: 913891.8125 - val_loss: 1744511.8750 - val_mse: 1744511.8750 Epoch 19/60 7711/7711 [==============================] - 9s 1ms/step - loss: 914595.6250 - mse: 914595.6250 - val_loss: 1753693.2500 - val_mse: 1753693.2500 Epoch 
20/60 7711/7711 [==============================] - 9s 1ms/step - loss: 913603.1250 - mse: 913603.1250 - val_loss: 1728893.7500 - val_mse: 1728893.7500 Epoch 21/60 7711/7711 [==============================] - 9s 1ms/step - loss: 911602.8750 - mse: 911602.8750 - val_loss: 1747388.6250 - val_mse: 1747388.6250 Epoch 22/60 7711/7711 [==============================] - 9s 1ms/step - loss: 911774.9375 - mse: 911774.9375 - val_loss: 1732347.2500 - val_mse: 1732347.2500 Epoch 23/60 7711/7711 [==============================] - 9s 1ms/step - loss: 908607.0000 - mse: 908607.0000 - val_loss: 1724889.8750 - val_mse: 1724889.8750 Epoch 24/60 7711/7711 [==============================] - 9s 1ms/step - loss: 909152.6250 - mse: 909152.6250 - val_loss: 1732380.3750 - val_mse: 1732380.3750 Epoch 25/60 7711/7711 [==============================] - 9s 1ms/step - loss: 907809.1250 - mse: 907809.1250 - val_loss: 1727829.7500 - val_mse: 1727829.7500 Epoch 26/60 7711/7711 [==============================] - 9s 1ms/step - loss: 907681.1250 - mse: 907681.1250 - val_loss: 1730006.6250 - val_mse: 1730006.6250 Epoch 27/60 7711/7711 [==============================] - 9s 1ms/step - loss: 906352.7500 - mse: 906352.7500 - val_loss: 1726694.1250 - val_mse: 1726694.1250 Epoch 28/60 7711/7711 [==============================] - 9s 1ms/step - loss: 906139.6875 - mse: 906139.6875 - val_loss: 1758837.1250 - val_mse: 1758837.1250 Epoch 29/60 7711/7711 [==============================] - 9s 1ms/step - loss: 906636.4375 - mse: 906636.4375 - val_loss: 1764272.2500 - val_mse: 1764272.2500 Epoch 30/60 7711/7711 [==============================] - 9s 1ms/step - loss: 905355.1250 - mse: 905355.1250 - val_loss: 1843535.6250 - val_mse: 1843535.6250 Epoch 31/60 7711/7711 [==============================] - 9s 1ms/step - loss: 904173.2500 - mse: 904173.2500 - val_loss: 1750857.0000 - val_mse: 1750857.0000 Epoch 32/60 7711/7711 [==============================] - 9s 1ms/step - loss: 903505.5625 - mse: 903505.5625 - val_loss: 1775172.3750 - val_mse: 1775172.3750 Epoch 33/60 7711/7711 [==============================] - 9s 1ms/step - loss: 902576.0000 - mse: 902576.0000 - val_loss: 1708645.8750 - val_mse: 1708645.8750 Epoch 34/60 7711/7711 [==============================] - 9s 1ms/step - loss: 903073.8125 - mse: 903073.8125 - val_loss: 1741841.1250 - val_mse: 1741841.1250 Epoch 35/60 7711/7711 [==============================] - 9s 1ms/step - loss: 902317.1250 - mse: 902317.1250 - val_loss: 1734515.7500 - val_mse: 1734515.7500 Epoch 36/60 7711/7711 [==============================] - 9s 1ms/step - loss: 901961.6250 - mse: 901961.6250 - val_loss: 1773431.8750 - val_mse: 1773431.8750 Epoch 37/60 7711/7711 [==============================] - 9s 1ms/step - loss: 902059.1250 - mse: 902059.1250 - val_loss: 1752508.6250 - val_mse: 1752508.6250 Epoch 38/60 7711/7711 [==============================] - 9s 1ms/step - loss: 901389.4375 - mse: 901389.4375 - val_loss: 1710294.6250 - val_mse: 1710294.6250 Epoch 39/60 7711/7711 [==============================] - 9s 1ms/step - loss: 900894.8125 - mse: 900894.8125 - val_loss: 1745904.8750 - val_mse: 1745904.8750 Epoch 40/60 7711/7711 [==============================] - 10s 1ms/step - loss: 900231.1875 - mse: 900231.1875 - val_loss: 1752176.3750 - val_mse: 1752176.3750 Epoch 41/60 7711/7711 [==============================] - 9s 1ms/step - loss: 899684.0625 - mse: 899684.0625 - val_loss: 1726522.2500 - val_mse: 1726522.2500 Epoch 42/60 7711/7711 [==============================] - 9s 1ms/step - loss: 898448.3750 - mse: 
898448.3750 - val_loss: 1714041.1250 - val_mse: 1714041.1250 Epoch 43/60 7711/7711 [==============================] - 9s 1ms/step - loss: 899241.1250 - mse: 899241.1250 - val_loss: 1724680.3750 - val_mse: 1724680.3750
# Metrics for each horizon for MLP:
resultados_mlp = []
path = "modelos_mlp_simples/{}".format(directory)
# sorted() guarantees that model file i is paired with forecast horizon i,
# since os.listdir returns entries in arbitrary order:
for target, i in enumerate(sorted(os.listdir(path))):
    path_temp = path + "/" + str(i)
    modelo_carregado = tf.keras.models.load_model(path_temp)
    mse, rmse, coef_p = metricas(X_test_std, y_test[:, target], {'mlp': modelo_carregado})
    resultados_mlp.append([mse, rmse, coef_p])
| Horizon | MSE (MW²) | RMSE (MW) | Pearson coef. |
|---|---|---|---|
| 0 | 247541.24 | 497.54 | 0.9902 |
| 1 | 368879.83 | 607.35 | 0.9859 |
| 2 | 476382.20 | 690.20 | 0.9812 |
| 3 | 603839.64 | 777.07 | 0.9760 |
| 4 | 748496.93 | 865.16 | 0.9702 |
| 5 | 911933.69 | 954.95 | 0.9639 |
| 6 | 1086669.92 | 1042.43 | 0.9564 |
| 7 | 1302044.81 | 1141.07 | 0.9476 |
| 8 | 1466780.89 | 1211.11 | 0.9408 |
| 9 | 1660413.43 | 1288.57 | 0.9326 |
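The metricas helper is defined earlier in the notebook; judging by how it is called and by the printed output above, it presumably behaves like the minimal sketch below (a hedged reconstruction, not the author's exact code; it reuses the notebook's r_regression import to compute the Pearson coefficient):
# Hypothetical sketch of the metricas helper: predicts with each model passed
# in, prints MSE, RMSE and the Pearson coefficient, and returns them.
def metricas(X, y_true, modelos):
    for nome, modelo in modelos.items():
        y_pred = np.ravel(modelo.predict(X))
        mse = mean_squared_error(y_true, y_pred)
        rmse = np.sqrt(mse)
        coef_p = r_regression(y_pred.reshape(-1, 1), y_true)[0]
        print(f'Mean squared error: {mse}')
        print(f'Root Mean squared error: {rmse}')
        print(f'Coef de pearson: {coef_p}')
        print('#' * 43)
    return mse, rmse, coef_p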
# Training Random Forest models for each horizon:
resultados_rnd = []
for target in range(n_target):
    rnd_model = RandomForestRegressor(random_state=42)
    rnd_model.fit(X_train_std, y_train[:, target])
    print(f'Random Forest - {target}:')
    mse, rmse, coef_p = metricas(X_test_std, y_test[:, target], {'rnd': rnd_model})
    resultados_rnd.append([mse, rmse, coef_p])
# Training XGBoost models for each horizon:
resultados_xgb = []
for target in range(n_target):
    xgb_model = XGBRegressor(random_state=42)
    xgb_model.fit(X_train_std, y_train[:, target])
    print(f'XGBoost - {target}:')
    mse, rmse, coef_p = metricas(X_test_std, y_test[:, target], {'xgb': xgb_model})
    resultados_xgb.append([mse, rmse, coef_p])
Long Short-Term Memory (LSTM) neural networks are a type of Recurrent Neural Network (RNN) capable of remembering long-term information. An LSTM unit is composed of three main gates that control which information is kept or forgotten in a memory unit called the cell state. The name and function of each gate are given below:
Forget gate: decides which information can be discarded from the cell state.
Input gate: decides which new information should be stored in the cell state.
Output gate: decides which information in the cell state should be used to generate the output.
This type of recurrent network is well suited to tasks involving long-term dependencies and contextual information, such as time series forecasting and NLP; a hand-written sketch of a single LSTM step follows below.
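A quick way to see what the three gates do is to write out one LSTM step by hand. The NumPy sketch below is illustrative only, with W, U and b standing for the stacked learned weights; the Keras LSTM layer used next implements the same logic internally.
# Minimal sketch of one LSTM step (illustrative, not the Keras implementation):
def sigmoid(z):
    return 1 / (1 + np.exp(-z))

def lstm_step(x_t, h_prev, c_prev, W, U, b):
    # W, U and b stack the weights of the four internal transforms:
    # forget gate (f), input gate (i), candidate values (g), output gate (o).
    z = W @ x_t + U @ h_prev + b
    f, i, g, o = np.split(z, 4)
    f, i, o = sigmoid(f), sigmoid(i), sigmoid(o)
    c_t = f * c_prev + i * np.tanh(g)  # forget old information, store new
    h_t = o * np.tanh(c_t)             # the output gate filters the cell state
    return h_t, c_t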
# Transforming the features into a 3D array of shape (samples, timesteps, features):
X_train_lstm = X_train_std.reshape(X_train_std.shape[0], 1, X_train_std.shape[1])
X_val_lstm = X_val_std.reshape(X_val_std.shape[0], 1, X_val_std.shape[1])
# LSTM architecture:
def model_LSTM(device):
    with tf.device(device):
        model = tf.keras.Sequential()
        # Stateful LSTM with a fixed batch size of 1, so the hidden state carries
        # over between batches and must be reset manually between epochs:
        model.add(LSTM(1, batch_input_shape=(1, X_train_lstm.shape[1], X_train_lstm.shape[2]), stateful=True))
        # A single output unit, since a separate model is trained per horizon:
        model.add(tf.keras.layers.Dense(1))
        model.compile(loss='mean_squared_error', optimizer='adam')
        return model
# Training an LSTM for each horizon:
path = "modelos_lstm/{}".format(directory)
make_directory(path)
for target in range(n_target):
    # EarlyStopping callback (note: with one epoch per fit() call, its patience
    # counter restarts on every call, so stopping is governed by the outer loop):
    earlystopping = tf.keras.callbacks.EarlyStopping(patience=10, restore_best_weights=True)
    checkpoint = tf.keras.callbacks.ModelCheckpoint("{}/lstm_model{}.h5".format(path, target))
    modelo_lstm = model_LSTM('/device:GPU:0')
    # Stateful training: one pass over the data per fit() call, with the hidden
    # state reset manually between passes:
    for i in range(120):
        history = modelo_lstm.fit(X_train_lstm, y_train[:, target], epochs=1, batch_size=1, verbose=1, shuffle=False,
                                  validation_data=(X_val_lstm, y_val[:, target]), validation_batch_size=1, callbacks=[earlystopping, checkpoint])
        modelo_lstm.reset_states()
# Metrics for the LSTM:
resultados_lstm = []
path = "modelos_lstm/{}".format(directory)
# The stateful LSTM expects 3D input (samples, timesteps, features), so the
# test set is reshaped the same way as the training data:
X_test_lstm = X_test_std.reshape(X_test_std.shape[0], 1, X_test_std.shape[1])
for target, i in enumerate(sorted(os.listdir(path))):
    path_temp = path + "/" + str(i)
    modelo_carregado = tf.keras.models.load_model(path_temp)
    print(f'LSTM - {target}:')
    mse, rmse, coef_p = metricas(X_test_lstm, y_test[:, target], {'LSTM': modelo_carregado})
    resultados_lstm.append([mse, rmse, coef_p])
The 1D Convolutional Neural Network (1D CNN) processes one-dimensional data to extract patterns and features. The architecture is well suited to time series problems, text classification, and other tasks on sequences where the order of the data is fundamentally important, thanks to its ability to learn local patterns along the sequence, as the quick shape check below illustrates.
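To make the effect of convolution and pooling on a sequence concrete, here is a small illustrative example (the sequence length of 12 is only a stand-in for the real number of lag features):
# Conv1D with "same" padding preserves the number of steps; each
# MaxPooling1D(pool_size=2) halves it:
x = tf.random.normal((1, 12, 1))                          # (batch, steps, channels)
y = Conv1D(filters=64, kernel_size=7, padding="same")(x)  # -> (1, 12, 64)
y = MaxPooling1D(pool_size=2)(y)                          # -> (1, 6, 64)
print(y.shape)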
# Reshaping the variables into 3D arrays (samples, steps, channels) as input for the 1D CNN:
X_train_conv = np.array(X_train_std).reshape(X_train_std.shape[0], X_train_std.shape[1], 1)
X_val_conv = np.array(X_val_std).reshape(X_val_std.shape[0], X_val_std.shape[1], 1)
X_test_conv = np.array(X_test_std).reshape(X_test_std.shape[0], X_test_std.shape[1], 1)
# CNN 1D:
def timeseries_model_conv(device):
    with tf.device(device):
        model = tf.keras.Sequential()
        model.add(Conv1D(filters=64, kernel_size=7, activation="relu", padding="same", input_shape=(X_train_conv.shape[1], 1)))
        model.add(MaxPooling1D(pool_size=2))
        model.add(Conv1D(filters=128, kernel_size=3, activation="relu", padding="same"))
        model.add(MaxPooling1D(pool_size=2))
        model.add(Conv1D(filters=256, kernel_size=3, activation="relu", padding="same"))
        model.add(MaxPooling1D(pool_size=2))
        model.add(tf.keras.layers.Flatten())
        model.add(tf.keras.layers.Dense(128, activation="relu"))
        model.add(tf.keras.layers.Dropout(0.5))
        model.add(tf.keras.layers.Dense(64, activation="relu"))
        model.add(tf.keras.layers.Dense(1))
        model.compile(optimizer="adam", loss="mse", metrics=["mse"])
        return model
# Training a 1D CNN for each horizon:
path = "modelos_cnn/{}".format(directory)
make_directory(path)
for target in range(n_target):
    earlystopping = tf.keras.callbacks.EarlyStopping(patience=10, restore_best_weights=True)
    checkpoint = tf.keras.callbacks.ModelCheckpoint("{}/cnn_model{}.h5".format(path, target))
    model_conv = timeseries_model_conv("/device:GPU:0")
    history_conv = model_conv.fit(X_train_conv, y_train[:, target], validation_data=(X_val_conv, y_val[:, target]),
                                  epochs=120, callbacks=[earlystopping, checkpoint])
# Metrics for the 1D CNN:
resultados_cnn = []
path = 'modelos_cnn/{}'.format(directory)
for index, model in enumerate(sorted(os.listdir(path))):
    path_temp = path + "/" + model
    modelo_carregado = tf.keras.models.load_model(path_temp)
    print(f'CNN 1D - {index}:')
    mse, rmse, coef_p = metricas(X_test_conv, y_test[:, index], {'CNN': modelo_carregado})
    resultados_cnn.append([mse, rmse, coef_p])
# Plotting the result figures:
resultados_mlp = np.array(resultados_mlp)
resultados_rnd = np.array(resultados_rnd)
resultados_xgb = np.array(resultados_xgb)
resultados_lstm = np.array(resultados_lstm)
resultados_cnn = np.array(resultados_cnn)
metricas_iterar = ['MSE', 'RMSE', 'COEF_PEARSON']
# Creating a directory to store all the images:
path = 'imagens/{}'.format(directory)
make_directory(path)
for index, nome in enumerate(metricas_iterar):
    resultados_mlp_reshaped = resultados_mlp[:, index].reshape(-1, 1)
    resultados_rnd_reshaped = resultados_rnd[:, index].reshape(-1, 1)
    resultados_xgb_reshaped = resultados_xgb[:, index].reshape(-1, 1)
    resultados_lstm_reshaped = resultados_lstm[:, index].reshape(-1, 1)
    resultados_cnn_reshaped = resultados_cnn[:, index].reshape(-1, 1)
    # One column per model for the current metric:
    array_resultados = np.concatenate([resultados_mlp_reshaped, resultados_rnd_reshaped,
                                       resultados_xgb_reshaped, resultados_lstm_reshaped,
                                       resultados_cnn_reshaped], axis=1)
    df_resultados = pd.DataFrame(array_resultados, columns=['MLP', 'RANDOM_FOREST', 'XGBOOST', 'LSTM', 'CNN']).reset_index(names='Horizontes')
    df_resultados_melted = df_resultados.melt(id_vars='Horizontes', value_name=nome, var_name='Modelos')
    fig_resultado = px.line(df_resultados_melted, x='Horizontes', y=nome, hover_data=['Modelos'], color='Modelos',
                            title='{} - 12 Horizontes'.format(nome))
    # Forward slashes keep the output path portable across operating systems:
    pl.io.write_image(fig=fig_resultado, file='imagens/{}/{}.jpg'.format(directory, nome), width=1000, height=500)