Chapter 16: Filtering a Time Series with a CNN
In [8]:
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
import tensorflow as tf
from tensorflow.keras import Model
from tensorflow.keras.losses import MeanSquaredError
from tensorflow.keras.metrics import MeanAbsoluteError
from tensorflow.keras.callbacks import EarlyStopping
from tensorflow.keras.optimizers import Adam
from tensorflow.keras import Sequential
from tensorflow.keras.layers import Dense, LSTM, Conv1D
In [2]:
url_train = 'https://raw.githubusercontent.com/marcopeix/TimeSeriesForecastingInPython/master/data/train.csv'
url_val = 'https://raw.githubusercontent.com/marcopeix/TimeSeriesForecastingInPython/master/data/val.csv'
url_test = 'https://raw.githubusercontent.com/marcopeix/TimeSeriesForecastingInPython/master/data/test.csv'
df_train = pd.read_csv(url_train, index_col=0)
df_val = pd.read_csv(url_val, index_col=0)
df_test = pd.read_csv(url_test, index_col=0)
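Before building windows, a quick sanity check of the three splits can help (a minimal sketch; it only assumes the CSVs loaded above, whose columns should include the 'traffic_volume' and 'temp' features used later):

# Sketch: confirm the shapes of the splits and the available feature columns.
for name, df in [('train', df_train), ('val', df_val), ('test', df_test)]:
    print(name, df.shape)
print(df_train.columns.tolist())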
In [3]:
class DataWindow:
    def __init__(self, input_width, label_width, shift, df_train, df_val, df_test, label_columns=None):
        # Window size
        self.input_width = input_width
        self.label_width = label_width
        self.shift = shift
        self.total_window_size = input_width + shift
        # Data
        self.df_train = df_train
        self.df_val = df_val
        self.df_test = df_test
        # Labels
        self.label_columns = label_columns
        if label_columns is not None:
            self.label_columns_indices = {name: i for i, name in enumerate(label_columns)}
        self.column_indices = {name: i for i, name in enumerate(self.df_train.columns)}
        # Slices
        self.input_slice = slice(0, input_width)
        self.input_indices = np.arange(self.total_window_size)[self.input_slice]
        # Label start position
        self.label_start = self.total_window_size - self.label_width
        self.labels_slice = slice(self.label_start, None)
        self.label_indices = np.arange(self.total_window_size)[self.labels_slice]

    def split_to_inputs_labels(self, features):
        inputs = features[:, self.input_slice, :]
        labels = features[:, self.labels_slice, :]
        if self.label_columns is not None:
            labels = tf.stack([labels[:, :, self.column_indices[name]] for name in self.label_columns], axis=-1)
        inputs.set_shape([None, self.input_width, None])
        labels.set_shape([None, self.label_width, None])
        return inputs, labels

    def plot(self, plot_col: str, model=None, max_subplots=3):
        inputs, labels = self.sample_batch
        plt.figure(figsize=(12, 8))
        plot_col_index = self.column_indices[plot_col]
        n_max = min(max_subplots, len(inputs))
        for n in range(n_max):
            plt.subplot(n_max, 1, n+1)
            plt.ylabel(f'{plot_col} [scaled]')
            plt.plot(self.input_indices, inputs[n, :, plot_col_index], label='Inputs', marker='.', zorder=-10)
            if self.label_columns:
                label_col_index = self.label_columns_indices.get(plot_col, None)
            else:
                label_col_index = plot_col_index
            if label_col_index is None:
                continue
            plt.scatter(self.label_indices, labels[n, :, label_col_index], edgecolors='k', label='Labels', c='tab:green', s=64)
            if model is not None:
                predictions = model(inputs)
                plt.scatter(self.label_indices, predictions[n, :, label_col_index], marker='X', edgecolors='k', label='Predictions', c='tab:red', s=64)
            if n == 0:
                plt.legend()
        plt.xlabel('Time (h)')

    def make_dataset(self, data):
        data = np.array(data, dtype=np.float32)
        ds = tf.keras.utils.timeseries_dataset_from_array(
            data=data,
            targets=None,
            sequence_length=self.total_window_size,
            sequence_stride=1,
            shuffle=True,
            batch_size=32,
        )
        ds = ds.map(self.split_to_inputs_labels)
        return ds

    @property
    def train(self):
        return self.make_dataset(self.df_train)

    @property
    def val(self):
        return self.make_dataset(self.df_val)

    @property
    def test(self):
        return self.make_dataset(self.df_test)

    @property
    def sample_batch(self):
        """Get and cache an example batch of `inputs, labels` for plotting."""
        result = getattr(self, '_sample_batch', None)
        if result is None:
            result = next(iter(self.train))
            self._sample_batch = result
        return result
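To make the windowing concrete, the short sketch below (an illustrative check, assuming the DataFrames loaded above) builds a 24-step window with a 1-step shift and prints its index layout plus the shape of one sampled batch; demo_window is just a throwaway name.

# Sketch: inspect how a 24-in / 24-out window with shift=1 is laid out.
demo_window = DataWindow(input_width=24, label_width=24, shift=1,
                         df_train=df_train, df_val=df_val, df_test=df_test,
                         label_columns=['traffic_volume'])
print(demo_window.input_indices)    # [0 .. 23]  -> fed to the model
print(demo_window.label_indices)    # [1 .. 24]  -> targets, offset by the 1-step shift
inputs, labels = demo_window.sample_batch
print(inputs.shape, labels.shape)   # (32, 24, n_features) and (32, 24, 1)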
In [4]:
# for training
def compile_and_fit(model, window, patience=3, max_epochs=50):
    early_stopping = EarlyStopping(
        monitor='val_loss',
        patience=patience,
        mode='min'
    )
    model.compile(
        loss=MeanSquaredError(),
        optimizer=Adam(),
        metrics=[MeanAbsoluteError()]
    )
    history = model.fit(
        window.train,
        epochs=max_epochs,
        validation_data=window.val,
        callbacks=[early_stopping]
    )
    return history
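compile_and_fit returns the Keras History object, so after any of the training runs below you can inspect the learning curves. A minimal sketch (assuming history was produced by one of those calls, with plt the matplotlib module imported above):

# Sketch: plot training vs. validation loss for the most recent run.
plt.plot(history.history['loss'], label='train loss (MSE)')
plt.plot(history.history['val_loss'], label='val loss (MSE)')
plt.xlabel('Epoch')
plt.ylabel('Loss')
plt.legend()
plt.show()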
In [6]:
# models
class Baseline(Model):
    def __init__(self, label_index=None):
        super().__init__()
        self.label_index = label_index

    def call(self, inputs):
        if self.label_index is None:
            return inputs
        elif isinstance(self.label_index, list):
            tensors = []
            for index in self.label_index:
                res = inputs[:, :, index]
                res = res[:, :, tf.newaxis]
                tensors.append(res)
            return tf.concat(tensors, axis=-1)
        else:
            res = inputs[:, :, self.label_index]
            return res[:, :, tf.newaxis]

class MultiStepLastBaseline(Model):
    def __init__(self, label_index=None):
        super().__init__()
        self.label_index = label_index

    def call(self, inputs):
        if self.label_index is None:
            return tf.tile(inputs[:, -1:, :], [1, 24, 1])
        return tf.tile(inputs[:, -1:, self.label_index:], [1, 24, 1])

class RepeatBaseline(Model):
    def __init__(self, label_index=None):
        super().__init__()
        self.label_index = label_index

    def call(self, inputs):
        return inputs[:, :, self.label_index:]

# Linear models
linear = Sequential([Dense(units=1)])
ms_linear = Sequential([Dense(units=1, kernel_initializer=tf.initializers.zeros)])
mo_linear = Sequential([Dense(units=2)])

# DNN models
dense = Sequential([
    Dense(units=64, activation='relu'),
    Dense(units=64, activation='relu'),
    Dense(units=1)
])
ms_dense = Sequential([
    Dense(units=64, activation='relu'),
    Dense(units=64, activation='relu'),
    Dense(units=1, kernel_initializer=tf.initializers.zeros)
])
mo_dense = Sequential([
    Dense(units=64, activation='relu'),
    Dense(units=64, activation='relu'),
    Dense(units=2)
])

# LSTM models
lstm_model = Sequential([
    LSTM(32, return_sequences=True),
    Dense(units=1)
])
ms_lstm_model = Sequential([
    LSTM(32, return_sequences=True),
    Dense(units=1, kernel_initializer=tf.initializers.zeros)
])
mo_lstm_model = Sequential([
    LSTM(32, return_sequences=True),
    Dense(units=2)
])
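None of the baselines has trainable weights; they only slice, expand, or tile the input tensor. The sketch below (with a purely illustrative dummy batch in which the target sits in the last of 6 feature columns) shows the shapes they produce:

# Sketch: 8 windows of 24 steps with 6 features; target column index = 5.
dummy = tf.zeros((8, 24, 6))
print(Baseline(label_index=5)(dummy).shape)               # (8, 24, 1): value at t reused as the prediction for t+1
print(MultiStepLastBaseline(label_index=5)(dummy).shape)  # (8, 24, 1): last observed value tiled over 24 future steps
print(RepeatBaseline(label_index=5)(dummy).shape)         # (8, 24, 1): the input sequence repeated as the forecast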
In [7]:
# Single-step models
single_step_window = DataWindow(input_width=1, label_width=1, shift=1, df_train=df_train, df_val=df_val, df_test=df_test, label_columns=['traffic_volume'])
wide_window = DataWindow(input_width=24, label_width=24, shift=1, df_train=df_train, df_val=df_val, df_test=df_test, label_columns=['traffic_volume'])

column_indices = {name: i for i, name in enumerate(df_train.columns)}

baseline_last = Baseline(column_indices['traffic_volume'])
baseline_last.compile(loss=MeanSquaredError(), metrics=[MeanAbsoluteError()])

val_performance = {}
test_performance = {}

# Multi-step models
multi_window = DataWindow(input_width=24, label_width=24, shift=24, df_train=df_train, df_val=df_val, df_test=df_test, label_columns=['traffic_volume'])

ms_baseline_last = MultiStepLastBaseline(label_index=column_indices['traffic_volume'])
ms_baseline_last.compile(loss=MeanSquaredError(), metrics=[MeanAbsoluteError()])

ms_baseline_repeat = RepeatBaseline(label_index=column_indices['traffic_volume'])
ms_baseline_repeat.compile(loss=MeanSquaredError(), metrics=[MeanAbsoluteError()])

ms_val_performance = {}
ms_test_performance = {}

# Multi-output models
col_names = ['temp', 'traffic_volume']
mo_single_step_window = DataWindow(input_width=1, label_width=1, shift=1, df_train=df_train, df_val=df_val, df_test=df_test, label_columns=col_names)
mo_wide_window = DataWindow(input_width=24, label_width=24, shift=1, df_train=df_train, df_val=df_val, df_test=df_test, label_columns=col_names)

mo_baseline_last = Baseline(label_index=[column_indices[col] for col in col_names])
mo_baseline_last.compile(loss=MeanSquaredError(), metrics=[MeanAbsoluteError()])

mo_val_performance = {}
mo_test_performance = {}

## Baselines
val_performance['Baseline - Last'] = baseline_last.evaluate(single_step_window.val)
test_performance['Baseline - Last'] = baseline_last.evaluate(single_step_window.test, verbose=0)

ms_val_performance['Baseline - Last'] = ms_baseline_last.evaluate(multi_window.val)
ms_test_performance['Baseline - Last'] = ms_baseline_last.evaluate(multi_window.test)

ms_val_performance['Baseline - Repeat'] = ms_baseline_repeat.evaluate(multi_window.val)
ms_test_performance['Baseline - Repeat'] = ms_baseline_repeat.evaluate(multi_window.test)

mo_val_performance['Baseline - Last'] = mo_baseline_last.evaluate(mo_single_step_window.val)
mo_test_performance['Baseline - Last'] = mo_baseline_last.evaluate(mo_single_step_window.test)

## Linear models
history = compile_and_fit(linear, single_step_window)
val_performance['Linear'] = linear.evaluate(single_step_window.val)
test_performance['Linear'] = linear.evaluate(single_step_window.test, verbose=0)

history = compile_and_fit(ms_linear, multi_window)
ms_val_performance['Linear'] = ms_linear.evaluate(multi_window.val)
ms_test_performance['Linear'] = ms_linear.evaluate(multi_window.test)

history = compile_and_fit(mo_linear, mo_single_step_window)
mo_val_performance['Linear'] = mo_linear.evaluate(mo_single_step_window.val)
mo_test_performance['Linear'] = mo_linear.evaluate(mo_single_step_window.test)

## DNN
history = compile_and_fit(dense, single_step_window)
val_performance['Dense'] = dense.evaluate(single_step_window.val)
test_performance['Dense'] = dense.evaluate(single_step_window.test, verbose=0)

history = compile_and_fit(ms_dense, multi_window)
ms_val_performance['Dense'] = ms_dense.evaluate(multi_window.val)
ms_test_performance['Dense'] = ms_dense.evaluate(multi_window.test)

history = compile_and_fit(mo_dense, mo_single_step_window)
mo_val_performance['Dense'] = mo_dense.evaluate(mo_single_step_window.val)
mo_test_performance['Dense'] = mo_dense.evaluate(mo_single_step_window.test)

## LSTM
history = compile_and_fit(lstm_model, wide_window)
val_performance['LSTM'] = lstm_model.evaluate(wide_window.val)
test_performance['LSTM'] = lstm_model.evaluate(wide_window.test, verbose=0)

history = compile_and_fit(ms_lstm_model, multi_window)
ms_val_performance['LSTM'] = ms_lstm_model.evaluate(multi_window.val)
ms_test_performance['LSTM'] = ms_lstm_model.evaluate(multi_window.test)

history = compile_and_fit(mo_lstm_model, mo_wide_window)
mo_val_performance['LSTM'] = mo_lstm_model.evaluate(mo_wide_window.val)
mo_test_performance['LSTM'] = mo_lstm_model.evaluate(mo_wide_window.test)
(Output: evaluation results for the baselines on their respective windows, followed by the training logs of the linear, DNN, and LSTM models for the single-step, multi-step, and multi-output cases. Each run trains with early stopping on val_loss and ends with the evaluate() results used to fill the performance dictionaries.)
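Each evaluate() call stores a [loss, MAE] pair in the performance dictionaries, so the single-step models can be compared before moving on to the CNN. A small sketch using the dictionaries filled above:

# Sketch: report validation and test MAE (index 1 of [loss, MAE]) per model.
for name in val_performance:
    print(f'{name:20s} val MAE: {val_performance[name][1]:.4f}   '
          f'test MAE: {test_performance[name][1]:.4f}')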
Exploring the convolutional neural network (CNN)
Implementing a CNN
In [10]:
KERNEL_WIDTH = 3
conv_window = DataWindow(input_width=KERNEL_WIDTH, label_width=1, shift=1, df_train=df_train, df_val=df_val, df_test=df_test, label_columns=['traffic_volume'])

cnn_model = Sequential([
    Conv1D(filters=32, kernel_size=(KERNEL_WIDTH,), activation='relu'),
    Dense(units=32, activation='relu'),
    Dense(units=1)
])

history = compile_and_fit(cnn_model, conv_window)

val_performance['CNN'] = cnn_model.evaluate(conv_window.val)
test_performance['CNN'] = cnn_model.evaluate(conv_window.test)
(Output: training log for the CNN model. Early stopping halts training after 18 epochs; the final evaluate() calls report a validation MAE of about 0.035 and a test MAE of about 0.031.)
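Because the convolution uses kernel_size=3 on a 3-step input with no padding, the Conv1D layer collapses the window to a single time step, which is exactly what label_width=1 expects. A minimal shape check (assuming cnn_model was built by the training call above):

# Sketch: one dummy window of KERNEL_WIDTH steps in, one step ahead out.
dummy = tf.zeros((1, KERNEL_WIDTH, df_train.shape[1]))
print(cnn_model(dummy).shape)  # (1, 1, 1): a single one-step-ahead prediction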
In [11]:
cnn_lstm_model = Sequential([
    Conv1D(filters=32, kernel_size=(KERNEL_WIDTH,), activation='relu'),
    LSTM(32, return_sequences=True),
    LSTM(32, return_sequences=True),
    Dense(units=1)
])
history = compile_and_fit(cnn_lstm_model, conv_window)
val_performance['CNN + LSTM'] = cnn_lstm_model.evaluate(conv_window.val)
test_performance['CNN + LSTM'] = cnn_lstm_model.evaluate(conv_window.test)
Epoch 1/50 384/384 [==============================] - 6s 8ms/step - loss: 0.0306 - mean_absolute_error: 0.1142 - val_loss: 0.0070 - val_mean_absolute_error: 0.0596
Epoch 2/50 384/384 [==============================] - 3s 7ms/step - loss: 0.0063 - mean_absolute_error: 0.0583 - val_loss: 0.0050 - val_mean_absolute_error: 0.0522
Epoch 3/50 384/384 [==============================] - 4s 11ms/step - loss: 0.0049 - mean_absolute_error: 0.0515 - val_loss: 0.0039 - val_mean_absolute_error: 0.0466
Epoch 4/50 384/384 [==============================] - 4s 10ms/step - loss: 0.0041 - mean_absolute_error: 0.0469 - val_loss: 0.0039 - val_mean_absolute_error: 0.0478
Epoch 5/50 384/384 [==============================] - 4s 10ms/step - loss: 0.0038 - mean_absolute_error: 0.0446 - val_loss: 0.0031 - val_mean_absolute_error: 0.0417
Epoch 6/50 384/384 [==============================] - 4s 11ms/step - loss: 0.0035 - mean_absolute_error: 0.0427 - val_loss: 0.0033 - val_mean_absolute_error: 0.0429
Epoch 7/50 384/384 [==============================] - 4s 10ms/step - loss: 0.0033 - mean_absolute_error: 0.0412 - val_loss: 0.0027 - val_mean_absolute_error: 0.0392
Epoch 8/50 384/384 [==============================] - 4s 10ms/step - loss: 0.0031 - mean_absolute_error: 0.0402 - val_loss: 0.0026 - val_mean_absolute_error: 0.0377
Epoch 9/50 384/384 [==============================] - 4s 10ms/step - loss: 0.0031 - mean_absolute_error: 0.0398 - val_loss: 0.0026 - val_mean_absolute_error: 0.0374
Epoch 10/50 384/384 [==============================] - 4s 10ms/step - loss: 0.0029 - mean_absolute_error: 0.0382 - val_loss: 0.0026 - val_mean_absolute_error: 0.0382
Epoch 11/50 384/384 [==============================] - 4s 11ms/step - loss: 0.0028 - mean_absolute_error: 0.0378 - val_loss: 0.0030 - val_mean_absolute_error: 0.0419
110/110 [==============================] - 1s 5ms/step - loss: 0.0030 - mean_absolute_error: 0.0419
55/55 [==============================] - 0s 5ms/step - loss: 0.0027 - mean_absolute_error: 0.0408
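To compare the single-step predictions against the actual values visually, the DataWindow.plot method defined earlier in this chapter can be reused. This is only a sketch; it assumes plot overlays the model's outputs on the cached sample batch when a fitted model is passed.

# Sketch: overlay single-step predictions on a sample batch
# (assumes DataWindow.plot draws model outputs when a model is given).
conv_window.plot('traffic_volume', cnn_model)
conv_window.plot('traffic_volume', cnn_lstm_model)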
In [12]:
KERNEL_WIDTH = 3
LABEL_WIDTH = 24
INPUT_WIDTH = LABEL_WIDTH + KERNEL_WIDTH - 1

multi_conv_window = DataWindow(input_width=INPUT_WIDTH, label_width=LABEL_WIDTH, shift=24,
                               df_train=df_train, df_val=df_val, df_test=df_test,
                               label_columns=['traffic_volume'])

ms_cnn_model = Sequential([
    Conv1D(filters=32, kernel_size=(KERNEL_WIDTH,), activation='relu'),
    Dense(units=32, activation='relu'),
    Dense(units=1, kernel_initializer=tf.initializers.zeros)
])

history = compile_and_fit(ms_cnn_model, multi_conv_window)

ms_val_performance['CNN'] = ms_cnn_model.evaluate(multi_conv_window.val)
ms_test_performance['CNN'] = ms_cnn_model.evaluate(multi_conv_window.test)
Epoch 1/50 383/383 [==============================] - 3s 5ms/step - loss: 0.0358 - mean_absolute_error: 0.1232 - val_loss: 0.0164 - val_mean_absolute_error: 0.0839
Epoch 2/50 383/383 [==============================] - 2s 4ms/step - loss: 0.0142 - mean_absolute_error: 0.0791 - val_loss: 0.0160 - val_mean_absolute_error: 0.0859
Epoch 3/50 383/383 [==============================] - 2s 6ms/step - loss: 0.0138 - mean_absolute_error: 0.0784 - val_loss: 0.0158 - val_mean_absolute_error: 0.0823
Epoch 4/50 383/383 [==============================] - 2s 6ms/step - loss: 0.0135 - mean_absolute_error: 0.0774 - val_loss: 0.0154 - val_mean_absolute_error: 0.0840
Epoch 5/50 383/383 [==============================] - 2s 6ms/step - loss: 0.0132 - mean_absolute_error: 0.0766 - val_loss: 0.0152 - val_mean_absolute_error: 0.0831
Epoch 6/50 383/383 [==============================] - 2s 6ms/step - loss: 0.0130 - mean_absolute_error: 0.0759 - val_loss: 0.0147 - val_mean_absolute_error: 0.0796
Epoch 7/50 383/383 [==============================] - 2s 6ms/step - loss: 0.0130 - mean_absolute_error: 0.0759 - val_loss: 0.0146 - val_mean_absolute_error: 0.0820
Epoch 8/50 383/383 [==============================] - 3s 7ms/step - loss: 0.0129 - mean_absolute_error: 0.0757 - val_loss: 0.0148 - val_mean_absolute_error: 0.0824
Epoch 9/50 383/383 [==============================] - 3s 9ms/step - loss: 0.0127 - mean_absolute_error: 0.0750 - val_loss: 0.0149 - val_mean_absolute_error: 0.0822
Epoch 10/50 383/383 [==============================] - 3s 7ms/step - loss: 0.0126 - mean_absolute_error: 0.0746 - val_loss: 0.0146 - val_mean_absolute_error: 0.0796
Epoch 11/50 383/383 [==============================] - 3s 7ms/step - loss: 0.0126 - mean_absolute_error: 0.0745 - val_loss: 0.0145 - val_mean_absolute_error: 0.0796
Epoch 12/50 383/383 [==============================] - 3s 7ms/step - loss: 0.0125 - mean_absolute_error: 0.0741 - val_loss: 0.0145 - val_mean_absolute_error: 0.0778
Epoch 13/50 383/383 [==============================] - 3s 7ms/step - loss: 0.0124 - mean_absolute_error: 0.0735 - val_loss: 0.0145 - val_mean_absolute_error: 0.0768
Epoch 14/50 383/383 [==============================] - 3s 6ms/step - loss: 0.0123 - mean_absolute_error: 0.0732 - val_loss: 0.0146 - val_mean_absolute_error: 0.0806
Epoch 15/50 383/383 [==============================] - 2s 6ms/step - loss: 0.0123 - mean_absolute_error: 0.0730 - val_loss: 0.0145 - val_mean_absolute_error: 0.0769
109/109 [==============================] - 1s 5ms/step - loss: 0.0145 - mean_absolute_error: 0.0769
54/54 [==============================] - 0s 4ms/step - loss: 0.0097 - mean_absolute_error: 0.0625
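The relationship INPUT_WIDTH = LABEL_WIDTH + KERNEL_WIDTH - 1 exists because a Conv1D layer with the default 'valid' padding shortens the time axis by KERNEL_WIDTH - 1 steps, so the extra input steps compensate and the model still emits LABEL_WIDTH predictions. A quick sketch with a dummy tensor (one batch, one feature, not part of the original cell) makes the shape arithmetic visible:

# Sketch: Conv1D with 'valid' padding removes KERNEL_WIDTH - 1 time steps,
# which is why the input window is widened to LABEL_WIDTH + KERNEL_WIDTH - 1.
dummy = tf.random.normal((1, INPUT_WIDTH, 1))           # (batch, 26 time steps, 1 feature)
conv_out = Conv1D(filters=32, kernel_size=KERNEL_WIDTH)(dummy)
print(dummy.shape)      # (1, 26, 1)
print(conv_out.shape)   # (1, 24, 32): 26 - (3 - 1) = 24 time steps remain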
In [13]:
ms_cnn_lstm_model = Sequential([
    Conv1D(filters=32, kernel_size=(KERNEL_WIDTH,), activation='relu'),
    LSTM(32, return_sequences=True),
    Dense(units=1, kernel_initializer=tf.initializers.zeros)
])
history = compile_and_fit(ms_cnn_lstm_model, multi_conv_window)
ms_val_performance['CNN + LSTM'] = ms_cnn_lstm_model.evaluate(multi_conv_window.val)
ms_test_performance['CNN + LSTM'] = ms_cnn_lstm_model.evaluate(multi_conv_window.test)
Epoch 1/50 383/383 [==============================] - 10s 22ms/step - loss: 0.0419 - mean_absolute_error: 0.1452 - val_loss: 0.0174 - val_mean_absolute_error: 0.0904
Epoch 2/50 383/383 [==============================] - 10s 25ms/step - loss: 0.0147 - mean_absolute_error: 0.0823 - val_loss: 0.0161 - val_mean_absolute_error: 0.0859
Epoch 3/50 383/383 [==============================] - 10s 27ms/step - loss: 0.0141 - mean_absolute_error: 0.0801 - val_loss: 0.0158 - val_mean_absolute_error: 0.0866
Epoch 4/50 383/383 [==============================] - 9s 24ms/step - loss: 0.0137 - mean_absolute_error: 0.0787 - val_loss: 0.0155 - val_mean_absolute_error: 0.0855
Epoch 5/50 383/383 [==============================] - 8s 22ms/step - loss: 0.0132 - mean_absolute_error: 0.0770 - val_loss: 0.0149 - val_mean_absolute_error: 0.0810
Epoch 6/50 383/383 [==============================] - 11s 28ms/step - loss: 0.0127 - mean_absolute_error: 0.0752 - val_loss: 0.0151 - val_mean_absolute_error: 0.0766
Epoch 7/50 383/383 [==============================] - 9s 23ms/step - loss: 0.0123 - mean_absolute_error: 0.0733 - val_loss: 0.0136 - val_mean_absolute_error: 0.0763
Epoch 8/50 383/383 [==============================] - 9s 24ms/step - loss: 0.0120 - mean_absolute_error: 0.0722 - val_loss: 0.0134 - val_mean_absolute_error: 0.0750
Epoch 9/50 383/383 [==============================] - 9s 23ms/step - loss: 0.0120 - mean_absolute_error: 0.0721 - val_loss: 0.0132 - val_mean_absolute_error: 0.0769
Epoch 10/50 383/383 [==============================] - 9s 23ms/step - loss: 0.0117 - mean_absolute_error: 0.0711 - val_loss: 0.0131 - val_mean_absolute_error: 0.0743
Epoch 11/50 383/383 [==============================] - 9s 23ms/step - loss: 0.0116 - mean_absolute_error: 0.0705 - val_loss: 0.0129 - val_mean_absolute_error: 0.0742
Epoch 12/50 383/383 [==============================] - 9s 23ms/step - loss: 0.0114 - mean_absolute_error: 0.0699 - val_loss: 0.0138 - val_mean_absolute_error: 0.0805
Epoch 13/50 383/383 [==============================] - 9s 23ms/step - loss: 0.0113 - mean_absolute_error: 0.0695 - val_loss: 0.0127 - val_mean_absolute_error: 0.0736
Epoch 14/50 383/383 [==============================] - 9s 23ms/step - loss: 0.0112 - mean_absolute_error: 0.0690 - val_loss: 0.0129 - val_mean_absolute_error: 0.0742
Epoch 15/50 383/383 [==============================] - 9s 23ms/step - loss: 0.0111 - mean_absolute_error: 0.0687 - val_loss: 0.0125 - val_mean_absolute_error: 0.0730
Epoch 16/50 383/383 [==============================] - 9s 24ms/step - loss: 0.0109 - mean_absolute_error: 0.0678 - val_loss: 0.0126 - val_mean_absolute_error: 0.0743
Epoch 17/50 383/383 [==============================] - 9s 24ms/step - loss: 0.0109 - mean_absolute_error: 0.0678 - val_loss: 0.0131 - val_mean_absolute_error: 0.0719
Epoch 18/50 383/383 [==============================] - 9s 24ms/step - loss: 0.0108 - mean_absolute_error: 0.0674 - val_loss: 0.0131 - val_mean_absolute_error: 0.0774
109/109 [==============================] - 1s 9ms/step - loss: 0.0131 - mean_absolute_error: 0.0774
54/54 [==============================] - 1s 9ms/step - loss: 0.0084 - mean_absolute_error: 0.0599
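The CNN + LSTM variant trains noticeably slower per step than the plain CNN. To see where the extra cost comes from, the standard summary() method can be called on the two fitted multi-step models; this is just a sketch for inspecting layer output shapes and parameter counts, not part of the original notebook.

# Sketch: compare layer shapes and parameter counts of the two multi-step models.
ms_cnn_model.summary()
ms_cnn_lstm_model.summary()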
In [14]:
mo_conv_window = DataWindow(input_width=INPUT_WIDTH, label_width=LABEL_WIDTH, shift=1,
                            df_train=df_train, df_val=df_val, df_test=df_test,
                            label_columns=col_names)

mo_cnn_model = Sequential([
    Conv1D(filters=32, kernel_size=(KERNEL_WIDTH,), activation='relu'),
    Dense(units=32, activation='relu'),
    Dense(units=2)
])
history = compile_and_fit(mo_cnn_model, mo_conv_window)
mo_val_performance['CNN'] = mo_cnn_model.evaluate(mo_conv_window.val)
mo_test_performance['CNN'] = mo_cnn_model.evaluate(mo_conv_window.test)
Epoch 1/50 384/384 [==============================] - 2s 5ms/step - loss: 0.0101 - mean_absolute_error: 0.0589 - val_loss: 0.0023 - val_mean_absolute_error: 0.0318
Epoch 2/50 384/384 [==============================] - 2s 5ms/step - loss: 0.0020 - mean_absolute_error: 0.0288 - val_loss: 0.0014 - val_mean_absolute_error: 0.0258
Epoch 3/50 384/384 [==============================] - 2s 5ms/step - loss: 0.0016 - mean_absolute_error: 0.0252 - val_loss: 0.0012 - val_mean_absolute_error: 0.0236
Epoch 4/50 384/384 [==============================] - 2s 4ms/step - loss: 0.0015 - mean_absolute_error: 0.0238 - val_loss: 0.0012 - val_mean_absolute_error: 0.0232
Epoch 5/50 384/384 [==============================] - 2s 4ms/step - loss: 0.0014 - mean_absolute_error: 0.0230 - val_loss: 0.0011 - val_mean_absolute_error: 0.0219
Epoch 6/50 384/384 [==============================] - 2s 5ms/step - loss: 0.0013 - mean_absolute_error: 0.0224 - val_loss: 0.0011 - val_mean_absolute_error: 0.0213
Epoch 7/50 384/384 [==============================] - 2s 6ms/step - loss: 0.0013 - mean_absolute_error: 0.0220 - val_loss: 0.0011 - val_mean_absolute_error: 0.0214
Epoch 8/50 384/384 [==============================] - 2s 6ms/step - loss: 0.0013 - mean_absolute_error: 0.0217 - val_loss: 0.0010 - val_mean_absolute_error: 0.0206
Epoch 9/50 384/384 [==============================] - 2s 5ms/step - loss: 0.0013 - mean_absolute_error: 0.0215 - val_loss: 9.9162e-04 - val_mean_absolute_error: 0.0201
Epoch 10/50 384/384 [==============================] - 3s 6ms/step - loss: 0.0012 - mean_absolute_error: 0.0213 - val_loss: 0.0010 - val_mean_absolute_error: 0.0204
Epoch 11/50 384/384 [==============================] - 2s 6ms/step - loss: 0.0012 - mean_absolute_error: 0.0209 - val_loss: 0.0010 - val_mean_absolute_error: 0.0206
Epoch 12/50 384/384 [==============================] - 2s 5ms/step - loss: 0.0012 - mean_absolute_error: 0.0208 - val_loss: 0.0011 - val_mean_absolute_error: 0.0214
109/109 [==============================] - 0s 3ms/step - loss: 0.0011 - mean_absolute_error: 0.0214
55/55 [==============================] - 0s 3ms/step - loss: 8.4113e-04 - mean_absolute_error: 0.0187
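The multi-output window predicts two columns at once. col_names is defined earlier in the chapter; here it is assumed to hold the two target columns (for example temperature and traffic volume). A shape check on one test batch, sketched below, confirms that both the labels and the predictions end in two output columns.

# Sketch: shape check on one test batch of the multi-output window.
# Labels and predictions should both have shape (batch, LABEL_WIDTH, 2).
for inputs, labels in mo_conv_window.test.take(1):
    preds = mo_cnn_model(inputs, training=False)
    print(inputs.shape, labels.shape, preds.shape)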
In [16]:
mo_cnn_lstm_model = Sequential([
    Conv1D(filters=32, kernel_size=(KERNEL_WIDTH,), activation='relu'),
    LSTM(32, return_sequences=True),
    Dense(units=2)
])
history = compile_and_fit(mo_cnn_lstm_model, mo_conv_window)
mo_val_performance['CNN + LSTM'] = mo_cnn_lstm_model.evaluate(mo_conv_window.val)
mo_test_performance['CNN + LSTM'] = mo_cnn_lstm_model.evaluate(mo_conv_window.test)
Epoch 1/50 384/384 [==============================] - 6s 13ms/step - loss: 0.0230 - mean_absolute_error: 0.0913 - val_loss: 0.0041 - val_mean_absolute_error: 0.0415
Epoch 2/50 384/384 [==============================] - 5s 13ms/step - loss: 0.0032 - mean_absolute_error: 0.0363 - val_loss: 0.0024 - val_mean_absolute_error: 0.0324
Epoch 3/50 384/384 [==============================] - 5s 14ms/step - loss: 0.0026 - mean_absolute_error: 0.0322 - val_loss: 0.0020 - val_mean_absolute_error: 0.0292
Epoch 4/50 384/384 [==============================] - 5s 14ms/step - loss: 0.0022 - mean_absolute_error: 0.0298 - val_loss: 0.0017 - val_mean_absolute_error: 0.0279
Epoch 5/50 384/384 [==============================] - 5s 14ms/step - loss: 0.0019 - mean_absolute_error: 0.0278 - val_loss: 0.0017 - val_mean_absolute_error: 0.0279
Epoch 6/50 384/384 [==============================] - 5s 14ms/step - loss: 0.0017 - mean_absolute_error: 0.0263 - val_loss: 0.0015 - val_mean_absolute_error: 0.0274
Epoch 7/50 384/384 [==============================] - 6s 15ms/step - loss: 0.0016 - mean_absolute_error: 0.0255 - val_loss: 0.0013 - val_mean_absolute_error: 0.0238
Epoch 8/50 384/384 [==============================] - 5s 14ms/step - loss: 0.0016 - mean_absolute_error: 0.0246 - val_loss: 0.0013 - val_mean_absolute_error: 0.0237
Epoch 9/50 384/384 [==============================] - 6s 15ms/step - loss: 0.0015 - mean_absolute_error: 0.0240 - val_loss: 0.0013 - val_mean_absolute_error: 0.0238
Epoch 10/50 384/384 [==============================] - 5s 14ms/step - loss: 0.0015 - mean_absolute_error: 0.0236 - val_loss: 0.0012 - val_mean_absolute_error: 0.0238
Epoch 11/50 384/384 [==============================] - 6s 14ms/step - loss: 0.0015 - mean_absolute_error: 0.0232 - val_loss: 0.0013 - val_mean_absolute_error: 0.0246
Epoch 12/50 384/384 [==============================] - 6s 15ms/step - loss: 0.0014 - mean_absolute_error: 0.0229 - val_loss: 0.0012 - val_mean_absolute_error: 0.0224
Epoch 13/50 384/384 [==============================] - 6s 15ms/step - loss: 0.0014 - mean_absolute_error: 0.0227 - val_loss: 0.0011 - val_mean_absolute_error: 0.0221
Epoch 14/50 384/384 [==============================] - 6s 15ms/step - loss: 0.0014 - mean_absolute_error: 0.0225 - val_loss: 0.0011 - val_mean_absolute_error: 0.0211
Epoch 15/50 384/384 [==============================] - 6s 15ms/step - loss: 0.0014 - mean_absolute_error: 0.0224 - val_loss: 0.0011 - val_mean_absolute_error: 0.0222
Epoch 16/50 384/384 [==============================] - 6s 15ms/step - loss: 0.0013 - mean_absolute_error: 0.0221 - val_loss: 0.0011 - val_mean_absolute_error: 0.0224
Epoch 17/50 384/384 [==============================] - 6s 16ms/step - loss: 0.0013 - mean_absolute_error: 0.0219 - val_loss: 0.0011 - val_mean_absolute_error: 0.0217
109/109 [==============================] - 1s 6ms/step - loss: 0.0011 - mean_absolute_error: 0.0217
55/55 [==============================] - 0s 6ms/step - loss: 8.2616e-04 - mean_absolute_error: 0.0185
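evaluate() reports a single MAE averaged over both targets. To see how the error splits per target, the sketch below iterates over the test set and averages the absolute error for each output column separately; it assumes col_names lists the targets in the same order as the model's output columns.

# Sketch: per-target test MAE, assuming col_names matches the order
# of the model's output columns.
for i, name in enumerate(col_names):
    abs_errors = []
    for inputs, labels in mo_conv_window.test:
        preds = mo_cnn_lstm_model(inputs, training=False)
        abs_errors.append(tf.abs(preds[..., i] - labels[..., i]))
    mae = tf.reduce_mean(tf.concat(abs_errors, axis=0)).numpy()
    print(f'{name}: test MAE = {mae:.4f}')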
In [23]:
# Compare results for the single-step models
plt.title('Single Step')
plt.xlabel('Models')
plt.ylabel('MAE')
plt.bar(val_performance.keys(), [v[1] for v in val_performance.values()], width=-0.25, align='edge', label='Validation')
plt.bar(test_performance.keys(), [v[1] for v in test_performance.values()], width=0.25, align='edge', label='Test', hatch='/')
plt.xticks(rotation=45, ha='right')
plt.legend()
plt.tight_layout()
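The same numbers can also be read off as a table. The sketch below assumes each performance dictionary maps a model name to the [loss, MAE] pair returned by evaluate(), which is why the bar charts above index element 1; the multi-step and multi-output dictionaries can be tabulated the same way.

# Sketch: tabular view of the single-step comparison
# (each evaluate() call above returns [loss, MAE]).
single_step_mae = pd.DataFrame({
    'Validation MAE': {name: metrics[1] for name, metrics in val_performance.items()},
    'Test MAE': {name: metrics[1] for name, metrics in test_performance.items()},
})
print(single_step_mae)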
In [22]:
# Compare results for the multi-step models
plt.title('Multi Step')
plt.xlabel('Models')
plt.ylabel('MAE')
plt.bar(ms_val_performance.keys(), [v[1] for v in ms_val_performance.values()], width=-0.25, align='edge', label='Validation')
plt.bar(ms_test_performance.keys(), [v[1] for v in ms_test_performance.values()], width=0.25, align='edge', label='Test', hatch='/')
plt.legend()
plt.xticks(rotation=45, ha='right')
plt.tight_layout()
In [24]:
# Compare results for the multi-output models
plt.title('Multi Output')
plt.xlabel('Models')
plt.ylabel('MAE')
plt.bar(mo_val_performance.keys(), [v[1] for v in mo_val_performance.values()], width=-0.25, align='edge', label='Validation')
plt.bar(mo_test_performance.keys(), [v[1] for v in mo_test_performance.values()], width=0.25, align='edge', label='Test', hatch='/')
plt.xticks(rotation=45, ha='right')
plt.legend()
plt.tight_layout()