Recurrent Neural Network
Example: "I grew up in Franceā¦ I speak fluent French."
import tensorflow as tf
import matplotlib.pyplot as plt
import numpy as np
from six.moves import cPickle
data = cPickle.load(open('/content/rnn_time_signal.pkl', 'rb'))
print("Shape of data is {}".format(data.shape))
plt.figure(figsize = (10,6))
plt.title('Time signal for RNN', fontsize=15)
plt.plot(data[0:2000])
plt.xlim(0,2000)
plt.show()
def dataset(data, n_samples, n_step = 25, dim_input = 100, dim_output = 100, stride = 5):
    train_x_list = []
    train_y_list = []
    for i in range(n_samples):
        # Input window: 2500 consecutive samples, reshaped to (n_step, dim_input) = (25, 100)
        train_x = data[i*stride:i*stride + n_step*dim_input]
        train_x = train_x.reshape(n_step, dim_input)
        train_x_list.append(train_x)

        # Label: the 100 samples that immediately follow the input window
        train_y = data[i*stride + n_step*dim_input:i*stride + n_step*dim_input + dim_output]
        train_y_list.append(train_y)

    # The task is therefore to predict the next 100 samples given the preceding 2500 samples
    train_data = np.array(train_x_list)
    train_label = np.array(train_y_list)

    # Held-out test window starting at index 10000
    test_data = data[10000:10000 + n_step*dim_input]
    test_data = test_data.reshape(1, n_step, dim_input)
    return train_data, train_label, test_data
train_data, train_label, test_data = dataset(data, 5000)
train_data.shape
train_label.shape
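As a quick sanity check (not in the original notebook), the first training window should be the first 2500 samples of the signal and its label the 100 samples that follow, since stride = 5 only shifts later windows:

# Verify the sliding-window construction for the first sample (i = 0)
assert np.allclose(train_data[0].ravel(), data[0:2500])   # 25 steps x 100 inputs
assert np.allclose(train_label[0], data[2500:2600])       # the next 100 samples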
n_step = 25
n_input = 100
# LSTM shape
n_lstm1 = 100
n_lstm2 = 100
# Fully connected
n_hidden = 100
n_output = 100
lstm_network = tf.keras.models.Sequential([
    tf.keras.layers.Input(shape = (n_step, n_input)),
    tf.keras.layers.LSTM(n_lstm1, return_sequences = True),   # return the full sequence for the next LSTM layer
    tf.keras.layers.LSTM(n_lstm2),                            # return only the last hidden state
    tf.keras.layers.Dense(n_hidden),
    tf.keras.layers.Dense(n_output),
])
lstm_network.summary()
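The parameter counts reported by summary() can be checked by hand; each LSTM layer has four gates, each with an input kernel, a recurrent kernel, and a bias. A back-of-the-envelope sketch (not in the original notebook), assuming the layer sizes defined above:

# Per-layer parameter counts for the model above (all sizes are 100 here)
units, n_in = 100, 100
lstm_params = 4 * (n_in * units + units * units + units)   # kernel + recurrent kernel + bias, per gate
dense_params = n_in * units + units                        # weights + bias
print(lstm_params, dense_params)                           # 80400 per LSTM layer, 10100 per Dense layer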
lstm_network.compile(optimizer = 'adam',
                     loss = 'mean_squared_error',
                     metrics = ['mse'])
lstm_network.fit(train_data, train_label, epochs = 10)
rnn_network = tf.keras.models.Sequential([
    tf.keras.layers.Input(shape = (n_step, n_input)),
    tf.keras.layers.SimpleRNN(n_lstm1, return_sequences = True),
    tf.keras.layers.SimpleRNN(n_lstm2),
    tf.keras.layers.Dense(n_hidden),
    tf.keras.layers.Dense(n_output),
])
rnn_network.summary()
rnn_network.compile(optimizer = 'adam',
                    loss = 'mean_squared_error',
                    metrics = ['mse'])
rnn_network.fit(train_data, train_label, epochs = 10)
test_pred = lstm_network.predict(test_data).ravel()
test_label = data[10000:10000 + n_step*n_input + n_input]
plt.figure(figsize=(10,6))
plt.plot(np.arange(0, n_step*n_input + n_input), test_label, 'b', label = 'Ground truth')
plt.plot(np.arange(n_step*n_input, n_step*n_input + n_input), test_pred, 'r', label = 'Prediction')
plt.vlines(n_step*n_input, -1, 1, colors = 'r', linestyles = 'dashed')
plt.legend(fontsize = 15, loc = 'upper left')
plt.xlim(0, len(test_label))
plt.show()
gen_signal = []
for i in range(n_step):
    # Autoregressive generation: predict the next 100 samples, then slide the
    # input window forward by appending the prediction and dropping the oldest step
    test_pred = lstm_network.predict(test_data)
    gen_signal.append(test_pred.ravel())
    test_pred = test_pred[:, np.newaxis, :]
    test_data = test_data[:, 1:, :]
    test_data = np.concatenate([test_data, test_pred], axis = 1)
gen_signal = np.concatenate(gen_signal)
test_label = data[10000:10000 + n_step*n_input + n_step*n_input]
plt.figure(figsize=(10,6))
plt.plot(np.arange(0, n_step*n_input + n_step*n_input), test_label, 'b', label = 'Ground truth')
plt.plot(np.arange(n_step*n_input, n_step*n_input + n_step*n_input), gen_signal, 'r', label = 'Prediction')
plt.vlines(n_step*n_input, -1, 1, colors = 'r', linestyles = 'dashed')
plt.legend(fontsize=15, loc = 'upper left')
plt.xlim(0, len(test_label))
plt.show()
# Rebuild the original test window; it was overwritten by the generation loop above
test_data = data[10000:10000 + n_step*n_input].reshape(1, n_step, n_input)
test_pred = rnn_network.predict(test_data).ravel()
test_label = data[10000:10000 + n_step*n_input + n_input]
plt.figure(figsize=(10,6))
plt.plot(np.arange(0, n_step*n_input + n_input), test_label, 'b', label = 'Ground truth')
plt.plot(np.arange(n_step*n_input, n_step*n_input + n_input), test_pred, 'r', label = 'Prediction')
plt.vlines(n_step*n_input, -1, 1, colors = 'r', linestyles = 'dashed')
plt.legend(fontsize = 15, loc = 'upper left')
plt.xlim(0, len(test_label))
plt.show()
gen_signal = []
for i in range(n_step):
    # Same autoregressive generation loop, now with the SimpleRNN model
    test_pred = rnn_network.predict(test_data)
    gen_signal.append(test_pred.ravel())
    test_pred = test_pred[:, np.newaxis, :]
    test_data = test_data[:, 1:, :]
    test_data = np.concatenate([test_data, test_pred], axis = 1)
gen_signal = np.concatenate(gen_signal)
test_label = data[10000:10000 + n_step*n_input + n_step*n_input]
plt.figure(figsize=(10,6))
plt.plot(np.arange(0, n_step*n_input + n_step*n_input), test_label, 'b', label = 'Ground truth')
plt.plot(np.arange(n_step*n_input, n_step*n_input + n_step*n_input), gen_signal, 'r', label = 'Prediction')
plt.vlines(n_step*n_input, -1, 1, colors = 'r', linestyles = 'dashed')
plt.legend(fontsize=15, loc = 'upper left')
plt.xlim(0, len(test_label))
plt.show()
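To complement the plots with a quantitative comparison (not part of the original notebook), one can compute the mean squared error of each model's one-step forecast on the same held-out window:

# One-step forecast MSE for both models on the original test window
window = data[10000:10000 + n_step*n_input].reshape(1, n_step, n_input)
target = data[10000 + n_step*n_input:10000 + n_step*n_input + n_input]

lstm_mse = np.mean((lstm_network.predict(window).ravel() - target)**2)
rnn_mse = np.mean((rnn_network.predict(window).ravel() - target)**2)
print('LSTM one-step MSE: {:.4f}'.format(lstm_mse))
print('RNN  one-step MSE: {:.4f}'.format(rnn_mse))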