RNN
특정 범위만큼의 sin 파형 시퀀스를 학습해 다음 스텝의 파형을 예측하는 RNN 모델 구현
Library Call
1
2
3
4
5
6
import numpy as np
import matplotlib.pyplot as plt
plt.style.use('ggplot')
from tensorflow.keras.models import Sequential, Model
from tensorflow.keras.layers import Input, Dense, Flatten, LSTM, SimpleRNN
Data Split & Preprocessing
1
2
3
4
5
6
7
8
9
10
11
12
13
14
# Slice a 1-D sequence into fixed-length input windows and next-step targets.
def split_sequence(sequence, step):
    """Build supervised (window, next value) pairs from *sequence*.

    Each sample x[j] is sequence[j : j+step] and its target y[j] is
    sequence[j+step]; the last usable target is the final element, so
    len(sequence) - step samples are produced (zero if the sequence is
    shorter than the window).
    """
    n_samples = len(sequence) - step
    windows = [sequence[j:j + step] for j in range(n_samples)]
    targets = [sequence[j + step] for j in range(n_samples)]
    return np.array(windows), np.array(targets)
1
2
3
# sin training data: sample sin(x) on [-10, 10) at 0.1 steps.
# Vectorized instead of the original per-element Python list comprehensions;
# downstream code only needs len(), slicing and indexing, which ndarrays
# support identically.
x = np.arange(start=-10, stop=10, step=0.1)
train_y = np.sin(x)
1
2
3
# Hyperparameters
n_timesteps = 15  # window length fed to the RNN (timesteps per sample)
n_features = 1    # one scalar (the sin value) per timestep
1
2
3
4
5
6
# Split the series into supervised windows.
# train_x.shape => (samples, timesteps)
# train_y.shape => (samples)
# NOTE: train_y is rebound here from the raw series to the per-window targets.
train_x, train_y = split_sequence(train_y, step=n_timesteps)
print('shape x:{} / y:{}'.format(train_x.shape, train_y.shape))
1
shape x:(185, 15) / y:(185,)
1
2
3
4
5
6
7
# Add the trailing feature axis so the tensor matches the RNN's expected rank.
# reshape from [samples, timesteps] into [samples, timesteps, features]
train_x = train_x.reshape(train_x.shape[0], train_x.shape[1], n_features)
print('train_x.shape = {}'.format(train_x.shape))
print('train_y.shape = {}'.format(train_y.shape))
1
2
train_x.shape = (185, 15, 1)
train_y.shape = (185,)
Modeling
1
2
3
4
5
6
# RNN model definition - Sequential API (layers passed as a list).
# SimpleRNN(10) consumes the full window and emits only its final hidden
# state (return_sequences=False); Dense(1) maps it to the next-step value.
model = Sequential([
    SimpleRNN(units=10, return_sequences=False, input_shape=(n_timesteps, n_features)),
    Dense(1),
])
model.summary()
1
2
3
4
5
6
7
8
9
10
11
12
13
Model: "sequential"
_________________________________________________________________
Layer (type) Output Shape Param #
=================================================================
simple_rnn (SimpleRNN) (None, 10) 120
dense (Dense) (None, 1) 11
=================================================================
Total params: 131
Trainable params: 131
Non-trainable params: 0
_________________________________________________________________
1
2
3
4
5
6
7
# RNN model definition - Functional API.
# Renamed the original variable `input` -> `inputs`: it shadowed the
# builtin input(). The variable is not referenced after this cell, so the
# rename is safe.
inputs = Input(shape=(n_timesteps, n_features))
hidden = SimpleRNN(units=10, return_sequences=False)(inputs)
outputs = Dense(units=1)(hidden)
model = Model(inputs=inputs, outputs=outputs)
model.summary()
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
Model: "model_2"
_________________________________________________________________
Layer (type) Output Shape Param #
=================================================================
input_3 (InputLayer) [(None, 15, 1)] 0
simple_rnn_3 (SimpleRNN) (None, 10) 120
dense_3 (Dense) (None, 1) 11
=================================================================
Total params: 131
Trainable params: 131
Non-trainable params: 0
_________________________________________________________________
1
2
3
4
5
6
7
8
# Train the model with MSE loss; stop early once the training loss has
# failed to improve for 5 consecutive epochs.
np.random.seed(0)
model.compile(optimizer='adam', loss='mse')
from tensorflow.keras.callbacks import EarlyStopping
stop_on_plateau = EarlyStopping(monitor='loss', patience=5, mode='auto')
history = model.fit(train_x, train_y, epochs=1000, callbacks=[stop_on_plateau])
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
Epoch 1/1000
6/6 [==============================] - 1s 13ms/step - loss: 0.1235
Epoch 2/1000
6/6 [==============================] - 0s 12ms/step - loss: 0.0849
Epoch 3/1000
6/6 [==============================] - 0s 12ms/step - loss: 0.0551
Epoch 4/1000
6/6 [==============================] - 0s 19ms/step - loss: 0.0367
Epoch 5/1000
6/6 [==============================] - 0s 23ms/step - loss: 0.0259
Epoch 6/1000
6/6 [==============================] - 0s 23ms/step - loss: 0.0202
Epoch 7/1000
6/6 [==============================] - 0s 20ms/step - loss: 0.0174
Epoch 8/1000
6/6 [==============================] - 0s 21ms/step - loss: 0.0161
Epoch 9/1000
6/6 [==============================] - 0s 19ms/step - loss: 0.0147
Epoch 10/1000
6/6 [==============================] - 0s 18ms/step - loss: 0.0133
Epoch 11/1000
6/6 [==============================] - 0s 21ms/step - loss: 0.0119
Epoch 12/1000
6/6 [==============================] - 0s 19ms/step - loss: 0.0107
Epoch 13/1000
6/6 [==============================] - 0s 17ms/step - loss: 0.0096
Epoch 14/1000
6/6 [==============================] - 0s 20ms/step - loss: 0.0088
Epoch 15/1000
6/6 [==============================] - 0s 18ms/step - loss: 0.0079
Epoch 16/1000
6/6 [==============================] - 0s 18ms/step - loss: 0.0072
Epoch 17/1000
6/6 [==============================] - 0s 18ms/step - loss: 0.0065
Epoch 18/1000
6/6 [==============================] - 0s 18ms/step - loss: 0.0059
Epoch 19/1000
6/6 [==============================] - 0s 19ms/step - loss: 0.0053
Epoch 20/1000
6/6 [==============================] - 0s 19ms/step - loss: 0.0048
Epoch 21/1000
6/6 [==============================] - 0s 19ms/step - loss: 0.0044
Epoch 22/1000
6/6 [==============================] - 0s 21ms/step - loss: 0.0039
Epoch 23/1000
6/6 [==============================] - 0s 15ms/step - loss: 0.0035
Epoch 24/1000
6/6 [==============================] - 0s 13ms/step - loss: 0.0032
Epoch 25/1000
6/6 [==============================] - 0s 12ms/step - loss: 0.0029
Epoch 26/1000
6/6 [==============================] - 0s 12ms/step - loss: 0.0026
Epoch 27/1000
6/6 [==============================] - 0s 12ms/step - loss: 0.0023
Epoch 28/1000
6/6 [==============================] - 0s 11ms/step - loss: 0.0021
Epoch 29/1000
6/6 [==============================] - 0s 12ms/step - loss: 0.0019
Epoch 30/1000
6/6 [==============================] - 0s 13ms/step - loss: 0.0017
Epoch 31/1000
6/6 [==============================] - 0s 12ms/step - loss: 0.0015
Epoch 32/1000
6/6 [==============================] - 0s 13ms/step - loss: 0.0014
Epoch 33/1000
6/6 [==============================] - 0s 11ms/step - loss: 0.0012
Epoch 34/1000
6/6 [==============================] - 0s 13ms/step - loss: 0.0011
Epoch 35/1000
6/6 [==============================] - 0s 12ms/step - loss: 0.0010
Epoch 36/1000
6/6 [==============================] - 0s 15ms/step - loss: 9.2530e-04
Epoch 37/1000
6/6 [==============================] - 0s 11ms/step - loss: 8.5516e-04
Epoch 38/1000
6/6 [==============================] - 0s 14ms/step - loss: 7.8895e-04
Epoch 39/1000
6/6 [==============================] - 0s 12ms/step - loss: 7.4184e-04
Epoch 40/1000
6/6 [==============================] - 0s 12ms/step - loss: 6.9673e-04
Epoch 41/1000
6/6 [==============================] - 0s 13ms/step - loss: 6.7514e-04
Epoch 42/1000
6/6 [==============================] - 0s 12ms/step - loss: 6.3540e-04
Epoch 43/1000
6/6 [==============================] - 0s 12ms/step - loss: 6.1471e-04
Epoch 44/1000
6/6 [==============================] - 0s 12ms/step - loss: 5.9373e-04
Epoch 45/1000
6/6 [==============================] - 0s 13ms/step - loss: 5.7172e-04
Epoch 46/1000
6/6 [==============================] - 0s 12ms/step - loss: 5.5432e-04
Epoch 47/1000
6/6 [==============================] - 0s 12ms/step - loss: 5.5360e-04
Epoch 48/1000
6/6 [==============================] - 0s 16ms/step - loss: 5.3313e-04
Epoch 49/1000
6/6 [==============================] - 0s 12ms/step - loss: 5.1154e-04
Epoch 50/1000
6/6 [==============================] - 0s 14ms/step - loss: 5.0518e-04
Epoch 51/1000
6/6 [==============================] - 0s 13ms/step - loss: 4.9420e-04
Epoch 52/1000
6/6 [==============================] - 0s 12ms/step - loss: 4.7630e-04
Epoch 53/1000
6/6 [==============================] - 0s 12ms/step - loss: 4.7123e-04
Epoch 54/1000
6/6 [==============================] - 0s 11ms/step - loss: 4.6755e-04
Epoch 55/1000
6/6 [==============================] - 0s 12ms/step - loss: 4.5960e-04
Epoch 56/1000
6/6 [==============================] - 0s 14ms/step - loss: 4.5533e-04
Epoch 57/1000
6/6 [==============================] - 0s 15ms/step - loss: 4.3551e-04
Epoch 58/1000
6/6 [==============================] - 0s 13ms/step - loss: 4.2956e-04
Epoch 59/1000
6/6 [==============================] - 0s 12ms/step - loss: 4.2107e-04
Epoch 60/1000
6/6 [==============================] - 0s 15ms/step - loss: 4.0283e-04
Epoch 61/1000
6/6 [==============================] - 0s 13ms/step - loss: 4.2440e-04
Epoch 62/1000
6/6 [==============================] - 0s 14ms/step - loss: 3.9743e-04
Epoch 63/1000
6/6 [==============================] - 0s 12ms/step - loss: 3.9136e-04
Epoch 64/1000
6/6 [==============================] - 0s 12ms/step - loss: 3.6906e-04
Epoch 65/1000
6/6 [==============================] - 0s 12ms/step - loss: 3.6760e-04
Epoch 66/1000
6/6 [==============================] - 0s 12ms/step - loss: 3.6543e-04
Epoch 67/1000
6/6 [==============================] - 0s 13ms/step - loss: 3.6410e-04
Epoch 68/1000
6/6 [==============================] - 0s 12ms/step - loss: 3.7137e-04
Epoch 69/1000
6/6 [==============================] - 0s 13ms/step - loss: 3.4011e-04
Epoch 70/1000
6/6 [==============================] - 0s 14ms/step - loss: 3.2776e-04
Epoch 71/1000
6/6 [==============================] - 0s 12ms/step - loss: 3.1376e-04
Epoch 72/1000
6/6 [==============================] - 0s 12ms/step - loss: 3.1046e-04
Epoch 73/1000
6/6 [==============================] - 0s 15ms/step - loss: 3.1709e-04
Epoch 74/1000
6/6 [==============================] - 0s 12ms/step - loss: 3.1510e-04
Epoch 75/1000
6/6 [==============================] - 0s 11ms/step - loss: 3.1219e-04
Epoch 76/1000
6/6 [==============================] - 0s 12ms/step - loss: 2.9264e-04
Epoch 77/1000
6/6 [==============================] - 0s 12ms/step - loss: 2.8129e-04
Epoch 78/1000
6/6 [==============================] - 0s 13ms/step - loss: 2.8032e-04
Epoch 79/1000
6/6 [==============================] - 0s 12ms/step - loss: 2.7605e-04
Epoch 80/1000
6/6 [==============================] - 0s 13ms/step - loss: 2.6444e-04
Epoch 81/1000
6/6 [==============================] - 0s 13ms/step - loss: 2.8391e-04
Epoch 82/1000
6/6 [==============================] - 0s 12ms/step - loss: 2.9896e-04
Epoch 83/1000
6/6 [==============================] - 0s 12ms/step - loss: 2.6146e-04
Epoch 84/1000
6/6 [==============================] - 0s 11ms/step - loss: 2.4631e-04
Epoch 85/1000
6/6 [==============================] - 0s 13ms/step - loss: 2.3853e-04
Epoch 86/1000
6/6 [==============================] - 0s 13ms/step - loss: 2.3509e-04
Epoch 87/1000
6/6 [==============================] - 0s 12ms/step - loss: 2.3808e-04
Epoch 88/1000
6/6 [==============================] - 0s 13ms/step - loss: 2.3061e-04
Epoch 89/1000
6/6 [==============================] - 0s 14ms/step - loss: 2.3232e-04
Epoch 90/1000
6/6 [==============================] - 0s 12ms/step - loss: 2.2010e-04
Epoch 91/1000
6/6 [==============================] - 0s 12ms/step - loss: 2.1240e-04
Epoch 92/1000
6/6 [==============================] - 0s 12ms/step - loss: 2.0991e-04
Epoch 93/1000
6/6 [==============================] - 0s 13ms/step - loss: 2.0759e-04
Epoch 94/1000
6/6 [==============================] - 0s 11ms/step - loss: 2.0609e-04
Epoch 95/1000
6/6 [==============================] - 0s 12ms/step - loss: 1.9490e-04
Epoch 96/1000
6/6 [==============================] - 0s 14ms/step - loss: 1.9089e-04
Epoch 97/1000
6/6 [==============================] - 0s 14ms/step - loss: 1.9211e-04
Epoch 98/1000
6/6 [==============================] - 0s 16ms/step - loss: 1.8511e-04
Epoch 99/1000
6/6 [==============================] - 0s 13ms/step - loss: 1.8119e-04
Epoch 100/1000
6/6 [==============================] - 0s 13ms/step - loss: 1.7724e-04
Epoch 101/1000
6/6 [==============================] - 0s 12ms/step - loss: 1.7439e-04
Epoch 102/1000
6/6 [==============================] - 0s 13ms/step - loss: 1.6989e-04
Epoch 103/1000
6/6 [==============================] - 0s 12ms/step - loss: 1.6679e-04
Epoch 104/1000
6/6 [==============================] - 0s 13ms/step - loss: 1.7945e-04
Epoch 105/1000
6/6 [==============================] - 0s 12ms/step - loss: 1.9641e-04
Epoch 106/1000
6/6 [==============================] - 0s 12ms/step - loss: 1.6618e-04
Epoch 107/1000
6/6 [==============================] - 0s 12ms/step - loss: 1.5661e-04
Epoch 108/1000
6/6 [==============================] - 0s 12ms/step - loss: 1.5038e-04
Epoch 109/1000
6/6 [==============================] - 0s 13ms/step - loss: 1.5048e-04
Epoch 110/1000
6/6 [==============================] - 0s 11ms/step - loss: 1.4586e-04
Epoch 111/1000
6/6 [==============================] - 0s 13ms/step - loss: 1.4235e-04
Epoch 112/1000
6/6 [==============================] - 0s 12ms/step - loss: 1.4028e-04
Epoch 113/1000
6/6 [==============================] - 0s 13ms/step - loss: 1.4023e-04
Epoch 114/1000
6/6 [==============================] - 0s 11ms/step - loss: 1.3469e-04
Epoch 115/1000
6/6 [==============================] - 0s 10ms/step - loss: 1.3363e-04
Epoch 116/1000
6/6 [==============================] - 0s 11ms/step - loss: 1.2954e-04
Epoch 117/1000
6/6 [==============================] - 0s 12ms/step - loss: 1.2773e-04
Epoch 118/1000
6/6 [==============================] - 0s 12ms/step - loss: 1.2676e-04
Epoch 119/1000
6/6 [==============================] - 0s 12ms/step - loss: 1.3761e-04
Epoch 120/1000
6/6 [==============================] - 0s 12ms/step - loss: 1.2228e-04
Epoch 121/1000
6/6 [==============================] - 0s 14ms/step - loss: 1.1669e-04
Epoch 122/1000
6/6 [==============================] - 0s 12ms/step - loss: 1.1898e-04
Epoch 123/1000
6/6 [==============================] - 0s 14ms/step - loss: 1.2131e-04
Epoch 124/1000
6/6 [==============================] - 0s 12ms/step - loss: 1.1536e-04
Epoch 125/1000
6/6 [==============================] - 0s 14ms/step - loss: 1.1479e-04
Epoch 126/1000
6/6 [==============================] - 0s 12ms/step - loss: 1.0810e-04
Epoch 127/1000
6/6 [==============================] - 0s 11ms/step - loss: 1.0453e-04
Epoch 128/1000
6/6 [==============================] - 0s 12ms/step - loss: 1.0457e-04
Epoch 129/1000
6/6 [==============================] - 0s 13ms/step - loss: 1.0142e-04
Epoch 130/1000
6/6 [==============================] - 0s 12ms/step - loss: 1.0029e-04
Epoch 131/1000
6/6 [==============================] - 0s 12ms/step - loss: 1.0110e-04
Epoch 132/1000
6/6 [==============================] - 0s 12ms/step - loss: 1.0365e-04
Epoch 133/1000
6/6 [==============================] - 0s 13ms/step - loss: 9.8002e-05
Epoch 134/1000
6/6 [==============================] - 0s 13ms/step - loss: 9.4757e-05
Epoch 135/1000
6/6 [==============================] - 0s 12ms/step - loss: 9.0415e-05
Epoch 136/1000
6/6 [==============================] - 0s 14ms/step - loss: 9.1215e-05
Epoch 137/1000
6/6 [==============================] - 0s 12ms/step - loss: 9.0338e-05
Epoch 138/1000
6/6 [==============================] - 0s 13ms/step - loss: 8.8595e-05
Epoch 139/1000
6/6 [==============================] - 0s 12ms/step - loss: 8.5687e-05
Epoch 140/1000
6/6 [==============================] - 0s 12ms/step - loss: 8.2914e-05
Epoch 141/1000
6/6 [==============================] - 0s 13ms/step - loss: 8.1743e-05
Epoch 142/1000
6/6 [==============================] - 0s 12ms/step - loss: 8.0873e-05
Epoch 143/1000
6/6 [==============================] - 0s 13ms/step - loss: 8.5741e-05
Epoch 144/1000
6/6 [==============================] - 0s 15ms/step - loss: 7.8910e-05
Epoch 145/1000
6/6 [==============================] - 0s 23ms/step - loss: 7.6244e-05
Epoch 146/1000
6/6 [==============================] - 0s 20ms/step - loss: 7.5892e-05
Epoch 147/1000
6/6 [==============================] - 0s 19ms/step - loss: 7.4714e-05
Epoch 148/1000
6/6 [==============================] - 0s 19ms/step - loss: 7.3746e-05
Epoch 149/1000
6/6 [==============================] - 0s 19ms/step - loss: 7.3034e-05
Epoch 150/1000
6/6 [==============================] - 0s 20ms/step - loss: 7.0438e-05
Epoch 151/1000
6/6 [==============================] - 0s 19ms/step - loss: 6.9810e-05
Epoch 152/1000
6/6 [==============================] - 0s 18ms/step - loss: 6.7697e-05
Epoch 153/1000
6/6 [==============================] - 0s 19ms/step - loss: 6.5274e-05
Epoch 154/1000
6/6 [==============================] - 0s 20ms/step - loss: 6.5134e-05
Epoch 155/1000
6/6 [==============================] - 0s 20ms/step - loss: 6.5718e-05
Epoch 156/1000
6/6 [==============================] - 0s 19ms/step - loss: 6.3520e-05
Epoch 157/1000
6/6 [==============================] - 0s 18ms/step - loss: 6.4643e-05
Epoch 158/1000
6/6 [==============================] - 0s 20ms/step - loss: 6.1326e-05
Epoch 159/1000
6/6 [==============================] - 0s 18ms/step - loss: 6.0526e-05
Epoch 160/1000
6/6 [==============================] - 0s 19ms/step - loss: 6.1289e-05
Epoch 161/1000
6/6 [==============================] - 0s 18ms/step - loss: 6.1183e-05
Epoch 162/1000
6/6 [==============================] - 0s 18ms/step - loss: 5.8281e-05
Epoch 163/1000
6/6 [==============================] - 0s 19ms/step - loss: 5.5599e-05
Epoch 164/1000
6/6 [==============================] - 0s 17ms/step - loss: 5.7965e-05
Epoch 165/1000
6/6 [==============================] - 0s 14ms/step - loss: 5.3817e-05
Epoch 166/1000
6/6 [==============================] - 0s 12ms/step - loss: 5.2212e-05
Epoch 167/1000
6/6 [==============================] - 0s 12ms/step - loss: 5.2691e-05
Epoch 168/1000
6/6 [==============================] - 0s 13ms/step - loss: 5.3459e-05
Epoch 169/1000
6/6 [==============================] - 0s 13ms/step - loss: 5.2452e-05
Epoch 170/1000
6/6 [==============================] - 0s 12ms/step - loss: 5.1160e-05
Epoch 171/1000
6/6 [==============================] - 0s 12ms/step - loss: 4.9744e-05
Epoch 172/1000
6/6 [==============================] - 0s 13ms/step - loss: 4.8846e-05
Epoch 173/1000
6/6 [==============================] - 0s 14ms/step - loss: 4.8967e-05
Epoch 174/1000
6/6 [==============================] - 0s 13ms/step - loss: 5.1478e-05
Epoch 175/1000
6/6 [==============================] - 0s 12ms/step - loss: 4.4507e-05
Epoch 176/1000
6/6 [==============================] - 0s 15ms/step - loss: 4.5720e-05
Epoch 177/1000
6/6 [==============================] - 0s 11ms/step - loss: 4.4354e-05
Epoch 178/1000
6/6 [==============================] - 0s 13ms/step - loss: 4.3692e-05
Epoch 179/1000
6/6 [==============================] - 0s 12ms/step - loss: 4.2230e-05
Epoch 180/1000
6/6 [==============================] - 0s 13ms/step - loss: 4.2462e-05
Epoch 181/1000
6/6 [==============================] - 0s 12ms/step - loss: 4.1156e-05
Epoch 182/1000
6/6 [==============================] - 0s 11ms/step - loss: 4.2282e-05
Epoch 183/1000
6/6 [==============================] - 0s 12ms/step - loss: 4.3084e-05
Epoch 184/1000
6/6 [==============================] - 0s 12ms/step - loss: 4.0693e-05
Epoch 185/1000
6/6 [==============================] - 0s 11ms/step - loss: 4.0019e-05
Epoch 186/1000
6/6 [==============================] - 0s 12ms/step - loss: 4.2961e-05
Epoch 187/1000
6/6 [==============================] - 0s 12ms/step - loss: 4.0348e-05
Epoch 188/1000
6/6 [==============================] - 0s 14ms/step - loss: 4.0411e-05
Epoch 189/1000
6/6 [==============================] - 0s 14ms/step - loss: 3.9352e-05
Epoch 190/1000
6/6 [==============================] - 0s 12ms/step - loss: 3.7698e-05
Epoch 191/1000
6/6 [==============================] - 0s 13ms/step - loss: 4.0630e-05
Epoch 192/1000
6/6 [==============================] - 0s 13ms/step - loss: 3.8197e-05
Epoch 193/1000
6/6 [==============================] - 0s 11ms/step - loss: 3.6473e-05
Epoch 194/1000
6/6 [==============================] - 0s 11ms/step - loss: 3.4887e-05
Epoch 195/1000
6/6 [==============================] - 0s 12ms/step - loss: 3.4167e-05
Epoch 196/1000
6/6 [==============================] - 0s 12ms/step - loss: 4.1593e-05
Epoch 197/1000
6/6 [==============================] - 0s 13ms/step - loss: 3.6896e-05
Epoch 198/1000
6/6 [==============================] - 0s 13ms/step - loss: 3.4512e-05
Epoch 199/1000
6/6 [==============================] - 0s 14ms/step - loss: 3.6134e-05
Epoch 200/1000
6/6 [==============================] - 0s 13ms/step - loss: 3.2380e-05
Epoch 201/1000
6/6 [==============================] - 0s 13ms/step - loss: 3.2505e-05
Epoch 202/1000
6/6 [==============================] - 0s 11ms/step - loss: 3.3549e-05
Epoch 203/1000
6/6 [==============================] - 0s 12ms/step - loss: 3.2775e-05
Epoch 204/1000
6/6 [==============================] - 0s 13ms/step - loss: 3.1709e-05
Epoch 205/1000
6/6 [==============================] - 0s 13ms/step - loss: 3.2824e-05
Epoch 206/1000
6/6 [==============================] - 0s 12ms/step - loss: 3.5075e-05
Epoch 207/1000
6/6 [==============================] - 0s 12ms/step - loss: 3.1079e-05
Epoch 208/1000
6/6 [==============================] - 0s 11ms/step - loss: 3.0030e-05
Epoch 209/1000
6/6 [==============================] - 0s 13ms/step - loss: 3.0412e-05
Epoch 210/1000
6/6 [==============================] - 0s 12ms/step - loss: 2.9089e-05
Epoch 211/1000
6/6 [==============================] - 0s 13ms/step - loss: 3.5860e-05
Epoch 212/1000
6/6 [==============================] - 0s 13ms/step - loss: 3.6429e-05
Epoch 213/1000
6/6 [==============================] - 0s 15ms/step - loss: 2.8583e-05
Epoch 214/1000
6/6 [==============================] - 0s 16ms/step - loss: 2.9131e-05
Epoch 215/1000
6/6 [==============================] - 0s 12ms/step - loss: 3.0008e-05
Epoch 216/1000
6/6 [==============================] - 0s 14ms/step - loss: 3.3983e-05
Epoch 217/1000
6/6 [==============================] - 0s 12ms/step - loss: 2.9038e-05
Epoch 218/1000
6/6 [==============================] - 0s 12ms/step - loss: 2.6917e-05
Epoch 219/1000
6/6 [==============================] - 0s 12ms/step - loss: 2.7775e-05
Epoch 220/1000
6/6 [==============================] - 0s 13ms/step - loss: 2.9985e-05
Epoch 221/1000
6/6 [==============================] - 0s 13ms/step - loss: 3.0280e-05
Epoch 222/1000
6/6 [==============================] - 0s 13ms/step - loss: 3.6453e-05
Epoch 223/1000
6/6 [==============================] - 0s 13ms/step - loss: 3.2978e-05
1
2
3
4
# Training-loss curve from the fit() history.
plt.plot(history.history['loss'], label='loss')
plt.legend(loc='upper right')
plt.show()
Evaluation
1
2
3
# Test dataset: x in [10, 20) — outside the training range of [-10, 10).
# NOTE(review): the ground truth here is np.cos although training used
# np.sin — presumably a deliberate phase-shifted sinusoid to test
# generalization, but confirm this was not meant to be np.sin.
test_x = np.arange(10, 20, 0.1)
calc_y = np.cos(test_x)
1
2
3
4
5
6
7
8
9
10
# Autoregressive prediction: seed test_y with the first n_timesteps
# ground-truth values, then repeatedly feed the model its own output.
# Fix: the original stored each prediction in `train_y`, silently
# clobbering the module-level training-target array; use a local `pred`
# instead (the printed values are unchanged — shapes, not names).
test_y = calc_y[:n_timesteps]
for i in range(len(test_x) - n_timesteps):
    window = test_y[i:i + n_timesteps]
    window = window.reshape((1, n_timesteps, n_features))
    pred = model.predict(window, verbose=0)
    print(test_y.shape, pred.shape, i, i + n_timesteps)
    test_y = np.append(test_y, pred)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
(15,) (1, 1) 0 15
(16,) (1, 1) 1 16
(17,) (1, 1) 2 17
(18,) (1, 1) 3 18
(19,) (1, 1) 4 19
(20,) (1, 1) 5 20
(21,) (1, 1) 6 21
(22,) (1, 1) 7 22
(23,) (1, 1) 8 23
(24,) (1, 1) 9 24
(25,) (1, 1) 10 25
(26,) (1, 1) 11 26
(27,) (1, 1) 12 27
(28,) (1, 1) 13 28
(29,) (1, 1) 14 29
(30,) (1, 1) 15 30
(31,) (1, 1) 16 31
(32,) (1, 1) 17 32
(33,) (1, 1) 18 33
(34,) (1, 1) 19 34
(35,) (1, 1) 20 35
(36,) (1, 1) 21 36
(37,) (1, 1) 22 37
(38,) (1, 1) 23 38
(39,) (1, 1) 24 39
(40,) (1, 1) 25 40
(41,) (1, 1) 26 41
(42,) (1, 1) 27 42
(43,) (1, 1) 28 43
(44,) (1, 1) 29 44
(45,) (1, 1) 30 45
(46,) (1, 1) 31 46
(47,) (1, 1) 32 47
(48,) (1, 1) 33 48
(49,) (1, 1) 34 49
(50,) (1, 1) 35 50
(51,) (1, 1) 36 51
(52,) (1, 1) 37 52
(53,) (1, 1) 38 53
(54,) (1, 1) 39 54
(55,) (1, 1) 40 55
(56,) (1, 1) 41 56
(57,) (1, 1) 42 57
(58,) (1, 1) 43 58
(59,) (1, 1) 44 59
(60,) (1, 1) 45 60
(61,) (1, 1) 46 61
(62,) (1, 1) 47 62
(63,) (1, 1) 48 63
(64,) (1, 1) 49 64
(65,) (1, 1) 50 65
(66,) (1, 1) 51 66
(67,) (1, 1) 52 67
(68,) (1, 1) 53 68
(69,) (1, 1) 54 69
(70,) (1, 1) 55 70
(71,) (1, 1) 56 71
(72,) (1, 1) 57 72
(73,) (1, 1) 58 73
(74,) (1, 1) 59 74
(75,) (1, 1) 60 75
(76,) (1, 1) 61 76
(77,) (1, 1) 62 77
(78,) (1, 1) 63 78
(79,) (1, 1) 64 79
(80,) (1, 1) 65 80
(81,) (1, 1) 66 81
(82,) (1, 1) 67 82
(83,) (1, 1) 68 83
(84,) (1, 1) 69 84
(85,) (1, 1) 70 85
(86,) (1, 1) 71 86
(87,) (1, 1) 72 87
(88,) (1, 1) 73 88
(89,) (1, 1) 74 89
(90,) (1, 1) 75 90
(91,) (1, 1) 76 91
(92,) (1, 1) 77 92
(93,) (1, 1) 78 93
(94,) (1, 1) 79 94
(95,) (1, 1) 80 95
(96,) (1, 1) 81 96
(97,) (1, 1) 82 97
(98,) (1, 1) 83 98
(99,) (1, 1) 84 99
1
2
3
4
5
6
7
# Prediction-vs-ground-truth graph (first n_timesteps of test_y are the
# seeded ground-truth values, the rest are model outputs).
plt.plot(test_x, calc_y, label='ground truth', color='orange')
plt.plot(test_x, test_y, label='predictions', color='blue')
plt.legend(loc='upper left')
plt.ylim(-2, 2)
plt.show()