LSTM
- As the input sequence grows longer (more time steps), a plain RNN fails to carry information from early steps through to later ones, and its ability to learn degrades.
- Stacking RNN layers further weakens the link between inputs and outputs that lie far apart, giving rise to the long-term dependency problem.
- LSTM was developed to mitigate this problem; its gating mechanism is sketched below.
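For reference, the standard LSTM cell (the textbook formulation, not code from this post) keeps a separate cell state c_t that is updated additively, which is what lets gradients survive across many time steps. The forget gate f_t, input gate i_t, and output gate o_t control what the cell state drops, admits, and exposes:

\begin{aligned}
f_t &= \sigma(W_f x_t + U_f h_{t-1} + b_f) \\
i_t &= \sigma(W_i x_t + U_i h_{t-1} + b_i) \\
o_t &= \sigma(W_o x_t + U_o h_{t-1} + b_o) \\
\tilde{c}_t &= \tanh(W_c x_t + U_c h_{t-1} + b_c) \\
c_t &= f_t \odot c_{t-1} + i_t \odot \tilde{c}_t \\
h_t &= o_t \odot \tanh(c_t)
\end{aligned}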
Library Call
import numpy as np
import matplotlib.pyplot as plt
plt.style.use('ggplot')
from tensorflow.keras.models import Sequential, Model
from tensorflow.keras.layers import Input, Dense, Flatten, LSTM
Data Split & Preprocessing
# Split a sequence into sliding windows of `step` inputs plus the next value as the target
def split_sequence(sequence, step):
    x, y = list(), list()
    for i in range(len(sequence)):
        end_idx = i + step
        if end_idx > len(sequence) - 1:
            break
        seq_x, seq_y = sequence[i:end_idx], sequence[end_idx]
        x.append(seq_x)
        y.append(seq_y)
    return np.array(x), np.array(y)
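As a quick sanity check on a toy sequence (these numbers are illustrative, not from the original post), the function turns a series into overlapping windows and their next values:

# Windows of length 3 over [0, 1, 2, 3, 4, 5]
demo_x, demo_y = split_sequence(np.arange(6), step=3)
print(demo_x)  # [[0 1 2] [1 2 3] [2 3 4]]
print(demo_y)  # [3 4 5]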
# Training data: one sine wave sampled on [-10, 10)
x = np.arange(start=-10, stop=10, step=0.1)
train_y = np.sin(x)
# Hyperparameters
n_timesteps = 15
n_features = 1
# Split into sequences
# train_x.shape => (samples, timesteps)
# train_y.shape => (samples,)
train_x, train_y = split_sequence(train_y, step=n_timesteps)
print('shape x:{} / y:{}'.format(train_x.shape, train_y.shape))
shape x:(185, 15) / y:(185,)
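The count checks out: np.arange(-10, 10, 0.1) produces 200 points, and each sample needs a 15-step window plus one target, so the split yields 200 - 15 = 185 pairs.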
# Reshape so the input matches the 3-D tensor the LSTM layer expects
# reshape from [samples, timesteps] into [samples, timesteps, features]
train_x = train_x.reshape(train_x.shape[0], train_x.shape[1], n_features)
print('train_x.shape = {}'.format(train_x.shape))
print('train_y.shape = {}'.format(train_y.shape))
train_x.shape = (185, 15, 1)
train_y.shape = (185,)
Modeling
# LSTM model definition - Sequential API
model = Sequential()
model.add(LSTM(units=10, return_sequences=False, input_shape=(n_timesteps, n_features)))
model.add(Dense(units=1))
model.summary()
Model: "sequential"
_________________________________________________________________
Layer (type) Output Shape Param #
=================================================================
lstm (LSTM) (None, 10) 480
dense (Dense) (None, 1) 11
=================================================================
Total params: 491
Trainable params: 491
Non-trainable params: 0
_________________________________________________________________
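The LSTM's 480 parameters follow from its four gates, each with input weights, recurrent weights, and a bias: 4 × (units × features + units × units + units) = 4 × (10·1 + 10·10 + 10) = 480. The Dense layer adds 10 weights + 1 bias = 11, for 491 in total.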
# LSTM model definition - Functional API
input = Input(shape=(n_timesteps, n_features))
x = LSTM(units=10, return_sequences=False)(input)
output = Dense(units=1)(x)
model = Model(inputs=input, outputs=output)
model.summary()
Model: "model"
_________________________________________________________________
Layer (type) Output Shape Param #
=================================================================
input_1 (InputLayer) [(None, 15, 1)] 0
lstm_1 (LSTM) (None, 10) 480
dense_1 (Dense) (None, 1) 11
=================================================================
Total params: 491
Trainable params: 491
Non-trainable params: 0
_________________________________________________________________
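Both definitions build the identical network with the same 491 parameters; the Functional API becomes worthwhile once a model needs multiple inputs, multiple outputs, or shared layers, none of which Sequential can express.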
# Model training
np.random.seed(0)
from tensorflow.keras.callbacks import EarlyStopping
early_stopping = EarlyStopping(monitor='loss', patience=5, mode='auto')
model.compile(optimizer='adam', loss='mse')
history = model.fit(train_x, train_y, epochs=1000, callbacks=[early_stopping])
Epoch 1/1000
6/6 [==============================] - 7s 11ms/step - loss: 0.3356
Epoch 2/1000
6/6 [==============================] - 0s 6ms/step - loss: 0.2983
Epoch 3/1000
6/6 [==============================] - 0s 7ms/step - loss: 0.2653
Epoch 4/1000
6/6 [==============================] - 0s 6ms/step - loss: 0.2354
Epoch 5/1000
6/6 [==============================] - 0s 7ms/step - loss: 0.2111
Epoch 6/1000
6/6 [==============================] - 0s 6ms/step - loss: 0.1894
Epoch 7/1000
6/6 [==============================] - 0s 7ms/step - loss: 0.1716
Epoch 8/1000
6/6 [==============================] - 0s 6ms/step - loss: 0.1583
Epoch 9/1000
6/6 [==============================] - 0s 8ms/step - loss: 0.1469
Epoch 10/1000
6/6 [==============================] - 0s 4ms/step - loss: 0.1381
Epoch 11/1000
6/6 [==============================] - 0s 5ms/step - loss: 0.1308
Epoch 12/1000
6/6 [==============================] - 0s 4ms/step - loss: 0.1242
Epoch 13/1000
6/6 [==============================] - 0s 4ms/step - loss: 0.1181
Epoch 14/1000
6/6 [==============================] - 0s 4ms/step - loss: 0.1124
Epoch 15/1000
6/6 [==============================] - 0s 4ms/step - loss: 0.1066
Epoch 16/1000
6/6 [==============================] - 0s 4ms/step - loss: 0.1015
Epoch 17/1000
6/6 [==============================] - 0s 5ms/step - loss: 0.0962
Epoch 18/1000
6/6 [==============================] - 0s 4ms/step - loss: 0.0914
Epoch 19/1000
6/6 [==============================] - 0s 6ms/step - loss: 0.0864
Epoch 20/1000
6/6 [==============================] - 0s 5ms/step - loss: 0.0819
Epoch 21/1000
6/6 [==============================] - 0s 4ms/step - loss: 0.0771
Epoch 22/1000
6/6 [==============================] - 0s 6ms/step - loss: 0.0728
Epoch 23/1000
6/6 [==============================] - 0s 4ms/step - loss: 0.0683
Epoch 24/1000
6/6 [==============================] - 0s 5ms/step - loss: 0.0639
Epoch 25/1000
6/6 [==============================] - 0s 4ms/step - loss: 0.0595
Epoch 26/1000
6/6 [==============================] - 0s 8ms/step - loss: 0.0550
Epoch 27/1000
6/6 [==============================] - 0s 7ms/step - loss: 0.0505
Epoch 28/1000
6/6 [==============================] - 0s 7ms/step - loss: 0.0461
Epoch 29/1000
6/6 [==============================] - 0s 7ms/step - loss: 0.0415
Epoch 30/1000
6/6 [==============================] - 0s 5ms/step - loss: 0.0369
Epoch 31/1000
6/6 [==============================] - 0s 7ms/step - loss: 0.0324
Epoch 32/1000
6/6 [==============================] - 0s 7ms/step - loss: 0.0280
Epoch 33/1000
6/6 [==============================] - 0s 7ms/step - loss: 0.0236
Epoch 34/1000
6/6 [==============================] - 0s 5ms/step - loss: 0.0197
Epoch 35/1000
6/6 [==============================] - 0s 4ms/step - loss: 0.0163
Epoch 36/1000
6/6 [==============================] - 0s 5ms/step - loss: 0.0133
Epoch 37/1000
6/6 [==============================] - 0s 4ms/step - loss: 0.0110
Epoch 38/1000
6/6 [==============================] - 0s 5ms/step - loss: 0.0093
Epoch 39/1000
6/6 [==============================] - 0s 4ms/step - loss: 0.0080
Epoch 40/1000
6/6 [==============================] - 0s 5ms/step - loss: 0.0070
Epoch 41/1000
6/6 [==============================] - 0s 5ms/step - loss: 0.0063
Epoch 42/1000
6/6 [==============================] - 0s 5ms/step - loss: 0.0057
Epoch 43/1000
6/6 [==============================] - 0s 4ms/step - loss: 0.0051
Epoch 44/1000
6/6 [==============================] - 0s 6ms/step - loss: 0.0047
Epoch 45/1000
6/6 [==============================] - 0s 5ms/step - loss: 0.0043
Epoch 46/1000
6/6 [==============================] - 0s 4ms/step - loss: 0.0039
Epoch 47/1000
6/6 [==============================] - 0s 4ms/step - loss: 0.0036
Epoch 48/1000
6/6 [==============================] - 0s 4ms/step - loss: 0.0034
Epoch 49/1000
6/6 [==============================] - 0s 5ms/step - loss: 0.0032
Epoch 50/1000
6/6 [==============================] - 0s 5ms/step - loss: 0.0030
Epoch 51/1000
6/6 [==============================] - 0s 14ms/step - loss: 0.0028
Epoch 52/1000
6/6 [==============================] - 0s 9ms/step - loss: 0.0026
Epoch 53/1000
6/6 [==============================] - 0s 7ms/step - loss: 0.0025
Epoch 54/1000
6/6 [==============================] - 0s 7ms/step - loss: 0.0024
Epoch 55/1000
6/6 [==============================] - 0s 6ms/step - loss: 0.0023
Epoch 56/1000
6/6 [==============================] - 0s 6ms/step - loss: 0.0022
Epoch 57/1000
6/6 [==============================] - 0s 12ms/step - loss: 0.0021
Epoch 58/1000
6/6 [==============================] - 0s 4ms/step - loss: 0.0020
Epoch 59/1000
6/6 [==============================] - 0s 4ms/step - loss: 0.0019
Epoch 60/1000
6/6 [==============================] - 0s 4ms/step - loss: 0.0018
Epoch 61/1000
6/6 [==============================] - 0s 4ms/step - loss: 0.0017
Epoch 62/1000
6/6 [==============================] - 0s 4ms/step - loss: 0.0016
Epoch 63/1000
6/6 [==============================] - 0s 5ms/step - loss: 0.0016
Epoch 64/1000
6/6 [==============================] - 0s 5ms/step - loss: 0.0015
Epoch 65/1000
6/6 [==============================] - 0s 5ms/step - loss: 0.0014
Epoch 66/1000
6/6 [==============================] - 0s 4ms/step - loss: 0.0014
Epoch 67/1000
6/6 [==============================] - 0s 5ms/step - loss: 0.0013
Epoch 68/1000
6/6 [==============================] - 0s 4ms/step - loss: 0.0012
Epoch 69/1000
6/6 [==============================] - 0s 6ms/step - loss: 0.0012
Epoch 70/1000
6/6 [==============================] - 0s 4ms/step - loss: 0.0011
Epoch 71/1000
6/6 [==============================] - 0s 6ms/step - loss: 0.0011
Epoch 72/1000
6/6 [==============================] - 0s 4ms/step - loss: 0.0010
Epoch 73/1000
6/6 [==============================] - 0s 5ms/step - loss: 9.7267e-04
Epoch 74/1000
6/6 [==============================] - 0s 4ms/step - loss: 9.0993e-04
Epoch 75/1000
6/6 [==============================] - 0s 6ms/step - loss: 8.7220e-04
Epoch 76/1000
6/6 [==============================] - 0s 5ms/step - loss: 8.2242e-04
Epoch 77/1000
6/6 [==============================] - 0s 4ms/step - loss: 7.9731e-04
Epoch 78/1000
6/6 [==============================] - 0s 5ms/step - loss: 7.4751e-04
Epoch 79/1000
6/6 [==============================] - 0s 4ms/step - loss: 7.1020e-04
Epoch 80/1000
6/6 [==============================] - 0s 4ms/step - loss: 6.7267e-04
Epoch 81/1000
6/6 [==============================] - 0s 4ms/step - loss: 6.3763e-04
Epoch 82/1000
6/6 [==============================] - 0s 5ms/step - loss: 6.0020e-04
Epoch 83/1000
6/6 [==============================] - 0s 4ms/step - loss: 5.7294e-04
Epoch 84/1000
6/6 [==============================] - 0s 4ms/step - loss: 5.4476e-04
Epoch 85/1000
6/6 [==============================] - 0s 5ms/step - loss: 5.1785e-04
Epoch 86/1000
6/6 [==============================] - 0s 5ms/step - loss: 4.8496e-04
Epoch 87/1000
6/6 [==============================] - 0s 4ms/step - loss: 4.6241e-04
Epoch 88/1000
6/6 [==============================] - 0s 4ms/step - loss: 4.3553e-04
Epoch 89/1000
6/6 [==============================] - 0s 4ms/step - loss: 4.1214e-04
Epoch 90/1000
6/6 [==============================] - 0s 5ms/step - loss: 3.8754e-04
Epoch 91/1000
6/6 [==============================] - 0s 4ms/step - loss: 3.6763e-04
Epoch 92/1000
6/6 [==============================] - 0s 4ms/step - loss: 3.4501e-04
Epoch 93/1000
6/6 [==============================] - 0s 4ms/step - loss: 3.2592e-04
Epoch 94/1000
6/6 [==============================] - 0s 5ms/step - loss: 3.0861e-04
Epoch 95/1000
6/6 [==============================] - 0s 4ms/step - loss: 2.8848e-04
Epoch 96/1000
6/6 [==============================] - 0s 5ms/step - loss: 2.6932e-04
Epoch 97/1000
6/6 [==============================] - 0s 5ms/step - loss: 2.6031e-04
Epoch 98/1000
6/6 [==============================] - 0s 5ms/step - loss: 2.5022e-04
Epoch 99/1000
6/6 [==============================] - 0s 5ms/step - loss: 2.2859e-04
Epoch 100/1000
6/6 [==============================] - 0s 4ms/step - loss: 2.1427e-04
Epoch 101/1000
6/6 [==============================] - 0s 5ms/step - loss: 2.0070e-04
Epoch 102/1000
6/6 [==============================] - 0s 4ms/step - loss: 1.9103e-04
Epoch 103/1000
6/6 [==============================] - 0s 4ms/step - loss: 1.7804e-04
Epoch 104/1000
6/6 [==============================] - 0s 4ms/step - loss: 1.6632e-04
Epoch 105/1000
6/6 [==============================] - 0s 5ms/step - loss: 1.5785e-04
Epoch 106/1000
6/6 [==============================] - 0s 5ms/step - loss: 1.4697e-04
Epoch 107/1000
6/6 [==============================] - 0s 5ms/step - loss: 1.3869e-04
Epoch 108/1000
6/6 [==============================] - 0s 5ms/step - loss: 1.2898e-04
Epoch 109/1000
6/6 [==============================] - 0s 4ms/step - loss: 1.2284e-04
Epoch 110/1000
6/6 [==============================] - 0s 5ms/step - loss: 1.1470e-04
Epoch 111/1000
6/6 [==============================] - 0s 5ms/step - loss: 1.0862e-04
Epoch 112/1000
6/6 [==============================] - 0s 5ms/step - loss: 1.0218e-04
Epoch 113/1000
6/6 [==============================] - 0s 4ms/step - loss: 9.6083e-05
Epoch 114/1000
6/6 [==============================] - 0s 5ms/step - loss: 9.0590e-05
Epoch 115/1000
6/6 [==============================] - 0s 4ms/step - loss: 8.5281e-05
Epoch 116/1000
6/6 [==============================] - 0s 5ms/step - loss: 8.4038e-05
Epoch 117/1000
6/6 [==============================] - 0s 6ms/step - loss: 7.5523e-05
Epoch 118/1000
6/6 [==============================] - 0s 5ms/step - loss: 7.4124e-05
Epoch 119/1000
6/6 [==============================] - 0s 5ms/step - loss: 6.8989e-05
Epoch 120/1000
6/6 [==============================] - 0s 4ms/step - loss: 6.5304e-05
Epoch 121/1000
6/6 [==============================] - 0s 5ms/step - loss: 6.2229e-05
Epoch 122/1000
6/6 [==============================] - 0s 5ms/step - loss: 5.9394e-05
Epoch 123/1000
6/6 [==============================] - 0s 4ms/step - loss: 5.6700e-05
Epoch 124/1000
6/6 [==============================] - 0s 5ms/step - loss: 5.3823e-05
Epoch 125/1000
6/6 [==============================] - 0s 5ms/step - loss: 5.1714e-05
Epoch 126/1000
6/6 [==============================] - 0s 5ms/step - loss: 4.8672e-05
Epoch 127/1000
6/6 [==============================] - 0s 4ms/step - loss: 4.8928e-05
Epoch 128/1000
6/6 [==============================] - 0s 6ms/step - loss: 4.5447e-05
Epoch 129/1000
6/6 [==============================] - 0s 4ms/step - loss: 4.5901e-05
Epoch 130/1000
6/6 [==============================] - 0s 5ms/step - loss: 4.2962e-05
Epoch 131/1000
6/6 [==============================] - 0s 5ms/step - loss: 4.1365e-05
Epoch 132/1000
6/6 [==============================] - 0s 5ms/step - loss: 4.0391e-05
Epoch 133/1000
6/6 [==============================] - 0s 5ms/step - loss: 3.8119e-05
Epoch 134/1000
6/6 [==============================] - 0s 5ms/step - loss: 3.7208e-05
Epoch 135/1000
6/6 [==============================] - 0s 5ms/step - loss: 3.6568e-05
Epoch 136/1000
6/6 [==============================] - 0s 5ms/step - loss: 3.5234e-05
Epoch 137/1000
6/6 [==============================] - 0s 5ms/step - loss: 3.6215e-05
Epoch 138/1000
6/6 [==============================] - 0s 5ms/step - loss: 3.6693e-05
Epoch 139/1000
6/6 [==============================] - 0s 4ms/step - loss: 3.3624e-05
Epoch 140/1000
6/6 [==============================] - 0s 5ms/step - loss: 3.2566e-05
Epoch 141/1000
6/6 [==============================] - 0s 4ms/step - loss: 3.1380e-05
Epoch 142/1000
6/6 [==============================] - 0s 5ms/step - loss: 3.0722e-05
Epoch 143/1000
6/6 [==============================] - 0s 4ms/step - loss: 2.9964e-05
Epoch 144/1000
6/6 [==============================] - 0s 5ms/step - loss: 2.9954e-05
Epoch 145/1000
6/6 [==============================] - 0s 5ms/step - loss: 2.9530e-05
Epoch 146/1000
6/6 [==============================] - 0s 5ms/step - loss: 2.8321e-05
Epoch 147/1000
6/6 [==============================] - 0s 5ms/step - loss: 2.9107e-05
Epoch 148/1000
6/6 [==============================] - 0s 4ms/step - loss: 2.9064e-05
Epoch 149/1000
6/6 [==============================] - 0s 5ms/step - loss: 2.8339e-05
Epoch 150/1000
6/6 [==============================] - 0s 4ms/step - loss: 2.7466e-05
Epoch 151/1000
6/6 [==============================] - 0s 4ms/step - loss: 2.6921e-05
Epoch 152/1000
6/6 [==============================] - 0s 5ms/step - loss: 2.6774e-05
Epoch 153/1000
6/6 [==============================] - 0s 5ms/step - loss: 2.6329e-05
Epoch 154/1000
6/6 [==============================] - 0s 5ms/step - loss: 2.6081e-05
Epoch 155/1000
6/6 [==============================] - 0s 5ms/step - loss: 2.5651e-05
Epoch 156/1000
6/6 [==============================] - 0s 7ms/step - loss: 2.5381e-05
Epoch 157/1000
6/6 [==============================] - 0s 5ms/step - loss: 2.5744e-05
Epoch 158/1000
6/6 [==============================] - 0s 5ms/step - loss: 2.5035e-05
Epoch 159/1000
6/6 [==============================] - 0s 5ms/step - loss: 2.4964e-05
Epoch 160/1000
6/6 [==============================] - 0s 5ms/step - loss: 2.4517e-05
Epoch 161/1000
6/6 [==============================] - 0s 5ms/step - loss: 2.4417e-05
Epoch 162/1000
6/6 [==============================] - 0s 5ms/step - loss: 2.4166e-05
Epoch 163/1000
6/6 [==============================] - 0s 5ms/step - loss: 2.4256e-05
Epoch 164/1000
6/6 [==============================] - 0s 5ms/step - loss: 2.3607e-05
Epoch 165/1000
6/6 [==============================] - 0s 5ms/step - loss: 2.3903e-05
Epoch 166/1000
6/6 [==============================] - 0s 5ms/step - loss: 2.4796e-05
Epoch 167/1000
6/6 [==============================] - 0s 5ms/step - loss: 2.3850e-05
Epoch 168/1000
6/6 [==============================] - 0s 5ms/step - loss: 2.3377e-05
Epoch 169/1000
6/6 [==============================] - 0s 6ms/step - loss: 2.3284e-05
Epoch 170/1000
6/6 [==============================] - 0s 5ms/step - loss: 2.4254e-05
Epoch 171/1000
6/6 [==============================] - 0s 5ms/step - loss: 2.5136e-05
Epoch 172/1000
6/6 [==============================] - 0s 4ms/step - loss: 2.5552e-05
Epoch 173/1000
6/6 [==============================] - 0s 5ms/step - loss: 2.1505e-05
Epoch 174/1000
6/6 [==============================] - 0s 6ms/step - loss: 2.3836e-05
Epoch 175/1000
6/6 [==============================] - 0s 5ms/step - loss: 2.3222e-05
Epoch 176/1000
6/6 [==============================] - 0s 5ms/step - loss: 2.2046e-05
Epoch 177/1000
6/6 [==============================] - 0s 5ms/step - loss: 2.2150e-05
Epoch 178/1000
6/6 [==============================] - 0s 5ms/step - loss: 2.2109e-05
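Training stops at epoch 178 rather than running all 1000 epochs: the loss last improved at epoch 173 (2.1505e-05), so after patience=5 epochs without a new minimum the EarlyStopping callback halts the run.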
# Loss curve
plt.plot(history.history['loss'], label='loss')
plt.legend(loc='upper right')
plt.show()
Evaluation
# Test dataset
test_x = np.arange(10, 20, 0.1)
calc_y = np.cos(test_x)
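Note that the model was trained on a sine wave but is evaluated against a cosine. Since cos(t) = sin(t + π/2), the test signal has the same local dynamics as the training data, so the learned next-step mapping should transfer.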
# Recursive prediction: seed with the first 15 true values, then feed
# each prediction back in as part of the next input window
test_y = calc_y[:n_timesteps]
for i in range(len(test_x) - n_timesteps):
    net_input = test_y[i:i + n_timesteps]
    net_input = net_input.reshape((1, n_timesteps, n_features))
    pred_y = model.predict(net_input, verbose=0)
    print(test_y.shape, pred_y.shape, i, i + n_timesteps)
    test_y = np.append(test_y, pred_y)
(15,) (1, 1) 0 15
(16,) (1, 1) 1 16
(17,) (1, 1) 2 17
(18,) (1, 1) 3 18
(19,) (1, 1) 4 19
(20,) (1, 1) 5 20
(21,) (1, 1) 6 21
(22,) (1, 1) 7 22
(23,) (1, 1) 8 23
(24,) (1, 1) 9 24
(25,) (1, 1) 10 25
(26,) (1, 1) 11 26
(27,) (1, 1) 12 27
(28,) (1, 1) 13 28
(29,) (1, 1) 14 29
(30,) (1, 1) 15 30
(31,) (1, 1) 16 31
(32,) (1, 1) 17 32
(33,) (1, 1) 18 33
(34,) (1, 1) 19 34
(35,) (1, 1) 20 35
(36,) (1, 1) 21 36
(37,) (1, 1) 22 37
(38,) (1, 1) 23 38
(39,) (1, 1) 24 39
(40,) (1, 1) 25 40
(41,) (1, 1) 26 41
(42,) (1, 1) 27 42
(43,) (1, 1) 28 43
(44,) (1, 1) 29 44
(45,) (1, 1) 30 45
(46,) (1, 1) 31 46
(47,) (1, 1) 32 47
(48,) (1, 1) 33 48
(49,) (1, 1) 34 49
(50,) (1, 1) 35 50
(51,) (1, 1) 36 51
(52,) (1, 1) 37 52
(53,) (1, 1) 38 53
(54,) (1, 1) 39 54
(55,) (1, 1) 40 55
(56,) (1, 1) 41 56
(57,) (1, 1) 42 57
(58,) (1, 1) 43 58
(59,) (1, 1) 44 59
(60,) (1, 1) 45 60
(61,) (1, 1) 46 61
(62,) (1, 1) 47 62
(63,) (1, 1) 48 63
(64,) (1, 1) 49 64
(65,) (1, 1) 50 65
(66,) (1, 1) 51 66
(67,) (1, 1) 52 67
(68,) (1, 1) 53 68
(69,) (1, 1) 54 69
(70,) (1, 1) 55 70
(71,) (1, 1) 56 71
(72,) (1, 1) 57 72
(73,) (1, 1) 58 73
(74,) (1, 1) 59 74
(75,) (1, 1) 60 75
(76,) (1, 1) 61 76
(77,) (1, 1) 62 77
(78,) (1, 1) 63 78
(79,) (1, 1) 64 79
(80,) (1, 1) 65 80
(81,) (1, 1) 66 81
(82,) (1, 1) 67 82
(83,) (1, 1) 68 83
(84,) (1, 1) 69 84
(85,) (1, 1) 70 85
(86,) (1, 1) 71 86
(87,) (1, 1) 72 87
(88,) (1, 1) 73 88
(89,) (1, 1) 74 89
(90,) (1, 1) 75 90
(91,) (1, 1) 76 91
(92,) (1, 1) 77 92
(93,) (1, 1) 78 93
(94,) (1, 1) 79 94
(95,) (1, 1) 80 95
(96,) (1, 1) 81 96
(97,) (1, 1) 82 97
(98,) (1, 1) 83 98
(99,) (1, 1) 84 99
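Because each input window after the seed contains (and after 15 steps consists entirely of) the model's own outputs, prediction errors feed back and can compound over the 85 forecast steps. The plot below shows how closely the network still tracks the true cosine.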
# Prediction results plot
plt.plot(test_x, calc_y, label='ground truth', color='orange')
plt.plot(test_x, test_y, label='predictions', color='blue')
plt.legend(loc='upper left')
plt.ylim(-2, 2)
plt.show()