import numpy as np
import tensorflow as tf

# Generate 3,000 synthetic samples. Each sample is a sequence of 100
# (marker, value) pairs: the marker column is 1 at two random positions
# and 0 elsewhere, and the label is the product of the two marked values.
X = []
Y = []
for i in range(3000):
    lst = np.random.rand(100)
    idx = np.random.choice(100, 2, replace=False)
    zeros = np.zeros(100)
    zeros[idx] = 1
    X.append(np.array(list(zip(zeros, lst))))
    Y.append(np.prod(lst[idx]))
print(X[0], Y[0])
'''
[[1. 0.30630322]
[0. 0.14278013]
[0. 0.96178476]
[0. 0.02242287]
[0. 0.34139033]
[0. 0.54767334]
[0. 0.58360337]
[0. 0.75452914]
.
.
.
'''
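Each sample is therefore a (100, 2) sequence: column 0 is a marker that is 1 at exactly two positions, column 1 holds the random values, and the label is the product of the two marked values. A quick sanity check (a sketch, not in the original notebook) confirms the relationship:
marked = X[0][X[0][:, 0] == 1, 1]   # the two values whose marker is 1
assert np.isclose(np.prod(marked), Y[0])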
model = tf.keras.Sequential([
    tf.keras.layers.SimpleRNN(units=30, return_sequences=True, input_shape=[100, 2]),
    tf.keras.layers.SimpleRNN(units=30),
    tf.keras.layers.Dense(1)
])
model.compile(optimizer='adam', loss='mse')
model.summary()
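model.summary() should report 2,851 trainable parameters in total. As a quick sketch of where that number comes from, each SimpleRNN layer has units * (units + input_dim + 1) weights:
print(30 * (30 + 2 + 1))    # first SimpleRNN: 990
print(30 * (30 + 30 + 1))   # second SimpleRNN: 1830
print(30 + 1)               # Dense(1): 31  ->  990 + 1830 + 31 = 2851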
# Prepare the data as NumPy arrays
X = np.array(X)
Y = np.array(Y)

# Train on the first 2,500 samples, using 20% of them for validation
history = model.fit(X[:2500], Y[:2500], epochs=100, validation_split=0.2)
# Visualize the training and validation loss
import matplotlib.pyplot as plt
%matplotlib inline
plt.plot(history.history['loss'], 'b--', label='loss')
plt.plot(history.history['val_loss'], 'r--', label='val_loss')
plt.xlabel('Epoch')
plt.legend()
plt.show()
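The last 500 samples are never seen during training, so they make a natural held-out test set. A minimal evaluation sketch (not part of the original code; the name simple_rnn_mse is just illustrative):
# MSE of the SimpleRNN model on the 500 held-out samples
simple_rnn_mse = model.evaluate(X[2500:], Y[2500:])
print(simple_rnn_mse)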
# The same task with LSTM layers
model = tf.keras.Sequential([
    tf.keras.layers.LSTM(units=30, return_sequences=True, input_shape=[100, 2]),
    tf.keras.layers.LSTM(units=30),
    tf.keras.layers.Dense(1)
])
model.compile(optimizer='adam', loss='mse')
model.summary()
# Train the LSTM (X and Y are already NumPy arrays from the earlier run)
history = model.fit(X[:2500], Y[:2500], epochs=100, validation_split=0.2)
# Visualize the training and validation loss
plt.plot(history.history['loss'], 'b--', label='loss')
plt.plot(history.history['val_loss'], 'r--', label='val_loss')
plt.xlabel('Epoch')
plt.legend()
plt.show()
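The same held-out check can be repeated for the LSTM model, and a few predictions compared against the true products (again a sketch; lstm_mse and preds are illustrative names):
# MSE of the LSTM model on the same held-out samples
lstm_mse = model.evaluate(X[2500:], Y[2500:])
print(lstm_mse)
# Compare a few predictions with the true products
preds = model.predict(X[2500:2505]).flatten()
print(list(zip(preds.round(4), Y[2500:2505].round(4))))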