TensorFlow - batch

이정규 (가지마) · January 2, 2023

import tensorflow as tf
import numpy as np
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Dense

#1. Data
x = np.array([1,2,3,4,5,6])
y = np.array([1,2,3,5,4,6])

#2. Build the model
model = Sequential()
model.add(Dense(3, input_dim=1))  # input layer: 1 feature in, 3 units out
model.add(Dense(5))               # hidden layer
model.add(Dense(4))               # hidden layer
model.add(Dense(2))               # hidden layer
model.add(Dense(1))               # output layer

#3. Compile and train
model.compile(loss='mae', optimizer='adam')
model.fit(x, y, epochs=10, batch_size=7)  # batch size is set in fit()

"""
batch_size = 2
Epoch 1/10
3/3 [==============================] - 0s 1ms/step - loss: 2.1968
Epoch 2/10
3/3 [==============================] - 0s 498us/step - loss: 2.0350
Epoch 3/10
3/3 [==============================] - 0s 997us/step - loss: 1.8360
Epoch 4/10
3/3 [==============================] - 0s 498us/step - loss: 1.6591
Epoch 5/10
3/3 [==============================] - 0s 997us/step - loss: 1.4543
Epoch 6/10
3/3 [==============================] - 0s 500us/step - loss: 1.2717
Epoch 7/10
3/3 [==============================] - 0s 498us/step - loss: 1.0715
Epoch 8/10
3/3 [==============================] - 0s 498us/step - loss: 0.8531
Epoch 9/10
3/3 [==============================] - 0s 498us/step - loss: 0.6848
Epoch 10/10
3/3 [==============================] - 0s 498us/step - loss: 0.5478

batch_size = 3

Epoch 1/10
2/2 [==============================] - 0s 2ms/step - loss: 4.4034
Epoch 2/10
2/2 [==============================] - 0s 0s/step - loss: 4.0804
Epoch 3/10
2/2 [==============================] - 0s 0s/step - loss: 3.7423
Epoch 4/10
2/2 [==============================] - 0s 997us/step - loss: 3.4300
Epoch 5/10
2/2 [==============================] - 0s 1ms/step - loss: 3.1028
Epoch 6/10
2/2 [==============================] - 0s 0s/step - loss: 2.8326
Epoch 7/10
2/2 [==============================] - 0s 998us/step - loss: 2.4965
Epoch 8/10
2/2 [==============================] - 0s 0s/step - loss: 2.2055
Epoch 9/10
2/2 [==============================] - 0s 0s/step - loss: 1.8946
Epoch 10/10
2/2 [==============================] - 0s 1ms/step - loss: 1.6299

batch_size = 4
Epoch 1/10
2/2 [==============================] - 0s 1ms/step - loss: 2.9631
Epoch 2/10
2/2 [==============================] - 0s 0s/step - loss: 2.8066
Epoch 3/10
2/2 [==============================] - 0s 998us/step - loss: 2.6360
Epoch 4/10
2/2 [==============================] - 0s 1ms/step - loss: 2.4763
Epoch 5/10
2/2 [==============================] - 0s 996us/step - loss: 2.2962
Epoch 6/10
2/2 [==============================] - 0s 997us/step - loss: 2.1351
Epoch 7/10
2/2 [==============================] - 0s 997us/step - loss: 1.9782
Epoch 8/10
2/2 [==============================] - 0s 996us/step - loss: 1.7942
Epoch 9/10
2/2 [==============================] - 0s 997us/step - loss: 1.6265
Epoch 10/10
2/2 [==============================] - 0s 996us/step - loss: 1.4305

batch_size = 7
Epoch 1/10
1/1 [==============================] - 0s 174ms/step - loss: 5.2018
Epoch 2/10
1/1 [==============================] - 0s 13ms/step - loss: 5.1183
Epoch 3/10
1/1 [==============================] - 0s 2ms/step - loss: 5.0348
Epoch 4/10
1/1 [==============================] - 0s 993us/step - loss: 4.9513
Epoch 5/10
1/1 [==============================] - 0s 997us/step - loss: 4.8678
Epoch 6/10
1/1 [==============================] - 0s 995us/step - loss: 4.7844
Epoch 7/10
1/1 [==============================] - 0s 997us/step - loss: 4.7009
Epoch 8/10
1/1 [==============================] - 0s 996us/step - loss: 4.6175
Epoch 9/10
1/1 [==============================] - 0s 998us/step - loss: 4.5341
Epoch 10/10
1/1 [==============================] - 0s 996us/step - loss: 4.4507
"""

#4. Evaluate and predict
results = model.predict(np.array([6]))

print('prediction for 6:', results)
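
Since the section title also mentions evaluation, a small sketch (an assumption, not part of the original run) of scoring the trained model on the training data with model.evaluate, which returns the same 'mae' loss configured in compile:

loss = model.evaluate(x, y)  # MAE of the model's predictions on (x, y)
print('evaluate loss:', loss)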
