[TIL]21.08.09 간단한 딥러닝 회귀예측

Seung Joo·2021년 8월 9일
0

TIL

목록 보기
29/31
post-thumbnail
import pandas as pd
import tensorflow as tf
from tensorflow import keras
from tensorflow.keras import layers
from xgboost import XGBRegressor
from sklearn.metrics import mean_absolute_error, mean_squared_error
from sklearn.preprocessing import MinMaxScaler

# Fuel-efficiency (MPG) prediction using the UCI Auto MPG dataset.
dataset_path = keras.utils.get_file(
    "auto-mpg.data",
    "http://archive.ics.uci.edu/ml/machine-learning-databases/auto-mpg/auto-mpg.data")


column_names = ['MPG', 'Cylinders', 'Displacement', 
                'Horsepower', 'Weight', 'Acceleration', 
                'model_Year', 'Origin']

# '?' marks missing Horsepower values; tab starts an inline comment in the raw file.
data = pd.read_csv(dataset_path, names=column_names,
                   na_values='?', comment='\t', sep=" ", skipinitialspace=True)

df = data.copy()
# Drop rows with missing values
df = df.dropna()
# One-hot encode Origin (1 = USA, 2 = Europe, 3 = Japan)
df['USA'] = (df.Origin == 1).astype(int)
df['Europe'] = (df.Origin == 2).astype(int)
df['Japan'] = (df.Origin == 3).astype(int)
df = df.drop('Origin', axis=1)
# 80/20 train/test split with a fixed seed for reproducibility
train = df.sample(frac=0.8, random_state=0)
test = df.drop(train.index)

# Separate features from the MPG target
X_train, y_train = train.drop('MPG', axis=1), train['MPG']
X_test, y_test = test.drop('MPG', axis=1), test['MPG']
# Min-max scaling: fit on the training set only, then apply to both splits
scaler = MinMaxScaler()
X_train_scaled = scaler.fit_transform(X_train)
X_test_scaled = scaler.transform(X_test)

# --- Keras model ---

# Keras Sequential regression model: two hidden ReLU layers, linear output.
# FIX: the original trained/evaluated on the unscaled X_train / X_test even
# though X_train_scaled / X_test_scaled were prepared above, leaving the
# MinMaxScaler as dead code. The scaled arrays are used here instead.
model = keras.Sequential()
# Derive the input width from the data instead of hard-coding 9.
model.add(layers.Dense(64, activation='relu',
                       input_shape=(X_train_scaled.shape[1],)))
model.add(layers.Dense(64, activation='relu'))
model.add(layers.Dense(1))

# RMSprop optimizer with learning rate 0.001
optimizer = tf.keras.optimizers.RMSprop(0.001)

model.compile(loss='mse',
              optimizer=optimizer,
              metrics=['mae', 'mse'])

# Train silently for 500 epochs, holding out 20% of training data for validation
history = model.fit(
    X_train_scaled, y_train,
    epochs=500, validation_split=0.2, verbose=0)

# evaluate returns [loss, mae, mse] per the compile() metrics order
result = model.evaluate(X_test_scaled, y_test)

print("mae :", round(result[1], 3))
print("mse :", round(result[2], 3))

# Results below were recorded from the original unscaled run;
# scaled inputs are expected to change them:
# mae : 5.653
# mse : 39.813

# --- XGBRegressor model ---

# Gradient-boosted tree baseline with default XGBoost hyperparameters.
xgb_reg = XGBRegressor()
xgb_reg.fit(X_train, y_train)

# Predict on the held-out test split and report the same metrics as above.
predictions = xgb_reg.predict(X_test)

print("mae :", round(mean_absolute_error(y_test, predictions), 3))
print("mse :", round(mean_squared_error(y_test, predictions), 3))

#
# mae : 1.897
# mse : 6.61
profile
조금씩 천천히

0개의 댓글