Deep Learning for Computer Vision

๋ฐ•๊ด‘์šฑยท2023๋…„ 2์›” 9์ผ


๐Ÿ“• Keras

โœ keras

import numpy as np
from numpy import genfromtxt # genfromtxt builds a NumPy array from a text file

data = genfromtxt('../Computer-Vision-with-Python/DATA/bank_note_data.txt', delimiter=',') # delimiter=',': each row is split on commas when building the array

labels = data[:, 4]      # the last column (index 4) is the class label (0 or 1)

features = data[:, 0:4]  # the first four columns are the features

X = features
y = labels

from sklearn.model_selection import train_test_split # splits the data into a train set and a test set

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.33, random_state=42) # 33% of the samples go to the test set

from sklearn.preprocessing import MinMaxScaler # bounds every feature to a fixed range -> normalization

scaler_object = MinMaxScaler()
scaler_object.fit(X_train) # fit() learns the per-feature minimum and maximum of X_train
scaled_X_train = scaler_object.transform(X_train)
scaled_X_test = scaler_object.transform(X_test) # the scaler is fit only on the training data, so no information from the test set leaks into preprocessing
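For reference, min-max scaling maps each feature column to [0, 1] using that column's minimum and maximum learned from the training set. A tiny sketch with made-up numbers:

import numpy as np

col = np.array([2.0, 5.0, 8.0])                       # hypothetical feature column
scaled = (col - col.min()) / (col.max() - col.min())  # x' = (x - min) / (max - min)
print(scaled)                                         # [0.  0.5 1. ]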

from keras.models import Sequential # these are used to build a simple fully connected network
from keras.layers import Dense

model = Sequential()
model.add(Dense(4, input_dim=4, activation='relu')) # input layer: 4 units matching the 4 features, ReLU activation
model.add(Dense(8, activation='relu')) # hidden layer
model.add(Dense(1, activation='sigmoid')) # output layer: sigmoid for binary classification

# ๋ชจ๋ธ์„ ์ปดํŒŒ์ผํ•˜๋Š” ๊ณผ์ •. ํ•™์Šต ๋ฐฉ์‹์— ๋Œ€ํ•œ ํ™˜๊ฒฝ์„ค์ •.
model.compile(loss='binary_crossentropy', optimizer='adam', metrics=['accuracy'])
# epochs: number of passes over the full training set
# verbose: controls how much training progress (loss/metrics) is printed
model.fit(scaled_X_train, y_train, epochs=50, verbose=2)

model.predict_classes(scaled_X_test)  # returns the predicted class for each test sample
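Note: predict_classes only exists in older Keras releases (it was removed around TensorFlow 2.6). If the call above fails, a roughly equivalent line for this single sigmoid output is to threshold model.predict at 0.5:

predictions = (model.predict(scaled_X_test) > 0.5).astype('int32').flatten()  # 1 if probability > 0.5, else 0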

from sklearn.metrics import confusion_matrix, classification_report # imported to summarize the model's performance

predictions = model.predict_classes(scaled_X_test)
print(classification_report(y_test, predictions))
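confusion_matrix was imported above but never called; printing it next to the report is a one-liner:

print(confusion_matrix(y_test, predictions))  # rows: actual class, columns: predicted class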

๐Ÿ’ป ๊ฒฐ๊ณผ


๐Ÿ“— CNN

โœ MNIST

from keras.datasets import mnist
import matplotlib.pyplot as plt
%matplotlib inline

(x_train, y_train), (x_test, y_test) = mnist.load_data()

single_image = x_train[0]

plt.imshow(single_image, cmap='gray_r') # reversed grayscale colormap (black and white swapped)

from keras.utils import to_categorical # for one-hot encoding the labels

y_cat_test = to_categorical(y_test, 10) # 10 classes (digits 0-9)
y_cat_train = to_categorical(y_train, 10)
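As a quick check, to_categorical turns an integer label such as 3 into a length-10 one-hot vector:

to_categorical([3], 10)  # -> [[0., 0., 0., 1., 0., 0., 0., 0., 0., 0.]]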

x_train = x_train / x_train.max() # manual normalization: scale pixel values to [0, 1]
x_test = x_test / x_test.max()

x_train = x_train.reshape(60000, 28, 28, 1) # add the channel dimension (grayscale -> 1 channel)
x_test = x_test.reshape(10000, 28, 28, 1)

from keras.models import Sequential
from keras.layers import Dense, Conv2D, MaxPool2D, Flatten # Flatten turns the 2D feature maps into a 1D vector

model = Sequential()

# Convolutional Layer
# ํ•„ํ„ฐ์˜ ์ˆ˜๋Š” ๊ฐ„๋‹จํ•˜๋ฉด 32๊ฐ€ ๋‚ซ๋‹ค. ์ปค๋„ ์‚ฌ์ด์ฆˆ๋Š” ๋ณดํ†ต 3x3 or 4x4
model.add(Conv2D(filters=32, kernel_size=(4,4), input_shape=(28,28,1), activation='relu')) 

# Pooling Layer
model.add(MaxPool2D(pool_size=(2,2)))

# 2D -> 1D
model.add(Flatten())

# Dense Layer
model.add(Dense(128, activation='relu')) # Hidden layer
model.add(Dense(10, activation='softmax')) # output layer: softmax over the 10 classes

model.compile(loss='categorical_crossentropy', optimizer='rmsprop', metrics=['accuracy'])

model.summary()
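The summary should show the shape flow through the network: the 4x4 convolution (no padding) turns the 28x28x1 input into 25x25 feature maps with 32 channels, 2x2 max pooling reduces that to 12x12x32, Flatten gives a 4608-dimensional vector, and the two Dense layers map it to 128 and finally 10 outputs.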

model.fit(x_train, y_cat_train, epochs=2)

model.evaluate(x_test, y_cat_test)
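evaluate returns the loss followed by the metrics passed to compile, so the result can be unpacked like this:

test_loss, test_acc = model.evaluate(x_test, y_cat_test)  # loss and accuracy on the test set
print(test_loss, test_acc)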

from sklearn.metrics import classification_report

predictions = model.predict_classes(x_test)

print(classification_report(y_test, predictions))
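As noted in the Keras section, predict_classes is gone from newer Keras releases; for a softmax output a roughly equivalent replacement is np.argmax over the predicted probabilities (the same applies to the CIFAR-10 example below):

import numpy as np
predictions = np.argmax(model.predict(x_test), axis=-1)  # index of the highest softmax probability per sample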

๐Ÿ’ป ๊ฒฐ๊ณผ


โœ CIFAR-10

from keras.datasets import cifar10
import matplotlib.pyplot as plt
%matplotlib inline

(x_train, y_train), (x_test, y_test) = cifar10.load_data()

x_train = x_train / x_train.max() # scale pixel values to [0, 1]
x_test = x_test / x_test.max()

from keras.utils import to_categorical
from keras.models import Sequential
from keras.layers import Dense, Conv2D, MaxPool2D, Flatten

y_cat_train = to_categorical(y_train, 10)
y_cat_test = to_categorical(y_test,10)

model = Sequential()

model.add(Conv2D(filters=32, kernel_size=(4,4), input_shape=(32,32,3), activation='relu'))

model.add(MaxPool2D(pool_size=(2,2)))

model.add(Conv2D(filters=32, kernel_size=(4,4), activation='relu')) # second conv block; input_shape is only needed on the first layer

model.add(MaxPool2D(pool_size=(2,2)))

model.add(Flatten())

model.add(Dense(256, activation='relu'))

model.add(Dense(10, activation='softmax'))

model.compile(loss='categorical_crossentropy', optimizer='rmsprop', metrics=['accuracy'])

model.fit(x_train, y_cat_train, verbose=1, epochs=10)

model.evaluate(x_test, y_cat_test)

from sklearn.metrics import classification_report

predictions = model.predict_classes(x_test)

print(classification_report(y_test, predictions))

๐Ÿ’ป ๊ฒฐ๊ณผ

0๊ฐœ์˜ ๋Œ“๊ธ€