
DNN (Deep Neural Network) Application Examples
import matplotlib.pyplot as plt
import tensorflow as tf

# Load the MNIST handwritten-digit dataset.
mnist = tf.keras.datasets.mnist
(x_train, y_train), (x_test, y_test) = mnist.load_data()

# Scale pixel values from [0, 255] to [0, 1].
x_train, x_test = x_train / 255.0, x_test / 255.0

# Build the model: flatten the 28x28 image, one hidden layer with dropout, softmax output.
model = tf.keras.models.Sequential()
model.add(tf.keras.layers.Flatten(input_shape=(28, 28)))
model.add(tf.keras.layers.Dense(512, activation='relu'))
model.add(tf.keras.layers.Dropout(0.2))
model.add(tf.keras.layers.Dense(10, activation='softmax'))

# Compile, train for 5 epochs, and evaluate on the test set.
model.compile(optimizer='adam', loss='sparse_categorical_crossentropy', metrics=['accuracy'])
model.fit(x_train, y_train, epochs=5)
model.evaluate(x_test, y_test)
Epoch 1/5
60000/60000 [======================] - 7s 116us/sample - loss: 0.2205 - acc: 0.9348
Epoch 2/5
60000/60000 [======================] - 7s 110us/sample - loss: 0.0969 - acc: 0.9700
Epoch 3/5
60000/60000 [======================] - 7s 109us/sample - loss: 0.0678 - acc: 0.9785
Epoch 4/5
60000/60000 [======================] - 6s 108us/sample - loss: 0.0529 - acc: 0.9834
Epoch 5/5
60000/60000 [======================] - 7s 108us/sample - loss: 0.0428 - acc: 0.9859
10000/10000 [======================] - 0s 43us/sample - loss: 0.0645 - acc: 0.9795
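Since matplotlib is already imported, one quick check on the trained model is to display a test digit together with its prediction. A minimal sketch reusing the variables defined above (np.argmax picks the class with the highest softmax probability):

import numpy as np

# Predict class probabilities for the first test image and pick the most likely digit.
probs = model.predict(x_test[:1])
predicted_digit = np.argmax(probs[0])

# Show the image with the predicted and actual labels.
plt.imshow(x_test[0], cmap='gray')
plt.title('predicted: {} / actual: {}'.format(predicted_digit, y_test[0]))
plt.show()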
Code from the TensorFlow tutorial that classifies fashion items with a deep neural network.
The Fashion MNIST dataset provides 70,000 fashion-related images (clothes, shoes, handbags, etc.) in 10 categories, with a resolution of 28x28.

Images are 28x28 in size, and pixel values range between 0 and 255.
Labels are integers from 0 to 9 that indicate the category of the fashion item.
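For readability, the integer labels are usually paired with the category names from the Fashion MNIST description; a short sketch of that mapping (the list follows the order used in the official dataset):

# Label index -> fashion item category.
class_names = ['T-shirt/top', 'Trouser', 'Pullover', 'Dress', 'Coat',
               'Sandal', 'Shirt', 'Sneaker', 'Bag', 'Ankle boot']
# Example: a label of 9 means class_names[9], i.e. an ankle boot image.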

import tensorflow as tf
from tensorflow import keras
import numpy as np
import matplotlib.pyplot as plt
from tensorflow.keras import datasets, layers, models

# Load the Fashion MNIST dataset.
fashion_mnist = keras.datasets.fashion_mnist
(train_images, train_labels), (test_images, test_labels) = fashion_mnist.load_data()

# Display the first training image.
plt.imshow(train_images[0])
plt.show()

# Scale pixel values from [0, 255] to [0, 1].
train_images = train_images / 255.0
test_images = test_images / 255.0

# Build, compile, train, and evaluate the model.
model = models.Sequential()
model.add(layers.Flatten(input_shape=(28, 28)))
model.add(layers.Dense(128, activation='relu'))
model.add(layers.Dense(10, activation='softmax'))
model.compile(optimizer='adam', loss='sparse_categorical_crossentropy', metrics=['accuracy'])
model.fit(train_images, train_labels, epochs=5)
test_loss, test_acc = model.evaluate(test_images, test_labels)
print('Accuracy:', test_acc)
10000/10000 [==============================] - 0s 32us/sample - loss: 0.3560 - acc: 0.8701
Accuracy: 0.8701
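As a usage check, the trained model can classify a single test image; a minimal sketch, assuming the class_names list defined after the dataset description above:

# Classify the first test image and compare it with the true label.
predictions = model.predict(test_images[:1])
predicted_label = np.argmax(predictions[0])
print('Predicted:', class_names[predicted_label], '/ Actual:', class_names[test_labels[0]])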
# Load the libraries
import numpy as np
import matplotlib.pyplot as plt
import pandas as pd
import tensorflow as tf
# Download the training data
# The Titanic data is downloaded from the Kaggle site (https://www.Kaggle.com/c/titanic/data).
# The training data is the train.csv file and the test data is the test.csv file.
# Read in the data sets.
train = pd.read_csv("train.csv", sep=',')
test = pd.read_csv("test.csv", sep=',')
# Drop the columns that are not needed.
train.drop(['SibSp', 'Parch', 'Ticket', 'Embarked', 'Name', 'Cabin', 'PassengerId', 'Fare', 'Age'], inplace=True, axis=1)
# Drop rows that contain missing values.
train.dropna(inplace=True)
# Connect Google Drive (when running in Colab and the CSV files are kept there).
from google.colab import drive
drive.mount('/content/drive')  # Mounted at /content/drive
import os
print(os.listdir('/content/drive/MyDrive'))  # ['Colab Notebooks', 'Google Form', ...]
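If the Kaggle files are kept on Google Drive as listed above, the read_csv calls can point at the mounted path instead of the local working directory; a minimal sketch, where the path is an assumption and should match wherever train.csv and test.csv were actually uploaded:

# Hypothetical Drive location of the Kaggle files; adjust the folder to your own setup.
train = pd.read_csv('/content/drive/MyDrive/train.csv', sep=',')
test = pd.read_csv('/content/drive/MyDrive/test.csv', sep=',')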
# Visualize the survival rate by sex.
df = train.groupby('Sex').mean()["Survived"]
df.plot(kind='bar')
plt.show()
# Plotting the survival rate against Pclass as well shows that it is clearly related to survival.
# Therefore only 'Sex' and 'Pclass' are used as inputs for training.

The plot shows that the survival rate of women is higher.
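The Pclass relationship mentioned above can be checked with the same kind of bar chart; a minimal sketch (selecting the 'Survived' column before taking the mean, so only that column is averaged):

# Visualize the survival rate by passenger class.
df_pclass = train.groupby('Pclass')['Survived'].mean()
df_pclass.plot(kind='bar')
plt.show()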
# Clean the training data
# train.drop(['SibSp', 'Parch', 'Ticket', 'Embarked', 'Name', 'Cabin', 'PassengerId', 'Fare', 'Age'], inplace=True, axis=1)
# inplace=True means the original dataframe itself is modified, and axis=1 means the drop is applied along axis 1 (i.e. columns).
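# An equivalent form without inplace (a sketch, not from the original text) reassigns the result:
#   train = train.drop(['SibSp', 'Parch', 'Ticket', 'Embarked', 'Name', 'Cabin', 'PassengerId', 'Fare', 'Age'], axis=1)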
train.head()
# Survived Pclass Sex
# 0 0 3 male
# 1 1 1 female
# 2 1 3 female
# 3 1 1 female
# 4 0 3 male
# Convert the sex label into a number.
# Neural-network inputs must be numeric, so 'male'/'female' is encoded as 1/0.
for ix in train.index:
    if train.loc[ix, 'Sex'] == "male":
        train.loc[ix, 'Sex'] = 1
    else:
        train.loc[ix, 'Sex'] = 0
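# Note: a vectorized alternative to the loop above (a sketch, not from the original text):
#   train['Sex'] = train['Sex'].map({'male': 1, 'female': 0})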
# Flatten the Survived column into a 1-D array of target labels.
target = np.ravel(train.Survived)
# Remove the survival column from the training data.
train.drop(['Survived'], inplace=True, axis=1)
train = train.astype(float)  # Recent versions require the inputs to be converted to float.
# Create the Keras model.
model = tf.keras.models.Sequential()
model.add(tf.keras.layers.Dense(16, activation='relu', input_shape=(2,)))
model.add(tf.keras.layers.Dense(8, activation='relu'))
model.add(tf.keras.layers.Dense(1, activation='sigmoid'))
# Compile the Keras model.
model.compile(loss='binary_crossentropy', optimizer='adam', metrics=['accuracy'])
# Train the Keras model.
model.fit(train, target, epochs=30, batch_size=1, verbose=1)
...
Epoch 29/30
891/891 [==============================] - 1s 753us/sample - loss: 0.4591 - acc: 0.7677
Epoch 30/30
891/891 [==============================] - 1s 753us/sample - loss: 0.4547 - acc: 0.7789
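After training, the model can estimate the survival probability for a passenger described by the two inputs; a minimal sketch, where the passenger values are made up for illustration and the column order [Pclass, Sex] follows the cleaned training frame:

# Hypothetical passenger: first class (Pclass=1.0), female (Sex=0.0).
sample = np.array([[1.0, 0.0]])
prob = model.predict(sample)
print('Estimated survival probability:', prob[0][0])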