import tensorflow as tf
import numpy as np
import matplotlib.pyplot as plt
# Bare expression: in a notebook cell this displays the installed TF version.
tf.__version__
당뇨병을 진단해봅시다.
# Load the diabetes dataset as one float32 matrix.
# NOTE(review): assumes each CSV row is 8 feature columns followed by one
# binary label column (W below is shaped [8, 1]) — confirm against the file.
xy = np.loadtxt('./data-03-diabetes.csv', delimiter=',', dtype=np.float32)
# Hold out the last 100 rows as the test set; the rest is training data.
x_train = xy[0:-100, 0:-1]  # every column but the last -> features
y_train = xy[0:-100, [-1]]  # last column, kept 2-D with shape (n, 1) -> labels
x_test = xy[-100:, 0:-1]
y_test = xy[-100:, [-1]]
print(x_train.shape, y_train.shape)
print(x_test.shape, y_test.shape)
print(x_test)
print(y_test)
# Batch size == full training set, i.e. full-batch gradient descent.
dataset = tf.data.Dataset.from_tensor_slices((x_train, y_train)).batch(len(x_train))
```
```python
# Model parameters: one weight per feature (8 features -> 1 logit) plus a
# scalar bias, both initialized from a standard normal distribution.
W = tf.Variable(tf.random.normal([8, 1]), name='weight')
b = tf.Variable(tf.random.normal([1]), name='bias')
가설 설정
- 병이 있다 / 없다로 분류
- binary classification으로 진행

def logistic_regression(features):
    """Return P(y = 1 | features) for the logistic-regression model.

    Args:
        features: float32 tensor/array of shape (batch, 8).

    Returns:
        float32 tensor of shape (batch, 1) with sigmoid probabilities.
    """
    # tf.sigmoid is the idiomatic, numerically-handled form of the original
    # tf.divide(1., 1. + tf.exp(-z)); the result is identical.
    # (Indentation restored — the notebook export had flattened the body.)
    hypothesis = tf.sigmoid(tf.matmul(features, W) + b)
    return hypothesis

print(logistic_regression(x_train))
tf.Tensor(
[[0.7636665 ]
[0.80092156]
[0.41501445]
[0.64212984]
[0.08430427]
[0.63381004]
[0.24326754]
[0.3841968 ]
[0.89089465]
[0.82491577]
[0.8023107 ]
[0.4901009 ]
[0.5799539 ]
[0.9825106 ]
[0.4572858 ]
[0.4163838 ]
[0.5812125 ]
[0.18660453]
[0.49397138]
[0.62530833]
[0.73232764]
[0.7523532 ]
[0.6552009 ]
[0.33452913]
[0.68108976]
[0.54304326]
[0.45371786]
[0.7994284 ]
[0.87836325]
[0.5488168 ]
[0.36446616]
[0.72126794]
[0.70075434]
[0.35161632]
[0.48328355]
[0.67992574]
[0.7080763 ]
[0.49282402]
[0.3677201 ]
[0.67686725]
[0.9042856 ]
[0.8459027 ]
[0.5395733 ]
[0.38149193]
[0.58056957]
[0.76291215]
[0.6500029 ]
[0.43171626]
[0.61393666]
[0.3151854 ]
[0.30568883]
[0.7729612 ]
[0.50671214]
[0.8705757 ]
[0.5404438 ]
[0.21647061]
[0.5636013 ]
[0.20140737]
[0.7398153 ]
[0.58383894]
[0.5396699 ]
[0.29455638]
[0.58545834]
[0.78524125]
[0.7630256 ]
[0.88663423]
[0.46257353]
[0.5376614 ]
[0.43710646]
[0.29864013]
[0.58702856]
[0.75715876]
[0.8147204 ]
[0.70629656]
[0.66748947]
[0.7191528 ]
[0.41652465]
[0.784707 ]
[0.6601275 ]
[0.6013106 ]
[0.36445394]
[0.55154824]
[0.8871866 ]
[0.4873026 ]
[0.4423806 ]
[0.44266328]
[0.19323725]
[0.8383805 ]
[0.7044733 ]
[0.6911003 ]
[0.35248557]
[0.7854226 ]
[0.68024784]
[0.5945548 ]
[0.75045216]
[0.31145722]
[0.15239963]
[0.6889401 ]
[0.6916801 ]
[0.5982999 ]
[0.7874695 ]
[0.4930176 ]
[0.6035695 ]
[0.52621114]
[0.93228096]
[0.40088257]
[0.23307744]
[0.3338161 ]
[0.44407237]
[0.75184536]
[0.41691092]
[0.53907806]
[0.28092387]
[0.87783015]
[0.69089836]
[0.36543256]
[0.674538 ]
[0.6241091 ]
[0.15919036]
[0.5720373 ]
[0.4550946 ]
[0.8480272 ]
[0.5062124 ]
[0.15462755]
[0.48597947]
[0.31892073]
[0.7759308 ]
[0.7397651 ]
[0.60515463]
[0.31335363]
[0.4768164 ]
[0.7203091 ]
[0.44128314]
[0.53259516]
[0.333995 ]
[0.18140796]
[0.5336953 ]
[0.6654354 ]
[0.81676185]
[0.84335667]
[0.27014837]
[0.48655856]
[0.5064386 ]
[0.7958667 ]
[0.72779024]
[0.3328443 ]
[0.8311853 ]
[0.836748 ]
[0.47448954]
[0.6056248 ]
[0.45292825]
[0.83604586]
[0.6897159 ]
[0.77199703]
[0.5026937 ]
[0.38735747]
[0.4824938 ]
[0.12066017]
[0.84649557]
[0.38549057]
[0.5493018 ]
[0.8670505 ]
[0.61569124]
[0.55314785]
[0.74935067]
[0.60369956]
[0.5834295 ]
[0.6766917 ]
[0.7095112 ]
[0.40459758]
[0.7286922 ]
[0.24222213]
[0.41957784]
[0.25559497]
[0.6979939 ]
[0.5611082 ]
[0.7786507 ]
[0.6965237 ]
[0.66873777]
[0.23819672]
[0.6364276 ]
[0.4979731 ]
[0.714705 ]
[0.7020187 ]
[0.78507227]
[0.53667647]
[0.2624541 ]
[0.452083 ]
[0.7129255 ]
[0.22131674]
[0.53050655]
[0.41136152]
[0.6669064 ]
[0.53683543]
[0.7375507 ]
[0.34301266]
[0.21388213]
[0.5986855 ]
[0.8330947 ]
[0.80187535]
[0.64903283]
[0.4714769 ]
[0.60809654]
[0.7226655 ]
[0.6793256 ]
[0.9021515 ]
[0.5891988 ]
[0.8016913 ]
[0.75919235]
[0.56440383]
[0.913523 ]
[0.26920265]
[0.4617522 ]
[0.3071325 ]
[0.24110588]
[0.3428931 ]
[0.7469285 ]
[0.6508559 ]
[0.6763496 ]
[0.89952314]
[0.5012992 ]
[0.45024437]
[0.45327234]
[0.48845887]
[0.5373695 ]
[0.54264766]
[0.8453774 ]
[0.30040938]
[0.70696026]
[0.7836389 ]
[0.4648138 ]
[0.6119319 ]
[0.33171308]
[0.6333675 ]
[0.74983406]
[0.7440523 ]
[0.47342327]
[0.8846874 ]
[0.32426697]
[0.46258035]
[0.25525188]
[0.59502435]
[0.8243798 ]
[0.49547455]
[0.90853006]
[0.604871 ]
[0.90686107]
[0.3889982 ]
[0.76802987]
[0.5737063 ]
[0.5890859 ]
[0.45235273]
[0.83422416]
[0.6021486 ]
[0.8165836 ]
[0.665509 ]
[0.5822604 ]
[0.5298171 ]
[0.69548607]
[0.9476272 ]
[0.60625416]
[0.4768089 ]
[0.3412375 ]
[0.6143951 ]
[0.37127158]
[0.6081643 ]
[0.66949815]
[0.46397138]
[0.75950485]
[0.47994053]
[0.4896506 ]
[0.23809849]
[0.6258427 ]
[0.23959407]
[0.7328004 ]
[0.6403352 ]
[0.48757273]
[0.36688936]
[0.6742406 ]
[0.7604507 ]
[0.8425764 ]
[0.84232324]
[0.6810904 ]
[0.2849021 ]
[0.30383506]
[0.4764485 ]
[0.16555266]
[0.5279863 ]
[0.2804107 ]
[0.51549876]
[0.26363027]
[0.6870069 ]
[0.45990103]
[0.38192233]
[0.62898135]
[0.36766055]
[0.3459825 ]
[0.3800515 ]
[0.8241863 ]
[0.73126805]
[0.4914277 ]
[0.36833724]
[0.54069245]
[0.2955948 ]
[0.54006386]
[0.7240843 ]
[0.23198804]
[0.5508033 ]
[0.33670497]
[0.7528618 ]
[0.4827643 ]
[0.5956967 ]
[0.6806996 ]
[0.39760885]
[0.7777132 ]
[0.58061177]
[0.78884 ]
[0.72492105]
[0.23343174]
[0.89194214]
[0.6262258 ]
[0.47217122]
[0.4976585 ]
[0.58576447]
[0.34759036]
[0.7120238 ]
[0.39932963]
[0.7298943 ]
[0.54752946]
[0.39619485]
[0.40414265]
[0.44822156]
[0.72210354]
[0.3363574 ]
[0.7218922 ]
[0.55552727]
[0.562554 ]
[0.7483087 ]
[0.7455528 ]
[0.6777819 ]
[0.5165851 ]
[0.2835523 ]
[0.5450235 ]
[0.3785477 ]
[0.82105684]
[0.7291967 ]
[0.73976344]
[0.89891005]
[0.42973378]
[0.81454223]
[0.7233231 ]
[0.2811927 ]
[0.48980665]
[0.20398001]
[0.62771314]
[0.5244867 ]
[0.7755 ]
[0.95346034]
[0.84620076]
[0.6889333 ]
[0.24285477]
[0.5693512 ]
[0.76369256]
[0.58009857]
[0.85132635]
[0.64586854]
[0.26660168]
[0.34568483]
[0.28297365]
[0.25824115]
[0.5771973 ]
[0.39185464]
[0.29263896]
[0.71612537]
[0.4060299 ]
[0.41314667]
[0.61048424]
[0.7173807 ]
[0.34865826]
[0.51796865]
[0.38521075]
[0.76301765]
[0.8754921 ]
[0.81893456]
[0.3807082 ]
[0.6450838 ]
[0.65612316]
[0.8975352 ]
[0.5465226 ]
[0.6624358 ]
[0.46019804]
[0.2996241 ]
[0.4929719 ]
[0.6118485 ]
[0.75788957]
[0.6066032 ]
[0.43232313]
[0.7535387 ]
[0.63284343]
[0.32570115]
[0.72443765]
[0.37332115]
[0.5006614 ]
[0.8192237 ]
[0.75885975]
[0.5622173 ]
[0.78403085]
[0.6618404 ]
[0.33079818]
[0.85007083]
[0.7708067 ]
[0.834261 ]
[0.64302355]
[0.39414245]
[0.6374696 ]
[0.604928 ]
[0.22426799]
[0.86885685]
[0.49368358]
[0.6264783 ]
[0.39506266]
[0.5899049 ]
[0.39188552]
[0.7884841 ]
[0.58259594]
[0.5808124 ]
[0.55833954]
[0.7224996 ]
[0.8704563 ]
[0.43792704]
[0.6993865 ]
[0.6171954 ]
[0.9194286 ]
[0.6960325 ]
[0.88938886]
[0.3668208 ]
[0.3727263 ]
[0.46614826]
[0.7939589 ]
[0.05962122]
[0.5595139 ]
[0.23507877]
[0.14036497]
[0.3183285 ]
[0.4972031 ]
[0.63007444]
[0.33999383]
[0.7765038 ]
[0.31941396]
[0.44529113]
[0.73057693]
[0.30166724]
[0.4887114 ]
[0.32092988]
[0.6143098 ]
[0.20864552]
[0.80825156]
[0.6245585 ]
[0.36496627]
[0.18112727]
[0.17601198]
[0.48685083]
[0.36005446]
[0.8256423 ]
[0.5416993 ]
[0.5926569 ]
[0.7944695 ]
[0.5606976 ]
[0.85915315]
[0.5120207 ]
[0.43992117]
[0.28699648]
[0.9156089 ]
[0.7199931 ]
[0.7302978 ]
[0.29350042]
[0.3630638 ]
[0.41596973]
[0.31394008]
[0.84450716]
[0.62244946]
[0.8170966 ]
[0.7549453 ]
[0.35122392]
[0.9169064 ]
[0.7254303 ]
[0.52748376]
[0.56286156]
[0.7783947 ]
[0.5780855 ]
[0.5146494 ]
[0.48204806]
[0.46600467]
[0.76925594]
[0.7844165 ]
[0.65899026]
[0.30863944]
[0.83081335]
[0.6027034 ]
[0.48024657]
[0.632341 ]
[0.39004117]
[0.73970485]
[0.62533045]
[0.528723 ]
[0.6224033 ]
[0.55916107]
[0.33480844]
[0.51781493]
[0.53442115]
[0.347116 ]
[0.9100422 ]
[0.38443407]
[0.51216805]
[0.48601606]
[0.54000443]
[0.5446764 ]
[0.8411278 ]
[0.6099867 ]
[0.5563153 ]
[0.268715 ]
[0.40976167]
[0.38979083]
[0.5007875 ]
[0.24788392]
[0.4824183 ]
[0.2407676 ]
[0.5203949 ]
[0.744302 ]
[0.5761994 ]
[0.3453807 ]
[0.50945175]
[0.43230698]
[0.5557008 ]
[0.7835617 ]
[0.5741481 ]
[0.53914785]
[0.5861171 ]
[0.6589845 ]
[0.53246963]
[0.5854794 ]
[0.921835 ]
[0.8908273 ]
[0.37412447]
[0.85578173]
[0.35911554]
[0.5043008 ]
[0.4602722 ]
[0.7887939 ]
[0.71430486]
[0.5824623 ]
[0.47400293]
[0.7626106 ]
[0.39731818]
[0.39681867]
[0.28079882]
[0.53555644]
[0.34190837]
[0.55484104]
[0.40515682]
[0.49421048]
[0.3364206 ]
[0.71804976]
[0.89632165]
[0.22754587]
[0.4700739 ]
[0.7745273 ]
[0.16087689]
[0.22672014]
[0.8355446 ]
[0.4568351 ]
[0.50936085]
[0.7896421 ]
[0.6286575 ]
[0.81790966]
[0.6534319 ]
[0.9005289 ]
[0.786376 ]
[0.56117505]
[0.5235736 ]
[0.7055119 ]
[0.36500552]
[0.6618679 ]
[0.42692015]
[0.7948441 ]
[0.25390214]
[0.3877735 ]
[0.49036333]
[0.67437106]
[0.23031549]
[0.79976875]
[0.2996345 ]
[0.9049057 ]
[0.479109 ]
[0.8211558 ]
[0.52805084]
[0.594226 ]
[0.83106095]
[0.5853545 ]
[0.33579 ]
[0.49695298]
[0.60951 ]
[0.39794663]
[0.5118717 ]
[0.6532073 ]
[0.7271816 ]
[0.39820278]
[0.66155314]
[0.7709704 ]
[0.3689781 ]
[0.76498526]
[0.3952522 ]
[0.5928684 ]
[0.7847312 ]
[0.755186 ]
[0.44578898]
[0.76340497]
[0.3816837 ]
[0.5955655 ]
[0.67361015]
[0.7707462 ]
[0.8477395 ]
[0.4667048 ]
[0.2416224 ]
[0.56083953]
[0.7360955 ]
[0.3842407 ]
[0.39748034]
[0.7379605 ]
[0.49579504]
[0.32042682]
[0.58163965]
[0.437914 ]
[0.60518587]
[0.7585915 ]
[0.5343307 ]
[0.449775 ]
[0.8288851 ]
[0.6622481 ]
[0.14425522]
[0.36460817]
[0.54965633]
[0.3563462 ]
[0.44539824]
[0.30856517]
[0.5247021 ]
[0.5280862 ]
[0.796762 ]
[0.2778471 ]
[0.57513106]
[0.3974484 ]
[0.6978083 ]
[0.6963511 ]
[0.7044852 ]
[0.2962951 ]
[0.63282883]
[0.49066746]
[0.94800115]
[0.6699201 ]], shape=(659, 1), dtype=float32)
Loss Function
- 기존 MSE 대신 Cross Entropy 사용

def loss_fn(hypothesis, labels):
    """Binary cross-entropy loss for logistic regression.

    Args:
        hypothesis: predicted probabilities, shape (batch, 1).
        labels: ground-truth 0/1 labels, same shape as ``hypothesis``.

    Returns:
        Scalar float32 tensor: mean cross-entropy over the batch.
    """
    # Clip away exact 0 and 1 so tf.math.log never yields -inf (NaN loss)
    # when the model saturates. (Indentation restored from the flattened export.)
    eps = 1e-7
    hypothesis = tf.clip_by_value(hypothesis, eps, 1. - eps)
    loss = -tf.reduce_mean(labels * tf.math.log(hypothesis) +
                           (1 - labels) * tf.math.log(1 - hypothesis))
    return loss
# Plain SGD with a small learning rate. NOTE(review): this is the legacy TF1
# optimizer kept under compat.v1; tf.keras.optimizers.SGD(0.005) is the TF2
# replacement — confirm its apply_gradients usage before swapping.
optimizer = tf.compat.v1.train.GradientDescentOptimizer(learning_rate=0.005)
학습
# Train with full-batch gradient descent: `dataset` holds a single batch, so
# the inner loop runs once per epoch. (Indentation restored from the
# flattened notebook export.)
epochs = 5000
for step in range(epochs):
    for features, labels in dataset:
        # Record the forward pass so the tape can differentiate the loss
        # with respect to W and b.
        with tf.GradientTape() as tape:
            pred = logistic_regression(features)
            loss_value = loss_fn(pred, labels)
        grads = tape.gradient(loss_value, [W, b])
        optimizer.apply_gradients(grads_and_vars=zip(grads, [W, b]))
        if step % 100 == 0:
            # Loss is re-evaluated after the update, so the printed value
            # reflects the freshly updated parameters.
            print("Iter: {}, Loss: {:.4f}".format(step, loss_fn(logistic_regression(features), labels)))
Iter: 0, Loss: 0.7648
Iter: 100, Loss: 0.7447
Iter: 200, Loss: 0.7296
Iter: 300, Loss: 0.7179
Iter: 400, Loss: 0.7083
Iter: 500, Loss: 0.7001
Iter: 600, Loss: 0.6929
Iter: 700, Loss: 0.6864
Iter: 800, Loss: 0.6803
Iter: 900, Loss: 0.6746
Iter: 1000, Loss: 0.6692
Iter: 1100, Loss: 0.6640
Iter: 1200, Loss: 0.6591
Iter: 1300, Loss: 0.6543
Iter: 1400, Loss: 0.6497
Iter: 1500, Loss: 0.6453
Iter: 1600, Loss: 0.6410
Iter: 1700, Loss: 0.6368
Iter: 1800, Loss: 0.6328
Iter: 1900, Loss: 0.6289
Iter: 2000, Loss: 0.6251
Iter: 2100, Loss: 0.6214
Iter: 2200, Loss: 0.6179
Iter: 2300, Loss: 0.6144
Iter: 2400, Loss: 0.6111
Iter: 2500, Loss: 0.6079
Iter: 2600, Loss: 0.6048
Iter: 2700, Loss: 0.6017
Iter: 2800, Loss: 0.5988
Iter: 2900, Loss: 0.5959
Iter: 3000, Loss: 0.5932
Iter: 3100, Loss: 0.5905
Iter: 3200, Loss: 0.5879
Iter: 3300, Loss: 0.5854
Iter: 3400, Loss: 0.5829
Iter: 3500, Loss: 0.5806
Iter: 3600, Loss: 0.5783
Iter: 3700, Loss: 0.5760
Iter: 3800, Loss: 0.5739
Iter: 3900, Loss: 0.5718
Iter: 4000, Loss: 0.5697
Iter: 4100, Loss: 0.5677
Iter: 4200, Loss: 0.5658
Iter: 4300, Loss: 0.5639
Iter: 4400, Loss: 0.5621
Iter: 4500, Loss: 0.5604
Iter: 4600, Loss: 0.5586
Iter: 4700, Loss: 0.5570
Iter: 4800, Loss: 0.5553
Iter: 4900, Loss: 0.5538
테스트
def accuracy_fn(hypothesis, labels):
    """Fraction of predictions matching labels at a 0.5 decision threshold.

    Args:
        hypothesis: predicted probabilities, shape (batch, 1).
        labels: ground-truth 0/1 labels (float32, as loaded from the CSV).

    Returns:
        Scalar float32 tensor in [0, 1].
    """
    # Cast the thresholded predictions to float32 (the original used int32):
    # tf.equal requires both operands to share a dtype, and the labels here
    # are float32, so the int32 version raises a dtype-mismatch error.
    # (Indentation restored from the flattened export.)
    predicted = tf.cast(hypothesis > 0.5, dtype=tf.float32)
    accuracy = tf.reduce_mean(tf.cast(tf.equal(predicted, labels), dtype=tf.float32))
    return accuracy
# Evaluate on the held-out 100-row test set; test_acc is a scalar tensor.
test_acc = accuracy_fn(logistic_regression(x_test),y_test)
print("Testset Accuracy: {:.4f}".format(test_acc))