Naver Project (Diabetes Prediction)

Jacob Kim·2024년 1월 28일
0

Naver Project Week 1

목록 보기
17/28
import tensorflow as tf
import numpy as np
import matplotlib.pyplot as plt

# TensorFlow version this notebook was written against.
tf.__version__
# 2.3.0

당뇨병을 진단해봅시다.

# Load the diabetes dataset: each row is 8 normalized features
# followed by a binary label (0./1.) in the last column.
xy = np.loadtxt('./data-03-diabetes.csv', delimiter=',', dtype=np.float32)

# Hold out the last TEST_SIZE rows for evaluation; train on the rest.
# (Named once instead of repeating the magic number 100 four times.)
TEST_SIZE = 100
x_train = xy[:-TEST_SIZE, :-1]
y_train = xy[:-TEST_SIZE, [-1]]  # [-1] keeps the label column 2-D: (n, 1)

x_test = xy[-TEST_SIZE:, :-1]
y_test = xy[-TEST_SIZE:, [-1]]

print(x_train.shape, y_train.shape)
print(x_test.shape, y_test.shape)
#(659, 8) (659, 1)
#(100, 8) (100, 1)
print(x_test)
print(y_test)
#[[-0.294118   -0.0150754  -0.0491803  -0.333333   -0.550827    0.0134128
#  -0.699402   -0.266667  ]
# [ 0.0588235   0.547739    0.278689   -0.393939   -0.763593   -0.0789866
#  -0.926558   -0.2       ]
# [-0.294118    0.658291    0.114754   -0.474747   -0.602837    0.00149028
#  -0.527754   -0.0666667 ]
# [-0.882353   -0.00502513 -0.0491803  -0.79798     0.         -0.242921
#  -0.596072    0.        ]
# [ 0.176471   -0.316583    0.737705   -0.535354   -0.884161    0.0581222
# -0.823228   -0.133333  ]
#[-0.647059    0.236181    0.639344   -0.292929   -0.432624    0.707899
# -0.315115   -0.966667  ]
#[-0.0588235  -0.0854271   0.344262    0.          0.          0.0611028
# -0.565329    0.566667  ]
#[-0.294118    0.959799    0.147541    0.          0.         -0.0789866
# -0.786507   -0.666667  ]
#[ 0.0588235   0.567839    0.409836    0.          0.         -0.260805
# -0.870196    0.0666667 ]
#[ 0.         -0.0653266  -0.0163934   0.          0.          0.052161
# -0.842015   -0.866667  ]
#[-0.647059    0.21608    -0.147541    0.          0.          0.0730254
# -0.958155   -0.866667  ]
#[-0.764706    0.0150754  -0.0491803  -0.656566   -0.373522   -0.278688
# -0.542272   -0.933333  ]
#[-0.764706   -0.437186   -0.0819672  -0.434343   -0.893617   -0.278688
# -0.783091   -0.966667  ]
#[ 0.          0.628141    0.245902   -0.272727    0.          0.47839
# -0.755764   -0.833333  ]
#[ 0.         -0.0452261   0.0491803  -0.212121   -0.751773    0.329359
# -0.754056   -0.966667  ]
#[-0.529412    0.256281    0.311475    0.          0.         -0.0372578
# -0.608881   -0.8       ]
#[-0.411765    0.366834    0.344262    0.          0.          0.
# -0.520068    0.6       ]
#[-0.764706    0.296482    0.213115   -0.474747   -0.515366   -0.0104321
# -0.561913   -0.866667  ]
#[-0.647059    0.306533    0.0491803   0.          0.         -0.311475
# -0.798463   -0.966667  ]
#[-0.882353    0.0753769  -0.180328   -0.616162    0.         -0.156483
# -0.912041   -0.733333  ]
#[-0.882353    0.407035    0.213115   -0.474747   -0.574468   -0.281669
# -0.359522   -0.933333  ]
#[-0.882353    0.447236    0.344262   -0.0707071  -0.574468    0.374069
# -0.780529   -0.166667  ]
#[-0.0588235   0.0753769   0.311475    0.          0.         -0.266766
# -0.335611   -0.566667  ]
#[ 0.529412    0.58794     0.868852    0.          0.          0.260805
# -0.847139   -0.233333  ]
#[-0.764706    0.21608     0.147541   -0.353535   -0.775414    0.165425
# -0.309991   -0.933333  ]
#[-0.176471    0.296482    0.114754   -0.010101   -0.704492    0.147541
# -0.691716   -0.266667  ]
#[-0.764706   -0.0954774  -0.0163934   0.          0.         -0.299553
# -0.903501   -0.866667  ]
#[-0.176471    0.427136    0.47541    -0.515152    0.134752   -0.0938897
# -0.957301   -0.266667  ]
#[-0.647059    0.698492    0.213115   -0.616162   -0.704492   -0.108793
# -0.837746   -0.666667  ]
#[ 0.         -0.00502513  0.          0.          0.         -0.254843
# -0.850555   -0.966667  ]
#[-0.529412    0.276382    0.442623   -0.777778   -0.63357     0.028316
# -0.555935   -0.766667  ]
#[-0.529412    0.18593     0.147541    0.          0.          0.326379
# -0.29462    -0.833333  ]
#[-0.764706    0.226131    0.245902   -0.454545   -0.527187    0.0700448
# -0.654142   -0.833333  ]
#[-0.294118    0.256281    0.278689   -0.373737    0.         -0.177347
# -0.584116   -0.0666667 ]
#[-0.882353    0.688442    0.442623   -0.414141    0.          0.0432191
# -0.293766    0.0333333 ]
#[-0.764706    0.296482    0.          0.          0.          0.147541
# -0.807003   -0.333333  ]
#[-0.529412    0.105528    0.245902   -0.59596    -0.763593   -0.153502
# -0.965841   -0.8       ]
#[-0.294118   -0.19598     0.311475   -0.272727    0.          0.186289
# -0.915457   -0.766667  ]
#[ 0.176471    0.155779    0.          0.          0.          0.
# -0.843723   -0.7       ]
#[-0.764706    0.276382   -0.245902   -0.575758   -0.208038    0.0253354
# -0.916311   -0.966667  ]
#[ 0.0588235   0.648241    0.278689    0.          0.         -0.0223547
# -0.940222   -0.2       ]
#[-0.764706   -0.0653266   0.0491803  -0.353535   -0.621749    0.132638
# -0.491033   -0.933333  ]
#[-0.647059    0.58794     0.0491803  -0.737374   -0.0851064  -0.0700447
# -0.814688   -0.9       ]
#[-0.411765    0.266332    0.278689   -0.454545   -0.947991   -0.117735
# -0.691716   -0.366667  ]
#[ 0.176471    0.296482    0.0163934  -0.272727    0.          0.228018
# -0.690009   -0.433333  ]
#[ 0.          0.346734   -0.0491803  -0.59596    -0.312057   -0.213115
# -0.766012    0.        ]
#[-0.647059    0.0251256   0.213115    0.          0.         -0.120715
# -0.963279   -0.633333  ]
#[-0.176471    0.879397   -0.180328   -0.333333   -0.0732861   0.0104323
# -0.36123    -0.566667  ]
#[-0.647059    0.738693    0.278689   -0.212121   -0.562648    0.00745157
# -0.238258   -0.666667  ]
#[ 0.176471   -0.0552764   0.180328   -0.636364    0.         -0.311475
# -0.558497    0.166667  ]
#[-0.882353    0.0854271  -0.0163934  -0.0707071  -0.579196    0.0581222
# -0.712212   -0.9       ]
#[-0.411765   -0.0251256   0.245902   -0.454545    0.          0.0611028
# -0.743809    0.0333333 ]
#[-0.529412   -0.165829    0.409836   -0.616162    0.         -0.126677
# -0.795901   -0.566667  ]
#[-0.882353    0.145729    0.0819672  -0.272727   -0.527187    0.135618
# -0.819812    0.        ]
#[-0.882353    0.497487    0.114754   -0.414141   -0.699764   -0.126677
# -0.768574   -0.3       ]
#[-0.411765    0.175879    0.409836   -0.393939   -0.751773    0.165425
# -0.852263   -0.3       ]
#[-0.882353    0.115578    0.540984    0.          0.         -0.0223547
# -0.840307   -0.2       ]
#[-0.529412    0.125628    0.278689   -0.191919    0.          0.174367
# -0.865073   -0.433333  ]
#[-0.882353    0.165829    0.278689   -0.414141   -0.574468    0.0760059
# -0.64304    -0.866667  ]
#[ 0.          0.417085    0.377049   -0.474747    0.         -0.0342771
# -0.69684    -0.966667  ]
#[-0.764706    0.758794    0.442623    0.          0.         -0.317437
# -0.788215   -0.966667  ]
#[-0.764706   -0.0753769  -0.147541    0.          0.         -0.102832
# -0.9462     -0.966667  ]
#[-0.647059    0.306533    0.278689   -0.535354   -0.813239   -0.153502
# -0.790777   -0.566667  ]
#[-0.0588235   0.20603     0.409836    0.          0.         -0.153502
# -0.845431   -0.966667  ]
#[-0.764706    0.748744    0.442623   -0.252525   -0.716312    0.326379
# -0.514944   -0.9       ]
#[-0.764706    0.0653266  -0.0819672  -0.454545   -0.609929   -0.135618
# -0.702818   -0.966667  ]
#[-0.764706    0.0552764   0.229508    0.          0.         -0.305514
# -0.588386    0.0666667 ]
#[-0.529412   -0.0452261  -0.0163934  -0.353535    0.          0.0551417
# -0.824082   -0.766667  ]
#[ 0.          0.266332    0.409836   -0.454545   -0.716312   -0.183308
# -0.626815    0.        ]
#[-0.0588235  -0.346734    0.180328   -0.535354    0.         -0.0461997
# -0.554227   -0.3       ]
#[-0.764706   -0.00502513 -0.0163934  -0.656566   -0.621749    0.0909091
# -0.679761    0.        ]
#[-0.882353    0.0251256   0.213115    0.          0.          0.177347
# -0.816396   -0.3       ]
#[ 0.294118    0.20603     0.311475   -0.252525   -0.64539     0.260805
#-0.396243   -0.1       ]
#[-0.647059    0.0251256  -0.278689   -0.59596    -0.777778   -0.0819672
# -0.725021   -0.833333  ]
#[-0.882353    0.0954774  -0.0491803  -0.636364   -0.725768   -0.150522
# -0.87959    -0.966667  ]
#[ 0.0588235   0.407035    0.540984    0.          0.         -0.0253353
# -0.439795   -0.2       ]
# 0.529412    0.537688    0.442623   -0.252525   -0.669031    0.210134
# -0.0640478  -0.4       ]
#[ 0.411765    0.00502513  0.377049   -0.333333   -0.751773   -0.105812
# -0.649872   -0.166667  ]
# [-0.882353    0.477387    0.540984   -0.171717    0.          0.469449
#  -0.760888   -0.8       ]
# [-0.882353   -0.18593     0.213115   -0.171717   -0.865248    0.38003
#  -0.130658   -0.633333  ]
# [-0.647059    0.879397    0.147541   -0.555556   -0.527187    0.0849479
#  -0.71819    -0.5       ]
# [-0.294118    0.628141    0.0163934   0.          0.         -0.275708
#  -0.914603   -0.0333333 ]
# [-0.529412    0.366834    0.147541    0.          0.         -0.0700447
#  -0.0572161  -0.966667  ]
# [-0.882353    0.21608     0.278689   -0.212121   -0.825059    0.162444
#  -0.843723   -0.766667  ]
# [-0.647059    0.0854271   0.0163934  -0.515152    0.         -0.225037
#  -0.876174   -0.866667  ]
# [ 0.          0.819095    0.442623   -0.111111    0.205674    0.290611
#  -0.877028   -0.833333  ]
# [-0.0588235   0.547739    0.278689   -0.353535    0.         -0.0342771
#  -0.688301   -0.2       ]
# [-0.882353    0.286432    0.442623   -0.212121   -0.739953    0.0879285
#  -0.163962   -0.466667  ]
# [-0.176471    0.376884    0.47541    -0.171717    0.         -0.0461997
#  -0.732707   -0.4       ]
# [ 0.          0.236181    0.180328    0.          0.          0.0819672
#  -0.846285    0.0333333 ]
# [-0.882353    0.0653266   0.245902    0.          0.          0.117735
#  -0.898377   -0.833333  ]
# [-0.294118    0.909548    0.508197    0.          0.          0.0581222
#  -0.829206    0.5       ]
# [-0.764706   -0.115578   -0.0491803  -0.474747   -0.962175   -0.153502
#  -0.412468   -0.966667  ]
# [ 0.0588235   0.708543    0.213115   -0.373737    0.          0.311475
#  -0.722459   -0.266667  ]
# [ 0.0588235  -0.105528    0.0163934   0.          0.         -0.329359
#  -0.945346   -0.6       ]
# [ 0.176471    0.0150754   0.245902   -0.030303   -0.574468   -0.019374
#  -0.920581    0.4       ]
# [-0.764706    0.226131    0.147541   -0.454545    0.          0.0968703
#  -0.77626    -0.8       ]
# [-0.411765    0.21608     0.180328   -0.535354   -0.735225   -0.219076
#  -0.857387   -0.7       ]
# [-0.882353    0.266332   -0.0163934   0.          0.         -0.102832
#  -0.768574   -0.133333  ]
# [-0.882353   -0.0653266   0.147541   -0.373737    0.         -0.0938897
#  -0.797609   -0.933333  ]]
#[[1.]
# [1.]
# [1.]
# [1.]
# [1.]
# [1.]
# [1.]
# [0.]
# [0.]
# [1.]
# [0.]
# [1.]
# [1.]
# [0.]
# [1.]
# [0.]
# [1.]
# [1.]
# [1.]
# [1.]
# [1.]
# [0.]
# [1.]
# [0.]
# [1.]
# [0.]
# [1.]
# [0.]
# [0.]
# [1.]
# [1.]
# [1.]
# [1.]
# [0.]
# [0.]
# [1.]
# [1.]
# [1.]
# [0.]
# [1.]
# [0.]
# [0.]
# [1.]
# [1.]
# [0.]
# [1.]
# [1.]
# [0.]
# [0.]
# [1.]
# [1.]
# [0.]
# [1.]
# [1.]
# [0.]
# [1.]
# [1.]
# [1.]
# [1.]
# [1.]
# [1.]
# [1.]
# [0.]
# [0.]
# [0.]
# [1.]
# [1.]
# [1.]
# [1.]
# [1.]
# [1.]
# [0.]
# [0.]
# [1.]
# [1.]
# [0.]
# [1.]
# [1.]
# [0.]
# [1.]
# [0.]
# [0.]
# [0.]
# [1.]
# [1.]
# [0.]
# [0.]
# [0.]
# [1.]
# [0.]
# [1.]
# [0.]
# [1.]
# [0.]
# [1.]
# [1.]
# [1.]
# [1.]
# [0.]
# [1.]]
# Full-batch training: a single batch holding every training row.
full_batch = len(x_train)
dataset = tf.data.Dataset.from_tensor_slices((x_train, y_train)).batch(full_batch)
```

```python
# Trainable parameters: one weight per input feature (8 features -> one
# logit) plus a scalar bias, both initialized from a standard normal.
W = tf.Variable(tf.random.normal([8, 1]), name='weight')
b = tf.Variable(tf.random.normal([1]), name='bias')

가설 설정

  • 병이 있다 / 없다로 분류
  • binary classification으로 진행
def logistic_regression(features):
    """Return the sigmoid hypothesis P(y=1 | features) for each row.

    Computes logits = features @ W + b, then applies 1 / (1 + e^-z)
    elementwise, yielding a (n, 1) tensor of probabilities.
    """
    logits = tf.matmul(features, W) + b
    return tf.divide(1., 1. + tf.exp(-logits))

print(logistic_regression(x_train))
#output
tf.Tensor(
[[0.7636665 ]
 [0.80092156]
 [0.41501445]
 [0.64212984]
 [0.08430427]
 [0.63381004]
 [0.24326754]
 [0.3841968 ]
 [0.89089465]
 [0.82491577]
 [0.8023107 ]
 [0.4901009 ]
 [0.5799539 ]
 [0.9825106 ]
 [0.4572858 ]
 [0.4163838 ]
 [0.5812125 ]
 [0.18660453]
 [0.49397138]
 [0.62530833]
 [0.73232764]
 [0.7523532 ]
 [0.6552009 ]
 [0.33452913]
 [0.68108976]
 [0.54304326]
 [0.45371786]
 [0.7994284 ]
 [0.87836325]
 [0.5488168 ]
 [0.36446616]
 [0.72126794]
 [0.70075434]
 [0.35161632]
 [0.48328355]
 [0.67992574]
 [0.7080763 ]
 [0.49282402]
 [0.3677201 ]
 [0.67686725]
 [0.9042856 ]
 [0.8459027 ]
 [0.5395733 ]
 [0.38149193]
 [0.58056957]
 [0.76291215]
 [0.6500029 ]
 [0.43171626]
 [0.61393666]
 [0.3151854 ]
 [0.30568883]
 [0.7729612 ]
 [0.50671214]
 [0.8705757 ]
 [0.5404438 ]
 [0.21647061]
 [0.5636013 ]
 [0.20140737]
 [0.7398153 ]
 [0.58383894]
 [0.5396699 ]
 [0.29455638]
 [0.58545834]
 [0.78524125]
 [0.7630256 ]
 [0.88663423]
 [0.46257353]
 [0.5376614 ]
 [0.43710646]
 [0.29864013]
 [0.58702856]
 [0.75715876]
 [0.8147204 ]
 [0.70629656]
 [0.66748947]
 [0.7191528 ]
 [0.41652465]
 [0.784707  ]
 [0.6601275 ]
 [0.6013106 ]
 [0.36445394]
 [0.55154824]
 [0.8871866 ]
 [0.4873026 ]
 [0.4423806 ]
 [0.44266328]
 [0.19323725]
 [0.8383805 ]
 [0.7044733 ]
 [0.6911003 ]
 [0.35248557]
 [0.7854226 ]
 [0.68024784]
 [0.5945548 ]
 [0.75045216]
 [0.31145722]
 [0.15239963]
 [0.6889401 ]
 [0.6916801 ]
 [0.5982999 ]
 [0.7874695 ]
 [0.4930176 ]
 [0.6035695 ]
 [0.52621114]
 [0.93228096]
 [0.40088257]
 [0.23307744]
 [0.3338161 ]
 [0.44407237]
 [0.75184536]
 [0.41691092]
 [0.53907806]
 [0.28092387]
 [0.87783015]
 [0.69089836]
 [0.36543256]
 [0.674538  ]
 [0.6241091 ]
 [0.15919036]
 [0.5720373 ]
 [0.4550946 ]
 [0.8480272 ]
 [0.5062124 ]
 [0.15462755]
 [0.48597947]
 [0.31892073]
 [0.7759308 ]
 [0.7397651 ]
 [0.60515463]
 [0.31335363]
 [0.4768164 ]
 [0.7203091 ]
 [0.44128314]
 [0.53259516]
 [0.333995  ]
 [0.18140796]
 [0.5336953 ]
 [0.6654354 ]
 [0.81676185]
 [0.84335667]
 [0.27014837]
 [0.48655856]
 [0.5064386 ]
 [0.7958667 ]
 [0.72779024]
 [0.3328443 ]
 [0.8311853 ]
 [0.836748  ]
 [0.47448954]
 [0.6056248 ]
 [0.45292825]
 [0.83604586]
 [0.6897159 ]
 [0.77199703]
 [0.5026937 ]
 [0.38735747]
 [0.4824938 ]
 [0.12066017]
 [0.84649557]
 [0.38549057]
 [0.5493018 ]
 [0.8670505 ]
 [0.61569124]
 [0.55314785]
 [0.74935067]
 [0.60369956]
 [0.5834295 ]
 [0.6766917 ]
 [0.7095112 ]
 [0.40459758]
 [0.7286922 ]
 [0.24222213]
 [0.41957784]
 [0.25559497]
 [0.6979939 ]
 [0.5611082 ]
 [0.7786507 ]
 [0.6965237 ]
 [0.66873777]
 [0.23819672]
 [0.6364276 ]
 [0.4979731 ]
 [0.714705  ]
 [0.7020187 ]
 [0.78507227]
 [0.53667647]
 [0.2624541 ]
 [0.452083  ]
 [0.7129255 ]
 [0.22131674]
 [0.53050655]
 [0.41136152]
 [0.6669064 ]
 [0.53683543]
 [0.7375507 ]
 [0.34301266]
 [0.21388213]
 [0.5986855 ]
 [0.8330947 ]
 [0.80187535]
 [0.64903283]
 [0.4714769 ]
 [0.60809654]
 [0.7226655 ]
 [0.6793256 ]
 [0.9021515 ]
 [0.5891988 ]
 [0.8016913 ]
 [0.75919235]
 [0.56440383]
 [0.913523  ]
 [0.26920265]
 [0.4617522 ]
 [0.3071325 ]
 [0.24110588]
 [0.3428931 ]
 [0.7469285 ]
 [0.6508559 ]
 [0.6763496 ]
 [0.89952314]
 [0.5012992 ]
 [0.45024437]
 [0.45327234]
 [0.48845887]
 [0.5373695 ]
 [0.54264766]
 [0.8453774 ]
 [0.30040938]
 [0.70696026]
 [0.7836389 ]
 [0.4648138 ]
 [0.6119319 ]
 [0.33171308]
 [0.6333675 ]
 [0.74983406]
 [0.7440523 ]
 [0.47342327]
 [0.8846874 ]
 [0.32426697]
 [0.46258035]
 [0.25525188]
 [0.59502435]
 [0.8243798 ]
 [0.49547455]
 [0.90853006]
 [0.604871  ]
 [0.90686107]
 [0.3889982 ]
 [0.76802987]
 [0.5737063 ]
 [0.5890859 ]
 [0.45235273]
 [0.83422416]
 [0.6021486 ]
 [0.8165836 ]
 [0.665509  ]
 [0.5822604 ]
 [0.5298171 ]
 [0.69548607]
 [0.9476272 ]
 [0.60625416]
 [0.4768089 ]
 [0.3412375 ]
 [0.6143951 ]
 [0.37127158]
 [0.6081643 ]
 [0.66949815]
 [0.46397138]
 [0.75950485]
 [0.47994053]
 [0.4896506 ]
 [0.23809849]
 [0.6258427 ]
 [0.23959407]
 [0.7328004 ]
 [0.6403352 ]
 [0.48757273]
 [0.36688936]
 [0.6742406 ]
 [0.7604507 ]
 [0.8425764 ]
 [0.84232324]
 [0.6810904 ]
 [0.2849021 ]
 [0.30383506]
 [0.4764485 ]
 [0.16555266]
 [0.5279863 ]
 [0.2804107 ]
 [0.51549876]
 [0.26363027]
 [0.6870069 ]
 [0.45990103]
 [0.38192233]
 [0.62898135]
 [0.36766055]
 [0.3459825 ]
 [0.3800515 ]
 [0.8241863 ]
 [0.73126805]
 [0.4914277 ]
 [0.36833724]
 [0.54069245]
 [0.2955948 ]
 [0.54006386]
 [0.7240843 ]
 [0.23198804]
 [0.5508033 ]
 [0.33670497]
 [0.7528618 ]
 [0.4827643 ]
 [0.5956967 ]
 [0.6806996 ]
 [0.39760885]
 [0.7777132 ]
 [0.58061177]
 [0.78884   ]
 [0.72492105]
 [0.23343174]
 [0.89194214]
 [0.6262258 ]
 [0.47217122]
 [0.4976585 ]
 [0.58576447]
 [0.34759036]
 [0.7120238 ]
 [0.39932963]
 [0.7298943 ]
 [0.54752946]
 [0.39619485]
 [0.40414265]
 [0.44822156]
 [0.72210354]
 [0.3363574 ]
 [0.7218922 ]
 [0.55552727]
 [0.562554  ]
 [0.7483087 ]
 [0.7455528 ]
 [0.6777819 ]
 [0.5165851 ]
 [0.2835523 ]
 [0.5450235 ]
 [0.3785477 ]
 [0.82105684]
 [0.7291967 ]
 [0.73976344]
 [0.89891005]
 [0.42973378]
 [0.81454223]
 [0.7233231 ]
 [0.2811927 ]
 [0.48980665]
 [0.20398001]
 [0.62771314]
 [0.5244867 ]
 [0.7755    ]
 [0.95346034]
 [0.84620076]
 [0.6889333 ]
 [0.24285477]
 [0.5693512 ]
 [0.76369256]
 [0.58009857]
 [0.85132635]
 [0.64586854]
 [0.26660168]
 [0.34568483]
 [0.28297365]
 [0.25824115]
 [0.5771973 ]
 [0.39185464]
 [0.29263896]
 [0.71612537]
 [0.4060299 ]
 [0.41314667]
 [0.61048424]
 [0.7173807 ]
 [0.34865826]
 [0.51796865]
 [0.38521075]
 [0.76301765]
 [0.8754921 ]
 [0.81893456]
 [0.3807082 ]
 [0.6450838 ]
 [0.65612316]
 [0.8975352 ]
 [0.5465226 ]
 [0.6624358 ]
 [0.46019804]
 [0.2996241 ]
 [0.4929719 ]
 [0.6118485 ]
 [0.75788957]
 [0.6066032 ]
 [0.43232313]
 [0.7535387 ]
 [0.63284343]
 [0.32570115]
 [0.72443765]
 [0.37332115]
 [0.5006614 ]
 [0.8192237 ]
 [0.75885975]
 [0.5622173 ]
 [0.78403085]
 [0.6618404 ]
 [0.33079818]
 [0.85007083]
 [0.7708067 ]
 [0.834261  ]
 [0.64302355]
 [0.39414245]
 [0.6374696 ]
 [0.604928  ]
 [0.22426799]
 [0.86885685]
 [0.49368358]
 [0.6264783 ]
 [0.39506266]
 [0.5899049 ]
 [0.39188552]
 [0.7884841 ]
 [0.58259594]
 [0.5808124 ]
 [0.55833954]
 [0.7224996 ]
 [0.8704563 ]
 [0.43792704]
 [0.6993865 ]
 [0.6171954 ]
 [0.9194286 ]
 [0.6960325 ]
 [0.88938886]
 [0.3668208 ]
 [0.3727263 ]
 [0.46614826]
 [0.7939589 ]
 [0.05962122]
 [0.5595139 ]
 [0.23507877]
 [0.14036497]
 [0.3183285 ]
 [0.4972031 ]
 [0.63007444]
 [0.33999383]
 [0.7765038 ]
 [0.31941396]
 [0.44529113]
 [0.73057693]
 [0.30166724]
 [0.4887114 ]
 [0.32092988]
 [0.6143098 ]
 [0.20864552]
 [0.80825156]
 [0.6245585 ]
 [0.36496627]
 [0.18112727]
 [0.17601198]
 [0.48685083]
 [0.36005446]
 [0.8256423 ]
 [0.5416993 ]
 [0.5926569 ]
 [0.7944695 ]
 [0.5606976 ]
 [0.85915315]
 [0.5120207 ]
 [0.43992117]
 [0.28699648]
 [0.9156089 ]
 [0.7199931 ]
 [0.7302978 ]
 [0.29350042]
 [0.3630638 ]
 [0.41596973]
 [0.31394008]
 [0.84450716]
 [0.62244946]
 [0.8170966 ]
 [0.7549453 ]
 [0.35122392]
 [0.9169064 ]
 [0.7254303 ]
 [0.52748376]
 [0.56286156]
 [0.7783947 ]
 [0.5780855 ]
 [0.5146494 ]
 [0.48204806]
 [0.46600467]
 [0.76925594]
 [0.7844165 ]
 [0.65899026]
 [0.30863944]
 [0.83081335]
 [0.6027034 ]
 [0.48024657]
 [0.632341  ]
 [0.39004117]
 [0.73970485]
 [0.62533045]
 [0.528723  ]
 [0.6224033 ]
 [0.55916107]
 [0.33480844]
 [0.51781493]
 [0.53442115]
 [0.347116  ]
 [0.9100422 ]
 [0.38443407]
 [0.51216805]
 [0.48601606]
 [0.54000443]
 [0.5446764 ]
 [0.8411278 ]
 [0.6099867 ]
 [0.5563153 ]
 [0.268715  ]
 [0.40976167]
 [0.38979083]
 [0.5007875 ]
 [0.24788392]
 [0.4824183 ]
 [0.2407676 ]
 [0.5203949 ]
 [0.744302  ]
 [0.5761994 ]
 [0.3453807 ]
 [0.50945175]
 [0.43230698]
 [0.5557008 ]
 [0.7835617 ]
 [0.5741481 ]
 [0.53914785]
 [0.5861171 ]
 [0.6589845 ]
 [0.53246963]
 [0.5854794 ]
 [0.921835  ]
 [0.8908273 ]
 [0.37412447]
 [0.85578173]
 [0.35911554]
 [0.5043008 ]
 [0.4602722 ]
 [0.7887939 ]
 [0.71430486]
 [0.5824623 ]
 [0.47400293]
 [0.7626106 ]
 [0.39731818]
 [0.39681867]
 [0.28079882]
 [0.53555644]
 [0.34190837]
 [0.55484104]
 [0.40515682]
 [0.49421048]
 [0.3364206 ]
 [0.71804976]
 [0.89632165]
 [0.22754587]
 [0.4700739 ]
 [0.7745273 ]
 [0.16087689]
 [0.22672014]
 [0.8355446 ]
 [0.4568351 ]
 [0.50936085]
 [0.7896421 ]
 [0.6286575 ]
 [0.81790966]
 [0.6534319 ]
 [0.9005289 ]
 [0.786376  ]
 [0.56117505]
 [0.5235736 ]
 [0.7055119 ]
 [0.36500552]
 [0.6618679 ]
 [0.42692015]
 [0.7948441 ]
 [0.25390214]
 [0.3877735 ]
 [0.49036333]
 [0.67437106]
 [0.23031549]
 [0.79976875]
 [0.2996345 ]
 [0.9049057 ]
 [0.479109  ]
 [0.8211558 ]
 [0.52805084]
 [0.594226  ]
 [0.83106095]
 [0.5853545 ]
 [0.33579   ]
 [0.49695298]
 [0.60951   ]
 [0.39794663]
 [0.5118717 ]
 [0.6532073 ]
 [0.7271816 ]
 [0.39820278]
 [0.66155314]
 [0.7709704 ]
 [0.3689781 ]
 [0.76498526]
 [0.3952522 ]
 [0.5928684 ]
 [0.7847312 ]
 [0.755186  ]
 [0.44578898]
 [0.76340497]
 [0.3816837 ]
 [0.5955655 ]
 [0.67361015]
 [0.7707462 ]
 [0.8477395 ]
 [0.4667048 ]
 [0.2416224 ]
 [0.56083953]
 [0.7360955 ]
 [0.3842407 ]
 [0.39748034]
 [0.7379605 ]
 [0.49579504]
 [0.32042682]
 [0.58163965]
 [0.437914  ]
 [0.60518587]
 [0.7585915 ]
 [0.5343307 ]
 [0.449775  ]
 [0.8288851 ]
 [0.6622481 ]
 [0.14425522]
 [0.36460817]
 [0.54965633]
 [0.3563462 ]
 [0.44539824]
 [0.30856517]
 [0.5247021 ]
 [0.5280862 ]
 [0.796762  ]
 [0.2778471 ]
 [0.57513106]
 [0.3974484 ]
 [0.6978083 ]
 [0.6963511 ]
 [0.7044852 ]
 [0.2962951 ]
 [0.63282883]
 [0.49066746]
 [0.94800115]
 [0.6699201 ]], shape=(659, 1), dtype=float32)

Loss Function

  • 기존 MSE 대신 Cross Entropy 사용
def loss_fn(hypothesis, labels):
    """Binary cross-entropy loss between sigmoid outputs and 0/1 labels.

    The hypothesis is clipped away from exact 0 and 1 before the log so
    that tf.math.log never returns -inf — with the unclipped original,
    a saturated sigmoid output made the loss NaN.
    """
    eps = 1e-7
    h = tf.clip_by_value(hypothesis, eps, 1. - eps)
    loss = -tf.reduce_mean(labels * tf.math.log(h) +
                           (1 - labels) * tf.math.log(1 - h))
    return loss

# Plain SGD. tf.keras.optimizers.SGD is the TF2-native replacement for
# the deprecated tf.compat.v1.train.GradientDescentOptimizer and keeps
# the same apply_gradients(grads_and_vars=...) interface.
optimizer = tf.keras.optimizers.SGD(learning_rate=0.005)

학습

epochs = 5000

# Full-batch gradient descent: the dataset yields one batch per epoch.
for step in range(epochs):
  for features, labels in dataset:
    # Record only the forward pass on the tape.
    with tf.GradientTape() as tape:
      pred = logistic_regression(features)
      loss_value = loss_fn(pred, labels)
    # Differentiate and update *outside* the tape context; in the
    # original these (and the print) ran inside the `with` block, so
    # the backward/update ops were needlessly recorded on the tape.
    grads = tape.gradient(loss_value, [W, b])
    optimizer.apply_gradients(grads_and_vars=zip(grads, [W, b]))
    if step % 100 == 0:
      # Re-evaluate after the update so the printed loss is post-step.
      print("Iter: {}, Loss: {:.4f}".format(step, loss_fn(logistic_regression(features), labels)))
#Output
Iter: 0, Loss: 0.7648
Iter: 100, Loss: 0.7447
Iter: 200, Loss: 0.7296
Iter: 300, Loss: 0.7179
Iter: 400, Loss: 0.7083
Iter: 500, Loss: 0.7001
Iter: 600, Loss: 0.6929
Iter: 700, Loss: 0.6864
Iter: 800, Loss: 0.6803
Iter: 900, Loss: 0.6746
Iter: 1000, Loss: 0.6692
Iter: 1100, Loss: 0.6640
Iter: 1200, Loss: 0.6591
Iter: 1300, Loss: 0.6543
Iter: 1400, Loss: 0.6497
Iter: 1500, Loss: 0.6453
Iter: 1600, Loss: 0.6410
Iter: 1700, Loss: 0.6368
Iter: 1800, Loss: 0.6328
Iter: 1900, Loss: 0.6289
Iter: 2000, Loss: 0.6251
Iter: 2100, Loss: 0.6214
Iter: 2200, Loss: 0.6179
Iter: 2300, Loss: 0.6144
Iter: 2400, Loss: 0.6111
Iter: 2500, Loss: 0.6079
Iter: 2600, Loss: 0.6048
Iter: 2700, Loss: 0.6017
Iter: 2800, Loss: 0.5988
Iter: 2900, Loss: 0.5959
Iter: 3000, Loss: 0.5932
Iter: 3100, Loss: 0.5905
Iter: 3200, Loss: 0.5879
Iter: 3300, Loss: 0.5854
Iter: 3400, Loss: 0.5829
Iter: 3500, Loss: 0.5806
Iter: 3600, Loss: 0.5783
Iter: 3700, Loss: 0.5760
Iter: 3800, Loss: 0.5739
Iter: 3900, Loss: 0.5718
Iter: 4000, Loss: 0.5697
Iter: 4100, Loss: 0.5677
Iter: 4200, Loss: 0.5658
Iter: 4300, Loss: 0.5639
Iter: 4400, Loss: 0.5621
Iter: 4500, Loss: 0.5604
Iter: 4600, Loss: 0.5586
Iter: 4700, Loss: 0.5570
Iter: 4800, Loss: 0.5553
Iter: 4900, Loss: 0.5538

테스트

def accuracy_fn(hypothesis, labels):
    """Fraction of thresholded predictions (> 0.5) matching the labels.

    `labels` is float32 (0./1. from the CSV), so the thresholded
    predictions are cast to float32 too — the original cast them to
    int32, and tf.equal on mismatched int32/float32 operands raises a
    dtype error in TF2.
    """
    predicted = tf.cast(hypothesis > 0.5, dtype=tf.float32)
    accuracy = tf.reduce_mean(tf.cast(tf.equal(predicted, labels), dtype=tf.float32))
    return accuracy
# Score the trained model on the 100 held-out rows.
test_probs = logistic_regression(x_test)
test_acc = accuracy_fn(test_probs, y_test)
print("Testset Accuracy: {:.4f}".format(test_acc))
#Testset Accuracy: 0.7200
profile
AI, Information and Communication, Electronics, Computer Science, Bio, Algorithms

0개의 댓글