import tensorflow.compat.v1 as tf

tf.disable_v2_behavior()

# Training data: two features per sample (hours studied).
x_data = [[1, 2], [2, 3], [3, 1], [4, 3], [5, 3], [6, 2]]
# Labels: 0 = fail, 1 = pass.
y_data = [[0], [0], [0], [1], [1], [1]]

# Placeholders for a variable-sized batch of 2-feature inputs and 1-dim labels.
X = tf.placeholder(tf.float32, shape=[None, 2])
Y = tf.placeholder(tf.float32, shape=[None, 1])
W = tf.Variable(tf.random_normal([2, 1]), name='weight')
b = tf.Variable(tf.random_normal([1]), name='bias')

# Logistic hypothesis: sigmoid(X @ W + b), a probability in (0, 1).
hypothesis = tf.sigmoid(tf.matmul(X, W) + b)

# Binary cross-entropy cost. Clip the sigmoid output away from exactly
# 0 and 1 so tf.log never receives 0, which would produce -inf cost and
# NaN gradients once the model saturates.
eps = 1e-7
clipped = tf.clip_by_value(hypothesis, eps, 1.0 - eps)
cost = -tf.reduce_mean(Y * tf.log(clipped) + (1 - Y) * tf.log(1 - clipped))

# Plain gradient descent on the cross-entropy cost.
train = tf.train.GradientDescentOptimizer(learning_rate=0.01).minimize(cost)

# Predicted class: 1.0 when the probability exceeds 0.5, else 0.0.
predicted = tf.cast(hypothesis > 0.5, dtype=tf.float32)
# Accuracy: fraction of predictions that match the labels.
accuracy = tf.reduce_mean(tf.cast(tf.equal(predicted, Y), dtype=tf.float32))

# Training loop.
with tf.Session() as sess:
    # Initialize W and b before any run of the graph.
    sess.run(tf.global_variables_initializer())
    for step in range(10001):
        cost_val, _ = sess.run([cost, train], feed_dict={X: x_data, Y: y_data})
        if step % 200 == 0:
            print(step, cost_val)
    # Report the fit on the training data.
    h, c, a = sess.run([hypothesis, predicted, accuracy],
                       feed_dict={X: x_data, Y: y_data})
    print("\nHypothesis: ", h, "\nCorrect (Y): ", c, "\nAccuracy: ", a)
References
[1] - https://www.youtube.com/watch?v=2FeWGgnyLSw&list=PLlMkM4tgfjnLSOjrEJN31gZATbcj_MpUm&index=13
Other posts in the 'YouTube > Machine Learning AI basics (video deleted)' category:

| Post | Comments | Date |
|---|---|---|
| 14 - multinomial logistic classification practice | (18) | 2020.02.14 |
| 13 - multinomial logistic classification | (0) | 2020.02.14 |
| 11 - logistic classification cost | (0) | 2020.02.10 |
| 10 - Logistic Classification | (0) | 2020.02.10 |
| 9 - regression kaggle practice | (0) | 2020.01.31 |