Code
import tensorflow as tf
# Define the linear regression model: Wx + b
W = tf.Variable(tf.random_normal([1]),name="W")
b = tf.Variable(tf.random_normal([1]),name="b")
X = tf.placeholder(dtype=tf.float32,name="X") # placeholder for the input data
Y = tf.placeholder(dtype=tf.float32,name="Y")
epoch = 1000
# Hypothesis
Linear_model = W * X + b
# Define the cost function: mean of (hypothesis - Y) squared
loss = tf.reduce_mean(tf.square(Linear_model - Y))
# Use gradient descent to minimize the loss
optimizer = tf.train.GradientDescentOptimizer(learning_rate=0.01)
train_step = optimizer.minimize(loss)
# Define the training data
x_train = [ 2,3,4,5,6,7,8,9,10 ]
y_train = [ 3,4,5,6,7,8,9,10,11]
# Open a session and run tf.global_variables_initializer()
sess = tf.Session()
sess.run(tf.global_variables_initializer()) # assigns initial values to the Variables W and b
# Record summary information for TensorBoard
tf.summary.scalar('loss',loss)
merged = tf.summary.merge_all() # merge all summaries into a single op
tensorboard_write = tf.summary.FileWriter('./tensorboard_log',sess.graph) # write summaries (and the graph) to a log file
# Train the linear regression model for `epoch` iterations
for i in range(epoch):
    cost, _ = sess.run([loss,train_step], feed_dict={X:x_train, Y:y_train})
    summary = sess.run(merged, feed_dict={X:x_train, Y:y_train})
    tensorboard_write.add_summary(summary, i)
    if i % 50 == 0:
        print("Step : {}\tLoss : {}".format(i, cost))
# Test the model
x_test = [12,3.5,96,100]
# Expected: [13, 4.5, 97, 101]
print(sess.run(Linear_model, feed_dict={X:x_test}))
sess.close()
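For intuition, the update that optimizer.minimize(loss) applies on every step can also be written out by hand. The sketch below is a hypothetical NumPy version of the same gradient descent on the mean squared error; NumPy and these variable names are illustrative additions, not part of the code above.

import numpy as np

# Illustrative sketch: manual gradient descent on mean((W*x + b - y)^2),
# mirroring GradientDescentOptimizer(learning_rate=0.01).minimize(loss).
x = np.array([2, 3, 4, 5, 6, 7, 8, 9, 10], dtype=np.float32)
y = np.array([3, 4, 5, 6, 7, 8, 9, 10, 11], dtype=np.float32)
W, b, lr = np.random.randn(), np.random.randn(), 0.01
for i in range(1000):
    error = W * x + b - y          # hypothesis minus target
    dW = 2.0 * np.mean(error * x)  # d(loss)/dW
    db = 2.0 * np.mean(error)      # d(loss)/db
    W -= lr * dW                   # W <- W - lr * d(loss)/dW
    b -= lr * db                   # b <- b - lr * d(loss)/db
print(W, b)  # both approach 1.0, since the training data follow y = x + 1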
Output
Step : 0 Loss : 0.4130171537399292
Step : 50 Loss : 0.260214626789093
Step : 100 Loss : 0.19144833087921143
Step : 150 Loss : 0.14085480570793152
Step : 200 Loss : 0.10363136231899261
Step : 250 Loss : 0.07624498009681702
Step : 300 Loss : 0.05609585717320442
Step : 350 Loss : 0.04127151146531105
Step : 400 Loss : 0.03036479465663433
Step : 450 Loss : 0.022340409457683563
Step : 500 Loss : 0.016436535865068436
Step : 550 Loss : 0.012092893011868
Step : 600 Loss : 0.008897127583622932
Step : 650 Loss : 0.006545921787619591
Step : 700 Loss : 0.004816039931029081
Step : 750 Loss : 0.0035433073062449694
Step : 800 Loss : 0.0026069271843880415
Step : 850 Loss : 0.001918009016662836
Step : 900 Loss : 0.001411122502759099
Step : 950 Loss : 0.001038226648233831
[ 13.048492 4.4646196 97.87736 101.916824 ]
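To inspect the logged loss curve, point TensorBoard at the directory the FileWriter wrote to, e.g. run tensorboard --logdir=./tensorboard_log and open the address it prints (typically http://localhost:6006). The SCALARS tab shows the 'loss' value recorded at each step, and the GRAPHS tab shows the graph passed to the FileWriter.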