`tf.GradientTape` records every operation executed inside its context onto a "tape", so gradients can be computed afterwards.

import tensorflow as tf
# By default the tape only differentiates with respect to tf.Variable objects.
x = tf.Variable(3.0)

# Every operation executed inside the context is recorded on the tape.
with tf.GradientTape() as tape:
    y = x**2

# dy/dx = 2x, evaluated at x = 3.0
dy_dx = tape.gradient(y, x)
dy_dx.numpy()  # 6.0
import tensorflow as tf

w = tf.Variable(tf.random.normal((3, 2)), name='w')
b = tf.Variable(tf.zeros(2, dtype=tf.float32), name='b')
x = [[1., 2., 3.]]

# persistent=True allows tape.gradient() to be called more than once
# on the same tape.
with tf.GradientTape(persistent=True) as tape:
    y = x @ w + b
    loss = tf.reduce_mean(y**2)

# Gradients of the scalar loss with respect to both variables at once.
[dl_dw, dl_db] = tape.gradient(loss, [w, b])
# Example output (exact values vary: w is randomly initialized):
# [<tf.Tensor: shape=(3, 2), dtype=float32, numpy=
#  array([[-0.05410123, -0.9976089 ],
#         [-0.10820246, -1.9952178 ],
#         [-0.16230369, -2.9928267 ]], dtype=float32)>,
#  <tf.Tensor: shape=(2,), dtype=float32, numpy=array([-0.05410123, -0.9976089 ], dtype=float32)>]
The tape records only `tf.Variable` objects, and only when they are trainable — the `trainable` flag controls whether gradients are recorded for a variable.
# A trainable variable: watched by the tape automatically.
x0 = tf.Variable(3.0, name='x0')
# Not trainable: not watched (gradient will be None).
x1 = tf.Variable(3.0, name='x1', trainable=False)
# Not a variable: variable + tensor returns a plain tensor, which is not watched.
x2 = tf.Variable(2.0, name='x2') + 1.0
# Not a variable: constants are not watched by default.
x3 = tf.constant(3.0, name='x3')

with tf.GradientTape() as tape:
    y = (x0**2) + (x1**2) + (x2**2)

# Only x0 yields a gradient; the rest come back as None.
grad = tape.gradient(y, [x0, x1, x2, x3])
for g in grad:
    print(g)
# Output:
# tf.Tensor(6.0, shape=(), dtype=float32)
# None
# None
# None

# Inspect which variables the tape actually recorded.
tape.watched_variables()
Reference
1) Zero Base Data School lecture materials