# 미분을 자동으로 해준다 (autograd: automatic differentiation)
# Autograd demo: build a small computation graph and backpropagate through it.
import torch

# Leaf tensor that tracks gradients.
x = torch.FloatTensor([[1, 2], [3, 4]]).requires_grad_(True)

x1 = x + 2
x2 = x - 2
x3 = x1 * x2      # elementwise (x + 2) * (x - 2) == x**2 - 4
y = x3.sum()      # reduce to a scalar so backward() needs no grad argument

y.backward()      # populates x.grad with dy/dx == 2 * x
# Manual gradient descent on MSE loss: drive a random tensor x toward `target`.
import torch
import torch.nn.functional as F   # fixed: was "impoty" (syntax error)

target = torch.FloatTensor([[.1, .2, .3],
                            [.4, .5, .6],
                            [.7, .8, .9]])

# Start from a random x of the same shape/dtype as target.
x = torch.rand_like(target)
# Track gradients so loss.backward() can populate x.grad.
x.requires_grad = True

loss = F.mse_loss(x, target)

threshold = 1e-5
learning_rate = 1
iter_cnt = 0

# Each step shrinks the error by a constant factor (grad of MSE is
# 2*(x - target)/numel), so this loop terminates quickly.
while loss > threshold:
    iter_cnt += 1
    loss.backward()  # compute d(loss)/dx into x.grad

    # Gradient-descent update. The subtraction creates a fresh non-leaf
    # tensor, so detach it in place and re-enable gradient tracking
    # before the next backward pass.
    x = x - learning_rate * x.grad
    x.detach_()
    x.requires_grad_(True)

    loss = F.mse_loss(x, target)
    print("%d-th Loss : %.4e" % (iter_cnt, loss))

print(x)  # fixed: was print(X) — NameError, the variable is lowercase x