def forward(x):
    """Linear model prediction: scale the input by the global weight ``w``."""
    prediction = w * x
    return prediction
def loss(y, y_predicted):
    """Mean squared error between targets ``y`` and predictions ``y_predicted``."""
    squared_error = (y_predicted - y) ** 2
    return squared_error.mean()
requires_grad — enabling gradient tracking on a tensor
# Create a tensor with gradient tracking enabled: autograd will record every
# operation performed on it so gradients can be computed later via backward().
x = torch.randn(3, requires_grad=True)
print(x)
# tensor([-0.2124, -0.1003, -0.3773], requires_grad=True)
requires_grad — operations on tracked tensors build the computation graph
设定 requires_grad=True 之後,在後续的计算过程中,每个运算都会自动产生对应的 backward function
# Every op on a tracked tensor attaches a grad_fn node (the backward function
# for that op) to its result — visible in the printed repr below.
y = x + 2
print(y)
# tensor([1.7876, 1.8997, 1.6227], grad_fn=<AddBackward0>)
z = y * y * 2
# tensor([6.3911, 7.2174, 5.2663], grad_fn=<MulBackward0>)
z = z.mean()  # reduce to a scalar so backward() needs no gradient argument
print(z)
# tensor(6.2916, grad_fn=<MeanBackward0>)
tensor.backward() — computing gradients
呼叫 z.backward() 时,就等於计算了 dz/dx,结果会累积到 x.grad
# Backpropagate from the scalar z: autograd walks the recorded graph and
# accumulates dz/dx into x.grad.
z.backward()
print(x.grad)
# tensor([2.3835, 2.5329, 2.1636])
import torch

# Three ways to stop autograd from tracking operations on a tensor.
x = torch.randn(3, requires_grad=True)
print(x)
# e.g. tensor([-0.2124, -0.1003, -0.3773], requires_grad=True)

# Option 1: x.requires_grad_(False) — turns tracking off in place.
x.requires_grad_(False)
print(x)
# e.g. tensor([-0.2124, -0.1003, -0.3773])

# Option 2: x.detach() — returns a new tensor that shares the same data
# but is excluded from the computation graph.
y = x.detach()
print(y)
# e.g. tensor([-0.2124, -0.1003, -0.3773])

# Option 3: with torch.no_grad(): — suspends tracking inside the block.
# Re-enable tracking first, otherwise neither print would show a grad_fn
# and the contrast this example demonstrates would be lost.
x.requires_grad_(True)
y = x + 2
print(y)
# e.g. tensor([1.7876, 1.8997, 1.6227], grad_fn=<AddBackward0>)
with torch.no_grad():
    y = x + 2
    print(y)
    # e.g. tensor([1.7876, 1.8997, 1.6227])  — no grad_fn inside no_grad

# x.grad stays None until backward() has run at least once, so guard before
# zeroing: calling .zero_() on None raises AttributeError.
if x.grad is not None:
    x.grad.zero_()
<<: Day 17. 计算属性(Computed) VS 方法(Methods)
>>: Day 15. 常见模板 Template OS Linux / Windows by Zabbix agent 介绍
1.前言 今天要讲解如标题一样,Google Sheet是Google所开发的试算表,所以我们要用G...
while回圈计算1加到100: for回圈计算1加到100: break跟continue的范例:...
如果用非常概括性的说法来描述 CPU 的组成,其实就是数以亿计的电晶体组合而成的。那麽电晶体又是怎麽...
提到资料库特性势必要先了解SQL(关联式资料库)vs.NoSQL(非关联式资料库)之间的差异,在应...
终於度过前面枯燥乏味的内容了...(但它们都很重要,也与今天的主题有关) 今天要来进入重点项目 我们...