
Commit 3a07285

Commit message: update

committed · 1 parent b61bf0a · commit 3a07285

File tree: 1 file changed (+3, −0 lines)


ann_class2/pytorch_example.py (3 additions, 0 deletions)
@@ -64,10 +64,12 @@
 # so we encapsulate it in a function
 # Note: inputs and labels are torch tensors
 def train(model, loss, optimizer, inputs, labels):
+    # https://discuss.pytorch.org/t/why-is-it-recommended-to-wrap-your-data-with-variable-each-step-of-the-iterations-rather-than-before-training-starts/12683
     inputs = Variable(inputs, requires_grad=False)
     labels = Variable(labels, requires_grad=False)

     # Reset gradient
+    # https://discuss.pytorch.org/t/why-do-we-need-to-set-the-gradients-manually-to-zero-in-pytorch/4903/7
     optimizer.zero_grad()

     # Forward
@@ -81,6 +83,7 @@ def train(model, loss, optimizer, inputs, labels):
     optimizer.step()

     # what's the difference between backward() and step()?
+    # https://discuss.pytorch.org/t/what-does-the-backward-function-do/9944
     return output.item()
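For context, here is a minimal, self-contained sketch of the training step these new comments annotate. It assumes a standard PyTorch setup: the model, loss, and data below (nn.Linear, CrossEntropyLoss, random tensors) are placeholders for illustration, not the actual contents of pytorch_example.py, and on PyTorch 0.4+ the Variable wrappers shown in the diff are unnecessary because plain tensors carry autograd state.

import torch
from torch import nn, optim

def train(model, loss, optimizer, inputs, labels):
    # Reset gradient: .grad buffers accumulate across calls to backward(),
    # so they must be cleared before each new backward pass.
    optimizer.zero_grad()

    # Forward pass and loss
    logits = model(inputs)
    output = loss(logits, labels)

    # backward() computes d(loss)/d(parameter) and stores it in each
    # parameter's .grad; step() then updates the parameters using those grads.
    output.backward()
    optimizer.step()

    return output.item()

# Hypothetical usage with made-up shapes, just to show the call:
model = nn.Linear(4, 3)
criterion = nn.CrossEntropyLoss()
opt = optim.SGD(model.parameters(), lr=0.1)
X = torch.randn(8, 4)
Y = torch.randint(0, 3, (8,))
print(train(model, criterion, opt, X, Y))

The ordering is the point the linked threads make: zero_grad() comes before backward() because gradients accumulate across backward calls, and step() comes after backward() because it consumes the .grad buffers that backward filled.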