Skip to content

Commit

Permalink
Add Autograd.
Browse files Browse the repository at this point in the history
  • Loading branch information
Mansterteddy committed Nov 6, 2020
1 parent ee9cb7d commit cc9ab0b
Show file tree
Hide file tree
Showing 5 changed files with 64 additions and 1 deletion.
5 changes: 4 additions & 1 deletion pytorch/Basic/Autograd/autograd.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,9 +2,12 @@
from matplotlib import pyplot as plt

# 100 evenly spaced sample points on [0, 5]; requires_grad=True makes x a
# leaf tensor that autograd can differentiate with respect to.
x = torch.linspace(0, 5, 100, requires_grad=True)

# Elementwise y = cos(x^2), built through differentiable torch ops.
y = (x**2).cos()

print(x)
print(y)
print(y.sum())

# torch.autograd.grad returns one gradient per requested input; [0] selects
# the gradient w.r.t. x. Since y.sum() is a scalar, this is dy/dx evaluated
# at every sample point: d/dx cos(x^2) = -2x * sin(x^2).
dydx = torch.autograd.grad(y.sum(), [x])[0]

plt.plot(x.detach(), y.detach(), label='y')
Expand Down
16 changes: 16 additions & 0 deletions pytorch/Basic/Autograd/autograd_1.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,16 @@
import torch
from matplotlib import pyplot as plt

# 100 evenly spaced samples on [0, 5]; track gradients w.r.t. x.
x = torch.linspace(0, 5, 100, requires_grad=True)
# Cubic y(x) = x^3 - 6x^2 + 8x.
y = x**3 - 6 * x**2 + 8 * x

for tensor in (x, y, y.sum()):
    print(tensor)

# autograd.grad returns one gradient per input; unpack the single entry.
# Since y.sum() is scalar, this is dy/dx = 3x^2 - 12x + 8 at each sample.
(dydx,) = torch.autograd.grad(y.sum(), [x])

# Plot the curve and its autograd-computed derivative over the same samples.
sample_xs = x.detach()
plt.plot(sample_xs, y.detach(), label='y')
plt.plot(sample_xs, dydx, label='dy/dx')
plt.legend()
plt.show()
13 changes: 13 additions & 0 deletions pytorch/Basic/Autograd/backprop.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,13 @@
import torch
from matplotlib import pyplot as plt

# Leaf tensor on [0, 5]; its .grad field starts out unset.
x = torch.linspace(0, 5, 100, requires_grad=True)
print(x.grad)  # None — no backward pass has run yet
# Elementwise y = cos(x^2).
y = torch.cos(x**2)
# Reduce to a scalar and backprop: accumulates d(sum y)/dx into x.grad.
total = y.sum()
total.backward()
print(x.grad)  # now holds dy/dx = -2x * sin(x^2)

# Overlay the function and the gradient that backward() populated.
xs = x.detach()
plt.plot(xs, y.detach(), label='y')
plt.plot(xs, x.grad, label='dy/dx')
plt.legend()
plt.show()
14 changes: 14 additions & 0 deletions pytorch/Basic/Autograd/backprop_1.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,14 @@
import torch
from matplotlib import pyplot as plt

# Two independent leaf tensors over the same grid [0, 5].
x = torch.linspace(0, 5, 100, requires_grad=True)
x2 = torch.linspace(0, 5, 100, requires_grad=True)

# y mixes both leaves: dy/dx = -12x + 8 and dy/dx2 = 3 * x2^2.
y = x2**3 - 6 * x**2 + 8 * x
# A single backward pass fills the .grad of every contributing leaf.
loss = y.sum()
loss.backward()

# Plot each leaf's gradient against its own (detached) sample points.
for axis_vals, curve_vals, tag in (
    (x.detach(), y.detach(), 'y'),
    (x.detach(), x.grad, 'dy/dx'),
    (x2.detach(), x2.grad, 'dy/dx2'),
):
    plt.plot(axis_vals, curve_vals, label=tag)
plt.legend()
plt.show()
17 changes: 17 additions & 0 deletions pytorch/Basic/Autograd/backprop_2.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,17 @@
import torch
from matplotlib import pyplot as plt

# Double backward: differentiate y, then differentiate the gradient itself.
x = torch.linspace(0, 5, 100, requires_grad=True)

y = x**3 - 6 * x**2 + 8 * x

# create_graph=True records the backward computation, so the resulting
# gradient is itself differentiable.
scalar = y.sum()
scalar.backward(create_graph=True)

# Snapshot the first derivative (3x^2 - 12x + 8), then reuse x.grad for the
# second pass: gradients accumulate, so it must be zeroed first.
dy = x.grad.clone()
x.grad.zero_()
dy.sum().backward()  # leaves d2y/dx2 = 6x - 12 in x.grad

# Plot the cubic alongside both derivatives. The previously commented-out
# line plotted x.grad under the label 'dy/dx', but by this point x.grad holds
# the SECOND derivative — the first derivative was saved in `dy` before
# x.grad was zeroed, so plot that instead. The live label is also fixed from
# the malformed 'd2y/dx' to 'd2y/dx2', matching the dy/dx naming used by the
# sibling scripts.
plt.plot(x.detach(), y.detach(), label='y')
plt.plot(x.detach(), dy.detach(), label='dy/dx')        # first derivative: 3x^2 - 12x + 8
plt.plot(x.detach(), x.grad.detach(), label='d2y/dx2')  # second derivative: 6x - 12
plt.legend()
plt.show()

0 comments on commit cc9ab0b

Please sign in to comment.