
Commit

Update 0523
Add style_transfer and optim.
Zhang Yuan committed May 23, 2017
1 parent cc4129f commit e17cf67
Showing 13 changed files with 950 additions and 0 deletions.
4 changes: 4 additions & 0 deletions optim/.idea/misc.xml


8 changes: 8 additions & 0 deletions optim/.idea/modules.xml


12 changes: 12 additions & 0 deletions optim/.idea/optim.iml


322 changes: 322 additions & 0 deletions optim/.idea/workspace.xml


52 changes: 52 additions & 0 deletions optim/main.py
@@ -0,0 +1,52 @@
"""
Optim x^2 + xy + y^2 +4
"""

import numpy as np

import torch
import torch.nn as nn
from torch.autograd import Variable
import torch.optim as optim

# Define my own loss function: forward() evaluates f(x, y) = x^2 + x*y + y^2 + 4
class Optim_Function(nn.Module):

    def __init__(self):
        super(Optim_Function, self).__init__()

    def forward(self, x):
        self.loss = torch.add(torch.add(torch.add(torch.pow(x[0], 2), torch.mul(x[0], x[1])), torch.pow(x[1], 2)), Variable(torch.Tensor([4])))
        return self.loss

    def backward(self):
        self.loss.backward()

# Starting point (x, y) = (300, 400); gradients are tracked for the manual check below.
xy = Variable(torch.Tensor([300, 400]), requires_grad=True)

loss = torch.add(torch.add(torch.add(torch.pow(xy[0], 2), torch.mul(xy[0], xy[1])), torch.pow(xy[1], 2)), Variable(torch.Tensor([4])))

print "loss: ", loss
loss.backward()
print(xy.grad)

criterion = Optim_Function()
print "res: ", criterion(xy)

# Re-wrap xy as an nn.Parameter so the optimizer can update it.
xy = nn.Parameter(xy.data)
optimizer = optim.SGD([xy], lr=0.1)
#optimizer = optim.LBFGS([xy])

# Keep the step counter in a one-element list so the closure can update it.
run = [0]
while run[0] <= 100:

    # The closure recomputes the loss and its gradients; optimizer.step(closure)
    # re-evaluates it each step (LBFGS in particular may call it several times).
    def closure():
        optimizer.zero_grad()
        loss = torch.add(torch.add(torch.add(torch.pow(xy[0], 2), torch.mul(xy[0], xy[1])), torch.pow(xy[1], 2)),
                         Variable(torch.Tensor([4])))
        loss.backward()
        print "loss: ", loss.data[0]
        run[0] += 1
        return loss

    optimizer.step(closure)
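
For reference, a minimal sketch of the same optimization on current PyTorch (where Variable is merged into Tensor and Python 3 print is used) might look like the snippet below. It mirrors the names in main.py but is illustrative only and not part of this commit.

import torch

# Objective from main.py: f(x, y) = x^2 + x*y + y^2 + 4
def f(xy):
    return xy[0] ** 2 + xy[0] * xy[1] + xy[1] ** 2 + 4.0

xy = torch.tensor([300.0, 400.0], requires_grad=True)
optimizer = torch.optim.SGD([xy], lr=0.1)
# optimizer = torch.optim.LBFGS([xy])  # LBFGS also works; it may call the closure several times per step

for step in range(100):
    def closure():
        optimizer.zero_grad()   # clear gradients from the previous step
        loss = f(xy)            # recompute the objective
        loss.backward()         # accumulate gradients into xy.grad
        return loss
    loss = optimizer.step(closure)
    if step % 10 == 0:
        print("step", step, "loss", float(loss))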
4 changes: 4 additions & 0 deletions style_transfer/.idea/misc.xml


8 changes: 8 additions & 0 deletions style_transfer/.idea/modules.xml


12 changes: 12 additions & 0 deletions style_transfer/.idea/style_transfer.iml


280 changes: 280 additions & 0 deletions style_transfer/.idea/workspace.xml


Binary file added style_transfer/images/dancing.jpg
Binary file added style_transfer/images/figure_1.png
Binary file added style_transfer/images/picasso.jpg
