[SOT]Remove unused UT logic in test_sot_resnet50_backward #59439

Merged
merged 1 commit on Nov 28, 2023
22 changes: 0 additions & 22 deletions test/sot/test_sot_resnet50_backward.py
@@ -63,37 +63,15 @@ def run_symbolic_optimizer(inp):
    return loss


def run_to_static_optimizer(inp):
    """to_static train + SGD optimizer"""
    paddle.seed(2021)
    np.random.seed(2021)
    random.seed(2021)
    net = resnet50()
    net = paddle.jit.to_static(net, enable_fallback=False)
    optimizer = paddle.optimizer.SGD(
        learning_rate=0.03, parameters=net.parameters()
    )
    for i in range(5):
        optimizer.clear_grad()
        loss = execute_time(net)(inp)
        loss.backward()
        optimizer.step()
    return loss


class TestBackward(unittest.TestCase):
    def test(self):
        # TODO(xiongkun) add cache to speedup !
        paddle.seed(2021)
        np.random.seed(2021)
        random.seed(2021)
        inp = paddle.rand((3, 3, 255, 255))
        print("Start Run SymbolicTranslate:")
        out2 = run_symbolic_optimizer(inp)[0].numpy()
        print("Start Run Dygraph:")
        out1 = run_dygraph_optimizer(inp)[0].numpy()
        print("Start Run To Static:")
        out1 = run_to_static_optimizer(inp)[0].numpy()
        assert_array_equal(
            out1, out2, "Not Equal in dygraph and static graph", True
        )