Skip to content

Commit

Permalink
replace the AdagradOptimizer, AdamaxOptimizer, AdadeltaOptimizer, RMSPr…
Browse files Browse the repository at this point in the history
…opOptimizer、LambOptimizer and Momentum (#54152)

* replace the AdadeltaOptimizer with Adadelta

* replace the RMSPropOptimizer with RMSProp

* replace the LambOptimizer with Lamb

* replace the momentum in contrib/optimizer.py with Momentum in python/paddle/optimizer/momentum.py

* fix bug

* fix bug

* fix bug

* fix bug of Lamb

* fix bug of Lamb

* fix bug of import

* replace the AdamaxOptimizer with Adamax and change the optimizer base for AdagradOptimizer

* fix bug

* fix bug

* Update optimizer.py

* fix bug

* fix bug
  • Loading branch information
longranger2 authored Jul 11, 2023
1 parent a1396a8 commit 9436585
Show file tree
Hide file tree
Showing 27 changed files with 155 additions and 2,111 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -13,8 +13,8 @@

import logging

import paddle
from paddle.fluid.optimizer import AdamOptimizer
from paddle.fluid.optimizer import LambOptimizer as LAMB

from .meta_optimizer_base import MetaOptimizerBase

Expand Down Expand Up @@ -55,14 +55,13 @@ def exclude_fn(param):

_exclude_from_weight_decay_fn = exclude_fn

self.lamb_opt = LAMB(
self.lamb_opt = paddle.optimizer.Lamb(
learning_rate=opt._learning_rate,
lamb_weight_decay=configs['lamb_weight_decay'],
beta1=opt._beta1,
beta2=opt._beta2,
epsilon=opt._epsilon,
parameter_list=opt._parameter_list,
regularization=opt.regularization,
parameters=opt._parameter_list,
grad_clip=opt._grad_clip,
exclude_from_weight_decay_fn=_exclude_from_weight_decay_fn,
name=opt._name,
Expand Down Expand Up @@ -111,7 +110,7 @@ def apply_gradients(self, params_grads):
return self.lamb_opt.apply_gradients(params_grads=params_grads)

def apply_optimize(self, loss, startup_program, params_grads):
return self.lamb_opt.apply_optimize(
return self.lamb_opt._apply_optimize(
loss, startup_program=startup_program, params_grads=params_grads
)

Expand Down
2 changes: 0 additions & 2 deletions python/paddle/fluid/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -53,7 +53,6 @@
from .initializer import set_global_initializer
from . import layers
from . import dygraph
from . import contrib
from . import optimizer
from . import backward
from .backward import gradients
Expand Down Expand Up @@ -105,7 +104,6 @@
'io',
'initializer',
'layers',
'contrib',
'dygraph',
'enable_dygraph',
'disable_dygraph',
Expand Down
22 changes: 0 additions & 22 deletions python/paddle/fluid/contrib/__init__.py

This file was deleted.

287 changes: 0 additions & 287 deletions python/paddle/fluid/contrib/optimizer.py

This file was deleted.

Loading

0 comments on commit 9436585

Please sign in to comment.