[cherry-pick] Fix amp support for fleet (#29505)
Aurelius84 authored Dec 9, 2020
1 parent 4d51cd7 commit d82d59e
Showing 2 changed files with 5 additions and 1 deletion.
python/paddle/fluid/contrib/mixed_precision/decorator.py (2 additions, 0 deletions)
@@ -66,6 +66,8 @@ def __init__(self, optimizer, amp_lists, init_loss_scaling,
         self._loss_scaling = None
         self._init_loss_scaling = init_loss_scaling
         self._use_dynamic_loss_scaling = use_dynamic_loss_scaling
+        self._learning_rate = optimizer._learning_rate
+        self._learning_rate_map = optimizer._learning_rate_map
         if self._use_dynamic_loss_scaling:
             self._incr_every_n_steps = incr_every_n_steps
             self._decr_every_n_nan_or_inf = decr_every_n_nan_or_inf
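
The two added lines mirror the wrapped optimizer's learning-rate state onto the AMP wrapper, so code that reads these attributes off the wrapper keeps working. A minimal, self-contained sketch of that forwarding pattern; the classes below are hypothetical stand-ins, not Paddle code:

# Illustrative only: hypothetical stand-ins for the real Paddle classes.

class _InnerOptimizer:
    """Stands in for a plain fluid Optimizer holding learning-rate state."""

    def __init__(self, learning_rate):
        self._learning_rate = learning_rate
        self._learning_rate_map = {}  # program -> learning-rate variable in real Paddle


class _AmpWrapper:
    """Stands in for OptimizerWithMixedPrecision."""

    def __init__(self, optimizer):
        self._optimizer = optimizer
        # The commit's addition: mirror the inner optimizer's learning-rate
        # attributes so code that reads them off the wrapper (for example
        # PipelineOptimizer under fleet) keeps working.
        self._learning_rate = optimizer._learning_rate
        self._learning_rate_map = optimizer._learning_rate_map


wrapped = _AmpWrapper(_InnerOptimizer(learning_rate=0.01))
assert wrapped._learning_rate == 0.01
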
python/paddle/fluid/optimizer.py (3 additions, 1 deletion)
@@ -3751,7 +3751,9 @@ def __init__(self, optimizer, num_microbatches=1, start_cpu_core_id=0):
         if framework.in_dygraph_mode():
             raise Exception("In dygraph, don't support PipelineOptimizer.")
         if not isinstance(optimizer, Optimizer) and not isinstance(
-                optimizer, paddle.optimizer.Optimizer):
+                optimizer, paddle.optimizer.Optimizer) and not isinstance(
+                optimizer, paddle.fluid.contrib.mixed_precision.decorator.
+                OptimizerWithMixedPrecision):
             raise ValueError("The 'optimizer' parameter for "
                              "PipelineOptimizer must be an instance of "
                              "Optimizer, but the given type is {}.".format(
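
With the relaxed isinstance check above, an optimizer decorated for mixed precision can be handed to PipelineOptimizer directly. A minimal construction sketch, assuming the fluid static-graph API of the Paddle 2.0 era; argument defaults and exact entry points may differ, and a real pipeline run still needs a device-annotated program:

import paddle
import paddle.fluid as fluid

paddle.enable_static()

sgd = fluid.optimizer.SGD(learning_rate=0.01)
# Returns an OptimizerWithMixedPrecision wrapping sgd.
amp_opt = fluid.contrib.mixed_precision.decorate(sgd, init_loss_scaling=128.0)
# Before this fix the type check rejected amp_opt with a ValueError;
# after it, the AMP-decorated optimizer is accepted.
pipe_opt = fluid.optimizer.PipelineOptimizer(amp_opt, num_microbatches=2)
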
