Mirror of https://github.com/zebrajr/pytorch.git (synced 2025-12-06 12:20:52 +01:00)
Summary: `ModifierContext` is the base class for `OptimizerContext` and `RegularizationContext`; `UseModifierBase` is the base class for `UseRegularizer` and `UseOptimizer`.

Most of the code in `OptimizerContext`, `RegularizationContext`, and any future Context classes can be shared. We therefore implemented a new base class, `ModifierContext`, to hold the shared logic. The same holds for `UseRegularizer` and `UseOptimizer`, for which we implemented the base class `UseModifierBase`. With this design, users only need to provide the **get** and **has** operations and specify the **context class**.

**Note**: Mirrored code in fbandroid and fbobj will be added at final check-in.

Reviewed By: kittipatv, xianjiec

Differential Revision: D5724613

fbshipit-source-id: de19bb822dcd41ec5c459d65065603a0abe2fd20
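To make the split described in the summary concrete, here is a minimal sketch of what the shared bases in `caffe2.python.modifier_context` could look like. Only `_has_modifier`, `_get_modifier`, `_context_class`, `.current()`, and the `'DEFAULT'` key are confirmed by the summary and the file below; the stack bookkeeping and the `push_modifiers`/`pop_modifiers`/`DEFAULT_MODIFIER` names are assumptions for illustration.

# Hedged sketch of caffe2/python/modifier_context.py, inferred from the
# summary above and the optimizer_context.py file below; method names
# beyond _has_modifier/_get_modifier/_context_class are assumptions.
DEFAULT_MODIFIER = 'DEFAULT'


class ModifierContext(object):
    """
    Tracks a stack of {name: modifier} dicts; later pushes override earlier ones.
    """

    def __init__(self):
        self._modifiers = []       # stack of dicts, one per active `with` block
        self._modifiers_dict = {}  # flattened view used for lookups

    def _rebuild_modifiers(self):
        self._modifiers_dict = {}
        for m in self._modifiers:
            self._modifiers_dict.update(m)

    def _has_modifier(self, name):
        return name in self._modifiers_dict

    def _get_modifier(self, name):
        return self._modifiers_dict.get(name)

    def push_modifiers(self, modifiers):
        self._modifiers.append(modifiers)
        self._modifiers_dict.update(modifiers)

    def pop_modifiers(self):
        assert len(self._modifiers) > 0
        self._modifiers.pop()
        self._rebuild_modifiers()


class UseModifierBase(object):
    """
    Context-manager base; subclasses only say which ModifierContext to use.
    """

    def __init__(self, modifier_or_dict):
        if isinstance(modifier_or_dict, dict):
            self._modifiers = modifier_or_dict
        else:
            # a bare modifier becomes the default one
            self._modifiers = {DEFAULT_MODIFIER: modifier_or_dict}

    def _context_class(self):
        # subclasses return their ModifierContext subclass here
        raise NotImplementedError

    def __enter__(self):
        # .current() is provided by caffe2.python.context's define_context
        self._context_class().current().push_modifiers(self._modifiers)
        return self

    def __exit__(self, type, value, traceback):
        self._context_class().current().pop_modifiers()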
55 lines
1.6 KiB
Python
## @package optimizer_context
# Module caffe2.python.optimizer_context
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals

from caffe2.python import context
from caffe2.python.modifier_context import (
    ModifierContext, UseModifierBase)


DEFAULT_OPTIM = 'DEFAULT'


@context.define_context(allow_default=True)
class OptimizerContext(ModifierContext):
    """
    Provide context to allow param_info to have different optimizers.
    """

    def has_optimizer(self, name):
        return self._has_modifier(name)

    def get_optimizer(self, name):
        assert self.has_optimizer(name), (
            "{} optimizer is not provided!".format(name))
        return self._get_modifier(name)


class UseOptimizer(UseModifierBase):
    '''
    Context class to allow setting the current context.

    Example usage with brew:
        - with UseOptimizer(optim):
            brew.func
        - with UseOptimizer({'WEIGHT': weight_optim}):
            brew.func
        - with UseOptimizer({'DEFAULT': optim, 'BIAS': bias_optim,
                             'WEIGHT': weight_optim}):
            brew.func
        - with UseOptimizer(optim1):
            brew.func
            with UseOptimizer(optim2):
                brew.func

    Example usage with layer:
        optimizers = {'optim1': optim1, 'optim2': optim2}
        with UseOptimizer(optimizers):
            optim = OptimizerContext.current().get_optimizer('optim1')
            layer(optim=optim)
    '''
    def _context_class(self):
        return OptimizerContext
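Per the summary, `RegularizationContext` and `UseRegularizer` are built on the same bases. As an illustration of the claim that a new Context class only needs the **get**/**has** wrappers plus its context class, here is a hedged sketch of that counterpart; the summary confirms these classes exist, but this exact body is modeled on `OptimizerContext`/`UseOptimizer` above, not copied from the shipped code.

# Illustrative sketch only: inferred from the pattern above, not the
# actual regularizer_context module.
@context.define_context(allow_default=True)
class RegularizationContext(ModifierContext):
    """
    Provide context to allow param_info to have different regularizers.
    """

    def has_regularizer(self, name):
        return self._has_modifier(name)

    def get_regularizer(self, name):
        assert self.has_regularizer(name), (
            "{} regularizer is not provided!".format(name))
        return self._get_modifier(name)


class UseRegularizer(UseModifierBase):
    def _context_class(self):
        return RegularizationContext


# Usage mirrors UseOptimizer above (weight_reg is a placeholder regularizer):
#     with UseRegularizer({'WEIGHT': weight_reg}):
#         reg = RegularizationContext.current().get_regularizer('WEIGHT')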