pytorch/caffe2/python/optimizer_context.py
Bugra Akyildiz 27c7158166 Remove __future__ imports for legacy Python2 support (#45033)
Summary:
There is a tool called `2to3` whose `future` fixer can be targeted specifically to remove these imports; the `caffe2` directory has the most redundant ones:

```2to3 -f future -w caffe2```

Pull Request resolved: https://github.com/pytorch/pytorch/pull/45033

Reviewed By: seemethere

Differential Revision: D23808648

Pulled By: bugra

fbshipit-source-id: 38971900f0fe43ab44a9168e57f2307580d36a38
2020-09-23 17:57:02 -07:00

55 lines
1.4 KiB
Python

## @package optimizer_context
# Module caffe2.python.optimizer_context
from caffe2.python import context
from caffe2.python.modifier_context import (
ModifierContext, UseModifierBase)
DEFAULT_OPTIM = 'DEFAULT'
@context.define_context(allow_default=True)
class OptimizerContext(ModifierContext):
    """
    Context object that lets param_info resolve per-name optimizers.

    Thin wrapper over ModifierContext: an "optimizer" here is just a
    modifier looked up by name through the inherited modifier table.
    """
    def has_optimizer(self, name):
        # An optimizer is registered iff a modifier exists under this name.
        return self._has_modifier(name)

    def get_optimizer(self, name):
        # Fail loudly when the requested optimizer was never registered.
        assert self._has_modifier(name), (
            "{} optimizer is not provided!".format(name))
        return self._get_modifier(name)
class UseOptimizer(UseModifierBase):
    '''
    Context-manager class for setting the active OptimizerContext.

    With brew, pass either a single optimizer or a dict mapping
    parameter-group names to optimizers:
    - with UseOptimizer(optim):
        brew.func
    - with UseOptimizer({'WEIGHT': weight_optim}):
        brew.func
    - with UseOptimizer({'DEFAULT': optim, 'BIAS': bias_optim,
                        'WEIGHT': weight_optim}):
        brew.func

    Contexts nest; the innermost one wins:
    - with UseOptimizer(optim1):
        brew.func
        with UseOptimizer(optim2):
            brew.func

    With layers, look the optimizer up by name from the current context:
        optimizers = {'optim1': optim1, 'optim2': optim2}
        with Optimizers(optimizers):
            optim = OptimizerContext.current().get_optimizer('optim1')
            layer(optim=optim)
    '''
    def _context_class(self):
        # Hook consumed by UseModifierBase: which context type to manage.
        return OptimizerContext