## @package optimizer_context
# Module caffe2.python.optimizer_context
from caffe2.python import context
from caffe2.python.modifier_context import (
    ModifierContext, UseModifierBase)

DEFAULT_OPTIM = 'DEFAULT'

class OptimizerContext(ModifierContext, context.DefaultManaged):
    """
    provide context to allow param_info to have different optimizers
    """

    def has_optimizer(self, name):
        return self._has_modifier(name)

    def get_optimizer(self, name):
        assert self.has_optimizer(name), (
            "{} optimizer is not provided!".format(name))
        return self._get_modifier(name)

class UseOptimizer(UseModifierBase):
    '''
    context class to allow setting the current optimizer context.

    Example usage with brew:
        - with UseOptimizer(optim):
              brew.func
        - with UseOptimizer({'WEIGHT': weight_optim}):
              brew.func
        - with UseOptimizer({'DEFAULT': optim, 'BIAS': bias_optim,
                             'WEIGHT': weight_optim}):
              brew.func
        - with UseOptimizer(optim1):
              brew.func
              with UseOptimizer(optim2):
                  brew.func

    Example usage with layer:
        optimizers = {'optim1': optim1, 'optim2': optim2}
        with UseOptimizer(optimizers):
            optim = OptimizerContext.current().get_optimizer('optim1')
            layer(optim=optim)
    '''

    def _context_class(self):
        return OptimizerContext
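

# Illustrative usage sketch (an editorial addition, not part of the upstream
# caffe2 module): demonstrates how UseOptimizer installs named optimizers on
# the current OptimizerContext, and how a nested context shadows and then
# restores an entry. Plain strings stand in for real optimizer objects (e.g.
# those built by caffe2.python.optimizer); the context itself is agnostic to
# the payload type.
if __name__ == '__main__':
    with UseOptimizer({'DEFAULT': 'sgd_optim', 'WEIGHT': 'weight_optim'}):
        ctx = OptimizerContext.current()
        assert ctx.has_optimizer('WEIGHT')
        assert ctx.get_optimizer('WEIGHT') == 'weight_optim'

        # A nested UseOptimizer overrides 'WEIGHT' only within its block.
        with UseOptimizer({'WEIGHT': 'adam_optim'}):
            inner = OptimizerContext.current()
            assert inner.get_optimizer('WEIGHT') == 'adam_optim'

        # Leaving the inner block restores the outer binding.
        assert ctx.get_optimizer('WEIGHT') == 'weight_optim'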