Summary: There is a tool called `2to3` whose `future` fixer you can target specifically to remove these redundant `from __future__` imports; the `caffe2` directory has the most of them:

```
2to3 -f future -w caffe2
```

Pull Request resolved: https://github.com/pytorch/pytorch/pull/45033
Reviewed By: seemethere
Differential Revision: D23808648
Pulled By: bugra
fbshipit-source-id: 38971900f0fe43ab44a9168e57f2307580d36a38
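For context, the `future` fixer simply drops `from __future__ import ...` statements, which are no-ops on Python 3. A minimal sketch of the effect on a hypothetical module header (the file contents below are illustrative, not the actual diff):

```
# Before running the fixer:
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import abc

# After `2to3 -f future -w <file>`, only the real import remains:
import abc
```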
55 lines
1.4 KiB
Python
## @package sampling_trainable_mixin
# Module caffe2.python.layers.sampling_trainable_mixin

import abc

import six


class SamplingTrainableMixin(six.with_metaclass(abc.ABCMeta, object)):

    def __init__(self, *args, **kwargs):
        super(SamplingTrainableMixin, self).__init__(*args, **kwargs)
        self._train_param_blobs = None
        self._train_param_blobs_frozen = False

    @property
    @abc.abstractmethod
    def param_blobs(self):
        """
        List of parameter blobs for the prediction net
        """
        pass

    @property
    def train_param_blobs(self):
        """
        If train_param_blobs is not set before use, default to param_blobs
        """
        if self._train_param_blobs is None:
            # Assigning through the setter below also freezes the value.
            self.train_param_blobs = self.param_blobs
        return self._train_param_blobs

    @train_param_blobs.setter
    def train_param_blobs(self, blobs):
        # The training blobs can only be set once; afterwards they are frozen.
        assert not self._train_param_blobs_frozen
        assert blobs is not None
        self._train_param_blobs_frozen = True
        self._train_param_blobs = blobs

    @abc.abstractmethod
    def _add_ops(self, net, param_blobs):
        """
        Add ops to the given net, using the given param_blobs
        """
        pass

    def add_ops(self, net):
        # Prediction ops always use the full set of parameter blobs.
        self._add_ops(net, self.param_blobs)

    def add_train_ops(self, net):
        # Training ops may use a different (e.g. sampled) set of blobs.
        self._add_ops(net, self.train_param_blobs)
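To make the mixin's contract concrete, here is a minimal hypothetical usage sketch; `TrivialLayer`, the plain-list "nets", and the blob names are illustrative assumptions, not part of the Caffe2 API (real subclasses build ops on an actual `caffe2` net in `_add_ops`):

```
# A minimal, hypothetical subclass assuming SamplingTrainableMixin above
# is in scope; it records ops on a plain list instead of a real net.
class TrivialLayer(SamplingTrainableMixin):

    @property
    def param_blobs(self):
        # Full parameter set used by the prediction net.
        return ["w_full"]

    def _add_ops(self, net, param_blobs):
        # Stand-in for real op construction: record which blobs were used.
        net.append(("FC", param_blobs))


predict_net, train_net = [], []
layer = TrivialLayer()

# Training can run against a different (e.g. sampled) set of parameters...
layer.train_param_blobs = ["w_sampled"]
layer.add_train_ops(train_net)    # train_net == [("FC", ["w_sampled"])]

# ...while prediction always sees the full parameter blobs.
layer.add_ops(predict_net)        # predict_net == [("FC", ["w_full"])]

# The setter freezes the value, so a second assignment fails the assert.
try:
    layer.train_param_blobs = ["w_other"]
except AssertionError:
    print("train_param_blobs is frozen after the first assignment")
```

Note the subtlety in the getter: reading `train_param_blobs` before it is set assigns `param_blobs` through the setter, which freezes the default in place just as an explicit assignment would.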