pytorch/caffe2/python/layers/sampling_trainable_mixin.py
Bugra Akyildiz 27c7158166 Remove __future__ imports for legacy Python 2 support (#45033)
Summary:
There is a tool called `2to3` with a `future` fixer that can be targeted specifically to remove these imports; the `caffe2` directory has the most redundant ones:

```2to3 -f future -w caffe2```

Pull Request resolved: https://github.com/pytorch/pytorch/pull/45033

Reviewed By: seemethere

Differential Revision: D23808648

Pulled By: bugra

fbshipit-source-id: 38971900f0fe43ab44a9168e57f2307580d36a38
2020-09-23 17:57:02 -07:00

55 lines
1.4 KiB
Python

## @package sampling_trainable_mixin
# Module caffe2.python.layers.sampling_trainable_mixin
import abc
import six
class SamplingTrainableMixin(object, metaclass=abc.ABCMeta):
    """Mixin for layers whose training net may use a different (e.g. sampled)
    set of parameter blobs than the prediction net.

    Subclasses must provide the ``param_blobs`` property (prediction-net
    parameters) and the ``_add_ops`` method (adds the layer's ops to a net
    using a given list of parameter blobs).
    """

    def __init__(self, *args, **kwargs):
        # Cooperative multiple inheritance: forward everything to the next
        # class in the MRO. Python-3 zero-argument super() — the file is
        # already Python-3 only.
        super().__init__(*args, **kwargs)
        # Lazily initialized in the train_param_blobs property; once set it
        # is frozen and may not be reassigned.
        self._train_param_blobs = None
        self._train_param_blobs_frozen = False

    @property
    @abc.abstractmethod
    def param_blobs(self):
        """
        List of parameter blobs for prediction net
        """
        pass

    @property
    def train_param_blobs(self):
        """
        If train_param_blobs is not set before used, default to param_blobs
        """
        if self._train_param_blobs is None:
            # Goes through the setter so the freeze flag is raised even for
            # the defaulted value.
            self.train_param_blobs = self.param_blobs
        return self._train_param_blobs

    @train_param_blobs.setter
    def train_param_blobs(self, blobs):
        """Set the training-net parameter blobs; allowed at most once.

        Raises:
            AssertionError: if the blobs were already set (frozen) or if
                ``blobs`` is None.
        """
        assert not self._train_param_blobs_frozen
        assert blobs is not None
        self._train_param_blobs_frozen = True
        self._train_param_blobs = blobs

    @abc.abstractmethod
    def _add_ops(self, net, param_blobs):
        """
        Add ops to the given net, using the given param_blobs
        """
        pass

    def add_ops(self, net):
        """Add prediction-net ops, using the prediction parameter blobs."""
        self._add_ops(net, self.param_blobs)

    def add_train_ops(self, net):
        """Add training-net ops, using the (possibly sampled) train blobs."""
        self._add_ops(net, self.train_param_blobs)