Summary:
To achieve this, I modified the blob naming scheme defined in a layer.
Previously the names were scope/fc_w and scope/fc_w_auto_0 (when there
is another fc within the same scope).
Now I have changed them to scope/fc/w and scope/fc_auto_0/w.
That is, we rely on the uniqueness of the scoped layer name to derive
the names of its blobs.
I also overrode the create_param method in LayerModelHelper so that it
uses the blob name resolved from the parameter-sharing context.
Some details, such as making the initializer more structured, still
remain to be finalized.
Reviewed By: kennyhorror
Differential Revision: D5435132
fbshipit-source-id: a0525f5ea0977e255dd5ea765b38913f5951d455
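
As a rough illustration of the resolution step described above (a minimal
sketch, not the actual caffe2 implementation; resolve_scope and sharing_map
are hypothetical names), a ParameterSharing mapping such as
{'scope_1': 'scope_0'} can be applied to the scoped layer name before the
'/w' blob name is derived from it:

    def resolve_scope(scoped_name, sharing_map):
        # Rewrite each scope component through the sharing map, so a layer
        # under 'scope_1' resolves its parameter blobs under 'scope_0'.
        parts = scoped_name.split('/')
        return '/'.join(sharing_map.get(p, p) for p in parts)

    # 'global_scope/scope_1/fc' resolves to 'global_scope/scope_0/fc',
    # so its weight blob would be named 'global_scope/scope_0/fc/w'.
    assert resolve_scope('global_scope/scope_1/fc',
                         {'scope_1': 'scope_0'}) == 'global_scope/scope_0/fc'
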
105 lines
4.2 KiB
Python
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals

from caffe2.python import scope
from caffe2.python.modeling.parameter_sharing import (
    ParameterSharing,
)
from caffe2.python.layer_test_util import LayersTestCase


class ParameterSharingTest(LayersTestCase):

    def test_layer_parameter_name(self):
        # Blob names are derived from the scoped layer name: the first FC
        # in a scope is named 'fc', a second one gets the '_auto_0' suffix.
        output_dims = 2
        with scope.NameScope('global_scope'):
            fc1_output = self.model.FC(
                self.model.input_feature_schema.float_features,
                output_dims
            )
            self.assertEquals(self.model.layers[-1].w, 'global_scope/fc/w')
            self.assertEquals(fc1_output(), 'global_scope/fc/output')

            with scope.NameScope('nested_scope'):
                fc2_output = self.model.FC(
                    fc1_output,
                    output_dims
                )
                self.assertEquals(self.model.layers[-1].w,
                                  'global_scope/nested_scope/fc/w')
                self.assertEquals(fc2_output(),
                                  'global_scope/nested_scope/fc/output')

                fc3_output = self.model.FC(
                    fc1_output,
                    output_dims
                )
                self.assertEquals(self.model.layers[-1].w,
                                  'global_scope/nested_scope/fc_auto_0/w')
                self.assertEquals(fc3_output(),
                                  'global_scope/nested_scope/fc_auto_0/output')

    def test_layer_shared_parameter_name_different_namescopes(self):
        # Layers created under 'scope_1' reuse the parameters registered
        # under 'scope_0', while their outputs stay in their own scope.
        output_dims = 2
        with scope.NameScope('global_scope'):
            with ParameterSharing({'scope_1': 'scope_0'}):
                with scope.NameScope('scope_0'):
                    fc1_output = self.model.FC(
                        self.model.input_feature_schema.float_features,
                        output_dims
                    )
                    self.assertEquals(self.model.layers[-1].w,
                                      'global_scope/scope_0/fc/w')
                    self.assertEquals(fc1_output(),
                                      'global_scope/scope_0/fc/output')

                with scope.NameScope('scope_1'):
                    fc2_output = self.model.FC(
                        self.model.input_feature_schema.float_features,
                        output_dims
                    )
                    # The weight resolves to scope_0; the output does not.
                    self.assertEquals(self.model.layers[-1].w,
                                      'global_scope/scope_0/fc/w')
                    self.assertEquals(fc2_output(),
                                      'global_scope/scope_1/fc/output')

    def test_layer_shared_parameter_name_within_same_namescope(self):
        # Two FCs in one scope: mapping the auto-generated name 'fc_auto_0'
        # to 'fc' makes the second layer share the first layer's weights.
        output_dims = 2
        with scope.NameScope('global_scope'):
            with ParameterSharing({'fc_auto_0': 'fc'}):
                self.model.FC(
                    self.model.input_feature_schema.float_features,
                    output_dims
                )
                self.assertEquals(self.model.layers[-1].w,
                                  'global_scope/fc/w')

                self.model.FC(
                    self.model.input_feature_schema.float_features,
                    output_dims
                )
                self.assertEquals(self.model.layers[-1].w,
                                  'global_scope/fc/w')

    def test_layer_shared_parameter_name_within_same_namescope_customized_name(self):
        # Same as above, but with explicit layer names instead of the
        # auto-generated '_auto_0' suffix.
        output_dims = 2
        with scope.NameScope('global_scope'):
            with ParameterSharing({'new_fc': 'shared_fc'}):
                self.model.FC(
                    self.model.input_feature_schema.float_features,
                    output_dims,
                    name='shared_fc'
                )
                self.assertEquals(self.model.layers[-1].w,
                                  'global_scope/shared_fc/w')

                self.model.FC(
                    self.model.input_feature_schema.float_features,
                    output_dims,
                    name='new_fc'
                )
                self.assertEquals(self.model.layers[-1].w,
                                  'global_scope/shared_fc/w')