pytorch/caffe2/python/layer_parameter_sharing_test.py
Yangqing Jia 8286ce1e3a Re-license to Apache
Summary: Closes https://github.com/caffe2/caffe2/pull/1260

Differential Revision: D5906739

Pulled By: Yangqing

fbshipit-source-id: e482ba9ba60b5337d9165f28f7ec68d4518a0902
2017-09-28 16:22:00 -07:00

120 lines
4.9 KiB
Python

# Copyright (c) 2016-present, Facebook, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##############################################################################
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from caffe2.python import scope
from caffe2.python.modeling.parameter_sharing import (
ParameterSharing,
)
from caffe2.python.layer_test_util import LayersTestCase
class ParameterSharingTest(LayersTestCase):
    """Tests for layer-model parameter naming under NameScope and
    ParameterSharing.

    Each test builds FC layers on ``self.model`` (provided by
    ``LayersTestCase``) and asserts the exact blob names generated for the
    layer's weight parameter (``layers[-1].w``) and its output schema.
    """

    def test_layer_parameter_name(self):
        """Without sharing: params/outputs are prefixed by the active
        (possibly nested) NameScope, and a repeated layer type in the same
        scope gets an ``_auto_N`` suffix to stay unique."""
        output_dims = 2

        with scope.NameScope('global_scope'):
            fc1_output = self.model.FC(
                self.model.input_feature_schema.float_features,
                output_dims
            )
            # assertEqual, not the deprecated assertEquals alias
            self.assertEqual(self.model.layers[-1].w, 'global_scope/fc/w')
            self.assertEqual(fc1_output(), 'global_scope/fc/output')

            with scope.NameScope('nested_scope'):
                fc2_output = self.model.FC(
                    fc1_output,
                    output_dims
                )
                self.assertEqual(self.model.layers[-1].w,
                                 'global_scope/nested_scope/fc/w')
                self.assertEqual(fc2_output(),
                                 'global_scope/nested_scope/fc/output')

                # Second FC in the same scope: name is de-duplicated with
                # an ``_auto_0`` suffix rather than colliding.
                fc3_output = self.model.FC(
                    fc1_output,
                    output_dims
                )
                self.assertEqual(self.model.layers[-1].w,
                                 'global_scope/nested_scope/fc_auto_0/w')
                self.assertEqual(fc3_output(),
                                 'global_scope/nested_scope/fc_auto_0/output')

    def test_layer_shared_parameter_name_different_namescopes(self):
        """Sharing across sibling scopes: mapping ``scope_1 -> scope_0``
        makes the second FC reuse scope_0's weight blob, while its output
        still lives under its own scope_1 namespace."""
        output_dims = 2

        with scope.NameScope('global_scope'):
            with ParameterSharing({'scope_1': 'scope_0'}):
                with scope.NameScope('scope_0'):
                    fc1_output = self.model.FC(
                        self.model.input_feature_schema.float_features,
                        output_dims
                    )
                    self.assertEqual(self.model.layers[-1].w,
                                     'global_scope/scope_0/fc/w')
                    self.assertEqual(fc1_output(),
                                     'global_scope/scope_0/fc/output')

                with scope.NameScope('scope_1'):
                    fc2_output = self.model.FC(
                        self.model.input_feature_schema.float_features,
                        output_dims
                    )
                    # Weight is shared (resolves into scope_0) ...
                    self.assertEqual(self.model.layers[-1].w,
                                     'global_scope/scope_0/fc/w')
                    # ... but the output blob keeps scope_1's prefix.
                    self.assertEqual(fc2_output(),
                                     'global_scope/scope_1/fc/output')

    def test_layer_shared_parameter_name_within_same_namescope(self):
        """Sharing inside one scope via the auto-generated name: mapping
        ``fc_auto_0 -> fc`` makes the second FC reuse the first FC's
        weights instead of creating ``fc_auto_0/w``."""
        output_dims = 2

        with scope.NameScope('global_scope'):
            with ParameterSharing({'fc_auto_0': 'fc'}):
                self.model.FC(
                    self.model.input_feature_schema.float_features,
                    output_dims
                )
                self.assertEqual(self.model.layers[-1].w,
                                 'global_scope/fc/w')

                self.model.FC(
                    self.model.input_feature_schema.float_features,
                    output_dims
                )
                self.assertEqual(self.model.layers[-1].w,
                                 'global_scope/fc/w')

    def test_layer_shared_parameter_name_within_same_namescope_customized_name(self):
        """Same as above, but with explicit layer names: mapping
        ``new_fc -> shared_fc`` makes the second (named) FC reuse the
        first layer's weight blob."""
        output_dims = 2

        with scope.NameScope('global_scope'):
            with ParameterSharing({'new_fc': 'shared_fc'}):
                self.model.FC(
                    self.model.input_feature_schema.float_features,
                    output_dims,
                    name='shared_fc'
                )
                self.assertEqual(self.model.layers[-1].w,
                                 'global_scope/shared_fc/w')

                self.model.FC(
                    self.model.input_feature_schema.float_features,
                    output_dims,
                    name='new_fc'
                )
                self.assertEqual(self.model.layers[-1].w,
                                 'global_scope/shared_fc/w')