pytorch/caffe2/python/session_test.py
Xianjie Chen d0621a2449 NextScopedBlob with well-defined behavior and respect namescope
Summary:
Remove the use of `NextName` in the layer model helper, so that the same function returns a `model_helper` that constructs an identical `Net` when run under the same NameScope.

`NextScopedBlob` should only take effect when there is a real name conflict; otherwise it returns a plain ScopedBlobReference.

This is critical for parameter blobs. In the long run we need to be able to specify parameter blobs more explicitly (kennyhorror is working on this). This solution works in the short term for, e.g., two-tower sparse NN models.
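
A minimal sketch of the described behavior, assuming the `core.NameScope` and `core.NextScopedBlob` helpers from `caffe2.python.core`; the exact de-duplication suffix is an assumption of the sketch, not something this diff specifies:

    from caffe2.python import core

    with core.NameScope('tower'):
        # No conflict: behaves like ScopedBlobReference, i.e. resolves to 'tower/fc_w'.
        w = core.NextScopedBlob('fc_w')
        # Only when 'tower/fc_w' is already in use should a de-duplicated name come
        # back instead (the exact suffix scheme is an assumption of this sketch).
        w2 = core.NextScopedBlob('fc_w')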

Reviewed By: kennyhorror

Differential Revision: D4555423

fbshipit-source-id: 2c4b99a61392e5d51aa878f7346466a8f14be187
2017-02-16 17:16:36 -08:00


from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals

from caffe2.python.schema import (
    Struct, FetchRecord, NewRecord, FeedRecord, InitEmptyRecord)
from caffe2.python import core, workspace
from caffe2.python.session import LocalSession
from caffe2.python.dataset import Dataset
from caffe2.python.pipeline import pipe
from caffe2.python.task import TaskGroup
from caffe2.python.test_util import TestCase
import numpy as np


class TestLocalSession(TestCase):
    def test_local_session(self):
        init_net = core.Net('init')
        src_values = Struct(
            ('uid', np.array([1, 2, 6])),
            ('value', np.array([1.4, 1.6, 1.7])))
        expected_dst = Struct(
            ('uid', np.array([2, 4, 12])),
            ('value', np.array([0.0, 0.0, 0.0])))
        with core.NameScope('init'):
            src_blobs = NewRecord(init_net, src_values)
            dst_blobs = InitEmptyRecord(init_net, src_values.clone_schema())

        def proc1(rec):
            # First pipeline stage: uid -> uid + uid, value passed through.
            net = core.Net('proc1')
            with core.NameScope('proc1'):
                out = NewRecord(net, rec)
                net.Add([rec.uid(), rec.uid()], [out.uid()])
                out.value.set(blob=rec.value())
            return [net], out

        def proc2(rec):
            # Second pipeline stage: uid passed through, value -> value - value = 0.
            net = core.Net('proc2')
            with core.NameScope('proc2'):
                out = NewRecord(net, rec)
                out.uid.set(blob=rec.uid())
                net.Sub([rec.value(), rec.value()], [out.value()])
            return [net], out

        src_ds = Dataset(src_blobs)
        dst_ds = Dataset(dst_blobs)
        with TaskGroup() as tg:
            out1 = pipe(src_ds.reader(), processor=proc1)
            out2 = pipe(out1, processor=proc2)
            pipe(out2, dst_ds.writer())

        # Feed the source record, run the init net and the pipeline, then
        # compare the destination record against the expected values.
        ws = workspace.C.Workspace()
        FeedRecord(src_blobs, src_values, ws)
        session = LocalSession(ws)
        session.run(init_net)
        session.run(tg)
        output = FetchRecord(dst_blobs, ws=ws)
        for a, b in zip(output.field_blobs(), expected_dst.field_blobs()):
            np.testing.assert_array_equal(a, b)
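
To run just this test, assuming an importable Caffe2 build on PYTHONPATH (the exact invocation depends on the local setup), something like the following should work:

    python -m pytest caffe2/python/session_test.py -k test_local_session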