This repository has been archived by the owner on Jul 2, 2021. It is now read-only.

Add sequential_feature_extractor #342

Merged
37 commits, merged Jul 17, 2017
Changes from 25 commits
Commits
37 commits
75ad8e7
add sequential_feature_extractor
yuyu2172 Jul 14, 2017
817d4e4
improve doc
yuyu2172 Jul 14, 2017
4601ed1
fix doc
yuyu2172 Jul 14, 2017
3c031d4
fix doc
yuyu2172 Jul 14, 2017
b6d618a
fix doc
yuyu2172 Jul 14, 2017
d1790a5
remove unnecessary []
yuyu2172 Jul 14, 2017
ec70c8f
drop support for list layers
yuyu2172 Jul 14, 2017
1cad17b
add to doc
yuyu2172 Jul 14, 2017
321ee42
use init_scope
Hakuyume Jul 15, 2017
644ce71
update test
Hakuyume Jul 15, 2017
958913e
update docs
Hakuyume Jul 15, 2017
4af9fc8
remove unused import
Hakuyume Jul 15, 2017
d02e8ce
support None
Hakuyume Jul 15, 2017
064c7bc
check if a layer is used in layer_names
Hakuyume Jul 15, 2017
08f1ba7
improve test
Hakuyume Jul 15, 2017
8e80d7b
use tuple
Hakuyume Jul 15, 2017
210f3d2
remove list
Hakuyume Jul 15, 2017
5e31464
add deletion test
Hakuyume Jul 15, 2017
a0f4fcf
force tuple
Hakuyume Jul 15, 2017
97778d1
fix test
Hakuyume Jul 15, 2017
6036240
fix docs
Hakuyume Jul 15, 2017
29faace
update docs
Hakuyume Jul 15, 2017
4870aaa
fix docs
Hakuyume Jul 15, 2017
49c490e
Merge pull request #2 from Hakuyume/use-init-scope-in-sequential-feat…
yuyu2172 Jul 15, 2017
73e7189
delete import collections
yuyu2172 Jul 15, 2017
869dcaf
layer_names --> feature_names
yuyu2172 Jul 15, 2017
4168f06
fix doc
yuyu2172 Jul 16, 2017
faa7f22
flake8
yuyu2172 Jul 16, 2017
4fdc7d6
fix getter
Hakuyume Jul 16, 2017
9a3fdee
add test
Hakuyume Jul 16, 2017
aaf403c
add comment
Hakuyume Jul 16, 2017
cae3e59
member variable -> attribute
Hakuyume Jul 16, 2017
ffd8152
the
Hakuyume Jul 16, 2017
e9f5af2
move comment
Hakuyume Jul 16, 2017
c57dd3b
Merge pull request #3 from Hakuyume/fix-feature-names-getter-in-seque…
yuyu2172 Jul 16, 2017
b2e012d
add a test when there are two same keys in feature_names
yuyu2172 Jul 17, 2017
b053036
Merge branch 'sequential-feature-extractor' of https://github.com/yuy…
yuyu2172 Jul 17, 2017
1 change: 1 addition & 0 deletions chainercv/links/__init__.py
@@ -1,4 +1,5 @@
from chainercv.links.model.pixelwise_softmax_classifier import PixelwiseSoftmaxClassifier # NOQA
from chainercv.links.model.sequential_feature_extractor import SequentialFeatureExtractor # NOQA

from chainercv.links.model.faster_rcnn.faster_rcnn_vgg import FasterRCNNVGG16 # NOQA
from chainercv.links.model.segnet.segnet_basic import SegNetBasic # NOQA
128 changes: 128 additions & 0 deletions chainercv/links/model/sequential_feature_extractor.py
@@ -0,0 +1,128 @@
import chainer


class SequentialFeatureExtractor(chainer.Chain):

"""A feature extractor model with a single-stream forward pass.

This class is a base class that can be used for an implementation of
a feature extractor model.
Callable objects, such as :class:`chainer.Link` and
:class:`chainer.Function`, can be registered to this chain with
:meth:`init_scope`.
This chain keeps the order of registrations and :meth:`__call__`
executes callables in that order.
A :class:`chainer.Link` object in the sequence will be added as
a child link of this link.

:meth:`__call__` returns single or multiple features that are picked up
through a stream of computation.

Inline review thread on the sentence above:
Member: features that are -> feature(s) that is/are ? (I'm not sure)
Member Author: How about adding a sentence after this one saying
"Note that the number of features can be one."
I think it is better to make all the related nouns plural because there
will be too many X(s) in this paragraph.
Member: I agree with you. I could understand that the number of features
can be one. This was only a grammatical question.
Member: that -> those
Member Author: I think that is OK. It works like which.
Member: Sorry, it is OK.

These features can be specified by :obj:`layer_names`, which contains
the names of the layers whose outputs are collected.
When :obj:`layer_names` is a string, a single value is returned.
When :obj:`layer_names` is an iterable of strings, a tuple of values
is returned. The order of the values is the same as the order of
the strings in :obj:`layer_names`.
When :obj:`layer_names` is :obj:`None`, the output of the last
layer is returned.

Examples:

>>> import chainer.functions as F
>>> import chainer.links as L
>>> model = SequentialFeatureExtractor()
>>> with model.init_scope():
>>>     model.l1 = L.Linear(None, 1000)
>>>     model.l1_relu = F.relu
>>>     model.l2 = L.Linear(None, 1000)
>>>     model.l2_relu = F.relu
>>>     model.l3 = L.Linear(None, 10)
>>> # This is the output of layer l3.
>>> feat3 = model(x)
>>> # The layers to be collected can be changed.
>>> model.layer_names = ('l2_relu', 'l1_relu')
>>> # These are outputs of layer l2_relu and l1_relu.
>>> feat2, feat1 = model(x)

Params:
layer_names (string or iterable of strings):
Names of layers whose outputs will be collected in
the forward pass.

"""

def __init__(self):
super(SequentialFeatureExtractor, self).__init__()
self._order = list()
self.layer_names = None

def __setattr__(self, name, value):
super(SequentialFeatureExtractor, self).__setattr__(name, value)
if self.within_init_scope and callable(value):
self._order.append(name)

def __delattr__(self, name):
if self._layer_names and name in self._layer_names:
raise AttributeError(
'Layer {:s} is registered to layer_names.'.format(name))
super(SequentialFeatureExtractor, self).__delattr__(name)
try:
self._order.remove(name)
except ValueError:
pass

@property
def layer_names(self):
return self._layer_names

@layer_names.setter
def layer_names(self, layer_names):
if layer_names is None:
self._return_tuple = False
self._layer_names = None
return

if (not isinstance(layer_names, str) and
all(isinstance(name, str) for name in layer_names)):
return_tuple = True
else:
return_tuple = False
layer_names = (layer_names,)
if any(name not in self._order for name in layer_names):
raise ValueError('Invalid layer name')

self._return_tuple = return_tuple
self._layer_names = tuple(layer_names)

def __call__(self, x):
"""Forward this model.

Args:
x (chainer.Variable or array): Input to the model.

Returns:
chainer.Variable or tuple of chainer.Variable:
The returned values are determined by :obj:`layer_names`.

"""
if self._layer_names is None:
layer_names = (self._order[-1],)
else:
layer_names = self._layer_names

# The biggest index among indices of the layers that are included
# in layer_names.
last_index = max(self._order.index(name) for name in layer_names)

features = {}
h = x
for name in self._order[:last_index + 1]:
h = self[name](h)
if name in layer_names:
features[name] = h

if self._return_tuple:
features = tuple(features[name] for name in layer_names)
else:
features = list(features.values())[0]
return features
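
As a usage illustration (not part of the diff above), here is a minimal sketch of how the class could be used once it is exported from chainercv.links; the layer names, layer sizes, and input shape are arbitrary choices for this example:

import numpy as np

import chainer.functions as F
import chainer.links as L

from chainercv.links import SequentialFeatureExtractor


# Register callables inside init_scope(); __call__ executes them in
# registration order.
model = SequentialFeatureExtractor()
with model.init_scope():
    model.l1 = L.Linear(None, 32)
    model.l1_relu = F.relu
    model.l2 = L.Linear(None, 10)

x = np.random.uniform(size=(2, 8)).astype(np.float32)

# Default behaviour: only the output of the last registered callable.
feat = model(x)
print(feat.shape)  # (2, 10)

# Ask for several intermediate features; a tuple is returned in the same
# order as the names in layer_names.
model.layer_names = ('l1_relu', 'l2')
feat1, feat2 = model(x)
print(feat1.shape, feat2.shape)  # (2, 32) (2, 10)
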
113 changes: 113 additions & 0 deletions tests/links_tests/model_tests/test_sequential_feature_extractor.py
@@ -0,0 +1,113 @@
import numpy as np
import unittest

import chainer
from chainer.cuda import to_cpu
from chainer.function import Function
from chainer import testing
from chainer.testing import attr

from chainercv.links import SequentialFeatureExtractor
from chainercv.utils.testing import ConstantStubLink


class DummyFunc(Function):

def forward(self, inputs):
return inputs[0] * 2,


@testing.parameterize(
{'layer_names': None},
{'layer_names': 'f2'},
{'layer_names': ('f2',)},
{'layer_names': ('l2', 'l1', 'f2')},
)
class TestSequentialFeatureExtractor(unittest.TestCase):

def setUp(self):
self.l1 = ConstantStubLink(np.random.uniform(size=(1, 3, 24, 24)))
self.f1 = DummyFunc()
self.f2 = DummyFunc()
self.l2 = ConstantStubLink(np.random.uniform(size=(1, 3, 24, 24)))

self.link = SequentialFeatureExtractor()
with self.link.init_scope():
self.link.l1 = self.l1
self.link.f1 = self.f1
self.link.f2 = self.f2
self.link.l2 = self.l2

if self.layer_names:
self.link.layer_names = self.layer_names

self.x = np.random.uniform(size=(1, 3, 24, 24))

def check_call(self, x, expects):
outs = self.link(x)

if isinstance(self.layer_names, tuple):
layer_names = self.layer_names
else:
if self.layer_names is None:
layer_names = ('l2',)
else:
layer_names = (self.layer_names,)
outs = (outs,)

self.assertEqual(len(outs), len(layer_names))

for out, layer_name in zip(outs, layer_names):
self.assertIsInstance(out, chainer.Variable)
self.assertIsInstance(out.data, self.link.xp.ndarray)

out = to_cpu(out.data)
np.testing.assert_equal(out, to_cpu(expects[layer_name].data))

def check_basic(self):
x = self.link.xp.asarray(self.x)

expects = dict()
expects['l1'] = self.l1(x)
expects['f1'] = self.f1(expects['l1'])
expects['f2'] = self.f2(expects['f1'])
expects['l2'] = self.l2(expects['f2'])

self.check_call(x, expects)

def test_basic_cpu(self):
self.check_basic()

@attr.gpu
def test_call_gpu(self):
self.link.to_gpu()
self.check_basic()

def check_deletion(self):
x = self.link.xp.asarray(self.x)

if self.layer_names == 'l1' or \
(isinstance(self.layer_names, tuple) and 'l1' in self.layer_names):
with self.assertRaises(AttributeError):
del self.link.l1
return
else:
del self.link.l1

expects = dict()
expects['f1'] = self.f1(x)
expects['f2'] = self.f2(expects['f1'])
expects['l2'] = self.l2(expects['f2'])

self.check_call(x, expects)

def test_deletion_cpu(self):
self.check_deletion()

@attr.gpu
def test_deletion_gpu(self):
self.link.to_gpu()
self.check_deletion()


testing.run_module(__name__, __file__)
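
For reference (not part of the PR), a minimal sketch of the deletion-protection behaviour that check_deletion above exercises; the layer names and sizes are arbitrary:

import chainer.links as L

from chainercv.links import SequentialFeatureExtractor


model = SequentialFeatureExtractor()
with model.init_scope():
    model.l1 = L.Linear(None, 5)
    model.l2 = L.Linear(None, 5)

# Deleting a layer that is currently listed in layer_names is refused.
model.layer_names = 'l1'
try:
    del model.l1
except AttributeError as e:
    print(e)  # Layer l1 is registered to layer_names.

# Once the layer is no longer referenced by layer_names, deletion also
# removes it from the execution order.
model.layer_names = 'l2'
del model.l1
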