This repository has been archived by the owner on Jul 2, 2021. It is now read-only.

Add sequential_feature_extractor #342

Merged on Jul 17, 2017 (37 commits)

Changes from 1 commit

Commits (37)
75ad8e7
add sequential_feature_extractor
yuyu2172 Jul 14, 2017
817d4e4
improve doc
yuyu2172 Jul 14, 2017
4601ed1
fix doc
yuyu2172 Jul 14, 2017
3c031d4
fix doc
yuyu2172 Jul 14, 2017
b6d618a
fix doc
yuyu2172 Jul 14, 2017
d1790a5
remove unnecessary []
yuyu2172 Jul 14, 2017
ec70c8f
drop support for list layers
yuyu2172 Jul 14, 2017
1cad17b
add to doc
yuyu2172 Jul 14, 2017
321ee42
use init_scope
Hakuyume Jul 15, 2017
644ce71
update test
Hakuyume Jul 15, 2017
958913e
update docs
Hakuyume Jul 15, 2017
4af9fc8
remove unused import
Hakuyume Jul 15, 2017
d02e8ce
support None
Hakuyume Jul 15, 2017
064c7bc
check if a layer is used in layer_names
Hakuyume Jul 15, 2017
08f1ba7
improve test
Hakuyume Jul 15, 2017
8e80d7b
use tuple
Hakuyume Jul 15, 2017
210f3d2
remove list
Hakuyume Jul 15, 2017
5e31464
add deletion test
Hakuyume Jul 15, 2017
a0f4fcf
force tuple
Hakuyume Jul 15, 2017
97778d1
fix test
Hakuyume Jul 15, 2017
6036240
fix docs
Hakuyume Jul 15, 2017
29faace
update docs
Hakuyume Jul 15, 2017
4870aaa
fix docs
Hakuyume Jul 15, 2017
49c490e
Merge pull request #2 from Hakuyume/use-init-scope-in-sequential-feat…
yuyu2172 Jul 15, 2017
73e7189
delete import collections
yuyu2172 Jul 15, 2017
869dcaf
layer_names --> feature_names
yuyu2172 Jul 15, 2017
4168f06
fix doc
yuyu2172 Jul 16, 2017
faa7f22
flake8
yuyu2172 Jul 16, 2017
4fdc7d6
fix getter
Hakuyume Jul 16, 2017
9a3fdee
add test
Hakuyume Jul 16, 2017
aaf403c
add comment
Hakuyume Jul 16, 2017
cae3e59
member variable -> attribute
Hakuyume Jul 16, 2017
ffd8152
the
Hakuyume Jul 16, 2017
e9f5af2
move comment
Hakuyume Jul 16, 2017
c57dd3b
Merge pull request #3 from Hakuyume/fix-feature-names-getter-in-seque…
yuyu2172 Jul 16, 2017
b2e012d
add a test when there are two same keys in feature_names
yuyu2172 Jul 17, 2017
b053036
Merge branch 'sequential-feature-extractor' of https://github.com/yuy…
yuyu2172 Jul 17, 2017
add sequential_feature_extractor
yuyu2172 committed Jul 14, 2017
commit 75ad8e772815d5ad3576d49160fa323321339ab8
1 change: 1 addition & 0 deletions chainercv/links/__init__.py
@@ -1,4 +1,5 @@
from chainercv.links.model.pixelwise_softmax_classifier import PixelwiseSoftmaxClassifier # NOQA
from chainercv.links.model.sequential_feature_extractor import SequentialFeatureExtractor # NOQA

from chainercv.links.model.faster_rcnn.faster_rcnn_vgg import FasterRCNNVGG16 # NOQA
from chainercv.links.model.segnet.segnet_basic import SegNetBasic # NOQA
132 changes: 132 additions & 0 deletions chainercv/links/model/sequential_feature_extractor.py
@@ -0,0 +1,132 @@
import collections

import chainer


class SequentialFeatureExtractor(chainer.Chain):

"""A feature extractor model with a single-stream forward pass.

This class is a base class that can be used for an implementation of
a feature extractor model.
The link takes :obj:`layers` to specify the computation
conducted in :meth:`__call__`.
:obj:`layers` is a list or :obj:`collections.OrderedDict` of
Review comment (Member): :obj: -> :class:

    callable objects called layers, which are going to be called sequentially
    starting from the top to the end.
Review comment (Member): from the top to the end -> from the top to the bottom

    A :obj:`chainer.Link` object in the sequence will be added as
Review comment (Member): :obj: -> :class:

    a child link of this object.

    :meth:`__call__` returns single or multiple features that are picked up
Review comment (Member): features that are -> feature(s) that is/are ? (I'm not sure)

Review comment (Member Author): How about adding a sentence after this one saying "Note that the number of features can be one."
I think it is better to make all the related nouns plural because there will be too many X(s) in this paragraph.

Review comment (Member): I agree with you. I could understand that the number of features can be one. This was only a grammatical question.

Review comment (Member): that -> those

Review comment (Member Author): I think "that" is OK. It works like "which".

Review comment (Member): Sorry, it is OK.

    through a stream of computation.
    These features can be specified by :obj:`layer_names`, which contains
    the names of the layer whose output is collected.
    When :obj:`layer_names` is a string, single value is returned.
    When :obj:`layer_names` is an iterable of strings, a tuple of values
    will be returned. These values are ordered in the same order of the
    strings in :obj:`layer_names`.

    Examples:

        >>> import collections
Review comment (Member): I forgot to remove this line. Could you remove this?

        >>> import chainer.functions as F
        >>> import chainer.links as L
        >>> layers = collections.OrderedDict([
        >>>     ('l1', L.Linear(None, 1000)),
        >>>     ('l1_relu', F.relu),
        >>>     ('l2', L.Linear(None, 1000)),
        >>>     ('l2_relu', F.relu),
        >>>     ('l3', L.Linear(None, 10))])
        >>> model = SequentialFeatureExtractor(layers, ['l2_relu', 'l1_relu'])
        >>> # These are outputs of layer l2_relu and l1_relu.
        >>> feat1, feat2 = model(x)
Review comment (Member): feat1, feat2 -> feat2, feat1? (feat1 sounds like a feature from l1_relu)

        >>> # The layer_names can be dynamically changed.
        >>> model.layer_names = 'l3'
        >>> # This is an output of layer l1.
Review comment (Member): l1 -> l3

        >>> feat3 = model(x)

    Args:
        layers (list or collections.OrderedDict of callables):
            Callable objects called in the forward pass.
        layer_names (string or iterable of strings):
            Names of layers whose outputs will be collected in
            the forward pass.

    """

    def __init__(self, layers, layer_names=None):
        super(SequentialFeatureExtractor, self).__init__()

        if not isinstance(layers, collections.OrderedDict):
            layers = collections.OrderedDict(
                [('{}_{}'.format(layer.__class__.__name__, i), layer)
                 for i, layer in enumerate(layers)])
Review comment (Member): I don't like layer.__class__. Do we need to support iterables? (I mean OrderedDict is enough).

Review comment (Member Author): Nice suggestion. I dropped support for iterables.
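
For illustration, a minimal sketch of how the constructor body could look once iterable support is dropped. This is a hypothetical fragment, not part of this commit's diff; the actual change lands in the later "drop support for list layers" commit:

    def __init__(self, layers, layer_names=None):
        super(SequentialFeatureExtractor, self).__init__()

        # layers is expected to be a collections.OrderedDict, so the
        # automatic naming via layer.__class__ is no longer needed.
        self._layers = layers
        self.layer_names = layer_names

        with self.init_scope():
            for name, layer in self._layers.items():
                if isinstance(layer, chainer.Link):
                    setattr(self, name, layer)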

        self._layers = layers

        self.layer_names = layer_names

        with self.init_scope():
            for name, layer in self._layers.items():
                if isinstance(layer, chainer.Link):
                    setattr(self, name, layer)

    @property
    def layer_names(self):
        return self._layer_names

    @layer_names.setter
    def layer_names(self, layer_names):
        if layer_names is None:
            layer_names = list(self._layers.keys())[-1]

        if (not isinstance(layer_names, str) and
                all([isinstance(name, str) for name in layer_names])):
Review comment (Member): all(isinstance(name, str) for name in layer_names)

            return_tuple = True
        else:
            return_tuple = False
            layer_names = [layer_names]
        if any([name not in self._layers for name in layer_names]):
Review comment (Member): any(name not in self._layers for name in layer_names)

            raise ValueError('Invalid layer name')

        self._return_tuple = return_tuple
        self._layer_names = layer_names

    def __call__(self, x):
        """Forward sequential feature extraction model.

        Args:
            x (chainer.Variable or array): Input to the network.

        Returns:
            chainer.Variable or tuple of chainer.Variable:
            The returned values are determined by :obj:`layer_names`.

        """
        # The biggest index among indices of the layers that are included
        # in self._layer_names.
        last_index = max([list(self._layers.keys()).index(name) for
                          name in self._layer_names])
Review comment (Member): How about last_index = max(map(list(self._layers.keys()).index, self._layer_names))? This version calls list(self._layers.keys()) only once.

Review comment (Member Author): I personally find this more readable:

        layers_keys = list(self._layers.keys())
        last_index = max(
            [layers_keys.index(name) for name in self._layer_names])

Review comment (Hakuyume, Jul 14, 2017): Making a temporary list layers_keys is good. Note that we can replace max([...]) with max(...).


        features = {}
        h = x
        for name, layer in list(self._layers.items())[:last_index + 1]:
            h = layer(h)
            if name in self._layer_names:
                features[name] = h

        if self._return_tuple:
            features = tuple(
                [features[name] for name in self._layer_names])
Review comment (Member): tuple([...]) -> tuple(...)

        else:
            features = list(features.values())[0]
        return features

    def copy(self):
        ret = super(SequentialFeatureExtractor, self).copy()
Review comment (Member): How about this?

ret = super(SequentialFeatureExtractor, self).copy()
for name in ret._layers.keys():
    if hasattr(ret, name):
        ret._layers[name] = ret[name]
return ret

        layers = []
        for name, layer in self._layers.items():
            if name in self._children:
                layer = ret[name]
            layers.append((name, layer))
        ret.layers = collections.OrderedDict(layers)
        return ret
149 changes: 149 additions & 0 deletions tests/links_tests/model_tests/test_sequential_feature_extractor.py
@@ -0,0 +1,149 @@
import collections
import unittest

import numpy as np

import chainer
from chainer.cuda import to_cpu
from chainer import testing
from chainer.testing import attr

from chainer.function import Function

from chainercv.links import SequentialFeatureExtractor
from chainercv.utils.testing import ConstantStubLink


class DummyFunc(Function):

    def forward(self, inputs):
        return inputs[0] * 2,


class TestSequentialFeatureExtractorOrderedDictFunctions(unittest.TestCase):

    def setUp(self):
        self.l1 = ConstantStubLink(np.random.uniform(size=(1, 3, 24, 24)))
        self.f1 = DummyFunc()
        self.f2 = DummyFunc()
        self.l2 = ConstantStubLink(np.random.uniform(size=(1, 3, 24, 24)))

        self.link = SequentialFeatureExtractor(
            collections.OrderedDict(
                [('l1', self.l1),
                 ('f1', self.f1),
                 ('f2', self.f2),
                 ('l2', self.l2)]),
            layer_names=['l1', 'f1', 'f2'])
        self.x = np.random.uniform(size=(1, 3, 24, 24))

    def check_call_output(self):
        x = self.link.xp.asarray(self.x)
        out = self.link(x)

        self.assertEqual(len(out), 3)
        self.assertIsInstance(out[0], chainer.Variable)
Review comment (Member): How about using for?
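
For illustration, a short sketch of what the suggested for loop could look like here; this is an assumption about the reviewer's intent, not code from this diff:

        # Check every returned feature in a loop instead of repeating
        # the assertions per index.
        for var in out:
            self.assertIsInstance(var, chainer.Variable)
            self.assertIsInstance(var.data, self.link.xp.ndarray)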

        self.assertIsInstance(out[1], chainer.Variable)
        self.assertIsInstance(out[2], chainer.Variable)
        self.assertIsInstance(out[0].data, self.link.xp.ndarray)
        self.assertIsInstance(out[1].data, self.link.xp.ndarray)
        self.assertIsInstance(out[2].data, self.link.xp.ndarray)

        out_data = [to_cpu(var.data) for var in out]
        np.testing.assert_equal(out_data[0], to_cpu(self.l1(x).data))
        np.testing.assert_equal(out_data[1], to_cpu(self.f1(self.l1(x)).data))
        np.testing.assert_equal(
            out_data[2], to_cpu(self.f2(self.f1(self.l1(x))).data))

    def test_call_output_cpu(self):
        self.check_call_output()

    @attr.gpu
    def test_call_output_gpu(self):
        self.link.to_gpu()
        self.check_call_output()

    def check_call_dynamic_layer_names(self):
        x = self.link.xp.asarray(self.x)
        self.link.layer_names = ['l2']
        out, = self.link(x)

        self.assertIsInstance(out, chainer.Variable)
        self.assertIsInstance(out.data, self.link.xp.ndarray)

        out_data = out.data
        np.testing.assert_equal(
            out_data, to_cpu(self.l2(self.f2(self.f1(self.l1(x)))).data))

    def test_call_dynamic_layer_names_cpu(self):
        self.check_call_dynamic_layer_names()

    @attr.gpu
    def test_call_dynamic_layer_names_gpu(self):
        self.check_call_dynamic_layer_names()


class TestSequentialFeatureExtractorListFunctions(unittest.TestCase):

    def setUp(self):
        self.l1 = ConstantStubLink(np.random.uniform(size=(1, 3, 24, 24)))
        self.f1 = DummyFunc()
        self.f2 = DummyFunc()
        self.l2 = ConstantStubLink(np.random.uniform(size=(1, 3, 24, 24)))

        self.link = SequentialFeatureExtractor(
            [self.l1, self.f1, self.f2, self.l2])
        self.x = np.random.uniform(size=(1, 3, 24, 24))

    def check_call_output(self):
        x = self.link.xp.asarray(self.x)
        out = self.link(x)

        self.assertIsInstance(out, chainer.Variable)
        self.assertIsInstance(out.data, self.link.xp.ndarray)

        out = to_cpu(out.data)
        np.testing.assert_equal(
            out,
            to_cpu(self.l2(self.f2(self.f1(self.l1(x)))).data))

    def test_call_output_cpu(self):
        self.check_call_output()

    @attr.gpu
    def test_call_output_gpu(self):
        self.link.to_gpu()
        self.check_call_output()


class TestSequentialFeatureExtractorCopy(unittest.TestCase):

    def setUp(self):
        self.l1 = ConstantStubLink(np.random.uniform(size=(1, 3, 24, 24)))
        self.f1 = DummyFunc()
        self.f2 = DummyFunc()
        self.l2 = ConstantStubLink(np.random.uniform(size=(1, 3, 24, 24)))

        self.link = SequentialFeatureExtractor(
            collections.OrderedDict(
                [('l1', self.l1),
                 ('f1', self.f1),
                 ('f2', self.f2),
                 ('l2', self.l2)]),
            layer_names=['l1', 'f1', 'f2', 'l2'])

    def check_copy(self):
        copied = self.link.copy()
        self.assertIs(copied.l1, copied.layers['l1'])
Review comment (Member): layers -> _layers

        self.assertIs(copied.l2, copied.layers['l2'])

    def test_copy_cpu(self):
        self.check_copy()

    @attr.gpu
    def test_copy_gpu(self):
        self.link.to_gpu()
        self.check_copy()


testing.run_module(__name__, __file__)
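
For reference, a minimal self-contained usage sketch of the class added by this PR, adapted from the docstring example; the input shape and layer sizes are illustrative:

import collections

import chainer.functions as F
import chainer.links as L
import numpy as np

from chainercv.links import SequentialFeatureExtractor

# A batch of two 32-dimensional vectors (illustrative input).
x = np.random.uniform(size=(2, 32)).astype(np.float32)

layers = collections.OrderedDict([
    ('l1', L.Linear(None, 1000)),
    ('l1_relu', F.relu),
    ('l2', L.Linear(None, 1000)),
    ('l2_relu', F.relu),
    ('l3', L.Linear(None, 10))])

model = SequentialFeatureExtractor(layers, ['l2_relu', 'l1_relu'])
# Outputs come back in the order given by layer_names.
feat_l2_relu, feat_l1_relu = model(x)

# layer_names can be changed dynamically; a single name returns a single value.
model.layer_names = 'l3'
feat_l3 = model(x)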