4 changes: 3 additions & 1 deletion CHANGELOG.md
@@ -76,6 +76,8 @@ To release a new version, please update the changelog as followed:
- CI Tool:
- Danger CI has been added to enforce the update of the changelog (by @lgarithm and @DEKHTIARJonathan in #563)
- https://github.com/apps/stale/ added to clean stale issues (by @DEKHTIARJonathan in #573)
- Layer:
- ElementwiseLambdaLayer added, which uses a custom function to combine multiple layer inputs (by @One-sixth in #579)

### Changed
- Tensorflow CPU & GPU dependencies moved to separated requirement files in order to allow PyUP.io to parse them (by @DEKHTIARJonathan in #573)
@@ -94,7 +96,7 @@ To release a new version, please update the changelog as followed:
### Dependencies Update

### Contributors
@lgarithm @DEKHTIARJonathan @2wins
@lgarithm @DEKHTIARJonathan @2wins @One-sixth


## [1.8.5] - 2018-05-09
6 changes: 6 additions & 0 deletions docs/modules/layers.rst
@@ -324,6 +324,7 @@ Layer list

ConcatLayer
ElementwiseLayer
ElementwiseLambdaLayer

ExpandDimsLayer
TileLayer
@@ -783,6 +784,11 @@ Element-wise layer
.. autoclass:: ElementwiseLayer


Element-wise lambda layer
^^^^^^^^^^^^^^^^^^^^^^^^^^^
.. autoclass:: ElementwiseLambdaLayer


Extend layer
-------------

60 changes: 60 additions & 0 deletions tensorlayer/layers/merge.py
@@ -8,6 +8,7 @@
__all__ = [
'ConcatLayer',
'ElementwiseLayer',
'ElementwiseLambdaLayer',
]


@@ -150,3 +151,62 @@ def __init__(
# # self.all_drop = list_remove_repeat(self.all_drop)

self.all_layers.append(self.outputs)


class ElementwiseLambdaLayer(Layer):
"""A layer that use a custom function to combine multiple :class:Layer inputs.

Parameters
----------
layers : list of :class:`Layer`
The list of layers to combine.
fn : function
The function applied to the outputs of the previous layers.
fn_args : dictionary or None
The arguments for the function (optional).
act : activation function
The activation function of this layer.
name : str
A unique layer name.

Examples
--------
The example below implements the reparameterization trick z = mean + noise * tf.exp(std * 0.5):

>>> def func(noise, mean, std):
>>>     return mean + noise * tf.exp(std * 0.5)
>>> x = tf.placeholder(tf.float32, [None, 200])
>>> noise_tensor = tf.random_normal(tf.stack([tf.shape(x)[0], 200]))
>>> noise = tl.layers.InputLayer(noise_tensor)
>>> net = tl.layers.InputLayer(x)
>>> net = tl.layers.DenseLayer(net, n_units=200, act=tf.nn.relu, name='dense1')
>>> mean = tl.layers.DenseLayer(net, n_units=200, name='mean')
>>> std = tl.layers.DenseLayer(net, n_units=200, name='std')
>>> z = tl.layers.ElementwiseLambdaLayer([noise, mean, std], fn=func, name='z')
"""

def __init__(
self,
layers,
fn,
fn_args=None,
[Inline review comment (Member): please add parameter act=None]
act=None,
name='elementwiselambda_layer',
):

super(ElementwiseLambdaLayer, self).__init__(prev_layer=layers, name=name)
logging.info("ElementwiseLambdaLayer %s" % self.name)

if fn_args is None:
fn_args = {}

self.inputs = [layer.outputs for layer in layers]
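# note: fn receives these output tensors positionally, in the order they appear in layers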

with tf.variable_scope(name) as vs:
self.outputs = fn(*self.inputs, **fn_args)
if act:
self.outputs = act(self.outputs)
variables = tf.get_collection(TF_GRAPHKEYS_VARIABLES, scope=vs.name)

self.all_layers.append(self.outputs)
self.all_params.extend(variables)
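
For context, here is a minimal sketch of the two parameters the docstring example does not exercise, fn_args and act (the latter added per the inline review above). Layer names and shapes are illustrative, assuming the TensorFlow 1.x / TensorLayer API used throughout this PR:

import tensorflow as tf
import tensorlayer as tl

def scaled_sum(a, b, scale=1.0):
    # custom merge function; the extra keyword argument is supplied via fn_args
    return scale * (a + b)

x = tf.placeholder(tf.float32, [None, 64])
net_in = tl.layers.InputLayer(x, name='in')
net_a = tl.layers.DenseLayer(net_in, n_units=32, name='branch_a')
net_b = tl.layers.DenseLayer(net_in, n_units=32, name='branch_b')

# branch outputs are passed to fn positionally; act is applied to the merged tensor
net = tl.layers.ElementwiseLambdaLayer(
    [net_a, net_b], fn=scaled_sum, fn_args={'scale': 0.5}, act=tf.nn.relu, name='scaled_sum'
)

With these arguments the layer computes tf.nn.relu(0.5 * (a + b)); leaving fn_args as None falls back to the function's defaults, and act=None leaves the merged tensor unactivated.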
34 changes: 34 additions & 0 deletions tests/test_layers_merge.py
@@ -44,6 +44,18 @@ def setUpClass(cls):
cls.data["net_vector2"]["params"] = net_v2.all_params
cls.data["net_vector2"]["n_params"] = net_v2.count_params()

net_v3_1 = tl.layers.DenseLayer(inputs, n_units=100, act=tf.nn.relu, name='net_a')
net_v3_2 = tl.layers.DenseLayer(inputs, n_units=100, act=tf.nn.relu, name='net_b')
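# merge the two dense branches with an elementwise product supplied as a lambda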
net_v3 = tl.layers.ElementwiseLambdaLayer([net_v3_1, net_v3_2], fn=lambda a, b: a * b, name='multiply')

net_v3.print_params(False)
net_v3.print_layers()

cls.data["net_vector3"] = dict()
cls.data["net_vector3"]["layers"] = net_v3.all_layers
cls.data["net_vector3"]["params"] = net_v3.all_params
cls.data["net_vector3"]["n_params"] = net_v3.count_params()

#############
# Image #
#############
@@ -75,6 +87,17 @@ def setUpClass(cls):
cls.data["net_image2"]["params"] = net_im2.all_params
cls.data["net_image2"]["n_params"] = net_im2.count_params()

net_im3 = tl.layers.ElementwiseLambdaLayer([net_im1_1, net_im1_2], fn=lambda a, b: a * b, name='multiply2')

net_im3.print_params(False)
net_im3.print_layers()

cls.data["net_image3"] = dict()
cls.data["net_image3"]["shape"] = net_im3.outputs.get_shape().as_list()
cls.data["net_image3"]["layers"] = net_im3.all_layers
cls.data["net_image3"]["params"] = net_im3.all_params
cls.data["net_image3"]["n_params"] = net_im3.count_params()

@classmethod
def tearDownClass(cls):
tf.reset_default_graph()
@@ -89,6 +112,11 @@ def test_net_vector2(self):
self.assertEqual(len(self.data["net_vector2"]["params"]), 4)
self.assertEqual(self.data["net_vector2"]["n_params"], 157000)

def test_net_vector3(self):
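# 157,000 = 2 dense branches x (784 * 100 weights + 100 biases); assumes the 784-dim input placeholder shared with the other vector tests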
self.assertEqual(len(self.data["net_vector3"]["layers"]), 3)
self.assertEqual(len(self.data["net_vector3"]["params"]), 4)
self.assertEqual(self.data["net_vector3"]["n_params"], 157000)

def test_net_image1(self):
self.assertEqual(self.data["net_image1"]["shape"][1:], [50, 50, 64])
self.assertEqual(len(self.data["net_image1"]["layers"]), 3)
@@ -101,6 +129,12 @@ def test_net_image2(self):
self.assertEqual(len(self.data["net_image2"]["params"]), 4)
self.assertEqual(self.data["net_image2"]["n_params"], 1792)

def test_net_image3(self):
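# 1,792 = 2 conv branches x (3 * 3 * 3 * 32 weights + 32 biases); assumes 3x3 kernels over a 3-channel image input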
self.assertEqual(self.data["net_image3"]["shape"][1:], [50, 50, 32])
self.assertEqual(len(self.data["net_image3"]["layers"]), 3)
self.assertEqual(len(self.data["net_image3"]["params"]), 4)
self.assertEqual(self.data["net_image3"]["n_params"], 1792)


if __name__ == '__main__':
