#! /usr/bin/python
- # -*- coding: utf8 -*-
-
-
+ # -*- coding: utf-8 -*-

import tensorflow as tf
@@ -13,7 +11,6 @@ def identity(x, name=None):
    x : a tensor input
        input(s)

-
    Returns
    --------
    A `Tensor` with the same type as `x`.
@@ -37,14 +34,13 @@ def ramp(x=None, v_min=0, v_max=1, name=None):
    name : a string or None
        An optional name to attach to this activation function.

-
    Returns
    --------
    A `Tensor` with the same type as `x`.
    """
    return tf.clip_by_value(x, clip_value_min=v_min, clip_value_max=v_max, name=name)
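For reference, a minimal usage sketch of the ramp clipping behaviour (illustrative values only, TensorFlow 1.x session style to match the rest of this file; not part of the commit):

    import tensorflow as tf

    # ramp(x) with the default v_min=0, v_max=1 reduces to this clip
    x = tf.constant([-2.0, 0.3, 5.0])
    y = tf.clip_by_value(x, clip_value_min=0, clip_value_max=1)
    with tf.Session() as sess:
        print(sess.run(y))  # [0.  0.3 1. ]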
- def leaky_relu(x=None, alpha=0.1, name="LeakyReLU"):
+ def leaky_relu(x=None, alpha=0.1, name="lrelu"):
    """The LeakyReLU, Shortcut is ``lrelu``.

    Modified version of ReLU, introducing a nonzero gradient for negative
@@ -67,16 +63,33 @@ def leaky_relu(x=None, alpha=0.1, name="LeakyReLU"):
    ------------
    - `Rectifier Nonlinearities Improve Neural Network Acoustic Models, Maas et al. (2013) <http://web.stanford.edu/~awni/papers/relu_hybrid_icml2013_final.pdf>`_
    """
-     with tf.name_scope(name) as scope:
+     # with tf.name_scope(name) as scope:
        # x = tf.nn.relu(x)
        # m_x = tf.nn.relu(-x)
        # x -= alpha * m_x
-         x = tf.maximum(x, alpha * x)
+     x = tf.maximum(x, alpha * x, name=name)
    return x
#Shortcut
lrelu = leaky_relu
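As a quick sanity check of the changed body (a sketch with illustrative values, TF 1.x session style; not part of the commit): `tf.maximum(x, alpha * x)` reproduces the leaky ReLU, and `name` now labels the `maximum` op directly instead of opening a scope:

    import tensorflow as tf

    x = tf.constant([-1.0, 2.0])
    y = tf.maximum(x, 0.1 * x, name="lrelu")  # same expression as leaky_relu(x, alpha=0.1)
    print(y.name)                             # lrelu:0 -- the op itself now carries the name
    with tf.Session() as sess:
        print(sess.run(y))                    # [-0.1  2. ]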
+
+ def swish(x, name='swish'):
+     """The Swish function, see `Swish: a Self-Gated Activation Function <https://arxiv.org/abs/1710.05941>`_.
+
+     Parameters
+     ----------
+     x : a tensor input
+         input(s)
+
+     Returns
+     --------
+     A `Tensor` with the same type as `x`.
+     """
+     with tf.name_scope(name) as scope:
+         x = tf.nn.sigmoid(x) * x
+     return x
+
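And a matching usage sketch for the new `swish` (illustrative values only): Swish is simply `sigmoid(x) * x`, so it is zero at zero and close to the identity for large positive inputs:

    import tensorflow as tf

    x = tf.constant([-1.0, 0.0, 5.0])
    y = tf.nn.sigmoid(x) * x  # the expression wrapped by swish()
    with tf.Session() as sess:
        print(sess.run(y))    # approximately [-0.269  0.     4.966]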
def pixel_wise_softmax(output, name='pixel_wise_softmax'):
    """Return the softmax outputs of images; every pixel has multiple labels, and the label probabilities of each pixel sum to 1.
    Usually used for image segmentation.