Keras-like API Advanced Activations, dropout and noise layers #2222
@@ -16,8 +16,6 @@

 package com.intel.analytics.bigdl.nn.keras

-import com.intel.analytics.bigdl._
-import com.intel.analytics.bigdl.nn._
 import com.intel.analytics.bigdl.nn.abstractnn._
 import com.intel.analytics.bigdl.tensor.Tensor
 import com.intel.analytics.bigdl.tensor.TensorNumericMath.TensorNumeric
@@ -26,21 +24,21 @@ import com.intel.analytics.bigdl.utils.Shape

 import scala.reflect.ClassTag

 @SerialVersionUID(-1470253389268877486L)
 class LeakyReLU[T: ClassTag](private val alpha: Double = 0.01,
Review thread on `private val alpha`:

- Why is this alpha private? cc @zhichao-li
- Please confirm this with the original author; we can open it up if there are no objections.
- In the original nn/LeakyReLU, negval is private, so I made alpha private as well.
- cc @psyyz10 @qiuxin2012 Any comments on this?
- Seems this can be deleted.
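For context, a minimal sketch of what `private` on a Scala constructor parameter changes (hypothetical class names, not part of this PR):

  // A private constructor val is stored but exposes no public accessor,
  // while a plain val generates a getter that callers can use.
  class PrivateAlpha(private val alpha: Double = 0.01)
  class PublicAlpha(val alpha: Double = 0.01)

  object AlphaVisibilityDemo extends App {
    val p = new PrivateAlpha()
    // println(p.alpha)  // does not compile: alpha is private to PrivateAlpha
    val q = new PublicAlpha()
    println(q.alpha)     // prints 0.01
  }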
   var inputShape: Shape = null
 )(implicit ev: TensorNumeric[T])
   extends KerasLayer[Tensor[T], Tensor[T], T](KerasLayer.addBatch(inputShape)) {

   override def doBuild(inputShape: Shape): AbstractModule[Tensor[T], Tensor[T], T] = {
-    val layer = nn.LeakyReLU(
+    val layer = com.intel.analytics.bigdl.nn.LeakyReLU(
       negval = alpha,
       inplace = false
     )
     layer.asInstanceOf[AbstractModule[Tensor[T], Tensor[T], T]]
   }
 }

 object LeakyReLU {

   def apply[@specialized(Float, Double) T: ClassTag](
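For readers of this diff, a small usage sketch of the wrapped layer. It assumes the Keras-like Sequential container from the same nn.keras package and an apply method mirroring the constructor above; it is not taken verbatim from this PR:

  import com.intel.analytics.bigdl.nn.keras.{LeakyReLU, Sequential}
  import com.intel.analytics.bigdl.tensor.Tensor
  import com.intel.analytics.bigdl.utils.Shape

  object LeakyReLUDemo extends App {
    // inputShape excludes the batch dimension; KerasLayer.addBatch prepends it.
    val model = Sequential[Float]()
    model.add(LeakyReLU[Float](alpha = 0.01, inputShape = Shape(3)))

    val input = Tensor[Float](2, 3).randn()  // batch of 2, feature size 3
    val output = model.forward(input)        // negative entries scaled by alpha
    println(output)
  }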
Review comment: Add Scaladoc for the newly added layers as well.
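One way to address this: a Scaladoc sketch for this layer (the wording is illustrative, not taken from the PR; the alpha default and inputShape convention come from the diff above):

  /**
   * Leaky version of a Rectified Linear Unit:
   * f(x) = x for x >= 0, f(x) = alpha * x for x < 0.
   *
   * When you use this layer as the first layer of a model, you need to provide
   * the argument inputShape (a Shape that does not include the batch dimension).
   *
   * @param alpha Double >= 0. Negative slope coefficient. Default is 0.01.
   * @param inputShape Only need to specify this argument when you use this layer
   *                   as the first layer of a model.
   */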