From a06e1936793959ae99103924a962fe450cda4959 Mon Sep 17 00:00:00 2001
From: Tim O'Shea
Date: Sun, 26 Jul 2015 14:40:28 -0400
Subject: [PATCH] adding thresholded linear and rectified activation functions

---
 docs/sources/layers/advanced_activations.md | 38 +++++++++++++++++++-
 keras/layers/advanced_activations.py        | 40 +++++++++++++++++++++
 2 files changed, 77 insertions(+), 1 deletion(-)

diff --git a/docs/sources/layers/advanced_activations.md b/docs/sources/layers/advanced_activations.md
index c9addff95..f565ab70c 100644
--- a/docs/sources/layers/advanced_activations.md
+++ b/docs/sources/layers/advanced_activations.md
@@ -52,4 +52,40 @@ Parametric Softplus of the form: (`f(x) = alpha * log(1 + exp(beta * x))`). This is
     - __input_shape__: tuple.
 - __References__:
-    - [Inferring Nonlinear Neuronal Computation Based on Physiologically Plausible Inputs](http://journals.plos.org/ploscompbiol/article?id=10.1371/journal.pcbi.1003143)
\ No newline at end of file
+    - [Inferring Nonlinear Neuronal Computation Based on Physiologically Plausible Inputs](http://journals.plos.org/ploscompbiol/article?id=10.1371/journal.pcbi.1003143)
+
+## Thresholded Linear
+
+```python
+keras.layers.advanced_activations.ThresholdedLinear(theta)
+```
+
+Parametrized linear unit that zeroes values within `theta` of zero: `f(x) = x if abs(x) >= theta else 0`.
+
+- __Input shape__: Same as `input_shape`. This layer cannot be used as the first layer in a model.
+
+- __Output shape__: Same as input.
+
+- __Arguments__:
+    - __theta__: float >= 0. Threshold location of activation.
+
+- __References__:
+    - [Zero-Bias Autoencoders and the Benefits of Co-Adapting Features](http://arxiv.org/pdf/1402.3337.pdf)
+
+## Thresholded ReLU
+
+```python
+keras.layers.advanced_activations.ThresholdedReLu(theta)
+```
+
+Parametrized rectified linear unit that zeroes values at or below `theta`: `f(x) = x if x > theta else 0`.
+
+- __Input shape__: Same as `input_shape`. This layer cannot be used as the first layer in a model.
+
+- __Output shape__: Same as input.
+
+- __Arguments__:
+    - __theta__: float >= 0. Threshold location of activation.
+
+- __References__:
+    - [Zero-Bias Autoencoders and the Benefits of Co-Adapting Features](http://arxiv.org/pdf/1402.3337.pdf)
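Reviewer note (not part of the patch): a minimal usage sketch of the two new layers, assuming the Keras Sequential API as it stood at the time of this PR; the `Dense` sizes and loss/optimizer choices are illustrative.

```python
# Usage sketch, not part of the patch. Assumes the Keras 0.x API of this era,
# where Dense took (input_dim, output_dim). Layer sizes are illustrative.
from keras.models import Sequential
from keras.layers.core import Dense
from keras.layers.advanced_activations import ThresholdedLinear, ThresholdedReLu

model = Sequential()
model.add(Dense(784, 128))
model.add(ThresholdedLinear(theta=1.0))  # zeroes activations with abs(x) < 1.0
model.add(Dense(128, 10))
model.add(ThresholdedReLu(theta=0.5))    # zeroes activations with x <= 0.5
model.compile(loss='categorical_crossentropy', optimizer='sgd')
```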
diff --git a/keras/layers/advanced_activations.py b/keras/layers/advanced_activations.py
index 0ec01a897..636ee4334 100644
--- a/keras/layers/advanced_activations.py
+++ b/keras/layers/advanced_activations.py
@@ -68,3 +68,43 @@ class ParametricSoftplus(MaskedLayer):
                 "input_shape": self.input_shape,
                 "alpha_init": self.alpha_init,
                 "beta_init": self.beta_init}
+
+class ThresholdedLinear(MaskedLayer):
+    '''
+    Thresholded Linear Activation: f(x) = x for abs(x) >= theta, 0 otherwise.
+
+    Reference:
+        Zero-Bias Autoencoders and the Benefits of Co-Adapting Features
+        http://arxiv.org/pdf/1402.3337.pdf
+    '''
+    def __init__(self, theta=1.0):
+        super(ThresholdedLinear, self).__init__()
+        self.theta = theta
+
+    def get_output(self, train):
+        X = self.get_input(train)
+        # Zero out activations whose magnitude falls below the threshold.
+        return T.switch(abs(X) < self.theta, 0, X)
+
+    def get_config(self):
+        return {"name": self.__class__.__name__,
+                "theta": self.theta}
+
+class ThresholdedReLu(MaskedLayer):
+    '''
+    Thresholded Rectified Activation: f(x) = x for x > theta, 0 otherwise.
+
+    Reference:
+        Zero-Bias Autoencoders and the Benefits of Co-Adapting Features
+        http://arxiv.org/pdf/1402.3337.pdf
+    '''
+    def __init__(self, theta=1.0):
+        super(ThresholdedReLu, self).__init__()
+        self.theta = theta
+
+    def get_output(self, train):
+        X = self.get_input(train)
+        # Keep activations strictly above the threshold; zero the rest.
+        return T.switch(X > self.theta, X, 0)
+
+    def get_config(self):
+        return {"name": self.__class__.__name__,
+                "theta": self.theta}
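Reviewer note (not part of the patch): the two `T.switch` expressions reduce to simple element-wise rules. A NumPy sketch that mirrors them, handy for checking the threshold semantics by hand:

```python
# NumPy mirror of the patch's Theano T.switch expressions, not part of the
# patch itself. np.where(cond, a, b) selects a where cond holds, else b,
# exactly like T.switch.
import numpy as np

def thresholded_linear(x, theta=1.0):
    # Zero values with abs(x) < theta; pass everything else through unchanged.
    return np.where(np.abs(x) < theta, 0.0, x)

def thresholded_relu(x, theta=1.0):
    # Keep values strictly above theta; zero the rest (including negatives).
    return np.where(x > theta, x, 0.0)

x = np.array([-2.0, -0.5, 0.0, 0.5, 1.0, 2.0])
print(thresholded_linear(x))  # [-2.  0.  0.  0.  1.  2.]
print(thresholded_relu(x))    # [ 0.  0.  0.  0.  0.  2.]
```

Note that both comparisons are strict: an input exactly equal to `theta` passes through `ThresholdedLinear` (it is zeroed only when `abs(x) < theta`) but is zeroed by `ThresholdedReLu` (kept only when `x > theta`).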