From af3e94e26c6bbae33bf186bb24a5ea04e25e18a5 Mon Sep 17 00:00:00 2001 From: laicheng Date: Mon, 24 Feb 2020 22:40:37 +0800 Subject: [PATCH 1/5] add mish activation --- docs/modules/activation.rst | 5 +++++ tensorlayer/activation.py | 21 +++++++++++++++++++++ tests/test_activations.py | 10 +++++++++- 3 files changed, 35 insertions(+), 1 deletion(-) diff --git a/docs/modules/activation.rst b/docs/modules/activation.rst index 3965bd007..79bad9601 100644 --- a/docs/modules/activation.rst +++ b/docs/modules/activation.rst @@ -35,6 +35,7 @@ For more complex activation, TensorFlow API will be required. sign hard_tanh pixel_wise_softmax + mish Ramp ------ @@ -68,6 +69,10 @@ Pixel-wise softmax -------------------- .. autofunction:: pixel_wise_softmax +mish +--------- +.. autofunction:: mish + Parametric activation ------------------------------ See ``tensorlayer.layers``. diff --git a/tensorlayer/activation.py b/tensorlayer/activation.py index 7c7b833c3..b52bc120b 100644 --- a/tensorlayer/activation.py +++ b/tensorlayer/activation.py @@ -19,6 +19,7 @@ 'htanh', 'hard_tanh', 'pixel_wise_softmax', + 'mish', ] @@ -339,6 +340,26 @@ def pixel_wise_softmax(x, name='pixel_wise_softmax'): return tf.nn.softmax(x) +def mish(x): + """Mish activation function. + Mish is a novel smooth and non-monotonic neural activation function. + Parameters + ---------- + x : Tensor + input. + Returns + ------- + Tensor + A ``Tensor`` in the same type as ``x``. + References + ---------- + - `Mish: A Self Regularized Non-Monotonic Neural Activation Function, Diganta Misra. 
(2019)` + https://arxiv.org/abs/1908.08681 + """ + + return x * tf.math.tanh(tf.math.softplus(x)) + + # Alias lrelu = leaky_relu lrelu6 = leaky_relu6 diff --git a/tests/test_activations.py b/tests/test_activations.py index dc053dda5..e168bd91e 100644 --- a/tests/test_activations.py +++ b/tests/test_activations.py @@ -5,7 +5,7 @@ import unittest import tensorflow as tf - +import numpy as np import tensorlayer as tl from tests.utils import CustomTestCase @@ -116,6 +116,14 @@ def test_swish(self): self.assertAlmostEqual(computed_output.numpy(), good_output, places=5) + def test_mish(self): + for i in range(-5, 15): + good_output = i * np.tanh(np.math.log(1 + np.math.exp(i))) + + computed_output = tl.act.mish(float(i)) + + self.assertAlmostEqual(computed_output.numpy(), good_output, places=5) + if __name__ == '__main__': From 48d5e43279ec9bcd710b29aff7fbc685dd66f447 Mon Sep 17 00:00:00 2001 From: laicheng Date: Mon, 24 Feb 2020 22:47:45 +0800 Subject: [PATCH 2/5] yapf --- tensorlayer/activation.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/tensorlayer/activation.py b/tensorlayer/activation.py index b52bc120b..df752868d 100644 --- a/tensorlayer/activation.py +++ b/tensorlayer/activation.py @@ -342,19 +342,24 @@ def pixel_wise_softmax(x, name='pixel_wise_softmax'): def mish(x): """Mish activation function. + Mish is a novel smooth and non-monotonic neural activation function. + Parameters ---------- x : Tensor input. + Returns ------- Tensor A ``Tensor`` in the same type as ``x``. + References ---------- - `Mish: A Self Regularized Non-Monotonic Neural Activation Function, Diganta Misra. 
(2019)` https://arxiv.org/abs/1908.08681 + """ return x * tf.math.tanh(tf.math.softplus(x)) From 17df52f7c4996111d854abdb7be7aad4fe00d179 Mon Sep 17 00:00:00 2001 From: laicheng Date: Mon, 24 Feb 2020 23:59:25 +0800 Subject: [PATCH 3/5] references --- tensorlayer/activation.py | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/tensorlayer/activation.py b/tensorlayer/activation.py index df752868d..5c6e67328 100644 --- a/tensorlayer/activation.py +++ b/tensorlayer/activation.py @@ -357,11 +357,9 @@ def mish(x): References ---------- - - `Mish: A Self Regularized Non-Monotonic Neural Activation Function, Diganta Misra. (2019)` - https://arxiv.org/abs/1908.08681 - - """ + - `Mish: A Self Regularized Non-Monotonic Neural Activation Function [Diganta Misra, 2019]`__ + """ return x * tf.math.tanh(tf.math.softplus(x)) From e1b24f91b07c5bfd9c1ec9f6c0eccdd77f1f0e8e Mon Sep 17 00:00:00 2001 From: laicheng Date: Tue, 25 Feb 2020 00:17:29 +0800 Subject: [PATCH 4/5] references --- tensorlayer/activation.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/tensorlayer/activation.py b/tensorlayer/activation.py index 5c6e67328..7df923286 100644 --- a/tensorlayer/activation.py +++ b/tensorlayer/activation.py @@ -345,6 +345,9 @@ def mish(x): Mish is a novel smooth and non-monotonic neural activation function. 
+ This activation function is introduced by the following paper: + `Mish: A Self Regularized Non-Monotonic Neural Activation Function [Diganta Misra, 2019]`__ + Parameters ---------- x : Tensor From d3f6af1f91aad267de12709b7132efa398659e81 Mon Sep 17 00:00:00 2001 From: laicheng Date: Tue, 25 Feb 2020 00:28:34 +0800 Subject: [PATCH 5/5] doc --- tensorlayer/activation.py | 9 +-------- 1 file changed, 1 insertion(+), 8 deletions(-) diff --git a/tensorlayer/activation.py b/tensorlayer/activation.py index 7df923286..e2d3ac3b9 100644 --- a/tensorlayer/activation.py +++ b/tensorlayer/activation.py @@ -343,10 +343,7 @@ def pixel_wise_softmax(x, name='pixel_wise_softmax'): def mish(x): """Mish activation function. - Mish is a novel smooth and non-monotonic neural activation function. - - This activation function is introduced by the following paper: - `Mish: A Self Regularized Non-Monotonic Neural Activation Function [Diganta Misra, 2019]`__ + Reference: [Mish: A Self Regularized Non-Monotonic Neural Activation Function, Diganta Misra, 2019] Parameters ---------- x : Tensor @@ -358,10 +355,6 @@ def mish(x): Tensor A ``Tensor`` in the same type as ``x``. - References ---------- - - `Mish: A Self Regularized Non-Monotonic Neural Activation Function [Diganta Misra, 2019]`__ - """ return x * tf.math.tanh(tf.math.softplus(x))