From 4b40685c5875d8170449467836cfb423cefc4372 Mon Sep 17 00:00:00 2001
From: AshDDftw
Date: Sun, 25 Dec 2022 04:41:08 +0530
Subject: [PATCH 1/2] added Relu and LeakyRelu

---
 MLlib/tests/test_activations.py | 20 +++++++++++++++++++-
 1 file changed, 19 insertions(+), 1 deletion(-)

diff --git a/MLlib/tests/test_activations.py b/MLlib/tests/test_activations.py
index d950919..73935c0 100644
--- a/MLlib/tests/test_activations.py
+++ b/MLlib/tests/test_activations.py
@@ -1,5 +1,5 @@
 import numpy as np
-from MLlib.activations import Sigmoid, TanH, Softmax
+from MLlib.activations import Sigmoid, TanH, Softmax, Relu, LeakyRelu
 
 
 def test_sigmoid():
@@ -18,3 +18,21 @@ def test_Softmax():
     X = np.array([0])
     assert Softmax.activation(X) == np.array([1, 2, 3, 4]).all()
     assert Softmax.derivative(X) == np.array([0, 1, 2, 3, 4, 5]).all()
+
+
+def test_Relu():
+    X=np.array([0])
+    assert Relu.activation(X)==np.array([0]).all()
+    X=np.array([1,2.0,3.0]).all()
+    assert Relu.activation(X)==np.array([1,2.0,3.0]).all()
+    assert Relu.derivative(X)==np.array([1,2.0,3.0]).all()
+
+
+def test_LeakyRelu():
+    X=np.array([0])
+    assert LeakyRelu.activation(X,0.01)==np.array(0)
+    X=np.array([2])
+    assert LeakyRelu.activation(X,0.01)==np.array(2)
+
+
+

From b6c60422f5c6da0b8d08d13d78a0853b499e7c02 Mon Sep 17 00:00:00 2001
From: AshDDftw
Date: Mon, 26 Dec 2022 01:33:50 +0530
Subject: [PATCH 2/2] Added activation and deactivation for ReLu and LeakyRelu

---
 MLlib/tests/test_activations.py | 21 +++++++++------------
 1 file changed, 9 insertions(+), 12 deletions(-)

diff --git a/MLlib/tests/test_activations.py b/MLlib/tests/test_activations.py
index 73935c0..ee8f1a3 100644
--- a/MLlib/tests/test_activations.py
+++ b/MLlib/tests/test_activations.py
@@ -21,18 +21,15 @@ def test_Softmax():
 
 
 def test_Relu():
-    X=np.array([0])
-    assert Relu.activation(X)==np.array([0]).all()
-    X=np.array([1,2.0,3.0]).all()
-    assert Relu.activation(X)==np.array([1,2.0,3.0]).all()
-    assert Relu.derivative(X)==np.array([1,2.0,3.0]).all()
+    X = np.array([0])
+    assert Relu.activation(X) == np.array([0]).all()
+    X = np.array([1, 2.0, 3.0]).all()
+    assert Relu.activation(X) == np.array([1, 2.0, 3.0]).all()
+    assert Relu.derivative(X) == np.array([1, 2.0, 3.0]).all()
 
 
 def test_LeakyRelu():
-    X=np.array([0])
-    assert LeakyRelu.activation(X,0.01)==np.array(0)
-    X=np.array([2])
-    assert LeakyRelu.activation(X,0.01)==np.array(2)
-
-
-
+    X = np.array([0])
+    assert LeakyRelu.activation(X, 0.01) == np.array(0)
+    X = np.array([2])
+    assert LeakyRelu.activation(X, 0.01) == np.array(2)
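
Review note: even after PATCH 2/2, the new assertions only fix spacing around operators; they still misuse `.all()`. `np.array([...]).all()` reduces the right-hand side to a single boolean before the `==` comparison runs, and `X = np.array([1, 2.0, 3.0]).all()` rebinds X to the scalar True, so these asserts do not actually check the activation outputs elementwise (the pre-existing test_Softmax context lines share the same pattern). Below is a minimal sketch of value-checking versions of the two tests, assuming MLlib's Relu and LeakyRelu follow the standard definitions max(0, x) and max(alpha * x, x) and operate elementwise on arrays; the expected derivative values are an assumption, since the MLlib implementation itself is not shown in this diff.

    import numpy as np
    from MLlib.activations import Relu, LeakyRelu


    def test_Relu():
        X = np.array([-1.0, 0.0, 2.0])
        # ReLU clamps negative inputs to zero and passes the rest through.
        assert np.array_equal(Relu.activation(X), np.array([0.0, 0.0, 2.0]))
        # Away from x == 0 (where the subgradient is implementation-defined),
        # the derivative is 0 for negative inputs and 1 for positive ones.
        X = np.array([-1.0, 2.0])
        assert np.array_equal(Relu.derivative(X), np.array([0.0, 1.0]))


    def test_LeakyRelu():
        alpha = 0.01
        X = np.array([-1.0, 0.0, 2.0])
        # LeakyReLU scales negatives by alpha; np.allclose tolerates
        # floating-point rounding in the alpha * x branch.
        assert np.allclose(LeakyRelu.activation(X, alpha),
                           np.array([-0.01, 0.0, 2.0]))

Unlike the `expr == arr.all()` pattern, np.array_equal and np.allclose compare shape and values together and return a single boolean, so a wrong output fails the assert instead of passing vacuously.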