From a5de0ca7da423a6f121bc83ea6ca8dcf5649dd82 Mon Sep 17 00:00:00 2001
From: kosh
Date: Thu, 9 May 2024 13:21:34 +0530
Subject: [PATCH] Implement softmax layer

---
 src/layer.py | 25 +++++++++++++++++++++++++
 1 file changed, 25 insertions(+)

diff --git a/src/layer.py b/src/layer.py
index e3e9b20..3214d26 100644
--- a/src/layer.py
+++ b/src/layer.py
@@ -89,3 +89,28 @@ class ReLU(Layer):
 
     def d_output_wrt_inputs(self) -> np.ndarray:
         return (self._cached_inputs > 0) * 1.0
+
+
+class Softmax(Layer):
+    def forward(self, x: np.ndarray) -> np.ndarray:
+        # Shift by the max for numerical stability before exponentiating.
+        exp = np.exp(x - np.max(x, axis=-1, keepdims=True))
+        return exp / np.sum(exp, axis=-1, keepdims=True)
+
+    @property
+    def parameters(self) -> list[np.ndarray]:
+        # Softmax has no trainable parameters.
+        return []
+
+    @parameters.setter
+    def parameters(self, parameters: list[np.ndarray]) -> None:
+        return
+
+    def d_output_wrt_parameters(self) -> list[np.ndarray]:
+        return []
+
+    def d_output_wrt_inputs(self) -> np.ndarray:
+        # Elementwise derivative: the diagonal of the softmax Jacobian,
+        # matching the per-element interface used by ReLU above.
+        out = self.forward(self._cached_inputs)
+        return out * (1 - out)
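
Note (reviewer addition, not part of the commit): a minimal smoke-test sketch for the new layer, assuming Softmax is importable from src.layer as in the diff above; the input values are illustrative only.

    import numpy as np
    from src.layer import Softmax

    layer = Softmax()
    logits = np.array([1.0, 2.0, 3.0])
    probs = layer.forward(logits)
    # Outputs are positive and sum to 1; the max-shift in forward()
    # keeps np.exp from overflowing for large logits.
    assert np.isclose(probs.sum(), 1.0)
    assert np.all(probs > 0)
    print(probs)  # approx. [0.09003057 0.24472847 0.66524096]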