Implement softmax layer

Koshin S Hegde 2024-05-09 13:21:34 +05:30
parent 9b4f3073f3
commit a5de0ca7da


@@ -89,3 +89,28 @@ class ReLU(Layer):
    def d_output_wrt_inputs(self) -> np.ndarray:
        return (self._cached_inputs > 0) * 1.0


class Softmax(Layer):
    def forward(self, x: np.ndarray) -> np.ndarray:
        exp = np.exp(x)
        return exp / np.sum(exp)

    @property
    def parameters(self) -> list[np.ndarray]:
        return []

    @parameters.setter
    def parameters(self, parameters: list[np.ndarray]) -> None:
        return

    def d_output_wrt_parameters(self) -> list[np.ndarray]:
        return []

    def d_output_wrt_inputs(self) -> np.ndarray:
        exp = np.exp(self._cached_inputs)
        out = exp / np.sum(exp)
        return out * (1 - out)
Softmax()
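
Two caveats about this diff, not part of the commit itself: np.exp(x) can overflow for large logits (the usual remedy is to subtract the maximum logit before exponentiating, which cancels in the normalisation), and out * (1 - out) is only the diagonal of the softmax Jacobian, i.e. it ignores the cross-terms between output components. Below is a minimal, self-contained sketch of a numerically stable forward pass and the full Jacobian, written as standalone functions rather than against this repo's Layer API (the function names here are illustrative, not from the repository):

import numpy as np


def softmax(x: np.ndarray) -> np.ndarray:
    # Shift by the max logit so np.exp cannot overflow; the shift
    # cancels out in the normalisation.
    exp = np.exp(x - np.max(x))
    return exp / np.sum(exp)


def softmax_jacobian(x: np.ndarray) -> np.ndarray:
    # Full Jacobian of softmax for a 1-D input: diag(s) - s s^T.
    # Its diagonal is s * (1 - s), which is what the commit's
    # d_output_wrt_inputs returns.
    s = softmax(x)
    return np.diag(s) - np.outer(s, s)


if __name__ == "__main__":
    logits = np.array([2.0, 1.0, 0.1])
    print(softmax(logits))           # probabilities summing to 1
    print(softmax_jacobian(logits))  # 3x3 Jacobian matrix

The diagonal-only derivative is often acceptable in practice when softmax is paired with a cross-entropy loss, where the combined gradient simplifies to predictions minus targets, but as a general-purpose d_output_wrt_inputs it understates the coupling between outputs.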