Implement softmax layer
parent 9b4f3073f3
commit a5de0ca7da
src/layer.py (+25 lines)
@@ -89,3 +89,28 @@ class ReLU(Layer):
 
     def d_output_wrt_inputs(self) -> np.ndarray:
         return (self._cached_inputs > 0) * 1.0
+
+
+class Softmax(Layer):
+    def forward(self, x: np.ndarray) -> np.ndarray:
+        exp = np.exp(x)
+        return exp / np.sum(exp)
+
+    @property
+    def parameters(self) -> list[np.ndarray]:
+        return []
+
+    @parameters.setter
+    def parameters(self, parameters: list[np.ndarray]) -> None:
+        return
+
+    def d_output_wrt_parameters(self) -> list[np.ndarray]:
+        return []
+
+    def d_output_wrt_inputs(self) -> np.ndarray:
+        exp = np.exp(self._cached_inputs)
+        out = exp / np.sum(exp)
+        return out * (1 - out)
+
+
+Softmax()
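
As written, forward can overflow: np.exp on large logits produces inf, and inf / inf becomes nan. Softmax is invariant to shifting its inputs by a constant, so subtracting the maximum logit before exponentiating gives identical outputs without overflow. A minimal sketch of that variant (stable_softmax is a hypothetical helper, not part of this commit; it assumes a plain vector input, as the code above does):

import numpy as np

def stable_softmax(x: np.ndarray) -> np.ndarray:
    # softmax(x) == softmax(x - c) for any constant c; shifting by
    # max(x) keeps every exponent <= 0, so np.exp cannot overflow.
    exp = np.exp(x - np.max(x))
    return exp / np.sum(exp)

logits = np.array([1000.0, 1001.0, 1002.0])
print(stable_softmax(logits))          # ~[0.090, 0.245, 0.665]; the unshifted version returns nan here
print(np.sum(stable_softmax(logits)))  # 1.0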
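
d_output_wrt_inputs mirrors the elementwise convention ReLU uses and returns only the diagonal of the softmax Jacobian, out * (1 - out); the off-diagonal terms -out_i * out_j are dropped. That matches the framework's elementwise-derivative convention as far as this diff shows, but the full Jacobian is diag(s) - s s^T. A sketch for comparison (softmax_jacobian is a hypothetical helper; vector input assumed, and self._cached_inputs is assumed to be filled in by the Layer base class not shown here):

import numpy as np

def softmax_jacobian(x: np.ndarray) -> np.ndarray:
    # Full Jacobian of softmax for a vector input:
    # J[i, j] = s[i] * ((i == j) - s[j]), i.e. diag(s) - outer(s, s).
    exp = np.exp(x - np.max(x))
    s = exp / np.sum(exp)
    return np.diag(s) - np.outer(s, s)

x = np.array([0.2, -1.0, 3.0])
J = softmax_jacobian(x)
print(np.diag(J))     # equals out * (1 - out), the commit's elementwise derivative
print(J.sum(axis=1))  # ~0 in every row: softmax is unchanged by uniform shifts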