Implement ReLU layer
parent 2188edacab
commit 06c981253c
src/layer.py (23 lines changed)
@@ -32,7 +32,7 @@ class Layer(ABC):
         pass
 
     @abstractmethod
-    def d_output_wrt_inputs(self) -> np.ndarray:
+    def d_output_wrt_inputs(self, x: np.ndarray) -> np.ndarray:
         pass
 
 
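The hunk above changes the abstract signature so that d_output_wrt_inputs receives the layer input: a nonlinear layer such as ReLU needs it because its derivative depends on where it is evaluated, whereas Dense can keep ignoring the argument and return its weight matrix. The base class is only partially visible in this diff; the following is a hypothetical reconstruction of the Layer interface implied by the method names that do appear, and the exact set of abstract members is an assumption.

from abc import ABC, abstractmethod

import numpy as np


class Layer(ABC):
    """Assumed sketch of the abstract interface implied by this diff."""

    @abstractmethod
    def forward(self, x: np.ndarray) -> np.ndarray:
        pass

    @property
    @abstractmethod
    def parameters(self) -> tuple[np.ndarray, ...]:
        pass

    @parameters.setter
    @abstractmethod
    def parameters(self, parameters: tuple[np.ndarray, ...]) -> None:
        pass

    @abstractmethod
    def d_output_wrt_parameters(self, inputs: np.ndarray) -> tuple[np.ndarray, ...]:
        pass

    # Changed in this commit: the input is now passed explicitly.
    @abstractmethod
    def d_output_wrt_inputs(self, x: np.ndarray) -> np.ndarray:
        pass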
@@ -64,5 +64,24 @@ class Dense(Layer):
         """
         return (inputs, np.array([1]))
 
-    def d_output_wrt_inputs(self) -> np.ndarray:
+    def d_output_wrt_inputs(self, x: np.ndarray) -> np.ndarray:
         return self.__w
+
+
+class ReLU(Layer):
+    def forward(self, x: np.ndarray) -> np.ndarray:
+        return (x > 0) * x
+
+    @property
+    def parameters(self) -> tuple[np.ndarray, ...]:
+        return ()
+
+    @parameters.setter
+    def parameters(self, parameters: tuple[np.ndarray, ...]) -> None:
+        return
+
+    def d_output_wrt_parameters(self, inputs: np.ndarray) -> tuple[np.ndarray, ...]:
+        return ()
+
+    def d_output_wrt_inputs(self, x: np.ndarray) -> np.ndarray:
+        return (x > 0) * 1.0
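A quick sanity check for the new ReLU layer is to compare d_output_wrt_inputs against a central finite-difference estimate of forward. The snippet below is a usage sketch, not part of the commit; it assumes the module is importable as layer and that ReLU implements every abstract method of Layer, so it can be instantiated with no arguments.

import numpy as np

from layer import ReLU  # assumed import path for src/layer.py

relu = ReLU()
x = np.array([-2.0, -0.5, 0.0, 0.5, 2.0])

# Forward pass: negative inputs are zeroed, positive inputs pass through.
y = relu.forward(x)

# Analytic elementwise derivative: 0 where x < 0, 1 where x > 0.
analytic = relu.d_output_wrt_inputs(x)

# Central finite-difference estimate of the same derivative.
eps = 1e-6
numeric = (relu.forward(x + eps) - relu.forward(x - eps)) / (2 * eps)

# Exclude x == 0, where ReLU is not differentiable (the layer returns 0 there).
mask = x != 0
assert np.allclose(analytic[mask], numeric[mask])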