Implement ReLU layer

Koshin S Hegde 2024-05-08 18:49:39 +05:30
parent 2188edacab
commit 06c981253c

@@ -32,7 +32,7 @@ class Layer(ABC):
         pass
 
     @abstractmethod
-    def d_output_wrt_inputs(self) -> np.ndarray:
+    def d_output_wrt_inputs(self, x: np.ndarray) -> np.ndarray:
         pass
@@ -64,5 +64,24 @@ class Dense(Layer):
         """
         return (inputs, np.array([1]))
 
-    def d_output_wrt_inputs(self) -> np.ndarray:
+    def d_output_wrt_inputs(self, x: np.ndarray) -> np.ndarray:
         return self.__w
+
+
+class ReLU(Layer):
+    def forward(self, x: np.ndarray) -> np.ndarray:
+        return (x > 0) * x
+
+    @property
+    def parameters(self) -> tuple[np.ndarray, ...]:
+        return ()
+
+    @parameters.setter
+    def parameters(self, parameters: tuple[np.ndarray, ...]) -> None:
+        return
+
+    def d_output_wrt_parameters(self, inputs: np.ndarray) -> tuple[np.ndarray, ...]:
+        return ()
+
+    def d_output_wrt_inputs(self, x: np.ndarray) -> np.ndarray:
+        return (x > 0) * 1.0
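
Note: ReLU is the elementwise max(0, x), so its derivative with respect to the input is 1 where x > 0 and 0 elsewhere; unlike Dense, that derivative depends on the input itself, which is why d_output_wrt_inputs now takes x. Below is a minimal standalone sketch, not part of the commit, that mirrors the two ReLU expressions above and checks the derivative against a central finite difference (the relu_forward / relu_d_input helper names are made up for illustration).

import numpy as np

def relu_forward(x: np.ndarray) -> np.ndarray:
    return (x > 0) * x          # elementwise max(0, x)

def relu_d_input(x: np.ndarray) -> np.ndarray:
    return (x > 0) * 1.0        # 1 where x > 0, else 0

x = np.array([-2.0, -0.5, 0.3, 4.0])
analytic = relu_d_input(x)

# Central finite-difference approximation of the elementwise derivative.
eps = 1e-6
numeric = (relu_forward(x + eps) - relu_forward(x - eps)) / (2 * eps)

print(analytic)                        # [0. 0. 1. 1.]
print(np.allclose(analytic, numeric))  # True (away from x == 0)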