feat: ActivationFunc classes ReLU and LeakyReLU

Lenoctambule
2026-03-29 19:20:05 +02:00
parent 44bf4c0286
commit 53c7f73055
5 changed files with 37 additions and 21 deletions


@@ -9,18 +9,6 @@ def softmax(v: np.ndarray) -> np.ndarray:
    return exp_v / np.sum(exp_v)

def relu(x: np.ndarray, derivative=False) -> np.ndarray:
    # Rectified linear unit: max(0, x); the derivative is 1 for x > 0, else 0.
    if derivative:
        return x > 0
    return x * (x > 0)

def leaky_relu(x: np.ndarray, derivative=False, k=0.01) -> np.ndarray:
    # Leaky ReLU: x for x > 0, k * x otherwise (k is the negative slope).
    if derivative:
        return 1 * (x > 0) + k * (x <= 0)
    return x * (x > 0) + x * k * (x <= 0)

def normalize(v: np.ndarray) -> np.ndarray:
    return v / (np.linalg.norm(v) + 1e-8)
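
The ActivationFunc classes named in the commit title land in the other changed files and are not visible in this hunk. A minimal sketch of what such a class-based replacement could look like, assuming a base class with a call/derivative interface (the ReLU and LeakyReLU names come from the commit title; the ActivationFunc method names and the slope parameter k are assumptions, not taken from the diff):

import numpy as np

class ActivationFunc:
    # Hypothetical base interface; only the class name is suggested by the commit.
    def __call__(self, x: np.ndarray) -> np.ndarray:
        raise NotImplementedError

    def derivative(self, x: np.ndarray) -> np.ndarray:
        raise NotImplementedError

class ReLU(ActivationFunc):
    def __call__(self, x: np.ndarray) -> np.ndarray:
        return x * (x > 0)

    def derivative(self, x: np.ndarray) -> np.ndarray:
        return (x > 0).astype(x.dtype)

class LeakyReLU(ActivationFunc):
    def __init__(self, k: float = 0.01):
        self.k = k  # negative-slope coefficient, same default as the removed function

    def __call__(self, x: np.ndarray) -> np.ndarray:
        return x * (x > 0) + x * self.k * (x <= 0)

    def derivative(self, x: np.ndarray) -> np.ndarray:
        return 1.0 * (x > 0) + self.k * (x <= 0)

One advantage of the class form over the removed free functions: the slope k is stored once on the instance, so the forward pass and the derivative always use the same value instead of each call taking its own keyword argument. For example, LeakyReLU(k=0.05)(np.array([-1.0, 2.0])) gives array([-0.05, 2.]).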