feat: add ActivationFunc base class with ReLU and LeakyReLU implementations

This commit is contained in:
Lenoctambule
2026-03-29 19:20:05 +02:00
parent 44bf4c0286
commit 53c7f73055
5 changed files with 37 additions and 21 deletions

27
activations.py Normal file
View File

@@ -0,0 +1,27 @@
import numpy as np
from abc import ABC, abstractmethod
class ActivationFunc(ABC):
    """Abstract base class for activation functions.

    Concrete subclasses are expected to be callable (implementing
    ``__call__`` for the forward pass) and must implement
    :meth:`derivative` for use in backpropagation.
    """

    @abstractmethod
    def derivative(self, x: np.ndarray) -> np.ndarray:
        """Return the elementwise derivative of the activation at ``x``.

        Note: the original declaration omitted ``self``, which made the
        abstract signature disagree with every concrete subclass.
        """
        pass
class ReLU(ActivationFunc):
def __call__(self, x):
return x * (x > 0)
def derivative(self, x):
return x > 0
class LeakyReLU(ActivationFunc):
def __init__(self, k=0.01):
self.k = k
def __call__(self, x):
return x * (x > 0) + self.k * x * (x <= 0)
def derivative(self, x):
return (x > 0) + self.k * (x <= 0)