feat: ActivationFunc classes ReLU and LeakyReLU
utils.py (12 deletions)
@@ -9,18 +9,6 @@ def softmax(v: np.ndarray) -> np.ndarray:
     return exp_v / np.sum(exp_v)
 
 
-def relu(x: np.ndarray, derivative=False) -> np.ndarray:
-    if derivative:
-        return x > 0
-    return x * (x > 0)
-
-
-def leaky_relu(x: np.ndarray, derivative=False, k=0.01) -> np.ndarray:
-    if derivative:
-        return 1 * (x > 0) + k * (x <= 0)
-    return x * (x > 0) + x * 0.01 * (x <= 0)
-
-
 def normalize(v: np.ndarray) -> np.ndarray:
     return v / (np.linalg.norm(v) + 1e-8)
 
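The ActivationFunc, ReLU, and LeakyReLU classes named in the commit title are not part of the hunk shown above; only the old functional helpers are removed from utils.py. A minimal sketch of what such classes might look like, assuming a callable forward pass plus a derivative() method that mirrors the removed functions; everything beyond the three names in the title is an assumption, not the repository's actual code:

import numpy as np


class ActivationFunc:
    """Common interface (hypothetical): call for the forward pass, derivative() for backprop."""

    def __call__(self, x: np.ndarray) -> np.ndarray:
        raise NotImplementedError

    def derivative(self, x: np.ndarray) -> np.ndarray:
        raise NotImplementedError


class ReLU(ActivationFunc):
    def __call__(self, x: np.ndarray) -> np.ndarray:
        # max(0, x), written with a boolean mask like the removed relu()
        return x * (x > 0)

    def derivative(self, x: np.ndarray) -> np.ndarray:
        # 1 where x > 0, 0 elsewhere; cast so gradients stay floating point
        return (x > 0).astype(float)


class LeakyReLU(ActivationFunc):
    def __init__(self, k: float = 0.01):
        self.k = k  # slope for negative inputs

    def __call__(self, x: np.ndarray) -> np.ndarray:
        # uses self.k in both branches; the removed leaky_relu() hard-coded
        # 0.01 here even though its signature accepted k
        return x * (x > 0) + self.k * x * (x <= 0)

    def derivative(self, x: np.ndarray) -> np.ndarray:
        return 1.0 * (x > 0) + self.k * (x <= 0)


# quick check of both activations on a small vector
x = np.array([-2.0, 0.0, 3.0])
print(ReLU()(x), ReLU().derivative(x))            # negatives zeroed; gradient is 0 or 1
print(LeakyReLU()(x), LeakyReLU().derivative(x))  # negatives scaled by k; gradient is k or 1

Holding k as instance state would also remove the inconsistency in the old leaky_relu(), which accepted a k parameter but hard-coded 0.01 in its non-derivative branch.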