feat: add ActivationFunc classes ReLU and LeakyReLU
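
Replaces the function-style activations in utils.py (relu / leaky_relu, toggled by a derivative=False flag) with an ActivationFunc ABC whose subclasses pair __call__ with an explicit derivative() method. NNLayer, DeepNNLayer, and Autoencoder now take an ActivationFunc instance instead of a bare function, backprop calls .derivative() directly, and the MNIST training script passes LeakyReLU(). Also adds a missing closing parenthesis in the tqdm progress description.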
activations.py (new file, 27 lines)
@@ -0,0 +1,27 @@
+import numpy as np
+from abc import ABC, abstractmethod
+
+
+class ActivationFunc(ABC):
+    @abstractmethod
+    def derivative(self, v: np.ndarray) -> np.ndarray:
+        pass
+
+
+class ReLU(ActivationFunc):
+    def __call__(self, x):
+        return x * (x > 0)
+
+    def derivative(self, x):
+        return x > 0
+
+
+class LeakyReLU(ActivationFunc):
+    def __init__(self, k=0.01):
+        self.k = k
+
+    def __call__(self, x):
+        return x * (x > 0) + self.k * x * (x <= 0)
+
+    def derivative(self, x):
+        return (x > 0) + self.k * (x <= 0)
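
For context, the new classes are small callable objects that keep an activation and its gradient together. A minimal usage sketch (the input values are arbitrary):

    import numpy as np
    from activations import ReLU, LeakyReLU

    x = np.array([-1.5, 0.0, 2.0])

    relu = ReLU()
    relu(x)              # negatives zeroed: [-0., 0., 2.]
    relu.derivative(x)   # boolean mask, multiplies as 0/1: [False, False, True]

    leaky = LeakyReLU(k=0.1)
    leaky(x)             # negatives scaled by k: [-0.15, 0., 2.]
    leaky.derivative(x)  # [0.1, 0.1, 1.]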
autoencoder.py
@@ -4,6 +4,7 @@ from utils import (dynamic_loss_plot_init,
                    dynamic_loss_plot_finish)
 from tqdm import tqdm
 from layers import DeepNNLayer
+from activations import ActivationFunc

 LOADER = ['⡿', '⣟', '⣯', '⣷', '⣾', '⣽', '⣻', '⢿']

@@ -13,7 +14,7 @@ class Autoencoder:
                  encoder_layers: list[int],
                  decoder_layers: list[int],
                  lr: float,
-                 activation_func):
+                 activation_func: ActivationFunc):
         self.encoder = DeepNNLayer(encoder_layers, lr, activation_func)
         self.decoder = DeepNNLayer(decoder_layers, lr, activation_func)

@@ -49,7 +50,7 @@ class Autoencoder:
         with tqdm(bar_format="{desc} {elapsed} {rate_fmt}") as lbar:
             while True:
                 lbar.set_description(
-                    f"{LOADER[epoch % len(LOADER)]} Training ({epoch=} error={float(prev_error):.6f}",  # noqa
+                    f"{LOADER[epoch % len(LOADER)]} Training ({epoch=} error={float(prev_error):.6f})",  # noqa
                 )
                 lbar.update()
                 error = 0
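
Because Autoencoder now only assumes the ActivationFunc interface, other activations drop in by subclassing. A hypothetical example (Tanh is not part of this commit):

    import numpy as np
    from activations import ActivationFunc

    class Tanh(ActivationFunc):
        def __call__(self, x):
            return np.tanh(x)

        def derivative(self, x):
            # d/dx tanh(x) = 1 - tanh(x)**2
            return 1.0 - np.tanh(x) ** 2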
layers.py (10 changes)
@@ -1,6 +1,6 @@
 import numpy as np
-import types
 from utils import normalize
+from activations import ActivationFunc


 class NNLayer:
@@ -8,7 +8,7 @@ class NNLayer:
                  in_size: int,
                  out_size: int,
                  lr: float,
-                 activation_func: types.FunctionType):
+                 activation_func: ActivationFunc):
         self.W = np.random.uniform(-1, 1, (in_size, out_size))
         self.B = np.zeros((out_size))
         self.lr = lr
@@ -18,7 +18,7 @@ class NNLayer:
         self.activation_func = activation_func

     def __str__(self):
-        return f'[ {self.W.shape[0]} => {self.W.shape[1]}\tlr:{self.lr}\tactivation:{self.activation_func.__name__} ]'  # noqa
+        return f'[ {self.W.shape[0]} => {self.W.shape[1]}\tlr:{self.lr}\tactivation:{self.activation_func.__class__.__name__} ]'  # noqa

     def forward(self, V: np.ndarray) -> np.ndarray:
         self.input = normalize(V)
@@ -29,7 +29,7 @@ class NNLayer:
         return self.output

     def backprop(self, error: np.ndarray) -> np.ndarray:
-        error *= self.activation_func(self.output_linear, True)
+        error *= self.activation_func.derivative(self.output_linear)
         ret = self.W @ error
         dW = np.outer(self.input, error) * self.lr
         dB = error * self.lr
@@ -42,7 +42,7 @@ class DeepNNLayer:
     def __init__(self,
                  layers: list[int],
                  lr: float,
-                 activation_func: types.FunctionType):
+                 activation_func: ActivationFunc):
         self.layers: list[NNLayer] = []
         for i in range(len(layers) - 1):
             self.layers.append(
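
The backprop change above replaces the old derivative=True calling convention with an explicit .derivative() call. One way to sanity-check that an activation's __call__ and derivative agree is a finite-difference test; a minimal sketch (test points and tolerance are arbitrary, chosen away from the kink at 0):

    import numpy as np
    from activations import LeakyReLU

    act = LeakyReLU(k=0.01)
    x = np.array([-2.0, -0.5, 0.5, 2.0])
    eps = 1e-6

    # Central-difference approximation of the derivative at each point.
    numeric = (act(x + eps) - act(x - eps)) / (2 * eps)
    assert np.allclose(numeric, act.derivative(x), atol=1e-5)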
(MNIST training script)
@@ -1,7 +1,7 @@
 import matplotlib.pyplot as plt
 import numpy as np
 from autoencoder import Autoencoder
-from utils import leaky_relu
+from activations import LeakyReLU


 def load_mnist() -> list[np.ndarray]:
@@ -32,7 +32,7 @@ def mnist_train(
         [in_len, 64, 16],
         [16, 64, in_len],
         0.01,
-        leaky_relu
+        LeakyReLU()
     )
     autoencoder.train_dataset(
         x_train,
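
A side effect of passing an instance instead of a function: the leak slope is now a constructor argument, so e.g. LeakyReLU(k=0.1) swaps in a steeper leak at the call site without touching the activation code.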
utils.py (12 changes)
@@ -9,18 +9,6 @@ def softmax(v: np.ndarray) -> np.ndarray:
     return exp_v / np.sum(exp_v)


-def relu(x: np.ndarray, derivative=False) -> np.ndarray:
-    if derivative:
-        return x > 0
-    return x * (x > 0)
-
-
-def leaky_relu(x: np.ndarray, derivative=False, k=0.01) -> np.ndarray:
-    if derivative:
-        return 1 * (x > 0) + k * (x <= 0)
-    return x * (x > 0) + x * 0.01 * (x <= 0)
-
-
 def normalize(v: np.ndarray) -> np.ndarray:
     return v / (np.linalg.norm(v) + 1e-8)
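
Worth noting about the deleted helpers: leaky_relu ignored its k parameter on the forward path (the slope was hard-coded as x * 0.01) while the derivative branch used k, so any call with k != 0.01 produced gradients inconsistent with the forward pass. The new LeakyReLU uses self.k in both methods, removing that mismatch.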