feat: sampling layer w/ forward method + abstract autoencoder

This commit is contained in:
Lenoctambule
2026-03-31 19:10:06 +02:00
parent cc74b62afd
commit e6b508f739
2 changed files with 92 additions and 39 deletions

View File

@@ -3,43 +3,14 @@ from utils import (dynamic_loss_plot_init,
dynamic_loss_plot_update,
dynamic_loss_plot_finish)
from tqdm import tqdm
from layers import DeepNNLayer
from layers import DeepNNLayer, SamplingLayer
from activations import ActivationFunc
from abc import ABC, abstractmethod
# Frames for a CLI progress spinner (consumed alongside tqdm output).
# NOTE(review): every frame is an empty string — the original glyphs
# (presumably braille spinner characters) look lost to an encoding
# issue; restore them from the upstream source.
LOADER = ['', '', '', '', '', '', '', '']
class Autoencoder:
    """Plain autoencoder built from two DeepNNLayer stacks (encoder, decoder)."""

    def __init__(self,
                 encoder_layers: list[int],
                 decoder_layers: list[int],
                 lr: float,
                 activation_func: ActivationFunc):
        """Build encoder/decoder networks with the given layer widths.

        :param encoder_layers: layer sizes of the encoder; last entry is
            the latent width.
        :param decoder_layers: layer sizes of the decoder; first entry
            must equal the latent width.
        :param lr: learning rate passed to both networks.
        :param activation_func: activation used by both networks.
        :raises Exception: if the encoder output width and decoder input
            width disagree.
        """
        if encoder_layers[-1] != decoder_layers[0]:
            # Bug fix: the message previously interpolated
            # encoder_layers[0] on the right-hand side instead of
            # decoder_layers[0], reporting the wrong mismatched value.
            raise Exception(
                f"Encoder output and decoder input don't match "
                f"{encoder_layers[-1]} != {decoder_layers[0]}"
            )
        self.encoder = DeepNNLayer(encoder_layers, lr, activation_func)
        self.decoder = DeepNNLayer(decoder_layers, lr, activation_func)
def __str__(self):
    """Human-readable dump of the encoder and decoder stacks."""
    return 'Encoder:\n{}\n\nDecoder:\n{}'.format(self.encoder, self.decoder)
def loss(self, data_set: list[np.ndarray]) -> float:
    """Mean per-sample L1 reconstruction error over `data_set`."""
    per_sample = [
        np.sum(np.abs(sample - self.forward(sample)[0])) / len(sample)
        for sample in data_set
    ]
    return sum(per_sample) / len(data_set)
def train(self, v: np.ndarray):
    """One optimization step on sample `v`; returns its mean L1 error."""
    reconstruction = self.decoder.forward(self.encoder.forward(v))
    error = reconstruction - v
    # Gradient flows decoder -> encoder.
    self.encoder.backprop(self.decoder.backprop(error))
    return np.sum(np.abs(error)) / len(v)
class AAutoencoder(ABC):
def train_dataset(self,
data_set: list[np.ndarray],
max_epoch: int,
@@ -80,6 +51,65 @@ class Autoencoder:
dynamic_loss_plot_finish(ax, line)
return losses
def loss(self, data_set: list[np.ndarray]) -> float:
    """Average L1 reconstruction error per sample across the data set."""
    total = 0.0
    for sample in data_set:
        reconstruction, _ = self.forward(sample)[0], None
        total += np.sum(np.abs(sample - self.forward(sample)[0])) / len(sample)
    return total / len(data_set)
def save(self, path: str):
    """Pickle this model to `<path>.npy` via numpy (np.save appends the suffix)."""
    np.save(path.removesuffix('.npy'), self)
@staticmethod
def load(path: str) -> 'Autoencoder':
    """Load a model previously written by `save`.

    Bug fix: this function takes no `self`/`cls`, so it must be a
    `@staticmethod`; without the decorator, calling it on an instance
    would pass the instance as `path`.

    :param path: file path, with or without the '.npy' suffix.
    :return: the unpickled model object.
    """
    path = path.removesuffix('.npy') + '.npy'
    # SECURITY: allow_pickle=True executes arbitrary code when loading —
    # only ever load files this program wrote itself.
    data = np.load(path, allow_pickle=True)
    return data.item()
@abstractmethod
def train(self, v: np.ndarray) -> float:
    """Run one training step on sample `v` and return its loss.

    Abstract: each concrete autoencoder supplies its own update rule.
    """
    pass
@abstractmethod
def encode(self, v: np.ndarray) -> np.ndarray:
    """Map input `v` to its latent code.

    Declared abstract but ships a default body so subclasses can call
    super().encode(v).
    NOTE(review): relies on `self.encoder` which this ABC never
    declares — confirm every subclass's __init__ sets it.
    """
    return self.encoder.forward(v)
@abstractmethod
def decode(self, v: np.ndarray) -> np.ndarray:
    """Map latent code `v` back to input space.

    Declared abstract but ships a default body so subclasses can call
    super().decode(v).
    NOTE(review): relies on `self.decoder`, which subclasses must set.
    """
    return self.decoder.forward(v)
@abstractmethod
def forward(self, v: np.ndarray) -> tuple[np.ndarray, np.ndarray]:
    """Full pass through the model.

    :param v: input sample.
    :return: (reconstruction, latent code) pair.
    """
    code = self.encode(v)
    out = self.decode(code)
    return out, code
class Autoencoder(AAutoencoder):
    """Plain (non-variational) autoencoder: DeepNNLayer encoder + decoder."""

    def __init__(self,
                 encoder_layers: list[int],
                 decoder_layers: list[int],
                 lr: float,
                 activation_func: ActivationFunc):
        """Build encoder/decoder networks with the given layer widths.

        :param encoder_layers: encoder layer sizes; last entry is the
            latent width.
        :param decoder_layers: decoder layer sizes; first entry must
            equal the latent width.
        :param lr: learning rate for both networks.
        :param activation_func: activation used by both networks.
        :raises Exception: on a latent-width mismatch.
        """
        if encoder_layers[-1] != decoder_layers[0]:
            # Bug fix: the message previously interpolated
            # encoder_layers[0] instead of decoder_layers[0], so the
            # reported right-hand value was wrong.
            raise Exception(
                f"Encoder output and decoder input don't match "
                f"{encoder_layers[-1]} != {decoder_layers[0]}"
            )
        self.encoder = DeepNNLayer(encoder_layers, lr, activation_func)
        self.decoder = DeepNNLayer(decoder_layers, lr, activation_func)
def train(self, v: np.ndarray) -> float:
    """One optimization step on sample `v`; returns its mean L1 error."""
    reconstruction = self.decoder.forward(self.encoder.forward(v))
    error = reconstruction - v
    # Gradient flows decoder -> encoder.
    self.encoder.backprop(self.decoder.backprop(error))
    return np.sum(np.abs(error)) / len(v)
def __str__(self):
    """Human-readable dump of the encoder and decoder stacks."""
    return 'Encoder:\n{}\n\nDecoder:\n{}'.format(self.encoder, self.decoder)
def encode(self, v: np.ndarray) -> np.ndarray:
    """Project input `v` into the latent space via the encoder network."""
    latent = self.encoder.forward(v)
    return latent
@@ -91,11 +121,17 @@ class Autoencoder:
out = self.decode(code)
return out, code
def save(self, path: str):
    """Pickle this model to `<path>.npy` via numpy (np.save appends the suffix)."""
    np.save(path.removesuffix('.npy'), self)
@staticmethod
def load(path: str) -> 'Autoencoder':
    """Load a model previously written by `save`.

    Bug fix: this function takes no `self`/`cls`, so it must be a
    `@staticmethod`; without the decorator, calling it on an instance
    would pass the instance as `path`.

    :param path: file path, with or without the '.npy' suffix.
    :return: the unpickled model object.
    """
    path = path.removesuffix('.npy') + '.npy'
    # SECURITY: allow_pickle=True executes arbitrary code when loading —
    # only ever load files this program wrote itself.
    data = np.load(path, allow_pickle=True)
    return data.item()
class VariationalAutoencoder(AAutoencoder):
    """Variational autoencoder: encoder/decoder stacks plus a sampling
    layer over the latent space.

    NOTE(review): the class definition may continue beyond this view.
    """

    def __init__(self,
                 encoder_layers: list[int],
                 decoder_layers: list[int],
                 lr: float,
                 activation_func: ActivationFunc):
        # Latent widths of the two halves must line up.
        if encoder_layers[-1] != decoder_layers[0]:
            raise Exception(
                # NOTE(review): message interpolates encoder_layers[0]
                # on the right-hand side; it almost certainly means
                # decoder_layers[0] — confirm and fix.
                f"Encoder output and decoder input don't match {encoder_layers[-1]} != {encoder_layers[0]}" # noqa
            )
        self.encoder = DeepNNLayer(encoder_layers, lr, activation_func)
        self.decoder = DeepNNLayer(decoder_layers, lr, activation_func)
        # Sampler is sized to the latent dimension (decoder input width).
        self.sampler = SamplingLayer(decoder_layers[0], lr, activation_func)