Compare commits

...

2 Commits

Author SHA1 Message Date
Lenoctambule
e6b508f739 feat: sampling layer w/ forward method + abstract autoencoder 2026-03-31 19:10:06 +02:00
Lenoctambule
cc74b62afd feat: identity activation func 2026-03-30 05:14:02 +02:00
3 changed files with 100 additions and 39 deletions

View File

@@ -25,3 +25,11 @@ class LeakyReLU(ActivationFunc):
def derivative(self, x): def derivative(self, x):
return (x > 0) + self.k * (x <= 0) return (x > 0) + self.k * (x <= 0)
class Identity(ActivationFunc):
    """Identity activation: passes its input through unchanged.

    Useful as the activation of a purely linear layer (e.g. an output
    layer where no nonlinearity is wanted).
    """

    def __call__(self, x):
        # f(x) = x
        return x

    def derivative(self, x):
        # f'(x) = 1 everywhere.
        # Bug fix: the original signature was `derivative(x)` (no self),
        # so instance calls like `act.derivative(x)` passed x as self and
        # broke; every other activation here takes (self, x).
        return 1

View File

@@ -3,43 +3,14 @@ from utils import (dynamic_loss_plot_init,
dynamic_loss_plot_update, dynamic_loss_plot_update,
dynamic_loss_plot_finish) dynamic_loss_plot_finish)
from tqdm import tqdm from tqdm import tqdm
from layers import DeepNNLayer from layers import DeepNNLayer, SamplingLayer
from activations import ActivationFunc from activations import ActivationFunc
from abc import ABC, abstractmethod
LOADER = ['', '', '', '', '', '', '', ''] LOADER = ['', '', '', '', '', '', '', '']
class Autoencoder: class AAutoencoder(ABC):
def __init__(self,
             encoder_layers: list[int],
             decoder_layers: list[int],
             lr: float,
             activation_func: ActivationFunc):
    """Build the encoder and decoder stacks.

    Args:
        encoder_layers: layer widths of the encoder; last entry is the
            latent (code) size.
        decoder_layers: layer widths of the decoder; first entry must
            equal the latent size.
        lr: learning rate forwarded to both DeepNNLayer stacks.
        activation_func: activation shared by both stacks.

    Raises:
        ValueError: if the encoder's output width and the decoder's
            input width disagree.
    """
    if encoder_layers[-1] != decoder_layers[0]:
        # Bug fix: the message previously printed encoder_layers[0]
        # instead of decoder_layers[0], reporting the wrong value.
        # ValueError subclasses Exception, so existing handlers still match.
        raise ValueError(
            f"Encoder output and decoder input don't match {encoder_layers[-1]} != {decoder_layers[0]}"  # noqa
        )
    self.encoder = DeepNNLayer(encoder_layers, lr, activation_func)
    self.decoder = DeepNNLayer(decoder_layers, lr, activation_func)
def __str__(self):
    """Human-readable dump; delegates to each sub-network's __str__."""
    return f'Encoder:\n{self.encoder}\n\nDecoder:\n{self.decoder}'
def loss(self, data_set: list[np.ndarray]) -> float:
    """Mean absolute reconstruction error, averaged over the data set.

    Each sample's error is itself normalized by the sample length, so
    the result is a per-element average of averages.
    """
    loss = 0
    for x in data_set:
        # forward(x)[0] is the reconstruction (forward returns (out, code)).
        loss += np.sum(np.abs(x - self.forward(x)[0])) / len(x)
    return loss / len(data_set)
def train(self, v: np.ndarray):
    """One gradient step on a single sample.

    Returns the sample's mean absolute reconstruction error (float).
    """
    # Full forward pass: encode then decode.
    out = self.decoder.forward(
        self.encoder.forward(v)
    )
    # Backprop the reconstruction error decoder-first, then feed the
    # gradient the decoder propagates back into the encoder.
    self.encoder.backprop(
        self.decoder.backprop(out - v)
    )
    return np.sum(np.abs(out - v)) / len(v)
def train_dataset(self, def train_dataset(self,
data_set: list[np.ndarray], data_set: list[np.ndarray],
max_epoch: int, max_epoch: int,
@@ -80,6 +51,65 @@ class Autoencoder:
dynamic_loss_plot_finish(ax, line) dynamic_loss_plot_finish(ax, line)
return losses return losses
def loss(self, data_set: list[np.ndarray]) -> float:
    """Mean absolute reconstruction error, averaged over the data set.

    Each sample's error is normalized by its own length before the
    per-data-set average is taken.
    """
    # forward(sample)[0] is the reconstruction; forward returns (out, code).
    per_sample_errors = (
        np.sum(np.abs(sample - self.forward(sample)[0])) / len(sample)
        for sample in data_set
    )
    return sum(per_sample_errors) / len(data_set)
def save(self, path: str):
    """Persist the whole model object to `path` (np.save appends `.npy`).

    NOTE(review): np.save pickles the object; the file can only be read
    back with allow_pickle=True and should only come from trusted sources.
    """
    # Strip any user-supplied suffix so np.save doesn't produce `.npy.npy`.
    path = path.removesuffix('.npy')
    np.save(path, self)
def load(path: str) -> 'Autoencoder':
path = path.removesuffix('.npy') + '.npy'
data = np.load(path, allow_pickle=True)
return data.item()
@abstractmethod
def train(self, v: np.ndarray) -> float:
    """Run one training step on sample `v`; must return the sample loss."""
    pass

# NOTE(review): encode/decode/forward below carry working default bodies
# yet are marked @abstractmethod, so every subclass is forced to override
# them (it may still reuse these via super()). Confirm this is intended
# rather than plain default implementations.
@abstractmethod
def encode(self, v: np.ndarray) -> np.ndarray:
    # Default: run the input through the encoder stack.
    return self.encoder.forward(v)

@abstractmethod
def decode(self, v: np.ndarray) -> np.ndarray:
    # Default: map a latent code back to input space via the decoder stack.
    return self.decoder.forward(v)

@abstractmethod
def forward(self, v: np.ndarray) -> tuple[np.ndarray, np.ndarray]:
    """Full pass: returns (reconstruction, latent code)."""
    code = self.encode(v)
    out = self.decode(code)
    return out, code
class Autoencoder(AAutoencoder):
def __init__(self,
             encoder_layers: list[int],
             decoder_layers: list[int],
             lr: float,
             activation_func: ActivationFunc):
    """Build the encoder and decoder stacks.

    Args:
        encoder_layers: layer widths of the encoder; last entry is the
            latent (code) size.
        decoder_layers: layer widths of the decoder; first entry must
            equal the latent size.
        lr: learning rate forwarded to both DeepNNLayer stacks.
        activation_func: activation shared by both stacks.

    Raises:
        ValueError: if the encoder's output width and the decoder's
            input width disagree.
    """
    if encoder_layers[-1] != decoder_layers[0]:
        # Bug fix: the message previously printed encoder_layers[0]
        # instead of decoder_layers[0], reporting the wrong value.
        # ValueError subclasses Exception, so existing handlers still match.
        raise ValueError(
            f"Encoder output and decoder input don't match {encoder_layers[-1]} != {decoder_layers[0]}"  # noqa
        )
    self.encoder = DeepNNLayer(encoder_layers, lr, activation_func)
    self.decoder = DeepNNLayer(decoder_layers, lr, activation_func)
def train(self, v: np.ndarray) -> float:
    """One gradient step on a single sample.

    Returns the sample's mean absolute reconstruction error.
    """
    # Encode then decode to get the reconstruction.
    reconstruction = self.decoder.forward(self.encoder.forward(v))
    residual = reconstruction - v
    # Backprop decoder-first; its propagated gradient feeds the encoder.
    self.encoder.backprop(self.decoder.backprop(residual))
    return np.sum(np.abs(residual)) / len(v)
def __str__(self):
    """Human-readable dump; delegates to each sub-network's __str__."""
    return f'Encoder:\n{self.encoder}\n\nDecoder:\n{self.decoder}'
def encode(self, v: np.ndarray) -> np.ndarray: def encode(self, v: np.ndarray) -> np.ndarray:
return self.encoder.forward(v) return self.encoder.forward(v)
@@ -91,11 +121,17 @@ class Autoencoder:
out = self.decode(code) out = self.decode(code)
return out, code return out, code
def save(self, path: str):
path = path.removesuffix('.npy')
np.save(path, self)
def load(path: str) -> 'Autoencoder': class VariationalAutoencoder(AAutoencoder):
path = path.removesuffix('.npy') + '.npy' def __init__(self,
data = np.load(path, allow_pickle=True) encoder_layers: list[int],
return data.item() decoder_layers: list[int],
lr: float,
activation_func: ActivationFunc):
if encoder_layers[-1] != decoder_layers[0]:
raise Exception(
f"Encoder output and decoder input don't match {encoder_layers[-1]} != {encoder_layers[0]}" # noqa
)
self.encoder = DeepNNLayer(encoder_layers, lr, activation_func)
self.decoder = DeepNNLayer(decoder_layers, lr, activation_func)
self.sampler = SamplingLayer(decoder_layers[0], lr, activation_func)

View File

@@ -38,6 +38,23 @@ class NNLayer:
return ret return ret
class SamplingLayer:
    """VAE-style stochastic layer.

    Projects the input to a mean and a log-variance, then draws a sample
    z ~ N(mean, sigma^2) with sigma = exp(0.5 * log_var).
    """

    def __init__(self,
                 in_size: int,
                 lr: float,
                 activation_func: ActivationFunc):
        """Initialize the two square projection heads.

        Args:
            in_size: dimension of both the input and the sampled output.
            lr: currently unused; kept so the signature matches the
                other layer classes.
            activation_func: currently unused; kept for signature parity.
        """
        self.W_mean = np.random.uniform(-0.1, 0.1, (in_size, in_size))
        self.W_variance = np.random.uniform(-0.1, 0.1, (in_size, in_size))

    def forward(self, v) -> np.ndarray:
        """Sample a latent vector for input `v`."""
        mean = self.W_mean @ v
        # Bug fix: np.random.normal's second argument is a standard
        # deviation and must be >= 0, but `self.W_variance @ v` is
        # frequently negative with these uniform(-0.1, 0.1) weights,
        # which raised ValueError at runtime. Treat the head as a
        # log-variance so exp(0.5 * log_var) is always a valid std-dev
        # (the standard VAE parameterization).
        log_var = self.W_variance @ v
        return np.random.normal(mean, np.exp(0.5 * log_var))

    def backprop(self, error: np.ndarray) -> np.ndarray:
        # TODO: not implemented yet — should backprop through the
        # sampling step and return the gradient w.r.t. the input.
        # Currently returns None despite the annotation.
        pass
class DeepNNLayer: class DeepNNLayer:
def __init__(self, def __init__(self,
layers: list[int], layers: list[int],