:alembic: | Fat architecture. Hopefully better results.

2025-04-06 00:05:43 +03:00
parent f928d8c2cf
commit 9394bc6c5a
5 changed files with 70 additions and 44 deletions


@@ -1,18 +1,28 @@
 import torch
 import torch.nn as nn
 def conv_block(in_channels, out_channels, kernel_size=3, dilation=1):
     return nn.Sequential(
-        nn.Conv1d(in_channels, out_channels, kernel_size=kernel_size, dilation=dilation, padding=(kernel_size // 2) * dilation),
-        nn.BatchNorm1d(out_channels),
+        nn.Conv1d(
+            in_channels,
+            out_channels,
+            kernel_size=kernel_size,
+            dilation=dilation,
+            padding=(kernel_size // 2) * dilation
+        ),
+        nn.InstanceNorm1d(out_channels),
         nn.PReLU()
     )
 class AttentionBlock(nn.Module):
     """
     Simple Channel Attention Block. Learns to weight channels based on their importance.
     """
     def __init__(self, channels):
         super(AttentionBlock, self).__init__()
         self.attention = nn.Sequential(
             nn.Conv1d(channels, channels // 4, kernel_size=1),
-            nn.ReLU(),
+            nn.ReLU(inplace=True),
             nn.Conv1d(channels // 4, channels, kernel_size=1),
             nn.Sigmoid()
         )
@@ -24,7 +34,11 @@ class AttentionBlock(nn.Module):
 class ResidualInResidualBlock(nn.Module):
     def __init__(self, channels, num_convs=3):
         super(ResidualInResidualBlock, self).__init__()
-        self.conv_layers = nn.Sequential(*[conv_block(channels, channels) for _ in range(num_convs)])
+        self.conv_layers = nn.Sequential(
+            *[conv_block(channels, channels) for _ in range(num_convs)]
+        )
         self.attention = AttentionBlock(channels)
     def forward(self, x):
@@ -34,19 +48,27 @@ class ResidualInResidualBlock(nn.Module):
         return x + residual
 class SISUGenerator(nn.Module):
-    def __init__(self, layer=4, num_rirb=4): #increased base layer and rirb amounts
+    def __init__(self, channels=64, num_rirb=8, alpha=1.0):
         super(SISUGenerator, self).__init__()
+        self.alpha = alpha
         self.conv1 = nn.Sequential(
-            nn.Conv1d(1, layer, kernel_size=7, padding=3),
-            nn.BatchNorm1d(layer),
+            nn.Conv1d(1, channels, kernel_size=7, padding=3),
+            nn.InstanceNorm1d(channels),
             nn.PReLU(),
         )
-        self.rir_blocks = nn.Sequential(*[ResidualInResidualBlock(layer) for _ in range(num_rirb)])
-        self.final_layer = nn.Conv1d(layer, 1, kernel_size=3, padding=1)
+        self.rir_blocks = nn.Sequential(
+            *[ResidualInResidualBlock(channels) for _ in range(num_rirb)]
+        )
+        self.final_layer = nn.Conv1d(channels, 1, kernel_size=3, padding=1)
     def forward(self, x):
-        residual = x
+        residual_input = x
         x = self.conv1(x)
-        x = self.rir_blocks(x)
-        x = self.final_layer(x)
-        return x + residual
+        x_rirb_out = self.rir_blocks(x)
+        learned_residual = self.final_layer(x_rirb_out)
+        output = residual_input + self.alpha * learned_residual
+        return output
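
For context, a minimal usage sketch of the reworked generator (not part of the diff): it assumes the SISUGenerator class defined above is in scope and that inputs are mono waveforms shaped (batch, 1, num_samples), which follows from the single-input-channel nn.Conv1d stem; the batch size and sample count below are arbitrary illustrative values.

import torch

# Instantiate with the new, wider defaults introduced by this commit.
generator = SISUGenerator(channels=64, num_rirb=8, alpha=1.0)

# Hypothetical batch of 2 mono waveforms, 16384 samples each.
dummy_audio = torch.randn(2, 1, 16384)

with torch.no_grad():
    enhanced = generator(dummy_audio)

# output = input + alpha * learned_residual, so the input shape is preserved.
assert enhanced.shape == dummy_audio.shape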