| Made training a bit... spicier.

2025-09-10 19:52:53 +03:00
parent ff38cefdd3
commit 0bc8fc2792
8 changed files with 581 additions and 303 deletions


@@ -1,6 +1,6 @@
import torch
import torch.nn as nn

def conv_block(in_channels, out_channels, kernel_size=3, dilation=1):
    return nn.Sequential(
        nn.Conv1d(
@@ -8,29 +8,32 @@ def conv_block(in_channels, out_channels, kernel_size=3, dilation=1):
            out_channels,
            kernel_size=kernel_size,
            dilation=dilation,
-           padding=(kernel_size // 2) * dilation
+           padding=(kernel_size // 2) * dilation,
        ),
        nn.InstanceNorm1d(out_channels),
-       nn.PReLU()
+       nn.PReLU(),
    )
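
A quick shape check of the block above (an illustrative sketch, not part of this commit; the input sizes are assumptions):

block = conv_block(in_channels=1, out_channels=16, kernel_size=3, dilation=2)
x = torch.randn(4, 1, 1024)   # (batch, channels, samples)
y = block(x)                  # padding=(kernel_size // 2) * dilation keeps the length: (4, 16, 1024)
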
class AttentionBlock(nn.Module):
    """
    Simple Channel Attention Block. Learns to weight channels based on their importance.
    """

    def __init__(self, channels):
        super(AttentionBlock, self).__init__()
        self.attention = nn.Sequential(
            nn.Conv1d(channels, channels // 4, kernel_size=1),
            nn.ReLU(inplace=True),
            nn.Conv1d(channels // 4, channels, kernel_size=1),
-           nn.Sigmoid()
+           nn.Sigmoid(),
        )

    def forward(self, x):
        attention_weights = self.attention(x)
        return x * attention_weights
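
To make the docstring concrete (an illustrative sketch, not part of this commit): the two 1x1 convolutions squeeze the channel dimension by 4x and expand it back, and the Sigmoid turns the result into per-channel, per-position gates in (0, 1) that rescale the input.

attn = AttentionBlock(channels=16)
x = torch.randn(2, 16, 512)
gates = attn.attention(x)     # (2, 16, 512), values in (0, 1)
out = attn(x)                 # x * gates, same shape as x
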
class ResidualInResidualBlock(nn.Module):
    def __init__(self, channels, num_convs=3):
        super(ResidualInResidualBlock, self).__init__()
@@ -47,6 +50,7 @@ class ResidualInResidualBlock(nn.Module):
        x = self.attention(x)
        return x + residual

class SISUGenerator(nn.Module):
    def __init__(self, channels=16, num_rirb=4, alpha=1.0):
        super(SISUGenerator, self).__init__()
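
The constructor signature suggests num_rirb stacked ResidualInResidualBlock modules at a base width of channels; a hedged instantiation sketch, since the rest of the class is cut off in this view:

gen = SISUGenerator(channels=16, num_rirb=4, alpha=1.0)
# The forward signature is not shown in this diff, so the call below is an assumption:
# y = gen(torch.randn(1, 1, 8192))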