⚗️ | Added some stupid ways for training + some makeup

2025-10-04 22:38:11 +03:00
parent 0bc8fc2792
commit 3f23242d6f
12 changed files with 304 additions and 463 deletions

@@ -49,74 +49,18 @@ class AttentionBlock(nn.Module):
 class SISUDiscriminator(nn.Module):
-    def __init__(self, base_channels=16):
+    def __init__(self, layers=32):
         super(SISUDiscriminator, self).__init__()
-        layers = base_channels
         self.model = nn.Sequential(
-            discriminator_block(
-                1,
-                layers,
-                kernel_size=7,
-                stride=1,
-                spectral_norm=True,
-                use_instance_norm=False,
-            ),
-            discriminator_block(
-                layers,
-                layers * 2,
-                kernel_size=5,
-                stride=2,
-                spectral_norm=True,
-                use_instance_norm=True,
-            ),
-            discriminator_block(
-                layers * 2,
-                layers * 4,
-                kernel_size=5,
-                stride=1,
-                dilation=2,
-                spectral_norm=True,
-                use_instance_norm=True,
-            ),
+            discriminator_block(1, layers, kernel_size=7, stride=1),
+            discriminator_block(layers, layers * 2, kernel_size=5, stride=2),
+            discriminator_block(layers * 2, layers * 4, kernel_size=5, dilation=2),
             AttentionBlock(layers * 4),
-            discriminator_block(
-                layers * 4,
-                layers * 8,
-                kernel_size=5,
-                stride=1,
-                dilation=4,
-                spectral_norm=True,
-                use_instance_norm=True,
-            ),
-            discriminator_block(
-                layers * 8,
-                layers * 4,
-                kernel_size=5,
-                stride=2,
-                spectral_norm=True,
-                use_instance_norm=True,
-            ),
-            discriminator_block(
-                layers * 4,
-                layers * 2,
-                kernel_size=3,
-                stride=1,
-                spectral_norm=True,
-                use_instance_norm=True,
-            ),
+            discriminator_block(layers * 4, layers * 8, kernel_size=5, dilation=4),
+            discriminator_block(layers * 8, layers * 2, kernel_size=5, stride=2),
-            discriminator_block(
-                layers * 2,
-                layers,
-                kernel_size=3,
-                stride=1,
-                spectral_norm=True,
-                use_instance_norm=True,
-            ),
-            discriminator_block(
-                layers,
-                1,
-                kernel_size=3,
-                stride=1,
-                spectral_norm=False,
-                use_instance_norm=False,
-            ),
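
The compact constructor calls in the new version of SISUDiscriminator lean on the default arguments of a discriminator_block helper and an AttentionBlock module defined elsewhere in this file and not shown in this hunk. The sketch below is only a guess at what those pieces might look like, assuming 1D convolutions over raw audio, LeakyReLU activations, and defaults of spectral_norm=True / use_instance_norm=True; none of this is confirmed by the diff.

# A minimal sketch of the helpers the hunk above assumes; the real
# discriminator_block / AttentionBlock live elsewhere in this file and
# may differ (1D convs, LeakyReLU, and these defaults are guesses).
import torch
import torch.nn as nn
from torch.nn.utils import spectral_norm as sn


def discriminator_block(
    in_channels,
    out_channels,
    kernel_size=3,
    stride=1,
    dilation=1,
    spectral_norm=True,
    use_instance_norm=True,
):
    # "Same"-style padding so dilated convolutions keep the time dimension
    # when stride=1.
    padding = (kernel_size // 2) * dilation
    conv = nn.Conv1d(
        in_channels,
        out_channels,
        kernel_size=kernel_size,
        stride=stride,
        dilation=dilation,
        padding=padding,
    )
    if spectral_norm:
        conv = sn(conv)
    blocks = [conv]
    if use_instance_norm:
        blocks.append(nn.InstanceNorm1d(out_channels))
    blocks.append(nn.LeakyReLU(0.2, inplace=True))
    return nn.Sequential(*blocks)


class AttentionBlock(nn.Module):
    # Simple channel-gating attention: pool over time, re-weight channels.
    def __init__(self, channels):
        super().__init__()
        self.gate = nn.Sequential(
            nn.AdaptiveAvgPool1d(1),
            nn.Conv1d(channels, channels, kernel_size=1),
            nn.Sigmoid(),
        )

    def forward(self, x):
        return x * self.gate(x)


if __name__ == "__main__":
    # Smoke test on a batch of mono waveforms (shapes are illustrative).
    x = torch.randn(4, 1, 4096)
    block = discriminator_block(1, 32, kernel_size=7, stride=1)
    attn = AttentionBlock(32)
    print(attn(block(x)).shape)  # torch.Size([4, 32, 4096])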