⚗️ | Experiment with other layer layouts.

2024-12-21 23:27:38 +02:00
parent b7d7e95c89
commit 70e20f53d4
6 changed files with 142 additions and 91 deletions


@@ -3,21 +3,25 @@ import torch.nn as nn
 class SISUGenerator(nn.Module):
     def __init__(self, upscale_scale=1): # No noise_dim parameter
         super(SISUGenerator, self).__init__()
-        self.model = nn.Sequential(
+        self.layers1 = nn.Sequential(
             nn.Conv1d(2, 128, kernel_size=3, padding=1),
-            nn.LeakyReLU(0.2, inplace=True),
+            # nn.LeakyReLU(0.2, inplace=True),
             nn.Conv1d(128, 256, kernel_size=3, padding=1),
-            nn.LeakyReLU(0.2, inplace=True),
-            nn.Upsample(scale_factor=upscale_scale, mode='nearest'),
-            nn.Conv1d(256, 128, kernel_size=3, padding=1),
-            nn.LeakyReLU(0.2, inplace=True),
-            nn.Conv1d(128, 64, kernel_size=3, padding=1),
-            nn.LeakyReLU(0.2, inplace=True),
-            nn.Conv1d(64, 2, kernel_size=3, padding=1),
-            nn.Tanh()
+            # nn.LeakyReLU(0.2, inplace=True),
         )
-    def forward(self, x):
-        return self.model(x)
+        self.layers2 = nn.Sequential(
+            nn.Conv1d(256, 128, kernel_size=3, padding=1),
+            # nn.LeakyReLU(0.2, inplace=True),
+            nn.Conv1d(128, 64, kernel_size=3, padding=1),
+            # nn.LeakyReLU(0.2, inplace=True),
+            nn.Conv1d(64, 2, kernel_size=3, padding=1),
+            # nn.Tanh()
+        )
+    def forward(self, x, scale):
+        x = self.layers1(x)
+        upsample = nn.Upsample(scale_factor=scale, mode='nearest')
+        x = upsample(x)
+        x = self.layers2(x)
+        return x
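
After this change the upsample factor is supplied per call rather than fixed at construction, so one generator instance can serve several upscale ratios. A minimal usage sketch of the new forward signature (the tensor shape, batch size, and scale value below are assumptions for illustration, not part of the commit):

    import torch

    generator = SISUGenerator()              # upscale_scale is no longer referenced after this change
    low_res = torch.randn(1, 2, 4096)        # (batch, channels, samples) -- assumed stereo input shape
    high_res = generator(low_res, scale=4)   # upsample factor chosen at call time
    print(high_res.shape)                    # torch.Size([1, 2, 16384]) -- length grows by the scale factor

Building nn.Upsample inside forward works; torch.nn.functional.interpolate(x, scale_factor=scale, mode='nearest') is the functional equivalent if constructing a module on every call is undesirable.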