⚗️ | Experiment with other layer layouts.
generator.py
@@ -3,21 +3,25 @@ import torch.nn as nn
 class SISUGenerator(nn.Module):
     def __init__(self, upscale_scale=1): # No noise_dim parameter
         super(SISUGenerator, self).__init__()
-        self.model = nn.Sequential(
+        self.layers1 = nn.Sequential(
             nn.Conv1d(2, 128, kernel_size=3, padding=1),
-            nn.LeakyReLU(0.2, inplace=True),
+            # nn.LeakyReLU(0.2, inplace=True),
             nn.Conv1d(128, 256, kernel_size=3, padding=1),
             nn.LeakyReLU(0.2, inplace=True),
-
-            nn.Upsample(scale_factor=upscale_scale, mode='nearest'),
-
-            nn.Conv1d(256, 128, kernel_size=3, padding=1),
-            nn.LeakyReLU(0.2, inplace=True),
-            nn.Conv1d(128, 64, kernel_size=3, padding=1),
-            nn.LeakyReLU(0.2, inplace=True),
-            nn.Conv1d(64, 2, kernel_size=3, padding=1),
-            nn.Tanh()
+            # nn.LeakyReLU(0.2, inplace=True),
         )
 
-    def forward(self, x):
-        return self.model(x)
+        self.layers2 = nn.Sequential(
+            nn.Conv1d(256, 128, kernel_size=3, padding=1),
+            # nn.LeakyReLU(0.2, inplace=True),
+            nn.Conv1d(128, 64, kernel_size=3, padding=1),
+            # nn.LeakyReLU(0.2, inplace=True),
+            nn.Conv1d(64, 2, kernel_size=3, padding=1),
+            # nn.Tanh()
+        )
+    def forward(self, x, scale):
+        x = self.layers1(x)
+        upsample = nn.Upsample(scale_factor=scale, mode='nearest')
+        x = upsample(x)
+        x = self.layers2(x)
+        return x
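With this layout the upsampling factor becomes a forward() argument instead of a fixed constructor parameter, so a single instance can produce several output scales. A minimal usage sketch of the new call signature (the `generator` import path is assumed from the file name; the input shape (batch, 2, samples) follows the first Conv1d(2, 128, ...)):

    import torch
    from generator import SISUGenerator  # import path assumed from the file name

    model = SISUGenerator()
    x = torch.randn(4, 2, 1024)   # four 2-channel clips, 1024 samples each
    out = model(x, scale=4)       # upsample factor now chosen per call
    print(out.shape)              # torch.Size([4, 2, 4096])

Constructing nn.Upsample inside forward is harmless here since the module holds no parameters; torch.nn.functional.interpolate(x, scale_factor=scale, mode='nearest') would express the same operation without building a module on every call.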