diff options
author | Christian C <cc@localhost> | 2024-11-11 12:29:32 -0800 |
---|---|---|
committer | Christian C <cc@localhost> | 2024-11-11 12:29:32 -0800 |
commit | b85ee9d64a536937912544c7bbd5b98b635b7e8d (patch) | |
tree | cef7bc17d7b29f40fc6b1867d0ce0a742d5583d0 /code/sunlab/suntorch/models/discriminator.py |
Initial commit
Diffstat (limited to 'code/sunlab/suntorch/models/discriminator.py')
-rw-r--r-- | code/sunlab/suntorch/models/discriminator.py | 32 |
1 files changed, 32 insertions, 0 deletions
import torch.nn as nn
import torch.nn.functional as F
from torch import sigmoid


class Discriminator(nn.Module):
    """Discriminator neural network for adversarial training.

    A three-layer MLP mapping a latent vector to a probability in (0, 1):
    two hidden Linear layers with optional dropout followed by LeakyReLU,
    then a Linear projection to a single sigmoid output.

    Parameters
    ----------
    N : int
        Width of the hidden (inner) layers.
    z_dim : int
        Dimension of the input latent vector.
    dropout : float, optional
        Dropout probability applied before each hidden activation;
        0.0 disables dropout entirely (default).
    negative_slope : float, optional
        Negative slope of the LeakyReLU activations (default 0.3).
    """

    def __init__(self, N, z_dim, dropout=0.0, negative_slope=0.3):
        super().__init__()  # Python-3 idiom; legacy 2-arg super is redundant
        # NOTE: attribute names lin1/lin2/lin3 are part of the state-dict
        # contract (checkpoints) — do not rename.
        self.lin1 = nn.Linear(z_dim, N)
        self.lin2 = nn.Linear(N, N)
        self.lin3 = nn.Linear(N, 1)
        self.p = dropout
        self.negative_slope = negative_slope

    def _activate(self, h):
        """Apply optional dropout then LeakyReLU (dropout first, matching
        the original layer ordering)."""
        if self.p > 0.0:
            # F.dropout respects self.training, so eval() disables it.
            h = F.dropout(h, p=self.p, training=self.training)
        return F.leaky_relu(h, negative_slope=self.negative_slope)

    def forward(self, x):
        """Return the discriminator score for `x`.

        Parameters
        ----------
        x : torch.Tensor
            Input batch of shape (..., z_dim).

        Returns
        -------
        torch.Tensor
            Probabilities in (0, 1) of shape (..., 1).
        """
        h = self._activate(self.lin1(x))
        h = self._activate(self.lin2(h))
        return sigmoid(self.lin3(h))