-
Notifications
You must be signed in to change notification settings - Fork 4
/
Model.py
52 lines (40 loc) · 1.34 KB
/
Model.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
from torchvision import models
import torch.nn as nn
class model(nn.Module):
    """Multi-branch MLP encoder with a shared projection head.

    One independent feature-extractor branch is built per input domain
    (one per entry of ``input_dim``); every branch maps its domain's
    input to a common ``output_dim``-sized space, after which a shared
    head (``feature_show``) refines the representation.

    Args:
        input_dim: sequence of per-domain input feature sizes; one
            branch is created for each entry.
        output_dim: size of the shared embedding produced by every
            branch and by the shared head.
    """

    def __init__(self, input_dim, output_dim):
        super(model, self).__init__()
        # Flag used by callers to mark whether weights were loaded
        # from a checkpoint (not used inside this class itself).
        self.restored = False
        self.input_dim = input_dim
        self.output_dim = output_dim
        # One private encoder branch per domain; ModuleList registers
        # each branch's parameters with this module.
        self.feature = nn.ModuleList(
            [self._make_branch(dim, output_dim) for dim in input_dim]
        )
        # Shared head applied to every branch's output. The final
        # Linear is intentionally left without BatchNorm/activation so
        # the embedding is unconstrained.
        self.feature_show = nn.Sequential(
            nn.Linear(self.output_dim, self.output_dim),
            nn.BatchNorm1d(self.output_dim),
            nn.LeakyReLU(0.1, True),
            nn.Linear(self.output_dim, self.output_dim),
            nn.BatchNorm1d(self.output_dim),
            nn.LeakyReLU(0.1, True),
            nn.Linear(self.output_dim, self.output_dim),
        )

    @staticmethod
    def _make_branch(in_dim, out_dim):
        """Build one domain branch: expand to 2*in_dim, contract back,
        then project to out_dim (Linear -> BatchNorm -> LeakyReLU x4)."""
        return nn.Sequential(
            nn.Linear(in_dim, 2 * in_dim),
            nn.BatchNorm1d(2 * in_dim),
            nn.LeakyReLU(0.1, True),
            nn.Linear(2 * in_dim, 2 * in_dim),
            nn.BatchNorm1d(2 * in_dim),
            nn.LeakyReLU(0.1, True),
            nn.Linear(2 * in_dim, in_dim),
            nn.BatchNorm1d(in_dim),
            nn.LeakyReLU(0.1, True),
            nn.Linear(in_dim, out_dim),
            nn.BatchNorm1d(out_dim),
            nn.LeakyReLU(0.1, True),
        )

    def forward(self, input_data, domain):
        """Encode a batch from one domain.

        Args:
            input_data: tensor of shape (batch, input_dim[domain]).
            domain: integer index selecting which branch to use.

        Returns:
            Tensor of shape (batch, output_dim).
        """
        feature = self.feature[domain](input_data)
        feature = self.feature_show(feature)
        return feature