-
Notifications
You must be signed in to change notification settings - Fork 0
/
fcn.py
76 lines (65 loc) · 2.3 KB
/
fcn.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.
"""Simple fully convolutional neural network (FCN) implementations
Integrates dropout functionality for regularization.
"""
import torch.nn as nn
from torch import Tensor
from torch.nn.modules import Module
class FCN(Module):
    """A simple 5 layer FCN with leaky relus and 'same' padding.

    Five 3x3 convolutions (padding=1, so spatial size is preserved), each
    followed by LeakyReLU and Dropout, then a final 1x1 convolution that
    projects the features to ``classes`` output channels.
    """

    def __init__(
        self,
        in_channels: int,
        classes: int,
        num_filters: int = 64,
        dropout: float = 0.3,
    ) -> None:
        """Initializes the 5 layer FCN model.

        Args:
            in_channels: Number of input channels that the model will expect
            classes: Number of filters in the final layer
            num_filters: Number of filters in each convolutional layer
            dropout: Dropout rate used when training the model
        """
        super().__init__()

        # Build the five identical conv -> LeakyReLU -> Dropout stages in a
        # loop instead of hand-unrolling them. Only the first conv differs
        # (it maps in_channels -> num_filters). The flat ordering matches the
        # original hand-written Sequential, so state_dict keys are unchanged.
        layers: list[Module] = []
        channels = in_channels
        for _ in range(5):
            layers.append(
                nn.Conv2d(channels, num_filters, kernel_size=3, stride=1, padding=1)
            )
            layers.append(nn.LeakyReLU(inplace=True))
            layers.append(nn.Dropout(p=dropout))
            channels = num_filters

        self.backbone = nn.Sequential(*layers)

        # 1x1 conv: per-pixel linear projection to the class dimension.
        self.last = nn.Conv2d(
            num_filters, classes, kernel_size=1, stride=1, padding=0
        )

    def forward(self, x: Tensor) -> Tensor:
        """Forward pass of the model.

        Args:
            x: Input tensor of shape (batch, in_channels, H, W)

        Returns:
            Output tensor of shape (batch, classes, H, W); spatial size is
            preserved by the 'same' padding throughout.
        """
        x = self.backbone(x)
        x = self.last(x)
        return x