Skip to content

Commit

Permalink
Merge pull request NoamRosenberg#21 from NoamRosenberg/Tmp
Browse files Browse the repository at this point in the history
Fix: forward missing layer
  • Loading branch information
NoamRosenberg authored Aug 30, 2019
2 parents 1105c78 + 9472f9b commit 47cf3f1
Show file tree
Hide file tree
Showing 2 changed files with 16 additions and 17 deletions.
28 changes: 14 additions & 14 deletions auto_deeplab.py
Original file line number Diff line number Diff line change
Expand Up @@ -179,7 +179,7 @@ def forward (self, x) :
normalized_betas8 = F.softmax (self.betas8, dim = -1)
normalized_betas16 = F.softmax(self.betas16, dim=-1)
normalized_top_betas = F.softmax(self.top_betas, dim=-1)
for layer in range (self._num_layers - 1) :
for layer in range (self._num_layers) :

if layer == 0 :
level4_new, = self.cells[count] (None, None, self.level_4[-1], None, normalized_alphas)
Expand All @@ -196,15 +196,15 @@ def forward (self, x) :
self.level_8[-1],
normalized_alphas)
count += 1
level4_new = normalized_bottom_betas[layer][0] * level4_new_1 + normalized_bottom_betas[layer][1] * level4_new_2
level4_new = normalized_bottom_betas[layer-1][0] * level4_new_1 + normalized_bottom_betas[layer-1][1] * level4_new_2

level8_new_1, level8_new_2 = self.cells[count] (None,
self.level_4[-1],
self.level_8[-1],
None,
normalized_alphas)
count += 1
level8_new = normalized_top_betas[layer][0] * level8_new_1 + normalized_top_betas[layer][1] * level8_new_2
level8_new = normalized_top_betas[layer-1][0] * level8_new_1 + normalized_top_betas[layer-1][1] * level8_new_2

level16_new, = self.cells[count] (None,
self.level_8[-1],
Expand All @@ -226,23 +226,23 @@ def forward (self, x) :
self.level_8[-1],
normalized_alphas)
count += 1
level4_new = normalized_bottom_betas[layer][0] * level4_new_1 + normalized_bottom_betas[layer][1] * level4_new_2
level4_new = normalized_bottom_betas[layer-1][0] * level4_new_1 + normalized_bottom_betas[layer-1][1] * level4_new_2

level8_new_1, level8_new_2, level8_new_3 = self.cells[count] (self.level_8[-2],
self.level_4[-1],
self.level_8[-1],
self.level_16[-1],
normalized_alphas)
count += 1
level8_new = normalized_betas8[layer - 1][0] * level8_new_1 + normalized_betas8[layer - 1][1] * level8_new_2 + normalized_betas8[layer - 1][2] * level8_new_3
level8_new = normalized_betas8[layer - 2][0] * level8_new_1 + normalized_betas8[layer - 2][1] * level8_new_2 + normalized_betas8[layer - 2][2] * level8_new_3

level16_new_1, level16_new_2 = self.cells[count] (None,
self.level_8[-1],
self.level_16[-1],
None,
normalized_alphas)
count += 1
level16_new = normalized_top_betas[layer][0] * level16_new_1 + normalized_top_betas[layer][1] * level16_new_2
level16_new = normalized_top_betas[layer-1][0] * level16_new_1 + normalized_top_betas[layer-1][1] * level16_new_2


level32_new, = self.cells[count] (None,
Expand All @@ -265,23 +265,23 @@ def forward (self, x) :
self.level_8[-1],
normalized_alphas)
count += 1
level4_new = normalized_bottom_betas[layer][0] * level4_new_1 + normalized_bottom_betas[layer][1] * level4_new_2
level4_new = normalized_bottom_betas[layer-1][0] * level4_new_1 + normalized_bottom_betas[layer-1][1] * level4_new_2

level8_new_1, level8_new_2, level8_new_3 = self.cells[count] (self.level_8[-2],
self.level_4[-1],
self.level_8[-1],
self.level_16[-1],
normalized_alphas)
count += 1
level8_new = normalized_betas8[layer - 1][0] * level8_new_1 + normalized_betas8[layer - 1][1] * level8_new_2 + normalized_betas8[layer - 1][2] * level8_new_3
level8_new = normalized_betas8[layer - 2][0] * level8_new_1 + normalized_betas8[layer - 2][1] * level8_new_2 + normalized_betas8[layer - 2][2] * level8_new_3

level16_new_1, level16_new_2, level16_new_3 = self.cells[count] (self.level_16[-2],
self.level_8[-1],
self.level_16[-1],
self.level_32[-1],
normalized_alphas)
count += 1
level16_new = normalized_betas16[layer - 2][0] * level16_new_1 + normalized_betas16[layer - 2][1] * level16_new_2 + normalized_betas16[layer - 2][2] * level16_new_3
level16_new = normalized_betas16[layer - 3][0] * level16_new_1 + normalized_betas16[layer - 3][1] * level16_new_2 + normalized_betas16[layer - 3][2] * level16_new_3


level32_new_1, level32_new_2 = self.cells[count] (None,
Expand All @@ -290,7 +290,7 @@ def forward (self, x) :
None,
normalized_alphas)
count += 1
level32_new = normalized_top_betas[layer][0] * level32_new_1 + normalized_top_betas[layer][1] * level32_new_2
level32_new = normalized_top_betas[layer-1][0] * level32_new_1 + normalized_top_betas[layer-1][1] * level32_new_2


self.level_4.append (level4_new)
Expand All @@ -306,7 +306,7 @@ def forward (self, x) :
self.level_8[-1],
normalized_alphas)
count += 1
level4_new = normalized_bottom_betas[layer][0] * level4_new_1 + normalized_bottom_betas[layer][1] * level4_new_2
level4_new = normalized_bottom_betas[layer-1][0] * level4_new_1 + normalized_bottom_betas[layer-1][1] * level4_new_2

level8_new_1, level8_new_2, level8_new_3 = self.cells[count] (self.level_8[-2],
self.level_4[-1],
Expand All @@ -315,15 +315,15 @@ def forward (self, x) :
normalized_alphas)
count += 1

level8_new = normalized_betas8[layer - 1][0] * level8_new_1 + normalized_betas8[layer - 1][1] * level8_new_2 + normalized_betas8[layer - 1][2] * level8_new_3
level8_new = normalized_betas8[layer - 2][0] * level8_new_1 + normalized_betas8[layer - 2][1] * level8_new_2 + normalized_betas8[layer - 2][2] * level8_new_3

level16_new_1, layer16_new_2, layer16_new_3 = self.cells[count] (self.level_16[-2],
self.level_8[-1],
self.level_16[-1],
self.level_32[-1],
normalized_alphas)
count += 1
level16_new = normalized_betas16[layer - 2][0] * level16_new_1 + normalized_betas16[layer - 2][1] * level16_new_2 + normalized_betas16[layer - 2][2] * level16_new_3
level16_new = normalized_betas16[layer - 3][0] * level16_new_1 + normalized_betas16[layer - 3][1] * level16_new_2 + normalized_betas16[layer - 3][2] * level16_new_3


level32_new_1, level32_new_2 = self.cells[count] (self.level_32[-2],
Expand All @@ -332,7 +332,7 @@ def forward (self, x) :
None,
normalized_alphas)
count += 1
level32_new = normalized_top_betas[layer][0] * level32_new_1 + normalized_top_betas[layer][1] * level32_new_2
level32_new = normalized_top_betas[layer-1][0] * level32_new_1 + normalized_top_betas[layer-1][1] * level32_new_2


self.level_4.append (level4_new)
Expand Down
5 changes: 2 additions & 3 deletions train_autodeeplab.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,7 @@
from utils.metrics import Evaluator
from auto_deeplab import AutoDeeplab
from architect import Architect
torch.backends.cudnn.benchmark = True

class Trainer(object):
def __init__(self, args):
Expand Down Expand Up @@ -129,7 +130,7 @@ def training(self, epoch):
loss.backward()
self.optimizer.step()

if epoch > self.args.alpha_epoch:
if epoch >= self.args.alpha_epoch:
search = next(iter(self.train_loaderB))
image_search, target_search = search['image'], search['label']
if self.args.cuda:
Expand Down Expand Up @@ -220,8 +221,6 @@ def main():
parser.add_argument('--backbone', type=str, default='resnet',
choices=['resnet', 'xception', 'drn', 'mobilenet'],
help='backbone name (default: resnet)')
parser.add_argument('--out-stride', type=int, default=16,
help='network output stride (default: 8)')
parser.add_argument('--dataset', type=str, default='cityscapes',
choices=['pascal', 'coco', 'cityscapes', 'kd'],
help='dataset name (default: pascal)')
Expand Down

0 comments on commit 47cf3f1

Please sign in to comment.