PyTorch 1.11.0 compatibility updates (#1914)
* PyTorch 1.11.0 compatibility updates

Resolves the `AttributeError: 'Upsample' object has no attribute 'recompute_scale_factor'` error first reported in ultralytics/yolov5#5499 and observed in all CI runs on the just-released PyTorch 1.11.0; a minimal sketch of the workaround is shown just before the diff below.

* [pre-commit.ci] auto fixes from pre-commit.com hooks

for more information, see https://pre-commit.ci

* Update experimental.py

* [pre-commit.ci] auto fixes from pre-commit.com hooks

for more information, see https://pre-commit.ci

Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
glenn-jocher and pre-commit-ci[bot] authored Mar 10, 2022
1 parent b6f6b5b commit 7093a2b
Showing 1 changed file with 10 additions and 9 deletions.
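For context, the fix amounts to back-filling the attribute on deserialized `nn.Upsample` modules. The sketch below is illustrative rather than the repo's code: `patch_upsample` is a hypothetical helper name, and `model` stands for any `nn.Module` unpickled from a checkpoint saved under an older PyTorch release, whose `Upsample` instances may lack the `recompute_scale_factor` attribute that `forward()` reads in 1.11.0.

```python
# Illustrative sketch only (hypothetical helper, not from models/experimental.py):
# back-fill the attribute that torch 1.11.0's Upsample.forward() expects but that
# modules pickled under older PyTorch releases may be missing.
import torch.nn as nn


def patch_upsample(model: nn.Module) -> nn.Module:
    for m in model.modules():
        if isinstance(m, nn.Upsample) and not hasattr(m, 'recompute_scale_factor'):
            m.recompute_scale_factor = None  # torch 1.11.0 compatibility
    return model
```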
models/experimental.py (19 changes: 10 additions & 9 deletions)

```diff
@@ -94,21 +94,22 @@ def attempt_load(weights, map_location=None, inplace=True, fuse=True):
     model = Ensemble()
     for w in weights if isinstance(weights, list) else [weights]:
         ckpt = torch.load(attempt_download(w), map_location=map_location)  # load
-        if fuse:
-            model.append(ckpt['ema' if ckpt.get('ema') else 'model'].float().fuse().eval())  # FP32 model
-        else:
-            model.append(ckpt['ema' if ckpt.get('ema') else 'model'].float().eval())  # without layer fuse
+        ckpt = (ckpt['ema'] or ckpt['model']).float()  # FP32 model
+        model.append(ckpt.fuse().eval() if fuse else ckpt.eval())  # fused or un-fused model in eval mode
 
     # Compatibility updates
     for m in model.modules():
-        if type(m) in [nn.Hardswish, nn.LeakyReLU, nn.ReLU, nn.ReLU6, nn.SiLU, Detect, Model]:
-            m.inplace = inplace  # pytorch 1.7.0 compatibility
-            if type(m) is Detect:
+        t = type(m)
+        if t in (nn.Hardswish, nn.LeakyReLU, nn.ReLU, nn.ReLU6, nn.SiLU, Detect, Model):
+            m.inplace = inplace  # torch 1.7.0 compatibility
+            if t is Detect:
                 if not isinstance(m.anchor_grid, list):  # new Detect Layer compatibility
                     delattr(m, 'anchor_grid')
                     setattr(m, 'anchor_grid', [torch.zeros(1)] * m.nl)
-        elif type(m) is Conv:
-            m._non_persistent_buffers_set = set()  # pytorch 1.6.0 compatibility
+        elif t is Conv:
+            m._non_persistent_buffers_set = set()  # torch 1.6.0 compatibility
+        elif t is nn.Upsample and not hasattr(m, 'recompute_scale_factor'):
+            m.recompute_scale_factor = None  # torch 1.11.0 compatibility
 
     if len(model) == 1:
         return model[-1]  # return model
```
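For reference, a typical call into the patched loader might look like the sketch below. The weights filename, device string, and dummy input size are placeholders; the call signature matches `attempt_load(weights, map_location=None, inplace=True, fuse=True)` from the hunk header.

```python
# Usage sketch (placeholder weights path): load a fused FP32 model in eval mode
# through the patched attempt_load(), then run a dummy forward pass.
import torch
from models.experimental import attempt_load

device = torch.device('cuda:0' if torch.cuda.is_available() else 'cpu')
model = attempt_load('weights.pt', map_location=device, inplace=True, fuse=True)  # already .float().eval()
pred = model(torch.zeros(1, 3, 640, 640, device=device))  # 640x640 is a placeholder input size
```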
