Commit 011f209: cleaning broken strings

Borda committed Jul 12, 2023
1 parent c17b402
Showing 5 changed files with 8 additions and 8 deletions.
@@ -95,8 +95,8 @@ def __init__(
                     import warnings

                     warnings.warn(
-                        "You would need to load the pretrained state_dict yourself if you are "
-                        "providing backbone of type torch.nn.Module / pl.LightningModule."
+                        "You would need to load the pretrained state_dict yourself if you are"
+                        " providing backbone of type torch.nn.Module / pl.LightningModule."
                     )
             else:
                 backbone_model = create_fasterrcnn_backbone(
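Both versions of this warning (in what is, judging by create_fasterrcnn_backbone, the Faster R-CNN detection module) produce the same text; the change just moves the separating space from the tail of one fragment to the head of the next, which makes a dropped space easier to spot in review. A minimal sketch of the underlying behavior, using illustrative strings rather than repository code:

# Adjacent string literals are concatenated at compile time with no
# separator inserted, so the space must live in exactly one fragment.
joined = (
    "You would need to load the pretrained state_dict yourself if you are"
    " providing backbone of type torch.nn.Module / pl.LightningModule."
)
assert "are providing" in joined  # the leading space keeps the words apart

# Forgetting the space glues words together silently:
broken = "load the state_dict" "yourself"
assert broken == "load the state_dictyourself"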
2 changes: 1 addition & 1 deletion src/pl_bolts/models/detection/yolo/darknet_network.py
@@ -242,7 +242,7 @@ def _read_config(self, config_file: Iterable[str]) -> List[Dict[str, Any]]:
         def convert(key: str, value: str) -> Union[str, int, float, List[Union[str, int, float]]]:
             """Converts a value to the correct type based on key."""
             if key not in variable_types:
-                warn("Unknown YOLO configuration variable: " + key)
+                warn(f"Unknown YOLO configuration variable: {key}")
                 return value
             if key in list_variables:
                 return [variable_types[key](v) for v in value.split(",")]
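The f-string is behaviorally identical here because `key` is annotated as `str`, but it is also more robust: `"..." + key` raises `TypeError` for non-string values, while an f-string formats anything. A small sketch with an illustrative value, not repository data:

from warnings import warn

key = 42  # suppose a non-string value slipped through
# "Unknown YOLO configuration variable: " + key   # would raise TypeError
warn(f"Unknown YOLO configuration variable: {key}")  # f-string formats any value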
4 changes: 2 additions & 2 deletions src/pl_bolts/models/gans/srgan/srgan_module.py
@@ -200,8 +200,8 @@ def cli_main(args=None):
     generator_checkpoint = Path(f"model_checkpoints/srresnet-{args.dataset}-scale_factor={args.scale_factor}.pt")
     if not generator_checkpoint.exists():
         warn(
-            "No generator checkpoint found. Training generator from scratch. \
-            Use srresnet_module.py to pretrain the generator."
+            "No generator checkpoint found. Training generator from scratch."
+            " Use srresnet_module.py to pretrain the generator."
         )
         generator_checkpoint = None
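This is the one hunk that actually changes the emitted message: a backslash at the end of a line inside a string literal continues the string, so the next line's entire indentation gets baked into the warning text. A sketch of the difference, with the indentation shortened for illustration:

# Old form: the line continuation drags the indentation into the string.
old = "No generator checkpoint found. Training generator from scratch. \
        Use srresnet_module.py to pretrain the generator."
assert "  Use" in old  # a run of spaces lands before "Use"

# New form: implicit concatenation with a single explicit separator.
new = (
    "No generator checkpoint found. Training generator from scratch."
    " Use srresnet_module.py to pretrain the generator."
)
assert "scratch. Use" in new and "  " not in new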
2 changes: 1 addition & 1 deletion src/pl_bolts/optimizers/lr_scheduler.py
@@ -73,7 +73,7 @@ def get_lr(self) -> List[float]:
         """Compute learning rate using chainable form of the scheduler."""
         if not self._get_lr_called_within_step:
             warnings.warn(
-                "To get the last learning rate computed by the scheduler, " "please use `get_last_lr()`.",
+                "To get the last learning rate computed by the scheduler; please use `get_last_lr()`.",
                 UserWarning,
             )
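Two string literals sitting side by side on one line are already a single string at runtime, typically a leftover from an auto-formatter re-wrapping a once-split message, so merging them changes nothing observable. A minimal sketch:

# Adjacent literals on one line are one string; merging is purely cosmetic.
split = "To get the last learning rate computed by the scheduler, " "please use `get_last_lr()`."
merged = "To get the last learning rate computed by the scheduler, please use `get_last_lr()`."
assert split == merged

Note that the commit also swapped the comma for a semicolon while merging; only that part alters the warning text.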
4 changes: 2 additions & 2 deletions src/pl_bolts/utils/pretrained_weights.py
@@ -5,10 +5,10 @@
 from pl_bolts.utils.stability import under_review

 vae_imagenet2012 = (
-    "https://pl-bolts-weights.s3.us-east-2.amazonaws.com/" "vae/imagenet_06_22_2019/checkpoints/epoch%3D63.ckpt"
+    "https://pl-bolts-weights.s3.us-east-2.amazonaws.com/vae/imagenet_06_22_2019/checkpoints/epoch%3D63.ckpt"
 )

-cpcv2_resnet18 = "https://pl-bolts-weights.s3.us-east-2.amazonaws.com/" "cpc/resnet18-v6/epoch%3D85.ckpt"
+cpcv2_resnet18 = "https://pl-bolts-weights.s3.us-east-2.amazonaws.com/cpc/resnet18-v6/epoch%3D85.ckpt"
 urls = {"vae-imagenet2012": vae_imagenet2012, "CPC_v2-resnet18": cpcv2_resnet18}
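Splitting a URL across two literals is especially error-prone: a missing `/` at the joint silently yields a wrong address, and grepping the source for the full URL no longer finds it. A short sketch:

# A single literal keeps the URL searchable and copy-pasteable.
split_url = "https://pl-bolts-weights.s3.us-east-2.amazonaws.com/" "cpc/resnet18-v6/epoch%3D85.ckpt"
whole_url = "https://pl-bolts-weights.s3.us-east-2.amazonaws.com/cpc/resnet18-v6/epoch%3D85.ckpt"
assert split_url == whole_url  # same value; only the source form differs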
