Adding FLOPs and size to model metadata #6936

Merged: 19 commits, Nov 11, 2022
Changes from 7 commits
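Net effect at this commit: every weights enum's `meta` dict carries an `_ops` entry (GFLOPs for float models, GOPs for quantized ones, rounded to one decimal) alongside the existing `_weight_size` entry (checkpoint size in MB). A minimal sketch of reading the new fields, assuming a torchvision build that includes this PR (values taken from the alexnet.py hunk below):

from torchvision.models import AlexNet_Weights

meta = AlexNet_Weights.IMAGENET1K_V1.meta
print(meta["_ops"])          # 0.7 -- GFLOPs
print(meta["_weight_size"])  # 233.086501 -- checkpoint size in MB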
1 change: 1 addition & 0 deletions .gitignore
@@ -35,6 +35,7 @@ gen.yml
 *.orig
 *-checkpoint.ipynb
 *.venv
+*.DS_Store

 ## Xcode User settings
 xcuserdata/
27 changes: 16 additions & 11 deletions docs/source/conf.py
@@ -362,6 +362,14 @@ def inject_weight_metadata(app, what, name, obj, options, lines):
                 max_visible = 3
                 v_sample = ", ".join(v[:max_visible])
                 v = f"{v_sample}, ... ({len(v)-max_visible} omitted)" if len(v) > max_visible else v_sample
+            elif k == "_ops":
+                if obj.__name__.endswith("_QuantizedWeights"):
+                    k = "integer operations (GOPs)"
+                else:
+                    k = "floating point operations (GFLOPs)"
+            elif k == "_weight_size":
+                k = "weights file size (MB)"
+
             table.append((str(k), str(v)))
             table = tabulate(table, tablefmt="rst")
             lines += [".. rst-class:: table-weights"]  # Custom CSS class, see custom_torchvision.css
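The branch added above only renames the displayed key; as a standalone restatement of the same rule (hypothetical helper, for illustration only, not part of this PR):

# Hypothetical restatement of the renaming rule above: quantized weight
# enums report integer operations, all others report floating point ops.
def ops_label(enum_name: str) -> str:
    if enum_name.endswith("_QuantizedWeights"):
        return "integer operations (GOPs)"
    return "floating point operations (GFLOPs)"

assert ops_label("GoogLeNet_QuantizedWeights") == "integer operations (GOPs)"
assert ops_label("AlexNet_Weights") == "floating point operations (GFLOPs)"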
@@ -385,12 +393,12 @@ def generate_weights_table(module, table_name, metrics, dataset, include_pattern
     if exclude_patterns is not None:
         weights = [w for w in weights if all(p not in str(w) for p in exclude_patterns)]

-    rich_metadata = ["Size (MB)"]
-    if "_flops" in weights[0].meta:  # assumes same rich meta for all models in module
-        rich_metadata = ["GFLOPs"] + rich_metadata
+    ops_name = "GOPs" if "QuantizedWeights" in weights_endswith else "GFLOPs"

     metrics_keys, metrics_names = zip(*metrics)
-    column_names = ["Weight"] + list(metrics_names) + ["Params"] + rich_metadata + ["Recipe"]  # Final column order
+    column_names = (
+        ["Weight"] + list(metrics_names) + ["Params"] + [ops_name, "Size (MB)", "Recipe"]
+    )  # Final column order
     column_names = [f"**{name}**" for name in column_names]  # Add bold

     content = []
@@ -399,17 +407,14 @@
             f":class:`{w} <{type(w).__name__}>`",
             *(w.meta["_metrics"][dataset][metric] for metric in metrics_keys),
             f"{w.meta['num_params']/1e6:.1f}M",
+            f"{w.meta['_ops']:.3f}",
+            f"{round(w.meta['_weight_size'], 1):.1f}",
+            f"`link <{w.meta['recipe']}>`__",
         ]

-        if "_flops" in w.meta:
-            row.append(f"{w.meta['_flops']:.3f}")
-
-        row.append(f"{round(w.meta['_weight_size'], 1):.1f}")
-        row.append(f"`link <{w.meta['recipe']}>`__")
-
         content.append(row)

-    column_widths = ["110"] + ["18"] * len(metrics_names) + ["18"] + ["18"] * len(rich_metadata) + ["10"]
+    column_widths = ["110"] + ["18"] * len(metrics_names) + ["18"] * 3 + ["10"]
     widths_table = " ".join(column_widths)

     table = tabulate(content, headers=column_names, tablefmt="rst")
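For reference, `tabulate(..., tablefmt="rst")` is what renders the final grid; a hedged sketch of what the function now produces for a single illustrative row (the metric and param values are illustrative; 0.700 and 233.1 are the alexnet.py `_ops` and `_weight_size` values formatted exactly as the code above formats them):

from tabulate import tabulate  # third-party dependency already used by conf.py

weights_endswith = "_Weights"  # "_QuantizedWeights" for the quantization module
ops_name = "GOPs" if "QuantizedWeights" in weights_endswith else "GFLOPs"
column_names = ["Weight", "Acc@1", "Params", ops_name, "Size (MB)", "Recipe"]
content = [["AlexNet_Weights.IMAGENET1K_V1", "56.522", "61.1M", f"{0.7:.3f}", f"{round(233.086501, 1):.1f}", "`link <...>`__"]]
print(tabulate(content, headers=column_names, tablefmt="rst"))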
17 changes: 7 additions & 10 deletions test/test_extended_models.py
@@ -155,26 +155,23 @@ def test_schema_meta_validation(model_fn):
         "recipe",
         "unquantized",
         "_docs",
-        "_flops",
+        "_ops",
         "_weight_size",
     }
     # mandatory fields for each computer vision task
     classification_fields = {"categories", ("_metrics", "ImageNet-1K", "acc@1"), ("_metrics", "ImageNet-1K", "acc@5")}
-    rich_metadata = {"_flops"}
     defaults = {
-        "all": {"_metrics", "min_size", "num_params", "recipe", "_docs", "_weight_size"},
-        "models": classification_fields | rich_metadata,
-        "detection": {"categories", ("_metrics", "COCO-val2017", "box_map")} | rich_metadata,
+        "all": {"_metrics", "min_size", "num_params", "recipe", "_docs", "_weight_size", "_ops"},
+        "models": classification_fields,
+        "detection": {"categories", ("_metrics", "COCO-val2017", "box_map")},
         "quantization": classification_fields | {"backend", "unquantized"},
         "segmentation": {
             "categories",
             ("_metrics", "COCO-val2017-VOC-labels", "miou"),
             ("_metrics", "COCO-val2017-VOC-labels", "pixel_acc"),
-        }
-        | rich_metadata,
-        "video": {"categories", ("_metrics", "Kinetics-400", "acc@1"), ("_metrics", "Kinetics-400", "acc@5")}
-        | rich_metadata,
-        "optical_flow": rich_metadata,
+        },
+        "video": {"categories", ("_metrics", "Kinetics-400", "acc@1"), ("_metrics", "Kinetics-400", "acc@5")},
+        "optical_flow": set(),
     }
     model_name = model_fn.__name__
     module_name = model_fn.__module__.split(".")[-2]
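Downstream (not shown in this hunk), the test presumably unions the common set with the module's task-specific set; a hedged sketch of that lookup:

# Hedged sketch, assuming the comparison further down in the test combines
# the two sets; the tuple entries address nested "_metrics" keys.
module_name = "detection"  # e.g. for torchvision.models.detection.faster_rcnn
required = defaults["all"] | defaults[module_name]
assert "_ops" in required and "_weight_size" in required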
2 changes: 1 addition & 1 deletion torchvision/models/alexnet.py
@@ -67,7 +67,7 @@ class AlexNet_Weights(WeightsEnum):
                     "acc@5": 79.066,
                 }
             },
-            "_flops": 0.714188,
+            "_ops": 0.7,
             "_weight_size": 233.086501,
             "_docs": """
                 These weights reproduce closely the results of the paper using a simplified training recipe.
8 changes: 4 additions & 4 deletions torchvision/models/convnext.py
@@ -219,7 +219,7 @@ class ConvNeXt_Tiny_Weights(WeightsEnum):
                     "acc@5": 96.146,
                 }
             },
-            "_flops": 4.455531,
+            "_ops": 4.5,
             "_weight_size": 109.118672,
         },
     )
@@ -239,7 +239,7 @@ class ConvNeXt_Small_Weights(WeightsEnum):
                     "acc@5": 96.650,
                 }
             },
-            "_flops": 8.683712,
+            "_ops": 8.7,
             "_weight_size": 191.702775,
         },
     )
@@ -259,7 +259,7 @@ class ConvNeXt_Base_Weights(WeightsEnum):
                     "acc@5": 96.870,
                 }
             },
-            "_flops": 15.354729,
+            "_ops": 15.4,
             "_weight_size": 338.064286,
         },
     )
@@ -279,7 +279,7 @@ class ConvNeXt_Large_Weights(WeightsEnum):
                     "acc@5": 96.976,
                 }
             },
-            "_flops": 34.361434,
+            "_ops": 34.4,
             "_weight_size": 754.537187,
         },
     )
8 changes: 4 additions & 4 deletions torchvision/models/densenet.py
@@ -277,7 +277,7 @@ class DenseNet121_Weights(WeightsEnum):
                     "acc@5": 91.972,
                 }
             },
-            "_flops": 2.834162,
+            "_ops": 2.8,
             "_weight_size": 30.844645,
         },
     )
@@ -297,7 +297,7 @@ class DenseNet161_Weights(WeightsEnum):
                     "acc@5": 93.560,
                 }
             },
-            "_flops": 7.727907,
+            "_ops": 7.7,
             "_weight_size": 110.369482,
         },
     )
@@ -317,7 +317,7 @@ class DenseNet169_Weights(WeightsEnum):
                     "acc@5": 92.806,
                 }
             },
-            "_flops": 3.359843,
+            "_ops": 3.4,
             "_weight_size": 54.708029,
         },
     )
@@ -337,7 +337,7 @@ class DenseNet201_Weights(WeightsEnum):
                     "acc@5": 93.370,
                 }
             },
-            "_flops": 4.291366,
+            "_ops": 4.3,
             "_weight_size": 77.373247,
         },
     )
8 changes: 4 additions & 4 deletions torchvision/models/detection/faster_rcnn.py
@@ -388,7 +388,7 @@ class FasterRCNN_ResNet50_FPN_Weights(WeightsEnum):
                     "box_map": 37.0,
                 }
             },
-            "_flops": 134.379721,
+            "_ops": 134.4,
             "_weight_size": 159.743153,
             "_docs": """These weights were produced by following a similar training recipe as on the paper.""",
         },
@@ -409,7 +409,7 @@ class FasterRCNN_ResNet50_FPN_V2_Weights(WeightsEnum):
                     "box_map": 46.7,
                 }
             },
-            "_flops": 280.370729,
+            "_ops": 280.4,
             "_weight_size": 167.104394,
             "_docs": """These weights were produced using an enhanced training recipe to boost the model accuracy.""",
         },
@@ -430,7 +430,7 @@ class FasterRCNN_MobileNet_V3_Large_FPN_Weights(WeightsEnum):
                     "box_map": 32.8,
                 }
             },
-            "_flops": 4.493592,
+            "_ops": 4.5,
             "_weight_size": 74.238593,
             "_docs": """These weights were produced by following a similar training recipe as on the paper.""",
         },
@@ -451,7 +451,7 @@ class FasterRCNN_MobileNet_V3_Large_320_FPN_Weights(WeightsEnum):
                     "box_map": 22.8,
                 }
             },
-            "_flops": 0.718998,
+            "_ops": 0.7,
             "_weight_size": 74.238593,
             "_docs": """These weights were produced by following a similar training recipe as on the paper.""",
         },
2 changes: 1 addition & 1 deletion torchvision/models/detection/fcos.py
@@ -662,7 +662,7 @@ class FCOS_ResNet50_FPN_Weights(WeightsEnum):
                     "box_map": 39.2,
                 }
             },
-            "_flops": 128.207053,
+            "_ops": 128.2,
             "_weight_size": 123.607730,
             "_docs": """These weights were produced by following a similar training recipe as on the paper.""",
         },
4 changes: 2 additions & 2 deletions torchvision/models/detection/keypoint_rcnn.py
@@ -328,7 +328,7 @@ class KeypointRCNN_ResNet50_FPN_Weights(WeightsEnum):
                     "kp_map": 61.1,
                 }
             },
-            "_flops": 133.924041,
+            "_ops": 133.9,
             "_weight_size": 226.053994,
             "_docs": """
                 These weights were produced by following a similar training recipe as on the paper but use a checkpoint
@@ -349,7 +349,7 @@ class KeypointRCNN_ResNet50_FPN_Weights(WeightsEnum):
                     "kp_map": 65.0,
                 }
             },
-            "_flops": 137.419502,
+            "_ops": 137.4,
             "_weight_size": 226.053994,
             "_docs": """These weights were produced by following a similar training recipe as on the paper.""",
         },
4 changes: 2 additions & 2 deletions torchvision/models/detection/mask_rcnn.py
@@ -370,7 +370,7 @@ class MaskRCNN_ResNet50_FPN_Weights(WeightsEnum):
                     "mask_map": 34.6,
                 }
             },
-            "_flops": 134.379721,
+            "_ops": 134.4,
             "_weight_size": 169.839934,
             "_docs": """These weights were produced by following a similar training recipe as on the paper.""",
         },
@@ -392,7 +392,7 @@ class MaskRCNN_ResNet50_FPN_V2_Weights(WeightsEnum):
                     "mask_map": 41.8,
                 }
             },
-            "_flops": 333.577360,
+            "_ops": 333.6,
             "_weight_size": 177.219453,
             "_docs": """These weights were produced using an enhanced training recipe to boost the model accuracy.""",
         },
4 changes: 2 additions & 2 deletions torchvision/models/detection/retinanet.py
@@ -690,7 +690,7 @@ class RetinaNet_ResNet50_FPN_Weights(WeightsEnum):
                     "box_map": 36.4,
                 }
             },
-            "_flops": 151.540437,
+            "_ops": 151.5,
             "_weight_size": 130.267216,
             "_docs": """These weights were produced by following a similar training recipe as on the paper.""",
         },
@@ -711,7 +711,7 @@ class RetinaNet_ResNet50_FPN_V2_Weights(WeightsEnum):
                     "box_map": 41.5,
                 }
             },
-            "_flops": 152.238199,
+            "_ops": 152.2,
             "_weight_size": 146.037091,
             "_docs": """These weights were produced using an enhanced training recipe to boost the model accuracy.""",
         },
2 changes: 1 addition & 1 deletion torchvision/models/detection/ssd.py
@@ -39,7 +39,7 @@ class SSD300_VGG16_Weights(WeightsEnum):
                     "box_map": 25.1,
                 }
             },
-            "_flops": 34.858153,
+            "_ops": 34.9,
             "_weight_size": 135.988447,
             "_docs": """These weights were produced by following a similar training recipe as on the paper.""",
         },
2 changes: 1 addition & 1 deletion torchvision/models/detection/ssdlite.py
@@ -198,7 +198,7 @@ class SSDLite320_MobileNet_V3_Large_Weights(WeightsEnum):
                     "box_map": 21.3,
                 }
             },
-            "_flops": 0.583172,
+            "_ops": 0.6,
             "_weight_size": 13.417583,
             "_docs": """These weights were produced by following a similar training recipe as on the paper.""",
         },