Skip to content

Commit

Permalink
[Tests] Fix cpu offload test (huggingface#5626)
Browse files Browse the repository at this point in the history
* fix more

* fix more
Loading branch information
patrickvonplaten authored and kashif committed Nov 11, 2023
1 parent c8f540a commit e981244
Showing 1 changed file with 7 additions and 7 deletions.
14 changes: 7 additions & 7 deletions tests/pipelines/test_pipelines_common.py
Original file line number Diff line number Diff line change
Expand Up @@ -742,14 +742,14 @@ def test_model_cpu_offload_forward_pass(self, expected_max_diff=2e-4):

max_diff = np.abs(to_np(output_with_offload) - to_np(output_without_offload)).max()
self.assertLess(max_diff, expected_max_diff, "CPU offloading should not affect the inference results")
offloaded_modules = [
v
for k, v in pipe.components.items()
if isinstance(v, torch.nn.Module) and k not in pipe._exclude_from_cpu_offload
]
self.assertTrue(
all(
v.device == "cpu"
for k, v in pipe.components.values()
if isinstance(v, torch.nn.Module) and k not in pipe._exclude_from_cpu_offload
),
"CPU offloading should leave all pipeline components on the CPU after inference",
)
all(v.device.type == "cpu" for v in offloaded_modules)
), f"Not offloaded: {[v for v in offloaded_modules if v.device.type != 'cpu']}"

@unittest.skipIf(
torch_device != "cuda" or not is_xformers_available(),
Expand Down

0 comments on commit e981244

Please sign in to comment.