From 70a179e446219b66f208e4fbb37b74c5d77d6086 Mon Sep 17 00:00:00 2001
From: Kohya S
Date: Wed, 30 Oct 2024 14:34:19 +0900
Subject: [PATCH] Fix to use SDPA instead of xformers

---
 library/sd3_models.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/library/sd3_models.py b/library/sd3_models.py
index 840f91869..60356e82c 100644
--- a/library/sd3_models.py
+++ b/library/sd3_models.py
@@ -645,7 +645,7 @@ def _forward(self, context, x, c):

         if self.x_block.x_block_self_attn:
             x_q2, x_k2, x_v2 = x_qkv2
-            attn2 = attention(x_q2, x_k2, x_v2, self.x_block.attn2.num_heads)
+            attn2 = attention(x_q2, x_k2, x_v2, self.x_block.attn2.num_heads, mode=self.mode)
             x = self.x_block.post_attention_x(x_attn_out, attn2, *x_intermediates)
         else:
             x = self.x_block.post_attention(x_attn_out, *x_intermediates)
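
Note: the one-line change threads the block's attention backend selection (self.mode) into the second self-attention call, so that call also honors the configured SDPA backend instead of always falling back to xformers. Below is a minimal sketch of what such a mode-dispatching attention() helper could look like; it is illustrative only, and the signature and the actual helper in library/sd3_models.py may differ.

    import torch
    import torch.nn.functional as F


    def attention(q, k, v, num_heads, mode="torch"):
        # q, k, v: (batch, seq_len, num_heads * head_dim)
        b, n, _ = q.shape
        head_dim = q.shape[-1] // num_heads
        # split heads -> (batch, num_heads, seq_len, head_dim)
        q, k, v = (t.view(b, -1, num_heads, head_dim).transpose(1, 2) for t in (q, k, v))

        if mode == "torch":
            # PyTorch scaled dot-product attention (SDPA)
            out = F.scaled_dot_product_attention(q, k, v)
        elif mode == "xformers":
            import xformers.ops

            # xformers expects (batch, seq_len, num_heads, head_dim)
            out = xformers.ops.memory_efficient_attention(
                q.transpose(1, 2), k.transpose(1, 2), v.transpose(1, 2)
            ).transpose(1, 2)
        else:
            raise ValueError(f"unknown attention mode: {mode}")

        # merge heads -> (batch, seq_len, num_heads * head_dim)
        return out.transpose(1, 2).reshape(b, n, num_heads * head_dim)

With a helper shaped like this, the patched call attention(x_q2, x_k2, x_v2, self.x_block.attn2.num_heads, mode=self.mode) picks the SDPA path whenever the module was configured for it, rather than hard-coding the xformers default.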