diff --git a/wan/modules/attention.py b/wan/modules/attention.py
index 4dbbe03fc..c8f69c2ee 100644
--- a/wan/modules/attention.py
+++ b/wan/modules/attention.py
@@ -107,7 +107,7 @@ def half(x):
             max_seqlen_k=lk,
             softmax_scale=softmax_scale,
             causal=causal,
-            deterministic=deterministic)[0].unflatten(0, (b, lq))
+            deterministic=deterministic).unflatten(0, (b, lq))
     else:
         assert FLASH_ATTN_2_AVAILABLE
         x = flash_attn.flash_attn_varlen_func(