From a63aff0377a8652b5640845a7451ca4d22ebcee7 Mon Sep 17 00:00:00 2001
From: DeepBeepMeep
Date: Fri, 18 Apr 2025 09:46:52 +0200
Subject: [PATCH] fix flash attention

---
 wan/modules/attention.py | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/wan/modules/attention.py b/wan/modules/attention.py
index 25a8ef7..e70c098 100644
--- a/wan/modules/attention.py
+++ b/wan/modules/attention.py
@@ -276,7 +276,7 @@ def pay_attention(
             k=k,
             v=v,
             cu_seqlens_q= cu_seqlens_q,
-            cu_seqlens_kv= cu_seqlens_k,
+            cu_seqlens_k= cu_seqlens_k,
             seqused_q=None,
             seqused_k=None,
             max_seqlen_q=lq,
@@ -289,8 +289,8 @@ def pay_attention(
             q=q,
             k=k,
             v=v,
-            cu_seqlens_q= [0, lq],
-            cu_seqlens_kv=[0, lk],
+            cu_seqlens_q= cu_seqlens_q,
+            cu_seqlens_k= cu_seqlens_k,
             max_seqlen_q=lq,
             max_seqlen_k=lk,
             dropout_p=dropout_p,
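
For context, a minimal sketch of how the cu_seqlens_* tensors consumed by the patched call are
typically built for flash_attn's varlen interface (which expects the keyword cu_seqlens_k and
int32 cumulative-length tensors rather than Python lists). The helper name and the q_lens/k_lens
inputs below are illustrative assumptions; only the flash_attn_varlen_func keywords mirror the
patched code, and the actual pay_attention wrapper in wan/modules/attention.py may differ.

    import torch
    import torch.nn.functional as F
    from flash_attn import flash_attn_varlen_func

    def varlen_attention(q, k, v, q_lens, k_lens):
        # q: (total_q, nheads, headdim); k, v: (total_k, nheads, headdim),
        # packed across the batch with no padding. q_lens/k_lens hold the
        # per-sample sequence lengths (illustrative inputs, not from the patch).
        cu_seqlens_q = F.pad(
            torch.cumsum(q_lens, dim=0, dtype=torch.int32), (1, 0))  # [0, l0, l0+l1, ...]
        cu_seqlens_k = F.pad(
            torch.cumsum(k_lens, dim=0, dtype=torch.int32), (1, 0))
        return flash_attn_varlen_func(
            q, k, v,
            cu_seqlens_q=cu_seqlens_q,
            cu_seqlens_k=cu_seqlens_k,   # keyword is cu_seqlens_k, not cu_seqlens_kv
            max_seqlen_q=int(q_lens.max()),
            max_seqlen_k=int(k_lens.max()),
            dropout_p=0.0,
        )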