From b5d26646c580aba00db610136022879ec659f30b Mon Sep 17 00:00:00 2001
From: DeepBeepMeep
Date: Tue, 8 Jul 2025 19:46:21 +0200
Subject: [PATCH] oops

---
 README.md                  |  4 ++--
 wan/modules/attention.py   | 30 +++++++++++++++---------------
 wan/multitalk/attention.py | 22 +++++++++++-----------
 3 files changed, 28 insertions(+), 28 deletions(-)

diff --git a/README.md b/README.md
index 9799130..9f7b678 100644
--- a/README.md
+++ b/README.md
@@ -21,11 +21,11 @@ WanGP supports the Wan (and derived models), Hunyuan Video and LTV Video models
 ## 🔥 Latest Updates
 
 ### July 8 2025: WanGP v6.6, WanGP offers you **Vace Multitalk Dual Voices Fusionix Infinite** :
-**Vace** our beloved super Control Net has been combined with **Multitalk** the new king in town that can animate two people speaking (**Dual Voices**). It is accelerated by the **Fusionix** model and thanks to *Sliding Windows* support and *Adaptive Projected Guidance* (much slower but should reduce the reddish effect with long videos) your two people will be able to talk for very a long time (which is an **Infinite** amount of time in the field of video generation).
+**Vace**, our beloved super Control Net, has been combined with **Multitalk**, the new king in town that can animate up to two people speaking (**Dual Voices**). It is accelerated by the **Fusionix** model, and thanks to *Sliding Windows* support and *Adaptive Projected Guidance* (much slower, but should reduce the reddish effect in long videos) your two people will be able to talk for a very long time (which is an **Infinite** amount of time in the field of video generation).
 
 Of course you will get as well *Multitalk* vanilla and also *Multitalk 720p* as a bonus.
 
-And since I am mister nice guy I had enclosed as an exclusivity an *Audio Separator* that will save you time to isolate each voice when using Multitalk with two people.
+And since I am mister nice guy I have enclosed, as an exclusivity, an *Audio Separator* that will save you time isolating each voice when using Multitalk with two people.
 
 As I feel like a resting a bit I haven't produced a nice sample Video to illustrate all these new capabilities. But here is the thing, I ams sure you will publish in the *Share Your Best Video* channel your *Master Pieces*. The best one will be added to the *Announcements Channel* and will bring eternal fame to its author.
 
diff --git a/wan/modules/attention.py b/wan/modules/attention.py
index 41a934b..758ad59 100644
--- a/wan/modules/attention.py
+++ b/wan/modules/attention.py
@@ -7,23 +7,23 @@ import torch.nn.functional as F
 major, minor = torch.cuda.get_device_capability(None)
 bfloat16_supported = major >= 8
 
-try:
-    from xformers.ops import memory_efficient_attention
-except ImportError:
-    memory_efficient_attention = None
+# try:
+#     from xformers.ops import memory_efficient_attention
+# except ImportError:
+#     memory_efficient_attention = None
 
-try:
-    import flash_attn_interface
-    FLASH_ATTN_3_AVAILABLE = True
-except ModuleNotFoundError:
-    FLASH_ATTN_3_AVAILABLE = False
+# try:
+#     import flash_attn_interface
+#     FLASH_ATTN_3_AVAILABLE = True
+# except ModuleNotFoundError:
+#     FLASH_ATTN_3_AVAILABLE = False
 
-try:
-    import flash_attn
-    FLASH_ATTN_2_AVAILABLE = True
-except ModuleNotFoundError:
-    FLASH_ATTN_2_AVAILABLE = False
-    flash_attn = None
+# try:
+#     import flash_attn
+#     FLASH_ATTN_2_AVAILABLE = True
+# except ModuleNotFoundError:
+#     FLASH_ATTN_2_AVAILABLE = False
+#     flash_attn = None
 
 try:
     from sageattention import sageattn_varlen
diff --git a/wan/multitalk/attention.py b/wan/multitalk/attention.py
index ffc2a50..44c1ca0 100644
--- a/wan/multitalk/attention.py
+++ b/wan/multitalk/attention.py
@@ -5,19 +5,19 @@ from einops import rearrange, repeat
 from .multitalk_utils import RotaryPositionalEmbedding1D, normalize_and_scale, split_token_counts_and_frame_ids
 from wan.modules.attention import pay_attention
 
-import xformers.ops
+# import xformers.ops
 
-try:
-    import flash_attn_interface
-    FLASH_ATTN_3_AVAILABLE = True
-except ModuleNotFoundError:
-    FLASH_ATTN_3_AVAILABLE = False
+# try:
+#     import flash_attn_interface
+#     FLASH_ATTN_3_AVAILABLE = True
+# except ModuleNotFoundError:
+#     FLASH_ATTN_3_AVAILABLE = False
 
-try:
-    import flash_attn
-    FLASH_ATTN_2_AVAILABLE = True
-except ModuleNotFoundError:
-    FLASH_ATTN_2_AVAILABLE = False
+# try:
+#     import flash_attn
+#     FLASH_ATTN_2_AVAILABLE = True
+# except ModuleNotFoundError:
+#     FLASH_ATTN_2_AVAILABLE = False
 
 import warnings
 
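For context (a sketch, not part of the patch): both files normally probe optional attention backends at import time with guarded imports, setting module-level flags such as FLASH_ATTN_2_AVAILABLE that later code can branch on; the patch comments those probes out and leaves only the sageattention import active. The snippet below is a minimal, hypothetical illustration of that guarded-import pattern and of a dispatcher built on the resulting flag; sdpa_fallback and its tensor-layout assumptions are illustrative and do not come from this repository.

import torch
import torch.nn.functional as F

# Guarded import: record whether the optional flash-attn backend is installed.
try:
    import flash_attn
    FLASH_ATTN_2_AVAILABLE = True
except ModuleNotFoundError:
    FLASH_ATTN_2_AVAILABLE = False
    flash_attn = None


def sdpa_fallback(q, k, v):
    """Hypothetical dispatcher: q, k, v are (batch, seqlen, heads, head_dim)
    half-precision CUDA tensors, the layout flash-attn expects."""
    if FLASH_ATTN_2_AVAILABLE:
        # Fused FlashAttention-2 kernel.
        return flash_attn.flash_attn_func(q, k, v)
    # PyTorch's built-in kernel expects (batch, heads, seqlen, head_dim),
    # so transpose into that layout and back.
    out = F.scaled_dot_product_attention(
        q.transpose(1, 2), k.transpose(1, 2), v.transpose(1, 2)
    )
    return out.transpose(1, 2)

The repository's own pay_attention presumably plays a similar role across more backends; the sketch only shows the general flag-then-fallback shape.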