mirror of https://github.com/Wan-Video/Wan2.1.git
synced 2025-11-04 14:16:57 +00:00
I need rest!
This commit is contained in:
parent b5d26646c5
commit 34f9333fdc
@@ -7,23 +7,23 @@ import torch.nn.functional as F
 major, minor = torch.cuda.get_device_capability(None)
 bfloat16_supported = major >= 8
 
-# try:
-# from xformers.ops import memory_efficient_attention
-# except ImportError:
-# memory_efficient_attention = None
+try:
+    from xformers.ops import memory_efficient_attention
+except ImportError:
+    memory_efficient_attention = None
 
-# try:
-# import flash_attn_interface
-# FLASH_ATTN_3_AVAILABLE = True
-# except ModuleNotFoundError:
-# FLASH_ATTN_3_AVAILABLE = False
+try:
+    import flash_attn_interface
+    FLASH_ATTN_3_AVAILABLE = True
+except ModuleNotFoundError:
+    FLASH_ATTN_3_AVAILABLE = False
 
-# try:
-# import flash_attn
-# FLASH_ATTN_2_AVAILABLE = True
-# except ModuleNotFoundError:
-# FLASH_ATTN_2_AVAILABLE = False
-# flash_attn = None
+try:
+    import flash_attn
+    FLASH_ATTN_2_AVAILABLE = True
+except ModuleNotFoundError:
+    FLASH_ATTN_2_AVAILABLE = False
+    flash_attn = None
 
 try:
     from sageattention import sageattn_varlen
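The hunk above re-enables the guarded optional imports: each attention backend (xformers, FlashAttention 3, FlashAttention 2) is probed inside a try/except at import time, and a module-level flag or sentinel (FLASH_ATTN_3_AVAILABLE, FLASH_ATTN_2_AVAILABLE, memory_efficient_attention = None) records whether it is usable, with SageAttention handled the same way just below. As a minimal sketch of how such flags are typically consumed (not code from this commit; pick_attention is a hypothetical helper name), backend dispatch could look like this:

# Hypothetical sketch, not part of this commit: dispatch to whichever
# attention backend imported successfully above.
import torch.nn.functional as F

def pick_attention(q, k, v):
    # q, k, v: (batch, seq_len, num_heads, head_dim) fp16/bf16 CUDA tensors.
    if FLASH_ATTN_2_AVAILABLE:
        # flash_attn consumes the (batch, seq_len, heads, head_dim) layout directly.
        return flash_attn.flash_attn_func(q, k, v)
    if memory_efficient_attention is not None:
        # xformers' memory-efficient kernel uses the same layout.
        return memory_efficient_attention(q, k, v)
    # Fallback: PyTorch SDPA expects (batch, heads, seq_len, head_dim).
    out = F.scaled_dot_product_attention(
        q.transpose(1, 2), k.transpose(1, 2), v.transpose(1, 2))
    return out.transpose(1, 2)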