I need rest!

This commit is contained in:
DeepBeepMeep 2025-07-08 19:53:50 +02:00
parent b5d26646c5
commit 34f9333fdc

View File

@@ -7,23 +7,23 @@ import torch.nn.functional as F
 major, minor = torch.cuda.get_device_capability(None)
 bfloat16_supported = major >= 8
-# try:
-#     from xformers.ops import memory_efficient_attention
-# except ImportError:
-#     memory_efficient_attention = None
-# try:
-#     import flash_attn_interface
-#     FLASH_ATTN_3_AVAILABLE = True
-# except ModuleNotFoundError:
-#     FLASH_ATTN_3_AVAILABLE = False
-# try:
-#     import flash_attn
-#     FLASH_ATTN_2_AVAILABLE = True
-# except ModuleNotFoundError:
-#     FLASH_ATTN_2_AVAILABLE = False
-#     flash_attn = None
+try:
+    from xformers.ops import memory_efficient_attention
+except ImportError:
+    memory_efficient_attention = None
+try:
+    import flash_attn_interface
+    FLASH_ATTN_3_AVAILABLE = True
+except ModuleNotFoundError:
+    FLASH_ATTN_3_AVAILABLE = False
+try:
+    import flash_attn
+    FLASH_ATTN_2_AVAILABLE = True
+except ModuleNotFoundError:
+    FLASH_ATTN_2_AVAILABLE = False
+    flash_attn = None
 try:
     from sageattention import sageattn_varlen