comfy/cli_args.py (+1)

@@ -106,6 +106,7 @@ class LatentPreviewMethod(enum.Enum):
 attn_group.add_argument("--use-quad-cross-attention", action="store_true", help="Use the sub-quadratic cross attention optimization . Ignored when xformers is used.")
 attn_group.add_argument("--use-pytorch-cross-attention", action="store_true", help="Use the new pytorch 2.0 cross attention function.")
comfy/ldm/modules/attention.py (+60)
@@ -24,6 +24,13 @@
         logging.error(f"\n\nTo use the `--use-sage-attention` feature, the `sageattention` package must be installed first.\ncommand:\n\t{sys.executable} -m pip install sageattention")
         exit(-1)

+
+if model_management.flash_attention_enabled():
+    try:
+        from flash_attn import flash_attn_func
+    except ModuleNotFoundError:
+        logging.error(f"\n\nTo use the `--use-flash-attention` feature, the `flash-attn` package must be installed first.\ncommand:\n\t{sys.executable} -m pip install flash-attn")