TORCH_WARN_ONCE("Using AOTriton backend for Flash Attention backward...");
421
428
returnmha_bwd_aot(dout,
422
429
q,
423
430
k,
@@ -442,6 +449,7 @@ mha_bwd(const at::Tensor &dout, // batch_size x seqlen_q x num_heads, x head_si
       at::ROCmFABackend::Ck) {
     TORCH_WARN_ONCE("Warning! You have opted to use CK flash attention backend in a build that was not compiled using USE_CK_FLASH_ATTENTION=1. Please set this variable and try again. Defaulting to use aotriton backend...");
   }
+  TORCH_WARN_ONCE("Using AOTriton backend for Flash Attention backward...");
   return mha_bwd_aot(
       dout,
       q,
@@ -492,6 +500,7 @@ mha_varlen_bwd(const at::Tensor &dout, // total_q x num_heads, x head_size
 #if defined(USE_CK_FLASH_ATTENTION)
   if (at::globalContext().getROCmFAPreferredBackend() ==
       at::ROCmFABackend::Ck) {
+    TORCH_WARN_ONCE("Using CK backend for Flash Attention varlen backward...");