
Commit 52a5f37

remove comment from gpt
Signed-off-by: Peter Dykas <[email protected]>
1 parent 16316ba commit 52a5f37

File tree

1 file changed: +0 -1 lines changed


tests/pytorch/attention/test_attention.py

Lines changed: 0 additions & 1 deletion
@@ -325,7 +325,6 @@ def test_dpa_num_splits(dtype, model_configs, model):
     """Test DotProductAttention with FlashAttention-3 num_splits enabled"""
     if not FlashAttentionUtils.v3_is_installed:
         pytest.skip("num_splits requires FlashAttention-3.")
-    # Reuse the main test, passing num_splits only to FlashAttention; others run normally
     test_dot_product_attention(dtype, model_configs, model, False, True, None, False, False, num_splits=2)
 
 model_configs_softmax = {
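
For context, the test touched here follows a skip-and-delegate pattern: bail out unless FlashAttention-3 is installed, then reuse the shared attention test with num_splits=2. Below is a minimal, self-contained sketch of that pattern; the FlashAttentionUtils stub and run_attention_case helper are hypothetical stand-ins for the real TransformerEngine objects, included only so the sketch runs on its own.

import pytest

# Hypothetical stand-in for transformer_engine's FlashAttentionUtils; in the real
# suite this flag is determined at import time based on the installed flash-attn.
class FlashAttentionUtils:
    v3_is_installed = False

def run_attention_case(num_splits=1):
    # Placeholder for the shared test body (test_dot_product_attention in the real suite).
    assert num_splits >= 1

def test_dpa_num_splits_sketch():
    """Skip unless FlashAttention-3 is available, then reuse the shared case with num_splits=2."""
    if not FlashAttentionUtils.v3_is_installed:
        pytest.skip("num_splits requires FlashAttention-3.")
    run_attention_case(num_splits=2)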
