Commit 9f65061

scaled_dot_product_attention
1 parent 2f39b94 commit 9f65061

File tree

1 file changed: +0 -6 lines changed

onnxscript/tests/function_libs/torch_lib/ops_test_data.py

Lines changed: 0 additions & 6 deletions
@@ -1692,9 +1692,6 @@ def _where_input_wrangler(
         nn_ops.aten_scaled_dot_product_attention,
         trace_only=True,
     )
-    .skip(
-        reason="fixme: ORT crashes on Windows, segfaults randomly on Linux",
-    )
     .skip(
         matcher=lambda sample: (attn_mask := sample.kwargs.get("attn_mask")) is not None
         and attn_mask.dtype == torch.bool,
@@ -1709,9 +1706,6 @@ def _where_input_wrangler(
         nn_ops.aten_scaled_dot_product_attention_bool_mask,
         trace_only=True,
     )
-    .skip(
-        reason="fixme: ORT crashes on Windows, segfaults randomly on Linux",
-    )
     .skip(
         matcher=lambda sample: (attn_mask := sample.kwargs.get("attn_mask")) is not None
         and attn_mask.dtype != torch.bool,
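
For context, a minimal sketch of what one registration looks like after this change, assuming the TorchLibOpInfo builder and .skip() chaining used in ops_test_data.py; the op-name string and the reason text on the remaining skip are illustrative placeholders, not taken from this diff:

    # Sketch only: the unconditional "ORT crashes" skip is gone; only the
    # dtype-based skip on attn_mask remains, deferring boolean masks to the
    # *_bool_mask variant registered separately.
    TorchLibOpInfo(
        "nn.functional.scaled_dot_product_attention",  # illustrative op name
        nn_ops.aten_scaled_dot_product_attention,
        trace_only=True,
    )
    .skip(
        matcher=lambda sample: (attn_mask := sample.kwargs.get("attn_mask")) is not None
        and attn_mask.dtype == torch.bool,
        reason="boolean masks are handled by the bool_mask overload",  # illustrative
    )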
