diff --git a/extension/llm/modules/test/test_attention.py b/extension/llm/modules/test/test_attention.py
index b792fd9b1ef..bd0c44d8b5f 100644
--- a/extension/llm/modules/test/test_attention.py
+++ b/extension/llm/modules/test/test_attention.py
@@ -146,6 +146,7 @@ def test_attention_export(self):

         assert_close(et_res, tt_res)

+    @unittest.skip(reason="TODO(T207740932): test is flaky")
     def test_attention_aoti(self):
         # Self attention.

diff --git a/extension/llm/modules/test/test_position_embeddings.py b/extension/llm/modules/test/test_position_embeddings.py
index 05f43527bef..039cc798b19 100644
--- a/extension/llm/modules/test/test_position_embeddings.py
+++ b/extension/llm/modules/test/test_position_embeddings.py
@@ -163,6 +163,7 @@ def test_tiled_token_positional_embedding_export(self):

         assert_close(y, ref_y)

+    @unittest.skip(reason="TODO(T207740932): test is flaky")
     def test_tiled_token_positional_embedding_aoti(self):
         tpe_ep = torch.export.export(
             self.tpe,