From bd4af6c8bb89591e7d3bffc2d473479801bdbb42 Mon Sep 17 00:00:00 2001
From: "Yanan Cao (PyTorch)"
Date: Wed, 18 Dec 2024 17:07:07 -0800
Subject: [PATCH] executorch/extension/llm/modules/test

Explicitly pass strict=True to the torch.export.export calls in these
tests so they do not depend on the default export mode, and drop stray
blank lines at the top of two test bodies.

Reviewed By: avikchaudhuri

Differential Revision: D67383699
---
 extension/llm/modules/test/test_attention.py           | 2 ++
 extension/llm/modules/test/test_position_embeddings.py | 7 +++++--
 2 files changed, 7 insertions(+), 2 deletions(-)

diff --git a/extension/llm/modules/test/test_attention.py b/extension/llm/modules/test/test_attention.py
index cda9becd69..82ee1febf4 100644
--- a/extension/llm/modules/test/test_attention.py
+++ b/extension/llm/modules/test/test_attention.py
@@ -150,6 +150,7 @@ def test_attention_export(self):
             (self.x, self.x),
             kwargs={"input_pos": self.input_pos},
             dynamic_shapes=self.dynamic_shapes,
+            strict=True,
         )
         et_res = et_mha_ep.module()(self.x, self.x, input_pos=self.input_pos)
         tt_res = self.tt_mha(self.x, self.x, input_pos=self.input_pos)
@@ -196,6 +197,7 @@ def test_attention_executorch(self):
             (self.x, self.x),
             kwargs={"input_pos": self.input_pos},
             dynamic_shapes=self.dynamic_shapes,
+            strict=True,
         )
         et_program = to_edge(
             et_mha_ep,
diff --git a/extension/llm/modules/test/test_position_embeddings.py b/extension/llm/modules/test/test_position_embeddings.py
index 039cc798b1..15da2335d7 100644
--- a/extension/llm/modules/test/test_position_embeddings.py
+++ b/extension/llm/modules/test/test_position_embeddings.py
@@ -49,7 +49,6 @@ def test_tile_positional_embedding_smoke(self):
         self.assertTrue(torch.allclose(y, ref_y))
 
     def test_tile_positional_embedding_export(self):
-
         tpe_ep = torch.export.export(
             self.tpe,
             (self.x, self.aspect_ratio),
@@ -57,6 +56,7 @@ def test_tile_positional_embedding_export(self):
                 self.dynamic_shape,
                 None,
             ),  # assuming aspect ratio is static
+            strict=True,
         )
 
         y = tpe_ep.module()(self.x, self.aspect_ratio)
@@ -91,6 +91,7 @@ def test_tile_positional_embedding_et(self):
                 self.dynamic_shape,
                 None,
             ),  # assuming aspect ratio is static
+            strict=True,
         )
         et_program = to_edge(
             tpe_ep,
@@ -148,7 +149,6 @@ def test_tiled_token_positional_embedding_smoke(self):
         assert_close(y, ref_y)
 
     def test_tiled_token_positional_embedding_export(self):
-
         tpe_ep = torch.export.export(
             self.tpe,
             (self.x, self.aspect_ratio),
@@ -156,6 +156,7 @@ def test_tiled_token_positional_embedding_export(self):
                 self.dynamic_shape,
                 None,
             ),  # assuming aspect ratio is static
+            strict=True,
         )
 
         y = tpe_ep.module()(self.x, self.aspect_ratio)
@@ -172,6 +173,7 @@ def test_tiled_token_positional_embedding_aoti(self):
                 self.dynamic_shape,
                 None,
             ),  # assuming aspect ratio is static
+            strict=True,
         )
 
         with tempfile.TemporaryDirectory() as tmpdir:
@@ -195,6 +197,7 @@ def test_tiled_token_positional_embedding_et(self):
                 self.dynamic_shape,
                 None,
             ),  # assuming aspect ratio is static
+            strict=True,
         )
         et_program = to_edge(
             tpe_ep,
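--
For context, a minimal sketch (not part of the patch) of the call
pattern this diff pins down. The Toy module below is hypothetical; the
real tests export attention and positional-embedding modules, with
kwargs and dynamic shapes as shown in the hunks above:

    import torch

    class Toy(torch.nn.Module):
        def forward(self, x: torch.Tensor) -> torch.Tensor:
            return x * 2

    # strict=True traces the module with TorchDynamo and validates the
    # soundness of the captured graph; passing it explicitly keeps the
    # tests on strict export regardless of what the default value of
    # `strict` is in a given torch release.
    ep = torch.export.export(Toy(), (torch.randn(4),), strict=True)
    print(ep.module()(torch.randn(4)))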