From 9f57bc001725163499ae2865b32893bc6c8093d3 Mon Sep 17 00:00:00 2001
From: Shushi Hong <820958424@qq.com>
Date: Mon, 14 Apr 2025 16:44:05 +0800
Subject: [PATCH 01/20] Update fx_translator.py

---
 python/tvm/relax/frontend/torch/fx_translator.py | 1 +
 1 file changed, 1 insertion(+)

diff --git a/python/tvm/relax/frontend/torch/fx_translator.py b/python/tvm/relax/frontend/torch/fx_translator.py
index a5b50a7d1dce..2effb68b7e8e 100644
--- a/python/tvm/relax/frontend/torch/fx_translator.py
+++ b/python/tvm/relax/frontend/torch/fx_translator.py
@@ -761,6 +761,7 @@ def create_convert_map(
             "lerp": self._lerp,
             # statistical
             "mean": self._mean,
+            "norm": self._norm,
             "prod": self._prod,
             "std": self._std,
             "sum": self._sum,

From 76a742b2432f9ced13ee2882a47bb98c0fb172df Mon Sep 17 00:00:00 2001
From: Shushi Hong <820958424@qq.com>
Date: Mon, 14 Apr 2025 16:45:36 +0800
Subject: [PATCH 02/20] Update base_fx_graph_translator.py

---
 .../torch/base_fx_graph_translator.py | 26 +++++++++++++++++++
 1 file changed, 26 insertions(+)

diff --git a/python/tvm/relax/frontend/torch/base_fx_graph_translator.py b/python/tvm/relax/frontend/torch/base_fx_graph_translator.py
index c9c6afd71a64..0a2abf99bee8 100644
--- a/python/tvm/relax/frontend/torch/base_fx_graph_translator.py
+++ b/python/tvm/relax/frontend/torch/base_fx_graph_translator.py
@@ -917,6 +917,32 @@ def _mean(self, node: fx.Node) -> relax.Var:
         keepdim = args[2] if len(node.args) > 2 else node.kwargs.get("keepdim", False)
         return self.block_builder.emit(relax.op.mean(x, dim, keepdims=keepdim))
 
+    def _norm(self, node: fx.Node) -> relax.Var:
+        data = self.env[node.args[0]]
+        dtype = data.struct_info.dtype
+        order = node.args[1] if len(node.args) > 1 else node.kwargs.get("p", 2)
+        axis = node.args[2] if len(node.args) > 2 else None
+        keepdims = node.args[3] if len(node.args) > 3 else False
+
+        if order == float("inf"):
+            return self.block_builder.emit(relax.op.max(relax.op.abs(data), axis=axis, keepdims=keepdims))
+        elif order == float("-inf"):
+            return self.block_builder.emit(relax.op.min(relax.op.abs(data), axis=axis, keepdims=keepdims))
+        # frobenius_norm
+        elif order == "fro":
+            return self.block_builder.emit(
+                relax.op.sqrt(relax.op.sum(relax.op.multiply(data, data), axis=axis, keepdims=keepdims))
+            )
+        else:
+            reci_order = relax.const(1 / order, dtype=dtype)
+            order = relax.const(order, dtype=dtype)
+            return self.block_builder.emit(
+                relax.op.power(
+                    relax.op.sum(relax.op.power(relax.op.abs(data), order), axis=axis, keepdims=keepdims),
+                    reci_order,
+                )
+            )
+
     def _prod(self, node: fx.Node) -> relax.Var:
         args = self.retrieve_args(node)
         x = args[0]

From 315647f075c15fb2afda8bef55b5e032505f50ee Mon Sep 17 00:00:00 2001
From: Shushi Hong <820958424@qq.com>
Date: Mon, 14 Apr 2025 16:46:37 +0800
Subject: [PATCH 03/20] Update test_frontend_from_fx.py

---
 tests/python/relax/test_frontend_from_fx.py | 138 ++++++++++++++++++++
 1 file changed, 138 insertions(+)

diff --git a/tests/python/relax/test_frontend_from_fx.py b/tests/python/relax/test_frontend_from_fx.py
index ee5a5c78c74a..2b69147877b6 100644
--- a/tests/python/relax/test_frontend_from_fx.py
+++ b/tests/python/relax/test_frontend_from_fx.py
@@ -4513,5 +4513,143 @@ def main(
     verify_model(Narrow(), [([5, 3], "float32")], {}, Expected)
 
 
+def test_norm():
+    input_info = [([1, 3, 5, 3], "float32")]
+
+    class Norm1(Module):
+        def forward(self, x):
+            return torch.norm(x, p=float('inf'), dim=None, keepdim=False)
+
+    @tvm.script.ir_module
+    class Expected1:
+        @R.function
+        def main(
+            inp_0: R.Tensor((1, 3, 5, 3),
dtype="float32"), + ) -> R.Tensor((), dtype="float32"): + with R.dataflow(): + lv: R.Tensor((), dtype="float32") = R.max(R.abs(inp_0), axis=None, keepdims=False) + gv: R.Tensor((), dtype="float32") = lv + R.output(gv) + return gv + + class Norm2(Module): + def forward(self, x): + return torch.norm(x, p=float('-inf'), dim=None, keepdim=False) + + @tvm.script.ir_module + class Expected2: + @R.function + def main( + inp_0: R.Tensor((1, 3, 5, 3), dtype="float32"), + ) -> R.Tensor((), dtype="float32"): + with R.dataflow(): + lv: R.Tensor((), dtype="float32") = R.min(R.abs(inp_0), axis=None, keepdims=False) + gv: R.Tensor((), dtype="float32") = lv + R.output(gv) + return gv + + class Norm3(Module): + def forward(self, x): + return torch.norm(x, p=float(2), dim=None, keepdim=False) + + @tvm.script.ir_module + class Expected3: + @R.function + def main( + inp_0: R.Tensor((1, 3, 5, 3), dtype="float32"), + ) -> R.Tensor((), dtype="float32"): + with R.dataflow(): + lv: R.Tensor((1, 3, 5, 3), dtype="float32") = R.abs(inp_0) + lv1: R.Tensor((1, 3, 5, 3), dtype="float32") = R.power(lv, R.const(2, "float32")) + lv2: R.Tensor((), dtype="float32") = R.sum(lv1, axis=None, keepdims=False) + lv3: R.Tensor((), dtype="float32") = R.power(lv2, R.const(0.5, "float32")) + gv: R.Tensor((), dtype="float32") = lv3 + R.output(gv) + return gv + + class Norm4(Module): + def forward(self, x): + return torch.norm(x, p=float(1.0), dim=None, keepdim=False) + + @tvm.script.ir_module + class Expected4: + @R.function + def main( + inp_0: R.Tensor((1, 3, 5, 3), dtype="float32"), + ) -> R.Tensor((), dtype="float32"): + with R.dataflow(): + lv: R.Tensor((1, 3, 5, 3), dtype="float32") = R.abs(inp_0) + lv1: R.Tensor((1, 3, 5, 3), dtype="float32") = R.power(lv, R.const(1.0, "float32")) + lv2: R.Tensor((), dtype="float32") = R.sum(lv1, axis=None, keepdims=False) + lv3: R.Tensor((), dtype="float32") = R.power(lv2, R.const(1.0, "float32")) + gv: R.Tensor((), dtype="float32") = lv3 + R.output(gv) + return gv + + class Norm5(Module): + def forward(self, x): + return torch.norm(x, p=float(-4), dim=None, keepdim=True) + + @tvm.script.ir_module + class Expected5: + @R.function + def main( + inp_0: R.Tensor((1, 3, 5, 3), dtype="float32"), + ) -> R.Tensor((), dtype="float32"): + with R.dataflow(): + lv: R.Tensor((1, 3, 5, 3), dtype="float32") = R.abs(inp_0) + lv1: R.Tensor((1, 3, 5, 3), dtype="float32") = R.power(lv, R.const(-4, "float32")) + lv2: R.Tensor((), dtype="float32") = R.sum(lv1, axis=None, keepdims=False) + lv3: R.Tensor((), dtype="float32") = R.power(lv2, R.const(-0.25, "float32")) + gv: R.Tensor((), dtype="float32") = lv3 + R.output(gv) + return gv + + class Norm6(Module): + def forward(self, x): + return torch.norm(x, p=float(0.5), dim=None, keepdim=True) + + @tvm.script.ir_module + class Expected6: + @R.function + def main( + inp_0: R.Tensor((1, 3, 5, 3), dtype="float32"), + ) -> R.Tensor((), dtype="float32"): + with R.dataflow(): + lv: R.Tensor((1, 3, 5, 3), dtype="float32") = R.abs(inp_0) + lv1: R.Tensor((1, 3, 5, 3), dtype="float32") = R.power(lv, R.const(0.5, "float32")) + lv2: R.Tensor((), dtype="float32") = R.sum(lv1, axis=None, keepdims=False) + lv3: R.Tensor((), dtype="float32") = R.power(lv2, R.const(2, "float32")) + gv: R.Tensor((), dtype="float32") = lv3 + R.output(gv) + return gv + + class Norm7(Module): + def forward(self, x): + return torch.norm(x, p="fro", dim=None, keepdim=False) + + @tvm.script.ir_module + class Expected7: + @R.function + def main( + inp_0: R.Tensor((1, 3, 5, 3), dtype="float32"), + ) -> 
R.Tensor((), dtype="float32"): + with R.dataflow(): + lv: R.Tensor((1, 3, 5, 3), dtype="float32") = R.multiply(inp_0, inp_0) + lv1: R.Tensor((), dtype="float32") = R.sum(lv, axis=None, keepdims=False) + lv2: R.Tensor((), dtype="float32") = R.sqrt(lv1) + gv: R.Tensor((), dtype="float32") = lv2 + R.output(gv) + return gv + + verify_model(Norm1(), input_info, {}, Expected1) + verify_model(Norm2(), input_info, {}, Expected2) + verify_model(Norm3(), input_info, {}, Expected3) + verify_model(Norm4(), input_info, {}, Expected4) + verify_model(Norm5(), input_info, {}, Expected5) + verify_model(Norm6(), input_info, {}, Expected6) + verify_model(Norm7(), input_info, {}, Expected7) + + if __name__ == "__main__": tvm.testing.main() From a05053e2f0b9b1f01145f892d262a255248ff4b2 Mon Sep 17 00:00:00 2001 From: Shushi Hong <820958424@qq.com> Date: Wed, 16 Apr 2025 00:22:33 +0800 Subject: [PATCH 04/20] Update base_fx_graph_translator.py --- .../frontend/torch/base_fx_graph_translator.py | 14 +++++++++++--- 1 file changed, 11 insertions(+), 3 deletions(-) diff --git a/python/tvm/relax/frontend/torch/base_fx_graph_translator.py b/python/tvm/relax/frontend/torch/base_fx_graph_translator.py index 0a2abf99bee8..3a8b56b8d41e 100644 --- a/python/tvm/relax/frontend/torch/base_fx_graph_translator.py +++ b/python/tvm/relax/frontend/torch/base_fx_graph_translator.py @@ -925,13 +925,21 @@ def _norm(self, node: fx.Node) -> relax.Var: keepdims = node.args[3] if len(node.args) > 3 else False if order == float("inf"): - return self.block_builder.emit(relax.op.max(relax.op.abs(data), axis=axis, keepdims=keepdims)) + return self.block_builder.emit( + relax.op.max(relax.op.abs(data), axis=axis, keepdims=keepdims) + ) elif order == float("-inf"): - return self.block_builder.emit(relax.op.min(relax.op.abs(data), axis=axis, keepdims=keepdims)) + return self.block_builder.emit( + relax.op.min(relax.op.abs(data), axis=axis, keepdims=keepdims) + ) # frobenius_norm elif order == "fro": return self.block_builder.emit( - relax.op.sqrt(relax.op.sum(relax.op.multiply(data, data), axis=axis, keepdims=keepdims)) + relax.op.sqrt( + relax.op.sum( + relax.op.multiply(data, data), axis=axis, keepdims=keepdims + ), + ) ) else: reci_order = relax.const(1 / order, dtype=dtype) From 6385d5072c493475628a5acd7fe3bde60d392bfa Mon Sep 17 00:00:00 2001 From: Shushi Hong <820958424@qq.com> Date: Wed, 16 Apr 2025 00:24:36 +0800 Subject: [PATCH 05/20] Update test_frontend_from_fx.py --- tests/python/relax/test_frontend_from_fx.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/python/relax/test_frontend_from_fx.py b/tests/python/relax/test_frontend_from_fx.py index 2b69147877b6..7e306320e620 100644 --- a/tests/python/relax/test_frontend_from_fx.py +++ b/tests/python/relax/test_frontend_from_fx.py @@ -4518,7 +4518,7 @@ def test_norm(): class Norm1(Module): def forward(self, x): - return torch.norm(x, p=float('inf'), dim=None, keepdim=False) + return torch.norm(x, p=float("inf"), dim=None, keepdim=False) @tvm.script.ir_module class Expected1: @@ -4534,7 +4534,7 @@ def main( class Norm2(Module): def forward(self, x): - return torch.norm(x, p=float('-inf'), dim=None, keepdim=False) + return torch.norm(x, p=float("-inf"), dim=None, keepdim=False) @tvm.script.ir_module class Expected2: From 9e872db456a436ccfd0e7fac305019e386bf26ec Mon Sep 17 00:00:00 2001 From: Shushi Hong <820958424@qq.com> Date: Wed, 16 Apr 2025 00:37:30 +0800 Subject: [PATCH 06/20] Update base_fx_graph_translator.py --- 
.../tvm/relax/frontend/torch/base_fx_graph_translator.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/python/tvm/relax/frontend/torch/base_fx_graph_translator.py b/python/tvm/relax/frontend/torch/base_fx_graph_translator.py index 3a8b56b8d41e..8df014767537 100644 --- a/python/tvm/relax/frontend/torch/base_fx_graph_translator.py +++ b/python/tvm/relax/frontend/torch/base_fx_graph_translator.py @@ -936,9 +936,7 @@ def _norm(self, node: fx.Node) -> relax.Var: elif order == "fro": return self.block_builder.emit( relax.op.sqrt( - relax.op.sum( - relax.op.multiply(data, data), axis=axis, keepdims=keepdims - ), + relax.op.sum(relax.op.multiply(data, data), axis=axis, keepdims=keepdims), ) ) else: @@ -946,7 +944,9 @@ def _norm(self, node: fx.Node) -> relax.Var: order = relax.const(order, dtype=dtype) return self.block_builder.emit( relax.op.power( - relax.op.sum(relax.op.power(relax.op.abs(data), order), axis=axis, keepdims=keepdims), + relax.op.sum( + relax.op.power(relax.op.abs(data), order), axis=axis, keepdims=keepdims + ), reci_order, ) ) From e08ded242cd555a309bb6fe162f9b63750e424ef Mon Sep 17 00:00:00 2001 From: Shushi Hong <820958424@qq.com> Date: Wed, 16 Apr 2025 16:14:33 +0800 Subject: [PATCH 07/20] Update test_frontend_from_fx.py --- tests/python/relax/test_frontend_from_fx.py | 52 ++++++++++----------- 1 file changed, 25 insertions(+), 27 deletions(-) diff --git a/tests/python/relax/test_frontend_from_fx.py b/tests/python/relax/test_frontend_from_fx.py index 7e306320e620..2c0a56017156 100644 --- a/tests/python/relax/test_frontend_from_fx.py +++ b/tests/python/relax/test_frontend_from_fx.py @@ -4514,11 +4514,19 @@ def main( def test_norm(): + input_info = [([1, 3, 5, 3], "float32")] - class Norm1(Module): + class Norm(Module): + def __init__(self, p, dim=None, keepdim=False): + super().__init__() + self.p = p + self.dim = dim + self.keepdim = keepdim + def forward(self, x): - return torch.norm(x, p=float("inf"), dim=None, keepdim=False) + return torch.norm(x, p=self.p, dim=self.dim, keepdim=self.keepdim) + @tvm.script.ir_module class Expected1: @@ -4532,9 +4540,6 @@ def main( R.output(gv) return gv - class Norm2(Module): - def forward(self, x): - return torch.norm(x, p=float("-inf"), dim=None, keepdim=False) @tvm.script.ir_module class Expected2: @@ -4548,9 +4553,6 @@ def main( R.output(gv) return gv - class Norm3(Module): - def forward(self, x): - return torch.norm(x, p=float(2), dim=None, keepdim=False) @tvm.script.ir_module class Expected3: @@ -4567,9 +4569,6 @@ def main( R.output(gv) return gv - class Norm4(Module): - def forward(self, x): - return torch.norm(x, p=float(1.0), dim=None, keepdim=False) @tvm.script.ir_module class Expected4: @@ -4586,9 +4585,6 @@ def main( R.output(gv) return gv - class Norm5(Module): - def forward(self, x): - return torch.norm(x, p=float(-4), dim=None, keepdim=True) @tvm.script.ir_module class Expected5: @@ -4605,9 +4601,6 @@ def main( R.output(gv) return gv - class Norm6(Module): - def forward(self, x): - return torch.norm(x, p=float(0.5), dim=None, keepdim=True) @tvm.script.ir_module class Expected6: @@ -4624,9 +4617,6 @@ def main( R.output(gv) return gv - class Norm7(Module): - def forward(self, x): - return torch.norm(x, p="fro", dim=None, keepdim=False) @tvm.script.ir_module class Expected7: @@ -4642,13 +4632,21 @@ def main( R.output(gv) return gv - verify_model(Norm1(), input_info, {}, Expected1) - verify_model(Norm2(), input_info, {}, Expected2) - verify_model(Norm3(), input_info, {}, Expected3) - 
verify_model(Norm4(), input_info, {}, Expected4) - verify_model(Norm5(), input_info, {}, Expected5) - verify_model(Norm6(), input_info, {}, Expected6) - verify_model(Norm7(), input_info, {}, Expected7) + norms = [ + (float('inf'), None, False), + (float('-inf'), None, False), + (float(2), None, False), + (float(1.0), None, False), + (float(-4), None, True), + (float(0.5), None, True), + ("fro", None, False) + ] + + for norm, expected in zip(norms, [ + Expected1, Expected2, Expected3, Expected4, Expected5, Expected6, Expected7 + ]): + p, dim, keepdim = norm + verify_model(Norm(p, dim=dim, keepdim=keepdim), input_info, {}, expected) if __name__ == "__main__": From 8fd8de25fe2cd95862647b78b12bf49b06814116 Mon Sep 17 00:00:00 2001 From: Shushi Hong <820958424@qq.com> Date: Wed, 16 Apr 2025 16:30:38 +0800 Subject: [PATCH 08/20] Update test_frontend_from_fx.py --- tests/python/relax/test_frontend_from_fx.py | 19 ++++++------------- 1 file changed, 6 insertions(+), 13 deletions(-) diff --git a/tests/python/relax/test_frontend_from_fx.py b/tests/python/relax/test_frontend_from_fx.py index 2c0a56017156..a962de8a3237 100644 --- a/tests/python/relax/test_frontend_from_fx.py +++ b/tests/python/relax/test_frontend_from_fx.py @@ -4527,7 +4527,6 @@ def __init__(self, p, dim=None, keepdim=False): def forward(self, x): return torch.norm(x, p=self.p, dim=self.dim, keepdim=self.keepdim) - @tvm.script.ir_module class Expected1: @R.function @@ -4540,7 +4539,6 @@ def main( R.output(gv) return gv - @tvm.script.ir_module class Expected2: @R.function @@ -4553,7 +4551,6 @@ def main( R.output(gv) return gv - @tvm.script.ir_module class Expected3: @R.function @@ -4569,7 +4566,6 @@ def main( R.output(gv) return gv - @tvm.script.ir_module class Expected4: @R.function @@ -4585,7 +4581,6 @@ def main( R.output(gv) return gv - @tvm.script.ir_module class Expected5: @R.function @@ -4601,7 +4596,6 @@ def main( R.output(gv) return gv - @tvm.script.ir_module class Expected6: @R.function @@ -4617,7 +4611,6 @@ def main( R.output(gv) return gv - @tvm.script.ir_module class Expected7: @R.function @@ -4633,18 +4626,18 @@ def main( return gv norms = [ - (float('inf'), None, False), - (float('-inf'), None, False), + (float("inf"), None, False), + (float("-inf"), None, False), (float(2), None, False), (float(1.0), None, False), (float(-4), None, True), (float(0.5), None, True), - ("fro", None, False) + ("fro", None, False), ] - for norm, expected in zip(norms, [ - Expected1, Expected2, Expected3, Expected4, Expected5, Expected6, Expected7 - ]): + for norm, expected in zip( + norms, [Expected1, Expected2, Expected3, Expected4, Expected5, Expected6, Expected7] + ): p, dim, keepdim = norm verify_model(Norm(p, dim=dim, keepdim=keepdim), input_info, {}, expected) From 56f50587e2ce704f278d6fb5201e2e3b4aa27c9a Mon Sep 17 00:00:00 2001 From: Shushi Hong <820958424@qq.com> Date: Wed, 16 Apr 2025 18:33:04 +0800 Subject: [PATCH 09/20] Update test_frontend_from_fx.py --- tests/python/relax/test_frontend_from_fx.py | 1 - 1 file changed, 1 deletion(-) diff --git a/tests/python/relax/test_frontend_from_fx.py b/tests/python/relax/test_frontend_from_fx.py index a962de8a3237..eb0e02ca55d2 100644 --- a/tests/python/relax/test_frontend_from_fx.py +++ b/tests/python/relax/test_frontend_from_fx.py @@ -14,7 +14,6 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. 
- import operator import pytest import torch From ed186f8a340545760d8aa70b14483e96bd99e76a Mon Sep 17 00:00:00 2001 From: Shushi Hong <820958424@qq.com> Date: Wed, 16 Apr 2025 18:33:13 +0800 Subject: [PATCH 10/20] Update test_frontend_from_fx.py --- tests/python/relax/test_frontend_from_fx.py | 1 + 1 file changed, 1 insertion(+) diff --git a/tests/python/relax/test_frontend_from_fx.py b/tests/python/relax/test_frontend_from_fx.py index eb0e02ca55d2..a962de8a3237 100644 --- a/tests/python/relax/test_frontend_from_fx.py +++ b/tests/python/relax/test_frontend_from_fx.py @@ -14,6 +14,7 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. + import operator import pytest import torch From 16b0cb77a74f9892ccb0f3bdc12666fa029b5109 Mon Sep 17 00:00:00 2001 From: Shushi Hong <820958424@qq.com> Date: Wed, 16 Apr 2025 19:40:07 +0800 Subject: [PATCH 11/20] Update test_frontend_from_fx.py --- tests/python/relax/test_frontend_from_fx.py | 1 - 1 file changed, 1 deletion(-) diff --git a/tests/python/relax/test_frontend_from_fx.py b/tests/python/relax/test_frontend_from_fx.py index a962de8a3237..eb0e02ca55d2 100644 --- a/tests/python/relax/test_frontend_from_fx.py +++ b/tests/python/relax/test_frontend_from_fx.py @@ -14,7 +14,6 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. - import operator import pytest import torch From 63fba7312c0d474082f89155b798757c97d33095 Mon Sep 17 00:00:00 2001 From: Shushi Hong <820958424@qq.com> Date: Wed, 16 Apr 2025 19:40:17 +0800 Subject: [PATCH 12/20] Update test_frontend_from_fx.py --- tests/python/relax/test_frontend_from_fx.py | 1 + 1 file changed, 1 insertion(+) diff --git a/tests/python/relax/test_frontend_from_fx.py b/tests/python/relax/test_frontend_from_fx.py index eb0e02ca55d2..a962de8a3237 100644 --- a/tests/python/relax/test_frontend_from_fx.py +++ b/tests/python/relax/test_frontend_from_fx.py @@ -14,6 +14,7 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. + import operator import pytest import torch From f831561df6f4fca3f6266d9fafa7e278c3ea9745 Mon Sep 17 00:00:00 2001 From: Shushi Hong <820958424@qq.com> Date: Wed, 16 Apr 2025 20:58:09 +0800 Subject: [PATCH 13/20] Update test_frontend_from_fx.py --- tests/python/relax/test_frontend_from_fx.py | 1 - 1 file changed, 1 deletion(-) diff --git a/tests/python/relax/test_frontend_from_fx.py b/tests/python/relax/test_frontend_from_fx.py index a962de8a3237..eb0e02ca55d2 100644 --- a/tests/python/relax/test_frontend_from_fx.py +++ b/tests/python/relax/test_frontend_from_fx.py @@ -14,7 +14,6 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. - import operator import pytest import torch From d9e1054c1ebc0e3d5fc7ff91b9fd432eb71401ee Mon Sep 17 00:00:00 2001 From: Shushi Hong <820958424@qq.com> Date: Wed, 16 Apr 2025 20:58:21 +0800 Subject: [PATCH 14/20] Update test_frontend_from_fx.py --- tests/python/relax/test_frontend_from_fx.py | 1 + 1 file changed, 1 insertion(+) diff --git a/tests/python/relax/test_frontend_from_fx.py b/tests/python/relax/test_frontend_from_fx.py index eb0e02ca55d2..a962de8a3237 100644 --- a/tests/python/relax/test_frontend_from_fx.py +++ b/tests/python/relax/test_frontend_from_fx.py @@ -14,6 +14,7 @@ # KIND, either express or implied. 
See the License for the # specific language governing permissions and limitations # under the License. + import operator import pytest import torch From a4f0be635764847147e8159f5cea435844c0a234 Mon Sep 17 00:00:00 2001 From: Shushi Hong <820958424@qq.com> Date: Wed, 16 Apr 2025 21:08:22 +0800 Subject: [PATCH 15/20] Update test_frontend_from_fx.py --- tests/python/relax/test_frontend_from_fx.py | 1 + 1 file changed, 1 insertion(+) diff --git a/tests/python/relax/test_frontend_from_fx.py b/tests/python/relax/test_frontend_from_fx.py index a962de8a3237..00aebfdd9f43 100644 --- a/tests/python/relax/test_frontend_from_fx.py +++ b/tests/python/relax/test_frontend_from_fx.py @@ -15,6 +15,7 @@ # specific language governing permissions and limitations # under the License. + import operator import pytest import torch From f0321ae19df2d12214533c2dced2c87daf9349d8 Mon Sep 17 00:00:00 2001 From: Shushi Hong <820958424@qq.com> Date: Wed, 16 Apr 2025 21:08:31 +0800 Subject: [PATCH 16/20] Update test_frontend_from_fx.py --- tests/python/relax/test_frontend_from_fx.py | 1 - 1 file changed, 1 deletion(-) diff --git a/tests/python/relax/test_frontend_from_fx.py b/tests/python/relax/test_frontend_from_fx.py index 00aebfdd9f43..a962de8a3237 100644 --- a/tests/python/relax/test_frontend_from_fx.py +++ b/tests/python/relax/test_frontend_from_fx.py @@ -15,7 +15,6 @@ # specific language governing permissions and limitations # under the License. - import operator import pytest import torch From b5dcbfa63dad64bb58fcbcf95e3ba716ae27502c Mon Sep 17 00:00:00 2001 From: Shushi Hong <820958424@qq.com> Date: Wed, 16 Apr 2025 22:13:13 +0800 Subject: [PATCH 17/20] Update test_frontend_from_fx.py --- tests/python/relax/test_frontend_from_fx.py | 1 - 1 file changed, 1 deletion(-) diff --git a/tests/python/relax/test_frontend_from_fx.py b/tests/python/relax/test_frontend_from_fx.py index a962de8a3237..629bddc02f14 100644 --- a/tests/python/relax/test_frontend_from_fx.py +++ b/tests/python/relax/test_frontend_from_fx.py @@ -23,7 +23,6 @@ from torch.nn import Module import torchvision import math - import tvm from tvm import relax import tvm.testing From d9a4a4ed0d5737712dafbc98a9618482ef84dc52 Mon Sep 17 00:00:00 2001 From: Shushi Hong <820958424@qq.com> Date: Wed, 16 Apr 2025 22:13:27 +0800 Subject: [PATCH 18/20] Update test_frontend_from_fx.py --- tests/python/relax/test_frontend_from_fx.py | 1 + 1 file changed, 1 insertion(+) diff --git a/tests/python/relax/test_frontend_from_fx.py b/tests/python/relax/test_frontend_from_fx.py index 629bddc02f14..a962de8a3237 100644 --- a/tests/python/relax/test_frontend_from_fx.py +++ b/tests/python/relax/test_frontend_from_fx.py @@ -23,6 +23,7 @@ from torch.nn import Module import torchvision import math + import tvm from tvm import relax import tvm.testing From 47d9e545ff143a441a1849e862b6b42d58ae1c15 Mon Sep 17 00:00:00 2001 From: Shushi Hong <820958424@qq.com> Date: Wed, 16 Apr 2025 22:44:51 +0800 Subject: [PATCH 19/20] Update test_frontend_from_fx.py --- tests/python/relax/test_frontend_from_fx.py | 1 - 1 file changed, 1 deletion(-) diff --git a/tests/python/relax/test_frontend_from_fx.py b/tests/python/relax/test_frontend_from_fx.py index a962de8a3237..eb0e02ca55d2 100644 --- a/tests/python/relax/test_frontend_from_fx.py +++ b/tests/python/relax/test_frontend_from_fx.py @@ -14,7 +14,6 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. 
-
 import operator
 import pytest
 import torch

From c34ad32de5b7b4cacdb9a00f6dda43294d2d23f2 Mon Sep 17 00:00:00 2001
From: Shushi Hong <820958424@qq.com>
Date: Wed, 16 Apr 2025 22:45:34 +0800
Subject: [PATCH 20/20] Update test_frontend_from_fx.py

---
 tests/python/relax/test_frontend_from_fx.py | 1 +
 1 file changed, 1 insertion(+)

diff --git a/tests/python/relax/test_frontend_from_fx.py b/tests/python/relax/test_frontend_from_fx.py
index eb0e02ca55d2..a962de8a3237 100644
--- a/tests/python/relax/test_frontend_from_fx.py
+++ b/tests/python/relax/test_frontend_from_fx.py
@@ -14,6 +14,7 @@
 # KIND, either express or implied. See the License for the
 # specific language governing permissions and limitations
 # under the License.
+
 import operator
 import pytest
 import torch
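
Note (not part of the patch series): a minimal sketch of how the new torch.norm mapping could be exercised locally, assuming the from_fx entry point exported by tvm.relax.frontend.torch (the API driven by verify_model in the tests above). The module name L2Norm and the input shape are illustrative only; with p=2 the trace goes through the generic branch of _norm, i.e. power(sum(power(abs(x), p)), 1/p).

    # Hypothetical end-to-end check (not part of the patches above).
    import torch
    from torch import fx, nn

    from tvm.relax.frontend.torch import from_fx


    class L2Norm(nn.Module):
        def forward(self, x):
            # p=2 exercises the generic branch of _norm:
            # power(sum(power(abs(x), 2)), 1/2)
            return torch.norm(x, p=2, dim=None, keepdim=False)


    # Trace the module and import the FX graph into Relax.
    graph_module = fx.symbolic_trace(L2Norm())
    # input_info pairs each input shape with its dtype, as in the tests above.
    mod = from_fx(graph_module, [([1, 3, 5, 3], "float32")])
    mod.show()  # print the imported Relax IRModule for inspection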