-
Notifications
You must be signed in to change notification settings - Fork 102
Improve Reshape and Slice folding #2807
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
base: main
Are you sure you want to change the base?
Changes from all commits
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -0,0 +1,65 @@ | ||
| # Copyright (c) Microsoft Corporation. | ||
|
||
| # Licensed under the MIT License. | ||
| """Materialize Reshape shape input from known output shape. | ||
|
|
||
| When symbolic shape inference has been run, a Reshape node may have a known | ||
| output shape even though its shape input is computed dynamically (e.g., via a | ||
| Shape → Cast → Split → Concat chain). This rule replaces the shape input | ||
| with a concrete constant, allowing the dynamic chain to become dead code and | ||
| be removed by unused-node elimination. | ||
|
|
||
| - Fully static output shape → constant with exact dims. | ||
| - Exactly one symbolic dim → replace it with ``-1`` (Reshape infers it). | ||
| """ | ||
|
|
||
| from __future__ import annotations | ||
|
|
||
| from onnxscript import ir | ||
| from onnxscript.rewriter import _ir_utils as ir_utils | ||
| from onnxscript.rewriter._basics import MatchResult | ||
| from onnxscript.rewriter._rewrite_rule import RewriteRuleClassBase, RewriteRuleSet | ||
|
|
||
|
|
||
class MaterializeReshapeShape(RewriteRuleClassBase):
    """Replace a dynamic Reshape shape input with a constant when output shape is known.

    Matches any ``Reshape(data, shape)`` whose ``shape`` input is not already a
    constant but whose output shape has been annotated (e.g. by symbolic shape
    inference). The dynamic shape-producing chain then becomes dead code that
    unused-node elimination can remove.

    Supported cases:
    - Fully static output shape -> constant with the exact dims.
    - Exactly one symbolic dim -> that dim becomes ``-1`` (Reshape infers it).
    """

    def pattern(self, op, data, shape):
        return op.Reshape(data, shape)

    def check(self, context, data: ir.Value, shape: ir.Value) -> MatchResult:
        """Decide whether the shape input can be materialized.

        Stores the computed dims in ``self._new_dims`` and the attribute value
        for the rewritten node in ``self._allowzero`` on success.
        """
        check_result = MatchResult()

        # Shape input must not already be a constant; otherwise there is
        # nothing to materialize.
        if ir_utils.get_numpy_value(shape) is not None:
            return check_result.fail("Shape input is already a constant.")

        output = context.output_values[0]
        if output.shape is None:
            return check_result.fail("Output shape is not known.")

        dims = list(output.shape)
        sym_count = sum(1 for d in dims if not isinstance(d, int))
        # More than one unknown dim cannot be expressed: Reshape allows at
        # most a single -1.
        if sym_count > 1:
            return check_result.fail(
                f"Output shape has {sym_count} symbolic dims, cannot materialize."
            )
        new_dims = [-1 if not isinstance(d, int) else int(d) for d in dims]

        # allowzero handling: with allowzero=0, a literal 0 in the shape
        # tensor means "copy the corresponding input dim", NOT a zero-sized
        # dim. Since `dims` are the *actual* output dims, a 0 entry must be
        # taken literally, which requires allowzero=1. However, allowzero=1
        # forbids combining 0 with -1 in the same shape, so that combination
        # cannot be materialized.
        if 0 in new_dims:
            if -1 in new_dims:
                return check_result.fail(
                    "Output shape mixes a zero dim with a symbolic dim; "
                    "cannot be expressed with allowzero=1."
                )
            self._allowzero = 1
        else:
            # No zero dims: allowzero is irrelevant to the result, so
            # preserve the original node's attribute.
            self._allowzero = context.nodes[0].attributes.get_int("allowzero", 0)

        self._new_dims = new_dims
        return check_result

    def rewrite(self, op, data: ir.Value, shape: ir.Value):
        """Emit a Reshape whose shape input is the materialized constant."""
        new_shape = op.Constant(
            value=ir.tensor(self._new_dims, dtype=ir.DataType.INT64),
        )
        # `or None` omits the attribute entirely when it is 0 (the default).
        return op.Reshape(data, new_shape, allowzero=self._allowzero or None)
|
|
||
|
|
||
# Instantiate the rewrite rule once at module import time.
materialize_reshape_shape_rule = MaterializeReshapeShape.rule()

# Public rule set consumed by the rewriter driver (apply_to_model).
rules = RewriteRuleSet([materialize_reshape_shape_rule])
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -0,0 +1,169 @@ | ||
| # Copyright (c) Microsoft Corporation. | ||
| # Licensed under the MIT License. | ||
| from __future__ import annotations | ||
|
|
||
| import unittest | ||
|
|
||
| import numpy as np | ||
|
|
||
| from onnxscript import ir | ||
| from onnxscript.rewriter import testing | ||
| from onnxscript.rewriter.rules.common import _materialize_reshape_shape | ||
|
|
||
|
|
||
class MaterializeReshapeShapeTest(unittest.TestCase):
    """Tests for the MaterializeReshapeShape rewrite rule.

    Each test builds a small model from ONNX text, manually annotates the
    Reshape output shape (standing in for symbolic shape inference), applies
    the rule set, and checks whether/how the shape input was materialized.
    """

    def test_fully_static_output_shape_materializes(self):
        """When output shape is fully static, replace dynamic shape input with constant."""
        model = ir.from_onnx_text(
            """
            <ir_version: 7, opset_import: [ "" : 17]>
            agraph (float[6] data) => (float[2, 3] output)
            {
                shape = Shape(data)
                output = Reshape(data, shape)
            }
            """
        )
        # Annotate the Reshape output as if shape inference had run.
        for node in model.graph:
            if node.op_type == "Reshape":
                node.outputs[0].shape = ir.Shape([2, 3])
                break
        count = _materialize_reshape_shape.rules.apply_to_model(model)
        self.assertEqual(count, 1)
        reshape_nodes = [n for n in model.graph if n.op_type == "Reshape"]
        self.assertEqual(len(reshape_nodes), 1)
        # The rewritten node's shape input must now be a constant [2, 3].
        shape_input = reshape_nodes[0].inputs[1]
        self.assertIsNotNone(shape_input.const_value)
        self.assertEqual(shape_input.const_value.numpy().tolist(), [2, 3])

    def test_one_symbolic_dim_uses_minus_one(self):
        """When output has one symbolic dim, replace it with -1."""
        model = ir.from_onnx_text(
            """
            <ir_version: 7, opset_import: [ "" : 17]>
            agraph (float[6] data) => (float[B, 3] output)
            {
                shape = Shape(data)
                output = Reshape(data, shape)
            }
            """
        )
        for node in model.graph:
            if node.op_type == "Reshape":
                node.outputs[0].shape = ir.Shape(["B", 3])
                break
        count = _materialize_reshape_shape.rules.apply_to_model(model)
        self.assertEqual(count, 1)
        reshape_nodes = [n for n in model.graph if n.op_type == "Reshape"]
        self.assertEqual(len(reshape_nodes), 1)
        # The symbolic dim "B" is materialized as -1 so Reshape infers it.
        shape_input = reshape_nodes[0].inputs[1]
        self.assertIsNotNone(shape_input.const_value)
        self.assertEqual(shape_input.const_value.numpy().tolist(), [-1, 3])

    def test_two_symbolic_dims_not_materialized(self):
        """When output has two symbolic dims, the rule should not fire."""
        model = ir.from_onnx_text(
            """
            <ir_version: 7, opset_import: [ "" : 17]>
            agraph (float[6] data) => (float[B, C] output)
            {
                shape = Shape(data)
                output = Reshape(data, shape)
            }
            """
        )
        for node in model.graph:
            if node.op_type == "Reshape":
                node.outputs[0].shape = ir.Shape(["B", "C"])
                break
        # Reshape allows at most one -1, so two unknowns cannot be expressed.
        count = _materialize_reshape_shape.rules.apply_to_model(model)
        self.assertEqual(count, 0)

    def test_constant_shape_input_not_replaced(self):
        """When the shape input is already a constant, the rule should not fire."""
        model = ir.from_onnx_text(
            """
            <ir_version: 7, opset_import: [ "" : 17]>
            agraph (float[6] data) => (float[2, 3] output)
            {
                shape = Constant<value: tensor = int64[2] {2, 3}>()
                output = Reshape(data, shape)
            }
            """
        )
        count = _materialize_reshape_shape.rules.apply_to_model(model)
        self.assertEqual(count, 0)

    def test_unknown_output_shape_not_materialized(self):
        """When the output shape is unknown, the rule should not fire."""
        model = ir.from_onnx_text(
            """
            <ir_version: 7, opset_import: [ "" : 17]>
            agraph (float[6] data) => (float output)
            {
                shape = Shape(data)
                output = Reshape(data, shape)
            }
            """
        )
        # Explicitly clear the output shape to model "no inference available".
        for node in model.graph:
            if node.op_type == "Reshape":
                node.outputs[0].shape = None
                break
        count = _materialize_reshape_shape.rules.apply_to_model(model)
        self.assertEqual(count, 0)

    def test_allowzero_attribute_preserved(self):
        """The allowzero attribute should be preserved on the new Reshape."""
        model = ir.from_onnx_text(
            """
            <ir_version: 7, opset_import: [ "" : 17]>
            agraph (float[6] data) => (float[2, 3] output)
            {
                shape = Shape(data)
                output = Reshape<allowzero=1>(data, shape)
            }
            """
        )
        for node in model.graph:
            if node.op_type == "Reshape":
                node.outputs[0].shape = ir.Shape([2, 3])
                break
        count = _materialize_reshape_shape.rules.apply_to_model(model)
        self.assertEqual(count, 1)
        reshape_nodes = [n for n in model.graph if n.op_type == "Reshape"]
        self.assertEqual(len(reshape_nodes), 1)
        allowzero = reshape_nodes[0].attributes.get_int("allowzero", 0)
        self.assertEqual(allowzero, 1)

    def test_numerical_correctness_static(self):
        """Verify numerical equivalence for fully static materialization."""
        # Build a model where a dynamic Shape produces the shape for Reshape.
        # After materialization, the Reshape uses a constant shape.
        model_text = """
        <ir_version: 7, opset_import: [ "" : 17]>
        agraph (float[12] data, float[3, 4] ref) => (float[3, 4] output)
        {
            shape = Shape(ref)
            output = Reshape(data, shape)
        }
        """
        original = ir.from_onnx_text(model_text)
        model = ir.from_onnx_text(model_text)
        for node in model.graph:
            if node.op_type == "Reshape":
                node.outputs[0].shape = ir.Shape([3, 4])
                break
        _materialize_reshape_shape.rules.apply_to_model(model)
        # Run both models on the same inputs and compare outputs.
        testing.assert_numerically_equal(
            original,
            model,
            (
                np.arange(12).astype(np.float32),
                np.zeros((3, 4), dtype=np.float32),
            ),
        )
|
|
||
|
|
||
# Allow running this test file directly with `python <file>`.
if __name__ == "__main__":
    unittest.main()
Uh oh!
There was an error while loading. Please reload this page.