-
Notifications
You must be signed in to change notification settings - Fork 72
[Pass] Support lift constants to initializers pass #2160
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
Merged
justinchuby
merged 16 commits into
microsoft:main
from
titaiwangms:titaiwang/add_constant_to_initilizer_pass
Apr 10, 2025
Merged
Changes from 8 commits
Commits
Show all changes
16 commits
Select commit
Hold shift + click to select a range
c662194
support lift constants to initializers pass
titaiwangms c59af87
resolve conflict
titaiwangms c0ea8e6
lint
titaiwangms 7db9be0
update name authority
titaiwangms c007277
Merge branch 'main' into titaiwang/add_constant_to_initilizer_pass
titaiwangms 1956c7f
update
titaiwangms 0ba7025
add constant attribute variations
titaiwangms c830972
add tests
titaiwangms a285ceb
add support to subgraph
titaiwangms 3b15f45
Merge branch 'main' into titaiwang/add_constant_to_initilizer_pass
titaiwangms 22df674
add new tests
titaiwangms 2c87912
Merge branch 'main' into titaiwang/add_constant_to_initilizer_pass
titaiwangms 82c4016
address reviews
titaiwangms a2e9d2a
assert to self.assert
titaiwangms 0a731e6
Merge branch 'main' into titaiwang/add_constant_to_initilizer_pass
titaiwangms e648167
Update onnxscript/ir/passes/common/constant_manipulation_test.py
titaiwangms File filter
Filter by extension
Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
There are no files selected for viewing
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,101 @@ | ||
# Copyright (c) Microsoft Corporation. | ||
# Licensed under the MIT License. | ||
"""Lift constants to initializers.""" | ||
|
||
from __future__ import annotations | ||
|
||
__all__ = [ | ||
"LiftConstantsToInitializersPass", | ||
] | ||
|
||
import logging | ||
|
||
import numpy as np | ||
|
||
from onnxscript import ir | ||
|
||
logger = logging.getLogger(__name__) | ||
|
||
|
||
class LiftConstantsToInitializersPass(ir.passes.InPlacePass):
    """Lift ``Constant`` nodes in the main graph to graph initializers.

    Every ``Constant`` node (default ONNX domain) carrying exactly one value
    attribute is converted into an initializer named after the node's output;
    all uses of the output are rewired to the initializer and the node is
    removed from the graph. Nodes with unsupported or multiple attributes are
    skipped with a debug log. Only the main graph is visited; subgraphs inside
    node attributes are not traversed.
    """

    def call(self, model: ir.Model) -> ir.passes.PassResult:
        """Convert constant nodes in main graph to initializers.

        Args:
            model: The model to mutate in place.

        Returns:
            A ``PassResult`` whose ``modified`` flag is True when at least one
            constant node was lifted.
        """
        count = 0
        for node in model.graph:
            # The canonical alias of the default ONNX domain is "ai.onnx".
            # (The original checked the misspelled "onnx.ai", which silently
            # skipped Constant nodes that spell out the default domain.)
            if node.op_type != "Constant" or node.domain not in ("", "ai.onnx"):
                continue

            # A valid Constant node carries exactly one of the value_* attributes.
            constant_node_attribute = set(node.attributes.keys())
            if len(constant_node_attribute) != 1:
                logger.debug(
                    "Invalid constant node '%s' has more than one attribute", node.name
                )
                continue

            attr_name, attr_value = next(iter(node.attributes.items()))
            initializer_name = node.outputs[0].name
            assert initializer_name is not None
            assert isinstance(attr_value, ir.Attr)
            tensor = _constant_node_attribute_to_tensor(
                attr_name, attr_value, initializer_name
            )
            if tensor is None:
                logger.debug(
                    "Invalid constant node '%s' has unsupported attribute value", node.name
                )
                continue
            # Register an initializer carrying the constant's tensor value.
            initializer = ir.Value(
                name=initializer_name,
                shape=tensor.shape,  # type: ignore[arg-type]
                type=ir.TensorType(tensor.dtype),
                const_value=tensor,
            )
            # TODO(titaiwang): Is it possible that the initializer name has
            # been taken?
            model.graph.register_initializer(initializer)

            # Replace the constant node with the initializer, then drop the
            # node (safe=True verifies it is no longer used).
            ir.convenience.replace_all_uses_with(node.outputs[0], initializer)
            model.graph.remove(node, safe=True)

            count += 1
            logger.info(
                "Converted constant node '%s' to initializer '%s'", node.name, initializer_name
            )
        if count:
            logger.info("Lifted %s constants to initializers", count)
        return ir.passes.PassResult(model, modified=bool(count))
|
||
|
||
def _constant_node_attribute_to_tensor(
    attr_name: str, attr_value: ir.Attr, initializer_name: str
) -> ir.Tensor | None:
    """Convert a Constant node's value attribute to an ``ir.Tensor``.

    Args:
        attr_name: Name of the attribute (``value``, ``value_int``, ...).
        attr_value: The attribute holding the constant payload.
        initializer_name: Name to assign to the resulting tensor.

    Returns:
        The converted tensor, or None when the attribute is not a recognized
        Constant value attribute.
    """
    # The "value" attribute already holds a tensor; pass it through directly.
    if attr_name == "value":
        return attr_value.as_tensor()  # type: ignore[union-attr]

    # Every other supported attribute is a scalar or list wrapped into a
    # numpy array: map attribute name -> (extractor, target numpy dtype).
    _converters = {
        "value_int": (attr_value.as_int, np.int64),
        "value_ints": (attr_value.as_ints, np.int64),
        "value_float": (attr_value.as_float, np.float32),
        "value_floats": (attr_value.as_floats, np.float32),
        "value_string": (attr_value.as_string, np.object_),
        "value_strings": (attr_value.as_strings, np.object_),
    }
    entry = _converters.get(attr_name)
    if entry is None:
        # Unrecognized attribute name (e.g. "sparse_value"): not supported.
        return None
    extract, np_dtype = entry
    return ir.Tensor(np.array(extract(), dtype=np_dtype), name=initializer_name)
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,67 @@ | ||
# Copyright (c) Microsoft Corporation. | ||
|
||
# Licensed under the MIT License. | ||
from __future__ import annotations | ||
|
||
|
||
import unittest | ||
|
||
import numpy as np | ||
import parameterized | ||
|
||
from onnxscript import ir | ||
from onnxscript.ir.passes.common import constant_manipulation | ||
|
||
|
||
class TestLiftConstantsToInitializersPass(unittest.TestCase):
    """Tests for constant_manipulation.LiftConstantsToInitializersPass."""

    @parameterized.parameterized.expand(
        [
            (ir.DataType.FLOAT, np.float32),
            (ir.DataType.INT64, np.int64),
        ]
    )
    def test_pass_with_lifting_constants_to_initializers(self, ir_dtype, numpy_dtype):
        """A Constant feeding Add/Mul is lifted into a graph initializer."""
        graph_inputs = [
            ir.Value(
                name="input_a",
                type=ir.TensorType(ir_dtype),
                shape=ir.Shape((2, 3)),
            ),
            ir.Value(
                name="input_b",
                type=ir.TensorType(ir_dtype),
                shape=ir.Shape((2, 3)),
            ),
        ]

        constant_tensor = ir.tensor(np.random.rand(2, 3).astype(numpy_dtype))
        const_node = ir.Node(
            "",
            "Constant",
            inputs=[],
            attributes=ir.convenience.convert_attributes({"value": constant_tensor}),
            num_outputs=1,
        )
        add_node = ir.Node("", "Add", inputs=[graph_inputs[0], const_node.outputs[0]])
        mul_node = ir.Node("", "Mul", inputs=[add_node.outputs[0], graph_inputs[1]])

        model = ir.Model(
            graph=ir.Graph(
                inputs=graph_inputs,
                outputs=mul_node.outputs,
                nodes=[const_node, add_node, mul_node],
                opset_imports={"": 20},
            ),
            ir_version=10,
        )

        # Before the pass: no initializers and exactly one Constant node.
        self.assertEqual(len(model.graph.initializers), 0)
        self.assertEqual(
            len([node for node in model.graph if node.op_type == "Constant"]), 1
        )

        # Run the pass under test.
        result = constant_manipulation.LiftConstantsToInitializersPass()(model)
        self.assertTrue(result.modified)

        # After the pass: one initializer holding the constant's value.
        self.assertEqual(len(result.model.graph.initializers), 1)
        # "val_0" is the output name assigned by the graph's name authority.
        self.assertEqual(
            result.model.graph.initializers["val_0"].const_value,
            constant_tensor,
        )
        # ...and the Constant node has been removed.
        self.assertEqual(
            len([node for node in result.model.graph if node.op_type == "Constant"]), 0
        )
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Oops, something went wrong.
Add this suggestion to a batch that can be applied as a single commit.
This suggestion is invalid because no changes were made to the code.
Suggestions cannot be applied while the pull request is closed.
Suggestions cannot be applied while viewing a subset of changes.
Only one suggestion per line can be applied in a batch.
Add this suggestion to a batch that can be applied as a single commit.
Applying suggestions on deleted lines is not supported.
You must change the existing code in this line in order to create a valid suggestion.
Outdated suggestions cannot be applied.
This suggestion has been applied or marked resolved.
Suggestions cannot be applied from pending reviews.
Suggestions cannot be applied on multi-line comments.
Suggestions cannot be applied while the pull request is queued to merge.
Suggestion cannot be applied right now. Please check back later.
Uh oh!
There was an error while loading. Please reload this page.