# Copyright (c) Microsoft Corporation.
# Licensed under the MIT License.
from __future__ import annotations

import unittest

import numpy as np
import parameterized

from onnxscript import ir
from onnxscript.ir.passes.common import constant_manipulation


class TestLiftConstantsToInitializersPass(unittest.TestCase):
    @parameterized.parameterized.expand(
        [
            (ir.DataType.FLOAT,),
            (ir.DataType.INT64,),
        ]
    )
    def test_pass_with_lifting_float_and_int_constants_to_initializers(self, ir_dtype):
        inputs = [
            ir.Value(name="input_a", type=ir.TensorType(ir_dtype), shape=ir.Shape((2, 3))),
            ir.Value(
                name="input_b",
                type=ir.TensorType(ir_dtype),
                shape=ir.Shape((2, 3)),
            ),
        ]

        constant_tensor = ir.tensor(np.random.rand(2, 3).astype(ir_dtype.numpy()))
        const_node = ir.node(
            "Constant", inputs=[], attributes={"value": constant_tensor}, num_outputs=1
        )
        add_node = ir.node("Add", inputs=[inputs[0], const_node.outputs[0]])
        mul_node = ir.node("Mul", inputs=[add_node.outputs[0], inputs[1]])

        model = ir.Model(
            graph=ir.Graph(
                inputs=inputs,
                outputs=mul_node.outputs,
                nodes=[const_node, add_node, mul_node],
                opset_imports={"": 20},
            ),
            ir_version=10,
        )

        # Check that the initializer is not in the graph yet
        self.assertEqual(len(model.graph.initializers), 0)
        # And 1 constant node
        self.assertEqual(len([node for node in model.graph if node.op_type == "Constant"]), 1)

        # Perform lift constants to initializers
        result = constant_manipulation.LiftConstantsToInitializersPass()(model)
        self.assertTrue(result.modified)
        # Check that the constant node is lifted to an initializer
        self.assertEqual(len(result.model.graph.initializers), 1)
        # Check the value
        self.assertEqual(
            result.model.graph.initializers[
                "val_0"
            ].const_value,  # name created by name_authority
            constant_tensor,
        )
        # And 0 constant node
        self.assertEqual(
            len([node for node in result.model.graph if node.op_type == "Constant"]), 0
        )

    def test_pass_with_lifting_constants_to_initializers_within_subgraph(self):
        input_value = ir.Value(
            name="input", type=ir.TensorType(ir.DataType.FLOAT), shape=ir.Shape((2, 3))
        )

        then_constant_tensor = ir.tensor(np.random.rand(2, 3).astype(np.float32))
        then_const_node = ir.node(
            "Constant", inputs=[], attributes={"value": then_constant_tensor}, num_outputs=1
        )
        # then branch adds the constant to the input
        # else branch multiplies the input by the constant
        add_node = ir.node("Add", inputs=[input_value, then_const_node.outputs[0]])
        then_graph = ir.Graph(
            inputs=[input_value],
            outputs=[add_node.outputs[0]],
            nodes=[then_const_node, add_node],
            opset_imports={"": 20},
        )
        else_constant_tensor = ir.tensor(np.random.rand(2, 3).astype(np.float32))
        else_const_node = ir.node(
            "Constant", inputs=[], attributes={"value": else_constant_tensor}, num_outputs=1
        )
        mul_node = ir.node("Mul", inputs=[input_value, else_const_node.outputs[0]])
        else_graph = ir.Graph(
            inputs=[input_value],
            outputs=[mul_node.outputs[0]],
            nodes=[else_const_node, mul_node],
            opset_imports={"": 20},
        )
        # create a conditional node that uses the then and else graphs
        cond_node = ir.node(
            "If",
            inputs=[input_value],
            attributes={"then_branch": then_graph, "else_branch": else_graph},
            num_outputs=1,
        )
        # construct the model
        main_graph = ir.Graph(
            inputs=[input_value],
            outputs=cond_node.outputs,
            nodes=[cond_node],
            opset_imports={"": 20},
        )
        main_graph.sort()
        model = ir.Model(
            graph=main_graph,
            ir_version=10,
        )
        result = constant_manipulation.LiftConstantsToInitializersPass()(model)
        self.assertTrue(result.modified)
        # Check that the constant node is lifted to the subgraph initializers
        for node in ir.traversal.RecursiveGraphIterator(result.model.graph):
            if node.op_type == "Constant":
                raise AssertionError(
                    f"Constant node '{node.name}' was not lifted to initializers"
                )
        self.assertEqual(len(else_graph.initializers), 1)
        self.assertEqual(len(then_graph.initializers), 1)
        self.assertIs(
            else_graph.initializers["val_0"].const_value,
            else_constant_tensor,
        )
        self.assertIs(
            then_graph.initializers["val_0"].const_value,
            then_constant_tensor,
        )

    @parameterized.parameterized.expand(
        [
            (1.0, "value_float", np.float32),
            (1, "value_int", np.int64),
            ("hello world!", "value_string", np.bytes_),
            ([1.0, 2.0, 3.0], "value_floats", np.float32),
            ([1, 2, 3], "value_ints", np.int64),
            (["hello world!", "thank you."], "value_strings", np.bytes_),
        ]
    )
    def test_pass_with_lifting_constants_to_initializers_with_floats_ints_strings(
        self, value, constant_attribute, np_dtype
    ):
        input_value = ir.Value(
            name="input", type=ir.TensorType(ir.DataType.FLOAT), shape=ir.Shape((2, 3))
        )

        constant_value = value
        const_node = ir.node(
            "Constant",
            inputs=[],
            attributes={constant_attribute: constant_value},
            num_outputs=1,
        )
        identity_node_constant = ir.node(
            "Identity", inputs=[const_node.outputs[0]], num_outputs=1
        )
        identity_node_input = ir.node("Identity", inputs=[input_value], num_outputs=1)

        model = ir.Model(
            graph=ir.Graph(
                inputs=[input_value],
                outputs=[identity_node_input.outputs[0], identity_node_constant.outputs[0]],
                nodes=[identity_node_input, const_node, identity_node_constant],
                opset_imports={"": 20},
            ),
            ir_version=10,
        )

        # Check that the initializer is not in the graph yet
        self.assertEqual(len(model.graph.initializers), 0)
        # And 1 constant node
        self.assertEqual(len([node for node in model.graph if node.op_type == "Constant"]), 1)

        # Perform lift constants to initializers
        result = constant_manipulation.LiftConstantsToInitializersPass()(model)
        self.assertTrue(result.modified)
        # Check that the constant node is lifted to an initializer
        self.assertEqual(len(result.model.graph.initializers), 1)
        # Check the lifted value ("val_1" is the name created by name_authority;
        # "val_0" is taken by the output of the Identity on the graph input)
        np.testing.assert_array_equal(
            result.model.graph.initializers["val_1"].const_value.numpy(),
            np.array(constant_value, dtype=np_dtype),
        )
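

# Assumed entry point so the module can be run directly with `python`;
# a conventional closing for unittest files, sketched here since the diff
# does not show how the original file ends.
if __name__ == "__main__":
    unittest.main()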