test/python/onnx_importer: 1 file changed, +6 -9 lines

@@ -90,23 +90,20 @@ def linear_model() -> onnx.ModelProto:
 def path_based_shape_inference_model() -> onnx.ModelProto:
     # Create a model with a serialized form that's large enough to require
     # path-based shape inference.
-    large_tensor = numpy.random.rand(onnx.checker.MAXIMUM_PROTOBUF).astype(
-        numpy.float32
-    )
+    dtype = numpy.float32
+    byte_size = numpy.dtype(dtype).itemsize
+    tensor_size = onnx.checker.MAXIMUM_PROTOBUF // byte_size + 1
+    large_tensor = numpy.random.rand(tensor_size).astype(dtype)
     assert large_tensor.nbytes > onnx.checker.MAXIMUM_PROTOBUF
     node1 = make_node(
         "Constant",
         [],
         ["large_const"],
         value=numpy_helper.from_array(large_tensor, name="large_const"),
     )
-    X = make_tensor_value_info(
-        "large_const", TensorProto.FLOAT, [onnx.checker.MAXIMUM_PROTOBUF]
-    )
+    X = make_tensor_value_info("large_const", TensorProto.FLOAT, [tensor_size])
     graph = make_graph([node1], "large_const_graph", [], [X])
-    onnx_model = make_model(graph)
-    check_model(onnx_model)
-    return onnx_model
+    return make_model(graph)
 
 
 ALL_MODELS = [const_model, linear_model, path_based_shape_inference_model]
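Note on the new sizing logic: instead of allocating MAXIMUM_PROTOBUF float32 elements (roughly four times the bytes actually needed), the test now picks the smallest element count whose raw data just exceeds onnx.checker.MAXIMUM_PROTOBUF. The check_model call is dropped, presumably because the checker rejects in-memory models whose serialized form exceeds that same limit. A minimal sketch of the arithmetic, assuming only numpy and onnx; it does not allocate the tensor:

    import numpy
    import onnx

    # Smallest float32 element count whose raw data just exceeds the protobuf
    # serialization limit enforced by onnx.checker (MAXIMUM_PROTOBUF).
    dtype = numpy.float32
    byte_size = numpy.dtype(dtype).itemsize  # 4 bytes per float32 element
    tensor_size = onnx.checker.MAXIMUM_PROTOBUF // byte_size + 1

    # Over the limit by at most one element's worth of bytes.
    assert tensor_size * byte_size > onnx.checker.MAXIMUM_PROTOBUF
    assert (tensor_size - 1) * byte_size <= onnx.checker.MAXIMUM_PROTOBUF

    # The old code allocated MAXIMUM_PROTOBUF elements, i.e. roughly 4x the
    # bytes needed to trip the limit.
    print(f"new: {tensor_size * byte_size / 2**30:.2f} GiB of tensor data, "
          f"old: {onnx.checker.MAXIMUM_PROTOBUF * byte_size / 2**30:.2f} GiB")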
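For context on the comment "large enough to require path-based shape inference": ONNX's in-memory shape inference cannot handle models whose serialized protobuf exceeds MAXIMUM_PROTOBUF, so such models are typically written to disk with tensors stored as external data and inferred by file path. The sketch below is a hypothetical illustration of that flow using the public onnx API; the helper name infer_shapes_via_path and the temporary paths are illustrative, not part of this test.

    import os
    import tempfile

    import onnx


    def infer_shapes_via_path(model: onnx.ModelProto) -> onnx.ModelProto:
        # Illustrative helper (not from this test): run shape inference on a
        # model that is too large to serialize as a single in-memory proto.
        with tempfile.TemporaryDirectory() as tmp:
            model_path = os.path.join(tmp, "model.onnx")
            inferred_path = os.path.join(tmp, "inferred.onnx")
            # Store large tensors as external data so the .onnx file itself
            # stays under the protobuf limit; convert_attribute also covers
            # tensors held in node attributes, like the Constant above.
            onnx.save_model(
                model,
                model_path,
                save_as_external_data=True,
                all_tensors_to_one_file=True,
                convert_attribute=True,
            )
            # File-based shape inference reads and writes on disk instead of
            # building a >2 GB in-memory proto.
            onnx.shape_inference.infer_shapes_path(model_path, inferred_path)
            # Load the inferred graph without pulling the external tensor
            # data back into memory.
            return onnx.load(inferred_path, load_external_data=False)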