
Commit e35a179

Remove proto2text | chore! (#1193)
Remove proto2text because it is simply an alias of `onnx.printer.to_text`.

Co-authored-by: Ti-Tai Wang <[email protected]>
1 parent 82d2063 commit e35a179
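
Because `proto2text(proto)` was just an alias of `onnx.printer.to_text(proto)`, downstream code migrates with a one-line substitution. A minimal sketch of the new usage, assuming a small illustrative `@script()` function (`double` below is not part of the repo):

import onnx

from onnxscript import FLOAT, script
from onnxscript import opset15 as op


@script()
def double(X: FLOAT["N"]) -> FLOAT["N"]:  # noqa: F821
    # Illustrative function: add the input tensor to itself.
    return op.Add(X, X)


model = double.to_model_proto()
# Before this commit: from onnxscript import proto2text; print(proto2text(model))
print(onnx.printer.to_text(model))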

19 files changed: +30 additions, -44 deletions

.lintrunner.toml

Lines changed: 0 additions & 1 deletion

@@ -8,7 +8,6 @@ include_patterns = [
     '**/*.pyi',
 ]
 exclude_patterns = [
-    'docs/**',
     'onnxscript/tests/models/**',
 ]
 command = [

README.md

Lines changed: 1 addition & 2 deletions

@@ -65,9 +65,8 @@ pytest onnxscript
 import onnx

 # We use ONNX opset 15 to define the function below.
-from onnxscript import FLOAT
+from onnxscript import FLOAT, script
 from onnxscript import opset15 as op
-from onnxscript import script


 # We use the script decorator to indicate that

docs/examples/01_plot_selu.py

Lines changed: 2 additions & 2 deletions

@@ -32,6 +32,6 @@ def Selu(X, alpha: float, gamma: float):

 # %%
 # Let's see what the translated function looks like:
-from onnxscript import proto2text  # noqa: E402
+import onnx  # noqa: E402

-print(proto2text(onnx_fun))
+print(onnx.printer.to_text(onnx_fun))

docs/examples/02_plot_square_loss.py

Lines changed: 2 additions & 3 deletions

@@ -14,9 +14,8 @@
 import onnx
 from onnxruntime import InferenceSession

-from onnxscript import FLOAT
+from onnxscript import FLOAT, script
 from onnxscript import opset15 as op
-from onnxscript import proto2text, script


 @script()
@@ -32,7 +31,7 @@ def square_loss(X: FLOAT["N", 1], Y: FLOAT["N", 1]) -> FLOAT[1, 1]:  # noqa: F82

 # %%
 # Let's see what the generated model looks like.
-print(proto2text(model))
+print(onnx.printer.to_text(model))

 # %%
 # We can run shape-inference and type-check the model using the standard ONNX API.

docs/examples/03_export_lib.py

Lines changed: 1 addition & 2 deletions

@@ -8,9 +8,8 @@
 **This is preliminary. Proto extensions are required to fully support LibProto.**
 """

-from onnxscript import export_onnx_lib
+from onnxscript import export_onnx_lib, script
 from onnxscript import opset15 as op
-from onnxscript import script
 from onnxscript.values import Opset

 # %%

docs/examples/04_plot_eager_mode_evaluation.py

Lines changed: 1 addition & 2 deletions

@@ -12,9 +12,8 @@
 """
 import numpy as np

-from onnxscript import FLOAT
+from onnxscript import FLOAT, script
 from onnxscript import opset15 as op
-from onnxscript import script


 @script()

docs/examples/05_plot_model_props.py

Lines changed: 4 additions & 3 deletions

@@ -15,9 +15,10 @@
 # %%
 # First, we define the implementation of a square-loss function in onnxscript.

-from onnxscript import FLOAT
+import onnx
+
+from onnxscript import FLOAT, script
 from onnxscript import opset15 as op
-from onnxscript import proto2text, script


 @script(ir_version=7, producer_name="OnnxScript", producer_version="0.1")
@@ -29,4 +30,4 @@ def square_loss(X: FLOAT["N"], Y: FLOAT["N"]) -> FLOAT[1]:  # noqa: F821
 # %%
 # Let's see what the generated model looks like.
 model = square_loss.to_model_proto()
-print(proto2text(model))
+print(onnx.printer.to_text(model))

docs/examples/06_plot_model_local_funs.py

Lines changed: 6 additions & 5 deletions

@@ -14,9 +14,10 @@
 # %%
 # First, let us define an ONNXScript function that calls other ONNXScript functions.

-from onnxscript import FLOAT
+import onnx
+
+from onnxscript import FLOAT, script
 from onnxscript import opset15 as op
-from onnxscript import proto2text, script
 from onnxscript.values import Opset

 # A dummy opset used for model-local functions
@@ -43,17 +44,17 @@ def l2norm(x: FLOAT["N"], y: FLOAT["N"]) -> FLOAT[1]:  # noqa: F821
 # Let's see what the generated model looks like by default:

 model = l2norm.to_model_proto()
-print(proto2text(model))
+print(onnx.printer.to_text(model))

 # %%
 # Let's now explicitly specify which functions to include.
 # First, generate a model with no model-local functions:

 model = l2norm.to_model_proto(functions=[])
-print(proto2text(model))
+print(onnx.printer.to_text(model))

 # %%
 # Now, generate a model with one model-local function:

 model = l2norm.to_model_proto(functions=[sum])
-print(proto2text(model))
+print(onnx.printer.to_text(model))

docs/tutorial/examples/forloop.py

Lines changed: 1 addition & 1 deletion

@@ -6,7 +6,7 @@
 def sumprod(x, N):
     sum = op.Identity(x)
     prod = op.Identity(x)
-    for i in range(N):
+    for _ in range(N):
         sum = sum + x
         prod = prod * x
     return sum, prod

docs/tutorial/examples/forwhileloop.py

Lines changed: 1 addition & 1 deletion

@@ -6,7 +6,7 @@
 def sumprod_break(x, N):
     sum = op.Identity(x)
     prod = op.Identity(x)
-    for i in range(N):
+    for _ in range(N):
         sum = sum + x
         prod = prod * x
         cond = op.ReduceSum(prod) > 1e7

docs/tutorial/examples/hardmax_end_to_end.py

Lines changed: 1 addition & 2 deletions

@@ -1,9 +1,8 @@
 import onnx

 # We use ONNX opset 15 to define the function below.
-from onnxscript import FLOAT
+from onnxscript import FLOAT, script
 from onnxscript import opset15 as op
-from onnxscript import script


 # We use the script decorator to indicate that

docs/tutorial/examples/outerscope_redef_error.py

Lines changed: 1 addition & 1 deletion

@@ -1,5 +1,5 @@
+from onnxscript import graph, script
 from onnxscript import opset15 as op
-from onnxscript import script

 try:
docs/tutorial/examples/scanloop.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,5 @@
1+
from onnxscript import graph, script
12
from onnxscript import opset15 as op
2-
from onnxscript import script
33

44

55
@script()

onnxscript/__init__.py

Lines changed: 1 addition & 2 deletions

@@ -58,7 +58,7 @@

 # isort: on

-from ._internal.utils import external_tensor, proto2text
+from ._internal.utils import external_tensor
 from .values import OnnxFunction, TracedOnnxFunction

 try:
@@ -73,7 +73,6 @@
     "OnnxFunction",
     "TracedOnnxFunction",
     "proto2python",
-    "proto2text",
     "external_tensor",
     "graph",
     "BFLOAT16",

onnxscript/_internal/utils.py

Lines changed: 0 additions & 10 deletions

@@ -14,16 +14,6 @@

 from onnxscript import tensor

-# print utility unavailable in ONNX 1.12 or earlier:
-# pylint: disable=unused-import, ungrouped-imports
-try:
-    from onnx.printer import to_text as proto2text
-except ImportError:
-
-    def proto2text(_: Any) -> str:  # type: ignore[misc]
-        return "<print utility unavailable>"
-
-
 # pylint: enable=unused-import, ungrouped-imports
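
The block removed above was a compatibility shim for ONNX 1.12 and earlier, where `onnx.printer` does not exist. Projects that still need to run against those versions can keep an equivalent guard on their side; a minimal sketch mirroring the removed code (the local `to_text` fallback name is ours, not part of onnxscript):

try:
    from onnx.printer import to_text  # available in ONNX 1.13 and later
except ImportError:

    def to_text(_proto) -> str:
        # Fallback when onnx.printer is unavailable (ONNX 1.12 or earlier).
        return "<print utility unavailable>"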

onnxscript/backend/onnx_export_test.py

Lines changed: 3 additions & 2 deletions

@@ -11,6 +11,7 @@
 import unittest
 from typing import Pattern

+import onnx
 import onnxruntime as ort
 import parameterized
 from onnxruntime.capi import onnxruntime_pybind11_state
@@ -273,7 +274,7 @@ def test_export2python_produces_correct_onnx_script_model(
         except Exception as e:
             raise AssertionError(
                 f"Unable to load onnx for test {backend_test.name!r}.\n"
-                f"{onnxscript.proto2text(proto)}\n"
+                f"{onnx.printer.to_text(proto)}\n"
                 f"-----\n"
                 f"{backend_test.onnx_model}"
             ) from e
@@ -298,7 +299,7 @@ def _run_function(obj, *inputs):
             except Exception as e:
                 raise AssertionError(
                     f"Unable to run test {backend_test.name!r} after conversion.\n"
-                    f"{onnxscript.proto2text(proto)}"
+                    f"{onnx.printer.to_text(proto)}"
                 ) from e

         backend_test.run(_load_function, _run_function)

onnxscript/evaluator.py

Lines changed: 1 addition & 1 deletion

@@ -480,7 +480,7 @@ def _call_ort(
         raise EagerModeError(
             f"Unable to create onnxruntime InferenceSession "
             f"for executing {schema.domain}.{schema.name} op "
-            f"with onnx model\n{utils.proto2text(model)}"
+            f"with onnx model\n{onnx.printer.to_text(model)}"
         ) from e

     try:

onnxscript/function_libs/torch_lib/graph_building.py

Lines changed: 1 addition & 1 deletion

@@ -1029,7 +1029,7 @@ def to_model_proto(
             warnings.warn(f"ONNX model is invalid: {e}", stacklevel=1)
             logging.debug(
                 "ONNX model:\n%s\n\nTorchScript graph:\n%s",
-                onnxscript.proto2text(onnx_model),
+                onnx.printer.to_text(onnx_model),
                 self.torch_graph,
             )
         return onnx_model

onnxscript/tests/function_libs/torch_lib/ops_test_common.py

Lines changed: 2 additions & 2 deletions

@@ -359,7 +359,7 @@ def _format_model_and_input_information(onnx_model, inputs):
         f"Inputs:\n"
         f"{pprint.pformat(inputs)}\n"
         f"Model:\n"
-        f"{onnxscript.proto2text(onnx_model)}"
+        f"{onnx.printer.to_text(onnx_model)}"
     )


@@ -526,7 +526,7 @@ def _capture_graph_and_evaluate_torch_script_evaluator(function: Callable, args,
         onnx.checker.check_model(onnx_model, full_check=True)
     except (onnx.checker.ValidationError, onnx.shape_inference.InferenceError) as e:
         raise AssertionError(
-            f"ONNX model is invalid. Model:\n{onnxscript.proto2text(onnx_model)}"
+            f"ONNX model is invalid. Model:\n{onnx.printer.to_text(onnx_model)}"
         ) from e

     try:
