2525from openvino .runtime .utils .types import get_element_type
2626from openvino .tools .ovc import convert_model
2727from optimum .exporters .onnx .base import OnnxConfig
28- from optimum .exporters .onnx .convert import check_dummy_inputs_are_allowed , export_tensorflow as export_tensorflow_onnx
28+ from optimum .exporters .onnx .convert import check_dummy_inputs_are_allowed
2929from optimum .exporters .onnx .convert import export_pytorch as export_pytorch_to_onnx
30+ from optimum .exporters .onnx .convert import export_tensorflow as export_tensorflow_onnx
3031from optimum .utils import is_diffusers_available
3132
3233from ...intel .openvino .utils import OV_XML_FILE_NAME
@@ -119,6 +120,20 @@ def export(
119120
120121
121122def export_tensorflow (model : Union ["PreTrainedModel" , "ModelMixin" ], config : OnnxConfig , opset : int , output : Path ):
123+ """
124+ Export the TensorFlow model to OpenVINO format.
125+
126+ Args:
127+         model (Union["PreTrainedModel", "ModelMixin"]): The model to export.
128+ config (OnnxConfig): The configuration of the model.
129+ opset (int): The ONNX opset version to use.
130+ output (Path): The path to save the model.
131+
132+ Returns:
133+ input_names: list of input names from ONNX configuration
134+ output_names: list of output names from ONNX configuration
135+ bool: True if the model was exported successfully.
136+ """
122137 onnx_path = Path (output ).with_suffix (".onnx" )
123138 input_names , output_names = export_tensorflow_onnx (model , config , opset , onnx_path )
124139 ov_model = convert_model (str (onnx_path ))
@@ -139,6 +154,30 @@ def export_pytorch_via_onnx(
139154 input_shapes : Optional [Dict ] = None ,
140155 model_kwargs : Optional [Dict [str , Any ]] = None ,
141156):
157+ """
158+ Exports a PyTorch model to an OpenVINO Intermediate Representation via ONNX export.
159+
160+ Args:
161+ model ([`PreTrainedModel`]):
162+ The model to export.
163+ config ([`~exporters.onnx.config.OnnxConfig`]):
164+ The configuration associated with the exported model.
165+ opset (`int`):
166+ The version of the ONNX operator set to use.
167+ output (`Path`):
168+ Directory to store the exported model.
169+ device (`str`, defaults to `"cpu"`):
170+ The device on which the model will be exported. Either `cpu` or `cuda`. Only PyTorch is supported for
171+ export on CUDA devices.
172+         input_shapes (`Optional[Dict]`, defaults to `None`):
173+ If specified, allows to use specific shapes for the example input provided to the exporter.
174+         model_kwargs (`Optional[Dict[str, Any]]`, defaults to `None`):
175+ Additional kwargs for model export
176+
177+ Returns:
178+         `Tuple[List[str], List[str], bool]`: A tuple with an ordered list of the model's inputs, the named inputs from
179+         the ONNX configuration, and a boolean flag indicating whether the legacy ONNX export path was applied to the model.
180+ """
142181 import torch
143182
144183 output = Path (output )
@@ -186,10 +225,12 @@ def export_pytorch(
186225 export on CUDA devices.
187226 input_shapes (`optional[Dict]`, defaults to `None`):
188227 If specified, allows to use specific shapes for the example input provided to the exporter.
228+             model_kwargs (`Optional[Dict[str, Any]]`, defaults to `None`):
229+                 Additional kwargs for model export
189230
190231 Returns:
191- `Tuple[List[str], List[str]]`: A tuple with an ordered list of the model's inputs, and the named inputs from
192- the ONNX configuration.
232+         `Tuple[List[str], List[str], bool]`: A tuple with an ordered list of the model's inputs, the named inputs from
233+         the ONNX configuration, and a boolean flag indicating whether the legacy ONNX export path was applied to the model.
193234 """
194235 import torch
195236 from torch .utils ._pytree import tree_map
@@ -299,6 +340,28 @@ def export_models(
299340 input_shapes : Optional [Dict ] = None ,
300341 model_kwargs : Optional [Dict [str , Any ]] = None ,
301342) -> Tuple [List [List [str ]], List [List [str ]]]:
343+ """
344+ Export the models to OpenVINO IR format
345+
346+ Args:
347+         models_and_onnx_configs (Dict[str, Tuple[Union["PreTrainedModel", "TFPreTrainedModel", "ModelMixin"], "OnnxConfig"]]):
348+ output_dir (Path): output directory for saving models
349+ opset (Optional[int], optional, Default to None): ONNX export opset
350+ output_names (Optional[List[str]], optional, Defaults to None): model output names
351+ device (str, optional, Defaults to "cpu"):
352+ The device on which the model will be exported. Either `cpu` or `cuda`. Only PyTorch is supported for
353+ export on CUDA devices.
354+ input_shapes (Optional[Dict], optional, Defaults to None):
355+ If specified, allows to use specific shapes for the example input provided to the exporter.
356+ model_kwargs (Optional[Dict[str, Any]], optional):
357+ Additional kwargs for model export
358+
359+ Raises:
360+         ValueError: if the number of custom output names does not match the number of models
361+
362+ Returns:
363+ list of input_names and output_names from ONNX configuration
364+ """
302365 outputs = []
303366
304367 if output_names is not None and len (output_names ) != len (models_and_onnx_configs ):
0 commit comments