Skip to content

Commit d96bf75

Browse files
committed
fix circular imports
1 parent 90e67ae commit d96bf75

File tree

4 files changed

+8
-3
lines changed

4 files changed

+8
-3
lines changed

optimum/exporters/openvino/__main__.py

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -27,10 +27,11 @@
 from optimum.utils import DEFAULT_DUMMY_SHAPES
 from optimum.utils.save_utils import maybe_save_preprocessors

-from ...intel.openvino.utils import OV_XML_FILE_NAME
 from .convert import export_models


+OV_XML_FILE_NAME = "openvino_model.xml"
+
 logger = logging.getLogger(__name__)

 if is_torch_available():
if is_torch_available():
@@ -219,6 +220,7 @@ def main_export(
         custom_onnx_configs=custom_onnx_configs if custom_onnx_configs is not None else {},
         custom_architecture=custom_architecture,
         fn_get_submodels=fn_get_submodels,
+        _variant="default",
     )

     if not is_stable_diffusion:

optimum/exporters/openvino/convert.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -30,8 +30,8 @@
 from optimum.exporters.onnx.convert import export_tensorflow as export_tensorflow_onnx
 from optimum.utils import is_diffusers_available

-from ...intel.openvino.utils import OV_XML_FILE_NAME
 from .utils import (
+    OV_XML_FILE_NAME,
     clear_class_registry,
     flattenize_inputs,
     get_input_shapes,

optimum/exporters/openvino/utils.py

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -29,6 +29,9 @@
     from diffusers import ModelMixin


+OV_XML_FILE_NAME = "openvino_model.xml"
+
+
 def is_torch_model(model: Union["PreTrainedModel", "ModelMixin"]):
     """
     Checks whether the model is a torch model.

optimum/intel/utils/modeling_utils.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -102,7 +102,7 @@ def patch_decoder_attention_mask(model: "PreTrainedModel"):
     Returns:
         model with applied patch
     """
-    if model.config.model_type == "bloom":
+    if model.config.model_type in {"bloom", "mpt"}:
         model.transformer._prepare_attn_mask = _prepare_attn_mask
     elif model.config.model_type == "llama":
         model.model._prepare_decoder_attention_mask = _prepare_decoder_attention_mask

0 commit comments

Comments (0)