diff --git a/mmdeploy/apis/inference.py b/mmdeploy/apis/inference.py
index 3f02caea8b..678a25624b 100644
--- a/mmdeploy/apis/inference.py
+++ b/mmdeploy/apis/inference.py
@@ -42,7 +42,8 @@ def inference_model(model_cfg: Union[str, mmengine.Config],
     from mmdeploy.apis.utils import build_task_processor
     task_processor = build_task_processor(model_cfg, deploy_cfg, device)
 
-    model = task_processor.build_backend_model(backend_files)
+    model = task_processor.build_backend_model(
+        backend_files, task_processor.update_data_preprocessor)
 
     input_shape = get_input_shape(deploy_cfg)
     model_inputs, _ = task_processor.create_input(img, input_shape)
diff --git a/tools/test.py b/tools/test.py
index 8bf6c9ddb0..b9e18df3cf 100644
--- a/tools/test.py
+++ b/tools/test.py
@@ -122,7 +122,8 @@ def main():
     dataloader = task_processor.build_dataloader(test_dataloader)
 
     # load the model of the backend
-    model = task_processor.build_backend_model(args.model)
+    model = task_processor.build_backend_model(
+        args.model, task_processor.update_data_preprocessor)
 
     destroy_model = model.destroy
    is_device_cpu = (args.device == 'cpu')
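
For reference, a minimal sketch of the updated calling convention introduced by this patch: `build_backend_model` is now given the task processor's `update_data_preprocessor` callback in addition to the backend files. Only the two-argument `build_backend_model` call and the `build_task_processor(model_cfg, deploy_cfg, device)` signature are taken from the diff; the config paths, backend file name, and device below are placeholders for illustration.

```python
# Sketch of the new calling pattern; paths, file names and device are
# placeholders, not part of this patch.
import mmengine

from mmdeploy.apis.utils import build_task_processor

deploy_cfg = mmengine.Config.fromfile('configs/deploy_cfg.py')  # placeholder
model_cfg = mmengine.Config.fromfile('configs/model_cfg.py')    # placeholder

task_processor = build_task_processor(model_cfg, deploy_cfg, 'cpu')

# Before this patch: task_processor.build_backend_model(['end2end.onnx'])
# After: also pass update_data_preprocessor so the wrapped backend model
# refreshes its data preprocessor from the task processor's model config.
model = task_processor.build_backend_model(
    ['end2end.onnx'], task_processor.update_data_preprocessor)
```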