updated mct inference

mct-2.1.1
Francesco Mattioli 3 months ago
parent 640b73e26b
commit 709b3fd3aa
1 changed file with 4 changed lines:
    ultralytics/nn/autobackend.py
@@ -166,7 +166,7 @@ class AutoBackend(nn.Module):
             check_requirements("opencv-python>=4.5.4")
             net = cv2.dnn.readNetFromONNX(w)
-        # ONNX Runtime
+        # ONNX Runtime and MCT
        elif onnx:
            LOGGER.info(f"Loading {w} for ONNX Runtime inference...")
            check_requirements(("onnx", "onnxruntime-gpu" if cuda else "onnxruntime"))
@@ -178,7 +178,7 @@ class AutoBackend(nn.Module):
            providers = ["CUDAExecutionProvider", "CPUExecutionProvider"] if cuda else ["CPUExecutionProvider"]
            if mct:
                import mct_quantizers as mctq
+               from sony_custom_layers.pytorch.object_detection import nms_ort
                session = onnxruntime.InferenceSession(
                    w, mctq.get_ort_session_options(), providers=["CPUExecutionProvider"]
                )

Loading…
Cancel
Save