Fix TensorRT inference issue on NVIDIA Jetson (#9482)

pull/9484/head^2
Lakshantha Dissanayake, committed by GitHub 8 months ago
parent 959acf67db
commit 2f77b2efbb
Changed files:
1. docker/Dockerfile-jetson (4 lines changed)
2. pyproject.toml (1 line changed)

@@ -28,8 +28,8 @@ RUN grep -v "opencv-python" pyproject.toml > temp.toml && mv temp.toml pyproject
 # Install pip packages manually for TensorRT compatibility https://github.com/NVIDIA/TensorRT/issues/2567
 RUN python3 -m pip install --upgrade pip wheel
-RUN pip install --no-cache tqdm matplotlib pyyaml psutil pandas onnx "numpy==1.23"
-RUN pip install --no-cache -e .
+RUN pip install --no-cache tqdm matplotlib pyyaml psutil pandas onnx
+RUN pip install --no-cache -e ".[export]"
 # Set environment variables
 ENV OMP_NUM_THREADS=1
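For context, a minimal sketch (not part of this diff) of the TensorRT workflow the updated Jetson image is meant to support: installing the `[export]` extras makes the TensorRT export path available, after which a model can be exported to an engine and used for inference. The checkpoint name and test image URL below are placeholders.

from ultralytics import YOLO

# Export a PyTorch checkpoint to a TensorRT engine (requires the `[export]`
# extras installed by the updated Dockerfile), then run inference with it.
model = YOLO("yolov8n.pt")          # placeholder checkpoint
model.export(format="engine")       # produces yolov8n.engine via TensorRT
trt_model = YOLO("yolov8n.engine")  # load the exported TensorRT engine
results = trt_model("https://ultralytics.com/images/bus.jpg")  # placeholder test image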

@@ -101,6 +101,7 @@ export = [
     "openvino>=2024.0.0", # OpenVINO export
     "tensorflow<=2.13.1; python_version <= '3.11'", # TF bug https://github.com/ultralytics/ultralytics/issues/5161
     "tensorflowjs>=3.9.0; python_version <= '3.11'", # TF.js export, automatically installs tensorflow
+    "numpy==1.23.5; platform_machine == 'aarch64'", # Fix error: `np.bool` was a deprecated alias for the builtin `bool` when using TensorRT models on NVIDIA Jetson
 ]
 explorer = [
     "lancedb", # vector search
