Add JetPack6 Docker for NVIDIA Jetson Orin Series (#14707)

Co-authored-by: Glenn Jocher <glenn.jocher@ultralytics.com>
Branch: pull/14740/head
Lakshantha Dissanayake authored 4 months ago; committed by GitHub
Parent: 673e76b862
Commit: df38884442
  1. .github/workflows/docker.yaml (7 changes)
  2. docker/Dockerfile-jetson-jetpack4 (14 changes)
  3. docker/Dockerfile-jetson-jetpack5 (1 change)
  4. docker/Dockerfile-jetson-jetpack6 (52 changes)

@@ -23,6 +23,10 @@ on:
         type: boolean
         description: Use Dockerfile-arm64
         default: true
+      Dockerfile-jetson-jetpack6:
+        type: boolean
+        description: Use Dockerfile-jetson-jetpack6
+        default: true
       Dockerfile-jetson-jetpack5:
         type: boolean
         description: Use Dockerfile-jetson-jetpack5
@@ -62,6 +66,9 @@ jobs:
           - dockerfile: "Dockerfile-arm64"
             tags: "latest-arm64"
             platforms: "linux/arm64"
+          - dockerfile: "Dockerfile-jetson-jetpack6"
+            tags: "latest-jetson-jetpack6"
+            platforms: "linux/arm64"
           - dockerfile: "Dockerfile-jetson-jetpack5"
             tags: "latest-jetson-jetpack5"
             platforms: "linux/arm64"

@@ -34,20 +34,20 @@ COPY . $APP_HOME
 RUN chown -R root:root $APP_HOME
 ADD https://github.com/ultralytics/assets/releases/download/v8.2.0/yolov8n.pt $APP_HOME
-# Download onnxruntime-gpu, TensorRT, PyTorch and Torchvision
+# Download onnxruntime-gpu 1.8.0 and tensorrt 8.2.0.6
 # Other versions can be seen in https://elinux.org/Jetson_Zoo and https://forums.developer.nvidia.com/t/pytorch-for-jetson/72048
 ADD https://nvidia.box.com/shared/static/gjqofg7rkg97z3gc8jeyup6t8n9j8xjw.whl onnxruntime_gpu-1.8.0-cp38-cp38-linux_aarch64.whl
 ADD https://forums.developer.nvidia.com/uploads/short-url/hASzFOm9YsJx6VVFrDW1g44CMmv.whl tensorrt-8.2.0.6-cp38-none-linux_aarch64.whl
-ADD https://github.com/ultralytics/assets/releases/download/v0.0.0/torch-1.11.0a0+gitbc2c6ed-cp38-cp38-linux_aarch64.whl \
-    torch-1.11.0a0+gitbc2c6ed-cp38-cp38-linux_aarch64.whl
-ADD https://github.com/ultralytics/assets/releases/download/v0.0.0/torchvision-0.12.0a0+9b5a3fe-cp38-cp38-linux_aarch64.whl \
-    torchvision-0.12.0a0+9b5a3fe-cp38-cp38-linux_aarch64.whl
 # Install pip packages
 RUN python3 -m pip install --upgrade pip wheel
-RUN pip install onnxruntime_gpu-1.8.0-cp38-cp38-linux_aarch64.whl tensorrt-8.2.0.6-cp38-none-linux_aarch64.whl \
-    torch-1.11.0a0+gitbc2c6ed-cp38-cp38-linux_aarch64.whl torchvision-0.12.0a0+9b5a3fe-cp38-cp38-linux_aarch64.whl
+RUN pip install --no-cache-dir \
+    onnxruntime_gpu-1.8.0-cp38-cp38-linux_aarch64.whl \
+    tensorrt-8.2.0.6-cp38-none-linux_aarch64.whl \
+    https://github.com/ultralytics/assets/releases/download/v0.0.0/torch-1.11.0a0+gitbc2c6ed-cp38-cp38-linux_aarch64.whl \
+    https://github.com/ultralytics/assets/releases/download/v0.0.0/torchvision-0.12.0a0+9b5a3fe-cp38-cp38-linux_aarch64.whl
 RUN pip install --no-cache-dir -e ".[export]"
+RUN rm *.whl
 # Usage Examples -------------------------------------------------------------------------------------------------------
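For a local sanity check of the reworked install step, the JetPack 4 image can be rebuilt the same way as in the JetPack 6 usage examples further below; this command is an illustrative addition, and the latest-jetson-jetpack4 tag is only a placeholder name:
# Build the JetPack 4 image for linux/arm64 from the repository root
# t=ultralytics/ultralytics:latest-jetson-jetpack4 && sudo docker build --platform linux/arm64 -f docker/Dockerfile-jetson-jetpack4 -t $t .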

@@ -38,6 +38,7 @@ ADD https://nvidia.box.com/shared/static/mvdcltm9ewdy2d5nurkiqorofz1s53ww.whl on
 RUN python3 -m pip install --upgrade pip wheel
 RUN pip install onnxruntime_gpu-1.15.1-cp38-cp38-linux_aarch64.whl
 RUN pip install --no-cache-dir -e ".[export]"
+RUN rm *.whl
 # Usage Examples -------------------------------------------------------------------------------------------------------

@@ -0,0 +1,52 @@
# Ultralytics YOLO 🚀, AGPL-3.0 license
# Builds ultralytics/ultralytics:jetson-jetpack6 image on DockerHub https://hub.docker.com/r/ultralytics/ultralytics
# Supports JetPack6.x for YOLOv8 on Jetson AGX Orin, Orin NX and Orin Nano Series
# Start FROM https://catalog.ngc.nvidia.com/orgs/nvidia/containers/l4t-jetpack
FROM nvcr.io/nvidia/l4t-jetpack:r36.3.0
# Set environment variables
ENV APP_HOME /usr/src/ultralytics
# Downloads to user config dir
ADD https://github.com/ultralytics/assets/releases/download/v0.0.0/Arial.ttf \
    https://github.com/ultralytics/assets/releases/download/v0.0.0/Arial.Unicode.ttf \
    /root/.config/Ultralytics/
# Install dependencies
RUN apt update && \
    apt install --no-install-recommends -y git python3-pip libopenmpi-dev libopenblas-base libomp-dev
# Create working directory
WORKDIR $APP_HOME
# Copy contents and assign permissions
COPY . $APP_HOME
RUN chown -R root:root $APP_HOME
ADD https://github.com/ultralytics/assets/releases/download/v8.2.0/yolov8n.pt $APP_HOME
# Download onnxruntime-gpu 1.18.0 from https://elinux.org/Jetson_Zoo and https://forums.developer.nvidia.com/t/pytorch-for-jetson/72048
ADD https://nvidia.box.com/shared/static/48dtuob7meiw6ebgfsfqakc9vse62sg4.whl onnxruntime_gpu-1.18.0-cp310-cp310-linux_aarch64.whl
# Pip install onnxruntime-gpu, torch, torchvision and ultralytics
RUN python3 -m pip install --upgrade pip wheel
RUN pip install --no-cache-dir \
    onnxruntime_gpu-1.18.0-cp310-cp310-linux_aarch64.whl \
    https://github.com/ultralytics/assets/releases/download/v0.0.0/torch-2.3.0-cp310-cp310-linux_aarch64.whl \
    https://github.com/ultralytics/assets/releases/download/v0.0.0/torchvision-0.18.0a0+6043bc2-cp310-cp310-linux_aarch64.whl
RUN pip install --no-cache-dir -e ".[export]"
RUN rm *.whl
# Usage Examples -------------------------------------------------------------------------------------------------------
# Build and Push
# t=ultralytics/ultralytics:latest-jetson-jetpack6 && sudo docker build --platform linux/arm64 -f docker/Dockerfile-jetson-jetpack6 -t $t . && sudo docker push $t
# Run
# t=ultralytics/ultralytics:latest-jetson-jetpack6 && sudo docker run -it --ipc=host $t
# Pull and Run
# t=ultralytics/ultralytics:latest-jetson-jetpack6 && sudo docker pull $t && sudo docker run -it --ipc=host $t
# Pull and Run with NVIDIA runtime
# t=ultralytics/ultralytics:latest-jetson-jetpack6 && sudo docker pull $t && sudo docker run -it --ipc=host --runtime=nvidia $t
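Two additional verification commands in the same style; these are illustrative additions, not part of this commit (yolov8n.pt is already in the image's working directory, and format=engine exports through TensorRT):
# Confirm CUDA-enabled PyTorch inside the container
# t=ultralytics/ultralytics:latest-jetson-jetpack6 && sudo docker run -it --ipc=host --runtime=nvidia $t python3 -c "import torch; print(torch.cuda.is_available())"
# Export YOLOv8n to TensorRT inside the container
# t=ultralytics/ultralytics:latest-jetson-jetpack6 && sudo docker run -it --ipc=host --runtime=nvidia $t yolo export model=yolov8n.pt format=engine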