diff --git a/.github/workflows/docker.yaml b/.github/workflows/docker.yaml
index d9e0f7c1a6..d798cbec18 100644
--- a/.github/workflows/docker.yaml
+++ b/.github/workflows/docker.yaml
@@ -23,6 +23,10 @@ on:
         type: boolean
         description: Use Dockerfile-arm64
         default: true
+      Dockerfile-jetson-jetpack6:
+        type: boolean
+        description: Use Dockerfile-jetson-jetpack6
+        default: true
       Dockerfile-jetson-jetpack5:
         type: boolean
         description: Use Dockerfile-jetson-jetpack5
@@ -62,6 +66,9 @@ jobs:
           - dockerfile: "Dockerfile-arm64"
             tags: "latest-arm64"
             platforms: "linux/arm64"
+          - dockerfile: "Dockerfile-jetson-jetpack6"
+            tags: "latest-jetson-jetpack6"
+            platforms: "linux/arm64"
           - dockerfile: "Dockerfile-jetson-jetpack5"
             tags: "latest-jetson-jetpack5"
             platforms: "linux/arm64"
diff --git a/docker/Dockerfile-jetson-jetpack4 b/docker/Dockerfile-jetson-jetpack4
index 12931ad30f..0895ae3eb6 100644
--- a/docker/Dockerfile-jetson-jetpack4
+++ b/docker/Dockerfile-jetson-jetpack4
@@ -34,20 +34,20 @@ COPY . $APP_HOME
 RUN chown -R root:root $APP_HOME
 ADD https://github.com/ultralytics/assets/releases/download/v8.2.0/yolov8n.pt $APP_HOME
 
-# Download onnxruntime-gpu, TensorRT, PyTorch and Torchvision
+# Download onnxruntime-gpu 1.8.0 and tensorrt 8.2.0.6
 # Other versions can be seen in https://elinux.org/Jetson_Zoo and https://forums.developer.nvidia.com/t/pytorch-for-jetson/72048
 ADD https://nvidia.box.com/shared/static/gjqofg7rkg97z3gc8jeyup6t8n9j8xjw.whl onnxruntime_gpu-1.8.0-cp38-cp38-linux_aarch64.whl
 ADD https://forums.developer.nvidia.com/uploads/short-url/hASzFOm9YsJx6VVFrDW1g44CMmv.whl tensorrt-8.2.0.6-cp38-none-linux_aarch64.whl
-ADD https://github.com/ultralytics/assets/releases/download/v0.0.0/torch-1.11.0a0+gitbc2c6ed-cp38-cp38-linux_aarch64.whl \
-    torch-1.11.0a0+gitbc2c6ed-cp38-cp38-linux_aarch64.whl
-ADD https://github.com/ultralytics/assets/releases/download/v0.0.0/torchvision-0.12.0a0+9b5a3fe-cp38-cp38-linux_aarch64.whl \
-    torchvision-0.12.0a0+9b5a3fe-cp38-cp38-linux_aarch64.whl
 
 # Install pip packages
 RUN python3 -m pip install --upgrade pip wheel
-RUN pip install onnxruntime_gpu-1.8.0-cp38-cp38-linux_aarch64.whl tensorrt-8.2.0.6-cp38-none-linux_aarch64.whl \
-    torch-1.11.0a0+gitbc2c6ed-cp38-cp38-linux_aarch64.whl torchvision-0.12.0a0+9b5a3fe-cp38-cp38-linux_aarch64.whl
+RUN pip install --no-cache-dir \
+    onnxruntime_gpu-1.8.0-cp38-cp38-linux_aarch64.whl \
+    tensorrt-8.2.0.6-cp38-none-linux_aarch64.whl \
+    https://github.com/ultralytics/assets/releases/download/v0.0.0/torch-1.11.0a0+gitbc2c6ed-cp38-cp38-linux_aarch64.whl \
+    https://github.com/ultralytics/assets/releases/download/v0.0.0/torchvision-0.12.0a0+9b5a3fe-cp38-cp38-linux_aarch64.whl
 RUN pip install --no-cache-dir -e ".[export]"
+RUN rm *.whl
 
 # Usage Examples -------------------------------------------------------------------------------------------------------
 
diff --git a/docker/Dockerfile-jetson-jetpack5 b/docker/Dockerfile-jetson-jetpack5
index b71db9e5f0..53ca0ecd12 100644
--- a/docker/Dockerfile-jetson-jetpack5
+++ b/docker/Dockerfile-jetson-jetpack5
@@ -38,6 +38,7 @@ ADD https://nvidia.box.com/shared/static/mvdcltm9ewdy2d5nurkiqorofz1s53ww.whl on
 RUN python3 -m pip install --upgrade pip wheel
 RUN pip install onnxruntime_gpu-1.15.1-cp38-cp38-linux_aarch64.whl
 RUN pip install --no-cache-dir -e ".[export]"
+RUN rm *.whl
 
 # Usage Examples -------------------------------------------------------------------------------------------------------
 
diff --git a/docker/Dockerfile-jetson-jetpack6 b/docker/Dockerfile-jetson-jetpack6
new file mode 100644
index 0000000000..7183b3489b
--- /dev/null
+++ b/docker/Dockerfile-jetson-jetpack6
@@ -0,0 +1,52 @@
+# Ultralytics YOLO 🚀, AGPL-3.0 license
+# Builds ultralytics/ultralytics:jetson-jetpack6 image on DockerHub https://hub.docker.com/r/ultralytics/ultralytics
+# Supports JetPack6.x for YOLOv8 on Jetson AGX Orin, Orin NX and Orin Nano Series
+
+# Start FROM https://catalog.ngc.nvidia.com/orgs/nvidia/containers/l4t-jetpack
+FROM nvcr.io/nvidia/l4t-jetpack:r36.3.0
+
+# Set environment variables
+ENV APP_HOME /usr/src/ultralytics
+
+# Downloads to user config dir
+ADD https://github.com/ultralytics/assets/releases/download/v0.0.0/Arial.ttf \
+    https://github.com/ultralytics/assets/releases/download/v0.0.0/Arial.Unicode.ttf \
+    /root/.config/Ultralytics/
+
+# Install dependencies
+RUN apt update && \
+    apt install --no-install-recommends -y git python3-pip libopenmpi-dev libopenblas-base libomp-dev
+
+# Create working directory
+WORKDIR $APP_HOME
+
+# Copy contents and assign permissions
+COPY . $APP_HOME
+RUN chown -R root:root $APP_HOME
+ADD https://github.com/ultralytics/assets/releases/download/v8.2.0/yolov8n.pt $APP_HOME
+
+# Download onnxruntime-gpu 1.18.0 from https://elinux.org/Jetson_Zoo and https://forums.developer.nvidia.com/t/pytorch-for-jetson/72048
+ADD https://nvidia.box.com/shared/static/48dtuob7meiw6ebgfsfqakc9vse62sg4.whl onnxruntime_gpu-1.18.0-cp310-cp310-linux_aarch64.whl
+
+# Pip install onnxruntime-gpu, torch, torchvision and ultralytics
+RUN python3 -m pip install --upgrade pip wheel
+RUN pip install --no-cache-dir \
+    onnxruntime_gpu-1.18.0-cp310-cp310-linux_aarch64.whl \
+    https://github.com/ultralytics/assets/releases/download/v0.0.0/torch-2.3.0-cp310-cp310-linux_aarch64.whl \
+    https://github.com/ultralytics/assets/releases/download/v0.0.0/torchvision-0.18.0a0+6043bc2-cp310-cp310-linux_aarch64.whl
+RUN pip install --no-cache-dir -e ".[export]"
+RUN rm *.whl
+
+# Usage Examples -------------------------------------------------------------------------------------------------------
+
+# Build and Push
+# t=ultralytics/ultralytics:latest-jetson-jetpack6 && sudo docker build --platform linux/arm64 -f docker/Dockerfile-jetson-jetpack6 -t $t . && sudo docker push $t
+
+# Run
+# t=ultralytics/ultralytics:latest-jetson-jetpack6 && sudo docker run -it --ipc=host $t
+
+# Pull and Run
+# t=ultralytics/ultralytics:latest-jetson-jetpack6 && sudo docker pull $t && sudo docker run -it --ipc=host $t
+
+# Pull and Run with NVIDIA runtime
+# t=ultralytics/ultralytics:latest-jetson-jetpack6 && sudo docker pull $t && sudo docker run -it --ipc=host --runtime=nvidia $t
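
As a quick smoke test of the new JetPack 6 image (a suggested check, not part of the patch itself; the tag matches the "latest-jetson-jetpack6" workflow matrix entry added above, while the Python one-liner is illustrative), the GPU wheels can be verified on a JetPack 6 device along these lines:

# Verify that torch and onnxruntime-gpu see the Jetson GPU inside the container
t=ultralytics/ultralytics:latest-jetson-jetpack6
sudo docker run -it --rm --runtime=nvidia --ipc=host $t python3 -c "import torch, onnxruntime as ort; print('torch', torch.__version__, 'CUDA available:', torch.cuda.is_available()); print('ORT providers:', ort.get_available_providers())"

If onnxruntime-gpu 1.18.0 picked up the JetPack 6 CUDA/TensorRT libraries, CUDAExecutionProvider (and typically TensorrtExecutionProvider) should appear in the provider list, with torch reporting CUDA available.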