Fix ARM64 Docker building (#13217)

Co-authored-by: Glenn Jocher <glenn.jocher@ultralytics.com>
Lakshantha Dissanayake 6 months ago committed by GitHub
parent b14f9cdb10
commit 51c3169e9f
1. .github/workflows/docker.yaml (8 changes)
2. docker/Dockerfile-arm64 (3 changes)
3. docker/Dockerfile-jetson (3 changes)
4. pyproject.toml (5 changes)

@@ -18,22 +18,26 @@ on:
      Dockerfile-cpu:
        type: boolean
        description: Use Dockerfile-cpu
        default: true
      Dockerfile-arm64:
        type: boolean
        description: Use Dockerfile-arm64
        default: true
      Dockerfile-jetson:
        type: boolean
        description: Use Dockerfile-jetson
        default: true
      Dockerfile-python:
        type: boolean
        description: Use Dockerfile-python
        default: true
      Dockerfile-conda:
        type: boolean
        description: Use Dockerfile-conda
        default: true
      push:
        type: boolean
        description: Push images to Docker Hub
        default: true
        description: Publish all Images to Docker Hub
jobs:
  docker:
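These workflow_dispatch inputs can also be exercised manually from the command line. A minimal sketch using the GitHub CLI, assuming gh is authenticated against this repository and the workflow file name matches; the input values shown are illustrative only:

# Trigger only the ARM64 image build, without publishing (illustrative input values)
gh workflow run docker.yaml -f Dockerfile-arm64=true -f push=false
# Follow the run that was just queued
gh run watch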

@@ -33,8 +33,9 @@ ADD https://github.com/ultralytics/assets/releases/download/v8.2.0/yolov8n.pt $A
RUN rm -rf /usr/lib/python3.11/EXTERNALLY-MANAGED
# Install pip packages
# Install tensorstore from .whl because PyPI does not include aarch64 binaries
RUN python3 -m pip install --upgrade pip wheel
RUN pip install --no-cache-dir -e ".[export]"
RUN pip install --no-cache-dir https://github.com/ultralytics/yolov5/releases/download/v7.0/tensorstore-0.1.59-cp311-cp311-linux_aarch64.whl -e ".[export]"
# Creates a symbolic link to make 'python' point to 'python3'
RUN ln -sf /usr/bin/python3 /usr/bin/python
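To sanity-check the new tensorstore wheel locally, the ARM64 image can be cross-built with Docker Buildx. A rough sketch, assuming QEMU binfmt emulation is available on an x86_64 host; the image tag is a placeholder:

# Register ARM64 emulation so an x86_64 host can build and run aarch64 stages
docker run --privileged --rm tonistiigi/binfmt --install arm64
# Build the ARM64 image from the repository root (placeholder tag)
docker buildx build --platform linux/arm64 -f docker/Dockerfile-arm64 -t ultralytics:arm64-test --load .
# Confirm tensorstore imported from the aarch64 wheel instead of failing to resolve on PyPI
docker run --rm --platform linux/arm64 ultralytics:arm64-test python3 -c "import tensorstore; print(tensorstore.__version__)"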

@@ -32,12 +32,11 @@ ADD https://github.com/ultralytics/assets/releases/download/v8.2.0/yolov8n.pt $A
RUN grep -v "opencv-python" pyproject.toml > temp.toml && mv temp.toml pyproject.toml
# Download onnxruntime-gpu 1.15.1 for Jetson Linux 35.2.1 (JetPack 5.1). Other versions can be seen in https://elinux.org/Jetson_Zoo#ONNX_Runtime
RUN wget https://nvidia.box.com/shared/static/mvdcltm9ewdy2d5nurkiqorofz1s53ww.whl -O onnxruntime_gpu-1.15.1-cp38-cp38-linux_aarch64.whl
ADD https://nvidia.box.com/shared/static/mvdcltm9ewdy2d5nurkiqorofz1s53ww.whl onnxruntime_gpu-1.15.1-cp38-cp38-linux_aarch64.whl
# Install pip packages manually for TensorRT compatibility https://github.com/NVIDIA/TensorRT/issues/2567
RUN python3 -m pip install --upgrade pip wheel
RUN pip install onnxruntime_gpu-1.15.1-cp38-cp38-linux_aarch64.whl
RUN pip install --no-cache-dir tqdm matplotlib pyyaml psutil pandas onnx
RUN pip install --no-cache-dir -e ".[export]"
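Switching from RUN wget to ADD fetches the onnxruntime-gpu wheel at build time via Docker's own downloader; whether it was picked up can be checked inside the built image. A hedged example with a placeholder tag, to be run on a Jetson device with the NVIDIA container runtime:

# Print the onnxruntime version and available execution providers (placeholder tag)
docker run --rm -it --runtime nvidia ultralytics:jetson-test \
    python3 -c "import onnxruntime as ort; print(ort.__version__, ort.get_available_providers())"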

@@ -102,7 +102,7 @@ export = [
    "openvino>=2024.0.0", # OpenVINO export
    "tensorflow>=2.0.0", # TF bug https://github.com/ultralytics/ultralytics/issues/5161
    "tensorflowjs>=3.9.0", # TF.js export, automatically installs tensorflow
    "keras", # not installed auotomatically by tensorflow>=2.16
    "keras", # not installed automatically by tensorflow>=2.16
    "flatbuffers>=23.5.26,<100; platform_machine == 'aarch64'", # update old 'flatbuffers' included inside tensorflow package
    "numpy==1.23.5; platform_machine == 'aarch64'", # fix error: `np.bool` was a deprecated alias for the builtin `bool` when using TensorRT models on NVIDIA Jetson
    "h5py!=3.11.0; platform_machine == 'aarch64'", # fix h5py build issues due to missing aarch64 wheels in 3.11 release
@@ -112,9 +112,6 @@ explorer = [
    "duckdb<=0.9.2", # SQL queries, duckdb==0.10.0 bug https://github.com/ultralytics/ultralytics/pull/8181
    "streamlit", # visualizing with GUI
]
# tflite-support # for TFLite model metadata
# nvidia-pyindex # TensorRT export
# nvidia-tensorrt # TensorRT export
logging = [
    "comet", # https://docs.ultralytics.com/integrations/comet/
    "tensorboard>=2.13.0",
