`ultralytics 8.1.18` add cmake for building onnxsim on aarch64 (#8395)

Co-authored-by: Glenn Jocher <glenn.jocher@ultralytics.com>
Co-authored-by: UltralyticsAssistant <web@ultralytics.com>
Branch: pull/8397/head · Tag: v8.1.18
Lakshantha Dissanayake authored 9 months ago · committed by GitHub
parent 8c28e0c3fe · commit 0572b29445
3 changed files:
  docker/Dockerfile-arm64 (2 changes)
  ultralytics/__init__.py (2 changes)
  ultralytics/engine/exporter.py (10 changes)

docker/Dockerfile-arm64

@@ -15,7 +15,7 @@ ADD https://github.com/ultralytics/assets/releases/download/v0.0.0/Arial.ttf \
 # g++ required to build 'tflite_support' and 'lap' packages, libusb-1.0-0 required for 'tflite_support' package
 # cmake and build-essential is needed to build onnxsim when exporting to tflite
 RUN apt update \
-    && apt install --no-install-recommends -y python3-pip git zip curl htop gcc libgl1 libglib2.0-0 libpython3-dev gnupg g++ libusb-1.0-0 cmake build-essential
+    && apt install --no-install-recommends -y python3-pip git zip curl htop gcc libgl1 libglib2.0-0 libpython3-dev gnupg g++ libusb-1.0-0 build-essential

 # Create working directory
 WORKDIR /usr/src/ultralytics
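The apt-level cmake dropped above is handled at run time instead: on aarch64 the exporter pip-installs cmake right before onnxsim is needed. A minimal sketch of that fallback, reusing the names from the exporter diff below (import paths assumed from the ultralytics package layout):

# Hedged sketch: runtime replacement for the apt cmake package removed above.
from ultralytics.utils import ARM64  # True on arm64/aarch64 hosts
from ultralytics.utils.checks import check_requirements

if ARM64:
    check_requirements("cmake")  # pip-installs cmake so the onnxsim wheel can build from source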

ultralytics/__init__.py

@@ -1,6 +1,6 @@
 # Ultralytics YOLO 🚀, AGPL-3.0 license

-__version__ = "8.1.17"
+__version__ = "8.1.18"

 from ultralytics.data.explorer.explorer import Explorer
 from ultralytics.models import RTDETR, SAM, YOLO, YOLOWorld

ultralytics/engine/exporter.py

@@ -343,6 +343,8 @@ class Exporter:
         requirements = ["onnx>=1.12.0"]
         if self.args.simplify:
             requirements += ["onnxsim>=0.4.33", "onnxruntime-gpu" if torch.cuda.is_available() else "onnxruntime"]
+            if ARM64:
+                check_requirements("cmake")  # 'cmake' is needed to build onnxsim on aarch64
         check_requirements(requirements)
         import onnx  # noqa
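For context, a hedged usage sketch of the export path this hunk affects; the weights file is an example, and simplify=True is what pulls in onnxsim (and now cmake on aarch64):

# Usage sketch: ONNX export with simplification, which triggers the ARM64 cmake check above.
from ultralytics import YOLO

model = YOLO("yolov8n.pt")  # example weights
model.export(format="onnx", simplify=True)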
@@ -712,8 +714,12 @@ class Exporter:
         try:
             import tensorflow as tf  # noqa
         except ImportError:
-            check_requirements(f"tensorflow{'-macos' if MACOS else '-aarch64' if ARM64 else '' if cuda else '-cpu'}")
+            suffix = "-macos" if MACOS else "-aarch64" if ARM64 else "" if cuda else "-cpu"
+            version = "" if ARM64 else "<=2.13.1"
+            check_requirements(f"tensorflow{suffix}{version}")
             import tensorflow as tf  # noqa
+        if ARM64:
+            check_requirements("cmake")  # 'cmake' is needed to build onnxsim on aarch64
         check_requirements(
             (
                 "onnx>=1.12.0",
@@ -722,7 +728,7 @@
                 "onnxsim>=0.4.33",
                 "onnx_graphsurgeon>=0.3.26",
                 "tflite_support",
-                "flatbuffers>=23.5.26",  # update old 'flatbuffers' included inside tensorflow package
+                "flatbuffers>=23.5.26,<100",  # update old 'flatbuffers' included inside tensorflow package
                 "onnxruntime-gpu" if cuda else "onnxruntime",
             ),
             cmds="--extra-index-url https://pypi.ngc.nvidia.com",
