Disable Ray tests (#17266)

Co-authored-by: UltralyticsAssistant <web@ultralytics.com>
Glenn Jocher authored 4 weeks ago, committed by GitHub
parent b8c90baffe
commit 11b4194344
2 changed files:
  1. .github/workflows/ci.yaml (4 changed lines)
  2. ultralytics/utils/tuner.py (10 changed lines)

.github/workflows/ci.yaml

@@ -184,7 +184,7 @@ jobs:
             torch="torch==1.8.0 torchvision==0.9.0"
           fi
           if [[ "${{ github.event_name }}" =~ ^(schedule|workflow_dispatch)$ ]]; then
-            slow="pycocotools mlflow ray[tune]"
+            slow="pycocotools mlflow"
           fi
           pip install -e ".[export]" $torch $slow pytest-cov --extra-index-url https://download.pytorch.org/whl/cpu
       - name: Check environment
@@ -247,7 +247,7 @@ jobs:
       - name: Install requirements
         run: |
           python -m pip install --upgrade pip wheel
-          pip install -e ".[export]" pytest mlflow pycocotools "ray[tune]"
+          pip install -e ".[export]" pytest mlflow pycocotools
       - name: Check environment
         run: |
           yolo checks
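
With ray[tune] dropped from both install steps, Ray Tune integration tests have nothing to import in CI. A minimal sketch (not part of this commit; the test name is hypothetical) of how such a test can skip itself cleanly when Ray is absent, using pytest's standard importorskip:

# Hypothetical guard: skip Ray Tune tests when ray[tune] is not installed,
# which is now the default CI environment after this change.
import pytest

pytest.importorskip("ray.tune")  # skips this module at collection time if Ray Tune is missing


def test_ray_tune_smoke():
    """Run one tiny Ray Tune trial to confirm the integration still executes when Ray is installed."""
    from ultralytics import YOLO

    model = YOLO("yolo11n.pt")  # weights name is illustrative
    # Model.tune(use_ray=True, ...) routes to ultralytics.utils.tuner.run_ray_tune()
    model.tune(data="coco8.yaml", use_ray=True, iterations=1, epochs=1, imgsz=32, device="cpu")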

ultralytics/utils/tuner.py

@@ -1,12 +1,16 @@
 # Ultralytics YOLO 🚀, AGPL-3.0 license
 
 from ultralytics.cfg import TASK2DATA, TASK2METRIC, get_save_dir
 from ultralytics.utils import DEFAULT_CFG, DEFAULT_CFG_DICT, LOGGER, NUM_THREADS, checks
 
 
 def run_ray_tune(
-    model, space: dict = None, grace_period: int = 10, gpu_per_trial: int = None, max_samples: int = 10, **train_args
+    model,
+    space: dict = None,
+    grace_period: int = 10,
+    gpu_per_trial: int = None,
+    max_samples: int = 10,
+    **train_args,
 ):
     """
     Runs hyperparameter tuning using Ray Tune.
@@ -38,7 +42,7 @@ def run_ray_tune(
         train_args = {}
 
     try:
-        checks.check_requirements(("ray[tune]", "numpy<2.0.0"))
+        checks.check_requirements("ray[tune]")
 
         import ray
         from ray import tune
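
On the library side only the numpy<2.0.0 pin is dropped; ray[tune] is still resolved lazily at call time by checks.check_requirements, so local tuning keeps working once Ray is installed. A minimal usage sketch (model, dataset, and values are illustrative, not from this commit):

# Minimal sketch: Ray Tune is checked/installed on demand by check_requirements,
# so it no longer needs to be pre-installed in CI. All values below are illustrative.
from ultralytics import YOLO
from ultralytics.utils.tuner import run_ray_tune

model = YOLO("yolo11n.pt")
result_grid = run_ray_tune(
    model,
    space=None,         # None falls back to the built-in default search space
    grace_period=10,    # ASHA scheduler grace period in epochs
    gpu_per_trial=0,    # CPU-only trials
    max_samples=10,     # number of trials to run
    data="coco8.yaml",  # forwarded to model.train() via **train_args
    epochs=5,
)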
