Co-authored-by: Glenn Jocher <glenn.jocher@ultralytics.com>
Branch: pull/16941/head^2
Committed by: Ultralytics Assistant (via GitHub)
parent 0027e0837c
commit a622b404ef
Signature: no known key found in database (GPG Key ID: B5690EEEBB952194)
Changed files:

1. .github/workflows/cla.yml (2 changed lines)
2. .github/workflows/docker.yaml (2 changed lines)
3. .github/workflows/docs.yml (4 changed lines)
4. .github/workflows/format.yml (5 changed lines)
5. .github/workflows/merge-main-into-prs.yml (2 changed lines)
6. .github/workflows/publish.yml (6 changed lines)
7. docs/en/integrations/weights-biases.md (2 changed lines)
8. ultralytics/cfg/__init__.py (2 changed lines)
9. ultralytics/data/utils.py (2 changed lines)
10. ultralytics/nn/autobackend.py (2 changed lines)
11. ultralytics/nn/tasks.py (9 changed lines)
12. ultralytics/solutions/analytics.py (4 changed lines)
13. ultralytics/solutions/heatmap.py (3 changed lines)
14. ultralytics/utils/__init__.py (6 changed lines)
15. ultralytics/utils/callbacks/tensorboard.py (2 changed lines)
16. ultralytics/utils/checks.py (6 changed lines)
17. ultralytics/utils/downloads.py (2 changed lines)
18. ultralytics/utils/plotting.py (2 changed lines)
19. ultralytics/utils/torch_utils.py (2 changed lines)

@@ -30,7 +30,7 @@ jobs:
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
# Must be repository secret PAT
-PERSONAL_ACCESS_TOKEN: ${{ secrets.PERSONAL_ACCESS_TOKEN }}
+PERSONAL_ACCESS_TOKEN: ${{ secrets._GITHUB_TOKEN }}
with:
path-to-signatures: "signatures/version1/cla.json"
path-to-document: "https://docs.ultralytics.com/help/CLA" # CLA document

@@ -182,7 +182,7 @@ jobs:
steps:
- name: Trigger Additional GitHub Actions
env:
-GH_TOKEN: ${{ secrets.PERSONAL_ACCESS_TOKEN }}
+GH_TOKEN: ${{ secrets._GITHUB_TOKEN }}
run: |
sleep 60
gh workflow run deploy_cloud_run.yml \

@@ -34,7 +34,7 @@ jobs:
uses: actions/checkout@v4
with:
repository: ${{ github.event.pull_request.head.repo.full_name || github.repository }}
-token: ${{ secrets.PERSONAL_ACCESS_TOKEN || secrets.GITHUB_TOKEN }}
+token: ${{ secrets._GITHUB_TOKEN }}
ref: ${{ github.head_ref || github.ref }}
fetch-depth: 0
- name: Set up Python
@@ -94,5 +94,5 @@ jobs:
else
LATEST_HASH=$(git rev-parse --short=7 HEAD)
git commit -m "Update Docs for 'ultralytics ${{ steps.check_pypi.outputs.version }} - $LATEST_HASH'"
-git push https://${{ secrets.PERSONAL_ACCESS_TOKEN }}@github.com/ultralytics/docs.git gh-pages
+git push https://${{ secrets._GITHUB_TOKEN }}@github.com/ultralytics/docs.git gh-pages
fi

@@ -20,15 +20,14 @@ jobs:
- name: Run Ultralytics Formatting
uses: ultralytics/actions@main
with:
-token: ${{ secrets.PERSONAL_ACCESS_TOKEN || secrets.GITHUB_TOKEN }} # note GITHUB_TOKEN automatically generated
+token: ${{ secrets._GITHUB_TOKEN }} # note GITHUB_TOKEN automatically generated
labels: true # autolabel issues and PRs
python: true # format Python code and docstrings
prettier: true # format YAML, JSON, Markdown and CSS
spelling: true # check spelling
links: false # check broken links
summary: true # print PR summary with GPT4o (requires 'openai_api_key')
-openai_azure_api_key: ${{ secrets.OPENAI_AZURE_API_KEY }}
-openai_azure_endpoint: ${{ secrets.OPENAI_AZURE_ENDPOINT }}
+openai_api_key: ${{ secrets.OPENAI_API_KEY }}
first_issue_response: |
👋 Hello @${{ github.actor }}, thank you for your interest in Ultralytics 🚀! We recommend a visit to the [Docs](https://docs.ultralytics.com) for new users where you can find many [Python](https://docs.ultralytics.com/usage/python/) and [CLI](https://docs.ultralytics.com/usage/cli/) usage examples and where many of the most common questions may already be answered.

@@ -33,7 +33,7 @@ jobs:
import os
import time
-g = Github("${{ secrets.PERSONAL_ACCESS_TOKEN }}")
+g = Github("${{ secrets._GITHUB_TOKEN }}")
repo = g.get_repo("${{ github.repository }}")
# Fetch the default branch name
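For context on the snippet in this hunk, a minimal standalone sketch of the same PyGithub pattern is shown below. It assumes `pip install PyGithub`; the token is read from an environment variable here only for the sake of a runnable sketch, whereas the workflow interpolates the secret directly into the script.

```python
# Hedged sketch of the PyGithub calls used in the hunk above (assumes PyGithub is installed).
import os

from github import Github

g = Github(os.environ["_GITHUB_TOKEN"])        # token; in CI this is the repository secret
repo = g.get_repo("ultralytics/ultralytics")   # repository slug; the workflow uses github.repository
print(repo.default_branch)                     # fetch the default branch name, e.g. "main"
```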

@@ -23,7 +23,7 @@ jobs:
- name: Checkout code
uses: actions/checkout@v4
with:
-token: ${{ secrets.PERSONAL_ACCESS_TOKEN || secrets.GITHUB_TOKEN }} # use your PAT here
+token: ${{ secrets._GITHUB_TOKEN }} # use your PAT here
- name: Git config
run: |
git config --global user.name "UltralyticsAssistant"
@@ -103,7 +103,7 @@ jobs:
if: (github.event_name == 'push' || github.event.inputs.pypi == 'true') && steps.check_pypi.outputs.increment == 'True'
env:
OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
-GITHUB_TOKEN: ${{ secrets.PERSONAL_ACCESS_TOKEN || secrets.GITHUB_TOKEN }}
+GITHUB_TOKEN: ${{ secrets._GITHUB_TOKEN }}
CURRENT_TAG: ${{ steps.check_pypi.outputs.current_tag }}
PREVIOUS_TAG: ${{ steps.check_pypi.outputs.previous_tag }}
run: |
@@ -111,7 +111,7 @@ jobs:
shell: bash
- name: Extract PR Details
env:
-GH_TOKEN: ${{ secrets.PERSONAL_ACCESS_TOKEN || secrets.GITHUB_TOKEN }}
+GH_TOKEN: ${{ secrets._GITHUB_TOKEN }}
run: |
# Check if the event is a pull request or pull_request_target
if [ "${{ github.event_name }}" = "pull_request" ] || [ "${{ github.event_name }}" = "pull_request_target" ]; then

@@ -210,7 +210,7 @@ These features help in tracking experiments, optimizing models, and collaboratin
After running your training script with W&B integration:
1. A link to your W&B dashboard will be provided in the console output.
-2. Click on the link or go to [wandb.ai](https://wandb.ai) and log in to your account.
+2. Click on the link or go to [wandb.ai](https://wandb.ai/) and log in to your account.
3. Navigate to your project to view detailed metrics, visualizations, and model performance data.
The dashboard offers insights into your model's training process, allowing you to analyze and improve your YOLO11 models effectively.
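Where the docs text above refers to "running your training script with W&B integration", a minimal sketch of such a script follows. It assumes `pip install ultralytics wandb` and that W&B logging is enabled in the Ultralytics settings; the model, dataset, project, and run names are illustrative, not taken from the diff.

```python
# Minimal sketch, assuming ultralytics and wandb are installed and W&B logging is enabled.
from ultralytics import YOLO

model = YOLO("yolo11n.pt")          # pretrained checkpoint
results = model.train(
    data="coco8.yaml",              # small 8-image demo dataset used throughout the Ultralytics docs
    epochs=3,
    project="ultralytics-demo",     # assumed to be picked up by the W&B callback for grouping runs
    name="yolo11n-wandb-demo",      # illustrative run name
)
# Once training starts, the console output includes the wandb.ai run URL mentioned in step 1 above.
```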

@@ -639,7 +639,7 @@ def smart_value(v):
else:
try:
return eval(v)
-except: # noqa E722
+except Exception:
return v
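Many hunks in this commit replace bare `except:` clauses with `except Exception:`. A small self-contained illustration of the difference, not taken from the repository, is:

```python
# Illustrative only: a bare `except:` also swallows BaseException subclasses such as
# KeyboardInterrupt and SystemExit, whereas `except Exception:` lets them propagate.
def smart_value_sketch(v: str):
    """Convert a string to a Python literal where possible, else return it unchanged."""
    try:
        return eval(v)      # mirrors the pattern in the hunk above
    except Exception:       # narrow enough to keep Ctrl-C and sys.exit() working
        return v

print(smart_value_sketch("3.14"))   # -> 3.14
print(smart_value_sketch("None"))   # -> None
print(smart_value_sketch("hello"))  # -> hello (NameError caught, string returned unchanged)
```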

@@ -65,7 +65,7 @@ def exif_size(img: Image.Image):
rotation = exif.get(274, None) # the EXIF key for the orientation tag is 274
if rotation in {6, 8}: # rotation 270 or 90
s = s[1], s[0]
-except: # noqa E722
+except Exception:
pass
return s

@@ -46,7 +46,7 @@ def default_class_names(data=None):
if data:
try:
return yaml_load(check_yaml(data))["names"]
-except: # noqa E722
+except Exception:
pass
return {i: f"class{i}" for i in range(999)} # return default if above errors

@@ -963,7 +963,6 @@ def parse_model(d, ch, verbose=True): # model_dict, input_channels(3)
args[j] = locals()[a] if a in locals() else ast.literal_eval(a)
except ValueError:
pass
n = n_ = max(round(n * depth), 1) if n > 1 else n # depth gain
if m in {
Classify,
@@ -1102,7 +1101,7 @@ def guess_model_scale(model_path):
(str): The size character of the model's scale, which can be n, s, m, l, or x.
"""
try:
-return re.search(r"yolo[v]?\d+([nslmx])", Path(model_path).stem).group(1) # n, s, m, l, or x
+return re.search(r"yolo[v]?\d+([nslmx])", Path(model_path).stem).group(1) # noqa, returns n, s, m, l, or x
except AttributeError:
return ""
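The regex in the hunk above extracts the scale suffix from a model filename stem. A quick illustration of its behaviour, with example file stems that are not taken from the diff:

```python
import re

# Examples of the scale-extraction regex used in guess_model_scale()
for stem in ("yolov8n", "yolo11x", "yolov5su"):
    m = re.search(r"yolo[v]?\d+([nslmx])", stem)
    print(stem, "->", m.group(1) if m else "")
# yolov8n -> n, yolo11x -> x, yolov5su -> s
```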
@@ -1139,7 +1138,7 @@ def guess_model_task(model):
if isinstance(model, dict):
try:
return cfg2task(model)
-except: # noqa E722
+except Exception:
pass
# Guess from PyTorch model
@@ -1147,12 +1146,12 @@ def guess_model_task(model):
for x in "model.args", "model.model.args", "model.model.model.args":
try:
return eval(x)["task"]
-except: # noqa E722
+except Exception:
pass
for x in "model.yaml", "model.model.yaml", "model.model.model.yaml":
try:
return cfg2task(eval(x))
-except: # noqa E722
+except Exception:
pass
for m in model.modules():

@@ -61,11 +61,11 @@ class Analytics(BaseSolution):
self.extract_tracks(im0) # Extract tracks
if self.type == "line":
-for box in self.boxes:
+for _ in self.boxes:
self.total_counts += 1
im0 = self.update_graph(frame_number=frame_number)
self.total_counts = 0
-elif self.type == "pie" or self.type == "bar" or self.type == "area":
+elif self.type in {"pie", "bar", "area"}:
self.clswise_count = {}
for box, cls in zip(self.boxes, self.clss):
if self.names[int(cls)] in self.clswise_count:
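Two small idioms are adopted in the hunk above: an underscore for a loop variable whose value is unused, and a set-membership test instead of chained `or` comparisons. A standalone illustration, with placeholder variable names:

```python
# Illustration of the two idioms adopted above; `boxes` and `chart_type` are placeholders.
boxes = [[0, 0, 10, 10], [5, 5, 20, 20], [1, 2, 3, 4]]
chart_type = "bar"

total_counts = 0
for _ in boxes:                            # underscore: the box values themselves are not needed
    total_counts += 1

if chart_type in {"pie", "bar", "area"}:   # one membership test replaces three == comparisons
    print(f"{chart_type}: {total_counts} detections")
```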

@@ -52,7 +52,8 @@ class Heatmap(ObjectCounter):
Returns:
im0 (ndarray): Processed image for further usage
"""
-self.heatmap = np.zeros_like(im0, dtype=np.float32) * 0.99 if not self.initialized else self.heatmap
+if not self.initialized:
+    self.heatmap = np.zeros_like(im0, dtype=np.float32) * 0.99
self.initialized = True # Initialize heatmap only once
self.annotator = Annotator(im0, line_width=self.line_width) # Initialize annotator
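The heatmap change above converts a conditional expression into an explicit guard so the accumulator is allocated only on the first frame. A self-contained sketch of that lazy-initialization pattern, with illustrative class and attribute names:

```python
import numpy as np


class LazyHeatmap:
    """Illustrative stand-in for the one-time heatmap allocation shown above."""

    def __init__(self):
        self.initialized = False
        self.heatmap = None

    def process(self, frame: np.ndarray) -> np.ndarray:
        if not self.initialized:
            # Allocate the float32 accumulator once, matching the incoming frame's shape
            self.heatmap = np.zeros_like(frame, dtype=np.float32)
            self.initialized = True
        return self.heatmap


hm = LazyHeatmap()
print(hm.process(np.zeros((4, 4, 3), dtype=np.uint8)).shape)  # (4, 4, 3)
```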

@@ -526,7 +526,7 @@ def read_device_model() -> str:
try:
with open("/proc/device-tree/model") as f:
return f.read()
-except: # noqa E722
+except Exception:
return ""
@@ -584,7 +584,7 @@ def is_docker() -> bool:
try:
with open("/proc/self/cgroup") as f:
return "docker" in f.read()
-except: # noqa E722
+except Exception:
return False
@@ -623,7 +623,7 @@ def is_online() -> bool:
for dns in ("1.1.1.1", "8.8.8.8"): # check Cloudflare and Google DNS
socket.create_connection(address=(dns, 80), timeout=2.0).close()
return True
-except: # noqa E722
+except Exception:
return False

@@ -50,7 +50,7 @@ def _log_tensorboard_graph(trainer):
LOGGER.info(f"{PREFIX}model graph visualization added ✅")
return
-except: # noqa E722
+except Exception:
# Fallback to TorchScript export steps (RTDETR)
try:
model = deepcopy(de_parallel(trainer.model))

@@ -277,7 +277,7 @@ def check_latest_pypi_version(package_name="ultralytics"):
response = requests.get(f"https://pypi.org/pypi/{package_name}/json", timeout=3)
if response.status_code == 200:
return response.json()["info"]["version"]
-except: # noqa E722
+except Exception:
return None
@@ -299,7 +299,7 @@ def check_pip_update_available():
f"Update with 'pip install -U ultralytics'"
)
return True
-except: # noqa E722
+except Exception:
pass
return False
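For reference, the update check wrapped by the hunk above boils down to comparing the installed version against the latest release on PyPI. A hedged sketch of that comparison using the `packaging` library (an assumption for illustration, not necessarily what checks.py uses), with placeholder version strings:

```python
# Illustrative only: compare an installed version string against the latest PyPI release.
from packaging import version

installed, latest = "8.3.0", "8.3.2"  # placeholder version strings
if latest and version.parse(latest) > version.parse(installed):
    print(
        f"New https://pypi.org/project/ultralytics/{latest} available 😃 "
        f"Update with 'pip install -U ultralytics'"
    )
```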
@@ -715,7 +715,7 @@ def git_describe(path=ROOT): # path must be a directory
"""Return human-readable git description, i.e. v5.0-5-g3e25f1e https://git-scm.com/docs/git-describe."""
try:
return subprocess.check_output(f"git -C {path} describe --tags --long --always", shell=True).decode()[:-1]
-except: # noqa E722
+except Exception:
return ""

@@ -60,7 +60,7 @@ def is_url(url, check=False):
with request.urlopen(url) as response:
return response.getcode() == 200 # check if exists online
return True
-except: # noqa E722
+except Exception:
return False

@@ -1117,7 +1117,7 @@ def plot_images(
im[y : y + h, x : x + w, :][mask] = (
im[y : y + h, x : x + w, :][mask] * 0.4 + np.array(color) * 0.6
)
-except: # noqa E722
+except Exception:
pass
annotator.fromarray(im)
if not save:

@@ -119,7 +119,7 @@ def get_cpu_info():
info = cpuinfo.get_cpu_info() # info dict
string = info.get(k[0] if k[0] in info else k[1] if k[1] in info else k[2], "unknown")
PERSISTENT_CACHE["cpu_info"] = string.replace("(R)", "").replace("CPU ", "").replace("@ ", "")
-except: # noqa E722
+except Exception:
pass
return PERSISTENT_CACHE.get("cpu_info", "unknown")
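The `get_cpu_info()` hunk relies on the `py-cpuinfo` package. A minimal sketch of querying it directly is shown below; the fallback key names are an assumption for illustration, which is why the patched function also falls back across several candidate keys.

```python
# Hedged sketch, assuming `pip install py-cpuinfo`; key availability differs by platform and version.
import cpuinfo

info = cpuinfo.get_cpu_info()  # dict of CPU details
name = next((info[k] for k in ("brand_raw", "hardware_raw", "arch_string_raw") if k in info), "unknown")
print(name.replace("(R)", "").replace("CPU ", "").replace("@ ", ""))  # same cleanup as the hunk above
```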
