Merge branch 'main' into mkdocs-exclude

Branch: mkdocs-exclude
Authored by Ultralytics Assistant, committed via GitHub 4 months ago
commit f372d834fd
13 changed files:

 1. .github/workflows/format.yml (9 changes)
 2. README.md (2)
 3. README.zh-CN.md (2)
 4. docs/en/index.md (2)
 5. examples/YOLOv8-SAHI-Inference-Video/yolov8_sahi.py (8)
 6. examples/heatmaps.ipynb (372)
 7. examples/hub.ipynb (215)
 8. examples/object_counting.ipynb (406)
 9. examples/object_tracking.ipynb (476)
10. tests/test_cli.py (4)
11. ultralytics/__init__.py (2)
12. ultralytics/data/augment.py (2)
13. ultralytics/engine/model.py (1)

.github/workflows/format.yml
@@ -1,4 +1,4 @@
# Ultralytics YOLO 🚀, AGPL-3.0 license
# Ultralytics 🚀 - AGPL-3.0 License https://ultralytics.com/license
# Ultralytics Actions https://github.com/ultralytics/actions
# This workflow automatically formats code and documentation in PRs to official Ultralytics standards
@@ -9,7 +9,7 @@ on:
types: [opened, edited]
pull_request_target:
branches: [main]
types: [opened, closed, synchronize]
types: [opened, closed, synchronize, review_requested]
jobs:
format:
@@ -21,10 +21,9 @@ jobs:
token: ${{ secrets.PERSONAL_ACCESS_TOKEN || secrets.GITHUB_TOKEN }} # note GITHUB_TOKEN automatically generated
labels: true # autolabel issues and PRs
python: true # format Python code and docstrings
markdown: true # format Markdown
prettier: true # format YAML
prettier: true # format YAML, JSON, Markdown and CSS
spelling: true # check spelling
links: false # check broken links
summary: true # print PR summary with GPT4 (requires 'openai_api_key' or 'openai_azure_api_key' and 'openai_azure_endpoint')
summary: true # print PR summary with GPT4o (requires 'openai_api_key')
openai_azure_api_key: ${{ secrets.OPENAI_AZURE_API_KEY }}
openai_azure_endpoint: ${{ secrets.OPENAI_AZURE_ENDPOINT }}

README.md
@@ -1,6 +1,6 @@
<div align="center">
<p>
<a href="https://github.com/ultralytics/assets/releases/tag/v8.2.0" target="_blank">
<a href="https://www.ultralytics.com/events/yolovision" target="_blank">
<img width="100%" src="https://raw.githubusercontent.com/ultralytics/assets/main/yolov8/banner-yolov8.png" alt="YOLO Vision banner"></a>
</p>

README.zh-CN.md
@@ -1,6 +1,6 @@
<div align="center">
<p>
<a href="https://github.com/ultralytics/assets/releases/tag/v8.2.0" target="_blank">
<a href="https://www.ultralytics.com/events/yolovision" target="_blank">
<img width="100%" src="https://raw.githubusercontent.com/ultralytics/assets/main/yolov8/banner-yolov8.png" alt="YOLO Vision banner"></a>
</p>

docs/en/index.md
@@ -5,7 +5,7 @@ keywords: Ultralytics, YOLOv8, object detection, image segmentation, deep learni
---
<div align="center">
<a href="https://github.com/ultralytics/assets/releases/tag/v8.2.0" target="_blank"><img width="1024%" src="https://raw.githubusercontent.com/ultralytics/assets/main/yolov8/banner-yolov8.png" alt="Ultralytics YOLO banner"></a>
<a href="https://www.ultralytics.com/events/yolovision" target="_blank"><img width="1024%" src="https://raw.githubusercontent.com/ultralytics/assets/main/yolov8/banner-yolov8.png" alt="Ultralytics YOLO banner"></a>
<a href="https://docs.ultralytics.com/zh/">中文</a> |
<a href="https://docs.ultralytics.com/ko/">한국어</a> |
<a href="https://docs.ultralytics.com/ja/">日本語</a> |

examples/YOLOv8-SAHI-Inference-Video/yolov8_sahi.py
@@ -12,11 +12,15 @@ from ultralytics.utils.files import increment_path
from ultralytics.utils.plotting import Annotator, colors
class SahiInference:
class SAHIInference:
"""Runs YOLOv8 and SAHI for object detection on video with options to view, save, and track results."""
def __init__(self):
"""Initializes the SAHIInference class for performing sliced inference using SAHI with YOLOv8 models."""
self.detection_model = None
def load_model(self, weights):
"""Loads a YOLOv8 model with specified weights for object detection using SAHI."""
yolov8_model_path = f"models/{weights}"
download_yolov8s_model(yolov8_model_path)
self.detection_model = AutoDetectionModel.from_pretrained(
@@ -98,5 +102,5 @@ class SahiInference:
if __name__ == "__main__":
inference = SahiInference()
inference = SAHIInference()
inference.inference(**vars(inference.parse_opt()))

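For readers unfamiliar with what the renamed SAHIInference wrapper drives under the hood, here is a minimal, hedged sketch of SAHI sliced inference with a YOLOv8 model, assuming the sahi package's documented AutoDetectionModel / get_sliced_prediction API; the weight path, image path, slice sizes, and threshold are illustrative placeholders, not values taken from this commit.

    # Minimal SAHI + YOLOv8 sliced-inference sketch (assumed sahi API; placeholder paths and values)
    from sahi import AutoDetectionModel
    from sahi.predict import get_sliced_prediction
    from sahi.utils.yolov8 import download_yolov8s_model

    weights = "models/yolov8s.pt"    # placeholder weight path
    download_yolov8s_model(weights)  # download weights if not already present

    detection_model = AutoDetectionModel.from_pretrained(
        model_type="yolov8",       # YOLOv8 backend
        model_path=weights,
        confidence_threshold=0.3,  # placeholder threshold
        device="cpu",              # or "cuda:0"
    )

    # Slice the image into overlapping tiles, detect on each tile, and merge the results
    result = get_sliced_prediction(
        "path/to/frame.jpg",       # placeholder image/frame path
        detection_model,
        slice_height=512,
        slice_width=512,
        overlap_height_ratio=0.2,
        overlap_width_ratio=0.2,
    )

    for pred in result.object_prediction_list:
        print(pred.category.name, pred.bbox.to_xyxy(), pred.score.value)

Slicing keeps small objects detectable at full resolution, which is the motivation for wrapping SAHI around YOLOv8 video inference in this example script.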
examples/heatmaps.ipynb
@@ -1,191 +1,193 @@
{
"nbformat": 4,
"nbformat_minor": 0,
"metadata": {
"colab": {
"provenance": [],
"gpuType": "T4",
"toc_visible": true
},
"kernelspec": {
"name": "python3",
"display_name": "Python 3"
},
"language_info": {
"name": "python"
},
"accelerator": "GPU"
"cells": [
{
"cell_type": "markdown",
"metadata": {
"id": "PN1cAxdvd61e"
},
"source": [
"<div align=\"center\">\n",
"\n",
" <a href=\"https://ultralytics.com/yolov8\" target=\"_blank\">\n",
" <img width=\"1024\", src=\"https://raw.githubusercontent.com/ultralytics/assets/main/yolov8/banner-yolov8.png\"></a>\n",
"\n",
" [中文](https://docs.ultralytics.com/zh/) | [한국어](https://docs.ultralytics.com/ko/) | [日本語](https://docs.ultralytics.com/ja/) | [Русский](https://docs.ultralytics.com/ru/) | [Deutsch](https://docs.ultralytics.com/de/) | [Français](https://docs.ultralytics.com/fr/) | [Español](https://docs.ultralytics.com/es/) | [Português](https://docs.ultralytics.com/pt/) | [Türkçe](https://docs.ultralytics.com/tr/) | [Tiếng Việt](https://docs.ultralytics.com/vi/) | [हि](https://docs.ultralytics.com/hi/) | [العربية](https://docs.ultralytics.com/ar/)\n",
"\n",
" <a href=\"https://github.com/ultralytics/ultralytics/actions/workflows/ci.yaml\"><img src=\"https://github.com/ultralytics/ultralytics/actions/workflows/ci.yaml/badge.svg\" alt=\"Ultralytics CI\"></a>\n",
" <a href=\"https://console.paperspace.com/github/ultralytics/ultralytics\"><img src=\"https://assets.paperspace.io/img/gradient-badge.svg\" alt=\"Run on Gradient\"/></a>\n",
" <a href=\"https://colab.research.google.com/github/ultralytics/ultralytics/blob/main/examples/heatmaps.ipynb\"><img src=\"https://colab.research.google.com/assets/colab-badge.svg\" alt=\"Open In Colab\"></a>\n",
" <a href=\"https://www.kaggle.com/ultralytics/yolov8\"><img src=\"https://kaggle.com/static/images/open-in-kaggle.svg\" alt=\"Open In Kaggle\"></a>\n",
" <a href=\"https://ultralytics.com/discord\"><img alt=\"Discord\" src=\"https://img.shields.io/discord/1089800235347353640?logo=discord&logoColor=white&label=Discord&color=blue\"></a>\n",
"\n",
"Welcome to the Ultralytics YOLOv8 🚀 notebook! <a href=\"https://github.com/ultralytics/ultralytics\">YOLOv8</a> is the latest version of the YOLO (You Only Look Once) AI models developed by <a href=\"https://ultralytics.com\">Ultralytics</a>. This notebook serves as the starting point for exploring the various resources available to help you get started with YOLOv8 and understand its features and capabilities.\n",
"\n",
"YOLOv8 models are fast, accurate, and easy to use, making them ideal for various object detection and image segmentation tasks. They can be trained on large datasets and run on diverse hardware platforms, from CPUs to GPUs.\n",
"\n",
"We hope that the resources in this notebook will help you get the most out of YOLOv8. Please browse the YOLOv8 <a href=\"https://docs.ultralytics.com/guides/heatmaps\">Heatmap Docs</a> for details, raise an issue on <a href=\"https://github.com/ultralytics/ultralytics\">GitHub</a> for support, and join our <a href=\"https://ultralytics.com/discord\">Discord</a> community for questions and discussions!\n",
"\n",
"</div>"
]
},
"cells": [
{
"cell_type": "markdown",
"source": [
"<div align=\"center\">\n",
"\n",
" <a href=\"https://ultralytics.com/yolov8\" target=\"_blank\">\n",
" <img width=\"1024\", src=\"https://raw.githubusercontent.com/ultralytics/assets/main/yolov8/banner-yolov8.png\"></a>\n",
"\n",
" [中文](https://docs.ultralytics.com/zh/) | [한국어](https://docs.ultralytics.com/ko/) | [日本語](https://docs.ultralytics.com/ja/) | [Русский](https://docs.ultralytics.com/ru/) | [Deutsch](https://docs.ultralytics.com/de/) | [Français](https://docs.ultralytics.com/fr/) | [Español](https://docs.ultralytics.com/es/) | [Português](https://docs.ultralytics.com/pt/) | [Türkçe](https://docs.ultralytics.com/tr/) | [Tiếng Việt](https://docs.ultralytics.com/vi/) | [हि](https://docs.ultralytics.com/hi/) | [العربية](https://docs.ultralytics.com/ar/)\n",
"\n",
" <a href=\"https://github.com/ultralytics/ultralytics/actions/workflows/ci.yaml\"><img src=\"https://github.com/ultralytics/ultralytics/actions/workflows/ci.yaml/badge.svg\" alt=\"Ultralytics CI\"></a>\n",
" <a href=\"https://console.paperspace.com/github/ultralytics/ultralytics\"><img src=\"https://assets.paperspace.io/img/gradient-badge.svg\" alt=\"Run on Gradient\"/></a>\n",
" <a href=\"https://colab.research.google.com/github/ultralytics/ultralytics/blob/main/examples/heatmaps.ipynb\"><img src=\"https://colab.research.google.com/assets/colab-badge.svg\" alt=\"Open In Colab\"></a>\n",
" <a href=\"https://www.kaggle.com/ultralytics/yolov8\"><img src=\"https://kaggle.com/static/images/open-in-kaggle.svg\" alt=\"Open In Kaggle\"></a>\n",
" <a href=\"https://ultralytics.com/discord\"><img alt=\"Discord\" src=\"https://img.shields.io/discord/1089800235347353640?logo=discord&logoColor=white&label=Discord&color=blue\"></a>\n",
"\n",
"Welcome to the Ultralytics YOLOv8 🚀 notebook! <a href=\"https://github.com/ultralytics/ultralytics\">YOLOv8</a> is the latest version of the YOLO (You Only Look Once) AI models developed by <a href=\"https://ultralytics.com\">Ultralytics</a>. This notebook serves as the starting point for exploring the various resources available to help you get started with YOLOv8 and understand its features and capabilities.\n",
"\n",
"YOLOv8 models are fast, accurate, and easy to use, making them ideal for various object detection and image segmentation tasks. They can be trained on large datasets and run on diverse hardware platforms, from CPUs to GPUs.\n",
"\n",
"We hope that the resources in this notebook will help you get the most out of YOLOv8. Please browse the YOLOv8 <a href=\"https://docs.ultralytics.com/guides/heatmaps\">Heatmap Docs</a> for details, raise an issue on <a href=\"https://github.com/ultralytics/ultralytics\">GitHub</a> for support, and join our <a href=\"https://ultralytics.com/discord\">Discord</a> community for questions and discussions!\n",
"\n",
"</div>"
],
"metadata": {
"id": "PN1cAxdvd61e"
}
},
{
"cell_type": "markdown",
"source": [
"# Setup\n",
"\n",
"Pip install `ultralytics` and [dependencies](https://github.com/ultralytics/ultralytics/blob/main/pyproject.toml) and check software and hardware.\n",
"\n",
"[![PyPI - Version](https://img.shields.io/pypi/v/ultralytics?logo=pypi&logoColor=white)](https://pypi.org/project/ultralytics/) [![Downloads](https://static.pepy.tech/badge/ultralytics)](https://pepy.tech/project/ultralytics) [![PyPI - Python Version](https://img.shields.io/pypi/pyversions/ultralytics?logo=python&logoColor=gold)](https://pypi.org/project/ultralytics/)"
],
"metadata": {
"id": "o68Sg1oOeZm2"
}
},
{
"cell_type": "code",
"execution_count": 1,
"metadata": {
"id": "9dSwz_uOReMI",
"colab": {
"base_uri": "https://localhost:8080/"
},
"outputId": "99866c77-e210-41e1-d581-8508371ce634"
},
"outputs": [
{
"output_type": "stream",
"name": "stdout",
"text": [
"Ultralytics YOLOv8.2.17 🚀 Python-3.10.12 torch-2.2.1+cu121 CUDA:0 (Tesla T4, 15102MiB)\n",
"Setup complete ✅ (2 CPUs, 12.7 GB RAM, 29.8/78.2 GB disk)\n"
]
}
],
"source": [
"%pip install ultralytics\n",
"import ultralytics\n",
"ultralytics.checks()"
]
},
{
"cell_type": "markdown",
"source": [
"# Introduction to Heatmaps\n",
"\n",
"A heatmap generated with [Ultralytics YOLOv8](https://github.com/ultralytics/ultralytics/) transforms complex data into a vibrant, color-coded matrix. This visual tool employs a spectrum of colors to represent varying data values, where warmer hues indicate higher intensities and cooler tones signify lower values. Heatmaps excel in visualizing intricate data patterns, correlations, and anomalies, offering an accessible and engaging approach to data interpretation across diverse domains.\n",
"\n",
"## Real World Applications\n",
"\n",
"| Transportation | Retail |\n",
"|:-----------------------------------------------------------------------------------------------------------------------------------------------:|:---------------------------------------------------------------------------------------------------------------------------------------:|\n",
"| ![Ultralytics YOLOv8 Transportation Heatmap](https://github.com/RizwanMunawar/ultralytics/assets/62513924/288d7053-622b-4452-b4e4-1f41aeb764aa) | ![Ultralytics YOLOv8 Retail Heatmap](https://github.com/RizwanMunawar/ultralytics/assets/62513924/edef75ad-50a7-4c0a-be4a-a66cdfc12802) |\n",
"| Ultralytics YOLOv8 Transportation Heatmap | Ultralytics YOLOv8 Retail Heatmap |\n"
],
"metadata": {
"id": "m7VkxQ2aeg7k"
}
},
{
"cell_type": "code",
"source": [
"import cv2\n",
"from ultralytics import YOLO, solutions\n",
"\n",
"# Load YOLO model\n",
"model = YOLO(\"yolov8n.pt\")\n",
"\n",
"# Open video file\n",
"cap = cv2.VideoCapture(\"path/to/video/file.mp4\")\n",
"assert cap.isOpened(), \"Error reading video file\"\n",
"\n",
"# Get video properties\n",
"w, h, fps = (int(cap.get(x)) for x in (cv2.CAP_PROP_FRAME_WIDTH, cv2.CAP_PROP_FRAME_HEIGHT, cv2.CAP_PROP_FPS))\n",
"\n",
"# Initialize video writer\n",
"video_writer = cv2.VideoWriter(\"heatmap_output.avi\", cv2.VideoWriter_fourcc(*\"mp4v\"), fps, (w, h))\n",
"\n",
"# Initialize heatmap object\n",
"heatmap_obj = solutions.Heatmap(\n",
" colormap=cv2.COLORMAP_PARULA,\n",
" view_img=True,\n",
" shape=\"circle\",\n",
" classes_names=model.names,\n",
")\n",
"\n",
"while cap.isOpened():\n",
" success, im0 = cap.read()\n",
" if not success:\n",
" print(\"Video frame is empty or video processing has been successfully completed.\")\n",
" break\n",
"\n",
" # Perform tracking on the current frame\n",
" tracks = model.track(im0, persist=True, show=False)\n",
"\n",
" # Generate heatmap on the frame\n",
" im0 = heatmap_obj.generate_heatmap(im0, tracks)\n",
"\n",
" # Write the frame to the output video\n",
" video_writer.write(im0)\n",
"\n",
"# Release resources\n",
"cap.release()\n",
"video_writer.release()\n",
"cv2.destroyAllWindows()"
],
"metadata": {
"id": "Cx-u59HQdu2o"
},
"execution_count": null,
"outputs": []
{
"cell_type": "markdown",
"metadata": {
"id": "o68Sg1oOeZm2"
},
"source": [
"# Setup\n",
"\n",
"Pip install `ultralytics` and [dependencies](https://github.com/ultralytics/ultralytics/blob/main/pyproject.toml) and check software and hardware.\n",
"\n",
"[![PyPI - Version](https://img.shields.io/pypi/v/ultralytics?logo=pypi&logoColor=white)](https://pypi.org/project/ultralytics/) [![Downloads](https://static.pepy.tech/badge/ultralytics)](https://pepy.tech/project/ultralytics) [![PyPI - Python Version](https://img.shields.io/pypi/pyversions/ultralytics?logo=python&logoColor=gold)](https://pypi.org/project/ultralytics/)"
]
},
{
"cell_type": "code",
"execution_count": 1,
"metadata": {
"colab": {
"base_uri": "https://localhost:8080/"
},
"id": "9dSwz_uOReMI",
"outputId": "99866c77-e210-41e1-d581-8508371ce634"
},
"outputs": [
{
"cell_type": "markdown",
"source": [
"# Additional Resources\n",
"\n",
"## Community Support\n",
"\n",
"For more information on using heatmaps with Ultralytics, you can explore the comprehensive [Ultralytics Heatmaps Docs](https://docs.ultralytics.com/guides/heatmaps/). This guide covers everything from basic concepts to advanced techniques, ensuring you get the most out of your heatmap visualizations.\n",
"\n",
"## Ultralytics ⚡ Resources\n",
"\n",
"At Ultralytics, we are committed to providing cutting-edge AI solutions. Here are some key resources to learn more about our company and get involved with our community:\n",
"\n",
"- [Ultralytics HUB](https://ultralytics.com/hub): Simplify your AI projects with Ultralytics HUB, our no-code tool for effortless YOLO training and deployment.\n",
"- [Ultralytics Licensing](https://ultralytics.com/license): Review our licensing terms to understand how you can use our software in your projects.\n",
"- [About Us](https://ultralytics.com/about): Discover our mission, vision, and the story behind Ultralytics.\n",
"- [Join Our Team](https://ultralytics.com/work): Explore career opportunities and join our team of talented professionals.\n",
"\n",
"## YOLOv8 🚀 Resources\n",
"\n",
"YOLOv8 is the latest evolution in the YOLO series, offering state-of-the-art performance in object detection and image segmentation. Here are some essential resources to help you get started with YOLOv8:\n",
"\n",
"- [GitHub](https://github.com/ultralytics/ultralytics): Access the YOLOv8 repository on GitHub, where you can find the source code, contribute to the project, and report issues.\n",
"- [Docs](https://docs.ultralytics.com/): Explore the official documentation for YOLOv8, including installation guides, tutorials, and detailed API references.\n",
"- [Discord](https://ultralytics.com/discord): Join our Discord community to connect with other users, share your projects, and get help from the Ultralytics team.\n",
"\n",
"These resources are designed to help you leverage the full potential of Ultralytics' offerings and YOLOv8. Whether you're a beginner or an experienced developer, you'll find the information and support you need to succeed."
],
"metadata": {
"id": "QrlKg-y3fEyD"
}
"name": "stdout",
"output_type": "stream",
"text": [
"Ultralytics YOLOv8.2.17 🚀 Python-3.10.12 torch-2.2.1+cu121 CUDA:0 (Tesla T4, 15102MiB)\n",
"Setup complete ✅ (2 CPUs, 12.7 GB RAM, 29.8/78.2 GB disk)\n"
]
}
]
],
"source": [
"%pip install ultralytics\n",
"import ultralytics\n",
"\n",
"ultralytics.checks()"
]
},
{
"cell_type": "markdown",
"metadata": {
"id": "m7VkxQ2aeg7k"
},
"source": [
"# Introduction to Heatmaps\n",
"\n",
"A heatmap generated with [Ultralytics YOLOv8](https://github.com/ultralytics/ultralytics/) transforms complex data into a vibrant, color-coded matrix. This visual tool employs a spectrum of colors to represent varying data values, where warmer hues indicate higher intensities and cooler tones signify lower values. Heatmaps excel in visualizing intricate data patterns, correlations, and anomalies, offering an accessible and engaging approach to data interpretation across diverse domains.\n",
"\n",
"## Real World Applications\n",
"\n",
"| Transportation | Retail |\n",
"|:-----------------------------------------------------------------------------------------------------------------------------------------------:|:---------------------------------------------------------------------------------------------------------------------------------------:|\n",
"| ![Ultralytics YOLOv8 Transportation Heatmap](https://github.com/RizwanMunawar/ultralytics/assets/62513924/288d7053-622b-4452-b4e4-1f41aeb764aa) | ![Ultralytics YOLOv8 Retail Heatmap](https://github.com/RizwanMunawar/ultralytics/assets/62513924/edef75ad-50a7-4c0a-be4a-a66cdfc12802) |\n",
"| Ultralytics YOLOv8 Transportation Heatmap | Ultralytics YOLOv8 Retail Heatmap |\n"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"id": "Cx-u59HQdu2o"
},
"outputs": [],
"source": [
"import cv2\n",
"\n",
"from ultralytics import YOLO, solutions\n",
"\n",
"# Load YOLO model\n",
"model = YOLO(\"yolov8n.pt\")\n",
"\n",
"# Open video file\n",
"cap = cv2.VideoCapture(\"path/to/video/file.mp4\")\n",
"assert cap.isOpened(), \"Error reading video file\"\n",
"\n",
"# Get video properties\n",
"w, h, fps = (int(cap.get(x)) for x in (cv2.CAP_PROP_FRAME_WIDTH, cv2.CAP_PROP_FRAME_HEIGHT, cv2.CAP_PROP_FPS))\n",
"\n",
"# Initialize video writer\n",
"video_writer = cv2.VideoWriter(\"heatmap_output.avi\", cv2.VideoWriter_fourcc(*\"mp4v\"), fps, (w, h))\n",
"\n",
"# Initialize heatmap object\n",
"heatmap_obj = solutions.Heatmap(\n",
" colormap=cv2.COLORMAP_PARULA,\n",
" view_img=True,\n",
" shape=\"circle\",\n",
" classes_names=model.names,\n",
")\n",
"\n",
"while cap.isOpened():\n",
" success, im0 = cap.read()\n",
" if not success:\n",
" print(\"Video frame is empty or video processing has been successfully completed.\")\n",
" break\n",
"\n",
" # Perform tracking on the current frame\n",
" tracks = model.track(im0, persist=True, show=False)\n",
"\n",
" # Generate heatmap on the frame\n",
" im0 = heatmap_obj.generate_heatmap(im0, tracks)\n",
"\n",
" # Write the frame to the output video\n",
" video_writer.write(im0)\n",
"\n",
"# Release resources\n",
"cap.release()\n",
"video_writer.release()\n",
"cv2.destroyAllWindows()"
]
},
{
"cell_type": "markdown",
"metadata": {
"id": "QrlKg-y3fEyD"
},
"source": [
"# Additional Resources\n",
"\n",
"## Community Support\n",
"\n",
"For more information on using heatmaps with Ultralytics, you can explore the comprehensive [Ultralytics Heatmaps Docs](https://docs.ultralytics.com/guides/heatmaps/). This guide covers everything from basic concepts to advanced techniques, ensuring you get the most out of your heatmap visualizations.\n",
"\n",
"## Ultralytics ⚡ Resources\n",
"\n",
"At Ultralytics, we are committed to providing cutting-edge AI solutions. Here are some key resources to learn more about our company and get involved with our community:\n",
"\n",
"- [Ultralytics HUB](https://ultralytics.com/hub): Simplify your AI projects with Ultralytics HUB, our no-code tool for effortless YOLO training and deployment.\n",
"- [Ultralytics Licensing](https://ultralytics.com/license): Review our licensing terms to understand how you can use our software in your projects.\n",
"- [About Us](https://ultralytics.com/about): Discover our mission, vision, and the story behind Ultralytics.\n",
"- [Join Our Team](https://ultralytics.com/work): Explore career opportunities and join our team of talented professionals.\n",
"\n",
"## YOLOv8 🚀 Resources\n",
"\n",
"YOLOv8 is the latest evolution in the YOLO series, offering state-of-the-art performance in object detection and image segmentation. Here are some essential resources to help you get started with YOLOv8:\n",
"\n",
"- [GitHub](https://github.com/ultralytics/ultralytics): Access the YOLOv8 repository on GitHub, where you can find the source code, contribute to the project, and report issues.\n",
"- [Docs](https://docs.ultralytics.com/): Explore the official documentation for YOLOv8, including installation guides, tutorials, and detailed API references.\n",
"- [Discord](https://ultralytics.com/discord): Join our Discord community to connect with other users, share your projects, and get help from the Ultralytics team.\n",
"\n",
"These resources are designed to help you leverage the full potential of Ultralytics' offerings and YOLOv8. Whether you're a beginner or an experienced developer, you'll find the information and support you need to succeed."
]
}
],
"metadata": {
"accelerator": "GPU",
"colab": {
"gpuType": "T4",
"provenance": [],
"toc_visible": true
},
"kernelspec": {
"display_name": "Python 3",
"name": "python3"
},
"language_info": {
"name": "python"
}
},
"nbformat": 4,
"nbformat_minor": 0
}

examples/hub.ipynb
@@ -1,113 +1,114 @@
{
"nbformat": 4,
"nbformat_minor": 0,
"metadata": {
"colab": {
"name": "Ultralytics HUB",
"provenance": []
},
"kernelspec": {
"name": "python3",
"display_name": "Python 3"
},
"language_info": {
"name": "python"
},
"accelerator": "GPU"
"cells": [
{
"cell_type": "markdown",
"metadata": {
"id": "FIzICjaph_Wy"
},
"source": [
"<a align=\"center\" href=\"https://ultralytics.com/hub\" target=\"_blank\">\n",
"<img width=\"1024\", src=\"https://github.com/ultralytics/assets/raw/main/im/ultralytics-hub.png\"></a>\n",
"\n",
"<div align=\"center\">\n",
"\n",
"[中文](https://docs.ultralytics.com/zh/hub/) | [한국어](https://docs.ultralytics.com/ko/hub/) | [日本語](https://docs.ultralytics.com/ja/hub/) | [Русский](https://docs.ultralytics.com/ru/hub/) | [Deutsch](https://docs.ultralytics.com/de/hub/) | [Français](https://docs.ultralytics.com/fr/hub/) | [Español](https://docs.ultralytics.com/es/hub/) | [Português](https://docs.ultralytics.com/pt/hub/) | [Türkçe](https://docs.ultralytics.com/tr/hub/) | [Tiếng Việt](https://docs.ultralytics.com/vi/hub/) | [हि](https://docs.ultralytics.com/hi/hub/) | [العربية](https://docs.ultralytics.com/ar/hub/)\n",
"\n",
" <a href=\"https://github.com/ultralytics/hub/actions/workflows/ci.yaml\">\n",
" <img src=\"https://github.com/ultralytics/hub/actions/workflows/ci.yaml/badge.svg\" alt=\"CI CPU\"></a>\n",
" <a href=\"https://colab.research.google.com/github/ultralytics/hub/blob/main/hub.ipynb\">\n",
" <img src=\"https://colab.research.google.com/assets/colab-badge.svg\" alt=\"Open In Colab\"></a>\n",
" <a href=\"https://ultralytics.com/discord\"><img alt=\"Discord\" src=\"https://img.shields.io/discord/1089800235347353640?logo=discord&logoColor=white&label=Discord&color=blue\"></a>\n",
"\n",
"Welcome to the [Ultralytics](https://ultralytics.com/) HUB notebook!\n",
"\n",
"This notebook allows you to train Ultralytics [YOLO](https://github.com/ultralytics/ultralytics) 🚀 models using [HUB](https://hub.ultralytics.com/). Please browse the HUB <a href=\"https://docs.ultralytics.com/hub/\">Docs</a> for details, raise an issue on <a href=\"https://github.com/ultralytics/hub/issues/new/choose\">GitHub</a> for support, and join our <a href=\"https://ultralytics.com/discord\">Discord</a> community for questions and discussions!\n",
"</div>"
]
},
"cells": [
{
"cell_type": "markdown",
"metadata": {
"id": "FIzICjaph_Wy"
},
"source": [
"<a align=\"center\" href=\"https://ultralytics.com/hub\" target=\"_blank\">\n",
"<img width=\"1024\", src=\"https://github.com/ultralytics/assets/raw/main/im/ultralytics-hub.png\"></a>\n",
"\n",
"<div align=\"center\">\n",
"\n",
"[中文](https://docs.ultralytics.com/zh/hub/) | [한국어](https://docs.ultralytics.com/ko/hub/) | [日本語](https://docs.ultralytics.com/ja/hub/) | [Русский](https://docs.ultralytics.com/ru/hub/) | [Deutsch](https://docs.ultralytics.com/de/hub/) | [Français](https://docs.ultralytics.com/fr/hub/) | [Español](https://docs.ultralytics.com/es/hub/) | [Português](https://docs.ultralytics.com/pt/hub/) | [Türkçe](https://docs.ultralytics.com/tr/hub/) | [Tiếng Việt](https://docs.ultralytics.com/vi/hub/) | [हि](https://docs.ultralytics.com/hi/hub/) | [العربية](https://docs.ultralytics.com/ar/hub/)\n",
"\n",
" <a href=\"https://github.com/ultralytics/hub/actions/workflows/ci.yaml\">\n",
" <img src=\"https://github.com/ultralytics/hub/actions/workflows/ci.yaml/badge.svg\" alt=\"CI CPU\"></a>\n",
" <a href=\"https://colab.research.google.com/github/ultralytics/hub/blob/main/hub.ipynb\">\n",
" <img src=\"https://colab.research.google.com/assets/colab-badge.svg\" alt=\"Open In Colab\"></a>\n",
" <a href=\"https://ultralytics.com/discord\"><img alt=\"Discord\" src=\"https://img.shields.io/discord/1089800235347353640?logo=discord&logoColor=white&label=Discord&color=blue\"></a>\n",
"\n",
"Welcome to the [Ultralytics](https://ultralytics.com/) HUB notebook!\n",
"\n",
"This notebook allows you to train Ultralytics [YOLO](https://github.com/ultralytics/ultralytics) 🚀 models using [HUB](https://hub.ultralytics.com/). Please browse the HUB <a href=\"https://docs.ultralytics.com/hub/\">Docs</a> for details, raise an issue on <a href=\"https://github.com/ultralytics/hub/issues/new/choose\">GitHub</a> for support, and join our <a href=\"https://ultralytics.com/discord\">Discord</a> community for questions and discussions!\n",
"</div>"
]
},
{
"cell_type": "markdown",
"metadata": {
"id": "eRQ2ow94MiOv"
},
"source": [
"# Setup\n",
"\n",
"Pip install `ultralytics` and [dependencies](https://github.com/ultralytics/ultralytics/blob/main/pyproject.toml) and check software and hardware.\n",
"\n",
"[![PyPI - Version](https://img.shields.io/pypi/v/ultralytics?logo=pypi&logoColor=white)](https://pypi.org/project/ultralytics/) [![Downloads](https://static.pepy.tech/badge/ultralytics)](https://pepy.tech/project/ultralytics) [![PyPI - Python Version](https://img.shields.io/pypi/pyversions/ultralytics?logo=python&logoColor=gold)](https://pypi.org/project/ultralytics/)"
]
},
{
"cell_type": "code",
"metadata": {
"id": "FyDnXd-n4c7Y",
"colab": {
"base_uri": "https://localhost:8080/"
},
"outputId": "e1d713ec-e8a6-4422-fe61-c76ec9f03df5"
},
"source": [
"%pip install ultralytics # install\n",
"from ultralytics import YOLO, checks, hub\n",
"checks() # checks"
],
"execution_count": 1,
"outputs": [
{
"output_type": "stream",
"name": "stdout",
"text": [
"Ultralytics YOLOv8.2.3 🚀 Python-3.10.12 torch-2.2.1+cu121 CUDA:0 (Tesla T4, 15102MiB)\n",
"Setup complete ✅ (2 CPUs, 12.7 GB RAM, 28.8/78.2 GB disk)\n"
]
}
]
},
{
"cell_type": "markdown",
"metadata": {
"id": "cQ9BwaAqxAm4"
},
"source": [
"# Start\n",
"\n",
"⚡ Login with your API key, load your YOLO 🚀 model and start training in 3 lines of code!"
]
{
"cell_type": "markdown",
"metadata": {
"id": "eRQ2ow94MiOv"
},
"source": [
"# Setup\n",
"\n",
"Pip install `ultralytics` and [dependencies](https://github.com/ultralytics/ultralytics/blob/main/pyproject.toml) and check software and hardware.\n",
"\n",
"[![PyPI - Version](https://img.shields.io/pypi/v/ultralytics?logo=pypi&logoColor=white)](https://pypi.org/project/ultralytics/) [![Downloads](https://static.pepy.tech/badge/ultralytics)](https://pepy.tech/project/ultralytics) [![PyPI - Python Version](https://img.shields.io/pypi/pyversions/ultralytics?logo=python&logoColor=gold)](https://pypi.org/project/ultralytics/)"
]
},
{
"cell_type": "code",
"execution_count": 1,
"metadata": {
"colab": {
"base_uri": "https://localhost:8080/"
},
"id": "FyDnXd-n4c7Y",
"outputId": "e1d713ec-e8a6-4422-fe61-c76ec9f03df5"
},
"outputs": [
{
"cell_type": "code",
"metadata": {
"id": "XSlZaJ9Iw_iZ"
},
"source": [
"# Log in to HUB using your API key (https://hub.ultralytics.com/settings?tab=api+keys)\n",
"hub.login('YOUR_API_KEY')\n",
"\n",
"# Load your model from HUB (replace 'YOUR_MODEL_ID' with your model ID)\n",
"model = YOLO('https://hub.ultralytics.com/models/YOUR_MODEL_ID')\n",
"\n",
"# Train the model\n",
"results = model.train()"
],
"execution_count": null,
"outputs": []
"name": "stdout",
"output_type": "stream",
"text": [
"Ultralytics YOLOv8.2.3 🚀 Python-3.10.12 torch-2.2.1+cu121 CUDA:0 (Tesla T4, 15102MiB)\n",
"Setup complete ✅ (2 CPUs, 12.7 GB RAM, 28.8/78.2 GB disk)\n"
]
}
]
],
"source": [
"%pip install ultralytics # install\n",
"from ultralytics import YOLO, checks, hub\n",
"\n",
"checks() # checks"
]
},
{
"cell_type": "markdown",
"metadata": {
"id": "cQ9BwaAqxAm4"
},
"source": [
"# Start\n",
"\n",
"⚡ Login with your API key, load your YOLO 🚀 model and start training in 3 lines of code!"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"id": "XSlZaJ9Iw_iZ"
},
"outputs": [],
"source": [
"# Log in to HUB using your API key (https://hub.ultralytics.com/settings?tab=api+keys)\n",
"hub.login(\"YOUR_API_KEY\")\n",
"\n",
"# Load your model from HUB (replace 'YOUR_MODEL_ID' with your model ID)\n",
"model = YOLO(\"https://hub.ultralytics.com/models/YOUR_MODEL_ID\")\n",
"\n",
"# Train the model\n",
"results = model.train()"
]
}
],
"metadata": {
"accelerator": "GPU",
"colab": {
"name": "Ultralytics HUB",
"provenance": []
},
"kernelspec": {
"display_name": "Python 3",
"name": "python3"
},
"language_info": {
"name": "python"
}
},
"nbformat": 4,
"nbformat_minor": 0
}

examples/object_counting.ipynb
@@ -1,208 +1,210 @@
{
"nbformat": 4,
"nbformat_minor": 0,
"metadata": {
"colab": {
"provenance": [],
"gpuType": "T4"
},
"kernelspec": {
"name": "python3",
"display_name": "Python 3"
},
"language_info": {
"name": "python"
},
"accelerator": "GPU"
"cells": [
{
"cell_type": "markdown",
"metadata": {
"id": "PN1cAxdvd61e"
},
"source": [
"<div align=\"center\">\n",
"\n",
" <a href=\"https://ultralytics.com/yolov8\" target=\"_blank\">\n",
" <img width=\"1024\", src=\"https://raw.githubusercontent.com/ultralytics/assets/main/yolov8/banner-yolov8.png\"></a>\n",
"\n",
" [中文](https://docs.ultralytics.com/zh/) | [한국어](https://docs.ultralytics.com/ko/) | [日本語](https://docs.ultralytics.com/ja/) | [Русский](https://docs.ultralytics.com/ru/) | [Deutsch](https://docs.ultralytics.com/de/) | [Français](https://docs.ultralytics.com/fr/) | [Español](https://docs.ultralytics.com/es/) | [Português](https://docs.ultralytics.com/pt/) | [Türkçe](https://docs.ultralytics.com/tr/) | [Tiếng Việt](https://docs.ultralytics.com/vi/) | [हि](https://docs.ultralytics.com/hi/) | [العربية](https://docs.ultralytics.com/ar/)\n",
"\n",
" <a href=\"https://github.com/ultralytics/ultralytics/actions/workflows/ci.yaml\"><img src=\"https://github.com/ultralytics/ultralytics/actions/workflows/ci.yaml/badge.svg\" alt=\"Ultralytics CI\"></a>\n",
" <a href=\"https://console.paperspace.com/github/ultralytics/ultralytics\"><img src=\"https://assets.paperspace.io/img/gradient-badge.svg\" alt=\"Run on Gradient\"/></a>\n",
" <a href=\"https://colab.research.google.com/github/ultralytics/ultralytics/blob/main/examples/object_counting.ipynb\"><img src=\"https://colab.research.google.com/assets/colab-badge.svg\" alt=\"Open In Colab\"></a>\n",
" <a href=\"https://www.kaggle.com/ultralytics/yolov8\"><img src=\"https://kaggle.com/static/images/open-in-kaggle.svg\" alt=\"Open In Kaggle\"></a>\n",
" <a href=\"https://ultralytics.com/discord\"><img alt=\"Discord\" src=\"https://img.shields.io/discord/1089800235347353640?logo=discord&logoColor=white&label=Discord&color=blue\"></a>\n",
"\n",
"Welcome to the Ultralytics YOLOv8 🚀 notebook! <a href=\"https://github.com/ultralytics/ultralytics\">YOLOv8</a> is the latest version of the YOLO (You Only Look Once) AI models developed by <a href=\"https://ultralytics.com\">Ultralytics</a>. This notebook serves as the starting point for exploring the various resources available to help you get started with YOLOv8 and understand its features and capabilities.\n",
"\n",
"YOLOv8 models are fast, accurate, and easy to use, making them ideal for various object detection and image segmentation tasks. They can be trained on large datasets and run on diverse hardware platforms, from CPUs to GPUs.\n",
"\n",
"We hope that the resources in this notebook will help you get the most out of YOLOv8. Please browse the YOLOv8 <a href=\"https://docs.ultralytics.com/guides/object-counting/\"> Object Counting Docs</a> for details, raise an issue on <a href=\"https://github.com/ultralytics/ultralytics\">GitHub</a> for support, and join our <a href=\"https://ultralytics.com/discord\">Discord</a> community for questions and discussions!\n",
"\n",
"</div>"
]
},
"cells": [
{
"cell_type": "markdown",
"source": [
"<div align=\"center\">\n",
"\n",
" <a href=\"https://ultralytics.com/yolov8\" target=\"_blank\">\n",
" <img width=\"1024\", src=\"https://raw.githubusercontent.com/ultralytics/assets/main/yolov8/banner-yolov8.png\"></a>\n",
"\n",
" [中文](https://docs.ultralytics.com/zh/) | [한국어](https://docs.ultralytics.com/ko/) | [日本語](https://docs.ultralytics.com/ja/) | [Русский](https://docs.ultralytics.com/ru/) | [Deutsch](https://docs.ultralytics.com/de/) | [Français](https://docs.ultralytics.com/fr/) | [Español](https://docs.ultralytics.com/es/) | [Português](https://docs.ultralytics.com/pt/) | [Türkçe](https://docs.ultralytics.com/tr/) | [Tiếng Việt](https://docs.ultralytics.com/vi/) | [हि](https://docs.ultralytics.com/hi/) | [العربية](https://docs.ultralytics.com/ar/)\n",
"\n",
" <a href=\"https://github.com/ultralytics/ultralytics/actions/workflows/ci.yaml\"><img src=\"https://github.com/ultralytics/ultralytics/actions/workflows/ci.yaml/badge.svg\" alt=\"Ultralytics CI\"></a>\n",
" <a href=\"https://console.paperspace.com/github/ultralytics/ultralytics\"><img src=\"https://assets.paperspace.io/img/gradient-badge.svg\" alt=\"Run on Gradient\"/></a>\n",
" <a href=\"https://colab.research.google.com/github/ultralytics/ultralytics/blob/main/examples/object_counting.ipynb\"><img src=\"https://colab.research.google.com/assets/colab-badge.svg\" alt=\"Open In Colab\"></a>\n",
" <a href=\"https://www.kaggle.com/ultralytics/yolov8\"><img src=\"https://kaggle.com/static/images/open-in-kaggle.svg\" alt=\"Open In Kaggle\"></a>\n",
" <a href=\"https://ultralytics.com/discord\"><img alt=\"Discord\" src=\"https://img.shields.io/discord/1089800235347353640?logo=discord&logoColor=white&label=Discord&color=blue\"></a>\n",
"\n",
"Welcome to the Ultralytics YOLOv8 🚀 notebook! <a href=\"https://github.com/ultralytics/ultralytics\">YOLOv8</a> is the latest version of the YOLO (You Only Look Once) AI models developed by <a href=\"https://ultralytics.com\">Ultralytics</a>. This notebook serves as the starting point for exploring the various resources available to help you get started with YOLOv8 and understand its features and capabilities.\n",
"\n",
"YOLOv8 models are fast, accurate, and easy to use, making them ideal for various object detection and image segmentation tasks. They can be trained on large datasets and run on diverse hardware platforms, from CPUs to GPUs.\n",
"\n",
"We hope that the resources in this notebook will help you get the most out of YOLOv8. Please browse the YOLOv8 <a href=\"https://docs.ultralytics.com/guides/object-counting/\"> Object Counting Docs</a> for details, raise an issue on <a href=\"https://github.com/ultralytics/ultralytics\">GitHub</a> for support, and join our <a href=\"https://ultralytics.com/discord\">Discord</a> community for questions and discussions!\n",
"\n",
"</div>"
],
"metadata": {
"id": "PN1cAxdvd61e"
}
},
{
"cell_type": "markdown",
"source": [
"# Setup\n",
"\n",
"Pip install `ultralytics` and [dependencies](https://github.com/ultralytics/ultralytics/blob/main/pyproject.toml) and check software and hardware.\n",
"\n",
"[![PyPI - Version](https://img.shields.io/pypi/v/ultralytics?logo=pypi&logoColor=white)](https://pypi.org/project/ultralytics/) [![Downloads](https://static.pepy.tech/badge/ultralytics)](https://pepy.tech/project/ultralytics) [![PyPI - Python Version](https://img.shields.io/pypi/pyversions/ultralytics?logo=python&logoColor=gold)](https://pypi.org/project/ultralytics/)"
],
"metadata": {
"id": "o68Sg1oOeZm2"
}
},
{
"cell_type": "code",
"execution_count": 1,
"metadata": {
"id": "9dSwz_uOReMI",
"outputId": "fd3bab88-2f25-46c0-cae9-04d2beedc0c1",
"colab": {
"base_uri": "https://localhost:8080/"
}
},
"outputs": [
{
"output_type": "stream",
"name": "stdout",
"text": [
"Ultralytics YOLOv8.2.18 🚀 Python-3.10.12 torch-2.2.1+cu121 CUDA:0 (Tesla T4, 15102MiB)\n",
"Setup complete ✅ (2 CPUs, 12.7 GB RAM, 29.8/78.2 GB disk)\n"
]
}
],
"source": [
"%pip install ultralytics\n",
"import ultralytics\n",
"ultralytics.checks()"
]
},
{
"cell_type": "markdown",
"source": [
"# Object Counting using Ultralytics YOLOv8 🚀\n",
"\n",
"## What is Object Counting?\n",
"\n",
"Object counting with [Ultralytics YOLOv8](https://github.com/ultralytics/ultralytics/) involves accurate identification and counting of specific objects in videos and camera streams. YOLOv8 excels in real-time applications, providing efficient and precise object counting for various scenarios like crowd analysis and surveillance, thanks to its state-of-the-art algorithms and deep learning capabilities.\n",
"\n",
"## Advantages of Object Counting?\n",
"\n",
"- **Resource Optimization:** Object counting facilitates efficient resource management by providing accurate counts, and optimizing resource allocation in applications like inventory management.\n",
"- **Enhanced Security:** Object counting enhances security and surveillance by accurately tracking and counting entities, aiding in proactive threat detection.\n",
"- **Informed Decision-Making:** Object counting offers valuable insights for decision-making, optimizing processes in retail, traffic management, and various other domains.\n",
"\n",
"## Real World Applications\n",
"\n",
"| Logistics | Aquaculture |\n",
"|:-------------------------------------------------------------------------------------------------------------------------------------------------------------:|:---------------------------------------------------------------------------------------------------------------------------------------------------:|\n",
"| ![Conveyor Belt Packets Counting Using Ultralytics YOLOv8](https://github.com/RizwanMunawar/ultralytics/assets/62513924/70e2d106-510c-4c6c-a57a-d34a765aa757) | ![Fish Counting in Sea using Ultralytics YOLOv8](https://github.com/RizwanMunawar/ultralytics/assets/62513924/c60d047b-3837-435f-8d29-bb9fc95d2191) |\n",
"| Conveyor Belt Packets Counting Using Ultralytics YOLOv8 | Fish Counting in Sea using Ultralytics YOLOv8 |\n"
],
"metadata": {
"id": "m7VkxQ2aeg7k"
}
},
{
"cell_type": "code",
"source": [
"import cv2\n",
"from ultralytics import YOLO, solutions\n",
"\n",
"# Load the pre-trained YOLOv8 model\n",
"model = YOLO(\"yolov8n.pt\")\n",
"\n",
"# Open the video file\n",
"cap = cv2.VideoCapture(\"path/to/video/file.mp4\")\n",
"assert cap.isOpened(), \"Error reading video file\"\n",
"\n",
"# Get video properties: width, height, and frames per second (fps)\n",
"w, h, fps = (int(cap.get(x)) for x in (cv2.CAP_PROP_FRAME_WIDTH, cv2.CAP_PROP_FRAME_HEIGHT, cv2.CAP_PROP_FPS))\n",
"\n",
"# Define points for a line or region of interest in the video frame\n",
"line_points = [(20, 400), (1080, 400)] # Line coordinates\n",
"\n",
"# Specify classes to count, for example: person (0) and car (2)\n",
"classes_to_count = [0, 2] # Class IDs for person and car\n",
"\n",
"# Initialize the video writer to save the output video\n",
"video_writer = cv2.VideoWriter(\"object_counting_output.avi\", cv2.VideoWriter_fourcc(*\"mp4v\"), fps, (w, h))\n",
"\n",
"# Initialize the Object Counter with visualization options and other parameters\n",
"counter = solutions.ObjectCounter(\n",
" view_img=True, # Display the image during processing\n",
" reg_pts=line_points, # Region of interest points\n",
" classes_names=model.names, # Class names from the YOLO model\n",
" draw_tracks=True, # Draw tracking lines for objects\n",
" line_thickness=2, # Thickness of the lines drawn\n",
")\n",
"\n",
"# Process video frames in a loop\n",
"while cap.isOpened():\n",
" success, im0 = cap.read()\n",
" if not success:\n",
" print(\"Video frame is empty or video processing has been successfully completed.\")\n",
" break\n",
"\n",
" # Perform object tracking on the current frame, filtering by specified classes\n",
" tracks = model.track(im0, persist=True, show=False, classes=classes_to_count)\n",
"\n",
" # Use the Object Counter to count objects in the frame and get the annotated image\n",
" im0 = counter.start_counting(im0, tracks)\n",
"\n",
" # Write the annotated frame to the output video\n",
" video_writer.write(im0)\n",
"\n",
"# Release the video capture and writer objects\n",
"cap.release()\n",
"video_writer.release()\n",
"\n",
"# Close all OpenCV windows\n",
"cv2.destroyAllWindows()"
],
"metadata": {
"id": "Cx-u59HQdu2o"
},
"execution_count": null,
"outputs": []
{
"cell_type": "markdown",
"metadata": {
"id": "o68Sg1oOeZm2"
},
"source": [
"# Setup\n",
"\n",
"Pip install `ultralytics` and [dependencies](https://github.com/ultralytics/ultralytics/blob/main/pyproject.toml) and check software and hardware.\n",
"\n",
"[![PyPI - Version](https://img.shields.io/pypi/v/ultralytics?logo=pypi&logoColor=white)](https://pypi.org/project/ultralytics/) [![Downloads](https://static.pepy.tech/badge/ultralytics)](https://pepy.tech/project/ultralytics) [![PyPI - Python Version](https://img.shields.io/pypi/pyversions/ultralytics?logo=python&logoColor=gold)](https://pypi.org/project/ultralytics/)"
]
},
{
"cell_type": "code",
"execution_count": 1,
"metadata": {
"colab": {
"base_uri": "https://localhost:8080/"
},
"id": "9dSwz_uOReMI",
"outputId": "fd3bab88-2f25-46c0-cae9-04d2beedc0c1"
},
"outputs": [
{
"cell_type": "markdown",
"source": [
"# Additional Resources\n",
"\n",
"## Community Support\n",
"\n",
"For more information on counting objects with Ultralytics, you can explore the comprehensive [Ultralytics Object Counting Docs](https://docs.ultralytics.com/guides/object-counting/). This guide covers everything from basic concepts to advanced techniques, ensuring you get the most out of counting and visualization.\n",
"\n",
"## Ultralytics ⚡ Resources\n",
"\n",
"At Ultralytics, we are committed to providing cutting-edge AI solutions. Here are some key resources to learn more about our company and get involved with our community:\n",
"\n",
"- [Ultralytics HUB](https://ultralytics.com/hub): Simplify your AI projects with Ultralytics HUB, our no-code tool for effortless YOLO training and deployment.\n",
"- [Ultralytics Licensing](https://ultralytics.com/license): Review our licensing terms to understand how you can use our software in your projects.\n",
"- [About Us](https://ultralytics.com/about): Discover our mission, vision, and the story behind Ultralytics.\n",
"- [Join Our Team](https://ultralytics.com/work): Explore career opportunities and join our team of talented professionals.\n",
"\n",
"## YOLOv8 🚀 Resources\n",
"\n",
"YOLOv8 is the latest evolution in the YOLO series, offering state-of-the-art performance in object detection and image segmentation. Here are some essential resources to help you get started with YOLOv8:\n",
"\n",
"- [GitHub](https://github.com/ultralytics/ultralytics): Access the YOLOv8 repository on GitHub, where you can find the source code, contribute to the project, and report issues.\n",
"- [Docs](https://docs.ultralytics.com/): Explore the official documentation for YOLOv8, including installation guides, tutorials, and detailed API references.\n",
"- [Discord](https://ultralytics.com/discord): Join our Discord community to connect with other users, share your projects, and get help from the Ultralytics team.\n",
"\n",
"These resources are designed to help you leverage the full potential of Ultralytics' offerings and YOLOv8. Whether you're a beginner or an experienced developer, you'll find the information and support you need to succeed."
],
"metadata": {
"id": "QrlKg-y3fEyD"
}
"name": "stdout",
"output_type": "stream",
"text": [
"Ultralytics YOLOv8.2.18 🚀 Python-3.10.12 torch-2.2.1+cu121 CUDA:0 (Tesla T4, 15102MiB)\n",
"Setup complete ✅ (2 CPUs, 12.7 GB RAM, 29.8/78.2 GB disk)\n"
]
}
]
],
"source": [
"%pip install ultralytics\n",
"import ultralytics\n",
"\n",
"ultralytics.checks()"
]
},
{
"cell_type": "markdown",
"metadata": {
"id": "m7VkxQ2aeg7k"
},
"source": [
"# Object Counting using Ultralytics YOLOv8 🚀\n",
"\n",
"## What is Object Counting?\n",
"\n",
"Object counting with [Ultralytics YOLOv8](https://github.com/ultralytics/ultralytics/) involves accurate identification and counting of specific objects in videos and camera streams. YOLOv8 excels in real-time applications, providing efficient and precise object counting for various scenarios like crowd analysis and surveillance, thanks to its state-of-the-art algorithms and deep learning capabilities.\n",
"\n",
"## Advantages of Object Counting?\n",
"\n",
"- **Resource Optimization:** Object counting facilitates efficient resource management by providing accurate counts, and optimizing resource allocation in applications like inventory management.\n",
"- **Enhanced Security:** Object counting enhances security and surveillance by accurately tracking and counting entities, aiding in proactive threat detection.\n",
"- **Informed Decision-Making:** Object counting offers valuable insights for decision-making, optimizing processes in retail, traffic management, and various other domains.\n",
"\n",
"## Real World Applications\n",
"\n",
"| Logistics | Aquaculture |\n",
"|:-------------------------------------------------------------------------------------------------------------------------------------------------------------:|:---------------------------------------------------------------------------------------------------------------------------------------------------:|\n",
"| ![Conveyor Belt Packets Counting Using Ultralytics YOLOv8](https://github.com/RizwanMunawar/ultralytics/assets/62513924/70e2d106-510c-4c6c-a57a-d34a765aa757) | ![Fish Counting in Sea using Ultralytics YOLOv8](https://github.com/RizwanMunawar/ultralytics/assets/62513924/c60d047b-3837-435f-8d29-bb9fc95d2191) |\n",
"| Conveyor Belt Packets Counting Using Ultralytics YOLOv8 | Fish Counting in Sea using Ultralytics YOLOv8 |\n"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"id": "Cx-u59HQdu2o"
},
"outputs": [],
"source": [
"import cv2\n",
"\n",
"from ultralytics import YOLO, solutions\n",
"\n",
"# Load the pre-trained YOLOv8 model\n",
"model = YOLO(\"yolov8n.pt\")\n",
"\n",
"# Open the video file\n",
"cap = cv2.VideoCapture(\"path/to/video/file.mp4\")\n",
"assert cap.isOpened(), \"Error reading video file\"\n",
"\n",
"# Get video properties: width, height, and frames per second (fps)\n",
"w, h, fps = (int(cap.get(x)) for x in (cv2.CAP_PROP_FRAME_WIDTH, cv2.CAP_PROP_FRAME_HEIGHT, cv2.CAP_PROP_FPS))\n",
"\n",
"# Define points for a line or region of interest in the video frame\n",
"line_points = [(20, 400), (1080, 400)] # Line coordinates\n",
"\n",
"# Specify classes to count, for example: person (0) and car (2)\n",
"classes_to_count = [0, 2] # Class IDs for person and car\n",
"\n",
"# Initialize the video writer to save the output video\n",
"video_writer = cv2.VideoWriter(\"object_counting_output.avi\", cv2.VideoWriter_fourcc(*\"mp4v\"), fps, (w, h))\n",
"\n",
"# Initialize the Object Counter with visualization options and other parameters\n",
"counter = solutions.ObjectCounter(\n",
" view_img=True, # Display the image during processing\n",
" reg_pts=line_points, # Region of interest points\n",
" classes_names=model.names, # Class names from the YOLO model\n",
" draw_tracks=True, # Draw tracking lines for objects\n",
" line_thickness=2, # Thickness of the lines drawn\n",
")\n",
"\n",
"# Process video frames in a loop\n",
"while cap.isOpened():\n",
" success, im0 = cap.read()\n",
" if not success:\n",
" print(\"Video frame is empty or video processing has been successfully completed.\")\n",
" break\n",
"\n",
" # Perform object tracking on the current frame, filtering by specified classes\n",
" tracks = model.track(im0, persist=True, show=False, classes=classes_to_count)\n",
"\n",
" # Use the Object Counter to count objects in the frame and get the annotated image\n",
" im0 = counter.start_counting(im0, tracks)\n",
"\n",
" # Write the annotated frame to the output video\n",
" video_writer.write(im0)\n",
"\n",
"# Release the video capture and writer objects\n",
"cap.release()\n",
"video_writer.release()\n",
"\n",
"# Close all OpenCV windows\n",
"cv2.destroyAllWindows()"
]
},
{
"cell_type": "markdown",
"metadata": {
"id": "QrlKg-y3fEyD"
},
"source": [
"# Additional Resources\n",
"\n",
"## Community Support\n",
"\n",
"For more information on counting objects with Ultralytics, you can explore the comprehensive [Ultralytics Object Counting Docs](https://docs.ultralytics.com/guides/object-counting/). This guide covers everything from basic concepts to advanced techniques, ensuring you get the most out of counting and visualization.\n",
"\n",
"## Ultralytics ⚡ Resources\n",
"\n",
"At Ultralytics, we are committed to providing cutting-edge AI solutions. Here are some key resources to learn more about our company and get involved with our community:\n",
"\n",
"- [Ultralytics HUB](https://ultralytics.com/hub): Simplify your AI projects with Ultralytics HUB, our no-code tool for effortless YOLO training and deployment.\n",
"- [Ultralytics Licensing](https://ultralytics.com/license): Review our licensing terms to understand how you can use our software in your projects.\n",
"- [About Us](https://ultralytics.com/about): Discover our mission, vision, and the story behind Ultralytics.\n",
"- [Join Our Team](https://ultralytics.com/work): Explore career opportunities and join our team of talented professionals.\n",
"\n",
"## YOLOv8 🚀 Resources\n",
"\n",
"YOLOv8 is the latest evolution in the YOLO series, offering state-of-the-art performance in object detection and image segmentation. Here are some essential resources to help you get started with YOLOv8:\n",
"\n",
"- [GitHub](https://github.com/ultralytics/ultralytics): Access the YOLOv8 repository on GitHub, where you can find the source code, contribute to the project, and report issues.\n",
"- [Docs](https://docs.ultralytics.com/): Explore the official documentation for YOLOv8, including installation guides, tutorials, and detailed API references.\n",
"- [Discord](https://ultralytics.com/discord): Join our Discord community to connect with other users, share your projects, and get help from the Ultralytics team.\n",
"\n",
"These resources are designed to help you leverage the full potential of Ultralytics' offerings and YOLOv8. Whether you're a beginner or an experienced developer, you'll find the information and support you need to succeed."
]
}
],
"metadata": {
"accelerator": "GPU",
"colab": {
"gpuType": "T4",
"provenance": []
},
"kernelspec": {
"display_name": "Python 3",
"name": "python3"
},
"language_info": {
"name": "python"
}
},
"nbformat": 4,
"nbformat_minor": 0
}

examples/object_tracking.ipynb
@@ -1,243 +1,245 @@
{
"nbformat": 4,
"nbformat_minor": 0,
"metadata": {
"colab": {
"provenance": [],
"gpuType": "T4"
},
"kernelspec": {
"name": "python3",
"display_name": "Python 3"
},
"language_info": {
"name": "python"
},
"accelerator": "GPU"
"cells": [
{
"cell_type": "markdown",
"metadata": {
"id": "PN1cAxdvd61e"
},
"source": [
"<div align=\"center\">\n",
"\n",
" <a href=\"https://ultralytics.com/yolov8\" target=\"_blank\">\n",
" <img width=\"1024\", src=\"https://raw.githubusercontent.com/ultralytics/assets/main/yolov8/banner-yolov8.png\"></a>\n",
"\n",
" [中文](https://docs.ultralytics.com/zh/) | [한국어](https://docs.ultralytics.com/ko/) | [日本語](https://docs.ultralytics.com/ja/) | [Русский](https://docs.ultralytics.com/ru/) | [Deutsch](https://docs.ultralytics.com/de/) | [Français](https://docs.ultralytics.com/fr/) | [Español](https://docs.ultralytics.com/es/) | [Português](https://docs.ultralytics.com/pt/) | [Türkçe](https://docs.ultralytics.com/tr/) | [Tiếng Việt](https://docs.ultralytics.com/vi/) | [हि](https://docs.ultralytics.com/hi/) | [العربية](https://docs.ultralytics.com/ar/)\n",
"\n",
" <a href=\"https://github.com/ultralytics/ultralytics/actions/workflows/ci.yaml\"><img src=\"https://github.com/ultralytics/ultralytics/actions/workflows/ci.yaml/badge.svg\" alt=\"Ultralytics CI\"></a>\n",
" <a href=\"https://console.paperspace.com/github/ultralytics/ultralytics\"><img src=\"https://assets.paperspace.io/img/gradient-badge.svg\" alt=\"Run on Gradient\"/></a>\n",
" <a href=\"https://colab.research.google.com/github/ultralytics/ultralytics/blob/main/examples/object_tracking.ipynb\"><img src=\"https://colab.research.google.com/assets/colab-badge.svg\" alt=\"Open In Colab\"></a>\n",
" <a href=\"https://www.kaggle.com/ultralytics/yolov8\"><img src=\"https://kaggle.com/static/images/open-in-kaggle.svg\" alt=\"Open In Kaggle\"></a>\n",
" <a href=\"https://ultralytics.com/discord\"><img alt=\"Discord\" src=\"https://img.shields.io/discord/1089800235347353640?logo=discord&logoColor=white&label=Discord&color=blue\"></a>\n",
"\n",
"Welcome to the Ultralytics YOLOv8 🚀 notebook! <a href=\"https://github.com/ultralytics/ultralytics\">YOLOv8</a> is the latest version of the YOLO (You Only Look Once) AI models developed by <a href=\"https://ultralytics.com\">Ultralytics</a>. This notebook serves as the starting point for exploring the various resources available to help you get started with YOLOv8 and understand its features and capabilities.\n",
"\n",
"YOLOv8 models are fast, accurate, and easy to use, making them ideal for various object detection and image segmentation tasks. They can be trained on large datasets and run on diverse hardware platforms, from CPUs to GPUs.\n",
"\n",
"We hope that the resources in this notebook will help you get the most out of YOLOv8. Please browse the YOLOv8 <a href=\"https://docs.ultralytics.com/modes/track/\"> Tracking Docs</a> for details, raise an issue on <a href=\"https://github.com/ultralytics/ultralytics\">GitHub</a> for support, and join our <a href=\"https://ultralytics.com/discord\">Discord</a> community for questions and discussions!\n",
"\n",
"</div>"
]
},
"cells": [
{
"cell_type": "markdown",
"source": [
"<div align=\"center\">\n",
"\n",
" <a href=\"https://ultralytics.com/yolov8\" target=\"_blank\">\n",
" <img width=\"1024\", src=\"https://raw.githubusercontent.com/ultralytics/assets/main/yolov8/banner-yolov8.png\"></a>\n",
"\n",
" [中文](https://docs.ultralytics.com/zh/) | [한국어](https://docs.ultralytics.com/ko/) | [日本語](https://docs.ultralytics.com/ja/) | [Русский](https://docs.ultralytics.com/ru/) | [Deutsch](https://docs.ultralytics.com/de/) | [Français](https://docs.ultralytics.com/fr/) | [Español](https://docs.ultralytics.com/es/) | [Português](https://docs.ultralytics.com/pt/) | [Türkçe](https://docs.ultralytics.com/tr/) | [Tiếng Việt](https://docs.ultralytics.com/vi/) | [हि](https://docs.ultralytics.com/hi/) | [العربية](https://docs.ultralytics.com/ar/)\n",
"\n",
" <a href=\"https://github.com/ultralytics/ultralytics/actions/workflows/ci.yaml\"><img src=\"https://github.com/ultralytics/ultralytics/actions/workflows/ci.yaml/badge.svg\" alt=\"Ultralytics CI\"></a>\n",
" <a href=\"https://console.paperspace.com/github/ultralytics/ultralytics\"><img src=\"https://assets.paperspace.io/img/gradient-badge.svg\" alt=\"Run on Gradient\"/></a>\n",
" <a href=\"https://colab.research.google.com/github/ultralytics/ultralytics/blob/main/examples/object_tracking.ipynb\"><img src=\"https://colab.research.google.com/assets/colab-badge.svg\" alt=\"Open In Colab\"></a>\n",
" <a href=\"https://www.kaggle.com/ultralytics/yolov8\"><img src=\"https://kaggle.com/static/images/open-in-kaggle.svg\" alt=\"Open In Kaggle\"></a>\n",
" <a href=\"https://ultralytics.com/discord\"><img alt=\"Discord\" src=\"https://img.shields.io/discord/1089800235347353640?logo=discord&logoColor=white&label=Discord&color=blue\"></a>\n",
"\n",
"Welcome to the Ultralytics YOLOv8 🚀 notebook! <a href=\"https://github.com/ultralytics/ultralytics\">YOLOv8</a> is the latest version of the YOLO (You Only Look Once) AI models developed by <a href=\"https://ultralytics.com\">Ultralytics</a>. This notebook serves as the starting point for exploring the various resources available to help you get started with YOLOv8 and understand its features and capabilities.\n",
"\n",
"YOLOv8 models are fast, accurate, and easy to use, making them ideal for various object detection and image segmentation tasks. They can be trained on large datasets and run on diverse hardware platforms, from CPUs to GPUs.\n",
"\n",
"We hope that the resources in this notebook will help you get the most out of YOLOv8. Please browse the YOLOv8 <a href=\"https://docs.ultralytics.com/modes/track/\"> Tracking Docs</a> for details, raise an issue on <a href=\"https://github.com/ultralytics/ultralytics\">GitHub</a> for support, and join our <a href=\"https://ultralytics.com/discord\">Discord</a> community for questions and discussions!\n",
"\n",
"</div>"
],
"metadata": {
"id": "PN1cAxdvd61e"
}
},
{
"cell_type": "markdown",
"source": [
"# Setup\n",
"\n",
"Pip install `ultralytics` and [dependencies](https://github.com/ultralytics/ultralytics/blob/main/pyproject.toml) and check software and hardware.\n",
"\n",
"[![PyPI - Version](https://img.shields.io/pypi/v/ultralytics?logo=pypi&logoColor=white)](https://pypi.org/project/ultralytics/) [![Downloads](https://static.pepy.tech/badge/ultralytics)](https://pepy.tech/project/ultralytics) [![PyPI - Python Version](https://img.shields.io/pypi/pyversions/ultralytics?logo=python&logoColor=gold)](https://pypi.org/project/ultralytics/)"
],
"metadata": {
"id": "o68Sg1oOeZm2"
}
},
{
"cell_type": "code",
"execution_count": 1,
"metadata": {
"id": "9dSwz_uOReMI",
"colab": {
"base_uri": "https://localhost:8080/"
},
"outputId": "ed8c2370-8fc7-4e4e-f669-d0bae4d944e9"
},
"outputs": [
{
"output_type": "stream",
"name": "stdout",
"text": [
"Ultralytics YOLOv8.2.17 🚀 Python-3.10.12 torch-2.2.1+cu121 CUDA:0 (Tesla T4, 15102MiB)\n",
"Setup complete ✅ (2 CPUs, 12.7 GB RAM, 29.8/78.2 GB disk)\n"
]
}
],
"source": [
"%pip install ultralytics\n",
"import ultralytics\n",
"ultralytics.checks()"
]
},
{
"cell_type": "markdown",
"source": [
"# Ultralytics Object Tracking\n",
"\n",
"[Ultralytics YOLOv8](https://github.com/ultralytics/ultralytics/) instance segmentation involves identifying and outlining individual objects in an image, providing a detailed understanding of spatial distribution. Unlike semantic segmentation, it uniquely labels and precisely delineates each object, crucial for tasks like object detection and medical imaging.\n",
"\n",
"There are two types of instance segmentation tracking available in the Ultralytics package:\n",
"\n",
"- **Instance Segmentation with Class Objects:** Each class object is assigned a unique color for clear visual separation.\n",
"\n",
"- **Instance Segmentation with Object Tracks:** Every track is represented by a distinct color, facilitating easy identification and tracking.\n",
"\n",
"## Samples\n",
"\n",
"| Instance Segmentation | Instance Segmentation + Object Tracking |\n",
"|:---------------------------------------------------------------------------------------------------------------------------------------:|:------------------------------------------------------------------------------------------------------------------------------------------------------------:|\n",
"| ![Ultralytics Instance Segmentation](https://github.com/RizwanMunawar/ultralytics/assets/62513924/d4ad3499-1f33-4871-8fbc-1be0b2643aa2) | ![Ultralytics Instance Segmentation with Object Tracking](https://github.com/RizwanMunawar/ultralytics/assets/62513924/2e5c38cc-fd5c-4145-9682-fa94ae2010a0) |\n",
"| Ultralytics Instance Segmentation 😍 | Ultralytics Instance Segmentation with Object Tracking 🔥 |"
],
"metadata": {
"id": "m7VkxQ2aeg7k"
}
},
{
"cell_type": "markdown",
"source": [
"## CLI\n",
"\n",
"Command-Line Interface (CLI) example."
],
"metadata": {
"id": "-ZF9DM6e6gz0"
}
},
{
"cell_type": "code",
"source": [
"!yolo track source=\"/path/to/video/file.mp4\" save=True"
],
"metadata": {
"id": "-XJqhOwo6iqT"
},
"execution_count": null,
"outputs": []
},
{
"cell_type": "markdown",
"source": [
"## Python\n",
"\n",
"Python Instance Segmentation and Object tracking example."
],
"metadata": {
"id": "XRcw0vIE6oNb"
}
},
{
"cell_type": "code",
"source": [
"from collections import defaultdict\n",
"\n",
"import cv2\n",
"from ultralytics import YOLO\n",
"from ultralytics.utils.plotting import Annotator, colors\n",
"\n",
"# Dictionary to store tracking history with default empty lists\n",
"track_history = defaultdict(lambda: [])\n",
"\n",
"# Load the YOLO model with segmentation capabilities\n",
"model = YOLO(\"yolov8n-seg.pt\")\n",
"\n",
"# Open the video file\n",
"cap = cv2.VideoCapture(\"path/to/video/file.mp4\")\n",
"\n",
"# Retrieve video properties: width, height, and frames per second\n",
"w, h, fps = (int(cap.get(x)) for x in (cv2.CAP_PROP_FRAME_WIDTH, cv2.CAP_PROP_FRAME_HEIGHT, cv2.CAP_PROP_FPS))\n",
"\n",
"# Initialize video writer to save the output video with the specified properties\n",
"out = cv2.VideoWriter(\"instance-segmentation-object-tracking.avi\", cv2.VideoWriter_fourcc(*\"MJPG\"), fps, (w, h))\n",
"\n",
"while True:\n",
" # Read a frame from the video\n",
" ret, im0 = cap.read()\n",
" if not ret:\n",
" print(\"Video frame is empty or video processing has been successfully completed.\")\n",
" break\n",
"\n",
" # Create an annotator object to draw on the frame\n",
" annotator = Annotator(im0, line_width=2)\n",
"\n",
" # Perform object tracking on the current frame\n",
" results = model.track(im0, persist=True)\n",
"\n",
" # Check if tracking IDs and masks are present in the results\n",
" if results[0].boxes.id is not None and results[0].masks is not None:\n",
" # Extract masks and tracking IDs\n",
" masks = results[0].masks.xy\n",
" track_ids = results[0].boxes.id.int().cpu().tolist()\n",
"\n",
" # Annotate each mask with its corresponding tracking ID and color\n",
" for mask, track_id in zip(masks, track_ids):\n",
" annotator.seg_bbox(mask=mask, mask_color=colors(track_id, True), track_label=str(track_id))\n",
"\n",
" # Write the annotated frame to the output video\n",
" out.write(im0)\n",
" # Display the annotated frame\n",
" cv2.imshow(\"instance-segmentation-object-tracking\", im0)\n",
"\n",
" # Exit the loop if 'q' is pressed\n",
" if cv2.waitKey(1) & 0xFF == ord(\"q\"):\n",
" break\n",
"\n",
"# Release the video writer and capture objects, and close all OpenCV windows\n",
"out.release()\n",
"cap.release()\n",
"cv2.destroyAllWindows()"
],
"metadata": {
"id": "Cx-u59HQdu2o"
},
"execution_count": null,
"outputs": []
{
"cell_type": "markdown",
"metadata": {
"id": "o68Sg1oOeZm2"
},
"source": [
"# Setup\n",
"\n",
"Pip install `ultralytics` and [dependencies](https://github.com/ultralytics/ultralytics/blob/main/pyproject.toml) and check software and hardware.\n",
"\n",
"[![PyPI - Version](https://img.shields.io/pypi/v/ultralytics?logo=pypi&logoColor=white)](https://pypi.org/project/ultralytics/) [![Downloads](https://static.pepy.tech/badge/ultralytics)](https://pepy.tech/project/ultralytics) [![PyPI - Python Version](https://img.shields.io/pypi/pyversions/ultralytics?logo=python&logoColor=gold)](https://pypi.org/project/ultralytics/)"
]
},
{
"cell_type": "code",
"execution_count": 1,
"metadata": {
"colab": {
"base_uri": "https://localhost:8080/"
},
"id": "9dSwz_uOReMI",
"outputId": "ed8c2370-8fc7-4e4e-f669-d0bae4d944e9"
},
"outputs": [
{
"cell_type": "markdown",
"source": [
"# Additional Resources\n",
"\n",
"## Community Support\n",
"\n",
"For more information on using tracking with Ultralytics, you can explore the comprehensive [Ultralytics Tracking Docs](https://docs.ultralytics.com/modes/track/). This guide covers everything from basic concepts to advanced techniques, ensuring you get the most out of tracking and visualization.\n",
"\n",
"## Ultralytics ⚡ Resources\n",
"\n",
"At Ultralytics, we are committed to providing cutting-edge AI solutions. Here are some key resources to learn more about our company and get involved with our community:\n",
"\n",
"- [Ultralytics HUB](https://ultralytics.com/hub): Simplify your AI projects with Ultralytics HUB, our no-code tool for effortless YOLO training and deployment.\n",
"- [Ultralytics Licensing](https://ultralytics.com/license): Review our licensing terms to understand how you can use our software in your projects.\n",
"- [About Us](https://ultralytics.com/about): Discover our mission, vision, and the story behind Ultralytics.\n",
"- [Join Our Team](https://ultralytics.com/work): Explore career opportunities and join our team of talented professionals.\n",
"\n",
"## YOLOv8 🚀 Resources\n",
"\n",
"YOLOv8 is the latest evolution in the YOLO series, offering state-of-the-art performance in object detection and image segmentation. Here are some essential resources to help you get started with YOLOv8:\n",
"\n",
"- [GitHub](https://github.com/ultralytics/ultralytics): Access the YOLOv8 repository on GitHub, where you can find the source code, contribute to the project, and report issues.\n",
"- [Docs](https://docs.ultralytics.com/): Explore the official documentation for YOLOv8, including installation guides, tutorials, and detailed API references.\n",
"- [Discord](https://ultralytics.com/discord): Join our Discord community to connect with other users, share your projects, and get help from the Ultralytics team.\n",
"\n",
"These resources are designed to help you leverage the full potential of Ultralytics' offerings and YOLOv8. Whether you're a beginner or an experienced developer, you'll find the information and support you need to succeed."
],
"metadata": {
"id": "QrlKg-y3fEyD"
}
"name": "stdout",
"output_type": "stream",
"text": [
"Ultralytics YOLOv8.2.17 🚀 Python-3.10.12 torch-2.2.1+cu121 CUDA:0 (Tesla T4, 15102MiB)\n",
"Setup complete ✅ (2 CPUs, 12.7 GB RAM, 29.8/78.2 GB disk)\n"
]
}
],
"source": [
"%pip install ultralytics\n",
"import ultralytics\n",
"\n",
"ultralytics.checks()"
]
},
{
"cell_type": "markdown",
"metadata": {
"id": "m7VkxQ2aeg7k"
},
"source": [
"# Ultralytics Object Tracking\n",
"\n",
"[Ultralytics YOLOv8](https://github.com/ultralytics/ultralytics/) instance segmentation involves identifying and outlining individual objects in an image, providing a detailed understanding of spatial distribution. Unlike semantic segmentation, it uniquely labels and precisely delineates each object, crucial for tasks like object detection and medical imaging.\n",
"\n",
"There are two types of instance segmentation tracking available in the Ultralytics package:\n",
"\n",
"- **Instance Segmentation with Class Objects:** Each class object is assigned a unique color for clear visual separation.\n",
"\n",
"- **Instance Segmentation with Object Tracks:** Every track is represented by a distinct color, facilitating easy identification and tracking.\n",
"\n",
"## Samples\n",
"\n",
"| Instance Segmentation | Instance Segmentation + Object Tracking |\n",
"|:---------------------------------------------------------------------------------------------------------------------------------------:|:------------------------------------------------------------------------------------------------------------------------------------------------------------:|\n",
"| ![Ultralytics Instance Segmentation](https://github.com/RizwanMunawar/ultralytics/assets/62513924/d4ad3499-1f33-4871-8fbc-1be0b2643aa2) | ![Ultralytics Instance Segmentation with Object Tracking](https://github.com/RizwanMunawar/ultralytics/assets/62513924/2e5c38cc-fd5c-4145-9682-fa94ae2010a0) |\n",
"| Ultralytics Instance Segmentation 😍 | Ultralytics Instance Segmentation with Object Tracking 🔥 |"
]
},
{
"cell_type": "markdown",
"metadata": {
"id": "-ZF9DM6e6gz0"
},
"source": [
"## CLI\n",
"\n",
"Command-Line Interface (CLI) example."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"id": "-XJqhOwo6iqT"
},
"outputs": [],
"source": [
"!yolo track source=\"/path/to/video/file.mp4\" save=True"
]
},
{
"cell_type": "markdown",
"metadata": {
"id": "XRcw0vIE6oNb"
},
"source": [
"## Python\n",
"\n",
"Python Instance Segmentation and Object tracking example."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"id": "Cx-u59HQdu2o"
},
"outputs": [],
"source": [
"from collections import defaultdict\n",
"\n",
"import cv2\n",
"\n",
"from ultralytics import YOLO\n",
"from ultralytics.utils.plotting import Annotator, colors\n",
"\n",
"# Dictionary to store tracking history with default empty lists\n",
"track_history = defaultdict(lambda: [])\n",
"\n",
"# Load the YOLO model with segmentation capabilities\n",
"model = YOLO(\"yolov8n-seg.pt\")\n",
"\n",
"# Open the video file\n",
"cap = cv2.VideoCapture(\"path/to/video/file.mp4\")\n",
"\n",
"# Retrieve video properties: width, height, and frames per second\n",
"w, h, fps = (int(cap.get(x)) for x in (cv2.CAP_PROP_FRAME_WIDTH, cv2.CAP_PROP_FRAME_HEIGHT, cv2.CAP_PROP_FPS))\n",
"\n",
"# Initialize video writer to save the output video with the specified properties\n",
"out = cv2.VideoWriter(\"instance-segmentation-object-tracking.avi\", cv2.VideoWriter_fourcc(*\"MJPG\"), fps, (w, h))\n",
"\n",
"while True:\n",
" # Read a frame from the video\n",
" ret, im0 = cap.read()\n",
" if not ret:\n",
" print(\"Video frame is empty or video processing has been successfully completed.\")\n",
" break\n",
"\n",
" # Create an annotator object to draw on the frame\n",
" annotator = Annotator(im0, line_width=2)\n",
"\n",
" # Perform object tracking on the current frame\n",
" results = model.track(im0, persist=True)\n",
"\n",
" # Check if tracking IDs and masks are present in the results\n",
" if results[0].boxes.id is not None and results[0].masks is not None:\n",
" # Extract masks and tracking IDs\n",
" masks = results[0].masks.xy\n",
" track_ids = results[0].boxes.id.int().cpu().tolist()\n",
"\n",
" # Annotate each mask with its corresponding tracking ID and color\n",
" for mask, track_id in zip(masks, track_ids):\n",
" annotator.seg_bbox(mask=mask, mask_color=colors(track_id, True), track_label=str(track_id))\n",
"\n",
" # Write the annotated frame to the output video\n",
" out.write(im0)\n",
" # Display the annotated frame\n",
" cv2.imshow(\"instance-segmentation-object-tracking\", im0)\n",
"\n",
" # Exit the loop if 'q' is pressed\n",
" if cv2.waitKey(1) & 0xFF == ord(\"q\"):\n",
" break\n",
"\n",
"# Release the video writer and capture objects, and close all OpenCV windows\n",
"out.release()\n",
"cap.release()\n",
"cv2.destroyAllWindows()"
]
},
{
"cell_type": "markdown",
"metadata": {
"id": "QrlKg-y3fEyD"
},
"source": [
"# Additional Resources\n",
"\n",
"## Community Support\n",
"\n",
"For more information on using tracking with Ultralytics, you can explore the comprehensive [Ultralytics Tracking Docs](https://docs.ultralytics.com/modes/track/). This guide covers everything from basic concepts to advanced techniques, ensuring you get the most out of tracking and visualization.\n",
"\n",
"## Ultralytics ⚡ Resources\n",
"\n",
"At Ultralytics, we are committed to providing cutting-edge AI solutions. Here are some key resources to learn more about our company and get involved with our community:\n",
"\n",
"- [Ultralytics HUB](https://ultralytics.com/hub): Simplify your AI projects with Ultralytics HUB, our no-code tool for effortless YOLO training and deployment.\n",
"- [Ultralytics Licensing](https://ultralytics.com/license): Review our licensing terms to understand how you can use our software in your projects.\n",
"- [About Us](https://ultralytics.com/about): Discover our mission, vision, and the story behind Ultralytics.\n",
"- [Join Our Team](https://ultralytics.com/work): Explore career opportunities and join our team of talented professionals.\n",
"\n",
"## YOLOv8 🚀 Resources\n",
"\n",
"YOLOv8 is the latest evolution in the YOLO series, offering state-of-the-art performance in object detection and image segmentation. Here are some essential resources to help you get started with YOLOv8:\n",
"\n",
"- [GitHub](https://github.com/ultralytics/ultralytics): Access the YOLOv8 repository on GitHub, where you can find the source code, contribute to the project, and report issues.\n",
"- [Docs](https://docs.ultralytics.com/): Explore the official documentation for YOLOv8, including installation guides, tutorials, and detailed API references.\n",
"- [Discord](https://ultralytics.com/discord): Join our Discord community to connect with other users, share your projects, and get help from the Ultralytics team.\n",
"\n",
"These resources are designed to help you leverage the full potential of Ultralytics' offerings and YOLOv8. Whether you're a beginner or an experienced developer, you'll find the information and support you need to succeed."
]
}
],
"metadata": {
"accelerator": "GPU",
"colab": {
"gpuType": "T4",
"provenance": []
},
"kernelspec": {
"display_name": "Python 3",
"name": "python3"
},
"language_info": {
"name": "python"
}
},
"nbformat": 4,
"nbformat_minor": 0
}
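The Python tracking cell above initializes track_history but never reads from it. A minimal sketch of how the per-track history could be used inside the loop to draw motion trails, reusing the cell's results, track_ids, im0 and colors names (hypothetical additions, not part of this commit):

import numpy as np

# Hypothetical continuation of the tracking loop: accumulate each track's box
# centers and draw a short polyline trail per track ID.
boxes = results[0].boxes.xywh.cpu()
for box, track_id in zip(boxes, track_ids):
    x, y, _, _ = box
    track = track_history[track_id]
    track.append((float(x), float(y)))  # center of the current box
    if len(track) > 30:  # keep only the last 30 positions
        track.pop(0)
    points = np.hstack(track).astype(np.int32).reshape((-1, 1, 2))
    cv2.polylines(im0, [points], isClosed=False, color=colors(track_id, True), thickness=2)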

@ -84,9 +84,7 @@ def test_fastsam(task="segment", model=WEIGHTS_DIR / "FastSAM-s.pt", data="coco8
new_masks, _ = Predictor.remove_small_regions(everything_results[0].masks.data, min_area=20)
# Run inference with bboxes and points and texts prompt at the same time
results = sam_model(
source, bboxes=[439, 437, 524, 709], points=[[200, 200]], labels=[1], texts="a photo of a dog"
)
sam_model(source, bboxes=[439, 437, 524, 709], points=[[200, 200]], labels=[1], texts="a photo of a dog")
def test_mobilesam():

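For reference, the combined-prompt call exercised by this test corresponds roughly to the following standalone usage (a sketch only; the weights file and image path are placeholders, and prompt-argument support may differ across ultralytics versions):

from ultralytics import FastSAM

# Rough sketch of box + point + text prompting in a single call (placeholder paths).
model = FastSAM("FastSAM-s.pt")
results = model(
    "path/to/image.jpg",
    bboxes=[439, 437, 524, 709],  # box prompt in xyxy pixels
    points=[[200, 200]],  # single point prompt
    labels=[1],  # 1 marks the point as foreground
    texts="a photo of a dog",  # text prompt
)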
@ -1,6 +1,6 @@
# Ultralytics YOLO 🚀, AGPL-3.0 license
__version__ = "8.2.77"
__version__ = "8.2.78"
import os

@ -539,7 +539,6 @@ class Mosaic(BaseMixTransform):
assert 0 <= p <= 1.0, f"The probability should be in range [0, 1], but got {p}."
assert n in {4, 9}, "grid must be equal to 4 or 9."
super().__init__(dataset=dataset, p=p)
self.dataset = dataset
self.imgsz = imgsz
self.border = (-imgsz // 2, -imgsz // 2) # width, height
self.n = n
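The deleted self.dataset assignment was redundant: the super().__init__(dataset=dataset, p=p) call just above it already stores the dataset on the transform. Roughly, as a simplified sketch of the parent constructor (see ultralytics/data/augment.py for the real class):

class BaseMixTransform:
    """Simplified sketch of the parent constructor, not the full class."""

    def __init__(self, dataset, pre_transform=None, p=0.0):
        self.dataset = dataset  # the parent already keeps the dataset reference
        self.pre_transform = pre_transform
        self.p = p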
@ -1692,7 +1691,6 @@ class CopyPaste:
instances.convert_bbox(format="xyxy")
instances.denormalize(w, h)
if self.p and len(instances.segments):
n = len(instances)
_, w, _ = im.shape # height, width, channels
im_new = np.zeros(im.shape, np.uint8)

@ -381,6 +381,7 @@ class Model(nn.Module):
"""
self._check_is_pytorch_model()
if isinstance(weights, (str, Path)):
self.overrides["pretrained"] = weights # remember the weights for DDP training
weights, self.ckpt = attempt_load_one_weight(weights)
self.model.load(weights)
return self
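The added overrides["pretrained"] entry records the weights path so that DDP worker processes can rebuild the same starting checkpoint. A rough usage sketch (model files, dataset and device list are placeholders):

from ultralytics import YOLO

# Build from a config, load weights (now remembered in overrides["pretrained"]),
# then start a multi-GPU run where each DDP worker reloads the same checkpoint.
model = YOLO("yolov8n.yaml")
model.load("yolov8n.pt")
model.train(data="coco8.yaml", epochs=1, device=[0, 1])  # hypothetical 2-GPU DDP run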
