From b6d425c041ace32c6a2e95dae4f726fa1710262b Mon Sep 17 00:00:00 2001 From: Glenn Jocher Date: Sat, 18 May 2024 19:47:58 +0200 Subject: [PATCH] `ultralytics 8.2.18` major Ultralytics Solutions refactor (#12797) Co-authored-by: Muhammad Rizwan Munawar --- examples/heatmaps.ipynb | 110 +++++++++++++------- examples/object_counting.ipynb | 22 ++-- examples/object_tracking.ipynb | 178 ++++++++++++++++++++------------- ultralytics/__init__.py | 2 +- 4 files changed, 194 insertions(+), 118 deletions(-) diff --git a/examples/heatmaps.ipynb b/examples/heatmaps.ipynb index 0488e3d894..b73c90d6b7 100644 --- a/examples/heatmaps.ipynb +++ b/examples/heatmaps.ipynb @@ -4,7 +4,8 @@ "metadata": { "colab": { "provenance": [], - "gpuType": "T4" + "gpuType": "T4", + "toc_visible": true }, "kernelspec": { "name": "python3", @@ -27,14 +28,16 @@ " [中文](https://docs.ultralytics.com/zh/) | [한국어](https://docs.ultralytics.com/ko/) | [日本語](https://docs.ultralytics.com/ja/) | [Русский](https://docs.ultralytics.com/ru/) | [Deutsch](https://docs.ultralytics.com/de/) | [Français](https://docs.ultralytics.com/fr/) | [Español](https://docs.ultralytics.com/es/) | [Português](https://docs.ultralytics.com/pt/) | [Türkçe](https://docs.ultralytics.com/tr/) | [Tiếng Việt](https://docs.ultralytics.com/vi/) | [हिन्दी](https://docs.ultralytics.com/hi/) | [العربية](https://docs.ultralytics.com/ar/)\n", "\n", " \"Ultralytics\n", + " \"Run\n", " \"Open\n", + " \"Open\n", " \"Discord\"\n", "\n", - "Welcome to the Ultralytics YOLOv8 🚀 notebook! YOLOv8 is the latest version of the YOLO (You Only Look Once) AI models developed by Ultralytics. This notebook serves as the starting point for exploring the heatmaps and understand its features and capabilities.\n", + "Welcome to the Ultralytics YOLOv8 🚀 notebook! YOLOv8 is the latest version of the YOLO (You Only Look Once) AI models developed by Ultralytics. This notebook serves as the starting point for exploring the various resources available to help you get started with YOLOv8 and understand its features and capabilities.\n", "\n", "YOLOv8 models are fast, accurate, and easy to use, making them ideal for various object detection and image segmentation tasks. They can be trained on large datasets and run on diverse hardware platforms, from CPUs to GPUs.\n", "\n", - "We hope that the resources in this notebook will help you get the most out of Ultralytics Heatmaps. Please browse the YOLOv8 Docs for details, raise an issue on GitHub for support, and join our Discord community for questions and discussions!\n", + "We hope that the resources in this notebook will help you get the most out of YOLOv8. 
Please browse the YOLOv8 Heatmap Docs for details, raise an issue on GitHub for support, and join our Discord community for questions and discussions!\n", "\n", "" ], @@ -55,21 +58,43 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 1, "metadata": { - "id": "9dSwz_uOReMI" + "id": "9dSwz_uOReMI", + "colab": { + "base_uri": "https://localhost:8080/" + }, + "outputId": "99866c77-e210-41e1-d581-8508371ce634" }, - "outputs": [], + "outputs": [ + { + "output_type": "stream", + "name": "stdout", + "text": [ + "Ultralytics YOLOv8.2.17 🚀 Python-3.10.12 torch-2.2.1+cu121 CUDA:0 (Tesla T4, 15102MiB)\n", + "Setup complete ✅ (2 CPUs, 12.7 GB RAM, 29.8/78.2 GB disk)\n" + ] + } + ], "source": [ - "!pip install ultralytics" + "%pip install ultralytics\n", + "import ultralytics\n", + "ultralytics.checks()" ] }, { "cell_type": "markdown", "source": [ - "# Ultralytics Heatmaps\n", + "# Introduction to Heatmaps\n", + "\n", + "A heatmap generated with [Ultralytics YOLOv8](https://github.com/ultralytics/ultralytics/) transforms complex data into a vibrant, color-coded matrix. This visual tool employs a spectrum of colors to represent varying data values, where warmer hues indicate higher intensities and cooler tones signify lower values. Heatmaps excel in visualizing intricate data patterns, correlations, and anomalies, offering an accessible and engaging approach to data interpretation across diverse domains.\n", + "\n", + "## Real World Applications\n", "\n", - "Heatmap is color-coded matrix, generated by Ultralytics YOLOv8, simplifies intricate data by using vibrant colors. This visual representation employs warmer hues for higher intensities and cooler tones for lower values. Heatmaps are effective in illustrating complex data patterns, correlations, and anomalies, providing a user-friendly and engaging way to interpret data across various domains." 
+ "| Transportation | Retail |\n", + "|:-----------------------------------------------------------------------------------------------------------------------------------------------:|:---------------------------------------------------------------------------------------------------------------------------------------:|\n", + "| ![Ultralytics YOLOv8 Transportation Heatmap](https://github.com/RizwanMunawar/ultralytics/assets/62513924/288d7053-622b-4452-b4e4-1f41aeb764aa) | ![Ultralytics YOLOv8 Retail Heatmap](https://github.com/RizwanMunawar/ultralytics/assets/62513924/edef75ad-50a7-4c0a-be4a-a66cdfc12802) |\n", + "| Ultralytics YOLOv8 Transportation Heatmap | Ultralytics YOLOv8 Retail Heatmap |\n" ], "metadata": { "id": "m7VkxQ2aeg7k" @@ -78,39 +103,46 @@ { "cell_type": "code", "source": [ - "from ultralytics import YOLO\n", - "from ultralytics.solutions import heatmap\n", "import cv2\n", + "from ultralytics import YOLO, solutions\n", "\n", + "# Load YOLO model\n", "model = YOLO(\"yolov8n.pt\")\n", + "\n", + "# Open video file\n", "cap = cv2.VideoCapture(\"path/to/video/file.mp4\")\n", "assert cap.isOpened(), \"Error reading video file\"\n", + "\n", + "# Get video properties\n", "w, h, fps = (int(cap.get(x)) for x in (cv2.CAP_PROP_FRAME_WIDTH, cv2.CAP_PROP_FRAME_HEIGHT, cv2.CAP_PROP_FPS))\n", "\n", - "# Video writer\n", - "video_writer = cv2.VideoWriter(\"heatmap_output.avi\",\n", - " cv2.VideoWriter_fourcc(*'mp4v'),\n", - " fps,\n", - " (w, h))\n", + "# Initialize video writer\n", + "video_writer = cv2.VideoWriter(\"heatmap_output.avi\", cv2.VideoWriter_fourcc(*\"mp4v\"), fps, (w, h))\n", "\n", - "# Init heatmap\n", - "heatmap_obj = heatmap.Heatmap()\n", - "heatmap_obj.set_args(colormap=cv2.COLORMAP_PARULA,\n", - " imw=w,\n", - " imh=h,\n", - " view_img=True,\n", - " shape=\"circle\")\n", + "# Initialize heatmap object\n", + "heatmap_obj = solutions.Heatmap(\n", + " colormap=cv2.COLORMAP_PARULA,\n", + " view_img=True,\n", + " shape=\"circle\",\n", + " classes_names=model.names,\n", + ")\n", "\n", "while cap.isOpened():\n", " success, im0 = cap.read()\n", " if not success:\n", " print(\"Video frame is empty or video processing has been successfully completed.\")\n", " break\n", + "\n", + " # Perform tracking on the current frame\n", " tracks = model.track(im0, persist=True, show=False)\n", "\n", + " # Generate heatmap on the frame\n", " im0 = heatmap_obj.generate_heatmap(im0, tracks)\n", + "\n", + " # Write the frame to the output video\n", " video_writer.write(im0)\n", "\n", + "# Release resources\n", "cap.release()\n", "video_writer.release()\n", "cv2.destroyAllWindows()" @@ -124,20 +156,30 @@ { "cell_type": "markdown", "source": [ - "#Community Support\n", + "# Additional Resources\n", + "\n", + "## Community Support\n", + "\n", + "For more information on using heatmaps with Ultralytics, you can explore the comprehensive [Ultralytics Heatmaps Docs](https://docs.ultralytics.com/guides/heatmaps/). This guide covers everything from basic concepts to advanced techniques, ensuring you get the most out of your heatmap visualizations.\n", + "\n", + "## Ultralytics ⚡ Resources\n", + "\n", + "At Ultralytics, we are committed to providing cutting-edge AI solutions. 
Here are some key resources to learn more about our company and get involved with our community:\n", + "\n", + "- [Ultralytics HUB](https://ultralytics.com/hub): Simplify your AI projects with Ultralytics HUB, our no-code tool for effortless YOLO training and deployment.\n", + "- [Ultralytics Licensing](https://ultralytics.com/license): Review our licensing terms to understand how you can use our software in your projects.\n", + "- [About Us](https://ultralytics.com/about): Discover our mission, vision, and the story behind Ultralytics.\n", + "- [Join Our Team](https://ultralytics.com/work): Explore career opportunities and join our team of talented professionals.\n", + "\n", + "## YOLOv8 🚀 Resources\n", "\n", - "For more information, you can explore Ultralytics Heatmaps Docs\n", + "YOLOv8 is the latest evolution in the YOLO series, offering state-of-the-art performance in object detection and image segmentation. Here are some essential resources to help you get started with YOLOv8:\n", "\n", - "Ultralytics ⚡ resources\n", - "- About Us – https://ultralytics.com/about\n", - "- Join Our Team – https://ultralytics.com/work\n", - "- Contact Us – https://ultralytics.com/contact\n", - "- Discord – https://ultralytics.com/discord\n", - "- Ultralytics License – https://ultralytics.com/license\n", + "- [GitHub](https://github.com/ultralytics/ultralytics): Access the YOLOv8 repository on GitHub, where you can find the source code, contribute to the project, and report issues.\n", + "- [Docs](https://docs.ultralytics.com/): Explore the official documentation for YOLOv8, including installation guides, tutorials, and detailed API references.\n", + "- [Discord](https://ultralytics.com/discord): Join our Discord community to connect with other users, share your projects, and get help from the Ultralytics team.\n", "\n", - "YOLOv8 🚀 resources\n", - "- GitHub – https://github.com/ultralytics/ultralytics\n", - "- Docs – https://docs.ultralytics.com/" + "These resources are designed to help you leverage the full potential of Ultralytics' offerings and YOLOv8. Whether you're a beginner or an experienced developer, you'll find the information and support you need to succeed." 
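The heatmap hunk above is the heart of this Solutions refactor: the old two-step `heatmap.Heatmap()` plus `set_args(...)` pattern becomes a single `solutions.Heatmap(...)` constructor call. Below is a minimal before/after sketch assembled from the removed and added lines of this diff; the commented-out form targets the pre-8.2.18 API, and the placeholder image dimensions are illustrative only.

```python
import cv2
from ultralytics import YOLO, solutions

model = YOLO("yolov8n.pt")

# Before this PR (ultralytics <= 8.2.17): construct, then configure via set_args()
# from ultralytics.solutions import heatmap
# heatmap_obj = heatmap.Heatmap()
# heatmap_obj.set_args(colormap=cv2.COLORMAP_PARULA, imw=1280, imh=720, view_img=True, shape="circle")

# After this PR (ultralytics 8.2.18): all options go straight into the constructor
heatmap_obj = solutions.Heatmap(
    colormap=cv2.COLORMAP_PARULA,  # OpenCV colormap used to render accumulated intensities
    view_img=True,                 # show annotated frames while processing
    shape="circle",                # accumulation shape drawn per tracked object
    classes_names=model.names,     # class-name mapping now passed at construction time
)

# Per-frame usage is unchanged in the notebook: feed tracker output to generate_heatmap()
# tracks = model.track(frame, persist=True, show=False)
# frame = heatmap_obj.generate_heatmap(frame, tracks)
```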
], "metadata": { "id": "QrlKg-y3fEyD" diff --git a/examples/object_counting.ipynb b/examples/object_counting.ipynb index d6afd3f234..a25dbf189e 100644 --- a/examples/object_counting.ipynb +++ b/examples/object_counting.ipynb @@ -78,9 +78,8 @@ { "cell_type": "code", "source": [ - "from ultralytics import YOLO\n", - "from ultralytics.solutions import object_counter\n", "import cv2\n", + "from ultralytics import YOLO, solutions\n", "\n", "model = YOLO(\"yolov8n.pt\")\n", "cap = cv2.VideoCapture(\"path/to/video/file.mp4\")\n", @@ -91,17 +90,16 @@ "region_points = [(20, 400), (1080, 404), (1080, 360), (20, 360)]\n", "\n", "# Video writer\n", - "video_writer = cv2.VideoWriter(\"object_counting_output.avi\",\n", - " cv2.VideoWriter_fourcc(*'mp4v'),\n", - " fps,\n", - " (w, h))\n", + "video_writer = cv2.VideoWriter(\"object_counting_output.avi\", cv2.VideoWriter_fourcc(*'mp4v'), fps, (w, h))\n", "\n", "# Init Object Counter\n", - "counter = object_counter.ObjectCounter()\n", - "counter.set_args(view_img=True,\n", - " reg_pts=region_points,\n", - " classes_names=model.names,\n", - " draw_tracks=True)\n", + "counter = solutions.ObjectCounter(\n", + " view_img=True,\n", + " reg_pts=region_points,\n", + " classes_names=model.names,\n", + " draw_tracks=True,\n", + " line_thickness=2,\n", + ")\n", "\n", "while cap.isOpened():\n", " success, im0 = cap.read()\n", @@ -146,4 +144,4 @@ } } ] -} +} \ No newline at end of file diff --git a/examples/object_tracking.ipynb b/examples/object_tracking.ipynb index 610514eafd..e576922a5c 100644 --- a/examples/object_tracking.ipynb +++ b/examples/object_tracking.ipynb @@ -27,14 +27,16 @@ " [中文](https://docs.ultralytics.com/zh/) | [한국어](https://docs.ultralytics.com/ko/) | [日本語](https://docs.ultralytics.com/ja/) | [Русский](https://docs.ultralytics.com/ru/) | [Deutsch](https://docs.ultralytics.com/de/) | [Français](https://docs.ultralytics.com/fr/) | [Español](https://docs.ultralytics.com/es/) | [Português](https://docs.ultralytics.com/pt/) | [Türkçe](https://docs.ultralytics.com/tr/) | [Tiếng Việt](https://docs.ultralytics.com/vi/) | [हिन्दी](https://docs.ultralytics.com/hi/) | [العربية](https://docs.ultralytics.com/ar/)\n", "\n", " \"Ultralytics\n", + " \"Run\n", " \"Open\n", + " \"Open\n", " \"Discord\"\n", "\n", - "Welcome to the Ultralytics YOLOv8 🚀 notebook! YOLOv8 is the latest version of the YOLO (You Only Look Once) AI models developed by Ultralytics. This notebook serves as the starting point for exploring the Object Tracking and understand its features and capabilities.\n", + "Welcome to the Ultralytics YOLOv8 🚀 notebook! YOLOv8 is the latest version of the YOLO (You Only Look Once) AI models developed by Ultralytics. This notebook serves as the starting point for exploring the various resources available to help you get started with YOLOv8 and understand its features and capabilities.\n", "\n", "YOLOv8 models are fast, accurate, and easy to use, making them ideal for various object detection and image segmentation tasks. They can be trained on large datasets and run on diverse hardware platforms, from CPUs to GPUs.\n", "\n", - "We hope that the resources in this notebook will help you get the most out of Ultralytics Object Tracking. Please browse the YOLOv8 Docs for details, raise an issue on GitHub for support, and join our Discord community for questions and discussions!\n", + "We hope that the resources in this notebook will help you get the most out of YOLOv8. 
Please browse the YOLOv8 Tracking Docs for details, raise an issue on GitHub for support, and join our Discord community for questions and discussions!\n", "\n", "" ], @@ -55,13 +57,28 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 1, "metadata": { - "id": "9dSwz_uOReMI" + "id": "9dSwz_uOReMI", + "colab": { + "base_uri": "https://localhost:8080/" + }, + "outputId": "ed8c2370-8fc7-4e4e-f669-d0bae4d944e9" }, - "outputs": [], + "outputs": [ + { + "output_type": "stream", + "name": "stdout", + "text": [ + "Ultralytics YOLOv8.2.17 🚀 Python-3.10.12 torch-2.2.1+cu121 CUDA:0 (Tesla T4, 15102MiB)\n", + "Setup complete ✅ (2 CPUs, 12.7 GB RAM, 29.8/78.2 GB disk)\n" + ] + } + ], "source": [ - "!pip install ultralytics" + "%pip install ultralytics\n", + "import ultralytics\n", + "ultralytics.checks()" ] }, { @@ -69,7 +86,20 @@ "source": [ "# Ultralytics Object Tracking\n", "\n", - "Within the domain of video analytics, object tracking stands out as a crucial undertaking. It goes beyond merely identifying the location and class of objects within the frame; it also involves assigning a unique ID to each detected object as the video unfolds. The applications of this technology are vast, spanning from surveillance and security to real-time sports analytics." + "[Ultralytics YOLOv8](https://github.com/ultralytics/ultralytics/) instance segmentation involves identifying and outlining individual objects in an image, providing a detailed understanding of spatial distribution. Unlike semantic segmentation, it uniquely labels and precisely delineates each object, crucial for tasks like object detection and medical imaging.\n", + "\n", + "There are two types of instance segmentation tracking available in the Ultralytics package:\n", + "\n", + "- **Instance Segmentation with Class Objects:** Each class object is assigned a unique color for clear visual separation.\n", + "\n", + "- **Instance Segmentation with Object Tracks:** Every track is represented by a distinct color, facilitating easy identification and tracking.\n", + "\n", + "## Samples\n", + "\n", + "| Instance Segmentation | Instance Segmentation + Object Tracking |\n", + "|:---------------------------------------------------------------------------------------------------------------------------------------:|:------------------------------------------------------------------------------------------------------------------------------------------------------------:|\n", + "| ![Ultralytics Instance Segmentation](https://github.com/RizwanMunawar/ultralytics/assets/62513924/d4ad3499-1f33-4871-8fbc-1be0b2643aa2) | ![Ultralytics Instance Segmentation with Object Tracking](https://github.com/RizwanMunawar/ultralytics/assets/62513924/2e5c38cc-fd5c-4145-9682-fa94ae2010a0) |\n", + "| Ultralytics Instance Segmentation 😍 | Ultralytics Instance Segmentation with Object Tracking 🔥 |" ], "metadata": { "id": "m7VkxQ2aeg7k" @@ -78,7 +108,9 @@ { "cell_type": "markdown", "source": [ - "## CLI" + "## CLI\n", + "\n", + "Command-Line Interface (CLI) example." ], "metadata": { "id": "-ZF9DM6e6gz0" @@ -100,7 +132,7 @@ "source": [ "## Python\n", "\n", - "- Draw Object tracking trails" + "Python Instance Segmentation and Object tracking example." 
], "metadata": { "id": "XRcw0vIE6oNb" @@ -109,67 +141,61 @@ { "cell_type": "code", "source": [ + "from collections import defaultdict\n", + "\n", "import cv2\n", - "import numpy as np\n", "from ultralytics import YOLO\n", - "\n", - "from ultralytics.utils.checks import check_imshow\n", "from ultralytics.utils.plotting import Annotator, colors\n", "\n", - "from collections import defaultdict\n", - "\n", + "# Dictionary to store tracking history with default empty lists\n", "track_history = defaultdict(lambda: [])\n", - "model = YOLO(\"yolov8n.pt\")\n", - "names = model.model.names\n", "\n", - "video_path = \"/path/to/video/file.mp4\"\n", - "cap = cv2.VideoCapture(video_path)\n", - "assert cap.isOpened(), \"Error reading video file\"\n", + "# Load the YOLO model with segmentation capabilities\n", + "model = YOLO(\"yolov8n-seg.pt\")\n", + "\n", + "# Open the video file\n", + "cap = cv2.VideoCapture(\"path/to/video/file.mp4\")\n", "\n", + "# Retrieve video properties: width, height, and frames per second\n", "w, h, fps = (int(cap.get(x)) for x in (cv2.CAP_PROP_FRAME_WIDTH, cv2.CAP_PROP_FRAME_HEIGHT, cv2.CAP_PROP_FPS))\n", "\n", - "result = cv2.VideoWriter(\"object_tracking.avi\",\n", - " cv2.VideoWriter_fourcc(*'mp4v'),\n", - " fps,\n", - " (w, h))\n", - "\n", - "while cap.isOpened():\n", - " success, frame = cap.read()\n", - " if success:\n", - " results = model.track(frame, persist=True, verbose=False)\n", - " boxes = results[0].boxes.xyxy.cpu()\n", - "\n", - " if results[0].boxes.id is not None:\n", - "\n", - " # Extract prediction results\n", - " clss = results[0].boxes.cls.cpu().tolist()\n", - " track_ids = results[0].boxes.id.int().cpu().tolist()\n", - " confs = results[0].boxes.conf.float().cpu().tolist()\n", - "\n", - " # Annotator Init\n", - " annotator = Annotator(frame, line_width=2)\n", - "\n", - " for box, cls, track_id in zip(boxes, clss, track_ids):\n", - " annotator.box_label(box, color=colors(int(cls), True), label=names[int(cls)])\n", - "\n", - " # Store tracking history\n", - " track = track_history[track_id]\n", - " track.append((int((box[0] + box[2]) / 2), int((box[1] + box[3]) / 2)))\n", - " if len(track) > 30:\n", - " track.pop(0)\n", - "\n", - " # Plot tracks\n", - " points = np.array(track, dtype=np.int32).reshape((-1, 1, 2))\n", - " cv2.circle(frame, (track[-1]), 7, colors(int(cls), True), -1)\n", - " cv2.polylines(frame, [points], isClosed=False, color=colors(int(cls), True), thickness=2)\n", - "\n", - " result.write(frame)\n", - " if cv2.waitKey(1) & 0xFF == ord(\"q\"):\n", - " break\n", - " else:\n", + "# Initialize video writer to save the output video with the specified properties\n", + "out = cv2.VideoWriter(\"instance-segmentation-object-tracking.avi\", cv2.VideoWriter_fourcc(*\"MJPG\"), fps, (w, h))\n", + "\n", + "while True:\n", + " # Read a frame from the video\n", + " ret, im0 = cap.read()\n", + " if not ret:\n", + " print(\"Video frame is empty or video processing has been successfully completed.\")\n", + " break\n", + "\n", + " # Create an annotator object to draw on the frame\n", + " annotator = Annotator(im0, line_width=2)\n", + "\n", + " # Perform object tracking on the current frame\n", + " results = model.track(im0, persist=True)\n", + "\n", + " # Check if tracking IDs and masks are present in the results\n", + " if results[0].boxes.id is not None and results[0].masks is not None:\n", + " # Extract masks and tracking IDs\n", + " masks = results[0].masks.xy\n", + " track_ids = results[0].boxes.id.int().cpu().tolist()\n", + "\n", + " # Annotate each 
mask with its corresponding tracking ID and color\n", + " for mask, track_id in zip(masks, track_ids):\n", + " annotator.seg_bbox(mask=mask, mask_color=colors(track_id, True), track_label=str(track_id))\n", + "\n", + " # Write the annotated frame to the output video\n", + " out.write(im0)\n", + " # Display the annotated frame\n", + " cv2.imshow(\"instance-segmentation-object-tracking\", im0)\n", + "\n", + " # Exit the loop if 'q' is pressed\n", + " if cv2.waitKey(1) & 0xFF == ord(\"q\"):\n", " break\n", "\n", - "result.release()\n", + "# Release the video writer and capture objects, and close all OpenCV windows\n", + "out.release()\n", "cap.release()\n", "cv2.destroyAllWindows()" ], @@ -182,24 +208,34 @@ { "cell_type": "markdown", "source": [ - "#Community Support\n", + "# Additional Resources\n", + "\n", + "## Community Support\n", + "\n", + "For more information on using tracking with Ultralytics, you can explore the comprehensive [Ultralytics Tracking Docs](https://docs.ultralytics.com/modes/track/). This guide covers everything from basic concepts to advanced techniques, ensuring you get the most out of tracking and visualization.\n", + "\n", + "## Ultralytics ⚡ Resources\n", + "\n", + "At Ultralytics, we are committed to providing cutting-edge AI solutions. Here are some key resources to learn more about our company and get involved with our community:\n", + "\n", + "- [Ultralytics HUB](https://ultralytics.com/hub): Simplify your AI projects with Ultralytics HUB, our no-code tool for effortless YOLO training and deployment.\n", + "- [Ultralytics Licensing](https://ultralytics.com/license): Review our licensing terms to understand how you can use our software in your projects.\n", + "- [About Us](https://ultralytics.com/about): Discover our mission, vision, and the story behind Ultralytics.\n", + "- [Join Our Team](https://ultralytics.com/work): Explore career opportunities and join our team of talented professionals.\n", + "\n", + "## YOLOv8 🚀 Resources\n", "\n", - "For more information, you can explore Ultralytics Object Tracking Docs\n", + "YOLOv8 is the latest evolution in the YOLO series, offering state-of-the-art performance in object detection and image segmentation. Here are some essential resources to help you get started with YOLOv8:\n", "\n", - "Ultralytics ⚡ resources\n", - "- About Us – https://ultralytics.com/about\n", - "- Join Our Team – https://ultralytics.com/work\n", - "- Contact Us – https://ultralytics.com/contact\n", - "- Discord – https://ultralytics.com/discord\n", - "- Ultralytics License – https://ultralytics.com/license\n", + "- [GitHub](https://github.com/ultralytics/ultralytics): Access the YOLOv8 repository on GitHub, where you can find the source code, contribute to the project, and report issues.\n", + "- [Docs](https://docs.ultralytics.com/): Explore the official documentation for YOLOv8, including installation guides, tutorials, and detailed API references.\n", + "- [Discord](https://ultralytics.com/discord): Join our Discord community to connect with other users, share your projects, and get help from the Ultralytics team.\n", "\n", - "YOLOv8 🚀 resources\n", - "- GitHub – https://github.com/ultralytics/ultralytics\n", - "- Docs – https://docs.ultralytics.com/" + "These resources are designed to help you leverage the full potential of Ultralytics' offerings and YOLOv8. Whether you're a beginner or an experienced developer, you'll find the information and support you need to succeed." 
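The markdown cell earlier in this notebook diff describes two modes, per-class coloring and per-track coloring, while the loop above implements only the track-colored one. A short sketch of the per-class variant is given below; it reuses `Annotator.seg_bbox` and `colors` from the patch, but the `det_label` keyword is an assumption that mirrors `track_label` above and is not shown in this diff.

```python
import cv2
from ultralytics import YOLO
from ultralytics.utils.plotting import Annotator, colors

# Per-class coloring: plain prediction (no tracker), masks colored by class index
model = YOLO("yolov8n-seg.pt")
names = model.model.names

cap = cv2.VideoCapture("path/to/video/file.mp4")  # placeholder path, as in the notebook
assert cap.isOpened(), "Error reading video file"

while cap.isOpened():
    ret, im0 = cap.read()
    if not ret:
        break

    annotator = Annotator(im0, line_width=2)
    results = model.predict(im0)

    # Draw each mask in its class color; det_label is assumed to mirror track_label above
    if results[0].masks is not None:
        clss = results[0].boxes.cls.cpu().tolist()
        masks = results[0].masks.xy
        for mask, cls in zip(masks, clss):
            annotator.seg_bbox(mask=mask, mask_color=colors(int(cls), True), det_label=names[int(cls)])

    cv2.imshow("instance-segmentation", im0)
    if cv2.waitKey(1) & 0xFF == ord("q"):
        break

cap.release()
cv2.destroyAllWindows()
```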
], "metadata": { "id": "QrlKg-y3fEyD" } } ] -} +} \ No newline at end of file diff --git a/ultralytics/__init__.py b/ultralytics/__init__.py index ae92993108..b7beda5064 100644 --- a/ultralytics/__init__.py +++ b/ultralytics/__init__.py @@ -1,6 +1,6 @@ # Ultralytics YOLO 🚀, AGPL-3.0 license -__version__ = "8.2.17" +__version__ = "8.2.18" from ultralytics.data.explorer.explorer import Explorer from ultralytics.models import RTDETR, SAM, YOLO, YOLOWorld
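The `object_counting.ipynb` hunk follows the same pattern as the heatmap change, and the `__version__` bump above is what ships it. Below is a compact before/after sketch for the counter, with keyword arguments copied from this patch; the commented-out form targets the pre-8.2.18 API, and the per-frame counting call is not shown in these hunks, so its name here is indicative only.

```python
from ultralytics import YOLO, solutions

model = YOLO("yolov8n.pt")
region_points = [(20, 400), (1080, 404), (1080, 360), (20, 360)]  # counting region from the notebook

# Before this PR (ultralytics <= 8.2.17): construct, then configure via set_args()
# from ultralytics.solutions import object_counter
# counter = object_counter.ObjectCounter()
# counter.set_args(view_img=True, reg_pts=region_points, classes_names=model.names, draw_tracks=True)

# After this PR (ultralytics 8.2.18): configuration moves into the constructor
counter = solutions.ObjectCounter(
    view_img=True,
    reg_pts=region_points,
    classes_names=model.names,
    draw_tracks=True,
    line_thickness=2,
)

# Per-frame usage (not shown in these hunks) remains a separate call, e.g.:
# tracks = model.track(im0, persist=True, show=False)
# im0 = counter.start_counting(im0, tracks)

# Quick check that the refactored constructors above are available in the installed package
import ultralytics

print(ultralytics.__version__)  # "8.2.18" once this release is installed
```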