Created using Colaboratory

pull/196/head
Glenn Jocher 2 years ago
parent 341ce0ff3f
commit 1135701163
1 changed file: examples/tutorial.ipynb (379 changes)

@@ -57,9 +57,10 @@
"metadata": {
"id": "wbvMlHd_QwMG",
"colab": {
"base_uri": "https://localhost:8080/"
"base_uri": "https://localhost:8080/",
"height": 0
},
"outputId": "276a3983-eedd-4eeb-9701-2ae78e1b4c44"
"outputId": "d7b7b0a6-6a29-4e6d-e404-8d67d2be86e2"
},
"source": [
"# Pip install method (recommended)\n",
@@ -67,14 +68,14 @@
"import ultralytics\n",
"ultralytics.checks()"
],
"execution_count": null,
"execution_count": 1,
"outputs": [
{
"output_type": "stream",
"name": "stdout",
"name": "stderr",
"text": [
"\u001b[2K\r\u001b[2K\rUltralytics YOLO 🚀 0.0.59 Python-3.8.16 torch-1.13.0+cu116 CUDA:0 (Tesla T4, 15110MiB)\n",
"Setup complete ✅ (2 CPUs, 12.7 GB RAM, 23.1/166.8 GB disk)\n"
"Ultralytics YOLOv8.0.1 🚀 Python-3.8.16 torch-1.13.0+cu116 CUDA:0 (Tesla T4, 15110MiB)\n",
"Setup complete ✅ (2 CPUs, 12.7 GB RAM, 23.1/78.2 GB disk)\n"
]
}
]
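
For readers following along outside Colab, the setup cell above boils down to the following (a minimal sketch, assuming the `ultralytics` pip package shown in the cell):

```python
# Minimal sketch of the setup cell above, assuming the ultralytics pip package.
# In a notebook: %pip install ultralytics
import ultralytics

# Prints package/Python/torch versions plus CUDA, RAM and disk stats,
# i.e. the "Setup complete ✅" lines recorded in this output.
ultralytics.checks()
```
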
@@ -110,28 +111,28 @@
"colab": {
"base_uri": "https://localhost:8080/"
},
"outputId": "b4d62712-a064-4f5b-856f-8a1a58e900ec"
"outputId": "bc3ee5db-5c36-4dcc-d016-d6b93c756eb2"
},
"source": [
"!yolo task=detect mode=predict model=yolov8n.pt conf=0.25 source='https://ultralytics.com/images/zidane.jpg'\n",
"# display.Image(filename='runs/detect/predict/zidane.jpg', width=600)"
],
"execution_count": null,
"execution_count": 2,
"outputs": [
{
"output_type": "stream",
"name": "stdout",
"text": [
"Downloading https://ultralytics.com/images/zidane.jpg to zidane.jpg...\n",
"100% 165k/165k [00:00<00:00, 72.6MB/s]\n",
"Ultralytics YOLO 🚀 0.0.59 Python-3.8.16 torch-1.13.0+cu116 CUDA:0 (Tesla T4, 15110MiB)\n",
"100% 165k/165k [00:00<00:00, 8.97MB/s]\n",
"Ultralytics YOLOv8.0.1 🚀 Python-3.8.16 torch-1.13.0+cu116 CUDA:0 (Tesla T4, 15110MiB)\n",
"Downloading https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8n.pt to yolov8n.pt...\n",
"100% 6.24M/6.24M [00:03<00:00, 2.14MB/s]\n",
"100% 6.24M/6.24M [00:01<00:00, 6.32MB/s]\n",
"\n",
"Fusing layers... \n",
"YOLOv8n summary: 168 layers, 3151904 parameters, 0 gradients, 8.7 GFLOPs\n",
"image 1/1 /content/zidane.jpg: 384x640 2 persons, 1 tie, 78.0ms\n",
"Speed: 0.6ms pre-process, 78.0ms inference, 29.2ms postprocess per image at shape (1, 3, 640, 640)\n",
"image 1/1 /content/zidane.jpg: 384x640 2 persons, 1 tie, 13.6ms\n",
"Speed: 0.4ms pre-process, 13.6ms inference, 51.9ms postprocess per image at shape (1, 3, 640, 640)\n",
"Results saved to \u001b[1mruns/detect/predict\u001b[0m\n"
]
}
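
The cell above drives prediction through the `yolo` CLI; the same call through the Python API would look roughly like this (a sketch, assuming the `YOLO` class exported by the `ultralytics` package, which is not part of this diff):

```python
from ultralytics import YOLO

# Load the pretrained nano detection weights (auto-downloaded on first use).
model = YOLO("yolov8n.pt")

# Same prediction as the CLI cell above: conf threshold 0.25 on the zidane.jpg sample.
# save=True writes the annotated image under runs/detect/predict*.
results = model.predict(
    source="https://ultralytics.com/images/zidane.jpg",
    conf=0.25,
    save=True,
)
```
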
@@ -178,104 +179,104 @@
"colab": {
"base_uri": "https://localhost:8080/"
},
"outputId": "a3853c39-e2d3-40ca-bdab-32880af1751b"
"outputId": "ec81409c-7f16-44ec-ac70-8c09021e25a1"
},
"source": [
"# Validate YOLOv8n on COCO128 val\n",
"!yolo task=detect mode=val model=yolov8n.pt data=coco128.yaml"
],
"execution_count": null,
"execution_count": 3,
"outputs": [
{
"output_type": "stream",
"name": "stdout",
"text": [
"Ultralytics YOLO 🚀 0.0.59 Python-3.8.16 torch-1.13.0+cu116 CUDA:0 (Tesla T4, 15110MiB)\n",
"Ultralytics YOLOv8.0.1 🚀 Python-3.8.16 torch-1.13.0+cu116 CUDA:0 (Tesla T4, 15110MiB)\n",
"Fusing layers... \n",
"YOLOv8n summary: 168 layers, 3151904 parameters, 0 gradients, 8.7 GFLOPs\n",
"\n",
"Dataset not found ⚠, missing paths ['/datasets/coco128/images/train2017']\n",
"Downloading https://ultralytics.com/assets/coco128.zip to coco128.zip...\n",
"100% 6.66M/6.66M [00:03<00:00, 2.27MB/s]\n",
"Dataset download success ✅ (4.8s), saved to \u001b[1m/datasets\u001b[0m\n",
"100% 6.66M/6.66M [00:01<00:00, 6.22MB/s]\n",
"Dataset download success ✅ (1.9s), saved to \u001b[1m/datasets\u001b[0m\n",
"Downloading https://ultralytics.com/assets/Arial.ttf to /root/.config/Ultralytics/Arial.ttf...\n",
"100% 755k/755k [00:00<00:00, 115MB/s]\n",
"\u001b[34m\u001b[1mval: \u001b[0mScanning /datasets/coco128/labels/train2017... 126 images, 2 backgrounds, 0 corrupt: 100% 128/128 [00:00<00:00, 948.62it/s] \n",
"100% 755k/755k [00:00<00:00, 27.8MB/s]\n",
"\u001b[34m\u001b[1mval: \u001b[0mScanning /datasets/coco128/labels/train2017... 126 images, 2 backgrounds, 0 corrupt: 100% 128/128 [00:00<00:00, 1327.78it/s]\n",
"\u001b[34m\u001b[1mval: \u001b[0mNew cache created: /datasets/coco128/labels/train2017.cache\n",
" Class Images Instances Box(P R mAP50 mAP50-95): 100% 8/8 [00:07<00:00, 1.08it/s]\n",
" all 128 929 0.639 0.523 0.608 0.447\n",
" person 128 254 0.82 0.679 0.767 0.54\n",
" bicycle 128 6 0.369 0.167 0.294 0.203\n",
" car 128 46 0.746 0.196 0.24 0.147\n",
" motorcycle 128 5 0.687 0.8 0.898 0.707\n",
" airplane 128 6 0.645 0.667 0.851 0.689\n",
" bus 128 7 0.743 0.714 0.727 0.633\n",
" train 128 3 0.532 0.667 0.741 0.554\n",
" truck 128 12 0.667 0.333 0.448 0.263\n",
" boat 128 6 0.226 0.167 0.332 0.184\n",
" traffic light 128 14 0.535 0.143 0.193 0.146\n",
" stop sign 128 2 0.601 0.5 0.828 0.68\n",
" bench 128 9 0.716 0.444 0.518 0.292\n",
" bird 128 16 0.815 0.75 0.859 0.499\n",
" cat 128 4 0.782 0.909 0.945 0.749\n",
" dog 128 9 0.603 0.889 0.764 0.58\n",
" horse 128 2 0.73 1 0.995 0.572\n",
" elephant 128 17 0.794 0.882 0.934 0.74\n",
" bear 128 1 0.615 1 0.995 0.995\n",
" zebra 128 4 0.837 1 0.995 0.964\n",
" giraffe 128 9 0.717 1 0.975 0.676\n",
" backpack 128 6 0.584 0.333 0.432 0.276\n",
" umbrella 128 18 0.83 0.556 0.674 0.409\n",
" handbag 128 19 1 0.11 0.255 0.111\n",
" tie 128 7 0.669 0.714 0.711 0.519\n",
" suitcase 128 4 0.773 0.867 0.945 0.644\n",
" frisbee 128 5 0.586 0.8 0.732 0.686\n",
" skis 128 1 0.733 1 0.995 0.497\n",
" snowboard 128 7 1 0.546 0.753 0.516\n",
" sports ball 128 6 0.574 0.5 0.464 0.263\n",
" kite 128 10 0.597 0.5 0.548 0.22\n",
" baseball bat 128 4 0.496 0.5 0.269 0.12\n",
" baseball glove 128 7 0.598 0.429 0.429 0.293\n",
" skateboard 128 5 0.806 0.6 0.635 0.411\n",
" tennis racket 128 7 0.955 0.429 0.564 0.373\n",
" bottle 128 18 0.601 0.444 0.444 0.281\n",
" wine glass 128 16 0.815 0.562 0.604 0.377\n",
" cup 128 36 0.606 0.25 0.425 0.308\n",
" fork 128 6 0.518 0.167 0.17 0.169\n",
" knife 128 16 0.646 0.438 0.581 0.365\n",
" spoon 128 22 0.491 0.227 0.334 0.186\n",
" bowl 128 28 0.643 0.536 0.632 0.498\n",
" banana 128 1 0 0 0.142 0.0529\n",
" sandwich 128 2 0.226 0.5 0.414 0.414\n",
" orange 128 4 1 0 0.87 0.562\n",
" broccoli 128 11 0.416 0.182 0.239 0.197\n",
" carrot 128 24 0.719 0.458 0.583 0.377\n",
" hot dog 128 2 0.269 0.5 0.695 0.695\n",
" pizza 128 5 0.673 1 0.995 0.859\n",
" donut 128 14 0.673 1 0.976 0.886\n",
" cake 128 4 0.627 1 0.945 0.824\n",
" chair 128 35 0.428 0.543 0.498 0.251\n",
" couch 128 6 0.47 0.5 0.673 0.537\n",
" potted plant 128 14 0.808 0.643 0.714 0.501\n",
" bed 128 3 0.842 0.667 0.741 0.559\n",
" dining table 128 13 0.413 0.538 0.47 0.378\n",
" toilet 128 2 0.618 0.5 0.62 0.596\n",
" tv 128 2 0.517 0.5 0.828 0.712\n",
" laptop 128 3 1 0 0.592 0.422\n",
" mouse 128 2 1 0 0.0768 0.023\n",
" remote 128 8 0.843 0.5 0.512 0.439\n",
" cell phone 128 8 0.254 0.0636 0.109 0.0757\n",
" microwave 128 3 0.459 1 0.995 0.798\n",
" oven 128 5 0.436 0.4 0.391 0.31\n",
" sink 128 6 0.265 0.167 0.198 0.124\n",
" refrigerator 128 5 0.654 0.4 0.61 0.468\n",
" book 128 29 0.466 0.0619 0.394 0.178\n",
" clock 128 9 0.653 0.778 0.778 0.595\n",
" vase 128 2 0.18 1 0.695 0.695\n",
" scissors 128 1 1 0 0.124 0.0224\n",
" teddy bear 128 21 0.756 0.429 0.573 0.341\n",
" toothbrush 128 5 1 0.37 0.803 0.533\n",
"Speed: 2.6ms pre-process, 5.2ms inference, 0.0ms loss, 3.9ms post-process per image\n"
" Class Images Instances Box(P R mAP50 mAP50-95): 100% 8/8 [00:04<00:00, 1.74it/s]\n",
" all 128 929 0.64 0.537 0.605 0.446\n",
" person 128 254 0.797 0.677 0.764 0.538\n",
" bicycle 128 6 0.514 0.333 0.315 0.264\n",
" car 128 46 0.813 0.217 0.273 0.168\n",
" motorcycle 128 5 0.687 0.887 0.898 0.685\n",
" airplane 128 6 0.82 0.833 0.927 0.675\n",
" bus 128 7 0.491 0.714 0.728 0.671\n",
" train 128 3 0.534 0.667 0.706 0.604\n",
" truck 128 12 1 0.332 0.473 0.297\n",
" boat 128 6 0.226 0.167 0.316 0.134\n",
" traffic light 128 14 0.734 0.2 0.202 0.139\n",
" stop sign 128 2 1 0.992 0.995 0.701\n",
" bench 128 9 0.839 0.582 0.62 0.365\n",
" bird 128 16 0.921 0.728 0.864 0.51\n",
" cat 128 4 0.875 1 0.995 0.791\n",
" dog 128 9 0.603 0.889 0.785 0.585\n",
" horse 128 2 0.597 1 0.995 0.518\n",
" elephant 128 17 0.849 0.765 0.9 0.679\n",
" bear 128 1 0.593 1 0.995 0.995\n",
" zebra 128 4 0.848 1 0.995 0.965\n",
" giraffe 128 9 0.72 1 0.951 0.722\n",
" backpack 128 6 0.589 0.333 0.376 0.232\n",
" umbrella 128 18 0.804 0.5 0.643 0.414\n",
" handbag 128 19 0.424 0.0526 0.165 0.0889\n",
" tie 128 7 0.804 0.714 0.674 0.476\n",
" suitcase 128 4 0.635 0.883 0.745 0.534\n",
" frisbee 128 5 0.675 0.8 0.759 0.688\n",
" skis 128 1 0.567 1 0.995 0.497\n",
" snowboard 128 7 0.742 0.714 0.747 0.5\n",
" sports ball 128 6 0.716 0.433 0.485 0.278\n",
" kite 128 10 0.817 0.45 0.569 0.184\n",
" baseball bat 128 4 0.551 0.25 0.353 0.175\n",
" baseball glove 128 7 0.624 0.429 0.429 0.293\n",
" skateboard 128 5 0.846 0.6 0.6 0.41\n",
" tennis racket 128 7 0.726 0.387 0.487 0.33\n",
" bottle 128 18 0.448 0.389 0.376 0.208\n",
" wine glass 128 16 0.743 0.362 0.584 0.333\n",
" cup 128 36 0.58 0.278 0.404 0.29\n",
" fork 128 6 0.527 0.167 0.246 0.184\n",
" knife 128 16 0.564 0.5 0.59 0.36\n",
" spoon 128 22 0.597 0.182 0.328 0.19\n",
" bowl 128 28 0.648 0.643 0.618 0.491\n",
" banana 128 1 0 0 0.124 0.0379\n",
" sandwich 128 2 0.249 0.5 0.308 0.308\n",
" orange 128 4 1 0.31 0.995 0.623\n",
" broccoli 128 11 0.374 0.182 0.249 0.203\n",
" carrot 128 24 0.648 0.458 0.572 0.362\n",
" hot dog 128 2 0.351 0.553 0.745 0.721\n",
" pizza 128 5 0.644 1 0.995 0.843\n",
" donut 128 14 0.657 1 0.94 0.864\n",
" cake 128 4 0.618 1 0.945 0.845\n",
" chair 128 35 0.506 0.514 0.442 0.239\n",
" couch 128 6 0.463 0.5 0.706 0.555\n",
" potted plant 128 14 0.65 0.643 0.711 0.472\n",
" bed 128 3 0.698 0.667 0.789 0.625\n",
" dining table 128 13 0.432 0.615 0.485 0.366\n",
" toilet 128 2 0.615 0.5 0.695 0.676\n",
" tv 128 2 0.373 0.62 0.745 0.696\n",
" laptop 128 3 1 0 0.451 0.361\n",
" mouse 128 2 1 0 0.0625 0.00625\n",
" remote 128 8 0.843 0.5 0.605 0.529\n",
" cell phone 128 8 0 0 0.0549 0.0393\n",
" microwave 128 3 0.435 0.667 0.806 0.718\n",
" oven 128 5 0.412 0.4 0.339 0.27\n",
" sink 128 6 0.35 0.167 0.182 0.129\n",
" refrigerator 128 5 0.589 0.4 0.604 0.452\n",
" book 128 29 0.629 0.103 0.346 0.178\n",
" clock 128 9 0.788 0.83 0.875 0.74\n",
" vase 128 2 0.376 1 0.828 0.795\n",
" scissors 128 1 1 0 0.249 0.0746\n",
" teddy bear 128 21 0.877 0.333 0.591 0.394\n",
" toothbrush 128 5 0.743 0.6 0.638 0.374\n",
"Speed: 0.9ms pre-process, 5.5ms inference, 0.0ms loss, 2.4ms post-process per image\n"
]
}
]
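
For completeness, a Python-API sketch of the same validation run (assuming the `ultralytics` `YOLO` class; the metrics attributes shown are those of current releases and may differ slightly in the 8.0.1 snapshot above):

```python
from ultralytics import YOLO

# Validate pretrained YOLOv8n on COCO128, mirroring the CLI val cell above.
# The coco128.yaml dataset definition is bundled with the package and the
# dataset is auto-downloaded on first use, as in the log above.
model = YOLO("yolov8n.pt")
metrics = model.val(data="coco128.yaml")

# In current releases the returned metrics object exposes aggregate scores,
# e.g. the mAP50-95 value reported in the "all" row of the table above.
print(metrics.box.map)
```
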
@@ -300,20 +301,20 @@
"colab": {
"base_uri": "https://localhost:8080/"
},
"outputId": "a264b615-5f73-4b84-c708-61f15a7e5c03"
"outputId": "0f87de5c-da4e-4290-ee64-2de4d4d7cd8e"
},
"source": [
"# Train YOLOv8n on COCO128 for 3 epochs\n",
"!yolo task=detect mode=train model=yolov8n.pt data=coco128.yaml epochs=3 imgsz=640"
],
"execution_count": null,
"execution_count": 4,
"outputs": [
{
"output_type": "stream",
"name": "stdout",
"text": [
"\u001b[34m\u001b[1myolo/engine/trainer: \u001b[0mtask=detect, mode=train, model=yolov8n.pt, data=coco128.yaml, epochs=3, patience=50, batch=16, imgsz=640, save=True, cache=False, device=, workers=8, project=None, name=None, exist_ok=False, pretrained=False, optimizer=SGD, verbose=False, seed=0, deterministic=True, single_cls=False, image_weights=False, rect=False, cos_lr=False, close_mosaic=10, resume=False, overlap_mask=True, mask_ratio=4, dropout=False, val=True, save_json=False, save_hybrid=False, conf=0.001, iou=0.7, max_det=300, half=False, dnn=False, plots=True, source=ultralytics/assets, show=False, save_txt=False, save_conf=False, save_crop=False, hide_labels=False, hide_conf=False, vid_stride=1, line_thickness=3, visualize=False, augment=False, agnostic_nms=False, retina_masks=False, format=torchscript, keras=False, optimize=False, int8=False, dynamic=False, simplify=False, opset=17, workspace=4, nms=False, lr0=0.01, lrf=0.01, momentum=0.937, weight_decay=0.0005, warmup_epochs=3.0, warmup_momentum=0.8, warmup_bias_lr=0.1, box=7.5, cls=0.5, dfl=1.5, fl_gamma=0.0, label_smoothing=0.0, nbs=64, hsv_h=0.015, hsv_s=0.7, hsv_v=0.4, degrees=0.0, translate=0.1, scale=0.5, shear=0.0, perspective=0.0, flipud=0.0, fliplr=0.5, mosaic=1.0, mixup=0.0, copy_paste=0.0, hydra={'output_subdir': None, 'run': {'dir': '.'}}, v5loader=False, save_dir=runs/detect/train\n",
"Ultralytics YOLO 🚀 0.0.59 Python-3.8.16 torch-1.13.0+cu116 CUDA:0 (Tesla T4, 15110MiB)\n",
"\u001b[34m\u001b[1myolo/engine/trainer: \u001b[0mtask=detect, mode=train, model=yolov8n.pt, data=coco128.yaml, epochs=3, patience=50, batch=16, imgsz=640, save=True, cache=False, device=None, workers=8, project=None, name=None, exist_ok=False, pretrained=False, optimizer=SGD, verbose=False, seed=0, deterministic=True, single_cls=False, image_weights=False, rect=False, cos_lr=False, close_mosaic=10, resume=False, overlap_mask=True, mask_ratio=4, dropout=0.0, val=True, save_json=False, save_hybrid=False, conf=None, iou=0.7, max_det=300, half=False, dnn=False, plots=True, source=None, show=False, save_txt=False, save_conf=False, save_crop=False, hide_labels=False, hide_conf=False, vid_stride=1, line_thickness=3, visualize=False, augment=False, agnostic_nms=False, retina_masks=False, format=torchscript, keras=False, optimize=False, int8=False, dynamic=False, simplify=False, opset=17, workspace=4, nms=False, lr0=0.01, lrf=0.01, momentum=0.937, weight_decay=0.0005, warmup_epochs=3.0, warmup_momentum=0.8, warmup_bias_lr=0.1, box=7.5, cls=0.5, dfl=1.5, fl_gamma=0.0, label_smoothing=0.0, nbs=64, hsv_h=0.015, hsv_s=0.7, hsv_v=0.4, degrees=0.0, translate=0.1, scale=0.5, shear=0.0, perspective=0.0, flipud=0.0, fliplr=0.5, mosaic=1.0, mixup=0.0, copy_paste=0.0, hydra={'output_subdir': None, 'run': {'dir': '.'}}, v5loader=False, save_dir=runs/detect/train\n",
"Ultralytics YOLOv8.0.1 🚀 Python-3.8.16 torch-1.13.0+cu116 CUDA:0 (Tesla T4, 15110MiB)\n",
"\n",
" from n params module arguments \n",
" 0 -1 1 464 ultralytics.nn.modules.Conv [3, 16, 3, 2] \n",
@@ -352,102 +353,102 @@
"Starting training for 3 epochs...\n",
"\n",
" Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n",
" 1/3 4.31G 1.221 1.429 1.241 196 640: 100% 8/8 [00:08<00:00, 1.04s/it]\n",
" Class Images Instances Box(P R mAP50 mAP50-95): 100% 4/4 [00:02<00:00, 1.48it/s]\n",
" all 128 929 0.645 0.539 0.617 0.454\n",
" 1/3 4.31G 1.221 1.429 1.241 196 640: 100% 8/8 [00:08<00:00, 1.06s/it]\n",
" Class Images Instances Box(P R mAP50 mAP50-95): 100% 4/4 [00:01<00:00, 2.18it/s]\n",
" all 128 929 0.671 0.516 0.617 0.457\n",
"\n",
" Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n",
" 2/3 5.31G 1.186 1.306 1.255 287 640: 100% 8/8 [00:06<00:00, 1.17it/s]\n",
" Class Images Instances Box(P R mAP50 mAP50-95): 100% 4/4 [00:02<00:00, 1.37it/s]\n",
" all 128 929 0.654 0.601 0.643 0.474\n",
" 2/3 5.31G 1.186 1.306 1.255 287 640: 100% 8/8 [00:05<00:00, 1.57it/s]\n",
" Class Images Instances Box(P R mAP50 mAP50-95): 100% 4/4 [00:01<00:00, 2.23it/s]\n",
" all 128 929 0.668 0.582 0.637 0.473\n",
"\n",
" Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n",
" 3/3 5.31G 1.17 1.408 1.267 189 640: 100% 8/8 [00:06<00:00, 1.18it/s]\n",
" Class Images Instances Box(P R mAP50 mAP50-95): 100% 4/4 [00:04<00:00, 1.17s/it]\n",
" all 128 929 0.636 0.619 0.648 0.479\n",
" 3/3 5.31G 1.17 1.408 1.267 189 640: 100% 8/8 [00:04<00:00, 1.62it/s]\n",
" Class Images Instances Box(P R mAP50 mAP50-95): 100% 4/4 [00:03<00:00, 1.01it/s]\n",
" all 128 929 0.638 0.601 0.645 0.483\n",
"\n",
"3 epochs completed in 0.013 hours.\n",
"3 epochs completed in 0.010 hours.\n",
"Optimizer stripped from runs/detect/train/weights/last.pt, 6.5MB\n",
"Optimizer stripped from runs/detect/train/weights/best.pt, 6.5MB\n",
"\n",
"Validating runs/detect/train/weights/best.pt...\n",
"Ultralytics YOLO 🚀 0.0.59 Python-3.8.16 torch-1.13.0+cu116 CUDA:0 (Tesla T4, 15110MiB)\n",
"Ultralytics YOLOv8.0.1 🚀 Python-3.8.16 torch-1.13.0+cu116 CUDA:0 (Tesla T4, 15110MiB)\n",
"Fusing layers... \n",
"Model summary: 168 layers, 3151904 parameters, 0 gradients, 8.7 GFLOPs\n",
" Class Images Instances Box(P R mAP50 mAP50-95): 100% 4/4 [00:05<00:00, 1.42s/it]\n",
" all 128 929 0.658 0.605 0.65 0.481\n",
" person 128 254 0.764 0.717 0.777 0.547\n",
" bicycle 128 6 0.4 0.167 0.333 0.192\n",
" car 128 46 0.76 0.196 0.263 0.153\n",
" motorcycle 128 5 0.711 0.987 0.938 0.786\n",
" airplane 128 6 0.756 0.833 0.927 0.666\n",
" bus 128 7 1 0.705 0.723 0.632\n",
" train 128 3 0.708 1 0.913 0.742\n",
" truck 128 12 0.733 0.417 0.452 0.308\n",
" boat 128 6 0.453 0.421 0.502 0.317\n",
" traffic light 128 14 0.528 0.143 0.164 0.137\n",
" stop sign 128 2 0.597 0.5 0.828 0.663\n",
" bench 128 9 0.953 0.556 0.627 0.371\n",
" bird 128 16 0.697 0.875 0.948 0.622\n",
" cat 128 4 0.856 1 0.995 0.834\n",
" dog 128 9 0.641 0.889 0.855 0.624\n",
" horse 128 2 0.732 1 0.995 0.597\n",
" elephant 128 17 0.829 0.941 0.946 0.74\n",
" bear 128 1 0.618 1 0.995 0.995\n",
" zebra 128 4 0.843 1 0.995 0.972\n",
" giraffe 128 9 0.879 1 0.995 0.642\n",
" backpack 128 6 0.554 0.333 0.426 0.271\n",
" umbrella 128 18 0.707 0.67 0.719 0.469\n",
" handbag 128 19 0.892 0.158 0.262 0.101\n",
" tie 128 7 0.682 0.714 0.665 0.447\n",
" suitcase 128 4 0.505 1 0.995 0.594\n",
" frisbee 128 5 0.629 0.8 0.732 0.672\n",
" skis 128 1 0.876 1 0.995 0.497\n",
" snowboard 128 7 0.504 0.571 0.68 0.511\n",
" sports ball 128 6 0.576 0.5 0.514 0.263\n",
" kite 128 10 0.657 0.5 0.603 0.24\n",
" baseball bat 128 4 0.452 0.427 0.267 0.103\n",
" baseball glove 128 7 0.531 0.429 0.43 0.327\n",
" skateboard 128 5 0.869 0.6 0.6 0.415\n",
" tennis racket 128 7 0.851 0.429 0.504 0.342\n",
" bottle 128 18 0.574 0.389 0.395 0.236\n",
" wine glass 128 16 0.892 0.517 0.664 0.402\n",
" cup 128 36 0.719 0.361 0.446 0.326\n",
" fork 128 6 0.542 0.167 0.183 0.179\n",
" knife 128 16 0.837 0.562 0.698 0.386\n",
" spoon 128 22 0.522 0.348 0.386 0.219\n",
" bowl 128 28 0.681 0.685 0.676 0.55\n",
" banana 128 1 0.0197 0.0985 0.199 0.039\n",
" sandwich 128 2 0.457 1 0.995 0.995\n",
" orange 128 4 0.75 0.752 0.845 0.627\n",
" broccoli 128 11 0.274 0.182 0.262 0.209\n",
" carrot 128 24 0.625 0.667 0.667 0.443\n",
" hot dog 128 2 0.494 1 0.828 0.796\n",
" pizza 128 5 0.737 1 0.995 0.866\n",
" donut 128 14 0.599 1 0.95 0.891\n",
" cake 128 4 0.781 1 0.995 0.877\n",
" chair 128 35 0.385 0.571 0.488 0.264\n",
" couch 128 6 0.661 0.652 0.798 0.624\n",
" potted plant 128 14 0.819 0.571 0.682 0.506\n",
" bed 128 3 0.866 1 0.995 0.611\n",
" dining table 128 13 0.454 0.615 0.591 0.451\n",
" toilet 128 2 0.524 0.5 0.62 0.608\n",
" tv 128 2 0.545 0.634 0.828 0.762\n",
" laptop 128 3 1 0 0.708 0.562\n",
" mouse 128 2 1 0 0.0681 0.0272\n",
" remote 128 8 0.849 0.5 0.525 0.432\n",
" cell phone 128 8 0.491 0.125 0.118 0.0949\n",
" microwave 128 3 0.379 1 0.913 0.729\n",
" oven 128 5 0.5 0.6 0.462 0.361\n",
" sink 128 6 0.285 0.167 0.226 0.14\n",
" refrigerator 128 5 0.552 0.505 0.724 0.544\n",
" book 128 29 0.651 0.258 0.436 0.212\n",
" clock 128 9 0.666 0.778 0.886 0.72\n",
" vase 128 2 0.241 1 0.695 0.695\n",
" scissors 128 1 1 0 0.0995 0.00995\n",
" teddy bear 128 21 0.631 0.619 0.629 0.379\n",
" toothbrush 128 5 1 0.687 0.92 0.614\n",
"Speed: 3.4ms pre-process, 4.6ms inference, 0.0ms loss, 3.6ms post-process per image\n",
" Class Images Instances Box(P R mAP50 mAP50-95): 100% 4/4 [00:04<00:00, 1.20s/it]\n",
" all 128 929 0.638 0.602 0.644 0.483\n",
" person 128 254 0.703 0.709 0.769 0.548\n",
" bicycle 128 6 0.455 0.333 0.322 0.254\n",
" car 128 46 0.773 0.217 0.291 0.184\n",
" motorcycle 128 5 0.551 0.8 0.895 0.724\n",
" airplane 128 6 0.743 0.833 0.927 0.73\n",
" bus 128 7 0.692 0.714 0.7 0.636\n",
" train 128 3 0.733 0.931 0.913 0.797\n",
" truck 128 12 0.752 0.5 0.497 0.324\n",
" boat 128 6 0.41 0.333 0.492 0.344\n",
" traffic light 128 14 0.682 0.214 0.202 0.139\n",
" stop sign 128 2 0.933 1 0.995 0.671\n",
" bench 128 9 0.752 0.556 0.603 0.416\n",
" bird 128 16 0.875 0.876 0.957 0.641\n",
" cat 128 4 0.863 1 0.995 0.76\n",
" dog 128 9 0.554 0.778 0.855 0.664\n",
" horse 128 2 0.706 1 0.995 0.561\n",
" elephant 128 17 0.761 0.882 0.929 0.722\n",
" bear 128 1 0.595 1 0.995 0.995\n",
" zebra 128 4 0.85 1 0.995 0.966\n",
" giraffe 128 9 0.891 1 0.995 0.683\n",
" backpack 128 6 0.487 0.333 0.354 0.224\n",
" umbrella 128 18 0.54 0.667 0.687 0.461\n",
" handbag 128 19 0.496 0.105 0.212 0.125\n",
" tie 128 7 0.611 0.714 0.615 0.432\n",
" suitcase 128 4 0.469 1 0.745 0.529\n",
" frisbee 128 5 0.622 0.8 0.733 0.64\n",
" skis 128 1 0.721 1 0.995 0.531\n",
" snowboard 128 7 0.687 0.714 0.751 0.51\n",
" sports ball 128 6 0.71 0.42 0.503 0.282\n",
" kite 128 10 0.81 0.5 0.59 0.197\n",
" baseball bat 128 4 0.474 0.461 0.261 0.115\n",
" baseball glove 128 7 0.67 0.429 0.43 0.317\n",
" skateboard 128 5 0.751 0.6 0.599 0.387\n",
" tennis racket 128 7 0.742 0.415 0.507 0.378\n",
" bottle 128 18 0.409 0.333 0.354 0.235\n",
" wine glass 128 16 0.562 0.5 0.597 0.356\n",
" cup 128 36 0.67 0.306 0.411 0.296\n",
" fork 128 6 0.57 0.167 0.229 0.203\n",
" knife 128 16 0.608 0.562 0.634 0.405\n",
" spoon 128 22 0.529 0.358 0.369 0.201\n",
" bowl 128 28 0.594 0.679 0.671 0.56\n",
" banana 128 1 0.0625 0.312 0.199 0.0513\n",
" sandwich 128 2 0.638 0.913 0.828 0.828\n",
" orange 128 4 0.743 0.728 0.895 0.595\n",
" broccoli 128 11 0.49 0.264 0.278 0.232\n",
" carrot 128 24 0.547 0.667 0.704 0.47\n",
" hot dog 128 2 0.578 1 0.828 0.796\n",
" pizza 128 5 0.835 1 0.995 0.84\n",
" donut 128 14 0.537 1 0.891 0.788\n",
" cake 128 4 0.807 1 0.995 0.904\n",
" chair 128 35 0.401 0.514 0.485 0.277\n",
" couch 128 6 0.795 0.649 0.746 0.504\n",
" potted plant 128 14 0.563 0.643 0.676 0.471\n",
" bed 128 3 0.777 1 0.995 0.735\n",
" dining table 128 13 0.425 0.692 0.578 0.48\n",
" toilet 128 2 0.508 0.5 0.745 0.721\n",
" tv 128 2 0.55 0.649 0.828 0.762\n",
" laptop 128 3 1 0 0.741 0.653\n",
" mouse 128 2 1 0 0.0454 0.00907\n",
" remote 128 8 0.83 0.5 0.569 0.449\n",
" cell phone 128 8 0 0 0.0819 0.0266\n",
" microwave 128 3 0.475 0.667 0.83 0.699\n",
" oven 128 5 0.5 0.4 0.348 0.275\n",
" sink 128 6 0.354 0.187 0.368 0.217\n",
" refrigerator 128 5 0.518 0.4 0.729 0.571\n",
" book 128 29 0.583 0.241 0.396 0.204\n",
" clock 128 9 0.891 0.889 0.91 0.773\n",
" vase 128 2 0.506 1 0.828 0.745\n",
" scissors 128 1 1 0 0.142 0.0426\n",
" teddy bear 128 21 0.587 0.476 0.63 0.458\n",
" toothbrush 128 5 0.784 0.736 0.898 0.544\n",
"Speed: 0.4ms pre-process, 4.7ms inference, 0.0ms loss, 3.3ms post-process per image\n",
"Saving runs/detect/train/predictions.json...\n",
"Results saved to \u001b[1mruns/detect/train\u001b[0m\n"
]
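
A Python-API sketch of the same 3-epoch training run (assuming the `ultralytics` `YOLO` class, not part of this diff):

```python
from ultralytics import YOLO

# Train YOLOv8n on COCO128 for 3 epochs at 640 px, mirroring the CLI train cell above.
model = YOLO("yolov8n.pt")
model.train(data="coco128.yaml", epochs=3, imgsz=640)

# Checkpoints land in runs/detect/train/weights/ (best.pt and last.pt),
# matching the "Optimizer stripped from ..." lines in the log.
```
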
@@ -490,26 +491,24 @@
"base_uri": "https://localhost:8080/"
},
"id": "CYIjW4igCjqD",
"outputId": "dd9e5aa6-8538-4403-c9d7-1548e8c4c39e"
"outputId": "f0910315-d678-45b4-c283-3ccd018a5024"
},
"execution_count": null,
"execution_count": 5,
"outputs": [
{
"output_type": "stream",
"name": "stdout",
"text": [
"Fusing layers... \n",
"YOLOv8n summary: 168 layers, 3151904 parameters, 0 gradients, 8.7 GFLOPs\n",
"Ultralytics YOLO 🚀 0.0.59 Python-3.8.16 torch-1.13.0+cu116 CPU\n",
"Ultralytics YOLOv8.0.1 🚀 Python-3.8.16 torch-1.13.0+cu116 CPU\n",
"Fusing layers... \n",
"YOLOv8n summary: 168 layers, 3151904 parameters, 0 gradients, 8.7 GFLOPs\n",
"\n",
"\u001b[34m\u001b[1mPyTorch:\u001b[0m starting from yolov8n.pt with output shape (1, 84, 8400) (6.2 MB)\n",
"\n",
"\u001b[34m\u001b[1mTorchScript:\u001b[0m starting export with torch 1.13.0+cu116...\n",
"\u001b[34m\u001b[1mTorchScript:\u001b[0m export success ✅ 1.9s, saved as yolov8n.torchscript (12.4 MB)\n",
"\u001b[34m\u001b[1mTorchScript:\u001b[0m export success ✅ 1.8s, saved as yolov8n.torchscript (12.4 MB)\n",
"\n",
"Export complete (3.2s)\n",
"Export complete (2.5s)\n",
"Results saved to \u001b[1m/content\u001b[0m\n",
"Predict: yolo task=detect mode=predict model=yolov8n.torchscript -WARNING ⚠ not yet supported for YOLOv8 exported models\n",
"Validate: yolo task=detect mode=val model=yolov8n.torchscript -WARNING ⚠ not yet supported for YOLOv8 exported models\n",
