init repo
Browse files- README.md +37 -0
- events.out.tfevents.1717087574.05dad19e7779.6477.0 +3 -0
- model.pt +3 -0
- train.py +36 -0
- yolov8-p26-c2f.yaml +66 -0
README.md
CHANGED
|
@@ -1,3 +1,40 @@
|
|
| 1 |
---
|
| 2 |
license: agpl-3.0
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 3 |
---
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
---
|
| 2 |
license: agpl-3.0
|
| 3 |
+
library_name: ultralytics
|
| 4 |
+
tags:
|
| 5 |
+
- object-detection
|
| 6 |
+
- pytorch
|
| 7 |
+
- coco
|
| 8 |
+
- p2-p6-layer
|
| 9 |
+
- yolov8
|
| 10 |
---
|
| 11 |
+
|
| 12 |
+
# YOLOv8-p2 COCO Pretrained Model
|
| 13 |
+
|
| 14 |
+
This model is a YOLOv8-p26 model trained on the COCO dataset, **with P2-P6 output layers.**
|
| 15 |
+
|
| 16 |
+
## Example Usage
|
| 17 |
+
|
| 18 |
+
```python
|
| 19 |
+
from huggingface_hub import hf_hub_download
|
| 20 |
+
from ultralytics import YOLO
|
| 21 |
+
from PIL import Image
|
| 22 |
+
|
| 23 |
+
model_path = hf_hub_download("soyeollee/yolov8x-p26-coco", "model.pt")
|
| 24 |
+
model = YOLO(model_path)
|
| 25 |
+
|
| 26 |
+
image_path = "/path/to/image"
|
| 27 |
+
output = model(Image.open(image_path))
|
| 28 |
+
```
|
| 29 |
+
|
| 30 |
+
|
| 31 |
+
## Performance
|
| 32 |
+
|
| 33 |
+
| metric (maxDets=100) | yolov8x <br/>(official) | [yolov8x-p2](https://huggingface.co/soyeollee/yolov8x-p2-coco) | yolov8x-p26<br/>(this repo) | yolov11x <br/>(official) | [yolo11x-p2](https://huggingface.co/soyeollee/yolo11x-p2-coco) |
|
| 34 |
+
|-------------------------------------------------|--------------------------|--------------------------------|-----------------------------|--------------------------|--------------------------------|
|
| 35 |
+
| AP @[ IoU=0.50:0.95 / area= all / ] | **0.540** | **0.541** | **0.544** | **0.546** | **0.553** |
|
| 36 |
+
| AP @[ IoU=0.50 / area= all / ] | 0.710 | 0.712 | 0.713 | 0.716 | 0.722 |
|
| 37 |
+
| AP @[ IoU=0.75 / area= all / ] | 0.588 | 0.590 | 0.593 | 0.595 | 0.606 |
|
| 38 |
+
| AP @[ IoU=0.50:0.95 / area= small / ] | 0.360 | 0.386 | 0.381 | 0.377 | 0.386 |
|
| 39 |
+
| AP @[ IoU=0.50:0.95 /area=medium / ] | 0.594 | 0.586 | 0.586 | 0.597 | 0.595 |
|
| 40 |
+
| AP @[ IoU=0.50:0.95 / area= large / ] | 0.707 | 0.686 | 0.691 | 0.702 | 0.702 |
|
events.out.tfevents.1717087574.05dad19e7779.6477.0
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:0002608f81ab2d7c5ada7f959e2d0b9129909778e3c4afa4ad860e7ff7473323
|
| 3 |
+
size 779594
|
model.pt
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:25b20fe3155491f3b289ae42de854cd0552b27d3d2fdb7351a4f3bf1acaf93fd
|
| 3 |
+
size 195340380
|
train.py
ADDED
|
@@ -0,0 +1,36 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import yaml
|
| 2 |
+
import os
|
| 3 |
+
|
| 4 |
+
from ultralytics import YOLO
|
| 5 |
+
|
| 6 |
+
|
| 7 |
+
config = {
|
| 8 |
+
'name': '01_v8x-p26_coco_pretrained',
|
| 9 |
+
'epochs': 500,
|
| 10 |
+
'model': 'yolov8x-p26-c2f.yaml',
|
| 11 |
+
'pretrained': None,
|
| 12 |
+
'patience': 50,
|
| 13 |
+
'seed': 0,
|
| 14 |
+
'deterministic': True,
|
| 15 |
+
'data': 'coco.yaml',
|
| 16 |
+
'imgsz': 640,
|
| 17 |
+
'batch': 64, # 16
|
| 18 |
+
'optimizer': 'SGD',
|
| 19 |
+
'close_mosaic': 10,
|
| 20 |
+
'mixup': 0.15,
|
| 21 |
+
'mosaic': 1.0,
|
| 22 |
+
'copy_paste': 0.3,
|
| 23 |
+
'scale': 0.9,
|
| 24 |
+
'cos_lr': False,
|
| 25 |
+
'lr0': 0.01,
|
| 26 |
+
'lrf': 0.01,
|
| 27 |
+
'warmup_epochs': 3.0,
|
| 28 |
+
'cache': 'disk',
|
| 29 |
+
'device': [0, 1, 2, 3],
|
| 30 |
+
'workers': 16,
|
| 31 |
+
'project': 'runs/train',
|
| 32 |
+
'val': True,
|
| 33 |
+
}
|
| 34 |
+
|
| 35 |
+
model = YOLO(config['model'])
|
| 36 |
+
model.train(**config)
|
yolov8-p26-c2f.yaml
ADDED
|
@@ -0,0 +1,66 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
# Ultralytics YOLO 🚀, AGPL-3.0 license
# YOLOv8 object detection model with P2-P6 outputs. For usage examples see https://docs.ultralytics.com/tasks/detect

# Parameters
nc: 80 # number of classes
scales: # model compound scaling constants, i.e. 'model=yolov8n-p6.yaml' will call yolov8-p6.yaml with scale 'n'
  # [depth, width, max_channels]
  n: [0.33, 0.25, 1024]
  s: [0.33, 0.50, 1024]
  m: [0.67, 0.75, 768]
  l: [1.00, 1.00, 512]
  x: [1.00, 1.25, 512]
  # YOLOv8x-p26-c2f summary: 557 layers, 98725296 parameters, 98725280 gradients, 335.2 GFLOPs

# YOLOv8.0x6 backbone
backbone:
  # [from, repeats, module, args]
  - [-1, 1, Conv, [64, 3, 2]] # 0-P1/2
  - [-1, 1, Conv, [128, 3, 2]] # 1-P2/4
  - [-1, 3, C2f, [128, True]]
  - [-1, 1, Conv, [256, 3, 2]] # 3-P3/8
  - [-1, 6, C2f, [256, True]]
  - [-1, 1, Conv, [512, 3, 2]] # 5-P4/16
  - [-1, 6, C2f, [512, True]]
  - [-1, 1, Conv, [768, 3, 2]] # 7-P5/32
  - [-1, 3, C2f, [768, True]]
  - [-1, 1, Conv, [1024, 3, 2]] # 9-P6/64
  - [-1, 3, C2f, [1024, True]]
  - [-1, 1, SPPF, [1024, 5]] # 11

# YOLOv8.0x6 head
head:
  # Top-down FPN path: upsample and fuse with successively shallower backbone levels.
  - [-1, 1, nn.Upsample, [None, 2, "nearest"]]
  - [[-1, 8], 1, Concat, [1]] # cat backbone P5
  - [-1, 3, C2f, [768, False]] # 14

  - [-1, 1, nn.Upsample, [None, 2, "nearest"]]
  - [[-1, 6], 1, Concat, [1]] # cat backbone P4
  - [-1, 3, C2f, [512, False]] # 17

  - [-1, 1, nn.Upsample, [None, 2, "nearest"]]
  - [[-1, 4], 1, Concat, [1]] # cat backbone P3
  - [-1, 3, C2f, [256, False]] # 20

  - [-1, 1, nn.Upsample, [None, 2, "nearest"]]
  - [[-1, 2], 1, Concat, [1]] # cat backbone P2
  - [-1, 3, C2f, [128, False]] # 23 (P2/4-xsmall)

  # Bottom-up PAN path: downsample and fuse back with the head levels above.
  - [-1, 1, Conv, [128, 3, 2]]
  - [[-1, 20], 1, Concat, [1]] # cat head P3
  - [-1, 3, C2f, [256, False]] # 26 (P3/8-small)

  - [-1, 1, Conv, [256, 3, 2]]
  - [[-1, 17], 1, Concat, [1]] # cat head P4
  - [-1, 3, C2f, [512, False]] # 29 (P4/16-medium)

  - [-1, 1, Conv, [512, 3, 2]]
  - [[-1, 14], 1, Concat, [1]] # cat head P5
  - [-1, 3, C2f, [768, False]] # 32 (P5/32-large)

  - [-1, 1, Conv, [768, 3, 2]]
  - [[-1, 11], 1, Concat, [1]] # cat head P6
  - [-1, 3, C2f, [1024, False]] # 35 (P6/64-xlarge)

  - [[23, 26, 29, 32, 35], 1, Detect, [nc]] # Detect(P2, P3, P4, P5, P6)