Skip to content

Commit 322cb80

Browse files
committed
support benchmarking YOLOv11
1 parent 794474d commit 322cb80

2 files changed

Lines changed: 83 additions & 2 deletions

File tree

evaluation.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -9,7 +9,7 @@
99
from tqdm import tqdm
1010

1111

12-
def evaluate(inference, image_dir: str, annotations_file_path: str, output_file_name: str="predictions.json"):
12+
def evaluate(inference, image_dir: str, annotations_file_path: str, class_mapping: dict[int, str]|None=None, output_file_name: str="predictions.json"):
1313
predictions = []
1414

1515
coco_annotations = COCO(annotations_file_path)
@@ -45,7 +45,7 @@ def evaluate(inference, image_dir: str, annotations_file_path: str, output_file_
4545
predictions.append({
4646
"image_id": image_id,
4747
"bbox": this_xywh.tolist(),
48-
"category_id": int(this_class_id),
48+
"category_id": class_mapping[int(this_class_id)] if class_mapping is not None else int(this_class_id),
4949
"score": float(this_score)
5050
})
5151

models/benchmark_yolov11.py

Lines changed: 81 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,81 @@
1+
import torch
2+
import torchvision.transforms.functional as TF
3+
from supervision.utils.file import read_json_file
4+
from supervision.dataset.formats.coco import coco_categories_to_classes, build_coco_class_index_mapping
5+
import os
6+
7+
from onnx_inference import ONNXInference
8+
from trt_inference import TRTInference, build_engine
9+
from evaluation import evaluate
10+
11+
12+
def get_coco_class_index_mapping(annotations_path: str):
    """Derive the class-index mapping for a COCO annotations file.

    Presumably maps COCO category ids onto contiguous class indices (the
    helper names in ``supervision`` suggest this) — confirm against the
    supervision docs if the exact direction matters.

    Args:
        annotations_path: Path to a COCO-format annotations JSON file.

    Returns:
        The mapping produced by ``build_coco_class_index_mapping``.
    """
    categories = read_json_file(annotations_path)["categories"]
    target = coco_categories_to_classes(coco_categories=categories)
    return build_coco_class_index_mapping(
        coco_categories=categories, target_classes=target
    )
19+
20+
21+
def cxcywh_to_xyxy(boxes):
    """Convert boxes from (cx, cy, w, h) to corner (x1, y1, x2, y2) format.

    Args:
        boxes: Tensor whose last dimension holds (cx, cy, w, h).

    Returns:
        A new tensor of the same shape with (x1, y1, x2, y2) in the last
        dimension; the input is left unmodified.
    """
    cx, cy = boxes[..., 0], boxes[..., 1]
    half_w, half_h = boxes[..., 2] / 2, boxes[..., 3] / 2
    return torch.stack(
        (cx - half_w, cy - half_h, cx + half_w, cy + half_h), dim=-1
    )
28+
29+
30+
def preprocess_image(image: torch.Tensor, image_input_shape: tuple[int, int, int, int]) -> torch.Tensor:
    """Resize an image tensor to the model's input resolution.

    Args:
        image: Image tensor, either (C, H, W) or already batched (N, C, H, W).
        image_input_shape: Full model input shape in NCHW order. Only the
            trailing (H, W) pair is used. (The previous ``tuple[int, int]``
            annotation was wrong: slicing ``[2:]`` on a 2-tuple would hand
            ``TF.resize`` an empty size.)

    Returns:
        A batched (N, C, H', W') tensor resized to the model input size.
    """
    # Ensure a batch dimension so downstream inference always sees NCHW.
    if image.dim() == 3:
        image = image.unsqueeze(0)

    # image_input_shape[2:] == (H, W)
    return TF.resize(image, image_input_shape[2:])
36+
37+
38+
def postprocess_output(outputs: dict[str, torch.Tensor], image_shape: tuple[int, int, int, int]) -> tuple[torch.Tensor, torch.Tensor, torch.Tensor]:
    """Split raw detector output into normalized boxes, labels and scores.

    Args:
        outputs: Raw inference outputs. "output0" is indexed as
            (batch, detections, 6) with columns [box(4), score, class];
            only the first batch element is used.
        image_shape: Full model input shape in NCHW order, used to normalize
            box coordinates. (The previous ``tuple[int, int]`` annotation was
            wrong: indices 2 and 3 are read.)

    Returns:
        Tuple of (bboxes, labels, scores) for the single image in the batch.
    """
    detections = outputs["output0"][0]
    scores = detections[:, 4]
    labels = detections[:, 5]

    # Normalize out-of-place so the caller's tensor is not mutated — the
    # original in-place `/=` clobbered outputs["output0"] as a side effect.
    # NOTE(review): divisor order is [H, W, H, W]; for square model inputs
    # H == W so it doesn't matter, but confirm the intended coordinate order
    # before using non-square input shapes.
    scale = torch.tensor(
        [image_shape[2], image_shape[3], image_shape[2], image_shape[3]],
        device=detections.device,
    )
    bboxes = detections[:, :4] / scale

    return bboxes, labels, scores
46+
47+
48+
class YOLOv11ONNXInference(ONNXInference):
    """ONNX Runtime inference wrapper for YOLOv11 models.

    Delegates to the module-level helpers, using the model input shape
    discovered by the base class (``self.image_input_shape``).
    """

    def preprocess(self, input_image: torch.Tensor) -> torch.Tensor:
        """Resize (and batch, if needed) the image to the model input shape."""
        target_shape = self.image_input_shape
        return preprocess_image(input_image, target_shape)

    def postprocess(self, outputs: dict[str, torch.Tensor]) -> tuple[torch.Tensor, torch.Tensor, torch.Tensor]:
        """Return (bboxes, labels, scores) extracted from the raw outputs."""
        target_shape = self.image_input_shape
        return postprocess_output(outputs, target_shape)
54+
55+
56+
class YOLOv11TRTInference(TRTInference):
    """TensorRT inference wrapper for YOLOv11 engines.

    Mirrors ``YOLOv11ONNXInference``: the same module-level pre/post
    processing helpers, driven by ``self.image_input_shape`` from the base
    class.
    """

    def preprocess(self, input_image: torch.Tensor) -> torch.Tensor:
        """Resize (and batch, if needed) the image to the engine input shape."""
        target_shape = self.image_input_shape
        return preprocess_image(input_image, target_shape)

    def postprocess(self, outputs: dict[str, torch.Tensor]) -> tuple[torch.Tensor, torch.Tensor, torch.Tensor]:
        """Return (bboxes, labels, scores) extracted from the raw outputs."""
        target_shape = self.image_input_shape
        return postprocess_output(outputs, target_shape)
62+
63+
64+
if __name__ == "__main__":
    # Benchmark a YOLOv11 TensorRT engine on the COCO val2017 split.
    # Paths can be overridden via environment variables; the defaults match
    # the original hard-coded setup so existing usage is unchanged.
    model_path = os.environ.get("YOLO11_ONNX_PATH", "yolo11n.onnx")
    engine_path = os.environ.get("YOLO11_ENGINE_PATH", "yolo11n.engine")
    coco_dir = os.environ.get("COCO_VAL_DIR", "/home/isaac/cocodir/val2017")
    coco_annotations_file_path = os.environ.get(
        "COCO_VAL_ANNOTATIONS",
        "/home/isaac/cocodir/annotations/instances_val2017.json",
    )

    # evaluate() looks predicted class ids up in this mapping to produce
    # COCO category_id values, so it needs the inverse of the
    # category-id -> class-index mapping built from the annotations.
    class_mapping = get_coco_class_index_mapping(coco_annotations_file_path)
    inv_class_mapping = {v: k for k, v in class_mapping.items()}

    # Build the TensorRT engine from the ONNX model once; reuse it afterwards.
    if not os.path.exists(engine_path):
        build_engine(model_path, engine_path)

    inference = YOLOv11TRTInference(engine_path)

    evaluate(inference, coco_dir, coco_annotations_file_path, inv_class_mapping)

    inference.print_latency_stats()

0 commit comments

Comments (0)