├── .dockerignore ├── .github ├── ISSUE_TEMPLATE │ ├── bug-report.yml │ ├── config.yml │ ├── feature-request.yml │ └── question.yml ├── dependabot.yml └── workflows │ ├── ci.yml │ ├── cla.yml │ ├── docker.yml │ ├── docs.yml │ ├── format.yml │ ├── links.yml │ ├── merge-main-into-prs.yml │ ├── mirror.yml │ ├── publish.yml │ └── stale.yml ├── .gitignore ├── CITATION.cff ├── CONTRIBUTING.md ├── LICENSE ├── README.md ├── README.zh-CN.md ├── docker ├── Dockerfile ├── Dockerfile-arm64 ├── Dockerfile-conda ├── Dockerfile-cpu ├── Dockerfile-jetson-jetpack4 ├── Dockerfile-jetson-jetpack5 ├── Dockerfile-jetson-jetpack6 ├── Dockerfile-jupyter ├── Dockerfile-python └── Dockerfile-runner ├── docs ├── README.md ├── build_docs.py ├── build_reference.py ├── coming_soon_template.md ├── en │ ├── CNAME │ ├── datasets │ │ ├── classify │ │ │ ├── caltech101.md │ │ │ ├── caltech256.md │ │ │ ├── cifar10.md │ │ │ ├── cifar100.md │ │ │ ├── fashion-mnist.md │ │ │ ├── imagenet.md │ │ │ ├── imagenet10.md │ │ │ ├── imagenette.md │ │ │ ├── imagewoof.md │ │ │ ├── index.md │ │ │ └── mnist.md │ │ ├── detect │ │ │ ├── african-wildlife.md │ │ │ ├── argoverse.md │ │ │ ├── brain-tumor.md │ │ │ ├── coco.md │ │ │ ├── coco128.md │ │ │ ├── coco8-multispectral.md │ │ │ ├── coco8.md │ │ │ ├── globalwheat2020.md │ │ │ ├── index.md │ │ │ ├── lvis.md │ │ │ ├── medical-pills.md │ │ │ ├── objects365.md │ │ │ ├── open-images-v7.md │ │ │ ├── roboflow-100.md │ │ │ ├── signature.md │ │ │ ├── sku-110k.md │ │ │ ├── visdrone.md │ │ │ ├── voc.md │ │ │ └── xview.md │ │ ├── explorer │ │ │ ├── api.md │ │ │ ├── dashboard.md │ │ │ ├── explorer.md │ │ │ └── index.md │ │ ├── index.md │ │ ├── obb │ │ │ ├── dota-v2.md │ │ │ ├── dota8.md │ │ │ └── index.md │ │ ├── pose │ │ │ ├── coco.md │ │ │ ├── coco8-pose.md │ │ │ ├── dog-pose.md │ │ │ ├── hand-keypoints.md │ │ │ ├── index.md │ │ │ └── tiger-pose.md │ │ ├── segment │ │ │ ├── carparts-seg.md │ │ │ ├── coco.md │ │ │ ├── coco8-seg.md │ │ │ ├── crack-seg.md │ │ │ ├── index.md │ │ │ └── package-seg.md │ │ └── track │ │ │ └── index.md │ ├── guides │ │ ├── analytics.md │ │ ├── azureml-quickstart.md │ │ ├── conda-quickstart.md │ │ ├── coral-edge-tpu-on-raspberry-pi.md │ │ ├── data-collection-and-annotation.md │ │ ├── deepstream-nvidia-jetson.md │ │ ├── defining-project-goals.md │ │ ├── distance-calculation.md │ │ ├── docker-quickstart.md │ │ ├── heatmaps.md │ │ ├── hyperparameter-tuning.md │ │ ├── index.md │ │ ├── instance-segmentation-and-tracking.md │ │ ├── isolating-segmentation-objects.md │ │ ├── kfold-cross-validation.md │ │ ├── model-deployment-options.md │ │ ├── model-deployment-practices.md │ │ ├── model-evaluation-insights.md │ │ ├── model-monitoring-and-maintenance.md │ │ ├── model-testing.md │ │ ├── model-training-tips.md │ │ ├── nvidia-jetson.md │ │ ├── object-blurring.md │ │ ├── object-counting.md │ │ ├── object-cropping.md │ │ ├── optimizing-openvino-latency-vs-throughput-modes.md │ │ ├── parking-management.md │ │ ├── preprocessing_annotated_data.md │ │ ├── queue-management.md │ │ ├── raspberry-pi.md │ │ ├── region-counting.md │ │ ├── ros-quickstart.md │ │ ├── sahi-tiled-inference.md │ │ ├── security-alarm-system.md │ │ ├── speed-estimation.md │ │ ├── steps-of-a-cv-project.md │ │ ├── streamlit-live-inference.md │ │ ├── trackzone.md │ │ ├── triton-inference-server.md │ │ ├── view-results-in-terminal.md │ │ ├── vision-eye.md │ │ ├── workouts-monitoring.md │ │ ├── yolo-common-issues.md │ │ ├── yolo-data-augmentation.md │ │ ├── yolo-performance-metrics.md │ │ └── yolo-thread-safe-inference.md │ ├── 
help │ │ ├── CI.md │ │ ├── CLA.md │ │ ├── FAQ.md │ │ ├── code-of-conduct.md │ │ ├── contributing.md │ │ ├── environmental-health-safety.md │ │ ├── index.md │ │ ├── minimum-reproducible-example.md │ │ ├── privacy.md │ │ └── security.md │ ├── hub │ │ ├── api │ │ │ └── index.md │ │ ├── app │ │ │ ├── android.md │ │ │ ├── index.md │ │ │ └── ios.md │ │ ├── cloud-training.md │ │ ├── datasets.md │ │ ├── index.md │ │ ├── inference-api.md │ │ ├── integrations.md │ │ ├── models.md │ │ ├── pro.md │ │ ├── projects.md │ │ ├── quickstart.md │ │ └── teams.md │ ├── index.md │ ├── integrations │ │ ├── albumentations.md │ │ ├── amazon-sagemaker.md │ │ ├── clearml.md │ │ ├── comet.md │ │ ├── coreml.md │ │ ├── dvc.md │ │ ├── edge-tpu.md │ │ ├── google-colab.md │ │ ├── gradio.md │ │ ├── ibm-watsonx.md │ │ ├── index.md │ │ ├── jupyterlab.md │ │ ├── kaggle.md │ │ ├── mlflow.md │ │ ├── mnn.md │ │ ├── ncnn.md │ │ ├── neural-magic.md │ │ ├── onnx.md │ │ ├── openvino.md │ │ ├── paddlepaddle.md │ │ ├── paperspace.md │ │ ├── ray-tune.md │ │ ├── roboflow.md │ │ ├── rockchip-rknn.md │ │ ├── seeedstudio-recamera.md │ │ ├── sony-imx500.md │ │ ├── tensorboard.md │ │ ├── tensorrt.md │ │ ├── tf-graphdef.md │ │ ├── tf-savedmodel.md │ │ ├── tfjs.md │ │ ├── tflite.md │ │ ├── torchscript.md │ │ ├── vscode.md │ │ └── weights-biases.md │ ├── macros │ │ ├── augmentation-args.md │ │ ├── export-args.md │ │ ├── export-table.md │ │ ├── predict-args.md │ │ ├── sam-auto-annotate.md │ │ ├── solutions-args.md │ │ ├── track-args.md │ │ ├── train-args.md │ │ ├── validation-args.md │ │ ├── visualization-args.md │ │ ├── yolo-cls-perf.md │ │ ├── yolo-det-perf.md │ │ ├── yolo-obb-perf.md │ │ ├── yolo-pose-perf.md │ │ └── yolo-seg-perf.md │ ├── models │ │ ├── fast-sam.md │ │ ├── index.md │ │ ├── mobile-sam.md │ │ ├── rtdetr.md │ │ ├── sam-2.md │ │ ├── sam.md │ │ ├── yolo-nas.md │ │ ├── yolo-world.md │ │ ├── yolo11.md │ │ ├── yolo12.md │ │ ├── yoloe.md │ │ ├── yolov10.md │ │ ├── yolov3.md │ │ ├── yolov4.md │ │ ├── yolov5.md │ │ ├── yolov6.md │ │ ├── yolov7.md │ │ ├── yolov8.md │ │ └── yolov9.md │ ├── modes │ │ ├── benchmark.md │ │ ├── export.md │ │ ├── index.md │ │ ├── predict.md │ │ ├── track.md │ │ ├── train.md │ │ └── val.md │ ├── quickstart.md │ ├── reference │ │ ├── cfg │ │ │ └── __init__.md │ │ ├── data │ │ │ ├── annotator.md │ │ │ ├── augment.md │ │ │ ├── base.md │ │ │ ├── build.md │ │ │ ├── converter.md │ │ │ ├── dataset.md │ │ │ ├── loaders.md │ │ │ ├── split.md │ │ │ ├── split_dota.md │ │ │ └── utils.md │ │ ├── engine │ │ │ ├── exporter.md │ │ │ ├── model.md │ │ │ ├── predictor.md │ │ │ ├── results.md │ │ │ ├── trainer.md │ │ │ ├── tuner.md │ │ │ └── validator.md │ │ ├── hub │ │ │ ├── __init__.md │ │ │ ├── auth.md │ │ │ ├── google │ │ │ │ └── __init__.md │ │ │ ├── session.md │ │ │ └── utils.md │ │ ├── models │ │ │ ├── fastsam │ │ │ │ ├── model.md │ │ │ │ ├── predict.md │ │ │ │ ├── utils.md │ │ │ │ └── val.md │ │ │ ├── nas │ │ │ │ ├── model.md │ │ │ │ ├── predict.md │ │ │ │ └── val.md │ │ │ ├── rtdetr │ │ │ │ ├── model.md │ │ │ │ ├── predict.md │ │ │ │ ├── train.md │ │ │ │ └── val.md │ │ │ ├── sam │ │ │ │ ├── amg.md │ │ │ │ ├── build.md │ │ │ │ ├── model.md │ │ │ │ ├── modules │ │ │ │ │ ├── blocks.md │ │ │ │ │ ├── decoders.md │ │ │ │ │ ├── encoders.md │ │ │ │ │ ├── memory_attention.md │ │ │ │ │ ├── sam.md │ │ │ │ │ ├── tiny_encoder.md │ │ │ │ │ ├── transformer.md │ │ │ │ │ └── utils.md │ │ │ │ └── predict.md │ │ │ ├── utils │ │ │ │ ├── loss.md │ │ │ │ └── ops.md │ │ │ └── yolo │ │ │ │ ├── classify │ │ │ │ ├── predict.md │ │ │ │ ├── 
train.md │ │ │ │ └── val.md │ │ │ │ ├── detect │ │ │ │ ├── predict.md │ │ │ │ ├── train.md │ │ │ │ └── val.md │ │ │ │ ├── model.md │ │ │ │ ├── obb │ │ │ │ ├── predict.md │ │ │ │ ├── train.md │ │ │ │ └── val.md │ │ │ │ ├── pose │ │ │ │ ├── predict.md │ │ │ │ ├── train.md │ │ │ │ └── val.md │ │ │ │ ├── segment │ │ │ │ ├── predict.md │ │ │ │ ├── train.md │ │ │ │ └── val.md │ │ │ │ ├── world │ │ │ │ ├── train.md │ │ │ │ └── train_world.md │ │ │ │ └── yoloe │ │ │ │ ├── predict.md │ │ │ │ ├── train.md │ │ │ │ ├── train_seg.md │ │ │ │ └── val.md │ │ ├── nn │ │ │ ├── autobackend.md │ │ │ ├── modules │ │ │ │ ├── activation.md │ │ │ │ ├── block.md │ │ │ │ ├── conv.md │ │ │ │ ├── head.md │ │ │ │ ├── transformer.md │ │ │ │ └── utils.md │ │ │ ├── tasks.md │ │ │ └── text_model.md │ │ ├── solutions │ │ │ ├── ai_gym.md │ │ │ ├── analytics.md │ │ │ ├── distance_calculation.md │ │ │ ├── heatmap.md │ │ │ ├── instance_segmentation.md │ │ │ ├── object_blurrer.md │ │ │ ├── object_counter.md │ │ │ ├── object_cropper.md │ │ │ ├── parking_management.md │ │ │ ├── queue_management.md │ │ │ ├── region_counter.md │ │ │ ├── security_alarm.md │ │ │ ├── solutions.md │ │ │ ├── speed_estimation.md │ │ │ ├── streamlit_inference.md │ │ │ ├── trackzone.md │ │ │ └── vision_eye.md │ │ ├── trackers │ │ │ ├── basetrack.md │ │ │ ├── bot_sort.md │ │ │ ├── byte_tracker.md │ │ │ ├── track.md │ │ │ └── utils │ │ │ │ ├── gmc.md │ │ │ │ ├── kalman_filter.md │ │ │ │ └── matching.md │ │ └── utils │ │ │ ├── __init__.md │ │ │ ├── autobatch.md │ │ │ ├── benchmarks.md │ │ │ ├── callbacks │ │ │ ├── base.md │ │ │ ├── clearml.md │ │ │ ├── comet.md │ │ │ ├── dvc.md │ │ │ ├── hub.md │ │ │ ├── mlflow.md │ │ │ ├── neptune.md │ │ │ ├── raytune.md │ │ │ ├── tensorboard.md │ │ │ └── wb.md │ │ │ ├── checks.md │ │ │ ├── dist.md │ │ │ ├── downloads.md │ │ │ ├── errors.md │ │ │ ├── export.md │ │ │ ├── files.md │ │ │ ├── instance.md │ │ │ ├── loss.md │ │ │ ├── metrics.md │ │ │ ├── ops.md │ │ │ ├── patches.md │ │ │ ├── plotting.md │ │ │ ├── tal.md │ │ │ ├── torch_utils.md │ │ │ ├── triton.md │ │ │ └── tuner.md │ ├── robots.txt │ ├── solutions │ │ └── index.md │ ├── tasks │ │ ├── classify.md │ │ ├── detect.md │ │ ├── index.md │ │ ├── obb.md │ │ ├── pose.md │ │ └── segment.md │ ├── usage │ │ ├── callbacks.md │ │ ├── cfg.md │ │ ├── cli.md │ │ ├── engine.md │ │ ├── python.md │ │ └── simple-utilities.md │ └── yolov5 │ │ ├── environments │ │ ├── aws_quickstart_tutorial.md │ │ ├── azureml_quickstart_tutorial.md │ │ ├── docker_image_quickstart_tutorial.md │ │ └── google_cloud_quickstart_tutorial.md │ │ ├── index.md │ │ ├── quickstart_tutorial.md │ │ └── tutorials │ │ ├── architecture_description.md │ │ ├── clearml_logging_integration.md │ │ ├── comet_logging_integration.md │ │ ├── hyperparameter_evolution.md │ │ ├── model_ensembling.md │ │ ├── model_export.md │ │ ├── model_pruning_and_sparsity.md │ │ ├── multi_gpu_training.md │ │ ├── neural_magic_pruning_quantization.md │ │ ├── pytorch_hub_model_loading.md │ │ ├── test_time_augmentation.md │ │ ├── tips_for_best_training_results.md │ │ ├── train_custom_data.md │ │ └── transfer_learning_with_frozen_layers.md ├── mkdocs_github_authors.yaml ├── model_data.py └── overrides │ ├── javascript │ ├── benchmark.js │ ├── extra.js │ ├── giscus.js │ └── tablesort.js │ ├── main.html │ ├── partials │ └── comments.html │ └── stylesheets │ └── style.css ├── examples ├── README.md ├── RTDETR-ONNXRuntime-Python │ ├── README.md │ └── main.py ├── YOLO-Interactive-Tracking-UI │ ├── README.md │ └── interactive_tracker.py ├── 
YOLO-Series-ONNXRuntime-Rust │ ├── Cargo.toml │ ├── README.md │ └── src │ │ └── main.rs ├── YOLOv8-Action-Recognition │ ├── README.md │ ├── action_recognition.py │ └── requirements.txt ├── YOLOv8-CPP-Inference │ ├── CMakeLists.txt │ ├── README.md │ ├── inference.cpp │ ├── inference.h │ └── main.cpp ├── YOLOv8-LibTorch-CPP-Inference │ ├── CMakeLists.txt │ ├── README.md │ └── main.cc ├── YOLOv8-MNN-CPP │ ├── CMakeLists.txt │ ├── README.md │ ├── main.cpp │ └── main_interpreter.cpp ├── YOLOv8-ONNXRuntime-CPP │ ├── CMakeLists.txt │ ├── README.md │ ├── inference.cpp │ ├── inference.h │ └── main.cpp ├── YOLOv8-ONNXRuntime-Rust │ ├── Cargo.toml │ ├── README.md │ └── src │ │ ├── cli.rs │ │ ├── lib.rs │ │ ├── main.rs │ │ ├── model.rs │ │ ├── ort_backend.rs │ │ └── yolo_result.rs ├── YOLOv8-ONNXRuntime │ ├── README.md │ └── main.py ├── YOLOv8-OpenCV-ONNX-Python │ ├── README.md │ └── main.py ├── YOLOv8-OpenVINO-CPP-Inference │ ├── CMakeLists.txt │ ├── README.md │ ├── inference.cc │ ├── inference.h │ └── main.cc ├── YOLOv8-Region-Counter │ ├── README.md │ └── yolov8_region_counter.py ├── YOLOv8-SAHI-Inference-Video │ ├── README.md │ └── yolov8_sahi.py ├── YOLOv8-Segmentation-ONNXRuntime-Python │ ├── README.md │ └── main.py ├── YOLOv8-TFLite-Python │ ├── README.md │ └── main.py ├── heatmaps.ipynb ├── hub.ipynb ├── object_counting.ipynb ├── object_tracking.ipynb └── tutorial.ipynb ├── mkdocs.yml ├── pyproject.toml ├── tests ├── __init__.py ├── conftest.py ├── test_cli.py ├── test_cuda.py ├── test_engine.py ├── test_exports.py ├── test_integrations.py ├── test_python.py └── test_solutions.py └── ultralytics ├── __init__.py ├── assets ├── bus.jpg └── zidane.jpg ├── cfg ├── __init__.py ├── datasets │ ├── Argoverse.yaml │ ├── DOTAv1.5.yaml │ ├── DOTAv1.yaml │ ├── GlobalWheat2020.yaml │ ├── ImageNet.yaml │ ├── Objects365.yaml │ ├── SKU-110K.yaml │ ├── VOC.yaml │ ├── VisDrone.yaml │ ├── african-wildlife.yaml │ ├── brain-tumor.yaml │ ├── carparts-seg.yaml │ ├── coco-pose.yaml │ ├── coco.yaml │ ├── coco128-seg.yaml │ ├── coco128.yaml │ ├── coco8-multispectral.yaml │ ├── coco8-pose.yaml │ ├── coco8-seg.yaml │ ├── coco8.yaml │ ├── crack-seg.yaml │ ├── dog-pose.yaml │ ├── dota8-multispectral.yaml │ ├── dota8.yaml │ ├── hand-keypoints.yaml │ ├── lvis.yaml │ ├── medical-pills.yaml │ ├── open-images-v7.yaml │ ├── package-seg.yaml │ ├── signature.yaml │ ├── tiger-pose.yaml │ └── xView.yaml ├── default.yaml ├── models │ ├── 11 │ │ ├── yolo11-cls-resnet18.yaml │ │ ├── yolo11-cls.yaml │ │ ├── yolo11-obb.yaml │ │ ├── yolo11-pose.yaml │ │ ├── yolo11-seg.yaml │ │ ├── yolo11.yaml │ │ ├── yoloe-11-seg.yaml │ │ └── yoloe-11.yaml │ ├── 12 │ │ ├── yolo12-cls.yaml │ │ ├── yolo12-obb.yaml │ │ ├── yolo12-pose.yaml │ │ ├── yolo12-seg.yaml │ │ └── yolo12.yaml │ ├── README.md │ ├── rt-detr │ │ ├── rtdetr-l.yaml │ │ ├── rtdetr-resnet101.yaml │ │ ├── rtdetr-resnet50.yaml │ │ └── rtdetr-x.yaml │ ├── v10 │ │ ├── yolov10b.yaml │ │ ├── yolov10l.yaml │ │ ├── yolov10m.yaml │ │ ├── yolov10n.yaml │ │ ├── yolov10s.yaml │ │ └── yolov10x.yaml │ ├── v3 │ │ ├── yolov3-spp.yaml │ │ ├── yolov3-tiny.yaml │ │ └── yolov3.yaml │ ├── v5 │ │ ├── yolov5-p6.yaml │ │ └── yolov5.yaml │ ├── v6 │ │ └── yolov6.yaml │ ├── v8 │ │ ├── yoloe-v8-seg.yaml │ │ ├── yoloe-v8.yaml │ │ ├── yolov8-cls-resnet101.yaml │ │ ├── yolov8-cls-resnet50.yaml │ │ ├── yolov8-cls.yaml │ │ ├── yolov8-ghost-p2.yaml │ │ ├── yolov8-ghost-p6.yaml │ │ ├── yolov8-ghost.yaml │ │ ├── yolov8-obb.yaml │ │ ├── yolov8-p2.yaml │ │ ├── yolov8-p6.yaml │ │ ├── yolov8-pose-p6.yaml │ │ ├── yolov8-pose.yaml │ 
│ ├── yolov8-rtdetr.yaml │ │ ├── yolov8-seg-p6.yaml │ │ ├── yolov8-seg.yaml │ │ ├── yolov8-world.yaml │ │ ├── yolov8-worldv2.yaml │ │ └── yolov8.yaml │ └── v9 │ │ ├── yolov9c-seg.yaml │ │ ├── yolov9c.yaml │ │ ├── yolov9e-seg.yaml │ │ ├── yolov9e.yaml │ │ ├── yolov9m.yaml │ │ ├── yolov9s.yaml │ │ └── yolov9t.yaml ├── solutions │ └── default.yaml └── trackers │ ├── botsort.yaml │ └── bytetrack.yaml ├── data ├── __init__.py ├── annotator.py ├── augment.py ├── base.py ├── build.py ├── converter.py ├── dataset.py ├── loaders.py ├── scripts │ ├── download_weights.sh │ ├── get_coco.sh │ ├── get_coco128.sh │ └── get_imagenet.sh ├── split.py ├── split_dota.py └── utils.py ├── engine ├── __init__.py ├── exporter.py ├── model.py ├── predictor.py ├── results.py ├── trainer.py ├── tuner.py └── validator.py ├── hub ├── __init__.py ├── auth.py ├── google │ └── __init__.py ├── session.py └── utils.py ├── models ├── __init__.py ├── fastsam │ ├── __init__.py │ ├── model.py │ ├── predict.py │ ├── utils.py │ └── val.py ├── nas │ ├── __init__.py │ ├── model.py │ ├── predict.py │ └── val.py ├── rtdetr │ ├── __init__.py │ ├── model.py │ ├── predict.py │ ├── train.py │ └── val.py ├── sam │ ├── __init__.py │ ├── amg.py │ ├── build.py │ ├── model.py │ ├── modules │ │ ├── __init__.py │ │ ├── blocks.py │ │ ├── decoders.py │ │ ├── encoders.py │ │ ├── memory_attention.py │ │ ├── sam.py │ │ ├── tiny_encoder.py │ │ ├── transformer.py │ │ └── utils.py │ └── predict.py ├── utils │ ├── __init__.py │ ├── loss.py │ └── ops.py └── yolo │ ├── __init__.py │ ├── classify │ ├── __init__.py │ ├── predict.py │ ├── train.py │ └── val.py │ ├── detect │ ├── __init__.py │ ├── predict.py │ ├── train.py │ └── val.py │ ├── model.py │ ├── obb │ ├── __init__.py │ ├── predict.py │ ├── train.py │ └── val.py │ ├── pose │ ├── __init__.py │ ├── predict.py │ ├── train.py │ └── val.py │ ├── segment │ ├── __init__.py │ ├── predict.py │ ├── train.py │ └── val.py │ ├── world │ ├── __init__.py │ ├── train.py │ └── train_world.py │ └── yoloe │ ├── __init__.py │ ├── predict.py │ ├── train.py │ ├── train_seg.py │ └── val.py ├── nn ├── __init__.py ├── autobackend.py ├── modules │ ├── __init__.py │ ├── activation.py │ ├── block.py │ ├── conv.py │ ├── head.py │ ├── transformer.py │ └── utils.py ├── tasks.py └── text_model.py ├── solutions ├── __init__.py ├── ai_gym.py ├── analytics.py ├── distance_calculation.py ├── heatmap.py ├── instance_segmentation.py ├── object_blurrer.py ├── object_counter.py ├── object_cropper.py ├── parking_management.py ├── queue_management.py ├── region_counter.py ├── security_alarm.py ├── solutions.py ├── speed_estimation.py ├── streamlit_inference.py ├── trackzone.py └── vision_eye.py ├── trackers ├── README.md ├── __init__.py ├── basetrack.py ├── bot_sort.py ├── byte_tracker.py ├── track.py └── utils │ ├── __init__.py │ ├── gmc.py │ ├── kalman_filter.py │ └── matching.py └── utils ├── __init__.py ├── autobatch.py ├── benchmarks.py ├── callbacks ├── __init__.py ├── base.py ├── clearml.py ├── comet.py ├── dvc.py ├── hub.py ├── mlflow.py ├── neptune.py ├── raytune.py ├── tensorboard.py └── wb.py ├── checks.py ├── dist.py ├── downloads.py ├── errors.py ├── export.py ├── files.py ├── instance.py ├── loss.py ├── metrics.py ├── ops.py ├── patches.py ├── plotting.py ├── tal.py ├── torch_utils.py ├── triton.py └── tuner.py /.dockerignore: -------------------------------------------------------------------------------- 1 | # Python 2 | __pycache__ 3 | *.pyc 4 | *.pyo 5 | *.pyd 6 | .Python 7 | *.py[cod] 8 | *$py.class 9 | .pytest_cache 10 
| .coverage 11 | coverage.xml 12 | .ruff_cache 13 | *.egg-info 14 | dist 15 | build 16 | 17 | # Development 18 | .env 19 | .venv 20 | env/ 21 | venv/ 22 | ENV/ 23 | .idea 24 | .vscode 25 | *.swp 26 | *.swo 27 | .DS_Store 28 | 29 | # Project specific 30 | *.log 31 | benchmarks.log 32 | runs/ 33 | 34 | # Dependencies 35 | node_modules/ 36 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/config.yml: -------------------------------------------------------------------------------- 1 | # Ultralytics 🚀 AGPL-3.0 License - https://ultralytics.com/license 2 | 3 | blank_issues_enabled: true 4 | contact_links: 5 | - name: 📄 Docs 6 | url: https://docs.ultralytics.com/ 7 | about: Full Ultralytics YOLO Documentation 8 | - name: 💬 Forum 9 | url: https://community.ultralytics.com/ 10 | about: Ask on Ultralytics Community Forum 11 | - name: 🎧 Discord 12 | url: https://ultralytics.com/discord 13 | about: Ask on Ultralytics Discord 14 | - name: ⌨️ Reddit 15 | url: https://reddit.com/r/ultralytics 16 | about: Ask on Ultralytics Subreddit 17 | -------------------------------------------------------------------------------- /.github/dependabot.yml: -------------------------------------------------------------------------------- 1 | # Ultralytics 🚀 AGPL-3.0 License - https://ultralytics.com/license 2 | 3 | # Dependabot for package version updates 4 | # https://docs.github.com/github/administering-a-repository/configuration-options-for-dependency-updates 5 | 6 | version: 2 7 | updates: 8 | - package-ecosystem: pip 9 | directory: "/" 10 | schedule: 11 | interval: weekly 12 | time: "04:00" 13 | open-pull-requests-limit: 10 14 | reviewers: 15 | - glenn-jocher 16 | labels: 17 | - dependencies 18 | 19 | - package-ecosystem: github-actions 20 | directory: "/.github/workflows" 21 | schedule: 22 | interval: weekly 23 | time: "04:00" 24 | open-pull-requests-limit: 5 25 | reviewers: 26 | - glenn-jocher 27 | labels: 28 | - dependencies 29 | -------------------------------------------------------------------------------- /CITATION.cff: -------------------------------------------------------------------------------- 1 | # This CITATION.cff file was generated with https://bit.ly/cffinit 2 | 3 | cff-version: 1.2.0 4 | title: Ultralytics YOLO 5 | message: >- 6 | If you use this software, please cite it using the 7 | metadata from this file. 
8 | type: software 9 | authors: 10 | - given-names: Glenn 11 | family-names: Jocher 12 | affiliation: Ultralytics 13 | orcid: "https://orcid.org/0000-0001-5950-6979" 14 | - family-names: Qiu 15 | given-names: Jing 16 | affiliation: Ultralytics 17 | orcid: "https://orcid.org/0000-0003-3783-7069" 18 | - given-names: Ayush 19 | family-names: Chaurasia 20 | affiliation: Ultralytics 21 | orcid: "https://orcid.org/0000-0002-7603-6750" 22 | repository-code: "https://github.com/ultralytics/ultralytics" 23 | url: "https://ultralytics.com" 24 | license: AGPL-3.0 25 | version: 8.0.0 26 | date-released: "2023-01-10" 27 | -------------------------------------------------------------------------------- /docker/Dockerfile-jupyter: -------------------------------------------------------------------------------- 1 | # Ultralytics 🚀 AGPL-3.0 License - https://ultralytics.com/license 2 | 3 | # Builds ultralytics/ultralytics:latest-jupyter image on DockerHub https://hub.docker.com/r/ultralytics/ultralytics 4 | # Image provides JupyterLab interface for interactive YOLO development and includes tutorial notebooks 5 | 6 | # Start from Python-based Ultralytics image for full Python environment 7 | FROM ultralytics/ultralytics:latest-python 8 | 9 | # Install JupyterLab for interactive development 10 | RUN uv pip install --system jupyterlab 11 | 12 | # Create persistent data directory structure 13 | RUN mkdir /data 14 | 15 | # Configure YOLO directories 16 | RUN mkdir /data/{datasets,weights,runs} && \ 17 | yolo settings datasets_dir="/data/datasets" weights_dir="/data/weights" runs_dir="/data/runs" 18 | 19 | # Start JupyterLab with tutorial notebook 20 | ENTRYPOINT ["/usr/local/bin/jupyter", "lab", "--allow-root", "--ip=*", "/ultralytics/examples/tutorial.ipynb"] 21 | 22 | # Usage Examples ------------------------------------------------------------------------------------------------------- 23 | 24 | # Build and Push 25 | # t=ultralytics/ultralytics:latest-jupyter && sudo docker build -f docker/Dockerfile-jupyter -t $t . && sudo docker push $t 26 | 27 | # Run 28 | # t=ultralytics/ultralytics:latest-jupyter && sudo docker run -it --ipc=host -p 8888:8888 $t 29 | 30 | # Pull and Run 31 | # t=ultralytics/ultralytics:latest-jupyter && sudo docker pull $t && sudo docker run -it --ipc=host -p 8888:8888 $t 32 | 33 | # Pull and Run with local volume mounted 34 | # t=ultralytics/ultralytics:latest-jupyter && sudo docker pull $t && sudo docker run -it --ipc=host -p 8888:8888 -v "$(pwd)"/datasets:/data/datasets $t 35 | -------------------------------------------------------------------------------- /docs/en/CNAME: -------------------------------------------------------------------------------- 1 | docs.ultralytics.com 2 | -------------------------------------------------------------------------------- /docs/en/macros/sam-auto-annotate.md: -------------------------------------------------------------------------------- 1 | | Argument | Type | Default | Description | 2 | | ------------ | ----------- | -------------- | ------------------------------------------------------------------------------------ | 3 | | `data` | `str` | required | Path to directory containing target images for annotation or segmentation. | 4 | | `det_model` | `str` | `'yolo11x.pt'` | YOLO detection model path for initial object detection. | 5 | | `sam_model` | `str` | `'sam_b.pt'` | SAM model path for segmentation (supports SAM, SAM2 variants and mobile_sam models). 
| 6 | | `device` | `str` | `''` | Computation device (e.g., 'cuda:0', 'cpu', or '' for automatic device detection). | 7 | | `conf` | `float` | `0.25` | YOLO detection confidence threshold for filtering weak detections. | 8 | | `iou` | `float` | `0.45` | IoU threshold for Non-Maximum Suppression to filter overlapping boxes. | 9 | | `imgsz` | `int` | `640` | Input size for resizing images (must be multiple of 32). | 10 | | `max_det` | `int` | `300` | Maximum number of detections per image for memory efficiency. | 11 | | `classes` | `list[int]` | `None` | List of class indices to detect (e.g., `[0, 1]` for person & bicycle). | 12 | | `output_dir` | `str` | `None` | Save directory for annotations (defaults to './labels' relative to data path). | 13 |
-------------------------------------------------------------------------------- /docs/en/macros/yolo-det-perf.md: -------------------------------------------------------------------------------- 1 | | Model | size<br>(pixels) | mAPval<br>50-95 | Speed<br>CPU ONNX<br>(ms) | Speed<br>T4 TensorRT10<br>(ms) | params<br>(M) | FLOPs<br>(B) | 2 | | ------------------------------------------------------------------------------------ | --------------------- | -------------------- | ------------------------------ | ----------------------------------- | ------------------ | ----------------- | 3 | | [YOLO11n](https://github.com/ultralytics/assets/releases/download/v8.3.0/yolo11n.pt) | 640 | 39.5 | 56.1 ± 0.8 | 1.5 ± 0.0 | 2.6 | 6.5 | 4 | | [YOLO11s](https://github.com/ultralytics/assets/releases/download/v8.3.0/yolo11s.pt) | 640 | 47.0 | 90.0 ± 1.2 | 2.5 ± 0.0 | 9.4 | 21.5 | 5 | | [YOLO11m](https://github.com/ultralytics/assets/releases/download/v8.3.0/yolo11m.pt) | 640 | 51.5 | 183.2 ± 2.0 | 4.7 ± 0.1 | 20.1 | 68.0 | 6 | | [YOLO11l](https://github.com/ultralytics/assets/releases/download/v8.3.0/yolo11l.pt) | 640 | 53.4 | 238.6 ± 1.4 | 6.2 ± 0.1 | 25.3 | 86.9 | 7 | | [YOLO11x](https://github.com/ultralytics/assets/releases/download/v8.3.0/yolo11x.pt) | 640 | 54.7 | 462.8 ± 6.7 | 11.3 ± 0.2 | 56.9 | 194.9 | 8 |
-------------------------------------------------------------------------------- /docs/en/reference/data/annotator.md: -------------------------------------------------------------------------------- 1 | --- 2 | description: Explore Ultralytics' annotator script for automatic image annotation using YOLO and SAM models. Contribute to improve it on GitHub! 3 | keywords: Ultralytics, image annotation, YOLO, SAM, Python script, GitHub, object detection, segmentation 4 | --- 5 | 6 | # Reference for `ultralytics/data/annotator.py` 7 | 8 | !!! note 9 | 10 | This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/data/annotator.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/data/annotator.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/data/annotator.py) 🛠️. Thank you 🙏! 11 | 12 |
13 | 14 | ## ::: ultralytics.data.annotator.auto_annotate 15 | 16 |
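A minimal usage sketch for the function documented above. The image directory is a placeholder, and the keyword arguments mirror the defaults listed in the `sam-auto-annotate` argument table:

```python
from ultralytics.data.annotator import auto_annotate

# Detect objects with YOLO11x, then segment each detection with SAM and
# write YOLO-format segmentation labels next to the images.
auto_annotate(
    data="path/to/images",   # placeholder: directory of images to label
    det_model="yolo11x.pt",
    sam_model="sam_b.pt",
    device="",               # '' picks CUDA when available, otherwise CPU
    conf=0.25,
    iou=0.45,
    imgsz=640,
    output_dir=None,         # None saves to a './labels' folder beside the data
)
```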

17 | -------------------------------------------------------------------------------- /docs/en/reference/data/base.md: -------------------------------------------------------------------------------- 1 | --- 2 | description: Explore the Ultralytics BaseDataset class for efficient image loading and processing with custom transformations and caching options. 3 | keywords: Ultralytics, BaseDataset, image processing, data augmentation, YOLO, dataset class, image caching 4 | --- 5 | 6 | # Reference for `ultralytics/data/base.py` 7 | 8 | !!! note 9 | 10 | This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/data/base.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/data/base.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/data/base.py) 🛠️. Thank you 🙏! 11 | 12 |
13 | 14 | ## ::: ultralytics.data.base.BaseDataset 15 | 16 |

17 | -------------------------------------------------------------------------------- /docs/en/reference/data/build.md: -------------------------------------------------------------------------------- 1 | --- 2 | description: Explore the functionality and examples of data builders like InfiniteDataLoader and various YOLO dataset builders in Ultralytics. 3 | keywords: Ultralytics, Data Builders, InfiniteDataLoader, YOLO dataset, build.py, AI, Machine Learning 4 | --- 5 | 6 | # Reference for `ultralytics/data/build.py` 7 | 8 | !!! note 9 | 10 | This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/data/build.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/data/build.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/data/build.py) 🛠️. Thank you 🙏! 11 | 12 |
13 | 14 | ## ::: ultralytics.data.build.InfiniteDataLoader 15 | 16 |



17 | 18 | ## ::: ultralytics.data.build._RepeatSampler 19 | 20 |



21 | 22 | ## ::: ultralytics.data.build.seed_worker 23 | 24 |



25 | 26 | ## ::: ultralytics.data.build.build_yolo_dataset 27 | 28 |



29 | 30 | ## ::: ultralytics.data.build.build_grounding 31 | 32 |



33 | 34 | ## ::: ultralytics.data.build.build_dataloader 35 | 36 |



37 | 38 | ## ::: ultralytics.data.build.check_source 39 | 40 |



41 | 42 | ## ::: ultralytics.data.build.load_inference_source 43 | 44 |

45 | -------------------------------------------------------------------------------- /docs/en/reference/data/converter.md: -------------------------------------------------------------------------------- 1 | --- 2 | description: Explore comprehensive data conversion tools for YOLO models including COCO, DOTA, and YOLO bbox2segment converters. 3 | keywords: Ultralytics, data conversion, YOLO models, COCO, DOTA, YOLO bbox2segment, machine learning, annotations 4 | --- 5 | 6 | # Reference for `ultralytics/data/converter.py` 7 | 8 | !!! note 9 | 10 | This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/data/converter.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/data/converter.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/data/converter.py) 🛠️. Thank you 🙏! 11 | 12 |
13 | 14 | ## ::: ultralytics.data.converter.coco91_to_coco80_class 15 | 16 |



17 | 18 | ## ::: ultralytics.data.converter.coco80_to_coco91_class 19 | 20 |



21 | 22 | ## ::: ultralytics.data.converter.convert_coco 23 | 24 |
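An illustrative call for the converter documented above, assuming a standard COCO annotation layout; both directory paths are placeholders:

```python
from ultralytics.data.converter import convert_coco

# Convert COCO JSON annotations into YOLO-format .txt label files.
convert_coco(
    labels_dir="path/to/coco/annotations/",  # placeholder: folder with instances_*.json
    save_dir="coco_converted/",
    use_segments=False,  # True writes segmentation polygons instead of boxes
    cls91to80=True,      # map the 91 paper classes onto the 80 used by YOLO
)
```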



25 | 26 | ## ::: ultralytics.data.converter.convert_segment_masks_to_yolo_seg 27 | 28 |



29 | 30 | ## ::: ultralytics.data.converter.convert_dota_to_yolo_obb 31 | 32 |



33 | 34 | ## ::: ultralytics.data.converter.min_index 35 | 36 |



37 | 38 | ## ::: ultralytics.data.converter.merge_multi_segment 39 | 40 |



41 | 42 | ## ::: ultralytics.data.converter.yolo_bbox2segment 43 | 44 |



45 | 46 | ## ::: ultralytics.data.converter.create_synthetic_coco_dataset 47 | 48 |



49 | 50 | ## ::: ultralytics.data.converter.convert_to_multispectral 51 | 52 |

53 | -------------------------------------------------------------------------------- /docs/en/reference/data/dataset.md: -------------------------------------------------------------------------------- 1 | --- 2 | description: Explore the YOLODataset and its subclasses for object detection, segmentation, and multi-modal tasks. Find details on dataset loading, caching, and augmentation. 3 | keywords: Ultralytics, YOLODataset, object detection, segmentation, dataset loading, caching, data augmentation 4 | --- 5 | 6 | # Reference for `ultralytics/data/dataset.py` 7 | 8 | !!! note 9 | 10 | This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/data/dataset.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/data/dataset.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/data/dataset.py) 🛠️. Thank you 🙏! 11 | 12 |
13 | 14 | ## ::: ultralytics.data.dataset.YOLODataset 15 | 16 |



17 | 18 | ## ::: ultralytics.data.dataset.YOLOMultiModalDataset 19 | 20 |



21 | 22 | ## ::: ultralytics.data.dataset.GroundingDataset 23 | 24 |



25 | 26 | ## ::: ultralytics.data.dataset.YOLOConcatDataset 27 | 28 |



29 | 30 | ## ::: ultralytics.data.dataset.SemanticDataset 31 | 32 |



33 | 34 | ## ::: ultralytics.data.dataset.ClassificationDataset 35 | 36 |

37 | -------------------------------------------------------------------------------- /docs/en/reference/data/loaders.md: -------------------------------------------------------------------------------- 1 | --- 2 | description: Explore detailed documentation on Ultralytics data loaders including SourceTypes, LoadStreams, and more. Enhance your ML workflows with our comprehensive guides. 3 | keywords: Ultralytics, data loaders, SourceTypes, LoadStreams, LoadScreenshots, LoadImagesAndVideos, LoadPilAndNumpy, LoadTensor, ML workflows 4 | --- 5 | 6 | # Reference for `ultralytics/data/loaders.py` 7 | 8 | !!! note 9 | 10 | This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/data/loaders.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/data/loaders.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/data/loaders.py) 🛠️. Thank you 🙏! 11 | 12 |
13 | 14 | ## ::: ultralytics.data.loaders.SourceTypes 15 | 16 |



17 | 18 | ## ::: ultralytics.data.loaders.LoadStreams 19 | 20 |



21 | 22 | ## ::: ultralytics.data.loaders.LoadScreenshots 23 | 24 |



25 | 26 | ## ::: ultralytics.data.loaders.LoadImagesAndVideos 27 | 28 |



29 | 30 | ## ::: ultralytics.data.loaders.LoadPilAndNumpy 31 | 32 |



33 | 34 | ## ::: ultralytics.data.loaders.LoadTensor 35 | 36 |



37 | 38 | ## ::: ultralytics.data.loaders.autocast_list 39 | 40 |



41 | 42 | ## ::: ultralytics.data.loaders.get_best_youtube_url 43 | 44 |

45 | -------------------------------------------------------------------------------- /docs/en/reference/data/split.md: -------------------------------------------------------------------------------- 1 | --- 2 | description: Learn how to split datasets into train, validation, and test subsets using Ultralytics utilities for efficient data preparation. 3 | keywords: dataset splitting, autosplit dataset, training dataset preparation, validation set creation, Ultralytics data tools 4 | --- 5 | 6 | # Reference for `ultralytics/data/split.py` 7 | 8 | !!! note 9 | 10 | This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/data/split.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/data/split.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/data/split.py) 🛠️. Thank you 🙏! 11 | 12 |
13 | 14 | ## ::: ultralytics.data.split.split_classify_dataset 15 | 16 |



17 | 18 | ## ::: ultralytics.data.split.autosplit 19 | 20 |
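A brief sketch of `autosplit` usage; the image path is a placeholder and the weights are the train/val/test fractions:

```python
from ultralytics.data.split import autosplit

# Write autosplit_train.txt / autosplit_val.txt / autosplit_test.txt
# next to the image folder, splitting 90/10/0 by default.
autosplit(path="path/to/images", weights=(0.9, 0.1, 0.0), annotated_only=False)
```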

21 | -------------------------------------------------------------------------------- /docs/en/reference/data/split_dota.md: -------------------------------------------------------------------------------- 1 | --- 2 | description: Learn how to utilize the ultralytics.data.split_dota module to process and split DOTA datasets efficiently. Explore detailed functions and examples. 3 | keywords: Ultralytics, DOTA dataset, data splitting, YOLO, Python, bbox_iof, load_yolo_dota, get_windows, crop_and_save 4 | --- 5 | 6 | # Reference for `ultralytics/data/split_dota.py` 7 | 8 | !!! note 9 | 10 | This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/data/split_dota.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/data/split_dota.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/data/split_dota.py) 🛠️. Thank you 🙏! 11 | 12 |
13 | 14 | ## ::: ultralytics.data.split_dota.bbox_iof 15 | 16 |



17 | 18 | ## ::: ultralytics.data.split_dota.load_yolo_dota 19 | 20 |



21 | 22 | ## ::: ultralytics.data.split_dota.get_windows 23 | 24 |



25 | 26 | ## ::: ultralytics.data.split_dota.get_window_obj 27 | 28 |



29 | 30 | ## ::: ultralytics.data.split_dota.crop_and_save 31 | 32 |



33 | 34 | ## ::: ultralytics.data.split_dota.split_images_and_labels 35 | 36 |



37 | 38 | ## ::: ultralytics.data.split_dota.split_trainval 39 | 40 |



41 | 42 | ## ::: ultralytics.data.split_dota.split_test 43 | 44 |

45 | -------------------------------------------------------------------------------- /docs/en/reference/engine/exporter.md: -------------------------------------------------------------------------------- 1 | --- 2 | description: Learn how to export YOLOv8 models to formats like ONNX, TensorRT, CoreML, and more. Optimize your exports for different platforms. 3 | keywords: YOLOv8, export formats, ONNX, TensorRT, CoreML, machine learning model export, AI, deep learning 4 | --- 5 | 6 | # Reference for `ultralytics/engine/exporter.py` 7 | 8 | !!! note 9 | 10 | This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/engine/exporter.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/engine/exporter.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/engine/exporter.py) 🛠️. Thank you 🙏! 11 | 12 |
13 | 14 | ## ::: ultralytics.engine.exporter.Exporter 15 | 16 |
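The `Exporter` is normally driven through the high-level model API rather than instantiated directly; a minimal ONNX export sketch:

```python
from ultralytics import YOLO

model = YOLO("yolo11n.pt")
# export() builds an Exporter internally and returns the path of the exported file
onnx_file = model.export(format="onnx", imgsz=640, half=False)
print(onnx_file)
```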



17 | 18 | ## ::: ultralytics.engine.exporter.IOSDetectModel 19 | 20 |



21 | 22 | ## ::: ultralytics.engine.exporter.NMSModel 23 | 24 |



25 | 26 | ## ::: ultralytics.engine.exporter.export_formats 27 | 28 |



29 | 30 | ## ::: ultralytics.engine.exporter.validate_args 31 | 32 |



33 | 34 | ## ::: ultralytics.engine.exporter.gd_outputs 35 | 36 |



37 | 38 | ## ::: ultralytics.engine.exporter.try_export 39 | 40 |



41 | 42 | ## ::: ultralytics.engine.exporter.arange_patch 43 | 44 |

45 | -------------------------------------------------------------------------------- /docs/en/reference/engine/model.md: -------------------------------------------------------------------------------- 1 | --- 2 | description: Explore the base class for implementing YOLO models with unified APIs for training, validation, prediction, and more. Learn how to utilize different task types and model configurations. 3 | keywords: YOLO model, Ultralytics, machine learning, deep learning, PyTorch model, training, validation, prediction, exporting, benchmarking, Ultralytics HUB, Triton Server 4 | --- 5 | 6 | # Reference for `ultralytics/engine/model.py` 7 | 8 | !!! note 9 | 10 | This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/engine/model.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/engine/model.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/engine/model.py) 🛠️. Thank you 🙏! 11 | 12 |
13 | 14 | ## ::: ultralytics.engine.model.Model 15 | 16 |
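A minimal sketch of the unified API that `Model` provides, here through the `YOLO` wrapper and the standard demo image:

```python
from ultralytics import YOLO

model = YOLO("yolo11n.pt")  # load a pretrained detection model
results = model("https://ultralytics.com/images/bus.jpg")  # run inference on one image
results[0].show()  # display the annotated image
```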

17 | -------------------------------------------------------------------------------- /docs/en/reference/engine/predictor.md: -------------------------------------------------------------------------------- 1 | --- 2 | description: Discover how to use the Base Predictor class in the Ultralytics YOLO engine for efficient image and video inference. 3 | keywords: Ultralytics, YOLO, Base Predictor, image inference, video inference, machine learning, Python 4 | --- 5 | 6 | # Reference for `ultralytics/engine/predictor.py` 7 | 8 | !!! note 9 | 10 | This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/engine/predictor.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/engine/predictor.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/engine/predictor.py) 🛠️. Thank you 🙏! 11 | 12 |
13 | 14 | ## ::: ultralytics.engine.predictor.BasePredictor 15 | 16 |
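Prediction is usually invoked through the model API; a small sketch of streaming inference (the video path is a placeholder), which keeps memory use flat by yielding one result at a time:

```python
from ultralytics import YOLO

model = YOLO("yolo11n.pt")

# stream=True returns a generator, so long videos are processed frame by frame.
for result in model.predict(source="video.mp4", stream=True, conf=0.25):
    print(len(result.boxes))  # number of detections in the current frame
```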

17 | -------------------------------------------------------------------------------- /docs/en/reference/engine/results.md: -------------------------------------------------------------------------------- 1 | --- 2 | description: Explore the details of Ultralytics engine results including classes like BaseTensor, Results, Boxes, Masks, Keypoints, Probs, and OBB to handle inference results efficiently. 3 | keywords: Ultralytics, engine results, BaseTensor, Results class, Boxes, Masks, Keypoints, Probs, OBB, inference results, machine learning, PyTorch 4 | --- 5 | 6 | # Reference for `ultralytics/engine/results.py` 7 | 8 | !!! note 9 | 10 | This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/engine/results.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/engine/results.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/engine/results.py) 🛠️. Thank you 🙏! 11 | 12 |
13 | 14 | ## ::: ultralytics.engine.results.BaseTensor 15 | 16 |



17 | 18 | ## ::: ultralytics.engine.results.Results 19 | 20 |
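A short sketch of reading detections from a `Results` object returned by a model call; the output filename is illustrative:

```python
from ultralytics import YOLO

model = YOLO("yolo11n.pt")
result = model("https://ultralytics.com/images/bus.jpg")[0]

print(result.boxes.xyxy)  # (N, 4) box corners in pixels
print(result.boxes.conf)  # (N,) confidence scores
print(result.boxes.cls)   # (N,) class indices
result.save(filename="annotated.jpg")  # write the plotted image to disk
```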



21 | 22 | ## ::: ultralytics.engine.results.Boxes 23 | 24 |



25 | 26 | ## ::: ultralytics.engine.results.Masks 27 | 28 |



29 | 30 | ## ::: ultralytics.engine.results.Keypoints 31 | 32 |



33 | 34 | ## ::: ultralytics.engine.results.Probs 35 | 36 |



37 | 38 | ## ::: ultralytics.engine.results.OBB 39 | 40 |

41 | -------------------------------------------------------------------------------- /docs/en/reference/engine/trainer.md: -------------------------------------------------------------------------------- 1 | --- 2 | description: Learn how to use BaseTrainer in Ultralytics YOLO for efficient model training. Comprehensive guide for configurations, datasets, and optimization. 3 | keywords: Ultralytics, YOLO, BaseTrainer, model training, configuration, datasets, optimization, machine learning 4 | --- 5 | 6 | # Reference for `ultralytics/engine/trainer.py` 7 | 8 | !!! note 9 | 10 | This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/engine/trainer.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/engine/trainer.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/engine/trainer.py) 🛠️. Thank you 🙏! 11 | 12 |
13 | 14 | ## ::: ultralytics.engine.trainer.BaseTrainer 15 | 16 |
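Training is normally launched through the model API, which constructs the task-specific `BaseTrainer` subclass internally; a minimal sketch using the bundled `coco8.yaml` dataset:

```python
from ultralytics import YOLO

model = YOLO("yolo11n.pt")
# train() builds the detection trainer and runs the full training loop
results = model.train(data="coco8.yaml", epochs=3, imgsz=640)
```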

17 | -------------------------------------------------------------------------------- /docs/en/reference/engine/tuner.md: -------------------------------------------------------------------------------- 1 | --- 2 | description: Optimize YOLO model performance using Ultralytics Tuner. Learn about systematic hyperparameter tuning for object detection, segmentation, classification, and tracking. 3 | keywords: Ultralytics, YOLO, hyperparameter tuning, machine learning, deep learning, object detection, instance segmentation, image classification, pose estimation, multi-object tracking 4 | --- 5 | 6 | # Reference for `ultralytics/engine/tuner.py` 7 | 8 | !!! note 9 | 10 | This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/engine/tuner.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/engine/tuner.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/engine/tuner.py) 🛠️. Thank you 🙏! 11 | 12 |
13 | 14 | ## ::: ultralytics.engine.tuner.Tuner 15 | 16 |
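A hedged sketch of hyperparameter tuning through the model API; the epoch and iteration counts are illustrative only:

```python
from ultralytics import YOLO

model = YOLO("yolo11n.pt")
# Each iteration mutates hyperparameters and retrains for a few epochs.
model.tune(data="coco8.yaml", epochs=10, iterations=30, optimizer="AdamW", plots=False, save=False, val=False)
```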

17 | -------------------------------------------------------------------------------- /docs/en/reference/engine/validator.md: -------------------------------------------------------------------------------- 1 | --- 2 | description: Explore Ultralytics BaseValidator for model validation in PyTorch, TensorFlow, ONNX, and more. Learn to check model accuracy and performance metrics. 3 | keywords: Ultralytics, BaseValidator, model validation, PyTorch, TensorFlow, ONNX, model accuracy, performance metrics 4 | --- 5 | 6 | # Reference for `ultralytics/engine/validator.py` 7 | 8 | !!! note 9 | 10 | This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/engine/validator.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/engine/validator.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/engine/validator.py) 🛠️. Thank you 🙏! 11 | 12 |
13 | 14 | ## ::: ultralytics.engine.validator.BaseValidator 15 | 16 |
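Validation is typically run through the model API, with a task-specific `BaseValidator` subclass doing the work underneath; a minimal sketch:

```python
from ultralytics import YOLO

model = YOLO("yolo11n.pt")
metrics = model.val(data="coco8.yaml", imgsz=640)
print(metrics.box.map)    # mAP50-95
print(metrics.box.map50)  # mAP50
```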

17 | -------------------------------------------------------------------------------- /docs/en/reference/hub/__init__.md: -------------------------------------------------------------------------------- 1 | --- 2 | description: Explore Ultralytics HUB API functions for login, logout, model reset, export, and dataset checks. Enhance your YOLO workflows with these essential utilities. 3 | keywords: Ultralytics HUB API, login, logout, reset model, export model, check dataset, YOLO, machine learning 4 | --- 5 | 6 | # Reference for `ultralytics/hub/__init__.py` 7 | 8 | !!! note 9 | 10 | This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/hub/\_\_init\_\_.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/hub/__init__.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/hub/__init__.py) 🛠️. Thank you 🙏! 11 | 12 |
13 | 14 | ## ::: ultralytics.hub.login 15 | 16 |
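A minimal authentication sketch; the key is a placeholder for your own Ultralytics HUB API key:

```python
from ultralytics import hub

hub.login("YOUR_API_KEY")  # placeholder: use the API key from your HUB settings page
```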



17 | 18 | ## ::: ultralytics.hub.logout 19 | 20 |



21 | 22 | ## ::: ultralytics.hub.reset_model 23 | 24 |



25 | 26 | ## ::: ultralytics.hub.export_fmts_hub 27 | 28 |



29 | 30 | ## ::: ultralytics.hub.export_model 31 | 32 |



33 | 34 | ## ::: ultralytics.hub.get_export 35 | 36 |



37 | 38 | ## ::: ultralytics.hub.check_dataset 39 | 40 |

41 | -------------------------------------------------------------------------------- /docs/en/reference/hub/auth.md: -------------------------------------------------------------------------------- 1 | --- 2 | description: Learn how to manage API key and cookie-based authentication in Ultralytics with the Auth class. Step-by-step guide for effective authentication. 3 | keywords: Ultralytics, authentication, API key, cookies, Auth class, YOLO, API, guide 4 | --- 5 | 6 | # Reference for `ultralytics/hub/auth.py` 7 | 8 | !!! note 9 | 10 | This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/hub/auth.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/hub/auth.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/hub/auth.py) 🛠️. Thank you 🙏! 11 | 12 |
13 | 14 | ## ::: ultralytics.hub.auth.Auth 15 | 16 |

17 | -------------------------------------------------------------------------------- /docs/en/reference/hub/google/__init__.md: -------------------------------------------------------------------------------- 1 | --- 2 | description: Reference for the GCPRegions class in Ultralytics, which provides functionality for testing and analyzing latency across Google Cloud Platform regions. 3 | keywords: Ultralytics, GCP, Google Cloud Platform, regions, latency testing, cloud computing, networking, performance analysis 4 | --- 5 | 6 | # Reference for `ultralytics/hub/google/__init__.py` 7 | 8 | !!! note 9 | 10 | This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/hub/google/\_\_init\_\_.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/hub/google/__init__.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/hub/google/__init__.py) 🛠️. Thank you 🙏! 11 | 12 |
13 | 14 | ## ::: ultralytics.hub.google.GCPRegions 15 | 16 |

17 | -------------------------------------------------------------------------------- /docs/en/reference/hub/session.md: -------------------------------------------------------------------------------- 1 | --- 2 | description: Explore the HUBTrainingSession class for managing Ultralytics YOLO model training, heartbeats, and checkpointing. 3 | keywords: Ultralytics, YOLO, HUBTrainingSession, model training, heartbeats, checkpointing, Python 4 | --- 5 | 6 | # Reference for `ultralytics/hub/session.py` 7 | 8 | !!! note 9 | 10 | This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/hub/session.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/hub/session.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/hub/session.py) 🛠️. Thank you 🙏! 11 | 12 |
13 | 14 | ## ::: ultralytics.hub.session.HUBTrainingSession 15 | 16 |

17 | -------------------------------------------------------------------------------- /docs/en/reference/hub/utils.md: -------------------------------------------------------------------------------- 1 | --- 2 | description: Explore the utilities in the Ultralytics HUB. Learn about smart_request, request_with_credentials, and more to enhance your YOLO projects. 3 | keywords: Ultralytics, HUB, Utilities, YOLO, smart_request, request_with_credentials 4 | --- 5 | 6 | # Reference for `ultralytics/hub/utils.py` 7 | 8 | !!! note 9 | 10 | This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/hub/utils.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/hub/utils.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/hub/utils.py) 🛠️. Thank you 🙏! 11 | 12 |
13 | 14 | ## ::: ultralytics.hub.utils.Events 15 | 16 |



17 | 18 | ## ::: ultralytics.hub.utils.request_with_credentials 19 | 20 |



21 | 22 | ## ::: ultralytics.hub.utils.requests_with_progress 23 | 24 |



25 | 26 | ## ::: ultralytics.hub.utils.smart_request 27 | 28 |

29 | -------------------------------------------------------------------------------- /docs/en/reference/models/fastsam/model.md: -------------------------------------------------------------------------------- 1 | --- 2 | description: Discover how to use the FastSAM model with Ultralytics. Learn about its interface and implementation details with practical examples. 3 | keywords: FastSAM, Ultralytics, model interface, YOLO, deep learning, machine learning, segmentation, predictor, validator, Python 4 | --- 5 | 6 | # Reference for `ultralytics/models/fastsam/model.py` 7 | 8 | !!! note 9 | 10 | This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/models/fastsam/model.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/models/fastsam/model.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/models/fastsam/model.py) 🛠️. Thank you 🙏! 11 | 12 |
13 | 14 | ## ::: ultralytics.models.fastsam.model.FastSAM 15 | 16 |
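A minimal inference sketch with the small FastSAM checkpoint and the standard demo image:

```python
from ultralytics import FastSAM

model = FastSAM("FastSAM-s.pt")
# Segment everything in the image; arguments follow the usual predict settings.
results = model("https://ultralytics.com/images/bus.jpg", device="cpu", retina_masks=True, imgsz=1024, conf=0.4, iou=0.9)
```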

17 | -------------------------------------------------------------------------------- /docs/en/reference/models/fastsam/predict.md: -------------------------------------------------------------------------------- 1 | --- 2 | description: Explore the Fast SAM Predictor in the Ultralytics YOLO framework. Learn about its segmentation prediction tasks, configuration, and post-processing steps. 3 | keywords: Ultralytics, Fast SAM Predictor, YOLO, segmentation, prediction, AI model, non-max suppression, mask prediction, tutorial 4 | --- 5 | 6 | # Reference for `ultralytics/models/fastsam/predict.py` 7 | 8 | !!! note 9 | 10 | This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/models/fastsam/predict.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/models/fastsam/predict.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/models/fastsam/predict.py) 🛠️. Thank you 🙏! 11 | 12 |
13 | 14 | ## ::: ultralytics.models.fastsam.predict.FastSAMPredictor 15 | 16 |

17 | -------------------------------------------------------------------------------- /docs/en/reference/models/fastsam/utils.md: -------------------------------------------------------------------------------- 1 | --- 2 | description: Explore the utility functions in FastSAM for adjusting bounding boxes and calculating IoU, benefiting computer vision projects. 3 | keywords: FastSAM, bounding boxes, IoU, Ultralytics, image processing, computer vision 4 | --- 5 | 6 | # Reference for `ultralytics/models/fastsam/utils.py` 7 | 8 | !!! note 9 | 10 | This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/models/fastsam/utils.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/models/fastsam/utils.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/models/fastsam/utils.py) 🛠️. Thank you 🙏! 11 | 12 |
13 | 14 | ## ::: ultralytics.models.fastsam.utils.adjust_bboxes_to_image_border 15 | 16 |

17 | -------------------------------------------------------------------------------- /docs/en/reference/models/fastsam/val.md: -------------------------------------------------------------------------------- 1 | --- 2 | description: Discover FastSAM Validator for segmentation in Ultralytics YOLO. Learn how to validate with custom metrics and avoid common errors. Contribute on GitHub!. 3 | keywords: FastSAM Validator, Ultralytics, YOLO, segmentation, validation, metrics, GitHub, contribute, documentation 4 | --- 5 | 6 | # Reference for `ultralytics/models/fastsam/val.py` 7 | 8 | !!! note 9 | 10 | This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/models/fastsam/val.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/models/fastsam/val.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/models/fastsam/val.py) 🛠️. Thank you 🙏! 11 | 12 |
13 | 14 | ## ::: ultralytics.models.fastsam.val.FastSAMValidator 15 | 16 |

17 | -------------------------------------------------------------------------------- /docs/en/reference/models/nas/model.md: -------------------------------------------------------------------------------- 1 | --- 2 | description: Explore the YOLO-NAS model interface and learn how to utilize pre-trained YOLO-NAS models for object detection with Ultralytics. 3 | keywords: Ultralytics, YOLO, YOLO-NAS, object detection, pre-trained models, machine learning, deep learning, NAS model 4 | --- 5 | 6 | # Reference for `ultralytics/models/nas/model.py` 7 | 8 | !!! note 9 | 10 | This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/models/nas/model.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/models/nas/model.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/models/nas/model.py) 🛠️. Thank you 🙏! 11 | 12 |
13 | 14 | ## ::: ultralytics.models.nas.model.NAS 15 | 16 |
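A tentative usage sketch; YOLO-NAS support assumes the `super-gradients` package is installed, and the checkpoint name follows the model docs:

```python
from ultralytics import NAS

model = NAS("yolo_nas_s.pt")  # pretrained YOLO-NAS small weights
results = model("https://ultralytics.com/images/bus.jpg")
```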

17 | -------------------------------------------------------------------------------- /docs/en/reference/models/nas/predict.md: -------------------------------------------------------------------------------- 1 | --- 2 | description: Learn about NASPredictor in Ultralytics YOLO for efficient object detection. Explore its attributes, methods, and usage with examples. 3 | keywords: Ultralytics, YOLO, NASPredictor, object detection, machine learning, AI, non-maximum suppression, bounding boxes, image processing 4 | --- 5 | 6 | # Reference for `ultralytics/models/nas/predict.py` 7 | 8 | !!! note 9 | 10 | This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/models/nas/predict.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/models/nas/predict.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/models/nas/predict.py) 🛠️. Thank you 🙏! 11 | 12 |
13 | 14 | ## ::: ultralytics.models.nas.predict.NASPredictor 15 | 16 |

17 | -------------------------------------------------------------------------------- /docs/en/reference/models/nas/val.md: -------------------------------------------------------------------------------- 1 | --- 2 | description: Explore the Ultralytics NASValidator for efficient YOLO model validation. Learn about NMS and post-processing configurations. 3 | keywords: Ultralytics, YOLO, NASValidator, object detection, non-maximum suppression, NMS, YOLO models, machine learning 4 | --- 5 | 6 | # Reference for `ultralytics/models/nas/val.py` 7 | 8 | !!! note 9 | 10 | This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/models/nas/val.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/models/nas/val.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/models/nas/val.py) 🛠️. Thank you 🙏! 11 | 12 |
13 | 14 | ## ::: ultralytics.models.nas.val.NASValidator 15 | 16 |

17 | -------------------------------------------------------------------------------- /docs/en/reference/models/rtdetr/model.md: -------------------------------------------------------------------------------- 1 | --- 2 | description: Explore the interface for Baidu's RT-DETR, a Vision Transformer-based real-time object detector in the Ultralytics Docs. Learn more about its efficient hybrid encoding and IoU-aware query selection. 3 | keywords: RT-DETR, real-time object detection, Vision Transformer, Ultralytics, model interface, Baidu, hybrid encoding, IoU-aware query selection, machine learning, AI 4 | --- 5 | 6 | # Reference for `ultralytics/models/rtdetr/model.py` 7 | 8 | !!! note 9 | 10 | This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/models/rtdetr/model.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/models/rtdetr/model.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/models/rtdetr/model.py) 🛠️. Thank you 🙏! 11 | 12 |
13 | 14 | ## ::: ultralytics.models.rtdetr.model.RTDETR 15 | 16 |
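
For context, a hedged sketch of typical use of the model interface documented here (the source path is illustrative):

```python
from ultralytics import RTDETR

# Load an RT-DETR-Large checkpoint
model = RTDETR("rtdetr-l.pt")
model.info()

# Real-time detection on a placeholder image
results = model("path/to/image.jpg")
for r in results:
    print(r.boxes.xyxy, r.boxes.conf)  # box corners and confidences
```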

17 | -------------------------------------------------------------------------------- /docs/en/reference/models/rtdetr/predict.md: -------------------------------------------------------------------------------- 1 | --- 2 | description: Access the complete reference for the RTDETRPredictor class in Ultralytics. Learn about its attributes, methods, and example usage for real-time object detection. 3 | keywords: RTDETRPredictor, Ultralytics, Real-Time Detection Transformer, object detection, Vision Transformers, documentation, RT-DETR, Python class 4 | --- 5 | 6 | # Reference for `ultralytics/models/rtdetr/predict.py` 7 | 8 | !!! note 9 | 10 | This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/models/rtdetr/predict.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/models/rtdetr/predict.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/models/rtdetr/predict.py) 🛠️. Thank you 🙏! 11 | 12 |
13 | 14 | ## ::: ultralytics.models.rtdetr.predict.RTDETRPredictor 15 | 16 |

17 | -------------------------------------------------------------------------------- /docs/en/reference/models/rtdetr/train.md: -------------------------------------------------------------------------------- 1 | --- 2 | description: Explore RTDETRTrainer for efficient real-time object detection leveraging Vision Transformers. Learn configuration, dataset handling, and advanced model training. 3 | keywords: RTDETRTrainer, real-time object detection, Vision Transformers, YOLO, RT-DETR model, model training, dataset handling 4 | --- 5 | 6 | # Reference for `ultralytics/models/rtdetr/train.py` 7 | 8 | !!! note 9 | 10 | This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/models/rtdetr/train.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/models/rtdetr/train.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/models/rtdetr/train.py) 🛠️. Thank you 🙏! 11 | 12 |
13 | 14 | ## ::: ultralytics.models.rtdetr.train.RTDETRTrainer 15 | 16 |
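
The trainer is normally reached through `RTDETR(...).train(...)` rather than instantiated by hand; a minimal sketch on the tiny COCO8 sample dataset (the epoch count is illustrative):

```python
from ultralytics import RTDETR

# Fine-tune an RT-DETR checkpoint; RTDETRTrainer is selected internally
model = RTDETR("rtdetr-l.pt")
results = model.train(data="coco8.yaml", epochs=1, imgsz=640)
```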

17 | -------------------------------------------------------------------------------- /docs/en/reference/models/rtdetr/val.md: -------------------------------------------------------------------------------- 1 | --- 2 | description: Explore the RTDETRValidator and RTDETRDataset classes for real-time detection and tracking. Understand initialization, transformations, and post-processing. 3 | keywords: RTDETR, Ultralytics, object detection, tracking, YOLO, RTDETRDataset, RTDETRValidator, real-time detection 4 | --- 5 | 6 | # Reference for `ultralytics/models/rtdetr/val.py` 7 | 8 | !!! note 9 | 10 | This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/models/rtdetr/val.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/models/rtdetr/val.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/models/rtdetr/val.py) 🛠️. Thank you 🙏! 11 | 12 |
13 | 14 | ## ::: ultralytics.models.rtdetr.val.RTDETRDataset 15 | 16 |



17 | 18 | ## ::: ultralytics.models.rtdetr.val.RTDETRValidator 19 | 20 |

21 | -------------------------------------------------------------------------------- /docs/en/reference/models/sam/model.md: -------------------------------------------------------------------------------- 1 | --- 2 | description: Explore the SAM (Segment Anything Model) and SAM 2 (Segment Anything Model 2) interface for real-time image segmentation. Learn about promptable segmentation and zero-shot capabilities. 3 | keywords: Ultralytics, SAM, Segment Anything Model, SAM 2, Segment Anything Model 2, image segmentation, real-time segmentation, zero-shot performance, promptable segmentation, SA-1B dataset 4 | --- 5 | 6 | # Reference for `ultralytics/models/sam/model.py` 7 | 8 | !!! note 9 | 10 | This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/models/sam/model.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/models/sam/model.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/models/sam/model.py) 🛠️. Thank you 🙏! 11 | 12 |
13 | 14 | ## ::: ultralytics.models.sam.model.SAM 15 | 16 |
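
A brief promptable-segmentation sketch using this interface; the checkpoint name, image path, and prompt coordinates are illustrative:

```python
from ultralytics import SAM

# Load a SAM 2 checkpoint (original SAM weights such as "sam_b.pt" work the same way)
model = SAM("sam2_b.pt")
model.info()

# Prompt with a bounding box, then with a foreground point
model("path/to/image.jpg", bboxes=[100, 100, 200, 200])
model("path/to/image.jpg", points=[150, 150], labels=[1])
```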

17 | -------------------------------------------------------------------------------- /docs/en/reference/models/sam/modules/decoders.md: -------------------------------------------------------------------------------- 1 | --- 2 | description: Explore the MaskDecoder and MLP modules in Ultralytics for efficient mask prediction using transformer architecture. Detailed attributes, functionalities, and implementation. 3 | keywords: Ultralytics, MaskDecoder, MLP, machine learning, transformer architecture, mask prediction, neural networks, PyTorch modules 4 | --- 5 | 6 | # Reference for `ultralytics/models/sam/modules/decoders.py` 7 | 8 | !!! note 9 | 10 | This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/models/sam/modules/decoders.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/models/sam/modules/decoders.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/models/sam/modules/decoders.py) 🛠️. Thank you 🙏! 11 | 12 |
13 | 14 | ## ::: ultralytics.models.sam.modules.decoders.MaskDecoder 15 | 16 |



17 | 18 | ## ::: ultralytics.models.sam.modules.decoders.SAM2MaskDecoder 19 | 20 |

21 | -------------------------------------------------------------------------------- /docs/en/reference/models/sam/modules/encoders.md: -------------------------------------------------------------------------------- 1 | --- 2 | description: Explore detailed documentation of various SAM encoder modules such as ImageEncoderViT, PromptEncoder, and more, available in Ultralytics' repository. 3 | keywords: Ultralytics, SAM encoder, ImageEncoderViT, PromptEncoder, PositionEmbeddingRandom, Block, Attention, PatchEmbed 4 | --- 5 | 6 | # Reference for `ultralytics/models/sam/modules/encoders.py` 7 | 8 | !!! note 9 | 10 | This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/models/sam/modules/encoders.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/models/sam/modules/encoders.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/models/sam/modules/encoders.py) 🛠️. Thank you 🙏! 11 | 12 |
13 | 14 | ## ::: ultralytics.models.sam.modules.encoders.ImageEncoderViT 15 | 16 |



17 | 18 | ## ::: ultralytics.models.sam.modules.encoders.PromptEncoder 19 | 20 |



21 | 22 | ## ::: ultralytics.models.sam.modules.encoders.MemoryEncoder 23 | 24 |



25 | 26 | ## ::: ultralytics.models.sam.modules.encoders.ImageEncoder 27 | 28 |



29 | 30 | ## ::: ultralytics.models.sam.modules.encoders.FpnNeck 31 | 32 |



33 | 34 | ## ::: ultralytics.models.sam.modules.encoders.Hiera 35 | 36 |

37 | -------------------------------------------------------------------------------- /docs/en/reference/models/sam/modules/memory_attention.md: -------------------------------------------------------------------------------- 1 | --- 2 | description: Explore detailed documentation of various SAM 2 encoder modules such as MemoryAttentionLayer, MemoryAttention, available in Ultralytics' repository. 3 | keywords: Ultralytics, SAM 2 encoder, MemoryAttentionLayer, MemoryAttention 4 | --- 5 | 6 | # Reference for `ultralytics/models/sam/modules/memory_attention.py` 7 | 8 | !!! note 9 | 10 | This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/models/sam/modules/memory_attention.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/models/sam/modules/memory_attention.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/models/sam/modules/memory_attention.py) 🛠️. Thank you 🙏! 11 | 12 |
13 | 14 | ## ::: ultralytics.models.sam.modules.memory_attention.MemoryAttentionLayer 15 | 16 |



17 | 18 | ## ::: ultralytics.models.sam.modules.memory_attention.MemoryAttention 19 | 20 |

21 | -------------------------------------------------------------------------------- /docs/en/reference/models/sam/modules/sam.md: -------------------------------------------------------------------------------- 1 | --- 2 | description: Discover the Ultralytics SAM and SAM 2 module for object segmentation. Learn about its components, such as image encoders and mask decoders, in this comprehensive guide. 3 | keywords: Ultralytics, SAM Module, SAM 2 Module, object segmentation, image encoder, mask decoder, prompt encoder, AI, machine learning 4 | --- 5 | 6 | # Reference for `ultralytics/models/sam/modules/sam.py` 7 | 8 | !!! note 9 | 10 | This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/models/sam/modules/sam.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/models/sam/modules/sam.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/models/sam/modules/sam.py) 🛠️. Thank you 🙏! 11 | 12 |
13 | 14 | ## ::: ultralytics.models.sam.modules.sam.SAMModel 15 | 16 |



17 | 18 | ## ::: ultralytics.models.sam.modules.sam.SAM2Model 19 | 20 |

21 | -------------------------------------------------------------------------------- /docs/en/reference/models/sam/modules/transformer.md: -------------------------------------------------------------------------------- 1 | --- 2 | description: Explore the TwoWayTransformer module in Ultralytics, designed for simultaneous attention to image and query points. Ideal for object detection and segmentation tasks. 3 | keywords: Ultralytics, TwoWayTransformer, module, deep learning, transformer, object detection, image segmentation, attention mechanism, neural networks 4 | --- 5 | 6 | # Reference for `ultralytics/models/sam/modules/transformer.py` 7 | 8 | !!! note 9 | 10 | This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/models/sam/modules/transformer.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/models/sam/modules/transformer.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/models/sam/modules/transformer.py) 🛠️. Thank you 🙏! 11 | 12 |
13 | 14 | ## ::: ultralytics.models.sam.modules.transformer.TwoWayTransformer 15 | 16 |



17 | 18 | ## ::: ultralytics.models.sam.modules.transformer.TwoWayAttentionBlock 19 | 20 |



21 | 22 | ## ::: ultralytics.models.sam.modules.transformer.Attention 23 | 24 |

25 | -------------------------------------------------------------------------------- /docs/en/reference/models/sam/predict.md: -------------------------------------------------------------------------------- 1 | --- 2 | description: Explore Ultralytics SAM and SAM 2 Predictor for advanced, real-time image segmentation using the Segment Anything Model (SAM and SAM 2). Complete implementation details and auxiliary utilities. 3 | keywords: Ultralytics, SAM, Segment Anything Model, SAM 2, Segment Anything Model 2, image segmentation, real-time, prediction, AI, machine learning, Python, torch, inference 4 | --- 5 | 6 | # Reference for `ultralytics/models/sam/predict.py` 7 | 8 | !!! note 9 | 10 | This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/models/sam/predict.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/models/sam/predict.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/models/sam/predict.py) 🛠️. Thank you 🙏! 11 | 12 |
13 | 14 | ## ::: ultralytics.models.sam.predict.Predictor 15 | 16 |
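
The predictor can also be driven directly when the same image is prompted repeatedly; a hedged sketch with illustrative override values and prompt coordinates:

```python
from ultralytics.models.sam import Predictor as SAMPredictor

# Build a predictor with explicit overrides (values shown are typical, not required)
overrides = dict(conf=0.25, task="segment", mode="predict", imgsz=1024, model="mobile_sam.pt")
predictor = SAMPredictor(overrides=overrides)

# Encode the image once, then prompt it several times without re-encoding
predictor.set_image("path/to/image.jpg")
results = predictor(bboxes=[439, 437, 524, 709])
results = predictor(points=[500, 375], labels=[1])
predictor.reset_image()
```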



17 | 18 | ## ::: ultralytics.models.sam.predict.SAM2Predictor 19 | 20 |



21 | 22 | ## ::: ultralytics.models.sam.predict.SAM2VideoPredictor 23 | 24 |

25 | -------------------------------------------------------------------------------- /docs/en/reference/models/utils/loss.md: -------------------------------------------------------------------------------- 1 | --- 2 | description: Explore detailed implementations of loss functions for DETR and RT-DETR models in Ultralytics. 3 | keywords: ultralytics, YOLO, DETR, RT-DETR, loss functions, object detection, deep learning, focal loss, varifocal loss, Hungarian matcher 4 | --- 5 | 6 | # Reference for `ultralytics/models/utils/loss.py` 7 | 8 | !!! note 9 | 10 | This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/models/utils/loss.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/models/utils/loss.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/models/utils/loss.py) 🛠️. Thank you 🙏! 11 | 12 |
13 | 14 | ## ::: ultralytics.models.utils.loss.DETRLoss 15 | 16 |



17 | 18 | ## ::: ultralytics.models.utils.loss.RTDETRDetectionLoss 19 | 20 |

21 | -------------------------------------------------------------------------------- /docs/en/reference/models/utils/ops.md: -------------------------------------------------------------------------------- 1 | --- 2 | description: Explore the utilities and operations in Ultralytics models like HungarianMatcher and get_cdn_group. Learn how to optimize and manage model operations efficiently. 3 | keywords: Ultralytics, models, utils, operations, HungarianMatcher, get_cdn_group, model optimization, pytorch, machine learning 4 | --- 5 | 6 | # Reference for `ultralytics/models/utils/ops.py` 7 | 8 | !!! note 9 | 10 | This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/models/utils/ops.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/models/utils/ops.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/models/utils/ops.py) 🛠️. Thank you 🙏! 11 | 12 |
13 | 14 | ## ::: ultralytics.models.utils.ops.HungarianMatcher 15 | 16 |



17 | 18 | ## ::: ultralytics.models.utils.ops.get_cdn_group 19 | 20 |

21 | -------------------------------------------------------------------------------- /docs/en/reference/models/yolo/classify/predict.md: -------------------------------------------------------------------------------- 1 | --- 2 | description: Learn about the ClassificationPredictor class for YOLO models at Ultralytics. Get details on initialization, preprocessing, and postprocessing for classification tasks. 3 | keywords: YOLO, ClassificationPredictor, Ultralytics, model prediction, preprocess, postprocess, deep learning, machine learning 4 | --- 5 | 6 | # Reference for `ultralytics/models/yolo/classify/predict.py` 7 | 8 | !!! note 9 | 10 | This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/models/yolo/classify/predict.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/models/yolo/classify/predict.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/models/yolo/classify/predict.py) 🛠️. Thank you 🙏! 11 | 12 |
13 | 14 | ## ::: ultralytics.models.yolo.classify.predict.ClassificationPredictor 15 | 16 |
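
Classification predictions are normally obtained through the `YOLO` interface, which builds this predictor internally; a short sketch (the image path is a placeholder):

```python
from ultralytics import YOLO

model = YOLO("yolo11n-cls.pt")
results = model("path/to/image.jpg")
for r in results:
    print(r.probs.top5, r.probs.top5conf)  # top-5 class indices and confidences
```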

17 | -------------------------------------------------------------------------------- /docs/en/reference/models/yolo/classify/train.md: -------------------------------------------------------------------------------- 1 | --- 2 | description: Explore the train.py module in Ultralytics YOLO for efficient classification model training. Learn more with examples and detailed code documentation. 3 | keywords: YOLO, Ultralytics, classification, training, machine learning, deep learning, PyTorch, train.py 4 | --- 5 | 6 | # Reference for `ultralytics/models/yolo/classify/train.py` 7 | 8 | !!! note 9 | 10 | This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/models/yolo/classify/train.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/models/yolo/classify/train.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/models/yolo/classify/train.py) 🛠️. Thank you 🙏! 11 | 12 |
13 | 14 | ## ::: ultralytics.models.yolo.classify.train.ClassificationTrainer 15 | 16 |
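
A hedged sketch of driving the trainer directly with an overrides dict, assuming the small `mnist160` sample dataset; the dataset choice, image size, and epoch count are illustrative:

```python
from ultralytics.models.yolo.classify import ClassificationTrainer

# Train a classification model directly through the trainer class
args = dict(model="yolo11n-cls.pt", data="mnist160", epochs=1, imgsz=64)
trainer = ClassificationTrainer(overrides=args)
trainer.train()
```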

17 | -------------------------------------------------------------------------------- /docs/en/reference/models/yolo/classify/val.md: -------------------------------------------------------------------------------- 1 | --- 2 | description: Explore the source code and functionalities of the YOLO Classification Validator in Ultralytics for evaluating classification models effectively. 3 | keywords: Ultralytics, YOLO, classification, validation, ClassifyMetrics, ConfusionMatrix, PyTorch, deep learning, model evaluation, AI, machine learning 4 | --- 5 | 6 | # Reference for `ultralytics/models/yolo/classify/val.py` 7 | 8 | !!! note 9 | 10 | This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/models/yolo/classify/val.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/models/yolo/classify/val.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/models/yolo/classify/val.py) 🛠️. Thank you 🙏! 11 | 12 |
13 | 14 | ## ::: ultralytics.models.yolo.classify.val.ClassificationValidator 15 | 16 |

17 | -------------------------------------------------------------------------------- /docs/en/reference/models/yolo/detect/predict.md: -------------------------------------------------------------------------------- 1 | --- 2 | description: Explore the Ultralytics YOLO Detection Predictor. Learn how to implement and use the DetectionPredictor class for object detection in Python. 3 | keywords: YOLO, Ultralytics, DetectionPredictor, object detection, Python, machine learning, AI, non_max_suppression 4 | --- 5 | 6 | # Reference for `ultralytics/models/yolo/detect/predict.py` 7 | 8 | !!! note 9 | 10 | This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/models/yolo/detect/predict.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/models/yolo/detect/predict.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/models/yolo/detect/predict.py) 🛠️. Thank you 🙏! 11 | 12 |
13 | 14 | ## ::: ultralytics.models.yolo.detect.predict.DetectionPredictor 15 | 16 |
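
In everyday use this predictor is created behind `YOLO.predict()`; a minimal sketch with a placeholder source and confidence threshold:

```python
from ultralytics import YOLO

model = YOLO("yolo11n.pt")
results = model.predict("path/to/image.jpg", conf=0.25)
for r in results:
    print(r.boxes.xyxy)  # xyxy box coordinates
    print(r.boxes.cls)   # predicted class indices
```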

17 | -------------------------------------------------------------------------------- /docs/en/reference/models/yolo/detect/train.md: -------------------------------------------------------------------------------- 1 | --- 2 | description: Learn about the DetectionTrainer class for training YOLO models on custom datasets. Discover methods, examples, and more. 3 | keywords: Ultralytics, YOLO, DetectionTrainer, training, object detection, machine learning, build dataset, dataloader, detection model 4 | --- 5 | 6 | # Reference for `ultralytics/models/yolo/detect/train.py` 7 | 8 | !!! note 9 | 10 | This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/models/yolo/detect/train.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/models/yolo/detect/train.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/models/yolo/detect/train.py) 🛠️. Thank you 🙏! 11 | 12 |
13 | 14 | ## ::: ultralytics.models.yolo.detect.train.DetectionTrainer 15 | 16 |
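
A short sketch of using the trainer class directly on the tiny COCO8 sample dataset; the overrides shown are illustrative values rather than requirements:

```python
from ultralytics.models.yolo.detect import DetectionTrainer

args = dict(model="yolo11n.pt", data="coco8.yaml", epochs=1, imgsz=640)
trainer = DetectionTrainer(overrides=args)
trainer.train()
```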

17 | -------------------------------------------------------------------------------- /docs/en/reference/models/yolo/detect/val.md: -------------------------------------------------------------------------------- 1 | --- 2 | description: Explore the DetectionValidator class for YOLO models in Ultralytics. Learn validation techniques, metrics, and dataset handling for object detection. 3 | keywords: YOLO validation, detection validation, YOLO metrics, Ultralytics, object detection, machine learning, AI 4 | --- 5 | 6 | # Reference for `ultralytics/models/yolo/detect/val.py` 7 | 8 | !!! note 9 | 10 | This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/models/yolo/detect/val.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/models/yolo/detect/val.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/models/yolo/detect/val.py) 🛠️. Thank you 🙏! 11 | 12 |
13 | 14 | ## ::: ultralytics.models.yolo.detect.val.DetectionValidator 15 | 16 |
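
Validation is usually launched from the model interface, which constructs this validator internally and returns a metrics object; an illustrative sketch on COCO8:

```python
from ultralytics import YOLO

model = YOLO("yolo11n.pt")
metrics = model.val(data="coco8.yaml", imgsz=640)
print(metrics.box.map)    # mAP50-95
print(metrics.box.map50)  # mAP50
```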

17 | -------------------------------------------------------------------------------- /docs/en/reference/models/yolo/model.md: -------------------------------------------------------------------------------- 1 | --- 2 | description: Explore the ultralytics.models.yolo.model module for YOLO object detection. Learn initialization, model mapping, and more. 3 | keywords: YOLO, object detection, Ultralytics, YOLO model, machine learning, Python, model initialization 4 | --- 5 | 6 | # Reference for `ultralytics/models/yolo/model.py` 7 | 8 | !!! note 9 | 10 | This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/models/yolo/model.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/models/yolo/model.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/models/yolo/model.py) 🛠️. Thank you 🙏! 11 | 12 |
13 | 14 | ## ::: ultralytics.models.yolo.model.YOLO 15 | 16 |



17 | 18 | ## ::: ultralytics.models.yolo.model.YOLOWorld 19 | 20 |



21 | 22 | ## ::: ultralytics.models.yolo.model.YOLOE 23 | 24 |
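
A compact sketch contrasting the closed-set `YOLO` class with the open-vocabulary `YOLOWorld` class; `YOLOE` follows a similar prompt-driven pattern but is left out here to keep the example to well-established calls. Paths and the prompt vocabulary are placeholders.

```python
from ultralytics import YOLO, YOLOWorld

# Standard closed-set detection
model = YOLO("yolo11n.pt")
results = model("path/to/image.jpg")

# Open-vocabulary detection restricted to a custom prompt vocabulary
world = YOLOWorld("yolov8s-world.pt")
world.set_classes(["person", "bus"])
results = world.predict("path/to/image.jpg")
```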

25 | -------------------------------------------------------------------------------- /docs/en/reference/models/yolo/obb/predict.md: -------------------------------------------------------------------------------- 1 | --- 2 | description: Learn how to use the Ultralytics YOLO OBBPredictor for oriented bounding box predictions. Enhance your object detection models with ease. 3 | keywords: Ultralytics, YOLO, OBBPredictor, oriented bounding box, object detection, AI, machine learning, PyTorch 4 | --- 5 | 6 | # Reference for `ultralytics/models/yolo/obb/predict.py` 7 | 8 | !!! note 9 | 10 | This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/models/yolo/obb/predict.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/models/yolo/obb/predict.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/models/yolo/obb/predict.py) 🛠️. Thank you 🙏! 11 | 12 |
13 | 14 | ## ::: ultralytics.models.yolo.obb.predict.OBBPredictor 15 | 16 |
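
Oriented-box predictions come from an `-obb` checkpoint, with this predictor handling the rotated-box post-processing; a brief sketch (the image path is a placeholder):

```python
from ultralytics import YOLO

model = YOLO("yolo11n-obb.pt")
results = model("path/to/aerial.jpg")
for r in results:
    print(r.obb.xyxyxyxy)  # four corner points per oriented box
```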

17 | -------------------------------------------------------------------------------- /docs/en/reference/models/yolo/obb/train.md: -------------------------------------------------------------------------------- 1 | --- 2 | description: Explore the Ultralytics YOLO OBB Trainer class for efficient training with Oriented Bounding Box models. Learn with examples and method details. 3 | keywords: Ultralytics, YOLO, OBB Trainer, Oriented Bounding Box, Machine Learning, Training, AI 4 | --- 5 | 6 | # Reference for `ultralytics/models/yolo/obb/train.py` 7 | 8 | !!! note 9 | 10 | This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/models/yolo/obb/train.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/models/yolo/obb/train.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/models/yolo/obb/train.py) 🛠️. Thank you 🙏! 11 | 12 |
13 | 14 | ## ::: ultralytics.models.yolo.obb.train.OBBTrainer 15 | 16 |

17 | -------------------------------------------------------------------------------- /docs/en/reference/models/yolo/obb/val.md: -------------------------------------------------------------------------------- 1 | --- 2 | description: Explore the OBBValidator for YOLO, an advanced class for oriented bounding boxes (OBB). Learn initialization, processes, and evaluation methods. 3 | keywords: Ultralytics, YOLO, OBBValidator, Oriented Bounding Boxes, DetectionValidator, validation, Python, deep learning 4 | --- 5 | 6 | # Reference for `ultralytics/models/yolo/obb/val.py` 7 | 8 | !!! note 9 | 10 | This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/models/yolo/obb/val.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/models/yolo/obb/val.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/models/yolo/obb/val.py) 🛠️. Thank you 🙏! 11 | 12 |
13 | 14 | ## ::: ultralytics.models.yolo.obb.val.OBBValidator 15 | 16 |

17 | -------------------------------------------------------------------------------- /docs/en/reference/models/yolo/pose/predict.md: -------------------------------------------------------------------------------- 1 | --- 2 | description: Learn about the PosePredictor class for YOLO model predictions on pose data. Get setup instructions, example usage, and implementation details. 3 | keywords: YOLO, Pose Prediction, Ultralytics, PosePredictor, YOLOv8, Machine Learning, Deep Learning, Python, AI Models 4 | --- 5 | 6 | # Reference for `ultralytics/models/yolo/pose/predict.py` 7 | 8 | !!! note 9 | 10 | This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/models/yolo/pose/predict.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/models/yolo/pose/predict.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/models/yolo/pose/predict.py) 🛠️. Thank you 🙏! 11 | 12 |
13 | 14 | ## ::: ultralytics.models.yolo.pose.predict.PosePredictor 15 | 16 |
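
A minimal pose-inference sketch; the predictor is built internally when a `-pose` checkpoint is called on a source (the path is a placeholder):

```python
from ultralytics import YOLO

model = YOLO("yolo11n-pose.pt")
results = model("path/to/person.jpg")
for r in results:
    print(r.keypoints.xy)  # per-person keypoint pixel coordinates
```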

17 | -------------------------------------------------------------------------------- /docs/en/reference/models/yolo/pose/train.md: -------------------------------------------------------------------------------- 1 | --- 2 | description: Explore the PoseTrainer class for training pose models using YOLO from Ultralytics. Includes initialization, model configuration, and plotting methods. 3 | keywords: PoseTrainer, YOLO, Ultralytics, pose models, training, model configuration, deep learning, machine learning, pose estimation 4 | --- 5 | 6 | # Reference for `ultralytics/models/yolo/pose/train.py` 7 | 8 | !!! note 9 | 10 | This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/models/yolo/pose/train.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/models/yolo/pose/train.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/models/yolo/pose/train.py) 🛠️. Thank you 🙏! 11 | 12 |
13 | 14 | ## ::: ultralytics.models.yolo.pose.train.PoseTrainer 15 | 16 |

17 | -------------------------------------------------------------------------------- /docs/en/reference/models/yolo/pose/val.md: -------------------------------------------------------------------------------- 1 | --- 2 | description: Explore the PoseValidator class for YOLO models. Learn how to extend DetectionValidator for pose validation with example code and detailed methods. 3 | keywords: Ultralytics, YOLO, PoseValidator, pose validation, machine learning, object detection, keypoints, python code, AI, deep learning 4 | --- 5 | 6 | # Reference for `ultralytics/models/yolo/pose/val.py` 7 | 8 | !!! note 9 | 10 | This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/models/yolo/pose/val.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/models/yolo/pose/val.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/models/yolo/pose/val.py) 🛠️. Thank you 🙏! 11 | 12 |
13 | 14 | ## ::: ultralytics.models.yolo.pose.val.PoseValidator 15 | 16 |

17 | -------------------------------------------------------------------------------- /docs/en/reference/models/yolo/segment/predict.md: -------------------------------------------------------------------------------- 1 | --- 2 | description: Understand the SegmentationPredictor class for segmentation-based predictions using YOLO. Learn more about its implementation and example usage. 3 | keywords: YOLO, SegmentationPredictor, machine learning, computer vision, object detection, Ultralytics, prediction, model, non-max suppression 4 | --- 5 | 6 | # Reference for `ultralytics/models/yolo/segment/predict.py` 7 | 8 | !!! note 9 | 10 | This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/models/yolo/segment/predict.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/models/yolo/segment/predict.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/models/yolo/segment/predict.py) 🛠️. Thank you 🙏! 11 | 12 |
13 | 14 | ## ::: ultralytics.models.yolo.segment.predict.SegmentationPredictor 15 | 16 |
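
A short sketch of instance-segmentation inference with a `-seg` checkpoint, which routes through this predictor for mask post-processing (the image path is a placeholder):

```python
from ultralytics import YOLO

model = YOLO("yolo11n-seg.pt")
results = model("path/to/image.jpg")
for r in results:
    if r.masks is not None:
        print(r.masks.data.shape)  # (num_instances, H, W) binary masks
```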

17 | -------------------------------------------------------------------------------- /docs/en/reference/models/yolo/segment/train.md: -------------------------------------------------------------------------------- 1 | --- 2 | description: Learn how to train YOLO models for segmentation tasks with Ultralytics. Explore the SegmentationTrainer class and its functionalities. 3 | keywords: YOLO, segmentation, train, Ultralytics, SegmentationTrainer, Python, machine learning, deep learning, tutorials 4 | --- 5 | 6 | # Reference for `ultralytics/models/yolo/segment/train.py` 7 | 8 | !!! note 9 | 10 | This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/models/yolo/segment/train.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/models/yolo/segment/train.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/models/yolo/segment/train.py) 🛠️. Thank you 🙏! 11 | 12 |
13 | 14 | ## ::: ultralytics.models.yolo.segment.train.SegmentationTrainer 15 | 16 |

17 | -------------------------------------------------------------------------------- /docs/en/reference/models/yolo/segment/val.md: -------------------------------------------------------------------------------- 1 | --- 2 | description: Explore the YOLO Segmentation Validator module for validating segment models. Understand its usage, metrics, and implementation within the Ultralytics framework. 3 | keywords: YOLO, segmentation, validator, Ultralytics, model validation, machine learning, deep learning, AI, computer vision 4 | --- 5 | 6 | # Reference for `ultralytics/models/yolo/segment/val.py` 7 | 8 | !!! note 9 | 10 | This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/models/yolo/segment/val.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/models/yolo/segment/val.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/models/yolo/segment/val.py) 🛠️. Thank you 🙏! 11 | 12 |
13 | 14 | ## ::: ultralytics.models.yolo.segment.val.SegmentationValidator 15 | 16 |

17 | -------------------------------------------------------------------------------- /docs/en/reference/models/yolo/world/train.md: -------------------------------------------------------------------------------- 1 | --- 2 | description: Learn how to train a World Model with Ultralytics YOLO using advanced techniques and customizable options for optimal performance. 3 | keywords: Ultralytics, YOLO, World Model, training, deep learning, computer vision, AI, machine learning, tutorial 4 | --- 5 | 6 | # Reference for `ultralytics/models/yolo/world/train.py` 7 | 8 | !!! note 9 | 10 | This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/models/yolo/world/train.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/models/yolo/world/train.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/models/yolo/world/train.py) 🛠️. Thank you 🙏! 11 | 12 |
13 | 14 | ## ::: ultralytics.models.yolo.world.train.WorldTrainer 15 | 16 |



17 | 18 | ## ::: ultralytics.models.yolo.world.train.on_pretrain_routine_end 19 | 20 |
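
Fine-tuning is normally started from the `YOLOWorld` model class, which picks this trainer internally; a hedged one-epoch sketch on the COCO8 sample dataset:

```python
from ultralytics import YOLOWorld

model = YOLOWorld("yolov8s-worldv2.pt")
results = model.train(data="coco8.yaml", epochs=1, imgsz=640)
```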

21 | -------------------------------------------------------------------------------- /docs/en/reference/models/yolo/world/train_world.md: -------------------------------------------------------------------------------- 1 | --- 2 | description: Explore the WorldTrainerFromScratch in YOLO for open-set datasets. Learn how to build, train, and evaluate models efficiently. 3 | keywords: YOLO, WorldTrainer, open-set datasets, training, evaluation, build dataset, YOLO World, machine learning 4 | --- 5 | 6 | # Reference for `ultralytics/models/yolo/world/train_world.py` 7 | 8 | !!! note 9 | 10 | This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/models/yolo/world/train_world.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/models/yolo/world/train_world.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/models/yolo/world/train_world.py) 🛠️. Thank you 🙏! 11 | 12 |
13 | 14 | ## ::: ultralytics.models.yolo.world.train_world.WorldTrainerFromScratch 15 | 16 |

17 | -------------------------------------------------------------------------------- /docs/en/reference/models/yolo/yoloe/predict.md: -------------------------------------------------------------------------------- 1 | --- 2 | description: Documentation for YOLOE visual prompt predictors in Ultralytics, supporting inference with visual prompts for both object detection and segmentation models. 3 | keywords: YOLOE, visual prompts, predictors, YOLOEVPDetectPredictor, YOLOEVPSegPredictor, inference, object detection, segmentation, Ultralytics, deep learning 4 | --- 5 | 6 | # Reference for `ultralytics/models/yolo/yoloe/predict.py` 7 | 8 | !!! note 9 | 10 | This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/models/yolo/yoloe/predict.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/models/yolo/yoloe/predict.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/models/yolo/yoloe/predict.py) 🛠️. Thank you 🙏! 11 | 12 |
13 | 14 | ## ::: ultralytics.models.yolo.yoloe.predict.YOLOEVPDetectPredictor 15 | 16 |



17 | 18 | ## ::: ultralytics.models.yolo.yoloe.predict.YOLOEVPSegPredictor 19 | 20 |

21 | -------------------------------------------------------------------------------- /docs/en/reference/models/yolo/yoloe/train.md: -------------------------------------------------------------------------------- 1 | --- 2 | description: Learn about YOLOE enhanced visual prompting (EVP) predictors in Ultralytics, which enable object detection and segmentation models to use visual prompts during inference for improved performance. 3 | keywords: YOLOE, EVP, visual prompts, computer vision, object detection, segmentation, bounding boxes, masks, predictors, YOLOEVPDetectPredictor, YOLOEVPSegPredictor, Ultralytics, inference 4 | --- 5 | 6 | # Reference for `ultralytics/models/yolo/yoloe/train.py` 7 | 8 | !!! note 9 | 10 | This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/models/yolo/yoloe/train.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/models/yolo/yoloe/train.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/models/yolo/yoloe/train.py) 🛠️. Thank you 🙏! 11 | 12 |
13 | 14 | ## ::: ultralytics.models.yolo.yoloe.train.YOLOETrainer 15 | 16 |



17 | 18 | ## ::: ultralytics.models.yolo.yoloe.train.YOLOEPETrainer 19 | 20 |



21 | 22 | ## ::: ultralytics.models.yolo.yoloe.train.YOLOETrainerFromScratch 23 | 24 |



25 | 26 | ## ::: ultralytics.models.yolo.yoloe.train.YOLOEPEFreeTrainer 27 | 28 |



29 | 30 | ## ::: ultralytics.models.yolo.yoloe.train.YOLOEVPTrainer 31 | 32 |

33 | -------------------------------------------------------------------------------- /docs/en/reference/models/yolo/yoloe/train_seg.md: -------------------------------------------------------------------------------- 1 | --- 2 | description: Documentation for YOLOE segmentation trainer classes in Ultralytics, supporting different training approaches including standard training, linear probing, training from scratch, and visual prompt training. 3 | keywords: YOLOE, segmentation, trainers, YOLOESegTrainer, YOLOEPESegTrainer, YOLOESegTrainerFromScratch, YOLOESegVPTrainer, linear probing, visual prompts, Ultralytics, deep learning 4 | --- 5 | 6 | # Reference for `ultralytics/models/yolo/yoloe/train_seg.py` 7 | 8 | !!! note 9 | 10 | This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/models/yolo/yoloe/train_seg.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/models/yolo/yoloe/train_seg.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/models/yolo/yoloe/train_seg.py) 🛠️. Thank you 🙏! 11 | 12 |
13 | 14 | ## ::: ultralytics.models.yolo.yoloe.train_seg.YOLOESegTrainer 15 | 16 |



17 | 18 | ## ::: ultralytics.models.yolo.yoloe.train_seg.YOLOEPESegTrainer 19 | 20 |



21 | 22 | ## ::: ultralytics.models.yolo.yoloe.train_seg.YOLOESegTrainerFromScratch 23 | 24 |



25 | 26 | ## ::: ultralytics.models.yolo.yoloe.train_seg.YOLOESegVPTrainer 27 | 28 |

29 | -------------------------------------------------------------------------------- /docs/en/reference/models/yolo/yoloe/val.md: -------------------------------------------------------------------------------- 1 | --- 2 | description: Documentation for YOLOE validator classes in Ultralytics, supporting both text and visual prompt embeddings for object detection and segmentation models. 3 | keywords: YOLOE, validation, object detection, segmentation, visual prompts, text prompts, embeddings, Ultralytics, YOLOEDetectValidator, YOLOESegValidator, deep learning 4 | --- 5 | 6 | # Reference for `ultralytics/models/yolo/yoloe/val.py` 7 | 8 | !!! note 9 | 10 | This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/models/yolo/yoloe/val.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/models/yolo/yoloe/val.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/models/yolo/yoloe/val.py) 🛠️. Thank you 🙏! 11 | 12 |
13 | 14 | ## ::: ultralytics.models.yolo.yoloe.val.YOLOEDetectValidator 15 | 16 |



17 | 18 | ## ::: ultralytics.models.yolo.yoloe.val.YOLOESegValidator 19 | 20 |

21 | -------------------------------------------------------------------------------- /docs/en/reference/nn/autobackend.md: -------------------------------------------------------------------------------- 1 | --- 2 | description: Get to know more about Ultralytics nn.autobackend.check_class_names functionality. Optimize your YOLO models seamlessly. 3 | keywords: Ultralytics, AutoBackend, check_class_names, YOLO, YOLO models, optimization 4 | --- 5 | 6 | # Reference for `ultralytics/nn/autobackend.py` 7 | 8 | !!! note 9 | 10 | This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/nn/autobackend.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/nn/autobackend.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/nn/autobackend.py) 🛠️. Thank you 🙏! 11 | 12 |
13 | 14 | ## ::: ultralytics.nn.autobackend.AutoBackend 15 | 16 |
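
`AutoBackend` is an internal wrapper, so the sketch below is illustrative only: it assumes a local `yolo11n.pt` file and shows the common load, warm-up, and forward-pass sequence behind one interface regardless of export format.

```python
import torch

from ultralytics.nn.autobackend import AutoBackend

# Wrap a weights file (PyTorch, ONNX, OpenVINO, TensorRT, ...) behind a single forward() call
backend = AutoBackend("yolo11n.pt", device=torch.device("cpu"), fp16=False)
backend.warmup(imgsz=(1, 3, 640, 640))

im = torch.zeros(1, 3, 640, 640)  # dummy BCHW input
preds = backend(im)
print(backend.names[0])  # class names resolved via check_class_names
```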



17 | 18 | ## ::: ultralytics.nn.autobackend.check_class_names 19 | 20 |



21 | 22 | ## ::: ultralytics.nn.autobackend.default_class_names 23 | 24 |

25 | -------------------------------------------------------------------------------- /docs/en/reference/nn/modules/activation.md: -------------------------------------------------------------------------------- 1 | --- 2 | description: Explore activation functions in Ultralytics, including the Unified activation function and other custom implementations for neural networks. 3 | keywords: ultralytics, activation functions, neural networks, Unified activation, AGLU, SiLU, ReLU, PyTorch, deep learning, custom activations 4 | --- 5 | 6 | # Reference for `ultralytics/nn/modules/activation.py` 7 | 8 | !!! note 9 | 10 | This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/nn/modules/activation.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/nn/modules/activation.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/nn/modules/activation.py) 🛠️. Thank you 🙏! 11 | 12 |
13 | 14 | ## ::: ultralytics.nn.modules.activation.AGLU 15 | 16 |

17 | -------------------------------------------------------------------------------- /docs/en/reference/nn/modules/head.md: -------------------------------------------------------------------------------- 1 | --- 2 | description: Explore docs covering Ultralytics YOLO detection, pose & RTDETRDecoder. Comprehensive guides to help you understand Ultralytics nn modules. 3 | keywords: Ultralytics, YOLO, Detection, Pose, RTDETRDecoder, nn modules, guides 4 | --- 5 | 6 | # Reference for `ultralytics/nn/modules/head.py` 7 | 8 | !!! note 9 | 10 | This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/nn/modules/head.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/nn/modules/head.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/nn/modules/head.py) 🛠️. Thank you 🙏! 11 | 12 |
13 | 14 | ## ::: ultralytics.nn.modules.head.Detect 15 | 16 |



17 | 18 | ## ::: ultralytics.nn.modules.head.Segment 19 | 20 |



21 | 22 | ## ::: ultralytics.nn.modules.head.OBB 23 | 24 |



25 | 26 | ## ::: ultralytics.nn.modules.head.Pose 27 | 28 |



29 | 30 | ## ::: ultralytics.nn.modules.head.Classify 31 | 32 |



33 | 34 | ## ::: ultralytics.nn.modules.head.WorldDetect 35 | 36 |



37 | 38 | ## ::: ultralytics.nn.modules.head.LRPCHead 39 | 40 |



41 | 42 | ## ::: ultralytics.nn.modules.head.YOLOEDetect 43 | 44 |



45 | 46 | ## ::: ultralytics.nn.modules.head.YOLOESegment 47 | 48 |



49 | 50 | ## ::: ultralytics.nn.modules.head.RTDETRDecoder 51 | 52 |



53 | 54 | ## ::: ultralytics.nn.modules.head.v10Detect 55 | 56 |

57 | -------------------------------------------------------------------------------- /docs/en/reference/nn/modules/utils.md: -------------------------------------------------------------------------------- 1 | --- 2 | description: Explore the detailed reference of utility functions in the Ultralytics PyTorch modules. Learn about initialization, inverse sigmoid, and multiscale deformable attention. 3 | keywords: Ultralytics, PyTorch, utils, initialization, inverse sigmoid, multiscale deformable attention, deep learning, neural networks 4 | --- 5 | 6 | # Reference for `ultralytics/nn/modules/utils.py` 7 | 8 | !!! note 9 | 10 | This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/nn/modules/utils.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/nn/modules/utils.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/nn/modules/utils.py) 🛠️. Thank you 🙏! 11 | 12 |
13 | 14 | ## ::: ultralytics.nn.modules.utils._get_clones 15 | 16 |



17 | 18 | ## ::: ultralytics.nn.modules.utils.bias_init_with_prob 19 | 20 |



21 | 22 | ## ::: ultralytics.nn.modules.utils.linear_init 23 | 24 |



25 | 26 | ## ::: ultralytics.nn.modules.utils.inverse_sigmoid 27 | 28 |



29 | 30 | ## ::: ultralytics.nn.modules.utils.multi_scale_deformable_attn_pytorch 31 | 32 |

33 | -------------------------------------------------------------------------------- /docs/en/reference/nn/text_model.md: -------------------------------------------------------------------------------- 1 | --- 2 | description: Documentation for text encoding models in Ultralytics YOLOE, supporting both OpenAI CLIP and Apple MobileCLIP implementations for vision-language tasks. 3 | keywords: YOLOE, text encoding, CLIP, MobileCLIP, TextModel, vision-language models, embeddings, Ultralytics, deep learning 4 | --- 5 | 6 | # Reference for `ultralytics/nn/text_model.py` 7 | 8 | !!! note 9 | 10 | This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/nn/text_model.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/nn/text_model.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/nn/text_model.py) 🛠️. Thank you 🙏! 11 | 12 |
13 | 14 | ## ::: ultralytics.nn.text_model.TextModel 15 | 16 |



17 | 18 | ## ::: ultralytics.nn.text_model.CLIP 19 | 20 |



21 | 22 | ## ::: ultralytics.nn.text_model.MobileCLIP 23 | 24 |



25 | 26 | ## ::: ultralytics.nn.text_model.MobileCLIPTS 27 | 28 |



29 | 30 | ## ::: ultralytics.nn.text_model.build_text_model 31 | 32 |

33 | -------------------------------------------------------------------------------- /docs/en/reference/solutions/ai_gym.md: -------------------------------------------------------------------------------- 1 | --- 2 | description: Explore the AI Gym class for real-time pose detection and gym step counting using Ultralytics YOLO. Learn to implement pose estimation effectively. 3 | keywords: Ultralytics, AI Gym, YOLO, pose detection, gym step counting, real-time pose estimation, Python 4 | --- 5 | 6 | # Reference for `ultralytics/solutions/ai_gym.py` 7 | 8 | !!! note 9 | 10 | This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/solutions/ai_gym.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/solutions/ai_gym.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/solutions/ai_gym.py) 🛠️. Thank you 🙏! 11 | 12 |
13 | 14 | ## ::: ultralytics.solutions.ai_gym.AIGym 15 | 16 |
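
An end-to-end sketch, assuming the current `solutions` interface where the solution object is called once per frame; the video path and the shoulder-elbow-wrist keypoint triple `[6, 8, 10]` are illustrative choices.

```python
import cv2

from ultralytics import solutions

cap = cv2.VideoCapture("path/to/workout.mp4")
gym = solutions.AIGym(model="yolo11n-pose.pt", kpts=[6, 8, 10], show=True)

while cap.isOpened():
    ok, frame = cap.read()
    if not ok:
        break
    results = gym(frame)  # counts repetitions and returns an annotated result for this frame

cap.release()
cv2.destroyAllWindows()
```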

17 | -------------------------------------------------------------------------------- /docs/en/reference/solutions/analytics.md: -------------------------------------------------------------------------------- 1 | --- 2 | description: Explore the Analytics class in Ultralytics for visual analytics. Learn to create and update line, bar, and pie charts efficiently. 3 | keywords: Ultralytics, Analytics, Python, visual analytics, line chart, bar chart, pie chart, data visualization, AGPL-3.0 license 4 | --- 5 | 6 | # Reference for `ultralytics/solutions/analytics.py` 7 | 8 | !!! note 9 | 10 | This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/solutions/analytics.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/solutions/analytics.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/solutions/analytics.py) 🛠️. Thank you 🙏! 11 | 12 |
13 | 14 | ## ::: ultralytics.solutions.analytics.Analytics 15 | 16 |

17 | -------------------------------------------------------------------------------- /docs/en/reference/solutions/distance_calculation.md: -------------------------------------------------------------------------------- 1 | --- 2 | description: Explore the Ultralytics distance calculation module. Learn to calculate distances between objects in real-time video streams with our comprehensive guide. 3 | keywords: Ultralytics, distance calculation, object tracking, real-time video, centroid, distance estimation, YOLO, ML, cv2 4 | --- 5 | 6 | # Reference for `ultralytics/solutions/distance_calculation.py` 7 | 8 | !!! note 9 | 10 | This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/solutions/distance_calculation.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/solutions/distance_calculation.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/solutions/distance_calculation.py) 🛠️. Thank you 🙏! 11 | 12 |
13 | 14 | ## ::: ultralytics.solutions.distance_calculation.DistanceCalculation 15 | 16 |

17 | -------------------------------------------------------------------------------- /docs/en/reference/solutions/heatmap.md: -------------------------------------------------------------------------------- 1 | --- 2 | description: Learn how to use the Ultralytics Heatmap module for real-time video analysis with object tracking and heatmap generation. 3 | keywords: Ultralytics, Heatmap, Python, Real-time Video, Object Tracking, cv2, Shapely, Computer Vision, AI 4 | --- 5 | 6 | # Reference for `ultralytics/solutions/heatmap.py` 7 | 8 | !!! note 9 | 10 | This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/solutions/heatmap.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/solutions/heatmap.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/solutions/heatmap.py) 🛠️. Thank you 🙏! 11 | 12 |
13 | 14 | ## ::: ultralytics.solutions.heatmap.Heatmap 15 | 16 |

17 | -------------------------------------------------------------------------------- /docs/en/reference/solutions/instance_segmentation.md: -------------------------------------------------------------------------------- 1 | --- 2 | description: This page provides a detailed reference for the InstanceSegmentation class in the Ultralytics solutions package, enabling instance segmentation in images and videos. 3 | keywords: Ultralytics, InstanceSegmentation, instance segmentation, masks, Python, computer vision 4 | --- 5 | 6 | # Reference for `ultralytics/solutions/instance_segmentation.py` 7 | 8 | !!! note 9 | 10 | This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/solutions/instance_segmentation.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/solutions/instance_segmentation.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/solutions/instance_segmentation.py) 🛠️. Thank you 🙏! 11 | 12 |
13 | 14 | ## ::: ultralytics.solutions.instance_segmentation.InstanceSegmentation 15 | 16 |

17 | -------------------------------------------------------------------------------- /docs/en/reference/solutions/object_blurrer.md: -------------------------------------------------------------------------------- 1 | --- 2 | description: This page provides a detailed reference for the ObjectBlurrer class in the Ultralytics solutions package, which enables real-time blurring of detected objects in images and videos. 3 | keywords: Ultralytics, ObjectBlurrer, object detection, blurring, real-time processing, Python, computer vision 4 | --- 5 | 6 | # Reference for `ultralytics/solutions/object_blurrer.py` 7 | 8 | !!! note 9 | 10 | This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/solutions/object_blurrer.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/solutions/object_blurrer.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/solutions/object_blurrer.py) 🛠️. Thank you 🙏! 11 | 12 |
13 | 14 | ## ::: ultralytics.solutions.object_blurrer.ObjectBlurrer 15 | 16 |

17 | -------------------------------------------------------------------------------- /docs/en/reference/solutions/object_counter.md: -------------------------------------------------------------------------------- 1 | --- 2 | description: Explore the Ultralytics Object Counter for real-time video streams. Learn about initializing parameters, tracking objects, and more. 3 | keywords: Ultralytics, Object Counter, Real-time Tracking, Video Stream, Python, Object Detection 4 | --- 5 | 6 | # Reference for `ultralytics/solutions/object_counter.py` 7 | 8 | !!! note 9 | 10 | This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/solutions/object_counter.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/solutions/object_counter.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/solutions/object_counter.py) 🛠️. Thank you 🙏! 11 | 12 |
13 | 14 | ## ::: ultralytics.solutions.object_counter.ObjectCounter 15 | 16 |
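A short, hedged usage sketch: the region below is an arbitrary counting line (a polygon of four or more points is also accepted), and the video path is a placeholder.

```python
import cv2

from ultralytics import solutions

region_points = [(20, 400), (1080, 400)]  # counting line; pass 4+ points for a polygon region
cap = cv2.VideoCapture("highway.mp4")  # placeholder input video
counter = solutions.ObjectCounter(model="yolo11n.pt", region=region_points, show=True)

while cap.isOpened():
    ok, frame = cap.read()
    if not ok:
        break
    results = counter(frame)  # updates in/out counts as tracked objects cross the region

cap.release()
cv2.destroyAllWindows()
```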

17 | -------------------------------------------------------------------------------- /docs/en/reference/solutions/object_cropper.md: -------------------------------------------------------------------------------- 1 | --- 2 | description: Detailed documentation for the ObjectCropper class, part of the Ultralytics solutions package, enabling real-time cropping of detected objects from images and video streams. 3 | keywords: Ultralytics, ObjectCropper, object detection, cropping, real-time processing, Python, computer vision 4 | --- 5 | 6 | # Reference for `ultralytics/solutions/object_cropper.py` 7 | 8 | !!! note 9 | 10 | This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/solutions/object_cropper.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/solutions/object_cropper.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/solutions/object_cropper.py) 🛠️. Thank you 🙏! 11 | 12 |
13 | 14 | ## ::: ultralytics.solutions.object_cropper.ObjectCropper 15 | 16 |

17 | -------------------------------------------------------------------------------- /docs/en/reference/solutions/parking_management.md: -------------------------------------------------------------------------------- 1 | --- 2 | description: Explore Ultralytics' Parking Management solution leveraging YOLO for efficient parking zone monitoring and management. 3 | keywords: Ultralytics, YOLO, parking management, computer vision, parking monitoring, AI solutions, machine learning 4 | --- 5 | 6 | # Reference for `ultralytics/solutions/parking_management.py` 7 | 8 | !!! note 9 | 10 | This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/solutions/parking_management.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/solutions/parking_management.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/solutions/parking_management.py) 🛠️. Thank you 🙏! 11 | 12 |
13 | 14 | ## ::: ultralytics.solutions.parking_management.ParkingPtsSelection 15 | 16 |



17 | 18 | ## ::: ultralytics.solutions.parking_management.ParkingManagement 19 | 20 |
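The two classes are typically used in sequence: `ParkingPtsSelection` to annotate slots once, then `ParkingManagement` to monitor occupancy. The JSON filename and video path below are assumptions for illustration and may differ from the defaults in your release.

```python
import cv2

from ultralytics import solutions

# Step 1 (one-off): draw parking slots on a reference image; the tool saves them to a JSON file.
solutions.ParkingPtsSelection()  # opens a small Tkinter UI; output file assumed to be "bounding_boxes.json"

# Step 2: monitor slot occupancy on the video stream using the saved regions.
parking = solutions.ParkingManagement(model="yolo11n.pt", json_file="bounding_boxes.json")

cap = cv2.VideoCapture("parking_lot.mp4")  # placeholder input video
while cap.isOpened():
    ok, frame = cap.read()
    if not ok:
        break
    results = parking(frame)  # annotates occupied vs. available slots

cap.release()
```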

21 | -------------------------------------------------------------------------------- /docs/en/reference/solutions/queue_management.md: -------------------------------------------------------------------------------- 1 | --- 2 | description: Discover the Ultralytics Queue Management script for real-time object tracking and queue management. 3 | keywords: Ultralytics, queue management, object tracking, real-time video, Python script, YOLO, AGPL-3.0 4 | --- 5 | 6 | # Reference for `ultralytics/solutions/queue_management.py` 7 | 8 | !!! note 9 | 10 | This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/solutions/queue_management.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/solutions/queue_management.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/solutions/queue_management.py) 🛠️. Thank you 🙏! 11 | 12 |
13 | 14 | ## ::: ultralytics.solutions.queue_management.QueueManager 15 | 16 |

17 | -------------------------------------------------------------------------------- /docs/en/reference/solutions/region_counter.md: -------------------------------------------------------------------------------- 1 | --- 2 | description: Explore the Ultralytics RegionCounter solution for counting objects within user-defined regions in real-time video streams. Learn about defining regions, tracking objects, and more. 3 | keywords: Ultralytics, RegionCounter, Region Counting, Real-time Tracking, Video Stream, Python, Object Detection 4 | --- 5 | 6 | # Reference for `ultralytics/solutions/region_counter.py` 7 | 8 | !!! note 9 | 10 | This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/solutions/region_counter.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/solutions/region_counter.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/solutions/region_counter.py) 🛠️. Thank you 🙏! 11 | 12 |
13 | 14 | ## ::: ultralytics.solutions.region_counter.RegionCounter 15 | 16 |

17 | -------------------------------------------------------------------------------- /docs/en/reference/solutions/security_alarm.md: -------------------------------------------------------------------------------- 1 | --- 2 | description: Discover how Ultralytics' Security Alarm System enhances real-time surveillance with intelligent object detection and tracking. Learn about setup, monitoring, and threat detection. 3 | keywords: Ultralytics, Security Alarm System, Real-time Surveillance, Object Detection, Video Monitoring, Python, Threat Detection 4 | --- 5 | 6 | # Reference for `ultralytics/solutions/security_alarm.py` 7 | 8 | !!! note 9 | 10 | This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/solutions/security_alarm.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/solutions/security_alarm.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/solutions/security_alarm.py) 🛠️. Thank you 🙏! 11 | 12 |
13 | 14 | ## ::: ultralytics.solutions.security_alarm.SecurityAlarm 15 | 16 |

17 | -------------------------------------------------------------------------------- /docs/en/reference/solutions/solutions.md: -------------------------------------------------------------------------------- 1 | --- 2 | description: Explore the Ultralytics BaseSolution class that underpins real-time solutions such as object counting, AI Gym workouts, heatmaps, and speed estimation with Ultralytics YOLO. Learn to implement Ultralytics solutions effectively. 3 | keywords: Ultralytics, Solutions, Object counting, Speed Estimation, Heatmaps, Queue Management, AI Gym, YOLO, pose detection, gym step counting, real-time pose estimation, Python 4 | --- 5 | 6 | # Reference for `ultralytics/solutions/solutions.py` 7 | 8 | !!! note 9 | 10 | This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/solutions/solutions.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/solutions/solutions.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/solutions/solutions.py) 🛠️. Thank you 🙏! 11 | 12 |
13 | 14 | ## ::: ultralytics.solutions.solutions.BaseSolution 15 | 16 |



17 | 18 | ## ::: ultralytics.solutions.solutions.SolutionAnnotator 19 | 20 |



21 | 22 | ## ::: ultralytics.solutions.solutions.SolutionResults 23 | 24 |

25 | -------------------------------------------------------------------------------- /docs/en/reference/solutions/speed_estimation.md: -------------------------------------------------------------------------------- 1 | --- 2 | description: Explore the Ultralytics YOLO-based speed estimation script for real-time object tracking and speed measurement, optimized for accuracy and performance. 3 | keywords: Ultralytics, speed estimation, YOLO, real-time tracking, object tracking, python 4 | --- 5 | 6 | # Reference for `ultralytics/solutions/speed_estimation.py` 7 | 8 | !!! note 9 | 10 | This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/solutions/speed_estimation.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/solutions/speed_estimation.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/solutions/speed_estimation.py) 🛠️. Thank you 🙏! 11 | 12 |
13 | 14 | ## ::: ultralytics.solutions.speed_estimation.SpeedEstimator 15 | 16 |

17 | -------------------------------------------------------------------------------- /docs/en/reference/solutions/streamlit_inference.md: -------------------------------------------------------------------------------- 1 | --- 2 | description: Explore the live inference capabilities of Streamlit combined with Ultralytics YOLOv8. Learn to implement real-time object detection in your web applications with our comprehensive guide. 3 | keywords: Ultralytics, YOLOv8, live inference, real-time object detection, Streamlit, computer vision, webcam inference, object detection, Python, ML, cv2 4 | --- 5 | 6 | # Reference for `ultralytics/solutions/streamlit_inference.py` 7 | 8 | !!! note 9 | 10 | This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/solutions/streamlit_inference.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/solutions/streamlit_inference.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/solutions/streamlit_inference.py) 🛠️. Thank you 🙏! 11 | 12 |
13 | 14 | ## ::: ultralytics.solutions.streamlit_inference.Inference 15 | 16 |
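The class is meant to be driven by Streamlit itself; a minimal sketch follows (script name and model path are placeholders, and the constructor arguments are assumptions).

```python
# Save as app.py and launch with:  streamlit run app.py
from ultralytics import solutions

app = solutions.Inference(model="yolo11n.pt")  # placeholder model path
app.inference()  # builds the Streamlit UI and streams webcam/video predictions
```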

17 | -------------------------------------------------------------------------------- /docs/en/reference/solutions/trackzone.md: -------------------------------------------------------------------------------- 1 | --- 2 | description: Discover Ultralytics' TrackZone solution for real-time object tracking within defined zones. Gain insights into initializing regions, tracking objects exclusively within specific areas, and optimizing video stream processing for region-based object detection. 3 | keywords: Ultralytics, TrackZone, Object Tracking, Zone Tracking, Region Tracking, Python, Real-time Object Tracking, Video Stream Processing, Region-based Detection 4 | --- 5 | 6 | # Reference for `ultralytics/solutions/trackzone.py` 7 | 8 | !!! note 9 | 10 | This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/solutions/trackzone.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/solutions/trackzone.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/solutions/trackzone.py) 🛠️. Thank you 🙏! 11 | 12 |
13 | 14 | ## ::: ultralytics.solutions.trackzone.TrackZone 15 | 16 |

17 | -------------------------------------------------------------------------------- /docs/en/reference/solutions/vision_eye.md: -------------------------------------------------------------------------------- 1 | --- 2 | description: Discover the Ultralytics VisionEye solution for object tracking and analysis. Learn how to initialize parameters, map vision points, and track objects in real-time. 3 | keywords: Ultralytics, VisionEye, Object Tracking, Computer Vision, Real-time Analysis, Python, AI 4 | --- 5 | 6 | # Reference for `ultralytics/solutions/vision_eye.py` 7 | 8 | !!! note 9 | 10 | This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/solutions/vision_eye.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/solutions/vision_eye.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/solutions/vision_eye.py) 🛠️. Thank you 🙏! 11 | 12 |
13 | 14 | ## ::: ultralytics.solutions.vision_eye.VisionEye 15 | 16 |

17 | -------------------------------------------------------------------------------- /docs/en/reference/trackers/basetrack.md: -------------------------------------------------------------------------------- 1 | --- 2 | description: Discover the BaseTrack classes and methods for object tracking in YOLO by Ultralytics. Learn about TrackState, BaseTrack attributes, and methods. 3 | keywords: Ultralytics, YOLO, object tracking, BaseTrack, TrackState, tracking methods, TrackState enumeration, object detection 4 | --- 5 | 6 | # Reference for `ultralytics/trackers/basetrack.py` 7 | 8 | !!! note 9 | 10 | This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/trackers/basetrack.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/trackers/basetrack.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/trackers/basetrack.py) 🛠️. Thank you 🙏! 11 | 12 |
13 | 14 | ## ::: ultralytics.trackers.basetrack.TrackState 15 | 16 |



17 | 18 | ## ::: ultralytics.trackers.basetrack.BaseTrack 19 | 20 |

21 | -------------------------------------------------------------------------------- /docs/en/reference/trackers/bot_sort.md: -------------------------------------------------------------------------------- 1 | --- 2 | description: Explore the robust object tracking capabilities of the BOTrack and BOTSORT classes in the Ultralytics Bot SORT tracker API. Enhance your YOLOv8 projects. 3 | keywords: Ultralytics, Bot SORT, BOTrack, BOTSORT, YOLOv8, object tracking, Kalman filter, ReID, GMC algorithm 4 | --- 5 | 6 | # Reference for `ultralytics/trackers/bot_sort.py` 7 | 8 | !!! note 9 | 10 | This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/trackers/bot_sort.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/trackers/bot_sort.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/trackers/bot_sort.py) 🛠️. Thank you 🙏! 11 | 12 |
13 | 14 | ## ::: ultralytics.trackers.bot_sort.BOTrack 15 | 16 |



17 | 18 | ## ::: ultralytics.trackers.bot_sort.BOTSORT 19 | 20 |



21 | 22 | ## ::: ultralytics.trackers.bot_sort.ReID 23 | 24 |

25 | -------------------------------------------------------------------------------- /docs/en/reference/trackers/byte_tracker.md: -------------------------------------------------------------------------------- 1 | --- 2 | description: Explore the BYTETracker module in Ultralytics for state-of-the-art object tracking using Kalman filtering. Learn about its classes, methods, and attributes. 3 | keywords: Ultralytics, BYTETracker, object tracking, Kalman filter, YOLOv8, documentation 4 | --- 5 | 6 | # Reference for `ultralytics/trackers/byte_tracker.py` 7 | 8 | !!! note 9 | 10 | This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/trackers/byte_tracker.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/trackers/byte_tracker.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/trackers/byte_tracker.py) 🛠️. Thank you 🙏! 11 | 12 |
13 | 14 | ## ::: ultralytics.trackers.byte_tracker.STrack 15 | 16 |



17 | 18 | ## ::: ultralytics.trackers.byte_tracker.BYTETracker 19 | 20 |
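`STrack` and `BYTETracker` are rarely instantiated by hand; they are selected through the tracker YAML passed to `model.track()`. A sketch of that user-facing path (source path is a placeholder):

```python
from ultralytics import YOLO

model = YOLO("yolo11n.pt")

# ByteTrack is selected via its config file; BoT-SORT ("botsort.yaml") is the default tracker.
results = model.track(source="traffic.mp4", tracker="bytetrack.yaml", show=True)

for r in results:
    if r.boxes.id is not None:
        print(r.boxes.id.int().tolist())  # per-frame track IDs assigned by BYTETracker
```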

21 | -------------------------------------------------------------------------------- /docs/en/reference/trackers/track.md: -------------------------------------------------------------------------------- 1 | --- 2 | description: Explore the track.py script for Ultralytics object tracking. Learn how on_predict_start, on_predict_postprocess_end, and register_tracker functions work. 3 | keywords: Ultralytics, YOLO, object tracking, track.py, on_predict_start, on_predict_postprocess_end, register_tracker 4 | --- 5 | 6 | # Reference for `ultralytics/trackers/track.py` 7 | 8 | !!! note 9 | 10 | This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/trackers/track.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/trackers/track.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/trackers/track.py) 🛠️. Thank you 🙏! 11 | 12 |
13 | 14 | ## ::: ultralytics.trackers.track.on_predict_start 15 | 16 |



17 | 18 | ## ::: ultralytics.trackers.track.on_predict_postprocess_end 19 | 20 |



21 | 22 | ## ::: ultralytics.trackers.track.register_tracker 23 | 24 |

25 | -------------------------------------------------------------------------------- /docs/en/reference/trackers/utils/gmc.md: -------------------------------------------------------------------------------- 1 | --- 2 | description: Explore the Generalized Motion Compensation (GMC) class for tracking and object detection with methods like ORB, SIFT, ECC, and more. 3 | keywords: GMC, Generalized Motion Compensation, Ultralytics, tracking, object detection, ORB, SIFT, ECC, Sparse Optical Flow, computer vision, video frames 4 | --- 5 | 6 | # Reference for `ultralytics/trackers/utils/gmc.py` 7 | 8 | !!! note 9 | 10 | This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/trackers/utils/gmc.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/trackers/utils/gmc.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/trackers/utils/gmc.py) 🛠️. Thank you 🙏! 11 | 12 |
13 | 14 | ## ::: ultralytics.trackers.utils.gmc.GMC 15 | 16 |

17 | -------------------------------------------------------------------------------- /docs/en/reference/trackers/utils/kalman_filter.md: -------------------------------------------------------------------------------- 1 | --- 2 | description: Explore Kalman filter implementations like KalmanFilterXYAH and KalmanFilterXYWH for tracking bounding boxes in image space using Ultralytics. 3 | keywords: Kalman Filter, Object Tracking, Python, Ultralytics, YOLO, Bounding Boxes, Image Processing 4 | --- 5 | 6 | # Reference for `ultralytics/trackers/utils/kalman_filter.py` 7 | 8 | !!! note 9 | 10 | This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/trackers/utils/kalman_filter.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/trackers/utils/kalman_filter.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/trackers/utils/kalman_filter.py) 🛠️. Thank you 🙏! 11 | 12 |
13 | 14 | ## ::: ultralytics.trackers.utils.kalman_filter.KalmanFilterXYAH 15 | 16 |



17 | 18 | ## ::: ultralytics.trackers.utils.kalman_filter.KalmanFilterXYWH 19 | 20 |
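A compact sketch of the predict/update cycle for the XYAH variant, where a measurement is (center x, center y, aspect ratio, height); the numbers are arbitrary.

```python
import numpy as np

from ultralytics.trackers.utils.kalman_filter import KalmanFilterXYAH

kf = KalmanFilterXYAH()

z0 = np.array([320.0, 240.0, 0.5, 120.0])  # first detection: (x, y, a, h)
mean, cov = kf.initiate(z0)                # 8-D state: position + velocities

mean, cov = kf.predict(mean, cov)          # propagate the state one frame forward

z1 = np.array([324.0, 238.0, 0.5, 121.0])  # next detection
mean, cov = kf.update(mean, cov, z1)       # correct the prediction with the measurement

print(mean[:4])  # filtered (x, y, a, h)
```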

21 | -------------------------------------------------------------------------------- /docs/en/reference/trackers/utils/matching.md: -------------------------------------------------------------------------------- 1 | --- 2 | description: Explore the utility functions for matching in trackers used by Ultralytics, including linear assignment, IoU distance, embedding distance, and more. 3 | keywords: Ultralytics, matching utils, linear assignment, IoU distance, embedding distance, fuse score, tracking, Python, documentation 4 | --- 5 | 6 | # Reference for `ultralytics/trackers/utils/matching.py` 7 | 8 | !!! note 9 | 10 | This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/trackers/utils/matching.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/trackers/utils/matching.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/trackers/utils/matching.py) 🛠️. Thank you 🙏! 11 | 12 |
13 | 14 | ## ::: ultralytics.trackers.utils.matching.linear_assignment 15 | 16 |



17 | 18 | ## ::: ultralytics.trackers.utils.matching.iou_distance 19 | 20 |



21 | 22 | ## ::: ultralytics.trackers.utils.matching.embedding_distance 23 | 24 |



25 | 26 | ## ::: ultralytics.trackers.utils.matching.fuse_score 27 | 28 |

29 | -------------------------------------------------------------------------------- /docs/en/reference/utils/autobatch.md: -------------------------------------------------------------------------------- 1 | --- 2 | description: Discover how to automatically estimate the best YOLO batch size for optimal CUDA memory usage in PyTorch using Ultralytics' autobatch utility. 3 | keywords: YOLO batch size, CUDA memory, PyTorch autobatch, Ultralytics, machine learning, optimal batch size, training batch size, YOLO model 4 | --- 5 | 6 | # Reference for `ultralytics/utils/autobatch.py` 7 | 8 | !!! note 9 | 10 | This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/utils/autobatch.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/utils/autobatch.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/utils/autobatch.py) 🛠️. Thank you 🙏! 11 | 12 |
13 | 14 | ## ::: ultralytics.utils.autobatch.check_train_batch_size 15 | 16 |



17 | 18 | ## ::: ultralytics.utils.autobatch.autobatch 19 | 20 |
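These helpers back the `batch=-1` training option rather than being called directly in most workflows; a sketch of the usual entry point (using the small built-in `coco8.yaml` dataset):

```python
from ultralytics import YOLO

model = YOLO("yolo11n.pt")

# batch=-1 asks AutoBatch to pick the largest batch size that fits in CUDA memory;
# recent releases also accept a fraction, e.g. batch=0.70 for ~70% memory utilization.
model.train(data="coco8.yaml", epochs=1, imgsz=640, batch=-1)
```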

21 | -------------------------------------------------------------------------------- /docs/en/reference/utils/benchmarks.md: -------------------------------------------------------------------------------- 1 | --- 2 | description: Explore YOLO model benchmarking for speed and accuracy with formats like PyTorch, ONNX, TensorRT, and more. Detailed profiling & usage guides. 3 | keywords: YOLO, model benchmarking, ONNX, TensorRT, PyTorch, TensorFlow, CoreML, profiling, Ultralytics, model performance 4 | --- 5 | 6 | # Reference for `ultralytics/utils/benchmarks.py` 7 | 8 | !!! note 9 | 10 | This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/utils/benchmarks.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/utils/benchmarks.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/utils/benchmarks.py) 🛠️. Thank you 🙏! 11 | 12 |
13 | 14 | ## ::: ultralytics.utils.benchmarks.RF100Benchmark 15 | 16 |



17 | 18 | ## ::: ultralytics.utils.benchmarks.ProfileModels 19 | 20 |



21 | 22 | ## ::: ultralytics.utils.benchmarks.benchmark 23 | 24 |
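A minimal call to the `benchmark` entry point, mirroring the documented usage; `device="cpu"` keeps the sketch hardware-agnostic.

```python
from ultralytics.utils.benchmarks import benchmark

# Exports the model to each supported format and reports size, mAP and inference speed.
benchmark(model="yolo11n.pt", data="coco8.yaml", imgsz=640, half=False, device="cpu")
```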

25 | -------------------------------------------------------------------------------- /docs/en/reference/utils/callbacks/clearml.md: -------------------------------------------------------------------------------- 1 | --- 2 | description: Learn how to integrate ClearML with Ultralytics YOLO using detailed callbacks for pretraining, training, validation, and final logging. 3 | keywords: Ultralytics, YOLO, ClearML, integration, callbacks, pretraining, training, validation, logging, AI, machine learning 4 | --- 5 | 6 | # Reference for `ultralytics/utils/callbacks/clearml.py` 7 | 8 | !!! note 9 | 10 | This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/utils/callbacks/clearml.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/utils/callbacks/clearml.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/utils/callbacks/clearml.py) 🛠️. Thank you 🙏! 11 | 12 |
13 | 14 | ## ::: ultralytics.utils.callbacks.clearml._log_debug_samples 15 | 16 |



17 | 18 | ## ::: ultralytics.utils.callbacks.clearml._log_plot 19 | 20 |



21 | 22 | ## ::: ultralytics.utils.callbacks.clearml.on_pretrain_routine_start 23 | 24 |



25 | 26 | ## ::: ultralytics.utils.callbacks.clearml.on_train_epoch_end 27 | 28 |



29 | 30 | ## ::: ultralytics.utils.callbacks.clearml.on_fit_epoch_end 31 | 32 |



33 | 34 | ## ::: ultralytics.utils.callbacks.clearml.on_val_end 35 | 36 |



37 | 38 | ## ::: ultralytics.utils.callbacks.clearml.on_train_end 39 | 40 |

41 | -------------------------------------------------------------------------------- /docs/en/reference/utils/callbacks/dvc.md: -------------------------------------------------------------------------------- 1 | --- 2 | description: Learn to integrate DVCLive with Ultralytics for enhanced logging during training. Step-by-step methods for setting up and optimizing DVC callbacks. 3 | keywords: Ultralytics, DVC, DVCLive, machine learning, logging, training, callbacks, integration 4 | --- 5 | 6 | # Reference for `ultralytics/utils/callbacks/dvc.py` 7 | 8 | !!! note 9 | 10 | This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/utils/callbacks/dvc.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/utils/callbacks/dvc.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/utils/callbacks/dvc.py) 🛠️. Thank you 🙏! 11 | 12 |
13 | 14 | ## ::: ultralytics.utils.callbacks.dvc._log_images 15 | 16 |



17 | 18 | ## ::: ultralytics.utils.callbacks.dvc._log_plots 19 | 20 |



21 | 22 | ## ::: ultralytics.utils.callbacks.dvc._log_confusion_matrix 23 | 24 |



25 | 26 | ## ::: ultralytics.utils.callbacks.dvc.on_pretrain_routine_start 27 | 28 |



29 | 30 | ## ::: ultralytics.utils.callbacks.dvc.on_pretrain_routine_end 31 | 32 |



33 | 34 | ## ::: ultralytics.utils.callbacks.dvc.on_train_start 35 | 36 |



37 | 38 | ## ::: ultralytics.utils.callbacks.dvc.on_train_epoch_start 39 | 40 |



41 | 42 | ## ::: ultralytics.utils.callbacks.dvc.on_fit_epoch_end 43 | 44 |



45 | 46 | ## ::: ultralytics.utils.callbacks.dvc.on_train_end 47 | 48 |

49 | -------------------------------------------------------------------------------- /docs/en/reference/utils/callbacks/hub.md: -------------------------------------------------------------------------------- 1 | --- 2 | description: Explore detailed guides on Ultralytics callbacks, including pretrain, model save, train start/end, and more. Enhance your ML training workflows with ease. 3 | keywords: Ultralytics, callbacks, pretrain, model save, train start, train end, validation, predict, export, training, machine learning 4 | --- 5 | 6 | # Reference for `ultralytics/utils/callbacks/hub.py` 7 | 8 | !!! note 9 | 10 | This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/utils/callbacks/hub.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/utils/callbacks/hub.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/utils/callbacks/hub.py) 🛠️. Thank you 🙏! 11 | 12 |
13 | 14 | ## ::: ultralytics.utils.callbacks.hub.on_pretrain_routine_start 15 | 16 |



17 | 18 | ## ::: ultralytics.utils.callbacks.hub.on_pretrain_routine_end 19 | 20 |



21 | 22 | ## ::: ultralytics.utils.callbacks.hub.on_fit_epoch_end 23 | 24 |



25 | 26 | ## ::: ultralytics.utils.callbacks.hub.on_model_save 27 | 28 |



29 | 30 | ## ::: ultralytics.utils.callbacks.hub.on_train_end 31 | 32 |



33 | 34 | ## ::: ultralytics.utils.callbacks.hub.on_train_start 35 | 36 |



37 | 38 | ## ::: ultralytics.utils.callbacks.hub.on_val_start 39 | 40 |



41 | 42 | ## ::: ultralytics.utils.callbacks.hub.on_predict_start 43 | 44 |



45 | 46 | ## ::: ultralytics.utils.callbacks.hub.on_export_start 47 | 48 |

49 | -------------------------------------------------------------------------------- /docs/en/reference/utils/callbacks/mlflow.md: -------------------------------------------------------------------------------- 1 | --- 2 | description: Learn how to set up and customize MLflow logging for Ultralytics YOLO. Log metrics, parameters, and model artifacts easily. 3 | keywords: MLflow, Ultralytics YOLO, logging, metrics, parameters, model artifacts, setup, tracking, customization 4 | --- 5 | 6 | # Reference for `ultralytics/utils/callbacks/mlflow.py` 7 | 8 | !!! note 9 | 10 | This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/utils/callbacks/mlflow.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/utils/callbacks/mlflow.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/utils/callbacks/mlflow.py) 🛠️. Thank you 🙏! 11 | 12 |
13 | 14 | ## ::: ultralytics.utils.callbacks.mlflow.sanitize_dict 15 | 16 |



17 | 18 | ## ::: ultralytics.utils.callbacks.mlflow.on_pretrain_routine_end 19 | 20 |



21 | 22 | ## ::: ultralytics.utils.callbacks.mlflow.on_train_epoch_end 23 | 24 |



25 | 26 | ## ::: ultralytics.utils.callbacks.mlflow.on_fit_epoch_end 27 | 28 |



29 | 30 | ## ::: ultralytics.utils.callbacks.mlflow.on_train_end 31 | 32 |

33 | -------------------------------------------------------------------------------- /docs/en/reference/utils/callbacks/neptune.md: -------------------------------------------------------------------------------- 1 | --- 2 | description: Learn how to use NeptuneAI with Ultralytics for advanced logging and tracking of experiments. Detailed setup and callback functions included. 3 | keywords: Ultralytics, NeptuneAI, YOLO, experiment logging, machine learning, AI, callbacks, training, validation 4 | --- 5 | 6 | # Reference for `ultralytics/utils/callbacks/neptune.py` 7 | 8 | !!! note 9 | 10 | This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/utils/callbacks/neptune.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/utils/callbacks/neptune.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/utils/callbacks/neptune.py) 🛠️. Thank you 🙏! 11 | 12 |
13 | 14 | ## ::: ultralytics.utils.callbacks.neptune._log_scalars 15 | 16 |



17 | 18 | ## ::: ultralytics.utils.callbacks.neptune._log_images 19 | 20 |



21 | 22 | ## ::: ultralytics.utils.callbacks.neptune._log_plot 23 | 24 |



25 | 26 | ## ::: ultralytics.utils.callbacks.neptune.on_pretrain_routine_start 27 | 28 |



29 | 30 | ## ::: ultralytics.utils.callbacks.neptune.on_train_epoch_end 31 | 32 |



33 | 34 | ## ::: ultralytics.utils.callbacks.neptune.on_fit_epoch_end 35 | 36 |



37 | 38 | ## ::: ultralytics.utils.callbacks.neptune.on_val_end 39 | 40 |



41 | 42 | ## ::: ultralytics.utils.callbacks.neptune.on_train_end 43 | 44 |

45 | -------------------------------------------------------------------------------- /docs/en/reference/utils/callbacks/raytune.md: -------------------------------------------------------------------------------- 1 | --- 2 | description: Learn how to integrate Ray Tune with Ultralytics YOLO for efficient hyperparameter tuning and performance tracking. 3 | keywords: Ultralytics, Ray Tune, hyperparameter tuning, YOLO, machine learning, deep learning, callbacks, integration, training metrics 4 | --- 5 | 6 | # Reference for `ultralytics/utils/callbacks/raytune.py` 7 | 8 | !!! note 9 | 10 | This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/utils/callbacks/raytune.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/utils/callbacks/raytune.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/utils/callbacks/raytune.py) 🛠️. Thank you 🙏! 11 | 12 |
13 | 14 | ## ::: ultralytics.utils.callbacks.raytune.on_fit_epoch_end 15 | 16 |

17 | -------------------------------------------------------------------------------- /docs/en/reference/utils/callbacks/tensorboard.md: -------------------------------------------------------------------------------- 1 | --- 2 | description: Learn how to integrate and use TensorBoard with Ultralytics for effective model training visualization. 3 | keywords: Ultralytics, TensorBoard, callbacks, machine learning, training visualization, logging 4 | --- 5 | 6 | # Reference for `ultralytics/utils/callbacks/tensorboard.py` 7 | 8 | !!! note 9 | 10 | This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/utils/callbacks/tensorboard.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/utils/callbacks/tensorboard.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/utils/callbacks/tensorboard.py) 🛠️. Thank you 🙏! 11 | 12 |
13 | 14 | ## ::: ultralytics.utils.callbacks.tensorboard._log_scalars 15 | 16 |



17 | 18 | ## ::: ultralytics.utils.callbacks.tensorboard._log_tensorboard_graph 19 | 20 |



21 | 22 | ## ::: ultralytics.utils.callbacks.tensorboard.on_pretrain_routine_start 23 | 24 |



25 | 26 | ## ::: ultralytics.utils.callbacks.tensorboard.on_train_start 27 | 28 |



29 | 30 | ## ::: ultralytics.utils.callbacks.tensorboard.on_train_epoch_end 31 | 32 |



33 | 34 | ## ::: ultralytics.utils.callbacks.tensorboard.on_fit_epoch_end 35 | 36 |

37 | -------------------------------------------------------------------------------- /docs/en/reference/utils/callbacks/wb.md: -------------------------------------------------------------------------------- 1 | --- 2 | description: Learn how Ultralytics YOLO integrates with WandB using custom callbacks for logging metrics and visualizations. 3 | keywords: Ultralytics, YOLO, WandB, callbacks, logging, metrics, visualizations, AI, machine learning 4 | --- 5 | 6 | # Reference for `ultralytics/utils/callbacks/wb.py` 7 | 8 | !!! note 9 | 10 | This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/utils/callbacks/wb.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/utils/callbacks/wb.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/utils/callbacks/wb.py) 🛠️. Thank you 🙏! 11 | 12 |
13 | 14 | ## ::: ultralytics.utils.callbacks.wb._custom_table 15 | 16 |



17 | 18 | ## ::: ultralytics.utils.callbacks.wb._plot_curve 19 | 20 |



21 | 22 | ## ::: ultralytics.utils.callbacks.wb._log_plots 23 | 24 |



25 | 26 | ## ::: ultralytics.utils.callbacks.wb.on_pretrain_routine_start 27 | 28 |



29 | 30 | ## ::: ultralytics.utils.callbacks.wb.on_fit_epoch_end 31 | 32 |



33 | 34 | ## ::: ultralytics.utils.callbacks.wb.on_train_epoch_end 35 | 36 |



37 | 38 | ## ::: ultralytics.utils.callbacks.wb.on_train_end 39 | 40 |

41 | -------------------------------------------------------------------------------- /docs/en/reference/utils/dist.md: -------------------------------------------------------------------------------- 1 | --- 2 | description: Explore Ultralytics' utilities for distributed training including DDP file generation, command setup, and cleanup. Improve multi-node training efficiency. 3 | keywords: Ultralytics, distributed training, DDP, multi-node training, network port, DDP file generation, DDP command, training utilities 4 | --- 5 | 6 | # Reference for `ultralytics/utils/dist.py` 7 | 8 | !!! note 9 | 10 | This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/utils/dist.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/utils/dist.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/utils/dist.py) 🛠️. Thank you 🙏! 11 | 12 |
13 | 14 | ## ::: ultralytics.utils.dist.find_free_network_port 15 | 16 |



17 | 18 | ## ::: ultralytics.utils.dist.generate_ddp_file 19 | 20 |



21 | 22 | ## ::: ultralytics.utils.dist.generate_ddp_command 23 | 24 |



25 | 26 | ## ::: ultralytics.utils.dist.ddp_cleanup 27 | 28 |
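These DDP helpers are internal to the trainer; multi-GPU training is normally requested through the `device` argument, which causes the trainer to generate and launch the DDP command. A sketch, assuming two visible GPUs:

```python
from ultralytics import YOLO

model = YOLO("yolo11n.pt")

# Requesting more than one device makes the trainer build a DDP launch command
# (via generate_ddp_file / generate_ddp_command) and spawn one process per GPU.
model.train(data="coco8.yaml", epochs=1, imgsz=640, device="0,1")
```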

29 | -------------------------------------------------------------------------------- /docs/en/reference/utils/downloads.md: -------------------------------------------------------------------------------- 1 | --- 2 | description: Explore and utilize the Ultralytics download utilities to handle URLs, zip/unzip files, and manage GitHub assets effectively. 3 | keywords: Ultralytics, download utilities, URL validation, zip directory, unzip file, check disk space, Google Drive, GitHub assets, YOLO, machine learning 4 | --- 5 | 6 | # Reference for `ultralytics/utils/downloads.py` 7 | 8 | !!! note 9 | 10 | This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/utils/downloads.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/utils/downloads.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/utils/downloads.py) 🛠️. Thank you 🙏! 11 | 12 |
13 | 14 | ## ::: ultralytics.utils.downloads.is_url 15 | 16 |



17 | 18 | ## ::: ultralytics.utils.downloads.delete_dsstore 19 | 20 |



21 | 22 | ## ::: ultralytics.utils.downloads.zip_directory 23 | 24 |



25 | 26 | ## ::: ultralytics.utils.downloads.unzip_file 27 | 28 |



29 | 30 | ## ::: ultralytics.utils.downloads.check_disk_space 31 | 32 |



33 | 34 | ## ::: ultralytics.utils.downloads.get_google_drive_file_info 35 | 36 |



37 | 38 | ## ::: ultralytics.utils.downloads.safe_download 39 | 40 |



41 | 42 | ## ::: ultralytics.utils.downloads.get_github_assets 43 | 44 |



45 | 46 | ## ::: ultralytics.utils.downloads.attempt_download_asset 47 | 48 |



49 | 50 | ## ::: ultralytics.utils.downloads.download 51 | 52 |
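A hedged sketch of two commonly used helpers; the URL is a placeholder and only the argument names shown here are assumed.

```python
from ultralytics.utils.downloads import attempt_download_asset, safe_download

# Download a file (placeholder URL) into ./weights, extracting it if it is an archive.
safe_download(url="https://example.com/models/model.zip", dir="weights", unzip=True, retry=3)

# Fetch a known Ultralytics release asset by name if it is not already present locally.
attempt_download_asset("yolo11n.pt")
```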

53 | -------------------------------------------------------------------------------- /docs/en/reference/utils/errors.md: -------------------------------------------------------------------------------- 1 | --- 2 | description: Explore error handling for Ultralytics YOLO. Learn about custom exceptions like HUBModelError to manage model fetching issues effectively. 3 | keywords: Ultralytics, YOLO, error handling, HUBModelError, model fetching, custom exceptions, Python 4 | --- 5 | 6 | # Reference for `ultralytics/utils/errors.py` 7 | 8 | !!! note 9 | 10 | This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/utils/errors.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/utils/errors.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/utils/errors.py) 🛠️. Thank you 🙏! 11 | 12 |
13 | 14 | ## ::: ultralytics.utils.errors.HUBModelError 15 | 16 |

17 | -------------------------------------------------------------------------------- /docs/en/reference/utils/export.md: -------------------------------------------------------------------------------- 1 | --- 2 | description: Learn how to export PyTorch models to ONNX and TensorRT formats using Ultralytics utilities. Comprehensive guide for configurations, dynamic shapes, and precision optimizations. 3 | keywords: Ultralytics, YOLO, export, ONNX, TensorRT, PyTorch, model conversion, dynamic shapes, FP16, INT8, machine learning 4 | --- 5 | 6 | # Reference for `ultralytics/utils/export.py` 7 | 8 | !!! note 9 | 10 | This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/utils/export.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/utils/export.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/utils/export.py) 🛠️. Thank you 🙏! 11 | 12 |
13 | 14 | ## ::: ultralytics.utils.export.export_onnx 15 | 16 |



17 | 18 | ## ::: ultralytics.utils.export.export_engine 19 | 20 |

21 | -------------------------------------------------------------------------------- /docs/en/reference/utils/files.md: -------------------------------------------------------------------------------- 1 | --- 2 | description: Explore the utility functions and context managers in Ultralytics like WorkingDirectory, increment_path, file_size, and more. Enhance your file handling in Python. 3 | keywords: Ultralytics, file utilities, Python, WorkingDirectory, increment_path, file_size, file_age, contexts, file handling, file management 4 | --- 5 | 6 | # Reference for `ultralytics/utils/files.py` 7 | 8 | !!! note 9 | 10 | This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/utils/files.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/utils/files.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/utils/files.py) 🛠️. Thank you 🙏! 11 | 12 |
13 | 14 | ## ::: ultralytics.utils.files.WorkingDirectory 15 | 16 |



17 | 18 | ## ::: ultralytics.utils.files.spaces_in_path 19 | 20 |



21 | 22 | ## ::: ultralytics.utils.files.increment_path 23 | 24 |



25 | 26 | ## ::: ultralytics.utils.files.file_age 27 | 28 |



29 | 30 | ## ::: ultralytics.utils.files.file_date 31 | 32 |



33 | 34 | ## ::: ultralytics.utils.files.file_size 35 | 36 |



37 | 38 | ## ::: ultralytics.utils.files.get_latest_run 39 | 40 |



41 | 42 | ## ::: ultralytics.utils.files.update_models 43 | 44 |
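A short sketch of three of these utilities; paths are placeholders.

```python
from pathlib import Path

from ultralytics.utils.files import WorkingDirectory, file_size, increment_path

save_dir = increment_path(Path("runs/exp"), exist_ok=False)  # runs/exp, runs/exp2, runs/exp3, ...
save_dir.mkdir(parents=True, exist_ok=True)

print(file_size("yolo11n.pt"))  # file size in MB (0.0 if the file does not exist)

with WorkingDirectory(save_dir):  # temporarily chdir into save_dir, restored on exit
    Path("notes.txt").write_text("created inside the run directory")
```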

45 | -------------------------------------------------------------------------------- /docs/en/reference/utils/instance.md: -------------------------------------------------------------------------------- 1 | --- 2 | description: Explore Ultralytics utilities for bounding boxes and instances, providing detailed documentation on handling bbox formats, conversions, and more. 3 | keywords: Ultralytics, bounding boxes, Instances, bbox formats, conversions, AI, deep learning, YOLO, xyxy, xywh, ltwh 4 | --- 5 | 6 | # Reference for `ultralytics/utils/instance.py` 7 | 8 | !!! note 9 | 10 | This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/utils/instance.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/utils/instance.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/utils/instance.py) 🛠️. Thank you 🙏! 11 | 12 |
13 | 14 | ## ::: ultralytics.utils.instance.Bboxes 15 | 16 |



17 | 18 | ## ::: ultralytics.utils.instance.Instances 19 | 20 |



21 | 22 | ## ::: ultralytics.utils.instance._ntuple 23 | 24 |

25 | -------------------------------------------------------------------------------- /docs/en/reference/utils/patches.md: -------------------------------------------------------------------------------- 1 | --- 2 | description: Explore and contribute to Ultralytics' utils/patches.py. Learn about the imread, imwrite, imshow, and torch_save functions. 3 | keywords: Ultralytics, utils, patches, imread, imwrite, imshow, torch_save, OpenCV, PyTorch, GitHub 4 | --- 5 | 6 | # Reference for `ultralytics/utils/patches.py` 7 | 8 | !!! note 9 | 10 | This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/utils/patches.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/utils/patches.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/utils/patches.py) 🛠️. Thank you 🙏! 11 | 12 |
13 | 14 | ## ::: ultralytics.utils.patches.imread 15 | 16 |



17 | 18 | ## ::: ultralytics.utils.patches.imwrite 19 | 20 |



21 | 22 | ## ::: ultralytics.utils.patches.imshow 23 | 24 |



25 | 26 | ## ::: ultralytics.utils.patches.torch_load 27 | 28 |



29 | 30 | ## ::: ultralytics.utils.patches.torch_save 31 | 32 |

33 | -------------------------------------------------------------------------------- /docs/en/reference/utils/plotting.md: -------------------------------------------------------------------------------- 1 | --- 2 | description: Explore detailed functionalities of Ultralytics plotting utilities for data visualizations and custom annotations in ML projects. 3 | keywords: ultralytics, plotting, utilities, documentation, data visualization, annotations, python, ML tools 4 | --- 5 | 6 | # Reference for `ultralytics/utils/plotting.py` 7 | 8 | !!! note 9 | 10 | This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/utils/plotting.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/utils/plotting.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/utils/plotting.py) 🛠️. Thank you 🙏! 11 | 12 |
13 | 14 | ## ::: ultralytics.utils.plotting.Colors 15 | 16 |



17 | 18 | ## ::: ultralytics.utils.plotting.Annotator 19 | 20 |



21 | 22 | ## ::: ultralytics.utils.plotting.plot_labels 23 | 24 |



25 | 26 | ## ::: ultralytics.utils.plotting.save_one_box 27 | 28 |



29 | 30 | ## ::: ultralytics.utils.plotting.plot_images 31 | 32 |



33 | 34 | ## ::: ultralytics.utils.plotting.plot_results 35 | 36 |



37 | 38 | ## ::: ultralytics.utils.plotting.plt_color_scatter 39 | 40 |



41 | 42 | ## ::: ultralytics.utils.plotting.plot_tune_results 43 | 44 |



45 | 46 | ## ::: ultralytics.utils.plotting.output_to_target 47 | 48 |



49 | 50 | ## ::: ultralytics.utils.plotting.output_to_rotated_target 51 | 52 |



53 | 54 | ## ::: ultralytics.utils.plotting.feature_visualization 55 | 56 |

57 | -------------------------------------------------------------------------------- /docs/en/reference/utils/tal.md: -------------------------------------------------------------------------------- 1 | --- 2 | description: Explore the TaskAlignedAssigner in Ultralytics YOLO. Learn about the task-aligned assignment metric and its applications in object detection. 3 | keywords: Ultralytics, YOLO, TaskAlignedAssigner, object detection, machine learning, AI, tal.py, PyTorch 4 | --- 5 | 6 | # Reference for `ultralytics/utils/tal.py` 7 | 8 | !!! note 9 | 10 | This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/utils/tal.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/utils/tal.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/utils/tal.py) 🛠️. Thank you 🙏! 11 | 12 |
13 | 14 | ## ::: ultralytics.utils.tal.TaskAlignedAssigner 15 | 16 |



17 | 18 | ## ::: ultralytics.utils.tal.RotatedTaskAlignedAssigner 19 | 20 |



21 | 22 | ## ::: ultralytics.utils.tal.make_anchors 23 | 24 |



25 | 26 | ## ::: ultralytics.utils.tal.dist2bbox 27 | 28 |



29 | 30 | ## ::: ultralytics.utils.tal.bbox2dist 31 | 32 |



33 | 34 | ## ::: ultralytics.utils.tal.dist2rbox 35 | 36 |

37 | -------------------------------------------------------------------------------- /docs/en/reference/utils/triton.md: -------------------------------------------------------------------------------- 1 | --- 2 | description: Learn how to use the TritonRemoteModel class for interacting with remote Triton Inference Server models. Detailed guide with code examples and attributes. 3 | keywords: Ultralytics, TritonRemoteModel, Triton Inference Server, model client, inference, remote model, machine learning, AI, Python 4 | --- 5 | 6 | # Reference for `ultralytics/utils/triton.py` 7 | 8 | !!! note 9 | 10 | This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/utils/triton.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/utils/triton.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/utils/triton.py) 🛠️. Thank you 🙏! 11 | 12 |
13 | 14 | ## ::: ultralytics.utils.triton.TritonRemoteModel 15 | 16 |
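A hedged sketch of direct client usage; it assumes a Triton server is already serving a model named `yolo` over HTTP on port 8000 (server URL, endpoint name, and input shape are all placeholders). The higher-level route is simply passing the server URL to `YOLO`.

```python
import numpy as np

from ultralytics.utils.triton import TritonRemoteModel

# Low-level client: numpy arrays in, a list of numpy arrays out.
model = TritonRemoteModel(url="localhost:8000", endpoint="yolo", scheme="http")
outputs = model(np.random.rand(1, 3, 640, 640).astype(np.float32))

# Higher-level alternative: let YOLO wrap the Triton endpoint directly.
# from ultralytics import YOLO
# YOLO("http://localhost:8000/yolo", task="detect").predict("bus.jpg")
```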

17 | -------------------------------------------------------------------------------- /docs/en/reference/utils/tuner.md: -------------------------------------------------------------------------------- 1 | --- 2 | description: Explore how to use ultralytics.utils.tuner.py for efficient hyperparameter tuning with Ray Tune. Learn implementation details and example usage. 3 | keywords: Ultralytics, tuner, hyperparameter tuning, Ray Tune, YOLO, machine learning, AI, optimization 4 | --- 5 | 6 | # Reference for `ultralytics/utils/tuner.py` 7 | 8 | !!! note 9 | 10 | This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/utils/tuner.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/utils/tuner.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/utils/tuner.py) 🛠️. Thank you 🙏! 11 | 12 |
13 | 14 | ## ::: ultralytics.utils.tuner.run_ray_tune 15 | 16 |
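`run_ray_tune` is normally reached through `model.tune(use_ray=True, ...)`; a sketch assuming `ray[tune]` is installed and using the small built-in `coco8.yaml` dataset:

```python
from ultralytics import YOLO

model = YOLO("yolo11n.pt")

# use_ray=True routes tuning through run_ray_tune(); requires `pip install "ray[tune]"`.
result_grid = model.tune(data="coco8.yaml", use_ray=True, epochs=5, imgsz=640)
```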

17 | -------------------------------------------------------------------------------- /docs/en/robots.txt: -------------------------------------------------------------------------------- 1 | User-agent: * 2 | Sitemap: https://docs.ultralytics.com/sitemap.xml 3 | Sitemap: https://docs.ultralytics.com/ar/sitemap.xml 4 | Sitemap: https://docs.ultralytics.com/de/sitemap.xml 5 | Sitemap: https://docs.ultralytics.com/es/sitemap.xml 6 | Sitemap: https://docs.ultralytics.com/fr/sitemap.xml 7 | Sitemap: https://docs.ultralytics.com/it/sitemap.xml 8 | Sitemap: https://docs.ultralytics.com/ja/sitemap.xml 9 | Sitemap: https://docs.ultralytics.com/ko/sitemap.xml 10 | Sitemap: https://docs.ultralytics.com/pt/sitemap.xml 11 | Sitemap: https://docs.ultralytics.com/ru/sitemap.xml 12 | Sitemap: https://docs.ultralytics.com/tr/sitemap.xml 13 | Sitemap: https://docs.ultralytics.com/vi/sitemap.xml 14 | Sitemap: https://docs.ultralytics.com/zh/sitemap.xml 15 | -------------------------------------------------------------------------------- /docs/overrides/main.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | {% extends "base.html" %} {% block announce %} 5 | 19 | {% endblock %} 20 | 21 | 22 | {% block htmltitle %} {% if page.toc|first is defined %} {% set 23 | page_specific_title = page.toc.items[0].title %} {% else %} {% set 24 | page_specific_title = page.title | striptags %} {% endif %} 25 | 26 | {%- if page_specific_title -%} {{ page_specific_title }} - {{ config.site_name 27 | }} {%- else -%} {{ config.site_name }} {%- endif -%} 28 | 29 | {% endblock %} 30 | -------------------------------------------------------------------------------- /docs/overrides/partials/comments.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | {% if page.meta.comments %} 4 |

{{ lang.t("meta.comments") }}

5 | 6 | 7 |
8 | 9 | {% endif %} 10 | -------------------------------------------------------------------------------- /examples/YOLO-Series-ONNXRuntime-Rust/Cargo.toml: -------------------------------------------------------------------------------- 1 | # Ultralytics 🚀 AGPL-3.0 License - https://ultralytics.com/license 2 | 3 | [package] 4 | name = "YOLO-ONNXRuntime-Rust" 5 | version = "0.1.0" 6 | edition = "2021" 7 | authors = ["Jamjamjon "] 8 | 9 | [dependencies] 10 | anyhow = "1.0.92" 11 | clap = "4.5.20" 12 | tracing = "0.1.40" 13 | tracing-subscriber = "0.3.18" 14 | usls = { version = "0.0.19", features = ["auto"] } 15 | -------------------------------------------------------------------------------- /examples/YOLOv8-Action-Recognition/requirements.txt: -------------------------------------------------------------------------------- 1 | # Ultralytics YOLO 🚀, AGPL-3.0 license 2 | 3 | ultralytics 4 | transformers 5 | -------------------------------------------------------------------------------- /examples/YOLOv8-CPP-Inference/CMakeLists.txt: -------------------------------------------------------------------------------- 1 | cmake_minimum_required(VERSION 3.5) 2 | 3 | project(Yolov8CPPInference VERSION 0.1) 4 | 5 | set(CMAKE_INCLUDE_CURRENT_DIR ON) 6 | 7 | # CUDA 8 | set(CUDA_TOOLKIT_ROOT_DIR "/usr/local/cuda") 9 | find_package(CUDA 11 REQUIRED) 10 | 11 | set(CMAKE_CUDA_STANDARD 11) 12 | set(CMAKE_CUDA_STANDARD_REQUIRED ON) 13 | # !CUDA 14 | 15 | # OpenCV 16 | find_package(OpenCV REQUIRED) 17 | include_directories(${OpenCV_INCLUDE_DIRS}) 18 | # !OpenCV 19 | 20 | set(PROJECT_SOURCES 21 | main.cpp 22 | 23 | inference.h 24 | inference.cpp 25 | ) 26 | 27 | add_executable(Yolov8CPPInference ${PROJECT_SOURCES}) 28 | target_link_libraries(Yolov8CPPInference ${OpenCV_LIBS}) 29 | -------------------------------------------------------------------------------- /examples/YOLOv8-MNN-CPP/CMakeLists.txt: -------------------------------------------------------------------------------- 1 | cmake_minimum_required(VERSION 3.12) 2 | project(mnn_yolo_cpp) 3 | 4 | set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++11") 5 | 6 | include_directories(${CMAKE_CURRENT_LIST_DIR}/include/) 7 | 8 | link_directories(${CMAKE_CURRENT_LIST_DIR}/libs) 9 | 10 | add_executable("main" "${CMAKE_CURRENT_LIST_DIR}/main.cpp") 11 | add_executable("main_interpreter" "${CMAKE_CURRENT_LIST_DIR}/main_interpreter.cpp") 12 | 13 | target_link_libraries("main" MNN MNN_Express MNNOpenCV) 14 | target_link_libraries("main_interpreter" MNN MNN_Express MNNOpenCV) 15 | -------------------------------------------------------------------------------- /examples/YOLOv8-ONNXRuntime-Rust/Cargo.toml: -------------------------------------------------------------------------------- 1 | # Ultralytics 🚀 AGPL-3.0 License - https://ultralytics.com/license 2 | 3 | [package] 4 | name = "yolov8-rs" 5 | version = "0.1.0" 6 | edition = "2021" 7 | 8 | # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html 9 | 10 | [dependencies] 11 | clap = { version = "4.2.4", features = ["derive"] } 12 | image = { version = "0.25.2"} 13 | imageproc = { version = "0.25.0"} 14 | ndarray = { version = "0.16" } 15 | ort = { version = "2.0.0-rc.5", features = ["cuda", "tensorrt", "load-dynamic", "copy-dylibs", "half"]} 16 | rusttype = { version = "0.9.3" } 17 | anyhow = { version = "1.0.75" } 18 | regex = { version = "1.5.4" } 19 | rand = { version = "0.8.5" } 20 | chrono = { version = "0.4.30" } 21 | half = { version = "2.3.1" } 22 | dirs = { version =
"5.0.1" } 23 | ureq = { version = "2.9.1" } 24 | ab_glyph = "0.2.29" 25 | -------------------------------------------------------------------------------- /examples/YOLOv8-ONNXRuntime-Rust/src/main.rs: -------------------------------------------------------------------------------- 1 | use clap::Parser; 2 | 3 | use yolov8_rs::{Args, YOLOv8}; 4 | 5 | fn main() -> Result<(), Box<dyn std::error::Error>> { 6 | let args = Args::parse(); 7 | 8 | // 1. load image 9 | let x = image::ImageReader::open(&args.source)? 10 | .with_guessed_format()? 11 | .decode()?; 12 | 13 | // 2. the model supports dynamic batch inference, so the input should be a Vec<DynamicImage> 14 | let xs = vec![x]; 15 | 16 | // You can test `--batch 2` with this 17 | // let xs = vec![x.clone(), x]; 18 | 19 | // 3. build yolov8 model 20 | let mut model = YOLOv8::new(args)?; 21 | model.summary(); // model info 22 | 23 | // 4. run 24 | let ys = model.run(&xs)?; 25 | println!("{:?}", ys); 26 | 27 | Ok(()) 28 | } 29 | -------------------------------------------------------------------------------- /examples/YOLOv8-OpenVINO-CPP-Inference/CMakeLists.txt: -------------------------------------------------------------------------------- 1 | cmake_minimum_required(VERSION 3.12) 2 | project(yolov8_openvino_example) 3 | 4 | set(CMAKE_CXX_STANDARD 14) 5 | 6 | find_package(OpenCV REQUIRED) 7 | 8 | include_directories( 9 | ${OpenCV_INCLUDE_DIRS} 10 | /path/to/intel/openvino/runtime/include 11 | ) 12 | 13 | add_executable(detect 14 | main.cc 15 | inference.cc 16 | ) 17 | 18 | target_link_libraries(detect 19 | ${OpenCV_LIBS} 20 | /path/to/intel/openvino/runtime/lib/intel64/libopenvino.so 21 | ) 22 | -------------------------------------------------------------------------------- /examples/YOLOv8-OpenVINO-CPP-Inference/main.cc: -------------------------------------------------------------------------------- 1 | #include "inference.h" 2 | 3 | #include <iostream> 4 | #include <opencv2/highgui.hpp> 5 | 6 | int main(int argc, char **argv) { 7 | // Check if the correct number of arguments is provided 8 | if (argc != 3) { 9 | std::cerr << "usage: " << argv[0] << " <model_path> <image_path>" << std::endl; 10 | return 1; 11 | } 12 | 13 | // Get the model and image paths from the command-line arguments 14 | const std::string model_path = argv[1]; 15 | const std::string image_path = argv[2]; 16 | 17 | // Read the input image 18 | cv::Mat image = cv::imread(image_path); 19 | 20 | // Check if the image was successfully loaded 21 | if (image.empty()) { 22 | std::cerr << "ERROR: image is empty" << std::endl; 23 | return 1; 24 | } 25 | 26 | // Define the confidence and NMS thresholds 27 | const float confidence_threshold = 0.5; 28 | const float NMS_threshold = 0.5; 29 | 30 | // Initialize the YOLO inference with the specified model and parameters 31 | yolo::Inference inference(model_path, cv::Size(640, 640), confidence_threshold, NMS_threshold); 32 | 33 | // Run inference on the input image 34 | inference.RunInference(image); 35 | 36 | // Display the image with the detections 37 | cv::imshow("image", image); 38 | cv::waitKey(0); 39 | 40 | return 0; 41 | } 42 | -------------------------------------------------------------------------------- /tests/__init__.py: -------------------------------------------------------------------------------- 1 | # Ultralytics 🚀 AGPL-3.0 License - https://ultralytics.com/license 2 | 3 | from ultralytics.utils import ASSETS, ROOT, WEIGHTS_DIR, checks 4 | 5 | # Constants used in tests 6 | MODEL = WEIGHTS_DIR / "path with spaces" / "yolo11n.pt" # test spaces in path 7 | CFG = "yolo11n.yaml" 8 | SOURCE = ASSETS / "bus.jpg" 9 |
SOURCES_LIST = [ASSETS / "bus.jpg", ASSETS, ASSETS / "*", ASSETS / "**/*.jpg"] 10 | TMP = (ROOT / "../tests/tmp").resolve() # temp directory for test files 11 | CUDA_IS_AVAILABLE = checks.cuda_is_available() 12 | CUDA_DEVICE_COUNT = checks.cuda_device_count() 13 | 14 | __all__ = ( 15 | "MODEL", 16 | "CFG", 17 | "SOURCE", 18 | "SOURCES_LIST", 19 | "TMP", 20 | "CUDA_IS_AVAILABLE", 21 | "CUDA_DEVICE_COUNT", 22 | ) 23 | -------------------------------------------------------------------------------- /ultralytics/__init__.py: -------------------------------------------------------------------------------- 1 | # Ultralytics 🚀 AGPL-3.0 License - https://ultralytics.com/license 2 | 3 | __version__ = "8.3.119" 4 | 5 | import os 6 | 7 | # Set ENV variables (place before imports) 8 | if not os.environ.get("OMP_NUM_THREADS"): 9 | os.environ["OMP_NUM_THREADS"] = "1" # default for reduced CPU utilization during training 10 | 11 | from ultralytics.models import NAS, RTDETR, SAM, YOLO, YOLOE, FastSAM, YOLOWorld 12 | from ultralytics.utils import ASSETS, SETTINGS 13 | from ultralytics.utils.checks import check_yolo as checks 14 | from ultralytics.utils.downloads import download 15 | 16 | settings = SETTINGS 17 | __all__ = ( 18 | "__version__", 19 | "ASSETS", 20 | "YOLO", 21 | "YOLOWorld", 22 | "YOLOE", 23 | "NAS", 24 | "SAM", 25 | "FastSAM", 26 | "RTDETR", 27 | "checks", 28 | "download", 29 | "settings", 30 | ) 31 | -------------------------------------------------------------------------------- /ultralytics/assets/bus.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/triple-Mu/yolov8/e700646ea265326aad8822aa6db2a7d4fabacf85/ultralytics/assets/bus.jpg -------------------------------------------------------------------------------- /ultralytics/assets/zidane.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/triple-Mu/yolov8/e700646ea265326aad8822aa6db2a7d4fabacf85/ultralytics/assets/zidane.jpg -------------------------------------------------------------------------------- /ultralytics/cfg/datasets/DOTAv1.5.yaml: -------------------------------------------------------------------------------- 1 | # Ultralytics 🚀 AGPL-3.0 License - https://ultralytics.com/license 2 | 3 | # DOTA 1.5 dataset https://captain-whu.github.io/DOTA/index.html for object detection in aerial images by Wuhan University 4 | # Documentation: https://docs.ultralytics.com/datasets/obb/dota-v2/ 5 | # Example usage: yolo train model=yolov8n-obb.pt data=DOTAv1.5.yaml 6 | # parent 7 | # ├── ultralytics 8 | # └── datasets 9 | # └── dota1.5 ← downloads here (2GB) 10 | 11 | # Train/val/test sets as 1) dir: path/to/imgs, 2) file: path/to/imgs.txt, or 3) list: [path/to/imgs1, path/to/imgs2, ..] 
12 | path: ../datasets/DOTAv1.5 # dataset root dir 13 | train: images/train # train images (relative to 'path') 1411 images 14 | val: images/val # val images (relative to 'path') 458 images 15 | test: images/test # test images (optional) 937 images 16 | 17 | # Classes for DOTA 1.5 18 | names: 19 | 0: plane 20 | 1: ship 21 | 2: storage tank 22 | 3: baseball diamond 23 | 4: tennis court 24 | 5: basketball court 25 | 6: ground track field 26 | 7: harbor 27 | 8: bridge 28 | 9: large vehicle 29 | 10: small vehicle 30 | 11: helicopter 31 | 12: roundabout 32 | 13: soccer ball field 33 | 14: swimming pool 34 | 15: container crane 35 | 36 | # Download script/URL (optional) 37 | download: https://github.com/ultralytics/assets/releases/download/v0.0.0/DOTAv1.5.zip 38 | -------------------------------------------------------------------------------- /ultralytics/cfg/datasets/DOTAv1.yaml: -------------------------------------------------------------------------------- 1 | # Ultralytics 🚀 AGPL-3.0 License - https://ultralytics.com/license 2 | 3 | # DOTA 1.0 dataset https://captain-whu.github.io/DOTA/index.html for object detection in aerial images by Wuhan University 4 | # Documentation: https://docs.ultralytics.com/datasets/obb/dota-v2/ 5 | # Example usage: yolo train model=yolov8n-obb.pt data=DOTAv1.yaml 6 | # parent 7 | # ├── ultralytics 8 | # └── datasets 9 | # └── dota1 ← downloads here (2GB) 10 | 11 | # Train/val/test sets as 1) dir: path/to/imgs, 2) file: path/to/imgs.txt, or 3) list: [path/to/imgs1, path/to/imgs2, ..] 12 | path: ../datasets/DOTAv1 # dataset root dir 13 | train: images/train # train images (relative to 'path') 1411 images 14 | val: images/val # val images (relative to 'path') 458 images 15 | test: images/test # test images (optional) 937 images 16 | 17 | # Classes for DOTA 1.0 18 | names: 19 | 0: plane 20 | 1: ship 21 | 2: storage tank 22 | 3: baseball diamond 23 | 4: tennis court 24 | 5: basketball court 25 | 6: ground track field 26 | 7: harbor 27 | 8: bridge 28 | 9: large vehicle 29 | 10: small vehicle 30 | 11: helicopter 31 | 12: roundabout 32 | 13: soccer ball field 33 | 14: swimming pool 34 | 35 | # Download script/URL (optional) 36 | download: https://github.com/ultralytics/assets/releases/download/v0.0.0/DOTAv1.zip 37 | -------------------------------------------------------------------------------- /ultralytics/cfg/datasets/african-wildlife.yaml: -------------------------------------------------------------------------------- 1 | # Ultralytics 🚀 AGPL-3.0 License - https://ultralytics.com/license 2 | 3 | # African-wildlife dataset by Ultralytics 4 | # Documentation: https://docs.ultralytics.com/datasets/detect/african-wildlife/ 5 | # Example usage: yolo train data=african-wildlife.yaml 6 | # parent 7 | # ├── ultralytics 8 | # └── datasets 9 | # └── african-wildlife ← downloads here (100 MB) 10 | 11 | # Train/val/test sets as 1) dir: path/to/imgs, 2) file: path/to/imgs.txt, or 3) list: [path/to/imgs1, path/to/imgs2, ..] 
12 | path: ../datasets/african-wildlife # dataset root dir 13 | train: train/images # train images (relative to 'path') 1052 images 14 | val: valid/images # val images (relative to 'path') 225 images 15 | test: test/images # test images (relative to 'path') 227 images 16 | 17 | # Classes 18 | names: 19 | 0: buffalo 20 | 1: elephant 21 | 2: rhino 22 | 3: zebra 23 | 24 | # Download script/URL (optional) 25 | download: https://github.com/ultralytics/assets/releases/download/v0.0.0/african-wildlife.zip 26 | -------------------------------------------------------------------------------- /ultralytics/cfg/datasets/brain-tumor.yaml: -------------------------------------------------------------------------------- 1 | # Ultralytics 🚀 AGPL-3.0 License - https://ultralytics.com/license 2 | 3 | # Brain-tumor dataset by Ultralytics 4 | # Documentation: https://docs.ultralytics.com/datasets/detect/brain-tumor/ 5 | # Example usage: yolo train data=brain-tumor.yaml 6 | # parent 7 | # ├── ultralytics 8 | # └── datasets 9 | # └── brain-tumor ← downloads here (4.05 MB) 10 | 11 | # Train/val/test sets as 1) dir: path/to/imgs, 2) file: path/to/imgs.txt, or 3) list: [path/to/imgs1, path/to/imgs2, ..] 12 | path: ../datasets/brain-tumor # dataset root dir 13 | train: train/images # train images (relative to 'path') 893 images 14 | val: valid/images # val images (relative to 'path') 223 images 15 | test: # test images (relative to 'path') 16 | 17 | # Classes 18 | names: 19 | 0: negative 20 | 1: positive 21 | 22 | # Download script/URL (optional) 23 | download: https://github.com/ultralytics/assets/releases/download/v0.0.0/brain-tumor.zip 24 | -------------------------------------------------------------------------------- /ultralytics/cfg/datasets/carparts-seg.yaml: -------------------------------------------------------------------------------- 1 | # Ultralytics 🚀 AGPL-3.0 License - https://ultralytics.com/license 2 | 3 | # Carparts-seg dataset by Ultralytics 4 | # Documentation: https://docs.ultralytics.com/datasets/segment/carparts-seg/ 5 | # Example usage: yolo train data=carparts-seg.yaml 6 | # parent 7 | # ├── ultralytics 8 | # └── datasets 9 | # └── carparts-seg ← downloads here (132 MB) 10 | 11 | # Train/val/test sets as 1) dir: path/to/imgs, 2) file: path/to/imgs.txt, or 3) list: [path/to/imgs1, path/to/imgs2, ..] 
12 | path: ../datasets/carparts-seg # dataset root dir 13 | train: train/images # train images (relative to 'path') 3516 images 14 | val: valid/images # val images (relative to 'path') 276 images 15 | test: test/images # test images (relative to 'path') 401 images 16 | 17 | # Classes 18 | names: 19 | 0: back_bumper 20 | 1: back_door 21 | 2: back_glass 22 | 3: back_left_door 23 | 4: back_left_light 24 | 5: back_light 25 | 6: back_right_door 26 | 7: back_right_light 27 | 8: front_bumper 28 | 9: front_door 29 | 10: front_glass 30 | 11: front_left_door 31 | 12: front_left_light 32 | 13: front_light 33 | 14: front_right_door 34 | 15: front_right_light 35 | 16: hood 36 | 17: left_mirror 37 | 18: object 38 | 19: right_mirror 39 | 20: tailgate 40 | 21: trunk 41 | 22: wheel 42 | 43 | # Download script/URL (optional) 44 | download: https://github.com/ultralytics/assets/releases/download/v0.0.0/carparts-seg.zip 45 | -------------------------------------------------------------------------------- /ultralytics/cfg/datasets/coco8-pose.yaml: -------------------------------------------------------------------------------- 1 | # Ultralytics 🚀 AGPL-3.0 License - https://ultralytics.com/license 2 | 3 | # COCO8-pose dataset (first 8 images from COCO train2017) by Ultralytics 4 | # Documentation: https://docs.ultralytics.com/datasets/pose/coco8-pose/ 5 | # Example usage: yolo train data=coco8-pose.yaml 6 | # parent 7 | # ├── ultralytics 8 | # └── datasets 9 | # └── coco8-pose ← downloads here (1 MB) 10 | 11 | # Train/val/test sets as 1) dir: path/to/imgs, 2) file: path/to/imgs.txt, or 3) list: [path/to/imgs1, path/to/imgs2, ..] 12 | path: ../datasets/coco8-pose # dataset root dir 13 | train: images/train # train images (relative to 'path') 4 images 14 | val: images/val # val images (relative to 'path') 4 images 15 | test: # test images (optional) 16 | 17 | # Keypoints 18 | kpt_shape: [17, 3] # number of keypoints, number of dims (2 for x,y or 3 for x,y,visible) 19 | flip_idx: [0, 2, 1, 4, 3, 6, 5, 8, 7, 10, 9, 12, 11, 14, 13, 16, 15] 20 | 21 | # Classes 22 | names: 23 | 0: person 24 | 25 | # Download script/URL (optional) 26 | download: https://github.com/ultralytics/assets/releases/download/v0.0.0/coco8-pose.zip 27 | -------------------------------------------------------------------------------- /ultralytics/cfg/datasets/crack-seg.yaml: -------------------------------------------------------------------------------- 1 | # Ultralytics 🚀 AGPL-3.0 License - https://ultralytics.com/license 2 | 3 | # Crack-seg dataset by Ultralytics 4 | # Documentation: https://docs.ultralytics.com/datasets/segment/crack-seg/ 5 | # Example usage: yolo train data=crack-seg.yaml 6 | # parent 7 | # ├── ultralytics 8 | # └── datasets 9 | # └── crack-seg ← downloads here (91.2 MB) 10 | 11 | # Train/val/test sets as 1) dir: path/to/imgs, 2) file: path/to/imgs.txt, or 3) list: [path/to/imgs1, path/to/imgs2, ..] 
12 | path: ../datasets/crack-seg # dataset root dir 13 | train: train/images # train images (relative to 'path') 3717 images 14 | val: valid/images # val images (relative to 'path') 112 images 15 | test: test/images # test images (relative to 'path') 200 images 16 | 17 | # Classes 18 | names: 19 | 0: crack 20 | 21 | # Download script/URL (optional) 22 | download: https://github.com/ultralytics/assets/releases/download/v0.0.0/crack-seg.zip 23 | -------------------------------------------------------------------------------- /ultralytics/cfg/datasets/dog-pose.yaml: -------------------------------------------------------------------------------- 1 | # Ultralytics 🚀 AGPL-3.0 License - https://ultralytics.com/license 2 | 3 | # Dogs dataset http://vision.stanford.edu/aditya86/ImageNetDogs/ by Stanford 4 | # Documentation: https://docs.ultralytics.com/datasets/pose/dog-pose/ 5 | # Example usage: yolo train data=dog-pose.yaml 6 | # parent 7 | # ├── ultralytics 8 | # └── datasets 9 | # └── dog-pose ← downloads here (337 MB) 10 | 11 | # Train/val/test sets as 1) dir: path/to/imgs, 2) file: path/to/imgs.txt, or 3) list: [path/to/imgs1, path/to/imgs2, ..] 12 | path: ../datasets/dog-pose # dataset root dir 13 | train: train # train images (relative to 'path') 6773 images 14 | val: val # val images (relative to 'path') 1703 images 15 | 16 | # Keypoints 17 | kpt_shape: [24, 3] # number of keypoints, number of dims (2 for x,y or 3 for x,y,visible) 18 | 19 | # Classes 20 | names: 21 | 0: dog 22 | 23 | # Download script/URL (optional) 24 | download: https://github.com/ultralytics/assets/releases/download/v0.0.0/dog-pose.zip 25 | -------------------------------------------------------------------------------- /ultralytics/cfg/datasets/dota8-multispectral.yaml: -------------------------------------------------------------------------------- 1 | # Ultralytics 🚀 AGPL-3.0 License - https://ultralytics.com/license 2 | 3 | # DOTA8-Multispectral dataset (DOTA8 interpolated across 10 channels in the visual spectrum) by Ultralytics 4 | # Documentation: https://docs.ultralytics.com/datasets/obb/dota8/ 5 | # Example usage: yolo train model=yolov8n-obb.pt data=dota8-multispectral.yaml 6 | # parent 7 | # ├── ultralytics 8 | # └── datasets 9 | # └── dota8-multispectral ← downloads here (37.3MB) 10 | 11 | # Train/val/test sets as 1) dir: path/to/imgs, 2) file: path/to/imgs.txt, or 3) list: [path/to/imgs1, path/to/imgs2, ..] 
12 | path: ../datasets/dota8-multispectral # dataset root dir 13 | train: images/train # train images (relative to 'path') 4 images 14 | val: images/val # val images (relative to 'path') 4 images 15 | 16 | # Number of multispectral image channels 17 | channels: 10 18 | 19 | # Classes for DOTA 1.0 20 | names: 21 | 0: plane 22 | 1: ship 23 | 2: storage tank 24 | 3: baseball diamond 25 | 4: tennis court 26 | 5: basketball court 27 | 6: ground track field 28 | 7: harbor 29 | 8: bridge 30 | 9: large vehicle 31 | 10: small vehicle 32 | 11: helicopter 33 | 12: roundabout 34 | 13: soccer ball field 35 | 14: swimming pool 36 | 37 | # Download script/URL (optional) 38 | download: https://github.com/ultralytics/assets/releases/download/v0.0.0/dota8-multispectral.zip 39 | -------------------------------------------------------------------------------- /ultralytics/cfg/datasets/dota8.yaml: -------------------------------------------------------------------------------- 1 | # Ultralytics 🚀 AGPL-3.0 License - https://ultralytics.com/license 2 | 3 | # DOTA8 dataset 8 images from split DOTAv1 dataset by Ultralytics 4 | # Documentation: https://docs.ultralytics.com/datasets/obb/dota8/ 5 | # Example usage: yolo train model=yolov8n-obb.pt data=dota8.yaml 6 | # parent 7 | # ├── ultralytics 8 | # └── datasets 9 | # └── dota8 ← downloads here (1MB) 10 | 11 | # Train/val/test sets as 1) dir: path/to/imgs, 2) file: path/to/imgs.txt, or 3) list: [path/to/imgs1, path/to/imgs2, ..] 12 | path: ../datasets/dota8 # dataset root dir 13 | train: images/train # train images (relative to 'path') 4 images 14 | val: images/val # val images (relative to 'path') 4 images 15 | 16 | # Classes for DOTA 1.0 17 | names: 18 | 0: plane 19 | 1: ship 20 | 2: storage tank 21 | 3: baseball diamond 22 | 4: tennis court 23 | 5: basketball court 24 | 6: ground track field 25 | 7: harbor 26 | 8: bridge 27 | 9: large vehicle 28 | 10: small vehicle 29 | 11: helicopter 30 | 12: roundabout 31 | 13: soccer ball field 32 | 14: swimming pool 33 | 34 | # Download script/URL (optional) 35 | download: https://github.com/ultralytics/assets/releases/download/v0.0.0/dota8.zip 36 | -------------------------------------------------------------------------------- /ultralytics/cfg/datasets/hand-keypoints.yaml: -------------------------------------------------------------------------------- 1 | # Ultralytics 🚀 AGPL-3.0 License - https://ultralytics.com/license 2 | 3 | # Hand Keypoints dataset by Ultralytics 4 | # Documentation: https://docs.ultralytics.com/datasets/pose/hand-keypoints/ 5 | # Example usage: yolo train data=hand-keypoints.yaml 6 | # parent 7 | # ├── ultralytics 8 | # └── datasets 9 | # └── hand-keypoints ← downloads here (369 MB) 10 | 11 | # Train/val/test sets as 1) dir: path/to/imgs, 2) file: path/to/imgs.txt, or 3) list: [path/to/imgs1, path/to/imgs2, ..] 
12 | path: ../datasets/hand-keypoints # dataset root dir 13 | train: train # train images (relative to 'path') 18776 images 14 | val: val # val images (relative to 'path') 7992 images 15 | 16 | # Keypoints 17 | kpt_shape: [21, 3] # number of keypoints, number of dims (2 for x,y or 3 for x,y,visible) 18 | flip_idx: 19 | [0, 1, 2, 4, 3, 10, 11, 12, 13, 14, 5, 6, 7, 8, 9, 15, 16, 17, 18, 19, 20] 20 | 21 | # Classes 22 | names: 23 | 0: hand 24 | 25 | # Download script/URL (optional) 26 | download: https://github.com/ultralytics/assets/releases/download/v0.0.0/hand-keypoints.zip 27 | -------------------------------------------------------------------------------- /ultralytics/cfg/datasets/medical-pills.yaml: -------------------------------------------------------------------------------- 1 | # Ultralytics 🚀 AGPL-3.0 License - https://ultralytics.com/license 2 | 3 | # Medical-pills dataset by Ultralytics 4 | # Documentation: https://docs.ultralytics.com/datasets/detect/medical-pills/ 5 | # Example usage: yolo train data=medical-pills.yaml 6 | # parent 7 | # ├── ultralytics 8 | # └── datasets 9 | # └── medical-pills ← downloads here (8.19 MB) 10 | 11 | # Train/val/test sets as 1) dir: path/to/imgs, 2) file: path/to/imgs.txt, or 3) list: [path/to/imgs1, path/to/imgs2, ..] 12 | path: ../datasets/medical-pills # dataset root dir 13 | train: train/images # train images (relative to 'path') 92 images 14 | val: valid/images # val images (relative to 'path') 23 images 15 | test: # test images (relative to 'path') 16 | 17 | # Classes 18 | names: 19 | 0: pill 20 | 21 | # Download script/URL (optional) 22 | download: https://github.com/ultralytics/assets/releases/download/v0.0.0/medical-pills.zip 23 | -------------------------------------------------------------------------------- /ultralytics/cfg/datasets/package-seg.yaml: -------------------------------------------------------------------------------- 1 | # Ultralytics 🚀 AGPL-3.0 License - https://ultralytics.com/license 2 | 3 | # Package-seg dataset by Ultralytics 4 | # Documentation: https://docs.ultralytics.com/datasets/segment/package-seg/ 5 | # Example usage: yolo train data=package-seg.yaml 6 | # parent 7 | # ├── ultralytics 8 | # └── datasets 9 | # └── package-seg ← downloads here (102 MB) 10 | 11 | # Train/val/test sets as 1) dir: path/to/imgs, 2) file: path/to/imgs.txt, or 3) list: [path/to/imgs1, path/to/imgs2, ..] 12 | path: ../datasets/package-seg # dataset root dir 13 | train: train/images # train images (relative to 'path') 1920 images 14 | val: valid/images # val images (relative to 'path') 89 images 15 | test: test/images # test images (relative to 'path') 188 images 16 | 17 | # Classes 18 | names: 19 | 0: package 20 | 21 | # Download script/URL (optional) 22 | download: https://github.com/ultralytics/assets/releases/download/v0.0.0/package-seg.zip 23 | -------------------------------------------------------------------------------- /ultralytics/cfg/datasets/signature.yaml: -------------------------------------------------------------------------------- 1 | # Ultralytics 🚀 AGPL-3.0 License - https://ultralytics.com/license 2 | 3 | # Signature dataset by Ultralytics 4 | # Documentation: https://docs.ultralytics.com/datasets/detect/signature/ 5 | # Example usage: yolo train data=signature.yaml 6 | # parent 7 | # ├── ultralytics 8 | # └── datasets 9 | # └── signature ← downloads here (11.2 MB) 10 | 11 | # Train/val/test sets as 1) dir: path/to/imgs, 2) file: path/to/imgs.txt, or 3) list: [path/to/imgs1, path/to/imgs2, ..] 
12 | path: ../datasets/signature # dataset root dir 13 | train: train/images # train images (relative to 'path') 143 images 14 | val: valid/images # val images (relative to 'path') 35 images 15 | 16 | # Classes 17 | names: 18 | 0: signature 19 | 20 | # Download script/URL (optional) 21 | download: https://github.com/ultralytics/assets/releases/download/v0.0.0/signature.zip 22 | -------------------------------------------------------------------------------- /ultralytics/cfg/datasets/tiger-pose.yaml: -------------------------------------------------------------------------------- 1 | # Ultralytics 🚀 AGPL-3.0 License - https://ultralytics.com/license 2 | 3 | # Tiger Pose dataset by Ultralytics 4 | # Documentation: https://docs.ultralytics.com/datasets/pose/tiger-pose/ 5 | # Example usage: yolo train data=tiger-pose.yaml 6 | # parent 7 | # ├── ultralytics 8 | # └── datasets 9 | # └── tiger-pose ← downloads here (75.3 MB) 10 | 11 | # Train/val/test sets as 1) dir: path/to/imgs, 2) file: path/to/imgs.txt, or 3) list: [path/to/imgs1, path/to/imgs2, ..] 12 | path: ../datasets/tiger-pose # dataset root dir 13 | train: train # train images (relative to 'path') 210 images 14 | val: val # val images (relative to 'path') 53 images 15 | 16 | # Keypoints 17 | kpt_shape: [12, 2] # number of keypoints, number of dims (2 for x,y or 3 for x,y,visible) 18 | flip_idx: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11] 19 | 20 | # Classes 21 | names: 22 | 0: tiger 23 | 24 | # Download script/URL (optional) 25 | download: https://github.com/ultralytics/assets/releases/download/v0.0.0/tiger-pose.zip 26 | -------------------------------------------------------------------------------- /ultralytics/cfg/models/11/yolo11-cls-resnet18.yaml: -------------------------------------------------------------------------------- 1 | # Ultralytics 🚀 AGPL-3.0 License - https://ultralytics.com/license 2 | 3 | # Ultralytics YOLO11-cls image classification model with ResNet18 backbone 4 | # Model docs: https://docs.ultralytics.com/models/yolo11 5 | # Task docs: https://docs.ultralytics.com/tasks/classify 6 | 7 | # Parameters 8 | nc: 1000 # number of classes 9 | 10 | # ResNet18 backbone 11 | backbone: 12 | # [from, repeats, module, args] 13 | - [-1, 1, TorchVision, [512, resnet18, DEFAULT, True, 2]] # truncate two layers from the end 14 | 15 | # YOLO11n head 16 | head: 17 | - [-1, 1, Classify, [nc]] # Classify 18 | -------------------------------------------------------------------------------- /ultralytics/cfg/models/11/yolo11-cls.yaml: -------------------------------------------------------------------------------- 1 | # Ultralytics 🚀 AGPL-3.0 License - https://ultralytics.com/license 2 | 3 | # Ultralytics YOLO11-cls image classification model 4 | # Model docs: https://docs.ultralytics.com/models/yolo11 5 | # Task docs: https://docs.ultralytics.com/tasks/classify 6 | 7 | # Parameters 8 | nc: 1000 # number of classes 9 | scales: # model compound scaling constants, i.e. 
'model=yolo11n-cls.yaml' will call yolo11-cls.yaml with scale 'n' 10 | # [depth, width, max_channels] 11 | n: [0.50, 0.25, 1024] # summary: 86 layers, 1633584 parameters, 1633584 gradients, 0.5 GFLOPs 12 | s: [0.50, 0.50, 1024] # summary: 86 layers, 5545488 parameters, 5545488 gradients, 1.6 GFLOPs 13 | m: [0.50, 1.00, 512] # summary: 106 layers, 10455696 parameters, 10455696 gradients, 5.0 GFLOPs 14 | l: [1.00, 1.00, 512] # summary: 176 layers, 12937104 parameters, 12937104 gradients, 6.2 GFLOPs 15 | x: [1.00, 1.50, 512] # summary: 176 layers, 28458544 parameters, 28458544 gradients, 13.7 GFLOPs 16 | 17 | # YOLO11n backbone 18 | backbone: 19 | # [from, repeats, module, args] 20 | - [-1, 1, Conv, [64, 3, 2]] # 0-P1/2 21 | - [-1, 1, Conv, [128, 3, 2]] # 1-P2/4 22 | - [-1, 2, C3k2, [256, False, 0.25]] 23 | - [-1, 1, Conv, [256, 3, 2]] # 3-P3/8 24 | - [-1, 2, C3k2, [512, False, 0.25]] 25 | - [-1, 1, Conv, [512, 3, 2]] # 5-P4/16 26 | - [-1, 2, C3k2, [512, True]] 27 | - [-1, 1, Conv, [1024, 3, 2]] # 7-P5/32 28 | - [-1, 2, C3k2, [1024, True]] 29 | - [-1, 2, C2PSA, [1024]] # 9 30 | 31 | # YOLO11n head 32 | head: 33 | - [-1, 1, Classify, [nc]] # Classify 34 | -------------------------------------------------------------------------------- /ultralytics/cfg/models/12/yolo12-cls.yaml: -------------------------------------------------------------------------------- 1 | # Ultralytics 🚀 AGPL-3.0 License - https://ultralytics.com/license 2 | 3 | # YOLO12-cls image classification model 4 | # Model docs: https://docs.ultralytics.com/models/yolo12 5 | # Task docs: https://docs.ultralytics.com/tasks/classify 6 | 7 | # Parameters 8 | nc: 80 # number of classes 9 | scales: # model compound scaling constants, i.e. 'model=yolo12n-cls.yaml' will call yolo12-cls.yaml with scale 'n' 10 | # [depth, width, max_channels] 11 | n: [0.50, 0.25, 1024] # summary: 152 layers, 1,820,976 parameters, 1,820,976 gradients, 3.7 GFLOPs 12 | s: [0.50, 0.50, 1024] # summary: 152 layers, 6,206,992 parameters, 6,206,992 gradients, 13.6 GFLOPs 13 | m: [0.50, 1.00, 512] # summary: 172 layers, 12,083,088 parameters, 12,083,088 gradients, 44.2 GFLOPs 14 | l: [1.00, 1.00, 512] # summary: 312 layers, 15,558,640 parameters, 15,558,640 gradients, 56.9 GFLOPs 15 | x: [1.00, 1.50, 512] # summary: 312 layers, 34,172,592 parameters, 34,172,592 gradients, 126.5 GFLOPs 16 | 17 | # YOLO12n backbone 18 | backbone: 19 | # [from, repeats, module, args] 20 | - [-1, 1, Conv, [64, 3, 2]] # 0-P1/2 21 | - [-1, 1, Conv, [128, 3, 2]] # 1-P2/4 22 | - [-1, 2, C3k2, [256, False, 0.25]] 23 | - [-1, 1, Conv, [256, 3, 2]] # 3-P3/8 24 | - [-1, 2, C3k2, [512, False, 0.25]] 25 | - [-1, 1, Conv, [512, 3, 2]] # 5-P4/16 26 | - [-1, 4, A2C2f, [512, True, 4]] 27 | - [-1, 1, Conv, [1024, 3, 2]] # 7-P5/32 28 | - [-1, 4, A2C2f, [1024, True, 1]] # 8 29 | 30 | # YOLO12n head 31 | head: 32 | - [-1, 1, Classify, [nc]] # Classify 33 | -------------------------------------------------------------------------------- /ultralytics/cfg/models/v10/yolov10b.yaml: -------------------------------------------------------------------------------- 1 | # Ultralytics 🚀 AGPL-3.0 License - https://ultralytics.com/license 2 | 3 | # YOLOv10b object detection model with P3/8 - P5/32 outputs 4 | # Model docs: https://docs.ultralytics.com/models/yolov10 5 | # Task docs: https://docs.ultralytics.com/tasks/detect 6 | 7 | # Parameters 8 | nc: 80 # number of classes 9 | scales: # model compound scaling constants, i.e. 
'model=yolov10n.yaml' will call yolov10.yaml with scale 'n' 10 | # [depth, width, max_channels] 11 | b: [0.67, 1.00, 512] 12 | 13 | backbone: 14 | # [from, repeats, module, args] 15 | - [-1, 1, Conv, [64, 3, 2]] # 0-P1/2 16 | - [-1, 1, Conv, [128, 3, 2]] # 1-P2/4 17 | - [-1, 3, C2f, [128, True]] 18 | - [-1, 1, Conv, [256, 3, 2]] # 3-P3/8 19 | - [-1, 6, C2f, [256, True]] 20 | - [-1, 1, SCDown, [512, 3, 2]] # 5-P4/16 21 | - [-1, 6, C2f, [512, True]] 22 | - [-1, 1, SCDown, [1024, 3, 2]] # 7-P5/32 23 | - [-1, 3, C2fCIB, [1024, True]] 24 | - [-1, 1, SPPF, [1024, 5]] # 9 25 | - [-1, 1, PSA, [1024]] # 10 26 | 27 | # YOLOv10.0n head 28 | head: 29 | - [-1, 1, nn.Upsample, [None, 2, "nearest"]] 30 | - [[-1, 6], 1, Concat, [1]] # cat backbone P4 31 | - [-1, 3, C2fCIB, [512, True]] # 13 32 | 33 | - [-1, 1, nn.Upsample, [None, 2, "nearest"]] 34 | - [[-1, 4], 1, Concat, [1]] # cat backbone P3 35 | - [-1, 3, C2f, [256]] # 16 (P3/8-small) 36 | 37 | - [-1, 1, Conv, [256, 3, 2]] 38 | - [[-1, 13], 1, Concat, [1]] # cat head P4 39 | - [-1, 3, C2fCIB, [512, True]] # 19 (P4/16-medium) 40 | 41 | - [-1, 1, SCDown, [512, 3, 2]] 42 | - [[-1, 10], 1, Concat, [1]] # cat head P5 43 | - [-1, 3, C2fCIB, [1024, True]] # 22 (P5/32-large) 44 | 45 | - [[16, 19, 22], 1, v10Detect, [nc]] # Detect(P3, P4, P5) 46 | -------------------------------------------------------------------------------- /ultralytics/cfg/models/v10/yolov10l.yaml: -------------------------------------------------------------------------------- 1 | # Ultralytics 🚀 AGPL-3.0 License - https://ultralytics.com/license 2 | 3 | # YOLOv10l object detection model with P3/8 - P5/32 outputs 4 | # Model docs: https://docs.ultralytics.com/models/yolov10 5 | # Task docs: https://docs.ultralytics.com/tasks/detect 6 | 7 | # Parameters 8 | nc: 80 # number of classes 9 | scales: # model compound scaling constants, i.e. 
'model=yolov10n.yaml' will call yolov10.yaml with scale 'n' 10 | # [depth, width, max_channels] 11 | l: [1.00, 1.00, 512] 12 | 13 | backbone: 14 | # [from, repeats, module, args] 15 | - [-1, 1, Conv, [64, 3, 2]] # 0-P1/2 16 | - [-1, 1, Conv, [128, 3, 2]] # 1-P2/4 17 | - [-1, 3, C2f, [128, True]] 18 | - [-1, 1, Conv, [256, 3, 2]] # 3-P3/8 19 | - [-1, 6, C2f, [256, True]] 20 | - [-1, 1, SCDown, [512, 3, 2]] # 5-P4/16 21 | - [-1, 6, C2f, [512, True]] 22 | - [-1, 1, SCDown, [1024, 3, 2]] # 7-P5/32 23 | - [-1, 3, C2fCIB, [1024, True]] 24 | - [-1, 1, SPPF, [1024, 5]] # 9 25 | - [-1, 1, PSA, [1024]] # 10 26 | 27 | # YOLOv10.0n head 28 | head: 29 | - [-1, 1, nn.Upsample, [None, 2, "nearest"]] 30 | - [[-1, 6], 1, Concat, [1]] # cat backbone P4 31 | - [-1, 3, C2fCIB, [512, True]] # 13 32 | 33 | - [-1, 1, nn.Upsample, [None, 2, "nearest"]] 34 | - [[-1, 4], 1, Concat, [1]] # cat backbone P3 35 | - [-1, 3, C2f, [256]] # 16 (P3/8-small) 36 | 37 | - [-1, 1, Conv, [256, 3, 2]] 38 | - [[-1, 13], 1, Concat, [1]] # cat head P4 39 | - [-1, 3, C2fCIB, [512, True]] # 19 (P4/16-medium) 40 | 41 | - [-1, 1, SCDown, [512, 3, 2]] 42 | - [[-1, 10], 1, Concat, [1]] # cat head P5 43 | - [-1, 3, C2fCIB, [1024, True]] # 22 (P5/32-large) 44 | 45 | - [[16, 19, 22], 1, v10Detect, [nc]] # Detect(P3, P4, P5) 46 | -------------------------------------------------------------------------------- /ultralytics/cfg/models/v10/yolov10m.yaml: -------------------------------------------------------------------------------- 1 | # Ultralytics 🚀 AGPL-3.0 License - https://ultralytics.com/license 2 | 3 | # YOLOv10m object detection model with P3/8 - P5/32 outputs 4 | # Model docs: https://docs.ultralytics.com/models/yolov10 5 | # Task docs: https://docs.ultralytics.com/tasks/detect 6 | 7 | # Parameters 8 | nc: 80 # number of classes 9 | scales: # model compound scaling constants, i.e. 
'model=yolov10n.yaml' will call yolov10.yaml with scale 'n' 10 | # [depth, width, max_channels] 11 | m: [0.67, 0.75, 768] 12 | 13 | backbone: 14 | # [from, repeats, module, args] 15 | - [-1, 1, Conv, [64, 3, 2]] # 0-P1/2 16 | - [-1, 1, Conv, [128, 3, 2]] # 1-P2/4 17 | - [-1, 3, C2f, [128, True]] 18 | - [-1, 1, Conv, [256, 3, 2]] # 3-P3/8 19 | - [-1, 6, C2f, [256, True]] 20 | - [-1, 1, SCDown, [512, 3, 2]] # 5-P4/16 21 | - [-1, 6, C2f, [512, True]] 22 | - [-1, 1, SCDown, [1024, 3, 2]] # 7-P5/32 23 | - [-1, 3, C2fCIB, [1024, True]] 24 | - [-1, 1, SPPF, [1024, 5]] # 9 25 | - [-1, 1, PSA, [1024]] # 10 26 | 27 | # YOLOv10.0n head 28 | head: 29 | - [-1, 1, nn.Upsample, [None, 2, "nearest"]] 30 | - [[-1, 6], 1, Concat, [1]] # cat backbone P4 31 | - [-1, 3, C2f, [512]] # 13 32 | 33 | - [-1, 1, nn.Upsample, [None, 2, "nearest"]] 34 | - [[-1, 4], 1, Concat, [1]] # cat backbone P3 35 | - [-1, 3, C2f, [256]] # 16 (P3/8-small) 36 | 37 | - [-1, 1, Conv, [256, 3, 2]] 38 | - [[-1, 13], 1, Concat, [1]] # cat head P4 39 | - [-1, 3, C2fCIB, [512, True]] # 19 (P4/16-medium) 40 | 41 | - [-1, 1, SCDown, [512, 3, 2]] 42 | - [[-1, 10], 1, Concat, [1]] # cat head P5 43 | - [-1, 3, C2fCIB, [1024, True]] # 22 (P5/32-large) 44 | 45 | - [[16, 19, 22], 1, v10Detect, [nc]] # Detect(P3, P4, P5) 46 | -------------------------------------------------------------------------------- /ultralytics/cfg/models/v10/yolov10n.yaml: -------------------------------------------------------------------------------- 1 | # Ultralytics 🚀 AGPL-3.0 License - https://ultralytics.com/license 2 | 3 | # YOLOv10n object detection model with P3/8 - P5/32 outputs 4 | # Model docs: https://docs.ultralytics.com/models/yolov10 5 | # Task docs: https://docs.ultralytics.com/tasks/detect 6 | 7 | # Parameters 8 | nc: 80 # number of classes 9 | scales: # model compound scaling constants, i.e. 
'model=yolov10n.yaml' will call yolov10.yaml with scale 'n' 10 | # [depth, width, max_channels] 11 | n: [0.33, 0.25, 1024] 12 | 13 | backbone: 14 | # [from, repeats, module, args] 15 | - [-1, 1, Conv, [64, 3, 2]] # 0-P1/2 16 | - [-1, 1, Conv, [128, 3, 2]] # 1-P2/4 17 | - [-1, 3, C2f, [128, True]] 18 | - [-1, 1, Conv, [256, 3, 2]] # 3-P3/8 19 | - [-1, 6, C2f, [256, True]] 20 | - [-1, 1, SCDown, [512, 3, 2]] # 5-P4/16 21 | - [-1, 6, C2f, [512, True]] 22 | - [-1, 1, SCDown, [1024, 3, 2]] # 7-P5/32 23 | - [-1, 3, C2f, [1024, True]] 24 | - [-1, 1, SPPF, [1024, 5]] # 9 25 | - [-1, 1, PSA, [1024]] # 10 26 | 27 | # YOLOv10.0n head 28 | head: 29 | - [-1, 1, nn.Upsample, [None, 2, "nearest"]] 30 | - [[-1, 6], 1, Concat, [1]] # cat backbone P4 31 | - [-1, 3, C2f, [512]] # 13 32 | 33 | - [-1, 1, nn.Upsample, [None, 2, "nearest"]] 34 | - [[-1, 4], 1, Concat, [1]] # cat backbone P3 35 | - [-1, 3, C2f, [256]] # 16 (P3/8-small) 36 | 37 | - [-1, 1, Conv, [256, 3, 2]] 38 | - [[-1, 13], 1, Concat, [1]] # cat head P4 39 | - [-1, 3, C2f, [512]] # 19 (P4/16-medium) 40 | 41 | - [-1, 1, SCDown, [512, 3, 2]] 42 | - [[-1, 10], 1, Concat, [1]] # cat head P5 43 | - [-1, 3, C2fCIB, [1024, True, True]] # 22 (P5/32-large) 44 | 45 | - [[16, 19, 22], 1, v10Detect, [nc]] # Detect(P3, P4, P5) 46 | -------------------------------------------------------------------------------- /ultralytics/cfg/models/v10/yolov10s.yaml: -------------------------------------------------------------------------------- 1 | # Ultralytics 🚀 AGPL-3.0 License - https://ultralytics.com/license 2 | 3 | # YOLOv10s object detection model with P3/8 - P5/32 outputs 4 | # Model docs: https://docs.ultralytics.com/models/yolov10 5 | # Task docs: https://docs.ultralytics.com/tasks/detect 6 | 7 | # Parameters 8 | nc: 80 # number of classes 9 | scales: # model compound scaling constants, i.e. 
'model=yolov10n.yaml' will call yolov10.yaml with scale 'n' 10 | # [depth, width, max_channels] 11 | s: [0.33, 0.50, 1024] 12 | 13 | backbone: 14 | # [from, repeats, module, args] 15 | - [-1, 1, Conv, [64, 3, 2]] # 0-P1/2 16 | - [-1, 1, Conv, [128, 3, 2]] # 1-P2/4 17 | - [-1, 3, C2f, [128, True]] 18 | - [-1, 1, Conv, [256, 3, 2]] # 3-P3/8 19 | - [-1, 6, C2f, [256, True]] 20 | - [-1, 1, SCDown, [512, 3, 2]] # 5-P4/16 21 | - [-1, 6, C2f, [512, True]] 22 | - [-1, 1, SCDown, [1024, 3, 2]] # 7-P5/32 23 | - [-1, 3, C2fCIB, [1024, True, True]] 24 | - [-1, 1, SPPF, [1024, 5]] # 9 25 | - [-1, 1, PSA, [1024]] # 10 26 | 27 | # YOLOv10.0n head 28 | head: 29 | - [-1, 1, nn.Upsample, [None, 2, "nearest"]] 30 | - [[-1, 6], 1, Concat, [1]] # cat backbone P4 31 | - [-1, 3, C2f, [512]] # 13 32 | 33 | - [-1, 1, nn.Upsample, [None, 2, "nearest"]] 34 | - [[-1, 4], 1, Concat, [1]] # cat backbone P3 35 | - [-1, 3, C2f, [256]] # 16 (P3/8-small) 36 | 37 | - [-1, 1, Conv, [256, 3, 2]] 38 | - [[-1, 13], 1, Concat, [1]] # cat head P4 39 | - [-1, 3, C2f, [512]] # 19 (P4/16-medium) 40 | 41 | - [-1, 1, SCDown, [512, 3, 2]] 42 | - [[-1, 10], 1, Concat, [1]] # cat head P5 43 | - [-1, 3, C2fCIB, [1024, True, True]] # 22 (P5/32-large) 44 | 45 | - [[16, 19, 22], 1, v10Detect, [nc]] # Detect(P3, P4, P5) 46 | -------------------------------------------------------------------------------- /ultralytics/cfg/models/v10/yolov10x.yaml: -------------------------------------------------------------------------------- 1 | # Ultralytics 🚀 AGPL-3.0 License - https://ultralytics.com/license 2 | 3 | # YOLOv10x object detection model with P3/8 - P5/32 outputs 4 | # Model docs: https://docs.ultralytics.com/models/yolov10 5 | # Task docs: https://docs.ultralytics.com/tasks/detect 6 | 7 | # Parameters 8 | nc: 80 # number of classes 9 | scales: # model compound scaling constants, i.e. 
'model=yolov10n.yaml' will call yolov10.yaml with scale 'n' 10 | # [depth, width, max_channels] 11 | x: [1.00, 1.25, 512] 12 | 13 | backbone: 14 | # [from, repeats, module, args] 15 | - [-1, 1, Conv, [64, 3, 2]] # 0-P1/2 16 | - [-1, 1, Conv, [128, 3, 2]] # 1-P2/4 17 | - [-1, 3, C2f, [128, True]] 18 | - [-1, 1, Conv, [256, 3, 2]] # 3-P3/8 19 | - [-1, 6, C2f, [256, True]] 20 | - [-1, 1, SCDown, [512, 3, 2]] # 5-P4/16 21 | - [-1, 6, C2fCIB, [512, True]] 22 | - [-1, 1, SCDown, [1024, 3, 2]] # 7-P5/32 23 | - [-1, 3, C2fCIB, [1024, True]] 24 | - [-1, 1, SPPF, [1024, 5]] # 9 25 | - [-1, 1, PSA, [1024]] # 10 26 | 27 | # YOLOv10.0n head 28 | head: 29 | - [-1, 1, nn.Upsample, [None, 2, "nearest"]] 30 | - [[-1, 6], 1, Concat, [1]] # cat backbone P4 31 | - [-1, 3, C2fCIB, [512, True]] # 13 32 | 33 | - [-1, 1, nn.Upsample, [None, 2, "nearest"]] 34 | - [[-1, 4], 1, Concat, [1]] # cat backbone P3 35 | - [-1, 3, C2f, [256]] # 16 (P3/8-small) 36 | 37 | - [-1, 1, Conv, [256, 3, 2]] 38 | - [[-1, 13], 1, Concat, [1]] # cat head P4 39 | - [-1, 3, C2fCIB, [512, True]] # 19 (P4/16-medium) 40 | 41 | - [-1, 1, SCDown, [512, 3, 2]] 42 | - [[-1, 10], 1, Concat, [1]] # cat head P5 43 | - [-1, 3, C2fCIB, [1024, True]] # 22 (P5/32-large) 44 | 45 | - [[16, 19, 22], 1, v10Detect, [nc]] # Detect(P3, P4, P5) 46 | -------------------------------------------------------------------------------- /ultralytics/cfg/models/v3/yolov3-tiny.yaml: -------------------------------------------------------------------------------- 1 | # Ultralytics 🚀 AGPL-3.0 License - https://ultralytics.com/license 2 | 3 | # Ultralytics YOLOv3-tiny object detection model with P4/16 - P5/32 outputs 4 | # Model docs: https://docs.ultralytics.com/models/yolov3 5 | # Task docs: https://docs.ultralytics.com/tasks/detect 6 | 7 | # Parameters 8 | nc: 80 # number of classes 9 | depth_multiple: 1.0 # model depth multiple 10 | width_multiple: 1.0 # layer channel multiple 11 | 12 | # YOLOv3-tiny backbone 13 | backbone: 14 | # [from, number, module, args] 15 | - [-1, 1, Conv, [16, 3, 1]] # 0 16 | - [-1, 1, nn.MaxPool2d, [2, 2, 0]] # 1-P1/2 17 | - [-1, 1, Conv, [32, 3, 1]] 18 | - [-1, 1, nn.MaxPool2d, [2, 2, 0]] # 3-P2/4 19 | - [-1, 1, Conv, [64, 3, 1]] 20 | - [-1, 1, nn.MaxPool2d, [2, 2, 0]] # 5-P3/8 21 | - [-1, 1, Conv, [128, 3, 1]] 22 | - [-1, 1, nn.MaxPool2d, [2, 2, 0]] # 7-P4/16 23 | - [-1, 1, Conv, [256, 3, 1]] 24 | - [-1, 1, nn.MaxPool2d, [2, 2, 0]] # 9-P5/32 25 | - [-1, 1, Conv, [512, 3, 1]] 26 | - [-1, 1, nn.ZeroPad2d, [[0, 1, 0, 1]]] # 11 27 | - [-1, 1, nn.MaxPool2d, [2, 1, 0]] # 12 28 | 29 | # YOLOv3-tiny head 30 | head: 31 | - [-1, 1, Conv, [1024, 3, 1]] 32 | - [-1, 1, Conv, [256, 1, 1]] 33 | - [-1, 1, Conv, [512, 3, 1]] # 15 (P5/32-large) 34 | 35 | - [-2, 1, Conv, [128, 1, 1]] 36 | - [-1, 1, nn.Upsample, [None, 2, "nearest"]] 37 | - [[-1, 8], 1, Concat, [1]] # cat backbone P4 38 | - [-1, 1, Conv, [256, 3, 1]] # 19 (P4/16-medium) 39 | 40 | - [[19, 15], 1, Detect, [nc]] # Detect(P4, P5) 41 | -------------------------------------------------------------------------------- /ultralytics/cfg/models/v8/yolov8-cls-resnet101.yaml: -------------------------------------------------------------------------------- 1 | # Ultralytics 🚀 AGPL-3.0 License - https://ultralytics.com/license 2 | 3 | # Ultralytics YOLOv8-cls image classification model with ResNet101 backbone 4 | # Model docs: https://docs.ultralytics.com/models/yolov8 5 | # Task docs: https://docs.ultralytics.com/tasks/classify 6 | 7 | # Parameters 8 | nc: 1000 # number of classes 9 | scales: # model
compound scaling constants, i.e. 'model=yolov8n-cls.yaml' will call yolov8-cls.yaml with scale 'n' 10 | # [depth, width, max_channels] 11 | n: [0.33, 0.25, 1024] 12 | s: [0.33, 0.50, 1024] 13 | m: [0.67, 0.75, 1024] 14 | l: [1.00, 1.00, 1024] 15 | x: [1.00, 1.25, 1024] 16 | 17 | # YOLOv8.0n backbone 18 | backbone: 19 | # [from, repeats, module, args] 20 | - [-1, 1, ResNetLayer, [3, 64, 1, True, 1]] # 0-P1/2 21 | - [-1, 1, ResNetLayer, [64, 64, 1, False, 3]] # 1-P2/4 22 | - [-1, 1, ResNetLayer, [256, 128, 2, False, 4]] # 2-P3/8 23 | - [-1, 1, ResNetLayer, [512, 256, 2, False, 23]] # 3-P4/16 24 | - [-1, 1, ResNetLayer, [1024, 512, 2, False, 3]] # 4-P5/32 25 | 26 | # YOLOv8.0n head 27 | head: 28 | - [-1, 1, Classify, [nc]] # Classify 29 | -------------------------------------------------------------------------------- /ultralytics/cfg/models/v8/yolov8-cls-resnet50.yaml: -------------------------------------------------------------------------------- 1 | # Ultralytics 🚀 AGPL-3.0 License - https://ultralytics.com/license 2 | 3 | # Ultralytics YOLOv8-cls image classification model with ResNet50 backbone 4 | # Model docs: https://docs.ultralytics.com/models/yolov8 5 | # Task docs: https://docs.ultralytics.com/tasks/classify 6 | 7 | # Parameters 8 | nc: 1000 # number of classes 9 | scales: # model compound scaling constants, i.e. 'model=yolov8n-cls.yaml' will call yolov8-cls.yaml with scale 'n' 10 | # [depth, width, max_channels] 11 | n: [0.33, 0.25, 1024] 12 | s: [0.33, 0.50, 1024] 13 | m: [0.67, 0.75, 1024] 14 | l: [1.00, 1.00, 1024] 15 | x: [1.00, 1.25, 1024] 16 | 17 | # YOLOv8.0n backbone 18 | backbone: 19 | # [from, repeats, module, args] 20 | - [-1, 1, ResNetLayer, [3, 64, 1, True, 1]] # 0-P1/2 21 | - [-1, 1, ResNetLayer, [64, 64, 1, False, 3]] # 1-P2/4 22 | - [-1, 1, ResNetLayer, [256, 128, 2, False, 4]] # 2-P3/8 23 | - [-1, 1, ResNetLayer, [512, 256, 2, False, 6]] # 3-P4/16 24 | - [-1, 1, ResNetLayer, [1024, 512, 2, False, 3]] # 4-P5/32 25 | 26 | # YOLOv8.0n head 27 | head: 28 | - [-1, 1, Classify, [nc]] # Classify 29 | -------------------------------------------------------------------------------- /ultralytics/cfg/models/v8/yolov8-cls.yaml: -------------------------------------------------------------------------------- 1 | # Ultralytics 🚀 AGPL-3.0 License - https://ultralytics.com/license 2 | 3 | # Ultralytics YOLOv8-cls image classification model with YOLO backbone 4 | # Model docs: https://docs.ultralytics.com/models/yolov8 5 | # Task docs: https://docs.ultralytics.com/tasks/classify 6 | 7 | # Parameters 8 | nc: 1000 # number of classes 9 | scales: # model compound scaling constants, i.e. 
'model=yolov8n-cls.yaml' will call yolov8-cls.yaml with scale 'n' 10 | # [depth, width, max_channels] 11 | n: [0.33, 0.25, 1024] 12 | s: [0.33, 0.50, 1024] 13 | m: [0.67, 0.75, 1024] 14 | l: [1.00, 1.00, 1024] 15 | x: [1.00, 1.25, 1024] 16 | 17 | # YOLOv8.0n backbone 18 | backbone: 19 | # [from, repeats, module, args] 20 | - [-1, 1, Conv, [64, 3, 2]] # 0-P1/2 21 | - [-1, 1, Conv, [128, 3, 2]] # 1-P2/4 22 | - [-1, 3, C2f, [128, True]] 23 | - [-1, 1, Conv, [256, 3, 2]] # 3-P3/8 24 | - [-1, 6, C2f, [256, True]] 25 | - [-1, 1, Conv, [512, 3, 2]] # 5-P4/16 26 | - [-1, 6, C2f, [512, True]] 27 | - [-1, 1, Conv, [1024, 3, 2]] # 7-P5/32 28 | - [-1, 3, C2f, [1024, True]] 29 | 30 | # YOLOv8.0n head 31 | head: 32 | - [-1, 1, Classify, [nc]] # Classify 33 | -------------------------------------------------------------------------------- /ultralytics/cfg/models/v9/yolov9c-seg.yaml: -------------------------------------------------------------------------------- 1 | # Ultralytics 🚀 AGPL-3.0 License - https://ultralytics.com/license 2 | 3 | # YOLOv9c-seg instance segmentation model with P3/8 - P5/32 outputs 4 | # Model docs: https://docs.ultralytics.com/models/yolov9 5 | # Task docs: https://docs.ultralytics.com/tasks/segment 6 | # 380 layers, 27897120 parameters, 159.4 GFLOPs 7 | 8 | # Parameters 9 | nc: 80 # number of classes 10 | 11 | # GELAN backbone 12 | backbone: 13 | - [-1, 1, Conv, [64, 3, 2]] # 0-P1/2 14 | - [-1, 1, Conv, [128, 3, 2]] # 1-P2/4 15 | - [-1, 1, RepNCSPELAN4, [256, 128, 64, 1]] # 2 16 | - [-1, 1, ADown, [256]] # 3-P3/8 17 | - [-1, 1, RepNCSPELAN4, [512, 256, 128, 1]] # 4 18 | - [-1, 1, ADown, [512]] # 5-P4/16 19 | - [-1, 1, RepNCSPELAN4, [512, 512, 256, 1]] # 6 20 | - [-1, 1, ADown, [512]] # 7-P5/32 21 | - [-1, 1, RepNCSPELAN4, [512, 512, 256, 1]] # 8 22 | - [-1, 1, SPPELAN, [512, 256]] # 9 23 | 24 | head: 25 | - [-1, 1, nn.Upsample, [None, 2, "nearest"]] 26 | - [[-1, 6], 1, Concat, [1]] # cat backbone P4 27 | - [-1, 1, RepNCSPELAN4, [512, 512, 256, 1]] # 12 28 | 29 | - [-1, 1, nn.Upsample, [None, 2, "nearest"]] 30 | - [[-1, 4], 1, Concat, [1]] # cat backbone P3 31 | - [-1, 1, RepNCSPELAN4, [256, 256, 128, 1]] # 15 (P3/8-small) 32 | 33 | - [-1, 1, ADown, [256]] 34 | - [[-1, 12], 1, Concat, [1]] # cat head P4 35 | - [-1, 1, RepNCSPELAN4, [512, 512, 256, 1]] # 18 (P4/16-medium) 36 | 37 | - [-1, 1, ADown, [512]] 38 | - [[-1, 9], 1, Concat, [1]] # cat head P5 39 | - [-1, 1, RepNCSPELAN4, [512, 512, 256, 1]] # 21 (P5/32-large) 40 | 41 | - [[15, 18, 21], 1, Segment, [nc, 32, 256]] # Segment(P3, P4, P5) 42 | -------------------------------------------------------------------------------- /ultralytics/cfg/models/v9/yolov9c.yaml: -------------------------------------------------------------------------------- 1 | # Ultralytics 🚀 AGPL-3.0 License - https://ultralytics.com/license 2 | 3 | # YOLOv9c object detection model with P3/8 - P5/32 outputs 4 | # Model docs: https://docs.ultralytics.com/models/yolov9 5 | # Task docs: https://docs.ultralytics.com/tasks/detect 6 | # 358 layers, 25590912 parameters, 104.0 GFLOPs 7 | 8 | # Parameters 9 | nc: 80 # number of classes 10 | 11 | # GELAN backbone 12 | backbone: 13 | - [-1, 1, Conv, [64, 3, 2]] # 0-P1/2 14 | - [-1, 1, Conv, [128, 3, 2]] # 1-P2/4 15 | - [-1, 1, RepNCSPELAN4, [256, 128, 64, 1]] # 2 16 | - [-1, 1, ADown, [256]] # 3-P3/8 17 | - [-1, 1, RepNCSPELAN4, [512, 256, 128, 1]] # 4 18 | - [-1, 1, ADown, [512]] # 5-P4/16 19 | - [-1, 1, RepNCSPELAN4, [512, 512, 256, 1]] # 6 20 | - [-1, 1, ADown, [512]] # 7-P5/32 21 | - [-1, 1, RepNCSPELAN4, [512, 
512, 256, 1]] # 8 22 | - [-1, 1, SPPELAN, [512, 256]] # 9 23 | 24 | head: 25 | - [-1, 1, nn.Upsample, [None, 2, "nearest"]] 26 | - [[-1, 6], 1, Concat, [1]] # cat backbone P4 27 | - [-1, 1, RepNCSPELAN4, [512, 512, 256, 1]] # 12 28 | 29 | - [-1, 1, nn.Upsample, [None, 2, "nearest"]] 30 | - [[-1, 4], 1, Concat, [1]] # cat backbone P3 31 | - [-1, 1, RepNCSPELAN4, [256, 256, 128, 1]] # 15 (P3/8-small) 32 | 33 | - [-1, 1, ADown, [256]] 34 | - [[-1, 12], 1, Concat, [1]] # cat head P4 35 | - [-1, 1, RepNCSPELAN4, [512, 512, 256, 1]] # 18 (P4/16-medium) 36 | 37 | - [-1, 1, ADown, [512]] 38 | - [[-1, 9], 1, Concat, [1]] # cat head P5 39 | - [-1, 1, RepNCSPELAN4, [512, 512, 256, 1]] # 21 (P5/32-large) 40 | 41 | - [[15, 18, 21], 1, Detect, [nc]] # Detect(P3, P4, P5) 42 | -------------------------------------------------------------------------------- /ultralytics/cfg/models/v9/yolov9m.yaml: -------------------------------------------------------------------------------- 1 | # Ultralytics 🚀 AGPL-3.0 License - https://ultralytics.com/license 2 | 3 | # YOLOv9m object detection model with P3/8 - P5/32 outputs 4 | # Model docs: https://docs.ultralytics.com/models/yolov9 5 | # Task docs: https://docs.ultralytics.com/tasks/detect 6 | # 348 layers, 20216160 parameters, 77.9 GFLOPs 7 | 8 | # Parameters 9 | nc: 80 # number of classes 10 | 11 | # GELAN backbone 12 | backbone: 13 | - [-1, 1, Conv, [32, 3, 2]] # 0-P1/2 14 | - [-1, 1, Conv, [64, 3, 2]] # 1-P2/4 15 | - [-1, 1, RepNCSPELAN4, [128, 128, 64, 1]] # 2 16 | - [-1, 1, AConv, [240]] # 3-P3/8 17 | - [-1, 1, RepNCSPELAN4, [240, 240, 120, 1]] # 4 18 | - [-1, 1, AConv, [360]] # 5-P4/16 19 | - [-1, 1, RepNCSPELAN4, [360, 360, 180, 1]] # 6 20 | - [-1, 1, AConv, [480]] # 7-P5/32 21 | - [-1, 1, RepNCSPELAN4, [480, 480, 240, 1]] # 8 22 | - [-1, 1, SPPELAN, [480, 240]] # 9 23 | 24 | head: 25 | - [-1, 1, nn.Upsample, [None, 2, "nearest"]] 26 | - [[-1, 6], 1, Concat, [1]] # cat backbone P4 27 | - [-1, 1, RepNCSPELAN4, [360, 360, 180, 1]] # 12 28 | 29 | - [-1, 1, nn.Upsample, [None, 2, "nearest"]] 30 | - [[-1, 4], 1, Concat, [1]] # cat backbone P3 31 | - [-1, 1, RepNCSPELAN4, [240, 240, 120, 1]] # 15 32 | 33 | - [-1, 1, AConv, [180]] 34 | - [[-1, 12], 1, Concat, [1]] # cat head P4 35 | - [-1, 1, RepNCSPELAN4, [360, 360, 180, 1]] # 18 (P4/16-medium) 36 | 37 | - [-1, 1, AConv, [240]] 38 | - [[-1, 9], 1, Concat, [1]] # cat head P5 39 | - [-1, 1, RepNCSPELAN4, [480, 480, 240, 1]] # 21 (P5/32-large) 40 | 41 | - [[15, 18, 21], 1, Detect, [nc]] # Detect(P3, P4, P5) 42 | -------------------------------------------------------------------------------- /ultralytics/cfg/models/v9/yolov9s.yaml: -------------------------------------------------------------------------------- 1 | # Ultralytics 🚀 AGPL-3.0 License - https://ultralytics.com/license 2 | 3 | # YOLOv9s object detection model with P3/8 - P5/32 outputs 4 | # Model docs: https://docs.ultralytics.com/models/yolov9 5 | # Task docs: https://docs.ultralytics.com/tasks/detect 6 | # 544 layers, 7318368 parameters, 27.6 GFLOPs 7 | 8 | # Parameters 9 | nc: 80 # number of classes 10 | 11 | # GELAN backbone 12 | backbone: 13 | - [-1, 1, Conv, [32, 3, 2]] # 0-P1/2 14 | - [-1, 1, Conv, [64, 3, 2]] # 1-P2/4 15 | - [-1, 1, ELAN1, [64, 64, 32]] # 2 16 | - [-1, 1, AConv, [128]] # 3-P3/8 17 | - [-1, 1, RepNCSPELAN4, [128, 128, 64, 3]] # 4 18 | - [-1, 1, AConv, [192]] # 5-P4/16 19 | - [-1, 1, RepNCSPELAN4, [192, 192, 96, 3]] # 6 20 | - [-1, 1, AConv, [256]] # 7-P5/32 21 | - [-1, 1, RepNCSPELAN4, [256, 256, 128, 3]] # 8 22 | - [-1, 1, 
SPPELAN, [256, 128]] # 9 23 | 24 | head: 25 | - [-1, 1, nn.Upsample, [None, 2, "nearest"]] 26 | - [[-1, 6], 1, Concat, [1]] # cat backbone P4 27 | - [-1, 1, RepNCSPELAN4, [192, 192, 96, 3]] # 12 28 | 29 | - [-1, 1, nn.Upsample, [None, 2, "nearest"]] 30 | - [[-1, 4], 1, Concat, [1]] # cat backbone P3 31 | - [-1, 1, RepNCSPELAN4, [128, 128, 64, 3]] # 15 32 | 33 | - [-1, 1, AConv, [96]] 34 | - [[-1, 12], 1, Concat, [1]] # cat head P4 35 | - [-1, 1, RepNCSPELAN4, [192, 192, 96, 3]] # 18 (P4/16-medium) 36 | 37 | - [-1, 1, AConv, [128]] 38 | - [[-1, 9], 1, Concat, [1]] # cat head P5 39 | - [-1, 1, RepNCSPELAN4, [256, 256, 128, 3]] # 21 (P5/32-large) 40 | 41 | - [[15, 18, 21], 1, Detect, [nc]] # Detect(P3, P4, P5) 42 | -------------------------------------------------------------------------------- /ultralytics/cfg/models/v9/yolov9t.yaml: -------------------------------------------------------------------------------- 1 | # Ultralytics 🚀 AGPL-3.0 License - https://ultralytics.com/license 2 | 3 | # YOLOv9t object detection model with P3/8 - P5/32 outputs 4 | # Model docs: https://docs.ultralytics.com/models/yolov9 5 | # Task docs: https://docs.ultralytics.com/tasks/detect 6 | # 544 layers, 2128720 parameters, 8.5 GFLOPs 7 | 8 | # Parameters 9 | nc: 80 # number of classes 10 | 11 | # GELAN backbone 12 | backbone: 13 | - [-1, 1, Conv, [16, 3, 2]] # 0-P1/2 14 | - [-1, 1, Conv, [32, 3, 2]] # 1-P2/4 15 | - [-1, 1, ELAN1, [32, 32, 16]] # 2 16 | - [-1, 1, AConv, [64]] # 3-P3/8 17 | - [-1, 1, RepNCSPELAN4, [64, 64, 32, 3]] # 4 18 | - [-1, 1, AConv, [96]] # 5-P4/16 19 | - [-1, 1, RepNCSPELAN4, [96, 96, 48, 3]] # 6 20 | - [-1, 1, AConv, [128]] # 7-P5/32 21 | - [-1, 1, RepNCSPELAN4, [128, 128, 64, 3]] # 8 22 | - [-1, 1, SPPELAN, [128, 64]] # 9 23 | 24 | head: 25 | - [-1, 1, nn.Upsample, [None, 2, "nearest"]] 26 | - [[-1, 6], 1, Concat, [1]] # cat backbone P4 27 | - [-1, 1, RepNCSPELAN4, [96, 96, 48, 3]] # 12 28 | 29 | - [-1, 1, nn.Upsample, [None, 2, "nearest"]] 30 | - [[-1, 4], 1, Concat, [1]] # cat backbone P3 31 | - [-1, 1, RepNCSPELAN4, [64, 64, 32, 3]] # 15 32 | 33 | - [-1, 1, AConv, [48]] 34 | - [[-1, 12], 1, Concat, [1]] # cat head P4 35 | - [-1, 1, RepNCSPELAN4, [96, 96, 48, 3]] # 18 (P4/16-medium) 36 | 37 | - [-1, 1, AConv, [64]] 38 | - [[-1, 9], 1, Concat, [1]] # cat head P5 39 | - [-1, 1, RepNCSPELAN4, [128, 128, 64, 3]] # 21 (P5/32-large) 40 | 41 | - [[15, 18, 21], 1, Detect, [nc]] # Detect(P3, P4, P5) 42 | -------------------------------------------------------------------------------- /ultralytics/cfg/solutions/default.yaml: -------------------------------------------------------------------------------- 1 | # Ultralytics 🚀 AGPL-3.0 License - https://ultralytics.com/license 2 | 3 | # Global configuration YAML with settings and arguments for Ultralytics Solutions 4 | # For documentation see https://docs.ultralytics.com/solutions/ 5 | 6 | # Object counting settings -------------------------------------------------------------------------------------------- 7 | region: # list[tuple[int, int]] object counting, queue or speed estimation region points. 8 | show_in: True # (bool) flag to display objects moving *into* the defined region 9 | show_out: True # (bool) flag to display objects moving *out of* the defined region 10 | 11 | # Heatmaps settings ---------------------------------------------------------------------------------------------------- 12 | colormap: # (int | str) colormap for heatmap; only OpenCV-supported colormaps can be used.
13 | 14 | # Workouts monitoring settings ----------------------------------------------------------------------------------------- 15 | up_angle: 145.0 # (float) Workouts up_angle for counts, 145.0 is default value. 16 | down_angle: 90 # (float) Workouts down_angle for counts, 90 is default value. Y 17 | kpts: [6, 8, 10] # (list[int]) keypoints for workouts monitoring, i.e. for push-ups kpts have values of [6, 8, 10]. 18 | 19 | # Analytics settings --------------------------------------------------------------------------------------------------- 20 | analytics_type: "line" # (str) analytics type i.e "line", "pie", "bar" or "area" charts. 21 | json_file: # (str) parking system regions file path. 22 | 23 | # Security alarm system settings --------------------------------------------------------------------------------------- 24 | records: 5 # (int) Total detections count to send an email about security 25 | -------------------------------------------------------------------------------- /ultralytics/cfg/trackers/botsort.yaml: -------------------------------------------------------------------------------- 1 | # Ultralytics 🚀 AGPL-3.0 License - https://ultralytics.com/license 2 | 3 | # Default Ultralytics settings for BoT-SORT tracker when using mode="track" 4 | # For documentation and examples see https://docs.ultralytics.com/modes/track/ 5 | # For BoT-SORT source code see https://github.com/NirAharon/BoT-SORT 6 | 7 | tracker_type: botsort # tracker type, ['botsort', 'bytetrack'] 8 | track_high_thresh: 0.25 # threshold for the first association 9 | track_low_thresh: 0.1 # threshold for the second association 10 | new_track_thresh: 0.25 # threshold for init new track if the detection does not match any tracks 11 | track_buffer: 30 # buffer to calculate the time when to remove tracks 12 | match_thresh: 0.8 # threshold for matching tracks 13 | fuse_score: True # Whether to fuse confidence scores with the iou distances before matching 14 | # min_box_area: 10 # threshold for min box areas(for tracker evaluation, not used for now) 15 | 16 | # BoT-SORT settings 17 | gmc_method: sparseOptFlow # method of global motion compensation 18 | # ReID model related thresh 19 | proximity_thresh: 0.5 # minimum IoU for valid match with ReID 20 | appearance_thresh: 0.25 # minimum appearance similarity for ReID 21 | with_reid: False 22 | model: auto # uses native features if detector is YOLO else yolo11n-cls.pt 23 | -------------------------------------------------------------------------------- /ultralytics/cfg/trackers/bytetrack.yaml: -------------------------------------------------------------------------------- 1 | # Ultralytics 🚀 AGPL-3.0 License - https://ultralytics.com/license 2 | 3 | # Default Ultralytics settings for ByteTrack tracker when using mode="track" 4 | # For documentation and examples see https://docs.ultralytics.com/modes/track/ 5 | # For ByteTrack source code see https://github.com/ifzhang/ByteTrack 6 | 7 | tracker_type: bytetrack # tracker type, ['botsort', 'bytetrack'] 8 | track_high_thresh: 0.25 # threshold for the first association 9 | track_low_thresh: 0.1 # threshold for the second association 10 | new_track_thresh: 0.25 # threshold for init new track if the detection does not match any tracks 11 | track_buffer: 30 # buffer to calculate the time when to remove tracks 12 | match_thresh: 0.8 # threshold for matching tracks 13 | fuse_score: True # Whether to fuse confidence scores with the iou distances before matching 14 | # min_box_area: 10 # threshold for min box areas(for tracker 
evaluation, not used for now) 15 | -------------------------------------------------------------------------------- /ultralytics/data/__init__.py: -------------------------------------------------------------------------------- 1 | # Ultralytics 🚀 AGPL-3.0 License - https://ultralytics.com/license 2 | 3 | from .base import BaseDataset 4 | from .build import build_dataloader, build_grounding, build_yolo_dataset, load_inference_source 5 | from .dataset import ( 6 | ClassificationDataset, 7 | GroundingDataset, 8 | SemanticDataset, 9 | YOLOConcatDataset, 10 | YOLODataset, 11 | YOLOMultiModalDataset, 12 | ) 13 | 14 | __all__ = ( 15 | "BaseDataset", 16 | "ClassificationDataset", 17 | "SemanticDataset", 18 | "YOLODataset", 19 | "YOLOMultiModalDataset", 20 | "YOLOConcatDataset", 21 | "GroundingDataset", 22 | "build_yolo_dataset", 23 | "build_grounding", 24 | "build_dataloader", 25 | "load_inference_source", 26 | ) 27 | -------------------------------------------------------------------------------- /ultralytics/data/scripts/download_weights.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # Ultralytics 🚀 AGPL-3.0 License - https://ultralytics.com/license 3 | 4 | # Download latest models from https://github.com/ultralytics/assets/releases 5 | # Example usage: bash ultralytics/data/scripts/download_weights.sh 6 | # parent 7 | # └── weights 8 | # ├── yolov8n.pt ← downloads here 9 | # ├── yolov8s.pt 10 | # └── ... 11 | 12 | python << EOF 13 | from ultralytics.utils.downloads import attempt_download_asset 14 | 15 | assets = [f"yolov8{size}{suffix}.pt" for size in "nsmlx" for suffix in ("", "-cls", "-seg", "-pose")] 16 | for x in assets: 17 | attempt_download_asset(f"weights/{x}") 18 | EOF 19 | -------------------------------------------------------------------------------- /ultralytics/data/scripts/get_coco128.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # Ultralytics 🚀 AGPL-3.0 License - https://ultralytics.com/license 3 | 4 | # Download COCO128 dataset https://www.kaggle.com/ultralytics/coco128 (first 128 images from COCO train2017) 5 | # Example usage: bash data/scripts/get_coco128.sh 6 | # parent 7 | # ├── ultralytics 8 | # └── datasets 9 | # └── coco128 ← downloads here 10 | 11 | # Download/unzip images and labels 12 | d='../datasets' # unzip directory 13 | url=https://github.com/ultralytics/assets/releases/download/v0.0.0/ 14 | f='coco128.zip' # or 'coco128-segments.zip', 68 MB 15 | echo 'Downloading' $url$f ' ...' 
16 | curl -L $url$f -o $f -# && unzip -q $f -d $d && rm $f & 17 | 18 | wait # finish background tasks 19 | -------------------------------------------------------------------------------- /ultralytics/engine/__init__.py: -------------------------------------------------------------------------------- 1 | # Ultralytics 🚀 AGPL-3.0 License - https://ultralytics.com/license 2 | -------------------------------------------------------------------------------- /ultralytics/models/__init__.py: -------------------------------------------------------------------------------- 1 | # Ultralytics 🚀 AGPL-3.0 License - https://ultralytics.com/license 2 | 3 | from .fastsam import FastSAM 4 | from .nas import NAS 5 | from .rtdetr import RTDETR 6 | from .sam import SAM 7 | from .yolo import YOLO, YOLOE, YOLOWorld 8 | 9 | __all__ = "YOLO", "RTDETR", "SAM", "FastSAM", "NAS", "YOLOWorld", "YOLOE" # allow simpler import 10 | -------------------------------------------------------------------------------- /ultralytics/models/fastsam/__init__.py: -------------------------------------------------------------------------------- 1 | # Ultralytics 🚀 AGPL-3.0 License - https://ultralytics.com/license 2 | 3 | from .model import FastSAM 4 | from .predict import FastSAMPredictor 5 | from .val import FastSAMValidator 6 | 7 | __all__ = "FastSAMPredictor", "FastSAM", "FastSAMValidator" 8 | -------------------------------------------------------------------------------- /ultralytics/models/fastsam/utils.py: -------------------------------------------------------------------------------- 1 | # Ultralytics 🚀 AGPL-3.0 License - https://ultralytics.com/license 2 | 3 | 4 | def adjust_bboxes_to_image_border(boxes, image_shape, threshold=20): 5 | """ 6 | Adjust bounding boxes to stick to image border if they are within a certain threshold. 7 | 8 | Args: 9 | boxes (torch.Tensor): Bounding boxes with shape (n, 4) in xyxy format. 10 | image_shape (Tuple[int, int]): Image dimensions as (height, width). 11 | threshold (int): Pixel threshold for considering a box close to the border. 12 | 13 | Returns: 14 | boxes (torch.Tensor): Adjusted bounding boxes with shape (n, 4). 15 | """ 16 | # Image dimensions 17 | h, w = image_shape 18 | 19 | # Adjust boxes that are close to image borders 20 | boxes[boxes[:, 0] < threshold, 0] = 0 # x1 21 | boxes[boxes[:, 1] < threshold, 1] = 0 # y1 22 | boxes[boxes[:, 2] > w - threshold, 2] = w # x2 23 | boxes[boxes[:, 3] > h - threshold, 3] = h # y2 24 | return boxes 25 | -------------------------------------------------------------------------------- /ultralytics/models/nas/__init__.py: -------------------------------------------------------------------------------- 1 | # Ultralytics 🚀 AGPL-3.0 License - https://ultralytics.com/license 2 | 3 | from .model import NAS 4 | from .predict import NASPredictor 5 | from .val import NASValidator 6 | 7 | __all__ = "NASPredictor", "NASValidator", "NAS" 8 | -------------------------------------------------------------------------------- /ultralytics/models/nas/val.py: -------------------------------------------------------------------------------- 1 | # Ultralytics 🚀 AGPL-3.0 License - https://ultralytics.com/license 2 | 3 | import torch 4 | 5 | from ultralytics.models.yolo.detect import DetectionValidator 6 | from ultralytics.utils import ops 7 | 8 | __all__ = ["NASValidator"] 9 | 10 | 11 | class NASValidator(DetectionValidator): 12 | """ 13 | Ultralytics YOLO NAS Validator for object detection. 
14 | 15 | Extends `DetectionValidator` from the Ultralytics models package and is designed to post-process the raw predictions 16 | generated by YOLO NAS models. It performs non-maximum suppression to remove overlapping and low-confidence boxes, 17 | ultimately producing the final detections. 18 | 19 | Attributes: 20 | args (Namespace): Namespace containing various configurations for post-processing, such as confidence and IoU 21 | thresholds. 22 | lb (torch.Tensor): Optional tensor for multilabel NMS. 23 | 24 | Examples: 25 | >>> from ultralytics import NAS 26 | >>> model = NAS("yolo_nas_s") 27 | >>> validator = model.validator 28 | >>> # Assumes that raw_preds are available 29 | >>> final_preds = validator.postprocess(raw_preds) 30 | 31 | Notes: 32 | This class is generally not instantiated directly but is used internally within the `NAS` class. 33 | """ 34 | 35 | def postprocess(self, preds_in): 36 | """Apply Non-maximum suppression to prediction outputs.""" 37 | boxes = ops.xyxy2xywh(preds_in[0][0]) # Convert bounding box format from xyxy to xywh 38 | preds = torch.cat((boxes, preds_in[0][1]), -1).permute(0, 2, 1) # Concatenate boxes with scores and permute 39 | return super().postprocess(preds) 40 | -------------------------------------------------------------------------------- /ultralytics/models/rtdetr/__init__.py: -------------------------------------------------------------------------------- 1 | # Ultralytics 🚀 AGPL-3.0 License - https://ultralytics.com/license 2 | 3 | from .model import RTDETR 4 | from .predict import RTDETRPredictor 5 | from .val import RTDETRValidator 6 | 7 | __all__ = "RTDETRPredictor", "RTDETRValidator", "RTDETR" 8 | -------------------------------------------------------------------------------- /ultralytics/models/sam/__init__.py: -------------------------------------------------------------------------------- 1 | # Ultralytics 🚀 AGPL-3.0 License - https://ultralytics.com/license 2 | 3 | from .model import SAM 4 | from .predict import Predictor, SAM2Predictor, SAM2VideoPredictor 5 | 6 | __all__ = "SAM", "Predictor", "SAM2Predictor", "SAM2VideoPredictor" # tuple or list of exportable items 7 | -------------------------------------------------------------------------------- /ultralytics/models/sam/modules/__init__.py: -------------------------------------------------------------------------------- 1 | # Ultralytics 🚀 AGPL-3.0 License - https://ultralytics.com/license 2 | -------------------------------------------------------------------------------- /ultralytics/models/utils/__init__.py: -------------------------------------------------------------------------------- 1 | # Ultralytics 🚀 AGPL-3.0 License - https://ultralytics.com/license 2 | -------------------------------------------------------------------------------- /ultralytics/models/yolo/__init__.py: -------------------------------------------------------------------------------- 1 | # Ultralytics 🚀 AGPL-3.0 License - https://ultralytics.com/license 2 | 3 | from ultralytics.models.yolo import classify, detect, obb, pose, segment, world, yoloe 4 | 5 | from .model import YOLO, YOLOE, YOLOWorld 6 | 7 | __all__ = "classify", "segment", "detect", "pose", "obb", "world", "yoloe", "YOLO", "YOLOWorld", "YOLOE" 8 | -------------------------------------------------------------------------------- /ultralytics/models/yolo/classify/__init__.py: -------------------------------------------------------------------------------- 1 | # Ultralytics 🚀 AGPL-3.0 License - https://ultralytics.com/license 2 | 3 | from 
ultralytics.models.yolo.classify.predict import ClassificationPredictor 4 | from ultralytics.models.yolo.classify.train import ClassificationTrainer 5 | from ultralytics.models.yolo.classify.val import ClassificationValidator 6 | 7 | __all__ = "ClassificationPredictor", "ClassificationTrainer", "ClassificationValidator" 8 | -------------------------------------------------------------------------------- /ultralytics/models/yolo/detect/__init__.py: -------------------------------------------------------------------------------- 1 | # Ultralytics 🚀 AGPL-3.0 License - https://ultralytics.com/license 2 | 3 | from .predict import DetectionPredictor 4 | from .train import DetectionTrainer 5 | from .val import DetectionValidator 6 | 7 | __all__ = "DetectionPredictor", "DetectionTrainer", "DetectionValidator" 8 | -------------------------------------------------------------------------------- /ultralytics/models/yolo/obb/__init__.py: -------------------------------------------------------------------------------- 1 | # Ultralytics 🚀 AGPL-3.0 License - https://ultralytics.com/license 2 | 3 | from .predict import OBBPredictor 4 | from .train import OBBTrainer 5 | from .val import OBBValidator 6 | 7 | __all__ = "OBBPredictor", "OBBTrainer", "OBBValidator" 8 | -------------------------------------------------------------------------------- /ultralytics/models/yolo/pose/__init__.py: -------------------------------------------------------------------------------- 1 | # Ultralytics 🚀 AGPL-3.0 License - https://ultralytics.com/license 2 | 3 | from .predict import PosePredictor 4 | from .train import PoseTrainer 5 | from .val import PoseValidator 6 | 7 | __all__ = "PoseTrainer", "PoseValidator", "PosePredictor" 8 | -------------------------------------------------------------------------------- /ultralytics/models/yolo/segment/__init__.py: -------------------------------------------------------------------------------- 1 | # Ultralytics 🚀 AGPL-3.0 License - https://ultralytics.com/license 2 | 3 | from .predict import SegmentationPredictor 4 | from .train import SegmentationTrainer 5 | from .val import SegmentationValidator 6 | 7 | __all__ = "SegmentationPredictor", "SegmentationTrainer", "SegmentationValidator" 8 | -------------------------------------------------------------------------------- /ultralytics/models/yolo/world/__init__.py: -------------------------------------------------------------------------------- 1 | # Ultralytics 🚀 AGPL-3.0 License - https://ultralytics.com/license 2 | 3 | from .train import WorldTrainer 4 | 5 | __all__ = ["WorldTrainer"] 6 | -------------------------------------------------------------------------------- /ultralytics/models/yolo/yoloe/__init__.py: -------------------------------------------------------------------------------- 1 | # Ultralytics 🚀 AGPL-3.0 License - https://ultralytics.com/license 2 | 3 | from .predict import YOLOEVPDetectPredictor, YOLOEVPSegPredictor 4 | from .train import YOLOEPEFreeTrainer, YOLOEPETrainer, YOLOETrainer, YOLOEVPTrainer 5 | from .train_seg import YOLOEPESegTrainer, YOLOESegTrainer, YOLOESegTrainerFromScratch, YOLOESegVPTrainer 6 | from .val import YOLOEDetectValidator, YOLOESegValidator 7 | 8 | __all__ = [ 9 | "YOLOETrainer", 10 | "YOLOEPETrainer", 11 | "YOLOESegTrainer", 12 | "YOLOEDetectValidator", 13 | "YOLOESegValidator", 14 | "YOLOEPESegTrainer", 15 | "YOLOESegTrainerFromScratch", 16 | "YOLOESegVPTrainer", 17 | "YOLOEVPTrainer", 18 | "YOLOEPEFreeTrainer", 19 | "YOLOEVPDetectPredictor", 20 | "YOLOEVPSegPredictor", 21 | ] 22 | 
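The task-specific packages above (classify, detect, obb, pose, segment, world, yoloe) each export a matching Predictor/Trainer/Validator set. A minimal sketch of driving one of these components directly instead of the high-level YOLO wrapper; the model weights, dataset, and epoch count below are illustrative values, not taken from this repo:

```python
from ultralytics.models.yolo.detect import DetectionTrainer

# Train with the task-specific trainer; overrides mirror the usual train-time arguments
args = dict(model="yolo11n.pt", data="coco8.yaml", epochs=1, imgsz=640)
trainer = DetectionTrainer(overrides=args)
trainer.train()
```

The same pattern applies to the segment, pose, obb, and classify counterparts exported above.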
-------------------------------------------------------------------------------- /ultralytics/nn/__init__.py: -------------------------------------------------------------------------------- 1 | # Ultralytics 🚀 AGPL-3.0 License - https://ultralytics.com/license 2 | 3 | from .tasks import ( 4 | BaseModel, 5 | ClassificationModel, 6 | DetectionModel, 7 | SegmentationModel, 8 | attempt_load_one_weight, 9 | attempt_load_weights, 10 | guess_model_scale, 11 | guess_model_task, 12 | parse_model, 13 | torch_safe_load, 14 | yaml_model_load, 15 | ) 16 | 17 | __all__ = ( 18 | "attempt_load_one_weight", 19 | "attempt_load_weights", 20 | "parse_model", 21 | "yaml_model_load", 22 | "guess_model_task", 23 | "guess_model_scale", 24 | "torch_safe_load", 25 | "DetectionModel", 26 | "SegmentationModel", 27 | "ClassificationModel", 28 | "BaseModel", 29 | ) 30 | -------------------------------------------------------------------------------- /ultralytics/solutions/__init__.py: -------------------------------------------------------------------------------- 1 | # Ultralytics 🚀 AGPL-3.0 License - https://ultralytics.com/license 2 | 3 | from .ai_gym import AIGym 4 | from .analytics import Analytics 5 | from .distance_calculation import DistanceCalculation 6 | from .heatmap import Heatmap 7 | from .instance_segmentation import InstanceSegmentation 8 | from .object_blurrer import ObjectBlurrer 9 | from .object_counter import ObjectCounter 10 | from .object_cropper import ObjectCropper 11 | from .parking_management import ParkingManagement, ParkingPtsSelection 12 | from .queue_management import QueueManager 13 | from .region_counter import RegionCounter 14 | from .security_alarm import SecurityAlarm 15 | from .speed_estimation import SpeedEstimator 16 | from .streamlit_inference import Inference 17 | from .trackzone import TrackZone 18 | from .vision_eye import VisionEye 19 | 20 | __all__ = ( 21 | "ObjectCounter", 22 | "ObjectCropper", 23 | "ObjectBlurrer", 24 | "AIGym", 25 | "RegionCounter", 26 | "SecurityAlarm", 27 | "Heatmap", 28 | "InstanceSegmentation", 29 | "VisionEye", 30 | "SpeedEstimator", 31 | "DistanceCalculation", 32 | "QueueManager", 33 | "ParkingManagement", 34 | "ParkingPtsSelection", 35 | "Analytics", 36 | "Inference", 37 | "TrackZone", 38 | ) 39 | -------------------------------------------------------------------------------- /ultralytics/trackers/__init__.py: -------------------------------------------------------------------------------- 1 | # Ultralytics 🚀 AGPL-3.0 License - https://ultralytics.com/license 2 | 3 | from .bot_sort import BOTSORT 4 | from .byte_tracker import BYTETracker 5 | from .track import register_tracker 6 | 7 | __all__ = "register_tracker", "BOTSORT", "BYTETracker" # allow simpler import 8 | -------------------------------------------------------------------------------- /ultralytics/trackers/utils/__init__.py: -------------------------------------------------------------------------------- 1 | # Ultralytics 🚀 AGPL-3.0 License - https://ultralytics.com/license 2 | -------------------------------------------------------------------------------- /ultralytics/utils/callbacks/__init__.py: -------------------------------------------------------------------------------- 1 | # Ultralytics 🚀 AGPL-3.0 License - https://ultralytics.com/license 2 | 3 | from .base import add_integration_callbacks, default_callbacks, get_default_callbacks 4 | 5 | __all__ = "add_integration_callbacks", "default_callbacks", "get_default_callbacks" 6 | 
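The callbacks package above exposes `get_default_callbacks` and `add_integration_callbacks`, which is how integrations, including the Ray Tune hook that follows, get attached to a trainer. A minimal sketch of hooking into the same event system from user code, assuming the model's `add_callback` API; the callback body and training arguments are illustrative:

```python
from ultralytics import YOLO


def log_epoch(trainer):
    # Runs at the end of each fit epoch; trainer.metrics holds the latest metrics dict
    print(f"epoch {trainer.epoch + 1}: {trainer.metrics}")


model = YOLO("yolo11n.pt")
model.add_callback("on_fit_epoch_end", log_epoch)  # same event the integration callbacks listen to
model.train(data="coco8.yaml", epochs=1, imgsz=640)
```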
-------------------------------------------------------------------------------- /ultralytics/utils/callbacks/raytune.py: -------------------------------------------------------------------------------- 1 | # Ultralytics 🚀 AGPL-3.0 License - https://ultralytics.com/license 2 | 3 | from ultralytics.utils import SETTINGS 4 | 5 | try: 6 | assert SETTINGS["raytune"] is True # verify integration is enabled 7 | import ray 8 | from ray import tune 9 | from ray.air import session 10 | 11 | except (ImportError, AssertionError): 12 | tune = None 13 | 14 | 15 | def on_fit_epoch_end(trainer): 16 | """ 17 | Reports training metrics to Ray Tune at epoch end when a Ray session is active. 18 | 19 | Captures metrics from the trainer object and sends them to Ray Tune with the current epoch number, 20 | enabling hyperparameter tuning optimization. Only executes when within an active Ray Tune session. 21 | 22 | Args: 23 | trainer (ultralytics.engine.trainer.BaseTrainer): The Ultralytics trainer object containing metrics and epochs. 24 | 25 | Examples: 26 | >>> # Called automatically by the Ultralytics training loop 27 | >>> on_fit_epoch_end(trainer) 28 | 29 | References: 30 | Ray Tune docs: https://docs.ray.io/en/latest/tune/index.html 31 | """ 32 | if ray.train._internal.session.get_session(): # check if Ray Tune session is active 33 | metrics = trainer.metrics 34 | session.report({**metrics, **{"epoch": trainer.epoch + 1}}) 35 | 36 | 37 | callbacks = ( 38 | { 39 | "on_fit_epoch_end": on_fit_epoch_end, 40 | } 41 | if tune 42 | else {} 43 | ) 44 | -------------------------------------------------------------------------------- /ultralytics/utils/errors.py: -------------------------------------------------------------------------------- 1 | # Ultralytics 🚀 AGPL-3.0 License - https://ultralytics.com/license 2 | 3 | from ultralytics.utils import emojis 4 | 5 | 6 | class HUBModelError(Exception): 7 | """ 8 | Exception raised when a model cannot be found or retrieved from Ultralytics HUB. 9 | 10 | This custom exception is used specifically for handling errors related to model fetching in Ultralytics YOLO. 11 | The error message is processed to include emojis for better user experience. 12 | 13 | Attributes: 14 | message (str): The error message displayed when the exception is raised. 15 | 16 | Methods: 17 | __init__: Initialize the HUBModelError with a custom message. 18 | 19 | Examples: 20 | >>> try: 21 | ... # Code that might fail to find a model 22 | ... raise HUBModelError("Custom model not found message") 23 | ... except HUBModelError as e: 24 | ... print(e) # Displays the emoji-enhanced error message 25 | """ 26 | 27 | def __init__(self, message="Model not found. Please check model URL and try again."): 28 | """ 29 | Initialize a HUBModelError exception. 30 | 31 | This exception is raised when a requested model is not found or cannot be retrieved from Ultralytics HUB. 32 | The message is processed to include emojis for better user experience. 33 | 34 | Args: 35 | message (str, optional): The error message to display when the exception is raised. 36 | 37 | Examples: 38 | >>> try: 39 | ... raise HUBModelError("Custom model error message") 40 | ... except HUBModelError as e: 41 | ... print(e) 42 | """ 43 | super().__init__(emojis(message)) 44 | --------------------------------------------------------------------------------
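The raytune.py callback above only reports metrics when the raytune setting is enabled and training runs inside an active Ray Tune session. A minimal sketch of how that is typically triggered through the tuning API, assuming the Ray Tune extras are installed (pip install "ray[tune]"); the dataset, iteration count, and epoch values are illustrative:

```python
from ultralytics import YOLO

model = YOLO("yolo11n.pt")
# use_ray=True routes tuning through Ray Tune, so on_fit_epoch_end reports metrics each epoch
results = model.tune(data="coco8.yaml", use_ray=True, iterations=5, epochs=10, imgsz=640)
```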