From 4ee24d4293d6a7b59c432a3fc836e8ede6d22bd6 Mon Sep 17 00:00:00 2001 From: boomermath Date: Wed, 23 Jul 2025 14:13:43 -0400 Subject: [PATCH 01/17] Add rknn scripts --- .../rknn-convert-tool/autoinstallrknnapi.py | 75 ++++++ scripts/rknn-convert-tool/create_onnx.py | 88 +++++++ scripts/rknn-convert-tool/create_rknn.py | 217 ++++++++++++++++++ .../rknn-convert-tool/rknn_conversion.ipynb | 168 ++++++++++++++ 4 files changed, 548 insertions(+) create mode 100644 scripts/rknn-convert-tool/autoinstallrknnapi.py create mode 100644 scripts/rknn-convert-tool/create_onnx.py create mode 100644 scripts/rknn-convert-tool/create_rknn.py create mode 100644 scripts/rknn-convert-tool/rknn_conversion.ipynb diff --git a/scripts/rknn-convert-tool/autoinstallrknnapi.py b/scripts/rknn-convert-tool/autoinstallrknnapi.py new file mode 100644 index 0000000000..238e68afd7 --- /dev/null +++ b/scripts/rknn-convert-tool/autoinstallrknnapi.py @@ -0,0 +1,75 @@ +import shutil +import sys +import platform +import urllib.request +import subprocess +from urllib.parse import urlparse +import os + +CHUNK_SIZE = 8192 + +wheel_versions = { + "arm64": { + "3.6": "https://github.com/airockchip/rknn-toolkit2/raw/refs/heads/master/rknn-toolkit2/packages/arm64/rknn_toolkit2-2.3.2-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", + "3.7": "https://github.com/airockchip/rknn-toolkit2/raw/refs/heads/master/rknn-toolkit2/packages/arm64/rknn_toolkit2-2.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", + "3.8": "https://github.com/airockchip/rknn-toolkit2/raw/refs/heads/master/rknn-toolkit2/packages/arm64/rknn_toolkit2-2.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", + "3.9": "https://github.com/airockchip/rknn-toolkit2/raw/refs/heads/master/rknn-toolkit2/packages/arm64/rknn_toolkit2-2.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", + "3.10": 
"https://github.com/airockchip/rknn-toolkit2/raw/refs/heads/master/rknn-toolkit2/packages/arm64/rknn_toolkit2-2.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", + "3.11": "https://github.com/airockchip/rknn-toolkit2/raw/refs/heads/master/rknn-toolkit2/packages/arm64/rknn_toolkit2-2.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", + "3.12": "https://github.com/airockchip/rknn-toolkit2/raw/refs/heads/master/rknn-toolkit2/packages/arm64/rknn_toolkit2-2.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", + }, + "x86_64": { + "3.6": "https://github.com/airockchip/rknn-toolkit2/raw/refs/heads/master/rknn-toolkit2/packages/x86_64/rknn_toolkit2-2.3.2-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", + "3.7": "https://github.com/airockchip/rknn-toolkit2/raw/refs/heads/master/rknn-toolkit2/packages/x86_64/rknn_toolkit2-2.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", + "3.8": "https://github.com/airockchip/rknn-toolkit2/raw/refs/heads/master/rknn-toolkit2/packages/x86_64/rknn_toolkit2-2.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", + "3.9": "https://github.com/airockchip/rknn-toolkit2/raw/refs/heads/master/rknn-toolkit2/packages/x86_64/rknn_toolkit2-2.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", + "3.10": "https://github.com/airockchip/rknn-toolkit2/raw/refs/heads/master/rknn-toolkit2/packages/x86_64/rknn_toolkit2-2.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", + "3.11": "https://github.com/airockchip/rknn-toolkit2/raw/refs/heads/master/rknn-toolkit2/packages/x86_64/rknn_toolkit2-2.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", + "3.12": "https://github.com/airockchip/rknn-toolkit2/raw/refs/heads/master/rknn-toolkit2/packages/x86_64/rknn_toolkit2-2.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", + }, +} + +supported_arch = list(wheel_versions.keys()) + +def get_filename_from_url(url): + parsed = urlparse(url) + 
filename = os.path.basename(parsed.path) + if not filename: + filename = "" # never gonna get here + return filename + +if __name__ == "__main__": + arch = platform.machine() + + if not arch in supported_arch: + print(f"Unsupported architecture {arch}. Must be one of the following: {supported_arch}") + + current_version = f"{sys.version_info.major}.{sys.version_info.minor}" + supported_versions = list(wheel_versions[arch]) + + if sys.version_info.major < 3: + print(f"Must have at least python version {supported_versions[0]}") + elif not current_version in supported_versions: + print(f"Unsupported python version {current_version}, supported python versions are: {supported_versions}") + + + download_url = wheel_versions[arch][current_version] + wheel_name = get_filename_from_url(download_url) + + print(f"Downloading RKNN Toolkit2 wheel: {wheel_name}") + with urllib.request.urlopen(download_url) as response, open(wheel_name, 'wb') as out_file: + while True: + chunk = response.read(CHUNK_SIZE) + if not chunk: + break + out_file.write(chunk) + print("Download completed, now running pip install") + + try: + subprocess.run(["pip", "install", wheel_name]).check_returncode() + except subprocess.CalledProcessError as e: + print("Failed to run pip install, see output below") + print(e.output) + sys.exit(1) + + print("Python RKNN Toolkit2 installed successfully!") \ No newline at end of file diff --git a/scripts/rknn-convert-tool/create_onnx.py b/scripts/rknn-convert-tool/create_onnx.py new file mode 100644 index 0000000000..83d70ab488 --- /dev/null +++ b/scripts/rknn-convert-tool/create_onnx.py @@ -0,0 +1,88 @@ +import subprocess +import sys +import argparse +import os.path + +yolo_git_repos = { + "yolov5": "https://github.com/airockchip/yolov5", + "yolov8": "https://github.com/airockchip/ultralytics_yolov8", + "yolov11": "https://github.com/airockchip/ultralytics_yolo11", +} + +valid_yolo_version = list(yolo_git_repos.keys()) +comma_sep_yolo_versions = ", 
".join(valid_yolo_version) + +ultralytics_folder_name = "airockchip_yolo_pkg" + + +def check_git_installed(): + try: + subprocess.run(["git", "--version"]).check_returncode() + except: + print("Git is not installed or not found in your PATH.") + print("Please install Git from https://git-scm.com/downloads and try again.") + sys.exit(1) + + +def run_onnx_conversion(version, model_path): + rc_repo = yolo_git_repos[version] + + if rc_repo is None: + # achievement: how did we get here? + print( + f"Invalid yolo version \"{version}\" must be one of the following {comma_sep_yolo_versions}" + ) + + if os.path.exists(ultralytics_folder_name): + print("Existing Rockchip Repo detected, no install required") + else: + print("Cloning Rockchip repo...") + + try: + subprocess.run( + ["git", "clone", rc_repo, ultralytics_folder_name] + ).check_returncode() + except subprocess.CalledProcessError as e: + print("Failed to clone rockchip repo, see error output below") + print(e.output) + sys.exit(1) + + print("Running pip install...") + try: + subprocess.run(["pip", "install", "-e", ultralytics_folder_name]).check_returncode() + except subprocess.CalledProcessError as e: + print("Pip install rockchip repo failed, see error output") + print(e.output) + sys.exit(1) + + from ultralytics import YOLO + + model = YOLO(model_path) + model.export(format="rknn") + + + +if __name__ == "__main__": + parser = argparse.ArgumentParser( + description="Generate valid ONNX file for yolo model" + ) + + parser.add_argument( + "-v", + "--version", + choices=valid_yolo_version, + required=True, + help=(f"YOLO version to use. 
Must be one of: {comma_sep_yolo_versions}"), + ) + + parser.add_argument( + "-m", + "--model_path", + required=True, + help=(f"Path to YOLO model"), + ) + + args = parser.parse_args() + + check_git_installed() + run_onnx_conversion(args.version, args.model_path) diff --git a/scripts/rknn-convert-tool/create_rknn.py b/scripts/rknn-convert-tool/create_rknn.py new file mode 100644 index 0000000000..32973f2da7 --- /dev/null +++ b/scripts/rknn-convert-tool/create_rknn.py @@ -0,0 +1,217 @@ +import subprocess +import sys +import random +import argparse +import os +from rknn.api import RKNN + +image_extensions = (".jpg", ".jpeg", ".png", ".bmp", ".gif", ".tiff", ".webp") +DEFAULT_PLATFORM = "rk3588" + +def list_img_dir(img_dir): + return [ + os.path.abspath(os.path.join(img_dir, f)) + for f in os.listdir(img_dir) + if f.lower().endswith(image_extensions) + ] + + +def sample_imgs(num, img_list): + if len(img_list) < num: + return img_list + else: + return random.sample(img_list, num) + + +def get_image_list_from_dataset(num_imgs, yaml_dir): + print(f"Dataset detected with {yaml_dir} file") + img_raw_paths = [] + + with open(yaml_dir, "r") as yaml_file: + for line in yaml_file: + line = line.strip() + if ( + line.startswith("train:") + or line.startswith("val:") + or line.startswith("test:") + ): + img_raw_paths.append(line.split(":", 1)[1].strip()) + + no_yaml_dir = yaml_dir.replace( + "data.yaml", "dummy_dir" + ) # data.yaml sets dirs one level up + img_set_paths = [] + + for img_raw_path in img_raw_paths: + p = ( + img_raw_path + if os.path.isabs(img_raw_path) + else os.path.realpath(os.path.join(no_yaml_dir, img_raw_path)) + ) + + if os.path.exists(p): + img_set_paths.append(p) + + if len(img_set_paths) < 1: + return None + + all_imgs = [list_img_dir(path) for path in img_set_paths] + + for imgs in all_imgs: + print(len(imgs)) + + total_imgs = sum(len(group) for group in all_imgs) + + sampled_imgs = [ + sample_imgs(round((len(group) / total_imgs) * num_imgs), group) + 
for group in all_imgs + ] + + return [img for group in sampled_imgs for img in group] + + +def get_image_list_from_img_dir(num_imgs, img_dir): + return sample_imgs(num_imgs, list_img_dir(img_dir)) + + +def get_image_list(num_imgs, image_dir): + yaml_path = os.path.join(image_dir, "data.yaml") + + if os.path.exists(yaml_path): + return get_image_list_from_dataset(num_imgs, yaml_path) + else: + return get_image_list_from_img_dir(num_imgs, image_dir) + + +def run_rknn_conversion( + img_list_txt, disable_quant, model_path, rknn_output, verbose_logging +): + rknn = RKNN(verbose=verbose_logging, verbose_file=("rknn_convert.log" if verbose_logging else None)) + + rknn.config( + mean_values=[[0, 0, 0]], + std_values=[[255, 255, 255]], + target_platform=DEFAULT_PLATFORM, + ) + + print("Attempted RKNN load") + ret = rknn.load_onnx(model=model_path) + if ret != 0: + print("Loading model failed!") + exit(ret) + + print("Attempting RKNN build") + ret = rknn.build(do_quantization=(not disable_quant), dataset=img_list_txt) + if ret != 0: + print("Building model failed!") + exit(ret) + + print("Build succeeded! 
Starting export...") + ret = rknn.export_rknn(rknn_output) + if ret != 0: + print("Exporting model failed!") + exit(ret) + print("Finished export!") + + # Release + rknn.release() + + print(f'Your model is in "{rknn_output}" and ready to use!') + + +if __name__ == "__main__": + parser = argparse.ArgumentParser( + description="Generate valid ONNX file for yolo model" + ) + + parser.add_argument( + "-ni", + "--num_imgs", + type=int, + default=300, + help="Number of images to use for calibration (default: 300)", + ) + + parser.add_argument( + "-d", + "--img_dir", + required=True, + help="Directory where your dataset is located (must have data.yaml), or images are located", + ) + + parser.add_argument( + "-m", + "--model_path", + required=True, + help=(f"Path to generated ONNX model"), + ) + + parser.add_argument( + "-dq", + "--disable_quantize", + type=bool, + default=False, + help="Whether to skip quantization (default: False)", + ) + + parser.add_argument( + "-o", + "--rknn_output", + default="out.rknn", + help="Where the rknn model should be outputted (default: ./out.rknn)", + ) + + parser.add_argument( + "-ds", + "--img_dataset_txt", + default="imgs.txt", + help="Where the list of images used for quantization should be outputted (default: ./imgs.txt)", + ) + + parser.add_argument( + "-vb", + "--verbose", + type=bool, + default=False, + help="Whether to enable verbose logging", + ) + + args = parser.parse_args() + + if not args.rknn_output.endswith(".rknn"): + print("RKNN output path must end in .rknn!") + sys.exit(1) + + if not args.disable_quantize: + if args.img_dir == None or len(args.img_dir) < 1: + print(f"Must specify list of images to use with --img_dir") + sys.exit(1) + + img_dir_abs = os.path.abspath(args.img_dir) + + img_list = get_image_list(args.num_imgs, img_dir_abs) + img_list_len = 0 if img_list is None else len(img_list) + + if img_list_len == 0: + print(f"No images found in {img_dir_abs}") + sys.exit(1) + elif img_list_len < args.num_imgs: + 
print( + f"Not enough images in your dataset/directory, you have {img_list_len} images, but need {args.num_imgs}" + ) + sys.exit(1) + + if not args.img_dataset_txt.endswith(".txt"): + print(f"Image dataset text file path must end in .txt") + sys.exit(1) + + with open(args.img_dataset_txt, "w") as set_file: + set_file.writelines(f"{img}\n" for img in img_list) + + run_rknn_conversion( + args.img_dataset_txt, + args.disable_quantize, + args.model_path, + args.rknn_output, + args.verbose, + ) diff --git a/scripts/rknn-convert-tool/rknn_conversion.ipynb b/scripts/rknn-convert-tool/rknn_conversion.ipynb new file mode 100644 index 0000000000..324b570a04 --- /dev/null +++ b/scripts/rknn-convert-tool/rknn_conversion.ipynb @@ -0,0 +1,168 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "id": "63e2692b", + "metadata": {}, + "source": [] + }, + { + "cell_type": "markdown", + "id": "bb5367ce", + "metadata": {}, + "source": [ + "# RKNN Conversion Guide\n", + "\n", + "### Before you start\n", + "\n", + "Before you run the scripts/python notebook from this project, it's recommended you create a separate [python virtual environment](https://docs.python.org/3/library/venv.html) so that packages installed in for the conversion process don't conflict with other packages you may already have installed\n", + "\n", + "### Step 1: Convert to ONNX \n", + "\n", + "To convert to ONNX, simply run the `create_onnx.py` script with your model weights, see below" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "0659e15f", + "metadata": {}, + "outputs": [], + "source": [ + "# where version is either yolov5, yolov8, or yolov11, and model_path is the path to your weights file (.pt)\n", + "%run -i create_onnx.py --version yolov8 --model_path weights.pt" + ] + }, + { + "cell_type": "markdown", + "id": "86ff07e6", + "metadata": {}, + "source": [ + "### Step 2: Download RKNN API\n", + "You can either utilize a script to autodetect and install the correct Python library for 
you, or manually install it\n", + "\n", + "#### Automatic installation\n", + "Simply run the `autoinstallrknnapi.py` script." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "7ec11f96", + "metadata": {}, + "outputs": [], + "source": [ + "%run -i autoinstallrknnapi.py" + ] + }, + { + "cell_type": "markdown", + "id": "8b57fe4d", + "metadata": {}, + "source": [ + "#### Manual installation\n", + "##### How to find the correct link\n", + "Go to https://github.com/airockchip/rknn-toolkit2, and click on `rknn-toolkit2`, then `packages`.\n", + "If you are running an x86_64 CPU (e.g. most Intel and AMD CPUs) click on that, otherwise choose arm64 for ARM-based computers (e.g. M-series Macs or Snapdragon processors). If you aren't sure what CPU you are running, look up your processor architecture information from system settings.\n", + "\n", + "Once you have the correct CPU, you will see multiple packages. The file names will look something like `rknn_toolkit2-2.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl` for example. The numbers after CP correspond to your python version. If you have a Python version 3.10, for example, you want to download a package with cp310 in the name. For 3.8, you'd look for cp38, for 3.7 cp37, and so on.\n", + "\n", + "Then, once you find your desired package, locate the \"Raw\" download button, and download the package (.whl) once you do, run pip install, replacing `rknn_toolkit2.whl` with the path to the wheel file you just downloaded" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "7414b120", + "metadata": {}, + "outputs": [], + "source": [ + "%pip install rknn_toolkit2.whl" + ] + }, + { + "cell_type": "markdown", + "id": "c1db5ef0", + "metadata": {}, + "source": [ + "### Step 3: Convert to RKNN\n", + "\n", + "Simply run the `create_rknn.py` script, replacing the arguments with your own." 
+ ] + }, + { + "cell_type": "markdown", + "id": "f41e431b", + "metadata": {}, + "source": [ + "#### Overview of `create_rknn.py` script\n", + "\n", + "##### RKNN Conversion Script Overview\n", + "\n", + "This script converts a YOLO ONNX model to RKNN format using a set of calibration images. It's designed to work with either:\n", + "\n", + "- A flat directory of images (e.g. `train/images`), **or**\n", + "- A dataset directory containing a `data.yaml` file that defines `train`, `val`, and/or `test` folders.\n", + "\n", + "You can use it from the command line or from inside a Python environment like this notebook.\n", + "\n", + "##### Arguments\n", + "\n", + "| Argument | Type | Description |\n", + "|----------|------|-------------|\n", + "| `--img_dir` (`-d`) | `str` (required) | Path to your image directory. This can either be a folder of images **or** a dataset folder with a `data.yaml`. |\n", + "| `--model_path` (`-m`) | `str` (required) | Path to your YOLO ONNX model, created in Step 1. |\n", + "| `--num_imgs` (`-ni`) | `int` (default: `300`) | Number of images to use for quantization calibration. |\n", + "| `--disable_quantize` (`-dq`) | `bool` (default: `False`) | Set to `True` to skip quantization entirely, not recommended for performance. |\n", + "| `--rknn_output` (`-o`) | `str` (default: `out.rknn`) | File path where the final RKNN model should be saved. |\n", + "| `--img_dataset_txt` (`-ds`) | `str` (default: `imgs.txt`) | File path to store the list of images used during quantization. |\n", + "| `--verbose` (`-vb`) | `bool` (default: `False`) | Enable detailed logging from RKNN during conversion. 
|\n", + "---\n", + "\n", + "\n", + "##### Notes\n", + "\n", + "As this is meant to be used with [PhotonVision](https://photonvision.org) this script only supports RK3588 (found in Orange Pi 5 models), but feel free to modify the script to suit your needs" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "b09656dd", + "metadata": {}, + "outputs": [], + "source": [ + "%run -i create_rknn.py --img_dir /datasets/my_imgs --model_path model.onnx" + ] + }, + { + "cell_type": "markdown", + "id": "5b3a6806", + "metadata": {}, + "source": [ + "And that's it! You should have an RKNN model file ready to deploy on an Orange PI" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": ".venv", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.10.18" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} From 7b42d4a6688327b759bdc861c1b0a3c9a345ad05 Mon Sep 17 00:00:00 2001 From: Sam948-byte Date: Wed, 23 Jul 2025 19:37:37 -0500 Subject: [PATCH 02/17] lint --- .../rknn-convert-tool/autoinstallrknnapi.py | 44 +++++++++++-------- scripts/rknn-convert-tool/create_onnx.py | 31 ++++++------- scripts/rknn-convert-tool/create_rknn.py | 12 +++-- 3 files changed, 49 insertions(+), 38 deletions(-) diff --git a/scripts/rknn-convert-tool/autoinstallrknnapi.py b/scripts/rknn-convert-tool/autoinstallrknnapi.py index 238e68afd7..148d4ba71f 100644 --- a/scripts/rknn-convert-tool/autoinstallrknnapi.py +++ b/scripts/rknn-convert-tool/autoinstallrknnapi.py @@ -1,10 +1,9 @@ -import shutil -import sys +import os import platform -import urllib.request import subprocess +import sys +import urllib.request from urllib.parse import urlparse -import os CHUNK_SIZE = 8192 @@ -31,39 +30,46 @@ supported_arch = list(wheel_versions.keys()) + def 
get_filename_from_url(url): parsed = urlparse(url) filename = os.path.basename(parsed.path) if not filename: - filename = "" # never gonna get here + filename = "" # never gonna get here return filename + if __name__ == "__main__": arch = platform.machine() if not arch in supported_arch: - print(f"Unsupported architecture {arch}. Must be one of the following: {supported_arch}") + print( + f"Unsupported architecture {arch}. Must be one of the following: {supported_arch}" + ) current_version = f"{sys.version_info.major}.{sys.version_info.minor}" supported_versions = list(wheel_versions[arch]) - + if sys.version_info.major < 3: print(f"Must have at least python version {supported_versions[0]}") elif not current_version in supported_versions: - print(f"Unsupported python version {current_version}, supported python versions are: {supported_versions}") - - + print( + f"Unsupported python version {current_version}, supported python versions are: {supported_versions}" + ) + download_url = wheel_versions[arch][current_version] wheel_name = get_filename_from_url(download_url) - + print(f"Downloading RKNN Toolkit2 wheel: {wheel_name}") - with urllib.request.urlopen(download_url) as response, open(wheel_name, 'wb') as out_file: - while True: - chunk = response.read(CHUNK_SIZE) - if not chunk: - break - out_file.write(chunk) - print("Download completed, now running pip install") + with urllib.request.urlopen(download_url) as response, open( + wheel_name, "wb" + ) as out_file: + while True: + chunk = response.read(CHUNK_SIZE) + if not chunk: + break + out_file.write(chunk) + print("Download completed, now running pip install") try: subprocess.run(["pip", "install", wheel_name]).check_returncode() @@ -72,4 +78,4 @@ def get_filename_from_url(url): print(e.output) sys.exit(1) - print("Python RKNN Toolkit2 installed successfully!") \ No newline at end of file + print("Python RKNN Toolkit2 installed successfully!") diff --git a/scripts/rknn-convert-tool/create_onnx.py 
b/scripts/rknn-convert-tool/create_onnx.py index 83d70ab488..2e44e54993 100644 --- a/scripts/rknn-convert-tool/create_onnx.py +++ b/scripts/rknn-convert-tool/create_onnx.py @@ -1,7 +1,7 @@ -import subprocess -import sys import argparse import os.path +import subprocess +import sys yolo_git_repos = { "yolov5": "https://github.com/airockchip/yolov5", @@ -26,18 +26,18 @@ def check_git_installed(): def run_onnx_conversion(version, model_path): rc_repo = yolo_git_repos[version] - + if rc_repo is None: # achievement: how did we get here? print( - f"Invalid yolo version \"{version}\" must be one of the following {comma_sep_yolo_versions}" + f'Invalid yolo version "{version}" must be one of the following {comma_sep_yolo_versions}' ) if os.path.exists(ultralytics_folder_name): print("Existing Rockchip Repo detected, no install required") else: print("Cloning Rockchip repo...") - + try: subprocess.run( ["git", "clone", rc_repo, ultralytics_folder_name] @@ -46,27 +46,28 @@ def run_onnx_conversion(version, model_path): print("Failed to clone rockchip repo, see error output below") print(e.output) sys.exit(1) - + print("Running pip install...") - try: - subprocess.run(["pip", "install", "-e", ultralytics_folder_name]).check_returncode() + try: + subprocess.run( + ["pip", "install", "-e", ultralytics_folder_name] + ).check_returncode() except subprocess.CalledProcessError as e: - print("Pip install rockchip repo failed, see error output") - print(e.output) - sys.exit(1) - + print("Pip install rockchip repo failed, see error output") + print(e.output) + sys.exit(1) + from ultralytics import YOLO model = YOLO(model_path) model.export(format="rknn") - if __name__ == "__main__": parser = argparse.ArgumentParser( description="Generate valid ONNX file for yolo model" ) - + parser.add_argument( "-v", "--version", @@ -74,7 +75,7 @@ def run_onnx_conversion(version, model_path): required=True, help=(f"YOLO version to use. 
Must be one of: {comma_sep_yolo_versions}"), ) - + parser.add_argument( "-m", "--model_path", diff --git a/scripts/rknn-convert-tool/create_rknn.py b/scripts/rknn-convert-tool/create_rknn.py index 32973f2da7..e6a6f68e7a 100644 --- a/scripts/rknn-convert-tool/create_rknn.py +++ b/scripts/rknn-convert-tool/create_rknn.py @@ -1,13 +1,14 @@ -import subprocess -import sys -import random import argparse import os +import random +import sys + from rknn.api import RKNN image_extensions = (".jpg", ".jpeg", ".png", ".bmp", ".gif", ".tiff", ".webp") DEFAULT_PLATFORM = "rk3588" + def list_img_dir(img_dir): return [ os.path.abspath(os.path.join(img_dir, f)) @@ -86,7 +87,10 @@ def get_image_list(num_imgs, image_dir): def run_rknn_conversion( img_list_txt, disable_quant, model_path, rknn_output, verbose_logging ): - rknn = RKNN(verbose=verbose_logging, verbose_file=("rknn_convert.log" if verbose_logging else None)) + rknn = RKNN( + verbose=verbose_logging, + verbose_file=("rknn_convert.log" if verbose_logging else None), + ) rknn.config( mean_values=[[0, 0, 0]], From e3e825c83be71f23040967716376870627fb6c8e Mon Sep 17 00:00:00 2001 From: boomermath Date: Fri, 25 Jul 2025 11:39:57 -0400 Subject: [PATCH 03/17] Fix create_onnx.py install, add progress bar for autoinstallrknnapi.py --- .../rknn-convert-tool/autoinstallrknnapi.py | 21 ++++++++++++++++--- scripts/rknn-convert-tool/create_onnx.py | 8 ++++--- scripts/rknn-convert-tool/create_rknn.py | 13 ++++-------- 3 files changed, 27 insertions(+), 15 deletions(-) diff --git a/scripts/rknn-convert-tool/autoinstallrknnapi.py b/scripts/rknn-convert-tool/autoinstallrknnapi.py index 148d4ba71f..e711154af8 100644 --- a/scripts/rknn-convert-tool/autoinstallrknnapi.py +++ b/scripts/rknn-convert-tool/autoinstallrknnapi.py @@ -1,11 +1,14 @@ -import os -import platform -import subprocess +import shutil import sys +import platform import urllib.request +import subprocess from urllib.parse import urlparse +import os CHUNK_SIZE = 8192 
+CONTENT_LENGTH_HEADER = "Content-Length" +PROGRESS_BAR_SIZE = 70 wheel_versions = { "arm64": { @@ -64,11 +67,23 @@ def get_filename_from_url(url): with urllib.request.urlopen(download_url) as response, open( wheel_name, "wb" ) as out_file: + total_size = int(response.getheader(CONTENT_LENGTH_HEADER).strip()) + downloaded_chunks = 0 + while True: chunk = response.read(CHUNK_SIZE) if not chunk: break out_file.write(chunk) + downloaded_chunks += len(chunk) + ratio = downloaded_chunks / total_size + done = int(PROGRESS_BAR_SIZE * ratio) + sys.stdout.write( + f"\r[{'=' * done}{' ' * (PROGRESS_BAR_SIZE - done)}] {(ratio * 100):.2f}% " + ) + sys.stdout.flush() + + print() print("Download completed, now running pip install") try: diff --git a/scripts/rknn-convert-tool/create_onnx.py b/scripts/rknn-convert-tool/create_onnx.py index 2e44e54993..249f387e92 100644 --- a/scripts/rknn-convert-tool/create_onnx.py +++ b/scripts/rknn-convert-tool/create_onnx.py @@ -1,7 +1,7 @@ -import argparse -import os.path import subprocess import sys +import argparse +import os.path yolo_git_repos = { "yolov5": "https://github.com/airockchip/yolov5", @@ -50,13 +50,15 @@ def run_onnx_conversion(version, model_path): print("Running pip install...") try: subprocess.run( - ["pip", "install", "-e", ultralytics_folder_name] + ["pip", "install", "-e", ultralytics_folder_name, "onnx"] ).check_returncode() except subprocess.CalledProcessError as e: print("Pip install rockchip repo failed, see error output") print(e.output) sys.exit(1) + sys.path.insert(0, os.path.abspath(ultralytics_folder_name)) + from ultralytics import YOLO model = YOLO(model_path) diff --git a/scripts/rknn-convert-tool/create_rknn.py b/scripts/rknn-convert-tool/create_rknn.py index e6a6f68e7a..c61973110f 100644 --- a/scripts/rknn-convert-tool/create_rknn.py +++ b/scripts/rknn-convert-tool/create_rknn.py @@ -1,14 +1,13 @@ +import subprocess +import sys +import random import argparse import os -import random -import sys - from 
rknn.api import RKNN image_extensions = (".jpg", ".jpeg", ".png", ".bmp", ".gif", ".tiff", ".webp") DEFAULT_PLATFORM = "rk3588" - def list_img_dir(img_dir): return [ os.path.abspath(os.path.join(img_dir, f)) @@ -87,10 +86,7 @@ def get_image_list(num_imgs, image_dir): def run_rknn_conversion( img_list_txt, disable_quant, model_path, rknn_output, verbose_logging ): - rknn = RKNN( - verbose=verbose_logging, - verbose_file=("rknn_convert.log" if verbose_logging else None), - ) + rknn = RKNN(verbose=verbose_logging, verbose_file=("rknn_convert.log" if verbose_logging else None)) rknn.config( mean_values=[[0, 0, 0]], @@ -139,7 +135,6 @@ def run_rknn_conversion( parser.add_argument( "-d", "--img_dir", - required=True, help="Directory where your dataset is located (must have data.yaml), or images are located", ) From 9cff4bd63b032b079f0b65a0b19fd1fe1c087616 Mon Sep 17 00:00:00 2001 From: boomermath Date: Fri, 25 Jul 2025 13:54:40 -0400 Subject: [PATCH 04/17] Added installed script for google colab users --- .../rknn-convert-tool/rknn_conversion.ipynb | 168 ------------------ 1 file changed, 168 deletions(-) delete mode 100644 scripts/rknn-convert-tool/rknn_conversion.ipynb diff --git a/scripts/rknn-convert-tool/rknn_conversion.ipynb b/scripts/rknn-convert-tool/rknn_conversion.ipynb deleted file mode 100644 index 324b570a04..0000000000 --- a/scripts/rknn-convert-tool/rknn_conversion.ipynb +++ /dev/null @@ -1,168 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "id": "63e2692b", - "metadata": {}, - "source": [] - }, - { - "cell_type": "markdown", - "id": "bb5367ce", - "metadata": {}, - "source": [ - "# RKNN Conversion Guide\n", - "\n", - "### Before you start\n", - "\n", - "Before you run the scripts/python notebook from this project, it's recommended you create a separate [python virtual environment](https://docs.python.org/3/library/venv.html) so that packages installed in for the conversion process don't conflict with other packages you may already have 
installed\n", - "\n", - "### Step 1: Convert to ONNX \n", - "\n", - "To convert to ONNX, simply run the `create_onnx.py` script with your model weights, see below" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "0659e15f", - "metadata": {}, - "outputs": [], - "source": [ - "# where version is either yolov5, yolov8, or yolov11, and model_path is the path to your weights file (.pt)\n", - "%run -i create_onnx.py --version yolov8 --model_path weights.pt" - ] - }, - { - "cell_type": "markdown", - "id": "86ff07e6", - "metadata": {}, - "source": [ - "### Step 2: Download RKNN API\n", - "You can either utilize a script to autodetect and install the correct Python library for you, or manually install it\n", - "\n", - "#### Automatic installation\n", - "Simply run the `autoinstallrknnapi.py` script." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "7ec11f96", - "metadata": {}, - "outputs": [], - "source": [ - "%run -i autoinstallrknnapi.py" - ] - }, - { - "cell_type": "markdown", - "id": "8b57fe4d", - "metadata": {}, - "source": [ - "#### Manual installation\n", - "##### How to find the correct link\n", - "Go to https://github.com/airockchip/rknn-toolkit2, and click on `rknn-toolkit2`, then `packages`.\n", - "If you are running an x86_64 CPU (e.g. most Intel and AMD CPUs) click on that, otherwise choose arm64 for ARM-based computers (e.g. M-series Macs or Snapdragon processors). If you aren't sure what CPU you are running, look up your processor architecture information from system settings.\n", - "\n", - "Once you have the correct CPU, you will see multiple packages. The file names will look something like `rknn_toolkit2-2.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl` for example. The numbers after CP correspond to your python version. If you have a Python version 3.10, for example, you want to download a package with cp310 in the name. 
For 3.8, you'd look for cp38, for 3.7 cp37, and so on.\n", - "\n", - "Then, once you find your desired package, locate the \"Raw\" download button, and download the package (.whl) once you do, run pip install, replacing `rknn_toolkit2.whl` with the path to the wheel file you just downloaded" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "7414b120", - "metadata": {}, - "outputs": [], - "source": [ - "%pip install rknn_toolkit2.whl" - ] - }, - { - "cell_type": "markdown", - "id": "c1db5ef0", - "metadata": {}, - "source": [ - "### Step 3: Convert to RKNN\n", - "\n", - "Simply run the `create_rknn.py` script, replacing the arguments with your own." - ] - }, - { - "cell_type": "markdown", - "id": "f41e431b", - "metadata": {}, - "source": [ - "#### Overview of `create_rknn.py` script\n", - "\n", - "##### RKNN Conversion Script Overview\n", - "\n", - "This script converts a YOLO ONNX model to RKNN format using a set of calibration images. It's designed to work with either:\n", - "\n", - "- A flat directory of images (e.g. `train/images`), **or**\n", - "- A dataset directory containing a `data.yaml` file that defines `train`, `val`, and/or `test` folders.\n", - "\n", - "You can use it from the command line or from inside a Python environment like this notebook.\n", - "\n", - "##### Arguments\n", - "\n", - "| Argument | Type | Description |\n", - "|----------|------|-------------|\n", - "| `--img_dir` (`-d`) | `str` (required) | Path to your image directory. This can either be a folder of images **or** a dataset folder with a `data.yaml`. |\n", - "| `--model_path` (`-m`) | `str` (required) | Path to your YOLO ONNX model, created in Step 1. |\n", - "| `--num_imgs` (`-ni`) | `int` (default: `300`) | Number of images to use for quantization calibration. |\n", - "| `--disable_quantize` (`-dq`) | `bool` (default: `False`) | Set to `True` to skip quantization entirely, not recommended for performance. 
|\n", - "| `--rknn_output` (`-o`) | `str` (default: `out.rknn`) | File path where the final RKNN model should be saved. |\n", - "| `--img_dataset_txt` (`-ds`) | `str` (default: `imgs.txt`) | File path to store the list of images used during quantization. |\n", - "| `--verbose` (`-vb`) | `bool` (default: `False`) | Enable detailed logging from RKNN during conversion. |\n", - "---\n", - "\n", - "\n", - "##### Notes\n", - "\n", - "As this is meant to be used with [PhotonVision](https://photonvision.org) this script only supports RK3588 (found in Orange Pi 5 models), but feel free to modify the script to suit your needs" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "b09656dd", - "metadata": {}, - "outputs": [], - "source": [ - "%run -i create_rknn.py --img_dir /datasets/my_imgs --model_path model.onnx" - ] - }, - { - "cell_type": "markdown", - "id": "5b3a6806", - "metadata": {}, - "source": [ - "And that's it! You should have an RKNN model file ready to deploy on an Orange PI" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": ".venv", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.10.18" - } - }, - "nbformat": 4, - "nbformat_minor": 5 -} From 96f25366c5cb8bc7d8c19d90797956dc38e708d3 Mon Sep 17 00:00:00 2001 From: boomermath Date: Fri, 25 Jul 2025 13:55:11 -0400 Subject: [PATCH 05/17] Added installed script for google colab users --- .../rknn-convert-tool/rknn_conversion.ipynb | 229 ++++++++++++++++++ 1 file changed, 229 insertions(+) create mode 100644 scripts/rknn-convert-tool/rknn_conversion.ipynb diff --git a/scripts/rknn-convert-tool/rknn_conversion.ipynb b/scripts/rknn-convert-tool/rknn_conversion.ipynb new file mode 100644 index 0000000000..025b8e168c --- /dev/null +++ 
b/scripts/rknn-convert-tool/rknn_conversion.ipynb @@ -0,0 +1,229 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "id": "63e2692b", + "metadata": {}, + "source": [] + }, + { + "cell_type": "markdown", + "id": "bb5367ce", + "metadata": {}, + "source": [ + "# RKNN Conversion Guide\n", + "\n", + "----------------------------\n", + "\n", + "### Before you start\n", + "\n", + "Before you run the scripts/python notebook from this project, it's recommended you create a separate [python virtual environment](https://docs.python.org/3/library/venv.html) so that packages installed for the conversion process don't conflict with other packages you may already have installed.\n", + "\n", + "\n", + "### Preinstallation (for Google Colab users)\n", + "\n", + "This notebook requires the use of external python scripts, please run this snippet with the url to the Photonvision repo " + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "7903189e", + "metadata": {}, + "outputs": [], + "source": [ + "GITHUB_URL = \"https://github.com/PhotonVision/photonvision\"\n", + "\n", + "import sys\n", + "from urllib.parse import urlparse\n", + "import subprocess\n", + "\n", + "parsed = urlparse(GITHUB_URL)\n", + " \n", + "if parsed.netloc.lower() not in ['github.com', 'www.github.com']:\n", + " print(\"URL must be GitHub URL!\")\n", + " sys.exit(1)\n", + " \n", + "path_parts = parsed.path.strip('/').split('/')\n", + " \n", + "if len(path_parts) < 2:\n", + " print(\"Invalid Github URL! Must have org and repo in url\")\n", + " sys.exit(1)\n", + " \n", + "org_user = path_parts[0]\n", + "repo = path_parts[1]\n", + " \n", + "if not org_user or not repo:\n", + " print(\"Invalid Github URL! 
Must have org and repo in url\")\n", + " sys.exit(1)\n", + " \n", + "repo_sub_url = f\"{org_user}/{repo}\"\n", + "\n", + "create_onnx_raw_url = f\"https://raw.githubusercontent.com/{repo_sub_url}/refs/heads/main/scripts/rknn-convert-tool/create_onnx.py\"\n", + "auto_install_script_raw_url = f\"https://raw.githubusercontent.com/{repo_sub_url}/refs/heads/main/scripts/rknn-convert-tool/autoinstallrknnapi.py\"\n", + "create_rknn_raw_url = f\"https://raw.githubusercontent.com/{repo_sub_url}/refs/heads/main/scripts/rknn-convert-tool/create_rknn.py\"\n", + "\n", + "scriptUrls = [create_onnx_raw_url, auto_install_script_raw_url, create_rknn_raw_url]\n", + "\n", + "for scriptUrl in scriptUrls:\n", + " try:\n", + " subprocess.run([\"wget\", scriptUrl]).check_returncode()\n", + " except subprocess.CalledProcessError as e:\n", + " print(f\"Failed to run script download for url {scriptUrl}\")\n", + " print(e.output)\n" + ] + }, + { + "cell_type": "markdown", + "id": "d498ed79", + "metadata": {}, + "source": [ + "### Step 1: Convert to ONNX \n", + "\n", + "To convert to ONNX, simply run the `create_onnx.py` script with your model weights, see below" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "0659e15f", + "metadata": {}, + "outputs": [], + "source": [ + "# where version is either yolov5, yolov8, or yolov11, and model_path is the path to your weights file (.pt)\n", + "%run -i create_onnx.py --version yolov8 --model_path weights.pt" + ] + }, + { + "cell_type": "markdown", + "id": "86ff07e6", + "metadata": {}, + "source": [ + "### Step 2: Download RKNN API\n", + "You can either utilize a script to autodetect and install the correct Python library for you, or manually install it\n", + "\n", + "#### Automatic installation\n", + "Simply run the `autoinstallrknnapi.py` script." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "7ec11f96", + "metadata": {}, + "outputs": [], + "source": [ + "%run -i autoinstallrknnapi.py" + ] + }, + { + "cell_type": "markdown", + "id": "8b57fe4d", + "metadata": {}, + "source": [ + "#### Manual installation\n", + "##### How to find the correct link\n", + "Go to https://github.com/airockchip/rknn-toolkit2, and click on `rknn-toolkit2`, then `packages`.\n", + "If you are running an x86_64 CPU (e.g. most Intel and AMD CPUs) click on that, otherwise choose arm64 for ARM-based computers (e.g. M-series Macs or Snapdragon processors). If you aren't sure what CPU you are running, look up your processor architecture information from system settings.\n", + "\n", + "Once you have the correct CPU, you will see multiple packages. The file names will look something like `rknn_toolkit2-2.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl` for example. The numbers after CP correspond to your python version. If you have a Python version 3.10, for example, you want to download a package with cp310 in the name. For 3.8, you'd look for cp38, for 3.7 cp37, and so on.\n", + "\n", + "Then, once you find your desired package, locate the \"Raw\" download button, and download the package (.whl) once you do, run pip install, replacing `rknn_toolkit2.whl` with the path to the wheel file you just downloaded" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "7414b120", + "metadata": {}, + "outputs": [], + "source": [ + "%pip install rknn_toolkit2.whl" + ] + }, + { + "cell_type": "markdown", + "id": "c1db5ef0", + "metadata": {}, + "source": [ + "### Step 3: Convert to RKNN\n", + "\n", + "Simply run the `create_rknn.py` script, replacing the arguments with your own." 
+ ] + }, + { + "cell_type": "markdown", + "id": "f41e431b", + "metadata": {}, + "source": [ + "#### Overview of `create_rknn.py` script\n", + "\n", + "##### RKNN Conversion Script Overview\n", + "\n", + "This script converts a YOLO ONNX model to RKNN format using a set of calibration images. It's designed to work with either:\n", + "\n", + "- A flat directory of images (e.g. `train/images`), **or**\n", + "- A dataset directory containing a `data.yaml` file that defines `train`, `val`, and/or `test` folders.\n", + "\n", + "You can use it from the command line or from inside a Python environment like this notebook.\n", + "\n", + "##### Arguments\n", + "\n", + "| Argument | Type | Description |\n", + "|----------|------|-------------|\n", + "| `--img_dir` (`-d`) | `str` (required) | Path to your image directory. This can either be a folder of images **or** a dataset folder with a `data.yaml`. |\n", + "| `--model_path` (`-m`) | `str` (required) | Path to your YOLO ONNX model, created in Step 1. |\n", + "| `--num_imgs` (`-ni`) | `int` (default: `300`) | Number of images to use for quantization calibration. |\n", + "| `--disable_quantize` (`-dq`) | `bool` (default: `False`) | Set to `True` to skip quantization entirely, not recommended for performance. |\n", + "| `--rknn_output` (`-o`) | `str` (default: `out.rknn`) | File path where the final RKNN model should be saved. |\n", + "| `--img_dataset_txt` (`-ds`) | `str` (default: `imgs.txt`) | File path to store the list of images used during quantization. |\n", + "| `--verbose` (`-vb`) | `bool` (default: `False`) | Enable detailed logging from RKNN during conversion. 
|\n", + "---\n", + "\n", + "\n", + "##### Notes\n", + "\n", + "As this is meant to be used with [PhotonVision](https://photonvision.org) this script only supports RK3588 (found in Orange Pi 5 models), but feel free to modify the script to suit your needs" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "b09656dd", + "metadata": {}, + "outputs": [], + "source": [ + "%run -i create_rknn.py --img_dir /datasets/my_imgs --model_path model.onnx" + ] + }, + { + "cell_type": "markdown", + "id": "5b3a6806", + "metadata": {}, + "source": [ + "And that's it! You should have an RKNN model file ready to deploy on an Orange PI" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": ".venv", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.10.18" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} From d2583665410745cef0f56928864044a4e3310786 Mon Sep 17 00:00:00 2001 From: boomermath Date: Fri, 25 Jul 2025 16:40:19 -0400 Subject: [PATCH 06/17] Make notebook work in colab, also add note for autodownload script --- scripts/rknn-convert-tool/MAINTAINERS.md | 6 ++ .../rknn-convert-tool/rknn_conversion.ipynb | 89 ++++++++++++++++--- 2 files changed, 83 insertions(+), 12 deletions(-) create mode 100644 scripts/rknn-convert-tool/MAINTAINERS.md diff --git a/scripts/rknn-convert-tool/MAINTAINERS.md b/scripts/rknn-convert-tool/MAINTAINERS.md new file mode 100644 index 0000000000..9a95f77ef3 --- /dev/null +++ b/scripts/rknn-convert-tool/MAINTAINERS.md @@ -0,0 +1,6 @@ +# Notebook + +For the installation script found in the first cell of the RKNN conversion +notebook, please make sure that `GITHUB_URL` is the current url of the repository, and `SCRIPTS_FOLDER` +is the path to the `create_onnx.py`, 
`autoinstallrknnapi.py`, `create_rknn.py` starting from the repo's root directory. +Make sure that all scripts required for the notebook are in the same folder. \ No newline at end of file diff --git a/scripts/rknn-convert-tool/rknn_conversion.ipynb b/scripts/rknn-convert-tool/rknn_conversion.ipynb index 025b8e168c..4545182570 100644 --- a/scripts/rknn-convert-tool/rknn_conversion.ipynb +++ b/scripts/rknn-convert-tool/rknn_conversion.ipynb @@ -22,7 +22,7 @@ "\n", "### Preinstallation (for Google Colab users)\n", "\n", - "This notebook requires the use of external python scripts, please run this snippet with the url to the Photonvision repo " + "This notebook requires the use of external python scripts, please run this snippet with the URL (`GITHUB_URL`) to the Photonvision repo, if not provided already." ] }, { @@ -33,6 +33,7 @@ "outputs": [], "source": [ "GITHUB_URL = \"https://github.com/PhotonVision/photonvision\"\n", + "SCRIPTS_FOLDER = \"/scripts/rknn-convert-tool\"\n", "\n", "import sys\n", "from urllib.parse import urlparse\n", @@ -58,10 +59,11 @@ " sys.exit(1)\n", " \n", "repo_sub_url = f\"{org_user}/{repo}\"\n", + "folder_url = SCRIPTS_FOLDER.strip(\"/\")\n", "\n", - "create_onnx_raw_url = f\"https://raw.githubusercontent.com/{repo_sub_url}/refs/heads/main/scripts/rknn-convert-tool/create_onnx.py\"\n", - "auto_install_script_raw_url = f\"https://raw.githubusercontent.com/{repo_sub_url}/refs/heads/main/scripts/rknn-convert-tool/autoinstallrknnapi.py\"\n", - "create_rknn_raw_url = f\"https://raw.githubusercontent.com/{repo_sub_url}/refs/heads/main/scripts/rknn-convert-tool/create_rknn.py\"\n", + "create_onnx_raw_url = f\"https://raw.githubusercontent.com/{repo_sub_url}/refs/heads/main/{folder_url}/create_onnx.py\"\n", + "auto_install_script_raw_url = f\"https://raw.githubusercontent.com/{repo_sub_url}/refs/heads/main/{folder_url}/autoinstallrknnapi.py\"\n", + "create_rknn_raw_url = 
f\"https://raw.githubusercontent.com/{repo_sub_url}/refs/heads/main/{folder_url}/create_rknn.py\"\n", "\n", "scriptUrls = [create_onnx_raw_url, auto_install_script_raw_url, create_rknn_raw_url]\n", "\n", @@ -83,6 +85,33 @@ "To convert to ONNX, simply run the `create_onnx.py` script with your model weights, see below" ] }, + { + "metadata": {}, + "cell_type": "markdown", + "source": [ + "#### *Notice for Colab users*\n", + "\n", + "Google Colab comes with an incompatible version of Numpy installed. To fix this, please run the following cells below and **restart your session** when prompted." + ], + "id": "d68be4aba4d3022b" + }, + { + "metadata": {}, + "cell_type": "code", + "source": [ + "%pip uninstall numpy -y\n", + "%pip install \"numpy>=1.23.0,<2.0.0\"" + ], + "id": "de0310a3e4401233", + "outputs": [], + "execution_count": null + }, + { + "metadata": {}, + "cell_type": "markdown", + "source": "Then, simply run the `create_onnx.py` script to convert your `.pt` weights", + "id": "341c6ff84cb88885" + }, { "cell_type": "code", "execution_count": null, @@ -91,7 +120,7 @@ "outputs": [], "source": [ "# where version is either yolov5, yolov8, or yolov11, and model_path is the path to your weights file (.pt)\n", - "%run -i create_onnx.py --version yolov8 --model_path weights.pt" + "%run create_onnx.py --version yolov8 --model_path weights.pt" ] }, { @@ -112,9 +141,7 @@ "id": "7ec11f96", "metadata": {}, "outputs": [], - "source": [ - "%run -i autoinstallrknnapi.py" - ] + "source": "%run autoinstallrknnapi.py" }, { "cell_type": "markdown", @@ -183,18 +210,56 @@ "\n", "##### Notes\n", "\n", - "As this is meant to be used with [PhotonVision](https://photonvision.org) this script only supports RK3588 (found in Orange Pi 5 models), but feel free to modify the script to suit your needs" + "As this is meant to be used with [PhotonVision](https://photonvision.org) this script only allows the target platform to be RK3588 (found in Orange Pi 5 models), but feel free to modify the 
script to suit your needs" ] }, + { + "metadata": {}, + "cell_type": "markdown", + "source": [ + "### Quantization Note\n", + "\n", + "When performing quantization, it is critical to provide representative images of the objects or scenes you are trying to detect. These images are used to calibrate the model’s internal activations and greatly influence the final performance.\n", + "\n", + "It is recommended to use 300–500 representative images that reflect the real-world input your model will encounter. As the old saying goes, *quality* over quantity.\n", + "\n", + "Quantization will cause some loss in model accuracy. However, if your calibration images are chosen wisely, this accuracy drop should be minimal and acceptable. If the sampled images are too uniform or unrelated, your quantized model's performance may worsen significantly.\n", + "\n", + "The script will automatically sample representative images randomly from the provided dataset. While this usually works well, please verify that the dataset contains diverse and relevant examples of your target objects. As a reminder, the images used to quantize the model are stored in the text file specified by `--img_dataset_txt`.\n" + ], + "id": "5e56b2f64bf6e85f" + }, + { + "metadata": {}, + "cell_type": "markdown", + "source": [ + "### Optional: Download a dataset from Roboflow for quantization\n", + "\n", + "Please run the below code to download a dataset from roboflow if you do not have an images to use for quantization. Feel free to replace the link in quotes with a link to your own dataset." 
+ ], + "id": "93e0d0622df170e" + }, + { + "metadata": {}, + "cell_type": "code", + "outputs": [], + "execution_count": null, + "source": "%wget -O roboflow.zip \"https://universe.roboflow.com/ds/FaF3HbDmF7?key=iMoJR25O9H\" && unzip roboflow.zip -d datasets && rm roboflow.zip", + "id": "8bf75c9dcb328c84" + }, + { + "metadata": {}, + "cell_type": "markdown", + "source": "Once you have your dataset prepared, run the below script to quantize and convert your generated ONNX model from Step 1.", + "id": "81af402f3a94679a" + }, { "cell_type": "code", "execution_count": null, "id": "b09656dd", "metadata": {}, "outputs": [], - "source": [ - "%run -i create_rknn.py --img_dir /datasets/my_imgs --model_path model.onnx" - ] + "source": "%run create_rknn.py --img_dir ./datasets --model_path weights.onnx" }, { "cell_type": "markdown", From f0ed73c765c935d13466639713a4331d8425e546 Mon Sep 17 00:00:00 2001 From: boomermath Date: Fri, 25 Jul 2025 16:52:06 -0400 Subject: [PATCH 07/17] make manual more obvious --- scripts/rknn-convert-tool/rknn_conversion.ipynb | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/rknn-convert-tool/rknn_conversion.ipynb b/scripts/rknn-convert-tool/rknn_conversion.ipynb index 4545182570..120482c903 100644 --- a/scripts/rknn-convert-tool/rknn_conversion.ipynb +++ b/scripts/rknn-convert-tool/rknn_conversion.ipynb @@ -148,7 +148,7 @@ "id": "8b57fe4d", "metadata": {}, "source": [ - "#### Manual installation\n", + "#### Manual installation (if automatic doesn't work)\n", "##### How to find the correct link\n", "Go to https://github.com/airockchip/rknn-toolkit2, and click on `rknn-toolkit2`, then `packages`.\n", "If you are running an x86_64 CPU (e.g. most Intel and AMD CPUs) click on that, otherwise choose arm64 for ARM-based computers (e.g. M-series Macs or Snapdragon processors). 
If you aren't sure what CPU you are running, look up your processor architecture information from system settings.\n", From f6054698241be7d8615ff3417781f272fc494e3b Mon Sep 17 00:00:00 2001 From: boomermath Date: Sun, 27 Jul 2025 13:57:09 -0400 Subject: [PATCH 08/17] Update notebook --- scripts/rknn-convert-tool/MAINTAINERS.md | 6 +- .../rknn-convert-tool/rknn_conversion.ipynb | 136 +++++++++--------- 2 files changed, 69 insertions(+), 73 deletions(-) diff --git a/scripts/rknn-convert-tool/MAINTAINERS.md b/scripts/rknn-convert-tool/MAINTAINERS.md index 9a95f77ef3..a986e505a4 100644 --- a/scripts/rknn-convert-tool/MAINTAINERS.md +++ b/scripts/rknn-convert-tool/MAINTAINERS.md @@ -1,6 +1,8 @@ # Notebook For the installation script found in the first cell of the RKNN conversion -notebook, please make sure that `GITHUB_URL` is the current url of the repository, and `SCRIPTS_FOLDER` -is the path to the `create_onnx.py`, `autoinstallrknnapi.py`, `create_rknn.py` starting from the repo's root directory. +notebook, please make sure that `GITHUB_URL` is a permalink to the repository with the commit that has the +RKNN scripts, and the `SCRIPTS_FOLDER` is the path to the `create_onnx.py`, `autoinstallrknnapi.py`, `create_rknn.py` +starting from the repo's root directory. + Make sure that all scripts required for the notebook are in the same folder. 
\ No newline at end of file diff --git a/scripts/rknn-convert-tool/rknn_conversion.ipynb b/scripts/rknn-convert-tool/rknn_conversion.ipynb index 120482c903..a0e464279f 100644 --- a/scripts/rknn-convert-tool/rknn_conversion.ipynb +++ b/scripts/rknn-convert-tool/rknn_conversion.ipynb @@ -1,11 +1,5 @@ { "cells": [ - { - "cell_type": "markdown", - "id": "63e2692b", - "metadata": {}, - "source": [] - }, { "cell_type": "markdown", "id": "bb5367ce", @@ -17,12 +11,11 @@ "\n", "### Before you start\n", "\n", - "Before you run the scripts/python notebook from this project, it's recommended you create a separate [python virtual environment](https://docs.python.org/3/library/venv.html) so that packages installed for the conversion process don't conflict with other packages you may already have installed.\n", + "Before you run the scripts or Python notebook from this project, it is recommended that you create a separate [Python virtual environment](https://docs.python.org/3/library/venv.html) so that the packages installed for the conversion process do not conflict with other packages you may already have installed.\n", "\n", + "## Preinstallation — Important Setup for Google Colab Users\n", "\n", - "### Preinstallation (for Google Colab users)\n", - "\n", - "This notebook requires the use of external python scripts, please run this snippet with the URL (`GITHUB_URL`) to the Photonvision repo, if not provided already." + "This notebook requires the use of external Python scripts. Please run this snippet with the URL (GITHUB_URL) to the Photonvision repository, if it has not already been provided." 
] }, { @@ -32,7 +25,7 @@ "metadata": {}, "outputs": [], "source": [ - "GITHUB_URL = \"https://github.com/PhotonVision/photonvision\"\n", + "GITHUB_URL = \"https://github.com/boomermath/photonvision_rknn_fork/tree/f0ed73c765c935d13466639713a4331d8425e546\"\n", "SCRIPTS_FOLDER = \"/scripts/rknn-convert-tool\"\n", "\n", "import sys\n", @@ -40,56 +33,56 @@ "import subprocess\n", "\n", "parsed = urlparse(GITHUB_URL)\n", - " \n", + "\n", "if parsed.netloc.lower() not in ['github.com', 'www.github.com']:\n", - " print(\"URL must be GitHub URL!\")\n", - " sys.exit(1)\n", - " \n", + " print(\"URL must be a GitHub URL!\")\n", + " sys.exit(1)\n", + "\n", "path_parts = parsed.path.strip('/').split('/')\n", - " \n", + "\n", "if len(path_parts) < 2:\n", - " print(\"Invalid Github URL! Must have org and repo in url\")\n", - " sys.exit(1)\n", - " \n", + " print(\"Invalid GitHub URL! Must have org and repo in URL\")\n", + " sys.exit(1)\n", + "\n", "org_user = path_parts[0]\n", "repo = path_parts[1]\n", - " \n", + "\n", "if not org_user or not repo:\n", - " print(\"Invalid Github URL! Must have org and repo in url\")\n", - " sys.exit(1)\n", - " \n", + " print(\"Invalid GitHub URL! 
Must have org and repo in URL\")\n", + " sys.exit(1)\n", + "\n", + "ref = \"refs/heads/main\"\n", + "\n", + "if len(path_parts) >= 4 and path_parts[2].lower() == \"tree\":\n", + " candidate_ref = path_parts[3]\n", + " if len(candidate_ref) == 40 and all(c in '0123456789abcdef' for c in candidate_ref.lower()):\n", + " ref = candidate_ref\n", + " else:\n", + " ref = f\"refs/heads/{candidate_ref}\"\n", + "\n", "repo_sub_url = f\"{org_user}/{repo}\"\n", "folder_url = SCRIPTS_FOLDER.strip(\"/\")\n", "\n", - "create_onnx_raw_url = f\"https://raw.githubusercontent.com/{repo_sub_url}/refs/heads/main/{folder_url}/create_onnx.py\"\n", - "auto_install_script_raw_url = f\"https://raw.githubusercontent.com/{repo_sub_url}/refs/heads/main/{folder_url}/autoinstallrknnapi.py\"\n", - "create_rknn_raw_url = f\"https://raw.githubusercontent.com/{repo_sub_url}/refs/heads/main/{folder_url}/create_rknn.py\"\n", + "create_onnx_raw_url = f\"https://raw.githubusercontent.com/{repo_sub_url}/{ref}/{folder_url}/create_onnx.py\"\n", + "auto_install_script_raw_url = f\"https://raw.githubusercontent.com/{repo_sub_url}/{ref}/{folder_url}/autoinstallrknnapi.py\"\n", + "create_rknn_raw_url = f\"https://raw.githubusercontent.com/{repo_sub_url}/{ref}/{folder_url}/create_rknn.py\"\n", "\n", "scriptUrls = [create_onnx_raw_url, auto_install_script_raw_url, create_rknn_raw_url]\n", "\n", "for scriptUrl in scriptUrls:\n", " try:\n", " subprocess.run([\"wget\", scriptUrl]).check_returncode()\n", + " print(f\"Successfully downloaded: {scriptUrl}\")\n", " except subprocess.CalledProcessError as e:\n", - " print(f\"Failed to run script download for url {scriptUrl}\")\n", - " print(e.output)\n" - ] - }, - { - "cell_type": "markdown", - "id": "d498ed79", - "metadata": {}, - "source": [ - "### Step 1: Convert to ONNX \n", - "\n", - "To convert to ONNX, simply run the `create_onnx.py` script with your model weights, see below" + " print(f\"Failed to download script from URL: {scriptUrl}\")\n", + " print(e)\n" ] }, 
{ "metadata": {}, "cell_type": "markdown", "source": [ - "#### *Notice for Colab users*\n", + "#### *Numpy Fix*\n", "\n", "Google Colab comes with an incompatible version of Numpy installed. To fix this, please run the following cells below and **restart your session** when prompted." ], @@ -109,8 +102,12 @@ { "metadata": {}, "cell_type": "markdown", - "source": "Then, simply run the `create_onnx.py` script to convert your `.pt` weights", - "id": "341c6ff84cb88885" + "source": [ + "### Step 1: Convert to ONNX\n", + "\n", + "To convert to ONNX, simply run the `create_onnx.py` script with your model weights as shown below." + ], + "id": "d498ed79" }, { "cell_type": "code", @@ -129,10 +126,12 @@ "metadata": {}, "source": [ "### Step 2: Download RKNN API\n", - "You can either utilize a script to autodetect and install the correct Python library for you, or manually install it\n", + "\n", + "You can either use a script to automatically detect and install the correct RKNN API Python library for you, or install it manually.\n", "\n", "#### Automatic installation\n", - "Simply run the `autoinstallrknnapi.py` script." + "\n", + "Please run the script below. If it does not work, refer to the instructions for manual installation.\n" ] }, { @@ -148,14 +147,15 @@ "id": "8b57fe4d", "metadata": {}, "source": [ - "#### Manual installation (if automatic doesn't work)\n", - "##### How to find the correct link\n", - "Go to https://github.com/airockchip/rknn-toolkit2, and click on `rknn-toolkit2`, then `packages`.\n", - "If you are running an x86_64 CPU (e.g. most Intel and AMD CPUs) click on that, otherwise choose arm64 for ARM-based computers (e.g. M-series Macs or Snapdragon processors). 
If you aren't sure what CPU you are running, look up your processor architecture information from system settings.\n", + "#### Manual Installation (If Automatic Installation Fails)\n", + "Visit the [RKNN Toolkit 2](https://github.com/airockchip/rknn-toolkit2) Github repository, then click on rknn-toolkit2, followed by packages.\n", + "If you are running an x86_64 CPU (e.g., most Intel and AMD processors), select that option; otherwise, choose arm64 for ARM-based computers (e.g., M-series Macs or Snapdragon processors). If you're unsure which CPU you're using, check your system settings for processor architecture information.\n", "\n", - "Once you have the correct CPU, you will see multiple packages. The file names will look something like `rknn_toolkit2-2.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl` for example. The numbers after CP correspond to your python version. If you have a Python version 3.10, for example, you want to download a package with cp310 in the name. For 3.8, you'd look for cp38, for 3.7 cp37, and so on.\n", + "Once you've selected the correct CPU architecture, you'll see multiple packages. The file names will look something like:\n", + "`rknn_toolkit2-2.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl`.\n", + "The numbers after cp correspond to your Python version. For example, if you're using Python 3.10, look for a package with cp310 in the name. For Python 3.8, look for cp38; for Python 3.7, cp37, and so on.\n", "\n", - "Then, once you find your desired package, locate the \"Raw\" download button, and download the package (.whl) once you do, run pip install, replacing `rknn_toolkit2.whl` with the path to the wheel file you just downloaded" + "Once you've found the correct package, click the \"Raw\" button to download the .whl file. 
Then, run the following command in your terminal, replacing rknn_toolkit2.whl with the actual path to the file you downloaded:" ] }, { @@ -175,7 +175,7 @@ "source": [ "### Step 3: Convert to RKNN\n", "\n", - "Simply run the `create_rknn.py` script, replacing the arguments with your own." + "To get started, run the `create_rknn.py` script, replacing the arguments with your own values. Refer to the table below for detailed information on each argument’s purpose and usage." ] }, { @@ -183,34 +183,30 @@ "id": "f41e431b", "metadata": {}, "source": [ - "#### Overview of `create_rknn.py` script\n", - "\n", - "##### RKNN Conversion Script Overview\n", + "#### Overview of the `create_rknn.py` script\n", "\n", "This script converts a YOLO ONNX model to RKNN format using a set of calibration images. It's designed to work with either:\n", "\n", "- A flat directory of images (e.g. `train/images`), **or**\n", "- A dataset directory containing a `data.yaml` file that defines `train`, `val`, and/or `test` folders.\n", "\n", - "You can use it from the command line or from inside a Python environment like this notebook.\n", - "\n", "##### Arguments\n", "\n", - "| Argument | Type | Description |\n", - "|----------|------|-------------|\n", + "| Argument | Type | Description |\n", + "|----------|------|-----------------------------------------------------------------------------------------------------------------|\n", "| `--img_dir` (`-d`) | `str` (required) | Path to your image directory. This can either be a folder of images **or** a dataset folder with a `data.yaml`. |\n", - "| `--model_path` (`-m`) | `str` (required) | Path to your YOLO ONNX model, created in Step 1. |\n", - "| `--num_imgs` (`-ni`) | `int` (default: `300`) | Number of images to use for quantization calibration. |\n", - "| `--disable_quantize` (`-dq`) | `bool` (default: `False`) | Set to `True` to skip quantization entirely, not recommended for performance. 
|\n", - "| `--rknn_output` (`-o`) | `str` (default: `out.rknn`) | File path where the final RKNN model should be saved. |\n", - "| `--img_dataset_txt` (`-ds`) | `str` (default: `imgs.txt`) | File path to store the list of images used during quantization. |\n", - "| `--verbose` (`-vb`) | `bool` (default: `False`) | Enable detailed logging from RKNN during conversion. |\n", + "| `--model_path` (`-m`) | `str` (required) | Path to your YOLO ONNX model, created in Step 1. |\n", + "| `--num_imgs` (`-ni`) | `int` (default: `300`) | Number of images to use for quantization calibration. |\n", + "| `--disable_quantize` (`-dq`) | `bool` (default: `False`) | Set to `True` to skip quantization entirely, not recommended for performance. |\n", + "| `--rknn_output` (`-o`) | `str` (default: `out.rknn`) | File path where the final RKNN model should be saved. |\n", + "| `--img_dataset_txt` (`-ds`) | `str` (default: `imgs.txt`) | File path to store the list of images used during quantization. |\n", + "| `--verbose` (`-vb`) | `bool` (default: `False`) | Enable detailed logging from the RKNN API during conversion. |\n", "---\n", "\n", "\n", - "##### Notes\n", + "##### Note\n", "\n", - "As this is meant to be used with [PhotonVision](https://photonvision.org) this script only allows the target platform to be RK3588 (found in Orange Pi 5 models), but feel free to modify the script to suit your needs" + "This script is designed for use with [PhotonVision](https://photonvision.org), and by default sets the target platform for RKNN conversion to `RK3588`, a chipset commonly found in many variants of the Orange Pi 5 series (e.g., Orange Pi 5, 5 Pro, 5 Plus, 5 Max, etc.). You may modify the `TARGET_PLATFORM` value in the `create_onnx.py` script to match your specific hardware or deployment requirements if necessary." ] }, { @@ -221,7 +217,7 @@ "\n", "When performing quantization, it is critical to provide representative images of the objects or scenes you are trying to detect. 
These images are used to calibrate the model’s internal activations and greatly influence the final performance.\n", "\n", - "It is recommended to use 300–500 representative images that reflect the real-world input your model will encounter. As the old saying goes, *quality* over quantity.\n", + "It is recommended to use 300–500 representative images that reflect the real-world input your model will encounter. As the old saying goes, it’s quality over quantity — having a diverse, relevant set matters more than simply having many images.\n", "\n", "Quantization will cause some loss in model accuracy. However, if your calibration images are chosen wisely, this accuracy drop should be minimal and acceptable. If the sampled images are too uniform or unrelated, your quantized model's performance may worsen significantly.\n", "\n", @@ -235,7 +231,7 @@ "source": [ "### Optional: Download a dataset from Roboflow for quantization\n", "\n", - "Please run the below code to download a dataset from roboflow if you do not have an images to use for quantization. Feel free to replace the link in quotes with a link to your own dataset." + "Please run the code below to download a dataset from Roboflow if you do not have images available for quantization. You may replace the URL in quotes with a link to your own dataset.\n" ], "id": "93e0d0622df170e" }, @@ -250,7 +246,7 @@ { "metadata": {}, "cell_type": "markdown", - "source": "Once you have your dataset prepared, run the below script to quantize and convert your generated ONNX model from Step 1.", + "source": "Once your dataset is prepared, run the script below to quantize and convert the ONNX model you generated in Step 1.\n", "id": "81af402f3a94679a" }, { @@ -265,9 +261,7 @@ "cell_type": "markdown", "id": "5b3a6806", "metadata": {}, - "source": [ - "And that's it! You should have an RKNN model file ready to deploy on an Orange PI" - ] + "source": "And that’s it! Your RKNN model file is now ready for deployment on an Orange Pi." 
} ], "metadata": { From 08117cc111c61264272188c8a1168b4c8e1bdb3b Mon Sep 17 00:00:00 2001 From: boomermath Date: Sun, 27 Jul 2025 14:37:17 -0400 Subject: [PATCH 09/17] Update notebook with roboflow instructions/better formatting, update autoinstallrknnapi.py with commit permalinks --- scripts/rknn-convert-tool/MAINTAINERS.md | 9 +- .../rknn-convert-tool/autoinstallrknnapi.py | 28 ++-- .../rknn-convert-tool/rknn_conversion.ipynb | 135 +++++++++++------- 3 files changed, 100 insertions(+), 72 deletions(-) diff --git a/scripts/rknn-convert-tool/MAINTAINERS.md b/scripts/rknn-convert-tool/MAINTAINERS.md index a986e505a4..5e4a67a698 100644 --- a/scripts/rknn-convert-tool/MAINTAINERS.md +++ b/scripts/rknn-convert-tool/MAINTAINERS.md @@ -1,8 +1,7 @@ # Notebook -For the installation script found in the first cell of the RKNN conversion -notebook, please make sure that `GITHUB_URL` is a permalink to the repository with the commit that has the -RKNN scripts, and the `SCRIPTS_FOLDER` is the path to the `create_onnx.py`, `autoinstallrknnapi.py`, `create_rknn.py` -starting from the repo's root directory. +In the first cell of the RKNN conversion notebook, the installation script uses a structured list of dictionaries to define the download URLs and filenames for required scripts. Each dictionary includes a `url` (a permalink to a specific commit) and the corresponding `filename`. -Make sure that all scripts required for the notebook are in the same folder. \ No newline at end of file +Please ensure that all URLs in this array use permalinks—that is, links pointing to a specific commit hash rather than a branch name (e.g., main). This guarantees that the correct version of each script is always fetched, and prevents unexpected changes if the repository is updated in the future. + +You typically won’t need to update these permalinks unless one of the referenced scripts is modified. In that case, update the commit hash in the URLs accordingly. 
\ No newline at end of file diff --git a/scripts/rknn-convert-tool/autoinstallrknnapi.py b/scripts/rknn-convert-tool/autoinstallrknnapi.py index e711154af8..b6902353c2 100644 --- a/scripts/rknn-convert-tool/autoinstallrknnapi.py +++ b/scripts/rknn-convert-tool/autoinstallrknnapi.py @@ -12,22 +12,22 @@ wheel_versions = { "arm64": { - "3.6": "https://github.com/airockchip/rknn-toolkit2/raw/refs/heads/master/rknn-toolkit2/packages/arm64/rknn_toolkit2-2.3.2-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", - "3.7": "https://github.com/airockchip/rknn-toolkit2/raw/refs/heads/master/rknn-toolkit2/packages/arm64/rknn_toolkit2-2.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", - "3.8": "https://github.com/airockchip/rknn-toolkit2/raw/refs/heads/master/rknn-toolkit2/packages/arm64/rknn_toolkit2-2.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", - "3.9": "https://github.com/airockchip/rknn-toolkit2/raw/refs/heads/master/rknn-toolkit2/packages/arm64/rknn_toolkit2-2.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", - "3.10": "https://github.com/airockchip/rknn-toolkit2/raw/refs/heads/master/rknn-toolkit2/packages/arm64/rknn_toolkit2-2.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", - "3.11": "https://github.com/airockchip/rknn-toolkit2/raw/refs/heads/master/rknn-toolkit2/packages/arm64/rknn_toolkit2-2.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", - "3.12": "https://github.com/airockchip/rknn-toolkit2/raw/refs/heads/master/rknn-toolkit2/packages/arm64/rknn_toolkit2-2.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", + "3.6": "https://github.com/airockchip/rknn-toolkit2/raw/42aa1d426c0a9e0869b6374edba009f7208a1926/rknn-toolkit2/packages/arm64/rknn_toolkit2-2.3.2-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", + "3.7": 
"https://github.com/airockchip/rknn-toolkit2/raw/42aa1d426c0a9e0869b6374edba009f7208a1926/rknn-toolkit2/packages/arm64/rknn_toolkit2-2.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", + "3.8": "https://github.com/airockchip/rknn-toolkit2/raw/42aa1d426c0a9e0869b6374edba009f7208a1926/rknn-toolkit2/packages/arm64/rknn_toolkit2-2.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", + "3.9": "https://github.com/airockchip/rknn-toolkit2/raw/42aa1d426c0a9e0869b6374edba009f7208a1926/rknn-toolkit2/packages/arm64/rknn_toolkit2-2.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", + "3.10": "https://github.com/airockchip/rknn-toolkit2/raw/42aa1d426c0a9e0869b6374edba009f7208a1926/rknn-toolkit2/packages/arm64/rknn_toolkit2-2.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", + "3.11": "https://github.com/airockchip/rknn-toolkit2/raw/42aa1d426c0a9e0869b6374edba009f7208a1926/rknn-toolkit2/packages/arm64/rknn_toolkit2-2.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", + "3.12": "https://github.com/airockchip/rknn-toolkit2/raw/42aa1d426c0a9e0869b6374edba009f7208a1926/rknn-toolkit2/packages/arm64/rknn_toolkit2-2.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", }, "x86_64": { - "3.6": "https://github.com/airockchip/rknn-toolkit2/raw/refs/heads/master/rknn-toolkit2/packages/x86_64/rknn_toolkit2-2.3.2-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", - "3.7": "https://github.com/airockchip/rknn-toolkit2/raw/refs/heads/master/rknn-toolkit2/packages/x86_64/rknn_toolkit2-2.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", - "3.8": "https://github.com/airockchip/rknn-toolkit2/raw/refs/heads/master/rknn-toolkit2/packages/x86_64/rknn_toolkit2-2.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", - "3.9": 
"https://github.com/airockchip/rknn-toolkit2/raw/refs/heads/master/rknn-toolkit2/packages/x86_64/rknn_toolkit2-2.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", - "3.10": "https://github.com/airockchip/rknn-toolkit2/raw/refs/heads/master/rknn-toolkit2/packages/x86_64/rknn_toolkit2-2.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", - "3.11": "https://github.com/airockchip/rknn-toolkit2/raw/refs/heads/master/rknn-toolkit2/packages/x86_64/rknn_toolkit2-2.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", - "3.12": "https://github.com/airockchip/rknn-toolkit2/raw/refs/heads/master/rknn-toolkit2/packages/x86_64/rknn_toolkit2-2.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", + "3.6": "https://github.com/airockchip/rknn-toolkit2/raw/42aa1d426c0a9e0869b6374edba009f7208a1926/rknn-toolkit2/packages/x86_64/rknn_toolkit2-2.3.2-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", + "3.7": "https://github.com/airockchip/rknn-toolkit2/raw/42aa1d426c0a9e0869b6374edba009f7208a1926/rknn-toolkit2/packages/x86_64/rknn_toolkit2-2.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", + "3.8": "https://github.com/airockchip/rknn-toolkit2/raw/42aa1d426c0a9e0869b6374edba009f7208a1926/rknn-toolkit2/packages/x86_64/rknn_toolkit2-2.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", + "3.9": "https://github.com/airockchip/rknn-toolkit2/raw/42aa1d426c0a9e0869b6374edba009f7208a1926/rknn-toolkit2/packages/x86_64/rknn_toolkit2-2.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", + "3.10": "https://github.com/airockchip/rknn-toolkit2/raw/42aa1d426c0a9e0869b6374edba009f7208a1926/rknn-toolkit2/packages/x86_64/rknn_toolkit2-2.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", + "3.11": "https://github.com/airockchip/rknn-toolkit2/raw/42aa1d426c0a9e0869b6374edba009f7208a1926/rknn-toolkit2/packages/x86_64/rknn_toolkit2-2.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", + 
"3.12": "https://github.com/airockchip/rknn-toolkit2/raw/42aa1d426c0a9e0869b6374edba009f7208a1926/rknn-toolkit2/packages/x86_64/rknn_toolkit2-2.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", }, } diff --git a/scripts/rknn-convert-tool/rknn_conversion.ipynb b/scripts/rknn-convert-tool/rknn_conversion.ipynb index a0e464279f..f3fb781385 100644 --- a/scripts/rknn-convert-tool/rknn_conversion.ipynb +++ b/scripts/rknn-convert-tool/rknn_conversion.ipynb @@ -15,7 +15,7 @@ "\n", "## Preinstallation — Important Setup for Google Colab Users\n", "\n", - "This notebook requires the use of external Python scripts. Please run this snippet with the URL (GITHUB_URL) to the Photonvision repository, if it has not already been provided." + "This notebook requires the use of external Python scripts. Please run the installation script below to import these external scripts." ] }, { @@ -25,56 +25,33 @@ "metadata": {}, "outputs": [], "source": [ - "GITHUB_URL = \"https://github.com/boomermath/photonvision_rknn_fork/tree/f0ed73c765c935d13466639713a4331d8425e546\"\n", - "SCRIPTS_FOLDER = \"/scripts/rknn-convert-tool\"\n", - "\n", - "import sys\n", - "from urllib.parse import urlparse\n", "import subprocess\n", - "\n", - "parsed = urlparse(GITHUB_URL)\n", - "\n", - "if parsed.netloc.lower() not in ['github.com', 'www.github.com']:\n", - " print(\"URL must be a GitHub URL!\")\n", - " sys.exit(1)\n", - "\n", - "path_parts = parsed.path.strip('/').split('/')\n", - "\n", - "if len(path_parts) < 2:\n", - " print(\"Invalid GitHub URL! Must have org and repo in URL\")\n", - " sys.exit(1)\n", - "\n", - "org_user = path_parts[0]\n", - "repo = path_parts[1]\n", - "\n", - "if not org_user or not repo:\n", - " print(\"Invalid GitHub URL! 
Must have org and repo in URL\")\n", - " sys.exit(1)\n", - "\n", - "ref = \"refs/heads/main\"\n", - "\n", - "if len(path_parts) >= 4 and path_parts[2].lower() == \"tree\":\n", - " candidate_ref = path_parts[3]\n", - " if len(candidate_ref) == 40 and all(c in '0123456789abcdef' for c in candidate_ref.lower()):\n", - " ref = candidate_ref\n", - " else:\n", - " ref = f\"refs/heads/{candidate_ref}\"\n", - "\n", - "repo_sub_url = f\"{org_user}/{repo}\"\n", - "folder_url = SCRIPTS_FOLDER.strip(\"/\")\n", - "\n", - "create_onnx_raw_url = f\"https://raw.githubusercontent.com/{repo_sub_url}/{ref}/{folder_url}/create_onnx.py\"\n", - "auto_install_script_raw_url = f\"https://raw.githubusercontent.com/{repo_sub_url}/{ref}/{folder_url}/autoinstallrknnapi.py\"\n", - "create_rknn_raw_url = f\"https://raw.githubusercontent.com/{repo_sub_url}/{ref}/{folder_url}/create_rknn.py\"\n", - "\n", - "scriptUrls = [create_onnx_raw_url, auto_install_script_raw_url, create_rknn_raw_url]\n", - "\n", - "for scriptUrl in scriptUrls:\n", + "import os\n", + "\n", + "# Define scripts with URLs and inferred filenames\n", + "# DO NOT modify the filenames, just the urls\n", + "scripts = [\n", + " {\n", + " \"url\": \"https://raw.githubusercontent.com/boomermath/photonvision_rknn_fork/f6054698241be7d8615ff3417781f272fc494e3b/scripts/rknn-convert-tool/create_onnx.py\",\n", + " \"filename\": \"create_onnx.py\" # CREATE_ONNX_SCRIPT\n", + " },\n", + " {\n", + " \"url\": \"https://raw.githubusercontent.com/boomermath/photonvision_rknn_fork/f6054698241be7d8615ff3417781f272fc494e3b/scripts/rknn-convert-tool/autoinstallrknnapi.py\",\n", + " \"filename\": \"autoinstallrknnapi.py\" # AUTO_INSTALL_SCRIPT\n", + " },\n", + " {\n", + " \"url\": \"https://raw.githubusercontent.com/boomermath/photonvision_rknn_fork/f6054698241be7d8615ff3417781f272fc494e3b/scripts/rknn-convert-tool/create_rknn.py\",\n", + " \"filename\": \"create_rknn.py\" # CREATE_RKNN_SCRIPT\n", + " }\n", + "]\n", + "\n", + "# Download each 
script\n", + "for script in scripts:\n", " try:\n", - " subprocess.run([\"wget\", scriptUrl]).check_returncode()\n", - " print(f\"Successfully downloaded: {scriptUrl}\")\n", + " subprocess.run([\"wget\", script[\"url\"], \"-O\", script[\"filename\"]]).check_returncode()\n", + " print(f\"Successfully downloaded: {script['filename']}\")\n", " except subprocess.CalledProcessError as e:\n", - " print(f\"Failed to download script from URL: {scriptUrl}\")\n", + " print(f\"Failed to download script from URL: {script['url']}\")\n", " print(e)\n" ] }, @@ -105,7 +82,7 @@ "source": [ "### Step 1: Convert to ONNX\n", "\n", - "To convert to ONNX, simply run the `create_onnx.py` script with your model weights as shown below." + "To convert to ONNX, simply run the `create_onnx.py` script, providing the path to your model weights and specifying the model version, as shown below." ], "id": "d498ed79" }, @@ -231,7 +208,55 @@ "source": [ "### Optional: Download a dataset from Roboflow for quantization\n", "\n", - "Please run the code below to download a dataset from Roboflow if you do not have images available for quantization. 
You may replace the URL in quotes with a link to your own dataset.\n" + "Please follow the instructions below if you do not already have a set of images or a dataset available for quantization.\n", + "\n", + "### How to Obtain a Dataset URL from Roboflow\n", + "\n", + "If you do not already have a dataset containing the objects you want to detect, follow the steps below to download one from Roboflow Universe.\n", + "\n", + "---\n", + "\n", + "#### Step 1: Search for a Dataset\n", + "\n", + "Go to [Roboflow Universe](https://universe.roboflow.com) and use the search bar to locate a dataset relevant to what you want to detect.\n", + "**Note:** The dataset must include the classes or object types you intend to detect.\n", + "\n", + "---\n", + "\n", + "#### Step 2: Access the Dataset Tab\n", + "\n", + "After selecting a suitable project, navigate to the **Dataset** tab. Click the **\"Download Dataset\"** button. A prompt will appear with several options, including:\n", + "\n", + "- Train a model with this dataset\n", + "- Train from a portion of this dataset\n", + "- Download dataset\n", + "\n", + "Select **Download dataset**.\n", + "\n", + "---\n", + "\n", + "#### Step 3: Choose Format and View Download Code\n", + "\n", + "- Under **Image and Annotation Format**, choose the version of YOLO you are using:\n", + " - For **YOLOv5**, choose `YOLOv5 PyTorch`\n", + " - For **YOLOv8**, choose `YOLOv8`\n", + " - For **YOLOv11**, choose `YOLOv11`\n", + "- If multiple annotation formats are listed for your model, always select the one ending in **\"PyTorch\"**.\n", + "\n", + "Then, under **Download Options**, click **\"Show Download Code\"** and continue.\n", + "\n", + "In the resulting screen, you will see three tabs:\n", + "- **Jupyter**\n", + "- **Terminal**\n", + "- **Raw URL**\n", + "\n", + "Select the **Terminal** tab and copy the provided command.\n", + "\n", + "---\n", + "\n", + "#### Step 4: Paste and Run\n", + "\n", + "Paste the copied command into the notebook cell 
below and run it. This will download and extract the dataset into your environment, making it ready for use in the quantization process.\n" ], "id": "93e0d0622df170e" }, @@ -240,14 +265,18 @@ "cell_type": "code", "outputs": [], "execution_count": null, - "source": "%wget -O roboflow.zip \"https://universe.roboflow.com/ds/FaF3HbDmF7?key=iMoJR25O9H\" && unzip roboflow.zip -d datasets && rm roboflow.zip", + "source": "%curl -L \"https://universe.roboflow.com/ds/FaF3HbDmF7?key=iMoJR25O9H\" > roboflow.zip; unzip roboflow.zip; rm roboflow.zip", "id": "8bf75c9dcb328c84" }, { "metadata": {}, "cell_type": "markdown", - "source": "Once your dataset is prepared, run the script below to quantize and convert the ONNX model you generated in Step 1.\n", - "id": "81af402f3a94679a" + "source": [ + "### RKNN Conversion Script\n", + "\n", + "To quantize and convert the ONNX model to RKNN format, run the `create_rknn.py` script with the appropriate arguments. The `--model_path` argument should point to your exported ONNX model from Step 1, and `--img_dir` must reference a valid directory containing either a dataset or a set of images to be used for quantization.\n" + ], + "id": "72bad9cac670f1ab" }, { "cell_type": "code", From 4d9bb10c102e5b4cd178b53826508f1c8cf93c7d Mon Sep 17 00:00:00 2001 From: boomermath Date: Sun, 27 Jul 2025 14:39:07 -0400 Subject: [PATCH 10/17] Update permalinks --- scripts/rknn-convert-tool/rknn_conversion.ipynb | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/scripts/rknn-convert-tool/rknn_conversion.ipynb b/scripts/rknn-convert-tool/rknn_conversion.ipynb index f3fb781385..8d14740dec 100644 --- a/scripts/rknn-convert-tool/rknn_conversion.ipynb +++ b/scripts/rknn-convert-tool/rknn_conversion.ipynb @@ -29,18 +29,18 @@ "import os\n", "\n", "# Define scripts with URLs and inferred filenames\n", - "# DO NOT modify the filenames, just the urls\n", + "# DO NOT modify the filenames\n", "scripts = [\n", " {\n", - " \"url\": 
\"https://raw.githubusercontent.com/boomermath/photonvision_rknn_fork/f6054698241be7d8615ff3417781f272fc494e3b/scripts/rknn-convert-tool/create_onnx.py\",\n", + " \"url\": \"https://raw.githubusercontent.com/boomermath/photonvision_rknn_fork/08117cc111c61264272188c8a1168b4c8e1bdb3b/scripts/rknn-convert-tool/create_onnx.py\",\n", " \"filename\": \"create_onnx.py\" # CREATE_ONNX_SCRIPT\n", " },\n", " {\n", - " \"url\": \"https://raw.githubusercontent.com/boomermath/photonvision_rknn_fork/f6054698241be7d8615ff3417781f272fc494e3b/scripts/rknn-convert-tool/autoinstallrknnapi.py\",\n", + " \"url\": \"https://raw.githubusercontent.com/boomermath/photonvision_rknn_fork/08117cc111c61264272188c8a1168b4c8e1bdb3b/scripts/rknn-convert-tool/autoinstallrknnapi.py\",\n", " \"filename\": \"autoinstallrknnapi.py\" # AUTO_INSTALL_SCRIPT\n", " },\n", " {\n", - " \"url\": \"https://raw.githubusercontent.com/boomermath/photonvision_rknn_fork/f6054698241be7d8615ff3417781f272fc494e3b/scripts/rknn-convert-tool/create_rknn.py\",\n", + " \"url\": \"https://raw.githubusercontent.com/boomermath/photonvision_rknn_fork/08117cc111c61264272188c8a1168b4c8e1bdb3b/scripts/rknn-convert-tool/create_rknn.py\",\n", " \"filename\": \"create_rknn.py\" # CREATE_RKNN_SCRIPT\n", " }\n", "]\n", From 620722fb10d043d8d796c8d854dc69bebde8ec0d Mon Sep 17 00:00:00 2001 From: boomermath Date: Sun, 27 Jul 2025 14:40:27 -0400 Subject: [PATCH 11/17] remove dividers (bleh) --- scripts/rknn-convert-tool/rknn_conversion.ipynb | 14 +------------- 1 file changed, 1 insertion(+), 13 deletions(-) diff --git a/scripts/rknn-convert-tool/rknn_conversion.ipynb b/scripts/rknn-convert-tool/rknn_conversion.ipynb index 8d14740dec..800e1fec67 100644 --- a/scripts/rknn-convert-tool/rknn_conversion.ipynb +++ b/scripts/rknn-convert-tool/rknn_conversion.ipynb @@ -208,21 +208,13 @@ "source": [ "### Optional: Download a dataset from Roboflow for quantization\n", "\n", - "Please follow the instructions below if you do not already have a set 
of images or a dataset available for quantization.\n", - "\n", - "### How to Obtain a Dataset URL from Roboflow\n", - "\n", - "If you do not already have a dataset containing the objects you want to detect, follow the steps below to download one from Roboflow Universe.\n", - "\n", - "---\n", + "If you do not already have a dataset or set of images containing the objects you want to detect, follow the steps below to download one from Roboflow Universe.\n", "\n", "#### Step 1: Search for a Dataset\n", "\n", "Go to [Roboflow Universe](https://universe.roboflow.com) and use the search bar to locate a dataset relevant to what you want to detect.\n", "**Note:** The dataset must include the classes or object types you intend to detect.\n", "\n", - "---\n", - "\n", "#### Step 2: Access the Dataset Tab\n", "\n", "After selecting a suitable project, navigate to the **Dataset** tab. Click the **\"Download Dataset\"** button. A prompt will appear with several options, including:\n", @@ -233,8 +225,6 @@ "\n", "Select **Download dataset**.\n", "\n", - "---\n", - "\n", "#### Step 3: Choose Format and View Download Code\n", "\n", "- Under **Image and Annotation Format**, choose the version of YOLO you are using:\n", @@ -252,8 +242,6 @@ "\n", "Select the **Terminal** tab and copy the provided command.\n", "\n", - "---\n", - "\n", "#### Step 4: Paste and Run\n", "\n", "Paste the copied command into the notebook cell below and run it. 
This will download and extract the dataset into your environment, making it ready for use in the quantization process.\n" From 575f394236af743dbb8a2182969d069ec13fb222 Mon Sep 17 00:00:00 2001 From: boomermath Date: Sun, 27 Jul 2025 14:53:32 -0400 Subject: [PATCH 12/17] formatting fixes --- .../rknn-convert-tool/rknn_conversion.ipynb | 19 +++++++++++-------- 1 file changed, 11 insertions(+), 8 deletions(-) diff --git a/scripts/rknn-convert-tool/rknn_conversion.ipynb b/scripts/rknn-convert-tool/rknn_conversion.ipynb index 800e1fec67..13cffdd51d 100644 --- a/scripts/rknn-convert-tool/rknn_conversion.ipynb +++ b/scripts/rknn-convert-tool/rknn_conversion.ipynb @@ -178,10 +178,9 @@ "| `--rknn_output` (`-o`) | `str` (default: `out.rknn`) | File path where the final RKNN model should be saved. |\n", "| `--img_dataset_txt` (`-ds`) | `str` (default: `imgs.txt`) | File path to store the list of images used during quantization. |\n", "| `--verbose` (`-vb`) | `bool` (default: `False`) | Enable detailed logging from the RKNN API during conversion. |\n", - "---\n", "\n", "\n", - "##### Note\n", + "##### *Note*\n", "\n", "This script is designed for use with [PhotonVision](https://photonvision.org), and by default sets the target platform for RKNN conversion to `RK3588`, a chipset commonly found in many variants of the Orange Pi 5 series (e.g., Orange Pi 5, 5 Pro, 5 Plus, 5 Max, etc.). You may modify the `TARGET_PLATFORM` value in the `create_onnx.py` script to match your specific hardware or deployment requirements if necessary." 
] @@ -210,12 +209,12 @@ "\n", "If you do not already have a dataset or set of images containing the objects you want to detect, follow the steps below to download one from Roboflow Universe.\n", "\n", - "#### Step 1: Search for a Dataset\n", + "#### **Step 1: Search for a Dataset**\n", "\n", "Go to [Roboflow Universe](https://universe.roboflow.com) and use the search bar to locate a dataset relevant to what you want to detect.\n", "**Note:** The dataset must include the classes or object types you intend to detect.\n", "\n", - "#### Step 2: Access the Dataset Tab\n", + "#### **Step 2: Access the Dataset Tab**\n", "\n", "After selecting a suitable project, navigate to the **Dataset** tab. Click the **\"Download Dataset\"** button. A prompt will appear with several options, including:\n", "\n", @@ -225,7 +224,7 @@ "\n", "Select **Download dataset**.\n", "\n", - "#### Step 3: Choose Format and View Download Code\n", + "#### **Step 3: Choose Format and View Download Code**\n", "\n", "- Under **Image and Annotation Format**, choose the version of YOLO you are using:\n", " - For **YOLOv5**, choose `YOLOv5 PyTorch`\n", @@ -242,7 +241,7 @@ "\n", "Select the **Terminal** tab and copy the provided command.\n", "\n", - "#### Step 4: Paste and Run\n", + "#### **Step 4: Paste and Run**\n", "\n", "Paste the copied command into the notebook cell below and run it. 
This will download and extract the dataset into your environment, making it ready for use in the quantization process.\n" ], @@ -253,7 +252,7 @@ "cell_type": "code", "outputs": [], "execution_count": null, - "source": "%curl -L \"https://universe.roboflow.com/ds/FaF3HbDmF7?key=iMoJR25O9H\" > roboflow.zip; unzip roboflow.zip; rm roboflow.zip", + "source": "!curl -L \"https://universe.roboflow.com/ds/FaF3HbDmF7?key=iMoJR25O9H\" > roboflow.zip; unzip roboflow.zip; rm roboflow.zip", "id": "8bf75c9dcb328c84" }, { @@ -262,7 +261,11 @@ "source": [ "### RKNN Conversion Script\n", "\n", - "To quantize and convert the ONNX model to RKNN format, run the `create_rknn.py` script with the appropriate arguments. The `--model_path` argument should point to your exported ONNX model from Step 1, and `--img_dir` must reference a valid directory containing either a dataset or a set of images to be used for quantization.\n" + "To quantize and convert the ONNX model to RKNN format, run the `create_rknn.py` script with the appropriate arguments. The `--model_path` argument should point to your exported ONNX model from Step 1, and `--img_dir` must reference a valid directory containing either a dataset or a set of images to be used for quantization.\n", + "\n", + "##### *Note*\n", + "\n", + "If you followed the Roboflow dataset download instructions from the previous section, the dataset will have been extracted to your **current working directory**. 
In that case, you can simply set `--img_dir` to \"`.`\" to reference the current directory.\n" ], "id": "72bad9cac670f1ab" }, From 785aed9f40ccb9a158f350dd5ecf01b0f7256cb9 Mon Sep 17 00:00:00 2001 From: boomermath Date: Sun, 27 Jul 2025 14:55:43 -0400 Subject: [PATCH 13/17] add a lil note --- scripts/rknn-convert-tool/rknn_conversion.ipynb | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/scripts/rknn-convert-tool/rknn_conversion.ipynb b/scripts/rknn-convert-tool/rknn_conversion.ipynb index 13cffdd51d..37663d18af 100644 --- a/scripts/rknn-convert-tool/rknn_conversion.ipynb +++ b/scripts/rknn-convert-tool/rknn_conversion.ipynb @@ -243,7 +243,9 @@ "\n", "#### **Step 4: Paste and Run**\n", "\n", - "Paste the copied command into the notebook cell below and run it. This will download and extract the dataset into your environment, making it ready for use in the quantization process.\n" + "Paste the copied command into the notebook cell below and run it. This will download and extract the dataset into your environment, making it ready for use in the quantization process.\n", + "\n", + "Make sure to prefix the command with \"`!`\" so it executes properly in this Jupyter Notebook environment." 
], "id": "93e0d0622df170e" }, From 2254a529d1cdaedf8558786eb43b6286fc5b2051 Mon Sep 17 00:00:00 2001 From: boomermath Date: Sun, 27 Jul 2025 16:48:25 -0400 Subject: [PATCH 14/17] make preinstallation separate header --- scripts/rknn-convert-tool/rknn_conversion.ipynb | 14 ++++++++++---- 1 file changed, 10 insertions(+), 4 deletions(-) diff --git a/scripts/rknn-convert-tool/rknn_conversion.ipynb b/scripts/rknn-convert-tool/rknn_conversion.ipynb index 37663d18af..f8c8f699a4 100644 --- a/scripts/rknn-convert-tool/rknn_conversion.ipynb +++ b/scripts/rknn-convert-tool/rknn_conversion.ipynb @@ -11,12 +11,18 @@ "\n", "### Before you start\n", "\n", - "Before you run the scripts or Python notebook from this project, it is recommended that you create a separate [Python virtual environment](https://docs.python.org/3/library/venv.html) so that the packages installed for the conversion process do not conflict with other packages you may already have installed.\n", - "\n", - "## Preinstallation — Important Setup for Google Colab Users\n", + "Before you run the scripts or Python notebook from this project, it is recommended that you create a separate [Python virtual environment](https://docs.python.org/3/library/venv.html) so that the packages installed for the conversion process do not conflict with other packages you may already have installed." + ] + }, + { + "metadata": {}, + "cell_type": "markdown", + "source": [ + "### Preinstallation — Important Setup for Google Colab Users\n", "\n", "This notebook requires the use of external Python scripts. Please run the installation script below to import these external scripts." 
- ] + ], + "id": "5f42d0a144caceb6" }, { "cell_type": "code", From 4aeb242ea337a93c0db20afdf7fd85db4467cca3 Mon Sep 17 00:00:00 2001 From: boomermath Date: Sat, 2 Aug 2025 23:48:06 -0400 Subject: [PATCH 15/17] Update scripts for YOLOv5 + incorrect model detection --- .../rknn-convert-tool/autoinstallrknnapi.py | 96 ----------- scripts/rknn-convert-tool/create_onnx.py | 152 +++++++++++++----- scripts/rknn-convert-tool/create_rknn.py | 12 +- .../rknn-convert-tool/rknn_conversion.ipynb | 107 +++++++----- 4 files changed, 186 insertions(+), 181 deletions(-) delete mode 100644 scripts/rknn-convert-tool/autoinstallrknnapi.py diff --git a/scripts/rknn-convert-tool/autoinstallrknnapi.py b/scripts/rknn-convert-tool/autoinstallrknnapi.py deleted file mode 100644 index b6902353c2..0000000000 --- a/scripts/rknn-convert-tool/autoinstallrknnapi.py +++ /dev/null @@ -1,96 +0,0 @@ -import shutil -import sys -import platform -import urllib.request -import subprocess -from urllib.parse import urlparse -import os - -CHUNK_SIZE = 8192 -CONTENT_LENGTH_HEADER = "Content-Length" -PROGRESS_BAR_SIZE = 70 - -wheel_versions = { - "arm64": { - "3.6": "https://github.com/airockchip/rknn-toolkit2/raw/42aa1d426c0a9e0869b6374edba009f7208a1926/rknn-toolkit2/packages/arm64/rknn_toolkit2-2.3.2-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", - "3.7": "https://github.com/airockchip/rknn-toolkit2/raw/42aa1d426c0a9e0869b6374edba009f7208a1926/rknn-toolkit2/packages/arm64/rknn_toolkit2-2.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", - "3.8": "https://github.com/airockchip/rknn-toolkit2/raw/42aa1d426c0a9e0869b6374edba009f7208a1926/rknn-toolkit2/packages/arm64/rknn_toolkit2-2.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", - "3.9": "https://github.com/airockchip/rknn-toolkit2/raw/42aa1d426c0a9e0869b6374edba009f7208a1926/rknn-toolkit2/packages/arm64/rknn_toolkit2-2.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", - "3.10": 
"https://github.com/airockchip/rknn-toolkit2/raw/42aa1d426c0a9e0869b6374edba009f7208a1926/rknn-toolkit2/packages/arm64/rknn_toolkit2-2.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", - "3.11": "https://github.com/airockchip/rknn-toolkit2/raw/42aa1d426c0a9e0869b6374edba009f7208a1926/rknn-toolkit2/packages/arm64/rknn_toolkit2-2.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", - "3.12": "https://github.com/airockchip/rknn-toolkit2/raw/42aa1d426c0a9e0869b6374edba009f7208a1926/rknn-toolkit2/packages/arm64/rknn_toolkit2-2.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", - }, - "x86_64": { - "3.6": "https://github.com/airockchip/rknn-toolkit2/raw/42aa1d426c0a9e0869b6374edba009f7208a1926/rknn-toolkit2/packages/x86_64/rknn_toolkit2-2.3.2-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", - "3.7": "https://github.com/airockchip/rknn-toolkit2/raw/42aa1d426c0a9e0869b6374edba009f7208a1926/rknn-toolkit2/packages/x86_64/rknn_toolkit2-2.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", - "3.8": "https://github.com/airockchip/rknn-toolkit2/raw/42aa1d426c0a9e0869b6374edba009f7208a1926/rknn-toolkit2/packages/x86_64/rknn_toolkit2-2.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", - "3.9": "https://github.com/airockchip/rknn-toolkit2/raw/42aa1d426c0a9e0869b6374edba009f7208a1926/rknn-toolkit2/packages/x86_64/rknn_toolkit2-2.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", - "3.10": "https://github.com/airockchip/rknn-toolkit2/raw/42aa1d426c0a9e0869b6374edba009f7208a1926/rknn-toolkit2/packages/x86_64/rknn_toolkit2-2.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", - "3.11": "https://github.com/airockchip/rknn-toolkit2/raw/42aa1d426c0a9e0869b6374edba009f7208a1926/rknn-toolkit2/packages/x86_64/rknn_toolkit2-2.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", - "3.12": 
"https://github.com/airockchip/rknn-toolkit2/raw/42aa1d426c0a9e0869b6374edba009f7208a1926/rknn-toolkit2/packages/x86_64/rknn_toolkit2-2.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", - }, -} - -supported_arch = list(wheel_versions.keys()) - - -def get_filename_from_url(url): - parsed = urlparse(url) - filename = os.path.basename(parsed.path) - if not filename: - filename = "" # never gonna get here - return filename - - -if __name__ == "__main__": - arch = platform.machine() - - if not arch in supported_arch: - print( - f"Unsupported architecture {arch}. Must be one of the following: {supported_arch}" - ) - - current_version = f"{sys.version_info.major}.{sys.version_info.minor}" - supported_versions = list(wheel_versions[arch]) - - if sys.version_info.major < 3: - print(f"Must have at least python version {supported_versions[0]}") - elif not current_version in supported_versions: - print( - f"Unsupported python version {current_version}, supported python versions are: {supported_versions}" - ) - - download_url = wheel_versions[arch][current_version] - wheel_name = get_filename_from_url(download_url) - - print(f"Downloading RKNN Toolkit2 wheel: {wheel_name}") - with urllib.request.urlopen(download_url) as response, open( - wheel_name, "wb" - ) as out_file: - total_size = int(response.getheader(CONTENT_LENGTH_HEADER).strip()) - downloaded_chunks = 0 - - while True: - chunk = response.read(CHUNK_SIZE) - if not chunk: - break - out_file.write(chunk) - downloaded_chunks += len(chunk) - ratio = downloaded_chunks / total_size - done = int(PROGRESS_BAR_SIZE * ratio) - sys.stdout.write( - f"\r[{'=' * done}{' ' * (PROGRESS_BAR_SIZE - done)}] {(ratio * 100):.2f}% " - ) - sys.stdout.flush() - - print() - print("Download completed, now running pip install") - - try: - subprocess.run(["pip", "install", wheel_name]).check_returncode() - except subprocess.CalledProcessError as e: - print("Failed to run pip install, see output below") - print(e.output) - 
sys.exit(1) - - print("Python RKNN Toolkit2 installed successfully!") diff --git a/scripts/rknn-convert-tool/create_onnx.py b/scripts/rknn-convert-tool/create_onnx.py index 249f387e92..f2aa4cb0b1 100644 --- a/scripts/rknn-convert-tool/create_onnx.py +++ b/scripts/rknn-convert-tool/create_onnx.py @@ -3,16 +3,23 @@ import argparse import os.path -yolo_git_repos = { - "yolov5": "https://github.com/airockchip/yolov5", - "yolov8": "https://github.com/airockchip/ultralytics_yolov8", - "yolov11": "https://github.com/airockchip/ultralytics_yolo11", -} +# This will work for all models that don't use anchors (e.g. all YOLO models except YOLOv5/v7) +# This includes YOLOv5u +yolo_non_anchor_repo = "https://github.com/airockchip/ultralytics_yolo11" -valid_yolo_version = list(yolo_git_repos.keys()) -comma_sep_yolo_versions = ", ".join(valid_yolo_version) +# For original YOLOv5 models +yolov5_repo = "https://github.com/airockchip/yolov5" -ultralytics_folder_name = "airockchip_yolo_pkg" +valid_yolo_versions = ["yolov5", "yolov8", "yolov11"] +comma_sep_yolo_versions = ", ".join(valid_yolo_versions) + +ultralytics_folder_name_yolov5 = "airockchip_yolo_pkg_yolov5" +ultralytics_default_folder_name = "airockchip_yolo_pkg" + + +def print_bad_model_msg(): + print("This is usually due to passing in the wrong model version.") + print("Please make sure you have the right model version and try again.") def check_git_installed(): @@ -24,45 +31,106 @@ def check_git_installed(): sys.exit(1) -def run_onnx_conversion(version, model_path): - rc_repo = yolo_git_repos[version] - - if rc_repo is None: - # achievement: how did we get here? +def check_or_clone_rockchip_repo(repo_url, repo_name=ultralytics_default_folder_name): + if os.path.exists(repo_name): print( - f'Invalid yolo version "{version}" must be one of the following {comma_sep_yolo_versions}' + f'Existing Rockchip repo "{repo_name}" detected, skipping installation...' 
) - - if os.path.exists(ultralytics_folder_name): - print("Existing Rockchip Repo detected, no install required") else: - print("Cloning Rockchip repo...") - + print(f'Cloning Rockchip repo to "{repo_name}"') try: - subprocess.run( - ["git", "clone", rc_repo, ultralytics_folder_name] - ).check_returncode() + subprocess.run(["git", "clone", repo_url, repo_name]).check_returncode() except subprocess.CalledProcessError as e: - print("Failed to clone rockchip repo, see error output below") + print("Failed to clone Rockchip repo, see error output below") print(e.output) - sys.exit(1) + exit(1) + +def run_pip_install_or_else_exit(args): print("Running pip install...") + try: - subprocess.run( - ["pip", "install", "-e", ultralytics_folder_name, "onnx"] - ).check_returncode() + subprocess.run(["pip", "install"] + args).check_returncode() except subprocess.CalledProcessError as e: print("Pip install rockchip repo failed, see error output") print(e.output) sys.exit(1) - sys.path.insert(0, os.path.abspath(ultralytics_folder_name)) + +def run_onnx_conversion_yolov5(model_path): + check_or_clone_rockchip_repo(yolov5_repo, ultralytics_folder_name_yolov5) + run_pip_install_or_else_exit( + [ + "-r", + os.path.join(ultralytics_folder_name_yolov5, "requirements.txt"), + "torch<2.6.0", + "onnx", + ] + ) + + model_abspath = os.path.abspath(model_path) + + try: + subprocess.run( + [ + "python", + f"{ultralytics_folder_name_yolov5}/export.py", + "--weights", + model_abspath, + "--rknpu", + "--include", + "onnx", + ], + capture_output=True, + text=True, + ).check_returncode() + except subprocess.CalledProcessError as e: + print("Failed to run YOLOv5 export, see output below") + output_string = (e.stdout or "") + (e.stderr or "") + print(output_string) + + is_bad_model = False + + if "ModuleNotFoundError" in output_string and "ultralytics" in output_string: + print( + "It seems the YOLOv5 repo could not find an ultralytics installation." 
+ ) + is_bad_model = True + elif ( + "AttributeError" in output_string + and "_register_detect_seperate" in output_string + ): + print("It seems that you received a model attribute error.") + is_bad_model = True + + if is_bad_model: + print_bad_model_msg() + + exit(1) + + +def run_onnx_conversion_no_anchor(model_path): + check_or_clone_rockchip_repo(yolo_non_anchor_repo) + run_pip_install_or_else_exit(["-e", ultralytics_default_folder_name, "onnx"]) + + sys.path.insert(0, os.path.abspath(ultralytics_default_folder_name)) + model_abs_path = os.path.abspath(model_path) from ultralytics import YOLO - model = YOLO(model_path) - model.export(format="rknn") + try: + model = YOLO(model_abs_path) + model.export(format="rknn") + except TypeError as e: + print(e) + print() + print() + if "originally trained" in str(e): + print("Ultralytics has detected that this model is invalid.") + print_bad_model_msg() + exit(1) + except: + exit(1) if __name__ == "__main__": @@ -71,21 +139,29 @@ def run_onnx_conversion(version, model_path): ) parser.add_argument( - "-v", - "--version", - choices=valid_yolo_version, + "-m", + "--model_path", required=True, - help=(f"YOLO version to use. Must be one of: {comma_sep_yolo_versions}"), + help=(f"Path to YOLO model"), ) parser.add_argument( - "-m", - "--model_path", + "-v", + "--version", required=True, - help=(f"Path to YOLO model"), + choices=valid_yolo_versions, + help=(f"Model version, must be one of: {comma_sep_yolo_versions}"), ) args = parser.parse_args() check_git_installed() - run_onnx_conversion(args.version, args.model_path) + + if args.version.lower() == "yolov5": + run_onnx_conversion_yolov5(args.model_path) + else: + run_onnx_conversion_no_anchor(args.model_path) + + print( + "Model export finished. Please use the generated ONNX file to convert to RKNN." 
+ ) diff --git a/scripts/rknn-convert-tool/create_rknn.py b/scripts/rknn-convert-tool/create_rknn.py index c61973110f..e3a14e638c 100644 --- a/scripts/rknn-convert-tool/create_rknn.py +++ b/scripts/rknn-convert-tool/create_rknn.py @@ -8,6 +8,7 @@ image_extensions = (".jpg", ".jpeg", ".png", ".bmp", ".gif", ".tiff", ".webp") DEFAULT_PLATFORM = "rk3588" + def list_img_dir(img_dir): return [ os.path.abspath(os.path.join(img_dir, f)) @@ -86,7 +87,10 @@ def get_image_list(num_imgs, image_dir): def run_rknn_conversion( img_list_txt, disable_quant, model_path, rknn_output, verbose_logging ): - rknn = RKNN(verbose=verbose_logging, verbose_file=("rknn_convert.log" if verbose_logging else None)) + rknn = RKNN( + verbose=verbose_logging, + verbose_file=("rknn_convert.log" if verbose_logging else None), + ) rknn.config( mean_values=[[0, 0, 0]], @@ -148,8 +152,7 @@ def run_rknn_conversion( parser.add_argument( "-dq", "--disable_quantize", - type=bool, - default=False, + action="store_true", help="Whether to skip quantization (default: False)", ) @@ -170,8 +173,7 @@ def run_rknn_conversion( parser.add_argument( "-vb", "--verbose", - type=bool, - default=False, + action="store_true", help="Whether to enable verbose logging", ) diff --git a/scripts/rknn-convert-tool/rknn_conversion.ipynb b/scripts/rknn-convert-tool/rknn_conversion.ipynb index f8c8f699a4..4f9367b855 100644 --- a/scripts/rknn-convert-tool/rknn_conversion.ipynb +++ b/scripts/rknn-convert-tool/rknn_conversion.ipynb @@ -15,14 +15,14 @@ ] }, { - "metadata": {}, "cell_type": "markdown", + "id": "5f42d0a144caceb6", + "metadata": {}, "source": [ - "### Preinstallation — Important Setup for Google Colab Users\n", + "### Preinstallation\n", "\n", - "This notebook requires the use of external Python scripts. Please run the installation script below to import these external scripts." - ], - "id": "5f42d0a144caceb6" + "This notebook requires the use of external Python scripts. 
Please run the installation script below to import these external scripts if you do not have them already." + ] }, { "cell_type": "code", @@ -32,7 +32,6 @@ "outputs": [], "source": [ "import subprocess\n", - "import os\n", "\n", "# Define scripts with URLs and inferred filenames\n", "# DO NOT modify the filenames\n", @@ -42,10 +41,6 @@ " \"filename\": \"create_onnx.py\" # CREATE_ONNX_SCRIPT\n", " },\n", " {\n", - " \"url\": \"https://raw.githubusercontent.com/boomermath/photonvision_rknn_fork/08117cc111c61264272188c8a1168b4c8e1bdb3b/scripts/rknn-convert-tool/autoinstallrknnapi.py\",\n", - " \"filename\": \"autoinstallrknnapi.py\" # AUTO_INSTALL_SCRIPT\n", - " },\n", - " {\n", " \"url\": \"https://raw.githubusercontent.com/boomermath/photonvision_rknn_fork/08117cc111c61264272188c8a1168b4c8e1bdb3b/scripts/rknn-convert-tool/create_rknn.py\",\n", " \"filename\": \"create_rknn.py\" # CREATE_RKNN_SCRIPT\n", " }\n", @@ -62,35 +57,55 @@ ] }, { - "metadata": {}, "cell_type": "markdown", + "id": "d68be4aba4d3022b", + "metadata": {}, "source": [ - "#### *Numpy Fix*\n", + "#### *Numpy Fix* - Important for Google Colab Users\n", "\n", "Google Colab comes with an incompatible version of Numpy installed. To fix this, please run the following cells below and **restart your session** when prompted." 
- ], - "id": "d68be4aba4d3022b" + ] }, { - "metadata": {}, "cell_type": "code", + "execution_count": 1, + "id": "de0310a3e4401233", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Found existing installation: numpy 1.26.4\n", + "Uninstalling numpy-1.26.4:\n", + " Successfully uninstalled numpy-1.26.4\n", + "Note: you may need to restart the kernel to use updated packages.\n", + "Collecting numpy<2.0.0,>=1.23.0\n", + " Using cached numpy-1.26.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl.metadata (61 kB)\n", + "Using cached numpy-1.26.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (18.3 MB)\n", + "Installing collected packages: numpy\n", + "Successfully installed numpy-1.26.4\n", + "\n", + "\u001B[1m[\u001B[0m\u001B[34;49mnotice\u001B[0m\u001B[1;39;49m]\u001B[0m\u001B[39;49m A new release of pip is available: \u001B[0m\u001B[31;49m24.0\u001B[0m\u001B[39;49m -> \u001B[0m\u001B[32;49m25.1.1\u001B[0m\n", + "\u001B[1m[\u001B[0m\u001B[34;49mnotice\u001B[0m\u001B[1;39;49m]\u001B[0m\u001B[39;49m To update, run: \u001B[0m\u001B[32;49mpip install --upgrade pip\u001B[0m\n", + "Note: you may need to restart the kernel to use updated packages.\n" + ] + } + ], "source": [ "%pip uninstall numpy -y\n", "%pip install \"numpy>=1.23.0,<2.0.0\"" - ], - "id": "de0310a3e4401233", - "outputs": [], - "execution_count": null + ] }, { - "metadata": {}, "cell_type": "markdown", + "id": "d498ed79", + "metadata": {}, "source": [ "### Step 1: Convert to ONNX\n", "\n", "To convert to ONNX, simply run the `create_onnx.py` script, providing the path to your model weights and specifying the model version, as shown below." 
- ], - "id": "d498ed79" + ] }, { "cell_type": "code", @@ -100,7 +115,7 @@ "outputs": [], "source": [ "# where version is either yolov5, yolov8, or yolov11, and model_path is the path to your weights file (.pt)\n", - "%run create_onnx.py --version yolov8 --model_path weights.pt" + "%run create_onnx.py --version yolov8 --model_path yolov8n.pt" ] }, { @@ -110,11 +125,11 @@ "source": [ "### Step 2: Download RKNN API\n", "\n", - "You can either use a script to automatically detect and install the correct RKNN API Python library for you, or install it manually.\n", + "You can either use `pip` below to automatically detect and install the correct RKNN API Python library for you, or install it manually.\n", "\n", "#### Automatic installation\n", "\n", - "Please run the script below. If it does not work, refer to the instructions for manual installation.\n" + "Please run `pip` below. If it does not work, refer to the instructions for manual installation.\n" ] }, { @@ -123,7 +138,9 @@ "id": "7ec11f96", "metadata": {}, "outputs": [], - "source": "%run autoinstallrknnapi.py" + "source": [ + "%pip install rknn-toolkit2" + ] }, { "cell_type": "markdown", @@ -192,8 +209,9 @@ ] }, { - "metadata": {}, "cell_type": "markdown", + "id": "5e56b2f64bf6e85f", + "metadata": {}, "source": [ "### Quantization Note\n", "\n", @@ -204,12 +222,12 @@ "Quantization will cause some loss in model accuracy. However, if your calibration images are chosen wisely, this accuracy drop should be minimal and acceptable. If the sampled images are too uniform or unrelated, your quantized model's performance may worsen significantly.\n", "\n", "The script will automatically sample representative images randomly from the provided dataset. While this usually works well, please verify that the dataset contains diverse and relevant examples of your target objects. 
As a reminder, the images used to quantize the model are stored in the text file specified by `--img_dataset_txt`.\n" - ], - "id": "5e56b2f64bf6e85f" + ] }, { - "metadata": {}, "cell_type": "markdown", + "id": "93e0d0622df170e", + "metadata": {}, "source": [ "### Optional: Download a dataset from Roboflow for quantization\n", "\n", @@ -252,20 +270,22 @@ "Paste the copied command into the notebook cell below and run it. This will download and extract the dataset into your environment, making it ready for use in the quantization process.\n", "\n", "Make sure to prefix the command with \"`!`\" so it executes properly in this Jupyter Notebook environment." - ], - "id": "93e0d0622df170e" + ] }, { - "metadata": {}, "cell_type": "code", - "outputs": [], "execution_count": null, - "source": "!curl -L \"https://universe.roboflow.com/ds/FaF3HbDmF7?key=iMoJR25O9H\" > roboflow.zip; unzip roboflow.zip; rm roboflow.zip", - "id": "8bf75c9dcb328c84" + "id": "8bf75c9dcb328c84", + "metadata": {}, + "outputs": [], + "source": [ + "!curl -L \"https://universe.roboflow.com/ds/FaF3HbDmF7?key=iMoJR25O9H\" > roboflow.zip; unzip roboflow.zip; rm roboflow.zip" + ] }, { - "metadata": {}, "cell_type": "markdown", + "id": "72bad9cac670f1ab", + "metadata": {}, "source": [ "### RKNN Conversion Script\n", "\n", @@ -274,8 +294,7 @@ "##### *Note*\n", "\n", "If you followed the Roboflow dataset download instructions from the previous section, the dataset will have been extracted to your **current working directory**. In that case, you can simply set `--img_dir` to \"`.`\" to reference the current directory.\n" - ], - "id": "72bad9cac670f1ab" + ] }, { "cell_type": "code", @@ -283,13 +302,17 @@ "id": "b09656dd", "metadata": {}, "outputs": [], - "source": "%run create_rknn.py --img_dir ./datasets --model_path weights.onnx" + "source": [ + "%run create_rknn.py --img_dir ./datasets --model_path weights.onnx" + ] }, { "cell_type": "markdown", "id": "5b3a6806", "metadata": {}, - "source": "And that’s it! 
Your RKNN model file is now ready for deployment on an Orange Pi." + "source": [ + "And that’s it! Your RKNN model file is now ready for deployment on an Orange Pi." + ] } ], "metadata": { @@ -308,7 +331,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.10.18" + "version": "3.11.13" } }, "nbformat": 4, From e35516243f02faf49f7d27ecb9758b7bf2dbceb9 Mon Sep 17 00:00:00 2001 From: boomermath Date: Sat, 2 Aug 2025 23:51:37 -0400 Subject: [PATCH 16/17] put back error output (oops) --- scripts/rknn-convert-tool/create_onnx.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/scripts/rknn-convert-tool/create_onnx.py b/scripts/rknn-convert-tool/create_onnx.py index f2aa4cb0b1..0e87c9dbdb 100644 --- a/scripts/rknn-convert-tool/create_onnx.py +++ b/scripts/rknn-convert-tool/create_onnx.py @@ -129,8 +129,6 @@ def run_onnx_conversion_no_anchor(model_path): print("Ultralytics has detected that this model is invalid.") print_bad_model_msg() exit(1) - except: - exit(1) if __name__ == "__main__": From 3a0f9083d75a33da78a7aae40a5ee4d7b7305490 Mon Sep 17 00:00:00 2001 From: boomermath Date: Sun, 3 Aug 2025 14:38:41 -0400 Subject: [PATCH 17/17] redoing pr --- scripts/rknn-convert-tool/MAINTAINERS.md | 7 - scripts/rknn-convert-tool/create_onnx.py | 165 --------- scripts/rknn-convert-tool/create_rknn.py | 218 ----------- .../rknn-convert-tool/rknn_conversion.ipynb | 339 ------------------ 4 files changed, 729 deletions(-) delete mode 100644 scripts/rknn-convert-tool/MAINTAINERS.md delete mode 100644 scripts/rknn-convert-tool/create_onnx.py delete mode 100644 scripts/rknn-convert-tool/create_rknn.py delete mode 100644 scripts/rknn-convert-tool/rknn_conversion.ipynb diff --git a/scripts/rknn-convert-tool/MAINTAINERS.md b/scripts/rknn-convert-tool/MAINTAINERS.md deleted file mode 100644 index 5e4a67a698..0000000000 --- a/scripts/rknn-convert-tool/MAINTAINERS.md +++ /dev/null @@ -1,7 +0,0 @@ -# Notebook - -In the first cell of the RKNN 
conversion notebook, the installation script uses a structured list of dictionaries to define the download URLs and filenames for required scripts. Each dictionary includes a `url` (a permalink to a specific commit) and the corresponding `filename`. - -Please ensure that all URLs in this array use permalinks—that is, links pointing to a specific commit hash rather than a branch name (e.g., main). This guarantees that the correct version of each script is always fetched, and prevents unexpected changes if the repository is updated in the future. - -You typically won’t need to update these permalinks unless one of the referenced scripts is modified. In that case, update the commit hash in the URLs accordingly. \ No newline at end of file diff --git a/scripts/rknn-convert-tool/create_onnx.py b/scripts/rknn-convert-tool/create_onnx.py deleted file mode 100644 index 0e87c9dbdb..0000000000 --- a/scripts/rknn-convert-tool/create_onnx.py +++ /dev/null @@ -1,165 +0,0 @@ -import subprocess -import sys -import argparse -import os.path - -# This will work for all models that don't use anchors (e.g. 
all YOLO models except YOLOv5/v7) -# This includes YOLOv5u -yolo_non_anchor_repo = "https://github.com/airockchip/ultralytics_yolo11" - -# For original YOLOv5 models -yolov5_repo = "https://github.com/airockchip/yolov5" - -valid_yolo_versions = ["yolov5", "yolov8", "yolov11"] -comma_sep_yolo_versions = ", ".join(valid_yolo_versions) - -ultralytics_folder_name_yolov5 = "airockchip_yolo_pkg_yolov5" -ultralytics_default_folder_name = "airockchip_yolo_pkg" - - -def print_bad_model_msg(): - print("This is usually due to passing in the wrong model version.") - print("Please make sure you have the right model version and try again.") - - -def check_git_installed(): - try: - subprocess.run(["git", "--version"]).check_returncode() - except: - print("Git is not installed or not found in your PATH.") - print("Please install Git from https://git-scm.com/downloads and try again.") - sys.exit(1) - - -def check_or_clone_rockchip_repo(repo_url, repo_name=ultralytics_default_folder_name): - if os.path.exists(repo_name): - print( - f'Existing Rockchip repo "{repo_name}" detected, skipping installation...' 
- ) - else: - print(f'Cloning Rockchip repo to "{repo_name}"') - try: - subprocess.run(["git", "clone", repo_url, repo_name]).check_returncode() - except subprocess.CalledProcessError as e: - print("Failed to clone Rockchip repo, see error output below") - print(e.output) - exit(1) - - -def run_pip_install_or_else_exit(args): - print("Running pip install...") - - try: - subprocess.run(["pip", "install"] + args).check_returncode() - except subprocess.CalledProcessError as e: - print("Pip install rockchip repo failed, see error output") - print(e.output) - sys.exit(1) - - -def run_onnx_conversion_yolov5(model_path): - check_or_clone_rockchip_repo(yolov5_repo, ultralytics_folder_name_yolov5) - run_pip_install_or_else_exit( - [ - "-r", - os.path.join(ultralytics_folder_name_yolov5, "requirements.txt"), - "torch<2.6.0", - "onnx", - ] - ) - - model_abspath = os.path.abspath(model_path) - - try: - subprocess.run( - [ - "python", - f"{ultralytics_folder_name_yolov5}/export.py", - "--weights", - model_abspath, - "--rknpu", - "--include", - "onnx", - ], - capture_output=True, - text=True, - ).check_returncode() - except subprocess.CalledProcessError as e: - print("Failed to run YOLOv5 export, see output below") - output_string = (e.stdout or "") + (e.stderr or "") - print(output_string) - - is_bad_model = False - - if "ModuleNotFoundError" in output_string and "ultralytics" in output_string: - print( - "It seems the YOLOv5 repo could not find an ultralytics installation." 
- ) - is_bad_model = True - elif ( - "AttributeError" in output_string - and "_register_detect_seperate" in output_string - ): - print("It seems that you received a model attribute error.") - is_bad_model = True - - if is_bad_model: - print_bad_model_msg() - - exit(1) - - -def run_onnx_conversion_no_anchor(model_path): - check_or_clone_rockchip_repo(yolo_non_anchor_repo) - run_pip_install_or_else_exit(["-e", ultralytics_default_folder_name, "onnx"]) - - sys.path.insert(0, os.path.abspath(ultralytics_default_folder_name)) - model_abs_path = os.path.abspath(model_path) - - from ultralytics import YOLO - - try: - model = YOLO(model_abs_path) - model.export(format="rknn") - except TypeError as e: - print(e) - print() - print() - if "originally trained" in str(e): - print("Ultralytics has detected that this model is invalid.") - print_bad_model_msg() - exit(1) - - -if __name__ == "__main__": - parser = argparse.ArgumentParser( - description="Generate valid ONNX file for yolo model" - ) - - parser.add_argument( - "-m", - "--model_path", - required=True, - help=(f"Path to YOLO model"), - ) - - parser.add_argument( - "-v", - "--version", - required=True, - choices=valid_yolo_versions, - help=(f"Model version, must be one of: {comma_sep_yolo_versions}"), - ) - - args = parser.parse_args() - - check_git_installed() - - if args.version.lower() == "yolov5": - run_onnx_conversion_yolov5(args.model_path) - else: - run_onnx_conversion_no_anchor(args.model_path) - - print( - "Model export finished. Please use the generated ONNX file to convert to RKNN." 
- ) diff --git a/scripts/rknn-convert-tool/create_rknn.py b/scripts/rknn-convert-tool/create_rknn.py deleted file mode 100644 index e3a14e638c..0000000000 --- a/scripts/rknn-convert-tool/create_rknn.py +++ /dev/null @@ -1,218 +0,0 @@ -import subprocess -import sys -import random -import argparse -import os -from rknn.api import RKNN - -image_extensions = (".jpg", ".jpeg", ".png", ".bmp", ".gif", ".tiff", ".webp") -DEFAULT_PLATFORM = "rk3588" - - -def list_img_dir(img_dir): - return [ - os.path.abspath(os.path.join(img_dir, f)) - for f in os.listdir(img_dir) - if f.lower().endswith(image_extensions) - ] - - -def sample_imgs(num, img_list): - if len(img_list) < num: - return img_list - else: - return random.sample(img_list, num) - - -def get_image_list_from_dataset(num_imgs, yaml_dir): - print(f"Dataset detected with {yaml_dir} file") - img_raw_paths = [] - - with open(yaml_dir, "r") as yaml_file: - for line in yaml_file: - line = line.strip() - if ( - line.startswith("train:") - or line.startswith("val:") - or line.startswith("test:") - ): - img_raw_paths.append(line.split(":", 1)[1].strip()) - - no_yaml_dir = yaml_dir.replace( - "data.yaml", "dummy_dir" - ) # data.yaml sets dirs one level up - img_set_paths = [] - - for img_raw_path in img_raw_paths: - p = ( - img_raw_path - if os.path.isabs(img_raw_path) - else os.path.realpath(os.path.join(no_yaml_dir, img_raw_path)) - ) - - if os.path.exists(p): - img_set_paths.append(p) - - if len(img_set_paths) < 1: - return None - - all_imgs = [list_img_dir(path) for path in img_set_paths] - - for imgs in all_imgs: - print(len(imgs)) - - total_imgs = sum(len(group) for group in all_imgs) - - sampled_imgs = [ - sample_imgs(round((len(group) / total_imgs) * num_imgs), group) - for group in all_imgs - ] - - return [img for group in sampled_imgs for img in group] - - -def get_image_list_from_img_dir(num_imgs, img_dir): - return sample_imgs(num_imgs, list_img_dir(img_dir)) - - -def get_image_list(num_imgs, image_dir): - yaml_path 
= os.path.join(image_dir, "data.yaml") - - if os.path.exists(yaml_path): - return get_image_list_from_dataset(num_imgs, yaml_path) - else: - return get_image_list_from_img_dir(num_imgs, image_dir) - - -def run_rknn_conversion( - img_list_txt, disable_quant, model_path, rknn_output, verbose_logging -): - rknn = RKNN( - verbose=verbose_logging, - verbose_file=("rknn_convert.log" if verbose_logging else None), - ) - - rknn.config( - mean_values=[[0, 0, 0]], - std_values=[[255, 255, 255]], - target_platform=DEFAULT_PLATFORM, - ) - - print("Attempted RKNN load") - ret = rknn.load_onnx(model=model_path) - if ret != 0: - print("Loading model failed!") - exit(ret) - - print("Attempting RKNN build") - ret = rknn.build(do_quantization=(not disable_quant), dataset=img_list_txt) - if ret != 0: - print("Building model failed!") - exit(ret) - - print("Build succeeded! Starting export...") - ret = rknn.export_rknn(rknn_output) - if ret != 0: - print("Exporting model failed!") - exit(ret) - print("Finished export!") - - # Release - rknn.release() - - print(f'Your model is in "{rknn_output}" and ready to use!') - - -if __name__ == "__main__": - parser = argparse.ArgumentParser( - description="Generate valid ONNX file for yolo model" - ) - - parser.add_argument( - "-ni", - "--num_imgs", - type=int, - default=300, - help="Number of images to use for calibration (default: 300)", - ) - - parser.add_argument( - "-d", - "--img_dir", - help="Directory where your dataset is located (must have data.yaml), or images are located", - ) - - parser.add_argument( - "-m", - "--model_path", - required=True, - help=(f"Path to generated ONNX model"), - ) - - parser.add_argument( - "-dq", - "--disable_quantize", - action="store_true", - help="Whether to skip quantization (default: False)", - ) - - parser.add_argument( - "-o", - "--rknn_output", - default="out.rknn", - help="Where the rknn model should be outputted (default: ./out.rknn)", - ) - - parser.add_argument( - "-ds", - "--img_dataset_txt", - 
default="imgs.txt", - help="Where the list of images used for quantization should be outputted (default: ./imgs.txt)", - ) - - parser.add_argument( - "-vb", - "--verbose", - action="store_true", - help="Whether to enable verbose logging", - ) - - args = parser.parse_args() - - if not args.rknn_output.endswith(".rknn"): - print("RKNN output path must end in .rknn!") - sys.exit(1) - - if not args.disable_quantize: - if args.img_dir == None or len(args.img_dir) < 1: - print(f"Must specify list of images to use with --img_dir") - sys.exit(1) - - img_dir_abs = os.path.abspath(args.img_dir) - - img_list = get_image_list(args.num_imgs, img_dir_abs) - img_list_len = 0 if img_list is None else len(img_list) - - if img_list_len == 0: - print(f"No images found in {img_dir_abs}") - sys.exit(1) - elif img_list_len < args.num_imgs: - print( - f"Not enough images in your dataset/directory, you have {img_list_len} images, but need {args.num_imgs}" - ) - sys.exit(1) - - if not args.img_dataset_txt.endswith(".txt"): - print(f"Image dataset text file path must end in .txt") - sys.exit(1) - - with open(args.img_dataset_txt, "w") as set_file: - set_file.writelines(f"{img}\n" for img in img_list) - - run_rknn_conversion( - args.img_dataset_txt, - args.disable_quantize, - args.model_path, - args.rknn_output, - args.verbose, - ) diff --git a/scripts/rknn-convert-tool/rknn_conversion.ipynb b/scripts/rknn-convert-tool/rknn_conversion.ipynb deleted file mode 100644 index 4f9367b855..0000000000 --- a/scripts/rknn-convert-tool/rknn_conversion.ipynb +++ /dev/null @@ -1,339 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "id": "bb5367ce", - "metadata": {}, - "source": [ - "# RKNN Conversion Guide\n", - "\n", - "----------------------------\n", - "\n", - "### Before you start\n", - "\n", - "Before you run the scripts or Python notebook from this project, it is recommended that you create a separate [Python virtual environment](https://docs.python.org/3/library/venv.html) so that the 
packages installed for the conversion process do not conflict with other packages you may already have installed." - ] - }, - { - "cell_type": "markdown", - "id": "5f42d0a144caceb6", - "metadata": {}, - "source": [ - "### Preinstallation\n", - "\n", - "This notebook requires the use of external Python scripts. Please run the installation script below to import these external scripts if you do not have them already." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "7903189e", - "metadata": {}, - "outputs": [], - "source": [ - "import subprocess\n", - "\n", - "# Define scripts with URLs and inferred filenames\n", - "# DO NOT modify the filenames\n", - "scripts = [\n", - " {\n", - " \"url\": \"https://raw.githubusercontent.com/boomermath/photonvision_rknn_fork/08117cc111c61264272188c8a1168b4c8e1bdb3b/scripts/rknn-convert-tool/create_onnx.py\",\n", - " \"filename\": \"create_onnx.py\" # CREATE_ONNX_SCRIPT\n", - " },\n", - " {\n", - " \"url\": \"https://raw.githubusercontent.com/boomermath/photonvision_rknn_fork/08117cc111c61264272188c8a1168b4c8e1bdb3b/scripts/rknn-convert-tool/create_rknn.py\",\n", - " \"filename\": \"create_rknn.py\" # CREATE_RKNN_SCRIPT\n", - " }\n", - "]\n", - "\n", - "# Download each script\n", - "for script in scripts:\n", - " try:\n", - " subprocess.run([\"wget\", script[\"url\"], \"-O\", script[\"filename\"]]).check_returncode()\n", - " print(f\"Successfully downloaded: {script['filename']}\")\n", - " except subprocess.CalledProcessError as e:\n", - " print(f\"Failed to download script from URL: {script['url']}\")\n", - " print(e)\n" - ] - }, - { - "cell_type": "markdown", - "id": "d68be4aba4d3022b", - "metadata": {}, - "source": [ - "#### *Numpy Fix* - Important for Google Colab Users\n", - "\n", - "Google Colab comes with an incompatible version of Numpy installed. To fix this, please run the following cells below and **restart your session** when prompted." 
- ] - }, - { - "cell_type": "code", - "execution_count": 1, - "id": "de0310a3e4401233", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Found existing installation: numpy 1.26.4\n", - "Uninstalling numpy-1.26.4:\n", - " Successfully uninstalled numpy-1.26.4\n", - "Note: you may need to restart the kernel to use updated packages.\n", - "Collecting numpy<2.0.0,>=1.23.0\n", - " Using cached numpy-1.26.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl.metadata (61 kB)\n", - "Using cached numpy-1.26.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (18.3 MB)\n", - "Installing collected packages: numpy\n", - "Successfully installed numpy-1.26.4\n", - "\n", - "\u001B[1m[\u001B[0m\u001B[34;49mnotice\u001B[0m\u001B[1;39;49m]\u001B[0m\u001B[39;49m A new release of pip is available: \u001B[0m\u001B[31;49m24.0\u001B[0m\u001B[39;49m -> \u001B[0m\u001B[32;49m25.1.1\u001B[0m\n", - "\u001B[1m[\u001B[0m\u001B[34;49mnotice\u001B[0m\u001B[1;39;49m]\u001B[0m\u001B[39;49m To update, run: \u001B[0m\u001B[32;49mpip install --upgrade pip\u001B[0m\n", - "Note: you may need to restart the kernel to use updated packages.\n" - ] - } - ], - "source": [ - "%pip uninstall numpy -y\n", - "%pip install \"numpy>=1.23.0,<2.0.0\"" - ] - }, - { - "cell_type": "markdown", - "id": "d498ed79", - "metadata": {}, - "source": [ - "### Step 1: Convert to ONNX\n", - "\n", - "To convert to ONNX, simply run the `create_onnx.py` script, providing the path to your model weights and specifying the model version, as shown below." 
- ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "0659e15f", - "metadata": {}, - "outputs": [], - "source": [ - "# where version is either yolov5, yolov8, or yolov11, and model_path is the path to your weights file (.pt)\n", - "%run create_onnx.py --version yolov8 --model_path yolov8n.pt" - ] - }, - { - "cell_type": "markdown", - "id": "86ff07e6", - "metadata": {}, - "source": [ - "### Step 2: Download RKNN API\n", - "\n", - "You can either use `pip` below to automatically detect and install the correct RKNN API Python library for you, or install it manually.\n", - "\n", - "#### Automatic installation\n", - "\n", - "Please run `pip` below. If it does not work, refer to the instructions for manual installation.\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "7ec11f96", - "metadata": {}, - "outputs": [], - "source": [ - "%pip install rknn-toolkit2" - ] - }, - { - "cell_type": "markdown", - "id": "8b57fe4d", - "metadata": {}, - "source": [ - "#### Manual Installation (If Automatic Installation Fails)\n", - "Visit the [RKNN Toolkit 2](https://github.com/airockchip/rknn-toolkit2) Github repository, then click on rknn-toolkit2, followed by packages.\n", - "If you are running an x86_64 CPU (e.g., most Intel and AMD processors), select that option; otherwise, choose arm64 for ARM-based computers (e.g., M-series Macs or Snapdragon processors). If you're unsure which CPU you're using, check your system settings for processor architecture information.\n", - "\n", - "Once you've selected the correct CPU architecture, you'll see multiple packages. The file names will look something like:\n", - "`rknn_toolkit2-2.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl`.\n", - "The numbers after cp correspond to your Python version. For example, if you're using Python 3.10, look for a package with cp310 in the name. 
For Python 3.8, look for cp38; for Python 3.7, cp37, and so on.\n", - "\n", - "Once you've found the correct package, click the \"Raw\" button to download the .whl file. Then, run the following command in your terminal, replacing rknn_toolkit2.whl with the actual path to the file you downloaded:" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "7414b120", - "metadata": {}, - "outputs": [], - "source": [ - "%pip install rknn_toolkit2.whl" - ] - }, - { - "cell_type": "markdown", - "id": "c1db5ef0", - "metadata": {}, - "source": [ - "### Step 3: Convert to RKNN\n", - "\n", - "To get started, run the `create_rknn.py` script, replacing the arguments with your own values. Refer to the table below for detailed information on each argument’s purpose and usage." - ] - }, - { - "cell_type": "markdown", - "id": "f41e431b", - "metadata": {}, - "source": [ - "#### Overview of the `create_rknn.py` script\n", - "\n", - "This script converts a YOLO ONNX model to RKNN format using a set of calibration images. It's designed to work with either:\n", - "\n", - "- A flat directory of images (e.g. `train/images`), **or**\n", - "- A dataset directory containing a `data.yaml` file that defines `train`, `val`, and/or `test` folders.\n", - "\n", - "##### Arguments\n", - "\n", - "| Argument | Type | Description |\n", - "|----------|------|-----------------------------------------------------------------------------------------------------------------|\n", - "| `--img_dir` (`-d`) | `str` (required) | Path to your image directory. This can either be a folder of images **or** a dataset folder with a `data.yaml`. |\n", - "| `--model_path` (`-m`) | `str` (required) | Path to your YOLO ONNX model, created in Step 1. |\n", - "| `--num_imgs` (`-ni`) | `int` (default: `300`) | Number of images to use for quantization calibration. |\n", - "| `--disable_quantize` (`-dq`) | `bool` (default: `False`) | Set to `True` to skip quantization entirely, not recommended for performance. 
|\n", - "| `--rknn_output` (`-o`) | `str` (default: `out.rknn`) | File path where the final RKNN model should be saved. |\n", - "| `--img_dataset_txt` (`-ds`) | `str` (default: `imgs.txt`) | File path to store the list of images used during quantization. |\n", - "| `--verbose` (`-vb`) | `bool` (default: `False`) | Enable detailed logging from the RKNN API during conversion. |\n", - "\n", - "\n", - "##### *Note*\n", - "\n", - "This script is designed for use with [PhotonVision](https://photonvision.org), and by default sets the target platform for RKNN conversion to `RK3588`, a chipset commonly found in many variants of the Orange Pi 5 series (e.g., Orange Pi 5, 5 Pro, 5 Plus, 5 Max, etc.). You may modify the `TARGET_PLATFORM` value in the `create_rknn.py` script to match your specific hardware or deployment requirements if necessary." - ] - }, - { - "cell_type": "markdown", - "id": "5e56b2f64bf6e85f", - "metadata": {}, - "source": [ - "### Quantization Note\n", - "\n", - "When performing quantization, it is critical to provide representative images of the objects or scenes you are trying to detect. These images are used to calibrate the model’s internal activations and greatly influence the final performance.\n", - "\n", - "It is recommended to use 300–500 representative images that reflect the real-world input your model will encounter. As the old saying goes, it’s quality over quantity — having a diverse, relevant set matters more than simply having many images.\n", - "\n", - "Quantization will cause some loss in model accuracy. However, if your calibration images are chosen wisely, this accuracy drop should be minimal and acceptable. If the sampled images are too uniform or unrelated, your quantized model's performance may worsen significantly.\n", - "\n", - "The script will automatically sample representative images randomly from the provided dataset. 
While this usually works well, please verify that the dataset contains diverse and relevant examples of your target objects. As a reminder, the images used to quantize the model are stored in the text file specified by `--img_dataset_txt`.\n" - ] - }, - { - "cell_type": "markdown", - "id": "93e0d0622df170e", - "metadata": {}, - "source": [ - "### Optional: Download a dataset from Roboflow for quantization\n", - "\n", - "If you do not already have a dataset or set of images containing the objects you want to detect, follow the steps below to download one from Roboflow Universe.\n", - "\n", - "#### **Step 1: Search for a Dataset**\n", - "\n", - "Go to [Roboflow Universe](https://universe.roboflow.com) and use the search bar to locate a dataset relevant to what you want to detect.\n", - "**Note:** The dataset must include the classes or object types you intend to detect.\n", - "\n", - "#### **Step 2: Access the Dataset Tab**\n", - "\n", - "After selecting a suitable project, navigate to the **Dataset** tab. Click the **\"Download Dataset\"** button. 
A prompt will appear with several options, including:\n", - "\n", - "- Train a model with this dataset\n", - "- Train from a portion of this dataset\n", - "- Download dataset\n", - "\n", - "Select **Download dataset**.\n", - "\n", - "#### **Step 3: Choose Format and View Download Code**\n", - "\n", - "- Under **Image and Annotation Format**, choose the version of YOLO you are using:\n", - " - For **YOLOv5**, choose `YOLOv5 PyTorch`\n", - " - For **YOLOv8**, choose `YOLOv8`\n", - " - For **YOLOv11**, choose `YOLOv11`\n", - "- If multiple annotation formats are listed for your model, always select the one ending in **\"PyTorch\"**.\n", - "\n", - "Then, under **Download Options**, click **\"Show Download Code\"** and continue.\n", - "\n", - "In the resulting screen, you will see three tabs:\n", - "- **Jupyter**\n", - "- **Terminal**\n", - "- **Raw URL**\n", - "\n", - "Select the **Terminal** tab and copy the provided command.\n", - "\n", - "#### **Step 4: Paste and Run**\n", - "\n", - "Paste the copied command into the notebook cell below and run it. This will download and extract the dataset into your environment, making it ready for use in the quantization process.\n", - "\n", - "Make sure to prefix the command with \"`!`\" so it executes properly in this Jupyter Notebook environment." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "8bf75c9dcb328c84", - "metadata": {}, - "outputs": [], - "source": [ - "!curl -L \"https://universe.roboflow.com/ds/FaF3HbDmF7?key=iMoJR25O9H\" > roboflow.zip; unzip roboflow.zip; rm roboflow.zip" - ] - }, - { - "cell_type": "markdown", - "id": "72bad9cac670f1ab", - "metadata": {}, - "source": [ - "### RKNN Conversion Script\n", - "\n", - "To quantize and convert the ONNX model to RKNN format, run the `create_rknn.py` script with the appropriate arguments. 
The `--model_path` argument should point to your exported ONNX model from Step 1, and `--img_dir` must reference a valid directory containing either a dataset or a set of images to be used for quantization.\n", - "\n", - "##### *Note*\n", - "\n", - "If you followed the Roboflow dataset download instructions from the previous section, the dataset will have been extracted to your **current working directory**. In that case, you can simply set `--img_dir` to \"`.`\" to reference the current directory.\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "b09656dd", - "metadata": {}, - "outputs": [], - "source": [ - "%run create_rknn.py --img_dir ./datasets --model_path weights.onnx" - ] - }, - { - "cell_type": "markdown", - "id": "5b3a6806", - "metadata": {}, - "source": [ - "And that’s it! Your RKNN model file is now ready for deployment on an Orange Pi." - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": ".venv", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.11.13" - } - }, - "nbformat": 4, - "nbformat_minor": 5 -}