# kneron_model_converter/scripts/utils/generate_batch_config.py
#
# Helpers that generate batch-compiler configuration JSON files for
# Kneron hardware platforms.

from .common import *
from .load_config import BatchConfig
import json
import subprocess
import logging
# Hardware platform identifiers accepted by generate_batch_conf below.
# NOTE(review): gen_config_for_single_model only proceeds for "720" —
# the other platforms listed here are validated for batch config only.
supported_hardware_platforms = ["520", "720", "530", "630", "730"]
def gen_config_for_single_model(
    platform: str, model: str, id: str, template: str = None
):
    """Generate a per-model compiler config for hardware-cut optimization.

    Invokes the toolchain's ``gen_config.py`` for a single model and writes
    the resulting config to ``/tmp/<id>.json``.

    Args:
        platform: Target hardware platform. Only ``"720"`` is supported
            here; any other value logs an error and terminates the process
            with ``exit(1)``.
        model: Path to the model file, forwarded via ``-m``.
        id: Identifier used to name the output file. (Shadows the builtin
            ``id``; name kept unchanged for interface compatibility.)
        template: Optional template config path forwarded via ``-f``.

    Returns:
        The path of the generated config file, ``f"/tmp/{id}.json"``.
    """
    if platform == "720":
        eval_config = '{"ip_evaluator_cfg":"/workspace/scripts/res/ip_config_720.json"}'
    else:
        # Hard stop: no IP-evaluator config is available for other platforms.
        logging.error(
            f"Platform {platform} does not support hardware cut optimization yet."
        )
        exit(1)
    # NOTE(review): the original code had a `platform == "730"` branch using
    # LIBS_V2_FOLDER here, but it was unreachable — the guard above exits for
    # every platform except "720". Removed as dead code; if 730 support is
    # intended, the guard above must first provide an eval_config for it.
    commands = [
        "python",
        LIBS_FOLDER + "/compiler/gen_config.py",
        "-t",
        platform,
        "-v",
        "model_rel",
        "-m",
        model,
        "-a",
        eval_config,
        "-o",
        f"/tmp/{id}.json",
    ]
    if template is not None:
        commands.append("-f")
        commands.append(template)
    subprocess.run(commands)
    return f"/tmp/{id}.json"
def generate_batch_bconf(
    batch_config: BatchConfig, batch_conf_path: str, output_path: str, fw_cut_opt=None
):
    """Write a batch-compile "bconf" JSON describing every model in batch_config.

    Args:
        batch_config: Parsed batch configuration exposing ``model_list``,
            whose entries carry ``id``, ``version``, ``path`` and ``radix``.
        batch_conf_path: Path of the shared compile config; stored under
            ``"compile_cfg"`` and also forwarded as the template when
            fw_cut_opt is set.
        output_path: Destination path for the generated bconf JSON.
        fw_cut_opt: When not None, target platform string for firmware
            hardware-cut optimization; a per-model compile config is then
            generated for each model via gen_config_for_single_model.
    """
    model_list = []
    for batch_model in batch_config.model_list:
        model_dict = {
            "id": batch_model.id,
            "version": batch_model.version,
            "path": batch_model.path,
        }
        # .bie models carry quantization internally; other formats need an
        # explicit radix JSON alongside.
        if not batch_model.path.endswith("bie"):
            model_dict["radix_json"] = batch_model.radix
        if fw_cut_opt is not None:
            model_dict["compile_cfg"] = gen_config_for_single_model(
                fw_cut_opt, batch_model.path, str(batch_model.id), batch_conf_path
            )
        model_list.append(model_dict)
    bconf_dict = {"compile_cfg": batch_conf_path, "models": model_list}
    # Context manager guarantees the file is closed even if json.dump raises.
    with open(output_path, "w") as fp:
        json.dump(bconf_dict, fp)
def generate_batch_conf(
    encryption_config: Dict,
    platform: str,
    output_path: str,
    weight_compress=False,
    flatbuffer=True,
):
    """Generate the platform-wide batch compiler config via gen_config.py.

    Args:
        encryption_config: Dict with key ``"whether_encryption"`` (bool).
            When True, ``"encryption mode"`` selects the scheme:
            mode 1 (the default when absent) requires ``"encryption_key"``
            and ``"encryption_file"`` (plus optional
            ``"encryption_efuse_key"``); mode 2 requires only
            ``"encryption_efuse_key"``.
        platform: Target platform; must be in supported_hardware_platforms.
        output_path: Destination path for the generated config JSON.
        weight_compress: Forwarded as the ``"weight_compress"`` flag.
        flatbuffer: When False, sets ``"gen_setup_fbs": false`` to disable
            flatbuffer setup generation.

    Raises:
        ValueError: On an unknown encryption mode or unsupported platform.
    """
    additional_config = {}
    if encryption_config["whether_encryption"]:
        additional_config["encryption_flag"] = True
        # Mode defaults to 1 when "encryption mode" is absent.
        mode = encryption_config.get("encryption mode", 1)
        if mode == 1:
            additional_config["encryption_key"] = encryption_config["encryption_key"]
            additional_config["encryption_file"] = encryption_config["encryption_file"]
            if "encryption_efuse_key" in encryption_config:
                additional_config["encryption_efuse_key"] = encryption_config[
                    "encryption_efuse_key"
                ]
        elif mode == 2:
            additional_config["encryption_efuse_key"] = encryption_config[
                "encryption_efuse_key"
            ]
        else:
            raise ValueError("encryption mode can only be 1 or 2.")
    else:
        additional_config["encryption_flag"] = False
    additional_config["weight_compress"] = weight_compress
    if not flatbuffer:
        additional_config["gen_setup_fbs"] = False
    additional_json = json.dumps(additional_config)
    if platform not in supported_hardware_platforms:
        raise ValueError("Invalid version for batch compiler: " + platform)
    # 730 uses the v2 toolchain; every other platform uses the legacy one.
    # (Replaces two near-identical hard-coded command lists.)
    libs_folder = LIBS_V2_FOLDER if platform == "730" else LIBS_FOLDER
    subprocess.run(
        [
            "python",
            libs_folder + "/compiler/gen_config.py",
            "-t",
            platform,
            "-v",
            "model_rel",
            "-o",
            output_path,
            "-a",
            additional_json,
        ]
    )
def generate_batch_bconfig_with_models(
    models, batch_conf_path: str, output_path: str, fw_cut_opt=None
):
    """Write a batch-compile "bconf" JSON from already-analyzed model objects.

    Args:
        models: Iterable of model objects exposing ``id``, ``version``,
            ``bie_path`` and ``compiler_config_path``.
        batch_conf_path: Path of the shared compile config; stored under
            ``"compile_cfg"`` and forwarded as the template when fw_cut_opt
            is set and no per-model config exists.
        output_path: Destination path for the generated bconf JSON.
        fw_cut_opt: When not None, target platform string for firmware
            hardware-cut optimization used for models lacking an explicit
            compiler config.
    """
    model_list = []
    for batch_model in models:
        if batch_model.bie_path is None:
            # NOTE(review): this only logs and then still emits an entry with
            # "path": null in the output — presumably the pipeline should stop
            # before compiling; confirm whether this should abort instead.
            logging.error("Analysis is required before compile.")
        model_dict = {
            "id": batch_model.id,
            "version": batch_model.version,
            "path": batch_model.bie_path,
        }
        # An explicit per-model config wins over generating one on the fly.
        if batch_model.compiler_config_path is not None:
            model_dict["compile_cfg"] = batch_model.compiler_config_path
        elif fw_cut_opt is not None:
            model_dict["compile_cfg"] = gen_config_for_single_model(
                fw_cut_opt, batch_model.bie_path, str(batch_model.id), batch_conf_path
            )
        model_list.append(model_dict)
    bconf_dict = {"compile_cfg": batch_conf_path, "models": model_list}
    # Context manager guarantees the file is closed even if json.dump raises.
    with open(output_path, "w") as fp:
        json.dump(bconf_dict, fp)