Complete tests for workers, with source file formats ONNX and TFLite

This commit is contained in:
warrenchen 2026-01-29 03:04:41 +00:00
parent 0000f19d5e
commit e93a1a5996
92 changed files with 517 additions and 81 deletions

View File

@ -37,6 +37,7 @@ ONNX → BIE → NEF. The system uses the Scheduler as the control plane, together with the Worker Pool and
## Notes
- docs/Design.md is the single source of truth for the design.
- apps/ and infra are directories still under implementation and may change later.
- ktc carries local patches; see docs/KTC_PATCHES.md (re-apply and re-test them whenever ktc is updated).
## Directory layout (overview)
```

View File

@ -144,6 +144,44 @@ error:
---
## 4.1 Working Directory and Worker I/O Spec (as implemented)
The following is the on-disk layout and worker interaction spec used by the implementation (it maps one-to-one to the MinIO paths):
### 4.1.1 Working directory
- The API allocates a `task_id` for each accepted task.
- The API places the user-uploaded file into the working directory:
  - `{base_path}/{task_id}/`
- Reference images always go under `ref_images/`.
```
{base_path}/{task_id}/
    <single_input_file>       # the only input file (extension not fixed)
    ref_images/
        <image files...>
```
### 4.1.2 ONNX Worker
- Input: the single file in the working directory (no assumptions about its file name / extension)
- Output: `out.onnx`
- Output location: the same working directory
### 4.1.3 BIE Worker
- Input: `out.onnx` + `ref_images/*`
- Output: `out.bie`
- Output location: the same working directory
### 4.1.4 NEF Worker
- Input: `out.bie`
- Output: `out.nef`
- Output location: the same working directory
### 4.1.5 Core / Toolchain path consistency
- The worker must pass the working-directory path down to the core.
- The core must point all toolchain-related paths at that working directory, so outputs and intermediate files land there (see the sketch below).
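A minimal sketch (plain Python, not code from the repo; the helper name and error handling are illustrative) of how a worker can follow this contract, assuming only the layout above:
```python
from pathlib import Path

def resolve_task_inputs(base_path: str, task_id: str) -> tuple[Path, list[Path]]:
    """Locate the single input file and the reference images for one task."""
    work_dir = Path(base_path) / str(task_id)
    inputs = [p for p in work_dir.iterdir() if p.is_file()]  # extension is not fixed
    if len(inputs) != 1:
        raise ValueError(f"expected exactly one input file in {work_dir}, found {len(inputs)}")
    ref_images = sorted((work_dir / "ref_images").glob("*"))  # consumed by the BIE stage
    return inputs[0], ref_images

# Each worker then writes its result next to the input, e.g. work_dir / "out.onnx".
```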
---
## 5. Failure behavior (simplified)
- Worker exception → report fail

37
docs/KTC_PATCHES.md Normal file
View File

@ -0,0 +1,37 @@
# KTC Local Patch Notes
This file records local changes made to `ktc` so future updates can be re-applied and re-tested.
## 2026-01-28: Prefer prebuilt toolchain binaries when USE_PREBUILD is set
### Why
- Toolchain binaries/scripts were hard-coded to `/workspace/libs_V2` or `/workspace/libs`.
- In this repo, we want to use `toolchain/prebuild` without modifying those system paths.
### Changes
1) **Add `_resolve_compiler_paths()` helper**
File: `ktc/toolchain.py`
- If `USE_PREBUILD` is set and points to a directory, use:
- `prebuild/gen_config.py`
- `prebuild/batch_compile`
- `prebuild/kneron_nef_utils`
- `prebuild/opt_compile` (via `OPT_COMPILE_DIR`)
- Otherwise falls back to `LIBS_V1_FOLDER`/`LIBS_V2_FOLDER`.
2) **Use `_resolve_compiler_paths()` in these functions:**
- `gen_config_for_single_model()` → uses `gen_config.py` from resolved path and sets `OPT_COMPILE_DIR`.
- `generate_batch_conf()` → uses `gen_config.py` from resolved path.
- `encrypt_compile()` → uses `batch_compile` (both 520 and non-520 cases) and `kneron_nef_utils` (for 730).
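A minimal usage sketch (the prebuild path below is illustrative, not fixed by this repo): export `USE_PREBUILD` before the compile entry points run, which is what the tests' conftest does.
```python
import os

# Point ktc at the prebuilt toolchain; the path is an assumption for illustration.
os.environ.setdefault("USE_PREBUILD", "/repo/toolchain/prebuild")

import ktc  # subsequent compile calls resolve gen_config.py, batch_compile and kneron_nef_utils from that directory
```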
### Reapply checklist (if KTC updated)
- Re-add `_resolve_compiler_paths()` helper to `ktc/toolchain.py`.
- Rewire the call sites listed above to use the helper.
### Tests to run
```
pytest -s ./tests/workers/test_bie_core.py
pytest -s ./tests/workers/test_nef_core.py
```
### Notes
- `kneron_nef_utils` must exist in `toolchain/prebuild/`. If missing, copy it from `libs_V2/compiler/`.

View File

@ -1,5 +1,8 @@
import onnx
import kneronnxopt
import os
import sys
from pathlib import Path
# Create a dummy function to print a message when the function is not defined in the current environment.
def print_function_not_defined_message(*args, **kwargs):
@ -8,9 +11,91 @@ def print_function_not_defined_message(*args, **kwargs):
)
return None
-kera2onnx_flow = print_function_not_defined_message
-caffe2onnx_flow = print_function_not_defined_message
-tflite2onnx_flow = print_function_not_defined_message
def kera2onnx_flow(keras_model_path: str, optimize: int = 0, input_shape=None) -> onnx.ModelProto:
converter_root = Path(__file__).resolve().parents[1] / "libs" / "ONNX_Convertor" / "keras-onnx"
if not converter_root.exists():
raise FileNotFoundError(f"keras-onnx converter not found: {converter_root}")
converter_path = str(converter_root)
if converter_path not in sys.path:
sys.path.insert(0, converter_path)
try:
import onnx_keras # type: ignore
except Exception as exc:
raise RuntimeError(
"kera2onnx_flow requires the keras-onnx converter and its dependencies (keras/tensorflow)."
) from exc
onnx_keras.set_duplicate_weights(True)
converter = onnx_keras.frontend.KerasFrontend()
converter.loadFromFile(keras_model_path)
return converter.convertToOnnx(optimize, input_shape)
def caffe2onnx_flow(caffe_prototxt_path: str, caffe_model_path: str) -> onnx.ModelProto:
converter_root = Path(__file__).resolve().parents[1] / "libs" / "ONNX_Convertor" / "caffe-onnx"
if not converter_root.exists():
raise FileNotFoundError(f"caffe-onnx converter not found: {converter_root}")
converter_path = str(converter_root)
if converter_path not in sys.path:
sys.path.insert(0, converter_path)
try:
import onnx_caffe # type: ignore
except Exception as exc:
raise RuntimeError(
"caffe2onnx_flow requires the caffe-onnx converter and its dependencies (caffe)."
) from exc
converter = onnx_caffe.frontend.CaffeFrontend()
converter.loadFromFile(caffe_prototxt_path, caffe_model_path)
return converter.convertToOnnx()
def tflite2onnx_flow(tflite_path: str, release_mode: bool = True, bottom_nodes=None) -> onnx.ModelProto:
if bottom_nodes is None:
bottom_nodes = []
converter_root = Path(__file__).resolve().parents[1] / "libs" / "ONNX_Convertor" / "tflite-onnx" / "onnx_tflite"
if not converter_root.exists():
raise FileNotFoundError(f"tflite2onnx converter not found: {converter_root}")
converter_path = str(converter_root)
if converter_path not in sys.path:
sys.path.insert(0, converter_path)
try:
import tflite2onnx # type: ignore
except Exception as exc:
raise RuntimeError(
"tflite2onnx_flow requires the tflite-onnx converter and its dependencies (tensorflow)."
) from exc
# Compatibility: newer TF requires subgraph_index in _get_tensor_details.
try:
import tensorflow as tf # type: ignore
if not getattr(tf.lite.Interpreter, "_ktc_compat_patched", False):
_orig_get_tensor_details = tf.lite.Interpreter._get_tensor_details
def _get_tensor_details_compat(self, tensor_index, *args, **kwargs):
if args or "subgraph_index" in kwargs:
return _orig_get_tensor_details(self, tensor_index, *args, **kwargs)
try:
return _orig_get_tensor_details(self, tensor_index)
except TypeError:
return _orig_get_tensor_details(self, tensor_index, 0)
tf.lite.Interpreter._get_tensor_details = _get_tensor_details_compat # type: ignore
tf.lite.Interpreter._ktc_compat_patched = True # type: ignore
except Exception:
pass
output_dir = os.environ.get("KTC_WORKDIR") or os.environ.get("KTC_OUTPUT_DIR") or "/tmp"
os.makedirs(output_dir, exist_ok=True)
output_path = str(Path(output_dir) / "tflite_converted.onnx")
return tflite2onnx.main(tflite_path, output_path, add_transpose_for_channel_last_first_issue=not release_mode, bottom_nodes_name=bottom_nodes)
def torch_exported_onnx_flow(
m: onnx.ModelProto, disable_fuse_bn=False

View File

@ -26,6 +26,28 @@ from sys_flow.run import gen_fx_model as gen_fx_model_v1
from sys_flow_v2.run import gen_fx_model as gen_fx_model_v2
from sys_flow_v2.run import gen_opt_model_v2
def _resolve_compiler_paths(platform: str):
"""Resolve compiler binary/script paths, preferring USE_PREBUILD when set."""
prebuild = os.environ.get("USE_PREBUILD")
if prebuild and os.path.isdir(prebuild):
return {
"bin_dir": prebuild,
"opt_dir": os.path.join(prebuild, "opt_compile"),
"gen_config": os.path.join(prebuild, "gen_config.py"),
"batch_compile": os.path.join(prebuild, "batch_compile"),
"kneron_nef_utils": os.path.join(prebuild, "kneron_nef_utils"),
}
libs_folder = LIBS_V2_FOLDER if platform == "730" else LIBS_V1_FOLDER
compiler_dir = os.path.join(libs_folder, "compiler")
return {
"bin_dir": compiler_dir,
"opt_dir": os.path.join(compiler_dir, "opt_compile"),
"gen_config": os.path.join(compiler_dir, "gen_config.py"),
"batch_compile": os.path.join(compiler_dir, "batch_compile"),
"kneron_nef_utils": os.path.join(compiler_dir, "kneron_nef_utils"),
}
if onnx.__version__ == "1.7.0":
SUPPORTED_PLATFORMS = ["520", "720", "530", "630"]
else:
@ -98,16 +120,17 @@ class ModelConfig:
self.debug = debug
self.input_fmt = input_fmt
if onnx_model is not None:
-self.onnx_path = os.path.join(RESULT_FOLDER, "input.onnx")
-if os.path.isfile(RESULT_FOLDER):
output_dir = os.environ.get("KTC_OUTPUT_DIR", RESULT_FOLDER)
self.onnx_path = os.path.join(output_dir, "input.onnx")
if os.path.isfile(output_dir):
logging.error(
-f"Folder {RESULT_FOLDER} cannot be created. File with same name exists."
f"Folder {output_dir} cannot be created. File with same name exists."
)
raise ValueError(
-f"Folder {RESULT_FOLDER} cannot be created. File with same name exists."
f"Folder {output_dir} cannot be created. File with same name exists."
)
-elif not os.path.isdir(RESULT_FOLDER):
-os.makedirs(RESULT_FOLDER)
elif not os.path.isdir(output_dir):
os.makedirs(output_dir)
onnx.save(onnx_model, self.onnx_path)
if self.onnx_path is None and self.bie_path is None:
logging.error(
@ -383,16 +406,12 @@ def gen_config_for_single_model(platform: str, model: str, id: str, hw_cut_opt=F
else:
logging.error("input_fmt should be str or dict.")
exit(1)
-# select libs folder based on platform
-if platform == "730":
-LIBS_FOLDER = LIBS_V2_FOLDER
-else:
-LIBS_FOLDER = LIBS_V1_FOLDER
-os.environ["OPT_COMPILE_DIR"] = LIBS_FOLDER + "/compiler/opt_compile"
compiler_paths = _resolve_compiler_paths(platform)
os.environ["OPT_COMPILE_DIR"] = compiler_paths["opt_dir"]
additional_json = json.dumps(additional_config)
commands = [
"python",
-LIBS_FOLDER + "/compiler/gen_config.py",
compiler_paths["gen_config"],
"-t",
platform,
"-v",
@ -451,14 +470,11 @@ def generate_batch_conf(
raise ValueError("Invalid version for batch compiler: " + platform) raise ValueError("Invalid version for batch compiler: " + platform)
# Generate config file using compiler gen_config.py script # Generate config file using compiler gen_config.py script
if platform == "730": compiler_paths = _resolve_compiler_paths(platform)
LIBS_FOLDER = LIBS_V2_FOLDER
else:
LIBS_FOLDER = LIBS_V1_FOLDER
subprocess.run( subprocess.run(
[ [
"python", "python",
LIBS_FOLDER + "/compiler/gen_config.py", compiler_paths["gen_config"],
"-t", "-t",
platform, platform,
"-v", "-v",
@ -618,6 +634,7 @@ def encrypt_compile(
# Generate config and run
if platform == "520":
compiler_paths = _resolve_compiler_paths(platform)
generate_batch_conf(
enc_config,
"520",
@ -635,7 +652,7 @@ def encrypt_compile(
)
os.chdir(output_dir)
commands = [
-LIBS_V1_FOLDER + "/compiler/batch_compile",
compiler_paths["batch_compile"],
"-T",
"520",
output_dir + "/batch_compile_bconfig.json",
@ -660,12 +677,9 @@ def encrypt_compile(
hw_cut_opt=hardware_cut_opt,
)
os.chdir(output_dir)
-if platform == "730":
-LIBS_FOLDER = LIBS_V2_FOLDER
-else:
-LIBS_FOLDER = LIBS_V1_FOLDER
compiler_paths = _resolve_compiler_paths(platform)
commands = [
-LIBS_FOLDER + "/compiler/batch_compile",
compiler_paths["batch_compile"],
"-T",
platform,
output_dir + "/batch_compile_bconfig.json",
@ -679,7 +693,7 @@ def encrypt_compile(
# Use kne to generate nef file
subprocess.run(
[
-LIBS_V2_FOLDER + "/compiler/kneron_nef_utils",
compiler_paths["kneron_nef_utils"],
"-G",
"--kne",
output_dir + "/models_730.kne",
@ -693,7 +707,7 @@ def encrypt_compile(
# Update model info
subprocess.run(
[
-LIBS_V2_FOLDER + "/compiler/kneron_nef_utils",
compiler_paths["kneron_nef_utils"],
"-U",
output_dir + "/models_730.nef",
"--model_info_version",

View File

@ -11,8 +11,8 @@ if onnx.__version__ == '1.7.0':
else:
from extract_bie_info import extract_bie_ioinfo
-RESULT_FOLDER = "/data1/kneron_flow"
-TMP_FOLDER = "/workspace/.tmp"
RESULT_FOLDER = os.environ.get("KTC_OUTPUT_DIR", "/data1/kneron_flow")
TMP_FOLDER = os.environ.get("KTC_WORKDIR", "/workspace/.tmp")
LIBS_V1_FOLDER = "/workspace/libs"
LIBS_V2_FOLDER = "/workspace/libs_V2"
SCRIPT_FOLDER = "/workspace/scripts"

View File

@ -401,7 +401,8 @@ def main(model_path, model_save_path=None, add_transpose_for_channel_last_first_
# add generated time to model meta data
helper.set_model_props(cnn_model, {'Generated Time': datetime.utcnow().strftime("%m/%d/%Y, %H:%M:%S") + " (UTC+0)"})
-cnn_model = onnx.utils.polish_model(cnn_model)
if hasattr(onnx.utils, "polish_model"):
cnn_model = onnx.utils.polish_model(cnn_model)
# save
if model_save_path is not None:
@ -454,4 +455,3 @@ if __name__ == '__main__':
logging.getLogger('tflite2onnx').error(e)
logging.info('------------ end ------------------------')

View File

@ -124,10 +124,10 @@ def create_constant_node(node_name, shape, data):
# default data type
data_type = onnx.helper.TensorProto.FLOAT
-if data.dtype == np.int or data.dtype == np.int64:
if np.issubdtype(data.dtype, np.integer):
data_type = onnx.helper.TensorProto.INT64
-elif data.dtype == np.float or data.dtype == np.float64:
elif np.issubdtype(data.dtype, np.floating):
data_type = onnx.helper.TensorProto.FLOAT
else:

View File

@ -13,11 +13,12 @@ python-multipart
pyzipper
# Model processing dependencies
-onnx
onnx==1.14.1
onnxruntime
onnxoptimizer
tf2onnx
tflite2onnx
tensorflow==2.16.2
keras2onnx
onnxsim; python_version < "3.12"
onnx-tf
@ -35,6 +36,7 @@ seaborn
# Utilities
python-dotenv
python-igraph
# Development dependencies
pytest

View File

@ -2,7 +2,6 @@ import os
import shutil
from typing import Dict, Any
-import ktc
import onnx
try:
@ -27,6 +26,14 @@ def process_bie_core(
output_dir = os.path.dirname(output_path)
if output_dir:
os.makedirs(output_dir, exist_ok=True)
work_root = parameters.get("work_dir") or output_dir
work_dir = os.path.join(work_root, "_work")
res_dir = os.path.join(work_root, "res")
os.environ.setdefault("KTC_OUTPUT_DIR", work_root)
os.environ.setdefault("KTC_WORKDIR", work_dir)
os.environ.setdefault("KTC_SCRIPT_RES", res_dir)
import ktc
model = onnx.load(onnx_file_path)
model = ktc.onnx_optimizer.onnx2onnx_flow(model, eliminate_tail=True, opt_matmul=True)
@ -62,7 +69,8 @@ def process_bie_core(
bie_model_path = km.analysis({input_node_name: img_list}, output_dir=output_dir or ".")
if os.path.abspath(bie_model_path) != os.path.abspath(output_path):
-shutil.copy2(str(bie_model_path), output_path)
# Move to avoid keeping duplicate large binaries on disk.
shutil.move(str(bie_model_path), output_path)
return {
"file_path": output_path,

View File

@ -2,7 +2,6 @@ import os
import shutil
from typing import Dict, Any
-import ktc
def process_nef_core(
@ -17,6 +16,14 @@ def process_nef_core(
output_dir = os.path.dirname(output_path)
if output_dir:
os.makedirs(output_dir, exist_ok=True)
work_root = parameters.get("work_dir") or output_dir
work_dir = os.path.join(work_root, "_work")
res_dir = os.path.join(work_root, "res")
os.environ.setdefault("KTC_OUTPUT_DIR", work_root)
os.environ.setdefault("KTC_WORKDIR", work_dir)
os.environ.setdefault("KTC_SCRIPT_RES", res_dir)
import ktc
km = ktc.ModelConfig(
parameters["model_id"],
@ -25,8 +32,10 @@ def process_nef_core(
bie_path=bie_file_path,
)
-nef_model_path = ktc.compile([km])
-shutil.copy2(str(nef_model_path), output_path)
nef_model_path = ktc.compile([km], output_dir=output_dir or None)
if os.path.abspath(nef_model_path) != os.path.abspath(output_path):
# Move to avoid keeping duplicate large binaries on disk.
shutil.move(str(nef_model_path), output_path)
return {
"file_path": output_path,

View File

@ -1,7 +1,6 @@
import os
from typing import Dict, Any
-import ktc
import onnx
@ -17,6 +16,14 @@ def process_onnx_core(
output_dir = os.path.dirname(output_path)
if output_dir:
os.makedirs(output_dir, exist_ok=True)
work_root = parameters.get("work_dir") or output_dir
work_dir = os.path.join(work_root, "_work")
res_dir = os.path.join(work_root, "res")
os.environ.setdefault("KTC_OUTPUT_DIR", work_root)
os.environ.setdefault("KTC_WORKDIR", work_dir)
os.environ.setdefault("KTC_SCRIPT_RES", res_dir)
import ktc
file_extension = os.path.splitext(file_path)[1].lower()
if file_extension == ".tflite":

View File

@ -31,3 +31,18 @@ if e2e_flow_path.is_dir():
prebuild_path = repo_root / "toolchain" / "prebuild"
if prebuild_path.is_dir():
os.environ.setdefault("USE_PREBUILD", str(prebuild_path))
prebuild_lib = prebuild_path / "lib"
if prebuild_lib.is_dir():
existing_ld = os.environ.get("LD_LIBRARY_PATH", "")
ld_parts = [str(prebuild_lib)]
if existing_ld:
ld_parts.append(existing_ld)
os.environ["LD_LIBRARY_PATH"] = ":".join(ld_parts)
# Force pytest/tempfile to use repo-local temp dir instead of system /tmp.
repo_tmp = repo_root / ".tmp"
repo_tmp.mkdir(parents=True, exist_ok=True)
os.environ.setdefault("TMPDIR", str(repo_tmp))
# Avoid multiprocessing Manager sockets in restricted test environments.
os.environ.setdefault("KTC_DISABLE_MP", "1")

BIN — new vendored binary test fixtures (content not shown):
- tests/fixtures/bie/input.bie
- tests/fixtures/bie/input.onnx
- tests/fixtures/bie_images/{0–13, 63–104}.jpg — 56 reference images, ≈6.6–7.9 KiB each
- tests/fixtures/onnx/input.tflite

View File

@ -0,0 +1,79 @@
from pathlib import Path
import random
import shutil
import sys
ROOT = Path(__file__).resolve().parents[2]
sys.path.insert(0, str(ROOT))
from services.workers.onnx.core import process_onnx_core
from services.workers.bie.core import process_bie_core
from services.workers.nef.core import process_nef_core
def test_worker_flow_e2e_uses_single_workdir():
base_outputs = ROOT / "tests" / "fixtures" / "outputs"
base_outputs.mkdir(parents=True, exist_ok=True)
task_id = None
work_dir = None
for _ in range(50):
candidate = random.randint(100, 999)
candidate_dir = base_outputs / str(candidate)
if not candidate_dir.exists():
task_id = candidate
work_dir = candidate_dir
break
assert task_id is not None, "Unable to allocate a unique task id"
work_dir.mkdir(parents=True, exist_ok=False)
src_onnx_dir = ROOT / "tests" / "fixtures" / "onnx"
src_images_dir = ROOT / "tests" / "fixtures" / "bie_images"
ref_images_dir = work_dir / "ref_images"
onnx_files = [p for p in src_onnx_dir.iterdir() if p.is_file() and p.suffix == ".onnx"]
assert len(onnx_files) == 1, "Expected a single ONNX fixture file"
input_file = work_dir / onnx_files[0].name
shutil.copy2(onnx_files[0], input_file)
shutil.copytree(src_images_dir, ref_images_dir)
work_inputs = [p for p in work_dir.iterdir() if p.is_file()]
assert len(work_inputs) == 1, "Working directory must contain a single input file"
work_input_file = work_inputs[0]
onnx_output = work_dir / "out.onnx"
onnx_params = {"model_id": 10, "version": "e2e", "platform": "520", "work_dir": str(work_dir)}
onnx_result = process_onnx_core(
{"file_path": str(work_input_file)},
str(onnx_output),
onnx_params,
)
assert onnx_output.exists()
assert onnx_result["file_path"] == str(onnx_output)
assert onnx_result["file_size"] > 0
bie_output = work_dir / "out.bie"
bie_params = {"model_id": 11, "version": "e2e", "platform": "530", "work_dir": str(work_dir)}
bie_result = process_bie_core(
{"onnx_file_path": str(onnx_output), "data_dir": str(ref_images_dir)},
str(bie_output),
bie_params,
)
assert bie_output.exists()
assert bie_result["file_path"] == str(bie_output)
assert bie_result["file_size"] > 0
nef_output = work_dir / "out.nef"
nef_params = {"model_id": 12, "version": "e2e", "platform": "730", "work_dir": str(work_dir)}
nef_result = process_nef_core(
{"bie_file_path": str(bie_output)},
str(nef_output),
nef_params,
)
assert nef_output.exists()
assert nef_result["file_path"] == str(nef_output)
assert nef_result["file_size"] > 0

View File

@ -0,0 +1,79 @@
from pathlib import Path
import random
import shutil
import sys
ROOT = Path(__file__).resolve().parents[2]
sys.path.insert(0, str(ROOT))
from services.workers.onnx.core import process_onnx_core
from services.workers.bie.core import process_bie_core
from services.workers.nef.core import process_nef_core
def test_worker_flow_e2e_tflite_uses_single_workdir():
base_outputs = ROOT / "tests" / "fixtures" / "outputs"
base_outputs.mkdir(parents=True, exist_ok=True)
task_id = None
work_dir = None
for _ in range(50):
candidate = random.randint(100, 999)
candidate_dir = base_outputs / str(candidate)
if not candidate_dir.exists():
task_id = candidate
work_dir = candidate_dir
break
assert task_id is not None, "Unable to allocate a unique task id"
work_dir.mkdir(parents=True, exist_ok=False)
src_onnx_dir = ROOT / "tests" / "fixtures" / "onnx"
src_images_dir = ROOT / "tests" / "fixtures" / "bie_images"
ref_images_dir = work_dir / "ref_images"
tflite_files = [p for p in src_onnx_dir.iterdir() if p.is_file() and p.suffix == ".tflite"]
assert len(tflite_files) == 1, "Expected a single TFLite fixture file"
input_file = work_dir / tflite_files[0].name
shutil.copy2(tflite_files[0], input_file)
shutil.copytree(src_images_dir, ref_images_dir)
work_inputs = [p for p in work_dir.iterdir() if p.is_file()]
assert len(work_inputs) == 1, "Working directory must contain a single input file"
work_input_file = work_inputs[0]
onnx_output = work_dir / "out.onnx"
onnx_params = {"model_id": 20, "version": "e2e-tflite", "platform": "520", "work_dir": str(work_dir)}
onnx_result = process_onnx_core(
{"file_path": str(work_input_file)},
str(onnx_output),
onnx_params,
)
assert onnx_output.exists()
assert onnx_result["file_path"] == str(onnx_output)
assert onnx_result["file_size"] > 0
bie_output = work_dir / "out.bie"
bie_params = {"model_id": 21, "version": "e2e-tflite", "platform": "530", "work_dir": str(work_dir)}
bie_result = process_bie_core(
{"onnx_file_path": str(onnx_output), "data_dir": str(ref_images_dir)},
str(bie_output),
bie_params,
)
assert bie_output.exists()
assert bie_result["file_path"] == str(bie_output)
assert bie_result["file_size"] > 0
nef_output = work_dir / "out.nef"
nef_params = {"model_id": 22, "version": "e2e-tflite", "platform": "730", "work_dir": str(work_dir)}
nef_result = process_nef_core(
{"bie_file_path": str(bie_output)},
str(nef_output),
nef_params,
)
assert nef_output.exists()
assert nef_result["file_path"] == str(nef_output)
assert nef_result["file_size"] > 0

View File

@ -0,0 +1,26 @@
from pathlib import Path
import sys
ROOT = Path(__file__).resolve().parents[2]
sys.path.insert(0, str(ROOT))
from services.workers.onnx.core import process_onnx_core
def test_process_tflite_core_creates_output():
input_file = ROOT / "tests" / "fixtures" / "onnx" / "input.tflite"
output_file = ROOT / "tests" / "fixtures" / "outputs" / "out_from_tflite.onnx"
assert input_file.exists(), f"Missing input file: {input_file}"
params = {"model_id": 4, "version": "tflite", "platform": "520"}
result = process_onnx_core(
{"file_path": str(input_file)},
str(output_file),
params,
)
assert output_file.exists()
assert result["file_path"] == str(output_file)
assert result["file_size"] > 0

toolchain/prebuild — mode changes, 0 lines changed (Normal file → Executable file):
- batch_compile
- compile
- dataConverter
- gen_config.py
- get_run_val.py
- model_converter
- run_fix_inference
- run_updater
Plus four changed binary files (not shown in this view).

View File

@ -162,7 +162,8 @@ def binary_in_toolchain():
d_binary["ip_eval"] = {} d_binary["ip_eval"] = {}
for platform in MODE_HARDWARE: for platform in MODE_HARDWARE:
d_binary["ip_eval"][platform] = f"/workspace/scripts/res/ip_config_{platform}.json" res_dir = os.environ.get("KTC_SCRIPT_RES", "/workspace/scripts/res")
d_binary["ip_eval"][platform] = f"{res_dir}/ip_config_{platform}.json"
d_binary["csim"] = {} d_binary["csim"] = {}
for platform in MODE_HW_LIMIT["inc_in_toolchain"]: for platform in MODE_HW_LIMIT["inc_in_toolchain"]:

View File

@ -262,11 +262,16 @@ class report:
def __init__(self):
self.err_log = defaultdict(dict)
-manager = multiprocessing.Manager()
-# below are mutli-process safe.
manager = None
if os.environ.get("KTC_DISABLE_MP") != "1":
try:
manager = multiprocessing.Manager()
except Exception:
manager = None
# below are mutli-process safe when manager is available.
# but don't use dict of dict, or complex object. which is not safe.
-self.info_collection = manager.list() # collect snr, FPS ...
-self.time_collection = manager.list()
self.info_collection = manager.list() if manager else [] # collect snr, FPS ...
self.time_collection = manager.list() if manager else []
def add_err_record(self, err):
"""Used as callback after multi-processing.
@ -735,10 +740,14 @@ def gen_fx_model(
# working directory
# NOTE: p_working must be same as specified in template/regression_tc.json/path/cases
-p_working = pathlib.Path("/workspace/.tmp/models")
env_workdir = os.environ.get("KTC_WORKDIR")
p_working = pathlib.Path(env_workdir or "/workspace/.tmp/models")
p_working.mkdir(parents=True, exist_ok=True)
# prepare working_model_folder
env_output = os.environ.get("KTC_OUTPUT_DIR")
if env_output and p_output == "/data1/kneron_flow":
p_output = env_output
p_export = pathlib.Path(p_output)
p_export.mkdir(parents=True, exist_ok=True)
@ -774,6 +783,10 @@ def gen_fx_model(
p_template = p_script / "template" / "regression_tc.json"
with open(p_template, "r") as f:
template = json.load(f)
if env_workdir:
path_cfg = template.setdefault("path", {})
path_cfg["cases"] = str(p_working)
path_cfg["report"] = str(p_working / "report")
# verify knerex parameters
# choose from mmse / percentage
@ -837,7 +850,9 @@ def gen_fx_model(
# override the ip_evaluator in toolchain.
# s1.json will based on this file. if necessary.
ip_config = gen_ip_config(platform, weight_bandwidth, dma_bandwidth)
-fn_ip_config = "/workspace/scripts/res/ip_config_{}.json".format(platform)
res_dir = os.environ.get("KTC_SCRIPT_RES", "/workspace/scripts/res")
os.makedirs(res_dir, exist_ok=True)
fn_ip_config = os.path.join(res_dir, f"ip_config_{platform}.json")
with open(fn_ip_config, "w") as f:
json.dump(ip_config, f, indent=4, sort_keys=True)
@ -901,9 +916,10 @@ def gen_fx_model(
try:
p_model = prepare_model_ip_eval_only()
rfs, success_list = run_flow(p_json, [m_name])
-fn_to_release = rfs[0] # only one model
fn_to_release = rfs[0] if rfs else {}
success = True, fn_to_release
except Exception as e:
print(f"[ip_eval_only] exception: {e}")
success = False, None
return success

View File

@ -158,7 +158,8 @@ def binary_in_toolchain():
d_binary["ip_eval"] = {} d_binary["ip_eval"] = {}
for platform in MODE_HARDWARE: for platform in MODE_HARDWARE:
d_binary["ip_eval"][platform] = f"/workspace/scripts/res/ip_config_{platform}.json" res_dir = os.environ.get("KTC_SCRIPT_RES", "/workspace/scripts/res")
d_binary["ip_eval"][platform] = f"{res_dir}/ip_config_{platform}.json"
d_binary["csim"] = {} d_binary["csim"] = {}
for platform in MODE_HW_LIMIT["inc_in_toolchain"]: for platform in MODE_HW_LIMIT["inc_in_toolchain"]:

View File

@ -145,6 +145,9 @@ def gen_opt_model(
generate a new user_config.json and run -> final.opt.onnx
"""
env_output = os.environ.get("KTC_OUTPUT_DIR")
if env_output and p_output == "/data1/kneron_flow":
p_output = env_output
assert platform == 730, f"Only platform 730 is supported."
assert optimize == "o0", f"Only optimize o0 is supported."
@ -173,7 +176,8 @@ def gen_opt_model(
msg = f"Given onnx {p_onnx} does not exist!" msg = f"Given onnx {p_onnx} does not exist!"
raise FileExistsError(msg) raise FileExistsError(msg)
p_working = Path("/workspace/.tmp/models") env_workdir = os.environ.get("KTC_WORKDIR")
p_working = Path(env_workdir or "/workspace/.tmp/models")
p_model = p_working / m_name / m_name p_model = p_working / m_name / m_name
p_model_16 = p_working / f"{m_name}_16" / f"{m_name}_16" p_model_16 = p_working / f"{m_name}_16" / f"{m_name}_16"
p_model_light = p_working / f"{m_name}_light" / f"{m_name}_light" p_model_light = p_working / f"{m_name}_light" / f"{m_name}_light"
@ -343,6 +347,12 @@ def gen_opt_model_v2(
Fraction of conv FLOPs that use 15-bit: flops_ratio=0 means all convs use 8-bit; flops_ratio=1.0 means all convs use 15-bit.
"""
env_output = os.environ.get("KTC_OUTPUT_DIR")
env_workdir = os.environ.get("KTC_WORKDIR")
if env_output and p_output == "/data1/kneron_flow":
p_output = env_output
if env_workdir and p_cache == "/workspace/.tmp/models":
p_cache = env_workdir
gen_fx_model_params = {
"p_onnx": p_onnx,
"np_txt": np_txt,
@ -783,10 +793,16 @@ def gen_fx_model(
# working directory
# NOTE: p_working must be same as specified in template/regression_tc.json/path/cases
env_workdir = os.environ.get("KTC_WORKDIR")
if env_workdir and p_cache == "/workspace/.tmp/models":
p_cache = env_workdir
p_working = Path(p_cache)
p_working.mkdir(parents=True, exist_ok=True)
# prepare working_model_folder
env_output = os.environ.get("KTC_OUTPUT_DIR")
if env_output and p_output == "/data1/kneron_flow":
p_output = env_output
p_export = Path(p_output)
p_export.mkdir(parents=True, exist_ok=True)
@ -905,7 +921,9 @@ def gen_fx_model(
# override the ip_evaluator in toolchain.
# s1.json will based on this file. if necessary.
ip_config = gen_ip_config(platform, weight_bandwidth, dma_bandwidth)
-fn_ip_config = f"/workspace/scripts/res/ip_config_{platform}.json"
res_dir = os.environ.get("KTC_SCRIPT_RES", "/workspace/scripts/res")
os.makedirs(res_dir, exist_ok=True)
fn_ip_config = os.path.join(res_dir, f"ip_config_{platform}.json")
futils.dict2json(ip_config, fn_ip_config)
update_config_ip_val(weight_bandwidth, dma_bandwidth, platform)