jim800121chen efa67d59a4 Add web frontend, MinIO storage, monitoring, and docker-compose deployment
- Frontend: rewrite Home.vue to match backend POST /jobs API (remove single-stage options)
- Frontend: add Monitor page (/monitor) for queue and job monitoring
- Frontend: add job history with localStorage tracking (per-browser)
- Frontend: fix Nginx proxy rewrite (/api -> /) and add 500MB upload limit
- Backend: add MinIO storage support (STORAGE_BACKEND=minio) alongside local mode
- Backend: add GET /queues/stats API for queue monitoring
- Backend: fix download handler for MinIO (buffer mode for Node 18 compat)
- Workers: add S3/MinIO download/upload in consumer.py with isolated temp dirs
- Workers: add s3_storage.py helper with lifecycle rule (7-day TTL); see the sketch after the commit message
- Docker: add docker-compose.yml with all services (web, scheduler, redis, workers)
- Docker: ports mapped to 9500 (web) and 9501 (scheduler)
- Config: add .env to .gitignore to protect secrets

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
2026-04-01 15:04:09 +08:00
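The commit body mentions s3_storage.py and the S3/MinIO transfer logic in consumer.py without showing either. Below is a minimal boto3 sketch of the two ideas — a per-job temp dir for downloads and a bucket-wide 7-day expiration rule — written under assumptions: the env var names (S3_ENDPOINT, S3_ACCESS_KEY, S3_SECRET_KEY), their defaults, and the helper names are illustrative, not the code from this commit.

import os
import tempfile
from typing import List

import boto3

# Assumed env var names and defaults; the actual .env layout may differ.
s3 = boto3.client(
    "s3",
    endpoint_url=os.environ.get("S3_ENDPOINT", "http://minio:9000"),
    aws_access_key_id=os.environ.get("S3_ACCESS_KEY", "minioadmin"),
    aws_secret_access_key=os.environ.get("S3_SECRET_KEY", "minioadmin"),
)


def download_inputs(bucket: str, keys: List[str]) -> str:
    """Download job inputs into a fresh temp dir so concurrent jobs cannot collide."""
    workdir = tempfile.mkdtemp(prefix="job-")
    for key in keys:
        s3.download_file(bucket, key, os.path.join(workdir, os.path.basename(key)))
    return workdir


def apply_ttl(bucket: str, days: int = 7) -> None:
    """Expire every object after `days` days; MinIO honors S3 lifecycle rules."""
    s3.put_bucket_lifecycle_configuration(
        Bucket=bucket,
        LifecycleConfiguration={
            "Rules": [{
                "ID": f"expire-after-{days}d",
                "Status": "Enabled",
                "Filter": {"Prefix": ""},
                "Expiration": {"Days": days},
            }]
        },
    )

Since put_bucket_lifecycle_configuration replaces the bucket's entire lifecycle configuration, calling apply_ttl at worker startup is idempotent.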


"""
Stub implementations of worker core functions.
Used when WORKER_MODE=stub, allowing development and testing of
Scheduler / Queue / UI without requiring the Kneron Toolchain environment.
Each stub:
- Sleeps to simulate processing time
- Creates a minimal output file
- Returns a result dict matching the real core function signature
"""
import os
import time
from typing import Any, Dict


def process_onnx_core_stub(
    input_paths: Dict[str, str],
    output_path: str,
    parameters: Dict[str, Any],
) -> Dict[str, Any]:
    """Stub ONNX processing: sleep 2s, create a fake out.onnx."""
    file_path = input_paths["file_path"]
    if not os.path.exists(file_path):
        raise FileNotFoundError(f"Input file not found: {file_path}")
    os.makedirs(os.path.dirname(output_path), exist_ok=True)
    time.sleep(2)
    # Create minimal valid-looking output
    with open(output_path, "wb") as f:
        f.write(b"STUB_ONNX_OUTPUT_" + os.path.basename(file_path).encode())
    return {
        "file_path": output_path,
        "file_size": os.path.getsize(output_path),
        "eval_report": "",
        "model_info": {
            "model_id": parameters.get("model_id"),
            "version": parameters.get("version"),
            "platform": parameters.get("platform"),
        },
    }


def process_bie_core_stub(
    input_paths: Dict[str, str],
    output_path: str,
    parameters: Dict[str, Any],
) -> Dict[str, Any]:
    """Stub BIE processing: sleep 3s, create a fake out.bie."""
    onnx_file_path = input_paths["onnx_file_path"]
    data_dir = input_paths["data_dir"]
    if not os.path.exists(onnx_file_path):
        raise FileNotFoundError(f"ONNX file not found: {onnx_file_path}")
    os.makedirs(os.path.dirname(output_path), exist_ok=True)
    # Count ref images (if any)
    img_count = 0
    if os.path.isdir(data_dir):
        img_count = len(
            [f for f in os.listdir(data_dir) if os.path.isfile(os.path.join(data_dir, f))]
        )
    time.sleep(3)
    with open(output_path, "wb") as f:
        f.write(b"STUB_BIE_OUTPUT")
    return {
        "file_path": output_path,
        "file_size": os.path.getsize(output_path),
        "model_info": {
            "model_id": parameters.get("model_id"),
            "version": parameters.get("version"),
            "platform": parameters.get("platform"),
        },
        "analysis_info": {
            "input_name": "stub_input",
            "batch_size": 1,
            "channels": 3,
            "height": 224,
            "width": 224,
        },
        "processed_images": img_count,
    }


def process_nef_core_stub(
    input_paths: Dict[str, str],
    output_path: str,
    parameters: Dict[str, Any],
) -> Dict[str, Any]:
    """Stub NEF processing: sleep 2s, create a fake out.nef."""
    bie_file_path = input_paths["bie_file_path"]
    if not os.path.exists(bie_file_path):
        raise FileNotFoundError(f"BIE file not found: {bie_file_path}")
    os.makedirs(os.path.dirname(output_path), exist_ok=True)
    time.sleep(2)
    with open(output_path, "wb") as f:
        f.write(b"STUB_NEF_OUTPUT")
    return {
        "file_path": output_path,
        "file_size": os.path.getsize(output_path),
        "model_info": {
            "model_id": parameters.get("model_id"),
            "version": parameters.get("version"),
            "platform": parameters.get("platform"),
        },
        "compilation_info": {
            "optimization_level": "stub",
            "memory_usage": "stub",
            "inference_speed": "stub",
        },
    }
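
For context, a minimal sketch of how a consumer might switch between these stubs and the real toolchain functions via WORKER_MODE. The module names (core_stub, worker_core) and the real-side function names are assumptions, since this commit only shows the stub side.

import os

# Hypothetical dispatch; module and real-function names are assumptions.
if os.environ.get("WORKER_MODE", "real") == "stub":
    from core_stub import (
        process_onnx_core_stub as process_onnx_core,
        process_bie_core_stub as process_bie_core,
        process_nef_core_stub as process_nef_core,
    )
else:
    from worker_core import (
        process_onnx_core,
        process_bie_core,
        process_nef_core,
    )

Because each stub returns a result dict shaped like its real counterpart, the consumer's result handling stays identical in both modes.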