forked from masonhuang/cluster4npu
Phase 1 — Performance Benchmarking: - PerformanceBenchmarker: sequential vs parallel benchmark with injectable runner - PerformanceHistory: JSON-backed benchmark history with regression support - PerformanceDashboard: real-time FPS/latency display widget - BenchmarkDialog: one-click benchmark with 3-phase progress bar Phase 2 — Device Management: - DeviceManager: NPU dongle scan, assign/unassign, load balance recommendation - DeviceManagementPanel: live device status cards with auto-refresh - BottleneckAlert: dataclass for pipeline bottleneck detection Phase 3 — Advanced Features: - OptimizationEngine: 3 optimization rules (rebalance/adjust_queue/add_devices) - TemplateManager: 3 built-in pipeline templates (YOLOv5, fire detection, dual-model) Phase 4 — Report Export: - ReportExporter: PDF (reportlab, optional) and CSV export - ExportReportDialog: format selection + path picker UI 192 unit tests, all passing. Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
153 lines · 7.3 KiB · Python
"""
|
||
PerformanceDashboard 的單元測試。
|
||
|
||
測試策略:
|
||
- PyQt5 在 CI 環境中不可用,透過 conftest.py 的 Mock 注入繞過 import。
|
||
- 測試驗證 PerformanceDashboard 的行為邏輯:
|
||
update_stats 是否更新顯示值、reset 是否歸零、set_display_window 是否儲存設定。
|
||
- 使用 MagicMock 取代真實 QLabel,透過記錄 setText 呼叫來驗證。
|
||
"""
|
||
# Standard library
import sys
from unittest.mock import MagicMock, call, patch

# Third-party
import pytest
# ---------------------------------------------------------------------------
# Tests: PerformanceDashboard can be constructed
# ---------------------------------------------------------------------------


class TestPerformanceDashboardInit:
    def should_be_importable(self):
        """The PerformanceDashboard module is importable (even with PyQt5 mocked)."""
        from ui.components.performance_dashboard import PerformanceDashboard

        assert PerformanceDashboard is not None

    def should_instantiate_without_error(self):
        """PerformanceDashboard() constructs an instance without raising."""
        from ui.components.performance_dashboard import PerformanceDashboard

        widget = PerformanceDashboard()
        assert widget is not None
# ---------------------------------------------------------------------------
# Tests: update_stats refreshes the displayed values
# ---------------------------------------------------------------------------


class TestUpdateStats:
    def should_store_fps_after_update(self):
        """After update_stats, current_fps holds the supplied value."""
        from ui.components.performance_dashboard import PerformanceDashboard

        panel = PerformanceDashboard()
        panel.update_stats({"fps": 30.5, "avg_latency_ms": 10.0, "p95_latency_ms": 15.0})
        assert panel.current_fps == pytest.approx(30.5)

    def should_store_avg_latency_after_update(self):
        """After update_stats, current_avg_latency_ms is refreshed."""
        from ui.components.performance_dashboard import PerformanceDashboard

        panel = PerformanceDashboard()
        panel.update_stats({"fps": 30.0, "avg_latency_ms": 12.3, "p95_latency_ms": 20.0})
        assert panel.current_avg_latency_ms == pytest.approx(12.3)

    def should_store_p95_latency_after_update(self):
        """After update_stats, current_p95_latency_ms is refreshed."""
        from ui.components.performance_dashboard import PerformanceDashboard

        panel = PerformanceDashboard()
        panel.update_stats({"fps": 30.0, "avg_latency_ms": 12.0, "p95_latency_ms": 25.7})
        assert panel.current_p95_latency_ms == pytest.approx(25.7)

    def should_call_fps_label_setText(self):
        """update_stats calls fps_label.setText with text containing the fps value."""
        from ui.components.performance_dashboard import PerformanceDashboard

        panel = PerformanceDashboard()
        # The mock label may have been touched during construction; start clean.
        panel.fps_label.setText.reset_mock()
        panel.update_stats({"fps": 45.0, "avg_latency_ms": 10.0, "p95_latency_ms": 15.0})
        panel.fps_label.setText.assert_called_once()
        shown_text = panel.fps_label.setText.call_args[0][0]
        assert "45" in shown_text

    def should_call_avg_latency_label_setText(self):
        """update_stats calls avg_latency_label.setText with the latency value."""
        from ui.components.performance_dashboard import PerformanceDashboard

        panel = PerformanceDashboard()
        panel.avg_latency_label.setText.reset_mock()
        panel.update_stats({"fps": 30.0, "avg_latency_ms": 8.5, "p95_latency_ms": 12.0})
        panel.avg_latency_label.setText.assert_called_once()
        shown_text = panel.avg_latency_label.setText.call_args[0][0]
        # Accept either full precision or a rounded rendering of 8.5.
        assert "8.5" in shown_text or "8" in shown_text

    def should_call_p95_latency_label_setText(self):
        """update_stats calls p95_latency_label.setText with the p95 value."""
        from ui.components.performance_dashboard import PerformanceDashboard

        panel = PerformanceDashboard()
        panel.p95_latency_label.setText.reset_mock()
        panel.update_stats({"fps": 30.0, "avg_latency_ms": 8.0, "p95_latency_ms": 19.2})
        panel.p95_latency_label.setText.assert_called_once()
        shown_text = panel.p95_latency_label.setText.call_args[0][0]
        assert "19" in shown_text
# ---------------------------------------------------------------------------
# Tests: reset zeroes the stats
# ---------------------------------------------------------------------------


class TestReset:
    def should_reset_fps_to_zero(self):
        """current_fps is zeroed after reset()."""
        from ui.components.performance_dashboard import PerformanceDashboard

        panel = PerformanceDashboard()
        panel.update_stats({"fps": 55.0, "avg_latency_ms": 5.0, "p95_latency_ms": 8.0})
        panel.reset()
        assert panel.current_fps == 0.0

    def should_reset_avg_latency_to_zero(self):
        """current_avg_latency_ms is zeroed after reset()."""
        from ui.components.performance_dashboard import PerformanceDashboard

        panel = PerformanceDashboard()
        panel.update_stats({"fps": 30.0, "avg_latency_ms": 12.0, "p95_latency_ms": 18.0})
        panel.reset()
        assert panel.current_avg_latency_ms == 0.0

    def should_reset_p95_latency_to_zero(self):
        """current_p95_latency_ms is zeroed after reset()."""
        from ui.components.performance_dashboard import PerformanceDashboard

        panel = PerformanceDashboard()
        panel.update_stats({"fps": 30.0, "avg_latency_ms": 12.0, "p95_latency_ms": 18.0})
        panel.reset()
        assert panel.current_p95_latency_ms == 0.0

    def should_call_label_setText_with_zero_on_reset(self):
        """reset() calls fps_label.setText to redraw the zeroed value."""
        from ui.components.performance_dashboard import PerformanceDashboard

        panel = PerformanceDashboard()
        # Discard any setText calls issued during construction.
        panel.fps_label.setText.reset_mock()
        panel.reset()
        panel.fps_label.setText.assert_called_once()
# ---------------------------------------------------------------------------
# Tests: set_display_window stores the setting
# ---------------------------------------------------------------------------


class TestSetDisplayWindow:
    def should_store_display_window_seconds(self):
        """After set_display_window(120), display_window_seconds equals 120."""
        from ui.components.performance_dashboard import PerformanceDashboard

        panel = PerformanceDashboard()
        panel.set_display_window(120)
        assert panel.display_window_seconds == 120

    def should_default_to_60_seconds(self):
        """With no argument, display_window_seconds defaults to 60."""
        from ui.components.performance_dashboard import PerformanceDashboard

        panel = PerformanceDashboard()
        panel.set_display_window()
        assert panel.display_window_seconds == 60

    def should_update_display_window_on_second_call(self):
        """A second set_display_window call overwrites the previous value."""
        from ui.components.performance_dashboard import PerformanceDashboard

        panel = PerformanceDashboard()
        panel.set_display_window(30)
        panel.set_display_window(90)
        assert panel.display_window_seconds == 90