Merge branch 'main' of github.com:HuangMason320/cluster4npu

HuangMason320 2025-07-24 11:56:40 +08:00
commit 18f1426cbc

@@ -168,7 +168,7 @@ class PipelineStage:
         print(f"[Stage {self.stage_id}] Worker loop stopped")

     def _has_inference_result(self, processed_data) -> bool:
-        """Check if processed_data contains a valid inference result"""
+        """Check if processed_data contains a valid inference result (like standalone code)"""
         if not processed_data:
             return False
@@ -177,15 +177,19 @@ class PipelineStage:
             if hasattr(processed_data, 'stage_results') and processed_data.stage_results:
                 stage_result = processed_data.stage_results.get(self.stage_id)
                 if stage_result:
-                    # Check for tuple result (prob, result_str)
+                    # Check for tuple result (prob, result_str) - like standalone code
                     if isinstance(stage_result, tuple) and len(stage_result) == 2:
                         prob, result_str = stage_result
                         return prob is not None and result_str is not None
-                    # Check for dict result with actual inference data
+                    # Check for dict result with actual inference data (not status messages)
                     elif isinstance(stage_result, dict):
-                        return (stage_result.get("status") != "processing" and
-                                stage_result.get("status") != "async" and
-                                stage_result)
+                        # Don't count "Processing" or "async" status as real results
+                        if stage_result.get("status") in ["processing", "async"]:
+                            return False
+                        # Don't count empty results
+                        if not stage_result or stage_result.get("result") == "Processing":
+                            return False
+                        return True
                     else:
                         return stage_result is not None
         except Exception:
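
The reworked dict branch amounts to a small predicate over a single stage result. Below is a minimal standalone sketch of that logic, assuming a plain dict payload; the function name and the example payloads are hypothetical, and only the status and "Processing" checks mirror the committed code.

def is_real_dict_result(stage_result: dict) -> bool:
    # In-flight status markers are not real inference output
    if stage_result.get("status") in ["processing", "async"]:
        return False
    # Empty dicts and "Processing" placeholders are not real output either
    if not stage_result or stage_result.get("result") == "Processing":
        return False
    return True

# Hypothetical payloads:
# is_real_dict_result({"status": "async"})             -> False
# is_real_dict_result({"result": "Processing"})        -> False
# is_real_dict_result({"prob": 0.91, "result": "cat"}) -> True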
@@ -227,10 +231,10 @@ class PipelineStage:
             # Step 3: MultiDongle inference
             if isinstance(processed_data, np.ndarray):
-                self.multidongle.put_input(processed_data, 'BGR565', block=False)
+                self.multidongle.put_input(processed_data, 'BGR565')

             # Get inference result (non-blocking, async pattern like standalone code)
-            result = self.multidongle.get_latest_inference_result(timeout=0.001)
+            result = self.multidongle.get_latest_inference_result()

             # Process result if available - only count actual inference results for FPS
             inference_result = None
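
With the block=False and timeout=0.001 arguments dropped, the worker loop follows a pure fire-and-poll pattern: push the frame, then take whatever result is already available, possibly from an earlier frame. A minimal sketch, assuming only the two MultiDongle calls shown in the diff (put_input(frame, 'BGR565') and get_latest_inference_result()); the wrapper function and everything else here is illustrative.

import numpy as np

def inference_step(multidongle, processed_data):
    # Push the preprocessed frame without stalling the worker loop
    if isinstance(processed_data, np.ndarray):
        multidongle.put_input(processed_data, 'BGR565')

    # Poll for whatever the dongles have finished so far; may be None,
    # and may belong to a frame submitted on an earlier iteration
    result = multidongle.get_latest_inference_result()

    # Only a non-None result counts as an actual inference result (e.g. for FPS)
    inference_result = None
    if result is not None:
        inference_result = result
    return inference_result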