fix: Remove incompatible parameters to match standalone MultiDongle API
Key fixes:

1. Remove 'block' parameter from put_input() call - not supported in standalone code
2. Remove 'timeout' parameter from get_latest_inference_result() call
3. Improve _has_inference_result() logic to properly detect real inference results
   - Don't count "Processing" or "async" status as valid results
   - Only count actual tuple (prob, result_str) or meaningful dict results
   - Match standalone code behavior for FPS calculation

This should resolve the "unexpected keyword argument" errors and provide accurate FPS counting like the standalone baseline.

🤖 Generated with [Claude Code](https://claude.ai/code)

Co-Authored-By: Claude <noreply@anthropic.com>
parent 80275bc774
commit f902659017
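For context, a minimal sketch of the call pattern the commit moves to, assuming the standalone MultiDongle API exposes put_input(frame, pixel_format) and get_latest_inference_result() with no block=/timeout= keywords (the method names and the 'BGR565' format come from the diff below; the helper function and its return-value handling are illustrative assumptions):

    import numpy as np

    def run_inference_step(multidongle, processed_data):
        """Illustrative only: push a frame and poll for the newest result
        using the standalone-compatible signatures (no block=/timeout= kwargs)."""
        if isinstance(processed_data, np.ndarray):
            # Standalone API: put_input(frame, pixel_format) -- no 'block' kwarg
            multidongle.put_input(processed_data, 'BGR565')

        # Standalone API: plain non-blocking poll -- no 'timeout' kwarg.
        # Assumed to return nothing meaningful when no result is ready yet.
        return multidongle.get_latest_inference_result()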
@@ -168,7 +168,7 @@ class PipelineStage:
         print(f"[Stage {self.stage_id}] Worker loop stopped")
 
     def _has_inference_result(self, processed_data) -> bool:
-        """Check if processed_data contains a valid inference result"""
+        """Check if processed_data contains a valid inference result (like standalone code)"""
         if not processed_data:
             return False
 
@@ -177,15 +177,19 @@
             if hasattr(processed_data, 'stage_results') and processed_data.stage_results:
                 stage_result = processed_data.stage_results.get(self.stage_id)
                 if stage_result:
-                    # Check for tuple result (prob, result_str)
+                    # Check for tuple result (prob, result_str) - like standalone code
                     if isinstance(stage_result, tuple) and len(stage_result) == 2:
                         prob, result_str = stage_result
                         return prob is not None and result_str is not None
-                    # Check for dict result with actual inference data
+                    # Check for dict result with actual inference data (not status messages)
                     elif isinstance(stage_result, dict):
-                        return (stage_result.get("status") != "processing" and
-                                stage_result.get("status") != "async" and
-                                stage_result)
+                        # Don't count "Processing" or "async" status as real results
+                        if stage_result.get("status") in ["processing", "async"]:
+                            return False
+                        # Don't count empty results
+                        if not stage_result or stage_result.get("result") == "Processing":
+                            return False
+                        return True
                 else:
                     return stage_result is not None
         except Exception:
@@ -227,10 +231,10 @@
 
                 # Step 3: MultiDongle inference
                 if isinstance(processed_data, np.ndarray):
-                    self.multidongle.put_input(processed_data, 'BGR565', block=False)
+                    self.multidongle.put_input(processed_data, 'BGR565')
 
                 # Get inference result (non-blocking, async pattern like standalone code)
-                result = self.multidongle.get_latest_inference_result(timeout=0.001)
+                result = self.multidongle.get_latest_inference_result()
 
                 # Process result if available
                 inference_result = {}
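The FPS accounting the commit message refers to is not part of these hunks; as a rough illustration of how the stricter _has_inference_result() check is meant to feed into it, the counter class below is a hypothetical sketch (its name, structure, and wiring are assumptions, not code from this repo):

    import time

    class FpsCounter:
        """Hypothetical sketch: count only frames that _has_inference_result()
        accepts, so "processing"/"async" placeholders no longer inflate FPS."""

        def __init__(self):
            self.start = time.time()
            self.frames = 0

        def update(self, stage, processed_data):
            # Only real (prob, result_str) tuples / meaningful dicts are counted
            if stage._has_inference_result(processed_data):
                self.frames += 1

        def fps(self):
            elapsed = time.time() - self.start
            return self.frames / elapsed if elapsed > 0 else 0.0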