fix: eliminate QTextCursor cross-thread signal warning on inference stop

Three related fixes to the QObject::connect / QTextCursor warning that
appeared when stopping inference:

1. StdoutCapture: replace signal emission with queue.Queue.put_nowait()
   so non-Qt SDK threads (Kneron shutdown) never touch Qt signal machinery.
   DeploymentWorker.stdout_captured signal removed; worker now accepts a
   stdout_queue and passes it to StdoutCapture.

2. start_deployment: create QTimer (100 ms) on main thread to drain the
   stdout queue via _drain_stdout_queue(). Connect worker.finished to
   _on_worker_finished to stop the timer and flush remaining output.

3. stop_deployment / wait_for_stop: the background thread was calling
   QTextEdit.append() and other widget methods directly, which internally
   creates QTextCursor queued connections — the real trigger of the
   warning. Fixed by having wait_for_stop emit _stop_done signal only;
   all UI updates moved to _on_stop_done slot (main thread).

Also adds QTextCursor import in main.py to pre-register the type with
Qt's meta-type system as a belt-and-suspenders measure.

Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
This commit is contained in:
abin 2026-04-08 17:56:02 +08:00
parent 6e9885404c
commit be4bd617c3
2 changed files with 93 additions and 56 deletions

View File

@@ -23,7 +23,7 @@ import sys
import os
import tempfile
from PyQt5.QtWidgets import QApplication, QMessageBox
from PyQt5.QtGui import QFont
from PyQt5.QtGui import QFont, QTextCursor # QTextCursor import registers it with Qt meta-type system
from PyQt5.QtCore import Qt, QSharedMemory, QCoreApplication
# Import fcntl only on Unix-like systems

View File

@@ -21,6 +21,7 @@ Usage:
import os
import sys
import json
import queue
import threading
import traceback
import io
@@ -59,54 +60,48 @@ except ImportError as e:
class StdoutCapture:
"""Context manager to capture stdout/stderr and emit to signal."""
"""Context manager to capture stdout/stderr into a thread-safe queue.
def __init__(self, signal_emitter):
self.signal_emitter = signal_emitter
Uses queue.Queue instead of directly emitting a Qt signal, so that
any thread (including non-Qt SDK threads) can safely write output
without triggering cross-thread signal warnings.
The dialog drains the queue from the main thread via QTimer.
"""
def __init__(self, output_queue: queue.Queue):
self.output_queue = output_queue
self.original_stdout = None
self.original_stderr = None
self.captured_output = io.StringIO()
def __enter__(self):
self.original_stdout = sys.stdout
self.original_stderr = sys.stderr
# Create a custom write function that both prints to original and captures
output_queue = self.output_queue
class TeeWriter:
def __init__(self, original, captured, emitter):
def __init__(self, original):
self.original = original
self.captured = captured
self.emitter = emitter
self._emitting = False # Prevent recursion
def write(self, text):
# Write to original stdout/stderr (so it still appears in terminal)
# Check if original exists (it might be None in PyInstaller builds)
if self.original is not None:
self.original.write(text)
self.original.flush()
# Capture for GUI if it's a substantial message and not already emitting
if text.strip() and not self._emitting:
self._emitting = True
if text.strip():
try:
self.emitter(text)
finally:
self._emitting = False
output_queue.put_nowait(text)
except queue.Full:
pass # Drop if queue full; non-fatal
def flush(self):
# Check if original exists before calling flush
if self.original is not None:
self.original.flush()
# Replace stdout and stderr with our tee writers
sys.stdout = TeeWriter(self.original_stdout, self.captured_output, self.signal_emitter)
sys.stderr = TeeWriter(self.original_stderr, self.captured_output, self.signal_emitter)
sys.stdout = TeeWriter(self.original_stdout)
sys.stderr = TeeWriter(self.original_stderr)
return self
def __exit__(self, exc_type, exc_val, exc_tb):
# Restore original stdout/stderr
sys.stdout = self.original_stdout
sys.stderr = self.original_stderr
@@ -124,11 +119,11 @@ class DeploymentWorker(QThread):
frame_updated = pyqtSignal('PyQt_PyObject') # For live view
result_updated = pyqtSignal(dict) # For inference results
terminal_output = pyqtSignal(str) # For terminal output in GUI
stdout_captured = pyqtSignal(str) # For captured stdout/stderr
def __init__(self, pipeline_data: Dict[str, Any]):
def __init__(self, pipeline_data: Dict[str, Any], stdout_queue: queue.Queue):
super().__init__()
self.pipeline_data = pipeline_data
self.stdout_queue = stdout_queue # thread-safe queue; drained by dialog's QTimer
self.should_stop = False
self.orchestrator = None
@@ -184,7 +179,7 @@
# Create InferencePipeline instance with stdout capture
try:
# Capture all stdout/stderr during pipeline operations
with StdoutCapture(self.stdout_captured.emit):
with StdoutCapture(self.stdout_queue):
pipeline = converter.create_inference_pipeline(config)
self.progress_updated.emit(80, "Initializing workflow orchestrator...")
@@ -389,6 +384,9 @@
class DeploymentDialog(QDialog):
"""Main deployment dialog with comprehensive deployment management."""
# Emitted from the wait_for_stop background thread; connected slot runs on main thread
_stop_done = pyqtSignal(bool) # True = stopped cleanly
def __init__(self, pipeline_data: Dict[str, Any], parent=None):
super().__init__(parent)
@@ -867,8 +865,11 @@ Stage Configurations:
self.terminal_output_display.clear()
self.terminal_output_display.append("Pipeline deployment started - terminal output will appear here...")
# Create thread-safe queue for stdout captured from non-Qt threads
self._stdout_queue: queue.Queue = queue.Queue(maxsize=1000)
# Create and start deployment worker
self.deployment_worker = DeploymentWorker(self.pipeline_data)
self.deployment_worker = DeploymentWorker(self.pipeline_data, self._stdout_queue)
self.deployment_worker.progress_updated.connect(self.update_progress)
self.deployment_worker.topology_analyzed.connect(self.update_topology_results)
self.deployment_worker.conversion_completed.connect(self.on_conversion_completed)
@@ -878,8 +879,14 @@ Stage Configurations:
self.deployment_worker.frame_updated.connect(self.update_live_view)
self.deployment_worker.result_updated.connect(self.update_inference_results)
self.deployment_worker.terminal_output.connect(self.update_terminal_output)
self.deployment_worker.stdout_captured.connect(self.update_terminal_output)
# Drain stdout queue from main thread every 100 ms to avoid cross-thread signal warnings
self._stdout_drain_timer = QTimer(self)
self._stdout_drain_timer.timeout.connect(self._drain_stdout_queue)
self._stdout_drain_timer.start(100)
# Stop timer and flush queue when worker finishes (runs on main thread via signal)
self.deployment_worker.finished.connect(self._on_worker_finished)
self.deployment_worker.start()
@@ -898,14 +905,30 @@ Stage Configurations:
self.deployment_worker.stop()
# Wait for worker to finish in a separate thread to avoid blocking UI
# Wait for worker to finish in a separate thread to avoid blocking UI.
# All UI updates happen in _on_stop_done (main thread, via signal).
self._stop_done.connect(self._on_stop_done)
def wait_for_stop():
if self.deployment_worker.wait(5000): # Wait up to 5 seconds
success = self.deployment_worker.wait(5000) # Wait up to 5 seconds
self._stop_done.emit(success)
import threading
threading.Thread(target=wait_for_stop, daemon=True).start()
def _on_stop_done(self, success: bool):
"""Called on the main thread after the stop background thread has waited for the worker."""
# Disconnect so re-running won't accumulate connections
try:
self._stop_done.disconnect(self._on_stop_done)
except TypeError:
pass
if success:
self.deployment_log.append("Inference stopped successfully.")
else:
self.deployment_log.append("Warning: Inference may not have stopped cleanly.")
# Update UI on main thread
self.stop_button.setVisible(False)
self.deploy_button.setEnabled(True)
self.close_button.setText("Close")
@@ -913,8 +936,22 @@ Stage Configurations:
self.status_label.setText("Inference stopped")
self.dongle_status.setText("Pipeline stopped")
import threading
threading.Thread(target=wait_for_stop, daemon=True).start()
def _on_worker_finished(self):
"""Called on the main thread when the deployment worker thread exits."""
if hasattr(self, '_stdout_drain_timer'):
self._stdout_drain_timer.stop()
self._drain_stdout_queue() # flush any remaining output
def _drain_stdout_queue(self):
"""Drain the stdout queue and forward lines to the terminal display (main thread only)."""
if not hasattr(self, '_stdout_queue'):
return
try:
while True:
text = self._stdout_queue.get_nowait()
self.update_terminal_output(text)
except queue.Empty:
pass
def update_progress(self, value: int, message: str):
"""Update deployment progress."""