All checks were successful
Build & Sign Wraith / Build Windows + Sign (push) Successful in 3m4s
Same root cause as the PTY crash (v1.2.6): tokio::spawn called from
Tauri setup hook without a tokio runtime guard. Switched error watcher
to std:🧵:spawn. Also wrapped both error watcher and MCP server
spawn in individual catch_unwind blocks so neither can crash the app.
Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
116 lines
3.3 KiB
Rust
116 lines
3.3 KiB
Rust
//! Background error pattern scanner for terminal sessions.
//!
//! Watches scrollback buffers for common error patterns and emits
//! `mcp:error` events (payload: `sessionId`, `message`) to the frontend
//! when detected.
|
use std::sync::Arc;
|
|
|
|
use dashmap::DashMap;
|
|
use tauri::{AppHandle, Emitter};
|
|
|
|
use crate::mcp::ScrollbackRegistry;
|
|
|
|
/// Common error patterns to watch for across all sessions.
///
/// Matching is a case-sensitive substring search (see `ErrorWatcher::scan`),
/// which is why several patterns appear in both capitalisations.
const ERROR_PATTERNS: &[&str] = &[
    // Permissions / network
    "Permission denied",
    "permission denied",
    "Connection refused",
    "connection refused",
    // Resource exhaustion
    "No space left on device",
    "Disk quota exceeded",
    "Out of memory",
    "OOM",
    // Process death — NOTE(review): "Killed" is broad and may match benign
    // output (e.g. "Killed 0 jobs"); confirm false-positive rate is acceptable.
    "Killed",
    "Segmentation fault",
    "segfault",
    // Generic severity markers
    "FATAL",
    "CRITICAL",
    "panic:",
    "stack overflow",
    "Too many open files",
    // Network timeouts / SSH
    "Connection timed out",
    "Connection reset by peer",
    "Host key verification failed",
    // Shell-level failures
    "command not found",
    "No such file or directory",
];
|
|
|
|
/// Tracks the last scanned position per session to avoid re-emitting.
pub struct ErrorWatcher {
    // session_id -> absolute write offset (from `total_written()`) up to
    // which the scrollback has already been scanned. A concurrent map so the
    // background scanner thread and registration calls (`watch`/`unwatch`)
    // can touch it without an explicit Mutex.
    last_scanned: DashMap<String, usize>,
}
|
|
|
|
impl ErrorWatcher {
|
|
pub fn new() -> Self {
|
|
Self { last_scanned: DashMap::new() }
|
|
}
|
|
|
|
/// Scan all registered sessions for new error patterns.
|
|
/// Returns a list of (session_id, matched_line) pairs.
|
|
pub fn scan(&self, scrollback: &ScrollbackRegistry) -> Vec<(String, String)> {
|
|
let mut alerts = Vec::new();
|
|
|
|
// Collect session IDs and positions first to avoid holding the iter
|
|
let sessions: Vec<(String, usize)> = self.last_scanned.iter()
|
|
.map(|entry| (entry.key().clone(), *entry.value()))
|
|
.collect();
|
|
|
|
for (session_id, last_pos) in sessions {
|
|
if let Some(buf) = scrollback.get(&session_id) {
|
|
let total = buf.total_written();
|
|
if total <= last_pos {
|
|
continue;
|
|
}
|
|
|
|
let raw = buf.read_raw();
|
|
let new_start = raw.len().saturating_sub(total - last_pos);
|
|
let new_content = &raw[new_start..];
|
|
|
|
for line in new_content.lines() {
|
|
for pattern in ERROR_PATTERNS {
|
|
if line.contains(pattern) {
|
|
alerts.push((session_id.clone(), line.to_string()));
|
|
break;
|
|
}
|
|
}
|
|
}
|
|
|
|
self.last_scanned.insert(session_id, total);
|
|
}
|
|
}
|
|
|
|
alerts
|
|
}
|
|
|
|
/// Register a session for watching.
|
|
pub fn watch(&self, session_id: &str) {
|
|
self.last_scanned.insert(session_id.to_string(), 0);
|
|
}
|
|
|
|
/// Stop watching a session.
|
|
pub fn unwatch(&self, session_id: &str) {
|
|
self.last_scanned.remove(session_id);
|
|
}
|
|
}
|
|
|
|
/// Spawn a background task that scans for errors every 2 seconds.
|
|
pub fn start_error_watcher(
|
|
watcher: Arc<ErrorWatcher>,
|
|
scrollback: ScrollbackRegistry,
|
|
app_handle: AppHandle,
|
|
) {
|
|
std::thread::spawn(move || {
|
|
loop {
|
|
std::thread::sleep(std::time::Duration::from_secs(2));
|
|
|
|
let alerts = watcher.scan(&scrollback);
|
|
for (session_id, line) in alerts {
|
|
let _ = app_handle.emit("mcp:error", serde_json::json!({
|
|
"sessionId": session_id,
|
|
"message": line,
|
|
}));
|
|
}
|
|
}
|
|
});
|
|
}
|