All checks were successful
Build & Sign Wraith / Build Windows + Sign (push) Successful in 3m46s
- percent_decode: collect bytes into Vec<u8> then String::from_utf8_lossy to handle non-ASCII paths correctly instead of casting byte as char (corrupted codepoints > 127) - format_mtime: remove dead `st`/`y`/`_y` variables and unused UNIX_EPOCH/Duration imports - reorder_connections/reorder_groups: wrap UPDATE loops in BEGIN/COMMIT transactions with ROLLBACK on error to prevent partial sort order writes - scan_network: validate subnet matches 3-octet format before use in remote shell commands Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
241 lines
12 KiB
Rust
241 lines
12 KiB
Rust
// Global debug log macro — must be declared before modules that use it
/// Formats its arguments (like `format!`) and appends the resulting line to
/// `<data_dir>/wraith.log` via [`write_log`]. Any write error is discarded —
/// logging is strictly best-effort.
///
/// NOTE(review): this is `#[macro_export]`, but it expands to
/// `$crate::write_log`, which is a private function — so the macro is only
/// usable from within this crate. Confirm whether the export is intentional.
#[macro_export]
macro_rules! wraith_log {
    ($($arg:tt)*) => {{
        let msg = format!($($arg)*);
        let _ = $crate::write_log(&$crate::data_directory().join("wraith.log"), &msg);
    }};
}
|
|
|
|
pub mod db;
|
|
pub mod vault;
|
|
pub mod settings;
|
|
pub mod connections;
|
|
pub mod credentials;
|
|
pub mod ssh;
|
|
pub mod sftp;
|
|
pub mod rdp;
|
|
pub mod theme;
|
|
pub mod workspace;
|
|
pub mod pty;
|
|
pub mod mcp;
|
|
pub mod scanner;
|
|
pub mod commands;
|
|
pub mod utils;
|
|
|
|
use std::path::PathBuf;
|
|
|
|
use db::Database;
|
|
use vault::VaultService;
|
|
use credentials::CredentialService;
|
|
use settings::SettingsService;
|
|
use connections::ConnectionService;
|
|
use sftp::SftpService;
|
|
use ssh::session::SshService;
|
|
use rdp::RdpService;
|
|
use theme::ThemeService;
|
|
use workspace::WorkspaceService;
|
|
use pty::PtyService;
|
|
use mcp::ScrollbackRegistry;
|
|
use mcp::error_watcher::ErrorWatcher;
|
|
|
|
/// Top-level application state, built once in [`run`] and registered with
/// `tauri::Builder::manage` so every command handler can access it.
pub struct AppState {
    // Handle to the on-disk SQLite database (`wraith.db`); service structs
    // hold their own clones of this handle.
    pub db: Database,
    // `None` until the vault is unlocked — `is_unlocked()` checks exactly this.
    pub vault: tokio::sync::Mutex<Option<VaultService>>,
    pub settings: SettingsService,
    pub connections: ConnectionService,
    // `None` until credentials become available — presumably populated when
    // the vault is unlocked; TODO confirm against commands::vault.
    pub credentials: tokio::sync::Mutex<Option<CredentialService>>,
    pub ssh: SshService,
    pub sftp: SftpService,
    pub rdp: RdpService,
    pub theme: ThemeService,
    pub workspace: WorkspaceService,
    pub pty: PtyService,
    // Terminal scrollback registry; shared with the MCP server and the
    // error watcher (see the setup closure in `run`).
    pub scrollback: ScrollbackRegistry,
    // Arc because it is cloned into the error-watcher thread and the MCP server.
    pub error_watcher: std::sync::Arc<ErrorWatcher>,
}
|
|
|
|
impl AppState {
|
|
pub fn new(data_dir: PathBuf) -> Result<Self, Box<dyn std::error::Error>> {
|
|
std::fs::create_dir_all(&data_dir)?;
|
|
let database = Database::open(&data_dir.join("wraith.db"))?;
|
|
database.migrate()?;
|
|
let settings = SettingsService::new(database.clone());
|
|
Ok(Self {
|
|
db: database.clone(),
|
|
vault: tokio::sync::Mutex::new(None),
|
|
connections: ConnectionService::new(database.clone()),
|
|
credentials: tokio::sync::Mutex::new(None),
|
|
ssh: SshService::new(database.clone()),
|
|
sftp: SftpService::new(),
|
|
rdp: RdpService::new(),
|
|
theme: ThemeService::new(database),
|
|
workspace: WorkspaceService::new(settings.clone()),
|
|
settings,
|
|
pty: PtyService::new(),
|
|
scrollback: ScrollbackRegistry::new(),
|
|
error_watcher: std::sync::Arc::new(ErrorWatcher::new()),
|
|
})
|
|
}
|
|
|
|
pub fn clone_services(&self) -> (SshService, rdp::RdpService, SftpService, ScrollbackRegistry, std::sync::Arc<ErrorWatcher>) {
|
|
(self.ssh.clone(), self.rdp.clone(), self.sftp.clone(), self.scrollback.clone(), self.error_watcher.clone())
|
|
}
|
|
|
|
pub fn is_first_run(&self) -> bool {
|
|
self.settings.get("vault_salt").unwrap_or_default().is_empty()
|
|
}
|
|
|
|
pub async fn is_unlocked(&self) -> bool {
|
|
self.vault.lock().await.is_some()
|
|
}
|
|
}
|
|
|
|
/// Resolves the per-user directory where Wraith keeps its database, log
/// file, and settings.
///
/// Resolution order: `%APPDATA%\Wraith` (Windows), then under `$HOME` —
/// `~/Library/Application Support/Wraith` on macOS, `$XDG_DATA_HOME/wraith`
/// if set, otherwise `~/.local/share/wraith`. Falls back to `"."` when
/// neither `APPDATA` nor `HOME` is set.
pub fn data_directory() -> PathBuf {
    // Windows: APPDATA wins unconditionally.
    if let Ok(appdata) = std::env::var("APPDATA") {
        return PathBuf::from(appdata).join("Wraith");
    }

    match std::env::var("HOME") {
        Ok(home) => {
            if cfg!(target_os = "macos") {
                PathBuf::from(home)
                    .join("Library")
                    .join("Application Support")
                    .join("Wraith")
            } else if let Ok(xdg) = std::env::var("XDG_DATA_HOME") {
                PathBuf::from(xdg).join("wraith")
            } else {
                PathBuf::from(home).join(".local").join("share").join("wraith")
            }
        }
        // No usable environment — degrade to the current directory.
        Err(_) => PathBuf::from("."),
    }
}
|
|
|
|
/// Cached log file handle — opened once on first use, reused for all subsequent
/// writes. Avoids the open/close syscall pair that the original implementation
/// paid on every `wraith_log!` invocation.
///
/// NOTE: the cache is keyed on the *first* path ever passed to [`write_log`];
/// later calls with a different path still write to the first file. Every
/// current caller uses `data_directory().join("wraith.log")`, so this is safe
/// today — revisit if a second log path is ever introduced.
static LOG_FILE: std::sync::OnceLock<std::sync::Mutex<std::fs::File>> = std::sync::OnceLock::new();

/// Appends `msg` to the log file at `path`, prefixed with a Unix-epoch
/// seconds timestamp (`[secs] msg`).
///
/// # Errors
/// Returns the I/O error from opening the file (first call only) or from
/// the write itself. Fix over the previous version: a failed open now
/// propagates as `Err` instead of panicking (`expect`) inside a logging
/// primitive — a logger must never take the process down.
fn write_log(path: &std::path::Path, msg: &str) -> std::io::Result<()> {
    use std::io::Write;

    let handle = match LOG_FILE.get() {
        Some(h) => h,
        None => {
            let file = std::fs::OpenOptions::new()
                .create(true)
                .append(true)
                .open(path)?;
            // If another thread initialized the cell first, `set` fails and
            // our freshly opened handle is simply dropped — its winner is
            // what `get` returns below.
            let _ = LOG_FILE.set(std::sync::Mutex::new(file));
            LOG_FILE.get().expect("LOG_FILE was just set")
        }
    };

    // A poisoned mutex only means another thread panicked mid-write;
    // recover the guard and keep logging.
    let mut f = handle.lock().unwrap_or_else(|e| e.into_inner());
    let elapsed = std::time::SystemTime::now()
        .duration_since(std::time::UNIX_EPOCH)
        .unwrap_or_default()
        .as_secs();
    writeln!(f, "[{}] {}", elapsed, msg)
}
|
|
|
|
/// Tauri application entry point.
///
/// Order matters here: the rustls crypto provider must be installed before
/// any TLS handshake (RDP), logging is set up before `AppState` so init
/// failures are recorded, and `AppState` is `manage`d before the `setup`
/// closure reads it back via `app.state::<AppState>()`.
#[cfg_attr(mobile, tauri::mobile_entry_point)]
pub fn run() {
    // Install rustls crypto provider before any TLS operations (RDP needs this).
    // `install_default` errs if a provider is already installed — ignored on purpose.
    let _ = tokio_rustls::rustls::crypto::aws_lc_rs::default_provider().install_default();

    // Initialize file-based logging to data_dir/wraith.log
    let log_path = data_directory().join("wraith.log");
    let _ = write_log(&log_path, "=== Wraith starting ===");

    // State construction is fatal-on-failure, but the error is logged first
    // so the panic leaves a trace in wraith.log.
    let app_state = match AppState::new(data_directory()) {
        Ok(s) => s,
        Err(e) => {
            let _ = write_log(&log_path, &format!("FATAL: AppState init failed: {}", e));
            panic!("Failed to init AppState: {}", e);
        }
    };
    app_state.theme.seed_builtins();

    tauri::Builder::default()
        .plugin(tauri_plugin_shell::init())
        .plugin(tauri_plugin_updater::Builder::new().build())
        .manage(app_state)
        .setup(|app| {
            // Debug builds: pop devtools on the main window immediately.
            #[cfg(debug_assertions)]
            {
                use tauri::Manager;
                if let Some(window) = app.get_webview_window("main") {
                    window.open_devtools();
                }
            }

            // Start MCP and error watcher — completely non-fatal.
            // Every step is wrapped in catch_unwind and logged so a broken
            // MCP subsystem can never prevent the app window from opening.
            {
                use tauri::Manager;
                let log_file = data_directory().join("wraith.log");
                let _ = write_log(&log_file, "Setup: starting MCP and error watcher");

                match std::panic::catch_unwind(std::panic::AssertUnwindSafe(|| {
                    app.state::<AppState>().inner().clone_services()
                })) {
                    Ok(state) => {
                        let (ssh, rdp, sftp, scrollback, watcher) = state;
                        let _ = write_log(&log_file, "Setup: cloned services OK");

                        // Error watcher — std::thread, no tokio needed
                        let watcher_for_mcp = watcher.clone();
                        let app_handle = app.handle().clone();
                        let app_handle_for_mcp = app.handle().clone();
                        let _ = std::panic::catch_unwind(std::panic::AssertUnwindSafe(|| {
                            mcp::error_watcher::start_error_watcher(watcher, scrollback.clone(), app_handle);
                        }));
                        let _ = write_log(&log_file, "Setup: error watcher started");

                        // MCP HTTP server — needs async runtime
                        let log_file2 = log_file.clone();
                        let _ = std::panic::catch_unwind(std::panic::AssertUnwindSafe(|| {
                            tauri::async_runtime::spawn(async move {
                                match mcp::server::start_mcp_server(ssh, rdp, sftp, scrollback, app_handle_for_mcp, watcher_for_mcp).await {
                                    Ok(port) => { let _ = write_log(&log_file2, &format!("MCP server started on localhost:{}", port)); }
                                    Err(e) => { let _ = write_log(&log_file2, &format!("MCP server FAILED: {}", e)); }
                                }
                            });
                        }));
                        let _ = write_log(&log_file, "Setup: MCP spawn dispatched");

                        // Download/update MCP bridge binary if needed
                        // (fire-and-forget; failure only logged).
                        let app_ver = app.config().version.clone().unwrap_or_else(|| "0.0.0".to_string());
                        let log_file3 = log_file.clone();
                        tauri::async_runtime::spawn(async move {
                            match mcp::bridge_manager::ensure_bridge(&app_ver).await {
                                Ok(()) => { let _ = write_log(&log_file3, "Setup: MCP bridge binary OK"); }
                                Err(e) => { let _ = write_log(&log_file3, &format!("Setup: MCP bridge download failed: {}", e)); }
                            }
                        });
                    }
                    Err(panic) => {
                        // Best-effort extraction of the panic payload: panics
                        // carry String or &str in practice; anything else is
                        // reduced to its TypeId debug form.
                        let msg = if let Some(s) = panic.downcast_ref::<String>() {
                            s.clone()
                        } else if let Some(s) = panic.downcast_ref::<&str>() {
                            s.to_string()
                        } else {
                            format!("{:?}", panic.type_id())
                        };
                        let _ = write_log(&log_file, &format!("MCP startup panicked: {}", msg));
                    }
                }
            }

            Ok(())
        })
        .invoke_handler(tauri::generate_handler![
            commands::vault::is_first_run, commands::vault::create_vault, commands::vault::unlock, commands::vault::is_unlocked,
            commands::settings::get_setting, commands::settings::set_setting,
            commands::connections::list_connections, commands::connections::create_connection, commands::connections::get_connection, commands::connections::update_connection, commands::connections::delete_connection,
            commands::connections::list_groups, commands::connections::create_group, commands::connections::delete_group, commands::connections::rename_group, commands::connections::search_connections, commands::connections::reorder_connections, commands::connections::reorder_groups,
            commands::credentials::list_credentials, commands::credentials::create_password, commands::credentials::create_ssh_key, commands::credentials::delete_credential, commands::credentials::decrypt_password, commands::credentials::decrypt_ssh_key,
            commands::ssh_commands::connect_ssh, commands::ssh_commands::connect_ssh_with_key, commands::ssh_commands::ssh_write, commands::ssh_commands::ssh_resize, commands::ssh_commands::disconnect_ssh, commands::ssh_commands::disconnect_session, commands::ssh_commands::list_ssh_sessions,
            commands::sftp_commands::sftp_list, commands::sftp_commands::sftp_read_file, commands::sftp_commands::sftp_write_file, commands::sftp_commands::sftp_mkdir, commands::sftp_commands::sftp_delete, commands::sftp_commands::sftp_rename,
            commands::rdp_commands::connect_rdp, commands::rdp_commands::rdp_get_frame, commands::rdp_commands::rdp_send_mouse, commands::rdp_commands::rdp_send_key, commands::rdp_commands::rdp_send_clipboard, commands::rdp_commands::disconnect_rdp, commands::rdp_commands::list_rdp_sessions,
            commands::theme_commands::list_themes, commands::theme_commands::get_theme,
            commands::pty_commands::list_available_shells, commands::pty_commands::spawn_local_shell, commands::pty_commands::pty_write, commands::pty_commands::pty_resize, commands::pty_commands::disconnect_pty,
            commands::mcp_commands::mcp_list_sessions, commands::mcp_commands::mcp_terminal_read, commands::mcp_commands::mcp_terminal_execute, commands::mcp_commands::mcp_get_session_context, commands::mcp_commands::mcp_bridge_path,
            commands::scanner_commands::scan_network, commands::scanner_commands::scan_ports, commands::scanner_commands::quick_scan,
            commands::tools_commands::tool_ping, commands::tools_commands::tool_traceroute, commands::tools_commands::tool_wake_on_lan, commands::tools_commands::tool_generate_ssh_key, commands::tools_commands::tool_generate_password,
            commands::tools_commands_r2::tool_dns_lookup, commands::tools_commands_r2::tool_whois, commands::tools_commands_r2::tool_bandwidth_iperf, commands::tools_commands_r2::tool_bandwidth_speedtest, commands::tools_commands_r2::tool_subnet_calc,
            commands::updater::check_for_updates,
            commands::workspace_commands::save_workspace, commands::workspace_commands::load_workspace,
            commands::docker_commands::docker_list_containers, commands::docker_commands::docker_list_images, commands::docker_commands::docker_list_volumes, commands::docker_commands::docker_action,
        ])
        .run(tauri::generate_context!())
        .expect("error while running tauri application");
}
|