Mirror of https://github.com/outbackdingo/firezone.git
refactor(gui-client): tidy up app startup (#9468)
As part of investigating #9400, I refactored the startup code of the GUI client to play around with the initialization order of the runtime and other parts. After finding the root cause (fixed in #9469), I figured it would still be nice to land these improvements.

This refactors the app startup:

- Only initialize what is absolutely necessary outside of `try_main`: the runtime and the telemetry instance.
- Settings are loaded earlier.
- Telemetry is initialized earlier.
- Add a bootstrap logger that logs to stdout whilst we are booting.
- Re-order some code inside `gui::run` to bundle the initialization of Tauri into a single command chain.

---------

Signed-off-by: Thomas Eizinger <thomas@eizinger.io>
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
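For readers unfamiliar with the pattern: a bootstrap logger installs a temporary `tracing` subscriber for the current thread and hands back a `DefaultGuard`; dropping the guard uninstalls it again, which is what makes the clean hand-over to the permanent logger possible. A minimal sketch of the idea — assuming the `tracing` and `tracing-subscriber` crates, with names chosen for illustration; the actual `setup_bootstrap` added by this commit is in the last hunk below:

```rust
use tracing::subscriber::DefaultGuard;
use tracing_subscriber::{EnvFilter, Layer as _, Registry, layer::SubscriberExt as _};

/// Install a temporary stdout logger for the boot phase.
/// The guard is thread-local; logging reverts once it is dropped.
fn setup_bootstrap() -> DefaultGuard {
    let filter = EnvFilter::try_from_default_env().unwrap_or_else(|_| EnvFilter::new("info"));
    let layer = tracing_subscriber::fmt::layer().with_filter(filter);

    tracing::subscriber::set_default(Registry::default().with(layer))
}

fn main() {
    let bootstrap_log_guard = setup_bootstrap();
    tracing::info!("booting"); // goes to stdout via the bootstrap subscriber

    // ... load settings, compute the real log filter ...

    drop(bootstrap_log_guard); // hand over to the permanent logger installed next
}
```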
@@ -10,29 +10,26 @@ use anyhow::{Context as _, Result, bail};
 use clap::{Args, Parser};
 use controller::Failure;
 use firezone_gui_client::{controller, deep_link, elevation, gui, logging, settings};
-use firezone_telemetry::Telemetry;
+use firezone_telemetry::{Telemetry, analytics};
 use settings::AdvancedSettingsLegacy;
+use tokio::runtime::Runtime;
+use tracing::subscriber::DefaultGuard;
 use tracing_subscriber::EnvFilter;

 fn main() -> ExitCode {
+    let bootstrap_log_guard =
+        firezone_logging::setup_bootstrap().expect("Failed to setup bootstrap logger");
+
     // Mitigates a bug in Ubuntu 22.04 - Under Wayland, some features of the window decorations like minimizing, closing the windows, etc., doesn't work unless you double-click the titlebar first.
     // SAFETY: No other thread is running yet
     unsafe {
         std::env::set_var("GDK_BACKEND", "x11");
     }

-    let cli = Cli::parse();
-
     // TODO: Remove, this is only needed for Portal connections and the GUI process doesn't connect to the Portal. Unless it's also needed for update checks.
     rustls::crypto::ring::default_provider()
         .install_default()
         .expect("Calling `install_default` only once per process should always succeed");

     let mut telemetry = Telemetry::default();
-    let settings = settings::load_advanced_settings::<AdvancedSettingsLegacy>().unwrap_or_default();
-    let rt = tokio::runtime::Runtime::new().expect("Couldn't start Tokio runtime");
+    let rt = tokio::runtime::Runtime::new().expect("failed to build runtime");

-    match try_main(cli, &rt, &mut telemetry, settings) {
+    match try_main(&rt, bootstrap_log_guard, &mut telemetry) {
         Ok(()) => {
             rt.block_on(telemetry.stop());
@@ -49,11 +46,12 @@ fn main() -> ExitCode {
 }

 fn try_main(
-    cli: Cli,
-    rt: &tokio::runtime::Runtime,
+    rt: &Runtime,
+    bootstrap_log_guard: DefaultGuard,
     telemetry: &mut Telemetry,
-    mut settings: AdvancedSettingsLegacy,
 ) -> Result<()> {
+    let cli = Cli::parse();
+
     let config = gui::RunConfig {
         inject_faults: cli.inject_faults,
         debug_update_check: cli.debug_update_check,
@@ -66,18 +64,50 @@ fn try_main(
         fail_with: cli.fail_on_purpose(),
     };

+    let mut advanced_settings =
+        settings::load_advanced_settings::<AdvancedSettingsLegacy>().unwrap_or_default();
+
+    let mdm_settings = settings::load_mdm_settings()
+        .inspect_err(|e| tracing::debug!("Failed to load MDM settings {e:#}"))
+        .unwrap_or_default();
+
+    let api_url = mdm_settings
+        .api_url
+        .as_ref()
+        .unwrap_or(&advanced_settings.api_url)
+        .to_string();
+
+    telemetry.start(
+        &api_url,
+        firezone_gui_client::RELEASE,
+        firezone_telemetry::GUI_DSN,
+    );
+
     // Don't fix the log filter for smoke tests because we can't show a dialog there.
     if !config.smoke_test {
-        fix_log_filter(&mut settings)?;
+        fix_log_filter(&mut advanced_settings)?;
     }

-    let log_filter = std::env::var("RUST_LOG").unwrap_or_else(|_| settings.log_filter.clone());
+    let log_filter = std::env::var("RUST_LOG")
+        .ok()
+        .or(mdm_settings.log_filter.clone())
+        .unwrap_or_else(|| advanced_settings.log_filter.clone());
+
+    drop(bootstrap_log_guard);

     let logging::Handles {
         logger: _logger,
         reloader,
     } = firezone_gui_client::logging::setup_gui(&log_filter)?;

+    // Get the device ID before starting Tokio, so that all the worker threads will inherit the correct scope.
+    // Technically this means we can fail to get the device ID on a newly-installed system, since the Tunnel service may not have fully started up when the GUI process reaches this point, but in practice it's unlikely.
+    if let Ok(id) = firezone_bin_shared::device_id::get() {
+        Telemetry::set_firezone_id(id.id.clone());
+
+        analytics::identify(id.id, api_url, firezone_gui_client::RELEASE.to_owned());
+    }
+
     match cli.command {
         None if cli.check_elevation() => match elevation::gui_check() {
             Ok(true) => {}
@@ -118,7 +148,7 @@ fn try_main(
         }
         Some(Cmd::SmokeTest) => {
             // Can't check elevation here because the Windows CI is always elevated
-            gui::run(rt, telemetry, config, settings, reloader)?;
+            gui::run(rt, config, mdm_settings, advanced_settings, reloader)?;

             return Ok(());
         }
@@ -126,7 +156,7 @@ fn try_main(

     // Happy-path: Run the GUI.

-    match gui::run(rt, telemetry, config, settings, reloader) {
+    match gui::run(rt, config, mdm_settings, advanced_settings, reloader) {
         Ok(()) => {}
         Err(anyhow) => {
             if anyhow
@@ -17,11 +17,10 @@ use crate::{
 };
 use anyhow::{Context, Result, bail};
 use firezone_logging::err_with_src;
-use firezone_telemetry::{Telemetry, analytics};
 use futures::SinkExt as _;
 use std::time::Duration;
 use tauri::{Emitter, Manager};
-use tokio::sync::mpsc;
+use tokio::{runtime::Runtime, sync::mpsc};
 use tokio_stream::StreamExt;
 use tracing::instrument;
@@ -46,7 +45,7 @@ pub use os::set_autostart;
 /// Note that this never gets Dropped because of
 /// <https://github.com/tauri-apps/tauri/issues/8631>
 pub(crate) struct Managed {
-    pub ctlr_tx: CtlrTx,
+    pub req_tx: mpsc::Sender<ControllerRequest>,
     pub inject_faults: bool,
 }
@@ -225,46 +224,14 @@ pub enum ServerMsg {
 /// Runs the Tauri GUI and returns on exit or unrecoverable error
 #[instrument(skip_all)]
 pub fn run(
-    rt: &tokio::runtime::Runtime,
-    telemetry: &mut Telemetry,
+    rt: &Runtime,
     config: RunConfig,
+    mdm_settings: MdmSettings,
     advanced_settings: AdvancedSettingsLegacy,
     reloader: firezone_logging::FilterReloadHandle,
 ) -> Result<()> {
-    let mdm_settings = settings::load_mdm_settings()
-        .inspect_err(|e| tracing::debug!("Failed to load MDM settings {e:#}"))
-        .unwrap_or_default();
-
-    if let Some(directives) = mdm_settings.log_filter.as_ref() {
-        if let Err(e) = reloader.reload(directives) {
-            tracing::info!(%directives, "Failed to apply MDM logging directives: {e:#}");
-        }
-    }
-
-    let api_url = mdm_settings
-        .api_url
-        .as_ref()
-        .unwrap_or(&advanced_settings.api_url);
-
-    telemetry.start(
-        api_url.as_str(),
-        crate::RELEASE,
-        firezone_telemetry::GUI_DSN,
-    );
-
-    // Get the device ID before starting Tokio, so that all the worker threads will inherit the correct scope.
-    // Technically this means we can fail to get the device ID on a newly-installed system, since the Tunnel service may not have fully started up when the GUI process reaches this point, but in practice it's unlikely.
-    if let Ok(id) = firezone_bin_shared::device_id::get() {
-        Telemetry::set_firezone_id(id.id.clone());
-
-        analytics::identify(id.id, api_url.to_string(), crate::RELEASE.to_owned());
-    }
-
     // Needed for the deep link server
     tauri::async_runtime::set(rt.handle().clone());

     let _guard = rt.enter();

     let gui_ipc = match rt.block_on(create_gui_ipc_server()) {
         Ok(gui_ipc) => gui_ipc,
         Err(e) => {
@@ -278,35 +245,9 @@ pub fn run(
     rt.block_on(settings::migrate_legacy_settings(advanced_settings));

     let (ctlr_tx, ctlr_rx) = mpsc::channel(5);
+    let req_tx = ctlr_tx.clone();
     let (ready_tx, mut ready_rx) = mpsc::channel::<tauri::AppHandle>(1);

-    let managed = Managed {
-        ctlr_tx: ctlr_tx.clone(),
-        inject_faults: config.inject_faults,
-    };
-
-    let app = tauri::Builder::default()
-        .manage(managed)
-        .on_window_event(|window, event| {
-            if let tauri::WindowEvent::CloseRequested { api, .. } = event {
-                // Keep the frontend running but just hide this webview
-                // Per https://tauri.app/v1/guides/features/system-tray/#preventing-the-app-from-closing
-                // Closing the window fully seems to deallocate it or something.
-
-                if let Err(e) = window.hide() {
-                    tracing::warn!("Failed to hide window: {}", err_with_src(&e))
-                };
-                api.prevent_close();
-            }
-        })
-        .invoke_handler(crate::view::generate_handler())
-        .plugin(tauri_plugin_dialog::init())
-        .plugin(tauri_plugin_notification::init())
-        .plugin(tauri_plugin_shell::init())
-        .plugin(tauri_plugin_opener::init())
-        .build(tauri::generate_context!())
-        .context("Failed to build Tauri app instance")?;
-
     // Spawn the setup task.
     // Everything we need to do once Tauri is fully initialised goes in here.
     let setup_task = rt.spawn(async move {
@@ -321,7 +262,9 @@ pub fn run(
         if mdm_settings.check_for_updates.is_none_or(|check| check) {
             // Check for updates
             tokio::spawn(async move {
-                if let Err(error) = updates::checker_task(updates_tx, config.debug_update_check).await {
+                if let Err(error) =
+                    updates::checker_task(updates_tx, config.debug_update_check).await
+                {
                     tracing::error!("Error in updates::checker_task: {error:#}");
                 }
             });
@@ -345,7 +288,8 @@ pub fn run(
         if !config.no_deep_links {
             // The single-instance check is done, so register our exe
             // to handle deep links
-            let exe = tauri_utils::platform::current_exe().context("Can't find our own exe path")?;
+            let exe =
+                tauri_utils::platform::current_exe().context("Can't find our own exe path")?;
             deep_link::register(exe).context("Failed to register deep link handler")?;
         }

@@ -382,20 +326,17 @@ pub fn run(
             "BUNDLE_ID should match bundle ID in tauri.conf.json"
         );

-        let tray =
-            system_tray::Tray::new(
-                app_handle.clone(),
-                |app, event| match handle_system_tray_event(app, event) {
-                    Ok(_) => {}
-                    Err(e) => tracing::error!("{e}"),
-                },
-            )?;
+        let tray = system_tray::Tray::new(app_handle.clone(), |app, event| {
+            match handle_system_tray_event(app, event) {
+                Ok(_) => {}
+                Err(e) => tracing::error!("{e}"),
+            }
+        })?;
         let integration = TauriIntegration {
             app: app_handle,
             tray,
         };

         // Spawn the controller
         let ctrl_task = tokio::spawn(Controller::start(
             ctlr_tx,
@@ -406,32 +347,54 @@ pub fn run(
             advanced_settings,
             reloader,
             updates_rx,
-            gui_ipc
+            gui_ipc,
         ));

         anyhow::Ok(ctrl_task)
     });

     // Run the Tauri app to completion, i.e. until `app_handle.exit(0)` is called.
     // This blocks the current thread!
-    app.run_return(move |app_handle, event| {
-        #[expect(
-            clippy::wildcard_enum_match_arm,
-            reason = "We only care about these two events from Tauri"
-        )]
-        match event {
-            tauri::RunEvent::ExitRequested {
-                api, code: None, .. // `code: None` means the user closed the last window.
-            } => {
-                api.prevent_exit();
-            }
-            tauri::RunEvent::Ready => {
-                // Notify our setup task that we are ready!
-                let _ = ready_tx.try_send(app_handle.clone());
-            }
-            _ => (),
-        }
-    });
+    tauri::Builder::default()
+        .manage(Managed {
+            req_tx,
+            inject_faults: config.inject_faults,
+        })
+        .on_window_event(|window, event| {
+            if let tauri::WindowEvent::CloseRequested { api, .. } = event {
+                // Keep the frontend running but just hide this webview
+                // Per https://tauri.app/v1/guides/features/system-tray/#preventing-the-app-from-closing
+                // Closing the window fully seems to deallocate it or something.
+
+                if let Err(e) = window.hide() {
+                    tracing::warn!("Failed to hide window: {}", err_with_src(&e))
+                };
+                api.prevent_close();
+            }
+        })
+        .invoke_handler(crate::view::generate_handler())
+        .plugin(tauri_plugin_dialog::init())
+        .plugin(tauri_plugin_notification::init())
+        .plugin(tauri_plugin_shell::init())
+        .plugin(tauri_plugin_opener::init())
+        .build(tauri::generate_context!())
+        .context("Failed to build Tauri app instance")?
+        .run_return(move |app_handle, event| {
+            #[expect(
+                clippy::wildcard_enum_match_arm,
+                reason = "We only care about these two events from Tauri"
+            )]
+            match event {
+                tauri::RunEvent::ExitRequested {
+                    api, code: None, .. // `code: None` means the user closed the last window.
+                } => {
+                    api.prevent_exit();
+                }
+                tauri::RunEvent::Ready => {
+                    // Notify our setup task that we are ready!
+                    let _ = ready_tx.try_send(app_handle.clone());
+                }
+                _ => (),
+            }
+        });

     // Wait until the controller task finishes.
     rt.block_on(async move {
@@ -530,7 +493,7 @@ async fn smoke_test(ctlr_tx: CtlrTx) -> Result<()> {
 fn handle_system_tray_event(app: &tauri::AppHandle, event: system_tray::Event) -> Result<()> {
     app.try_state::<Managed>()
         .context("can't get Managed struct from Tauri")?
-        .ctlr_tx
+        .req_tx
         .blocking_send(ControllerRequest::SystemTrayMenu(event))?;
     Ok(())
 }

@@ -39,7 +39,7 @@ async fn clear_logs(managed: tauri::State<'_, Managed>) -> Result<()> {
     let (tx, rx) = tokio::sync::oneshot::channel();

     managed
-        .ctlr_tx
+        .req_tx
         .send(ControllerRequest::ClearLogs(tx))
         .await
         .context("Failed to send `ClearLogs` command")?;
@@ -53,7 +53,7 @@ async fn clear_logs(managed: tauri::State<'_, Managed>) -> Result<()> {

 #[tauri::command]
 async fn export_logs(app: tauri::AppHandle, managed: tauri::State<'_, Managed>) -> Result<()> {
-    show_export_dialog(&app, managed.ctlr_tx.clone())?;
+    show_export_dialog(&app, managed.req_tx.clone())?;

     Ok(())
 }
@@ -68,7 +68,7 @@ async fn apply_general_settings(
     }

     managed
-        .ctlr_tx
+        .req_tx
         .send(ControllerRequest::ApplyGeneralSettings(Box::new(settings)))
         .await
         .context("Failed to send `ApplyGeneralSettings` command")?;
@@ -86,7 +86,7 @@ async fn apply_advanced_settings(
     }

     managed
-        .ctlr_tx
+        .req_tx
         .send(ControllerRequest::ApplyAdvancedSettings(Box::new(settings)))
         .await
         .context("Failed to send `ApplySettings` command")?;
@@ -104,7 +104,7 @@ async fn reset_advanced_settings(managed: tauri::State<'_, Managed>) -> Result<()> {
 #[tauri::command]
 async fn reset_general_settings(managed: tauri::State<'_, Managed>) -> Result<()> {
     managed
-        .ctlr_tx
+        .req_tx
         .send(ControllerRequest::ResetGeneralSettings)
         .await
         .context("Failed to send `ResetGeneralSettings` command")?;
@@ -149,7 +149,7 @@ fn show_export_dialog(app: &tauri::AppHandle, ctlr_tx: CtlrTx) -> Result<()> {
 #[tauri::command]
 async fn sign_in(managed: tauri::State<'_, Managed>) -> Result<()> {
     managed
-        .ctlr_tx
+        .req_tx
         .send(ControllerRequest::SignIn)
         .await
         .context("Failed to send `SignIn` command")?;
@@ -160,7 +160,7 @@ async fn sign_in(managed: tauri::State<'_, Managed>) -> Result<()> {
 #[tauri::command]
 async fn sign_out(managed: tauri::State<'_, Managed>) -> Result<()> {
     managed
-        .ctlr_tx
+        .req_tx
         .send(ControllerRequest::SignOut)
         .await
         .context("Failed to send `SignOut` command")?;
@@ -171,7 +171,7 @@ async fn sign_out(managed: tauri::State<'_, Managed>) -> Result<()> {
 #[tauri::command]
 async fn update_state(managed: tauri::State<'_, Managed>) -> Result<()> {
     managed
-        .ctlr_tx
+        .req_tx
         .send(ControllerRequest::UpdateState)
         .await
         .context("Failed to send `UpdateState` command")?;
@@ -61,6 +61,19 @@ where
     Ok(reload_handle1.merge(reload_handle2))
 }

+/// Sets up a bootstrap logger.
+pub fn setup_bootstrap() -> Result<DefaultGuard> {
+    let directives = std::env::var("RUST_LOG").unwrap_or_else(|_| "info".to_string());
+
+    let (filter, _) = try_filter(&directives).context("failed to parse directives")?;
+    let layer = tracing_subscriber::fmt::layer()
+        .event_format(Format::new())
+        .with_filter(filter);
+    let subscriber = Registry::default().with(layer);
+
+    Ok(tracing::dispatcher::set_default(&subscriber.into()))
+}
+
 #[expect(
     clippy::disallowed_methods,
     reason = "This is the alternative function."