Update source files

2025-10-16 00:02:34 -05:00
commit 74127b0829
182 changed files with 30644 additions and 0 deletions

crates/borders-desktop/.gitignore vendored Normal file

@@ -0,0 +1,7 @@
# Generated by Cargo
# will have compiled files and executables
/target/
# Generated by Tauri
# will have schema files for capabilities auto-completion
/gen/schemas


@@ -0,0 +1,29 @@
[package]
name = "borders-desktop"
version.workspace = true
edition.workspace = true
authors.workspace = true

[features]
default = []
tracy = ["dep:tracing-tracy", "dep:tracy-client"]

[build-dependencies]
tauri-build = { version = "2", features = [] }

[dependencies]
bevy_ecs = { version = "0.17", default-features = false, features = ["std"] }
borders-core = { path = "../borders-core", features = ["ui"] }
serde = { version = "1", features = ["derive"] }
serde_json = "1"
tauri = { version = "2", features = [] }
tauri-plugin-opener = "2"
tauri-plugin-process = "2"
tokio = { version = "1", features = ["time"] }
tracing = "0.1"
tracing-subscriber = { version = "0.3", features = ["env-filter"] }
tracing-tracy = { version = "0.11", default-features = false, optional = true }
tracy-client = { version = "0.18.2", optional = true }

[package.metadata.cargo-machete]
ignored = ["tauri-build"]
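The tracy feature is off by default and only pulls in tracing-tracy plus tracy-client when enabled; a profiling build would presumably be produced with something like cargo build -p borders-desktop --features tracy, though the exact invocation this workspace uses is not part of the commit.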


@@ -0,0 +1,3 @@
fn main() {
    tauri_build::build()
}


@@ -0,0 +1,17 @@
{
  "$schema": "../gen/schemas/desktop-schema.json",
  "identifier": "default",
  "description": "Capability for the main window",
  "windows": ["main"],
  "permissions": [
    "core:default",
    {
      "identifier": "opener:allow-open-url",
      "allow": [
        {
          "url": "https://github.com/Xevion"
        }
      ]
    }
  ]
}

Binary files (application icons and other image assets) added; contents not shown.


@@ -0,0 +1,51 @@
use borders_core::telemetry::{self, TelemetryEvent};
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
use tauri::Manager;

/// Analytics event from the frontend
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct AnalyticsEventPayload {
    pub event: String,
    #[serde(default)]
    pub properties: HashMap<String, serde_json::Value>,
}

/// Tauri command to track analytics events from the frontend
#[tauri::command]
pub async fn track_analytics_event(payload: AnalyticsEventPayload) -> Result<(), String> {
    tracing::debug!("Tracking analytics event: {}", payload.event);
    let event = TelemetryEvent { event: payload.event, properties: payload.properties };
    // Track the event asynchronously (Tauri handles the async context)
    telemetry::track(event).await;
    Ok(())
}

/// Tauri command to flush pending analytics events
#[tauri::command]
pub async fn flush_analytics() -> Result<(), String> {
    if let Some(client) = telemetry::client() {
        client.flush().await;
        Ok(())
    } else {
        Err("Telemetry client not initialized".to_string())
    }
}

/// Tauri command to request app exit
///
/// Simply closes the window - analytics flush happens in ExitRequested event handler
#[tauri::command]
pub async fn request_exit(app_handle: tauri::AppHandle) -> Result<(), String> {
    tracing::debug!("Exit requested via command");
    // Close the window (will trigger ExitRequested event → analytics flush)
    if let Some(window) = app_handle.get_webview_window("main") {
        window.close().map_err(|e| e.to_string())?;
    }
    Ok(())
}
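For reference, a minimal sketch of exercising the command from Rust (the frontend normally reaches it through Tauri's invoke); the helper, event name, and property key below are illustrative, not part of this commit:

use std::collections::HashMap;

// Hypothetical helper: builds the same payload shape the frontend submits via invoke().
async fn send_example_event() -> Result<(), String> {
    let mut properties = HashMap::new();
    // "source" is an illustrative property key.
    properties.insert("source".to_string(), serde_json::json!("settings_menu"));
    let payload = AnalyticsEventPayload {
        event: "menu_opened".to_string(),
        properties,
    };
    // #[tauri::command] leaves the plain async fn callable from Rust as well.
    track_analytics_event(payload).await
}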


@@ -0,0 +1,105 @@
// Prevents additional console window on Windows in release, DO NOT REMOVE!!
#![cfg_attr(not(debug_assertions), windows_subsystem = "windows")]

use crate::plugin::{TauriPlugin, generate_tauri_context};
use borders_core::app::App;
use borders_core::time::Time;

mod analytics;
mod plugin;
mod render_bridge;

#[cfg_attr(mobile, tauri::mobile_entry_point)]
pub fn run() {
    let mut app = App::new();

    // Initialize time tracking
    app.insert_resource(Time::new());

    TauriPlugin::new(|| {
        let _guard = tracing::trace_span!("tauri_build").entered();
        tauri::Builder::default()
            .plugin(tauri_plugin_opener::init())
            .plugin(tauri_plugin_process::init())
            .invoke_handler(tauri::generate_handler![
                render_bridge::register_init_channel,
                render_bridge::send_frontend_message,
                render_bridge::handle_render_input,
                render_bridge::handle_camera_update,
                render_bridge::handle_map_query,
                render_bridge::get_game_state,
                analytics::track_analytics_event,
                analytics::flush_analytics,
                analytics::request_exit,
            ])
            .build(generate_tauri_context())
            .expect("error while building tauri application")
    })
    .build_and_run(app);
}

fn main() {
    // Initialize tracing before Bevy
    #[cfg(feature = "tracy")]
    {
        use tracing_subscriber::fmt::format::DefaultFields;
        use tracing_subscriber::layer::SubscriberExt;

        // Initialize Tracy profiler client
        let _ = tracy_client::Client::start();

        struct BareTracyConfig {
            fmt: DefaultFields,
        }

        impl tracing_tracy::Config for BareTracyConfig {
            type Formatter = DefaultFields;

            fn formatter(&self) -> &Self::Formatter {
                &self.fmt
            }

            fn format_fields_in_zone_name(&self) -> bool {
                false
            }
        }

        let tracy_layer = tracing_tracy::TracyLayer::new(BareTracyConfig { fmt: DefaultFields::default() });
        tracing::subscriber::set_global_default(tracing_subscriber::registry().with(tracy_layer))
            .expect("setup tracy layer");
    }

    #[cfg(not(feature = "tracy"))]
    {
        use tracing_subscriber::fmt::time::FormatTime;
        use tracing_subscriber::layer::SubscriberExt;
        use tracing_subscriber::util::SubscriberInitExt;

        #[cfg(debug_assertions)]
        let log_filter = "borders_core=debug,borders_protocol=debug,borders_desktop=debug,iron_borders=debug,info";
        #[cfg(not(debug_assertions))]
        let log_filter = "borders_core=warn,borders_protocol=warn,iron_borders=warn,error";

        struct CustomTimeFormat;

        impl FormatTime for CustomTimeFormat {
            fn format_time(&self, w: &mut tracing_subscriber::fmt::format::Writer<'_>) -> std::fmt::Result {
                let now = std::time::SystemTime::now().duration_since(std::time::UNIX_EPOCH).unwrap();
                let total_secs = now.as_secs();
                let nanos = now.subsec_nanos();

                let secs_in_day = total_secs % 86400;
                let hours = secs_in_day / 3600;
                let minutes = (secs_in_day % 3600) / 60;
                let seconds = secs_in_day % 60;
                let millis = nanos / 1_000_000;
                let micros = (nanos / 1_000) % 1_000;

                write!(w, "{:02}:{:02}:{:02}.{:03}{:03}", hours, minutes, seconds, millis, micros)
            }
        }

        tracing_subscriber::registry()
            .with(tracing_subscriber::EnvFilter::new(log_filter))
            .with(tracing_subscriber::fmt::layer().with_timer(CustomTimeFormat))
            .init();
    }

    // Log build information
    tracing::info!(
        git_commit = borders_core::build_info::git_commit_short(),
        build_time = borders_core::build_info::BUILD_TIME,
        "Iron Borders v{} © 2025 Ryan Walters. All Rights Reserved.",
        borders_core::build_info::VERSION
    );

    // Initialize telemetry
    {
        let _guard = tracing::trace_span!("telemetry_init").entered();
        tokio::runtime::Runtime::new().unwrap().block_on(async {
            borders_core::telemetry::init(borders_core::telemetry::TelemetryConfig::default()).await;
            borders_core::telemetry::track_session_start().await;
        });
        tracing::info!("Observability ready");
    }

    run();
}
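As a worked example of the custom timer above: a log written 7 seconds and 123,456,789 ns into 13:05 UTC renders as 13:05:07.123456 — the hours, minutes, and seconds come from the seconds-in-day arithmetic, and the two three-digit groups are the millisecond and microsecond components of the sub-second nanoseconds.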


@@ -0,0 +1,199 @@
//! Tauri-Bevy integration plugin
//!
//! This module provides the main integration between Tauri and Bevy, handling
//! the main application loop and event bridging.

use borders_core::app::{App, Plugin, Update};
use borders_core::time::Time;
use std::cell::RefCell;
use std::rc::Rc;
use std::sync::{Arc, Mutex};
use std::time::Duration;
use tauri::{Manager, RunEvent};

#[cfg(not(target_arch = "wasm32"))]
use std::time::Instant;
#[cfg(target_arch = "wasm32")]
use web_time::Instant;

use crate::render_bridge::{TauriRenderBridgeTransport, cache_leaderboard_snapshot_system};

const TARGET_FPS: f64 = 60.0;

pub fn generate_tauri_context() -> tauri::Context {
    tauri::generate_context!()
}

fn setup_tauri_integration(
    app: &mut App,
    tauri_app: &tauri::AppHandle,
    shared_render_state: Arc<Mutex<Option<borders_core::ui::protocol::RenderInit>>>,
    shared_leaderboard_state: Arc<Mutex<Option<borders_core::ui::protocol::LeaderboardSnapshot>>>,
) {
    let _guard = tracing::trace_span!("setup_tauri_integration").entered();
    tracing::debug!("Setup tauri integration");

    // Register state for render bridge commands
    tauri_app.manage(Arc::new(Mutex::new(None::<borders_core::ui::protocol::CameraStateUpdate>)));
    tauri_app.manage(Arc::new(Mutex::new(None::<borders_core::networking::GameView>)));

    // InputState - shared between Tauri commands and ECS systems
    let input_state_shared = Arc::new(Mutex::new(borders_core::ui::input::InputState::new()));
    tauri_app.manage(input_state_shared.clone());
    app.insert_non_send_resource(input_state_shared);

    // Register shared state with Tauri (for get_game_state command)
    tauri_app.manage(shared_render_state.clone());
    tauri_app.manage(shared_leaderboard_state.clone());

    // Get the message queue and init channel from the transport (already added as plugin)
    let transport = app
        .world()
        .get_resource::<borders_core::ui::RenderBridge<TauriRenderBridgeTransport>>()
        .expect("RenderBridge should be added by plugin");
    let message_queue = transport.transport.inbound_messages();
    let init_channel_storage = transport.transport.init_channel();
    tauri_app.manage(message_queue);
    tauri_app.manage(init_channel_storage);

    // Store shared states in world
    app.insert_non_send_resource(shared_leaderboard_state);
}

pub struct TauriPlugin {
    setup: Box<dyn Fn() -> tauri::App + Send + Sync>,
}

impl TauriPlugin {
    pub fn new<F>(setup: F) -> Self
    where
        F: Fn() -> tauri::App + Send + Sync + 'static,
    {
        Self { setup: Box::new(setup) }
    }
}

impl TauriPlugin {
    pub fn build_and_run(self, mut app: App) -> ! {
        let tauri_app = {
            let _guard = tracing::debug_span!("tauri_plugin_build").entered();
            let tauri_app = (self.setup)();

            // Create shared state for game state recovery
            let shared_render_state = Arc::new(Mutex::new(None::<borders_core::ui::protocol::RenderInit>));
            let shared_leaderboard_state = Arc::new(Mutex::new(None::<borders_core::ui::protocol::LeaderboardSnapshot>));

            // Create transport for Tauri frontend (handles both render and UI communication)
            let transport = TauriRenderBridgeTransport::new(tauri_app.handle().clone(), shared_render_state.clone());

            // Add the render bridge plugin to handle all frontend communication
            borders_core::ui::FrontendPlugin::new(transport).build(&mut app);

            // Set up Tauri integration directly (no startup system needed)
            setup_tauri_integration(&mut app, tauri_app.handle(), shared_render_state.clone(), shared_leaderboard_state.clone());

            // Add the leaderboard caching system
            app.add_systems(Update, cache_leaderboard_snapshot_system);

            // Pre-initialize game plugin BEFORE entering the event loop
            // This prevents a 300ms delay waiting for the first loop iteration
            {
                let _guard = tracing::debug_span!("pre_initialize_game").entered();
                tracing::info!("Pre-initializing game systems...");
                borders_core::GamePlugin::new(borders_core::plugin::NetworkMode::Local).build(&mut app);
                app.run_startup();
                app.finish();
                app.cleanup();
                tracing::info!("Game systems pre-initialized");
            }

            tauri_app
        };

        // Run the app (already initialized)
        run_tauri_app(app, tauri_app, true); // Pass true to skip re-init
        std::process::exit(0)
    }
}

pub fn run_tauri_app(app: App, tauri_app: tauri::App, already_initialized: bool) {
    let app_rc = Rc::new(RefCell::new(app));
    let mut tauri_app = tauri_app;
    let mut is_initialized = already_initialized; // Skip init if already done
    let mut last_frame_time = Instant::now();
    let target_frame_duration = Duration::from_secs_f64(1.0 / TARGET_FPS);

    loop {
        let _guard = tracing::trace_span!("main_frame").entered();
        let frame_start = Instant::now();

        #[allow(deprecated)]
        tauri_app.run_iteration(move |_app_handle, event: RunEvent| {
            match event {
                tauri::RunEvent::Ready => {
                    // Event acknowledged, actual setup happens below
                }
                tauri::RunEvent::ExitRequested { .. } => {
                    // Track session end and flush analytics before exit
                    if borders_core::telemetry::client().is_some() {
                        tracing::debug!("ExitRequested: tracking session end and flushing analytics");

                        // Create a minimal runtime for blocking operations
                        let runtime = tokio::runtime::Builder::new_current_thread()
                            .enable_time()
                            .enable_io()
                            .build()
                            .expect("Failed to create tokio runtime for flush");

                        runtime.block_on(async {
                            // Track session end event
                            borders_core::telemetry::track_session_end().await;

                            // Flush all pending events (the batch-triggered send is now synchronous)
                            if let Some(client) = borders_core::telemetry::client() {
                                let timeout = std::time::Duration::from_millis(500);
                                match tokio::time::timeout(timeout, client.flush()).await {
                                    Ok(_) => {
                                        tracing::debug!("Analytics flushed successfully before exit")
                                    }
                                    Err(_) => {
                                        tracing::warn!("Analytics flush timed out after 500ms")
                                    }
                                }
                            }
                        });
                    }
                }
                _ => (),
            }
        });

        if tauri_app.webview_windows().is_empty() {
            tauri_app.cleanup_before_exit();
            break;
        }

        // Initialize game plugin on first iteration after Tauri is ready
        if !is_initialized {
            let _guard = tracing::debug_span!("app_initialization").entered();
            let mut app = app_rc.borrow_mut();

            // Add core game plugin
            borders_core::GamePlugin::new(borders_core::plugin::NetworkMode::Local).build(&mut app);
            app.run_startup();
            app.finish();
            app.cleanup();

            is_initialized = true;
            last_frame_time = Instant::now(); // Reset timer after initialization
            tracing::info!("Game initialized");
        }

        // Update time resource with delta from PREVIOUS frame
        let mut app = app_rc.borrow_mut();
        let delta = frame_start.duration_since(last_frame_time);
        if let Some(mut time) = app.world_mut().get_resource_mut::<Time>() {
            time.update(delta);
        }

        app.update();

        let frame_duration = frame_start.elapsed();
        if frame_duration < target_frame_duration {
            std::thread::sleep(target_frame_duration - frame_duration);
        }
        last_frame_time = frame_start;
    }
}
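As a worked example of the frame pacing above: with TARGET_FPS = 60 the target frame duration is 1/60 s ≈ 16.67 ms, so if one Tauri event-loop iteration plus app.update() finishes in 4 ms, the loop sleeps the remaining ~12.67 ms before starting the next frame, keeping the ECS ticking at roughly 60 Hz instead of spinning.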


@@ -0,0 +1,222 @@
//! Tauri-specific frontend transport and command handlers
//!
//! This module provides the Tauri implementation of FrontendTransport,
//! along with Tauri command handlers for input events, camera updates, and
//! state recovery.

use std::collections::VecDeque;
use std::sync::{Arc, Mutex};

use bevy_ecs::message::MessageReader;
use bevy_ecs::system::NonSend;
use borders_core::networking::GameView;
use borders_core::ui::FrontendTransport;
use borders_core::ui::protocol::{
    BackendMessage, CameraStateUpdate, FrontendMessage, LeaderboardSnapshot, MapQuery,
    MapQueryResponse, RenderInit, RenderInputEvent, TerrainType,
};
use serde::{Deserialize, Serialize};
use tauri::{AppHandle, Emitter, ipc::Channel};
use tracing::trace;

/// Storage for the init channel used for binary streaming of initialization data
pub struct InitChannelStorage(pub Arc<Mutex<Option<Channel<Vec<u8>>>>>);

/// Tauri-specific frontend transport using Tauri events
#[derive(Clone)]
pub struct TauriRenderBridgeTransport {
    app_handle: AppHandle,
    /// Shared state for RenderInit (accessible from Tauri commands)
    shared_render_state: Arc<Mutex<Option<RenderInit>>>,
    /// Inbound messages from the frontend
    inbound_messages: Arc<Mutex<VecDeque<FrontendMessage>>>,
    /// Init channel for binary data streaming (terrain + territory)
    init_channel: Arc<Mutex<Option<Channel<Vec<u8>>>>>,
}

impl TauriRenderBridgeTransport {
    pub fn new(app_handle: AppHandle, shared_render_state: Arc<Mutex<Option<RenderInit>>>) -> Self {
        Self {
            app_handle,
            shared_render_state,
            inbound_messages: Arc::new(Mutex::new(VecDeque::new())),
            init_channel: Arc::new(Mutex::new(None)),
        }
    }

    /// Get a reference to the inbound messages queue (for Tauri command handler)
    pub fn inbound_messages(&self) -> Arc<Mutex<VecDeque<FrontendMessage>>> {
        self.inbound_messages.clone()
    }

    /// Get the init channel for registration
    pub fn init_channel(&self) -> InitChannelStorage {
        InitChannelStorage(self.init_channel.clone())
    }
}

impl FrontendTransport for TauriRenderBridgeTransport {
    fn supports_init_binary(&self) -> bool {
        true // Tauri supports binary channel streaming
    }

    fn send_backend_message(&self, message: &BackendMessage) -> Result<(), String> {
        let _guard = tracing::trace_span!(
            "tauri_send_backend_message",
            message_type = match message {
                BackendMessage::RenderInit(_) => "RenderInit",
                BackendMessage::TerritorySnapshot(_) => "TerritorySnapshot",
                BackendMessage::TerritoryDelta(_) => "TerritoryDelta",
                BackendMessage::TerrainInit(_) => "TerrainInit",
                BackendMessage::TerrainPalette(_) => "TerrainPalette",
                BackendMessage::PaletteInit(_) => "PaletteInit",
                BackendMessage::CameraCommand(_) => "CameraCommand",
                BackendMessage::MapQueryResponse(_) => "MapQueryResponse",
                BackendMessage::LeaderboardSnapshot(_) => "LeaderboardSnapshot",
                BackendMessage::AttacksUpdate(_) => "AttacksUpdate",
                BackendMessage::ShipsUpdate(_) => "ShipsUpdate",
                BackendMessage::GameEnded { .. } => "GameEnded",
                BackendMessage::SpawnPhaseUpdate { .. } => "SpawnPhaseUpdate",
                BackendMessage::SpawnPhaseEnded => "SpawnPhaseEnded",
                BackendMessage::HighlightNation { .. } => "HighlightNation",
            }
        )
        .entered();

        // Cache RenderInit for state recovery on reload
        if let BackendMessage::RenderInit(render_init) = message
            && let Ok(mut state) = self.shared_render_state.lock()
        {
            *state = Some(render_init.clone());
        }

        self.app_handle
            .emit("backend:message", message)
            .map_err(|e| format!("Failed to emit backend message: {}", e))
    }

    fn send_init_binary(&self, data: Vec<u8>) -> Result<(), String> {
        let _guard = tracing::trace_span!("tauri_send_init_binary", size = data.len()).entered();
        let ch_lock = self.init_channel.lock().map_err(|_| "Failed to lock init channel")?;
        let channel = ch_lock.as_ref().ok_or("Init channel not registered")?;
        channel.send(data).map_err(|e| format!("Failed to send init data via channel: {}", e))
    }

    fn send_binary_delta(&self, data: Vec<u8>) -> Result<(), String> {
        let _guard = tracing::trace_span!("tauri_send_binary_delta", size = data.len()).entered();
        self.app_handle
            .emit("render:pixel_stream", &data)
            .map_err(|e| format!("Failed to emit pixel stream: {}", e))
    }

    fn try_recv_frontend_message(&self) -> Option<FrontendMessage> {
        if let Ok(mut messages) = self.inbound_messages.lock() {
            messages.pop_front()
        } else {
            None
        }
    }
}

/// Tauri command to register the init channel for binary streaming
#[tauri::command]
pub fn register_init_channel(
    init_channel: Channel<Vec<u8>>,
    channel_storage: tauri::State<InitChannelStorage>,
) -> Result<(), String> {
    tracing::info!("Init channel registered");
    channel_storage
        .0
        .lock()
        .map_err(|_| {
            tracing::error!("Failed to acquire lock on init channel storage");
            "Failed to acquire lock on init channel storage".to_string()
        })?
        .replace(init_channel);
    Ok(())
}

/// Tauri command handler for receiving frontend messages
#[tauri::command]
pub fn send_frontend_message(
    message: FrontendMessage,
    bridge: tauri::State<Arc<Mutex<VecDeque<FrontendMessage>>>>,
) -> Result<(), String> {
    tracing::info!("Frontend sent message: {:?}", message);
    if let Ok(mut messages) = bridge.lock() {
        messages.push_back(message);
        tracing::debug!("Message queued, queue size: {}", messages.len());
        Ok(())
    } else {
        tracing::error!("Failed to acquire lock on message queue");
        Err("Failed to acquire lock on message queue".to_string())
    }
}

/// Handle input events from the frontend
#[tauri::command]
pub fn handle_render_input(
    event: RenderInputEvent,
    input_state: tauri::State<Arc<Mutex<borders_core::ui::input::InputState>>>,
) -> Result<(), String> {
    let mut state = input_state.lock().map_err(|e| format!("Failed to lock input state: {}", e))?;
    // TODO: Get actual map width from GameView or TerrainData
    let map_width = 2560; // Placeholder
    borders_core::ui::handle_render_input(&event, &mut state, map_width)
}

/// Handle camera state updates from the frontend
#[tauri::command]
pub fn handle_camera_update(
    update: CameraStateUpdate,
    bridge: tauri::State<Arc<Mutex<Option<CameraStateUpdate>>>>,
) -> Result<(), String> {
    let mut state = bridge.lock().map_err(|e| format!("Failed to lock camera state: {}", e))?;
    borders_core::ui::handle_camera_update(update, &mut state)
}

/// Handle map queries from the frontend
#[tauri::command]
pub fn handle_map_query(
    query: MapQuery,
    game_view: tauri::State<Arc<Mutex<Option<GameView>>>>,
) -> Result<MapQueryResponse, String> {
    let view = game_view.lock().map_err(|e| format!("Failed to lock game view: {}", e))?;
    let Some(ref view) = *view else {
        return Err("Game view not available".to_string());
    };

    match query {
        MapQuery::GetOwnerAt { x: _, y: _ } => {
            // This query is not used with Pixi.js frontend - frontend sends tile indices directly
            Ok(MapQueryResponse::Owner { owner_id: 0, tile_index: None })
        }
        MapQuery::GetTileInfo { tile_index } => {
            let owner_id = view.get_owner(tile_index);
            // TODO: Get actual terrain and troop data
            Ok(MapQueryResponse::TileInfo {
                tile_index,
                owner_id,
                terrain: TerrainType::Land, // Placeholder
                troops: 0,                  // Placeholder
            })
        }
        MapQuery::FindPlayerTerritory { player_id } => {
            let tile_index = view.find_tile_owned_by(player_id);
            Ok(MapQueryResponse::PlayerTerritory { tile_index })
        }
        MapQuery::ScreenToTile { screen_x: _, screen_y: _ } => {
            // This would need camera state to work properly
            Ok(MapQueryResponse::TileIndex { index: None })
        }
    }
}

/// Combined state for recovery after reload
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct GameStateRecovery {
    pub render_init: Option<RenderInit>,
    pub leaderboard_snapshot: Option<LeaderboardSnapshot>,
}

/// Get current game state for frontend recovery after reload
#[tauri::command]
pub fn get_game_state(
    render_state: tauri::State<Arc<Mutex<Option<RenderInit>>>>,
    leaderboard_state: tauri::State<Arc<Mutex<Option<LeaderboardSnapshot>>>>,
) -> Result<GameStateRecovery, String> {
    let render_init = render_state.lock().map_err(|e| format!("Failed to lock render state: {}", e))?.clone();
    let leaderboard_snapshot = leaderboard_state.lock().map_err(|e| format!("Failed to lock leaderboard state: {}", e))?.clone();
    Ok(GameStateRecovery { render_init, leaderboard_snapshot })
}

/// System to cache leaderboard snapshots for state recovery
pub fn cache_leaderboard_snapshot_system(
    mut events: MessageReader<BackendMessage>,
    shared_leaderboard_state: Option<NonSend<Arc<Mutex<Option<LeaderboardSnapshot>>>>>,
) {
    let Some(shared_state) = shared_leaderboard_state else {
        return;
    };

    for event in events.read() {
        if let BackendMessage::LeaderboardSnapshot(snapshot) = event
            && let Ok(mut state) = shared_state.lock()
        {
            *state = Some(snapshot.clone());
            trace!("Cached leaderboard snapshot for state recovery");
        }
    }
}


@@ -0,0 +1,40 @@
{
  "$schema": "https://schema.tauri.app/config/2",
  "productName": "iron-borders",
  "version": "0.1.0",
  "identifier": "com.xevion.iron-borders",
  "build": {
    "beforeDevCommand": "pnpm dev",
    "devUrl": "http://localhost:1420",
    "beforeBuildCommand": "pnpm build:desktop",
    "frontendDist": "../../frontend/dist/client"
  },
  "app": {
    "windows": [
      {
        "title": "Iron Borders",
        "width": 1280,
        "height": 720
      }
    ],
    "security": {
      "csp": null
    }
  },
  "plugins": {
    "process": {
      "all": true
    }
  },
  "bundle": {
    "active": true,
    "targets": ["appimage", "deb", "rpm", "dmg"],
    "icon": [
      "icons/32x32.png",
      "icons/128x128.png",
      "icons/128x128@2x.png",
      "icons/icon.icns",
      "icons/icon.ico"
    ]
  }
}