Mirror of https://github.com/Xevion/xevion.dev.git (synced 2026-01-31 06:26:44 -06:00)
feat: add ISR cache with stale-while-revalidate pattern
Implements in-memory caching for SSR pages using moka with:
- Configurable fresh/stale TTLs (60s/300s defaults)
- Background refresh for stale entries
- Cache invalidation on project/tag mutations
- Pre-cached icon collections on startup
- Skips cache for authenticated requests
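The request flow this adds follows the usual stale-while-revalidate shape. As a rough orientation only (a condensed restatement of the isr_handler changes in the diff below, using the same names; not a drop-in snippet):

    // Condensed sketch of the fresh/stale decision made per request in isr_handler.
    if let Some(cached) = state.isr_cache.get(&key).await {
        if cached.is_fresh(fresh) {
            // Fresh hit: serve the cached response as-is.
            return serve_cached_response(&cached, is_head);
        }
        if cached.is_stale_but_usable(fresh, stale) {
            // Stale hit: serve the old copy and refresh at most once in the background.
            if state.isr_cache.start_refresh(&key) {
                tokio::spawn(refresh_cache_entry(state.clone(), key.clone()));
            }
            return serve_cached_response(&cached, is_head);
        }
    }
    // Expired or missing: proxy to Bun SSR and cache successful public responses.

The TTL defaults can be overridden with the ISR_CACHE_MAX_ENTRIES, ISR_CACHE_FRESH_SEC, ISR_CACHE_STALE_SEC, and ISR_CACHE_ENABLED environment variables (see IsrCacheConfig::from_env in the new cache module).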

Vendored  +1
@@ -1,4 +1,5 @@
 .env*
+/*.txt
 web/node_modules/
 target/
 .vscode/

Generated  +72
@@ -82,6 +82,7 @@ dependencies = [
  "futures",
  "include_dir",
  "mime_guess",
+ "moka",
  "nu-ansi-term",
  "rand 0.9.2",
  "reqwest",
@@ -113,6 +114,17 @@ dependencies = [
  "password-hash",
 ]
 
+[[package]]
+name = "async-lock"
+version = "3.4.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "290f7f2596bd5b78a9fec8088ccd89180d7f9f55b94b0576823bbbdc72ee8311"
+dependencies = [
+ "event-listener",
+ "event-listener-strategy",
+ "pin-project-lite",
+]
+
 [[package]]
 name = "atoi"
 version = "2.0.0"
@@ -904,6 +916,24 @@ dependencies = [
  "cfg-if",
 ]
 
+[[package]]
+name = "crossbeam-channel"
+version = "0.5.15"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "82b8f8f868b36967f9606790d1903570de9ceaf870a7bf9fbbd3016d636a2cb2"
+dependencies = [
+ "crossbeam-utils",
+]
+
+[[package]]
+name = "crossbeam-epoch"
+version = "0.9.18"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5b82ac4a3c2ca9c3460964f020e1402edd5753411d7737aa39c3714ad1b5420e"
+dependencies = [
+ "crossbeam-utils",
+]
+
 [[package]]
 name = "crossbeam-queue"
 version = "0.3.12"
@@ -1119,6 +1149,16 @@ dependencies = [
  "pin-project-lite",
 ]
 
+[[package]]
+name = "event-listener-strategy"
+version = "0.5.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8be9f3dfaaffdae2972880079a491a1a8bb7cbed0b8dd7a347f668b4150a3b93"
+dependencies = [
+ "event-listener",
+ "pin-project-lite",
+]
+
 [[package]]
 name = "fastrand"
 version = "2.3.0"
@@ -1943,6 +1983,26 @@ dependencies = [
  "windows-sys 0.61.2",
 ]
 
+[[package]]
+name = "moka"
+version = "0.12.12"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a3dec6bd31b08944e08b58fd99373893a6c17054d6f3ea5006cc894f4f4eee2a"
+dependencies = [
+ "async-lock",
+ "crossbeam-channel",
+ "crossbeam-epoch",
+ "crossbeam-utils",
+ "equivalent",
+ "event-listener",
+ "futures-util",
+ "parking_lot",
+ "portable-atomic",
+ "smallvec",
+ "tagptr",
+ "uuid",
+]
+
 [[package]]
 name = "nu-ansi-term"
 version = "0.50.3"
@@ -2143,6 +2203,12 @@ version = "0.3.32"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "7edddbd0b52d732b21ad9a5fab5c704c14cd949e5e9a1ec5929a24fded1b904c"
 
+[[package]]
+name = "portable-atomic"
+version = "1.13.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f89776e4d69bb58bc6993e99ffa1d11f228b839984854c7daeb5d37f87cbe950"
+
 [[package]]
 name = "potential_utf"
 version = "0.1.4"
@@ -3103,6 +3169,12 @@ dependencies = [
  "syn",
 ]
 
+[[package]]
+name = "tagptr"
+version = "0.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7b2093cf4c8eb1e67749a6762251bc9cd836b6fc171623bd0a9d324d37af2417"
+
 [[package]]
 name = "thiserror"
 version = "1.0.69"

@@ -16,6 +16,7 @@ dotenvy = "0.15"
 futures = "0.3.31"
 include_dir = "0.7.4"
 mime_guess = "2.0.5"
+moka = { version = "0.12.12", features = ["future"] }
 nu-ansi-term = "0.50.3"
 rand = "0.9.2"
 reqwest = { version = "0.13.1", default-features = false, features = ["rustls", "charset", "json", "stream"] }

+313
@@ -0,0 +1,313 @@
+//! ISR (Incremental Static Regeneration) cache implementation
+//!
+//! Provides in-memory caching for SSR pages with:
+//! - TTL-based expiration
+//! - Stale-while-revalidate pattern
+//! - Singleflight (via moka's built-in coalescing)
+//! - On-demand invalidation
+
+use axum::http::{HeaderMap, StatusCode};
+use dashmap::DashSet;
+use moka::future::Cache;
+use std::{
+    sync::Arc,
+    time::{Duration, Instant},
+};
+
+/// Cached response data
+#[derive(Clone)]
+pub struct CachedResponse {
+    pub status: StatusCode,
+    pub headers: HeaderMap,
+    pub body: axum::body::Bytes,
+    pub cached_at: Instant,
+}
+
+impl CachedResponse {
+    pub fn new(status: StatusCode, headers: HeaderMap, body: axum::body::Bytes) -> Self {
+        Self {
+            status,
+            headers,
+            body,
+            cached_at: Instant::now(),
+        }
+    }
+
+    /// Check if this response is still fresh (within fresh_duration)
+    pub fn is_fresh(&self, fresh_duration: Duration) -> bool {
+        self.cached_at.elapsed() < fresh_duration
+    }
+
+    /// Check if this response is stale but still usable (within stale_duration)
+    pub fn is_stale_but_usable(&self, fresh_duration: Duration, stale_duration: Duration) -> bool {
+        let age = self.cached_at.elapsed();
+        age >= fresh_duration && age < stale_duration
+    }
+
+    /// Get the age of this cached response
+    pub fn age(&self) -> Duration {
+        self.cached_at.elapsed()
+    }
+}
+
+/// Configuration for the ISR cache
+#[derive(Debug, Clone)]
+pub struct IsrCacheConfig {
+    /// Maximum number of cached entries
+    pub max_entries: u64,
+    /// Duration a response is considered fresh (served without refresh)
+    pub fresh_duration: Duration,
+    /// Total duration before entry is evicted (stale responses served during refresh)
+    pub stale_duration: Duration,
+    /// Whether caching is enabled
+    pub enabled: bool,
+}
+
+impl Default for IsrCacheConfig {
+    fn default() -> Self {
+        Self {
+            max_entries: 1000,
+            fresh_duration: Duration::from_secs(60),
+            stale_duration: Duration::from_secs(300),
+            enabled: true,
+        }
+    }
+}
+
+impl IsrCacheConfig {
+    /// Load configuration from environment variables
+    pub fn from_env() -> Self {
+        let max_entries = std::env::var("ISR_CACHE_MAX_ENTRIES")
+            .ok()
+            .and_then(|v| v.parse().ok())
+            .unwrap_or(1000);
+
+        let fresh_sec = std::env::var("ISR_CACHE_FRESH_SEC")
+            .ok()
+            .and_then(|v| v.parse().ok())
+            .unwrap_or(60);
+
+        let stale_sec = std::env::var("ISR_CACHE_STALE_SEC")
+            .ok()
+            .and_then(|v| v.parse().ok())
+            .unwrap_or(300);
+
+        let enabled = std::env::var("ISR_CACHE_ENABLED")
+            .map(|v| v != "false" && v != "0")
+            .unwrap_or(true);
+
+        Self {
+            max_entries,
+            fresh_duration: Duration::from_secs(fresh_sec),
+            stale_duration: Duration::from_secs(stale_sec),
+            enabled,
+        }
+    }
+}
+
+/// ISR cache for SSR page responses
+pub struct IsrCache {
+    cache: Cache<String, Arc<CachedResponse>>,
+    /// Tracks paths currently being refreshed in background
+    refreshing: DashSet<String>,
+    pub config: IsrCacheConfig,
+}
+
+impl IsrCache {
+    /// Create a new ISR cache with the given configuration
+    pub fn new(config: IsrCacheConfig) -> Self {
+        let cache = Cache::builder()
+            .max_capacity(config.max_entries)
+            // Use stale_duration as TTL - we handle fresh/stale logic ourselves
+            .time_to_live(config.stale_duration)
+            .name("isr_cache")
+            .build();
+
+        Self {
+            cache,
+            refreshing: DashSet::new(),
+            config,
+        }
+    }
+
+    /// Get a cached response if it exists
+    pub async fn get(&self, path: &str) -> Option<Arc<CachedResponse>> {
+        if !self.config.enabled {
+            return None;
+        }
+        self.cache.get(path).await
+    }
+
+    /// Insert a response into the cache
+    pub async fn insert(&self, path: String, response: CachedResponse) {
+        if !self.config.enabled {
+            return;
+        }
+        self.cache.insert(path, Arc::new(response)).await;
+    }
+
+    /// Check if a path is currently being refreshed
+    pub fn is_refreshing(&self, path: &str) -> bool {
+        self.refreshing.contains(path)
+    }
+
+    /// Mark a path as being refreshed. Returns true if it wasn't already refreshing.
+    pub fn start_refresh(&self, path: &str) -> bool {
+        self.refreshing.insert(path.to_string())
+    }
+
+    /// Mark a path refresh as complete
+    pub fn end_refresh(&self, path: &str) {
+        self.refreshing.remove(path);
+    }
+
+    /// Invalidate a single cached path
+    pub async fn invalidate(&self, path: &str) {
+        self.cache.invalidate(path).await;
+        tracing::debug!(path = %path, "Cache entry invalidated");
+    }
+
+    /// Invalidate multiple cached paths
+    pub async fn invalidate_many(&self, paths: &[&str]) {
+        for path in paths {
+            self.cache.invalidate(*path).await;
+        }
+        tracing::info!(paths = ?paths, "Cache entries invalidated");
+    }
+
+    /// Invalidate all entries matching a prefix
+    pub async fn invalidate_prefix(&self, prefix: &str) {
+        // moka doesn't have prefix invalidation, so we need to iterate
+        // This is O(n) but invalidation should be infrequent
+        let prefix_owned = prefix.to_string();
+        self.cache
+            .invalidate_entries_if(move |key, _| key.starts_with(&prefix_owned))
+            .ok();
+        tracing::info!(prefix = %prefix, "Cache entries with prefix invalidated");
+    }
+
+    /// Invalidate all cached entries
+    pub async fn invalidate_all(&self) {
+        self.cache.invalidate_all();
+        tracing::info!("All cache entries invalidated");
+    }
+
+    /// Get cache statistics
+    pub fn stats(&self) -> CacheStats {
+        CacheStats {
+            entry_count: self.cache.entry_count(),
+            weighted_size: self.cache.weighted_size(),
+            refreshing_count: self.refreshing.len(),
+        }
+    }
+}
+
+/// Cache statistics for observability
+#[derive(Debug, Clone, serde::Serialize)]
+pub struct CacheStats {
+    pub entry_count: u64,
+    pub weighted_size: u64,
+    pub refreshing_count: usize,
+}
+
+/// Determines if a path should be cached
+///
+/// Excludes:
+/// - Admin pages (session-specific)
+/// - API routes (handled separately)
+/// - Internal routes
+/// - Static assets (served directly from embedded files)
+pub fn is_cacheable_path(path: &str) -> bool {
+    // Never cache admin pages - they're session-specific
+    if path.starts_with("/admin") {
+        return false;
+    }
+
+    // Never cache API routes
+    if path.starts_with("/api/") {
+        return false;
+    }
+
+    // Never cache internal routes
+    if path.starts_with("/internal/") {
+        return false;
+    }
+
+    // Don't cache static assets (they're served from embedded files anyway)
+    if path.starts_with("/_app/") || path.starts_with("/.") {
+        return false;
+    }
+
+    true
+}
+
+/// Normalize a path into a cache key
+///
+/// For now, keeps query strings as part of the key since SSR pages
+/// may render differently based on query params (e.g., ?tag=rust)
+pub fn cache_key(path: &str, query: Option<&str>) -> String {
+    match query {
+        Some(q) if !q.is_empty() => format!("{path}?{q}"),
+        _ => path.to_string(),
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+
+    #[test]
+    fn test_is_cacheable_path() {
+        // Should cache
+        assert!(is_cacheable_path("/"));
+        assert!(is_cacheable_path("/projects"));
+        assert!(is_cacheable_path("/projects/my-project"));
+
+        // Should not cache
+        assert!(!is_cacheable_path("/admin"));
+        assert!(!is_cacheable_path("/admin/projects"));
+        assert!(!is_cacheable_path("/api/projects"));
+        assert!(!is_cacheable_path("/internal/health"));
+        assert!(!is_cacheable_path("/_app/immutable/foo.js"));
+    }
+
+    #[test]
+    fn test_cache_key() {
+        assert_eq!(cache_key("/projects", None), "/projects");
+        assert_eq!(cache_key("/projects", Some("")), "/projects");
+        assert_eq!(
+            cache_key("/projects", Some("tag=rust")),
+            "/projects?tag=rust"
+        );
+    }
+
+    #[tokio::test]
+    async fn test_cached_response_freshness() {
+        let response = CachedResponse::new(
+            StatusCode::OK,
+            HeaderMap::new(),
+            axum::body::Bytes::from_static(b"test"),
+        );
+
+        let fresh = Duration::from_millis(100);
+        let stale = Duration::from_millis(200);
+
+        // Should be fresh immediately
+        assert!(response.is_fresh(fresh));
+        assert!(!response.is_stale_but_usable(fresh, stale));
+
+        // Wait a bit
+        tokio::time::sleep(Duration::from_millis(110)).await;
+
+        // Should be stale but usable
+        assert!(!response.is_fresh(fresh));
+        assert!(response.is_stale_but_usable(fresh, stale));
+
+        // Wait more
+        tokio::time::sleep(Duration::from_millis(100)).await;
+
+        // Should be neither fresh nor usable
+        assert!(!response.is_fresh(fresh));
+        assert!(!response.is_stale_but_usable(fresh, stale));
+    }
+}

+43 -14
@@ -243,6 +243,9 @@ pub async fn create_project_handler(
 
     tracing::info!(project_id = %project.id, project_name = %project.name, "Project created");
 
+    // Invalidate cached pages that display projects
+    state.isr_cache.invalidate_many(&["/", "/projects"]).await;
+
     (
         StatusCode::CREATED,
         Json(project.to_api_admin_project(tags)),
@@ -410,6 +413,14 @@ pub async fn update_project_handler(
 
     tracing::info!(project_id = %project.id, project_name = %project.name, "Project updated");
 
+    // Invalidate cached pages that display projects
+    // Also invalidate slug-based path in case project detail pages exist
+    let project_path = format!("/projects/{}", project.slug);
+    state
+        .isr_cache
+        .invalidate_many(&["/", "/projects", &project_path])
+        .await;
+
     Json(project.to_api_admin_project(tags)).into_response()
 }
 
@@ -469,6 +480,14 @@ pub async fn delete_project_handler(
     match db::delete_project(&state.pool, project_id).await {
         Ok(()) => {
             tracing::info!(project_id = %project_id, project_name = %project.name, "Project deleted");
+
+            // Invalidate cached pages that display projects
+            let project_path = format!("/projects/{}", project.slug);
+            state
+                .isr_cache
+                .invalidate_many(&["/", "/projects", &project_path])
+                .await;
+
             Json(project.to_api_admin_project(tags)).into_response()
         }
         Err(err) => {
@@ -588,13 +607,18 @@ pub async fn add_project_tag_handler(
     };
 
     match db::add_tag_to_project(&state.pool, project_id, tag_id).await {
-        Ok(()) => (
-            StatusCode::CREATED,
-            Json(serde_json::json!({
-                "message": "Tag added to project"
-            })),
-        )
-            .into_response(),
+        Ok(()) => {
+            // Invalidate cached pages - tags affect how projects are displayed
+            state.isr_cache.invalidate_many(&["/", "/projects"]).await;
+
+            (
+                StatusCode::CREATED,
+                Json(serde_json::json!({
+                    "message": "Tag added to project"
+                })),
+            )
+                .into_response()
+        }
         Err(sqlx::Error::Database(db_err)) if db_err.is_foreign_key_violation() => (
             StatusCode::NOT_FOUND,
            Json(serde_json::json!({
@@ -655,13 +679,18 @@ pub async fn remove_project_tag_handler(
     };
 
     match db::remove_tag_from_project(&state.pool, project_id, tag_id).await {
-        Ok(()) => (
-            StatusCode::OK,
-            Json(serde_json::json!({
-                "message": "Tag removed from project"
-            })),
-        )
-            .into_response(),
+        Ok(()) => {
+            // Invalidate cached pages - tags affect how projects are displayed
+            state.isr_cache.invalidate_many(&["/", "/projects"]).await;
+
+            (
+                StatusCode::OK,
+                Json(serde_json::json!({
+                    "message": "Tag removed from project"
+                })),
+            )
+                .into_response()
+        }
         Err(err) => {
             tracing::error!(error = %err, "Failed to remove tag from project");
             (

+12 -2
@@ -78,7 +78,12 @@ pub async fn create_tag_handler(
     )
     .await
     {
-        Ok(tag) => (StatusCode::CREATED, Json(tag.to_api_tag())).into_response(),
+        Ok(tag) => {
+            // Invalidate cached pages - tag list appears on project pages
+            state.isr_cache.invalidate_many(&["/", "/projects"]).await;
+
+            (StatusCode::CREATED, Json(tag.to_api_tag())).into_response()
+        }
         Err(sqlx::Error::Database(db_err)) if db_err.is_unique_violation() => (
             StatusCode::CONFLICT,
             Json(serde_json::json!({
@@ -219,7 +224,12 @@ pub async fn update_tag_handler(
     )
     .await
     {
-        Ok(updated_tag) => Json(updated_tag.to_api_tag()).into_response(),
+        Ok(updated_tag) => {
+            // Invalidate cached pages - tag updates affect project displays
+            state.isr_cache.invalidate_many(&["/", "/projects"]).await;
+
+            Json(updated_tag.to_api_tag()).into_response()
+        }
         Err(sqlx::Error::Database(db_err)) if db_err.is_unique_violation() => (
             StatusCode::CONFLICT,
             Json(serde_json::json!({

+15
@@ -7,6 +7,7 @@ use tracing_subscriber::{EnvFilter, layer::SubscriberExt, util::SubscriberInitEx
 
 mod assets;
 mod auth;
+mod cache;
 mod config;
 mod db;
 mod formatter;
@@ -22,6 +23,7 @@ mod state;
 mod tarpit;
 mod utils;
 
+use cache::{IsrCache, IsrCacheConfig};
 use config::{Args, ListenAddr};
 use formatter::{CustomJsonFormatter, CustomPrettyFormatter};
 use health::HealthChecker;
@@ -153,12 +155,25 @@ async fn main() {
         "Tarpit initialized"
     );
 
+    // Initialize ISR cache
+    let isr_cache_config = IsrCacheConfig::from_env();
+    let isr_cache = Arc::new(IsrCache::new(isr_cache_config.clone()));
+
+    tracing::info!(
+        enabled = isr_cache_config.enabled,
+        max_entries = isr_cache_config.max_entries,
+        fresh_sec = isr_cache_config.fresh_duration.as_secs(),
+        stale_sec = isr_cache_config.stale_duration.as_secs(),
+        "ISR cache initialized"
+    );
+
     let state = Arc::new(AppState {
         client,
         health_checker,
         tarpit_state,
         pool: pool.clone(),
         session_manager: session_manager.clone(),
+        isr_cache,
     });
 
     // Regenerate common OGP images on startup

+147 -47
@@ -6,19 +6,21 @@ use axum::{
 use std::{net::SocketAddr, sync::Arc, time::Duration};
 
 use crate::{
-    assets, db,
+    assets,
+    cache::{self, CachedResponse},
+    db,
     state::{AppState, ProxyError},
     tarpit::{self, TarpitState},
     utils,
 };
 
-/// ISR handler - serves pages through Bun SSR with session validation
+/// ISR handler - serves pages through Bun SSR with caching and session validation
 #[tracing::instrument(skip(state, req), fields(path = %req.uri().path(), method = %req.method()))]
 pub async fn isr_handler(State(state): State<Arc<AppState>>, req: Request) -> Response {
     let method = req.method().clone();
     let uri = req.uri();
     let path = uri.path();
-    let query = uri.query().unwrap_or("");
+    let query = uri.query();
 
     if method != axum::http::Method::GET && method != axum::http::Method::HEAD {
         tracing::warn!(method = %method, path = %path, "Non-GET/HEAD request to non-API route");
@@ -72,14 +74,11 @@ pub async fn isr_handler(State(state): State<Arc<AppState>>, req: Request) -> Re
         return response;
     }
 
-    let path_with_query = if query.is_empty() {
-        path.to_string()
-    } else {
-        format!("{path}?{query}")
-    };
+    let path_with_query = cache::cache_key(path, query);
 
     // Build trusted headers to forward to downstream
     let mut forward_headers = HeaderMap::new();
+    let mut is_authenticated = false;
 
     // SECURITY: Strip any X-Session-User header from incoming request to prevent spoofing
 
@@ -101,6 +100,7 @@ pub async fn isr_handler(State(state): State<Arc<AppState>>, req: Request) -> Re
                 axum::http::HeaderValue::from_str(&session.username)
             {
                 forward_headers.insert("x-session-user", username_value);
+                is_authenticated = true;
             }
         }
     }
@@ -111,51 +111,64 @@ pub async fn isr_handler(State(state): State<Arc<AppState>>, req: Request) -> Re
         }
     }
 
+    // Determine if this request can use the cache
+    // Skip cache for authenticated requests (they see different content)
+    let use_cache = !is_authenticated && cache::is_cacheable_path(path);
+
+    // Try to serve from cache for public requests
+    if use_cache {
+        if let Some(cached) = state.isr_cache.get(&path_with_query).await {
+            let fresh_duration = state.isr_cache.config.fresh_duration;
+            let stale_duration = state.isr_cache.config.stale_duration;
+
+            if cached.is_fresh(fresh_duration) {
+                // Fresh cache hit - serve immediately
+                let age_ms = cached.age().as_millis() as u64;
+                tracing::debug!(cache = "hit", age_ms, "ISR cache hit (fresh)");
+
+                return serve_cached_response(&cached, is_head);
+            } else if cached.is_stale_but_usable(fresh_duration, stale_duration) {
+                // Stale cache hit - serve immediately and refresh in background
+                let age_ms = cached.age().as_millis() as u64;
+                tracing::debug!(cache = "stale", age_ms, "ISR cache hit (stale, refreshing)");
+
+                // Spawn background refresh if not already refreshing
+                if state.isr_cache.start_refresh(&path_with_query) {
+                    let state_clone = state.clone();
+                    let path_clone = path_with_query.clone();
+                    tokio::spawn(async move {
+                        refresh_cache_entry(state_clone, path_clone).await;
+                    });
+                }
+
+                return serve_cached_response(&cached, is_head);
+            }
+            // Cache entry is too old - fall through to fetch
+        }
+    }
+
+    // Cache miss or non-cacheable - fetch from Bun
     let start = std::time::Instant::now();
 
     match proxy_to_bun(&path_with_query, state.clone(), forward_headers).await {
         Ok((status, headers, body)) => {
             let duration_ms = start.elapsed().as_millis() as u64;
-            let cache = "miss";
 
-            let is_static = utils::is_static_asset(path);
-            let is_page = utils::is_page_route(path);
-
-            match (status.as_u16(), is_static, is_page) {
-                (200..=299, true, _) => {
-                    tracing::trace!(status = status.as_u16(), duration_ms, cache, "ISR request");
-                }
-                (404, true, _) => {
-                    tracing::warn!(
-                        status = status.as_u16(),
-                        duration_ms,
-                        cache,
-                        "ISR request - missing asset"
-                    );
-                }
-                (500..=599, true, _) => {
-                    tracing::error!(
-                        status = status.as_u16(),
-                        duration_ms,
-                        cache,
-                        "ISR request - server error"
-                    );
-                }
-                (200..=299, _, true) => {
-                    tracing::debug!(status = status.as_u16(), duration_ms, cache, "ISR request");
-                }
-                (404, _, true) => {}
-                (500..=599, _, _) => {
-                    tracing::error!(
-                        status = status.as_u16(),
-                        duration_ms,
-                        cache,
-                        "ISR request - server error"
-                    );
-                }
-                _ => {
-                    tracing::debug!(status = status.as_u16(), duration_ms, cache, "ISR request");
-                }
+            // Cache successful responses for public requests
+            if use_cache && status.is_success() {
+                let cached_response = CachedResponse::new(status, headers.clone(), body.clone());
+                state
+                    .isr_cache
+                    .insert(path_with_query.clone(), cached_response)
+                    .await;
+                tracing::debug!(
+                    cache = "miss",
+                    status = status.as_u16(),
+                    duration_ms,
+                    "ISR request (cached)"
+                );
+            } else {
+                log_isr_request(path, status, duration_ms, "bypass");
             }
 
             // Intercept error responses for HTML requests
@@ -194,6 +207,93 @@ pub async fn isr_handler(State(state): State<Arc<AppState>>, req: Request) -> Re
     }
 }
 
+/// Serve a cached response
+fn serve_cached_response(cached: &CachedResponse, is_head: bool) -> Response {
+    if is_head {
+        (cached.status, cached.headers.clone()).into_response()
+    } else {
+        (cached.status, cached.headers.clone(), cached.body.clone()).into_response()
+    }
+}
+
+/// Background task to refresh a stale cache entry
+async fn refresh_cache_entry(state: Arc<AppState>, cache_key: String) {
+    // No auth headers for background refresh (public content only)
+    let forward_headers = HeaderMap::new();
+
+    match proxy_to_bun(&cache_key, state.clone(), forward_headers).await {
+        Ok((status, headers, body)) => {
+            if status.is_success() {
+                let cached_response = CachedResponse::new(status, headers, body);
+                state
+                    .isr_cache
+                    .insert(cache_key.clone(), cached_response)
+                    .await;
+                tracing::debug!(path = %cache_key, "Cache entry refreshed");
+            } else {
+                tracing::warn!(
+                    path = %cache_key,
+                    status = status.as_u16(),
+                    "Background refresh returned non-success status, keeping stale entry"
+                );
+            }
+        }
+        Err(err) => {
+            tracing::warn!(
+                path = %cache_key,
+                error = %err,
+                "Background refresh failed, keeping stale entry"
+            );
+        }
+    }
+
+    // Mark refresh as complete
+    state.isr_cache.end_refresh(&cache_key);
+}
+
+/// Log ISR request with appropriate level based on status
+fn log_isr_request(path: &str, status: StatusCode, duration_ms: u64, cache: &str) {
+    let is_static = utils::is_static_asset(path);
+    let is_page = utils::is_page_route(path);
+
+    match (status.as_u16(), is_static, is_page) {
+        (200..=299, true, _) => {
+            tracing::trace!(status = status.as_u16(), duration_ms, cache, "ISR request");
+        }
+        (404, true, _) => {
+            tracing::warn!(
+                status = status.as_u16(),
+                duration_ms,
+                cache,
+                "ISR request - missing asset"
+            );
+        }
+        (500..=599, true, _) => {
+            tracing::error!(
+                status = status.as_u16(),
+                duration_ms,
+                cache,
+                "ISR request - server error"
+            );
+        }
+        (200..=299, _, true) => {
+            tracing::debug!(status = status.as_u16(), duration_ms, cache, "ISR request");
+        }
+        (404, _, true) => {}
+        (500..=599, _, _) => {
+            tracing::error!(
+                status = status.as_u16(),
+                duration_ms,
+                cache,
+                "ISR request - server error"
+            );
+        }
+        _ => {
+            tracing::debug!(status = status.as_u16(), duration_ms, cache, "ISR request");
+        }
+    }
+}
+
 /// Proxy a request to Bun SSR
 pub async fn proxy_to_bun(
     path: &str,

+5 -1
@@ -1,6 +1,9 @@
 use std::sync::Arc;
 
-use crate::{auth::SessionManager, health::HealthChecker, http::HttpClient, tarpit::TarpitState};
+use crate::{
+    auth::SessionManager, cache::IsrCache, health::HealthChecker, http::HttpClient,
+    tarpit::TarpitState,
+};
 
 /// Application state shared across all handlers
 #[derive(Clone)]
@@ -10,6 +13,7 @@ pub struct AppState {
     pub tarpit_state: Arc<TarpitState>,
     pub pool: sqlx::PgPool,
     pub session_manager: Arc<SessionManager>,
+    pub isr_cache: Arc<IsrCache>,
 }
 
 /// Errors that can occur during proxying to Bun

@@ -1,11 +1,15 @@
 import type { Handle, HandleServerError } from "@sveltejs/kit";
 import { dev } from "$app/environment";
 import { initLogger } from "$lib/logger";
+import { preCacheCollections } from "$lib/server/icons";
 import { getLogger } from "@logtape/logtape";
 import { minify } from "html-minifier-terser";
 
 await initLogger();
 
+// Pre-cache icon collections before handling any requests
+await preCacheCollections();
+
 const logger = getLogger(["ssr", "error"]);
 
 export const handle: Handle = async ({ event, resolve }) => {

@@ -1,36 +0,0 @@
-<script lang="ts" module>
-  import { renderIconSVG } from "$lib/server/icons";
-</script>
-
-<script lang="ts">
-  import { cn } from "$lib/utils";
-
-  interface Props {
-    icon: string;
-    class?: string;
-    size?: number;
-    fallback?: string;
-  }
-
-  let {
-    icon,
-    class: className,
-    size,
-    fallback = "lucide:help-circle",
-  }: Props = $props();
-</script>
-
-{#await renderIconSVG(icon, { class: cn("inline-block", className), size })}
-  <!-- Loading state during SSR (shouldn't be visible) -->
-{:then svg}
-  {#if svg}
-    <!-- eslint-disable-next-line svelte/no-at-html-tags -->
-    {@html svg}
-  {:else}
-    <!-- Fallback icon if primary fails -->
-    {#await renderIconSVG( fallback, { class: cn("inline-block", className), size }, ) then fallbackSvg}
-      <!-- eslint-disable-next-line svelte/no-at-html-tags -->
-      {@html fallbackSvg}
-    {/await}
-  {/if}
-{/await}

@@ -1,3 +1,4 @@
+import { dev } from "$app/environment";
 import { configure, getConsoleSink, type LogRecord } from "@logtape/logtape";
 
 interface RailwayLogEntry {
@@ -27,6 +28,9 @@ export async function initLogger() {
   const useJsonLogs =
     process.env.LOG_JSON === "true" || process.env.LOG_JSON === "1";
 
+  const logLevel = (process.env.LOG_LEVEL?.toLowerCase() ??
+    (dev ? "debug" : "info")) as "debug" | "info" | "warning" | "error";
+
   const jsonSink = (record: LogRecord) => {
     process.stdout.write(railwayFormatter(record));
   };
@@ -47,7 +51,7 @@ export async function initLogger() {
       },
       {
         category: [],
-        lowestLevel: "debug",
+        lowestLevel: logLevel,
         sinks: [useJsonLogs ? "json" : "console"],
       },
     ],

+211 -107
@@ -1,20 +1,16 @@
-import { readFile } from "fs/promises";
-import { join } from "path";
 import type { IconifyJSON } from "@iconify/types";
 import { getIconData, iconToSVG, replaceIDs } from "@iconify/utils";
 import { getLogger } from "@logtape/logtape";
-import type {
-  IconCollection,
-  IconData,
-  IconIdentifier,
-  IconRenderOptions,
-} from "$lib/types/icons";
+import type { IconCollection, IconIdentifier, IconRenderOptions } from "$lib/types/icons";
 
 const logger = getLogger(["server", "icons"]);
 
-// In-memory cache for icon collections
+// In-memory cache for loaded icon collections
 const collectionCache = new Map<string, IconifyJSON>();
 
+// Loading promises to prevent concurrent loads of the same collection
+const loadingPromises = new Map<string, Promise<IconifyJSON | null>>();
+
 // Collections to pre-cache on server startup
 const PRE_CACHE_COLLECTIONS = [
   "lucide",
@@ -25,7 +21,7 @@ const PRE_CACHE_COLLECTIONS = [
 ];
 
 // Default fallback icon
-const DEFAULT_FALLBACK_ICON = "lucide:help-circle";
+const DEFAULT_FALLBACK_ICON: IconIdentifier = "lucide:help-circle";
 
 /**
  * Parse icon identifier into collection and name
@@ -41,26 +37,13 @@ function parseIdentifier(
 }
 
 /**
- * Load icon collection from @iconify/json
+ * Load icon collection from disk via dynamic import (internal - no caching logic)
  */
-async function loadCollection(collection: string): Promise<IconifyJSON | null> {
-  // Check cache first
-  if (collectionCache.has(collection)) {
-    return collectionCache.get(collection)!;
-  }
-
+async function loadCollectionFromDisk(collection: string): Promise<IconifyJSON | null> {
   try {
-    const iconifyJsonPath = join(
-      process.cwd(),
-      "node_modules",
-      "@iconify",
-      "json",
-      "json",
-      `${collection}.json`,
-    );
-
-    const data = await readFile(iconifyJsonPath, "utf-8");
-    const iconSet: IconifyJSON = JSON.parse(data);
+    // Dynamic import - Bun resolves the package path automatically
+    const module = await import(`@iconify/json/json/${collection}.json`);
+    const iconSet: IconifyJSON = module.default;
 
     // Cache the collection
     collectionCache.set(collection, iconSet);
@@ -79,9 +62,203 @@ async function loadCollection(collection: string): Promise<IconifyJSON | null> {
 }
 
 /**
- * Get icon data by identifier
+ * Load icon collection with caching and concurrent load protection.
+ * Multiple concurrent requests for the same collection will wait for a single load.
  */
-export async function getIcon(identifier: string): Promise<IconData | null> {
+async function loadCollection(collection: string): Promise<IconifyJSON | null> {
+  // Return cached if available
+  if (collectionCache.has(collection)) {
+    return collectionCache.get(collection)!;
+  }
+
+  // Wait for in-progress load if another request is already loading this collection
+  const existingPromise = loadingPromises.get(collection);
+  if (existingPromise) {
+    return existingPromise;
+  }
+
+  // Start new load and store promise so concurrent requests can wait
+  const loadPromise = loadCollectionFromDisk(collection);
+  loadingPromises.set(collection, loadPromise);
+
+  try {
+    return await loadPromise;
+  } finally {
+    loadingPromises.delete(collection);
+  }
+}
+
+/**
+ * Render icon data to SVG string (internal)
+ */
+function renderIconData(
+  iconData: ReturnType<typeof getIconData>,
+  options: IconRenderOptions = {},
+): string {
+  if (!iconData) {
+    throw new Error("Icon data is null");
+  }
+
+  // Convert icon data to SVG attributes
+  const renderData = iconToSVG(iconData);
+
+  // Get SVG body
+  const body = replaceIDs(iconData.body);
+
+  // Build SVG element with options applied
+  const attributes: Record<string, string> = {
+    ...renderData.attributes,
+    xmlns: "http://www.w3.org/2000/svg",
+    "xmlns:xlink": "http://www.w3.org/1999/xlink",
+  };
+
+  if (options.class) {
+    attributes.class = options.class;
+  }
+  if (options.size) {
+    attributes.width = String(options.size);
+    attributes.height = String(options.size);
+  }
+
+  const attributeString = Object.entries(attributes)
+    .map(([key, value]) => `${key}="${value}"`)
+    .join(" ");
+
+  let svg = `<svg ${attributeString}>${body}</svg>`;
+
+  // Apply custom color (replace currentColor)
+  if (options.color) {
+    svg = svg.replace(/currentColor/g, options.color);
+  }
+
+  return svg;
+}
+
+/**
+ * Render the default fallback icon (internal helper)
+ */
+async function renderFallbackIcon(options: IconRenderOptions): Promise<string | null> {
+  const parsed = parseIdentifier(DEFAULT_FALLBACK_ICON);
+  if (!parsed) return null;
+
+  const iconSet = await loadCollection(parsed.collection);
+  if (!iconSet) return null;
+
+  const iconData = getIconData(iconSet, parsed.name);
+  if (!iconData) return null;
+
+  return renderIconData(iconData, options);
+}
+
+/**
+ * Render multiple icons efficiently in a single batch.
+ * Groups icons by collection, loads each collection once, then renders all icons.
+ *
+ * @param identifiers - Array of icon identifiers (e.g., ["lucide:home", "simple-icons:github"])
+ * @param options - Render options applied to all icons
+ * @returns Map of identifier to rendered SVG string (missing icons get fallback)
+ */
+export async function renderIconsBatch(
+  identifiers: string[],
+  options: IconRenderOptions = {},
+): Promise<Map<string, string>> {
+  const results = new Map<string, string>();
+
+  if (identifiers.length === 0) {
+    return results;
+  }
+
+  // Parse and group by collection
+  const byCollection = new Map<string, { identifier: string; name: string }[]>();
+  const invalidIdentifiers: string[] = [];
+
+  for (const identifier of identifiers) {
+    const parsed = parseIdentifier(identifier);
+    if (!parsed) {
+      invalidIdentifiers.push(identifier);
+      continue;
+    }
+
+    const group = byCollection.get(parsed.collection) || [];
+    group.push({ identifier, name: parsed.name });
+    byCollection.set(parsed.collection, group);
+  }
+
+  if (invalidIdentifiers.length > 0) {
+    logger.warn("Invalid icon identifiers in batch", { identifiers: invalidIdentifiers });
+  }
+
+  // Load all needed collections in parallel
+  const collections = Array.from(byCollection.keys());
+  const loadedCollections = await Promise.all(
+    collections.map(async (collection) => ({
+      collection,
+      iconSet: await loadCollection(collection),
+    })),
+  );
+
+  // Build lookup map
+  const collectionMap = new Map<string, IconifyJSON>();
+  for (const { collection, iconSet } of loadedCollections) {
+    if (iconSet) {
+      collectionMap.set(collection, iconSet);
+    }
+  }
+
+  // Render all icons
+  const missingIcons: string[] = [];
+
+  for (const [collection, icons] of byCollection) {
+    const iconSet = collectionMap.get(collection);
+    if (!iconSet) {
+      missingIcons.push(...icons.map((i) => i.identifier));
+      continue;
+    }
+
+    for (const { identifier, name } of icons) {
+      const iconData = getIconData(iconSet, name);
+      if (!iconData) {
+        missingIcons.push(identifier);
+        continue;
+      }
+
+      try {
+        const svg = renderIconData(iconData, options);
+        results.set(identifier, svg);
+      } catch (error) {
+        logger.warn(`Failed to render icon: ${identifier}`, {
+          error: error instanceof Error ? error.message : String(error),
+        });
+        missingIcons.push(identifier);
+      }
+    }
+  }
+
+  // Add fallback for missing icons
+  if (missingIcons.length > 0) {
+    logger.warn("Icons not found in batch, using fallback", {
+      missing: missingIcons,
+      fallback: DEFAULT_FALLBACK_ICON,
+    });
+
+    // Render fallback icon once
+    const fallbackSvg = await renderFallbackIcon(options);
+    if (fallbackSvg) {
+      for (const identifier of missingIcons) {
+        results.set(identifier, fallbackSvg);
+      }
+    }
+  }
+
+  return results;
+}
+
+/**
+ * Get single icon data (for API endpoint use only)
+ */
+export async function getIconForApi(
+  identifier: string,
+): Promise<{ identifier: string; collection: string; name: string; svg: string } | null> {
   const parsed = parseIdentifier(identifier);
   if (!parsed) {
     logger.warn(`Invalid icon identifier: ${identifier}`);
@@ -95,14 +272,12 @@ export async function getIcon(identifier: string): Promise<IconData | null> {
     return null;
   }
 
-  // Get icon data from the set
   const iconData = getIconData(iconSet, name);
   if (!iconData) {
     logger.warn(`Icon not found: ${identifier}`);
     return null;
   }
 
-  // Build SVG
   const svg = renderIconData(iconData);
 
   return {
@@ -114,74 +289,7 @@ export async function getIcon(identifier: string): Promise<IconData | null> {
 }
 
 /**
- * Render icon data to SVG string
+ * Get all available collections with metadata
  */
-function renderIconData(iconData: ReturnType<typeof getIconData>): string {
-  if (!iconData) {
-    throw new Error("Icon data is null");
-  }
-
-  // Convert icon data to SVG attributes
-  const renderData = iconToSVG(iconData);
-
-  // Get SVG body
-  const body = replaceIDs(iconData.body);
-
-  // Build SVG element
-  const attributes = {
-    ...renderData.attributes,
-    xmlns: "http://www.w3.org/2000/svg",
-    "xmlns:xlink": "http://www.w3.org/1999/xlink",
-  };
-
-  const attributeString = Object.entries(attributes)
-    .map(([key, value]) => `${key}="${value}"`)
-    .join(" ");
-
-  return `<svg ${attributeString}>${body}</svg>`;
-}
-
-/**
- * Render icon SVG with custom options
- */
-export async function renderIconSVG(
-  identifier: string,
-  options: IconRenderOptions = {},
-): Promise<string | null> {
-  const iconData = await getIcon(identifier);
-
-  if (!iconData) {
-    // Try fallback icon if provided, otherwise use default
-    if (identifier !== DEFAULT_FALLBACK_ICON) {
-      logger.warn(`Icon not found, using fallback: ${identifier}`);
-      return renderIconSVG(DEFAULT_FALLBACK_ICON, options);
-    }
-    return null;
-  }
-
-  let svg = iconData.svg;
-
-  // Apply custom class
-  if (options.class) {
-    svg = svg.replace("<svg ", `<svg class="${options.class}" `);
-  }
-
-  // Apply custom size
-  if (options.size) {
-    svg = svg.replace(/width="[^"]*"/, `width="${options.size}"`);
-    svg = svg.replace(/height="[^"]*"/, `height="${options.size}"`);
-  }
-
-  // Apply custom color (replace currentColor)
-  if (options.color) {
-    svg = svg.replace(/currentColor/g, options.color);
-  }
-
-  return svg;
-}
-
-/**
- * Get all available collections
- */
 export async function getCollections(): Promise<IconCollection[]> {
   const collections: IconCollection[] = [];
@@ -210,8 +318,7 @@ export async function searchIcons(
   query: string,
   limit: number = 50,
 ): Promise<{ identifier: string; collection: string; name: string }[]> {
-  const results: { identifier: string; collection: string; name: string }[] =
-    [];
+  const results: { identifier: string; collection: string; name: string }[] = [];
 
   // Parse query for collection prefix (e.g., "lucide:home" or "lucide:")
   const colonIndex = query.indexOf(":");
@@ -254,7 +361,8 @@ export async function searchIcons(
 }
 
 /**
- * Pre-cache common icon collections on server startup
+ * Pre-cache common icon collections on server startup.
+ * Call this in hooks.server.ts before handling requests.
  */
 export async function preCacheCollections(): Promise<void> {
   logger.info("Pre-caching icon collections...", {
@@ -270,7 +378,3 @@ export async function preCacheCollections(): Promise<void> {
     cached: collectionCache.size,
   });
 }
-
-// TODO: Future enhancement - Support color customization in icon identifiers
-// Format idea: "lucide:home#color=blue-500" or separate color field in DB
-// Would allow per-project icon theming without hardcoded styles

@@ -1,8 +1,10 @@
 import type { PageServerLoad } from "./$types";
 import { apiFetch } from "$lib/api.server";
-import { renderIconSVG } from "$lib/server/icons";
+import { renderIconsBatch } from "$lib/server/icons";
 import type { AdminProject } from "$lib/admin-types";
 
+const CLOCK_ICON = "lucide:clock";
+
 export const load: PageServerLoad = async ({ fetch, parent }) => {
   // Get settings from parent layout
   const parentData = await parent();
@@ -10,36 +12,50 @@ export const load: PageServerLoad = async ({ fetch, parent }) => {
 
   const projects = await apiFetch<AdminProject[]>("/api/projects", { fetch });
 
-  // Pre-render tag icons and clock icons (server-side only)
-  const projectsWithIcons = await Promise.all(
-    projects.map(async (project) => {
-      const tagsWithIcons = await Promise.all(
-        project.tags.map(async (tag) => ({
-          ...tag,
-          iconSvg: tag.icon
-            ? (await renderIconSVG(tag.icon, { size: 12 })) || ""
-            : "",
-        })),
-      );
-
-      const clockIconSvg =
-        (await renderIconSVG("lucide:clock", { size: 12 })) || "";
-
-      return {
-        ...project,
-        tags: tagsWithIcons,
-        clockIconSvg,
-      };
-    }),
-  );
-
-  // Pre-render social link icons (server-side only)
-  const socialLinksWithIcons = await Promise.all(
-    settings.socialLinks.map(async (link) => ({
-      ...link,
-      iconSvg: (await renderIconSVG(link.icon, { size: 16 })) || "",
-    })),
-  );
-
+  // Collect all icon identifiers for batch rendering
+  const smallIconIds = new Set<string>();
+  const largeIconIds = new Set<string>();
+
+  // Add static icons
+  smallIconIds.add(CLOCK_ICON);
+
+  // Collect tag icons (size 12)
+  for (const project of projects) {
+    for (const tag of project.tags) {
+      if (tag.icon) {
+        smallIconIds.add(tag.icon);
+      }
+    }
+  }
+
+  // Collect social link icons (size 16)
+  for (const link of settings.socialLinks) {
+    if (link.icon) {
+      largeIconIds.add(link.icon);
+    }
+  }
+
+  // Batch render all icons (two batches for different sizes)
+  const [smallIcons, largeIcons] = await Promise.all([
+    renderIconsBatch([...smallIconIds], { size: 12 }),
+    renderIconsBatch([...largeIconIds], { size: 16 }),
+  ]);
+
+  // Map icons back to projects
+  const projectsWithIcons = projects.map((project) => ({
+    ...project,
+    tags: project.tags.map((tag) => ({
+      ...tag,
+      iconSvg: tag.icon ? smallIcons.get(tag.icon) ?? "" : "",
+    })),
+    clockIconSvg: smallIcons.get(CLOCK_ICON) ?? "",
+  }));
+
+  // Map icons back to social links
+  const socialLinksWithIcons = settings.socialLinks.map((link) => ({
+    ...link,
+    iconSvg: largeIcons.get(link.icon) ?? "",
+  }));
+
   return {
     projects: projectsWithIcons,

@@ -1,7 +1,7 @@
 import { json, error } from "@sveltejs/kit";
 import type { RequestHandler } from "./$types";
 import { requireAuth } from "$lib/server/auth";
-import { getIcon } from "$lib/server/icons";
+import { getIconForApi } from "$lib/server/icons";
 
 export const GET: RequestHandler = async (event) => {
   // Require authentication
@@ -10,7 +10,7 @@ export const GET: RequestHandler = async (event) => {
   const { collection, name } = event.params;
   const identifier = `${collection}:${name}`;
 
-  const iconData = await getIcon(identifier);
+  const iconData = await getIconForApi(identifier);
 
   if (!iconData) {
     throw error(404, `Icon not found: ${identifier}`);