mirror of https://github.com/Xevion/banner.git
synced 2026-01-31 04:23:34 -06:00
feat: optimize asset delivery with build-time compression and encoding negotiation
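A minimal sketch of the negotiation order this commit relies on (standalone Rust, written for illustration here and not part of the commit; it only mirrors the quality-then-priority rule of `parse_accepted_encodings` in src/web/encoding.rs below, and every name in it is local to the sketch):

    // Illustrative only: mirrors the tie-breaking rule of `parse_accepted_encodings`.
    fn main() {
        // A Chrome-style header; entries without an explicit q default to 1.0.
        let header = "gzip;q=0.8, deflate, br, zstd";
        let mut prefs: Vec<(&str, f32, u8)> = header
            .split(',')
            .filter_map(|part| {
                let part = part.trim();
                let (enc, q) = match part.split_once(";q=") {
                    Some((enc, q)) => (enc.trim(), q.trim().parse().unwrap_or(1.0)),
                    None => (part, 1.0_f32),
                };
                // Tie-break when q values match: zstd > br > gzip.
                let rank = match enc {
                    "zstd" => 4,
                    "br" => 3,
                    "gzip" => 2,
                    _ => return None, // e.g. deflate: no pre-compressed variant is generated
                };
                (q > 0.0).then_some((enc, q, rank))
            })
            .collect();
        prefs.sort_by(|a, b| b.1.total_cmp(&a.1).then(b.2.cmp(&a.2)));
        // Prints [("zstd", 1.0, 4), ("br", 1.0, 3), ("gzip", 0.8, 2)]: the order in
        // which the server would look for a .zst, .br, or .gz variant of the asset.
        println!("{prefs:?}");
    }

For that header the server would probe for the `.zst` variant first, then `.br`, then `.gz`, before falling back to the uncompressed embedded asset, as the diff below implements.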
Cargo.lock (generated)  +112
@@ -26,6 +26,21 @@ dependencies = [
  "memchr",
 ]
 
+[[package]]
+name = "alloc-no-stdlib"
+version = "2.0.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "cc7bb162ec39d46ab1ca8c77bf72e890535becd1751bb45f64c597edb4c8c6b3"
+
+[[package]]
+name = "alloc-stdlib"
+version = "0.2.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "94fb8275041c72129eb51b7d0322c29b8387a0386127718b096429201a5d6ece"
+dependencies = [
+ "alloc-no-stdlib",
+]
+
 [[package]]
 name = "allocator-api2"
 version = "0.2.21"
@@ -106,6 +121,19 @@ dependencies = [
  "serde",
 ]
 
+[[package]]
+name = "async-compression"
+version = "0.4.33"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "93c1f86859c1af3d514fa19e8323147ff10ea98684e6c7b307912509f50e67b2"
+dependencies = [
+ "compression-codecs",
+ "compression-core",
+ "futures-core",
+ "pin-project-lite",
+ "tokio",
+]
+
 [[package]]
 name = "async-trait"
 version = "0.1.89"
@@ -330,6 +358,27 @@ dependencies = [
  "generic-array",
 ]
 
+[[package]]
+name = "brotli"
+version = "8.0.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4bd8b9603c7aa97359dbd97ecf258968c95f3adddd6db2f7e7a5bef101c84560"
+dependencies = [
+ "alloc-no-stdlib",
+ "alloc-stdlib",
+ "brotli-decompressor",
+]
+
+[[package]]
+name = "brotli-decompressor"
+version = "5.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "874bb8112abecc98cbd6d81ea4fa7e94fb9449648c93cc89aa40c81c24d7de03"
+dependencies = [
+ "alloc-no-stdlib",
+ "alloc-stdlib",
+]
+
 [[package]]
 name = "bstr"
 version = "1.12.0"
@@ -407,6 +456,8 @@ version = "1.2.34"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "42bc4aea80032b7bf409b0bc7ccad88853858911b7713a8062fdc0623867bedc"
 dependencies = [
+ "jobserver",
+ "libc",
  "shlex",
 ]
 
@@ -501,6 +552,26 @@ dependencies = [
  "time",
 ]
 
+[[package]]
+name = "compression-codecs"
+version = "0.4.32"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "680dc087785c5230f8e8843e2e57ac7c1c90488b6a91b88caa265410568f441b"
+dependencies = [
+ "brotli",
+ "compression-core",
+ "flate2",
+ "memchr",
+ "zstd",
+ "zstd-safe",
+]
+
+[[package]]
+name = "compression-core"
+version = "0.4.31"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "75984efb6ed102a0d42db99afb6c1948f0380d1d91808d5529916e6c08b49d8d"
+
 [[package]]
 name = "concurrent-queue"
 version = "2.5.0"
@@ -1642,6 +1713,16 @@ version = "1.0.15"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "4a5f13b858c8d314ee3e8f639011f7ccefe71f97f96e50151fb991f267928e2c"
 
+[[package]]
+name = "jobserver"
+version = "0.1.34"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9afb3de4395d6b3e67a780b6de64b51c978ecf11cb9a462c66be7d4ca9039d33"
+dependencies = [
+ "getrandom 0.3.3",
+ "libc",
+]
+
 [[package]]
 name = "js-sys"
 version = "0.3.77"
@@ -3603,14 +3684,17 @@ version = "0.6.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "adc82fd73de2a9722ac5da747f12383d2bfdb93591ee6c58486e0097890f05f2"
 dependencies = [
+ "async-compression",
  "bitflags 2.9.4",
  "bytes",
+ "futures-core",
  "futures-util",
  "http 1.3.1",
  "http-body 1.0.1",
  "iri-string",
  "pin-project-lite",
  "tokio",
+ "tokio-util",
  "tower",
  "tower-layer",
  "tower-service",
@@ -4563,3 +4647,31 @@ dependencies = [
  "quote",
  "syn 2.0.106",
 ]
+
+[[package]]
+name = "zstd"
+version = "0.13.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e91ee311a569c327171651566e07972200e76fcfe2242a4fa446149a3881c08a"
+dependencies = [
+ "zstd-safe",
+]
+
+[[package]]
+name = "zstd-safe"
+version = "7.2.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8f49c4d5f0abb602a93fb8736af2a4f4dd9512e36f7f570d66e65ff867ed3b9d"
+dependencies = [
+ "zstd-sys",
+]
+
+[[package]]
+name = "zstd-sys"
+version = "2.0.16+zstd.1.5.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "91e19ebc2adc8f83e43039e79776e3fda8ca919132d68a1fed6a5faca2683748"
+dependencies = [
+ "cc",
+ "pkg-config",
+]

Cargo.toml  +1 -1
@@ -48,7 +48,7 @@ url = "2.5"
 governor = "0.10.1"
 serde_path_to_error = "0.1.17"
 num-format = "0.4.4"
-tower-http = { version = "0.6.0", features = ["cors", "trace", "timeout"] }
+tower-http = { version = "0.6.0", features = ["cors", "trace", "timeout", "compression-full"] }
 rust-embed = { version = "8.0", features = ["include-exclude"], optional = true }
 mime_guess = { version = "2.0", optional = true }
 clap = { version = "4.5", features = ["derive"] }

Dockerfile  +5 -2
@@ -7,6 +7,9 @@ FROM oven/bun:1 AS frontend-builder
 
 WORKDIR /app
 
+# Install zstd for pre-compression
+RUN apt-get update && apt-get install -y --no-install-recommends zstd && rm -rf /var/lib/apt/lists/*
+
 # Copy backend Cargo.toml for build-time version retrieval
 COPY ./Cargo.toml ./
 
@@ -19,8 +22,8 @@ RUN bun install --frozen-lockfile
 # Copy frontend source code
 COPY ./web ./
 
-# Build frontend
-RUN bun run build
+# Build frontend, then pre-compress static assets (gzip, brotli, zstd)
+RUN bun run build && bun run scripts/compress-assets.ts
 
 # --- Chef Base Stage ---
 FROM lukemathwalker/cargo-chef:latest-rust-${RUST_VERSION} AS chef

src/web/assets.rs  +114 -19
@@ -1,14 +1,18 @@
-//! Embedded assets for the web frontend
+//! Embedded assets for the web frontend.
 //!
-//! This module handles serving static assets that are embedded into the binary
-//! at compile time using rust-embed.
+//! Serves static assets embedded into the binary at compile time using rust-embed.
+//! Supports content negotiation for pre-compressed variants (.br, .gz, .zst)
+//! generated at build time by `web/scripts/compress-assets.ts`.
 
+use axum::http::{HeaderMap, HeaderValue, header};
 use dashmap::DashMap;
 use rapidhash::v3::rapidhash_v3;
 use rust_embed::RustEmbed;
 use std::fmt;
 use std::sync::LazyLock;
 
+use super::encoding::{COMPRESSION_MIN_SIZE, ContentEncoding, parse_accepted_encodings};
+
 /// Embedded web assets from the dist directory
 #[derive(RustEmbed)]
 #[folder = "web/dist/"]
@@ -21,17 +25,15 @@ pub struct WebAssets;
 pub struct AssetHash(u64);
 
 impl AssetHash {
-    /// Create a new AssetHash from u64 value
     pub fn new(hash: u64) -> Self {
         Self(hash)
     }
 
-    /// Get the hash as a hex string
     pub fn to_hex(&self) -> String {
         format!("{:016x}", self.0)
     }
 
-    /// Get the hash as a quoted hex string
+    /// Get the hash as a quoted hex string (for ETag headers)
     pub fn quoted(&self) -> String {
         format!("\"{}\"", self.to_hex())
     }
@@ -51,12 +53,8 @@ pub struct AssetMetadata {
 }
 
 impl AssetMetadata {
-    /// Check if the etag matches the asset hash
     pub fn etag_matches(&self, etag: &str) -> bool {
-        // Remove quotes if present (ETags are typically quoted)
         let etag = etag.trim_matches('"');
-
-        // ETags generated from u64 hex should be 16 characters
         etag.len() == 16
             && u64::from_str_radix(etag, 16)
                 .map(|parsed| parsed == self.hash.0)
@@ -68,28 +66,125 @@ impl AssetMetadata {
 static ASSET_CACHE: LazyLock<DashMap<String, AssetMetadata>> = LazyLock::new(DashMap::new);
 
 /// Get cached asset metadata for a file path, caching on-demand
-/// Returns AssetMetadata containing MIME type and RapidHash hash
 pub fn get_asset_metadata_cached(path: &str, content: &[u8]) -> AssetMetadata {
-    // Check cache first
     if let Some(cached) = ASSET_CACHE.get(path) {
         return cached.value().clone();
     }
 
-    // Calculate MIME type
     let mime_type = mime_guess::from_path(path)
         .first()
         .map(|mime| mime.to_string());
+    let hash = AssetHash::new(rapidhash_v3(content));
 
-    // Calculate RapidHash hash (using u64 native output size)
-    let hash_value = rapidhash_v3(content);
-    let hash = AssetHash::new(hash_value);
-
     let metadata = AssetMetadata { mime_type, hash };
 
-    // Only cache if we haven't exceeded the limit
     if ASSET_CACHE.len() < 1000 {
         ASSET_CACHE.insert(path.to_string(), metadata.clone());
     }
 
     metadata
 }
 
+/// Set appropriate `Cache-Control` header based on the asset path.
+///
+/// SvelteKit outputs fingerprinted assets under `_app/immutable/` which are
+/// safe to cache indefinitely. Other assets get shorter cache durations.
+fn set_cache_control(headers: &mut HeaderMap, path: &str) {
+    let cache_control = if path.contains("immutable/") {
+        // SvelteKit fingerprinted assets — cache forever
+        "public, max-age=31536000, immutable"
+    } else if path == "index.html" || path.ends_with(".html") {
+        "public, max-age=300"
+    } else {
+        match path.rsplit_once('.').map(|(_, ext)| ext) {
+            Some("css" | "js") => "public, max-age=86400",
+            Some("png" | "jpg" | "jpeg" | "gif" | "svg" | "ico") => "public, max-age=2592000",
+            _ => "public, max-age=3600",
+        }
+    };
+
+    if let Ok(value) = HeaderValue::from_str(cache_control) {
+        headers.insert(header::CACHE_CONTROL, value);
+    }
+}
+
+/// Serve an embedded asset with content encoding negotiation.
+///
+/// Tries pre-compressed variants (.br, .gz, .zst) in the order preferred by
+/// the client's `Accept-Encoding` header, falling back to the uncompressed
+/// original. Returns `None` if the asset doesn't exist at all.
+pub fn try_serve_asset_with_encoding(
+    path: &str,
+    request_headers: &HeaderMap,
+) -> Option<axum::response::Response> {
+    use axum::response::IntoResponse;
+
+    let asset_path = path.strip_prefix('/').unwrap_or(path);
+
+    // Get the uncompressed original first (for metadata: MIME type, ETag)
+    let original = WebAssets::get(asset_path)?;
+    let metadata = get_asset_metadata_cached(asset_path, &original.data);
+
+    // Check ETag for conditional requests (304 Not Modified)
+    if let Some(etag) = request_headers.get(header::IF_NONE_MATCH)
+        && etag.to_str().is_ok_and(|s| metadata.etag_matches(s))
+    {
+        return Some(axum::http::StatusCode::NOT_MODIFIED.into_response());
+    }
+
+    let mime_type = metadata
+        .mime_type
+        .unwrap_or_else(|| "application/octet-stream".to_string());
+
+    // Only attempt pre-compressed variants for files above the compression
+    // threshold — the build script skips smaller files too.
+    let accepted_encodings = if original.data.len() >= COMPRESSION_MIN_SIZE {
+        parse_accepted_encodings(request_headers)
+    } else {
+        vec![ContentEncoding::Identity]
+    };
+
+    for encoding in &accepted_encodings {
+        if *encoding == ContentEncoding::Identity {
+            continue;
+        }
+
+        let compressed_path = format!("{}{}", asset_path, encoding.extension());
+        if let Some(compressed) = WebAssets::get(&compressed_path) {
+            let mut response_headers = HeaderMap::new();
+
+            if let Ok(ct) = HeaderValue::from_str(&mime_type) {
+                response_headers.insert(header::CONTENT_TYPE, ct);
+            }
+            if let Some(ce) = encoding.header_value() {
+                response_headers.insert(header::CONTENT_ENCODING, ce);
+            }
+            if let Ok(etag_val) = HeaderValue::from_str(&metadata.hash.quoted()) {
+                response_headers.insert(header::ETAG, etag_val);
+            }
+            // Vary so caches distinguish by encoding
+            response_headers.insert(header::VARY, HeaderValue::from_static("Accept-Encoding"));
+            set_cache_control(&mut response_headers, asset_path);
+
+            return Some(
+                (
+                    axum::http::StatusCode::OK,
+                    response_headers,
+                    compressed.data,
+                )
+                    .into_response(),
+            );
+        }
+    }
+
+    // No compressed variant found — serve uncompressed original
+    let mut response_headers = HeaderMap::new();
+    if let Ok(ct) = HeaderValue::from_str(&mime_type) {
+        response_headers.insert(header::CONTENT_TYPE, ct);
+    }
+    if let Ok(etag_val) = HeaderValue::from_str(&metadata.hash.quoted()) {
+        response_headers.insert(header::ETAG, etag_val);
+    }
+    set_cache_control(&mut response_headers, asset_path);
+
+    Some((axum::http::StatusCode::OK, response_headers, original.data).into_response())
+}

src/web/encoding.rs (new file)  +196
@@ -0,0 +1,196 @@
+//! Content encoding negotiation for pre-compressed asset serving.
+//!
+//! Parses Accept-Encoding headers with quality values and returns
+//! supported encodings in priority order for content negotiation.
+
+use axum::http::{HeaderMap, HeaderValue, header};
+
+/// Minimum size threshold for compression (bytes).
+///
+/// Must match `MIN_SIZE` in `web/scripts/compress-assets.ts`.
+pub const COMPRESSION_MIN_SIZE: usize = 512;
+
+/// Supported content encodings in priority order (best compression first).
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub enum ContentEncoding {
+    Zstd,
+    Brotli,
+    Gzip,
+    Identity,
+}
+
+impl ContentEncoding {
+    /// File extension suffix for pre-compressed variant lookup.
+    #[inline]
+    pub fn extension(&self) -> &'static str {
+        match self {
+            Self::Zstd => ".zst",
+            Self::Brotli => ".br",
+            Self::Gzip => ".gz",
+            Self::Identity => "",
+        }
+    }
+
+    /// `Content-Encoding` header value, or `None` for identity.
+    #[inline]
+    pub fn header_value(&self) -> Option<HeaderValue> {
+        match self {
+            Self::Zstd => Some(HeaderValue::from_static("zstd")),
+            Self::Brotli => Some(HeaderValue::from_static("br")),
+            Self::Gzip => Some(HeaderValue::from_static("gzip")),
+            Self::Identity => None,
+        }
+    }
+
+    /// Default priority when quality values are equal (higher = better).
+    #[inline]
+    fn default_priority(&self) -> u8 {
+        match self {
+            Self::Zstd => 4,
+            Self::Brotli => 3,
+            Self::Gzip => 2,
+            Self::Identity => 1,
+        }
+    }
+}
+
+/// Parse `Accept-Encoding` header and return supported encodings in priority order.
+///
+/// Supports quality values: `Accept-Encoding: gzip;q=0.8, br;q=1.0, zstd`
+/// When quality values are equal: zstd > brotli > gzip > identity.
+/// Encodings with `q=0` are excluded.
+pub fn parse_accepted_encodings(headers: &HeaderMap) -> Vec<ContentEncoding> {
+    let Some(accept) = headers
+        .get(header::ACCEPT_ENCODING)
+        .and_then(|v| v.to_str().ok())
+    else {
+        return vec![ContentEncoding::Identity];
+    };
+
+    let mut encodings: Vec<(ContentEncoding, f32)> = Vec::new();
+
+    for part in accept.split(',') {
+        let part = part.trim();
+        if part.is_empty() {
+            continue;
+        }
+
+        let (encoding_str, quality) = if let Some((enc, params)) = part.split_once(';') {
+            let q = params
+                .split(';')
+                .find_map(|p| p.trim().strip_prefix("q="))
+                .and_then(|q| q.parse::<f32>().ok())
+                .unwrap_or(1.0);
+            (enc.trim(), q)
+        } else {
+            (part, 1.0)
+        };
+
+        if quality == 0.0 {
+            continue;
+        }
+
+        let encoding = match encoding_str.to_lowercase().as_str() {
+            "zstd" => ContentEncoding::Zstd,
+            "br" | "brotli" => ContentEncoding::Brotli,
+            "gzip" | "x-gzip" => ContentEncoding::Gzip,
+            "*" => ContentEncoding::Gzip,
+            "identity" => ContentEncoding::Identity,
+            _ => continue,
+        };
+
+        encodings.push((encoding, quality));
+    }
+
+    // Sort by quality (desc), then default priority (desc)
+    encodings.sort_by(|a, b| {
+        b.1.partial_cmp(&a.1)
+            .unwrap_or(std::cmp::Ordering::Equal)
+            .then_with(|| b.0.default_priority().cmp(&a.0.default_priority()))
+    });
+
+    if encodings.is_empty() {
+        vec![ContentEncoding::Identity]
+    } else {
+        encodings.into_iter().map(|(e, _)| e).collect()
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+
+    #[test]
+    fn test_parse_all_encodings() {
+        let mut headers = HeaderMap::new();
+        headers.insert(header::ACCEPT_ENCODING, "gzip, br, zstd".parse().unwrap());
+        let encodings = parse_accepted_encodings(&headers);
+        assert_eq!(encodings[0], ContentEncoding::Zstd);
+        assert_eq!(encodings[1], ContentEncoding::Brotli);
+        assert_eq!(encodings[2], ContentEncoding::Gzip);
+    }
+
+    #[test]
+    fn test_parse_with_quality_values() {
+        let mut headers = HeaderMap::new();
+        headers.insert(
+            header::ACCEPT_ENCODING,
+            "gzip;q=1.0, br;q=0.5, zstd;q=0.8".parse().unwrap(),
+        );
+        let encodings = parse_accepted_encodings(&headers);
+        assert_eq!(encodings[0], ContentEncoding::Gzip);
+        assert_eq!(encodings[1], ContentEncoding::Zstd);
+        assert_eq!(encodings[2], ContentEncoding::Brotli);
+    }
+
+    #[test]
+    fn test_no_header_returns_identity() {
+        let headers = HeaderMap::new();
+        let encodings = parse_accepted_encodings(&headers);
+        assert_eq!(encodings, vec![ContentEncoding::Identity]);
+    }
+
+    #[test]
+    fn test_disabled_encoding_excluded() {
+        let mut headers = HeaderMap::new();
+        headers.insert(
+            header::ACCEPT_ENCODING,
+            "zstd;q=0, br, gzip".parse().unwrap(),
+        );
+        let encodings = parse_accepted_encodings(&headers);
+        assert_eq!(encodings[0], ContentEncoding::Brotli);
+        assert_eq!(encodings[1], ContentEncoding::Gzip);
+        assert!(!encodings.contains(&ContentEncoding::Zstd));
+    }
+
+    #[test]
+    fn test_real_chrome_header() {
+        let mut headers = HeaderMap::new();
+        headers.insert(
+            header::ACCEPT_ENCODING,
+            "gzip, deflate, br, zstd".parse().unwrap(),
+        );
+        assert_eq!(parse_accepted_encodings(&headers)[0], ContentEncoding::Zstd);
+    }
+
+    #[test]
+    fn test_extensions() {
+        assert_eq!(ContentEncoding::Zstd.extension(), ".zst");
+        assert_eq!(ContentEncoding::Brotli.extension(), ".br");
+        assert_eq!(ContentEncoding::Gzip.extension(), ".gz");
+        assert_eq!(ContentEncoding::Identity.extension(), "");
+    }
+
+    #[test]
+    fn test_header_values() {
+        assert_eq!(
+            ContentEncoding::Zstd.header_value().unwrap(),
+            HeaderValue::from_static("zstd")
+        );
+        assert_eq!(
+            ContentEncoding::Brotli.header_value().unwrap(),
+            HeaderValue::from_static("br")
+        );
+        assert!(ContentEncoding::Identity.header_value().is_none());
+    }
+}

src/web/mod.rs
@@ -4,6 +4,8 @@ pub mod admin;
 #[cfg(feature = "embed-assets")]
 pub mod assets;
 pub mod auth;
+#[cfg(feature = "embed-assets")]
+pub mod encoding;
 pub mod extractors;
 pub mod routes;
 pub mod session_cache;

Web router module (Rust)  +32 -97
@@ -13,11 +13,9 @@ use crate::web::admin;
 use crate::web::auth::{self, AuthConfig};
 #[cfg(feature = "embed-assets")]
 use axum::{
-    http::{HeaderMap, HeaderValue, StatusCode, Uri},
-    response::{Html, IntoResponse},
+    http::{HeaderMap, StatusCode, Uri},
+    response::IntoResponse,
 };
-#[cfg(feature = "embed-assets")]
-use http::header;
 use serde::{Deserialize, Serialize};
 use serde_json::{Value, json};
 use std::{collections::BTreeMap, time::Duration};
@@ -27,48 +25,14 @@ use crate::state::AppState;
 use crate::status::ServiceStatus;
 #[cfg(not(feature = "embed-assets"))]
 use tower_http::cors::{Any, CorsLayer};
-use tower_http::{classify::ServerErrorsFailureClass, timeout::TimeoutLayer, trace::TraceLayer};
+use tower_http::{
+    classify::ServerErrorsFailureClass, compression::CompressionLayer, timeout::TimeoutLayer,
+    trace::TraceLayer,
+};
 use tracing::{Span, debug, trace, warn};
 
 #[cfg(feature = "embed-assets")]
-use crate::web::assets::{WebAssets, get_asset_metadata_cached};
+use crate::web::assets::try_serve_asset_with_encoding;
 
-/// Set appropriate caching headers based on asset type
-#[cfg(feature = "embed-assets")]
-fn set_caching_headers(response: &mut Response, path: &str, etag: &str) {
-    let headers = response.headers_mut();
-
-    // Set ETag
-    if let Ok(etag_value) = HeaderValue::from_str(etag) {
-        headers.insert(header::ETAG, etag_value);
-    }
-
-    // Set Cache-Control based on asset type
-    let cache_control = if path.starts_with("assets/") {
-        // Static assets with hashed filenames - long-term cache
-        "public, max-age=31536000, immutable"
-    } else if path == "index.html" {
-        // HTML files - short-term cache
-        "public, max-age=300"
-    } else {
-        match path.split_once('.').map(|(_, extension)| extension) {
-            Some(ext) => match ext {
-                // CSS/JS files - medium-term cache
-                "css" | "js" => "public, max-age=86400",
-                // Images - long-term cache
-                "png" | "jpg" | "jpeg" | "gif" | "svg" | "ico" => "public, max-age=2592000",
-                // Default for other files
-                _ => "public, max-age=3600",
-            },
-            // Default for files without an extension
-            None => "public, max-age=3600",
-        }
-    };
-
-    if let Ok(cache_control_value) = HeaderValue::from_str(cache_control) {
-        headers.insert(header::CACHE_CONTROL, cache_control_value);
-    }
-}
-
 /// Creates the web server router
 pub fn create_router(app_state: AppState, auth_config: AuthConfig) -> Router {
@@ -125,6 +89,13 @@ pub fn create_router(app_state: AppState, auth_config: AuthConfig) -> Router {
     }
 
     router.layer((
+        // Compress API responses (gzip/brotli/zstd). Pre-compressed static
+        // assets already have Content-Encoding set, so tower-http skips them.
+        CompressionLayer::new()
+            .zstd(true)
+            .br(true)
+            .gzip(true)
+            .quality(tower_http::CompressionLevel::Fastest),
         TraceLayer::new_for_http()
             .make_span_with(|request: &Request<Body>| {
                 tracing::debug_span!("request", path = request.uri().path())
@@ -171,71 +142,35 @@ pub fn create_router(app_state: AppState, auth_config: AuthConfig) -> Router {
     ))
 }
 
-/// Handler that extracts request information for caching
+/// SPA fallback handler with content encoding negotiation.
+///
+/// Serves embedded static assets with pre-compressed variants when available,
+/// falling back to `index.html` for SPA client-side routing.
 #[cfg(feature = "embed-assets")]
-async fn fallback(request: Request) -> Response {
+async fn fallback(request: Request) -> axum::response::Response {
     let uri = request.uri().clone();
     let headers = request.headers().clone();
-    handle_spa_fallback_with_headers(uri, headers).await
+    handle_spa_fallback(uri, headers).await
 }
 
-/// Handles SPA routing by serving index.html for non-API, non-asset requests
-/// This version includes HTTP caching headers and ETag support
 #[cfg(feature = "embed-assets")]
-async fn handle_spa_fallback_with_headers(uri: Uri, request_headers: HeaderMap) -> Response {
-    let path = uri.path().trim_start_matches('/');
+async fn handle_spa_fallback(uri: Uri, request_headers: HeaderMap) -> axum::response::Response {
+    let path = uri.path();
 
-    if let Some(content) = WebAssets::get(path) {
-        // Get asset metadata (MIME type and hash) with caching
-        let metadata = get_asset_metadata_cached(path, &content.data);
-
-        // Check if client has a matching ETag (conditional request)
-        if let Some(etag) = request_headers.get(header::IF_NONE_MATCH)
-            && etag.to_str().is_ok_and(|s| metadata.etag_matches(s))
-        {
-            return StatusCode::NOT_MODIFIED.into_response();
-        }
-
-        // Use cached MIME type, only set Content-Type if we have a valid MIME type
-        let mut response = (
-            [(
-                header::CONTENT_TYPE,
-                // For unknown types, set to application/octet-stream
-                metadata
-                    .mime_type
-                    .unwrap_or("application/octet-stream".to_string()),
-            )],
-            content.data,
-        )
-            .into_response();
-
-        // Set caching headers
-        set_caching_headers(&mut response, path, &metadata.hash.quoted());
-
+    // Try serving the exact asset (with encoding negotiation)
+    if let Some(response) = try_serve_asset_with_encoding(path, &request_headers) {
         return response;
-    } else {
-        // Any assets that are not found should be treated as a 404, not falling back to the SPA index.html
-        if path.starts_with("assets/") {
-            return (StatusCode::NOT_FOUND, "Asset not found").into_response();
-        }
     }
 
-    // Fall back to the SPA index.html
-    match WebAssets::get("index.html") {
-        Some(content) => {
-            let metadata = get_asset_metadata_cached("index.html", &content.data);
-
-            // Check if client has a matching ETag for index.html
-            if let Some(etag) = request_headers.get(header::IF_NONE_MATCH)
-                && etag.to_str().is_ok_and(|s| metadata.etag_matches(s))
-            {
-                return StatusCode::NOT_MODIFIED.into_response();
-            }
-
-            let mut response = Html(content.data).into_response();
-            set_caching_headers(&mut response, "index.html", &metadata.hash.quoted());
-            response
-        }
+    // SvelteKit assets under _app/ that don't exist are a hard 404
+    let trimmed = path.trim_start_matches('/');
+    if trimmed.starts_with("_app/") || trimmed.starts_with("assets/") {
+        return (StatusCode::NOT_FOUND, "Asset not found").into_response();
+    }
+
+    // SPA fallback: serve index.html with encoding negotiation
+    match try_serve_asset_with_encoding("/index.html", &request_headers) {
+        Some(response) => response,
        None => (
            StatusCode::INTERNAL_SERVER_ERROR,
            "Failed to load index.html",

web/scripts/compress-assets.ts (new file)  +148
@@ -0,0 +1,148 @@
+#!/usr/bin/env bun
+/**
+ * Pre-compress static assets with maximum compression levels.
+ * Run after `bun run build`.
+ *
+ * Generates .gz, .br, .zst variants for compressible files ≥ MIN_SIZE bytes.
+ * These are embedded alongside originals by rust-embed and served via
+ * content negotiation in src/web/assets.rs.
+ */
+import { readdir, stat, readFile, writeFile } from "fs/promises";
+import { join, extname } from "path";
+import { gzipSync, brotliCompressSync, constants } from "zlib";
+import { $ } from "bun";
+
+// Must match COMPRESSION_MIN_SIZE in src/web/encoding.rs
+const MIN_SIZE = 512;
+
+const COMPRESSIBLE_EXTENSIONS = new Set([
+  ".js",
+  ".css",
+  ".html",
+  ".json",
+  ".svg",
+  ".txt",
+  ".xml",
+  ".map",
+]);
+
+// Check if zstd CLI is available
+let hasZstd = false;
+try {
+  await $`which zstd`.quiet();
+  hasZstd = true;
+} catch {
+  console.warn("Warning: zstd not found, skipping .zst generation");
+}
+
+async function* walkDir(dir: string): AsyncGenerator<string> {
+  try {
+    const entries = await readdir(dir, { withFileTypes: true });
+    for (const entry of entries) {
+      const path = join(dir, entry.name);
+      if (entry.isDirectory()) {
+        yield* walkDir(path);
+      } else if (entry.isFile()) {
+        yield path;
+      }
+    }
+  } catch {
+    // Directory doesn't exist, skip
+  }
+}
+
+async function compressFile(path: string): Promise<void> {
+  const ext = extname(path);
+
+  if (!COMPRESSIBLE_EXTENSIONS.has(ext)) return;
+  if (path.endsWith(".br") || path.endsWith(".gz") || path.endsWith(".zst")) return;
+
+  const stats = await stat(path);
+  if (stats.size < MIN_SIZE) return;
+
+  // Skip if all compressed variants already exist
+  const variantsExist = await Promise.all([
+    stat(`${path}.br`).then(
+      () => true,
+      () => false
+    ),
+    stat(`${path}.gz`).then(
+      () => true,
+      () => false
+    ),
+    hasZstd
+      ? stat(`${path}.zst`).then(
+          () => true,
+          () => false
+        )
+      : Promise.resolve(false),
+  ]);
+
+  if (variantsExist.every((exists) => exists || !hasZstd)) {
+    return;
+  }
+
+  const content = await readFile(path);
+  const originalSize = content.length;
+
+  // Brotli (maximum quality = 11)
+  const brContent = brotliCompressSync(content, {
+    params: {
+      [constants.BROTLI_PARAM_QUALITY]: 11,
+    },
+  });
+  await writeFile(`${path}.br`, brContent);
+
+  // Gzip (level 9)
+  const gzContent = gzipSync(content, { level: 9 });
+  await writeFile(`${path}.gz`, gzContent);
+
+  // Zstd (level 19 - maximum)
+  if (hasZstd) {
+    try {
+      await $`zstd -19 -q -f -o ${path}.zst ${path}`.quiet();
+    } catch (e) {
+      console.warn(`Warning: Failed to compress ${path} with zstd: ${e}`);
+    }
+  }
+
+  const brRatio = ((brContent.length / originalSize) * 100).toFixed(1);
+  const gzRatio = ((gzContent.length / originalSize) * 100).toFixed(1);
+  console.log(`Compressed: ${path} (br: ${brRatio}%, gz: ${gzRatio}%, ${originalSize} bytes)`);
+}
+
+async function main() {
+  console.log("Pre-compressing static assets...");
+
+  // Banner uses adapter-static with output in dist/
+  const dirs = ["dist"];
+  let scannedFiles = 0;
+  let compressedFiles = 0;
+
+  for (const dir of dirs) {
+    for await (const file of walkDir(dir)) {
+      const ext = extname(file);
+      scannedFiles++;
+
+      if (
+        COMPRESSIBLE_EXTENSIONS.has(ext) &&
+        !file.endsWith(".br") &&
+        !file.endsWith(".gz") &&
+        !file.endsWith(".zst")
+      ) {
+        const stats = await stat(file);
+        if (stats.size >= MIN_SIZE) {
+          await compressFile(file);
+          compressedFiles++;
+        }
+      }
+    }
+  }
+
+  console.log(`Done! Scanned ${scannedFiles} files, compressed ${compressedFiles} files.`);
+}
+
+main().catch((e) => {
+  console.error("Compression failed:", e);
+  process.exit(1);
+});

Frontend API client (TypeScript)  +1 -1
@@ -144,7 +144,7 @@ export class BannerApiClient {
     return this.request<User[]>("/admin/users");
   }
 
-  async setUserAdmin(discordId: string, isAdmin: boolean): Promise<User> {
+  async setUserAdmin(discordId: bigint, isAdmin: boolean): Promise<User> {
     const response = await this.fetchFn(`${this.baseUrl}/admin/users/${discordId}/admin`, {
       method: "PUT",
       headers: { "Content-Type": "application/json" },

Search metadata component (Svelte)
@@ -1,25 +1,25 @@
 <script lang="ts">
   export interface SearchMeta {
     totalCount: number;
     durationMs: number;
     timestamp: Date;
   }
 
   let { meta }: { meta: SearchMeta | null } = $props();
 
   let formattedTime = $derived(
     meta
       ? meta.timestamp.toLocaleTimeString(undefined, {
           hour: "2-digit",
           minute: "2-digit",
           second: "2-digit",
         })
       : ""
   );
 
   let countLabel = $derived(meta ? meta.totalCount.toLocaleString() : "");
   let resultNoun = $derived(meta ? (meta.totalCount !== 1 ? "results" : "result") : "");
   let durationLabel = $derived(meta ? `${Math.round(meta.durationMs)}ms` : "");
 </script>
 
 {#if meta}

Admin layout (Svelte)
@@ -43,7 +43,7 @@ const navItems = [
   <div class="border-border border-b p-4">
     <h2 class="text-lg font-semibold">Admin</h2>
     {#if authStore.user}
-      <p class="text-muted-foreground text-sm">{authStore.user.username}</p>
+      <p class="text-muted-foreground text-sm">{authStore.user.discordUsername}</p>
     {/if}
   </div>
   <nav class="flex-1 space-y-1 p-2">

Admin users page (Svelte)
@@ -6,7 +6,7 @@ import { Shield, ShieldOff } from "@lucide/svelte";
 
   let users = $state<User[]>([]);
   let error = $state<string | null>(null);
-  let updating = $state<string | null>(null);
+  let updating = $state<bigint | null>(null);
 
   onMount(async () => {
     try {
@@ -52,14 +52,14 @@ async function toggleAdmin(user: User) {
       {#each users as user}
         <tr class="border-border border-b last:border-b-0">
           <td class="flex items-center gap-2 px-4 py-3">
-            {#if user.avatarHash}
+            {#if user.discordAvatarHash}
              <img
-                src="https://cdn.discordapp.com/avatars/{user.discordId}/{user.avatarHash}.png?size=32"
+                src="https://cdn.discordapp.com/avatars/{user.discordId}/{user.discordAvatarHash}.png?size=32"
                alt=""
                class="h-6 w-6 rounded-full"
              />
             {/if}
-            {user.username}
+            {user.discordUsername}
           </td>
           <td class="text-muted-foreground px-4 py-3 font-mono text-xs">{user.discordId}</td>
           <td class="px-4 py-3">