refactor: clean up struct fields, resolve clippy lints, resolve/ignore svelte lints, make checks error on warnings

2026-01-14 22:55:40 -06:00
parent e83133cfcc
commit 89e1ab097d
50 changed files with 276 additions and 1114 deletions
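Reviewer note: the recurring Rust change in this commit collapses nested `if let` blocks (clippy's collapsible_if lints) into let-chains, which are only available on the 2024 edition. A minimal sketch of the rewrite, with hypothetical names rather than code from this repo:

// Before (flagged by clippy):
// if let Some(raw) = config {
//     if let Ok(port) = raw.parse::<u16>() {
//         return Some(port);
//     }
// }
fn find_port(config: Option<&str>) -> Option<u16> {
    // After: one chained condition; `let` bindings and plain booleans
    // can be mixed anywhere in the chain on edition 2024.
    if let Some(raw) = config
        && let Ok(port) = raw.parse::<u16>()
        && port != 0
    {
        return Some(port);
    }
    None
}

fn main() {
    assert_eq!(find_port(Some("8080")), Some(8080));
    assert_eq!(find_port(Some("oops")), None);
    assert_eq!(find_port(None), None);
}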
-48
@@ -146,11 +146,6 @@ impl IsrCache {
self.cache.insert(path, Arc::new(response)).await;
}
/// Check if a path is currently being refreshed
pub fn is_refreshing(&self, path: &str) -> bool {
self.refreshing.contains(path)
}
/// Mark a path as being refreshed. Returns true if it wasn't already refreshing.
pub fn start_refresh(&self, path: &str) -> bool {
self.refreshing.insert(path.to_string())
@@ -166,49 +161,6 @@ impl IsrCache {
self.cache.invalidate(path).await;
tracing::debug!(path = %path, "Cache entry invalidated");
}
/// Invalidate multiple cached paths
pub async fn invalidate_many(&self, paths: &[&str]) {
for path in paths {
self.cache.invalidate(*path).await;
}
tracing::info!(paths = ?paths, "Cache entries invalidated");
}
/// Invalidate all entries matching a prefix
pub async fn invalidate_prefix(&self, prefix: &str) {
// moka doesn't have prefix invalidation, so we need to iterate
// This is O(n) but invalidation should be infrequent
let prefix_owned = prefix.to_string();
self.cache
.invalidate_entries_if(move |key, _| key.starts_with(&prefix_owned))
.ok();
tracing::info!(prefix = %prefix, "Cache entries with prefix invalidated");
}
/// Invalidate all cached entries
pub async fn invalidate_all(&self) {
let previous_count = self.cache.entry_count();
self.cache.invalidate_all();
tracing::info!(previous_count, "All cache entries invalidated");
}
/// Get cache statistics
pub fn stats(&self) -> CacheStats {
CacheStats {
entry_count: self.cache.entry_count(),
weighted_size: self.cache.weighted_size(),
refreshing_count: self.refreshing.len(),
}
}
}
/// Cache statistics for observability
#[derive(Debug, Clone, serde::Serialize)]
pub struct CacheStats {
pub entry_count: u64,
pub weighted_size: u64,
pub refreshing_count: usize,
}
/// Determines if a path should be cached
+8 -10
@@ -111,17 +111,15 @@ pub async fn session(client: ApiClient, json: bool) -> Result<(), Box<dyn std::e
} else {
output::error("Session expired or invalid");
}
} else if json {
println!(
"{}",
serde_json::json!({
"authenticated": false,
})
);
} else {
if json {
println!(
"{}",
serde_json::json!({
"authenticated": false,
})
);
} else {
output::info("Not logged in");
}
output::info("Not logged in");
}
Ok(())
+9 -8
@@ -90,10 +90,10 @@ async fn create(
json: bool,
) -> Result<(), Box<dyn std::error::Error>> {
// Validate color if provided
if let Some(ref c) = color {
if !c.chars().all(|ch| ch.is_ascii_hexdigit()) || c.len() != 6 {
return Err("Color must be a 6-character hex string (e.g., '3b82f6')".into());
}
if let Some(ref c) = color
&& (!c.chars().all(|ch| ch.is_ascii_hexdigit()) || c.len() != 6)
{
return Err("Color must be a 6-character hex string (e.g., '3b82f6')".into());
}
let request = CreateTagRequest {
@@ -128,10 +128,11 @@ async fn update(
json: bool,
) -> Result<(), Box<dyn std::error::Error>> {
// Validate color if provided
if let Some(ref c) = color {
if !c.is_empty() && (!c.chars().all(|ch| ch.is_ascii_hexdigit()) || c.len() != 6) {
return Err("Color must be a 6-character hex string (e.g., '3b82f6')".into());
}
if let Some(ref c) = color
&& !c.is_empty()
&& (!c.chars().all(|ch| ch.is_ascii_hexdigit()) || c.len() != 6)
{
return Err("Color must be a 6-character hex string (e.g., '3b82f6')".into());
}
// First fetch the current tag
+1 -1
@@ -36,7 +36,7 @@ pub enum Command {
Seed,
/// API client for managing content remotely
Api(ApiArgs),
Api(Box<ApiArgs>),
}
#[derive(Parser, Debug)]
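Boxing the Api variant resolves clippy's large_enum_variant lint: an enum is as large as its biggest variant, so a bulky ApiArgs would inflate every Command value. A hedged sketch using a stand-in type, not the real ApiArgs:

// `BigArgs` stands in for a large clap args struct.
struct BigArgs {
    buffer: [u8; 1024],
}

enum Inline {
    Seed,
    Api(BigArgs), // every Inline value occupies ~1 KiB
}

enum Boxed {
    Seed,
    Api(Box<BigArgs>), // the variant shrinks to one pointer
}

fn main() {
    assert!(std::mem::size_of::<Inline>() > 1024);
    assert!(std::mem::size_of::<Boxed>() <= 16);
    // At the call site the box is dereferenced once, mirroring the
    // `cli::api::run(*api_args)` change further down in this commit.
    let cmd = Boxed::Api(Box::new(BigArgs { buffer: [0; 1024] }));
    if let Boxed::Api(args) = cmd {
        let _inner: BigArgs = *args;
    }
}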
+36 -90
@@ -1,6 +1,5 @@
use serde::{Deserialize, Serialize};
use sqlx::PgPool;
use time::OffsetDateTime;
use uuid::Uuid;
/// Media type enum matching PostgreSQL enum
@@ -19,15 +18,10 @@ pub struct DbProjectMedia {
pub project_id: Uuid,
pub display_order: i32,
pub media_type: MediaType,
pub original_filename: String,
pub r2_base_path: String,
pub variants: serde_json::Value,
pub width: Option<i32>,
pub height: Option<i32>,
pub size_bytes: i64,
pub blurhash: Option<String>,
pub metadata: Option<serde_json::Value>,
pub created_at: OffsetDateTime,
}
/// Variant info for images
@@ -158,34 +152,34 @@ impl DbProjectMedia {
// Parse the JSONB variants
if let Some(obj) = self.variants.as_object() {
// Handle image variants
if let Some(thumb) = obj.get("thumb") {
if let Ok(v) = serde_json::from_value::<ImageVariant>(thumb.clone()) {
variants.thumb = Some(ApiMediaVariant {
url: format!("{}/{}", base_url, v.key),
width: v.width,
height: v.height,
});
}
if let Some(thumb) = obj.get("thumb")
&& let Ok(v) = serde_json::from_value::<ImageVariant>(thumb.clone())
{
variants.thumb = Some(ApiMediaVariant {
url: format!("{}/{}", base_url, v.key),
width: v.width,
height: v.height,
});
}
if let Some(medium) = obj.get("medium") {
if let Ok(v) = serde_json::from_value::<ImageVariant>(medium.clone()) {
variants.medium = Some(ApiMediaVariant {
url: format!("{}/{}", base_url, v.key),
width: v.width,
height: v.height,
});
}
if let Some(medium) = obj.get("medium")
&& let Ok(v) = serde_json::from_value::<ImageVariant>(medium.clone())
{
variants.medium = Some(ApiMediaVariant {
url: format!("{}/{}", base_url, v.key),
width: v.width,
height: v.height,
});
}
if let Some(full) = obj.get("full") {
if let Ok(v) = serde_json::from_value::<ImageVariant>(full.clone()) {
variants.full = Some(ApiMediaVariant {
url: format!("{}/{}", base_url, v.key),
width: v.width,
height: v.height,
});
}
if let Some(full) = obj.get("full")
&& let Ok(v) = serde_json::from_value::<ImageVariant>(full.clone())
{
variants.full = Some(ApiMediaVariant {
url: format!("{}/{}", base_url, v.key),
width: v.width,
height: v.height,
});
}
// Handle original - could be image or video
@@ -212,14 +206,14 @@ impl DbProjectMedia {
}
// Handle video poster
if let Some(poster) = obj.get("poster") {
if let Ok(v) = serde_json::from_value::<ImageVariant>(poster.clone()) {
variants.poster = Some(ApiMediaVariant {
url: format!("{}/{}", base_url, v.key),
width: v.width,
height: v.height,
});
}
if let Some(poster) = obj.get("poster")
&& let Ok(v) = serde_json::from_value::<ImageVariant>(poster.clone())
{
variants.poster = Some(ApiMediaVariant {
url: format!("{}/{}", base_url, v.key),
width: v.width,
height: v.height,
});
}
}
@@ -242,15 +236,10 @@ pub async fn get_media_for_project(
project_id,
display_order,
media_type as "media_type: MediaType",
original_filename,
r2_base_path,
variants,
width,
height,
size_bytes,
blurhash,
metadata,
created_at
metadata
FROM project_media
WHERE project_id = $1
ORDER BY display_order ASC
@@ -274,15 +263,10 @@ pub async fn get_media_by_id(
project_id,
display_order,
media_type as "media_type: MediaType",
original_filename,
r2_base_path,
variants,
width,
height,
size_bytes,
blurhash,
metadata,
created_at
metadata
FROM project_media
WHERE id = $1
"#,
@@ -309,6 +293,7 @@ pub async fn get_next_display_order(pool: &PgPool, project_id: Uuid) -> Result<i
}
/// Create a new media record
#[allow(clippy::too_many_arguments)]
pub async fn create_media(
pool: &PgPool,
project_id: Uuid,
@@ -337,15 +322,10 @@ pub async fn create_media(
project_id,
display_order,
media_type as "media_type: MediaType",
original_filename,
r2_base_path,
variants,
width,
height,
size_bytes,
blurhash,
metadata,
created_at
metadata
"#,
project_id,
display_order,
@@ -414,37 +394,3 @@ pub async fn reorder_media(
tx.commit().await?;
Ok(())
}
/// Update media metadata (focal point, alt text, etc.)
pub async fn update_media_metadata(
pool: &PgPool,
id: Uuid,
metadata: serde_json::Value,
) -> Result<DbProjectMedia, sqlx::Error> {
sqlx::query_as!(
DbProjectMedia,
r#"
UPDATE project_media
SET metadata = $2
WHERE id = $1
RETURNING
id,
project_id,
display_order,
media_type as "media_type: MediaType",
original_filename,
r2_base_path,
variants,
width,
height,
size_bytes,
blurhash,
metadata,
created_at
"#,
id,
metadata
)
.fetch_one(pool)
.await
}
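The field removals above ripple into every query in this file because sqlx::query_as! checks at compile time that the selected columns map one-to-one onto the struct's fields; trimming DbProjectMedia forces each SELECT/RETURNING list to shrink in lockstep. A sketch of that coupling against a hypothetical notes table (compiles only with a matching database or prepared sqlx offline data):

use sqlx::PgPool;
use uuid::Uuid;

// Hypothetical trimmed row type; every column selected into it must
// correspond to a field here, and vice versa, verified at compile time.
struct DbNote {
    id: Uuid,
    body: String,
    // `created_at` dropped from the struct => it must also leave every
    // SELECT/RETURNING list that maps into DbNote, as in this commit.
}

async fn get_note(pool: &PgPool, id: Uuid) -> Result<Option<DbNote>, sqlx::Error> {
    sqlx::query_as!(
        DbNote,
        r#"
        SELECT id, body
        FROM notes
        WHERE id = $1
        "#,
        id
    )
    .fetch_optional(pool)
    .await
}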
+10 -48
@@ -24,7 +24,6 @@ pub struct DbProject {
pub demo_url: Option<String>,
pub last_github_activity: Option<OffsetDateTime>,
pub created_at: OffsetDateTime,
pub updated_at: OffsetDateTime,
}
// API response types
@@ -168,8 +167,7 @@ pub async fn get_public_projects(pool: &PgPool) -> Result<Vec<DbProject>, sqlx::
github_repo,
demo_url,
last_github_activity,
created_at,
updated_at
created_at
FROM projects
WHERE status != 'hidden'
ORDER BY COALESCE(last_github_activity, created_at) DESC
@@ -209,8 +207,7 @@ pub async fn get_all_projects_admin(pool: &PgPool) -> Result<Vec<DbProject>, sql
github_repo,
demo_url,
last_github_activity,
created_at,
updated_at
created_at
FROM projects
ORDER BY COALESCE(last_github_activity, created_at) DESC
"#
@@ -249,10 +246,8 @@ pub async fn get_project_by_id(pool: &PgPool, id: Uuid) -> Result<Option<DbProje
status as "status: ProjectStatus",
github_repo,
demo_url,
last_github_activity,
created_at,
updated_at
created_at
FROM projects
WHERE id = $1
"#,
@@ -279,37 +274,8 @@ pub async fn get_project_by_id_with_tags(
}
}
/// Get single project by slug
pub async fn get_project_by_slug(
pool: &PgPool,
slug: &str,
) -> Result<Option<DbProject>, sqlx::Error> {
query_as!(
DbProject,
r#"
SELECT
id,
slug,
name,
short_description,
description,
status as "status: ProjectStatus",
github_repo,
demo_url,
last_github_activity,
created_at,
updated_at
FROM projects
WHERE slug = $1
"#,
slug
)
.fetch_optional(pool)
.await
}
/// Create project (without tags - tags handled separately)
#[allow(clippy::too_many_arguments)]
pub async fn create_project(
pool: &PgPool,
name: &str,
@@ -320,9 +286,7 @@ pub async fn create_project(
github_repo: Option<&str>,
demo_url: Option<&str>,
) -> Result<DbProject, sqlx::Error> {
let slug = slug_override
.map(|s| slugify(s))
.unwrap_or_else(|| slugify(name));
let slug = slug_override.map(slugify).unwrap_or_else(|| slugify(name));
query_as!(
DbProject,
@@ -330,7 +294,7 @@ pub async fn create_project(
INSERT INTO projects (slug, name, short_description, description, status, github_repo, demo_url)
VALUES ($1, $2, $3, $4, $5, $6, $7)
RETURNING id, slug, name, short_description, description, status as "status: ProjectStatus",
github_repo, demo_url, last_github_activity, created_at, updated_at
github_repo, demo_url, last_github_activity, created_at
"#,
slug,
name,
@@ -345,6 +309,7 @@ pub async fn create_project(
}
/// Update project (without tags - tags handled separately)
#[allow(clippy::too_many_arguments)]
pub async fn update_project(
pool: &PgPool,
id: Uuid,
@@ -356,9 +321,7 @@ pub async fn update_project(
github_repo: Option<&str>,
demo_url: Option<&str>,
) -> Result<DbProject, sqlx::Error> {
let slug = slug_override
.map(|s| slugify(s))
.unwrap_or_else(|| slugify(name));
let slug = slug_override.map(slugify).unwrap_or_else(|| slugify(name));
query_as!(
DbProject,
@@ -368,7 +331,7 @@ pub async fn update_project(
status = $6, github_repo = $7, demo_url = $8
WHERE id = $1
RETURNING id, slug, name, short_description, description, status as "status: ProjectStatus",
github_repo, demo_url, last_github_activity, created_at, updated_at
github_repo, demo_url, last_github_activity, created_at
"#,
id,
slug,
@@ -447,8 +410,7 @@ pub async fn get_projects_with_github_repo(pool: &PgPool) -> Result<Vec<DbProjec
github_repo,
demo_url,
last_github_activity,
created_at,
updated_at
created_at
FROM projects
WHERE github_repo IS NOT NULL
ORDER BY updated_at DESC
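`.map(|s| slugify(s))` becoming `.map(slugify)` is clippy's redundant_closure: a closure that only forwards its argument can be replaced by the function itself when the signatures line up (here, Option<&str> mapped through fn(&str) -> String). A self-contained sketch with a simplified slugify, not the repo's implementation:

fn slugify(name: &str) -> String {
    name.trim()
        .to_lowercase()
        .split_whitespace()
        .collect::<Vec<_>>()
        .join("-")
}

fn resolve_slug(slug_override: Option<&str>, name: &str) -> String {
    // redundant_closure: `|s| slugify(s)` only forwards its argument,
    // so the function can be passed directly.
    slug_override.map(slugify).unwrap_or_else(|| slugify(name))
}

fn main() {
    assert_eq!(resolve_slug(None, "My Project"), "my-project");
    assert_eq!(resolve_slug(Some("Custom Slug"), "ignored"), "custom-slug");
}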
+4 -10
@@ -1,19 +1,15 @@
use serde::{Deserialize, Serialize};
use sqlx::PgPool;
use time::OffsetDateTime;
use uuid::Uuid;
// Site settings models
#[derive(Debug, Clone, sqlx::FromRow)]
pub struct DbSiteIdentity {
pub id: i32,
pub display_name: String,
pub occupation: String,
pub bio: String,
pub site_title: String,
pub created_at: OffsetDateTime,
pub updated_at: OffsetDateTime,
}
#[derive(Debug, Clone, sqlx::FromRow)]
@@ -25,8 +21,6 @@ pub struct DbSocialLink {
pub icon: String,
pub visible: bool,
pub display_order: i32,
pub created_at: OffsetDateTime,
pub updated_at: OffsetDateTime,
}
// API response types
@@ -119,7 +113,7 @@ pub async fn get_site_settings(pool: &PgPool) -> Result<ApiSiteSettings, sqlx::E
let identity = sqlx::query_as!(
DbSiteIdentity,
r#"
SELECT id, display_name, occupation, bio, site_title, created_at, updated_at
SELECT display_name, occupation, bio, site_title
FROM site_identity
WHERE id = 1
"#
@@ -131,7 +125,7 @@ pub async fn get_site_settings(pool: &PgPool) -> Result<ApiSiteSettings, sqlx::E
let social_links = sqlx::query_as!(
DbSocialLink,
r#"
SELECT id, platform, label, value, icon, visible, display_order, created_at, updated_at
SELECT id, platform, label, value, icon, visible, display_order
FROM social_links
ORDER BY display_order ASC
"#
@@ -155,7 +149,7 @@ pub async fn update_site_identity(
UPDATE site_identity
SET display_name = $1, occupation = $2, bio = $3, site_title = $4
WHERE id = 1
RETURNING id, display_name, occupation, bio, site_title, created_at, updated_at
RETURNING display_name, occupation, bio, site_title
"#,
req.display_name,
req.occupation,
@@ -177,7 +171,7 @@ pub async fn update_social_link(
UPDATE social_links
SET platform = $2, label = $3, value = $4, icon = $5, visible = $6, display_order = $7
WHERE id = $1
RETURNING id, platform, label, value, icon, visible, display_order, created_at, updated_at
RETURNING id, platform, label, value, icon, visible, display_order
"#,
link_id,
req.platform,
+8 -92
@@ -1,6 +1,5 @@
use serde::{Deserialize, Serialize};
use sqlx::PgPool;
use time::OffsetDateTime;
use uuid::Uuid;
use super::slugify;
@@ -13,20 +12,6 @@ pub struct DbTag {
pub name: String,
pub icon: Option<String>,
pub color: Option<String>,
pub created_at: OffsetDateTime,
}
#[derive(Debug, Clone, sqlx::FromRow)]
pub struct DbProjectTag {
pub project_id: Uuid,
pub tag_id: Uuid,
}
#[derive(Debug, Clone, sqlx::FromRow)]
pub struct DbTagCooccurrence {
pub tag_a: Uuid,
pub tag_b: Uuid,
pub count: i32,
}
// API response types
@@ -80,16 +65,14 @@ pub async fn create_tag(
icon: Option<&str>,
color: Option<&str>,
) -> Result<DbTag, sqlx::Error> {
let slug = slug_override
.map(|s| slugify(s))
.unwrap_or_else(|| slugify(name));
let slug = slug_override.map(slugify).unwrap_or_else(|| slugify(name));
sqlx::query_as!(
DbTag,
r#"
INSERT INTO tags (slug, name, icon, color)
VALUES ($1, $2, $3, $4)
RETURNING id, slug, name, icon, color, created_at
RETURNING id, slug, name, icon, color
"#,
slug,
name,
@@ -100,25 +83,11 @@ pub async fn create_tag(
.await
}
pub async fn get_tag_by_id(pool: &PgPool, id: Uuid) -> Result<Option<DbTag>, sqlx::Error> {
sqlx::query_as!(
DbTag,
r#"
SELECT id, slug, name, icon, color, created_at
FROM tags
WHERE id = $1
"#,
id
)
.fetch_optional(pool)
.await
}
pub async fn get_tag_by_slug(pool: &PgPool, slug: &str) -> Result<Option<DbTag>, sqlx::Error> {
sqlx::query_as!(
DbTag,
r#"
SELECT id, slug, name, icon, color, created_at
SELECT id, slug, name, icon, color
FROM tags
WHERE slug = $1
"#,
@@ -128,19 +97,6 @@ pub async fn get_tag_by_slug(pool: &PgPool, slug: &str) -> Result<Option<DbTag>,
.await
}
pub async fn get_all_tags(pool: &PgPool) -> Result<Vec<DbTag>, sqlx::Error> {
sqlx::query_as!(
DbTag,
r#"
SELECT id, slug, name, icon, color, created_at
FROM tags
ORDER BY name ASC
"#
)
.fetch_all(pool)
.await
}
pub async fn get_all_tags_with_counts(pool: &PgPool) -> Result<Vec<(DbTag, i32)>, sqlx::Error> {
let rows = sqlx::query!(
r#"
@@ -150,11 +106,10 @@ pub async fn get_all_tags_with_counts(pool: &PgPool) -> Result<Vec<(DbTag, i32)>
t.name,
t.icon,
t.color,
t.created_at,
COUNT(pt.project_id)::int as "project_count!"
FROM tags t
LEFT JOIN project_tags pt ON t.id = pt.tag_id
GROUP BY t.id, t.slug, t.name, t.icon, t.color, t.created_at
GROUP BY t.id, t.slug, t.name, t.icon, t.color
ORDER BY t.name ASC
"#
)
@@ -170,7 +125,6 @@ pub async fn get_all_tags_with_counts(pool: &PgPool) -> Result<Vec<(DbTag, i32)>
name: row.name,
icon: row.icon,
color: row.color,
created_at: row.created_at,
};
(tag, row.project_count)
})
@@ -185,9 +139,7 @@ pub async fn update_tag(
icon: Option<&str>,
color: Option<&str>,
) -> Result<DbTag, sqlx::Error> {
let slug = slug_override
.map(|s| slugify(s))
.unwrap_or_else(|| slugify(name));
let slug = slug_override.map(slugify).unwrap_or_else(|| slugify(name));
sqlx::query_as!(
DbTag,
@@ -195,7 +147,7 @@ pub async fn update_tag(
UPDATE tags
SET slug = $2, name = $3, icon = $4, color = $5
WHERE id = $1
RETURNING id, slug, name, icon, color, created_at
RETURNING id, slug, name, icon, color
"#,
id,
slug,
@@ -207,39 +159,6 @@ pub async fn update_tag(
.await
}
pub async fn delete_tag(pool: &PgPool, id: Uuid) -> Result<(), sqlx::Error> {
sqlx::query!("DELETE FROM tags WHERE id = $1", id)
.execute(pool)
.await?;
Ok(())
}
pub async fn tag_exists_by_name(pool: &PgPool, name: &str) -> Result<bool, sqlx::Error> {
let result = sqlx::query!(
r#"
SELECT EXISTS(SELECT 1 FROM tags WHERE LOWER(name) = LOWER($1)) as "exists!"
"#,
name
)
.fetch_one(pool)
.await?;
Ok(result.exists)
}
pub async fn tag_exists_by_slug(pool: &PgPool, slug: &str) -> Result<bool, sqlx::Error> {
let result = sqlx::query!(
r#"
SELECT EXISTS(SELECT 1 FROM tags WHERE slug = $1) as "exists!"
"#,
slug
)
.fetch_one(pool)
.await?;
Ok(result.exists)
}
// Project-Tag association queries
pub async fn add_tag_to_project(
@@ -283,7 +202,7 @@ pub async fn get_tags_for_project(
sqlx::query_as!(
DbTag,
r#"
SELECT t.id, t.slug, t.name, t.icon, t.color, t.created_at
SELECT t.id, t.slug, t.name, t.icon, t.color
FROM tags t
JOIN project_tags pt ON t.id = pt.tag_id
WHERE pt.project_id = $1
@@ -312,8 +231,7 @@ pub async fn get_projects_for_tag(
p.github_repo,
p.demo_url,
p.last_github_activity,
p.created_at,
p.updated_at
p.created_at
FROM projects p
JOIN project_tags pt ON p.id = pt.project_id
WHERE pt.tag_id = $1
@@ -404,7 +322,6 @@ pub async fn get_related_tags(
t.name,
t.icon,
t.color,
t.created_at,
tc.count
FROM tag_cooccurrence tc
JOIN tags t ON (tc.tag_a = t.id OR tc.tag_b = t.id)
@@ -427,7 +344,6 @@ pub async fn get_related_tags(
name: row.name,
icon: row.icon,
color: row.color,
created_at: row.created_at,
};
(tag, row.count)
})
+1 -1
@@ -350,7 +350,7 @@ pub async fn sync_github_activity(pool: &PgPool) -> Result<SyncStats, Box<dyn st
// Only update if newer than current value
let should_update = project
.last_github_activity
.map_or(true, |current| activity_time > current);
.is_none_or(|current| activity_time > current);
if should_update {
if let Err(e) = crate::db::projects::update_last_github_activity(
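The `map_or(true, …)` to `is_none_or(…)` change is the rewrite clippy suggests for this shape (Option::is_none_or has been stable since Rust 1.82); the two are equivalent, and the new name reads as the intent: update when nothing is stored yet, or when the incoming activity is newer. A small sketch:

use std::time::{Duration, SystemTime};

fn should_update(current: Option<SystemTime>, incoming: SystemTime) -> bool {
    // Before (flagged by clippy):
    // current.map_or(true, |c| incoming > c)

    // After: reads as "current is none, or the predicate holds".
    current.is_none_or(|c| incoming > c)
}

fn main() {
    let now = SystemTime::now();
    let earlier = now - Duration::from_secs(60);
    assert!(should_update(None, now));           // nothing stored yet
    assert!(should_update(Some(earlier), now));  // incoming is newer
    assert!(!should_update(Some(now), earlier)); // stored value wins
}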
+6 -8
@@ -85,14 +85,12 @@ pub async fn proxy_icons_handler(
// Build trusted headers with session info
let mut forward_headers = HeaderMap::new();
if let Some(cookie) = jar.get("admin_session") {
if let Ok(session_id) = ulid::Ulid::from_string(cookie.value()) {
if let Some(session) = state.session_manager.validate_session(session_id) {
if let Ok(username_value) = axum::http::HeaderValue::from_str(&session.username) {
forward_headers.insert("x-session-user", username_value);
}
}
}
if let Some(cookie) = jar.get("admin_session")
&& let Ok(session_id) = ulid::Ulid::from_string(cookie.value())
&& let Some(session) = state.session_manager.validate_session(session_id)
&& let Ok(username_value) = axum::http::HeaderValue::from_str(&session.username)
{
forward_headers.insert("x-session-user", username_value);
}
match proxy::proxy_to_bun(&path_with_query, state, forward_headers).await {
+5 -6
@@ -121,12 +121,11 @@ pub async fn api_logout_handler(
State(state): State<Arc<AppState>>,
jar: axum_extra::extract::CookieJar,
) -> (axum_extra::extract::CookieJar, StatusCode) {
if let Some(cookie) = jar.get("admin_session") {
if let Ok(session_id) = ulid::Ulid::from_string(cookie.value()) {
if let Err(e) = state.session_manager.delete_session(session_id).await {
tracing::error!(error = %e, "Failed to delete session during logout");
}
}
if let Some(cookie) = jar.get("admin_session")
&& let Ok(session_id) = ulid::Ulid::from_string(cookie.value())
&& let Err(e) = state.session_manager.delete_session(session_id).await
{
tracing::error!(error = %e, "Failed to delete session during logout");
}
let cookie = axum_extra::extract::cookie::Cookie::build(("admin_session", ""))
+22 -22
View File
@@ -56,17 +56,17 @@ pub async fn create_tag_handler(
}
// Validate color if provided
if let Some(ref color) = payload.color {
if !utils::validate_hex_color(color) {
return (
StatusCode::BAD_REQUEST,
Json(serde_json::json!({
"error": "Validation error",
"message": "Invalid color format. Must be 6-character hex (e.g., '3b82f6')"
})),
)
.into_response();
}
if let Some(ref color) = payload.color
&& !utils::validate_hex_color(color)
{
return (
StatusCode::BAD_REQUEST,
Json(serde_json::json!({
"error": "Validation error",
"message": "Invalid color format. Must be 6-character hex (e.g., '3b82f6')"
})),
)
.into_response();
}
match db::create_tag(
@@ -176,17 +176,17 @@ pub async fn update_tag_handler(
}
// Validate color if provided
if let Some(ref color) = payload.color {
if !utils::validate_hex_color(color) {
return (
StatusCode::BAD_REQUEST,
Json(serde_json::json!({
"error": "Validation error",
"message": "Invalid color format. Must be 6-character hex (e.g., '3b82f6')"
})),
)
.into_response();
}
if let Some(ref color) = payload.color
&& !utils::validate_hex_color(color)
{
return (
StatusCode::BAD_REQUEST,
Json(serde_json::json!({
"error": "Validation error",
"message": "Invalid color format. Must be 6-character hex (e.g., '3b82f6')"
})),
)
.into_response();
}
let tag = match db::get_tag_by_slug(&state.pool, &slug).await {
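For reference, `utils::validate_hex_color` itself is not part of this diff; a plausible implementation, consistent with the CLI-side check earlier in the commit (exactly 6 ASCII hex digits, no leading '#'), would be:

// Hypothetical sketch of the validate_hex_color helper used above;
// the real utils::validate_hex_color is not shown in this commit.
fn validate_hex_color(color: &str) -> bool {
    color.len() == 6 && color.chars().all(|ch| ch.is_ascii_hexdigit())
}

fn main() {
    assert!(validate_hex_color("3b82f6"));
    assert!(!validate_hex_color("#3b82f6")); // leading '#' is rejected
    assert!(!validate_hex_color("3b8"));
}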
-5
@@ -1,4 +1,3 @@
use reqwest::Method;
use std::path::PathBuf;
use std::time::Duration;
use thiserror::Error;
@@ -86,10 +85,6 @@ impl HttpClient {
pub fn post(&self, path: &str) -> reqwest::RequestBuilder {
self.client.post(self.build_url(path))
}
pub fn request(&self, method: Method, path: &str) -> reqwest::RequestBuilder {
self.client.request(method, self.build_url(path))
}
}
#[cfg(test)]
+1 -1
@@ -93,7 +93,7 @@ async fn main() {
}
Some(Command::Api(api_args)) => {
// API client commands - no tracing needed
if let Err(e) = cli::api::run(api_args).await {
if let Err(e) = cli::api::run(*api_args).await {
eprintln!("Error: {}", e);
std::process::exit(1);
}
-3
@@ -10,9 +10,6 @@ pub const THUMB_WIDTH: u32 = 300;
pub const MEDIUM_WIDTH: u32 = 800;
pub const FULL_WIDTH: u32 = 1600;
/// Quality setting for WebP encoding (0-100)
const WEBP_QUALITY: u8 = 85;
/// Result of processing an uploaded image
#[derive(Debug)]
pub struct ProcessedImage {
+50 -55
@@ -84,12 +84,12 @@ pub async fn isr_handler(State(state): State<Arc<AppState>>, req: Request) -> Re
}
// Check if this is a static asset that exists in embedded CLIENT_ASSETS
if utils::is_static_asset(path) {
if let Some(response) = assets::try_serve_embedded_asset(path) {
return response;
}
// If not found in embedded assets, continue to proxy (might be in Bun's static dir)
if utils::is_static_asset(path)
&& let Some(response) = assets::try_serve_embedded_asset(path)
{
return response;
}
// If not found in embedded assets, continue to proxy (might be in Bun's static dir)
// Check if this is a prerendered page
if let Some(response) = assets::try_serve_prerendered_page(path) {
@@ -104,39 +104,36 @@ pub async fn isr_handler(State(state): State<Arc<AppState>>, req: Request) -> Re
let mut is_authenticated = false;
// Forward request ID to Bun (set by RequestIdLayer)
if let Some(request_id) = req.extensions().get::<crate::middleware::RequestId>() {
if let Ok(header_value) = axum::http::HeaderValue::from_str(&request_id.0) {
forward_headers.insert("x-request-id", header_value);
}
if let Some(request_id) = req.extensions().get::<crate::middleware::RequestId>()
&& let Ok(header_value) = axum::http::HeaderValue::from_str(&request_id.0)
{
forward_headers.insert("x-request-id", header_value);
}
// SECURITY: Strip any X-Session-User header from incoming request to prevent spoofing
// Extract and validate session from cookie
if let Some(cookie_header) = req.headers().get(axum::http::header::COOKIE) {
if let Ok(cookie_str) = cookie_header.to_str() {
// Parse cookies manually to find admin_session
for cookie_pair in cookie_str.split(';') {
let cookie_pair = cookie_pair.trim();
if let Some((name, value)) = cookie_pair.split_once('=') {
if name == "admin_session" {
// Found session cookie, validate it
if let Ok(session_id) = ulid::Ulid::from_string(value) {
if let Some(session) =
state.session_manager.validate_session(session_id)
{
// Session is valid - add trusted header
if let Ok(username_value) =
axum::http::HeaderValue::from_str(&session.username)
{
forward_headers.insert("x-session-user", username_value);
is_authenticated = true;
}
}
}
break;
if let Some(cookie_header) = req.headers().get(axum::http::header::COOKIE)
&& let Ok(cookie_str) = cookie_header.to_str()
{
// Parse cookies manually to find admin_session
for cookie_pair in cookie_str.split(';') {
let cookie_pair = cookie_pair.trim();
if let Some((name, value)) = cookie_pair.split_once('=')
&& name == "admin_session"
{
// Found session cookie, validate it
if let Ok(session_id) = ulid::Ulid::from_string(value)
&& let Some(session) = state.session_manager.validate_session(session_id)
{
// Session is valid - add trusted header
if let Ok(username_value) = axum::http::HeaderValue::from_str(&session.username)
{
forward_headers.insert("x-session-user", username_value);
is_authenticated = true;
}
}
break;
}
}
}
@@ -146,35 +143,33 @@ pub async fn isr_handler(State(state): State<Arc<AppState>>, req: Request) -> Re
let use_cache = !is_authenticated && cache::is_cacheable_path(path);
// Try to serve from cache for public requests
if use_cache {
if let Some(cached) = state.isr_cache.get(&path_with_query).await {
let fresh_duration = state.isr_cache.config.fresh_duration;
let stale_duration = state.isr_cache.config.stale_duration;
if use_cache && let Some(cached) = state.isr_cache.get(&path_with_query).await {
let fresh_duration = state.isr_cache.config.fresh_duration;
let stale_duration = state.isr_cache.config.stale_duration;
if cached.is_fresh(fresh_duration) {
// Fresh cache hit - serve immediately
let age_ms = cached.age().as_millis() as u64;
tracing::debug!(cache = "hit", age_ms, "ISR cache hit (fresh)");
if cached.is_fresh(fresh_duration) {
// Fresh cache hit - serve immediately
let age_ms = cached.age().as_millis() as u64;
tracing::debug!(cache = "hit", age_ms, "ISR cache hit (fresh)");
return serve_cached_response(&cached, is_head);
} else if cached.is_stale_but_usable(fresh_duration, stale_duration) {
// Stale cache hit - serve immediately and refresh in background
let age_ms = cached.age().as_millis() as u64;
tracing::debug!(cache = "stale", age_ms, "ISR cache hit (stale, refreshing)");
return serve_cached_response(&cached, is_head);
} else if cached.is_stale_but_usable(fresh_duration, stale_duration) {
// Stale cache hit - serve immediately and refresh in background
let age_ms = cached.age().as_millis() as u64;
tracing::debug!(cache = "stale", age_ms, "ISR cache hit (stale, refreshing)");
// Spawn background refresh if not already refreshing
if state.isr_cache.start_refresh(&path_with_query) {
let state_clone = state.clone();
let path_clone = path_with_query.clone();
tokio::spawn(async move {
refresh_cache_entry(state_clone, path_clone).await;
});
}
return serve_cached_response(&cached, is_head);
// Spawn background refresh if not already refreshing
if state.isr_cache.start_refresh(&path_with_query) {
let state_clone = state.clone();
let path_clone = path_with_query.clone();
tokio::spawn(async move {
refresh_cache_entry(state_clone, path_clone).await;
});
}
// Cache entry is too old - fall through to fetch
return serve_cached_response(&cached, is_head);
}
// Cache entry is too old - fall through to fetch
}
// Cache miss or non-cacheable - fetch from Bun
-21
@@ -57,27 +57,6 @@ impl R2Client {
.cloned()
}
pub async fn get_object(&self, key: &str) -> Result<Vec<u8>, String> {
let result = self
.client
.get_object()
.bucket(&self.bucket)
.key(key)
.send()
.await
.map_err(|e| format!("Failed to get object from R2: {e}"))?;
let bytes = result
.body
.collect()
.await
.map_err(|e| format!("Failed to read object body: {e}"))?
.into_bytes()
.to_vec();
Ok(bytes)
}
pub async fn put_object(
&self,
key: &str,
-2
@@ -20,14 +20,12 @@ pub struct AppState {
#[derive(Debug)]
pub enum ProxyError {
Network(reqwest::Error),
Other(String),
}
impl std::fmt::Display for ProxyError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
ProxyError::Network(e) => write!(f, "Network error: {e}"),
ProxyError::Other(s) => write!(f, "{s}"),
}
}
}
+27 -27
@@ -34,10 +34,10 @@ pub fn is_page_route(path: &str) -> bool {
/// Check if the request accepts HTML responses
pub fn accepts_html(headers: &HeaderMap) -> bool {
if let Some(accept) = headers.get(header::ACCEPT) {
if let Ok(accept_str) = accept.to_str() {
return accept_str.contains("text/html") || accept_str.contains("*/*");
}
if let Some(accept) = headers.get(header::ACCEPT)
&& let Ok(accept_str) = accept.to_str()
{
return accept_str.contains("text/html") || accept_str.contains("*/*");
}
// Default to true for requests without Accept header (browsers typically send it)
true
@@ -46,34 +46,34 @@ pub fn accepts_html(headers: &HeaderMap) -> bool {
/// Determines if request prefers raw content (CLI tools) over HTML
pub fn prefers_raw_content(headers: &HeaderMap) -> bool {
// Check User-Agent for known CLI tools first (most reliable)
if let Some(ua) = headers.get(header::USER_AGENT) {
if let Ok(ua_str) = ua.to_str() {
let ua_lower = ua_str.to_lowercase();
if ua_lower.starts_with("curl/")
|| ua_lower.starts_with("wget/")
|| ua_lower.starts_with("httpie/")
|| ua_lower.contains("curlie")
{
return true;
}
if let Some(ua) = headers.get(header::USER_AGENT)
&& let Ok(ua_str) = ua.to_str()
{
let ua_lower = ua_str.to_lowercase();
if ua_lower.starts_with("curl/")
|| ua_lower.starts_with("wget/")
|| ua_lower.starts_with("httpie/")
|| ua_lower.contains("curlie")
{
return true;
}
}
// Check Accept header - if it explicitly prefers text/html, serve HTML
if let Some(accept) = headers.get(header::ACCEPT) {
if let Ok(accept_str) = accept.to_str() {
// If text/html appears before */* in the list, they prefer HTML
if let Some(html_pos) = accept_str.find("text/html") {
if let Some(wildcard_pos) = accept_str.find("*/*") {
return html_pos > wildcard_pos;
}
// Has text/html but no */* → prefers HTML
return false;
}
// Has */* but no text/html → probably a CLI tool
if accept_str.contains("*/*") && !accept_str.contains("text/html") {
return true;
if let Some(accept) = headers.get(header::ACCEPT)
&& let Ok(accept_str) = accept.to_str()
{
// If text/html appears before */* in the list, they prefer HTML
if let Some(html_pos) = accept_str.find("text/html") {
if let Some(wildcard_pos) = accept_str.find("*/*") {
return html_pos > wildcard_pos;
}
// Has text/html but no */* → prefers HTML
return false;
}
// Has */* but no text/html → probably a CLI tool
if accept_str.contains("*/*") && !accept_str.contains("text/html") {
return true;
}
}
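The Accept-header logic above is behavior-preserving; restated as a standalone helper for illustration only (the real prefers_raw_content also checks User-Agent first, and the tail of this hunk is truncated here):

fn prefers_raw_by_accept(accept: &str) -> Option<bool> {
    if let Some(html_pos) = accept.find("text/html") {
        if let Some(wildcard_pos) = accept.find("*/*") {
            // text/html listed before */* => the client prefers HTML.
            return Some(html_pos > wildcard_pos);
        }
        return Some(false); // has text/html but no */* => prefers HTML
    }
    if accept.contains("*/*") {
        return Some(true); // */* without text/html => probably a CLI tool
    }
    None // no signal from the Accept header
}

fn main() {
    // A browser: text/html comes first.
    assert_eq!(prefers_raw_by_accept("text/html,*/*;q=0.8"), Some(false));
    // curl's default: bare */*.
    assert_eq!(prefers_raw_by_accept("*/*"), Some(true));
    assert_eq!(prefers_raw_by_accept("application/json"), None);
}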