diff --git a/src/auth.rs b/src/auth.rs index 4842324..d827792 100644 --- a/src/auth.rs +++ b/src/auth.rs @@ -241,3 +241,26 @@ pub async fn ensure_admin_user(pool: &PgPool) -> Result<(), Box Option { + let session_cookie = jar.get("admin_session")?; + let session_id = ulid::Ulid::from_string(session_cookie.value()).ok()?; + state.session_manager.validate_session(session_id) +} + +/// Return a 401 Unauthorized response for API endpoints +pub fn require_auth_response() -> impl axum::response::IntoResponse { + use axum::{Json, http::StatusCode}; + + ( + StatusCode::UNAUTHORIZED, + Json(serde_json::json!({ + "error": "Unauthorized", + "message": "Authentication required" + })), + ) +} diff --git a/src/db.rs b/src/db.rs deleted file mode 100644 index e566be2..0000000 --- a/src/db.rs +++ /dev/null @@ -1,1122 +0,0 @@ -use serde::{Deserialize, Serialize}; -use sqlx::{PgPool, postgres::PgPoolOptions}; -use time::OffsetDateTime; -use uuid::Uuid; - -// Database types -#[derive(Debug, Clone, Copy, PartialEq, Eq, sqlx::Type, Serialize, Deserialize)] -#[sqlx(type_name = "project_status", rename_all = "lowercase")] -pub enum ProjectStatus { - Active, - Maintained, - Archived, - Hidden, -} - -// Database model -#[derive(Debug, Clone, sqlx::FromRow)] -#[allow(dead_code)] -pub struct DbProject { - pub id: Uuid, - pub slug: String, - pub name: String, - pub short_description: String, - pub description: String, - pub status: ProjectStatus, - pub github_repo: Option, - pub demo_url: Option, - pub last_github_activity: Option, - pub created_at: OffsetDateTime, - pub updated_at: OffsetDateTime, -} - -// Tag database models -#[derive(Debug, Clone, sqlx::FromRow)] -pub struct DbTag { - pub id: Uuid, - pub slug: String, - pub name: String, - pub icon: Option, - pub color: Option, - pub created_at: OffsetDateTime, -} - -#[derive(Debug, Clone, sqlx::FromRow)] -pub struct DbProjectTag { - pub project_id: Uuid, - pub tag_id: Uuid, -} - -#[derive(Debug, Clone, sqlx::FromRow)] -pub struct DbTagCooccurrence { - pub tag_a: Uuid, - pub tag_b: Uuid, - pub count: i32, -} - -// API response types -#[derive(Debug, Clone, Serialize, Deserialize)] -pub struct ApiProjectLink { - pub url: String, - #[serde(skip_serializing_if = "Option::is_none")] - pub title: Option, -} - -#[derive(Debug, Clone, Serialize, Deserialize)] -pub struct ApiProject { - pub id: String, - pub slug: String, - pub name: String, - #[serde(rename = "shortDescription")] - pub short_description: String, - pub links: Vec, -} - -#[derive(Debug, Clone, Serialize, Deserialize)] -pub struct ApiTag { - pub id: String, - pub slug: String, - pub name: String, - #[serde(skip_serializing_if = "Option::is_none")] - pub icon: Option, - #[serde(skip_serializing_if = "Option::is_none")] - pub color: Option, -} - -#[derive(Debug, Clone, Serialize, Deserialize)] -pub struct ApiProjectWithTags { - #[serde(flatten)] - pub project: ApiProject, - pub tags: Vec, -} - -#[derive(Debug, Clone, Serialize, Deserialize)] -pub struct ApiTagWithCount { - #[serde(flatten)] - pub tag: ApiTag, - pub project_count: i32, -} - -#[derive(Debug, Clone, Serialize, Deserialize)] -pub struct ApiRelatedTag { - #[serde(flatten)] - pub tag: ApiTag, - pub cooccurrence_count: i32, -} - -impl DbTag { - /// Convert database tag to API response format - pub fn to_api_tag(&self) -> ApiTag { - ApiTag { - id: self.id.to_string(), - slug: self.slug.clone(), - name: self.name.clone(), - icon: self.icon.clone(), - color: self.color.clone(), - } - } -} - -impl DbProject { - /// Convert database project to API 
response format - pub fn to_api_project(&self) -> ApiProject { - let mut links = Vec::new(); - - if let Some(ref repo) = self.github_repo { - links.push(ApiProjectLink { - url: format!("https://github.com/{}", repo), - title: Some("GitHub".to_string()), - }); - } - - if let Some(ref demo) = self.demo_url { - links.push(ApiProjectLink { - url: demo.clone(), - title: Some("Demo".to_string()), - }); - } - - ApiProject { - id: self.id.to_string(), - slug: self.slug.clone(), - name: self.name.clone(), - short_description: self.short_description.clone(), - links, - } - } -} - -// Connection pool creation -pub async fn create_pool(database_url: &str) -> Result { - PgPoolOptions::new() - .max_connections(20) - .acquire_timeout(std::time::Duration::from_secs(3)) - .connect(database_url) - .await -} - -// Queries -pub async fn get_public_projects(pool: &PgPool) -> Result, sqlx::Error> { - sqlx::query_as!( - DbProject, - r#" - SELECT - id, - slug, - name, - short_description, - description, - status as "status: ProjectStatus", - github_repo, - demo_url, - last_github_activity, - created_at, - updated_at - FROM projects - WHERE status != 'hidden' - ORDER BY updated_at DESC - "# - ) - .fetch_all(pool) - .await -} - -pub async fn get_public_projects_with_tags( - pool: &PgPool, -) -> Result)>, sqlx::Error> { - let projects = get_public_projects(pool).await?; - - let mut result = Vec::new(); - for project in projects { - let tags = get_tags_for_project(pool, project.id).await?; - result.push((project, tags)); - } - - Ok(result) -} - -pub async fn health_check(pool: &PgPool) -> Result<(), sqlx::Error> { - sqlx::query!("SELECT 1 as check") - .fetch_one(pool) - .await - .map(|_| ()) -} - -// Helper function: slugify text -pub fn slugify(text: &str) -> String { - text.to_lowercase() - .chars() - .map(|c| { - if c.is_alphanumeric() { - c - } else if c.is_whitespace() || c == '-' || c == '_' { - '-' - } else { - '\0' - } - }) - .collect::() - .split('-') - .filter(|s| !s.is_empty()) - .collect::>() - .join("-") -} - -// Tag CRUD queries - -pub async fn create_tag( - pool: &PgPool, - name: &str, - slug_override: Option<&str>, - icon: Option<&str>, - color: Option<&str>, -) -> Result { - let slug = slug_override - .map(|s| slugify(s)) - .unwrap_or_else(|| slugify(name)); - - sqlx::query_as!( - DbTag, - r#" - INSERT INTO tags (slug, name, icon, color) - VALUES ($1, $2, $3, $4) - RETURNING id, slug, name, icon, color, created_at - "#, - slug, - name, - icon, - color - ) - .fetch_one(pool) - .await -} - -pub async fn get_tag_by_id(pool: &PgPool, id: Uuid) -> Result, sqlx::Error> { - sqlx::query_as!( - DbTag, - r#" - SELECT id, slug, name, icon, color, created_at - FROM tags - WHERE id = $1 - "#, - id - ) - .fetch_optional(pool) - .await -} - -pub async fn get_tag_by_slug(pool: &PgPool, slug: &str) -> Result, sqlx::Error> { - sqlx::query_as!( - DbTag, - r#" - SELECT id, slug, name, icon, color, created_at - FROM tags - WHERE slug = $1 - "#, - slug - ) - .fetch_optional(pool) - .await -} - -pub async fn get_all_tags(pool: &PgPool) -> Result, sqlx::Error> { - sqlx::query_as!( - DbTag, - r#" - SELECT id, slug, name, icon, color, created_at - FROM tags - ORDER BY name ASC - "# - ) - .fetch_all(pool) - .await -} - -pub async fn get_all_tags_with_counts(pool: &PgPool) -> Result, sqlx::Error> { - let rows = sqlx::query!( - r#" - SELECT - t.id, - t.slug, - t.name, - t.icon, - t.color, - t.created_at, - COUNT(pt.project_id)::int as "project_count!" 
- FROM tags t - LEFT JOIN project_tags pt ON t.id = pt.tag_id - GROUP BY t.id, t.slug, t.name, t.icon, t.color, t.created_at - ORDER BY t.name ASC - "# - ) - .fetch_all(pool) - .await?; - - Ok(rows - .into_iter() - .map(|row| { - let tag = DbTag { - id: row.id, - slug: row.slug, - name: row.name, - icon: row.icon, - color: row.color, - created_at: row.created_at, - }; - (tag, row.project_count) - }) - .collect()) -} - -pub async fn update_tag( - pool: &PgPool, - id: Uuid, - name: &str, - slug_override: Option<&str>, - icon: Option<&str>, - color: Option<&str>, -) -> Result { - let slug = slug_override - .map(|s| slugify(s)) - .unwrap_or_else(|| slugify(name)); - - sqlx::query_as!( - DbTag, - r#" - UPDATE tags - SET slug = $2, name = $3, icon = $4, color = $5 - WHERE id = $1 - RETURNING id, slug, name, icon, color, created_at - "#, - id, - slug, - name, - icon, - color - ) - .fetch_one(pool) - .await -} - -pub async fn delete_tag(pool: &PgPool, id: Uuid) -> Result<(), sqlx::Error> { - sqlx::query!("DELETE FROM tags WHERE id = $1", id) - .execute(pool) - .await?; - Ok(()) -} - -pub async fn tag_exists_by_name(pool: &PgPool, name: &str) -> Result { - let result = sqlx::query!( - r#" - SELECT EXISTS(SELECT 1 FROM tags WHERE LOWER(name) = LOWER($1)) as "exists!" - "#, - name - ) - .fetch_one(pool) - .await?; - - Ok(result.exists) -} - -pub async fn tag_exists_by_slug(pool: &PgPool, slug: &str) -> Result { - let result = sqlx::query!( - r#" - SELECT EXISTS(SELECT 1 FROM tags WHERE slug = $1) as "exists!" - "#, - slug - ) - .fetch_one(pool) - .await?; - - Ok(result.exists) -} - -// Project-Tag association queries - -pub async fn add_tag_to_project( - pool: &PgPool, - project_id: Uuid, - tag_id: Uuid, -) -> Result<(), sqlx::Error> { - sqlx::query!( - r#" - INSERT INTO project_tags (project_id, tag_id) - VALUES ($1, $2) - ON CONFLICT (project_id, tag_id) DO NOTHING - "#, - project_id, - tag_id - ) - .execute(pool) - .await?; - Ok(()) -} - -pub async fn remove_tag_from_project( - pool: &PgPool, - project_id: Uuid, - tag_id: Uuid, -) -> Result<(), sqlx::Error> { - sqlx::query!( - "DELETE FROM project_tags WHERE project_id = $1 AND tag_id = $2", - project_id, - tag_id - ) - .execute(pool) - .await?; - Ok(()) -} - -pub async fn get_tags_for_project( - pool: &PgPool, - project_id: Uuid, -) -> Result, sqlx::Error> { - sqlx::query_as!( - DbTag, - r#" - SELECT t.id, t.slug, t.name, t.icon, t.color, t.created_at - FROM tags t - JOIN project_tags pt ON t.id = pt.tag_id - WHERE pt.project_id = $1 - ORDER BY t.name ASC - "#, - project_id - ) - .fetch_all(pool) - .await -} - -pub async fn get_projects_for_tag( - pool: &PgPool, - tag_id: Uuid, -) -> Result, sqlx::Error> { - sqlx::query_as!( - DbProject, - r#" - SELECT - p.id, - p.slug, - p.name, - p.short_description, - p.description, - p.status as "status: ProjectStatus", - p.github_repo, - p.demo_url, - p.last_github_activity, - p.created_at, - p.updated_at - FROM projects p - JOIN project_tags pt ON p.id = pt.project_id - WHERE pt.tag_id = $1 - ORDER BY p.updated_at DESC - "#, - tag_id - ) - .fetch_all(pool) - .await -} - -// Tag cooccurrence queries - -pub async fn recalculate_tag_cooccurrence(pool: &PgPool) -> Result<(), sqlx::Error> { - // Delete existing cooccurrence data - sqlx::query!("DELETE FROM tag_cooccurrence") - .execute(pool) - .await?; - - // Calculate and insert new cooccurrence data - sqlx::query!( - r#" - INSERT INTO tag_cooccurrence (tag_a, tag_b, count) - SELECT - LEAST(t1.tag_id, t2.tag_id) as tag_a, - GREATEST(t1.tag_id, t2.tag_id) as 
tag_b, - COUNT(*)::int as count - FROM project_tags t1 - JOIN project_tags t2 ON t1.project_id = t2.project_id - WHERE t1.tag_id < t2.tag_id - GROUP BY tag_a, tag_b - HAVING COUNT(*) > 0 - "# - ) - .execute(pool) - .await?; - - Ok(()) -} - -pub async fn get_related_tags( - pool: &PgPool, - tag_id: Uuid, - limit: i64, -) -> Result, sqlx::Error> { - let rows = sqlx::query!( - r#" - SELECT - t.id, - t.slug, - t.name, - t.icon, - t.color, - t.created_at, - tc.count - FROM tag_cooccurrence tc - JOIN tags t ON (tc.tag_a = t.id OR tc.tag_b = t.id) - WHERE (tc.tag_a = $1 OR tc.tag_b = $1) AND t.id != $1 - ORDER BY tc.count DESC, t.name ASC - LIMIT $2 - "#, - tag_id, - limit - ) - .fetch_all(pool) - .await?; - - Ok(rows - .into_iter() - .map(|row| { - let tag = DbTag { - id: row.id, - slug: row.slug, - name: row.name, - icon: row.icon, - color: row.color, - created_at: row.created_at, - }; - (tag, row.count) - }) - .collect()) -} - -// Project CRUD request/response types - -#[derive(Debug, Deserialize)] -pub struct CreateProjectRequest { - pub name: String, - pub slug: Option, - pub short_description: String, - pub description: String, - pub status: ProjectStatus, - pub github_repo: Option, - pub demo_url: Option, - pub tag_ids: Vec, // UUID strings -} - -#[derive(Debug, Deserialize)] -pub struct UpdateProjectRequest { - pub name: String, - pub slug: Option, - pub short_description: String, - pub description: String, - pub status: ProjectStatus, - pub github_repo: Option, - pub demo_url: Option, - pub tag_ids: Vec, // UUID strings -} - -// Response type for admin project list/detail (includes tags and metadata) -#[derive(Debug, Clone, Serialize)] -pub struct ApiAdminProject { - #[serde(flatten)] - pub project: ApiProject, - pub tags: Vec, - pub status: String, - pub description: String, - #[serde(skip_serializing_if = "Option::is_none")] - pub github_repo: Option, - #[serde(skip_serializing_if = "Option::is_none")] - pub demo_url: Option, - #[serde(rename = "createdAt")] - pub created_at: String, // ISO 8601 - #[serde(rename = "updatedAt")] - pub updated_at: String, // ISO 8601 - #[serde(rename = "lastGithubActivity", skip_serializing_if = "Option::is_none")] - pub last_github_activity: Option, // ISO 8601 -} - -impl DbProject { - pub fn to_api_admin_project(&self, tags: Vec) -> ApiAdminProject { - ApiAdminProject { - project: self.to_api_project(), - tags: tags.into_iter().map(|t| t.to_api_tag()).collect(), - status: format!("{:?}", self.status).to_lowercase(), - description: self.description.clone(), - github_repo: self.github_repo.clone(), - demo_url: self.demo_url.clone(), - created_at: self - .created_at - .format(&time::format_description::well_known::Rfc3339) - .unwrap(), - updated_at: self - .updated_at - .format(&time::format_description::well_known::Rfc3339) - .unwrap(), - last_github_activity: self.last_github_activity.map(|dt| { - dt.format(&time::format_description::well_known::Rfc3339) - .unwrap() - }), - } - } -} - -// Admin stats response -#[derive(Debug, Serialize)] -pub struct AdminStats { - #[serde(rename = "totalProjects")] - pub total_projects: i32, - #[serde(rename = "projectsByStatus")] - pub projects_by_status: serde_json::Value, - #[serde(rename = "totalTags")] - pub total_tags: i32, -} - -// Project CRUD queries - -/// Get all projects (admin view - includes hidden) -pub async fn get_all_projects_admin(pool: &PgPool) -> Result, sqlx::Error> { - sqlx::query_as!( - DbProject, - r#" - SELECT - id, - slug, - name, - short_description, - description, - status as "status: 
ProjectStatus", - github_repo, - demo_url, - last_github_activity, - created_at, - updated_at - FROM projects - ORDER BY updated_at DESC - "# - ) - .fetch_all(pool) - .await -} - -/// Get all projects with tags (admin view) -pub async fn get_all_projects_with_tags_admin( - pool: &PgPool, -) -> Result)>, sqlx::Error> { - let projects = get_all_projects_admin(pool).await?; - - let mut result = Vec::new(); - for project in projects { - let tags = get_tags_for_project(pool, project.id).await?; - result.push((project, tags)); - } - - Ok(result) -} - -/// Get single project by ID -pub async fn get_project_by_id(pool: &PgPool, id: Uuid) -> Result, sqlx::Error> { - sqlx::query_as!( - DbProject, - r#" - SELECT - id, - slug, - name, - short_description, - description, - status as "status: ProjectStatus", - github_repo, - demo_url, - - last_github_activity, - created_at, - updated_at - FROM projects - WHERE id = $1 - "#, - id - ) - .fetch_optional(pool) - .await -} - -/// Get single project by ID with tags -pub async fn get_project_by_id_with_tags( - pool: &PgPool, - id: Uuid, -) -> Result)>, sqlx::Error> { - let project = get_project_by_id(pool, id).await?; - - match project { - Some(p) => { - let tags = get_tags_for_project(pool, p.id).await?; - Ok(Some((p, tags))) - } - None => Ok(None), - } -} - -/// Get single project by slug -pub async fn get_project_by_slug( - pool: &PgPool, - slug: &str, -) -> Result, sqlx::Error> { - sqlx::query_as!( - DbProject, - r#" - SELECT - id, - slug, - name, - short_description, - description, - status as "status: ProjectStatus", - github_repo, - demo_url, - - last_github_activity, - created_at, - updated_at - FROM projects - WHERE slug = $1 - "#, - slug - ) - .fetch_optional(pool) - .await -} - -/// Create project (without tags - tags handled separately) -pub async fn create_project( - pool: &PgPool, - name: &str, - slug_override: Option<&str>, - short_description: &str, - description: &str, - status: ProjectStatus, - github_repo: Option<&str>, - demo_url: Option<&str>, -) -> Result { - let slug = slug_override - .map(|s| slugify(s)) - .unwrap_or_else(|| slugify(name)); - - sqlx::query_as!( - DbProject, - r#" - INSERT INTO projects (slug, name, short_description, description, status, github_repo, demo_url) - VALUES ($1, $2, $3, $4, $5, $6, $7) - RETURNING id, slug, name, short_description, description, status as "status: ProjectStatus", - github_repo, demo_url, last_github_activity, created_at, updated_at - "#, - slug, - name, - short_description, - description, - status as ProjectStatus, - github_repo, - demo_url - ) - .fetch_one(pool) - .await -} - -/// Update project (without tags - tags handled separately) -pub async fn update_project( - pool: &PgPool, - id: Uuid, - name: &str, - slug_override: Option<&str>, - short_description: &str, - description: &str, - status: ProjectStatus, - github_repo: Option<&str>, - demo_url: Option<&str>, -) -> Result { - let slug = slug_override - .map(|s| slugify(s)) - .unwrap_or_else(|| slugify(name)); - - sqlx::query_as!( - DbProject, - r#" - UPDATE projects - SET slug = $2, name = $3, short_description = $4, description = $5, - status = $6, github_repo = $7, demo_url = $8 - WHERE id = $1 - RETURNING id, slug, name, short_description, description, status as "status: ProjectStatus", - github_repo, demo_url, last_github_activity, created_at, updated_at - "#, - id, - slug, - name, - short_description, - description, - status as ProjectStatus, - github_repo, - demo_url - ) - .fetch_one(pool) - .await -} - -/// Delete project (CASCADE 
will handle tags) -pub async fn delete_project(pool: &PgPool, id: Uuid) -> Result<(), sqlx::Error> { - sqlx::query!("DELETE FROM projects WHERE id = $1", id) - .execute(pool) - .await?; - Ok(()) -} - -/// Set project tags (smart diff implementation) -pub async fn set_project_tags( - pool: &PgPool, - project_id: Uuid, - tag_ids: &[Uuid], -) -> Result<(), sqlx::Error> { - // Get current tags - let current_tags = get_tags_for_project(pool, project_id).await?; - let current_ids: Vec = current_tags.iter().map(|t| t.id).collect(); - - // Find tags to add (in new list but not in current) - let to_add: Vec = tag_ids - .iter() - .filter(|id| !current_ids.contains(id)) - .copied() - .collect(); - - // Find tags to remove (in current but not in new list) - let to_remove: Vec = current_ids - .iter() - .filter(|id| !tag_ids.contains(id)) - .copied() - .collect(); - - // Add new tags - for tag_id in to_add { - add_tag_to_project(pool, project_id, tag_id).await?; - } - - // Remove old tags - for tag_id in to_remove { - remove_tag_from_project(pool, project_id, tag_id).await?; - } - - Ok(()) -} - -/// Get admin stats -pub async fn get_admin_stats(pool: &PgPool) -> Result { - // Get project counts by status - let status_counts = sqlx::query!( - r#" - SELECT - status as "status!: ProjectStatus", - COUNT(*)::int as "count!" - FROM projects - GROUP BY status - "# - ) - .fetch_all(pool) - .await?; - - let mut projects_by_status = serde_json::json!({ - "active": 0, - "maintained": 0, - "archived": 0, - "hidden": 0, - }); - - let mut total_projects = 0; - for row in status_counts { - let status_str = format!("{:?}", row.status).to_lowercase(); - projects_by_status[status_str] = serde_json::json!(row.count); - total_projects += row.count; - } - - // Get total tags - let tag_count = sqlx::query!("SELECT COUNT(*)::int as \"count!\" FROM tags") - .fetch_one(pool) - .await?; - - Ok(AdminStats { - total_projects, - projects_by_status, - total_tags: tag_count.count, - }) -} - -// Site settings models and queries - -#[derive(Debug, Clone, sqlx::FromRow)] -pub struct DbSiteIdentity { - pub id: i32, - pub display_name: String, - pub occupation: String, - pub bio: String, - pub site_title: String, - pub created_at: OffsetDateTime, - pub updated_at: OffsetDateTime, -} - -#[derive(Debug, Clone, sqlx::FromRow)] -pub struct DbSocialLink { - pub id: Uuid, - pub platform: String, - pub label: String, - pub value: String, - pub icon: String, - pub visible: bool, - pub display_order: i32, - pub created_at: OffsetDateTime, - pub updated_at: OffsetDateTime, -} - -// API response types -#[derive(Debug, Clone, Serialize, Deserialize)] -pub struct ApiSiteIdentity { - #[serde(rename = "displayName")] - pub display_name: String, - pub occupation: String, - pub bio: String, - #[serde(rename = "siteTitle")] - pub site_title: String, -} - -#[derive(Debug, Clone, Serialize, Deserialize)] -pub struct ApiSocialLink { - pub id: String, - pub platform: String, - pub label: String, - pub value: String, - pub icon: String, - pub visible: bool, - #[serde(rename = "displayOrder")] - pub display_order: i32, -} - -#[derive(Debug, Clone, Serialize, Deserialize)] -pub struct ApiSiteSettings { - pub identity: ApiSiteIdentity, - #[serde(rename = "socialLinks")] - pub social_links: Vec, -} - -// Request types for updates -#[derive(Debug, Deserialize)] -pub struct UpdateSiteIdentityRequest { - #[serde(rename = "displayName")] - pub display_name: String, - pub occupation: String, - pub bio: String, - #[serde(rename = "siteTitle")] - pub site_title: String, 
-} - -#[derive(Debug, Deserialize)] -pub struct UpdateSocialLinkRequest { - pub id: String, - pub platform: String, - pub label: String, - pub value: String, - pub icon: String, - pub visible: bool, - #[serde(rename = "displayOrder")] - pub display_order: i32, -} - -#[derive(Debug, Deserialize)] -pub struct UpdateSiteSettingsRequest { - pub identity: UpdateSiteIdentityRequest, - #[serde(rename = "socialLinks")] - pub social_links: Vec, -} - -// Conversion implementations -impl DbSiteIdentity { - pub fn to_api(&self) -> ApiSiteIdentity { - ApiSiteIdentity { - display_name: self.display_name.clone(), - occupation: self.occupation.clone(), - bio: self.bio.clone(), - site_title: self.site_title.clone(), - } - } -} - -impl DbSocialLink { - pub fn to_api(&self) -> ApiSocialLink { - ApiSocialLink { - id: self.id.to_string(), - platform: self.platform.clone(), - label: self.label.clone(), - value: self.value.clone(), - icon: self.icon.clone(), - visible: self.visible, - display_order: self.display_order, - } - } -} - -// Query functions -pub async fn get_site_settings(pool: &PgPool) -> Result { - // Get identity (single row) - let identity = sqlx::query_as!( - DbSiteIdentity, - r#" - SELECT id, display_name, occupation, bio, site_title, created_at, updated_at - FROM site_identity - WHERE id = 1 - "# - ) - .fetch_one(pool) - .await?; - - // Get social links (ordered) - let social_links = sqlx::query_as!( - DbSocialLink, - r#" - SELECT id, platform, label, value, icon, visible, display_order, created_at, updated_at - FROM social_links - ORDER BY display_order ASC - "# - ) - .fetch_all(pool) - .await?; - - Ok(ApiSiteSettings { - identity: identity.to_api(), - social_links: social_links.into_iter().map(|sl| sl.to_api()).collect(), - }) -} - -pub async fn update_site_identity( - pool: &PgPool, - req: &UpdateSiteIdentityRequest, -) -> Result { - sqlx::query_as!( - DbSiteIdentity, - r#" - UPDATE site_identity - SET display_name = $1, occupation = $2, bio = $3, site_title = $4 - WHERE id = 1 - RETURNING id, display_name, occupation, bio, site_title, created_at, updated_at - "#, - req.display_name, - req.occupation, - req.bio, - req.site_title - ) - .fetch_one(pool) - .await -} - -pub async fn update_social_link( - pool: &PgPool, - link_id: Uuid, - req: &UpdateSocialLinkRequest, -) -> Result { - sqlx::query_as!( - DbSocialLink, - r#" - UPDATE social_links - SET platform = $2, label = $3, value = $4, icon = $5, visible = $6, display_order = $7 - WHERE id = $1 - RETURNING id, platform, label, value, icon, visible, display_order, created_at, updated_at - "#, - link_id, - req.platform, - req.label, - req.value, - req.icon, - req.visible, - req.display_order - ) - .fetch_one(pool) - .await -} - -pub async fn update_site_settings( - pool: &PgPool, - req: &UpdateSiteSettingsRequest, -) -> Result { - // Update identity - let identity = update_site_identity(pool, &req.identity).await?; - - // Update each social link - let mut updated_links = Vec::new(); - for link_req in &req.social_links { - let link_id = Uuid::parse_str(&link_req.id).map_err(|_| { - sqlx::Error::Decode(Box::new(std::io::Error::new( - std::io::ErrorKind::InvalidData, - "Invalid UUID format", - ))) - })?; - let link = update_social_link(pool, link_id, link_req).await?; - updated_links.push(link); - } - - Ok(ApiSiteSettings { - identity: identity.to_api(), - social_links: updated_links.into_iter().map(|sl| sl.to_api()).collect(), - }) -} diff --git a/src/db/mod.rs b/src/db/mod.rs new file mode 100644 index 0000000..d9ef3cd --- /dev/null +++ 
b/src/db/mod.rs @@ -0,0 +1,57 @@ +pub mod projects; +pub mod settings; +pub mod tags; + +// Re-export all types and functions +pub use projects::*; +pub use settings::*; +pub use tags::*; + +use sqlx::{PgPool, postgres::PgPoolOptions}; + +/// Database connection pool creation +pub async fn create_pool(database_url: &str) -> Result { + PgPoolOptions::new() + .max_connections(20) + .acquire_timeout(std::time::Duration::from_secs(3)) + .connect(database_url) + .await +} + +/// Health check query +pub async fn health_check(pool: &PgPool) -> Result<(), sqlx::Error> { + sqlx::query!("SELECT 1 as check") + .fetch_one(pool) + .await + .map(|_| ()) +} + +/// Slugify text for URL-safe identifiers +pub fn slugify(text: &str) -> String { + text.to_lowercase() + .chars() + .map(|c| { + if c.is_alphanumeric() { + c + } else if c.is_whitespace() || c == '-' || c == '_' { + '-' + } else { + '\0' + } + }) + .collect::() + .split('-') + .filter(|s| !s.is_empty()) + .collect::>() + .join("-") +} + +/// Project status enum +#[derive(Debug, Clone, Copy, PartialEq, Eq, sqlx::Type, serde::Serialize, serde::Deserialize)] +#[sqlx(type_name = "project_status", rename_all = "lowercase")] +pub enum ProjectStatus { + Active, + Maintained, + Archived, + Hidden, +} diff --git a/src/db/projects.rs b/src/db/projects.rs new file mode 100644 index 0000000..e03df1b --- /dev/null +++ b/src/db/projects.rs @@ -0,0 +1,425 @@ +use serde::{Deserialize, Serialize}; +use sqlx::PgPool; +use time::OffsetDateTime; +use uuid::Uuid; + +use super::{ProjectStatus, slugify}; + +// Database model +#[derive(Debug, Clone, sqlx::FromRow)] +pub struct DbProject { + pub id: Uuid, + pub slug: String, + pub name: String, + pub short_description: String, + pub description: String, + pub status: ProjectStatus, + pub github_repo: Option, + pub demo_url: Option, + pub last_github_activity: Option, + pub created_at: OffsetDateTime, + pub updated_at: OffsetDateTime, +} + +// API response types +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct ApiProjectLink { + pub url: String, + #[serde(skip_serializing_if = "Option::is_none")] + pub title: Option, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct ApiProject { + pub id: String, + pub slug: String, + pub name: String, + #[serde(rename = "shortDescription")] + pub short_description: String, + pub links: Vec, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct ApiAdminProject { + #[serde(flatten)] + pub project: ApiProject, + pub tags: Vec, + pub status: String, + pub description: String, + #[serde(skip_serializing_if = "Option::is_none")] + pub github_repo: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub demo_url: Option, + #[serde(rename = "createdAt")] + pub created_at: String, // ISO 8601 + #[serde(rename = "updatedAt")] + pub updated_at: String, // ISO 8601 + #[serde(rename = "lastGithubActivity", skip_serializing_if = "Option::is_none")] + pub last_github_activity: Option, // ISO 8601 +} + +impl DbProject { + /// Convert database project to API response format + pub fn to_api_project(&self) -> ApiProject { + let mut links = Vec::new(); + + if let Some(ref repo) = self.github_repo { + links.push(ApiProjectLink { + url: format!("https://github.com/{}", repo), + title: Some("GitHub".to_string()), + }); + } + + if let Some(ref demo) = self.demo_url { + links.push(ApiProjectLink { + url: demo.clone(), + title: Some("Demo".to_string()), + }); + } + + ApiProject { + id: self.id.to_string(), + slug: self.slug.clone(), + name: self.name.clone(), + 
short_description: self.short_description.clone(), + links, + } + } + + pub fn to_api_admin_project(&self, tags: Vec) -> ApiAdminProject { + ApiAdminProject { + project: self.to_api_project(), + tags: tags.into_iter().map(|t| t.to_api_tag()).collect(), + status: format!("{:?}", self.status).to_lowercase(), + description: self.description.clone(), + github_repo: self.github_repo.clone(), + demo_url: self.demo_url.clone(), + created_at: self + .created_at + .format(&time::format_description::well_known::Rfc3339) + .unwrap(), + updated_at: self + .updated_at + .format(&time::format_description::well_known::Rfc3339) + .unwrap(), + last_github_activity: self.last_github_activity.map(|dt| { + dt.format(&time::format_description::well_known::Rfc3339) + .unwrap() + }), + } + } +} + +// Request types for CRUD operations + +#[derive(Debug, Deserialize)] +pub struct CreateProjectRequest { + pub name: String, + pub slug: Option, + pub short_description: String, + pub description: String, + pub status: ProjectStatus, + pub github_repo: Option, + pub demo_url: Option, + pub tag_ids: Vec, // UUID strings +} + +#[derive(Debug, Deserialize)] +pub struct UpdateProjectRequest { + pub name: String, + pub slug: Option, + pub short_description: String, + pub description: String, + pub status: ProjectStatus, + pub github_repo: Option, + pub demo_url: Option, + pub tag_ids: Vec, // UUID strings +} + +// Admin stats response +#[derive(Debug, Serialize)] +pub struct AdminStats { + #[serde(rename = "totalProjects")] + pub total_projects: i32, + #[serde(rename = "projectsByStatus")] + pub projects_by_status: serde_json::Value, + #[serde(rename = "totalTags")] + pub total_tags: i32, +} + +// Query functions + +pub async fn get_public_projects(pool: &PgPool) -> Result, sqlx::Error> { + sqlx::query_as!( + DbProject, + r#" + SELECT + id, + slug, + name, + short_description, + description, + status as "status: ProjectStatus", + github_repo, + demo_url, + last_github_activity, + created_at, + updated_at + FROM projects + WHERE status != 'hidden' + ORDER BY updated_at DESC + "# + ) + .fetch_all(pool) + .await +} + +pub async fn get_public_projects_with_tags( + pool: &PgPool, +) -> Result)>, sqlx::Error> { + let projects = get_public_projects(pool).await?; + + let mut result = Vec::new(); + for project in projects { + let tags = super::tags::get_tags_for_project(pool, project.id).await?; + result.push((project, tags)); + } + + Ok(result) +} + +/// Get all projects (admin view - includes hidden) +pub async fn get_all_projects_admin(pool: &PgPool) -> Result, sqlx::Error> { + sqlx::query_as!( + DbProject, + r#" + SELECT + id, + slug, + name, + short_description, + description, + status as "status: ProjectStatus", + github_repo, + demo_url, + last_github_activity, + created_at, + updated_at + FROM projects + ORDER BY updated_at DESC + "# + ) + .fetch_all(pool) + .await +} + +/// Get all projects with tags (admin view) +pub async fn get_all_projects_with_tags_admin( + pool: &PgPool, +) -> Result)>, sqlx::Error> { + let projects = get_all_projects_admin(pool).await?; + + let mut result = Vec::new(); + for project in projects { + let tags = super::tags::get_tags_for_project(pool, project.id).await?; + result.push((project, tags)); + } + + Ok(result) +} + +/// Get single project by ID +pub async fn get_project_by_id(pool: &PgPool, id: Uuid) -> Result, sqlx::Error> { + sqlx::query_as!( + DbProject, + r#" + SELECT + id, + slug, + name, + short_description, + description, + status as "status: ProjectStatus", + github_repo, + demo_url, 
+ + last_github_activity, + created_at, + updated_at + FROM projects + WHERE id = $1 + "#, + id + ) + .fetch_optional(pool) + .await +} + +/// Get single project by ID with tags +pub async fn get_project_by_id_with_tags( + pool: &PgPool, + id: Uuid, +) -> Result)>, sqlx::Error> { + let project = get_project_by_id(pool, id).await?; + + match project { + Some(p) => { + let tags = super::tags::get_tags_for_project(pool, p.id).await?; + Ok(Some((p, tags))) + } + None => Ok(None), + } +} + +/// Get single project by slug +pub async fn get_project_by_slug( + pool: &PgPool, + slug: &str, +) -> Result, sqlx::Error> { + sqlx::query_as!( + DbProject, + r#" + SELECT + id, + slug, + name, + short_description, + description, + status as "status: ProjectStatus", + github_repo, + demo_url, + + last_github_activity, + created_at, + updated_at + FROM projects + WHERE slug = $1 + "#, + slug + ) + .fetch_optional(pool) + .await +} + +/// Create project (without tags - tags handled separately) +pub async fn create_project( + pool: &PgPool, + name: &str, + slug_override: Option<&str>, + short_description: &str, + description: &str, + status: ProjectStatus, + github_repo: Option<&str>, + demo_url: Option<&str>, +) -> Result { + let slug = slug_override + .map(|s| slugify(s)) + .unwrap_or_else(|| slugify(name)); + + sqlx::query_as!( + DbProject, + r#" + INSERT INTO projects (slug, name, short_description, description, status, github_repo, demo_url) + VALUES ($1, $2, $3, $4, $5, $6, $7) + RETURNING id, slug, name, short_description, description, status as "status: ProjectStatus", + github_repo, demo_url, last_github_activity, created_at, updated_at + "#, + slug, + name, + short_description, + description, + status as ProjectStatus, + github_repo, + demo_url + ) + .fetch_one(pool) + .await +} + +/// Update project (without tags - tags handled separately) +pub async fn update_project( + pool: &PgPool, + id: Uuid, + name: &str, + slug_override: Option<&str>, + short_description: &str, + description: &str, + status: ProjectStatus, + github_repo: Option<&str>, + demo_url: Option<&str>, +) -> Result { + let slug = slug_override + .map(|s| slugify(s)) + .unwrap_or_else(|| slugify(name)); + + sqlx::query_as!( + DbProject, + r#" + UPDATE projects + SET slug = $2, name = $3, short_description = $4, description = $5, + status = $6, github_repo = $7, demo_url = $8 + WHERE id = $1 + RETURNING id, slug, name, short_description, description, status as "status: ProjectStatus", + github_repo, demo_url, last_github_activity, created_at, updated_at + "#, + id, + slug, + name, + short_description, + description, + status as ProjectStatus, + github_repo, + demo_url + ) + .fetch_one(pool) + .await +} + +/// Delete project (CASCADE will handle tags) +pub async fn delete_project(pool: &PgPool, id: Uuid) -> Result<(), sqlx::Error> { + sqlx::query!("DELETE FROM projects WHERE id = $1", id) + .execute(pool) + .await?; + Ok(()) +} + +/// Get admin stats +pub async fn get_admin_stats(pool: &PgPool) -> Result { + // Get project counts by status + let status_counts = sqlx::query!( + r#" + SELECT + status as "status!: ProjectStatus", + COUNT(*)::int as "count!" 
+ FROM projects + GROUP BY status + "# + ) + .fetch_all(pool) + .await?; + + let mut projects_by_status = serde_json::json!({ + "active": 0, + "maintained": 0, + "archived": 0, + "hidden": 0, + }); + + let mut total_projects = 0; + for row in status_counts { + let status_str = format!("{:?}", row.status).to_lowercase(); + projects_by_status[status_str] = serde_json::json!(row.count); + total_projects += row.count; + } + + // Get total tags + let tag_count = sqlx::query!("SELECT COUNT(*)::int as \"count!\" FROM tags") + .fetch_one(pool) + .await?; + + Ok(AdminStats { + total_projects, + projects_by_status, + total_tags: tag_count.count, + }) +} diff --git a/src/db/settings.rs b/src/db/settings.rs new file mode 100644 index 0000000..fb9af00 --- /dev/null +++ b/src/db/settings.rs @@ -0,0 +1,220 @@ +use serde::{Deserialize, Serialize}; +use sqlx::PgPool; +use time::OffsetDateTime; +use uuid::Uuid; + +// Site settings models + +#[derive(Debug, Clone, sqlx::FromRow)] +pub struct DbSiteIdentity { + pub id: i32, + pub display_name: String, + pub occupation: String, + pub bio: String, + pub site_title: String, + pub created_at: OffsetDateTime, + pub updated_at: OffsetDateTime, +} + +#[derive(Debug, Clone, sqlx::FromRow)] +pub struct DbSocialLink { + pub id: Uuid, + pub platform: String, + pub label: String, + pub value: String, + pub icon: String, + pub visible: bool, + pub display_order: i32, + pub created_at: OffsetDateTime, + pub updated_at: OffsetDateTime, +} + +// API response types +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct ApiSiteIdentity { + #[serde(rename = "displayName")] + pub display_name: String, + pub occupation: String, + pub bio: String, + #[serde(rename = "siteTitle")] + pub site_title: String, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct ApiSocialLink { + pub id: String, + pub platform: String, + pub label: String, + pub value: String, + pub icon: String, + pub visible: bool, + #[serde(rename = "displayOrder")] + pub display_order: i32, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct ApiSiteSettings { + pub identity: ApiSiteIdentity, + #[serde(rename = "socialLinks")] + pub social_links: Vec, +} + +// Request types for updates +#[derive(Debug, Deserialize)] +pub struct UpdateSiteIdentityRequest { + #[serde(rename = "displayName")] + pub display_name: String, + pub occupation: String, + pub bio: String, + #[serde(rename = "siteTitle")] + pub site_title: String, +} + +#[derive(Debug, Deserialize)] +pub struct UpdateSocialLinkRequest { + pub id: String, + pub platform: String, + pub label: String, + pub value: String, + pub icon: String, + pub visible: bool, + #[serde(rename = "displayOrder")] + pub display_order: i32, +} + +#[derive(Debug, Deserialize)] +pub struct UpdateSiteSettingsRequest { + pub identity: UpdateSiteIdentityRequest, + #[serde(rename = "socialLinks")] + pub social_links: Vec, +} + +// Conversion implementations +impl DbSiteIdentity { + pub fn to_api(&self) -> ApiSiteIdentity { + ApiSiteIdentity { + display_name: self.display_name.clone(), + occupation: self.occupation.clone(), + bio: self.bio.clone(), + site_title: self.site_title.clone(), + } + } +} + +impl DbSocialLink { + pub fn to_api(&self) -> ApiSocialLink { + ApiSocialLink { + id: self.id.to_string(), + platform: self.platform.clone(), + label: self.label.clone(), + value: self.value.clone(), + icon: self.icon.clone(), + visible: self.visible, + display_order: self.display_order, + } + } +} + +// Query functions +pub async fn get_site_settings(pool: 
&PgPool) -> Result { + // Get identity (single row) + let identity = sqlx::query_as!( + DbSiteIdentity, + r#" + SELECT id, display_name, occupation, bio, site_title, created_at, updated_at + FROM site_identity + WHERE id = 1 + "# + ) + .fetch_one(pool) + .await?; + + // Get social links (ordered) + let social_links = sqlx::query_as!( + DbSocialLink, + r#" + SELECT id, platform, label, value, icon, visible, display_order, created_at, updated_at + FROM social_links + ORDER BY display_order ASC + "# + ) + .fetch_all(pool) + .await?; + + Ok(ApiSiteSettings { + identity: identity.to_api(), + social_links: social_links.into_iter().map(|sl| sl.to_api()).collect(), + }) +} + +pub async fn update_site_identity( + pool: &PgPool, + req: &UpdateSiteIdentityRequest, +) -> Result { + sqlx::query_as!( + DbSiteIdentity, + r#" + UPDATE site_identity + SET display_name = $1, occupation = $2, bio = $3, site_title = $4 + WHERE id = 1 + RETURNING id, display_name, occupation, bio, site_title, created_at, updated_at + "#, + req.display_name, + req.occupation, + req.bio, + req.site_title + ) + .fetch_one(pool) + .await +} + +pub async fn update_social_link( + pool: &PgPool, + link_id: Uuid, + req: &UpdateSocialLinkRequest, +) -> Result { + sqlx::query_as!( + DbSocialLink, + r#" + UPDATE social_links + SET platform = $2, label = $3, value = $4, icon = $5, visible = $6, display_order = $7 + WHERE id = $1 + RETURNING id, platform, label, value, icon, visible, display_order, created_at, updated_at + "#, + link_id, + req.platform, + req.label, + req.value, + req.icon, + req.visible, + req.display_order + ) + .fetch_one(pool) + .await +} + +pub async fn update_site_settings( + pool: &PgPool, + req: &UpdateSiteSettingsRequest, +) -> Result { + // Update identity + let identity = update_site_identity(pool, &req.identity).await?; + + // Update each social link + let mut updated_links = Vec::new(); + for link_req in &req.social_links { + let link_id = Uuid::parse_str(&link_req.id).map_err(|_| { + sqlx::Error::Decode(Box::new(std::io::Error::new( + std::io::ErrorKind::InvalidData, + "Invalid UUID format", + ))) + })?; + let link = update_social_link(pool, link_id, link_req).await?; + updated_links.push(link); + } + + Ok(ApiSiteSettings { + identity: identity.to_api(), + social_links: updated_links.into_iter().map(|sl| sl.to_api()).collect(), + }) +} diff --git a/src/db/tags.rs b/src/db/tags.rs new file mode 100644 index 0000000..05f84b1 --- /dev/null +++ b/src/db/tags.rs @@ -0,0 +1,432 @@ +use serde::{Deserialize, Serialize}; +use sqlx::PgPool; +use time::OffsetDateTime; +use uuid::Uuid; + +use super::slugify; + +// Tag database models +#[derive(Debug, Clone, sqlx::FromRow)] +pub struct DbTag { + pub id: Uuid, + pub slug: String, + pub name: String, + pub icon: Option, + pub color: Option, + pub created_at: OffsetDateTime, +} + +#[derive(Debug, Clone, sqlx::FromRow)] +pub struct DbProjectTag { + pub project_id: Uuid, + pub tag_id: Uuid, +} + +#[derive(Debug, Clone, sqlx::FromRow)] +pub struct DbTagCooccurrence { + pub tag_a: Uuid, + pub tag_b: Uuid, + pub count: i32, +} + +// API response types +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct ApiTag { + pub id: String, + pub slug: String, + pub name: String, + #[serde(skip_serializing_if = "Option::is_none")] + pub icon: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub color: Option, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct ApiTagWithCount { + #[serde(flatten)] + pub tag: ApiTag, + pub project_count: i32, +} + 
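
Editor's note: the `#[serde(flatten)]` plus `skip_serializing_if` attributes on these response types mean a tag-with-count serializes as one flat JSON object, with `icon`/`color` omitted when absent. Below is a minimal, purely illustrative sketch of that behaviour — the two structs are re-declared locally so it compiles on its own, their field shapes are copied from this file, and all example values (the UUID string, slug, colour, count) are made up, not taken from the PR.

```rust
use serde::Serialize;

#[derive(Serialize)]
struct ApiTag {
    id: String,
    slug: String,
    name: String,
    #[serde(skip_serializing_if = "Option::is_none")]
    icon: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    color: Option<String>,
}

#[derive(Serialize)]
struct ApiTagWithCount {
    #[serde(flatten)]
    tag: ApiTag,
    project_count: i32,
}

fn main() {
    let value = ApiTagWithCount {
        tag: ApiTag {
            // Hypothetical values for illustration only.
            id: "00000000-0000-0000-0000-000000000001".into(),
            slug: "rust".into(),
            name: "Rust".into(),
            icon: None,                      // skipped in the output
            color: Some("#dea584".into()),
        },
        project_count: 4,
    };

    // `flatten` folds the tag fields into the top-level object, so this prints:
    // {"id":"00000000-0000-0000-0000-000000000001","slug":"rust","name":"Rust","color":"#dea584","project_count":4}
    println!("{}", serde_json::to_string(&value).unwrap());
}
```
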
+#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct ApiRelatedTag { + #[serde(flatten)] + pub tag: ApiTag, + pub cooccurrence_count: i32, +} + +impl DbTag { + /// Convert database tag to API response format + pub fn to_api_tag(&self) -> ApiTag { + ApiTag { + id: self.id.to_string(), + slug: self.slug.clone(), + name: self.name.clone(), + icon: self.icon.clone(), + color: self.color.clone(), + } + } +} + +// Tag CRUD queries + +pub async fn create_tag( + pool: &PgPool, + name: &str, + slug_override: Option<&str>, + icon: Option<&str>, + color: Option<&str>, +) -> Result { + let slug = slug_override + .map(|s| slugify(s)) + .unwrap_or_else(|| slugify(name)); + + sqlx::query_as!( + DbTag, + r#" + INSERT INTO tags (slug, name, icon, color) + VALUES ($1, $2, $3, $4) + RETURNING id, slug, name, icon, color, created_at + "#, + slug, + name, + icon, + color + ) + .fetch_one(pool) + .await +} + +pub async fn get_tag_by_id(pool: &PgPool, id: Uuid) -> Result, sqlx::Error> { + sqlx::query_as!( + DbTag, + r#" + SELECT id, slug, name, icon, color, created_at + FROM tags + WHERE id = $1 + "#, + id + ) + .fetch_optional(pool) + .await +} + +pub async fn get_tag_by_slug(pool: &PgPool, slug: &str) -> Result, sqlx::Error> { + sqlx::query_as!( + DbTag, + r#" + SELECT id, slug, name, icon, color, created_at + FROM tags + WHERE slug = $1 + "#, + slug + ) + .fetch_optional(pool) + .await +} + +pub async fn get_all_tags(pool: &PgPool) -> Result, sqlx::Error> { + sqlx::query_as!( + DbTag, + r#" + SELECT id, slug, name, icon, color, created_at + FROM tags + ORDER BY name ASC + "# + ) + .fetch_all(pool) + .await +} + +pub async fn get_all_tags_with_counts(pool: &PgPool) -> Result, sqlx::Error> { + let rows = sqlx::query!( + r#" + SELECT + t.id, + t.slug, + t.name, + t.icon, + t.color, + t.created_at, + COUNT(pt.project_id)::int as "project_count!" + FROM tags t + LEFT JOIN project_tags pt ON t.id = pt.tag_id + GROUP BY t.id, t.slug, t.name, t.icon, t.color, t.created_at + ORDER BY t.name ASC + "# + ) + .fetch_all(pool) + .await?; + + Ok(rows + .into_iter() + .map(|row| { + let tag = DbTag { + id: row.id, + slug: row.slug, + name: row.name, + icon: row.icon, + color: row.color, + created_at: row.created_at, + }; + (tag, row.project_count) + }) + .collect()) +} + +pub async fn update_tag( + pool: &PgPool, + id: Uuid, + name: &str, + slug_override: Option<&str>, + icon: Option<&str>, + color: Option<&str>, +) -> Result { + let slug = slug_override + .map(|s| slugify(s)) + .unwrap_or_else(|| slugify(name)); + + sqlx::query_as!( + DbTag, + r#" + UPDATE tags + SET slug = $2, name = $3, icon = $4, color = $5 + WHERE id = $1 + RETURNING id, slug, name, icon, color, created_at + "#, + id, + slug, + name, + icon, + color + ) + .fetch_one(pool) + .await +} + +pub async fn delete_tag(pool: &PgPool, id: Uuid) -> Result<(), sqlx::Error> { + sqlx::query!("DELETE FROM tags WHERE id = $1", id) + .execute(pool) + .await?; + Ok(()) +} + +pub async fn tag_exists_by_name(pool: &PgPool, name: &str) -> Result { + let result = sqlx::query!( + r#" + SELECT EXISTS(SELECT 1 FROM tags WHERE LOWER(name) = LOWER($1)) as "exists!" + "#, + name + ) + .fetch_one(pool) + .await?; + + Ok(result.exists) +} + +pub async fn tag_exists_by_slug(pool: &PgPool, slug: &str) -> Result { + let result = sqlx::query!( + r#" + SELECT EXISTS(SELECT 1 FROM tags WHERE slug = $1) as "exists!" 
+ "#, + slug + ) + .fetch_one(pool) + .await?; + + Ok(result.exists) +} + +// Project-Tag association queries + +pub async fn add_tag_to_project( + pool: &PgPool, + project_id: Uuid, + tag_id: Uuid, +) -> Result<(), sqlx::Error> { + sqlx::query!( + r#" + INSERT INTO project_tags (project_id, tag_id) + VALUES ($1, $2) + ON CONFLICT (project_id, tag_id) DO NOTHING + "#, + project_id, + tag_id + ) + .execute(pool) + .await?; + Ok(()) +} + +pub async fn remove_tag_from_project( + pool: &PgPool, + project_id: Uuid, + tag_id: Uuid, +) -> Result<(), sqlx::Error> { + sqlx::query!( + "DELETE FROM project_tags WHERE project_id = $1 AND tag_id = $2", + project_id, + tag_id + ) + .execute(pool) + .await?; + Ok(()) +} + +pub async fn get_tags_for_project( + pool: &PgPool, + project_id: Uuid, +) -> Result, sqlx::Error> { + sqlx::query_as!( + DbTag, + r#" + SELECT t.id, t.slug, t.name, t.icon, t.color, t.created_at + FROM tags t + JOIN project_tags pt ON t.id = pt.tag_id + WHERE pt.project_id = $1 + ORDER BY t.name ASC + "#, + project_id + ) + .fetch_all(pool) + .await +} + +pub async fn get_projects_for_tag( + pool: &PgPool, + tag_id: Uuid, +) -> Result, sqlx::Error> { + sqlx::query_as!( + super::projects::DbProject, + r#" + SELECT + p.id, + p.slug, + p.name, + p.short_description, + p.description, + p.status as "status: super::ProjectStatus", + p.github_repo, + p.demo_url, + p.last_github_activity, + p.created_at, + p.updated_at + FROM projects p + JOIN project_tags pt ON p.id = pt.project_id + WHERE pt.tag_id = $1 + ORDER BY p.updated_at DESC + "#, + tag_id + ) + .fetch_all(pool) + .await +} + +/// Set project tags (smart diff implementation) +pub async fn set_project_tags( + pool: &PgPool, + project_id: Uuid, + tag_ids: &[Uuid], +) -> Result<(), sqlx::Error> { + // Get current tags + let current_tags = get_tags_for_project(pool, project_id).await?; + let current_ids: Vec = current_tags.iter().map(|t| t.id).collect(); + + // Find tags to add (in new list but not in current) + let to_add: Vec = tag_ids + .iter() + .filter(|id| !current_ids.contains(id)) + .copied() + .collect(); + + // Find tags to remove (in current but not in new list) + let to_remove: Vec = current_ids + .iter() + .filter(|id| !tag_ids.contains(id)) + .copied() + .collect(); + + // Add new tags + for tag_id in to_add { + add_tag_to_project(pool, project_id, tag_id).await?; + } + + // Remove old tags + for tag_id in to_remove { + remove_tag_from_project(pool, project_id, tag_id).await?; + } + + Ok(()) +} + +// Tag cooccurrence queries + +pub async fn recalculate_tag_cooccurrence(pool: &PgPool) -> Result<(), sqlx::Error> { + // Delete existing cooccurrence data + sqlx::query!("DELETE FROM tag_cooccurrence") + .execute(pool) + .await?; + + // Calculate and insert new cooccurrence data + sqlx::query!( + r#" + INSERT INTO tag_cooccurrence (tag_a, tag_b, count) + SELECT + LEAST(t1.tag_id, t2.tag_id) as tag_a, + GREATEST(t1.tag_id, t2.tag_id) as tag_b, + COUNT(*)::int as count + FROM project_tags t1 + JOIN project_tags t2 ON t1.project_id = t2.project_id + WHERE t1.tag_id < t2.tag_id + GROUP BY tag_a, tag_b + HAVING COUNT(*) > 0 + "# + ) + .execute(pool) + .await?; + + Ok(()) +} + +pub async fn get_related_tags( + pool: &PgPool, + tag_id: Uuid, + limit: i64, +) -> Result, sqlx::Error> { + let rows = sqlx::query!( + r#" + SELECT + t.id, + t.slug, + t.name, + t.icon, + t.color, + t.created_at, + tc.count + FROM tag_cooccurrence tc + JOIN tags t ON (tc.tag_a = t.id OR tc.tag_b = t.id) + WHERE (tc.tag_a = $1 OR tc.tag_b = $1) AND t.id != $1 + 
ORDER BY tc.count DESC, t.name ASC + LIMIT $2 + "#, + tag_id, + limit + ) + .fetch_all(pool) + .await?; + + Ok(rows + .into_iter() + .map(|row| { + let tag = DbTag { + id: row.id, + slug: row.slug, + name: row.name, + icon: row.icon, + color: row.color, + created_at: row.created_at, + }; + (tag, row.count) + }) + .collect()) +} diff --git a/src/handlers/assets.rs b/src/handlers/assets.rs new file mode 100644 index 0000000..c6aa61f --- /dev/null +++ b/src/handlers/assets.rs @@ -0,0 +1,102 @@ +use axum::{ + Json, + extract::{Request, State}, + http::{HeaderMap, StatusCode}, + response::{IntoResponse, Response}, +}; +use std::sync::Arc; + +use crate::{assets, proxy, state::AppState, utils}; + +/// Serve PGP public key +pub async fn serve_pgp_key() -> impl IntoResponse { + if let Some(content) = assets::get_static_file("publickey.asc") { + let mut headers = HeaderMap::new(); + headers.insert( + axum::http::header::CONTENT_TYPE, + axum::http::HeaderValue::from_static("application/pgp-keys"), + ); + headers.insert( + axum::http::header::CONTENT_DISPOSITION, + axum::http::HeaderValue::from_static("attachment; filename=\"publickey.asc\""), + ); + headers.insert( + axum::http::header::CACHE_CONTROL, + axum::http::HeaderValue::from_static("public, max-age=86400"), + ); + (StatusCode::OK, headers, content).into_response() + } else { + (StatusCode::NOT_FOUND, "PGP key not found").into_response() + } +} + +/// Redirect /keys to /pgp +pub async fn redirect_to_pgp() -> impl IntoResponse { + axum::response::Redirect::permanent("/pgp") +} + +/// Handle /pgp route - serve HTML page or raw key based on User-Agent +pub async fn handle_pgp_route( + State(state): State>, + headers: HeaderMap, + req: Request, +) -> Response { + if utils::prefers_raw_content(&headers) { + // Serve raw .asc file for CLI tools + serve_pgp_key().await.into_response() + } else { + // Proxy to Bun for HTML page + proxy::isr_handler(State(state), req).await + } +} + +/// Proxy icon requests to SvelteKit +pub async fn proxy_icons_handler( + State(state): State>, + jar: axum_extra::extract::CookieJar, + axum::extract::Path(path): axum::extract::Path, + req: Request, +) -> impl IntoResponse { + let full_path = format!("/api/icons/{}", path); + let query = req.uri().query().unwrap_or(""); + + let bun_url = if state.downstream_url.starts_with('/') || state.downstream_url.starts_with("./") + { + if query.is_empty() { + format!("http://localhost{}", full_path) + } else { + format!("http://localhost{}?{}", full_path, query) + } + } else if query.is_empty() { + format!("{}{}", state.downstream_url, full_path) + } else { + format!("{}{}?{}", state.downstream_url, full_path, query) + }; + + // Build trusted headers with session info + let mut forward_headers = HeaderMap::new(); + + if let Some(cookie) = jar.get("admin_session") { + if let Ok(session_id) = ulid::Ulid::from_string(cookie.value()) { + if let Some(session) = state.session_manager.validate_session(session_id) { + if let Ok(username_value) = axum::http::HeaderValue::from_str(&session.username) { + forward_headers.insert("x-session-user", username_value); + } + } + } + } + + match proxy::proxy_to_bun(&bun_url, state, forward_headers).await { + Ok((status, headers, body)) => (status, headers, body).into_response(), + Err(err) => { + tracing::error!(error = %err, path = %full_path, "Failed to proxy icon request"); + ( + StatusCode::BAD_GATEWAY, + Json(serde_json::json!({ + "error": "Failed to fetch icon data" + })), + ) + .into_response() + } + } +} diff --git a/src/handlers/auth.rs 
b/src/handlers/auth.rs new file mode 100644 index 0000000..8c92b49 --- /dev/null +++ b/src/handlers/auth.rs @@ -0,0 +1,166 @@ +use axum::{Json, extract::State, http::StatusCode, response::IntoResponse}; +use std::sync::Arc; + +use crate::{auth, state::AppState}; + +#[derive(serde::Deserialize)] +pub struct LoginRequest { + pub username: String, + pub password: String, +} + +#[derive(serde::Serialize)] +pub struct LoginResponse { + pub success: bool, + pub username: String, +} + +#[derive(serde::Serialize)] +pub struct SessionResponse { + pub authenticated: bool, + pub username: String, +} + +/// Login handler - creates a new session +pub async fn api_login_handler( + State(state): State>, + jar: axum_extra::extract::CookieJar, + Json(payload): Json, +) -> Result<(axum_extra::extract::CookieJar, Json), impl IntoResponse> { + let user = match auth::get_admin_user(&state.pool, &payload.username).await { + Ok(Some(user)) => user, + Ok(None) => { + return Err(( + StatusCode::UNAUTHORIZED, + Json(serde_json::json!({ + "error": "Invalid credentials", + "message": "Username or password incorrect" + })), + )); + } + Err(err) => { + tracing::error!(error = %err, "Failed to fetch admin user"); + return Err(( + StatusCode::INTERNAL_SERVER_ERROR, + Json(serde_json::json!({ + "error": "Internal server error", + "message": "Failed to authenticate" + })), + )); + } + }; + + let password_valid = match auth::verify_password(&payload.password, &user.password_hash) { + Ok(valid) => valid, + Err(err) => { + tracing::error!(error = %err, "Failed to verify password"); + return Err(( + StatusCode::INTERNAL_SERVER_ERROR, + Json(serde_json::json!({ + "error": "Internal server error", + "message": "Failed to authenticate" + })), + )); + } + }; + + if !password_valid { + return Err(( + StatusCode::UNAUTHORIZED, + Json(serde_json::json!({ + "error": "Invalid credentials", + "message": "Username or password incorrect" + })), + )); + } + + let session = match state + .session_manager + .create_session(user.id, user.username.clone()) + .await + { + Ok(session) => session, + Err(err) => { + tracing::error!(error = %err, "Failed to create session"); + return Err(( + StatusCode::INTERNAL_SERVER_ERROR, + Json(serde_json::json!({ + "error": "Internal server error", + "message": "Failed to create session" + })), + )); + } + }; + + let cookie = + axum_extra::extract::cookie::Cookie::build(("admin_session", session.id.to_string())) + .path("/") + .http_only(true) + .same_site(axum_extra::extract::cookie::SameSite::Lax) + .max_age(time::Duration::days(7)) + .build(); + + let jar = jar.add(cookie); + + tracing::info!(username = %user.username, "User logged in"); + + Ok(( + jar, + Json(LoginResponse { + success: true, + username: user.username, + }), + )) +} + +/// Logout handler - deletes the session +pub async fn api_logout_handler( + State(state): State>, + jar: axum_extra::extract::CookieJar, +) -> (axum_extra::extract::CookieJar, StatusCode) { + if let Some(cookie) = jar.get("admin_session") { + if let Ok(session_id) = ulid::Ulid::from_string(cookie.value()) { + if let Err(e) = state.session_manager.delete_session(session_id).await { + tracing::error!(error = %e, "Failed to delete session during logout"); + } + } + } + + let cookie = axum_extra::extract::cookie::Cookie::build(("admin_session", "")) + .path("/") + .max_age(time::Duration::ZERO) + .build(); + + (jar.add(cookie), StatusCode::OK) +} + +/// Session check handler - returns current session status +pub async fn api_session_handler( + State(state): State>, + jar: 
axum_extra::extract::CookieJar, +) -> impl IntoResponse { + let session_cookie = jar.get("admin_session"); + + let session_id = session_cookie.and_then(|cookie| ulid::Ulid::from_string(cookie.value()).ok()); + + let session = session_id.and_then(|id| state.session_manager.validate_session(id)); + + match session { + Some(session) => ( + StatusCode::OK, + Json(SessionResponse { + authenticated: true, + username: session.username, + }), + ) + .into_response(), + None => ( + StatusCode::UNAUTHORIZED, + Json(serde_json::json!({ + "error": "Unauthorized", + "message": "No valid session" + })), + ) + .into_response(), + } +} diff --git a/src/handlers/health.rs b/src/handlers/health.rs new file mode 100644 index 0000000..ded3efc --- /dev/null +++ b/src/handlers/health.rs @@ -0,0 +1,15 @@ +use axum::{extract::State, http::StatusCode, response::IntoResponse}; +use std::sync::Arc; + +use crate::state::AppState; + +/// Health check endpoint - returns 200 if both DB and Bun are healthy +pub async fn health_handler(State(state): State>) -> impl IntoResponse { + let healthy = state.health_checker.check().await; + + if healthy { + (StatusCode::OK, "OK") + } else { + (StatusCode::SERVICE_UNAVAILABLE, "Unhealthy") + } +} diff --git a/src/handlers/mod.rs b/src/handlers/mod.rs new file mode 100644 index 0000000..a7a5d8e --- /dev/null +++ b/src/handlers/mod.rs @@ -0,0 +1,35 @@ +pub mod assets; +pub mod auth; +pub mod health; +pub mod projects; +pub mod settings; +pub mod tags; + +// Re-export handlers for easier imports +pub use assets::*; +pub use auth::*; +pub use health::*; +pub use projects::*; +pub use settings::*; +pub use tags::*; + +// Request/Response types used by handlers + +#[derive(serde::Deserialize)] +pub struct CreateTagRequest { + pub name: String, + pub slug: Option, + pub color: Option, +} + +#[derive(serde::Deserialize)] +pub struct UpdateTagRequest { + pub name: String, + pub slug: Option, + pub color: Option, +} + +#[derive(serde::Deserialize)] +pub struct AddProjectTagRequest { + pub tag_id: String, +} diff --git a/src/handlers/projects.rs b/src/handlers/projects.rs new file mode 100644 index 0000000..c9fc5ec --- /dev/null +++ b/src/handlers/projects.rs @@ -0,0 +1,677 @@ +use axum::{Json, extract::State, http::StatusCode, response::IntoResponse}; +use std::sync::Arc; + +use crate::{auth, db, handlers::AddProjectTagRequest, state::AppState}; + +/// List all projects - returns filtered data based on auth status +pub async fn projects_handler( + State(state): State>, + jar: axum_extra::extract::CookieJar, +) -> impl IntoResponse { + let is_admin = auth::check_session(&state, &jar).is_some(); + + if is_admin { + // Admin view: return all projects with tags + match db::get_all_projects_with_tags_admin(&state.pool).await { + Ok(projects_with_tags) => { + let response: Vec = projects_with_tags + .into_iter() + .map(|(project, tags)| project.to_api_admin_project(tags)) + .collect(); + Json(response).into_response() + } + Err(err) => { + tracing::error!(error = %err, "Failed to fetch admin projects"); + ( + StatusCode::INTERNAL_SERVER_ERROR, + Json(serde_json::json!({ + "error": "Internal server error", + "message": "Failed to fetch projects" + })), + ) + .into_response() + } + } + } else { + // Public view: return non-hidden projects with tags + match db::get_public_projects_with_tags(&state.pool).await { + Ok(projects_with_tags) => { + let response: Vec = projects_with_tags + .into_iter() + .map(|(project, tags)| project.to_api_admin_project(tags)) + .collect(); + 
Json(response).into_response() + } + Err(err) => { + tracing::error!(error = %err, "Failed to fetch public projects"); + ( + StatusCode::INTERNAL_SERVER_ERROR, + Json(serde_json::json!({ + "error": "Internal server error", + "message": "Failed to fetch projects" + })), + ) + .into_response() + } + } + } +} + +/// Get a single project by ID +pub async fn get_project_handler( + State(state): State>, + axum::extract::Path(id): axum::extract::Path, + jar: axum_extra::extract::CookieJar, +) -> impl IntoResponse { + let project_id = match uuid::Uuid::parse_str(&id) { + Ok(id) => id, + Err(_) => { + return ( + StatusCode::BAD_REQUEST, + Json(serde_json::json!({ + "error": "Invalid project ID", + "message": "Project ID must be a valid UUID" + })), + ) + .into_response(); + } + }; + + let is_admin = auth::check_session(&state, &jar).is_some(); + + match db::get_project_by_id_with_tags(&state.pool, project_id).await { + Ok(Some((project, tags))) => { + // If project is hidden and user is not admin, return 404 + if project.status == db::ProjectStatus::Hidden && !is_admin { + return ( + StatusCode::NOT_FOUND, + Json(serde_json::json!({ + "error": "Not found", + "message": "Project not found" + })), + ) + .into_response(); + } + + Json(project.to_api_admin_project(tags)).into_response() + } + Ok(None) => ( + StatusCode::NOT_FOUND, + Json(serde_json::json!({ + "error": "Not found", + "message": "Project not found" + })), + ) + .into_response(), + Err(err) => { + tracing::error!(error = %err, "Failed to fetch project"); + ( + StatusCode::INTERNAL_SERVER_ERROR, + Json(serde_json::json!({ + "error": "Internal server error", + "message": "Failed to fetch project" + })), + ) + .into_response() + } + } +} + +/// Create a new project (requires authentication) +pub async fn create_project_handler( + State(state): State>, + jar: axum_extra::extract::CookieJar, + Json(payload): Json, +) -> impl IntoResponse { + // Check auth + if auth::check_session(&state, &jar).is_none() { + return auth::require_auth_response().into_response(); + } + + // Validate request + if payload.name.trim().is_empty() { + return ( + StatusCode::BAD_REQUEST, + Json(serde_json::json!({ + "error": "Validation error", + "message": "Project name cannot be empty" + })), + ) + .into_response(); + } + + if payload.short_description.trim().is_empty() { + return ( + StatusCode::BAD_REQUEST, + Json(serde_json::json!({ + "error": "Validation error", + "message": "Project short description cannot be empty" + })), + ) + .into_response(); + } + + // Parse tag UUIDs + let tag_ids: Result, _> = payload + .tag_ids + .iter() + .map(|id| uuid::Uuid::parse_str(id)) + .collect(); + + let tag_ids = match tag_ids { + Ok(ids) => ids, + Err(_) => { + return ( + StatusCode::BAD_REQUEST, + Json(serde_json::json!({ + "error": "Validation error", + "message": "Invalid tag UUID format" + })), + ) + .into_response(); + } + }; + + // Create project + let project = match db::create_project( + &state.pool, + &payload.name, + payload.slug.as_deref(), + &payload.short_description, + &payload.description, + payload.status, + payload.github_repo.as_deref(), + payload.demo_url.as_deref(), + ) + .await + { + Ok(p) => p, + Err(sqlx::Error::Database(db_err)) if db_err.is_unique_violation() => { + return ( + StatusCode::CONFLICT, + Json(serde_json::json!({ + "error": "Conflict", + "message": "A project with this slug already exists" + })), + ) + .into_response(); + } + Err(err) => { + tracing::error!(error = %err, "Failed to create project"); + return ( + 
StatusCode::INTERNAL_SERVER_ERROR, + Json(serde_json::json!({ + "error": "Internal server error", + "message": "Failed to create project" + })), + ) + .into_response(); + } + }; + + // Set tags + if let Err(err) = db::set_project_tags(&state.pool, project.id, &tag_ids).await { + tracing::error!(error = %err, project_id = %project.id, "Failed to set project tags"); + } + + // Fetch project with tags to return + let (project, tags) = match db::get_project_by_id_with_tags(&state.pool, project.id).await { + Ok(Some(data)) => data, + Ok(None) => { + tracing::error!(project_id = %project.id, "Project not found after creation"); + return ( + StatusCode::INTERNAL_SERVER_ERROR, + Json(serde_json::json!({ + "error": "Internal server error", + "message": "Failed to fetch created project" + })), + ) + .into_response(); + } + Err(err) => { + tracing::error!(error = %err, project_id = %project.id, "Failed to fetch created project"); + return ( + StatusCode::INTERNAL_SERVER_ERROR, + Json(serde_json::json!({ + "error": "Internal server error", + "message": "Failed to fetch created project" + })), + ) + .into_response(); + } + }; + + tracing::info!(project_id = %project.id, project_name = %project.name, "Project created"); + + ( + StatusCode::CREATED, + Json(project.to_api_admin_project(tags)), + ) + .into_response() +} + +/// Update an existing project (requires authentication) +pub async fn update_project_handler( + State(state): State>, + axum::extract::Path(id): axum::extract::Path, + jar: axum_extra::extract::CookieJar, + Json(payload): Json, +) -> impl IntoResponse { + // Check auth + if auth::check_session(&state, &jar).is_none() { + return auth::require_auth_response().into_response(); + } + + // Parse project ID + let project_id = match uuid::Uuid::parse_str(&id) { + Ok(id) => id, + Err(_) => { + return ( + StatusCode::BAD_REQUEST, + Json(serde_json::json!({ + "error": "Invalid project ID", + "message": "Project ID must be a valid UUID" + })), + ) + .into_response(); + } + }; + + // Validate exists + if db::get_project_by_id(&state.pool, project_id) + .await + .ok() + .flatten() + .is_none() + { + return ( + StatusCode::NOT_FOUND, + Json(serde_json::json!({ + "error": "Not found", + "message": "Project not found" + })), + ) + .into_response(); + } + + // Validate request + if payload.name.trim().is_empty() { + return ( + StatusCode::BAD_REQUEST, + Json(serde_json::json!({ + "error": "Validation error", + "message": "Project name cannot be empty" + })), + ) + .into_response(); + } + + if payload.short_description.trim().is_empty() { + return ( + StatusCode::BAD_REQUEST, + Json(serde_json::json!({ + "error": "Validation error", + "message": "Project short description cannot be empty" + })), + ) + .into_response(); + } + + // Parse tag UUIDs + let tag_ids: Result, _> = payload + .tag_ids + .iter() + .map(|id| uuid::Uuid::parse_str(id)) + .collect(); + + let tag_ids = match tag_ids { + Ok(ids) => ids, + Err(_) => { + return ( + StatusCode::BAD_REQUEST, + Json(serde_json::json!({ + "error": "Validation error", + "message": "Invalid tag UUID format" + })), + ) + .into_response(); + } + }; + + // Update project + let project = match db::update_project( + &state.pool, + project_id, + &payload.name, + payload.slug.as_deref(), + &payload.short_description, + &payload.description, + payload.status, + payload.github_repo.as_deref(), + payload.demo_url.as_deref(), + ) + .await + { + Ok(p) => p, + Err(sqlx::Error::Database(db_err)) if db_err.is_unique_violation() => { + return ( + StatusCode::CONFLICT, + 
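create_project_handler deserializes its body into a CreateProjectRequest that this part of the diff never shows. Judging purely from the payload fields the handler reads, the shape is roughly the following; this is an inferred sketch, not the actual definition, and the field names and types are assumptions:

// Inferred from how create_project_handler/update_project_handler read `payload`;
// the real definition is not included in this diff.
#[derive(serde::Deserialize)]
pub struct CreateProjectRequest {
    pub name: String,
    pub slug: Option<String>,        // optional explicit slug override
    pub short_description: String,
    pub description: String,
    pub status: db::ProjectStatus,   // the status enum defined in db.rs
    pub github_repo: Option<String>,
    pub demo_url: Option<String>,
    pub tag_ids: Vec<String>,        // UUIDs as strings, parsed in the handler
}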
Json(serde_json::json!({ + "error": "Conflict", + "message": "A project with this slug already exists" + })), + ) + .into_response(); + } + Err(err) => { + tracing::error!(error = %err, "Failed to update project"); + return ( + StatusCode::INTERNAL_SERVER_ERROR, + Json(serde_json::json!({ + "error": "Internal server error", + "message": "Failed to update project" + })), + ) + .into_response(); + } + }; + + // Update tags (smart diff) + if let Err(err) = db::set_project_tags(&state.pool, project.id, &tag_ids).await { + tracing::error!(error = %err, project_id = %project.id, "Failed to update project tags"); + } + + // Fetch updated project with tags + let (project, tags) = match db::get_project_by_id_with_tags(&state.pool, project.id).await { + Ok(Some(data)) => data, + Ok(None) => { + tracing::error!(project_id = %project.id, "Project not found after update"); + return ( + StatusCode::INTERNAL_SERVER_ERROR, + Json(serde_json::json!({ + "error": "Internal server error", + "message": "Failed to fetch updated project" + })), + ) + .into_response(); + } + Err(err) => { + tracing::error!(error = %err, project_id = %project.id, "Failed to fetch updated project"); + return ( + StatusCode::INTERNAL_SERVER_ERROR, + Json(serde_json::json!({ + "error": "Internal server error", + "message": "Failed to fetch updated project" + })), + ) + .into_response(); + } + }; + + tracing::info!(project_id = %project.id, project_name = %project.name, "Project updated"); + + Json(project.to_api_admin_project(tags)).into_response() +} + +/// Delete a project (requires authentication) +pub async fn delete_project_handler( + State(state): State>, + axum::extract::Path(id): axum::extract::Path, + jar: axum_extra::extract::CookieJar, +) -> impl IntoResponse { + // Check auth + if auth::check_session(&state, &jar).is_none() { + return auth::require_auth_response().into_response(); + } + + // Parse project ID + let project_id = match uuid::Uuid::parse_str(&id) { + Ok(id) => id, + Err(_) => { + return ( + StatusCode::BAD_REQUEST, + Json(serde_json::json!({ + "error": "Invalid project ID", + "message": "Project ID must be a valid UUID" + })), + ) + .into_response(); + } + }; + + // Fetch project before deletion to return it + let (project, tags) = match db::get_project_by_id_with_tags(&state.pool, project_id).await { + Ok(Some(data)) => data, + Ok(None) => { + return ( + StatusCode::NOT_FOUND, + Json(serde_json::json!({ + "error": "Not found", + "message": "Project not found" + })), + ) + .into_response(); + } + Err(err) => { + tracing::error!(error = %err, "Failed to fetch project before deletion"); + return ( + StatusCode::INTERNAL_SERVER_ERROR, + Json(serde_json::json!({ + "error": "Internal server error", + "message": "Failed to delete project" + })), + ) + .into_response(); + } + }; + + // Delete project (CASCADE handles tags) + match db::delete_project(&state.pool, project_id).await { + Ok(()) => { + tracing::info!(project_id = %project_id, project_name = %project.name, "Project deleted"); + Json(project.to_api_admin_project(tags)).into_response() + } + Err(err) => { + tracing::error!(error = %err, "Failed to delete project"); + ( + StatusCode::INTERNAL_SERVER_ERROR, + Json(serde_json::json!({ + "error": "Internal server error", + "message": "Failed to delete project" + })), + ) + .into_response() + } + } +} + +/// Get admin stats (requires authentication) +pub async fn get_admin_stats_handler( + State(state): State>, + jar: axum_extra::extract::CookieJar, +) -> impl IntoResponse { + // Check auth + if 
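Both the create and update paths hand the parsed UUIDs to db::set_project_tags, which the comment above describes as a "smart diff". That helper is not shown in this diff; the following is only a plausible sketch of it, with the project_tags table and column names assumed from the DbProjectTag model in db.rs:

// Hypothetical sketch of db::set_project_tags; the real implementation is not in this diff.
use std::collections::HashSet;
use sqlx::PgPool;
use uuid::Uuid;

pub async fn set_project_tags(pool: &PgPool, project_id: Uuid, desired: &[Uuid]) -> Result<(), sqlx::Error> {
    let mut tx = pool.begin().await?;

    // Current tag ids for the project.
    let current: Vec<Uuid> = sqlx::query_scalar("SELECT tag_id FROM project_tags WHERE project_id = $1")
        .bind(project_id)
        .fetch_all(&mut *tx)
        .await?;

    let current: HashSet<Uuid> = current.into_iter().collect();
    let desired: HashSet<Uuid> = desired.iter().copied().collect();

    // Insert only the newly added tags.
    for tag_id in desired.difference(&current) {
        sqlx::query("INSERT INTO project_tags (project_id, tag_id) VALUES ($1, $2)")
            .bind(project_id)
            .bind(tag_id)
            .execute(&mut *tx)
            .await?;
    }

    // Remove only the tags that were dropped.
    for tag_id in current.difference(&desired) {
        sqlx::query("DELETE FROM project_tags WHERE project_id = $1 AND tag_id = $2")
            .bind(project_id)
            .bind(tag_id)
            .execute(&mut *tx)
            .await?;
    }

    tx.commit().await
}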
auth::check_session(&state, &jar).is_none() { + return auth::require_auth_response().into_response(); + } + + match db::get_admin_stats(&state.pool).await { + Ok(stats) => Json(stats).into_response(), + Err(err) => { + tracing::error!(error = %err, "Failed to fetch admin stats"); + ( + StatusCode::INTERNAL_SERVER_ERROR, + Json(serde_json::json!({ + "error": "Internal server error", + "message": "Failed to fetch statistics" + })), + ) + .into_response() + } + } +} + +/// Get tags for a project +pub async fn get_project_tags_handler( + State(state): State>, + axum::extract::Path(id): axum::extract::Path, +) -> impl IntoResponse { + let project_id = match uuid::Uuid::parse_str(&id) { + Ok(id) => id, + Err(_) => { + return ( + StatusCode::BAD_REQUEST, + Json(serde_json::json!({ + "error": "Invalid project ID", + "message": "Project ID must be a valid UUID" + })), + ) + .into_response(); + } + }; + + match db::get_tags_for_project(&state.pool, project_id).await { + Ok(tags) => { + let api_tags: Vec = tags.into_iter().map(|t| t.to_api_tag()).collect(); + Json(api_tags).into_response() + } + Err(err) => { + tracing::error!(error = %err, "Failed to fetch tags for project"); + ( + StatusCode::INTERNAL_SERVER_ERROR, + Json(serde_json::json!({ + "error": "Internal server error", + "message": "Failed to fetch tags" + })), + ) + .into_response() + } + } +} + +/// Add a tag to a project (requires authentication) +pub async fn add_project_tag_handler( + State(state): State>, + axum::extract::Path(id): axum::extract::Path, + jar: axum_extra::extract::CookieJar, + Json(payload): Json, +) -> impl IntoResponse { + if auth::check_session(&state, &jar).is_none() { + return auth::require_auth_response().into_response(); + } + let project_id = match uuid::Uuid::parse_str(&id) { + Ok(id) => id, + Err(_) => { + return ( + StatusCode::BAD_REQUEST, + Json(serde_json::json!({ + "error": "Invalid project ID", + "message": "Project ID must be a valid UUID" + })), + ) + .into_response(); + } + }; + + let tag_id = match uuid::Uuid::parse_str(&payload.tag_id) { + Ok(id) => id, + Err(_) => { + return ( + StatusCode::BAD_REQUEST, + Json(serde_json::json!({ + "error": "Invalid tag ID", + "message": "Tag ID must be a valid UUID" + })), + ) + .into_response(); + } + }; + + match db::add_tag_to_project(&state.pool, project_id, tag_id).await { + Ok(()) => ( + StatusCode::CREATED, + Json(serde_json::json!({ + "message": "Tag added to project" + })), + ) + .into_response(), + Err(sqlx::Error::Database(db_err)) if db_err.is_foreign_key_violation() => ( + StatusCode::NOT_FOUND, + Json(serde_json::json!({ + "error": "Not found", + "message": "Project or tag not found" + })), + ) + .into_response(), + Err(err) => { + tracing::error!(error = %err, "Failed to add tag to project"); + ( + StatusCode::INTERNAL_SERVER_ERROR, + Json(serde_json::json!({ + "error": "Internal server error", + "message": "Failed to add tag to project" + })), + ) + .into_response() + } + } +} + +/// Remove a tag from a project (requires authentication) +pub async fn remove_project_tag_handler( + State(state): State>, + axum::extract::Path((id, tag_id)): axum::extract::Path<(String, String)>, + jar: axum_extra::extract::CookieJar, +) -> impl IntoResponse { + if auth::check_session(&state, &jar).is_none() { + return auth::require_auth_response().into_response(); + } + let project_id = match uuid::Uuid::parse_str(&id) { + Ok(id) => id, + Err(_) => { + return ( + StatusCode::BAD_REQUEST, + Json(serde_json::json!({ + "error": "Invalid project ID", + "message": 
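add_project_tag_handler pairs with the AddProjectTagRequest type from handlers/mod.rs, so the request body is just {"tag_id": "<uuid>"}. A hedged usage sketch, reusing an authenticated reqwest client like the one in the login example earlier (base URL and both IDs are placeholders):

// Illustrative call against the new endpoint; `client` must already hold a valid
// admin_session cookie, and both IDs are placeholders.
async fn attach_tag(
    client: &reqwest::Client,
    base: &str,
    project_id: &str,
    tag_id: &str,
) -> Result<reqwest::StatusCode, reqwest::Error> {
    let resp = client
        .post(format!("{base}/api/projects/{project_id}/tags"))
        .json(&serde_json::json!({ "tag_id": tag_id }))
        .send()
        .await?;
    // 201 Created on success, 404 if the project or tag does not exist,
    // 401 without a valid session.
    Ok(resp.status())
}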
"Project ID must be a valid UUID" + })), + ) + .into_response(); + } + }; + + let tag_id = match uuid::Uuid::parse_str(&tag_id) { + Ok(id) => id, + Err(_) => { + return ( + StatusCode::BAD_REQUEST, + Json(serde_json::json!({ + "error": "Invalid tag ID", + "message": "Tag ID must be a valid UUID" + })), + ) + .into_response(); + } + }; + + match db::remove_tag_from_project(&state.pool, project_id, tag_id).await { + Ok(()) => ( + StatusCode::OK, + Json(serde_json::json!({ + "message": "Tag removed from project" + })), + ) + .into_response(), + Err(err) => { + tracing::error!(error = %err, "Failed to remove tag from project"); + ( + StatusCode::INTERNAL_SERVER_ERROR, + Json(serde_json::json!({ + "error": "Internal server error", + "message": "Failed to remove tag from project" + })), + ) + .into_response() + } + } +} diff --git a/src/handlers/settings.rs b/src/handlers/settings.rs new file mode 100644 index 0000000..cd8b756 --- /dev/null +++ b/src/handlers/settings.rs @@ -0,0 +1,54 @@ +use axum::{Json, extract::State, http::StatusCode, response::IntoResponse}; +use std::sync::Arc; + +use crate::{auth, db, state::AppState}; + +/// Get site settings (public endpoint) +pub async fn get_settings_handler(State(state): State>) -> impl IntoResponse { + match db::get_site_settings(&state.pool).await { + Ok(settings) => Json(settings).into_response(), + Err(err) => { + tracing::error!(error = %err, "Failed to fetch site settings"); + ( + StatusCode::INTERNAL_SERVER_ERROR, + Json(serde_json::json!({ + "error": "Internal server error", + "message": "Failed to fetch settings" + })), + ) + .into_response() + } + } +} + +/// Update site settings (requires authentication) +pub async fn update_settings_handler( + State(state): State>, + jar: axum_extra::extract::CookieJar, + Json(payload): Json, +) -> impl IntoResponse { + // Check authentication + if auth::check_session(&state, &jar).is_none() { + return auth::require_auth_response().into_response(); + } + + match db::update_site_settings(&state.pool, &payload).await { + Ok(settings) => { + // TODO: Invalidate ISR cache for homepage and affected routes when ISR is implemented + // TODO: Add event log entry for settings update when events table is implemented + tracing::info!("Site settings updated"); + Json(settings).into_response() + } + Err(err) => { + tracing::error!(error = %err, "Failed to update site settings"); + ( + StatusCode::INTERNAL_SERVER_ERROR, + Json(serde_json::json!({ + "error": "Internal server error", + "message": "Failed to update settings" + })), + ) + .into_response() + } + } +} diff --git a/src/handlers/tags.rs b/src/handlers/tags.rs new file mode 100644 index 0000000..56a0ed3 --- /dev/null +++ b/src/handlers/tags.rs @@ -0,0 +1,328 @@ +use axum::{Json, extract::State, http::StatusCode, response::IntoResponse}; +use std::sync::Arc; + +use crate::{ + auth, db, + handlers::{CreateTagRequest, UpdateTagRequest}, + state::AppState, + utils, +}; + +/// List all tags with project counts (public endpoint) +pub async fn list_tags_handler(State(state): State>) -> impl IntoResponse { + match db::get_all_tags_with_counts(&state.pool).await { + Ok(tags_with_counts) => { + let api_tags: Vec = tags_with_counts + .into_iter() + .map(|(tag, count)| db::ApiTagWithCount { + tag: tag.to_api_tag(), + project_count: count, + }) + .collect(); + Json(api_tags).into_response() + } + Err(err) => { + tracing::error!(error = %err, "Failed to fetch tags"); + ( + StatusCode::INTERNAL_SERVER_ERROR, + Json(serde_json::json!({ + "error": "Internal server error", + 
"message": "Failed to fetch tags" + })), + ) + .into_response() + } + } +} + +/// Create a new tag (requires authentication) +pub async fn create_tag_handler( + State(state): State>, + jar: axum_extra::extract::CookieJar, + Json(payload): Json, +) -> impl IntoResponse { + if auth::check_session(&state, &jar).is_none() { + return auth::require_auth_response().into_response(); + } + if payload.name.trim().is_empty() { + return ( + StatusCode::BAD_REQUEST, + Json(serde_json::json!({ + "error": "Validation error", + "message": "Tag name cannot be empty" + })), + ) + .into_response(); + } + + // Validate color if provided + if let Some(ref color) = payload.color { + if !utils::validate_hex_color(color) { + return ( + StatusCode::BAD_REQUEST, + Json(serde_json::json!({ + "error": "Validation error", + "message": "Invalid color format. Must be 6-character hex (e.g., '3b82f6')" + })), + ) + .into_response(); + } + } + + match db::create_tag( + &state.pool, + &payload.name, + payload.slug.as_deref(), + None, // icon - not yet supported in admin UI + payload.color.as_deref(), + ) + .await + { + Ok(tag) => (StatusCode::CREATED, Json(tag.to_api_tag())).into_response(), + Err(sqlx::Error::Database(db_err)) if db_err.is_unique_violation() => ( + StatusCode::CONFLICT, + Json(serde_json::json!({ + "error": "Conflict", + "message": "A tag with this name or slug already exists" + })), + ) + .into_response(), + Err(err) => { + tracing::error!(error = %err, "Failed to create tag"); + ( + StatusCode::INTERNAL_SERVER_ERROR, + Json(serde_json::json!({ + "error": "Internal server error", + "message": "Failed to create tag" + })), + ) + .into_response() + } + } +} + +/// Get a tag by slug with associated projects +pub async fn get_tag_handler( + State(state): State>, + axum::extract::Path(slug): axum::extract::Path, +) -> impl IntoResponse { + match db::get_tag_by_slug(&state.pool, &slug).await { + Ok(Some(tag)) => match db::get_projects_for_tag(&state.pool, tag.id).await { + Ok(projects) => { + let response = serde_json::json!({ + "tag": tag.to_api_tag(), + "projects": projects.into_iter().map(|p| p.to_api_project()).collect::>() + }); + Json(response).into_response() + } + Err(err) => { + tracing::error!(error = %err, "Failed to fetch projects for tag"); + ( + StatusCode::INTERNAL_SERVER_ERROR, + Json(serde_json::json!({ + "error": "Internal server error", + "message": "Failed to fetch projects" + })), + ) + .into_response() + } + }, + Ok(None) => ( + StatusCode::NOT_FOUND, + Json(serde_json::json!({ + "error": "Not found", + "message": "Tag not found" + })), + ) + .into_response(), + Err(err) => { + tracing::error!(error = %err, "Failed to fetch tag"); + ( + StatusCode::INTERNAL_SERVER_ERROR, + Json(serde_json::json!({ + "error": "Internal server error", + "message": "Failed to fetch tag" + })), + ) + .into_response() + } + } +} + +/// Update a tag (requires authentication) +pub async fn update_tag_handler( + State(state): State>, + axum::extract::Path(slug): axum::extract::Path, + jar: axum_extra::extract::CookieJar, + Json(payload): Json, +) -> impl IntoResponse { + if auth::check_session(&state, &jar).is_none() { + return auth::require_auth_response().into_response(); + } + if payload.name.trim().is_empty() { + return ( + StatusCode::BAD_REQUEST, + Json(serde_json::json!({ + "error": "Validation error", + "message": "Tag name cannot be empty" + })), + ) + .into_response(); + } + + // Validate color if provided + if let Some(ref color) = payload.color { + if !utils::validate_hex_color(color) { + return ( + 
StatusCode::BAD_REQUEST, + Json(serde_json::json!({ + "error": "Validation error", + "message": "Invalid color format. Must be 6-character hex (e.g., '3b82f6')" + })), + ) + .into_response(); + } + } + + let tag = match db::get_tag_by_slug(&state.pool, &slug).await { + Ok(Some(tag)) => tag, + Ok(None) => { + return ( + StatusCode::NOT_FOUND, + Json(serde_json::json!({ + "error": "Not found", + "message": "Tag not found" + })), + ) + .into_response(); + } + Err(err) => { + tracing::error!(error = %err, "Failed to fetch tag"); + return ( + StatusCode::INTERNAL_SERVER_ERROR, + Json(serde_json::json!({ + "error": "Internal server error", + "message": "Failed to fetch tag" + })), + ) + .into_response(); + } + }; + + match db::update_tag( + &state.pool, + tag.id, + &payload.name, + payload.slug.as_deref(), + None, // icon - not yet supported in admin UI + payload.color.as_deref(), + ) + .await + { + Ok(updated_tag) => Json(updated_tag.to_api_tag()).into_response(), + Err(sqlx::Error::Database(db_err)) if db_err.is_unique_violation() => ( + StatusCode::CONFLICT, + Json(serde_json::json!({ + "error": "Conflict", + "message": "A tag with this name or slug already exists" + })), + ) + .into_response(), + Err(err) => { + tracing::error!(error = %err, "Failed to update tag"); + ( + StatusCode::INTERNAL_SERVER_ERROR, + Json(serde_json::json!({ + "error": "Internal server error", + "message": "Failed to update tag" + })), + ) + .into_response() + } + } +} + +/// Get related tags by cooccurrence +pub async fn get_related_tags_handler( + State(state): State>, + axum::extract::Path(slug): axum::extract::Path, +) -> impl IntoResponse { + let tag = match db::get_tag_by_slug(&state.pool, &slug).await { + Ok(Some(tag)) => tag, + Ok(None) => { + return ( + StatusCode::NOT_FOUND, + Json(serde_json::json!({ + "error": "Not found", + "message": "Tag not found" + })), + ) + .into_response(); + } + Err(err) => { + tracing::error!(error = %err, "Failed to fetch tag"); + return ( + StatusCode::INTERNAL_SERVER_ERROR, + Json(serde_json::json!({ + "error": "Internal server error", + "message": "Failed to fetch tag" + })), + ) + .into_response(); + } + }; + + match db::get_related_tags(&state.pool, tag.id, 10).await { + Ok(related_tags) => { + let api_related_tags: Vec = related_tags + .into_iter() + .map(|(tag, count)| db::ApiRelatedTag { + tag: tag.to_api_tag(), + cooccurrence_count: count, + }) + .collect(); + Json(api_related_tags).into_response() + } + Err(err) => { + tracing::error!(error = %err, "Failed to fetch related tags"); + ( + StatusCode::INTERNAL_SERVER_ERROR, + Json(serde_json::json!({ + "error": "Internal server error", + "message": "Failed to fetch related tags" + })), + ) + .into_response() + } + } +} + +/// Recalculate tag cooccurrence matrix (requires authentication) +pub async fn recalculate_cooccurrence_handler( + State(state): State>, + jar: axum_extra::extract::CookieJar, +) -> impl IntoResponse { + if auth::check_session(&state, &jar).is_none() { + return auth::require_auth_response().into_response(); + } + match db::recalculate_tag_cooccurrence(&state.pool).await { + Ok(()) => ( + StatusCode::OK, + Json(serde_json::json!({ + "message": "Tag cooccurrence recalculated successfully" + })), + ) + .into_response(), + Err(err) => { + tracing::error!(error = %err, "Failed to recalculate cooccurrence"); + ( + StatusCode::INTERNAL_SERVER_ERROR, + Json(serde_json::json!({ + "error": "Internal server error", + "message": "Failed to recalculate cooccurrence" + })), + ) + .into_response() + } + } +} diff 
--git a/src/main.rs b/src/main.rs index 58ae48c..fc34ed2 100644 --- a/src/main.rs +++ b/src/main.rs @@ -1,10 +1,3 @@ -use axum::{ - Json, Router, - extract::{ConnectInfo, Request, State}, - http::{HeaderMap, StatusCode}, - response::{IntoResponse, Response}, - routing::any, -}; use clap::Parser; use std::net::SocketAddr; use std::path::PathBuf; @@ -18,17 +11,23 @@ mod auth; mod config; mod db; mod formatter; +mod handlers; mod health; mod middleware; mod og; +mod proxy; mod r2; +mod routes; +mod state; mod tarpit; -use assets::{serve_embedded_asset, try_serve_embedded_asset, try_serve_prerendered_page}; +mod utils; + use config::{Args, ListenAddr}; use formatter::{CustomJsonFormatter, CustomPrettyFormatter}; use health::HealthChecker; use middleware::RequestIdLayer; -use tarpit::{TarpitConfig, TarpitState, is_malicious_path, tarpit_handler}; +use state::AppState; +use tarpit::{TarpitConfig, TarpitState}; fn init_tracing() { let use_json = std::env::var("LOG_JSON") @@ -167,7 +166,9 @@ async fn main() { let unix_client = unix_client_for_health.clone(); let pool = pool_for_health.clone(); - async move { perform_health_check(downstream_url, http_client, unix_client, Some(pool)).await } + async move { + proxy::perform_health_check(downstream_url, http_client, unix_client, Some(pool)).await + } })); let tarpit_config = TarpitConfig::from_env(); @@ -202,26 +203,11 @@ async fn main() { } }); - // Build base router (shared routes) - fn build_base_router() -> Router> { - Router::new() - .nest("/api", api_routes()) - .route("/api/", any(api_root_404_handler)) - .route( - "/_app/{*path}", - axum::routing::get(serve_embedded_asset).head(serve_embedded_asset), - ) - .route("/pgp", axum::routing::get(handle_pgp_route)) - .route("/publickey.asc", axum::routing::get(serve_pgp_key)) - .route("/pgp.asc", axum::routing::get(serve_pgp_key)) - .route("/.well-known/pgpkey.asc", axum::routing::get(serve_pgp_key)) - .route("/keys", axum::routing::get(redirect_to_pgp)) - } - + // Apply middleware to router fn apply_middleware( - router: Router>, + router: axum::Router>, trust_request_id: Option, - ) -> Router> { + ) -> axum::Router> { router .layer(TraceLayer::new_for_http()) .layer(RequestIdLayer::new(trust_request_id)) @@ -240,7 +226,7 @@ async fn main() { match listen_addr { ListenAddr::Tcp(addr) => { let app = apply_middleware( - build_base_router().fallback(fallback_handler_tcp), + routes::build_base_router().fallback(proxy::fallback_handler_tcp), trust_request_id, ) .with_state(state); @@ -265,7 +251,7 @@ async fn main() { } ListenAddr::Unix(path) => { let app = apply_middleware( - build_base_router().fallback(fallback_handler_unix), + routes::build_base_router().fallback(proxy::fallback_handler_unix), trust_request_id, ) .with_state(state); @@ -290,1929 +276,3 @@ async fn main() { task.await.expect("Listener task panicked"); } } - -#[derive(Clone)] -pub struct AppState { - downstream_url: String, - http_client: reqwest::Client, - unix_client: Option, - health_checker: Arc, - tarpit_state: Arc, - pool: sqlx::PgPool, - session_manager: Arc, -} - -#[derive(Debug)] -pub enum ProxyError { - Network(reqwest::Error), - Other(String), -} - -impl std::fmt::Display for ProxyError { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - match self { - ProxyError::Network(e) => write!(f, "Network error: {e}"), - ProxyError::Other(s) => write!(f, "{s}"), - } - } -} - -impl std::error::Error for ProxyError {} - -fn is_static_asset(path: &str) -> bool { - path.starts_with("/node_modules/") - || 
path.starts_with("/@") // Vite internals like /@vite/client, /@fs/, /@id/ - || path.starts_with("/.svelte-kit/") - || path.starts_with("/.well-known/") - || path.ends_with(".woff2") - || path.ends_with(".woff") - || path.ends_with(".ttf") - || path.ends_with(".ico") - || path.ends_with(".png") - || path.ends_with(".jpg") - || path.ends_with(".svg") - || path.ends_with(".webp") - || path.ends_with(".css") - || path.ends_with(".js") - || path.ends_with(".map") -} - -fn is_page_route(path: &str) -> bool { - !path.starts_with("/node_modules/") - && !path.starts_with("/@") - && !path.starts_with("/.svelte-kit/") - && !path.contains('.') -} - -fn api_routes() -> Router> { - Router::new() - .route("/", any(api_root_404_handler)) - .route( - "/health", - axum::routing::get(health_handler).head(health_handler), - ) - // Authentication endpoints (public) - .route("/login", axum::routing::post(api_login_handler)) - .route("/logout", axum::routing::post(api_logout_handler)) - .route("/session", axum::routing::get(api_session_handler)) - // Projects - GET is public (shows all for admin, only non-hidden for public) - // POST/PUT/DELETE require authentication - .route( - "/projects", - axum::routing::get(projects_handler).post(create_project_handler), - ) - .route( - "/projects/{id}", - axum::routing::get(get_project_handler) - .put(update_project_handler) - .delete(delete_project_handler), - ) - // Project tags - authentication checked in handlers - .route( - "/projects/{id}/tags", - axum::routing::get(get_project_tags_handler).post(add_project_tag_handler), - ) - .route( - "/projects/{id}/tags/{tag_id}", - axum::routing::delete(remove_project_tag_handler), - ) - // Tags - authentication checked in handlers - .route( - "/tags", - axum::routing::get(list_tags_handler).post(create_tag_handler), - ) - .route( - "/tags/{slug}", - axum::routing::get(get_tag_handler).put(update_tag_handler), - ) - .route( - "/tags/{slug}/related", - axum::routing::get(get_related_tags_handler), - ) - .route( - "/tags/recalculate-cooccurrence", - axum::routing::post(recalculate_cooccurrence_handler), - ) - // Admin stats - requires authentication - .route("/stats", axum::routing::get(get_admin_stats_handler)) - // Site settings - GET is public, PUT requires authentication - .route( - "/settings", - axum::routing::get(get_settings_handler).put(update_settings_handler), - ) - // Icon API - proxy to SvelteKit (authentication handled by SvelteKit) - .route("/icons/{*path}", axum::routing::get(proxy_icons_handler)) - .fallback(api_404_and_method_handler) -} - -async fn api_root_404_handler(uri: axum::http::Uri) -> impl IntoResponse { - api_404_handler(uri).await -} - -fn accepts_html(headers: &HeaderMap) -> bool { - if let Some(accept) = headers.get(axum::http::header::ACCEPT) { - if let Ok(accept_str) = accept.to_str() { - return accept_str.contains("text/html") || accept_str.contains("*/*"); - } - } - // Default to true for requests without Accept header (browsers typically send it) - true -} - -/// Determines if request prefers raw content (CLI tools) over HTML -fn prefers_raw_content(headers: &HeaderMap) -> bool { - // Check User-Agent for known CLI tools first (most reliable) - if let Some(ua) = headers.get(axum::http::header::USER_AGENT) { - if let Ok(ua_str) = ua.to_str() { - let ua_lower = ua_str.to_lowercase(); - if ua_lower.starts_with("curl/") - || ua_lower.starts_with("wget/") - || ua_lower.starts_with("httpie/") - || ua_lower.contains("curlie") - { - return true; - } - } - } - - // Check Accept header - if it 
explicitly prefers text/html, serve HTML - if let Some(accept) = headers.get(axum::http::header::ACCEPT) { - if let Ok(accept_str) = accept.to_str() { - // If text/html appears before */* in the list, they prefer HTML - if let Some(html_pos) = accept_str.find("text/html") { - if let Some(wildcard_pos) = accept_str.find("*/*") { - return html_pos > wildcard_pos; - } - // Has text/html but no */* → prefers HTML - return false; - } - // Has */* but no text/html → probably a CLI tool - if accept_str.contains("*/*") && !accept_str.contains("text/html") { - return true; - } - } - } - - // No Accept header → assume browser (safer default) - false -} - -fn serve_error_page(status: StatusCode) -> Response { - let status_code = status.as_u16(); - - if let Some(html) = assets::get_error_page(status_code) { - let mut headers = HeaderMap::new(); - headers.insert( - axum::http::header::CONTENT_TYPE, - axum::http::HeaderValue::from_static("text/html; charset=utf-8"), - ); - headers.insert( - axum::http::header::CACHE_CONTROL, - axum::http::HeaderValue::from_static("no-cache, no-store, must-revalidate"), - ); - - (status, headers, html).into_response() - } else { - // Fallback for undefined error codes (500 generic page) - tracing::warn!( - status_code, - "No prerendered error page found for status code - using fallback" - ); - - if let Some(fallback_html) = assets::get_error_page(500) { - let mut headers = HeaderMap::new(); - headers.insert( - axum::http::header::CONTENT_TYPE, - axum::http::HeaderValue::from_static("text/html; charset=utf-8"), - ); - headers.insert( - axum::http::header::CACHE_CONTROL, - axum::http::HeaderValue::from_static("no-cache, no-store, must-revalidate"), - ); - - (status, headers, fallback_html).into_response() - } else { - // Last resort: plaintext (should never happen if 500.html exists) - (status, format!("Error {}", status_code)).into_response() - } - } -} - -async fn health_handler(State(state): State>) -> impl IntoResponse { - let healthy = state.health_checker.check().await; - - if healthy { - (StatusCode::OK, "OK") - } else { - (StatusCode::SERVICE_UNAVAILABLE, "Unhealthy") - } -} - -async fn serve_pgp_key() -> impl IntoResponse { - if let Some(content) = assets::get_static_file("publickey.asc") { - let mut headers = HeaderMap::new(); - headers.insert( - axum::http::header::CONTENT_TYPE, - axum::http::HeaderValue::from_static("application/pgp-keys"), - ); - headers.insert( - axum::http::header::CONTENT_DISPOSITION, - axum::http::HeaderValue::from_static("attachment; filename=\"publickey.asc\""), - ); - headers.insert( - axum::http::header::CACHE_CONTROL, - axum::http::HeaderValue::from_static("public, max-age=86400"), - ); - (StatusCode::OK, headers, content).into_response() - } else { - (StatusCode::NOT_FOUND, "PGP key not found").into_response() - } -} - -async fn redirect_to_pgp() -> impl IntoResponse { - axum::response::Redirect::permanent("/pgp") -} - -async fn handle_pgp_route( - State(state): State>, - headers: HeaderMap, - req: Request, -) -> Response { - if prefers_raw_content(&headers) { - // Serve raw .asc file for CLI tools - serve_pgp_key().await.into_response() - } else { - // Proxy to Bun for HTML page - isr_handler(State(state), req).await - } -} - -async fn api_404_and_method_handler(req: Request) -> impl IntoResponse { - let method = req.method(); - let uri = req.uri(); - let path = uri.path(); - - if method != axum::http::Method::GET - && method != axum::http::Method::HEAD - && method != axum::http::Method::OPTIONS - { - let content_type = req - 
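prefers_raw_content, removed here along with the other inline proxy helpers, decides raw-versus-HTML from the User-Agent first and then from the relative order of text/html and */* in Accept. A few concrete cases, written as an assumed test against wherever the helper now lives (this excerpt only shows it leaving main.rs, presumably for src/proxy.rs):

// Assumed behavior, based on the prefers_raw_content version removed from main.rs above.
#[cfg(test)]
mod content_negotiation_tests {
    use axum::http::{HeaderMap, HeaderValue, header};

    use super::prefers_raw_content;

    #[test]
    fn curl_user_agent_gets_raw_content() {
        let mut headers = HeaderMap::new();
        headers.insert(header::USER_AGENT, HeaderValue::from_static("curl/8.5.0"));
        assert!(prefers_raw_content(&headers));
    }

    #[test]
    fn browser_style_accept_prefers_html() {
        let mut headers = HeaderMap::new();
        // text/html listed before */* -> HTML wins.
        headers.insert(
            header::ACCEPT,
            HeaderValue::from_static("text/html,application/xhtml+xml,*/*;q=0.8"),
        );
        assert!(!prefers_raw_content(&headers));
    }

    #[test]
    fn bare_wildcard_accept_is_treated_as_cli() {
        let mut headers = HeaderMap::new();
        headers.insert(header::ACCEPT, HeaderValue::from_static("*/*"));
        assert!(prefers_raw_content(&headers));
    }
}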
.headers() - .get(axum::http::header::CONTENT_TYPE) - .and_then(|v| v.to_str().ok()); - - if let Some(ct) = content_type { - if !ct.starts_with("application/json") { - return ( - StatusCode::UNSUPPORTED_MEDIA_TYPE, - Json(serde_json::json!({ - "error": "Unsupported media type", - "message": "API endpoints only accept application/json" - })), - ) - .into_response(); - } - } else if method == axum::http::Method::POST - || method == axum::http::Method::PUT - || method == axum::http::Method::PATCH - { - // POST/PUT/PATCH require Content-Type header - return ( - StatusCode::BAD_REQUEST, - Json(serde_json::json!({ - "error": "Missing Content-Type header", - "message": "Content-Type: application/json is required" - })), - ) - .into_response(); - } - } - - // Route not found - tracing::warn!(path = %path, method = %method, "API route not found"); - ( - StatusCode::NOT_FOUND, - Json(serde_json::json!({ - "error": "Not found", - "path": path - })), - ) - .into_response() -} - -async fn api_404_handler(uri: axum::http::Uri) -> impl IntoResponse { - let req = Request::builder() - .uri(uri) - .body(axum::body::Body::empty()) - .unwrap(); - - api_404_and_method_handler(req).await -} - -async fn projects_handler( - State(state): State>, - jar: axum_extra::extract::CookieJar, -) -> impl IntoResponse { - let is_admin = check_session(&state, &jar).is_some(); - - if is_admin { - // Admin view: return all projects with tags - match db::get_all_projects_with_tags_admin(&state.pool).await { - Ok(projects_with_tags) => { - let response: Vec = projects_with_tags - .into_iter() - .map(|(project, tags)| project.to_api_admin_project(tags)) - .collect(); - Json(response).into_response() - } - Err(err) => { - tracing::error!(error = %err, "Failed to fetch admin projects"); - ( - StatusCode::INTERNAL_SERVER_ERROR, - Json(serde_json::json!({ - "error": "Internal server error", - "message": "Failed to fetch projects" - })), - ) - .into_response() - } - } - } else { - // Public view: return non-hidden projects with tags - match db::get_public_projects_with_tags(&state.pool).await { - Ok(projects_with_tags) => { - let response: Vec = projects_with_tags - .into_iter() - .map(|(project, tags)| project.to_api_admin_project(tags)) - .collect(); - Json(response).into_response() - } - Err(err) => { - tracing::error!(error = %err, "Failed to fetch public projects"); - ( - StatusCode::INTERNAL_SERVER_ERROR, - Json(serde_json::json!({ - "error": "Internal server error", - "message": "Failed to fetch projects" - })), - ) - .into_response() - } - } - } -} - -// Icon API handler - proxy to SvelteKit -async fn proxy_icons_handler( - State(state): State>, - jar: axum_extra::extract::CookieJar, - axum::extract::Path(path): axum::extract::Path, - req: Request, -) -> impl IntoResponse { - let full_path = format!("/api/icons/{}", path); - let query = req.uri().query().unwrap_or(""); - - let bun_url = if state.downstream_url.starts_with('/') || state.downstream_url.starts_with("./") - { - if query.is_empty() { - format!("http://localhost{}", full_path) - } else { - format!("http://localhost{}?{}", full_path, query) - } - } else if query.is_empty() { - format!("{}{}", state.downstream_url, full_path) - } else { - format!("{}{}?{}", state.downstream_url, full_path, query) - }; - - // Build trusted headers with session info - let mut forward_headers = HeaderMap::new(); - - if let Some(cookie) = jar.get("admin_session") { - if let Ok(session_id) = ulid::Ulid::from_string(cookie.value()) { - if let Some(session) = 
state.session_manager.validate_session(session_id) { - if let Ok(username_value) = axum::http::HeaderValue::from_str(&session.username) { - forward_headers.insert("x-session-user", username_value); - } - } - } - } - - match proxy_to_bun(&bun_url, state, forward_headers).await { - Ok((status, headers, body)) => (status, headers, body).into_response(), - Err(err) => { - tracing::error!(error = %err, path = %full_path, "Failed to proxy icon request"); - ( - StatusCode::BAD_GATEWAY, - Json(serde_json::json!({ - "error": "Failed to fetch icon data" - })), - ) - .into_response() - } - } -} - -// Project CRUD handlers - -async fn get_project_handler( - State(state): State>, - axum::extract::Path(id): axum::extract::Path, - jar: axum_extra::extract::CookieJar, -) -> impl IntoResponse { - let project_id = match uuid::Uuid::parse_str(&id) { - Ok(id) => id, - Err(_) => { - return ( - StatusCode::BAD_REQUEST, - Json(serde_json::json!({ - "error": "Invalid project ID", - "message": "Project ID must be a valid UUID" - })), - ) - .into_response(); - } - }; - - let is_admin = check_session(&state, &jar).is_some(); - - match db::get_project_by_id_with_tags(&state.pool, project_id).await { - Ok(Some((project, tags))) => { - // If project is hidden and user is not admin, return 404 - if project.status == db::ProjectStatus::Hidden && !is_admin { - return ( - StatusCode::NOT_FOUND, - Json(serde_json::json!({ - "error": "Not found", - "message": "Project not found" - })), - ) - .into_response(); - } - - // Return full project details - Json(project.to_api_admin_project(tags)).into_response() - } - Ok(None) => ( - StatusCode::NOT_FOUND, - Json(serde_json::json!({ - "error": "Not found", - "message": "Project not found" - })), - ) - .into_response(), - Err(err) => { - tracing::error!(error = %err, "Failed to fetch project"); - ( - StatusCode::INTERNAL_SERVER_ERROR, - Json(serde_json::json!({ - "error": "Internal server error", - "message": "Failed to fetch project" - })), - ) - .into_response() - } - } -} - -async fn create_project_handler( - State(state): State>, - jar: axum_extra::extract::CookieJar, - Json(payload): Json, -) -> impl IntoResponse { - // Check auth - if check_session(&state, &jar).is_none() { - return require_auth_response().into_response(); - } - - // Validate request - if payload.name.trim().is_empty() { - return ( - StatusCode::BAD_REQUEST, - Json(serde_json::json!({ - "error": "Validation error", - "message": "Project name cannot be empty" - })), - ) - .into_response(); - } - - if payload.short_description.trim().is_empty() { - return ( - StatusCode::BAD_REQUEST, - Json(serde_json::json!({ - "error": "Validation error", - "message": "Project short description cannot be empty" - })), - ) - .into_response(); - } - - // Parse tag UUIDs - let tag_ids: Result, _> = payload - .tag_ids - .iter() - .map(|id| uuid::Uuid::parse_str(id)) - .collect(); - - let tag_ids = match tag_ids { - Ok(ids) => ids, - Err(_) => { - return ( - StatusCode::BAD_REQUEST, - Json(serde_json::json!({ - "error": "Validation error", - "message": "Invalid tag UUID format" - })), - ) - .into_response(); - } - }; - - // Create project - let project = match db::create_project( - &state.pool, - &payload.name, - payload.slug.as_deref(), - &payload.short_description, - &payload.description, - payload.status, - payload.github_repo.as_deref(), - payload.demo_url.as_deref(), - ) - .await - { - Ok(p) => p, - Err(sqlx::Error::Database(db_err)) if db_err.is_unique_violation() => { - return ( - StatusCode::CONFLICT, - 
Json(serde_json::json!({ - "error": "Conflict", - "message": "A project with this slug already exists" - })), - ) - .into_response(); - } - Err(err) => { - tracing::error!(error = %err, "Failed to create project"); - return ( - StatusCode::INTERNAL_SERVER_ERROR, - Json(serde_json::json!({ - "error": "Internal server error", - "message": "Failed to create project" - })), - ) - .into_response(); - } - }; - - // Set tags - if let Err(err) = db::set_project_tags(&state.pool, project.id, &tag_ids).await { - tracing::error!(error = %err, project_id = %project.id, "Failed to set project tags"); - } - - // Fetch project with tags to return - let (project, tags) = match db::get_project_by_id_with_tags(&state.pool, project.id).await { - Ok(Some(data)) => data, - Ok(None) => { - tracing::error!(project_id = %project.id, "Project not found after creation"); - return ( - StatusCode::INTERNAL_SERVER_ERROR, - Json(serde_json::json!({ - "error": "Internal server error", - "message": "Failed to fetch created project" - })), - ) - .into_response(); - } - Err(err) => { - tracing::error!(error = %err, project_id = %project.id, "Failed to fetch created project"); - return ( - StatusCode::INTERNAL_SERVER_ERROR, - Json(serde_json::json!({ - "error": "Internal server error", - "message": "Failed to fetch created project" - })), - ) - .into_response(); - } - }; - - tracing::info!(project_id = %project.id, project_name = %project.name, "Project created"); - - ( - StatusCode::CREATED, - Json(project.to_api_admin_project(tags)), - ) - .into_response() -} - -async fn update_project_handler( - State(state): State>, - axum::extract::Path(id): axum::extract::Path, - jar: axum_extra::extract::CookieJar, - Json(payload): Json, -) -> impl IntoResponse { - // Check auth - if check_session(&state, &jar).is_none() { - return require_auth_response().into_response(); - } - - // Parse project ID - let project_id = match uuid::Uuid::parse_str(&id) { - Ok(id) => id, - Err(_) => { - return ( - StatusCode::BAD_REQUEST, - Json(serde_json::json!({ - "error": "Invalid project ID", - "message": "Project ID must be a valid UUID" - })), - ) - .into_response(); - } - }; - - // Validate exists - if db::get_project_by_id(&state.pool, project_id) - .await - .ok() - .flatten() - .is_none() - { - return ( - StatusCode::NOT_FOUND, - Json(serde_json::json!({ - "error": "Not found", - "message": "Project not found" - })), - ) - .into_response(); - } - - // Validate request - if payload.name.trim().is_empty() { - return ( - StatusCode::BAD_REQUEST, - Json(serde_json::json!({ - "error": "Validation error", - "message": "Project name cannot be empty" - })), - ) - .into_response(); - } - - if payload.short_description.trim().is_empty() { - return ( - StatusCode::BAD_REQUEST, - Json(serde_json::json!({ - "error": "Validation error", - "message": "Project short description cannot be empty" - })), - ) - .into_response(); - } - - // Parse tag UUIDs - let tag_ids: Result, _> = payload - .tag_ids - .iter() - .map(|id| uuid::Uuid::parse_str(id)) - .collect(); - - let tag_ids = match tag_ids { - Ok(ids) => ids, - Err(_) => { - return ( - StatusCode::BAD_REQUEST, - Json(serde_json::json!({ - "error": "Validation error", - "message": "Invalid tag UUID format" - })), - ) - .into_response(); - } - }; - - // Update project - let project = match db::update_project( - &state.pool, - project_id, - &payload.name, - payload.slug.as_deref(), - &payload.short_description, - &payload.description, - payload.status, - payload.github_repo.as_deref(), - 
payload.demo_url.as_deref(), - ) - .await - { - Ok(p) => p, - Err(sqlx::Error::Database(db_err)) if db_err.is_unique_violation() => { - return ( - StatusCode::CONFLICT, - Json(serde_json::json!({ - "error": "Conflict", - "message": "A project with this slug already exists" - })), - ) - .into_response(); - } - Err(err) => { - tracing::error!(error = %err, "Failed to update project"); - return ( - StatusCode::INTERNAL_SERVER_ERROR, - Json(serde_json::json!({ - "error": "Internal server error", - "message": "Failed to update project" - })), - ) - .into_response(); - } - }; - - // Update tags (smart diff) - if let Err(err) = db::set_project_tags(&state.pool, project.id, &tag_ids).await { - tracing::error!(error = %err, project_id = %project.id, "Failed to update project tags"); - } - - // Fetch updated project with tags - let (project, tags) = match db::get_project_by_id_with_tags(&state.pool, project.id).await { - Ok(Some(data)) => data, - Ok(None) => { - tracing::error!(project_id = %project.id, "Project not found after update"); - return ( - StatusCode::INTERNAL_SERVER_ERROR, - Json(serde_json::json!({ - "error": "Internal server error", - "message": "Failed to fetch updated project" - })), - ) - .into_response(); - } - Err(err) => { - tracing::error!(error = %err, project_id = %project.id, "Failed to fetch updated project"); - return ( - StatusCode::INTERNAL_SERVER_ERROR, - Json(serde_json::json!({ - "error": "Internal server error", - "message": "Failed to fetch updated project" - })), - ) - .into_response(); - } - }; - - tracing::info!(project_id = %project.id, project_name = %project.name, "Project updated"); - - Json(project.to_api_admin_project(tags)).into_response() -} - -async fn delete_project_handler( - State(state): State>, - axum::extract::Path(id): axum::extract::Path, - jar: axum_extra::extract::CookieJar, -) -> impl IntoResponse { - // Check auth - if check_session(&state, &jar).is_none() { - return require_auth_response().into_response(); - } - - // Parse project ID - let project_id = match uuid::Uuid::parse_str(&id) { - Ok(id) => id, - Err(_) => { - return ( - StatusCode::BAD_REQUEST, - Json(serde_json::json!({ - "error": "Invalid project ID", - "message": "Project ID must be a valid UUID" - })), - ) - .into_response(); - } - }; - - // Fetch project before deletion to return it - let (project, tags) = match db::get_project_by_id_with_tags(&state.pool, project_id).await { - Ok(Some(data)) => data, - Ok(None) => { - return ( - StatusCode::NOT_FOUND, - Json(serde_json::json!({ - "error": "Not found", - "message": "Project not found" - })), - ) - .into_response(); - } - Err(err) => { - tracing::error!(error = %err, "Failed to fetch project before deletion"); - return ( - StatusCode::INTERNAL_SERVER_ERROR, - Json(serde_json::json!({ - "error": "Internal server error", - "message": "Failed to delete project" - })), - ) - .into_response(); - } - }; - - // Delete project (CASCADE handles tags) - match db::delete_project(&state.pool, project_id).await { - Ok(()) => { - tracing::info!(project_id = %project_id, project_name = %project.name, "Project deleted"); - Json(project.to_api_admin_project(tags)).into_response() - } - Err(err) => { - tracing::error!(error = %err, "Failed to delete project"); - ( - StatusCode::INTERNAL_SERVER_ERROR, - Json(serde_json::json!({ - "error": "Internal server error", - "message": "Failed to delete project" - })), - ) - .into_response() - } - } -} - -async fn get_admin_stats_handler( - State(state): State>, - jar: axum_extra::extract::CookieJar, -) -> 
impl IntoResponse { - // Check auth - if check_session(&state, &jar).is_none() { - return require_auth_response().into_response(); - } - - match db::get_admin_stats(&state.pool).await { - Ok(stats) => Json(stats).into_response(), - Err(err) => { - tracing::error!(error = %err, "Failed to fetch admin stats"); - ( - StatusCode::INTERNAL_SERVER_ERROR, - Json(serde_json::json!({ - "error": "Internal server error", - "message": "Failed to fetch statistics" - })), - ) - .into_response() - } - } -} - -// Site settings handlers - -async fn get_settings_handler(State(state): State>) -> impl IntoResponse { - match db::get_site_settings(&state.pool).await { - Ok(settings) => Json(settings).into_response(), - Err(err) => { - tracing::error!(error = %err, "Failed to fetch site settings"); - ( - StatusCode::INTERNAL_SERVER_ERROR, - Json(serde_json::json!({ - "error": "Internal server error", - "message": "Failed to fetch settings" - })), - ) - .into_response() - } - } -} - -async fn update_settings_handler( - State(state): State>, - jar: axum_extra::extract::CookieJar, - Json(payload): Json, -) -> impl IntoResponse { - // Check authentication - if check_session(&state, &jar).is_none() { - return require_auth_response().into_response(); - } - - match db::update_site_settings(&state.pool, &payload).await { - Ok(settings) => { - // TODO: Invalidate ISR cache for homepage and affected routes when ISR is implemented - // TODO: Add event log entry for settings update when events table is implemented - tracing::info!("Site settings updated"); - Json(settings).into_response() - } - Err(err) => { - tracing::error!(error = %err, "Failed to update site settings"); - ( - StatusCode::INTERNAL_SERVER_ERROR, - Json(serde_json::json!({ - "error": "Internal server error", - "message": "Failed to update settings" - })), - ) - .into_response() - } - } -} - -// Tag API handlers - -async fn list_tags_handler(State(state): State>) -> impl IntoResponse { - match db::get_all_tags_with_counts(&state.pool).await { - Ok(tags_with_counts) => { - let api_tags: Vec = tags_with_counts - .into_iter() - .map(|(tag, count)| db::ApiTagWithCount { - tag: tag.to_api_tag(), - project_count: count, - }) - .collect(); - Json(api_tags).into_response() - } - Err(err) => { - tracing::error!(error = %err, "Failed to fetch tags"); - ( - StatusCode::INTERNAL_SERVER_ERROR, - Json(serde_json::json!({ - "error": "Internal server error", - "message": "Failed to fetch tags" - })), - ) - .into_response() - } - } -} - -/// Validate hex color format (6 characters, no hash, no alpha) -fn validate_hex_color(color: &str) -> bool { - color.len() == 6 && color.chars().all(|c| c.is_ascii_hexdigit()) -} - -#[derive(serde::Deserialize)] -struct CreateTagRequest { - name: String, - slug: Option, - color: Option, -} - -async fn create_tag_handler( - State(state): State>, - jar: axum_extra::extract::CookieJar, - Json(payload): Json, -) -> impl IntoResponse { - if check_session(&state, &jar).is_none() { - return require_auth_response().into_response(); - } - if payload.name.trim().is_empty() { - return ( - StatusCode::BAD_REQUEST, - Json(serde_json::json!({ - "error": "Validation error", - "message": "Tag name cannot be empty" - })), - ) - .into_response(); - } - - // Validate color if provided - if let Some(ref color) = payload.color { - if !validate_hex_color(color) { - return ( - StatusCode::BAD_REQUEST, - Json(serde_json::json!({ - "error": "Validation error", - "message": "Invalid color format. 
Must be 6-character hex (e.g., '3b82f6')" - })), - ) - .into_response(); - } - } - - match db::create_tag( - &state.pool, - &payload.name, - payload.slug.as_deref(), - None, // icon - not yet supported in admin UI - payload.color.as_deref(), - ) - .await - { - Ok(tag) => (StatusCode::CREATED, Json(tag.to_api_tag())).into_response(), - Err(sqlx::Error::Database(db_err)) if db_err.is_unique_violation() => ( - StatusCode::CONFLICT, - Json(serde_json::json!({ - "error": "Conflict", - "message": "A tag with this name or slug already exists" - })), - ) - .into_response(), - Err(err) => { - tracing::error!(error = %err, "Failed to create tag"); - ( - StatusCode::INTERNAL_SERVER_ERROR, - Json(serde_json::json!({ - "error": "Internal server error", - "message": "Failed to create tag" - })), - ) - .into_response() - } - } -} - -async fn get_tag_handler( - State(state): State>, - axum::extract::Path(slug): axum::extract::Path, -) -> impl IntoResponse { - match db::get_tag_by_slug(&state.pool, &slug).await { - Ok(Some(tag)) => match db::get_projects_for_tag(&state.pool, tag.id).await { - Ok(projects) => { - let response = serde_json::json!({ - "tag": tag.to_api_tag(), - "projects": projects.into_iter().map(|p| p.to_api_project()).collect::>() - }); - Json(response).into_response() - } - Err(err) => { - tracing::error!(error = %err, "Failed to fetch projects for tag"); - ( - StatusCode::INTERNAL_SERVER_ERROR, - Json(serde_json::json!({ - "error": "Internal server error", - "message": "Failed to fetch projects" - })), - ) - .into_response() - } - }, - Ok(None) => ( - StatusCode::NOT_FOUND, - Json(serde_json::json!({ - "error": "Not found", - "message": "Tag not found" - })), - ) - .into_response(), - Err(err) => { - tracing::error!(error = %err, "Failed to fetch tag"); - ( - StatusCode::INTERNAL_SERVER_ERROR, - Json(serde_json::json!({ - "error": "Internal server error", - "message": "Failed to fetch tag" - })), - ) - .into_response() - } - } -} - -#[derive(serde::Deserialize)] -struct UpdateTagRequest { - name: String, - slug: Option, - color: Option, -} - -async fn update_tag_handler( - State(state): State>, - axum::extract::Path(slug): axum::extract::Path, - jar: axum_extra::extract::CookieJar, - Json(payload): Json, -) -> impl IntoResponse { - if check_session(&state, &jar).is_none() { - return require_auth_response().into_response(); - } - if payload.name.trim().is_empty() { - return ( - StatusCode::BAD_REQUEST, - Json(serde_json::json!({ - "error": "Validation error", - "message": "Tag name cannot be empty" - })), - ) - .into_response(); - } - - // Validate color if provided - if let Some(ref color) = payload.color { - if !validate_hex_color(color) { - return ( - StatusCode::BAD_REQUEST, - Json(serde_json::json!({ - "error": "Validation error", - "message": "Invalid color format. 
Must be 6-character hex (e.g., '3b82f6')" - })), - ) - .into_response(); - } - } - - let tag = match db::get_tag_by_slug(&state.pool, &slug).await { - Ok(Some(tag)) => tag, - Ok(None) => { - return ( - StatusCode::NOT_FOUND, - Json(serde_json::json!({ - "error": "Not found", - "message": "Tag not found" - })), - ) - .into_response(); - } - Err(err) => { - tracing::error!(error = %err, "Failed to fetch tag"); - return ( - StatusCode::INTERNAL_SERVER_ERROR, - Json(serde_json::json!({ - "error": "Internal server error", - "message": "Failed to fetch tag" - })), - ) - .into_response(); - } - }; - - match db::update_tag( - &state.pool, - tag.id, - &payload.name, - payload.slug.as_deref(), - None, // icon - not yet supported in admin UI - payload.color.as_deref(), - ) - .await - { - Ok(updated_tag) => Json(updated_tag.to_api_tag()).into_response(), - Err(sqlx::Error::Database(db_err)) if db_err.is_unique_violation() => ( - StatusCode::CONFLICT, - Json(serde_json::json!({ - "error": "Conflict", - "message": "A tag with this name or slug already exists" - })), - ) - .into_response(), - Err(err) => { - tracing::error!(error = %err, "Failed to update tag"); - ( - StatusCode::INTERNAL_SERVER_ERROR, - Json(serde_json::json!({ - "error": "Internal server error", - "message": "Failed to update tag" - })), - ) - .into_response() - } - } -} - -async fn get_related_tags_handler( - State(state): State>, - axum::extract::Path(slug): axum::extract::Path, -) -> impl IntoResponse { - let tag = match db::get_tag_by_slug(&state.pool, &slug).await { - Ok(Some(tag)) => tag, - Ok(None) => { - return ( - StatusCode::NOT_FOUND, - Json(serde_json::json!({ - "error": "Not found", - "message": "Tag not found" - })), - ) - .into_response(); - } - Err(err) => { - tracing::error!(error = %err, "Failed to fetch tag"); - return ( - StatusCode::INTERNAL_SERVER_ERROR, - Json(serde_json::json!({ - "error": "Internal server error", - "message": "Failed to fetch tag" - })), - ) - .into_response(); - } - }; - - match db::get_related_tags(&state.pool, tag.id, 10).await { - Ok(related_tags) => { - let api_related_tags: Vec = related_tags - .into_iter() - .map(|(tag, count)| db::ApiRelatedTag { - tag: tag.to_api_tag(), - cooccurrence_count: count, - }) - .collect(); - Json(api_related_tags).into_response() - } - Err(err) => { - tracing::error!(error = %err, "Failed to fetch related tags"); - ( - StatusCode::INTERNAL_SERVER_ERROR, - Json(serde_json::json!({ - "error": "Internal server error", - "message": "Failed to fetch related tags" - })), - ) - .into_response() - } - } -} - -async fn recalculate_cooccurrence_handler( - State(state): State>, - jar: axum_extra::extract::CookieJar, -) -> impl IntoResponse { - if check_session(&state, &jar).is_none() { - return require_auth_response().into_response(); - } - match db::recalculate_tag_cooccurrence(&state.pool).await { - Ok(()) => ( - StatusCode::OK, - Json(serde_json::json!({ - "message": "Tag cooccurrence recalculated successfully" - })), - ) - .into_response(), - Err(err) => { - tracing::error!(error = %err, "Failed to recalculate cooccurrence"); - ( - StatusCode::INTERNAL_SERVER_ERROR, - Json(serde_json::json!({ - "error": "Internal server error", - "message": "Failed to recalculate cooccurrence" - })), - ) - .into_response() - } - } -} - -// Authentication API handlers - -fn check_session(state: &AppState, jar: &axum_extra::extract::CookieJar) -> Option { - let session_cookie = jar.get("admin_session")?; - let session_id = ulid::Ulid::from_string(session_cookie.value()).ok()?; - 
state.session_manager.validate_session(session_id) -} - -fn require_auth_response() -> impl IntoResponse { - ( - StatusCode::UNAUTHORIZED, - Json(serde_json::json!({ - "error": "Unauthorized", - "message": "Authentication required" - })), - ) -} - -#[derive(serde::Deserialize)] -struct LoginRequest { - username: String, - password: String, -} - -#[derive(serde::Serialize)] -struct LoginResponse { - success: bool, - username: String, -} - -#[derive(serde::Serialize)] -struct SessionResponse { - authenticated: bool, - username: String, -} - -async fn api_login_handler( - State(state): State>, - jar: axum_extra::extract::CookieJar, - Json(payload): Json, -) -> Result<(axum_extra::extract::CookieJar, Json), impl IntoResponse> { - let user = match auth::get_admin_user(&state.pool, &payload.username).await { - Ok(Some(user)) => user, - Ok(None) => { - return Err(( - StatusCode::UNAUTHORIZED, - Json(serde_json::json!({ - "error": "Invalid credentials", - "message": "Username or password incorrect" - })), - )); - } - Err(err) => { - tracing::error!(error = %err, "Failed to fetch admin user"); - return Err(( - StatusCode::INTERNAL_SERVER_ERROR, - Json(serde_json::json!({ - "error": "Internal server error", - "message": "Failed to authenticate" - })), - )); - } - }; - - let password_valid = match auth::verify_password(&payload.password, &user.password_hash) { - Ok(valid) => valid, - Err(err) => { - tracing::error!(error = %err, "Failed to verify password"); - return Err(( - StatusCode::INTERNAL_SERVER_ERROR, - Json(serde_json::json!({ - "error": "Internal server error", - "message": "Failed to authenticate" - })), - )); - } - }; - - if !password_valid { - return Err(( - StatusCode::UNAUTHORIZED, - Json(serde_json::json!({ - "error": "Invalid credentials", - "message": "Username or password incorrect" - })), - )); - } - - let session = match state - .session_manager - .create_session(user.id, user.username.clone()) - .await - { - Ok(session) => session, - Err(err) => { - tracing::error!(error = %err, "Failed to create session"); - return Err(( - StatusCode::INTERNAL_SERVER_ERROR, - Json(serde_json::json!({ - "error": "Internal server error", - "message": "Failed to create session" - })), - )); - } - }; - - let cookie = - axum_extra::extract::cookie::Cookie::build(("admin_session", session.id.to_string())) - .path("/") - .http_only(true) - .same_site(axum_extra::extract::cookie::SameSite::Lax) - .max_age(time::Duration::days(7)) - .build(); - - let jar = jar.add(cookie); - - tracing::info!(username = %user.username, "User logged in"); - - Ok(( - jar, - Json(LoginResponse { - success: true, - username: user.username, - }), - )) -} - -async fn api_logout_handler( - State(state): State>, - jar: axum_extra::extract::CookieJar, -) -> (axum_extra::extract::CookieJar, StatusCode) { - if let Some(cookie) = jar.get("admin_session") { - if let Ok(session_id) = ulid::Ulid::from_string(cookie.value()) { - if let Err(e) = state.session_manager.delete_session(session_id).await { - tracing::error!(error = %e, "Failed to delete session during logout"); - } - } - } - - let cookie = axum_extra::extract::cookie::Cookie::build(("admin_session", "")) - .path("/") - .max_age(time::Duration::ZERO) - .build(); - - (jar.add(cookie), StatusCode::OK) -} - -async fn api_session_handler( - State(state): State>, - jar: axum_extra::extract::CookieJar, -) -> impl IntoResponse { - let session_cookie = jar.get("admin_session"); - - let session_id = session_cookie.and_then(|cookie| ulid::Ulid::from_string(cookie.value()).ok()); - - let 
session = session_id.and_then(|id| state.session_manager.validate_session(id)); - - match session { - Some(session) => ( - StatusCode::OK, - Json(SessionResponse { - authenticated: true, - username: session.username, - }), - ) - .into_response(), - None => ( - StatusCode::UNAUTHORIZED, - Json(serde_json::json!({ - "error": "Unauthorized", - "message": "No valid session" - })), - ) - .into_response(), - } -} - -// Project-Tag association handlers - -async fn get_project_tags_handler( - State(state): State>, - axum::extract::Path(id): axum::extract::Path, -) -> impl IntoResponse { - let project_id = match uuid::Uuid::parse_str(&id) { - Ok(id) => id, - Err(_) => { - return ( - StatusCode::BAD_REQUEST, - Json(serde_json::json!({ - "error": "Invalid project ID", - "message": "Project ID must be a valid UUID" - })), - ) - .into_response(); - } - }; - - match db::get_tags_for_project(&state.pool, project_id).await { - Ok(tags) => { - let api_tags: Vec = tags.into_iter().map(|t| t.to_api_tag()).collect(); - Json(api_tags).into_response() - } - Err(err) => { - tracing::error!(error = %err, "Failed to fetch tags for project"); - ( - StatusCode::INTERNAL_SERVER_ERROR, - Json(serde_json::json!({ - "error": "Internal server error", - "message": "Failed to fetch tags" - })), - ) - .into_response() - } - } -} - -#[derive(serde::Deserialize)] -struct AddProjectTagRequest { - tag_id: String, -} - -async fn add_project_tag_handler( - State(state): State>, - axum::extract::Path(id): axum::extract::Path, - jar: axum_extra::extract::CookieJar, - Json(payload): Json, -) -> impl IntoResponse { - if check_session(&state, &jar).is_none() { - return require_auth_response().into_response(); - } - let project_id = match uuid::Uuid::parse_str(&id) { - Ok(id) => id, - Err(_) => { - return ( - StatusCode::BAD_REQUEST, - Json(serde_json::json!({ - "error": "Invalid project ID", - "message": "Project ID must be a valid UUID" - })), - ) - .into_response(); - } - }; - - let tag_id = match uuid::Uuid::parse_str(&payload.tag_id) { - Ok(id) => id, - Err(_) => { - return ( - StatusCode::BAD_REQUEST, - Json(serde_json::json!({ - "error": "Invalid tag ID", - "message": "Tag ID must be a valid UUID" - })), - ) - .into_response(); - } - }; - - match db::add_tag_to_project(&state.pool, project_id, tag_id).await { - Ok(()) => ( - StatusCode::CREATED, - Json(serde_json::json!({ - "message": "Tag added to project" - })), - ) - .into_response(), - Err(sqlx::Error::Database(db_err)) if db_err.is_foreign_key_violation() => ( - StatusCode::NOT_FOUND, - Json(serde_json::json!({ - "error": "Not found", - "message": "Project or tag not found" - })), - ) - .into_response(), - Err(err) => { - tracing::error!(error = %err, "Failed to add tag to project"); - ( - StatusCode::INTERNAL_SERVER_ERROR, - Json(serde_json::json!({ - "error": "Internal server error", - "message": "Failed to add tag to project" - })), - ) - .into_response() - } - } -} - -async fn remove_project_tag_handler( - State(state): State>, - axum::extract::Path((id, tag_id)): axum::extract::Path<(String, String)>, - jar: axum_extra::extract::CookieJar, -) -> impl IntoResponse { - if check_session(&state, &jar).is_none() { - return require_auth_response().into_response(); - } - let project_id = match uuid::Uuid::parse_str(&id) { - Ok(id) => id, - Err(_) => { - return ( - StatusCode::BAD_REQUEST, - Json(serde_json::json!({ - "error": "Invalid project ID", - "message": "Project ID must be a valid UUID" - })), - ) - .into_response(); - } - }; - - let tag_id = match 
uuid::Uuid::parse_str(&tag_id) { - Ok(id) => id, - Err(_) => { - return ( - StatusCode::BAD_REQUEST, - Json(serde_json::json!({ - "error": "Invalid tag ID", - "message": "Tag ID must be a valid UUID" - })), - ) - .into_response(); - } - }; - - match db::remove_tag_from_project(&state.pool, project_id, tag_id).await { - Ok(()) => ( - StatusCode::OK, - Json(serde_json::json!({ - "message": "Tag removed from project" - })), - ) - .into_response(), - Err(err) => { - tracing::error!(error = %err, "Failed to remove tag from project"); - ( - StatusCode::INTERNAL_SERVER_ERROR, - Json(serde_json::json!({ - "error": "Internal server error", - "message": "Failed to remove tag from project" - })), - ) - .into_response() - } - } -} - -fn should_tarpit(state: &TarpitState, path: &str) -> bool { - state.config.enabled && is_malicious_path(path) -} - -async fn fallback_handler_tcp( - State(state): State>, - ConnectInfo(peer): ConnectInfo, - req: Request, -) -> Response { - let path = req.uri().path(); - - if should_tarpit(&state.tarpit_state, path) { - tarpit_handler( - State(state.tarpit_state.clone()), - Some(ConnectInfo(peer)), - req, - ) - .await - } else { - isr_handler(State(state), req).await - } -} - -async fn fallback_handler_unix(State(state): State>, req: Request) -> Response { - let path = req.uri().path(); - - if should_tarpit(&state.tarpit_state, path) { - tarpit_handler(State(state.tarpit_state.clone()), None, req).await - } else { - isr_handler(State(state), req).await - } -} - -#[tracing::instrument(skip(state, req), fields(path = %req.uri().path(), method = %req.method()))] -async fn isr_handler(State(state): State>, req: Request) -> Response { - let method = req.method().clone(); - let uri = req.uri(); - let path = uri.path(); - let query = uri.query().unwrap_or(""); - - if method != axum::http::Method::GET && method != axum::http::Method::HEAD { - tracing::warn!(method = %method, path = %path, "Non-GET/HEAD request to non-API route"); - - if accepts_html(req.headers()) { - return serve_error_page(StatusCode::METHOD_NOT_ALLOWED); - } - - let mut headers = HeaderMap::new(); - headers.insert( - axum::http::header::ALLOW, - axum::http::HeaderValue::from_static("GET, HEAD, OPTIONS"), - ); - return ( - StatusCode::METHOD_NOT_ALLOWED, - headers, - "Method not allowed", - ) - .into_response(); - } - - let is_head = method == axum::http::Method::HEAD; - - if path.starts_with("/api/") { - tracing::error!("API request reached ISR handler - routing bug!"); - return (StatusCode::INTERNAL_SERVER_ERROR, "Internal routing error").into_response(); - } - - // Block internal routes from external access - if path.starts_with("/internal/") { - tracing::warn!(path = %path, "Attempted access to internal route"); - - if accepts_html(req.headers()) { - return serve_error_page(StatusCode::NOT_FOUND); - } - - return (StatusCode::NOT_FOUND, "Not found").into_response(); - } - - // Check if this is a static asset that exists in embedded CLIENT_ASSETS - // This handles root-level files like favicon.ico, favicon.svg, etc. - if is_static_asset(path) { - if let Some(response) = try_serve_embedded_asset(path) { - return response; - } - // If not found in embedded assets, continue to proxy (might be in Bun's static dir) - } - - // Check if this is a prerendered page (routes with `export const prerender = true`) - // This handles pages like /pgp, /about, etc. 
that are pre-built at compile time - if let Some(response) = try_serve_prerendered_page(path) { - tracing::debug!(path = %path, "Serving prerendered page"); - return response; - } - - let bun_url = if state.downstream_url.starts_with('/') || state.downstream_url.starts_with("./") - { - if query.is_empty() { - format!("http://localhost{path}") - } else { - format!("http://localhost{path}?{query}") - } - } else if query.is_empty() { - format!("{}{}", state.downstream_url, path) - } else { - format!("{}{}?{}", state.downstream_url, path, query) - }; - - // Build trusted headers to forward to downstream - let mut forward_headers = HeaderMap::new(); - - // SECURITY: Strip any X-Session-User header from incoming request to prevent spoofing - // (We will add it ourselves if session is valid) - - // Extract and validate session from cookie - if let Some(cookie_header) = req.headers().get(axum::http::header::COOKIE) { - if let Ok(cookie_str) = cookie_header.to_str() { - // Parse cookies manually to find admin_session - for cookie_pair in cookie_str.split(';') { - let cookie_pair = cookie_pair.trim(); - if let Some((name, value)) = cookie_pair.split_once('=') { - if name == "admin_session" { - // Found session cookie, validate it - if let Ok(session_id) = ulid::Ulid::from_string(value) { - if let Some(session) = - state.session_manager.validate_session(session_id) - { - // Session is valid - add trusted header - if let Ok(username_value) = - axum::http::HeaderValue::from_str(&session.username) - { - forward_headers.insert("x-session-user", username_value); - } - } - } - break; - } - } - } - } - } - - let start = std::time::Instant::now(); - - match proxy_to_bun(&bun_url, state.clone(), forward_headers).await { - Ok((status, headers, body)) => { - let duration_ms = start.elapsed().as_millis() as u64; - let cache = "miss"; - - let is_static = is_static_asset(path); - let is_page = is_page_route(path); - - match (status.as_u16(), is_static, is_page) { - (200..=299, true, _) => { - tracing::trace!(status = status.as_u16(), duration_ms, cache, "ISR request"); - } - (404, true, _) => { - tracing::warn!( - status = status.as_u16(), - duration_ms, - cache, - "ISR request - missing asset" - ); - } - (500..=599, true, _) => { - tracing::error!( - status = status.as_u16(), - duration_ms, - cache, - "ISR request - server error" - ); - } - (200..=299, _, true) => { - tracing::debug!(status = status.as_u16(), duration_ms, cache, "ISR request"); - } - (404, _, true) => {} - (500..=599, _, _) => { - tracing::error!( - status = status.as_u16(), - duration_ms, - cache, - "ISR request - server error" - ); - } - _ => { - tracing::debug!(status = status.as_u16(), duration_ms, cache, "ISR request"); - } - } - - // Intercept error responses for HTML requests - if (status.is_client_error() || status.is_server_error()) && accepts_html(req.headers()) - { - return serve_error_page(status); - } - - if is_head { - (status, headers).into_response() - } else { - (status, headers, body).into_response() - } - } - Err(err) => { - let duration_ms = start.elapsed().as_millis() as u64; - tracing::error!( - error = %err, - url = %bun_url, - duration_ms, - "Failed to proxy to Bun" - ); - - // Serve 502 error page instead of plaintext - if accepts_html(req.headers()) { - return serve_error_page(StatusCode::BAD_GATEWAY); - } - - ( - StatusCode::BAD_GATEWAY, - format!("Failed to render page: {err}"), - ) - .into_response() - } - } -} - -async fn proxy_to_bun( - url: &str, - state: Arc, - forward_headers: HeaderMap, -) -> Result<(StatusCode, 
HeaderMap, axum::body::Bytes), ProxyError> { - let client = if state.unix_client.is_some() { - state.unix_client.as_ref().unwrap() - } else { - &state.http_client - }; - - // Build request with forwarded headers - let mut request_builder = client.get(url); - for (name, value) in forward_headers.iter() { - request_builder = request_builder.header(name, value); - } - - let response = request_builder.send().await.map_err(ProxyError::Network)?; - - let status = StatusCode::from_u16(response.status().as_u16()) - .unwrap_or(StatusCode::INTERNAL_SERVER_ERROR); - - let mut headers = HeaderMap::new(); - for (name, value) in response.headers() { - let name_str = name.as_str(); - if name_str == "transfer-encoding" - || name_str == "connection" - || name_str == "content-length" - { - continue; - } - - if let Ok(header_name) = axum::http::HeaderName::try_from(name.as_str()) - && let Ok(header_value) = axum::http::HeaderValue::try_from(value.as_bytes()) - { - headers.insert(header_name, header_value); - } - } - - let body = response.bytes().await.map_err(ProxyError::Network)?; - Ok((status, headers, body)) -} - -async fn perform_health_check( - downstream_url: String, - http_client: reqwest::Client, - unix_client: Option, - pool: Option, -) -> bool { - let url = if downstream_url.starts_with('/') || downstream_url.starts_with("./") { - "http://localhost/internal/health".to_string() - } else { - format!("{downstream_url}/internal/health") - }; - - let client = if unix_client.is_some() { - unix_client.as_ref().unwrap() - } else { - &http_client - }; - - let bun_healthy = - match tokio::time::timeout(Duration::from_secs(5), client.get(&url).send()).await { - Ok(Ok(response)) => { - let is_success = response.status().is_success(); - if !is_success { - tracing::warn!( - status = response.status().as_u16(), - "Health check failed: Bun returned non-success status" - ); - } - is_success - } - Ok(Err(err)) => { - tracing::error!(error = %err, "Health check failed: cannot reach Bun"); - false - } - Err(_) => { - tracing::error!("Health check failed: timeout after 5s"); - false - } - }; - - // Check database - let db_healthy = if let Some(pool) = pool { - match db::health_check(&pool).await { - Ok(_) => true, - Err(err) => { - tracing::error!(error = %err, "Database health check failed"); - false - } - } - } else { - true - }; - - bun_healthy && db_healthy -} diff --git a/src/middleware/auth.rs b/src/middleware/auth.rs deleted file mode 100644 index bbe0e18..0000000 --- a/src/middleware/auth.rs +++ /dev/null @@ -1,74 +0,0 @@ -use crate::auth::{Session, SessionManager}; -use axum::{ - Json, - body::Body, - extract::{Request, State}, - http::{StatusCode, Uri}, - middleware::Next, - response::{IntoResponse, Redirect, Response}, -}; -use axum_extra::extract::CookieJar; -use serde_json::json; -use std::sync::Arc; - -const SESSION_COOKIE_NAME: &str = "admin_session"; - -pub async fn require_admin_auth( - State(session_mgr): State>, - jar: CookieJar, - uri: Uri, - mut req: Request, - next: Next, -) -> Result { - let session_cookie = jar.get(SESSION_COOKIE_NAME); - - let session_id = session_cookie.and_then(|cookie| ulid::Ulid::from_string(cookie.value()).ok()); - - let session = session_id.and_then(|id| session_mgr.validate_session(id)); - - match session { - Some(session) => { - req.extensions_mut().insert(session); - Ok(next.run(req).await) - } - None => { - let next_param = urlencoding::encode(uri.path()); - let redirect_url = format!("/admin/login?next={}", next_param); - 
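The removed require_admin_auth middleware preserved the originally requested path in a URL-encoded next query parameter, presumably so the login page can send the user back after a successful login. A minimal sketch of the encoding step, with a hypothetical helper name (the decoding side is not part of this diff):

    // Hypothetical helper mirroring the redirect built above; urlencoding::encode
    // percent-encodes everything outside A-Z, a-z, 0-9, '-', '_', '.' and '~'.
    fn login_redirect_for(requested_path: &str) -> String {
        format!("/admin/login?next={}", urlencoding::encode(requested_path))
    }

    // login_redirect_for("/admin/projects") == "/admin/login?next=%2Fadmin%2Fprojects"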
Err(Redirect::to(&redirect_url).into_response()) - } - } -} - -pub async fn require_api_auth( - State(session_mgr): State>, - jar: CookieJar, - mut req: Request, - next: Next, -) -> Result { - let session_cookie = jar.get(SESSION_COOKIE_NAME); - - let session_id = session_cookie.and_then(|cookie| ulid::Ulid::from_string(cookie.value()).ok()); - - let session = session_id.and_then(|id| session_mgr.validate_session(id)); - - match session { - Some(session) => { - req.extensions_mut().insert(session); - Ok(next.run(req).await) - } - None => { - let error_response = ( - StatusCode::UNAUTHORIZED, - Json(json!({ - "error": "Unauthorized", - "message": "Authentication required" - })), - ); - Err(error_response.into_response()) - } - } -} - -pub fn extract_session(req: &Request) -> Option { - req.extensions().get::().cloned() -} diff --git a/src/middleware/mod.rs b/src/middleware/mod.rs index 5d68186..7b4ccd1 100644 --- a/src/middleware/mod.rs +++ b/src/middleware/mod.rs @@ -1,5 +1,3 @@ -pub mod auth; pub mod request_id; -pub use auth::{require_admin_auth, require_api_auth}; pub use request_id::RequestIdLayer; diff --git a/src/og.rs b/src/og.rs index bb4bce2..b05989e 100644 --- a/src/og.rs +++ b/src/og.rs @@ -1,7 +1,7 @@ use serde::{Deserialize, Serialize}; use std::{sync::Arc, time::Duration}; -use crate::{AppState, r2::R2Client}; +use crate::{r2::R2Client, state::AppState}; /// Discriminated union matching TypeScript's `OGImageSpec` in web/src/lib/og-types.ts /// @@ -74,7 +74,6 @@ pub async fn generate_og_image(spec: &OGImageSpec, state: Arc) -> Resu } /// Check if an OG image exists in R2 -#[allow(dead_code)] pub async fn og_image_exists(spec: &OGImageSpec) -> bool { if let Some(r2) = R2Client::get().await { r2.object_exists(&spec.r2_key()).await @@ -84,7 +83,6 @@ pub async fn og_image_exists(spec: &OGImageSpec) -> bool { } /// Ensure an OG image exists, generating if necessary -#[allow(dead_code)] pub async fn ensure_og_image(spec: &OGImageSpec, state: Arc) -> Result<(), String> { if og_image_exists(spec).await { tracing::debug!(r2_key = spec.r2_key(), "OG image already exists"); @@ -94,23 +92,24 @@ pub async fn ensure_og_image(spec: &OGImageSpec, state: Arc) -> Result } /// Regenerate common OG images (index, projects) on server startup +/// Uses ensure_og_image to skip regeneration if images already exist pub async fn regenerate_common_images(state: Arc) { // Wait 2 seconds before starting tokio::time::sleep(Duration::from_secs(2)).await; - tracing::info!("Regenerating common OG images"); + tracing::info!("Ensuring common OG images exist"); let specs = vec![OGImageSpec::Index, OGImageSpec::Projects]; for spec in specs { - match generate_og_image(&spec, state.clone()).await { + match ensure_og_image(&spec, state.clone()).await { Ok(()) => { - tracing::info!(r2_key = spec.r2_key(), "Successfully regenerated OG image"); + tracing::info!(r2_key = spec.r2_key(), "Common OG image ready"); } Err(e) => { - tracing::error!(r2_key = spec.r2_key(), error = %e, "Failed to regenerate OG image"); + tracing::error!(r2_key = spec.r2_key(), error = %e, "Failed to ensure OG image"); } } } - tracing::info!("Finished regenerating common OG images"); + tracing::info!("Finished ensuring common OG images"); } diff --git a/src/proxy.rs b/src/proxy.rs new file mode 100644 index 0000000..adceea6 --- /dev/null +++ b/src/proxy.rs @@ -0,0 +1,339 @@ +use axum::{ + extract::{ConnectInfo, Request, State}, + http::{HeaderMap, StatusCode}, + response::{IntoResponse, Response}, +}; +use std::{net::SocketAddr, sync::Arc, 
time::Duration}; + +use crate::{ + assets, db, + state::{AppState, ProxyError}, + tarpit::{self, TarpitState}, + utils, +}; + +/// ISR handler - serves pages through Bun SSR with session validation +#[tracing::instrument(skip(state, req), fields(path = %req.uri().path(), method = %req.method()))] +pub async fn isr_handler(State(state): State>, req: Request) -> Response { + let method = req.method().clone(); + let uri = req.uri(); + let path = uri.path(); + let query = uri.query().unwrap_or(""); + + if method != axum::http::Method::GET && method != axum::http::Method::HEAD { + tracing::warn!(method = %method, path = %path, "Non-GET/HEAD request to non-API route"); + + if utils::accepts_html(req.headers()) { + return utils::serve_error_page(StatusCode::METHOD_NOT_ALLOWED); + } + + let mut headers = HeaderMap::new(); + headers.insert( + axum::http::header::ALLOW, + axum::http::HeaderValue::from_static("GET, HEAD, OPTIONS"), + ); + return ( + StatusCode::METHOD_NOT_ALLOWED, + headers, + "Method not allowed", + ) + .into_response(); + } + + let is_head = method == axum::http::Method::HEAD; + + if path.starts_with("/api/") { + tracing::error!("API request reached ISR handler - routing bug!"); + return (StatusCode::INTERNAL_SERVER_ERROR, "Internal routing error").into_response(); + } + + // Block internal routes from external access + if path.starts_with("/internal/") { + tracing::warn!(path = %path, "Attempted access to internal route"); + + if utils::accepts_html(req.headers()) { + return utils::serve_error_page(StatusCode::NOT_FOUND); + } + + return (StatusCode::NOT_FOUND, "Not found").into_response(); + } + + // Check if this is a static asset that exists in embedded CLIENT_ASSETS + if utils::is_static_asset(path) { + if let Some(response) = assets::try_serve_embedded_asset(path) { + return response; + } + // If not found in embedded assets, continue to proxy (might be in Bun's static dir) + } + + // Check if this is a prerendered page + if let Some(response) = assets::try_serve_prerendered_page(path) { + tracing::debug!(path = %path, "Serving prerendered page"); + return response; + } + + let bun_url = if state.downstream_url.starts_with('/') || state.downstream_url.starts_with("./") + { + if query.is_empty() { + format!("http://localhost{path}") + } else { + format!("http://localhost{path}?{query}") + } + } else if query.is_empty() { + format!("{}{}", state.downstream_url, path) + } else { + format!("{}{}?{}", state.downstream_url, path, query) + }; + + // Build trusted headers to forward to downstream + let mut forward_headers = HeaderMap::new(); + + // SECURITY: Strip any X-Session-User header from incoming request to prevent spoofing + + // Extract and validate session from cookie + if let Some(cookie_header) = req.headers().get(axum::http::header::COOKIE) { + if let Ok(cookie_str) = cookie_header.to_str() { + // Parse cookies manually to find admin_session + for cookie_pair in cookie_str.split(';') { + let cookie_pair = cookie_pair.trim(); + if let Some((name, value)) = cookie_pair.split_once('=') { + if name == "admin_session" { + // Found session cookie, validate it + if let Ok(session_id) = ulid::Ulid::from_string(value) { + if let Some(session) = + state.session_manager.validate_session(session_id) + { + // Session is valid - add trusted header + if let Ok(username_value) = + axum::http::HeaderValue::from_str(&session.username) + { + forward_headers.insert("x-session-user", username_value); + } + } + } + break; + } + } + } + } + } + + let start = std::time::Instant::now(); + + match 
proxy_to_bun(&bun_url, state.clone(), forward_headers).await { + Ok((status, headers, body)) => { + let duration_ms = start.elapsed().as_millis() as u64; + let cache = "miss"; + + let is_static = utils::is_static_asset(path); + let is_page = utils::is_page_route(path); + + match (status.as_u16(), is_static, is_page) { + (200..=299, true, _) => { + tracing::trace!(status = status.as_u16(), duration_ms, cache, "ISR request"); + } + (404, true, _) => { + tracing::warn!( + status = status.as_u16(), + duration_ms, + cache, + "ISR request - missing asset" + ); + } + (500..=599, true, _) => { + tracing::error!( + status = status.as_u16(), + duration_ms, + cache, + "ISR request - server error" + ); + } + (200..=299, _, true) => { + tracing::debug!(status = status.as_u16(), duration_ms, cache, "ISR request"); + } + (404, _, true) => {} + (500..=599, _, _) => { + tracing::error!( + status = status.as_u16(), + duration_ms, + cache, + "ISR request - server error" + ); + } + _ => { + tracing::debug!(status = status.as_u16(), duration_ms, cache, "ISR request"); + } + } + + // Intercept error responses for HTML requests + if (status.is_client_error() || status.is_server_error()) + && utils::accepts_html(req.headers()) + { + return utils::serve_error_page(status); + } + + if is_head { + (status, headers).into_response() + } else { + (status, headers, body).into_response() + } + } + Err(err) => { + let duration_ms = start.elapsed().as_millis() as u64; + tracing::error!( + error = %err, + url = %bun_url, + duration_ms, + "Failed to proxy to Bun" + ); + + // Serve 502 error page instead of plaintext + if utils::accepts_html(req.headers()) { + return utils::serve_error_page(StatusCode::BAD_GATEWAY); + } + + ( + StatusCode::BAD_GATEWAY, + format!("Failed to render page: {err}"), + ) + .into_response() + } + } +} + +/// Proxy a request to Bun SSR +pub async fn proxy_to_bun( + url: &str, + state: Arc, + forward_headers: HeaderMap, +) -> Result<(StatusCode, HeaderMap, axum::body::Bytes), ProxyError> { + let client = if state.unix_client.is_some() { + state.unix_client.as_ref().unwrap() + } else { + &state.http_client + }; + + // Build request with forwarded headers + let mut request_builder = client.get(url); + for (name, value) in forward_headers.iter() { + request_builder = request_builder.header(name, value); + } + + let response = request_builder.send().await.map_err(ProxyError::Network)?; + + let status = StatusCode::from_u16(response.status().as_u16()) + .unwrap_or(StatusCode::INTERNAL_SERVER_ERROR); + + let mut headers = HeaderMap::new(); + for (name, value) in response.headers() { + let name_str = name.as_str(); + if name_str == "transfer-encoding" + || name_str == "connection" + || name_str == "content-length" + { + continue; + } + + if let Ok(header_name) = axum::http::HeaderName::try_from(name.as_str()) + && let Ok(header_value) = axum::http::HeaderValue::try_from(value.as_bytes()) + { + headers.insert(header_name, header_value); + } + } + + let body = response.bytes().await.map_err(ProxyError::Network)?; + Ok((status, headers, body)) +} + +/// Perform health check on Bun SSR and database +pub async fn perform_health_check( + downstream_url: String, + http_client: reqwest::Client, + unix_client: Option, + pool: Option, +) -> bool { + let url = if downstream_url.starts_with('/') || downstream_url.starts_with("./") { + "http://localhost/internal/health".to_string() + } else { + format!("{downstream_url}/internal/health") + }; + + let client = if unix_client.is_some() { + unix_client.as_ref().unwrap() + } 
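Both the new proxy_to_bun and perform_health_check pick the Unix-socket client when one is configured and fall back to the plain HTTP client otherwise. The is_some()/unwrap() pairing is safe because the check guards the unwrap; an equivalent, slightly more direct spelling (a sketch, not what the patch does) would be:

    // Equivalent client selection without the explicit is_some()/unwrap() pair:
    let client = unix_client.as_ref().unwrap_or(&http_client);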
else { + &http_client + }; + + let bun_healthy = + match tokio::time::timeout(Duration::from_secs(5), client.get(&url).send()).await { + Ok(Ok(response)) => { + let is_success = response.status().is_success(); + if !is_success { + tracing::warn!( + status = response.status().as_u16(), + "Health check failed: Bun returned non-success status" + ); + } + is_success + } + Ok(Err(err)) => { + tracing::error!(error = %err, "Health check failed: cannot reach Bun"); + false + } + Err(_) => { + tracing::error!("Health check failed: timeout after 5s"); + false + } + }; + + // Check database + let db_healthy = if let Some(pool) = pool { + match db::health_check(&pool).await { + Ok(_) => true, + Err(err) => { + tracing::error!(error = %err, "Database health check failed"); + false + } + } + } else { + true + }; + + bun_healthy && db_healthy +} + +/// Check if path should trigger tarpit +fn should_tarpit(state: &TarpitState, path: &str) -> bool { + state.config.enabled && tarpit::is_malicious_path(path) +} + +/// Fallback handler for TCP connections (has access to peer IP) +pub async fn fallback_handler_tcp( + State(state): State>, + ConnectInfo(peer): ConnectInfo, + req: Request, +) -> Response { + let path = req.uri().path(); + + if should_tarpit(&state.tarpit_state, path) { + tarpit::tarpit_handler( + State(state.tarpit_state.clone()), + Some(ConnectInfo(peer)), + req, + ) + .await + } else { + isr_handler(State(state), req).await + } +} + +/// Fallback handler for Unix sockets (no peer IP available) +pub async fn fallback_handler_unix(State(state): State>, req: Request) -> Response { + let path = req.uri().path(); + + if should_tarpit(&state.tarpit_state, path) { + tarpit::tarpit_handler(State(state.tarpit_state.clone()), None, req).await + } else { + isr_handler(State(state), req).await + } +} diff --git a/src/r2.rs b/src/r2.rs index df4e8a9..685d872 100644 --- a/src/r2.rs +++ b/src/r2.rs @@ -57,7 +57,6 @@ impl R2Client { .cloned() } - #[allow(dead_code)] pub async fn get_object(&self, key: &str) -> Result, String> { let result = self .client @@ -93,7 +92,6 @@ impl R2Client { Ok(()) } - #[allow(dead_code)] pub async fn object_exists(&self, key: &str) -> bool { self.client .head_object() diff --git a/src/routes.rs b/src/routes.rs new file mode 100644 index 0000000..171664c --- /dev/null +++ b/src/routes.rs @@ -0,0 +1,167 @@ +use axum::{Router, extract::Request, http::Uri, response::IntoResponse, routing::any}; +use std::sync::Arc; + +use crate::{assets, handlers, state::AppState}; + +/// Build API routes +pub fn api_routes() -> Router> { + Router::new() + .route("/", any(api_root_404_handler)) + .route( + "/health", + axum::routing::get(handlers::health_handler).head(handlers::health_handler), + ) + // Authentication endpoints (public) + .route("/login", axum::routing::post(handlers::api_login_handler)) + .route("/logout", axum::routing::post(handlers::api_logout_handler)) + .route( + "/session", + axum::routing::get(handlers::api_session_handler), + ) + // Projects - GET is public (shows all for admin, only non-hidden for public) + // POST/PUT/DELETE require authentication + .route( + "/projects", + axum::routing::get(handlers::projects_handler).post(handlers::create_project_handler), + ) + .route( + "/projects/{id}", + axum::routing::get(handlers::get_project_handler) + .put(handlers::update_project_handler) + .delete(handlers::delete_project_handler), + ) + // Project tags - authentication checked in handlers + .route( + "/projects/{id}/tags", + 
axum::routing::get(handlers::get_project_tags_handler) + .post(handlers::add_project_tag_handler), + ) + .route( + "/projects/{id}/tags/{tag_id}", + axum::routing::delete(handlers::remove_project_tag_handler), + ) + // Tags - authentication checked in handlers + .route( + "/tags", + axum::routing::get(handlers::list_tags_handler).post(handlers::create_tag_handler), + ) + .route( + "/tags/{slug}", + axum::routing::get(handlers::get_tag_handler).put(handlers::update_tag_handler), + ) + .route( + "/tags/{slug}/related", + axum::routing::get(handlers::get_related_tags_handler), + ) + .route( + "/tags/recalculate-cooccurrence", + axum::routing::post(handlers::recalculate_cooccurrence_handler), + ) + // Admin stats - requires authentication + .route( + "/stats", + axum::routing::get(handlers::get_admin_stats_handler), + ) + // Site settings - GET is public, PUT requires authentication + .route( + "/settings", + axum::routing::get(handlers::get_settings_handler) + .put(handlers::update_settings_handler), + ) + // Icon API - proxy to SvelteKit (authentication handled by SvelteKit) + .route( + "/icons/{*path}", + axum::routing::get(handlers::proxy_icons_handler), + ) + .fallback(api_404_and_method_handler) +} + +/// Build base router (shared routes for all listen addresses) +pub fn build_base_router() -> Router> { + Router::new() + .nest("/api", api_routes()) + .route("/api/", any(api_root_404_handler)) + .route( + "/_app/{*path}", + axum::routing::get(assets::serve_embedded_asset).head(assets::serve_embedded_asset), + ) + .route("/pgp", axum::routing::get(handlers::handle_pgp_route)) + .route( + "/publickey.asc", + axum::routing::get(handlers::serve_pgp_key), + ) + .route("/pgp.asc", axum::routing::get(handlers::serve_pgp_key)) + .route( + "/.well-known/pgpkey.asc", + axum::routing::get(handlers::serve_pgp_key), + ) + .route("/keys", axum::routing::get(handlers::redirect_to_pgp)) +} + +async fn api_root_404_handler(uri: Uri) -> impl IntoResponse { + api_404_handler(uri).await +} + +async fn api_404_and_method_handler(req: Request) -> impl IntoResponse { + use axum::{Json, http::StatusCode}; + + let method = req.method(); + let uri = req.uri(); + let path = uri.path(); + + if method != axum::http::Method::GET + && method != axum::http::Method::HEAD + && method != axum::http::Method::OPTIONS + { + let content_type = req + .headers() + .get(axum::http::header::CONTENT_TYPE) + .and_then(|v| v.to_str().ok()); + + if let Some(ct) = content_type { + if !ct.starts_with("application/json") { + return ( + StatusCode::UNSUPPORTED_MEDIA_TYPE, + Json(serde_json::json!({ + "error": "Unsupported media type", + "message": "API endpoints only accept application/json" + })), + ) + .into_response(); + } + } else if method == axum::http::Method::POST + || method == axum::http::Method::PUT + || method == axum::http::Method::PATCH + { + // POST/PUT/PATCH require Content-Type header + return ( + StatusCode::BAD_REQUEST, + Json(serde_json::json!({ + "error": "Missing Content-Type header", + "message": "Content-Type: application/json is required" + })), + ) + .into_response(); + } + } + + // Route not found + tracing::warn!(path = %path, method = %method, "API route not found"); + ( + StatusCode::NOT_FOUND, + Json(serde_json::json!({ + "error": "Not found", + "path": path + })), + ) + .into_response() +} + +async fn api_404_handler(uri: Uri) -> impl IntoResponse { + let req = Request::builder() + .uri(uri) + .body(axum::body::Body::empty()) + .unwrap(); + + api_404_and_method_handler(req).await +} diff --git 
a/src/state.rs b/src/state.rs new file mode 100644 index 0000000..addcb8b --- /dev/null +++ b/src/state.rs @@ -0,0 +1,33 @@ +use std::sync::Arc; + +use crate::{auth::SessionManager, health::HealthChecker, tarpit::TarpitState}; + +/// Application state shared across all handlers +#[derive(Clone)] +pub struct AppState { + pub downstream_url: String, + pub http_client: reqwest::Client, + pub unix_client: Option, + pub health_checker: Arc, + pub tarpit_state: Arc, + pub pool: sqlx::PgPool, + pub session_manager: Arc, +} + +/// Errors that can occur during proxying to Bun +#[derive(Debug)] +pub enum ProxyError { + Network(reqwest::Error), + Other(String), +} + +impl std::fmt::Display for ProxyError { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + ProxyError::Network(e) => write!(f, "Network error: {e}"), + ProxyError::Other(s) => write!(f, "{s}"), + } + } +} + +impl std::error::Error for ProxyError {} diff --git a/src/utils.rs b/src/utils.rs new file mode 100644 index 0000000..3e4d486 --- /dev/null +++ b/src/utils.rs @@ -0,0 +1,129 @@ +use axum::{ + http::{HeaderMap, StatusCode}, + response::{IntoResponse, Response}, +}; + +use crate::assets; + +/// Check if a path represents a static asset +pub fn is_static_asset(path: &str) -> bool { + path.starts_with("/node_modules/") + || path.starts_with("/@") // Vite internals like /@vite/client, /@fs/, /@id/ + || path.starts_with("/.svelte-kit/") + || path.starts_with("/.well-known/") + || path.ends_with(".woff2") + || path.ends_with(".woff") + || path.ends_with(".ttf") + || path.ends_with(".ico") + || path.ends_with(".png") + || path.ends_with(".jpg") + || path.ends_with(".svg") + || path.ends_with(".webp") + || path.ends_with(".css") + || path.ends_with(".js") + || path.ends_with(".map") +} + +/// Check if a path represents a page route (not an asset) +pub fn is_page_route(path: &str) -> bool { + !path.starts_with("/node_modules/") + && !path.starts_with("/@") + && !path.starts_with("/.svelte-kit/") + && !path.contains('.') +} + +/// Check if the request accepts HTML responses +pub fn accepts_html(headers: &HeaderMap) -> bool { + if let Some(accept) = headers.get(axum::http::header::ACCEPT) { + if let Ok(accept_str) = accept.to_str() { + return accept_str.contains("text/html") || accept_str.contains("*/*"); + } + } + // Default to true for requests without Accept header (browsers typically send it) + true +} + +/// Determines if request prefers raw content (CLI tools) over HTML +pub fn prefers_raw_content(headers: &HeaderMap) -> bool { + // Check User-Agent for known CLI tools first (most reliable) + if let Some(ua) = headers.get(axum::http::header::USER_AGENT) { + if let Ok(ua_str) = ua.to_str() { + let ua_lower = ua_str.to_lowercase(); + if ua_lower.starts_with("curl/") + || ua_lower.starts_with("wget/") + || ua_lower.starts_with("httpie/") + || ua_lower.contains("curlie") + { + return true; + } + } + } + + // Check Accept header - if it explicitly prefers text/html, serve HTML + if let Some(accept) = headers.get(axum::http::header::ACCEPT) { + if let Ok(accept_str) = accept.to_str() { + // If text/html appears before */* in the list, they prefer HTML + if let Some(html_pos) = accept_str.find("text/html") { + if let Some(wildcard_pos) = accept_str.find("*/*") { + return html_pos > wildcard_pos; + } + // Has text/html but no */* → prefers HTML + return false; + } + // Has */* but no text/html → probably a CLI tool + if accept_str.contains("*/*") && !accept_str.contains("text/html") { + return true; + } + } + } 
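For a concrete sense of how the checks above classify clients, a hypothetical test sketch (not part of the patch) exercising prefers_raw_content:

    // Hypothetical test sketch: curl sends "User-Agent: curl/..." and typically
    // "Accept: */*", so it gets raw content; a browser Accept header lists
    // text/html before */*, so it gets HTML.
    #[cfg(test)]
    mod prefers_raw_content_examples {
        use super::*;
        use axum::http::{header, HeaderMap, HeaderValue};

        #[test]
        fn curl_gets_raw_content_and_browsers_get_html() {
            let mut curl = HeaderMap::new();
            curl.insert(header::USER_AGENT, HeaderValue::from_static("curl/8.6.0"));
            curl.insert(header::ACCEPT, HeaderValue::from_static("*/*"));
            assert!(prefers_raw_content(&curl));

            let mut browser = HeaderMap::new();
            browser.insert(
                header::ACCEPT,
                HeaderValue::from_static("text/html,application/xhtml+xml,*/*;q=0.8"),
            );
            assert!(!prefers_raw_content(&browser));
        }
    }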
+ + // No Accept header → assume browser (safer default) + false +} + +/// Serve a prerendered error page for the given status code +pub fn serve_error_page(status: StatusCode) -> Response { + let status_code = status.as_u16(); + + if let Some(html) = assets::get_error_page(status_code) { + let mut headers = HeaderMap::new(); + headers.insert( + axum::http::header::CONTENT_TYPE, + axum::http::HeaderValue::from_static("text/html; charset=utf-8"), + ); + headers.insert( + axum::http::header::CACHE_CONTROL, + axum::http::HeaderValue::from_static("no-cache, no-store, must-revalidate"), + ); + + (status, headers, html).into_response() + } else { + // Fallback for undefined error codes (500 generic page) + tracing::warn!( + status_code, + "No prerendered error page found for status code - using fallback" + ); + + if let Some(fallback_html) = assets::get_error_page(500) { + let mut headers = HeaderMap::new(); + headers.insert( + axum::http::header::CONTENT_TYPE, + axum::http::HeaderValue::from_static("text/html; charset=utf-8"), + ); + headers.insert( + axum::http::header::CACHE_CONTROL, + axum::http::HeaderValue::from_static("no-cache, no-store, must-revalidate"), + ); + + (status, headers, fallback_html).into_response() + } else { + // Last resort: plaintext (should never happen if 500.html exists) + (status, format!("Error {}", status_code)).into_response() + } + } +} + +/// Validate hex color format (6 characters, no hash, no alpha) +pub fn validate_hex_color(color: &str) -> bool { + color.len() == 6 && color.chars().all(|c| c.is_ascii_hexdigit()) +} diff --git a/web/src/app.css b/web/src/app.css index 32456ac..cb87428 100644 --- a/web/src/app.css +++ b/web/src/app.css @@ -170,7 +170,9 @@ html, body { @apply font-inter overflow-x-hidden; color: var(--color-text-primary); - transition: background-color 0.3s ease-in-out, color 0.3s ease-in-out; + transition: + background-color 0.3s ease-in-out, + color 0.3s ease-in-out; } body { diff --git a/web/src/app.html b/web/src/app.html index c7e85f4..b3b7498 100644 --- a/web/src/app.html +++ b/web/src/app.html @@ -4,11 +4,14 @@ diff --git a/web/src/lib/api.server.ts b/web/src/lib/api.server.ts index 199d55c..3e81a31 100644 --- a/web/src/lib/api.server.ts +++ b/web/src/lib/api.server.ts @@ -19,7 +19,7 @@ export async function apiFetch( const url = `${baseUrl}${path}`; const method = init?.method ?? "GET"; - + // Unix sockets require Bun's native fetch (SvelteKit's fetch doesn't support it) const fetchFn = isUnixSocket ? fetch : (init?.fetch ?? 
fetch); diff --git a/web/src/lib/api.ts b/web/src/lib/api.ts index f7f2caf..2c6da6e 100644 --- a/web/src/lib/api.ts +++ b/web/src/lib/api.ts @@ -16,10 +16,7 @@ import type { // ============================================================================ // Client-side fetch wrapper for browser requests -async function clientApiFetch( - path: string, - init?: RequestInit, -): Promise { +async function clientApiFetch(path: string, init?: RequestInit): Promise { const response = await fetch(path, { ...init, credentials: "same-origin", // Include cookies for auth @@ -83,9 +80,10 @@ export async function deleteAdminProject(id: string): Promise { // Admin Tags API export async function getAdminTags(): Promise { - const tags = await clientApiFetch< - Array - >("/api/tags"); + const tags = + await clientApiFetch>( + "/api/tags", + ); // Transform snake_case to camelCase return tags.map((item) => ({ diff --git a/web/src/lib/components/AppWrapper.svelte b/web/src/lib/components/AppWrapper.svelte index fe60488..df21b1e 100644 --- a/web/src/lib/components/AppWrapper.svelte +++ b/web/src/lib/components/AppWrapper.svelte @@ -19,9 +19,19 @@ } = $props(); -
+
-
+
{#if showThemeToggle}
diff --git a/web/src/lib/components/PgpKeyModal.svelte b/web/src/lib/components/PgpKeyModal.svelte index 94f704a..9b051c9 100644 --- a/web/src/lib/components/PgpKeyModal.svelte +++ b/web/src/lib/components/PgpKeyModal.svelte @@ -83,7 +83,10 @@ transition:scale={{ duration: 200, start: 0.95 }} >
-

+

PGP Public Key

diff --git a/web/src/lib/components/ProjectCard.svelte b/web/src/lib/components/ProjectCard.svelte index a177ab4..07ad1cb 100644 --- a/web/src/lib/components/ProjectCard.svelte +++ b/web/src/lib/components/ProjectCard.svelte @@ -13,7 +13,9 @@ let { project, class: className }: Props = $props(); // Prefer demo URL, fallback to GitHub repo - const projectUrl = project.demoUrl || (project.githubRepo ? `https://github.com/${project.githubRepo}` : null); + const projectUrl = + project.demoUrl || + (project.githubRepo ? `https://github.com/${project.githubRepo}` : null); function formatDate(dateString: string): string { const date = new Date(dateString); @@ -34,87 +36,95 @@ {#if projectUrl} - -
-
-

+
+
+

+ {project.name} +

+ + {formatDate(project.updatedAt)} + +
+

- {project.name} -

- - {formatDate(project.updatedAt)} - + {project.shortDescription} +

-

- {project.shortDescription} -

-
-
- {#each project.tags as tag (tag.name)} - - - {#if tag.iconSvg} - - - {@html tag.iconSvg} - - {/if} - {tag.name} - - {/each} -
-
+
+ {#each project.tags as tag (tag.name)} + + + {#if tag.iconSvg} + + + {@html tag.iconSvg} + + {/if} + {tag.name} + + {/each} +
+ {:else} -
-
-
-

+
+
+

+ {project.name} +

+ + {formatDate(project.updatedAt)} + +
+

- {project.name} -

- - {formatDate(project.updatedAt)} - + {project.shortDescription} +

-

- {project.shortDescription} -

-
-
- {#each project.tags as tag (tag.name)} - - {#if tag.iconSvg} - - - {@html tag.iconSvg} - - {/if} - {tag.name} - - {/each} +
+ {#each project.tags as tag (tag.name)} + + {#if tag.iconSvg} + + + {@html tag.iconSvg} + + {/if} + {tag.name} + + {/each} +
-
{/if} diff --git a/web/src/lib/components/ThemeToggle.svelte b/web/src/lib/components/ThemeToggle.svelte index f00adb8..f38b130 100644 --- a/web/src/lib/components/ThemeToggle.svelte +++ b/web/src/lib/components/ThemeToggle.svelte @@ -7,7 +7,9 @@ @@ -88,7 +93,8 @@ {@html link.iconSvg} - {link.label} @@ -100,8 +106,13 @@ class="flex items-center gap-x-1.5 px-1.5 py-1 rounded-sm bg-zinc-100 dark:bg-zinc-900 shadow-sm hover:bg-zinc-200 dark:hover:bg-zinc-800 transition-colors" onclick={() => (pgpModalOpen = true)} > - - PGP Key + + PGP Key diff --git a/web/src/routes/admin/+page.svelte b/web/src/routes/admin/+page.svelte index bd29452..31060de 100644 --- a/web/src/routes/admin/+page.svelte +++ b/web/src/routes/admin/+page.svelte @@ -63,7 +63,9 @@
-

Recent Events

+

+ Recent Events +

{#if recentEvents.length === 0} -

No events yet

+

+ No events yet +

{:else} {/if} diff --git a/web/src/routes/admin/events/+page.svelte b/web/src/routes/admin/events/+page.svelte index 575d52f..29b8e4f 100644 --- a/web/src/routes/admin/events/+page.svelte +++ b/web/src/routes/admin/events/+page.svelte @@ -84,7 +84,9 @@
-
+

Event Log diff --git a/web/src/routes/admin/login/+page.svelte b/web/src/routes/admin/login/+page.svelte index 54c5c4c..5b1e6e7 100644 --- a/web/src/routes/admin/login/+page.svelte +++ b/web/src/routes/admin/login/+page.svelte @@ -43,7 +43,9 @@
-
+

Projects

-

Manage your project portfolio

+

+ Manage your project portfolio +