feat: add media upload pipeline with multipart support, blurhash generation, and R2 storage

- Add project_media table with image/video variants, ordering, and metadata
- Implement multipart upload handlers with 50MB limit
- Generate blurhash placeholders and resize images to thumb/medium/full variants
- Update ProjectCard to use media carousel instead of mock gradients
- Add MediaManager component for drag-drop upload and reordering
This commit is contained in:
2026-01-14 22:34:15 -06:00
parent 39a4e702fd
commit e83133cfcc
33 changed files with 3462 additions and 226 deletions
+4 -2
View File
@@ -1,8 +1,9 @@
use axum::extract::DefaultBodyLimit;
use std::collections::HashSet;
use std::net::SocketAddr;
use std::sync::Arc;
use std::time::Duration;
use tower_http::{cors::CorsLayer, limit::RequestBodyLimitLayer};
use tower_http::cors::CorsLayer;
use crate::cache::{IsrCache, IsrCacheConfig};
use crate::config::ListenAddr;
@@ -218,7 +219,8 @@ pub async fn run(
router
.layer(RequestIdLayer::new(trust_request_id))
.layer(CorsLayer::permissive())
.layer(RequestBodyLimitLayer::new(1_048_576))
// 50 MiB limit for media uploads
.layer(DefaultBodyLimit::max(50 * 1024 * 1024))
}
let mut tasks = Vec::new();
+450
View File
@@ -0,0 +1,450 @@
use serde::{Deserialize, Serialize};
use sqlx::PgPool;
use time::OffsetDateTime;
use uuid::Uuid;
/// Media type enum matching PostgreSQL enum
///
/// Serialized as lowercase strings ("image" / "video") both for the
/// database `media_type` enum and in JSON API responses.
#[derive(Debug, Clone, Copy, PartialEq, Eq, sqlx::Type, Serialize, Deserialize)]
#[sqlx(type_name = "media_type", rename_all = "lowercase")]
#[serde(rename_all = "lowercase")]
pub enum MediaType {
    /// Still image; processed into thumb/medium/full WebP variants on upload.
    Image,
    /// Video file; only the uploaded original is stored (no transcoding).
    Video,
}
/// Database model for project media
///
/// Mirrors one row of the `project_media` table.
#[derive(Debug, Clone, sqlx::FromRow)]
pub struct DbProjectMedia {
    pub id: Uuid,
    /// Owning project.
    pub project_id: Uuid,
    /// Position within the project's media list (0-based).
    pub display_order: i32,
    pub media_type: MediaType,
    /// Filename as originally uploaded by the user.
    pub original_filename: String,
    /// Key prefix in R2 under which all variants of this asset live.
    pub r2_base_path: String,
    /// JSONB blob describing stored variants (thumb/medium/full/original/poster).
    pub variants: serde_json::Value,
    /// Pixel dimensions of the original; None for videos (not probed).
    pub width: Option<i32>,
    pub height: Option<i32>,
    /// Size of the uploaded file in bytes.
    pub size_bytes: i64,
    /// Blurhash placeholder string (set for images only).
    pub blurhash: Option<String>,
    /// Optional extra metadata (focal point, alt text, duration).
    pub metadata: Option<serde_json::Value>,
    pub created_at: OffsetDateTime,
}
/// Variant info for images
///
/// Shape of one image-typed entry in the `variants` JSONB column.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ImageVariant {
    /// Object key relative to the media's `r2_base_path` (e.g. "thumb.webp").
    pub key: String,
    pub width: i32,
    pub height: i32,
    /// MIME type; only recorded for the preserved original.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub mime: Option<String>,
}
/// Variant info for the original uploaded video file
///
/// (The previous comment said "video poster", but this struct is
/// deserialized from the "original" variant of videos — see
/// `DbProjectMedia::build_api_variants`.)
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct VideoOriginal {
    /// Object key relative to the media's `r2_base_path`.
    pub key: String,
    /// Video MIME type (e.g. "video/mp4").
    pub mime: String,
    /// Duration in seconds, when known.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub duration: Option<f64>,
}
/// API response for media variant with full URL
///
/// Like `ImageVariant`, but `url` is fully qualified (base URL + path + key).
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ApiMediaVariant {
    pub url: String,
    pub width: i32,
    pub height: i32,
}
/// API response for video original
///
/// Fully-qualified URL plus playback metadata for the stored video file.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ApiVideoOriginal {
    pub url: String,
    pub mime: String,
    /// Duration in seconds, when known.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub duration: Option<f64>,
}
/// API response for media variants
///
/// Every field is optional; only variants actually present in the stored
/// JSONB are serialized (the rest are omitted from the JSON entirely).
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct ApiMediaVariants {
    #[serde(skip_serializing_if = "Option::is_none")]
    pub thumb: Option<ApiMediaVariant>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub medium: Option<ApiMediaVariant>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub full: Option<ApiMediaVariant>,
    /// Preserved original (images only).
    #[serde(skip_serializing_if = "Option::is_none")]
    pub original: Option<ApiMediaVariant>,
    /// Poster frame (videos only, when available).
    #[serde(skip_serializing_if = "Option::is_none")]
    pub poster: Option<ApiMediaVariant>,
    // For video original (different structure)
    #[serde(skip_serializing_if = "Option::is_none")]
    pub video: Option<ApiVideoOriginal>,
}
/// Optional metadata stored with media
///
/// Deserialized from the `metadata` JSONB column; malformed blobs are
/// dropped during conversion rather than failing the response.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct MediaMetadata {
    /// Preferred crop focus for art-directed display.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub focal_point: Option<FocalPoint>,
    /// Accessibility alt text.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub alt_text: Option<String>,
    /// Duration in seconds (videos).
    #[serde(skip_serializing_if = "Option::is_none")]
    pub duration: Option<f64>,
}
/// Focal point within an image.
// NOTE(review): presumably normalized 0.0–1.0 coordinates — confirm against
// the frontend MediaManager before relying on the range.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct FocalPoint {
    pub x: f64,
    pub y: f64,
}
/// API response type for project media
///
/// The public/admin-facing shape produced by `DbProjectMedia::to_api_media`.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct ApiProjectMedia {
    /// Media UUID rendered as a string.
    pub id: String,
    pub display_order: i32,
    pub media_type: MediaType,
    pub variants: ApiMediaVariants,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub blurhash: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub metadata: Option<MediaMetadata>,
}
/// Base URL for R2 media storage
// NOTE(review): hard-coded production host; consider sourcing this from
// config/env so staging environments can point at a different bucket.
const R2_BASE_URL: &str = "https://media.xevion.dev";
impl DbProjectMedia {
/// Convert database media to API response format
pub fn to_api_media(&self) -> ApiProjectMedia {
let variants = self.build_api_variants();
let metadata = self
.metadata
.as_ref()
.and_then(|m| serde_json::from_value(m.clone()).ok());
ApiProjectMedia {
id: self.id.to_string(),
display_order: self.display_order,
media_type: self.media_type,
variants,
blurhash: self.blurhash.clone(),
metadata,
}
}
fn build_api_variants(&self) -> ApiMediaVariants {
let base_url = format!(
"{}/{}",
R2_BASE_URL,
self.r2_base_path.trim_end_matches('/')
);
let mut variants = ApiMediaVariants {
thumb: None,
medium: None,
full: None,
original: None,
poster: None,
video: None,
};
// Parse the JSONB variants
if let Some(obj) = self.variants.as_object() {
// Handle image variants
if let Some(thumb) = obj.get("thumb") {
if let Ok(v) = serde_json::from_value::<ImageVariant>(thumb.clone()) {
variants.thumb = Some(ApiMediaVariant {
url: format!("{}/{}", base_url, v.key),
width: v.width,
height: v.height,
});
}
}
if let Some(medium) = obj.get("medium") {
if let Ok(v) = serde_json::from_value::<ImageVariant>(medium.clone()) {
variants.medium = Some(ApiMediaVariant {
url: format!("{}/{}", base_url, v.key),
width: v.width,
height: v.height,
});
}
}
if let Some(full) = obj.get("full") {
if let Ok(v) = serde_json::from_value::<ImageVariant>(full.clone()) {
variants.full = Some(ApiMediaVariant {
url: format!("{}/{}", base_url, v.key),
width: v.width,
height: v.height,
});
}
}
// Handle original - could be image or video
if let Some(original) = obj.get("original") {
if self.media_type == MediaType::Video {
// Video original has different structure
if let Ok(v) = serde_json::from_value::<VideoOriginal>(original.clone()) {
variants.video = Some(ApiVideoOriginal {
url: format!("{}/{}", base_url, v.key),
mime: v.mime,
duration: v.duration,
});
}
} else {
// Image original
if let Ok(v) = serde_json::from_value::<ImageVariant>(original.clone()) {
variants.original = Some(ApiMediaVariant {
url: format!("{}/{}", base_url, v.key),
width: v.width,
height: v.height,
});
}
}
}
// Handle video poster
if let Some(poster) = obj.get("poster") {
if let Ok(v) = serde_json::from_value::<ImageVariant>(poster.clone()) {
variants.poster = Some(ApiMediaVariant {
url: format!("{}/{}", base_url, v.key),
width: v.width,
height: v.height,
});
}
}
}
variants
}
}
// Database query functions
/// Get all media for a project, ordered by display_order
///
/// Returns an empty Vec when the project has no media rows (including when
/// the project id does not exist — callers check project existence
/// separately). The `media_type` column is cast into the `MediaType` enum
/// via sqlx's type-override syntax.
pub async fn get_media_for_project(
    pool: &PgPool,
    project_id: Uuid,
) -> Result<Vec<DbProjectMedia>, sqlx::Error> {
    sqlx::query_as!(
        DbProjectMedia,
        r#"
        SELECT
            id,
            project_id,
            display_order,
            media_type as "media_type: MediaType",
            original_filename,
            r2_base_path,
            variants,
            width,
            height,
            size_bytes,
            blurhash,
            metadata,
            created_at
        FROM project_media
        WHERE project_id = $1
        ORDER BY display_order ASC
        "#,
        project_id
    )
    .fetch_all(pool)
    .await
}
/// Get single media item by ID
///
/// Returns `Ok(None)` when no media with this id exists.
pub async fn get_media_by_id(
    pool: &PgPool,
    id: Uuid,
) -> Result<Option<DbProjectMedia>, sqlx::Error> {
    sqlx::query_as!(
        DbProjectMedia,
        r#"
        SELECT
            id,
            project_id,
            display_order,
            media_type as "media_type: MediaType",
            original_filename,
            r2_base_path,
            variants,
            width,
            height,
            size_bytes,
            blurhash,
            metadata,
            created_at
        FROM project_media
        WHERE id = $1
        "#,
        id
    )
    .fetch_optional(pool)
    .await
}
/// Get the next display order for a project's media
///
/// Returns MAX(display_order) + 1, or 0 for a project with no media yet.
///
/// NOTE(review): read-then-insert callers race under concurrent uploads —
/// two requests can observe the same value and collide on the display_order
/// constraint. Acceptable for a single-admin tool; revisit if uploads can
/// run concurrently.
pub async fn get_next_display_order(pool: &PgPool, project_id: Uuid) -> Result<i32, sqlx::Error> {
    let result = sqlx::query!(
        r#"
        SELECT COALESCE(MAX(display_order) + 1, 0) as "next_order!"
        FROM project_media
        WHERE project_id = $1
        "#,
        project_id
    )
    .fetch_one(pool)
    .await?;

    Ok(result.next_order)
}
/// Create a new media record
///
/// The row is appended to the end of the project's media list:
/// `display_order` is computed inside the INSERT itself (MAX + 1, or 0 for
/// the first item). Compared to the previous read-then-insert via
/// `get_next_display_order`, this saves a round trip and narrows the window
/// in which two concurrent uploads could claim the same order.
///
/// Returns the freshly inserted row.
#[allow(clippy::too_many_arguments)]
pub async fn create_media(
    pool: &PgPool,
    project_id: Uuid,
    media_type: MediaType,
    original_filename: &str,
    r2_base_path: &str,
    variants: serde_json::Value,
    width: Option<i32>,
    height: Option<i32>,
    size_bytes: i64,
    blurhash: Option<&str>,
    metadata: Option<serde_json::Value>,
) -> Result<DbProjectMedia, sqlx::Error> {
    sqlx::query_as!(
        DbProjectMedia,
        r#"
        INSERT INTO project_media (
            project_id, display_order, media_type, original_filename,
            r2_base_path, variants, width, height, size_bytes, blurhash, metadata
        )
        VALUES (
            $1,
            (SELECT COALESCE(MAX(display_order) + 1, 0)
             FROM project_media WHERE project_id = $1),
            $2, $3, $4, $5, $6, $7, $8, $9, $10
        )
        RETURNING
            id,
            project_id,
            display_order,
            media_type as "media_type: MediaType",
            original_filename,
            r2_base_path,
            variants,
            width,
            height,
            size_bytes,
            blurhash,
            metadata,
            created_at
        "#,
        project_id,
        media_type as MediaType,
        original_filename,
        r2_base_path,
        variants,
        width,
        height,
        size_bytes,
        blurhash,
        metadata
    )
    .fetch_one(pool)
    .await
}
/// Delete a media record
///
/// Deletes the row and returns it in a single atomic statement via
/// `DELETE ... RETURNING`. The previous fetch-then-delete pair could race
/// with a concurrent delete (returning a row that a second caller actually
/// removed) and cost an extra round trip.
///
/// Returns `Ok(None)` when no media with this id existed.
pub async fn delete_media(pool: &PgPool, id: Uuid) -> Result<Option<DbProjectMedia>, sqlx::Error> {
    sqlx::query_as!(
        DbProjectMedia,
        r#"
        DELETE FROM project_media
        WHERE id = $1
        RETURNING
            id,
            project_id,
            display_order,
            media_type as "media_type: MediaType",
            original_filename,
            r2_base_path,
            variants,
            width,
            height,
            size_bytes,
            blurhash,
            metadata,
            created_at
        "#,
        id
    )
    .fetch_optional(pool)
    .await
}
/// Reorder media for a project
/// Takes a list of media IDs in desired order and updates display_order accordingly
///
/// Runs as a two-phase update inside one transaction: display_order
/// apparently carries a uniqueness constraint (per the in-code comment
/// below), so rows are first parked at negative values and then assigned
/// their final 0-based positions — a direct single pass could collide with
/// existing values mid-update.
///
/// NOTE(review): IDs absent from `media_ids` keep their old display_order;
/// callers are expected to send the complete list for the project.
pub async fn reorder_media(
    pool: &PgPool,
    project_id: Uuid,
    media_ids: &[Uuid],
) -> Result<(), sqlx::Error> {
    // Use a transaction to ensure atomicity
    let mut tx = pool.begin().await?;

    // First, set all to negative values to avoid unique constraint conflicts
    for (i, id) in media_ids.iter().enumerate() {
        sqlx::query!(
            "UPDATE project_media SET display_order = $1 WHERE id = $2 AND project_id = $3",
            -(i as i32 + 1),
            id,
            project_id
        )
        .execute(&mut *tx)
        .await?;
    }

    // Then set to final positive values
    for (i, id) in media_ids.iter().enumerate() {
        sqlx::query!(
            "UPDATE project_media SET display_order = $1 WHERE id = $2 AND project_id = $3",
            i as i32,
            id,
            project_id
        )
        .execute(&mut *tx)
        .await?;
    }

    tx.commit().await?;
    Ok(())
}
/// Update media metadata (focal point, alt text, etc.)
///
/// Replaces the entire `metadata` JSONB value — no merge with the existing
/// blob is performed.
///
/// # Errors
/// Returns `sqlx::Error::RowNotFound` when no media with `id` exists
/// (`fetch_one` on an UPDATE that matched nothing).
pub async fn update_media_metadata(
    pool: &PgPool,
    id: Uuid,
    metadata: serde_json::Value,
) -> Result<DbProjectMedia, sqlx::Error> {
    sqlx::query_as!(
        DbProjectMedia,
        r#"
        UPDATE project_media
        SET metadata = $2
        WHERE id = $1
        RETURNING
            id,
            project_id,
            display_order,
            media_type as "media_type: MediaType",
            original_filename,
            r2_base_path,
            variants,
            width,
            height,
            size_bytes,
            blurhash,
            metadata,
            created_at
        "#,
        id,
        metadata
    )
    .fetch_one(pool)
    .await
}
+2
View File
@@ -1,8 +1,10 @@
pub mod media;
pub mod projects;
pub mod settings;
pub mod tags;
// Re-export all types and functions
pub use media::*;
pub use projects::*;
pub use settings::*;
pub use tags::*;
+21 -10
View File
@@ -5,7 +5,9 @@ use time::{OffsetDateTime, format_description::well_known::Rfc3339};
use uuid::Uuid;
use super::{
ProjectStatus, slugify,
ProjectStatus,
media::{ApiProjectMedia, DbProjectMedia, get_media_for_project},
slugify,
tags::{ApiTag, DbTag, get_tags_for_project},
};
@@ -49,6 +51,7 @@ pub struct ApiAdminProject {
#[serde(flatten)]
pub project: ApiProject,
pub tags: Vec<ApiTag>,
pub media: Vec<ApiProjectMedia>,
pub status: String,
pub description: String,
#[serde(skip_serializing_if = "Option::is_none")]
@@ -87,7 +90,11 @@ impl DbProject {
}
}
pub fn to_api_admin_project(&self, tags: Vec<DbTag>) -> ApiAdminProject {
pub fn to_api_admin_project(
&self,
tags: Vec<DbTag>,
media: Vec<DbProjectMedia>,
) -> ApiAdminProject {
let last_activity = self
.last_github_activity
.unwrap_or(self.created_at)
@@ -97,6 +104,7 @@ impl DbProject {
ApiAdminProject {
project: self.to_api_project(),
tags: tags.into_iter().map(|t| t.to_api_tag()).collect(),
media: media.into_iter().map(|m| m.to_api_media()).collect(),
status: format!("{:?}", self.status).to_lowercase(),
description: self.description.clone(),
github_repo: self.github_repo.clone(),
@@ -173,13 +181,14 @@ pub async fn get_public_projects(pool: &PgPool) -> Result<Vec<DbProject>, sqlx::
pub async fn get_public_projects_with_tags(
pool: &PgPool,
) -> Result<Vec<(DbProject, Vec<DbTag>)>, sqlx::Error> {
) -> Result<Vec<(DbProject, Vec<DbTag>, Vec<DbProjectMedia>)>, sqlx::Error> {
let projects = get_public_projects(pool).await?;
let mut result = Vec::new();
for project in projects {
let tags = get_tags_for_project(pool, project.id).await?;
result.push((project, tags));
let media = get_media_for_project(pool, project.id).await?;
result.push((project, tags, media));
}
Ok(result)
@@ -210,16 +219,17 @@ pub async fn get_all_projects_admin(pool: &PgPool) -> Result<Vec<DbProject>, sql
.await
}
/// Get all projects with tags (admin view)
/// Get all projects with tags and media (admin view)
pub async fn get_all_projects_with_tags_admin(
pool: &PgPool,
) -> Result<Vec<(DbProject, Vec<DbTag>)>, sqlx::Error> {
) -> Result<Vec<(DbProject, Vec<DbTag>, Vec<DbProjectMedia>)>, sqlx::Error> {
let projects = get_all_projects_admin(pool).await?;
let mut result = Vec::new();
for project in projects {
let tags = get_tags_for_project(pool, project.id).await?;
result.push((project, tags));
let media = get_media_for_project(pool, project.id).await?;
result.push((project, tags, media));
}
Ok(result)
@@ -252,17 +262,18 @@ pub async fn get_project_by_id(pool: &PgPool, id: Uuid) -> Result<Option<DbProje
.await
}
/// Get single project by ID with tags
/// Get single project by ID with tags and media
pub async fn get_project_by_id_with_tags(
pool: &PgPool,
id: Uuid,
) -> Result<Option<(DbProject, Vec<DbTag>)>, sqlx::Error> {
) -> Result<Option<(DbProject, Vec<DbTag>, Vec<DbProjectMedia>)>, sqlx::Error> {
let project = get_project_by_id(pool, id).await?;
match project {
Some(p) => {
let tags = get_tags_for_project(pool, p.id).await?;
Ok(Some((p, tags)))
let media = get_media_for_project(pool, p.id).await?;
Ok(Some((p, tags, media)))
}
None => Ok(None),
}
+709
View File
@@ -0,0 +1,709 @@
use axum::{
Json,
extract::{Multipart, State},
http::StatusCode,
response::IntoResponse,
};
use std::sync::Arc;
use ulid::Ulid;
use uuid::Uuid;
use crate::{auth, db, media_processing, r2::R2Client, state::AppState};
/// Request type for reordering media
///
/// JSON body of the reorder endpoint: `{"mediaIds": ["<uuid>", ...]}`.
#[derive(Debug, serde::Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct ReorderMediaRequest {
    /// Media IDs in desired order (stringified UUIDs, validated in handler).
    pub media_ids: Vec<String>,
}
/// Upload media for a project (requires authentication)
///
/// Accepts multipart/form-data with a single file field ("file" or "media").
/// Images are processed into thumb/medium/full WebP variants plus the
/// preserved original and uploaded to R2; videos are stored as-is (no
/// transcoding; poster extraction is future work).
///
/// Responses: 201 with the created media record; 400 for invalid IDs,
/// missing file, unsupported or unprocessable content; 404 for a missing
/// project; 503 when R2 is not configured; 500 on storage/database errors.
pub async fn upload_media_handler(
    State(state): State<Arc<AppState>>,
    axum::extract::Path(project_id): axum::extract::Path<String>,
    jar: axum_extra::extract::CookieJar,
    mut multipart: Multipart,
) -> impl IntoResponse {
    // Check auth
    if auth::check_session(&state, &jar).is_none() {
        return auth::require_auth_response().into_response();
    }

    let project_id = match Uuid::parse_str(&project_id) {
        Ok(id) => id,
        Err(_) => {
            return (
                StatusCode::BAD_REQUEST,
                Json(serde_json::json!({
                    "error": "Invalid project ID",
                    "message": "Project ID must be a valid UUID"
                })),
            )
                .into_response();
        }
    };

    // Verify project exists
    match db::get_project_by_id(&state.pool, project_id).await {
        Ok(Some(_)) => {}
        Ok(None) => {
            return (
                StatusCode::NOT_FOUND,
                Json(serde_json::json!({
                    "error": "Not found",
                    "message": "Project not found"
                })),
            )
                .into_response();
        }
        Err(err) => {
            tracing::error!(error = %err, "Failed to check project existence");
            return (
                StatusCode::INTERNAL_SERVER_ERROR,
                Json(serde_json::json!({
                    "error": "Internal server error",
                    "message": "Failed to verify project"
                })),
            )
                .into_response();
        }
    }

    // Get R2 client
    let r2 = match R2Client::get().await {
        Some(r2) => r2,
        None => {
            tracing::error!("R2 client not available");
            return (
                StatusCode::SERVICE_UNAVAILABLE,
                Json(serde_json::json!({
                    "error": "Service unavailable",
                    "message": "Media storage is not configured"
                })),
            )
                .into_response();
        }
    };

    // Extract file from multipart
    let (filename, content_type, data) = match extract_file(&mut multipart).await {
        Ok(Some(file)) => file,
        Ok(None) => {
            return (
                StatusCode::BAD_REQUEST,
                Json(serde_json::json!({
                    "error": "Bad request",
                    "message": "No file provided"
                })),
            )
                .into_response();
        }
        Err(err) => {
            tracing::error!(error = %err, "Failed to extract file from multipart");
            return (
                StatusCode::BAD_REQUEST,
                Json(serde_json::json!({
                    "error": "Bad request",
                    "message": format!("Failed to read upload: {err}")
                })),
            )
                .into_response();
        }
    };

    // Determine media type and process
    let is_video = media_processing::is_supported_video(&content_type);
    let is_image = media_processing::is_supported_image(&content_type);
    if !is_video && !is_image {
        return (
            StatusCode::BAD_REQUEST,
            Json(serde_json::json!({
                "error": "Unsupported format",
                "message": format!("Content type '{}' is not supported. Supported: JPEG, PNG, GIF, WebP, AVIF, MP4, WebM", content_type)
            })),
        )
            .into_response();
    }

    // Generate unique asset ID; all objects for this upload live under it.
    let asset_id = Ulid::new();
    let r2_base_path = format!("projects/{project_id}/{asset_id}");

    // Record the upload size up front so the buffer can later be MOVED into
    // storage instead of cloned (uploads can be up to 50 MiB).
    let size_bytes = data.len() as i64;

    if is_image {
        // Process image (resize variants + blurhash)
        let processed = match media_processing::process_image(&data, &filename) {
            Ok(p) => p,
            Err(err) => {
                tracing::error!(error = %err, filename = %filename, "Failed to process image");
                return (
                    StatusCode::BAD_REQUEST,
                    Json(serde_json::json!({
                        "error": "Processing failed",
                        "message": format!("Failed to process image: {err}")
                    })),
                )
                    .into_response();
            }
        };

        // Upload all variants to R2
        if let Err(err) = upload_image_variants(&r2, &r2_base_path, &processed).await {
            tracing::error!(error = %err, "Failed to upload image variants to R2");
            return (
                StatusCode::INTERNAL_SERVER_ERROR,
                Json(serde_json::json!({
                    "error": "Upload failed",
                    "message": "Failed to upload media to storage"
                })),
            )
                .into_response();
        }

        // BUGFIX: derive the original's extension from its detected MIME type
        // (mirroring upload_image_variants) rather than from the user-supplied
        // filename. Previously e.g. "photo.jpeg" uploaded "original.jpg" to R2
        // but recorded "original.jpeg" in the variants JSON — a dead URL.
        let original_ext = match processed.original.mime.as_str() {
            "image/jpeg" => "jpg",
            "image/png" => "png",
            "image/gif" => "gif",
            "image/webp" => "webp",
            "image/avif" => "avif",
            _ => "jpg",
        };

        // Build variants JSON
        let variants = serde_json::json!({
            "thumb": {
                "key": "thumb.webp",
                "width": processed.thumb.width,
                "height": processed.thumb.height
            },
            "medium": {
                "key": "medium.webp",
                "width": processed.medium.width,
                "height": processed.medium.height
            },
            "full": {
                "key": "full.webp",
                "width": processed.full.width,
                "height": processed.full.height
            },
            "original": {
                "key": format!("original.{original_ext}"),
                "width": processed.original.width,
                "height": processed.original.height,
                "mime": processed.original.mime
            }
        });

        // Create database record
        match db::create_media(
            &state.pool,
            project_id,
            db::MediaType::Image,
            &filename,
            &r2_base_path,
            variants,
            Some(processed.original.width as i32),
            Some(processed.original.height as i32),
            size_bytes,
            Some(&processed.blurhash),
            None,
        )
        .await
        {
            Ok(media) => {
                tracing::info!(
                    media_id = %media.id,
                    project_id = %project_id,
                    filename = %filename,
                    "Image uploaded successfully"
                );
                // Invalidate cache
                state.isr_cache.invalidate("/").await;
                (StatusCode::CREATED, Json(media.to_api_media())).into_response()
            }
            Err(err) => {
                tracing::error!(error = %err, "Failed to create media record");
                // TODO: Clean up R2 files on DB failure
                (
                    StatusCode::INTERNAL_SERVER_ERROR,
                    Json(serde_json::json!({
                        "error": "Database error",
                        "message": "Failed to save media record"
                    })),
                )
                    .into_response()
            }
        }
    } else {
        // Video upload - basic support (no transcoding, ffmpeg poster extraction optional)
        let original_ext = match content_type.as_str() {
            "video/mp4" => "mp4",
            "video/webm" => "webm",
            "video/quicktime" => "mov",
            _ => "mp4",
        };

        // Upload original video. `data` is moved (not cloned as before) —
        // its size was captured in `size_bytes` above.
        let video_key = format!("{r2_base_path}/original.{original_ext}");
        if let Err(err) = r2.put_object(&video_key, data, &content_type).await {
            tracing::error!(error = %err, "Failed to upload video to R2");
            return (
                StatusCode::INTERNAL_SERVER_ERROR,
                Json(serde_json::json!({
                    "error": "Upload failed",
                    "message": "Failed to upload video to storage"
                })),
            )
                .into_response();
        }

        // Build variants JSON (video only has original for now)
        let variants = serde_json::json!({
            "original": {
                "key": format!("original.{original_ext}"),
                "mime": content_type
            }
        });

        // Create database record
        match db::create_media(
            &state.pool,
            project_id,
            db::MediaType::Video,
            &filename,
            &r2_base_path,
            variants,
            None, // Video dimensions would require ffprobe
            None,
            size_bytes,
            None, // No blurhash without poster
            None,
        )
        .await
        {
            Ok(media) => {
                tracing::info!(
                    media_id = %media.id,
                    project_id = %project_id,
                    filename = %filename,
                    "Video uploaded successfully"
                );
                // Invalidate cache
                state.isr_cache.invalidate("/").await;
                (StatusCode::CREATED, Json(media.to_api_media())).into_response()
            }
            Err(err) => {
                tracing::error!(error = %err, "Failed to create media record");
                (
                    StatusCode::INTERNAL_SERVER_ERROR,
                    Json(serde_json::json!({
                        "error": "Database error",
                        "message": "Failed to save media record"
                    })),
                )
                    .into_response()
            }
        }
    }
}
/// Extract file from multipart form data
///
/// Scans the form for the first field named "file" or "media" and returns
/// its `(filename, content_type, bytes)`. Missing filename/content-type
/// fall back to "upload" / "application/octet-stream". Yields `Ok(None)`
/// when the form contains no matching field.
async fn extract_file(
    multipart: &mut Multipart,
) -> Result<Option<(String, String, Vec<u8>)>, String> {
    loop {
        let field = match multipart
            .next_field()
            .await
            .map_err(|e| format!("Failed to get field: {e}"))?
        {
            Some(f) => f,
            None => return Ok(None),
        };

        // Accept 'file' or 'media' field names; skip anything else.
        let field_name = field.name().unwrap_or("").to_string();
        if !matches!(field_name.as_str(), "file" | "media") {
            continue;
        }

        let filename = field.file_name().unwrap_or("upload").to_string();
        let mime = field
            .content_type()
            .unwrap_or("application/octet-stream")
            .to_string();
        let bytes = field
            .bytes()
            .await
            .map_err(|e| format!("Failed to read file data: {e}"))?
            .to_vec();

        return Ok(Some((filename, mime, bytes)));
    }
}
/// Upload all image variants to R2
///
/// Pushes the three generated WebP renditions plus the original (preserved
/// in its source format) under `base_path`. The upload list is data-driven
/// to avoid repeating the same put_object call four times.
async fn upload_image_variants(
    r2: &R2Client,
    base_path: &str,
    processed: &media_processing::ProcessedImage,
) -> Result<(), String> {
    // Map the original's MIME type back to a file extension.
    let original_ext = match processed.original.mime.as_str() {
        "image/jpeg" => "jpg",
        "image/png" => "png",
        "image/gif" => "gif",
        "image/webp" => "webp",
        "image/avif" => "avif",
        _ => "jpg",
    };

    // (object key, payload, MIME) for each rendition.
    let uploads = [
        ("thumb.webp".to_string(), &processed.thumb.data, "image/webp"),
        ("medium.webp".to_string(), &processed.medium.data, "image/webp"),
        ("full.webp".to_string(), &processed.full.data, "image/webp"),
        (
            format!("original.{original_ext}"),
            &processed.original.data,
            processed.original.mime.as_str(),
        ),
    ];

    for (name, data, mime) in uploads {
        r2.put_object(&format!("{base_path}/{name}"), data.clone(), mime)
            .await?;
    }

    Ok(())
}
/// Get all media for a project
///
/// Public endpoint: returns the project's media entries (with fully
/// qualified URLs) sorted by display order. Responds 404 when the project
/// itself does not exist, 400 for a malformed UUID.
pub async fn get_project_media_handler(
    State(state): State<Arc<AppState>>,
    axum::extract::Path(project_id): axum::extract::Path<String>,
) -> impl IntoResponse {
    let Ok(project_id) = Uuid::parse_str(&project_id) else {
        return (
            StatusCode::BAD_REQUEST,
            Json(serde_json::json!({
                "error": "Invalid project ID",
                "message": "Project ID must be a valid UUID"
            })),
        )
            .into_response();
    };

    // Make sure the project exists before querying its media.
    match db::get_project_by_id(&state.pool, project_id).await {
        Ok(Some(_)) => {}
        Ok(None) => {
            return (
                StatusCode::NOT_FOUND,
                Json(serde_json::json!({
                    "error": "Not found",
                    "message": "Project not found"
                })),
            )
                .into_response();
        }
        Err(err) => {
            tracing::error!(error = %err, "Failed to check project existence");
            return (
                StatusCode::INTERNAL_SERVER_ERROR,
                Json(serde_json::json!({
                    "error": "Internal server error",
                    "message": "Failed to fetch project"
                })),
            )
                .into_response();
        }
    }

    match db::get_media_for_project(&state.pool, project_id).await {
        Ok(items) => {
            let body: Vec<db::ApiProjectMedia> =
                items.into_iter().map(|m| m.to_api_media()).collect();
            Json(body).into_response()
        }
        Err(err) => {
            tracing::error!(error = %err, project_id = %project_id, "Failed to fetch project media");
            (
                StatusCode::INTERNAL_SERVER_ERROR,
                Json(serde_json::json!({
                    "error": "Internal server error",
                    "message": "Failed to fetch media"
                })),
            )
                .into_response()
        }
    }
}
/// Delete a media item (requires authentication)
///
/// Removes both the R2 objects under the media's base path and the database
/// row. R2 deletion happens first and is best-effort: a storage failure is
/// only logged (objects are orphaned) and does not abort the DB delete.
///
/// Responses: 200 with the deleted record, 400 for malformed IDs, 404 when
/// the media does not exist or belongs to a different project.
pub async fn delete_media_handler(
    State(state): State<Arc<AppState>>,
    axum::extract::Path((project_id, media_id)): axum::extract::Path<(String, String)>,
    jar: axum_extra::extract::CookieJar,
) -> impl IntoResponse {
    // Check auth
    if auth::check_session(&state, &jar).is_none() {
        return auth::require_auth_response().into_response();
    }

    let project_id = match Uuid::parse_str(&project_id) {
        Ok(id) => id,
        Err(_) => {
            return (
                StatusCode::BAD_REQUEST,
                Json(serde_json::json!({
                    "error": "Invalid project ID",
                    "message": "Project ID must be a valid UUID"
                })),
            )
                .into_response();
        }
    };

    let media_id = match Uuid::parse_str(&media_id) {
        Ok(id) => id,
        Err(_) => {
            return (
                StatusCode::BAD_REQUEST,
                Json(serde_json::json!({
                    "error": "Invalid media ID",
                    "message": "Media ID must be a valid UUID"
                })),
            )
                .into_response();
        }
    };

    // Get media first to verify it belongs to the project
    match db::get_media_by_id(&state.pool, media_id).await {
        Ok(Some(media)) => {
            // A media id belonging to another project is reported as "not
            // found" rather than revealing its existence.
            if media.project_id != project_id {
                return (
                    StatusCode::NOT_FOUND,
                    Json(serde_json::json!({
                        "error": "Not found",
                        "message": "Media not found for this project"
                    })),
                )
                    .into_response();
            }

            // Delete files from R2 storage
            let r2_base_path = media.r2_base_path.clone();
            if let Some(r2) = R2Client::get().await {
                // Delete all files under the media's R2 prefix
                let prefix = format!("{}/", r2_base_path.trim_end_matches('/'));
                match r2.delete_prefix(&prefix).await {
                    Ok(count) => {
                        tracing::info!(
                            media_id = %media_id,
                            r2_prefix = %prefix,
                            deleted_count = count,
                            "Deleted R2 objects"
                        );
                    }
                    Err(err) => {
                        // Log but don't fail - DB record deletion is more important
                        tracing::warn!(
                            error = %err,
                            media_id = %media_id,
                            r2_prefix = %prefix,
                            "Failed to delete R2 objects (will be orphaned)"
                        );
                    }
                }
            }

            match db::delete_media(&state.pool, media_id).await {
                Ok(Some(deleted)) => {
                    tracing::info!(
                        media_id = %media_id,
                        project_id = %project_id,
                        r2_base_path = %deleted.r2_base_path,
                        "Media deleted from database"
                    );
                    // Invalidate cache since project data changed
                    state.isr_cache.invalidate("/").await;
                    Json(deleted.to_api_media()).into_response()
                }
                // Row vanished between the lookup above and the delete
                // (e.g. a concurrent delete) — report not found.
                Ok(None) => (
                    StatusCode::NOT_FOUND,
                    Json(serde_json::json!({
                        "error": "Not found",
                        "message": "Media not found"
                    })),
                )
                    .into_response(),
                Err(err) => {
                    tracing::error!(error = %err, "Failed to delete media");
                    (
                        StatusCode::INTERNAL_SERVER_ERROR,
                        Json(serde_json::json!({
                            "error": "Internal server error",
                            "message": "Failed to delete media"
                        })),
                    )
                        .into_response()
                }
            }
        }
        Ok(None) => (
            StatusCode::NOT_FOUND,
            Json(serde_json::json!({
                "error": "Not found",
                "message": "Media not found"
            })),
        )
            .into_response(),
        Err(err) => {
            tracing::error!(error = %err, "Failed to fetch media");
            (
                StatusCode::INTERNAL_SERVER_ERROR,
                Json(serde_json::json!({
                    "error": "Internal server error",
                    "message": "Failed to fetch media"
                })),
            )
                .into_response()
        }
    }
}
/// Reorder media items for a project (requires authentication)
///
/// Accepts the list of media IDs in their desired display order, persists
/// it via `db::reorder_media`, and returns the refreshed media list. The
/// ISR cache is only invalidated after the refreshed list is fetched
/// successfully.
///
/// NOTE(review): IDs omitted from the payload keep their previous
/// display_order (see `db::reorder_media`) — confirm the admin UI always
/// sends the complete list for the project.
pub async fn reorder_media_handler(
    State(state): State<Arc<AppState>>,
    axum::extract::Path(project_id): axum::extract::Path<String>,
    jar: axum_extra::extract::CookieJar,
    Json(payload): Json<ReorderMediaRequest>,
) -> impl IntoResponse {
    // Check auth
    if auth::check_session(&state, &jar).is_none() {
        return auth::require_auth_response().into_response();
    }

    let project_id = match Uuid::parse_str(&project_id) {
        Ok(id) => id,
        Err(_) => {
            return (
                StatusCode::BAD_REQUEST,
                Json(serde_json::json!({
                    "error": "Invalid project ID",
                    "message": "Project ID must be a valid UUID"
                })),
            )
                .into_response();
        }
    };

    // Parse media IDs — the whole request is rejected if any ID is malformed
    let media_ids: Result<Vec<Uuid>, _> = payload
        .media_ids
        .iter()
        .map(|id| Uuid::parse_str(id))
        .collect();
    let media_ids = match media_ids {
        Ok(ids) => ids,
        Err(_) => {
            return (
                StatusCode::BAD_REQUEST,
                Json(serde_json::json!({
                    "error": "Invalid media ID",
                    "message": "All media IDs must be valid UUIDs"
                })),
            )
                .into_response();
        }
    };

    // Verify project exists
    match db::get_project_by_id(&state.pool, project_id).await {
        Ok(None) => {
            return (
                StatusCode::NOT_FOUND,
                Json(serde_json::json!({
                    "error": "Not found",
                    "message": "Project not found"
                })),
            )
                .into_response();
        }
        Err(err) => {
            tracing::error!(error = %err, "Failed to check project existence");
            return (
                StatusCode::INTERNAL_SERVER_ERROR,
                Json(serde_json::json!({
                    "error": "Internal server error",
                    "message": "Failed to verify project"
                })),
            )
                .into_response();
        }
        Ok(Some(_)) => {}
    }

    // Reorder media
    match db::reorder_media(&state.pool, project_id, &media_ids).await {
        Ok(()) => {
            // Fetch updated media list
            match db::get_media_for_project(&state.pool, project_id).await {
                Ok(media) => {
                    // Invalidate cache since project data changed
                    state.isr_cache.invalidate("/").await;
                    let response: Vec<db::ApiProjectMedia> =
                        media.into_iter().map(|m| m.to_api_media()).collect();
                    Json(response).into_response()
                }
                Err(err) => {
                    tracing::error!(error = %err, "Failed to fetch updated media");
                    (
                        StatusCode::INTERNAL_SERVER_ERROR,
                        Json(serde_json::json!({
                            "error": "Internal server error",
                            "message": "Reorder succeeded but failed to fetch updated list"
                        })),
                    )
                        .into_response()
                }
            }
        }
        Err(err) => {
            tracing::error!(error = %err, "Failed to reorder media");
            (
                StatusCode::INTERNAL_SERVER_ERROR,
                Json(serde_json::json!({
                    "error": "Internal server error",
                    "message": "Failed to reorder media"
                })),
            )
                .into_response()
        }
    }
}
// NOTE: Upload, list, delete, and reorder handlers are implemented above.
// Remaining media work: video poster extraction / transcoding (ffmpeg) and
// cleaning up already-uploaded R2 objects when the DB insert fails.
+2
View File
@@ -1,6 +1,7 @@
pub mod assets;
pub mod auth;
pub mod health;
pub mod media;
pub mod projects;
pub mod settings;
pub mod tags;
@@ -9,6 +10,7 @@ pub mod tags;
pub use assets::*;
pub use auth::*;
pub use health::*;
pub use media::*;
pub use projects::*;
pub use settings::*;
pub use tags::*;
+43 -38
View File
@@ -11,12 +11,12 @@ pub async fn projects_handler(
let is_admin = auth::check_session(&state, &jar).is_some();
if is_admin {
// Admin view: return all projects with tags
// Admin view: return all projects with tags and media
match db::get_all_projects_with_tags_admin(&state.pool).await {
Ok(projects_with_tags) => {
let response: Vec<db::ApiAdminProject> = projects_with_tags
.into_iter()
.map(|(project, tags)| project.to_api_admin_project(tags))
.map(|(project, tags, media)| project.to_api_admin_project(tags, media))
.collect();
Json(response).into_response()
}
@@ -33,12 +33,12 @@ pub async fn projects_handler(
}
}
} else {
// Public view: return non-hidden projects with tags
// Public view: return non-hidden projects with tags and media
match db::get_public_projects_with_tags(&state.pool).await {
Ok(projects_with_tags) => {
let response: Vec<db::ApiAdminProject> = projects_with_tags
.into_iter()
.map(|(project, tags)| project.to_api_admin_project(tags))
.map(|(project, tags, media)| project.to_api_admin_project(tags, media))
.collect();
Json(response).into_response()
}
@@ -80,7 +80,7 @@ pub async fn get_project_handler(
let is_admin = auth::check_session(&state, &jar).is_some();
match db::get_project_by_id_with_tags(&state.pool, project_id).await {
Ok(Some((project, tags))) => {
Ok(Some((project, tags, media))) => {
// If project is hidden and user is not admin, return 404
if project.status == db::ProjectStatus::Hidden && !is_admin {
return (
@@ -93,7 +93,7 @@ pub async fn get_project_handler(
.into_response();
}
Json(project.to_api_admin_project(tags)).into_response()
Json(project.to_api_admin_project(tags, media)).into_response()
}
Ok(None) => (
StatusCode::NOT_FOUND,
@@ -214,8 +214,10 @@ pub async fn create_project_handler(
tracing::error!(error = %err, project_id = %project.id, "Failed to set project tags");
}
// Fetch project with tags to return
let (project, tags) = match db::get_project_by_id_with_tags(&state.pool, project.id).await {
// Fetch project with tags and media to return
let (project, tags, media) = match db::get_project_by_id_with_tags(&state.pool, project.id)
.await
{
Ok(Some(data)) => data,
Ok(None) => {
tracing::error!(project_id = %project.id, "Project not found after creation");
@@ -248,7 +250,7 @@ pub async fn create_project_handler(
(
StatusCode::CREATED,
Json(project.to_api_admin_project(tags)),
Json(project.to_api_admin_project(tags, media)),
)
.into_response()
}
@@ -384,8 +386,10 @@ pub async fn update_project_handler(
tracing::error!(error = %err, project_id = %project.id, "Failed to update project tags");
}
// Fetch updated project with tags
let (project, tags) = match db::get_project_by_id_with_tags(&state.pool, project.id).await {
// Fetch updated project with tags and media
let (project, tags, media) = match db::get_project_by_id_with_tags(&state.pool, project.id)
.await
{
Ok(Some(data)) => data,
Ok(None) => {
tracing::error!(project_id = %project.id, "Project not found after update");
@@ -416,7 +420,7 @@ pub async fn update_project_handler(
// Invalidate cached pages that display projects
state.isr_cache.invalidate("/").await;
Json(project.to_api_admin_project(tags)).into_response()
Json(project.to_api_admin_project(tags, media)).into_response()
}
/// Delete a project (requires authentication)
@@ -446,32 +450,33 @@ pub async fn delete_project_handler(
};
// Fetch project before deletion to return it
let (project, tags) = match db::get_project_by_id_with_tags(&state.pool, project_id).await {
Ok(Some(data)) => data,
Ok(None) => {
return (
StatusCode::NOT_FOUND,
Json(serde_json::json!({
"error": "Not found",
"message": "Project not found"
})),
)
.into_response();
}
Err(err) => {
tracing::error!(error = %err, "Failed to fetch project before deletion");
return (
StatusCode::INTERNAL_SERVER_ERROR,
Json(serde_json::json!({
"error": "Internal server error",
"message": "Failed to delete project"
})),
)
.into_response();
}
};
let (project, tags, media) =
match db::get_project_by_id_with_tags(&state.pool, project_id).await {
Ok(Some(data)) => data,
Ok(None) => {
return (
StatusCode::NOT_FOUND,
Json(serde_json::json!({
"error": "Not found",
"message": "Project not found"
})),
)
.into_response();
}
Err(err) => {
tracing::error!(error = %err, "Failed to fetch project before deletion");
return (
StatusCode::INTERNAL_SERVER_ERROR,
Json(serde_json::json!({
"error": "Internal server error",
"message": "Failed to delete project"
})),
)
.into_response();
}
};
// Delete project (CASCADE handles tags)
// Delete project (CASCADE handles tags and media)
match db::delete_project(&state.pool, project_id).await {
Ok(()) => {
tracing::info!(project_id = %project_id, project_name = %project.name, "Project deleted");
@@ -479,7 +484,7 @@ pub async fn delete_project_handler(
// Invalidate cached pages that display projects
state.isr_cache.invalidate("/").await;
Json(project.to_api_admin_project(tags)).into_response()
Json(project.to_api_admin_project(tags, media)).into_response()
}
Err(err) => {
tracing::error!(error = %err, "Failed to delete project");
+1
View File
@@ -12,6 +12,7 @@ mod github;
mod handlers;
mod health;
mod http;
mod media_processing;
mod middleware;
mod og;
mod proxy;
+229
View File
@@ -0,0 +1,229 @@
//! Image processing utilities for media uploads.
//!
//! Handles resizing, format conversion, and blurhash generation.
use image::{DynamicImage, GenericImageView, ImageFormat, imageops::FilterType};
use std::io::Cursor;
/// Target widths (in pixels) for the resized image variants; height is
/// derived from the source aspect ratio at resize time.
pub const THUMB_WIDTH: u32 = 300;
pub const MEDIUM_WIDTH: u32 = 800;
pub const FULL_WIDTH: u32 = 1600;
/// Quality setting for WebP encoding (0-100)
// NOTE(review): not referenced anywhere in this module — `resize_to_webp`
// encodes via `write_to(.., ImageFormat::WebP)`, which takes no quality
// parameter. Confirm whether this should be wired into the encoder or removed.
const WEBP_QUALITY: u8 = 85;
/// Result of processing an uploaded image: three resized WebP variants,
/// the untouched original, and a BlurHash placeholder string.
#[derive(Debug)]
pub struct ProcessedImage {
    /// WebP variant capped at `THUMB_WIDTH` (300px wide).
    pub thumb: ImageVariant,
    /// WebP variant capped at `MEDIUM_WIDTH` (800px wide).
    pub medium: ImageVariant,
    /// WebP variant capped at `FULL_WIDTH` (1600px wide).
    pub full: ImageVariant,
    /// Original upload bytes, kept as-is in their source format.
    pub original: OriginalImage,
    /// BlurHash placeholder string computed from the decoded image.
    pub blurhash: String,
}
/// A processed image variant: WebP-encoded bytes plus the actual pixel
/// dimensions of the encoded image.
#[derive(Debug)]
pub struct ImageVariant {
    /// WebP-encoded image bytes.
    pub data: Vec<u8>,
    /// Width of the encoded image in pixels.
    pub width: u32,
    /// Height of the encoded image in pixels.
    pub height: u32,
}
/// Original image info: the unmodified upload bytes with detected
/// dimensions and MIME type.
#[derive(Debug)]
pub struct OriginalImage {
    /// Raw upload bytes, byte-for-byte as received.
    pub data: Vec<u8>,
    /// Width of the original image in pixels.
    pub width: u32,
    /// Height of the original image in pixels.
    pub height: u32,
    /// MIME type detected by `detect_format` (e.g. "image/jpeg").
    pub mime: String,
}
/// Errors that can occur during image processing
#[derive(Debug, thiserror::Error)]
pub enum ProcessingError {
    /// The bytes could not be decoded as the detected format.
    #[error("Failed to decode image: {0}")]
    DecodeError(String),
    /// A resized variant could not be encoded to WebP.
    #[error("Failed to encode image: {0}")]
    EncodeError(String),
    /// Neither magic bytes nor file extension matched a supported format.
    #[error("Unsupported image format: {0}")]
    UnsupportedFormat(String),
    /// The image's shorter side is below the minimum (thumbnail width).
    #[error("Image too small: minimum {min}px, got {actual}px")]
    TooSmall { min: u32, actual: u32 },
    /// The blurhash crate rejected the input (error is Debug-formatted).
    #[error("BlurHash generation failed: {0}")]
    BlurHashError(String),
}
/// Detect image format from bytes and filename.
///
/// Content sniffing (magic bytes) takes priority; the lowercased file
/// extension is only consulted when the bytes are not recognized.
pub fn detect_format(
    data: &[u8],
    filename: &str,
) -> Result<(ImageFormat, &'static str), ProcessingError> {
    match image::guess_format(data) {
        // Magic bytes recognized: map to a MIME type or reject.
        Ok(format) => {
            let mime = match format {
                ImageFormat::Jpeg => "image/jpeg",
                ImageFormat::Png => "image/png",
                ImageFormat::Gif => "image/gif",
                ImageFormat::WebP => "image/webp",
                ImageFormat::Avif => "image/avif",
                other => return Err(ProcessingError::UnsupportedFormat(format!("{other:?}"))),
            };
            Ok((format, mime))
        }
        // Sniffing failed: fall back to the file extension.
        Err(_) => {
            let ext = filename.rsplit('.').next().unwrap_or("").to_lowercase();
            match ext.as_str() {
                "jpg" | "jpeg" => Ok((ImageFormat::Jpeg, "image/jpeg")),
                "png" => Ok((ImageFormat::Png, "image/png")),
                "gif" => Ok((ImageFormat::Gif, "image/gif")),
                "webp" => Ok((ImageFormat::WebP, "image/webp")),
                "avif" => Ok((ImageFormat::Avif, "image/avif")),
                _ => Err(ProcessingError::UnsupportedFormat(ext)),
            }
        }
    }
}
/// Process an uploaded image into all required variants.
///
/// Decodes `data`, rejects images whose shorter side is below the
/// thumbnail width, produces WebP thumb/medium/full variants, computes a
/// BlurHash placeholder, and preserves the original bytes unchanged.
pub fn process_image(data: &[u8], filename: &str) -> Result<ProcessedImage, ProcessingError> {
    let (format, mime) = detect_format(data, filename)?;
    let img = image::load_from_memory_with_format(data, format)
        .map_err(|e| ProcessingError::DecodeError(e.to_string()))?;

    let (orig_width, orig_height) = img.dimensions();

    // Reject images whose shorter side cannot fill a thumbnail.
    let shortest_side = orig_width.min(orig_height);
    if shortest_side < THUMB_WIDTH {
        return Err(ProcessingError::TooSmall {
            min: THUMB_WIDTH,
            actual: shortest_side,
        });
    }

    // Fields evaluate in written order, matching the fallible-call order:
    // thumb, medium, full, then blurhash.
    Ok(ProcessedImage {
        thumb: resize_to_webp(&img, THUMB_WIDTH)?,
        medium: resize_to_webp(&img, MEDIUM_WIDTH)?,
        full: resize_to_webp(&img, FULL_WIDTH)?,
        blurhash: generate_blurhash(&img, 4, 3)?,
        // Original is kept byte-for-byte in its source format.
        original: OriginalImage {
            data: data.to_vec(),
            width: orig_width,
            height: orig_height,
            mime: mime.to_string(),
        },
    })
}
/// Resize image to target width (maintaining aspect ratio) and encode as WebP.
///
/// Images already at or below `target_width` are encoded without resizing.
/// The returned dimensions are read back from the image that is actually
/// encoded, so they can never drift from the encoded pixels.
fn resize_to_webp(img: &DynamicImage, target_width: u32) -> Result<ImageVariant, ProcessingError> {
    let (orig_width, orig_height) = img.dimensions();

    // Own the resized image only when we actually resize; otherwise borrow
    // the caller's image and avoid a full pixel-buffer clone.
    let resized;
    let to_encode: &DynamicImage = if orig_width > target_width {
        let ratio = target_width as f64 / orig_width as f64;
        let target_height = (orig_height as f64 * ratio).round() as u32;
        resized = img.resize(target_width, target_height, FilterType::Lanczos3);
        &resized
    } else {
        img
    };

    // `resize` fits within a bounding box, so the result may differ from the
    // rounded targets by a pixel — report the actual encoded dimensions.
    let (width, height) = to_encode.dimensions();

    // Encode as WebP
    let mut buf = Cursor::new(Vec::new());
    to_encode
        .write_to(&mut buf, ImageFormat::WebP)
        .map_err(|e| ProcessingError::EncodeError(e.to_string()))?;

    Ok(ImageVariant {
        data: buf.into_inner(),
        width,
        height,
    })
}
/// Generate a BlurHash string from an image.
///
/// The image is first downscaled so the hash is cheap to compute; the hash
/// encodes `x_components` x `y_components` components.
fn generate_blurhash(
    img: &DynamicImage,
    x_components: u32,
    y_components: u32,
) -> Result<String, ProcessingError> {
    // Hash a small preview — a 32px bounding box is plenty for a blur.
    let preview = img.resize(32, 32, FilterType::Triangle);
    let pixels = preview.to_rgba8();
    let (width, height) = pixels.dimensions();
    blurhash::encode(x_components, y_components, width, height, pixels.as_raw())
        .map_err(|e| ProcessingError::BlurHashError(format!("{e:?}")))
}
/// Check if a MIME type is a supported image format.
pub fn is_supported_image(mime: &str) -> bool {
    // Keep this list in sync with the formats `detect_format` accepts.
    const IMAGE_MIMES: [&str; 5] = [
        "image/jpeg",
        "image/png",
        "image/gif",
        "image/webp",
        "image/avif",
    ];
    IMAGE_MIMES.contains(&mime)
}
/// Check if a MIME type is a supported video format.
pub fn is_supported_video(mime: &str) -> bool {
    // MP4, WebM, and QuickTime are the only accepted video containers.
    ["video/mp4", "video/webm", "video/quicktime"].contains(&mime)
}
#[cfg(test)]
mod tests {
    use super::*;

    // Format detection should succeed from magic bytes alone; only the
    // signature prefix is needed, not a full valid file.
    #[test]
    fn test_detect_format_jpeg() {
        // JPEG magic bytes (SOI marker + APP0 prefix)
        let data = [0xFF, 0xD8, 0xFF, 0xE0];
        let (format, mime) = detect_format(&data, "test.jpg").unwrap();
        assert_eq!(format, ImageFormat::Jpeg);
        assert_eq!(mime, "image/jpeg");
    }

    #[test]
    fn test_detect_format_png() {
        // PNG magic bytes (8-byte signature)
        let data = [0x89, 0x50, 0x4E, 0x47, 0x0D, 0x0A, 0x1A, 0x0A];
        let (format, mime) = detect_format(&data, "test.png").unwrap();
        assert_eq!(format, ImageFormat::Png);
        assert_eq!(mime, "image/png");
    }

    // MIME checks are exact, case-sensitive matches.
    #[test]
    fn test_is_supported_image() {
        assert!(is_supported_image("image/jpeg"));
        assert!(is_supported_image("image/png"));
        assert!(!is_supported_image("text/plain"));
        assert!(!is_supported_image("video/mp4"));
    }

    #[test]
    fn test_is_supported_video() {
        assert!(is_supported_video("video/mp4"));
        assert!(is_supported_video("video/webm"));
        assert!(!is_supported_video("image/jpeg"));
    }
}
+1 -1
View File
@@ -57,7 +57,7 @@ pub async fn generate_og_image(spec: &OGImageSpec, state: Arc<AppState>) -> Resu
.map_err(|e| format!("Failed to read response: {e}"))?
.to_vec();
r2.put_object(&r2_key, bytes)
r2.put_object(&r2_key, bytes, "image/png")
.await
.map_err(|e| format!("Failed to upload to R2: {e}"))?;
+43 -2
View File
@@ -78,13 +78,18 @@ impl R2Client {
Ok(bytes)
}
pub async fn put_object(&self, key: &str, body: Vec<u8>) -> Result<(), String> {
pub async fn put_object(
&self,
key: &str,
body: Vec<u8>,
content_type: &str,
) -> Result<(), String> {
self.client
.put_object()
.bucket(&self.bucket)
.key(key)
.body(ByteStream::from(body))
.content_type("image/png")
.content_type(content_type)
.send()
.await
.map_err(|e| format!("Failed to put object to R2: {e}"))?;
@@ -92,6 +97,42 @@ impl R2Client {
Ok(())
}
/// Delete a single object from the R2 bucket by key.
///
/// SDK errors are stringified with context for the caller.
// NOTE(review): S3-style DeleteObject is normally idempotent (no error for
// a missing key) — confirm R2 matches before relying on that here.
pub async fn delete_object(&self, key: &str) -> Result<(), String> {
    self.client
        .delete_object()
        .bucket(&self.bucket)
        .key(key)
        .send()
        .await
        .map_err(|e| format!("Failed to delete object from R2: {e}"))?;
    Ok(())
}
/// Delete all objects under a prefix (e.g., "projects/{id}/{ulid}/").
///
/// Pages through the listing with continuation tokens so prefixes holding
/// more than one listing page of keys are fully cleared — a single
/// `list_objects_v2` call only returns the first page. Returns the number
/// of objects deleted.
pub async fn delete_prefix(&self, prefix: &str) -> Result<usize, String> {
    let mut deleted = 0;
    let mut continuation_token: Option<String> = None;

    loop {
        let mut request = self
            .client
            .list_objects_v2()
            .bucket(&self.bucket)
            .prefix(prefix);
        if let Some(token) = continuation_token.take() {
            request = request.continuation_token(token);
        }

        let page = request
            .send()
            .await
            .map_err(|e| format!("Failed to list objects in R2: {e}"))?;

        if let Some(contents) = page.contents {
            for object in contents {
                if let Some(key) = object.key {
                    self.delete_object(&key).await?;
                    deleted += 1;
                }
            }
        }

        // Keep paging while the listing reports it was truncated.
        match page.next_continuation_token {
            Some(token) if page.is_truncated == Some(true) => {
                continuation_token = Some(token);
            }
            _ => break,
        }
    }

    Ok(deleted)
}
pub async fn object_exists(&self, key: &str) -> bool {
self.client
.head_object()
+15 -2
View File
@@ -4,7 +4,7 @@ use axum::{
extract::Request,
http::{Method, Uri},
response::IntoResponse,
routing::{any, get, post},
routing::{any, delete, get, post, put},
};
use std::sync::Arc;
@@ -41,7 +41,20 @@ pub fn api_routes() -> Router<Arc<AppState>> {
)
.route(
"/projects/{id}/tags/{tag_id}",
axum::routing::delete(handlers::remove_project_tag_handler),
delete(handlers::remove_project_tag_handler),
)
// Project media - GET is public, POST/PUT/DELETE require authentication
.route(
"/projects/{id}/media",
get(handlers::get_project_media_handler).post(handlers::upload_media_handler),
)
.route(
"/projects/{id}/media/reorder",
put(handlers::reorder_media_handler),
)
.route(
"/projects/{id}/media/{media_id}",
delete(handlers::delete_media_handler),
)
// Tags - authentication checked in handlers
.route(