feat: add PostgreSQL database integration for projects

- Add SQLx with Postgres support and migration system
- Create projects table with status enum and auto-updated timestamps
- Implement database queries and API response conversion layer
- Add Justfile commands for database management and seeding
- Integrate health checks for both Bun and database connectivity
2026-01-06 01:53:49 -06:00
parent 5fc7277cd7
commit b4c708335b
11 changed files with 1235 additions and 90 deletions
.gitignore (+1)
@@ -24,4 +24,5 @@ Thumbs.db
# Don't ignore these - we need them
!web/build/client
!web/build/server
!web/build/prerendered
!web/build/*.js
.sqlx/query-42799df09f28f38b73c4a0f90516dc432e7660d679d3ce8eb448cde2dad81608.json (+20)
@@ -0,0 +1,20 @@
{
"db_name": "PostgreSQL",
"query": "SELECT 1 as check",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "check",
"type_info": "Int4"
}
],
"parameters": {
"Left": []
},
"nullable": [
null
]
},
"hash": "42799df09f28f38b73c4a0f90516dc432e7660d679d3ce8eb448cde2dad81608"
}
.sqlx/query-8adc48c833126d2cd690612a83c1637347e8bdfd230bf46c60ceef8fa096391e.json (+98)
@@ -0,0 +1,98 @@
{
"db_name": "PostgreSQL",
"query": "\n SELECT \n id, \n slug, \n title, \n description, \n status as \"status: ProjectStatus\", \n github_repo, \n demo_url, \n priority, \n icon, \n last_github_activity, \n created_at, \n updated_at\n FROM projects\n WHERE status != 'hidden'\n ORDER BY priority DESC, created_at DESC\n ",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "id",
"type_info": "Uuid"
},
{
"ordinal": 1,
"name": "slug",
"type_info": "Text"
},
{
"ordinal": 2,
"name": "title",
"type_info": "Text"
},
{
"ordinal": 3,
"name": "description",
"type_info": "Text"
},
{
"ordinal": 4,
"name": "status: ProjectStatus",
"type_info": {
"Custom": {
"name": "project_status",
"kind": {
"Enum": [
"active",
"maintained",
"archived",
"hidden"
]
}
}
}
},
{
"ordinal": 5,
"name": "github_repo",
"type_info": "Text"
},
{
"ordinal": 6,
"name": "demo_url",
"type_info": "Text"
},
{
"ordinal": 7,
"name": "priority",
"type_info": "Int4"
},
{
"ordinal": 8,
"name": "icon",
"type_info": "Text"
},
{
"ordinal": 9,
"name": "last_github_activity",
"type_info": "Timestamptz"
},
{
"ordinal": 10,
"name": "created_at",
"type_info": "Timestamptz"
},
{
"ordinal": 11,
"name": "updated_at",
"type_info": "Timestamptz"
}
],
"parameters": {
"Left": []
},
"nullable": [
false,
false,
false,
false,
false,
true,
true,
false,
true,
true,
false,
false
]
},
"hash": "8adc48c833126d2cd690612a83c1637347e8bdfd230bf46c60ceef8fa096391e"
}
Cargo.lock (generated, +678 -15)
File diff suppressed because it is too large
Cargo.toml (+4)
@@ -2,6 +2,7 @@
name = "api"
version = "0.1.0"
edition = "2024"
default-run = "api"
[dependencies]
aws-config = "1.8.12"
@@ -9,6 +10,7 @@ aws-sdk-s3 = "1.119.0"
axum = "0.8.8"
clap = { version = "4.5.54", features = ["derive", "env"] }
dashmap = "6.1.0"
dotenvy = "0.15"
futures = "0.3.31"
include_dir = "0.7.4"
mime_guess = "2.0.5"
@@ -17,6 +19,7 @@ rand = "0.9.2"
reqwest = { version = "0.13.1", default-features = false, features = ["rustls", "charset", "json", "stream"] }
serde = { version = "1.0.228", features = ["derive"] }
serde_json = "1.0.148"
sqlx = { version = "0.8", features = ["runtime-tokio", "tls-rustls", "postgres", "uuid", "time", "migrate"] }
time = { version = "0.3.44", features = ["formatting", "macros"] }
tokio = { version = "1.49.0", features = ["full"] }
tokio-util = { version = "0.7.18", features = ["io"] }
@@ -25,3 +28,4 @@ tower-http = { version = "0.6.8", features = ["trace", "cors", "limit"] }
tracing = "0.1.44"
tracing-subscriber = { version = "0.3.22", features = ["env-filter", "json"] }
ulid = { version = "1", features = ["serde"] }
uuid = { version = "1", features = ["serde", "v4"] }
Dockerfile (+6 -1)
@@ -55,8 +55,13 @@ RUN cargo chef cook --release --recipe-path recipe.json
COPY Cargo.toml Cargo.lock ./
COPY src/ ./src/
# Copy SQLx offline cache and migrations for compile-time macros
COPY .sqlx/ ./.sqlx/
COPY migrations/ ./migrations/
# Copy frontend assets for embedding
COPY --from=frontend /build/build/client ./web/build/client
COPY --from=frontend /build/build/prerendered ./web/build/prerendered
# Build with real assets
RUN cargo build --release && \
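Note on the offline cache: .sqlx holds SQLx's prepared query metadata, which lets the sqlx::query! macros type-check at build time without a live database, so copying it alongside migrations/ is what keeps this Docker build self-contained. Assuming sqlx-cli is installed, the cache is regenerated with cargo sqlx prepare whenever queries change.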
Justfile (+84)
@@ -91,3 +91,87 @@ docker-run-json port="8080":
docker stop xevion-dev-container 2>/dev/null || true
docker rm xevion-dev-container 2>/dev/null || true
docker run --name xevion-dev-container -p {{port}}:8080 xevion-dev
[script("bun")]
seed:
const { spawnSync } = await import("child_process");
// Ensure DB is running
const db = spawnSync("just", ["db"], { stdio: "inherit" });
if (db.status !== 0) process.exit(db.status ?? 1);
// Run migrations
const migrate = spawnSync("sqlx", ["migrate", "run"], { stdio: "inherit" });
if (migrate.status !== 0) process.exit(migrate.status ?? 1);
// Seed data
const seed = spawnSync("cargo", ["run", "--bin", "seed"], { stdio: "inherit" });
if (seed.status !== 0) process.exit(seed.status ?? 1);
console.log("✅ Database ready with seed data");
[script("bun")]
db cmd="start":
const fs = await import("fs/promises");
const { spawnSync } = await import("child_process");
const NAME = "xevion-postgres";
const USER = "xevion";
const PASS = "dev";
const DB = "xevion";
const PORT = "5432";
const ENV_FILE = ".env";
const CMD = "{{cmd}}";
const run = (args) => spawnSync("docker", args, { encoding: "utf8" });
const getContainer = () => {
const res = run(["ps", "-a", "--filter", `name=^${NAME}$`, "--format", "json"]);
return res.stdout.trim() ? JSON.parse(res.stdout) : null;
};
const updateEnv = async () => {
const url = `postgresql://${USER}:${PASS}@localhost:${PORT}/${DB}`;
try {
let content = await fs.readFile(ENV_FILE, "utf8");
content = content.includes("DATABASE_URL=")
? content.replace(/DATABASE_URL=.*$/m, `DATABASE_URL=${url}`)
: content.trim() + `\nDATABASE_URL=${url}\n`;
await fs.writeFile(ENV_FILE, content);
} catch {
await fs.writeFile(ENV_FILE, `DATABASE_URL=${url}\n`);
}
};
const create = () => {
run(["run", "-d", "--name", NAME, "-e", `POSTGRES_USER=${USER}`,
"-e", `POSTGRES_PASSWORD=${PASS}`, "-e", `POSTGRES_DB=${DB}`,
"-p", `${PORT}:5432`, "postgres:16-alpine"]);
console.log("✅ created");
};
// Poll until postgres accepts connections, so `just seed` can run
// migrations immediately after the container comes up
const waitReady = async () => {
for (let i = 0; i < 30; i++) {
if (run(["exec", NAME, "pg_isready", "-U", USER]).status === 0) return;
await new Promise((r) => setTimeout(r, 1000));
}
console.error("⚠️ postgres did not become ready within 30s");
};
const container = getContainer();
if (CMD === "rm") {
if (!container) process.exit(0);
run(["stop", NAME]);
run(["rm", NAME]);
console.log("✅ removed");
} else if (CMD === "reset") {
if (!container) create();
else {
run(["exec", NAME, "psql", "-U", USER, "-c", `DROP DATABASE IF EXISTS ${DB}`]);
run(["exec", NAME, "psql", "-U", USER, "-c", `CREATE DATABASE ${DB}`]);
console.log("✅ reset");
}
await waitReady();
await updateEnv();
} else {
if (!container) {
create();
} else if (container.State !== "running") {
run(["start", NAME]);
console.log("✅ started");
} else {
console.log("✅ running");
}
await waitReady();
await updateEnv();
}
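Intended usage, as a quick reference: just db starts (or creates) the Postgres container and writes DATABASE_URL into .env; just db reset drops and recreates the database; just db rm removes the container; just seed chains container startup, sqlx migrations, and the seed binary.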
migrations/ (new file, +35)
@@ -0,0 +1,35 @@
-- Project status enum
CREATE TYPE project_status AS ENUM ('active', 'maintained', 'archived', 'hidden');
-- Projects table
CREATE TABLE projects (
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
slug TEXT NOT NULL UNIQUE,
title TEXT NOT NULL,
description TEXT NOT NULL,
status project_status NOT NULL DEFAULT 'active',
github_repo TEXT,
demo_url TEXT,
priority INTEGER NOT NULL DEFAULT 0,
icon TEXT,
last_github_activity TIMESTAMPTZ,
created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW()
);
-- Indexes for common queries
CREATE INDEX idx_projects_status ON projects(status);
CREATE INDEX idx_projects_priority ON projects(priority DESC);
CREATE INDEX idx_projects_slug ON projects(slug);
-- Trigger to auto-update updated_at
CREATE OR REPLACE FUNCTION update_updated_at_column()
RETURNS TRIGGER AS $$
BEGIN
NEW.updated_at = NOW();
RETURN NEW;
END;
$$ LANGUAGE plpgsql;
CREATE TRIGGER update_projects_updated_at BEFORE UPDATE ON projects
FOR EACH ROW EXECUTE FUNCTION update_updated_at_column();
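The updated_at trigger is the sort of thing worth exercising once by hand. A minimal sketch (not part of this commit) that checks it against the dev container, assuming DATABASE_URL is set and the migration above has been applied:

use sqlx::PgPool;
use time::OffsetDateTime;

#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    let pool = PgPool::connect(&std::env::var("DATABASE_URL")?).await?;

    // Insert a throwaway row and capture its initial updated_at.
    let before: OffsetDateTime = sqlx::query_scalar(
        "INSERT INTO projects (slug, title, description)
         VALUES ('trigger-test', 'Trigger Test', 'temp') RETURNING updated_at",
    )
    .fetch_one(&pool)
    .await?;

    // Any UPDATE should refresh updated_at via the trigger.
    let after: OffsetDateTime = sqlx::query_scalar(
        "UPDATE projects SET title = 'Trigger Test (edited)'
         WHERE slug = 'trigger-test' RETURNING updated_at",
    )
    .fetch_one(&pool)
    .await?;
    assert!(after > before, "trigger should have bumped updated_at");

    // Clean up the throwaway row.
    sqlx::query("DELETE FROM projects WHERE slug = 'trigger-test'")
        .execute(&pool)
        .await?;
    println!("updated_at trigger OK");
    Ok(())
}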
src/bin/seed.rs (+103)
@@ -0,0 +1,103 @@
use sqlx::PgPool;
#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
dotenvy::dotenv().ok();
let database_url = std::env::var("DATABASE_URL")?;
let pool = PgPool::connect(&database_url).await?;
println!("🌱 Seeding database...");
// Clear existing data
sqlx::query("DELETE FROM projects").execute(&pool).await?;
// Seed projects with diverse data
let projects = vec![
(
"xevion-dev",
"xevion.dev",
"Personal portfolio site with fuzzy tag discovery and ISR caching",
"active",
Some("Xevion/xevion.dev"),
None,
10,
Some("fa-globe"),
),
(
"contest",
"Contest",
"Archive and analysis platform for competitive programming problems",
"active",
Some("Xevion/contest"),
Some("https://contest.xevion.dev"),
9,
Some("fa-trophy"),
),
(
"reforge",
"Reforge",
"Rust library for parsing and manipulating Replay files from Rocket League",
"maintained",
Some("Xevion/reforge"),
None,
8,
Some("fa-file-code"),
),
(
"algorithms",
"Algorithms",
"Collection of algorithm implementations and data structures in Python",
"archived",
Some("Xevion/algorithms"),
None,
5,
Some("fa-brain"),
),
(
"wordplay",
"WordPlay",
"Interactive word game with real-time multiplayer using WebSockets",
"maintained",
Some("Xevion/wordplay"),
Some("https://wordplay.example.com"),
7,
Some("fa-gamepad"),
),
(
"dotfiles",
"Dotfiles",
"Personal configuration files and development environment setup scripts",
"active",
Some("Xevion/dotfiles"),
None,
6,
Some("fa-terminal"),
),
];
let project_count = projects.len();
for (slug, title, desc, status, repo, demo, priority, icon) in projects {
sqlx::query(
r#"
INSERT INTO projects (slug, title, description, status, github_repo, demo_url, priority, icon)
VALUES ($1, $2, $3, $4::project_status, $5, $6, $7, $8)
"#,
)
.bind(slug)
.bind(title)
.bind(desc)
.bind(status)
.bind(repo)
.bind(demo)
.bind(priority)
.bind(icon)
.execute(&pool)
.await?;
}
println!("✅ Seeded {} projects", project_count);
Ok(())
}
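The delete-then-insert approach keeps the seed re-runnable: each run replaces the table contents instead of appending, which matters because slug is UNIQUE and a plain re-insert would fail on the second run.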
src/db.rs (+123)
@@ -0,0 +1,123 @@
use serde::{Deserialize, Serialize};
use sqlx::{PgPool, postgres::PgPoolOptions};
use time::OffsetDateTime;
use uuid::Uuid;
// Database types
#[derive(Debug, Clone, Copy, PartialEq, Eq, sqlx::Type, Serialize, Deserialize)]
#[sqlx(type_name = "project_status", rename_all = "lowercase")]
pub enum ProjectStatus {
Active,
Maintained,
Archived,
Hidden,
}
// Database model
#[derive(Debug, Clone, sqlx::FromRow)]
#[allow(dead_code)]
pub struct DbProject {
pub id: Uuid,
pub slug: String,
pub title: String,
pub description: String,
pub status: ProjectStatus,
pub github_repo: Option<String>,
pub demo_url: Option<String>,
pub priority: i32,
pub icon: Option<String>,
pub last_github_activity: Option<OffsetDateTime>,
pub created_at: OffsetDateTime,
pub updated_at: OffsetDateTime,
}
// API response types
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ApiProjectLink {
pub url: String,
#[serde(skip_serializing_if = "Option::is_none")]
pub title: Option<String>,
}
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ApiProject {
pub id: String,
pub name: String,
#[serde(rename = "shortDescription")]
pub short_description: String,
#[serde(skip_serializing_if = "Option::is_none")]
pub icon: Option<String>,
pub links: Vec<ApiProjectLink>,
}
impl DbProject {
/// Convert database project to API response format
pub fn to_api_project(&self) -> ApiProject {
let mut links = Vec::new();
if let Some(ref repo) = self.github_repo {
links.push(ApiProjectLink {
url: format!("https://github.com/{}", repo),
title: Some("GitHub".to_string()),
});
}
if let Some(ref demo) = self.demo_url {
links.push(ApiProjectLink {
url: demo.clone(),
title: Some("Demo".to_string()),
});
}
ApiProject {
id: self.id.to_string(),
name: self.title.clone(),
short_description: self.description.clone(),
icon: self.icon.clone(),
links,
}
}
}
// Connection pool creation
pub async fn create_pool(database_url: &str) -> Result<PgPool, sqlx::Error> {
PgPoolOptions::new()
.max_connections(20)
.acquire_timeout(std::time::Duration::from_secs(3))
.connect(database_url)
.await
}
// Queries
pub async fn get_public_projects(pool: &PgPool) -> Result<Vec<DbProject>, sqlx::Error> {
sqlx::query_as!(
DbProject,
r#"
SELECT
id,
slug,
title,
description,
status as "status: ProjectStatus",
github_repo,
demo_url,
priority,
icon,
last_github_activity,
created_at,
updated_at
FROM projects
WHERE status != 'hidden'
ORDER BY priority DESC, created_at DESC
"#
)
.fetch_all(pool)
.await
}
pub async fn health_check(pool: &PgPool) -> Result<(), sqlx::Error> {
sqlx::query!("SELECT 1 as check")
.fetch_one(pool)
.await
.map(|_| ())
}
src/main.rs (+65 -56)
@@ -6,7 +6,6 @@ use axum::{
routing::any,
};
use clap::Parser;
use std::net::SocketAddr;
use std::path::PathBuf;
use std::sync::Arc;
@@ -16,6 +15,7 @@ use tracing_subscriber::{EnvFilter, layer::SubscriberExt, util::SubscriberInitEx
mod assets;
mod config;
mod db;
mod formatter;
mod health;
mod middleware;
@@ -68,9 +68,30 @@ fn init_tracing() {
#[tokio::main]
async fn main() {
// Load .env file if present
dotenvy::dotenv().ok();
// Parse args early so --help works without a database
let args = Args::parse();
init_tracing();
// Load database URL from environment (fail-fast)
let database_url =
std::env::var("DATABASE_URL").expect("DATABASE_URL must be set in environment");
// Create connection pool
let pool = db::create_pool(&database_url)
.await
.expect("Failed to connect to database");
// Run migrations on startup
sqlx::migrate!()
.run(&pool)
.await
.expect("Failed to run database migrations");
tracing::info!("Database connected and migrations applied");
if args.listen.is_empty() {
eprintln!("Error: At least one --listen address is required");
@@ -108,13 +129,15 @@ async fn main() {
let downstream_url_for_health = args.downstream.clone();
let http_client_for_health = http_client.clone();
let unix_client_for_health = unix_client.clone();
let pool_for_health = pool.clone();
let health_checker = Arc::new(HealthChecker::new(move || {
let downstream_url = downstream_url_for_health.clone();
let http_client = http_client_for_health.clone();
let unix_client = unix_client_for_health.clone();
let pool = pool_for_health.clone();
async move { perform_health_check(downstream_url, http_client, unix_client, Some(pool)).await }
}));
let tarpit_config = TarpitConfig::from_env();
@@ -137,6 +160,7 @@ async fn main() {
unix_client,
health_checker,
tarpit_state,
pool: pool.clone(),
});
// Regenerate common OGP images on startup
@@ -238,6 +262,7 @@ pub struct AppState {
unix_client: Option<reqwest::Client>,
health_checker: Arc<HealthChecker>,
tarpit_state: Arc<TarpitState>,
pool: sqlx::PgPool,
}
#[derive(Debug)]
@@ -289,10 +314,7 @@ fn api_routes() -> Router<Arc<AppState>> {
"/health",
axum::routing::get(health_handler).head(health_handler),
)
.route("/projects", axum::routing::get(projects_handler))
.fallback(api_404_and_method_handler)
}
@@ -423,55 +445,25 @@ async fn api_404_handler(uri: axum::http::Uri) -> impl IntoResponse {
api_404_and_method_handler(req).await
}
(removed: the hardcoded ProjectLink and Project response types and the stub projects_handler that returned two fixed entries; superseded by the db module types and the database-backed handler below)
async fn projects_handler(State(state): State<Arc<AppState>>) -> impl IntoResponse {
match db::get_public_projects(&state.pool).await {
Ok(projects) => {
let api_projects: Vec<db::ApiProject> =
projects.into_iter().map(|p| p.to_api_project()).collect();
Json(api_projects).into_response()
}
Err(err) => {
tracing::error!(error = %err, "Failed to fetch projects from database");
(
StatusCode::INTERNAL_SERVER_ERROR,
Json(serde_json::json!({
"error": "Internal server error",
"message": "Failed to fetch projects"
})),
)
.into_response()
}
}
}
fn should_tarpit(state: &TarpitState, path: &str) -> bool {
@@ -687,6 +679,7 @@ async fn perform_health_check(
downstream_url: String,
http_client: reqwest::Client,
unix_client: Option<reqwest::Client>,
pool: Option<sqlx::PgPool>,
) -> bool {
let url = if downstream_url.starts_with('/') || downstream_url.starts_with("./") {
"http://localhost/internal/health".to_string()
@@ -700,6 +693,7 @@ async fn perform_health_check(
&http_client
};
let bun_healthy =
match tokio::time::timeout(Duration::from_secs(5), client.get(&url).send()).await {
Ok(Ok(response)) => {
let is_success = response.status().is_success();
@@ -719,5 +713,20 @@ async fn perform_health_check(
tracing::error!("Health check failed: timeout after 5s");
false
}
};
// Check database
let db_healthy = if let Some(pool) = pool {
match db::health_check(&pool).await {
Ok(_) => true,
Err(err) => {
tracing::error!(error = %err, "Database health check failed");
false
}
}
} else {
true
};
bun_healthy && db_healthy
}
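Net effect on /health: the checker now reports healthy only when both legs pass, i.e. the Bun downstream responds successfully and SELECT 1 succeeds against the pool; passing None for the pool skips the database leg entirely.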