Mirror of https://github.com/Xevion/xevion.dev.git, synced 2026-01-31 04:26:43 -06:00
feat: add PostgreSQL database integration for projects
- Add SQLx with Postgres support and migration system
- Create projects table with status enum and auto-updated timestamps
- Implement database queries and API response conversion layer
- Add Justfile commands for database management and seeding
- Integrate health checks for both Bun and database connectivity
@@ -24,4 +24,5 @@ Thumbs.db
 # Don't ignore these - we need them
 !web/build/client
 !web/build/server
+!web/build/prerendered
 !web/build/*.js
+20
@@ -0,0 +1,20 @@
+{
+  "db_name": "PostgreSQL",
+  "query": "SELECT 1 as check",
+  "describe": {
+    "columns": [
+      {
+        "ordinal": 0,
+        "name": "check",
+        "type_info": "Int4"
+      }
+    ],
+    "parameters": {
+      "Left": []
+    },
+    "nullable": [
+      null
+    ]
+  },
+  "hash": "42799df09f28f38b73c4a0f90516dc432e7660d679d3ce8eb448cde2dad81608"
+}
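These .sqlx/query-*.json files are SQLx's offline query cache: one entry per query!/query_as! call site, keyed by a hash of the query text and recording column types, parameters, and nullability, so the macros can type-check at compile time without a live database. They are regenerated with cargo sqlx prepare whenever a query changes.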
+98
@@ -0,0 +1,98 @@
+{
+  "db_name": "PostgreSQL",
+  "query": "\n SELECT \n id, \n slug, \n title, \n description, \n status as \"status: ProjectStatus\", \n github_repo, \n demo_url, \n priority, \n icon, \n last_github_activity, \n created_at, \n updated_at\n FROM projects\n WHERE status != 'hidden'\n ORDER BY priority DESC, created_at DESC\n ",
+  "describe": {
+    "columns": [
+      {
+        "ordinal": 0,
+        "name": "id",
+        "type_info": "Uuid"
+      },
+      {
+        "ordinal": 1,
+        "name": "slug",
+        "type_info": "Text"
+      },
+      {
+        "ordinal": 2,
+        "name": "title",
+        "type_info": "Text"
+      },
+      {
+        "ordinal": 3,
+        "name": "description",
+        "type_info": "Text"
+      },
+      {
+        "ordinal": 4,
+        "name": "status: ProjectStatus",
+        "type_info": {
+          "Custom": {
+            "name": "project_status",
+            "kind": {
+              "Enum": [
+                "active",
+                "maintained",
+                "archived",
+                "hidden"
+              ]
+            }
+          }
+        }
+      },
+      {
+        "ordinal": 5,
+        "name": "github_repo",
+        "type_info": "Text"
+      },
+      {
+        "ordinal": 6,
+        "name": "demo_url",
+        "type_info": "Text"
+      },
+      {
+        "ordinal": 7,
+        "name": "priority",
+        "type_info": "Int4"
+      },
+      {
+        "ordinal": 8,
+        "name": "icon",
+        "type_info": "Text"
+      },
+      {
+        "ordinal": 9,
+        "name": "last_github_activity",
+        "type_info": "Timestamptz"
+      },
+      {
+        "ordinal": 10,
+        "name": "created_at",
+        "type_info": "Timestamptz"
+      },
+      {
+        "ordinal": 11,
+        "name": "updated_at",
+        "type_info": "Timestamptz"
+      }
+    ],
+    "parameters": {
+      "Left": []
+    },
+    "nullable": [
+      false,
+      false,
+      false,
+      false,
+      false,
+      true,
+      true,
+      false,
+      true,
+      true,
+      false,
+      false
+    ]
+  },
+  "hash": "8adc48c833126d2cd690612a83c1637347e8bdfd230bf46c60ceef8fa096391e"
+}
Generated
+678 -15
File diff suppressed because it is too large
@@ -2,6 +2,7 @@
 name = "api"
 version = "0.1.0"
 edition = "2024"
+default-run = "api"

 [dependencies]
 aws-config = "1.8.12"
@@ -9,6 +10,7 @@ aws-sdk-s3 = "1.119.0"
 axum = "0.8.8"
 clap = { version = "4.5.54", features = ["derive", "env"] }
 dashmap = "6.1.0"
+dotenvy = "0.15"
 futures = "0.3.31"
 include_dir = "0.7.4"
 mime_guess = "2.0.5"
@@ -17,6 +19,7 @@ rand = "0.9.2"
 reqwest = { version = "0.13.1", default-features = false, features = ["rustls", "charset", "json", "stream"] }
 serde = { version = "1.0.228", features = ["derive"] }
 serde_json = "1.0.148"
+sqlx = { version = "0.8", features = ["runtime-tokio", "tls-rustls", "postgres", "uuid", "time", "migrate"] }
 time = { version = "0.3.44", features = ["formatting", "macros"] }
 tokio = { version = "1.49.0", features = ["full"] }
 tokio-util = { version = "0.7.18", features = ["io"] }
@@ -25,3 +28,4 @@ tower-http = { version = "0.6.8", features = ["trace", "cors", "limit"] }
 tracing = "0.1.44"
 tracing-subscriber = { version = "0.3.22", features = ["env-filter", "json"] }
 ulid = { version = "1", features = ["serde"] }
+uuid = { version = "1", features = ["serde", "v4"] }
+6 -1
@@ -55,8 +55,13 @@ RUN cargo chef cook --release --recipe-path recipe.json
 COPY Cargo.toml Cargo.lock ./
 COPY src/ ./src/

-# Copy frontend client assets for embedding
+# Copy SQLx offline cache and migrations for compile-time macros
+COPY .sqlx/ ./.sqlx/
+COPY migrations/ ./migrations/
+
+# Copy frontend assets for embedding
 COPY --from=frontend /build/build/client ./web/build/client
+COPY --from=frontend /build/build/prerendered ./web/build/prerendered

 # Build with real assets
 RUN cargo build --release && \
@@ -91,3 +91,87 @@ docker-run-json port="8080":
     docker stop xevion-dev-container 2>/dev/null || true
     docker rm xevion-dev-container 2>/dev/null || true
     docker run --name xevion-dev-container -p {{port}}:8080 xevion-dev
+
+[script("bun")]
+seed:
+    const { spawnSync } = await import("child_process");
+
+    // Ensure DB is running
+    const db = spawnSync("just", ["db"], { stdio: "inherit" });
+    if (db.status !== 0) process.exit(db.status);
+
+    // Run migrations
+    const migrate = spawnSync("sqlx", ["migrate", "run"], { stdio: "inherit" });
+    if (migrate.status !== 0) process.exit(migrate.status);
+
+    // Seed data
+    const seed = spawnSync("cargo", ["run", "--bin", "seed"], { stdio: "inherit" });
+    if (seed.status !== 0) process.exit(seed.status);
+
+    console.log("✅ Database ready with seed data");
+
+[script("bun")]
+db cmd="start":
+    const fs = await import("fs/promises");
+    const { spawnSync } = await import("child_process");
+
+    const NAME = "xevion-postgres";
+    const USER = "xevion";
+    const PASS = "dev";
+    const DB = "xevion";
+    const PORT = "5432";
+    const ENV_FILE = ".env";
+    const CMD = "{{cmd}}";
+
+    const run = (args) => spawnSync("docker", args, { encoding: "utf8" });
+    const getContainer = () => {
+        const res = run(["ps", "-a", "--filter", `name=^${NAME}$`, "--format", "json"]);
+        return res.stdout.trim() ? JSON.parse(res.stdout) : null;
+    };
+
+    const updateEnv = async () => {
+        const url = `postgresql://${USER}:${PASS}@localhost:${PORT}/${DB}`;
+        try {
+            let content = await fs.readFile(ENV_FILE, "utf8");
+            content = content.includes("DATABASE_URL=")
+                ? content.replace(/DATABASE_URL=.*$/m, `DATABASE_URL=${url}`)
+                : content.trim() + `\nDATABASE_URL=${url}\n`;
+            await fs.writeFile(ENV_FILE, content);
+        } catch {
+            await fs.writeFile(ENV_FILE, `DATABASE_URL=${url}\n`);
+        }
+    };
+
+    const create = () => {
+        run(["run", "-d", "--name", NAME, "-e", `POSTGRES_USER=${USER}`,
+            "-e", `POSTGRES_PASSWORD=${PASS}`, "-e", `POSTGRES_DB=${DB}`,
+            "-p", `${PORT}:5432`, "postgres:16-alpine"]);
+        console.log("✅ created");
+    };
+
+    const container = getContainer();
+
+    if (CMD === "rm") {
+        if (!container) process.exit(0);
+        run(["stop", NAME]);
+        run(["rm", NAME]);
+        console.log("✅ removed");
+    } else if (CMD === "reset") {
+        if (!container) create();
+        else {
+            run(["exec", NAME, "psql", "-U", USER, "-d", "postgres", "-c", `DROP DATABASE IF EXISTS ${DB}`]);
+            run(["exec", NAME, "psql", "-U", USER, "-d", "postgres", "-c", `CREATE DATABASE ${DB}`]);
+            console.log("✅ reset");
+        }
+        await updateEnv();
+    } else {
+        if (!container) {
+            create();
+        } else if (container.State !== "running") {
+            run(["start", NAME]);
+            console.log("✅ started");
+        } else {
+            console.log("✅ running");
+        }
+        await updateEnv();
+    }
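Typical flow with these recipes: just db starts (or creates) the Postgres container and writes DATABASE_URL into .env; just db reset drops and recreates the database; just db rm removes the container; just seed chains db, sqlx migrate run, and cargo run --bin seed. Note that the reset branch points psql at the postgres maintenance database, since a session cannot drop the database it is connected to.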
@@ -0,0 +1,35 @@
+-- Project status enum
+CREATE TYPE project_status AS ENUM ('active', 'maintained', 'archived', 'hidden');
+
+-- Projects table
+CREATE TABLE projects (
+    id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
+    slug TEXT NOT NULL UNIQUE,
+    title TEXT NOT NULL,
+    description TEXT NOT NULL,
+    status project_status NOT NULL DEFAULT 'active',
+    github_repo TEXT,
+    demo_url TEXT,
+    priority INTEGER NOT NULL DEFAULT 0,
+    icon TEXT,
+    last_github_activity TIMESTAMPTZ,
+    created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
+    updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW()
+);
+
+-- Indexes for common queries
+CREATE INDEX idx_projects_status ON projects(status);
+CREATE INDEX idx_projects_priority ON projects(priority DESC);
+CREATE INDEX idx_projects_slug ON projects(slug);
+
+-- Trigger to auto-update updated_at
+CREATE OR REPLACE FUNCTION update_updated_at_column()
+RETURNS TRIGGER AS $$
+BEGIN
+    NEW.updated_at = NOW();
+    RETURN NEW;
+END;
+$$ language 'plpgsql';
+
+CREATE TRIGGER update_projects_updated_at BEFORE UPDATE ON projects
+    FOR EACH ROW EXECUTE FUNCTION update_updated_at_column();
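With the trigger in place, application code never writes updated_at; any UPDATE refreshes it. A minimal sketch of exercising that from the Rust side (hypothetical helper, assuming a connected PgPool and the seeded xevion-dev row):

use sqlx::PgPool;
use time::OffsetDateTime;

// Hypothetical check: the BEFORE UPDATE trigger bumps updated_at on its own.
async fn trigger_demo(pool: &PgPool) -> Result<(), sqlx::Error> {
    let (before,): (OffsetDateTime,) =
        sqlx::query_as("SELECT updated_at FROM projects WHERE slug = $1")
            .bind("xevion-dev")
            .fetch_one(pool)
            .await?;

    // A row trigger fires on every UPDATE, even when no value actually changes.
    sqlx::query("UPDATE projects SET priority = priority WHERE slug = $1")
        .bind("xevion-dev")
        .execute(pool)
        .await?;

    let (after,): (OffsetDateTime,) =
        sqlx::query_as("SELECT updated_at FROM projects WHERE slug = $1")
            .bind("xevion-dev")
            .fetch_one(pool)
            .await?;

    assert!(after >= before);
    Ok(())
}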
+103
@@ -0,0 +1,103 @@
+use sqlx::PgPool;
+
+#[tokio::main]
+async fn main() -> Result<(), Box<dyn std::error::Error>> {
+    dotenvy::dotenv().ok();
+
+    let database_url = std::env::var("DATABASE_URL")?;
+    let pool = PgPool::connect(&database_url).await?;
+
+    println!("🌱 Seeding database...");
+
+    // Clear existing data
+    sqlx::query("DELETE FROM projects").execute(&pool).await?;
+
+    // Seed projects with diverse data
+    let projects = vec![
+        (
+            "xevion-dev",
+            "xevion.dev",
+            "Personal portfolio site with fuzzy tag discovery and ISR caching",
+            "active",
+            Some("Xevion/xevion.dev"),
+            None,
+            10,
+            Some("fa-globe"),
+        ),
+        (
+            "contest",
+            "Contest",
+            "Archive and analysis platform for competitive programming problems",
+            "active",
+            Some("Xevion/contest"),
+            Some("https://contest.xevion.dev"),
+            9,
+            Some("fa-trophy"),
+        ),
+        (
+            "reforge",
+            "Reforge",
+            "Rust library for parsing and manipulating Replay files from Rocket League",
+            "maintained",
+            Some("Xevion/reforge"),
+            None,
+            8,
+            Some("fa-file-code"),
+        ),
+        (
+            "algorithms",
+            "Algorithms",
+            "Collection of algorithm implementations and data structures in Python",
+            "archived",
+            Some("Xevion/algorithms"),
+            None,
+            5,
+            Some("fa-brain"),
+        ),
+        (
+            "wordplay",
+            "WordPlay",
+            "Interactive word game with real-time multiplayer using WebSockets",
+            "maintained",
+            Some("Xevion/wordplay"),
+            Some("https://wordplay.example.com"),
+            7,
+            Some("fa-gamepad"),
+        ),
+        (
+            "dotfiles",
+            "Dotfiles",
+            "Personal configuration files and development environment setup scripts",
+            "active",
+            Some("Xevion/dotfiles"),
+            None,
+            6,
+            Some("fa-terminal"),
+        ),
+    ];
+
+    let project_count = projects.len();
+
+    for (slug, title, desc, status, repo, demo, priority, icon) in projects {
+        sqlx::query(
+            r#"
+            INSERT INTO projects (slug, title, description, status, github_repo, demo_url, priority, icon)
+            VALUES ($1, $2, $3, $4::project_status, $5, $6, $7, $8)
+            "#,
+        )
+        .bind(slug)
+        .bind(title)
+        .bind(desc)
+        .bind(status)
+        .bind(repo)
+        .bind(demo)
+        .bind(priority)
+        .bind(icon)
+        .execute(&pool)
+        .await?;
+    }
+
+    println!("✅ Seeded {} projects", project_count);
+
+    Ok(())
+}
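One detail in the INSERT above: status is bound as a plain &str, so the statement casts it server-side with $4::project_status; the typed ProjectStatus enum (via its sqlx::Type derive) only comes into play on the read path in the db module.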
@@ -0,0 +1,123 @@
+use serde::{Deserialize, Serialize};
+use sqlx::{PgPool, postgres::PgPoolOptions};
+use time::OffsetDateTime;
+use uuid::Uuid;
+
+// Database types
+#[derive(Debug, Clone, Copy, PartialEq, Eq, sqlx::Type, Serialize, Deserialize)]
+#[sqlx(type_name = "project_status", rename_all = "lowercase")]
+pub enum ProjectStatus {
+    Active,
+    Maintained,
+    Archived,
+    Hidden,
+}
+
+// Database model
+#[derive(Debug, Clone, sqlx::FromRow)]
+#[allow(dead_code)]
+pub struct DbProject {
+    pub id: Uuid,
+    pub slug: String,
+    pub title: String,
+    pub description: String,
+    pub status: ProjectStatus,
+    pub github_repo: Option<String>,
+    pub demo_url: Option<String>,
+    pub priority: i32,
+    pub icon: Option<String>,
+    pub last_github_activity: Option<OffsetDateTime>,
+    pub created_at: OffsetDateTime,
+    pub updated_at: OffsetDateTime,
+}
+
+// API response types
+#[derive(Debug, Clone, Serialize, Deserialize)]
+pub struct ApiProjectLink {
+    pub url: String,
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub title: Option<String>,
+}
+
+#[derive(Debug, Clone, Serialize, Deserialize)]
+pub struct ApiProject {
+    pub id: String,
+    pub name: String,
+    #[serde(rename = "shortDescription")]
+    pub short_description: String,
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub icon: Option<String>,
+    pub links: Vec<ApiProjectLink>,
+}
+
+impl DbProject {
+    /// Convert database project to API response format
+    pub fn to_api_project(&self) -> ApiProject {
+        let mut links = Vec::new();
+
+        if let Some(ref repo) = self.github_repo {
+            links.push(ApiProjectLink {
+                url: format!("https://github.com/{}", repo),
+                title: Some("GitHub".to_string()),
+            });
+        }
+
+        if let Some(ref demo) = self.demo_url {
+            links.push(ApiProjectLink {
+                url: demo.clone(),
+                title: Some("Demo".to_string()),
+            });
+        }
+
+        ApiProject {
+            id: self.id.to_string(),
+            name: self.title.clone(),
+            short_description: self.description.clone(),
+            icon: self.icon.clone(),
+            links,
+        }
+    }
+}
+
+// Connection pool creation
+pub async fn create_pool(database_url: &str) -> Result<PgPool, sqlx::Error> {
+    PgPoolOptions::new()
+        .max_connections(20)
+        .acquire_timeout(std::time::Duration::from_secs(3))
+        .connect(database_url)
+        .await
+}
+
+// Queries
+pub async fn get_public_projects(pool: &PgPool) -> Result<Vec<DbProject>, sqlx::Error> {
+    sqlx::query_as!(
+        DbProject,
+        r#"
+        SELECT
+            id,
+            slug,
+            title,
+            description,
+            status as "status: ProjectStatus",
+            github_repo,
+            demo_url,
+            priority,
+            icon,
+            last_github_activity,
+            created_at,
+            updated_at
+        FROM projects
+        WHERE status != 'hidden'
+        ORDER BY priority DESC, created_at DESC
+        "#
+    )
+    .fetch_all(pool)
+    .await
+}
+
+pub async fn health_check(pool: &PgPool) -> Result<(), sqlx::Error> {
+    sqlx::query!("SELECT 1 as check")
+        .fetch_one(pool)
+        .await
+        .map(|_| ())
+}
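For reference, the serde attributes above define the wire format: shortDescription is the camelCase rename, and None fields vanish entirely. A sketch of the resulting JSON (hypothetical test, illustrative values):

#[test]
fn api_project_wire_shape() {
    let project = ApiProject {
        id: "00000000-0000-0000-0000-000000000000".to_string(),
        name: "Contest".to_string(),
        short_description: "Competitive programming archive".to_string(),
        icon: None, // dropped from the JSON by skip_serializing_if
        links: vec![ApiProjectLink {
            url: "https://contest.xevion.dev".to_string(),
            title: Some("Demo".to_string()),
        }],
    };

    assert_eq!(
        serde_json::to_value(&project).unwrap(),
        serde_json::json!({
            "id": "00000000-0000-0000-0000-000000000000",
            "name": "Contest",
            "shortDescription": "Competitive programming archive",
            "links": [{ "url": "https://contest.xevion.dev", "title": "Demo" }]
        })
    );
}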
+83 -74
@@ -6,7 +6,6 @@ use axum::{
     routing::any,
 };
 use clap::Parser;
-use serde::{Deserialize, Serialize};
 use std::net::SocketAddr;
 use std::path::PathBuf;
 use std::sync::Arc;
@@ -16,6 +15,7 @@ use tracing_subscriber::{EnvFilter, layer::SubscriberExt, util::SubscriberInitEx

 mod assets;
 mod config;
+mod db;
 mod formatter;
 mod health;
 mod middleware;
@@ -68,9 +68,30 @@ fn init_tracing() {

 #[tokio::main]
 async fn main() {
+    // Load .env file if present
+    dotenvy::dotenv().ok();
+
+    // Parse args early to allow --help to work without database
+    let args = Args::parse();
+
     init_tracing();

-    let args = Args::parse();
+    // Load database URL from environment (fail-fast)
+    let database_url =
+        std::env::var("DATABASE_URL").expect("DATABASE_URL must be set in environment");
+
+    // Create connection pool
+    let pool = db::create_pool(&database_url)
+        .await
+        .expect("Failed to connect to database");
+
+    // Run migrations on startup
+    sqlx::migrate!()
+        .run(&pool)
+        .await
+        .expect("Failed to run database migrations");
+
+    tracing::info!("Database connected and migrations applied");

     if args.listen.is_empty() {
         eprintln!("Error: At least one --listen address is required");
@@ -108,13 +129,15 @@ async fn main() {
     let downstream_url_for_health = args.downstream.clone();
     let http_client_for_health = http_client.clone();
     let unix_client_for_health = unix_client.clone();
+    let pool_for_health = pool.clone();

     let health_checker = Arc::new(HealthChecker::new(move || {
         let downstream_url = downstream_url_for_health.clone();
         let http_client = http_client_for_health.clone();
         let unix_client = unix_client_for_health.clone();
+        let pool = pool_for_health.clone();

-        async move { perform_health_check(downstream_url, http_client, unix_client).await }
+        async move { perform_health_check(downstream_url, http_client, unix_client, Some(pool)).await }
     }));

     let tarpit_config = TarpitConfig::from_env();
@@ -137,6 +160,7 @@ async fn main() {
         unix_client,
         health_checker,
         tarpit_state,
+        pool: pool.clone(),
     });

     // Regenerate common OGP images on startup
@@ -238,6 +262,7 @@ pub struct AppState {
     unix_client: Option<reqwest::Client>,
     health_checker: Arc<HealthChecker>,
     tarpit_state: Arc<TarpitState>,
+    pool: sqlx::PgPool,
 }

 #[derive(Debug)]
@@ -289,10 +314,7 @@ fn api_routes() -> Router<Arc<AppState>> {
             "/health",
             axum::routing::get(health_handler).head(health_handler),
         )
-        .route(
-            "/projects",
-            axum::routing::get(projects_handler).head(projects_handler),
-        )
+        .route("/projects", axum::routing::get(projects_handler))
         .fallback(api_404_and_method_handler)
 }

@@ -423,55 +445,25 @@ async fn api_404_handler(uri: axum::http::Uri) -> impl IntoResponse {
     api_404_and_method_handler(req).await
 }

-#[derive(Debug, Clone, Serialize, Deserialize)]
-struct ProjectLink {
-    url: String,
-    #[serde(skip_serializing_if = "Option::is_none")]
-    title: Option<String>,
-}
-
-#[derive(Debug, Clone, Serialize, Deserialize)]
-struct Project {
-    id: String,
-    name: String,
-    #[serde(rename = "shortDescription")]
-    short_description: String,
-    #[serde(skip_serializing_if = "Option::is_none")]
-    icon: Option<String>,
-    links: Vec<ProjectLink>,
-}
-
-async fn projects_handler() -> impl IntoResponse {
-    let projects = vec![
-        Project {
-            id: "1".to_string(),
-            name: "xevion.dev".to_string(),
-            short_description: "Personal portfolio with fuzzy tag discovery".to_string(),
-            icon: None,
-            links: vec![ProjectLink {
-                url: "https://github.com/Xevion/xevion.dev".to_string(),
-                title: Some("GitHub".to_string()),
-            }],
-        },
-        Project {
-            id: "2".to_string(),
-            name: "Contest".to_string(),
-            short_description: "Competitive programming problem archive".to_string(),
-            icon: None,
-            links: vec![
-                ProjectLink {
-                    url: "https://github.com/Xevion/contest".to_string(),
-                    title: Some("GitHub".to_string()),
-                },
-                ProjectLink {
-                    url: "https://contest.xevion.dev".to_string(),
-                    title: Some("Demo".to_string()),
-                },
-            ],
-        },
-    ];
-
-    Json(projects)
+async fn projects_handler(State(state): State<Arc<AppState>>) -> impl IntoResponse {
+    match db::get_public_projects(&state.pool).await {
+        Ok(projects) => {
+            let api_projects: Vec<db::ApiProject> =
+                projects.into_iter().map(|p| p.to_api_project()).collect();
+            Json(api_projects).into_response()
+        }
+        Err(err) => {
+            tracing::error!(error = %err, "Failed to fetch projects from database");
+            (
+                StatusCode::INTERNAL_SERVER_ERROR,
+                Json(serde_json::json!({
+                    "error": "Internal server error",
+                    "message": "Failed to fetch projects"
+                })),
+            )
+                .into_response()
+        }
+    }
 }

 fn should_tarpit(state: &TarpitState, path: &str) -> bool {
@@ -687,6 +679,7 @@ async fn perform_health_check(
     downstream_url: String,
     http_client: reqwest::Client,
     unix_client: Option<reqwest::Client>,
+    pool: Option<sqlx::PgPool>,
 ) -> bool {
     let url = if downstream_url.starts_with('/') || downstream_url.starts_with("./") {
         "http://localhost/internal/health".to_string()
@@ -700,24 +693,40 @@ async fn perform_health_check(
         &http_client
     };

-    match tokio::time::timeout(Duration::from_secs(5), client.get(&url).send()).await {
-        Ok(Ok(response)) => {
-            let is_success = response.status().is_success();
-            if !is_success {
-                tracing::warn!(
-                    status = response.status().as_u16(),
-                    "Health check failed: Bun returned non-success status"
-                );
-            }
-            is_success
-        }
-        Ok(Err(err)) => {
-            tracing::error!(error = %err, "Health check failed: cannot reach Bun");
-            false
-        }
-        Err(_) => {
-            tracing::error!("Health check failed: timeout after 5s");
-            false
-        }
-    }
+    let bun_healthy =
+        match tokio::time::timeout(Duration::from_secs(5), client.get(&url).send()).await {
+            Ok(Ok(response)) => {
+                let is_success = response.status().is_success();
+                if !is_success {
+                    tracing::warn!(
+                        status = response.status().as_u16(),
+                        "Health check failed: Bun returned non-success status"
+                    );
+                }
+                is_success
+            }
+            Ok(Err(err)) => {
+                tracing::error!(error = %err, "Health check failed: cannot reach Bun");
+                false
+            }
+            Err(_) => {
+                tracing::error!("Health check failed: timeout after 5s");
+                false
+            }
+        };
+
+    // Check database
+    let db_healthy = if let Some(pool) = pool {
+        match db::health_check(&pool).await {
+            Ok(_) => true,
+            Err(err) => {
+                tracing::error!(error = %err, "Database health check failed");
+                false
+            }
+        }
+    } else {
+        true
+    };
+
+    bun_healthy && db_healthy
 }