mirror of https://github.com/Xevion/banner.git (synced 2026-01-31 14:23:36 -06:00)

Compare commits: v0.5.0 .. e41b970d6e (11 commits)

e41b970d6e
e880126281
db0ec1e69d
2947face06
36bcc27d7f
9e403e5043
98a6d978c6
4deeef2f00
e008ee5a12
a007ccb6a2
527cbebc6a
Cargo.lock (generated) +121 -1
@@ -26,6 +26,21 @@ dependencies = [
  "memchr",
 ]
 
+[[package]]
+name = "alloc-no-stdlib"
+version = "2.0.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "cc7bb162ec39d46ab1ca8c77bf72e890535becd1751bb45f64c597edb4c8c6b3"
+
+[[package]]
+name = "alloc-stdlib"
+version = "0.2.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "94fb8275041c72129eb51b7d0322c29b8387a0386127718b096429201a5d6ece"
+dependencies = [
+ "alloc-no-stdlib",
+]
+
 [[package]]
 name = "allocator-api2"
 version = "0.2.21"

@@ -106,6 +121,19 @@ dependencies = [
  "serde",
 ]
 
+[[package]]
+name = "async-compression"
+version = "0.4.33"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "93c1f86859c1af3d514fa19e8323147ff10ea98684e6c7b307912509f50e67b2"
+dependencies = [
+ "compression-codecs",
+ "compression-core",
+ "futures-core",
+ "pin-project-lite",
+ "tokio",
+]
+
 [[package]]
 name = "async-trait"
 version = "0.1.89"

@@ -241,7 +269,7 @@ dependencies = [
 
 [[package]]
 name = "banner"
-version = "0.3.4"
+version = "0.5.0"
 dependencies = [
  "anyhow",
  "async-trait",

@@ -284,6 +312,7 @@ dependencies = [
  "tracing-subscriber",
  "ts-rs",
  "url",
+ "urlencoding",
  "yansi",
 ]
 

@@ -329,6 +358,27 @@ dependencies = [
  "generic-array",
 ]
 
+[[package]]
+name = "brotli"
+version = "8.0.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4bd8b9603c7aa97359dbd97ecf258968c95f3adddd6db2f7e7a5bef101c84560"
+dependencies = [
+ "alloc-no-stdlib",
+ "alloc-stdlib",
+ "brotli-decompressor",
+]
+
+[[package]]
+name = "brotli-decompressor"
+version = "5.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "874bb8112abecc98cbd6d81ea4fa7e94fb9449648c93cc89aa40c81c24d7de03"
+dependencies = [
+ "alloc-no-stdlib",
+ "alloc-stdlib",
+]
+
 [[package]]
 name = "bstr"
 version = "1.12.0"

@@ -406,6 +456,8 @@ version = "1.2.34"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "42bc4aea80032b7bf409b0bc7ccad88853858911b7713a8062fdc0623867bedc"
 dependencies = [
+ "jobserver",
+ "libc",
  "shlex",
 ]
 

@@ -500,6 +552,26 @@ dependencies = [
  "time",
 ]
 
+[[package]]
+name = "compression-codecs"
+version = "0.4.32"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "680dc087785c5230f8e8843e2e57ac7c1c90488b6a91b88caa265410568f441b"
+dependencies = [
+ "brotli",
+ "compression-core",
+ "flate2",
+ "memchr",
+ "zstd",
+ "zstd-safe",
+]
+
+[[package]]
+name = "compression-core"
+version = "0.4.31"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "75984efb6ed102a0d42db99afb6c1948f0380d1d91808d5529916e6c08b49d8d"
+
 [[package]]
 name = "concurrent-queue"
 version = "2.5.0"

@@ -1641,6 +1713,16 @@ version = "1.0.15"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "4a5f13b858c8d314ee3e8f639011f7ccefe71f97f96e50151fb991f267928e2c"
 
+[[package]]
+name = "jobserver"
+version = "0.1.34"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9afb3de4395d6b3e67a780b6de64b51c978ecf11cb9a462c66be7d4ca9039d33"
+dependencies = [
+ "getrandom 0.3.3",
+ "libc",
+]
+
 [[package]]
 name = "js-sys"
 version = "0.3.77"

@@ -3602,14 +3684,17 @@ version = "0.6.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "adc82fd73de2a9722ac5da747f12383d2bfdb93591ee6c58486e0097890f05f2"
 dependencies = [
+ "async-compression",
  "bitflags 2.9.4",
  "bytes",
+ "futures-core",
  "futures-util",
  "http 1.3.1",
  "http-body 1.0.1",
  "iri-string",
  "pin-project-lite",
  "tokio",
+ "tokio-util",
  "tower",
  "tower-layer",
  "tower-service",

@@ -3721,6 +3806,7 @@ version = "11.1.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "4994acea2522cd2b3b85c1d9529a55991e3ad5e25cdcd3de9d505972c4379424"
 dependencies = [
+ "chrono",
  "serde_json",
  "thiserror 2.0.16",
  "ts-rs-macros",

@@ -3860,6 +3946,12 @@ dependencies = [
  "serde",
 ]
 
+[[package]]
+name = "urlencoding"
+version = "2.1.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "daf8dba3b7eb870caf1ddeed7bc9d2a049f3cfdfae7cb521b087cc33ae4c49da"
+
 [[package]]
 name = "utf-8"
 version = "0.7.6"

@@ -4555,3 +4647,31 @@ dependencies = [
  "quote",
  "syn 2.0.106",
 ]
+
+[[package]]
+name = "zstd"
+version = "0.13.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e91ee311a569c327171651566e07972200e76fcfe2242a4fa446149a3881c08a"
+dependencies = [
+ "zstd-safe",
+]
+
+[[package]]
+name = "zstd-safe"
+version = "7.2.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8f49c4d5f0abb602a93fb8736af2a4f4dd9512e36f7f570d66e65ff867ed3b9d"
+dependencies = [
+ "zstd-sys",
+]
+
+[[package]]
+name = "zstd-sys"
+version = "2.0.16+zstd.1.5.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "91e19ebc2adc8f83e43039e79776e3fda8ca919132d68a1fed6a5faca2683748"
+dependencies = [
+ "cc",
+ "pkg-config",
+]
Cargo.toml +3 -2

@@ -48,16 +48,17 @@ url = "2.5"
 governor = "0.10.1"
 serde_path_to_error = "0.1.17"
 num-format = "0.4.4"
-tower-http = { version = "0.6.0", features = ["cors", "trace", "timeout"] }
+tower-http = { version = "0.6.0", features = ["cors", "trace", "timeout", "compression-full"] }
 rust-embed = { version = "8.0", features = ["include-exclude"], optional = true }
 mime_guess = { version = "2.0", optional = true }
 clap = { version = "4.5", features = ["derive"] }
 rapidhash = "4.1.0"
 yansi = "1.0.1"
 extension-traits = "2"
-ts-rs = { version = "11.1.0", features = ["serde-compat", "serde-json-impl"] }
+ts-rs = { version = "11.1.0", features = ["chrono-impl", "serde-compat", "serde-json-impl"] }
 html-escape = "0.2.13"
 axum-extra = { version = "0.12.5", features = ["query"] }
+urlencoding = "2.1.3"
 
 [dev-dependencies]
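The new "compression-full" feature is what pulls brotli, zstd, and flate2 into Cargo.lock above; it lets tower-http's CompressionLayer negotiate whichever encoding a client advertises. A minimal sketch of wiring that layer into an axum router (the route and handler here are illustrative assumptions, not this repository's actual routing code):

use axum::{Router, routing::get};
use tower_http::compression::CompressionLayer;

// Hypothetical router; only the CompressionLayer line reflects the new feature.
fn router() -> Router {
    Router::new()
        .route("/api/health", get(|| async { "ok" }))
        // Negotiates gzip/deflate/brotli/zstd from the request's Accept-Encoding header.
        .layer(CompressionLayer::new())
}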
Dockerfile +7 -4

@@ -7,6 +7,9 @@ FROM oven/bun:1 AS frontend-builder
 
 WORKDIR /app
 
+# Install zstd for pre-compression
+RUN apt-get update && apt-get install -y --no-install-recommends zstd && rm -rf /var/lib/apt/lists/*
+
 # Copy backend Cargo.toml for build-time version retrieval
 COPY ./Cargo.toml ./
 

@@ -19,8 +22,8 @@ RUN bun install --frozen-lockfile
 # Copy frontend source code
 COPY ./web ./
 
-# Build frontend
-RUN bun run build
+# Build frontend, then pre-compress static assets (gzip, brotli, zstd)
+RUN bun run build && bun run scripts/compress-assets.ts
 
 # --- Chef Base Stage ---
 FROM lukemathwalker/cargo-chef:latest-rust-${RUST_VERSION} AS chef

@@ -112,5 +115,5 @@ HEALTHCHECK --interval=30s --timeout=3s --start-period=5s --retries=3 \
 ENV HOSTS=0.0.0.0,[::]
 
 # Implicitly uses PORT environment variable
-# temporary: running without 'scraper' service
-CMD ["sh", "-c", "exec ./banner --services web,bot"]
+# Runs all services: web, bot, and scraper
+CMD ["sh", "-c", "exec ./banner"]
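Pre-compressing at build time pairs with serving the compressed variants at runtime. As a sketch of the serving side, tower-http's ServeDir can pick up sibling .gz/.br/.zst files. Note the assumptions: this serves assets from disk and uses an assumed "web/dist" output path, whereas the release build here embeds assets via rust-embed, so this only illustrates the mechanism:

use tower_http::services::ServeDir;

// Hypothetical static-file service; "web/dist" is an assumed output directory.
fn static_assets() -> ServeDir {
    ServeDir::new("web/dist")
        .precompressed_gzip() // serves app.js.gz when the client accepts gzip
        .precompressed_br()   // app.js.br for brotli
        .precompressed_zstd() // app.js.zst, as produced by compress-assets.ts
}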
Justfile

@@ -1,51 +1,292 @@
 set dotenv-load
 
 default_services := "bot,web,scraper"
 
 default:
     just --list
 
-# Run all checks (format, clippy, tests, lint)
-check:
-    cargo fmt --all -- --check
-    cargo clippy --all-features -- --deny warnings
-    cargo nextest run -E 'not test(export_bindings)'
-    bun run --cwd web check
-    bun run --cwd web test
-
-# Generate TypeScript bindings from Rust types (ts-rs)
-bindings:
-    cargo test export_bindings
-
-# Run all tests (Rust + frontend)
-test: test-rust test-web
-
-# Run only Rust tests (excludes ts-rs bindings generation)
-test-rust *ARGS:
-    cargo nextest run -E 'not test(export_bindings)' {{ARGS}}
-
-# Run only frontend tests
-test-web:
-    bun run --cwd web test
-
-# Quick check: clippy + tests + typecheck (skips formatting)
-check-quick:
-    cargo clippy --all-features -- --deny warnings
-    cargo nextest run -E 'not test(export_bindings)'
-    bun run --cwd web check
-
-# Run the Banner API search demo (hits live UTSA API, ~20s)
-search *ARGS:
-    cargo run -q --bin search -- {{ARGS}}
+# Run all checks in parallel. Pass -f/--fix to auto-format and fix first.
+[script("bun")]
+check *flags:
+    const args = "{{flags}}".split(/\s+/).filter(Boolean);
+    let fix = false;
+    for (const arg of args) {
+        if (arg === "-f" || arg === "--fix") fix = true;
+        else { console.error(`Unknown flag: ${arg}`); process.exit(1); }
+    }
+
+    const run = (cmd) => {
+        const proc = Bun.spawnSync(cmd, { stdio: ["inherit", "inherit", "inherit"] });
+        if (proc.exitCode !== 0) process.exit(proc.exitCode);
+    };
+
+    if (fix) {
+        console.log("\x1b[1;36m→ Fixing...\x1b[0m");
+        run(["cargo", "fmt", "--all"]);
+        run(["bun", "run", "--cwd", "web", "format"]);
+        run(["cargo", "clippy", "--all-features", "--fix", "--allow-dirty", "--allow-staged",
+             "--", "--deny", "warnings"]);
+        console.log("\x1b[1;36m→ Verifying...\x1b[0m");
+    }
+
+    const checks = [
+        { name: "rustfmt", cmd: ["cargo", "fmt", "--all", "--", "--check"] },
+        { name: "clippy", cmd: ["cargo", "clippy", "--all-features", "--", "--deny", "warnings"] },
+        { name: "rust-test", cmd: ["cargo", "nextest", "run", "-E", "not test(export_bindings)"] },
+        { name: "svelte-check", cmd: ["bun", "run", "--cwd", "web", "check"] },
+        { name: "biome", cmd: ["bun", "run", "--cwd", "web", "format:check"] },
+        { name: "web-test", cmd: ["bun", "run", "--cwd", "web", "test"] },
+        // { name: "sqlx-prepare", cmd: ["cargo", "sqlx", "prepare", "--check"] },
+    ];
+
+    const isTTY = process.stderr.isTTY;
+    const start = Date.now();
+    const remaining = new Set(checks.map(c => c.name));
+
+    const promises = checks.map(async (check) => {
+        const proc = Bun.spawn(check.cmd, {
+            env: { ...process.env, FORCE_COLOR: "1" },
+            stdout: "pipe", stderr: "pipe",
+        });
+        const [stdout, stderr] = await Promise.all([
+            new Response(proc.stdout).text(),
+            new Response(proc.stderr).text(),
+        ]);
+        await proc.exited;
+        return { ...check, stdout, stderr, exitCode: proc.exitCode,
+                 elapsed: ((Date.now() - start) / 1000).toFixed(1) };
+    });
+
+    const interval = isTTY ? setInterval(() => {
+        const elapsed = ((Date.now() - start) / 1000).toFixed(1);
+        process.stderr.write(`\r\x1b[K${elapsed}s [${Array.from(remaining).join(", ")}]`);
+    }, 100) : null;
+
+    let anyFailed = false;
+    for (const promise of promises) {
+        const r = await promise;
+        remaining.delete(r.name);
+        if (isTTY) process.stderr.write(`\r\x1b[K`);
+        if (r.exitCode !== 0) {
+            anyFailed = true;
+            process.stdout.write(`\x1b[31m✗ ${r.name}\x1b[0m (${r.elapsed}s)\n`);
+            if (r.stdout) process.stdout.write(r.stdout);
+            if (r.stderr) process.stderr.write(r.stderr);
+        } else {
+            process.stdout.write(`\x1b[32m✓ ${r.name}\x1b[0m (${r.elapsed}s)\n`);
+        }
+    }
+
+    if (interval) clearInterval(interval);
+    if (isTTY) process.stderr.write(`\r\x1b[K`);
+    process.exit(anyFailed ? 1 : 0);
 
 # Format all Rust and TypeScript code
 format:
     cargo fmt --all
     bun run --cwd web format
 
 # Check formatting without modifying (CI-friendly)
 format-check:
     cargo fmt --all -- --check
     bun run --cwd web format:check
+
+# Run tests. Usage: just test [rust|web|<nextest filter args>]
+[script("bun")]
+test *args:
+    const input = "{{args}}".trim();
+    const run = (cmd) => {
+        const proc = Bun.spawnSync(cmd, { stdio: ["inherit", "inherit", "inherit"] });
+        if (proc.exitCode !== 0) process.exit(proc.exitCode);
+    };
+    if (input === "web") {
+        run(["bun", "run", "--cwd", "web", "test"]);
+    } else if (input === "rust") {
+        run(["cargo", "nextest", "run", "-E", "not test(export_bindings)"]);
+    } else if (input === "") {
+        run(["cargo", "nextest", "run", "-E", "not test(export_bindings)"]);
+        run(["bun", "run", "--cwd", "web", "test"]);
+    } else {
+        run(["cargo", "nextest", "run", ...input.split(/\s+/)]);
+    }
+
+# Generate TypeScript bindings from Rust types (ts-rs)
+bindings:
+    cargo test export_bindings
+
+# Run the Banner API search demo (hits live UTSA API, ~20s)
+search *ARGS:
+    cargo run -q --bin search -- {{ARGS}}
+
+# Pass args to binary after --: just dev -n -- --some-flag
+# Dev server. Flags: -f(rontend) -b(ackend) -W(no-watch) -n(o-build) -r(elease) -e(mbed) --tracing <fmt>
+[script("bun")]
+dev *flags:
+    const argv = "{{flags}}".split(/\s+/).filter(Boolean);
+
+    let frontendOnly = false, backendOnly = false;
+    let noWatch = false, noBuild = false, release = false, embed = false;
+    let tracing = "pretty";
+    const passthrough = [];
+
+    let i = 0;
+    let seenDashDash = false;
+    while (i < argv.length) {
+        const arg = argv[i];
+        if (seenDashDash) { passthrough.push(arg); i++; continue; }
+        if (arg === "--") { seenDashDash = true; i++; continue; }
+        if (arg.startsWith("--")) {
+            if (arg === "--frontend-only") frontendOnly = true;
+            else if (arg === "--backend-only") backendOnly = true;
+            else if (arg === "--no-watch") noWatch = true;
+            else if (arg === "--no-build") noBuild = true;
+            else if (arg === "--release") release = true;
+            else if (arg === "--embed") embed = true;
+            else if (arg === "--tracing") { tracing = argv[++i] || "pretty"; }
+            else { console.error(`Unknown flag: ${arg}`); process.exit(1); }
+        } else if (arg.startsWith("-") && arg.length > 1) {
+            for (const c of arg.slice(1)) {
+                if (c === "f") frontendOnly = true;
+                else if (c === "b") backendOnly = true;
+                else if (c === "W") noWatch = true;
+                else if (c === "n") noBuild = true;
+                else if (c === "r") release = true;
+                else if (c === "e") embed = true;
+                else { console.error(`Unknown flag: -${c}`); process.exit(1); }
+            }
+        } else { console.error(`Unknown argument: ${arg}`); process.exit(1); }
+        i++;
+    }
+
+    // -e implies -b (no point running Vite if assets are embedded)
+    if (embed) backendOnly = true;
+    // -n implies -W (no build means no watch)
+    if (noBuild) noWatch = true;
+
+    // Validate conflicting flags
+    if (frontendOnly && backendOnly) {
+        console.error("Cannot use -f and -b together (or -e implies -b)");
+        process.exit(1);
+    }
+
+    const runFrontend = !backendOnly;
+    const runBackend = !frontendOnly;
+    const profile = release ? "release" : "dev";
+    const profileDir = release ? "release" : "debug";
+
+    const procs = [];
+    const cleanup = async () => {
+        for (const p of procs) p.kill();
+        await Promise.all(procs.map(p => p.exited));
+    };
+    process.on("SIGINT", async () => { await cleanup(); process.exit(0); });
+    process.on("SIGTERM", async () => { await cleanup(); process.exit(0); });
+
+    // Build frontend first when embedding assets (backend will bake them in)
+    if (embed && !noBuild) {
+        console.log(`\x1b[1;36m→ Building frontend (for embedding)...\x1b[0m`);
+        const fb = Bun.spawnSync(["bun", "run", "--cwd", "web", "build"], {
+            stdio: ["inherit", "inherit", "inherit"],
+        });
+        if (fb.exitCode !== 0) process.exit(fb.exitCode);
+    }
+
+    // Frontend: Vite dev server
+    if (runFrontend) {
+        const proc = Bun.spawn(["bun", "run", "--cwd", "web", "dev"], {
+            stdio: ["inherit", "inherit", "inherit"],
+        });
+        procs.push(proc);
+    }
+
+    // Backend
+    if (runBackend) {
+        const backendArgs = [`--tracing`, tracing, ...passthrough];
+        const bin = `target/${profileDir}/banner`;
+
+        if (noWatch) {
+            // Build first unless -n (skip build)
+            if (!noBuild) {
+                console.log(`\x1b[1;36m→ Building backend (${profile})...\x1b[0m`);
+                const cargoArgs = ["cargo", "build", "--bin", "banner"];
+                if (!embed) cargoArgs.push("--no-default-features");
+                if (release) cargoArgs.push("--release");
+                const build = Bun.spawnSync(cargoArgs, { stdio: ["inherit", "inherit", "inherit"] });
+                if (build.exitCode !== 0) { cleanup(); process.exit(build.exitCode); }
+            }
+
+            // Run the binary directly (no watch)
+            const { existsSync } = await import("fs");
+            if (!existsSync(bin)) {
+                console.error(`Binary not found: ${bin}`);
+                console.error(`Run 'just build${release ? "" : " -d"}' first, or remove -n to use bacon.`);
+                cleanup();
+                process.exit(1);
+            }
+            console.log(`\x1b[1;36m→ Running ${bin} (no watch)\x1b[0m`);
+            const proc = Bun.spawn([bin, ...backendArgs], {
+                stdio: ["inherit", "inherit", "inherit"],
+            });
+            procs.push(proc);
+        } else {
+            // Bacon watch mode
+            const baconArgs = ["bacon", "--headless", "run", "--"];
+            if (!embed) baconArgs.push("--no-default-features");
+            if (release) baconArgs.push("--profile", "release");
+            baconArgs.push("--", ...backendArgs);
+            const proc = Bun.spawn(baconArgs, {
+                stdio: ["inherit", "inherit", "inherit"],
+            });
+            procs.push(proc);
+        }
+    }
+
+    // Wait for any process to exit, then kill the rest
+    const results = procs.map((p, i) => p.exited.then(code => ({ i, code })));
+    const first = await Promise.race(results);
+    cleanup();
+    process.exit(first.code);
+
+# Production build. Flags: -d(ebug) -f(rontend-only) -b(ackend-only)
+[script("bun")]
+build *flags:
+    const argv = "{{flags}}".split(/\s+/).filter(Boolean);
+
+    let debug = false, frontendOnly = false, backendOnly = false;
+    for (const arg of argv) {
+        if (arg.startsWith("--")) {
+            if (arg === "--debug") debug = true;
+            else if (arg === "--frontend-only") frontendOnly = true;
+            else if (arg === "--backend-only") backendOnly = true;
+            else { console.error(`Unknown flag: ${arg}`); process.exit(1); }
+        } else if (arg.startsWith("-") && arg.length > 1) {
+            for (const c of arg.slice(1)) {
+                if (c === "d") debug = true;
+                else if (c === "f") frontendOnly = true;
+                else if (c === "b") backendOnly = true;
+                else { console.error(`Unknown flag: -${c}`); process.exit(1); }
+            }
+        } else { console.error(`Unknown argument: ${arg}`); process.exit(1); }
+    }
+
+    if (frontendOnly && backendOnly) {
+        console.error("Cannot use -f and -b together");
+        process.exit(1);
+    }
+
+    const run = (cmd) => {
+        const proc = Bun.spawnSync(cmd, { stdio: ["inherit", "inherit", "inherit"] });
+        if (proc.exitCode !== 0) process.exit(proc.exitCode);
+    };
+
+    const buildFrontend = !backendOnly;
+    const buildBackend = !frontendOnly;
+    const profile = debug ? "debug" : "release";
+
+    if (buildFrontend) {
+        console.log("\x1b[1;36m→ Building frontend...\x1b[0m");
+        run(["bun", "run", "--cwd", "web", "build"]);
+    }
+
+    if (buildBackend) {
+        console.log(`\x1b[1;36m→ Building backend (${profile})...\x1b[0m`);
+        const cmd = ["cargo", "build", "--bin", "banner"];
+        if (!debug) cmd.push("--release");
+        run(cmd);
+    }
 
 # Start PostgreSQL in Docker and update .env with connection string
 # Commands: start (default), reset, rm

@@ -115,86 +356,6 @@ db cmd="start":
         await updateEnv();
     }
 
-# Auto-reloading frontend server
-frontend:
-    bun run --cwd web dev
-
-# Production build of frontend
-build-frontend:
-    bun run --cwd web build
-
-# Auto-reloading backend server (with embedded assets)
-backend *ARGS:
-    bacon --headless run -- -- {{ARGS}}
-
-# Auto-reloading backend server (no embedded assets, for dev proxy mode)
-backend-dev *ARGS:
-    bacon --headless run -- --no-default-features -- {{ARGS}}
-
-# Production build
-build:
-    bun run --cwd web build
-    cargo build --release --bin banner
-
-# Run auto-reloading development build with release characteristics
-dev-build *ARGS='--services web --tracing pretty': build-frontend
-    bacon --headless run -- --profile dev-release -- {{ARGS}}
-
-# Auto-reloading development build: Vite frontend + backend (no embedded assets, proxies to Vite)
-[parallel]
-dev *ARGS='--services web,bot': frontend (backend-dev ARGS)
-
-# Smoke test: start web server, hit API endpoints, verify responses
-[script("bash")]
-test-smoke port="18080":
-    set -euo pipefail
-    PORT={{port}}
-
-    cleanup() { kill "$SERVER_PID" 2>/dev/null; wait "$SERVER_PID" 2>/dev/null; }
-
-    # Start server in background
-    PORT=$PORT cargo run -q --no-default-features -- --services web --tracing json &
-    SERVER_PID=$!
-    trap cleanup EXIT
-
-    # Wait for server to be ready (up to 15s)
-    for i in $(seq 1 30); do
-        if curl -sf "http://localhost:$PORT/api/health" >/dev/null 2>&1; then break; fi
-        if ! kill -0 "$SERVER_PID" 2>/dev/null; then echo "FAIL: server exited early"; exit 1; fi
-        sleep 0.5
-    done
-
-    PASS=0; FAIL=0
-    check() {
-        local label="$1" url="$2" expected="$3"
-        body=$(curl -sf "$url") || { echo "FAIL: $label - request failed"; FAIL=$((FAIL+1)); return; }
-        if echo "$body" | grep -q "$expected"; then
-            echo "PASS: $label"
-            PASS=$((PASS+1))
-        else
-            echo "FAIL: $label - expected '$expected' in: $body"
-            FAIL=$((FAIL+1))
-        fi
-    }
-
-    check "GET /api/health" "http://localhost:$PORT/api/health" '"status":"healthy"'
-    check "GET /api/status" "http://localhost:$PORT/api/status" '"version"'
-    check "GET /api/metrics" "http://localhost:$PORT/api/metrics" '"banner_api"'
-
-    # Test 404
-    STATUS=$(curl -s -o /dev/null -w "%{http_code}" "http://localhost:$PORT/api/nonexistent")
-    if [ "$STATUS" = "404" ]; then
-        echo "PASS: 404 on unknown route"
-        PASS=$((PASS+1))
-    else
-        echo "FAIL: expected 404, got $STATUS"
-        FAIL=$((FAIL+1))
-    fi
-
-    echo ""
-    echo "Results: $PASS passed, $FAIL failed"
-    [ "$FAIL" -eq 0 ]
-
 alias b := bun
 bun *ARGS:
     cd web && bun {{ ARGS }}
README

@@ -29,8 +29,7 @@ The application consists of three modular services that can be run independently
 bun install --cwd web  # Install frontend dependencies
 cargo build            # Build the backend
 
-just dev                     # Runs auto-reloading dev build
-just dev --services bot,web  # Runs auto-reloading dev build, running only the bot and web services
+just dev        # Runs auto-reloading dev build with all services
 just dev-build  # Development build with release characteristics (frontend is embedded, non-auto-reloading)
 
 just build  # Production build that embeds assets
New SQL migration (users and sessions)

@@ -0,0 +1,19 @@
+CREATE TABLE users (
+    discord_id BIGINT PRIMARY KEY,
+    discord_username TEXT NOT NULL,
+    discord_avatar_hash TEXT,
+    is_admin BOOLEAN NOT NULL DEFAULT false,
+    created_at TIMESTAMPTZ NOT NULL DEFAULT now(),
+    updated_at TIMESTAMPTZ NOT NULL DEFAULT now()
+);
+
+CREATE TABLE user_sessions (
+    id TEXT PRIMARY KEY,
+    user_id BIGINT NOT NULL REFERENCES users(discord_id) ON DELETE CASCADE,
+    created_at TIMESTAMPTZ NOT NULL DEFAULT now(),
+    expires_at TIMESTAMPTZ NOT NULL,
+    last_active_at TIMESTAMPTZ NOT NULL DEFAULT now()
+);
+
+CREATE INDEX idx_user_sessions_user_id ON user_sessions(user_id);
+CREATE INDEX idx_user_sessions_expires_at ON user_sessions(expires_at);
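The expires_at index supports both session validation and expired-session cleanup. A hedged sqlx sketch of a lookup against these tables (the row type and function are hypothetical illustrations, not the project's actual data layer):

use sqlx::PgPool;

// Hypothetical row type for the query below.
#[derive(sqlx::FromRow)]
struct SessionUser {
    discord_id: i64,
    discord_username: String,
    is_admin: bool,
}

/// Resolve a live session to its user, bumping last_active_at in the same round-trip.
async fn lookup_session(pool: &PgPool, session_id: &str) -> sqlx::Result<Option<SessionUser>> {
    sqlx::query_as::<_, SessionUser>(
        r#"
        UPDATE user_sessions s
        SET last_active_at = now()
        FROM users u
        WHERE s.id = $1
          AND s.expires_at > now()
          AND u.discord_id = s.user_id
        RETURNING u.discord_id, u.discord_username, u.is_admin
        "#,
    )
    .bind(session_id)
    .fetch_optional(pool)
    .await
}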
+19 -1 (Rust: App startup and service registration)

@@ -6,6 +6,7 @@ use crate::services::bot::BotService;
 use crate::services::manager::ServiceManager;
 use crate::services::web::WebService;
 use crate::state::AppState;
+use crate::web::auth::AuthConfig;
 use anyhow::Context;
 use figment::value::UncasedStr;
 use figment::{Figment, providers::Env};

@@ -84,6 +85,14 @@
             info!(error = ?e, "Could not load reference cache on startup (may be empty)");
         }
 
+        // Seed the initial admin user if configured
+        if let Some(admin_id) = config.admin_discord_id {
+            let user = crate::data::users::ensure_seed_admin(&db_pool, admin_id as i64)
+                .await
+                .context("Failed to seed admin user")?;
+            info!(discord_id = admin_id, username = %user.discord_username, "Seed admin ensured");
+        }
+
         Ok(App {
            config,
            db_pool,

@@ -97,7 +106,16 @@
     pub fn setup_services(&mut self, services: &[ServiceName]) -> Result<(), anyhow::Error> {
         // Register enabled services with the manager
         if services.contains(&ServiceName::Web) {
-            let web_service = Box::new(WebService::new(self.config.port, self.app_state.clone()));
+            let auth_config = AuthConfig {
+                client_id: self.config.discord_client_id.clone(),
+                client_secret: self.config.discord_client_secret.clone(),
+                redirect_base: self.config.discord_redirect_uri.clone(),
+            };
+            let web_service = Box::new(WebService::new(
+                self.config.port,
+                self.app_state.clone(),
+                auth_config,
+            ));
             self.service_manager
                 .register_service(ServiceName::Web.as_str(), web_service);
         }
(Rust: model imports, use-ordering fix)

@@ -1,4 +1,4 @@
-use bitflags::{bitflags, Flags};
+use bitflags::{Flags, bitflags};
 use chrono::{DateTime, NaiveDate, NaiveTime, Timelike, Utc, Weekday};
 use extension_traits::extension;
 use serde::{Deserialize, Deserializer, Serialize};
+1 -105 (Rust: CLI arguments)

@@ -2,34 +2,16 @@ use clap::Parser;
 
 /// Banner Discord Bot - Course availability monitoring
 ///
-/// This application runs multiple services that can be controlled via CLI arguments:
+/// This application runs all services:
 /// - bot: Discord bot for course monitoring commands
 /// - web: HTTP server for web interface and API
 /// - scraper: Background service for scraping course data
-///
-/// Use --services to specify which services to run, or --disable-services to exclude specific services.
 #[derive(Parser, Debug)]
 #[command(author, version, about, long_about = None)]
 pub struct Args {
     /// Log formatter to use
     #[arg(long, value_enum, default_value_t = default_tracing_format())]
     pub tracing: TracingFormat,
-
-    /// Services to run (comma-separated). Default: all services
-    ///
-    /// Examples:
-    ///   --services bot,web    # Run only bot and web services
-    ///   --services scraper    # Run only the scraper service
-    #[arg(long, value_delimiter = ',', conflicts_with = "disable_services")]
-    pub services: Option<Vec<ServiceName>>,
-
-    /// Services to disable (comma-separated)
-    ///
-    /// Examples:
-    ///   --disable-services bot      # Run web and scraper only
-    ///   --disable-services bot,web  # Run only the scraper service
-    #[arg(long, value_delimiter = ',', conflicts_with = "services")]
-    pub disable_services: Option<Vec<ServiceName>>,
 }
 
 #[derive(clap::ValueEnum, Clone, Debug)]

@@ -66,34 +48,6 @@
     }
 }
 
-/// Determine which services should be enabled based on CLI arguments
-pub fn determine_enabled_services(args: &Args) -> Result<Vec<ServiceName>, anyhow::Error> {
-    match (&args.services, &args.disable_services) {
-        (Some(services), None) => {
-            // User specified which services to run
-            Ok(services.clone())
-        }
-        (None, Some(disabled)) => {
-            // User specified which services to disable
-            let enabled: Vec<ServiceName> = ServiceName::all()
-                .into_iter()
-                .filter(|s| !disabled.contains(s))
-                .collect();
-            Ok(enabled)
-        }
-        (None, None) => {
-            // Default: run all services
-            Ok(ServiceName::all())
-        }
-        (Some(_), Some(_)) => {
-            // This should be prevented by clap's conflicts_with, but just in case
-            Err(anyhow::anyhow!(
-                "Cannot specify both --services and --disable-services"
-            ))
-        }
-    }
-}
-
 #[cfg(debug_assertions)]
 const DEFAULT_TRACING_FORMAT: TracingFormat = TracingFormat::Pretty;
 #[cfg(not(debug_assertions))]

@@ -107,64 +61,6 @@ fn default_tracing_format() -> TracingFormat {
 mod tests {
     use super::*;
 
-    fn args_with_services(
-        services: Option<Vec<ServiceName>>,
-        disable: Option<Vec<ServiceName>>,
-    ) -> Args {
-        Args {
-            tracing: TracingFormat::Pretty,
-            services,
-            disable_services: disable,
-        }
-    }
-
-    #[test]
-    fn test_default_enables_all_services() {
-        let result = determine_enabled_services(&args_with_services(None, None)).unwrap();
-        assert_eq!(result.len(), 3);
-    }
-
-    #[test]
-    fn test_explicit_services_only_those() {
-        let result =
-            determine_enabled_services(&args_with_services(Some(vec![ServiceName::Web]), None))
-                .unwrap();
-        assert_eq!(result.len(), 1);
-        assert_eq!(result[0].as_str(), "web");
-    }
-
-    #[test]
-    fn test_disable_bot_leaves_web_and_scraper() {
-        let result =
-            determine_enabled_services(&args_with_services(None, Some(vec![ServiceName::Bot])))
-                .unwrap();
-        assert_eq!(result.len(), 2);
-        assert!(result.iter().all(|s| s.as_str() != "bot"));
-    }
-
-    #[test]
-    fn test_disable_all_leaves_empty() {
-        let result = determine_enabled_services(&args_with_services(
-            None,
-            Some(vec![
-                ServiceName::Bot,
-                ServiceName::Web,
-                ServiceName::Scraper,
-            ]),
-        ))
-        .unwrap();
-        assert!(result.is_empty());
-    }
-
-    #[test]
-    fn test_both_specified_returns_error() {
-        let result = determine_enabled_services(&args_with_services(
-            Some(vec![ServiceName::Web]),
-            Some(vec![ServiceName::Bot]),
-        ));
-        assert!(result.is_err());
-    }
-
     #[test]
     fn test_service_name_as_str() {
         assert_eq!(ServiceName::Bot.as_str(), "bot");
(Rust: configuration)

@@ -47,6 +47,19 @@ pub struct Config {
     /// Rate limiting configuration for Banner API requests
     #[serde(default = "default_rate_limiting")]
     pub rate_limiting: RateLimitingConfig,
+
+    /// Discord OAuth2 client ID for web authentication
+    #[serde(deserialize_with = "deserialize_string_or_uint")]
+    pub discord_client_id: String,
+    /// Discord OAuth2 client secret for web authentication
+    pub discord_client_secret: String,
+    /// Optional base URL override for OAuth2 redirect (e.g. "https://banner.xevion.dev").
+    /// When unset, the redirect URI is derived from the incoming request's Origin/Host.
+    #[serde(default)]
+    pub discord_redirect_uri: Option<String>,
+    /// Discord user ID to seed as initial admin on startup (optional)
+    #[serde(default)]
+    pub admin_discord_id: Option<u64>,
 }
 
 /// Default log level of "info"

@@ -216,6 +229,43 @@
     deserializer.deserialize_any(DurationVisitor)
 }
 
+/// Deserializes a value that may arrive as either a string or unsigned integer.
+///
+/// Figment's env provider infers types from raw values, so numeric-looking strings
+/// like Discord client IDs get parsed as integers. This accepts both forms.
+fn deserialize_string_or_uint<'de, D>(deserializer: D) -> Result<String, D::Error>
+where
+    D: Deserializer<'de>,
+{
+    use serde::de::Visitor;
+
+    struct StringOrUintVisitor;
+
+    impl<'de> Visitor<'de> for StringOrUintVisitor {
+        type Value = String;
+
+        fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result {
+            formatter.write_str("a string or unsigned integer")
+        }
+
+        fn visit_str<E>(self, value: &str) -> Result<Self::Value, E>
+        where
+            E: serde::de::Error,
+        {
+            Ok(value.to_owned())
+        }
+
+        fn visit_u64<E>(self, value: u64) -> Result<Self::Value, E>
+        where
+            E: serde::de::Error,
+        {
+            Ok(value.to_string())
+        }
+    }
+
+    deserializer.deserialize_any(StringOrUintVisitor)
+}
+
 #[cfg(test)]
 mod tests {
     use super::*;
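The discord_redirect_uri doc comment above says that, when unset, the redirect URI is derived from the incoming request's Origin/Host. A minimal sketch of such a derivation (a hypothetical helper; the project's real logic lives in its web/auth module and may differ):

use axum::http::HeaderMap;

// Hypothetical: prefer the configured override, then the Origin header (which
// carries a scheme), then the Host header with an assumed https scheme.
fn redirect_base(configured: Option<&str>, headers: &HeaderMap) -> Option<String> {
    if let Some(base) = configured {
        return Some(base.trim_end_matches('/').to_owned());
    }
    if let Some(origin) = headers.get("origin").and_then(|v| v.to_str().ok()) {
        return Some(origin.trim_end_matches('/').to_owned());
    }
    headers
        .get("host")
        .and_then(|v| v.to_str().ok())
        .map(|host| format!("https://{host}"))
}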
+438 -78 (Rust: course sync / batch upsert)

@@ -3,6 +3,7 @@
 use crate::banner::Course;
 use crate::data::models::DbMeetingTime;
 use crate::error::Result;
+use sqlx::PgConnection;
 use sqlx::PgPool;
 use std::collections::HashSet;
 use std::time::Instant;

@@ -57,15 +58,315 @@ fn extract_campus_code(course: &Course) -> Option<String> {
         .and_then(|mf| mf.meeting_time.campus.clone())
 }
 
+// ---------------------------------------------------------------------------
+// Task 1: UpsertDiffRow — captures pre- and post-upsert state for diffing
+// ---------------------------------------------------------------------------
+
+/// Row returned by the CTE-based upsert query, carrying both old and new values
+/// for every auditable field. `old_id` is `None` for fresh inserts.
+#[derive(sqlx::FromRow, Debug)]
+struct UpsertDiffRow {
+    id: i32,
+    old_id: Option<i32>,
+
+    // enrollment fields
+    old_enrollment: Option<i32>,
+    new_enrollment: i32,
+    old_max_enrollment: Option<i32>,
+    new_max_enrollment: i32,
+    old_wait_count: Option<i32>,
+    new_wait_count: i32,
+    old_wait_capacity: Option<i32>,
+    new_wait_capacity: i32,
+
+    // text fields (non-nullable in DB)
+    old_subject: Option<String>,
+    new_subject: String,
+    old_course_number: Option<String>,
+    new_course_number: String,
+    old_title: Option<String>,
+    new_title: String,
+
+    // nullable text fields
+    old_sequence_number: Option<String>,
+    new_sequence_number: Option<String>,
+    old_part_of_term: Option<String>,
+    new_part_of_term: Option<String>,
+    old_instructional_method: Option<String>,
+    new_instructional_method: Option<String>,
+    old_campus: Option<String>,
+    new_campus: Option<String>,
+
+    // nullable int fields
+    old_credit_hours: Option<i32>,
+    new_credit_hours: Option<i32>,
+    old_credit_hour_low: Option<i32>,
+    new_credit_hour_low: Option<i32>,
+    old_credit_hour_high: Option<i32>,
+    new_credit_hour_high: Option<i32>,
+
+    // cross-list fields
+    old_cross_list: Option<String>,
+    new_cross_list: Option<String>,
+    old_cross_list_capacity: Option<i32>,
+    new_cross_list_capacity: Option<i32>,
+    old_cross_list_count: Option<i32>,
+    new_cross_list_count: Option<i32>,
+
+    // link fields
+    old_link_identifier: Option<String>,
+    new_link_identifier: Option<String>,
+    old_is_section_linked: Option<bool>,
+    new_is_section_linked: Option<bool>,
+
+    // JSONB fields
+    old_meeting_times: Option<serde_json::Value>,
+    new_meeting_times: serde_json::Value,
+    old_attributes: Option<serde_json::Value>,
+    new_attributes: serde_json::Value,
+}
+
+// ---------------------------------------------------------------------------
+// Task 3: Entry types and diff logic
+// ---------------------------------------------------------------------------
+
+struct AuditEntry {
+    course_id: i32,
+    field_changed: &'static str,
+    old_value: String,
+    new_value: String,
+}
+
+struct MetricEntry {
+    course_id: i32,
+    enrollment: i32,
+    wait_count: i32,
+    seats_available: i32,
+}
+
+/// Compare old vs new for a single field, pushing an `AuditEntry` when they differ.
+///
+/// Three variants:
+/// - `diff_field!(audits, row, field_name, old_field, new_field)` — `Option<T>` old vs `T` new
+/// - `diff_field!(opt audits, row, field_name, old_field, new_field)` — `Option<T>` old vs `Option<T>` new
+/// - `diff_field!(json audits, row, field_name, old_field, new_field)` — `Option<Value>` old vs `Value` new
+///
+/// All variants skip when `old_id` is None (fresh insert).
+macro_rules! diff_field {
+    // Standard: Option<T> old vs T new (non-nullable columns)
+    ($audits:ident, $row:ident, $field:expr, $old:ident, $new:ident) => {
+        if $row.old_id.is_some() {
+            let old_str = $row
+                .$old
+                .as_ref()
+                .map(|v| v.to_string())
+                .unwrap_or_default();
+            let new_str = $row.$new.to_string();
+            if old_str != new_str {
+                $audits.push(AuditEntry {
+                    course_id: $row.id,
+                    field_changed: $field,
+                    old_value: old_str,
+                    new_value: new_str,
+                });
+            }
+        }
+    };
+    // Nullable: Option<T> old vs Option<T> new
+    (opt $audits:ident, $row:ident, $field:expr, $old:ident, $new:ident) => {
+        if $row.old_id.is_some() {
+            let old_str = $row
+                .$old
+                .as_ref()
+                .map(|v| v.to_string())
+                .unwrap_or_default();
+            let new_str = $row
+                .$new
+                .as_ref()
+                .map(|v| v.to_string())
+                .unwrap_or_default();
+            if old_str != new_str {
+                $audits.push(AuditEntry {
+                    course_id: $row.id,
+                    field_changed: $field,
+                    old_value: old_str,
+                    new_value: new_str,
+                });
+            }
+        }
+    };
+    // JSONB: Option<Value> old vs Value new
+    (json $audits:ident, $row:ident, $field:expr, $old:ident, $new:ident) => {
+        if $row.old_id.is_some() {
+            let old_val = $row
+                .$old
+                .as_ref()
+                .cloned()
+                .unwrap_or(serde_json::Value::Null);
+            let new_val = &$row.$new;
+            if old_val != *new_val {
+                $audits.push(AuditEntry {
+                    course_id: $row.id,
+                    field_changed: $field,
+                    old_value: old_val.to_string(),
+                    new_value: new_val.to_string(),
+                });
+            }
+        }
+    };
+}
+
+/// Compute audit entries (field-level diffs) and metric entries from upsert diff rows.
+fn compute_diffs(rows: &[UpsertDiffRow]) -> (Vec<AuditEntry>, Vec<MetricEntry>) {
+    let mut audits = Vec::new();
+    let mut metrics = Vec::new();
+
+    for row in rows {
+        // Non-nullable fields
+        diff_field!(audits, row, "enrollment", old_enrollment, new_enrollment);
+        diff_field!(
+            audits,
+            row,
+            "max_enrollment",
+            old_max_enrollment,
+            new_max_enrollment
+        );
+        diff_field!(audits, row, "wait_count", old_wait_count, new_wait_count);
+        diff_field!(
+            audits,
+            row,
+            "wait_capacity",
+            old_wait_capacity,
+            new_wait_capacity
+        );
+        diff_field!(audits, row, "subject", old_subject, new_subject);
+        diff_field!(
+            audits,
+            row,
+            "course_number",
+            old_course_number,
+            new_course_number
+        );
+        diff_field!(audits, row, "title", old_title, new_title);
+
+        // Nullable text fields
+        diff_field!(opt audits, row, "sequence_number", old_sequence_number, new_sequence_number);
+        diff_field!(opt audits, row, "part_of_term", old_part_of_term, new_part_of_term);
+        diff_field!(opt audits, row, "instructional_method", old_instructional_method, new_instructional_method);
+        diff_field!(opt audits, row, "campus", old_campus, new_campus);
+
+        // Nullable int fields
+        diff_field!(opt audits, row, "credit_hours", old_credit_hours, new_credit_hours);
+        diff_field!(opt audits, row, "credit_hour_low", old_credit_hour_low, new_credit_hour_low);
+        diff_field!(opt audits, row, "credit_hour_high", old_credit_hour_high, new_credit_hour_high);
+
+        // Cross-list fields
+        diff_field!(opt audits, row, "cross_list", old_cross_list, new_cross_list);
+        diff_field!(opt audits, row, "cross_list_capacity", old_cross_list_capacity, new_cross_list_capacity);
+        diff_field!(opt audits, row, "cross_list_count", old_cross_list_count, new_cross_list_count);
+
+        // Link fields
+        diff_field!(opt audits, row, "link_identifier", old_link_identifier, new_link_identifier);
+        diff_field!(opt audits, row, "is_section_linked", old_is_section_linked, new_is_section_linked);
+
+        // JSONB fields
+        diff_field!(json audits, row, "meeting_times", old_meeting_times, new_meeting_times);
+        diff_field!(json audits, row, "attributes", old_attributes, new_attributes);
+
+        // Emit a metric entry when enrollment/wait_count/max_enrollment changed.
+        // Skip fresh inserts (no old data to compare against).
+        let enrollment_changed = row.old_id.is_some()
+            && (row.old_enrollment != Some(row.new_enrollment)
+                || row.old_wait_count != Some(row.new_wait_count)
+                || row.old_max_enrollment != Some(row.new_max_enrollment));
+
+        if enrollment_changed {
+            metrics.push(MetricEntry {
+                course_id: row.id,
+                enrollment: row.new_enrollment,
+                wait_count: row.new_wait_count,
+                seats_available: row.new_max_enrollment - row.new_enrollment,
+            });
+        }
+    }
+
+    (audits, metrics)
+}
+
+// ---------------------------------------------------------------------------
+// Task 4: Batch insert functions for audits and metrics
+// ---------------------------------------------------------------------------
+
+async fn insert_audits(audits: &[AuditEntry], conn: &mut PgConnection) -> Result<()> {
+    if audits.is_empty() {
+        return Ok(());
+    }
+
+    let course_ids: Vec<i32> = audits.iter().map(|a| a.course_id).collect();
+    let fields: Vec<&str> = audits.iter().map(|a| a.field_changed).collect();
+    let old_values: Vec<&str> = audits.iter().map(|a| a.old_value.as_str()).collect();
+    let new_values: Vec<&str> = audits.iter().map(|a| a.new_value.as_str()).collect();
+
+    sqlx::query(
+        r#"
+        INSERT INTO course_audits (course_id, timestamp, field_changed, old_value, new_value)
+        SELECT v.course_id, NOW(), v.field_changed, v.old_value, v.new_value
+        FROM UNNEST($1::int4[], $2::text[], $3::text[], $4::text[])
+            AS v(course_id, field_changed, old_value, new_value)
+        "#,
+    )
+    .bind(&course_ids)
+    .bind(&fields)
+    .bind(&old_values)
+    .bind(&new_values)
+    .execute(&mut *conn)
+    .await
+    .map_err(|e| anyhow::anyhow!("Failed to batch insert course_audits: {}", e))?;
+
+    Ok(())
+}
+
+async fn insert_metrics(metrics: &[MetricEntry], conn: &mut PgConnection) -> Result<()> {
+    if metrics.is_empty() {
+        return Ok(());
+    }
+
+    let course_ids: Vec<i32> = metrics.iter().map(|m| m.course_id).collect();
+    let enrollments: Vec<i32> = metrics.iter().map(|m| m.enrollment).collect();
+    let wait_counts: Vec<i32> = metrics.iter().map(|m| m.wait_count).collect();
+    let seats_available: Vec<i32> = metrics.iter().map(|m| m.seats_available).collect();
+
+    sqlx::query(
+        r#"
+        INSERT INTO course_metrics (course_id, timestamp, enrollment, wait_count, seats_available)
+        SELECT v.course_id, NOW(), v.enrollment, v.wait_count, v.seats_available
+        FROM UNNEST($1::int4[], $2::int4[], $3::int4[], $4::int4[])
+            AS v(course_id, enrollment, wait_count, seats_available)
+        "#,
+    )
+    .bind(&course_ids)
+    .bind(&enrollments)
+    .bind(&wait_counts)
+    .bind(&seats_available)
+    .execute(&mut *conn)
+    .await
+    .map_err(|e| anyhow::anyhow!("Failed to batch insert course_metrics: {}", e))?;
+
+    Ok(())
+}
+
+// ---------------------------------------------------------------------------
+// Core upsert functions (updated to use &mut PgConnection)
+// ---------------------------------------------------------------------------
+
 /// Batch upsert courses in a single database query.
 ///
 /// Performs a bulk INSERT...ON CONFLICT DO UPDATE for all courses, including
-/// new fields (meeting times, attributes, instructor data). Returns the
-/// database IDs for all upserted courses (in input order) so instructors
-/// can be linked.
+/// new fields (meeting times, attributes, instructor data). Captures pre-update
+/// state for audit/metric tracking, all within a single transaction.
 ///
 /// # Performance
-/// - Reduces N database round-trips to 3 (courses, instructors, junction)
+/// - Reduces N database round-trips to 5 (old-data CTE + upsert, audits, metrics, instructors, junction)
 /// - Typical usage: 50-200 courses per batch
 pub async fn batch_upsert_courses(courses: &[Course], db_pool: &PgPool) -> Result<()> {
     if courses.is_empty() {

@@ -76,27 +377,47 @@ pub async fn batch_upsert_courses(courses: &[Course], db_pool: &PgPool) -> Resul
     let start = Instant::now();
     let course_count = courses.len();
 
-    // Step 1: Upsert courses with all fields, returning IDs
-    let course_ids = upsert_courses(courses, db_pool).await?;
+    let mut tx = db_pool.begin().await?;
 
-    // Step 2: Upsert instructors (deduplicated across batch)
-    upsert_instructors(courses, db_pool).await?;
+    // Step 1: Upsert courses with CTE, returning diff rows
+    let diff_rows = upsert_courses(courses, &mut tx).await?;
 
-    // Step 3: Link courses to instructors via junction table
-    upsert_course_instructors(courses, &course_ids, db_pool).await?;
+    // Step 2: Extract course IDs for instructor linking
+    let course_ids: Vec<i32> = diff_rows.iter().map(|r| r.id).collect();
+
+    // Step 3: Compute audit/metric diffs
+    let (audits, metrics) = compute_diffs(&diff_rows);
+
+    // Step 4: Insert audits and metrics
+    insert_audits(&audits, &mut tx).await?;
+    insert_metrics(&metrics, &mut tx).await?;
+
+    // Step 5: Upsert instructors (deduplicated across batch)
+    upsert_instructors(courses, &mut tx).await?;
+
+    // Step 6: Link courses to instructors via junction table
+    upsert_course_instructors(courses, &course_ids, &mut tx).await?;
+
+    tx.commit().await?;
 
     let duration = start.elapsed();
     info!(
         courses_count = course_count,
+        audit_entries = audits.len(),
+        metric_entries = metrics.len(),
         duration_ms = duration.as_millis(),
-        "Batch upserted courses with instructors"
+        "Batch upserted courses with instructors, audits, and metrics"
     );
 
     Ok(())
 }
 
-/// Upsert all courses and return their database IDs in input order.
-async fn upsert_courses(courses: &[Course], db_pool: &PgPool) -> Result<Vec<i32>> {
+// ---------------------------------------------------------------------------
+// Task 2: CTE-based upsert returning old+new values
+// ---------------------------------------------------------------------------
+
+/// Upsert all courses and return diff rows with old and new values for auditing.
+async fn upsert_courses(courses: &[Course], conn: &mut PgConnection) -> Result<Vec<UpsertDiffRow>> {
     let crns: Vec<&str> = courses
         .iter()
         .map(|c| c.course_reference_number.as_str())
@@ -143,67 +464,106 @@ async fn upsert_courses(courses: &[Course], db_pool: &PgPool) -> Result<Vec<i32>
|
||||
courses.iter().map(to_db_meeting_times).collect();
|
||||
let attributes_json: Vec<serde_json::Value> = courses.iter().map(to_db_attributes).collect();
|
||||
|
||||
let rows = sqlx::query_scalar::<_, i32>(
|
||||
let rows = sqlx::query_as::<_, UpsertDiffRow>(
|
||||
r#"
|
||||
INSERT INTO courses (
|
||||
crn, subject, course_number, title, term_code,
|
||||
enrollment, max_enrollment, wait_count, wait_capacity, last_scraped_at,
|
||||
sequence_number, part_of_term, instructional_method, campus,
|
||||
credit_hours, credit_hour_low, credit_hour_high,
|
||||
cross_list, cross_list_capacity, cross_list_count,
|
||||
link_identifier, is_section_linked,
|
||||
meeting_times, attributes
|
||||
WITH old_data AS (
|
||||
SELECT id, enrollment, max_enrollment, wait_count, wait_capacity,
|
||||
subject, course_number, title,
|
||||
sequence_number, part_of_term, instructional_method, campus,
|
||||
credit_hours, credit_hour_low, credit_hour_high,
|
||||
cross_list, cross_list_capacity, cross_list_count,
|
||||
link_identifier, is_section_linked,
|
||||
meeting_times, attributes,
|
||||
crn, term_code
|
||||
FROM courses
|
||||
WHERE (crn, term_code) IN (SELECT * FROM UNNEST($1::text[], $5::text[]))
|
||||
),
|
||||
upserted AS (
|
||||
INSERT INTO courses (
|
||||
crn, subject, course_number, title, term_code,
|
||||
enrollment, max_enrollment, wait_count, wait_capacity, last_scraped_at,
|
||||
sequence_number, part_of_term, instructional_method, campus,
|
||||
credit_hours, credit_hour_low, credit_hour_high,
|
||||
cross_list, cross_list_capacity, cross_list_count,
|
||||
link_identifier, is_section_linked,
|
||||
meeting_times, attributes
|
||||
)
|
||||
SELECT
|
||||
v.crn, v.subject, v.course_number, v.title, v.term_code,
|
||||
v.enrollment, v.max_enrollment, v.wait_count, v.wait_capacity, NOW(),
|
||||
v.sequence_number, v.part_of_term, v.instructional_method, v.campus,
|
||||
v.credit_hours, v.credit_hour_low, v.credit_hour_high,
|
||||
v.cross_list, v.cross_list_capacity, v.cross_list_count,
|
||||
v.link_identifier, v.is_section_linked,
|
||||
v.meeting_times, v.attributes
|
||||
FROM UNNEST(
|
||||
$1::text[], $2::text[], $3::text[], $4::text[], $5::text[],
|
||||
$6::int4[], $7::int4[], $8::int4[], $9::int4[],
|
||||
$10::text[], $11::text[], $12::text[], $13::text[],
|
||||
$14::int4[], $15::int4[], $16::int4[],
|
||||
$17::text[], $18::int4[], $19::int4[],
|
||||
$20::text[], $21::bool[],
|
||||
$22::jsonb[], $23::jsonb[]
|
||||
) AS v(
|
||||
crn, subject, course_number, title, term_code,
|
||||
enrollment, max_enrollment, wait_count, wait_capacity,
|
||||
sequence_number, part_of_term, instructional_method, campus,
|
||||
credit_hours, credit_hour_low, credit_hour_high,
|
||||
cross_list, cross_list_capacity, cross_list_count,
|
||||
link_identifier, is_section_linked,
|
||||
meeting_times, attributes
|
||||
)
|
||||
ON CONFLICT (crn, term_code)
|
||||
DO UPDATE SET
|
subject = EXCLUDED.subject,
course_number = EXCLUDED.course_number,
title = EXCLUDED.title,
enrollment = EXCLUDED.enrollment,
max_enrollment = EXCLUDED.max_enrollment,
wait_count = EXCLUDED.wait_count,
wait_capacity = EXCLUDED.wait_capacity,
last_scraped_at = EXCLUDED.last_scraped_at,
sequence_number = EXCLUDED.sequence_number,
part_of_term = EXCLUDED.part_of_term,
instructional_method = EXCLUDED.instructional_method,
campus = EXCLUDED.campus,
credit_hours = EXCLUDED.credit_hours,
credit_hour_low = EXCLUDED.credit_hour_low,
credit_hour_high = EXCLUDED.credit_hour_high,
cross_list = EXCLUDED.cross_list,
cross_list_capacity = EXCLUDED.cross_list_capacity,
cross_list_count = EXCLUDED.cross_list_count,
link_identifier = EXCLUDED.link_identifier,
is_section_linked = EXCLUDED.is_section_linked,
meeting_times = EXCLUDED.meeting_times,
attributes = EXCLUDED.attributes
RETURNING *
)
SELECT
v.crn, v.subject, v.course_number, v.title, v.term_code,
v.enrollment, v.max_enrollment, v.wait_count, v.wait_capacity, NOW(),
v.sequence_number, v.part_of_term, v.instructional_method, v.campus,
v.credit_hours, v.credit_hour_low, v.credit_hour_high,
v.cross_list, v.cross_list_capacity, v.cross_list_count,
v.link_identifier, v.is_section_linked,
v.meeting_times, v.attributes
FROM UNNEST(
$1::text[], $2::text[], $3::text[], $4::text[], $5::text[],
$6::int4[], $7::int4[], $8::int4[], $9::int4[],
$10::text[], $11::text[], $12::text[], $13::text[],
$14::int4[], $15::int4[], $16::int4[],
$17::text[], $18::int4[], $19::int4[],
$20::text[], $21::bool[],
$22::jsonb[], $23::jsonb[]
) AS v(
crn, subject, course_number, title, term_code,
enrollment, max_enrollment, wait_count, wait_capacity,
sequence_number, part_of_term, instructional_method, campus,
credit_hours, credit_hour_low, credit_hour_high,
cross_list, cross_list_capacity, cross_list_count,
link_identifier, is_section_linked,
meeting_times, attributes
)
ON CONFLICT (crn, term_code)
DO UPDATE SET
subject = EXCLUDED.subject,
course_number = EXCLUDED.course_number,
title = EXCLUDED.title,
enrollment = EXCLUDED.enrollment,
max_enrollment = EXCLUDED.max_enrollment,
wait_count = EXCLUDED.wait_count,
wait_capacity = EXCLUDED.wait_capacity,
last_scraped_at = EXCLUDED.last_scraped_at,
sequence_number = EXCLUDED.sequence_number,
part_of_term = EXCLUDED.part_of_term,
instructional_method = EXCLUDED.instructional_method,
campus = EXCLUDED.campus,
credit_hours = EXCLUDED.credit_hours,
credit_hour_low = EXCLUDED.credit_hour_low,
credit_hour_high = EXCLUDED.credit_hour_high,
cross_list = EXCLUDED.cross_list,
cross_list_capacity = EXCLUDED.cross_list_capacity,
cross_list_count = EXCLUDED.cross_list_count,
link_identifier = EXCLUDED.link_identifier,
is_section_linked = EXCLUDED.is_section_linked,
meeting_times = EXCLUDED.meeting_times,
attributes = EXCLUDED.attributes
RETURNING id
SELECT u.id,
o.id AS old_id,
o.enrollment AS old_enrollment, u.enrollment AS new_enrollment,
o.max_enrollment AS old_max_enrollment, u.max_enrollment AS new_max_enrollment,
o.wait_count AS old_wait_count, u.wait_count AS new_wait_count,
o.wait_capacity AS old_wait_capacity, u.wait_capacity AS new_wait_capacity,
o.subject AS old_subject, u.subject AS new_subject,
o.course_number AS old_course_number, u.course_number AS new_course_number,
o.title AS old_title, u.title AS new_title,
o.sequence_number AS old_sequence_number, u.sequence_number AS new_sequence_number,
o.part_of_term AS old_part_of_term, u.part_of_term AS new_part_of_term,
o.instructional_method AS old_instructional_method, u.instructional_method AS new_instructional_method,
o.campus AS old_campus, u.campus AS new_campus,
o.credit_hours AS old_credit_hours, u.credit_hours AS new_credit_hours,
o.credit_hour_low AS old_credit_hour_low, u.credit_hour_low AS new_credit_hour_low,
o.credit_hour_high AS old_credit_hour_high, u.credit_hour_high AS new_credit_hour_high,
o.cross_list AS old_cross_list, u.cross_list AS new_cross_list,
o.cross_list_capacity AS old_cross_list_capacity, u.cross_list_capacity AS new_cross_list_capacity,
o.cross_list_count AS old_cross_list_count, u.cross_list_count AS new_cross_list_count,
o.link_identifier AS old_link_identifier, u.link_identifier AS new_link_identifier,
o.is_section_linked AS old_is_section_linked, u.is_section_linked AS new_is_section_linked,
o.meeting_times AS old_meeting_times, u.meeting_times AS new_meeting_times,
o.attributes AS old_attributes, u.attributes AS new_attributes
FROM upserted u
LEFT JOIN old_data o ON u.crn = o.crn AND u.term_code = o.term_code
"#,
)
.bind(&crns)
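Editor's note: the query above binds 23 parallel column arrays and zips them back into rows with UNNEST, so a whole batch is one round trip instead of one statement per course. A minimal sketch of the same pattern, using a hypothetical `example` table rather than the project's real schema:

// Sketch only: column-wise binding for a batched upsert via UNNEST.
use sqlx::PgPool;

async fn batch_upsert(pool: &PgPool, rows: &[(String, i32)]) -> anyhow::Result<()> {
    // One Vec per column, in parameter order; sqlx binds each Vec as a
    // Postgres array, and UNNEST zips them back into rows server-side.
    let (names, counts): (Vec<String>, Vec<i32>) = rows.iter().cloned().unzip();
    sqlx::query(
        "INSERT INTO example (name, count) \
         SELECT * FROM UNNEST($1::text[], $2::int4[]) \
         ON CONFLICT (name) DO UPDATE SET count = EXCLUDED.count",
    )
    .bind(&names)
    .bind(&counts)
    .execute(pool)
    .await?;
    Ok(())
}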
@@ -229,7 +589,7 @@ async fn upsert_courses(courses: &[Course], db_pool: &PgPool) -> Result<Vec<i32>
.bind(&is_section_linkeds)
.bind(&meeting_times_json)
.bind(&attributes_json)
.fetch_all(db_pool)
.fetch_all(&mut *conn)
.await
.map_err(|e| anyhow::anyhow!("Failed to batch upsert courses: {}", e))?;

@@ -237,7 +597,7 @@ async fn upsert_courses(courses: &[Course], db_pool: &PgPool) -> Result<Vec<i32>
}

/// Deduplicate and upsert all instructors from the batch.
async fn upsert_instructors(courses: &[Course], db_pool: &PgPool) -> Result<()> {
async fn upsert_instructors(courses: &[Course], conn: &mut PgConnection) -> Result<()> {
let mut seen = HashSet::new();
let mut banner_ids = Vec::new();
let mut display_names = Vec::new();
@@ -270,7 +630,7 @@ async fn upsert_instructors(courses: &[Course], db_pool: &PgPool) -> Result<()>
.bind(&banner_ids)
.bind(&display_names)
.bind(&emails)
.execute(db_pool)
.execute(&mut *conn)
.await
.map_err(|e| anyhow::anyhow!("Failed to batch upsert instructors: {}", e))?;

@@ -281,7 +641,7 @@ async fn upsert_instructors(courses: &[Course], db_pool: &PgPool) -> Result<()>
async fn upsert_course_instructors(
courses: &[Course],
course_ids: &[i32],
db_pool: &PgPool,
conn: &mut PgConnection,
) -> Result<()> {
let mut cids = Vec::new();
let mut iids = Vec::new();
@@ -303,7 +663,7 @@ async fn upsert_course_instructors(
// This handles instructor changes cleanly.
sqlx::query("DELETE FROM course_instructors WHERE course_id = ANY($1)")
.bind(&cids)
.execute(db_pool)
.execute(&mut *conn)
.await?;

sqlx::query(
@@ -317,7 +677,7 @@ async fn upsert_course_instructors(
.bind(&cids)
.bind(&iids)
.bind(&primaries)
.execute(db_pool)
.execute(&mut *conn)
.await
.map_err(|e| anyhow::anyhow!("Failed to batch upsert course_instructors: {}", e))?;
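Editor's note: these hunks swap `&PgPool` for `&mut PgConnection` so all three upserts can share one connection, presumably inside a single transaction. A hedged sketch of what the call site likely looks like (the actual wiring is not shown in this compare; names are taken from the hunks above):

// Sketch only, assuming the caller opens the transaction.
use sqlx::PgPool;

async fn upsert_all(courses: &[Course], pool: &PgPool) -> anyhow::Result<()> {
    let mut tx = pool.begin().await?;
    // `&mut *tx` derefs the Transaction to the underlying PgConnection.
    let course_ids = upsert_courses(courses, &mut *tx).await?;
    upsert_instructors(courses, &mut *tx).await?;
    upsert_course_instructors(courses, &course_ids, &mut *tx).await?;
    // A failure anywhere above rolls back courses, instructors, and links together.
    tx.commit().await?;
    Ok(())
}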
+4
-5
@@ -92,9 +92,8 @@ pub async fn search_courses(
) -> Result<(Vec<Course>, i64)> {
let order_by = sort_clause(sort_by, sort_dir);

let data_query = format!(
"SELECT * FROM courses {SEARCH_WHERE} ORDER BY {order_by} LIMIT $9 OFFSET $10"
);
let data_query =
format!("SELECT * FROM courses {SEARCH_WHERE} ORDER BY {order_by} LIMIT $9 OFFSET $10");
let count_query = format!("SELECT COUNT(*) FROM courses {SEARCH_WHERE}");

let courses = sqlx::query_as::<_, Course>(&data_query)
@@ -149,7 +148,7 @@ pub async fn get_course_instructors(
let rows = sqlx::query_as::<_, CourseInstructorDetail>(
r#"
SELECT i.banner_id, i.display_name, i.email, ci.is_primary,
rp.avg_rating, rp.num_ratings,
rp.avg_rating, rp.num_ratings, i.rmp_legacy_id,
ci.course_id
FROM course_instructors ci
JOIN instructors i ON i.banner_id = ci.instructor_id
@@ -178,7 +177,7 @@ pub async fn get_instructors_for_courses(
let rows = sqlx::query_as::<_, CourseInstructorDetail>(
r#"
SELECT i.banner_id, i.display_name, i.email, ci.is_primary,
rp.avg_rating, rp.num_ratings,
rp.avg_rating, rp.num_ratings, i.rmp_legacy_id,
ci.course_id
FROM course_instructors ci
JOIN instructors i ON i.banner_id = ci.instructor_id

@@ -6,3 +6,5 @@ pub mod models;
pub mod reference;
pub mod rmp;
pub mod scrape_jobs;
pub mod sessions;
pub mod users;
+67
-1
@@ -1,10 +1,46 @@
//! `sqlx` models for the database schema.

use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
use serde::{Deserialize, Deserializer, Serialize, Serializer};
use serde_json::Value;
use ts_rs::TS;

/// Serialize an `i64` as a string to avoid JavaScript precision loss for values exceeding 2^53.
fn serialize_i64_as_string<S: Serializer>(value: &i64, serializer: S) -> Result<S::Ok, S::Error> {
serializer.serialize_str(&value.to_string())
}

/// Deserialize an `i64` from either a number or a string.
fn deserialize_i64_from_string<'de, D: Deserializer<'de>>(
deserializer: D,
) -> Result<i64, D::Error> {
use serde::de;

struct I64OrStringVisitor;

impl<'de> de::Visitor<'de> for I64OrStringVisitor {
type Value = i64;

fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result {
formatter.write_str("an integer or a string containing an integer")
}

fn visit_i64<E: de::Error>(self, value: i64) -> Result<i64, E> {
Ok(value)
}

fn visit_u64<E: de::Error>(self, value: u64) -> Result<i64, E> {
i64::try_from(value).map_err(|_| E::custom(format!("u64 {value} out of i64 range")))
}

fn visit_str<E: de::Error>(self, value: &str) -> Result<i64, E> {
value.parse().map_err(de::Error::custom)
}
}

deserializer.deserialize_any(I64OrStringVisitor)
}
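Editor's note: a hypothetical round-trip test for the two helpers above (not part of the diff); it shows why Discord snowflake IDs must cross the wire as strings:

#[cfg(test)]
mod i64_string_tests {
    use super::*;

    #[derive(serde::Serialize, serde::Deserialize)]
    struct Snowflake {
        #[serde(
            serialize_with = "serialize_i64_as_string",
            deserialize_with = "deserialize_i64_from_string"
        )]
        id: i64,
    }

    #[test]
    fn round_trips_as_string() {
        // 2^53 + 1: not representable exactly as a JS number.
        let s = Snowflake { id: 9_007_199_254_740_993 };
        let json = serde_json::to_string(&s).unwrap();
        assert_eq!(json, r#"{"id":"9007199254740993"}"#);

        // Deserialization accepts both the string form and a plain number.
        let back: Snowflake = serde_json::from_str(&json).unwrap();
        assert_eq!(back.id, s.id);
        let from_num: Snowflake = serde_json::from_str(r#"{"id":9007199254740993}"#).unwrap();
        assert_eq!(from_num.id, s.id);
    }
}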
/// Represents a meeting time stored as JSONB in the courses table.
#[derive(Debug, Clone, Serialize, Deserialize, TS)]
#[ts(export)]
@@ -85,6 +121,7 @@ pub struct CourseInstructorDetail {
pub is_primary: bool,
pub avg_rating: Option<f32>,
pub num_ratings: Option<i32>,
pub rmp_legacy_id: Option<i32>,
/// Present when fetched via batch query; `None` for single-course queries.
pub course_id: Option<i32>,
}
@@ -155,3 +192,32 @@ pub struct ScrapeJob {
/// Maximum number of retry attempts allowed (non-negative, enforced by CHECK constraint)
pub max_retries: i32,
}

/// A user authenticated via Discord OAuth.
#[derive(sqlx::FromRow, Debug, Clone, Serialize, Deserialize, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export)]
pub struct User {
#[serde(
serialize_with = "serialize_i64_as_string",
deserialize_with = "deserialize_i64_from_string"
)]
#[ts(type = "string")]
pub discord_id: i64,
pub discord_username: String,
pub discord_avatar_hash: Option<String>,
pub is_admin: bool,
pub created_at: DateTime<Utc>,
pub updated_at: DateTime<Utc>,
}

/// A server-side session for an authenticated user.
#[allow(dead_code)] // Fields read via sqlx::FromRow; some only used in DB queries
#[derive(sqlx::FromRow, Debug, Clone)]
pub struct UserSession {
pub id: String,
pub user_id: i64,
pub created_at: DateTime<Utc>,
pub expires_at: DateTime<Utc>,
pub last_active_at: DateTime<Utc>,
}
+38
-9
@@ -5,11 +5,33 @@ use crate::error::Result;
use sqlx::PgPool;
use std::collections::HashSet;

/// Force-unlock all jobs that have a non-NULL `locked_at`.
///
/// Intended to be called once at startup to recover jobs left locked by
/// a previous unclean shutdown (crash, OOM kill, etc.).
///
/// # Returns
/// The number of jobs that were unlocked.
pub async fn force_unlock_all(db_pool: &PgPool) -> Result<u64> {
let result = sqlx::query("UPDATE scrape_jobs SET locked_at = NULL WHERE locked_at IS NOT NULL")
.execute(db_pool)
.await?;
Ok(result.rows_affected())
}

/// How long a lock can be held before it is considered expired and reclaimable.
///
/// This acts as a safety net for cases where a worker dies without unlocking
/// (OOM kill, crash, network partition). Under normal operation, the worker's
/// own job timeout fires well before this threshold.
const LOCK_EXPIRY: std::time::Duration = std::time::Duration::from_secs(10 * 60);

/// Atomically fetch and lock the next available scrape job.
///
/// Uses `FOR UPDATE SKIP LOCKED` to allow multiple workers to poll the queue
/// concurrently without conflicts. Only jobs that are unlocked and ready to
/// execute (based on `execute_at`) are considered.
/// concurrently without conflicts. Considers jobs that are:
/// - Unlocked and ready to execute, OR
/// - Locked but past [`LOCK_EXPIRY`] (abandoned by a dead worker)
///
/// # Arguments
/// * `db_pool` - PostgreSQL connection pool
@@ -20,9 +42,16 @@ use std::collections::HashSet;
pub async fn fetch_and_lock_job(db_pool: &PgPool) -> Result<Option<ScrapeJob>> {
let mut tx = db_pool.begin().await?;

let lock_expiry_secs = LOCK_EXPIRY.as_secs() as i32;
let job = sqlx::query_as::<_, ScrapeJob>(
"SELECT * FROM scrape_jobs WHERE locked_at IS NULL AND execute_at <= NOW() ORDER BY priority DESC, execute_at ASC LIMIT 1 FOR UPDATE SKIP LOCKED"
"SELECT * FROM scrape_jobs \
WHERE (locked_at IS NULL OR locked_at < NOW() - make_interval(secs => $1::double precision)) \
AND execute_at <= NOW() \
ORDER BY priority DESC, execute_at ASC \
LIMIT 1 \
FOR UPDATE SKIP LOCKED"
)
.bind(lock_expiry_secs)
.fetch_optional(&mut *tx)
.await?;
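Editor's note: a hedged sketch of the worker-side poll loop this function enables (the real loop lives in worker.rs; function names other than `fetch_and_lock_job` and `unlock_and_increment_retry` are assumptions):

// Illustration only: SKIP LOCKED lets many workers run this concurrently,
// since competing SELECTs skip rows another transaction already locked.
async fn poll_once(db_pool: &sqlx::PgPool) -> anyhow::Result<()> {
    if let Some(job) = fetch_and_lock_job(db_pool).await? {
        // ... process `job`; on success remove it, on recoverable failure
        // call unlock_and_increment_retry so another worker can pick it up ...
        let _ = job;
    } else {
        // Queue empty: back off briefly before polling again.
        tokio::time::sleep(std::time::Duration::from_secs(1)).await;
    }
    Ok(())
}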
@@ -90,7 +119,7 @@ pub async fn unlock_and_increment_retry(
"UPDATE scrape_jobs
SET locked_at = NULL, retry_count = retry_count + 1
WHERE id = $1
RETURNING CASE WHEN retry_count < $2 THEN retry_count ELSE NULL END",
RETURNING CASE WHEN retry_count <= $2 THEN retry_count ELSE NULL END",
)
.bind(job_id)
.bind(max_retries)
@@ -100,10 +129,10 @@ pub async fn unlock_and_increment_retry(
Ok(result.is_some())
}

/// Find existing unlocked job payloads matching the given target type and candidates.
/// Find existing job payloads matching the given target type and candidates.
///
/// Returns a set of stringified JSON payloads that already exist in the queue,
/// used for deduplication when scheduling new jobs.
/// Returns a set of stringified JSON payloads that already exist in the queue
/// (both locked and unlocked), used for deduplication when scheduling new jobs.
///
/// # Arguments
/// * `target_type` - The target type to filter by
@@ -111,7 +140,7 @@ pub async fn unlock_and_increment_retry(
/// * `db_pool` - PostgreSQL connection pool
///
/// # Returns
/// A `HashSet` of stringified JSON payloads that already have pending jobs
/// A `HashSet` of stringified JSON payloads that already have pending or in-progress jobs
pub async fn find_existing_job_payloads(
target_type: TargetType,
candidate_payloads: &[serde_json::Value],
@@ -119,7 +148,7 @@ pub async fn find_existing_job_payloads(
) -> Result<HashSet<String>> {
let existing_jobs: Vec<(serde_json::Value,)> = sqlx::query_as(
"SELECT target_payload FROM scrape_jobs
WHERE target_type = $1 AND target_payload = ANY($2) AND locked_at IS NULL",
WHERE target_type = $1 AND target_payload = ANY($2)",
)
.bind(target_type)
.bind(candidate_payloads)
@@ -0,0 +1,90 @@
//! Database query functions for user sessions.

use anyhow::Context;
use rand::Rng;
use sqlx::PgPool;

use super::models::UserSession;
use crate::error::Result;

/// Generate a cryptographically random 32-byte hex token.
fn generate_token() -> String {
let bytes: [u8; 32] = rand::rng().random();
bytes.iter().map(|b| format!("{b:02x}")).collect()
}

/// Create a new session for a user with the given duration.
pub async fn create_session(
pool: &PgPool,
user_id: i64,
duration: std::time::Duration,
) -> Result<UserSession> {
let token = generate_token();
let duration_secs = duration.as_secs() as i64;

sqlx::query_as::<_, UserSession>(
r#"
INSERT INTO user_sessions (id, user_id, expires_at)
VALUES ($1, $2, now() + make_interval(secs => $3::double precision))
RETURNING *
"#,
)
.bind(&token)
.bind(user_id)
.bind(duration_secs as f64)
.fetch_one(pool)
.await
.context("failed to create session")
}

/// Fetch a session by token, only if it has not expired.
pub async fn get_session(pool: &PgPool, token: &str) -> Result<Option<UserSession>> {
sqlx::query_as::<_, UserSession>(
"SELECT * FROM user_sessions WHERE id = $1 AND expires_at > now()",
)
.bind(token)
.fetch_optional(pool)
.await
.context("failed to get session")
}

/// Update the last-active timestamp for a session.
pub async fn touch_session(pool: &PgPool, token: &str) -> Result<()> {
sqlx::query("UPDATE user_sessions SET last_active_at = now() WHERE id = $1")
.bind(token)
.execute(pool)
.await
.context("failed to touch session")?;
Ok(())
}

/// Delete a session by token.
pub async fn delete_session(pool: &PgPool, token: &str) -> Result<()> {
sqlx::query("DELETE FROM user_sessions WHERE id = $1")
.bind(token)
.execute(pool)
.await
.context("failed to delete session")?;
Ok(())
}

/// Delete all sessions for a user. Returns the number of sessions deleted.
#[allow(dead_code)] // Available for admin user-deletion flow
pub async fn delete_user_sessions(pool: &PgPool, user_id: i64) -> Result<u64> {
let result = sqlx::query("DELETE FROM user_sessions WHERE user_id = $1")
.bind(user_id)
.execute(pool)
.await
.context("failed to delete user sessions")?;
Ok(result.rows_affected())
}

/// Delete all expired sessions. Returns the number of sessions cleaned up.
#[allow(dead_code)] // Called by SessionCache::cleanup_expired (not yet wired to periodic task)
pub async fn cleanup_expired(pool: &PgPool) -> Result<u64> {
let result = sqlx::query("DELETE FROM user_sessions WHERE expires_at <= now()")
.execute(pool)
.await
.context("failed to cleanup expired sessions")?;
Ok(result.rows_affected())
}
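Editor's note: a hypothetical end-to-end use of the functions above (the real call sites are the auth handlers later in this compare):

// Sketch only: full session lifecycle against the queries in this file.
async fn session_demo(pool: &sqlx::PgPool, user_id: i64) -> anyhow::Result<()> {
    // The opaque `session.id` is what ends up in the cookie.
    let session =
        create_session(pool, user_id, std::time::Duration::from_secs(7 * 24 * 3600)).await?;

    // Lookup only succeeds while the session is unexpired.
    if let Some(live) = get_session(pool, &session.id).await? {
        touch_session(pool, &live.id).await?; // sliding "last active" timestamp
    }

    delete_session(pool, &session.id).await?; // logout path
    Ok(())
}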
@@ -0,0 +1,86 @@
//! Database query functions for users.

use anyhow::Context;
use sqlx::PgPool;

use super::models::User;
use crate::error::Result;

/// Insert a new user or update username/avatar on conflict.
pub async fn upsert_user(
pool: &PgPool,
discord_id: i64,
username: &str,
avatar_hash: Option<&str>,
) -> Result<User> {
sqlx::query_as::<_, User>(
r#"
INSERT INTO users (discord_id, discord_username, discord_avatar_hash)
VALUES ($1, $2, $3)
ON CONFLICT (discord_id) DO UPDATE
SET discord_username = EXCLUDED.discord_username,
discord_avatar_hash = EXCLUDED.discord_avatar_hash,
updated_at = now()
RETURNING *
"#,
)
.bind(discord_id)
.bind(username)
.bind(avatar_hash)
.fetch_one(pool)
.await
.context("failed to upsert user")
}

/// Fetch a user by Discord ID.
pub async fn get_user(pool: &PgPool, discord_id: i64) -> Result<Option<User>> {
sqlx::query_as::<_, User>("SELECT * FROM users WHERE discord_id = $1")
.bind(discord_id)
.fetch_optional(pool)
.await
.context("failed to get user")
}

/// List all users ordered by creation date (newest first).
pub async fn list_users(pool: &PgPool) -> Result<Vec<User>> {
sqlx::query_as::<_, User>("SELECT * FROM users ORDER BY created_at DESC")
.fetch_all(pool)
.await
.context("failed to list users")
}

/// Set the admin flag for a user, returning the updated user if found.
pub async fn set_admin(pool: &PgPool, discord_id: i64, is_admin: bool) -> Result<Option<User>> {
sqlx::query_as::<_, User>(
r#"
UPDATE users
SET is_admin = $2, updated_at = now()
WHERE discord_id = $1
RETURNING *
"#,
)
.bind(discord_id)
.bind(is_admin)
.fetch_optional(pool)
.await
.context("failed to set admin status")
}

/// Ensure a seed admin exists. Upserts with `is_admin = true` and a placeholder
/// username that will be replaced on first OAuth login.
pub async fn ensure_seed_admin(pool: &PgPool, discord_id: i64) -> Result<User> {
sqlx::query_as::<_, User>(
r#"
INSERT INTO users (discord_id, discord_username, is_admin)
VALUES ($1, 'seed-admin', true)
ON CONFLICT (discord_id) DO UPDATE
SET is_admin = true,
updated_at = now()
RETURNING *
"#,
)
.bind(discord_id)
.fetch_one(pool)
.await
.context("failed to ensure seed admin")
}
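Editor's note: this compare does not show where `ensure_seed_admin` is invoked; a plausible startup hook might look like the following, with the environment variable name being purely an assumption:

// Sketch only: bootstrap a first admin from an assumed env var at startup.
async fn seed_admin_from_env(pool: &sqlx::PgPool) -> anyhow::Result<()> {
    if let Ok(raw) = std::env::var("SEED_ADMIN_DISCORD_ID") {
        let discord_id: i64 = raw
            .parse()
            .map_err(|e| anyhow::anyhow!("invalid SEED_ADMIN_DISCORD_ID: {e}"))?;
        ensure_seed_admin(pool, discord_id).await?;
    }
    Ok(())
}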
+3
-4
@@ -1,5 +1,5 @@
use crate::app::App;
use crate::cli::{Args, ServiceName, determine_enabled_services};
use crate::cli::{Args, ServiceName};
use crate::logging::setup_logging;
use clap::Parser;
use std::process::ExitCode;
@@ -29,9 +29,8 @@ async fn main() -> ExitCode {
// Parse CLI arguments
let args = Args::parse();

// Determine which services should be enabled
let enabled_services: Vec<ServiceName> =
determine_enabled_services(&args).expect("Failed to determine enabled services");
// Always run all services
let enabled_services = ServiceName::all();

// Create and initialize the application
let mut app = App::new().await.expect("Failed to initialize application");
+13
-2
@@ -3,6 +3,7 @@ pub mod scheduler;
pub mod worker;

use crate::banner::BannerApi;
use crate::data::scrape_jobs;
use crate::services::Service;
use crate::state::ReferenceCache;
use crate::status::{ServiceStatus, ServiceStatusRegistry};
@@ -49,7 +50,17 @@ impl ScraperService {
}

/// Starts the scheduler and a pool of workers.
pub fn start(&mut self) {
///
/// Force-unlocks any jobs left locked by a previous unclean shutdown before
/// spawning workers, so those jobs re-enter the queue immediately.
pub async fn start(&mut self) {
// Recover jobs left locked by a previous crash/unclean shutdown
match scrape_jobs::force_unlock_all(&self.db_pool).await {
Ok(0) => {}
Ok(count) => warn!(count, "Force-unlocked stale jobs from previous run"),
Err(e) => warn!(error = ?e, "Failed to force-unlock stale jobs"),
}

info!("ScraperService starting");

// Create shutdown channel
@@ -92,7 +103,7 @@ impl Service for ScraperService {
}

async fn run(&mut self) -> Result<(), anyhow::Error> {
self.start();
self.start().await;
std::future::pending::<()>().await;
Ok(())
}
+15
-2
@@ -10,6 +10,9 @@ use tokio::sync::broadcast;
use tokio::time;
use tracing::{Instrument, debug, error, info, trace, warn};

/// Maximum time a single job is allowed to run before being considered stuck.
const JOB_TIMEOUT: Duration = Duration::from_secs(5 * 60);

/// A single worker instance.
///
/// Each worker runs in its own asynchronous task and continuously polls the
@@ -62,13 +65,23 @@ impl Worker {
let max_retries = job.max_retries;
let start = std::time::Instant::now();

// Process the job, racing against shutdown signal
// Process the job, racing against shutdown signal and timeout
let process_result = tokio::select! {
_ = shutdown_rx.recv() => {
self.handle_shutdown_during_processing(job_id).await;
break;
}
result = self.process_job(job) => result
result = async {
match time::timeout(JOB_TIMEOUT, self.process_job(job)).await {
Ok(result) => result,
Err(_elapsed) => {
Err(JobError::Recoverable(anyhow::anyhow!(
"job timed out after {}s",
JOB_TIMEOUT.as_secs()
)))
}
}
} => result
};

let duration = start.elapsed();
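Editor's note: the timeout arm above in isolation. `tokio::time::timeout` wraps a future and yields `Err(Elapsed)` if it does not complete in time, dropping (cancelling) the inner future, which is why the hunk maps it to a recoverable error. A minimal standalone sketch:

// Sketch only: deadline wrapper mirroring the select! arm above.
use std::time::Duration;

async fn with_deadline<F, T>(fut: F) -> anyhow::Result<T>
where
    F: std::future::Future<Output = T>,
{
    tokio::time::timeout(Duration::from_secs(5 * 60), fut)
        .await
        .map_err(|_| anyhow::anyhow!("job timed out after 300s"))
}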
+5
-2
@@ -1,6 +1,7 @@
use super::Service;
use crate::state::AppState;
use crate::status::ServiceStatus;
use crate::web::auth::AuthConfig;
use crate::web::create_router;
use std::net::SocketAddr;
use tokio::net::TcpListener;
@@ -11,14 +12,16 @@ use tracing::{info, trace, warn};
pub struct WebService {
port: u16,
app_state: AppState,
auth_config: AuthConfig,
shutdown_tx: Option<broadcast::Sender<()>>,
}

impl WebService {
pub fn new(port: u16, app_state: AppState) -> Self {
pub fn new(port: u16, app_state: AppState, auth_config: AuthConfig) -> Self {
Self {
port,
app_state,
auth_config,
shutdown_tx: None,
}
}
@@ -58,7 +61,7 @@ impl Service for WebService {

async fn run(&mut self) -> Result<(), anyhow::Error> {
// Create the main router with Banner API routes
let app = create_router(self.app_state.clone());
let app = create_router(self.app_state.clone(), self.auth_config.clone());

let addr = SocketAddr::from(([0, 0, 0, 0], self.port));
@@ -4,6 +4,7 @@ use crate::banner::BannerApi;
use crate::banner::Course;
use crate::data::models::ReferenceData;
use crate::status::ServiceStatusRegistry;
use crate::web::session_cache::{OAuthStateStore, SessionCache};
use anyhow::Result;
use sqlx::PgPool;
use std::collections::HashMap;
@@ -72,11 +73,15 @@ pub struct AppState {
pub db_pool: PgPool,
pub service_statuses: ServiceStatusRegistry,
pub reference_cache: Arc<RwLock<ReferenceCache>>,
pub session_cache: SessionCache,
pub oauth_state_store: OAuthStateStore,
}

impl AppState {
pub fn new(banner_api: Arc<BannerApi>, db_pool: PgPool) -> Self {
Self {
session_cache: SessionCache::new(db_pool.clone()),
oauth_state_store: OAuthStateStore::new(),
banner_api,
db_pool,
service_statuses: ServiceStatusRegistry::new(),
@@ -0,0 +1,205 @@
//! Admin API handlers.
//!
//! All endpoints require the `AdminUser` extractor, returning 401/403 as needed.

use axum::extract::{Path, State};
use axum::http::StatusCode;
use axum::response::Json;
use serde::Deserialize;
use serde_json::{Value, json};

use crate::data::models::User;
use crate::state::AppState;
use crate::web::extractors::AdminUser;

/// `GET /api/admin/status` — Enhanced system status for admins.
pub async fn admin_status(
AdminUser(_user): AdminUser,
State(state): State<AppState>,
) -> Result<Json<Value>, (StatusCode, Json<Value>)> {
let (user_count,): (i64,) = sqlx::query_as("SELECT COUNT(*) FROM users")
.fetch_one(&state.db_pool)
.await
.map_err(|e| {
tracing::error!(error = %e, "failed to count users");
(
StatusCode::INTERNAL_SERVER_ERROR,
Json(json!({"error": "failed to count users"})),
)
})?;

let (session_count,): (i64,) =
sqlx::query_as("SELECT COUNT(*) FROM user_sessions WHERE expires_at > now()")
.fetch_one(&state.db_pool)
.await
.map_err(|e| {
tracing::error!(error = %e, "failed to count sessions");
(
StatusCode::INTERNAL_SERVER_ERROR,
Json(json!({"error": "failed to count sessions"})),
)
})?;

let course_count = state.get_course_count().await.map_err(|e| {
tracing::error!(error = %e, "failed to count courses");
(
StatusCode::INTERNAL_SERVER_ERROR,
Json(json!({"error": "failed to count courses"})),
)
})?;

let (scrape_job_count,): (i64,) = sqlx::query_as("SELECT COUNT(*) FROM scrape_jobs")
.fetch_one(&state.db_pool)
.await
.map_err(|e| {
tracing::error!(error = %e, "failed to count scrape jobs");
(
StatusCode::INTERNAL_SERVER_ERROR,
Json(json!({"error": "failed to count scrape jobs"})),
)
})?;

let services: Vec<Value> = state
.service_statuses
.all()
.into_iter()
.map(|(name, status)| {
json!({
"name": name,
"status": status,
})
})
.collect();

Ok(Json(json!({
"userCount": user_count,
"sessionCount": session_count,
"courseCount": course_count,
"scrapeJobCount": scrape_job_count,
"services": services,
})))
}

/// `GET /api/admin/users` — List all users.
pub async fn list_users(
AdminUser(_user): AdminUser,
State(state): State<AppState>,
) -> Result<Json<Vec<User>>, (StatusCode, Json<Value>)> {
let users = crate::data::users::list_users(&state.db_pool)
.await
.map_err(|e| {
tracing::error!(error = %e, "failed to list users");
(
StatusCode::INTERNAL_SERVER_ERROR,
Json(json!({"error": "failed to list users"})),
)
})?;

Ok(Json(users))
}

#[derive(Deserialize)]
pub struct SetAdminBody {
is_admin: bool,
}

/// `PUT /api/admin/users/{discord_id}/admin` — Set admin status for a user.
pub async fn set_user_admin(
AdminUser(_user): AdminUser,
State(state): State<AppState>,
Path(discord_id): Path<i64>,
Json(body): Json<SetAdminBody>,
) -> Result<Json<User>, (StatusCode, Json<Value>)> {
let user = crate::data::users::set_admin(&state.db_pool, discord_id, body.is_admin)
.await
.map_err(|e| {
tracing::error!(error = %e, "failed to set admin status");
(
StatusCode::INTERNAL_SERVER_ERROR,
Json(json!({"error": "failed to set admin status"})),
)
})?
.ok_or_else(|| {
(
StatusCode::NOT_FOUND,
Json(json!({"error": "user not found"})),
)
})?;

state.session_cache.evict_user(discord_id);

Ok(Json(user))
}

/// `GET /api/admin/scrape-jobs` — List scrape jobs.
pub async fn list_scrape_jobs(
AdminUser(_user): AdminUser,
State(state): State<AppState>,
) -> Result<Json<Value>, (StatusCode, Json<Value>)> {
let rows = sqlx::query_as::<_, crate::data::models::ScrapeJob>(
"SELECT * FROM scrape_jobs ORDER BY priority DESC, execute_at ASC LIMIT 100",
)
.fetch_all(&state.db_pool)
.await
.map_err(|e| {
tracing::error!(error = %e, "failed to list scrape jobs");
(
StatusCode::INTERNAL_SERVER_ERROR,
Json(json!({"error": "failed to list scrape jobs"})),
)
})?;

let jobs: Vec<Value> = rows
.iter()
.map(|j| {
json!({
"id": j.id,
"targetType": format!("{:?}", j.target_type),
"targetPayload": j.target_payload,
"priority": format!("{:?}", j.priority),
"executeAt": j.execute_at.to_rfc3339(),
"createdAt": j.created_at.to_rfc3339(),
"lockedAt": j.locked_at.map(|t| t.to_rfc3339()),
"retryCount": j.retry_count,
"maxRetries": j.max_retries,
})
})
.collect();

Ok(Json(json!({ "jobs": jobs })))
}

/// `GET /api/admin/audit-log` — List recent audit entries.
pub async fn list_audit_log(
AdminUser(_user): AdminUser,
State(state): State<AppState>,
) -> Result<Json<Value>, (StatusCode, Json<Value>)> {
let rows = sqlx::query_as::<_, crate::data::models::CourseAudit>(
"SELECT * FROM course_audits ORDER BY timestamp DESC LIMIT 200",
)
.fetch_all(&state.db_pool)
.await
.map_err(|e| {
tracing::error!(error = %e, "failed to list audit log");
(
StatusCode::INTERNAL_SERVER_ERROR,
Json(json!({"error": "failed to list audit log"})),
)
})?;

let entries: Vec<Value> = rows
.iter()
.map(|a| {
json!({
"id": a.id,
"courseId": a.course_id,
"timestamp": a.timestamp.to_rfc3339(),
"fieldChanged": a.field_changed,
"oldValue": a.old_value,
"newValue": a.new_value,
})
})
.collect();

Ok(Json(json!({ "entries": entries })))
}
+114
-19
@@ -1,14 +1,18 @@
//! Embedded assets for the web frontend
//! Embedded assets for the web frontend.
//!
//! This module handles serving static assets that are embedded into the binary
//! at compile time using rust-embed.
//! Serves static assets embedded into the binary at compile time using rust-embed.
//! Supports content negotiation for pre-compressed variants (.br, .gz, .zst)
//! generated at build time by `web/scripts/compress-assets.ts`.

use axum::http::{HeaderMap, HeaderValue, header};
use dashmap::DashMap;
use rapidhash::v3::rapidhash_v3;
use rust_embed::RustEmbed;
use std::fmt;
use std::sync::LazyLock;

use super::encoding::{COMPRESSION_MIN_SIZE, ContentEncoding, parse_accepted_encodings};

/// Embedded web assets from the dist directory
#[derive(RustEmbed)]
#[folder = "web/dist/"]
@@ -21,17 +25,15 @@ pub struct WebAssets;
pub struct AssetHash(u64);

impl AssetHash {
/// Create a new AssetHash from u64 value
pub fn new(hash: u64) -> Self {
Self(hash)
}

/// Get the hash as a hex string
pub fn to_hex(&self) -> String {
format!("{:016x}", self.0)
}

/// Get the hash as a quoted hex string
/// Get the hash as a quoted hex string (for ETag headers)
pub fn quoted(&self) -> String {
format!("\"{}\"", self.to_hex())
}
@@ -51,12 +53,8 @@ pub struct AssetMetadata {
}

impl AssetMetadata {
/// Check if the etag matches the asset hash
pub fn etag_matches(&self, etag: &str) -> bool {
// Remove quotes if present (ETags are typically quoted)
let etag = etag.trim_matches('"');

// ETags generated from u64 hex should be 16 characters
etag.len() == 16
&& u64::from_str_radix(etag, 16)
.map(|parsed| parsed == self.hash.0)
@@ -68,28 +66,125 @@ impl AssetMetadata {
static ASSET_CACHE: LazyLock<DashMap<String, AssetMetadata>> = LazyLock::new(DashMap::new);

/// Get cached asset metadata for a file path, caching on-demand
/// Returns AssetMetadata containing MIME type and RapidHash hash
pub fn get_asset_metadata_cached(path: &str, content: &[u8]) -> AssetMetadata {
// Check cache first
if let Some(cached) = ASSET_CACHE.get(path) {
return cached.value().clone();
}

// Calculate MIME type
let mime_type = mime_guess::from_path(path)
.first()
.map(|mime| mime.to_string());

// Calculate RapidHash hash (using u64 native output size)
let hash_value = rapidhash_v3(content);
let hash = AssetHash::new(hash_value);

let hash = AssetHash::new(rapidhash_v3(content));
let metadata = AssetMetadata { mime_type, hash };

// Only cache if we haven't exceeded the limit
if ASSET_CACHE.len() < 1000 {
ASSET_CACHE.insert(path.to_string(), metadata.clone());
}

metadata
}

/// Set appropriate `Cache-Control` header based on the asset path.
///
/// SvelteKit outputs fingerprinted assets under `_app/immutable/` which are
/// safe to cache indefinitely. Other assets get shorter cache durations.
fn set_cache_control(headers: &mut HeaderMap, path: &str) {
let cache_control = if path.contains("immutable/") {
// SvelteKit fingerprinted assets — cache forever
"public, max-age=31536000, immutable"
} else if path == "index.html" || path.ends_with(".html") {
"public, max-age=300"
} else {
match path.rsplit_once('.').map(|(_, ext)| ext) {
Some("css" | "js") => "public, max-age=86400",
Some("png" | "jpg" | "jpeg" | "gif" | "svg" | "ico") => "public, max-age=2592000",
_ => "public, max-age=3600",
}
};

if let Ok(value) = HeaderValue::from_str(cache_control) {
headers.insert(header::CACHE_CONTROL, value);
}
}

/// Serve an embedded asset with content encoding negotiation.
///
/// Tries pre-compressed variants (.br, .gz, .zst) in the order preferred by
/// the client's `Accept-Encoding` header, falling back to the uncompressed
/// original. Returns `None` if the asset doesn't exist at all.
pub fn try_serve_asset_with_encoding(
path: &str,
request_headers: &HeaderMap,
) -> Option<axum::response::Response> {
use axum::response::IntoResponse;

let asset_path = path.strip_prefix('/').unwrap_or(path);

// Get the uncompressed original first (for metadata: MIME type, ETag)
let original = WebAssets::get(asset_path)?;
let metadata = get_asset_metadata_cached(asset_path, &original.data);

// Check ETag for conditional requests (304 Not Modified)
if let Some(etag) = request_headers.get(header::IF_NONE_MATCH)
&& etag.to_str().is_ok_and(|s| metadata.etag_matches(s))
{
return Some(axum::http::StatusCode::NOT_MODIFIED.into_response());
}

let mime_type = metadata
.mime_type
.unwrap_or_else(|| "application/octet-stream".to_string());

// Only attempt pre-compressed variants for files above the compression
// threshold — the build script skips smaller files too.
let accepted_encodings = if original.data.len() >= COMPRESSION_MIN_SIZE {
parse_accepted_encodings(request_headers)
} else {
vec![ContentEncoding::Identity]
};

for encoding in &accepted_encodings {
if *encoding == ContentEncoding::Identity {
continue;
}

let compressed_path = format!("{}{}", asset_path, encoding.extension());
if let Some(compressed) = WebAssets::get(&compressed_path) {
let mut response_headers = HeaderMap::new();

if let Ok(ct) = HeaderValue::from_str(&mime_type) {
response_headers.insert(header::CONTENT_TYPE, ct);
}
if let Some(ce) = encoding.header_value() {
response_headers.insert(header::CONTENT_ENCODING, ce);
}
if let Ok(etag_val) = HeaderValue::from_str(&metadata.hash.quoted()) {
response_headers.insert(header::ETAG, etag_val);
}
// Vary so caches distinguish by encoding
response_headers.insert(header::VARY, HeaderValue::from_static("Accept-Encoding"));
set_cache_control(&mut response_headers, asset_path);

return Some(
(
axum::http::StatusCode::OK,
response_headers,
compressed.data,
)
.into_response(),
);
}
}

// No compressed variant found — serve uncompressed original
let mut response_headers = HeaderMap::new();
if let Ok(ct) = HeaderValue::from_str(&mime_type) {
response_headers.insert(header::CONTENT_TYPE, ct);
}
if let Ok(etag_val) = HeaderValue::from_str(&metadata.hash.quoted()) {
response_headers.insert(header::ETAG, etag_val);
}
set_cache_control(&mut response_headers, asset_path);

Some((axum::http::StatusCode::OK, response_headers, original.data).into_response())
}
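Editor's note: a hedged sketch of how `try_serve_asset_with_encoding` might back an SPA fallback route; the real router wiring lives in routes.rs and is only partially visible in this compare:

// Sketch only: try the requested path, then the SPA's index.html, then 404.
use axum::{
    http::{HeaderMap, StatusCode, Uri},
    response::IntoResponse,
};

async fn static_fallback(uri: Uri, headers: HeaderMap) -> impl IntoResponse {
    try_serve_asset_with_encoding(uri.path(), &headers)
        .or_else(|| try_serve_asset_with_encoding("index.html", &headers))
        .unwrap_or_else(|| StatusCode::NOT_FOUND.into_response())
}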
+300
@@ -0,0 +1,300 @@
//! Discord OAuth2 authentication handlers.
//!
//! Provides login, callback, logout, and session introspection endpoints
//! for Discord OAuth2 authentication flow.

use axum::extract::{Extension, Query, State};
use axum::http::{HeaderMap, StatusCode, header};
use axum::response::{IntoResponse, Json, Redirect, Response};
use serde::Deserialize;
use serde_json::{Value, json};
use std::time::Duration;
use tracing::{error, info, warn};

use crate::state::AppState;

/// OAuth configuration passed as an Axum Extension.
#[derive(Clone)]
pub struct AuthConfig {
pub client_id: String,
pub client_secret: String,
/// Optional base URL override (e.g. "https://banner.xevion.dev").
/// When `None`, the redirect URI is derived from the request's Origin/Host header.
pub redirect_base: Option<String>,
}

const CALLBACK_PATH: &str = "/api/auth/callback";

/// Derive the origin (scheme + host + port) the user's browser is actually on.
///
/// Priority:
/// 1. Configured `redirect_base` (production override)
/// 2. `Referer` header — preserves the real browser origin even through
/// reverse proxies that rewrite `Host` (e.g. Vite dev proxy with
/// `changeOrigin: true`)
/// 3. `Origin` header (present on POST / CORS requests)
/// 4. `Host` header (last resort, may be rewritten by proxies)
fn resolve_origin(auth_config: &AuthConfig, headers: &HeaderMap) -> String {
if let Some(base) = &auth_config.redirect_base {
return base.trim_end_matches('/').to_owned();
}

// Referer carries the full browser URL; extract just the origin.
if let Some(referer) = headers.get(header::REFERER).and_then(|v| v.to_str().ok())
&& let Ok(parsed) = url::Url::parse(referer)
{
let origin = parsed.origin().unicode_serialization();
if origin != "null" {
return origin;
}
}

if let Some(origin) = headers.get("origin").and_then(|v| v.to_str().ok()) {
return origin.trim_end_matches('/').to_owned();
}

if let Some(host) = headers.get(header::HOST).and_then(|v| v.to_str().ok()) {
return format!("http://{host}");
}

"http://localhost:8080".to_owned()
}

#[derive(Deserialize)]
pub struct CallbackParams {
code: String,
state: String,
}

#[derive(Deserialize)]
struct TokenResponse {
access_token: String,
}

#[derive(Deserialize)]
struct DiscordUser {
id: String,
username: String,
avatar: Option<String>,
}

/// Extract the `session` cookie value from request headers.
fn extract_session_token(headers: &HeaderMap) -> Option<String> {
headers
.get(header::COOKIE)?
.to_str()
.ok()?
.split(';')
.find_map(|cookie| {
let cookie = cookie.trim();
cookie.strip_prefix("session=").map(|v| v.to_owned())
})
}

/// Build a `Set-Cookie` header value for the session cookie.
fn session_cookie(token: &str, max_age: i64, secure: bool) -> String {
let mut cookie = format!("session={token}; HttpOnly; SameSite=Lax; Path=/; Max-Age={max_age}");
if secure {
cookie.push_str("; Secure");
}
cookie
}

/// `GET /api/auth/login` — Redirect to Discord OAuth2 authorization page.
pub async fn auth_login(
State(state): State<AppState>,
Extension(auth_config): Extension<AuthConfig>,
headers: HeaderMap,
) -> Redirect {
let origin = resolve_origin(&auth_config, &headers);
let redirect_uri = format!("{origin}{CALLBACK_PATH}");
let csrf_state = state.oauth_state_store.generate(origin);
let redirect_uri_encoded = urlencoding::encode(&redirect_uri);

let url = format!(
"https://discord.com/oauth2/authorize\
?client_id={}\
&redirect_uri={redirect_uri_encoded}\
&response_type=code\
&scope=identify\
&state={csrf_state}",
auth_config.client_id,
);

Redirect::temporary(&url)
}

/// `GET /api/auth/callback` — Handle Discord OAuth2 callback.
pub async fn auth_callback(
State(state): State<AppState>,
Extension(auth_config): Extension<AuthConfig>,
Query(params): Query<CallbackParams>,
) -> Result<Response, (StatusCode, Json<Value>)> {
// 1. Validate CSRF state and recover the origin used during login
let origin = state
.oauth_state_store
.validate(&params.state)
.ok_or_else(|| {
warn!("OAuth callback with invalid CSRF state");
(
StatusCode::BAD_REQUEST,
Json(json!({ "error": "Invalid OAuth state" })),
)
})?;

// 2. Exchange authorization code for access token
let redirect_uri = format!("{origin}{CALLBACK_PATH}");
let client = reqwest::Client::new();
let token_response = client
.post("https://discord.com/api/oauth2/token")
.form(&[
("client_id", auth_config.client_id.as_str()),
("client_secret", auth_config.client_secret.as_str()),
("grant_type", "authorization_code"),
("code", params.code.as_str()),
("redirect_uri", redirect_uri.as_str()),
])
.send()
.await
.map_err(|e| {
error!(error = %e, "failed to exchange OAuth code for token");
(
StatusCode::BAD_GATEWAY,
Json(json!({ "error": "Failed to exchange code with Discord" })),
)
})?;

if !token_response.status().is_success() {
let status = token_response.status();
let body = token_response.text().await.unwrap_or_default();
error!(%status, %body, "Discord token exchange returned error");
return Err((
StatusCode::BAD_GATEWAY,
Json(json!({ "error": "Discord token exchange failed" })),
));
}

let token_data: TokenResponse = token_response.json().await.map_err(|e| {
error!(error = %e, "failed to parse Discord token response");
(
StatusCode::BAD_GATEWAY,
Json(json!({ "error": "Invalid token response from Discord" })),
)
})?;

// 3. Fetch Discord user profile
let discord_user: DiscordUser = client
.get("https://discord.com/api/users/@me")
.bearer_auth(&token_data.access_token)
.send()
.await
.map_err(|e| {
error!(error = %e, "failed to fetch Discord user profile");
(
StatusCode::BAD_GATEWAY,
Json(json!({ "error": "Failed to fetch Discord profile" })),
)
})?
.json()
.await
.map_err(|e| {
error!(error = %e, "failed to parse Discord user profile");
(
StatusCode::BAD_GATEWAY,
Json(json!({ "error": "Invalid user profile from Discord" })),
)
})?;

let discord_id: i64 = discord_user.id.parse().map_err(|_| {
error!(id = %discord_user.id, "Discord user ID is not a valid i64");
(
StatusCode::BAD_GATEWAY,
Json(json!({ "error": "Invalid Discord user ID" })),
)
})?;

// 4. Upsert user
let user = crate::data::users::upsert_user(
&state.db_pool,
discord_id,
&discord_user.username,
discord_user.avatar.as_deref(),
)
.await
.map_err(|e| {
error!(error = %e, "failed to upsert user");
(
StatusCode::INTERNAL_SERVER_ERROR,
Json(json!({ "error": "Database error" })),
)
})?;

info!(discord_id, username = %user.discord_username, "user authenticated via OAuth");

// 5. Create session
let session = crate::data::sessions::create_session(
&state.db_pool,
discord_id,
Duration::from_secs(7 * 24 * 3600),
)
.await
.map_err(|e| {
error!(error = %e, "failed to create session");
(
StatusCode::INTERNAL_SERVER_ERROR,
Json(json!({ "error": "Failed to create session" })),
)
})?;

// 6. Build response with session cookie
let secure = redirect_uri.starts_with("https://");
let cookie = session_cookie(&session.id, 604800, secure);

let redirect_to = if user.is_admin { "/admin" } else { "/" };

Ok((
[(header::SET_COOKIE, cookie)],
Redirect::temporary(redirect_to),
)
.into_response())
}

/// `POST /api/auth/logout` — Destroy the current session.
pub async fn auth_logout(State(state): State<AppState>, headers: HeaderMap) -> Response {
if let Some(token) = extract_session_token(&headers) {
if let Err(e) = crate::data::sessions::delete_session(&state.db_pool, &token).await {
warn!(error = %e, "failed to delete session from database");
}
state.session_cache.evict(&token);
}

let cookie = session_cookie("", 0, false);

(
StatusCode::OK,
[(header::SET_COOKIE, cookie)],
Json(json!({ "ok": true })),
)
.into_response()
}

/// `GET /api/auth/me` — Return the current authenticated user's info.
pub async fn auth_me(
State(state): State<AppState>,
headers: HeaderMap,
) -> Result<Json<Value>, StatusCode> {
let token = extract_session_token(&headers).ok_or(StatusCode::UNAUTHORIZED)?;

let user = state
.session_cache
.get_user(&token)
.await
.ok_or(StatusCode::UNAUTHORIZED)?;

Ok(Json(json!({
"discordId": user.discord_id.to_string(),
"username": user.discord_username,
"avatarHash": user.discord_avatar_hash,
"isAdmin": user.is_admin,
})))
}
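Editor's note: the route paths come from the doc comments above, but the exact router wiring is not shown in this compare; a plausible sketch of it, assuming the handlers read `AuthConfig` via `Extension` as declared:

// Sketch only: registering the four auth endpoints.
use axum::{Extension, Router, routing::{get, post}};

fn auth_routes(auth_config: AuthConfig) -> Router<crate::state::AppState> {
    Router::new()
        .route("/api/auth/login", get(auth_login))
        .route("/api/auth/callback", get(auth_callback))
        .route("/api/auth/logout", post(auth_logout))
        .route("/api/auth/me", get(auth_me))
        // Handlers extract AuthConfig through this layer.
        .layer(Extension(auth_config))
}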
@@ -0,0 +1,196 @@
//! Content encoding negotiation for pre-compressed asset serving.
//!
//! Parses Accept-Encoding headers with quality values and returns
//! supported encodings in priority order for content negotiation.

use axum::http::{HeaderMap, HeaderValue, header};

/// Minimum size threshold for compression (bytes).
///
/// Must match `MIN_SIZE` in `web/scripts/compress-assets.ts`.
pub const COMPRESSION_MIN_SIZE: usize = 512;

/// Supported content encodings in priority order (best compression first).
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum ContentEncoding {
Zstd,
Brotli,
Gzip,
Identity,
}

impl ContentEncoding {
/// File extension suffix for pre-compressed variant lookup.
#[inline]
pub fn extension(&self) -> &'static str {
match self {
Self::Zstd => ".zst",
Self::Brotli => ".br",
Self::Gzip => ".gz",
Self::Identity => "",
}
}

/// `Content-Encoding` header value, or `None` for identity.
#[inline]
pub fn header_value(&self) -> Option<HeaderValue> {
match self {
Self::Zstd => Some(HeaderValue::from_static("zstd")),
Self::Brotli => Some(HeaderValue::from_static("br")),
Self::Gzip => Some(HeaderValue::from_static("gzip")),
Self::Identity => None,
}
}

/// Default priority when quality values are equal (higher = better).
#[inline]
fn default_priority(&self) -> u8 {
match self {
Self::Zstd => 4,
Self::Brotli => 3,
Self::Gzip => 2,
Self::Identity => 1,
}
}
}

/// Parse `Accept-Encoding` header and return supported encodings in priority order.
///
/// Supports quality values: `Accept-Encoding: gzip;q=0.8, br;q=1.0, zstd`
/// When quality values are equal: zstd > brotli > gzip > identity.
/// Encodings with `q=0` are excluded.
pub fn parse_accepted_encodings(headers: &HeaderMap) -> Vec<ContentEncoding> {
let Some(accept) = headers
.get(header::ACCEPT_ENCODING)
.and_then(|v| v.to_str().ok())
else {
return vec![ContentEncoding::Identity];
};

let mut encodings: Vec<(ContentEncoding, f32)> = Vec::new();

for part in accept.split(',') {
let part = part.trim();
if part.is_empty() {
continue;
}

let (encoding_str, quality) = if let Some((enc, params)) = part.split_once(';') {
let q = params
.split(';')
.find_map(|p| p.trim().strip_prefix("q="))
.and_then(|q| q.parse::<f32>().ok())
.unwrap_or(1.0);
(enc.trim(), q)
} else {
(part, 1.0)
};

if quality == 0.0 {
continue;
}

let encoding = match encoding_str.to_lowercase().as_str() {
"zstd" => ContentEncoding::Zstd,
"br" | "brotli" => ContentEncoding::Brotli,
"gzip" | "x-gzip" => ContentEncoding::Gzip,
"*" => ContentEncoding::Gzip,
"identity" => ContentEncoding::Identity,
_ => continue,
};

encodings.push((encoding, quality));
}

// Sort by quality (desc), then default priority (desc)
encodings.sort_by(|a, b| {
b.1.partial_cmp(&a.1)
.unwrap_or(std::cmp::Ordering::Equal)
.then_with(|| b.0.default_priority().cmp(&a.0.default_priority()))
});

if encodings.is_empty() {
vec![ContentEncoding::Identity]
} else {
encodings.into_iter().map(|(e, _)| e).collect()
}
}

#[cfg(test)]
mod tests {
use super::*;

#[test]
fn test_parse_all_encodings() {
let mut headers = HeaderMap::new();
headers.insert(header::ACCEPT_ENCODING, "gzip, br, zstd".parse().unwrap());
let encodings = parse_accepted_encodings(&headers);
assert_eq!(encodings[0], ContentEncoding::Zstd);
assert_eq!(encodings[1], ContentEncoding::Brotli);
assert_eq!(encodings[2], ContentEncoding::Gzip);
}

#[test]
fn test_parse_with_quality_values() {
let mut headers = HeaderMap::new();
headers.insert(
header::ACCEPT_ENCODING,
"gzip;q=1.0, br;q=0.5, zstd;q=0.8".parse().unwrap(),
);
let encodings = parse_accepted_encodings(&headers);
assert_eq!(encodings[0], ContentEncoding::Gzip);
assert_eq!(encodings[1], ContentEncoding::Zstd);
assert_eq!(encodings[2], ContentEncoding::Brotli);
}

#[test]
fn test_no_header_returns_identity() {
let headers = HeaderMap::new();
let encodings = parse_accepted_encodings(&headers);
assert_eq!(encodings, vec![ContentEncoding::Identity]);
}

#[test]
fn test_disabled_encoding_excluded() {
let mut headers = HeaderMap::new();
headers.insert(
header::ACCEPT_ENCODING,
"zstd;q=0, br, gzip".parse().unwrap(),
);
let encodings = parse_accepted_encodings(&headers);
assert_eq!(encodings[0], ContentEncoding::Brotli);
assert_eq!(encodings[1], ContentEncoding::Gzip);
assert!(!encodings.contains(&ContentEncoding::Zstd));
}

#[test]
fn test_real_chrome_header() {
let mut headers = HeaderMap::new();
headers.insert(
header::ACCEPT_ENCODING,
"gzip, deflate, br, zstd".parse().unwrap(),
);
assert_eq!(parse_accepted_encodings(&headers)[0], ContentEncoding::Zstd);
}

#[test]
fn test_extensions() {
assert_eq!(ContentEncoding::Zstd.extension(), ".zst");
assert_eq!(ContentEncoding::Brotli.extension(), ".br");
assert_eq!(ContentEncoding::Gzip.extension(), ".gz");
assert_eq!(ContentEncoding::Identity.extension(), "");
}

#[test]
fn test_header_values() {
assert_eq!(
ContentEncoding::Zstd.header_value().unwrap(),
HeaderValue::from_static("zstd")
);
assert_eq!(
ContentEncoding::Brotli.header_value().unwrap(),
HeaderValue::from_static("br")
);
assert!(ContentEncoding::Identity.header_value().is_none());
}
}
@@ -0,0 +1,74 @@
//! Axum extractors for authentication and authorization.

use axum::extract::FromRequestParts;
use axum::http::{StatusCode, header};
use axum::response::Json;
use http::request::Parts;
use serde_json::json;

use crate::data::models::User;
use crate::state::AppState;

/// Extractor that resolves the session cookie to an authenticated [`User`].
///
/// Returns 401 if no valid session cookie is present.
pub struct AuthUser(pub User);

impl FromRequestParts<AppState> for AuthUser {
type Rejection = (StatusCode, Json<serde_json::Value>);

async fn from_request_parts(
parts: &mut Parts,
state: &AppState,
) -> Result<Self, Self::Rejection> {
let token = parts
.headers
.get(header::COOKIE)
.and_then(|v| v.to_str().ok())
.and_then(|cookies| {
cookies
.split(';')
.find_map(|c| c.trim().strip_prefix("session=").map(|v| v.to_owned()))
})
.ok_or_else(|| {
(
StatusCode::UNAUTHORIZED,
Json(json!({"error": "unauthorized", "message": "No session cookie"})),
)
})?;

let user = state.session_cache.get_user(&token).await.ok_or_else(|| {
(
StatusCode::UNAUTHORIZED,
Json(json!({"error": "unauthorized", "message": "Invalid or expired session"})),
)
})?;

Ok(AuthUser(user))
}
}

/// Extractor that requires an authenticated admin user.
///
/// Returns 401 if not authenticated, 403 if not admin.
pub struct AdminUser(pub User);

impl FromRequestParts<AppState> for AdminUser {
type Rejection = (StatusCode, Json<serde_json::Value>);

async fn from_request_parts(
parts: &mut Parts,
state: &AppState,
) -> Result<Self, Self::Rejection> {
let AuthUser(user) = AuthUser::from_request_parts(parts, state).await?;

if !user.is_admin {
return Err((
StatusCode::FORBIDDEN,
Json(json!({"error": "forbidden", "message": "Admin access required"})),
));
}

Ok(AdminUser(user))
}
}
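A hedged usage sketch of the two extractors above; the handler names and paths are illustrative, not part of this diff. Because both implement `FromRequestParts<AppState>`, they drop into any handler on a router whose state is `AppState`, and the 401/403 rejections fire before the handler body runs:

use axum::{Json, Router, routing::get};
use serde_json::{Value, json};

// Hypothetical handlers: a request without a valid session cookie is
// rejected with 401 before these bodies execute; `admin_overview`
// additionally returns 403 for non-admin users.
async fn whoami(AuthUser(user): AuthUser) -> Json<Value> {
    Json(json!({ "discordId": user.discord_id }))
}

async fn admin_overview(AdminUser(user): AdminUser) -> Json<Value> {
    Json(json!({ "admin": user.is_admin }))
}

fn demo_router(state: AppState) -> Router {
    Router::new()
        .route("/whoami", get(whoami))
        .route("/admin/overview", get(admin_overview))
        .with_state(state)
}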
@@ -1,7 +1,13 @@
//! Web API module for the banner application.

pub mod admin;
#[cfg(feature = "embed-assets")]
pub mod assets;
pub mod auth;
#[cfg(feature = "embed-assets")]
pub mod encoding;
pub mod extractors;
pub mod routes;
pub mod session_cache;

pub use routes::*;

+181
-109
@@ -1,20 +1,21 @@
//! Web API endpoints for Banner bot monitoring and metrics.

use axum::{
Router,
Extension, Router,
body::Body,
extract::{Path, Query, Request, State},
http::StatusCode as AxumStatusCode,
response::{Json, Response},
routing::get,
routing::{get, post, put},
};

use crate::web::admin;
use crate::web::auth::{self, AuthConfig};
#[cfg(feature = "embed-assets")]
use axum::{
http::{HeaderMap, HeaderValue, StatusCode, Uri},
response::{Html, IntoResponse},
http::{HeaderMap, StatusCode, Uri},
response::IntoResponse,
};
#[cfg(feature = "embed-assets")]
use http::header;
use serde::{Deserialize, Serialize};
use serde_json::{Value, json};
use std::{collections::BTreeMap, time::Duration};
@@ -24,51 +25,17 @@ use crate::state::AppState;
use crate::status::ServiceStatus;
#[cfg(not(feature = "embed-assets"))]
use tower_http::cors::{Any, CorsLayer};
use tower_http::{classify::ServerErrorsFailureClass, timeout::TimeoutLayer, trace::TraceLayer};
use tower_http::{
classify::ServerErrorsFailureClass, compression::CompressionLayer, timeout::TimeoutLayer,
trace::TraceLayer,
};
use tracing::{Span, debug, trace, warn};

#[cfg(feature = "embed-assets")]
use crate::web::assets::{WebAssets, get_asset_metadata_cached};

/// Set appropriate caching headers based on asset type
#[cfg(feature = "embed-assets")]
fn set_caching_headers(response: &mut Response, path: &str, etag: &str) {
let headers = response.headers_mut();

// Set ETag
if let Ok(etag_value) = HeaderValue::from_str(etag) {
headers.insert(header::ETAG, etag_value);
}

// Set Cache-Control based on asset type
let cache_control = if path.starts_with("assets/") {
// Static assets with hashed filenames - long-term cache
"public, max-age=31536000, immutable"
} else if path == "index.html" {
// HTML files - short-term cache
"public, max-age=300"
} else {
match path.split_once('.').map(|(_, extension)| extension) {
Some(ext) => match ext {
// CSS/JS files - medium-term cache
"css" | "js" => "public, max-age=86400",
// Images - long-term cache
"png" | "jpg" | "jpeg" | "gif" | "svg" | "ico" => "public, max-age=2592000",
// Default for other files
_ => "public, max-age=3600",
},
// Default for files without an extension
None => "public, max-age=3600",
}
};

if let Ok(cache_control_value) = HeaderValue::from_str(cache_control) {
headers.insert(header::CACHE_CONTROL, cache_control_value);
}
}
use crate::web::assets::try_serve_asset_with_encoding;

/// Creates the web server router
pub fn create_router(app_state: AppState) -> Router {
pub fn create_router(app_state: AppState, auth_config: AuthConfig) -> Router {
let api_router = Router::new()
.route("/health", get(health))
.route("/status", get(status))
@@ -78,9 +45,31 @@ pub fn create_router(app_state: AppState) -> Router {
.route("/terms", get(get_terms))
.route("/subjects", get(get_subjects))
.route("/reference/{category}", get(get_reference))
.with_state(app_state.clone());

let auth_router = Router::new()
.route("/auth/login", get(auth::auth_login))
.route("/auth/callback", get(auth::auth_callback))
.route("/auth/logout", post(auth::auth_logout))
.route("/auth/me", get(auth::auth_me))
.layer(Extension(auth_config))
.with_state(app_state.clone());

let admin_router = Router::new()
.route("/admin/status", get(admin::admin_status))
.route("/admin/users", get(admin::list_users))
.route(
"/admin/users/{discord_id}/admin",
put(admin::set_user_admin),
)
.route("/admin/scrape-jobs", get(admin::list_scrape_jobs))
.route("/admin/audit-log", get(admin::list_audit_log))
.with_state(app_state);

let mut router = Router::new().nest("/api", api_router);
let mut router = Router::new()
.nest("/api", api_router)
.nest("/api", auth_router)
.nest("/api", admin_router);

// When embed-assets feature is enabled, serve embedded static assets
#[cfg(feature = "embed-assets")]
@@ -100,6 +89,13 @@ pub fn create_router(app_state: AppState) -> Router {
}

router.layer((
// Compress API responses (gzip/brotli/zstd). Pre-compressed static
// assets already have Content-Encoding set, so tower-http skips them.
CompressionLayer::new()
.zstd(true)
.br(true)
.gzip(true)
.quality(tower_http::CompressionLevel::Fastest),
TraceLayer::new_for_http()
.make_span_with(|request: &Request<Body>| {
tracing::debug_span!("request", path = request.uri().path())
@@ -146,71 +142,35 @@ pub fn create_router(app_state: AppState) -> Router {
))
}

/// Handler that extracts request information for caching
/// SPA fallback handler with content encoding negotiation.
///
/// Serves embedded static assets with pre-compressed variants when available,
/// falling back to `index.html` for SPA client-side routing.
#[cfg(feature = "embed-assets")]
async fn fallback(request: Request) -> Response {
async fn fallback(request: Request) -> axum::response::Response {
let uri = request.uri().clone();
let headers = request.headers().clone();
handle_spa_fallback_with_headers(uri, headers).await
handle_spa_fallback(uri, headers).await
}

/// Handles SPA routing by serving index.html for non-API, non-asset requests
/// This version includes HTTP caching headers and ETag support
#[cfg(feature = "embed-assets")]
async fn handle_spa_fallback_with_headers(uri: Uri, request_headers: HeaderMap) -> Response {
let path = uri.path().trim_start_matches('/');

if let Some(content) = WebAssets::get(path) {
// Get asset metadata (MIME type and hash) with caching
let metadata = get_asset_metadata_cached(path, &content.data);

// Check if client has a matching ETag (conditional request)
if let Some(etag) = request_headers.get(header::IF_NONE_MATCH)
&& etag.to_str().is_ok_and(|s| metadata.etag_matches(s))
{
return StatusCode::NOT_MODIFIED.into_response();
}

// Use cached MIME type, only set Content-Type if we have a valid MIME type
let mut response = (
[(
header::CONTENT_TYPE,
// For unknown types, set to application/octet-stream
metadata
.mime_type
.unwrap_or("application/octet-stream".to_string()),
)],
content.data,
)
.into_response();

// Set caching headers
set_caching_headers(&mut response, path, &metadata.hash.quoted());
async fn handle_spa_fallback(uri: Uri, request_headers: HeaderMap) -> axum::response::Response {
let path = uri.path();

// Try serving the exact asset (with encoding negotiation)
if let Some(response) = try_serve_asset_with_encoding(path, &request_headers) {
return response;
} else {
// Any assets that are not found should be treated as a 404, not falling back to the SPA index.html
if path.starts_with("assets/") {
return (StatusCode::NOT_FOUND, "Asset not found").into_response();
}
}

// Fall back to the SPA index.html
match WebAssets::get("index.html") {
Some(content) => {
let metadata = get_asset_metadata_cached("index.html", &content.data);
// SvelteKit assets under _app/ that don't exist are a hard 404
let trimmed = path.trim_start_matches('/');
if trimmed.starts_with("_app/") || trimmed.starts_with("assets/") {
return (StatusCode::NOT_FOUND, "Asset not found").into_response();
}

// Check if client has a matching ETag for index.html
if let Some(etag) = request_headers.get(header::IF_NONE_MATCH)
&& etag.to_str().is_ok_and(|s| metadata.etag_matches(s))
{
return StatusCode::NOT_MODIFIED.into_response();
}

let mut response = Html(content.data).into_response();
set_caching_headers(&mut response, "index.html", &metadata.hash.quoted());
response
}
// SPA fallback: serve index.html with encoding negotiation
match try_serve_asset_with_encoding("/index.html", &request_headers) {
Some(response) => response,
None => (
StatusCode::INTERNAL_SERVER_ERROR,
"Failed to load index.html",
@@ -284,20 +244,130 @@ async fn status(State(state): State<AppState>) -> Json<StatusResponse> {
}

/// Metrics endpoint for monitoring
async fn metrics() -> Json<Value> {
// For now, return basic metrics structure
Json(json!({
"banner_api": {
"status": "connected"
},
"timestamp": chrono::Utc::now().to_rfc3339()
}))
async fn metrics(
State(state): State<AppState>,
Query(params): Query<MetricsParams>,
) -> Result<Json<Value>, (AxumStatusCode, String)> {
let limit = params.limit.clamp(1, 5000);

// Parse range shorthand, defaulting to 24h
let range_str = params.range.as_deref().unwrap_or("24h");
let duration = match range_str {
"1h" => chrono::Duration::hours(1),
"6h" => chrono::Duration::hours(6),
"24h" => chrono::Duration::hours(24),
"7d" => chrono::Duration::days(7),
"30d" => chrono::Duration::days(30),
_ => {
return Err((
AxumStatusCode::BAD_REQUEST,
format!("Invalid range '{range_str}'. Valid: 1h, 6h, 24h, 7d, 30d"),
));
}
};
let since = chrono::Utc::now() - duration;

// Resolve course_id: explicit param takes priority, then term+crn lookup
let course_id = if let Some(id) = params.course_id {
Some(id)
} else if let (Some(term), Some(crn)) = (params.term.as_deref(), params.crn.as_deref()) {
let row: Option<(i32,)> =
sqlx::query_as("SELECT id FROM courses WHERE term_code = $1 AND crn = $2")
.bind(term)
.bind(crn)
.fetch_optional(&state.db_pool)
.await
.map_err(|e| {
tracing::error!(error = %e, "Course lookup for metrics failed");
(
AxumStatusCode::INTERNAL_SERVER_ERROR,
"Course lookup failed".to_string(),
)
})?;
row.map(|(id,)| id)
} else {
None
};

// Build query dynamically based on filters
let metrics: Vec<(i32, i32, chrono::DateTime<chrono::Utc>, i32, i32, i32)> =
if let Some(cid) = course_id {
sqlx::query_as(
"SELECT id, course_id, timestamp, enrollment, wait_count, seats_available \
FROM course_metrics \
WHERE course_id = $1 AND timestamp >= $2 \
ORDER BY timestamp DESC \
LIMIT $3",
)
.bind(cid)
.bind(since)
.bind(limit)
.fetch_all(&state.db_pool)
.await
} else {
sqlx::query_as(
"SELECT id, course_id, timestamp, enrollment, wait_count, seats_available \
FROM course_metrics \
WHERE timestamp >= $1 \
ORDER BY timestamp DESC \
LIMIT $2",
)
.bind(since)
.bind(limit)
.fetch_all(&state.db_pool)
.await
}
.map_err(|e| {
tracing::error!(error = %e, "Metrics query failed");
(
AxumStatusCode::INTERNAL_SERVER_ERROR,
"Metrics query failed".to_string(),
)
})?;

let count = metrics.len();
let metrics_json: Vec<Value> = metrics
.into_iter()
.map(
|(id, course_id, timestamp, enrollment, wait_count, seats_available)| {
json!({
"id": id,
"courseId": course_id,
"timestamp": timestamp.to_rfc3339(),
"enrollment": enrollment,
"waitCount": wait_count,
"seatsAvailable": seats_available,
})
},
)
.collect();

Ok(Json(json!({
"metrics": metrics_json,
"count": count,
"timestamp": chrono::Utc::now().to_rfc3339(),
})))
}

// ============================================================
// Course search & detail API
// ============================================================

#[derive(Deserialize)]
struct MetricsParams {
course_id: Option<i32>,
term: Option<String>,
crn: Option<String>,
/// Shorthand durations: "1h", "6h", "24h", "7d", "30d"
range: Option<String>,
#[serde(default = "default_metrics_limit")]
limit: i32,
}

fn default_metrics_limit() -> i32 {
500
}

#[derive(Deserialize)]
struct SubjectsParams {
term: String,
@@ -369,6 +439,7 @@ pub struct InstructorResponse {
is_primary: bool,
rmp_rating: Option<f32>,
rmp_num_ratings: Option<i32>,
rmp_legacy_id: Option<i32>,
}

#[derive(Serialize, TS)]
@@ -403,6 +474,7 @@ fn build_course_response(
is_primary: i.is_primary,
rmp_rating: i.avg_rating,
rmp_num_ratings: i.num_ratings,
rmp_legacy_id: i.rmp_legacy_id,
})
.collect();

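The `range` validation in the metrics handler above is a plain match over the five accepted shorthands. A sketch of the same logic pulled into a helper so it can be unit-tested; `parse_range` is hypothetical, routes.rs inlines this directly:

// Hypothetical helper: maps the /api/metrics `range` shorthand to a
// duration, returning None for anything outside the documented set.
fn parse_range(range: &str) -> Option<chrono::Duration> {
    match range {
        "1h" => Some(chrono::Duration::hours(1)),
        "6h" => Some(chrono::Duration::hours(6)),
        "24h" => Some(chrono::Duration::hours(24)),
        "7d" => Some(chrono::Duration::days(7)),
        "30d" => Some(chrono::Duration::days(30)),
        _ => None,
    }
}

#[cfg(test)]
mod range_tests {
    use super::parse_range;

    #[test]
    fn rejects_unknown_shorthand() {
        assert!(parse_range("12h").is_none());
        assert_eq!(parse_range("7d"), Some(chrono::Duration::days(7)));
    }
}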
@@ -0,0 +1,188 @@
//! In-memory caches for session resolution and OAuth CSRF state.

use chrono::{DateTime, Utc};
use dashmap::DashMap;
use rand::Rng;
use sqlx::PgPool;
use std::sync::Arc;
use std::time::{Duration, Instant};

use crate::data::models::User;

/// Cached session entry with TTL.
#[derive(Debug, Clone)]
struct CachedSession {
user: User,
session_expires_at: DateTime<Utc>,
cached_at: Instant,
}

/// In-memory session cache backed by PostgreSQL.
///
/// Provides fast session resolution without a DB round-trip on every request.
/// Cache entries expire after a configurable TTL (default 5 minutes).
#[derive(Clone)]
pub struct SessionCache {
cache: Arc<DashMap<String, CachedSession>>,
db_pool: PgPool,
cache_ttl: Duration,
}

impl SessionCache {
/// Create a new session cache with a 5-minute default TTL.
pub fn new(db_pool: PgPool) -> Self {
Self {
cache: Arc::new(DashMap::new()),
db_pool,
cache_ttl: Duration::from_secs(5 * 60),
}
}

/// Resolve a session token to a [`User`], using the cache when possible.
///
/// On cache hit (entry present, not stale, session not expired), returns the
/// cached user immediately. On miss or stale entry, queries the database for
/// the session and user, populates the cache, and fire-and-forgets a
/// `touch_session` call to update `last_active_at`.
pub async fn get_user(&self, token: &str) -> Option<User> {
// Check cache first
if let Some(entry) = self.cache.get(token) {
let now_instant = Instant::now();
let now_utc = Utc::now();

let cache_fresh = entry.cached_at + self.cache_ttl > now_instant;
let session_valid = entry.session_expires_at > now_utc;

if cache_fresh && session_valid {
return Some(entry.user.clone());
}

// Stale or expired — drop the ref before removing
drop(entry);
self.cache.remove(token);
}

// Cache miss — query DB
let session = crate::data::sessions::get_session(&self.db_pool, token)
.await
.ok()
.flatten()?;

let user = crate::data::users::get_user(&self.db_pool, session.user_id)
.await
.ok()
.flatten()?;

self.cache.insert(
token.to_owned(),
CachedSession {
user: user.clone(),
session_expires_at: session.expires_at,
cached_at: Instant::now(),
},
);

// Fire-and-forget touch to update last_active_at
let pool = self.db_pool.clone();
let token_owned = token.to_owned();
tokio::spawn(async move {
if let Err(e) = crate::data::sessions::touch_session(&pool, &token_owned).await {
tracing::warn!(error = %e, "failed to touch session");
}
});

Some(user)
}

/// Remove a single session from the cache (e.g. on logout).
pub fn evict(&self, token: &str) {
self.cache.remove(token);
}

/// Remove all cached sessions belonging to a user.
pub fn evict_user(&self, discord_id: i64) {
self.cache
.retain(|_, entry| entry.user.discord_id != discord_id);
}

/// Delete expired sessions from the database and sweep the in-memory cache.
///
/// Returns the number of sessions deleted from the database.
#[allow(dead_code)] // Intended for periodic cleanup task (not yet wired)
pub async fn cleanup_expired(&self) -> anyhow::Result<u64> {
let deleted = crate::data::sessions::cleanup_expired(&self.db_pool).await?;

let now = Utc::now();
self.cache.retain(|_, entry| entry.session_expires_at > now);

Ok(deleted)
}
}

/// Data stored alongside each OAuth CSRF state token.
struct OAuthStateEntry {
created_at: Instant,
/// The browser origin that initiated the login flow, so the callback
/// can reconstruct the exact redirect_uri Discord expects.
origin: String,
}

/// Ephemeral store for OAuth CSRF state tokens.
///
/// Tokens are stored with creation time and expire after a configurable TTL.
/// Each token is single-use: validation consumes it.
#[derive(Clone)]
pub struct OAuthStateStore {
states: Arc<DashMap<String, OAuthStateEntry>>,
ttl: Duration,
}

impl Default for OAuthStateStore {
fn default() -> Self {
Self::new()
}
}

impl OAuthStateStore {
/// Create a new store with a 10-minute TTL.
pub fn new() -> Self {
Self {
states: Arc::new(DashMap::new()),
ttl: Duration::from_secs(10 * 60),
}
}

/// Generate a random 16-byte hex CSRF token, store it with the given
/// origin, and return the token.
pub fn generate(&self, origin: String) -> String {
let bytes: [u8; 16] = rand::rng().random();
let token: String = bytes.iter().map(|b| format!("{b:02x}")).collect();
self.states.insert(
token.clone(),
OAuthStateEntry {
created_at: Instant::now(),
origin,
},
);
token
}

/// Validate and consume a CSRF token. Returns the stored origin if the
/// token was present and not expired.
pub fn validate(&self, state: &str) -> Option<String> {
let (_, entry) = self.states.remove(state)?;
if entry.created_at.elapsed() < self.ttl {
Some(entry.origin)
} else {
None
}
}

/// Remove all expired entries from the store.
#[allow(dead_code)] // Intended for periodic cleanup task (not yet wired)
pub fn cleanup(&self) {
let ttl = self.ttl;
self.states
.retain(|_, entry| entry.created_at.elapsed() < ttl);
}
}
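How the two stores compose in the OAuth flow, as a hedged sketch: the real handlers live in src/web/auth.rs (not in this diff), and the `oauth_states` field name on `AppState` is assumed for illustration; only `session_cache` is attested by the extractors.

// Login: mint a single-use CSRF token bound to the caller's origin and
// send the user to Discord with it as the `state` parameter.
fn login_sketch(state: &AppState, origin: String) -> String {
    let csrf = state.oauth_states.generate(origin); // `oauth_states` field is assumed
    format!("https://discord.com/oauth2/authorize?state={csrf}") // other params elided
}

// Callback: consuming the token both checks CSRF and recovers the origin
// needed to rebuild the exact redirect_uri. A second call with the same
// token returns None, so replays fail.
fn callback_sketch(state: &AppState, csrf: &str) -> Option<String> {
    state.oauth_states.validate(csrf)
}

// Every later request resolves its cookie through the cache; only a miss
// or a stale entry touches PostgreSQL.
async fn resolve_sketch(state: &AppState, session_token: &str) -> bool {
    state.session_cache.get_user(session_token).await.is_some()
}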
@@ -210,3 +210,116 @@ async fn test_batch_upsert_unique_constraint_crn_term(pool: PgPool) {
assert_eq!(rows[1].0, "202520");
assert_eq!(rows[1].1, 10);
}

#[sqlx::test]
async fn test_batch_upsert_creates_audit_and_metric_entries(pool: PgPool) {
// Insert initial data — should NOT create audits/metrics (it's a fresh insert)
let initial = vec![helpers::make_course(
"50001",
"202510",
"CS",
"3443",
"App Programming",
10,
35,
0,
5,
)];
batch_upsert_courses(&initial, &pool).await.unwrap();

let (audit_count,): (i64,) = sqlx::query_as("SELECT COUNT(*) FROM course_audits")
.fetch_one(&pool)
.await
.unwrap();
assert_eq!(
audit_count, 0,
"initial insert should not create audit entries"
);

let (metric_count,): (i64,) = sqlx::query_as("SELECT COUNT(*) FROM course_metrics")
.fetch_one(&pool)
.await
.unwrap();
assert_eq!(
metric_count, 0,
"initial insert should not create metric entries"
);

// Update enrollment and wait_count
let updated = vec![helpers::make_course(
"50001",
"202510",
"CS",
"3443",
"App Programming",
20,
35,
2,
5,
)];
batch_upsert_courses(&updated, &pool).await.unwrap();

// Should have audit entries for enrollment and wait_count changes
let (audit_count,): (i64,) = sqlx::query_as("SELECT COUNT(*) FROM course_audits")
.fetch_one(&pool)
.await
.unwrap();
assert!(
audit_count >= 2,
"should have audit entries for enrollment and wait_count changes, got {audit_count}"
);

// Should have exactly 1 metric entry
let (metric_count,): (i64,) = sqlx::query_as("SELECT COUNT(*) FROM course_metrics")
.fetch_one(&pool)
.await
.unwrap();
assert_eq!(metric_count, 1, "should have 1 metric snapshot");

// Verify metric values
let (enrollment, wait_count, seats): (i32, i32, i32) = sqlx::query_as(
"SELECT enrollment, wait_count, seats_available FROM course_metrics LIMIT 1",
)
.fetch_one(&pool)
.await
.unwrap();
assert_eq!(enrollment, 20);
assert_eq!(wait_count, 2);
assert_eq!(seats, 15); // 35 - 20
}

#[sqlx::test]
async fn test_batch_upsert_no_change_no_audit(pool: PgPool) {
// Insert then re-insert identical data — should produce zero audits/metrics
let course = vec![helpers::make_course(
"60001",
"202510",
"CS",
"1083",
"Intro to CS",
25,
30,
0,
5,
)];
batch_upsert_courses(&course, &pool).await.unwrap();
batch_upsert_courses(&course, &pool).await.unwrap();

let (audit_count,): (i64,) = sqlx::query_as("SELECT COUNT(*) FROM course_audits")
.fetch_one(&pool)
.await
.unwrap();
assert_eq!(
audit_count, 0,
"identical re-upsert should not create audit entries"
);

let (metric_count,): (i64,) = sqlx::query_as("SELECT COUNT(*) FROM course_metrics")
.fetch_one(&pool)
.await
.unwrap();
assert_eq!(
metric_count, 0,
"identical re-upsert should not create metric entries"
);
}

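The three tests above pin down the contract: a fresh insert writes no audits or metrics, a changed row writes both, and an identical re-upsert writes nothing. A read-compare-write sketch of that shape follows; it is hedged, since the actual `batch_upsert_courses` in the data layer (not shown here) may well do this in a single SQL statement instead:

use sqlx::PgPool;

// Illustrative single-row version; column set reduced to enrollment.
async fn upsert_sketch(pool: &PgPool, crn: &str, term: &str, enrollment: i32) -> anyhow::Result<()> {
    let old: Option<(i32, i32)> =
        sqlx::query_as("SELECT id, enrollment FROM courses WHERE crn = $1 AND term_code = $2")
            .bind(crn)
            .bind(term)
            .fetch_optional(pool)
            .await?;

    match old {
        // Fresh insert: the row only, no audit/metric (first test above)
        None => { /* INSERT INTO courses ... */ }
        // Real change: update, plus course_audits rows per changed field
        // and one course_metrics snapshot (second test above)
        Some((_id, old_enrollment)) if old_enrollment != enrollment => {
            /* UPDATE courses ...; INSERT INTO course_audits ...; INSERT INTO course_metrics ... */
        }
        // Identical re-upsert: nothing written (third test above)
        Some(_) => {}
    }
    Ok(())
}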
@@ -241,7 +241,7 @@ async fn unlock_and_increment_retry_exhausted(pool: PgPool) {
json!({"subject": "CS"}),
ScrapePriority::Medium,
true,
2, // retry_count
3, // retry_count (already used all 3 retries)
3, // max_retries
)
.await;
@@ -251,7 +251,7 @@ async fn unlock_and_increment_retry_exhausted(pool: PgPool) {
.unwrap();
assert!(
!has_retries,
"should NOT have retries remaining (2→3, max=3)"
"should NOT have retries remaining (3→4, max=3)"
);

let (retry_count,): (i32,) =
@@ -260,7 +260,7 @@ async fn unlock_and_increment_retry_exhausted(pool: PgPool) {
.fetch_one(&pool)
.await
.unwrap();
assert_eq!(retry_count, 3);
assert_eq!(retry_count, 4);
}

#[sqlx::test]
@@ -346,7 +346,7 @@ async fn find_existing_payloads_returns_matching(pool: PgPool) {
}

#[sqlx::test]
async fn find_existing_payloads_ignores_locked(pool: PgPool) {
async fn find_existing_payloads_includes_locked(pool: PgPool) {
let payload = json!({"subject": "CS"});

helpers::insert_scrape_job(
@@ -365,7 +365,10 @@ async fn find_existing_payloads_ignores_locked(pool: PgPool) {
.await
.unwrap();

assert!(existing.is_empty(), "locked jobs should be ignored");
assert!(
existing.contains(&payload.to_string()),
"locked jobs should be included in deduplication"
);
}

#[sqlx::test]

@@ -0,0 +1,148 @@
#!/usr/bin/env bun
/**
* Pre-compress static assets with maximum compression levels.
* Run after `bun run build`.
*
* Generates .gz, .br, .zst variants for compressible files ≥ MIN_SIZE bytes.
* These are embedded alongside originals by rust-embed and served via
* content negotiation in src/web/assets.rs.
*/
import { readdir, stat, readFile, writeFile } from "fs/promises";
import { join, extname } from "path";
import { gzipSync, brotliCompressSync, constants } from "zlib";
import { $ } from "bun";

// Must match COMPRESSION_MIN_SIZE in src/web/encoding.rs
const MIN_SIZE = 512;

const COMPRESSIBLE_EXTENSIONS = new Set([
".js",
".css",
".html",
".json",
".svg",
".txt",
".xml",
".map",
]);

// Check if zstd CLI is available
let hasZstd = false;
try {
await $`which zstd`.quiet();
hasZstd = true;
} catch {
console.warn("Warning: zstd not found, skipping .zst generation");
}

async function* walkDir(dir: string): AsyncGenerator<string> {
try {
const entries = await readdir(dir, { withFileTypes: true });
for (const entry of entries) {
const path = join(dir, entry.name);
if (entry.isDirectory()) {
yield* walkDir(path);
} else if (entry.isFile()) {
yield path;
}
}
} catch {
// Directory doesn't exist, skip
}
}

async function compressFile(path: string): Promise<void> {
const ext = extname(path);

if (!COMPRESSIBLE_EXTENSIONS.has(ext)) return;
if (path.endsWith(".br") || path.endsWith(".gz") || path.endsWith(".zst")) return;

const stats = await stat(path);
if (stats.size < MIN_SIZE) return;

// Skip if all compressed variants already exist. When the zstd CLI is
// unavailable its variant counts as present, so a missing .zst never
// blocks the skip (and never forces .br/.gz regeneration).
const variantsExist = await Promise.all([
stat(`${path}.br`).then(
() => true,
() => false
),
stat(`${path}.gz`).then(
() => true,
() => false
),
hasZstd
? stat(`${path}.zst`).then(
() => true,
() => false
)
: Promise.resolve(true),
]);

if (variantsExist.every(Boolean)) {
return;
}

const content = await readFile(path);
const originalSize = content.length;

// Brotli (maximum quality = 11)
const brContent = brotliCompressSync(content, {
params: {
[constants.BROTLI_PARAM_QUALITY]: 11,
},
});
await writeFile(`${path}.br`, brContent);

// Gzip (level 9)
const gzContent = gzipSync(content, { level: 9 });
await writeFile(`${path}.gz`, gzContent);

// Zstd (level 19 - maximum)
if (hasZstd) {
try {
await $`zstd -19 -q -f -o ${path}.zst ${path}`.quiet();
} catch (e) {
console.warn(`Warning: Failed to compress ${path} with zstd: ${e}`);
}
}

const brRatio = ((brContent.length / originalSize) * 100).toFixed(1);
const gzRatio = ((gzContent.length / originalSize) * 100).toFixed(1);
console.log(`Compressed: ${path} (br: ${brRatio}%, gz: ${gzRatio}%, ${originalSize} bytes)`);
}

async function main() {
console.log("Pre-compressing static assets...");

// Banner uses adapter-static with output in dist/
const dirs = ["dist"];
let scannedFiles = 0;
let compressedFiles = 0;

for (const dir of dirs) {
for await (const file of walkDir(dir)) {
const ext = extname(file);
scannedFiles++;

if (
COMPRESSIBLE_EXTENSIONS.has(ext) &&
!file.endsWith(".br") &&
!file.endsWith(".gz") &&
!file.endsWith(".zst")
) {
const stats = await stat(file);
if (stats.size >= MIN_SIZE) {
await compressFile(file);
compressedFiles++;
}
}
}
}

console.log(`Done! Scanned ${scannedFiles} files, compressed ${compressedFiles} files.`);
}

main().catch((e) => {
console.error("Compression failed:", e);
process.exit(1);
});
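The script's `MIN_SIZE` has to agree with the Rust side, per the comment at its top. The counterpart below is an assumed shape, since src/web/encoding.rs itself is not part of this diff; only the constant's name is attested (by that comment) and the extensions by `test_extensions` earlier:

// Assumed sketch of the serving-side contract in src/web/encoding.rs.
pub const COMPRESSION_MIN_SIZE: u64 = 512; // keep in sync with MIN_SIZE in compress.ts

// Pre-compressed siblings are looked up by appending the negotiated
// encoding's extension to the asset path, e.g. "app.js" -> "app.js.br".
fn variant_path(asset_path: &str, extension: &str) -> String {
    format!("{asset_path}{extension}")
}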
+1
-1
@@ -1,5 +1,5 @@
<!doctype html>
<html lang="en" class="no-transition">
<html lang="en">
<head>
<meta charset="utf-8" />
<meta name="viewport" content="width=device-width, initial-scale=1" />

@@ -7,6 +7,7 @@ import type {
ServiceInfo,
ServiceStatus,
StatusResponse,
User,
} from "$lib/bindings";

const API_BASE_URL = "/api";
@@ -34,6 +35,66 @@ export type SearchResponse = SearchResponseGenerated;
export type SortColumn = "course_code" | "title" | "instructor" | "time" | "seats";
export type SortDirection = "asc" | "desc";

export interface AdminStatus {
userCount: number;
sessionCount: number;
courseCount: number;
scrapeJobCount: number;
services: { name: string; status: string }[];
}

export interface ScrapeJob {
id: number;
targetType: string;
targetPayload: unknown;
priority: string;
executeAt: string;
createdAt: string;
lockedAt: string | null;
retryCount: number;
maxRetries: number;
}

export interface ScrapeJobsResponse {
jobs: ScrapeJob[];
}

export interface AuditLogEntry {
id: number;
courseId: number;
timestamp: string;
fieldChanged: string;
oldValue: string;
newValue: string;
}

export interface AuditLogResponse {
entries: AuditLogEntry[];
}

export interface MetricEntry {
id: number;
courseId: number;
timestamp: string;
enrollment: number;
waitCount: number;
seatsAvailable: number;
}

export interface MetricsResponse {
metrics: MetricEntry[];
count: number;
timestamp: string;
}

export interface MetricsParams {
course_id?: number;
term?: string;
crn?: string;
range?: "1h" | "6h" | "24h" | "7d" | "30d";
limit?: number;
}

export interface SearchParams {
term: string;
subjects?: string[];
@@ -96,6 +157,44 @@ export class BannerApiClient {
async getReference(category: string): Promise<ReferenceEntry[]> {
return this.request<ReferenceEntry[]>(`/reference/${encodeURIComponent(category)}`);
}

// Admin endpoints
async getAdminStatus(): Promise<AdminStatus> {
return this.request<AdminStatus>("/admin/status");
}

async getAdminUsers(): Promise<User[]> {
return this.request<User[]>("/admin/users");
}

async setUserAdmin(discordId: string, isAdmin: boolean): Promise<User> {
const response = await this.fetchFn(`${this.baseUrl}/admin/users/${discordId}/admin`, {
method: "PUT",
headers: { "Content-Type": "application/json" },
body: JSON.stringify({ is_admin: isAdmin }),
});
if (!response.ok) throw new Error(`API request failed: ${response.status}`);
return (await response.json()) as User;
}

async getAdminScrapeJobs(): Promise<ScrapeJobsResponse> {
return this.request<ScrapeJobsResponse>("/admin/scrape-jobs");
}

async getAdminAuditLog(): Promise<AuditLogResponse> {
return this.request<AuditLogResponse>("/admin/audit-log");
}

async getMetrics(params?: MetricsParams): Promise<MetricsResponse> {
const query = new URLSearchParams();
if (params?.course_id !== undefined) query.set("course_id", String(params.course_id));
if (params?.term) query.set("term", params.term);
if (params?.crn) query.set("crn", params.crn);
if (params?.range) query.set("range", params.range);
if (params?.limit !== undefined) query.set("limit", String(params.limit));
const qs = query.toString();
return this.request<MetricsResponse>(`/metrics${qs ? `?${qs}` : ""}`);
}
}

export const client = new BannerApiClient();

@@ -0,0 +1,55 @@
import type { User } from "$lib/bindings";

type AuthState =
| { mode: "loading" }
| { mode: "authenticated"; user: User }
| { mode: "unauthenticated" };

class AuthStore {
state = $state<AuthState>({ mode: "loading" });

get user(): User | null {
return this.state.mode === "authenticated" ? this.state.user : null;
}

get isAdmin(): boolean {
return this.user?.isAdmin ?? false;
}

get isLoading(): boolean {
return this.state.mode === "loading";
}

get isAuthenticated(): boolean {
return this.state.mode === "authenticated";
}

async init() {
try {
const response = await fetch("/api/auth/me");
if (response.ok) {
const user: User = await response.json();
this.state = { mode: "authenticated", user };
} else {
this.state = { mode: "unauthenticated" };
}
} catch {
this.state = { mode: "unauthenticated" };
}
}

login() {
window.location.href = "/api/auth/login";
}

async logout() {
try {
await fetch("/api/auth/logout", { method: "POST" });
} finally {
this.state = { mode: "unauthenticated" };
window.location.href = "/";
}
}
}

export const authStore = new AuthStore();
@@ -6,3 +6,4 @@ export type { SearchResponse } from "./SearchResponse";
export type { ServiceInfo } from "./ServiceInfo";
export type { ServiceStatus } from "./ServiceStatus";
export type { StatusResponse } from "./StatusResponse";
export type { User } from "./User";

@@ -7,13 +7,16 @@ import {
formatMeetingDaysLong,
isMeetingTimeTBA,
isTimeTBA,
ratingColor,
ratingStyle,
rmpUrl,
RMP_CONFIDENCE_THRESHOLD,
} from "$lib/course";
import { themeStore } from "$lib/stores/theme.svelte";
import { useClipboard } from "$lib/composables/useClipboard.svelte";
import { cn, tooltipContentClass } from "$lib/utils";
import { Tooltip } from "bits-ui";
import SimpleTooltip from "./SimpleTooltip.svelte";
import { Info, Copy, Check } from "@lucide/svelte";
import { Info, Copy, Check, Star, Triangle, ExternalLink } from "@lucide/svelte";

let { course }: { course: CourseResponse } = $props();

@@ -21,206 +24,283 @@ const clipboard = useClipboard();
</script>

<div class="bg-muted/60 p-5 text-sm border-b border-border">
<div class="grid grid-cols-1 sm:grid-cols-2 gap-5">
<!-- Instructors -->
<div>
<h4 class="text-sm text-foreground mb-2">
Instructors
</h4>
{#if course.instructors.length > 0}
<div class="flex flex-wrap gap-1.5">
{#each course.instructors as instructor}
<Tooltip.Root delayDuration={200}>
<Tooltip.Trigger>
<span
class="inline-flex items-center gap-1.5 text-sm font-medium bg-card border border-border rounded-md px-2.5 py-1 text-foreground hover:border-foreground/20 hover:bg-card/80 transition-colors"
>
{instructor.displayName}
{#if instructor.rmpRating != null}
{@const rating = instructor.rmpRating}
<span
class="text-[10px] font-semibold {ratingColor(rating)}"
>{rating.toFixed(1)}★</span>
{/if}
</span>
</Tooltip.Trigger>
<Tooltip.Content
sideOffset={6}
class={cn(tooltipContentClass, "px-3 py-2")}
>
<div class="space-y-1.5">
<div class="font-medium">{instructor.displayName}</div>
{#if instructor.isPrimary}
<div class="text-muted-foreground">Primary instructor</div>
{/if}
{#if instructor.rmpRating != null}
<div class="text-muted-foreground">
{instructor.rmpRating.toFixed(1)}/5 ({instructor.rmpNumRatings ?? 0} ratings)
</div>
{/if}
{#if instructor.email}
<button
onclick={(e) => clipboard.copy(instructor.email!, e)}
class="inline-flex items-center gap-1 text-muted-foreground hover:text-foreground transition-colors cursor-pointer"
>
{#if clipboard.copiedValue === instructor.email}
<Check class="size-3" />
<span>Copied!</span>
{:else}
<Copy class="size-3" />
<span>{instructor.email}</span>
{/if}
</button>
{/if}
<div class="grid grid-cols-1 sm:grid-cols-2 gap-5">
<!-- Instructors -->
<div>
<h4 class="text-sm text-foreground mb-2">Instructors</h4>
{#if course.instructors.length > 0}
<div class="flex flex-wrap gap-1.5">
{#each course.instructors as instructor}
<Tooltip.Root delayDuration={200}>
<Tooltip.Trigger>
<span
class="inline-flex items-center gap-1.5 text-sm font-medium bg-card border border-border rounded-md px-2.5 py-1 text-foreground hover:border-foreground/20 hover:bg-card/80 transition-colors"
>
{instructor.displayName}
{#if instructor.rmpRating != null}
{@const rating = instructor.rmpRating}
{@const lowConfidence =
(instructor.rmpNumRatings ?? 0) <
RMP_CONFIDENCE_THRESHOLD}
<span
class="text-[10px] font-semibold inline-flex items-center gap-0.5"
style={ratingStyle(
rating,
themeStore.isDark,
)}
>
{rating.toFixed(1)}
{#if lowConfidence}
<Triangle
class="size-2 fill-current"
/>
{:else}
<Star
class="size-2.5 fill-current"
/>
{/if}
</span>
{/if}
</span>
</Tooltip.Trigger>
<Tooltip.Content
sideOffset={6}
class={cn(tooltipContentClass, "px-3 py-2")}
>
<div class="space-y-1.5">
<div class="font-medium">
{instructor.displayName}
</div>
{#if instructor.isPrimary}
<div class="text-muted-foreground">
Primary instructor
</div>
{/if}
{#if instructor.rmpRating != null}
<div class="text-muted-foreground">
{instructor.rmpRating.toFixed(1)}/5
· {instructor.rmpNumRatings ?? 0} ratings
{#if (instructor.rmpNumRatings ?? 0) < RMP_CONFIDENCE_THRESHOLD}
(low)
{/if}
</div>
{/if}
{#if instructor.rmpLegacyId != null}
<a
href={rmpUrl(
instructor.rmpLegacyId,
)}
target="_blank"
rel="noopener"
class="inline-flex items-center gap-1 text-muted-foreground hover:text-foreground transition-colors"
>
<ExternalLink class="size-3" />
<span>View on RMP</span>
</a>
{/if}
{#if instructor.email}
<button
onclick={(e) =>
clipboard.copy(
instructor.email!,
e,
)}
class="inline-flex items-center gap-1 text-muted-foreground hover:text-foreground transition-colors cursor-pointer"
>
{#if clipboard.copiedValue === instructor.email}
<Check class="size-3" />
<span>Copied!</span>
{:else}
<Copy class="size-3" />
<span>{instructor.email}</span>
{/if}
</button>
{/if}
</div>
</Tooltip.Content>
</Tooltip.Root>
{/each}
</div>
</Tooltip.Content>
</Tooltip.Root>
{/each}
</div>
{:else}
<span class="text-muted-foreground italic">Staff</span>
{/if}
</div>

<!-- Meeting Times -->
<div>
<h4 class="text-sm text-foreground mb-2">
Meeting Times
</h4>
{#if course.meetingTimes.length > 0}
<ul class="space-y-2">
{#each course.meetingTimes as mt}
<li>
{#if isMeetingTimeTBA(mt) && isTimeTBA(mt)}
<span class="italic text-muted-foreground">TBA</span>
{:else}
<div class="flex items-baseline gap-1.5">
{#if !isMeetingTimeTBA(mt)}
<span class="font-medium text-foreground">
{formatMeetingDaysLong(mt)}
</span>
{/if}
{#if !isTimeTBA(mt)}
<span class="text-muted-foreground">
{formatTime(mt.begin_time)}–{formatTime(mt.end_time)}
</span>
{:else}
<span class="italic text-muted-foreground">Time TBA</span>
{/if}
</div>
{/if}
{#if mt.building || mt.room}
<div class="text-xs text-muted-foreground mt-0.5">
{mt.building_description ?? mt.building}{mt.room ? ` ${mt.room}` : ""}
</div>
{/if}
<div class="text-xs text-muted-foreground/70 mt-0.5">
{formatDate(mt.start_date)} – {formatDate(mt.end_date)}
</div>
</li>
{/each}
</ul>
{:else}
<span class="italic text-muted-foreground">TBA</span>
{/if}
</div>

<!-- Delivery -->
<div>
<h4 class="text-sm text-foreground mb-2">
<span class="inline-flex items-center gap-1">
Delivery
<SimpleTooltip text="How the course is taught: in-person, online, hybrid, etc." delay={150} passthrough>
<Info class="size-3 text-muted-foreground/50" />
</SimpleTooltip>
</span>
</h4>
<span class="text-foreground">
{course.instructionalMethod ?? "—"}
{#if course.campus}
<span class="text-muted-foreground"> · {course.campus}</span>
{/if}
</span>
</div>

<!-- Credits -->
<div>
<h4 class="text-sm text-foreground mb-2">
Credits
</h4>
<span class="text-foreground">{formatCreditHours(course)}</span>
</div>

<!-- Attributes -->
{#if course.attributes.length > 0}
<div>
<h4 class="text-sm text-foreground mb-2">
<span class="inline-flex items-center gap-1">
Attributes
<SimpleTooltip text="Course flags for degree requirements, core curriculum, or special designations" delay={150} passthrough>
<Info class="size-3 text-muted-foreground/50" />
</SimpleTooltip>
</span>
</h4>
<div class="flex flex-wrap gap-1.5">
{#each course.attributes as attr}
<SimpleTooltip text="Course attribute code" delay={150} passthrough>
<span
class="inline-flex text-xs font-medium bg-card border border-border rounded-md px-2 py-0.5 text-muted-foreground hover:text-foreground hover:border-foreground/20 transition-colors"
>
{attr}
</span>
</SimpleTooltip>
{/each}
</div>
</div>
{/if}

<!-- Cross-list -->
{#if course.crossList}
<div>
<h4 class="text-sm text-foreground mb-2">
<span class="inline-flex items-center gap-1">
Cross-list
<SimpleTooltip text="Cross-listed sections share enrollment across multiple course numbers. Students in any linked section attend the same class." delay={150} passthrough>
<Info class="size-3 text-muted-foreground/50" />
</SimpleTooltip>
</span>
</h4>
<Tooltip.Root delayDuration={150} disableHoverableContent>
<Tooltip.Trigger>
<span class="inline-flex items-center gap-1.5 text-foreground font-mono">
<span class="bg-card border border-border rounded-md px-2 py-0.5 text-xs font-medium">
{course.crossList}
</span>
{#if course.crossListCount != null && course.crossListCapacity != null}
<span class="text-muted-foreground text-xs">
{course.crossListCount}/{course.crossListCapacity}
</span>
{/if}
</span>
</Tooltip.Trigger>
<Tooltip.Content
sideOffset={6}
class={tooltipContentClass}
>
Group <span class="font-mono font-medium">{course.crossList}</span>
{#if course.crossListCount != null && course.crossListCapacity != null}
— {course.crossListCount} enrolled across {course.crossListCapacity} shared seats
{:else}
<span class="text-muted-foreground italic">Staff</span>
{/if}
</Tooltip.Content>
</Tooltip.Root>
</div>
{/if}
</div>

<!-- Waitlist -->
{#if course.waitCapacity > 0}
<div>
<h4 class="text-sm text-foreground mb-2">
Waitlist
</h4>
<span class="text-foreground">{course.waitCount} / {course.waitCapacity}</span>
</div>
{/if}
</div>
<!-- Meeting Times -->
<div>
<h4 class="text-sm text-foreground mb-2">Meeting Times</h4>
{#if course.meetingTimes.length > 0}
<ul class="space-y-2">
{#each course.meetingTimes as mt}
<li>
{#if isMeetingTimeTBA(mt) && isTimeTBA(mt)}
<span class="italic text-muted-foreground"
>TBA</span
>
{:else}
<div class="flex items-baseline gap-1.5">
{#if !isMeetingTimeTBA(mt)}
<span
class="font-medium text-foreground"
>
{formatMeetingDaysLong(mt)}
</span>
{/if}
{#if !isTimeTBA(mt)}
<span class="text-muted-foreground">
{formatTime(
mt.begin_time,
)}–{formatTime(mt.end_time)}
</span>
{:else}
<span
class="italic text-muted-foreground"
>Time TBA</span
>
{/if}
</div>
{/if}
{#if mt.building || mt.room}
<div
class="text-xs text-muted-foreground mt-0.5"
>
{mt.building_description ??
mt.building}{mt.room
? ` ${mt.room}`
: ""}
</div>
{/if}
<div
class="text-xs text-muted-foreground/70 mt-0.5"
>
{formatDate(mt.start_date)} – {formatDate(
mt.end_date,
)}
</div>
</li>
{/each}
</ul>
{:else}
<span class="italic text-muted-foreground">TBA</span>
{/if}
</div>

<!-- Delivery -->
<div>
<h4 class="text-sm text-foreground mb-2">
<span class="inline-flex items-center gap-1">
Delivery
<SimpleTooltip
text="How the course is taught: in-person, online, hybrid, etc."
delay={150}
passthrough
>
<Info class="size-3 text-muted-foreground/50" />
</SimpleTooltip>
</span>
</h4>
<span class="text-foreground">
{course.instructionalMethod ?? "—"}
{#if course.campus}
<span class="text-muted-foreground">
· {course.campus}
</span>
{/if}
</span>
</div>

<!-- Credits -->
<div>
<h4 class="text-sm text-foreground mb-2">Credits</h4>
<span class="text-foreground">{formatCreditHours(course)}</span>
</div>

<!-- Attributes -->
{#if course.attributes.length > 0}
<div>
<h4 class="text-sm text-foreground mb-2">
<span class="inline-flex items-center gap-1">
Attributes
<SimpleTooltip
text="Course flags for degree requirements, core curriculum, or special designations"
delay={150}
passthrough
>
<Info class="size-3 text-muted-foreground/50" />
</SimpleTooltip>
</span>
</h4>
<div class="flex flex-wrap gap-1.5">
{#each course.attributes as attr}
<SimpleTooltip
text="Course attribute code"
delay={150}
passthrough
>
<span
class="inline-flex text-xs font-medium bg-card border border-border rounded-md px-2 py-0.5 text-muted-foreground hover:text-foreground hover:border-foreground/20 transition-colors"
>
{attr}
</span>
</SimpleTooltip>
{/each}
</div>
</div>
{/if}

<!-- Cross-list -->
{#if course.crossList}
<div>
<h4 class="text-sm text-foreground mb-2">
<span class="inline-flex items-center gap-1">
Cross-list
<SimpleTooltip
text="Cross-listed sections share enrollment across multiple course numbers. Students in any linked section attend the same class."
delay={150}
passthrough
>
<Info class="size-3 text-muted-foreground/50" />
</SimpleTooltip>
</span>
</h4>
<Tooltip.Root delayDuration={150} disableHoverableContent>
<Tooltip.Trigger>
<span
class="inline-flex items-center gap-1.5 text-foreground font-mono"
>
<span
class="bg-card border border-border rounded-md px-2 py-0.5 text-xs font-medium"
>
{course.crossList}
</span>
{#if course.crossListCount != null && course.crossListCapacity != null}
<span class="text-muted-foreground text-xs">
{course.crossListCount}/{course.crossListCapacity}
</span>
{/if}
</span>
</Tooltip.Trigger>
<Tooltip.Content sideOffset={6} class={tooltipContentClass}>
Group <span class="font-mono font-medium"
>{course.crossList}</span
>
{#if course.crossListCount != null && course.crossListCapacity != null}
— {course.crossListCount} enrolled across {course.crossListCapacity}
shared seats
{/if}
</Tooltip.Content>
</Tooltip.Root>
</div>
{/if}

<!-- Waitlist -->
{#if course.waitCapacity > 0}
<div>
<h4 class="text-sm text-foreground mb-2">Waitlist</h4>
<span class="text-foreground"
>{course.waitCount} / {course.waitCapacity}</span
>
</div>
{/if}
</div>
</div>

@@ -15,8 +15,11 @@ import {
openSeats,
seatsColor,
seatsDotColor,
ratingColor,
ratingStyle,
rmpUrl,
RMP_CONFIDENCE_THRESHOLD,
} from "$lib/course";
import { themeStore } from "$lib/stores/theme.svelte";
import { useClipboard } from "$lib/composables/useClipboard.svelte";
import { useOverlayScrollbars } from "$lib/composables/useOverlayScrollbars.svelte";
import CourseDetail from "./CourseDetail.svelte";
@@ -31,8 +34,19 @@ import {
type VisibilityState,
type Updater,
} from "@tanstack/table-core";
import { ArrowUp, ArrowDown, ArrowUpDown, Columns3, Check, RotateCcw } from "@lucide/svelte";
import { DropdownMenu, ContextMenu } from "bits-ui";
import {
ArrowUp,
ArrowDown,
ArrowUpDown,
Columns3,
Check,
RotateCcw,
Star,
Triangle,
ExternalLink,
} from "@lucide/svelte";
import { DropdownMenu, ContextMenu, Tooltip } from "bits-ui";
import { cn, tooltipContentClass } from "$lib/utils";
import SimpleTooltip from "./SimpleTooltip.svelte";

let {
@@ -91,10 +105,16 @@ function primaryInstructorDisplay(course: CourseResponse): string {
return abbreviateInstructor(primary.displayName);
}

function primaryRating(course: CourseResponse): { rating: number; count: number } | null {
function primaryRating(
course: CourseResponse
): { rating: number; count: number; legacyId: number | null } | null {
const primary = getPrimaryInstructor(course.instructors);
if (!primary?.rmpRating) return null;
return { rating: primary.rmpRating, count: primary.rmpNumRatings ?? 0 };
return {
rating: primary.rmpRating,
count: primary.rmpNumRatings ?? 0,
legacyId: primary.rmpLegacyId ?? null,
};
}

function timeIsTBA(course: CourseResponse): boolean {
@@ -207,8 +227,7 @@ const table = createSvelteTable({
</GroupHeading>
{#each columns as col}
{@const id = col.id!}
{@const label =
typeof col.header === "string" ? col.header : id}
{@const label = typeof col.header === "string" ? col.header : id}
<CheckboxItem
checked={columnVisibility[id] !== false}
closeOnSelect={false}
@@ -269,12 +288,12 @@ const table = createSvelteTable({
transition:fly={{ duration: 150, y: -10 }}
>
{@render columnVisibilityGroup(
DropdownMenu.Group,
DropdownMenu.GroupHeading,
DropdownMenu.CheckboxItem,
DropdownMenu.Separator,
DropdownMenu.Item,
)}
DropdownMenu.Group,
DropdownMenu.GroupHeading,
DropdownMenu.CheckboxItem,
DropdownMenu.Separator,
DropdownMenu.Item,
)}
</div>
</div>
{/if}
@@ -379,12 +398,15 @@ const table = createSvelteTable({
</tr>
</tbody>
{:else}
<!-- No out: transition — Svelte outros break table layout (tbody loses positioning and overlaps) -->
{#each table.getRowModel().rows as row, i (row.id)}
{@const course = row.original}
<tbody
animate:flip={{ duration: 300 }}
in:fade={{ duration: 200, delay: Math.min(i * 20, 400) }}
out:fade={{ duration: 150 }}
in:fade={{
duration: 200,
delay: Math.min(i * 20, 400),
}}
>
<tr
class="border-b border-border cursor-pointer hover:bg-muted/50 transition-colors whitespace-nowrap {expandedCrn ===
@@ -405,9 +427,15 @@ const table = createSvelteTable({
e,
)}
onkeydown={(e) => {
if (e.key === "Enter" || e.key === " ") {
if (
e.key === "Enter" ||
e.key === " "
) {
e.preventDefault();
clipboard.copy(course.crn, e);
clipboard.copy(
course.crn,
e,
);
}
}}
aria-label="Copy CRN {course.crn} to clipboard"
@@ -468,9 +496,12 @@ const table = createSvelteTable({
{@const primary = getPrimaryInstructor(
course.instructors,
)}
{@const display = primaryInstructorDisplay(course)}
{@const commaIdx = display.indexOf(", ")}
{@const ratingData = primaryRating(course)}
{@const display =
primaryInstructorDisplay(course)}
{@const commaIdx =
display.indexOf(", ")}
{@const ratingData =
primaryRating(course)}
<td class="py-2 px-2 whitespace-nowrap">
{#if display === "Staff"}
<span
@@ -486,38 +517,98 @@ const table = createSvelteTable({
passthrough
>
{#if commaIdx !== -1}
<span>{display.slice(0, commaIdx)},
<span class="text-muted-foreground">{display.slice(commaIdx + 1)}</span
></span>
<span
>{display.slice(
0,
commaIdx,
)},
<span
class="text-muted-foreground"
>{display.slice(
commaIdx +
1,
)}</span
></span
>
{:else}
<span>{display}</span>
{/if}
</SimpleTooltip>
{/if}
{#if ratingData}
<SimpleTooltip
text="{ratingData.rating.toFixed(
1,
)}/5 ({ratingData.count} ratings on RateMyProfessors)"
delay={150}
side="bottom"
passthrough
{@const lowConfidence =
ratingData.count <
RMP_CONFIDENCE_THRESHOLD}
<Tooltip.Root
delayDuration={150}
>
<span
class="ml-1 text-xs font-medium {ratingColor(
ratingData.rating,
)}"
>{ratingData.rating.toFixed(
1,
)}★</span
<Tooltip.Trigger>
<span
class="ml-1 text-xs font-medium inline-flex items-center gap-0.5"
style={ratingStyle(
ratingData.rating,
themeStore.isDark,
)}
>
{ratingData.rating.toFixed(
1,
)}
{#if lowConfidence}
<Triangle
class="size-2 fill-current"
/>
{:else}
<Star
class="size-2.5 fill-current"
/>
{/if}
</span>
</Tooltip.Trigger>
<Tooltip.Content
side="bottom"
sideOffset={6}
class={cn(
tooltipContentClass,
"px-2.5 py-1.5",
)}
>
</SimpleTooltip>
<span
class="inline-flex items-center gap-1.5 text-xs"
>
{ratingData.rating.toFixed(
1,
)}/5 · {ratingData.count}
ratings
{#if (ratingData.count ?? 0) < RMP_CONFIDENCE_THRESHOLD}
(low)
{/if}
{#if ratingData.legacyId != null}
·
<a
href={rmpUrl(
ratingData.legacyId,
)}
target="_blank"
rel="noopener"
class="inline-flex items-center gap-0.5 text-muted-foreground hover:text-foreground transition-colors"
>
RMP
<ExternalLink
class="size-3"
/>
</a>
{/if}
</span>
</Tooltip.Content>
</Tooltip.Root>
{/if}
</td>
{:else if colId === "time"}
<td class="py-2 px-2 whitespace-nowrap">
<SimpleTooltip
text={formatMeetingTimesTooltip(course.meetingTimes)}
text={formatMeetingTimesTooltip(
course.meetingTimes,
)}
passthrough
>
{#if timeIsTBA(course)}
@@ -566,10 +657,14 @@ const table = createSvelteTable({
</SimpleTooltip>
</td>
{:else if colId === "location"}
{@const concern = getDeliveryConcern(course)}
{@const accentColor = concernAccentColor(concern)}
{@const locTooltip = formatLocationTooltip(course)}
{@const locDisplay = formatLocationDisplay(course)}
{@const concern =
getDeliveryConcern(course)}
{@const accentColor =
concernAccentColor(concern)}
{@const locTooltip =
|
||||
formatLocationTooltip(course)}
|
||||
{@const locDisplay =
|
||||
formatLocationDisplay(course)}
|
||||
<td class="py-2 px-2 whitespace-nowrap">
|
||||
{#if locTooltip}
|
||||
<SimpleTooltip
|
||||
@@ -579,18 +674,26 @@ const table = createSvelteTable({
|
||||
>
|
||||
<span
|
||||
class="text-muted-foreground"
|
||||
class:pl-2={accentColor !== null}
|
||||
style:border-left={accentColor ? `2px solid ${accentColor}` : undefined}
|
||||
class:pl-2={accentColor !==
|
||||
null}
|
||||
style:border-left={accentColor
|
||||
? `2px solid ${accentColor}`
|
||||
: undefined}
|
||||
>
|
||||
{locDisplay ?? "—"}
|
||||
</span>
|
||||
</SimpleTooltip>
|
||||
{:else if locDisplay}
|
||||
<span class="text-muted-foreground">
|
||||
<span
|
||||
class="text-muted-foreground"
|
||||
>
|
||||
{locDisplay}
|
||||
</span>
|
||||
{:else}
|
||||
<span class="text-xs text-muted-foreground/50">—</span>
|
||||
<span
|
||||
class="text-xs text-muted-foreground/50"
|
||||
>—</span
|
||||
>
|
||||
{/if}
|
||||
</td>
|
||||
{:else if colId === "seats"}
|
||||
@@ -668,12 +771,12 @@ const table = createSvelteTable({
|
||||
out:fade={{ duration: 100 }}
|
||||
>
|
||||
{@render columnVisibilityGroup(
|
||||
ContextMenu.Group,
|
||||
ContextMenu.GroupHeading,
|
||||
ContextMenu.CheckboxItem,
|
||||
ContextMenu.Separator,
|
||||
ContextMenu.Item,
|
||||
)}
|
||||
ContextMenu.Group,
|
||||
ContextMenu.GroupHeading,
|
||||
ContextMenu.CheckboxItem,
|
||||
ContextMenu.Separator,
|
||||
ContextMenu.Item,
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
{/if}
|
||||
|
||||
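Note: the hunks above replace the old ratingColor text badge with a Tooltip.Root trigger whose icon encodes sample size — a Star when the instructor has at least RMP_CONFIDENCE_THRESHOLD ratings, a Triangle below it — plus an outbound link when a legacy ID is present. A minimal TypeScript sketch of that decision path (ratingCellModel is a hypothetical name; the helpers are the ones added to $lib/utils later in this diff):

// Sketch only — mirrors the rating-cell branches above, assuming the shapes in this diff.
import { RMP_CONFIDENCE_THRESHOLD, rmpUrl } from "$lib/utils";

type RatingData = { rating: number; count: number; legacyId: number | null };

function ratingCellModel(data: RatingData) {
  return {
    label: data.rating.toFixed(1), // e.g. "4.2"
    icon: data.count < RMP_CONFIDENCE_THRESHOLD ? "triangle" : "star", // low-confidence marker
    link: data.legacyId != null ? rmpUrl(data.legacyId) : null, // RMP profile, when known
  };
}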
@@ -0,0 +1,47 @@
<script lang="ts">
import { navigationStore } from "$lib/stores/navigation.svelte";
import type { Snippet } from "svelte";
import { cubicOut } from "svelte/easing";
import type { TransitionConfig } from "svelte/transition";

let { key, children }: { key: string; children: Snippet } = $props();

const DURATION = 250;
const OFFSET = 40;

function inTransition(_node: HTMLElement): TransitionConfig {
const dir = navigationStore.direction;
if (dir === "fade") {
return { duration: DURATION, easing: cubicOut, css: (t: number) => `opacity: ${t}` };
}
const x = dir === "right" ? OFFSET : -OFFSET;
return {
duration: DURATION,
easing: cubicOut,
css: (t: number) => `opacity: ${t}; transform: translateX(${(1 - t) * x}px)`,
};
}

function outTransition(_node: HTMLElement): TransitionConfig {
const dir = navigationStore.direction;
// Outgoing element is positioned absolutely so incoming flows normally
const base = "position: absolute; top: 0; left: 0; width: 100%";
if (dir === "fade") {
return { duration: DURATION, easing: cubicOut, css: (t: number) => `${base}; opacity: ${t}` };
}
const x = dir === "right" ? -OFFSET : OFFSET;
return {
duration: DURATION,
easing: cubicOut,
css: (t: number) => `${base}; opacity: ${t}; transform: translateX(${(1 - t) * x}px)`,
};
}
</script>

<div class="relative overflow-hidden">
{#key key}
<div in:inTransition out:outTransition class="w-full">
{@render children()}
</div>
{/key}
</div>
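Note: in the new PageTransition component, the css callback's (1 - t) * x term places the element x px from rest at t = 0 and at rest at t = 1; flipping the sign of x between in and out makes both elements travel the same screen direction. A worked trace of the endpoints for dir === "right" (restating the code above, not new behavior):

// incoming: t = 0 → translateX(40px), t = 1 → translateX(0)    (slides in from the right)
// outgoing: t = 1 → translateX(0),    t = 0 → translateX(-40px) (slides out to the left)
const incoming = (t: number) => `opacity: ${t}; transform: translateX(${(1 - t) * 40}px)`;
const outgoing = (t: number) => `opacity: ${t}; transform: translateX(${(1 - t) * -40}px)`;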
@@ -1,7 +1,19 @@
<script lang="ts">
import { Select } from "bits-ui";
import { ChevronUp, ChevronDown } from "@lucide/svelte";
import { fly } from "svelte/transition";
import type { Action } from "svelte/action";

const slideIn: Action<HTMLElement, number> = (node, direction) => {
if (direction !== 0) {
node.animate(
[
{ transform: `translateX(${direction * 20}px)`, opacity: 0 },
{ transform: "translateX(0)", opacity: 1 },
],
{ duration: 200, easing: "ease-out" }
);
}
};

let {
totalCount,
@@ -21,17 +33,8 @@ const start = $derived(offset + 1);
const end = $derived(Math.min(offset + limit, totalCount));

// Track direction for slide animation
let prevPage = $state(1);
let direction = $state(0);

$effect(() => {
const page = currentPage;
if (page !== prevPage) {
direction = page > prevPage ? 1 : -1;
prevPage = page;
}
});

// 5 page slots: current-2, current-1, current, current+1, current+2
const pageSlots = $derived([-2, -1, 0, 1, 2].map((delta) => currentPage + delta));

@@ -40,6 +43,7 @@ function isSlotVisible(page: number): boolean {
}

function goToPage(page: number) {
direction = page > currentPage ? 1 : -1;
onPageChange((page - 1) * limit);
}

@@ -86,7 +90,7 @@ const selectValue = $derived(String(currentPage));
focus-visible:ring-2 focus-visible:ring-ring focus-visible:ring-offset-2 focus-visible:ring-offset-background"
aria-label="Page {currentPage} of {totalPages}, click to select page"
>
<span in:fly={{ x: direction * 20, duration: 200 }}>{currentPage}</span>
<span use:slideIn={direction}>{currentPage}</span>
<ChevronUp class="size-3 text-muted-foreground" />
</Select.Trigger>
<Select.Portal>
@@ -140,8 +144,8 @@ const selectValue = $derived(String(currentPage));
aria-hidden={!isSlotVisible(page)}
tabindex={isSlotVisible(page) ? 0 : -1}
disabled={!isSlotVisible(page)}
in:fly={{ x: direction * 20, duration: 200 }}
>
use:slideIn={direction}
>
{page}
</button>
{/if}

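Note: the Pagination rework swaps Svelte's in:fly transition for a use:slideIn action driven by the Web Animations API, which fires imperatively when an element mounts instead of participating in the transition lifecycle. As written it animates only on mount; if replaying on a later direction change were wanted, Svelte's Action contract offers an update hook. A hypothetical extension (not in the diff):

// Hypothetical variant: re-run the slide whenever the `direction` parameter changes.
import type { Action } from "svelte/action";

const slideIn: Action<HTMLElement, number> = (node, direction) => {
  const play = (dir: number) => {
    if (dir === 0) return;
    node.animate(
      [
        { transform: `translateX(${dir * 20}px)`, opacity: 0 },
        { transform: "translateX(0)", opacity: 1 },
      ],
      { duration: 200, easing: "ease-out" }
    );
  };
  play(direction);
  return {
    update(dir: number) {
      play(dir); // replay on parameter change
    },
  };
};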
@@ -0,0 +1,34 @@
<script lang="ts">
export interface SearchMeta {
totalCount: number;
durationMs: number;
timestamp: Date;
}

let { meta }: { meta: SearchMeta | null } = $props();

let formattedTime = $derived(
meta
? meta.timestamp.toLocaleTimeString(undefined, {
hour: "2-digit",
minute: "2-digit",
second: "2-digit",
})
: ""
);

let countLabel = $derived(meta ? meta.totalCount.toLocaleString() : "");
let resultNoun = $derived(meta ? (meta.totalCount !== 1 ? "results" : "result") : "");
let durationLabel = $derived(meta ? `${Math.round(meta.durationMs)}ms` : "");
</script>

{#if meta}
<p
class="pl-1 text-xs"
title="Last searched at {formattedTime}"
>
<span class="text-muted-foreground/70">{countLabel}</span>
<span class="text-muted-foreground/35">{resultNoun} in</span>
<span class="text-muted-foreground/70">{durationLabel}</span>
</p>
{/if}
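Note: a worked example of the derived labels in the new SearchStatus component, traced from the code above:

// Given: meta = { totalCount: 1234, durationMs: 87.3, timestamp: new Date() }
// countLabel    → "1,234"   (toLocaleString, grouping depends on locale)
// resultNoun    → "results" (totalCount !== 1)
// durationLabel → "87ms"    (Math.round(87.3))
// Rendered text: "1,234 results in 87ms", with the search time in the title attribute.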
@@ -193,6 +193,7 @@ describe("getPrimaryInstructor", () => {
isPrimary: false,
rmpRating: null,
rmpNumRatings: null,
rmpLegacyId: null,
},
{
bannerId: "2",
@@ -201,6 +202,7 @@ describe("getPrimaryInstructor", () => {
isPrimary: true,
rmpRating: null,
rmpNumRatings: null,
rmpLegacyId: null,
},
];
expect(getPrimaryInstructor(instructors)?.displayName).toBe("B");
@@ -214,6 +216,7 @@ describe("getPrimaryInstructor", () => {
isPrimary: false,
rmpRating: null,
rmpNumRatings: null,
rmpLegacyId: null,
},
];
expect(getPrimaryInstructor(instructors)?.displayName).toBe("A");

+43
-5
@@ -362,11 +362,49 @@ export function seatsDotColor(course: CourseResponse): string {
return "bg-green-500";
}

/** Text color class for a RateMyProfessors rating */
export function ratingColor(rating: number): string {
if (rating >= 4.0) return "text-status-green";
if (rating >= 3.0) return "text-yellow-500";
return "text-status-red";
/** Minimum number of ratings needed to consider RMP data reliable */
export const RMP_CONFIDENCE_THRESHOLD = 7;

/** RMP professor page URL from legacy ID */
export function rmpUrl(legacyId: number): string {
return `https://www.ratemyprofessors.com/professor/${legacyId}`;
}

/**
* Smooth OKLCH color + text-shadow for a RateMyProfessors rating.
*
* Three-stop gradient interpolated in OKLCH:
* 1.0 → red, 3.0 → amber, 5.0 → green
* with separate light/dark mode tuning.
*/
export function ratingStyle(rating: number, isDark: boolean): string {
const clamped = Math.max(1, Math.min(5, rating));

// OKLCH stops: [lightness, chroma, hue]
const stops: { light: [number, number, number]; dark: [number, number, number] }[] = [
{ light: [0.63, 0.2, 25], dark: [0.7, 0.19, 25] }, // 1.0 – red
{ light: [0.7, 0.16, 85], dark: [0.78, 0.15, 85] }, // 3.0 – amber
{ light: [0.65, 0.2, 145], dark: [0.72, 0.19, 145] }, // 5.0 – green
];

let t: number;
let fromIdx: number;
if (clamped <= 3) {
t = (clamped - 1) / 2;
fromIdx = 0;
} else {
t = (clamped - 3) / 2;
fromIdx = 1;
}

const from = isDark ? stops[fromIdx].dark : stops[fromIdx].light;
const to = isDark ? stops[fromIdx + 1].dark : stops[fromIdx + 1].light;

const l = from[0] + (to[0] - from[0]) * t;
const c = from[1] + (to[1] - from[1]) * t;
const h = from[2] + (to[2] - from[2]) * t;

return `color: oklch(${l.toFixed(3)} ${c.toFixed(3)} ${h.toFixed(1)}); text-shadow: 0 0 4px oklch(${l.toFixed(3)} ${c.toFixed(3)} ${h.toFixed(1)} / 0.3);`;
}

/** Format credit hours display */

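Note: a worked trace of ratingStyle for one concrete input, following the stops and interpolation above (light mode):

// rating 4.0 → clamped = 4, t = (4 - 3) / 2 = 0.5, fromIdx = 1,
// interpolating amber [0.7, 0.16, 85] toward green [0.65, 0.2, 145]:
//   l = 0.7  + (0.65 - 0.7)  * 0.5 = 0.675
//   c = 0.16 + (0.2  - 0.16) * 0.5 = 0.180
//   h = 85   + (145  - 85)   * 0.5 = 115.0
ratingStyle(4.0, false);
// → "color: oklch(0.675 0.180 115.0); text-shadow: 0 0 4px oklch(0.675 0.180 115.0 / 0.3);"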
@@ -0,0 +1,45 @@
import { beforeNavigate } from "$app/navigation";

export type NavDirection = "left" | "right" | "fade";

/** Admin sidebar order — indexes determine slide direction for same-depth siblings */
const ADMIN_NAV_ORDER = ["/admin", "/admin/scrape-jobs", "/admin/audit-log", "/admin/users"];

function getDepth(path: string): number {
return path.replace(/\/$/, "").split("/").filter(Boolean).length;
}

function getAdminIndex(path: string): number {
return ADMIN_NAV_ORDER.indexOf(path);
}

function computeDirection(from: string, to: string): NavDirection {
const fromDepth = getDepth(from);
const toDepth = getDepth(to);

if (toDepth > fromDepth) return "right";
if (toDepth < fromDepth) return "left";

// Same depth — use admin sidebar ordering if both are admin routes
const fromIdx = getAdminIndex(from);
const toIdx = getAdminIndex(to);
if (fromIdx >= 0 && toIdx >= 0) {
return toIdx > fromIdx ? "right" : "left";
}

return "fade";
}

class NavigationStore {
direction: NavDirection = $state("fade");
}

export const navigationStore = new NavigationStore();

/** Call once from root layout to start tracking navigation direction */
export function initNavigation() {
beforeNavigate(({ from, to }) => {
if (!from?.url || !to?.url) return;
navigationStore.direction = computeDirection(from.url.pathname, to.url.pathname);
});
}
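Note: a few concrete cases, traced directly through computeDirection above:

computeDirection("/admin", "/admin/users");                 // depth 1 → 2          → "right"
computeDirection("/admin/users", "/admin");                 // depth 2 → 1          → "left"
computeDirection("/admin/scrape-jobs", "/admin/audit-log"); // same depth, idx 1 → 2 → "right"
computeDirection("/foo", "/bar");                           // same depth, not admin → "fade"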
@@ -1,14 +1,19 @@
<script lang="ts">
import "overlayscrollbars/overlayscrollbars.css";
import "./layout.css";
import { onMount } from "svelte";
import { Tooltip } from "bits-ui";
import { page } from "$app/state";
import PageTransition from "$lib/components/PageTransition.svelte";
import ThemeToggle from "$lib/components/ThemeToggle.svelte";
import { themeStore } from "$lib/stores/theme.svelte";
import { useOverlayScrollbars } from "$lib/composables/useOverlayScrollbars.svelte";
import { initNavigation } from "$lib/stores/navigation.svelte";
import { themeStore } from "$lib/stores/theme.svelte";
import { Tooltip } from "bits-ui";
import { onMount } from "svelte";

let { children } = $props();

initNavigation();

useOverlayScrollbars(() => document.body, {
scrollbars: {
autoHide: "leave",
@@ -18,10 +23,6 @@ useOverlayScrollbars(() => document.body, {

onMount(() => {
themeStore.init();

requestAnimationFrame(() => {
document.documentElement.classList.remove("no-transition");
});
});
</script>

@@ -30,5 +31,7 @@ onMount(() => {
<ThemeToggle />
</div>

{@render children()}
<PageTransition key={page.url.pathname}>
{@render children()}
</PageTransition>
</Tooltip.Provider>

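Note: initNavigation() runs at the top level of the script rather than inside onMount, because SvelteKit's beforeNavigate must be registered during component initialization:

initNavigation();      // ✓ during component initialization — beforeNavigate can register
onMount(() => {
  // initNavigation(); // ✗ too late — onMount runs after init, outside the window beforeNavigate needs
});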
@@ -10,6 +10,7 @@ import {
} from "$lib/api";
import type { SortingState } from "@tanstack/table-core";
import SearchFilters from "$lib/components/SearchFilters.svelte";
import SearchStatus, { type SearchMeta } from "$lib/components/SearchStatus.svelte";
import CourseTable from "$lib/components/CourseTable.svelte";
import Pagination from "$lib/components/Pagination.svelte";
import Footer from "$lib/components/Footer.svelte";
@@ -56,6 +57,7 @@ let subjectMap: Record<string, string> = $derived(
Object.fromEntries(subjects.map((s) => [s.code, s.description]))
);
let searchResult: SearchResponse | null = $state(null);
let searchMeta: SearchMeta | null = $state(null);
let loading = $state(false);
let error = $state<string | null>(null);

@@ -169,6 +171,7 @@ async function performSearch(
if (sortDir && sortBy) params.set("sort_dir", sortDir);
goto(`?${params.toString()}`, { replaceState: true, noScroll: true, keepFocus: true });

const t0 = performance.now();
try {
searchResult = await client.searchCourses({
term,
@@ -180,6 +183,11 @@ async function performSearch(
sort_by: sortBy,
sort_dir: sortDir,
});
searchMeta = {
totalCount: searchResult.totalCount,
durationMs: performance.now() - t0,
timestamp: new Date(),
};
} catch (e) {
error = e instanceof Error ? e.message : "Search failed";
} finally {
@@ -199,7 +207,10 @@ function handlePageChange(newOffset: number) {
<h1 class="text-2xl font-semibold text-foreground">UTSA Course Search</h1>
</div>

<!-- Filters -->
<!-- Search status + Filters -->
<div class="flex flex-col gap-1.5">
<SearchStatus meta={searchMeta} />
<!-- Filters -->
<SearchFilters
terms={data.terms}
{subjects}
@@ -208,6 +219,7 @@ function handlePageChange(newOffset: number) {
bind:query
bind:openOnly
/>
</div>

<!-- Results -->
{#if error}

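Note: durationMs here is wall-clock latency as the client observes it (network, server work, and response parsing included), not server-side query time, and searchMeta is only written on success, so a failed request leaves the previous status line in place. The pattern, distilled:

const t0 = performance.now();
const result = await client.searchCourses(params); // throws on failure → meta untouched
const meta = {
  totalCount: result.totalCount,
  durationMs: performance.now() - t0, // client-observed latency
  timestamp: new Date(),
};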
@@ -0,0 +1,78 @@
<script lang="ts">
import { goto } from "$app/navigation";
import { page } from "$app/state";
import { authStore } from "$lib/auth.svelte";
import PageTransition from "$lib/components/PageTransition.svelte";
import { ClipboardList, FileText, LayoutDashboard, LogOut, Users } from "@lucide/svelte";
import { onMount } from "svelte";

let { children } = $props();

onMount(async () => {
if (authStore.isLoading) {
await authStore.init();
}
});

$effect(() => {
if (authStore.state.mode === "unauthenticated") {
goto("/login");
}
});

const navItems = [
{ href: "/admin", label: "Dashboard", icon: LayoutDashboard },
{ href: "/admin/scrape-jobs", label: "Scrape Jobs", icon: ClipboardList },
{ href: "/admin/audit-log", label: "Audit Log", icon: FileText },
{ href: "/admin/users", label: "Users", icon: Users },
];
</script>

{#if authStore.isLoading}
<div class="flex min-h-screen items-center justify-center">
<p class="text-muted-foreground">Loading...</p>
</div>
{:else if !authStore.isAdmin}
<div class="flex min-h-screen items-center justify-center">
<div class="text-center">
<h1 class="text-2xl font-bold">Access Denied</h1>
<p class="text-muted-foreground mt-2">You do not have admin access.</p>
</div>
</div>
{:else}
<div class="flex min-h-screen">
<aside class="border-border bg-card flex w-64 flex-col border-r">
<div class="border-border border-b p-4">
<h2 class="text-lg font-semibold">Admin</h2>
{#if authStore.user}
<p class="text-muted-foreground text-sm">{authStore.user.discordUsername}</p>
{/if}
</div>
<nav class="flex-1 space-y-1 p-2">
{#each navItems as item}
<a
href={item.href}
class="hover:bg-accent flex items-center gap-3 rounded-lg px-3 py-2 text-sm font-medium transition-colors"
>
<item.icon size={18} />
{item.label}
</a>
{/each}
</nav>
<div class="border-border border-t p-2">
<button
onclick={() => authStore.logout()}
class="hover:bg-destructive/10 text-destructive flex w-full items-center gap-3 rounded-lg px-3 py-2 text-sm font-medium transition-colors"
>
<LogOut size={18} />
Sign Out
</button>
</div>
</aside>
<main class="flex-1 overflow-auto p-6">
<PageTransition key={page.url.pathname}>
{@render children()}
</PageTransition>
</main>
</div>
{/if}
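Note: navItems mirrors ADMIN_NAV_ORDER from the navigation store, which is what drives the sidebar's slide direction; the two lists must stay in the same order, and a guard like this is UI-level only (the admin API endpoints presumably enforce authorization server-side). A hypothetical tweak, not in the diff, that would keep the lists from drifting apart:

// Hypothetical: would require exporting ADMIN_NAV_ORDER, which the diff keeps module-private.
import { ADMIN_NAV_ORDER } from "$lib/stores/navigation.svelte";
const labels = ["Dashboard", "Scrape Jobs", "Audit Log", "Users"];
const navItems = ADMIN_NAV_ORDER.map((href, i) => ({ href, label: labels[i] }));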
@@ -0,0 +1,54 @@
<script lang="ts">
import { onMount } from "svelte";
import { client, type AdminStatus } from "$lib/api";

let status = $state<AdminStatus | null>(null);
let error = $state<string | null>(null);

onMount(async () => {
try {
status = await client.getAdminStatus();
} catch (e) {
error = e instanceof Error ? e.message : "Failed to load status";
}
});
</script>

<h1 class="mb-6 text-2xl font-bold">Dashboard</h1>

{#if error}
<p class="text-destructive">{error}</p>
{:else if !status}
<p class="text-muted-foreground">Loading...</p>
{:else}
<div class="grid grid-cols-2 gap-4 lg:grid-cols-4">
<div class="bg-card border-border rounded-lg border p-4">
<p class="text-muted-foreground text-sm">Users</p>
<p class="text-3xl font-bold">{status.userCount}</p>
</div>
<div class="bg-card border-border rounded-lg border p-4">
<p class="text-muted-foreground text-sm">Active Sessions</p>
<p class="text-3xl font-bold">{status.sessionCount}</p>
</div>
<div class="bg-card border-border rounded-lg border p-4">
<p class="text-muted-foreground text-sm">Courses</p>
<p class="text-3xl font-bold">{status.courseCount}</p>
</div>
<div class="bg-card border-border rounded-lg border p-4">
<p class="text-muted-foreground text-sm">Scrape Jobs</p>
<p class="text-3xl font-bold">{status.scrapeJobCount}</p>
</div>
</div>

<h2 class="mt-8 mb-4 text-lg font-semibold">Services</h2>
<div class="bg-card border-border rounded-lg border">
{#each status.services as service}
<div class="border-border flex items-center justify-between border-b px-4 py-3 last:border-b-0">
<span class="font-medium">{service.name}</span>
<span class="rounded-full bg-green-100 px-2 py-0.5 text-xs font-medium text-green-800 dark:bg-green-900 dark:text-green-200">
{service.status}
</span>
</div>
{/each}
</div>
{/if}
@@ -0,0 +1,50 @@
<script lang="ts">
import { onMount } from "svelte";
import { client, type AuditLogResponse } from "$lib/api";

let data = $state<AuditLogResponse | null>(null);
let error = $state<string | null>(null);

onMount(async () => {
try {
data = await client.getAdminAuditLog();
} catch (e) {
error = e instanceof Error ? e.message : "Failed to load audit log";
}
});
</script>

<h1 class="mb-6 text-2xl font-bold">Audit Log</h1>

{#if error}
<p class="text-destructive">{error}</p>
{:else if !data}
<p class="text-muted-foreground">Loading...</p>
{:else if data.entries.length === 0}
<p class="text-muted-foreground">No audit log entries found.</p>
{:else}
<div class="bg-card border-border overflow-hidden rounded-lg border">
<table class="w-full text-sm">
<thead>
<tr class="border-border border-b">
<th class="px-4 py-3 text-left font-medium">Time</th>
<th class="px-4 py-3 text-left font-medium">Course ID</th>
<th class="px-4 py-3 text-left font-medium">Field</th>
<th class="px-4 py-3 text-left font-medium">Old Value</th>
<th class="px-4 py-3 text-left font-medium">New Value</th>
</tr>
</thead>
<tbody>
{#each data.entries as entry}
<tr class="border-border border-b last:border-b-0">
<td class="px-4 py-3">{new Date(entry.timestamp).toLocaleString()}</td>
<td class="px-4 py-3">{entry.courseId}</td>
<td class="px-4 py-3 font-mono text-xs">{entry.fieldChanged}</td>
<td class="px-4 py-3">{entry.oldValue}</td>
<td class="px-4 py-3">{entry.newValue}</td>
</tr>
{/each}
</tbody>
</table>
</div>
{/if}
@@ -0,0 +1,52 @@
<script lang="ts">
import { onMount } from "svelte";
import { client, type ScrapeJobsResponse } from "$lib/api";

let data = $state<ScrapeJobsResponse | null>(null);
let error = $state<string | null>(null);

onMount(async () => {
try {
data = await client.getAdminScrapeJobs();
} catch (e) {
error = e instanceof Error ? e.message : "Failed to load scrape jobs";
}
});
</script>

<h1 class="mb-6 text-2xl font-bold">Scrape Jobs</h1>

{#if error}
<p class="text-destructive">{error}</p>
{:else if !data}
<p class="text-muted-foreground">Loading...</p>
{:else if data.jobs.length === 0}
<p class="text-muted-foreground">No scrape jobs found.</p>
{:else}
<div class="bg-card border-border overflow-hidden rounded-lg border">
<table class="w-full text-sm">
<thead>
<tr class="border-border border-b">
<th class="px-4 py-3 text-left font-medium">ID</th>
<th class="px-4 py-3 text-left font-medium">Type</th>
<th class="px-4 py-3 text-left font-medium">Priority</th>
<th class="px-4 py-3 text-left font-medium">Execute At</th>
<th class="px-4 py-3 text-left font-medium">Retries</th>
<th class="px-4 py-3 text-left font-medium">Status</th>
</tr>
</thead>
<tbody>
{#each data.jobs as job}
<tr class="border-border border-b last:border-b-0">
<td class="px-4 py-3">{job.id}</td>
<td class="px-4 py-3">{job.targetType}</td>
<td class="px-4 py-3">{job.priority}</td>
<td class="px-4 py-3">{new Date(job.executeAt).toLocaleString()}</td>
<td class="px-4 py-3">{job.retryCount}/{job.maxRetries}</td>
<td class="px-4 py-3">{job.lockedAt ? "Locked" : "Pending"}</td>
</tr>
{/each}
</tbody>
</table>
</div>
{/if}
@@ -0,0 +1,92 @@
<script lang="ts">
import { onMount } from "svelte";
import { client } from "$lib/api";
import type { User } from "$lib/bindings";
import { Shield, ShieldOff } from "@lucide/svelte";

let users = $state<User[]>([]);
let error = $state<string | null>(null);
let updating = $state<string | null>(null);

onMount(async () => {
try {
users = await client.getAdminUsers();
} catch (e) {
error = e instanceof Error ? e.message : "Failed to load users";
}
});

async function toggleAdmin(user: User) {
updating = user.discordId;
try {
const updated = await client.setUserAdmin(user.discordId, !user.isAdmin);
users = users.map((u) => (u.discordId === updated.discordId ? updated : u));
} catch (e) {
error = e instanceof Error ? e.message : "Failed to update user";
} finally {
updating = null;
}
}
</script>

<h1 class="mb-6 text-2xl font-bold">Users</h1>

{#if error}
<p class="text-destructive mb-4">{error}</p>
{/if}

{#if users.length === 0 && !error}
<p class="text-muted-foreground">Loading...</p>
{:else}
<div class="bg-card border-border overflow-hidden rounded-lg border">
<table class="w-full text-sm">
<thead>
<tr class="border-border border-b">
<th class="px-4 py-3 text-left font-medium">Username</th>
<th class="px-4 py-3 text-left font-medium">Discord ID</th>
<th class="px-4 py-3 text-left font-medium">Admin</th>
<th class="px-4 py-3 text-left font-medium">Actions</th>
</tr>
</thead>
<tbody>
{#each users as user}
<tr class="border-border border-b last:border-b-0">
<td class="flex items-center gap-2 px-4 py-3">
{#if user.discordAvatarHash}
<img
src="https://cdn.discordapp.com/avatars/{user.discordId}/{user.discordAvatarHash}.png?size=32"
alt=""
class="h-6 w-6 rounded-full"
/>
{/if}
{user.discordUsername}
</td>
<td class="text-muted-foreground px-4 py-3 font-mono text-xs">{user.discordId}</td>
<td class="px-4 py-3">
{#if user.isAdmin}
<span class="rounded-full bg-blue-100 px-2 py-0.5 text-xs font-medium text-blue-800 dark:bg-blue-900 dark:text-blue-200">Admin</span>
{:else}
<span class="text-muted-foreground text-xs">User</span>
{/if}
</td>
<td class="px-4 py-3">
<button
onclick={() => toggleAdmin(user)}
disabled={updating === user.discordId}
class="hover:bg-accent inline-flex items-center gap-1 rounded px-2 py-1 text-xs transition-colors disabled:opacity-50"
>
{#if user.isAdmin}
<ShieldOff size={14} />
Remove Admin
{:else}
<Shield size={14} />
Make Admin
{/if}
</button>
</td>
</tr>
{/each}
</tbody>
</table>
</div>
{/if}
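Note: the four admin pages above share the same fetch-on-mount shape (nullable $state, an error string, onMount with try/catch). A hypothetical consolidation into a small runes helper — names and file layout are assumptions, not part of this diff; runes in a .svelte.ts module are standard Svelte 5:

// hypothetical: $lib/composables/loadOnMount.svelte.ts
import { onMount } from "svelte";

export function loadOnMount<T>(fetcher: () => Promise<T>, failMessage = "Request failed") {
  let data = $state<T | null>(null);
  let error = $state<string | null>(null);
  onMount(async () => {
    try {
      data = await fetcher();
    } catch (e) {
      error = e instanceof Error ? e.message : failMessage;
    }
  });
  // getters keep the returned object reactive at the call site
  return {
    get data() { return data; },
    get error() { return error; },
  };
}

// usage sketch: const jobs = loadOnMount(() => client.getAdminScrapeJobs());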
@@ -57,11 +57,8 @@
--font-sans: "Inter Variable", ui-sans-serif, system-ui, sans-serif;
}

* {
border-color: var(--border);
}

body {
border-color: var(--border);
background-color: var(--background);
color: var(--foreground);
font-family: var(--font-sans);
@@ -129,11 +126,6 @@ input[type="checkbox"]:checked::before {
clip-path: polygon(14% 44%, 0 65%, 50% 100%, 100% 16%, 80% 0%, 43% 62%);
}

html:not(.no-transition) body,
html:not(.no-transition) body * {
transition: background-color 300ms, color 300ms, border-color 300ms, fill 300ms;
}

/* View Transitions API - disable default cross-fade so JS can animate clip-path */
::view-transition-old(root),
::view-transition-new(root) {

@@ -0,0 +1,18 @@
<script lang="ts">
import { authStore } from "$lib/auth.svelte";
import { LogIn } from "@lucide/svelte";
</script>

<div class="flex min-h-screen items-center justify-center">
<div class="w-full max-w-sm space-y-6 text-center">
<h1 class="text-3xl font-bold">Sign In</h1>
<p class="text-muted-foreground">Sign in with your Discord account to continue.</p>
<button
onclick={() => authStore.login()}
class="inline-flex w-full items-center justify-center gap-2 rounded-lg bg-[#5865F2] px-6 py-3 text-lg font-semibold text-white transition-colors hover:bg-[#4752C4]"
>
<LogIn size={20} />
Sign in with Discord
</button>
</div>
</div>