35 Commits

Author SHA1 Message Date
474d519b9d feat: add auto-format recovery when formatting is sole check failure
Enhances the check recipe to detect when formatting is the only failing
check while its peers pass, automatically apply the formatters, and then
re-verify. Supports both Rust (rustfmt + cargo-check) and web (biome +
svelte-check) domains. Results are now displayed eagerly as checks
complete instead of in their original order.
2026-01-30 16:01:56 -06:00
fb27bdc119 feat: implement session expiry extension and 401 recovery 2026-01-30 16:01:17 -06:00
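A minimal sketch of the sliding-expiry half of this change, assuming the user_sessions columns from the migration further below; the function name and the 30-day window are illustrative, not taken from the actual code. The 401-recovery half would live client-side: on a 401, re-authenticate once and retry the request.
async fn touch_session(pool: &sqlx::PgPool, session_id: &str) -> sqlx::Result<()> {
    // Extend the session whenever it is used; the concrete window is assumed.
    sqlx::query(
        "UPDATE user_sessions \
         SET last_active_at = now(), expires_at = now() + interval '30 days' \
         WHERE id = $1",
    )
    .bind(session_id)
    .execute(pool)
    .await?;
    Ok(())
}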
669dec0235 feat: add timeline API with schedule-aware enrollment aggregation
Implements a POST /api/timeline endpoint that aggregates enrollment by
subject over 15-minute slots, filtering courses by their actual meeting
times. Includes an ISR-style schedule cache with hourly background refresh
using the stale-while-revalidate pattern, database indexes for efficient
queries, and a frontend refactor that discovers subjects dynamically from
the API.
2026-01-30 10:56:11 -06:00
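A minimal sketch of the stale-while-revalidate refresh described above, assuming a Tokio runtime; the type and method names are illustrative, not the project's actual schedule-cache API.
use std::sync::Arc;
use std::time::{Duration, Instant};
use tokio::sync::RwLock;

const TTL: Duration = Duration::from_secs(60 * 60); // hourly background refresh

struct ScheduleCache<T> {
    data: Arc<RwLock<(Instant, T)>>,
}

impl<T: Clone + Send + Sync + 'static> ScheduleCache<T> {
    /// Serve the cached value immediately; if it has gone stale, kick off a
    /// background refresh so a later caller sees fresh data.
    async fn get(&self, refresh: impl FnOnce() -> T + Send + 'static) -> T {
        let (fetched_at, value) = {
            let guard = self.data.read().await;
            (guard.0, guard.1.clone())
        };
        if fetched_at.elapsed() > TTL {
            let data = Arc::clone(&self.data);
            tokio::spawn(async move {
                let fresh = refresh(); // in practice, an async database query
                *data.write().await = (Instant::now(), fresh);
            });
        }
        value
    }
}
Callers always get an immediate answer; at worst it is one refresh interval stale, which is the trade stale-while-revalidate makes.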
67ba63339a fix: instructor/course mismatching, build order-independent map for association 2026-01-30 09:53:03 -06:00
7b8c11ac13 feat: add calendar export endpoints for ICS and Google Calendar 2026-01-30 04:08:16 -06:00
a767a3f8be feat: add root error page handling 2026-01-30 04:07:53 -06:00
8ce398c0e0 feat: add scraper analytics dashboard with timeseries and subject monitoring 2026-01-30 03:46:48 -06:00
9fed651641 feat: add adaptive scheduling and scraper admin endpoints
Subjects now have individually calculated scrape intervals based on their
historical change ratio, consecutive zero-change runs, failure counts, and
the current time of day. This reduces unnecessary scrapes during inactive
periods while maintaining responsiveness during peak hours. Includes four
new admin endpoints for monitoring scraper health and scheduling decisions.
2026-01-30 02:14:37 -06:00
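The weighting below is an illustrative sketch of the four signals listed above; the multipliers, caps, and peak-hours window are assumptions, not the shipped values.
use std::time::Duration;

fn adaptive_interval(
    base: Duration,
    change_ratio: f64,       // fraction of recent scrapes that found changes
    zero_change_streak: u32, // consecutive runs with no changes
    failures: u32,           // recent consecutive failures
    local_hour: u32,         // 0..24
) -> Duration {
    // Rarely-changing subjects stretch toward longer intervals.
    let mut factor = 1.0 / change_ratio.clamp(0.05, 1.0);
    // Back off exponentially after repeated no-change runs (capped).
    factor *= 1.5_f64.powi(zero_change_streak.min(4) as i32);
    // Failures push the interval out rather than hammering a flaky API.
    factor *= 1.0 + 0.5 * f64::from(failures);
    // Scrape more aggressively during assumed peak hours.
    if (8..22).contains(&local_hour) {
        factor *= 0.5;
    }
    base.mul_f64(factor.clamp(1.0, 24.0))
}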
75a99c10ea feat: add scrape job result persistence for effectiveness tracking 2026-01-30 01:37:41 -06:00
857ceabcca fix: prevent ts-rs serde warnings 2026-01-30 01:36:57 -06:00
203c337cf0 feat: add confidence-based RMP matching with manual review workflow
Replace simple auto-matching with scored candidate generation that
considers department overlap, name uniqueness, and rating volume.
Candidates scoring above 0.85 are auto-accepted; others require admin approval.
2026-01-30 01:31:11 -06:00
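A sketch of the scoring shape; the 0.85 threshold comes from the commit message, while the weights and volume curve are assumptions.
const AUTO_ACCEPT_THRESHOLD: f64 = 0.85;

fn score_candidate(dept_overlap: f64, name_uniqueness: f64, rating_count: u32) -> f64 {
    // Saturating volume signal: more ratings is more trustworthy, with
    // diminishing returns past roughly ten ratings.
    let volume = (f64::from(rating_count).ln_1p() / 10.0_f64.ln_1p()).min(1.0);
    0.5 * dept_overlap + 0.35 * name_uniqueness + 0.15 * volume
}

fn disposition(score: f64) -> &'static str {
    // Mirrors the pending/auto statuses used by the migrations further below.
    if score > AUTO_ACCEPT_THRESHOLD { "auto" } else { "pending" }
}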
39ba131322 feat: add mobile touch controls with gesture detection 2026-01-29 23:56:45 -06:00
2fad9c969d fix: avoid title on icon, use simpler href-based login redirect 2026-01-29 23:44:05 -06:00
47b4f3315f feat: enhance login page with FAQ section and improved styling 2026-01-29 23:40:48 -06:00
fa28f13a45 feat: add interactive timeline visualization for class times
Implements a canvas-based timeline view with D3 scales showing class
counts across subjects. Features drag-to-pan, mouse wheel zoom, subject
filtering, hover tooltips, and smooth animations. The timeline auto-follows
the current time and supports keyboard navigation.
2026-01-29 23:19:39 -06:00
5a6ea1e53a fix: handle backend startup delays with retry logic in auth 2026-01-29 20:04:50 -06:00
ba2b2fc50a fix: increase Banner API timeouts to handle slow responses 2026-01-29 19:49:57 -06:00
cfe098d193 feat: add websocket support for real-time scrape job monitoring 2026-01-29 19:31:04 -06:00
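A minimal axum WebSocket handler of the kind this feature implies (the "ws" feature flag enabling it shows up in the Cargo.toml diff below); the route path and echo payload are placeholders.
use axum::{
    extract::ws::{WebSocket, WebSocketUpgrade},
    response::IntoResponse,
    routing::get,
    Router,
};

async fn ws_handler(ws: WebSocketUpgrade) -> impl IntoResponse {
    ws.on_upgrade(handle_socket)
}

async fn handle_socket(mut socket: WebSocket) {
    // A real handler would push scrape-job status updates; echo stands in here.
    while let Some(Ok(msg)) = socket.recv().await {
        if socket.send(msg).await.is_err() {
            break; // client disconnected
        }
    }
}

fn router() -> Router {
    Router::new().route("/api/ws/jobs", get(ws_handler)) // hypothetical path
}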
d861888e5e fix: proper centering for login page content, avoid unnecessary scrollbar 2026-01-29 18:05:50 -06:00
f0645d82d9 refactor: persist audit log state in module scope for cross-navigation caching 2026-01-29 17:54:27 -06:00
7a1cd2a39b refactor: centralize number formatting with locale-aware utility 2026-01-29 17:53:38 -06:00
d2985f98ce feat: enhance audit log with smart diffing, conditional request caching, auto refreshing 2026-01-29 17:35:11 -06:00
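Of those three improvements, conditional request caching is the most mechanical; here is the server-side check, assuming plain ETag comparison (the helper name is hypothetical, the headers are standard HTTP).
use axum::http::{header, HeaderMap, StatusCode};

/// Return 304 Not Modified when the client already holds the current ETag.
fn maybe_not_modified(headers: &HeaderMap, current_etag: &str) -> Option<StatusCode> {
    let sent = headers.get(header::IF_NONE_MATCH)?.to_str().ok()?;
    (sent == current_etag).then_some(StatusCode::NOT_MODIFIED)
}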
b58eb840f3 refactor: consolidate navigation with top nav bar and route groups 2026-01-29 17:01:47 -06:00
2bc6fbdf30 feat: implement relative time feedback and improve tooltip customization 2026-01-29 16:44:06 -06:00
e41b970d6e fix: implement i64 serialization for JavaScript compatibility, fixing avatar URL display 2026-01-29 15:51:19 -06:00
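The underlying issue: Discord snowflake IDs exceed JavaScript's 2^53 safe-integer range, so emitting them as JSON numbers silently corrupts the IDs that avatar URLs are built from. A sketch of the string-serialization fix; the module and struct names are illustrative.
mod i64_as_string {
    use serde::Serializer;

    pub fn serialize<S: Serializer>(v: &i64, s: S) -> Result<S::Ok, S::Error> {
        s.serialize_str(&v.to_string())
    }
}

#[derive(serde::Serialize)]
struct UserDto {
    // Emitted as "123456789012345678" instead of a lossy JS number.
    #[serde(with = "i64_as_string")]
    discord_id: i64,
}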
e880126281 feat: implement worker timeout protection and crash recovery for job queue
Add JOB_TIMEOUT constant to fail stuck jobs after 5 minutes, and
LOCK_EXPIRY to reclaim abandoned locks after 10 minutes. Introduce
force_unlock_all to recover orphaned jobs at startup. Fix a retry-limit
off-by-one error and update deduplication to include locked jobs.
2026-01-29 15:50:09 -06:00
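A sketch of the recovery pieces named above; the constants come straight from the commit message, while the column name and SQL are assumptions (the scrape_jobs table itself appears in a migration further below).
use std::time::Duration;

const JOB_TIMEOUT: Duration = Duration::from_secs(5 * 60); // fail stuck jobs
const LOCK_EXPIRY: Duration = Duration::from_secs(10 * 60); // reclaim abandoned locks

/// Startup recovery: clear every lock so jobs orphaned by a crash become
/// claimable again. The locked_at column is hypothetical.
async fn force_unlock_all(pool: &sqlx::PgPool) -> sqlx::Result<u64> {
    let res = sqlx::query("UPDATE scrape_jobs SET locked_at = NULL WHERE locked_at IS NOT NULL")
        .execute(pool)
        .await?;
    Ok(res.rows_affected())
}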
db0ec1e69d feat: add rmp profile links and confidence-aware rating display 2026-01-29 15:43:39 -06:00
2947face06 fix: run frontend build first with -e embed flag in Justfile 2026-01-29 15:00:13 -06:00
36bcc27d7f feat: setup smart page transitions, fix laggy theme-aware element transitions 2026-01-29 14:59:47 -06:00
9e403e5043 refactor: modernize Justfile commands and simplify service management 2026-01-29 14:33:16 -06:00
98a6d978c6 feat: implement course change auditing with time-series metrics endpoint 2026-01-29 14:19:36 -06:00
4deeef2f00 feat: optimize asset delivery with build-time compression and encoding negotiation 2026-01-29 13:56:10 -06:00
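The negotiation half of this change, sketched: pick the best encoding the client advertises and serve the matching pre-compressed variant built in the Dockerfile below. The preference order is an assumption, and q-values are ignored for brevity.
/// Choose among pre-built .zst/.br/.gz asset variants based on Accept-Encoding.
fn pick_encoding(accept_encoding: &str) -> Option<&'static str> {
    let accepted: Vec<&str> = accept_encoding
        .split(',')
        .map(|e| e.split(';').next().unwrap_or("").trim())
        .collect();
    ["zstd", "br", "gzip"] // assumed preference: best compression first
        .into_iter()
        .find(|enc| accepted.contains(enc))
}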
e008ee5a12 feat: show search duration and result count feedback 2026-01-29 13:15:25 -06:00
a007ccb6a2 fix: remove out:fade transition from CourseTable 2026-01-29 13:08:45 -06:00
527cbebc6a feat: implement user authentication system with admin dashboard 2026-01-29 12:56:51 -06:00
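The OAuth2 entry point, sketched against Discord's standard authorize endpoint with the identify scope; state generation and the code-for-token exchange on the callback are omitted. url::Url is already a dependency (see the Cargo.toml diff below).
fn authorize_url(client_id: &str, redirect_uri: &str, state: &str) -> String {
    url::Url::parse_with_params(
        "https://discord.com/oauth2/authorize",
        &[
            ("client_id", client_id),
            ("response_type", "code"),
            ("redirect_uri", redirect_uri),
            ("scope", "identify"),
            ("state", state),
        ],
    )
    .expect("static base URL parses")
    .to_string()
}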
107 changed files with 14461 additions and 1076 deletions
Vendored
+1 -1
@@ -2,5 +2,5 @@
/target
# ts-rs bindings
web/src/lib/bindings/*.ts
web/src/lib/bindings/**/*.ts
!web/src/lib/bindings/index.ts
Generated
+190 -3
@@ -26,6 +26,21 @@ dependencies = [
"memchr",
]
[[package]]
name = "alloc-no-stdlib"
version = "2.0.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cc7bb162ec39d46ab1ca8c77bf72e890535becd1751bb45f64c597edb4c8c6b3"
[[package]]
name = "alloc-stdlib"
version = "0.2.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "94fb8275041c72129eb51b7d0322c29b8387a0386127718b096429201a5d6ece"
dependencies = [
"alloc-no-stdlib",
]
[[package]]
name = "allocator-api2"
version = "0.2.21"
@@ -106,6 +121,19 @@ dependencies = [
"serde",
]
[[package]]
name = "async-compression"
version = "0.4.33"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "93c1f86859c1af3d514fa19e8323147ff10ea98684e6c7b307912509f50e67b2"
dependencies = [
"compression-codecs",
"compression-core",
"futures-core",
"pin-project-lite",
"tokio",
]
[[package]]
name = "async-trait"
version = "0.1.89"
@@ -154,6 +182,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8b52af3cb4058c895d37317bb27508dccc8e5f2d39454016b297bf4a400597b8"
dependencies = [
"axum-core",
"base64 0.22.1",
"bytes",
"form_urlencoded",
"futures-util",
@@ -172,8 +201,10 @@ dependencies = [
"serde_json",
"serde_path_to_error",
"serde_urlencoded",
"sha1",
"sync_wrapper 1.0.2",
"tokio",
"tokio-tungstenite 0.28.0",
"tower",
"tower-layer",
"tower-service",
@@ -241,7 +272,7 @@ dependencies = [
[[package]]
name = "banner"
version = "0.3.4"
version = "0.5.0"
dependencies = [
"anyhow",
"async-trait",
@@ -249,6 +280,7 @@ dependencies = [
"axum-extra",
"bitflags 2.9.4",
"chrono",
"chrono-tz",
"clap",
"compile-time",
"cookie",
@@ -284,6 +316,7 @@ dependencies = [
"tracing-subscriber",
"ts-rs",
"url",
"urlencoding",
"yansi",
]
@@ -329,6 +362,27 @@ dependencies = [
"generic-array",
]
[[package]]
name = "brotli"
version = "8.0.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4bd8b9603c7aa97359dbd97ecf258968c95f3adddd6db2f7e7a5bef101c84560"
dependencies = [
"alloc-no-stdlib",
"alloc-stdlib",
"brotli-decompressor",
]
[[package]]
name = "brotli-decompressor"
version = "5.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "874bb8112abecc98cbd6d81ea4fa7e94fb9449648c93cc89aa40c81c24d7de03"
dependencies = [
"alloc-no-stdlib",
"alloc-stdlib",
]
[[package]]
name = "bstr"
version = "1.12.0"
@@ -406,6 +460,8 @@ version = "1.2.34"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "42bc4aea80032b7bf409b0bc7ccad88853858911b7713a8062fdc0623867bedc"
dependencies = [
"jobserver",
"libc",
"shlex",
]
@@ -429,6 +485,16 @@ dependencies = [
"windows-link 0.2.0",
]
[[package]]
name = "chrono-tz"
version = "0.10.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a6139a8597ed92cf816dfb33f5dd6cf0bb93a6adc938f11039f371bc5bcd26c3"
dependencies = [
"chrono",
"phf",
]
[[package]]
name = "clap"
version = "4.5.47"
@@ -500,6 +566,26 @@ dependencies = [
"time",
]
[[package]]
name = "compression-codecs"
version = "0.4.32"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "680dc087785c5230f8e8843e2e57ac7c1c90488b6a91b88caa265410568f441b"
dependencies = [
"brotli",
"compression-core",
"flate2",
"memchr",
"zstd",
"zstd-safe",
]
[[package]]
name = "compression-core"
version = "0.4.31"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "75984efb6ed102a0d42db99afb6c1948f0380d1d91808d5529916e6c08b49d8d"
[[package]]
name = "concurrent-queue"
version = "2.5.0"
@@ -1641,6 +1727,16 @@ version = "1.0.15"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4a5f13b858c8d314ee3e8f639011f7ccefe71f97f96e50151fb991f267928e2c"
[[package]]
name = "jobserver"
version = "0.1.34"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9afb3de4395d6b3e67a780b6de64b51c978ecf11cb9a462c66be7d4ca9039d33"
dependencies = [
"getrandom 0.3.3",
"libc",
]
[[package]]
name = "js-sys"
version = "0.3.77"
@@ -2042,6 +2138,24 @@ version = "2.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9b4f627cb1b25917193a259e49bdad08f671f8d9708acfd5fe0a8c1455d87220"
[[package]]
name = "phf"
version = "0.12.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "913273894cec178f401a31ec4b656318d95473527be05c0752cc41cdc32be8b7"
dependencies = [
"phf_shared",
]
[[package]]
name = "phf_shared"
version = "0.12.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "06005508882fb681fd97892ecff4b7fd0fee13ef1aa569f8695dae7ab9099981"
dependencies = [
"siphasher",
]
[[package]]
name = "pin-project-lite"
version = "0.2.16"
@@ -2834,7 +2948,7 @@ dependencies = [
"static_assertions",
"time",
"tokio",
"tokio-tungstenite",
"tokio-tungstenite 0.21.0",
"tracing",
"typemap_rev",
"typesize",
@@ -2898,6 +3012,12 @@ dependencies = [
"rand_core 0.6.4",
]
[[package]]
name = "siphasher"
version = "1.0.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b2aa850e253778c88a04c3d7323b043aeda9d3e30d5971937c1855769763678e"
[[package]]
name = "skeptic"
version = "0.13.7"
@@ -3522,10 +3642,22 @@ dependencies = [
"rustls-pki-types",
"tokio",
"tokio-rustls 0.25.0",
"tungstenite",
"tungstenite 0.21.0",
"webpki-roots 0.26.11",
]
[[package]]
name = "tokio-tungstenite"
version = "0.28.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d25a406cddcc431a75d3d9afc6a7c0f7428d4891dd973e4d54c56b46127bf857"
dependencies = [
"futures-util",
"log",
"tokio",
"tungstenite 0.28.0",
]
[[package]]
name = "tokio-util"
version = "0.7.16"
@@ -3602,14 +3734,17 @@ version = "0.6.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "adc82fd73de2a9722ac5da747f12383d2bfdb93591ee6c58486e0097890f05f2"
dependencies = [
"async-compression",
"bitflags 2.9.4",
"bytes",
"futures-core",
"futures-util",
"http 1.3.1",
"http-body 1.0.1",
"iri-string",
"pin-project-lite",
"tokio",
"tokio-util",
"tower",
"tower-layer",
"tower-service",
@@ -3721,6 +3856,7 @@ version = "11.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4994acea2522cd2b3b85c1d9529a55991e3ad5e25cdcd3de9d505972c4379424"
dependencies = [
"chrono",
"serde_json",
"thiserror 2.0.16",
"ts-rs-macros",
@@ -3759,6 +3895,23 @@ dependencies = [
"utf-8",
]
[[package]]
name = "tungstenite"
version = "0.28.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8628dcc84e5a09eb3d8423d6cb682965dea9133204e8fb3efee74c2a0c259442"
dependencies = [
"bytes",
"data-encoding",
"http 1.3.1",
"httparse",
"log",
"rand 0.9.2",
"sha1",
"thiserror 2.0.16",
"utf-8",
]
[[package]]
name = "typemap_rev"
version = "0.3.0"
@@ -3860,6 +4013,12 @@ dependencies = [
"serde",
]
[[package]]
name = "urlencoding"
version = "2.1.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "daf8dba3b7eb870caf1ddeed7bc9d2a049f3cfdfae7cb521b087cc33ae4c49da"
[[package]]
name = "utf-8"
version = "0.7.6"
@@ -4555,3 +4714,31 @@ dependencies = [
"quote",
"syn 2.0.106",
]
[[package]]
name = "zstd"
version = "0.13.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e91ee311a569c327171651566e07972200e76fcfe2242a4fa446149a3881c08a"
dependencies = [
"zstd-safe",
]
[[package]]
name = "zstd-safe"
version = "7.2.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8f49c4d5f0abb602a93fb8736af2a4f4dd9512e36f7f570d66e65ff867ed3b9d"
dependencies = [
"zstd-sys",
]
[[package]]
name = "zstd-sys"
version = "2.0.16+zstd.1.5.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "91e19ebc2adc8f83e43039e79776e3fda8ca919132d68a1fed6a5faca2683748"
dependencies = [
"cc",
"pkg-config",
]
+5 -3
@@ -11,7 +11,7 @@ embed-assets = ["dep:rust-embed", "dep:mime_guess"]
[dependencies]
anyhow = "1.0.99"
async-trait = "0.1"
axum = "0.8.4"
axum = { version = "0.8.4", features = ["ws"] }
bitflags = { version = "2.9.4", features = ["serde"] }
chrono = { version = "0.4.42", features = ["serde"] }
compile-time = "0.2.0"
@@ -48,16 +48,18 @@ url = "2.5"
governor = "0.10.1"
serde_path_to_error = "0.1.17"
num-format = "0.4.4"
tower-http = { version = "0.6.0", features = ["cors", "trace", "timeout"] }
tower-http = { version = "0.6.0", features = ["cors", "trace", "timeout", "compression-full"] }
rust-embed = { version = "8.0", features = ["include-exclude"], optional = true }
mime_guess = { version = "2.0", optional = true }
clap = { version = "4.5", features = ["derive"] }
rapidhash = "4.1.0"
yansi = "1.0.1"
extension-traits = "2"
ts-rs = { version = "11.1.0", features = ["serde-compat", "serde-json-impl"] }
ts-rs = { version = "11.1.0", features = ["chrono-impl", "serde-compat", "serde-json-impl", "no-serde-warnings"] }
html-escape = "0.2.13"
axum-extra = { version = "0.12.5", features = ["query"] }
urlencoding = "2.1.3"
chrono-tz = "0.10.4"
[dev-dependencies]
+7 -4
@@ -7,6 +7,9 @@ FROM oven/bun:1 AS frontend-builder
WORKDIR /app
# Install zstd for pre-compression
RUN apt-get update && apt-get install -y --no-install-recommends zstd && rm -rf /var/lib/apt/lists/*
# Copy backend Cargo.toml for build-time version retrieval
COPY ./Cargo.toml ./
@@ -19,8 +22,8 @@ RUN bun install --frozen-lockfile
# Copy frontend source code
COPY ./web ./
# Build frontend
RUN bun run build
# Build frontend, then pre-compress static assets (gzip, brotli, zstd)
RUN bun run build && bun run scripts/compress-assets.ts
# --- Chef Base Stage ---
FROM lukemathwalker/cargo-chef:latest-rust-${RUST_VERSION} AS chef
@@ -112,5 +115,5 @@ HEALTHCHECK --interval=30s --timeout=3s --start-period=5s --retries=3 \
ENV HOSTS=0.0.0.0,[::]
# Implicitly uses PORT environment variable
# temporary: running without 'scraper' service
CMD ["sh", "-c", "exec ./banner --services web,bot"]
# Runs all services: web, bot, and scraper
CMD ["sh", "-c", "exec ./banner"]
+353 -111
@@ -1,51 +1,373 @@
set dotenv-load
default_services := "bot,web,scraper"
default:
just --list
# Run all checks (format, clippy, tests, lint)
check:
cargo fmt --all -- --check
cargo clippy --all-features -- --deny warnings
cargo nextest run -E 'not test(export_bindings)'
bun run --cwd web check
bun run --cwd web test
# Run all checks in parallel. Pass -f/--fix to auto-format and fix first.
[script("bun")]
check *flags:
const args = "{{flags}}".split(/\s+/).filter(Boolean);
let fix = false;
for (const arg of args) {
if (arg === "-f" || arg === "--fix") fix = true;
else { console.error(`Unknown flag: ${arg}`); process.exit(1); }
}
# Generate TypeScript bindings from Rust types (ts-rs)
bindings:
cargo test export_bindings
const run = (cmd) => {
const proc = Bun.spawnSync(cmd, { stdio: ["inherit", "inherit", "inherit"] });
if (proc.exitCode !== 0) process.exit(proc.exitCode);
};
# Run all tests (Rust + frontend)
test: test-rust test-web
if (fix) {
console.log("\x1b[1;36m→ Fixing...\x1b[0m");
run(["cargo", "fmt", "--all"]);
run(["bun", "run", "--cwd", "web", "format"]);
run(["cargo", "clippy", "--all-features", "--fix", "--allow-dirty", "--allow-staged",
"--", "--deny", "warnings"]);
console.log("\x1b[1;36m→ Verifying...\x1b[0m");
}
# Run only Rust tests (excludes ts-rs bindings generation)
test-rust *ARGS:
cargo nextest run -E 'not test(export_bindings)' {{ARGS}}
// Domain groups: format check name → { peers (other checks), formatter, sanity re-check }
const domains = {
rustfmt: {
peers: ["clippy", "rust-test"],
format: () => run(["cargo", "fmt", "--all"]),
recheck: [
{ name: "rustfmt", cmd: ["cargo", "fmt", "--all", "--", "--check"] },
{ name: "cargo-check", cmd: ["cargo", "check", "--all-features"] },
],
},
biome: {
peers: ["svelte-check", "web-test"],
format: () => run(["bun", "run", "--cwd", "web", "format"]),
recheck: [
{ name: "biome", cmd: ["bun", "run", "--cwd", "web", "format:check"] },
{ name: "svelte-check", cmd: ["bun", "run", "--cwd", "web", "check"] },
],
},
};
# Run only frontend tests
test-web:
bun run --cwd web test
const checks = [
{ name: "rustfmt", cmd: ["cargo", "fmt", "--all", "--", "--check"], terse: true },
{ name: "clippy", cmd: ["cargo", "clippy", "--all-features", "--", "--deny", "warnings"] },
{ name: "rust-test", cmd: ["cargo", "nextest", "run", "-E", "not test(export_bindings)"] },
{ name: "svelte-check", cmd: ["bun", "run", "--cwd", "web", "check"] },
{ name: "biome", cmd: ["bun", "run", "--cwd", "web", "format:check"] },
{ name: "web-test", cmd: ["bun", "run", "--cwd", "web", "test"] },
// { name: "sqlx-prepare", cmd: ["cargo", "sqlx", "prepare", "--check"] },
];
# Quick check: clippy + tests + typecheck (skips formatting)
check-quick:
cargo clippy --all-features -- --deny warnings
cargo nextest run -E 'not test(export_bindings)'
bun run --cwd web check
const isTTY = process.stderr.isTTY;
const start = Date.now();
const remaining = new Set(checks.map(c => c.name));
# Run the Banner API search demo (hits live UTSA API, ~20s)
search *ARGS:
cargo run -q --bin search -- {{ARGS}}
const promises = checks.map(async (check) => {
const proc = Bun.spawn(check.cmd, {
env: { ...process.env, FORCE_COLOR: "1" },
stdout: "pipe", stderr: "pipe",
});
const [stdout, stderr] = await Promise.all([
new Response(proc.stdout).text(),
new Response(proc.stderr).text(),
]);
await proc.exited;
return { ...check, stdout, stderr, exitCode: proc.exitCode,
elapsed: ((Date.now() - start) / 1000).toFixed(1) };
});
const interval = isTTY ? setInterval(() => {
const elapsed = ((Date.now() - start) / 1000).toFixed(1);
process.stderr.write(`\r\x1b[K${elapsed}s [${Array.from(remaining).join(", ")}]`);
}, 100) : null;
// Phase 1: collect all results, eagerly displaying whichever finishes first
const results = {};
let anyFailed = false;
const tagged = promises.map((p, i) => p.then(r => ({ i, r })));
for (let n = 0; n < checks.length; n++) {
const { i, r } = await Promise.race(tagged);
tagged[i] = new Promise(() => {}); // sentinel: never resolves
results[r.name] = r;
remaining.delete(r.name);
if (isTTY) process.stderr.write(`\r\x1b[K`);
if (r.exitCode !== 0) {
anyFailed = true;
process.stdout.write(`\x1b[31m✗ ${r.name}\x1b[0m (${r.elapsed}s)\n`);
if (!r.terse) {
if (r.stdout) process.stdout.write(r.stdout);
if (r.stderr) process.stderr.write(r.stderr);
}
} else {
process.stdout.write(`\x1b[32m✓ ${r.name}\x1b[0m (${r.elapsed}s)\n`);
}
}
if (interval) clearInterval(interval);
if (isTTY) process.stderr.write(`\r\x1b[K`);
// Phase 2: auto-fix formatting if it's the only failure in a domain
let autoFixed = false;
for (const [fmtName, domain] of Object.entries(domains)) {
const fmtResult = results[fmtName];
if (!fmtResult || fmtResult.exitCode === 0) continue;
const peersAllPassed = domain.peers.every(p => results[p]?.exitCode === 0);
if (!peersAllPassed) continue;
process.stdout.write(`\n\x1b[1;36m→ Auto-formatting ${fmtName} (peers passed, only formatting failed)...\x1b[0m\n`);
domain.format();
// Re-verify format + sanity check in parallel
const recheckStart = Date.now();
const recheckPromises = domain.recheck.map(async (check) => {
const proc = Bun.spawn(check.cmd, {
env: { ...process.env, FORCE_COLOR: "1" },
stdout: "pipe", stderr: "pipe",
});
const [stdout, stderr] = await Promise.all([
new Response(proc.stdout).text(),
new Response(proc.stderr).text(),
]);
await proc.exited;
return { ...check, stdout, stderr, exitCode: proc.exitCode,
elapsed: ((Date.now() - recheckStart) / 1000).toFixed(1) };
});
let recheckFailed = false;
for (const p of recheckPromises) {
const r = await p;
if (r.exitCode !== 0) {
recheckFailed = true;
process.stdout.write(`\x1b[31m ✗ ${r.name}\x1b[0m (${r.elapsed}s)\n`);
if (r.stdout) process.stdout.write(r.stdout);
if (r.stderr) process.stderr.write(r.stderr);
} else {
process.stdout.write(`\x1b[32m ✓ ${r.name}\x1b[0m (${r.elapsed}s)\n`);
}
}
if (!recheckFailed) {
process.stdout.write(`\x1b[32m ✓ ${fmtName} auto-fix succeeded\x1b[0m\n`);
results[fmtName].exitCode = 0;
autoFixed = true;
} else {
process.stdout.write(`\x1b[31m ✗ ${fmtName} auto-fix failed sanity check\x1b[0m\n`);
}
}
const finalFailed = Object.values(results).some(r => r.exitCode !== 0);
if (autoFixed && !finalFailed) {
process.stdout.write(`\n\x1b[1;32m✓ All checks passed (formatting was auto-fixed)\x1b[0m\n`);
}
process.exit(finalFailed ? 1 : 0);
# Format all Rust and TypeScript code
format:
cargo fmt --all
bun run --cwd web format
# Check formatting without modifying (CI-friendly)
format-check:
cargo fmt --all -- --check
bun run --cwd web format:check
# Run tests. Usage: just test [rust|web|<nextest filter args>]
[script("bun")]
test *args:
const input = "{{args}}".trim();
const run = (cmd) => {
const proc = Bun.spawnSync(cmd, { stdio: ["inherit", "inherit", "inherit"] });
if (proc.exitCode !== 0) process.exit(proc.exitCode);
};
if (input === "web") {
run(["bun", "run", "--cwd", "web", "test"]);
} else if (input === "rust") {
run(["cargo", "nextest", "run", "-E", "not test(export_bindings)"]);
} else if (input === "") {
run(["cargo", "nextest", "run", "-E", "not test(export_bindings)"]);
run(["bun", "run", "--cwd", "web", "test"]);
} else {
run(["cargo", "nextest", "run", ...input.split(/\s+/)]);
}
# Generate TypeScript bindings from Rust types (ts-rs)
bindings:
cargo test export_bindings
# Run the Banner API search demo (hits live UTSA API, ~20s)
search *ARGS:
cargo run -q --bin search -- {{ARGS}}
# Pass args to binary after --: just dev -n -- --some-flag
# Dev server. Flags: -f(rontend) -b(ackend) -W(no-watch) -n(o-build) -r(elease) -e(mbed) --tracing <fmt>
[script("bun")]
dev *flags:
const argv = "{{flags}}".split(/\s+/).filter(Boolean);
let frontendOnly = false, backendOnly = false;
let noWatch = false, noBuild = false, release = false, embed = false;
let tracing = "pretty";
const passthrough = [];
let i = 0;
let seenDashDash = false;
while (i < argv.length) {
const arg = argv[i];
if (seenDashDash) { passthrough.push(arg); i++; continue; }
if (arg === "--") { seenDashDash = true; i++; continue; }
if (arg.startsWith("--")) {
if (arg === "--frontend-only") frontendOnly = true;
else if (arg === "--backend-only") backendOnly = true;
else if (arg === "--no-watch") noWatch = true;
else if (arg === "--no-build") noBuild = true;
else if (arg === "--release") release = true;
else if (arg === "--embed") embed = true;
else if (arg === "--tracing") { tracing = argv[++i] || "pretty"; }
else { console.error(`Unknown flag: ${arg}`); process.exit(1); }
} else if (arg.startsWith("-") && arg.length > 1) {
for (const c of arg.slice(1)) {
if (c === "f") frontendOnly = true;
else if (c === "b") backendOnly = true;
else if (c === "W") noWatch = true;
else if (c === "n") noBuild = true;
else if (c === "r") release = true;
else if (c === "e") embed = true;
else { console.error(`Unknown flag: -${c}`); process.exit(1); }
}
} else { console.error(`Unknown argument: ${arg}`); process.exit(1); }
i++;
}
// -e implies -b (no point running Vite if assets are embedded)
if (embed) backendOnly = true;
// -n implies -W (no build means no watch)
if (noBuild) noWatch = true;
// Validate conflicting flags
if (frontendOnly && backendOnly) {
console.error("Cannot use -f and -b together (or -e implies -b)");
process.exit(1);
}
const runFrontend = !backendOnly;
const runBackend = !frontendOnly;
const profile = release ? "release" : "dev";
const profileDir = release ? "release" : "debug";
const procs = [];
const cleanup = async () => {
for (const p of procs) p.kill();
await Promise.all(procs.map(p => p.exited));
};
process.on("SIGINT", async () => { await cleanup(); process.exit(0); });
process.on("SIGTERM", async () => { await cleanup(); process.exit(0); });
// Build frontend first when embedding assets (backend will bake them in)
if (embed && !noBuild) {
console.log(`\x1b[1;36m→ Building frontend (for embedding)...\x1b[0m`);
const fb = Bun.spawnSync(["bun", "run", "--cwd", "web", "build"], {
stdio: ["inherit", "inherit", "inherit"],
});
if (fb.exitCode !== 0) process.exit(fb.exitCode);
}
// Frontend: Vite dev server
if (runFrontend) {
const proc = Bun.spawn(["bun", "run", "--cwd", "web", "dev"], {
stdio: ["inherit", "inherit", "inherit"],
});
procs.push(proc);
}
// Backend
if (runBackend) {
const backendArgs = [`--tracing`, tracing, ...passthrough];
const bin = `target/${profileDir}/banner`;
if (noWatch) {
// Build first unless -n (skip build)
if (!noBuild) {
console.log(`\x1b[1;36m→ Building backend (${profile})...\x1b[0m`);
const cargoArgs = ["cargo", "build", "--bin", "banner"];
if (!embed) cargoArgs.push("--no-default-features");
if (release) cargoArgs.push("--release");
const build = Bun.spawnSync(cargoArgs, { stdio: ["inherit", "inherit", "inherit"] });
if (build.exitCode !== 0) { cleanup(); process.exit(build.exitCode); }
}
// Run the binary directly (no watch)
const { existsSync } = await import("fs");
if (!existsSync(bin)) {
console.error(`Binary not found: ${bin}`);
console.error(`Run 'just build${release ? "" : " -d"}' first, or remove -n to use bacon.`);
cleanup();
process.exit(1);
}
console.log(`\x1b[1;36m→ Running ${bin} (no watch)\x1b[0m`);
const proc = Bun.spawn([bin, ...backendArgs], {
stdio: ["inherit", "inherit", "inherit"],
});
procs.push(proc);
} else {
// Bacon watch mode
const baconArgs = ["bacon", "--headless", "run", "--"];
if (!embed) baconArgs.push("--no-default-features");
if (release) baconArgs.push("--profile", "release");
baconArgs.push("--", ...backendArgs);
const proc = Bun.spawn(baconArgs, {
stdio: ["inherit", "inherit", "inherit"],
});
procs.push(proc);
}
}
// Wait for any process to exit, then kill the rest
const results = procs.map((p, i) => p.exited.then(code => ({ i, code })));
const first = await Promise.race(results);
cleanup();
process.exit(first.code);
# Production build. Flags: -d(ebug) -f(rontend-only) -b(ackend-only)
[script("bun")]
build *flags:
const argv = "{{flags}}".split(/\s+/).filter(Boolean);
let debug = false, frontendOnly = false, backendOnly = false;
for (const arg of argv) {
if (arg.startsWith("--")) {
if (arg === "--debug") debug = true;
else if (arg === "--frontend-only") frontendOnly = true;
else if (arg === "--backend-only") backendOnly = true;
else { console.error(`Unknown flag: ${arg}`); process.exit(1); }
} else if (arg.startsWith("-") && arg.length > 1) {
for (const c of arg.slice(1)) {
if (c === "d") debug = true;
else if (c === "f") frontendOnly = true;
else if (c === "b") backendOnly = true;
else { console.error(`Unknown flag: -${c}`); process.exit(1); }
}
} else { console.error(`Unknown argument: ${arg}`); process.exit(1); }
}
if (frontendOnly && backendOnly) {
console.error("Cannot use -f and -b together");
process.exit(1);
}
const run = (cmd) => {
const proc = Bun.spawnSync(cmd, { stdio: ["inherit", "inherit", "inherit"] });
if (proc.exitCode !== 0) process.exit(proc.exitCode);
};
const buildFrontend = !backendOnly;
const buildBackend = !frontendOnly;
const profile = debug ? "debug" : "release";
if (buildFrontend) {
console.log("\x1b[1;36m→ Building frontend...\x1b[0m");
run(["bun", "run", "--cwd", "web", "build"]);
}
if (buildBackend) {
console.log(`\x1b[1;36m→ Building backend (${profile})...\x1b[0m`);
const cmd = ["cargo", "build", "--bin", "banner"];
if (!debug) cmd.push("--release");
run(cmd);
}
# Start PostgreSQL in Docker and update .env with connection string
# Commands: start (default), reset, rm
@@ -115,86 +437,6 @@ db cmd="start":
await updateEnv();
}
# Auto-reloading frontend server
frontend:
bun run --cwd web dev
# Production build of frontend
build-frontend:
bun run --cwd web build
# Auto-reloading backend server (with embedded assets)
backend *ARGS:
bacon --headless run -- -- {{ARGS}}
# Auto-reloading backend server (no embedded assets, for dev proxy mode)
backend-dev *ARGS:
bacon --headless run -- --no-default-features -- {{ARGS}}
# Production build
build:
bun run --cwd web build
cargo build --release --bin banner
# Run auto-reloading development build with release characteristics
dev-build *ARGS='--services web --tracing pretty': build-frontend
bacon --headless run -- --profile dev-release -- {{ARGS}}
# Auto-reloading development build: Vite frontend + backend (no embedded assets, proxies to Vite)
[parallel]
dev *ARGS='--services web,bot': frontend (backend-dev ARGS)
# Smoke test: start web server, hit API endpoints, verify responses
[script("bash")]
test-smoke port="18080":
set -euo pipefail
PORT={{port}}
cleanup() { kill "$SERVER_PID" 2>/dev/null; wait "$SERVER_PID" 2>/dev/null; }
# Start server in background
PORT=$PORT cargo run -q --no-default-features -- --services web --tracing json &
SERVER_PID=$!
trap cleanup EXIT
# Wait for server to be ready (up to 15s)
for i in $(seq 1 30); do
if curl -sf "http://localhost:$PORT/api/health" >/dev/null 2>&1; then break; fi
if ! kill -0 "$SERVER_PID" 2>/dev/null; then echo "FAIL: server exited early"; exit 1; fi
sleep 0.5
done
PASS=0; FAIL=0
check() {
local label="$1" url="$2" expected="$3"
body=$(curl -sf "$url") || { echo "FAIL: $label - request failed"; FAIL=$((FAIL+1)); return; }
if echo "$body" | grep -q "$expected"; then
echo "PASS: $label"
PASS=$((PASS+1))
else
echo "FAIL: $label - expected '$expected' in: $body"
FAIL=$((FAIL+1))
fi
}
check "GET /api/health" "http://localhost:$PORT/api/health" '"status":"healthy"'
check "GET /api/status" "http://localhost:$PORT/api/status" '"version"'
check "GET /api/metrics" "http://localhost:$PORT/api/metrics" '"banner_api"'
# Test 404
STATUS=$(curl -s -o /dev/null -w "%{http_code}" "http://localhost:$PORT/api/nonexistent")
if [ "$STATUS" = "404" ]; then
echo "PASS: 404 on unknown route"
PASS=$((PASS+1))
else
echo "FAIL: expected 404, got $STATUS"
FAIL=$((FAIL+1))
fi
echo ""
echo "Results: $PASS passed, $FAIL failed"
[ "$FAIL" -eq 0 ]
alias b := bun
bun *ARGS:
cd web && bun {{ ARGS }}
+1 -2
@@ -29,8 +29,7 @@ The application consists of three modular services that can be run independently
bun install --cwd web # Install frontend dependencies
cargo build # Build the backend
just dev # Runs auto-reloading dev build
just dev --services bot,web # Runs auto-reloading dev build, running only the bot and web services
just dev # Runs auto-reloading dev build with all services
just dev-build # Development build with release characteristics (frontend is embedded, non-auto-reloading)
just build # Production build that embeds assets
@@ -0,0 +1,7 @@
-- Add queued_at column to track when a job last entered the "ready to pick up" state.
-- For fresh jobs this equals execute_at; for retried jobs it is updated to NOW().
ALTER TABLE scrape_jobs
ADD COLUMN queued_at TIMESTAMPTZ NOT NULL DEFAULT NOW();
-- Backfill existing rows: set queued_at = execute_at (best approximation)
UPDATE scrape_jobs SET queued_at = execute_at;
@@ -0,0 +1,19 @@
CREATE TABLE users (
discord_id BIGINT PRIMARY KEY,
discord_username TEXT NOT NULL,
discord_avatar_hash TEXT,
is_admin BOOLEAN NOT NULL DEFAULT false,
created_at TIMESTAMPTZ NOT NULL DEFAULT now(),
updated_at TIMESTAMPTZ NOT NULL DEFAULT now()
);
CREATE TABLE user_sessions (
id TEXT PRIMARY KEY,
user_id BIGINT NOT NULL REFERENCES users(discord_id) ON DELETE CASCADE,
created_at TIMESTAMPTZ NOT NULL DEFAULT now(),
expires_at TIMESTAMPTZ NOT NULL,
last_active_at TIMESTAMPTZ NOT NULL DEFAULT now()
);
CREATE INDEX idx_user_sessions_user_id ON user_sessions(user_id);
CREATE INDEX idx_user_sessions_expires_at ON user_sessions(expires_at);
@@ -0,0 +1,80 @@
-- Collapse instructors from per-banner-id rows to per-person rows (deduped by lowercased email).
-- All existing RMP matches are deliberately dropped; the new auto-matcher will re-score from scratch.
-- 1. Create the new instructors table (1 row per person, keyed by email)
CREATE TABLE instructors_new (
id SERIAL PRIMARY KEY,
display_name VARCHAR NOT NULL,
email VARCHAR NOT NULL,
rmp_professor_id INTEGER UNIQUE REFERENCES rmp_professors(legacy_id),
rmp_match_status VARCHAR NOT NULL DEFAULT 'unmatched',
CONSTRAINT instructors_email_unique UNIQUE (email)
);
-- 2. Populate from existing data, deduplicating by lowercased email.
-- For each email, pick the display_name from the row with the highest banner_id
-- (deterministic tiebreaker). All rmp fields start fresh (NULL / 'unmatched').
INSERT INTO instructors_new (display_name, email)
SELECT DISTINCT ON (LOWER(email))
display_name,
LOWER(email)
FROM instructors
ORDER BY LOWER(email), banner_id DESC;
-- 3. Create the new course_instructors table with integer FK and banner_id column
CREATE TABLE course_instructors_new (
course_id INTEGER NOT NULL REFERENCES courses(id) ON DELETE CASCADE,
instructor_id INTEGER NOT NULL REFERENCES instructors_new(id) ON DELETE CASCADE,
banner_id VARCHAR NOT NULL,
is_primary BOOLEAN NOT NULL DEFAULT false,
PRIMARY KEY (course_id, instructor_id)
);
-- 4. Populate from old data, mapping old banner_id → new instructor id via lowercased email.
-- Use DISTINCT ON to handle cases where multiple old banner_ids for the same person
-- taught the same course (would cause duplicate (course_id, instructor_id) pairs).
INSERT INTO course_instructors_new (course_id, instructor_id, banner_id, is_primary)
SELECT DISTINCT ON (ci.course_id, inew.id)
ci.course_id,
inew.id,
ci.instructor_id, -- old banner_id
ci.is_primary
FROM course_instructors ci
JOIN instructors iold ON iold.banner_id = ci.instructor_id
JOIN instructors_new inew ON inew.email = LOWER(iold.email)
ORDER BY ci.course_id, inew.id, ci.is_primary DESC;
-- 5. Drop old tables (course_instructors first due to FK dependency)
DROP TABLE course_instructors;
DROP TABLE instructors;
-- 6. Rename new tables into place
ALTER TABLE instructors_new RENAME TO instructors;
ALTER TABLE course_instructors_new RENAME TO course_instructors;
-- 7. Rename constraints to match the final table names
ALTER TABLE instructors RENAME CONSTRAINT instructors_new_pkey TO instructors_pkey;
ALTER TABLE instructors RENAME CONSTRAINT instructors_new_rmp_professor_id_key TO instructors_rmp_professor_id_key;
ALTER TABLE course_instructors RENAME CONSTRAINT course_instructors_new_pkey TO course_instructors_pkey;
-- 8. Recreate indexes
CREATE INDEX idx_course_instructors_instructor ON course_instructors (instructor_id);
CREATE INDEX idx_instructors_rmp_status ON instructors (rmp_match_status);
CREATE INDEX idx_instructors_email ON instructors (email);
-- 9. Create rmp_match_candidates table
CREATE TABLE rmp_match_candidates (
id SERIAL PRIMARY KEY,
instructor_id INTEGER NOT NULL REFERENCES instructors(id) ON DELETE CASCADE,
rmp_legacy_id INTEGER NOT NULL REFERENCES rmp_professors(legacy_id),
score REAL NOT NULL,
score_breakdown JSONB NOT NULL DEFAULT '{}',
status VARCHAR NOT NULL DEFAULT 'pending',
created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
resolved_at TIMESTAMPTZ,
resolved_by BIGINT REFERENCES users(discord_id),
CONSTRAINT uq_candidate_pair UNIQUE (instructor_id, rmp_legacy_id)
);
CREATE INDEX idx_match_candidates_instructor ON rmp_match_candidates (instructor_id);
CREATE INDEX idx_match_candidates_status ON rmp_match_candidates (status);
@@ -0,0 +1,24 @@
-- Multi-RMP profile support: allow many RMP profiles per instructor.
-- Each RMP profile still links to at most one instructor (rmp_legacy_id UNIQUE).
-- 1. Create junction table
CREATE TABLE instructor_rmp_links (
id SERIAL PRIMARY KEY,
instructor_id INTEGER NOT NULL REFERENCES instructors(id) ON DELETE CASCADE,
rmp_legacy_id INTEGER NOT NULL UNIQUE REFERENCES rmp_professors(legacy_id),
created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
created_by BIGINT REFERENCES users(discord_id),
source VARCHAR NOT NULL DEFAULT 'manual' -- 'auto' | 'manual'
);
CREATE INDEX idx_instructor_rmp_links_instructor ON instructor_rmp_links (instructor_id);
-- 2. Migrate existing matches
INSERT INTO instructor_rmp_links (instructor_id, rmp_legacy_id, source)
SELECT id, rmp_professor_id,
CASE rmp_match_status WHEN 'auto' THEN 'auto' ELSE 'manual' END
FROM instructors
WHERE rmp_professor_id IS NOT NULL;
-- 3. Drop old column (and its unique constraint)
ALTER TABLE instructors DROP COLUMN rmp_professor_id;
@@ -0,0 +1,31 @@
-- Scrape job results log: one row per completed (or failed) job for effectiveness tracking.
CREATE TABLE scrape_job_results (
id BIGSERIAL PRIMARY KEY,
target_type target_type NOT NULL,
payload JSONB NOT NULL,
priority scrape_priority NOT NULL,
-- Timing
queued_at TIMESTAMPTZ NOT NULL,
started_at TIMESTAMPTZ NOT NULL,
completed_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
duration_ms INT NOT NULL,
-- Outcome
success BOOLEAN NOT NULL,
error_message TEXT,
retry_count INT NOT NULL DEFAULT 0,
-- Effectiveness (NULL when success = false)
courses_fetched INT,
courses_changed INT,
courses_unchanged INT,
audits_generated INT,
metrics_generated INT
);
CREATE INDEX idx_scrape_job_results_target_time
ON scrape_job_results (target_type, completed_at);
CREATE INDEX idx_scrape_job_results_completed
ON scrape_job_results (completed_at);
@@ -0,0 +1,13 @@
-- Indexes for the timeline aggregation endpoint.
-- The query buckets course_metrics by 15-minute intervals, joins to courses
-- for subject, and aggregates enrollment. These indexes support efficient
-- time-range scans and the join.
-- Primary access pattern: scan course_metrics by timestamp range
CREATE INDEX IF NOT EXISTS idx_course_metrics_timestamp
ON course_metrics (timestamp);
-- Composite index for the DISTINCT ON (bucket, course_id) ordered by timestamp DESC
-- to efficiently pick the latest metric per course per bucket.
CREATE INDEX IF NOT EXISTS idx_course_metrics_course_timestamp
ON course_metrics (course_id, timestamp DESC);
+25 -1
@@ -6,6 +6,7 @@ use crate::services::bot::BotService;
use crate::services::manager::ServiceManager;
use crate::services::web::WebService;
use crate::state::AppState;
use crate::web::auth::AuthConfig;
use anyhow::Context;
use figment::value::UncasedStr;
use figment::{Figment, providers::Env};
@@ -84,6 +85,19 @@ impl App {
info!(error = ?e, "Could not load reference cache on startup (may be empty)");
}
// Load schedule cache for timeline enrollment queries
if let Err(e) = app_state.schedule_cache.load().await {
info!(error = ?e, "Could not load schedule cache on startup (may be empty)");
}
// Seed the initial admin user if configured
if let Some(admin_id) = config.admin_discord_id {
let user = crate::data::users::ensure_seed_admin(&db_pool, admin_id as i64)
.await
.context("Failed to seed admin user")?;
info!(discord_id = admin_id, username = %user.discord_username, "Seed admin ensured");
}
Ok(App {
config,
db_pool,
@@ -97,7 +111,16 @@ impl App {
pub fn setup_services(&mut self, services: &[ServiceName]) -> Result<(), anyhow::Error> {
// Register enabled services with the manager
if services.contains(&ServiceName::Web) {
let web_service = Box::new(WebService::new(self.config.port, self.app_state.clone()));
let auth_config = AuthConfig {
client_id: self.config.discord_client_id.clone(),
client_secret: self.config.discord_client_secret.clone(),
redirect_base: self.config.discord_redirect_uri.clone(),
};
let web_service = Box::new(WebService::new(
self.config.port,
self.app_state.clone(),
auth_config,
));
self.service_manager
.register_service(ServiceName::Web.as_str(), web_service);
}
@@ -108,6 +131,7 @@ impl App {
self.banner_api.clone(),
self.app_state.reference_cache.clone(),
self.app_state.service_statuses.clone(),
self.app_state.scrape_job_tx.clone(),
));
self.service_manager
.register_service(ServiceName::Scraper.as_str(), scraper_service);
+3 -3
@@ -40,9 +40,9 @@ impl BannerApi {
.cookie_store(false)
.user_agent(user_agent())
.tcp_keepalive(Some(std::time::Duration::from_secs(60 * 5)))
.read_timeout(std::time::Duration::from_secs(10))
.connect_timeout(std::time::Duration::from_secs(10))
.timeout(std::time::Duration::from_secs(30))
.read_timeout(std::time::Duration::from_secs(20))
.connect_timeout(std::time::Duration::from_secs(15))
.timeout(std::time::Duration::from_secs(40))
.build()
.context("Failed to create HTTP client")?,
)
+1 -1
@@ -1,4 +1,4 @@
use bitflags::{bitflags, Flags};
use bitflags::{Flags, bitflags};
use chrono::{DateTime, NaiveDate, NaiveTime, Timelike, Utc, Weekday};
use extension_traits::extension;
use serde::{Deserialize, Deserializer, Serialize};
+462
@@ -0,0 +1,462 @@
//! Shared calendar generation logic for ICS files and Google Calendar URLs.
//!
//! Used by both the Discord bot commands and the web API endpoints.
use crate::data::models::DbMeetingTime;
use chrono::{Datelike, Duration, NaiveDate, NaiveTime, Weekday};
/// Course metadata needed for calendar generation (shared interface between bot and web).
pub struct CalendarCourse {
pub crn: String,
pub subject: String,
pub course_number: String,
pub title: String,
pub sequence_number: Option<String>,
pub primary_instructor: Option<String>,
}
impl CalendarCourse {
/// Display title like "CS 1083 - Introduction to Computer Science"
pub fn display_title(&self) -> String {
format!("{} {} - {}", self.subject, self.course_number, self.title)
}
/// Filename-safe identifier: "CS_1083_001"
pub fn filename_stem(&self) -> String {
format!(
"{}_{}{}",
self.subject.replace(' ', "_"),
self.course_number,
self.sequence_number
.as_deref()
.map(|s| format!("_{s}"))
.unwrap_or_default()
)
}
}
// ---------------------------------------------------------------------------
// Date parsing helpers
// ---------------------------------------------------------------------------
/// Parse a date string in either MM/DD/YYYY or YYYY-MM-DD format.
fn parse_date(s: &str) -> Option<NaiveDate> {
NaiveDate::parse_from_str(s, "%m/%d/%Y")
.or_else(|_| NaiveDate::parse_from_str(s, "%Y-%m-%d"))
.ok()
}
/// Parse an HHMM time string into `NaiveTime`.
fn parse_hhmm(s: &str) -> Option<NaiveTime> {
if s.len() != 4 {
return None;
}
let hours = s[..2].parse::<u32>().ok()?;
let minutes = s[2..].parse::<u32>().ok()?;
NaiveTime::from_hms_opt(hours, minutes, 0)
}
/// Active weekdays for a meeting time.
fn active_weekdays(mt: &DbMeetingTime) -> Vec<Weekday> {
let mapping: [(bool, Weekday); 7] = [
(mt.monday, Weekday::Mon),
(mt.tuesday, Weekday::Tue),
(mt.wednesday, Weekday::Wed),
(mt.thursday, Weekday::Thu),
(mt.friday, Weekday::Fri),
(mt.saturday, Weekday::Sat),
(mt.sunday, Weekday::Sun),
];
mapping
.iter()
.filter(|(active, _)| *active)
.map(|(_, day)| *day)
.collect()
}
/// ICS two-letter day code for RRULE BYDAY.
fn ics_day_code(day: Weekday) -> &'static str {
match day {
Weekday::Mon => "MO",
Weekday::Tue => "TU",
Weekday::Wed => "WE",
Weekday::Thu => "TH",
Weekday::Fri => "FR",
Weekday::Sat => "SA",
Weekday::Sun => "SU",
}
}
/// Location string from a `DbMeetingTime`.
fn location_string(mt: &DbMeetingTime) -> String {
let building = mt
.building_description
.as_deref()
.or(mt.building.as_deref())
.unwrap_or("");
let room = mt.room.as_deref().unwrap_or("");
let combined = format!("{building} {room}").trim().to_string();
if combined.is_empty() {
"Online".to_string()
} else {
combined
}
}
/// Days display string (e.g. "MWF", "TTh").
fn days_display(mt: &DbMeetingTime) -> String {
let weekdays = active_weekdays(mt);
if weekdays.is_empty() {
return "TBA".to_string();
}
weekdays
.iter()
.map(|d| ics_day_code(*d))
.collect::<Vec<_>>()
.join("")
}
/// Escape text for ICS property values.
fn escape_ics(text: &str) -> String {
text.replace('\\', "\\\\")
.replace(';', "\\;")
.replace(',', "\\,")
.replace('\n', "\\n")
.replace('\r', "")
}
// ---------------------------------------------------------------------------
// University holidays (ported from bot/commands/ics.rs)
// ---------------------------------------------------------------------------
/// Find the nth occurrence of a weekday in a given month/year (1-based).
fn nth_weekday_of_month(year: i32, month: u32, weekday: Weekday, n: u32) -> Option<NaiveDate> {
let first = NaiveDate::from_ymd_opt(year, month, 1)?;
let days_ahead = (weekday.num_days_from_monday() as i64
- first.weekday().num_days_from_monday() as i64)
.rem_euclid(7) as u32;
let day = 1 + days_ahead + 7 * (n - 1);
NaiveDate::from_ymd_opt(year, month, day)
}
/// Compute a consecutive range of dates starting from `start` for `count` days.
fn date_range(start: NaiveDate, count: i64) -> Vec<NaiveDate> {
(0..count)
.filter_map(|i| start.checked_add_signed(Duration::days(i)))
.collect()
}
/// Compute university holidays for a given year.
fn compute_holidays_for_year(year: i32) -> Vec<(&'static str, Vec<NaiveDate>)> {
let mut holidays = Vec::new();
// Labor Day: 1st Monday of September
if let Some(d) = nth_weekday_of_month(year, 9, Weekday::Mon, 1) {
holidays.push(("Labor Day", vec![d]));
}
// Fall Break: Mon-Tue of Columbus Day week
if let Some(mon) = nth_weekday_of_month(year, 10, Weekday::Mon, 2) {
holidays.push(("Fall Break", date_range(mon, 2)));
}
// Day before Thanksgiving
if let Some(thu) = nth_weekday_of_month(year, 11, Weekday::Thu, 4)
&& let Some(wed) = thu.checked_sub_signed(Duration::days(1))
{
holidays.push(("Day Before Thanksgiving", vec![wed]));
}
// Thanksgiving: 4th Thursday + Friday
if let Some(thu) = nth_weekday_of_month(year, 11, Weekday::Thu, 4) {
holidays.push(("Thanksgiving", date_range(thu, 2)));
}
// Winter Holiday: Dec 23-31
if let Some(start) = NaiveDate::from_ymd_opt(year, 12, 23) {
holidays.push(("Winter Holiday", date_range(start, 9)));
}
// New Year's Day
if let Some(d) = NaiveDate::from_ymd_opt(year, 1, 1) {
holidays.push(("New Year's Day", vec![d]));
}
// MLK Day: 3rd Monday of January
if let Some(d) = nth_weekday_of_month(year, 1, Weekday::Mon, 3) {
holidays.push(("MLK Day", vec![d]));
}
// Spring Break: full week starting 2nd Monday of March
if let Some(mon) = nth_weekday_of_month(year, 3, Weekday::Mon, 2) {
holidays.push(("Spring Break", date_range(mon, 6)));
}
holidays
}
/// Get holiday dates within a date range that fall on specific weekdays.
fn holiday_exceptions(start: NaiveDate, end: NaiveDate, weekdays: &[Weekday]) -> Vec<NaiveDate> {
let start_year = start.year();
let end_year = end.year();
(start_year..=end_year)
.flat_map(compute_holidays_for_year)
.flat_map(|(_, dates)| dates)
.filter(|&date| date >= start && date <= end && weekdays.contains(&date.weekday()))
.collect()
}
/// Names of excluded holidays (for user-facing messages).
fn excluded_holiday_names(
start: NaiveDate,
end: NaiveDate,
exceptions: &[NaiveDate],
) -> Vec<String> {
let start_year = start.year();
let end_year = end.year();
let all_holidays: Vec<_> = (start_year..=end_year)
.flat_map(compute_holidays_for_year)
.collect();
let mut names = Vec::new();
for (holiday_name, holiday_dates) in &all_holidays {
for &exc in exceptions {
if holiday_dates.contains(&exc) {
names.push(format!("{} ({})", holiday_name, exc.format("%a, %b %d")));
}
}
}
names.sort();
names.dedup();
names
}
// ---------------------------------------------------------------------------
// ICS generation
// ---------------------------------------------------------------------------
/// Result from ICS generation, including the file content and excluded holiday names.
pub struct IcsResult {
pub content: String,
pub filename: String,
/// Holiday dates excluded via EXDATE rules, for user-facing messages.
#[allow(dead_code)]
pub excluded_holidays: Vec<String>,
}
/// Generate an ICS calendar file for a course.
pub fn generate_ics(
course: &CalendarCourse,
meeting_times: &[DbMeetingTime],
) -> Result<IcsResult, anyhow::Error> {
let mut ics = String::new();
let mut all_excluded = Vec::new();
// Header
ics.push_str("BEGIN:VCALENDAR\r\n");
ics.push_str("VERSION:2.0\r\n");
ics.push_str("PRODID:-//Banner Bot//Course Calendar//EN\r\n");
ics.push_str("CALSCALE:GREGORIAN\r\n");
ics.push_str("METHOD:PUBLISH\r\n");
ics.push_str(&format!(
"X-WR-CALNAME:{}\r\n",
escape_ics(&course.display_title())
));
for (index, mt) in meeting_times.iter().enumerate() {
let (event, holidays) = generate_ics_event(course, mt, index)?;
ics.push_str(&event);
all_excluded.extend(holidays);
}
ics.push_str("END:VCALENDAR\r\n");
Ok(IcsResult {
content: ics,
filename: format!("{}.ics", course.filename_stem()),
excluded_holidays: all_excluded,
})
}
/// Generate a single VEVENT for one meeting time.
fn generate_ics_event(
course: &CalendarCourse,
mt: &DbMeetingTime,
index: usize,
) -> Result<(String, Vec<String>), anyhow::Error> {
let start_date = parse_date(&mt.start_date)
.ok_or_else(|| anyhow::anyhow!("Invalid start_date: {}", mt.start_date))?;
let end_date = parse_date(&mt.end_date)
.ok_or_else(|| anyhow::anyhow!("Invalid end_date: {}", mt.end_date))?;
let start_time = mt.begin_time.as_deref().and_then(parse_hhmm);
let end_time = mt.end_time.as_deref().and_then(parse_hhmm);
// DTSTART/DTEND: first occurrence with time, or all-day on start_date
let (dtstart, dtend) = match (start_time, end_time) {
(Some(st), Some(et)) => {
let s = start_date.and_time(st).and_utc();
let e = start_date.and_time(et).and_utc();
(
s.format("%Y%m%dT%H%M%SZ").to_string(),
e.format("%Y%m%dT%H%M%SZ").to_string(),
)
}
_ => {
let s = start_date.and_hms_opt(0, 0, 0).unwrap().and_utc();
let e = start_date.and_hms_opt(0, 0, 0).unwrap().and_utc();
(
s.format("%Y%m%dT%H%M%SZ").to_string(),
e.format("%Y%m%dT%H%M%SZ").to_string(),
)
}
};
let event_title = if index > 0 {
format!("{} (Meeting {})", course.display_title(), index + 1)
} else {
course.display_title()
};
let instructor = course.primary_instructor.as_deref().unwrap_or("Staff");
let description = format!(
"CRN: {}\\nInstructor: {}\\nDays: {}\\nMeeting Type: {}",
course.crn,
instructor,
days_display(mt),
mt.meeting_type,
);
let location = location_string(mt);
let uid = format!(
"{}-{}-{}@banner-bot.local",
course.crn,
index,
start_date
.and_hms_opt(0, 0, 0)
.unwrap()
.and_utc()
.timestamp()
);
let mut event = String::new();
event.push_str("BEGIN:VEVENT\r\n");
event.push_str(&format!("UID:{uid}\r\n"));
event.push_str(&format!("DTSTART:{dtstart}\r\n"));
event.push_str(&format!("DTEND:{dtend}\r\n"));
event.push_str(&format!("SUMMARY:{}\r\n", escape_ics(&event_title)));
event.push_str(&format!("DESCRIPTION:{}\r\n", escape_ics(&description)));
event.push_str(&format!("LOCATION:{}\r\n", escape_ics(&location)));
let weekdays = active_weekdays(mt);
let mut holiday_names = Vec::new();
if let (false, Some(st)) = (weekdays.is_empty(), start_time) {
let by_day: Vec<&str> = weekdays.iter().map(|d| ics_day_code(*d)).collect();
let until = end_date.format("%Y%m%dT000000Z").to_string();
event.push_str(&format!(
"RRULE:FREQ=WEEKLY;BYDAY={};UNTIL={}\r\n",
by_day.join(","),
until,
));
// Holiday exceptions
let exceptions = holiday_exceptions(start_date, end_date, &weekdays);
if !exceptions.is_empty() {
let start_utc = start_date.and_time(st).and_utc();
let exdates: Vec<String> = exceptions
.iter()
.map(|&d| {
d.and_time(start_utc.time())
.and_utc()
.format("%Y%m%dT%H%M%SZ")
.to_string()
})
.collect();
event.push_str(&format!("EXDATE:{}\r\n", exdates.join(",")));
}
holiday_names = excluded_holiday_names(start_date, end_date, &exceptions);
}
event.push_str("END:VEVENT\r\n");
Ok((event, holiday_names))
}
// ---------------------------------------------------------------------------
// Google Calendar URL generation
// ---------------------------------------------------------------------------
/// Generate a Google Calendar "add event" URL for a single meeting time.
pub fn generate_gcal_url(
course: &CalendarCourse,
mt: &DbMeetingTime,
) -> Result<String, anyhow::Error> {
let start_date = parse_date(&mt.start_date)
.ok_or_else(|| anyhow::anyhow!("Invalid start_date: {}", mt.start_date))?;
let end_date = parse_date(&mt.end_date)
.ok_or_else(|| anyhow::anyhow!("Invalid end_date: {}", mt.end_date))?;
let start_time = mt.begin_time.as_deref().and_then(parse_hhmm);
let end_time = mt.end_time.as_deref().and_then(parse_hhmm);
let dates_text = match (start_time, end_time) {
(Some(st), Some(et)) => {
let s = start_date.and_time(st);
let e = start_date.and_time(et);
format!(
"{}/{}",
s.format("%Y%m%dT%H%M%S"),
e.format("%Y%m%dT%H%M%S")
)
}
_ => {
let s = start_date.format("%Y%m%d").to_string();
format!("{s}/{s}")
}
};
let instructor = course.primary_instructor.as_deref().unwrap_or("Staff");
let details = format!(
"CRN: {}\nInstructor: {}\nDays: {}",
course.crn,
instructor,
days_display(mt),
);
let location = location_string(mt);
let weekdays = active_weekdays(mt);
let recur = if !weekdays.is_empty() && start_time.is_some() {
let by_day: Vec<&str> = weekdays.iter().map(|d| ics_day_code(*d)).collect();
let until = end_date.format("%Y%m%dT000000Z").to_string();
format!(
"RRULE:FREQ=WEEKLY;BYDAY={};UNTIL={}",
by_day.join(","),
until
)
} else {
String::new()
};
let course_text = course.display_title();
let params: Vec<(&str, &str)> = vec![
("action", "TEMPLATE"),
("text", &course_text),
("dates", &dates_text),
("details", &details),
("location", &location),
("trp", "true"),
("ctz", "America/Chicago"),
("recur", &recur),
];
let url = url::Url::parse_with_params("https://calendar.google.com/calendar/render", &params)?;
Ok(url.to_string())
}
+1 -105
@@ -2,34 +2,16 @@ use clap::Parser;
/// Banner Discord Bot - Course availability monitoring
///
/// This application runs multiple services that can be controlled via CLI arguments:
/// This application runs all services:
/// - bot: Discord bot for course monitoring commands
/// - web: HTTP server for web interface and API
/// - scraper: Background service for scraping course data
///
/// Use --services to specify which services to run, or --disable-services to exclude specific services.
#[derive(Parser, Debug)]
#[command(author, version, about, long_about = None)]
pub struct Args {
/// Log formatter to use
#[arg(long, value_enum, default_value_t = default_tracing_format())]
pub tracing: TracingFormat,
/// Services to run (comma-separated). Default: all services
///
/// Examples:
/// --services bot,web # Run only bot and web services
/// --services scraper # Run only the scraper service
#[arg(long, value_delimiter = ',', conflicts_with = "disable_services")]
pub services: Option<Vec<ServiceName>>,
/// Services to disable (comma-separated)
///
/// Examples:
/// --disable-services bot # Run web and scraper only
/// --disable-services bot,web # Run only the scraper service
#[arg(long, value_delimiter = ',', conflicts_with = "services")]
pub disable_services: Option<Vec<ServiceName>>,
}
#[derive(clap::ValueEnum, Clone, Debug)]
@@ -66,34 +48,6 @@ impl ServiceName {
}
}
/// Determine which services should be enabled based on CLI arguments
pub fn determine_enabled_services(args: &Args) -> Result<Vec<ServiceName>, anyhow::Error> {
match (&args.services, &args.disable_services) {
(Some(services), None) => {
// User specified which services to run
Ok(services.clone())
}
(None, Some(disabled)) => {
// User specified which services to disable
let enabled: Vec<ServiceName> = ServiceName::all()
.into_iter()
.filter(|s| !disabled.contains(s))
.collect();
Ok(enabled)
}
(None, None) => {
// Default: run all services
Ok(ServiceName::all())
}
(Some(_), Some(_)) => {
// This should be prevented by clap's conflicts_with, but just in case
Err(anyhow::anyhow!(
"Cannot specify both --services and --disable-services"
))
}
}
}
#[cfg(debug_assertions)]
const DEFAULT_TRACING_FORMAT: TracingFormat = TracingFormat::Pretty;
#[cfg(not(debug_assertions))]
@@ -107,64 +61,6 @@ fn default_tracing_format() -> TracingFormat {
mod tests {
use super::*;
fn args_with_services(
services: Option<Vec<ServiceName>>,
disable: Option<Vec<ServiceName>>,
) -> Args {
Args {
tracing: TracingFormat::Pretty,
services,
disable_services: disable,
}
}
#[test]
fn test_default_enables_all_services() {
let result = determine_enabled_services(&args_with_services(None, None)).unwrap();
assert_eq!(result.len(), 3);
}
#[test]
fn test_explicit_services_only_those() {
let result =
determine_enabled_services(&args_with_services(Some(vec![ServiceName::Web]), None))
.unwrap();
assert_eq!(result.len(), 1);
assert_eq!(result[0].as_str(), "web");
}
#[test]
fn test_disable_bot_leaves_web_and_scraper() {
let result =
determine_enabled_services(&args_with_services(None, Some(vec![ServiceName::Bot])))
.unwrap();
assert_eq!(result.len(), 2);
assert!(result.iter().all(|s| s.as_str() != "bot"));
}
#[test]
fn test_disable_all_leaves_empty() {
let result = determine_enabled_services(&args_with_services(
None,
Some(vec![
ServiceName::Bot,
ServiceName::Web,
ServiceName::Scraper,
]),
))
.unwrap();
assert!(result.is_empty());
}
#[test]
fn test_both_specified_returns_error() {
let result = determine_enabled_services(&args_with_services(
Some(vec![ServiceName::Web]),
Some(vec![ServiceName::Bot]),
));
assert!(result.is_err());
}
#[test]
fn test_service_name_as_str() {
assert_eq!(ServiceName::Bot.as_str(), "bot");
+50
@@ -47,6 +47,19 @@ pub struct Config {
/// Rate limiting configuration for Banner API requests
#[serde(default = "default_rate_limiting")]
pub rate_limiting: RateLimitingConfig,
/// Discord OAuth2 client ID for web authentication
#[serde(deserialize_with = "deserialize_string_or_uint")]
pub discord_client_id: String,
/// Discord OAuth2 client secret for web authentication
pub discord_client_secret: String,
/// Optional base URL override for OAuth2 redirect (e.g. "https://banner.xevion.dev").
/// When unset, the redirect URI is derived from the incoming request's Origin/Host.
#[serde(default)]
pub discord_redirect_uri: Option<String>,
/// Discord user ID to seed as initial admin on startup (optional)
#[serde(default)]
pub admin_discord_id: Option<u64>,
}
/// Default log level of "info"
@@ -216,6 +229,43 @@ where
deserializer.deserialize_any(DurationVisitor)
}
/// Deserializes a value that may arrive as either a string or unsigned integer.
///
/// Figment's env provider infers types from raw values, so numeric-looking strings
/// like Discord client IDs get parsed as integers. This accepts both forms.
fn deserialize_string_or_uint<'de, D>(deserializer: D) -> Result<String, D::Error>
where
D: Deserializer<'de>,
{
use serde::de::Visitor;
struct StringOrUintVisitor;
impl<'de> Visitor<'de> for StringOrUintVisitor {
type Value = String;
fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result {
formatter.write_str("a string or unsigned integer")
}
fn visit_str<E>(self, value: &str) -> Result<Self::Value, E>
where
E: serde::de::Error,
{
Ok(value.to_owned())
}
fn visit_u64<E>(self, value: u64) -> Result<Self::Value, E>
where
E: serde::de::Error,
{
Ok(value.to_string())
}
}
deserializer.deserialize_any(StringOrUintVisitor)
}
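As a quick illustration (a hypothetical probe, not part of this diff), both JSON encodings land on the same string:
#[derive(serde::Deserialize)]
struct ClientIdProbe {
    #[serde(deserialize_with = "deserialize_string_or_uint")]
    discord_client_id: String,
}

// Figment may surface the env value as either form depending on how it parses.
let a: ClientIdProbe = serde_json::from_str(r#"{"discord_client_id":"1234"}"#).unwrap();
let b: ClientIdProbe = serde_json::from_str(r#"{"discord_client_id":1234}"#).unwrap();
assert_eq!(a.discord_client_id, b.discord_client_id);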
#[cfg(test)]
mod tests {
use super::*;
+536 -113
@@ -1,10 +1,11 @@
//! Batch database operations for improved performance.
use crate::banner::Course;
use crate::data::models::DbMeetingTime;
use crate::data::models::{DbMeetingTime, UpsertCounts};
use crate::error::Result;
use sqlx::PgConnection;
use sqlx::PgPool;
use std::collections::HashSet;
use std::collections::{HashMap, HashSet};
use std::time::Instant;
use tracing::info;
@@ -57,46 +58,389 @@ fn extract_campus_code(course: &Course) -> Option<String> {
.and_then(|mf| mf.meeting_time.campus.clone())
}
// ---------------------------------------------------------------------------
// Task 1: UpsertDiffRow — captures pre- and post-upsert state for diffing
// ---------------------------------------------------------------------------
/// Row returned by the CTE-based upsert query, carrying both old and new values
/// for every auditable field. `old_id` is `None` for fresh inserts.
#[derive(sqlx::FromRow, Debug)]
struct UpsertDiffRow {
id: i32,
old_id: Option<i32>,
crn: String,
term_code: String,
// enrollment fields
old_enrollment: Option<i32>,
new_enrollment: i32,
old_max_enrollment: Option<i32>,
new_max_enrollment: i32,
old_wait_count: Option<i32>,
new_wait_count: i32,
old_wait_capacity: Option<i32>,
new_wait_capacity: i32,
// text fields (non-nullable in DB)
old_subject: Option<String>,
new_subject: String,
old_course_number: Option<String>,
new_course_number: String,
old_title: Option<String>,
new_title: String,
// nullable text fields
old_sequence_number: Option<String>,
new_sequence_number: Option<String>,
old_part_of_term: Option<String>,
new_part_of_term: Option<String>,
old_instructional_method: Option<String>,
new_instructional_method: Option<String>,
old_campus: Option<String>,
new_campus: Option<String>,
// nullable int fields
old_credit_hours: Option<i32>,
new_credit_hours: Option<i32>,
old_credit_hour_low: Option<i32>,
new_credit_hour_low: Option<i32>,
old_credit_hour_high: Option<i32>,
new_credit_hour_high: Option<i32>,
// cross-list fields
old_cross_list: Option<String>,
new_cross_list: Option<String>,
old_cross_list_capacity: Option<i32>,
new_cross_list_capacity: Option<i32>,
old_cross_list_count: Option<i32>,
new_cross_list_count: Option<i32>,
// link fields
old_link_identifier: Option<String>,
new_link_identifier: Option<String>,
old_is_section_linked: Option<bool>,
new_is_section_linked: Option<bool>,
// JSONB fields
old_meeting_times: Option<serde_json::Value>,
new_meeting_times: serde_json::Value,
old_attributes: Option<serde_json::Value>,
new_attributes: serde_json::Value,
}
// ---------------------------------------------------------------------------
// Task 3: Entry types and diff logic
// ---------------------------------------------------------------------------
struct AuditEntry {
course_id: i32,
field_changed: &'static str,
old_value: String,
new_value: String,
}
struct MetricEntry {
course_id: i32,
enrollment: i32,
wait_count: i32,
seats_available: i32,
}
/// Compare old vs new for a single field, pushing an `AuditEntry` when they differ.
///
/// Three variants:
/// - `diff_field!(audits, row, field_name, old_field, new_field)` — `Option<T>` old vs `T` new
/// - `diff_field!(opt audits, row, field_name, old_field, new_field)` — `Option<T>` old vs `Option<T>` new
/// - `diff_field!(json audits, row, field_name, old_field, new_field)` — `Option<Value>` old vs `Value` new
///
/// All variants skip when `old_id` is None (fresh insert).
macro_rules! diff_field {
// Standard: Option<T> old vs T new (non-nullable columns)
($audits:ident, $row:ident, $field:expr, $old:ident, $new:ident) => {
if $row.old_id.is_some() {
let old_str = $row
.$old
.as_ref()
.map(|v| v.to_string())
.unwrap_or_default();
let new_str = $row.$new.to_string();
if old_str != new_str {
$audits.push(AuditEntry {
course_id: $row.id,
field_changed: $field,
old_value: old_str,
new_value: new_str,
});
}
}
};
// Nullable: Option<T> old vs Option<T> new
(opt $audits:ident, $row:ident, $field:expr, $old:ident, $new:ident) => {
if $row.old_id.is_some() {
let old_str = $row
.$old
.as_ref()
.map(|v| v.to_string())
.unwrap_or_default();
let new_str = $row
.$new
.as_ref()
.map(|v| v.to_string())
.unwrap_or_default();
if old_str != new_str {
$audits.push(AuditEntry {
course_id: $row.id,
field_changed: $field,
old_value: old_str,
new_value: new_str,
});
}
}
};
// JSONB: Option<Value> old vs Value new
(json $audits:ident, $row:ident, $field:expr, $old:ident, $new:ident) => {
if $row.old_id.is_some() {
let old_val = $row
.$old
.as_ref()
.cloned()
.unwrap_or(serde_json::Value::Null);
let new_val = &$row.$new;
if old_val != *new_val {
$audits.push(AuditEntry {
course_id: $row.id,
field_changed: $field,
old_value: old_val.to_string(),
new_value: new_val.to_string(),
});
}
}
};
}
/// Compute audit entries (field-level diffs) and metric entries from upsert diff rows.
fn compute_diffs(rows: &[UpsertDiffRow]) -> (Vec<AuditEntry>, Vec<MetricEntry>) {
let mut audits = Vec::new();
let mut metrics = Vec::new();
for row in rows {
// Non-nullable fields
diff_field!(audits, row, "enrollment", old_enrollment, new_enrollment);
diff_field!(
audits,
row,
"max_enrollment",
old_max_enrollment,
new_max_enrollment
);
diff_field!(audits, row, "wait_count", old_wait_count, new_wait_count);
diff_field!(
audits,
row,
"wait_capacity",
old_wait_capacity,
new_wait_capacity
);
diff_field!(audits, row, "subject", old_subject, new_subject);
diff_field!(
audits,
row,
"course_number",
old_course_number,
new_course_number
);
diff_field!(audits, row, "title", old_title, new_title);
// Nullable text fields
diff_field!(opt audits, row, "sequence_number", old_sequence_number, new_sequence_number);
diff_field!(opt audits, row, "part_of_term", old_part_of_term, new_part_of_term);
diff_field!(opt audits, row, "instructional_method", old_instructional_method, new_instructional_method);
diff_field!(opt audits, row, "campus", old_campus, new_campus);
// Nullable int fields
diff_field!(opt audits, row, "credit_hours", old_credit_hours, new_credit_hours);
diff_field!(opt audits, row, "credit_hour_low", old_credit_hour_low, new_credit_hour_low);
diff_field!(opt audits, row, "credit_hour_high", old_credit_hour_high, new_credit_hour_high);
// Cross-list fields
diff_field!(opt audits, row, "cross_list", old_cross_list, new_cross_list);
diff_field!(opt audits, row, "cross_list_capacity", old_cross_list_capacity, new_cross_list_capacity);
diff_field!(opt audits, row, "cross_list_count", old_cross_list_count, new_cross_list_count);
// Link fields
diff_field!(opt audits, row, "link_identifier", old_link_identifier, new_link_identifier);
diff_field!(opt audits, row, "is_section_linked", old_is_section_linked, new_is_section_linked);
// JSONB fields
diff_field!(json audits, row, "meeting_times", old_meeting_times, new_meeting_times);
diff_field!(json audits, row, "attributes", old_attributes, new_attributes);
// Emit a metric entry when enrollment/wait_count/max_enrollment changed
// Skip fresh inserts (no old data to compare against)
let enrollment_changed = row.old_id.is_some()
&& (row.old_enrollment != Some(row.new_enrollment)
|| row.old_wait_count != Some(row.new_wait_count)
|| row.old_max_enrollment != Some(row.new_max_enrollment));
if enrollment_changed {
metrics.push(MetricEntry {
course_id: row.id,
enrollment: row.new_enrollment,
wait_count: row.new_wait_count,
seats_available: row.new_max_enrollment - row.new_enrollment,
});
}
}
(audits, metrics)
}
// ---------------------------------------------------------------------------
// Task 4: Batch insert functions for audits and metrics
// ---------------------------------------------------------------------------
async fn insert_audits(audits: &[AuditEntry], conn: &mut PgConnection) -> Result<()> {
if audits.is_empty() {
return Ok(());
}
let course_ids: Vec<i32> = audits.iter().map(|a| a.course_id).collect();
let fields: Vec<&str> = audits.iter().map(|a| a.field_changed).collect();
let old_values: Vec<&str> = audits.iter().map(|a| a.old_value.as_str()).collect();
let new_values: Vec<&str> = audits.iter().map(|a| a.new_value.as_str()).collect();
sqlx::query(
r#"
INSERT INTO course_audits (course_id, timestamp, field_changed, old_value, new_value)
SELECT v.course_id, NOW(), v.field_changed, v.old_value, v.new_value
FROM UNNEST($1::int4[], $2::text[], $3::text[], $4::text[])
AS v(course_id, field_changed, old_value, new_value)
"#,
)
.bind(&course_ids)
.bind(&fields)
.bind(&old_values)
.bind(&new_values)
.execute(&mut *conn)
.await
.map_err(|e| anyhow::anyhow!("Failed to batch insert course_audits: {}", e))?;
Ok(())
}
async fn insert_metrics(metrics: &[MetricEntry], conn: &mut PgConnection) -> Result<()> {
if metrics.is_empty() {
return Ok(());
}
let course_ids: Vec<i32> = metrics.iter().map(|m| m.course_id).collect();
let enrollments: Vec<i32> = metrics.iter().map(|m| m.enrollment).collect();
let wait_counts: Vec<i32> = metrics.iter().map(|m| m.wait_count).collect();
let seats_available: Vec<i32> = metrics.iter().map(|m| m.seats_available).collect();
sqlx::query(
r#"
INSERT INTO course_metrics (course_id, timestamp, enrollment, wait_count, seats_available)
SELECT v.course_id, NOW(), v.enrollment, v.wait_count, v.seats_available
FROM UNNEST($1::int4[], $2::int4[], $3::int4[], $4::int4[])
AS v(course_id, enrollment, wait_count, seats_available)
"#,
)
.bind(&course_ids)
.bind(&enrollments)
.bind(&wait_counts)
.bind(&seats_available)
.execute(&mut *conn)
.await
.map_err(|e| anyhow::anyhow!("Failed to batch insert course_metrics: {}", e))?;
Ok(())
}
// ---------------------------------------------------------------------------
// Core upsert functions (updated to use &mut PgConnection)
// ---------------------------------------------------------------------------
/// Batch upsert courses in a single database query.
///
/// Performs a bulk INSERT...ON CONFLICT DO UPDATE for all courses, including
/// new fields (meeting times, attributes, instructor data). Returns the
/// database IDs for all upserted courses (in input order) so instructors
/// can be linked.
/// new fields (meeting times, attributes, instructor data). Captures pre-update
/// state for audit/metric tracking, all within a single transaction.
///
/// # Performance
/// - Reduces N database round-trips to 3 (courses, instructors, junction)
/// - Reduces N database round-trips to 5 (old-data CTE + upsert, audits, metrics, instructors, junction)
/// - Typical usage: 50-200 courses per batch
pub async fn batch_upsert_courses(courses: &[Course], db_pool: &PgPool) -> Result<()> {
pub async fn batch_upsert_courses(courses: &[Course], db_pool: &PgPool) -> Result<UpsertCounts> {
if courses.is_empty() {
info!("No courses to upsert, skipping batch operation");
return Ok(());
return Ok(UpsertCounts::default());
}
let start = Instant::now();
let course_count = courses.len();
// Step 1: Upsert courses with all fields, returning IDs
let course_ids = upsert_courses(courses, db_pool).await?;
let mut tx = db_pool.begin().await?;
// Step 2: Upsert instructors (deduplicated across batch)
upsert_instructors(courses, db_pool).await?;
// Step 1: Upsert courses with CTE, returning diff rows
let diff_rows = upsert_courses(courses, &mut tx).await?;
// Step 3: Link courses to instructors via junction table
upsert_course_instructors(courses, &course_ids, db_pool).await?;
// Step 2: Build (crn, term_code) → course_id map for instructor linking.
// RETURNING order from INSERT ... ON CONFLICT is not guaranteed to match
// the input array order, so we must key by (crn, term_code) rather than
// relying on positional correspondence.
let crn_term_to_id: HashMap<(&str, &str), i32> = diff_rows
.iter()
.map(|r| ((r.crn.as_str(), r.term_code.as_str()), r.id))
.collect();
// Step 3: Compute audit/metric diffs
let (audits, metrics) = compute_diffs(&diff_rows);
// Count courses that had at least one field change (existing rows only)
let changed_ids: HashSet<i32> = audits.iter().map(|a| a.course_id).collect();
let existing_count = diff_rows.iter().filter(|r| r.old_id.is_some()).count() as i32;
let courses_changed = changed_ids.len() as i32;
let counts = UpsertCounts {
courses_fetched: course_count as i32,
courses_changed,
courses_unchanged: existing_count - courses_changed,
audits_generated: audits.len() as i32,
metrics_generated: metrics.len() as i32,
};
// Step 4: Insert audits and metrics
insert_audits(&audits, &mut tx).await?;
insert_metrics(&metrics, &mut tx).await?;
// Step 5: Upsert instructors (returns email -> id map)
let email_to_id = upsert_instructors(courses, &mut tx).await?;
// Step 6: Link courses to instructors via junction table
upsert_course_instructors(courses, &crn_term_to_id, &email_to_id, &mut tx).await?;
tx.commit().await?;
let duration = start.elapsed();
info!(
courses_count = course_count,
courses_changed = counts.courses_changed,
courses_unchanged = counts.courses_unchanged,
audit_entries = counts.audits_generated,
metric_entries = counts.metrics_generated,
duration_ms = duration.as_millis(),
"Batch upserted courses with instructors"
"Batch upserted courses with instructors, audits, and metrics"
);
Ok(())
Ok(counts)
}
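A hypothetical call site (names assumed) showing how the returned counts feed scrape-job result logging:
let counts = batch_upsert_courses(&courses, &db_pool).await?;
tracing::info!(
    fetched = counts.courses_fetched,
    changed = counts.courses_changed,
    unchanged = counts.courses_unchanged,
    "Scrape batch persisted"
);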
/// Upsert all courses and return their database IDs in input order.
async fn upsert_courses(courses: &[Course], db_pool: &PgPool) -> Result<Vec<i32>> {
// ---------------------------------------------------------------------------
// Task 2: CTE-based upsert returning old+new values
// ---------------------------------------------------------------------------
/// Upsert all courses and return diff rows with old and new values for auditing.
async fn upsert_courses(courses: &[Course], conn: &mut PgConnection) -> Result<Vec<UpsertDiffRow>> {
let crns: Vec<&str> = courses
.iter()
.map(|c| c.course_reference_number.as_str())
@@ -143,67 +487,107 @@ async fn upsert_courses(courses: &[Course], db_pool: &PgPool) -> Result<Vec<i32>
courses.iter().map(to_db_meeting_times).collect();
let attributes_json: Vec<serde_json::Value> = courses.iter().map(to_db_attributes).collect();
let rows = sqlx::query_scalar::<_, i32>(
let rows = sqlx::query_as::<_, UpsertDiffRow>(
r#"
INSERT INTO courses (
crn, subject, course_number, title, term_code,
enrollment, max_enrollment, wait_count, wait_capacity, last_scraped_at,
sequence_number, part_of_term, instructional_method, campus,
credit_hours, credit_hour_low, credit_hour_high,
cross_list, cross_list_capacity, cross_list_count,
link_identifier, is_section_linked,
meeting_times, attributes
WITH old_data AS (
SELECT id, enrollment, max_enrollment, wait_count, wait_capacity,
subject, course_number, title,
sequence_number, part_of_term, instructional_method, campus,
credit_hours, credit_hour_low, credit_hour_high,
cross_list, cross_list_capacity, cross_list_count,
link_identifier, is_section_linked,
meeting_times, attributes,
crn, term_code
FROM courses
WHERE (crn, term_code) IN (SELECT * FROM UNNEST($1::text[], $5::text[]))
),
upserted AS (
INSERT INTO courses (
crn, subject, course_number, title, term_code,
enrollment, max_enrollment, wait_count, wait_capacity, last_scraped_at,
sequence_number, part_of_term, instructional_method, campus,
credit_hours, credit_hour_low, credit_hour_high,
cross_list, cross_list_capacity, cross_list_count,
link_identifier, is_section_linked,
meeting_times, attributes
)
SELECT
v.crn, v.subject, v.course_number, v.title, v.term_code,
v.enrollment, v.max_enrollment, v.wait_count, v.wait_capacity, NOW(),
v.sequence_number, v.part_of_term, v.instructional_method, v.campus,
v.credit_hours, v.credit_hour_low, v.credit_hour_high,
v.cross_list, v.cross_list_capacity, v.cross_list_count,
v.link_identifier, v.is_section_linked,
v.meeting_times, v.attributes
FROM UNNEST(
$1::text[], $2::text[], $3::text[], $4::text[], $5::text[],
$6::int4[], $7::int4[], $8::int4[], $9::int4[],
$10::text[], $11::text[], $12::text[], $13::text[],
$14::int4[], $15::int4[], $16::int4[],
$17::text[], $18::int4[], $19::int4[],
$20::text[], $21::bool[],
$22::jsonb[], $23::jsonb[]
) AS v(
crn, subject, course_number, title, term_code,
enrollment, max_enrollment, wait_count, wait_capacity,
sequence_number, part_of_term, instructional_method, campus,
credit_hours, credit_hour_low, credit_hour_high,
cross_list, cross_list_capacity, cross_list_count,
link_identifier, is_section_linked,
meeting_times, attributes
)
ON CONFLICT (crn, term_code)
DO UPDATE SET
subject = EXCLUDED.subject,
course_number = EXCLUDED.course_number,
title = EXCLUDED.title,
enrollment = EXCLUDED.enrollment,
max_enrollment = EXCLUDED.max_enrollment,
wait_count = EXCLUDED.wait_count,
wait_capacity = EXCLUDED.wait_capacity,
last_scraped_at = EXCLUDED.last_scraped_at,
sequence_number = EXCLUDED.sequence_number,
part_of_term = EXCLUDED.part_of_term,
instructional_method = EXCLUDED.instructional_method,
campus = EXCLUDED.campus,
credit_hours = EXCLUDED.credit_hours,
credit_hour_low = EXCLUDED.credit_hour_low,
credit_hour_high = EXCLUDED.credit_hour_high,
cross_list = EXCLUDED.cross_list,
cross_list_capacity = EXCLUDED.cross_list_capacity,
cross_list_count = EXCLUDED.cross_list_count,
link_identifier = EXCLUDED.link_identifier,
is_section_linked = EXCLUDED.is_section_linked,
meeting_times = EXCLUDED.meeting_times,
attributes = EXCLUDED.attributes
RETURNING *
)
SELECT
v.crn, v.subject, v.course_number, v.title, v.term_code,
v.enrollment, v.max_enrollment, v.wait_count, v.wait_capacity, NOW(),
v.sequence_number, v.part_of_term, v.instructional_method, v.campus,
v.credit_hours, v.credit_hour_low, v.credit_hour_high,
v.cross_list, v.cross_list_capacity, v.cross_list_count,
v.link_identifier, v.is_section_linked,
v.meeting_times, v.attributes
FROM UNNEST(
$1::text[], $2::text[], $3::text[], $4::text[], $5::text[],
$6::int4[], $7::int4[], $8::int4[], $9::int4[],
$10::text[], $11::text[], $12::text[], $13::text[],
$14::int4[], $15::int4[], $16::int4[],
$17::text[], $18::int4[], $19::int4[],
$20::text[], $21::bool[],
$22::jsonb[], $23::jsonb[]
) AS v(
crn, subject, course_number, title, term_code,
enrollment, max_enrollment, wait_count, wait_capacity,
sequence_number, part_of_term, instructional_method, campus,
credit_hours, credit_hour_low, credit_hour_high,
cross_list, cross_list_capacity, cross_list_count,
link_identifier, is_section_linked,
meeting_times, attributes
)
ON CONFLICT (crn, term_code)
DO UPDATE SET
subject = EXCLUDED.subject,
course_number = EXCLUDED.course_number,
title = EXCLUDED.title,
enrollment = EXCLUDED.enrollment,
max_enrollment = EXCLUDED.max_enrollment,
wait_count = EXCLUDED.wait_count,
wait_capacity = EXCLUDED.wait_capacity,
last_scraped_at = EXCLUDED.last_scraped_at,
sequence_number = EXCLUDED.sequence_number,
part_of_term = EXCLUDED.part_of_term,
instructional_method = EXCLUDED.instructional_method,
campus = EXCLUDED.campus,
credit_hours = EXCLUDED.credit_hours,
credit_hour_low = EXCLUDED.credit_hour_low,
credit_hour_high = EXCLUDED.credit_hour_high,
cross_list = EXCLUDED.cross_list,
cross_list_capacity = EXCLUDED.cross_list_capacity,
cross_list_count = EXCLUDED.cross_list_count,
link_identifier = EXCLUDED.link_identifier,
is_section_linked = EXCLUDED.is_section_linked,
meeting_times = EXCLUDED.meeting_times,
attributes = EXCLUDED.attributes
RETURNING id
SELECT u.id,
o.id AS old_id,
u.crn, u.term_code,
o.enrollment AS old_enrollment, u.enrollment AS new_enrollment,
o.max_enrollment AS old_max_enrollment, u.max_enrollment AS new_max_enrollment,
o.wait_count AS old_wait_count, u.wait_count AS new_wait_count,
o.wait_capacity AS old_wait_capacity, u.wait_capacity AS new_wait_capacity,
o.subject AS old_subject, u.subject AS new_subject,
o.course_number AS old_course_number, u.course_number AS new_course_number,
o.title AS old_title, u.title AS new_title,
o.sequence_number AS old_sequence_number, u.sequence_number AS new_sequence_number,
o.part_of_term AS old_part_of_term, u.part_of_term AS new_part_of_term,
o.instructional_method AS old_instructional_method, u.instructional_method AS new_instructional_method,
o.campus AS old_campus, u.campus AS new_campus,
o.credit_hours AS old_credit_hours, u.credit_hours AS new_credit_hours,
o.credit_hour_low AS old_credit_hour_low, u.credit_hour_low AS new_credit_hour_low,
o.credit_hour_high AS old_credit_hour_high, u.credit_hour_high AS new_credit_hour_high,
o.cross_list AS old_cross_list, u.cross_list AS new_cross_list,
o.cross_list_capacity AS old_cross_list_capacity, u.cross_list_capacity AS new_cross_list_capacity,
o.cross_list_count AS old_cross_list_count, u.cross_list_count AS new_cross_list_count,
o.link_identifier AS old_link_identifier, u.link_identifier AS new_link_identifier,
o.is_section_linked AS old_is_section_linked, u.is_section_linked AS new_is_section_linked,
o.meeting_times AS old_meeting_times, u.meeting_times AS new_meeting_times,
o.attributes AS old_attributes, u.attributes AS new_attributes
FROM upserted u
LEFT JOIN old_data o ON u.crn = o.crn AND u.term_code = o.term_code
"#,
)
.bind(&crns)
@@ -229,69 +613,105 @@ async fn upsert_courses(courses: &[Course], db_pool: &PgPool) -> Result<Vec<i32>
.bind(&is_section_linkeds)
.bind(&meeting_times_json)
.bind(&attributes_json)
.fetch_all(db_pool)
.fetch_all(&mut *conn)
.await
.map_err(|e| anyhow::anyhow!("Failed to batch upsert courses: {}", e))?;
Ok(rows)
}
/// Deduplicate and upsert all instructors from the batch.
async fn upsert_instructors(courses: &[Course], db_pool: &PgPool) -> Result<()> {
/// Deduplicate and upsert all instructors from the batch by email.
/// Returns a map of lowercased_email -> instructor id for junction linking.
async fn upsert_instructors(
courses: &[Course],
conn: &mut PgConnection,
) -> Result<HashMap<String, i32>> {
let mut seen = HashSet::new();
let mut banner_ids = Vec::new();
let mut display_names = Vec::new();
let mut emails: Vec<Option<&str>> = Vec::new();
let mut display_names: Vec<&str> = Vec::new();
let mut emails_lower: Vec<String> = Vec::new();
let mut skipped_no_email = 0u32;
for course in courses {
for faculty in &course.faculty {
if seen.insert(faculty.banner_id.as_str()) {
banner_ids.push(faculty.banner_id.as_str());
display_names.push(faculty.display_name.as_str());
emails.push(faculty.email_address.as_deref());
if let Some(email) = &faculty.email_address {
let email_lower = email.to_lowercase();
if seen.insert(email_lower.clone()) {
display_names.push(faculty.display_name.as_str());
emails_lower.push(email_lower);
}
} else {
skipped_no_email += 1;
}
}
}
if banner_ids.is_empty() {
return Ok(());
if skipped_no_email > 0 {
tracing::warn!(
count = skipped_no_email,
"Skipped instructors with no email address"
);
}
sqlx::query(
if display_names.is_empty() {
return Ok(HashMap::new());
}
let email_refs: Vec<&str> = emails_lower.iter().map(|s| s.as_str()).collect();
let rows: Vec<(i32, String)> = sqlx::query_as(
r#"
INSERT INTO instructors (banner_id, display_name, email)
SELECT * FROM UNNEST($1::text[], $2::text[], $3::text[])
ON CONFLICT (banner_id)
DO UPDATE SET
display_name = EXCLUDED.display_name,
email = COALESCE(EXCLUDED.email, instructors.email)
INSERT INTO instructors (display_name, email)
SELECT * FROM UNNEST($1::text[], $2::text[])
ON CONFLICT (email)
DO UPDATE SET display_name = EXCLUDED.display_name
RETURNING id, email
"#,
)
.bind(&banner_ids)
.bind(&display_names)
.bind(&emails)
.execute(db_pool)
.bind(&email_refs)
.fetch_all(&mut *conn)
.await
.map_err(|e| anyhow::anyhow!("Failed to batch upsert instructors: {}", e))?;
Ok(())
Ok(rows.into_iter().map(|(id, email)| (email, id)).collect())
}
/// Link courses to their instructors via the junction table.
async fn upsert_course_instructors(
courses: &[Course],
course_ids: &[i32],
db_pool: &PgPool,
crn_term_to_id: &HashMap<(&str, &str), i32>,
email_to_id: &HashMap<String, i32>,
conn: &mut PgConnection,
) -> Result<()> {
let mut cids = Vec::new();
let mut iids = Vec::new();
let mut instructor_ids: Vec<i32> = Vec::new();
let mut banner_ids: Vec<&str> = Vec::new();
let mut primaries = Vec::new();
for (course, &course_id) in courses.iter().zip(course_ids) {
for course in courses {
let key = (
course.course_reference_number.as_str(),
course.term.as_str(),
);
let Some(&course_id) = crn_term_to_id.get(&key) else {
tracing::warn!(
crn = %course.course_reference_number,
term = %course.term,
"No course_id found for CRN/term pair during instructor linking"
);
continue;
};
for faculty in &course.faculty {
cids.push(course_id);
iids.push(faculty.banner_id.as_str());
primaries.push(faculty.primary_indicator);
if let Some(email) = &faculty.email_address {
let email_lower = email.to_lowercase();
if let Some(&instructor_id) = email_to_id.get(&email_lower) {
cids.push(course_id);
instructor_ids.push(instructor_id);
banner_ids.push(faculty.banner_id.as_str());
primaries.push(faculty.primary_indicator);
}
}
}
}
@@ -303,21 +723,24 @@ async fn upsert_course_instructors(
// This handles instructor changes cleanly.
sqlx::query("DELETE FROM course_instructors WHERE course_id = ANY($1)")
.bind(&cids)
.execute(db_pool)
.execute(&mut *conn)
.await?;
sqlx::query(
r#"
INSERT INTO course_instructors (course_id, instructor_id, is_primary)
SELECT * FROM UNNEST($1::int4[], $2::text[], $3::bool[])
INSERT INTO course_instructors (course_id, instructor_id, banner_id, is_primary)
SELECT * FROM UNNEST($1::int4[], $2::int4[], $3::text[], $4::bool[])
ON CONFLICT (course_id, instructor_id)
DO UPDATE SET is_primary = EXCLUDED.is_primary
DO UPDATE SET
banner_id = EXCLUDED.banner_id,
is_primary = EXCLUDED.is_primary
"#,
)
.bind(&cids)
.bind(&iids)
.bind(&instructor_ids)
.bind(&banner_ids)
.bind(&primaries)
.execute(db_pool)
.execute(&mut *conn)
.await
.map_err(|e| anyhow::anyhow!("Failed to batch upsert course_instructors: {}", e))?;
+25 -12
@@ -55,7 +55,7 @@ fn sort_clause(column: Option<SortColumn>, direction: Option<SortDirection>) ->
Some(SortColumn::Instructor) => {
format!(
"(SELECT i.display_name FROM course_instructors ci \
JOIN instructors i ON i.banner_id = ci.instructor_id \
JOIN instructors i ON i.id = ci.instructor_id \
WHERE ci.course_id = courses.id AND ci.is_primary = true \
LIMIT 1) {dir} NULLS LAST"
)
@@ -92,9 +92,8 @@ pub async fn search_courses(
) -> Result<(Vec<Course>, i64)> {
let order_by = sort_clause(sort_by, sort_dir);
let data_query = format!(
"SELECT * FROM courses {SEARCH_WHERE} ORDER BY {order_by} LIMIT $9 OFFSET $10"
);
let data_query =
format!("SELECT * FROM courses {SEARCH_WHERE} ORDER BY {order_by} LIMIT $9 OFFSET $10");
let count_query = format!("SELECT COUNT(*) FROM courses {SEARCH_WHERE}");
let courses = sqlx::query_as::<_, Course>(&data_query)
@@ -148,12 +147,19 @@ pub async fn get_course_instructors(
) -> Result<Vec<CourseInstructorDetail>> {
let rows = sqlx::query_as::<_, CourseInstructorDetail>(
r#"
SELECT i.banner_id, i.display_name, i.email, ci.is_primary,
rp.avg_rating, rp.num_ratings,
SELECT i.id as instructor_id, ci.banner_id, i.display_name, i.email, ci.is_primary,
rmp.avg_rating, rmp.num_ratings, rmp.rmp_legacy_id,
ci.course_id
FROM course_instructors ci
JOIN instructors i ON i.banner_id = ci.instructor_id
LEFT JOIN rmp_professors rp ON rp.legacy_id = i.rmp_legacy_id
JOIN instructors i ON i.id = ci.instructor_id
LEFT JOIN LATERAL (
SELECT rp.avg_rating, rp.num_ratings, rp.legacy_id as rmp_legacy_id
FROM instructor_rmp_links irl
JOIN rmp_professors rp ON rp.legacy_id = irl.rmp_legacy_id
WHERE irl.instructor_id = i.id
ORDER BY rp.num_ratings DESC NULLS LAST, rp.legacy_id ASC
LIMIT 1
) rmp ON true
WHERE ci.course_id = $1
ORDER BY ci.is_primary DESC, i.display_name
"#,
@@ -177,12 +183,19 @@ pub async fn get_instructors_for_courses(
let rows = sqlx::query_as::<_, CourseInstructorDetail>(
r#"
SELECT i.banner_id, i.display_name, i.email, ci.is_primary,
rp.avg_rating, rp.num_ratings,
SELECT i.id as instructor_id, ci.banner_id, i.display_name, i.email, ci.is_primary,
rmp.avg_rating, rmp.num_ratings, rmp.rmp_legacy_id,
ci.course_id
FROM course_instructors ci
JOIN instructors i ON i.banner_id = ci.instructor_id
LEFT JOIN rmp_professors rp ON rp.legacy_id = i.rmp_legacy_id
JOIN instructors i ON i.id = ci.instructor_id
LEFT JOIN LATERAL (
SELECT rp.avg_rating, rp.num_ratings, rp.legacy_id as rmp_legacy_id
FROM instructor_rmp_links irl
JOIN rmp_professors rp ON rp.legacy_id = irl.rmp_legacy_id
WHERE irl.instructor_id = i.id
ORDER BY rp.num_ratings DESC NULLS LAST, rp.legacy_id ASC
LIMIT 1
) rmp ON true
WHERE ci.course_id = ANY($1)
ORDER BY ci.course_id, ci.is_primary DESC, i.display_name
"#,
+3
@@ -5,4 +5,7 @@ pub mod courses;
pub mod models;
pub mod reference;
pub mod rmp;
pub mod rmp_matching;
pub mod scrape_jobs;
pub mod sessions;
pub mod users;
+119 -5
@@ -1,10 +1,46 @@
//! `sqlx` models for the database schema.
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
use serde::{Deserialize, Deserializer, Serialize, Serializer};
use serde_json::Value;
use ts_rs::TS;
/// Serialize an `i64` as a string to avoid JavaScript precision loss for values exceeding 2^53.
fn serialize_i64_as_string<S: Serializer>(value: &i64, serializer: S) -> Result<S::Ok, S::Error> {
serializer.serialize_str(&value.to_string())
}
/// Deserialize an `i64` from either a number or a string.
fn deserialize_i64_from_string<'de, D: Deserializer<'de>>(
deserializer: D,
) -> Result<i64, D::Error> {
use serde::de;
struct I64OrStringVisitor;
impl<'de> de::Visitor<'de> for I64OrStringVisitor {
type Value = i64;
fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result {
formatter.write_str("an integer or a string containing an integer")
}
fn visit_i64<E: de::Error>(self, value: i64) -> Result<i64, E> {
Ok(value)
}
fn visit_u64<E: de::Error>(self, value: u64) -> Result<i64, E> {
i64::try_from(value).map_err(|_| E::custom(format!("u64 {value} out of i64 range")))
}
fn visit_str<E: de::Error>(self, value: &str) -> Result<i64, E> {
value.parse().map_err(de::Error::custom)
}
}
deserializer.deserialize_any(I64OrStringVisitor)
}
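A minimal round-trip sketch (hypothetical, using a probe struct) showing why the string form matters once values exceed 2^53:
#[derive(Serialize, Deserialize)]
struct IdProbe {
    #[serde(
        serialize_with = "serialize_i64_as_string",
        deserialize_with = "deserialize_i64_from_string"
    )]
    id: i64,
}

// 9007199254740993 == 2^53 + 1 is not exactly representable as a JS number,
// but as a JSON string it survives the round trip unchanged.
let json = serde_json::to_string(&IdProbe { id: 9_007_199_254_740_993 }).unwrap();
assert_eq!(json, r#"{"id":"9007199254740993"}"#);
let back: IdProbe = serde_json::from_str(&json).unwrap();
assert_eq!(back.id, 9_007_199_254_740_993);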
/// Represents a meeting time stored as JSONB in the courses table.
#[derive(Debug, Clone, Serialize, Deserialize, TS)]
#[ts(export)]
@@ -63,28 +99,32 @@ pub struct Course {
#[allow(dead_code)]
#[derive(sqlx::FromRow, Debug, Clone)]
pub struct Instructor {
pub banner_id: String,
pub id: i32,
pub display_name: String,
pub email: Option<String>,
pub email: String,
pub rmp_match_status: String,
}
#[allow(dead_code)]
#[derive(sqlx::FromRow, Debug, Clone)]
pub struct CourseInstructor {
pub course_id: i32,
pub instructor_id: String,
pub instructor_id: i32,
pub banner_id: String,
pub is_primary: bool,
}
/// Joined instructor data for a course (from course_instructors + instructors + rmp_professors).
#[derive(sqlx::FromRow, Debug, Clone)]
pub struct CourseInstructorDetail {
pub instructor_id: i32,
pub banner_id: String,
pub display_name: String,
pub email: Option<String>,
pub email: String,
pub is_primary: bool,
pub avg_rating: Option<f32>,
pub num_ratings: Option<i32>,
pub rmp_legacy_id: Option<i32>,
/// Present when fetched via batch query; `None` for single-course queries.
pub course_id: Option<i32>,
}
@@ -119,6 +159,16 @@ pub struct CourseAudit {
pub new_value: String,
}
/// Aggregate counts returned by batch upsert, used for scrape job result logging.
#[derive(Debug, Clone, Default)]
pub struct UpsertCounts {
pub courses_fetched: i32,
pub courses_changed: i32,
pub courses_unchanged: i32,
pub audits_generated: i32,
pub metrics_generated: i32,
}
/// The priority level of a scrape job.
#[derive(sqlx::Type, Copy, Debug, Clone)]
#[sqlx(type_name = "scrape_priority", rename_all = "PascalCase")]
@@ -139,6 +189,20 @@ pub enum TargetType {
SingleCrn,
}
/// Computed status for a scrape job, derived from existing fields.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize)]
#[serde(rename_all = "camelCase")]
pub enum ScrapeJobStatus {
Processing,
StaleLock,
Exhausted,
Scheduled,
Pending,
}
/// How long a lock can be held before it is considered stale (mirrors `scrape_jobs::LOCK_EXPIRY`).
const LOCK_EXPIRY_SECS: i64 = 10 * 60;
/// Represents a queryable job from the database.
#[allow(dead_code)]
#[derive(sqlx::FromRow, Debug, Clone)]
@@ -154,4 +218,54 @@ pub struct ScrapeJob {
pub retry_count: i32,
/// Maximum number of retry attempts allowed (non-negative, enforced by CHECK constraint)
pub max_retries: i32,
/// When the job last entered the "ready to pick up" state.
/// Set to NOW() on creation; updated to NOW() on retry.
pub queued_at: DateTime<Utc>,
}
impl ScrapeJob {
/// Compute the current status of this job from its fields.
pub fn status(&self) -> ScrapeJobStatus {
let now = Utc::now();
match self.locked_at {
Some(locked) if (now - locked).num_seconds() < LOCK_EXPIRY_SECS => {
ScrapeJobStatus::Processing
}
Some(_) => ScrapeJobStatus::StaleLock,
None if self.retry_count >= self.max_retries && self.max_retries > 0 => {
ScrapeJobStatus::Exhausted
}
None if self.execute_at > now => ScrapeJobStatus::Scheduled,
None => ScrapeJobStatus::Pending,
}
}
}
/// A user authenticated via Discord OAuth.
#[derive(sqlx::FromRow, Debug, Clone, Serialize, Deserialize, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export)]
pub struct User {
#[serde(
serialize_with = "serialize_i64_as_string",
deserialize_with = "deserialize_i64_from_string"
)]
#[ts(type = "string")]
pub discord_id: i64,
pub discord_username: String,
pub discord_avatar_hash: Option<String>,
pub is_admin: bool,
pub created_at: DateTime<Utc>,
pub updated_at: DateTime<Utc>,
}
/// A server-side session for an authenticated user.
#[allow(dead_code)] // Fields read via sqlx::FromRow; some only used in DB queries
#[derive(sqlx::FromRow, Debug, Clone)]
pub struct UserSession {
pub id: String,
pub user_id: i64,
pub created_at: DateTime<Utc>,
pub expires_at: DateTime<Utc>,
pub last_active_at: DateTime<Utc>,
}
+13 -115
@@ -3,8 +3,7 @@
use crate::error::Result;
use crate::rmp::RmpProfessor;
use sqlx::PgPool;
use std::collections::{HashMap, HashSet};
use tracing::{debug, info, warn};
use std::collections::HashSet;
/// Bulk upsert RMP professors using the UNNEST pattern.
///
@@ -93,14 +92,14 @@ pub async fn batch_upsert_rmp_professors(
}
/// Normalize a name for matching: lowercase, trim, strip trailing periods.
fn normalize(s: &str) -> String {
pub(crate) fn normalize(s: &str) -> String {
s.trim().to_lowercase().trim_end_matches('.').to_string()
}
/// Parse Banner's "Last, First Middle" display name into (last, first) tokens.
///
/// Returns `None` if the format is unparseable (no comma, empty parts).
fn parse_display_name(display_name: &str) -> Option<(String, String)> {
pub(crate) fn parse_display_name(display_name: &str) -> Option<(String, String)> {
let (last_part, first_part) = display_name.split_once(',')?;
let last = normalize(last_part);
// Take only the first token of the first-name portion to drop middle names/initials.
@@ -111,128 +110,27 @@ fn parse_display_name(display_name: &str) -> Option<(String, String)> {
Some((last, first))
}
/// Auto-match instructors to RMP professors by normalized name.
/// Retrieve RMP rating data for an instructor by instructor id.
///
/// Loads all pending instructors and all RMP professors, then matches in Rust
/// using normalized name comparison. Only assigns a match when exactly one RMP
/// professor matches a given instructor.
pub async fn auto_match_instructors(db_pool: &PgPool) -> Result<u64> {
// Load pending instructors
let instructors: Vec<(String, String)> = sqlx::query_as(
"SELECT banner_id, display_name FROM instructors WHERE rmp_match_status = 'pending'",
)
.fetch_all(db_pool)
.await?;
if instructors.is_empty() {
info!(matched = 0, "No pending instructors to match");
return Ok(0);
}
// Load all RMP professors
let professors: Vec<(i32, String, String)> =
sqlx::query_as("SELECT legacy_id, first_name, last_name FROM rmp_professors")
.fetch_all(db_pool)
.await?;
// Build a lookup: (normalized_last, normalized_first) -> list of legacy_ids
let mut rmp_index: HashMap<(String, String), Vec<i32>> = HashMap::new();
for (legacy_id, first, last) in &professors {
let key = (normalize(last), normalize(first));
rmp_index.entry(key).or_default().push(*legacy_id);
}
// Match each instructor
let mut matches: Vec<(i32, String)> = Vec::new(); // (legacy_id, banner_id)
let mut no_comma = 0u64;
let mut no_match = 0u64;
let mut ambiguous = 0u64;
for (banner_id, display_name) in &instructors {
let Some((last, first)) = parse_display_name(display_name) else {
no_comma += 1;
continue;
};
let key = (last, first);
match rmp_index.get(&key) {
Some(ids) if ids.len() == 1 => {
matches.push((ids[0], banner_id.clone()));
}
Some(ids) => {
ambiguous += 1;
debug!(
banner_id,
display_name,
candidates = ids.len(),
"Ambiguous RMP match, skipping"
);
}
None => {
no_match += 1;
}
}
}
if no_comma > 0 || ambiguous > 0 {
warn!(
total_pending = instructors.len(),
no_comma,
no_match,
ambiguous,
matched = matches.len(),
"RMP matching diagnostics"
);
}
// Batch update matches
if matches.is_empty() {
info!(matched = 0, "Auto-matched instructors to RMP professors");
return Ok(0);
}
let legacy_ids: Vec<i32> = matches.iter().map(|(id, _)| *id).collect();
let banner_ids: Vec<&str> = matches.iter().map(|(_, bid)| bid.as_str()).collect();
let result = sqlx::query(
r#"
UPDATE instructors i
SET
rmp_legacy_id = m.legacy_id,
rmp_match_status = 'auto'
FROM UNNEST($1::int4[], $2::text[]) AS m(legacy_id, banner_id)
WHERE i.banner_id = m.banner_id
"#,
)
.bind(&legacy_ids)
.bind(&banner_ids)
.execute(db_pool)
.await
.map_err(|e| anyhow::anyhow!("Failed to update instructor RMP matches: {}", e))?;
let matched = result.rows_affected();
info!(matched, "Auto-matched instructors to RMP professors");
Ok(matched)
}
/// Retrieve RMP rating data for an instructor by banner_id.
///
/// Returns `(avg_rating, num_ratings)` if the instructor has an RMP match.
/// Returns `(avg_rating, num_ratings)` for the best linked RMP profile
/// (most ratings). Returns `None` if no link exists.
#[allow(dead_code)]
pub async fn get_instructor_rmp_data(
db_pool: &PgPool,
banner_id: &str,
instructor_id: i32,
) -> Result<Option<(f32, i32)>> {
let row: Option<(f32, i32)> = sqlx::query_as(
r#"
SELECT rp.avg_rating, rp.num_ratings
FROM instructors i
JOIN rmp_professors rp ON rp.legacy_id = i.rmp_legacy_id
WHERE i.banner_id = $1
FROM instructor_rmp_links irl
JOIN rmp_professors rp ON rp.legacy_id = irl.rmp_legacy_id
WHERE irl.instructor_id = $1
AND rp.avg_rating IS NOT NULL
ORDER BY rp.num_ratings DESC NULLS LAST
LIMIT 1
"#,
)
.bind(banner_id)
.bind(instructor_id)
.fetch_optional(db_pool)
.await?;
Ok(row)
+513
@@ -0,0 +1,513 @@
//! Confidence scoring and candidate generation for RMP instructor matching.
use crate::data::rmp::{normalize, parse_display_name};
use crate::error::Result;
use serde::{Deserialize, Serialize};
use sqlx::PgPool;
use std::collections::{HashMap, HashSet};
use tracing::{debug, info};
// ---------------------------------------------------------------------------
// Scoring types
// ---------------------------------------------------------------------------
/// Breakdown of individual scoring signals.
#[derive(Debug, Clone, Default, Serialize, Deserialize)]
pub struct ScoreBreakdown {
pub department: f32,
pub uniqueness: f32,
pub volume: f32,
}
/// Result of scoring a single instructor-RMP candidate pair.
#[derive(Debug, Clone)]
pub struct MatchScore {
pub score: f32,
pub breakdown: ScoreBreakdown,
}
// ---------------------------------------------------------------------------
// Thresholds
// ---------------------------------------------------------------------------
/// Minimum composite score to store a candidate row.
const MIN_CANDIDATE_THRESHOLD: f32 = 0.40;
/// Score at or above which a candidate is auto-accepted.
const AUTO_ACCEPT_THRESHOLD: f32 = 0.85;
// ---------------------------------------------------------------------------
// Weights
// ---------------------------------------------------------------------------
const WEIGHT_DEPARTMENT: f32 = 0.50;
const WEIGHT_UNIQUENESS: f32 = 0.30;
const WEIGHT_VOLUME: f32 = 0.20;
// ---------------------------------------------------------------------------
// Pure scoring functions
// ---------------------------------------------------------------------------
/// Check if an instructor's subjects overlap with an RMP department.
///
/// Returns `1.0` for a match, `0.2` for a mismatch, `0.5` when the RMP
/// department is unknown.
fn department_similarity(subjects: &[String], rmp_department: Option<&str>) -> f32 {
let Some(dept) = rmp_department else {
return 0.5;
};
let dept_lower = dept.to_lowercase();
// Quick check: does any subject appear directly in the department string
// or vice-versa?
for subj in subjects {
let subj_lower = subj.to_lowercase();
if dept_lower.contains(&subj_lower) || subj_lower.contains(&dept_lower) {
return 1.0;
}
// Handle common UTSA abbreviation mappings.
if matches_known_abbreviation(&subj_lower, &dept_lower) {
return 1.0;
}
}
0.2
}
/// Expand common subject abbreviations used at UTSA and check for overlap.
fn matches_known_abbreviation(subject: &str, department: &str) -> bool {
const MAPPINGS: &[(&str, &[&str])] = &[
("cs", &["computer science"]),
("ece", &["electrical", "computer engineering"]),
("ee", &["electrical engineering", "electrical"]),
("me", &["mechanical engineering", "mechanical"]),
("ce", &["civil engineering", "civil"]),
("bio", &["biology", "biological"]),
("chem", &["chemistry"]),
("phys", &["physics"]),
("math", &["mathematics"]),
("sta", &["statistics"]),
("eng", &["english"]),
("his", &["history"]),
("pol", &["political science"]),
("psy", &["psychology"]),
("soc", &["sociology"]),
("mus", &["music"]),
("art", &["art"]),
("phi", &["philosophy"]),
("eco", &["economics"]),
("acc", &["accounting"]),
("fin", &["finance"]),
("mgt", &["management"]),
("mkt", &["marketing"]),
("is", &["information systems"]),
("ms", &["management science"]),
("kin", &["kinesiology"]),
("com", &["communication"]),
];
for &(abbr, expansions) in MAPPINGS {
if subject == abbr {
return expansions
.iter()
.any(|expansion| department.contains(expansion));
}
}
false
}
/// Compute match confidence score (0.0 to 1.0) for an instructor-RMP pair.
///
/// Name matching is handled by the caller via pre-filtering on exact
/// normalized `(last, first)`, so only department, uniqueness, and volume
/// signals are scored here.
pub fn compute_match_score(
instructor_subjects: &[String],
rmp_department: Option<&str>,
candidate_count: usize,
rmp_num_ratings: i32,
) -> MatchScore {
// --- Department (0.50) ---
let dept_score = department_similarity(instructor_subjects, rmp_department);
// --- Uniqueness (0.30) ---
let uniqueness_score = match candidate_count {
0 | 1 => 1.0,
2 => 0.5,
_ => 0.2,
};
// --- Volume (0.20) ---
let volume_score = ((rmp_num_ratings as f32).ln_1p() / 5.0_f32.ln_1p()).clamp(0.0, 1.0);
let composite = dept_score * WEIGHT_DEPARTMENT
+ uniqueness_score * WEIGHT_UNIQUENESS
+ volume_score * WEIGHT_VOLUME;
MatchScore {
score: composite,
breakdown: ScoreBreakdown {
department: dept_score,
uniqueness: uniqueness_score,
volume: volume_score,
},
}
}
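To make the weighting concrete, a worked example with assumed inputs (a unique name hit in a matching department, 50 ratings):
// volume = (ln(1 + 50) / ln(1 + 5)).clamp(0.0, 1.0) ≈ (3.93 / 1.79), which clamps to 1.0
// composite = 1.0*0.50 + 1.0*0.30 + 1.0*0.20 = 1.0, above AUTO_ACCEPT_THRESHOLD
let ms = compute_match_score(&["CS".to_string()], Some("Computer Science"), 1, 50);
assert!(ms.score >= AUTO_ACCEPT_THRESHOLD);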
// ---------------------------------------------------------------------------
// Candidate generation (DB)
// ---------------------------------------------------------------------------
/// Statistics returned from candidate generation.
#[derive(Debug)]
pub struct MatchingStats {
pub total_unmatched: usize,
pub candidates_created: usize,
pub auto_matched: usize,
pub skipped_unparseable: usize,
pub skipped_no_candidates: usize,
}
/// Lightweight row for building the in-memory RMP name index.
struct RmpProfForMatching {
legacy_id: i32,
department: Option<String>,
num_ratings: i32,
}
/// Generate match candidates for all unmatched instructors.
///
/// For each unmatched instructor:
/// 1. Parse `display_name` into (last, first).
/// 2. Find RMP professors with matching normalized name.
/// 3. Score each candidate.
/// 4. Store candidates scoring above [`MIN_CANDIDATE_THRESHOLD`].
/// 5. Auto-accept if the top candidate scores ≥ [`AUTO_ACCEPT_THRESHOLD`]
/// and no existing rejected candidate exists for that pair.
///
/// Already-evaluated instructor-RMP pairs (any status) are skipped.
pub async fn generate_candidates(db_pool: &PgPool) -> Result<MatchingStats> {
// 1. Load unmatched instructors
let instructors: Vec<(i32, String)> = sqlx::query_as(
"SELECT id, display_name FROM instructors WHERE rmp_match_status = 'unmatched'",
)
.fetch_all(db_pool)
.await?;
if instructors.is_empty() {
info!("No unmatched instructors to generate candidates for");
return Ok(MatchingStats {
total_unmatched: 0,
candidates_created: 0,
auto_matched: 0,
skipped_unparseable: 0,
skipped_no_candidates: 0,
});
}
let instructor_ids: Vec<i32> = instructors.iter().map(|(id, _)| *id).collect();
let total_unmatched = instructors.len();
// 2. Load instructor subjects
let subject_rows: Vec<(i32, String)> = sqlx::query_as(
r#"
SELECT DISTINCT ci.instructor_id, c.subject
FROM course_instructors ci
JOIN courses c ON c.id = ci.course_id
WHERE ci.instructor_id = ANY($1)
"#,
)
.bind(&instructor_ids)
.fetch_all(db_pool)
.await?;
let mut subject_map: HashMap<i32, Vec<String>> = HashMap::new();
for (iid, subject) in subject_rows {
subject_map.entry(iid).or_default().push(subject);
}
// 3. Load all RMP professors
let prof_rows: Vec<(i32, String, String, Option<String>, i32)> = sqlx::query_as(
"SELECT legacy_id, first_name, last_name, department, num_ratings FROM rmp_professors",
)
.fetch_all(db_pool)
.await?;
// Build name index: (normalized_last, normalized_first) -> Vec<RmpProfForMatching>
let mut name_index: HashMap<(String, String), Vec<RmpProfForMatching>> = HashMap::new();
for (legacy_id, first_name, last_name, department, num_ratings) in prof_rows {
let key = (normalize(&last_name), normalize(&first_name));
name_index.entry(key).or_default().push(RmpProfForMatching {
legacy_id,
department,
num_ratings,
});
}
// 4. Load existing candidate pairs (and rejected subset) in a single query
let candidate_rows: Vec<(i32, i32, String)> =
sqlx::query_as("SELECT instructor_id, rmp_legacy_id, status FROM rmp_match_candidates")
.fetch_all(db_pool)
.await?;
let mut existing_pairs: HashSet<(i32, i32)> = HashSet::with_capacity(candidate_rows.len());
let mut rejected_pairs: HashSet<(i32, i32)> = HashSet::new();
for (iid, lid, status) in candidate_rows {
existing_pairs.insert((iid, lid));
if status == "rejected" {
rejected_pairs.insert((iid, lid));
}
}
// 5. Score and collect candidates
let empty_subjects: Vec<String> = Vec::new();
let mut candidates: Vec<(i32, i32, f32, serde_json::Value)> = Vec::new();
let mut auto_accept: Vec<(i32, i32)> = Vec::new(); // (instructor_id, legacy_id)
let mut skipped_unparseable = 0usize;
let mut skipped_no_candidates = 0usize;
for (instructor_id, display_name) in &instructors {
let Some((norm_last, norm_first)) = parse_display_name(display_name) else {
skipped_unparseable += 1;
debug!(
instructor_id,
display_name, "Unparseable display name, skipping"
);
continue;
};
let subjects = subject_map.get(instructor_id).unwrap_or(&empty_subjects);
let key = (norm_last.clone(), norm_first.clone());
let Some(rmp_candidates) = name_index.get(&key) else {
skipped_no_candidates += 1;
continue;
};
let candidate_count = rmp_candidates.len();
let mut best: Option<(f32, i32)> = None;
for prof in rmp_candidates {
let pair = (*instructor_id, prof.legacy_id);
if existing_pairs.contains(&pair) {
continue;
}
let ms = compute_match_score(
subjects,
prof.department.as_deref(),
candidate_count,
prof.num_ratings,
);
if ms.score < MIN_CANDIDATE_THRESHOLD {
continue;
}
let breakdown_json =
serde_json::to_value(&ms.breakdown).unwrap_or_else(|_| serde_json::json!({}));
candidates.push((*instructor_id, prof.legacy_id, ms.score, breakdown_json));
match best {
Some((s, _)) if ms.score > s => best = Some((ms.score, prof.legacy_id)),
None => best = Some((ms.score, prof.legacy_id)),
_ => {}
}
}
// Auto-accept the top candidate if it meets the threshold and is not
// previously rejected.
if let Some((score, legacy_id)) = best
&& score >= AUTO_ACCEPT_THRESHOLD
&& !rejected_pairs.contains(&(*instructor_id, legacy_id))
{
auto_accept.push((*instructor_id, legacy_id));
}
}
// 6-7. Write candidates and auto-accept within a single transaction
let candidates_created = candidates.len();
let auto_matched = auto_accept.len();
let mut tx = db_pool.begin().await?;
// 6. Batch-insert candidates
if !candidates.is_empty() {
let c_instructor_ids: Vec<i32> = candidates.iter().map(|(iid, _, _, _)| *iid).collect();
let c_legacy_ids: Vec<i32> = candidates.iter().map(|(_, lid, _, _)| *lid).collect();
let c_scores: Vec<f32> = candidates.iter().map(|(_, _, s, _)| *s).collect();
let c_breakdowns: Vec<serde_json::Value> =
candidates.into_iter().map(|(_, _, _, b)| b).collect();
sqlx::query(
r#"
INSERT INTO rmp_match_candidates (instructor_id, rmp_legacy_id, score, score_breakdown)
SELECT v.instructor_id, v.rmp_legacy_id, v.score, v.score_breakdown
FROM UNNEST($1::int4[], $2::int4[], $3::real[], $4::jsonb[])
AS v(instructor_id, rmp_legacy_id, score, score_breakdown)
ON CONFLICT (instructor_id, rmp_legacy_id) DO NOTHING
"#,
)
.bind(&c_instructor_ids)
.bind(&c_legacy_ids)
.bind(&c_scores)
.bind(&c_breakdowns)
.execute(&mut *tx)
.await?;
}
// 7. Auto-accept top candidates
if !auto_accept.is_empty() {
let aa_instructor_ids: Vec<i32> = auto_accept.iter().map(|(iid, _)| *iid).collect();
let aa_legacy_ids: Vec<i32> = auto_accept.iter().map(|(_, lid)| *lid).collect();
// Mark the candidate row as accepted
sqlx::query(
r#"
UPDATE rmp_match_candidates mc
SET status = 'accepted', resolved_at = NOW()
FROM UNNEST($1::int4[], $2::int4[]) AS v(instructor_id, rmp_legacy_id)
WHERE mc.instructor_id = v.instructor_id
AND mc.rmp_legacy_id = v.rmp_legacy_id
"#,
)
.bind(&aa_instructor_ids)
.bind(&aa_legacy_ids)
.execute(&mut *tx)
.await?;
// Insert links into instructor_rmp_links
sqlx::query(
r#"
INSERT INTO instructor_rmp_links (instructor_id, rmp_legacy_id, source)
SELECT v.instructor_id, v.rmp_legacy_id, 'auto'
FROM UNNEST($1::int4[], $2::int4[]) AS v(instructor_id, rmp_legacy_id)
ON CONFLICT (rmp_legacy_id) DO NOTHING
"#,
)
.bind(&aa_instructor_ids)
.bind(&aa_legacy_ids)
.execute(&mut *tx)
.await?;
// Update instructor match status
sqlx::query(
r#"
UPDATE instructors i
SET rmp_match_status = 'auto'
FROM UNNEST($1::int4[]) AS v(instructor_id)
WHERE i.id = v.instructor_id
"#,
)
.bind(&aa_instructor_ids)
.execute(&mut *tx)
.await?;
}
tx.commit().await?;
let stats = MatchingStats {
total_unmatched,
candidates_created,
auto_matched,
skipped_unparseable,
skipped_no_candidates,
};
info!(
total_unmatched = stats.total_unmatched,
candidates_created = stats.candidates_created,
auto_matched = stats.auto_matched,
skipped_unparseable = stats.skipped_unparseable,
skipped_no_candidates = stats.skipped_no_candidates,
"Candidate generation complete"
);
Ok(stats)
}
// ---------------------------------------------------------------------------
// Tests
// ---------------------------------------------------------------------------
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_ideal_candidate_high_score() {
let ms = compute_match_score(
&["CS".to_string()],
Some("Computer Science"),
1, // unique candidate
50, // decent ratings
);
// dept 1.0*0.50 + unique 1.0*0.30 + volume 1.0*0.20 (ratio clamps to 1.0) = 1.0
assert!(ms.score >= 0.85, "Expected score >= 0.85, got {}", ms.score);
assert_eq!(ms.breakdown.uniqueness, 1.0);
assert_eq!(ms.breakdown.department, 1.0);
}
#[test]
fn test_ambiguous_candidates_lower_score() {
let unique = compute_match_score(&[], None, 1, 10);
let ambiguous = compute_match_score(&[], None, 3, 10);
assert!(
unique.score > ambiguous.score,
"Unique ({}) should outscore ambiguous ({})",
unique.score,
ambiguous.score
);
assert_eq!(unique.breakdown.uniqueness, 1.0);
assert_eq!(ambiguous.breakdown.uniqueness, 0.2);
}
#[test]
fn test_no_department_neutral() {
let ms = compute_match_score(&["CS".to_string()], None, 1, 10);
assert_eq!(ms.breakdown.department, 0.5);
}
#[test]
fn test_department_match() {
let ms = compute_match_score(&["CS".to_string()], Some("Computer Science"), 1, 10);
assert_eq!(ms.breakdown.department, 1.0);
}
#[test]
fn test_department_mismatch() {
let ms = compute_match_score(&["CS".to_string()], Some("History"), 1, 10);
assert_eq!(ms.breakdown.department, 0.2);
}
#[test]
fn test_department_match_outscores_mismatch() {
let matched = compute_match_score(&["CS".to_string()], Some("Computer Science"), 1, 10);
let mismatched = compute_match_score(&["CS".to_string()], Some("History"), 1, 10);
assert!(
matched.score > mismatched.score,
"Department match ({}) should outscore mismatch ({})",
matched.score,
mismatched.score
);
}
#[test]
fn test_volume_scaling() {
let zero = compute_match_score(&[], None, 1, 0);
let many = compute_match_score(&[], None, 1, 100);
assert!(
many.breakdown.volume > zero.breakdown.volume,
"100 ratings ({}) should outscore 0 ratings ({})",
many.breakdown.volume,
zero.breakdown.volume
);
assert_eq!(zero.breakdown.volume, 0.0);
assert!(
many.breakdown.volume > 0.9,
"100 ratings should be near max"
);
}
}
+170 -26
@@ -1,15 +1,40 @@
//! Database operations for scrape job queue management.
use crate::data::models::{ScrapeJob, ScrapePriority, TargetType};
use crate::data::models::{ScrapeJob, ScrapePriority, TargetType, UpsertCounts};
use crate::error::Result;
use chrono::{DateTime, Utc};
use sqlx::PgPool;
use std::collections::HashSet;
/// Force-unlock all jobs that have a non-NULL `locked_at`.
///
/// Intended to be called once at startup to recover jobs left locked by
/// a previous unclean shutdown (crash, OOM kill, etc.).
///
/// # Returns
/// The number of jobs that were unlocked.
pub async fn force_unlock_all(db_pool: &PgPool) -> Result<u64> {
let result = sqlx::query(
"UPDATE scrape_jobs SET locked_at = NULL, queued_at = NOW() WHERE locked_at IS NOT NULL",
)
.execute(db_pool)
.await?;
Ok(result.rows_affected())
}
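// Note: `force_unlock_all` is invoked from `ScraperService::start` (see
// the scraper service diff below) before any workers are spawned, so
// recovered jobs re-enter the queue ahead of the first poll.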
/// How long a lock can be held before it is considered expired and reclaimable.
///
/// This acts as a safety net for cases where a worker dies without unlocking
/// (OOM kill, crash, network partition). Under normal operation, the worker's
/// own job timeout fires well before this threshold.
const LOCK_EXPIRY: std::time::Duration = std::time::Duration::from_secs(10 * 60);
/// Atomically fetch and lock the next available scrape job.
///
/// Uses `FOR UPDATE SKIP LOCKED` to allow multiple workers to poll the queue
/// concurrently without conflicts. Only jobs that are unlocked and ready to
/// execute (based on `execute_at`) are considered.
/// concurrently without conflicts. Considers jobs that are:
/// - Unlocked and ready to execute, OR
/// - Locked but past [`LOCK_EXPIRY`] (abandoned by a dead worker)
///
/// # Arguments
/// * `db_pool` - PostgreSQL connection pool
@@ -20,9 +45,16 @@ use std::collections::HashSet;
pub async fn fetch_and_lock_job(db_pool: &PgPool) -> Result<Option<ScrapeJob>> {
let mut tx = db_pool.begin().await?;
let lock_expiry_secs = LOCK_EXPIRY.as_secs() as i32;
let job = sqlx::query_as::<_, ScrapeJob>(
"SELECT * FROM scrape_jobs WHERE locked_at IS NULL AND execute_at <= NOW() ORDER BY priority DESC, execute_at ASC LIMIT 1 FOR UPDATE SKIP LOCKED"
"SELECT * FROM scrape_jobs \
WHERE (locked_at IS NULL OR locked_at < NOW() - make_interval(secs => $1::double precision)) \
AND execute_at <= NOW() \
ORDER BY priority DESC, execute_at ASC \
LIMIT 1 \
FOR UPDATE SKIP LOCKED"
)
.bind(lock_expiry_secs)
.fetch_optional(&mut *tx)
.await?;
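// Illustrative polling sketch (not part of this diff): FOR UPDATE SKIP
// LOCKED plus the expired-lock predicate above lets any number of workers
// call this concurrently, each receiving a distinct job, including jobs
// abandoned past LOCK_EXPIRY. `handle_job` is a hypothetical helper.
async fn poll_once(db_pool: &PgPool) -> Result<()> {
    if let Some(job) = fetch_and_lock_job(db_pool).await? {
        // The row's `locked_at` is now set; it stays invisible to other
        // workers until unlocked, deleted, or LOCK_EXPIRY elapses.
        handle_job(job).await?;
    }
    Ok(())
}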
@@ -68,10 +100,11 @@ pub async fn unlock_job(job_id: i32, db_pool: &PgPool) -> Result<()> {
Ok(())
}
/// Atomically unlock a job and increment its retry count.
/// Atomically unlock a job, increment its retry count, and reset `queued_at`.
///
/// Returns whether the job still has retries remaining. This is determined
/// atomically in the database to avoid race conditions between workers.
/// Returns the new `queued_at` timestamp if retries remain, or `None` if
/// the job has exhausted its retries. This is determined atomically in the
/// database to avoid race conditions between workers.
///
/// # Arguments
/// * `job_id` - The database ID of the job
@@ -79,31 +112,31 @@ pub async fn unlock_job(job_id: i32, db_pool: &PgPool) -> Result<()> {
/// * `db_pool` - PostgreSQL connection pool
///
/// # Returns
/// * `Ok(true)` if the job was unlocked and retries remain
/// * `Ok(false)` if the job has exhausted its retries
/// * `Ok(Some(queued_at))` if the job was unlocked and retries remain
/// * `Ok(None)` if the job has exhausted its retries
pub async fn unlock_and_increment_retry(
job_id: i32,
max_retries: i32,
db_pool: &PgPool,
) -> Result<bool> {
let result = sqlx::query_scalar::<_, Option<i32>>(
) -> Result<Option<chrono::DateTime<chrono::Utc>>> {
let result = sqlx::query_scalar::<_, Option<chrono::DateTime<chrono::Utc>>>(
"UPDATE scrape_jobs
SET locked_at = NULL, retry_count = retry_count + 1
SET locked_at = NULL, retry_count = retry_count + 1, queued_at = NOW()
WHERE id = $1
RETURNING CASE WHEN retry_count < $2 THEN retry_count ELSE NULL END",
RETURNING CASE WHEN retry_count <= $2 THEN queued_at ELSE NULL END",
)
.bind(job_id)
.bind(max_retries)
.fetch_one(db_pool)
.await?;
Ok(result.is_some())
Ok(result)
}
/// Find existing unlocked job payloads matching the given target type and candidates.
/// Find existing job payloads matching the given target type and candidates.
///
/// Returns a set of stringified JSON payloads that already exist in the queue,
/// used for deduplication when scheduling new jobs.
/// Returns a set of stringified JSON payloads that already exist in the queue
/// (both locked and unlocked), used for deduplication when scheduling new jobs.
///
/// # Arguments
/// * `target_type` - The target type to filter by
@@ -111,7 +144,7 @@ pub async fn unlock_and_increment_retry(
/// * `db_pool` - PostgreSQL connection pool
///
/// # Returns
/// A `HashSet` of stringified JSON payloads that already have pending jobs
/// A `HashSet` of stringified JSON payloads that already have pending or in-progress jobs
pub async fn find_existing_job_payloads(
target_type: TargetType,
candidate_payloads: &[serde_json::Value],
@@ -119,7 +152,7 @@ pub async fn find_existing_job_payloads(
) -> Result<HashSet<String>> {
let existing_jobs: Vec<(serde_json::Value,)> = sqlx::query_as(
"SELECT target_payload FROM scrape_jobs
WHERE target_type = $1 AND target_payload = ANY($2) AND locked_at IS NULL",
WHERE target_type = $1 AND target_payload = ANY($2)",
)
.bind(target_type)
.bind(candidate_payloads)
@@ -134,6 +167,116 @@ pub async fn find_existing_job_payloads(
Ok(existing_payloads)
}
/// Insert a scrape job result log entry.
#[allow(clippy::too_many_arguments)]
pub async fn insert_job_result(
target_type: TargetType,
payload: serde_json::Value,
priority: ScrapePriority,
queued_at: DateTime<Utc>,
started_at: DateTime<Utc>,
duration_ms: i32,
success: bool,
error_message: Option<&str>,
retry_count: i32,
counts: Option<&UpsertCounts>,
db_pool: &PgPool,
) -> Result<()> {
sqlx::query(
r#"
INSERT INTO scrape_job_results (
target_type, payload, priority,
queued_at, started_at, duration_ms,
success, error_message, retry_count,
courses_fetched, courses_changed, courses_unchanged,
audits_generated, metrics_generated
) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13, $14)
"#,
)
.bind(target_type)
.bind(&payload)
.bind(priority)
.bind(queued_at)
.bind(started_at)
.bind(duration_ms)
.bind(success)
.bind(error_message)
.bind(retry_count)
.bind(counts.map(|c| c.courses_fetched))
.bind(counts.map(|c| c.courses_changed))
.bind(counts.map(|c| c.courses_unchanged))
.bind(counts.map(|c| c.audits_generated))
.bind(counts.map(|c| c.metrics_generated))
.execute(db_pool)
.await?;
Ok(())
}
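// Note: on failure rows the caller passes `counts = None`, so all five
// effectiveness columns (courses_fetched through metrics_generated) are
// bound as NULL rather than zero, keeping "no data" distinct from
// "fetched nothing" in the aggregates below.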
/// Per-subject aggregated stats from recent scrape results.
///
/// Populated by [`fetch_subject_stats`] and converted into
/// [`crate::scraper::adaptive::SubjectStats`] for interval computation.
#[derive(sqlx::FromRow, Debug, Clone)]
pub struct SubjectResultStats {
pub subject: String,
pub recent_runs: i64,
pub avg_change_ratio: f64,
pub consecutive_zero_changes: i64,
pub consecutive_empty_fetches: i64,
pub recent_failure_count: i64,
pub recent_success_count: i64,
pub last_completed: DateTime<Utc>,
}
/// Fetch aggregated per-subject statistics from the last 24 hours of results.
///
/// For each subject, examines the 20 most recent results and computes:
/// - Average change ratio (courses_changed / courses_fetched)
/// - Consecutive zero-change runs from the most recent result
/// - Consecutive empty-fetch runs from the most recent result
/// - Failure and success counts
/// - Last completion timestamp
pub async fn fetch_subject_stats(db_pool: &PgPool) -> Result<Vec<SubjectResultStats>> {
let rows = sqlx::query_as::<_, SubjectResultStats>(
r#"
WITH recent AS (
SELECT payload->>'subject' AS subject, success,
COALESCE(courses_fetched, 0) AS courses_fetched,
COALESCE(courses_changed, 0) AS courses_changed,
completed_at,
ROW_NUMBER() OVER (PARTITION BY payload->>'subject' ORDER BY completed_at DESC) AS rn
FROM scrape_job_results
WHERE target_type = 'Subject' AND completed_at > NOW() - INTERVAL '24 hours'
),
filtered AS (SELECT * FROM recent WHERE rn <= 20),
zero_break AS (
SELECT subject,
MIN(rn) FILTER (WHERE courses_changed > 0 AND success) AS first_nonzero_rn,
MIN(rn) FILTER (WHERE courses_fetched > 0 AND success) AS first_nonempty_rn
FROM filtered GROUP BY subject
)
SELECT
f.subject::TEXT AS subject,
COUNT(*)::BIGINT AS recent_runs,
COALESCE(AVG(CASE WHEN f.success AND f.courses_fetched > 0
THEN f.courses_changed::FLOAT / f.courses_fetched ELSE NULL END), 0.0)::FLOAT8 AS avg_change_ratio,
COALESCE(zb.first_nonzero_rn - 1, COUNT(*) FILTER (WHERE f.success AND f.courses_changed = 0))::BIGINT AS consecutive_zero_changes,
COALESCE(zb.first_nonempty_rn - 1, COUNT(*) FILTER (WHERE f.success AND f.courses_fetched = 0))::BIGINT AS consecutive_empty_fetches,
COUNT(*) FILTER (WHERE NOT f.success)::BIGINT AS recent_failure_count,
COUNT(*) FILTER (WHERE f.success)::BIGINT AS recent_success_count,
MAX(f.completed_at) AS last_completed
FROM filtered f
LEFT JOIN zero_break zb ON f.subject = zb.subject
GROUP BY f.subject, zb.first_nonzero_rn, zb.first_nonempty_rn
"#,
)
.fetch_all(db_pool)
.await?;
Ok(rows)
}
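// Worked example for the zero_break CTE (illustrative): if a subject's
// five most recent successful runs, newest first (rn = 1..=5), had
// courses_changed = [0, 0, 3, 0, 1], then first_nonzero_rn = 3 and
// consecutive_zero_changes = 3 - 1 = 2: the two newest runs changed
// nothing. When no run in the window changed anything, the COALESCE
// fallback counts every successful zero-change run instead.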
/// Batch insert scrape jobs using UNNEST for a single round-trip.
///
/// All jobs are inserted with `execute_at` set to the current time.
@@ -144,9 +287,9 @@ pub async fn find_existing_job_payloads(
pub async fn batch_insert_jobs(
jobs: &[(serde_json::Value, TargetType, ScrapePriority)],
db_pool: &PgPool,
) -> Result<()> {
) -> Result<Vec<ScrapeJob>> {
if jobs.is_empty() {
return Ok(());
return Ok(Vec::new());
}
let mut target_types: Vec<String> = Vec::with_capacity(jobs.len());
@@ -159,19 +302,20 @@ pub async fn batch_insert_jobs(
priorities.push(format!("{priority:?}"));
}
sqlx::query(
let inserted = sqlx::query_as::<_, ScrapeJob>(
r#"
INSERT INTO scrape_jobs (target_type, target_payload, priority, execute_at)
SELECT v.target_type::target_type, v.payload, v.priority::scrape_priority, NOW()
INSERT INTO scrape_jobs (target_type, target_payload, priority, execute_at, queued_at)
SELECT v.target_type::target_type, v.payload, v.priority::scrape_priority, NOW(), NOW()
FROM UNNEST($1::text[], $2::jsonb[], $3::text[])
AS v(target_type, payload, priority)
RETURNING *
"#,
)
.bind(&target_types)
.bind(&payloads)
.bind(&priorities)
.execute(db_pool)
.fetch_all(db_pool)
.await?;
Ok(())
Ok(inserted)
}
+100
@@ -0,0 +1,100 @@
//! Database query functions for user sessions.
use anyhow::Context;
use rand::Rng;
use sqlx::PgPool;
use super::models::UserSession;
use crate::error::Result;
/// Session lifetime: 7 days (in seconds).
pub const SESSION_DURATION_SECS: u64 = 7 * 24 * 3600;
/// Generate a cryptographically random 32-byte hex token.
fn generate_token() -> String {
let bytes: [u8; 32] = rand::rng().random();
bytes.iter().map(|b| format!("{b:02x}")).collect()
}
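// Sanity-check sketch (illustrative, not part of this diff): 32 random
// bytes encode to exactly 64 lowercase hex characters.
#[cfg(test)]
mod token_tests {
    use super::generate_token;

    #[test]
    fn token_is_64_hex_chars() {
        let token = generate_token();
        assert_eq!(token.len(), 64);
        assert!(token.chars().all(|c| c.is_ascii_hexdigit()));
    }
}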
/// Create a new session for a user with the given duration.
pub async fn create_session(
pool: &PgPool,
user_id: i64,
duration: std::time::Duration,
) -> Result<UserSession> {
let token = generate_token();
let duration_secs = duration.as_secs() as i64;
sqlx::query_as::<_, UserSession>(
r#"
INSERT INTO user_sessions (id, user_id, expires_at)
VALUES ($1, $2, now() + make_interval(secs => $3::double precision))
RETURNING *
"#,
)
.bind(&token)
.bind(user_id)
.bind(duration_secs as f64)
.fetch_one(pool)
.await
.context("failed to create session")
}
/// Fetch a session by token, only if it has not expired.
pub async fn get_session(pool: &PgPool, token: &str) -> Result<Option<UserSession>> {
sqlx::query_as::<_, UserSession>(
"SELECT * FROM user_sessions WHERE id = $1 AND expires_at > now()",
)
.bind(token)
.fetch_optional(pool)
.await
.context("failed to get session")
}
/// Update the last-active timestamp and extend session expiry (sliding window).
pub async fn touch_session(pool: &PgPool, token: &str) -> Result<()> {
sqlx::query(
r#"
UPDATE user_sessions
SET last_active_at = now(),
expires_at = now() + make_interval(secs => $2::double precision)
WHERE id = $1
"#,
)
.bind(token)
.bind(SESSION_DURATION_SECS as f64)
.execute(pool)
.await
.context("failed to touch session")?;
Ok(())
}
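// Illustrative request-path sketch (assumed caller, not part of this
// diff): each authenticated request fetches the session and slides its
// expiry another SESSION_DURATION_SECS into the future.
async fn authenticate(pool: &PgPool, token: &str) -> Result<Option<UserSession>> {
    match get_session(pool, token).await? {
        Some(session) => {
            touch_session(pool, token).await?; // extend the 7-day window
            Ok(Some(session))
        }
        None => Ok(None), // unknown or expired token
    }
}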
/// Delete a session by token.
pub async fn delete_session(pool: &PgPool, token: &str) -> Result<()> {
sqlx::query("DELETE FROM user_sessions WHERE id = $1")
.bind(token)
.execute(pool)
.await
.context("failed to delete session")?;
Ok(())
}
/// Delete all sessions for a user. Returns the number of sessions deleted.
#[allow(dead_code)] // Available for admin user-deletion flow
pub async fn delete_user_sessions(pool: &PgPool, user_id: i64) -> Result<u64> {
let result = sqlx::query("DELETE FROM user_sessions WHERE user_id = $1")
.bind(user_id)
.execute(pool)
.await
.context("failed to delete user sessions")?;
Ok(result.rows_affected())
}
/// Delete all expired sessions. Returns the number of sessions cleaned up.
pub async fn cleanup_expired(pool: &PgPool) -> Result<u64> {
let result = sqlx::query("DELETE FROM user_sessions WHERE expires_at <= now()")
.execute(pool)
.await
.context("failed to cleanup expired sessions")?;
Ok(result.rows_affected())
}
+86
@@ -0,0 +1,86 @@
//! Database query functions for users.
use anyhow::Context;
use sqlx::PgPool;
use super::models::User;
use crate::error::Result;
/// Insert a new user or update username/avatar on conflict.
pub async fn upsert_user(
pool: &PgPool,
discord_id: i64,
username: &str,
avatar_hash: Option<&str>,
) -> Result<User> {
sqlx::query_as::<_, User>(
r#"
INSERT INTO users (discord_id, discord_username, discord_avatar_hash)
VALUES ($1, $2, $3)
ON CONFLICT (discord_id) DO UPDATE
SET discord_username = EXCLUDED.discord_username,
discord_avatar_hash = EXCLUDED.discord_avatar_hash,
updated_at = now()
RETURNING *
"#,
)
.bind(discord_id)
.bind(username)
.bind(avatar_hash)
.fetch_one(pool)
.await
.context("failed to upsert user")
}
/// Fetch a user by Discord ID.
pub async fn get_user(pool: &PgPool, discord_id: i64) -> Result<Option<User>> {
sqlx::query_as::<_, User>("SELECT * FROM users WHERE discord_id = $1")
.bind(discord_id)
.fetch_optional(pool)
.await
.context("failed to get user")
}
/// List all users ordered by creation date (newest first).
pub async fn list_users(pool: &PgPool) -> Result<Vec<User>> {
sqlx::query_as::<_, User>("SELECT * FROM users ORDER BY created_at DESC")
.fetch_all(pool)
.await
.context("failed to list users")
}
/// Set the admin flag for a user, returning the updated user if found.
pub async fn set_admin(pool: &PgPool, discord_id: i64, is_admin: bool) -> Result<Option<User>> {
sqlx::query_as::<_, User>(
r#"
UPDATE users
SET is_admin = $2, updated_at = now()
WHERE discord_id = $1
RETURNING *
"#,
)
.bind(discord_id)
.bind(is_admin)
.fetch_optional(pool)
.await
.context("failed to set admin status")
}
/// Ensure a seed admin exists. Upserts with `is_admin = true` and a placeholder
/// username that will be replaced on first OAuth login.
pub async fn ensure_seed_admin(pool: &PgPool, discord_id: i64) -> Result<User> {
sqlx::query_as::<_, User>(
r#"
INSERT INTO users (discord_id, discord_username, is_admin)
VALUES ($1, 'seed-admin', true)
ON CONFLICT (discord_id) DO UPDATE
SET is_admin = true,
updated_at = now()
RETURNING *
"#,
)
.bind(discord_id)
.fetch_one(pool)
.await
.context("failed to ensure seed admin")
}
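// Illustrative startup call (not part of this diff; `seed_admin_id` is a
// hypothetical config value):
//
//     let admin = ensure_seed_admin(&pool, seed_admin_id).await?;
//
// The placeholder username is overwritten by `upsert_user` on the
// admin's first OAuth login.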
+1
@@ -1,6 +1,7 @@
pub mod app;
pub mod banner;
pub mod bot;
pub mod calendar;
pub mod cli;
pub mod config;
pub mod data;
+4 -4
@@ -1,5 +1,5 @@
use crate::app::App;
use crate::cli::{Args, ServiceName, determine_enabled_services};
use crate::cli::{Args, ServiceName};
use crate::logging::setup_logging;
use clap::Parser;
use std::process::ExitCode;
@@ -8,6 +8,7 @@ use tracing::info;
mod app;
mod banner;
mod bot;
mod calendar;
mod cli;
mod config;
mod data;
@@ -29,9 +30,8 @@ async fn main() -> ExitCode {
// Parse CLI arguments
let args = Args::parse();
// Determine which services should be enabled
let enabled_services: Vec<ServiceName> =
determine_enabled_services(&args).expect("Failed to determine enabled services");
// Always run all services
let enabled_services = ServiceName::all();
// Create and initialize the application
let mut app = App::new().await.expect("Failed to initialize application");
+326
@@ -0,0 +1,326 @@
//! Adaptive scraping interval computation.
//!
//! Assigns per-subject scrape intervals based on recent change rates,
//! consecutive zero-change runs, failure patterns, and time of day.
use chrono::{DateTime, Datelike, Timelike, Utc};
use chrono_tz::US::Central;
use std::time::Duration;
use crate::data::scrape_jobs::SubjectResultStats;
const FLOOR_INTERVAL: Duration = Duration::from_secs(3 * 60);
const MODERATE_HIGH_INTERVAL: Duration = Duration::from_secs(5 * 60);
const MODERATE_LOW_INTERVAL: Duration = Duration::from_secs(15 * 60);
const LOW_CHANGE_INTERVAL: Duration = Duration::from_secs(30 * 60);
const ZERO_5_INTERVAL: Duration = Duration::from_secs(60 * 60);
const ZERO_10_INTERVAL: Duration = Duration::from_secs(2 * 60 * 60);
const CEILING_INTERVAL: Duration = Duration::from_secs(4 * 60 * 60);
const COLD_START_INTERVAL: Duration = FLOOR_INTERVAL;
const PAUSE_PROBE_INTERVAL: Duration = Duration::from_secs(6 * 60 * 60);
const EMPTY_FETCH_PAUSE_THRESHOLD: i64 = 3;
const FAILURE_PAUSE_THRESHOLD: i64 = 5;
/// Aggregated per-subject statistics derived from recent scrape results.
#[derive(Debug, Clone)]
pub struct SubjectStats {
pub subject: String,
pub recent_runs: i64,
pub avg_change_ratio: f64,
pub consecutive_zero_changes: i64,
pub consecutive_empty_fetches: i64,
pub recent_failure_count: i64,
pub recent_success_count: i64,
pub last_completed: DateTime<Utc>,
}
/// Scheduling decision for a subject.
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum SubjectSchedule {
/// Subject is due for scraping, with the computed interval.
Eligible(Duration),
/// Subject was scraped recently; wait for the remaining cooldown.
Cooldown(Duration),
/// Subject is paused due to repeated empty fetches or failures.
Paused,
/// Subject belongs to a past term and should not be scraped.
ReadOnly,
}
impl From<SubjectResultStats> for SubjectStats {
fn from(row: SubjectResultStats) -> Self {
Self {
subject: row.subject,
recent_runs: row.recent_runs,
avg_change_ratio: row.avg_change_ratio,
consecutive_zero_changes: row.consecutive_zero_changes,
consecutive_empty_fetches: row.consecutive_empty_fetches,
recent_failure_count: row.recent_failure_count,
recent_success_count: row.recent_success_count,
last_completed: row.last_completed,
}
}
}
/// Compute the base interval tier from change-rate statistics.
pub fn compute_base_interval(stats: &SubjectStats) -> Duration {
if stats.recent_runs == 0 {
return COLD_START_INTERVAL;
}
// Consecutive-zero tiers take precedence when change ratio is near zero
if stats.avg_change_ratio < 0.001 {
return match stats.consecutive_zero_changes {
0..5 => LOW_CHANGE_INTERVAL,
5..10 => ZERO_5_INTERVAL,
10..20 => ZERO_10_INTERVAL,
_ => CEILING_INTERVAL,
};
}
match stats.avg_change_ratio {
r if r >= 0.10 => FLOOR_INTERVAL,
r if r >= 0.05 => MODERATE_HIGH_INTERVAL,
r if r >= 0.01 => MODERATE_LOW_INTERVAL,
_ => LOW_CHANGE_INTERVAL,
}
}
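// Worked examples (illustrative): avg_change_ratio = 0.12 -> 3 min floor;
// 0.03 -> 15 min; a near-zero ratio with 7 consecutive zero-change runs
// -> 1 h; with 25 such runs -> the 4 h ceiling.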
/// Return a time-of-day multiplier for the given UTC timestamp.
///
/// Peak hours (weekdays 8am-6pm CT) return 1; off-peak (weekdays 6pm-midnight CT)
/// return 2; night (midnight-8am CT) and weekends return 4.
pub fn time_of_day_multiplier(now: DateTime<Utc>) -> u32 {
let ct = now.with_timezone(&Central);
let weekday = ct.weekday();
let hour = ct.hour();
// Weekends get the slowest multiplier
if matches!(weekday, chrono::Weekday::Sat | chrono::Weekday::Sun) {
return 4;
}
match hour {
8..18 => 1, // peak
18..24 => 2, // off-peak
_ => 4, // night (0..8)
}
}
/// Evaluate whether a subject should be scraped now.
///
/// Combines base interval, time-of-day multiplier, pause detection (empty
/// fetches / consecutive failures), and past-term read-only status.
pub fn evaluate_subject(
stats: &SubjectStats,
now: DateTime<Utc>,
is_past_term: bool,
) -> SubjectSchedule {
if is_past_term {
return SubjectSchedule::ReadOnly;
}
let elapsed = (now - stats.last_completed)
.to_std()
.unwrap_or(Duration::ZERO);
let probe_due = elapsed >= PAUSE_PROBE_INTERVAL;
// Pause on repeated empty fetches
if stats.consecutive_empty_fetches >= EMPTY_FETCH_PAUSE_THRESHOLD {
return if probe_due {
SubjectSchedule::Eligible(PAUSE_PROBE_INTERVAL)
} else {
SubjectSchedule::Paused
};
}
// Pause on all-failures
if stats.recent_success_count == 0 && stats.recent_failure_count >= FAILURE_PAUSE_THRESHOLD {
return if probe_due {
SubjectSchedule::Eligible(PAUSE_PROBE_INTERVAL)
} else {
SubjectSchedule::Paused
};
}
let base = compute_base_interval(stats);
let multiplier = time_of_day_multiplier(now);
let effective = base * multiplier;
if elapsed >= effective {
SubjectSchedule::Eligible(effective)
} else {
let remaining = effective - elapsed;
SubjectSchedule::Cooldown(remaining)
}
}
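// Worked example (illustrative): a subject at the 5-minute tier
// (avg_change_ratio 0.07) evaluated on a Saturday gets effective =
// 5 min * 4 = 20 min; the same subject on a Tuesday at 10am CT keeps
// the 5-minute interval. It becomes Eligible once that long has passed
// since last_completed, otherwise Cooldown(remaining).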
#[cfg(test)]
mod tests {
use super::*;
use chrono::TimeZone;
/// Create a default `SubjectStats` for testing. Callers mutate fields as needed.
fn make_stats(subject: &str) -> SubjectStats {
SubjectStats {
subject: subject.to_string(),
recent_runs: 10,
avg_change_ratio: 0.0,
consecutive_zero_changes: 0,
consecutive_empty_fetches: 0,
recent_failure_count: 0,
recent_success_count: 10,
last_completed: Utc::now() - chrono::Duration::hours(1),
}
}
// -- compute_base_interval tests --
#[test]
fn test_cold_start_returns_floor() {
let mut stats = make_stats("CS");
stats.recent_runs = 0;
assert_eq!(compute_base_interval(&stats), COLD_START_INTERVAL);
}
#[test]
fn test_high_change_rate() {
let mut stats = make_stats("CS");
stats.avg_change_ratio = 0.15;
assert_eq!(compute_base_interval(&stats), FLOOR_INTERVAL);
}
#[test]
fn test_moderate_high_change() {
let mut stats = make_stats("CS");
stats.avg_change_ratio = 0.07;
assert_eq!(compute_base_interval(&stats), MODERATE_HIGH_INTERVAL);
}
#[test]
fn test_moderate_low_change() {
let mut stats = make_stats("CS");
stats.avg_change_ratio = 0.03;
assert_eq!(compute_base_interval(&stats), MODERATE_LOW_INTERVAL);
}
#[test]
fn test_low_change() {
let mut stats = make_stats("CS");
stats.avg_change_ratio = 0.005;
assert_eq!(compute_base_interval(&stats), LOW_CHANGE_INTERVAL);
}
#[test]
fn test_zero_5_consecutive() {
let mut stats = make_stats("CS");
stats.avg_change_ratio = 0.0;
stats.consecutive_zero_changes = 5;
assert_eq!(compute_base_interval(&stats), ZERO_5_INTERVAL);
}
#[test]
fn test_zero_10_consecutive() {
let mut stats = make_stats("CS");
stats.avg_change_ratio = 0.0;
stats.consecutive_zero_changes = 10;
assert_eq!(compute_base_interval(&stats), ZERO_10_INTERVAL);
}
#[test]
fn test_zero_20_consecutive() {
let mut stats = make_stats("CS");
stats.avg_change_ratio = 0.0;
stats.consecutive_zero_changes = 20;
assert_eq!(compute_base_interval(&stats), CEILING_INTERVAL);
}
// -- evaluate_subject tests --
#[test]
fn test_pause_empty_fetches() {
let mut stats = make_stats("CS");
stats.consecutive_empty_fetches = 3;
stats.last_completed = Utc::now() - chrono::Duration::minutes(10);
let result = evaluate_subject(&stats, Utc::now(), false);
assert_eq!(result, SubjectSchedule::Paused);
}
#[test]
fn test_pause_all_failures() {
let mut stats = make_stats("CS");
stats.recent_success_count = 0;
stats.recent_failure_count = 5;
stats.last_completed = Utc::now() - chrono::Duration::minutes(10);
let result = evaluate_subject(&stats, Utc::now(), false);
assert_eq!(result, SubjectSchedule::Paused);
}
#[test]
fn test_probe_after_pause() {
let mut stats = make_stats("CS");
stats.consecutive_empty_fetches = 5;
stats.last_completed = Utc::now() - chrono::Duration::hours(7);
let result = evaluate_subject(&stats, Utc::now(), false);
assert_eq!(result, SubjectSchedule::Eligible(PAUSE_PROBE_INTERVAL));
}
#[test]
fn test_read_only_past_term() {
let stats = make_stats("CS");
let result = evaluate_subject(&stats, Utc::now(), true);
assert_eq!(result, SubjectSchedule::ReadOnly);
}
#[test]
fn test_cooldown_not_elapsed() {
let mut stats = make_stats("CS");
stats.avg_change_ratio = 0.15; // floor = 3 min
// Use a peak-hours timestamp so multiplier = 1
let peak = Utc.with_ymd_and_hms(2025, 7, 14, 15, 0, 0).unwrap(); // Mon 10am CT
stats.last_completed = peak - chrono::Duration::seconds(30);
let result = evaluate_subject(&stats, peak, false);
assert!(matches!(result, SubjectSchedule::Cooldown(_)));
}
#[test]
fn test_eligible_elapsed() {
let mut stats = make_stats("CS");
stats.avg_change_ratio = 0.15; // floor = 3 min
let peak = Utc.with_ymd_and_hms(2025, 7, 14, 15, 0, 0).unwrap(); // Mon 10am CT
stats.last_completed = peak - chrono::Duration::minutes(5);
let result = evaluate_subject(&stats, peak, false);
assert!(matches!(result, SubjectSchedule::Eligible(_)));
}
// -- time_of_day_multiplier tests --
#[test]
fn test_time_multiplier_peak() {
// Monday 10am CT = 15:00 UTC
let dt = Utc.with_ymd_and_hms(2025, 7, 14, 15, 0, 0).unwrap();
assert_eq!(time_of_day_multiplier(dt), 1);
}
#[test]
fn test_time_multiplier_offpeak() {
// Monday 8pm CT = Tuesday 01:00 UTC (off-peak)
let dt = Utc.with_ymd_and_hms(2025, 7, 15, 1, 0, 0).unwrap();
assert_eq!(time_of_day_multiplier(dt), 2);
}
#[test]
fn test_time_multiplier_night() {
// 3am CT = 08:00 UTC
let dt = Utc.with_ymd_and_hms(2025, 7, 14, 8, 0, 0).unwrap();
assert_eq!(time_of_day_multiplier(dt), 4);
}
#[test]
fn test_time_multiplier_weekend() {
// Saturday noon CT = 17:00 UTC
let dt = Utc.with_ymd_and_hms(2025, 7, 12, 17, 0, 0).unwrap();
assert_eq!(time_of_day_multiplier(dt), 4);
}
}
+4 -3
@@ -1,7 +1,7 @@
pub mod subject;
use crate::banner::BannerApi;
use crate::data::models::TargetType;
use crate::data::models::{TargetType, UpsertCounts};
use crate::error::Result;
use serde::{Deserialize, Serialize};
use sqlx::PgPool;
@@ -32,8 +32,9 @@ pub trait Job: Send + Sync {
#[allow(dead_code)]
fn target_type(&self) -> TargetType;
/// Process the job with the given API client and database pool
async fn process(&self, banner_api: &BannerApi, db_pool: &PgPool) -> Result<()>;
/// Process the job with the given API client and database pool.
/// Returns upsert effectiveness counts on success.
async fn process(&self, banner_api: &BannerApi, db_pool: &PgPool) -> Result<UpsertCounts>;
/// Get a human-readable description of the job
fn description(&self) -> String;
+8 -6
@@ -1,7 +1,7 @@
use super::Job;
use crate::banner::{BannerApi, SearchQuery, Term};
use crate::data::batch::batch_upsert_courses;
use crate::data::models::TargetType;
use crate::data::models::{TargetType, UpsertCounts};
use crate::error::Result;
use serde::{Deserialize, Serialize};
use sqlx::PgPool;
@@ -26,7 +26,7 @@ impl Job for SubjectJob {
}
#[tracing::instrument(skip(self, banner_api, db_pool), fields(subject = %self.subject))]
async fn process(&self, banner_api: &BannerApi, db_pool: &PgPool) -> Result<()> {
async fn process(&self, banner_api: &BannerApi, db_pool: &PgPool) -> Result<UpsertCounts> {
let subject_code = &self.subject;
// Get the current term
@@ -37,17 +37,19 @@ impl Job for SubjectJob {
.search(&term, &query, "subjectDescription", false)
.await?;
if let Some(courses_from_api) = search_result.data {
let counts = if let Some(courses_from_api) = search_result.data {
info!(
subject = %subject_code,
count = courses_from_api.len(),
"Found courses"
);
batch_upsert_courses(&courses_from_api, db_pool).await?;
}
batch_upsert_courses(&courses_from_api, db_pool).await?
} else {
UpsertCounts::default()
};
debug!(subject = %subject_code, "Subject job completed");
Ok(())
Ok(counts)
}
fn description(&self) -> String {
+25 -3
@@ -1,11 +1,14 @@
pub mod adaptive;
pub mod jobs;
pub mod scheduler;
pub mod worker;
use crate::banner::BannerApi;
use crate::data::scrape_jobs;
use crate::services::Service;
use crate::state::ReferenceCache;
use crate::status::{ServiceStatus, ServiceStatusRegistry};
use crate::web::ws::ScrapeJobEvent;
use sqlx::PgPool;
use std::sync::Arc;
use tokio::sync::{RwLock, broadcast};
@@ -24,6 +27,7 @@ pub struct ScraperService {
banner_api: Arc<BannerApi>,
reference_cache: Arc<RwLock<ReferenceCache>>,
service_statuses: ServiceStatusRegistry,
job_events_tx: broadcast::Sender<ScrapeJobEvent>,
scheduler_handle: Option<JoinHandle<()>>,
worker_handles: Vec<JoinHandle<()>>,
shutdown_tx: Option<broadcast::Sender<()>>,
@@ -36,12 +40,14 @@ impl ScraperService {
banner_api: Arc<BannerApi>,
reference_cache: Arc<RwLock<ReferenceCache>>,
service_statuses: ServiceStatusRegistry,
job_events_tx: broadcast::Sender<ScrapeJobEvent>,
) -> Self {
Self {
db_pool,
banner_api,
reference_cache,
service_statuses,
job_events_tx,
scheduler_handle: None,
worker_handles: Vec::new(),
shutdown_tx: None,
@@ -49,7 +55,17 @@ impl ScraperService {
}
/// Starts the scheduler and a pool of workers.
pub fn start(&mut self) {
///
/// Force-unlocks any jobs left locked by a previous unclean shutdown before
/// spawning workers, so those jobs re-enter the queue immediately.
pub async fn start(&mut self) {
// Recover jobs left locked by a previous crash/unclean shutdown
match scrape_jobs::force_unlock_all(&self.db_pool).await {
Ok(0) => {}
Ok(count) => warn!(count, "Force-unlocked stale jobs from previous run"),
Err(e) => warn!(error = ?e, "Failed to force-unlock stale jobs"),
}
info!("ScraperService starting");
// Create shutdown channel
@@ -60,6 +76,7 @@ impl ScraperService {
self.db_pool.clone(),
self.banner_api.clone(),
self.reference_cache.clone(),
self.job_events_tx.clone(),
);
let shutdown_rx = shutdown_tx.subscribe();
let scheduler_handle = tokio::spawn(async move {
@@ -70,7 +87,12 @@ impl ScraperService {
let worker_count = 4; // This could be configurable
for i in 0..worker_count {
let worker = Worker::new(i, self.db_pool.clone(), self.banner_api.clone());
let worker = Worker::new(
i,
self.db_pool.clone(),
self.banner_api.clone(),
self.job_events_tx.clone(),
);
let shutdown_rx = shutdown_tx.subscribe();
let worker_handle = tokio::spawn(async move {
worker.run(shutdown_rx).await;
@@ -92,7 +114,7 @@ impl Service for ScraperService {
}
async fn run(&mut self) -> Result<(), anyhow::Error> {
self.start();
self.start().await;
std::future::pending::<()>().await;
Ok(())
}
+104 -23
@@ -3,10 +3,14 @@ use crate::data::models::{ReferenceData, ScrapePriority, TargetType};
use crate::data::scrape_jobs;
use crate::error::Result;
use crate::rmp::RmpClient;
use crate::scraper::adaptive::{SubjectSchedule, SubjectStats, evaluate_subject};
use crate::scraper::jobs::subject::SubjectJob;
use crate::state::ReferenceCache;
use crate::web::ws::{ScrapeJobDto, ScrapeJobEvent};
use chrono::{DateTime, Utc};
use serde_json::json;
use sqlx::PgPool;
use std::collections::HashMap;
use std::sync::Arc;
use std::time::{Duration, Instant};
use tokio::sync::{RwLock, broadcast};
@@ -25,6 +29,7 @@ pub struct Scheduler {
db_pool: PgPool,
banner_api: Arc<BannerApi>,
reference_cache: Arc<RwLock<ReferenceCache>>,
job_events_tx: broadcast::Sender<ScrapeJobEvent>,
}
impl Scheduler {
@@ -32,11 +37,13 @@ impl Scheduler {
db_pool: PgPool,
banner_api: Arc<BannerApi>,
reference_cache: Arc<RwLock<ReferenceCache>>,
job_events_tx: broadcast::Sender<ScrapeJobEvent>,
) -> Self {
Self {
db_pool,
banner_api,
reference_cache,
job_events_tx,
}
}
@@ -74,6 +81,7 @@ impl Scheduler {
let banner_api = self.banner_api.clone();
let cancel_token = cancel_token.clone();
let reference_cache = self.reference_cache.clone();
let job_events_tx = self.job_events_tx.clone();
async move {
tokio::select! {
@@ -99,7 +107,7 @@ impl Scheduler {
tokio::join!(rmp_fut, ref_fut);
if let Err(e) = Self::schedule_jobs_impl(&db_pool, &banner_api).await {
if let Err(e) = Self::schedule_jobs_impl(&db_pool, &banner_api, Some(&job_events_tx)).await {
error!(error = ?e, "Failed to schedule jobs");
}
} => {}
@@ -143,18 +151,17 @@ impl Scheduler {
/// Core scheduling logic that analyzes data and creates scrape jobs.
///
/// Strategy:
/// 1. Fetch all subjects for the current term from Banner API
/// 2. Query existing jobs in a single batch query
/// 3. Create jobs only for subjects that don't have pending jobs
/// Uses adaptive scheduling to determine per-subject scrape intervals based
/// on recent change rates, failure patterns, and time of day. Only subjects
/// that are eligible (i.e. their cooldown has elapsed) are enqueued.
///
/// This is a static method (not &self) to allow it to be called from spawned tasks.
#[tracing::instrument(skip_all, fields(term))]
async fn schedule_jobs_impl(db_pool: &PgPool, banner_api: &BannerApi) -> Result<()> {
// For now, we will implement a simple baseline scheduling strategy:
// 1. Get a list of all subjects from the Banner API.
// 2. Query existing jobs for all subjects in a single query.
// 3. Create new jobs only for subjects that don't have existing jobs.
async fn schedule_jobs_impl(
db_pool: &PgPool,
banner_api: &BannerApi,
job_events_tx: Option<&broadcast::Sender<ScrapeJobEvent>>,
) -> Result<()> {
let term = Term::get_current().inner().to_string();
tracing::Span::current().record("term", term.as_str());
@@ -166,13 +173,70 @@ impl Scheduler {
"Retrieved subjects from API"
);
// Create payloads for all subjects
let subject_payloads: Vec<_> = subjects
.iter()
.map(|subject| json!({ "subject": subject.code }))
// Fetch per-subject stats and build a lookup map
let stats_rows = scrape_jobs::fetch_subject_stats(db_pool).await?;
let stats_map: HashMap<String, SubjectStats> = stats_rows
.into_iter()
.map(|row| {
let subject = row.subject.clone();
(subject, SubjectStats::from(row))
})
.collect();
// Query existing jobs for all subjects in a single query
// Evaluate each subject using adaptive scheduling
let now = Utc::now();
let is_past_term = false; // Scheduler currently only fetches current term subjects
let mut eligible_subjects: Vec<String> = Vec::new();
let mut cooldown_count: usize = 0;
let mut paused_count: usize = 0;
let mut read_only_count: usize = 0;
for subject in &subjects {
let stats = stats_map.get(&subject.code).cloned().unwrap_or_else(|| {
// Cold start: no history for this subject
SubjectStats {
subject: subject.code.clone(),
recent_runs: 0,
avg_change_ratio: 0.0,
consecutive_zero_changes: 0,
consecutive_empty_fetches: 0,
recent_failure_count: 0,
recent_success_count: 0,
last_completed: DateTime::<Utc>::MIN_UTC,
}
});
match evaluate_subject(&stats, now, is_past_term) {
SubjectSchedule::Eligible(_) => {
eligible_subjects.push(subject.code.clone());
}
SubjectSchedule::Cooldown(_) => cooldown_count += 1,
SubjectSchedule::Paused => paused_count += 1,
SubjectSchedule::ReadOnly => read_only_count += 1,
}
}
info!(
total = subjects.len(),
eligible = eligible_subjects.len(),
cooldown = cooldown_count,
paused = paused_count,
read_only = read_only_count,
"Adaptive scheduling decisions"
);
if eligible_subjects.is_empty() {
debug!("No eligible subjects to schedule");
return Ok(());
}
// Create payloads only for eligible subjects
let subject_payloads: Vec<_> = eligible_subjects
.iter()
.map(|code| json!({ "subject": code }))
.collect();
// Query existing jobs for eligible subjects only
let existing_payloads = scrape_jobs::find_existing_job_payloads(
TargetType::Subject,
&subject_payloads,
@@ -180,12 +244,12 @@ impl Scheduler {
)
.await?;
// Filter out subjects that already have jobs and prepare new jobs
// Filter out subjects that already have pending jobs
let mut skipped_count = 0;
let new_jobs: Vec<_> = subjects
let new_jobs: Vec<_> = eligible_subjects
.into_iter()
.filter_map(|subject| {
let job = SubjectJob::new(subject.code.clone());
.filter_map(|subject_code| {
let job = SubjectJob::new(subject_code.clone());
let payload = serde_json::to_value(&job).unwrap();
let payload_str = payload.to_string();
@@ -193,7 +257,7 @@ impl Scheduler {
skipped_count += 1;
None
} else {
Some((payload, subject.code))
Some((payload, subject_code))
}
})
.collect();
@@ -213,7 +277,16 @@ impl Scheduler {
.map(|(payload, _)| (payload, TargetType::Subject, ScrapePriority::Low))
.collect();
scrape_jobs::batch_insert_jobs(&jobs, db_pool).await?;
let inserted = scrape_jobs::batch_insert_jobs(&jobs, db_pool).await?;
if let Some(tx) = job_events_tx {
inserted.iter().for_each(|job| {
debug!(job_id = job.id, "Emitting JobCreated event");
let _ = tx.send(ScrapeJobEvent::JobCreated {
job: ScrapeJobDto::from(job),
});
});
}
}
debug!("Job scheduling complete");
@@ -232,8 +305,16 @@ impl Scheduler {
crate::data::rmp::batch_upsert_rmp_professors(&professors, db_pool).await?;
info!(total, "RMP professors upserted");
let matched = crate::data::rmp::auto_match_instructors(db_pool).await?;
info!(total, matched, "RMP sync complete");
let stats = crate::data::rmp_matching::generate_candidates(db_pool).await?;
info!(
total,
stats.total_unmatched,
stats.candidates_created,
stats.auto_matched,
stats.skipped_unparseable,
stats.skipped_no_candidates,
"RMP sync complete"
);
Ok(())
}
+178 -18
@@ -1,8 +1,10 @@
use crate::banner::{BannerApi, BannerApiError};
use crate::data::models::ScrapeJob;
use crate::data::models::{ScrapeJob, ScrapeJobStatus, UpsertCounts};
use crate::data::scrape_jobs;
use crate::error::Result;
use crate::scraper::jobs::{JobError, JobType};
use crate::web::ws::ScrapeJobEvent;
use chrono::{DateTime, Utc};
use sqlx::PgPool;
use std::sync::Arc;
use std::time::Duration;
@@ -10,6 +12,9 @@ use tokio::sync::broadcast;
use tokio::time;
use tracing::{Instrument, debug, error, info, trace, warn};
/// Maximum time a single job is allowed to run before being considered stuck.
const JOB_TIMEOUT: Duration = Duration::from_secs(5 * 60);
/// A single worker instance.
///
/// Each worker runs in its own asynchronous task and continuously polls the
@@ -18,14 +23,21 @@ pub struct Worker {
id: usize, // For logging purposes
db_pool: PgPool,
banner_api: Arc<BannerApi>,
job_events_tx: broadcast::Sender<ScrapeJobEvent>,
}
impl Worker {
pub fn new(id: usize, db_pool: PgPool, banner_api: Arc<BannerApi>) -> Self {
pub fn new(
id: usize,
db_pool: PgPool,
banner_api: Arc<BannerApi>,
job_events_tx: broadcast::Sender<ScrapeJobEvent>,
) -> Self {
Self {
id,
db_pool,
banner_api,
job_events_tx,
}
}
@@ -60,22 +72,57 @@ impl Worker {
let job_id = job.id;
let retry_count = job.retry_count;
let max_retries = job.max_retries;
let target_type = job.target_type;
let payload = job.target_payload.clone();
let priority = job.priority;
let queued_at = job.queued_at;
let started_at = Utc::now();
let start = std::time::Instant::now();
// Process the job, racing against shutdown signal
// Emit JobLocked event
let locked_at = started_at.to_rfc3339();
debug!(job_id, "Emitting JobLocked event");
let _ = self.job_events_tx.send(ScrapeJobEvent::JobLocked {
id: job_id,
locked_at,
status: ScrapeJobStatus::Processing,
});
// Process the job, racing against shutdown signal and timeout
let process_result = tokio::select! {
_ = shutdown_rx.recv() => {
self.handle_shutdown_during_processing(job_id).await;
break;
}
result = self.process_job(job) => result
result = async {
match time::timeout(JOB_TIMEOUT, self.process_job(job)).await {
Ok(result) => result,
Err(_elapsed) => {
Err(JobError::Recoverable(anyhow::anyhow!(
"job timed out after {}s",
JOB_TIMEOUT.as_secs()
)))
}
}
} => result
};
let duration = start.elapsed();
// Handle the job processing result
self.handle_job_result(job_id, retry_count, max_retries, process_result, duration)
.await;
self.handle_job_result(
job_id,
retry_count,
max_retries,
process_result,
duration,
target_type,
payload,
priority,
queued_at,
started_at,
)
.await;
}
}
@@ -87,7 +134,7 @@ impl Worker {
scrape_jobs::fetch_and_lock_job(&self.db_pool).await
}
async fn process_job(&self, job: ScrapeJob) -> Result<(), JobError> {
async fn process_job(&self, job: ScrapeJob) -> Result<UpsertCounts, JobError> {
// Convert the database job to our job type
let job_type = JobType::from_target_type_and_payload(job.target_type, job.target_payload)
.map_err(|e| JobError::Unrecoverable(anyhow::anyhow!(e)))?; // Parse errors are unrecoverable
@@ -114,9 +161,7 @@ impl Worker {
job_impl
.process(&self.banner_api, &self.db_pool)
.await
.map_err(JobError::Recoverable)?;
Ok(())
.map_err(JobError::Recoverable)
}
.instrument(span)
.await
@@ -130,7 +175,11 @@ impl Worker {
scrape_jobs::unlock_job(job_id, &self.db_pool).await
}
async fn unlock_and_increment_retry(&self, job_id: i32, max_retries: i32) -> Result<bool> {
async fn unlock_and_increment_retry(
&self,
job_id: i32,
max_retries: i32,
) -> Result<Option<chrono::DateTime<chrono::Utc>>> {
scrape_jobs::unlock_and_increment_retry(job_id, max_retries, &self.db_pool).await
}
@@ -156,31 +205,97 @@ impl Worker {
}
/// Handle the result of job processing
#[allow(clippy::too_many_arguments)]
async fn handle_job_result(
&self,
job_id: i32,
retry_count: i32,
max_retries: i32,
result: Result<(), JobError>,
result: Result<UpsertCounts, JobError>,
duration: std::time::Duration,
target_type: crate::data::models::TargetType,
payload: serde_json::Value,
priority: crate::data::models::ScrapePriority,
queued_at: DateTime<Utc>,
started_at: DateTime<Utc>,
) {
let duration_ms = duration.as_millis() as i32;
match result {
Ok(()) => {
Ok(counts) => {
debug!(
worker_id = self.id,
job_id,
duration_ms = duration.as_millis(),
courses_fetched = counts.courses_fetched,
courses_changed = counts.courses_changed,
courses_unchanged = counts.courses_unchanged,
"Job completed successfully"
);
// Log the result
if let Err(e) = scrape_jobs::insert_job_result(
target_type,
payload,
priority,
queued_at,
started_at,
duration_ms,
true,
None,
retry_count,
Some(&counts),
&self.db_pool,
)
.await
{
error!(worker_id = self.id, job_id, error = ?e, "Failed to insert job result");
}
if let Err(e) = self.delete_job(job_id).await {
error!(worker_id = self.id, job_id, error = ?e, "Failed to delete completed job");
}
debug!(job_id, "Emitting JobCompleted event");
let _ = self
.job_events_tx
.send(ScrapeJobEvent::JobCompleted { id: job_id });
}
Err(JobError::Recoverable(e)) => {
self.handle_recoverable_error(job_id, retry_count, max_retries, e, duration)
.await;
self.handle_recoverable_error(
job_id,
retry_count,
max_retries,
e,
duration,
target_type,
payload,
priority,
queued_at,
started_at,
)
.await;
}
Err(JobError::Unrecoverable(e)) => {
// Log the failed result
let err_msg = format!("{e:#}");
if let Err(log_err) = scrape_jobs::insert_job_result(
target_type,
payload,
priority,
queued_at,
started_at,
duration_ms,
false,
Some(&err_msg),
retry_count,
None,
&self.db_pool,
)
.await
{
error!(worker_id = self.id, job_id, error = ?log_err, "Failed to insert job result");
}
error!(
worker_id = self.id,
job_id,
@@ -191,11 +306,16 @@ impl Worker {
if let Err(e) = self.delete_job(job_id).await {
error!(worker_id = self.id, job_id, error = ?e, "Failed to delete corrupted job");
}
debug!(job_id, "Emitting JobDeleted event");
let _ = self
.job_events_tx
.send(ScrapeJobEvent::JobDeleted { id: job_id });
}
}
}
/// Handle recoverable errors by logging appropriately and unlocking the job
#[allow(clippy::too_many_arguments)]
async fn handle_recoverable_error(
&self,
job_id: i32,
@@ -203,6 +323,11 @@ impl Worker {
max_retries: i32,
e: anyhow::Error,
duration: std::time::Duration,
target_type: crate::data::models::TargetType,
payload: serde_json::Value,
priority: crate::data::models::ScrapePriority,
queued_at: DateTime<Utc>,
started_at: DateTime<Utc>,
) {
let next_attempt = retry_count.saturating_add(1);
let remaining_retries = max_retries.saturating_sub(next_attempt);
@@ -233,7 +358,7 @@ impl Worker {
// Atomically unlock and increment retry count, checking if retry is allowed
match self.unlock_and_increment_retry(job_id, max_retries).await {
Ok(can_retry) if can_retry => {
Ok(Some(new_queued_at)) => {
debug!(
worker_id = self.id,
job_id,
@@ -241,9 +366,37 @@ impl Worker {
remaining_retries = remaining_retries,
"Job unlocked for retry"
);
debug!(job_id, "Emitting JobRetried event");
let _ = self.job_events_tx.send(ScrapeJobEvent::JobRetried {
id: job_id,
retry_count: next_attempt,
queued_at: new_queued_at.to_rfc3339(),
status: ScrapeJobStatus::Pending,
});
// Don't log a result yet — the job will be retried
}
Ok(_) => {
// Max retries exceeded (detected atomically)
Ok(None) => {
// Max retries exceeded — log final failure result
let duration_ms = duration.as_millis() as i32;
let err_msg = format!("{e:#}");
if let Err(log_err) = scrape_jobs::insert_job_result(
target_type,
payload,
priority,
queued_at,
started_at,
duration_ms,
false,
Some(&err_msg),
next_attempt,
None,
&self.db_pool,
)
.await
{
error!(worker_id = self.id, job_id, error = ?log_err, "Failed to insert job result");
}
error!(
worker_id = self.id,
job_id,
@@ -256,6 +409,13 @@ impl Worker {
if let Err(e) = self.delete_job(job_id).await {
error!(worker_id = self.id, job_id, error = ?e, "Failed to delete failed job");
}
debug!(job_id, "Emitting JobExhausted and JobDeleted events");
let _ = self
.job_events_tx
.send(ScrapeJobEvent::JobExhausted { id: job_id });
let _ = self
.job_events_tx
.send(ScrapeJobEvent::JobDeleted { id: job_id });
}
Err(e) => {
error!(worker_id = self.id, job_id, error = ?e, "Failed to unlock and increment retry count");
+39 -2
@@ -1,6 +1,7 @@
use super::Service;
use crate::state::AppState;
use crate::status::ServiceStatus;
use crate::web::auth::AuthConfig;
use crate::web::create_router;
use std::net::SocketAddr;
use tokio::net::TcpListener;
@@ -11,14 +12,16 @@ use tracing::{info, trace, warn};
pub struct WebService {
port: u16,
app_state: AppState,
auth_config: AuthConfig,
shutdown_tx: Option<broadcast::Sender<()>>,
}
impl WebService {
pub fn new(port: u16, app_state: AppState) -> Self {
pub fn new(port: u16, app_state: AppState, auth_config: AuthConfig) -> Self {
Self {
port,
app_state,
auth_config,
shutdown_tx: None,
}
}
@@ -48,6 +51,33 @@ impl WebService {
}
}
}
/// Periodically cleans up expired sessions from the database and in-memory cache.
async fn session_cleanup_loop(state: AppState, mut shutdown_rx: broadcast::Receiver<()>) {
use std::time::Duration;
// Run every hour
let mut interval = tokio::time::interval(Duration::from_secs(3600));
loop {
tokio::select! {
_ = interval.tick() => {
match state.session_cache.cleanup_expired().await {
Ok(deleted) => {
if deleted > 0 {
info!(deleted, "cleaned up expired sessions");
}
}
Err(e) => {
warn!(error = %e, "session cleanup failed");
}
}
}
_ = shutdown_rx.recv() => {
break;
}
}
}
}
}
#[async_trait::async_trait]
@@ -58,7 +88,7 @@ impl Service for WebService {
async fn run(&mut self) -> Result<(), anyhow::Error> {
// Create the main router with Banner API routes
let app = create_router(self.app_state.clone());
let app = create_router(self.app_state.clone(), self.auth_config.clone());
let addr = SocketAddr::from(([0, 0, 0, 0], self.port));
@@ -84,6 +114,13 @@ impl Service for WebService {
Self::db_health_check_loop(health_state, health_shutdown_rx).await;
});
// Spawn session cleanup task
let cleanup_state = self.app_state.clone();
let cleanup_shutdown_rx = shutdown_tx.subscribe();
tokio::spawn(async move {
Self::session_cleanup_loop(cleanup_state, cleanup_shutdown_rx).await;
});
// Use axum's graceful shutdown with the internal shutdown signal
axum::serve(listener, app)
.with_graceful_shutdown(async move {
+19 -1
@@ -4,11 +4,14 @@ use crate::banner::BannerApi;
use crate::banner::Course;
use crate::data::models::ReferenceData;
use crate::status::ServiceStatusRegistry;
use crate::web::schedule_cache::ScheduleCache;
use crate::web::session_cache::{OAuthStateStore, SessionCache};
use crate::web::ws::ScrapeJobEvent;
use anyhow::Result;
use sqlx::PgPool;
use std::collections::HashMap;
use std::sync::Arc;
use tokio::sync::RwLock;
use tokio::sync::{RwLock, broadcast};
/// In-memory cache for reference data (code→description lookups).
///
@@ -72,18 +75,33 @@ pub struct AppState {
pub db_pool: PgPool,
pub service_statuses: ServiceStatusRegistry,
pub reference_cache: Arc<RwLock<ReferenceCache>>,
pub session_cache: SessionCache,
pub oauth_state_store: OAuthStateStore,
pub schedule_cache: ScheduleCache,
pub scrape_job_tx: broadcast::Sender<ScrapeJobEvent>,
}
impl AppState {
pub fn new(banner_api: Arc<BannerApi>, db_pool: PgPool) -> Self {
let (scrape_job_tx, _) = broadcast::channel(64);
let schedule_cache = ScheduleCache::new(db_pool.clone());
Self {
session_cache: SessionCache::new(db_pool.clone()),
oauth_state_store: OAuthStateStore::new(),
banner_api,
db_pool,
service_statuses: ServiceStatusRegistry::new(),
reference_cache: Arc::new(RwLock::new(ReferenceCache::new())),
schedule_cache,
scrape_job_tx,
}
}
/// Subscribe to scrape job lifecycle events.
pub fn scrape_job_events(&self) -> broadcast::Receiver<ScrapeJobEvent> {
self.scrape_job_tx.subscribe()
}
/// Initialize the reference cache from the database.
pub async fn load_reference_cache(&self) -> Result<()> {
let entries = crate::data::reference::get_all(&self.db_pool).await?;
+269
@@ -0,0 +1,269 @@
//! Admin API handlers.
//!
//! All endpoints require the `AdminUser` extractor, returning 401/403 as needed.
use axum::extract::{Path, State};
use axum::http::{HeaderMap, StatusCode, header};
use axum::response::{IntoResponse, Json, Response};
use chrono::{DateTime, Utc};
use serde::Deserialize;
use serde_json::{Value, json};
use crate::data::models::User;
use crate::state::AppState;
use crate::web::extractors::AdminUser;
/// `GET /api/admin/status` — Enhanced system status for admins.
pub async fn admin_status(
AdminUser(_user): AdminUser,
State(state): State<AppState>,
) -> Result<Json<Value>, (StatusCode, Json<Value>)> {
let (user_count,): (i64,) = sqlx::query_as("SELECT COUNT(*) FROM users")
.fetch_one(&state.db_pool)
.await
.map_err(|e| {
tracing::error!(error = %e, "failed to count users");
(
StatusCode::INTERNAL_SERVER_ERROR,
Json(json!({"error": "failed to count users"})),
)
})?;
let (session_count,): (i64,) =
sqlx::query_as("SELECT COUNT(*) FROM user_sessions WHERE expires_at > now()")
.fetch_one(&state.db_pool)
.await
.map_err(|e| {
tracing::error!(error = %e, "failed to count sessions");
(
StatusCode::INTERNAL_SERVER_ERROR,
Json(json!({"error": "failed to count sessions"})),
)
})?;
let course_count = state.get_course_count().await.map_err(|e| {
tracing::error!(error = %e, "failed to count courses");
(
StatusCode::INTERNAL_SERVER_ERROR,
Json(json!({"error": "failed to count courses"})),
)
})?;
let (scrape_job_count,): (i64,) = sqlx::query_as("SELECT COUNT(*) FROM scrape_jobs")
.fetch_one(&state.db_pool)
.await
.map_err(|e| {
tracing::error!(error = %e, "failed to count scrape jobs");
(
StatusCode::INTERNAL_SERVER_ERROR,
Json(json!({"error": "failed to count scrape jobs"})),
)
})?;
let services: Vec<Value> = state
.service_statuses
.all()
.into_iter()
.map(|(name, status)| {
json!({
"name": name,
"status": status,
})
})
.collect();
Ok(Json(json!({
"userCount": user_count,
"sessionCount": session_count,
"courseCount": course_count,
"scrapeJobCount": scrape_job_count,
"services": services,
})))
}
/// `GET /api/admin/users` — List all users.
pub async fn list_users(
AdminUser(_user): AdminUser,
State(state): State<AppState>,
) -> Result<Json<Vec<User>>, (StatusCode, Json<Value>)> {
let users = crate::data::users::list_users(&state.db_pool)
.await
.map_err(|e| {
tracing::error!(error = %e, "failed to list users");
(
StatusCode::INTERNAL_SERVER_ERROR,
Json(json!({"error": "failed to list users"})),
)
})?;
Ok(Json(users))
}
#[derive(Deserialize)]
pub struct SetAdminBody {
is_admin: bool,
}
/// `PUT /api/admin/users/{discord_id}/admin` — Set admin status for a user.
pub async fn set_user_admin(
AdminUser(_user): AdminUser,
State(state): State<AppState>,
Path(discord_id): Path<i64>,
Json(body): Json<SetAdminBody>,
) -> Result<Json<User>, (StatusCode, Json<Value>)> {
let user = crate::data::users::set_admin(&state.db_pool, discord_id, body.is_admin)
.await
.map_err(|e| {
tracing::error!(error = %e, "failed to set admin status");
(
StatusCode::INTERNAL_SERVER_ERROR,
Json(json!({"error": "failed to set admin status"})),
)
})?
.ok_or_else(|| {
(
StatusCode::NOT_FOUND,
Json(json!({"error": "user not found"})),
)
})?;
state.session_cache.evict_user(discord_id);
Ok(Json(user))
}
/// `GET /api/admin/scrape-jobs` — List scrape jobs.
pub async fn list_scrape_jobs(
AdminUser(_user): AdminUser,
State(state): State<AppState>,
) -> Result<Json<Value>, (StatusCode, Json<Value>)> {
let rows = sqlx::query_as::<_, crate::data::models::ScrapeJob>(
"SELECT * FROM scrape_jobs ORDER BY priority DESC, execute_at ASC LIMIT 100",
)
.fetch_all(&state.db_pool)
.await
.map_err(|e| {
tracing::error!(error = %e, "failed to list scrape jobs");
(
StatusCode::INTERNAL_SERVER_ERROR,
Json(json!({"error": "failed to list scrape jobs"})),
)
})?;
let jobs: Vec<Value> = rows
.iter()
.map(|j| {
json!({
"id": j.id,
"targetType": format!("{:?}", j.target_type),
"targetPayload": j.target_payload,
"priority": format!("{:?}", j.priority),
"executeAt": j.execute_at.to_rfc3339(),
"createdAt": j.created_at.to_rfc3339(),
"lockedAt": j.locked_at.map(|t| t.to_rfc3339()),
"retryCount": j.retry_count,
"maxRetries": j.max_retries,
"queuedAt": j.queued_at.to_rfc3339(),
"status": j.status(),
})
})
.collect();
Ok(Json(json!({ "jobs": jobs })))
}
/// Row returned by the audit-log query (audit + joined course fields).
#[derive(sqlx::FromRow, Debug)]
struct AuditRow {
id: i32,
course_id: i32,
timestamp: chrono::DateTime<chrono::Utc>,
field_changed: String,
old_value: String,
new_value: String,
// Joined from courses table (nullable in case the course was deleted)
subject: Option<String>,
course_number: Option<String>,
crn: Option<String>,
title: Option<String>,
}
/// Format a `DateTime<Utc>` as an HTTP-date (RFC 7231 IMF-fixdate) for Last-Modified headers.
fn to_http_date(dt: &DateTime<Utc>) -> String {
dt.format("%a, %d %b %Y %H:%M:%S GMT").to_string()
}
/// Parse an `If-Modified-Since` header value into a `DateTime<Utc>`.
fn parse_if_modified_since(headers: &HeaderMap) -> Option<DateTime<Utc>> {
let val = headers.get(header::IF_MODIFIED_SINCE)?.to_str().ok()?;
DateTime::parse_from_rfc2822(val)
.ok()
.map(|dt| dt.with_timezone(&Utc))
}
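// Illustrative exchange (values hypothetical): a client replaying
//   If-Modified-Since: Fri, 30 Jan 2026 16:01:56 GMT
// gets 304 Not Modified when the newest audit row's timestamp, truncated
// to whole seconds, is not later than that instant; otherwise the full
// entry list is returned with a fresh Last-Modified header.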
/// `GET /api/admin/audit-log` — List recent audit entries.
///
/// Supports `If-Modified-Since`: returns 304 when the newest entry hasn't changed.
pub async fn list_audit_log(
AdminUser(_user): AdminUser,
headers: HeaderMap,
State(state): State<AppState>,
) -> Result<Response, (StatusCode, Json<Value>)> {
let rows = sqlx::query_as::<_, AuditRow>(
"SELECT a.id, a.course_id, a.timestamp, a.field_changed, a.old_value, a.new_value, \
c.subject, c.course_number, c.crn, c.title \
FROM course_audits a \
LEFT JOIN courses c ON c.id = a.course_id \
ORDER BY a.timestamp DESC LIMIT 200",
)
.fetch_all(&state.db_pool)
.await
.map_err(|e| {
tracing::error!(error = %e, "failed to list audit log");
(
StatusCode::INTERNAL_SERVER_ERROR,
Json(json!({"error": "failed to list audit log"})),
)
})?;
// Determine the latest timestamp across all rows (query is DESC so first row is newest)
let latest = rows.first().map(|r| r.timestamp);
// If the client sent If-Modified-Since and our data hasn't changed, return 304
if let (Some(since), Some(latest_ts)) = (parse_if_modified_since(&headers), latest) {
// Truncate to seconds for comparison (HTTP dates have second precision)
if latest_ts.timestamp() <= since.timestamp() {
let mut resp = StatusCode::NOT_MODIFIED.into_response();
if let Ok(val) = to_http_date(&latest_ts).parse() {
resp.headers_mut().insert(header::LAST_MODIFIED, val);
}
return Ok(resp);
}
}
let entries: Vec<Value> = rows
.iter()
.map(|a| {
json!({
"id": a.id,
"courseId": a.course_id,
"timestamp": a.timestamp.to_rfc3339(),
"fieldChanged": a.field_changed,
"oldValue": a.old_value,
"newValue": a.new_value,
"subject": a.subject,
"courseNumber": a.course_number,
"crn": a.crn,
"courseTitle": a.title,
})
})
.collect();
let mut resp = Json(json!({ "entries": entries })).into_response();
if let Some(latest_ts) = latest
&& let Ok(val) = to_http_date(&latest_ts).parse()
{
resp.headers_mut().insert(header::LAST_MODIFIED, val);
}
Ok(resp)
}
+865
@@ -0,0 +1,865 @@
//! Admin API handlers for RMP instructor matching management.
use axum::extract::{Path, Query, State};
use axum::http::StatusCode;
use axum::response::Json;
use serde::{Deserialize, Serialize};
use serde_json::{Value, json};
use ts_rs::TS;
use crate::state::AppState;
use crate::web::extractors::AdminUser;
// ---------------------------------------------------------------------------
// Query / body types
// ---------------------------------------------------------------------------
#[derive(Deserialize)]
pub struct ListInstructorsParams {
status: Option<String>,
search: Option<String>,
page: Option<i32>,
per_page: Option<i32>,
sort: Option<String>,
}
#[derive(Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct MatchBody {
rmp_legacy_id: i32,
}
#[derive(Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct RejectCandidateBody {
rmp_legacy_id: i32,
}
// ---------------------------------------------------------------------------
// Response types
// ---------------------------------------------------------------------------
/// Simple acknowledgement response for mutating operations.
#[derive(Debug, Clone, Serialize, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export)]
pub struct OkResponse {
pub ok: bool,
}
/// A top-candidate summary shown in the instructor list view.
#[derive(Debug, Clone, Serialize, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export)]
pub struct TopCandidateResponse {
pub rmp_legacy_id: i32,
pub score: Option<f32>,
#[ts(as = "Option<std::collections::HashMap<String, f32>>")]
pub score_breakdown: Option<serde_json::Value>,
pub first_name: Option<String>,
pub last_name: Option<String>,
pub department: Option<String>,
pub avg_rating: Option<f32>,
pub num_ratings: Option<i32>,
}
/// An instructor row in the paginated list.
#[derive(Debug, Clone, Serialize, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export)]
pub struct InstructorListItem {
pub id: i32,
pub display_name: String,
pub email: String,
pub rmp_match_status: String,
#[ts(as = "i32")]
pub rmp_link_count: i64,
#[ts(as = "i32")]
pub candidate_count: i64,
#[ts(as = "i32")]
pub course_subject_count: i64,
pub top_candidate: Option<TopCandidateResponse>,
}
/// Aggregate status counts for the instructor list.
#[derive(Debug, Clone, Serialize, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export)]
pub struct InstructorStats {
#[ts(as = "i32")]
pub total: i64,
#[ts(as = "i32")]
pub unmatched: i64,
#[ts(as = "i32")]
pub auto: i64,
#[ts(as = "i32")]
pub confirmed: i64,
#[ts(as = "i32")]
pub rejected: i64,
#[ts(as = "i32")]
pub with_candidates: i64,
}
/// Response for `GET /api/admin/instructors`.
#[derive(Debug, Clone, Serialize, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export)]
pub struct ListInstructorsResponse {
pub instructors: Vec<InstructorListItem>,
#[ts(as = "i32")]
pub total: i64,
pub page: i32,
pub per_page: i32,
pub stats: InstructorStats,
}
/// Instructor summary in the detail view.
#[derive(Debug, Clone, Serialize, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export)]
pub struct InstructorDetail {
pub id: i32,
pub display_name: String,
pub email: String,
pub rmp_match_status: String,
pub subjects_taught: Vec<String>,
#[ts(as = "i32")]
pub course_count: i64,
}
/// A linked RMP profile in the detail view.
#[derive(Debug, Clone, Serialize, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export)]
pub struct LinkedRmpProfile {
pub link_id: i32,
pub legacy_id: i32,
pub first_name: Option<String>,
pub last_name: Option<String>,
pub department: Option<String>,
pub avg_rating: Option<f32>,
pub avg_difficulty: Option<f32>,
pub num_ratings: Option<i32>,
pub would_take_again_pct: Option<f32>,
}
/// A match candidate in the detail view.
#[derive(Debug, Clone, Serialize, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export)]
pub struct CandidateResponse {
pub id: i32,
pub rmp_legacy_id: i32,
pub first_name: Option<String>,
pub last_name: Option<String>,
pub department: Option<String>,
pub avg_rating: Option<f32>,
pub avg_difficulty: Option<f32>,
pub num_ratings: Option<i32>,
pub would_take_again_pct: Option<f32>,
pub score: Option<f32>,
#[ts(as = "Option<std::collections::HashMap<String, f32>>")]
pub score_breakdown: Option<serde_json::Value>,
pub status: String,
}
/// Response for `GET /api/admin/instructors/{id}` and `POST .../match`.
#[derive(Debug, Clone, Serialize, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export)]
pub struct InstructorDetailResponse {
pub instructor: InstructorDetail,
pub current_matches: Vec<LinkedRmpProfile>,
pub candidates: Vec<CandidateResponse>,
}
/// Response for `POST /api/admin/rmp/rescore`.
#[derive(Debug, Clone, Serialize, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export)]
pub struct RescoreResponse {
pub total_unmatched: usize,
pub candidates_created: usize,
pub auto_matched: usize,
pub skipped_unparseable: usize,
pub skipped_no_candidates: usize,
}
// ---------------------------------------------------------------------------
// Helper: map sqlx errors to the standard admin error tuple
// ---------------------------------------------------------------------------
fn db_error(context: &str, e: sqlx::Error) -> (StatusCode, Json<Value>) {
tracing::error!(error = %e, "{context}");
(
StatusCode::INTERNAL_SERVER_ERROR,
Json(json!({"error": context})),
)
}
// ---------------------------------------------------------------------------
// Row types for SQL queries
// ---------------------------------------------------------------------------
#[derive(sqlx::FromRow)]
struct InstructorRow {
id: i32,
display_name: String,
email: String,
rmp_match_status: String,
rmp_link_count: Option<i64>,
top_candidate_rmp_id: Option<i32>,
top_candidate_score: Option<f32>,
top_candidate_breakdown: Option<serde_json::Value>,
tc_first_name: Option<String>,
tc_last_name: Option<String>,
tc_department: Option<String>,
tc_avg_rating: Option<f32>,
tc_num_ratings: Option<i32>,
candidate_count: Option<i64>,
course_subject_count: Option<i64>,
}
#[derive(sqlx::FromRow)]
struct StatusCount {
rmp_match_status: String,
count: i64,
}
#[derive(sqlx::FromRow)]
struct CandidateRow {
id: i32,
rmp_legacy_id: i32,
score: Option<f32>,
score_breakdown: Option<serde_json::Value>,
status: String,
first_name: Option<String>,
last_name: Option<String>,
department: Option<String>,
avg_rating: Option<f32>,
avg_difficulty: Option<f32>,
num_ratings: Option<i32>,
would_take_again_pct: Option<f32>,
}
#[derive(sqlx::FromRow)]
struct LinkedRmpProfileRow {
link_id: i32,
legacy_id: i32,
first_name: Option<String>,
last_name: Option<String>,
department: Option<String>,
avg_rating: Option<f32>,
avg_difficulty: Option<f32>,
num_ratings: Option<i32>,
would_take_again_pct: Option<f32>,
}
// ---------------------------------------------------------------------------
// 1. GET /api/admin/instructors — paginated list with filtering
// ---------------------------------------------------------------------------
/// `GET /api/admin/instructors` — List instructors with filtering and pagination.
pub async fn list_instructors(
AdminUser(_user): AdminUser,
State(state): State<AppState>,
Query(params): Query<ListInstructorsParams>,
) -> Result<Json<ListInstructorsResponse>, (StatusCode, Json<Value>)> {
let page = params.page.unwrap_or(1).max(1);
let per_page = params.per_page.unwrap_or(50).clamp(1, 100);
let offset = (page - 1) * per_page;
let sort_clause = match params.sort.as_deref() {
Some("name_asc") => "i.display_name ASC",
Some("name_desc") => "i.display_name DESC",
Some("status") => "i.rmp_match_status ASC, i.display_name ASC",
_ => "tc.score DESC NULLS LAST, i.display_name ASC",
};
// Build WHERE clause
let mut conditions = Vec::new();
let mut bind_idx = 0u32;
if params.status.is_some() {
bind_idx += 1;
conditions.push(format!("i.rmp_match_status = ${bind_idx}"));
}
if params.search.is_some() {
bind_idx += 1;
conditions.push(format!(
"(i.display_name ILIKE ${bind_idx} OR i.email ILIKE ${bind_idx})"
));
}
let where_clause = if conditions.is_empty() {
String::new()
} else {
format!("WHERE {}", conditions.join(" AND "))
};
// sort_clause, per_page, and offset are derived from validated values above,
// so interpolating them is safe; user-supplied filters are bound below.
let query_str = format!(
r#"
SELECT
i.id, i.display_name, i.email, i.rmp_match_status,
(SELECT COUNT(*) FROM instructor_rmp_links irl WHERE irl.instructor_id = i.id) as rmp_link_count,
tc.rmp_legacy_id as top_candidate_rmp_id,
tc.score as top_candidate_score,
tc.score_breakdown as top_candidate_breakdown,
rp.first_name as tc_first_name,
rp.last_name as tc_last_name,
rp.department as tc_department,
rp.avg_rating as tc_avg_rating,
rp.num_ratings as tc_num_ratings,
(SELECT COUNT(*) FROM rmp_match_candidates mc WHERE mc.instructor_id = i.id AND mc.status = 'pending') as candidate_count,
(SELECT COUNT(DISTINCT c.subject) FROM course_instructors ci JOIN courses c ON c.id = ci.course_id WHERE ci.instructor_id = i.id) as course_subject_count
FROM instructors i
LEFT JOIN LATERAL (
SELECT mc.rmp_legacy_id, mc.score, mc.score_breakdown
FROM rmp_match_candidates mc
WHERE mc.instructor_id = i.id AND mc.status = 'pending'
ORDER BY mc.score DESC
LIMIT 1
) tc ON true
LEFT JOIN rmp_professors rp ON rp.legacy_id = tc.rmp_legacy_id
{where_clause}
ORDER BY {sort_clause}
LIMIT {per_page} OFFSET {offset}
"#
);
// Build the query with dynamic binds
let mut query = sqlx::query_as::<_, InstructorRow>(&query_str);
if let Some(ref status) = params.status {
query = query.bind(status);
}
if let Some(ref search) = params.search {
query = query.bind(format!("%{search}%"));
}
let rows = query
.fetch_all(&state.db_pool)
.await
.map_err(|e| db_error("failed to list instructors", e))?;
// Count total with filters
let count_query_str = format!("SELECT COUNT(*) FROM instructors i {where_clause}");
let mut count_query = sqlx::query_as::<_, (i64,)>(&count_query_str);
if let Some(ref status) = params.status {
count_query = count_query.bind(status);
}
if let Some(ref search) = params.search {
count_query = count_query.bind(format!("%{search}%"));
}
let (total,) = count_query
.fetch_one(&state.db_pool)
.await
.map_err(|e| db_error("failed to count instructors", e))?;
// Aggregate stats (unfiltered)
let stats_rows = sqlx::query_as::<_, StatusCount>(
"SELECT rmp_match_status, COUNT(*) as count FROM instructors GROUP BY rmp_match_status",
)
.fetch_all(&state.db_pool)
.await
.map_err(|e| db_error("failed to get instructor stats", e))?;
// Count instructors with at least one candidate (for progress bar denominator)
let (with_candidates,): (i64,) =
sqlx::query_as("SELECT COUNT(DISTINCT instructor_id) FROM rmp_match_candidates")
.fetch_one(&state.db_pool)
.await
.map_err(|e| db_error("failed to count instructors with candidates", e))?;
let mut stats = InstructorStats {
total: 0,
unmatched: 0,
auto: 0,
confirmed: 0,
rejected: 0,
with_candidates,
};
for row in &stats_rows {
stats.total += row.count;
match row.rmp_match_status.as_str() {
"unmatched" => stats.unmatched = row.count,
"auto" => stats.auto = row.count,
"confirmed" => stats.confirmed = row.count,
"rejected" => stats.rejected = row.count,
_ => {}
}
}
let instructors: Vec<InstructorListItem> = rows
.iter()
.map(|r| {
let top_candidate = r.top_candidate_rmp_id.map(|rmp_id| TopCandidateResponse {
rmp_legacy_id: rmp_id,
score: r.top_candidate_score,
score_breakdown: r.top_candidate_breakdown.clone(),
first_name: r.tc_first_name.clone(),
last_name: r.tc_last_name.clone(),
department: r.tc_department.clone(),
avg_rating: r.tc_avg_rating,
num_ratings: r.tc_num_ratings,
});
InstructorListItem {
id: r.id,
display_name: r.display_name.clone(),
email: r.email.clone(),
rmp_match_status: r.rmp_match_status.clone(),
rmp_link_count: r.rmp_link_count.unwrap_or(0),
candidate_count: r.candidate_count.unwrap_or(0),
course_subject_count: r.course_subject_count.unwrap_or(0),
top_candidate,
}
})
.collect();
Ok(Json(ListInstructorsResponse {
instructors,
total,
page,
per_page,
stats,
}))
}
// ---------------------------------------------------------------------------
// 2. GET /api/admin/instructors/{id} — full detail
// ---------------------------------------------------------------------------
/// `GET /api/admin/instructors/{id}` — Full instructor detail with candidates.
pub async fn get_instructor(
AdminUser(_user): AdminUser,
State(state): State<AppState>,
Path(id): Path<i32>,
) -> Result<Json<InstructorDetailResponse>, (StatusCode, Json<Value>)> {
build_instructor_detail(&state, id).await
}
/// Shared helper that builds the full instructor detail response.
async fn build_instructor_detail(
state: &AppState,
id: i32,
) -> Result<Json<InstructorDetailResponse>, (StatusCode, Json<Value>)> {
// Fetch instructor
let instructor: Option<(i32, String, String, String)> = sqlx::query_as(
"SELECT id, display_name, email, rmp_match_status FROM instructors WHERE id = $1",
)
.bind(id)
.fetch_optional(&state.db_pool)
.await
.map_err(|e| db_error("failed to fetch instructor", e))?;
let (inst_id, display_name, email, rmp_match_status) = instructor.ok_or_else(|| {
(
StatusCode::NOT_FOUND,
Json(json!({"error": "instructor not found"})),
)
})?;
// Subjects taught
let subjects: Vec<(String,)> = sqlx::query_as(
"SELECT DISTINCT c.subject FROM course_instructors ci JOIN courses c ON c.id = ci.course_id WHERE ci.instructor_id = $1 ORDER BY c.subject",
)
.bind(inst_id)
.fetch_all(&state.db_pool)
.await
.map_err(|e| db_error("failed to fetch subjects", e))?;
// Course count
let (course_count,): (i64,) = sqlx::query_as(
"SELECT COUNT(DISTINCT ci.course_id) FROM course_instructors ci WHERE ci.instructor_id = $1",
)
.bind(inst_id)
.fetch_one(&state.db_pool)
.await
.map_err(|e| db_error("failed to count courses", e))?;
// Candidates with RMP professor info
let candidates = sqlx::query_as::<_, CandidateRow>(
r#"
SELECT mc.id, mc.rmp_legacy_id, mc.score, mc.score_breakdown, mc.status,
rp.first_name, rp.last_name, rp.department,
rp.avg_rating, rp.avg_difficulty, rp.num_ratings, rp.would_take_again_pct
FROM rmp_match_candidates mc
JOIN rmp_professors rp ON rp.legacy_id = mc.rmp_legacy_id
WHERE mc.instructor_id = $1
ORDER BY mc.score DESC
"#,
)
.bind(inst_id)
.fetch_all(&state.db_pool)
.await
.map_err(|e| db_error("failed to fetch candidates", e))?;
// Current matches (all linked RMP profiles)
let current_matches = sqlx::query_as::<_, LinkedRmpProfileRow>(
r#"
SELECT irl.id as link_id,
rp.legacy_id, rp.first_name, rp.last_name, rp.department,
rp.avg_rating, rp.avg_difficulty, rp.num_ratings, rp.would_take_again_pct
FROM instructor_rmp_links irl
JOIN rmp_professors rp ON rp.legacy_id = irl.rmp_legacy_id
WHERE irl.instructor_id = $1
ORDER BY rp.num_ratings DESC NULLS LAST
"#,
)
.bind(inst_id)
.fetch_all(&state.db_pool)
.await
.map_err(|e| db_error("failed to fetch linked rmp profiles", e))?;
let current_matches_resp: Vec<LinkedRmpProfile> = current_matches
.into_iter()
.map(|p| LinkedRmpProfile {
link_id: p.link_id,
legacy_id: p.legacy_id,
first_name: p.first_name,
last_name: p.last_name,
department: p.department,
avg_rating: p.avg_rating,
avg_difficulty: p.avg_difficulty,
num_ratings: p.num_ratings,
would_take_again_pct: p.would_take_again_pct,
})
.collect();
let candidates_resp: Vec<CandidateResponse> = candidates
.into_iter()
.map(|c| CandidateResponse {
id: c.id,
rmp_legacy_id: c.rmp_legacy_id,
first_name: c.first_name,
last_name: c.last_name,
department: c.department,
avg_rating: c.avg_rating,
avg_difficulty: c.avg_difficulty,
num_ratings: c.num_ratings,
would_take_again_pct: c.would_take_again_pct,
score: c.score,
score_breakdown: c.score_breakdown,
status: c.status,
})
.collect();
Ok(Json(InstructorDetailResponse {
instructor: InstructorDetail {
id: inst_id,
display_name,
email,
rmp_match_status,
subjects_taught: subjects.into_iter().map(|(s,)| s).collect(),
course_count,
},
current_matches: current_matches_resp,
candidates: candidates_resp,
}))
}
// ---------------------------------------------------------------------------
// 3. POST /api/admin/instructors/{id}/match — accept a candidate
// ---------------------------------------------------------------------------
/// `POST /api/admin/instructors/{id}/match` — Accept a candidate match.
pub async fn match_instructor(
AdminUser(user): AdminUser,
State(state): State<AppState>,
Path(id): Path<i32>,
Json(body): Json<MatchBody>,
) -> Result<Json<InstructorDetailResponse>, (StatusCode, Json<Value>)> {
// Verify the candidate exists and is pending
let candidate: Option<(i32,)> = sqlx::query_as(
"SELECT id FROM rmp_match_candidates WHERE instructor_id = $1 AND rmp_legacy_id = $2 AND status = 'pending'",
)
.bind(id)
.bind(body.rmp_legacy_id)
.fetch_optional(&state.db_pool)
.await
.map_err(|e| db_error("failed to check candidate", e))?;
if candidate.is_none() {
return Err((
StatusCode::NOT_FOUND,
Json(json!({"error": "pending candidate not found for this instructor"})),
));
}
// Check if this RMP profile is already linked to a different instructor
let conflict: Option<(i32,)> = sqlx::query_as(
"SELECT instructor_id FROM instructor_rmp_links WHERE rmp_legacy_id = $1 AND instructor_id != $2",
)
.bind(body.rmp_legacy_id)
.bind(id)
.fetch_optional(&state.db_pool)
.await
.map_err(|e| db_error("failed to check rmp uniqueness", e))?;
if let Some((other_id,)) = conflict {
return Err((
StatusCode::CONFLICT,
Json(json!({
"error": "RMP profile already linked to another instructor",
"conflictingInstructorId": other_id,
})),
));
}
let mut tx = state
.db_pool
.begin()
.await
.map_err(|e| db_error("failed to begin transaction", e))?;
// Insert link into instructor_rmp_links
sqlx::query(
"INSERT INTO instructor_rmp_links (instructor_id, rmp_legacy_id, created_by, source) VALUES ($1, $2, $3, 'manual') ON CONFLICT (rmp_legacy_id) DO NOTHING",
)
.bind(id)
.bind(body.rmp_legacy_id)
.bind(user.discord_id)
.execute(&mut *tx)
.await
.map_err(|e| db_error("failed to insert rmp link", e))?;
// Update instructor match status
sqlx::query("UPDATE instructors SET rmp_match_status = 'confirmed' WHERE id = $1")
.bind(id)
.execute(&mut *tx)
.await
.map_err(|e| db_error("failed to update instructor match status", e))?;
// Accept the candidate
sqlx::query(
"UPDATE rmp_match_candidates SET status = 'accepted', resolved_at = NOW(), resolved_by = $1 WHERE instructor_id = $2 AND rmp_legacy_id = $3",
)
.bind(user.discord_id)
.bind(id)
.bind(body.rmp_legacy_id)
.execute(&mut *tx)
.await
.map_err(|e| db_error("failed to accept candidate", e))?;
tx.commit()
.await
.map_err(|e| db_error("failed to commit transaction", e))?;
build_instructor_detail(&state, id).await
}
// ---------------------------------------------------------------------------
// 4. POST /api/admin/instructors/{id}/reject-candidate — reject one candidate
// ---------------------------------------------------------------------------
/// `POST /api/admin/instructors/{id}/reject-candidate` — Reject a single candidate.
pub async fn reject_candidate(
AdminUser(user): AdminUser,
State(state): State<AppState>,
Path(id): Path<i32>,
Json(body): Json<RejectCandidateBody>,
) -> Result<Json<OkResponse>, (StatusCode, Json<Value>)> {
let result = sqlx::query(
"UPDATE rmp_match_candidates SET status = 'rejected', resolved_at = NOW(), resolved_by = $1 WHERE instructor_id = $2 AND rmp_legacy_id = $3 AND status = 'pending'",
)
.bind(user.discord_id)
.bind(id)
.bind(body.rmp_legacy_id)
.execute(&state.db_pool)
.await
.map_err(|e| db_error("failed to reject candidate", e))?;
if result.rows_affected() == 0 {
return Err((
StatusCode::NOT_FOUND,
Json(json!({"error": "pending candidate not found"})),
));
}
Ok(Json(OkResponse { ok: true }))
}
// ---------------------------------------------------------------------------
// 5. POST /api/admin/instructors/{id}/reject-all — no valid match
// ---------------------------------------------------------------------------
/// `POST /api/admin/instructors/{id}/reject-all` — Mark instructor as having no valid RMP match.
pub async fn reject_all(
AdminUser(user): AdminUser,
State(state): State<AppState>,
Path(id): Path<i32>,
) -> Result<Json<OkResponse>, (StatusCode, Json<Value>)> {
let mut tx = state
.db_pool
.begin()
.await
.map_err(|e| db_error("failed to begin transaction", e))?;
// Check current status — cannot reject an instructor with confirmed matches
let current_status: Option<(String,)> =
sqlx::query_as("SELECT rmp_match_status FROM instructors WHERE id = $1")
.bind(id)
.fetch_optional(&mut *tx)
.await
.map_err(|e| db_error("failed to fetch instructor status", e))?;
let (status,) = current_status.ok_or_else(|| {
(
StatusCode::NOT_FOUND,
Json(json!({"error": "instructor not found"})),
)
})?;
if status == "confirmed" {
return Err((
StatusCode::CONFLICT,
Json(
json!({"error": "cannot reject instructor with confirmed matches — unmatch first"}),
),
));
}
// Update instructor status
sqlx::query("UPDATE instructors SET rmp_match_status = 'rejected' WHERE id = $1")
.bind(id)
.execute(&mut *tx)
.await
.map_err(|e| db_error("failed to update instructor status", e))?;
// Reject all pending candidates
sqlx::query(
"UPDATE rmp_match_candidates SET status = 'rejected', resolved_at = NOW(), resolved_by = $1 WHERE instructor_id = $2 AND status = 'pending'",
)
.bind(user.discord_id)
.bind(id)
.execute(&mut *tx)
.await
.map_err(|e| db_error("failed to reject candidates", e))?;
tx.commit()
.await
.map_err(|e| db_error("failed to commit transaction", e))?;
Ok(Json(OkResponse { ok: true }))
}
// ---------------------------------------------------------------------------
// 6. POST /api/admin/instructors/{id}/unmatch — remove current match
// ---------------------------------------------------------------------------
/// Body for unmatch — optional `rmpLegacyId` to remove a specific link.
/// If omitted (or null), all links are removed.
#[derive(Deserialize, Default)]
#[serde(rename_all = "camelCase")]
pub struct UnmatchBody {
rmp_legacy_id: Option<i32>,
}
/// `POST /api/admin/instructors/{id}/unmatch` — Remove RMP link(s).
///
/// Send `{ "rmpLegacyId": N }` to remove a specific link, or an empty body / `{}`
/// to remove all links for the instructor.
pub async fn unmatch_instructor(
AdminUser(_user): AdminUser,
State(state): State<AppState>,
Path(id): Path<i32>,
body: Option<Json<UnmatchBody>>,
) -> Result<Json<OkResponse>, (StatusCode, Json<Value>)> {
let rmp_legacy_id = body.and_then(|Json(b)| b.rmp_legacy_id);
let mut tx = state
.db_pool
.begin()
.await
.map_err(|e| db_error("failed to begin transaction", e))?;
// Verify instructor exists
let exists: Option<(i32,)> = sqlx::query_as("SELECT id FROM instructors WHERE id = $1")
.bind(id)
.fetch_optional(&mut *tx)
.await
.map_err(|e| db_error("failed to check instructor", e))?;
if exists.is_none() {
return Err((
StatusCode::NOT_FOUND,
Json(json!({"error": "instructor not found"})),
));
}
// Delete specific link or all links
if let Some(legacy_id) = rmp_legacy_id {
let result = sqlx::query(
"DELETE FROM instructor_rmp_links WHERE instructor_id = $1 AND rmp_legacy_id = $2",
)
.bind(id)
.bind(legacy_id)
.execute(&mut *tx)
.await
.map_err(|e| db_error("failed to remove rmp link", e))?;
if result.rows_affected() == 0 {
return Err((
StatusCode::NOT_FOUND,
Json(json!({"error": "link not found for this instructor"})),
));
}
} else {
sqlx::query("DELETE FROM instructor_rmp_links WHERE instructor_id = $1")
.bind(id)
.execute(&mut *tx)
.await
.map_err(|e| db_error("failed to remove rmp links", e))?;
}
// Check if any links remain; update status accordingly
let (remaining,): (i64,) =
sqlx::query_as("SELECT COUNT(*) FROM instructor_rmp_links WHERE instructor_id = $1")
.bind(id)
.fetch_one(&mut *tx)
.await
.map_err(|e| db_error("failed to count remaining links", e))?;
if remaining == 0 {
sqlx::query("UPDATE instructors SET rmp_match_status = 'unmatched' WHERE id = $1")
.bind(id)
.execute(&mut *tx)
.await
.map_err(|e| db_error("failed to update instructor status", e))?;
}
tx.commit()
.await
.map_err(|e| db_error("failed to commit transaction", e))?;
Ok(Json(OkResponse { ok: true }))
}
// ---------------------------------------------------------------------------
// 7. POST /api/admin/rmp/rescore — re-run candidate generation
// ---------------------------------------------------------------------------
/// `POST /api/admin/rmp/rescore` — Re-run RMP candidate generation.
pub async fn rescore(
AdminUser(_user): AdminUser,
State(state): State<AppState>,
) -> Result<Json<RescoreResponse>, (StatusCode, Json<Value>)> {
let stats = crate::data::rmp_matching::generate_candidates(&state.db_pool)
.await
.map_err(|e| {
tracing::error!(error = %e, "failed to run candidate generation");
(
StatusCode::INTERNAL_SERVER_ERROR,
Json(json!({"error": "candidate generation failed"})),
)
})?;
Ok(Json(RescoreResponse {
total_unmatched: stats.total_unmatched,
candidates_created: stats.candidates_created,
auto_matched: stats.auto_matched,
skipped_unparseable: stats.skipped_unparseable,
skipped_no_candidates: stats.skipped_no_candidates,
}))
}
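For orientation, the endpoint paths in the doc comments above would wire up roughly like this. A hypothetical sketch (assuming axum 0.8-style `{id}` path parameters, as the doc comments suggest; the actual registration lives in the app's router module):

use axum::Router;
use axum::routing::{get, post};

fn admin_rmp_routes() -> Router<AppState> {
    Router::new()
        .route("/api/admin/instructors", get(list_instructors))
        .route("/api/admin/instructors/{id}", get(get_instructor))
        .route("/api/admin/instructors/{id}/match", post(match_instructor))
        .route("/api/admin/instructors/{id}/reject-candidate", post(reject_candidate))
        .route("/api/admin/instructors/{id}/reject-all", post(reject_all))
        .route("/api/admin/instructors/{id}/unmatch", post(unmatch_instructor))
        .route("/api/admin/rmp/rescore", post(rescore))
}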
+523
@@ -0,0 +1,523 @@
//! Admin API handlers for scraper observability.
//!
//! All endpoints require the `AdminUser` extractor, returning 401/403 as needed.
use axum::extract::{Path, Query, State};
use axum::http::StatusCode;
use axum::response::Json;
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
use serde_json::json;
use sqlx::Row;
use ts_rs::TS;
use crate::banner::models::terms::Term;
use crate::data::scrape_jobs;
use crate::scraper::adaptive::{self, SubjectSchedule, SubjectStats};
use crate::state::AppState;
use crate::web::extractors::AdminUser;
type ApiError = (StatusCode, Json<serde_json::Value>);
fn parse_period(period: &str) -> Result<chrono::Duration, ApiError> {
match period {
"1h" => Ok(chrono::Duration::hours(1)),
"6h" => Ok(chrono::Duration::hours(6)),
"24h" => Ok(chrono::Duration::hours(24)),
"7d" => Ok(chrono::Duration::days(7)),
"30d" => Ok(chrono::Duration::days(30)),
_ => Err((
StatusCode::BAD_REQUEST,
Json(
json!({"error": format!("Invalid period '{period}'. Valid: 1h, 6h, 24h, 7d, 30d")}),
),
)),
}
}
fn period_to_interval_str(period: &str) -> &'static str {
match period {
"1h" => "1 hour",
"6h" => "6 hours",
"24h" => "24 hours",
"7d" => "7 days",
"30d" => "30 days",
_ => "24 hours",
}
}
fn parse_bucket(bucket: &str) -> Result<&'static str, ApiError> {
match bucket {
"1m" => Ok("1 minute"),
"5m" => Ok("5 minutes"),
"15m" => Ok("15 minutes"),
"1h" => Ok("1 hour"),
"6h" => Ok("6 hours"),
_ => Err((
StatusCode::BAD_REQUEST,
Json(
json!({"error": format!("Invalid bucket '{bucket}'. Valid: 1m, 5m, 15m, 1h, 6h")}),
),
)),
}
}
fn default_bucket_for_period(period: &str) -> &'static str {
match period {
"1h" => "1m",
"6h" => "5m",
"24h" => "15m",
"7d" => "1h",
"30d" => "6h",
_ => "15m",
}
}
// ---------------------------------------------------------------------------
// Endpoint 1: GET /api/admin/scraper/stats
// ---------------------------------------------------------------------------
#[derive(Deserialize)]
pub struct StatsParams {
#[serde(default = "default_period")]
period: String,
}
fn default_period() -> String {
"24h".to_string()
}
#[derive(Serialize, TS)]
#[ts(export)]
#[serde(rename_all = "camelCase")]
pub struct ScraperStatsResponse {
period: String,
#[ts(type = "number")]
total_scrapes: i64,
#[ts(type = "number")]
successful_scrapes: i64,
#[ts(type = "number")]
failed_scrapes: i64,
success_rate: Option<f64>,
avg_duration_ms: Option<f64>,
#[ts(type = "number")]
total_courses_changed: i64,
#[ts(type = "number")]
total_courses_fetched: i64,
#[ts(type = "number")]
total_audits_generated: i64,
#[ts(type = "number")]
pending_jobs: i64,
#[ts(type = "number")]
locked_jobs: i64,
}
pub async fn scraper_stats(
_admin: AdminUser,
State(state): State<AppState>,
Query(params): Query<StatsParams>,
) -> Result<Json<ScraperStatsResponse>, ApiError> {
let _duration = parse_period(&params.period)?;
let interval_str = period_to_interval_str(&params.period);
let row = sqlx::query(
"SELECT \
COUNT(*) AS total_scrapes, \
COUNT(*) FILTER (WHERE success) AS successful_scrapes, \
COUNT(*) FILTER (WHERE NOT success) AS failed_scrapes, \
(AVG(duration_ms) FILTER (WHERE success))::FLOAT8 AS avg_duration_ms, \
COALESCE(SUM(courses_changed) FILTER (WHERE success), 0) AS total_courses_changed, \
COALESCE(SUM(courses_fetched) FILTER (WHERE success), 0) AS total_courses_fetched, \
COALESCE(SUM(audits_generated) FILTER (WHERE success), 0) AS total_audits_generated \
FROM scrape_job_results \
WHERE completed_at > NOW() - $1::interval",
)
.bind(interval_str)
.fetch_one(&state.db_pool)
.await
.map_err(|e| {
tracing::error!(error = %e, "Failed to fetch scraper stats");
(
StatusCode::INTERNAL_SERVER_ERROR,
Json(json!({"error": "Failed to fetch scraper stats"})),
)
})?;
let total_scrapes: i64 = row.get("total_scrapes");
let successful_scrapes: i64 = row.get("successful_scrapes");
let failed_scrapes: i64 = row.get("failed_scrapes");
let avg_duration_ms: Option<f64> = row.get("avg_duration_ms");
let total_courses_changed: i64 = row.get("total_courses_changed");
let total_courses_fetched: i64 = row.get("total_courses_fetched");
let total_audits_generated: i64 = row.get("total_audits_generated");
let queue_row = sqlx::query(
"SELECT \
COUNT(*) FILTER (WHERE locked_at IS NULL) AS pending_jobs, \
COUNT(*) FILTER (WHERE locked_at IS NOT NULL) AS locked_jobs \
FROM scrape_jobs",
)
.fetch_one(&state.db_pool)
.await
.map_err(|e| {
tracing::error!(error = %e, "Failed to fetch queue stats");
(
StatusCode::INTERNAL_SERVER_ERROR,
Json(json!({"error": "Failed to fetch queue stats"})),
)
})?;
let pending_jobs: i64 = queue_row.get("pending_jobs");
let locked_jobs: i64 = queue_row.get("locked_jobs");
let success_rate = if total_scrapes > 0 {
Some(successful_scrapes as f64 / total_scrapes as f64)
} else {
None
};
Ok(Json(ScraperStatsResponse {
period: params.period,
total_scrapes,
successful_scrapes,
failed_scrapes,
success_rate,
avg_duration_ms,
total_courses_changed,
total_courses_fetched,
total_audits_generated,
pending_jobs,
locked_jobs,
}))
}
// ---------------------------------------------------------------------------
// Endpoint 2: GET /api/admin/scraper/timeseries
// ---------------------------------------------------------------------------
#[derive(Deserialize)]
pub struct TimeseriesParams {
#[serde(default = "default_period")]
period: String,
bucket: Option<String>,
}
#[derive(Serialize, TS)]
#[ts(export)]
#[serde(rename_all = "camelCase")]
pub struct TimeseriesResponse {
period: String,
bucket: String,
points: Vec<TimeseriesPoint>,
}
#[derive(Serialize, TS)]
#[ts(export)]
#[serde(rename_all = "camelCase")]
pub struct TimeseriesPoint {
timestamp: DateTime<Utc>,
#[ts(type = "number")]
scrape_count: i64,
#[ts(type = "number")]
success_count: i64,
#[ts(type = "number")]
error_count: i64,
#[ts(type = "number")]
courses_changed: i64,
avg_duration_ms: f64,
}
pub async fn scraper_timeseries(
_admin: AdminUser,
State(state): State<AppState>,
Query(params): Query<TimeseriesParams>,
) -> Result<Json<TimeseriesResponse>, ApiError> {
let _duration = parse_period(&params.period)?;
let period_interval = period_to_interval_str(&params.period);
let bucket_code = match &params.bucket {
Some(b) => {
// Validate the bucket
parse_bucket(b)?;
b.as_str()
}
None => default_bucket_for_period(&params.period),
};
let bucket_interval = parse_bucket(bucket_code)?;
let rows = sqlx::query(
"WITH buckets AS ( \
SELECT generate_series( \
date_bin($1::interval, NOW() - $2::interval, '2020-01-01'::timestamptz), \
date_bin($1::interval, NOW(), '2020-01-01'::timestamptz), \
$1::interval \
) AS bucket_start \
), \
raw AS ( \
SELECT date_bin($1::interval, completed_at, '2020-01-01'::timestamptz) AS bucket_start, \
COUNT(*)::BIGINT AS scrape_count, \
COUNT(*) FILTER (WHERE success)::BIGINT AS success_count, \
COUNT(*) FILTER (WHERE NOT success)::BIGINT AS error_count, \
COALESCE(SUM(courses_changed) FILTER (WHERE success), 0)::BIGINT AS courses_changed, \
COALESCE(AVG(duration_ms) FILTER (WHERE success), 0)::FLOAT8 AS avg_duration_ms \
FROM scrape_job_results \
WHERE completed_at > NOW() - $2::interval \
GROUP BY 1 \
) \
SELECT b.bucket_start, \
COALESCE(r.scrape_count, 0) AS scrape_count, \
COALESCE(r.success_count, 0) AS success_count, \
COALESCE(r.error_count, 0) AS error_count, \
COALESCE(r.courses_changed, 0) AS courses_changed, \
COALESCE(r.avg_duration_ms, 0) AS avg_duration_ms \
FROM buckets b \
LEFT JOIN raw r ON b.bucket_start = r.bucket_start \
ORDER BY b.bucket_start",
)
.bind(bucket_interval)
.bind(period_interval)
.fetch_all(&state.db_pool)
.await
.map_err(|e| {
tracing::error!(error = %e, "Failed to fetch scraper timeseries");
(
StatusCode::INTERNAL_SERVER_ERROR,
Json(json!({"error": "Failed to fetch scraper timeseries"})),
)
})?;
let points = rows
.iter()
.map(|row| TimeseriesPoint {
timestamp: row.get("bucket_start"),
scrape_count: row.get("scrape_count"),
success_count: row.get("success_count"),
error_count: row.get("error_count"),
courses_changed: row.get("courses_changed"),
avg_duration_ms: row.get("avg_duration_ms"),
})
.collect();
Ok(Json(TimeseriesResponse {
period: params.period,
bucket: bucket_code.to_string(),
points,
}))
}
// ---------------------------------------------------------------------------
// Endpoint 3: GET /api/admin/scraper/subjects
// ---------------------------------------------------------------------------
#[derive(Serialize, TS)]
#[ts(export)]
#[serde(rename_all = "camelCase")]
pub struct SubjectsResponse {
subjects: Vec<SubjectSummary>,
}
#[derive(Serialize, TS)]
#[ts(export)]
#[serde(rename_all = "camelCase")]
pub struct SubjectSummary {
subject: String,
subject_description: Option<String>,
#[ts(type = "number")]
tracked_course_count: i64,
schedule_state: String,
#[ts(type = "number")]
current_interval_secs: u64,
time_multiplier: u32,
last_scraped: DateTime<Utc>,
next_eligible_at: Option<DateTime<Utc>>,
#[ts(type = "number | null")]
cooldown_remaining_secs: Option<u64>,
avg_change_ratio: f64,
#[ts(type = "number")]
consecutive_zero_changes: i64,
#[ts(type = "number")]
recent_runs: i64,
#[ts(type = "number")]
recent_failures: i64,
}
pub async fn scraper_subjects(
_admin: AdminUser,
State(state): State<AppState>,
) -> Result<Json<SubjectsResponse>, ApiError> {
let raw_stats = scrape_jobs::fetch_subject_stats(&state.db_pool)
.await
.map_err(|e| {
tracing::error!(error = %e, "Failed to fetch subject stats");
(
StatusCode::INTERNAL_SERVER_ERROR,
Json(json!({"error": "Failed to fetch subject stats"})),
)
})?;
let now = Utc::now();
let multiplier = adaptive::time_of_day_multiplier(now);
// Look up subject descriptions from the reference cache
let ref_cache = state.reference_cache.read().await;
// Count tracked courses per subject for the current term
let term = Term::get_current().inner().to_string();
let course_counts: std::collections::HashMap<String, i64> = sqlx::query_as::<_, (String, i64)>(
"SELECT subject, COUNT(*)::BIGINT AS cnt FROM courses WHERE term_code = $1 GROUP BY subject",
)
.bind(&term)
.fetch_all(&state.db_pool)
.await
.map_err(|e| {
tracing::error!(error = %e, "Failed to fetch course counts");
(
StatusCode::INTERNAL_SERVER_ERROR,
Json(json!({"error": "Failed to fetch course counts"})),
)
})?
.into_iter()
.collect();
let subjects = raw_stats
.into_iter()
.map(|row| {
let stats: SubjectStats = row.into();
let schedule = adaptive::evaluate_subject(&stats, now, false);
let base_interval = adaptive::compute_base_interval(&stats);
let schedule_state = match &schedule {
SubjectSchedule::Eligible(_) => "eligible",
SubjectSchedule::Cooldown(_) => "cooldown",
SubjectSchedule::Paused => "paused",
SubjectSchedule::ReadOnly => "read_only",
};
let current_interval_secs = base_interval.as_secs() * multiplier as u64;
let (next_eligible_at, cooldown_remaining_secs) = match &schedule {
SubjectSchedule::Eligible(_) => (Some(now), Some(0)),
SubjectSchedule::Cooldown(remaining) => {
let remaining_secs = remaining.as_secs();
(
Some(now + chrono::Duration::seconds(remaining_secs as i64)),
Some(remaining_secs),
)
}
SubjectSchedule::Paused | SubjectSchedule::ReadOnly => (None, None),
};
let subject_description = ref_cache
.lookup("subject", &stats.subject)
.map(|s| s.to_string());
let tracked_course_count = course_counts.get(&stats.subject).copied().unwrap_or(0);
SubjectSummary {
subject: stats.subject,
subject_description,
tracked_course_count,
schedule_state: schedule_state.to_string(),
current_interval_secs,
time_multiplier: multiplier,
last_scraped: stats.last_completed,
next_eligible_at,
cooldown_remaining_secs,
avg_change_ratio: stats.avg_change_ratio,
consecutive_zero_changes: stats.consecutive_zero_changes,
recent_runs: stats.recent_runs,
recent_failures: stats.recent_failure_count,
}
})
.collect();
Ok(Json(SubjectsResponse { subjects }))
}
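The scheduling types consumed above come from `scraper::adaptive` and are not shown in this diff. Reconstructed from how the handler matches on them, they plausibly look like the sketch below; this is an assumption, not the real definition, and the `Eligible` payload type is a guess since the handler ignores it:

use std::time::Duration;

/// Assumed shape of the per-subject scheduling decision.
pub enum SubjectSchedule {
    /// May be scraped now (payload unused by the handler above).
    Eligible(Duration),
    /// Cooling down; the duration is the time remaining.
    Cooldown(Duration),
    /// Adaptive scheduler has paused this subject.
    Paused,
    /// Term is read-only; the subject is never scraped.
    ReadOnly,
}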
// ---------------------------------------------------------------------------
// Endpoint 4: GET /api/admin/scraper/subjects/{subject}
// ---------------------------------------------------------------------------
#[derive(Deserialize)]
pub struct SubjectDetailParams {
#[serde(default = "default_detail_limit")]
limit: i32,
}
fn default_detail_limit() -> i32 {
50
}
#[derive(Serialize, TS)]
#[ts(export)]
#[serde(rename_all = "camelCase")]
pub struct SubjectDetailResponse {
subject: String,
results: Vec<SubjectResultEntry>,
}
#[derive(Serialize, TS)]
#[ts(export)]
#[serde(rename_all = "camelCase")]
pub struct SubjectResultEntry {
#[ts(type = "number")]
id: i64,
completed_at: DateTime<Utc>,
duration_ms: i32,
success: bool,
error_message: Option<String>,
courses_fetched: Option<i32>,
courses_changed: Option<i32>,
courses_unchanged: Option<i32>,
audits_generated: Option<i32>,
metrics_generated: Option<i32>,
}
pub async fn scraper_subject_detail(
_admin: AdminUser,
State(state): State<AppState>,
Path(subject): Path<String>,
Query(params): Query<SubjectDetailParams>,
) -> Result<Json<SubjectDetailResponse>, ApiError> {
let limit = params.limit.clamp(1, 200);
let rows = sqlx::query(
"SELECT id, completed_at, duration_ms, success, error_message, \
courses_fetched, courses_changed, courses_unchanged, \
audits_generated, metrics_generated \
FROM scrape_job_results \
WHERE target_type = 'Subject' AND payload->>'subject' = $1 \
ORDER BY completed_at DESC \
LIMIT $2",
)
.bind(&subject)
.bind(limit)
.fetch_all(&state.db_pool)
.await
.map_err(|e| {
tracing::error!(error = %e, subject = %subject, "Failed to fetch subject detail");
(
StatusCode::INTERNAL_SERVER_ERROR,
Json(json!({"error": "Failed to fetch subject detail"})),
)
})?;
let results = rows
.iter()
.map(|row| SubjectResultEntry {
id: row.get("id"),
completed_at: row.get("completed_at"),
duration_ms: row.get("duration_ms"),
success: row.get("success"),
error_message: row.get("error_message"),
courses_fetched: row.get("courses_fetched"),
courses_changed: row.get("courses_changed"),
courses_unchanged: row.get("courses_unchanged"),
audits_generated: row.get("audits_generated"),
metrics_generated: row.get("metrics_generated"),
})
.collect();
Ok(Json(SubjectDetailResponse { subject, results }))
}
+114 -19
@@ -1,14 +1,18 @@
//! Embedded assets for the web frontend.
//!
//! Serves static assets embedded into the binary at compile time using rust-embed.
//! Supports content negotiation for pre-compressed variants (.br, .gz, .zst)
//! generated at build time by `web/scripts/compress-assets.ts`.
use axum::http::{HeaderMap, HeaderValue, header};
use dashmap::DashMap;
use rapidhash::v3::rapidhash_v3;
use rust_embed::RustEmbed;
use std::fmt;
use std::sync::LazyLock;
use super::encoding::{COMPRESSION_MIN_SIZE, ContentEncoding, parse_accepted_encodings};
/// Embedded web assets from the dist directory
#[derive(RustEmbed)]
#[folder = "web/dist/"]
@@ -21,17 +25,15 @@ pub struct WebAssets;
pub struct AssetHash(u64);
impl AssetHash {
/// Create a new AssetHash from u64 value
pub fn new(hash: u64) -> Self {
Self(hash)
}
/// Get the hash as a hex string
pub fn to_hex(&self) -> String {
format!("{:016x}", self.0)
}
/// Get the hash as a quoted hex string (for ETag headers)
pub fn quoted(&self) -> String {
format!("\"{}\"", self.to_hex())
}
@@ -51,12 +53,8 @@ pub struct AssetMetadata {
}
impl AssetMetadata {
/// Check if the etag matches the asset hash
pub fn etag_matches(&self, etag: &str) -> bool {
// Remove quotes if present (ETags are typically quoted)
let etag = etag.trim_matches('"');
// ETags generated from u64 hex should be 16 characters
etag.len() == 16
&& u64::from_str_radix(etag, 16)
.map(|parsed| parsed == self.hash.0)
@@ -68,28 +66,125 @@ impl AssetMetadata {
static ASSET_CACHE: LazyLock<DashMap<String, AssetMetadata>> = LazyLock::new(DashMap::new);
/// Get cached asset metadata for a file path, caching on-demand
/// Returns AssetMetadata containing MIME type and RapidHash hash
pub fn get_asset_metadata_cached(path: &str, content: &[u8]) -> AssetMetadata {
// Check cache first
if let Some(cached) = ASSET_CACHE.get(path) {
return cached.value().clone();
}
// Calculate MIME type
let mime_type = mime_guess::from_path(path)
.first()
.map(|mime| mime.to_string());
// Calculate RapidHash hash (using u64 native output size)
let hash = AssetHash::new(rapidhash_v3(content));
let metadata = AssetMetadata { mime_type, hash };
// Only cache if we haven't exceeded the limit
if ASSET_CACHE.len() < 1000 {
ASSET_CACHE.insert(path.to_string(), metadata.clone());
}
metadata
}
/// Set appropriate `Cache-Control` header based on the asset path.
///
/// SvelteKit outputs fingerprinted assets under `_app/immutable/` which are
/// safe to cache indefinitely. Other assets get shorter cache durations.
fn set_cache_control(headers: &mut HeaderMap, path: &str) {
let cache_control = if path.contains("immutable/") {
// SvelteKit fingerprinted assets — cache forever
"public, max-age=31536000, immutable"
} else if path == "index.html" || path.ends_with(".html") {
"public, max-age=300"
} else {
match path.rsplit_once('.').map(|(_, ext)| ext) {
Some("css" | "js") => "public, max-age=86400",
Some("png" | "jpg" | "jpeg" | "gif" | "svg" | "ico") => "public, max-age=2592000",
_ => "public, max-age=3600",
}
};
if let Ok(value) = HeaderValue::from_str(cache_control) {
headers.insert(header::CACHE_CONTROL, value);
}
}
/// Serve an embedded asset with content encoding negotiation.
///
/// Tries pre-compressed variants (.br, .gz, .zst) in the order preferred by
/// the client's `Accept-Encoding` header, falling back to the uncompressed
/// original. Returns `None` if the asset doesn't exist at all.
pub fn try_serve_asset_with_encoding(
path: &str,
request_headers: &HeaderMap,
) -> Option<axum::response::Response> {
use axum::response::IntoResponse;
let asset_path = path.strip_prefix('/').unwrap_or(path);
// Get the uncompressed original first (for metadata: MIME type, ETag)
let original = WebAssets::get(asset_path)?;
let metadata = get_asset_metadata_cached(asset_path, &original.data);
// Check ETag for conditional requests (304 Not Modified)
if let Some(etag) = request_headers.get(header::IF_NONE_MATCH)
&& etag.to_str().is_ok_and(|s| metadata.etag_matches(s))
{
return Some(axum::http::StatusCode::NOT_MODIFIED.into_response());
}
let mime_type = metadata
.mime_type
.unwrap_or_else(|| "application/octet-stream".to_string());
// Only attempt pre-compressed variants for files above the compression
// threshold — the build script skips smaller files too.
let accepted_encodings = if original.data.len() >= COMPRESSION_MIN_SIZE {
parse_accepted_encodings(request_headers)
} else {
vec![ContentEncoding::Identity]
};
for encoding in &accepted_encodings {
if *encoding == ContentEncoding::Identity {
continue;
}
let compressed_path = format!("{}{}", asset_path, encoding.extension());
if let Some(compressed) = WebAssets::get(&compressed_path) {
let mut response_headers = HeaderMap::new();
if let Ok(ct) = HeaderValue::from_str(&mime_type) {
response_headers.insert(header::CONTENT_TYPE, ct);
}
if let Some(ce) = encoding.header_value() {
response_headers.insert(header::CONTENT_ENCODING, ce);
}
if let Ok(etag_val) = HeaderValue::from_str(&metadata.hash.quoted()) {
response_headers.insert(header::ETAG, etag_val);
}
// Vary so caches distinguish by encoding
response_headers.insert(header::VARY, HeaderValue::from_static("Accept-Encoding"));
set_cache_control(&mut response_headers, asset_path);
return Some(
(
axum::http::StatusCode::OK,
response_headers,
compressed.data,
)
.into_response(),
);
}
}
// No compressed variant found — serve uncompressed original
let mut response_headers = HeaderMap::new();
if let Ok(ct) = HeaderValue::from_str(&mime_type) {
response_headers.insert(header::CONTENT_TYPE, ct);
}
if let Ok(etag_val) = HeaderValue::from_str(&metadata.hash.quoted()) {
response_headers.insert(header::ETAG, etag_val);
}
set_cache_control(&mut response_headers, asset_path);
Some((axum::http::StatusCode::OK, response_headers, original.data).into_response())
}
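A minimal sketch of how this negotiating server might be mounted as the router fallback, assuming a SvelteKit SPA where unknown paths should serve index.html (hypothetical wiring; the real setup lives in the router module):

use axum::http::{HeaderMap, StatusCode, Uri};
use axum::response::{IntoResponse, Response};

async fn spa_fallback(uri: Uri, headers: HeaderMap) -> Response {
    let path = uri.path().trim_start_matches('/');
    let path = if path.is_empty() { "index.html" } else { path };
    // Serve the asset if it exists; otherwise fall back to the SPA entry point
    // so client-side routes resolve. 404 only if even index.html is missing.
    try_serve_asset_with_encoding(path, &headers)
        .or_else(|| try_serve_asset_with_encoding("index.html", &headers))
        .unwrap_or_else(|| StatusCode::NOT_FOUND.into_response())
}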
+304
@@ -0,0 +1,304 @@
//! Discord OAuth2 authentication handlers.
//!
//! Provides login, callback, logout, and session introspection endpoints
//! for Discord OAuth2 authentication flow.
use axum::extract::{Extension, Query, State};
use axum::http::{HeaderMap, StatusCode, header};
use axum::response::{IntoResponse, Json, Redirect, Response};
use serde::Deserialize;
use serde_json::{Value, json};
use std::time::Duration;
use tracing::{error, info, warn};
use crate::state::AppState;
/// OAuth configuration passed as an Axum Extension.
#[derive(Clone)]
pub struct AuthConfig {
pub client_id: String,
pub client_secret: String,
/// Optional base URL override (e.g. "https://banner.xevion.dev").
/// When `None`, the redirect URI is derived from the request's Origin/Host header.
pub redirect_base: Option<String>,
}
const CALLBACK_PATH: &str = "/api/auth/callback";
/// Derive the origin (scheme + host + port) the user's browser is actually on.
///
/// Priority:
/// 1. Configured `redirect_base` (production override)
/// 2. `Referer` header — preserves the real browser origin even through
/// reverse proxies that rewrite `Host` (e.g. Vite dev proxy with
/// `changeOrigin: true`)
/// 3. `Origin` header (present on POST / CORS requests)
/// 4. `Host` header (last resort, may be rewritten by proxies)
fn resolve_origin(auth_config: &AuthConfig, headers: &HeaderMap) -> String {
if let Some(base) = &auth_config.redirect_base {
return base.trim_end_matches('/').to_owned();
}
// Referer carries the full browser URL; extract just the origin.
if let Some(referer) = headers.get(header::REFERER).and_then(|v| v.to_str().ok())
&& let Ok(parsed) = url::Url::parse(referer)
{
let origin = parsed.origin().unicode_serialization();
if origin != "null" {
return origin;
}
}
if let Some(origin) = headers.get("origin").and_then(|v| v.to_str().ok()) {
return origin.trim_end_matches('/').to_owned();
}
if let Some(host) = headers.get(header::HOST).and_then(|v| v.to_str().ok()) {
return format!("http://{host}");
}
"http://localhost:8080".to_owned()
}
#[derive(Deserialize)]
pub struct CallbackParams {
code: String,
state: String,
}
#[derive(Deserialize)]
struct TokenResponse {
access_token: String,
}
#[derive(Deserialize)]
struct DiscordUser {
id: String,
username: String,
avatar: Option<String>,
}
/// Extract the `session` cookie value from request headers.
fn extract_session_token(headers: &HeaderMap) -> Option<String> {
headers
.get(header::COOKIE)?
.to_str()
.ok()?
.split(';')
.find_map(|cookie| {
let cookie = cookie.trim();
cookie.strip_prefix("session=").map(|v| v.to_owned())
})
}
/// Build a `Set-Cookie` header value for the session cookie.
fn session_cookie(token: &str, max_age: i64, secure: bool) -> String {
let mut cookie = format!("session={token}; HttpOnly; SameSite=Lax; Path=/; Max-Age={max_age}");
if secure {
cookie.push_str("; Secure");
}
cookie
}
/// `GET /api/auth/login` — Redirect to Discord OAuth2 authorization page.
pub async fn auth_login(
State(state): State<AppState>,
Extension(auth_config): Extension<AuthConfig>,
headers: HeaderMap,
) -> Redirect {
let origin = resolve_origin(&auth_config, &headers);
let redirect_uri = format!("{origin}{CALLBACK_PATH}");
let csrf_state = state.oauth_state_store.generate(origin);
let redirect_uri_encoded = urlencoding::encode(&redirect_uri);
let url = format!(
"https://discord.com/oauth2/authorize\
?client_id={}\
&redirect_uri={redirect_uri_encoded}\
&response_type=code\
&scope=identify\
&state={csrf_state}",
auth_config.client_id,
);
Redirect::temporary(&url)
}
/// `GET /api/auth/callback` — Handle Discord OAuth2 callback.
pub async fn auth_callback(
State(state): State<AppState>,
Extension(auth_config): Extension<AuthConfig>,
Query(params): Query<CallbackParams>,
) -> Result<Response, (StatusCode, Json<Value>)> {
// 1. Validate CSRF state and recover the origin used during login
let origin = state
.oauth_state_store
.validate(&params.state)
.ok_or_else(|| {
warn!("OAuth callback with invalid CSRF state");
(
StatusCode::BAD_REQUEST,
Json(json!({ "error": "Invalid OAuth state" })),
)
})?;
// 2. Exchange authorization code for access token
let redirect_uri = format!("{origin}{CALLBACK_PATH}");
let client = reqwest::Client::new();
let token_response = client
.post("https://discord.com/api/oauth2/token")
.form(&[
("client_id", auth_config.client_id.as_str()),
("client_secret", auth_config.client_secret.as_str()),
("grant_type", "authorization_code"),
("code", params.code.as_str()),
("redirect_uri", redirect_uri.as_str()),
])
.send()
.await
.map_err(|e| {
error!(error = %e, "failed to exchange OAuth code for token");
(
StatusCode::BAD_GATEWAY,
Json(json!({ "error": "Failed to exchange code with Discord" })),
)
})?;
if !token_response.status().is_success() {
let status = token_response.status();
let body = token_response.text().await.unwrap_or_default();
error!(%status, %body, "Discord token exchange returned error");
return Err((
StatusCode::BAD_GATEWAY,
Json(json!({ "error": "Discord token exchange failed" })),
));
}
let token_data: TokenResponse = token_response.json().await.map_err(|e| {
error!(error = %e, "failed to parse Discord token response");
(
StatusCode::BAD_GATEWAY,
Json(json!({ "error": "Invalid token response from Discord" })),
)
})?;
// 3. Fetch Discord user profile
let discord_user: DiscordUser = client
.get("https://discord.com/api/users/@me")
.bearer_auth(&token_data.access_token)
.send()
.await
.map_err(|e| {
error!(error = %e, "failed to fetch Discord user profile");
(
StatusCode::BAD_GATEWAY,
Json(json!({ "error": "Failed to fetch Discord profile" })),
)
})?
.json()
.await
.map_err(|e| {
error!(error = %e, "failed to parse Discord user profile");
(
StatusCode::BAD_GATEWAY,
Json(json!({ "error": "Invalid user profile from Discord" })),
)
})?;
let discord_id: i64 = discord_user.id.parse().map_err(|_| {
error!(id = %discord_user.id, "Discord user ID is not a valid i64");
(
StatusCode::BAD_GATEWAY,
Json(json!({ "error": "Invalid Discord user ID" })),
)
})?;
// 4. Upsert user
let user = crate::data::users::upsert_user(
&state.db_pool,
discord_id,
&discord_user.username,
discord_user.avatar.as_deref(),
)
.await
.map_err(|e| {
error!(error = %e, "failed to upsert user");
(
StatusCode::INTERNAL_SERVER_ERROR,
Json(json!({ "error": "Database error" })),
)
})?;
info!(discord_id, username = %user.discord_username, "user authenticated via OAuth");
// 5. Create session
let session = crate::data::sessions::create_session(
&state.db_pool,
discord_id,
Duration::from_secs(crate::data::sessions::SESSION_DURATION_SECS),
)
.await
.map_err(|e| {
error!(error = %e, "failed to create session");
(
StatusCode::INTERNAL_SERVER_ERROR,
Json(json!({ "error": "Failed to create session" })),
)
})?;
// 6. Build response with session cookie
let secure = redirect_uri.starts_with("https://");
let cookie = session_cookie(
&session.id,
crate::data::sessions::SESSION_DURATION_SECS as i64,
secure,
);
let redirect_to = if user.is_admin { "/admin" } else { "/" };
Ok((
[(header::SET_COOKIE, cookie)],
Redirect::temporary(redirect_to),
)
.into_response())
}
/// `POST /api/auth/logout` — Destroy the current session.
pub async fn auth_logout(State(state): State<AppState>, headers: HeaderMap) -> Response {
if let Some(token) = extract_session_token(&headers) {
if let Err(e) = crate::data::sessions::delete_session(&state.db_pool, &token).await {
warn!(error = %e, "failed to delete session from database");
}
state.session_cache.evict(&token);
}
let cookie = session_cookie("", 0, false);
(
StatusCode::OK,
[(header::SET_COOKIE, cookie)],
Json(json!({ "ok": true })),
)
.into_response()
}
/// `GET /api/auth/me` — Return the current authenticated user's info.
pub async fn auth_me(
State(state): State<AppState>,
headers: HeaderMap,
) -> Result<Json<Value>, StatusCode> {
let token = extract_session_token(&headers).ok_or(StatusCode::UNAUTHORIZED)?;
let user = state
.session_cache
.get_user(&token)
.await
.ok_or(StatusCode::UNAUTHORIZED)?;
Ok(Json(json!({
"discordId": user.discord_id.to_string(),
"username": user.discord_username,
"avatarHash": user.discord_avatar_hash,
"isAdmin": user.is_admin,
})))
}
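The `oauth_state_store` used above is not part of this diff. A plausible minimal sketch, assuming a one-shot in-memory token-to-origin map and the rand 0.8 API (the real implementation likely adds expiry):

use dashmap::DashMap;
use rand::{Rng, distributions::Alphanumeric};

pub struct OAuthStateStore {
    /// CSRF token -> origin recorded at login time.
    states: DashMap<String, String>,
}

impl OAuthStateStore {
    pub fn generate(&self, origin: String) -> String {
        let token: String = rand::thread_rng()
            .sample_iter(&Alphanumeric)
            .take(32)
            .map(char::from)
            .collect();
        self.states.insert(token.clone(), origin);
        token
    }

    /// One-shot validation: consumes the token and returns the stored origin.
    pub fn validate(&self, token: &str) -> Option<String> {
        self.states.remove(token).map(|(_, origin)| origin)
    }
}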
+136
@@ -0,0 +1,136 @@
//! Web API endpoints for calendar export (ICS download + Google Calendar redirect).
use axum::{
extract::{Path, State},
http::{StatusCode, header},
response::{IntoResponse, Redirect, Response},
};
use crate::calendar::{CalendarCourse, generate_gcal_url, generate_ics};
use crate::data::models::DbMeetingTime;
use crate::state::AppState;
/// Fetch course + meeting times, build a `CalendarCourse`.
async fn load_calendar_course(
state: &AppState,
term: &str,
crn: &str,
) -> Result<(CalendarCourse, Vec<DbMeetingTime>), (StatusCode, String)> {
let course = crate::data::courses::get_course_by_crn(&state.db_pool, crn, term)
.await
.map_err(|e| {
tracing::error!(error = %e, "Calendar: course lookup failed");
(
StatusCode::INTERNAL_SERVER_ERROR,
"Lookup failed".to_string(),
)
})?
.ok_or_else(|| (StatusCode::NOT_FOUND, "Course not found".to_string()))?;
let instructors = crate::data::courses::get_course_instructors(&state.db_pool, course.id)
.await
.unwrap_or_default();
let primary_instructor = instructors
.iter()
.find(|i| i.is_primary)
.or(instructors.first())
.map(|i| i.display_name.clone());
let meeting_times: Vec<DbMeetingTime> =
serde_json::from_value(course.meeting_times.clone()).unwrap_or_default();
let cal_course = CalendarCourse {
crn: course.crn.clone(),
subject: course.subject.clone(),
course_number: course.course_number.clone(),
title: course.title.clone(),
sequence_number: course.sequence_number.clone(),
primary_instructor,
};
Ok((cal_course, meeting_times))
}
/// `GET /api/courses/{term}/{crn}/calendar.ics`
///
/// Returns an ICS file download for the course.
pub async fn course_ics(
State(state): State<AppState>,
Path((term, crn)): Path<(String, String)>,
) -> Result<Response, (StatusCode, String)> {
let (cal_course, meeting_times) = load_calendar_course(&state, &term, &crn).await?;
if meeting_times.is_empty() {
return Err((
StatusCode::NOT_FOUND,
"No meeting times found for this course".to_string(),
));
}
let result = generate_ics(&cal_course, &meeting_times).map_err(|e| {
tracing::error!(error = %e, "ICS generation failed");
(
StatusCode::INTERNAL_SERVER_ERROR,
"Failed to generate ICS file".to_string(),
)
})?;
let response = (
[
(header::CONTENT_TYPE, "text/calendar; charset=utf-8"),
(
header::CONTENT_DISPOSITION,
&format!("attachment; filename=\"{}\"", result.filename),
),
(header::CACHE_CONTROL, "no-cache"),
],
result.content,
)
.into_response();
Ok(response)
}
/// `GET /api/courses/{term}/{crn}/gcal`
///
/// Redirects to Google Calendar with a pre-filled event for the first meeting time.
/// If multiple meeting times exist, uses the first one with scheduled days/times.
pub async fn course_gcal(
State(state): State<AppState>,
Path((term, crn)): Path<(String, String)>,
) -> Result<Response, (StatusCode, String)> {
let (cal_course, meeting_times) = load_calendar_course(&state, &term, &crn).await?;
if meeting_times.is_empty() {
return Err((
StatusCode::NOT_FOUND,
"No meeting times found for this course".to_string(),
));
}
// Prefer the first meeting time that has actual days/times scheduled
let mt = meeting_times
.iter()
.find(|mt| {
mt.begin_time.is_some()
&& (mt.monday
|| mt.tuesday
|| mt.wednesday
|| mt.thursday
|| mt.friday
|| mt.saturday
|| mt.sunday)
})
.unwrap_or(&meeting_times[0]);
let url = generate_gcal_url(&cal_course, mt).map_err(|e| {
tracing::error!(error = %e, "Google Calendar URL generation failed");
(
StatusCode::INTERNAL_SERVER_ERROR,
"Failed to generate Google Calendar URL".to_string(),
)
})?;
Ok(Redirect::temporary(&url).into_response())
}
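For reference, `generate_gcal_url` presumably targets Google Calendar's event-template endpoint. An illustrative sketch of the URL shape with made-up values (parameter names are Google's; the real builder lives in `crate::calendar`):

fn example_gcal_url() -> String {
    // action=TEMPLATE pre-fills an event; `dates` is the start/end pair and
    // `recur` can carry an RRULE for weekly class meetings.
    format!(
        "https://calendar.google.com/calendar/render?action=TEMPLATE\
         &text={title}&dates={start}/{end}&location={loc}\
         &recur=RRULE:FREQ=WEEKLY;BYDAY=MO,WE",
        title = urlencoding::encode("Example Course Title"),
        start = "20260120T100000",
        end = "20260120T111500",
        loc = urlencoding::encode("Main Building 1.100"),
    )
}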
+196
@@ -0,0 +1,196 @@
//! Content encoding negotiation for pre-compressed asset serving.
//!
//! Parses Accept-Encoding headers with quality values and returns
//! supported encodings in priority order for content negotiation.
use axum::http::{HeaderMap, HeaderValue, header};
/// Minimum size threshold for compression (bytes).
///
/// Must match `MIN_SIZE` in `web/scripts/compress-assets.ts`.
pub const COMPRESSION_MIN_SIZE: usize = 512;
/// Supported content encodings in priority order (best compression first).
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum ContentEncoding {
Zstd,
Brotli,
Gzip,
Identity,
}
impl ContentEncoding {
/// File extension suffix for pre-compressed variant lookup.
#[inline]
pub fn extension(&self) -> &'static str {
match self {
Self::Zstd => ".zst",
Self::Brotli => ".br",
Self::Gzip => ".gz",
Self::Identity => "",
}
}
/// `Content-Encoding` header value, or `None` for identity.
#[inline]
pub fn header_value(&self) -> Option<HeaderValue> {
match self {
Self::Zstd => Some(HeaderValue::from_static("zstd")),
Self::Brotli => Some(HeaderValue::from_static("br")),
Self::Gzip => Some(HeaderValue::from_static("gzip")),
Self::Identity => None,
}
}
/// Default priority when quality values are equal (higher = better).
#[inline]
fn default_priority(&self) -> u8 {
match self {
Self::Zstd => 4,
Self::Brotli => 3,
Self::Gzip => 2,
Self::Identity => 1,
}
}
}
/// Parse `Accept-Encoding` header and return supported encodings in priority order.
///
/// Supports quality values: `Accept-Encoding: gzip;q=0.8, br;q=1.0, zstd`
/// When quality values are equal: zstd > brotli > gzip > identity.
/// Encodings with `q=0` are excluded.
pub fn parse_accepted_encodings(headers: &HeaderMap) -> Vec<ContentEncoding> {
let Some(accept) = headers
.get(header::ACCEPT_ENCODING)
.and_then(|v| v.to_str().ok())
else {
return vec![ContentEncoding::Identity];
};
let mut encodings: Vec<(ContentEncoding, f32)> = Vec::new();
for part in accept.split(',') {
let part = part.trim();
if part.is_empty() {
continue;
}
let (encoding_str, quality) = if let Some((enc, params)) = part.split_once(';') {
let q = params
.split(';')
.find_map(|p| p.trim().strip_prefix("q="))
.and_then(|q| q.parse::<f32>().ok())
.unwrap_or(1.0);
(enc.trim(), q)
} else {
(part, 1.0)
};
if quality == 0.0 {
continue;
}
let encoding = match encoding_str.to_lowercase().as_str() {
"zstd" => ContentEncoding::Zstd,
"br" | "brotli" => ContentEncoding::Brotli,
"gzip" | "x-gzip" => ContentEncoding::Gzip,
"*" => ContentEncoding::Gzip,
"identity" => ContentEncoding::Identity,
_ => continue,
};
encodings.push((encoding, quality));
}
// Sort by quality (desc), then default priority (desc)
encodings.sort_by(|a, b| {
b.1.partial_cmp(&a.1)
.unwrap_or(std::cmp::Ordering::Equal)
.then_with(|| b.0.default_priority().cmp(&a.0.default_priority()))
});
if encodings.is_empty() {
vec![ContentEncoding::Identity]
} else {
encodings.into_iter().map(|(e, _)| e).collect()
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_parse_all_encodings() {
let mut headers = HeaderMap::new();
headers.insert(header::ACCEPT_ENCODING, "gzip, br, zstd".parse().unwrap());
let encodings = parse_accepted_encodings(&headers);
assert_eq!(encodings[0], ContentEncoding::Zstd);
assert_eq!(encodings[1], ContentEncoding::Brotli);
assert_eq!(encodings[2], ContentEncoding::Gzip);
}
#[test]
fn test_parse_with_quality_values() {
let mut headers = HeaderMap::new();
headers.insert(
header::ACCEPT_ENCODING,
"gzip;q=1.0, br;q=0.5, zstd;q=0.8".parse().unwrap(),
);
let encodings = parse_accepted_encodings(&headers);
assert_eq!(encodings[0], ContentEncoding::Gzip);
assert_eq!(encodings[1], ContentEncoding::Zstd);
assert_eq!(encodings[2], ContentEncoding::Brotli);
}
#[test]
fn test_no_header_returns_identity() {
let headers = HeaderMap::new();
let encodings = parse_accepted_encodings(&headers);
assert_eq!(encodings, vec![ContentEncoding::Identity]);
}
#[test]
fn test_disabled_encoding_excluded() {
let mut headers = HeaderMap::new();
headers.insert(
header::ACCEPT_ENCODING,
"zstd;q=0, br, gzip".parse().unwrap(),
);
let encodings = parse_accepted_encodings(&headers);
assert_eq!(encodings[0], ContentEncoding::Brotli);
assert_eq!(encodings[1], ContentEncoding::Gzip);
assert!(!encodings.contains(&ContentEncoding::Zstd));
}
#[test]
fn test_real_chrome_header() {
let mut headers = HeaderMap::new();
headers.insert(
header::ACCEPT_ENCODING,
"gzip, deflate, br, zstd".parse().unwrap(),
);
assert_eq!(parse_accepted_encodings(&headers)[0], ContentEncoding::Zstd);
}
#[test]
fn test_extensions() {
assert_eq!(ContentEncoding::Zstd.extension(), ".zst");
assert_eq!(ContentEncoding::Brotli.extension(), ".br");
assert_eq!(ContentEncoding::Gzip.extension(), ".gz");
assert_eq!(ContentEncoding::Identity.extension(), "");
}
#[test]
fn test_header_values() {
assert_eq!(
ContentEncoding::Zstd.header_value().unwrap(),
HeaderValue::from_static("zstd")
);
assert_eq!(
ContentEncoding::Brotli.header_value().unwrap(),
HeaderValue::from_static("br")
);
assert!(ContentEncoding::Identity.header_value().is_none());
}
}
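// Illustrative sketch (assumed usage, not part of this diff): a static file
// handler can walk the negotiated priority list and probe for pre-compressed
// variants by filename suffix. `asset_exists` is a hypothetical lookup
// against the embedded asset set.
fn pick_variant(
    path: &str,
    headers: &HeaderMap,
    asset_exists: impl Fn(&str) -> bool,
) -> Option<(String, ContentEncoding)> {
    for enc in parse_accepted_encodings(headers) {
        // For Identity the suffix is empty, so this probes the plain file.
        let candidate = format!("{path}{}", enc.extension());
        if asset_exists(&candidate) {
            return Some((candidate, enc));
        }
    }
    None
}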
+74
@@ -0,0 +1,74 @@
//! Axum extractors for authentication and authorization.
use axum::extract::FromRequestParts;
use axum::http::{StatusCode, header};
use axum::response::Json;
use http::request::Parts;
use serde_json::json;
use crate::data::models::User;
use crate::state::AppState;
/// Extractor that resolves the session cookie to an authenticated [`User`].
///
/// Returns 401 if no valid session cookie is present.
pub struct AuthUser(pub User);
impl FromRequestParts<AppState> for AuthUser {
type Rejection = (StatusCode, Json<serde_json::Value>);
async fn from_request_parts(
parts: &mut Parts,
state: &AppState,
) -> Result<Self, Self::Rejection> {
let token = parts
.headers
.get(header::COOKIE)
.and_then(|v| v.to_str().ok())
.and_then(|cookies| {
cookies
.split(';')
.find_map(|c| c.trim().strip_prefix("session=").map(|v| v.to_owned()))
})
.ok_or_else(|| {
(
StatusCode::UNAUTHORIZED,
Json(json!({"error": "unauthorized", "message": "No session cookie"})),
)
})?;
let user = state.session_cache.get_user(&token).await.ok_or_else(|| {
(
StatusCode::UNAUTHORIZED,
Json(json!({"error": "unauthorized", "message": "Invalid or expired session"})),
)
})?;
Ok(AuthUser(user))
}
}
/// Extractor that requires an authenticated admin user.
///
/// Returns 401 if not authenticated, 403 if not admin.
pub struct AdminUser(pub User);
impl FromRequestParts<AppState> for AdminUser {
type Rejection = (StatusCode, Json<serde_json::Value>);
async fn from_request_parts(
parts: &mut Parts,
state: &AppState,
) -> Result<Self, Self::Rejection> {
let AuthUser(user) = AuthUser::from_request_parts(parts, state).await?;
if !user.is_admin {
return Err((
StatusCode::FORBIDDEN,
Json(json!({"error": "forbidden", "message": "Admin access required"})),
));
}
Ok(AdminUser(user))
}
}
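// Illustrative sketch (assumed usage, not part of this diff): the extractors
// compose directly into handler signatures, so authorization is declarative.
// The handler names and bodies here are hypothetical.
async fn whoami(AuthUser(user): AuthUser) -> Json<serde_json::Value> {
    Json(json!({ "discordId": user.discord_id.to_string() }))
}
async fn admin_only_action(AdminUser(admin): AdminUser) -> StatusCode {
    // Only reached when the session resolves to a user with `is_admin` set.
    tracing::info!(admin = %admin.discord_id, "admin action");
    StatusCode::NO_CONTENT
}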
+12
@@ -1,7 +1,19 @@
//! Web API module for the banner application.
pub mod admin;
pub mod admin_rmp;
pub mod admin_scraper;
#[cfg(feature = "embed-assets")]
pub mod assets;
pub mod auth;
pub mod calendar;
#[cfg(feature = "embed-assets")]
pub mod encoding;
pub mod extractors;
pub mod routes;
pub mod schedule_cache;
pub mod session_cache;
pub mod timeline;
pub mod ws;
pub use routes::*;
+228 -110
@@ -1,20 +1,26 @@
//! Web API endpoints for Banner bot monitoring and metrics.
use axum::{
Extension, Router,
body::Body,
extract::{Path, Query, Request, State},
http::StatusCode as AxumStatusCode,
response::{Json, Response},
routing::{get, post, put},
};
use crate::web::admin;
use crate::web::admin_rmp;
use crate::web::admin_scraper;
use crate::web::auth::{self, AuthConfig};
use crate::web::calendar;
use crate::web::timeline;
use crate::web::ws;
#[cfg(feature = "embed-assets")]
use axum::{
http::{HeaderMap, StatusCode, Uri},
response::IntoResponse,
};
#[cfg(feature = "embed-assets")]
use http::header;
use serde::{Deserialize, Serialize};
use serde_json::{Value, json};
use std::{collections::BTreeMap, time::Duration};
@@ -24,63 +30,90 @@ use crate::state::AppState;
use crate::status::ServiceStatus;
#[cfg(not(feature = "embed-assets"))]
use tower_http::cors::{Any, CorsLayer};
use tower_http::{
classify::ServerErrorsFailureClass, compression::CompressionLayer, timeout::TimeoutLayer,
trace::TraceLayer,
};
use tracing::{Span, debug, trace, warn};
#[cfg(feature = "embed-assets")]
use crate::web::assets::try_serve_asset_with_encoding;
/// Creates the web server router
pub fn create_router(app_state: AppState, auth_config: AuthConfig) -> Router {
let api_router = Router::new()
.route("/health", get(health))
.route("/status", get(status))
.route("/metrics", get(metrics))
.route("/courses/search", get(search_courses))
.route("/courses/{term}/{crn}", get(get_course))
.route(
"/courses/{term}/{crn}/calendar.ics",
get(calendar::course_ics),
)
.route("/courses/{term}/{crn}/gcal", get(calendar::course_gcal))
.route("/terms", get(get_terms))
.route("/subjects", get(get_subjects))
.route("/reference/{category}", get(get_reference))
.route("/timeline", post(timeline::timeline))
.with_state(app_state.clone());
let auth_router = Router::new()
.route("/auth/login", get(auth::auth_login))
.route("/auth/callback", get(auth::auth_callback))
.route("/auth/logout", post(auth::auth_logout))
.route("/auth/me", get(auth::auth_me))
.layer(Extension(auth_config))
.with_state(app_state.clone());
let admin_router = Router::new()
.route("/admin/status", get(admin::admin_status))
.route("/admin/users", get(admin::list_users))
.route(
"/admin/users/{discord_id}/admin",
put(admin::set_user_admin),
)
.route("/admin/scrape-jobs", get(admin::list_scrape_jobs))
.route("/admin/scrape-jobs/ws", get(ws::scrape_jobs_ws))
.route("/admin/audit-log", get(admin::list_audit_log))
.route("/admin/instructors", get(admin_rmp::list_instructors))
.route("/admin/instructors/{id}", get(admin_rmp::get_instructor))
.route(
"/admin/instructors/{id}/match",
post(admin_rmp::match_instructor),
)
.route(
"/admin/instructors/{id}/reject-candidate",
post(admin_rmp::reject_candidate),
)
.route(
"/admin/instructors/{id}/reject-all",
post(admin_rmp::reject_all),
)
.route(
"/admin/instructors/{id}/unmatch",
post(admin_rmp::unmatch_instructor),
)
.route("/admin/rmp/rescore", post(admin_rmp::rescore))
.route("/admin/scraper/stats", get(admin_scraper::scraper_stats))
.route(
"/admin/scraper/timeseries",
get(admin_scraper::scraper_timeseries),
)
.route(
"/admin/scraper/subjects",
get(admin_scraper::scraper_subjects),
)
.route(
"/admin/scraper/subjects/{subject}",
get(admin_scraper::scraper_subject_detail),
)
.with_state(app_state);
let mut router = Router::new()
.nest("/api", api_router)
.nest("/api", auth_router)
.nest("/api", admin_router);
// When embed-assets feature is enabled, serve embedded static assets
#[cfg(feature = "embed-assets")]
@@ -100,6 +133,13 @@ pub fn create_router(app_state: AppState) -> Router {
}
router.layer((
// Compress API responses (gzip/brotli/zstd). Pre-compressed static
// assets already have Content-Encoding set, so tower-http skips them.
CompressionLayer::new()
.zstd(true)
.br(true)
.gzip(true)
.quality(tower_http::CompressionLevel::Fastest),
TraceLayer::new_for_http()
.make_span_with(|request: &Request<Body>| {
tracing::debug_span!("request", path = request.uri().path())
@@ -146,71 +186,35 @@ pub fn create_router(app_state: AppState) -> Router {
))
}
/// SPA fallback handler with content encoding negotiation.
///
/// Serves embedded static assets with pre-compressed variants when available,
/// falling back to `index.html` for SPA client-side routing.
#[cfg(feature = "embed-assets")]
async fn fallback(request: Request) -> axum::response::Response {
let uri = request.uri().clone();
let headers = request.headers().clone();
handle_spa_fallback(uri, headers).await
}
#[cfg(feature = "embed-assets")]
async fn handle_spa_fallback(uri: Uri, request_headers: HeaderMap) -> axum::response::Response {
let path = uri.path();
// Try serving the exact asset (with encoding negotiation)
if let Some(response) = try_serve_asset_with_encoding(path, &request_headers) {
return response;
}
// SvelteKit assets under _app/ or assets/ that don't exist are a hard 404,
// never an SPA fallback to index.html
let trimmed = path.trim_start_matches('/');
if trimmed.starts_with("_app/") || trimmed.starts_with("assets/") {
return (StatusCode::NOT_FOUND, "Asset not found").into_response();
}
// SPA fallback: serve index.html with encoding negotiation
match try_serve_asset_with_encoding("/index.html", &request_headers) {
Some(response) => response,
None => (
StatusCode::INTERNAL_SERVER_ERROR,
"Failed to load index.html",
@@ -284,20 +288,130 @@ async fn status(State(state): State<AppState>) -> Json<StatusResponse> {
}
/// Metrics endpoint for monitoring
async fn metrics(
State(state): State<AppState>,
Query(params): Query<MetricsParams>,
) -> Result<Json<Value>, (AxumStatusCode, String)> {
let limit = params.limit.clamp(1, 5000);
// Parse range shorthand, defaulting to 24h
let range_str = params.range.as_deref().unwrap_or("24h");
let duration = match range_str {
"1h" => chrono::Duration::hours(1),
"6h" => chrono::Duration::hours(6),
"24h" => chrono::Duration::hours(24),
"7d" => chrono::Duration::days(7),
"30d" => chrono::Duration::days(30),
_ => {
return Err((
AxumStatusCode::BAD_REQUEST,
format!("Invalid range '{range_str}'. Valid: 1h, 6h, 24h, 7d, 30d"),
));
}
};
let since = chrono::Utc::now() - duration;
// Resolve course_id: explicit param takes priority, then term+crn lookup
let course_id = if let Some(id) = params.course_id {
Some(id)
} else if let (Some(term), Some(crn)) = (params.term.as_deref(), params.crn.as_deref()) {
let row: Option<(i32,)> =
sqlx::query_as("SELECT id FROM courses WHERE term_code = $1 AND crn = $2")
.bind(term)
.bind(crn)
.fetch_optional(&state.db_pool)
.await
.map_err(|e| {
tracing::error!(error = %e, "Course lookup for metrics failed");
(
AxumStatusCode::INTERNAL_SERVER_ERROR,
"Course lookup failed".to_string(),
)
})?;
row.map(|(id,)| id)
} else {
None
};
// Build query dynamically based on filters
let metrics: Vec<(i32, i32, chrono::DateTime<chrono::Utc>, i32, i32, i32)> =
if let Some(cid) = course_id {
sqlx::query_as(
"SELECT id, course_id, timestamp, enrollment, wait_count, seats_available \
FROM course_metrics \
WHERE course_id = $1 AND timestamp >= $2 \
ORDER BY timestamp DESC \
LIMIT $3",
)
.bind(cid)
.bind(since)
.bind(limit)
.fetch_all(&state.db_pool)
.await
} else {
sqlx::query_as(
"SELECT id, course_id, timestamp, enrollment, wait_count, seats_available \
FROM course_metrics \
WHERE timestamp >= $1 \
ORDER BY timestamp DESC \
LIMIT $2",
)
.bind(since)
.bind(limit)
.fetch_all(&state.db_pool)
.await
}
.map_err(|e| {
tracing::error!(error = %e, "Metrics query failed");
(
AxumStatusCode::INTERNAL_SERVER_ERROR,
"Metrics query failed".to_string(),
)
})?;
let count = metrics.len();
let metrics_json: Vec<Value> = metrics
.into_iter()
.map(
|(id, course_id, timestamp, enrollment, wait_count, seats_available)| {
json!({
"id": id,
"courseId": course_id,
"timestamp": timestamp.to_rfc3339(),
"enrollment": enrollment,
"waitCount": wait_count,
"seatsAvailable": seats_available,
})
},
)
.collect();
Ok(Json(json!({
"metrics": metrics_json,
"count": count,
"timestamp": chrono::Utc::now().to_rfc3339(),
})))
}
// ============================================================
// Course search & detail API
// ============================================================
#[derive(Deserialize)]
struct MetricsParams {
course_id: Option<i32>,
term: Option<String>,
crn: Option<String>,
/// Shorthand durations: "1h", "6h", "24h", "7d", "30d"
range: Option<String>,
#[serde(default = "default_metrics_limit")]
limit: i32,
}
fn default_metrics_limit() -> i32 {
500
}
#[derive(Deserialize)]
struct SubjectsParams {
term: String,
@@ -363,12 +477,14 @@ pub struct CourseResponse {
#[serde(rename_all = "camelCase")]
#[ts(export)]
pub struct InstructorResponse {
instructor_id: i32,
banner_id: String,
display_name: String,
email: String,
is_primary: bool,
rmp_rating: Option<f32>,
rmp_num_ratings: Option<i32>,
rmp_legacy_id: Option<i32>,
}
#[derive(Serialize, TS)]
@@ -397,12 +513,14 @@ fn build_course_response(
let instructors = instructors
.into_iter()
.map(|i| InstructorResponse {
instructor_id: i.instructor_id,
banner_id: i.banner_id,
display_name: i.display_name,
email: i.email,
is_primary: i.is_primary,
rmp_rating: i.avg_rating,
rmp_num_ratings: i.num_ratings,
rmp_legacy_id: i.rmp_legacy_id,
})
.collect();
+443
@@ -0,0 +1,443 @@
//! ISR-style schedule cache for timeline enrollment queries.
//!
//! Loads all courses with their meeting times from the database, parses the
//! JSONB meeting times into a compact in-memory representation, and caches
//! the result. The cache is refreshed in the background every hour using a
//! stale-while-revalidate pattern with singleflight deduplication — readers
//! always get the current cached value instantly, never blocking on a refresh.
use chrono::NaiveDate;
use serde_json::Value;
use sqlx::PgPool;
use std::sync::Arc;
use std::sync::atomic::{AtomicBool, Ordering};
use tokio::sync::watch;
use tracing::{debug, error, info};
/// How often the cache is considered fresh (1 hour).
const REFRESH_INTERVAL: std::time::Duration = std::time::Duration::from_secs(60 * 60);
// ── Compact schedule representation ─────────────────────────────────
/// A single meeting time block, pre-parsed for fast filtering.
#[derive(Debug, Clone)]
pub(crate) struct ParsedSchedule {
/// Bitmask of days: bit 0 = Monday, bit 6 = Sunday.
days: u8,
/// Minutes since midnight for start (e.g. 600 = 10:00).
begin_minutes: u16,
/// Minutes since midnight for end (e.g. 650 = 10:50).
end_minutes: u16,
/// First day the meeting pattern is active.
start_date: NaiveDate,
/// Last day the meeting pattern is active.
end_date: NaiveDate,
}
/// A course with its enrollment and pre-parsed schedule blocks.
#[derive(Debug, Clone)]
pub(crate) struct CachedCourse {
pub(crate) subject: String,
pub(crate) enrollment: i32,
pub(crate) schedules: Vec<ParsedSchedule>,
}
/// The immutable snapshot of all courses, swapped atomically on refresh.
#[derive(Debug, Clone)]
pub(crate) struct ScheduleSnapshot {
pub(crate) courses: Vec<CachedCourse>,
refreshed_at: std::time::Instant,
}
// ── Cache handle ────────────────────────────────────────────────────
/// Shared schedule cache. Clone-cheap (all `Arc`-wrapped internals).
#[derive(Clone)]
pub struct ScheduleCache {
/// Current snapshot, updated via `watch` channel for lock-free reads.
rx: watch::Receiver<Arc<ScheduleSnapshot>>,
/// Sender side, held to push new snapshots.
tx: Arc<watch::Sender<Arc<ScheduleSnapshot>>>,
/// Singleflight guard — true while a refresh task is in flight.
refreshing: Arc<AtomicBool>,
/// Database pool for refresh queries.
pool: PgPool,
}
impl ScheduleCache {
/// Create a new cache with an empty initial snapshot.
pub(crate) fn new(pool: PgPool) -> Self {
let empty = Arc::new(ScheduleSnapshot {
courses: Vec::new(),
refreshed_at: std::time::Instant::now(),
});
let (tx, rx) = watch::channel(empty);
Self {
rx,
tx: Arc::new(tx),
refreshing: Arc::new(AtomicBool::new(false)),
pool,
}
}
/// Get the current snapshot. Never blocks on refresh.
pub(crate) fn snapshot(&self) -> Arc<ScheduleSnapshot> {
self.rx.borrow().clone()
}
/// Check freshness and trigger a background refresh if stale.
/// Always returns immediately — the caller uses the current snapshot.
pub(crate) fn ensure_fresh(&self) {
let snap = self.rx.borrow();
if snap.refreshed_at.elapsed() < REFRESH_INTERVAL {
return;
}
// Singleflight: only one refresh at a time.
if self
.refreshing
.compare_exchange(false, true, Ordering::AcqRel, Ordering::Acquire)
.is_err()
{
debug!("Schedule cache refresh already in flight, skipping");
return;
}
let cache = self.clone();
tokio::spawn(async move {
match load_snapshot(&cache.pool).await {
Ok(snap) => {
let count = snap.courses.len();
let _ = cache.tx.send(Arc::new(snap));
info!(courses = count, "Schedule cache refreshed");
}
Err(e) => {
error!(error = %e, "Failed to refresh schedule cache");
}
}
cache.refreshing.store(false, Ordering::Release);
});
}
/// Force an initial load (blocking). Call once at startup.
pub(crate) async fn load(&self) -> anyhow::Result<()> {
let snap = load_snapshot(&self.pool).await?;
let count = snap.courses.len();
let _ = self.tx.send(Arc::new(snap));
info!(courses = count, "Schedule cache initially loaded");
Ok(())
}
}
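// Illustrative sketch (assumed wiring, not part of this diff): one blocking
// `load` at startup to fail fast, then `ensure_fresh` + `snapshot` on every
// read path, which never waits on a refresh.
async fn example_startup_and_read(pool: PgPool) -> anyhow::Result<()> {
    let cache = ScheduleCache::new(pool);
    cache.load().await?; // initial fill; errors surface at boot
    // Per request:
    cache.ensure_fresh(); // may spawn a background refresh if stale
    let snap = cache.snapshot(); // lock-free read of the current snapshot
    tracing::info!(courses = snap.courses.len(), "serving from snapshot");
    Ok(())
}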
// ── Database loading ────────────────────────────────────────────────
/// Row returned from the lightweight schedule query.
#[derive(sqlx::FromRow)]
struct ScheduleRow {
subject: String,
enrollment: i32,
meeting_times: Value,
}
/// Load all courses and parse their meeting times into a snapshot.
async fn load_snapshot(pool: &PgPool) -> anyhow::Result<ScheduleSnapshot> {
let start = std::time::Instant::now();
let rows: Vec<ScheduleRow> =
sqlx::query_as("SELECT subject, enrollment, meeting_times FROM courses")
.fetch_all(pool)
.await?;
let courses: Vec<CachedCourse> = rows
.into_iter()
.map(|row| {
let schedules = parse_meeting_times(&row.meeting_times);
CachedCourse {
subject: row.subject,
enrollment: row.enrollment,
schedules,
}
})
.collect();
debug!(
courses = courses.len(),
elapsed_ms = start.elapsed().as_millis(),
"Schedule snapshot built"
);
Ok(ScheduleSnapshot {
courses,
refreshed_at: std::time::Instant::now(),
})
}
// ── Meeting time parsing ────────────────────────────────────────────
/// Parse the JSONB `meeting_times` array into compact `ParsedSchedule` values.
fn parse_meeting_times(value: &Value) -> Vec<ParsedSchedule> {
let Value::Array(arr) = value else {
return Vec::new();
};
arr.iter().filter_map(parse_one_meeting).collect()
}
fn parse_one_meeting(mt: &Value) -> Option<ParsedSchedule> {
let begin_time = mt.get("begin_time")?.as_str()?;
let end_time = mt.get("end_time")?.as_str()?;
let begin_minutes = parse_hhmm(begin_time)?;
let end_minutes = parse_hhmm(end_time)?;
if end_minutes <= begin_minutes {
return None;
}
let start_date = parse_date(mt.get("start_date")?.as_str()?)?;
let end_date = parse_date(mt.get("end_date")?.as_str()?)?;
const DAY_KEYS: [&str; 7] = [
"monday",
"tuesday",
"wednesday",
"thursday",
"friday",
"saturday",
"sunday",
];
let mut days: u8 = 0;
for (bit, key) in DAY_KEYS.iter().enumerate() {
if mt.get(*key).and_then(Value::as_bool).unwrap_or(false) {
days |= 1 << bit;
}
}
// Skip meetings with no days (online async, etc.)
if days == 0 {
return None;
}
Some(ParsedSchedule {
days,
begin_minutes,
end_minutes,
start_date,
end_date,
})
}
/// Parse "HHMM" → minutes since midnight.
fn parse_hhmm(s: &str) -> Option<u16> {
if s.len() != 4 {
return None;
}
let hours: u16 = s[..2].parse().ok()?;
let mins: u16 = s[2..].parse().ok()?;
if hours >= 24 || mins >= 60 {
return None;
}
Some(hours * 60 + mins)
}
/// Parse "MM/DD/YYYY" → NaiveDate.
fn parse_date(s: &str) -> Option<NaiveDate> {
NaiveDate::parse_from_str(s, "%m/%d/%Y").ok()
}
// ── Slot matching ───────────────────────────────────────────────────
/// Day-of-week as our bitmask bit (Monday = bit 0 .. Sunday = bit 6).
/// Chrono's `weekday().num_days_from_monday()` already gives 0=Mon..6=Sun.
pub(crate) fn weekday_bit(day: chrono::Weekday) -> u8 {
1 << day.num_days_from_monday()
}
impl ParsedSchedule {
/// Check if this schedule is active during a given slot.
///
/// `slot_date` is the calendar date of the slot and `slot_weekday_bit` its
/// day-of-week bitmask. `slot_start_minutes` / `slot_end_minutes` are minutes
/// since midnight bounding the 15-minute window.
#[inline]
pub(crate) fn active_during(
&self,
slot_date: NaiveDate,
slot_weekday_bit: u8,
slot_start_minutes: u16,
slot_end_minutes: u16,
) -> bool {
// Day-of-week check
if self.days & slot_weekday_bit == 0 {
return false;
}
// Date range check
if slot_date < self.start_date || slot_date > self.end_date {
return false;
}
// Time overlap: meeting [begin, end) overlaps slot [start, end)
self.begin_minutes < slot_end_minutes && self.end_minutes > slot_start_minutes
}
}
#[cfg(test)]
mod tests {
use super::*;
use chrono::NaiveDate;
use serde_json::json;
#[test]
fn parse_hhmm_valid() {
assert_eq!(parse_hhmm("0000"), Some(0));
assert_eq!(parse_hhmm("0930"), Some(570));
assert_eq!(parse_hhmm("1350"), Some(830));
assert_eq!(parse_hhmm("2359"), Some(1439));
}
#[test]
fn parse_hhmm_invalid() {
assert_eq!(parse_hhmm(""), None);
assert_eq!(parse_hhmm("abc"), None);
assert_eq!(parse_hhmm("2500"), None);
assert_eq!(parse_hhmm("0060"), None);
}
#[test]
fn parse_date_valid() {
assert_eq!(
parse_date("08/26/2025"),
Some(NaiveDate::from_ymd_opt(2025, 8, 26).unwrap())
);
}
#[test]
fn parse_meeting_times_basic() {
let json = json!([{
"begin_time": "1000",
"end_time": "1050",
"start_date": "08/26/2025",
"end_date": "12/13/2025",
"monday": true,
"tuesday": false,
"wednesday": true,
"thursday": false,
"friday": true,
"saturday": false,
"sunday": false,
"building": "NPB",
"building_description": "North Paseo Building",
"room": "1.238",
"campus": "11",
"meeting_type": "FF",
"meeting_schedule_type": "AFF"
}]);
let schedules = parse_meeting_times(&json);
assert_eq!(schedules.len(), 1);
let s = &schedules[0];
assert_eq!(s.begin_minutes, 600); // 10:00
assert_eq!(s.end_minutes, 650); // 10:50
assert_eq!(s.days, 0b0010101); // Mon, Wed, Fri
}
#[test]
fn parse_meeting_times_skips_null_times() {
let json = json!([{
"begin_time": null,
"end_time": null,
"start_date": "08/26/2025",
"end_date": "12/13/2025",
"monday": false,
"tuesday": false,
"wednesday": false,
"thursday": false,
"friday": false,
"saturday": false,
"sunday": false,
"meeting_type": "OS",
"meeting_schedule_type": "AFF"
}]);
let schedules = parse_meeting_times(&json);
assert!(schedules.is_empty());
}
#[test]
fn active_during_matching_slot() {
let sched = ParsedSchedule {
days: 0b0000001, // Monday
begin_minutes: 600,
end_minutes: 650,
start_date: NaiveDate::from_ymd_opt(2025, 8, 26).unwrap(),
end_date: NaiveDate::from_ymd_opt(2025, 12, 13).unwrap(),
};
// Monday Sept 1 2025, 10:00-10:15 slot
let date = NaiveDate::from_ymd_opt(2025, 9, 1).unwrap();
assert!(sched.active_during(date, weekday_bit(chrono::Weekday::Mon), 600, 615));
}
#[test]
fn active_during_wrong_day() {
let sched = ParsedSchedule {
days: 0b0000001, // Monday only
begin_minutes: 600,
end_minutes: 650,
start_date: NaiveDate::from_ymd_opt(2025, 8, 26).unwrap(),
end_date: NaiveDate::from_ymd_opt(2025, 12, 13).unwrap(),
};
// Tuesday Sept 2 2025
let date = NaiveDate::from_ymd_opt(2025, 9, 2).unwrap();
assert!(!sched.active_during(date, weekday_bit(chrono::Weekday::Tue), 600, 615));
}
#[test]
fn active_during_no_time_overlap() {
let sched = ParsedSchedule {
days: 0b0000001,
begin_minutes: 600, // 10:00
end_minutes: 650, // 10:50
start_date: NaiveDate::from_ymd_opt(2025, 8, 26).unwrap(),
end_date: NaiveDate::from_ymd_opt(2025, 12, 13).unwrap(),
};
let date = NaiveDate::from_ymd_opt(2025, 9, 1).unwrap(); // Monday
// Slot 11:00-11:15 — after the meeting ends
assert!(!sched.active_during(date, weekday_bit(chrono::Weekday::Mon), 660, 675));
// Slot 9:45-10:00 — just before meeting starts (end=600, begin=600 → no overlap)
assert!(!sched.active_during(date, weekday_bit(chrono::Weekday::Mon), 585, 600));
}
#[test]
fn active_during_outside_date_range() {
let sched = ParsedSchedule {
days: 0b0000001,
begin_minutes: 600,
end_minutes: 650,
start_date: NaiveDate::from_ymd_opt(2025, 8, 26).unwrap(),
end_date: NaiveDate::from_ymd_opt(2025, 12, 13).unwrap(),
};
// Monday Jan 6 2025 — before semester
let date = NaiveDate::from_ymd_opt(2025, 1, 6).unwrap();
assert!(!sched.active_during(date, weekday_bit(chrono::Weekday::Mon), 600, 615));
}
#[test]
fn active_during_edge_overlap() {
let sched = ParsedSchedule {
days: 0b0000001,
begin_minutes: 600,
end_minutes: 650,
start_date: NaiveDate::from_ymd_opt(2025, 8, 26).unwrap(),
end_date: NaiveDate::from_ymd_opt(2025, 12, 13).unwrap(),
};
let date = NaiveDate::from_ymd_opt(2025, 9, 1).unwrap();
// Slot 10:45-11:00 — overlaps last 5 minutes of meeting
assert!(sched.active_during(date, weekday_bit(chrono::Weekday::Mon), 645, 660));
// Slot 9:45-10:00 — ends exactly when meeting starts, no overlap
assert!(!sched.active_during(date, weekday_bit(chrono::Weekday::Mon), 585, 600));
// Slot 10:50-11:05 — starts exactly when meeting ends, no overlap
assert!(!sched.active_during(date, weekday_bit(chrono::Weekday::Mon), 650, 665));
}
}
+187
@@ -0,0 +1,187 @@
//! In-memory caches for session resolution and OAuth CSRF state.
use chrono::{DateTime, Utc};
use dashmap::DashMap;
use rand::Rng;
use sqlx::PgPool;
use std::sync::Arc;
use std::time::{Duration, Instant};
use crate::data::models::User;
/// Cached session entry with TTL.
#[derive(Debug, Clone)]
struct CachedSession {
user: User,
session_expires_at: DateTime<Utc>,
cached_at: Instant,
}
/// In-memory session cache backed by PostgreSQL.
///
/// Provides fast session resolution without a DB round-trip on every request.
/// Cache entries expire after a configurable TTL (default 5 minutes).
#[derive(Clone)]
pub struct SessionCache {
cache: Arc<DashMap<String, CachedSession>>,
db_pool: PgPool,
cache_ttl: Duration,
}
impl SessionCache {
/// Create a new session cache with a 5-minute default TTL.
pub fn new(db_pool: PgPool) -> Self {
Self {
cache: Arc::new(DashMap::new()),
db_pool,
cache_ttl: Duration::from_secs(5 * 60),
}
}
/// Resolve a session token to a [`User`], using the cache when possible.
///
/// On cache hit (entry present, not stale, session not expired), returns the
/// cached user immediately. On miss or stale entry, queries the database for
/// the session and user, populates the cache, and fire-and-forgets a
/// `touch_session` call to update `last_active_at`.
pub async fn get_user(&self, token: &str) -> Option<User> {
// Check cache first
if let Some(entry) = self.cache.get(token) {
let now_instant = Instant::now();
let now_utc = Utc::now();
let cache_fresh = entry.cached_at + self.cache_ttl > now_instant;
let session_valid = entry.session_expires_at > now_utc;
if cache_fresh && session_valid {
return Some(entry.user.clone());
}
// Stale or expired — drop the ref before removing
drop(entry);
self.cache.remove(token);
}
// Cache miss — query DB
let session = crate::data::sessions::get_session(&self.db_pool, token)
.await
.ok()
.flatten()?;
let user = crate::data::users::get_user(&self.db_pool, session.user_id)
.await
.ok()
.flatten()?;
self.cache.insert(
token.to_owned(),
CachedSession {
user: user.clone(),
session_expires_at: session.expires_at,
cached_at: Instant::now(),
},
);
// Fire-and-forget touch to update last_active_at
let pool = self.db_pool.clone();
let token_owned = token.to_owned();
tokio::spawn(async move {
if let Err(e) = crate::data::sessions::touch_session(&pool, &token_owned).await {
tracing::warn!(error = %e, "failed to touch session");
}
});
Some(user)
}
/// Remove a single session from the cache (e.g. on logout).
pub fn evict(&self, token: &str) {
self.cache.remove(token);
}
/// Remove all cached sessions belonging to a user.
pub fn evict_user(&self, discord_id: i64) {
self.cache
.retain(|_, entry| entry.user.discord_id != discord_id);
}
/// Delete expired sessions from the database and sweep the in-memory cache.
///
/// Returns the number of sessions deleted from the database.
pub async fn cleanup_expired(&self) -> anyhow::Result<u64> {
let deleted = crate::data::sessions::cleanup_expired(&self.db_pool).await?;
let now = Utc::now();
self.cache.retain(|_, entry| entry.session_expires_at > now);
Ok(deleted)
}
}
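// Illustrative sketch (assumed wiring, not shown in this diff): a periodic
// sweep that prunes expired sessions from both Postgres and the in-memory
// cache. The one-hour cadence is an assumption, not a value from this commit.
fn spawn_session_cleanup(cache: SessionCache) {
    tokio::spawn(async move {
        let mut tick = tokio::time::interval(Duration::from_secs(60 * 60));
        loop {
            tick.tick().await;
            match cache.cleanup_expired().await {
                Ok(n) if n > 0 => tracing::info!(deleted = n, "pruned expired sessions"),
                Ok(_) => {}
                Err(e) => tracing::warn!(error = %e, "session cleanup failed"),
            }
        }
    });
}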
/// Data stored alongside each OAuth CSRF state token.
struct OAuthStateEntry {
created_at: Instant,
/// The browser origin that initiated the login flow, so the callback
/// can reconstruct the exact redirect_uri Discord expects.
origin: String,
}
/// Ephemeral store for OAuth CSRF state tokens.
///
/// Tokens are stored with creation time and expire after a configurable TTL.
/// Each token is single-use: validation consumes it.
#[derive(Clone)]
pub struct OAuthStateStore {
states: Arc<DashMap<String, OAuthStateEntry>>,
ttl: Duration,
}
impl Default for OAuthStateStore {
fn default() -> Self {
Self::new()
}
}
impl OAuthStateStore {
/// Create a new store with a 10-minute TTL.
pub fn new() -> Self {
Self {
states: Arc::new(DashMap::new()),
ttl: Duration::from_secs(10 * 60),
}
}
/// Generate a random 16-byte hex CSRF token, store it with the given
/// origin, and return the token.
pub fn generate(&self, origin: String) -> String {
let bytes: [u8; 16] = rand::rng().random();
let token: String = bytes.iter().map(|b| format!("{b:02x}")).collect();
self.states.insert(
token.clone(),
OAuthStateEntry {
created_at: Instant::now(),
origin,
},
);
token
}
/// Validate and consume a CSRF token. Returns the stored origin if the
/// token was present and not expired.
pub fn validate(&self, state: &str) -> Option<String> {
let (_, entry) = self.states.remove(state)?;
if entry.created_at.elapsed() < self.ttl {
Some(entry.origin)
} else {
None
}
}
/// Remove all expired entries from the store.
#[allow(dead_code)] // Intended for periodic cleanup task (not yet wired)
pub fn cleanup(&self) {
let ttl = self.ttl;
self.states
.retain(|_, entry| entry.created_at.elapsed() < ttl);
}
}
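// Illustrative sketch (assumed usage, not part of this diff): how the two
// halves of the OAuth flow would use the store. The authorize URL shown is
// schematic; only the `state` round-trip is the point.
fn begin_login(store: &OAuthStateStore, origin: String) -> String {
    let state = store.generate(origin);
    format!("https://discord.com/oauth2/authorize?...&state={state}")
}
fn finish_login(store: &OAuthStateStore, state_param: &str) -> Result<String, &'static str> {
    // Single-use: a replayed `state` fails here even inside the TTL window.
    store.validate(state_param).ok_or("invalid or expired OAuth state")
}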
+258
@@ -0,0 +1,258 @@
//! Timeline API endpoint for enrollment aggregation by subject over time.
//!
//! Accepts multiple time ranges, merges overlaps, aligns to 15-minute
//! slot boundaries, and returns per-subject enrollment totals for each slot.
//! Only courses whose meeting times overlap a given slot contribute to that
//! slot's totals — so the chart reflects the actual class schedule rhythm.
//!
//! Course data is served from an ISR-style in-memory cache (see
//! [`ScheduleCache`]) that refreshes hourly in the background with
//! stale-while-revalidate semantics.
use axum::{
extract::State,
http::StatusCode,
response::{IntoResponse, Json, Response},
};
use chrono::{DateTime, Datelike, Duration, NaiveTime, Timelike, Utc};
use chrono_tz::US::Central;
use serde::{Deserialize, Serialize};
use std::collections::{BTreeMap, BTreeSet};
use ts_rs::TS;
use crate::state::AppState;
use crate::web::schedule_cache::weekday_bit;
/// 15 minutes in seconds, matching the frontend `SLOT_INTERVAL_MS`.
const SLOT_SECONDS: i64 = 15 * 60;
const SLOT_MINUTES: u16 = 15;
/// Maximum number of ranges in a single request.
const MAX_RANGES: usize = 20;
/// Maximum span of a single range (72 hours).
const MAX_RANGE_SPAN: Duration = Duration::hours(72);
/// Maximum total span across all ranges to prevent excessive queries.
const MAX_TOTAL_SPAN: Duration = Duration::hours(168); // 1 week
// ── Request / Response types ────────────────────────────────────────
#[derive(Debug, Deserialize)]
pub(crate) struct TimelineRequest {
ranges: Vec<TimeRange>,
}
#[derive(Debug, Deserialize)]
pub(crate) struct TimeRange {
start: DateTime<Utc>,
end: DateTime<Utc>,
}
#[derive(Debug, Serialize, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export)]
pub struct TimelineResponse {
/// 15-minute slots with per-subject enrollment totals, sorted by time.
slots: Vec<TimelineSlot>,
/// All subject codes present in the returned data.
subjects: Vec<String>,
}
#[derive(Debug, Serialize, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export)]
pub struct TimelineSlot {
/// ISO-8601 timestamp at the start of this 15-minute bucket.
time: DateTime<Utc>,
/// Subject code → total enrollment in this slot.
subjects: BTreeMap<String, i64>,
}
// ── Error type ──────────────────────────────────────────────────────
pub(crate) struct TimelineError {
status: StatusCode,
message: String,
}
impl TimelineError {
fn bad_request(msg: impl Into<String>) -> Self {
Self {
status: StatusCode::BAD_REQUEST,
message: msg.into(),
}
}
}
impl IntoResponse for TimelineError {
fn into_response(self) -> Response {
(
self.status,
Json(serde_json::json!({ "error": self.message })),
)
.into_response()
}
}
// ── Alignment helpers ───────────────────────────────────────────────
/// Floor a timestamp to the nearest 15-minute boundary.
fn align_floor(ts: DateTime<Utc>) -> DateTime<Utc> {
let secs = ts.timestamp();
let aligned = (secs / SLOT_SECONDS) * SLOT_SECONDS;
DateTime::from_timestamp(aligned, 0).unwrap_or(ts)
}
/// Ceil a timestamp to the nearest 15-minute boundary.
fn align_ceil(ts: DateTime<Utc>) -> DateTime<Utc> {
let secs = ts.timestamp();
let aligned = ((secs + SLOT_SECONDS - 1) / SLOT_SECONDS) * SLOT_SECONDS;
DateTime::from_timestamp(aligned, 0).unwrap_or(ts)
}
// ── Range merging ───────────────────────────────────────────────────
/// Aligned, validated range.
#[derive(Debug, Clone, Copy)]
struct AlignedRange {
start: DateTime<Utc>,
end: DateTime<Utc>,
}
/// Merge overlapping/adjacent ranges into a minimal set.
fn merge_ranges(mut ranges: Vec<AlignedRange>) -> Vec<AlignedRange> {
if ranges.is_empty() {
return ranges;
}
ranges.sort_by_key(|r| r.start);
let mut merged: Vec<AlignedRange> = vec![ranges[0]];
for r in &ranges[1..] {
let last = merged.last_mut().unwrap();
if r.start <= last.end {
last.end = last.end.max(r.end);
} else {
merged.push(*r);
}
}
merged
}
/// Generate all aligned slot timestamps within the merged ranges.
fn generate_slots(merged: &[AlignedRange]) -> BTreeSet<DateTime<Utc>> {
let mut slots = BTreeSet::new();
for range in merged {
let mut t = range.start;
while t < range.end {
slots.insert(t);
t += Duration::seconds(SLOT_SECONDS);
}
}
slots
}
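// Illustrative sketch (test-style, not part of this diff): two overlapping
// ranges align to 15-minute boundaries, merge into one, and expand to slots.
#[cfg(test)]
mod range_example {
    use super::*;
    #[test]
    fn overlapping_ranges_merge_and_expand() {
        let t = |s: &str| s.parse::<DateTime<Utc>>().unwrap();
        let merged = merge_ranges(vec![
            AlignedRange {
                start: align_floor(t("2025-09-01T10:03:00Z")), // floors to 10:00
                end: align_ceil(t("2025-09-01T11:00:00Z")),
            },
            AlignedRange {
                start: align_floor(t("2025-09-01T10:45:00Z")),
                end: align_ceil(t("2025-09-01T11:20:00Z")), // ceils to 11:30
            },
        ]);
        assert_eq!(merged.len(), 1); // 10:00-11:30 after alignment + merge
        assert_eq!(generate_slots(&merged).len(), 6); // six 15-minute slots
    }
}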
// ── Handler ─────────────────────────────────────────────────────────
/// `POST /api/timeline`
///
/// Accepts a JSON body with multiple time ranges. Returns per-subject
/// enrollment totals bucketed into 15-minute slots. Only courses whose
/// meeting schedule overlaps a slot contribute to that slot's count.
pub(crate) async fn timeline(
State(state): State<AppState>,
Json(body): Json<TimelineRequest>,
) -> Result<Json<TimelineResponse>, TimelineError> {
// ── Validate ────────────────────────────────────────────────────
if body.ranges.is_empty() {
return Err(TimelineError::bad_request("At least one range is required"));
}
if body.ranges.len() > MAX_RANGES {
return Err(TimelineError::bad_request(format!(
"Too many ranges (max {MAX_RANGES})"
)));
}
let mut aligned: Vec<AlignedRange> = Vec::with_capacity(body.ranges.len());
for r in &body.ranges {
if r.end <= r.start {
return Err(TimelineError::bad_request(format!(
"Range end ({}) must be after start ({})",
r.end, r.start
)));
}
let span = r.end - r.start;
if span > MAX_RANGE_SPAN {
return Err(TimelineError::bad_request(format!(
"Range span ({} hours) exceeds maximum ({} hours)",
span.num_hours(),
MAX_RANGE_SPAN.num_hours()
)));
}
aligned.push(AlignedRange {
start: align_floor(r.start),
end: align_ceil(r.end),
});
}
let merged = merge_ranges(aligned);
// Validate total span
let total_span: Duration = merged.iter().map(|r| r.end - r.start).sum();
if total_span > MAX_TOTAL_SPAN {
return Err(TimelineError::bad_request(format!(
"Total time span ({} hours) exceeds maximum ({} hours)",
total_span.num_hours(),
MAX_TOTAL_SPAN.num_hours()
)));
}
// ── Get cached schedule data (ISR: stale-while-revalidate) ───────
state.schedule_cache.ensure_fresh();
let snapshot = state.schedule_cache.snapshot();
// ── Build per-slot enrollment by filtering on meeting times ──────
let slot_times = generate_slots(&merged);
let mut all_subjects: BTreeSet<String> = BTreeSet::new();
let slots: Vec<TimelineSlot> = slot_times
.into_iter()
.map(|utc_time| {
// Convert UTC slot to Central time for local day-of-week and time-of-day
let local = utc_time.with_timezone(&Central);
let local_date = local.date_naive();
let local_time = local.time();
let weekday = local.weekday();
let wday_bit = weekday_bit(weekday);
let slot_start_minutes = time_to_minutes(local_time);
let slot_end_minutes = slot_start_minutes + SLOT_MINUTES;
let mut subject_totals: BTreeMap<String, i64> = BTreeMap::new();
for course in &snapshot.courses {
let active = course.schedules.iter().any(|s| {
s.active_during(local_date, wday_bit, slot_start_minutes, slot_end_minutes)
});
if active {
*subject_totals.entry(course.subject.clone()).or_default() +=
course.enrollment as i64;
}
}
all_subjects.extend(subject_totals.keys().cloned());
TimelineSlot {
time: utc_time,
subjects: subject_totals,
}
})
.collect();
let subjects: Vec<String> = all_subjects.into_iter().collect();
Ok(Json(TimelineResponse { slots, subjects }))
}
/// Convert a `NaiveTime` to minutes since midnight.
fn time_to_minutes(t: NaiveTime) -> u16 {
(t.hour() * 60 + t.minute()) as u16
}
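// Illustrative sketch (assumed shape, not part of this diff): a
// `POST /api/timeline` exchange after the serde renames above. Subject
// codes and enrollment counts are made up.
#[allow(dead_code)]
fn example_timeline_exchange() -> (serde_json::Value, serde_json::Value) {
    let request = serde_json::json!({
        "ranges": [
            { "start": "2025-09-01T15:00:00Z", "end": "2025-09-01T16:00:00Z" }
        ]
    });
    let response = serde_json::json!({
        "slots": [
            { "time": "2025-09-01T15:00:00Z", "subjects": { "CS": 412, "MAT": 358 } }
        ],
        "subjects": ["CS", "MAT"]
    });
    (request, response)
}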
+205
@@ -0,0 +1,205 @@
//! WebSocket event types and handler for real-time scrape job updates.
use axum::{
extract::{
State,
ws::{Message, WebSocket, WebSocketUpgrade},
},
response::IntoResponse,
};
use futures::{SinkExt, StreamExt};
use serde::Serialize;
use sqlx::PgPool;
use tokio::sync::broadcast;
use tracing::debug;
use crate::data::models::{ScrapeJob, ScrapeJobStatus};
use crate::state::AppState;
use crate::web::extractors::AdminUser;
/// A serializable DTO for `ScrapeJob` with computed `status`.
#[derive(Debug, Clone, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct ScrapeJobDto {
pub id: i32,
pub target_type: String,
pub target_payload: serde_json::Value,
pub priority: String,
pub execute_at: String,
pub created_at: String,
pub locked_at: Option<String>,
pub retry_count: i32,
pub max_retries: i32,
pub queued_at: String,
pub status: ScrapeJobStatus,
}
impl From<&ScrapeJob> for ScrapeJobDto {
fn from(job: &ScrapeJob) -> Self {
Self {
id: job.id,
target_type: format!("{:?}", job.target_type),
target_payload: job.target_payload.clone(),
priority: format!("{:?}", job.priority),
execute_at: job.execute_at.to_rfc3339(),
created_at: job.created_at.to_rfc3339(),
locked_at: job.locked_at.map(|t| t.to_rfc3339()),
retry_count: job.retry_count,
max_retries: job.max_retries,
queued_at: job.queued_at.to_rfc3339(),
status: job.status(),
}
}
}
/// Events broadcast when scrape job state changes.
#[derive(Debug, Clone, Serialize)]
#[serde(tag = "type", rename_all = "camelCase")]
pub enum ScrapeJobEvent {
Init {
jobs: Vec<ScrapeJobDto>,
},
JobCreated {
job: ScrapeJobDto,
},
JobLocked {
id: i32,
locked_at: String,
status: ScrapeJobStatus,
},
JobCompleted {
id: i32,
},
JobRetried {
id: i32,
retry_count: i32,
queued_at: String,
status: ScrapeJobStatus,
},
JobExhausted {
id: i32,
},
JobDeleted {
id: i32,
},
}
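// Illustrative sketch (test-style, not part of this diff): the container
// attributes above (`tag = "type"`, `rename_all = "camelCase"`) make every
// event self-describing, which is what the frontend switches on.
#[cfg(test)]
mod event_shape {
    use super::*;
    #[test]
    fn events_are_internally_tagged() {
        let event = ScrapeJobEvent::JobCompleted { id: 42 };
        let json = serde_json::to_value(&event).unwrap();
        assert_eq!(json["type"], "jobCompleted");
        assert_eq!(json["id"], 42);
    }
}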
/// Fetch current scrape jobs from the DB and build an `Init` event.
async fn build_init_event(db_pool: &PgPool) -> Result<ScrapeJobEvent, sqlx::Error> {
let rows = sqlx::query_as::<_, ScrapeJob>(
"SELECT * FROM scrape_jobs ORDER BY priority DESC, execute_at ASC LIMIT 100",
)
.fetch_all(db_pool)
.await?;
let jobs = rows.iter().map(ScrapeJobDto::from).collect();
Ok(ScrapeJobEvent::Init { jobs })
}
/// WebSocket endpoint for real-time scrape job updates.
///
/// Auth is checked via `AdminUser` before the upgrade occurs — if rejected,
/// a 401/403 is returned and the upgrade never happens.
pub async fn scrape_jobs_ws(
ws: WebSocketUpgrade,
AdminUser(_user): AdminUser,
State(state): State<AppState>,
) -> impl IntoResponse {
ws.on_upgrade(|socket| handle_scrape_jobs_ws(socket, state))
}
/// Serialize an event and send it over the WebSocket sink.
/// Returns `true` if the message was sent, `false` if the client disconnected.
async fn send_event(
sink: &mut futures::stream::SplitSink<WebSocket, Message>,
event: &ScrapeJobEvent,
) -> bool {
let Ok(json) = serde_json::to_string(event) else {
return true; // serialization failed, but connection is still alive
};
sink.send(Message::Text(json.into())).await.is_ok()
}
async fn handle_scrape_jobs_ws(socket: WebSocket, state: AppState) {
debug!("scrape-jobs WebSocket connected");
let (mut sink, mut stream) = socket.split();
// Send initial state
let init_event = match build_init_event(&state.db_pool).await {
Ok(event) => event,
Err(e) => {
debug!(error = %e, "failed to build init event, closing WebSocket");
return;
}
};
if !send_event(&mut sink, &init_event).await {
debug!("client disconnected during init send");
return;
}
// Subscribe to broadcast events
let mut rx = state.scrape_job_events();
loop {
tokio::select! {
result = rx.recv() => {
match result {
Ok(ref event) => {
if !send_event(&mut sink, event).await {
debug!("client disconnected during event send");
break;
}
}
Err(broadcast::error::RecvError::Lagged(n)) => {
debug!(missed = n, "broadcast lagged, resyncing");
match build_init_event(&state.db_pool).await {
Ok(ref event) => {
if !send_event(&mut sink, event).await {
debug!("client disconnected during resync send");
break;
}
}
Err(e) => {
debug!(error = %e, "failed to build resync init event");
}
}
}
Err(broadcast::error::RecvError::Closed) => {
debug!("broadcast channel closed");
break;
}
}
}
msg = stream.next() => {
match msg {
Some(Ok(Message::Text(text))) => {
if let Ok(parsed) = serde_json::from_str::<serde_json::Value>(&text)
&& parsed.get("type").and_then(|t| t.as_str()) == Some("resync")
{
debug!("client requested resync");
match build_init_event(&state.db_pool).await {
Ok(ref event) => {
if !send_event(&mut sink, event).await {
debug!("client disconnected during resync send");
break;
}
}
Err(e) => {
debug!(error = %e, "failed to build resync init event");
}
}
}
}
Some(Ok(Message::Close(_))) | None => {
debug!("client disconnected");
break;
}
_ => {}
}
}
}
}
debug!("scrape-jobs WebSocket disconnected");
}
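// Illustrative sketch (derived from the handler above): the only inbound
// message the server acts on is a resync request, so a client that detects
// a gap can send exactly this and expect a fresh `Init` event back.
#[allow(dead_code)]
fn resync_message() -> Message {
    Message::Text(serde_json::json!({ "type": "resync" }).to_string().into())
}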
+113
@@ -210,3 +210,116 @@ async fn test_batch_upsert_unique_constraint_crn_term(pool: PgPool) {
assert_eq!(rows[1].0, "202520");
assert_eq!(rows[1].1, 10);
}
#[sqlx::test]
async fn test_batch_upsert_creates_audit_and_metric_entries(pool: PgPool) {
// Insert initial data — should NOT create audits/metrics (it's a fresh insert)
let initial = vec![helpers::make_course(
"50001",
"202510",
"CS",
"3443",
"App Programming",
10,
35,
0,
5,
)];
batch_upsert_courses(&initial, &pool).await.unwrap();
let (audit_count,): (i64,) = sqlx::query_as("SELECT COUNT(*) FROM course_audits")
.fetch_one(&pool)
.await
.unwrap();
assert_eq!(
audit_count, 0,
"initial insert should not create audit entries"
);
let (metric_count,): (i64,) = sqlx::query_as("SELECT COUNT(*) FROM course_metrics")
.fetch_one(&pool)
.await
.unwrap();
assert_eq!(
metric_count, 0,
"initial insert should not create metric entries"
);
// Update enrollment and wait_count
let updated = vec![helpers::make_course(
"50001",
"202510",
"CS",
"3443",
"App Programming",
20,
35,
2,
5,
)];
batch_upsert_courses(&updated, &pool).await.unwrap();
// Should have audit entries for enrollment and wait_count changes
let (audit_count,): (i64,) = sqlx::query_as("SELECT COUNT(*) FROM course_audits")
.fetch_one(&pool)
.await
.unwrap();
assert!(
audit_count >= 2,
"should have audit entries for enrollment and wait_count changes, got {audit_count}"
);
// Should have exactly 1 metric entry
let (metric_count,): (i64,) = sqlx::query_as("SELECT COUNT(*) FROM course_metrics")
.fetch_one(&pool)
.await
.unwrap();
assert_eq!(metric_count, 1, "should have 1 metric snapshot");
// Verify metric values
let (enrollment, wait_count, seats): (i32, i32, i32) = sqlx::query_as(
"SELECT enrollment, wait_count, seats_available FROM course_metrics LIMIT 1",
)
.fetch_one(&pool)
.await
.unwrap();
assert_eq!(enrollment, 20);
assert_eq!(wait_count, 2);
assert_eq!(seats, 15); // 35 - 20
}
#[sqlx::test]
async fn test_batch_upsert_no_change_no_audit(pool: PgPool) {
// Insert then re-insert identical data — should produce zero audits/metrics
let course = vec![helpers::make_course(
"60001",
"202510",
"CS",
"1083",
"Intro to CS",
25,
30,
0,
5,
)];
batch_upsert_courses(&course, &pool).await.unwrap();
batch_upsert_courses(&course, &pool).await.unwrap();
let (audit_count,): (i64,) = sqlx::query_as("SELECT COUNT(*) FROM course_audits")
.fetch_one(&pool)
.await
.unwrap();
assert_eq!(
audit_count, 0,
"identical re-upsert should not create audit entries"
);
let (metric_count,): (i64,) = sqlx::query_as("SELECT COUNT(*) FROM course_metrics")
.fetch_one(&pool)
.await
.unwrap();
assert_eq!(
metric_count, 0,
"identical re-upsert should not create metric entries"
);
}
+17 -11
@@ -217,10 +217,13 @@ async fn unlock_and_increment_retry_has_retries_remaining(pool: PgPool) {
)
.await;
let result = scrape_jobs::unlock_and_increment_retry(id, 3, &pool)
.await
.unwrap();
assert!(
result.is_some(),
"should have retries remaining (0→1, max=3)"
);
// Verify state in DB
let (retry_count, locked_at): (i32, Option<chrono::DateTime<chrono::Utc>>) =
@@ -241,17 +244,17 @@ async fn unlock_and_increment_retry_exhausted(pool: PgPool) {
json!({"subject": "CS"}),
ScrapePriority::Medium,
true,
3, // retry_count (already used all 3 retries)
3, // max_retries
)
.await;
let result = scrape_jobs::unlock_and_increment_retry(id, 3, &pool)
.await
.unwrap();
assert!(
result.is_none(),
"should NOT have retries remaining (3→4, max=3)"
);
let (retry_count,): (i32,) =
@@ -260,7 +263,7 @@ async fn unlock_and_increment_retry_exhausted(pool: PgPool) {
.fetch_one(&pool)
.await
.unwrap();
assert_eq!(retry_count, 4);
}
#[sqlx::test]
@@ -276,11 +279,11 @@ async fn unlock_and_increment_retry_already_exceeded(pool: PgPool) {
)
.await;
let result = scrape_jobs::unlock_and_increment_retry(id, 3, &pool)
.await
.unwrap();
assert!(
result.is_none(),
"should NOT have retries remaining (5→6, max=3)"
);
@@ -346,7 +349,7 @@ async fn find_existing_payloads_returns_matching(pool: PgPool) {
}
#[sqlx::test]
async fn find_existing_payloads_includes_locked(pool: PgPool) {
let payload = json!({"subject": "CS"});
helpers::insert_scrape_job(
@@ -365,7 +368,10 @@ async fn find_existing_payloads_ignores_locked(pool: PgPool) {
.await
.unwrap();
assert!(
existing.contains(&payload.to_string()),
"locked jobs should be included in deduplication"
);
}
#[sqlx::test]
+241
@@ -5,6 +5,12 @@
"": {
"name": "banner-web",
"dependencies": {
"@icons-pack/svelte-simple-icons": "^6.5.0",
"d3-scale": "^4.0.2",
"d3-shape": "^3.2.0",
"d3-time-format": "^4.1.0",
"date-fns": "^4.1.0",
"layerchart": "^1.0.13",
"overlayscrollbars": "^2.14.0",
"overlayscrollbars-svelte": "^0.5.5",
},
@@ -17,6 +23,9 @@
"@sveltejs/vite-plugin-svelte": "^5.0.3",
"@tailwindcss/vite": "^4.0.0",
"@tanstack/table-core": "^8.21.3",
"@types/d3-scale": "^4.0.9",
"@types/d3-shape": "^3.1.8",
"@types/d3-time-format": "^4.0.3",
"@types/node": "^25.1.0",
"bits-ui": "^1.3.7",
"clsx": "^2.1.1",
@@ -32,6 +41,8 @@
},
},
"packages": {
"@alloc/quick-lru": ["@alloc/quick-lru@5.2.0", "", {}, "sha512-UrcABB+4bUrFABwbluTIBErXwvbsU/V7TZWfmbgJfbkwiBuziS9gxdODUyuiecfdGQ85jglMW6juS3+z5TsKLw=="],
"@asamuzakjp/css-color": ["@asamuzakjp/css-color@3.2.0", "", { "dependencies": { "@csstools/css-calc": "^2.1.3", "@csstools/css-color-parser": "^3.0.9", "@csstools/css-parser-algorithms": "^3.0.4", "@csstools/css-tokenizer": "^3.0.3", "lru-cache": "^10.4.3" } }, "sha512-K1A6z8tS3XsmCMM86xoWdn7Fkdn9m6RSVtocUrJYIwZnFVkng/PvkEoWtOWmP+Scc6saYWHWZYbndEEXxl24jw=="],
"@biomejs/biome": ["@biomejs/biome@1.9.4", "", { "optionalDependencies": { "@biomejs/cli-darwin-arm64": "1.9.4", "@biomejs/cli-darwin-x64": "1.9.4", "@biomejs/cli-linux-arm64": "1.9.4", "@biomejs/cli-linux-arm64-musl": "1.9.4", "@biomejs/cli-linux-x64": "1.9.4", "@biomejs/cli-linux-x64-musl": "1.9.4", "@biomejs/cli-win32-arm64": "1.9.4", "@biomejs/cli-win32-x64": "1.9.4" }, "bin": { "biome": "bin/biome" } }, "sha512-1rkd7G70+o9KkTn5KLmDYXihGoTaIGO9PIIN2ZB7UJxFrWw04CZHPYiMRjYsaDvVV7hP1dYNRLxSANLaBFGpog=="],
@@ -62,6 +73,10 @@
"@csstools/css-tokenizer": ["@csstools/css-tokenizer@3.0.4", "", {}, "sha512-Vd/9EVDiu6PPJt9yAh6roZP6El1xHrdvIVGjyBsHR0RYwNHgL7FJPyIIW4fANJNG6FtyZfvlRPpFI4ZM/lubvw=="],
"@dagrejs/dagre": ["@dagrejs/dagre@1.1.8", "", { "dependencies": { "@dagrejs/graphlib": "2.2.4" } }, "sha512-5SEDlndt4W/LaVzPYJW+bSmSEZc9EzTf8rJ20WCKvjS5EAZAN0b+x0Yww7VMT4R3Wootkg+X9bUfUxazYw6Blw=="],
"@dagrejs/graphlib": ["@dagrejs/graphlib@2.2.4", "", {}, "sha512-mepCf/e9+SKYy1d02/UkvSy6+6MoyXhVxP8lLDfA7BPE1X1d4dR0sZznmbM8/XVJ1GPM+Svnx7Xj6ZweByWUkw=="],
"@esbuild/aix-ppc64": ["@esbuild/aix-ppc64@0.25.12", "", { "os": "aix", "cpu": "ppc64" }, "sha512-Hhmwd6CInZ3dwpuGTF8fJG6yoWmsToE+vYgD4nytZVxcu1ulHpUQRAB1UJ8+N1Am3Mz4+xOByoQoSZf4D+CpkA=="],
"@esbuild/android-arm": ["@esbuild/android-arm@0.25.12", "", { "os": "android", "cpu": "arm" }, "sha512-VJ+sKvNA/GE7Ccacc9Cha7bpS8nyzVv0jdVgwNDaR4gDMC/2TTRc33Ip8qrNYUcpkOHUT5OZ0bUcNNVZQ9RLlg=="],
@@ -122,6 +137,8 @@
"@fontsource-variable/inter": ["@fontsource-variable/inter@5.2.8", "", {}, "sha512-kOfP2D+ykbcX/P3IFnokOhVRNoTozo5/JxhAIVYLpea/UBmCQ/YWPBfWIDuBImXX/15KH+eKh4xpEUyS2sQQGQ=="],
"@icons-pack/svelte-simple-icons": ["@icons-pack/svelte-simple-icons@6.5.0", "", { "peerDependencies": { "@sveltejs/kit": "^2.5.0", "svelte": "^4.2.0 || ^5.0.0" } }, "sha512-Xj3PTioiV3TJ1NTKsXY95NFG8FUqw90oeyDZIlslWHs1KkuCheu1HOPrlHb0/IM0b4cldPgx/0TldzxzBlM8Cw=="],
"@internationalized/date": ["@internationalized/date@3.10.1", "", { "dependencies": { "@swc/helpers": "^0.5.0" } }, "sha512-oJrXtQiAXLvT9clCf1K4kxp3eKsQhIaZqxEyowkBcsvZDdZkbWrVmnGknxs5flTD0VGsxrxKgBCZty1EzoiMzA=="],
"@jridgewell/gen-mapping": ["@jridgewell/gen-mapping@0.3.13", "", { "dependencies": { "@jridgewell/sourcemap-codec": "^1.5.0", "@jridgewell/trace-mapping": "^0.3.24" } }, "sha512-2kkt/7niJ6MgEPxF0bYdQ6etZaA+fQvDcLKckhy1yIQOzaoKjBBjSj63/aLVjYE3qhRt5dvM+uUyfCg6UKCBbA=="],
@@ -134,8 +151,22 @@
"@jridgewell/trace-mapping": ["@jridgewell/trace-mapping@0.3.31", "", { "dependencies": { "@jridgewell/resolve-uri": "^3.1.0", "@jridgewell/sourcemap-codec": "^1.4.14" } }, "sha512-zzNR+SdQSDJzc8joaeP8QQoCQr8NuYx2dIIytl1QeBEZHJ9uW6hebsrYgbz8hJwUQao3TWCMtmfV8Nu1twOLAw=="],
"@layerstack/svelte-actions": ["@layerstack/svelte-actions@1.0.1", "", { "dependencies": { "@floating-ui/dom": "^1.6.13", "@layerstack/utils": "1.0.1", "d3-array": "^3.2.4", "d3-scale": "^4.0.2", "date-fns": "^4.1.0", "lodash-es": "^4.17.21" } }, "sha512-Tv8B3TeT7oaghx0R0I4avnSdfAT6GxEK+StL8k/hEaa009iNOIGFl3f76kfvNvPioQHAMFGtnWGLPHfsfD41nQ=="],
"@layerstack/svelte-stores": ["@layerstack/svelte-stores@1.0.2", "", { "dependencies": { "@layerstack/utils": "1.0.1", "d3-array": "^3.2.4", "date-fns": "^4.1.0", "immer": "^10.1.1", "lodash-es": "^4.17.21", "zod": "^3.24.2" } }, "sha512-IxK0UKD0PVxg1VsyaR+n7NyJ+NlvyqvYYAp+J10lkjDQxm0yx58CaF2LBV08T22C3aY1iTlqJaatn/VHV4SoQg=="],
"@layerstack/tailwind": ["@layerstack/tailwind@1.0.1", "", { "dependencies": { "@layerstack/utils": "^1.0.1", "clsx": "^2.1.1", "culori": "^4.0.1", "d3-array": "^3.2.4", "date-fns": "^4.1.0", "lodash-es": "^4.17.21", "tailwind-merge": "^2.5.4", "tailwindcss": "^3.4.15" } }, "sha512-nlshEkUCfaV0zYzrFXVVYRnS8bnBjs4M7iui6l/tu6NeBBlxDivIyRraJkdYGCSL1lZHi6FqacLQ3eerHtz90A=="],
"@layerstack/utils": ["@layerstack/utils@1.0.1", "", { "dependencies": { "d3-array": "^3.2.4", "date-fns": "^4.1.0", "lodash-es": "^4.17.21" } }, "sha512-sWP9b+SFMkJYMZyYFI01aLxbg2ZUrix6Tv+BCDmeOrcLNxtWFsMYAomMhALzTMHbb+Vis/ua5vXhpdNXEw8a2Q=="],
"@lucide/svelte": ["@lucide/svelte@0.563.1", "", { "peerDependencies": { "svelte": "^5" } }, "sha512-Kt+MbnE5D9RsuI/csmf7M+HWxALe57x3A0DhQ8pPnnUpneh7zuldrYjlT+veWtk+tVnp5doQtaAAxLujzIlhBw=="],
"@nodelib/fs.scandir": ["@nodelib/fs.scandir@2.1.5", "", { "dependencies": { "@nodelib/fs.stat": "2.0.5", "run-parallel": "^1.1.9" } }, "sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g=="],
"@nodelib/fs.stat": ["@nodelib/fs.stat@2.0.5", "", {}, "sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A=="],
"@nodelib/fs.walk": ["@nodelib/fs.walk@1.2.8", "", { "dependencies": { "@nodelib/fs.scandir": "2.1.5", "fastq": "^1.6.0" } }, "sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg=="],
"@polka/url": ["@polka/url@1.0.0-next.29", "", {}, "sha512-wwQAWhWSuHaag8c4q/KN/vCoeOJYshAIvMQwD4GpSb3OiZklFfvAgmj0VCBBImRpuF/aFgIRzllXlVX93Jevww=="],
"@rollup/rollup-android-arm-eabi": ["@rollup/rollup-android-arm-eabi@4.57.0", "", { "os": "android", "cpu": "arm" }, "sha512-tPgXB6cDTndIe1ah7u6amCI1T0SsnlOuKgg10Xh3uizJk4e5M1JGaUMk7J4ciuAUcFpbOiNhm2XIjP9ON0dUqA=="],
@@ -238,6 +269,16 @@
"@types/cookie": ["@types/cookie@0.6.0", "", {}, "sha512-4Kh9a6B2bQciAhf7FSuMRRkUWecJgJu9nPnx3yzpsfXX/c50REIqpHY4C82bXP90qrLtXtkDxTZosYO3UpOwlA=="],
"@types/d3-path": ["@types/d3-path@3.1.1", "", {}, "sha512-VMZBYyQvbGmWyWVea0EHs/BwLgxc+MKi1zLDCONksozI4YJMcTt8ZEuIR4Sb1MMTE8MMW49v0IwI5+b7RmfWlg=="],
"@types/d3-scale": ["@types/d3-scale@4.0.9", "", { "dependencies": { "@types/d3-time": "*" } }, "sha512-dLmtwB8zkAeO/juAMfnV+sItKjlsw2lKdZVVy6LRr0cBmegxSABiLEpGVmSJJ8O08i4+sGR6qQtb6WtuwJdvVw=="],
"@types/d3-shape": ["@types/d3-shape@3.1.8", "", { "dependencies": { "@types/d3-path": "*" } }, "sha512-lae0iWfcDeR7qt7rA88BNiqdvPS5pFVPpo5OfjElwNaT2yyekbM0C9vK+yqBqEmHr6lDkRnYNoTBYlAgJa7a4w=="],
"@types/d3-time": ["@types/d3-time@3.0.4", "", {}, "sha512-yuzZug1nkAAaBlBBikKZTgzCeA+k1uy4ZFwWANOfKw5z5LRhV0gNA7gNkKm7HoK+HRN0wX3EkxGk0fpbWhmB7g=="],
"@types/d3-time-format": ["@types/d3-time-format@4.0.3", "", {}, "sha512-5xg9rC+wWL8kdDj153qZcsJ0FWiFt0J5RB6LYUNZjwSnesfblqrI/bJ1wBdJ8OQfncgbJG5+2F+qfqnqyzYxyg=="],
"@types/deep-eql": ["@types/deep-eql@4.0.2", "", {}, "sha512-c9h9dVVMigMPc4bwTvC5dxqtqJZwQPePsWjPlpSOnojbor6pGqdk541lfA7AqFQr5pB1BRdq0juY9db81BwyFw=="],
"@types/estree": ["@types/estree@1.0.8", "", {}, "sha512-dWHzHa2WqEXI/O1E9OjrocMTKJl2mSrEolh1Iomrv6U+JuNwaHXsXx9bLu5gG7BUWFIN0skIQJQ/L1rIex4X6w=="],
@@ -262,16 +303,28 @@
"agent-base": ["agent-base@7.1.4", "", {}, "sha512-MnA+YT8fwfJPgBx3m60MNqakm30XOkyIoH1y6huTQvC0PwZG7ki8NacLBcrPbNoo8vEZy7Jpuk7+jMO+CUovTQ=="],
"any-promise": ["any-promise@1.3.0", "", {}, "sha512-7UvmKalWRt1wgjL1RrGxoSJW/0QZFIegpeGvZG9kjp8vrRu55XTHbwnqq2GpXm9uLbcuhxm3IqX9OB4MZR1b2A=="],
"anymatch": ["anymatch@3.1.3", "", { "dependencies": { "normalize-path": "^3.0.0", "picomatch": "^2.0.4" } }, "sha512-KMReFUr0B4t+D+OBkjR3KYqvocp2XaSzO55UcB6mgQMd3KbcE+mWTyvVV7D/zsdEbNnV6acZUutkiHQXvTr1Rw=="],
"arg": ["arg@5.0.2", "", {}, "sha512-PYjyFOLKQ9y57JvQ6QLo8dAgNqswh8M1RMJYdQduT6xbWSgK36P/Z/v+p888pM69jMMfS8Xd8F6I1kQ/I9HUGg=="],
"aria-query": ["aria-query@5.3.2", "", {}, "sha512-COROpnaoap1E2F000S62r6A60uHZnmlvomhfyT2DlTcrY1OrBKn2UhH7qn5wTC9zMvD0AY7csdPSNwKP+7WiQw=="],
"assertion-error": ["assertion-error@2.0.1", "", {}, "sha512-Izi8RQcffqCeNVgFigKli1ssklIbpHnCYc6AknXGYoB6grJqyeby7jv12JUQgmTAnIDnbck1uxksT4dzN3PWBA=="],
"axobject-query": ["axobject-query@4.1.0", "", {}, "sha512-qIj0G9wZbMGNLjLmg1PT6v2mE9AH2zlnADJD/2tC6E00hgmhUOfEB6greHPAfLRSufHqROIUTkw6E+M3lH0PTQ=="],
"binary-extensions": ["binary-extensions@2.3.0", "", {}, "sha512-Ceh+7ox5qe7LJuLHoY0feh3pHuUDHAcRUeyL2VYghZwfpkNIy/+8Ocg0a3UuSoYzavmylwuLWQOf3hl0jjMMIw=="],
"bits-ui": ["bits-ui@1.8.0", "", { "dependencies": { "@floating-ui/core": "^1.6.4", "@floating-ui/dom": "^1.6.7", "@internationalized/date": "^3.5.6", "css.escape": "^1.5.1", "esm-env": "^1.1.2", "runed": "^0.23.2", "svelte-toolbelt": "^0.7.1", "tabbable": "^6.2.0" }, "peerDependencies": { "svelte": "^5.11.0" } }, "sha512-CXD6Orp7l8QevNDcRPLXc/b8iMVgxDWT2LyTwsdLzJKh9CxesOmPuNePSPqAxKoT59FIdU4aFPS1k7eBdbaCxg=="],
"braces": ["braces@3.0.3", "", { "dependencies": { "fill-range": "^7.1.1" } }, "sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA=="],
"cac": ["cac@6.7.14", "", {}, "sha512-b6Ilus+c3RrdDk+JhLKUAQfzzgLEPy6wcXqS7f/xe1EETvsDP6GORG7SFuOs6cID5YkqchW/LXZbX5bc8j7ZcQ=="],
"camelcase-css": ["camelcase-css@2.0.1", "", {}, "sha512-QOSvevhslijgYwRx6Rv7zKdMF8lbRmx+uQGx2+vDc+KI/eBnsy9kit5aj23AgGu3pa4t9AgwbnXWqS+iOY+2aA=="],
"chai": ["chai@5.3.3", "", { "dependencies": { "assertion-error": "^2.0.1", "check-error": "^2.1.1", "deep-eql": "^5.0.1", "loupe": "^3.1.0", "pathval": "^2.0.0" } }, "sha512-4zNhdJD/iOjSH0A05ea+Ke6MU5mmpQcbQsSOkgdaUMJ9zTlDTD/GYlwohmIE2u0gaxHYiVHEn1Fw9mZ/ktJWgw=="],
"check-error": ["check-error@2.1.3", "", {}, "sha512-PAJdDJusoxnwm1VwW07VWwUN1sl7smmC3OKggvndJFadxxDRyFJBX/ggnu/KE4kQAB7a3Dp8f/YXC1FlUprWmA=="],
@@ -280,14 +333,70 @@
"clsx": ["clsx@2.1.1", "", {}, "sha512-eYm0QWBtUrBWZWG0d386OGAw16Z995PiOVo2B7bjWSbHedGl5e0ZWaq65kOGgUSNesEIDkB9ISbTg/JK9dhCZA=="],
"commander": ["commander@7.2.0", "", {}, "sha512-QrWXB+ZQSVPmIWIhtEO9H+gwHaMGYiF5ChvoJ+K9ZGHG/sVsa6yiesAD1GC/x46sET00Xlwo1u49RVVVzvcSkw=="],
"cookie": ["cookie@0.6.0", "", {}, "sha512-U71cyTamuh1CRNCfpGY6to28lxvNwPG4Guz/EVjgf3Jmzv0vlDp1atT9eS5dDjMYHucpHbWns6Lwf3BKz6svdw=="],
"css.escape": ["css.escape@1.5.1", "", {}, "sha512-YUifsXXuknHlUsmlgyY0PKzgPOr7/FjCePfHNt0jxm83wHZi44VDMQ7/fGNkjY3/jV1MC+1CmZbaHzugyeRtpg=="],
"cssesc": ["cssesc@3.0.0", "", { "bin": { "cssesc": "bin/cssesc" } }, "sha512-/Tb/JcjK111nNScGob5MNtsntNM1aCNUDipB/TkwZFhyDrrE47SOx/18wF2bbjgc3ZzCSKW1T5nt5EbFoAz/Vg=="],
"cssstyle": ["cssstyle@4.6.0", "", { "dependencies": { "@asamuzakjp/css-color": "^3.2.0", "rrweb-cssom": "^0.8.0" } }, "sha512-2z+rWdzbbSZv6/rhtvzvqeZQHrBaqgogqt85sqFNbabZOuFbCVFb8kPeEtZjiKkbrm395irpNKiYeFeLiQnFPg=="],
"culori": ["culori@4.0.2", "", {}, "sha512-1+BhOB8ahCn4O0cep0Sh2l9KCOfOdY+BXJnKMHFFzDEouSr/el18QwXEMRlOj9UY5nCeA8UN3a/82rUWRBeyBw=="],
"d3-array": ["d3-array@3.2.4", "", { "dependencies": { "internmap": "1 - 2" } }, "sha512-tdQAmyA18i4J7wprpYq8ClcxZy3SC31QMeByyCFyRt7BVHdREQZ5lpzoe5mFEYZUWe+oq8HBvk9JjpibyEV4Jg=="],
"d3-color": ["d3-color@3.1.0", "", {}, "sha512-zg/chbXyeBtMQ1LbD/WSoW2DpC3I0mpmPdW+ynRTj/x2DAWYrIY7qeZIHidozwV24m4iavr15lNwIwLxRmOxhA=="],
"d3-delaunay": ["d3-delaunay@6.0.4", "", { "dependencies": { "delaunator": "5" } }, "sha512-mdjtIZ1XLAM8bm/hx3WwjfHt6Sggek7qH043O8KEjDXN40xi3vx/6pYSVTwLjEgiXQTbvaouWKynLBiUZ6SK6A=="],
"d3-dispatch": ["d3-dispatch@3.0.1", "", {}, "sha512-rzUyPU/S7rwUflMyLc1ETDeBj0NRuHKKAcvukozwhshr6g6c5d8zh4c2gQjY2bZ0dXeGLWc1PF174P2tVvKhfg=="],
"d3-dsv": ["d3-dsv@3.0.1", "", { "dependencies": { "commander": "7", "iconv-lite": "0.6", "rw": "1" }, "bin": { "csv2json": "bin/dsv2json.js", "csv2tsv": "bin/dsv2dsv.js", "dsv2dsv": "bin/dsv2dsv.js", "dsv2json": "bin/dsv2json.js", "json2csv": "bin/json2dsv.js", "json2dsv": "bin/json2dsv.js", "json2tsv": "bin/json2dsv.js", "tsv2csv": "bin/dsv2dsv.js", "tsv2json": "bin/dsv2json.js" } }, "sha512-UG6OvdI5afDIFP9w4G0mNq50dSOsXHJaRE8arAS5o9ApWnIElp8GZw1Dun8vP8OyHOZ/QJUKUJwxiiCCnUwm+Q=="],
"d3-force": ["d3-force@3.0.0", "", { "dependencies": { "d3-dispatch": "1 - 3", "d3-quadtree": "1 - 3", "d3-timer": "1 - 3" } }, "sha512-zxV/SsA+U4yte8051P4ECydjD/S+qeYtnaIyAs9tgHCqfguma/aAQDjo85A9Z6EKhBirHRJHXIgJUlffT4wdLg=="],
"d3-format": ["d3-format@3.1.2", "", {}, "sha512-AJDdYOdnyRDV5b6ArilzCPPwc1ejkHcoyFarqlPqT7zRYjhavcT3uSrqcMvsgh2CgoPbK3RCwyHaVyxYcP2Arg=="],
"d3-geo": ["d3-geo@3.1.1", "", { "dependencies": { "d3-array": "2.5.0 - 3" } }, "sha512-637ln3gXKXOwhalDzinUgY83KzNWZRKbYubaG+fGVuc/dxO64RRljtCTnf5ecMyE1RIdtqpkVcq0IbtU2S8j2Q=="],
"d3-geo-voronoi": ["d3-geo-voronoi@2.1.0", "", { "dependencies": { "d3-array": "3", "d3-delaunay": "6", "d3-geo": "3", "d3-tricontour": "1" } }, "sha512-kqE4yYuOjPbKdBXG0xztCacPwkVSK2REF1opSNrnqqtXJmNcM++UbwQ8SxvwP6IQTj9RvIjjK4qeiVsEfj0Z2Q=="],
"d3-hierarchy": ["d3-hierarchy@3.1.2", "", {}, "sha512-FX/9frcub54beBdugHjDCdikxThEqjnR93Qt7PvQTOHxyiNCAlvMrHhclk3cD5VeAaq9fxmfRp+CnWw9rEMBuA=="],
"d3-interpolate": ["d3-interpolate@3.0.1", "", { "dependencies": { "d3-color": "1 - 3" } }, "sha512-3bYs1rOD33uo8aqJfKP3JWPAibgw8Zm2+L9vBKEHJ2Rg+viTR7o5Mmv5mZcieN+FRYaAOWX5SJATX6k1PWz72g=="],
"d3-interpolate-path": ["d3-interpolate-path@2.3.0", "", {}, "sha512-tZYtGXxBmbgHsIc9Wms6LS5u4w6KbP8C09a4/ZYc4KLMYYqub57rRBUgpUr2CIarIrJEpdAWWxWQvofgaMpbKQ=="],
"d3-path": ["d3-path@3.1.0", "", {}, "sha512-p3KP5HCf/bvjBSSKuXid6Zqijx7wIfNW+J/maPs+iwR35at5JCbLUT0LzF1cnjbCHWhqzQTIN2Jpe8pRebIEFQ=="],
"d3-quadtree": ["d3-quadtree@3.0.1", "", {}, "sha512-04xDrxQTDTCFwP5H6hRhsRcb9xxv2RzkcsygFzmkSIOJy3PeRJP7sNk3VRIbKXcog561P9oU0/rVH6vDROAgUw=="],
"d3-random": ["d3-random@3.0.1", "", {}, "sha512-FXMe9GfxTxqd5D6jFsQ+DJ8BJS4E/fT5mqqdjovykEB2oFbTMDVdg1MGFxfQW+FBOGoB++k8swBrgwSHT1cUXQ=="],
"d3-sankey": ["d3-sankey@0.12.3", "", { "dependencies": { "d3-array": "1 - 2", "d3-shape": "^1.2.0" } }, "sha512-nQhsBRmM19Ax5xEIPLMY9ZmJ/cDvd1BG3UVvt5h3WRxKg5zGRbvnteTyWAbzeSvlh3tW7ZEmq4VwR5mB3tutmQ=="],
"d3-scale": ["d3-scale@4.0.2", "", { "dependencies": { "d3-array": "2.10.0 - 3", "d3-format": "1 - 3", "d3-interpolate": "1.2.0 - 3", "d3-time": "2.1.1 - 3", "d3-time-format": "2 - 4" } }, "sha512-GZW464g1SH7ag3Y7hXjf8RoUuAFIqklOAq3MRl4OaWabTFJY9PN/E1YklhXLh+OQ3fM9yS2nOkCoS+WLZ6kvxQ=="],
"d3-scale-chromatic": ["d3-scale-chromatic@3.1.0", "", { "dependencies": { "d3-color": "1 - 3", "d3-interpolate": "1 - 3" } }, "sha512-A3s5PWiZ9YCXFye1o246KoscMWqf8BsD9eRiJ3He7C9OBaxKhAd5TFCdEx/7VbKtxxTsu//1mMJFrEt572cEyQ=="],
"d3-shape": ["d3-shape@3.2.0", "", { "dependencies": { "d3-path": "^3.1.0" } }, "sha512-SaLBuwGm3MOViRq2ABk3eLoxwZELpH6zhl3FbAoJ7Vm1gofKx6El1Ib5z23NUEhF9AsGl7y+dzLe5Cw2AArGTA=="],
"d3-tile": ["d3-tile@1.0.0", "", {}, "sha512-79fnTKpPMPDS5xQ0xuS9ir0165NEwwkFpe/DSOmc2Gl9ldYzKKRDWogmTTE8wAJ8NA7PMapNfEcyKhI9Lxdu5Q=="],
"d3-time": ["d3-time@3.1.0", "", { "dependencies": { "d3-array": "2 - 3" } }, "sha512-VqKjzBLejbSMT4IgbmVgDjpkYrNWUYJnbCGo874u7MMKIWsILRX+OpX/gTk8MqjpT1A/c6HY2dCA77ZN0lkQ2Q=="],
"d3-time-format": ["d3-time-format@4.1.0", "", { "dependencies": { "d3-time": "1 - 3" } }, "sha512-dJxPBlzC7NugB2PDLwo9Q8JiTR3M3e4/XANkreKSUxF8vvXKqm1Yfq4Q5dl8budlunRVlUUaDUgFt7eA8D6NLg=="],
"d3-timer": ["d3-timer@3.0.1", "", {}, "sha512-ndfJ/JxxMd3nw31uyKoY2naivF+r29V+Lc0svZxe1JvvIRmi8hUsrMvdOwgS1o6uBHmiz91geQ0ylPP0aj1VUA=="],
"d3-tricontour": ["d3-tricontour@1.1.0", "", { "dependencies": { "d3-delaunay": "6", "d3-scale": "4" } }, "sha512-G7gHKj89n2owmkGb6WX6ixcnQ0Kf/0wpa9VIh9DGdbHu8wdrlaHU4ir3/bFNERl8N8nn4G7e7qbtBG8N9caihQ=="],
"data-urls": ["data-urls@5.0.0", "", { "dependencies": { "whatwg-mimetype": "^4.0.0", "whatwg-url": "^14.0.0" } }, "sha512-ZYP5VBHshaDAiVZxjbRVcFJpc+4xGgT0bK3vzy1HLN8jTO975HEbuYzZJcHoQEY5K1a0z8YayJkyVETa08eNTg=="],
"date-fns": ["date-fns@4.1.0", "", {}, "sha512-Ukq0owbQXxa/U3EGtsdVBkR1w7KOQ5gIBqdH2hkvknzZPYvBxb/aa6E8L7tmjFtkwZBu3UXBbjIgPo/Ez4xaNg=="],
"debug": ["debug@4.4.3", "", { "dependencies": { "ms": "^2.1.3" } }, "sha512-RGwwWnwQvkVfavKVt22FGLw+xYSdzARwm0ru6DhTVA3umU5hZc28V3kO4stgYryrTlLpuvgI9GiijltAjNbcqA=="],
"decimal.js": ["decimal.js@10.6.0", "", {}, "sha512-YpgQiITW3JXGntzdUmyUR1V812Hn8T1YVXhCu+wO3OpS4eU9l4YdD3qjyiKdV6mvV29zapkMeD390UVEf2lkUg=="],
@@ -296,10 +405,16 @@
"deepmerge": ["deepmerge@4.3.1", "", {}, "sha512-3sUqbMEc77XqpdNO7FRyRog+eW3ph+GYCbj+rK+uYyRMuwsVy0rMiVtPn+QJlKFvWP/1PYpapqYn0Me2knFn+A=="],
"delaunator": ["delaunator@5.0.1", "", { "dependencies": { "robust-predicates": "^3.0.2" } }, "sha512-8nvh+XBe96aCESrGOqMp/84b13H9cdKbG5P2ejQCh4d4sK9RL4371qou9drQjMhvnPmhWl5hnmqbEE0fXr9Xnw=="],
"detect-libc": ["detect-libc@2.1.2", "", {}, "sha512-Btj2BOOO83o3WyH59e8MgXsxEQVcarkUOpEYrubB0urwnN10yQ364rsiByU11nZlqWYZm05i/of7io4mzihBtQ=="],
"devalue": ["devalue@5.6.2", "", {}, "sha512-nPRkjWzzDQlsejL1WVifk5rvcFi/y1onBRxjaFMjZeR9mFpqu2gmAZ9xUB9/IEanEP/vBtGeGganC/GO1fmufg=="],
"didyoumean": ["didyoumean@1.2.2", "", {}, "sha512-gxtyfqMg7GKyhQmb056K7M3xszy/myH8w+B4RT+QXBQsvAOdc3XymqDDPHx1BgPgsdAA5SIifona89YtRATDzw=="],
"dlv": ["dlv@1.1.3", "", {}, "sha512-+HlytyjlPKnIG8XuRG8WvmBP8xs8P71y+SKKS6ZXWoEgLuePxtDoUEiH7WkdePWrQ5JBpE6aoVqfZfJUQkjXwA=="],
"enhanced-resolve": ["enhanced-resolve@5.18.4", "", { "dependencies": { "graceful-fs": "^4.2.4", "tapable": "^2.2.0" } }, "sha512-LgQMM4WXU3QI+SYgEc2liRgznaD5ojbmY3sb8LxyguVkIg5FxdpTkvk72te2R38/TGKxH634oLxXRGY6d7AP+Q=="],
"entities": ["entities@6.0.1", "", {}, "sha512-aN97NXWF6AWBTahfVOIrB/NShkzi5H7F9r1s9mD3cDj4Ko5f2qhhVoYMibXF7GlLveb/D2ioWay8lxI97Ven3g=="],
@@ -316,12 +431,24 @@
"expect-type": ["expect-type@1.3.0", "", {}, "sha512-knvyeauYhqjOYvQ66MznSMs83wmHrCycNEN6Ao+2AeYEfxUIkuiVxdEa1qlGEPK+We3n0THiDciYSsCcgW/DoA=="],
"fast-glob": ["fast-glob@3.3.3", "", { "dependencies": { "@nodelib/fs.stat": "^2.0.2", "@nodelib/fs.walk": "^1.2.3", "glob-parent": "^5.1.2", "merge2": "^1.3.0", "micromatch": "^4.0.8" } }, "sha512-7MptL8U0cqcFdzIzwOTHoilX9x5BrNqye7Z/LuC7kCMRio1EMSyqRK3BEAUD7sXRq4iT4AzTVuZdhgQ2TCvYLg=="],
"fastq": ["fastq@1.20.1", "", { "dependencies": { "reusify": "^1.0.4" } }, "sha512-GGToxJ/w1x32s/D2EKND7kTil4n8OVk/9mycTc4VDza13lOvpUZTGX3mFSCtV9ksdGBVzvsyAVLM6mHFThxXxw=="],
"fdir": ["fdir@6.5.0", "", { "peerDependencies": { "picomatch": "^3 || ^4" }, "optionalPeers": ["picomatch"] }, "sha512-tIbYtZbucOs0BRGqPJkshJUYdL+SDH7dVM8gjy+ERp3WAUjLEFJE+02kanyHtwjWOnwrKYBiwAmM0p4kLJAnXg=="],
"fill-range": ["fill-range@7.1.1", "", { "dependencies": { "to-regex-range": "^5.0.1" } }, "sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg=="],
"fsevents": ["fsevents@2.3.3", "", { "os": "darwin" }, "sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw=="],
"function-bind": ["function-bind@1.1.2", "", {}, "sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA=="],
"glob-parent": ["glob-parent@6.0.2", "", { "dependencies": { "is-glob": "^4.0.3" } }, "sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A=="],
"graceful-fs": ["graceful-fs@4.2.11", "", {}, "sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ=="],
"hasown": ["hasown@2.0.2", "", { "dependencies": { "function-bind": "^1.1.2" } }, "sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ=="],
"html-encoding-sniffer": ["html-encoding-sniffer@4.0.0", "", { "dependencies": { "whatwg-encoding": "^3.1.1" } }, "sha512-Y22oTqIU4uuPgEemfz7NDJz6OeKf12Lsu+QC+s3BVpda64lTiMYCyGwg5ki4vFxkMwQdeZDl2adZoqUgdFuTgQ=="],
"http-proxy-agent": ["http-proxy-agent@7.0.2", "", { "dependencies": { "agent-base": "^7.1.0", "debug": "^4.3.4" } }, "sha512-T1gkAiYYDWYx3V5Bmyu7HcfcvL7mUrTWiM6yOfa3PIphViJ/gFPbvidQ+veqSOHci/PxBcDabeUNCzpOODJZig=="],
@@ -330,8 +457,22 @@
"iconv-lite": ["iconv-lite@0.6.3", "", { "dependencies": { "safer-buffer": ">= 2.1.2 < 3.0.0" } }, "sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw=="],
"immer": ["immer@10.2.0", "", {}, "sha512-d/+XTN3zfODyjr89gM3mPq1WNX2B8pYsu7eORitdwyA2sBubnTl3laYlBk4sXY5FUa5qTZGBDPJICVbvqzjlbw=="],
"inline-style-parser": ["inline-style-parser@0.2.7", "", {}, "sha512-Nb2ctOyNR8DqQoR0OwRG95uNWIC0C1lCgf5Naz5H6Ji72KZ8OcFZLz2P5sNgwlyoJ8Yif11oMuYs5pBQa86csA=="],
"internmap": ["internmap@2.0.3", "", {}, "sha512-5Hh7Y1wQbvY5ooGgPbDaL5iYLAPzMTUrjMulskHLH6wnv/A+1q5rgEaiuqEjB+oxGXIVZs1FF+R/KPN3ZSQYYg=="],
"is-binary-path": ["is-binary-path@2.1.0", "", { "dependencies": { "binary-extensions": "^2.0.0" } }, "sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw=="],
"is-core-module": ["is-core-module@2.16.1", "", { "dependencies": { "hasown": "^2.0.2" } }, "sha512-UfoeMA6fIJ8wTYFEUjelnaGI67v6+N7qXJEvQuIGa99l4xsCruSYOVSQ0uPANn4dAzm8lkYPaKLrrijLq7x23w=="],
"is-extglob": ["is-extglob@2.1.1", "", {}, "sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ=="],
"is-glob": ["is-glob@4.0.3", "", { "dependencies": { "is-extglob": "^2.1.1" } }, "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg=="],
"is-number": ["is-number@7.0.0", "", {}, "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng=="],
"is-potential-custom-element-name": ["is-potential-custom-element-name@1.0.1", "", {}, "sha512-bCYeRA2rVibKZd+s2625gGnGF/t7DSqDs4dP7CrLA1m7jKWz6pps0LpYLJN8Q64HtmPKJ1hrN3nzPNKFEKOUiQ=="],
"is-reference": ["is-reference@3.0.3", "", { "dependencies": { "@types/estree": "^1.0.6" } }, "sha512-ixkJoqQvAP88E6wLydLGGqCJsrFUnqoH6HnaczB8XmDH1oaWU+xxdptvikTgaEhtZ53Ky6YXiBuUI2WXLMCwjw=="],
@@ -344,6 +485,10 @@
"kleur": ["kleur@4.1.5", "", {}, "sha512-o+NO+8WrRiQEE4/7nwRJhN1HWpVmJm511pBHUxPLtp0BUISzlBplORYSmTclCnJvQq2tKu/sgl3xVpkc7ZWuQQ=="],
"layercake": ["layercake@8.4.3", "", { "dependencies": { "d3-array": "^3.2.4", "d3-color": "^3.1.0", "d3-scale": "^4.0.2", "d3-shape": "^3.2.0" }, "peerDependencies": { "svelte": "3 - 5 || >=5.0.0-next.120", "typescript": "^5.0.2" } }, "sha512-PZDduaPFxgHHkxlmsz5MVBECf6ZCT39DI3LgMVvuMwrmlrtlXwXUM/elJp46zHYzCE1j+cGyDuBDxnANv94tOQ=="],
"layerchart": ["layerchart@1.0.13", "", { "dependencies": { "@dagrejs/dagre": "^1.1.4", "@layerstack/svelte-actions": "^1.0.1", "@layerstack/svelte-stores": "^1.0.2", "@layerstack/tailwind": "^1.0.1", "@layerstack/utils": "^1.0.1", "d3-array": "^3.2.4", "d3-color": "^3.1.0", "d3-delaunay": "^6.0.4", "d3-dsv": "^3.0.1", "d3-force": "^3.0.0", "d3-geo": "^3.1.1", "d3-geo-voronoi": "^2.1.0", "d3-hierarchy": "^3.1.2", "d3-interpolate": "^3.0.1", "d3-interpolate-path": "^2.3.0", "d3-path": "^3.1.0", "d3-quadtree": "^3.0.1", "d3-random": "^3.0.1", "d3-sankey": "^0.12.3", "d3-scale": "^4.0.2", "d3-scale-chromatic": "^3.1.0", "d3-shape": "^3.2.0", "d3-tile": "^1.0.0", "d3-time": "^3.1.0", "date-fns": "^4.1.0", "layercake": "8.4.3", "lodash-es": "^4.17.21" }, "peerDependencies": { "svelte": "^3.56.0 || ^4.0.0 || ^5.0.0" } }, "sha512-bjcrfyTdHtfYZn7yj26dvA1qUjM+R6+akp2VeBJ4JWKmDGhb5WvT9nMCs52Rb+gSd/omFq5SjZLz49MqlVljZw=="],
"lightningcss": ["lightningcss@1.30.2", "", { "dependencies": { "detect-libc": "^2.0.3" }, "optionalDependencies": { "lightningcss-android-arm64": "1.30.2", "lightningcss-darwin-arm64": "1.30.2", "lightningcss-darwin-x64": "1.30.2", "lightningcss-freebsd-x64": "1.30.2", "lightningcss-linux-arm-gnueabihf": "1.30.2", "lightningcss-linux-arm64-gnu": "1.30.2", "lightningcss-linux-arm64-musl": "1.30.2", "lightningcss-linux-x64-gnu": "1.30.2", "lightningcss-linux-x64-musl": "1.30.2", "lightningcss-win32-arm64-msvc": "1.30.2", "lightningcss-win32-x64-msvc": "1.30.2" } }, "sha512-utfs7Pr5uJyyvDETitgsaqSyjCb2qNRAtuqUeWIAKztsOYdcACf2KtARYXg2pSvhkt+9NfoaNY7fxjl6nuMjIQ=="],
"lightningcss-android-arm64": ["lightningcss-android-arm64@1.30.2", "", { "os": "android", "cpu": "arm64" }, "sha512-BH9sEdOCahSgmkVhBLeU7Hc9DWeZ1Eb6wNS6Da8igvUwAe0sqROHddIlvU06q3WyXVEOYDZ6ykBZQnjTbmo4+A=="],
@@ -368,30 +513,50 @@
"lightningcss-win32-x64-msvc": ["lightningcss-win32-x64-msvc@1.30.2", "", { "os": "win32", "cpu": "x64" }, "sha512-5g1yc73p+iAkid5phb4oVFMB45417DkRevRbt/El/gKXJk4jid+vPFF/AXbxn05Aky8PapwzZrdJShv5C0avjw=="],
"lilconfig": ["lilconfig@3.1.3", "", {}, "sha512-/vlFKAoH5Cgt3Ie+JLhRbwOsCQePABiU3tJ1egGvyQ+33R/vcwM2Zl2QR/LzjsBeItPt3oSVXapn+m4nQDvpzw=="],
"lines-and-columns": ["lines-and-columns@1.2.4", "", {}, "sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg=="],
"locate-character": ["locate-character@3.0.0", "", {}, "sha512-SW13ws7BjaeJ6p7Q6CO2nchbYEc3X3J6WrmTTDto7yMPqVSZTUyY5Tjbid+Ab8gLnATtygYtiDIJGQRRn2ZOiA=="],
"lodash-es": ["lodash-es@4.17.23", "", {}, "sha512-kVI48u3PZr38HdYz98UmfPnXl2DXrpdctLrFLCd3kOx1xUkOmpFPx7gCWWM5MPkL/fD8zb+Ph0QzjGFs4+hHWg=="],
"loupe": ["loupe@3.2.1", "", {}, "sha512-CdzqowRJCeLU72bHvWqwRBBlLcMEtIvGrlvef74kMnV2AolS9Y8xUv1I0U/MNAWMhBlKIoyuEgoJ0t/bbwHbLQ=="],
"lru-cache": ["lru-cache@10.4.3", "", {}, "sha512-JNAzZcXrCt42VGLuYz0zfAzDfAvJWW6AfYlDBQyDV5DClI2m5sAmK+OIO7s59XfsRsWHp02jAJrRadPRGTt6SQ=="],
"magic-string": ["magic-string@0.30.21", "", { "dependencies": { "@jridgewell/sourcemap-codec": "^1.5.5" } }, "sha512-vd2F4YUyEXKGcLHoq+TEyCjxueSeHnFxyyjNp80yg0XV4vUhnDer/lvvlqM/arB5bXQN5K2/3oinyCRyx8T2CQ=="],
"merge2": ["merge2@1.4.1", "", {}, "sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg=="],
"micromatch": ["micromatch@4.0.8", "", { "dependencies": { "braces": "^3.0.3", "picomatch": "^2.3.1" } }, "sha512-PXwfBhYu0hBCPw8Dn0E+WDYb7af3dSLVWKi3HGv84IdF4TyFoC0ysxFd0Goxw7nSv4T/PzEJQxsYsEiFCKo2BA=="],
"mri": ["mri@1.2.0", "", {}, "sha512-tzzskb3bG8LvYGFF/mDTpq3jpI6Q9wc3LEmBaghu+DdCssd1FakN7Bc0hVNmEyGq1bq3RgfkCb3cmQLpNPOroA=="],
"mrmime": ["mrmime@2.0.1", "", {}, "sha512-Y3wQdFg2Va6etvQ5I82yUhGdsKrcYox6p7FfL1LbK2J4V01F9TGlepTIhnK24t7koZibmg82KGglhA1XK5IsLQ=="],
"ms": ["ms@2.1.3", "", {}, "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA=="],
"mz": ["mz@2.7.0", "", { "dependencies": { "any-promise": "^1.0.0", "object-assign": "^4.0.1", "thenify-all": "^1.0.0" } }, "sha512-z81GNO7nnYMEhrGh9LeymoE4+Yr0Wn5McHIZMK5cfQCl+NDX08sCZgUc9/6MHni9IWuFLm1Z3HTCXu2z9fN62Q=="],
"nanoid": ["nanoid@3.3.11", "", { "bin": { "nanoid": "bin/nanoid.cjs" } }, "sha512-N8SpfPUnUp1bK+PMYW8qSWdl9U+wwNWI4QKxOYDy9JAro3WMX7p2OeVRF9v+347pnakNevPmiHhNmZ2HbFA76w=="],
"normalize-path": ["normalize-path@3.0.0", "", {}, "sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA=="],
"nwsapi": ["nwsapi@2.2.23", "", {}, "sha512-7wfH4sLbt4M0gCDzGE6vzQBo0bfTKjU7Sfpqy/7gs1qBfYz2vEJH6vXcBKpO3+6Yu1telwd0t9HpyOoLEQQbIQ=="],
"object-assign": ["object-assign@4.1.1", "", {}, "sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg=="],
"object-hash": ["object-hash@3.0.0", "", {}, "sha512-RSn9F68PjH9HqtltsSnqYC1XXoWe9Bju5+213R98cNGttag9q9yAOTzdbsqvIa7aNm5WffBZFpWYr2aWrklWAw=="],
"overlayscrollbars": ["overlayscrollbars@2.14.0", "", {}, "sha512-RjV0pqc79kYhQLC3vTcLRb5GLpI1n6qh0Oua3g+bGH4EgNOJHVBGP7u0zZtxoAa0dkHlAqTTSYRb9MMmxNLjig=="],
"overlayscrollbars-svelte": ["overlayscrollbars-svelte@0.5.5", "", { "peerDependencies": { "overlayscrollbars": "^2.0.0", "svelte": "^5.0.0" } }, "sha512-+dRW3YZSvFbKi5vDCpnUOHuoPLLSdu0BUVVMYZdmfVghu7XkafDRebG2y91/ImPqj6YDAUsz1rcWVYhCJSS/pQ=="],
"parse5": ["parse5@7.3.0", "", { "dependencies": { "entities": "^6.0.0" } }, "sha512-IInvU7fabl34qmi9gY8XOVxhYyMyuH2xUNpb2q8/Y+7552KlejkRvqvD19nMoUW/uQGGbqNpA6Tufu5FL5BZgw=="],
"path-parse": ["path-parse@1.0.7", "", {}, "sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw=="],
"pathe": ["pathe@2.0.3", "", {}, "sha512-WUjGcAqP1gQacoQe+OBJsFA7Ld4DyXuUIjZ5cc75cLHvJ7dtNsTugphxIADwspS+AraAUePCKrSVtPLFj/F88w=="],
"pathval": ["pathval@2.0.1", "", {}, "sha512-//nshmD55c46FuFw26xV/xFAaB5HF9Xdap7HJBBnrKdAd6/GxDBaNA1870O79+9ueg61cZLSVc+OaFlfmObYVQ=="],
@@ -400,18 +565,48 @@
"picomatch": ["picomatch@4.0.3", "", {}, "sha512-5gTmgEY/sqK6gFXLIsQNH19lWb4ebPDLA4SdLP7dsWkIXHWlG66oPuVvXSGFPppYZz8ZDZq0dYYrbHfBCVUb1Q=="],
"pify": ["pify@2.3.0", "", {}, "sha512-udgsAY+fTnvv7kI7aaxbqwWNb0AHiB0qBO89PZKPkoTmGOgdbrHDKD+0B2X4uTfJ/FT1R09r9gTsjUjNJotuog=="],
"pirates": ["pirates@4.0.7", "", {}, "sha512-TfySrs/5nm8fQJDcBDuUng3VOUKsd7S+zqvbOTiGXHfxX4wK31ard+hoNuvkicM/2YFzlpDgABOevKSsB4G/FA=="],
"postcss": ["postcss@8.5.6", "", { "dependencies": { "nanoid": "^3.3.11", "picocolors": "^1.1.1", "source-map-js": "^1.2.1" } }, "sha512-3Ybi1tAuwAP9s0r1UQ2J4n5Y0G05bJkpUIO0/bI9MhwmD70S5aTWbXGBwxHrelT+XM1k6dM0pk+SwNkpTRN7Pg=="],
"postcss-import": ["postcss-import@15.1.0", "", { "dependencies": { "postcss-value-parser": "^4.0.0", "read-cache": "^1.0.0", "resolve": "^1.1.7" }, "peerDependencies": { "postcss": "^8.0.0" } }, "sha512-hpr+J05B2FVYUAXHeK1YyI267J/dDDhMU6B6civm8hSY1jYJnBXxzKDKDswzJmtLHryrjhnDjqqp/49t8FALew=="],
"postcss-js": ["postcss-js@4.1.0", "", { "dependencies": { "camelcase-css": "^2.0.1" }, "peerDependencies": { "postcss": "^8.4.21" } }, "sha512-oIAOTqgIo7q2EOwbhb8UalYePMvYoIeRY2YKntdpFQXNosSu3vLrniGgmH9OKs/qAkfoj5oB3le/7mINW1LCfw=="],
"postcss-load-config": ["postcss-load-config@6.0.1", "", { "dependencies": { "lilconfig": "^3.1.1" }, "peerDependencies": { "jiti": ">=1.21.0", "postcss": ">=8.0.9", "tsx": "^4.8.1", "yaml": "^2.4.2" }, "optionalPeers": ["jiti", "postcss", "tsx", "yaml"] }, "sha512-oPtTM4oerL+UXmx+93ytZVN82RrlY/wPUV8IeDxFrzIjXOLF1pN+EmKPLbubvKHT2HC20xXsCAH2Z+CKV6Oz/g=="],
"postcss-nested": ["postcss-nested@6.2.0", "", { "dependencies": { "postcss-selector-parser": "^6.1.1" }, "peerDependencies": { "postcss": "^8.2.14" } }, "sha512-HQbt28KulC5AJzG+cZtj9kvKB93CFCdLvog1WFLf1D+xmMvPGlBstkpTEZfK5+AN9hfJocyBFCNiqyS48bpgzQ=="],
"postcss-selector-parser": ["postcss-selector-parser@6.1.2", "", { "dependencies": { "cssesc": "^3.0.0", "util-deprecate": "^1.0.2" } }, "sha512-Q8qQfPiZ+THO/3ZrOrO0cJJKfpYCagtMUkXbnEfmgUjwXg6z/WBeOyS9APBBPCTSiDV+s4SwQGu8yFsiMRIudg=="],
"postcss-value-parser": ["postcss-value-parser@4.2.0", "", {}, "sha512-1NNCs6uurfkVbeXG4S8JFT9t19m45ICnif8zWLd5oPSZ50QnwMfK+H3jv408d4jw/7Bttv5axS5IiHoLaVNHeQ=="],
"punycode": ["punycode@2.3.1", "", {}, "sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg=="],
"queue-microtask": ["queue-microtask@1.2.3", "", {}, "sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A=="],
"read-cache": ["read-cache@1.0.0", "", { "dependencies": { "pify": "^2.3.0" } }, "sha512-Owdv/Ft7IjOgm/i0xvNDZ1LrRANRfew4b2prF3OWMQLxLfu3bS8FVhCsrSCMK4lR56Y9ya+AThoTpDCTxCmpRA=="],
"readdirp": ["readdirp@4.1.2", "", {}, "sha512-GDhwkLfywWL2s6vEjyhri+eXmfH6j1L7JE27WhqLeYzoh/A3DBaYGEj2H/HFZCn/kMfim73FXxEJTw06WtxQwg=="],
"resolve": ["resolve@1.22.11", "", { "dependencies": { "is-core-module": "^2.16.1", "path-parse": "^1.0.7", "supports-preserve-symlinks-flag": "^1.0.0" }, "bin": { "resolve": "bin/resolve" } }, "sha512-RfqAvLnMl313r7c9oclB1HhUEAezcpLjz95wFH4LVuhk9JF/r22qmVP9AMmOU4vMX7Q8pN8jwNg/CSpdFnMjTQ=="],
"reusify": ["reusify@1.1.0", "", {}, "sha512-g6QUff04oZpHs0eG5p83rFLhHeV00ug/Yf9nZM6fLeUrPguBTkTQOdpAWWspMh55TZfVQDPaN3NQJfbVRAxdIw=="],
"robust-predicates": ["robust-predicates@3.0.2", "", {}, "sha512-IXgzBWvWQwE6PrDI05OvmXUIruQTcoMDzRsOd5CDvHCVLcLHMTSYvOK5Cm46kWqlV3yAbuSpBZdJ5oP5OUoStg=="],
"rollup": ["rollup@4.57.0", "", { "dependencies": { "@types/estree": "1.0.8" }, "optionalDependencies": { "@rollup/rollup-android-arm-eabi": "4.57.0", "@rollup/rollup-android-arm64": "4.57.0", "@rollup/rollup-darwin-arm64": "4.57.0", "@rollup/rollup-darwin-x64": "4.57.0", "@rollup/rollup-freebsd-arm64": "4.57.0", "@rollup/rollup-freebsd-x64": "4.57.0", "@rollup/rollup-linux-arm-gnueabihf": "4.57.0", "@rollup/rollup-linux-arm-musleabihf": "4.57.0", "@rollup/rollup-linux-arm64-gnu": "4.57.0", "@rollup/rollup-linux-arm64-musl": "4.57.0", "@rollup/rollup-linux-loong64-gnu": "4.57.0", "@rollup/rollup-linux-loong64-musl": "4.57.0", "@rollup/rollup-linux-ppc64-gnu": "4.57.0", "@rollup/rollup-linux-ppc64-musl": "4.57.0", "@rollup/rollup-linux-riscv64-gnu": "4.57.0", "@rollup/rollup-linux-riscv64-musl": "4.57.0", "@rollup/rollup-linux-s390x-gnu": "4.57.0", "@rollup/rollup-linux-x64-gnu": "4.57.0", "@rollup/rollup-linux-x64-musl": "4.57.0", "@rollup/rollup-openbsd-x64": "4.57.0", "@rollup/rollup-openharmony-arm64": "4.57.0", "@rollup/rollup-win32-arm64-msvc": "4.57.0", "@rollup/rollup-win32-ia32-msvc": "4.57.0", "@rollup/rollup-win32-x64-gnu": "4.57.0", "@rollup/rollup-win32-x64-msvc": "4.57.0", "fsevents": "~2.3.2" }, "bin": { "rollup": "dist/bin/rollup" } }, "sha512-e5lPJi/aui4TO1LpAXIRLySmwXSE8k3b9zoGfd42p67wzxog4WHjiZF3M2uheQih4DGyc25QEV4yRBbpueNiUA=="],
"rrweb-cssom": ["rrweb-cssom@0.8.0", "", {}, "sha512-guoltQEx+9aMf2gDZ0s62EcV8lsXR+0w8915TC3ITdn2YueuNjdAYh/levpU9nFaoChh9RUS5ZdQMrKfVEN9tw=="],
"run-parallel": ["run-parallel@1.2.0", "", { "dependencies": { "queue-microtask": "^1.2.2" } }, "sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA=="],
"runed": ["runed@0.23.4", "", { "dependencies": { "esm-env": "^1.0.0" }, "peerDependencies": { "svelte": "^5.7.0" } }, "sha512-9q8oUiBYeXIDLWNK5DfCWlkL0EW3oGbk845VdKlPeia28l751VpfesaB/+7pI6rnbx1I6rqoZ2fZxptOJLxILA=="],
"rw": ["rw@1.3.3", "", {}, "sha512-PdhdWy89SiZogBLaw42zdeqtRJ//zFd2PgQavcICDUgJT5oW10QCRKbJ6bg4r0/UY2M6BWd5tkxuGFRvCkgfHQ=="],
"sade": ["sade@1.8.1", "", { "dependencies": { "mri": "^1.1.0" } }, "sha512-xal3CZX1Xlo/k4ApwCFrHVACi9fBqJ7V+mwhBsuf/1IOKbBy098Fex+Wa/5QMubw09pSZ/u8EY8PWgevJsXp1A=="],
"safer-buffer": ["safer-buffer@2.1.2", "", {}, "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg=="],
@@ -434,6 +629,10 @@
"style-to-object": ["style-to-object@1.0.14", "", { "dependencies": { "inline-style-parser": "0.2.7" } }, "sha512-LIN7rULI0jBscWQYaSswptyderlarFkjQ+t79nzty8tcIAceVomEVlLzH5VP4Cmsv6MtKhs7qaAiwlcp+Mgaxw=="],
"sucrase": ["sucrase@3.35.1", "", { "dependencies": { "@jridgewell/gen-mapping": "^0.3.2", "commander": "^4.0.0", "lines-and-columns": "^1.1.6", "mz": "^2.7.0", "pirates": "^4.0.1", "tinyglobby": "^0.2.11", "ts-interface-checker": "^0.1.9" }, "bin": { "sucrase": "bin/sucrase", "sucrase-node": "bin/sucrase-node" } }, "sha512-DhuTmvZWux4H1UOnWMB3sk0sbaCVOoQZjv8u1rDoTV0HTdGem9hkAZtl4JZy8P2z4Bg0nT+YMeOFyVr4zcG5Tw=="],
"supports-preserve-symlinks-flag": ["supports-preserve-symlinks-flag@1.0.0", "", {}, "sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w=="],
"svelte": ["svelte@5.49.0", "", { "dependencies": { "@jridgewell/remapping": "^2.3.4", "@jridgewell/sourcemap-codec": "^1.5.0", "@sveltejs/acorn-typescript": "^1.0.5", "@types/estree": "^1.0.5", "acorn": "^8.12.1", "aria-query": "^5.3.1", "axobject-query": "^4.1.0", "clsx": "^2.1.1", "devalue": "^5.6.2", "esm-env": "^1.2.1", "esrap": "^2.2.1", "is-reference": "^3.0.3", "locate-character": "^3.0.0", "magic-string": "^0.30.11", "zimmerframe": "^1.1.2" } }, "sha512-Fn2mCc3XX0gnnbBYzWOTrZHi5WnF9KvqmB1+KGlUWoJkdioPmFYtg2ALBr6xl2dcnFTz3Vi7/mHpbKSVg/imVg=="],
"svelte-check": ["svelte-check@4.3.5", "", { "dependencies": { "@jridgewell/trace-mapping": "^0.3.25", "chokidar": "^4.0.1", "fdir": "^6.2.0", "picocolors": "^1.0.0", "sade": "^1.7.4" }, "peerDependencies": { "svelte": "^4.0.0 || ^5.0.0-next.0", "typescript": ">=5.0.0" }, "bin": { "svelte-check": "bin/svelte-check" } }, "sha512-e4VWZETyXaKGhpkxOXP+B/d0Fp/zKViZoJmneZWe/05Y2aqSKj3YN2nLfYPJBQ87WEiY4BQCQ9hWGu9mPT1a1Q=="],
@@ -450,6 +649,10 @@
"tapable": ["tapable@2.3.0", "", {}, "sha512-g9ljZiwki/LfxmQADO3dEY1CbpmXT5Hm2fJ+QaGKwSXUylMybePR7/67YW7jOrrvjEgL1Fmz5kzyAjWVWLlucg=="],
"thenify": ["thenify@3.3.1", "", { "dependencies": { "any-promise": "^1.0.0" } }, "sha512-RVZSIV5IG10Hk3enotrhvz0T9em6cyHBLkH/YAZuKqd8hRkKhSfCGIcP2KUY0EPxndzANBmNllzWPwak+bheSw=="],
"thenify-all": ["thenify-all@1.6.0", "", { "dependencies": { "thenify": ">= 3.1.0 < 4" } }, "sha512-RNxQH/qI8/t3thXJDwcstUO4zeqo64+Uy/+sNVRBx4Xn2OX+OZ9oP+iJnNFqplFra2ZUVeKCSa2oVWi3T4uVmA=="],
"tinybench": ["tinybench@2.9.0", "", {}, "sha512-0+DUvqWMValLmha6lr4kD8iAMK1HzV0/aKnCtWb9v9641TnP/MFb7Pc2bxoxQjTXAErryXVgUOfv2YqNllqGeg=="],
"tinyexec": ["tinyexec@0.3.2", "", {}, "sha512-KQQR9yN7R5+OSwaK0XQoj22pwHoTlgYqmUscPYoknOoWCWfj/5/ABTMRi69FrKU5ffPVh5QcFikpWJI/P1ocHA=="],
@@ -466,18 +669,24 @@
"tldts-core": ["tldts-core@6.1.86", "", {}, "sha512-Je6p7pkk+KMzMv2XXKmAE3McmolOQFdxkKw0R8EYNr7sELW46JqnNeTX8ybPiQgvg1ymCoF8LXs5fzFaZvJPTA=="],
"to-regex-range": ["to-regex-range@5.0.1", "", { "dependencies": { "is-number": "^7.0.0" } }, "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ=="],
"totalist": ["totalist@3.0.1", "", {}, "sha512-sf4i37nQ2LBx4m3wB74y+ubopq6W/dIzXg0FDGjsYnZHVa1Da8FH853wlL2gtUhg+xJXjfk3kUZS3BRoQeoQBQ=="],
"tough-cookie": ["tough-cookie@5.1.2", "", { "dependencies": { "tldts": "^6.1.32" } }, "sha512-FVDYdxtnj0G6Qm/DhNPSb8Ju59ULcup3tuJxkFb5K8Bv2pUXILbf0xZWU8PX8Ov19OXljbUyveOFwRMwkXzO+A=="],
"tr46": ["tr46@5.1.1", "", { "dependencies": { "punycode": "^2.3.1" } }, "sha512-hdF5ZgjTqgAntKkklYw0R03MG2x/bSzTtkxmIRw/sTNV8YXsCJ1tfLAX23lhxhHJlEf3CRCOCGGWw3vI3GaSPw=="],
"ts-interface-checker": ["ts-interface-checker@0.1.13", "", {}, "sha512-Y/arvbn+rrz3JCKl9C4kVNfTfSm2/mEp5FSz5EsZSANGPSlQrpRI5M4PKF+mJnE52jOO90PnPSc3Ur3bTQw0gA=="],
"tslib": ["tslib@2.8.1", "", {}, "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w=="],
"typescript": ["typescript@5.9.3", "", { "bin": { "tsc": "bin/tsc", "tsserver": "bin/tsserver" } }, "sha512-jl1vZzPDinLr9eUt3J/t7V6FgNEw9QjvBPdysz9KfQDD41fQrC2Y4vKQdiaUpFT4bXlb1RHhLpp8wtm6M5TgSw=="],
"undici-types": ["undici-types@7.16.0", "", {}, "sha512-Zz+aZWSj8LE6zoxD+xrjh4VfkIG8Ya6LvYkZqtUQGJPZjYl53ypCaUwWqo7eI0x66KBGeRo+mlBEkMSeSZ38Nw=="],
"util-deprecate": ["util-deprecate@1.0.2", "", {}, "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw=="],
"vite": ["vite@6.4.1", "", { "dependencies": { "esbuild": "^0.25.0", "fdir": "^6.4.4", "picomatch": "^4.0.2", "postcss": "^8.5.3", "rollup": "^4.34.9", "tinyglobby": "^0.2.13" }, "optionalDependencies": { "fsevents": "~2.3.3" }, "peerDependencies": { "@types/node": "^18.0.0 || ^20.0.0 || >=22.0.0", "jiti": ">=1.21.0", "less": "*", "lightningcss": "^1.21.0", "sass": "*", "sass-embedded": "*", "stylus": "*", "sugarss": "*", "terser": "^5.16.0", "tsx": "^4.8.1", "yaml": "^2.4.2" }, "optionalPeers": ["@types/node", "jiti", "less", "lightningcss", "sass", "sass-embedded", "stylus", "sugarss", "terser", "tsx", "yaml"], "bin": { "vite": "bin/vite.js" } }, "sha512-+Oxm7q9hDoLMyJOYfUYBuHQo+dkAloi33apOPP56pzj+vsdJDzr+j1NISE5pyaAuKL4A3UD34qd0lx5+kfKp2g=="],
"vite-node": ["vite-node@3.2.4", "", { "dependencies": { "cac": "^6.7.14", "debug": "^4.4.1", "es-module-lexer": "^1.7.0", "pathe": "^2.0.3", "vite": "^5.0.0 || ^6.0.0 || ^7.0.0-0" }, "bin": { "vite-node": "vite-node.mjs" } }, "sha512-EbKSKh+bh1E1IFxeO0pg1n4dvoOTt0UDiXMd/qn++r98+jPO1xtJilvXldeuQ8giIB5IkpjCgMleHMNEsGH6pg=="],
@@ -506,6 +715,12 @@
"zimmerframe": ["zimmerframe@1.1.4", "", {}, "sha512-B58NGBEoc8Y9MWWCQGl/gq9xBCe4IiKM0a2x7GZdQKOW5Exr8S1W24J6OgM1njK8xCRGvAJIL/MxXHf6SkmQKQ=="],
"zod": ["zod@3.25.76", "", {}, "sha512-gzUt/qt81nXsFGKIFcC3YnfEAx5NkunCfnDlvuBSSFS02bcXu4Lmea0AFIUwbLWxWPx3d9p8S5QoaujKcNQxcQ=="],
"@layerstack/tailwind/tailwind-merge": ["tailwind-merge@2.6.0", "", {}, "sha512-P+Vu1qXfzediirmHOC3xKGAYeZtPcV9g76X+xg2FD4tYgR71ewMA35Y3sCz3zhiN/dwefRpJX0yBcgwi1fXNQA=="],
"@layerstack/tailwind/tailwindcss": ["tailwindcss@3.4.19", "", { "dependencies": { "@alloc/quick-lru": "^5.2.0", "arg": "^5.0.2", "chokidar": "^3.6.0", "didyoumean": "^1.2.2", "dlv": "^1.1.3", "fast-glob": "^3.3.2", "glob-parent": "^6.0.2", "is-glob": "^4.0.3", "jiti": "^1.21.7", "lilconfig": "^3.1.3", "micromatch": "^4.0.8", "normalize-path": "^3.0.0", "object-hash": "^3.0.0", "picocolors": "^1.1.1", "postcss": "^8.4.47", "postcss-import": "^15.1.0", "postcss-js": "^4.0.1", "postcss-load-config": "^4.0.2 || ^5.0 || ^6.0", "postcss-nested": "^6.2.0", "postcss-selector-parser": "^6.1.2", "resolve": "^1.22.8", "sucrase": "^3.35.0" }, "bin": { "tailwind": "lib/cli.js", "tailwindcss": "lib/cli.js" } }, "sha512-3ofp+LL8E+pK/JuPLPggVAIaEuhvIz4qNcf3nA1Xn2o/7fb7s/TYpHhwGDv1ZU3PkBluUVaF8PyCHcm48cKLWQ=="],
"@tailwindcss/oxide-wasm32-wasi/@emnapi/core": ["@emnapi/core@1.8.1", "", { "dependencies": { "@emnapi/wasi-threads": "1.1.0", "tslib": "^2.4.0" }, "bundled": true }, "sha512-AvT9QFpxK0Zd8J0jopedNm+w/2fIzvtPKPjqyw9jwvBaReTTqPBk9Hixaz7KbjimP+QNz605/XnjFcDAL2pqBg=="],
"@tailwindcss/oxide-wasm32-wasi/@emnapi/runtime": ["@emnapi/runtime@1.8.1", "", { "dependencies": { "tslib": "^2.4.0" }, "bundled": true }, "sha512-mehfKSMWjjNol8659Z8KxEMrdSJDDot5SXMq00dM8BN4o+CLNXQ0xH2V7EchNHV4RmbZLmmPdEaXZc5H2FXmDg=="],
@@ -517,5 +732,31 @@
"@tailwindcss/oxide-wasm32-wasi/@tybys/wasm-util": ["@tybys/wasm-util@0.10.1", "", { "dependencies": { "tslib": "^2.4.0" }, "bundled": true }, "sha512-9tTaPJLSiejZKx+Bmog4uSubteqTvFrVrURwkmHixBo0G4seD0zUxp98E1DzUBJxLQ3NPwXrGKDiVjwx/DpPsg=="],
"@tailwindcss/oxide-wasm32-wasi/tslib": ["tslib@2.8.1", "", { "bundled": true }, "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w=="],
"anymatch/picomatch": ["picomatch@2.3.1", "", {}, "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA=="],
"d3-sankey/d3-array": ["d3-array@2.12.1", "", { "dependencies": { "internmap": "^1.0.0" } }, "sha512-B0ErZK/66mHtEsR1TkPEEkwdy+WDesimkM5gpZr5Dsg54BiTA5RXtYW5qTLIAcekaS9xfZrzBLF/OAkB3Qn1YQ=="],
"d3-sankey/d3-shape": ["d3-shape@1.3.7", "", { "dependencies": { "d3-path": "1" } }, "sha512-EUkvKjqPFUAZyOlhY5gzCxCeI0Aep04LwIRpsZ/mLFelJiUfnK56jo5JMDSE7yyP2kLSb6LtF+S5chMk7uqPqw=="],
"fast-glob/glob-parent": ["glob-parent@5.1.2", "", { "dependencies": { "is-glob": "^4.0.1" } }, "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow=="],
"micromatch/picomatch": ["picomatch@2.3.1", "", {}, "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA=="],
"sucrase/commander": ["commander@4.1.1", "", {}, "sha512-NOKm8xhkzAjzFx8B2v5OAHT+u5pRQc2UCa2Vq9jYL/31o2wi9mxBA7LIFs3sV5VSC49z6pEhfbMULvShKj26WA=="],
"@layerstack/tailwind/tailwindcss/chokidar": ["chokidar@3.6.0", "", { "dependencies": { "anymatch": "~3.1.2", "braces": "~3.0.2", "glob-parent": "~5.1.2", "is-binary-path": "~2.1.0", "is-glob": "~4.0.1", "normalize-path": "~3.0.0", "readdirp": "~3.6.0" }, "optionalDependencies": { "fsevents": "~2.3.2" } }, "sha512-7VT13fmjotKpGipCW9JEQAusEPE+Ei8nl6/g4FBAmIm0GOOLMua9NDDo/DWp0ZAxCr3cPq5ZpBqmPAQgDda2Pw=="],
"@layerstack/tailwind/tailwindcss/jiti": ["jiti@1.21.7", "", { "bin": { "jiti": "bin/jiti.js" } }, "sha512-/imKNG4EbWNrVjoNC/1H5/9GFy+tqjGBHCaSsN+P2RnPqjsLmv6UD3Ej+Kj8nBWaRAwyk7kK5ZUc+OEatnTR3A=="],
"d3-sankey/d3-array/internmap": ["internmap@1.0.1", "", {}, "sha512-lDB5YccMydFBtasVtxnZ3MRBHuaoE8GKsppq+EchKL2U4nK/DmEpPHNH8MZe5HkMtpSiTSOZwfN0tzYjO/lJEw=="],
"d3-sankey/d3-shape/d3-path": ["d3-path@1.0.9", "", {}, "sha512-VLaYcn81dtHVTjEHd8B+pbe9yHWpXKZUC87PzoFmsFrJqgFwDe/qxfp5MlfsfM1V5E/iVt0MmEbWQ7FVIXh/bg=="],
"@layerstack/tailwind/tailwindcss/chokidar/glob-parent": ["glob-parent@5.1.2", "", { "dependencies": { "is-glob": "^4.0.1" } }, "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow=="],
"@layerstack/tailwind/tailwindcss/chokidar/readdirp": ["readdirp@3.6.0", "", { "dependencies": { "picomatch": "^2.2.1" } }, "sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA=="],
"@layerstack/tailwind/tailwindcss/chokidar/readdirp/picomatch": ["picomatch@2.3.1", "", {}, "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA=="],
}
}
+9
@@ -20,6 +20,9 @@
"@sveltejs/vite-plugin-svelte": "^5.0.3",
"@tailwindcss/vite": "^4.0.0",
"@tanstack/table-core": "^8.21.3",
"@types/d3-scale": "^4.0.9",
"@types/d3-shape": "^3.1.8",
"@types/d3-time-format": "^4.0.3",
"@types/node": "^25.1.0",
"bits-ui": "^1.3.7",
"clsx": "^2.1.1",
@@ -33,6 +36,12 @@
"vitest": "^3.0.5"
},
"dependencies": {
"@icons-pack/svelte-simple-icons": "^6.5.0",
"d3-scale": "^4.0.2",
"d3-shape": "^3.2.0",
"d3-time-format": "^4.1.0",
"date-fns": "^4.1.0",
"layerchart": "^1.0.13",
"overlayscrollbars": "^2.14.0",
"overlayscrollbars-svelte": "^0.5.5"
}
+148
@@ -0,0 +1,148 @@
#!/usr/bin/env bun
/**
* Pre-compress static assets with maximum compression levels.
* Run after `bun run build`.
*
* Generates .gz, .br, .zst variants for compressible files ≥ MIN_SIZE bytes.
* These are embedded alongside originals by rust-embed and served via
* content negotiation in src/web/assets.rs.
*/
import { readdir, stat, readFile, writeFile } from "fs/promises";
import { join, extname } from "path";
import { gzipSync, brotliCompressSync, constants } from "zlib";
import { $ } from "bun";
// Must match COMPRESSION_MIN_SIZE in src/web/encoding.rs
const MIN_SIZE = 512;
const COMPRESSIBLE_EXTENSIONS = new Set([
".js",
".css",
".html",
".json",
".svg",
".txt",
".xml",
".map",
]);
// Check if zstd CLI is available
let hasZstd = false;
try {
await $`which zstd`.quiet();
hasZstd = true;
} catch {
console.warn("Warning: zstd not found, skipping .zst generation");
}
async function* walkDir(dir: string): AsyncGenerator<string> {
try {
const entries = await readdir(dir, { withFileTypes: true });
for (const entry of entries) {
const path = join(dir, entry.name);
if (entry.isDirectory()) {
yield* walkDir(path);
} else if (entry.isFile()) {
yield path;
}
}
} catch {
// Directory doesn't exist, skip
}
}
async function compressFile(path: string): Promise<void> {
const ext = extname(path);
if (!COMPRESSIBLE_EXTENSIONS.has(ext)) return;
if (path.endsWith(".br") || path.endsWith(".gz") || path.endsWith(".zst")) return;
const stats = await stat(path);
if (stats.size < MIN_SIZE) return;
// Skip if all compressed variants already exist
const variantsExist = await Promise.all([
stat(`${path}.br`).then(
() => true,
() => false
),
stat(`${path}.gz`).then(
() => true,
() => false
),
hasZstd
? stat(`${path}.zst`).then(
() => true,
() => false
)
: Promise.resolve(false),
]);
// Skip only when every variant we would produce already exists. The zstd
// check is kept separate so a missing zstd CLI doesn't suppress .br/.gz output.
const [brExists, gzExists, zstExists] = variantsExist;
if (brExists && gzExists && (zstExists || !hasZstd)) {
return;
}
const content = await readFile(path);
const originalSize = content.length;
// Brotli (maximum quality = 11)
const brContent = brotliCompressSync(content, {
params: {
[constants.BROTLI_PARAM_QUALITY]: 11,
},
});
await writeFile(`${path}.br`, brContent);
// Gzip (level 9)
const gzContent = gzipSync(content, { level: 9 });
await writeFile(`${path}.gz`, gzContent);
// Zstd (level 19 - maximum)
if (hasZstd) {
try {
await $`zstd -19 -q -f -o ${path}.zst ${path}`.quiet();
} catch (e) {
console.warn(`Warning: Failed to compress ${path} with zstd: ${e}`);
}
}
const brRatio = ((brContent.length / originalSize) * 100).toFixed(1);
const gzRatio = ((gzContent.length / originalSize) * 100).toFixed(1);
console.log(`Compressed: ${path} (br: ${brRatio}%, gz: ${gzRatio}%, ${originalSize} bytes)`);
}
async function main() {
console.log("Pre-compressing static assets...");
// Banner uses adapter-static with output in dist/
const dirs = ["dist"];
let scannedFiles = 0;
let compressedFiles = 0;
for (const dir of dirs) {
for await (const file of walkDir(dir)) {
const ext = extname(file);
scannedFiles++;
if (
COMPRESSIBLE_EXTENSIONS.has(ext) &&
!file.endsWith(".br") &&
!file.endsWith(".gz") &&
!file.endsWith(".zst")
) {
const stats = await stat(file);
if (stats.size >= MIN_SIZE) {
// compressFile may still return early (variants already present), so this
// counts candidate files rather than files actually recompressed.
await compressFile(file);
compressedFiles++;
}
}
}
}
console.log(`Done! Scanned ${scannedFiles} files, compressed ${compressedFiles} files.`);
}
main().catch((e) => {
console.error("Compression failed:", e);
process.exit(1);
});
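The variants this script emits only pay off if the server maps each request's Accept-Encoding onto the matching precompressed file. That selection happens in src/web/assets.rs on the Rust side; purely as an illustration, a TypeScript sketch of the same negotiation (all names below are hypothetical, not taken from the codebase) might look like:
type Encoding = "zstd" | "br" | "gzip" | "identity";
// Preference order roughly tracks compression ratio at the levels the
// script above uses: zstd -19 and brotli 11 beat gzip 9 on most assets.
const PREFERENCE: Encoding[] = ["zstd", "br", "gzip"];
function negotiateEncoding(acceptEncoding: string, available: Set<Encoding>): Encoding {
  // Parse "gzip, br;q=0.8" into the set of encoding tokens the client accepts.
  const accepted = new Set(
    acceptEncoding.split(",").map((token) => token.split(";")[0].trim().toLowerCase())
  );
  for (const encoding of PREFERENCE) {
    if (accepted.has(encoding) && available.has(encoding)) return encoding;
  }
  return "identity";
}
// e.g. negotiateEncoding("gzip, br", new Set(["br", "gzip"])) === "br"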
+1 -1
@@ -1,5 +1,5 @@
<!doctype html>
<html lang="en" class="no-transition">
<html lang="en">
<head>
<meta charset="utf-8" />
<meta name="viewport" content="width=device-width, initial-scale=1" />
+328 -2
@@ -1,25 +1,59 @@
import { authStore } from "$lib/auth.svelte";
import type {
CandidateResponse,
CodeDescription,
CourseResponse,
DbMeetingTime,
InstructorDetail,
InstructorDetailResponse,
InstructorListItem,
InstructorResponse,
InstructorStats,
LinkedRmpProfile,
ListInstructorsResponse,
RescoreResponse,
ScraperStatsResponse,
SearchResponse as SearchResponseGenerated,
ServiceInfo,
ServiceStatus,
StatusResponse,
SubjectDetailResponse,
SubjectResultEntry,
SubjectSummary,
SubjectsResponse,
TimeseriesPoint,
TimeseriesResponse,
TopCandidateResponse,
User,
} from "$lib/bindings";
const API_BASE_URL = "/api";
// Re-export generated types under their canonical names
export type {
CandidateResponse,
CodeDescription,
CourseResponse,
DbMeetingTime,
InstructorDetail,
InstructorDetailResponse,
InstructorListItem,
InstructorResponse,
InstructorStats,
LinkedRmpProfile,
ListInstructorsResponse,
RescoreResponse,
ScraperStatsResponse,
ServiceInfo,
ServiceStatus,
StatusResponse,
SubjectDetailResponse,
SubjectResultEntry,
SubjectSummary,
SubjectsResponse,
TimeseriesPoint,
TimeseriesResponse,
TopCandidateResponse,
};
// Semantic aliases — these all share the CodeDescription shape
@@ -30,10 +64,101 @@ export type ReferenceEntry = CodeDescription;
// SearchResponse re-exported (aliased to strip the "Generated" suffix)
export type SearchResponse = SearchResponseGenerated;
export type ScraperPeriod = "1h" | "6h" | "24h" | "7d" | "30d";
// Client-side only — not generated from Rust
export type SortColumn = "course_code" | "title" | "instructor" | "time" | "seats";
export type SortDirection = "asc" | "desc";
export interface AdminStatus {
userCount: number;
sessionCount: number;
courseCount: number;
scrapeJobCount: number;
services: { name: string; status: string }[];
}
export interface ScrapeJob {
id: number;
targetType: string;
targetPayload: unknown;
priority: string;
executeAt: string;
createdAt: string;
lockedAt: string | null;
retryCount: number;
maxRetries: number;
queuedAt: string;
status: "processing" | "staleLock" | "exhausted" | "scheduled" | "pending";
}
export interface ScrapeJobsResponse {
jobs: ScrapeJob[];
}
export interface AuditLogEntry {
id: number;
courseId: number;
timestamp: string;
fieldChanged: string;
oldValue: string;
newValue: string;
subject: string | null;
courseNumber: string | null;
crn: string | null;
courseTitle: string | null;
}
export interface AuditLogResponse {
entries: AuditLogEntry[];
}
export interface MetricEntry {
id: number;
courseId: number;
timestamp: string;
enrollment: number;
waitCount: number;
seatsAvailable: number;
}
export interface MetricsResponse {
metrics: MetricEntry[];
count: number;
timestamp: string;
}
export interface MetricsParams {
course_id?: number;
term?: string;
crn?: string;
range?: "1h" | "6h" | "24h" | "7d" | "30d";
limit?: number;
}
/** A time range for timeline queries (ISO-8601 strings). */
export interface TimelineRange {
start: string;
end: string;
}
/** Request body for POST /api/timeline. */
export interface TimelineRequest {
ranges: TimelineRange[];
}
/** A single 15-minute slot returned by the timeline API. */
export interface TimelineSlot {
time: string;
subjects: Record<string, number>;
}
/** Response from POST /api/timeline. */
export interface TimelineResponse {
slots: TimelineSlot[];
subjects: string[];
}
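// Illustrative shape only (the numbers are invented, not API output): each
// slot pairs a timestamp with per-subject enrollment counts, and `subjects`
// lists every subject the server discovered, e.g.
//
//   {
//     "slots": [
//       { "time": "2026-01-30T09:00:00Z", "subjects": { "CS": 412, "MATH": 180 } },
//       { "time": "2026-01-30T09:15:00Z", "subjects": { "CS": 430, "MATH": 175 } }
//     ],
//     "subjects": ["CS", "MATH"]
//   }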
export interface SearchParams {
term: string;
subjects?: string[];
@@ -45,6 +170,15 @@ export interface SearchParams {
sort_dir?: SortDirection;
}
// Admin instructor query params (client-only, not generated)
export interface AdminInstructorListParams {
status?: string;
search?: string;
page?: number;
per_page?: number;
sort?: string;
}
export class BannerApiClient {
private baseUrl: string;
private fetchFn: typeof fetch;
@@ -54,8 +188,34 @@ export class BannerApiClient {
this.fetchFn = fetchFn;
}
private async request<T>(endpoint: string): Promise<T> {
const response = await this.fetchFn(`${this.baseUrl}${endpoint}`);
private buildInit(options?: { method?: string; body?: unknown }): RequestInit | undefined {
if (!options) return undefined;
const init: RequestInit = {};
if (options.method) {
init.method = options.method;
}
if (options.body !== undefined) {
init.headers = { "Content-Type": "application/json" };
init.body = JSON.stringify(options.body);
} else if (options.method) {
init.headers = { "Content-Type": "application/json" };
}
return Object.keys(init).length > 0 ? init : undefined;
}
private async request<T>(
endpoint: string,
options?: { method?: string; body?: unknown }
): Promise<T> {
const init = this.buildInit(options);
const args: [string, RequestInit?] = [`${this.baseUrl}${endpoint}`];
if (init) args.push(init);
const response = await this.fetchFn(...args);
if (response.status === 401) {
authStore.handleUnauthorized();
}
if (!response.ok) {
throw new Error(`API request failed: ${response.status} ${response.statusText}`);
@@ -64,6 +224,25 @@ export class BannerApiClient {
return (await response.json()) as T;
}
private async requestVoid(
endpoint: string,
options?: { method?: string; body?: unknown }
): Promise<void> {
const init = this.buildInit(options);
const args: [string, RequestInit?] = [`${this.baseUrl}${endpoint}`];
if (init) args.push(init);
const response = await this.fetchFn(...args);
if (response.status === 401) {
authStore.handleUnauthorized();
}
if (!response.ok) {
throw new Error(`API request failed: ${response.status} ${response.statusText}`);
}
}
async getStatus(): Promise<StatusResponse> {
return this.request<StatusResponse>("/status");
}
@@ -96,6 +275,153 @@ export class BannerApiClient {
async getReference(category: string): Promise<ReferenceEntry[]> {
return this.request<ReferenceEntry[]>(`/reference/${encodeURIComponent(category)}`);
}
// Admin endpoints
async getAdminStatus(): Promise<AdminStatus> {
return this.request<AdminStatus>("/admin/status");
}
async getAdminUsers(): Promise<User[]> {
return this.request<User[]>("/admin/users");
}
async setUserAdmin(discordId: string, isAdmin: boolean): Promise<User> {
return this.request<User>(`/admin/users/${discordId}/admin`, {
method: "PUT",
body: { is_admin: isAdmin },
});
}
async getAdminScrapeJobs(): Promise<ScrapeJobsResponse> {
return this.request<ScrapeJobsResponse>("/admin/scrape-jobs");
}
/**
* Fetch the audit log with conditional request support.
*
* Returns `null` when the server responds 304 (data unchanged).
* Stores and sends `Last-Modified` / `If-Modified-Since` automatically.
*/
async getAdminAuditLog(): Promise<AuditLogResponse | null> {
const headers: Record<string, string> = {};
if (this._auditLastModified) {
headers["If-Modified-Since"] = this._auditLastModified;
}
const response = await this.fetchFn(`${this.baseUrl}/admin/audit-log`, { headers });
if (response.status === 304) {
return null;
}
if (!response.ok) {
throw new Error(`API request failed: ${response.status} ${response.statusText}`);
}
const lastMod = response.headers.get("Last-Modified");
if (lastMod) {
this._auditLastModified = lastMod;
}
return (await response.json()) as AuditLogResponse;
}
/** Stored `Last-Modified` value for audit log conditional requests. */
private _auditLastModified: string | null = null;
async getTimeline(ranges: TimelineRange[]): Promise<TimelineResponse> {
return this.request<TimelineResponse>("/timeline", {
method: "POST",
body: { ranges } satisfies TimelineRequest,
});
}
async getMetrics(params?: MetricsParams): Promise<MetricsResponse> {
const query = new URLSearchParams();
if (params?.course_id !== undefined) query.set("course_id", String(params.course_id));
if (params?.term) query.set("term", params.term);
if (params?.crn) query.set("crn", params.crn);
if (params?.range) query.set("range", params.range);
if (params?.limit !== undefined) query.set("limit", String(params.limit));
const qs = query.toString();
return this.request<MetricsResponse>(`/metrics${qs ? `?${qs}` : ""}`);
}
// Admin instructor endpoints
async getAdminInstructors(params?: AdminInstructorListParams): Promise<ListInstructorsResponse> {
const query = new URLSearchParams();
if (params?.status) query.set("status", params.status);
if (params?.search) query.set("search", params.search);
if (params?.page !== undefined) query.set("page", String(params.page));
if (params?.per_page !== undefined) query.set("per_page", String(params.per_page));
if (params?.sort) query.set("sort", params.sort);
const qs = query.toString();
return this.request<ListInstructorsResponse>(`/admin/instructors${qs ? `?${qs}` : ""}`);
}
async getAdminInstructor(id: number): Promise<InstructorDetailResponse> {
return this.request<InstructorDetailResponse>(`/admin/instructors/${id}`);
}
async matchInstructor(id: number, rmpLegacyId: number): Promise<InstructorDetailResponse> {
return this.request<InstructorDetailResponse>(`/admin/instructors/${id}/match`, {
method: "POST",
body: { rmpLegacyId },
});
}
async rejectCandidate(id: number, rmpLegacyId: number): Promise<void> {
return this.requestVoid(`/admin/instructors/${id}/reject-candidate`, {
method: "POST",
body: { rmpLegacyId },
});
}
async rejectAllCandidates(id: number): Promise<void> {
return this.requestVoid(`/admin/instructors/${id}/reject-all`, {
method: "POST",
});
}
async unmatchInstructor(id: number, rmpLegacyId?: number): Promise<void> {
return this.requestVoid(`/admin/instructors/${id}/unmatch`, {
method: "POST",
...(rmpLegacyId !== undefined ? { body: { rmpLegacyId } } : {}),
});
}
async rescoreInstructors(): Promise<RescoreResponse> {
return this.request<RescoreResponse>("/admin/rmp/rescore", {
method: "POST",
});
}
// Scraper analytics endpoints
async getScraperStats(period?: ScraperPeriod): Promise<ScraperStatsResponse> {
const qs = period ? `?period=${period}` : "";
return this.request<ScraperStatsResponse>(`/admin/scraper/stats${qs}`);
}
async getScraperTimeseries(period?: ScraperPeriod, bucket?: string): Promise<TimeseriesResponse> {
const query = new URLSearchParams();
if (period) query.set("period", period);
if (bucket) query.set("bucket", bucket);
const qs = query.toString();
return this.request<TimeseriesResponse>(`/admin/scraper/timeseries${qs ? `?${qs}` : ""}`);
}
async getScraperSubjects(): Promise<SubjectsResponse> {
return this.request<SubjectsResponse>("/admin/scraper/subjects");
}
async getScraperSubjectDetail(subject: string, limit?: number): Promise<SubjectDetailResponse> {
const qs = limit !== undefined ? `?limit=${limit}` : "";
return this.request<SubjectDetailResponse>(
`/admin/scraper/subjects/${encodeURIComponent(subject)}${qs}`
);
}
}
export const client = new BannerApiClient();
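A brief usage sketch of the client above. The "$lib/api" module specifier and the range values are assumptions for illustration; the method names and return types come from the class itself.
import { client, type AuditLogEntry, type TimelineResponse } from "$lib/api";
// Request enrollment slots for one (example) day via POST /api/timeline.
async function loadDay(): Promise<TimelineResponse> {
  return client.getTimeline([
    { start: "2026-01-30T08:00:00Z", end: "2026-01-30T20:00:00Z" },
  ]);
}
// getAdminAuditLog() resolves to null on a 304 Not Modified, so callers
// keep whatever they already had cached in that case.
async function refreshAuditLog(cached: AuditLogEntry[]): Promise<AuditLogEntry[]> {
  const fresh = await client.getAdminAuditLog();
  return fresh ? fresh.entries : cached;
}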
+84
@@ -0,0 +1,84 @@
import type { User } from "$lib/bindings";
type AuthState =
| { mode: "loading" }
| { mode: "authenticated"; user: User }
| { mode: "unauthenticated" };
class AuthStore {
state = $state<AuthState>({ mode: "loading" });
get user(): User | null {
return this.state.mode === "authenticated" ? this.state.user : null;
}
get isAdmin(): boolean {
return this.user?.isAdmin ?? false;
}
get isLoading(): boolean {
return this.state.mode === "loading";
}
get isAuthenticated(): boolean {
return this.state.mode === "authenticated";
}
/**
* Attempt to load the current user session from the backend.
* Only transitions to "unauthenticated" on a definitive 401/403.
* Retries indefinitely on transient failures (network errors, 5xx)
* so that a slow backend startup doesn't kick the user to login.
*/
async init() {
const MAX_DELAY_MS = 7_000;
let delayMs = 500;
for (;;) {
try {
const response = await fetch("/api/auth/me");
if (response.ok) {
const user: User = await response.json();
this.state = { mode: "authenticated", user };
return;
}
// Definitive rejection — no session or not authorized
if (response.status === 401 || response.status === 403) {
this.state = { mode: "unauthenticated" };
return;
}
// Server error (5xx) or unexpected status — retry
} catch {
// Network error (backend not up yet) — retry
}
await new Promise((r) => setTimeout(r, delayMs));
delayMs = Math.min(delayMs * 2, MAX_DELAY_MS);
}
}
/** Idempotently mark the session as lost. Called by apiFetch on 401. */
handleUnauthorized() {
if (this.state.mode !== "unauthenticated") {
this.state = { mode: "unauthenticated" };
}
}
login() {
window.location.href = "/api/auth/login";
}
async logout() {
try {
await fetch("/api/auth/logout", { method: "POST" });
} finally {
this.state = { mode: "unauthenticated" };
window.location.href = "/";
}
}
}
export const authStore = new AuthStore();
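For context, a sketch of how the store above might be wired at startup; the call site is not part of this diff, so where this runs is an assumption.
import { authStore } from "$lib/auth.svelte";
// Resolve the session once at app startup (e.g. from the root layout).
// init() handles transient failures itself, doubling its retry delay from
// 500 ms up to the 7 s cap, so no external retry loop is needed.
void authStore.init();
// UI can then branch on the derived getters:
//   authStore.isLoading       -> splash/spinner
//   authStore.isAuthenticated -> signed-in app
//   otherwise                 -> offer authStore.login()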
+18
@@ -1,8 +1,26 @@
export type { CandidateResponse } from "./CandidateResponse";
export type { CodeDescription } from "./CodeDescription";
export type { CourseResponse } from "./CourseResponse";
export type { DbMeetingTime } from "./DbMeetingTime";
export type { InstructorDetail } from "./InstructorDetail";
export type { InstructorDetailResponse } from "./InstructorDetailResponse";
export type { InstructorListItem } from "./InstructorListItem";
export type { InstructorResponse } from "./InstructorResponse";
export type { InstructorStats } from "./InstructorStats";
export type { LinkedRmpProfile } from "./LinkedRmpProfile";
export type { ListInstructorsResponse } from "./ListInstructorsResponse";
export type { OkResponse } from "./OkResponse";
export type { RescoreResponse } from "./RescoreResponse";
export type { ScraperStatsResponse } from "./ScraperStatsResponse";
export type { SearchResponse } from "./SearchResponse";
export type { ServiceInfo } from "./ServiceInfo";
export type { ServiceStatus } from "./ServiceStatus";
export type { StatusResponse } from "./StatusResponse";
export type { SubjectDetailResponse } from "./SubjectDetailResponse";
export type { SubjectResultEntry } from "./SubjectResultEntry";
export type { SubjectSummary } from "./SubjectSummary";
export type { SubjectsResponse } from "./SubjectsResponse";
export type { TimeseriesPoint } from "./TimeseriesPoint";
export type { TimeseriesResponse } from "./TimeseriesResponse";
export type { TopCandidateResponse } from "./TopCandidateResponse";
export type { User } from "./User";
+328 -202
@@ -7,13 +7,25 @@ import {
formatMeetingDaysLong,
isMeetingTimeTBA,
isTimeTBA,
ratingColor,
ratingStyle,
rmpUrl,
RMP_CONFIDENCE_THRESHOLD,
} from "$lib/course";
import { themeStore } from "$lib/stores/theme.svelte";
import { useClipboard } from "$lib/composables/useClipboard.svelte";
import { cn, tooltipContentClass } from "$lib/utils";
import { cn, tooltipContentClass, formatNumber } from "$lib/utils";
import { Tooltip } from "bits-ui";
import SimpleTooltip from "./SimpleTooltip.svelte";
import { Info, Copy, Check } from "@lucide/svelte";
import {
Info,
Copy,
Check,
Star,
Triangle,
ExternalLink,
Calendar,
Download,
} from "@lucide/svelte";
let { course }: { course: CourseResponse } = $props();
@@ -21,206 +33,320 @@ const clipboard = useClipboard();
</script>
<div class="bg-muted/60 p-5 text-sm border-b border-border">
<div class="grid grid-cols-1 sm:grid-cols-2 gap-5">
<!-- Instructors -->
<div>
<h4 class="text-sm text-foreground mb-2">
Instructors
</h4>
{#if course.instructors.length > 0}
<div class="flex flex-wrap gap-1.5">
{#each course.instructors as instructor}
<Tooltip.Root delayDuration={200}>
<Tooltip.Trigger>
<span
class="inline-flex items-center gap-1.5 text-sm font-medium bg-card border border-border rounded-md px-2.5 py-1 text-foreground hover:border-foreground/20 hover:bg-card/80 transition-colors"
>
{instructor.displayName}
{#if instructor.rmpRating != null}
{@const rating = instructor.rmpRating}
<span
class="text-[10px] font-semibold {ratingColor(rating)}"
>{rating.toFixed(1)}</span>
{/if}
</span>
</Tooltip.Trigger>
<Tooltip.Content
sideOffset={6}
class={cn(tooltipContentClass, "px-3 py-2")}
>
<div class="space-y-1.5">
<div class="font-medium">{instructor.displayName}</div>
{#if instructor.isPrimary}
<div class="text-muted-foreground">Primary instructor</div>
{/if}
{#if instructor.rmpRating != null}
<div class="text-muted-foreground">
{instructor.rmpRating.toFixed(1)}/5 ({instructor.rmpNumRatings ?? 0} ratings)
</div>
{/if}
{#if instructor.email}
<button
onclick={(e) => clipboard.copy(instructor.email!, e)}
class="inline-flex items-center gap-1 text-muted-foreground hover:text-foreground transition-colors cursor-pointer"
>
{#if clipboard.copiedValue === instructor.email}
<Check class="size-3" />
<span>Copied!</span>
{:else}
<Copy class="size-3" />
<span>{instructor.email}</span>
{/if}
</button>
{/if}
<div class="grid grid-cols-1 sm:grid-cols-2 gap-5">
<!-- Instructors -->
<div>
<h4 class="text-sm text-foreground mb-2">Instructors</h4>
{#if course.instructors.length > 0}
<div class="flex flex-wrap gap-1.5">
{#each course.instructors as instructor}
<Tooltip.Root delayDuration={200}>
<Tooltip.Trigger>
<span
class="inline-flex items-center gap-1.5 text-sm font-medium bg-card border border-border rounded-md px-2.5 py-1 text-foreground hover:border-foreground/20 hover:bg-card/80 transition-colors"
>
{instructor.displayName}
{#if instructor.rmpRating != null}
{@const rating = instructor.rmpRating}
{@const lowConfidence =
(instructor.rmpNumRatings ?? 0) <
RMP_CONFIDENCE_THRESHOLD}
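                                        <!-- Ratings with fewer reviews than the threshold get a triangle badge; reliable ones get a star -->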
<span
class="text-[10px] font-semibold inline-flex items-center gap-0.5"
style={ratingStyle(
rating,
themeStore.isDark,
)}
>
{rating.toFixed(1)}
{#if lowConfidence}
<Triangle
class="size-2 fill-current"
/>
{:else}
<Star
class="size-2.5 fill-current"
/>
{/if}
</span>
{/if}
</span>
</Tooltip.Trigger>
<Tooltip.Content
sideOffset={6}
class={cn(tooltipContentClass, "px-3 py-2")}
>
<div class="space-y-1.5">
<div class="font-medium">
{instructor.displayName}
</div>
{#if instructor.isPrimary}
<div class="text-muted-foreground">
Primary instructor
</div>
{/if}
{#if instructor.rmpRating != null}
<div class="text-muted-foreground">
{instructor.rmpRating.toFixed(1)}/5
· {instructor.rmpNumRatings ?? 0} ratings
{#if (instructor.rmpNumRatings ?? 0) < RMP_CONFIDENCE_THRESHOLD}
(low)
{/if}
</div>
{/if}
{#if instructor.rmpLegacyId != null}
<a
href={rmpUrl(
instructor.rmpLegacyId,
)}
target="_blank"
rel="noopener"
class="inline-flex items-center gap-1 text-muted-foreground hover:text-foreground transition-colors"
>
<ExternalLink class="size-3" />
<span>View on RMP</span>
</a>
{/if}
{#if instructor.email}
<button
onclick={(e) =>
clipboard.copy(
instructor.email!,
e,
)}
class="inline-flex items-center gap-1 text-muted-foreground hover:text-foreground transition-colors cursor-pointer"
>
{#if clipboard.copiedValue === instructor.email}
<Check class="size-3" />
<span>Copied!</span>
{:else}
<Copy class="size-3" />
<span>{instructor.email}</span>
{/if}
</button>
{/if}
</div>
</Tooltip.Content>
</Tooltip.Root>
{/each}
</div>
</Tooltip.Content>
</Tooltip.Root>
{/each}
</div>
{:else}
<span class="text-muted-foreground italic">Staff</span>
{/if}
</div>
<!-- Meeting Times -->
<div>
<h4 class="text-sm text-foreground mb-2">
Meeting Times
</h4>
{#if course.meetingTimes.length > 0}
<ul class="space-y-2">
{#each course.meetingTimes as mt}
<li>
{#if isMeetingTimeTBA(mt) && isTimeTBA(mt)}
<span class="italic text-muted-foreground">TBA</span>
{:else}
<div class="flex items-baseline gap-1.5">
{#if !isMeetingTimeTBA(mt)}
<span class="font-medium text-foreground">
{formatMeetingDaysLong(mt)}
</span>
{/if}
{#if !isTimeTBA(mt)}
<span class="text-muted-foreground">
{formatTime(mt.begin_time)}&ndash;{formatTime(mt.end_time)}
</span>
{:else}
<span class="italic text-muted-foreground">Time TBA</span>
{/if}
</div>
{/if}
{#if mt.building || mt.room}
<div class="text-xs text-muted-foreground mt-0.5">
{mt.building_description ?? mt.building}{mt.room ? ` ${mt.room}` : ""}
</div>
{/if}
<div class="text-xs text-muted-foreground/70 mt-0.5">
{formatDate(mt.start_date)} &ndash; {formatDate(mt.end_date)}
</div>
</li>
{/each}
</ul>
{:else}
<span class="italic text-muted-foreground">TBA</span>
{/if}
</div>
<!-- Delivery -->
<div>
<h4 class="text-sm text-foreground mb-2">
<span class="inline-flex items-center gap-1">
Delivery
<SimpleTooltip text="How the course is taught: in-person, online, hybrid, etc." delay={150} passthrough>
<Info class="size-3 text-muted-foreground/50" />
</SimpleTooltip>
</span>
</h4>
<span class="text-foreground">
{course.instructionalMethod ?? "—"}
{#if course.campus}
<span class="text-muted-foreground"> · {course.campus}</span>
{/if}
</span>
</div>
<!-- Credits -->
<div>
<h4 class="text-sm text-foreground mb-2">
Credits
</h4>
<span class="text-foreground">{formatCreditHours(course)}</span>
</div>
<!-- Attributes -->
{#if course.attributes.length > 0}
<div>
<h4 class="text-sm text-foreground mb-2">
<span class="inline-flex items-center gap-1">
Attributes
<SimpleTooltip text="Course flags for degree requirements, core curriculum, or special designations" delay={150} passthrough>
<Info class="size-3 text-muted-foreground/50" />
</SimpleTooltip>
</span>
</h4>
<div class="flex flex-wrap gap-1.5">
{#each course.attributes as attr}
<SimpleTooltip text="Course attribute code" delay={150} passthrough>
<span
class="inline-flex text-xs font-medium bg-card border border-border rounded-md px-2 py-0.5 text-muted-foreground hover:text-foreground hover:border-foreground/20 transition-colors"
>
{attr}
</span>
</SimpleTooltip>
{/each}
</div>
</div>
{/if}
<!-- Cross-list -->
{#if course.crossList}
<div>
<h4 class="text-sm text-foreground mb-2">
<span class="inline-flex items-center gap-1">
Cross-list
<SimpleTooltip text="Cross-listed sections share enrollment across multiple course numbers. Students in any linked section attend the same class." delay={150} passthrough>
<Info class="size-3 text-muted-foreground/50" />
</SimpleTooltip>
</span>
</h4>
<Tooltip.Root delayDuration={150} disableHoverableContent>
<Tooltip.Trigger>
<span class="inline-flex items-center gap-1.5 text-foreground font-mono">
<span class="bg-card border border-border rounded-md px-2 py-0.5 text-xs font-medium">
{course.crossList}
</span>
{#if course.crossListCount != null && course.crossListCapacity != null}
<span class="text-muted-foreground text-xs">
{course.crossListCount}/{course.crossListCapacity}
</span>
{/if}
</span>
</Tooltip.Trigger>
<Tooltip.Content
sideOffset={6}
class={tooltipContentClass}
>
Group <span class="font-mono font-medium">{course.crossList}</span>
{#if course.crossListCount != null && course.crossListCapacity != null}
{course.crossListCount} enrolled across {course.crossListCapacity} shared seats
{:else}
<span class="text-muted-foreground italic">Staff</span>
{/if}
</Tooltip.Content>
</Tooltip.Root>
</div>
{/if}
</div>
<!-- Waitlist -->
{#if course.waitCapacity > 0}
<div>
<h4 class="text-sm text-foreground mb-2">
Waitlist
</h4>
<span class="text-foreground">{course.waitCount} / {course.waitCapacity}</span>
</div>
{/if}
</div>
<!-- Meeting Times -->
<div>
<h4 class="text-sm text-foreground mb-2">Meeting Times</h4>
{#if course.meetingTimes.length > 0}
<ul class="space-y-2">
{#each course.meetingTimes as mt}
<li>
{#if isMeetingTimeTBA(mt) && isTimeTBA(mt)}
<span class="italic text-muted-foreground"
>TBA</span
>
{:else}
<div class="flex items-baseline gap-1.5">
{#if !isMeetingTimeTBA(mt)}
<span
class="font-medium text-foreground"
>
{formatMeetingDaysLong(mt)}
</span>
{/if}
{#if !isTimeTBA(mt)}
<span class="text-muted-foreground">
{formatTime(
mt.begin_time,
)}&ndash;{formatTime(mt.end_time)}
</span>
{:else}
<span
class="italic text-muted-foreground"
>Time TBA</span
>
{/if}
</div>
{/if}
{#if mt.building || mt.room}
<div
class="text-xs text-muted-foreground mt-0.5"
>
{mt.building_description ??
mt.building}{mt.room
? ` ${mt.room}`
: ""}
</div>
{/if}
<div
class="text-xs text-muted-foreground/70 mt-0.5"
>
{formatDate(mt.start_date)} &ndash; {formatDate(
mt.end_date,
)}
</div>
</li>
{/each}
</ul>
{:else}
<span class="italic text-muted-foreground">TBA</span>
{/if}
</div>
<!-- Delivery -->
<div>
<h4 class="text-sm text-foreground mb-2">
<span class="inline-flex items-center gap-1">
Delivery
<SimpleTooltip
text="How the course is taught: in-person, online, hybrid, etc."
delay={150}
passthrough
>
<Info class="size-3 text-muted-foreground/50" />
</SimpleTooltip>
</span>
</h4>
<span class="text-foreground">
{course.instructionalMethod ?? "—"}
{#if course.campus}
<span class="text-muted-foreground">
· {course.campus}
</span>
{/if}
</span>
</div>
<!-- Credits -->
<div>
<h4 class="text-sm text-foreground mb-2">Credits</h4>
<span class="text-foreground">{formatCreditHours(course)}</span>
</div>
<!-- Attributes -->
{#if course.attributes.length > 0}
<div>
<h4 class="text-sm text-foreground mb-2">
<span class="inline-flex items-center gap-1">
Attributes
<SimpleTooltip
text="Course flags for degree requirements, core curriculum, or special designations"
delay={150}
passthrough
>
<Info class="size-3 text-muted-foreground/50" />
</SimpleTooltip>
</span>
</h4>
<div class="flex flex-wrap gap-1.5">
{#each course.attributes as attr}
<SimpleTooltip
text="Course attribute code"
delay={150}
passthrough
>
<span
class="inline-flex text-xs font-medium bg-card border border-border rounded-md px-2 py-0.5 text-muted-foreground hover:text-foreground hover:border-foreground/20 transition-colors"
>
{attr}
</span>
</SimpleTooltip>
{/each}
</div>
</div>
{/if}
<!-- Cross-list -->
{#if course.crossList}
<div>
<h4 class="text-sm text-foreground mb-2">
<span class="inline-flex items-center gap-1">
Cross-list
<SimpleTooltip
text="Cross-listed sections share enrollment across multiple course numbers. Students in any linked section attend the same class."
delay={150}
passthrough
>
<Info class="size-3 text-muted-foreground/50" />
</SimpleTooltip>
</span>
</h4>
<Tooltip.Root delayDuration={150} disableHoverableContent>
<Tooltip.Trigger>
<span
class="inline-flex items-center gap-1.5 text-foreground font-mono"
>
<span
class="bg-card border border-border rounded-md px-2 py-0.5 text-xs font-medium"
>
{course.crossList}
</span>
{#if course.crossListCount != null && course.crossListCapacity != null}
<span class="text-muted-foreground text-xs">
{formatNumber(course.crossListCount)}/{formatNumber(course.crossListCapacity)}
</span>
{/if}
</span>
</Tooltip.Trigger>
<Tooltip.Content sideOffset={6} class={tooltipContentClass}>
Group <span class="font-mono font-medium"
>{course.crossList}</span
>
{#if course.crossListCount != null && course.crossListCapacity != null}
{formatNumber(course.crossListCount)} enrolled across {formatNumber(course.crossListCapacity)}
shared seats
{/if}
</Tooltip.Content>
</Tooltip.Root>
</div>
{/if}
<!-- Waitlist -->
{#if course.waitCapacity > 0}
<div>
<h4 class="text-sm text-foreground mb-2">Waitlist</h4>
<span class="text-2foreground"
>{formatNumber(course.waitCount)} / {formatNumber(course.waitCapacity)}</span
>
</div>
{/if}
<!-- Calendar Export -->
{#if course.meetingTimes.length > 0}
<div>
<h4 class="text-sm text-foreground mb-2">
<span class="inline-flex items-center gap-1">
Calendar
<SimpleTooltip
text="Export this course schedule to your calendar app"
delay={150}
passthrough
>
<Info class="size-3 text-muted-foreground/50" />
</SimpleTooltip>
</span>
</h4>
<div class="flex flex-wrap gap-1.5">
<a
href="/api/courses/{course.termCode}/{course.crn}/calendar.ics"
download
class="inline-flex items-center gap-1.5 text-sm font-medium bg-card border border-border rounded-md px-2.5 py-1 text-foreground hover:border-foreground/20 hover:bg-card/80 transition-colors"
>
<Download class="size-3.5" />
ICS File
</a>
<a
href="/api/courses/{course.termCode}/{course.crn}/gcal"
target="_blank"
rel="noopener"
class="inline-flex items-center gap-1.5 text-sm font-medium bg-card border border-border rounded-md px-2.5 py-1 text-foreground hover:border-foreground/20 hover:bg-card/80 transition-colors"
>
<Calendar class="size-3.5" />
Google Calendar
</a>
</div>
</div>
{/if}
</div>
</div>
+161 -58
@@ -15,8 +15,11 @@ import {
openSeats,
seatsColor,
seatsDotColor,
ratingColor,
ratingStyle,
rmpUrl,
RMP_CONFIDENCE_THRESHOLD,
} from "$lib/course";
import { themeStore } from "$lib/stores/theme.svelte";
import { useClipboard } from "$lib/composables/useClipboard.svelte";
import { useOverlayScrollbars } from "$lib/composables/useOverlayScrollbars.svelte";
import CourseDetail from "./CourseDetail.svelte";
@@ -31,8 +34,19 @@ import {
type VisibilityState,
type Updater,
} from "@tanstack/table-core";
import { ArrowUp, ArrowDown, ArrowUpDown, Columns3, Check, RotateCcw } from "@lucide/svelte";
import { DropdownMenu, ContextMenu } from "bits-ui";
import {
ArrowUp,
ArrowDown,
ArrowUpDown,
Columns3,
Check,
RotateCcw,
Star,
Triangle,
ExternalLink,
} from "@lucide/svelte";
import { DropdownMenu, ContextMenu, Tooltip } from "bits-ui";
import { cn, tooltipContentClass, formatNumber } from "$lib/utils";
import SimpleTooltip from "./SimpleTooltip.svelte";
let {
@@ -91,10 +105,16 @@ function primaryInstructorDisplay(course: CourseResponse): string {
return abbreviateInstructor(primary.displayName);
}
function primaryRating(course: CourseResponse): { rating: number; count: number } | null {
function primaryRating(
course: CourseResponse
): { rating: number; count: number; legacyId: number | null } | null {
const primary = getPrimaryInstructor(course.instructors);
if (!primary?.rmpRating) return null;
return { rating: primary.rmpRating, count: primary.rmpNumRatings ?? 0 };
return {
rating: primary.rmpRating,
count: primary.rmpNumRatings ?? 0,
legacyId: primary.rmpLegacyId ?? null,
};
}
function timeIsTBA(course: CourseResponse): boolean {
@@ -207,8 +227,7 @@ const table = createSvelteTable({
</GroupHeading>
{#each columns as col}
{@const id = col.id!}
{@const label =
typeof col.header === "string" ? col.header : id}
{@const label = typeof col.header === "string" ? col.header : id}
<CheckboxItem
checked={columnVisibility[id] !== false}
closeOnSelect={false}
@@ -269,12 +288,12 @@ const table = createSvelteTable({
transition:fly={{ duration: 150, y: -10 }}
>
{@render columnVisibilityGroup(
DropdownMenu.Group,
DropdownMenu.GroupHeading,
DropdownMenu.CheckboxItem,
DropdownMenu.Separator,
DropdownMenu.Item,
)}
DropdownMenu.Group,
DropdownMenu.GroupHeading,
DropdownMenu.CheckboxItem,
DropdownMenu.Separator,
DropdownMenu.Item,
)}
</div>
</div>
{/if}
@@ -379,12 +398,15 @@ const table = createSvelteTable({
</tr>
</tbody>
{:else}
<!-- No out: transition — Svelte outros break table layout (tbody loses positioning and overlaps) -->
{#each table.getRowModel().rows as row, i (row.id)}
{@const course = row.original}
<tbody
animate:flip={{ duration: 300 }}
in:fade={{ duration: 200, delay: Math.min(i * 20, 400) }}
out:fade={{ duration: 150 }}
in:fade={{
duration: 200,
delay: Math.min(i * 20, 400),
}}
>
<tr
class="border-b border-border cursor-pointer hover:bg-muted/50 transition-colors whitespace-nowrap {expandedCrn ===
@@ -405,9 +427,15 @@ const table = createSvelteTable({
e,
)}
onkeydown={(e) => {
if (e.key === "Enter" || e.key === " ") {
if (
e.key === "Enter" ||
e.key === " "
) {
e.preventDefault();
clipboard.copy(course.crn, e);
clipboard.copy(
course.crn,
e,
);
}
}}
aria-label="Copy CRN {course.crn} to clipboard"
@@ -468,9 +496,12 @@ const table = createSvelteTable({
{@const primary = getPrimaryInstructor(
course.instructors,
)}
{@const display = primaryInstructorDisplay(course)}
{@const commaIdx = display.indexOf(", ")}
{@const ratingData = primaryRating(course)}
{@const display =
primaryInstructorDisplay(course)}
{@const commaIdx =
display.indexOf(", ")}
{@const ratingData =
primaryRating(course)}
<td class="py-2 px-2 whitespace-nowrap">
{#if display === "Staff"}
<span
@@ -486,38 +517,98 @@ const table = createSvelteTable({
passthrough
>
{#if commaIdx !== -1}
<span>{display.slice(0, commaIdx)},
<span class="text-muted-foreground">{display.slice(commaIdx + 1)}</span
></span>
<span
>{display.slice(
0,
commaIdx,
)},
<span
class="text-muted-foreground"
>{display.slice(
commaIdx +
1,
)}</span
></span
>
{:else}
<span>{display}</span>
{/if}
</SimpleTooltip>
{/if}
{#if ratingData}
<SimpleTooltip
text="{ratingData.rating.toFixed(
1,
)}/5 ({ratingData.count} ratings on RateMyProfessors)"
delay={150}
side="bottom"
passthrough
{@const lowConfidence =
ratingData.count <
RMP_CONFIDENCE_THRESHOLD}
<Tooltip.Root
delayDuration={150}
>
<span
class="ml-1 text-xs font-medium {ratingColor(
ratingData.rating,
)}"
>{ratingData.rating.toFixed(
1,
)}★</span
<Tooltip.Trigger>
<span
class="ml-1 text-xs font-medium inline-flex items-center gap-0.5"
style={ratingStyle(
ratingData.rating,
themeStore.isDark,
)}
>
{ratingData.rating.toFixed(
1,
)}
{#if lowConfidence}
<Triangle
class="size-2 fill-current"
/>
{:else}
<Star
class="size-2.5 fill-current"
/>
{/if}
</span>
</Tooltip.Trigger>
<Tooltip.Content
side="bottom"
sideOffset={6}
class={cn(
tooltipContentClass,
"px-2.5 py-1.5",
)}
>
</SimpleTooltip>
<span
class="inline-flex items-center gap-1.5 text-xs"
>
{ratingData.rating.toFixed(
1,
)}/5 · {formatNumber(ratingData.count)}
ratings
{#if (ratingData.count ?? 0) < RMP_CONFIDENCE_THRESHOLD}
(low)
{/if}
{#if ratingData.legacyId != null}
·
<a
href={rmpUrl(
ratingData.legacyId,
)}
target="_blank"
rel="noopener"
class="inline-flex items-center gap-0.5 text-muted-foreground hover:text-foreground transition-colors"
>
RMP
<ExternalLink
class="size-3"
/>
</a>
{/if}
</span>
</Tooltip.Content>
</Tooltip.Root>
{/if}
</td>
{:else if colId === "time"}
<td class="py-2 px-2 whitespace-nowrap">
<SimpleTooltip
text={formatMeetingTimesTooltip(course.meetingTimes)}
text={formatMeetingTimesTooltip(
course.meetingTimes,
)}
passthrough
>
{#if timeIsTBA(course)}
@@ -566,10 +657,14 @@ const table = createSvelteTable({
</SimpleTooltip>
</td>
{:else if colId === "location"}
{@const concern = getDeliveryConcern(course)}
{@const accentColor = concernAccentColor(concern)}
{@const locTooltip = formatLocationTooltip(course)}
{@const locDisplay = formatLocationDisplay(course)}
{@const concern =
getDeliveryConcern(course)}
{@const accentColor =
concernAccentColor(concern)}
{@const locTooltip =
formatLocationTooltip(course)}
{@const locDisplay =
formatLocationDisplay(course)}
<td class="py-2 px-2 whitespace-nowrap">
{#if locTooltip}
<SimpleTooltip
@@ -579,18 +674,26 @@ const table = createSvelteTable({
>
<span
class="text-muted-foreground"
class:pl-2={accentColor !== null}
style:border-left={accentColor ? `2px solid ${accentColor}` : undefined}
class:pl-2={accentColor !==
null}
style:border-left={accentColor
? `2px solid ${accentColor}`
: undefined}
>
{locDisplay ?? "—"}
</span>
</SimpleTooltip>
{:else if locDisplay}
<span class="text-muted-foreground">
<span
class="text-muted-foreground"
>
{locDisplay}
</span>
{:else}
<span class="text-xs text-muted-foreground/50">—</span>
<span
class="text-xs text-muted-foreground/50"
>—</span
>
{/if}
</td>
{:else if colId === "seats"}
@@ -598,11 +701,11 @@ const table = createSvelteTable({
class="py-2 px-2 text-right whitespace-nowrap"
>
<SimpleTooltip
text="{openSeats(
text="{formatNumber(openSeats(
course,
)} of {course.maxEnrollment} seats open, {course.enrollment} enrolled{course.waitCount >
))} of {formatNumber(course.maxEnrollment)} seats open, {formatNumber(course.enrollment)} enrolled{course.waitCount >
0
? `, ${course.waitCount} waitlisted`
? `, ${formatNumber(course.waitCount)} waitlisted`
: ''}"
delay={200}
side="left"
@@ -626,8 +729,8 @@ const table = createSvelteTable({
>
<span
class="text-muted-foreground/60 tabular-nums"
>{course.enrollment}/{course.maxEnrollment}{#if course.waitCount > 0}
· WL {course.waitCount}/{course.waitCapacity}{/if}</span
>{formatNumber(course.enrollment)}/{formatNumber(course.maxEnrollment)}{#if course.waitCount > 0}
· WL {formatNumber(course.waitCount)}/{formatNumber(course.waitCapacity)}{/if}</span
>
</span>
</SimpleTooltip>
@@ -668,12 +771,12 @@ const table = createSvelteTable({
out:fade={{ duration: 100 }}
>
{@render columnVisibilityGroup(
ContextMenu.Group,
ContextMenu.GroupHeading,
ContextMenu.CheckboxItem,
ContextMenu.Separator,
ContextMenu.Item,
)}
ContextMenu.Group,
ContextMenu.GroupHeading,
ContextMenu.CheckboxItem,
ContextMenu.Separator,
ContextMenu.Item,
)}
</div>
</div>
{/if}
@@ -0,0 +1,56 @@
<script lang="ts">
import { page } from "$app/state";
import { TriangleAlert, RotateCcw } from "@lucide/svelte";
interface Props {
/** Heading shown in the error card */
title?: string;
/** The error value from svelte:boundary */
error: unknown;
/** Reset callback from svelte:boundary */
reset: () => void;
}
let { title = "Something went wrong", error, reset }: Props = $props();
let errorName = $derived(error instanceof Error ? error.constructor.name : "Error");
let errorMessage = $derived(error instanceof Error ? error.message : String(error));
let errorStack = $derived(error instanceof Error ? error.stack : null);
</script>
<div class="flex items-center justify-center py-16 px-4">
<div class="w-full max-w-lg rounded-lg border border-status-red/25 bg-status-red/5 overflow-hidden text-sm">
<div class="px-4 py-2.5 border-b border-status-red/15 flex items-center justify-between gap-4">
<div class="flex items-center gap-2 text-status-red">
<TriangleAlert size={16} strokeWidth={2.25} />
<span class="font-semibold">{title}</span>
</div>
<span class="text-xs text-muted-foreground font-mono">{page.url.pathname}</span>
</div>
<div class="px-4 py-3 border-b border-status-red/15">
<span class="text-xs text-muted-foreground/70 font-mono">{errorName}</span>
<pre class="mt-1 text-xs text-foreground/80 overflow-auto whitespace-pre-wrap break-words">{errorMessage}</pre>
</div>
{#if errorStack}
<details class="border-b border-status-red/15">
<summary class="px-4 py-2 text-xs text-muted-foreground/70 cursor-pointer hover:text-muted-foreground select-none">
Stack trace
</summary>
<pre class="px-4 py-3 text-xs text-muted-foreground/60 overflow-auto whitespace-pre-wrap break-words max-h-48">{errorStack}</pre>
</details>
{/if}
<div class="px-4 py-2.5 flex items-center justify-end gap-3">
<span class="text-xs text-muted-foreground/60">Retries this section, not the full page</span>
<button
class="shrink-0 cursor-pointer inline-flex items-center gap-1.5 rounded-md bg-status-red px-3 py-1.5 text-sm font-medium text-white hover:brightness-110 transition-all"
onclick={reset}
>
<RotateCcw size={14} strokeWidth={2.25} />
Try again
</button>
</div>
</div>
</div>
+62
@@ -0,0 +1,62 @@
<script lang="ts">
import { page } from "$app/state";
import { Search, User, Clock } from "@lucide/svelte";
import { authStore } from "$lib/auth.svelte";
import ThemeToggle from "./ThemeToggle.svelte";
const staticTabs = [
{ href: "/", label: "Search", icon: Search },
{ href: "/timeline", label: "Timeline", icon: Clock },
] as const;
const APP_PREFIXES = ["/profile", "/settings", "/admin"];
let profileTab = $derived(
authStore.isLoading
? { href: "/login" as const, label: null, icon: User }
: {
href: authStore.isAuthenticated ? ("/profile" as const) : ("/login" as const),
label: authStore.isAuthenticated ? "Account" : "Login",
icon: User,
}
);
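// "/" must match exactly; the profile tab stays active across all app-shell
// routes listed in APP_PREFIXES, not just /profile itself.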
function isActive(tabHref: string): boolean {
if (tabHref === "/") return page.url.pathname === "/";
if (tabHref === "/profile") {
return APP_PREFIXES.some((p) => page.url.pathname.startsWith(p));
}
return page.url.pathname.startsWith(tabHref);
}
</script>
<nav class="w-full flex justify-center pt-5 px-5">
<div class="w-full max-w-6xl flex items-center justify-between">
<!-- pointer-events-auto: root layout wraps nav in pointer-events-none overlay -->
<div class="flex items-center gap-1 rounded-lg bg-muted p-1 pointer-events-auto">
{#each staticTabs as tab}
<a
href={tab.href}
class="flex items-center gap-1.5 rounded-md px-3 py-1.5 text-sm font-medium transition-colors no-underline
{isActive(tab.href)
? 'bg-background text-foreground shadow-sm'
: 'text-muted-foreground hover:text-foreground hover:bg-background/50'}"
>
<tab.icon size={15} strokeWidth={2} />
{tab.label}
</a>
{/each}
<a
href={profileTab.href}
class="flex items-center gap-1.5 rounded-md px-3 py-1.5 text-sm font-medium transition-colors no-underline
{isActive(profileTab.href)
? 'bg-background text-foreground shadow-sm'
: 'text-muted-foreground hover:text-foreground hover:bg-background/50'}"
>
<User size={15} strokeWidth={2} />
{#if profileTab.label}{profileTab.label}{/if}
</a>
<ThemeToggle />
</div>
</div>
</nav>
@@ -0,0 +1,76 @@
<script lang="ts">
import { navigationStore } from "$lib/stores/navigation.svelte";
import type { Snippet } from "svelte";
import { cubicOut } from "svelte/easing";
import type { TransitionConfig } from "svelte/transition";
type Axis = "horizontal" | "vertical";
let {
key,
children,
axis = "horizontal",
inDelay = 0,
outDelay = 0,
}: {
key: string;
children: Snippet;
axis?: Axis;
inDelay?: number;
outDelay?: number;
} = $props();
const DURATION = 400;
const OFFSET = 40;
function translate(axis: Axis, value: number): string {
return axis === "vertical" ? `translateY(${value}px)` : `translateX(${value}px)`;
}
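// Svelte drives the css callback with t going 0→1 for in-transitions and 1→0 for
// out-transitions, so (1 - t) * offset eases the element from its offset to rest.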
function inTransition(_node: HTMLElement): TransitionConfig {
const dir = navigationStore.direction;
if (dir === "fade") {
return {
duration: DURATION,
delay: inDelay,
easing: cubicOut,
css: (t: number) => `opacity: ${t}`,
};
}
const offset = dir === "right" ? OFFSET : -OFFSET;
return {
duration: DURATION,
delay: inDelay,
easing: cubicOut,
css: (t: number) => `opacity: ${t}; transform: ${translate(axis, (1 - t) * offset)}`,
};
}
function outTransition(_node: HTMLElement): TransitionConfig {
const dir = navigationStore.direction;
const base = "position: absolute; top: 0; left: 0; width: 100%; height: 100%";
if (dir === "fade") {
return {
duration: DURATION,
delay: outDelay,
easing: cubicOut,
css: (t: number) => `${base}; opacity: ${t}`,
};
}
const offset = dir === "right" ? -OFFSET : OFFSET;
return {
duration: DURATION,
delay: outDelay,
easing: cubicOut,
css: (t: number) => `${base}; opacity: ${t}; transform: ${translate(axis, (1 - t) * offset)}`,
};
}
</script>
<div class="relative flex flex-1 flex-col overflow-hidden">
{#key key}
<div in:inTransition out:outTransition class="flex flex-1 flex-col">
{@render children()}
</div>
{/key}
</div>
+22 -17
@@ -1,7 +1,20 @@
<script lang="ts">
import { Select } from "bits-ui";
import { ChevronUp, ChevronDown } from "@lucide/svelte";
import { fly } from "svelte/transition";
import type { Action } from "svelte/action";
import { formatNumber } from "$lib/utils";
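// One-shot slide-in via the Web Animations API; plays when the node mounts with a
// non-zero direction, replacing the previous in:fly transition.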
const slideIn: Action<HTMLElement, number> = (node, direction) => {
if (direction !== 0) {
node.animate(
[
{ transform: `translateX(${direction * 20}px)`, opacity: 0 },
{ transform: "translateX(0)", opacity: 1 },
],
{ duration: 200, easing: "ease-out" }
);
}
};
let {
totalCount,
@@ -21,17 +34,8 @@ const start = $derived(offset + 1);
const end = $derived(Math.min(offset + limit, totalCount));
// Track direction for slide animation
let prevPage = $state(1);
let direction = $state(0);
$effect(() => {
const page = currentPage;
if (page !== prevPage) {
direction = page > prevPage ? 1 : -1;
prevPage = page;
}
});
// 5 page slots: current-2, current-1, current, current+1, current+2
const pageSlots = $derived([-2, -1, 0, 1, 2].map((delta) => currentPage + delta));
@@ -40,6 +44,7 @@ function isSlotVisible(page: number): boolean {
}
function goToPage(page: number) {
direction = page > currentPage ? 1 : -1;
onPageChange((page - 1) * limit);
}
@@ -55,11 +60,11 @@ const selectValue = $derived(String(currentPage));
</script>
{#if totalCount > 0 && totalPages > 1}
<div class="flex items-center text-sm">
<div class="flex items-start text-xs -mt-3 pl-2">
<!-- Left zone: result count -->
<div class="flex-1">
<span class="text-muted-foreground">
Showing {start}&ndash;{end} of {totalCount} courses
Showing {formatNumber(start)}&ndash;{formatNumber(end)} of {formatNumber(totalCount)} courses
</span>
</div>
@@ -86,7 +91,7 @@ const selectValue = $derived(String(currentPage));
focus-visible:ring-2 focus-visible:ring-ring focus-visible:ring-offset-2 focus-visible:ring-offset-background"
aria-label="Page {currentPage} of {totalPages}, click to select page"
>
<span in:fly={{ x: direction * 20, duration: 200 }}>{currentPage}</span>
<span use:slideIn={direction}>{currentPage}</span>
<ChevronUp class="size-3 text-muted-foreground" />
</Select.Trigger>
<Select.Portal>
@@ -140,8 +145,8 @@ const selectValue = $derived(String(currentPage));
aria-hidden={!isSlotVisible(page)}
tabindex={isSlotVisible(page) ? 0 : -1}
disabled={!isSlotVisible(page)}
in:fly={{ x: direction * 20, duration: 200 }}
>
use:slideIn={direction}
>
{page}
</button>
{/if}
@@ -154,9 +159,9 @@ const selectValue = $derived(String(currentPage));
</div>
{:else if totalCount > 0}
<!-- Single page: just show the count, no pagination controls -->
<div class="flex items-center text-sm">
<div class="flex items-start text-xs -mt-3 pl-2">
<span class="text-muted-foreground">
Showing {start}&ndash;{end} of {totalCount} courses
Showing {formatNumber(start)}&ndash;{formatNumber(end)} of {formatNumber(totalCount)} courses
</span>
</div>
{/if}
@@ -0,0 +1,68 @@
<script lang="ts">
import { onMount } from "svelte";
import SimpleTooltip from "$lib/components/SimpleTooltip.svelte";
import { relativeTime } from "$lib/time";
import { formatNumber } from "$lib/utils";
export interface SearchMeta {
totalCount: number;
durationMs: number;
timestamp: Date;
}
let { meta }: { meta: SearchMeta | null } = $props();
let now = $state(new Date());
let formattedTime = $derived(
meta
? meta.timestamp.toLocaleTimeString(undefined, {
hour: "2-digit",
minute: "2-digit",
second: "2-digit",
})
: ""
);
let relativeTimeResult = $derived(meta ? relativeTime(meta.timestamp, now) : null);
let relativeTimeText = $derived(relativeTimeResult?.text ?? "");
let countLabel = $derived(meta ? formatNumber(meta.totalCount) : "");
let resultNoun = $derived(meta ? (meta.totalCount !== 1 ? "results" : "result") : "");
let durationLabel = $derived(meta ? `${Math.round(meta.durationMs)}ms` : "");
let tooltipText = $derived(meta ? `${relativeTimeText} · ${formattedTime}` : "");
onMount(() => {
let nowTimeoutId: ReturnType<typeof setTimeout> | null = null;
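    // Wake exactly when the relative-time label would next change (per
    // relativeTime's nextUpdateMs) instead of polling on a fixed interval;
    // falls back to a 1s tick when no hint is available.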
function scheduleNowTick() {
const delay = relativeTimeResult?.nextUpdateMs ?? 1000;
nowTimeoutId = setTimeout(() => {
now = new Date();
scheduleNowTick();
}, delay);
}
scheduleNowTick();
return () => {
if (nowTimeoutId) clearTimeout(nowTimeoutId);
};
});
</script>
<SimpleTooltip
text={tooltipText}
contentClass="whitespace-nowrap text-[12px] px-2 py-1"
triggerClass="self-start"
sideOffset={0}
>
<span
class="pl-1 text-xs transition-opacity duration-200"
style:opacity={meta ? 1 : 0}
>
<span class="text-muted-foreground/70">{countLabel}</span>
<span class="text-muted-foreground/35">{resultNoun} in</span>
<span class="text-muted-foreground/70">{durationLabel}</span>
</span>
</SimpleTooltip>
+14 -3
@@ -1,30 +1,41 @@
<script lang="ts">
import { Tooltip } from "bits-ui";
import type { Snippet } from "svelte";
import { cn } from "$lib/utils";
let {
text,
delay = 150,
side = "top",
passthrough = false,
triggerClass = "",
contentClass = "",
sideOffset = 6,
children,
}: {
text: string;
delay?: number;
side?: "top" | "bottom" | "left" | "right";
passthrough?: boolean;
triggerClass?: string;
contentClass?: string;
sideOffset?: number;
children: Snippet;
} = $props();
</script>
<Tooltip.Root delayDuration={delay} disableHoverableContent={passthrough}>
<Tooltip.Trigger>
{@render children()}
{#snippet child({ props })}
<span class={triggerClass} {...props}>
{@render children()}
</span>
{/snippet}
</Tooltip.Trigger>
<Tooltip.Content
{side}
sideOffset={6}
class="z-50 bg-card text-card-foreground text-xs border border-border rounded-md px-2.5 py-1.5 shadow-md whitespace-pre-line max-w-max"
{sideOffset}
class={cn("z-50 bg-card text-card-foreground text-xs border border-border rounded-md px-2.5 py-1.5 shadow-sm whitespace-pre-line max-w-max text-left", contentClass)}
>
{text}
</Tooltip.Content>
@@ -3,6 +3,7 @@ import { Combobox } from "bits-ui";
import { Check, ChevronsUpDown } from "@lucide/svelte";
import { fly } from "svelte/transition";
import type { Subject } from "$lib/api";
import { formatNumber } from "$lib/utils";
let {
subjects,
@@ -92,7 +93,7 @@ $effect(() => {
</span>
{/each}
{#if !open && overflowCount > 0}
<span class="text-xs text-muted-foreground shrink-0">+{overflowCount}</span>
<span class="text-xs text-muted-foreground shrink-0">+{formatNumber(overflowCount)}</span>
{/if}
{/if}
<Combobox.Input
+2 -2
@@ -48,8 +48,8 @@ async function handleToggle(event: MouseEvent) {
type="button"
onclick={(e) => handleToggle(e)}
aria-label={themeStore.isDark ? "Switch to light mode" : "Switch to dark mode"}
class="cursor-pointer border-none rounded-md flex items-center justify-center p-2 scale-125
text-muted-foreground hover:bg-muted bg-transparent transition-colors"
class="cursor-pointer border-none rounded-md flex items-center justify-center p-1.5
text-muted-foreground hover:text-foreground hover:bg-background/50 bg-transparent transition-colors"
>
<div class="relative size-[18px]">
<Sun
@@ -0,0 +1,676 @@
<script lang="ts">
import { onMount } from "svelte";
import { scaleTime, scaleLinear } from "d3-scale";
import type { TimeSlot, ChartContext } from "$lib/timeline/types";
import {
PADDING,
DEFAULT_AXIS_RATIO,
CHART_HEIGHT_RATIO,
MIN_SPAN_MS,
MAX_SPAN_MS,
DEFAULT_SPAN_MS,
ZOOM_FACTOR,
ZOOM_KEY_FACTOR,
ZOOM_EASE,
ZOOM_SETTLE_THRESHOLD,
PAN_FRICTION,
PAN_STOP_THRESHOLD,
PAN_STOP_THRESHOLD_Y,
VELOCITY_SAMPLE_WINDOW,
VELOCITY_MIN_DT,
PAN_STEP_RATIO,
PAN_STEP_CTRL_RATIO,
PAN_EASE,
PAN_SETTLE_THRESHOLD_PX,
YRATIO_STEP,
YRATIO_MIN,
YRATIO_MAX,
YRATIO_SETTLE_THRESHOLD,
FOLLOW_EASE,
MIN_MAXY,
MAX_DT,
DEFAULT_DT,
TAP_MAX_DURATION_MS,
TAP_MAX_DISTANCE_PX,
} from "$lib/timeline/constants";
import { createTimelineStore } from "$lib/timeline/store.svelte";
import {
createAnimMap,
syncAnimTargets,
stepAnimations,
pruneAnimMap,
} from "$lib/timeline/animation";
import {
getVisibleSlots,
findSlotByTime,
snapToSlot,
enabledTotalClasses,
} from "$lib/timeline/viewport";
import {
drawGrid,
drawHoverColumn,
drawStackedArea,
drawNowLine,
drawTimeAxis,
stackVisibleSlots,
} from "$lib/timeline/renderer";
import TimelineDrawer from "./TimelineDrawer.svelte";
import TimelineTooltip from "./TimelineTooltip.svelte";
// ── Reactive DOM state ──────────────────────────────────────────────
let canvasEl: HTMLCanvasElement | undefined = $state();
let containerEl: HTMLDivElement | undefined = $state();
let width = $state(800);
let height = $state(400);
let dpr = $state(1);
// ── View window ─────────────────────────────────────────────────────
let viewCenter = $state(Date.now());
let viewSpan = $state(DEFAULT_SPAN_MS);
let viewYRatio = $state(DEFAULT_AXIS_RATIO);
// ── Interaction state ───────────────────────────────────────────────
let isDragging = $state(false);
let dragStartX = $state(0);
let dragStartY = $state(0);
let dragStartCenter = $state(0);
let dragStartYRatio = $state(0);
let followEnabled = $state(true);
let ctrlHeld = $state(false);
// ── Animation state (intentionally non-reactive — updated in rAF) ──
let panVelocity = 0;
let panVelocityY = 0;
let pointerSamples: { time: number; x: number; y: number }[] = [];
// ── Multi-touch / pinch state ────────────────────────────────────────
let activePointers = new Map<number, { x: number; y: number }>();
let isPinching = false;
let pinchStartDist = 0;
let pinchStartSpan = 0;
let pinchAnchorTime = 0;
let pinchAnchorRatio = 0.5;
// ── Tap detection ────────────────────────────────────────────────────
let pointerDownTime = 0;
let pointerDownPos = { x: 0, y: 0 };
let targetSpan = DEFAULT_SPAN_MS;
let zoomAnchorTime = 0;
let zoomAnchorRatio = 0.5;
let isZoomAnimating = false;
let targetCenter = Date.now();
let isPanAnimating = false;
let targetYRatio = DEFAULT_AXIS_RATIO;
let isYPanAnimating = false;
let animationFrameId = 0;
let lastFrameTime = 0;
let animatedMaxY = MIN_MAXY;
const animMap = createAnimMap();
// ── Tooltip + hover ─────────────────────────────────────────────────
let tooltipVisible = $state(false);
let tooltipX = $state(0);
let tooltipY = $state(0);
let tooltipSlot: TimeSlot | null = $state(null);
let hoverSlotTime: number | null = $state(null);
let lastPointerClientX = 0;
let lastPointerClientY = 0;
let pointerOverCanvas = false;
// ── Drawer ──────────────────────────────────────────────────────────
let drawerOpen = $state(false);
// Start with an empty set — subjects are populated dynamically from the API.
let enabledSubjects: Set<string> = $state(new Set());
// ── Data store ──────────────────────────────────────────────────────
const store = createTimelineStore();
let data: TimeSlot[] = $derived(store.data);
let allSubjects: string[] = $derived(store.subjects);
// Auto-enable newly discovered subjects.
$effect(() => {
const storeSubjects = store.subjects;
const next = new Set(enabledSubjects);
let changed = false;
for (const s of storeSubjects) {
if (!next.has(s)) {
next.add(s);
changed = true;
}
}
if (changed) {
enabledSubjects = next;
}
});
let activeSubjects = $derived(allSubjects.filter((s) => enabledSubjects.has(s)));
// ── Derived layout ──────────────────────────────────────────────────
let viewStart = $derived(viewCenter - viewSpan / 2);
let viewEnd = $derived(viewCenter + viewSpan / 2);
let chartHeight = $derived(height * CHART_HEIGHT_RATIO);
let chartBottom = $derived(height * viewYRatio);
let chartTop = $derived(chartBottom - chartHeight);
let xScale = $derived(
scaleTime()
.domain([new Date(viewStart), new Date(viewEnd)])
.range([PADDING.left, width - PADDING.right])
);
// Reused across frames — domain/range updated imperatively in render().
let yScale = scaleLinear()
.domain([0, MIN_MAXY * 1.1])
.range([0, 1]);
// ── Subject toggling ────────────────────────────────────────────────
function toggleSubject(subject: string) {
const next = new Set(enabledSubjects);
if (next.has(subject)) next.delete(subject);
else next.add(subject);
enabledSubjects = next;
}
function enableAll() {
enabledSubjects = new Set(allSubjects);
}
function disableAll() {
enabledSubjects = new Set();
}
// ── Rendering ───────────────────────────────────────────────────────
function render() {
if (!canvasEl) return;
const ctx = canvasEl.getContext("2d");
if (!ctx) return;
// Update yScale in-place (no allocation per frame).
yScale.domain([0, animatedMaxY * 1.1]).range([chartBottom, chartTop]);
ctx.save();
ctx.scale(dpr, dpr);
ctx.clearRect(0, 0, width, height);
const chart: ChartContext = {
ctx,
xScale,
yScale,
width,
chartTop,
chartBottom,
viewSpan,
viewStart,
viewEnd,
};
const visible = getVisibleSlots(data, viewStart, viewEnd);
const visibleStack = stackVisibleSlots(visible, allSubjects, enabledSubjects, animMap);
drawGrid(chart);
drawHoverColumn(chart, visibleStack, hoverSlotTime);
drawStackedArea(chart, visibleStack);
drawNowLine(chart);
drawTimeAxis(chart);
ctx.restore();
}
// ── Hover logic ─────────────────────────────────────────────────────
function updateHover() {
if (!pointerOverCanvas || isDragging || !canvasEl) return;
const rect = canvasEl.getBoundingClientRect();
const x = lastPointerClientX - rect.left;
const y = lastPointerClientY - rect.top;
if (y < chartTop || y > chartBottom) {
tooltipVisible = false;
hoverSlotTime = null;
return;
}
const time = xScale.invert(x);
const snappedTime = snapToSlot(time.getTime());
const slot = findSlotByTime(data, snappedTime);
if (!slot) {
tooltipVisible = false;
hoverSlotTime = null;
return;
}
const total = enabledTotalClasses(slot, activeSubjects);
if (total <= 0) {
tooltipVisible = false;
hoverSlotTime = null;
return;
}
// Bypass area overlap check when CTRL is held.
if (!ctrlHeld) {
const stackTopY = yScale(total);
if (y < stackTopY) {
tooltipVisible = false;
hoverSlotTime = null;
return;
}
}
tooltipSlot = slot;
tooltipX = lastPointerClientX;
tooltipY = lastPointerClientY;
tooltipVisible = true;
hoverSlotTime = snappedTime;
}
// ── Interaction helpers ───────────────────────────────────────────────
function pinchDistance(): number {
const pts = [...activePointers.values()];
if (pts.length < 2) return 0;
const dx = pts[1].x - pts[0].x;
const dy = pts[1].y - pts[0].y;
return Math.hypot(dx, dy);
}
function pinchMidpoint(): { x: number; y: number } {
const pts = [...activePointers.values()];
if (pts.length < 2) return { x: 0, y: 0 };
return { x: (pts[0].x + pts[1].x) / 2, y: (pts[0].y + pts[1].y) / 2 };
}
// ── Interaction handlers ────────────────────────────────────────────
function onPointerDown(e: PointerEvent) {
if (e.button !== 0) return;
(e.currentTarget as HTMLElement).setPointerCapture(e.pointerId);
activePointers.set(e.pointerId, { x: e.clientX, y: e.clientY });
// Two fingers down → start pinch-to-zoom
if (activePointers.size === 2) {
isDragging = false;
isPinching = true;
pinchStartDist = pinchDistance();
pinchStartSpan = viewSpan;
const mid = pinchMidpoint();
const rect = canvasEl?.getBoundingClientRect();
const midX = rect ? mid.x - rect.left : mid.x;
const chartWidth = width - PADDING.left - PADDING.right;
pinchAnchorTime = xScale.invert(midX).getTime();
pinchAnchorRatio = (midX - PADDING.left) / chartWidth;
return;
}
// Single finger / mouse → start drag
isDragging = true;
dragStartX = e.clientX;
dragStartY = e.clientY;
dragStartCenter = viewCenter;
dragStartYRatio = viewYRatio;
followEnabled = false;
panVelocity = 0;
panVelocityY = 0;
isZoomAnimating = false;
isPanAnimating = false;
isYPanAnimating = false;
targetSpan = viewSpan;
tooltipVisible = false;
hoverSlotTime = null;
pointerDownTime = performance.now();
pointerDownPos = { x: e.clientX, y: e.clientY };
pointerSamples = [{ time: performance.now(), x: e.clientX, y: e.clientY }];
}
function onPointerMove(e: PointerEvent) {
ctrlHeld = e.ctrlKey || e.metaKey;
lastPointerClientX = e.clientX;
lastPointerClientY = e.clientY;
pointerOverCanvas = true;
activePointers.set(e.pointerId, { x: e.clientX, y: e.clientY });
// Pinch-to-zoom (two-finger gesture)
if (isPinching && activePointers.size >= 2) {
const dist = pinchDistance();
if (pinchStartDist > 0) {
const scale = pinchStartDist / dist; // fingers apart = zoom in
const newSpan = Math.min(MAX_SPAN_MS, Math.max(MIN_SPAN_MS, pinchStartSpan * scale));
viewSpan = newSpan;
targetSpan = newSpan;
viewCenter = pinchAnchorTime + (0.5 - pinchAnchorRatio) * viewSpan;
}
return;
}
if (isDragging) {
const dx = e.clientX - dragStartX;
const dy = e.clientY - dragStartY;
const msPerPx = viewSpan / (width - PADDING.left - PADDING.right);
viewCenter = dragStartCenter - dx * msPerPx;
viewYRatio = dragStartYRatio + dy / height;
const now = performance.now();
pointerSamples.push({ time: now, x: e.clientX, y: e.clientY });
const cutoff = now - VELOCITY_SAMPLE_WINDOW;
pointerSamples = pointerSamples.filter((s) => s.time >= cutoff);
} else {
updateHover();
}
}
function onPointerUp(e: PointerEvent) {
(e.currentTarget as HTMLElement).releasePointerCapture(e.pointerId);
activePointers.delete(e.pointerId);
// End pinch when fewer than 2 fingers remain
if (isPinching) {
if (activePointers.size < 2) {
isPinching = false;
// If one finger remains, reset drag origin to that finger's position
if (activePointers.size === 1) {
const remaining = [...activePointers.values()][0];
isDragging = true;
dragStartX = remaining.x;
dragStartY = remaining.y;
dragStartCenter = viewCenter;
dragStartYRatio = viewYRatio;
pointerSamples = [{ time: performance.now(), x: remaining.x, y: remaining.y }];
}
}
return;
}
isDragging = false;
// Tap detection: short duration + minimal movement → show tooltip
const elapsed = performance.now() - pointerDownTime;
const dist = Math.hypot(e.clientX - pointerDownPos.x, e.clientY - pointerDownPos.y);
if (elapsed < TAP_MAX_DURATION_MS && dist < TAP_MAX_DISTANCE_PX) {
lastPointerClientX = e.clientX;
lastPointerClientY = e.clientY;
pointerOverCanvas = true;
// Bypass the isDragging guard in updateHover since we just cleared it
updateHover();
pointerSamples = [];
return;
}
// Momentum from drag
if (pointerSamples.length >= 2) {
const first = pointerSamples[0];
const last = pointerSamples[pointerSamples.length - 1];
const dt = last.time - first.time;
if (dt > VELOCITY_MIN_DT) {
const pxPerMsX = (last.x - first.x) / dt;
const msPerPx = viewSpan / (width - PADDING.left - PADDING.right);
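                // Convert release velocity from screen px/ms into timeline-ms per wall-clock ms;
                // negated because dragging right pans the view toward earlier times.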
panVelocity = -pxPerMsX * msPerPx;
panVelocityY = (last.y - first.y) / dt;
}
}
pointerSamples = [];
}
function onPointerLeave() {
pointerOverCanvas = false;
tooltipVisible = false;
hoverSlotTime = null;
}
function onPointerCancel(e: PointerEvent) {
activePointers.delete(e.pointerId);
if (activePointers.size < 2) isPinching = false;
if (activePointers.size === 0) isDragging = false;
}
function onWheel(e: WheelEvent) {
e.preventDefault();
if (!canvasEl) return;
followEnabled = false;
panVelocity = 0;
panVelocityY = 0;
const rect = canvasEl.getBoundingClientRect();
const mouseX = e.clientX - rect.left;
const chartWidth = width - PADDING.left - PADDING.right;
zoomAnchorTime = xScale.invert(mouseX).getTime();
zoomAnchorRatio = (mouseX - PADDING.left) / chartWidth;
const zoomIn = e.deltaY < 0;
const factor = zoomIn ? 1 / ZOOM_FACTOR : ZOOM_FACTOR;
targetSpan = Math.min(MAX_SPAN_MS, Math.max(MIN_SPAN_MS, targetSpan * factor));
isZoomAnimating = true;
}
function onKeyDown(e: KeyboardEvent) {
const wasCtrl = ctrlHeld;
ctrlHeld = e.ctrlKey || e.metaKey;
if (ctrlHeld !== wasCtrl) updateHover();
switch (e.key) {
case "ArrowLeft":
case "ArrowRight": {
e.preventDefault();
followEnabled = false;
panVelocity = 0;
const ratio = e.ctrlKey ? PAN_STEP_CTRL_RATIO : PAN_STEP_RATIO;
const step = viewSpan * ratio;
if (!isPanAnimating) targetCenter = viewCenter;
targetCenter += e.key === "ArrowRight" ? step : -step;
isPanAnimating = true;
break;
}
case "ArrowUp":
case "ArrowDown": {
e.preventDefault();
if (e.ctrlKey || e.metaKey) {
const direction = e.key === "ArrowUp" ? -1 : 1;
if (!isYPanAnimating) targetYRatio = viewYRatio;
targetYRatio = Math.max(
YRATIO_MIN,
Math.min(YRATIO_MAX, targetYRatio + direction * YRATIO_STEP)
);
isYPanAnimating = true;
} else {
const factor = e.key === "ArrowUp" ? 1 / ZOOM_KEY_FACTOR : ZOOM_KEY_FACTOR;
followEnabled = false;
panVelocity = 0;
zoomAnchorTime = isPanAnimating ? targetCenter : viewCenter;
zoomAnchorRatio = 0.5;
targetSpan = Math.min(MAX_SPAN_MS, Math.max(MIN_SPAN_MS, targetSpan * factor));
isZoomAnimating = true;
}
break;
}
}
}
function onKeyUp(e: KeyboardEvent) {
const wasCtrl = ctrlHeld;
ctrlHeld = e.ctrlKey || e.metaKey;
if (ctrlHeld !== wasCtrl) updateHover();
}
function onWindowBlur() {
const wasCtrl = ctrlHeld;
ctrlHeld = false;
if (wasCtrl) updateHover();
}
function resumeFollow() {
panVelocity = 0;
panVelocityY = 0;
isPanAnimating = false;
isYPanAnimating = false;
targetYRatio = DEFAULT_AXIS_RATIO;
viewYRatio = DEFAULT_AXIS_RATIO;
targetSpan = DEFAULT_SPAN_MS;
isZoomAnimating = true;
zoomAnchorTime = Date.now();
zoomAnchorRatio = 0.5;
followEnabled = true;
}
// ── Resize ──────────────────────────────────────────────────────────
function updateSize() {
if (!containerEl) return;
const rect = containerEl.getBoundingClientRect();
width = rect.width;
height = rect.height;
dpr = window.devicePixelRatio || 1;
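        // Back the canvas at device-pixel resolution; render() applies
        // ctx.scale(dpr, dpr) so the drawing code can work in CSS pixels.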
if (canvasEl) {
canvasEl.width = width * dpr;
canvasEl.height = height * dpr;
}
}
// ── Animation loop ──────────────────────────────────────────────────
function tick(timestamp: number) {
const dt = lastFrameTime > 0 ? Math.min(timestamp - lastFrameTime, MAX_DT) : DEFAULT_DT;
lastFrameTime = timestamp;
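        // Normalize decay to a 60fps (16ms) frame baseline so momentum feels
        // identical across display refresh rates.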
const friction = Math.pow(PAN_FRICTION, dt / 16);
// Momentum panning
if (
!isDragging &&
(Math.abs(panVelocity) > PAN_STOP_THRESHOLD || Math.abs(panVelocityY) > PAN_STOP_THRESHOLD_Y)
) {
viewCenter += panVelocity * dt;
viewYRatio += (panVelocityY * dt) / height;
panVelocity *= friction;
panVelocityY *= friction;
if (Math.abs(panVelocity) < PAN_STOP_THRESHOLD) panVelocity = 0;
if (Math.abs(panVelocityY) < PAN_STOP_THRESHOLD_Y) panVelocityY = 0;
}
// Smooth zoom
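        // Re-deriving the center from the anchor keeps the timestamp under the
        // cursor pinned in place while the span eases toward its target.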
if (isZoomAnimating && !isDragging) {
const spanDiff = targetSpan - viewSpan;
if (Math.abs(spanDiff) < ZOOM_SETTLE_THRESHOLD) {
viewSpan = targetSpan;
viewCenter = zoomAnchorTime + (0.5 - zoomAnchorRatio) * viewSpan;
isZoomAnimating = false;
} else {
const zf = 1 - Math.pow(1 - ZOOM_EASE, dt / 16);
viewSpan += spanDiff * zf;
viewCenter = zoomAnchorTime + (0.5 - zoomAnchorRatio) * viewSpan;
}
}
// Keyboard pan
if (isPanAnimating && !isDragging) {
const panDiff = targetCenter - viewCenter;
const msPerPx = viewSpan / (width - PADDING.left - PADDING.right);
if (Math.abs(panDiff) < msPerPx * PAN_SETTLE_THRESHOLD_PX) {
viewCenter = targetCenter;
isPanAnimating = false;
} else {
viewCenter += panDiff * (1 - Math.pow(1 - PAN_EASE, dt / 16));
}
}
// Y-axis pan
if (isYPanAnimating && !isDragging) {
const yDiff = targetYRatio - viewYRatio;
if (Math.abs(yDiff) < YRATIO_SETTLE_THRESHOLD) {
viewYRatio = targetYRatio;
isYPanAnimating = false;
} else {
viewYRatio += yDiff * (1 - Math.pow(1 - PAN_EASE, dt / 16));
}
}
// Follow mode
if (followEnabled && !isDragging) {
const target = Date.now();
viewCenter += (target - viewCenter) * (1 - Math.pow(1 - FOLLOW_EASE, dt / 16));
}
// Step value animations & prune offscreen entries
const result = stepAnimations(animMap, dt, animatedMaxY);
animatedMaxY = result.maxY;
pruneAnimMap(animMap, viewStart, viewEnd, viewSpan);
render();
animationFrameId = requestAnimationFrame(tick);
}
// ── Animation sync ──────────────────────────────────────────────────
$effect(() => {
const slots = data;
const subs = allSubjects;
const enabled = enabledSubjects;
syncAnimTargets(animMap, slots, subs, enabled);
});
// Request data whenever the visible window changes.
$effect(() => {
store.requestRange(viewStart, viewEnd);
});
// ── Lifecycle ───────────────────────────────────────────────────────
onMount(() => {
updateSize();
const ro = new ResizeObserver(updateSize);
if (containerEl) ro.observe(containerEl);
window.addEventListener("blur", onWindowBlur);
viewCenter = Date.now();
targetCenter = viewCenter;
targetSpan = viewSpan;
canvasEl?.focus();
animationFrameId = requestAnimationFrame(tick);
return () => {
cancelAnimationFrame(animationFrameId);
ro.disconnect();
window.removeEventListener("blur", onWindowBlur);
store.dispose();
};
});
</script>
<div class="absolute inset-0 select-none" bind:this={containerEl}>
<canvas
bind:this={canvasEl}
class="w-full h-full cursor-grab outline-none"
class:cursor-grabbing={isDragging}
style="display: block; touch-action: none;"
tabindex="0"
aria-label="Interactive enrollment timeline chart"
onpointerdown={(e) => { canvasEl?.focus(); onPointerDown(e); }}
onpointermove={onPointerMove}
onpointerup={onPointerUp}
onpointerleave={onPointerLeave}
onpointercancel={onPointerCancel}
onwheel={onWheel}
onkeydown={onKeyDown}
onkeyup={onKeyUp}
></canvas>
<TimelineDrawer
bind:open={drawerOpen}
subjects={allSubjects}
{enabledSubjects}
{followEnabled}
onToggleSubject={toggleSubject}
onEnableAll={enableAll}
onDisableAll={disableAll}
onResumeFollow={resumeFollow}
/>
<TimelineTooltip
visible={tooltipVisible}
x={tooltipX}
y={tooltipY}
slot={tooltipSlot}
{activeSubjects}
/>
</div>
@@ -0,0 +1,138 @@
<script lang="ts">
import { Filter, X } from "@lucide/svelte";
import { getSubjectColor } from "$lib/timeline/data";
import { DRAWER_WIDTH } from "$lib/timeline/constants";
interface Props {
open: boolean;
subjects: readonly string[];
enabledSubjects: Set<string>;
followEnabled: boolean;
onToggleSubject: (subject: string) => void;
onEnableAll: () => void;
onDisableAll: () => void;
onResumeFollow: () => void;
}
let {
open = $bindable(),
subjects,
enabledSubjects,
followEnabled,
onToggleSubject,
onEnableAll,
onDisableAll,
onResumeFollow,
}: Props = $props();
function onKeyDown(e: KeyboardEvent) {
if (e.key === "Escape" && open) {
open = false;
}
}
</script>
<svelte:window onkeydown={onKeyDown} />
<!-- Filter toggle button — slides out when drawer opens -->
<button
class="absolute right-3 z-50 p-2 rounded-md
bg-black text-white dark:bg-white dark:text-black
hover:bg-neutral-800 dark:hover:bg-neutral-200
border border-black/20 dark:border-white/20
shadow-md transition-all duration-200 ease-in-out cursor-pointer
{open ? 'opacity-0 pointer-events-none' : 'opacity-100'}"
style="top: 20%; transform: translateX({open ? '60px' : '0'});"
onclick={() => (open = true)}
aria-label="Open filters"
>
<Filter size={18} strokeWidth={2} />
</button>
<!-- Drawer panel -->
<div
class="absolute right-0 z-40 rounded-l-lg shadow-xl transition-transform duration-200 ease-in-out {open ? '' : 'pointer-events-none'}"
style="top: 20%; width: {DRAWER_WIDTH}px; height: 60%; transform: translateX({open
? 0
: DRAWER_WIDTH}px);"
>
<div
class="h-full flex flex-col bg-background/90 backdrop-blur-md border border-border/40 rounded-l-lg overflow-hidden"
style="width: {DRAWER_WIDTH}px;"
>
<!-- Header -->
<div class="flex items-center justify-between px-3 py-2.5 border-b border-border/40">
<span class="text-xs font-semibold text-foreground">Filters</span>
<button
class="p-0.5 rounded text-muted-foreground hover:text-foreground transition-colors cursor-pointer"
onclick={() => (open = false)}
aria-label="Close filters"
>
<X size={14} strokeWidth={2} />
</button>
</div>
<!-- Follow status -->
<div class="px-3 py-2 border-b border-border/40">
{#if followEnabled}
<div
class="px-2 py-1 rounded-md text-[10px] font-medium text-center
bg-green-500/10 text-green-600 dark:text-green-400 border border-green-500/20"
>
FOLLOWING
</div>
{:else}
<button
class="w-full px-2 py-1 rounded-md text-[10px] font-medium text-center
bg-muted/80 text-muted-foreground hover:text-foreground
border border-border/50 transition-colors cursor-pointer"
onclick={onResumeFollow}
aria-label="Resume following current time"
>
FOLLOW
</button>
{/if}
</div>
<!-- Subject toggles -->
<div class="flex-1 overflow-y-auto px-3 py-2">
<div class="flex items-center justify-between mb-2 text-[10px] text-muted-foreground">
<span class="uppercase tracking-wider font-medium">Subjects</span>
<div class="flex gap-1.5">
<button
class="hover:text-foreground transition-colors cursor-pointer"
onclick={onEnableAll}>All</button
>
<span class="opacity-40">|</span>
<button
class="hover:text-foreground transition-colors cursor-pointer"
onclick={onDisableAll}>None</button
>
</div>
</div>
<div class="space-y-0.5">
{#each subjects as subject}
{@const enabled = enabledSubjects.has(subject)}
{@const color = getSubjectColor(subject)}
<button
class="flex items-center gap-2 w-full px-1.5 py-1 rounded text-xs
hover:bg-muted/50 transition-colors cursor-pointer text-left"
onclick={() => onToggleSubject(subject)}
>
<span
class="inline-block w-3 h-3 rounded-sm shrink-0 transition-opacity"
style="background: {color}; opacity: {enabled ? 1 : 0.2};"
></span>
<span
class="transition-opacity {enabled
? 'text-foreground'
: 'text-muted-foreground/50'}"
>
{subject}
</span>
</button>
{/each}
</div>
</div>
</div>
</div>
@@ -0,0 +1,52 @@
<script lang="ts">
import { timeFormat } from "d3-time-format";
import { getSubjectColor } from "$lib/timeline/data";
import type { TimeSlot } from "$lib/timeline/types";
import { enabledTotalClasses } from "$lib/timeline/viewport";
interface Props {
visible: boolean;
x: number;
y: number;
slot: TimeSlot | null;
activeSubjects: readonly string[];
}
let { visible, x, y, slot, activeSubjects }: Props = $props();
const fmtTime = timeFormat("%-I:%M %p");
</script>
{#if visible && slot}
{@const total = enabledTotalClasses(slot, activeSubjects)}
<div
class="pointer-events-none fixed z-50 rounded-lg border border-border/60 bg-background/95
backdrop-blur-sm shadow-lg px-3 py-2 text-xs min-w-[140px]"
style="left: {x + 12}px; top: {y - 10}px; transform: translateY(-100%);"
>
<div class="font-semibold text-foreground mb-1.5">
{fmtTime(slot.time)}
</div>
<div class="space-y-0.5">
{#each activeSubjects as subject}
{@const count = slot.subjects[subject] || 0}
{#if count > 0}
<div class="flex items-center justify-between gap-3">
<div class="flex items-center gap-1.5">
<span
class="inline-block w-2 h-2 rounded-sm"
style="background: {getSubjectColor(subject)}"
></span>
<span class="text-muted-foreground">{subject}</span>
</div>
<span class="font-medium tabular-nums">{count}</span>
</div>
{/if}
{/each}
</div>
<div class="mt-1.5 pt-1.5 border-t border-border/40 flex justify-between font-medium">
<span>Total</span>
<span class="tabular-nums">{total}</span>
</div>
</div>
{/if}
@@ -39,7 +39,7 @@ export function createSvelteTable<TData extends RowData>(options: TableOptions<T
// eslint-disable-next-line @typescript-eslint/no-explicit-any
onStateChange: (updater: any) => {
if (updater instanceof Function) state = updater(state);
-      else state = mergeObjects(state, updater as Partial<TableState>);
else state = { ...state, ...(updater as Partial<TableState>) };
options.onStateChange?.(updater);
},
+9 -3
@@ -187,20 +187,24 @@ describe("getPrimaryInstructor", () => {
it("returns primary instructor", () => {
const instructors: InstructorResponse[] = [
{
instructorId: 1,
bannerId: "1",
displayName: "A",
-      email: null,
email: "a@utsa.edu",
isPrimary: false,
rmpRating: null,
rmpNumRatings: null,
rmpLegacyId: null,
},
{
instructorId: 2,
bannerId: "2",
displayName: "B",
-      email: null,
email: "b@utsa.edu",
isPrimary: true,
rmpRating: null,
rmpNumRatings: null,
rmpLegacyId: null,
},
];
expect(getPrimaryInstructor(instructors)?.displayName).toBe("B");
@@ -208,12 +212,14 @@ describe("getPrimaryInstructor", () => {
it("returns first instructor when no primary", () => {
const instructors: InstructorResponse[] = [
{
instructorId: 3,
bannerId: "1",
displayName: "A",
-      email: null,
email: "a@utsa.edu",
isPrimary: false,
rmpRating: null,
rmpNumRatings: null,
rmpLegacyId: null,
},
];
expect(getPrimaryInstructor(instructors)?.displayName).toBe("A");
+64 -5
@@ -362,11 +362,49 @@ export function seatsDotColor(course: CourseResponse): string {
return "bg-green-500";
}
-/** Text color class for a RateMyProfessors rating */
-export function ratingColor(rating: number): string {
-  if (rating >= 4.0) return "text-status-green";
-  if (rating >= 3.0) return "text-yellow-500";
-  return "text-status-red";
/** Minimum number of ratings needed to consider RMP data reliable */
export const RMP_CONFIDENCE_THRESHOLD = 7;
/** RMP professor page URL from legacy ID */
export function rmpUrl(legacyId: number): string {
return `https://www.ratemyprofessors.com/professor/${legacyId}`;
}
/**
* Smooth OKLCH color + text-shadow for a RateMyProfessors rating.
*
* Three-stop gradient interpolated in OKLCH:
* 1.0 → red, 3.0 → amber, 5.0 → green
* with separate light/dark mode tuning.
*/
export function ratingStyle(rating: number, isDark: boolean): string {
const clamped = Math.max(1, Math.min(5, rating));
// OKLCH stops: [lightness, chroma, hue]
const stops: { light: [number, number, number]; dark: [number, number, number] }[] = [
{ light: [0.63, 0.2, 25], dark: [0.7, 0.19, 25] }, // 1.0 red
{ light: [0.7, 0.16, 85], dark: [0.78, 0.15, 85] }, // 3.0 amber
{ light: [0.65, 0.2, 145], dark: [0.72, 0.19, 145] }, // 5.0 green
];
let t: number;
let fromIdx: number;
if (clamped <= 3) {
t = (clamped - 1) / 2;
fromIdx = 0;
} else {
t = (clamped - 3) / 2;
fromIdx = 1;
}
const from = isDark ? stops[fromIdx].dark : stops[fromIdx].light;
const to = isDark ? stops[fromIdx + 1].dark : stops[fromIdx + 1].light;
const l = from[0] + (to[0] - from[0]) * t;
const c = from[1] + (to[1] - from[1]) * t;
const h = from[2] + (to[2] - from[2]) * t;
return `color: oklch(${l.toFixed(3)} ${c.toFixed(3)} ${h.toFixed(1)}); text-shadow: 0 0 4px oklch(${l.toFixed(3)} ${c.toFixed(3)} ${h.toFixed(1)} / 0.3);`;
}
/** Format credit hours display */
@@ -377,3 +415,24 @@ export function formatCreditHours(course: CourseResponse): string {
}
return "—";
}
/**
* Convert Banner "Last, First Middle" → "First Middle Last".
 * Names without a comma are returned as-is (trimmed), stray leading/trailing
 * whitespace is stripped, and middle names/initials are preserved.
*/
export function formatInstructorName(displayName: string): string {
const commaIdx = displayName.indexOf(",");
if (commaIdx === -1) return displayName.trim();
const last = displayName.slice(0, commaIdx).trim();
const rest = displayName.slice(commaIdx + 1).trim();
if (!rest) return last;
return `${rest} ${last}`;
}
/** Check if a rating value represents real data (not the 0.0 placeholder for unrated professors). */
export function isRatingValid(avgRating: number | null, numRatings: number): boolean {
return avgRating !== null && !(avgRating === 0 && numRatings === 0);
}
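// Usage sketch (editor addition, not part of the commit): how a course card
// might combine these helpers; the `instructor` literal is hypothetical.
const instructor = {
  displayName: "Doe, Jane A",
  rmpRating: 4.2,
  rmpNumRatings: 31,
  rmpLegacyId: 123456,
};
const name = formatInstructorName(instructor.displayName); // "Jane A Doe"
const showRating =
  isRatingValid(instructor.rmpRating, instructor.rmpNumRatings) &&
  instructor.rmpNumRatings >= RMP_CONFIDENCE_THRESHOLD;
if (showRating) {
  const style = ratingStyle(instructor.rmpRating, /* isDark */ true); // inline CSS string
  const href = rmpUrl(instructor.rmpLegacyId); // https://www.ratemyprofessors.com/professor/123456
}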
+13
@@ -0,0 +1,13 @@
import { format, formatDistanceToNow } from "date-fns";
/** Returns a relative time string like "3 minutes ago" or "in 2 hours". */
export function formatRelativeDate(date: string | Date): string {
const d = typeof date === "string" ? new Date(date) : date;
return formatDistanceToNow(d, { addSuffix: true });
}
/** Returns a full absolute datetime string for tooltip display, e.g. "Jan 29, 2026, 3:45:12 PM". */
export function formatAbsoluteDate(date: string | Date): string {
const d = typeof date === "string" ? new Date(date) : date;
return format(d, "MMM d, yyyy, h:mm:ss a");
}
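// Usage sketch (editor addition, not part of the commit): the two formatters
// pair naturally, relative text inline with the absolute time in a tooltip.
const createdAt = "2026-01-29T21:45:12Z";
const label = formatRelativeDate(createdAt); // e.g. "about 3 hours ago"
const tooltip = formatAbsoluteDate(createdAt); // e.g. "Jan 29, 2026, 3:45:12 PM" (local time)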
+131
@@ -0,0 +1,131 @@
import { describe, expect, it } from "vitest";
import { formatDiffPath, jsonDiff, tryParseJson } from "./diff";
describe("jsonDiff", () => {
describe("scalars", () => {
it("returns empty array for identical primitives", () => {
expect(jsonDiff(42, 42)).toEqual([]);
expect(jsonDiff("hello", "hello")).toEqual([]);
expect(jsonDiff(true, true)).toEqual([]);
expect(jsonDiff(null, null)).toEqual([]);
});
it("returns single entry for different primitives", () => {
expect(jsonDiff("Open", "Closed")).toEqual([{ path: "", oldVal: "Open", newVal: "Closed" }]);
expect(jsonDiff(25, 30)).toEqual([{ path: "", oldVal: 25, newVal: 30 }]);
expect(jsonDiff(true, false)).toEqual([{ path: "", oldVal: true, newVal: false }]);
});
it("returns entry when types differ", () => {
expect(jsonDiff(1, "1")).toEqual([{ path: "", oldVal: 1, newVal: "1" }]);
expect(jsonDiff(null, 0)).toEqual([{ path: "", oldVal: null, newVal: 0 }]);
});
});
describe("objects", () => {
it("detects changed key", () => {
expect(jsonDiff({ a: 1 }, { a: 2 })).toEqual([{ path: ".a", oldVal: 1, newVal: 2 }]);
});
it("detects added key", () => {
expect(jsonDiff({}, { a: 1 })).toEqual([{ path: ".a", oldVal: undefined, newVal: 1 }]);
});
it("detects removed key", () => {
expect(jsonDiff({ a: 1 }, {})).toEqual([{ path: ".a", oldVal: 1, newVal: undefined }]);
});
it("handles deeply nested changes", () => {
const oldVal = { a: { b: { c: 1 } } };
const newVal = { a: { b: { c: 2 } } };
expect(jsonDiff(oldVal, newVal)).toEqual([{ path: ".a.b.c", oldVal: 1, newVal: 2 }]);
});
it("returns empty for identical objects", () => {
expect(jsonDiff({ a: 1, b: "x" }, { a: 1, b: "x" })).toEqual([]);
});
});
describe("arrays", () => {
it("detects changed element", () => {
expect(jsonDiff([1, 2, 3], [1, 99, 3])).toEqual([{ path: "[1]", oldVal: 2, newVal: 99 }]);
});
it("detects added element (new array longer)", () => {
expect(jsonDiff([1], [1, 2])).toEqual([{ path: "[1]", oldVal: undefined, newVal: 2 }]);
});
it("detects removed element (new array shorter)", () => {
expect(jsonDiff([1, 2], [1])).toEqual([{ path: "[1]", oldVal: 2, newVal: undefined }]);
});
it("returns empty for identical arrays", () => {
expect(jsonDiff([1, 2, 3], [1, 2, 3])).toEqual([]);
});
});
describe("mixed nesting", () => {
it("handles array of objects", () => {
const oldVal = [{ name: "Alice" }, { name: "Bob" }];
const newVal = [{ name: "Alice" }, { name: "Charlie" }];
expect(jsonDiff(oldVal, newVal)).toEqual([
{ path: "[1].name", oldVal: "Bob", newVal: "Charlie" },
]);
});
it("handles object with nested arrays", () => {
const oldVal = { meetingTimes: [{ beginTime: "0900" }] };
const newVal = { meetingTimes: [{ beginTime: "1000" }] };
expect(jsonDiff(oldVal, newVal)).toEqual([
{ path: ".meetingTimes[0].beginTime", oldVal: "0900", newVal: "1000" },
]);
});
it("handles type change from object to array", () => {
expect(jsonDiff({ a: 1 }, [1])).toEqual([{ path: "", oldVal: { a: 1 }, newVal: [1] }]);
});
});
});
describe("tryParseJson", () => {
it("parses valid JSON object", () => {
expect(tryParseJson('{"a":1}')).toEqual({ a: 1 });
});
it("parses valid JSON array", () => {
expect(tryParseJson("[1,2,3]")).toEqual([1, 2, 3]);
});
it("parses plain string numbers", () => {
expect(tryParseJson("42")).toBe(42);
expect(tryParseJson("3.14")).toBe(3.14);
});
it("returns null for invalid JSON", () => {
expect(tryParseJson("not json")).toBeNull();
expect(tryParseJson("{broken")).toBeNull();
});
it("parses boolean and null literals", () => {
expect(tryParseJson("true")).toBe(true);
expect(tryParseJson("null")).toBeNull();
});
});
describe("formatDiffPath", () => {
it("strips leading dot", () => {
expect(formatDiffPath(".a.b.c")).toBe("a.b.c");
});
it("returns (root) for empty path", () => {
expect(formatDiffPath("")).toBe("(root)");
});
it("preserves bracket notation", () => {
expect(formatDiffPath("[0].name")).toBe("[0].name");
});
it("handles mixed paths", () => {
expect(formatDiffPath(".meetingTimes[0].beginTime")).toBe("meetingTimes[0].beginTime");
});
});
+86
@@ -0,0 +1,86 @@
export interface DiffEntry {
path: string;
oldVal: unknown;
newVal: unknown;
}
function isObject(val: unknown): val is Record<string, unknown> {
return val !== null && typeof val === "object" && !Array.isArray(val);
}
/**
* Recursively compares two JSON-compatible values and returns a list of
* structural differences with dot-notation paths.
*/
export function jsonDiff(oldVal: unknown, newVal: unknown): DiffEntry[] {
return diffRecurse("", oldVal, newVal);
}
function diffRecurse(path: string, oldVal: unknown, newVal: unknown): DiffEntry[] {
// Both arrays: compare by index up to max length
if (Array.isArray(oldVal) && Array.isArray(newVal)) {
const entries: DiffEntry[] = [];
const maxLen = Math.max(oldVal.length, newVal.length);
for (let i = 0; i < maxLen; i++) {
const childPath = `${path}[${i}]`;
const inOld = i < oldVal.length;
const inNew = i < newVal.length;
if (inOld && inNew) {
entries.push(...diffRecurse(childPath, oldVal[i], newVal[i]));
} else if (inNew) {
entries.push({ path: childPath, oldVal: undefined, newVal: newVal[i] });
} else {
entries.push({ path: childPath, oldVal: oldVal[i], newVal: undefined });
}
}
return entries;
}
// Both objects: iterate union of keys
if (isObject(oldVal) && isObject(newVal)) {
const entries: DiffEntry[] = [];
const allKeys = new Set([...Object.keys(oldVal), ...Object.keys(newVal)]);
for (const key of allKeys) {
const childPath = `${path}.${key}`;
const inOld = key in oldVal;
const inNew = key in newVal;
if (inOld && inNew) {
entries.push(...diffRecurse(childPath, oldVal[key], newVal[key]));
} else if (inNew) {
entries.push({ path: childPath, oldVal: undefined, newVal: newVal[key] });
} else {
entries.push({ path: childPath, oldVal: oldVal[key], newVal: undefined });
}
}
return entries;
}
// Leaf comparison (primitives, or type mismatch between object/array/primitive)
if (oldVal !== newVal) {
return [{ path, oldVal, newVal }];
}
return [];
}
/**
* Cleans up a diff path for display. Strips the leading dot produced by
* object-key paths, and returns "(root)" for the empty root path.
*/
export function formatDiffPath(path: string): string {
if (path === "") return "(root)";
if (path.startsWith(".")) return path.slice(1);
return path;
}
/**
* Attempts to parse a string as JSON. Returns the parsed value on success,
* or null if parsing fails. Used by the audit log to detect JSON values.
*/
export function tryParseJson(value: string): unknown | null {
try {
return JSON.parse(value);
} catch {
return null;
}
}
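// Usage sketch (editor addition, not part of the commit): how the audit-log
// page might parse both sides, diff them, and render one row per changed path.
const oldJson = tryParseJson('{"seatsAvailable":25,"status":"Open"}');
const newJson = tryParseJson('{"seatsAvailable":0,"status":"Closed"}');
if (oldJson !== null && newJson !== null) {
  for (const { path, oldVal, newVal } of jsonDiff(oldJson, newJson)) {
    console.log(`${formatDiffPath(path)}: ${JSON.stringify(oldVal)} -> ${JSON.stringify(newVal)}`);
  }
  // seatsAvailable: 25 -> 0
  // status: "Open" -> "Closed"
}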
+52
@@ -0,0 +1,52 @@
import { beforeNavigate } from "$app/navigation";
export type NavDirection = "left" | "right" | "fade";
/** Sidebar nav order — indexes determine slide direction for same-depth siblings */
const SIDEBAR_NAV_ORDER = [
"/profile",
"/settings",
"/admin",
"/admin/jobs",
"/admin/audit",
"/admin/users",
];
function getDepth(path: string): number {
return path.replace(/\/$/, "").split("/").filter(Boolean).length;
}
function getSidebarIndex(path: string): number {
return SIDEBAR_NAV_ORDER.indexOf(path);
}
function computeDirection(from: string, to: string): NavDirection {
const fromDepth = getDepth(from);
const toDepth = getDepth(to);
if (toDepth > fromDepth) return "right";
if (toDepth < fromDepth) return "left";
// Same depth — use sidebar ordering if both are sidebar routes
const fromIdx = getSidebarIndex(from);
const toIdx = getSidebarIndex(to);
if (fromIdx >= 0 && toIdx >= 0) {
return toIdx > fromIdx ? "right" : "left";
}
return "fade";
}
class NavigationStore {
direction: NavDirection = $state("fade");
}
export const navigationStore = new NavigationStore();
/** Call once from root layout to start tracking navigation direction */
export function initNavigation() {
beforeNavigate(({ from, to }) => {
if (!from?.url || !to?.url) return;
navigationStore.direction = computeDirection(from.url.pathname, to.url.pathname);
});
}
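// Usage sketch (editor addition, not part of the commit): call initNavigation()
// once from the root +layout.svelte script, then key transitions off
// navigationStore.direction. Example outcomes of the heuristic:
//   /profile     -> /settings    same depth, both in sidebar   => "right"
//   /admin       -> /admin/jobs  deeper route                  => "right"
//   /admin/users -> /admin       shallower route               => "left"
//   /courses     -> /profile     same depth, not both sidebar  => "fade"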
+70 -44
@@ -1,69 +1,95 @@
/**
* Relative time formatting with adaptive refresh intervals.
*
- * The key insight: a timestamp showing "3 seconds ago" needs to update every second,
- * but "2 hours ago" only needs to update every minute. This module provides both
* The key insight: a timestamp showing "3s" needs to update every second,
* but "2h 15m" only needs to update every minute. This module provides both
* the formatted string and the optimal interval until the next meaningful change.
*/
interface RelativeTimeResult {
-  /** The human-readable relative time string (e.g. "3 seconds ago") */
/** Compact relative time string (e.g. "9m 35s", "1h 23m", "3d") */
text: string;
/** Milliseconds until the displayed text would change */
nextUpdateMs: number;
}
/**
- * Compute a relative time string and the interval until it next changes.
* Format a duration in milliseconds as a compact human-readable string.
*
- * Granularity tiers:
- * - < 60s: per-second ("1 second ago", "45 seconds ago")
- * - < 60m: per-minute ("1 minute ago", "12 minutes ago")
- * - < 24h: per-hour ("1 hour ago", "5 hours ago")
- * - >= 24h: per-day ("1 day ago", "3 days ago")
* Format tiers:
* - < 60s: seconds only ("45s")
* - < 1h: minutes + seconds ("9m 35s")
* - < 24h: hours + minutes ("1h 23m")
* - >= 24h: days only ("3d")
*/
export function formatDuration(ms: number): string {
const totalSeconds = Math.floor(Math.abs(ms) / 1000);
if (totalSeconds < 60) return `${totalSeconds}s`;
const totalMinutes = Math.floor(totalSeconds / 60);
if (totalMinutes < 60) {
const secs = totalSeconds % 60;
return `${totalMinutes}m ${secs}s`;
}
const totalHours = Math.floor(totalMinutes / 60);
if (totalHours < 24) {
const mins = totalMinutes % 60;
return `${totalHours}h ${mins}m`;
}
const days = Math.floor(totalHours / 24);
return `${days}d`;
}
/**
* Compute a compact relative time string and the interval until it next changes.
*
* Uses {@link formatDuration} for the text, plus computes the optimal refresh
* interval so callers can schedule the next update efficiently.
*/
/**
* Format a millisecond duration with a dynamic unit, optimised for
* scrape-style timings that are typically under 60 seconds.
*
* - < 1 000 ms → "423ms"
* - < 10 000 ms → "4.52s" (two decimals)
* - < 60 000 ms → "16.9s" (one decimal)
* - ≥ 60 000 ms → delegates to {@link formatDuration} ("1m 5s")
*/
export function formatDurationMs(ms: number): string {
const abs = Math.abs(ms);
if (abs < 1_000) return `${Math.round(abs)}ms`;
if (abs < 10_000) return `${(abs / 1_000).toFixed(2)}s`;
if (abs < 60_000) return `${(abs / 1_000).toFixed(1)}s`;
return formatDuration(ms);
}
export function relativeTime(date: Date, ref: Date): RelativeTimeResult {
const diffMs = ref.getTime() - date.getTime();
-  const seconds = Math.round(diffMs / 1000);
const totalSeconds = Math.floor(diffMs / 1000);
-  if (seconds < 1) {
-    return { text: "just now", nextUpdateMs: 1000 - (diffMs % 1000) || 1000 };
if (totalSeconds < 1) {
return { text: "now", nextUpdateMs: 1000 - (diffMs % 1000) || 1000 };
}
-  if (seconds < 60) {
-    const remainder = 1000 - (diffMs % 1000);
-    return {
-      text: seconds === 1 ? "1 second ago" : `${seconds} seconds ago`,
-      nextUpdateMs: remainder || 1000,
-    };
-  }
const text = formatDuration(diffMs);
-  const minutes = Math.floor(seconds / 60);
-  if (minutes < 60) {
-    // Update when the next minute boundary is crossed
// Compute optimal next-update interval based on the current tier
const totalMinutes = Math.floor(totalSeconds / 60);
const totalHours = Math.floor(totalMinutes / 60);
let nextUpdateMs: number;
if (totalHours >= 24) {
const msIntoCurrentDay = diffMs % 86_400_000;
nextUpdateMs = 86_400_000 - msIntoCurrentDay || 86_400_000;
} else if (totalMinutes >= 60) {
const msIntoCurrentMinute = diffMs % 60_000;
-    const msUntilNextMinute = 60_000 - msIntoCurrentMinute;
-    return {
-      text: minutes === 1 ? "1 minute ago" : `${minutes} minutes ago`,
-      nextUpdateMs: msUntilNextMinute || 60_000,
-    };
nextUpdateMs = 60_000 - msIntoCurrentMinute || 60_000;
} else {
nextUpdateMs = 1000 - (diffMs % 1000) || 1000;
}
-  const hours = Math.floor(minutes / 60);
-  if (hours < 24) {
-    const msIntoCurrentHour = diffMs % 3_600_000;
-    const msUntilNextHour = 3_600_000 - msIntoCurrentHour;
-    return {
-      text: hours === 1 ? "1 hour ago" : `${hours} hours ago`,
-      nextUpdateMs: msUntilNextHour || 3_600_000,
-    };
-  }
-  const days = Math.floor(hours / 24);
-  const msIntoCurrentDay = diffMs % 86_400_000;
-  const msUntilNextDay = 86_400_000 - msIntoCurrentDay;
-  return {
-    text: days === 1 ? "1 day ago" : `${days} days ago`,
-    nextUpdateMs: msUntilNextDay || 86_400_000,
-  };
return { text, nextUpdateMs };
}
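// Usage sketch (editor addition, not part of the commit): the adaptive interval
// drives a self-rescheduling timer, so a "9m 35s" badge ticks every second
// while a "3d" badge wakes only on day boundaries.
function startClock(date: Date, render: (text: string) => void): () => void {
  let timer: ReturnType<typeof setTimeout>;
  const tick = () => {
    const { text, nextUpdateMs } = relativeTime(date, new Date());
    render(text);
    timer = setTimeout(tick, nextUpdateMs);
  };
  tick();
  return () => clearTimeout(timer);
}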
+111
@@ -0,0 +1,111 @@
/**
* Animation-map management for the timeline's stacked-area transitions.
*
* Each visible slot has per-subject animated values that lerp toward
* targets. This module owns the AnimMap lifecycle: syncing targets,
* stepping current values, and pruning offscreen entries.
*/
import { VALUE_EASE, MAXY_EASE, SETTLE_THRESHOLD, MIN_MAXY } from "./constants";
import type { AnimEntry, TimeSlot } from "./types";
export type AnimMap = Map<number, Map<string, AnimEntry>>;
/** Create a fresh, empty animation map. */
export function createAnimMap(): AnimMap {
return new Map();
}
/**
* Sync animMap targets from data + filter state.
* New slots start at current=0 so they animate in from the baseline.
* Disabled subjects get target=0 so they animate out.
*
* @param subjects - the full list of known subject codes
* @param enabledSubjects - subjects currently toggled on
*/
export function syncAnimTargets(
animMap: AnimMap,
slots: TimeSlot[],
subjects: readonly string[],
enabledSubjects: Set<string>
): void {
for (const slot of slots) {
const timeMs = slot.time.getTime();
let subjectMap = animMap.get(timeMs);
if (!subjectMap) {
subjectMap = new Map();
animMap.set(timeMs, subjectMap);
}
for (const subject of subjects) {
const realValue = enabledSubjects.has(subject) ? slot.subjects[subject] || 0 : 0;
const entry = subjectMap.get(subject);
if (entry) {
entry.target = realValue;
} else {
subjectMap.set(subject, { current: 0, target: realValue });
}
}
}
}
/**
* Advance all animated values toward their targets.
* Returns the new animatedMaxY value and whether any entry is still moving.
*/
export function stepAnimations(
animMap: AnimMap,
dt: number,
prevMaxY: number
): { animating: boolean; maxY: number } {
const ease = 1 - Math.pow(1 - VALUE_EASE, dt / 16);
let animating = false;
let computedMaxY = MIN_MAXY;
for (const subjectMap of animMap.values()) {
let slotSum = 0;
for (const entry of subjectMap.values()) {
const diff = entry.target - entry.current;
if (Math.abs(diff) < SETTLE_THRESHOLD) {
if (entry.current !== entry.target) {
entry.current = entry.target;
}
} else {
entry.current += diff * ease;
animating = true;
}
slotSum += entry.current;
}
if (slotSum > computedMaxY) computedMaxY = slotSum;
}
const maxYEase = 1 - Math.pow(1 - MAXY_EASE, dt / 16);
const maxDiff = computedMaxY - prevMaxY;
let maxY: number;
if (Math.abs(maxDiff) < SETTLE_THRESHOLD) {
maxY = computedMaxY;
} else {
maxY = prevMaxY + maxDiff * maxYEase;
animating = true;
}
return { animating, maxY };
}
/**
* Remove animMap entries whose timestamps fall outside the viewport
* (with margin), preventing unbounded memory growth during long sessions.
*/
export function pruneAnimMap(
animMap: AnimMap,
viewStart: number,
viewEnd: number,
viewSpan: number
): void {
const margin = viewSpan;
for (const timeMs of animMap.keys()) {
if (timeMs < viewStart - margin || timeMs > viewEnd + margin) {
animMap.delete(timeMs);
}
}
}
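// Usage sketch (editor addition, not part of the commit): the per-frame
// orchestration a requestAnimationFrame loop would run; view bounds are in ms.
const animMap = createAnimMap();
let animatedMaxY = MIN_MAXY;
function frame(
  dt: number,
  slots: TimeSlot[],
  subjects: readonly string[],
  enabled: Set<string>,
  viewStart: number,
  viewEnd: number
): boolean {
  syncAnimTargets(animMap, slots, subjects, enabled);
  const step = stepAnimations(animMap, dt, animatedMaxY);
  animatedMaxY = step.maxY;
  pruneAnimMap(animMap, viewStart, viewEnd, viewEnd - viewStart);
  return step.animating; // caller keeps requesting frames while true
}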
+79
@@ -0,0 +1,79 @@
/** Layout & padding */
export const PADDING = { top: 20, right: 0, bottom: 40, left: 0 } as const;
export const DEFAULT_AXIS_RATIO = 0.8;
export const CHART_HEIGHT_RATIO = 0.6;
/** Viewport span limits (ms) */
export const MIN_SPAN_MS = 5 * 60 * 1000;
export const MAX_SPAN_MS = 72 * 60 * 60 * 1000;
export const DEFAULT_SPAN_MS = 36 * 60 * 60 * 1000;
/** Slot interval (15 minutes) */
export const SLOT_INTERVAL_MS = 15 * 60 * 1000;
/** Zoom */
export const ZOOM_FACTOR = 1.15;
export const ZOOM_KEY_FACTOR = 1.25;
export const ZOOM_EASE = 0.12;
export const ZOOM_SETTLE_THRESHOLD = 100;
/** Grid rendering */
export const GRID_ALPHA = 0.15;
export const HOUR_GRID_ALPHA = 0.25;
/** Now line */
export const NOW_LINE_WIDTH = 2;
export const NOW_LINE_COLOR = "#ef4444";
export const NOW_TRIANGLE_HEIGHT = 6;
export const NOW_TRIANGLE_HALF_WIDTH = 5;
export const NOW_LABEL_FONT = "bold 10px Inter, system-ui, sans-serif";
/** Hover highlight */
export const HOVER_HIGHLIGHT_ALPHA = 0.25;
/** Stacked area */
export const AREA_FILL_ALPHA = 0.82;
export const AREA_STROKE_ALPHA = 0.4;
/** Physics / momentum */
export const PAN_FRICTION = 0.94;
export const PAN_STOP_THRESHOLD = 0.5;
export const PAN_STOP_THRESHOLD_Y = 0.01;
export const VELOCITY_SAMPLE_WINDOW = 80;
export const VELOCITY_MIN_DT = 5;
/** Keyboard panning */
export const PAN_STEP_RATIO = 0.1;
export const PAN_STEP_CTRL_RATIO = 0.35;
export const PAN_EASE = 0.12;
export const PAN_SETTLE_THRESHOLD_PX = 1;
/** Y-axis panning */
export const YRATIO_STEP = 0.03;
export const YRATIO_MIN = 0.3;
export const YRATIO_MAX = 1.2;
export const YRATIO_SETTLE_THRESHOLD = 0.001;
/** Follow mode */
export const FOLLOW_EASE = 0.03;
/** Animation */
export const VALUE_EASE = 0.12;
export const MAXY_EASE = 0.4;
export const SETTLE_THRESHOLD = 0.5;
export const MIN_MAXY = 60;
export const MAX_DT = 50;
export const DEFAULT_DT = 16;
/** Visible-slot margin for smooth curve edges at viewport boundaries */
export const RENDER_MARGIN_SLOTS = 3;
/** Drawer */
export const DRAWER_WIDTH = 220;
/** Touch / tap */
export const TAP_MAX_DURATION_MS = 250;
export const TAP_MAX_DISTANCE_PX = 10;
/** Axis text */
export const AXIS_FONT = "11px Inter, system-ui, sans-serif";
+78
@@ -0,0 +1,78 @@
/**
* Subject color palette for the timeline chart.
*
* Subjects are dynamic (coming from the API), so we assign colors from
* a fixed palette based on a deterministic hash of the subject code.
* Known high-enrollment subjects get hand-picked colors for familiarity.
*/
/** Hand-picked colors for common UTSA subject codes. */
const KNOWN_SUBJECT_COLORS: Record<string, string> = {
CS: "#6366f1", // indigo
MAT: "#f59e0b", // amber
BIO: "#10b981", // emerald
ENG: "#ef4444", // red
PHY: "#3b82f6", // blue
HIS: "#8b5cf6", // violet
CHE: "#f97316", // orange
PSY: "#ec4899", // pink
ECE: "#14b8a6", // teal
ART: "#a855f7", // purple
ACC: "#84cc16", // lime
FIN: "#06b6d4", // cyan
MUS: "#e11d48", // rose
POL: "#d946ef", // fuchsia
SOC: "#22d3ee", // sky
KIN: "#4ade80", // green
IS: "#fb923c", // light orange
STA: "#818cf8", // light indigo
MGT: "#fbbf24", // yellow
MKT: "#2dd4bf", // teal-light
};
/**
* Extended palette for subjects that don't have a hand-picked color.
* These are chosen to be visually distinct from each other.
*/
const FALLBACK_PALETTE = [
"#f472b6", // pink-400
"#60a5fa", // blue-400
"#34d399", // emerald-400
"#fbbf24", // amber-400
"#a78bfa", // violet-400
"#fb7185", // rose-400
"#38bdf8", // sky-400
"#4ade80", // green-400
"#facc15", // yellow-400
"#c084fc", // purple-400
"#f87171", // red-400
"#2dd4bf", // teal-400
"#fb923c", // orange-400
"#818cf8", // indigo-400
"#a3e635", // lime-400
"#22d3ee", // cyan-400
];
/** Simple string hash for deterministic color assignment. */
function hashCode(str: string): number {
let hash = 0;
for (let i = 0; i < str.length; i++) {
hash = ((hash << 5) - hash + str.charCodeAt(i)) | 0;
}
return Math.abs(hash);
}
/** Cache of assigned colors to avoid re-computing. */
const colorCache = new Map<string, string>();
/** Get a consistent color for any subject code. */
export function getSubjectColor(subject: string): string {
const cached = colorCache.get(subject);
if (cached) return cached;
const color =
KNOWN_SUBJECT_COLORS[subject] ?? FALLBACK_PALETTE[hashCode(subject) % FALLBACK_PALETTE.length];
colorCache.set(subject, color);
return color;
}
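// Usage sketch (editor addition, not part of the commit):
getSubjectColor("CS"); // "#6366f1", from the hand-picked table
getSubjectColor("WGS"); // deterministic fallback-palette pick, cached after the first call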
+300
@@ -0,0 +1,300 @@
/**
* Pure canvas rendering functions for the timeline chart.
*
* Every function takes a {@link ChartContext} plus any data it needs.
* No Svelte reactivity, no side-effects beyond drawing on the context.
*/
import { stack, area, curveMonotoneX, type Series } from "d3-shape";
import { timeFormat } from "d3-time-format";
import { getSubjectColor } from "./data";
import type { AnimMap } from "./animation";
import { getStackSubjects } from "./viewport";
import type { ChartContext, TimeSlot } from "./types";
import {
GRID_ALPHA,
HOUR_GRID_ALPHA,
NOW_LINE_WIDTH,
NOW_LINE_COLOR,
NOW_TRIANGLE_HEIGHT,
NOW_TRIANGLE_HALF_WIDTH,
NOW_LABEL_FONT,
HOVER_HIGHLIGHT_ALPHA,
AREA_FILL_ALPHA,
AREA_STROKE_ALPHA,
SLOT_INTERVAL_MS,
SETTLE_THRESHOLD,
AXIS_FONT,
} from "./constants";
// ── Formatters (allocated once) ─────────────────────────────────────
const fmtHour = timeFormat("%-I %p");
const fmtAxisDetailed = timeFormat("%-I:%M %p");
const fmtAxisCoarse = timeFormat("%-I %p");
const fmtNow = timeFormat("%-I:%M %p");
// ── Stacked-area types ──────────────────────────────────────────────
type StackPoint = Series<TimeSlot, string>[number];
export type VisibleStack = Series<TimeSlot, string>[];
// ── Tick count heuristic ────────────────────────────────────────────
/** Choose the number of x-axis ticks based on the viewport span. */
export function chooseTickCount(viewSpan: number): number {
const spanHours = viewSpan / (60 * 60 * 1000);
  if (spanHours <= 3) return 12;
if (spanHours <= 8) return 16;
if (spanHours <= 14) return 14;
return 10;
}
// ── Stack computation ───────────────────────────────────────────────
/**
* Stack only the visible slice using *animated* values so transitions
* between filter/data states are smooth. Includes subjects that are
* still animating out so removal is gradual.
*
* @param allSubjects - full set of known subject codes
*/
export function stackVisibleSlots(
visible: TimeSlot[],
allSubjects: readonly string[],
enabledSubjects: Set<string>,
animMap: AnimMap
): VisibleStack {
if (visible.length === 0) return [];
const stackKeys = getStackSubjects(
visible,
allSubjects,
enabledSubjects,
animMap,
SETTLE_THRESHOLD
);
if (stackKeys.length === 0) return [];
// Build synthetic slots with animated current values.
const animatedSlots: TimeSlot[] = visible.map((slot) => {
const timeMs = slot.time.getTime();
const subjectMap = animMap.get(timeMs);
const subjects: Record<string, number> = {};
for (const subject of stackKeys) {
const entry = subjectMap?.get(subject);
subjects[subject] = entry ? entry.current : slot.subjects[subject] || 0;
}
return { time: slot.time, subjects };
});
const gen = stack<TimeSlot>()
.keys(stackKeys)
.value((d, key) => d.subjects[key] || 0);
return gen(animatedSlots);
}
// ── Drawing functions ───────────────────────────────────────────────
export function drawGrid(chart: ChartContext): void {
const { ctx, xScale, chartTop, chartBottom, width, viewSpan, viewStart, viewEnd } = chart;
const tickCount = chooseTickCount(viewSpan);
ctx.save();
ctx.lineWidth = 1;
// Minor tick grid lines
const ticks = xScale.ticks(tickCount);
ctx.strokeStyle = `rgba(128, 128, 128, ${GRID_ALPHA})`;
for (const tick of ticks) {
if (tick.getMinutes() === 0) continue;
const x = Math.round(xScale(tick)) + 0.5;
if (x < 0 || x > width) continue;
ctx.beginPath();
ctx.moveTo(x, chartTop);
ctx.lineTo(x, chartBottom);
ctx.stroke();
}
// Hourly grid lines with labels
const spanHours = viewSpan / (60 * 60 * 1000);
let hourStep = 1;
if (spanHours > 48) hourStep = 6;
else if (spanHours > 24) hourStep = 4;
else if (spanHours > 12) hourStep = 2;
const startHour = new Date(viewStart);
startHour.setMinutes(0, 0, 0);
if (hourStep > 1) {
const h = startHour.getHours();
startHour.setHours(Math.ceil(h / hourStep) * hourStep);
} else if (startHour.getTime() < viewStart) {
startHour.setHours(startHour.getHours() + 1);
}
  const hourStepMs = hourStep * 60 * 60 * 1000;
  // Explicit text styling for the hour labels drawn in the loop below.
  ctx.fillStyle = "rgba(128, 128, 128, 0.6)";
  ctx.font = AXIS_FONT;
  ctx.textAlign = "left";
  ctx.textBaseline = "top";
  for (let t = startHour.getTime(); t <= viewEnd; t += hourStepMs) {
const d = new Date(t);
const x = Math.round(xScale(d)) + 0.5;
if (x < 0 || x > width) continue;
ctx.strokeStyle = `rgba(128, 128, 128, ${HOUR_GRID_ALPHA})`;
ctx.beginPath();
ctx.moveTo(x, chartTop);
ctx.lineTo(x, chartBottom);
ctx.stroke();
ctx.fillText(fmtHour(d), x + 5, chartTop + 6);
}
ctx.restore();
}
/**
* Trace the top outline of the stacked area onto `ctx` as a clip path.
*/
function traceStackOutline(chart: ChartContext, visibleStack: VisibleStack): void {
if (visibleStack.length === 0) return;
const { ctx, xScale, yScale } = chart;
const topLayer = visibleStack[visibleStack.length - 1];
ctx.beginPath();
area<StackPoint>()
.x((d) => xScale(d.data.time))
.y0(() => yScale(0))
.y1((d) => yScale(d[1]))
.curve(curveMonotoneX)
.context(ctx)(topLayer as unknown as StackPoint[]);
}
export function drawHoverColumn(
chart: ChartContext,
visibleStack: VisibleStack,
hoverSlotTime: number | null
): void {
if (hoverSlotTime == null || visibleStack.length === 0) return;
const { ctx, xScale, chartTop, chartBottom } = chart;
const x0 = xScale(new Date(hoverSlotTime));
const x1 = xScale(new Date(hoverSlotTime + SLOT_INTERVAL_MS));
ctx.save();
traceStackOutline(chart, visibleStack);
ctx.clip();
ctx.fillStyle = `rgba(255, 255, 255, ${HOVER_HIGHLIGHT_ALPHA})`;
ctx.fillRect(x0, chartTop, x1 - x0, chartBottom - chartTop);
ctx.restore();
}
export function drawStackedArea(chart: ChartContext, visibleStack: VisibleStack): void {
const { ctx, xScale, yScale, width, chartTop, chartBottom } = chart;
ctx.save();
ctx.beginPath();
ctx.rect(0, chartTop, width, chartBottom - chartTop);
ctx.clip();
for (let i = visibleStack.length - 1; i >= 0; i--) {
const layer = visibleStack[i];
const subject = layer.key;
const color = getSubjectColor(subject);
ctx.beginPath();
area<StackPoint>()
.x((d) => xScale(d.data.time))
.y0((d) => yScale(d[0]))
.y1((d) => yScale(d[1]))
.curve(curveMonotoneX)
.context(ctx)(layer as unknown as StackPoint[]);
ctx.globalAlpha = AREA_FILL_ALPHA;
ctx.fillStyle = color;
ctx.fill();
ctx.globalAlpha = AREA_STROKE_ALPHA;
ctx.strokeStyle = color;
ctx.lineWidth = 1;
ctx.stroke();
}
ctx.restore();
}
export function drawNowLine(chart: ChartContext): void {
const { ctx, xScale, chartTop, chartBottom } = chart;
const now = new Date();
const x = xScale(now);
if (x < 0 || x > chart.width) return;
ctx.save();
ctx.shadowColor = "rgba(239, 68, 68, 0.5)";
ctx.shadowBlur = 8;
ctx.strokeStyle = NOW_LINE_COLOR;
ctx.lineWidth = NOW_LINE_WIDTH;
ctx.setLineDash([]);
ctx.beginPath();
ctx.moveTo(x, chartTop);
ctx.lineTo(x, chartBottom);
ctx.stroke();
ctx.shadowBlur = 0;
ctx.fillStyle = NOW_LINE_COLOR;
// Triangle marker at chart top
ctx.beginPath();
ctx.moveTo(x - NOW_TRIANGLE_HALF_WIDTH, chartTop);
ctx.lineTo(x + NOW_TRIANGLE_HALF_WIDTH, chartTop);
ctx.lineTo(x, chartTop + NOW_TRIANGLE_HEIGHT);
ctx.closePath();
ctx.fill();
// Time label
ctx.font = NOW_LABEL_FONT;
ctx.textAlign = "left";
ctx.textBaseline = "bottom";
ctx.fillText(fmtNow(now), x + 6, chartTop - 1);
ctx.restore();
}
export function drawTimeAxis(chart: ChartContext): void {
const { ctx, xScale, width, chartBottom, viewSpan } = chart;
const tickCount = chooseTickCount(viewSpan);
const y = chartBottom;
const ticks = xScale.ticks(tickCount);
ctx.save();
// Axis baseline
ctx.strokeStyle = "rgba(128, 128, 128, 0.15)";
ctx.lineWidth = 1;
ctx.beginPath();
ctx.moveTo(0, y + 0.5);
ctx.lineTo(width, y + 0.5);
ctx.stroke();
const spanHours = viewSpan / (60 * 60 * 1000);
const fmt = spanHours <= 3 ? fmtAxisDetailed : fmtAxisCoarse;
ctx.fillStyle = "rgba(128, 128, 128, 0.6)";
ctx.font = AXIS_FONT;
ctx.textAlign = "center";
ctx.textBaseline = "top";
for (const tick of ticks) {
const x = xScale(tick);
if (x < 20 || x > width - 20) continue;
ctx.strokeStyle = "rgba(128, 128, 128, 0.2)";
ctx.beginPath();
ctx.moveTo(x, y);
ctx.lineTo(x, y + 4);
ctx.stroke();
ctx.fillText(fmt(tick), x, y + 6);
}
ctx.restore();
}
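// Usage sketch (editor addition, not part of the commit): the draw order an
// orchestrator would use each frame; `chart` is the per-frame ChartContext.
function renderFrame(
  chart: ChartContext,
  visible: TimeSlot[],
  allSubjects: readonly string[],
  enabled: Set<string>,
  animMap: AnimMap,
  hoverSlotTime: number | null
): void {
  const visibleStack = stackVisibleSlots(visible, allSubjects, enabled, animMap);
  drawGrid(chart);
  drawStackedArea(chart, visibleStack);
  drawHoverColumn(chart, visibleStack, hoverSlotTime);
  drawNowLine(chart);
  drawTimeAxis(chart);
}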
+202
@@ -0,0 +1,202 @@
/**
* Reactive timeline data store with gap-aware on-demand loading.
*
* Tracks which time ranges have already been fetched and only requests
* the missing segments when the view expands into unloaded territory.
* Fetches are throttled so rapid panning/zooming doesn't flood the API.
*/
import { client, type TimelineRange } from "$lib/api";
import { SLOT_INTERVAL_MS } from "./constants";
import type { TimeSlot } from "./types";
/** Inclusive range of aligned slot timestamps [start, end]. */
type Range = [start: number, end: number];
const FETCH_THROTTLE_MS = 500;
const BUFFER_RATIO = 0.15;
/** Align a timestamp down to the nearest slot boundary. */
function alignFloor(ms: number): number {
return Math.floor(ms / SLOT_INTERVAL_MS) * SLOT_INTERVAL_MS;
}
/** Align a timestamp up to the nearest slot boundary. */
function alignCeil(ms: number): number {
return Math.ceil(ms / SLOT_INTERVAL_MS) * SLOT_INTERVAL_MS;
}
/**
* Given a requested range and a sorted list of already-loaded ranges,
* return the sub-ranges that still need fetching.
*/
function findGaps(start: number, end: number, loaded: Range[]): Range[] {
const s = alignFloor(start);
const e = alignCeil(end);
if (loaded.length === 0) return [[s, e]];
const gaps: Range[] = [];
let cursor = s;
for (const [ls, le] of loaded) {
if (le < cursor) continue; // entirely before cursor
if (ls > e) break; // entirely past our range
if (ls > cursor) {
gaps.push([cursor, Math.min(ls, e)]);
}
cursor = Math.max(cursor, le);
}
if (cursor < e) {
gaps.push([cursor, e]);
}
return gaps;
}
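// Example (editor addition, not part of the commit), with m = 60_000 ms and a
// slot-aligned t: one loaded range in the middle leaves two gaps around it:
//   findGaps(t, t + 60 * m, [[t + 15 * m, t + 30 * m]])
//     -> [[t, t + 15 * m], [t + 30 * m, t + 60 * m]]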
/** Merge a new range into a sorted, non-overlapping range list (mutates nothing). */
function mergeRange(ranges: Range[], added: Range): Range[] {
const all = [...ranges, added].sort((a, b) => a[0] - b[0]);
const merged: Range[] = [];
for (const r of all) {
if (merged.length === 0 || merged[merged.length - 1][1] < r[0]) {
merged.push([r[0], r[1]]);
} else {
merged[merged.length - 1][1] = Math.max(merged[merged.length - 1][1], r[1]);
}
}
return merged;
}
/**
* Fetch timeline data for the given gap ranges from the API.
* Converts gap ranges into the API request format.
*/
async function fetchFromApi(gaps: Range[]): Promise<TimeSlot[]> {
const ranges: TimelineRange[] = gaps.map(([start, end]) => ({
start: new Date(start).toISOString(),
end: new Date(end).toISOString(),
}));
const response = await client.getTimeline(ranges);
return response.slots.map((slot) => ({
time: new Date(slot.time),
subjects: slot.subjects,
}));
}
/**
* Create a reactive timeline store.
*
* Call `requestRange(viewStart, viewEnd)` whenever the visible window
* changes. The store applies a 15 % buffer, computes which sub-ranges
* are missing, and fetches them (throttled to 500 ms).
*
* The `data` getter returns a sorted `TimeSlot[]` that reactively
* updates as new segments arrive.
*
* The `subjects` getter returns the sorted list of all subject codes
* seen so far across all fetched data.
*/
export function createTimelineStore() {
// All loaded slots keyed by aligned timestamp (ms).
let slotMap: Map<number, TimeSlot> = $state(new Map());
// Sorted, non-overlapping list of fetched ranges.
let loadedRanges: Range[] = [];
// All subject codes observed across all fetched data.
let knownSubjects: Set<string> = $state(new Set());
let throttleTimer: ReturnType<typeof setTimeout> | undefined;
let pendingStart = 0;
let pendingEnd = 0;
let hasFetchedOnce = false;
// Sorted array derived from the map. The O(n log n) sort only runs when
// slotMap is reassigned, which happens on fetch completion — not per frame.
const data: TimeSlot[] = $derived(
[...slotMap.values()].sort((a, b) => a.time.getTime() - b.time.getTime())
);
// Sorted subject list derived from the known subjects set.
const subjects: string[] = $derived([...knownSubjects].sort());
async function fetchGaps(start: number, end: number): Promise<void> {
const gaps = findGaps(start, end, loadedRanges);
if (gaps.length === 0) return;
let slots: TimeSlot[];
try {
slots = await fetchFromApi(gaps);
} catch (err) {
console.error("Timeline fetch failed:", err);
return;
}
// Merge results into the slot map.
const next = new Map(slotMap);
const nextSubjects = new Set(knownSubjects);
for (const slot of slots) {
next.set(slot.time.getTime(), slot);
for (const subject of Object.keys(slot.subjects)) {
nextSubjects.add(subject);
}
}
// Update loaded-range bookkeeping.
for (const gap of gaps) {
loadedRanges = mergeRange(loadedRanges, gap);
}
// Single reactive assignments.
slotMap = next;
knownSubjects = nextSubjects;
}
/**
* Notify the store that the viewport now covers [viewStart, viewEnd] (ms).
* Automatically buffers by 15 % each side and throttles fetches.
*
* The first call fetches immediately. Subsequent calls update the pending
* range but don't reset an existing timer, so continuous view changes
* (follow mode, momentum pan) don't starve the fetch.
*/
function requestRange(viewStart: number, viewEnd: number): void {
const span = viewEnd - viewStart;
const buffer = span * BUFFER_RATIO;
pendingStart = viewStart - buffer;
pendingEnd = viewEnd + buffer;
if (!hasFetchedOnce) {
hasFetchedOnce = true;
fetchGaps(pendingStart, pendingEnd);
return;
}
if (throttleTimer === undefined) {
throttleTimer = setTimeout(() => {
throttleTimer = undefined;
fetchGaps(pendingStart, pendingEnd);
}, FETCH_THROTTLE_MS);
}
}
/** Clean up the throttle timer (call on component destroy). */
function dispose(): void {
clearTimeout(throttleTimer);
}
return {
get data() {
return data;
},
get subjects() {
return subjects;
},
requestRange,
dispose,
};
}
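// Usage sketch (editor addition, not part of the commit): inside the chart
// component (runes context); `viewStart` / `viewEnd` are the component's
// reactive view bounds in ms, and `import { onDestroy } from "svelte";` is assumed.
const timeline = createTimelineStore();
$effect(() => {
  timeline.requestRange(viewStart, viewEnd); // buffering + throttling happen inside
});
onDestroy(() => timeline.dispose());
// timeline.data / timeline.subjects update reactively as gap fetches land.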
+36
@@ -0,0 +1,36 @@
/**
* Shared types for the timeline feature.
*
* Subjects are dynamic strings (actual Banner subject codes like "CS",
* "MAT", "BIO") rather than a fixed enum — the set of subjects comes
* from the API response.
*/
import type { ScaleLinear, ScaleTime } from "d3-scale";
/** A single 15-minute time slot with per-subject enrollment totals. */
export interface TimeSlot {
time: Date;
subjects: Record<string, number>;
}
/** Lerped animation entry for a single subject within a slot. */
export interface AnimEntry {
current: number;
target: number;
}
/**
* Shared context passed to all canvas rendering functions.
* Computed once per frame in the orchestrator and threaded through.
*/
export interface ChartContext {
ctx: CanvasRenderingContext2D;
xScale: ScaleTime<number, number>;
yScale: ScaleLinear<number, number>;
width: number;
chartTop: number;
chartBottom: number;
viewSpan: number;
viewStart: number;
viewEnd: number;
}
+94
@@ -0,0 +1,94 @@
/**
* Pure viewport utility functions: binary search, visible-slot slicing,
* hit-testing, and snapping for the timeline canvas.
*/
import { SLOT_INTERVAL_MS, RENDER_MARGIN_SLOTS } from "./constants";
import type { TimeSlot } from "./types";
/**
* Binary-search for the index of the first slot whose time >= target.
* `slots` must be sorted ascending by time.
*/
export function lowerBound(slots: TimeSlot[], targetMs: number): number {
let lo = 0;
let hi = slots.length;
while (lo < hi) {
const mid = (lo + hi) >>> 1;
if (slots[mid].time.getTime() < targetMs) lo = mid + 1;
else hi = mid;
}
return lo;
}
/**
* Return the sub-array of `data` covering the viewport [viewStart, viewEnd]
* plus a small margin for smooth curve edges.
*/
export function getVisibleSlots(data: TimeSlot[], viewStart: number, viewEnd: number): TimeSlot[] {
if (data.length === 0) return data;
const lo = Math.max(0, lowerBound(data, viewStart) - RENDER_MARGIN_SLOTS);
const hi = Math.min(data.length, lowerBound(data, viewEnd) + RENDER_MARGIN_SLOTS);
return data.slice(lo, hi);
}
/** Find the slot closest to `timeMs`, or null if none within one interval. */
export function findSlotByTime(data: TimeSlot[], timeMs: number): TimeSlot | null {
if (data.length === 0) return null;
const idx = lowerBound(data, timeMs);
let best: TimeSlot | null = null;
let bestDist = Infinity;
for (const i of [idx - 1, idx]) {
if (i < 0 || i >= data.length) continue;
const dist = Math.abs(data[i].time.getTime() - timeMs);
if (dist < bestDist) {
bestDist = dist;
best = data[i];
}
}
if (best && bestDist < SLOT_INTERVAL_MS) return best;
return null;
}
/** Snap a timestamp down to the nearest 15-minute slot boundary. */
export function snapToSlot(timeMs: number): number {
return Math.floor(timeMs / SLOT_INTERVAL_MS) * SLOT_INTERVAL_MS;
}
/** Sum of enrollment counts for enabled subjects in a slot. */
export function enabledTotalClasses(slot: TimeSlot, activeSubjects: readonly string[]): number {
let sum = 0;
for (const s of activeSubjects) {
sum += slot.subjects[s] || 0;
}
return sum;
}
/**
* Determine which subjects to include in the stack: all enabled subjects
* plus any disabled subjects still animating out (current > threshold).
*
* @param allSubjects - the full set of known subject codes
*/
export function getStackSubjects(
visible: TimeSlot[],
allSubjects: readonly string[],
enabledSubjects: Set<string>,
animMap: Map<number, Map<string, { current: number }>>,
settleThreshold: number
): string[] {
const subjects: string[] = [];
for (const subject of allSubjects) {
if (enabledSubjects.has(subject)) {
subjects.push(subject);
continue;
}
for (const slot of visible) {
const entry = animMap.get(slot.time.getTime())?.get(subject);
if (entry && Math.abs(entry.current) > settleThreshold) {
subjects.push(subject);
break;
}
}
}
return subjects;
}
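// Usage sketch (editor addition, not part of the commit): hit-testing the
// pointer against loaded slots; `xScale` is the d3 time scale from ChartContext
// (assumes `import type { ScaleTime } from "d3-scale";`).
function slotAtPointer(
  data: TimeSlot[],
  xScale: ScaleTime<number, number>,
  px: number
): TimeSlot | null {
  const snapped = snapToSlot(xScale.invert(px).getTime());
  return findSlotByTime(data, snapped);
}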
+23
@@ -8,3 +8,26 @@ export function cn(...inputs: ClassValue[]) {
/** Shared tooltip content styling for bits-ui Tooltip.Content */
export const tooltipContentClass =
"z-50 bg-card text-card-foreground text-xs border border-border rounded-md px-2.5 py-1.5 shadow-md max-w-72";
export interface FormatNumberOptions {
/** Prefix non-negative numbers with "+" (default: false) */
sign?: boolean;
/** Maximum fraction digits (default: 0 for integers) */
maximumFractionDigits?: number;
}
/**
* Format a number with locale-aware thousands separators.
* Uses browser locale via Intl.NumberFormat.
*/
export function formatNumber(num: number, options: FormatNumberOptions = {}): string {
const { sign = false, maximumFractionDigits = 0 } = options;
const formatted = new Intl.NumberFormat(undefined, {
maximumFractionDigits,
}).format(num);
if (sign && num >= 0) {
return `+${formatted}`;
}
return formatted;
}
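// Usage sketch (editor addition, not part of the commit); outputs assume an
// en-US locale:
formatNumber(12345); // "12,345"
formatNumber(42, { sign: true }); // "+42"
formatNumber(3.14159, { maximumFractionDigits: 2 }); // "3.14"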
+210
@@ -0,0 +1,210 @@
import type { ScrapeJob } from "$lib/api";
export type ScrapeJobStatus = "processing" | "staleLock" | "exhausted" | "scheduled" | "pending";
export type ScrapeJobEvent =
| { type: "init"; jobs: ScrapeJob[] }
| { type: "jobCreated"; job: ScrapeJob }
| { type: "jobLocked"; id: number; lockedAt: string; status: ScrapeJobStatus }
| { type: "jobCompleted"; id: number }
| {
type: "jobRetried";
id: number;
retryCount: number;
queuedAt: string;
status: ScrapeJobStatus;
}
| { type: "jobExhausted"; id: number }
| { type: "jobDeleted"; id: number };
export type ConnectionState = "connected" | "reconnecting" | "disconnected";
const PRIORITY_ORDER: Record<string, number> = {
critical: 0,
high: 1,
medium: 2,
low: 3,
};
const MAX_RECONNECT_DELAY = 30_000;
const MAX_RECONNECT_ATTEMPTS = 10;
function sortJobs(jobs: Iterable<ScrapeJob>): ScrapeJob[] {
return Array.from(jobs).sort((a, b) => {
const pa = PRIORITY_ORDER[a.priority.toLowerCase()] ?? 2;
const pb = PRIORITY_ORDER[b.priority.toLowerCase()] ?? 2;
if (pa !== pb) return pa - pb;
return new Date(a.executeAt).getTime() - new Date(b.executeAt).getTime();
});
}
export class ScrapeJobsStore {
private ws: WebSocket | null = null;
private jobs = new Map<number, ScrapeJob>();
private _connectionState: ConnectionState = "disconnected";
private _initialized = false;
private onUpdate: () => void;
private reconnectAttempts = 0;
private reconnectTimer: ReturnType<typeof setTimeout> | null = null;
private intentionalClose = false;
/** Cached sorted array, invalidated on data mutations. */
private cachedJobs: ScrapeJob[] = [];
private cacheDirty = false;
constructor(onUpdate: () => void) {
this.onUpdate = onUpdate;
}
getJobs(): ScrapeJob[] {
if (this.cacheDirty) {
this.cachedJobs = sortJobs(this.jobs.values());
this.cacheDirty = false;
}
return this.cachedJobs;
}
getConnectionState(): ConnectionState {
return this._connectionState;
}
isInitialized(): boolean {
return this._initialized;
}
connect(): void {
this.intentionalClose = false;
const protocol = window.location.protocol === "https:" ? "wss:" : "ws:";
const url = `${protocol}//${window.location.host}/api/admin/scrape-jobs/ws`;
try {
this.ws = new WebSocket(url);
} catch {
this.scheduleReconnect();
return;
}
this.ws.onopen = () => {
this._connectionState = "connected";
this.reconnectAttempts = 0;
this.onUpdate();
};
this.ws.onmessage = (event) => {
try {
const parsed = JSON.parse(event.data as string) as ScrapeJobEvent;
this.handleEvent(parsed);
} catch {
// Ignore malformed messages
}
};
this.ws.onclose = () => {
this.ws = null;
if (!this.intentionalClose) {
this.scheduleReconnect();
}
};
this.ws.onerror = () => {
// onclose will fire after onerror, so reconnect is handled there
};
}
handleEvent(event: ScrapeJobEvent): void {
switch (event.type) {
case "init":
this.jobs.clear();
for (const job of event.jobs) {
this.jobs.set(job.id, job);
}
this._initialized = true;
break;
case "jobCreated":
this.jobs.set(event.job.id, event.job);
break;
case "jobLocked": {
const job = this.jobs.get(event.id);
if (job) {
this.jobs.set(event.id, { ...job, lockedAt: event.lockedAt, status: event.status });
}
break;
}
case "jobCompleted":
this.jobs.delete(event.id);
break;
case "jobRetried": {
const job = this.jobs.get(event.id);
if (job) {
this.jobs.set(event.id, {
...job,
retryCount: event.retryCount,
queuedAt: event.queuedAt,
status: event.status,
lockedAt: null,
});
}
break;
}
case "jobExhausted": {
const job = this.jobs.get(event.id);
if (job) {
this.jobs.set(event.id, { ...job, status: "exhausted" });
}
break;
}
case "jobDeleted":
this.jobs.delete(event.id);
break;
}
this.cacheDirty = true;
this.onUpdate();
}
disconnect(): void {
this.intentionalClose = true;
if (this.reconnectTimer !== null) {
clearTimeout(this.reconnectTimer);
this.reconnectTimer = null;
}
if (this.ws) {
this.ws.close();
this.ws = null;
}
this._connectionState = "disconnected";
this.onUpdate();
}
resync(): void {
if (this.ws && this.ws.readyState === WebSocket.OPEN) {
this.ws.send(JSON.stringify({ type: "resync" }));
}
}
/** Attempt to reconnect after being disconnected. Resets attempt counter. */
retry(): void {
this.reconnectAttempts = 0;
this._connectionState = "reconnecting";
this.onUpdate();
this.connect();
}
private scheduleReconnect(): void {
if (this.reconnectAttempts >= MAX_RECONNECT_ATTEMPTS) {
this._connectionState = "disconnected";
this.onUpdate();
return;
}
this._connectionState = "reconnecting";
this.onUpdate();
const delay = Math.min(1000 * 2 ** this.reconnectAttempts, MAX_RECONNECT_DELAY);
this.reconnectAttempts++;
this.reconnectTimer = setTimeout(() => {
this.reconnectTimer = null;
this.connect();
}, delay);
}
}
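// Usage sketch (editor addition, not part of the commit): bridging the store's
// callback API into Svelte 5 runes with a version counter (component context
// and `import { onMount } from "svelte";` assumed).
let version = $state(0);
const store = new ScrapeJobsStore(() => version++);
const jobs = $derived.by(() => {
  void version; // re-run whenever the store signals an update
  return store.getJobs();
});
onMount(() => {
  store.connect();
  return () => store.disconnect();
});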
+151
@@ -0,0 +1,151 @@
<script lang="ts">
import { goto } from "$app/navigation";
import { page } from "$app/state";
import { authStore } from "$lib/auth.svelte";
import PageTransition from "$lib/components/PageTransition.svelte";
import ErrorBoundaryFallback from "$lib/components/ErrorBoundaryFallback.svelte";
import {
Activity,
ClipboardList,
FileText,
GraduationCap,
LayoutDashboard,
LogOut,
Settings,
User,
Users,
} from "@lucide/svelte";
import { tick } from "svelte";
let { children } = $props();
// Track boundary reset function so navigation can auto-clear errors
let boundaryReset = $state<(() => void) | null>(null);
let errorPathname = $state<string | null>(null);
function onBoundaryError(e: unknown, reset: () => void) {
console.error("[page boundary]", e);
boundaryReset = reset;
errorPathname = page.url.pathname;
}
// Auto-reset the boundary only when the user navigates away from the errored page
$effect(() => {
const currentPath = page.url.pathname;
if (boundaryReset && errorPathname && currentPath !== errorPathname) {
const reset = boundaryReset;
boundaryReset = null;
errorPathname = null;
tick().then(() => reset());
}
});
$effect(() => {
if (authStore.state.mode === "unauthenticated") {
goto("/login");
}
});
const userItems = [
{ href: "/profile", label: "Profile", icon: User },
{ href: "/settings", label: "Settings", icon: Settings },
];
const adminItems = [
{ href: "/admin", label: "Dashboard", icon: LayoutDashboard },
{ href: "/admin/scraper", label: "Scraper", icon: Activity },
{ href: "/admin/jobs", label: "Scrape Jobs", icon: ClipboardList },
{ href: "/admin/audit", label: "Audit Log", icon: FileText },
{ href: "/admin/users", label: "Users", icon: Users },
{ href: "/admin/instructors", label: "Instructors", icon: GraduationCap },
];
function isActive(href: string): boolean {
if (href === "/admin") return page.url.pathname === "/admin";
return page.url.pathname.startsWith(href);
}
</script>
{#if authStore.isLoading}
<div class="flex flex-col items-center px-5 pb-5 pt-20">
<div class="w-full max-w-6xl">
<p class="text-muted-foreground py-12 text-center text-sm">Loading...</p>
</div>
</div>
{:else if !authStore.isAuthenticated}
<div class="flex flex-col items-center px-5 pb-5 pt-20">
<div class="w-full max-w-6xl">
<p class="text-muted-foreground py-12 text-center text-sm">Redirecting to login...</p>
</div>
</div>
{:else}
<div class="flex flex-col items-center px-5 pb-5 pt-20">
<div class="w-full max-w-6xl flex gap-8">
<!-- Inline sidebar -->
<aside class="w-48 shrink-0 pt-1">
{#if authStore.user}
<div class="mb-4 px-2">
<p class="text-sm font-medium text-foreground">{authStore.user.discordUsername}</p>
</div>
{/if}
<nav class="flex flex-col gap-0.5">
<span class="px-2 text-[11px] font-medium uppercase tracking-wider text-muted-foreground/60 mb-0.5">User</span>
{#each userItems as item}
<a
href={item.href}
class="flex items-center gap-2 rounded-md px-2 py-1.5 text-sm no-underline transition-colors
{isActive(item.href)
? 'text-foreground bg-muted font-medium'
: 'text-muted-foreground hover:text-foreground hover:bg-muted/50'}"
>
<item.icon size={15} strokeWidth={2} />
{item.label}
</a>
{/each}
{#if authStore.isAdmin}
<div class="my-2 mx-2 border-t border-border"></div>
<span class="px-2 text-[11px] font-medium uppercase tracking-wider text-muted-foreground/60 mb-0.5">Admin</span>
{#each adminItems as item}
<a
href={item.href}
class="flex items-center gap-2 rounded-md px-2 py-1.5 text-sm no-underline transition-colors
{isActive(item.href)
? 'text-foreground bg-muted font-medium'
: 'text-muted-foreground hover:text-foreground hover:bg-muted/50'}"
>
<item.icon size={15} strokeWidth={2} />
{item.label}
</a>
{/each}
{/if}
<div class="my-2 mx-2 border-t border-border"></div>
<button
onclick={() => authStore.logout()}
class="flex items-center gap-2 rounded-md px-2 py-1.5 text-sm cursor-pointer
bg-transparent border-none text-muted-foreground hover:text-foreground hover:bg-muted/50 transition-colors"
>
<LogOut size={15} strokeWidth={2} />
Sign Out
</button>
</nav>
</aside>
<!-- Content -->
<main class="flex-1 min-w-0">
<svelte:boundary onerror={onBoundaryError}>
<PageTransition key={page.url.pathname} axis="vertical">
{@render children()}
</PageTransition>
{#snippet failed(error, reset)}
<ErrorBoundaryFallback title="Page error" {error} {reset} />
{/snippet}
</svelte:boundary>
</main>
</div>
</div>
{/if}
+55
@@ -0,0 +1,55 @@
<script lang="ts">
import { onMount } from "svelte";
import { client, type AdminStatus } from "$lib/api";
import { formatNumber } from "$lib/utils";
let status = $state<AdminStatus | null>(null);
let error = $state<string | null>(null);
onMount(async () => {
try {
status = await client.getAdminStatus();
} catch (e) {
error = e instanceof Error ? e.message : "Failed to load status";
}
});
</script>
<h1 class="mb-4 text-lg font-semibold text-foreground">Dashboard</h1>
{#if error}
<p class="text-destructive">{error}</p>
{:else if !status}
<p class="text-muted-foreground">Loading...</p>
{:else}
<div class="grid grid-cols-2 gap-4 lg:grid-cols-4">
<div class="bg-card border-border rounded-lg border p-4">
<p class="text-muted-foreground text-sm">Users</p>
<p class="text-3xl font-bold">{formatNumber(status.userCount)}</p>
</div>
<div class="bg-card border-border rounded-lg border p-4">
<p class="text-muted-foreground text-sm">Active Sessions</p>
<p class="text-3xl font-bold">{formatNumber(status.sessionCount)}</p>
</div>
<div class="bg-card border-border rounded-lg border p-4">
<p class="text-muted-foreground text-sm">Courses</p>
<p class="text-3xl font-bold">{formatNumber(status.courseCount)}</p>
</div>
<div class="bg-card border-border rounded-lg border p-4">
<p class="text-muted-foreground text-sm">Scrape Jobs</p>
<p class="text-3xl font-bold">{formatNumber(status.scrapeJobCount)}</p>
</div>
</div>
<h2 class="mt-6 mb-3 text-sm font-semibold text-foreground">Services</h2>
<div class="bg-card border-border rounded-lg border">
{#each status.services as service}
<div class="border-border flex items-center justify-between border-b px-4 py-3 last:border-b-0">
<span class="font-medium">{service.name}</span>
<span class="rounded-full bg-green-100 px-2 py-0.5 text-xs font-medium text-green-800 dark:bg-green-900 dark:text-green-200">
{service.status}
</span>
</div>
{/each}
</div>
{/if}
@@ -0,0 +1,422 @@
<script module lang="ts">
import type { AuditLogResponse } from "$lib/api";
// Persisted across navigation so returning to the page shows cached data
// instead of a skeleton loader. Timers are instance-scoped and restart on mount.
let data = $state<AuditLogResponse | null>(null);
let error = $state<string | null>(null);
let refreshError = $state(false);
let refreshInterval = 5_000;
</script>
<script lang="ts">
import { type AuditLogEntry, client } from "$lib/api";
import SimpleTooltip from "$lib/components/SimpleTooltip.svelte";
import { FlexRender, createSvelteTable } from "$lib/components/ui/data-table/index.js";
import { formatAbsoluteDate } from "$lib/date";
import { type DiffEntry, formatDiffPath, jsonDiff, tryParseJson } from "$lib/diff";
import { relativeTime } from "$lib/time";
import { formatNumber } from "$lib/utils";
import {
AlertCircle,
ArrowDown,
ArrowUp,
ArrowUpDown,
ChevronDown,
ChevronRight,
LoaderCircle,
} from "@lucide/svelte";
import {
type ColumnDef,
type SortingState,
type Updater,
getCoreRowModel,
getSortedRowModel,
} from "@tanstack/table-core";
import { onDestroy, onMount } from "svelte";
import { fade, slide } from "svelte/transition";
let expandedId: number | null = $state(null);
// --- Live-updating clock for relative timestamps ---
let now = $state(new Date());
let tickTimer: ReturnType<typeof setTimeout> | undefined;
function scheduleTick() {
tickTimer = setTimeout(() => {
now = new Date();
scheduleTick();
}, 1000);
}
// --- Auto-refresh with backoff ---
// Backoff increases on errors AND on 304 (no change). Resets to min on new data.
const MIN_INTERVAL = 5_000;
const MAX_INTERVAL = 60_000;
let refreshTimer: ReturnType<typeof setTimeout> | undefined;
// Spinner stays visible for at least MIN_SPIN_MS so the animation isn't jarring.
const MIN_SPIN_MS = 700;
let spinnerVisible = $state(false);
let spinHoldTimer: ReturnType<typeof setTimeout> | undefined;
async function fetchData() {
refreshError = false;
spinnerVisible = true;
clearTimeout(spinHoldTimer);
const startedAt = performance.now();
try {
const result = await client.getAdminAuditLog();
if (result === null) {
refreshInterval = Math.min(refreshInterval * 2, MAX_INTERVAL);
} else {
data = result;
error = null;
refreshInterval = MIN_INTERVAL;
}
} catch (e) {
error = e instanceof Error ? e.message : "Failed to load audit log";
refreshError = true;
refreshInterval = Math.min(refreshInterval * 2, MAX_INTERVAL);
} finally {
const elapsed = performance.now() - startedAt;
const remaining = MIN_SPIN_MS - elapsed;
if (remaining > 0) {
spinHoldTimer = setTimeout(() => {
spinnerVisible = false;
}, remaining);
} else {
spinnerVisible = false;
}
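// Always schedule the next poll, even when this one failed.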
scheduleRefresh();
}
}
function scheduleRefresh() {
clearTimeout(refreshTimer);
refreshTimer = setTimeout(fetchData, refreshInterval);
}
onMount(() => {
fetchData();
scheduleTick();
});
onDestroy(() => {
clearTimeout(tickTimer);
clearTimeout(refreshTimer);
clearTimeout(spinHoldTimer);
});
// --- Change column helpers ---
interface ChangeAnalysis {
kind: "scalar" | "json-single" | "json-multi";
oldRaw: string;
newRaw: string;
diffs: DiffEntry[];
delta: number | null;
}
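// Classify an entry's old/new values: JSON objects get a structural diff
// (single vs. multi path), numeric strings get a signed delta, and anything
// else renders as a plain scalar swap.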
function analyzeChange(entry: AuditLogEntry): ChangeAnalysis {
const parsedOld = tryParseJson(entry.oldValue);
const parsedNew = tryParseJson(entry.newValue);
const isJsonOld = typeof parsedOld === "object" && parsedOld !== null;
const isJsonNew = typeof parsedNew === "object" && parsedNew !== null;
if (isJsonOld && isJsonNew) {
const diffs = jsonDiff(parsedOld, parsedNew);
const kind = diffs.length <= 1 ? "json-single" : "json-multi";
return { kind, oldRaw: entry.oldValue, newRaw: entry.newValue, diffs, delta: null };
}
let delta: number | null = null;
const numOld = Number(entry.oldValue);
const numNew = Number(entry.newValue);
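// Number("") === 0, so the empty-string guards below prevent bogus deltas.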
if (
!Number.isNaN(numOld) &&
!Number.isNaN(numNew) &&
entry.oldValue !== "" &&
entry.newValue !== ""
) {
delta = numNew - numOld;
}
return { kind: "scalar", oldRaw: entry.oldValue, newRaw: entry.newValue, diffs: [], delta };
}
function stringify(val: unknown): string {
if (val === undefined) return "∅";
if (typeof val === "string") return val;
return JSON.stringify(val);
}
function toggleExpanded(id: number) {
expandedId = expandedId === id ? null : id;
}
function formatCourse(entry: AuditLogEntry): string {
if (entry.subject && entry.courseNumber) {
return `${entry.subject} ${entry.courseNumber}`;
}
return `#${entry.courseId}`;
}
function formatCourseTooltip(entry: AuditLogEntry): string {
const parts: string[] = [];
if (entry.courseTitle) parts.push(entry.courseTitle);
if (entry.crn) parts.push(`CRN ${entry.crn}`);
parts.push(`ID ${entry.courseId}`);
return parts.join("\n");
}
// --- TanStack Table ---
let sorting: SortingState = $state([{ id: "time", desc: true }]);
function handleSortingChange(updater: Updater<SortingState>) {
sorting = typeof updater === "function" ? updater(sorting) : updater;
}
const columns: ColumnDef<AuditLogEntry, unknown>[] = [
{
id: "time",
accessorKey: "timestamp",
header: "Time",
enableSorting: true,
},
{
id: "course",
accessorKey: "courseId",
header: "Course",
enableSorting: false,
},
{
id: "field",
accessorKey: "fieldChanged",
header: "Field",
enableSorting: true,
},
{
id: "change",
accessorFn: () => "",
header: "Change",
enableSorting: false,
},
];
const table = createSvelteTable({
get data() {
return data?.entries ?? [];
},
getRowId: (row) => String(row.id),
columns,
state: {
get sorting() {
return sorting;
},
},
onSortingChange: handleSortingChange,
getCoreRowModel: getCoreRowModel(),
getSortedRowModel: getSortedRowModel<AuditLogEntry>(),
enableSortingRemoval: true,
});
const skeletonWidths: Record<string, string> = {
time: "w-24",
course: "w-20",
field: "w-20",
change: "w-40",
};
const columnCount = columns.length;
</script>
<div class="mb-4 flex items-center gap-2">
<h1 class="text-lg font-semibold text-foreground">Audit Log</h1>
{#if spinnerVisible}
<span in:fade={{ duration: 150 }} out:fade={{ duration: 200 }}>
<LoaderCircle class="size-4 animate-spin text-muted-foreground" />
</span>
{:else if refreshError}
<span in:fade={{ duration: 150 }} out:fade={{ duration: 200 }}>
<SimpleTooltip text={error ?? "Refresh failed"} side="right" passthrough>
<AlertCircle class="size-4 text-destructive" />
</SimpleTooltip>
</span>
{/if}
</div>
{#if error && !data}
<p class="text-destructive">{error}</p>
{:else}
<div class="bg-card border-border overflow-hidden rounded-lg border">
<table class="w-full text-sm">
<thead>
{#each table.getHeaderGroups() as headerGroup}
<tr class="border-b border-border text-left text-muted-foreground">
{#each headerGroup.headers as header}
<th
class="px-4 py-3 font-medium"
class:cursor-pointer={header.column.getCanSort()}
class:select-none={header.column.getCanSort()}
onclick={header.column.getToggleSortingHandler()}
>
{#if header.column.getCanSort()}
<span class="inline-flex items-center gap-1">
{#if typeof header.column.columnDef.header === "string"}
{header.column.columnDef.header}
{:else}
<FlexRender
content={header.column.columnDef.header}
context={header.getContext()}
/>
{/if}
{#if header.column.getIsSorted() === "asc"}
<ArrowUp class="size-3.5" />
{:else if header.column.getIsSorted() === "desc"}
<ArrowDown class="size-3.5" />
{:else}
<ArrowUpDown class="size-3.5 text-muted-foreground/40" />
{/if}
</span>
{:else if typeof header.column.columnDef.header === "string"}
{header.column.columnDef.header}
{:else}
<FlexRender
content={header.column.columnDef.header}
context={header.getContext()}
/>
{/if}
</th>
{/each}
</tr>
{/each}
</thead>
<tbody>
{#if !data}
<!-- Skeleton loading -->
{#each Array(20) as _}
<tr class="border-b border-border">
{#each columns as col}
<td class="px-4 py-3">
<div
class="h-4 rounded bg-muted animate-pulse {skeletonWidths[col.id ?? ''] ?? 'w-20'}"
></div>
</td>
{/each}
</tr>
{/each}
{:else if data.entries.length === 0}
<tr>
<td colspan={columnCount} class="px-4 py-12 text-center text-muted-foreground">
No audit log entries found.
</td>
</tr>
{:else}
{#each table.getRowModel().rows as row (row.id)}
{@const entry = row.original}
{@const change = analyzeChange(entry)}
{@const isExpanded = expandedId === entry.id}
{@const clickable = change.kind === "json-multi"}
<tr
class="border-b border-border transition-colors last:border-b-0
{clickable ? 'cursor-pointer hover:bg-muted/50' : ''}
{isExpanded ? 'bg-muted/30' : ''}"
onclick={clickable ? () => toggleExpanded(entry.id) : undefined}
>
{#each row.getVisibleCells() as cell (cell.id)}
{@const colId = cell.column.id}
{#if colId === "time"}
{@const rel = relativeTime(new Date(entry.timestamp), now)}
<td class="px-4 py-3 whitespace-nowrap">
<SimpleTooltip text={formatAbsoluteDate(entry.timestamp)} side="right" passthrough>
<span class="font-mono text-xs text-muted-foreground">{rel.text === "now" ? "just now" : `${rel.text} ago`}</span>
</SimpleTooltip>
</td>
{:else if colId === "course"}
<td class="px-4 py-3 whitespace-nowrap">
<SimpleTooltip text={formatCourseTooltip(entry)} side="right" passthrough>
<span class="font-mono text-xs text-foreground">{formatCourse(entry)}</span>
</SimpleTooltip>
</td>
{:else if colId === "field"}
<td class="px-4 py-3">
<span
class="inline-block rounded-full bg-muted px-2 py-0.5 font-mono text-xs text-muted-foreground"
>
{entry.fieldChanged}
</span>
</td>
{:else if colId === "change"}
<td class="px-4 py-3">
{#if change.kind === "scalar"}
<span class="inline-flex items-center gap-1.5 text-sm">
{#if change.delta !== null}
<span class="text-foreground">{formatNumber(change.delta, { sign: true })}<span class="text-muted-foreground/60">,</span></span>
{/if}
<span class="text-red-400">{change.oldRaw}</span>
<span class="text-muted-foreground/60"></span>
<span class="text-green-600 dark:text-green-400">{change.newRaw}</span>
</span>
{:else if change.kind === "json-single"}
{#if change.diffs.length === 1}
{@const d = change.diffs[0]}
<span class="font-mono text-xs">
<span class="text-muted-foreground">{formatDiffPath(d.path)}:</span>
{" "}
<span class="text-red-400">{stringify(d.oldVal)}</span>
<span class="text-muted-foreground"></span>
<span class="text-green-600 dark:text-green-400">{stringify(d.newVal)}</span>
</span>
{:else}
<span class="text-muted-foreground text-xs italic">No changes</span>
{/if}
{:else if change.kind === "json-multi"}
<span class="inline-flex items-center gap-1.5 text-sm text-muted-foreground">
{#if isExpanded}
<ChevronDown class="size-3.5 shrink-0" />
{:else}
<ChevronRight class="size-3.5 shrink-0" />
{/if}
<span class="underline decoration-dotted underline-offset-2">
{formatNumber(change.diffs.length)} fields changed
</span>
</span>
{/if}
</td>
{/if}
{/each}
</tr>
<!-- Expandable detail row for multi-path JSON diffs -->
{#if isExpanded && change.kind === "json-multi"}
<tr class="border-b border-border last:border-b-0">
<td colspan={columnCount} class="p-0">
<div transition:slide={{ duration: 200 }}>
<div class="bg-muted/20 px-4 py-3">
<div class="space-y-1.5">
{#each change.diffs as d}
<div class="font-mono text-xs">
<span class="text-muted-foreground">{formatDiffPath(d.path)}:</span>
{" "}
<span class="text-red-400">{stringify(d.oldVal)}</span>
<span class="text-muted-foreground"></span>
<span class="text-green-600 dark:text-green-400">{stringify(d.newVal)}</span>
</div>
{/each}
</div>
</div>
</div>
</td>
</tr>
{/if}
{/each}
{/if}
</tbody>
</table>
</div>
{/if}
@@ -0,0 +1,651 @@
<script lang="ts">
import { onMount, onDestroy } from "svelte";
import {
client,
type InstructorListItem,
type InstructorStats,
type InstructorDetailResponse,
type CandidateResponse,
} from "$lib/api";
import { formatInstructorName, isRatingValid, ratingStyle, rmpUrl } from "$lib/course";
import { themeStore } from "$lib/stores/theme.svelte";
import {
Check,
ChevronLeft,
ChevronRight,
ExternalLink,
RefreshCw,
X,
XCircle,
} from "@lucide/svelte";
// --- State ---
let instructors = $state<InstructorListItem[]>([]);
let stats = $state<InstructorStats>({
total: 0,
unmatched: 0,
auto: 0,
confirmed: 0,
rejected: 0,
withCandidates: 0,
});
let totalCount = $state(0);
let currentPage = $state(1);
let perPage = $state(25);
let activeFilter = $state<string | undefined>(undefined);
let searchQuery = $state("");
let searchInput = $state("");
let error = $state<string | null>(null);
let loading = $state(true);
// Expanded row detail
let expandedId = $state<number | null>(null);
let detail = $state<InstructorDetailResponse | null>(null);
let detailLoading = $state(false);
let detailError = $state<string | null>(null);
// Action states
let actionLoading = $state<string | null>(null);
let rescoreLoading = $state(false);
let rescoreResult = $state<string | null>(null);
// Search debounce
let searchTimeout: ReturnType<typeof setTimeout> | undefined;
const filterOptions = [
{ label: "All", value: undefined as string | undefined },
{ label: "Needs Review", value: "unmatched" },
{ label: "Auto-matched", value: "auto" },
{ label: "Confirmed", value: "confirmed" },
{ label: "Rejected", value: "rejected" },
];
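// RMP legacy IDs already matched to this instructor; used to flag candidates as matched.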
let matchedLegacyIds = $derived(
new Set(detail?.currentMatches.map((m: { legacyId: number }) => m.legacyId) ?? [])
);
let progressDenom = $derived(stats.total || 1);
let autoPct = $derived((stats.auto / progressDenom) * 100);
let confirmedPct = $derived((stats.confirmed / progressDenom) * 100);
let unmatchedPct = $derived((stats.unmatched / progressDenom) * 100);
let totalPages = $derived(Math.max(1, Math.ceil(totalCount / perPage)));
// --- Data fetching ---
async function fetchInstructors() {
loading = true;
error = null;
try {
const res = await client.getAdminInstructors({
status: activeFilter,
search: searchQuery || undefined,
page: currentPage,
per_page: perPage,
});
instructors = res.instructors;
totalCount = res.total;
stats = res.stats;
} catch (e) {
error = e instanceof Error ? e.message : "Failed to load instructors";
} finally {
loading = false;
}
}
async function fetchDetail(id: number) {
detailLoading = true;
detailError = null;
detail = null;
try {
detail = await client.getAdminInstructor(id);
} catch (e) {
detailError = e instanceof Error ? e.message : "Failed to load details";
} finally {
detailLoading = false;
}
}
onMount(() => {
fetchInstructors();
});
onDestroy(() => clearTimeout(searchTimeout));
function setFilter(value: string | undefined) {
activeFilter = value;
currentPage = 1;
expandedId = null;
fetchInstructors();
}
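// Debounce keystrokes for 300 ms before committing the query and refetching.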
function handleSearch() {
clearTimeout(searchTimeout);
searchTimeout = setTimeout(() => {
searchQuery = searchInput;
currentPage = 1;
expandedId = null;
fetchInstructors();
}, 300);
}
function goToPage(page: number) {
if (page < 1 || page > totalPages) return;
currentPage = page;
expandedId = null;
fetchInstructors();
}
async function toggleExpand(id: number) {
if (expandedId === id) {
expandedId = null;
detail = null;
return;
}
expandedId = id;
await fetchDetail(id);
}
// --- Actions ---
async function handleMatch(instructorId: number, rmpLegacyId: number) {
actionLoading = `match-${rmpLegacyId}`;
try {
detail = await client.matchInstructor(instructorId, rmpLegacyId);
await fetchInstructors();
} catch (e) {
detailError = e instanceof Error ? e.message : "Match failed";
} finally {
actionLoading = null;
}
}
async function handleReject(instructorId: number, rmpLegacyId: number) {
actionLoading = `reject-${rmpLegacyId}`;
try {
await client.rejectCandidate(instructorId, rmpLegacyId);
await Promise.all([fetchDetail(instructorId), fetchInstructors()]);
} catch (e) {
detailError = e instanceof Error ? e.message : "Reject failed";
} finally {
actionLoading = null;
}
}
async function handleRejectAll(instructorId: number) {
actionLoading = "reject-all";
try {
await client.rejectAllCandidates(instructorId);
await Promise.all([fetchDetail(instructorId), fetchInstructors()]);
} catch (e) {
detailError = e instanceof Error ? e.message : "Reject all failed";
} finally {
actionLoading = null;
}
}
async function handleUnmatch(instructorId: number, rmpLegacyId: number) {
actionLoading = `unmatch-${rmpLegacyId}`;
try {
await client.unmatchInstructor(instructorId, rmpLegacyId);
await Promise.all([fetchDetail(instructorId), fetchInstructors()]);
} catch (e) {
detailError = e instanceof Error ? e.message : "Unmatch failed";
} finally {
actionLoading = null;
}
}
async function handleRescore() {
rescoreLoading = true;
rescoreResult = null;
try {
const res = await client.rescoreInstructors();
rescoreResult = `Rescored: ${res.totalUnmatched} unmatched, ${res.candidatesCreated} candidates created, ${res.autoMatched} auto-matched`;
await fetchInstructors();
} catch (e) {
rescoreResult = e instanceof Error ? e.message : "Rescore failed";
} finally {
rescoreLoading = false;
}
}
// --- Helpers ---
function statusBadge(status: string): { label: string; classes: string } {
switch (status) {
case "unmatched":
return {
label: "Unmatched",
classes: "bg-amber-100 text-amber-800 dark:bg-amber-900 dark:text-amber-200",
};
case "auto":
return {
label: "Auto",
classes: "bg-blue-100 text-blue-800 dark:bg-blue-900 dark:text-blue-200",
};
case "confirmed":
return {
label: "Confirmed",
classes: "bg-green-100 text-green-800 dark:bg-green-900 dark:text-green-200",
};
case "rejected":
return {
label: "Rejected",
classes: "bg-red-100 text-red-800 dark:bg-red-900 dark:text-red-200",
};
default:
return { label: status, classes: "bg-muted text-muted-foreground" };
}
}
function formatScore(score: number): string {
return (score * 100).toFixed(0);
}
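// Display weights for sizing the score-bar segments (assumed to mirror the backend scorer's weighting).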
const scoreWeights: Record<string, number> = {
name: 0.5,
department: 0.25,
uniqueness: 0.15,
volume: 0.1,
};
const scoreColors: Record<string, string> = {
name: "bg-blue-500",
department: "bg-purple-500",
uniqueness: "bg-amber-500",
volume: "bg-emerald-500",
};
const scoreLabels: Record<string, string> = {
name: "Name",
department: "Dept",
uniqueness: "Unique",
volume: "Volume",
};
</script>
<div class="flex items-center justify-between mb-4">
<h1 class="text-lg font-semibold text-foreground">Instructors</h1>
<button
onclick={handleRescore}
disabled={rescoreLoading}
class="inline-flex items-center gap-1.5 rounded-md bg-muted px-3 py-1.5 text-sm font-medium text-foreground hover:bg-accent transition-colors disabled:opacity-50 cursor-pointer"
>
<RefreshCw size={14} class={rescoreLoading ? "animate-spin" : ""} />
Rescore
</button>
</div>
{#if rescoreResult}
<div class="mb-4 rounded-md bg-muted px-3 py-2 text-sm text-muted-foreground">
{rescoreResult}
</div>
{/if}
{#if error}
<p class="text-destructive mb-4">{error}</p>
{/if}
{#if loading && instructors.length === 0}
<!-- Skeleton stats cards -->
<div class="mb-4 grid grid-cols-5 gap-3">
{#each Array(5) as _}
<div class="bg-card border-border rounded-lg border p-3">
<div class="h-3 w-16 animate-pulse rounded bg-muted mb-2"></div>
<div class="h-6 w-12 animate-pulse rounded bg-muted"></div>
</div>
{/each}
</div>
<div class="bg-muted mb-6 h-2 rounded-full overflow-hidden"></div>
<!-- Skeleton table rows -->
<div class="bg-card border-border overflow-hidden rounded-lg border">
<div class="divide-y divide-border">
{#each Array(8) as _}
<div class="flex items-center gap-4 px-4 py-3">
<div class="space-y-1.5 flex-1">
<div class="h-4 w-40 animate-pulse rounded bg-muted"></div>
<div class="h-3 w-28 animate-pulse rounded bg-muted"></div>
</div>
<div class="h-5 w-20 animate-pulse rounded-full bg-muted"></div>
<div class="h-4 w-32 animate-pulse rounded bg-muted"></div>
<div class="h-4 w-8 animate-pulse rounded bg-muted"></div>
<div class="h-6 w-16 animate-pulse rounded bg-muted"></div>
</div>
{/each}
</div>
</div>
{:else}
<!-- Stats Bar -->
<div class="mb-4 grid grid-cols-5 gap-3">
<div class="bg-card border-border rounded-lg border p-3">
<div class="text-muted-foreground text-xs">Total</div>
<div class="text-lg font-semibold">{stats.total}</div>
</div>
<div class="bg-card border-border rounded-lg border p-3">
<div class="text-xs text-amber-600 dark:text-amber-400">Unmatched</div>
<div class="text-lg font-semibold">{stats.unmatched}</div>
</div>
<div class="bg-card border-border rounded-lg border p-3">
<div class="text-xs text-blue-600 dark:text-blue-400">Auto</div>
<div class="text-lg font-semibold">{stats.auto}</div>
</div>
<div class="bg-card border-border rounded-lg border p-3">
<div class="text-xs text-green-600 dark:text-green-400">Confirmed</div>
<div class="text-lg font-semibold">{stats.confirmed}</div>
</div>
<div class="bg-card border-border rounded-lg border p-3">
<div class="text-xs text-red-600 dark:text-red-400">Rejected</div>
<div class="text-lg font-semibold">{stats.rejected}</div>
</div>
</div>
<!-- Progress Bar -->
<div class="bg-muted mb-6 h-2 rounded-full overflow-hidden flex">
<div class="bg-blue-500 h-full transition-all duration-300"
style="width: {autoPct}%" title="Auto: {stats.auto}"></div>
<div class="bg-green-500 h-full transition-all duration-300"
style="width: {confirmedPct}%" title="Confirmed: {stats.confirmed}"></div>
<div class="bg-amber-500 h-full transition-all duration-300"
style="width: {unmatchedPct}%" title="Unmatched: {stats.unmatched}"></div>
</div>
<!-- Filters -->
<div class="mb-4 flex items-center gap-4">
<div class="flex gap-2">
{#each filterOptions as opt}
<button
onclick={() => setFilter(opt.value)}
class="rounded-md px-3 py-1.5 text-sm transition-colors cursor-pointer
{activeFilter === opt.value
? 'bg-primary text-primary-foreground'
: 'bg-muted text-muted-foreground hover:bg-accent'}"
>
{opt.label}
</button>
{/each}
</div>
<input
type="text"
placeholder="Search by name or email..."
bind:value={searchInput}
oninput={handleSearch}
class="bg-card border-border rounded-md border px-3 py-1.5 text-sm text-foreground placeholder:text-muted-foreground outline-none focus:ring-1 focus:ring-ring w-64"
/>
</div>
{#if instructors.length === 0}
<p class="text-muted-foreground py-8 text-center text-sm">No instructors found.</p>
{:else}
<div class="bg-card border-border overflow-hidden rounded-lg border">
<table class="w-full text-sm">
<thead>
<tr class="border-border border-b text-left text-muted-foreground">
<th class="px-4 py-2.5 font-medium">Name</th>
<th class="px-4 py-2.5 font-medium">Status</th>
<th class="px-4 py-2.5 font-medium">Top Candidate</th>
<th class="px-4 py-2.5 font-medium text-center">Candidates</th>
<th class="px-4 py-2.5 font-medium text-right">Actions</th>
</tr>
</thead>
<tbody>
{#each instructors as instructor (instructor.id)}
{@const badge = statusBadge(instructor.rmpMatchStatus)}
{@const isExpanded = expandedId === instructor.id}
<tr
class="border-border border-b cursor-pointer hover:bg-muted/50 transition-colors {isExpanded ? 'bg-muted/30' : ''}"
onclick={() => toggleExpand(instructor.id)}
>
<td class="px-4 py-2.5">
<div class="font-medium text-foreground">{formatInstructorName(instructor.displayName)}</div>
<div class="text-xs text-muted-foreground">{instructor.email}</div>
</td>
<td class="px-4 py-2.5">
<span class="inline-flex items-center rounded-full px-2 py-0.5 text-xs font-medium {badge.classes}">
{badge.label}
</span>
</td>
<td class="px-4 py-2.5">
{#if instructor.topCandidate}
{@const tc = instructor.topCandidate}
<div class="flex items-center gap-2">
<span class="text-foreground">{tc.firstName} {tc.lastName}</span>
{#if isRatingValid(tc.avgRating, tc.numRatings ?? 0)}
<span class="font-semibold tabular-nums" style={ratingStyle(tc.avgRating!, themeStore.isDark)}>
{tc.avgRating!.toFixed(1)}
</span>
{:else}
<span class="text-xs text-muted-foreground">N/A</span>
{/if}
<span class="text-xs text-muted-foreground tabular-nums">
({formatScore(tc.score ?? 0)}%)
</span>
</div>
{:else}
<span class="text-muted-foreground text-xs">No candidates</span>
{/if}
</td>
<td class="px-4 py-2.5 text-center tabular-nums text-muted-foreground">
{instructor.candidateCount}
</td>
<td class="px-4 py-2.5 text-right">
<div class="inline-flex items-center gap-1">
{#if instructor.topCandidate && instructor.rmpMatchStatus === "unmatched"}
<button
onclick={(e) => { e.stopPropagation(); handleMatch(instructor.id, instructor.topCandidate!.rmpLegacyId); }}
disabled={actionLoading !== null}
class="rounded p-1 text-green-600 hover:bg-green-100 dark:hover:bg-green-900/30 transition-colors disabled:opacity-50 cursor-pointer"
title="Accept top candidate"
>
<Check size={16} />
</button>
{/if}
<button
onclick={(e) => { e.stopPropagation(); toggleExpand(instructor.id); }}
class="rounded p-1 text-muted-foreground hover:bg-muted transition-colors cursor-pointer"
title={isExpanded ? "Collapse" : "Expand"}
>
<ChevronRight size={16} class="transition-transform duration-200 {isExpanded ? 'rotate-90' : ''}" />
</button>
</div>
</td>
</tr>
<!-- Expanded detail panel -->
{#if isExpanded}
<tr class="border-border border-b bg-muted/20">
<td colspan="5" class="p-0">
<div class="p-4">
{#if detailLoading}
<p class="text-muted-foreground text-sm py-4 text-center">Loading details...</p>
{:else if detailError}
<p class="text-destructive text-sm py-2">{detailError}</p>
{:else if detail}
<div class="grid grid-cols-3 gap-6">
<!-- Left: Instructor info -->
<div class="space-y-3">
<h3 class="font-medium text-foreground">Instructor</h3>
<dl class="grid grid-cols-[auto_1fr] gap-x-3 gap-y-1.5 text-sm">
<dt class="text-muted-foreground">Name</dt>
<dd class="text-foreground">{formatInstructorName(detail.instructor.displayName)}</dd>
<dt class="text-muted-foreground">Email</dt>
<dd class="text-foreground">{detail.instructor.email}</dd>
<dt class="text-muted-foreground">Courses</dt>
<dd class="text-foreground tabular-nums">{detail.instructor.courseCount}</dd>
{#if detail.instructor.subjectsTaught.length > 0}
<dt class="text-muted-foreground">Subjects</dt>
<dd class="flex flex-wrap gap-1">
{#each detail.instructor.subjectsTaught as subj}
<span class="rounded bg-muted px-1.5 py-0.5 text-xs">{subj}</span>
{/each}
</dd>
{/if}
</dl>
</div>
<!-- Right: Candidates (spans 2 cols) -->
<div class="col-span-2 space-y-3">
<div class="flex items-center justify-between">
<h3 class="font-medium text-foreground">
Candidates ({detail.candidates.length})
</h3>
{#if detail.candidates.some((c: CandidateResponse) => c.status !== "rejected" && !matchedLegacyIds.has(c.rmpLegacyId))}
<button
onclick={(e) => { e.stopPropagation(); handleRejectAll(detail!.instructor.id); }}
disabled={actionLoading !== null}
class="inline-flex items-center gap-1 rounded-md bg-red-100 px-2 py-1 text-xs font-medium text-red-700 hover:bg-red-200 dark:bg-red-900/30 dark:text-red-400 dark:hover:bg-red-900/50 transition-colors disabled:opacity-50 cursor-pointer"
>
<X size={12} /> Reject All
</button>
{/if}
</div>
{#if detail.candidates.length === 0}
<p class="text-muted-foreground text-sm">No candidates available.</p>
{:else}
<div class="max-h-80 overflow-y-auto space-y-2 pr-1">
{#each detail.candidates as candidate (candidate.id)}
{@const isMatched = candidate.status === "matched" || matchedLegacyIds.has(candidate.rmpLegacyId)}
{@const isRejected = candidate.status === "rejected"}
{@const isPending = !isMatched && !isRejected}
<div class="rounded-md border p-3 {isMatched ? 'border-l-4 border-l-green-500 bg-green-500/5 border-border' : isRejected ? 'border-border bg-card opacity-50' : 'border-border bg-card'}">
<div class="flex items-start justify-between gap-2">
<div>
<div class="flex items-center gap-2">
<span class="font-medium text-foreground text-sm">
{candidate.firstName} {candidate.lastName}
</span>
{#if isMatched}
<span class="text-[10px] rounded px-1 py-0.5 bg-green-100 text-green-700 dark:bg-green-900/30 dark:text-green-400">
Matched
</span>
{:else if isRejected}
<span class="text-[10px] rounded px-1 py-0.5 bg-red-100 text-red-700 dark:bg-red-900/30 dark:text-red-400">
Rejected
</span>
{/if}
</div>
{#if candidate.department}
<div class="text-xs text-muted-foreground">{candidate.department}</div>
{/if}
</div>
<div class="flex items-center gap-1 shrink-0">
{#if isMatched}
<button
onclick={(e) => { e.stopPropagation(); handleUnmatch(detail!.instructor.id, candidate.rmpLegacyId); }}
disabled={actionLoading !== null}
class="inline-flex items-center gap-1 rounded p-1 text-xs text-red-500 hover:bg-red-100 dark:hover:bg-red-900/30 transition-colors disabled:opacity-50 cursor-pointer"
title="Unmatch"
>
<XCircle size={14} /> Unmatch
</button>
{:else if isPending}
<button
onclick={(e) => { e.stopPropagation(); handleMatch(detail!.instructor.id, candidate.rmpLegacyId); }}
disabled={actionLoading !== null}
class="rounded p-1 text-green-600 hover:bg-green-100 dark:hover:bg-green-900/30 transition-colors disabled:opacity-50 cursor-pointer"
title="Accept"
>
<Check size={14} />
</button>
<button
onclick={(e) => { e.stopPropagation(); handleReject(detail!.instructor.id, candidate.rmpLegacyId); }}
disabled={actionLoading !== null}
class="rounded p-1 text-red-500 hover:bg-red-100 dark:hover:bg-red-900/30 transition-colors disabled:opacity-50 cursor-pointer"
title="Reject"
>
<X size={14} />
</button>
{/if}
<a
href={rmpUrl(candidate.rmpLegacyId)}
target="_blank"
rel="noopener noreferrer"
onclick={(e) => e.stopPropagation()}
class="rounded p-1 text-muted-foreground hover:bg-muted transition-colors cursor-pointer"
title="View on RMP"
>
<ExternalLink size={14} />
</a>
</div>
</div>
<!-- Stats row -->
<div class="mt-2 flex items-center gap-3 text-xs">
{#if isRatingValid(candidate.avgRating, candidate.numRatings ?? 0)}
<span class="font-semibold tabular-nums" style={ratingStyle(candidate.avgRating!, themeStore.isDark)}>
{candidate.avgRating!.toFixed(1)}
</span>
{:else}
<span class="text-xs text-muted-foreground">N/A</span>
{/if}
{#if candidate.avgDifficulty !== null}
<span class="text-muted-foreground tabular-nums">{candidate.avgDifficulty.toFixed(1)} diff</span>
{/if}
<span class="text-muted-foreground">{candidate.numRatings} ratings</span>
{#if candidate.wouldTakeAgainPct !== null}
<span class="text-muted-foreground">{candidate.wouldTakeAgainPct.toFixed(0)}% again</span>
{/if}
</div>
<!-- Score bar (weighted segments) -->
<div class="mt-2">
<div class="flex items-center gap-2 text-xs">
<span class="text-muted-foreground shrink-0">Score:</span>
<div class="bg-muted h-2 flex-1 rounded-full overflow-hidden flex">
{#each Object.entries(candidate.scoreBreakdown ?? {}) as [key, value]}
{@const w = scoreWeights[key] ?? 0}
{@const widthPct = (value as number) * w * 100}
<div
class="{scoreColors[key] ?? 'bg-primary'} h-full transition-all"
style="width: {widthPct}%"
title="{scoreLabels[key] ?? key}: {((value as number) * 100).toFixed(0)}% (×{w})"
></div>
{/each}
</div>
<span class="tabular-nums font-medium text-foreground shrink-0">{formatScore(candidate.score ?? 0)}%</span>
</div>
</div>
</div>
{/each}
</div>
{/if}
</div>
</div>
{/if}
</div>
</td>
</tr>
{/if}
{/each}
</tbody>
</table>
</div>
<!-- Pagination -->
<div class="mt-4 flex items-center justify-between text-sm">
<span class="text-muted-foreground">
Showing {(currentPage - 1) * perPage + 1}–{Math.min(currentPage * perPage, totalCount)} of {totalCount}
</span>
<div class="flex items-center gap-2">
<button
onclick={() => goToPage(currentPage - 1)}
disabled={currentPage <= 1}
class="inline-flex items-center gap-1 rounded-md bg-muted px-2 py-1 text-sm text-foreground hover:bg-accent transition-colors disabled:opacity-50 cursor-pointer"
>
<ChevronLeft size={14} /> Prev
</button>
<span class="text-muted-foreground tabular-nums">
{currentPage} / {totalPages}
</span>
<button
onclick={() => goToPage(currentPage + 1)}
disabled={currentPage >= totalPages}
class="inline-flex items-center gap-1 rounded-md bg-muted px-2 py-1 text-sm text-foreground hover:bg-accent transition-colors disabled:opacity-50 cursor-pointer"
>
Next <ChevronRight size={14} />
</button>
</div>
</div>
{/if}
{/if}
@@ -0,0 +1,526 @@
<script lang="ts">
import { type ScrapeJob, client } from "$lib/api";
import { FlexRender, createSvelteTable } from "$lib/components/ui/data-table/index.js";
import { formatAbsoluteDate } from "$lib/date";
import { formatDuration } from "$lib/time";
import { ArrowDown, ArrowUp, ArrowUpDown, TriangleAlert } from "@lucide/svelte";
import {
type ColumnDef,
type SortingState,
type Updater,
getCoreRowModel,
getSortedRowModel,
} from "@tanstack/table-core";
import { onMount } from "svelte";
import { type ConnectionState, ScrapeJobsStore } from "$lib/ws";
let jobs = $state<ScrapeJob[]>([]);
let connectionState = $state<ConnectionState>("disconnected");
let initialized = $state(false);
let error = $state<string | null>(null);
let sorting: SortingState = $state([]);
let tick = $state(0);
let subjectMap = $state(new Map<string, string>());
let store: ScrapeJobsStore | undefined;
// Shared tooltip state — single tooltip for all timing cells via event delegation
let tooltipText = $state<string | null>(null);
let tooltipX = $state(0);
let tooltipY = $state(0);
function showTooltip(event: MouseEvent) {
const target = (event.target as HTMLElement).closest<HTMLElement>("[data-timing-tooltip]");
if (!target) return;
tooltipText = target.dataset.timingTooltip ?? null;
tooltipX = event.clientX;
tooltipY = event.clientY;
}
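// Follow the cursor between timing cells; clear as soon as it leaves them.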
function moveTooltip(event: MouseEvent) {
if (tooltipText === null) return;
const target = (event.target as HTMLElement).closest<HTMLElement>("[data-timing-tooltip]");
if (!target) {
tooltipText = null;
return;
}
tooltipText = target.dataset.timingTooltip ?? null;
tooltipX = event.clientX;
tooltipY = event.clientY;
}
function hideTooltip() {
tooltipText = null;
}
onMount(() => {
// Tick every second for live time displays
const tickInterval = setInterval(() => {
tick++;
}, 1000);
// Load subject reference data
client
.getReference("subject")
.then((entries) => {
const map = new Map<string, string>();
for (const entry of entries) {
map.set(entry.code, entry.description);
}
subjectMap = map;
})
.catch(() => {
// Subject lookup is best-effort
});
// Initialize WebSocket store
store = new ScrapeJobsStore(() => {
if (!store) return;
connectionState = store.getConnectionState();
initialized = store.isInitialized();
// getJobs() returns a cached array when unchanged, so only reassign
// when the reference differs to avoid triggering reactive table rebuilds.
const next = store.getJobs();
if (next !== jobs) jobs = next;
});
store.connect();
return () => {
clearInterval(tickInterval);
store?.disconnect();
};
});
function handleSortingChange(updater: Updater<SortingState>) {
sorting = typeof updater === "function" ? updater(sorting) : updater;
}
// --- Helper functions ---
function formatJobDetails(job: ScrapeJob, subjects: Map<string, string>): string {
const payload = job.targetPayload as Record<string, unknown>;
switch (job.targetType) {
case "Subject": {
const code = payload.subject as string;
const desc = subjects.get(code);
return desc ? `${code} \u2014 ${desc}` : code;
}
case "CrnList": {
const crns = payload.crns as string[];
return `${crns.length} CRNs`;
}
case "SingleCrn":
return `CRN ${payload.crn as string}`;
case "CourseRange":
return `${payload.subject as string} ${payload.low as number}\u2013${payload.high as number}`;
default:
return JSON.stringify(payload);
}
}
function priorityColor(priority: string): string {
const p = priority.toLowerCase();
if (p === "urgent" || p === "critical") return "text-red-500";
if (p === "high") return "text-orange-500";
if (p === "low") return "text-muted-foreground";
return "text-foreground";
}
function retryColor(retryCount: number, maxRetries: number): string {
if (retryCount >= maxRetries && maxRetries > 0) return "text-red-500";
if (retryCount > 0) return "text-amber-500";
return "text-muted-foreground";
}
function statusColor(status: string): { text: string; dot: string } {
switch (status) {
case "processing":
return { text: "text-blue-500", dot: "bg-blue-500" };
case "pending":
return { text: "text-green-500", dot: "bg-green-500" };
case "scheduled":
return { text: "text-muted-foreground", dot: "bg-muted-foreground" };
case "staleLock":
return { text: "text-red-500", dot: "bg-red-500" };
case "exhausted":
return { text: "text-red-500", dot: "bg-red-500" };
default:
return { text: "text-muted-foreground", dot: "bg-muted-foreground" };
}
}
function formatStatusLabel(status: string): string {
// Convert camelCase to separate words, capitalize first letter
return status.replace(/([a-z])([A-Z])/g, "$1 $2").replace(/^\w/, (c) => c.toUpperCase());
}
function lockDurationColor(ms: number): string {
const minutes = ms / 60_000;
if (minutes >= 8) return "text-red-500";
if (minutes >= 5) return "text-amber-500";
return "text-foreground";
}
function overdueDurationColor(ms: number): string {
const minutes = ms / 60_000;
if (minutes >= 5) return "text-red-500";
return "text-amber-500";
}
// --- Table columns ---
const columns: ColumnDef<ScrapeJob, unknown>[] = [
{
id: "id",
accessorKey: "id",
header: "ID",
enableSorting: false,
},
{
id: "status",
accessorKey: "status",
header: "Status",
enableSorting: true,
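// Custom sort order: processing and stale locks surface first, exhausted jobs last.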
sortingFn: (rowA, rowB) => {
const order: Record<string, number> = {
processing: 0,
staleLock: 1,
pending: 2,
scheduled: 3,
exhausted: 4,
};
const a = order[rowA.original.status] ?? 3;
const b = order[rowB.original.status] ?? 3;
return a - b;
},
},
{
id: "targetType",
accessorKey: "targetType",
header: "Type",
enableSorting: false,
},
{
id: "details",
accessorFn: () => "",
header: "Details",
enableSorting: false,
},
{
id: "priority",
accessorKey: "priority",
header: "Priority",
enableSorting: true,
sortingFn: (rowA, rowB) => {
const order: Record<string, number> = {
critical: 0,
urgent: 0,
high: 1,
normal: 2,
medium: 2,
low: 3,
};
const a = order[String(rowA.original.priority).toLowerCase()] ?? 2;
const b = order[String(rowB.original.priority).toLowerCase()] ?? 2;
return a - b;
},
},
{
id: "timing",
accessorFn: (row) => {
if (row.lockedAt) return Date.now() - new Date(row.lockedAt).getTime();
return Date.now() - new Date(row.queuedAt).getTime();
},
header: "Timing",
enableSorting: true,
},
];
const table = createSvelteTable({
get data() {
return jobs;
},
getRowId: (row) => String(row.id),
columns,
state: {
get sorting() {
return sorting;
},
},
onSortingChange: handleSortingChange,
getCoreRowModel: getCoreRowModel(),
getSortedRowModel: getSortedRowModel(),
enableSortingRemoval: true,
});
const skeletonWidths: Record<string, string> = {
id: "w-6",
status: "w-20",
targetType: "w-16",
details: "w-32",
priority: "w-16",
timing: "w-32",
};
// Unified timing display: shows the most relevant duration for the job's current state.
// Uses _tick dependency so Svelte re-evaluates every second.
function getTimingDisplay(
job: ScrapeJob,
_tick: number
): { text: string; colorClass: string; icon: "warning" | "none"; tooltip: string } {
const now = Date.now();
const queuedTime = new Date(job.queuedAt).getTime();
const executeTime = new Date(job.executeAt).getTime();
if (job.status === "processing" || job.status === "staleLock") {
const lockedTime = job.lockedAt ? new Date(job.lockedAt).getTime() : now;
const processingMs = now - lockedTime;
const waitedMs = lockedTime - queuedTime;
const prefix = job.status === "staleLock" ? "stale" : "processing";
const colorClass =
job.status === "staleLock" ? "text-red-500" : lockDurationColor(processingMs);
const tooltipLines = [
`Queued: ${formatAbsoluteDate(job.queuedAt)}`,
`Waited: ${formatDuration(Math.max(0, waitedMs))}`,
];
if (job.lockedAt) {
tooltipLines.push(`Locked: ${formatAbsoluteDate(job.lockedAt)}`);
}
tooltipLines.push(
`${job.status === "staleLock" ? "Stale for" : "Processing"}: ${formatDuration(processingMs)}`
);
return {
text: `${prefix} ${formatDuration(processingMs)}`,
colorClass,
icon: job.status === "staleLock" ? "warning" : "none",
tooltip: tooltipLines.join("\n"),
};
}
if (job.status === "exhausted") {
const tooltipLines = [
`Queued: ${formatAbsoluteDate(job.queuedAt)}`,
`Retries: ${job.retryCount}/${job.maxRetries} exhausted`,
];
return {
text: "exhausted",
colorClass: "text-red-500",
icon: "warning",
tooltip: tooltipLines.join("\n"),
};
}
// Scheduled (future execute_at)
const executeAtDiff = now - executeTime;
if (job.status === "scheduled" || executeAtDiff < 0) {
const tooltipLines = [
`Queued: ${formatAbsoluteDate(job.queuedAt)}`,
`Executes: ${formatAbsoluteDate(job.executeAt)}`,
];
return {
text: `in ${formatDuration(Math.abs(executeAtDiff))}`,
colorClass: "text-muted-foreground",
icon: "none",
tooltip: tooltipLines.join("\n"),
};
}
// Pending (overdue — execute_at is in the past, waiting to be picked up)
const waitingMs = now - queuedTime;
const tooltipLines = [
`Queued: ${formatAbsoluteDate(job.queuedAt)}`,
`Waiting: ${formatDuration(waitingMs)}`,
];
return {
text: `waiting ${formatDuration(waitingMs)}`,
colorClass: overdueDurationColor(waitingMs),
icon: "warning",
tooltip: tooltipLines.join("\n"),
};
}
</script>
<div class="flex items-center justify-between mb-4">
<h1 class="text-lg font-semibold text-foreground">Scrape Jobs</h1>
<div class="flex items-center gap-2 text-sm">
{#if connectionState === "connected"}
<span class="inline-flex items-center gap-1.5">
<span class="size-2 shrink-0 rounded-full bg-green-500"></span>
<span class="text-green-500">Live</span>
</span>
{:else if connectionState === "reconnecting"}
<span class="inline-flex items-center gap-1.5">
<span class="size-2 shrink-0 rounded-full bg-amber-500"></span>
<span class="text-amber-500">Reconnecting...</span>
</span>
{:else}
<span class="inline-flex items-center gap-2">
<span class="inline-flex items-center gap-1.5">
<span class="size-2 shrink-0 rounded-full bg-red-500"></span>
<span class="text-red-500">Disconnected</span>
</span>
<button
class="rounded-md bg-muted px-2 py-0.5 text-xs font-medium text-foreground hover:bg-muted/80 transition-colors"
onclick={() => store?.retry()}
>
Retry
</button>
</span>
{/if}
</div>
</div>
{#if error}
<p class="text-destructive">{error}</p>
{:else}
<div class="bg-card border-border overflow-hidden rounded-lg border">
<!-- svelte-ignore a11y_no_noninteractive_element_interactions -->
<table
class="w-full border-collapse text-xs"
onmouseenter={showTooltip}
onmousemove={moveTooltip}
onmouseleave={hideTooltip}
>
<thead>
{#each table.getHeaderGroups() as headerGroup}
<tr class="border-b border-border text-left text-muted-foreground">
{#each headerGroup.headers as header}
<th
class="px-3 py-2.5 font-medium whitespace-nowrap"
class:cursor-pointer={header.column.getCanSort()}
class:select-none={header.column.getCanSort()}
onclick={header.column.getToggleSortingHandler()}
>
{#if header.column.getCanSort()}
<span class="inline-flex items-center gap-1">
{#if typeof header.column.columnDef.header === "string"}
{header.column.columnDef.header}
{:else}
<FlexRender
content={header.column.columnDef.header}
context={header.getContext()}
/>
{/if}
{#if header.column.getIsSorted() === "asc"}
<ArrowUp class="size-3.5" />
{:else if header.column.getIsSorted() === "desc"}
<ArrowDown class="size-3.5" />
{:else}
<ArrowUpDown class="size-3.5 text-muted-foreground/40" />
{/if}
</span>
{:else if typeof header.column.columnDef.header === "string"}
{header.column.columnDef.header}
{:else}
<FlexRender
content={header.column.columnDef.header}
context={header.getContext()}
/>
{/if}
</th>
{/each}
</tr>
{/each}
</thead>
{#if !initialized}
<tbody>
{#each Array(5) as _}
<tr class="border-b border-border">
{#each columns as col}
<td class="px-3 py-2.5">
<div
class="h-3.5 rounded bg-muted animate-pulse {skeletonWidths[col.id ?? ''] ?? 'w-20'}"
></div>
</td>
{/each}
</tr>
{/each}
</tbody>
{:else if jobs.length === 0}
<tbody>
<tr>
<td colspan={columns.length} class="py-12 text-center text-muted-foreground">
No scrape jobs found.
</td>
</tr>
</tbody>
{:else}
<tbody>
{#each table.getRowModel().rows as row (row.id)}
{@const job = row.original}
{@const sc = statusColor(job.status)}
{@const timingDisplay = getTimingDisplay(job, tick)}
<tr
class="border-b border-border last:border-b-0 hover:bg-muted/50 transition-colors"
>
{#each row.getVisibleCells() as cell (cell.id)}
{@const colId = cell.column.id}
{#if colId === "id"}
<td class="px-3 py-2.5 tabular-nums text-muted-foreground/70 w-12">{job.id}</td>
{:else if colId === "status"}
<td class="px-3 py-2.5 whitespace-nowrap">
<span class="inline-flex items-center gap-1.5">
<span class="size-1.5 shrink-0 rounded-full {sc.dot}"></span>
<span class="flex flex-col leading-tight">
<span class={sc.text}>{formatStatusLabel(job.status)}</span>
{#if job.maxRetries > 0}
<span class="text-[10px] {retryColor(job.retryCount, job.maxRetries)}">
{job.retryCount}/{job.maxRetries} retries
</span>
{/if}
</span>
</span>
</td>
{:else if colId === "targetType"}
<td class="px-3 py-2.5 whitespace-nowrap">
<span
class="inline-flex items-center rounded-md bg-muted/60 px-1.5 py-0.5 font-mono text-[11px] text-muted-foreground"
>
{job.targetType}
</span>
</td>
{:else if colId === "details"}
<td class="px-3 py-2.5 max-w-48 truncate text-muted-foreground" title={formatJobDetails(job, subjectMap)}>
{formatJobDetails(job, subjectMap)}
</td>
{:else if colId === "priority"}
<td class="px-3 py-2.5 whitespace-nowrap">
<span class="font-medium capitalize {priorityColor(job.priority)}">
{job.priority}
</span>
</td>
{:else if colId === "timing"}
<td class="px-3 py-2.5 whitespace-nowrap">
<span
class="inline-flex items-center gap-1.5 tabular-nums text-foreground"
data-timing-tooltip={timingDisplay.tooltip}
>
<span class="size-3.5 shrink-0 inline-flex items-center justify-center {timingDisplay.colorClass}">
{#if timingDisplay.icon === "warning"}
<TriangleAlert class="size-3.5" />
{/if}
</span>
{timingDisplay.text}
</span>
</td>
{/if}
{/each}
</tr>
{/each}
</tbody>
{/if}
</table>
</div>
{#if tooltipText !== null}
<div
class="pointer-events-none fixed z-50 bg-card text-card-foreground text-xs border border-border rounded-md px-2.5 py-1.5 shadow-sm whitespace-pre-line max-w-max text-left"
style="left: {tooltipX + 12}px; top: {tooltipY + 12}px;"
>
{tooltipText}
</div>
{/if}
{/if}
@@ -0,0 +1,756 @@
<script module lang="ts">
import type {
ScraperStatsResponse,
SubjectDetailResponse,
SubjectSummary,
TimeseriesResponse,
} from "$lib/bindings";
// Persisted across navigation so returning to the page shows cached data.
let stats = $state<ScraperStatsResponse | null>(null);
let timeseries = $state<TimeseriesResponse | null>(null);
let subjects = $state<SubjectSummary[]>([]);
let error = $state<string | null>(null);
let refreshError = $state(false);
let refreshInterval = 5_000;
</script>
<script lang="ts">
import { client, type ScraperPeriod } from "$lib/api";
import SimpleTooltip from "$lib/components/SimpleTooltip.svelte";
import { FlexRender, createSvelteTable } from "$lib/components/ui/data-table/index.js";
import { formatAbsoluteDate } from "$lib/date";
import { formatDuration, formatDurationMs, relativeTime } from "$lib/time";
import { formatNumber } from "$lib/utils";
import { Chart, Svg, Area, Axis, Highlight, Tooltip } from "layerchart";
import { curveMonotoneX } from "d3-shape";
import { cubicOut } from "svelte/easing";
import { Tween } from "svelte/motion";
import { scaleTime, scaleLinear } from "d3-scale";
import {
AlertCircle,
ChevronDown,
ChevronRight,
LoaderCircle,
ArrowUp,
ArrowDown,
ArrowUpDown,
} from "@lucide/svelte";
import {
type ColumnDef,
type SortingState,
type Updater,
getCoreRowModel,
getSortedRowModel,
} from "@tanstack/table-core";
import { onDestroy, onMount } from "svelte";
import { fade, slide } from "svelte/transition";
const PERIODS: ScraperPeriod[] = ["1h", "6h", "24h", "7d", "30d"];
let selectedPeriod = $state<ScraperPeriod>("24h");
// Expanded subject detail
let expandedSubject = $state<string | null>(null);
let subjectDetail = $state<SubjectDetailResponse | null>(null);
let detailLoading = $state(false);
// Live-updating clock for relative timestamps
let now = $state(new Date());
let tickTimer: ReturnType<typeof setTimeout> | undefined;
function scheduleTick() {
tickTimer = setTimeout(() => {
now = new Date();
scheduleTick();
}, 1000);
}
// --- Auto-refresh with backoff (ported from audit log) ---
const MIN_INTERVAL = 5_000;
const MAX_INTERVAL = 60_000;
let refreshTimer: ReturnType<typeof setTimeout> | undefined;
const MIN_SPIN_MS = 700;
let spinnerVisible = $state(false);
let spinHoldTimer: ReturnType<typeof setTimeout> | undefined;
async function fetchAll() {
refreshError = false;
spinnerVisible = true;
clearTimeout(spinHoldTimer);
const startedAt = performance.now();
try {
const [statsRes, timeseriesRes, subjectsRes] = await Promise.all([
client.getScraperStats(selectedPeriod),
client.getScraperTimeseries(selectedPeriod),
client.getScraperSubjects(),
]);
stats = statsRes;
timeseries = timeseriesRes;
subjects = subjectsRes.subjects;
error = null;
refreshInterval = MIN_INTERVAL;
} catch (e) {
error = e instanceof Error ? e.message : "Failed to load scraper data";
refreshError = true;
refreshInterval = Math.min(refreshInterval * 2, MAX_INTERVAL);
} finally {
const elapsed = performance.now() - startedAt;
const remaining = MIN_SPIN_MS - elapsed;
if (remaining > 0) {
spinHoldTimer = setTimeout(() => {
spinnerVisible = false;
}, remaining);
} else {
spinnerVisible = false;
}
scheduleRefresh();
}
}
function scheduleRefresh() {
clearTimeout(refreshTimer);
refreshTimer = setTimeout(fetchAll, refreshInterval);
}
async function toggleSubjectDetail(subject: string) {
if (expandedSubject === subject) {
expandedSubject = null;
subjectDetail = null;
return;
}
expandedSubject = subject;
detailLoading = true;
try {
subjectDetail = await client.getScraperSubjectDetail(subject);
} catch {
subjectDetail = null;
} finally {
detailLoading = false;
}
}
// --- Chart data ---
type ChartPoint = { date: Date; success: number; errors: number; coursesChanged: number };
let chartData = $derived(
(timeseries?.points ?? []).map((p) => ({
date: new Date(p.timestamp),
success: p.successCount,
errors: p.errorCount,
coursesChanged: p.coursesChanged,
})),
);
// Tween the data array so stacked areas stay aligned (both read the same interpolated values each frame)
const tweenedChart = new Tween<ChartPoint[]>([], {
duration: 600,
easing: cubicOut,
interpolate(from, to) {
// Different lengths: snap immediately (period change reshapes the array)
if (from.length !== to.length) return () => to;
return (t) =>
to.map((dest, i) => ({
date: dest.date,
success: from[i].success + (dest.success - from[i].success) * t,
errors: from[i].errors + (dest.errors - from[i].errors) * t,
coursesChanged: from[i].coursesChanged + (dest.coursesChanged - from[i].coursesChanged) * t,
}));
},
});
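// Push new data into the tween whenever the derived chart data changes.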
$effect(() => {
tweenedChart.set(chartData);
});
let scrapeYMax = $derived(Math.max(1, ...chartData.map((d) => d.success + d.errors)));
let changesYMax = $derived(Math.max(1, ...chartData.map((d) => d.coursesChanged)));
// --- Helpers ---
function formatInterval(secs: number): string {
if (secs < 60) return `${secs}s`;
if (secs < 3600) return `${Math.round(secs / 60)}m`;
return `${(secs / 3600).toFixed(1)}h`;
}
function successRateColor(rate: number): string {
if (rate >= 0.95) return "text-green-600 dark:text-green-400";
if (rate >= 0.8) return "text-yellow-600 dark:text-yellow-400";
return "text-red-600 dark:text-red-400";
}
/** Muted class for zero/default values, foreground for interesting ones. */
function emphasisClass(value: number, zeroIsDefault = true): string {
if (zeroIsDefault) {
return value === 0 ? "text-muted-foreground" : "text-foreground";
}
return value === 1 ? "text-muted-foreground" : "text-foreground";
}
function xAxisFormat(period: ScraperPeriod) {
return (v: Date) => {
if (period === "1h" || period === "6h") {
return v.toLocaleTimeString("en-US", { hour: "numeric", minute: "2-digit" });
}
if (period === "24h") {
return v.toLocaleTimeString("en-US", { hour: "numeric" });
}
return v.toLocaleDateString("en-US", { month: "short", day: "numeric" });
};
}
// --- TanStack Table ---
let sorting: SortingState = $state([{ id: "subject", desc: false }]);
function handleSortingChange(updater: Updater<SortingState>) {
sorting = typeof updater === "function" ? updater(sorting) : updater;
}
const columns: ColumnDef<SubjectSummary, unknown>[] = [
{
id: "subject",
accessorKey: "subject",
header: "Subject",
enableSorting: true,
sortingFn: (a, b) => a.original.subject.localeCompare(b.original.subject),
},
{
id: "status",
accessorFn: (row) => row.scheduleState,
header: "Status",
enableSorting: true,
sortingFn: (a, b) => {
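// Eligible first, then cooldown (shortest remaining first), then paused and read-only.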
const order: Record<string, number> = { eligible: 0, cooldown: 1, paused: 2, read_only: 3 };
const sa = order[a.original.scheduleState] ?? 4;
const sb = order[b.original.scheduleState] ?? 4;
if (sa !== sb) return sa - sb;
return (a.original.cooldownRemainingSecs ?? Infinity) - (b.original.cooldownRemainingSecs ?? Infinity);
},
},
{
id: "interval",
accessorFn: (row) => row.currentIntervalSecs * row.timeMultiplier,
header: "Interval",
enableSorting: true,
},
{
id: "lastScraped",
accessorKey: "lastScraped",
header: "Last Scraped",
enableSorting: true,
},
{
id: "changeRate",
accessorKey: "avgChangeRatio",
header: "Change %",
enableSorting: true,
},
{
id: "zeros",
accessorKey: "consecutiveZeroChanges",
header: "Zeros",
enableSorting: true,
},
{
id: "runs",
accessorKey: "recentRuns",
header: "Runs",
enableSorting: true,
},
{
id: "fails",
accessorKey: "recentFailures",
header: "Fails",
enableSorting: true,
},
];
const table = createSvelteTable({
get data() {
return subjects;
},
getRowId: (row) => row.subject,
columns,
state: {
get sorting() {
return sorting;
},
},
onSortingChange: handleSortingChange,
getCoreRowModel: getCoreRowModel(),
getSortedRowModel: getSortedRowModel<SubjectSummary>(),
enableSortingRemoval: true,
});
const skeletonWidths: Record<string, string> = {
subject: "w-24",
status: "w-20",
interval: "w-14",
lastScraped: "w-20",
changeRate: "w-12",
zeros: "w-8",
runs: "w-8",
fails: "w-8",
};
const columnCount = columns.length;
// --- Lifecycle ---
onMount(() => {
fetchAll();
scheduleTick();
});
onDestroy(() => {
clearTimeout(tickTimer);
clearTimeout(refreshTimer);
clearTimeout(spinHoldTimer);
});
// Refetch when period changes
$effect(() => {
void selectedPeriod;
fetchAll();
});
</script>
<div class="space-y-6">
<!-- Header -->
<div class="flex items-center justify-between">
<div class="flex items-center gap-2">
<h1 class="text-base font-semibold text-foreground">Scraper</h1>
{#if spinnerVisible}
<span in:fade={{ duration: 150 }} out:fade={{ duration: 200 }}>
<LoaderCircle class="size-4 animate-spin text-muted-foreground" />
</span>
{:else if refreshError}
<span in:fade={{ duration: 150 }} out:fade={{ duration: 200 }}>
<SimpleTooltip text={error ?? "Refresh failed"} side="right" passthrough>
<AlertCircle class="size-4 text-destructive" />
</SimpleTooltip>
</span>
{/if}
</div>
<div class="bg-muted flex rounded-md p-0.5">
{#each PERIODS as period}
<button
class="rounded px-2.5 py-1 text-xs font-medium transition-colors
{selectedPeriod === period
? 'bg-background text-foreground shadow-sm'
: 'text-muted-foreground hover:text-foreground'}"
onclick={() => (selectedPeriod = period)}
>
{period}
</button>
{/each}
</div>
</div>
{#if error && !stats}
<p class="text-destructive">{error}</p>
{:else if stats}
<!-- Stats Cards -->
<div class="grid grid-cols-2 gap-4 lg:grid-cols-4">
<div class="bg-card border-border rounded-lg border p-3">
<p class="text-muted-foreground text-xs">Total Scrapes</p>
<p class="text-2xl font-bold">{formatNumber(stats.totalScrapes)}</p>
<p class="text-muted-foreground mt-1 text-[10px]">
{formatNumber(stats.successfulScrapes)} ok / {formatNumber(stats.failedScrapes)} failed
</p>
</div>
<div class="bg-card border-border rounded-lg border p-3">
<p class="text-muted-foreground text-xs">Success Rate</p>
{#if stats.successRate != null}
<p class="text-2xl font-bold {successRateColor(stats.successRate)}">
{(stats.successRate * 100).toFixed(1)}%
</p>
{:else}
<p class="text-2xl font-bold text-muted-foreground">N/A</p>
{/if}
</div>
<div class="bg-card border-border rounded-lg border p-3">
<p class="text-muted-foreground text-xs">Avg Duration</p>
{#if stats.avgDurationMs != null}
<p class="text-2xl font-bold">{formatDurationMs(stats.avgDurationMs)}</p>
{:else}
<p class="text-2xl font-bold text-muted-foreground">N/A</p>
{/if}
</div>
<div class="bg-card border-border rounded-lg border p-3">
<p class="text-muted-foreground text-xs">Courses Changed</p>
<p class="text-2xl font-bold">{formatNumber(stats.totalCoursesChanged)}</p>
</div>
<div class="bg-card border-border rounded-lg border p-3">
<p class="text-muted-foreground text-xs">Pending Jobs</p>
<p class="text-2xl font-bold">{formatNumber(stats.pendingJobs)}</p>
</div>
<div class="bg-card border-border rounded-lg border p-3">
<p class="text-muted-foreground text-xs">Locked Jobs</p>
<p class="text-2xl font-bold">{formatNumber(stats.lockedJobs)}</p>
</div>
<div class="bg-card border-border rounded-lg border p-3">
<p class="text-muted-foreground text-xs">Courses Fetched</p>
<p class="text-2xl font-bold">{formatNumber(stats.totalCoursesFetched)}</p>
</div>
<div class="bg-card border-border rounded-lg border p-3">
<p class="text-muted-foreground text-xs">Audits Generated</p>
<p class="text-2xl font-bold">{formatNumber(stats.totalAuditsGenerated)}</p>
</div>
</div>
<!-- Time-Series Charts -->
{#if chartData.length > 0}
<div class="bg-card border-border rounded-lg border p-4">
<h2 class="mb-3 text-xs font-semibold text-foreground">Scrape Activity</h2>
<div class="h-[250px]">
<Chart
data={tweenedChart.current}
x="date"
xScale={scaleTime()}
y={(d: ChartPoint) => d.success + d.errors}
yScale={scaleLinear()}
yDomain={[0, scrapeYMax]}
yNice
padding={{ top: 10, bottom: 30, left: 45, right: 10 }}
tooltip={{ mode: "bisect-x" }}
>
<Svg>
<Axis
placement="left"
grid={{ class: "stroke-muted-foreground/15" }}
rule={false}
classes={{ tickLabel: "fill-muted-foreground" }}
/>
<Axis
placement="bottom"
format={xAxisFormat(selectedPeriod)}
grid={{ class: "stroke-muted-foreground/10" }}
rule={false}
classes={{ tickLabel: "fill-muted-foreground" }}
/>
<Area
y1="success"
fill="var(--status-green)"
fillOpacity={0.4}
curve={curveMonotoneX}
/>
<Area
y0="success"
y1={(d: ChartPoint) => d.success + d.errors}
fill="var(--status-red)"
fillOpacity={0.4}
curve={curveMonotoneX}
/>
<Highlight lines />
</Svg>
<Tooltip.Root
let:data
classes={{ root: "text-xs" }}
variant="none"
>
<div class="bg-card text-card-foreground shadow-md rounded-md px-2.5 py-1.5 space-y-1">
<p class="text-muted-foreground font-medium">{data.date.toLocaleTimeString("en-US", { hour: "numeric", minute: "2-digit" })}</p>
<div class="flex items-center justify-between gap-4">
<span class="flex items-center gap-1.5"><span class="inline-block size-2 rounded-full bg-status-green"></span>Successful</span>
<span class="tabular-nums font-medium">{data.success}</span>
</div>
<div class="flex items-center justify-between gap-4">
<span class="flex items-center gap-1.5"><span class="inline-block size-2 rounded-full bg-status-red"></span>Errors</span>
<span class="tabular-nums font-medium">{data.errors}</span>
</div>
</div>
</Tooltip.Root>
</Chart>
</div>
<h2 class="mt-4 mb-3 text-xs font-semibold text-foreground">Courses Changed</h2>
<div class="h-[150px]">
<Chart
data={tweenedChart.current}
x="date"
xScale={scaleTime()}
y="coursesChanged"
yScale={scaleLinear()}
yDomain={[0, changesYMax]}
yNice
padding={{ top: 10, bottom: 30, left: 45, right: 10 }}
tooltip={{ mode: "bisect-x" }}
>
<Svg>
<Axis
placement="left"
grid={{ class: "stroke-muted-foreground/15" }}
rule={false}
classes={{ tickLabel: "fill-muted-foreground" }}
/>
<Axis
placement="bottom"
format={xAxisFormat(selectedPeriod)}
grid={{ class: "stroke-muted-foreground/10" }}
rule={false}
classes={{ tickLabel: "fill-muted-foreground" }}
/>
<Area
fill="var(--status-blue)"
fillOpacity={0.3}
curve={curveMonotoneX}
/>
<Highlight lines />
</Svg>
<Tooltip.Root
let:data
classes={{ root: "text-xs" }}
variant="none"
>
<div class="bg-card text-card-foreground shadow-md rounded-md px-2.5 py-1.5 space-y-1">
<p class="text-muted-foreground font-medium">{data.date.toLocaleTimeString("en-US", { hour: "numeric", minute: "2-digit" })}</p>
<div class="flex items-center justify-between gap-4">
<span class="flex items-center gap-1.5"><span class="inline-block size-2 rounded-full bg-status-blue"></span>Changed</span>
<span class="tabular-nums font-medium">{data.coursesChanged}</span>
</div>
</div>
</Tooltip.Root>
</Chart>
</div>
</div>
{/if}
<!-- Subjects Table -->
<div class="bg-card border-border rounded-lg border">
<h2 class="border-border border-b px-3 py-2.5 text-xs font-semibold text-foreground">
Subjects ({subjects.length})
</h2>
<div class="overflow-x-auto">
<table class="w-full text-xs">
<thead>
{#each table.getHeaderGroups() as headerGroup}
<tr class="border-border border-b text-left text-muted-foreground">
{#each headerGroup.headers as header}
<th
class="px-3 py-1.5 text-[10px] font-medium uppercase tracking-wider"
class:cursor-pointer={header.column.getCanSort()}
class:select-none={header.column.getCanSort()}
onclick={header.column.getToggleSortingHandler()}
>
{#if header.column.getCanSort()}
<span class="inline-flex items-center gap-1 hover:text-foreground">
{#if typeof header.column.columnDef.header === "string"}
{header.column.columnDef.header}
{:else}
<FlexRender
content={header.column.columnDef.header}
context={header.getContext()}
/>
{/if}
{#if header.column.getIsSorted() === "asc"}
<ArrowUp class="size-3.5" />
{:else if header.column.getIsSorted() === "desc"}
<ArrowDown class="size-3.5" />
{:else}
<ArrowUpDown class="size-3.5 text-muted-foreground/40" />
{/if}
</span>
{:else if typeof header.column.columnDef.header === "string"}
{header.column.columnDef.header}
{:else}
<FlexRender
content={header.column.columnDef.header}
context={header.getContext()}
/>
{/if}
</th>
{/each}
</tr>
{/each}
</thead>
<tbody>
{#if !subjects.length && !error}
<!-- Skeleton loading -->
{#each Array(12) as _}
<tr class="border-border border-b">
{#each columns as col}
<td class="px-3 py-2">
<div
class="h-3.5 rounded bg-muted animate-pulse {skeletonWidths[col.id ?? ''] ?? 'w-16'}"
></div>
</td>
{/each}
</tr>
{/each}
{:else}
{#each table.getRowModel().rows as row (row.id)}
{@const subject = row.original}
{@const isExpanded = expandedSubject === subject.subject}
{@const rel = relativeTime(new Date(subject.lastScraped), now)}
<tr
class="border-border cursor-pointer border-b transition-colors hover:bg-muted/50
{isExpanded ? 'bg-muted/30' : ''}"
onclick={() => toggleSubjectDetail(subject.subject)}
>
{#each row.getVisibleCells() as cell (cell.id)}
{@const colId = cell.column.id}
{#if colId === "subject"}
<td class="px-3 py-1.5 font-medium">
<div class="flex items-center gap-1.5">
{#if isExpanded}
<ChevronDown size={12} class="shrink-0" />
{:else}
<ChevronRight size={12} class="shrink-0" />
{/if}
<span>{subject.subject}</span>
{#if subject.subjectDescription}
<span
class="text-muted-foreground font-normal text-[10px] max-w-[140px] truncate inline-block align-middle"
title={subject.subjectDescription}
>{subject.subjectDescription}</span>
{/if}
{#if subject.trackedCourseCount > 0}
<span class="text-muted-foreground/60 font-normal text-[10px]">({subject.trackedCourseCount})</span>
{/if}
</div>
</td>
{:else if colId === "status"}
<td class="px-3 py-1.5">
{#if subject.scheduleState === "paused"}
<span class="text-orange-600 dark:text-orange-400">paused</span>
{:else if subject.scheduleState === "read_only"}
<span class="text-muted-foreground">read only</span>
{:else if subject.nextEligibleAt}
{@const remainingMs = new Date(subject.nextEligibleAt).getTime() - now.getTime()}
{#if remainingMs > 0}
<span class="text-muted-foreground">{formatDuration(remainingMs)}</span>
{:else}
<span class="text-green-600 dark:text-green-400 font-medium">ready</span>
{/if}
{:else}
<span class="text-green-600 dark:text-green-400 font-medium">ready</span>
{/if}
</td>
{:else if colId === "interval"}
<td class="px-3 py-1.5">
<span>{formatInterval(subject.currentIntervalSecs)}</span>
{#if subject.timeMultiplier !== 1}
<span class="text-muted-foreground ml-0.5">&times;{subject.timeMultiplier}</span>
{/if}
</td>
{:else if colId === "lastScraped"}
<td class="px-3 py-1.5">
<SimpleTooltip text={formatAbsoluteDate(subject.lastScraped)} side="top" passthrough>
<span class="text-muted-foreground">{rel.text === "now" ? "just now" : rel.text}</span>
</SimpleTooltip>
</td>
{:else if colId === "changeRate"}
<td class="px-3 py-1.5">
<span class={emphasisClass(subject.avgChangeRatio)}>{(subject.avgChangeRatio * 100).toFixed(2)}%</span>
</td>
{:else if colId === "zeros"}
<td class="px-3 py-1.5">
<span class={emphasisClass(subject.consecutiveZeroChanges)}>{subject.consecutiveZeroChanges}</span>
</td>
{:else if colId === "runs"}
<td class="px-3 py-1.5">
<span class={emphasisClass(subject.recentRuns)}>{subject.recentRuns}</span>
</td>
{:else if colId === "fails"}
<td class="px-3 py-1.5">
{#if subject.recentFailures > 0}
<span class="text-red-600 dark:text-red-400">{subject.recentFailures}</span>
{:else}
<span class="text-muted-foreground">{subject.recentFailures}</span>
{/if}
</td>
{/if}
{/each}
</tr>
<!-- Expanded Detail -->
{#if isExpanded}
<tr class="border-border border-b last:border-b-0">
<td colspan={columnCount} class="p-0">
<div transition:slide={{ duration: 200 }}>
<div class="bg-muted/20 px-4 py-3">
{#if detailLoading}
<p class="text-muted-foreground text-sm">Loading results...</p>
{:else if subjectDetail && subjectDetail.results.length > 0}
<div class="overflow-x-auto">
<table class="w-full text-xs">
<thead>
<tr class="text-muted-foreground text-left">
<th class="px-3 py-1.5 font-medium">Time</th>
<th class="px-3 py-1.5 font-medium">Duration</th>
<th class="px-3 py-1.5 font-medium">Status</th>
<th class="px-3 py-1.5 font-medium">Fetched</th>
<th class="px-3 py-1.5 font-medium">Changed</th>
<th class="px-3 py-1.5 font-medium">Unchanged</th>
<th class="px-3 py-1.5 font-medium">Audits</th>
<th class="px-3 py-1.5 font-medium">Error</th>
</tr>
</thead>
<tbody>
{#each subjectDetail.results as result (result.id)}
{@const detailRel = relativeTime(new Date(result.completedAt), now)}
<tr class="border-border/50 border-t">
<td class="px-3 py-1.5">
<SimpleTooltip text={formatAbsoluteDate(result.completedAt)} side="top" passthrough>
<span class="text-muted-foreground">{detailRel.text === "now" ? "just now" : detailRel.text}</span>
</SimpleTooltip>
</td>
<td class="px-3 py-1.5">{formatDurationMs(result.durationMs)}</td>
<td class="px-3 py-1.5">
{#if result.success}
<span class="text-green-600 dark:text-green-400">ok</span>
{:else}
<span class="text-red-600 dark:text-red-400">fail</span>
{/if}
</td>
<td class="px-3 py-1.5">
<span class={emphasisClass(result.coursesFetched ?? 0)}>{result.coursesFetched ?? "\u2014"}</span>
</td>
<td class="px-3 py-1.5">
<span class={emphasisClass(result.coursesChanged ?? 0)}>{result.coursesChanged ?? "\u2014"}</span>
</td>
<td class="px-3 py-1.5">
<span class="text-muted-foreground">{result.coursesUnchanged ?? "\u2014"}</span>
</td>
<td class="px-3 py-1.5">
<span class={emphasisClass(result.auditsGenerated ?? 0)}>{result.auditsGenerated ?? "\u2014"}</span>
</td>
<td class="text-muted-foreground max-w-[200px] truncate px-3 py-1.5">
{result.errorMessage ?? ""}
</td>
</tr>
{/each}
</tbody>
</table>
</div>
{:else}
<p class="text-muted-foreground text-sm">No recent results.</p>
{/if}
</div>
</div>
</td>
</tr>
{/if}
{/each}
{/if}
</tbody>
</table>
</div>
</div>
{:else}
<!-- Initial loading skeleton -->
<div class="grid grid-cols-2 gap-4 lg:grid-cols-4">
{#each Array(8) as _}
<div class="bg-card border-border rounded-lg border p-4">
<div class="h-4 w-24 rounded bg-muted animate-pulse"></div>
<div class="mt-2 h-8 w-16 rounded bg-muted animate-pulse"></div>
</div>
{/each}
</div>
{/if}
</div>
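The stat cards and tables above lean on small formatting helpers (successRateColor, emphasisClass, formatInterval, xAxisFormat) whose definitions sit earlier in this component, outside the hunk. A plausible sketch of their shapes, assuming the thresholds, period names, and class strings shown here; the real definitions may differ:

// Sketch only: assumed implementations of helpers referenced in the template above.
import { timeFormat } from "d3-time-format";

// Green above 99%, amber above 95%, red otherwise (thresholds are assumptions).
function successRateColor(rate: number): string {
	if (rate >= 0.99) return "text-green-600 dark:text-green-400";
	if (rate >= 0.95) return "text-yellow-600 dark:text-yellow-400";
	return "text-red-600 dark:text-red-400";
}

// Dim zero values so non-zero cells stand out in the table.
function emphasisClass(value: number): string {
	return value > 0 ? "font-medium" : "text-muted-foreground";
}

// Render a seconds-based scrape interval as "45s", "12m", or "1.5h".
function formatInterval(secs: number): string {
	if (secs < 60) return `${secs}s`;
	if (secs < 3600) return `${Math.round(secs / 60)}m`;
	return `${(secs / 3600).toFixed(1)}h`;
}

// Coarser tick labels for longer periods (period names are assumptions).
function xAxisFormat(period: string): (d: Date) => string {
	return period === "24h" ? timeFormat("%-I %p") : timeFormat("%b %-d");
}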
@@ -0,0 +1,92 @@
<script lang="ts">
import { onMount } from "svelte";
import { client } from "$lib/api";
import type { User } from "$lib/bindings";
import { Shield, ShieldOff } from "@lucide/svelte";
let users = $state<User[]>([]);
let error = $state<string | null>(null);
let updating = $state<string | null>(null);
onMount(async () => {
try {
users = await client.getAdminUsers();
} catch (e) {
error = e instanceof Error ? e.message : "Failed to load users";
}
});
async function toggleAdmin(user: User) {
updating = user.discordId;
try {
const updated = await client.setUserAdmin(user.discordId, !user.isAdmin);
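// Swap in the server-confirmed record rather than patching local state optimistically.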
users = users.map((u) => (u.discordId === updated.discordId ? updated : u));
} catch (e) {
error = e instanceof Error ? e.message : "Failed to update user";
} finally {
updating = null;
}
}
</script>
<h1 class="mb-4 text-lg font-semibold text-foreground">Users</h1>
{#if error}
<p class="text-destructive mb-4">{error}</p>
{/if}
{#if users.length === 0 && !error}
<p class="text-muted-foreground">Loading...</p>
{:else}
<div class="bg-card border-border overflow-hidden rounded-lg border">
<table class="w-full text-sm">
<thead>
<tr class="border-border border-b">
<th class="px-4 py-3 text-left font-medium">Username</th>
<th class="px-4 py-3 text-left font-medium">Discord ID</th>
<th class="px-4 py-3 text-left font-medium">Admin</th>
<th class="px-4 py-3 text-left font-medium">Actions</th>
</tr>
</thead>
<tbody>
{#each users as user (user.discordId)}
<tr class="border-border border-b last:border-b-0">
<td class="flex items-center gap-2 px-4 py-3">
{#if user.discordAvatarHash}
<img
src="https://cdn.discordapp.com/avatars/{user.discordId}/{user.discordAvatarHash}.png?size=32"
alt=""
class="h-6 w-6 rounded-full"
/>
{/if}
{user.discordUsername}
</td>
<td class="text-muted-foreground px-4 py-3 font-mono text-xs">{user.discordId}</td>
<td class="px-4 py-3">
{#if user.isAdmin}
<span class="rounded-full bg-blue-100 px-2 py-0.5 text-xs font-medium text-blue-800 dark:bg-blue-900 dark:text-blue-200">Admin</span>
{:else}
<span class="text-muted-foreground text-xs">User</span>
{/if}
</td>
<td class="px-4 py-3">
<button
onclick={() => toggleAdmin(user)}
disabled={updating === user.discordId}
class="hover:bg-accent inline-flex items-center gap-1 rounded px-2 py-1 text-xs transition-colors disabled:opacity-50"
>
{#if user.isAdmin}
<ShieldOff size={14} />
Remove Admin
{:else}
<Shield size={14} />
Make Admin
{/if}
</button>
</td>
</tr>
{/each}
</tbody>
</table>
</div>
{/if}
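The page above assumes two methods on the shared client from $lib/api. A minimal sketch of that surface; the REST routes in the comments are assumptions, not confirmed by this diff:

// Sketch only: the client methods this page calls, with assumed endpoint paths.
import type { User } from "$lib/bindings";

export interface AdminUsersApi {
	/** Assumed GET /api/admin/users: list all known users (admin only). */
	getAdminUsers(): Promise<User[]>;
	/** Assumed PUT /api/admin/users/:discordId/admin: grant or revoke admin, returning the updated user. */
	setUserAdmin(discordId: string, isAdmin: boolean): Promise<User>;
}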
@@ -0,0 +1,24 @@
<script lang="ts">
import { authStore } from "$lib/auth.svelte";
</script>
<h1 class="mb-4 text-lg font-semibold text-foreground">Profile</h1>
{#if authStore.user}
<div class="bg-card border-border rounded-lg border p-4">
<div class="flex flex-col gap-3">
<div>
<p class="text-muted-foreground text-sm">Username</p>
<p class="font-medium">{authStore.user.discordUsername}</p>
</div>
<div>
<p class="text-muted-foreground text-sm">Discord ID</p>
<p class="font-medium font-mono text-sm">{authStore.user.discordId}</p>
</div>
<div>
<p class="text-muted-foreground text-sm">Role</p>
<p class="font-medium">{authStore.isAdmin ? "Admin" : "User"}</p>
</div>
</div>
</div>
{/if}
@@ -0,0 +1,5 @@
<h1 class="mb-4 text-lg font-semibold text-foreground">Settings</h1>
<div class="bg-card border-border rounded-lg border p-4">
<p class="text-muted-foreground text-sm">No settings available yet.</p>
</div>
@@ -0,0 +1,46 @@
<script lang="ts">
import { page } from "$app/state";
import { House } from "@lucide/svelte";
const status = $derived(page.status);
const messages: Record<number, string> = {
400: "Bad request",
401: "Unauthorized",
403: "Forbidden",
404: "Page not found",
405: "Method not allowed",
408: "Request timeout",
429: "Too many requests",
500: "Something went wrong",
502: "Service temporarily unavailable",
503: "Service temporarily unavailable",
504: "Gateway timeout",
};
const message = $derived(messages[status] ?? "An error occurred");
const isServerError = $derived(status >= 500);
</script>
<svelte:head>
<title>{status} - {message}</title>
</svelte:head>
<div class="flex min-h-screen items-center justify-center px-4 pb-14">
<div class="max-w-md text-center">
<h1 class="text-8xl font-bold tracking-tight text-muted-foreground/50">{status}</h1>
<p class="mt-4 text-xl text-muted-foreground">{message}</p>
{#if isServerError}
<p class="mt-2 text-sm text-muted-foreground/60">This may be temporary. Try again in a moment.</p>
{/if}
<a
href="/"
class="mt-8 inline-flex items-center gap-2 rounded-lg border border-border bg-card px-4 py-2.5 text-sm font-medium text-foreground shadow-sm transition-colors hover:bg-muted"
>
<House size={16} strokeWidth={2} />
Return home
</a>
</div>
</div>
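The error page renders whatever status SvelteKit reports via page.status. A minimal sketch of how a load function lands a user here, using SvelteKit's error() helper; the fetched path is hypothetical:

// Sketch: a load function that routes failures to the +error.svelte above.
import { error } from "@sveltejs/kit";
import type { PageLoad } from "./$types";

export const load: PageLoad = async ({ fetch }) => {
	const res = await fetch("/api/subjects"); // hypothetical endpoint
	if (res.status === 404) error(404, "Page not found"); // renders this page with status 404
	if (!res.ok) error(502, "Service temporarily unavailable"); // 5xx also shows the "may be temporary" hint
	return { subjects: await res.json() };
};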
Some files were not shown because too many files have changed in this diff.