44 Commits

SHA1 Message Date
75a99c10ea feat: add scrape job result persistence for effectiveness tracking 2026-01-30 01:37:41 -06:00
857ceabcca fix: prevent ts-rs serde warnings 2026-01-30 01:36:57 -06:00
203c337cf0 feat: add confidence-based RMP matching with manual review workflow
Replace simple auto-matching with scored candidate generation that
considers department overlap, name uniqueness, and rating volume.
Candidates above 0.85 auto-accept; others require admin approval.
2026-01-30 01:31:11 -06:00
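A minimal sketch of the scoring shape this commit describes. Only the 0.85 cutoff comes from the commit message itself; the factor weights and the 20-rating saturation point below are illustrative assumptions, not the repository's actual matcher.

// Hypothetical scoring sketch -- weights and the 20-rating saturation
// point are assumptions; only the 0.85 threshold is from the commit.
const AUTO_ACCEPT_THRESHOLD: f32 = 0.85;

fn score_candidate(dept_overlap: f32, name_uniqueness: f32, num_ratings: u32) -> f32 {
    // Rating volume saturates: 20+ ratings count as full confidence.
    let volume = (num_ratings as f32 / 20.0).min(1.0);
    0.5 * name_uniqueness + 0.3 * dept_overlap + 0.2 * volume
}

fn disposition(score: f32) -> &'static str {
    if score >= AUTO_ACCEPT_THRESHOLD {
        "auto-accept"
    } else {
        "pending manual review"
    }
}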
39ba131322 feat: add mobile touch controls with gesture detection 2026-01-29 23:56:45 -06:00
2fad9c969d fix: avoid title on icon, use simpler href-based login redirect 2026-01-29 23:44:05 -06:00
47b4f3315f feat: enhance login page with FAQ section and improved styling 2026-01-29 23:40:48 -06:00
fa28f13a45 feat: add interactive timeline visualization for class times
Implements a canvas-based timeline view with D3 scales showing class
counts across subjects. Features drag-to-pan, mouse wheel zoom, subject
filtering, hover tooltips, and smooth animations. Timeline auto-follows
current time and supports keyboard navigation.
2026-01-29 23:19:39 -06:00
5a6ea1e53a fix: handle backend startup delays with retry logic in auth 2026-01-29 20:04:50 -06:00
ba2b2fc50a fix: increase Banner API timeouts to handle slow responses 2026-01-29 19:49:57 -06:00
cfe098d193 feat: add websocket support for real-time scrape job monitoring 2026-01-29 19:31:04 -06:00
d861888e5e fix: proper centering for login page content, avoid unnecessary scrollbar 2026-01-29 18:05:50 -06:00
f0645d82d9 refactor: persist audit log state in module scope for cross-navigation caching 2026-01-29 17:54:27 -06:00
7a1cd2a39b refactor: centralize number formatting with locale-aware utility 2026-01-29 17:53:38 -06:00
d2985f98ce feat: enhance audit log with smart diffing, conditional request caching, auto refreshing 2026-01-29 17:35:11 -06:00
b58eb840f3 refactor: consolidate navigation with top nav bar and route groups 2026-01-29 17:01:47 -06:00
2bc6fbdf30 feat: implement relative time feedback and improve tooltip customization 2026-01-29 16:44:06 -06:00
e41b970d6e fix: implement i64 serialization for JavaScript compatibility, fixing avatar URL display 2026-01-29 15:51:19 -06:00
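Background on this fix, with a sketch of the usual workaround (assumed code, not the repo's exact implementation): JavaScript numbers are IEEE-754 doubles and only exact up to 2^53 - 1, so 64-bit Discord IDs such as avatar-bearing user IDs must cross the API as strings.

use serde::Serialize;

#[derive(Serialize)]
struct User {
    // Serialized as a string so JS clients don't round the snowflake ID.
    #[serde(with = "i64_as_string")]
    discord_id: i64,
}

mod i64_as_string {
    use serde::Serializer;
    pub fn serialize<S: Serializer>(v: &i64, s: S) -> Result<S::Ok, S::Error> {
        s.serialize_str(&v.to_string())
    }
}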
e880126281 feat: implement worker timeout protection and crash recovery for job queue
Add JOB_TIMEOUT constant to fail stuck jobs after 5 minutes, and
LOCK_EXPIRY to reclaim abandoned locks after 10 minutes. Introduce
force_unlock_all to recover orphaned jobs at startup. Fix retry limit
off-by-one error and update deduplication to include locked jobs.
2026-01-29 15:50:09 -06:00
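A sketch of the recovery pieces named above. The constant names and durations come from the commit message; the `locked_at` column name in the SQL is an assumption for illustration.

use std::time::Duration;

/// Fail jobs that have run longer than this (per the commit message).
const JOB_TIMEOUT: Duration = Duration::from_secs(5 * 60);
/// Reclaim locks older than this (per the commit message).
const LOCK_EXPIRY: Duration = Duration::from_secs(10 * 60);

/// Startup recovery: any leftover lock belongs to a dead worker.
/// The `locked_at` column name is assumed for illustration.
async fn force_unlock_all(pool: &sqlx::PgPool) -> sqlx::Result<u64> {
    let res = sqlx::query("UPDATE scrape_jobs SET locked_at = NULL WHERE locked_at IS NOT NULL")
        .execute(pool)
        .await?;
    Ok(res.rows_affected())
}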
db0ec1e69d feat: add rmp profile links and confidence-aware rating display 2026-01-29 15:43:39 -06:00
2947face06 fix: run frontend build first with -e embed flag in Justfile 2026-01-29 15:00:13 -06:00
36bcc27d7f feat: setup smart page transitions, fix laggy theme-aware element transitions 2026-01-29 14:59:47 -06:00
9e403e5043 refactor: modernize Justfile commands and simplify service management 2026-01-29 14:33:16 -06:00
98a6d978c6 feat: implement course change auditing with time-series metrics endpoint 2026-01-29 14:19:36 -06:00
4deeef2f00 feat: optimize asset delivery with build-time compression and encoding negotiation 2026-01-29 13:56:10 -06:00
e008ee5a12 feat: show search duration and result count feedback 2026-01-29 13:15:25 -06:00
a007ccb6a2 fix: remove out:fade transition from CourseTable 2026-01-29 13:08:45 -06:00
527cbebc6a feat: implement user authentication system with admin dashboard 2026-01-29 12:56:51 -06:00
4207783cdd docs: add changelog entries and roadmap completion tracking 2026-01-29 12:27:46 -06:00
c90bd740de refactor: consolidate query logic and eliminate N+1 instructor loads 2026-01-29 12:03:06 -06:00
61f8bd9de7 refactor: consolidate menu snippets and strengthen type safety
Replaces duplicated dropdown/context menu code with a parameterized snippet,
eliminates unsafe type casts, adds error handling for clipboard and API
calls, and improves accessibility annotations.
2026-01-29 11:40:55 -06:00
b5eaedc9bc feat: add delivery mode indicators and tooltips to location column 2026-01-29 11:32:35 -06:00
58475c8673 feat: add page selector dropdown with animated pagination controls
Replace Previous/Next buttons with 5-slot page navigation centered in
pagination bar. Current page becomes a dropdown trigger allowing direct
page jumps. Side slots animate on page transitions.
2026-01-29 11:31:55 -06:00
78159707e2 feat: table FLIP animations, improved time tooltip details & day abbreviations 2026-01-29 03:40:40 -06:00
779144a4d5 feat: implement smart name abbreviation for instructor display 2026-01-29 03:14:55 -06:00
0da2e810fe feat: add multi-select subject filtering with searchable comboboxes 2026-01-29 03:03:21 -06:00
ed72ac6bff refactor: extract reusable SimpleTooltip component and enhance UI hints 2026-01-29 01:37:04 -06:00
57b5cafb27 feat: enhance table scrolling and eliminate initial theme flash 2026-01-29 01:18:02 -06:00
841191c44d feat: integrate OverlayScrollbars with theme-aware styling 2026-01-29 01:05:19 -06:00
67d7c81ef4 feat: implement interactive data table with sorting and column control
Replaces static course table with TanStack Table featuring sortable
columns, column visibility management, and server-side sort handling.
Adds reusable data-table primitives adapted for Svelte 5 runes.
2026-01-29 01:04:18 -06:00
d108a41f91 feat: sync RMP professor ratings and display in course search interface 2026-01-29 00:26:40 -06:00
5fab8c216a feat: add course search UI with ts-rs type bindings
Integrate ts-rs for Rust-to-TypeScript type generation, build course
search page with filters, pagination, and expandable detail rows,
and refactor theme toggle into a reactive store with view transition
animation.
2026-01-28 22:11:17 -06:00
15256ff91c docs: replace feature wishlist with organized roadmap and changelog 2026-01-28 21:07:10 -06:00
6df4303bd6 feat: implement comprehensive course data model with reference cache and search 2026-01-28 21:06:53 -06:00
e3b855b956 refactor: migrate frontend from React to SvelteKit 2026-01-28 21:06:53 -06:00
145 changed files with 16247 additions and 2398 deletions
.cargo/config.toml  +2
@@ -0,0 +1,2 @@
[env]
TS_RS_EXPORT_DIR = { value = "web/src/lib/bindings/", relative = true }
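With that export dir set, any type deriving ts_rs::TS with #[ts(export)] is written into web/src/lib/bindings/ when the export tests run (the Justfile's `bindings` recipe later in this diff runs `cargo test export_bindings`). An illustrative type, not one from the repo:

use serde::Serialize;
use ts_rs::TS;

#[derive(Serialize, TS)]
#[ts(export)] // emits web/src/lib/bindings/CourseSummary.ts on export test run
struct CourseSummary {
    crn: String,
    title: String,
    enrollment: i32,
}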
Vendored
.gitignore  +4 -3
@@ -1,5 +1,6 @@
.env
/target
/go/
.cargo/config.toml
src/scraper/README.md
# ts-rs bindings
web/src/lib/bindings/**/*.ts
!web/src/lib/bindings/index.ts
Generated
Cargo.lock  +262 -14
@@ -26,6 +26,21 @@ dependencies = [
"memchr",
]
[[package]]
name = "alloc-no-stdlib"
version = "2.0.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cc7bb162ec39d46ab1ca8c77bf72e890535becd1751bb45f64c597edb4c8c6b3"
[[package]]
name = "alloc-stdlib"
version = "0.2.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "94fb8275041c72129eb51b7d0322c29b8387a0386127718b096429201a5d6ece"
dependencies = [
"alloc-no-stdlib",
]
[[package]]
name = "allocator-api2"
version = "0.2.21"
@@ -106,6 +121,19 @@ dependencies = [
"serde",
]
[[package]]
name = "async-compression"
version = "0.4.33"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "93c1f86859c1af3d514fa19e8323147ff10ea98684e6c7b307912509f50e67b2"
dependencies = [
"compression-codecs",
"compression-core",
"futures-core",
"pin-project-lite",
"tokio",
]
[[package]]
name = "async-trait"
version = "0.1.89"
@@ -149,11 +177,12 @@ checksum = "c08606f8c3cbf4ce6ec8e28fb0014a2c086708fe954eaa885384a6165172e7e8"
[[package]]
name = "axum"
version = "0.8.4"
version = "0.8.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "021e862c184ae977658b36c4500f7feac3221ca5da43e3f25bd04ab6c79a29b5"
checksum = "8b52af3cb4058c895d37317bb27508dccc8e5f2d39454016b297bf4a400597b8"
dependencies = [
"axum-core",
"base64 0.22.1",
"bytes",
"form_urlencoded",
"futures-util",
@@ -168,13 +197,14 @@ dependencies = [
"mime",
"percent-encoding",
"pin-project-lite",
"rustversion",
"serde",
"serde_core",
"serde_json",
"serde_path_to_error",
"serde_urlencoded",
"sha1",
"sync_wrapper 1.0.2",
"tokio",
"tokio-tungstenite 0.28.0",
"tower",
"tower-layer",
"tower-service",
@@ -183,9 +213,9 @@ dependencies = [
[[package]]
name = "axum-core"
version = "0.5.2"
version = "0.5.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "68464cd0412f486726fb3373129ef5d2993f90c34bc2bc1c1e9943b2f4fc7ca6"
checksum = "08c78f31d7b1291f7ee735c1c6780ccde7785daae9a9206026862dab7d8792d1"
dependencies = [
"bytes",
"futures-core",
@@ -194,13 +224,37 @@ dependencies = [
"http-body-util",
"mime",
"pin-project-lite",
"rustversion",
"sync_wrapper 1.0.2",
"tower-layer",
"tower-service",
"tracing",
]
[[package]]
name = "axum-extra"
version = "0.12.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fef252edff26ddba56bbcdf2ee3307b8129acb86f5749b68990c168a6fcc9c76"
dependencies = [
"axum",
"axum-core",
"bytes",
"form_urlencoded",
"futures-core",
"futures-util",
"http 1.3.1",
"http-body 1.0.1",
"http-body-util",
"mime",
"pin-project-lite",
"serde_core",
"serde_html_form",
"serde_path_to_error",
"tower-layer",
"tower-service",
"tracing",
]
[[package]]
name = "backtrace"
version = "0.3.75"
@@ -218,11 +272,12 @@ dependencies = [
[[package]]
name = "banner"
version = "0.3.4"
version = "0.5.0"
dependencies = [
"anyhow",
"async-trait",
"axum",
"axum-extra",
"bitflags 2.9.4",
"chrono",
"clap",
@@ -235,6 +290,7 @@ dependencies = [
"fundu",
"futures",
"governor",
"html-escape",
"http 1.3.1",
"mime_guess",
"num-format",
@@ -257,7 +313,9 @@ dependencies = [
"tower-http",
"tracing",
"tracing-subscriber",
"ts-rs",
"url",
"urlencoding",
"yansi",
]
@@ -303,6 +361,27 @@ dependencies = [
"generic-array",
]
[[package]]
name = "brotli"
version = "8.0.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4bd8b9603c7aa97359dbd97ecf258968c95f3adddd6db2f7e7a5bef101c84560"
dependencies = [
"alloc-no-stdlib",
"alloc-stdlib",
"brotli-decompressor",
]
[[package]]
name = "brotli-decompressor"
version = "5.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "874bb8112abecc98cbd6d81ea4fa7e94fb9449648c93cc89aa40c81c24d7de03"
dependencies = [
"alloc-no-stdlib",
"alloc-stdlib",
]
[[package]]
name = "bstr"
version = "1.12.0"
@@ -380,6 +459,8 @@ version = "1.2.34"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "42bc4aea80032b7bf409b0bc7ccad88853858911b7713a8062fdc0623867bedc"
dependencies = [
"jobserver",
"libc",
"shlex",
]
@@ -474,6 +555,26 @@ dependencies = [
"time",
]
[[package]]
name = "compression-codecs"
version = "0.4.32"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "680dc087785c5230f8e8843e2e57ac7c1c90488b6a91b88caa265410568f441b"
dependencies = [
"brotli",
"compression-core",
"flate2",
"memchr",
"zstd",
"zstd-safe",
]
[[package]]
name = "compression-core"
version = "0.4.31"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "75984efb6ed102a0d42db99afb6c1948f0380d1d91808d5529916e6c08b49d8d"
[[package]]
name = "concurrent-queue"
version = "2.5.0"
@@ -1227,6 +1328,15 @@ dependencies = [
"windows-sys 0.59.0",
]
[[package]]
name = "html-escape"
version = "0.2.13"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6d1ad449764d627e22bfd7cd5e8868264fc9236e07c752972b4080cd351cb476"
dependencies = [
"utf8-width",
]
[[package]]
name = "http"
version = "0.2.12"
@@ -1606,6 +1716,16 @@ version = "1.0.15"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4a5f13b858c8d314ee3e8f639011f7ccefe71f97f96e50151fb991f267928e2c"
[[package]]
name = "jobserver"
version = "0.1.34"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9afb3de4395d6b3e67a780b6de64b51c978ecf11cb9a462c66be7d4ca9039d33"
dependencies = [
"getrandom 0.3.3",
"libc",
]
[[package]]
name = "js-sys"
version = "0.3.77"
@@ -2677,9 +2797,19 @@ dependencies = [
[[package]]
name = "serde"
version = "1.0.219"
version = "1.0.228"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5f0e2c6ed6606019b4e29e69dbaba95b11854410e5347d525002456dbbb786b6"
checksum = "9a8e94ea7f378bd32cbbd37198a4a91436180c5bb472411e48b5ec2e2124ae9e"
dependencies = [
"serde_core",
"serde_derive",
]
[[package]]
name = "serde_core"
version = "1.0.228"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "41d385c7d4ca58e59fc732af25c3983b67ac852c1a25000afe1175de458b67ad"
dependencies = [
"serde_derive",
]
@@ -2695,15 +2825,28 @@ dependencies = [
[[package]]
name = "serde_derive"
version = "1.0.219"
version = "1.0.228"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5b0276cf7f2c73365f7157c8123c21cd9a50fbbd844757af28ca1f5925fc2a00"
checksum = "d540f220d3187173da220f885ab66608367b6574e925011a9353e4badda91d79"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.106",
]
[[package]]
name = "serde_html_form"
version = "0.2.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b2f2d7ff8a2140333718bb329f5c40fc5f0865b84c426183ce14c97d2ab8154f"
dependencies = [
"form_urlencoded",
"indexmap",
"itoa",
"ryu",
"serde_core",
]
[[package]]
name = "serde_json"
version = "1.0.143"
@@ -2776,7 +2919,7 @@ dependencies = [
"static_assertions",
"time",
"tokio",
"tokio-tungstenite",
"tokio-tungstenite 0.21.0",
"tracing",
"typemap_rev",
"typesize",
@@ -3256,6 +3399,15 @@ dependencies = [
"windows-sys 0.60.2",
]
[[package]]
name = "termcolor"
version = "1.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "06794f8f6c5c898b3275aebefa6b8a1cb24cd2c6c79397ab15774837a0bc5755"
dependencies = [
"winapi-util",
]
[[package]]
name = "thiserror"
version = "1.0.69"
@@ -3455,10 +3607,22 @@ dependencies = [
"rustls-pki-types",
"tokio",
"tokio-rustls 0.25.0",
"tungstenite",
"tungstenite 0.21.0",
"webpki-roots 0.26.11",
]
[[package]]
name = "tokio-tungstenite"
version = "0.28.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d25a406cddcc431a75d3d9afc6a7c0f7428d4891dd973e4d54c56b46127bf857"
dependencies = [
"futures-util",
"log",
"tokio",
"tungstenite 0.28.0",
]
[[package]]
name = "tokio-util"
version = "0.7.16"
@@ -3535,14 +3699,17 @@ version = "0.6.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "adc82fd73de2a9722ac5da747f12383d2bfdb93591ee6c58486e0097890f05f2"
dependencies = [
"async-compression",
"bitflags 2.9.4",
"bytes",
"futures-core",
"futures-util",
"http 1.3.1",
"http-body 1.0.1",
"iri-string",
"pin-project-lite",
"tokio",
"tokio-util",
"tower",
"tower-layer",
"tower-service",
@@ -3648,6 +3815,30 @@ version = "0.2.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e421abadd41a4225275504ea4d6566923418b7f05506fbc9c0fe86ba7396114b"
[[package]]
name = "ts-rs"
version = "11.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4994acea2522cd2b3b85c1d9529a55991e3ad5e25cdcd3de9d505972c4379424"
dependencies = [
"chrono",
"serde_json",
"thiserror 2.0.16",
"ts-rs-macros",
]
[[package]]
name = "ts-rs-macros"
version = "11.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ee6ff59666c9cbaec3533964505d39154dc4e0a56151fdea30a09ed0301f62e2"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.106",
"termcolor",
]
[[package]]
name = "tungstenite"
version = "0.21.0"
@@ -3669,6 +3860,23 @@ dependencies = [
"utf-8",
]
[[package]]
name = "tungstenite"
version = "0.28.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8628dcc84e5a09eb3d8423d6cb682965dea9133204e8fb3efee74c2a0c259442"
dependencies = [
"bytes",
"data-encoding",
"http 1.3.1",
"httparse",
"log",
"rand 0.9.2",
"sha1",
"thiserror 2.0.16",
"utf-8",
]
[[package]]
name = "typemap_rev"
version = "0.3.0"
@@ -3770,12 +3978,24 @@ dependencies = [
"serde",
]
[[package]]
name = "urlencoding"
version = "2.1.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "daf8dba3b7eb870caf1ddeed7bc9d2a049f3cfdfae7cb521b087cc33ae4c49da"
[[package]]
name = "utf-8"
version = "0.7.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "09cc8ee72d2a9becf2f2febe0205bbed8fc6615b7cb429ad062dc7b7ddd036a9"
[[package]]
name = "utf8-width"
version = "0.1.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1292c0d970b54115d14f2492fe0170adf21d68a1de108eebc51c1df4f346a091"
[[package]]
name = "utf8_iter"
version = "1.0.4"
@@ -4459,3 +4679,31 @@ dependencies = [
"quote",
"syn 2.0.106",
]
[[package]]
name = "zstd"
version = "0.13.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e91ee311a569c327171651566e07972200e76fcfe2242a4fa446149a3881c08a"
dependencies = [
"zstd-safe",
]
[[package]]
name = "zstd-safe"
version = "7.2.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8f49c4d5f0abb602a93fb8736af2a4f4dd9512e36f7f570d66e65ff867ed3b9d"
dependencies = [
"zstd-sys",
]
[[package]]
name = "zstd-sys"
version = "2.0.16+zstd.1.5.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "91e19ebc2adc8f83e43039e79776e3fda8ca919132d68a1fed6a5faca2683748"
dependencies = [
"cc",
"pkg-config",
]
Cargo.toml  +7 -3
@@ -1,6 +1,6 @@
[package]
name = "banner"
version = "0.3.4"
version = "0.5.0"
edition = "2024"
default-run = "banner"
@@ -11,7 +11,7 @@ embed-assets = ["dep:rust-embed", "dep:mime_guess"]
[dependencies]
anyhow = "1.0.99"
async-trait = "0.1"
axum = "0.8.4"
axum = { version = "0.8.4", features = ["ws"] }
bitflags = { version = "2.9.4", features = ["serde"] }
chrono = { version = "0.4.42", features = ["serde"] }
compile-time = "0.2.0"
@@ -48,13 +48,17 @@ url = "2.5"
governor = "0.10.1"
serde_path_to_error = "0.1.17"
num-format = "0.4.4"
tower-http = { version = "0.6.0", features = ["cors", "trace", "timeout"] }
tower-http = { version = "0.6.0", features = ["cors", "trace", "timeout", "compression-full"] }
rust-embed = { version = "8.0", features = ["include-exclude"], optional = true }
mime_guess = { version = "2.0", optional = true }
clap = { version = "4.5", features = ["derive"] }
rapidhash = "4.1.0"
yansi = "1.0.1"
extension-traits = "2"
ts-rs = { version = "11.1.0", features = ["chrono-impl", "serde-compat", "serde-json-impl", "no-serde-warnings"] }
html-escape = "0.2.13"
axum-extra = { version = "0.12.5", features = ["query"] }
urlencoding = "2.1.3"
[dev-dependencies]
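The new "ws" feature on axum enables the WebSocket upgrade extractor used by the real-time scrape job monitoring commit above. A minimal handler sketch; the route path and message flow are assumptions, not the repo's code:

use axum::extract::ws::{Message, WebSocket, WebSocketUpgrade};
use axum::{Router, response::IntoResponse, routing::get};

async fn jobs_ws(ws: WebSocketUpgrade) -> impl IntoResponse {
    ws.on_upgrade(handle_socket)
}

async fn handle_socket(mut socket: WebSocket) {
    // A real implementation would forward job status updates from a
    // broadcast channel fed by the scraper service.
    let _ = socket.send(Message::Text("connected".into())).await;
}

fn router() -> Router {
    Router::new().route("/api/ws/jobs", get(jobs_ws))
}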
Dockerfile  +7 -4
@@ -7,6 +7,9 @@ FROM oven/bun:1 AS frontend-builder
WORKDIR /app
# Install zstd for pre-compression
RUN apt-get update && apt-get install -y --no-install-recommends zstd && rm -rf /var/lib/apt/lists/*
# Copy backend Cargo.toml for build-time version retrieval
COPY ./Cargo.toml ./
@@ -19,8 +22,8 @@ RUN bun install --frozen-lockfile
# Copy frontend source code
COPY ./web ./
# Build frontend
RUN bun run build
# Build frontend, then pre-compress static assets (gzip, brotli, zstd)
RUN bun run build && bun run scripts/compress-assets.ts
# --- Chef Base Stage ---
FROM lukemathwalker/cargo-chef:latest-rust-${RUST_VERSION} AS chef
@@ -112,5 +115,5 @@ HEALTHCHECK --interval=30s --timeout=3s --start-period=5s --retries=3 \
ENV HOSTS=0.0.0.0,[::]
# Implicitly uses PORT environment variable
# temporary: running without 'scraper' service
CMD ["sh", "-c", "exec ./banner --services web,bot"]
# Runs all services: web, bot, and scraper
CMD ["sh", "-c", "exec ./banner"]
Justfile  +273 -109
@@ -1,48 +1,292 @@
set dotenv-load
default_services := "bot,web,scraper"
default:
just --list
# Run all checks (format, clippy, tests, lint)
check:
cargo fmt --all -- --check
cargo clippy --all-features -- --deny warnings
cargo nextest run
bun run --cwd web typecheck
bun run --cwd web lint
bun run --cwd web test --run
# Run all checks in parallel. Pass -f/--fix to auto-format and fix first.
[script("bun")]
check *flags:
const args = "{{flags}}".split(/\s+/).filter(Boolean);
let fix = false;
for (const arg of args) {
if (arg === "-f" || arg === "--fix") fix = true;
else { console.error(`Unknown flag: ${arg}`); process.exit(1); }
}
# Run all tests (Rust + frontend)
test: test-rust test-web
const run = (cmd) => {
const proc = Bun.spawnSync(cmd, { stdio: ["inherit", "inherit", "inherit"] });
if (proc.exitCode !== 0) process.exit(proc.exitCode);
};
# Run only Rust tests
test-rust *ARGS:
cargo nextest run {{ARGS}}
if (fix) {
console.log("\x1b[1;36m→ Fixing...\x1b[0m");
run(["cargo", "fmt", "--all"]);
run(["bun", "run", "--cwd", "web", "format"]);
run(["cargo", "clippy", "--all-features", "--fix", "--allow-dirty", "--allow-staged",
"--", "--deny", "warnings"]);
console.log("\x1b[1;36m→ Verifying...\x1b[0m");
}
# Run only frontend tests
test-web:
bun run --cwd web test --run
const checks = [
{ name: "rustfmt", cmd: ["cargo", "fmt", "--all", "--", "--check"] },
{ name: "clippy", cmd: ["cargo", "clippy", "--all-features", "--", "--deny", "warnings"] },
{ name: "rust-test", cmd: ["cargo", "nextest", "run", "-E", "not test(export_bindings)"] },
{ name: "svelte-check", cmd: ["bun", "run", "--cwd", "web", "check"] },
{ name: "biome", cmd: ["bun", "run", "--cwd", "web", "format:check"] },
{ name: "web-test", cmd: ["bun", "run", "--cwd", "web", "test"] },
// { name: "sqlx-prepare", cmd: ["cargo", "sqlx", "prepare", "--check"] },
];
# Quick check: clippy + tests only (skips formatting)
check-quick:
cargo clippy --all-features -- --deny warnings
cargo nextest run
bun run --cwd web typecheck
const isTTY = process.stderr.isTTY;
const start = Date.now();
const remaining = new Set(checks.map(c => c.name));
# Run the Banner API search demo (hits live UTSA API, ~20s)
search *ARGS:
cargo run -q --bin search -- {{ARGS}}
const promises = checks.map(async (check) => {
const proc = Bun.spawn(check.cmd, {
env: { ...process.env, FORCE_COLOR: "1" },
stdout: "pipe", stderr: "pipe",
});
const [stdout, stderr] = await Promise.all([
new Response(proc.stdout).text(),
new Response(proc.stderr).text(),
]);
await proc.exited;
return { ...check, stdout, stderr, exitCode: proc.exitCode,
elapsed: ((Date.now() - start) / 1000).toFixed(1) };
});
const interval = isTTY ? setInterval(() => {
const elapsed = ((Date.now() - start) / 1000).toFixed(1);
process.stderr.write(`\r\x1b[K${elapsed}s [${Array.from(remaining).join(", ")}]`);
}, 100) : null;
let anyFailed = false;
for (const promise of promises) {
const r = await promise;
remaining.delete(r.name);
if (isTTY) process.stderr.write(`\r\x1b[K`);
if (r.exitCode !== 0) {
anyFailed = true;
process.stdout.write(`\x1b[31m✗ ${r.name}\x1b[0m (${r.elapsed}s)\n`);
if (r.stdout) process.stdout.write(r.stdout);
if (r.stderr) process.stderr.write(r.stderr);
} else {
process.stdout.write(`\x1b[32m✓ ${r.name}\x1b[0m (${r.elapsed}s)\n`);
}
}
if (interval) clearInterval(interval);
if (isTTY) process.stderr.write(`\r\x1b[K`);
process.exit(anyFailed ? 1 : 0);
# Format all Rust and TypeScript code
format:
cargo fmt --all
bun run --cwd web format
# Check formatting without modifying (CI-friendly)
format-check:
cargo fmt --all -- --check
bun run --cwd web format:check
# Run tests. Usage: just test [rust|web|<nextest filter args>]
[script("bun")]
test *args:
const input = "{{args}}".trim();
const run = (cmd) => {
const proc = Bun.spawnSync(cmd, { stdio: ["inherit", "inherit", "inherit"] });
if (proc.exitCode !== 0) process.exit(proc.exitCode);
};
if (input === "web") {
run(["bun", "run", "--cwd", "web", "test"]);
} else if (input === "rust") {
run(["cargo", "nextest", "run", "-E", "not test(export_bindings)"]);
} else if (input === "") {
run(["cargo", "nextest", "run", "-E", "not test(export_bindings)"]);
run(["bun", "run", "--cwd", "web", "test"]);
} else {
run(["cargo", "nextest", "run", ...input.split(/\s+/)]);
}
# Generate TypeScript bindings from Rust types (ts-rs)
bindings:
cargo test export_bindings
# Run the Banner API search demo (hits live UTSA API, ~20s)
search *ARGS:
cargo run -q --bin search -- {{ARGS}}
# Pass args to binary after --: just dev -n -- --some-flag
# Dev server. Flags: -f(rontend) -b(ackend) -W(no-watch) -n(o-build) -r(elease) -e(mbed) --tracing <fmt>
[script("bun")]
dev *flags:
const argv = "{{flags}}".split(/\s+/).filter(Boolean);
let frontendOnly = false, backendOnly = false;
let noWatch = false, noBuild = false, release = false, embed = false;
let tracing = "pretty";
const passthrough = [];
let i = 0;
let seenDashDash = false;
while (i < argv.length) {
const arg = argv[i];
if (seenDashDash) { passthrough.push(arg); i++; continue; }
if (arg === "--") { seenDashDash = true; i++; continue; }
if (arg.startsWith("--")) {
if (arg === "--frontend-only") frontendOnly = true;
else if (arg === "--backend-only") backendOnly = true;
else if (arg === "--no-watch") noWatch = true;
else if (arg === "--no-build") noBuild = true;
else if (arg === "--release") release = true;
else if (arg === "--embed") embed = true;
else if (arg === "--tracing") { tracing = argv[++i] || "pretty"; }
else { console.error(`Unknown flag: ${arg}`); process.exit(1); }
} else if (arg.startsWith("-") && arg.length > 1) {
for (const c of arg.slice(1)) {
if (c === "f") frontendOnly = true;
else if (c === "b") backendOnly = true;
else if (c === "W") noWatch = true;
else if (c === "n") noBuild = true;
else if (c === "r") release = true;
else if (c === "e") embed = true;
else { console.error(`Unknown flag: -${c}`); process.exit(1); }
}
} else { console.error(`Unknown argument: ${arg}`); process.exit(1); }
i++;
}
// -e implies -b (no point running Vite if assets are embedded)
if (embed) backendOnly = true;
// -n implies -W (no build means no watch)
if (noBuild) noWatch = true;
// Validate conflicting flags
if (frontendOnly && backendOnly) {
console.error("Cannot use -f and -b together (or -e implies -b)");
process.exit(1);
}
const runFrontend = !backendOnly;
const runBackend = !frontendOnly;
const profile = release ? "release" : "dev";
const profileDir = release ? "release" : "debug";
const procs = [];
const cleanup = async () => {
for (const p of procs) p.kill();
await Promise.all(procs.map(p => p.exited));
};
process.on("SIGINT", async () => { await cleanup(); process.exit(0); });
process.on("SIGTERM", async () => { await cleanup(); process.exit(0); });
// Build frontend first when embedding assets (backend will bake them in)
if (embed && !noBuild) {
console.log(`\x1b[1;36m→ Building frontend (for embedding)...\x1b[0m`);
const fb = Bun.spawnSync(["bun", "run", "--cwd", "web", "build"], {
stdio: ["inherit", "inherit", "inherit"],
});
if (fb.exitCode !== 0) process.exit(fb.exitCode);
}
// Frontend: Vite dev server
if (runFrontend) {
const proc = Bun.spawn(["bun", "run", "--cwd", "web", "dev"], {
stdio: ["inherit", "inherit", "inherit"],
});
procs.push(proc);
}
// Backend
if (runBackend) {
const backendArgs = [`--tracing`, tracing, ...passthrough];
const bin = `target/${profileDir}/banner`;
if (noWatch) {
// Build first unless -n (skip build)
if (!noBuild) {
console.log(`\x1b[1;36m→ Building backend (${profile})...\x1b[0m`);
const cargoArgs = ["cargo", "build", "--bin", "banner"];
if (!embed) cargoArgs.push("--no-default-features");
if (release) cargoArgs.push("--release");
const build = Bun.spawnSync(cargoArgs, { stdio: ["inherit", "inherit", "inherit"] });
if (build.exitCode !== 0) { cleanup(); process.exit(build.exitCode); }
}
// Run the binary directly (no watch)
const { existsSync } = await import("fs");
if (!existsSync(bin)) {
console.error(`Binary not found: ${bin}`);
console.error(`Run 'just build${release ? "" : " -d"}' first, or remove -n to use bacon.`);
cleanup();
process.exit(1);
}
console.log(`\x1b[1;36m→ Running ${bin} (no watch)\x1b[0m`);
const proc = Bun.spawn([bin, ...backendArgs], {
stdio: ["inherit", "inherit", "inherit"],
});
procs.push(proc);
} else {
// Bacon watch mode
const baconArgs = ["bacon", "--headless", "run", "--"];
if (!embed) baconArgs.push("--no-default-features");
if (release) baconArgs.push("--profile", "release");
baconArgs.push("--", ...backendArgs);
const proc = Bun.spawn(baconArgs, {
stdio: ["inherit", "inherit", "inherit"],
});
procs.push(proc);
}
}
// Wait for any process to exit, then kill the rest
const results = procs.map((p, i) => p.exited.then(code => ({ i, code })));
const first = await Promise.race(results);
cleanup();
process.exit(first.code);
# Production build. Flags: -d(ebug) -f(rontend-only) -b(ackend-only)
[script("bun")]
build *flags:
const argv = "{{flags}}".split(/\s+/).filter(Boolean);
let debug = false, frontendOnly = false, backendOnly = false;
for (const arg of argv) {
if (arg.startsWith("--")) {
if (arg === "--debug") debug = true;
else if (arg === "--frontend-only") frontendOnly = true;
else if (arg === "--backend-only") backendOnly = true;
else { console.error(`Unknown flag: ${arg}`); process.exit(1); }
} else if (arg.startsWith("-") && arg.length > 1) {
for (const c of arg.slice(1)) {
if (c === "d") debug = true;
else if (c === "f") frontendOnly = true;
else if (c === "b") backendOnly = true;
else { console.error(`Unknown flag: -${c}`); process.exit(1); }
}
} else { console.error(`Unknown argument: ${arg}`); process.exit(1); }
}
if (frontendOnly && backendOnly) {
console.error("Cannot use -f and -b together");
process.exit(1);
}
const run = (cmd) => {
const proc = Bun.spawnSync(cmd, { stdio: ["inherit", "inherit", "inherit"] });
if (proc.exitCode !== 0) process.exit(proc.exitCode);
};
const buildFrontend = !backendOnly;
const buildBackend = !frontendOnly;
const profile = debug ? "debug" : "release";
if (buildFrontend) {
console.log("\x1b[1;36m→ Building frontend...\x1b[0m");
run(["bun", "run", "--cwd", "web", "build"]);
}
if (buildBackend) {
console.log(`\x1b[1;36m→ Building backend (${profile})...\x1b[0m`);
const cmd = ["cargo", "build", "--bin", "banner"];
if (!debug) cmd.push("--release");
run(cmd);
}
# Start PostgreSQL in Docker and update .env with connection string
# Commands: start (default), reset, rm
@@ -112,86 +356,6 @@ db cmd="start":
await updateEnv();
}
# Auto-reloading frontend server
frontend:
bun run --cwd web dev
# Production build of frontend
build-frontend:
bun run --cwd web build
# Auto-reloading backend server (with embedded assets)
backend *ARGS:
bacon --headless run -- -- {{ARGS}}
# Auto-reloading backend server (no embedded assets, for dev proxy mode)
backend-dev *ARGS:
bacon --headless run -- --no-default-features -- {{ARGS}}
# Production build
build:
bun run --cwd web build
cargo build --release --bin banner
# Run auto-reloading development build with release characteristics
dev-build *ARGS='--services web --tracing pretty': build-frontend
bacon --headless run -- --profile dev-release -- {{ARGS}}
# Auto-reloading development build: Vite frontend + backend (no embedded assets, proxies to Vite)
[parallel]
dev *ARGS='--services web,bot': frontend (backend-dev ARGS)
# Smoke test: start web server, hit API endpoints, verify responses
[script("bash")]
test-smoke port="18080":
set -euo pipefail
PORT={{port}}
cleanup() { kill "$SERVER_PID" 2>/dev/null; wait "$SERVER_PID" 2>/dev/null; }
# Start server in background
PORT=$PORT cargo run -q --no-default-features -- --services web --tracing json &
SERVER_PID=$!
trap cleanup EXIT
# Wait for server to be ready (up to 15s)
for i in $(seq 1 30); do
if curl -sf "http://localhost:$PORT/api/health" >/dev/null 2>&1; then break; fi
if ! kill -0 "$SERVER_PID" 2>/dev/null; then echo "FAIL: server exited early"; exit 1; fi
sleep 0.5
done
PASS=0; FAIL=0
check() {
local label="$1" url="$2" expected="$3"
body=$(curl -sf "$url") || { echo "FAIL: $label - request failed"; FAIL=$((FAIL+1)); return; }
if echo "$body" | grep -q "$expected"; then
echo "PASS: $label"
PASS=$((PASS+1))
else
echo "FAIL: $label - expected '$expected' in: $body"
FAIL=$((FAIL+1))
fi
}
check "GET /api/health" "http://localhost:$PORT/api/health" '"status":"healthy"'
check "GET /api/status" "http://localhost:$PORT/api/status" '"version"'
check "GET /api/metrics" "http://localhost:$PORT/api/metrics" '"banner_api"'
# Test 404
STATUS=$(curl -s -o /dev/null -w "%{http_code}" "http://localhost:$PORT/api/nonexistent")
if [ "$STATUS" = "404" ]; then
echo "PASS: 404 on unknown route"
PASS=$((PASS+1))
else
echo "FAIL: expected 404, got $STATUS"
FAIL=$((FAIL+1))
fi
echo ""
echo "Results: $PASS passed, $FAIL failed"
[ "$FAIL" -eq 0 ]
alias b := bun
bun *ARGS:
cd web && bun {{ ARGS }}
README.md  +1 -2
@@ -29,8 +29,7 @@ The application consists of three modular services that can be run independently
bun install --cwd web # Install frontend dependencies
cargo build # Build the backend
just dev # Runs auto-reloading dev build
just dev --services bot,web # Runs auto-reloading dev build, running only the bot and web services
just dev # Runs auto-reloading dev build with all services
just dev-build # Development build with release characteristics (frontend is embedded, non-auto-reloading)
just build # Production build that embeds assets
docs/ARCHITECTURE.md  +27 -4
@@ -4,10 +4,33 @@
The Banner project is built as a multi-service application with the following components:
- **Discord Bot Service**: Handles Discord interactions and commands
- **Web Service**: Serves the React frontend and provides API endpoints
- **Scraper Service**: Background data collection and synchronization
- **Database Layer**: PostgreSQL for persistent storage
- **Discord Bot Service**: Handles Discord interactions and commands (Serenity/Poise)
- **Web Service**: Axum HTTP server serving the SvelteKit frontend and REST API endpoints
- **Scraper Service**: Background data collection and synchronization with job queue
- **Database Layer**: PostgreSQL 17 for persistent storage (SQLx with compile-time verification)
- **RateMyProfessors Client**: GraphQL-based bulk sync of professor ratings
### Frontend Stack
- **SvelteKit** with Svelte 5 runes (`$state`, `$derived`, `$effect`)
- **Tailwind CSS v4** via `@tailwindcss/vite`
- **bits-ui** for headless UI primitives (comboboxes, tooltips, dropdowns)
- **TanStack Table** for interactive data tables with sorting and column control
- **OverlayScrollbars** for styled, theme-aware scrollable areas
- **ts-rs** generates TypeScript type bindings from Rust structs
### API Endpoints
| Endpoint | Description |
|---|---|
| `GET /api/health` | Health check |
| `GET /api/status` | Service status, version, and commit hash |
| `GET /api/metrics` | Basic metrics |
| `GET /api/courses/search` | Paginated course search with filters (term, subject, query, open-only, sort) |
| `GET /api/courses/:term/:crn` | Single course detail with instructors and RMP ratings |
| `GET /api/terms` | Available terms from reference cache |
| `GET /api/subjects?term=` | Subjects for a term, ordered by enrollment |
| `GET /api/reference/:category` | Reference data lookups (campuses, instructional methods, etc.) |
## Technical Analysis
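A sketch of how the search endpoint's filters from the table above might deserialize. The parameter names are assumptions inferred from the table; the Cargo.toml diff adds axum-extra's "query" feature, whose Query extractor supports the repeated params a multi-select needs.

#[derive(serde::Deserialize)]
struct SearchParams {
    term: String,
    subject: Option<Vec<String>>, // repeated ?subject= params (multi-select)
    query: Option<String>,
    open_only: Option<bool>,
    sort: Option<String>,
    page: Option<u32>,
    page_size: Option<u32>,
}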
docs/CHANGELOG.md  +61
@@ -0,0 +1,61 @@
# Changelog
All notable changes to this project will be documented in this file.
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.1.0/).
## [Unreleased]
## [0.5.0] - 2026-01-29
### Added
- Multi-select subject filtering with searchable comboboxes.
- Smart instructor name abbreviation for compact table display.
- Delivery mode indicators and tooltips in location column.
- Page selector dropdown with animated pagination controls.
- FLIP animations for smooth table row transitions during pagination.
- Time tooltip with detailed meeting schedule and day abbreviations.
- Reusable SimpleTooltip component for consistent UI hints.
### Changed
- Consolidated query logic and eliminated N+1 instructor loads via batch fetching.
- Consolidated menu snippets and strengthened component type safety.
- Enhanced table scrolling with OverlayScrollbars and theme-aware styling.
- Eliminated initial theme flash on page load.
## [0.4.0] - 2026-01-28
### Added
- Web-based course search UI with interactive data table, multi-column sorting, and column visibility controls.
- TypeScript type bindings generated from Rust types via ts-rs.
- RateMyProfessors integration: bulk professor sync via GraphQL and inline rating display in search results.
- Course detail expansion panel with enrollment, meeting times, and instructor info.
- OverlayScrollbars integration for styled, theme-aware scrollable areas.
- Pagination component for navigating large search result sets.
- Footer component with version display.
- API endpoints: `/api/courses/search`, `/api/courses/:term/:crn`, `/api/terms`, `/api/subjects`, `/api/reference/:category`.
- Frontend API client with typed request/response handling and test coverage.
- Course formatting utilities with comprehensive unit tests.
## [0.3.4] - 2026-01
### Added
- Live service status tracking on web dashboard with auto-refresh and health indicators.
- DB operation extraction for improved testability.
- Unit test suite foundation covering core functionality.
- Docker support for PostgreSQL development environment.
- ICS calendar export with comprehensive holiday exclusion coverage.
- Google Calendar link generation with recurrence rules and meeting details.
- Job queue with priority-based scheduling for background scraping.
- Rate limiting with burst allowance for Banner API requests.
- Session management and caching for Banner API interactions.
- Discord bot commands: search, terms, ics, gcal.
- Intelligent scraping system with priority queues and retry tracking.
### Changed
- Type consolidation and dead code removal across the codebase.
docs/FEATURES.md  -58
@@ -1,58 +0,0 @@
# Features
## Current Features
### Discord Bot Commands
- **search** - Search for courses with various filters (title, course code, keywords)
- **terms** - List available terms or search for a specific term
- **time** - Get meeting times for a specific course (CRN)
- **ics** - Generate ICS calendar file for a course with holiday exclusions
- **gcal** - Generate Google Calendar link for a course
### Data Pipeline
- Intelligent scraping system with priority queues
- Rate limiting and burst handling
- Background data synchronization
## Feature Wishlist
### Commands
- ICS Download (get an ICS download of your classes with location & timing perfectly set for every class you're in)
- Classes Now (find classes happening)
- Autocomplete
- Class Title
- Course Number
- Term/Part of Term
- Professor
- Attribute
- Component Pagination
- RateMyProfessor Integration (Linked/Embedded)
- Smart term selection (i.e. Summer 2024 will be selected automatically when opened)
- Rate Limiting (bursting with global/user limits)
- DMs Integration (allow usage of the bot in DMs)
- Class Change Notifications (get notified when details about a class change)
- Multi-term Querying (currently the backend for searching is kinda weird)
- Full Autocomplete for Every Search Option
- Metrics, Log Query, Privileged Error Feedback
- Search for Classes
- Major, Professor, Location, Name, Time of Day
- Subscribe to Classes
- Availability (seat, pre-seat)
- Waitlist Movement
- Detail Changes (meta, time, location, seats, professor)
- `time` Start, End, Days of Week
- `seats` Any change in seat/waitlist data
- `meta`
- Lookup via Course Reference Number (CRN)
- Smart Time of Day Handling
- "2 PM" -> Start within 2:00 PM to 2:59 PM
- "2-3 PM" -> Start within 2:00 PM to 3:59 PM
- "ends by 2 PM" -> Ends within 12:00 AM to 2:00 PM
- "after 2 PM" -> Start within 2:01 PM to 11:59 PM
- "before 2 PM" -> Ends within 12:00 AM to 1:59 PM
- Get By Section Command
- CS 4393 001 =>
- Will require SQL to be able to search for a class by its section number
docs/README.md  +2 -1
@@ -4,7 +4,8 @@ This folder contains detailed documentation for the Banner project. This file ac
## Files
- [`FEATURES.md`](FEATURES.md) - Current features, implemented functionality, and future roadmap
- [`CHANGELOG.md`](CHANGELOG.md) - Notable changes by version
- [`ROADMAP.md`](ROADMAP.md) - Planned features and priorities
- [`BANNER.md`](BANNER.md) - General API documentation on the Banner system
- [`ARCHITECTURE.md`](ARCHITECTURE.md) - Technical implementation details, system design, and analysis
docs/ROADMAP.md  +35
@@ -0,0 +1,35 @@
# Roadmap
## Now
- **Notification and subscription system** - Subscribe to courses and get alerts on seat availability, waitlist movement, and detail changes (time, location, professor, seats). DB schema exists.
- **Professor name search filter** - Filter search results by instructor. Backend code exists but is commented out.
- **Autocomplete for search fields** - Typeahead for course titles, course numbers, professors, and terms.
- **Test coverage expansion** - Broaden coverage with session/rate-limiter tests and more DB integration tests.
## Soon
- **Smart time-of-day search parsing** - Support natural queries like "2 PM", "2-3 PM", "ends by 2 PM", "after 2 PM", "before 2 PM" mapped to time ranges.
- **Section-based lookup** - Search by full section identifier, e.g. "CS 4393 001".
- **Search result pagination** - Paginated embeds for large result sets in Discord.
- **Multi-term querying** - Query across multiple terms in a single search instead of one at a time.
- **Historical analytics** - Track seat availability over time and visualize fill-rate trends per course or professor.
- **Schedule builder** - Visual weekly schedule tool for assembling a conflict-free course lineup.
- **Professor stats** - Aggregate data views: average class size, typical waitlist length, schedule patterns across semesters.
## Eventually
- **Degree audit helper** - Map available courses to degree requirements and suggest what to take next.
- **Dynamic scraper scheduling** - Adjust scrape intervals based on change frequency and course count (e.g. 2 hours per 500 courses, shorter intervals when changes are detected).
- **DM support** - Allow the Discord bot to respond in direct messages, not just guild channels.
- **"Classes Now" command** - Find classes currently in session based on the current day and time.
- **CRN direct lookup** - Look up a course by its CRN without going through search.
- **Metrics dashboard** - Surface scraper and service metrics visually on the web dashboard.
- **Privileged error feedback** - Detailed error information surfaced to bot admins when commands fail.
## Done
- **Web course search UI** - Browser-based course search with interactive data table, sorting, pagination, and column controls. *(0.4.0)*
- **RateMyProfessor integration** - Bulk professor sync via GraphQL with inline ratings in search results. *(0.4.0)*
- **Subject/major search filter** - Multi-select subject filtering with searchable comboboxes. *(0.5.0)*
- **Test coverage expansion** - Unit tests for course formatting, API client, query builder, CLI args, and config parsing. *(0.3.4–0.4.0)*
@@ -0,0 +1,83 @@
-- ============================================================
-- Expand courses table with rich Banner API fields
-- ============================================================
-- Section identifiers
ALTER TABLE courses ADD COLUMN sequence_number VARCHAR;
ALTER TABLE courses ADD COLUMN part_of_term VARCHAR;
-- Schedule & delivery (store codes, descriptions come from reference_data)
ALTER TABLE courses ADD COLUMN instructional_method VARCHAR;
ALTER TABLE courses ADD COLUMN campus VARCHAR;
-- Credit hours
ALTER TABLE courses ADD COLUMN credit_hours INTEGER;
ALTER TABLE courses ADD COLUMN credit_hour_low INTEGER;
ALTER TABLE courses ADD COLUMN credit_hour_high INTEGER;
-- Cross-listing
ALTER TABLE courses ADD COLUMN cross_list VARCHAR;
ALTER TABLE courses ADD COLUMN cross_list_capacity INTEGER;
ALTER TABLE courses ADD COLUMN cross_list_count INTEGER;
-- Section linking
ALTER TABLE courses ADD COLUMN link_identifier VARCHAR;
ALTER TABLE courses ADD COLUMN is_section_linked BOOLEAN;
-- JSONB columns for 1-to-many data
ALTER TABLE courses ADD COLUMN meeting_times JSONB NOT NULL DEFAULT '[]'::jsonb;
ALTER TABLE courses ADD COLUMN attributes JSONB NOT NULL DEFAULT '[]'::jsonb;
-- ============================================================
-- Full-text search support
-- ============================================================
-- Generated tsvector for word-based search on title
ALTER TABLE courses ADD COLUMN title_search tsvector
GENERATED ALWAYS AS (to_tsvector('simple', coalesce(title, ''))) STORED;
CREATE INDEX idx_courses_title_search ON courses USING GIN (title_search);
-- Trigram index for substring/ILIKE search on title
CREATE EXTENSION IF NOT EXISTS pg_trgm;
CREATE INDEX idx_courses_title_trgm ON courses USING GIN (title gin_trgm_ops);
-- ============================================================
-- New filter indexes
-- ============================================================
CREATE INDEX idx_courses_instructional_method ON courses(instructional_method);
CREATE INDEX idx_courses_campus ON courses(campus);
-- Composite for "open CS courses in Fall 2024" pattern
CREATE INDEX idx_courses_term_subject_avail ON courses(term_code, subject, max_enrollment, enrollment);
-- ============================================================
-- Instructors table (normalized, deduplicated)
-- ============================================================
CREATE TABLE instructors (
banner_id VARCHAR PRIMARY KEY,
display_name VARCHAR NOT NULL,
email VARCHAR
);
CREATE TABLE course_instructors (
course_id INTEGER NOT NULL REFERENCES courses(id) ON DELETE CASCADE,
instructor_id VARCHAR NOT NULL REFERENCES instructors(banner_id) ON DELETE CASCADE,
is_primary BOOLEAN NOT NULL DEFAULT false,
PRIMARY KEY (course_id, instructor_id)
);
CREATE INDEX idx_course_instructors_instructor ON course_instructors(instructor_id);
-- ============================================================
-- Reference data table (all code→description lookups)
-- ============================================================
CREATE TABLE reference_data (
category VARCHAR NOT NULL,
code VARCHAR NOT NULL,
description VARCHAR NOT NULL,
PRIMARY KEY (category, code)
);
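A sketch of how a title search can lean on both indexes created above: the GIN tsvector index serves word queries, while the trigram index accelerates the ILIKE fallback for substrings. The query is an assumption; the real query builder lives in the backend.

async fn search_titles(pool: &sqlx::PgPool, q: &str) -> sqlx::Result<Vec<(i32, String)>> {
    // Word match via title_search, substring match via the trgm index.
    sqlx::query_as(
        "SELECT id, title FROM courses \
         WHERE title_search @@ plainto_tsquery('simple', $1) \
            OR title ILIKE '%' || $1 || '%' \
         LIMIT 50",
    )
    .bind(q)
    .fetch_all(pool)
    .await
}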
@@ -0,0 +1,17 @@
-- RMP professor data (bulk synced from RateMyProfessors)
CREATE TABLE rmp_professors (
legacy_id INTEGER PRIMARY KEY,
graphql_id VARCHAR NOT NULL,
first_name VARCHAR NOT NULL,
last_name VARCHAR NOT NULL,
department VARCHAR,
avg_rating REAL,
avg_difficulty REAL,
num_ratings INTEGER NOT NULL DEFAULT 0,
would_take_again_pct REAL,
last_synced_at TIMESTAMPTZ NOT NULL DEFAULT NOW()
);
-- Link Banner instructors to RMP professors
ALTER TABLE instructors ADD COLUMN rmp_legacy_id INTEGER REFERENCES rmp_professors(legacy_id);
ALTER TABLE instructors ADD COLUMN rmp_match_status VARCHAR NOT NULL DEFAULT 'pending';
@@ -0,0 +1,7 @@
-- Add queued_at column to track when a job last entered the "ready to pick up" state.
-- For fresh jobs this equals execute_at; for retried jobs it is updated to NOW().
ALTER TABLE scrape_jobs
ADD COLUMN queued_at TIMESTAMPTZ NOT NULL DEFAULT NOW();
-- Backfill existing rows: set queued_at = execute_at (best approximation)
UPDATE scrape_jobs SET queued_at = execute_at;
@@ -0,0 +1,19 @@
CREATE TABLE users (
discord_id BIGINT PRIMARY KEY,
discord_username TEXT NOT NULL,
discord_avatar_hash TEXT,
is_admin BOOLEAN NOT NULL DEFAULT false,
created_at TIMESTAMPTZ NOT NULL DEFAULT now(),
updated_at TIMESTAMPTZ NOT NULL DEFAULT now()
);
CREATE TABLE user_sessions (
id TEXT PRIMARY KEY,
user_id BIGINT NOT NULL REFERENCES users(discord_id) ON DELETE CASCADE,
created_at TIMESTAMPTZ NOT NULL DEFAULT now(),
expires_at TIMESTAMPTZ NOT NULL,
last_active_at TIMESTAMPTZ NOT NULL DEFAULT now()
);
CREATE INDEX idx_user_sessions_user_id ON user_sessions(user_id);
CREATE INDEX idx_user_sessions_expires_at ON user_sessions(expires_at);
@@ -0,0 +1,80 @@
-- Collapse instructors from per-banner-id rows to per-person rows (deduped by lowercased email).
-- All existing RMP matches are deliberately dropped; the new auto-matcher will re-score from scratch.
-- 1. Create the new instructors table (1 row per person, keyed by email)
CREATE TABLE instructors_new (
id SERIAL PRIMARY KEY,
display_name VARCHAR NOT NULL,
email VARCHAR NOT NULL,
rmp_professor_id INTEGER UNIQUE REFERENCES rmp_professors(legacy_id),
rmp_match_status VARCHAR NOT NULL DEFAULT 'unmatched',
CONSTRAINT instructors_email_unique UNIQUE (email)
);
-- 2. Populate from existing data, deduplicating by lowercased email.
-- For each email, pick the display_name from the row with the highest banner_id
-- (deterministic tiebreaker). All rmp fields start fresh (NULL / 'unmatched').
INSERT INTO instructors_new (display_name, email)
SELECT DISTINCT ON (LOWER(email))
display_name,
LOWER(email)
FROM instructors
ORDER BY LOWER(email), banner_id DESC;
-- 3. Create the new course_instructors table with integer FK and banner_id column
CREATE TABLE course_instructors_new (
course_id INTEGER NOT NULL REFERENCES courses(id) ON DELETE CASCADE,
instructor_id INTEGER NOT NULL REFERENCES instructors_new(id) ON DELETE CASCADE,
banner_id VARCHAR NOT NULL,
is_primary BOOLEAN NOT NULL DEFAULT false,
PRIMARY KEY (course_id, instructor_id)
);
-- 4. Populate from old data, mapping old banner_id → new instructor id via lowercased email.
-- Use DISTINCT ON to handle cases where multiple old banner_ids for the same person
-- taught the same course (would cause duplicate (course_id, instructor_id) pairs).
INSERT INTO course_instructors_new (course_id, instructor_id, banner_id, is_primary)
SELECT DISTINCT ON (ci.course_id, inew.id)
ci.course_id,
inew.id,
ci.instructor_id, -- old banner_id
ci.is_primary
FROM course_instructors ci
JOIN instructors iold ON iold.banner_id = ci.instructor_id
JOIN instructors_new inew ON inew.email = LOWER(iold.email)
ORDER BY ci.course_id, inew.id, ci.is_primary DESC;
-- 5. Drop old tables (course_instructors first due to FK dependency)
DROP TABLE course_instructors;
DROP TABLE instructors;
-- 6. Rename new tables into place
ALTER TABLE instructors_new RENAME TO instructors;
ALTER TABLE course_instructors_new RENAME TO course_instructors;
-- 7. Rename constraints to match the final table names
ALTER TABLE instructors RENAME CONSTRAINT instructors_new_pkey TO instructors_pkey;
ALTER TABLE instructors RENAME CONSTRAINT instructors_new_rmp_professor_id_key TO instructors_rmp_professor_id_key;
ALTER TABLE course_instructors RENAME CONSTRAINT course_instructors_new_pkey TO course_instructors_pkey;
-- 8. Recreate indexes
CREATE INDEX idx_course_instructors_instructor ON course_instructors (instructor_id);
CREATE INDEX idx_instructors_rmp_status ON instructors (rmp_match_status);
CREATE INDEX idx_instructors_email ON instructors (email);
-- 9. Create rmp_match_candidates table
CREATE TABLE rmp_match_candidates (
id SERIAL PRIMARY KEY,
instructor_id INTEGER NOT NULL REFERENCES instructors(id) ON DELETE CASCADE,
rmp_legacy_id INTEGER NOT NULL REFERENCES rmp_professors(legacy_id),
score REAL NOT NULL,
score_breakdown JSONB NOT NULL DEFAULT '{}',
status VARCHAR NOT NULL DEFAULT 'pending',
created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
resolved_at TIMESTAMPTZ,
resolved_by BIGINT REFERENCES users(discord_id),
CONSTRAINT uq_candidate_pair UNIQUE (instructor_id, rmp_legacy_id)
);
CREATE INDEX idx_match_candidates_instructor ON rmp_match_candidates (instructor_id);
CREATE INDEX idx_match_candidates_status ON rmp_match_candidates (status);
@@ -0,0 +1,24 @@
-- Multi-RMP profile support: allow many RMP profiles per instructor.
-- Each RMP profile still links to at most one instructor (rmp_legacy_id UNIQUE).
-- 1. Create junction table
CREATE TABLE instructor_rmp_links (
id SERIAL PRIMARY KEY,
instructor_id INTEGER NOT NULL REFERENCES instructors(id) ON DELETE CASCADE,
rmp_legacy_id INTEGER NOT NULL UNIQUE REFERENCES rmp_professors(legacy_id),
created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
created_by BIGINT REFERENCES users(discord_id),
source VARCHAR NOT NULL DEFAULT 'manual' -- 'auto' | 'manual'
);
CREATE INDEX idx_instructor_rmp_links_instructor ON instructor_rmp_links (instructor_id);
-- 2. Migrate existing matches
INSERT INTO instructor_rmp_links (instructor_id, rmp_legacy_id, source)
SELECT id, rmp_professor_id,
CASE rmp_match_status WHEN 'auto' THEN 'auto' ELSE 'manual' END
FROM instructors
WHERE rmp_professor_id IS NOT NULL;
-- 3. Drop old column (and its unique constraint)
ALTER TABLE instructors DROP COLUMN rmp_professor_id;
@@ -0,0 +1,31 @@
-- Scrape job results log: one row per completed (or failed) job for effectiveness tracking.
CREATE TABLE scrape_job_results (
id BIGSERIAL PRIMARY KEY,
target_type target_type NOT NULL,
payload JSONB NOT NULL,
priority scrape_priority NOT NULL,
-- Timing
queued_at TIMESTAMPTZ NOT NULL,
started_at TIMESTAMPTZ NOT NULL,
completed_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
duration_ms INT NOT NULL,
-- Outcome
success BOOLEAN NOT NULL,
error_message TEXT,
retry_count INT NOT NULL DEFAULT 0,
-- Effectiveness (NULL when success = false)
courses_fetched INT,
courses_changed INT,
courses_unchanged INT,
audits_generated INT,
metrics_generated INT
);
CREATE INDEX idx_scrape_job_results_target_time
ON scrape_job_results (target_type, completed_at);
CREATE INDEX idx_scrape_job_results_completed
ON scrape_job_results (completed_at);
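An example of the effectiveness rollup this table enables (an assumed query, not part of the migration):

async fn change_rate_by_target(pool: &sqlx::PgPool) -> sqlx::Result<Vec<(String, Option<f64>)>> {
    // Fraction of fetched courses that actually changed, per target type.
    sqlx::query_as(
        "SELECT target_type::text, \
                avg(courses_changed::float8 / NULLIF(courses_fetched, 0)) \
         FROM scrape_job_results WHERE success GROUP BY 1",
    )
    .fetch_all(pool)
    .await
}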
+27 -2
@@ -6,13 +6,14 @@ use crate::services::bot::BotService;
use crate::services::manager::ServiceManager;
use crate::services::web::WebService;
use crate::state::AppState;
use crate::web::auth::AuthConfig;
use anyhow::Context;
use figment::value::UncasedStr;
use figment::{Figment, providers::Env};
use sqlx::postgres::PgPoolOptions;
use std::process::ExitCode;
use std::sync::Arc;
use std::time::Duration;
use anyhow::Context;
use tracing::{error, info};
/// Main application struct containing all necessary components
@@ -79,6 +80,19 @@ impl App {
let banner_api_arc = Arc::new(banner_api);
let app_state = AppState::new(banner_api_arc.clone(), db_pool.clone());
// Load reference data cache from DB (may be empty on first run)
if let Err(e) = app_state.load_reference_cache().await {
info!(error = ?e, "Could not load reference cache on startup (may be empty)");
}
// Seed the initial admin user if configured
if let Some(admin_id) = config.admin_discord_id {
let user = crate::data::users::ensure_seed_admin(&db_pool, admin_id as i64)
.await
.context("Failed to seed admin user")?;
info!(discord_id = admin_id, username = %user.discord_username, "Seed admin ensured");
}
Ok(App {
config,
db_pool,
@@ -92,7 +106,16 @@ impl App {
pub fn setup_services(&mut self, services: &[ServiceName]) -> Result<(), anyhow::Error> {
// Register enabled services with the manager
if services.contains(&ServiceName::Web) {
let web_service = Box::new(WebService::new(self.config.port, self.app_state.clone()));
let auth_config = AuthConfig {
client_id: self.config.discord_client_id.clone(),
client_secret: self.config.discord_client_secret.clone(),
redirect_base: self.config.discord_redirect_uri.clone(),
};
let web_service = Box::new(WebService::new(
self.config.port,
self.app_state.clone(),
auth_config,
));
self.service_manager
.register_service(ServiceName::Web.as_str(), web_service);
}
@@ -101,7 +124,9 @@ impl App {
let scraper_service = Box::new(ScraperService::new(
self.db_pool.clone(),
self.banner_api.clone(),
self.app_state.reference_cache.clone(),
self.app_state.service_statuses.clone(),
self.app_state.scrape_job_tx.clone(),
));
self.service_manager
.register_service(ServiceName::Scraper.as_str(), scraper_service);
+26 -3
@@ -40,9 +40,9 @@ impl BannerApi {
.cookie_store(false)
.user_agent(user_agent())
.tcp_keepalive(Some(std::time::Duration::from_secs(60 * 5)))
.read_timeout(std::time::Duration::from_secs(10))
.connect_timeout(std::time::Duration::from_secs(10))
.timeout(std::time::Duration::from_secs(30))
.read_timeout(std::time::Duration::from_secs(20))
.connect_timeout(std::time::Duration::from_secs(15))
.timeout(std::time::Duration::from_secs(40))
.build()
.context("Failed to create HTTP client")?,
)
@@ -228,6 +228,29 @@ impl BannerApi {
.await
}
/// Retrieves campus codes and descriptions.
pub async fn get_campuses(&self, term: &str) -> Result<Vec<Pair>> {
self.get_list_endpoint("get_campus", "", term, 1, 500).await
}
/// Retrieves instructional method codes and descriptions.
pub async fn get_instructional_methods(&self, term: &str) -> Result<Vec<Pair>> {
self.get_list_endpoint("get_instructionalMethod", "", term, 1, 500)
.await
}
/// Retrieves part-of-term codes and descriptions.
pub async fn get_parts_of_term(&self, term: &str) -> Result<Vec<Pair>> {
self.get_list_endpoint("get_partOfTerm", "", term, 1, 500)
.await
}
/// Retrieves section attribute codes and descriptions.
pub async fn get_attributes(&self, term: &str) -> Result<Vec<Pair>> {
self.get_list_endpoint("get_attribute", "", term, 1, 500)
.await
}
/// Retrieves meeting time information for a course.
pub async fn get_course_meeting_time(
&self,
+10 -7
@@ -1,4 +1,4 @@
use bitflags::{bitflags, Flags};
use bitflags::{Flags, bitflags};
use chrono::{DateTime, NaiveDate, NaiveTime, Timelike, Utc, Weekday};
use extension_traits::extension;
use serde::{Deserialize, Deserializer, Serialize};
@@ -320,10 +320,11 @@ pub enum MeetingType {
Unknown(String),
}
impl MeetingType {
/// Parse from the meeting type string
pub fn from_string(s: &str) -> Self {
match s {
impl std::str::FromStr for MeetingType {
type Err = std::convert::Infallible;
fn from_str(s: &str) -> std::result::Result<Self, Self::Err> {
Ok(match s {
"HB" | "H2" | "H1" => MeetingType::HybridBlended,
"OS" => MeetingType::OnlineSynchronous,
"OA" => MeetingType::OnlineAsynchronous,
@@ -331,9 +332,11 @@ impl MeetingType {
"ID" => MeetingType::IndependentStudy,
"FF" => MeetingType::FaceToFace,
other => MeetingType::Unknown(other.to_string()),
}
})
}
}
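// A minimal usage sketch: because the error type is Infallible, `.parse::<MeetingType>()`
// can never fail, which is why the `.unwrap()` in MeetingScheduleInfo below is safe by
// construction. The mod name here is illustrative.
#[cfg(test)]
mod meeting_type_parse_sketch {
    use super::*;

    #[test]
    fn parse_never_fails() {
        let mt: MeetingType = "OS".parse().unwrap();
        assert!(matches!(mt, MeetingType::OnlineSynchronous));
        // Unrecognized codes fall back to Unknown instead of erroring.
        let other: MeetingType = "ZZ".parse().unwrap();
        assert!(matches!(other, MeetingType::Unknown(ref s) if s == "ZZ"));
    }
}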
impl MeetingType {
/// Get description for the meeting type
pub fn description(&self) -> &'static str {
match self {
@@ -424,7 +427,7 @@ impl MeetingScheduleInfo {
end: now,
}
});
let meeting_type = MeetingType::from_string(&meeting_time.meeting_type);
let meeting_type: MeetingType = meeting_time.meeting_type.parse().unwrap();
let location = MeetingLocation::from_meeting_time(meeting_time);
let duration_weeks = date_range.weeks_duration();
+3 -14
@@ -10,8 +10,9 @@ pub struct Range {
pub high: i32,
}
/// Builder for constructing Banner API search queries
/// Builder for constructing Banner API search queries.
#[derive(Debug, Clone, Default)]
#[allow(dead_code)]
pub struct SearchQuery {
subject: Option<String>,
title: Option<String>,
@@ -32,6 +33,7 @@ pub struct SearchQuery {
course_number_range: Option<Range>,
}
#[allow(dead_code)]
impl SearchQuery {
/// Creates a new SearchQuery with default values
pub fn new() -> Self {
@@ -67,7 +69,6 @@ impl SearchQuery {
}
/// Adds a keyword to the query
#[allow(dead_code)]
pub fn keyword<S: Into<String>>(mut self, keyword: S) -> Self {
match &mut self.keywords {
Some(keywords) => keywords.push(keyword.into()),
@@ -77,63 +78,54 @@ impl SearchQuery {
}
/// Sets whether to search for open courses only
#[allow(dead_code)]
pub fn open_only(mut self, open_only: bool) -> Self {
self.open_only = Some(open_only);
self
}
/// Sets the term part for the query
#[allow(dead_code)]
pub fn term_part(mut self, term_part: Vec<String>) -> Self {
self.term_part = Some(term_part);
self
}
/// Sets the campuses for the query
#[allow(dead_code)]
pub fn campus(mut self, campus: Vec<String>) -> Self {
self.campus = Some(campus);
self
}
/// Sets the instructional methods for the query
#[allow(dead_code)]
pub fn instructional_method(mut self, instructional_method: Vec<String>) -> Self {
self.instructional_method = Some(instructional_method);
self
}
/// Sets the attributes for the query
#[allow(dead_code)]
pub fn attributes(mut self, attributes: Vec<String>) -> Self {
self.attributes = Some(attributes);
self
}
/// Sets the instructors for the query
#[allow(dead_code)]
pub fn instructor(mut self, instructor: Vec<u64>) -> Self {
self.instructor = Some(instructor);
self
}
/// Sets the start time for the query
#[allow(dead_code)]
pub fn start_time(mut self, start_time: Duration) -> Self {
self.start_time = Some(start_time);
self
}
/// Sets the end time for the query
#[allow(dead_code)]
pub fn end_time(mut self, end_time: Duration) -> Self {
self.end_time = Some(end_time);
self
}
/// Sets the credit range for the query
#[allow(dead_code)]
pub fn credits(mut self, low: i32, high: i32) -> Self {
self.min_credits = Some(low);
self.max_credits = Some(high);
@@ -141,14 +133,12 @@ impl SearchQuery {
}
/// Sets the minimum credits for the query
#[allow(dead_code)]
pub fn min_credits(mut self, value: i32) -> Self {
self.min_credits = Some(value);
self
}
/// Sets the maximum credits for the query
#[allow(dead_code)]
pub fn max_credits(mut self, value: i32) -> Self {
self.max_credits = Some(value);
self
@@ -161,7 +151,6 @@ impl SearchQuery {
}
/// Sets the offset for pagination
#[allow(dead_code)]
pub fn offset(mut self, offset: i32) -> Self {
self.offset = offset;
self
+5 -1
@@ -452,7 +452,11 @@ impl SessionPool {
self.select_term(&term.to_string(), &unique_session_id, &cookie_header)
.await?;
Ok(BannerSession::new(&unique_session_id, jsessionid, ssb_cookie))
Ok(BannerSession::new(
&unique_session_id,
jsessionid,
ssb_cookie,
))
}
/// Retrieves a list of terms from the Banner API.
+3 -1
@@ -18,7 +18,9 @@ fn nth_weekday_of_month(year: i32, month: u32, weekday: Weekday, n: u32) -> Opti
/// Compute a consecutive range of dates starting from `start` for `count` days.
fn date_range(start: NaiveDate, count: i64) -> Vec<NaiveDate> {
(0..count).filter_map(|i| start.checked_add_signed(Duration::days(i))).collect()
(0..count)
.filter_map(|i| start.checked_add_signed(Duration::days(i)))
.collect()
}
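// e.g. date_range(2026-01-01, 3) -> [2026-01-01, 2026-01-02, 2026-01-03];
// the start date is included, and any date past the calendar's representable
// range is silently dropped by filter_map.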
/// Compute university holidays for a given year.
+1 -105
@@ -2,34 +2,16 @@ use clap::Parser;
/// Banner Discord Bot - Course availability monitoring
///
/// This application runs multiple services that can be controlled via CLI arguments:
/// This application runs all services:
/// - bot: Discord bot for course monitoring commands
/// - web: HTTP server for web interface and API
/// - scraper: Background service for scraping course data
///
/// Use --services to specify which services to run, or --disable-services to exclude specific services.
#[derive(Parser, Debug)]
#[command(author, version, about, long_about = None)]
pub struct Args {
/// Log formatter to use
#[arg(long, value_enum, default_value_t = default_tracing_format())]
pub tracing: TracingFormat,
/// Services to run (comma-separated). Default: all services
///
/// Examples:
/// --services bot,web # Run only bot and web services
/// --services scraper # Run only the scraper service
#[arg(long, value_delimiter = ',', conflicts_with = "disable_services")]
pub services: Option<Vec<ServiceName>>,
/// Services to disable (comma-separated)
///
/// Examples:
/// --disable-services bot # Run web and scraper only
/// --disable-services bot,web # Run only the scraper service
#[arg(long, value_delimiter = ',', conflicts_with = "services")]
pub disable_services: Option<Vec<ServiceName>>,
}
#[derive(clap::ValueEnum, Clone, Debug)]
@@ -66,34 +48,6 @@ impl ServiceName {
}
}
/// Determine which services should be enabled based on CLI arguments
pub fn determine_enabled_services(args: &Args) -> Result<Vec<ServiceName>, anyhow::Error> {
match (&args.services, &args.disable_services) {
(Some(services), None) => {
// User specified which services to run
Ok(services.clone())
}
(None, Some(disabled)) => {
// User specified which services to disable
let enabled: Vec<ServiceName> = ServiceName::all()
.into_iter()
.filter(|s| !disabled.contains(s))
.collect();
Ok(enabled)
}
(None, None) => {
// Default: run all services
Ok(ServiceName::all())
}
(Some(_), Some(_)) => {
// This should be prevented by clap's conflicts_with, but just in case
Err(anyhow::anyhow!(
"Cannot specify both --services and --disable-services"
))
}
}
}
#[cfg(debug_assertions)]
const DEFAULT_TRACING_FORMAT: TracingFormat = TracingFormat::Pretty;
#[cfg(not(debug_assertions))]
@@ -107,64 +61,6 @@ fn default_tracing_format() -> TracingFormat {
mod tests {
use super::*;
fn args_with_services(
services: Option<Vec<ServiceName>>,
disable: Option<Vec<ServiceName>>,
) -> Args {
Args {
tracing: TracingFormat::Pretty,
services,
disable_services: disable,
}
}
#[test]
fn test_default_enables_all_services() {
let result = determine_enabled_services(&args_with_services(None, None)).unwrap();
assert_eq!(result.len(), 3);
}
#[test]
fn test_explicit_services_only_those() {
let result =
determine_enabled_services(&args_with_services(Some(vec![ServiceName::Web]), None))
.unwrap();
assert_eq!(result.len(), 1);
assert_eq!(result[0].as_str(), "web");
}
#[test]
fn test_disable_bot_leaves_web_and_scraper() {
let result =
determine_enabled_services(&args_with_services(None, Some(vec![ServiceName::Bot])))
.unwrap();
assert_eq!(result.len(), 2);
assert!(result.iter().all(|s| s.as_str() != "bot"));
}
#[test]
fn test_disable_all_leaves_empty() {
let result = determine_enabled_services(&args_with_services(
None,
Some(vec![
ServiceName::Bot,
ServiceName::Web,
ServiceName::Scraper,
]),
))
.unwrap();
assert!(result.is_empty());
}
#[test]
fn test_both_specified_returns_error() {
let result = determine_enabled_services(&args_with_services(
Some(vec![ServiceName::Web]),
Some(vec![ServiceName::Bot]),
));
assert!(result.is_err());
}
#[test]
fn test_service_name_as_str() {
assert_eq!(ServiceName::Bot.as_str(), "bot");
+50
@@ -47,6 +47,19 @@ pub struct Config {
/// Rate limiting configuration for Banner API requests
#[serde(default = "default_rate_limiting")]
pub rate_limiting: RateLimitingConfig,
/// Discord OAuth2 client ID for web authentication
#[serde(deserialize_with = "deserialize_string_or_uint")]
pub discord_client_id: String,
/// Discord OAuth2 client secret for web authentication
pub discord_client_secret: String,
/// Optional base URL override for OAuth2 redirect (e.g. "https://banner.xevion.dev").
/// When unset, the redirect URI is derived from the incoming request's Origin/Host.
#[serde(default)]
pub discord_redirect_uri: Option<String>,
/// Discord user ID to seed as initial admin on startup (optional)
#[serde(default)]
pub admin_discord_id: Option<u64>,
}
/// Default log level of "info"
@@ -216,6 +229,43 @@ where
deserializer.deserialize_any(DurationVisitor)
}
/// Deserializes a value that may arrive as either a string or unsigned integer.
///
/// Figment's env provider infers types from raw values, so numeric-looking strings
/// like Discord client IDs get parsed as integers. This accepts both forms.
fn deserialize_string_or_uint<'de, D>(deserializer: D) -> Result<String, D::Error>
where
D: Deserializer<'de>,
{
use serde::de::Visitor;
struct StringOrUintVisitor;
impl<'de> Visitor<'de> for StringOrUintVisitor {
type Value = String;
fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result {
formatter.write_str("a string or unsigned integer")
}
fn visit_str<E>(self, value: &str) -> Result<Self::Value, E>
where
E: serde::de::Error,
{
Ok(value.to_owned())
}
fn visit_u64<E>(self, value: u64) -> Result<Self::Value, E>
where
E: serde::de::Error,
{
Ok(value.to_string())
}
}
deserializer.deserialize_any(StringOrUintVisitor)
}
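// A minimal sketch of the failure mode this guards against (hypothetical `Demo`
// struct; `figment::Jail` assumes figment's "test" feature is enabled). The env
// provider parses a numeric-looking value as an integer, and the visitor turns
// it back into the String the config expects:
#[cfg(test)]
mod string_or_uint_sketch {
    use super::*;

    #[derive(serde::Deserialize)]
    struct Demo {
        #[serde(deserialize_with = "deserialize_string_or_uint")]
        discord_client_id: String,
    }

    #[test]
    fn numeric_env_value_becomes_string() {
        figment::Jail::expect_with(|jail| {
            jail.set_env("DEMO_DISCORD_CLIENT_ID", "123456789012345678");
            let demo: Demo = figment::Figment::new()
                .merge(figment::providers::Env::prefixed("DEMO_"))
                .extract()?;
            assert_eq!(demo.discord_client_id, "123456789012345678");
            Ok(())
        });
    }
}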
#[cfg(test)]
mod tests {
use super::*;
+673 -69
@@ -1,98 +1,584 @@
//! Batch database operations for improved performance.
use crate::banner::Course;
use crate::data::models::{DbMeetingTime, UpsertCounts};
use crate::error::Result;
use sqlx::PgConnection;
use sqlx::PgPool;
use std::collections::{HashMap, HashSet};
use std::time::Instant;
use tracing::info;
/// Convert a Banner API course's meeting times to the DB JSONB shape.
fn to_db_meeting_times(course: &Course) -> serde_json::Value {
let meetings: Vec<DbMeetingTime> = course
.meetings_faculty
.iter()
.map(|mf| {
let mt = &mf.meeting_time;
DbMeetingTime {
begin_time: mt.begin_time.clone(),
end_time: mt.end_time.clone(),
start_date: mt.start_date.clone(),
end_date: mt.end_date.clone(),
monday: mt.monday,
tuesday: mt.tuesday,
wednesday: mt.wednesday,
thursday: mt.thursday,
friday: mt.friday,
saturday: mt.saturday,
sunday: mt.sunday,
building: mt.building.clone(),
building_description: mt.building_description.clone(),
room: mt.room.clone(),
campus: mt.campus.clone(),
meeting_type: mt.meeting_type.clone(),
meeting_schedule_type: mt.meeting_schedule_type.clone(),
}
})
.collect();
serde_json::to_value(meetings).unwrap_or_default()
}
/// Convert a Banner API course's section attributes to a JSONB array of code strings.
fn to_db_attributes(course: &Course) -> serde_json::Value {
let codes: Vec<&str> = course
.section_attributes
.iter()
.map(|a| a.code.as_str())
.collect();
serde_json::to_value(codes).unwrap_or_default()
}
/// Extract the campus code from the first meeting time (Banner doesn't put it on the course directly).
fn extract_campus_code(course: &Course) -> Option<String> {
course
.meetings_faculty
.first()
.and_then(|mf| mf.meeting_time.campus.clone())
}
// ---------------------------------------------------------------------------
// Task 1: UpsertDiffRow — captures pre- and post-upsert state for diffing
// ---------------------------------------------------------------------------
/// Row returned by the CTE-based upsert query, carrying both old and new values
/// for every auditable field. `old_id` is `None` for fresh inserts.
#[derive(sqlx::FromRow, Debug)]
struct UpsertDiffRow {
id: i32,
old_id: Option<i32>,
// enrollment fields
old_enrollment: Option<i32>,
new_enrollment: i32,
old_max_enrollment: Option<i32>,
new_max_enrollment: i32,
old_wait_count: Option<i32>,
new_wait_count: i32,
old_wait_capacity: Option<i32>,
new_wait_capacity: i32,
// text fields (non-nullable in DB)
old_subject: Option<String>,
new_subject: String,
old_course_number: Option<String>,
new_course_number: String,
old_title: Option<String>,
new_title: String,
// nullable text fields
old_sequence_number: Option<String>,
new_sequence_number: Option<String>,
old_part_of_term: Option<String>,
new_part_of_term: Option<String>,
old_instructional_method: Option<String>,
new_instructional_method: Option<String>,
old_campus: Option<String>,
new_campus: Option<String>,
// nullable int fields
old_credit_hours: Option<i32>,
new_credit_hours: Option<i32>,
old_credit_hour_low: Option<i32>,
new_credit_hour_low: Option<i32>,
old_credit_hour_high: Option<i32>,
new_credit_hour_high: Option<i32>,
// cross-list fields
old_cross_list: Option<String>,
new_cross_list: Option<String>,
old_cross_list_capacity: Option<i32>,
new_cross_list_capacity: Option<i32>,
old_cross_list_count: Option<i32>,
new_cross_list_count: Option<i32>,
// link fields
old_link_identifier: Option<String>,
new_link_identifier: Option<String>,
old_is_section_linked: Option<bool>,
new_is_section_linked: Option<bool>,
// JSONB fields
old_meeting_times: Option<serde_json::Value>,
new_meeting_times: serde_json::Value,
old_attributes: Option<serde_json::Value>,
new_attributes: serde_json::Value,
}
// ---------------------------------------------------------------------------
// Task 3: Entry types and diff logic
// ---------------------------------------------------------------------------
struct AuditEntry {
course_id: i32,
field_changed: &'static str,
old_value: String,
new_value: String,
}
struct MetricEntry {
course_id: i32,
enrollment: i32,
wait_count: i32,
seats_available: i32,
}
/// Compare old vs new for a single field, pushing an `AuditEntry` when they differ.
///
/// Three variants:
/// - `diff_field!(audits, row, field_name, old_field, new_field)` — `Option<T>` old vs `T` new
/// - `diff_field!(opt audits, row, field_name, old_field, new_field)` — `Option<T>` old vs `Option<T>` new
/// - `diff_field!(json audits, row, field_name, old_field, new_field)` — `Option<Value>` old vs `Value` new
///
/// All variants skip when `old_id` is None (fresh insert).
macro_rules! diff_field {
// Standard: Option<T> old vs T new (non-nullable columns)
($audits:ident, $row:ident, $field:expr, $old:ident, $new:ident) => {
if $row.old_id.is_some() {
let old_str = $row
.$old
.as_ref()
.map(|v| v.to_string())
.unwrap_or_default();
let new_str = $row.$new.to_string();
if old_str != new_str {
$audits.push(AuditEntry {
course_id: $row.id,
field_changed: $field,
old_value: old_str,
new_value: new_str,
});
}
}
};
// Nullable: Option<T> old vs Option<T> new
(opt $audits:ident, $row:ident, $field:expr, $old:ident, $new:ident) => {
if $row.old_id.is_some() {
let old_str = $row
.$old
.as_ref()
.map(|v| v.to_string())
.unwrap_or_default();
let new_str = $row
.$new
.as_ref()
.map(|v| v.to_string())
.unwrap_or_default();
if old_str != new_str {
$audits.push(AuditEntry {
course_id: $row.id,
field_changed: $field,
old_value: old_str,
new_value: new_str,
});
}
}
};
// JSONB: Option<Value> old vs Value new
(json $audits:ident, $row:ident, $field:expr, $old:ident, $new:ident) => {
if $row.old_id.is_some() {
let old_val = $row
.$old
.as_ref()
.cloned()
.unwrap_or(serde_json::Value::Null);
let new_val = &$row.$new;
if old_val != *new_val {
$audits.push(AuditEntry {
course_id: $row.id,
field_changed: $field,
old_value: old_val.to_string(),
new_value: new_val.to_string(),
});
}
}
};
}
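// For reference, diff_field!(audits, row, "enrollment", old_enrollment, new_enrollment)
// expands to roughly:
//
//     if row.old_id.is_some() {
//         let old_str = row.old_enrollment.as_ref().map(|v| v.to_string()).unwrap_or_default();
//         let new_str = row.new_enrollment.to_string();
//         if old_str != new_str {
//             audits.push(AuditEntry { course_id: row.id, field_changed: "enrollment",
//                                      old_value: old_str, new_value: new_str });
//         }
//     }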
/// Compute audit entries (field-level diffs) and metric entries from upsert diff rows.
fn compute_diffs(rows: &[UpsertDiffRow]) -> (Vec<AuditEntry>, Vec<MetricEntry>) {
let mut audits = Vec::new();
let mut metrics = Vec::new();
for row in rows {
// Non-nullable fields
diff_field!(audits, row, "enrollment", old_enrollment, new_enrollment);
diff_field!(
audits,
row,
"max_enrollment",
old_max_enrollment,
new_max_enrollment
);
diff_field!(audits, row, "wait_count", old_wait_count, new_wait_count);
diff_field!(
audits,
row,
"wait_capacity",
old_wait_capacity,
new_wait_capacity
);
diff_field!(audits, row, "subject", old_subject, new_subject);
diff_field!(
audits,
row,
"course_number",
old_course_number,
new_course_number
);
diff_field!(audits, row, "title", old_title, new_title);
// Nullable text fields
diff_field!(opt audits, row, "sequence_number", old_sequence_number, new_sequence_number);
diff_field!(opt audits, row, "part_of_term", old_part_of_term, new_part_of_term);
diff_field!(opt audits, row, "instructional_method", old_instructional_method, new_instructional_method);
diff_field!(opt audits, row, "campus", old_campus, new_campus);
// Nullable int fields
diff_field!(opt audits, row, "credit_hours", old_credit_hours, new_credit_hours);
diff_field!(opt audits, row, "credit_hour_low", old_credit_hour_low, new_credit_hour_low);
diff_field!(opt audits, row, "credit_hour_high", old_credit_hour_high, new_credit_hour_high);
// Cross-list fields
diff_field!(opt audits, row, "cross_list", old_cross_list, new_cross_list);
diff_field!(opt audits, row, "cross_list_capacity", old_cross_list_capacity, new_cross_list_capacity);
diff_field!(opt audits, row, "cross_list_count", old_cross_list_count, new_cross_list_count);
// Link fields
diff_field!(opt audits, row, "link_identifier", old_link_identifier, new_link_identifier);
diff_field!(opt audits, row, "is_section_linked", old_is_section_linked, new_is_section_linked);
// JSONB fields
diff_field!(json audits, row, "meeting_times", old_meeting_times, new_meeting_times);
diff_field!(json audits, row, "attributes", old_attributes, new_attributes);
// Emit a metric entry when enrollment/wait_count/max_enrollment changed
// Skip fresh inserts (no old data to compare against)
let enrollment_changed = row.old_id.is_some()
&& (row.old_enrollment != Some(row.new_enrollment)
|| row.old_wait_count != Some(row.new_wait_count)
|| row.old_max_enrollment != Some(row.new_max_enrollment));
if enrollment_changed {
metrics.push(MetricEntry {
course_id: row.id,
enrollment: row.new_enrollment,
wait_count: row.new_wait_count,
seats_available: row.new_max_enrollment - row.new_enrollment,
});
}
}
(audits, metrics)
}
// ---------------------------------------------------------------------------
// Task 4: Batch insert functions for audits and metrics
// ---------------------------------------------------------------------------
async fn insert_audits(audits: &[AuditEntry], conn: &mut PgConnection) -> Result<()> {
if audits.is_empty() {
return Ok(());
}
let course_ids: Vec<i32> = audits.iter().map(|a| a.course_id).collect();
let fields: Vec<&str> = audits.iter().map(|a| a.field_changed).collect();
let old_values: Vec<&str> = audits.iter().map(|a| a.old_value.as_str()).collect();
let new_values: Vec<&str> = audits.iter().map(|a| a.new_value.as_str()).collect();
sqlx::query(
r#"
INSERT INTO course_audits (course_id, timestamp, field_changed, old_value, new_value)
SELECT v.course_id, NOW(), v.field_changed, v.old_value, v.new_value
FROM UNNEST($1::int4[], $2::text[], $3::text[], $4::text[])
AS v(course_id, field_changed, old_value, new_value)
"#,
)
.bind(&course_ids)
.bind(&fields)
.bind(&old_values)
.bind(&new_values)
.execute(&mut *conn)
.await
.map_err(|e| anyhow::anyhow!("Failed to batch insert course_audits: {}", e))?;
Ok(())
}
async fn insert_metrics(metrics: &[MetricEntry], conn: &mut PgConnection) -> Result<()> {
if metrics.is_empty() {
return Ok(());
}
let course_ids: Vec<i32> = metrics.iter().map(|m| m.course_id).collect();
let enrollments: Vec<i32> = metrics.iter().map(|m| m.enrollment).collect();
let wait_counts: Vec<i32> = metrics.iter().map(|m| m.wait_count).collect();
let seats_available: Vec<i32> = metrics.iter().map(|m| m.seats_available).collect();
sqlx::query(
r#"
INSERT INTO course_metrics (course_id, timestamp, enrollment, wait_count, seats_available)
SELECT v.course_id, NOW(), v.enrollment, v.wait_count, v.seats_available
FROM UNNEST($1::int4[], $2::int4[], $3::int4[], $4::int4[])
AS v(course_id, enrollment, wait_count, seats_available)
"#,
)
.bind(&course_ids)
.bind(&enrollments)
.bind(&wait_counts)
.bind(&seats_available)
.execute(&mut *conn)
.await
.map_err(|e| anyhow::anyhow!("Failed to batch insert course_metrics: {}", e))?;
Ok(())
}
// ---------------------------------------------------------------------------
// Core upsert functions (updated to use &mut PgConnection)
// ---------------------------------------------------------------------------
/// Batch upsert courses in a single database query.
///
/// This function performs a bulk INSERT...ON CONFLICT DO UPDATE for all courses
/// in a single round-trip to the database, significantly reducing overhead compared
/// to individual inserts.
/// Performs a bulk INSERT...ON CONFLICT DO UPDATE for all courses, including
/// new fields (meeting times, attributes, instructor data). Captures pre-update
/// state for audit/metric tracking, all within a single transaction.
///
/// # Performance
/// - Reduces N database round-trips to 1
/// - Reduces N database round-trips to ~6 (old-data CTE + upsert, audits, metrics, instructors, junction delete + re-insert)
/// - Typical usage: 50-200 courses per batch
/// - PostgreSQL parameter limit: 65,535 (each bound array counts as one parameter; this upsert binds 23 regardless of batch size)
///
/// # Arguments
/// * `courses` - Slice of Course structs from the Banner API
/// * `db_pool` - PostgreSQL connection pool
///
/// # Returns
/// * `Ok(())` on success
/// * `Err(_)` if the database operation fails
///
/// # Example
/// ```no_run
/// use banner::data::batch::batch_upsert_courses;
/// use banner::banner::Course;
/// use sqlx::PgPool;
///
/// async fn example(courses: &[Course], pool: &PgPool) -> anyhow::Result<()> {
/// batch_upsert_courses(courses, pool).await?;
/// Ok(())
/// }
/// ```
pub async fn batch_upsert_courses(courses: &[Course], db_pool: &PgPool) -> Result<()> {
// Early return for empty batches
pub async fn batch_upsert_courses(courses: &[Course], db_pool: &PgPool) -> Result<UpsertCounts> {
if courses.is_empty() {
info!("No courses to upsert, skipping batch operation");
return Ok(());
return Ok(UpsertCounts::default());
}
let start = Instant::now();
let course_count = courses.len();
// Extract course fields into vectors for UNNEST
let mut tx = db_pool.begin().await?;
// Step 1: Upsert courses with CTE, returning diff rows
let diff_rows = upsert_courses(courses, &mut tx).await?;
// Step 2: Extract course IDs for instructor linking
let course_ids: Vec<i32> = diff_rows.iter().map(|r| r.id).collect();
// Step 3: Compute audit/metric diffs
let (audits, metrics) = compute_diffs(&diff_rows);
// Count courses that had at least one field change (existing rows only)
let changed_ids: HashSet<i32> = audits.iter().map(|a| a.course_id).collect();
let existing_count = diff_rows.iter().filter(|r| r.old_id.is_some()).count() as i32;
let courses_changed = changed_ids.len() as i32;
let counts = UpsertCounts {
courses_fetched: course_count as i32,
courses_changed,
courses_unchanged: existing_count - courses_changed,
audits_generated: audits.len() as i32,
metrics_generated: metrics.len() as i32,
};
// Step 4: Insert audits and metrics
insert_audits(&audits, &mut tx).await?;
insert_metrics(&metrics, &mut tx).await?;
// Step 5: Upsert instructors (returns email -> id map)
let email_to_id = upsert_instructors(courses, &mut tx).await?;
// Step 6: Link courses to instructors via junction table
upsert_course_instructors(courses, &course_ids, &email_to_id, &mut tx).await?;
tx.commit().await?;
let duration = start.elapsed();
info!(
courses_count = course_count,
courses_changed = counts.courses_changed,
courses_unchanged = counts.courses_unchanged,
audit_entries = counts.audits_generated,
metric_entries = counts.metrics_generated,
duration_ms = duration.as_millis(),
"Batch upserted courses with instructors, audits, and metrics"
);
Ok(counts)
}
// ---------------------------------------------------------------------------
// Task 2: CTE-based upsert returning old+new values
// ---------------------------------------------------------------------------
/// Upsert all courses and return diff rows with old and new values for auditing.
async fn upsert_courses(courses: &[Course], conn: &mut PgConnection) -> Result<Vec<UpsertDiffRow>> {
let crns: Vec<&str> = courses
.iter()
.map(|c| c.course_reference_number.as_str())
.collect();
let subjects: Vec<&str> = courses.iter().map(|c| c.subject.as_str()).collect();
let course_numbers: Vec<&str> = courses.iter().map(|c| c.course_number.as_str()).collect();
let titles: Vec<&str> = courses.iter().map(|c| c.course_title.as_str()).collect();
let term_codes: Vec<&str> = courses.iter().map(|c| c.term.as_str()).collect();
let enrollments: Vec<i32> = courses.iter().map(|c| c.enrollment).collect();
let max_enrollments: Vec<i32> = courses.iter().map(|c| c.maximum_enrollment).collect();
let wait_counts: Vec<i32> = courses.iter().map(|c| c.wait_count).collect();
let wait_capacities: Vec<i32> = courses.iter().map(|c| c.wait_capacity).collect();
// Perform batch upsert using UNNEST for efficient bulk insertion
let result = sqlx::query(
// New scalar fields
let sequence_numbers: Vec<Option<&str>> = courses
.iter()
.map(|c| Some(c.sequence_number.as_str()))
.collect();
let parts_of_term: Vec<Option<&str>> = courses
.iter()
.map(|c| Some(c.part_of_term.as_str()))
.collect();
let instructional_methods: Vec<Option<&str>> = courses
.iter()
.map(|c| Some(c.instructional_method.as_str()))
.collect();
let campuses: Vec<Option<String>> = courses.iter().map(extract_campus_code).collect();
let credit_hours: Vec<Option<i32>> = courses.iter().map(|c| c.credit_hours).collect();
let credit_hour_lows: Vec<Option<i32>> = courses.iter().map(|c| c.credit_hour_low).collect();
let credit_hour_highs: Vec<Option<i32>> = courses.iter().map(|c| c.credit_hour_high).collect();
let cross_lists: Vec<Option<&str>> = courses.iter().map(|c| c.cross_list.as_deref()).collect();
let cross_list_capacities: Vec<Option<i32>> =
courses.iter().map(|c| c.cross_list_capacity).collect();
let cross_list_counts: Vec<Option<i32>> = courses.iter().map(|c| c.cross_list_count).collect();
let link_identifiers: Vec<Option<&str>> = courses
.iter()
.map(|c| c.link_identifier.as_deref())
.collect();
let is_section_linkeds: Vec<Option<bool>> =
courses.iter().map(|c| Some(c.is_section_linked)).collect();
// JSONB fields
let meeting_times_json: Vec<serde_json::Value> =
courses.iter().map(to_db_meeting_times).collect();
let attributes_json: Vec<serde_json::Value> = courses.iter().map(to_db_attributes).collect();
let rows = sqlx::query_as::<_, UpsertDiffRow>(
r#"
INSERT INTO courses (
crn, subject, course_number, title, term_code,
enrollment, max_enrollment, wait_count, wait_capacity, last_scraped_at
WITH old_data AS (
SELECT id, enrollment, max_enrollment, wait_count, wait_capacity,
subject, course_number, title,
sequence_number, part_of_term, instructional_method, campus,
credit_hours, credit_hour_low, credit_hour_high,
cross_list, cross_list_capacity, cross_list_count,
link_identifier, is_section_linked,
meeting_times, attributes,
crn, term_code
FROM courses
WHERE (crn, term_code) IN (SELECT * FROM UNNEST($1::text[], $5::text[]))
),
upserted AS (
INSERT INTO courses (
crn, subject, course_number, title, term_code,
enrollment, max_enrollment, wait_count, wait_capacity, last_scraped_at,
sequence_number, part_of_term, instructional_method, campus,
credit_hours, credit_hour_low, credit_hour_high,
cross_list, cross_list_capacity, cross_list_count,
link_identifier, is_section_linked,
meeting_times, attributes
)
SELECT
v.crn, v.subject, v.course_number, v.title, v.term_code,
v.enrollment, v.max_enrollment, v.wait_count, v.wait_capacity, NOW(),
v.sequence_number, v.part_of_term, v.instructional_method, v.campus,
v.credit_hours, v.credit_hour_low, v.credit_hour_high,
v.cross_list, v.cross_list_capacity, v.cross_list_count,
v.link_identifier, v.is_section_linked,
v.meeting_times, v.attributes
FROM UNNEST(
$1::text[], $2::text[], $3::text[], $4::text[], $5::text[],
$6::int4[], $7::int4[], $8::int4[], $9::int4[],
$10::text[], $11::text[], $12::text[], $13::text[],
$14::int4[], $15::int4[], $16::int4[],
$17::text[], $18::int4[], $19::int4[],
$20::text[], $21::bool[],
$22::jsonb[], $23::jsonb[]
) AS v(
crn, subject, course_number, title, term_code,
enrollment, max_enrollment, wait_count, wait_capacity,
sequence_number, part_of_term, instructional_method, campus,
credit_hours, credit_hour_low, credit_hour_high,
cross_list, cross_list_capacity, cross_list_count,
link_identifier, is_section_linked,
meeting_times, attributes
)
ON CONFLICT (crn, term_code)
DO UPDATE SET
subject = EXCLUDED.subject,
course_number = EXCLUDED.course_number,
title = EXCLUDED.title,
enrollment = EXCLUDED.enrollment,
max_enrollment = EXCLUDED.max_enrollment,
wait_count = EXCLUDED.wait_count,
wait_capacity = EXCLUDED.wait_capacity,
last_scraped_at = EXCLUDED.last_scraped_at,
sequence_number = EXCLUDED.sequence_number,
part_of_term = EXCLUDED.part_of_term,
instructional_method = EXCLUDED.instructional_method,
campus = EXCLUDED.campus,
credit_hours = EXCLUDED.credit_hours,
credit_hour_low = EXCLUDED.credit_hour_low,
credit_hour_high = EXCLUDED.credit_hour_high,
cross_list = EXCLUDED.cross_list,
cross_list_capacity = EXCLUDED.cross_list_capacity,
cross_list_count = EXCLUDED.cross_list_count,
link_identifier = EXCLUDED.link_identifier,
is_section_linked = EXCLUDED.is_section_linked,
meeting_times = EXCLUDED.meeting_times,
attributes = EXCLUDED.attributes
RETURNING *
)
SELECT * FROM UNNEST(
$1::text[], $2::text[], $3::text[], $4::text[], $5::text[],
$6::int4[], $7::int4[], $8::int4[], $9::int4[],
array_fill(NOW()::timestamptz, ARRAY[$10])
) AS t(
crn, subject, course_number, title, term_code,
enrollment, max_enrollment, wait_count, wait_capacity, last_scraped_at
)
ON CONFLICT (crn, term_code)
DO UPDATE SET
subject = EXCLUDED.subject,
course_number = EXCLUDED.course_number,
title = EXCLUDED.title,
enrollment = EXCLUDED.enrollment,
max_enrollment = EXCLUDED.max_enrollment,
wait_count = EXCLUDED.wait_count,
wait_capacity = EXCLUDED.wait_capacity,
last_scraped_at = EXCLUDED.last_scraped_at
SELECT u.id,
o.id AS old_id,
o.enrollment AS old_enrollment, u.enrollment AS new_enrollment,
o.max_enrollment AS old_max_enrollment, u.max_enrollment AS new_max_enrollment,
o.wait_count AS old_wait_count, u.wait_count AS new_wait_count,
o.wait_capacity AS old_wait_capacity, u.wait_capacity AS new_wait_capacity,
o.subject AS old_subject, u.subject AS new_subject,
o.course_number AS old_course_number, u.course_number AS new_course_number,
o.title AS old_title, u.title AS new_title,
o.sequence_number AS old_sequence_number, u.sequence_number AS new_sequence_number,
o.part_of_term AS old_part_of_term, u.part_of_term AS new_part_of_term,
o.instructional_method AS old_instructional_method, u.instructional_method AS new_instructional_method,
o.campus AS old_campus, u.campus AS new_campus,
o.credit_hours AS old_credit_hours, u.credit_hours AS new_credit_hours,
o.credit_hour_low AS old_credit_hour_low, u.credit_hour_low AS new_credit_hour_low,
o.credit_hour_high AS old_credit_hour_high, u.credit_hour_high AS new_credit_hour_high,
o.cross_list AS old_cross_list, u.cross_list AS new_cross_list,
o.cross_list_capacity AS old_cross_list_capacity, u.cross_list_capacity AS new_cross_list_capacity,
o.cross_list_count AS old_cross_list_count, u.cross_list_count AS new_cross_list_count,
o.link_identifier AS old_link_identifier, u.link_identifier AS new_link_identifier,
o.is_section_linked AS old_is_section_linked, u.is_section_linked AS new_is_section_linked,
o.meeting_times AS old_meeting_times, u.meeting_times AS new_meeting_times,
o.attributes AS old_attributes, u.attributes AS new_attributes
FROM upserted u
LEFT JOIN old_data o ON u.crn = o.crn AND u.term_code = o.term_code
"#,
)
.bind(&crns)
@@ -104,19 +590,137 @@ pub async fn batch_upsert_courses(courses: &[Course], db_pool: &PgPool) -> Resul
.bind(&max_enrollments)
.bind(&wait_counts)
.bind(&wait_capacities)
.bind(course_count as i32)
.execute(db_pool)
.bind(&sequence_numbers)
.bind(&parts_of_term)
.bind(&instructional_methods)
.bind(&campuses)
.bind(&credit_hours)
.bind(&credit_hour_lows)
.bind(&credit_hour_highs)
.bind(&cross_lists)
.bind(&cross_list_capacities)
.bind(&cross_list_counts)
.bind(&link_identifiers)
.bind(&is_section_linkeds)
.bind(&meeting_times_json)
.bind(&attributes_json)
.fetch_all(&mut *conn)
.await
.map_err(|e| anyhow::anyhow!("Failed to batch upsert courses: {}", e))?;
let duration = start.elapsed();
Ok(rows)
}
info!(
courses_count = course_count,
rows_affected = result.rows_affected(),
duration_ms = duration.as_millis(),
"Batch upserted courses"
);
/// Deduplicate and upsert all instructors from the batch by email.
/// Returns a map of lowercased_email -> instructor id for junction linking.
async fn upsert_instructors(
courses: &[Course],
conn: &mut PgConnection,
) -> Result<HashMap<String, i32>> {
let mut seen = HashSet::new();
let mut display_names: Vec<&str> = Vec::new();
let mut emails_lower: Vec<String> = Vec::new();
let mut skipped_no_email = 0u32;
for course in courses {
for faculty in &course.faculty {
if let Some(email) = &faculty.email_address {
let email_lower = email.to_lowercase();
if seen.insert(email_lower.clone()) {
display_names.push(faculty.display_name.as_str());
emails_lower.push(email_lower);
}
} else {
skipped_no_email += 1;
}
}
}
if skipped_no_email > 0 {
tracing::warn!(
count = skipped_no_email,
"Skipped instructors with no email address"
);
}
if display_names.is_empty() {
return Ok(HashMap::new());
}
let email_refs: Vec<&str> = emails_lower.iter().map(|s| s.as_str()).collect();
let rows: Vec<(i32, String)> = sqlx::query_as(
r#"
INSERT INTO instructors (display_name, email)
SELECT * FROM UNNEST($1::text[], $2::text[])
ON CONFLICT (email)
DO UPDATE SET display_name = EXCLUDED.display_name
RETURNING id, email
"#,
)
.bind(&display_names)
.bind(&email_refs)
.fetch_all(&mut *conn)
.await
.map_err(|e| anyhow::anyhow!("Failed to batch upsert instructors: {}", e))?;
Ok(rows.into_iter().map(|(id, email)| (email, id)).collect())
}
/// Link courses to their instructors via the junction table.
async fn upsert_course_instructors(
courses: &[Course],
course_ids: &[i32],
email_to_id: &HashMap<String, i32>,
conn: &mut PgConnection,
) -> Result<()> {
let mut cids = Vec::new();
let mut instructor_ids: Vec<i32> = Vec::new();
let mut banner_ids: Vec<&str> = Vec::new();
let mut primaries = Vec::new();
for (course, &course_id) in courses.iter().zip(course_ids) {
for faculty in &course.faculty {
if let Some(email) = &faculty.email_address {
let email_lower = email.to_lowercase();
if let Some(&instructor_id) = email_to_id.get(&email_lower) {
cids.push(course_id);
instructor_ids.push(instructor_id);
banner_ids.push(faculty.banner_id.as_str());
primaries.push(faculty.primary_indicator);
}
}
}
}
if cids.is_empty() {
return Ok(());
}
// Delete existing links for these courses then re-insert.
// This handles instructor changes cleanly.
sqlx::query("DELETE FROM course_instructors WHERE course_id = ANY($1)")
.bind(&cids)
.execute(&mut *conn)
.await?;
sqlx::query(
r#"
INSERT INTO course_instructors (course_id, instructor_id, banner_id, is_primary)
SELECT * FROM UNNEST($1::int4[], $2::int4[], $3::text[], $4::bool[])
ON CONFLICT (course_id, instructor_id)
DO UPDATE SET
banner_id = EXCLUDED.banner_id,
is_primary = EXCLUDED.is_primary
"#,
)
.bind(&cids)
.bind(&instructor_ids)
.bind(&banner_ids)
.bind(&primaries)
.execute(&mut *conn)
.await
.map_err(|e| anyhow::anyhow!("Failed to batch upsert course_instructors: {}", e))?;
Ok(())
}
+249
@@ -0,0 +1,249 @@
//! Database query functions for courses, used by the web API.
use crate::data::models::{Course, CourseInstructorDetail};
use crate::error::Result;
use sqlx::PgPool;
use std::collections::HashMap;
/// Column to sort search results by.
#[derive(Debug, Clone, Copy, serde::Deserialize)]
#[serde(rename_all = "snake_case")]
pub enum SortColumn {
CourseCode,
Title,
Instructor,
Time,
Seats,
}
/// Sort direction.
#[derive(Debug, Clone, Copy, serde::Deserialize)]
#[serde(rename_all = "snake_case")]
pub enum SortDirection {
Asc,
Desc,
}
/// Shared WHERE clause for course search filters.
///
/// Parameters $1-$8 match the bind order in `search_courses`.
const SEARCH_WHERE: &str = r#"
WHERE term_code = $1
AND ($2::text[] IS NULL OR subject = ANY($2))
AND ($3::text IS NULL OR title_search @@ plainto_tsquery('simple', $3) OR title ILIKE '%' || $3 || '%')
AND ($4::int IS NULL OR course_number::int >= $4)
AND ($5::int IS NULL OR course_number::int <= $5)
AND ($6::bool = false OR max_enrollment > enrollment)
AND ($7::text IS NULL OR instructional_method = $7)
AND ($8::text IS NULL OR campus = $8)
"#;
/// Build a safe ORDER BY clause from typed sort parameters.
///
/// All column names are hardcoded string literals — no caller input is interpolated.
fn sort_clause(column: Option<SortColumn>, direction: Option<SortDirection>) -> String {
let dir = match direction.unwrap_or(SortDirection::Asc) {
SortDirection::Asc => "ASC",
SortDirection::Desc => "DESC",
};
match column {
Some(SortColumn::CourseCode) => {
format!("subject {dir}, course_number {dir}, sequence_number {dir}")
}
Some(SortColumn::Title) => format!("title {dir}"),
Some(SortColumn::Instructor) => {
format!(
"(SELECT i.display_name FROM course_instructors ci \
JOIN instructors i ON i.id = ci.instructor_id \
WHERE ci.course_id = courses.id AND ci.is_primary = true \
LIMIT 1) {dir} NULLS LAST"
)
}
Some(SortColumn::Time) => {
format!("(meeting_times->0->>'begin_time') {dir} NULLS LAST")
}
Some(SortColumn::Seats) => {
format!("(max_enrollment - enrollment) {dir}")
}
None => "subject ASC, course_number ASC, sequence_number ASC".to_string(),
}
}
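// Sanity check of the generated clauses (derived directly from the match arms above):
#[cfg(test)]
mod sort_clause_sketch {
    use super::*;

    #[test]
    fn builds_expected_clauses() {
        assert_eq!(
            sort_clause(Some(SortColumn::Title), Some(SortDirection::Desc)),
            "title DESC"
        );
        assert_eq!(
            sort_clause(None, None),
            "subject ASC, course_number ASC, sequence_number ASC"
        );
    }
}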
/// Search courses by term with optional filters.
///
/// Returns `(courses, total_count)` for pagination. Uses FTS tsvector for word
/// search and falls back to trigram ILIKE for substring matching.
#[allow(clippy::too_many_arguments)]
pub async fn search_courses(
db_pool: &PgPool,
term_code: &str,
subject: Option<&[String]>,
title_query: Option<&str>,
course_number_low: Option<i32>,
course_number_high: Option<i32>,
open_only: bool,
instructional_method: Option<&str>,
campus: Option<&str>,
limit: i32,
offset: i32,
sort_by: Option<SortColumn>,
sort_dir: Option<SortDirection>,
) -> Result<(Vec<Course>, i64)> {
let order_by = sort_clause(sort_by, sort_dir);
let data_query =
format!("SELECT * FROM courses {SEARCH_WHERE} ORDER BY {order_by} LIMIT $9 OFFSET $10");
let count_query = format!("SELECT COUNT(*) FROM courses {SEARCH_WHERE}");
let courses = sqlx::query_as::<_, Course>(&data_query)
.bind(term_code)
.bind(subject)
.bind(title_query)
.bind(course_number_low)
.bind(course_number_high)
.bind(open_only)
.bind(instructional_method)
.bind(campus)
.bind(limit)
.bind(offset)
.fetch_all(db_pool)
.await?;
let total: (i64,) = sqlx::query_as(&count_query)
.bind(term_code)
.bind(subject)
.bind(title_query)
.bind(course_number_low)
.bind(course_number_high)
.bind(open_only)
.bind(instructional_method)
.bind(campus)
.fetch_one(db_pool)
.await?;
Ok((courses, total.0))
}
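// Call-site sketch (hypothetical term code and filters; `pool` is any connected
// PgPool and `subjects` a Vec<String>):
//
//     let (courses, total) = search_courses(
//         &pool, "202510", Some(&subjects), Some("operating systems"),
//         None, None, /* open_only */ true, None, None,
//         /* limit */ 25, /* offset */ 0,
//         Some(SortColumn::Seats), Some(SortDirection::Desc),
//     ).await?;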
/// Get a single course by CRN and term.
pub async fn get_course_by_crn(
db_pool: &PgPool,
crn: &str,
term_code: &str,
) -> Result<Option<Course>> {
let course =
sqlx::query_as::<_, Course>("SELECT * FROM courses WHERE crn = $1 AND term_code = $2")
.bind(crn)
.bind(term_code)
.fetch_optional(db_pool)
.await?;
Ok(course)
}
/// Get instructors for a single course by course ID.
pub async fn get_course_instructors(
db_pool: &PgPool,
course_id: i32,
) -> Result<Vec<CourseInstructorDetail>> {
let rows = sqlx::query_as::<_, CourseInstructorDetail>(
r#"
SELECT i.id as instructor_id, ci.banner_id, i.display_name, i.email, ci.is_primary,
rmp.avg_rating, rmp.num_ratings, rmp.rmp_legacy_id,
ci.course_id
FROM course_instructors ci
JOIN instructors i ON i.id = ci.instructor_id
LEFT JOIN LATERAL (
SELECT rp.avg_rating, rp.num_ratings, rp.legacy_id as rmp_legacy_id
FROM instructor_rmp_links irl
JOIN rmp_professors rp ON rp.legacy_id = irl.rmp_legacy_id
WHERE irl.instructor_id = i.id
ORDER BY rp.num_ratings DESC NULLS LAST, rp.legacy_id ASC
LIMIT 1
) rmp ON true
WHERE ci.course_id = $1
ORDER BY ci.is_primary DESC, i.display_name
"#,
)
.bind(course_id)
.fetch_all(db_pool)
.await?;
Ok(rows)
}
/// Batch-fetch instructors for multiple courses in a single query.
///
/// Returns a map of `course_id → Vec<CourseInstructorDetail>`.
pub async fn get_instructors_for_courses(
db_pool: &PgPool,
course_ids: &[i32],
) -> Result<HashMap<i32, Vec<CourseInstructorDetail>>> {
if course_ids.is_empty() {
return Ok(HashMap::new());
}
let rows = sqlx::query_as::<_, CourseInstructorDetail>(
r#"
SELECT i.id as instructor_id, ci.banner_id, i.display_name, i.email, ci.is_primary,
rmp.avg_rating, rmp.num_ratings, rmp.rmp_legacy_id,
ci.course_id
FROM course_instructors ci
JOIN instructors i ON i.id = ci.instructor_id
LEFT JOIN LATERAL (
SELECT rp.avg_rating, rp.num_ratings, rp.legacy_id as rmp_legacy_id
FROM instructor_rmp_links irl
JOIN rmp_professors rp ON rp.legacy_id = irl.rmp_legacy_id
WHERE irl.instructor_id = i.id
ORDER BY rp.num_ratings DESC NULLS LAST, rp.legacy_id ASC
LIMIT 1
) rmp ON true
WHERE ci.course_id = ANY($1)
ORDER BY ci.course_id, ci.is_primary DESC, i.display_name
"#,
)
.bind(course_ids)
.fetch_all(db_pool)
.await?;
let mut map: HashMap<i32, Vec<CourseInstructorDetail>> = HashMap::new();
for row in rows {
// course_id is selected by this batch query, so it is always Some here;
// unwrap_or_default is only a defensive fallback.
let cid = row.course_id.unwrap_or_default();
map.entry(cid).or_default().push(row);
}
Ok(map)
}
/// Get subjects for a term, sorted by total enrollment (descending).
///
/// Returns only subjects that have courses in the given term, with their
/// descriptions from reference_data and enrollment totals for ranking.
pub async fn get_subjects_by_enrollment(
db_pool: &PgPool,
term_code: &str,
) -> Result<Vec<(String, String, i64)>> {
let rows: Vec<(String, String, i64)> = sqlx::query_as(
r#"
SELECT c.subject,
COALESCE(rd.description, c.subject),
COALESCE(SUM(c.enrollment), 0) as total_enrollment
FROM courses c
LEFT JOIN reference_data rd ON rd.category = 'subject' AND rd.code = c.subject
WHERE c.term_code = $1
GROUP BY c.subject, rd.description
ORDER BY total_enrollment DESC
"#,
)
.bind(term_code)
.fetch_all(db_pool)
.await?;
Ok(rows)
}
/// Get all distinct term codes that have courses in the DB.
pub async fn get_available_terms(db_pool: &PgPool) -> Result<Vec<String>> {
let rows: Vec<(String,)> =
sqlx::query_as("SELECT DISTINCT term_code FROM courses ORDER BY term_code DESC")
.fetch_all(db_pool)
.await?;
Ok(rows.into_iter().map(|(tc,)| tc).collect())
}
+6
@@ -1,5 +1,11 @@
//! Database models and schema.
pub mod batch;
pub mod courses;
pub mod models;
pub mod reference;
pub mod rmp;
pub mod rmp_matching;
pub mod scrape_jobs;
pub mod sessions;
pub mod users;
+192
@@ -1,7 +1,68 @@
//! `sqlx` models for the database schema.
use chrono::{DateTime, Utc};
use serde::{Deserialize, Deserializer, Serialize, Serializer};
use serde_json::Value;
use ts_rs::TS;
/// Serialize an `i64` as a string to avoid JavaScript precision loss for values exceeding 2^53.
fn serialize_i64_as_string<S: Serializer>(value: &i64, serializer: S) -> Result<S::Ok, S::Error> {
serializer.serialize_str(&value.to_string())
}
/// Deserialize an `i64` from either a number or a string.
fn deserialize_i64_from_string<'de, D: Deserializer<'de>>(
deserializer: D,
) -> Result<i64, D::Error> {
use serde::de;
struct I64OrStringVisitor;
impl<'de> de::Visitor<'de> for I64OrStringVisitor {
type Value = i64;
fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result {
formatter.write_str("an integer or a string containing an integer")
}
fn visit_i64<E: de::Error>(self, value: i64) -> Result<i64, E> {
Ok(value)
}
fn visit_u64<E: de::Error>(self, value: u64) -> Result<i64, E> {
i64::try_from(value).map_err(|_| E::custom(format!("u64 {value} out of i64 range")))
}
fn visit_str<E: de::Error>(self, value: &str) -> Result<i64, E> {
value.parse().map_err(de::Error::custom)
}
}
deserializer.deserialize_any(I64OrStringVisitor)
}
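// Round-trip sketch (hypothetical `Snowflake` struct): IDs above 2^53 survive a
// JSON round-trip because they travel as strings.
#[cfg(test)]
mod i64_string_sketch {
    use super::*;

    #[derive(Serialize, Deserialize)]
    struct Snowflake {
        #[serde(
            serialize_with = "serialize_i64_as_string",
            deserialize_with = "deserialize_i64_from_string"
        )]
        id: i64,
    }

    #[test]
    fn round_trips_large_ids() {
        let json = serde_json::to_string(&Snowflake { id: (1i64 << 53) + 1 }).unwrap();
        assert_eq!(json, r#"{"id":"9007199254740993"}"#);
        let back: Snowflake = serde_json::from_str(&json).unwrap();
        assert_eq!(back.id, (1i64 << 53) + 1);
    }
}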
/// Represents a meeting time stored as JSONB in the courses table.
#[derive(Debug, Clone, Serialize, Deserialize, TS)]
#[ts(export)]
pub struct DbMeetingTime {
pub begin_time: Option<String>,
pub end_time: Option<String>,
pub start_date: String,
pub end_date: String,
pub monday: bool,
pub tuesday: bool,
pub wednesday: bool,
pub thursday: bool,
pub friday: bool,
pub saturday: bool,
pub sunday: bool,
pub building: Option<String>,
pub building_description: Option<String>,
pub room: Option<String>,
pub campus: Option<String>,
pub meeting_type: String,
pub meeting_schedule_type: String,
}
#[allow(dead_code)]
#[derive(sqlx::FromRow, Debug, Clone)]
@@ -17,6 +78,63 @@ pub struct Course {
pub wait_count: i32,
pub wait_capacity: i32,
pub last_scraped_at: DateTime<Utc>,
// New scalar fields
pub sequence_number: Option<String>,
pub part_of_term: Option<String>,
pub instructional_method: Option<String>,
pub campus: Option<String>,
pub credit_hours: Option<i32>,
pub credit_hour_low: Option<i32>,
pub credit_hour_high: Option<i32>,
pub cross_list: Option<String>,
pub cross_list_capacity: Option<i32>,
pub cross_list_count: Option<i32>,
pub link_identifier: Option<String>,
pub is_section_linked: Option<bool>,
// JSONB fields
pub meeting_times: Value,
pub attributes: Value,
}
#[allow(dead_code)]
#[derive(sqlx::FromRow, Debug, Clone)]
pub struct Instructor {
pub id: i32,
pub display_name: String,
pub email: String,
pub rmp_match_status: String,
}
#[allow(dead_code)]
#[derive(sqlx::FromRow, Debug, Clone)]
pub struct CourseInstructor {
pub course_id: i32,
pub instructor_id: i32,
pub banner_id: String,
pub is_primary: bool,
}
/// Joined instructor data for a course (from course_instructors + instructors + rmp_professors).
#[derive(sqlx::FromRow, Debug, Clone)]
pub struct CourseInstructorDetail {
pub instructor_id: i32,
pub banner_id: String,
pub display_name: String,
pub email: String,
pub is_primary: bool,
pub avg_rating: Option<f32>,
pub num_ratings: Option<i32>,
pub rmp_legacy_id: Option<i32>,
/// Populated whenever the query selects `ci.course_id`; `None` for queries that omit it.
pub course_id: Option<i32>,
}
#[allow(dead_code)]
#[derive(sqlx::FromRow, Debug, Clone)]
pub struct ReferenceData {
pub category: String,
pub code: String,
pub description: String,
}
#[allow(dead_code)]
@@ -41,6 +159,16 @@ pub struct CourseAudit {
pub new_value: String,
}
/// Aggregate counts returned by batch upsert, used for scrape job result logging.
#[derive(Debug, Clone, Default)]
pub struct UpsertCounts {
pub courses_fetched: i32,
pub courses_changed: i32,
pub courses_unchanged: i32,
pub audits_generated: i32,
pub metrics_generated: i32,
}
/// The priority level of a scrape job.
#[derive(sqlx::Type, Copy, Debug, Clone)]
#[sqlx(type_name = "scrape_priority", rename_all = "PascalCase")]
@@ -61,6 +189,20 @@ pub enum TargetType {
SingleCrn,
}
/// Computed status for a scrape job, derived from existing fields.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize)]
#[serde(rename_all = "camelCase")]
pub enum ScrapeJobStatus {
Processing,
StaleLock,
Exhausted,
Scheduled,
Pending,
}
/// How long a lock can be held before it is considered stale (mirrors `scrape_jobs::LOCK_EXPIRY`).
const LOCK_EXPIRY_SECS: i64 = 10 * 60;
/// Represents a queryable job from the database.
#[allow(dead_code)]
#[derive(sqlx::FromRow, Debug, Clone)]
@@ -76,4 +218,54 @@ pub struct ScrapeJob {
pub retry_count: i32,
/// Maximum number of retry attempts allowed (non-negative, enforced by CHECK constraint)
pub max_retries: i32,
/// When the job last entered the "ready to pick up" state.
/// Set to NOW() on creation; updated to NOW() on retry.
pub queued_at: DateTime<Utc>,
}
impl ScrapeJob {
/// Compute the current status of this job from its fields.
pub fn status(&self) -> ScrapeJobStatus {
let now = Utc::now();
match self.locked_at {
Some(locked) if (now - locked).num_seconds() < LOCK_EXPIRY_SECS => {
ScrapeJobStatus::Processing
}
Some(_) => ScrapeJobStatus::StaleLock,
None if self.retry_count >= self.max_retries && self.max_retries > 0 => {
ScrapeJobStatus::Exhausted
}
None if self.execute_at > now => ScrapeJobStatus::Scheduled,
None => ScrapeJobStatus::Pending,
}
}
}
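// Derivation at a glance (mirrors the match arms above):
//
//     locked_at within the last 10 minutes        -> Processing
//     locked_at older than 10 minutes             -> StaleLock
//     unlocked, retry_count >= max_retries (> 0)  -> Exhausted
//     unlocked, execute_at in the future          -> Scheduled
//     otherwise                                   -> Pending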
/// A user authenticated via Discord OAuth.
#[derive(sqlx::FromRow, Debug, Clone, Serialize, Deserialize, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export)]
pub struct User {
#[serde(
serialize_with = "serialize_i64_as_string",
deserialize_with = "deserialize_i64_from_string"
)]
#[ts(type = "string")]
pub discord_id: i64,
pub discord_username: String,
pub discord_avatar_hash: Option<String>,
pub is_admin: bool,
pub created_at: DateTime<Utc>,
pub updated_at: DateTime<Utc>,
}
/// A server-side session for an authenticated user.
#[allow(dead_code)] // Fields read via sqlx::FromRow; some only used in DB queries
#[derive(sqlx::FromRow, Debug, Clone)]
pub struct UserSession {
pub id: String,
pub user_id: i64,
pub created_at: DateTime<Utc>,
pub expires_at: DateTime<Utc>,
pub last_active_at: DateTime<Utc>,
}
+57
@@ -0,0 +1,57 @@
//! Database operations for the `reference_data` table (code→description lookups).
use crate::data::models::ReferenceData;
use crate::error::Result;
use html_escape::decode_html_entities;
use sqlx::PgPool;
/// Batch upsert reference data entries.
pub async fn batch_upsert(entries: &[ReferenceData], db_pool: &PgPool) -> Result<()> {
if entries.is_empty() {
return Ok(());
}
let categories: Vec<&str> = entries.iter().map(|e| e.category.as_str()).collect();
let codes: Vec<&str> = entries.iter().map(|e| e.code.as_str()).collect();
let descriptions: Vec<String> = entries
.iter()
.map(|e| decode_html_entities(&e.description).into_owned())
.collect();
sqlx::query(
r#"
INSERT INTO reference_data (category, code, description)
SELECT * FROM UNNEST($1::text[], $2::text[], $3::text[])
ON CONFLICT (category, code)
DO UPDATE SET description = EXCLUDED.description
"#,
)
.bind(&categories)
.bind(&codes)
.bind(&descriptions)
.execute(db_pool)
.await?;
Ok(())
}
/// Get all reference data entries for a category.
pub async fn get_by_category(category: &str, db_pool: &PgPool) -> Result<Vec<ReferenceData>> {
let rows = sqlx::query_as::<_, ReferenceData>(
"SELECT category, code, description FROM reference_data WHERE category = $1 ORDER BY description",
)
.bind(category)
.fetch_all(db_pool)
.await?;
Ok(rows)
}
/// Get all reference data entries (for cache initialization).
pub async fn get_all(db_pool: &PgPool) -> Result<Vec<ReferenceData>> {
let rows = sqlx::query_as::<_, ReferenceData>(
"SELECT category, code, description FROM reference_data ORDER BY category, description",
)
.fetch_all(db_pool)
.await?;
Ok(rows)
}
+209
@@ -0,0 +1,209 @@
//! Database operations for RateMyProfessors data.
use crate::error::Result;
use crate::rmp::RmpProfessor;
use sqlx::PgPool;
use std::collections::HashSet;
/// Bulk upsert RMP professors using the UNNEST pattern.
///
/// Deduplicates by `legacy_id` before inserting — the RMP API can return
/// the same professor on multiple pages.
pub async fn batch_upsert_rmp_professors(
professors: &[RmpProfessor],
db_pool: &PgPool,
) -> Result<()> {
if professors.is_empty() {
return Ok(());
}
// Deduplicate: keep last occurrence per legacy_id (latest page wins)
let mut seen = HashSet::new();
let deduped: Vec<&RmpProfessor> = professors
.iter()
.rev()
.filter(|p| seen.insert(p.legacy_id))
.collect();
let legacy_ids: Vec<i32> = deduped.iter().map(|p| p.legacy_id).collect();
let graphql_ids: Vec<&str> = deduped.iter().map(|p| p.graphql_id.as_str()).collect();
let first_names: Vec<String> = deduped
.iter()
.map(|p| p.first_name.trim().to_string())
.collect();
let first_name_refs: Vec<&str> = first_names.iter().map(|s| s.as_str()).collect();
let last_names: Vec<String> = deduped
.iter()
.map(|p| p.last_name.trim().to_string())
.collect();
let last_name_refs: Vec<&str> = last_names.iter().map(|s| s.as_str()).collect();
let departments: Vec<Option<&str>> = deduped.iter().map(|p| p.department.as_deref()).collect();
let avg_ratings: Vec<Option<f32>> = deduped.iter().map(|p| p.avg_rating).collect();
let avg_difficulties: Vec<Option<f32>> = deduped.iter().map(|p| p.avg_difficulty).collect();
let num_ratings: Vec<i32> = deduped.iter().map(|p| p.num_ratings).collect();
let would_take_again_pcts: Vec<Option<f32>> =
deduped.iter().map(|p| p.would_take_again_pct).collect();
sqlx::query(
r#"
INSERT INTO rmp_professors (
legacy_id, graphql_id, first_name, last_name, department,
avg_rating, avg_difficulty, num_ratings, would_take_again_pct,
last_synced_at
)
SELECT
v.legacy_id, v.graphql_id, v.first_name, v.last_name, v.department,
v.avg_rating, v.avg_difficulty, v.num_ratings, v.would_take_again_pct,
NOW()
FROM UNNEST(
$1::int4[], $2::text[], $3::text[], $4::text[], $5::text[],
$6::real[], $7::real[], $8::int4[], $9::real[]
) AS v(
legacy_id, graphql_id, first_name, last_name, department,
avg_rating, avg_difficulty, num_ratings, would_take_again_pct
)
ON CONFLICT (legacy_id)
DO UPDATE SET
graphql_id = EXCLUDED.graphql_id,
first_name = EXCLUDED.first_name,
last_name = EXCLUDED.last_name,
department = EXCLUDED.department,
avg_rating = EXCLUDED.avg_rating,
avg_difficulty = EXCLUDED.avg_difficulty,
num_ratings = EXCLUDED.num_ratings,
would_take_again_pct = EXCLUDED.would_take_again_pct,
last_synced_at = EXCLUDED.last_synced_at
"#,
)
.bind(&legacy_ids)
.bind(&graphql_ids)
.bind(&first_name_refs)
.bind(&last_name_refs)
.bind(&departments)
.bind(&avg_ratings)
.bind(&avg_difficulties)
.bind(&num_ratings)
.bind(&would_take_again_pcts)
.execute(db_pool)
.await
.map_err(|e| anyhow::anyhow!("Failed to batch upsert RMP professors: {}", e))?;
Ok(())
}
/// Normalize a name for matching: lowercase, trim, strip trailing periods.
pub(crate) fn normalize(s: &str) -> String {
s.trim().to_lowercase().trim_end_matches('.').to_string()
}
/// Parse Banner's "Last, First Middle" display name into (last, first) tokens.
///
/// Returns `None` if the format is unparseable (no comma, empty parts).
pub(crate) fn parse_display_name(display_name: &str) -> Option<(String, String)> {
let (last_part, first_part) = display_name.split_once(',')?;
let last = normalize(last_part);
// Take only the first token of the first-name portion to drop middle names/initials.
let first = normalize(first_part.split_whitespace().next()?);
if last.is_empty() || first.is_empty() {
return None;
}
Some((last, first))
}
/// Retrieve RMP rating data for an instructor by instructor id.
///
/// Returns `(avg_rating, num_ratings)` for the best linked RMP profile
/// (most ratings). Returns `None` if no link exists.
#[allow(dead_code)]
pub async fn get_instructor_rmp_data(
db_pool: &PgPool,
instructor_id: i32,
) -> Result<Option<(f32, i32)>> {
let row: Option<(f32, i32)> = sqlx::query_as(
r#"
SELECT rp.avg_rating, rp.num_ratings
FROM instructor_rmp_links irl
JOIN rmp_professors rp ON rp.legacy_id = irl.rmp_legacy_id
WHERE irl.instructor_id = $1
AND rp.avg_rating IS NOT NULL
ORDER BY rp.num_ratings DESC NULLS LAST
LIMIT 1
"#,
)
.bind(instructor_id)
.fetch_optional(db_pool)
.await?;
Ok(row)
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn parse_standard_name() {
assert_eq!(
parse_display_name("Smith, John"),
Some(("smith".into(), "john".into()))
);
}
#[test]
fn parse_name_with_middle() {
assert_eq!(
parse_display_name("Smith, John David"),
Some(("smith".into(), "john".into()))
);
}
#[test]
fn parse_name_with_middle_initial() {
assert_eq!(
parse_display_name("Garcia, Maria L."),
Some(("garcia".into(), "maria".into()))
);
}
#[test]
fn parse_name_with_suffix_in_last() {
// Banner may encode "Jr." as part of the last name.
// normalize() strips trailing periods so "Jr." becomes "jr".
assert_eq!(
parse_display_name("Smith Jr., James"),
Some(("smith jr".into(), "james".into()))
);
}
#[test]
fn parse_no_comma_returns_none() {
assert_eq!(parse_display_name("SingleName"), None);
}
#[test]
fn parse_empty_first_returns_none() {
assert_eq!(parse_display_name("Smith,"), None);
}
#[test]
fn parse_empty_last_returns_none() {
assert_eq!(parse_display_name(", John"), None);
}
#[test]
fn parse_extra_whitespace() {
assert_eq!(
parse_display_name(" Doe , Jane Marie "),
Some(("doe".into(), "jane".into()))
);
}
#[test]
fn normalize_trims_and_lowercases() {
assert_eq!(normalize(" FOO "), "foo");
}
#[test]
fn normalize_strips_trailing_period() {
assert_eq!(normalize("Jr."), "jr");
}
}
+513
@@ -0,0 +1,513 @@
//! Confidence scoring and candidate generation for RMP instructor matching.
use crate::data::rmp::{normalize, parse_display_name};
use crate::error::Result;
use serde::{Deserialize, Serialize};
use sqlx::PgPool;
use std::collections::{HashMap, HashSet};
use tracing::{debug, info};
// ---------------------------------------------------------------------------
// Scoring types
// ---------------------------------------------------------------------------
/// Breakdown of individual scoring signals.
#[derive(Debug, Clone, Default, Serialize, Deserialize)]
pub struct ScoreBreakdown {
pub department: f32,
pub uniqueness: f32,
pub volume: f32,
}
/// Result of scoring a single instructor-RMP candidate pair.
#[derive(Debug, Clone)]
pub struct MatchScore {
pub score: f32,
pub breakdown: ScoreBreakdown,
}
// ---------------------------------------------------------------------------
// Thresholds
// ---------------------------------------------------------------------------
/// Minimum composite score to store a candidate row.
const MIN_CANDIDATE_THRESHOLD: f32 = 0.40;
/// Score at or above which a candidate is auto-accepted.
const AUTO_ACCEPT_THRESHOLD: f32 = 0.85;
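// Scores in [0.40, 0.85) are stored as candidates awaiting manual review.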
// ---------------------------------------------------------------------------
// Weights
// ---------------------------------------------------------------------------
const WEIGHT_DEPARTMENT: f32 = 0.50;
const WEIGHT_UNIQUENESS: f32 = 0.30;
const WEIGHT_VOLUME: f32 = 0.20;
// ---------------------------------------------------------------------------
// Pure scoring functions
// ---------------------------------------------------------------------------
/// Check if an instructor's subjects overlap with an RMP department.
///
/// Returns `1.0` for a match, `0.2` for a mismatch, `0.5` when the RMP
/// department is unknown.
fn department_similarity(subjects: &[String], rmp_department: Option<&str>) -> f32 {
let Some(dept) = rmp_department else {
return 0.5;
};
let dept_lower = dept.to_lowercase();
// Quick check: does any subject appear directly in the department string
// or vice-versa?
for subj in subjects {
let subj_lower = subj.to_lowercase();
if dept_lower.contains(&subj_lower) || subj_lower.contains(&dept_lower) {
return 1.0;
}
// Handle common UTSA abbreviation mappings.
if matches_known_abbreviation(&subj_lower, &dept_lower) {
return 1.0;
}
}
0.2
}
/// Expand common subject abbreviations used at UTSA and check for overlap.
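/// e.g. subject "cs" overlaps a department containing "computer science".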
fn matches_known_abbreviation(subject: &str, department: &str) -> bool {
const MAPPINGS: &[(&str, &[&str])] = &[
("cs", &["computer science"]),
("ece", &["electrical", "computer engineering"]),
("ee", &["electrical engineering", "electrical"]),
("me", &["mechanical engineering", "mechanical"]),
("ce", &["civil engineering", "civil"]),
("bio", &["biology", "biological"]),
("chem", &["chemistry"]),
("phys", &["physics"]),
("math", &["mathematics"]),
("sta", &["statistics"]),
("eng", &["english"]),
("his", &["history"]),
("pol", &["political science"]),
("psy", &["psychology"]),
("soc", &["sociology"]),
("mus", &["music"]),
("art", &["art"]),
("phi", &["philosophy"]),
("eco", &["economics"]),
("acc", &["accounting"]),
("fin", &["finance"]),
("mgt", &["management"]),
("mkt", &["marketing"]),
("is", &["information systems"]),
("ms", &["management science"]),
("kin", &["kinesiology"]),
("com", &["communication"]),
];
for &(abbr, expansions) in MAPPINGS {
if subject == abbr {
return expansions
.iter()
.any(|expansion| department.contains(expansion));
}
}
false
}
/// Compute match confidence score (0.0 to 1.0) for an instructor-RMP pair.
///
/// Name matching is handled by the caller via pre-filtering on exact
/// normalized `(last, first)`, so only department, uniqueness, and volume
/// signals are scored here.
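///
/// Example: a unique name match in an overlapping department with 5+ ratings
/// scores 1.0*0.50 + 1.0*0.30 + 1.0*0.20 = 1.00, which auto-accepts.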
pub fn compute_match_score(
instructor_subjects: &[String],
rmp_department: Option<&str>,
candidate_count: usize,
rmp_num_ratings: i32,
) -> MatchScore {
// --- Department (0.50) ---
let dept_score = department_similarity(instructor_subjects, rmp_department);
// --- Uniqueness (0.30) ---
let uniqueness_score = match candidate_count {
0 | 1 => 1.0,
2 => 0.5,
_ => 0.2,
};
// --- Volume (0.20) ---
let volume_score = ((rmp_num_ratings as f32).ln_1p() / 5.0_f32.ln_1p()).clamp(0.0, 1.0);
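    // ln_1p scaling: 0 ratings -> 0.0, 5 ratings -> 1.0; the clamp means any
    // count above 5 saturates the volume signal.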
let composite = dept_score * WEIGHT_DEPARTMENT
+ uniqueness_score * WEIGHT_UNIQUENESS
+ volume_score * WEIGHT_VOLUME;
MatchScore {
score: composite,
breakdown: ScoreBreakdown {
department: dept_score,
uniqueness: uniqueness_score,
volume: volume_score,
},
}
}
// ---------------------------------------------------------------------------
// Candidate generation (DB)
// ---------------------------------------------------------------------------
/// Statistics returned from candidate generation.
#[derive(Debug)]
pub struct MatchingStats {
pub total_unmatched: usize,
pub candidates_created: usize,
pub auto_matched: usize,
pub skipped_unparseable: usize,
pub skipped_no_candidates: usize,
}
/// Lightweight row for building the in-memory RMP name index.
struct RmpProfForMatching {
legacy_id: i32,
department: Option<String>,
num_ratings: i32,
}
/// Generate match candidates for all unmatched instructors.
///
/// For each unmatched instructor:
/// 1. Parse `display_name` into (last, first).
/// 2. Find RMP professors with matching normalized name.
/// 3. Score each candidate.
/// 4. Store candidates scoring above [`MIN_CANDIDATE_THRESHOLD`].
/// 5. Auto-accept if the top candidate scores ≥ [`AUTO_ACCEPT_THRESHOLD`]
/// and no existing rejected candidate exists for that pair.
///
/// Already-evaluated instructor-RMP pairs (any status) are skipped.
pub async fn generate_candidates(db_pool: &PgPool) -> Result<MatchingStats> {
// 1. Load unmatched instructors
let instructors: Vec<(i32, String)> = sqlx::query_as(
"SELECT id, display_name FROM instructors WHERE rmp_match_status = 'unmatched'",
)
.fetch_all(db_pool)
.await?;
if instructors.is_empty() {
info!("No unmatched instructors to generate candidates for");
return Ok(MatchingStats {
total_unmatched: 0,
candidates_created: 0,
auto_matched: 0,
skipped_unparseable: 0,
skipped_no_candidates: 0,
});
}
let instructor_ids: Vec<i32> = instructors.iter().map(|(id, _)| *id).collect();
let total_unmatched = instructors.len();
// 2. Load instructor subjects
let subject_rows: Vec<(i32, String)> = sqlx::query_as(
r#"
SELECT DISTINCT ci.instructor_id, c.subject
FROM course_instructors ci
JOIN courses c ON c.id = ci.course_id
WHERE ci.instructor_id = ANY($1)
"#,
)
.bind(&instructor_ids)
.fetch_all(db_pool)
.await?;
let mut subject_map: HashMap<i32, Vec<String>> = HashMap::new();
for (iid, subject) in subject_rows {
subject_map.entry(iid).or_default().push(subject);
}
// 3. Load all RMP professors
let prof_rows: Vec<(i32, String, String, Option<String>, i32)> = sqlx::query_as(
"SELECT legacy_id, first_name, last_name, department, num_ratings FROM rmp_professors",
)
.fetch_all(db_pool)
.await?;
// Build name index: (normalized_last, normalized_first) -> Vec<RmpProfForMatching>
let mut name_index: HashMap<(String, String), Vec<RmpProfForMatching>> = HashMap::new();
for (legacy_id, first_name, last_name, department, num_ratings) in prof_rows {
let key = (normalize(&last_name), normalize(&first_name));
name_index.entry(key).or_default().push(RmpProfForMatching {
legacy_id,
department,
num_ratings,
});
}
// 4. Load existing candidate pairs (and rejected subset) in a single query
let candidate_rows: Vec<(i32, i32, String)> =
sqlx::query_as("SELECT instructor_id, rmp_legacy_id, status FROM rmp_match_candidates")
.fetch_all(db_pool)
.await?;
let mut existing_pairs: HashSet<(i32, i32)> = HashSet::with_capacity(candidate_rows.len());
let mut rejected_pairs: HashSet<(i32, i32)> = HashSet::new();
for (iid, lid, status) in candidate_rows {
existing_pairs.insert((iid, lid));
if status == "rejected" {
rejected_pairs.insert((iid, lid));
}
}
// 5. Score and collect candidates
let empty_subjects: Vec<String> = Vec::new();
let mut candidates: Vec<(i32, i32, f32, serde_json::Value)> = Vec::new();
let mut auto_accept: Vec<(i32, i32)> = Vec::new(); // (instructor_id, legacy_id)
let mut skipped_unparseable = 0usize;
let mut skipped_no_candidates = 0usize;
for (instructor_id, display_name) in &instructors {
let Some((norm_last, norm_first)) = parse_display_name(display_name) else {
skipped_unparseable += 1;
debug!(
instructor_id,
display_name, "Unparseable display name, skipping"
);
continue;
};
let subjects = subject_map.get(instructor_id).unwrap_or(&empty_subjects);
let key = (norm_last.clone(), norm_first.clone());
let Some(rmp_candidates) = name_index.get(&key) else {
skipped_no_candidates += 1;
continue;
};
let candidate_count = rmp_candidates.len();
let mut best: Option<(f32, i32)> = None;
for prof in rmp_candidates {
let pair = (*instructor_id, prof.legacy_id);
if existing_pairs.contains(&pair) {
continue;
}
let ms = compute_match_score(
subjects,
prof.department.as_deref(),
candidate_count,
prof.num_ratings,
);
if ms.score < MIN_CANDIDATE_THRESHOLD {
continue;
}
let breakdown_json =
serde_json::to_value(&ms.breakdown).unwrap_or_else(|_| serde_json::json!({}));
candidates.push((*instructor_id, prof.legacy_id, ms.score, breakdown_json));
match best {
Some((s, _)) if ms.score > s => best = Some((ms.score, prof.legacy_id)),
None => best = Some((ms.score, prof.legacy_id)),
_ => {}
}
}
// Auto-accept the top candidate if it meets the threshold and is not
// previously rejected.
if let Some((score, legacy_id)) = best
&& score >= AUTO_ACCEPT_THRESHOLD
&& !rejected_pairs.contains(&(*instructor_id, legacy_id))
{
auto_accept.push((*instructor_id, legacy_id));
}
}
    // 6-7. Write candidates and auto-accept within a single transaction
let candidates_created = candidates.len();
let auto_matched = auto_accept.len();
let mut tx = db_pool.begin().await?;
// 6. Batch-insert candidates
if !candidates.is_empty() {
let c_instructor_ids: Vec<i32> = candidates.iter().map(|(iid, _, _, _)| *iid).collect();
let c_legacy_ids: Vec<i32> = candidates.iter().map(|(_, lid, _, _)| *lid).collect();
let c_scores: Vec<f32> = candidates.iter().map(|(_, _, s, _)| *s).collect();
let c_breakdowns: Vec<serde_json::Value> =
candidates.into_iter().map(|(_, _, _, b)| b).collect();
sqlx::query(
r#"
INSERT INTO rmp_match_candidates (instructor_id, rmp_legacy_id, score, score_breakdown)
SELECT v.instructor_id, v.rmp_legacy_id, v.score, v.score_breakdown
FROM UNNEST($1::int4[], $2::int4[], $3::real[], $4::jsonb[])
AS v(instructor_id, rmp_legacy_id, score, score_breakdown)
ON CONFLICT (instructor_id, rmp_legacy_id) DO NOTHING
"#,
)
.bind(&c_instructor_ids)
.bind(&c_legacy_ids)
.bind(&c_scores)
.bind(&c_breakdowns)
.execute(&mut *tx)
.await?;
}
// 7. Auto-accept top candidates
if !auto_accept.is_empty() {
let aa_instructor_ids: Vec<i32> = auto_accept.iter().map(|(iid, _)| *iid).collect();
let aa_legacy_ids: Vec<i32> = auto_accept.iter().map(|(_, lid)| *lid).collect();
// Mark the candidate row as accepted
sqlx::query(
r#"
UPDATE rmp_match_candidates mc
SET status = 'accepted', resolved_at = NOW()
FROM UNNEST($1::int4[], $2::int4[]) AS v(instructor_id, rmp_legacy_id)
WHERE mc.instructor_id = v.instructor_id
AND mc.rmp_legacy_id = v.rmp_legacy_id
"#,
)
.bind(&aa_instructor_ids)
.bind(&aa_legacy_ids)
.execute(&mut *tx)
.await?;
// Insert links into instructor_rmp_links
sqlx::query(
r#"
INSERT INTO instructor_rmp_links (instructor_id, rmp_legacy_id, source)
SELECT v.instructor_id, v.rmp_legacy_id, 'auto'
FROM UNNEST($1::int4[], $2::int4[]) AS v(instructor_id, rmp_legacy_id)
ON CONFLICT (rmp_legacy_id) DO NOTHING
"#,
)
.bind(&aa_instructor_ids)
.bind(&aa_legacy_ids)
.execute(&mut *tx)
.await?;
// Update instructor match status
sqlx::query(
r#"
UPDATE instructors i
SET rmp_match_status = 'auto'
FROM UNNEST($1::int4[]) AS v(instructor_id)
WHERE i.id = v.instructor_id
"#,
)
.bind(&aa_instructor_ids)
.execute(&mut *tx)
.await?;
}
tx.commit().await?;
let stats = MatchingStats {
total_unmatched,
candidates_created,
auto_matched,
skipped_unparseable,
skipped_no_candidates,
};
info!(
total_unmatched = stats.total_unmatched,
candidates_created = stats.candidates_created,
auto_matched = stats.auto_matched,
skipped_unparseable = stats.skipped_unparseable,
skipped_no_candidates = stats.skipped_no_candidates,
"Candidate generation complete"
);
Ok(stats)
}
// ---------------------------------------------------------------------------
// Tests
// ---------------------------------------------------------------------------
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_ideal_candidate_high_score() {
let ms = compute_match_score(
&["CS".to_string()],
Some("Computer Science"),
1, // unique candidate
50, // decent ratings
);
        // dept 1.0*0.50 + unique 1.0*0.30 + volume 1.0*0.20 = 1.00
        // (50 ratings saturates the volume signal, which caps at 5 ratings)
assert!(ms.score >= 0.85, "Expected score >= 0.85, got {}", ms.score);
assert_eq!(ms.breakdown.uniqueness, 1.0);
assert_eq!(ms.breakdown.department, 1.0);
}
#[test]
fn test_ambiguous_candidates_lower_score() {
let unique = compute_match_score(&[], None, 1, 10);
let ambiguous = compute_match_score(&[], None, 3, 10);
assert!(
unique.score > ambiguous.score,
"Unique ({}) should outscore ambiguous ({})",
unique.score,
ambiguous.score
);
assert_eq!(unique.breakdown.uniqueness, 1.0);
assert_eq!(ambiguous.breakdown.uniqueness, 0.2);
}
#[test]
fn test_no_department_neutral() {
let ms = compute_match_score(&["CS".to_string()], None, 1, 10);
assert_eq!(ms.breakdown.department, 0.5);
}
#[test]
fn test_department_match() {
let ms = compute_match_score(&["CS".to_string()], Some("Computer Science"), 1, 10);
assert_eq!(ms.breakdown.department, 1.0);
}
#[test]
fn test_department_mismatch() {
let ms = compute_match_score(&["CS".to_string()], Some("History"), 1, 10);
assert_eq!(ms.breakdown.department, 0.2);
}
#[test]
fn test_department_match_outscores_mismatch() {
let matched = compute_match_score(&["CS".to_string()], Some("Computer Science"), 1, 10);
let mismatched = compute_match_score(&["CS".to_string()], Some("History"), 1, 10);
assert!(
matched.score > mismatched.score,
"Department match ({}) should outscore mismatch ({})",
matched.score,
mismatched.score
);
}
#[test]
fn test_volume_scaling() {
let zero = compute_match_score(&[], None, 1, 0);
let many = compute_match_score(&[], None, 1, 100);
assert!(
many.breakdown.volume > zero.breakdown.volume,
"100 ratings ({}) should outscore 0 ratings ({})",
many.breakdown.volume,
zero.breakdown.volume
);
assert_eq!(zero.breakdown.volume, 0.0);
assert!(
many.breakdown.volume > 0.9,
"100 ratings should be near max"
);
}
}
+122 -35
@@ -1,15 +1,40 @@
//! Database operations for scrape job queue management.
use crate::data::models::{ScrapeJob, ScrapePriority, TargetType};
use crate::data::models::{ScrapeJob, ScrapePriority, TargetType, UpsertCounts};
use crate::error::Result;
use chrono::{DateTime, Utc};
use sqlx::PgPool;
use std::collections::HashSet;
/// Force-unlock all jobs that have a non-NULL `locked_at`.
///
/// Intended to be called once at startup to recover jobs left locked by
/// a previous unclean shutdown (crash, OOM kill, etc.).
///
/// # Returns
/// The number of jobs that were unlocked.
pub async fn force_unlock_all(db_pool: &PgPool) -> Result<u64> {
let result = sqlx::query(
"UPDATE scrape_jobs SET locked_at = NULL, queued_at = NOW() WHERE locked_at IS NOT NULL",
)
.execute(db_pool)
.await?;
Ok(result.rows_affected())
}
/// How long a lock can be held before it is considered expired and reclaimable.
///
/// This acts as a safety net for cases where a worker dies without unlocking
/// (OOM kill, crash, network partition). Under normal operation, the worker's
/// own job timeout fires well before this threshold.
const LOCK_EXPIRY: std::time::Duration = std::time::Duration::from_secs(10 * 60);
/// Atomically fetch and lock the next available scrape job.
///
/// Uses `FOR UPDATE SKIP LOCKED` to allow multiple workers to poll the queue
/// concurrently without conflicts. Only jobs that are unlocked and ready to
/// execute (based on `execute_at`) are considered.
/// concurrently without conflicts. Considers jobs that are:
/// - Unlocked and ready to execute, OR
/// - Locked but past [`LOCK_EXPIRY`] (abandoned by a dead worker)
///
/// # Arguments
/// * `db_pool` - PostgreSQL connection pool
@@ -20,9 +45,16 @@ use std::collections::HashSet;
pub async fn fetch_and_lock_job(db_pool: &PgPool) -> Result<Option<ScrapeJob>> {
let mut tx = db_pool.begin().await?;
let lock_expiry_secs = LOCK_EXPIRY.as_secs() as i32;
let job = sqlx::query_as::<_, ScrapeJob>(
"SELECT * FROM scrape_jobs WHERE locked_at IS NULL AND execute_at <= NOW() ORDER BY priority DESC, execute_at ASC LIMIT 1 FOR UPDATE SKIP LOCKED"
"SELECT * FROM scrape_jobs \
WHERE (locked_at IS NULL OR locked_at < NOW() - make_interval(secs => $1::double precision)) \
AND execute_at <= NOW() \
ORDER BY priority DESC, execute_at ASC \
LIMIT 1 \
FOR UPDATE SKIP LOCKED"
)
.bind(lock_expiry_secs)
.fetch_optional(&mut *tx)
.await?;
@@ -68,10 +100,11 @@ pub async fn unlock_job(job_id: i32, db_pool: &PgPool) -> Result<()> {
Ok(())
}
/// Atomically unlock a job and increment its retry count.
/// Atomically unlock a job, increment its retry count, and reset `queued_at`.
///
/// Returns whether the job still has retries remaining. This is determined
/// atomically in the database to avoid race conditions between workers.
/// Returns the new `queued_at` timestamp if retries remain, or `None` if
/// the job has exhausted its retries. This is determined atomically in the
/// database to avoid race conditions between workers.
///
/// # Arguments
/// * `job_id` - The database ID of the job
@@ -79,31 +112,31 @@ pub async fn unlock_job(job_id: i32, db_pool: &PgPool) -> Result<()> {
/// * `db_pool` - PostgreSQL connection pool
///
/// # Returns
/// * `Ok(true)` if the job was unlocked and retries remain
/// * `Ok(false)` if the job has exhausted its retries
/// * `Ok(Some(queued_at))` if the job was unlocked and retries remain
/// * `Ok(None)` if the job has exhausted its retries
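/// e.g. with `max_retries = 3`, the first three failures return `Some(..)`
/// and the job is retried; the fourth failure returns `None`.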
pub async fn unlock_and_increment_retry(
job_id: i32,
max_retries: i32,
db_pool: &PgPool,
) -> Result<bool> {
let result = sqlx::query_scalar::<_, Option<i32>>(
) -> Result<Option<chrono::DateTime<chrono::Utc>>> {
let result = sqlx::query_scalar::<_, Option<chrono::DateTime<chrono::Utc>>>(
"UPDATE scrape_jobs
SET locked_at = NULL, retry_count = retry_count + 1
SET locked_at = NULL, retry_count = retry_count + 1, queued_at = NOW()
WHERE id = $1
RETURNING CASE WHEN retry_count < $2 THEN retry_count ELSE NULL END",
RETURNING CASE WHEN retry_count <= $2 THEN queued_at ELSE NULL END",
)
.bind(job_id)
.bind(max_retries)
.fetch_one(db_pool)
.await?;
Ok(result.is_some())
Ok(result)
}
/// Find existing unlocked job payloads matching the given target type and candidates.
/// Find existing job payloads matching the given target type and candidates.
///
/// Returns a set of stringified JSON payloads that already exist in the queue,
/// used for deduplication when scheduling new jobs.
/// Returns a set of stringified JSON payloads that already exist in the queue
/// (both locked and unlocked), used for deduplication when scheduling new jobs.
///
/// # Arguments
/// * `target_type` - The target type to filter by
@@ -111,7 +144,7 @@ pub async fn unlock_and_increment_retry(
/// * `db_pool` - PostgreSQL connection pool
///
/// # Returns
/// A `HashSet` of stringified JSON payloads that already have pending jobs
/// A `HashSet` of stringified JSON payloads that already have pending or in-progress jobs
pub async fn find_existing_job_payloads(
target_type: TargetType,
candidate_payloads: &[serde_json::Value],
@@ -119,7 +152,7 @@ pub async fn find_existing_job_payloads(
) -> Result<HashSet<String>> {
let existing_jobs: Vec<(serde_json::Value,)> = sqlx::query_as(
"SELECT target_payload FROM scrape_jobs
WHERE target_type = $1 AND target_payload = ANY($2) AND locked_at IS NULL",
WHERE target_type = $1 AND target_payload = ANY($2)",
)
.bind(target_type)
.bind(candidate_payloads)
@@ -134,7 +167,53 @@ pub async fn find_existing_job_payloads(
Ok(existing_payloads)
}
/// Batch insert scrape jobs in a single transaction.
/// Insert a scrape job result log entry.
#[allow(clippy::too_many_arguments)]
pub async fn insert_job_result(
target_type: TargetType,
payload: serde_json::Value,
priority: ScrapePriority,
queued_at: DateTime<Utc>,
started_at: DateTime<Utc>,
duration_ms: i32,
success: bool,
error_message: Option<&str>,
retry_count: i32,
counts: Option<&UpsertCounts>,
db_pool: &PgPool,
) -> Result<()> {
sqlx::query(
r#"
INSERT INTO scrape_job_results (
target_type, payload, priority,
queued_at, started_at, duration_ms,
success, error_message, retry_count,
courses_fetched, courses_changed, courses_unchanged,
audits_generated, metrics_generated
) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13, $14)
"#,
)
.bind(target_type)
.bind(&payload)
.bind(priority)
.bind(queued_at)
.bind(started_at)
.bind(duration_ms)
.bind(success)
.bind(error_message)
.bind(retry_count)
.bind(counts.map(|c| c.courses_fetched))
.bind(counts.map(|c| c.courses_changed))
.bind(counts.map(|c| c.courses_unchanged))
.bind(counts.map(|c| c.audits_generated))
.bind(counts.map(|c| c.metrics_generated))
.execute(db_pool)
.await?;
Ok(())
}
/// Batch insert scrape jobs using UNNEST for a single round-trip.
///
/// All jobs are inserted with `execute_at` set to the current time.
///
@@ -144,27 +223,35 @@ pub async fn find_existing_job_payloads(
pub async fn batch_insert_jobs(
jobs: &[(serde_json::Value, TargetType, ScrapePriority)],
db_pool: &PgPool,
) -> Result<()> {
) -> Result<Vec<ScrapeJob>> {
if jobs.is_empty() {
return Ok(());
return Ok(Vec::new());
}
let now = chrono::Utc::now();
let mut tx = db_pool.begin().await?;
let mut target_types: Vec<String> = Vec::with_capacity(jobs.len());
let mut payloads: Vec<serde_json::Value> = Vec::with_capacity(jobs.len());
let mut priorities: Vec<String> = Vec::with_capacity(jobs.len());
for (payload, target_type, priority) in jobs {
sqlx::query(
"INSERT INTO scrape_jobs (target_type, target_payload, priority, execute_at) VALUES ($1, $2, $3, $4)"
)
.bind(target_type)
.bind(payload)
.bind(priority)
.bind(now)
.execute(&mut *tx)
.await?;
target_types.push(format!("{target_type:?}"));
payloads.push(payload.clone());
priorities.push(format!("{priority:?}"));
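        // NOTE: assumes these enums' Debug output matches the Postgres
        // `target_type` / `scrape_priority` enum labels used in the casts below.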
}
tx.commit().await?;
let inserted = sqlx::query_as::<_, ScrapeJob>(
r#"
INSERT INTO scrape_jobs (target_type, target_payload, priority, execute_at, queued_at)
SELECT v.target_type::target_type, v.payload, v.priority::scrape_priority, NOW(), NOW()
FROM UNNEST($1::text[], $2::jsonb[], $3::text[])
AS v(target_type, payload, priority)
RETURNING *
"#,
)
.bind(&target_types)
.bind(&payloads)
.bind(&priorities)
.fetch_all(db_pool)
.await?;
Ok(())
Ok(inserted)
}
+90
@@ -0,0 +1,90 @@
//! Database query functions for user sessions.
use anyhow::Context;
use rand::Rng;
use sqlx::PgPool;
use super::models::UserSession;
use crate::error::Result;
/// Generate a cryptographically random 32-byte hex token.
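/// The token is 64 lowercase hex characters (256 bits of entropy).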
fn generate_token() -> String {
let bytes: [u8; 32] = rand::rng().random();
bytes.iter().map(|b| format!("{b:02x}")).collect()
}
/// Create a new session for a user with the given duration.
pub async fn create_session(
pool: &PgPool,
user_id: i64,
duration: std::time::Duration,
) -> Result<UserSession> {
let token = generate_token();
let duration_secs = duration.as_secs() as i64;
sqlx::query_as::<_, UserSession>(
r#"
INSERT INTO user_sessions (id, user_id, expires_at)
VALUES ($1, $2, now() + make_interval(secs => $3::double precision))
RETURNING *
"#,
)
.bind(&token)
.bind(user_id)
.bind(duration_secs as f64)
.fetch_one(pool)
.await
.context("failed to create session")
}
/// Fetch a session by token, only if it has not expired.
pub async fn get_session(pool: &PgPool, token: &str) -> Result<Option<UserSession>> {
sqlx::query_as::<_, UserSession>(
"SELECT * FROM user_sessions WHERE id = $1 AND expires_at > now()",
)
.bind(token)
.fetch_optional(pool)
.await
.context("failed to get session")
}
/// Update the last-active timestamp for a session.
pub async fn touch_session(pool: &PgPool, token: &str) -> Result<()> {
sqlx::query("UPDATE user_sessions SET last_active_at = now() WHERE id = $1")
.bind(token)
.execute(pool)
.await
.context("failed to touch session")?;
Ok(())
}
/// Delete a session by token.
pub async fn delete_session(pool: &PgPool, token: &str) -> Result<()> {
sqlx::query("DELETE FROM user_sessions WHERE id = $1")
.bind(token)
.execute(pool)
.await
.context("failed to delete session")?;
Ok(())
}
/// Delete all sessions for a user. Returns the number of sessions deleted.
#[allow(dead_code)] // Available for admin user-deletion flow
pub async fn delete_user_sessions(pool: &PgPool, user_id: i64) -> Result<u64> {
let result = sqlx::query("DELETE FROM user_sessions WHERE user_id = $1")
.bind(user_id)
.execute(pool)
.await
.context("failed to delete user sessions")?;
Ok(result.rows_affected())
}
/// Delete all expired sessions. Returns the number of sessions cleaned up.
#[allow(dead_code)] // Called by SessionCache::cleanup_expired (not yet wired to periodic task)
pub async fn cleanup_expired(pool: &PgPool) -> Result<u64> {
let result = sqlx::query("DELETE FROM user_sessions WHERE expires_at <= now()")
.execute(pool)
.await
.context("failed to cleanup expired sessions")?;
Ok(result.rows_affected())
}
+86
@@ -0,0 +1,86 @@
//! Database query functions for users.
use anyhow::Context;
use sqlx::PgPool;
use super::models::User;
use crate::error::Result;
/// Insert a new user or update username/avatar on conflict.
pub async fn upsert_user(
pool: &PgPool,
discord_id: i64,
username: &str,
avatar_hash: Option<&str>,
) -> Result<User> {
sqlx::query_as::<_, User>(
r#"
INSERT INTO users (discord_id, discord_username, discord_avatar_hash)
VALUES ($1, $2, $3)
ON CONFLICT (discord_id) DO UPDATE
SET discord_username = EXCLUDED.discord_username,
discord_avatar_hash = EXCLUDED.discord_avatar_hash,
updated_at = now()
RETURNING *
"#,
)
.bind(discord_id)
.bind(username)
.bind(avatar_hash)
.fetch_one(pool)
.await
.context("failed to upsert user")
}
/// Fetch a user by Discord ID.
pub async fn get_user(pool: &PgPool, discord_id: i64) -> Result<Option<User>> {
sqlx::query_as::<_, User>("SELECT * FROM users WHERE discord_id = $1")
.bind(discord_id)
.fetch_optional(pool)
.await
.context("failed to get user")
}
/// List all users ordered by creation date (newest first).
pub async fn list_users(pool: &PgPool) -> Result<Vec<User>> {
sqlx::query_as::<_, User>("SELECT * FROM users ORDER BY created_at DESC")
.fetch_all(pool)
.await
.context("failed to list users")
}
/// Set the admin flag for a user, returning the updated user if found.
pub async fn set_admin(pool: &PgPool, discord_id: i64, is_admin: bool) -> Result<Option<User>> {
sqlx::query_as::<_, User>(
r#"
UPDATE users
SET is_admin = $2, updated_at = now()
WHERE discord_id = $1
RETURNING *
"#,
)
.bind(discord_id)
.bind(is_admin)
.fetch_optional(pool)
.await
.context("failed to set admin status")
}
/// Ensure a seed admin exists. Upserts with `is_admin = true` and a placeholder
/// username that will be replaced on first OAuth login.
pub async fn ensure_seed_admin(pool: &PgPool, discord_id: i64) -> Result<User> {
sqlx::query_as::<_, User>(
r#"
INSERT INTO users (discord_id, discord_username, is_admin)
VALUES ($1, 'seed-admin', true)
ON CONFLICT (discord_id) DO UPDATE
SET is_admin = true,
updated_at = now()
RETURNING *
"#,
)
.bind(discord_id)
.fetch_one(pool)
.await
.context("failed to ensure seed admin")
}
+1
@@ -7,6 +7,7 @@ pub mod data;
pub mod error;
pub mod formatter;
pub mod logging;
pub mod rmp;
pub mod scraper;
pub mod services;
pub mod signals;
+4 -5
@@ -1,5 +1,5 @@
use crate::app::App;
use crate::cli::{Args, ServiceName, determine_enabled_services};
use crate::cli::{Args, ServiceName};
use crate::logging::setup_logging;
use clap::Parser;
use std::process::ExitCode;
@@ -14,11 +14,11 @@ mod data;
mod error;
mod formatter;
mod logging;
mod rmp;
mod scraper;
mod services;
mod signals;
mod state;
#[allow(dead_code)]
mod status;
mod web;
@@ -29,9 +29,8 @@ async fn main() -> ExitCode {
// Parse CLI arguments
let args = Args::parse();
// Determine which services should be enabled
let enabled_services: Vec<ServiceName> =
determine_enabled_services(&args).expect("Failed to determine enabled services");
// Always run all services
let enabled_services = ServiceName::all();
// Create and initialize the application
let mut app = App::new().await.expect("Failed to initialize application");
+156
@@ -0,0 +1,156 @@
//! RateMyProfessors GraphQL client for bulk professor data sync.
use anyhow::Result;
use serde::{Deserialize, Serialize};
use tracing::{debug, info};
/// UTSA's school ID on RateMyProfessors (base64 of "School-1516").
const UTSA_SCHOOL_ID: &str = "U2Nob29sLTE1MTY=";
/// Basic auth header value (base64 of "test:test").
const AUTH_HEADER: &str = "Basic dGVzdDp0ZXN0";
/// GraphQL endpoint.
const GRAPHQL_URL: &str = "https://www.ratemyprofessors.com/graphql";
/// Page size for paginated fetches.
const PAGE_SIZE: u32 = 100;
/// A professor record from RateMyProfessors.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct RmpProfessor {
pub legacy_id: i32,
pub graphql_id: String,
pub first_name: String,
pub last_name: String,
pub department: Option<String>,
pub avg_rating: Option<f32>,
pub avg_difficulty: Option<f32>,
pub num_ratings: i32,
pub would_take_again_pct: Option<f32>,
}
/// Client for fetching professor data from RateMyProfessors.
pub struct RmpClient {
http: reqwest::Client,
}
impl Default for RmpClient {
fn default() -> Self {
Self::new()
}
}
impl RmpClient {
pub fn new() -> Self {
Self {
http: reqwest::Client::new(),
}
}
/// Fetch all professors for UTSA via paginated GraphQL queries.
pub async fn fetch_all_professors(&self) -> Result<Vec<RmpProfessor>> {
let mut all = Vec::new();
let mut cursor: Option<String> = None;
loop {
let after_clause = match &cursor {
Some(c) => format!(r#", after: "{}""#, c),
None => String::new(),
};
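            // Assumption: RMP cursors are opaque base64 with no embedded quotes,
            // since the cursor is interpolated directly into the query string.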
let query = format!(
r#"query {{
newSearch {{
teachers(query: {{ text: "", schoolID: "{school_id}" }}, first: {page_size}{after}) {{
edges {{
cursor
node {{
id
legacyId
firstName
lastName
department
avgRating
avgDifficulty
numRatings
wouldTakeAgainPercent
}}
}}
pageInfo {{
hasNextPage
endCursor
}}
}}
}}
}}"#,
school_id = UTSA_SCHOOL_ID,
page_size = PAGE_SIZE,
after = after_clause,
);
let body = serde_json::json!({ "query": query });
let resp = self
.http
.post(GRAPHQL_URL)
.header("Authorization", AUTH_HEADER)
.json(&body)
.send()
.await?;
let status = resp.status();
if !status.is_success() {
let text = resp.text().await.unwrap_or_default();
anyhow::bail!("RMP GraphQL request failed ({status}): {text}");
}
let json: serde_json::Value = resp.json().await?;
let teachers = &json["data"]["newSearch"]["teachers"];
let edges = teachers["edges"]
.as_array()
.ok_or_else(|| anyhow::anyhow!("Missing edges in RMP response"))?;
for edge in edges {
let node = &edge["node"];
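                // A negative wouldTakeAgainPercent appears to be RMP's
                // "no data" sentinel; the filter below maps it to None.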
let wta = node["wouldTakeAgainPercent"]
.as_f64()
.map(|v| v as f32)
.filter(|&v| v >= 0.0);
all.push(RmpProfessor {
legacy_id: node["legacyId"]
.as_i64()
.ok_or_else(|| anyhow::anyhow!("Missing legacyId"))?
as i32,
graphql_id: node["id"]
.as_str()
.ok_or_else(|| anyhow::anyhow!("Missing id"))?
.to_string(),
first_name: node["firstName"].as_str().unwrap_or_default().to_string(),
last_name: node["lastName"].as_str().unwrap_or_default().to_string(),
department: node["department"].as_str().map(|s| s.to_string()),
avg_rating: node["avgRating"].as_f64().map(|v| v as f32),
avg_difficulty: node["avgDifficulty"].as_f64().map(|v| v as f32),
num_ratings: node["numRatings"].as_i64().unwrap_or(0) as i32,
would_take_again_pct: wta,
});
}
let page_info = &teachers["pageInfo"];
let has_next = page_info["hasNextPage"].as_bool().unwrap_or(false);
if !has_next {
break;
}
cursor = page_info["endCursor"].as_str().map(|s| s.to_string());
debug!(fetched = all.len(), "RMP pagination: fetching next page");
}
info!(total = all.len(), "Fetched all RMP professors");
Ok(all)
}
}
+4 -3
@@ -1,7 +1,7 @@
pub mod subject;
use crate::banner::BannerApi;
use crate::data::models::TargetType;
use crate::data::models::{TargetType, UpsertCounts};
use crate::error::Result;
use serde::{Deserialize, Serialize};
use sqlx::PgPool;
@@ -32,8 +32,9 @@ pub trait Job: Send + Sync {
#[allow(dead_code)]
fn target_type(&self) -> TargetType;
/// Process the job with the given API client and database pool
async fn process(&self, banner_api: &BannerApi, db_pool: &PgPool) -> Result<()>;
/// Process the job with the given API client and database pool.
/// Returns upsert effectiveness counts on success.
async fn process(&self, banner_api: &BannerApi, db_pool: &PgPool) -> Result<UpsertCounts>;
/// Get a human-readable description of the job
fn description(&self) -> String;
+8 -6
@@ -1,7 +1,7 @@
use super::Job;
use crate::banner::{BannerApi, SearchQuery, Term};
use crate::data::batch::batch_upsert_courses;
use crate::data::models::TargetType;
use crate::data::models::{TargetType, UpsertCounts};
use crate::error::Result;
use serde::{Deserialize, Serialize};
use sqlx::PgPool;
@@ -26,7 +26,7 @@ impl Job for SubjectJob {
}
#[tracing::instrument(skip(self, banner_api, db_pool), fields(subject = %self.subject))]
async fn process(&self, banner_api: &BannerApi, db_pool: &PgPool) -> Result<()> {
async fn process(&self, banner_api: &BannerApi, db_pool: &PgPool) -> Result<UpsertCounts> {
let subject_code = &self.subject;
// Get the current term
@@ -37,17 +37,19 @@ impl Job for SubjectJob {
.search(&term, &query, "subjectDescription", false)
.await?;
if let Some(courses_from_api) = search_result.data {
let counts = if let Some(courses_from_api) = search_result.data {
info!(
subject = %subject_code,
count = courses_from_api.len(),
"Found courses"
);
batch_upsert_courses(&courses_from_api, db_pool).await?;
}
batch_upsert_courses(&courses_from_api, db_pool).await?
} else {
UpsertCounts::default()
};
debug!(subject = %subject_code, "Subject job completed");
Ok(())
Ok(counts)
}
fn description(&self) -> String {
+41 -7
@@ -3,11 +3,14 @@ pub mod scheduler;
pub mod worker;
use crate::banner::BannerApi;
use crate::data::scrape_jobs;
use crate::services::Service;
use crate::state::ReferenceCache;
use crate::status::{ServiceStatus, ServiceStatusRegistry};
use crate::web::ws::ScrapeJobEvent;
use sqlx::PgPool;
use std::sync::Arc;
use tokio::sync::broadcast;
use tokio::sync::{RwLock, broadcast};
use tokio::task::JoinHandle;
use tracing::{info, warn};
@@ -21,7 +24,9 @@ use self::worker::Worker;
pub struct ScraperService {
db_pool: PgPool,
banner_api: Arc<BannerApi>,
reference_cache: Arc<RwLock<ReferenceCache>>,
service_statuses: ServiceStatusRegistry,
job_events_tx: broadcast::Sender<ScrapeJobEvent>,
scheduler_handle: Option<JoinHandle<()>>,
worker_handles: Vec<JoinHandle<()>>,
shutdown_tx: Option<broadcast::Sender<()>>,
@@ -29,11 +34,19 @@ pub struct ScraperService {
impl ScraperService {
/// Creates a new `ScraperService`.
pub fn new(db_pool: PgPool, banner_api: Arc<BannerApi>, service_statuses: ServiceStatusRegistry) -> Self {
pub fn new(
db_pool: PgPool,
banner_api: Arc<BannerApi>,
reference_cache: Arc<RwLock<ReferenceCache>>,
service_statuses: ServiceStatusRegistry,
job_events_tx: broadcast::Sender<ScrapeJobEvent>,
) -> Self {
Self {
db_pool,
banner_api,
reference_cache,
service_statuses,
job_events_tx,
scheduler_handle: None,
worker_handles: Vec::new(),
shutdown_tx: None,
@@ -41,14 +54,29 @@ impl ScraperService {
}
/// Starts the scheduler and a pool of workers.
pub fn start(&mut self) {
///
/// Force-unlocks any jobs left locked by a previous unclean shutdown before
/// spawning workers, so those jobs re-enter the queue immediately.
pub async fn start(&mut self) {
// Recover jobs left locked by a previous crash/unclean shutdown
match scrape_jobs::force_unlock_all(&self.db_pool).await {
Ok(0) => {}
Ok(count) => warn!(count, "Force-unlocked stale jobs from previous run"),
Err(e) => warn!(error = ?e, "Failed to force-unlock stale jobs"),
}
info!("ScraperService starting");
// Create shutdown channel
let (shutdown_tx, _) = broadcast::channel(1);
self.shutdown_tx = Some(shutdown_tx.clone());
let scheduler = Scheduler::new(self.db_pool.clone(), self.banner_api.clone());
let scheduler = Scheduler::new(
self.db_pool.clone(),
self.banner_api.clone(),
self.reference_cache.clone(),
self.job_events_tx.clone(),
);
let shutdown_rx = shutdown_tx.subscribe();
let scheduler_handle = tokio::spawn(async move {
scheduler.run(shutdown_rx).await;
@@ -58,7 +86,12 @@ impl ScraperService {
let worker_count = 4; // This could be configurable
for i in 0..worker_count {
let worker = Worker::new(i, self.db_pool.clone(), self.banner_api.clone());
let worker = Worker::new(
i,
self.db_pool.clone(),
self.banner_api.clone(),
self.job_events_tx.clone(),
);
let shutdown_rx = shutdown_tx.subscribe();
let worker_handle = tokio::spawn(async move {
worker.run(shutdown_rx).await;
@@ -80,13 +113,14 @@ impl Service for ScraperService {
}
async fn run(&mut self) -> Result<(), anyhow::Error> {
self.start();
self.start().await;
std::future::pending::<()>().await;
Ok(())
}
async fn shutdown(&mut self) -> Result<(), anyhow::Error> {
self.service_statuses.set("scraper", ServiceStatus::Disabled);
self.service_statuses
.set("scraper", ServiceStatus::Disabled);
info!("Shutting down scraper service");
// Send shutdown signal to all tasks
+214 -17
@@ -1,28 +1,46 @@
use crate::banner::{BannerApi, Term};
use crate::data::models::{ScrapePriority, TargetType};
use crate::data::models::{ReferenceData, ScrapePriority, TargetType};
use crate::data::scrape_jobs;
use crate::error::Result;
use crate::rmp::RmpClient;
use crate::scraper::jobs::subject::SubjectJob;
use crate::state::ReferenceCache;
use crate::web::ws::{ScrapeJobDto, ScrapeJobEvent};
use serde_json::json;
use sqlx::PgPool;
use std::sync::Arc;
use std::time::Duration;
use tokio::sync::broadcast;
use std::time::{Duration, Instant};
use tokio::sync::{RwLock, broadcast};
use tokio::time;
use tokio_util::sync::CancellationToken;
use tracing::{debug, error, info, warn};
/// How often reference data is re-scraped (6 hours).
const REFERENCE_DATA_INTERVAL: Duration = Duration::from_secs(6 * 60 * 60);
/// How often RMP data is synced (24 hours).
const RMP_SYNC_INTERVAL: Duration = Duration::from_secs(24 * 60 * 60);
/// Periodically analyzes data and enqueues prioritized scrape jobs.
pub struct Scheduler {
db_pool: PgPool,
banner_api: Arc<BannerApi>,
reference_cache: Arc<RwLock<ReferenceCache>>,
job_events_tx: broadcast::Sender<ScrapeJobEvent>,
}
impl Scheduler {
pub fn new(db_pool: PgPool, banner_api: Arc<BannerApi>) -> Self {
pub fn new(
db_pool: PgPool,
banner_api: Arc<BannerApi>,
reference_cache: Arc<RwLock<ReferenceCache>>,
job_events_tx: broadcast::Sender<ScrapeJobEvent>,
) -> Self {
Self {
db_pool,
banner_api,
reference_cache,
job_events_tx,
}
}
@@ -41,33 +59,69 @@ impl Scheduler {
let work_interval = Duration::from_secs(60);
let mut next_run = time::Instant::now();
let mut current_work: Option<(tokio::task::JoinHandle<()>, CancellationToken)> = None;
// Scrape reference data immediately on first cycle
let mut last_ref_scrape = Instant::now() - REFERENCE_DATA_INTERVAL;
// Sync RMP data immediately on first cycle
let mut last_rmp_sync = Instant::now() - RMP_SYNC_INTERVAL;
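        // NOTE: `Instant - Duration` panics on underflow, which could occur if
        // the process starts very soon after boot on some platforms;
        // `Instant::now().checked_sub(..)` would be a safer construction.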
loop {
tokio::select! {
_ = time::sleep_until(next_run) => {
let cancel_token = CancellationToken::new();
let should_scrape_ref = last_ref_scrape.elapsed() >= REFERENCE_DATA_INTERVAL;
let should_sync_rmp = last_rmp_sync.elapsed() >= RMP_SYNC_INTERVAL;
// Spawn work in separate task to allow graceful cancellation during shutdown.
// Without this, shutdown would have to wait for the full scheduling cycle.
let work_handle = tokio::spawn({
let db_pool = self.db_pool.clone();
let banner_api = self.banner_api.clone();
let cancel_token = cancel_token.clone();
let reference_cache = self.reference_cache.clone();
let job_events_tx = self.job_events_tx.clone();
async move {
tokio::select! {
result = Self::schedule_jobs_impl(&db_pool, &banner_api) => {
if let Err(e) = result {
error!(error = ?e, "Failed to schedule jobs");
async move {
tokio::select! {
_ = async {
// RMP sync is independent of Banner API — run it
// concurrently with reference data scraping so it
// doesn't wait behind rate-limited Banner calls.
let rmp_fut = async {
if should_sync_rmp
&& let Err(e) = Self::sync_rmp_data(&db_pool).await
{
error!(error = ?e, "Failed to sync RMP data");
}
};
let ref_fut = async {
if should_scrape_ref
&& let Err(e) = Self::scrape_reference_data(&db_pool, &banner_api, &reference_cache).await
{
error!(error = ?e, "Failed to scrape reference data");
}
};
tokio::join!(rmp_fut, ref_fut);
if let Err(e) = Self::schedule_jobs_impl(&db_pool, &banner_api, Some(&job_events_tx)).await {
error!(error = ?e, "Failed to schedule jobs");
}
} => {}
_ = cancel_token.cancelled() => {
debug!("Scheduling work cancelled gracefully");
}
}
}
_ = cancel_token.cancelled() => {
debug!("Scheduling work cancelled gracefully");
}
}
}
});
if should_scrape_ref {
last_ref_scrape = Instant::now();
}
if should_sync_rmp {
last_rmp_sync = Instant::now();
}
current_work = Some((work_handle, cancel_token));
next_run = time::Instant::now() + work_interval;
}
@@ -101,7 +155,11 @@ impl Scheduler {
///
/// This is a static method (not &self) to allow it to be called from spawned tasks.
#[tracing::instrument(skip_all, fields(term))]
async fn schedule_jobs_impl(db_pool: &PgPool, banner_api: &BannerApi) -> Result<()> {
async fn schedule_jobs_impl(
db_pool: &PgPool,
banner_api: &BannerApi,
job_events_tx: Option<&broadcast::Sender<ScrapeJobEvent>>,
) -> Result<()> {
// For now, we will implement a simple baseline scheduling strategy:
// 1. Get a list of all subjects from the Banner API.
// 2. Query existing jobs for all subjects in a single query.
@@ -164,10 +222,149 @@ impl Scheduler {
.map(|(payload, _)| (payload, TargetType::Subject, ScrapePriority::Low))
.collect();
scrape_jobs::batch_insert_jobs(&jobs, db_pool).await?;
let inserted = scrape_jobs::batch_insert_jobs(&jobs, db_pool).await?;
if let Some(tx) = job_events_tx {
inserted.iter().for_each(|job| {
debug!(job_id = job.id, "Emitting JobCreated event");
let _ = tx.send(ScrapeJobEvent::JobCreated {
job: ScrapeJobDto::from(job),
});
});
}
}
debug!("Job scheduling complete");
Ok(())
}
/// Fetch all RMP professors, upsert to DB, and auto-match against Banner instructors.
#[tracing::instrument(skip_all)]
async fn sync_rmp_data(db_pool: &PgPool) -> Result<()> {
info!("Starting RMP data sync");
let client = RmpClient::new();
let professors = client.fetch_all_professors().await?;
let total = professors.len();
crate::data::rmp::batch_upsert_rmp_professors(&professors, db_pool).await?;
info!(total, "RMP professors upserted");
let stats = crate::data::rmp_matching::generate_candidates(db_pool).await?;
info!(
total,
stats.total_unmatched,
stats.candidates_created,
stats.auto_matched,
stats.skipped_unparseable,
stats.skipped_no_candidates,
"RMP sync complete"
);
Ok(())
}
/// Scrape all reference data categories from Banner and upsert to DB, then refresh cache.
#[tracing::instrument(skip_all)]
async fn scrape_reference_data(
db_pool: &PgPool,
banner_api: &BannerApi,
reference_cache: &Arc<RwLock<ReferenceCache>>,
) -> Result<()> {
let term = Term::get_current().inner().to_string();
info!(term = %term, "Scraping reference data");
let mut all_entries = Vec::new();
// Terms (fetched via session pool, no active session needed)
match banner_api.sessions.get_terms("", 1, 500).await {
Ok(terms) => {
debug!(count = terms.len(), "Fetched terms");
all_entries.extend(terms.into_iter().map(|t| ReferenceData {
category: "term".to_string(),
code: t.code,
description: t.description,
}));
}
Err(e) => warn!(error = ?e, "Failed to fetch terms"),
}
// Subjects
match banner_api.get_subjects("", &term, 1, 500).await {
Ok(pairs) => {
debug!(count = pairs.len(), "Fetched subjects");
all_entries.extend(pairs.into_iter().map(|p| ReferenceData {
category: "subject".to_string(),
code: p.code,
description: p.description,
}));
}
Err(e) => warn!(error = ?e, "Failed to fetch subjects"),
}
// Campuses
match banner_api.get_campuses(&term).await {
Ok(pairs) => {
debug!(count = pairs.len(), "Fetched campuses");
all_entries.extend(pairs.into_iter().map(|p| ReferenceData {
category: "campus".to_string(),
code: p.code,
description: p.description,
}));
}
Err(e) => warn!(error = ?e, "Failed to fetch campuses"),
}
// Instructional methods
match banner_api.get_instructional_methods(&term).await {
Ok(pairs) => {
debug!(count = pairs.len(), "Fetched instructional methods");
all_entries.extend(pairs.into_iter().map(|p| ReferenceData {
category: "instructional_method".to_string(),
code: p.code,
description: p.description,
}));
}
Err(e) => warn!(error = ?e, "Failed to fetch instructional methods"),
}
// Parts of term
match banner_api.get_parts_of_term(&term).await {
Ok(pairs) => {
debug!(count = pairs.len(), "Fetched parts of term");
all_entries.extend(pairs.into_iter().map(|p| ReferenceData {
category: "part_of_term".to_string(),
code: p.code,
description: p.description,
}));
}
Err(e) => warn!(error = ?e, "Failed to fetch parts of term"),
}
// Attributes
match banner_api.get_attributes(&term).await {
Ok(pairs) => {
debug!(count = pairs.len(), "Fetched attributes");
all_entries.extend(pairs.into_iter().map(|p| ReferenceData {
category: "attribute".to_string(),
code: p.code,
description: p.description,
}));
}
Err(e) => warn!(error = ?e, "Failed to fetch attributes"),
}
// Batch upsert all entries
let total = all_entries.len();
crate::data::reference::batch_upsert(&all_entries, db_pool).await?;
info!(total_entries = total, "Reference data upserted to DB");
// Refresh in-memory cache
let all = crate::data::reference::get_all(db_pool).await?;
let count = all.len();
*reference_cache.write().await = ReferenceCache::from_entries(all);
info!(entries = count, "Reference cache refreshed");
Ok(())
}
}
+178 -18
@@ -1,8 +1,10 @@
use crate::banner::{BannerApi, BannerApiError};
use crate::data::models::ScrapeJob;
use crate::data::models::{ScrapeJob, ScrapeJobStatus, UpsertCounts};
use crate::data::scrape_jobs;
use crate::error::Result;
use crate::scraper::jobs::{JobError, JobType};
use crate::web::ws::ScrapeJobEvent;
use chrono::{DateTime, Utc};
use sqlx::PgPool;
use std::sync::Arc;
use std::time::Duration;
@@ -10,6 +12,9 @@ use tokio::sync::broadcast;
use tokio::time;
use tracing::{Instrument, debug, error, info, trace, warn};
/// Maximum time a single job is allowed to run before being considered stuck.
const JOB_TIMEOUT: Duration = Duration::from_secs(5 * 60);
/// A single worker instance.
///
/// Each worker runs in its own asynchronous task and continuously polls the
@@ -18,14 +23,21 @@ pub struct Worker {
id: usize, // For logging purposes
db_pool: PgPool,
banner_api: Arc<BannerApi>,
job_events_tx: broadcast::Sender<ScrapeJobEvent>,
}
impl Worker {
pub fn new(id: usize, db_pool: PgPool, banner_api: Arc<BannerApi>) -> Self {
pub fn new(
id: usize,
db_pool: PgPool,
banner_api: Arc<BannerApi>,
job_events_tx: broadcast::Sender<ScrapeJobEvent>,
) -> Self {
Self {
id,
db_pool,
banner_api,
job_events_tx,
}
}
@@ -60,22 +72,57 @@ impl Worker {
let job_id = job.id;
let retry_count = job.retry_count;
let max_retries = job.max_retries;
let target_type = job.target_type;
let payload = job.target_payload.clone();
let priority = job.priority;
let queued_at = job.queued_at;
let started_at = Utc::now();
let start = std::time::Instant::now();
// Process the job, racing against shutdown signal
// Emit JobLocked event
let locked_at = started_at.to_rfc3339();
debug!(job_id, "Emitting JobLocked event");
let _ = self.job_events_tx.send(ScrapeJobEvent::JobLocked {
id: job_id,
locked_at,
status: ScrapeJobStatus::Processing,
});
// Process the job, racing against shutdown signal and timeout
let process_result = tokio::select! {
_ = shutdown_rx.recv() => {
self.handle_shutdown_during_processing(job_id).await;
break;
}
result = self.process_job(job) => result
result = async {
match time::timeout(JOB_TIMEOUT, self.process_job(job)).await {
Ok(result) => result,
Err(_elapsed) => {
Err(JobError::Recoverable(anyhow::anyhow!(
"job timed out after {}s",
JOB_TIMEOUT.as_secs()
)))
}
}
} => result
};
let duration = start.elapsed();
// Handle the job processing result
self.handle_job_result(job_id, retry_count, max_retries, process_result, duration)
.await;
self.handle_job_result(
job_id,
retry_count,
max_retries,
process_result,
duration,
target_type,
payload,
priority,
queued_at,
started_at,
)
.await;
}
}
@@ -87,7 +134,7 @@ impl Worker {
scrape_jobs::fetch_and_lock_job(&self.db_pool).await
}
async fn process_job(&self, job: ScrapeJob) -> Result<(), JobError> {
async fn process_job(&self, job: ScrapeJob) -> Result<UpsertCounts, JobError> {
// Convert the database job to our job type
let job_type = JobType::from_target_type_and_payload(job.target_type, job.target_payload)
.map_err(|e| JobError::Unrecoverable(anyhow::anyhow!(e)))?; // Parse errors are unrecoverable
@@ -114,9 +161,7 @@ impl Worker {
job_impl
.process(&self.banner_api, &self.db_pool)
.await
.map_err(JobError::Recoverable)?;
Ok(())
.map_err(JobError::Recoverable)
}
.instrument(span)
.await
@@ -130,7 +175,11 @@ impl Worker {
scrape_jobs::unlock_job(job_id, &self.db_pool).await
}
async fn unlock_and_increment_retry(&self, job_id: i32, max_retries: i32) -> Result<bool> {
async fn unlock_and_increment_retry(
&self,
job_id: i32,
max_retries: i32,
) -> Result<Option<chrono::DateTime<chrono::Utc>>> {
scrape_jobs::unlock_and_increment_retry(job_id, max_retries, &self.db_pool).await
}
@@ -156,31 +205,97 @@ impl Worker {
}
/// Handle the result of job processing
#[allow(clippy::too_many_arguments)]
async fn handle_job_result(
&self,
job_id: i32,
retry_count: i32,
max_retries: i32,
result: Result<(), JobError>,
result: Result<UpsertCounts, JobError>,
duration: std::time::Duration,
target_type: crate::data::models::TargetType,
payload: serde_json::Value,
priority: crate::data::models::ScrapePriority,
queued_at: DateTime<Utc>,
started_at: DateTime<Utc>,
) {
let duration_ms = duration.as_millis() as i32;
match result {
Ok(()) => {
Ok(counts) => {
debug!(
worker_id = self.id,
job_id,
duration_ms = duration.as_millis(),
courses_fetched = counts.courses_fetched,
courses_changed = counts.courses_changed,
courses_unchanged = counts.courses_unchanged,
"Job completed successfully"
);
// Log the result
if let Err(e) = scrape_jobs::insert_job_result(
target_type,
payload,
priority,
queued_at,
started_at,
duration_ms,
true,
None,
retry_count,
Some(&counts),
&self.db_pool,
)
.await
{
error!(worker_id = self.id, job_id, error = ?e, "Failed to insert job result");
}
if let Err(e) = self.delete_job(job_id).await {
error!(worker_id = self.id, job_id, error = ?e, "Failed to delete completed job");
}
debug!(job_id, "Emitting JobCompleted event");
let _ = self
.job_events_tx
.send(ScrapeJobEvent::JobCompleted { id: job_id });
}
Err(JobError::Recoverable(e)) => {
self.handle_recoverable_error(job_id, retry_count, max_retries, e, duration)
.await;
self.handle_recoverable_error(
job_id,
retry_count,
max_retries,
e,
duration,
target_type,
payload,
priority,
queued_at,
started_at,
)
.await;
}
Err(JobError::Unrecoverable(e)) => {
// Log the failed result
let err_msg = format!("{e:#}");
if let Err(log_err) = scrape_jobs::insert_job_result(
target_type,
payload,
priority,
queued_at,
started_at,
duration_ms,
false,
Some(&err_msg),
retry_count,
None,
&self.db_pool,
)
.await
{
error!(worker_id = self.id, job_id, error = ?log_err, "Failed to insert job result");
}
error!(
worker_id = self.id,
job_id,
@@ -191,11 +306,16 @@ impl Worker {
if let Err(e) = self.delete_job(job_id).await {
error!(worker_id = self.id, job_id, error = ?e, "Failed to delete corrupted job");
}
debug!(job_id, "Emitting JobDeleted event");
let _ = self
.job_events_tx
.send(ScrapeJobEvent::JobDeleted { id: job_id });
}
}
}
/// Handle recoverable errors by logging appropriately and unlocking the job
#[allow(clippy::too_many_arguments)]
async fn handle_recoverable_error(
&self,
job_id: i32,
@@ -203,6 +323,11 @@ impl Worker {
max_retries: i32,
e: anyhow::Error,
duration: std::time::Duration,
target_type: crate::data::models::TargetType,
payload: serde_json::Value,
priority: crate::data::models::ScrapePriority,
queued_at: DateTime<Utc>,
started_at: DateTime<Utc>,
) {
let next_attempt = retry_count.saturating_add(1);
let remaining_retries = max_retries.saturating_sub(next_attempt);
@@ -233,7 +358,7 @@ impl Worker {
// Atomically unlock and increment retry count, checking if retry is allowed
match self.unlock_and_increment_retry(job_id, max_retries).await {
Ok(can_retry) if can_retry => {
Ok(Some(new_queued_at)) => {
debug!(
worker_id = self.id,
job_id,
@@ -241,9 +366,37 @@ impl Worker {
remaining_retries = remaining_retries,
"Job unlocked for retry"
);
debug!(job_id, "Emitting JobRetried event");
let _ = self.job_events_tx.send(ScrapeJobEvent::JobRetried {
id: job_id,
retry_count: next_attempt,
queued_at: new_queued_at.to_rfc3339(),
status: ScrapeJobStatus::Pending,
});
// Don't log a result yet — the job will be retried
}
Ok(_) => {
// Max retries exceeded (detected atomically)
Ok(None) => {
// Max retries exceeded — log final failure result
let duration_ms = duration.as_millis() as i32;
let err_msg = format!("{e:#}");
if let Err(log_err) = scrape_jobs::insert_job_result(
target_type,
payload,
priority,
queued_at,
started_at,
duration_ms,
false,
Some(&err_msg),
next_attempt,
None,
&self.db_pool,
)
.await
{
error!(worker_id = self.id, job_id, error = ?log_err, "Failed to insert job result");
}
error!(
worker_id = self.id,
job_id,
@@ -256,6 +409,13 @@ impl Worker {
if let Err(e) = self.delete_job(job_id).await {
error!(worker_id = self.id, job_id, error = ?e, "Failed to delete failed job");
}
debug!(job_id, "Emitting JobExhausted and JobDeleted events");
let _ = self
.job_events_tx
.send(ScrapeJobEvent::JobExhausted { id: job_id });
let _ = self
.job_events_tx
.send(ScrapeJobEvent::JobDeleted { id: job_id });
}
Err(e) => {
error!(worker_id = self.id, job_id, error = ?e, "Failed to unlock and increment retry count");
+9 -7
@@ -1,6 +1,7 @@
use super::Service;
use crate::state::AppState;
use crate::status::ServiceStatus;
use crate::web::auth::AuthConfig;
use crate::web::create_router;
use std::net::SocketAddr;
use tokio::net::TcpListener;
@@ -11,22 +12,21 @@ use tracing::{info, trace, warn};
pub struct WebService {
port: u16,
app_state: AppState,
auth_config: AuthConfig,
shutdown_tx: Option<broadcast::Sender<()>>,
}
impl WebService {
pub fn new(port: u16, app_state: AppState) -> Self {
pub fn new(port: u16, app_state: AppState, auth_config: AuthConfig) -> Self {
Self {
port,
app_state,
auth_config,
shutdown_tx: None,
}
}
/// Periodically pings the database and updates the "database" service status.
async fn db_health_check_loop(
state: AppState,
mut shutdown_rx: broadcast::Receiver<()>,
) {
async fn db_health_check_loop(state: AppState, mut shutdown_rx: broadcast::Receiver<()>) {
use std::time::Duration;
let mut interval = tokio::time::interval(Duration::from_secs(30));
@@ -61,12 +61,14 @@ impl Service for WebService {
async fn run(&mut self) -> Result<(), anyhow::Error> {
// Create the main router with Banner API routes
let app = create_router(self.app_state.clone());
let app = create_router(self.app_state.clone(), self.auth_config.clone());
let addr = SocketAddr::from(([0, 0, 0, 0], self.port));
let listener = TcpListener::bind(addr).await?;
self.app_state.service_statuses.set("web", ServiceStatus::Active);
self.app_state
.service_statuses
.set("web", ServiceStatus::Active);
info!(
service = "web",
address = %addr,
+85
@@ -2,27 +2,112 @@
use crate::banner::BannerApi;
use crate::banner::Course;
use crate::data::models::ReferenceData;
use crate::status::ServiceStatusRegistry;
use crate::web::session_cache::{OAuthStateStore, SessionCache};
use crate::web::ws::ScrapeJobEvent;
use anyhow::Result;
use sqlx::PgPool;
use std::collections::HashMap;
use std::sync::Arc;
use tokio::sync::{RwLock, broadcast};
/// In-memory cache for reference data (code→description lookups).
///
/// Loaded from the `reference_data` table on startup and refreshed periodically.
/// Uses a two-level HashMap so lookups take `&str` without allocating.
pub struct ReferenceCache {
/// category → (code → description)
data: HashMap<String, HashMap<String, String>>,
}
impl Default for ReferenceCache {
fn default() -> Self {
Self::new()
}
}
impl ReferenceCache {
/// Create an empty cache.
pub fn new() -> Self {
Self {
data: HashMap::new(),
}
}
/// Build cache from a list of reference data entries.
pub fn from_entries(entries: Vec<ReferenceData>) -> Self {
let mut data: HashMap<String, HashMap<String, String>> = HashMap::new();
for e in entries {
data.entry(e.category)
.or_default()
.insert(e.code, e.description);
}
Self { data }
}
/// Look up a description by category and code. Zero allocations.
pub fn lookup(&self, category: &str, code: &str) -> Option<&str> {
self.data
.get(category)
.and_then(|codes| codes.get(code))
.map(|s| s.as_str())
}
/// Get all `(code, description)` pairs for a category, sorted by description.
pub fn entries_for_category(&self, category: &str) -> Vec<(&str, &str)> {
let Some(codes) = self.data.get(category) else {
return Vec::new();
};
let mut entries: Vec<(&str, &str)> = codes
.iter()
.map(|(code, desc)| (code.as_str(), desc.as_str()))
.collect();
entries.sort_by(|a, b| a.1.cmp(b.1));
entries
}
}
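For illustration, a minimal usage sketch; the entries are made up, and ReferenceData is assumed to carry exactly the category/code/description fields that from_entries reads:
let cache = ReferenceCache::from_entries(vec![
    ReferenceData { category: "campus".into(), code: "MAIN".into(), description: "Main Campus".into() },
    ReferenceData { category: "campus".into(), code: "ONLN".into(), description: "Online".into() },
]);
assert_eq!(cache.lookup("campus", "MAIN"), Some("Main Campus"));
assert_eq!(cache.lookup("campus", "XX"), None); // unknown code
// entries_for_category("campus") sorts by description:
// [("MAIN", "Main Campus"), ("ONLN", "Online")]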
#[derive(Clone)]
pub struct AppState {
pub banner_api: Arc<BannerApi>,
pub db_pool: PgPool,
pub service_statuses: ServiceStatusRegistry,
pub reference_cache: Arc<RwLock<ReferenceCache>>,
pub session_cache: SessionCache,
pub oauth_state_store: OAuthStateStore,
pub scrape_job_tx: broadcast::Sender<ScrapeJobEvent>,
}
impl AppState {
pub fn new(banner_api: Arc<BannerApi>, db_pool: PgPool) -> Self {
let (scrape_job_tx, _) = broadcast::channel(64);
Self {
session_cache: SessionCache::new(db_pool.clone()),
oauth_state_store: OAuthStateStore::new(),
banner_api,
db_pool,
service_statuses: ServiceStatusRegistry::new(),
reference_cache: Arc::new(RwLock::new(ReferenceCache::new())),
scrape_job_tx,
}
}
/// Subscribe to scrape job lifecycle events.
pub fn scrape_job_events(&self) -> broadcast::Receiver<ScrapeJobEvent> {
self.scrape_job_tx.subscribe()
}
/// Initialize the reference cache from the database.
pub async fn load_reference_cache(&self) -> Result<()> {
let entries = crate::data::reference::get_all(&self.db_pool).await?;
let count = entries.len();
let cache = ReferenceCache::from_entries(entries);
*self.reference_cache.write().await = cache;
tracing::info!(entries = count, "Reference cache loaded");
Ok(())
}
/// Get a course by CRN directly from Banner API
pub async fn get_course_or_fetch(&self, term: &str, crn: &str) -> Result<Course> {
self.banner_api
+6 -1
@@ -3,11 +3,14 @@ use std::time::Instant;
use dashmap::DashMap;
use serde::Serialize;
use ts_rs::TS;
/// Health status of a service.
#[derive(Debug, Clone, Serialize, PartialEq)]
#[derive(Debug, Clone, Serialize, PartialEq, TS)]
#[serde(rename_all = "lowercase")]
#[ts(export)]
pub enum ServiceStatus {
#[allow(dead_code)]
Starting,
Active,
Connected,
@@ -19,6 +22,7 @@ pub enum ServiceStatus {
#[derive(Debug, Clone)]
pub struct StatusEntry {
pub status: ServiceStatus,
#[allow(dead_code)]
pub updated_at: Instant,
}
@@ -46,6 +50,7 @@ impl ServiceStatusRegistry {
}
/// Returns the current status of a named service, if present.
#[allow(dead_code)]
pub fn get(&self, name: &str) -> Option<ServiceStatus> {
self.inner.get(name).map(|entry| entry.status.clone())
}
+269
@@ -0,0 +1,269 @@
//! Admin API handlers.
//!
//! All endpoints require the `AdminUser` extractor, returning 401/403 as needed.
use axum::extract::{Path, State};
use axum::http::{HeaderMap, StatusCode, header};
use axum::response::{IntoResponse, Json, Response};
use chrono::{DateTime, Utc};
use serde::Deserialize;
use serde_json::{Value, json};
use crate::data::models::User;
use crate::state::AppState;
use crate::web::extractors::AdminUser;
/// `GET /api/admin/status` — Enhanced system status for admins.
pub async fn admin_status(
AdminUser(_user): AdminUser,
State(state): State<AppState>,
) -> Result<Json<Value>, (StatusCode, Json<Value>)> {
let (user_count,): (i64,) = sqlx::query_as("SELECT COUNT(*) FROM users")
.fetch_one(&state.db_pool)
.await
.map_err(|e| {
tracing::error!(error = %e, "failed to count users");
(
StatusCode::INTERNAL_SERVER_ERROR,
Json(json!({"error": "failed to count users"})),
)
})?;
let (session_count,): (i64,) =
sqlx::query_as("SELECT COUNT(*) FROM user_sessions WHERE expires_at > now()")
.fetch_one(&state.db_pool)
.await
.map_err(|e| {
tracing::error!(error = %e, "failed to count sessions");
(
StatusCode::INTERNAL_SERVER_ERROR,
Json(json!({"error": "failed to count sessions"})),
)
})?;
let course_count = state.get_course_count().await.map_err(|e| {
tracing::error!(error = %e, "failed to count courses");
(
StatusCode::INTERNAL_SERVER_ERROR,
Json(json!({"error": "failed to count courses"})),
)
})?;
let (scrape_job_count,): (i64,) = sqlx::query_as("SELECT COUNT(*) FROM scrape_jobs")
.fetch_one(&state.db_pool)
.await
.map_err(|e| {
tracing::error!(error = %e, "failed to count scrape jobs");
(
StatusCode::INTERNAL_SERVER_ERROR,
Json(json!({"error": "failed to count scrape jobs"})),
)
})?;
let services: Vec<Value> = state
.service_statuses
.all()
.into_iter()
.map(|(name, status)| {
json!({
"name": name,
"status": status,
})
})
.collect();
Ok(Json(json!({
"userCount": user_count,
"sessionCount": session_count,
"courseCount": course_count,
"scrapeJobCount": scrape_job_count,
"services": services,
})))
}
/// `GET /api/admin/users` — List all users.
pub async fn list_users(
AdminUser(_user): AdminUser,
State(state): State<AppState>,
) -> Result<Json<Vec<User>>, (StatusCode, Json<Value>)> {
let users = crate::data::users::list_users(&state.db_pool)
.await
.map_err(|e| {
tracing::error!(error = %e, "failed to list users");
(
StatusCode::INTERNAL_SERVER_ERROR,
Json(json!({"error": "failed to list users"})),
)
})?;
Ok(Json(users))
}
#[derive(Deserialize)]
pub struct SetAdminBody {
is_admin: bool,
}
/// `PUT /api/admin/users/{discord_id}/admin` — Set admin status for a user.
pub async fn set_user_admin(
AdminUser(_user): AdminUser,
State(state): State<AppState>,
Path(discord_id): Path<i64>,
Json(body): Json<SetAdminBody>,
) -> Result<Json<User>, (StatusCode, Json<Value>)> {
let user = crate::data::users::set_admin(&state.db_pool, discord_id, body.is_admin)
.await
.map_err(|e| {
tracing::error!(error = %e, "failed to set admin status");
(
StatusCode::INTERNAL_SERVER_ERROR,
Json(json!({"error": "failed to set admin status"})),
)
})?
.ok_or_else(|| {
(
StatusCode::NOT_FOUND,
Json(json!({"error": "user not found"})),
)
})?;
state.session_cache.evict_user(discord_id);
Ok(Json(user))
}
/// `GET /api/admin/scrape-jobs` — List scrape jobs.
pub async fn list_scrape_jobs(
AdminUser(_user): AdminUser,
State(state): State<AppState>,
) -> Result<Json<Value>, (StatusCode, Json<Value>)> {
let rows = sqlx::query_as::<_, crate::data::models::ScrapeJob>(
"SELECT * FROM scrape_jobs ORDER BY priority DESC, execute_at ASC LIMIT 100",
)
.fetch_all(&state.db_pool)
.await
.map_err(|e| {
tracing::error!(error = %e, "failed to list scrape jobs");
(
StatusCode::INTERNAL_SERVER_ERROR,
Json(json!({"error": "failed to list scrape jobs"})),
)
})?;
let jobs: Vec<Value> = rows
.iter()
.map(|j| {
json!({
"id": j.id,
"targetType": format!("{:?}", j.target_type),
"targetPayload": j.target_payload,
"priority": format!("{:?}", j.priority),
"executeAt": j.execute_at.to_rfc3339(),
"createdAt": j.created_at.to_rfc3339(),
"lockedAt": j.locked_at.map(|t| t.to_rfc3339()),
"retryCount": j.retry_count,
"maxRetries": j.max_retries,
"queuedAt": j.queued_at.to_rfc3339(),
"status": j.status(),
})
})
.collect();
Ok(Json(json!({ "jobs": jobs })))
}
/// Row returned by the audit-log query (audit + joined course fields).
#[derive(sqlx::FromRow, Debug)]
struct AuditRow {
id: i32,
course_id: i32,
timestamp: chrono::DateTime<chrono::Utc>,
field_changed: String,
old_value: String,
new_value: String,
// Joined from courses table (nullable in case the course was deleted)
subject: Option<String>,
course_number: Option<String>,
crn: Option<String>,
title: Option<String>,
}
/// Format a `DateTime<Utc>` as an HTTP-date (RFC 2822) for Last-Modified headers.
fn to_http_date(dt: &DateTime<Utc>) -> String {
dt.format("%a, %d %b %Y %H:%M:%S GMT").to_string()
}
/// Parse an `If-Modified-Since` header value into a `DateTime<Utc>`.
fn parse_if_modified_since(headers: &HeaderMap) -> Option<DateTime<Utc>> {
let val = headers.get(header::IF_MODIFIED_SINCE)?.to_str().ok()?;
DateTime::parse_from_rfc2822(val)
.ok()
.map(|dt| dt.with_timezone(&Utc))
}
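Context for the second-precision comparison in the handler below: HTTP dates carry no sub-second component, so a database timestamp with fractional seconds would never compare equal to its own round-tripped Last-Modified value unless both sides are truncated. A small chrono-only sketch:
use chrono::{DateTime, Duration, TimeZone, Utc};

let db_ts = Utc.with_ymd_and_hms(2026, 1, 29, 17, 35, 11).unwrap() + Duration::milliseconds(450);
let header_value = db_ts.format("%a, %d %b %Y %H:%M:%S GMT").to_string();
let parsed = DateTime::parse_from_rfc2822(&header_value).unwrap();
// The 450 ms are lost in the header; only .timestamp() (whole seconds) matches.
assert_eq!(parsed.timestamp(), db_ts.timestamp());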
/// `GET /api/admin/audit-log` — List recent audit entries.
///
/// Supports `If-Modified-Since`: returns 304 when the newest entry hasn't changed.
pub async fn list_audit_log(
AdminUser(_user): AdminUser,
headers: HeaderMap,
State(state): State<AppState>,
) -> Result<Response, (StatusCode, Json<Value>)> {
let rows = sqlx::query_as::<_, AuditRow>(
"SELECT a.id, a.course_id, a.timestamp, a.field_changed, a.old_value, a.new_value, \
c.subject, c.course_number, c.crn, c.title \
FROM course_audits a \
LEFT JOIN courses c ON c.id = a.course_id \
ORDER BY a.timestamp DESC LIMIT 200",
)
.fetch_all(&state.db_pool)
.await
.map_err(|e| {
tracing::error!(error = %e, "failed to list audit log");
(
StatusCode::INTERNAL_SERVER_ERROR,
Json(json!({"error": "failed to list audit log"})),
)
})?;
// Determine the latest timestamp across all rows (query is DESC so first row is newest)
let latest = rows.first().map(|r| r.timestamp);
// If the client sent If-Modified-Since and our data hasn't changed, return 304
if let (Some(since), Some(latest_ts)) = (parse_if_modified_since(&headers), latest) {
// Truncate to seconds for comparison (HTTP dates have second precision)
if latest_ts.timestamp() <= since.timestamp() {
let mut resp = StatusCode::NOT_MODIFIED.into_response();
if let Ok(val) = to_http_date(&latest_ts).parse() {
resp.headers_mut().insert(header::LAST_MODIFIED, val);
}
return Ok(resp);
}
}
let entries: Vec<Value> = rows
.iter()
.map(|a| {
json!({
"id": a.id,
"courseId": a.course_id,
"timestamp": a.timestamp.to_rfc3339(),
"fieldChanged": a.field_changed,
"oldValue": a.old_value,
"newValue": a.new_value,
"subject": a.subject,
"courseNumber": a.course_number,
"crn": a.crn,
"courseTitle": a.title,
})
})
.collect();
let mut resp = Json(json!({ "entries": entries })).into_response();
if let Some(latest_ts) = latest
&& let Ok(val) = to_http_date(&latest_ts).parse()
{
resp.headers_mut().insert(header::LAST_MODIFIED, val);
}
Ok(resp)
}
+865
@@ -0,0 +1,865 @@
//! Admin API handlers for RMP instructor matching management.
use axum::extract::{Path, Query, State};
use axum::http::StatusCode;
use axum::response::Json;
use serde::{Deserialize, Serialize};
use serde_json::{Value, json};
use ts_rs::TS;
use crate::state::AppState;
use crate::web::extractors::AdminUser;
// ---------------------------------------------------------------------------
// Query / body types
// ---------------------------------------------------------------------------
#[derive(Deserialize)]
pub struct ListInstructorsParams {
status: Option<String>,
search: Option<String>,
page: Option<i32>,
per_page: Option<i32>,
sort: Option<String>,
}
#[derive(Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct MatchBody {
rmp_legacy_id: i32,
}
#[derive(Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct RejectCandidateBody {
rmp_legacy_id: i32,
}
// ---------------------------------------------------------------------------
// Response types
// ---------------------------------------------------------------------------
/// Simple acknowledgement response for mutating operations.
#[derive(Debug, Clone, Serialize, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export)]
pub struct OkResponse {
pub ok: bool,
}
/// A top-candidate summary shown in the instructor list view.
#[derive(Debug, Clone, Serialize, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export)]
pub struct TopCandidateResponse {
pub rmp_legacy_id: i32,
pub score: Option<f32>,
#[ts(as = "Option<std::collections::HashMap<String, f32>>")]
pub score_breakdown: Option<serde_json::Value>,
pub first_name: Option<String>,
pub last_name: Option<String>,
pub department: Option<String>,
pub avg_rating: Option<f32>,
pub num_ratings: Option<i32>,
}
/// An instructor row in the paginated list.
#[derive(Debug, Clone, Serialize, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export)]
pub struct InstructorListItem {
pub id: i32,
pub display_name: String,
pub email: String,
pub rmp_match_status: String,
#[ts(as = "i32")]
pub rmp_link_count: i64,
#[ts(as = "i32")]
pub candidate_count: i64,
#[ts(as = "i32")]
pub course_subject_count: i64,
pub top_candidate: Option<TopCandidateResponse>,
}
/// Aggregate status counts for the instructor list.
#[derive(Debug, Clone, Serialize, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export)]
pub struct InstructorStats {
#[ts(as = "i32")]
pub total: i64,
#[ts(as = "i32")]
pub unmatched: i64,
#[ts(as = "i32")]
pub auto: i64,
#[ts(as = "i32")]
pub confirmed: i64,
#[ts(as = "i32")]
pub rejected: i64,
#[ts(as = "i32")]
pub with_candidates: i64,
}
/// Response for `GET /api/admin/instructors`.
#[derive(Debug, Clone, Serialize, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export)]
pub struct ListInstructorsResponse {
pub instructors: Vec<InstructorListItem>,
#[ts(as = "i32")]
pub total: i64,
pub page: i32,
pub per_page: i32,
pub stats: InstructorStats,
}
/// Instructor summary in the detail view.
#[derive(Debug, Clone, Serialize, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export)]
pub struct InstructorDetail {
pub id: i32,
pub display_name: String,
pub email: String,
pub rmp_match_status: String,
pub subjects_taught: Vec<String>,
#[ts(as = "i32")]
pub course_count: i64,
}
/// A linked RMP profile in the detail view.
#[derive(Debug, Clone, Serialize, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export)]
pub struct LinkedRmpProfile {
pub link_id: i32,
pub legacy_id: i32,
pub first_name: Option<String>,
pub last_name: Option<String>,
pub department: Option<String>,
pub avg_rating: Option<f32>,
pub avg_difficulty: Option<f32>,
pub num_ratings: Option<i32>,
pub would_take_again_pct: Option<f32>,
}
/// A match candidate in the detail view.
#[derive(Debug, Clone, Serialize, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export)]
pub struct CandidateResponse {
pub id: i32,
pub rmp_legacy_id: i32,
pub first_name: Option<String>,
pub last_name: Option<String>,
pub department: Option<String>,
pub avg_rating: Option<f32>,
pub avg_difficulty: Option<f32>,
pub num_ratings: Option<i32>,
pub would_take_again_pct: Option<f32>,
pub score: Option<f32>,
#[ts(as = "Option<std::collections::HashMap<String, f32>>")]
pub score_breakdown: Option<serde_json::Value>,
pub status: String,
}
/// Response for `GET /api/admin/instructors/{id}` and `POST .../match`.
#[derive(Debug, Clone, Serialize, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export)]
pub struct InstructorDetailResponse {
pub instructor: InstructorDetail,
pub current_matches: Vec<LinkedRmpProfile>,
pub candidates: Vec<CandidateResponse>,
}
/// Response for `POST /api/admin/rmp/rescore`.
#[derive(Debug, Clone, Serialize, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export)]
pub struct RescoreResponse {
pub total_unmatched: usize,
pub candidates_created: usize,
pub auto_matched: usize,
pub skipped_unparseable: usize,
pub skipped_no_candidates: usize,
}
// ---------------------------------------------------------------------------
// Helper: map sqlx errors to the standard admin error tuple
// ---------------------------------------------------------------------------
fn db_error(context: &str, e: sqlx::Error) -> (StatusCode, Json<Value>) {
tracing::error!(error = %e, "{context}");
(
StatusCode::INTERNAL_SERVER_ERROR,
Json(json!({"error": context})),
)
}
// ---------------------------------------------------------------------------
// Row types for SQL queries
// ---------------------------------------------------------------------------
#[derive(sqlx::FromRow)]
struct InstructorRow {
id: i32,
display_name: String,
email: String,
rmp_match_status: String,
rmp_link_count: Option<i64>,
top_candidate_rmp_id: Option<i32>,
top_candidate_score: Option<f32>,
top_candidate_breakdown: Option<serde_json::Value>,
tc_first_name: Option<String>,
tc_last_name: Option<String>,
tc_department: Option<String>,
tc_avg_rating: Option<f32>,
tc_num_ratings: Option<i32>,
candidate_count: Option<i64>,
course_subject_count: Option<i64>,
}
#[derive(sqlx::FromRow)]
struct StatusCount {
rmp_match_status: String,
count: i64,
}
#[derive(sqlx::FromRow)]
struct CandidateRow {
id: i32,
rmp_legacy_id: i32,
score: Option<f32>,
score_breakdown: Option<serde_json::Value>,
status: String,
first_name: Option<String>,
last_name: Option<String>,
department: Option<String>,
avg_rating: Option<f32>,
avg_difficulty: Option<f32>,
num_ratings: Option<i32>,
would_take_again_pct: Option<f32>,
}
#[derive(sqlx::FromRow)]
struct LinkedRmpProfileRow {
link_id: i32,
legacy_id: i32,
first_name: Option<String>,
last_name: Option<String>,
department: Option<String>,
avg_rating: Option<f32>,
avg_difficulty: Option<f32>,
num_ratings: Option<i32>,
would_take_again_pct: Option<f32>,
}
// ---------------------------------------------------------------------------
// 1. GET /api/admin/instructors — paginated list with filtering
// ---------------------------------------------------------------------------
/// `GET /api/admin/instructors` — List instructors with filtering and pagination.
pub async fn list_instructors(
AdminUser(_user): AdminUser,
State(state): State<AppState>,
Query(params): Query<ListInstructorsParams>,
) -> Result<Json<ListInstructorsResponse>, (StatusCode, Json<Value>)> {
let page = params.page.unwrap_or(1).max(1);
let per_page = params.per_page.unwrap_or(50).clamp(1, 100);
let offset = (page - 1) * per_page;
let sort_clause = match params.sort.as_deref() {
Some("name_asc") => "i.display_name ASC",
Some("name_desc") => "i.display_name DESC",
Some("status") => "i.rmp_match_status ASC, i.display_name ASC",
_ => "tc.score DESC NULLS LAST, i.display_name ASC",
};
// Build WHERE clause
let mut conditions = Vec::new();
let mut bind_idx = 0u32;
if params.status.is_some() {
bind_idx += 1;
conditions.push(format!("i.rmp_match_status = ${bind_idx}"));
}
if params.search.is_some() {
bind_idx += 1;
conditions.push(format!(
"(i.display_name ILIKE ${bind_idx} OR i.email ILIKE ${bind_idx})"
));
}
let where_clause = if conditions.is_empty() {
String::new()
} else {
format!("WHERE {}", conditions.join(" AND "))
};
let query_str = format!(
r#"
SELECT
i.id, i.display_name, i.email, i.rmp_match_status,
(SELECT COUNT(*) FROM instructor_rmp_links irl WHERE irl.instructor_id = i.id) as rmp_link_count,
tc.rmp_legacy_id as top_candidate_rmp_id,
tc.score as top_candidate_score,
tc.score_breakdown as top_candidate_breakdown,
rp.first_name as tc_first_name,
rp.last_name as tc_last_name,
rp.department as tc_department,
rp.avg_rating as tc_avg_rating,
rp.num_ratings as tc_num_ratings,
(SELECT COUNT(*) FROM rmp_match_candidates mc WHERE mc.instructor_id = i.id AND mc.status = 'pending') as candidate_count,
(SELECT COUNT(DISTINCT c.subject) FROM course_instructors ci JOIN courses c ON c.id = ci.course_id WHERE ci.instructor_id = i.id) as course_subject_count
FROM instructors i
LEFT JOIN LATERAL (
SELECT mc.rmp_legacy_id, mc.score, mc.score_breakdown
FROM rmp_match_candidates mc
WHERE mc.instructor_id = i.id AND mc.status = 'pending'
ORDER BY mc.score DESC
LIMIT 1
) tc ON true
LEFT JOIN rmp_professors rp ON rp.legacy_id = tc.rmp_legacy_id
{where_clause}
ORDER BY {sort_clause}
LIMIT {per_page} OFFSET {offset}
"#
);
// Build the query with dynamic binds
let mut query = sqlx::query_as::<_, InstructorRow>(&query_str);
if let Some(ref status) = params.status {
query = query.bind(status);
}
if let Some(ref search) = params.search {
query = query.bind(format!("%{search}%"));
}
let rows = query
.fetch_all(&state.db_pool)
.await
.map_err(|e| db_error("failed to list instructors", e))?;
// Count total with filters
let count_query_str = format!("SELECT COUNT(*) FROM instructors i {where_clause}");
let mut count_query = sqlx::query_as::<_, (i64,)>(&count_query_str);
if let Some(ref status) = params.status {
count_query = count_query.bind(status);
}
if let Some(ref search) = params.search {
count_query = count_query.bind(format!("%{search}%"));
}
let (total,) = count_query
.fetch_one(&state.db_pool)
.await
.map_err(|e| db_error("failed to count instructors", e))?;
// Aggregate stats (unfiltered)
let stats_rows = sqlx::query_as::<_, StatusCount>(
"SELECT rmp_match_status, COUNT(*) as count FROM instructors GROUP BY rmp_match_status",
)
.fetch_all(&state.db_pool)
.await
.map_err(|e| db_error("failed to get instructor stats", e))?;
// Count instructors with at least one candidate (for progress bar denominator)
let (with_candidates,): (i64,) =
sqlx::query_as("SELECT COUNT(DISTINCT instructor_id) FROM rmp_match_candidates")
.fetch_one(&state.db_pool)
.await
.map_err(|e| db_error("failed to count instructors with candidates", e))?;
let mut stats = InstructorStats {
total: 0,
unmatched: 0,
auto: 0,
confirmed: 0,
rejected: 0,
with_candidates,
};
for row in &stats_rows {
stats.total += row.count;
match row.rmp_match_status.as_str() {
"unmatched" => stats.unmatched = row.count,
"auto" => stats.auto = row.count,
"confirmed" => stats.confirmed = row.count,
"rejected" => stats.rejected = row.count,
_ => {}
}
}
let instructors: Vec<InstructorListItem> = rows
.iter()
.map(|r| {
let top_candidate = r.top_candidate_rmp_id.map(|rmp_id| TopCandidateResponse {
rmp_legacy_id: rmp_id,
score: r.top_candidate_score,
score_breakdown: r.top_candidate_breakdown.clone(),
first_name: r.tc_first_name.clone(),
last_name: r.tc_last_name.clone(),
department: r.tc_department.clone(),
avg_rating: r.tc_avg_rating,
num_ratings: r.tc_num_ratings,
});
InstructorListItem {
id: r.id,
display_name: r.display_name.clone(),
email: r.email.clone(),
rmp_match_status: r.rmp_match_status.clone(),
rmp_link_count: r.rmp_link_count.unwrap_or(0),
candidate_count: r.candidate_count.unwrap_or(0),
course_subject_count: r.course_subject_count.unwrap_or(0),
top_candidate,
}
})
.collect();
Ok(Json(ListInstructorsResponse {
instructors,
total,
page,
per_page,
stats,
}))
}
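One invariant worth making explicit: sqlx binds are positional, so the $n placeholders generated above and the .bind() calls applied afterwards must follow the same order. A reduced, self-contained sketch of the pattern (hypothetical helper, not code from the repo):
/// Builds the filtered SQL; the caller must .bind(status) and then .bind(search),
/// mirroring the order the placeholders were numbered here.
fn build_filter_sql(status: Option<&str>, search: Option<&str>) -> String {
    let mut conditions = Vec::new();
    let mut bind_idx = 0u32;
    if status.is_some() {
        bind_idx += 1;
        conditions.push(format!("i.rmp_match_status = ${bind_idx}"));
    }
    if search.is_some() {
        bind_idx += 1;
        conditions.push(format!("(i.display_name ILIKE ${bind_idx} OR i.email ILIKE ${bind_idx})"));
    }
    let mut sql = String::from("SELECT i.id FROM instructors i");
    if !conditions.is_empty() {
        sql.push_str(" WHERE ");
        sql.push_str(&conditions.join(" AND "));
    }
    sql
}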
// ---------------------------------------------------------------------------
// 2. GET /api/admin/instructors/{id} — full detail
// ---------------------------------------------------------------------------
/// `GET /api/admin/instructors/{id}` — Full instructor detail with candidates.
pub async fn get_instructor(
AdminUser(_user): AdminUser,
State(state): State<AppState>,
Path(id): Path<i32>,
) -> Result<Json<InstructorDetailResponse>, (StatusCode, Json<Value>)> {
build_instructor_detail(&state, id).await
}
/// Shared helper that builds the full instructor detail response.
async fn build_instructor_detail(
state: &AppState,
id: i32,
) -> Result<Json<InstructorDetailResponse>, (StatusCode, Json<Value>)> {
// Fetch instructor
let instructor: Option<(i32, String, String, String)> = sqlx::query_as(
"SELECT id, display_name, email, rmp_match_status FROM instructors WHERE id = $1",
)
.bind(id)
.fetch_optional(&state.db_pool)
.await
.map_err(|e| db_error("failed to fetch instructor", e))?;
let (inst_id, display_name, email, rmp_match_status) = instructor.ok_or_else(|| {
(
StatusCode::NOT_FOUND,
Json(json!({"error": "instructor not found"})),
)
})?;
// Subjects taught
let subjects: Vec<(String,)> = sqlx::query_as(
"SELECT DISTINCT c.subject FROM course_instructors ci JOIN courses c ON c.id = ci.course_id WHERE ci.instructor_id = $1 ORDER BY c.subject",
)
.bind(inst_id)
.fetch_all(&state.db_pool)
.await
.map_err(|e| db_error("failed to fetch subjects", e))?;
// Course count
let (course_count,): (i64,) = sqlx::query_as(
"SELECT COUNT(DISTINCT ci.course_id) FROM course_instructors ci WHERE ci.instructor_id = $1",
)
.bind(inst_id)
.fetch_one(&state.db_pool)
.await
.map_err(|e| db_error("failed to count courses", e))?;
// Candidates with RMP professor info
let candidates = sqlx::query_as::<_, CandidateRow>(
r#"
SELECT mc.id, mc.rmp_legacy_id, mc.score, mc.score_breakdown, mc.status,
rp.first_name, rp.last_name, rp.department,
rp.avg_rating, rp.avg_difficulty, rp.num_ratings, rp.would_take_again_pct
FROM rmp_match_candidates mc
JOIN rmp_professors rp ON rp.legacy_id = mc.rmp_legacy_id
WHERE mc.instructor_id = $1
ORDER BY mc.score DESC
"#,
)
.bind(inst_id)
.fetch_all(&state.db_pool)
.await
.map_err(|e| db_error("failed to fetch candidates", e))?;
// Current matches (all linked RMP profiles)
let current_matches = sqlx::query_as::<_, LinkedRmpProfileRow>(
r#"
SELECT irl.id as link_id,
rp.legacy_id, rp.first_name, rp.last_name, rp.department,
rp.avg_rating, rp.avg_difficulty, rp.num_ratings, rp.would_take_again_pct
FROM instructor_rmp_links irl
JOIN rmp_professors rp ON rp.legacy_id = irl.rmp_legacy_id
WHERE irl.instructor_id = $1
ORDER BY rp.num_ratings DESC NULLS LAST
"#,
)
.bind(inst_id)
.fetch_all(&state.db_pool)
.await
.map_err(|e| db_error("failed to fetch linked rmp profiles", e))?;
let current_matches_resp: Vec<LinkedRmpProfile> = current_matches
.into_iter()
.map(|p| LinkedRmpProfile {
link_id: p.link_id,
legacy_id: p.legacy_id,
first_name: p.first_name,
last_name: p.last_name,
department: p.department,
avg_rating: p.avg_rating,
avg_difficulty: p.avg_difficulty,
num_ratings: p.num_ratings,
would_take_again_pct: p.would_take_again_pct,
})
.collect();
let candidates_resp: Vec<CandidateResponse> = candidates
.into_iter()
.map(|c| CandidateResponse {
id: c.id,
rmp_legacy_id: c.rmp_legacy_id,
first_name: c.first_name,
last_name: c.last_name,
department: c.department,
avg_rating: c.avg_rating,
avg_difficulty: c.avg_difficulty,
num_ratings: c.num_ratings,
would_take_again_pct: c.would_take_again_pct,
score: c.score,
score_breakdown: c.score_breakdown,
status: c.status,
})
.collect();
Ok(Json(InstructorDetailResponse {
instructor: InstructorDetail {
id: inst_id,
display_name,
email,
rmp_match_status,
subjects_taught: subjects.into_iter().map(|(s,)| s).collect(),
course_count,
},
current_matches: current_matches_resp,
candidates: candidates_resp,
}))
}
// ---------------------------------------------------------------------------
// 3. POST /api/admin/instructors/{id}/match — accept a candidate
// ---------------------------------------------------------------------------
/// `POST /api/admin/instructors/{id}/match` — Accept a candidate match.
pub async fn match_instructor(
AdminUser(user): AdminUser,
State(state): State<AppState>,
Path(id): Path<i32>,
Json(body): Json<MatchBody>,
) -> Result<Json<InstructorDetailResponse>, (StatusCode, Json<Value>)> {
// Verify the candidate exists and is pending
let candidate: Option<(i32,)> = sqlx::query_as(
"SELECT id FROM rmp_match_candidates WHERE instructor_id = $1 AND rmp_legacy_id = $2 AND status = 'pending'",
)
.bind(id)
.bind(body.rmp_legacy_id)
.fetch_optional(&state.db_pool)
.await
.map_err(|e| db_error("failed to check candidate", e))?;
if candidate.is_none() {
return Err((
StatusCode::NOT_FOUND,
Json(json!({"error": "pending candidate not found for this instructor"})),
));
}
// Check if this RMP profile is already linked to a different instructor
let conflict: Option<(i32,)> = sqlx::query_as(
"SELECT instructor_id FROM instructor_rmp_links WHERE rmp_legacy_id = $1 AND instructor_id != $2",
)
.bind(body.rmp_legacy_id)
.bind(id)
.fetch_optional(&state.db_pool)
.await
.map_err(|e| db_error("failed to check rmp uniqueness", e))?;
if let Some((other_id,)) = conflict {
return Err((
StatusCode::CONFLICT,
Json(json!({
"error": "RMP profile already linked to another instructor",
"conflictingInstructorId": other_id,
})),
));
}
let mut tx = state
.db_pool
.begin()
.await
.map_err(|e| db_error("failed to begin transaction", e))?;
// Insert link into instructor_rmp_links
sqlx::query(
"INSERT INTO instructor_rmp_links (instructor_id, rmp_legacy_id, created_by, source) VALUES ($1, $2, $3, 'manual') ON CONFLICT (rmp_legacy_id) DO NOTHING",
)
.bind(id)
.bind(body.rmp_legacy_id)
.bind(user.discord_id)
.execute(&mut *tx)
.await
.map_err(|e| db_error("failed to insert rmp link", e))?;
// Update instructor match status
sqlx::query("UPDATE instructors SET rmp_match_status = 'confirmed' WHERE id = $1")
.bind(id)
.execute(&mut *tx)
.await
.map_err(|e| db_error("failed to update instructor match status", e))?;
// Accept the candidate
sqlx::query(
"UPDATE rmp_match_candidates SET status = 'accepted', resolved_at = NOW(), resolved_by = $1 WHERE instructor_id = $2 AND rmp_legacy_id = $3",
)
.bind(user.discord_id)
.bind(id)
.bind(body.rmp_legacy_id)
.execute(&mut *tx)
.await
.map_err(|e| db_error("failed to accept candidate", e))?;
tx.commit()
.await
.map_err(|e| db_error("failed to commit transaction", e))?;
build_instructor_detail(&state, id).await
}
// ---------------------------------------------------------------------------
// 4. POST /api/admin/instructors/{id}/reject-candidate — reject one candidate
// ---------------------------------------------------------------------------
/// `POST /api/admin/instructors/{id}/reject-candidate` — Reject a single candidate.
pub async fn reject_candidate(
AdminUser(user): AdminUser,
State(state): State<AppState>,
Path(id): Path<i32>,
Json(body): Json<RejectCandidateBody>,
) -> Result<Json<OkResponse>, (StatusCode, Json<Value>)> {
let result = sqlx::query(
"UPDATE rmp_match_candidates SET status = 'rejected', resolved_at = NOW(), resolved_by = $1 WHERE instructor_id = $2 AND rmp_legacy_id = $3 AND status = 'pending'",
)
.bind(user.discord_id)
.bind(id)
.bind(body.rmp_legacy_id)
.execute(&state.db_pool)
.await
.map_err(|e| db_error("failed to reject candidate", e))?;
if result.rows_affected() == 0 {
return Err((
StatusCode::NOT_FOUND,
Json(json!({"error": "pending candidate not found"})),
));
}
Ok(Json(OkResponse { ok: true }))
}
// ---------------------------------------------------------------------------
// 5. POST /api/admin/instructors/{id}/reject-all — no valid match
// ---------------------------------------------------------------------------
/// `POST /api/admin/instructors/{id}/reject-all` — Mark instructor as having no valid RMP match.
pub async fn reject_all(
AdminUser(user): AdminUser,
State(state): State<AppState>,
Path(id): Path<i32>,
) -> Result<Json<OkResponse>, (StatusCode, Json<Value>)> {
let mut tx = state
.db_pool
.begin()
.await
.map_err(|e| db_error("failed to begin transaction", e))?;
// Check current status — cannot reject an instructor with confirmed matches
let current_status: Option<(String,)> =
sqlx::query_as("SELECT rmp_match_status FROM instructors WHERE id = $1")
.bind(id)
.fetch_optional(&mut *tx)
.await
.map_err(|e| db_error("failed to fetch instructor status", e))?;
let (status,) = current_status.ok_or_else(|| {
(
StatusCode::NOT_FOUND,
Json(json!({"error": "instructor not found"})),
)
})?;
if status == "confirmed" {
return Err((
StatusCode::CONFLICT,
Json(
json!({"error": "cannot reject instructor with confirmed matches — unmatch first"}),
),
));
}
// Update instructor status
sqlx::query("UPDATE instructors SET rmp_match_status = 'rejected' WHERE id = $1")
.bind(id)
.execute(&mut *tx)
.await
.map_err(|e| db_error("failed to update instructor status", e))?;
// Reject all pending candidates
sqlx::query(
"UPDATE rmp_match_candidates SET status = 'rejected', resolved_at = NOW(), resolved_by = $1 WHERE instructor_id = $2 AND status = 'pending'",
)
.bind(user.discord_id)
.bind(id)
.execute(&mut *tx)
.await
.map_err(|e| db_error("failed to reject candidates", e))?;
tx.commit()
.await
.map_err(|e| db_error("failed to commit transaction", e))?;
Ok(Json(OkResponse { ok: true }))
}
// ---------------------------------------------------------------------------
// 6. POST /api/admin/instructors/{id}/unmatch — remove current match
// ---------------------------------------------------------------------------
/// Body for unmatch — optional `rmpLegacyId` to remove a specific link.
/// If omitted (or null), all links are removed.
#[derive(Deserialize, Default)]
#[serde(rename_all = "camelCase")]
pub struct UnmatchBody {
rmp_legacy_id: Option<i32>,
}
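A quick sketch of the two accepted JSON shapes (the ID is made up; serde fills a missing rmpLegacyId with None, and the handler also tolerates a fully absent body via the Option<Json<..>> extractor):
let one: UnmatchBody = serde_json::from_str(r#"{"rmpLegacyId": 12345}"#).unwrap();
assert_eq!(one.rmp_legacy_id, Some(12345));
let all: UnmatchBody = serde_json::from_str("{}").unwrap();
assert_eq!(all.rmp_legacy_id, None);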
/// `POST /api/admin/instructors/{id}/unmatch` — Remove RMP link(s).
///
/// Send `{ "rmpLegacyId": N }` to remove a specific link, or an empty body / `{}`
/// to remove all links for the instructor.
pub async fn unmatch_instructor(
AdminUser(_user): AdminUser,
State(state): State<AppState>,
Path(id): Path<i32>,
body: Option<Json<UnmatchBody>>,
) -> Result<Json<OkResponse>, (StatusCode, Json<Value>)> {
let rmp_legacy_id = body.and_then(|b| b.rmp_legacy_id);
let mut tx = state
.db_pool
.begin()
.await
.map_err(|e| db_error("failed to begin transaction", e))?;
// Verify instructor exists
let exists: Option<(i32,)> = sqlx::query_as("SELECT id FROM instructors WHERE id = $1")
.bind(id)
.fetch_optional(&mut *tx)
.await
.map_err(|e| db_error("failed to check instructor", e))?;
if exists.is_none() {
return Err((
StatusCode::NOT_FOUND,
Json(json!({"error": "instructor not found"})),
));
}
// Delete specific link or all links
if let Some(legacy_id) = rmp_legacy_id {
let result = sqlx::query(
"DELETE FROM instructor_rmp_links WHERE instructor_id = $1 AND rmp_legacy_id = $2",
)
.bind(id)
.bind(legacy_id)
.execute(&mut *tx)
.await
.map_err(|e| db_error("failed to remove rmp link", e))?;
if result.rows_affected() == 0 {
return Err((
StatusCode::NOT_FOUND,
Json(json!({"error": "link not found for this instructor"})),
));
}
} else {
sqlx::query("DELETE FROM instructor_rmp_links WHERE instructor_id = $1")
.bind(id)
.execute(&mut *tx)
.await
.map_err(|e| db_error("failed to remove rmp links", e))?;
}
// Check if any links remain; update status accordingly
let (remaining,): (i64,) =
sqlx::query_as("SELECT COUNT(*) FROM instructor_rmp_links WHERE instructor_id = $1")
.bind(id)
.fetch_one(&mut *tx)
.await
.map_err(|e| db_error("failed to count remaining links", e))?;
if remaining == 0 {
sqlx::query("UPDATE instructors SET rmp_match_status = 'unmatched' WHERE id = $1")
.bind(id)
.execute(&mut *tx)
.await
.map_err(|e| db_error("failed to update instructor status", e))?;
}
tx.commit()
.await
.map_err(|e| db_error("failed to commit transaction", e))?;
Ok(Json(OkResponse { ok: true }))
}
// ---------------------------------------------------------------------------
// 7. POST /api/admin/rmp/rescore — re-run candidate generation
// ---------------------------------------------------------------------------
/// `POST /api/admin/rmp/rescore` — Re-run RMP candidate generation.
pub async fn rescore(
AdminUser(_user): AdminUser,
State(state): State<AppState>,
) -> Result<Json<RescoreResponse>, (StatusCode, Json<Value>)> {
let stats = crate::data::rmp_matching::generate_candidates(&state.db_pool)
.await
.map_err(|e| {
tracing::error!(error = %e, "failed to run candidate generation");
(
StatusCode::INTERNAL_SERVER_ERROR,
Json(json!({"error": "candidate generation failed"})),
)
})?;
Ok(Json(RescoreResponse {
total_unmatched: stats.total_unmatched,
candidates_created: stats.candidates_created,
auto_matched: stats.auto_matched,
skipped_unparseable: stats.skipped_unparseable,
skipped_no_candidates: stats.skipped_no_candidates,
}))
}
+114 -19
@@ -1,14 +1,18 @@
//! Embedded assets for the web frontend
//! Embedded assets for the web frontend.
//!
//! This module handles serving static assets that are embedded into the binary
//! at compile time using rust-embed.
//! Serves static assets embedded into the binary at compile time using rust-embed.
//! Supports content negotiation for pre-compressed variants (.br, .gz, .zst)
//! generated at build time by `web/scripts/compress-assets.ts`.
use axum::http::{HeaderMap, HeaderValue, header};
use dashmap::DashMap;
use rapidhash::v3::rapidhash_v3;
use rust_embed::RustEmbed;
use std::fmt;
use std::sync::LazyLock;
use super::encoding::{COMPRESSION_MIN_SIZE, ContentEncoding, parse_accepted_encodings};
/// Embedded web assets from the dist directory
#[derive(RustEmbed)]
#[folder = "web/dist/"]
@@ -21,17 +25,15 @@ pub struct WebAssets;
pub struct AssetHash(u64);
impl AssetHash {
/// Create a new AssetHash from u64 value
pub fn new(hash: u64) -> Self {
Self(hash)
}
/// Get the hash as a hex string
pub fn to_hex(&self) -> String {
format!("{:016x}", self.0)
}
/// Get the hash as a quoted hex string
/// Get the hash as a quoted hex string (for ETag headers)
pub fn quoted(&self) -> String {
format!("\"{}\"", self.to_hex())
}
@@ -51,12 +53,8 @@ pub struct AssetMetadata {
}
impl AssetMetadata {
/// Check if the etag matches the asset hash
pub fn etag_matches(&self, etag: &str) -> bool {
// Remove quotes if present (ETags are typically quoted)
let etag = etag.trim_matches('"');
// ETags generated from u64 hex should be 16 characters
etag.len() == 16
&& u64::from_str_radix(etag, 16)
.map(|parsed| parsed == self.hash.0)
@@ -68,28 +66,125 @@ impl AssetMetadata {
static ASSET_CACHE: LazyLock<DashMap<String, AssetMetadata>> = LazyLock::new(DashMap::new);
/// Get cached asset metadata for a file path, caching on-demand
/// Returns AssetMetadata containing MIME type and RapidHash hash
pub fn get_asset_metadata_cached(path: &str, content: &[u8]) -> AssetMetadata {
// Check cache first
if let Some(cached) = ASSET_CACHE.get(path) {
return cached.value().clone();
}
// Calculate MIME type
let mime_type = mime_guess::from_path(path)
.first()
.map(|mime| mime.to_string());
// Calculate RapidHash hash (using u64 native output size)
let hash_value = rapidhash_v3(content);
let hash = AssetHash::new(hash_value);
let hash = AssetHash::new(rapidhash_v3(content));
let metadata = AssetMetadata { mime_type, hash };
// Only cache if we haven't exceeded the limit
if ASSET_CACHE.len() < 1000 {
ASSET_CACHE.insert(path.to_string(), metadata.clone());
}
metadata
}
/// Set appropriate `Cache-Control` header based on the asset path.
///
/// SvelteKit outputs fingerprinted assets under `_app/immutable/` which are
/// safe to cache indefinitely. Other assets get shorter cache durations.
fn set_cache_control(headers: &mut HeaderMap, path: &str) {
let cache_control = if path.contains("immutable/") {
// SvelteKit fingerprinted assets — cache forever
"public, max-age=31536000, immutable"
} else if path == "index.html" || path.ends_with(".html") {
"public, max-age=300"
} else {
match path.rsplit_once('.').map(|(_, ext)| ext) {
Some("css" | "js") => "public, max-age=86400",
Some("png" | "jpg" | "jpeg" | "gif" | "svg" | "ico") => "public, max-age=2592000",
_ => "public, max-age=3600",
}
};
if let Ok(value) = HeaderValue::from_str(cache_control) {
headers.insert(header::CACHE_CONTROL, value);
}
}
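Spot-checking the branches above, as a hedged test sketch (not tests from the repo):
#[cfg(test)]
mod cache_control_tests {
    use super::*;

    #[test]
    fn immutable_assets_cache_forever() {
        let mut headers = HeaderMap::new();
        set_cache_control(&mut headers, "_app/immutable/chunks/app.js");
        assert_eq!(
            headers.get(header::CACHE_CONTROL).unwrap(),
            "public, max-age=31536000, immutable"
        );
    }

    #[test]
    fn html_gets_short_cache() {
        let mut headers = HeaderMap::new();
        set_cache_control(&mut headers, "index.html");
        assert_eq!(headers.get(header::CACHE_CONTROL).unwrap(), "public, max-age=300");
    }
}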
/// Serve an embedded asset with content encoding negotiation.
///
/// Tries pre-compressed variants (.br, .gz, .zst) in the order preferred by
/// the client's `Accept-Encoding` header, falling back to the uncompressed
/// original. Returns `None` if the asset doesn't exist at all.
pub fn try_serve_asset_with_encoding(
path: &str,
request_headers: &HeaderMap,
) -> Option<axum::response::Response> {
use axum::response::IntoResponse;
let asset_path = path.strip_prefix('/').unwrap_or(path);
// Get the uncompressed original first (for metadata: MIME type, ETag)
let original = WebAssets::get(asset_path)?;
let metadata = get_asset_metadata_cached(asset_path, &original.data);
// Check ETag for conditional requests (304 Not Modified)
if let Some(etag) = request_headers.get(header::IF_NONE_MATCH)
&& etag.to_str().is_ok_and(|s| metadata.etag_matches(s))
{
return Some(axum::http::StatusCode::NOT_MODIFIED.into_response());
}
let mime_type = metadata
.mime_type
.unwrap_or_else(|| "application/octet-stream".to_string());
// Only attempt pre-compressed variants for files above the compression
// threshold — the build script skips smaller files too.
let accepted_encodings = if original.data.len() >= COMPRESSION_MIN_SIZE {
parse_accepted_encodings(request_headers)
} else {
vec![ContentEncoding::Identity]
};
for encoding in &accepted_encodings {
if *encoding == ContentEncoding::Identity {
continue;
}
let compressed_path = format!("{}{}", asset_path, encoding.extension());
if let Some(compressed) = WebAssets::get(&compressed_path) {
let mut response_headers = HeaderMap::new();
if let Ok(ct) = HeaderValue::from_str(&mime_type) {
response_headers.insert(header::CONTENT_TYPE, ct);
}
if let Some(ce) = encoding.header_value() {
response_headers.insert(header::CONTENT_ENCODING, ce);
}
if let Ok(etag_val) = HeaderValue::from_str(&metadata.hash.quoted()) {
response_headers.insert(header::ETAG, etag_val);
}
// Vary so caches distinguish by encoding
response_headers.insert(header::VARY, HeaderValue::from_static("Accept-Encoding"));
set_cache_control(&mut response_headers, asset_path);
return Some(
(
axum::http::StatusCode::OK,
response_headers,
compressed.data,
)
.into_response(),
);
}
}
// No compressed variant found — serve uncompressed original
let mut response_headers = HeaderMap::new();
if let Ok(ct) = HeaderValue::from_str(&mime_type) {
response_headers.insert(header::CONTENT_TYPE, ct);
}
if let Ok(etag_val) = HeaderValue::from_str(&metadata.hash.quoted()) {
response_headers.insert(header::ETAG, etag_val);
}
set_cache_control(&mut response_headers, asset_path);
Some((axum::http::StatusCode::OK, response_headers, original.data).into_response())
}
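The pre-compressed lookup is purely name-based: each accepted encoding's extension is appended to the asset path and probed against the embedded archive. A sketch with an illustrative path:
let probes: Vec<String> = [ContentEncoding::Zstd, ContentEncoding::Brotli, ContentEncoding::Gzip]
    .iter()
    .map(|enc| format!("chunks/app.js{}", enc.extension()))
    .collect();
assert_eq!(probes, ["chunks/app.js.zst", "chunks/app.js.br", "chunks/app.js.gz"]);
// The first probe that WebAssets::get() resolves wins; otherwise the original is served.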
+300
@@ -0,0 +1,300 @@
//! Discord OAuth2 authentication handlers.
//!
//! Provides login, callback, logout, and session introspection endpoints
//! for Discord OAuth2 authentication flow.
use axum::extract::{Extension, Query, State};
use axum::http::{HeaderMap, StatusCode, header};
use axum::response::{IntoResponse, Json, Redirect, Response};
use serde::Deserialize;
use serde_json::{Value, json};
use std::time::Duration;
use tracing::{error, info, warn};
use crate::state::AppState;
/// OAuth configuration passed as an Axum Extension.
#[derive(Clone)]
pub struct AuthConfig {
pub client_id: String,
pub client_secret: String,
/// Optional base URL override (e.g. "https://banner.xevion.dev").
/// When `None`, the redirect URI is derived from the request's Origin/Host header.
pub redirect_base: Option<String>,
}
const CALLBACK_PATH: &str = "/api/auth/callback";
/// Derive the origin (scheme + host + port) the user's browser is actually on.
///
/// Priority:
/// 1. Configured `redirect_base` (production override)
/// 2. `Referer` header — preserves the real browser origin even through
/// reverse proxies that rewrite `Host` (e.g. Vite dev proxy with
/// `changeOrigin: true`)
/// 3. `Origin` header (present on POST / CORS requests)
/// 4. `Host` header (last resort, may be rewritten by proxies)
fn resolve_origin(auth_config: &AuthConfig, headers: &HeaderMap) -> String {
if let Some(base) = &auth_config.redirect_base {
return base.trim_end_matches('/').to_owned();
}
// Referer carries the full browser URL; extract just the origin.
if let Some(referer) = headers.get(header::REFERER).and_then(|v| v.to_str().ok())
&& let Ok(parsed) = url::Url::parse(referer)
{
let origin = parsed.origin().unicode_serialization();
if origin != "null" {
return origin;
}
}
if let Some(origin) = headers.get("origin").and_then(|v| v.to_str().ok()) {
return origin.trim_end_matches('/').to_owned();
}
if let Some(host) = headers.get(header::HOST).and_then(|v| v.to_str().ok()) {
return format!("http://{host}");
}
"http://localhost:8080".to_owned()
}
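A sketch of the Referer branch with made-up header values and stubbed AuthConfig fields:
use axum::http::{HeaderMap, header};

let cfg = AuthConfig {
    client_id: String::new(),
    client_secret: String::new(),
    redirect_base: None, // force header-based resolution
};
let mut headers = HeaderMap::new();
headers.insert(header::REFERER, "https://banner.xevion.dev/login?next=/admin".parse().unwrap());
// Only the origin survives; the path and query are dropped.
assert_eq!(resolve_origin(&cfg, &headers), "https://banner.xevion.dev");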
#[derive(Deserialize)]
pub struct CallbackParams {
code: String,
state: String,
}
#[derive(Deserialize)]
struct TokenResponse {
access_token: String,
}
#[derive(Deserialize)]
struct DiscordUser {
id: String,
username: String,
avatar: Option<String>,
}
/// Extract the `session` cookie value from request headers.
fn extract_session_token(headers: &HeaderMap) -> Option<String> {
headers
.get(header::COOKIE)?
.to_str()
.ok()?
.split(';')
.find_map(|cookie| {
let cookie = cookie.trim();
cookie.strip_prefix("session=").map(|v| v.to_owned())
})
}
/// Build a `Set-Cookie` header value for the session cookie.
fn session_cookie(token: &str, max_age: i64, secure: bool) -> String {
let mut cookie = format!("session={token}; HttpOnly; SameSite=Lax; Path=/; Max-Age={max_age}");
if secure {
cookie.push_str("; Secure");
}
cookie
}
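For reference, the exact value the helper produces for a one-week secure session (token made up):
let cookie = session_cookie("abc123", 7 * 24 * 3600, true);
assert_eq!(cookie, "session=abc123; HttpOnly; SameSite=Lax; Path=/; Max-Age=604800; Secure");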
/// `GET /api/auth/login` — Redirect to Discord OAuth2 authorization page.
pub async fn auth_login(
State(state): State<AppState>,
Extension(auth_config): Extension<AuthConfig>,
headers: HeaderMap,
) -> Redirect {
let origin = resolve_origin(&auth_config, &headers);
let redirect_uri = format!("{origin}{CALLBACK_PATH}");
let csrf_state = state.oauth_state_store.generate(origin);
let redirect_uri_encoded = urlencoding::encode(&redirect_uri);
let url = format!(
"https://discord.com/oauth2/authorize\
?client_id={}\
&redirect_uri={redirect_uri_encoded}\
&response_type=code\
&scope=identify\
&state={csrf_state}",
auth_config.client_id,
);
Redirect::temporary(&url)
}
/// `GET /api/auth/callback` — Handle Discord OAuth2 callback.
pub async fn auth_callback(
State(state): State<AppState>,
Extension(auth_config): Extension<AuthConfig>,
Query(params): Query<CallbackParams>,
) -> Result<Response, (StatusCode, Json<Value>)> {
// 1. Validate CSRF state and recover the origin used during login
let origin = state
.oauth_state_store
.validate(&params.state)
.ok_or_else(|| {
warn!("OAuth callback with invalid CSRF state");
(
StatusCode::BAD_REQUEST,
Json(json!({ "error": "Invalid OAuth state" })),
)
})?;
// 2. Exchange authorization code for access token
let redirect_uri = format!("{origin}{CALLBACK_PATH}");
let client = reqwest::Client::new();
let token_response = client
.post("https://discord.com/api/oauth2/token")
.form(&[
("client_id", auth_config.client_id.as_str()),
("client_secret", auth_config.client_secret.as_str()),
("grant_type", "authorization_code"),
("code", params.code.as_str()),
("redirect_uri", redirect_uri.as_str()),
])
.send()
.await
.map_err(|e| {
error!(error = %e, "failed to exchange OAuth code for token");
(
StatusCode::BAD_GATEWAY,
Json(json!({ "error": "Failed to exchange code with Discord" })),
)
})?;
if !token_response.status().is_success() {
let status = token_response.status();
let body = token_response.text().await.unwrap_or_default();
error!(%status, %body, "Discord token exchange returned error");
return Err((
StatusCode::BAD_GATEWAY,
Json(json!({ "error": "Discord token exchange failed" })),
));
}
let token_data: TokenResponse = token_response.json().await.map_err(|e| {
error!(error = %e, "failed to parse Discord token response");
(
StatusCode::BAD_GATEWAY,
Json(json!({ "error": "Invalid token response from Discord" })),
)
})?;
// 3. Fetch Discord user profile
let discord_user: DiscordUser = client
.get("https://discord.com/api/users/@me")
.bearer_auth(&token_data.access_token)
.send()
.await
.map_err(|e| {
error!(error = %e, "failed to fetch Discord user profile");
(
StatusCode::BAD_GATEWAY,
Json(json!({ "error": "Failed to fetch Discord profile" })),
)
})?
.json()
.await
.map_err(|e| {
error!(error = %e, "failed to parse Discord user profile");
(
StatusCode::BAD_GATEWAY,
Json(json!({ "error": "Invalid user profile from Discord" })),
)
})?;
let discord_id: i64 = discord_user.id.parse().map_err(|_| {
error!(id = %discord_user.id, "Discord user ID is not a valid i64");
(
StatusCode::BAD_GATEWAY,
Json(json!({ "error": "Invalid Discord user ID" })),
)
})?;
// 4. Upsert user
let user = crate::data::users::upsert_user(
&state.db_pool,
discord_id,
&discord_user.username,
discord_user.avatar.as_deref(),
)
.await
.map_err(|e| {
error!(error = %e, "failed to upsert user");
(
StatusCode::INTERNAL_SERVER_ERROR,
Json(json!({ "error": "Database error" })),
)
})?;
info!(discord_id, username = %user.discord_username, "user authenticated via OAuth");
// 5. Create session
let session = crate::data::sessions::create_session(
&state.db_pool,
discord_id,
Duration::from_secs(7 * 24 * 3600),
)
.await
.map_err(|e| {
error!(error = %e, "failed to create session");
(
StatusCode::INTERNAL_SERVER_ERROR,
Json(json!({ "error": "Failed to create session" })),
)
})?;
// 6. Build response with session cookie
let secure = redirect_uri.starts_with("https://");
let cookie = session_cookie(&session.id, 604800, secure);
let redirect_to = if user.is_admin { "/admin" } else { "/" };
Ok((
[(header::SET_COOKIE, cookie)],
Redirect::temporary(redirect_to),
)
.into_response())
}
/// `POST /api/auth/logout` — Destroy the current session.
pub async fn auth_logout(State(state): State<AppState>, headers: HeaderMap) -> Response {
if let Some(token) = extract_session_token(&headers) {
if let Err(e) = crate::data::sessions::delete_session(&state.db_pool, &token).await {
warn!(error = %e, "failed to delete session from database");
}
state.session_cache.evict(&token);
}
let cookie = session_cookie("", 0, false);
(
StatusCode::OK,
[(header::SET_COOKIE, cookie)],
Json(json!({ "ok": true })),
)
.into_response()
}
/// `GET /api/auth/me` — Return the current authenticated user's info.
pub async fn auth_me(
State(state): State<AppState>,
headers: HeaderMap,
) -> Result<Json<Value>, StatusCode> {
let token = extract_session_token(&headers).ok_or(StatusCode::UNAUTHORIZED)?;
let user = state
.session_cache
.get_user(&token)
.await
.ok_or(StatusCode::UNAUTHORIZED)?;
Ok(Json(json!({
"discordId": user.discord_id.to_string(),
"username": user.discord_username,
"avatarHash": user.discord_avatar_hash,
"isAdmin": user.is_admin,
})))
}
+196
@@ -0,0 +1,196 @@
//! Content encoding negotiation for pre-compressed asset serving.
//!
//! Parses Accept-Encoding headers with quality values and returns
//! supported encodings in priority order for content negotiation.
use axum::http::{HeaderMap, HeaderValue, header};
/// Minimum size threshold for compression (bytes).
///
/// Must match `MIN_SIZE` in `web/scripts/compress-assets.ts`.
pub const COMPRESSION_MIN_SIZE: usize = 512;
/// Supported content encodings in priority order (best compression first).
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum ContentEncoding {
Zstd,
Brotli,
Gzip,
Identity,
}
impl ContentEncoding {
/// File extension suffix for pre-compressed variant lookup.
#[inline]
pub fn extension(&self) -> &'static str {
match self {
Self::Zstd => ".zst",
Self::Brotli => ".br",
Self::Gzip => ".gz",
Self::Identity => "",
}
}
/// `Content-Encoding` header value, or `None` for identity.
#[inline]
pub fn header_value(&self) -> Option<HeaderValue> {
match self {
Self::Zstd => Some(HeaderValue::from_static("zstd")),
Self::Brotli => Some(HeaderValue::from_static("br")),
Self::Gzip => Some(HeaderValue::from_static("gzip")),
Self::Identity => None,
}
}
/// Default priority when quality values are equal (higher = better).
#[inline]
fn default_priority(&self) -> u8 {
match self {
Self::Zstd => 4,
Self::Brotli => 3,
Self::Gzip => 2,
Self::Identity => 1,
}
}
}
/// Parse `Accept-Encoding` header and return supported encodings in priority order.
///
/// Supports quality values: `Accept-Encoding: gzip;q=0.8, br;q=1.0, zstd`
/// When quality values are equal: zstd > brotli > gzip > identity.
/// Encodings with `q=0` are excluded.
pub fn parse_accepted_encodings(headers: &HeaderMap) -> Vec<ContentEncoding> {
let Some(accept) = headers
.get(header::ACCEPT_ENCODING)
.and_then(|v| v.to_str().ok())
else {
return vec![ContentEncoding::Identity];
};
let mut encodings: Vec<(ContentEncoding, f32)> = Vec::new();
for part in accept.split(',') {
let part = part.trim();
if part.is_empty() {
continue;
}
let (encoding_str, quality) = if let Some((enc, params)) = part.split_once(';') {
let q = params
.split(';')
.find_map(|p| p.trim().strip_prefix("q="))
.and_then(|q| q.parse::<f32>().ok())
.unwrap_or(1.0);
(enc.trim(), q)
} else {
(part, 1.0)
};
if quality == 0.0 {
continue;
}
let encoding = match encoding_str.to_lowercase().as_str() {
"zstd" => ContentEncoding::Zstd,
"br" | "brotli" => ContentEncoding::Brotli,
"gzip" | "x-gzip" => ContentEncoding::Gzip,
"*" => ContentEncoding::Gzip,
"identity" => ContentEncoding::Identity,
_ => continue,
};
encodings.push((encoding, quality));
}
// Sort by quality (desc), then default priority (desc)
encodings.sort_by(|a, b| {
b.1.partial_cmp(&a.1)
.unwrap_or(std::cmp::Ordering::Equal)
.then_with(|| b.0.default_priority().cmp(&a.0.default_priority()))
});
if encodings.is_empty() {
vec![ContentEncoding::Identity]
} else {
encodings.into_iter().map(|(e, _)| e).collect()
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_parse_all_encodings() {
let mut headers = HeaderMap::new();
headers.insert(header::ACCEPT_ENCODING, "gzip, br, zstd".parse().unwrap());
let encodings = parse_accepted_encodings(&headers);
assert_eq!(encodings[0], ContentEncoding::Zstd);
assert_eq!(encodings[1], ContentEncoding::Brotli);
assert_eq!(encodings[2], ContentEncoding::Gzip);
}
#[test]
fn test_parse_with_quality_values() {
let mut headers = HeaderMap::new();
headers.insert(
header::ACCEPT_ENCODING,
"gzip;q=1.0, br;q=0.5, zstd;q=0.8".parse().unwrap(),
);
let encodings = parse_accepted_encodings(&headers);
assert_eq!(encodings[0], ContentEncoding::Gzip);
assert_eq!(encodings[1], ContentEncoding::Zstd);
assert_eq!(encodings[2], ContentEncoding::Brotli);
}
#[test]
fn test_no_header_returns_identity() {
let headers = HeaderMap::new();
let encodings = parse_accepted_encodings(&headers);
assert_eq!(encodings, vec![ContentEncoding::Identity]);
}
#[test]
fn test_disabled_encoding_excluded() {
let mut headers = HeaderMap::new();
headers.insert(
header::ACCEPT_ENCODING,
"zstd;q=0, br, gzip".parse().unwrap(),
);
let encodings = parse_accepted_encodings(&headers);
assert_eq!(encodings[0], ContentEncoding::Brotli);
assert_eq!(encodings[1], ContentEncoding::Gzip);
assert!(!encodings.contains(&ContentEncoding::Zstd));
}
#[test]
fn test_real_chrome_header() {
let mut headers = HeaderMap::new();
headers.insert(
header::ACCEPT_ENCODING,
"gzip, deflate, br, zstd".parse().unwrap(),
);
assert_eq!(parse_accepted_encodings(&headers)[0], ContentEncoding::Zstd);
}
#[test]
fn test_extensions() {
assert_eq!(ContentEncoding::Zstd.extension(), ".zst");
assert_eq!(ContentEncoding::Brotli.extension(), ".br");
assert_eq!(ContentEncoding::Gzip.extension(), ".gz");
assert_eq!(ContentEncoding::Identity.extension(), "");
}
#[test]
fn test_header_values() {
assert_eq!(
ContentEncoding::Zstd.header_value().unwrap(),
HeaderValue::from_static("zstd")
);
assert_eq!(
ContentEncoding::Brotli.header_value().unwrap(),
HeaderValue::from_static("br")
);
assert!(ContentEncoding::Identity.header_value().is_none());
}
}
+74
@@ -0,0 +1,74 @@
//! Axum extractors for authentication and authorization.
use axum::extract::FromRequestParts;
use axum::http::{StatusCode, header};
use axum::response::Json;
use http::request::Parts;
use serde_json::json;
use crate::data::models::User;
use crate::state::AppState;
/// Extractor that resolves the session cookie to an authenticated [`User`].
///
/// Returns 401 if no valid session cookie is present.
pub struct AuthUser(pub User);
impl FromRequestParts<AppState> for AuthUser {
type Rejection = (StatusCode, Json<serde_json::Value>);
async fn from_request_parts(
parts: &mut Parts,
state: &AppState,
) -> Result<Self, Self::Rejection> {
let token = parts
.headers
.get(header::COOKIE)
.and_then(|v| v.to_str().ok())
.and_then(|cookies| {
cookies
.split(';')
.find_map(|c| c.trim().strip_prefix("session=").map(|v| v.to_owned()))
})
.ok_or_else(|| {
(
StatusCode::UNAUTHORIZED,
Json(json!({"error": "unauthorized", "message": "No session cookie"})),
)
})?;
let user = state.session_cache.get_user(&token).await.ok_or_else(|| {
(
StatusCode::UNAUTHORIZED,
Json(json!({"error": "unauthorized", "message": "Invalid or expired session"})),
)
})?;
Ok(AuthUser(user))
}
}
/// Extractor that requires an authenticated admin user.
///
/// Returns 401 if not authenticated, 403 if not admin.
pub struct AdminUser(pub User);
impl FromRequestParts<AppState> for AdminUser {
type Rejection = (StatusCode, Json<serde_json::Value>);
async fn from_request_parts(
parts: &mut Parts,
state: &AppState,
) -> Result<Self, Self::Rejection> {
let AuthUser(user) = AuthUser::from_request_parts(parts, state).await?;
if !user.is_admin {
return Err((
StatusCode::FORBIDDEN,
Json(json!({"error": "forbidden", "message": "Admin access required"})),
));
}
Ok(AdminUser(user))
}
}
+8
@@ -1,7 +1,15 @@
//! Web API module for the banner application.
pub mod admin;
pub mod admin_rmp;
#[cfg(feature = "embed-assets")]
pub mod assets;
pub mod auth;
#[cfg(feature = "embed-assets")]
pub mod encoding;
pub mod extractors;
pub mod routes;
pub mod session_cache;
pub mod ws;
pub use routes::*;
+537 -114
@@ -1,79 +1,97 @@
//! Web API endpoints for Banner bot monitoring and metrics.
use axum::{
Router,
Extension, Router,
body::Body,
extract::{Request, State},
extract::{Path, Query, Request, State},
http::StatusCode as AxumStatusCode,
response::{Json, Response},
routing::get,
routing::{get, post, put},
};
use crate::web::admin;
use crate::web::admin_rmp;
use crate::web::auth::{self, AuthConfig};
use crate::web::ws;
#[cfg(feature = "embed-assets")]
use axum::{
http::{HeaderMap, HeaderValue, StatusCode, Uri},
response::{Html, IntoResponse},
http::{HeaderMap, StatusCode, Uri},
response::IntoResponse,
};
#[cfg(feature = "embed-assets")]
use http::header;
use serde::Serialize;
use serde::{Deserialize, Serialize};
use serde_json::{Value, json};
use std::{collections::BTreeMap, time::Duration};
use ts_rs::TS;
use crate::state::AppState;
use crate::status::ServiceStatus;
#[cfg(not(feature = "embed-assets"))]
use tower_http::cors::{Any, CorsLayer};
use tower_http::{classify::ServerErrorsFailureClass, timeout::TimeoutLayer, trace::TraceLayer};
use tower_http::{
classify::ServerErrorsFailureClass, compression::CompressionLayer, timeout::TimeoutLayer,
trace::TraceLayer,
};
use tracing::{Span, debug, trace, warn};
#[cfg(feature = "embed-assets")]
use crate::web::assets::{WebAssets, get_asset_metadata_cached};
/// Set appropriate caching headers based on asset type
#[cfg(feature = "embed-assets")]
fn set_caching_headers(response: &mut Response, path: &str, etag: &str) {
let headers = response.headers_mut();
// Set ETag
if let Ok(etag_value) = HeaderValue::from_str(etag) {
headers.insert(header::ETAG, etag_value);
}
// Set Cache-Control based on asset type
let cache_control = if path.starts_with("assets/") {
// Static assets with hashed filenames - long-term cache
"public, max-age=31536000, immutable"
} else if path == "index.html" {
// HTML files - short-term cache
"public, max-age=300"
} else {
match path.split_once('.').map(|(_, extension)| extension) {
Some(ext) => match ext {
// CSS/JS files - medium-term cache
"css" | "js" => "public, max-age=86400",
// Images - long-term cache
"png" | "jpg" | "jpeg" | "gif" | "svg" | "ico" => "public, max-age=2592000",
// Default for other files
_ => "public, max-age=3600",
},
// Default for files without an extension
None => "public, max-age=3600",
}
};
if let Ok(cache_control_value) = HeaderValue::from_str(cache_control) {
headers.insert(header::CACHE_CONTROL, cache_control_value);
}
}
use crate::web::assets::try_serve_asset_with_encoding;
/// Creates the web server router
pub fn create_router(app_state: AppState) -> Router {
pub fn create_router(app_state: AppState, auth_config: AuthConfig) -> Router {
let api_router = Router::new()
.route("/health", get(health))
.route("/status", get(status))
.route("/metrics", get(metrics))
.route("/courses/search", get(search_courses))
.route("/courses/{term}/{crn}", get(get_course))
.route("/terms", get(get_terms))
.route("/subjects", get(get_subjects))
.route("/reference/{category}", get(get_reference))
.with_state(app_state.clone());
let auth_router = Router::new()
.route("/auth/login", get(auth::auth_login))
.route("/auth/callback", get(auth::auth_callback))
.route("/auth/logout", post(auth::auth_logout))
.route("/auth/me", get(auth::auth_me))
.layer(Extension(auth_config))
.with_state(app_state.clone());
let admin_router = Router::new()
.route("/admin/status", get(admin::admin_status))
.route("/admin/users", get(admin::list_users))
.route(
"/admin/users/{discord_id}/admin",
put(admin::set_user_admin),
)
.route("/admin/scrape-jobs", get(admin::list_scrape_jobs))
.route("/admin/scrape-jobs/ws", get(ws::scrape_jobs_ws))
.route("/admin/audit-log", get(admin::list_audit_log))
.route("/admin/instructors", get(admin_rmp::list_instructors))
.route("/admin/instructors/{id}", get(admin_rmp::get_instructor))
.route(
"/admin/instructors/{id}/match",
post(admin_rmp::match_instructor),
)
.route(
"/admin/instructors/{id}/reject-candidate",
post(admin_rmp::reject_candidate),
)
.route(
"/admin/instructors/{id}/reject-all",
post(admin_rmp::reject_all),
)
.route(
"/admin/instructors/{id}/unmatch",
post(admin_rmp::unmatch_instructor),
)
.route("/admin/rmp/rescore", post(admin_rmp::rescore))
.with_state(app_state);
let mut router = Router::new().nest("/api", api_router);
let mut router = Router::new()
.nest("/api", api_router)
.nest("/api", auth_router)
.nest("/api", admin_router);
// When embed-assets feature is enabled, serve embedded static assets
#[cfg(feature = "embed-assets")]
@@ -93,6 +111,13 @@ pub fn create_router(app_state: AppState) -> Router {
}
router.layer((
// Compress API responses (gzip/brotli/zstd). Pre-compressed static
// assets already have Content-Encoding set, so tower-http skips them.
CompressionLayer::new()
.zstd(true)
.br(true)
.gzip(true)
.quality(tower_http::CompressionLevel::Fastest),
TraceLayer::new_for_http()
.make_span_with(|request: &Request<Body>| {
tracing::debug_span!("request", path = request.uri().path())
@@ -139,71 +164,35 @@ pub fn create_router(app_state: AppState) -> Router {
))
}
/// Handler that extracts request information for caching
/// SPA fallback handler with content encoding negotiation.
///
/// Serves embedded static assets with pre-compressed variants when available,
/// falling back to `index.html` for SPA client-side routing.
#[cfg(feature = "embed-assets")]
async fn fallback(request: Request) -> Response {
async fn fallback(request: Request) -> axum::response::Response {
let uri = request.uri().clone();
let headers = request.headers().clone();
handle_spa_fallback_with_headers(uri, headers).await
handle_spa_fallback(uri, headers).await
}
/// Handles SPA routing by serving index.html for non-API, non-asset requests
/// This version includes HTTP caching headers and ETag support
#[cfg(feature = "embed-assets")]
async fn handle_spa_fallback_with_headers(uri: Uri, request_headers: HeaderMap) -> Response {
let path = uri.path().trim_start_matches('/');
if let Some(content) = WebAssets::get(path) {
// Get asset metadata (MIME type and hash) with caching
let metadata = get_asset_metadata_cached(path, &content.data);
// Check if client has a matching ETag (conditional request)
if let Some(etag) = request_headers.get(header::IF_NONE_MATCH)
&& etag.to_str().is_ok_and(|s| metadata.etag_matches(s))
{
return StatusCode::NOT_MODIFIED.into_response();
}
// Use cached MIME type, only set Content-Type if we have a valid MIME type
let mut response = (
[(
header::CONTENT_TYPE,
// For unknown types, set to application/octet-stream
metadata
.mime_type
.unwrap_or("application/octet-stream".to_string()),
)],
content.data,
)
.into_response();
// Set caching headers
set_caching_headers(&mut response, path, &metadata.hash.quoted());
async fn handle_spa_fallback(uri: Uri, request_headers: HeaderMap) -> axum::response::Response {
let path = uri.path();
// Try serving the exact asset (with encoding negotiation)
if let Some(response) = try_serve_asset_with_encoding(path, &request_headers) {
return response;
} else {
// Assets that are not found should 404 rather than fall back to the
// SPA index.html. uri.path() keeps its leading slash, so trim it first.
if path.trim_start_matches('/').starts_with("assets/") {
return (StatusCode::NOT_FOUND, "Asset not found").into_response();
}
}
// Fall back to the SPA index.html
match WebAssets::get("index.html") {
Some(content) => {
let metadata = get_asset_metadata_cached("index.html", &content.data);
// SvelteKit assets under _app/ that don't exist are a hard 404
let trimmed = path.trim_start_matches('/');
if trimmed.starts_with("_app/") || trimmed.starts_with("assets/") {
return (StatusCode::NOT_FOUND, "Asset not found").into_response();
}
// Check if client has a matching ETag for index.html
if let Some(etag) = request_headers.get(header::IF_NONE_MATCH)
&& etag.to_str().is_ok_and(|s| metadata.etag_matches(s))
{
return StatusCode::NOT_MODIFIED.into_response();
}
let mut response = Html(content.data).into_response();
set_caching_headers(&mut response, "index.html", &metadata.hash.quoted());
response
}
// SPA fallback: serve index.html with encoding negotiation
match try_serve_asset_with_encoding("/index.html", &request_headers) {
Some(response) => response,
None => (
StatusCode::INTERNAL_SERVER_ERROR,
"Failed to load index.html",
@@ -221,14 +210,16 @@ async fn health() -> Json<Value> {
}))
}
#[derive(Serialize)]
struct ServiceInfo {
#[derive(Serialize, TS)]
#[ts(export)]
pub struct ServiceInfo {
name: String,
status: ServiceStatus,
}
#[derive(Serialize)]
struct StatusResponse {
#[derive(Serialize, TS)]
#[ts(export)]
pub struct StatusResponse {
status: ServiceStatus,
version: String,
commit: String,
@@ -249,7 +240,10 @@ async fn status(State(state): State<AppState>) -> Json<StatusResponse> {
);
}
let overall_status = if services.values().any(|s| matches!(s.status, ServiceStatus::Error)) {
let overall_status = if services
.values()
.any(|s| matches!(s.status, ServiceStatus::Error))
{
ServiceStatus::Error
} else if !services.is_empty()
&& services
@@ -272,12 +266,441 @@ async fn status(State(state): State<AppState>) -> Json<StatusResponse> {
}
/// Metrics endpoint for monitoring
async fn metrics() -> Json<Value> {
// For now, return basic metrics structure
Json(json!({
"banner_api": {
"status": "connected"
async fn metrics(
State(state): State<AppState>,
Query(params): Query<MetricsParams>,
) -> Result<Json<Value>, (AxumStatusCode, String)> {
let limit = params.limit.clamp(1, 5000);
// Parse range shorthand, defaulting to 24h
let range_str = params.range.as_deref().unwrap_or("24h");
let duration = match range_str {
"1h" => chrono::Duration::hours(1),
"6h" => chrono::Duration::hours(6),
"24h" => chrono::Duration::hours(24),
"7d" => chrono::Duration::days(7),
"30d" => chrono::Duration::days(30),
_ => {
return Err((
AxumStatusCode::BAD_REQUEST,
format!("Invalid range '{range_str}'. Valid: 1h, 6h, 24h, 7d, 30d"),
));
}
};
let since = chrono::Utc::now() - duration;
// Resolve course_id: explicit param takes priority, then term+crn lookup
let course_id = if let Some(id) = params.course_id {
Some(id)
} else if let (Some(term), Some(crn)) = (params.term.as_deref(), params.crn.as_deref()) {
let row: Option<(i32,)> =
sqlx::query_as("SELECT id FROM courses WHERE term_code = $1 AND crn = $2")
.bind(term)
.bind(crn)
.fetch_optional(&state.db_pool)
.await
.map_err(|e| {
tracing::error!(error = %e, "Course lookup for metrics failed");
(
AxumStatusCode::INTERNAL_SERVER_ERROR,
"Course lookup failed".to_string(),
)
})?;
row.map(|(id,)| id)
} else {
None
};
// Build query dynamically based on filters
let metrics: Vec<(i32, i32, chrono::DateTime<chrono::Utc>, i32, i32, i32)> =
if let Some(cid) = course_id {
sqlx::query_as(
"SELECT id, course_id, timestamp, enrollment, wait_count, seats_available \
FROM course_metrics \
WHERE course_id = $1 AND timestamp >= $2 \
ORDER BY timestamp DESC \
LIMIT $3",
)
.bind(cid)
.bind(since)
.bind(limit)
.fetch_all(&state.db_pool)
.await
} else {
sqlx::query_as(
"SELECT id, course_id, timestamp, enrollment, wait_count, seats_available \
FROM course_metrics \
WHERE timestamp >= $1 \
ORDER BY timestamp DESC \
LIMIT $2",
)
.bind(since)
.bind(limit)
.fetch_all(&state.db_pool)
.await
}
.map_err(|e| {
tracing::error!(error = %e, "Metrics query failed");
(
AxumStatusCode::INTERNAL_SERVER_ERROR,
"Metrics query failed".to_string(),
)
})?;
let count = metrics.len();
let metrics_json: Vec<Value> = metrics
.into_iter()
.map(
|(id, course_id, timestamp, enrollment, wait_count, seats_available)| {
json!({
"id": id,
"courseId": course_id,
"timestamp": timestamp.to_rfc3339(),
"enrollment": enrollment,
"waitCount": wait_count,
"seatsAvailable": seats_available,
})
},
)
.collect();
Ok(Json(json!({
"metrics": metrics_json,
"count": count,
"timestamp": chrono::Utc::now().to_rfc3339(),
})))
}
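As a worked example (identifiers and timestamps illustrative), a request like GET /api/metrics?term=202510&crn=50001&range=7d&limit=100 resolves the course through the term+CRN lookup and returns the shape assembled above:

{
  "metrics": [
    {
      "id": 1,
      "courseId": 42,
      "timestamp": "2026-01-29T18:00:00+00:00",
      "enrollment": 20,
      "waitCount": 2,
      "seatsAvailable": 15
    }
  ],
  "count": 1,
  "timestamp": "2026-01-30T00:00:00+00:00"
}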
// ============================================================
// Course search & detail API
// ============================================================
#[derive(Deserialize)]
struct MetricsParams {
course_id: Option<i32>,
term: Option<String>,
crn: Option<String>,
/// Shorthand durations: "1h", "6h", "24h", "7d", "30d"
range: Option<String>,
#[serde(default = "default_metrics_limit")]
limit: i32,
}
fn default_metrics_limit() -> i32 {
500
}
#[derive(Deserialize)]
struct SubjectsParams {
term: String,
}
#[derive(Deserialize)]
struct SearchParams {
term: String,
#[serde(default)]
subject: Vec<String>,
q: Option<String>,
course_number_low: Option<i32>,
course_number_high: Option<i32>,
#[serde(default)]
open_only: bool,
instructional_method: Option<String>,
campus: Option<String>,
#[serde(default = "default_limit")]
limit: i32,
#[serde(default)]
offset: i32,
sort_by: Option<SortColumn>,
sort_dir: Option<SortDirection>,
}
use crate::data::courses::{SortColumn, SortDirection};
fn default_limit() -> i32 {
25
}
#[derive(Serialize, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export)]
pub struct CourseResponse {
crn: String,
subject: String,
course_number: String,
title: String,
term_code: String,
sequence_number: Option<String>,
instructional_method: Option<String>,
campus: Option<String>,
enrollment: i32,
max_enrollment: i32,
wait_count: i32,
wait_capacity: i32,
credit_hours: Option<i32>,
credit_hour_low: Option<i32>,
credit_hour_high: Option<i32>,
cross_list: Option<String>,
cross_list_capacity: Option<i32>,
cross_list_count: Option<i32>,
link_identifier: Option<String>,
is_section_linked: Option<bool>,
part_of_term: Option<String>,
meeting_times: Vec<crate::data::models::DbMeetingTime>,
attributes: Vec<String>,
instructors: Vec<InstructorResponse>,
}
#[derive(Serialize, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export)]
pub struct InstructorResponse {
instructor_id: i32,
banner_id: String,
display_name: String,
email: String,
is_primary: bool,
rmp_rating: Option<f32>,
rmp_num_ratings: Option<i32>,
rmp_legacy_id: Option<i32>,
}
#[derive(Serialize, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export)]
pub struct SearchResponse {
courses: Vec<CourseResponse>,
total_count: i32,
offset: i32,
limit: i32,
}
#[derive(Serialize, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export)]
pub struct CodeDescription {
code: String,
description: String,
}
/// Build a `CourseResponse` from a DB course with pre-fetched instructor details.
fn build_course_response(
course: &crate::data::models::Course,
instructors: Vec<crate::data::models::CourseInstructorDetail>,
) -> CourseResponse {
let instructors = instructors
.into_iter()
.map(|i| InstructorResponse {
instructor_id: i.instructor_id,
banner_id: i.banner_id,
display_name: i.display_name,
email: i.email,
is_primary: i.is_primary,
rmp_rating: i.avg_rating,
rmp_num_ratings: i.num_ratings,
rmp_legacy_id: i.rmp_legacy_id,
})
.collect();
CourseResponse {
crn: course.crn.clone(),
subject: course.subject.clone(),
course_number: course.course_number.clone(),
title: course.title.clone(),
term_code: course.term_code.clone(),
sequence_number: course.sequence_number.clone(),
instructional_method: course.instructional_method.clone(),
campus: course.campus.clone(),
enrollment: course.enrollment,
max_enrollment: course.max_enrollment,
wait_count: course.wait_count,
wait_capacity: course.wait_capacity,
credit_hours: course.credit_hours,
credit_hour_low: course.credit_hour_low,
credit_hour_high: course.credit_hour_high,
cross_list: course.cross_list.clone(),
cross_list_capacity: course.cross_list_capacity,
cross_list_count: course.cross_list_count,
link_identifier: course.link_identifier.clone(),
is_section_linked: course.is_section_linked,
part_of_term: course.part_of_term.clone(),
meeting_times: serde_json::from_value(course.meeting_times.clone()).unwrap_or_default(),
attributes: serde_json::from_value(course.attributes.clone()).unwrap_or_default(),
instructors,
}
}
/// `GET /api/courses/search`
async fn search_courses(
State(state): State<AppState>,
axum_extra::extract::Query(params): axum_extra::extract::Query<SearchParams>,
) -> Result<Json<SearchResponse>, (AxumStatusCode, String)> {
let limit = params.limit.clamp(1, 100);
let offset = params.offset.max(0);
let (courses, total_count) = crate::data::courses::search_courses(
&state.db_pool,
&params.term,
if params.subject.is_empty() {
None
} else {
Some(&params.subject)
},
"timestamp": chrono::Utc::now().to_rfc3339()
params.q.as_deref(),
params.course_number_low,
params.course_number_high,
params.open_only,
params.instructional_method.as_deref(),
params.campus.as_deref(),
limit,
offset,
params.sort_by,
params.sort_dir,
)
.await
.map_err(|e| {
tracing::error!(error = %e, "Course search failed");
(
AxumStatusCode::INTERNAL_SERVER_ERROR,
"Search failed".to_string(),
)
})?;
// Batch-fetch all instructors in a single query instead of N+1
let course_ids: Vec<i32> = courses.iter().map(|c| c.id).collect();
let mut instructor_map =
crate::data::courses::get_instructors_for_courses(&state.db_pool, &course_ids)
.await
.unwrap_or_default();
let course_responses: Vec<CourseResponse> = courses
.iter()
.map(|course| {
let instructors = instructor_map.remove(&course.id).unwrap_or_default();
build_course_response(course, instructors)
})
.collect();
Ok(Json(SearchResponse {
courses: course_responses,
total_count: total_count as i32,
offset,
limit,
}))
}
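Note that the handler uses axum_extra's `Query`, which collects repeated keys into `Vec<String>`, so a request such as the following (values illustrative, matching the client tests later in this diff) filters two subjects at once:

GET /api/courses/search?term=202420&subject=CS&subject=MAT&open_only=true&limit=25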
/// `GET /api/courses/:term/:crn`
async fn get_course(
State(state): State<AppState>,
Path((term, crn)): Path<(String, String)>,
) -> Result<Json<CourseResponse>, (AxumStatusCode, String)> {
let course = crate::data::courses::get_course_by_crn(&state.db_pool, &crn, &term)
.await
.map_err(|e| {
tracing::error!(error = %e, "Course lookup failed");
(
AxumStatusCode::INTERNAL_SERVER_ERROR,
"Lookup failed".to_string(),
)
})?
.ok_or_else(|| (AxumStatusCode::NOT_FOUND, "Course not found".to_string()))?;
let instructors = crate::data::courses::get_course_instructors(&state.db_pool, course.id)
.await
.unwrap_or_default();
Ok(Json(build_course_response(&course, instructors)))
}
/// `GET /api/terms`
async fn get_terms(
State(state): State<AppState>,
) -> Result<Json<Vec<CodeDescription>>, (AxumStatusCode, String)> {
let cache = state.reference_cache.read().await;
let term_codes = crate::data::courses::get_available_terms(&state.db_pool)
.await
.map_err(|e| {
tracing::error!(error = %e, "Failed to get terms");
(
AxumStatusCode::INTERNAL_SERVER_ERROR,
"Failed to get terms".to_string(),
)
})?;
let terms: Vec<CodeDescription> = term_codes
.into_iter()
.map(|code| {
let description = cache
.lookup("term", &code)
.unwrap_or("Unknown Term")
.to_string();
CodeDescription { code, description }
})
.collect();
Ok(Json(terms))
}
/// `GET /api/subjects?term=202620`
async fn get_subjects(
State(state): State<AppState>,
Query(params): Query<SubjectsParams>,
) -> Result<Json<Vec<CodeDescription>>, (AxumStatusCode, String)> {
let rows = crate::data::courses::get_subjects_by_enrollment(&state.db_pool, &params.term)
.await
.map_err(|e| {
tracing::error!(error = %e, "Failed to get subjects");
(
AxumStatusCode::INTERNAL_SERVER_ERROR,
"Failed to get subjects".to_string(),
)
})?;
let subjects: Vec<CodeDescription> = rows
.into_iter()
.map(|(code, description, _enrollment)| CodeDescription { code, description })
.collect();
Ok(Json(subjects))
}
/// `GET /api/reference/:category`
async fn get_reference(
State(state): State<AppState>,
Path(category): Path<String>,
) -> Result<Json<Vec<CodeDescription>>, (AxumStatusCode, String)> {
let cache = state.reference_cache.read().await;
let entries = cache.entries_for_category(&category);
if entries.is_empty() {
// Fall back to DB query in case cache doesn't have this category
drop(cache);
let rows = crate::data::reference::get_by_category(&category, &state.db_pool)
.await
.map_err(|e| {
tracing::error!(error = %e, category = %category, "Reference lookup failed");
(
AxumStatusCode::INTERNAL_SERVER_ERROR,
"Lookup failed".to_string(),
)
})?;
return Ok(Json(
rows.into_iter()
.map(|r| CodeDescription {
code: r.code,
description: r.description,
})
.collect(),
));
}
Ok(Json(
entries
.into_iter()
.map(|(code, desc)| CodeDescription {
code: code.to_string(),
description: desc.to_string(),
})
.collect(),
))
}
+188
@@ -0,0 +1,188 @@
//! In-memory caches for session resolution and OAuth CSRF state.
use chrono::{DateTime, Utc};
use dashmap::DashMap;
use rand::Rng;
use sqlx::PgPool;
use std::sync::Arc;
use std::time::{Duration, Instant};
use crate::data::models::User;
/// Cached session entry with TTL.
#[derive(Debug, Clone)]
struct CachedSession {
user: User,
session_expires_at: DateTime<Utc>,
cached_at: Instant,
}
/// In-memory session cache backed by PostgreSQL.
///
/// Provides fast session resolution without a DB round-trip on every request.
/// Cache entries go stale after a fixed TTL (5 minutes) and are then re-fetched.
#[derive(Clone)]
pub struct SessionCache {
cache: Arc<DashMap<String, CachedSession>>,
db_pool: PgPool,
cache_ttl: Duration,
}
impl SessionCache {
/// Create a new session cache with a 5-minute default TTL.
pub fn new(db_pool: PgPool) -> Self {
Self {
cache: Arc::new(DashMap::new()),
db_pool,
cache_ttl: Duration::from_secs(5 * 60),
}
}
/// Resolve a session token to a [`User`], using the cache when possible.
///
/// On cache hit (entry present, not stale, session not expired), returns the
/// cached user immediately. On miss or stale entry, queries the database for
/// the session and user, populates the cache, and fire-and-forgets a
/// `touch_session` call to update `last_active_at`.
pub async fn get_user(&self, token: &str) -> Option<User> {
// Check cache first
if let Some(entry) = self.cache.get(token) {
let now_instant = Instant::now();
let now_utc = Utc::now();
let cache_fresh = entry.cached_at + self.cache_ttl > now_instant;
let session_valid = entry.session_expires_at > now_utc;
if cache_fresh && session_valid {
return Some(entry.user.clone());
}
// Stale or expired — drop the ref before removing
drop(entry);
self.cache.remove(token);
}
// Cache miss — query DB
let session = crate::data::sessions::get_session(&self.db_pool, token)
.await
.ok()
.flatten()?;
let user = crate::data::users::get_user(&self.db_pool, session.user_id)
.await
.ok()
.flatten()?;
self.cache.insert(
token.to_owned(),
CachedSession {
user: user.clone(),
session_expires_at: session.expires_at,
cached_at: Instant::now(),
},
);
// Fire-and-forget touch to update last_active_at
let pool = self.db_pool.clone();
let token_owned = token.to_owned();
tokio::spawn(async move {
if let Err(e) = crate::data::sessions::touch_session(&pool, &token_owned).await {
tracing::warn!(error = %e, "failed to touch session");
}
});
Some(user)
}
/// Remove a single session from the cache (e.g. on logout).
pub fn evict(&self, token: &str) {
self.cache.remove(token);
}
/// Remove all cached sessions belonging to a user.
pub fn evict_user(&self, discord_id: i64) {
self.cache
.retain(|_, entry| entry.user.discord_id != discord_id);
}
/// Delete expired sessions from the database and sweep the in-memory cache.
///
/// Returns the number of sessions deleted from the database.
#[allow(dead_code)] // Intended for periodic cleanup task (not yet wired)
pub async fn cleanup_expired(&self) -> anyhow::Result<u64> {
let deleted = crate::data::sessions::cleanup_expired(&self.db_pool).await?;
let now = Utc::now();
self.cache.retain(|_, entry| entry.session_expires_at > now);
Ok(deleted)
}
}
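A minimal sketch of the intended lifecycle, assuming the surrounding startup wiring (construct once, store in `AppState`, resolve per request; `db_pool` and `token` are assumed to be in scope):

// Hypothetical wiring sketch; in this diff the cache lives in AppState
// and is queried by the AuthUser extractor.
let session_cache = SessionCache::new(db_pool.clone());

// Per request: a hit within the 5-minute TTL skips Postgres entirely.
if let Some(user) = session_cache.get_user(&token).await {
    // proceed as `user`
}

// On logout, evict immediately rather than waiting for the TTL.
session_cache.evict(&token);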
/// Data stored alongside each OAuth CSRF state token.
struct OAuthStateEntry {
created_at: Instant,
/// The browser origin that initiated the login flow, so the callback
/// can reconstruct the exact redirect_uri Discord expects.
origin: String,
}
/// Ephemeral store for OAuth CSRF state tokens.
///
/// Tokens are stored with their creation time and expire after a fixed TTL (10 minutes).
/// Each token is single-use: validation consumes it.
#[derive(Clone)]
pub struct OAuthStateStore {
states: Arc<DashMap<String, OAuthStateEntry>>,
ttl: Duration,
}
impl Default for OAuthStateStore {
fn default() -> Self {
Self::new()
}
}
impl OAuthStateStore {
/// Create a new store with a 10-minute TTL.
pub fn new() -> Self {
Self {
states: Arc::new(DashMap::new()),
ttl: Duration::from_secs(10 * 60),
}
}
/// Generate a random 16-byte hex CSRF token, store it with the given
/// origin, and return the token.
pub fn generate(&self, origin: String) -> String {
let bytes: [u8; 16] = rand::rng().random();
let token: String = bytes.iter().map(|b| format!("{b:02x}")).collect();
self.states.insert(
token.clone(),
OAuthStateEntry {
created_at: Instant::now(),
origin,
},
);
token
}
/// Validate and consume a CSRF token. Returns the stored origin if the
/// token was present and not expired.
pub fn validate(&self, state: &str) -> Option<String> {
let (_, entry) = self.states.remove(state)?;
if entry.created_at.elapsed() < self.ttl {
Some(entry.origin)
} else {
None
}
}
/// Remove all expired entries from the store.
#[allow(dead_code)] // Intended for periodic cleanup task (not yet wired)
pub fn cleanup(&self) {
let ttl = self.ttl;
self.states
.retain(|_, entry| entry.created_at.elapsed() < ttl);
}
}
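Sketching the login/callback flow these two methods support (the endpoint bodies are assumptions; only `generate` and `validate` come from this diff):

let oauth_states = OAuthStateStore::new();

// /auth/login: bind a one-time token to the requesting origin and pass
// it as the `state` query parameter in the Discord authorize URL.
let state_token = oauth_states.generate(origin.clone());

// /auth/callback: consume the token. None means expired, reused, or
// forged; the stored origin rebuilds the exact redirect_uri Discord expects.
match oauth_states.validate(&returned_state) {
    Some(origin) => { /* exchange the code, create a session */ }
    None => { /* 400: CSRF state check failed */ }
}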
+205
@@ -0,0 +1,205 @@
//! WebSocket event types and handler for real-time scrape job updates.
use axum::{
extract::{
State,
ws::{Message, WebSocket, WebSocketUpgrade},
},
response::IntoResponse,
};
use futures::{SinkExt, StreamExt};
use serde::Serialize;
use sqlx::PgPool;
use tokio::sync::broadcast;
use tracing::debug;
use crate::data::models::{ScrapeJob, ScrapeJobStatus};
use crate::state::AppState;
use crate::web::extractors::AdminUser;
/// A serializable DTO for `ScrapeJob` with computed `status`.
#[derive(Debug, Clone, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct ScrapeJobDto {
pub id: i32,
pub target_type: String,
pub target_payload: serde_json::Value,
pub priority: String,
pub execute_at: String,
pub created_at: String,
pub locked_at: Option<String>,
pub retry_count: i32,
pub max_retries: i32,
pub queued_at: String,
pub status: ScrapeJobStatus,
}
impl From<&ScrapeJob> for ScrapeJobDto {
fn from(job: &ScrapeJob) -> Self {
Self {
id: job.id,
target_type: format!("{:?}", job.target_type),
target_payload: job.target_payload.clone(),
priority: format!("{:?}", job.priority),
execute_at: job.execute_at.to_rfc3339(),
created_at: job.created_at.to_rfc3339(),
locked_at: job.locked_at.map(|t| t.to_rfc3339()),
retry_count: job.retry_count,
max_retries: job.max_retries,
queued_at: job.queued_at.to_rfc3339(),
status: job.status(),
}
}
}
/// Events broadcast when scrape job state changes.
#[derive(Debug, Clone, Serialize)]
#[serde(tag = "type", rename_all = "camelCase")]
pub enum ScrapeJobEvent {
Init {
jobs: Vec<ScrapeJobDto>,
},
JobCreated {
job: ScrapeJobDto,
},
JobLocked {
id: i32,
locked_at: String,
status: ScrapeJobStatus,
},
JobCompleted {
id: i32,
},
JobRetried {
id: i32,
retry_count: i32,
queued_at: String,
status: ScrapeJobStatus,
},
JobExhausted {
id: i32,
},
JobDeleted {
id: i32,
},
}
/// Fetch current scrape jobs from the DB and build an `Init` event.
async fn build_init_event(db_pool: &PgPool) -> Result<ScrapeJobEvent, sqlx::Error> {
let rows = sqlx::query_as::<_, ScrapeJob>(
"SELECT * FROM scrape_jobs ORDER BY priority DESC, execute_at ASC LIMIT 100",
)
.fetch_all(db_pool)
.await?;
let jobs = rows.iter().map(ScrapeJobDto::from).collect();
Ok(ScrapeJobEvent::Init { jobs })
}
/// WebSocket endpoint for real-time scrape job updates.
///
/// Auth is checked via `AdminUser` before the upgrade occurs — if rejected,
/// a 401/403 is returned and the upgrade never happens.
pub async fn scrape_jobs_ws(
ws: WebSocketUpgrade,
AdminUser(_user): AdminUser,
State(state): State<AppState>,
) -> impl IntoResponse {
ws.on_upgrade(|socket| handle_scrape_jobs_ws(socket, state))
}
/// Serialize an event and send it over the WebSocket sink.
/// Returns `true` if the message was sent, `false` if the client disconnected.
async fn send_event(
sink: &mut futures::stream::SplitSink<WebSocket, Message>,
event: &ScrapeJobEvent,
) -> bool {
let Ok(json) = serde_json::to_string(event) else {
return true; // serialization failed, but connection is still alive
};
sink.send(Message::Text(json.into())).await.is_ok()
}
async fn handle_scrape_jobs_ws(socket: WebSocket, state: AppState) {
debug!("scrape-jobs WebSocket connected");
let (mut sink, mut stream) = socket.split();
// Send initial state
let init_event = match build_init_event(&state.db_pool).await {
Ok(event) => event,
Err(e) => {
debug!(error = %e, "failed to build init event, closing WebSocket");
return;
}
};
if !send_event(&mut sink, &init_event).await {
debug!("client disconnected during init send");
return;
}
// Subscribe to broadcast events
let mut rx = state.scrape_job_events();
loop {
tokio::select! {
result = rx.recv() => {
match result {
Ok(ref event) => {
if !send_event(&mut sink, event).await {
debug!("client disconnected during event send");
break;
}
}
Err(broadcast::error::RecvError::Lagged(n)) => {
debug!(missed = n, "broadcast lagged, resyncing");
match build_init_event(&state.db_pool).await {
Ok(ref event) => {
if !send_event(&mut sink, event).await {
debug!("client disconnected during resync send");
break;
}
}
Err(e) => {
debug!(error = %e, "failed to build resync init event");
}
}
}
Err(broadcast::error::RecvError::Closed) => {
debug!("broadcast channel closed");
break;
}
}
}
msg = stream.next() => {
match msg {
Some(Ok(Message::Text(text))) => {
if let Ok(parsed) = serde_json::from_str::<serde_json::Value>(&text)
&& parsed.get("type").and_then(|t| t.as_str()) == Some("resync")
{
debug!("client requested resync");
match build_init_event(&state.db_pool).await {
Ok(ref event) => {
if !send_event(&mut sink, event).await {
debug!("client disconnected during resync send");
break;
}
}
Err(e) => {
debug!(error = %e, "failed to build resync init event");
}
}
}
}
Some(Ok(Message::Close(_))) | None => {
debug!("client disconnected");
break;
}
_ => {}
}
}
}
}
debug!("scrape-jobs WebSocket disconnected");
}
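The receiving half above assumes a broadcast channel exposed as `state.scrape_job_events()`; the producing half is outside this file. A hedged sketch of the publish side (channel capacity and send sites are assumptions):

use tokio::sync::broadcast;

// Hypothetical publisher. Slow subscribers observe RecvError::Lagged
// and resync with a fresh Init event, exactly as the handler above does.
let (tx, _rx) = broadcast::channel::<ScrapeJobEvent>(256);

// send() errors only when no subscriber exists; safe to ignore here.
let _ = tx.send(ScrapeJobEvent::JobCompleted { id: 42 });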
+113
@@ -210,3 +210,116 @@ async fn test_batch_upsert_unique_constraint_crn_term(pool: PgPool) {
assert_eq!(rows[1].0, "202520");
assert_eq!(rows[1].1, 10);
}
#[sqlx::test]
async fn test_batch_upsert_creates_audit_and_metric_entries(pool: PgPool) {
// Insert initial data — should NOT create audits/metrics (it's a fresh insert)
let initial = vec![helpers::make_course(
"50001",
"202510",
"CS",
"3443",
"App Programming",
10,
35,
0,
5,
)];
batch_upsert_courses(&initial, &pool).await.unwrap();
let (audit_count,): (i64,) = sqlx::query_as("SELECT COUNT(*) FROM course_audits")
.fetch_one(&pool)
.await
.unwrap();
assert_eq!(
audit_count, 0,
"initial insert should not create audit entries"
);
let (metric_count,): (i64,) = sqlx::query_as("SELECT COUNT(*) FROM course_metrics")
.fetch_one(&pool)
.await
.unwrap();
assert_eq!(
metric_count, 0,
"initial insert should not create metric entries"
);
// Update enrollment and wait_count
let updated = vec![helpers::make_course(
"50001",
"202510",
"CS",
"3443",
"App Programming",
20,
35,
2,
5,
)];
batch_upsert_courses(&updated, &pool).await.unwrap();
// Should have audit entries for enrollment and wait_count changes
let (audit_count,): (i64,) = sqlx::query_as("SELECT COUNT(*) FROM course_audits")
.fetch_one(&pool)
.await
.unwrap();
assert!(
audit_count >= 2,
"should have audit entries for enrollment and wait_count changes, got {audit_count}"
);
// Should have exactly 1 metric entry
let (metric_count,): (i64,) = sqlx::query_as("SELECT COUNT(*) FROM course_metrics")
.fetch_one(&pool)
.await
.unwrap();
assert_eq!(metric_count, 1, "should have 1 metric snapshot");
// Verify metric values
let (enrollment, wait_count, seats): (i32, i32, i32) = sqlx::query_as(
"SELECT enrollment, wait_count, seats_available FROM course_metrics LIMIT 1",
)
.fetch_one(&pool)
.await
.unwrap();
assert_eq!(enrollment, 20);
assert_eq!(wait_count, 2);
assert_eq!(seats, 15); // 35 - 20
}
#[sqlx::test]
async fn test_batch_upsert_no_change_no_audit(pool: PgPool) {
// Insert then re-insert identical data — should produce zero audits/metrics
let course = vec![helpers::make_course(
"60001",
"202510",
"CS",
"1083",
"Intro to CS",
25,
30,
0,
5,
)];
batch_upsert_courses(&course, &pool).await.unwrap();
batch_upsert_courses(&course, &pool).await.unwrap();
let (audit_count,): (i64,) = sqlx::query_as("SELECT COUNT(*) FROM course_audits")
.fetch_one(&pool)
.await
.unwrap();
assert_eq!(
audit_count, 0,
"identical re-upsert should not create audit entries"
);
let (metric_count,): (i64,) = sqlx::query_as("SELECT COUNT(*) FROM course_metrics")
.fetch_one(&pool)
.await
.unwrap();
assert_eq!(
metric_count, 0,
"identical re-upsert should not create metric entries"
);
}
+17 -11
@@ -217,10 +217,13 @@ async fn unlock_and_increment_retry_has_retries_remaining(pool: PgPool) {
)
.await;
let has_retries = scrape_jobs::unlock_and_increment_retry(id, 3, &pool)
let result = scrape_jobs::unlock_and_increment_retry(id, 3, &pool)
.await
.unwrap();
assert!(has_retries, "should have retries remaining (0→1, max=3)");
assert!(
result.is_some(),
"should have retries remaining (0→1, max=3)"
);
// Verify state in DB
let (retry_count, locked_at): (i32, Option<chrono::DateTime<chrono::Utc>>) =
@@ -241,17 +244,17 @@ async fn unlock_and_increment_retry_exhausted(pool: PgPool) {
json!({"subject": "CS"}),
ScrapePriority::Medium,
true,
2, // retry_count
3, // retry_count (already used all 3 retries)
3, // max_retries
)
.await;
let has_retries = scrape_jobs::unlock_and_increment_retry(id, 3, &pool)
let result = scrape_jobs::unlock_and_increment_retry(id, 3, &pool)
.await
.unwrap();
assert!(
!has_retries,
"should NOT have retries remaining (2→3, max=3)"
result.is_none(),
"should NOT have retries remaining (3→4, max=3)"
);
let (retry_count,): (i32,) =
@@ -260,7 +263,7 @@ async fn unlock_and_increment_retry_exhausted(pool: PgPool) {
.fetch_one(&pool)
.await
.unwrap();
assert_eq!(retry_count, 3);
assert_eq!(retry_count, 4);
}
#[sqlx::test]
@@ -276,11 +279,11 @@ async fn unlock_and_increment_retry_already_exceeded(pool: PgPool) {
)
.await;
let has_retries = scrape_jobs::unlock_and_increment_retry(id, 3, &pool)
let result = scrape_jobs::unlock_and_increment_retry(id, 3, &pool)
.await
.unwrap();
assert!(
!has_retries,
result.is_none(),
"should NOT have retries remaining (5→6, max=3)"
);
@@ -346,7 +349,7 @@ async fn find_existing_payloads_returns_matching(pool: PgPool) {
}
#[sqlx::test]
async fn find_existing_payloads_ignores_locked(pool: PgPool) {
async fn find_existing_payloads_includes_locked(pool: PgPool) {
let payload = json!({"subject": "CS"});
helpers::insert_scrape_job(
@@ -365,7 +368,10 @@ async fn find_existing_payloads_ignores_locked(pool: PgPool) {
.await
.unwrap();
assert!(existing.is_empty(), "locked jobs should be ignored");
assert!(
existing.contains(&payload.to_string()),
"locked jobs should be included in deduplication"
);
}
#[sqlx::test]
+1 -2
@@ -5,5 +5,4 @@ dist-ssr
*.local
count.txt
.env
.nitro
.tanstack
.svelte-kit
+1 -1
@@ -7,7 +7,7 @@
},
"files": {
"ignoreUnknown": false,
"ignore": ["dist/", "node_modules/", ".tanstack/"]
"ignore": ["dist/", "node_modules/", ".svelte-kit/", "src/lib/bindings/"]
},
"formatter": {
"enabled": true,
+156 -849
File diff suppressed because it is too large
-60
@@ -1,60 +0,0 @@
import js from "@eslint/js";
import tseslint from "typescript-eslint";
import react from "eslint-plugin-react";
import reactHooks from "eslint-plugin-react-hooks";
import reactRefresh from "eslint-plugin-react-refresh";
export default tseslint.config(
// Ignore generated files and build outputs
{
ignores: ["dist", "node_modules", "src/routeTree.gen.ts", "*.config.js"],
},
// Base configs
js.configs.recommended,
...tseslint.configs.recommendedTypeChecked,
// React plugin configuration
{
files: ["**/*.{ts,tsx}"],
plugins: {
react,
"react-hooks": reactHooks,
"react-refresh": reactRefresh,
},
languageOptions: {
parserOptions: {
project: true,
tsconfigRootDir: import.meta.dirname,
ecmaFeatures: {
jsx: true,
},
},
},
settings: {
react: {
version: "19.0",
},
},
rules: {
// React rules
...react.configs.recommended.rules,
...react.configs["jsx-runtime"].rules,
...reactHooks.configs.recommended.rules,
// React Refresh
"react-refresh/only-export-components": ["warn", { allowConstantExport: true }],
// TypeScript overrides
"@typescript-eslint/no-unused-vars": [
"error",
{
argsIgnorePattern: "^_",
varsIgnorePattern: "^_",
},
],
"@typescript-eslint/no-explicit-any": "warn",
// Disable prop-types since we're using TypeScript
"react/prop-types": "off",
},
}
);
-20
@@ -1,20 +0,0 @@
<!doctype html>
<html lang="en">
<head>
<meta charset="UTF-8" />
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
<link rel="icon" href="/favicon.ico" />
<meta name="theme-color" content="#000000" />
<meta
name="description"
content="Banner, a Discord bot and web interface for UTSA Course Monitoring"
/>
<link rel="apple-touch-icon" href="/logo192.png" />
<link rel="manifest" href="/manifest.json" />
<title>Banner</title>
</head>
<body>
<div id="app"></div>
<script type="module" src="/src/main.tsx"></script>
</body>
</html>
+31 -34
@@ -3,48 +3,45 @@
"private": true,
"type": "module",
"scripts": {
"dev": "vite --port 3000",
"start": "vite --port 3000",
"build": "vite build && tsc",
"serve": "vite preview",
"dev": "vite dev --port 3000",
"build": "vite build",
"preview": "vite preview",
"check": "svelte-kit sync && svelte-check --tsconfig ./tsconfig.json",
"test": "vitest run",
"lint": "tsc && eslint . --ext .ts,.tsx",
"typecheck": "tsc --noEmit",
"format": "biome format --write .",
"format:check": "biome format ."
},
"dependencies": {
"@radix-ui/themes": "^3.2.1",
"@tanstack/react-devtools": "^0.2.2",
"@tanstack/react-router": "^1.157.16",
"@tanstack/react-router-devtools": "^1.157.16",
"@tanstack/router-plugin": "^1.157.16",
"lucide-react": "^0.544.0",
"next-themes": "^0.4.6",
"react": "^19.2.4",
"react-dom": "^19.2.4",
"react-timeago": "^8.3.0",
"recharts": "^3.7.0"
},
"devDependencies": {
"@biomejs/biome": "^1.9.4",
"@eslint/js": "^9.39.2",
"@testing-library/dom": "^10.4.0",
"@testing-library/react": "^16.3.2",
"@types/node": "^24.10.9",
"@types/react": "^19.2.10",
"@types/react-dom": "^19.0.3",
"@vitejs/plugin-react": "^4.3.4",
"baseline-browser-mapping": "^2.9.19",
"eslint": "^9.39.2",
"eslint-plugin-react": "^7.37.5",
"eslint-plugin-react-hooks": "^7.0.1",
"eslint-plugin-react-refresh": "^0.4.26",
"@fontsource-variable/inter": "^5.2.5",
"@lucide/svelte": "^0.563.0",
"@sveltejs/adapter-static": "^3.0.8",
"@sveltejs/kit": "^2.16.0",
"@sveltejs/vite-plugin-svelte": "^5.0.3",
"@tailwindcss/vite": "^4.0.0",
"@tanstack/table-core": "^8.21.3",
"@types/d3-scale": "^4.0.9",
"@types/d3-shape": "^3.1.8",
"@types/d3-time-format": "^4.0.3",
"@types/node": "^25.1.0",
"bits-ui": "^1.3.7",
"clsx": "^2.1.1",
"jsdom": "^26.0.0",
"svelte": "^5.19.0",
"svelte-check": "^4.1.4",
"tailwind-merge": "^3.0.1",
"tailwindcss": "^4.0.0",
"typescript": "^5.7.2",
"typescript-eslint": "^8.54.0",
"vite": "^6.3.5",
"vitest": "^3.0.5",
"web-vitals": "^4.2.4"
"vitest": "^3.0.5"
},
"dependencies": {
"@icons-pack/svelte-simple-icons": "^6.5.0",
"d3-scale": "^4.0.2",
"d3-shape": "^3.2.0",
"d3-time-format": "^4.1.0",
"date-fns": "^4.1.0",
"overlayscrollbars": "^2.14.0",
"overlayscrollbars-svelte": "^0.5.5"
}
}
+148
@@ -0,0 +1,148 @@
#!/usr/bin/env bun
/**
* Pre-compress static assets with maximum compression levels.
* Run after `bun run build`.
*
* Generates .gz, .br, .zst variants for compressible files ≥ MIN_SIZE bytes.
* These are embedded alongside originals by rust-embed and served via
* content negotiation in src/web/assets.rs.
*/
import { readdir, stat, readFile, writeFile } from "fs/promises";
import { join, extname } from "path";
import { gzipSync, brotliCompressSync, constants } from "zlib";
import { $ } from "bun";
// Must match COMPRESSION_MIN_SIZE in src/web/encoding.rs
const MIN_SIZE = 512;
const COMPRESSIBLE_EXTENSIONS = new Set([
".js",
".css",
".html",
".json",
".svg",
".txt",
".xml",
".map",
]);
// Check if zstd CLI is available
let hasZstd = false;
try {
await $`which zstd`.quiet();
hasZstd = true;
} catch {
console.warn("Warning: zstd not found, skipping .zst generation");
}
async function* walkDir(dir: string): AsyncGenerator<string> {
try {
const entries = await readdir(dir, { withFileTypes: true });
for (const entry of entries) {
const path = join(dir, entry.name);
if (entry.isDirectory()) {
yield* walkDir(path);
} else if (entry.isFile()) {
yield path;
}
}
} catch {
// Directory doesn't exist, skip
}
}
async function compressFile(path: string): Promise<void> {
const ext = extname(path);
if (!COMPRESSIBLE_EXTENSIONS.has(ext)) return;
if (path.endsWith(".br") || path.endsWith(".gz") || path.endsWith(".zst")) return;
const stats = await stat(path);
if (stats.size < MIN_SIZE) return;
// Skip if all compressed variants already exist
const variantsExist = await Promise.all([
stat(`${path}.br`).then(
() => true,
() => false
),
stat(`${path}.gz`).then(
() => true,
() => false
),
hasZstd
? stat(`${path}.zst`).then(
() => true,
() => false
)
: Promise.resolve(true), // no zstd CLI: treat the .zst variant as not needed
]);
if (variantsExist.every(Boolean)) {
return;
}
const content = await readFile(path);
const originalSize = content.length;
// Brotli (maximum quality = 11)
const brContent = brotliCompressSync(content, {
params: {
[constants.BROTLI_PARAM_QUALITY]: 11,
},
});
await writeFile(`${path}.br`, brContent);
// Gzip (level 9)
const gzContent = gzipSync(content, { level: 9 });
await writeFile(`${path}.gz`, gzContent);
// Zstd (level 19 - maximum)
if (hasZstd) {
try {
await $`zstd -19 -q -f -o ${path}.zst ${path}`.quiet();
} catch (e) {
console.warn(`Warning: Failed to compress ${path} with zstd: ${e}`);
}
}
const brRatio = ((brContent.length / originalSize) * 100).toFixed(1);
const gzRatio = ((gzContent.length / originalSize) * 100).toFixed(1);
console.log(`Compressed: ${path} (br: ${brRatio}%, gz: ${gzRatio}%, ${originalSize} bytes)`);
}
async function main() {
console.log("Pre-compressing static assets...");
// Banner uses adapter-static with output in dist/
const dirs = ["dist"];
let scannedFiles = 0;
let compressedFiles = 0;
for (const dir of dirs) {
for await (const file of walkDir(dir)) {
const ext = extname(file);
scannedFiles++;
if (
COMPRESSIBLE_EXTENSIONS.has(ext) &&
!file.endsWith(".br") &&
!file.endsWith(".gz") &&
!file.endsWith(".zst")
) {
const stats = await stat(file);
if (stats.size >= MIN_SIZE) {
await compressFile(file);
compressedFiles++;
}
}
}
}
console.log(`Done! Scanned ${scannedFiles} files, processed ${compressedFiles} candidates.`);
}
main().catch((e) => {
console.error("Compression failed:", e);
process.exit(1);
});
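On the serving side, `try_serve_asset_with_encoding` (src/web/assets.rs, not shown in this view) pairs each negotiated encoding with the variants generated here. A minimal sketch of the filename mapping, reusing the extensions tested earlier (the helper itself is illustrative, not the library's actual internals):

// Illustrative: "assets/app.js" + Brotli -> "assets/app.js.br";
// Identity maps to the original path since extension() is "".
fn variant_path(path: &str, enc: &ContentEncoding) -> String {
    format!("{path}{}", enc.extension())
}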
-54
@@ -1,54 +0,0 @@
.App {
min-height: 100vh;
font-family: -apple-system, BlinkMacSystemFont, "Segoe UI", "Roboto", "Oxygen", "Ubuntu",
"Cantarell", "Fira Sans", "Droid Sans", "Helvetica Neue", sans-serif;
background-color: var(--color-background);
color: var(--color-text);
}
@keyframes pulse {
0%,
100% {
opacity: 0.2;
}
50% {
opacity: 0.4;
}
}
.animate-pulse {
animation: pulse 2s ease-in-out infinite;
}
/* Theme toggle button */
.theme-toggle {
cursor: pointer;
background-color: transparent;
border: none;
margin: 4px;
padding: 7px;
border-radius: 6px;
display: flex;
align-items: center;
justify-content: center;
color: var(--gray-11);
transition: background-color 0.2s, color 0.2s;
transform: scale(1.25);
}
.theme-toggle:hover {
background-color: var(--gray-4);
}
/* Screen reader only text */
.sr-only {
position: absolute;
width: 1px;
height: 1px;
padding: 0;
margin: -1px;
overflow: hidden;
clip: rect(0, 0, 0, 0);
white-space: nowrap;
border: 0;
}
+11
@@ -0,0 +1,11 @@
/// <reference types="@sveltejs/kit" />
declare const __APP_VERSION__: string;
declare namespace App {
// interface Error {}
// interface Locals {}
// interface PageData {}
// interface PageState {}
// interface Platform {}
}
+32
@@ -0,0 +1,32 @@
<!doctype html>
<html lang="en">
<head>
<meta charset="utf-8" />
<meta name="viewport" content="width=device-width, initial-scale=1" />
<link rel="icon" href="%sveltekit.assets%/favicon.ico" />
<meta name="theme-color" content="#000000" />
<meta
name="description"
content="Banner, a Discord bot and web interface for UTSA Course Monitoring"
/>
<link rel="apple-touch-icon" href="%sveltekit.assets%/logo192.png" />
<link rel="manifest" href="%sveltekit.assets%/manifest.json" />
<title>Banner</title>
<script>
(function () {
var stored = localStorage.getItem("theme");
var isDark =
stored === "dark" ||
(stored !== "light" &&
window.matchMedia("(prefers-color-scheme: dark)").matches);
if (isDark) {
document.documentElement.classList.add("dark");
}
})();
</script>
%sveltekit.head%
</head>
<body data-sveltekit-preload-data="hover">
<div style="display: contents">%sveltekit.body%</div>
</body>
</html>
-36
@@ -1,36 +0,0 @@
import { Button } from "@radix-ui/themes";
import { Monitor, Moon, Sun } from "lucide-react";
import { useTheme } from "next-themes";
import { useMemo } from "react";
export function ThemeToggle() {
const { theme, setTheme } = useTheme();
const nextTheme = useMemo(() => {
switch (theme) {
case "light":
return "dark";
case "dark":
return "system";
case "system":
return "light";
default:
console.error(`Invalid theme: ${theme}`);
return "system";
}
}, [theme]);
const icon = useMemo(() => {
if (nextTheme === "system") {
return <Monitor size={18} />;
}
return nextTheme === "dark" ? <Moon size={18} /> : <Sun size={18} />;
}, [nextTheme]);
return (
<Button variant="ghost" size="3" onClick={() => setTheme(nextTheme)} className="theme-toggle">
{icon}
<span className="sr-only">Toggle theme</span>
</Button>
);
}
+98 -19
@@ -1,7 +1,6 @@
import { beforeEach, describe, expect, it, vi } from "vitest";
import { BannerApiClient } from "./api";
// Mock fetch
global.fetch = vi.fn();
describe("BannerApiClient", () => {
@@ -12,23 +11,6 @@ describe("BannerApiClient", () => {
vi.clearAllMocks();
});
it("should fetch health data", async () => {
const mockHealth = {
status: "healthy",
timestamp: "2024-01-01T00:00:00Z",
};
vi.mocked(fetch).mockResolvedValueOnce({
ok: true,
json: () => Promise.resolve(mockHealth),
} as Response);
const result = await apiClient.getHealth();
expect(fetch).toHaveBeenCalledWith("/api/health");
expect(result).toEqual(mockHealth);
});
it("should fetch status data", async () => {
const mockStatus = {
status: "active" as const,
@@ -58,8 +40,105 @@ describe("BannerApiClient", () => {
statusText: "Internal Server Error",
} as Response);
await expect(apiClient.getHealth()).rejects.toThrow(
await expect(apiClient.getStatus()).rejects.toThrow(
"API request failed: 500 Internal Server Error"
);
});
it("should search courses with all params", async () => {
const mockResponse = {
courses: [],
totalCount: 0,
offset: 0,
limit: 25,
};
vi.mocked(fetch).mockResolvedValueOnce({
ok: true,
json: () => Promise.resolve(mockResponse),
} as Response);
const result = await apiClient.searchCourses({
term: "202420",
subjects: ["CS"],
q: "data",
open_only: true,
limit: 25,
offset: 50,
});
expect(fetch).toHaveBeenCalledWith(
"/api/courses/search?term=202420&subject=CS&q=data&open_only=true&limit=25&offset=50"
);
expect(result).toEqual(mockResponse);
});
it("should search courses with minimal params", async () => {
const mockResponse = {
courses: [],
totalCount: 0,
offset: 0,
limit: 25,
};
vi.mocked(fetch).mockResolvedValueOnce({
ok: true,
json: () => Promise.resolve(mockResponse),
} as Response);
await apiClient.searchCourses({ term: "202420" });
expect(fetch).toHaveBeenCalledWith("/api/courses/search?term=202420");
});
it("should fetch terms", async () => {
const mockTerms = [
{ code: "202420", description: "Fall 2024" },
{ code: "202510", description: "Spring 2025" },
];
vi.mocked(fetch).mockResolvedValueOnce({
ok: true,
json: () => Promise.resolve(mockTerms),
} as Response);
const result = await apiClient.getTerms();
expect(fetch).toHaveBeenCalledWith("/api/terms");
expect(result).toEqual(mockTerms);
});
it("should fetch subjects for a term", async () => {
const mockSubjects = [
{ code: "CS", description: "Computer Science" },
{ code: "MAT", description: "Mathematics" },
];
vi.mocked(fetch).mockResolvedValueOnce({
ok: true,
json: () => Promise.resolve(mockSubjects),
} as Response);
const result = await apiClient.getSubjects("202420");
expect(fetch).toHaveBeenCalledWith("/api/subjects?term=202420");
expect(result).toEqual(mockSubjects);
});
it("should fetch reference data", async () => {
const mockRef = [
{ code: "F", description: "Face to Face" },
{ code: "OL", description: "Online" },
];
vi.mocked(fetch).mockResolvedValueOnce({
ok: true,
json: () => Promise.resolve(mockRef),
} as Response);
const result = await apiClient.getReference("instructional_methods");
expect(fetch).toHaveBeenCalledWith("/api/reference/instructional_methods");
expect(result).toEqual(mockRef);
});
});
+309 -24
@@ -1,41 +1,177 @@
// API client for Banner backend
import type {
CandidateResponse,
CodeDescription,
CourseResponse,
DbMeetingTime,
InstructorDetail,
InstructorDetailResponse,
InstructorListItem,
InstructorResponse,
InstructorStats,
LinkedRmpProfile,
ListInstructorsResponse,
RescoreResponse,
SearchResponse as SearchResponseGenerated,
ServiceInfo,
ServiceStatus,
StatusResponse,
TopCandidateResponse,
User,
} from "$lib/bindings";
const API_BASE_URL = "/api";
export interface HealthResponse {
status: string;
// Re-export generated types under their canonical names
export type {
CandidateResponse,
CodeDescription,
CourseResponse,
DbMeetingTime,
InstructorDetail,
InstructorDetailResponse,
InstructorListItem,
InstructorResponse,
InstructorStats,
LinkedRmpProfile,
ListInstructorsResponse,
RescoreResponse,
ServiceInfo,
ServiceStatus,
StatusResponse,
TopCandidateResponse,
};
// Semantic aliases — these all share the CodeDescription shape
export type Term = CodeDescription;
export type Subject = CodeDescription;
export type ReferenceEntry = CodeDescription;
// SearchResponse re-exported (aliased to strip the "Generated" suffix)
export type SearchResponse = SearchResponseGenerated;
// Client-side only — not generated from Rust
export type SortColumn = "course_code" | "title" | "instructor" | "time" | "seats";
export type SortDirection = "asc" | "desc";
export interface AdminStatus {
userCount: number;
sessionCount: number;
courseCount: number;
scrapeJobCount: number;
services: { name: string; status: string }[];
}
export interface ScrapeJob {
id: number;
targetType: string;
targetPayload: unknown;
priority: string;
executeAt: string;
createdAt: string;
lockedAt: string | null;
retryCount: number;
maxRetries: number;
queuedAt: string;
status: "processing" | "staleLock" | "exhausted" | "scheduled" | "pending";
}
export interface ScrapeJobsResponse {
jobs: ScrapeJob[];
}
export interface AuditLogEntry {
id: number;
courseId: number;
timestamp: string;
fieldChanged: string;
oldValue: string;
newValue: string;
subject: string | null;
courseNumber: string | null;
crn: string | null;
courseTitle: string | null;
}
export type Status = "starting" | "active" | "connected" | "disabled" | "error";
export interface ServiceInfo {
name: string;
status: Status;
export interface AuditLogResponse {
entries: AuditLogEntry[];
}
export interface StatusResponse {
status: Status;
version: string;
commit: string;
services: Record<string, ServiceInfo>;
export interface MetricEntry {
id: number;
courseId: number;
timestamp: string;
enrollment: number;
waitCount: number;
seatsAvailable: number;
}
export interface MetricsResponse {
banner_api: {
status: string;
};
metrics: MetricEntry[];
count: number;
timestamp: string;
}
export interface MetricsParams {
course_id?: number;
term?: string;
crn?: string;
range?: "1h" | "6h" | "24h" | "7d" | "30d";
limit?: number;
}
export interface SearchParams {
term: string;
subjects?: string[];
q?: string;
open_only?: boolean;
limit?: number;
offset?: number;
sort_by?: SortColumn;
sort_dir?: SortDirection;
}
// Admin instructor query params (client-only, not generated)
export interface AdminInstructorListParams {
status?: string;
search?: string;
page?: number;
per_page?: number;
sort?: string;
}
export class BannerApiClient {
private baseUrl: string;
private fetchFn: typeof fetch;
constructor(baseUrl: string = API_BASE_URL) {
constructor(baseUrl: string = API_BASE_URL, fetchFn: typeof fetch = fetch) {
this.baseUrl = baseUrl;
this.fetchFn = fetchFn;
}
private async request<T>(endpoint: string): Promise<T> {
const response = await fetch(`${this.baseUrl}${endpoint}`);
private buildInit(options?: { method?: string; body?: unknown }): RequestInit | undefined {
if (!options) return undefined;
const init: RequestInit = {};
if (options.method) {
init.method = options.method;
}
if (options.body !== undefined) {
init.headers = { "Content-Type": "application/json" };
init.body = JSON.stringify(options.body);
} else if (options.method) {
init.headers = { "Content-Type": "application/json" };
}
return Object.keys(init).length > 0 ? init : undefined;
}
private async request<T>(
endpoint: string,
options?: { method?: string; body?: unknown }
): Promise<T> {
const init = this.buildInit(options);
const args: [string, RequestInit?] = [`${this.baseUrl}${endpoint}`];
if (init) args.push(init);
const response = await this.fetchFn(...args);
if (!response.ok) {
throw new Error(`API request failed: ${response.status} ${response.statusText}`);
@@ -44,18 +180,167 @@ export class BannerApiClient {
return (await response.json()) as T;
}
async getHealth(): Promise<HealthResponse> {
return this.request<HealthResponse>("/health");
private async requestVoid(
endpoint: string,
options?: { method?: string; body?: unknown }
): Promise<void> {
const init = this.buildInit(options);
const args: [string, RequestInit?] = [`${this.baseUrl}${endpoint}`];
if (init) args.push(init);
const response = await this.fetchFn(...args);
if (!response.ok) {
throw new Error(`API request failed: ${response.status} ${response.statusText}`);
}
}
async getStatus(): Promise<StatusResponse> {
return this.request<StatusResponse>("/status");
}
async getMetrics(): Promise<MetricsResponse> {
return this.request<MetricsResponse>("/metrics");
async searchCourses(params: SearchParams): Promise<SearchResponse> {
const query = new URLSearchParams();
query.set("term", params.term);
if (params.subjects) {
for (const s of params.subjects) {
query.append("subject", s);
}
}
if (params.q) query.set("q", params.q);
if (params.open_only) query.set("open_only", "true");
if (params.limit !== undefined) query.set("limit", String(params.limit));
if (params.offset !== undefined) query.set("offset", String(params.offset));
if (params.sort_by) query.set("sort_by", params.sort_by);
if (params.sort_dir) query.set("sort_dir", params.sort_dir);
return this.request<SearchResponse>(`/courses/search?${query.toString()}`);
}
async getTerms(): Promise<Term[]> {
return this.request<Term[]>("/terms");
}
async getSubjects(termCode: string): Promise<Subject[]> {
return this.request<Subject[]>(`/subjects?term=${encodeURIComponent(termCode)}`);
}
async getReference(category: string): Promise<ReferenceEntry[]> {
return this.request<ReferenceEntry[]>(`/reference/${encodeURIComponent(category)}`);
}
// Admin endpoints
async getAdminStatus(): Promise<AdminStatus> {
return this.request<AdminStatus>("/admin/status");
}
async getAdminUsers(): Promise<User[]> {
return this.request<User[]>("/admin/users");
}
async setUserAdmin(discordId: string, isAdmin: boolean): Promise<User> {
return this.request<User>(`/admin/users/${discordId}/admin`, {
method: "PUT",
body: { is_admin: isAdmin },
});
}
async getAdminScrapeJobs(): Promise<ScrapeJobsResponse> {
return this.request<ScrapeJobsResponse>("/admin/scrape-jobs");
}
/**
* Fetch the audit log with conditional request support.
*
* Returns `null` when the server responds 304 (data unchanged).
* Stores and sends `Last-Modified` / `If-Modified-Since` automatically.
*/
async getAdminAuditLog(): Promise<AuditLogResponse | null> {
const headers: Record<string, string> = {};
if (this._auditLastModified) {
headers["If-Modified-Since"] = this._auditLastModified;
}
const response = await this.fetchFn(`${this.baseUrl}/admin/audit-log`, { headers });
if (response.status === 304) {
return null;
}
if (!response.ok) {
throw new Error(`API request failed: ${response.status} ${response.statusText}`);
}
const lastMod = response.headers.get("Last-Modified");
if (lastMod) {
this._auditLastModified = lastMod;
}
return (await response.json()) as AuditLogResponse;
}
/** Stored `Last-Modified` value for audit log conditional requests. */
private _auditLastModified: string | null = null;
async getMetrics(params?: MetricsParams): Promise<MetricsResponse> {
const query = new URLSearchParams();
if (params?.course_id !== undefined) query.set("course_id", String(params.course_id));
if (params?.term) query.set("term", params.term);
if (params?.crn) query.set("crn", params.crn);
if (params?.range) query.set("range", params.range);
if (params?.limit !== undefined) query.set("limit", String(params.limit));
const qs = query.toString();
return this.request<MetricsResponse>(`/metrics${qs ? `?${qs}` : ""}`);
}
// Admin instructor endpoints
async getAdminInstructors(params?: AdminInstructorListParams): Promise<ListInstructorsResponse> {
const query = new URLSearchParams();
if (params?.status) query.set("status", params.status);
if (params?.search) query.set("search", params.search);
if (params?.page !== undefined) query.set("page", String(params.page));
if (params?.per_page !== undefined) query.set("per_page", String(params.per_page));
if (params?.sort) query.set("sort", params.sort);
const qs = query.toString();
return this.request<ListInstructorsResponse>(`/admin/instructors${qs ? `?${qs}` : ""}`);
}
async getAdminInstructor(id: number): Promise<InstructorDetailResponse> {
return this.request<InstructorDetailResponse>(`/admin/instructors/${id}`);
}
async matchInstructor(id: number, rmpLegacyId: number): Promise<InstructorDetailResponse> {
return this.request<InstructorDetailResponse>(`/admin/instructors/${id}/match`, {
method: "POST",
body: { rmpLegacyId },
});
}
async rejectCandidate(id: number, rmpLegacyId: number): Promise<void> {
return this.requestVoid(`/admin/instructors/${id}/reject-candidate`, {
method: "POST",
body: { rmpLegacyId },
});
}
async rejectAllCandidates(id: number): Promise<void> {
return this.requestVoid(`/admin/instructors/${id}/reject-all`, {
method: "POST",
});
}
async unmatchInstructor(id: number, rmpLegacyId?: number): Promise<void> {
return this.requestVoid(`/admin/instructors/${id}/unmatch`, {
method: "POST",
...(rmpLegacyId !== undefined ? { body: { rmpLegacyId } } : {}),
});
}
async rescoreInstructors(): Promise<RescoreResponse> {
return this.request<RescoreResponse>("/admin/rmp/rescore", {
method: "POST",
});
}
}
// Export a default instance
export const client = new BannerApiClient();
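// Illustrative usage; the term code and query below are hypothetical:
//   const results = await client.searchCourses({ term: "202510", q: "calculus", open_only: true });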
@@ -0,0 +1,77 @@
import type { User } from "$lib/bindings";
type AuthState =
| { mode: "loading" }
| { mode: "authenticated"; user: User }
| { mode: "unauthenticated" };
class AuthStore {
state = $state<AuthState>({ mode: "loading" });
get user(): User | null {
return this.state.mode === "authenticated" ? this.state.user : null;
}
get isAdmin(): boolean {
return this.user?.isAdmin ?? false;
}
get isLoading(): boolean {
return this.state.mode === "loading";
}
get isAuthenticated(): boolean {
return this.state.mode === "authenticated";
}
/**
* Attempt to load the current user session from the backend.
* Only transitions to "unauthenticated" on a definitive 401/403.
* Retries indefinitely on transient failures (network errors, 5xx)
* so that a slow backend startup doesn't kick the user to login.
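* With a 500ms initial delay and MAX_DELAY_MS of 7s, the retry schedule is
* 500ms, 1s, 2s, 4s, then 7s between every subsequent attempt.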
*/
async init() {
const MAX_DELAY_MS = 7_000;
let delayMs = 500;
for (;;) {
try {
const response = await fetch("/api/auth/me");
if (response.ok) {
const user: User = await response.json();
this.state = { mode: "authenticated", user };
return;
}
// Definitive rejection — no session or not authorized
if (response.status === 401 || response.status === 403) {
this.state = { mode: "unauthenticated" };
return;
}
// Server error (5xx) or unexpected status — retry
} catch {
// Network error (backend not up yet) — retry
}
await new Promise((r) => setTimeout(r, delayMs));
delayMs = Math.min(delayMs * 2, MAX_DELAY_MS);
}
}
login() {
window.location.href = "/api/auth/login";
}
async logout() {
try {
await fetch("/api/auth/logout", { method: "POST" });
} finally {
this.state = { mode: "unauthenticated" };
window.location.href = "/";
}
}
}
export const authStore = new AuthStore();
@@ -0,0 +1,19 @@
export type { CandidateResponse } from "./CandidateResponse";
export type { CodeDescription } from "./CodeDescription";
export type { CourseResponse } from "./CourseResponse";
export type { DbMeetingTime } from "./DbMeetingTime";
export type { InstructorDetail } from "./InstructorDetail";
export type { InstructorDetailResponse } from "./InstructorDetailResponse";
export type { InstructorListItem } from "./InstructorListItem";
export type { InstructorResponse } from "./InstructorResponse";
export type { InstructorStats } from "./InstructorStats";
export type { LinkedRmpProfile } from "./LinkedRmpProfile";
export type { ListInstructorsResponse } from "./ListInstructorsResponse";
export type { OkResponse } from "./OkResponse";
export type { RescoreResponse } from "./RescoreResponse";
export type { SearchResponse } from "./SearchResponse";
export type { ServiceInfo } from "./ServiceInfo";
export type { ServiceStatus } from "./ServiceStatus";
export type { StatusResponse } from "./StatusResponse";
export type { TopCandidateResponse } from "./TopCandidateResponse";
export type { User } from "./User";
@@ -0,0 +1,306 @@
<script lang="ts">
import type { CourseResponse } from "$lib/api";
import {
formatTime,
formatCreditHours,
formatDate,
formatMeetingDaysLong,
isMeetingTimeTBA,
isTimeTBA,
ratingStyle,
rmpUrl,
RMP_CONFIDENCE_THRESHOLD,
} from "$lib/course";
import { themeStore } from "$lib/stores/theme.svelte";
import { useClipboard } from "$lib/composables/useClipboard.svelte";
import { cn, tooltipContentClass, formatNumber } from "$lib/utils";
import { Tooltip } from "bits-ui";
import SimpleTooltip from "./SimpleTooltip.svelte";
import { Info, Copy, Check, Star, Triangle, ExternalLink } from "@lucide/svelte";
let { course }: { course: CourseResponse } = $props();
const clipboard = useClipboard();
</script>
<div class="bg-muted/60 p-5 text-sm border-b border-border">
<div class="grid grid-cols-1 sm:grid-cols-2 gap-5">
<!-- Instructors -->
<div>
<h4 class="text-sm text-foreground mb-2">Instructors</h4>
{#if course.instructors.length > 0}
<div class="flex flex-wrap gap-1.5">
{#each course.instructors as instructor}
<Tooltip.Root delayDuration={200}>
<Tooltip.Trigger>
<span
class="inline-flex items-center gap-1.5 text-sm font-medium bg-card border border-border rounded-md px-2.5 py-1 text-foreground hover:border-foreground/20 hover:bg-card/80 transition-colors"
>
{instructor.displayName}
{#if instructor.rmpRating != null}
{@const rating = instructor.rmpRating}
{@const lowConfidence =
(instructor.rmpNumRatings ?? 0) <
RMP_CONFIDENCE_THRESHOLD}
<span
class="text-[10px] font-semibold inline-flex items-center gap-0.5"
style={ratingStyle(
rating,
themeStore.isDark,
)}
>
{rating.toFixed(1)}
{#if lowConfidence}
<Triangle
class="size-2 fill-current"
/>
{:else}
<Star
class="size-2.5 fill-current"
/>
{/if}
</span>
{/if}
</span>
</Tooltip.Trigger>
<Tooltip.Content
sideOffset={6}
class={cn(tooltipContentClass, "px-3 py-2")}
>
<div class="space-y-1.5">
<div class="font-medium">
{instructor.displayName}
</div>
{#if instructor.isPrimary}
<div class="text-muted-foreground">
Primary instructor
</div>
{/if}
{#if instructor.rmpRating != null}
<div class="text-muted-foreground">
{instructor.rmpRating.toFixed(1)}/5
· {instructor.rmpNumRatings ?? 0} ratings
{#if (instructor.rmpNumRatings ?? 0) < RMP_CONFIDENCE_THRESHOLD}
(low)
{/if}
</div>
{/if}
{#if instructor.rmpLegacyId != null}
<a
href={rmpUrl(
instructor.rmpLegacyId,
)}
target="_blank"
rel="noopener"
class="inline-flex items-center gap-1 text-muted-foreground hover:text-foreground transition-colors"
>
<ExternalLink class="size-3" />
<span>View on RMP</span>
</a>
{/if}
{#if instructor.email}
<button
onclick={(e) =>
clipboard.copy(
instructor.email!,
e,
)}
class="inline-flex items-center gap-1 text-muted-foreground hover:text-foreground transition-colors cursor-pointer"
>
{#if clipboard.copiedValue === instructor.email}
<Check class="size-3" />
<span>Copied!</span>
{:else}
<Copy class="size-3" />
<span>{instructor.email}</span>
{/if}
</button>
{/if}
</div>
</Tooltip.Content>
</Tooltip.Root>
{/each}
</div>
{:else}
<span class="text-muted-foreground italic">Staff</span>
{/if}
</div>
<!-- Meeting Times -->
<div>
<h4 class="text-sm text-foreground mb-2">Meeting Times</h4>
{#if course.meetingTimes.length > 0}
<ul class="space-y-2">
{#each course.meetingTimes as mt}
<li>
{#if isMeetingTimeTBA(mt) && isTimeTBA(mt)}
<span class="italic text-muted-foreground"
>TBA</span
>
{:else}
<div class="flex items-baseline gap-1.5">
{#if !isMeetingTimeTBA(mt)}
<span
class="font-medium text-foreground"
>
{formatMeetingDaysLong(mt)}
</span>
{/if}
{#if !isTimeTBA(mt)}
<span class="text-muted-foreground">
{formatTime(
mt.begin_time,
)}&ndash;{formatTime(mt.end_time)}
</span>
{:else}
<span
class="italic text-muted-foreground"
>Time TBA</span
>
{/if}
</div>
{/if}
{#if mt.building || mt.room}
<div
class="text-xs text-muted-foreground mt-0.5"
>
{mt.building_description ??
mt.building}{mt.room
? ` ${mt.room}`
: ""}
</div>
{/if}
<div
class="text-xs text-muted-foreground/70 mt-0.5"
>
{formatDate(mt.start_date)} &ndash; {formatDate(
mt.end_date,
)}
</div>
</li>
{/each}
</ul>
{:else}
<span class="italic text-muted-foreground">TBA</span>
{/if}
</div>
<!-- Delivery -->
<div>
<h4 class="text-sm text-foreground mb-2">
<span class="inline-flex items-center gap-1">
Delivery
<SimpleTooltip
text="How the course is taught: in-person, online, hybrid, etc."
delay={150}
passthrough
>
<Info class="size-3 text-muted-foreground/50" />
</SimpleTooltip>
</span>
</h4>
<span class="text-foreground">
{course.instructionalMethod ?? "—"}
{#if course.campus}
<span class="text-muted-foreground">
· {course.campus}
</span>
{/if}
</span>
</div>
<!-- Credits -->
<div>
<h4 class="text-sm text-foreground mb-2">Credits</h4>
<span class="text-foreground">{formatCreditHours(course)}</span>
</div>
<!-- Attributes -->
{#if course.attributes.length > 0}
<div>
<h4 class="text-sm text-foreground mb-2">
<span class="inline-flex items-center gap-1">
Attributes
<SimpleTooltip
text="Course flags for degree requirements, core curriculum, or special designations"
delay={150}
passthrough
>
<Info class="size-3 text-muted-foreground/50" />
</SimpleTooltip>
</span>
</h4>
<div class="flex flex-wrap gap-1.5">
{#each course.attributes as attr}
<SimpleTooltip
text="Course attribute code"
delay={150}
passthrough
>
<span
class="inline-flex text-xs font-medium bg-card border border-border rounded-md px-2 py-0.5 text-muted-foreground hover:text-foreground hover:border-foreground/20 transition-colors"
>
{attr}
</span>
</SimpleTooltip>
{/each}
</div>
</div>
{/if}
<!-- Cross-list -->
{#if course.crossList}
<div>
<h4 class="text-sm text-foreground mb-2">
<span class="inline-flex items-center gap-1">
Cross-list
<SimpleTooltip
text="Cross-listed sections share enrollment across multiple course numbers. Students in any linked section attend the same class."
delay={150}
passthrough
>
<Info class="size-3 text-muted-foreground/50" />
</SimpleTooltip>
</span>
</h4>
<Tooltip.Root delayDuration={150} disableHoverableContent>
<Tooltip.Trigger>
<span
class="inline-flex items-center gap-1.5 text-foreground font-mono"
>
<span
class="bg-card border border-border rounded-md px-2 py-0.5 text-xs font-medium"
>
{course.crossList}
</span>
{#if course.crossListCount != null && course.crossListCapacity != null}
<span class="text-muted-foreground text-xs">
{formatNumber(course.crossListCount)}/{formatNumber(course.crossListCapacity)}
</span>
{/if}
</span>
</Tooltip.Trigger>
<Tooltip.Content sideOffset={6} class={tooltipContentClass}>
Group <span class="font-mono font-medium"
>{course.crossList}</span
>
{#if course.crossListCount != null && course.crossListCapacity != null}
{formatNumber(course.crossListCount)} enrolled across {formatNumber(course.crossListCapacity)}
shared seats
{/if}
</Tooltip.Content>
</Tooltip.Root>
</div>
{/if}
<!-- Waitlist -->
{#if course.waitCapacity > 0}
<div>
<h4 class="text-sm text-foreground mb-2">Waitlist</h4>
<span class="text-2foreground"
>{formatNumber(course.waitCount)} / {formatNumber(course.waitCapacity)}</span
>
</div>
{/if}
</div>
</div>
@@ -0,0 +1,787 @@
<script lang="ts">
import type { CourseResponse } from "$lib/api";
import {
abbreviateInstructor,
concernAccentColor,
formatLocationDisplay,
formatLocationTooltip,
formatMeetingDays,
formatMeetingTimesTooltip,
formatTimeRange,
getDeliveryConcern,
getPrimaryInstructor,
isMeetingTimeTBA,
isTimeTBA,
openSeats,
seatsColor,
seatsDotColor,
ratingStyle,
rmpUrl,
RMP_CONFIDENCE_THRESHOLD,
} from "$lib/course";
import { themeStore } from "$lib/stores/theme.svelte";
import { useClipboard } from "$lib/composables/useClipboard.svelte";
import { useOverlayScrollbars } from "$lib/composables/useOverlayScrollbars.svelte";
import CourseDetail from "./CourseDetail.svelte";
import { fade, fly, slide } from "svelte/transition";
import { flip } from "svelte/animate";
import { createSvelteTable, FlexRender } from "$lib/components/ui/data-table/index.js";
import {
getCoreRowModel,
getSortedRowModel,
type ColumnDef,
type SortingState,
type VisibilityState,
type Updater,
} from "@tanstack/table-core";
import {
ArrowUp,
ArrowDown,
ArrowUpDown,
Columns3,
Check,
RotateCcw,
Star,
Triangle,
ExternalLink,
} from "@lucide/svelte";
import { DropdownMenu, ContextMenu, Tooltip } from "bits-ui";
import { cn, tooltipContentClass, formatNumber } from "$lib/utils";
import SimpleTooltip from "./SimpleTooltip.svelte";
let {
courses,
loading,
sorting = [],
onSortingChange,
manualSorting = false,
subjectMap = {},
}: {
courses: CourseResponse[];
loading: boolean;
sorting?: SortingState;
onSortingChange?: (sorting: SortingState) => void;
manualSorting?: boolean;
subjectMap?: Record<string, string>;
} = $props();
let expandedCrn: string | null = $state(null);
let tableWrapper: HTMLDivElement = undefined!;
const clipboard = useClipboard(1000);
// Collapse expanded row when the dataset changes to avoid stale detail rows
// and FLIP position calculation glitches from lingering expanded content
$effect(() => {
courses; // track dependency
expandedCrn = null;
});
useOverlayScrollbars(() => tableWrapper, {
overflow: { x: "scroll", y: "hidden" },
scrollbars: { autoHide: "never" },
});
// Column visibility state
let columnVisibility: VisibilityState = $state({});
function resetColumnVisibility() {
columnVisibility = {};
}
function handleVisibilityChange(updater: Updater<VisibilityState>) {
const newVisibility = typeof updater === "function" ? updater(columnVisibility) : updater;
columnVisibility = newVisibility;
}
// visibleColumnIds and hasCustomVisibility derived after column definitions below
function toggleRow(crn: string) {
expandedCrn = expandedCrn === crn ? null : crn;
}
function primaryInstructorDisplay(course: CourseResponse): string {
const primary = getPrimaryInstructor(course.instructors);
if (!primary) return "Staff";
return abbreviateInstructor(primary.displayName);
}
function primaryRating(
course: CourseResponse
): { rating: number; count: number; legacyId: number | null } | null {
const primary = getPrimaryInstructor(course.instructors);
if (primary?.rmpRating == null) return null; // only a missing rating is absent; don't drop a literal 0.0
return {
rating: primary.rmpRating,
count: primary.rmpNumRatings ?? 0,
legacyId: primary.rmpLegacyId ?? null,
};
}
function timeIsTBA(course: CourseResponse): boolean {
if (course.meetingTimes.length === 0) return true;
const mt = course.meetingTimes[0];
return isMeetingTimeTBA(mt) && isTimeTBA(mt);
}
// Column definitions
const columns: ColumnDef<CourseResponse, unknown>[] = [
{
id: "crn",
accessorKey: "crn",
header: "CRN",
enableSorting: false,
},
{
id: "course_code",
accessorFn: (row) => `${row.subject} ${row.courseNumber}`,
header: "Course",
enableSorting: true,
},
{
id: "title",
accessorKey: "title",
header: "Title",
enableSorting: true,
},
{
id: "instructor",
accessorFn: (row) => primaryInstructorDisplay(row),
header: "Instructor",
enableSorting: true,
},
{
id: "time",
accessorFn: (row) => {
if (row.meetingTimes.length === 0) return "";
const mt = row.meetingTimes[0];
return `${formatMeetingDays(mt)} ${formatTimeRange(mt.begin_time, mt.end_time)}`;
},
header: "Time",
enableSorting: true,
},
{
id: "location",
accessorFn: (row) => formatLocationDisplay(row) ?? "",
header: "Location",
enableSorting: false,
},
{
id: "seats",
accessorFn: (row) => openSeats(row),
header: "Seats",
enableSorting: true,
},
];
/** Column IDs that are currently visible */
let visibleColumnIds = $derived(
columns.map((c) => c.id!).filter((id) => columnVisibility[id] !== false)
);
let hasCustomVisibility = $derived(Object.values(columnVisibility).some((v) => v === false));
function handleSortingChange(updater: Updater<SortingState>) {
const newSorting = typeof updater === "function" ? updater(sorting) : updater;
onSortingChange?.(newSorting);
}
const table = createSvelteTable({
get data() {
return courses;
},
getRowId: (row) => String(row.crn),
columns,
state: {
get sorting() {
return sorting;
},
get columnVisibility() {
return columnVisibility;
},
},
onSortingChange: handleSortingChange,
onColumnVisibilityChange: handleVisibilityChange,
getCoreRowModel: getCoreRowModel(),
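// With manual (server-side) sorting, skip the client-side sorted row model so the server's order is used verbatim.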
get getSortedRowModel() {
return manualSorting ? undefined : getSortedRowModel<CourseResponse>();
},
get manualSorting() {
return manualSorting;
},
enableSortingRemoval: true,
});
</script>
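<!-- Shared column-toggle menu body: callers pass in DropdownMenu.* or ContextMenu.* parts, so one snippet serves both the toolbar dropdown and the header context menu. -->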
{#snippet columnVisibilityGroup(
Group: typeof DropdownMenu.Group,
GroupHeading: typeof DropdownMenu.GroupHeading,
CheckboxItem: typeof DropdownMenu.CheckboxItem,
Separator: typeof DropdownMenu.Separator,
Item: typeof DropdownMenu.Item,
)}
<Group>
<GroupHeading
class="px-2 py-1.5 text-xs font-medium text-muted-foreground"
>
Toggle columns
</GroupHeading>
{#each columns as col}
{@const id = col.id!}
{@const label = typeof col.header === "string" ? col.header : id}
<CheckboxItem
checked={columnVisibility[id] !== false}
closeOnSelect={false}
onCheckedChange={(checked) => {
columnVisibility = {
...columnVisibility,
[id]: checked,
};
}}
class="relative flex items-center gap-2 rounded-sm px-2 py-1.5 text-sm cursor-pointer select-none outline-none data-highlighted:bg-accent data-highlighted:text-accent-foreground"
>
{#snippet children({ checked })}
<span
class="flex size-4 items-center justify-center rounded-sm border border-border"
>
{#if checked}
<Check class="size-3" />
{/if}
</span>
{label}
{/snippet}
</CheckboxItem>
{/each}
</Group>
{#if hasCustomVisibility}
<Separator class="mx-1 my-1 h-px bg-border" />
<Item
class="flex items-center gap-2 rounded-sm px-2 py-1.5 text-sm cursor-pointer select-none outline-none data-highlighted:bg-accent data-highlighted:text-accent-foreground"
onSelect={resetColumnVisibility}
>
<RotateCcw class="size-3.5" />
Reset to default
</Item>
{/if}
{/snippet}
<!-- Toolbar: View columns button -->
<div class="flex items-center justify-end pb-2">
<DropdownMenu.Root>
<DropdownMenu.Trigger
class="inline-flex items-center gap-1.5 rounded-md border border-border bg-background px-2.5 py-1.5 text-xs font-medium text-muted-foreground hover:bg-accent hover:text-accent-foreground transition-colors cursor-pointer"
>
<Columns3 class="size-3.5" />
View
</DropdownMenu.Trigger>
<DropdownMenu.Portal>
<DropdownMenu.Content
class="z-50 min-w-40 rounded-md border border-border bg-card p-1 text-card-foreground shadow-lg"
align="end"
sideOffset={4}
forceMount
>
{#snippet child({ wrapperProps, props, open })}
{#if open}
<div {...wrapperProps}>
<div
{...props}
transition:fly={{ duration: 150, y: -10 }}
>
{@render columnVisibilityGroup(
DropdownMenu.Group,
DropdownMenu.GroupHeading,
DropdownMenu.CheckboxItem,
DropdownMenu.Separator,
DropdownMenu.Item,
)}
</div>
</div>
{/if}
{/snippet}
</DropdownMenu.Content>
</DropdownMenu.Portal>
</DropdownMenu.Root>
</div>
<!-- Table with context menu on header -->
<div bind:this={tableWrapper} class="overflow-x-auto">
<ContextMenu.Root>
<ContextMenu.Trigger class="contents">
<table class="w-full min-w-160 border-collapse text-sm">
<thead>
{#each table.getHeaderGroups() as headerGroup}
<tr
class="border-b border-border text-left text-muted-foreground"
>
{#each headerGroup.headers as header}
{#if header.column.getIsVisible()}
<th
class="py-2 px-2 font-medium {header.id ===
'seats'
? 'text-right'
: ''}"
class:cursor-pointer={header.column.getCanSort()}
class:select-none={header.column.getCanSort()}
onclick={header.column.getToggleSortingHandler()}
>
{#if header.column.getCanSort()}
<span
class="inline-flex items-center gap-1"
>
{#if typeof header.column.columnDef.header === "string"}
{header.column.columnDef
.header}
{:else}
<FlexRender
content={header.column
.columnDef.header}
context={header.getContext()}
/>
{/if}
{#if header.column.getIsSorted() === "asc"}
<ArrowUp class="size-3.5" />
{:else if header.column.getIsSorted() === "desc"}
<ArrowDown
class="size-3.5"
/>
{:else}
<ArrowUpDown
class="size-3.5 text-muted-foreground/40"
/>
{/if}
</span>
{:else if typeof header.column.columnDef.header === "string"}
{header.column.columnDef.header}
{:else}
<FlexRender
content={header.column.columnDef
.header}
context={header.getContext()}
/>
{/if}
</th>
{/if}
{/each}
</tr>
{/each}
</thead>
{#if loading && courses.length === 0}
<tbody>
{#each Array(5) as _}
<tr class="border-b border-border">
{#each table.getVisibleLeafColumns() as col}
<td class="py-2.5 px-2">
<div
class="h-4 bg-muted rounded animate-pulse {col.id ===
'seats'
? 'w-14 ml-auto'
: col.id === 'title'
? 'w-40'
: col.id === 'crn'
? 'w-10'
: 'w-20'}"
></div>
</td>
{/each}
</tr>
{/each}
</tbody>
{:else if courses.length === 0}
<tbody>
<tr>
<td
colspan={visibleColumnIds.length}
class="py-12 text-center text-muted-foreground"
>
No courses found. Try adjusting your filters.
</td>
</tr>
</tbody>
{:else}
<!-- No out: transition — Svelte outros break table layout (tbody loses positioning and overlaps) -->
{#each table.getRowModel().rows as row, i (row.id)}
{@const course = row.original}
<tbody
animate:flip={{ duration: 300 }}
in:fade={{
duration: 200,
delay: Math.min(i * 20, 400),
}}
>
<tr
class="border-b border-border cursor-pointer hover:bg-muted/50 transition-colors whitespace-nowrap {expandedCrn ===
course.crn
? 'bg-muted/30'
: ''}"
onclick={() => toggleRow(course.crn)}
>
{#each row.getVisibleCells() as cell (cell.id)}
{@const colId = cell.column.id}
{#if colId === "crn"}
<td class="py-2 px-2 relative">
<button
class="relative inline-flex items-center rounded-full px-2 py-0.5 border border-border/50 bg-muted/20 hover:bg-muted/40 hover:border-foreground/30 transition-colors duration-150 cursor-copy focus-visible:outline-2 focus-visible:outline-offset-1 focus-visible:outline-ring font-mono text-xs text-muted-foreground/70"
onclick={(e) =>
clipboard.copy(
course.crn,
e,
)}
onkeydown={(e) => {
if (
e.key === "Enter" ||
e.key === " "
) {
e.preventDefault();
clipboard.copy(
course.crn,
e,
);
}
}}
aria-label="Copy CRN {course.crn} to clipboard"
>
{course.crn}
{#if clipboard.copiedValue === course.crn}
<span
class="absolute -top-8 left-1/2 -translate-x-1/2 whitespace-nowrap text-xs px-2 py-1 rounded-md bg-green-500/10 border border-green-500/20 text-green-700 dark:text-green-300 pointer-events-none z-10"
in:fade={{
duration: 100,
}}
out:fade={{
duration: 200,
}}
>
Copied!
</span>
{/if}
</button>
</td>
{:else if colId === "course_code"}
{@const subjectDesc =
subjectMap[course.subject]}
<td class="py-2 px-2 whitespace-nowrap">
<SimpleTooltip
text={subjectDesc
? `${subjectDesc} ${course.courseNumber}`
: `${course.subject} ${course.courseNumber}`}
delay={200}
side="bottom"
passthrough
>
<span class="font-semibold"
>{course.subject}
{course.courseNumber}</span
>{#if course.sequenceNumber}<span
class="text-muted-foreground"
>-{course.sequenceNumber}</span
>{/if}
</SimpleTooltip>
</td>
{:else if colId === "title"}
<td
class="py-2 px-2 font-medium max-w-50 truncate"
>
<SimpleTooltip
text={course.title}
delay={200}
side="bottom"
passthrough
>
<span class="block truncate"
>{course.title}</span
>
</SimpleTooltip>
</td>
{:else if colId === "instructor"}
{@const primary = getPrimaryInstructor(
course.instructors,
)}
{@const display =
primaryInstructorDisplay(course)}
{@const commaIdx =
display.indexOf(", ")}
{@const ratingData =
primaryRating(course)}
<td class="py-2 px-2 whitespace-nowrap">
{#if display === "Staff"}
<span
class="text-xs text-muted-foreground/60 uppercase"
>Staff</span
>
{:else}
<SimpleTooltip
text={primary?.displayName ??
"Staff"}
delay={200}
side="bottom"
passthrough
>
{#if commaIdx !== -1}
<span
>{display.slice(
0,
commaIdx,
)},
<span
class="text-muted-foreground"
>{display.slice(
commaIdx +
1,
)}</span
></span
>
{:else}
<span>{display}</span>
{/if}
</SimpleTooltip>
{/if}
{#if ratingData}
{@const lowConfidence =
ratingData.count <
RMP_CONFIDENCE_THRESHOLD}
<Tooltip.Root
delayDuration={150}
>
<Tooltip.Trigger>
<span
class="ml-1 text-xs font-medium inline-flex items-center gap-0.5"
style={ratingStyle(
ratingData.rating,
themeStore.isDark,
)}
>
{ratingData.rating.toFixed(
1,
)}
{#if lowConfidence}
<Triangle
class="size-2 fill-current"
/>
{:else}
<Star
class="size-2.5 fill-current"
/>
{/if}
</span>
</Tooltip.Trigger>
<Tooltip.Content
side="bottom"
sideOffset={6}
class={cn(
tooltipContentClass,
"px-2.5 py-1.5",
)}
>
<span
class="inline-flex items-center gap-1.5 text-xs"
>
{ratingData.rating.toFixed(
1,
)}/5 · {formatNumber(ratingData.count)}
ratings
{#if (ratingData.count ?? 0) < RMP_CONFIDENCE_THRESHOLD}
(low)
{/if}
{#if ratingData.legacyId != null}
·
<a
href={rmpUrl(
ratingData.legacyId,
)}
target="_blank"
rel="noopener"
class="inline-flex items-center gap-0.5 text-muted-foreground hover:text-foreground transition-colors"
>
RMP
<ExternalLink
class="size-3"
/>
</a>
{/if}
</span>
</Tooltip.Content>
</Tooltip.Root>
{/if}
</td>
{:else if colId === "time"}
<td class="py-2 px-2 whitespace-nowrap">
<SimpleTooltip
text={formatMeetingTimesTooltip(
course.meetingTimes,
)}
passthrough
>
{#if timeIsTBA(course)}
<span
class="text-xs text-muted-foreground/60"
>TBA</span
>
{:else}
{@const mt =
course.meetingTimes[0]}
<span>
{#if !isMeetingTimeTBA(mt)}
<span
class="font-mono font-medium"
>{formatMeetingDays(
mt,
)}</span
>
{" "}
{/if}
{#if !isTimeTBA(mt)}
<span
class="text-muted-foreground"
>{formatTimeRange(
mt.begin_time,
mt.end_time,
)}</span
>
{:else}
<span
class="text-xs text-muted-foreground/60"
>TBA</span
>
{/if}
{#if course.meetingTimes.length > 1}
<span
class="ml-1 text-xs text-muted-foreground/70 font-medium"
>+{course
.meetingTimes
.length -
1}</span
>
{/if}
</span>
{/if}
</SimpleTooltip>
</td>
{:else if colId === "location"}
{@const concern =
getDeliveryConcern(course)}
{@const accentColor =
concernAccentColor(concern)}
{@const locTooltip =
formatLocationTooltip(course)}
{@const locDisplay =
formatLocationDisplay(course)}
<td class="py-2 px-2 whitespace-nowrap">
{#if locTooltip}
<SimpleTooltip
text={locTooltip}
delay={200}
passthrough
>
<span
class="text-muted-foreground"
class:pl-2={accentColor !==
null}
style:border-left={accentColor
? `2px solid ${accentColor}`
: undefined}
>
{locDisplay ?? "—"}
</span>
</SimpleTooltip>
{:else if locDisplay}
<span
class="text-muted-foreground"
>
{locDisplay}
</span>
{:else}
<span
class="text-xs text-muted-foreground/50"
>—</span
>
{/if}
</td>
{:else if colId === "seats"}
<td
class="py-2 px-2 text-right whitespace-nowrap"
>
<SimpleTooltip
text="{formatNumber(openSeats(
course,
))} of {formatNumber(course.maxEnrollment)} seats open, {formatNumber(course.enrollment)} enrolled{course.waitCount >
0
? `, ${formatNumber(course.waitCount)} waitlisted`
: ''}"
delay={200}
side="left"
passthrough
>
<span
class="inline-flex items-center gap-1.5"
>
<span
class="size-1.5 rounded-full {seatsDotColor(
course,
)} shrink-0"
></span>
<span
class="{seatsColor(
course,
)} font-medium tabular-nums"
>{#if openSeats(course) === 0}Full{:else}{openSeats(
course,
)} open{/if}</span
>
<span
class="text-muted-foreground/60 tabular-nums"
>{formatNumber(course.enrollment)}/{formatNumber(course.maxEnrollment)}{#if course.waitCount > 0}
· WL {formatNumber(course.waitCount)}/{formatNumber(course.waitCapacity)}{/if}</span
>
</span>
</SimpleTooltip>
</td>
{/if}
{/each}
</tr>
{#if expandedCrn === course.crn}
<tr>
<td
colspan={visibleColumnIds.length}
class="p-0"
>
<div
transition:slide={{ duration: 200 }}
>
<CourseDetail {course} />
</div>
</td>
</tr>
{/if}
</tbody>
{/each}
{/if}
</table>
</ContextMenu.Trigger>
<ContextMenu.Portal>
<ContextMenu.Content
class="z-50 min-w-40 rounded-md border border-border bg-card p-1 text-card-foreground shadow-lg"
forceMount
>
{#snippet child({ wrapperProps, props, open })}
{#if open}
<div {...wrapperProps}>
<div
{...props}
in:fade={{ duration: 100 }}
out:fade={{ duration: 100 }}
>
{@render columnVisibilityGroup(
ContextMenu.Group,
ContextMenu.GroupHeading,
ContextMenu.CheckboxItem,
ContextMenu.Separator,
ContextMenu.Item,
)}
</div>
</div>
{/if}
{/snippet}
</ContextMenu.Content>
</ContextMenu.Portal>
</ContextMenu.Root>
</div>
@@ -0,0 +1,56 @@
<script lang="ts">
import { page } from "$app/state";
import { TriangleAlert, RotateCcw } from "@lucide/svelte";
interface Props {
/** Heading shown in the error card */
title?: string;
/** The error value from svelte:boundary */
error: unknown;
/** Reset callback from svelte:boundary */
reset: () => void;
}
let { title = "Something went wrong", error, reset }: Props = $props();
let errorName = $derived(error instanceof Error ? error.constructor.name : "Error");
let errorMessage = $derived(error instanceof Error ? error.message : String(error));
let errorStack = $derived(error instanceof Error ? error.stack : null);
</script>
<div class="flex items-center justify-center py-16 px-4">
<div class="w-full max-w-lg rounded-lg border border-status-red/25 bg-status-red/5 overflow-hidden text-sm">
<div class="px-4 py-2.5 border-b border-status-red/15 flex items-center justify-between gap-4">
<div class="flex items-center gap-2 text-status-red">
<TriangleAlert size={16} strokeWidth={2.25} />
<span class="font-semibold">{title}</span>
</div>
<span class="text-xs text-muted-foreground font-mono">{page.url.pathname}</span>
</div>
<div class="px-4 py-3 border-b border-status-red/15">
<span class="text-xs text-muted-foreground/70 font-mono">{errorName}</span>
<pre class="mt-1 text-xs text-foreground/80 overflow-auto whitespace-pre-wrap break-words">{errorMessage}</pre>
</div>
{#if errorStack}
<details class="border-b border-status-red/15">
<summary class="px-4 py-2 text-xs text-muted-foreground/70 cursor-pointer hover:text-muted-foreground select-none">
Stack trace
</summary>
<pre class="px-4 py-3 text-xs text-muted-foreground/60 overflow-auto whitespace-pre-wrap break-words max-h-48">{errorStack}</pre>
</details>
{/if}
<div class="px-4 py-2.5 flex items-center justify-end gap-3">
<span class="text-xs text-muted-foreground/60">Retries this section, not the full page</span>
<button
class="shrink-0 cursor-pointer inline-flex items-center gap-1.5 rounded-md bg-status-red px-3 py-1.5 text-sm font-medium text-white hover:brightness-110 transition-all"
onclick={reset}
>
<RotateCcw size={14} strokeWidth={2.25} />
Try again
</button>
</div>
</div>
</div>
@@ -0,0 +1,36 @@
<script lang="ts">
import { cn } from "$lib/utils";
let {
commitHash,
showStatusLink = true,
class: className,
}: {
commitHash?: string | null;
showStatusLink?: boolean;
class?: string;
} = $props();
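// __APP_VERSION__ is assumed to be a compile-time constant injected at build time (e.g. via a Vite `define`).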
</script>
<div class={cn("flex justify-center items-center gap-2 mt-auto pt-6 pb-4", className)}>
{#if __APP_VERSION__}
<span class="text-xs text-muted-foreground">v{__APP_VERSION__}</span>
<div class="w-px h-3 bg-muted-foreground opacity-30"></div>
{/if}
<a
href={commitHash
? `https://github.com/Xevion/banner/commit/${commitHash}`
: "https://github.com/Xevion/banner"}
target="_blank"
rel="noopener noreferrer"
class="text-xs text-muted-foreground no-underline hover:underline"
>
GitHub
</a>
{#if showStatusLink}
<div class="w-px h-3 bg-muted-foreground opacity-30"></div>
<a href="/health" class="text-xs text-muted-foreground no-underline hover:underline">
Status
</a>
{/if}
</div>
@@ -0,0 +1,58 @@
<script lang="ts">
import { page } from "$app/state";
import { Search, User, Clock } from "@lucide/svelte";
import { authStore } from "$lib/auth.svelte";
import ThemeToggle from "./ThemeToggle.svelte";
const staticTabs = [
{ href: "/", label: "Search", icon: Search },
{ href: "/timeline", label: "Timeline", icon: Clock },
] as const;
const APP_PREFIXES = ["/profile", "/settings", "/admin"];
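// Any route under these prefixes counts as "inside the app" and highlights the profile tab.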
let profileTab = $derived({
href: authStore.isAuthenticated ? "/profile" : "/login",
label: authStore.isAuthenticated ? "Account" : "Login",
icon: User,
});
function isActive(tabHref: string): boolean {
if (tabHref === "/") return page.url.pathname === "/";
if (tabHref === "/profile") {
return APP_PREFIXES.some((p) => page.url.pathname.startsWith(p));
}
return page.url.pathname.startsWith(tabHref);
}
</script>
<nav class="w-full flex justify-center pt-5 px-5">
<div class="w-full max-w-6xl flex items-center justify-between">
<!-- pointer-events-auto: root layout wraps nav in pointer-events-none overlay -->
<div class="flex items-center gap-1 rounded-lg bg-muted p-1 pointer-events-auto">
{#each staticTabs as tab}
<a
href={tab.href}
class="flex items-center gap-1.5 rounded-md px-3 py-1.5 text-sm font-medium transition-colors no-underline
{isActive(tab.href)
? 'bg-background text-foreground shadow-sm'
: 'text-muted-foreground hover:text-foreground hover:bg-background/50'}"
>
<tab.icon size={15} strokeWidth={2} />
{tab.label}
</a>
{/each}
<a
href={profileTab.href}
class="flex items-center gap-1.5 rounded-md px-3 py-1.5 text-sm font-medium transition-colors no-underline
{isActive(profileTab.href)
? 'bg-background text-foreground shadow-sm'
: 'text-muted-foreground hover:text-foreground hover:bg-background/50'}"
>
<User size={15} strokeWidth={2} />
{profileTab.label}
</a>
<ThemeToggle />
</div>
</div>
</nav>
@@ -0,0 +1,76 @@
<script lang="ts">
import { navigationStore } from "$lib/stores/navigation.svelte";
import type { Snippet } from "svelte";
import { cubicOut } from "svelte/easing";
import type { TransitionConfig } from "svelte/transition";
type Axis = "horizontal" | "vertical";
let {
key,
children,
axis = "horizontal",
inDelay = 0,
outDelay = 0,
}: {
key: string;
children: Snippet;
axis?: Axis;
inDelay?: number;
outDelay?: number;
} = $props();
const DURATION = 400;
const OFFSET = 40;
function translate(axis: Axis, value: number): string {
return axis === "vertical" ? `translateY(${value}px)` : `translateX(${value}px)`;
}
function inTransition(_node: HTMLElement): TransitionConfig {
const dir = navigationStore.direction;
if (dir === "fade") {
return {
duration: DURATION,
delay: inDelay,
easing: cubicOut,
css: (t: number) => `opacity: ${t}`,
};
}
const offset = dir === "right" ? OFFSET : -OFFSET;
return {
duration: DURATION,
delay: inDelay,
easing: cubicOut,
css: (t: number) => `opacity: ${t}; transform: ${translate(axis, (1 - t) * offset)}`,
};
}
function outTransition(_node: HTMLElement): TransitionConfig {
const dir = navigationStore.direction;
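// Absolutely position the outgoing page so the incoming one isn't pushed out of place while both are mounted.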
const base = "position: absolute; top: 0; left: 0; width: 100%; height: 100%";
if (dir === "fade") {
return {
duration: DURATION,
delay: outDelay,
easing: cubicOut,
css: (t: number) => `${base}; opacity: ${t}`,
};
}
const offset = dir === "right" ? -OFFSET : OFFSET;
return {
duration: DURATION,
delay: outDelay,
easing: cubicOut,
css: (t: number) => `${base}; opacity: ${t}; transform: ${translate(axis, (1 - t) * offset)}`,
};
}
</script>
<div class="relative flex flex-1 flex-col overflow-hidden">
{#key key}
<div in:inTransition out:outTransition class="flex flex-1 flex-col">
{@render children()}
</div>
{/key}
</div>
@@ -0,0 +1,167 @@
<script lang="ts">
import { Select } from "bits-ui";
import { ChevronUp, ChevronDown } from "@lucide/svelte";
import type { Action } from "svelte/action";
import { formatNumber } from "$lib/utils";
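// One-shot slide for page-number changes: `direction` is ±1 (set by goToPage below)
// and 0 on initial render, where no animation should play.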
const slideIn: Action<HTMLElement, number> = (node, direction) => {
if (direction !== 0) {
node.animate(
[
{ transform: `translateX(${direction * 20}px)`, opacity: 0 },
{ transform: "translateX(0)", opacity: 1 },
],
{ duration: 200, easing: "ease-out" }
);
}
};
let {
totalCount,
offset,
limit,
onPageChange,
}: {
totalCount: number;
offset: number;
limit: number;
onPageChange: (newOffset: number) => void;
} = $props();
const currentPage = $derived(Math.floor(offset / limit) + 1);
const totalPages = $derived(Math.ceil(totalCount / limit));
const start = $derived(offset + 1);
const end = $derived(Math.min(offset + limit, totalCount));
// Track direction for slide animation
let direction = $state(0);
// 5 page slots: current-2, current-1, current, current+1, current+2
const pageSlots = $derived([-2, -1, 0, 1, 2].map((delta) => currentPage + delta));
function isSlotVisible(page: number): boolean {
return page >= 1 && page <= totalPages;
}
function goToPage(page: number) {
direction = page > currentPage ? 1 : -1;
onPageChange((page - 1) * limit);
}
// Build items array for the Select dropdown
const pageItems = $derived(
Array.from({ length: totalPages }, (_, i) => ({
value: String(i + 1),
label: String(i + 1),
}))
);
const selectValue = $derived(String(currentPage));
</script>
{#if totalCount > 0 && totalPages > 1}
<div class="flex items-start text-xs -mt-3 pl-2">
<!-- Left zone: result count -->
<div class="flex-1">
<span class="text-muted-foreground">
Showing {formatNumber(start)}&ndash;{formatNumber(end)} of {formatNumber(totalCount)} courses
</span>
</div>
<!-- Center zone: page buttons -->
<div class="flex items-center gap-1">
{#key currentPage}
{#each pageSlots as page, i (i)}
{#if i === 2}
<!-- Center slot: current page with dropdown trigger -->
<Select.Root
type="single"
value={selectValue}
onValueChange={(v) => {
if (v) goToPage(Number(v));
}}
items={pageItems}
>
<Select.Trigger
class="inline-flex items-center justify-center gap-1 w-auto min-w-9 h-9 px-2.5
rounded-md text-sm font-medium tabular-nums
border border-border bg-card text-foreground
hover:bg-muted/50 active:bg-muted transition-colors
cursor-pointer select-none outline-none
focus-visible:ring-2 focus-visible:ring-ring focus-visible:ring-offset-2 focus-visible:ring-offset-background"
aria-label="Page {currentPage} of {totalPages}, click to select page"
>
<span use:slideIn={direction}>{currentPage}</span>
<ChevronUp class="size-3 text-muted-foreground" />
</Select.Trigger>
<Select.Portal>
<Select.Content
class="border border-border bg-card shadow-md outline-hidden z-50
max-h-72 min-w-16 w-auto
select-none rounded-md p-1
data-[state=open]:animate-in data-[state=closed]:animate-out
data-[state=closed]:fade-out-0 data-[state=open]:fade-in-0
data-[state=closed]:zoom-out-95 data-[state=open]:zoom-in-95
data-[side=top]:slide-in-from-bottom-2
data-[side=bottom]:slide-in-from-top-2"
side="top"
sideOffset={6}
>
<Select.ScrollUpButton class="flex w-full items-center justify-center py-0.5">
<ChevronUp class="size-3.5 text-muted-foreground" />
</Select.ScrollUpButton>
<Select.Viewport class="p-0.5">
{#each pageItems as item (item.value)}
<Select.Item
class="rounded-sm outline-hidden flex h-8 w-full select-none items-center
justify-center px-3 text-sm tabular-nums
data-[highlighted]:bg-accent data-[highlighted]:text-accent-foreground
data-[selected]:font-semibold"
value={item.value}
label={item.label}
>
{item.label}
</Select.Item>
{/each}
</Select.Viewport>
<Select.ScrollDownButton class="flex w-full items-center justify-center py-0.5">
<ChevronDown class="size-3.5 text-muted-foreground" />
</Select.ScrollDownButton>
</Select.Content>
</Select.Portal>
</Select.Root>
{:else}
<!-- Side slot: navigable page button or invisible placeholder -->
<button
class="inline-flex items-center justify-center w-9 h-9
rounded-md text-sm tabular-nums
text-muted-foreground
hover:bg-muted/50 hover:text-foreground active:bg-muted transition-colors
cursor-pointer select-none
focus-visible:ring-2 focus-visible:ring-ring focus-visible:ring-offset-2 focus-visible:ring-offset-background
{isSlotVisible(page) ? '' : 'invisible pointer-events-none'}"
onclick={() => goToPage(page)}
aria-label="Go to page {page}"
aria-hidden={!isSlotVisible(page)}
tabindex={isSlotVisible(page) ? 0 : -1}
disabled={!isSlotVisible(page)}
use:slideIn={direction}
>
{page}
</button>
{/if}
{/each}
{/key}
</div>
<!-- Right zone: spacer for centering -->
<div class="flex-1"></div>
</div>
{:else if totalCount > 0}
<!-- Single page: just show the count, no pagination controls -->
<div class="flex items-start text-xs -mt-3 pl-2">
<span class="text-muted-foreground">
Showing {formatNumber(start)}&ndash;{formatNumber(end)} of {formatNumber(totalCount)} courses
</span>
</div>
{/if}
@@ -0,0 +1,45 @@
<script lang="ts">
import type { Term, Subject } from "$lib/api";
import SimpleTooltip from "./SimpleTooltip.svelte";
import TermCombobox from "./TermCombobox.svelte";
import SubjectCombobox from "./SubjectCombobox.svelte";
let {
terms,
subjects,
selectedTerm = $bindable(),
selectedSubjects = $bindable(),
query = $bindable(),
openOnly = $bindable(),
}: {
terms: Term[];
subjects: Subject[];
selectedTerm: string;
selectedSubjects: string[];
query: string;
openOnly: boolean;
} = $props();
</script>
<div class="flex flex-wrap gap-3 items-start">
<TermCombobox {terms} bind:value={selectedTerm} />
<SubjectCombobox {subjects} bind:value={selectedSubjects} />
<input
type="text"
placeholder="Search courses..."
aria-label="Search courses"
bind:value={query}
class="h-9 border border-border bg-card text-foreground rounded-md px-3 text-sm flex-1 min-w-[200px]
focus-visible:outline-none focus-visible:ring-2 focus-visible:ring-ring focus-visible:ring-offset-2 focus-visible:ring-offset-background
transition-colors"
/>
<SimpleTooltip text="Show only courses with available seats" delay={200} passthrough>
<label class="flex items-center gap-1.5 h-9 text-sm text-muted-foreground cursor-pointer">
<input type="checkbox" bind:checked={openOnly} />
Open only
</label>
</SimpleTooltip>
</div>
@@ -0,0 +1,68 @@
<script lang="ts">
import { onMount } from "svelte";
import SimpleTooltip from "$lib/components/SimpleTooltip.svelte";
import { relativeTime } from "$lib/time";
import { formatNumber } from "$lib/utils";
export interface SearchMeta {
totalCount: number;
durationMs: number;
timestamp: Date;
}
let { meta }: { meta: SearchMeta | null } = $props();
let now = $state(new Date());
let formattedTime = $derived(
meta
? meta.timestamp.toLocaleTimeString(undefined, {
hour: "2-digit",
minute: "2-digit",
second: "2-digit",
})
: ""
);
let relativeTimeResult = $derived(meta ? relativeTime(meta.timestamp, now) : null);
let relativeTimeText = $derived(relativeTimeResult?.text ?? "");
let countLabel = $derived(meta ? formatNumber(meta.totalCount) : "");
let resultNoun = $derived(meta ? (meta.totalCount !== 1 ? "results" : "result") : "");
let durationLabel = $derived(meta ? `${Math.round(meta.durationMs)}ms` : "");
let tooltipText = $derived(meta ? `${relativeTimeText} · ${formattedTime}` : "");
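// Tick `now` exactly when the relative label would next change (per nextUpdateMs)
// instead of on a fixed interval, so "5s ago" flips to "6s ago" on time.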
onMount(() => {
let nowTimeoutId: ReturnType<typeof setTimeout> | null = null;
function scheduleNowTick() {
const delay = relativeTimeResult?.nextUpdateMs ?? 1000;
nowTimeoutId = setTimeout(() => {
now = new Date();
scheduleNowTick();
}, delay);
}
scheduleNowTick();
return () => {
if (nowTimeoutId) clearTimeout(nowTimeoutId);
};
});
</script>
<SimpleTooltip
text={tooltipText}
contentClass="whitespace-nowrap text-[12px] px-2 py-1"
triggerClass="self-start"
sideOffset={0}
>
<span
class="pl-1 text-xs transition-opacity duration-200"
style:opacity={meta ? 1 : 0}
>
<span class="text-muted-foreground/70">{countLabel}</span>
<span class="text-muted-foreground/35">{resultNoun} in</span>
<span class="text-muted-foreground/70">{durationLabel}</span>
</span>
</SimpleTooltip>
@@ -0,0 +1,42 @@
<script lang="ts">
import { Tooltip } from "bits-ui";
import type { Snippet } from "svelte";
import { cn } from "$lib/utils";
let {
text,
delay = 150,
side = "top",
passthrough = false,
triggerClass = "",
contentClass = "",
sideOffset = 6,
children,
}: {
text: string;
delay?: number;
side?: "top" | "bottom" | "left" | "right";
passthrough?: boolean;
triggerClass?: string;
contentClass?: string;
sideOffset?: number;
children: Snippet;
} = $props();
</script>
<Tooltip.Root delayDuration={delay} disableHoverableContent={passthrough}>
<Tooltip.Trigger>
{#snippet child({ props })}
<span class={triggerClass} {...props}>
{@render children()}
</span>
{/snippet}
</Tooltip.Trigger>
<Tooltip.Content
{side}
{sideOffset}
class={cn("z-50 bg-card text-card-foreground text-xs border border-border rounded-md px-2.5 py-1.5 shadow-sm whitespace-pre-line max-w-max text-left", contentClass)}
>
{text}
</Tooltip.Content>
</Tooltip.Root>
@@ -0,0 +1,162 @@
<script lang="ts">
import { Combobox } from "bits-ui";
import { Check, ChevronsUpDown } from "@lucide/svelte";
import { fly } from "svelte/transition";
import type { Subject } from "$lib/api";
import { formatNumber } from "$lib/utils";
let {
subjects,
value = $bindable(),
}: {
subjects: Subject[];
value: string[];
} = $props();
let open = $state(false);
let searchValue = $state("");
let containerEl = $state<HTMLDivElement>(null!);
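// Rank matches in three tiers: exact code, code prefix, then substring hits in the code or description.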
const filteredSubjects = $derived.by(() => {
const query = searchValue.toLowerCase().trim();
if (query === "") return subjects;
const exactCode: Subject[] = [];
const codeStartsWith: Subject[] = [];
const descriptionMatch: Subject[] = [];
for (const s of subjects) {
const codeLower = s.code.toLowerCase();
const descLower = s.description.toLowerCase();
if (codeLower === query) {
exactCode.push(s);
} else if (codeLower.startsWith(query)) {
codeStartsWith.push(s);
} else if (descLower.includes(query) || codeLower.includes(query)) {
descriptionMatch.push(s);
}
}
return [...exactCode, ...codeStartsWith, ...descriptionMatch];
});
const MAX_VISIBLE_CHIPS = 3;
const visibleChips = $derived(value.slice(0, MAX_VISIBLE_CHIPS));
const overflowCount = $derived(Math.max(0, value.length - MAX_VISIBLE_CHIPS));
function removeSubject(code: string) {
value = value.filter((v) => v !== code);
}
// bits-ui sets the input text to the last selected item's label — clear it
$effect(() => {
value;
const input = containerEl?.querySelector("input");
if (input) {
input.value = "";
searchValue = "";
}
});
</script>
<Combobox.Root
type="multiple"
bind:value
bind:open
onOpenChange={(o: boolean) => {
if (!o) searchValue = "";
}}
>
<!-- svelte-ignore a11y_click_events_have_key_events -->
<!-- svelte-ignore a11y_no_static_element_interactions -->
<div
class="relative h-9 rounded-md border border-border bg-card
flex flex-nowrap items-center gap-1 w-56 pr-9 overflow-hidden cursor-pointer
has-[:focus-visible]:ring-2 has-[:focus-visible]:ring-ring has-[:focus-visible]:ring-offset-2 has-[:focus-visible]:ring-offset-background"
bind:this={containerEl}
onclick={() => { containerEl?.querySelector('input')?.focus(); }}
>
{#if value.length > 0}
{#each (open ? value : visibleChips) as code (code)}
<span
role="button"
tabindex="-1"
onmousedown={(e) => { e.preventDefault(); e.stopPropagation(); }}
onclick={(e) => { e.stopPropagation(); removeSubject(code); }}
onkeydown={(e) => { if (e.key === "Enter" || e.key === " ") { e.stopPropagation(); removeSubject(code); } }}
class="inline-flex items-center rounded bg-muted px-1.5 py-0.5 text-xs font-mono shrink-0
text-muted-foreground hover:outline hover:outline-1 hover:outline-ring
cursor-pointer transition-[outline] duration-100 first:ml-2"
>
{code}
</span>
{/each}
{#if !open && overflowCount > 0}
<span class="text-xs text-muted-foreground shrink-0">+{formatNumber(overflowCount)}</span>
{/if}
{/if}
<Combobox.Input
oninput={(e) => (searchValue = e.currentTarget.value)}
onfocus={() => { open = true; }}
class="h-full min-w-0 flex-1 bg-transparent text-muted-foreground text-sm
placeholder:text-muted-foreground outline-none border-none
{value.length > 0 ? 'pl-1' : 'pl-3'}"
placeholder={value.length > 0 ? "Filter..." : "All Subjects"}
aria-label="Search subjects"
autocomplete="off"
autocorrect="off"
spellcheck={false}
/>
<span class="absolute end-2 top-1/2 -translate-y-1/2 text-muted-foreground pointer-events-none">
<ChevronsUpDown class="size-4" />
</span>
</div>
<Combobox.Portal>
<Combobox.Content
customAnchor={containerEl}
class="border border-border bg-card shadow-md
outline-hidden z-50
max-h-72 min-w-[var(--bits-combobox-anchor-width)] w-max max-w-96
select-none rounded-md p-1
data-[side=bottom]:translate-y-1 data-[side=top]:-translate-y-1"
sideOffset={4}
forceMount
>
{#snippet child({ wrapperProps, props, open: isOpen })}
{#if isOpen}
<div {...wrapperProps}>
<div {...props} transition:fly={{ duration: 150, y: -4 }}>
<Combobox.Viewport class="p-0.5">
{#each filteredSubjects as subject (subject.code)}
<Combobox.Item
class="rounded-sm outline-hidden flex h-8 w-full select-none items-center gap-2 px-2 text-sm whitespace-nowrap
data-[highlighted]:bg-accent data-[highlighted]:text-accent-foreground"
value={subject.code}
label={subject.description}
>
{#snippet children({ selected })}
<span class="inline-flex items-center justify-center rounded bg-muted px-1 py-0.5
text-xs font-mono text-muted-foreground w-10 shrink-0 text-center">
{subject.code}
</span>
<span class="flex-1">{subject.description}</span>
{#if selected}
<Check class="ml-auto size-4 shrink-0" />
{/if}
{/snippet}
</Combobox.Item>
{:else}
<span class="block px-2 py-2 text-sm text-muted-foreground">
No subjects found.
</span>
{/each}
</Combobox.Viewport>
</div>
</div>
{/if}
{/snippet}
</Combobox.Content>
</Combobox.Portal>
</Combobox.Root>
@@ -0,0 +1,139 @@
<script lang="ts">
import { Combobox } from "bits-ui";
import { Check, ChevronsUpDown } from "@lucide/svelte";
import { fly } from "svelte/transition";
import type { Term } from "$lib/api";
let {
terms,
value = $bindable(),
}: {
terms: Term[];
value: string;
} = $props();
let open = $state(false);
let searchValue = $state("");
let containerEl = $state<HTMLDivElement>(null!);
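// The first term not flagged "(View Only)" is treated as the current, registration-open term.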
const currentTermCode = $derived(
terms.find((t) => !t.description.includes("(View Only)"))?.code ?? ""
);
const selectedLabel = $derived(
terms.find((t) => t.code === value)?.description ?? "Select term..."
);
const filteredTerms = $derived.by(() => {
const query = searchValue.toLowerCase();
const matched =
query === "" ? terms : terms.filter((t) => t.description.toLowerCase().includes(query));
const current = matched.find((t) => t.code === currentTermCode);
const rest = matched.filter((t) => t.code !== currentTermCode);
return current ? [current, ...rest] : rest;
});
// Manage DOM input text: clear when open for searching, restore label when closed
$effect(() => {
const _open = open;
void value; // track selection changes
const _label = selectedLabel;
const input = containerEl?.querySelector("input");
if (!input) return;
if (_open) {
input.value = "";
searchValue = "";
} else {
input.value = _label;
}
});
</script>
<Combobox.Root
type="single"
bind:value={() => value, (v) => { if (v) value = v; }}
bind:open
onOpenChange={(o: boolean) => {
if (!o) searchValue = "";
}}
>
<!-- svelte-ignore a11y_no_static_element_interactions -->
<div
class="relative h-9 rounded-md border border-border bg-card
flex items-center w-40 cursor-pointer
has-[:focus-visible]:ring-2 has-[:focus-visible]:ring-ring has-[:focus-visible]:ring-offset-2 has-[:focus-visible]:ring-offset-background"
role="presentation"
bind:this={containerEl}
onclick={() => { containerEl?.querySelector('input')?.focus(); }}
onkeydown={() => { containerEl?.querySelector('input')?.focus(); }}
>
<Combobox.Input
oninput={(e) => (searchValue = e.currentTarget.value)}
onfocus={() => { open = true; }}
class="h-full w-full bg-transparent text-muted-foreground text-sm
placeholder:text-muted-foreground outline-none border-none
pl-3 pr-9 truncate"
placeholder="Select term..."
aria-label="Select term"
autocomplete="off"
autocorrect="off"
spellcheck={false}
/>
<span class="absolute end-2 top-1/2 -translate-y-1/2 text-muted-foreground pointer-events-none">
<ChevronsUpDown class="size-4" />
</span>
</div>
<Combobox.Portal>
<Combobox.Content
customAnchor={containerEl}
class="border border-border bg-card shadow-md
outline-hidden z-50
max-h-72 min-w-[var(--bits-combobox-anchor-width)]
select-none rounded-md p-1
data-[side=bottom]:translate-y-1 data-[side=top]:-translate-y-1"
sideOffset={4}
forceMount
>
{#snippet child({ wrapperProps, props, open: isOpen })}
{#if isOpen}
<div {...wrapperProps}>
<div {...props} transition:fly={{ duration: 150, y: -4 }}>
<Combobox.Viewport class="p-0.5">
{#each filteredTerms as term, i (term.code)}
{#if i === 1 && term.code !== currentTermCode && filteredTerms[0]?.code === currentTermCode}
<div class="mx-2 my-1 h-px bg-border"></div>
{/if}
<Combobox.Item
class="rounded-sm outline-hidden flex h-8 w-full select-none items-center px-2 text-sm
data-[highlighted]:bg-accent data-[highlighted]:text-accent-foreground
{term.code === value ? 'cursor-default' : 'cursor-pointer'}
{term.code === currentTermCode ? 'font-medium text-foreground' : 'text-foreground'}"
value={term.code}
label={term.description}
>
{#snippet children({ selected })}
<span class="flex-1 truncate">
{term.description}
{#if term.code === currentTermCode}
<span class="ml-1.5 text-xs text-muted-foreground font-normal">current</span>
{/if}
</span>
{#if selected}
<Check class="ml-2 size-4 shrink-0" />
{/if}
{/snippet}
</Combobox.Item>
{:else}
<span class="block px-2 py-2 text-sm text-muted-foreground">
No terms found.
</span>
{/each}
</Combobox.Viewport>
</div>
</div>
{/if}
{/snippet}
</Combobox.Content>
</Combobox.Portal>
</Combobox.Root>
@@ -0,0 +1,69 @@
<script lang="ts">
import { tick } from "svelte";
import { Moon, Sun } from "@lucide/svelte";
import { themeStore } from "$lib/stores/theme.svelte";
import SimpleTooltip from "./SimpleTooltip.svelte";
/**
* Theme toggle with View Transitions API circular reveal animation.
* The clip-path circle expands from the click point to cover the viewport.
*/
async function handleToggle(event: MouseEvent) {
const supportsViewTransition =
typeof document !== "undefined" &&
"startViewTransition" in document &&
!window.matchMedia("(prefers-reduced-motion: reduce)").matches;
if (!supportsViewTransition) {
themeStore.toggle();
return;
}
const x = event.clientX;
const y = event.clientY;
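// Radius to the farthest viewport corner, so the expanding circle always covers the full screen.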
const endRadius = Math.hypot(Math.max(x, innerWidth - x), Math.max(y, innerHeight - y));
const transition = document.startViewTransition(async () => {
themeStore.toggle();
await tick();
});
transition.ready.then(() => {
document.documentElement.animate(
{
clipPath: [`circle(0px at ${x}px ${y}px)`, `circle(${endRadius}px at ${x}px ${y}px)`],
},
{
duration: 500,
easing: "cubic-bezier(0.4, 0, 0.2, 1)",
pseudoElement: "::view-transition-new(root)",
}
);
});
}
</script>
<SimpleTooltip text={themeStore.isDark ? "Switch to light mode" : "Switch to dark mode"} delay={200} side="bottom" passthrough>
<button
type="button"
onclick={(e) => handleToggle(e)}
aria-label={themeStore.isDark ? "Switch to light mode" : "Switch to dark mode"}
class="cursor-pointer border-none rounded-md flex items-center justify-center p-1.5
text-muted-foreground hover:text-foreground hover:bg-background/50 bg-transparent transition-colors"
>
<div class="relative size-[18px]">
<Sun
size={18}
class="absolute inset-0 transition-all duration-300 {themeStore.isDark
? 'rotate-90 scale-0 opacity-0'
: 'rotate-0 scale-100 opacity-100'}"
/>
<Moon
size={18}
class="absolute inset-0 transition-all duration-300 {themeStore.isDark
? 'rotate-0 scale-100 opacity-100'
: '-rotate-90 scale-0 opacity-0'}"
/>
</div>
</button>
</SimpleTooltip>
@@ -0,0 +1,656 @@
<script lang="ts">
import { onMount } from "svelte";
import { scaleTime, scaleLinear } from "d3-scale";
import { SUBJECTS, type Subject } from "$lib/timeline/data";
import type { TimeSlot, ChartContext } from "$lib/timeline/types";
import {
PADDING,
DEFAULT_AXIS_RATIO,
CHART_HEIGHT_RATIO,
MIN_SPAN_MS,
MAX_SPAN_MS,
DEFAULT_SPAN_MS,
ZOOM_FACTOR,
ZOOM_KEY_FACTOR,
ZOOM_EASE,
ZOOM_SETTLE_THRESHOLD,
PAN_FRICTION,
PAN_STOP_THRESHOLD,
PAN_STOP_THRESHOLD_Y,
VELOCITY_SAMPLE_WINDOW,
VELOCITY_MIN_DT,
PAN_STEP_RATIO,
PAN_STEP_CTRL_RATIO,
PAN_EASE,
PAN_SETTLE_THRESHOLD_PX,
YRATIO_STEP,
YRATIO_MIN,
YRATIO_MAX,
YRATIO_SETTLE_THRESHOLD,
FOLLOW_EASE,
MIN_MAXY,
MAX_DT,
DEFAULT_DT,
TAP_MAX_DURATION_MS,
TAP_MAX_DISTANCE_PX,
} from "$lib/timeline/constants";
import { createTimelineStore } from "$lib/timeline/store.svelte";
import {
createAnimMap,
syncAnimTargets,
stepAnimations,
pruneAnimMap,
} from "$lib/timeline/animation";
import {
getVisibleSlots,
findSlotByTime,
snapToSlot,
enabledTotalClasses,
} from "$lib/timeline/viewport";
import {
drawGrid,
drawHoverColumn,
drawStackedArea,
drawNowLine,
drawTimeAxis,
stackVisibleSlots,
} from "$lib/timeline/renderer";
import TimelineDrawer from "./TimelineDrawer.svelte";
import TimelineTooltip from "./TimelineTooltip.svelte";
// ── Reactive DOM state ──────────────────────────────────────────────
let canvasEl: HTMLCanvasElement | undefined = $state();
let containerEl: HTMLDivElement | undefined = $state();
let width = $state(800);
let height = $state(400);
let dpr = $state(1);
// ── View window ─────────────────────────────────────────────────────
let viewCenter = $state(Date.now());
let viewSpan = $state(DEFAULT_SPAN_MS);
let viewYRatio = $state(DEFAULT_AXIS_RATIO);
// ── Interaction state ───────────────────────────────────────────────
let isDragging = $state(false);
let dragStartX = $state(0);
let dragStartY = $state(0);
let dragStartCenter = $state(0);
let dragStartYRatio = $state(0);
let followEnabled = $state(true);
let ctrlHeld = $state(false);
// ── Animation state (intentionally non-reactive — updated in rAF) ──
let panVelocity = 0;
let panVelocityY = 0;
let pointerSamples: { time: number; x: number; y: number }[] = [];
// ── Multi-touch / pinch state ────────────────────────────────────────
let activePointers = new Map<number, { x: number; y: number }>();
let isPinching = false;
let pinchStartDist = 0;
let pinchStartSpan = 0;
let pinchAnchorTime = 0;
let pinchAnchorRatio = 0.5;
// ── Tap detection ────────────────────────────────────────────────────
let pointerDownTime = 0;
let pointerDownPos = { x: 0, y: 0 };
let targetSpan = DEFAULT_SPAN_MS;
let zoomAnchorTime = 0;
let zoomAnchorRatio = 0.5;
let isZoomAnimating = false;
let targetCenter = Date.now();
let isPanAnimating = false;
let targetYRatio = DEFAULT_AXIS_RATIO;
let isYPanAnimating = false;
let animationFrameId = 0;
let lastFrameTime = 0;
let animatedMaxY = MIN_MAXY;
const animMap = createAnimMap();
// ── Tooltip + hover ─────────────────────────────────────────────────
let tooltipVisible = $state(false);
let tooltipX = $state(0);
let tooltipY = $state(0);
let tooltipSlot: TimeSlot | null = $state(null);
let hoverSlotTime: number | null = $state(null);
let lastPointerClientX = 0;
let lastPointerClientY = 0;
let pointerOverCanvas = false;
// ── Drawer ──────────────────────────────────────────────────────────
let drawerOpen = $state(false);
let enabledSubjects: Set<Subject> = $state(new Set(SUBJECTS));
// ── Data store ──────────────────────────────────────────────────────
const store = createTimelineStore();
let data: TimeSlot[] = $derived(store.data);
let activeSubjects = $derived(SUBJECTS.filter((s) => enabledSubjects.has(s)));
// ── Derived layout ──────────────────────────────────────────────────
let viewStart = $derived(viewCenter - viewSpan / 2);
let viewEnd = $derived(viewCenter + viewSpan / 2);
let chartHeight = $derived(height * CHART_HEIGHT_RATIO);
let chartBottom = $derived(height * viewYRatio);
let chartTop = $derived(chartBottom - chartHeight);
let xScale = $derived(
scaleTime()
.domain([new Date(viewStart), new Date(viewEnd)])
.range([PADDING.left, width - PADDING.right])
);
// Reused across frames — domain/range updated imperatively in render().
let yScale = scaleLinear()
.domain([0, MIN_MAXY * 1.1])
.range([0, 1]);
// ── Subject toggling ────────────────────────────────────────────────
function toggleSubject(subject: Subject) {
const next = new Set(enabledSubjects);
if (next.has(subject)) next.delete(subject);
else next.add(subject);
enabledSubjects = next;
}
function enableAll() {
enabledSubjects = new Set(SUBJECTS);
}
function disableAll() {
enabledSubjects = new Set();
}
// ── Rendering ───────────────────────────────────────────────────────
function render() {
if (!canvasEl) return;
const ctx = canvasEl.getContext("2d");
if (!ctx) return;
// Update yScale in-place (no allocation per frame).
yScale.domain([0, animatedMaxY * 1.1]).range([chartBottom, chartTop]);
ctx.save();
ctx.scale(dpr, dpr);
ctx.clearRect(0, 0, width, height);
const chart: ChartContext = {
ctx,
xScale,
yScale,
width,
chartTop,
chartBottom,
viewSpan,
viewStart,
viewEnd,
};
const visible = getVisibleSlots(data, viewStart, viewEnd);
const visibleStack = stackVisibleSlots(visible, enabledSubjects, animMap);
drawGrid(chart);
drawHoverColumn(chart, visibleStack, hoverSlotTime);
drawStackedArea(chart, visibleStack);
drawNowLine(chart);
drawTimeAxis(chart);
ctx.restore();
}
// ── Hover logic ─────────────────────────────────────────────────────
function updateHover() {
if (!pointerOverCanvas || isDragging || !canvasEl) return;
const rect = canvasEl.getBoundingClientRect();
const x = lastPointerClientX - rect.left;
const y = lastPointerClientY - rect.top;
if (y < chartTop || y > chartBottom) {
tooltipVisible = false;
hoverSlotTime = null;
return;
}
const time = xScale.invert(x);
const snappedTime = snapToSlot(time.getTime());
const slot = findSlotByTime(data, snappedTime);
if (!slot) {
tooltipVisible = false;
hoverSlotTime = null;
return;
}
const total = enabledTotalClasses(slot, activeSubjects);
if (total <= 0) {
tooltipVisible = false;
hoverSlotTime = null;
return;
}
// Bypass area overlap check when CTRL is held.
if (!ctrlHeld) {
const stackTopY = yScale(total);
if (y < stackTopY) {
tooltipVisible = false;
hoverSlotTime = null;
return;
}
}
tooltipSlot = slot;
tooltipX = lastPointerClientX;
tooltipY = lastPointerClientY;
tooltipVisible = true;
hoverSlotTime = snappedTime;
}
// ── Interaction helpers ───────────────────────────────────────────────
function pinchDistance(): number {
const pts = [...activePointers.values()];
if (pts.length < 2) return 0;
const dx = pts[1].x - pts[0].x;
const dy = pts[1].y - pts[0].y;
return Math.hypot(dx, dy);
}
function pinchMidpoint(): { x: number; y: number } {
const pts = [...activePointers.values()];
if (pts.length < 2) return { x: 0, y: 0 };
return { x: (pts[0].x + pts[1].x) / 2, y: (pts[0].y + pts[1].y) / 2 };
}
// ── Interaction handlers ────────────────────────────────────────────
function onPointerDown(e: PointerEvent) {
if (e.button !== 0) return;
(e.currentTarget as HTMLElement).setPointerCapture(e.pointerId);
activePointers.set(e.pointerId, { x: e.clientX, y: e.clientY });
// Two fingers down → start pinch-to-zoom
if (activePointers.size === 2) {
isDragging = false;
isPinching = true;
pinchStartDist = pinchDistance();
pinchStartSpan = viewSpan;
const mid = pinchMidpoint();
const rect = canvasEl?.getBoundingClientRect();
const midX = rect ? mid.x - rect.left : mid.x;
const chartWidth = width - PADDING.left - PADDING.right;
pinchAnchorTime = xScale.invert(midX).getTime();
pinchAnchorRatio = (midX - PADDING.left) / chartWidth;
return;
}
// Single finger / mouse → start drag
isDragging = true;
dragStartX = e.clientX;
dragStartY = e.clientY;
dragStartCenter = viewCenter;
dragStartYRatio = viewYRatio;
followEnabled = false;
panVelocity = 0;
panVelocityY = 0;
isZoomAnimating = false;
isPanAnimating = false;
isYPanAnimating = false;
targetSpan = viewSpan;
tooltipVisible = false;
hoverSlotTime = null;
pointerDownTime = performance.now();
pointerDownPos = { x: e.clientX, y: e.clientY };
pointerSamples = [{ time: performance.now(), x: e.clientX, y: e.clientY }];
}
function onPointerMove(e: PointerEvent) {
ctrlHeld = e.ctrlKey || e.metaKey;
lastPointerClientX = e.clientX;
lastPointerClientY = e.clientY;
pointerOverCanvas = true;
activePointers.set(e.pointerId, { x: e.clientX, y: e.clientY });
// Pinch-to-zoom (two-finger gesture)
if (isPinching && activePointers.size >= 2) {
const dist = pinchDistance();
if (pinchStartDist > 0) {
const scale = pinchStartDist / dist; // fingers apart = zoom in
const newSpan = Math.min(MAX_SPAN_MS, Math.max(MIN_SPAN_MS, pinchStartSpan * scale));
viewSpan = newSpan;
targetSpan = newSpan;
viewCenter = pinchAnchorTime + (0.5 - pinchAnchorRatio) * viewSpan;
}
return;
}
if (isDragging) {
const dx = e.clientX - dragStartX;
const dy = e.clientY - dragStartY;
const msPerPx = viewSpan / (width - PADDING.left - PADDING.right);
viewCenter = dragStartCenter - dx * msPerPx;
viewYRatio = dragStartYRatio + dy / height;
const now = performance.now();
pointerSamples.push({ time: now, x: e.clientX, y: e.clientY });
const cutoff = now - VELOCITY_SAMPLE_WINDOW;
pointerSamples = pointerSamples.filter((s) => s.time >= cutoff);
} else {
updateHover();
}
}
function onPointerUp(e: PointerEvent) {
(e.currentTarget as HTMLElement).releasePointerCapture(e.pointerId);
activePointers.delete(e.pointerId);
// End pinch when fewer than 2 fingers remain
if (isPinching) {
if (activePointers.size < 2) {
isPinching = false;
// If one finger remains, reset drag origin to that finger's position
if (activePointers.size === 1) {
const remaining = [...activePointers.values()][0];
isDragging = true;
dragStartX = remaining.x;
dragStartY = remaining.y;
dragStartCenter = viewCenter;
dragStartYRatio = viewYRatio;
pointerSamples = [{ time: performance.now(), x: remaining.x, y: remaining.y }];
}
}
return;
}
isDragging = false;
// Tap detection: short duration + minimal movement → show tooltip
const elapsed = performance.now() - pointerDownTime;
const dist = Math.hypot(e.clientX - pointerDownPos.x, e.clientY - pointerDownPos.y);
if (elapsed < TAP_MAX_DURATION_MS && dist < TAP_MAX_DISTANCE_PX) {
lastPointerClientX = e.clientX;
lastPointerClientY = e.clientY;
pointerOverCanvas = true;
// Bypass the isDragging guard in updateHover since we just cleared it
updateHover();
pointerSamples = [];
return;
}
// Momentum from drag
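// Velocity is converted from px/ms to timeline-ms per ms via msPerPx, so a
// flick produces the same apparent motion at any zoom level.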
if (pointerSamples.length >= 2) {
const first = pointerSamples[0];
const last = pointerSamples[pointerSamples.length - 1];
const dt = last.time - first.time;
if (dt > VELOCITY_MIN_DT) {
const pxPerMsX = (last.x - first.x) / dt;
const msPerPx = viewSpan / (width - PADDING.left - PADDING.right);
panVelocity = -pxPerMsX * msPerPx;
panVelocityY = (last.y - first.y) / dt;
}
}
pointerSamples = [];
}
function onPointerLeave() {
pointerOverCanvas = false;
tooltipVisible = false;
hoverSlotTime = null;
}
function onPointerCancel(e: PointerEvent) {
activePointers.delete(e.pointerId);
if (activePointers.size < 2) isPinching = false;
if (activePointers.size === 0) isDragging = false;
}
function onWheel(e: WheelEvent) {
e.preventDefault();
if (!canvasEl) return;
followEnabled = false;
panVelocity = 0;
panVelocityY = 0;
const rect = canvasEl.getBoundingClientRect();
const mouseX = e.clientX - rect.left;
const chartWidth = width - PADDING.left - PADDING.right;
zoomAnchorTime = xScale.invert(mouseX).getTime();
zoomAnchorRatio = (mouseX - PADDING.left) / chartWidth;
const zoomIn = e.deltaY < 0;
const factor = zoomIn ? 1 / ZOOM_FACTOR : ZOOM_FACTOR;
targetSpan = Math.min(MAX_SPAN_MS, Math.max(MIN_SPAN_MS, targetSpan * factor));
isZoomAnimating = true;
}
function onKeyDown(e: KeyboardEvent) {
const wasCtrl = ctrlHeld;
ctrlHeld = e.ctrlKey || e.metaKey;
if (ctrlHeld !== wasCtrl) updateHover();
switch (e.key) {
case "ArrowLeft":
case "ArrowRight": {
e.preventDefault();
followEnabled = false;
panVelocity = 0;
const ratio = e.ctrlKey ? PAN_STEP_CTRL_RATIO : PAN_STEP_RATIO;
const step = viewSpan * ratio;
if (!isPanAnimating) targetCenter = viewCenter;
targetCenter += e.key === "ArrowRight" ? step : -step;
isPanAnimating = true;
break;
}
case "ArrowUp":
case "ArrowDown": {
e.preventDefault();
if (e.ctrlKey || e.metaKey) {
const direction = e.key === "ArrowUp" ? -1 : 1;
if (!isYPanAnimating) targetYRatio = viewYRatio;
targetYRatio = Math.max(
YRATIO_MIN,
Math.min(YRATIO_MAX, targetYRatio + direction * YRATIO_STEP)
);
isYPanAnimating = true;
} else {
const factor = e.key === "ArrowUp" ? 1 / ZOOM_KEY_FACTOR : ZOOM_KEY_FACTOR;
followEnabled = false;
panVelocity = 0;
zoomAnchorTime = isPanAnimating ? targetCenter : viewCenter;
zoomAnchorRatio = 0.5;
targetSpan = Math.min(MAX_SPAN_MS, Math.max(MIN_SPAN_MS, targetSpan * factor));
isZoomAnimating = true;
}
break;
}
}
}
function onKeyUp(e: KeyboardEvent) {
const wasCtrl = ctrlHeld;
ctrlHeld = e.ctrlKey || e.metaKey;
if (ctrlHeld !== wasCtrl) updateHover();
}
function onWindowBlur() {
const wasCtrl = ctrlHeld;
ctrlHeld = false;
if (wasCtrl) updateHover();
}
function resumeFollow() {
panVelocity = 0;
panVelocityY = 0;
isPanAnimating = false;
isYPanAnimating = false;
targetYRatio = DEFAULT_AXIS_RATIO;
viewYRatio = DEFAULT_AXIS_RATIO;
targetSpan = DEFAULT_SPAN_MS;
isZoomAnimating = true;
zoomAnchorTime = Date.now();
zoomAnchorRatio = 0.5;
followEnabled = true;
}
// ── Resize ──────────────────────────────────────────────────────────
function updateSize() {
if (!containerEl) return;
const rect = containerEl.getBoundingClientRect();
width = rect.width;
height = rect.height;
dpr = window.devicePixelRatio || 1;
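// Size the backing store in device pixels; render() applies ctx.scale(dpr, dpr)
// so all drawing code can keep working in CSS pixels.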
if (canvasEl) {
canvasEl.width = width * dpr;
canvasEl.height = height * dpr;
}
}
// ── Animation loop ──────────────────────────────────────────────────
function tick(timestamp: number) {
const dt = lastFrameTime > 0 ? Math.min(timestamp - lastFrameTime, MAX_DT) : DEFAULT_DT;
lastFrameTime = timestamp;
const friction = Math.pow(PAN_FRICTION, dt / 16);
// Momentum panning
if (
!isDragging &&
(Math.abs(panVelocity) > PAN_STOP_THRESHOLD || Math.abs(panVelocityY) > PAN_STOP_THRESHOLD_Y)
) {
viewCenter += panVelocity * dt;
viewYRatio += (panVelocityY * dt) / height;
panVelocity *= friction;
panVelocityY *= friction;
if (Math.abs(panVelocity) < PAN_STOP_THRESHOLD) panVelocity = 0;
if (Math.abs(panVelocityY) < PAN_STOP_THRESHOLD_Y) panVelocityY = 0;
}
// Smooth zoom
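// Anchor invariant: the time under the cursor keeps its horizontal position
// while the span eases, so the center is recomputed each step as
// zoomAnchorTime + (0.5 - zoomAnchorRatio) * viewSpan.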
if (isZoomAnimating && !isDragging) {
const spanDiff = targetSpan - viewSpan;
if (Math.abs(spanDiff) < ZOOM_SETTLE_THRESHOLD) {
viewSpan = targetSpan;
viewCenter = zoomAnchorTime + (0.5 - zoomAnchorRatio) * viewSpan;
isZoomAnimating = false;
} else {
const zf = 1 - Math.pow(1 - ZOOM_EASE, dt / 16);
viewSpan += spanDiff * zf;
viewCenter = zoomAnchorTime + (0.5 - zoomAnchorRatio) * viewSpan;
}
}
// Keyboard pan
if (isPanAnimating && !isDragging) {
const panDiff = targetCenter - viewCenter;
const msPerPx = viewSpan / (width - PADDING.left - PADDING.right);
if (Math.abs(panDiff) < msPerPx * PAN_SETTLE_THRESHOLD_PX) {
viewCenter = targetCenter;
isPanAnimating = false;
} else {
viewCenter += panDiff * (1 - Math.pow(1 - PAN_EASE, dt / 16));
}
}
// Y-axis pan
if (isYPanAnimating && !isDragging) {
const yDiff = targetYRatio - viewYRatio;
if (Math.abs(yDiff) < YRATIO_SETTLE_THRESHOLD) {
viewYRatio = targetYRatio;
isYPanAnimating = false;
} else {
viewYRatio += yDiff * (1 - Math.pow(1 - PAN_EASE, dt / 16));
}
}
// Follow mode
if (followEnabled && !isDragging) {
const target = Date.now();
viewCenter += (target - viewCenter) * (1 - Math.pow(1 - FOLLOW_EASE, dt / 16));
}
// Step value animations & prune offscreen entries
const result = stepAnimations(animMap, dt, animatedMaxY);
animatedMaxY = result.maxY;
pruneAnimMap(animMap, viewStart, viewEnd, viewSpan);
render();
animationFrameId = requestAnimationFrame(tick);
}
// ── Animation sync ──────────────────────────────────────────────────
$effect(() => {
const slots = data;
const enabled = enabledSubjects;
syncAnimTargets(animMap, slots, enabled);
});
// Request data whenever the visible window changes.
$effect(() => {
store.requestRange(viewStart, viewEnd);
});
// ── Lifecycle ───────────────────────────────────────────────────────
onMount(() => {
updateSize();
const ro = new ResizeObserver(updateSize);
if (containerEl) ro.observe(containerEl);
window.addEventListener("blur", onWindowBlur);
viewCenter = Date.now();
targetCenter = viewCenter;
targetSpan = viewSpan;
canvasEl?.focus();
animationFrameId = requestAnimationFrame(tick);
return () => {
cancelAnimationFrame(animationFrameId);
ro.disconnect();
window.removeEventListener("blur", onWindowBlur);
store.dispose();
};
});
</script>
<div class="absolute inset-0 select-none" bind:this={containerEl}>
<canvas
bind:this={canvasEl}
class="w-full h-full cursor-grab outline-none"
class:cursor-grabbing={isDragging}
style="display: block; touch-action: none;"
tabindex="0"
aria-label="Interactive class schedule timeline chart"
onpointerdown={(e) => { canvasEl?.focus(); onPointerDown(e); }}
onpointermove={onPointerMove}
onpointerup={onPointerUp}
onpointerleave={onPointerLeave}
onpointercancel={onPointerCancel}
onwheel={onWheel}
onkeydown={onKeyDown}
onkeyup={onKeyUp}
></canvas>
<TimelineDrawer
bind:open={drawerOpen}
{enabledSubjects}
{followEnabled}
onToggleSubject={toggleSubject}
onEnableAll={enableAll}
onDisableAll={disableAll}
onResumeFollow={resumeFollow}
/>
<TimelineTooltip
visible={tooltipVisible}
x={tooltipX}
y={tooltipY}
slot={tooltipSlot}
{activeSubjects}
/>
</div>
@@ -0,0 +1,135 @@
<script lang="ts">
import { Filter, X } from "@lucide/svelte";
import { SUBJECTS, SUBJECT_COLORS, type Subject } from "$lib/timeline/data";
import { DRAWER_WIDTH } from "$lib/timeline/constants";
interface Props {
open: boolean;
enabledSubjects: Set<Subject>;
followEnabled: boolean;
onToggleSubject: (subject: Subject) => void;
onEnableAll: () => void;
onDisableAll: () => void;
onResumeFollow: () => void;
}
let {
open = $bindable(),
enabledSubjects,
followEnabled,
onToggleSubject,
onEnableAll,
onDisableAll,
onResumeFollow,
}: Props = $props();
function onKeyDown(e: KeyboardEvent) {
if (e.key === "Escape" && open) {
open = false;
}
}
</script>
<svelte:window onkeydown={onKeyDown} />
<!-- Filter toggle button — slides out when drawer opens -->
<button
class="absolute right-3 z-50 p-2 rounded-md
bg-black text-white dark:bg-white dark:text-black
hover:bg-neutral-800 dark:hover:bg-neutral-200
border border-black/20 dark:border-white/20
shadow-md transition-all duration-200 ease-in-out cursor-pointer
{open ? 'opacity-0 pointer-events-none' : 'opacity-100'}"
style="top: 20%; transform: translateX({open ? '60px' : '0'});"
onclick={() => (open = true)}
aria-label="Open filters"
>
<Filter size={18} strokeWidth={2} />
</button>
<!-- Drawer panel -->
<div
class="absolute right-0 z-40 rounded-l-lg shadow-xl transition-transform duration-200 ease-in-out {open ? '' : 'pointer-events-none'}"
style="top: 20%; width: {DRAWER_WIDTH}px; height: 60%; transform: translateX({open
? 0
: DRAWER_WIDTH}px);"
>
<div
class="h-full flex flex-col bg-background/90 backdrop-blur-md border border-border/40 rounded-l-lg overflow-hidden"
style="width: {DRAWER_WIDTH}px;"
>
<!-- Header -->
<div class="flex items-center justify-between px-3 py-2.5 border-b border-border/40">
<span class="text-xs font-semibold text-foreground">Filters</span>
<button
class="p-0.5 rounded text-muted-foreground hover:text-foreground transition-colors cursor-pointer"
onclick={() => (open = false)}
aria-label="Close filters"
>
<X size={14} strokeWidth={2} />
</button>
</div>
<!-- Follow status -->
<div class="px-3 py-2 border-b border-border/40">
{#if followEnabled}
<div
class="px-2 py-1 rounded-md text-[10px] font-medium text-center
bg-green-500/10 text-green-600 dark:text-green-400 border border-green-500/20"
>
FOLLOWING
</div>
{:else}
<button
class="w-full px-2 py-1 rounded-md text-[10px] font-medium text-center
bg-muted/80 text-muted-foreground hover:text-foreground
border border-border/50 transition-colors cursor-pointer"
onclick={onResumeFollow}
aria-label="Resume following current time"
>
FOLLOW
</button>
{/if}
</div>
<!-- Subject toggles -->
<div class="flex-1 overflow-y-auto px-3 py-2">
<div class="flex items-center justify-between mb-2 text-[10px] text-muted-foreground">
<span class="uppercase tracking-wider font-medium">Subjects</span>
<div class="flex gap-1.5">
<button
class="hover:text-foreground transition-colors cursor-pointer"
onclick={onEnableAll}>All</button
>
<span class="opacity-40">|</span>
<button
class="hover:text-foreground transition-colors cursor-pointer"
onclick={onDisableAll}>None</button
>
</div>
</div>
<div class="space-y-0.5">
{#each SUBJECTS as subject}
{@const enabled = enabledSubjects.has(subject)}
<button
class="flex items-center gap-2 w-full px-1.5 py-1 rounded text-xs
hover:bg-muted/50 transition-colors cursor-pointer text-left"
onclick={() => onToggleSubject(subject)}
>
<span
class="inline-block w-3 h-3 rounded-sm shrink-0 transition-opacity"
style="background: {SUBJECT_COLORS[subject]}; opacity: {enabled ? 1 : 0.2};"
></span>
<span
class="transition-opacity {enabled
? 'text-foreground'
: 'text-muted-foreground/50'}"
>
{subject}
</span>
</button>
{/each}
</div>
</div>
</div>
</div>
@@ -0,0 +1,52 @@
<script lang="ts">
import { timeFormat } from "d3-time-format";
import { SUBJECT_COLORS, type Subject } from "$lib/timeline/data";
import type { TimeSlot } from "$lib/timeline/types";
import { enabledTotalClasses } from "$lib/timeline/viewport";
interface Props {
visible: boolean;
x: number;
y: number;
slot: TimeSlot | null;
activeSubjects: readonly Subject[];
}
let { visible, x, y, slot, activeSubjects }: Props = $props();
const fmtTime = timeFormat("%-I:%M %p");
</script>
{#if visible && slot}
{@const total = enabledTotalClasses(slot, activeSubjects)}
<div
class="pointer-events-none fixed z-50 rounded-lg border border-border/60 bg-background/95
backdrop-blur-sm shadow-lg px-3 py-2 text-xs min-w-[140px]"
style="left: {x + 12}px; top: {y - 10}px; transform: translateY(-100%);"
>
<div class="font-semibold text-foreground mb-1.5">
{fmtTime(slot.time)}
</div>
<div class="space-y-0.5">
{#each activeSubjects as subject}
{@const count = slot.subjects[subject] || 0}
{#if count > 0}
<div class="flex items-center justify-between gap-3">
<div class="flex items-center gap-1.5">
<span
class="inline-block w-2 h-2 rounded-sm"
style="background: {SUBJECT_COLORS[subject]}"
></span>
<span class="text-muted-foreground">{subject}</span>
</div>
<span class="font-medium tabular-nums">{count}</span>
</div>
{/if}
{/each}
</div>
<div class="mt-1.5 pt-1.5 border-t border-border/40 flex justify-between font-medium">
<span>Total</span>
<span class="tabular-nums">{total}</span>
</div>
</div>
{/if}
@@ -0,0 +1,118 @@
import {
type RowData,
type TableOptions,
type TableOptionsResolved,
type TableState,
createTable,
} from "@tanstack/table-core";
/**
* Creates a reactive TanStack table for Svelte 5 using runes.
*
* Adapted from shadcn-svelte's data-table wrapper — uses `$state` and
* `$effect.pre` instead of Svelte stores for reactivity.
*/
export function createSvelteTable<TData extends RowData>(options: TableOptions<TData>) {
const resolvedOptions: TableOptionsResolved<TData> = mergeObjects(
{
state: {},
onStateChange() {},
renderFallbackValue: null,
mergeOptions: (
defaultOptions: TableOptions<TData>,
options: Partial<TableOptions<TData>>
) => {
return mergeObjects(defaultOptions, options);
},
},
options
);
const table = createTable(resolvedOptions);
let state = $state<Partial<TableState>>(table.initialState);
function updateOptions() {
table.setOptions((prev) => {
return mergeObjects(prev, options, {
state: mergeObjects(state, options.state || {}),
// eslint-disable-next-line @typescript-eslint/no-explicit-any
onStateChange: (updater: any) => {
if (updater instanceof Function) state = updater(state);
else state = { ...state, ...(updater as Partial<TableState>) };
options.onStateChange?.(updater);
},
});
});
}
updateOptions();
$effect.pre(() => {
updateOptions();
});
return table;
}
type MaybeThunk<T extends object> = T | (() => T | null | undefined);
type Intersection<T extends readonly unknown[]> = (T extends [infer H, ...infer R]
? H & Intersection<R>
: unknown) & {};
/**
* Lazily merges several objects (or thunks) while preserving
* getter semantics from every source. Proxy-based.
*/
// eslint-disable-next-line @typescript-eslint/no-explicit-any
export function mergeObjects<Sources extends readonly MaybeThunk<any>[]>(
...sources: Sources
): Intersection<{ [K in keyof Sources]: Sources[K] }> {
const resolve = <T extends object>(src: MaybeThunk<T>): T | undefined =>
typeof src === "function" ? (src() ?? undefined) : src;
const findSourceWithKey = (key: PropertyKey) => {
for (let i = sources.length - 1; i >= 0; i--) {
const obj = resolve(sources[i]);
if (obj && key in obj) return obj;
}
return undefined;
};
return new Proxy(Object.create(null), {
get(_, key) {
const src = findSourceWithKey(key);
return src?.[key as never];
},
has(_, key) {
return !!findSourceWithKey(key);
},
ownKeys(): (string | symbol)[] {
const all = new Set<string | symbol>();
for (const s of sources) {
const obj = resolve(s);
if (obj) {
for (const k of Reflect.ownKeys(obj) as (string | symbol)[]) {
all.add(k);
}
}
}
return [...all];
},
getOwnPropertyDescriptor(_, key) {
const src = findSourceWithKey(key);
if (!src) return undefined;
return {
configurable: true,
enumerable: true,
// eslint-disable-next-line @typescript-eslint/no-explicit-any
value: (src as any)[key],
writable: true,
};
},
}) as Intersection<{ [K in keyof Sources]: Sources[K] }>;
}
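// A minimal usage sketch — `rows` and `columns` are hypothetical, not part of
// this file:
//
//   import { getCoreRowModel } from "@tanstack/table-core";
//
//   const table = createSvelteTable({
//     get data() { return rows; },   // getter keeps $state reactivity live
//     columns,
//     getCoreRowModel: getCoreRowModel(),
//   });
//
// Because mergeObjects preserves getter semantics, reading table.options.data
// re-reads `rows` on every access instead of capturing a stale snapshot.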
@@ -0,0 +1,54 @@
<script lang="ts" module>
// eslint-disable-next-line @typescript-eslint/no-explicit-any
export type FlexRenderProps<TProps = any> = {
content: unknown;
context: TProps;
};
</script>
<script lang="ts">
import { isRenderComponentConfig, isRenderSnippetConfig, mountComponent } from "./render-helpers.js";
let { content, context }: FlexRenderProps = $props();
function renderAction(node: HTMLElement, contentVal: typeof content) {
let cleanup: (() => void) | undefined;
function render(c: typeof content) {
cleanup?.();
node.textContent = "";
if (isRenderComponentConfig(c)) {
cleanup = mountComponent(c.component, node, c.props as Record<string, unknown>);
}
}
render(contentVal);
return {
update(newContent: typeof content) {
render(newContent);
},
destroy() {
cleanup?.();
},
};
}
</script>
{#if isRenderSnippetConfig(content)}
{@render content.snippet(content.props)}
{:else if isRenderComponentConfig(content)}
<div use:renderAction={content}></div>
{:else if typeof content === "function"}
{@const result = content(context)}
{#if isRenderComponentConfig(result)}
<div use:renderAction={result}></div>
{:else if isRenderSnippetConfig(result)}
{@render result.snippet(result.props)}
{:else if typeof result === "string" || typeof result === "number"}
{result}
{/if}
{:else if typeof content === "string" || typeof content === "number"}
{content}
{/if}
@@ -0,0 +1,3 @@
export { default as FlexRender } from "./flex-render.svelte";
export { renderComponent, renderSnippet } from "./render-helpers.js";
export { createSvelteTable } from "./data-table.svelte.js";
@@ -0,0 +1,67 @@
import { type Component, type Snippet, mount, unmount } from "svelte";
/**
* Wraps a Svelte component so TanStack Table can render it as a column
* header or cell. Returns a `RenderComponentConfig` that `FlexRender`
* picks up.
*/
export function renderComponent<
TProps extends Record<string, unknown>,
TComp extends Component<TProps>,
>(component: TComp, props: TProps) {
return {
component,
props,
[RENDER_COMPONENT_SYMBOL]: true,
} as const;
}
/**
* Wraps a Svelte 5 raw snippet for use in TanStack Table column defs.
*/
export function renderSnippet<TProps>(snippet: Snippet<[TProps]>, props: TProps) {
return {
snippet,
props,
[RENDER_SNIPPET_SYMBOL]: true,
} as const;
}
// Symbols for FlexRender to detect render types
export const RENDER_COMPONENT_SYMBOL = Symbol("renderComponent");
export const RENDER_SNIPPET_SYMBOL = Symbol("renderSnippet");
export type RenderComponentConfig<
TProps extends Record<string, unknown> = Record<string, unknown>,
> = {
component: Component<TProps>;
props: TProps;
[RENDER_COMPONENT_SYMBOL]: true;
};
export type RenderSnippetConfig<TProps = unknown> = {
snippet: Snippet<[TProps]>;
props: TProps;
[RENDER_SNIPPET_SYMBOL]: true;
};
export function isRenderComponentConfig(value: unknown): value is RenderComponentConfig {
return typeof value === "object" && value !== null && RENDER_COMPONENT_SYMBOL in value;
}
export function isRenderSnippetConfig(value: unknown): value is RenderSnippetConfig {
return typeof value === "object" && value !== null && RENDER_SNIPPET_SYMBOL in value;
}
/**
* Mount a Svelte component imperatively into a target element.
* Used by FlexRender for component-type cells.
*/
export function mountComponent<TProps extends Record<string, unknown>>(
component: Component<TProps>,
target: HTMLElement,
props: TProps
) {
const instance = mount(component, { target, props });
return () => unmount(instance);
}
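// Usage sketch in a column def (StatusBadge and the accessor are hypothetical;
// any component with matching props works):
//
//   {
//     accessorKey: "status",
//     cell: ({ row }) => renderComponent(StatusBadge, { status: row.original.status }),
//   }
//
// FlexRender detects the symbol-tagged config and mounts the component into
// the cell via mountComponent.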
@@ -0,0 +1,32 @@
/**
* Reactive clipboard copy with automatic "copied" state reset.
*
* Returns a `copiedValue` that is non-null while the copied feedback
* should be displayed, and a `copy()` function to trigger a copy.
*/
export function useClipboard(resetMs = 2000) {
let copiedValue = $state<string | null>(null);
let timeoutId: number | undefined;
async function copy(text: string, event?: MouseEvent | KeyboardEvent) {
event?.stopPropagation();
try {
await navigator.clipboard.writeText(text);
clearTimeout(timeoutId);
copiedValue = text;
timeoutId = window.setTimeout(() => {
copiedValue = null;
timeoutId = undefined;
}, resetMs);
} catch (err) {
console.error("Failed to copy to clipboard:", err);
}
}
return {
get copiedValue() {
return copiedValue;
},
copy,
};
}
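// Usage sketch inside a component (`crn` and the markup are illustrative):
//
//   const clipboard = useClipboard();
//
//   <button onclick={(e) => clipboard.copy(crn, e)}>
//     {clipboard.copiedValue === crn ? "Copied!" : "Copy CRN"}
//   </button>
//
// Passing the event stops propagation, so copy buttons inside clickable rows
// don't also trigger the row's own handler.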
@@ -0,0 +1,37 @@
import { onMount } from "svelte";
import { OverlayScrollbars, type PartialOptions } from "overlayscrollbars";
import { themeStore } from "$lib/stores/theme.svelte";
/**
* Set up OverlayScrollbars on an element with automatic theme reactivity.
*
* Must be called during component initialization (uses `onMount` internally).
* The scrollbar theme automatically syncs with `themeStore.isDark`.
*/
export function useOverlayScrollbars(getElement: () => HTMLElement, options: PartialOptions = {}) {
onMount(() => {
const element = getElement();
const osInstance = OverlayScrollbars(element, {
...options,
scrollbars: {
...options.scrollbars,
theme: themeStore.isDark ? "os-theme-dark" : "os-theme-light",
},
});
const unwatch = $effect.root(() => {
$effect(() => {
osInstance.options({
scrollbars: {
theme: themeStore.isDark ? "os-theme-dark" : "os-theme-light",
},
});
});
});
return () => {
unwatch();
osInstance.destroy();
};
});
}
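// Usage sketch (element binding is illustrative):
//
//   let scrollEl: HTMLDivElement;
//   useOverlayScrollbars(() => scrollEl, { overflow: { x: "hidden" } });
//
// The getter indirection lets the element reference be bound after component
// initialization, since onMount only runs once the DOM node exists.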
@@ -0,0 +1,413 @@
import { describe, it, expect } from "vitest";
import {
formatTime,
formatTimeRange,
formatMeetingDays,
formatMeetingDaysVerbose,
formatMeetingTime,
formatMeetingTimeTooltip,
formatMeetingTimesTooltip,
abbreviateInstructor,
formatCreditHours,
getPrimaryInstructor,
isMeetingTimeTBA,
isTimeTBA,
formatDate,
formatDateShort,
formatMeetingDaysLong,
} from "$lib/course";
import type { DbMeetingTime, CourseResponse, InstructorResponse } from "$lib/api";
function makeMeetingTime(overrides: Partial<DbMeetingTime> = {}): DbMeetingTime {
return {
begin_time: null,
end_time: null,
start_date: "2024-08-26",
end_date: "2024-12-12",
monday: false,
tuesday: false,
wednesday: false,
thursday: false,
friday: false,
saturday: false,
sunday: false,
building: null,
building_description: null,
room: null,
campus: null,
meeting_type: "CLAS",
meeting_schedule_type: "LEC",
...overrides,
};
}
describe("formatTime", () => {
it("converts 0900 to 9:00 AM", () => expect(formatTime("0900")).toBe("9:00 AM"));
it("converts 1330 to 1:30 PM", () => expect(formatTime("1330")).toBe("1:30 PM"));
it("converts 0000 to 12:00 AM", () => expect(formatTime("0000")).toBe("12:00 AM"));
it("converts 1200 to 12:00 PM", () => expect(formatTime("1200")).toBe("12:00 PM"));
it("converts 2359 to 11:59 PM", () => expect(formatTime("2359")).toBe("11:59 PM"));
it("returns TBA for null", () => expect(formatTime(null)).toBe("TBA"));
it("returns TBA for empty string", () => expect(formatTime("")).toBe("TBA"));
it("returns TBA for short string", () => expect(formatTime("09")).toBe("TBA"));
});
describe("formatMeetingDays", () => {
it("returns MWF for mon/wed/fri", () => {
expect(
formatMeetingDays(makeMeetingTime({ monday: true, wednesday: true, friday: true }))
).toBe("MWF");
});
it("returns TTh for tue/thu", () => {
expect(formatMeetingDays(makeMeetingTime({ tuesday: true, thursday: true }))).toBe("TTh");
});
it("returns MW for mon/wed", () => {
expect(formatMeetingDays(makeMeetingTime({ monday: true, wednesday: true }))).toBe("MW");
});
it("returns MTWThF for all weekdays", () => {
expect(
formatMeetingDays(
makeMeetingTime({
monday: true,
tuesday: true,
wednesday: true,
thursday: true,
friday: true,
})
)
).toBe("MTWThF");
});
it("returns partial abbreviation for single day", () => {
expect(formatMeetingDays(makeMeetingTime({ monday: true }))).toBe("Mon");
expect(formatMeetingDays(makeMeetingTime({ thursday: true }))).toBe("Thu");
expect(formatMeetingDays(makeMeetingTime({ saturday: true }))).toBe("Sat");
});
it("concatenates codes for other multi-day combos", () => {
expect(formatMeetingDays(makeMeetingTime({ monday: true, friday: true }))).toBe("MF");
expect(formatMeetingDays(makeMeetingTime({ tuesday: true, saturday: true }))).toBe("TSa");
expect(
formatMeetingDays(makeMeetingTime({ wednesday: true, friday: true, sunday: true }))
).toBe("WFSu");
expect(
formatMeetingDays(
makeMeetingTime({ monday: true, tuesday: true, wednesday: true, thursday: true })
)
).toBe("MTWTh");
});
it("returns empty string when no days", () => {
expect(formatMeetingDays(makeMeetingTime())).toBe("");
});
});
describe("formatTimeRange", () => {
it("elides AM when both times are AM", () => {
expect(formatTimeRange("0900", "0950")).toBe("9:009:50 AM");
});
it("elides PM when both times are PM", () => {
expect(formatTimeRange("1315", "1430")).toBe("1:152:30 PM");
});
it("keeps both markers when crossing noon", () => {
expect(formatTimeRange("1130", "1220")).toBe("11:30 AM12:20 PM");
});
it("returns TBA for null begin", () => {
expect(formatTimeRange(null, "0950")).toBe("TBA");
});
it("returns TBA for null end", () => {
expect(formatTimeRange("0900", null)).toBe("TBA");
});
it("handles midnight and noon", () => {
expect(formatTimeRange("0000", "0050")).toBe("12:0012:50 AM");
expect(formatTimeRange("1200", "1250")).toBe("12:0012:50 PM");
});
});
describe("formatMeetingTime", () => {
it("formats a standard meeting time with elided AM/PM", () => {
expect(
formatMeetingTime(
makeMeetingTime({
monday: true,
wednesday: true,
friday: true,
begin_time: "0900",
end_time: "0950",
})
)
).toBe("MWF 9:009:50 AM");
});
it("keeps both markers when crossing noon", () => {
expect(
formatMeetingTime(
makeMeetingTime({
tuesday: true,
thursday: true,
begin_time: "1130",
end_time: "1220",
})
)
).toBe("TTh 11:30 AM12:20 PM");
});
it("returns TBA when no days", () => {
expect(formatMeetingTime(makeMeetingTime({ begin_time: "0900", end_time: "0950" }))).toBe(
"TBA"
);
});
it("returns days + TBA when no times", () => {
expect(formatMeetingTime(makeMeetingTime({ monday: true }))).toBe("Mon TBA");
});
});
describe("abbreviateInstructor", () => {
it("returns short names unabbreviated", () =>
expect(abbreviateInstructor("Li, Bo")).toBe("Li, Bo"));
it("returns names within budget unabbreviated", () =>
expect(abbreviateInstructor("Heaps, John")).toBe("Heaps, John"));
it("handles no comma", () => expect(abbreviateInstructor("Staff")).toBe("Staff"));
// Progressive abbreviation with multiple given names
it("abbreviates trailing given names first", () =>
expect(abbreviateInstructor("Ramirez, Maria Elena")).toBe("Ramirez, Maria E."));
it("abbreviates all given names when needed", () =>
expect(abbreviateInstructor("Ramirez, Maria Elena", 16)).toBe("Ramirez, M. E."));
it("falls back to first initial only", () =>
expect(abbreviateInstructor("Ramirez, Maria Elena", 12)).toBe("Ramirez, M."));
// Single given name that exceeds budget
it("abbreviates single given name when over budget", () =>
expect(abbreviateInstructor("Bartholomew, Christopher", 18)).toBe("Bartholomew, C."));
// Respects custom maxLen
it("keeps full name when within custom budget", () =>
expect(abbreviateInstructor("Ramirez, Maria Elena", 30)).toBe("Ramirez, Maria Elena"));
it("always abbreviates when budget is tiny", () =>
expect(abbreviateInstructor("Heaps, John", 5)).toBe("Heaps, J."));
});
describe("getPrimaryInstructor", () => {
it("returns primary instructor", () => {
const instructors: InstructorResponse[] = [
{
instructorId: 1,
bannerId: "1",
displayName: "A",
email: "a@utsa.edu",
isPrimary: false,
rmpRating: null,
rmpNumRatings: null,
rmpLegacyId: null,
},
{
instructorId: 2,
bannerId: "2",
displayName: "B",
email: "b@utsa.edu",
isPrimary: true,
rmpRating: null,
rmpNumRatings: null,
rmpLegacyId: null,
},
];
expect(getPrimaryInstructor(instructors)?.displayName).toBe("B");
});
it("returns first instructor when no primary", () => {
const instructors: InstructorResponse[] = [
{
instructorId: 3,
bannerId: "1",
displayName: "A",
email: "a@utsa.edu",
isPrimary: false,
rmpRating: null,
rmpNumRatings: null,
rmpLegacyId: null,
},
];
expect(getPrimaryInstructor(instructors)?.displayName).toBe("A");
});
it("returns undefined for empty array", () => {
expect(getPrimaryInstructor([])).toBeUndefined();
});
});
describe("formatCreditHours", () => {
it("returns creditHours when set", () => {
expect(
formatCreditHours({
creditHours: 3,
creditHourLow: null,
creditHourHigh: null,
} as CourseResponse)
).toBe("3");
});
it("returns range when variable", () => {
expect(
formatCreditHours({
creditHours: null,
creditHourLow: 1,
creditHourHigh: 3,
} as CourseResponse)
).toBe("13");
});
it("returns dash when no credit info", () => {
expect(
formatCreditHours({
creditHours: null,
creditHourLow: null,
creditHourHigh: null,
} as CourseResponse)
).toBe("—");
});
});
describe("isMeetingTimeTBA", () => {
it("returns true when no days set", () => {
expect(isMeetingTimeTBA(makeMeetingTime())).toBe(true);
});
it("returns false when any day is set", () => {
expect(isMeetingTimeTBA(makeMeetingTime({ monday: true }))).toBe(false);
});
it("returns false when multiple days set", () => {
expect(isMeetingTimeTBA(makeMeetingTime({ tuesday: true, thursday: true }))).toBe(false);
});
});
describe("isTimeTBA", () => {
it("returns true when begin_time is null", () => {
expect(isTimeTBA(makeMeetingTime())).toBe(true);
});
it("returns true when begin_time is empty", () => {
expect(isTimeTBA(makeMeetingTime({ begin_time: "" }))).toBe(true);
});
it("returns true when begin_time is short", () => {
expect(isTimeTBA(makeMeetingTime({ begin_time: "09" }))).toBe(true);
});
it("returns false when begin_time is valid", () => {
expect(isTimeTBA(makeMeetingTime({ begin_time: "0900" }))).toBe(false);
});
});
describe("formatDate", () => {
it("formats standard date", () => {
expect(formatDate("2024-08-26")).toBe("August 26, 2024");
});
it("formats December date", () => {
expect(formatDate("2024-12-12")).toBe("December 12, 2024");
});
it("formats January 1st", () => {
expect(formatDate("2026-01-01")).toBe("January 1, 2026");
});
it("formats MM/DD/YYYY date", () => {
expect(formatDate("01/20/2026")).toBe("January 20, 2026");
});
it("formats MM/DD/YYYY with May", () => {
expect(formatDate("05/13/2026")).toBe("May 13, 2026");
});
it("returns original string for invalid input", () => {
expect(formatDate("bad-date")).toBe("bad-date");
});
});
describe("formatMeetingDaysLong", () => {
it("returns full plural for single day", () => {
expect(formatMeetingDaysLong(makeMeetingTime({ thursday: true }))).toBe("Thursdays");
});
it("returns full plural for Monday only", () => {
expect(formatMeetingDaysLong(makeMeetingTime({ monday: true }))).toBe("Mondays");
});
it("returns semi-abbreviated for multiple days", () => {
expect(
formatMeetingDaysLong(makeMeetingTime({ monday: true, wednesday: true, friday: true }))
).toBe("Mon, Wed, Fri");
});
it("returns semi-abbreviated for TR", () => {
expect(formatMeetingDaysLong(makeMeetingTime({ tuesday: true, thursday: true }))).toBe(
"Tue, Thur"
);
});
it("returns empty string when no days", () => {
expect(formatMeetingDaysLong(makeMeetingTime())).toBe("");
});
});
describe("formatDateShort", () => {
it("formats YYYY-MM-DD to short", () => {
expect(formatDateShort("2024-08-26")).toBe("Aug 26, 2024");
});
it("formats MM/DD/YYYY to short", () => {
expect(formatDateShort("12/12/2024")).toBe("Dec 12, 2024");
});
it("returns original for invalid", () => {
expect(formatDateShort("bad")).toBe("bad");
});
});
describe("formatMeetingDaysVerbose", () => {
it("returns plural for single day", () => {
expect(formatMeetingDaysVerbose(makeMeetingTime({ thursday: true }))).toBe("Thursdays");
});
it("joins two days with ampersand", () => {
expect(formatMeetingDaysVerbose(makeMeetingTime({ tuesday: true, thursday: true }))).toBe(
"Tuesdays & Thursdays"
);
});
it("uses Oxford-style ampersand for 3+ days", () => {
expect(
formatMeetingDaysVerbose(makeMeetingTime({ monday: true, wednesday: true, friday: true }))
).toBe("Mondays, Wednesdays & Fridays");
});
it("returns empty string when no days", () => {
expect(formatMeetingDaysVerbose(makeMeetingTime())).toBe("");
});
});
describe("formatMeetingTimeTooltip", () => {
it("formats full tooltip with location and dates", () => {
const mt = makeMeetingTime({
tuesday: true,
thursday: true,
begin_time: "1615",
end_time: "1730",
building_description: "Main Hall",
room: "2.206",
});
expect(formatMeetingTimeTooltip(mt)).toBe(
"Tuesdays & Thursdays, 4:155:30 PM\nMain Hall 2.206, Aug 26, 2024 Dec 12, 2024"
);
});
it("handles TBA days and times", () => {
expect(formatMeetingTimeTooltip(makeMeetingTime())).toBe("TBA\nAug 26, 2024 Dec 12, 2024");
});
it("handles days with TBA times", () => {
expect(formatMeetingTimeTooltip(makeMeetingTime({ monday: true }))).toBe(
"Mondays, TBA\nAug 26, 2024 Dec 12, 2024"
);
});
});
describe("formatMeetingTimesTooltip", () => {
it("returns TBA for empty array", () => {
expect(formatMeetingTimesTooltip([])).toBe("TBA");
});
it("joins multiple meetings with blank line", () => {
const mts = [
makeMeetingTime({
monday: true,
wednesday: true,
friday: true,
begin_time: "0900",
end_time: "0950",
}),
makeMeetingTime({
thursday: true,
begin_time: "1300",
end_time: "1400",
building_description: "Lab",
room: "101",
}),
];
const result = formatMeetingTimesTooltip(mts);
expect(result).toContain("Mondays, Wednesdays & Fridays, 9:009:50 AM");
expect(result).toContain("Thursdays, 1:002:00 PM\nLab 101");
expect(result).toContain("\n\n");
});
});
@@ -0,0 +1,438 @@
import type { DbMeetingTime, CourseResponse, InstructorResponse } from "$lib/api";
/** Convert "0900" to "9:00 AM" */
export function formatTime(time: string | null): string {
if (!time || time.length !== 4) return "TBA";
const hours = parseInt(time.slice(0, 2), 10);
const minutes = time.slice(2);
const period = hours >= 12 ? "PM" : "AM";
const display = hours > 12 ? hours - 12 : hours === 0 ? 12 : hours;
return `${display}:${minutes} ${period}`;
}
/**
* Compact day abbreviation for table cells.
*
* Single day → 3-letter: "Mon", "Thu"
* Multi-day → concatenated codes: "MWF", "TTh", "MTWTh", "TSa"
*
* Codes use single letters where unambiguous (M/T/W/F) and
* two letters where needed (Th/Sa/Su).
*/
export function formatMeetingDays(mt: DbMeetingTime): string {
const dayDefs: [boolean, string, string][] = [
[mt.monday, "M", "Mon"],
[mt.tuesday, "T", "Tue"],
[mt.wednesday, "W", "Wed"],
[mt.thursday, "Th", "Thu"],
[mt.friday, "F", "Fri"],
[mt.saturday, "Sa", "Sat"],
[mt.sunday, "Su", "Sun"],
];
const active = dayDefs.filter(([a]) => a);
if (active.length === 0) return "";
if (active.length === 1) return active[0][2];
return active.map(([, code]) => code).join("");
}
/** Longer day names for detail view: single day → "Thursdays", multiple → "Mon, Wed, Fri" */
export function formatMeetingDaysLong(mt: DbMeetingTime): string {
const days: [boolean, string, string][] = [
[mt.monday, "Mon", "Mondays"],
[mt.tuesday, "Tue", "Tuesdays"],
[mt.wednesday, "Wed", "Wednesdays"],
[mt.thursday, "Thur", "Thursdays"],
[mt.friday, "Fri", "Fridays"],
[mt.saturday, "Sat", "Saturdays"],
[mt.sunday, "Sun", "Sundays"],
];
const active = days.filter(([a]) => a);
if (active.length === 0) return "";
if (active.length === 1) return active[0][2];
return active.map(([, short]) => short).join(", ");
}
/**
* Format a time range with smart AM/PM elision.
*
* Same period: "9:00–9:50 AM"
* Cross-period: "11:30 AM–12:20 PM"
* Missing: "TBA"
*/
export function formatTimeRange(begin: string | null, end: string | null): string {
if (!begin || begin.length !== 4 || !end || end.length !== 4) return "TBA";
const bHours = parseInt(begin.slice(0, 2), 10);
const eHours = parseInt(end.slice(0, 2), 10);
const bPeriod = bHours >= 12 ? "PM" : "AM";
const ePeriod = eHours >= 12 ? "PM" : "AM";
const bDisplay = bHours > 12 ? bHours - 12 : bHours === 0 ? 12 : bHours;
const eDisplay = eHours > 12 ? eHours - 12 : eHours === 0 ? 12 : eHours;
const endStr = `${eDisplay}:${end.slice(2)} ${ePeriod}`;
if (bPeriod === ePeriod) {
return `${bDisplay}:${begin.slice(2)}–${endStr}`;
}
return `${bDisplay}:${begin.slice(2)} ${bPeriod}–${endStr}`;
}
/** Condensed meeting time: "MWF 9:00–9:50 AM" */
export function formatMeetingTime(mt: DbMeetingTime): string {
const days = formatMeetingDays(mt);
if (!days) return "TBA";
const range = formatTimeRange(mt.begin_time, mt.end_time);
if (range === "TBA") return `${days} TBA`;
return `${days} ${range}`;
}
/**
* Progressively abbreviate an instructor name to fit within a character budget.
*
* Tries each level until the result fits `maxLen`:
* 1. Full name: "Ramirez, Maria Elena"
* 2. Abbreviate trailing given names: "Ramirez, Maria E."
* 3. Abbreviate all given names: "Ramirez, M. E."
* 4. First initial only: "Ramirez, M."
*
* Names without a comma (e.g. "Staff") are returned as-is.
*/
export function abbreviateInstructor(name: string, maxLen: number = 18): string {
if (name.length <= maxLen) return name;
const commaIdx = name.indexOf(", ");
if (commaIdx === -1) return name;
const last = name.slice(0, commaIdx);
const parts = name.slice(commaIdx + 2).split(" ");
// Level 2: abbreviate trailing given names, keep first given name intact
// "Maria Elena" → "Maria E."
if (parts.length > 1) {
const abbreviated = [parts[0], ...parts.slice(1).map((p) => `${p[0]}.`)].join(" ");
const result = `${last}, ${abbreviated}`;
if (result.length <= maxLen) return result;
}
// Level 3: abbreviate all given names
// "Maria Elena" → "M. E."
if (parts.length > 1) {
const allInitials = parts.map((p) => `${p[0]}.`).join(" ");
const result = `${last}, ${allInitials}`;
if (result.length <= maxLen) return result;
}
// Level 4: first initial only
// "Maria Elena" → "M." or "John" → "J."
return `${last}, ${parts[0][0]}.`;
}
/** Get primary instructor from a course, or first instructor */
export function getPrimaryInstructor(
instructors: InstructorResponse[]
): InstructorResponse | undefined {
return instructors.find((i) => i.isPrimary) ?? instructors[0];
}
/** Check if a meeting time has no scheduled days */
export function isMeetingTimeTBA(mt: DbMeetingTime): boolean {
return (
!mt.monday &&
!mt.tuesday &&
!mt.wednesday &&
!mt.thursday &&
!mt.friday &&
!mt.saturday &&
!mt.sunday
);
}
/** Check if a meeting time has no begin/end times */
export function isTimeTBA(mt: DbMeetingTime): boolean {
return !mt.begin_time || mt.begin_time.length !== 4;
}
/** Format a date string to "January 20, 2026". Accepts YYYY-MM-DD or MM/DD/YYYY. */
export function formatDate(dateStr: string): string {
let year: number, month: number, day: number;
if (dateStr.includes("-")) {
[year, month, day] = dateStr.split("-").map(Number);
} else if (dateStr.includes("/")) {
[month, day, year] = dateStr.split("/").map(Number);
} else {
return dateStr;
}
if (!year || !month || !day) return dateStr;
const date = new Date(year, month - 1, day);
return date.toLocaleDateString("en-US", { year: "numeric", month: "long", day: "numeric" });
}
/** Short location string from first meeting time: "MH 2.206" or campus fallback */
export function formatLocation(course: CourseResponse): string | null {
for (const mt of course.meetingTimes) {
if (mt.building && mt.room) return `${mt.building} ${mt.room}`;
if (mt.building) return mt.building;
}
return course.campus ?? null;
}
/** Longer location string using building description: "Main Hall 2.206" */
export function formatLocationLong(mt: DbMeetingTime): string | null {
const name = mt.building_description ?? mt.building;
if (!name) return null;
return mt.room ? `${name} ${mt.room}` : name;
}
/** Format a date as "Aug 26, 2024". Accepts YYYY-MM-DD or MM/DD/YYYY. */
export function formatDateShort(dateStr: string): string {
let year: number, month: number, day: number;
if (dateStr.includes("-")) {
[year, month, day] = dateStr.split("-").map(Number);
} else if (dateStr.includes("/")) {
[month, day, year] = dateStr.split("/").map(Number);
} else {
return dateStr;
}
if (!year || !month || !day) return dateStr;
const date = new Date(year, month - 1, day);
return date.toLocaleDateString("en-US", { year: "numeric", month: "short", day: "numeric" });
}
/**
* Verbose day names for tooltips: "Tuesdays & Thursdays", "Mondays, Wednesdays & Fridays".
* Single day → plural: "Thursdays".
*/
export function formatMeetingDaysVerbose(mt: DbMeetingTime): string {
const dayDefs: [boolean, string][] = [
[mt.monday, "Mondays"],
[mt.tuesday, "Tuesdays"],
[mt.wednesday, "Wednesdays"],
[mt.thursday, "Thursdays"],
[mt.friday, "Fridays"],
[mt.saturday, "Saturdays"],
[mt.sunday, "Sundays"],
];
const active = dayDefs.filter(([a]) => a).map(([, name]) => name);
if (active.length === 0) return "";
if (active.length === 1) return active[0];
return active.slice(0, -1).join(", ") + " & " + active[active.length - 1];
}
/**
* Full verbose tooltip for a single meeting time:
* "Tuesdays & Thursdays, 4:155:30 PM\nMain Hall 2.206 · Aug 26 Dec 12, 2024"
*/
export function formatMeetingTimeTooltip(mt: DbMeetingTime): string {
const days = formatMeetingDaysVerbose(mt);
const range = formatTimeRange(mt.begin_time, mt.end_time);
let line1: string;
if (!days && range === "TBA") {
line1 = "TBA";
} else if (!days) {
line1 = range;
} else if (range === "TBA") {
line1 = `${days}, TBA`;
} else {
line1 = `${days}, ${range}`;
}
const parts = [line1];
const loc = formatLocationLong(mt);
const dateRange =
mt.start_date && mt.end_date
? `${formatDateShort(mt.start_date)} – ${formatDateShort(mt.end_date)}`
: null;
if (loc && dateRange) {
parts.push(`${loc}, ${dateRange}`);
} else if (loc) {
parts.push(loc);
} else if (dateRange) {
parts.push(dateRange);
}
return parts.join("\n");
}
/** Full verbose tooltip for all meeting times on a course, newline-separated. */
export function formatMeetingTimesTooltip(meetingTimes: DbMeetingTime[]): string {
if (meetingTimes.length === 0) return "TBA";
return meetingTimes.map(formatMeetingTimeTooltip).join("\n\n");
}
/**
* Delivery concern category for visual accent on location cells.
* - "online": fully online with no physical location (OA, OS, OH without INT building)
* - "internet": internet campus with INT building code
* - "hybrid": mix of online and in-person (HB, H1, H2)
* - "off-campus": in-person but not on Main Campus
* - null: normal in-person on main campus (no accent)
*/
export type DeliveryConcern = "online" | "internet" | "hybrid" | "off-campus" | null;
const ONLINE_METHODS = new Set(["OA", "OS", "OH"]);
const HYBRID_METHODS = new Set(["HB", "H1", "H2"]);
const MAIN_CAMPUS = "11";
const ONLINE_CAMPUSES = new Set(["9", "ONL"]);
export function getDeliveryConcern(course: CourseResponse): DeliveryConcern {
const method = course.instructionalMethod;
if (method && ONLINE_METHODS.has(method)) {
const hasIntBuilding = course.meetingTimes.some((mt: DbMeetingTime) => mt.building === "INT");
return hasIntBuilding ? "internet" : "online";
}
if (method && HYBRID_METHODS.has(method)) return "hybrid";
if (course.campus && course.campus !== MAIN_CAMPUS && !ONLINE_CAMPUSES.has(course.campus)) {
return "off-campus";
}
return null;
}
/** Border accent color for each delivery concern type. */
export function concernAccentColor(concern: DeliveryConcern): string | null {
switch (concern) {
case "online":
return "#3b82f6"; // blue-500
case "internet":
return "#06b6d4"; // cyan-500
case "hybrid":
return "#a855f7"; // purple-500
case "off-campus":
return "#f59e0b"; // amber-500
default:
return null;
}
}
/**
* Location display text for the table cell.
* Falls back to "Online" for online courses instead of showing a dash.
*/
export function formatLocationDisplay(course: CourseResponse): string | null {
const loc = formatLocation(course);
if (loc) return loc;
const concern = getDeliveryConcern(course);
if (concern === "online") return "Online";
return null;
}
/** Tooltip text for the location column: long-form location + delivery note */
export function formatLocationTooltip(course: CourseResponse): string | null {
const parts: string[] = [];
for (const mt of course.meetingTimes) {
const loc = formatLocationLong(mt);
if (loc && !parts.includes(loc)) parts.push(loc);
}
const locationLine = parts.length > 0 ? parts.join(", ") : null;
const concern = getDeliveryConcern(course);
let deliveryNote: string | null = null;
if (concern === "online") deliveryNote = "Online";
else if (concern === "internet") deliveryNote = "Internet";
else if (concern === "hybrid") deliveryNote = "Hybrid";
else if (concern === "off-campus") deliveryNote = "Off-campus";
if (locationLine && deliveryNote) return `${locationLine}\n${deliveryNote}`;
if (locationLine) return locationLine;
if (deliveryNote) return deliveryNote;
return null;
}
/** Number of open seats in a course section */
export function openSeats(course: CourseResponse): number {
return Math.max(0, course.maxEnrollment - course.enrollment);
}
/** Text color class for seat availability: red (full), yellow (low), green (open) */
export function seatsColor(course: CourseResponse): string {
const open = openSeats(course);
if (open === 0) return "text-status-red";
if (open <= 5) return "text-yellow-500";
return "text-status-green";
}
/** Background dot color class for seat availability */
export function seatsDotColor(course: CourseResponse): string {
const open = openSeats(course);
if (open === 0) return "bg-red-500";
if (open <= 5) return "bg-yellow-500";
return "bg-green-500";
}
/** Minimum number of ratings needed to consider RMP data reliable */
export const RMP_CONFIDENCE_THRESHOLD = 7;
/** RMP professor page URL from legacy ID */
export function rmpUrl(legacyId: number): string {
return `https://www.ratemyprofessors.com/professor/${legacyId}`;
}
/**
* Smooth OKLCH color + text-shadow for a RateMyProfessors rating.
*
* Three-stop gradient interpolated in OKLCH:
* 1.0 → red, 3.0 → amber, 5.0 → green
* with separate light/dark mode tuning.
*/
export function ratingStyle(rating: number, isDark: boolean): string {
const clamped = Math.max(1, Math.min(5, rating));
// OKLCH stops: [lightness, chroma, hue]
const stops: { light: [number, number, number]; dark: [number, number, number] }[] = [
{ light: [0.63, 0.2, 25], dark: [0.7, 0.19, 25] }, // 1.0 red
{ light: [0.7, 0.16, 85], dark: [0.78, 0.15, 85] }, // 3.0 amber
{ light: [0.65, 0.2, 145], dark: [0.72, 0.19, 145] }, // 5.0 green
];
let t: number;
let fromIdx: number;
if (clamped <= 3) {
t = (clamped - 1) / 2;
fromIdx = 0;
} else {
t = (clamped - 3) / 2;
fromIdx = 1;
}
const from = isDark ? stops[fromIdx].dark : stops[fromIdx].light;
const to = isDark ? stops[fromIdx + 1].dark : stops[fromIdx + 1].light;
const l = from[0] + (to[0] - from[0]) * t;
const c = from[1] + (to[1] - from[1]) * t;
const h = from[2] + (to[2] - from[2]) * t;
return `color: oklch(${l.toFixed(3)} ${c.toFixed(3)} ${h.toFixed(1)}); text-shadow: 0 0 4px oklch(${l.toFixed(3)} ${c.toFixed(3)} ${h.toFixed(1)} / 0.3);`;
}
/** Format credit hours display */
export function formatCreditHours(course: CourseResponse): string {
if (course.creditHours != null) return String(course.creditHours);
if (course.creditHourLow != null && course.creditHourHigh != null) {
return `${course.creditHourLow}–${course.creditHourHigh}`;
}
return "—";
}
/**
* Convert Banner "Last, First Middle" → "First Middle Last".
* Handles: no comma (returned as-is), trailing/leading spaces,
* middle names/initials preserved.
*/
export function formatInstructorName(displayName: string): string {
const commaIdx = displayName.indexOf(",");
if (commaIdx === -1) return displayName.trim();
const last = displayName.slice(0, commaIdx).trim();
const rest = displayName.slice(commaIdx + 1).trim();
if (!rest) return last;
return `${rest} ${last}`;
}
/** Check if a rating value represents real data (not the 0.0 placeholder for unrated professors). */
export function isRatingValid(avgRating: number | null, numRatings: number): boolean {
return avgRating !== null && !(avgRating === 0 && numRatings === 0);
}
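// Usage sketch for the RMP helpers (markup illustrative; assumes `inst` is an
// InstructorResponse and that themeStore is imported in the component):
//
//   {#if inst.rmpRating != null && isRatingValid(inst.rmpRating, inst.rmpNumRatings ?? 0) && inst.rmpLegacyId != null}
//     <a href={rmpUrl(inst.rmpLegacyId)} style={ratingStyle(inst.rmpRating, themeStore.isDark)}>
//       {inst.rmpRating.toFixed(1)}
//     </a>
//   {/if}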
@@ -0,0 +1,13 @@
import { format, formatDistanceToNow } from "date-fns";
/** Returns a relative time string like "3 minutes ago" or "in 2 hours". */
export function formatRelativeDate(date: string | Date): string {
const d = typeof date === "string" ? new Date(date) : date;
return formatDistanceToNow(d, { addSuffix: true });
}
/** Returns a full absolute datetime string for tooltip display, e.g. "Jan 29, 2026, 3:45:12 PM". */
export function formatAbsoluteDate(date: string | Date): string {
const d = typeof date === "string" ? new Date(date) : date;
return format(d, "MMM d, yyyy, h:mm:ss a");
}