Mirror of https://github.com/Xevion/banner.git (synced 2026-01-31 08:23:35 -06:00)

Compare commits: v0.3.4...4deeef2f00 (21 commits)

| SHA1 |
|---|
| 4deeef2f00 |
| e008ee5a12 |
| a007ccb6a2 |
| 527cbebc6a |
| 4207783cdd |
| c90bd740de |
| 61f8bd9de7 |
| b5eaedc9bc |
| 58475c8673 |
| 78159707e2 |
| 779144a4d5 |
| 0da2e810fe |
| ed72ac6bff |
| 57b5cafb27 |
| 841191c44d |
| 67d7c81ef4 |
| d108a41f91 |
| 5fab8c216a |
| 15256ff91c |
| 6df4303bd6 |
| e3b855b956 |
@@ -0,0 +1,2 @@
[env]
TS_RS_EXPORT_DIR = { value = "web/src/lib/bindings/", relative = true }

Vendored +4 -3
@@ -1,5 +1,6 @@
.env
/target
/go/
.cargo/config.toml
src/scraper/README.md

# ts-rs bindings
web/src/lib/bindings/*.ts
!web/src/lib/bindings/index.ts

Generated +228 -12
Notable changes in the generated Cargo.lock:

- banner 0.3.4 → 0.5.0
- axum 0.8.4 → 0.8.8; axum-core 0.5.2 → 0.5.6
- serde 1.0.219 → 1.0.228; serde_derive 1.0.219 → 1.0.228; new serde_core 1.0.228
- New direct dependencies of banner: axum-extra 0.12.5, html-escape 0.2.13, ts-rs 11.1.0 (with ts-rs-macros and termcolor), urlencoding 2.1.3
- New packages for tower-http response compression: async-compression 0.4.33, compression-codecs 0.4.32, compression-core 0.4.31, brotli 8.0.2, brotli-decompressor 5.0.0, alloc-no-stdlib 2.0.4, alloc-stdlib 0.2.2, zstd 0.13.3, zstd-safe 7.2.4, zstd-sys 2.0.16+zstd.1.5.7
- Other additions: serde_html_form 0.2.8, utf8-width 0.1.8 (pulled in by html-escape), jobserver 0.1.34 (pulled in by cc)
+6 -2
@@ -1,6 +1,6 @@
[package]
name = "banner"
version = "0.3.4"
version = "0.5.0"
edition = "2024"
default-run = "banner"

@@ -48,13 +48,17 @@ url = "2.5"
governor = "0.10.1"
serde_path_to_error = "0.1.17"
num-format = "0.4.4"
tower-http = { version = "0.6.0", features = ["cors", "trace", "timeout"] }
tower-http = { version = "0.6.0", features = ["cors", "trace", "timeout", "compression-full"] }
rust-embed = { version = "8.0", features = ["include-exclude"], optional = true }
mime_guess = { version = "2.0", optional = true }
clap = { version = "4.5", features = ["derive"] }
rapidhash = "4.1.0"
yansi = "1.0.1"
extension-traits = "2"
ts-rs = { version = "11.1.0", features = ["chrono-impl", "serde-compat", "serde-json-impl"] }
html-escape = "0.2.13"
axum-extra = { version = "0.12.5", features = ["query"] }
urlencoding = "2.1.3"

[dev-dependencies]
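The new `compression-full` feature turns on tower-http's response compression codecs (gzip, brotli, zstd). As a rough, hypothetical sketch of how that layer might be wired into an axum router (route path and handler are illustrative, not taken from this repository's source):

```rust
use axum::{routing::get, Router};
use tower_http::compression::CompressionLayer;

// Illustrative only: compress responses for all routes with whichever
// encoding the client advertises (gzip, brotli, or zstd).
fn router() -> Router {
    Router::new()
        .route("/api/health", get(|| async { "ok" }))
        .layer(CompressionLayer::new())
}
```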
+5 -2
@@ -7,6 +7,9 @@ FROM oven/bun:1 AS frontend-builder

WORKDIR /app

# Install zstd for pre-compression
RUN apt-get update && apt-get install -y --no-install-recommends zstd && rm -rf /var/lib/apt/lists/*

# Copy backend Cargo.toml for build-time version retrieval
COPY ./Cargo.toml ./

@@ -19,8 +22,8 @@ RUN bun install --frozen-lockfile
# Copy frontend source code
COPY ./web ./

# Build frontend
RUN bun run build
# Build frontend, then pre-compress static assets (gzip, brotli, zstd)
RUN bun run build && bun run scripts/compress-assets.ts

# --- Chef Base Stage ---
FROM lukemathwalker/cargo-chef:latest-rust-${RUST_VERSION} AS chef
@@ -8,27 +8,30 @@ default:
check:
    cargo fmt --all -- --check
    cargo clippy --all-features -- --deny warnings
    cargo nextest run
    bun run --cwd web typecheck
    bun run --cwd web lint
    bun run --cwd web test --run
    cargo nextest run -E 'not test(export_bindings)'
    bun run --cwd web check
    bun run --cwd web test

# Generate TypeScript bindings from Rust types (ts-rs)
bindings:
    cargo test export_bindings

# Run all tests (Rust + frontend)
test: test-rust test-web

# Run only Rust tests
# Run only Rust tests (excludes ts-rs bindings generation)
test-rust *ARGS:
    cargo nextest run {{ARGS}}
    cargo nextest run -E 'not test(export_bindings)' {{ARGS}}

# Run only frontend tests
test-web:
    bun run --cwd web test --run
    bun run --cwd web test

# Quick check: clippy + tests only (skips formatting)
# Quick check: clippy + tests + typecheck (skips formatting)
check-quick:
    cargo clippy --all-features -- --deny warnings
    cargo nextest run
    bun run --cwd web typecheck
    cargo nextest run -E 'not test(export_bindings)'
    bun run --cwd web check

# Run the Banner API search demo (hits live UTSA API, ~20s)
search *ARGS:
+27 -4
@@ -4,10 +4,33 @@

The Banner project is built as a multi-service application with the following components:

- **Discord Bot Service**: Handles Discord interactions and commands
- **Web Service**: Serves the React frontend and provides API endpoints
- **Scraper Service**: Background data collection and synchronization
- **Database Layer**: PostgreSQL for persistent storage
- **Discord Bot Service**: Handles Discord interactions and commands (Serenity/Poise)
- **Web Service**: Axum HTTP server serving the SvelteKit frontend and REST API endpoints
- **Scraper Service**: Background data collection and synchronization with job queue
- **Database Layer**: PostgreSQL 17 for persistent storage (SQLx with compile-time verification)
- **RateMyProfessors Client**: GraphQL-based bulk sync of professor ratings

### Frontend Stack

- **SvelteKit** with Svelte 5 runes (`$state`, `$derived`, `$effect`)
- **Tailwind CSS v4** via `@tailwindcss/vite`
- **bits-ui** for headless UI primitives (comboboxes, tooltips, dropdowns)
- **TanStack Table** for interactive data tables with sorting and column control
- **OverlayScrollbars** for styled, theme-aware scrollable areas
- **ts-rs** generates TypeScript type bindings from Rust structs
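To illustrate the ts-rs binding flow described above, here is a minimal, hypothetical sketch (the struct name and fields are illustrative, not taken from this diff); `cargo test export_bindings` writes the matching TypeScript interface into the directory configured by `TS_RS_EXPORT_DIR` (`web/src/lib/bindings/`):

```rust
use serde::Serialize;
use ts_rs::TS;

// Deriving `TS` with `#[ts(export)]` registers an export test that emits
// a TypeScript interface mirroring this struct.
#[derive(Serialize, TS)]
#[ts(export)]
struct CourseSummary {
    crn: String,
    title: String,
    enrollment: i32,
    max_enrollment: i32,
}
```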
### API Endpoints

| Endpoint | Description |
|---|---|
| `GET /api/health` | Health check |
| `GET /api/status` | Service status, version, and commit hash |
| `GET /api/metrics` | Basic metrics |
| `GET /api/courses/search` | Paginated course search with filters (term, subject, query, open-only, sort) |
| `GET /api/courses/:term/:crn` | Single course detail with instructors and RMP ratings |
| `GET /api/terms` | Available terms from reference cache |
| `GET /api/subjects?term=` | Subjects for a term, ordered by enrollment |
| `GET /api/reference/:category` | Reference data lookups (campuses, instructional methods, etc.) |
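As a hypothetical usage sketch of the search endpoint above (the host is a placeholder and the query parameter names are guesses based on the listed filters, not confirmed by this diff):

```rust
// Illustrative client call; assumes the `reqwest` crate is available.
async fn search_open_cs_courses() -> Result<String, reqwest::Error> {
    let body = reqwest::get(
        "http://localhost:8080/api/courses/search?term=202510&subject=CS&open_only=true",
    )
    .await?
    .text()
    .await?;
    Ok(body)
}
```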
## Technical Analysis
@@ -0,0 +1,61 @@
# Changelog

All notable changes to this project will be documented in this file.

The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.1.0/).

## [Unreleased]

## [0.5.0] - 2026-01-29

### Added

- Multi-select subject filtering with searchable comboboxes.
- Smart instructor name abbreviation for compact table display.
- Delivery mode indicators and tooltips in location column.
- Page selector dropdown with animated pagination controls.
- FLIP animations for smooth table row transitions during pagination.
- Time tooltip with detailed meeting schedule and day abbreviations.
- Reusable SimpleTooltip component for consistent UI hints.

### Changed

- Consolidated query logic and eliminated N+1 instructor loads via batch fetching.
- Consolidated menu snippets and strengthened component type safety.
- Enhanced table scrolling with OverlayScrollbars and theme-aware styling.
- Eliminated initial theme flash on page load.

## [0.4.0] - 2026-01-28

### Added

- Web-based course search UI with interactive data table, multi-column sorting, and column visibility controls.
- TypeScript type bindings generated from Rust types via ts-rs.
- RateMyProfessors integration: bulk professor sync via GraphQL and inline rating display in search results.
- Course detail expansion panel with enrollment, meeting times, and instructor info.
- OverlayScrollbars integration for styled, theme-aware scrollable areas.
- Pagination component for navigating large search result sets.
- Footer component with version display.
- API endpoints: `/api/courses/search`, `/api/courses/:term/:crn`, `/api/terms`, `/api/subjects`, `/api/reference/:category`.
- Frontend API client with typed request/response handling and test coverage.
- Course formatting utilities with comprehensive unit tests.

## [0.3.4] - 2026-01

### Added

- Live service status tracking on web dashboard with auto-refresh and health indicators.
- DB operation extraction for improved testability.
- Unit test suite foundation covering core functionality.
- Docker support for PostgreSQL development environment.
- ICS calendar export with comprehensive holiday exclusion coverage.
- Google Calendar link generation with recurrence rules and meeting details.
- Job queue with priority-based scheduling for background scraping.
- Rate limiting with burst allowance for Banner API requests.
- Session management and caching for Banner API interactions.
- Discord bot commands: search, terms, ics, gcal.
- Intelligent scraping system with priority queues and retry tracking.

### Changed

- Type consolidation and dead code removal across the codebase.
@@ -1,58 +0,0 @@
# Features

## Current Features

### Discord Bot Commands

- **search** - Search for courses with various filters (title, course code, keywords)
- **terms** - List available terms or search for a specific term
- **time** - Get meeting times for a specific course (CRN)
- **ics** - Generate ICS calendar file for a course with holiday exclusions
- **gcal** - Generate Google Calendar link for a course

### Data Pipeline

- Intelligent scraping system with priority queues
- Rate limiting and burst handling
- Background data synchronization

## Feature Wishlist

### Commands

- ICS Download (get a ICS download of your classes with location & timing perfectly set for every class you're in)
- Classes Now (find classes happening)
- Autocomplete
  - Class Title
  - Course Number
  - Term/Part of Term
  - Professor
  - Attribute
- Component Pagination
- RateMyProfessor Integration (Linked/Embedded)
- Smart term selection (i.e. Summer 2024 will be selected automatically when opened)
- Rate Limiting (bursting with global/user limits)
- DMs Integration (allow usage of the bot in DMs)
- Class Change Notifications (get notified when details about a class change)
- Multi-term Querying (currently the backend for searching is kinda weird)
- Full Autocomplete for Every Search Option
- Metrics, Log Query, Privileged Error Feedback
- Search for Classes
  - Major, Professor, Location, Name, Time of Day
- Subscribe to Classes
  - Availability (seat, pre-seat)
  - Waitlist Movement
  - Detail Changes (meta, time, location, seats, professor)
    - `time` Start, End, Days of Week
    - `seats` Any change in seat/waitlist data
    - `meta`
  - Lookup via Course Reference Number (CRN)
- Smart Time of Day Handling
  - "2 PM" -> Start within 2:00 PM to 2:59 PM
  - "2-3 PM" -> Start within 2:00 PM to 3:59 PM
  - "ends by 2 PM" -> Ends within 12:00 AM to 2:00 PM
  - "after 2 PM" -> Start within 2:01 PM to 11:59 PM
  - "before 2 PM" -> Ends within 12:00 AM to 1:59 PM
- Get By Section Command
  - CS 4393 001 =>
  - Will require SQL to be able to search for a class by its section number
+2 -1
@@ -4,7 +4,8 @@ This folder contains detailed documentation for the Banner project. This file ac

## Files

- [`FEATURES.md`](FEATURES.md) - Current features, implemented functionality, and future roadmap
- [`CHANGELOG.md`](CHANGELOG.md) - Notable changes by version
- [`ROADMAP.md`](ROADMAP.md) - Planned features and priorities
- [`BANNER.md`](BANNER.md) - General API documentation on the Banner system
- [`ARCHITECTURE.md`](ARCHITECTURE.md) - Technical implementation details, system design, and analysis
@@ -0,0 +1,35 @@
# Roadmap

## Now

- **Notification and subscription system** - Subscribe to courses and get alerts on seat availability, waitlist movement, and detail changes (time, location, professor, seats). DB schema exists.
- **Professor name search filter** - Filter search results by instructor. Backend code exists but is commented out.
- **Autocomplete for search fields** - Typeahead for course titles, course numbers, professors, and terms.
- **Test coverage expansion** - Broaden coverage with session/rate-limiter tests and more DB integration tests.

## Soon

- **Smart time-of-day search parsing** - Support natural queries like "2 PM", "2-3 PM", "ends by 2 PM", "after 2 PM", "before 2 PM" mapped to time ranges.
- **Section-based lookup** - Search by full section identifier, e.g. "CS 4393 001".
- **Search result pagination** - Paginated embeds for large result sets in Discord.
- **Multi-term querying** - Query across multiple terms in a single search instead of one at a time.
- **Historical analytics** - Track seat availability over time and visualize fill-rate trends per course or professor.
- **Schedule builder** - Visual weekly schedule tool for assembling a conflict-free course lineup.
- **Professor stats** - Aggregate data views: average class size, typical waitlist length, schedule patterns across semesters.

## Eventually

- **Degree audit helper** - Map available courses to degree requirements and suggest what to take next.
- **Dynamic scraper scheduling** - Adjust scrape intervals based on change frequency and course count (e.g. 2 hours per 500 courses, shorter intervals when changes are detected).
- **DM support** - Allow the Discord bot to respond in direct messages, not just guild channels.
- **"Classes Now" command** - Find classes currently in session based on the current day and time.
- **CRN direct lookup** - Look up a course by its CRN without going through search.
- **Metrics dashboard** - Surface scraper and service metrics visually on the web dashboard.
- **Privileged error feedback** - Detailed error information surfaced to bot admins when commands fail.

## Done

- **Web course search UI** - Browser-based course search with interactive data table, sorting, pagination, and column controls. *(0.4.0)*
- **RateMyProfessor integration** - Bulk professor sync via GraphQL with inline ratings in search results. *(0.4.0)*
- **Subject/major search filter** - Multi-select subject filtering with searchable comboboxes. *(0.5.0)*
- **Test coverage expansion** - Unit tests for course formatting, API client, query builder, CLI args, and config parsing. *(0.3.4–0.4.0)*
@@ -0,0 +1,83 @@
-- ============================================================
-- Expand courses table with rich Banner API fields
-- ============================================================

-- Section identifiers
ALTER TABLE courses ADD COLUMN sequence_number VARCHAR;
ALTER TABLE courses ADD COLUMN part_of_term VARCHAR;

-- Schedule & delivery (store codes, descriptions come from reference_data)
ALTER TABLE courses ADD COLUMN instructional_method VARCHAR;
ALTER TABLE courses ADD COLUMN campus VARCHAR;

-- Credit hours
ALTER TABLE courses ADD COLUMN credit_hours INTEGER;
ALTER TABLE courses ADD COLUMN credit_hour_low INTEGER;
ALTER TABLE courses ADD COLUMN credit_hour_high INTEGER;

-- Cross-listing
ALTER TABLE courses ADD COLUMN cross_list VARCHAR;
ALTER TABLE courses ADD COLUMN cross_list_capacity INTEGER;
ALTER TABLE courses ADD COLUMN cross_list_count INTEGER;

-- Section linking
ALTER TABLE courses ADD COLUMN link_identifier VARCHAR;
ALTER TABLE courses ADD COLUMN is_section_linked BOOLEAN;

-- JSONB columns for 1-to-many data
ALTER TABLE courses ADD COLUMN meeting_times JSONB NOT NULL DEFAULT '[]'::jsonb;
ALTER TABLE courses ADD COLUMN attributes JSONB NOT NULL DEFAULT '[]'::jsonb;

-- ============================================================
-- Full-text search support
-- ============================================================

-- Generated tsvector for word-based search on title
ALTER TABLE courses ADD COLUMN title_search tsvector
    GENERATED ALWAYS AS (to_tsvector('simple', coalesce(title, ''))) STORED;

CREATE INDEX idx_courses_title_search ON courses USING GIN (title_search);

-- Trigram index for substring/ILIKE search on title
CREATE EXTENSION IF NOT EXISTS pg_trgm;
CREATE INDEX idx_courses_title_trgm ON courses USING GIN (title gin_trgm_ops);

-- ============================================================
-- New filter indexes
-- ============================================================

CREATE INDEX idx_courses_instructional_method ON courses(instructional_method);
CREATE INDEX idx_courses_campus ON courses(campus);

-- Composite for "open CS courses in Fall 2024" pattern
CREATE INDEX idx_courses_term_subject_avail ON courses(term_code, subject, max_enrollment, enrollment);

-- ============================================================
-- Instructors table (normalized, deduplicated)
-- ============================================================

CREATE TABLE instructors (
    banner_id VARCHAR PRIMARY KEY,
    display_name VARCHAR NOT NULL,
    email VARCHAR
);

CREATE TABLE course_instructors (
    course_id INTEGER NOT NULL REFERENCES courses(id) ON DELETE CASCADE,
    instructor_id VARCHAR NOT NULL REFERENCES instructors(banner_id) ON DELETE CASCADE,
    is_primary BOOLEAN NOT NULL DEFAULT false,
    PRIMARY KEY (course_id, instructor_id)
);

CREATE INDEX idx_course_instructors_instructor ON course_instructors(instructor_id);

-- ============================================================
-- Reference data table (all code→description lookups)
-- ============================================================

CREATE TABLE reference_data (
    category VARCHAR NOT NULL,
    code VARCHAR NOT NULL,
    description VARCHAR NOT NULL,
    PRIMARY KEY (category, code)
);
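A rough sketch of how the title indexes above might be queried from Rust with sqlx (table and column names come from this migration; the function itself is illustrative and not part of this diff):

```rust
use sqlx::PgPool;

// Illustrative only: word search via the generated tsvector column,
// falling back to the trigram-backed ILIKE for substring matches.
async fn search_titles(pool: &PgPool, query: &str) -> Result<Vec<String>, sqlx::Error> {
    sqlx::query_scalar::<_, String>(
        r#"
        SELECT title
        FROM courses
        WHERE title_search @@ plainto_tsquery('simple', $1)
           OR title ILIKE '%' || $1 || '%'
        LIMIT 50
        "#,
    )
    .bind(query)
    .fetch_all(pool)
    .await
}
```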
@@ -0,0 +1,17 @@
-- RMP professor data (bulk synced from RateMyProfessors)
CREATE TABLE rmp_professors (
    legacy_id INTEGER PRIMARY KEY,
    graphql_id VARCHAR NOT NULL,
    first_name VARCHAR NOT NULL,
    last_name VARCHAR NOT NULL,
    department VARCHAR,
    avg_rating REAL,
    avg_difficulty REAL,
    num_ratings INTEGER NOT NULL DEFAULT 0,
    would_take_again_pct REAL,
    last_synced_at TIMESTAMPTZ NOT NULL DEFAULT NOW()
);

-- Link Banner instructors to RMP professors
ALTER TABLE instructors ADD COLUMN rmp_legacy_id INTEGER REFERENCES rmp_professors(legacy_id);
ALTER TABLE instructors ADD COLUMN rmp_match_status VARCHAR NOT NULL DEFAULT 'pending';
@@ -0,0 +1,19 @@
CREATE TABLE users (
    discord_id BIGINT PRIMARY KEY,
    discord_username TEXT NOT NULL,
    discord_avatar_hash TEXT,
    is_admin BOOLEAN NOT NULL DEFAULT false,
    created_at TIMESTAMPTZ NOT NULL DEFAULT now(),
    updated_at TIMESTAMPTZ NOT NULL DEFAULT now()
);

CREATE TABLE user_sessions (
    id TEXT PRIMARY KEY,
    user_id BIGINT NOT NULL REFERENCES users(discord_id) ON DELETE CASCADE,
    created_at TIMESTAMPTZ NOT NULL DEFAULT now(),
    expires_at TIMESTAMPTZ NOT NULL,
    last_active_at TIMESTAMPTZ NOT NULL DEFAULT now()
);

CREATE INDEX idx_user_sessions_user_id ON user_sessions(user_id);
CREATE INDEX idx_user_sessions_expires_at ON user_sessions(expires_at);
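A hypothetical sketch of how the `user_sessions` table above might be checked on each authenticated request (query shape only; the actual handler code is not part of this diff):

```rust
use sqlx::PgPool;

// Illustrative only: return the owning user's Discord ID when the session
// exists and has not expired, refreshing last_active_at in the same query.
async fn validate_session(pool: &PgPool, session_id: &str) -> Result<Option<i64>, sqlx::Error> {
    sqlx::query_scalar::<_, i64>(
        r#"
        UPDATE user_sessions
        SET last_active_at = now()
        WHERE id = $1 AND expires_at > now()
        RETURNING user_id
        "#,
    )
    .bind(session_id)
    .fetch_optional(pool)
    .await
}
```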
+26 -2
@@ -6,13 +6,14 @@ use crate::services::bot::BotService;
use crate::services::manager::ServiceManager;
use crate::services::web::WebService;
use crate::state::AppState;
use crate::web::auth::AuthConfig;
use anyhow::Context;
use figment::value::UncasedStr;
use figment::{Figment, providers::Env};
use sqlx::postgres::PgPoolOptions;
use std::process::ExitCode;
use std::sync::Arc;
use std::time::Duration;
use anyhow::Context;
use tracing::{error, info};

/// Main application struct containing all necessary components
@@ -79,6 +80,19 @@ impl App {
        let banner_api_arc = Arc::new(banner_api);
        let app_state = AppState::new(banner_api_arc.clone(), db_pool.clone());

        // Load reference data cache from DB (may be empty on first run)
        if let Err(e) = app_state.load_reference_cache().await {
            info!(error = ?e, "Could not load reference cache on startup (may be empty)");
        }

        // Seed the initial admin user if configured
        if let Some(admin_id) = config.admin_discord_id {
            let user = crate::data::users::ensure_seed_admin(&db_pool, admin_id as i64)
                .await
                .context("Failed to seed admin user")?;
            info!(discord_id = admin_id, username = %user.discord_username, "Seed admin ensured");
        }

        Ok(App {
            config,
            db_pool,
@@ -92,7 +106,16 @@ impl App {
    pub fn setup_services(&mut self, services: &[ServiceName]) -> Result<(), anyhow::Error> {
        // Register enabled services with the manager
        if services.contains(&ServiceName::Web) {
            let web_service = Box::new(WebService::new(self.config.port, self.app_state.clone()));
            let auth_config = AuthConfig {
                client_id: self.config.discord_client_id.clone(),
                client_secret: self.config.discord_client_secret.clone(),
                redirect_base: self.config.discord_redirect_uri.clone(),
            };
            let web_service = Box::new(WebService::new(
                self.config.port,
                self.app_state.clone(),
                auth_config,
            ));
            self.service_manager
                .register_service(ServiceName::Web.as_str(), web_service);
        }
@@ -101,6 +124,7 @@ impl App {
            let scraper_service = Box::new(ScraperService::new(
                self.db_pool.clone(),
                self.banner_api.clone(),
                self.app_state.reference_cache.clone(),
                self.app_state.service_statuses.clone(),
            ));
            self.service_manager
@@ -228,6 +228,29 @@ impl BannerApi {
            .await
    }

    /// Retrieves campus codes and descriptions.
    pub async fn get_campuses(&self, term: &str) -> Result<Vec<Pair>> {
        self.get_list_endpoint("get_campus", "", term, 1, 500).await
    }

    /// Retrieves instructional method codes and descriptions.
    pub async fn get_instructional_methods(&self, term: &str) -> Result<Vec<Pair>> {
        self.get_list_endpoint("get_instructionalMethod", "", term, 1, 500)
            .await
    }

    /// Retrieves part-of-term codes and descriptions.
    pub async fn get_parts_of_term(&self, term: &str) -> Result<Vec<Pair>> {
        self.get_list_endpoint("get_partOfTerm", "", term, 1, 500)
            .await
    }

    /// Retrieves section attribute codes and descriptions.
    pub async fn get_attributes(&self, term: &str) -> Result<Vec<Pair>> {
        self.get_list_endpoint("get_attribute", "", term, 1, 500)
            .await
    }

    /// Retrieves meeting time information for a course.
    pub async fn get_course_meeting_time(
        &self,
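A hypothetical sketch of how these new lookup methods might feed the `reference_data` table created earlier (`get_campuses` and the table come from this diff; the wiring, the assumption that `Pair` exposes `code` and `description` fields, and the error handling are illustrative):

```rust
// Illustrative wiring only: fetch campus code/description pairs for a term
// and upsert them into reference_data under the "campus" category.
async fn sync_campuses(api: &BannerApi, pool: &sqlx::PgPool, term: &str) -> anyhow::Result<()> {
    for pair in api.get_campuses(term).await? {
        sqlx::query(
            "INSERT INTO reference_data (category, code, description)
             VALUES ('campus', $1, $2)
             ON CONFLICT (category, code) DO UPDATE SET description = EXCLUDED.description",
        )
        .bind(&pair.code)
        .bind(&pair.description)
        .execute(pool)
        .await?;
    }
    Ok(())
}
```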
@@ -1,4 +1,4 @@
use bitflags::{bitflags, Flags};
use bitflags::{Flags, bitflags};
use chrono::{DateTime, NaiveDate, NaiveTime, Timelike, Utc, Weekday};
use extension_traits::extension;
use serde::{Deserialize, Deserializer, Serialize};
@@ -320,10 +320,11 @@ pub enum MeetingType {
    Unknown(String),
}

impl MeetingType {
    /// Parse from the meeting type string
    pub fn from_string(s: &str) -> Self {
        match s {
impl std::str::FromStr for MeetingType {
    type Err = std::convert::Infallible;

    fn from_str(s: &str) -> std::result::Result<Self, Self::Err> {
        Ok(match s {
            "HB" | "H2" | "H1" => MeetingType::HybridBlended,
            "OS" => MeetingType::OnlineSynchronous,
            "OA" => MeetingType::OnlineAsynchronous,
@@ -331,9 +332,11 @@ impl MeetingType {
            "ID" => MeetingType::IndependentStudy,
            "FF" => MeetingType::FaceToFace,
            other => MeetingType::Unknown(other.to_string()),
        }
        })
    }
}

impl MeetingType {
    /// Get description for the meeting type
    pub fn description(&self) -> &'static str {
        match self {
@@ -424,7 +427,7 @@ impl MeetingScheduleInfo {
            end: now,
        }
        });
        let meeting_type = MeetingType::from_string(&meeting_time.meeting_type);
        let meeting_type: MeetingType = meeting_time.meeting_type.parse().unwrap();
        let location = MeetingLocation::from_meeting_time(meeting_time);
        let duration_weeks = date_range.weeks_duration();
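Since the new `FromStr` implementation is infallible, the `.unwrap()` on `.parse()` above can never panic. A minimal usage sketch (the codes come from the match arms in this diff; the function is illustrative):

```rust
fn demo() {
    // "OS" maps to OnlineSynchronous; unrecognized codes fall back to Unknown.
    let online: MeetingType = "OS".parse().unwrap();
    assert!(matches!(online, MeetingType::OnlineSynchronous));
    let other: MeetingType = "ZZ".parse().unwrap();
    assert!(matches!(other, MeetingType::Unknown(_)));
}
```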
+3 -14
@@ -10,8 +10,9 @@ pub struct Range {
    pub high: i32,
}

/// Builder for constructing Banner API search queries
/// Builder for constructing Banner API search queries.
#[derive(Debug, Clone, Default)]
#[allow(dead_code)]
pub struct SearchQuery {
    subject: Option<String>,
    title: Option<String>,
@@ -32,6 +33,7 @@ pub struct SearchQuery {
    course_number_range: Option<Range>,
}

#[allow(dead_code)]
impl SearchQuery {
    /// Creates a new SearchQuery with default values
    pub fn new() -> Self {
@@ -67,7 +69,6 @@ impl SearchQuery {
    }

    /// Adds a keyword to the query
    #[allow(dead_code)]
    pub fn keyword<S: Into<String>>(mut self, keyword: S) -> Self {
        match &mut self.keywords {
            Some(keywords) => keywords.push(keyword.into()),
@@ -77,63 +78,54 @@ impl SearchQuery {
    }

    /// Sets whether to search for open courses only
    #[allow(dead_code)]
    pub fn open_only(mut self, open_only: bool) -> Self {
        self.open_only = Some(open_only);
        self
    }

    /// Sets the term part for the query
    #[allow(dead_code)]
    pub fn term_part(mut self, term_part: Vec<String>) -> Self {
        self.term_part = Some(term_part);
        self
    }

    /// Sets the campuses for the query
    #[allow(dead_code)]
    pub fn campus(mut self, campus: Vec<String>) -> Self {
        self.campus = Some(campus);
        self
    }

    /// Sets the instructional methods for the query
    #[allow(dead_code)]
    pub fn instructional_method(mut self, instructional_method: Vec<String>) -> Self {
        self.instructional_method = Some(instructional_method);
        self
    }

    /// Sets the attributes for the query
    #[allow(dead_code)]
    pub fn attributes(mut self, attributes: Vec<String>) -> Self {
        self.attributes = Some(attributes);
        self
    }

    /// Sets the instructors for the query
    #[allow(dead_code)]
    pub fn instructor(mut self, instructor: Vec<u64>) -> Self {
        self.instructor = Some(instructor);
        self
    }

    /// Sets the start time for the query
    #[allow(dead_code)]
    pub fn start_time(mut self, start_time: Duration) -> Self {
        self.start_time = Some(start_time);
        self
    }

    /// Sets the end time for the query
    #[allow(dead_code)]
    pub fn end_time(mut self, end_time: Duration) -> Self {
        self.end_time = Some(end_time);
        self
    }

    /// Sets the credit range for the query
    #[allow(dead_code)]
    pub fn credits(mut self, low: i32, high: i32) -> Self {
        self.min_credits = Some(low);
        self.max_credits = Some(high);
@@ -141,14 +133,12 @@ impl SearchQuery {
    }

    /// Sets the minimum credits for the query
    #[allow(dead_code)]
    pub fn min_credits(mut self, value: i32) -> Self {
        self.min_credits = Some(value);
        self
    }

    /// Sets the maximum credits for the query
    #[allow(dead_code)]
    pub fn max_credits(mut self, value: i32) -> Self {
        self.max_credits = Some(value);
        self
@@ -161,7 +151,6 @@ impl SearchQuery {
    }

    /// Sets the offset for pagination
    #[allow(dead_code)]
    pub fn offset(mut self, offset: i32) -> Self {
        self.offset = offset;
        self
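A minimal usage sketch of the builder above (every method shown appears in this file; the filter values themselves are illustrative):

```rust
fn example_query() -> SearchQuery {
    // Chain the builder methods; filters left unset simply stay None.
    SearchQuery::new()
        .open_only(true)
        .campus(vec!["1".to_string()])
        .credits(3, 4)
        .keyword("algorithms")
}
```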
@@ -452,7 +452,11 @@ impl SessionPool {
        self.select_term(&term.to_string(), &unique_session_id, &cookie_header)
            .await?;

        Ok(BannerSession::new(&unique_session_id, jsessionid, ssb_cookie))
        Ok(BannerSession::new(
            &unique_session_id,
            jsessionid,
            ssb_cookie,
        ))
    }

    /// Retrieves a list of terms from the Banner API.
@@ -18,7 +18,9 @@ fn nth_weekday_of_month(year: i32, month: u32, weekday: Weekday, n: u32) -> Opti

/// Compute a consecutive range of dates starting from `start` for `count` days.
fn date_range(start: NaiveDate, count: i64) -> Vec<NaiveDate> {
    (0..count).filter_map(|i| start.checked_add_signed(Duration::days(i))).collect()
    (0..count)
        .filter_map(|i| start.checked_add_signed(Duration::days(i)))
        .collect()
}

/// Compute university holidays for a given year.
@@ -47,6 +47,19 @@ pub struct Config {
    /// Rate limiting configuration for Banner API requests
    #[serde(default = "default_rate_limiting")]
    pub rate_limiting: RateLimitingConfig,

    /// Discord OAuth2 client ID for web authentication
    #[serde(deserialize_with = "deserialize_string_or_uint")]
    pub discord_client_id: String,
    /// Discord OAuth2 client secret for web authentication
    pub discord_client_secret: String,
    /// Optional base URL override for OAuth2 redirect (e.g. "https://banner.xevion.dev").
    /// When unset, the redirect URI is derived from the incoming request's Origin/Host.
    #[serde(default)]
    pub discord_redirect_uri: Option<String>,
    /// Discord user ID to seed as initial admin on startup (optional)
    #[serde(default)]
    pub admin_discord_id: Option<u64>,
}

/// Default log level of "info"
@@ -216,6 +229,43 @@ where
    deserializer.deserialize_any(DurationVisitor)
}

/// Deserializes a value that may arrive as either a string or unsigned integer.
///
/// Figment's env provider infers types from raw values, so numeric-looking strings
/// like Discord client IDs get parsed as integers. This accepts both forms.
fn deserialize_string_or_uint<'de, D>(deserializer: D) -> Result<String, D::Error>
where
    D: Deserializer<'de>,
{
    use serde::de::Visitor;

    struct StringOrUintVisitor;

    impl<'de> Visitor<'de> for StringOrUintVisitor {
        type Value = String;

        fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result {
            formatter.write_str("a string or unsigned integer")
        }

        fn visit_str<E>(self, value: &str) -> Result<Self::Value, E>
        where
            E: serde::de::Error,
        {
            Ok(value.to_owned())
        }

        fn visit_u64<E>(self, value: u64) -> Result<Self::Value, E>
        where
            E: serde::de::Error,
        {
            Ok(value.to_string())
        }
    }

    deserializer.deserialize_any(StringOrUintVisitor)
}

#[cfg(test)]
mod tests {
    use super::*;
+255
-52
@@ -1,43 +1,73 @@
|
||||
//! Batch database operations for improved performance.
|
||||
|
||||
use crate::banner::Course;
|
||||
use crate::data::models::DbMeetingTime;
|
||||
use crate::error::Result;
|
||||
use sqlx::PgPool;
|
||||
use std::collections::HashSet;
|
||||
use std::time::Instant;
|
||||
use tracing::info;
|
||||
|
||||
/// Convert a Banner API course's meeting times to the DB JSONB shape.
|
||||
fn to_db_meeting_times(course: &Course) -> serde_json::Value {
|
||||
let meetings: Vec<DbMeetingTime> = course
|
||||
.meetings_faculty
|
||||
.iter()
|
||||
.map(|mf| {
|
||||
let mt = &mf.meeting_time;
|
||||
DbMeetingTime {
|
||||
begin_time: mt.begin_time.clone(),
|
||||
end_time: mt.end_time.clone(),
|
||||
start_date: mt.start_date.clone(),
|
||||
end_date: mt.end_date.clone(),
|
||||
monday: mt.monday,
|
||||
tuesday: mt.tuesday,
|
||||
wednesday: mt.wednesday,
|
||||
thursday: mt.thursday,
|
||||
friday: mt.friday,
|
||||
saturday: mt.saturday,
|
||||
sunday: mt.sunday,
|
||||
building: mt.building.clone(),
|
||||
building_description: mt.building_description.clone(),
|
||||
room: mt.room.clone(),
|
||||
campus: mt.campus.clone(),
|
||||
meeting_type: mt.meeting_type.clone(),
|
||||
meeting_schedule_type: mt.meeting_schedule_type.clone(),
|
||||
}
|
||||
})
|
||||
.collect();
|
||||
serde_json::to_value(meetings).unwrap_or_default()
|
||||
}
|
||||
|
||||
/// Convert a Banner API course's section attributes to a JSONB array of code strings.
|
||||
fn to_db_attributes(course: &Course) -> serde_json::Value {
|
||||
let codes: Vec<&str> = course
|
||||
.section_attributes
|
||||
.iter()
|
||||
.map(|a| a.code.as_str())
|
||||
.collect();
|
||||
serde_json::to_value(codes).unwrap_or_default()
|
||||
}
|
||||
|
||||
/// Extract the campus code from the first meeting time (Banner doesn't put it on the course directly).
|
||||
fn extract_campus_code(course: &Course) -> Option<String> {
|
||||
course
|
||||
.meetings_faculty
|
||||
.first()
|
||||
.and_then(|mf| mf.meeting_time.campus.clone())
|
||||
}
|
||||
|
||||
/// Batch upsert courses in a single database query.
|
||||
///
|
||||
/// This function performs a bulk INSERT...ON CONFLICT DO UPDATE for all courses
|
||||
/// in a single round-trip to the database, significantly reducing overhead compared
|
||||
/// to individual inserts.
|
||||
/// Performs a bulk INSERT...ON CONFLICT DO UPDATE for all courses, including
|
||||
/// new fields (meeting times, attributes, instructor data). Returns the
|
||||
/// database IDs for all upserted courses (in input order) so instructors
|
||||
/// can be linked.
|
||||
///
|
||||
/// # Performance
|
||||
/// - Reduces N database round-trips to 1
|
||||
/// - Reduces N database round-trips to 3 (courses, instructors, junction)
|
||||
/// - Typical usage: 50-200 courses per batch
|
||||
/// - PostgreSQL parameter limit: 65,535 (we use ~10 per course)
|
||||
///
|
||||
/// # Arguments
|
||||
/// * `courses` - Slice of Course structs from the Banner API
|
||||
/// * `db_pool` - PostgreSQL connection pool
|
||||
///
|
||||
/// # Returns
|
||||
/// * `Ok(())` on success
|
||||
/// * `Err(_)` if the database operation fails
|
||||
///
|
||||
/// # Example
|
||||
/// ```no_run
|
||||
/// use banner::data::batch::batch_upsert_courses;
|
||||
/// use banner::banner::Course;
|
||||
/// use sqlx::PgPool;
|
||||
///
|
||||
/// async fn example(courses: &[Course], pool: &PgPool) -> anyhow::Result<()> {
|
||||
/// batch_upsert_courses(courses, pool).await?;
|
||||
/// Ok(())
|
||||
/// }
|
||||
/// ```
|
||||
pub async fn batch_upsert_courses(courses: &[Course], db_pool: &PgPool) -> Result<()> {
|
||||
// Early return for empty batches
|
||||
if courses.is_empty() {
|
||||
info!("No courses to upsert, skipping batch operation");
|
||||
return Ok(());
|
||||
@@ -46,42 +76,108 @@ pub async fn batch_upsert_courses(courses: &[Course], db_pool: &PgPool) -> Resul
|
||||
let start = Instant::now();
|
||||
let course_count = courses.len();
|
||||
|
||||
// Extract course fields into vectors for UNNEST
|
||||
// Step 1: Upsert courses with all fields, returning IDs
|
||||
let course_ids = upsert_courses(courses, db_pool).await?;
|
||||
|
||||
// Step 2: Upsert instructors (deduplicated across batch)
|
||||
upsert_instructors(courses, db_pool).await?;
|
||||
|
||||
// Step 3: Link courses to instructors via junction table
|
||||
upsert_course_instructors(courses, &course_ids, db_pool).await?;
|
||||
|
||||
let duration = start.elapsed();
|
||||
info!(
|
||||
courses_count = course_count,
|
||||
duration_ms = duration.as_millis(),
|
||||
"Batch upserted courses with instructors"
|
||||
);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Upsert all courses and return their database IDs in input order.
|
||||
async fn upsert_courses(courses: &[Course], db_pool: &PgPool) -> Result<Vec<i32>> {
|
||||
let crns: Vec<&str> = courses
|
||||
.iter()
|
||||
.map(|c| c.course_reference_number.as_str())
|
||||
.collect();
|
||||
|
||||
let subjects: Vec<&str> = courses.iter().map(|c| c.subject.as_str()).collect();
|
||||
|
||||
let course_numbers: Vec<&str> = courses.iter().map(|c| c.course_number.as_str()).collect();
|
||||
|
||||
let titles: Vec<&str> = courses.iter().map(|c| c.course_title.as_str()).collect();
|
||||
|
||||
let term_codes: Vec<&str> = courses.iter().map(|c| c.term.as_str()).collect();
|
||||
|
||||
let enrollments: Vec<i32> = courses.iter().map(|c| c.enrollment).collect();
|
||||
|
||||
let max_enrollments: Vec<i32> = courses.iter().map(|c| c.maximum_enrollment).collect();
|
||||
|
||||
let wait_counts: Vec<i32> = courses.iter().map(|c| c.wait_count).collect();
|
||||
|
||||
let wait_capacities: Vec<i32> = courses.iter().map(|c| c.wait_capacity).collect();
|
||||
|
||||
// Perform batch upsert using UNNEST for efficient bulk insertion
|
||||
let result = sqlx::query(
|
||||
// New scalar fields
|
||||
let sequence_numbers: Vec<Option<&str>> = courses
|
||||
.iter()
|
||||
.map(|c| Some(c.sequence_number.as_str()))
|
||||
.collect();
|
||||
let parts_of_term: Vec<Option<&str>> = courses
|
||||
.iter()
|
||||
.map(|c| Some(c.part_of_term.as_str()))
|
||||
.collect();
|
||||
let instructional_methods: Vec<Option<&str>> = courses
|
||||
.iter()
|
||||
.map(|c| Some(c.instructional_method.as_str()))
|
||||
.collect();
|
||||
let campuses: Vec<Option<String>> = courses.iter().map(extract_campus_code).collect();
|
||||
let credit_hours: Vec<Option<i32>> = courses.iter().map(|c| c.credit_hours).collect();
|
||||
let credit_hour_lows: Vec<Option<i32>> = courses.iter().map(|c| c.credit_hour_low).collect();
|
||||
let credit_hour_highs: Vec<Option<i32>> = courses.iter().map(|c| c.credit_hour_high).collect();
|
||||
let cross_lists: Vec<Option<&str>> = courses.iter().map(|c| c.cross_list.as_deref()).collect();
|
||||
let cross_list_capacities: Vec<Option<i32>> =
|
||||
courses.iter().map(|c| c.cross_list_capacity).collect();
|
||||
let cross_list_counts: Vec<Option<i32>> = courses.iter().map(|c| c.cross_list_count).collect();
|
||||
let link_identifiers: Vec<Option<&str>> = courses
|
||||
.iter()
|
||||
.map(|c| c.link_identifier.as_deref())
|
||||
.collect();
|
||||
let is_section_linkeds: Vec<Option<bool>> =
|
||||
courses.iter().map(|c| Some(c.is_section_linked)).collect();
|
||||
|
||||
// JSONB fields
|
||||
let meeting_times_json: Vec<serde_json::Value> =
|
||||
courses.iter().map(to_db_meeting_times).collect();
|
||||
let attributes_json: Vec<serde_json::Value> = courses.iter().map(to_db_attributes).collect();
|
||||
|
||||
let rows = sqlx::query_scalar::<_, i32>(
|
||||
r#"
|
||||
INSERT INTO courses (
|
||||
crn, subject, course_number, title, term_code,
|
||||
enrollment, max_enrollment, wait_count, wait_capacity, last_scraped_at
|
||||
enrollment, max_enrollment, wait_count, wait_capacity, last_scraped_at,
|
||||
sequence_number, part_of_term, instructional_method, campus,
|
||||
credit_hours, credit_hour_low, credit_hour_high,
|
||||
cross_list, cross_list_capacity, cross_list_count,
link_identifier, is_section_linked,
meeting_times, attributes
)
SELECT * FROM UNNEST(
SELECT
v.crn, v.subject, v.course_number, v.title, v.term_code,
v.enrollment, v.max_enrollment, v.wait_count, v.wait_capacity, NOW(),
v.sequence_number, v.part_of_term, v.instructional_method, v.campus,
v.credit_hours, v.credit_hour_low, v.credit_hour_high,
v.cross_list, v.cross_list_capacity, v.cross_list_count,
v.link_identifier, v.is_section_linked,
v.meeting_times, v.attributes
FROM UNNEST(
$1::text[], $2::text[], $3::text[], $4::text[], $5::text[],
$6::int4[], $7::int4[], $8::int4[], $9::int4[],
array_fill(NOW()::timestamptz, ARRAY[$10])
) AS t(
$10::text[], $11::text[], $12::text[], $13::text[],
$14::int4[], $15::int4[], $16::int4[],
$17::text[], $18::int4[], $19::int4[],
$20::text[], $21::bool[],
$22::jsonb[], $23::jsonb[]
) AS v(
crn, subject, course_number, title, term_code,
enrollment, max_enrollment, wait_count, wait_capacity, last_scraped_at
enrollment, max_enrollment, wait_count, wait_capacity,
sequence_number, part_of_term, instructional_method, campus,
credit_hours, credit_hour_low, credit_hour_high,
cross_list, cross_list_capacity, cross_list_count,
link_identifier, is_section_linked,
meeting_times, attributes
)
ON CONFLICT (crn, term_code)
DO UPDATE SET
@@ -92,7 +188,22 @@ pub async fn batch_upsert_courses(courses: &[Course], db_pool: &PgPool) -> Resul
max_enrollment = EXCLUDED.max_enrollment,
wait_count = EXCLUDED.wait_count,
wait_capacity = EXCLUDED.wait_capacity,
last_scraped_at = EXCLUDED.last_scraped_at
last_scraped_at = EXCLUDED.last_scraped_at,
sequence_number = EXCLUDED.sequence_number,
part_of_term = EXCLUDED.part_of_term,
instructional_method = EXCLUDED.instructional_method,
campus = EXCLUDED.campus,
credit_hours = EXCLUDED.credit_hours,
credit_hour_low = EXCLUDED.credit_hour_low,
credit_hour_high = EXCLUDED.credit_hour_high,
cross_list = EXCLUDED.cross_list,
cross_list_capacity = EXCLUDED.cross_list_capacity,
cross_list_count = EXCLUDED.cross_list_count,
link_identifier = EXCLUDED.link_identifier,
is_section_linked = EXCLUDED.is_section_linked,
meeting_times = EXCLUDED.meeting_times,
attributes = EXCLUDED.attributes
RETURNING id
"#,
)
.bind(&crns)
@@ -104,19 +215,111 @@ pub async fn batch_upsert_courses(courses: &[Course], db_pool: &PgPool) -> Resul
.bind(&max_enrollments)
.bind(&wait_counts)
.bind(&wait_capacities)
.bind(course_count as i32)
.execute(db_pool)
.bind(&sequence_numbers)
.bind(&parts_of_term)
.bind(&instructional_methods)
.bind(&campuses)
.bind(&credit_hours)
.bind(&credit_hour_lows)
.bind(&credit_hour_highs)
.bind(&cross_lists)
.bind(&cross_list_capacities)
.bind(&cross_list_counts)
.bind(&link_identifiers)
.bind(&is_section_linkeds)
.bind(&meeting_times_json)
.bind(&attributes_json)
.fetch_all(db_pool)
.await
.map_err(|e| anyhow::anyhow!("Failed to batch upsert courses: {}", e))?;

let duration = start.elapsed();
Ok(rows)
}

info!(
courses_count = course_count,
rows_affected = result.rows_affected(),
duration_ms = duration.as_millis(),
"Batch upserted courses"
);
/// Deduplicate and upsert all instructors from the batch.
async fn upsert_instructors(courses: &[Course], db_pool: &PgPool) -> Result<()> {
let mut seen = HashSet::new();
let mut banner_ids = Vec::new();
let mut display_names = Vec::new();
let mut emails: Vec<Option<&str>> = Vec::new();

for course in courses {
for faculty in &course.faculty {
if seen.insert(faculty.banner_id.as_str()) {
banner_ids.push(faculty.banner_id.as_str());
display_names.push(faculty.display_name.as_str());
emails.push(faculty.email_address.as_deref());
}
}
}

if banner_ids.is_empty() {
return Ok(());
}

sqlx::query(
r#"
INSERT INTO instructors (banner_id, display_name, email)
SELECT * FROM UNNEST($1::text[], $2::text[], $3::text[])
ON CONFLICT (banner_id)
DO UPDATE SET
display_name = EXCLUDED.display_name,
email = COALESCE(EXCLUDED.email, instructors.email)
"#,
)
.bind(&banner_ids)
.bind(&display_names)
.bind(&emails)
.execute(db_pool)
.await
.map_err(|e| anyhow::anyhow!("Failed to batch upsert instructors: {}", e))?;

Ok(())
}

/// Link courses to their instructors via the junction table.
async fn upsert_course_instructors(
courses: &[Course],
course_ids: &[i32],
db_pool: &PgPool,
) -> Result<()> {
let mut cids = Vec::new();
let mut iids = Vec::new();
let mut primaries = Vec::new();

for (course, &course_id) in courses.iter().zip(course_ids) {
for faculty in &course.faculty {
cids.push(course_id);
iids.push(faculty.banner_id.as_str());
primaries.push(faculty.primary_indicator);
}
}

if cids.is_empty() {
return Ok(());
}

// Delete existing links for these courses then re-insert.
// This handles instructor changes cleanly.
sqlx::query("DELETE FROM course_instructors WHERE course_id = ANY($1)")
.bind(&cids)
.execute(db_pool)
.await?;

sqlx::query(
r#"
INSERT INTO course_instructors (course_id, instructor_id, is_primary)
SELECT * FROM UNNEST($1::int4[], $2::text[], $3::bool[])
ON CONFLICT (course_id, instructor_id)
DO UPDATE SET is_primary = EXCLUDED.is_primary
"#,
)
.bind(&cids)
.bind(&iids)
.bind(&primaries)
.execute(db_pool)
.await
.map_err(|e| anyhow::anyhow!("Failed to batch upsert course_instructors: {}", e))?;

Ok(())
}

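The batch helpers above all lean on the same Postgres idiom: bind one array per column and let UNNEST zip the arrays back into rows, so an arbitrarily large batch costs a single round-trip. A minimal sketch of the pattern, using a hypothetical `tags` table that is not part of this change:

// Illustrative only: the table, columns, and function are made up.
async fn upsert_tags(pairs: &[(String, String)], db_pool: &sqlx::PgPool) -> anyhow::Result<()> {
    // One parallel Vec per column, in bind order.
    let codes: Vec<&str> = pairs.iter().map(|(c, _)| c.as_str()).collect();
    let labels: Vec<&str> = pairs.iter().map(|(_, l)| l.as_str()).collect();

    sqlx::query(
        r#"
        INSERT INTO tags (code, label)
        SELECT * FROM UNNEST($1::text[], $2::text[])
        ON CONFLICT (code) DO UPDATE SET label = EXCLUDED.label
        "#,
    )
    .bind(&codes)
    .bind(&labels)
    .execute(db_pool)
    .await?;
    Ok(())
}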
@@ -0,0 +1,235 @@
//! Database query functions for courses, used by the web API.

use crate::data::models::{Course, CourseInstructorDetail};
use crate::error::Result;
use sqlx::PgPool;
use std::collections::HashMap;

/// Column to sort search results by.
#[derive(Debug, Clone, Copy, serde::Deserialize)]
#[serde(rename_all = "snake_case")]
pub enum SortColumn {
CourseCode,
Title,
Instructor,
Time,
Seats,
}

/// Sort direction.
#[derive(Debug, Clone, Copy, serde::Deserialize)]
#[serde(rename_all = "snake_case")]
pub enum SortDirection {
Asc,
Desc,
}

/// Shared WHERE clause for course search filters.
///
/// Parameters $1-$8 match the bind order in `search_courses`.
const SEARCH_WHERE: &str = r#"
WHERE term_code = $1
AND ($2::text[] IS NULL OR subject = ANY($2))
AND ($3::text IS NULL OR title_search @@ plainto_tsquery('simple', $3) OR title ILIKE '%' || $3 || '%')
AND ($4::int IS NULL OR course_number::int >= $4)
AND ($5::int IS NULL OR course_number::int <= $5)
AND ($6::bool = false OR max_enrollment > enrollment)
AND ($7::text IS NULL OR instructional_method = $7)
AND ($8::text IS NULL OR campus = $8)
"#;

/// Build a safe ORDER BY clause from typed sort parameters.
///
/// All column names are hardcoded string literals — no caller input is interpolated.
fn sort_clause(column: Option<SortColumn>, direction: Option<SortDirection>) -> String {
let dir = match direction.unwrap_or(SortDirection::Asc) {
SortDirection::Asc => "ASC",
SortDirection::Desc => "DESC",
};

match column {
Some(SortColumn::CourseCode) => {
format!("subject {dir}, course_number {dir}, sequence_number {dir}")
}
Some(SortColumn::Title) => format!("title {dir}"),
Some(SortColumn::Instructor) => {
format!(
"(SELECT i.display_name FROM course_instructors ci \
JOIN instructors i ON i.banner_id = ci.instructor_id \
WHERE ci.course_id = courses.id AND ci.is_primary = true \
LIMIT 1) {dir} NULLS LAST"
)
}
Some(SortColumn::Time) => {
format!("(meeting_times->0->>'begin_time') {dir} NULLS LAST")
}
Some(SortColumn::Seats) => {
format!("(max_enrollment - enrollment) {dir}")
}
None => "subject ASC, course_number ASC, sequence_number ASC".to_string(),
}
}

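For a concrete sense of what sort_clause produces, a couple of illustrative checks (not part of this change), derived from the arms above:

// Illustrative assertions only.
assert_eq!(
    sort_clause(Some(SortColumn::Seats), Some(SortDirection::Desc)),
    "(max_enrollment - enrollment) DESC"
);
assert_eq!(
    sort_clause(None, None),
    "subject ASC, course_number ASC, sequence_number ASC"
);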
/// Search courses by term with optional filters.
///
/// Returns `(courses, total_count)` for pagination. Uses FTS tsvector for word
/// search and falls back to trigram ILIKE for substring matching.
#[allow(clippy::too_many_arguments)]
pub async fn search_courses(
db_pool: &PgPool,
term_code: &str,
subject: Option<&[String]>,
title_query: Option<&str>,
course_number_low: Option<i32>,
course_number_high: Option<i32>,
open_only: bool,
instructional_method: Option<&str>,
campus: Option<&str>,
limit: i32,
offset: i32,
sort_by: Option<SortColumn>,
sort_dir: Option<SortDirection>,
) -> Result<(Vec<Course>, i64)> {
let order_by = sort_clause(sort_by, sort_dir);

let data_query =
format!("SELECT * FROM courses {SEARCH_WHERE} ORDER BY {order_by} LIMIT $9 OFFSET $10");
let count_query = format!("SELECT COUNT(*) FROM courses {SEARCH_WHERE}");

let courses = sqlx::query_as::<_, Course>(&data_query)
.bind(term_code)
.bind(subject)
.bind(title_query)
.bind(course_number_low)
.bind(course_number_high)
.bind(open_only)
.bind(instructional_method)
.bind(campus)
.bind(limit)
.bind(offset)
.fetch_all(db_pool)
.await?;

let total: (i64,) = sqlx::query_as(&count_query)
.bind(term_code)
.bind(subject)
.bind(title_query)
.bind(course_number_low)
.bind(course_number_high)
.bind(open_only)
.bind(instructional_method)
.bind(campus)
.fetch_one(db_pool)
.await?;

Ok((courses, total.0))
}

/// Get a single course by CRN and term.
|
||||
pub async fn get_course_by_crn(
|
||||
db_pool: &PgPool,
|
||||
crn: &str,
|
||||
term_code: &str,
|
||||
) -> Result<Option<Course>> {
|
||||
let course =
|
||||
sqlx::query_as::<_, Course>("SELECT * FROM courses WHERE crn = $1 AND term_code = $2")
|
||||
.bind(crn)
|
||||
.bind(term_code)
|
||||
.fetch_optional(db_pool)
|
||||
.await?;
|
||||
Ok(course)
|
||||
}
|
||||
|
||||
/// Get instructors for a single course by course ID.
|
||||
pub async fn get_course_instructors(
|
||||
db_pool: &PgPool,
|
||||
course_id: i32,
|
||||
) -> Result<Vec<CourseInstructorDetail>> {
|
||||
let rows = sqlx::query_as::<_, CourseInstructorDetail>(
|
||||
r#"
|
||||
SELECT i.banner_id, i.display_name, i.email, ci.is_primary,
|
||||
rp.avg_rating, rp.num_ratings,
|
||||
ci.course_id
|
||||
FROM course_instructors ci
|
||||
JOIN instructors i ON i.banner_id = ci.instructor_id
|
||||
LEFT JOIN rmp_professors rp ON rp.legacy_id = i.rmp_legacy_id
|
||||
WHERE ci.course_id = $1
|
||||
ORDER BY ci.is_primary DESC, i.display_name
|
||||
"#,
|
||||
)
|
||||
.bind(course_id)
|
||||
.fetch_all(db_pool)
|
||||
.await?;
|
||||
Ok(rows)
|
||||
}
|
||||
|
||||
/// Batch-fetch instructors for multiple courses in a single query.
|
||||
///
|
||||
/// Returns a map of `course_id → Vec<CourseInstructorDetail>`.
|
||||
pub async fn get_instructors_for_courses(
|
||||
db_pool: &PgPool,
|
||||
course_ids: &[i32],
|
||||
) -> Result<HashMap<i32, Vec<CourseInstructorDetail>>> {
|
||||
if course_ids.is_empty() {
|
||||
return Ok(HashMap::new());
|
||||
}
|
||||
|
||||
let rows = sqlx::query_as::<_, CourseInstructorDetail>(
|
||||
r#"
|
||||
SELECT i.banner_id, i.display_name, i.email, ci.is_primary,
|
||||
rp.avg_rating, rp.num_ratings,
|
||||
ci.course_id
|
||||
FROM course_instructors ci
|
||||
JOIN instructors i ON i.banner_id = ci.instructor_id
|
||||
LEFT JOIN rmp_professors rp ON rp.legacy_id = i.rmp_legacy_id
|
||||
WHERE ci.course_id = ANY($1)
|
||||
ORDER BY ci.course_id, ci.is_primary DESC, i.display_name
|
||||
"#,
|
||||
)
|
||||
.bind(course_ids)
|
||||
.fetch_all(db_pool)
|
||||
.await?;
|
||||
|
||||
let mut map: HashMap<i32, Vec<CourseInstructorDetail>> = HashMap::new();
|
||||
for row in rows {
|
||||
// course_id is always present in the batch query
|
||||
let cid = row.course_id.unwrap_or_default();
|
||||
map.entry(cid).or_default().push(row);
|
||||
}
|
||||
Ok(map)
|
||||
}
|
||||
|
||||
/// Get subjects for a term, sorted by total enrollment (descending).
|
||||
///
|
||||
/// Returns only subjects that have courses in the given term, with their
|
||||
/// descriptions from reference_data and enrollment totals for ranking.
|
||||
pub async fn get_subjects_by_enrollment(
|
||||
db_pool: &PgPool,
|
||||
term_code: &str,
|
||||
) -> Result<Vec<(String, String, i64)>> {
|
||||
let rows: Vec<(String, String, i64)> = sqlx::query_as(
|
||||
r#"
|
||||
SELECT c.subject,
|
||||
COALESCE(rd.description, c.subject),
|
||||
COALESCE(SUM(c.enrollment), 0) as total_enrollment
|
||||
FROM courses c
|
||||
LEFT JOIN reference_data rd ON rd.category = 'subject' AND rd.code = c.subject
|
||||
WHERE c.term_code = $1
|
||||
GROUP BY c.subject, rd.description
|
||||
ORDER BY total_enrollment DESC
|
||||
"#,
|
||||
)
|
||||
.bind(term_code)
|
||||
.fetch_all(db_pool)
|
||||
.await?;
|
||||
Ok(rows)
|
||||
}
|
||||
|
||||
/// Get all distinct term codes that have courses in the DB.
|
||||
pub async fn get_available_terms(db_pool: &PgPool) -> Result<Vec<String>> {
|
||||
let rows: Vec<(String,)> =
|
||||
sqlx::query_as("SELECT DISTINCT term_code FROM courses ORDER BY term_code DESC")
|
||||
.fetch_all(db_pool)
|
||||
.await?;
|
||||
Ok(rows.into_iter().map(|(tc,)| tc).collect())
|
||||
}
|
||||
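The batch variant exists so a search results page can attach instructors without an N+1 query; a rough sketch of the intended flow (the wrapper function and the `course.id` field are assumed here, not shown in this change):

// Hypothetical glue: fetch one page, then batch-load instructors for it.
async fn search_with_instructors(
    db_pool: &PgPool,
    term_code: &str,
) -> Result<Vec<(Course, Vec<CourseInstructorDetail>)>> {
    let (courses, _total) = search_courses(
        db_pool, term_code, None, None, None, None, false, None, None, 25, 0, None, None,
    )
    .await?;

    let ids: Vec<i32> = courses.iter().map(|c| c.id).collect();
    let mut by_course = get_instructors_for_courses(db_pool, &ids).await?;

    Ok(courses
        .into_iter()
        .map(|c| {
            let instructors = by_course.remove(&c.id).unwrap_or_default();
            (c, instructors)
        })
        .collect())
}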
@@ -1,5 +1,10 @@
|
||||
//! Database models and schema.
|
||||
|
||||
pub mod batch;
|
||||
pub mod courses;
|
||||
pub mod models;
|
||||
pub mod reference;
|
||||
pub mod rmp;
|
||||
pub mod scrape_jobs;
|
||||
pub mod sessions;
|
||||
pub mod users;
|
||||
|
||||
@@ -1,7 +1,32 @@
|
||||
//! `sqlx` models for the database schema.
|
||||
|
||||
use chrono::{DateTime, Utc};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use serde_json::Value;
|
||||
use ts_rs::TS;
|
||||
|
||||
/// Represents a meeting time stored as JSONB in the courses table.
|
||||
#[derive(Debug, Clone, Serialize, Deserialize, TS)]
|
||||
#[ts(export)]
|
||||
pub struct DbMeetingTime {
|
||||
pub begin_time: Option<String>,
|
||||
pub end_time: Option<String>,
|
||||
pub start_date: String,
|
||||
pub end_date: String,
|
||||
pub monday: bool,
|
||||
pub tuesday: bool,
|
||||
pub wednesday: bool,
|
||||
pub thursday: bool,
|
||||
pub friday: bool,
|
||||
pub saturday: bool,
|
||||
pub sunday: bool,
|
||||
pub building: Option<String>,
|
||||
pub building_description: Option<String>,
|
||||
pub room: Option<String>,
|
||||
pub campus: Option<String>,
|
||||
pub meeting_type: String,
|
||||
pub meeting_schedule_type: String,
|
||||
}
|
||||
|
||||
#[allow(dead_code)]
|
||||
#[derive(sqlx::FromRow, Debug, Clone)]
|
||||
@@ -17,6 +42,59 @@ pub struct Course {
|
||||
pub wait_count: i32,
|
||||
pub wait_capacity: i32,
|
||||
pub last_scraped_at: DateTime<Utc>,
|
||||
// New scalar fields
|
||||
pub sequence_number: Option<String>,
|
||||
pub part_of_term: Option<String>,
|
||||
pub instructional_method: Option<String>,
|
||||
pub campus: Option<String>,
|
||||
pub credit_hours: Option<i32>,
|
||||
pub credit_hour_low: Option<i32>,
|
||||
pub credit_hour_high: Option<i32>,
|
||||
pub cross_list: Option<String>,
|
||||
pub cross_list_capacity: Option<i32>,
|
||||
pub cross_list_count: Option<i32>,
|
||||
pub link_identifier: Option<String>,
|
||||
pub is_section_linked: Option<bool>,
|
||||
// JSONB fields
|
||||
pub meeting_times: Value,
|
||||
pub attributes: Value,
|
||||
}
|
||||
|
||||
#[allow(dead_code)]
|
||||
#[derive(sqlx::FromRow, Debug, Clone)]
|
||||
pub struct Instructor {
|
||||
pub banner_id: String,
|
||||
pub display_name: String,
|
||||
pub email: Option<String>,
|
||||
}
|
||||
|
||||
#[allow(dead_code)]
|
||||
#[derive(sqlx::FromRow, Debug, Clone)]
|
||||
pub struct CourseInstructor {
|
||||
pub course_id: i32,
|
||||
pub instructor_id: String,
|
||||
pub is_primary: bool,
|
||||
}
|
||||
|
||||
/// Joined instructor data for a course (from course_instructors + instructors + rmp_professors).
|
||||
#[derive(sqlx::FromRow, Debug, Clone)]
|
||||
pub struct CourseInstructorDetail {
|
||||
pub banner_id: String,
|
||||
pub display_name: String,
|
||||
pub email: Option<String>,
|
||||
pub is_primary: bool,
|
||||
pub avg_rating: Option<f32>,
|
||||
pub num_ratings: Option<i32>,
|
||||
/// Present when fetched via batch query; `None` for single-course queries.
|
||||
pub course_id: Option<i32>,
|
||||
}
|
||||
|
||||
#[allow(dead_code)]
|
||||
#[derive(sqlx::FromRow, Debug, Clone)]
|
||||
pub struct ReferenceData {
|
||||
pub category: String,
|
||||
pub code: String,
|
||||
pub description: String,
|
||||
}
|
||||
|
||||
#[allow(dead_code)]
|
||||
@@ -77,3 +155,27 @@ pub struct ScrapeJob {
|
||||
/// Maximum number of retry attempts allowed (non-negative, enforced by CHECK constraint)
|
||||
pub max_retries: i32,
|
||||
}
|
||||
|
||||
/// A user authenticated via Discord OAuth.
|
||||
#[derive(sqlx::FromRow, Debug, Clone, Serialize, Deserialize, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[ts(export)]
|
||||
pub struct User {
|
||||
pub discord_id: i64,
|
||||
pub discord_username: String,
|
||||
pub discord_avatar_hash: Option<String>,
|
||||
pub is_admin: bool,
|
||||
pub created_at: DateTime<Utc>,
|
||||
pub updated_at: DateTime<Utc>,
|
||||
}
|
||||
|
||||
/// A server-side session for an authenticated user.
|
||||
#[allow(dead_code)] // Fields read via sqlx::FromRow; some only used in DB queries
|
||||
#[derive(sqlx::FromRow, Debug, Clone)]
|
||||
pub struct UserSession {
|
||||
pub id: String,
|
||||
pub user_id: i64,
|
||||
pub created_at: DateTime<Utc>,
|
||||
pub expires_at: DateTime<Utc>,
|
||||
pub last_active_at: DateTime<Utc>,
|
||||
}
|
||||
|
||||
@@ -0,0 +1,57 @@
|
||||
//! Database operations for the `reference_data` table (code→description lookups).
|
||||
|
||||
use crate::data::models::ReferenceData;
|
||||
use crate::error::Result;
|
||||
use html_escape::decode_html_entities;
|
||||
use sqlx::PgPool;
|
||||
|
||||
/// Batch upsert reference data entries.
|
||||
pub async fn batch_upsert(entries: &[ReferenceData], db_pool: &PgPool) -> Result<()> {
|
||||
if entries.is_empty() {
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
let categories: Vec<&str> = entries.iter().map(|e| e.category.as_str()).collect();
|
||||
let codes: Vec<&str> = entries.iter().map(|e| e.code.as_str()).collect();
|
||||
let descriptions: Vec<String> = entries
|
||||
.iter()
|
||||
.map(|e| decode_html_entities(&e.description).into_owned())
|
||||
.collect();
|
||||
|
||||
sqlx::query(
|
||||
r#"
|
||||
INSERT INTO reference_data (category, code, description)
|
||||
SELECT * FROM UNNEST($1::text[], $2::text[], $3::text[])
|
||||
ON CONFLICT (category, code)
|
||||
DO UPDATE SET description = EXCLUDED.description
|
||||
"#,
|
||||
)
|
||||
.bind(&categories)
|
||||
.bind(&codes)
|
||||
.bind(&descriptions)
|
||||
.execute(db_pool)
|
||||
.await?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Get all reference data entries for a category.
|
||||
pub async fn get_by_category(category: &str, db_pool: &PgPool) -> Result<Vec<ReferenceData>> {
|
||||
let rows = sqlx::query_as::<_, ReferenceData>(
|
||||
"SELECT category, code, description FROM reference_data WHERE category = $1 ORDER BY description",
|
||||
)
|
||||
.bind(category)
|
||||
.fetch_all(db_pool)
|
||||
.await?;
|
||||
Ok(rows)
|
||||
}
|
||||
|
||||
/// Get all reference data entries (for cache initialization).
|
||||
pub async fn get_all(db_pool: &PgPool) -> Result<Vec<ReferenceData>> {
|
||||
let rows = sqlx::query_as::<_, ReferenceData>(
|
||||
"SELECT category, code, description FROM reference_data ORDER BY category, description",
|
||||
)
|
||||
.fetch_all(db_pool)
|
||||
.await?;
|
||||
Ok(rows)
|
||||
}
|
||||
+311
@@ -0,0 +1,311 @@
|
||||
//! Database operations for RateMyProfessors data.
|
||||
|
||||
use crate::error::Result;
|
||||
use crate::rmp::RmpProfessor;
|
||||
use sqlx::PgPool;
|
||||
use std::collections::{HashMap, HashSet};
|
||||
use tracing::{debug, info, warn};
|
||||
|
||||
/// Bulk upsert RMP professors using the UNNEST pattern.
|
||||
///
|
||||
/// Deduplicates by `legacy_id` before inserting — the RMP API can return
|
||||
/// the same professor on multiple pages.
|
||||
pub async fn batch_upsert_rmp_professors(
|
||||
professors: &[RmpProfessor],
|
||||
db_pool: &PgPool,
|
||||
) -> Result<()> {
|
||||
if professors.is_empty() {
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
// Deduplicate: keep last occurrence per legacy_id (latest page wins)
|
||||
let mut seen = HashSet::new();
|
||||
let deduped: Vec<&RmpProfessor> = professors
|
||||
.iter()
|
||||
.rev()
|
||||
.filter(|p| seen.insert(p.legacy_id))
|
||||
.collect();
|
||||
|
||||
let legacy_ids: Vec<i32> = deduped.iter().map(|p| p.legacy_id).collect();
|
||||
let graphql_ids: Vec<&str> = deduped.iter().map(|p| p.graphql_id.as_str()).collect();
|
||||
let first_names: Vec<String> = deduped
|
||||
.iter()
|
||||
.map(|p| p.first_name.trim().to_string())
|
||||
.collect();
|
||||
let first_name_refs: Vec<&str> = first_names.iter().map(|s| s.as_str()).collect();
|
||||
let last_names: Vec<String> = deduped
|
||||
.iter()
|
||||
.map(|p| p.last_name.trim().to_string())
|
||||
.collect();
|
||||
let last_name_refs: Vec<&str> = last_names.iter().map(|s| s.as_str()).collect();
|
||||
let departments: Vec<Option<&str>> = deduped.iter().map(|p| p.department.as_deref()).collect();
|
||||
let avg_ratings: Vec<Option<f32>> = deduped.iter().map(|p| p.avg_rating).collect();
|
||||
let avg_difficulties: Vec<Option<f32>> = deduped.iter().map(|p| p.avg_difficulty).collect();
|
||||
let num_ratings: Vec<i32> = deduped.iter().map(|p| p.num_ratings).collect();
|
||||
let would_take_again_pcts: Vec<Option<f32>> =
|
||||
deduped.iter().map(|p| p.would_take_again_pct).collect();
|
||||
|
||||
sqlx::query(
|
||||
r#"
|
||||
INSERT INTO rmp_professors (
|
||||
legacy_id, graphql_id, first_name, last_name, department,
|
||||
avg_rating, avg_difficulty, num_ratings, would_take_again_pct,
|
||||
last_synced_at
|
||||
)
|
||||
SELECT
|
||||
v.legacy_id, v.graphql_id, v.first_name, v.last_name, v.department,
|
||||
v.avg_rating, v.avg_difficulty, v.num_ratings, v.would_take_again_pct,
|
||||
NOW()
|
||||
FROM UNNEST(
|
||||
$1::int4[], $2::text[], $3::text[], $4::text[], $5::text[],
|
||||
$6::real[], $7::real[], $8::int4[], $9::real[]
|
||||
) AS v(
|
||||
legacy_id, graphql_id, first_name, last_name, department,
|
||||
avg_rating, avg_difficulty, num_ratings, would_take_again_pct
|
||||
)
|
||||
ON CONFLICT (legacy_id)
|
||||
DO UPDATE SET
|
||||
graphql_id = EXCLUDED.graphql_id,
|
||||
first_name = EXCLUDED.first_name,
|
||||
last_name = EXCLUDED.last_name,
|
||||
department = EXCLUDED.department,
|
||||
avg_rating = EXCLUDED.avg_rating,
|
||||
avg_difficulty = EXCLUDED.avg_difficulty,
|
||||
num_ratings = EXCLUDED.num_ratings,
|
||||
would_take_again_pct = EXCLUDED.would_take_again_pct,
|
||||
last_synced_at = EXCLUDED.last_synced_at
|
||||
"#,
|
||||
)
|
||||
.bind(&legacy_ids)
|
||||
.bind(&graphql_ids)
|
||||
.bind(&first_name_refs)
|
||||
.bind(&last_name_refs)
|
||||
.bind(&departments)
|
||||
.bind(&avg_ratings)
|
||||
.bind(&avg_difficulties)
|
||||
.bind(&num_ratings)
|
||||
.bind(&would_take_again_pcts)
|
||||
.execute(db_pool)
|
||||
.await
|
||||
.map_err(|e| anyhow::anyhow!("Failed to batch upsert RMP professors: {}", e))?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Normalize a name for matching: lowercase, trim, strip trailing periods.
|
||||
fn normalize(s: &str) -> String {
|
||||
s.trim().to_lowercase().trim_end_matches('.').to_string()
|
||||
}
|
||||
|
||||
/// Parse Banner's "Last, First Middle" display name into (last, first) tokens.
|
||||
///
|
||||
/// Returns `None` if the format is unparseable (no comma, empty parts).
|
||||
fn parse_display_name(display_name: &str) -> Option<(String, String)> {
|
||||
let (last_part, first_part) = display_name.split_once(',')?;
|
||||
let last = normalize(last_part);
|
||||
// Take only the first token of the first-name portion to drop middle names/initials.
|
||||
let first = normalize(first_part.split_whitespace().next()?);
|
||||
if last.is_empty() || first.is_empty() {
|
||||
return None;
|
||||
}
|
||||
Some((last, first))
|
||||
}
|
||||
|
||||
/// Auto-match instructors to RMP professors by normalized name.
|
||||
///
|
||||
/// Loads all pending instructors and all RMP professors, then matches in Rust
|
||||
/// using normalized name comparison. Only assigns a match when exactly one RMP
|
||||
/// professor matches a given instructor.
|
||||
pub async fn auto_match_instructors(db_pool: &PgPool) -> Result<u64> {
|
||||
// Load pending instructors
|
||||
let instructors: Vec<(String, String)> = sqlx::query_as(
|
||||
"SELECT banner_id, display_name FROM instructors WHERE rmp_match_status = 'pending'",
|
||||
)
|
||||
.fetch_all(db_pool)
|
||||
.await?;
|
||||
|
||||
if instructors.is_empty() {
|
||||
info!(matched = 0, "No pending instructors to match");
|
||||
return Ok(0);
|
||||
}
|
||||
|
||||
// Load all RMP professors
|
||||
let professors: Vec<(i32, String, String)> =
|
||||
sqlx::query_as("SELECT legacy_id, first_name, last_name FROM rmp_professors")
|
||||
.fetch_all(db_pool)
|
||||
.await?;
|
||||
|
||||
// Build a lookup: (normalized_last, normalized_first) -> list of legacy_ids
|
||||
let mut rmp_index: HashMap<(String, String), Vec<i32>> = HashMap::new();
|
||||
for (legacy_id, first, last) in &professors {
|
||||
let key = (normalize(last), normalize(first));
|
||||
rmp_index.entry(key).or_default().push(*legacy_id);
|
||||
}
|
||||
|
||||
// Match each instructor
|
||||
let mut matches: Vec<(i32, String)> = Vec::new(); // (legacy_id, banner_id)
|
||||
let mut no_comma = 0u64;
|
||||
let mut no_match = 0u64;
|
||||
let mut ambiguous = 0u64;
|
||||
|
||||
for (banner_id, display_name) in &instructors {
|
||||
let Some((last, first)) = parse_display_name(display_name) else {
|
||||
no_comma += 1;
|
||||
continue;
|
||||
};
|
||||
|
||||
let key = (last, first);
|
||||
match rmp_index.get(&key) {
|
||||
Some(ids) if ids.len() == 1 => {
|
||||
matches.push((ids[0], banner_id.clone()));
|
||||
}
|
||||
Some(ids) => {
|
||||
ambiguous += 1;
|
||||
debug!(
|
||||
banner_id,
|
||||
display_name,
|
||||
candidates = ids.len(),
|
||||
"Ambiguous RMP match, skipping"
|
||||
);
|
||||
}
|
||||
None => {
|
||||
no_match += 1;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if no_comma > 0 || ambiguous > 0 {
|
||||
warn!(
|
||||
total_pending = instructors.len(),
|
||||
no_comma,
|
||||
no_match,
|
||||
ambiguous,
|
||||
matched = matches.len(),
|
||||
"RMP matching diagnostics"
|
||||
);
|
||||
}
|
||||
|
||||
// Batch update matches
|
||||
if matches.is_empty() {
|
||||
info!(matched = 0, "Auto-matched instructors to RMP professors");
|
||||
return Ok(0);
|
||||
}
|
||||
|
||||
let legacy_ids: Vec<i32> = matches.iter().map(|(id, _)| *id).collect();
|
||||
let banner_ids: Vec<&str> = matches.iter().map(|(_, bid)| bid.as_str()).collect();
|
||||
|
||||
let result = sqlx::query(
|
||||
r#"
|
||||
UPDATE instructors i
|
||||
SET
|
||||
rmp_legacy_id = m.legacy_id,
|
||||
rmp_match_status = 'auto'
|
||||
FROM UNNEST($1::int4[], $2::text[]) AS m(legacy_id, banner_id)
|
||||
WHERE i.banner_id = m.banner_id
|
||||
"#,
|
||||
)
|
||||
.bind(&legacy_ids)
|
||||
.bind(&banner_ids)
|
||||
.execute(db_pool)
|
||||
.await
|
||||
.map_err(|e| anyhow::anyhow!("Failed to update instructor RMP matches: {}", e))?;
|
||||
|
||||
let matched = result.rows_affected();
|
||||
info!(matched, "Auto-matched instructors to RMP professors");
|
||||
Ok(matched)
|
||||
}
|
||||
|
||||
/// Retrieve RMP rating data for an instructor by banner_id.
|
||||
///
|
||||
/// Returns `(avg_rating, num_ratings)` if the instructor has an RMP match.
|
||||
#[allow(dead_code)]
|
||||
pub async fn get_instructor_rmp_data(
|
||||
db_pool: &PgPool,
|
||||
banner_id: &str,
|
||||
) -> Result<Option<(f32, i32)>> {
|
||||
let row: Option<(f32, i32)> = sqlx::query_as(
|
||||
r#"
|
||||
SELECT rp.avg_rating, rp.num_ratings
|
||||
FROM instructors i
|
||||
JOIN rmp_professors rp ON rp.legacy_id = i.rmp_legacy_id
|
||||
WHERE i.banner_id = $1
|
||||
AND rp.avg_rating IS NOT NULL
|
||||
"#,
|
||||
)
|
||||
.bind(banner_id)
|
||||
.fetch_optional(db_pool)
|
||||
.await?;
|
||||
Ok(row)
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn parse_standard_name() {
|
||||
assert_eq!(
|
||||
parse_display_name("Smith, John"),
|
||||
Some(("smith".into(), "john".into()))
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn parse_name_with_middle() {
|
||||
assert_eq!(
|
||||
parse_display_name("Smith, John David"),
|
||||
Some(("smith".into(), "john".into()))
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn parse_name_with_middle_initial() {
|
||||
assert_eq!(
|
||||
parse_display_name("Garcia, Maria L."),
|
||||
Some(("garcia".into(), "maria".into()))
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn parse_name_with_suffix_in_last() {
|
||||
// Banner may encode "Jr." as part of the last name.
|
||||
// normalize() strips trailing periods so "Jr." becomes "jr".
|
||||
assert_eq!(
|
||||
parse_display_name("Smith Jr., James"),
|
||||
Some(("smith jr".into(), "james".into()))
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn parse_no_comma_returns_none() {
|
||||
assert_eq!(parse_display_name("SingleName"), None);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn parse_empty_first_returns_none() {
|
||||
assert_eq!(parse_display_name("Smith,"), None);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn parse_empty_last_returns_none() {
|
||||
assert_eq!(parse_display_name(", John"), None);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn parse_extra_whitespace() {
|
||||
assert_eq!(
|
||||
parse_display_name(" Doe , Jane Marie "),
|
||||
Some(("doe".into(), "jane".into()))
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn normalize_trims_and_lowercases() {
|
||||
assert_eq!(normalize(" FOO "), "foo");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn normalize_strips_trailing_period() {
|
||||
assert_eq!(normalize("Jr."), "jr");
|
||||
}
|
||||
}
|
||||
+20
-13
@@ -134,7 +134,7 @@ pub async fn find_existing_job_payloads(
Ok(existing_payloads)
}

/// Batch insert scrape jobs in a single transaction.
/// Batch insert scrape jobs using UNNEST for a single round-trip.
///
/// All jobs are inserted with `execute_at` set to the current time.
///
@@ -149,22 +149,29 @@ pub async fn batch_insert_jobs(
return Ok(());
}

let now = chrono::Utc::now();
let mut tx = db_pool.begin().await?;
let mut target_types: Vec<String> = Vec::with_capacity(jobs.len());
let mut payloads: Vec<serde_json::Value> = Vec::with_capacity(jobs.len());
let mut priorities: Vec<String> = Vec::with_capacity(jobs.len());

for (payload, target_type, priority) in jobs {
sqlx::query(
"INSERT INTO scrape_jobs (target_type, target_payload, priority, execute_at) VALUES ($1, $2, $3, $4)"
)
.bind(target_type)
.bind(payload)
.bind(priority)
.bind(now)
.execute(&mut *tx)
.await?;
target_types.push(format!("{target_type:?}"));
payloads.push(payload.clone());
priorities.push(format!("{priority:?}"));
}

tx.commit().await?;
sqlx::query(
r#"
INSERT INTO scrape_jobs (target_type, target_payload, priority, execute_at)
SELECT v.target_type::target_type, v.payload, v.priority::scrape_priority, NOW()
FROM UNNEST($1::text[], $2::jsonb[], $3::text[])
AS v(target_type, payload, priority)
"#,
)
.bind(&target_types)
.bind(&payloads)
.bind(&priorities)
.execute(db_pool)
.await?;

Ok(())
}

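One detail worth noting in the UNNEST rewrite above: the enums are round-tripped through text, `format!("{target_type:?}")` on the Rust side and a `::target_type` / `::scrape_priority` cast on the SQL side, which only lines up if the `Debug` output matches the Postgres enum labels exactly. A hedged sketch of the assumed correspondence (the variant and label names below are illustrative; the real definitions live in data::models and are not shown in this change):

// Illustrative only: if the Postgres labels differed in case or spelling,
// the cast in batch_insert_jobs would reject the value at runtime.
#[derive(Debug)]
enum TargetType {
    Subject, // format!("{:?}") -> "Subject", cast via '...'::target_type
}

#[derive(Debug)]
enum ScrapePriority {
    High, // format!("{:?}") -> "High", cast via '...'::scrape_priority
}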
@@ -0,0 +1,90 @@
//! Database query functions for user sessions.

use anyhow::Context;
use rand::Rng;
use sqlx::PgPool;

use super::models::UserSession;
use crate::error::Result;

/// Generate a cryptographically random 32-byte hex token.
fn generate_token() -> String {
let bytes: [u8; 32] = rand::rng().random();
bytes.iter().map(|b| format!("{b:02x}")).collect()
}

/// Create a new session for a user with the given duration.
pub async fn create_session(
pool: &PgPool,
user_id: i64,
duration: std::time::Duration,
) -> Result<UserSession> {
let token = generate_token();
let duration_secs = duration.as_secs() as i64;

sqlx::query_as::<_, UserSession>(
r#"
INSERT INTO user_sessions (id, user_id, expires_at)
VALUES ($1, $2, now() + make_interval(secs => $3::double precision))
RETURNING *
"#,
)
.bind(&token)
.bind(user_id)
.bind(duration_secs as f64)
.fetch_one(pool)
.await
.context("failed to create session")
}

/// Fetch a session by token, only if it has not expired.
pub async fn get_session(pool: &PgPool, token: &str) -> Result<Option<UserSession>> {
sqlx::query_as::<_, UserSession>(
"SELECT * FROM user_sessions WHERE id = $1 AND expires_at > now()",
)
.bind(token)
.fetch_optional(pool)
.await
.context("failed to get session")
}

/// Update the last-active timestamp for a session.
pub async fn touch_session(pool: &PgPool, token: &str) -> Result<()> {
sqlx::query("UPDATE user_sessions SET last_active_at = now() WHERE id = $1")
.bind(token)
.execute(pool)
.await
.context("failed to touch session")?;
Ok(())
}

/// Delete a session by token.
pub async fn delete_session(pool: &PgPool, token: &str) -> Result<()> {
sqlx::query("DELETE FROM user_sessions WHERE id = $1")
.bind(token)
.execute(pool)
.await
.context("failed to delete session")?;
Ok(())
}

/// Delete all sessions for a user. Returns the number of sessions deleted.
#[allow(dead_code)] // Available for admin user-deletion flow
pub async fn delete_user_sessions(pool: &PgPool, user_id: i64) -> Result<u64> {
let result = sqlx::query("DELETE FROM user_sessions WHERE user_id = $1")
.bind(user_id)
.execute(pool)
.await
.context("failed to delete user sessions")?;
Ok(result.rows_affected())
}

/// Delete all expired sessions. Returns the number of sessions cleaned up.
#[allow(dead_code)] // Called by SessionCache::cleanup_expired (not yet wired to periodic task)
pub async fn cleanup_expired(pool: &PgPool) -> Result<u64> {
let result = sqlx::query("DELETE FROM user_sessions WHERE expires_at <= now()")
.execute(pool)
.await
.context("failed to cleanup expired sessions")?;
Ok(result.rows_affected())
}

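Together these cover the whole session lifecycle; a rough sketch of how a handler layer might chain them (the wrapper functions and the 30-day duration are illustrative, not part of this change):

// Hypothetical glue showing the intended call order.
use std::time::Duration;

async fn login(pool: &PgPool, user_id: i64) -> Result<String> {
    // Issue a session; the returned id doubles as the cookie value.
    let session = create_session(pool, user_id, Duration::from_secs(30 * 24 * 60 * 60)).await?;
    Ok(session.id)
}

async fn authenticate(pool: &PgPool, token: &str) -> Result<Option<i64>> {
    // Expired tokens come back as None; live ones get their activity recorded.
    Ok(match get_session(pool, token).await? {
        Some(session) => {
            touch_session(pool, token).await?;
            Some(session.user_id)
        }
        None => None,
    })
}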
@@ -0,0 +1,86 @@
|
||||
//! Database query functions for users.
|
||||
|
||||
use anyhow::Context;
|
||||
use sqlx::PgPool;
|
||||
|
||||
use super::models::User;
|
||||
use crate::error::Result;
|
||||
|
||||
/// Insert a new user or update username/avatar on conflict.
|
||||
pub async fn upsert_user(
|
||||
pool: &PgPool,
|
||||
discord_id: i64,
|
||||
username: &str,
|
||||
avatar_hash: Option<&str>,
|
||||
) -> Result<User> {
|
||||
sqlx::query_as::<_, User>(
|
||||
r#"
|
||||
INSERT INTO users (discord_id, discord_username, discord_avatar_hash)
|
||||
VALUES ($1, $2, $3)
|
||||
ON CONFLICT (discord_id) DO UPDATE
|
||||
SET discord_username = EXCLUDED.discord_username,
|
||||
discord_avatar_hash = EXCLUDED.discord_avatar_hash,
|
||||
updated_at = now()
|
||||
RETURNING *
|
||||
"#,
|
||||
)
|
||||
.bind(discord_id)
|
||||
.bind(username)
|
||||
.bind(avatar_hash)
|
||||
.fetch_one(pool)
|
||||
.await
|
||||
.context("failed to upsert user")
|
||||
}
|
||||
|
||||
/// Fetch a user by Discord ID.
|
||||
pub async fn get_user(pool: &PgPool, discord_id: i64) -> Result<Option<User>> {
|
||||
sqlx::query_as::<_, User>("SELECT * FROM users WHERE discord_id = $1")
|
||||
.bind(discord_id)
|
||||
.fetch_optional(pool)
|
||||
.await
|
||||
.context("failed to get user")
|
||||
}
|
||||
|
||||
/// List all users ordered by creation date (newest first).
|
||||
pub async fn list_users(pool: &PgPool) -> Result<Vec<User>> {
|
||||
sqlx::query_as::<_, User>("SELECT * FROM users ORDER BY created_at DESC")
|
||||
.fetch_all(pool)
|
||||
.await
|
||||
.context("failed to list users")
|
||||
}
|
||||
|
||||
/// Set the admin flag for a user, returning the updated user if found.
|
||||
pub async fn set_admin(pool: &PgPool, discord_id: i64, is_admin: bool) -> Result<Option<User>> {
|
||||
sqlx::query_as::<_, User>(
|
||||
r#"
|
||||
UPDATE users
|
||||
SET is_admin = $2, updated_at = now()
|
||||
WHERE discord_id = $1
|
||||
RETURNING *
|
||||
"#,
|
||||
)
|
||||
.bind(discord_id)
|
||||
.bind(is_admin)
|
||||
.fetch_optional(pool)
|
||||
.await
|
||||
.context("failed to set admin status")
|
||||
}
|
||||
|
||||
/// Ensure a seed admin exists. Upserts with `is_admin = true` and a placeholder
|
||||
/// username that will be replaced on first OAuth login.
|
||||
pub async fn ensure_seed_admin(pool: &PgPool, discord_id: i64) -> Result<User> {
|
||||
sqlx::query_as::<_, User>(
|
||||
r#"
|
||||
INSERT INTO users (discord_id, discord_username, is_admin)
|
||||
VALUES ($1, 'seed-admin', true)
|
||||
ON CONFLICT (discord_id) DO UPDATE
|
||||
SET is_admin = true,
|
||||
updated_at = now()
|
||||
RETURNING *
|
||||
"#,
|
||||
)
|
||||
.bind(discord_id)
|
||||
.fetch_one(pool)
|
||||
.await
|
||||
.context("failed to ensure seed admin")
|
||||
}
|
||||
@@ -7,6 +7,7 @@ pub mod data;
|
||||
pub mod error;
|
||||
pub mod formatter;
|
||||
pub mod logging;
|
||||
pub mod rmp;
|
||||
pub mod scraper;
|
||||
pub mod services;
|
||||
pub mod signals;
|
||||
|
||||
+1
-1
@@ -14,11 +14,11 @@ mod data;
|
||||
mod error;
|
||||
mod formatter;
|
||||
mod logging;
|
||||
mod rmp;
|
||||
mod scraper;
|
||||
mod services;
|
||||
mod signals;
|
||||
mod state;
|
||||
#[allow(dead_code)]
|
||||
mod status;
|
||||
mod web;
|
||||
|
||||
|
||||
+156
@@ -0,0 +1,156 @@
|
||||
//! RateMyProfessors GraphQL client for bulk professor data sync.
|
||||
|
||||
use anyhow::Result;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use tracing::{debug, info};
|
||||
|
||||
/// UTSA's school ID on RateMyProfessors (base64 of "School-1516").
|
||||
const UTSA_SCHOOL_ID: &str = "U2Nob29sLTE1MTY=";
|
||||
|
||||
/// Basic auth header value (base64 of "test:test").
|
||||
const AUTH_HEADER: &str = "Basic dGVzdDp0ZXN0";
|
||||
|
||||
/// GraphQL endpoint.
|
||||
const GRAPHQL_URL: &str = "https://www.ratemyprofessors.com/graphql";
|
||||
|
||||
/// Page size for paginated fetches.
|
||||
const PAGE_SIZE: u32 = 100;
|
||||
|
||||
/// A professor record from RateMyProfessors.
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct RmpProfessor {
|
||||
pub legacy_id: i32,
|
||||
pub graphql_id: String,
|
||||
pub first_name: String,
|
||||
pub last_name: String,
|
||||
pub department: Option<String>,
|
||||
pub avg_rating: Option<f32>,
|
||||
pub avg_difficulty: Option<f32>,
|
||||
pub num_ratings: i32,
|
||||
pub would_take_again_pct: Option<f32>,
|
||||
}
|
||||
|
||||
/// Client for fetching professor data from RateMyProfessors.
|
||||
pub struct RmpClient {
|
||||
http: reqwest::Client,
|
||||
}
|
||||
|
||||
impl Default for RmpClient {
|
||||
fn default() -> Self {
|
||||
Self::new()
|
||||
}
|
||||
}
|
||||
|
||||
impl RmpClient {
|
||||
pub fn new() -> Self {
|
||||
Self {
|
||||
http: reqwest::Client::new(),
|
||||
}
|
||||
}
|
||||
|
||||
/// Fetch all professors for UTSA via paginated GraphQL queries.
|
||||
pub async fn fetch_all_professors(&self) -> Result<Vec<RmpProfessor>> {
|
||||
let mut all = Vec::new();
|
||||
let mut cursor: Option<String> = None;
|
||||
|
||||
loop {
|
||||
let after_clause = match &cursor {
|
||||
Some(c) => format!(r#", after: "{}""#, c),
|
||||
None => String::new(),
|
||||
};
|
||||
|
||||
let query = format!(
|
||||
r#"query {{
|
||||
newSearch {{
|
||||
teachers(query: {{ text: "", schoolID: "{school_id}" }}, first: {page_size}{after}) {{
|
||||
edges {{
|
||||
cursor
|
||||
node {{
|
||||
id
|
||||
legacyId
|
||||
firstName
|
||||
lastName
|
||||
department
|
||||
avgRating
|
||||
avgDifficulty
|
||||
numRatings
|
||||
wouldTakeAgainPercent
|
||||
}}
|
||||
}}
|
||||
pageInfo {{
|
||||
hasNextPage
|
||||
endCursor
|
||||
}}
|
||||
}}
|
||||
}}
|
||||
}}"#,
|
||||
school_id = UTSA_SCHOOL_ID,
|
||||
page_size = PAGE_SIZE,
|
||||
after = after_clause,
|
||||
);
|
||||
|
||||
let body = serde_json::json!({ "query": query });
|
||||
|
||||
let resp = self
|
||||
.http
|
||||
.post(GRAPHQL_URL)
|
||||
.header("Authorization", AUTH_HEADER)
|
||||
.json(&body)
|
||||
.send()
|
||||
.await?;
|
||||
|
||||
let status = resp.status();
|
||||
if !status.is_success() {
|
||||
let text = resp.text().await.unwrap_or_default();
|
||||
anyhow::bail!("RMP GraphQL request failed ({status}): {text}");
|
||||
}
|
||||
|
||||
let json: serde_json::Value = resp.json().await?;
|
||||
|
||||
let teachers = &json["data"]["newSearch"]["teachers"];
|
||||
let edges = teachers["edges"]
|
||||
.as_array()
|
||||
.ok_or_else(|| anyhow::anyhow!("Missing edges in RMP response"))?;
|
||||
|
||||
for edge in edges {
|
||||
let node = &edge["node"];
|
||||
let wta = node["wouldTakeAgainPercent"]
|
||||
.as_f64()
|
||||
.map(|v| v as f32)
|
||||
.filter(|&v| v >= 0.0);
|
||||
|
||||
all.push(RmpProfessor {
|
||||
legacy_id: node["legacyId"]
|
||||
.as_i64()
|
||||
.ok_or_else(|| anyhow::anyhow!("Missing legacyId"))?
|
||||
as i32,
|
||||
graphql_id: node["id"]
|
||||
.as_str()
|
||||
.ok_or_else(|| anyhow::anyhow!("Missing id"))?
|
||||
.to_string(),
|
||||
first_name: node["firstName"].as_str().unwrap_or_default().to_string(),
|
||||
last_name: node["lastName"].as_str().unwrap_or_default().to_string(),
|
||||
department: node["department"].as_str().map(|s| s.to_string()),
|
||||
avg_rating: node["avgRating"].as_f64().map(|v| v as f32),
|
||||
avg_difficulty: node["avgDifficulty"].as_f64().map(|v| v as f32),
|
||||
num_ratings: node["numRatings"].as_i64().unwrap_or(0) as i32,
|
||||
would_take_again_pct: wta,
|
||||
});
|
||||
}
|
||||
|
||||
let page_info = &teachers["pageInfo"];
|
||||
let has_next = page_info["hasNextPage"].as_bool().unwrap_or(false);
|
||||
|
||||
if !has_next {
|
||||
break;
|
||||
}
|
||||
|
||||
cursor = page_info["endCursor"].as_str().map(|s| s.to_string());
|
||||
|
||||
debug!(fetched = all.len(), "RMP pagination: fetching next page");
|
||||
}
|
||||
|
||||
info!(total = all.len(), "Fetched all RMP professors");
|
||||
Ok(all)
|
||||
}
|
||||
}
|
||||
+17
-4
@@ -4,10 +4,11 @@ pub mod worker;
|
||||
|
||||
use crate::banner::BannerApi;
|
||||
use crate::services::Service;
|
||||
use crate::state::ReferenceCache;
|
||||
use crate::status::{ServiceStatus, ServiceStatusRegistry};
|
||||
use sqlx::PgPool;
|
||||
use std::sync::Arc;
|
||||
use tokio::sync::broadcast;
|
||||
use tokio::sync::{RwLock, broadcast};
|
||||
use tokio::task::JoinHandle;
|
||||
use tracing::{info, warn};
|
||||
|
||||
@@ -21,6 +22,7 @@ use self::worker::Worker;
|
||||
pub struct ScraperService {
|
||||
db_pool: PgPool,
|
||||
banner_api: Arc<BannerApi>,
|
||||
reference_cache: Arc<RwLock<ReferenceCache>>,
|
||||
service_statuses: ServiceStatusRegistry,
|
||||
scheduler_handle: Option<JoinHandle<()>>,
|
||||
worker_handles: Vec<JoinHandle<()>>,
|
||||
@@ -29,10 +31,16 @@ pub struct ScraperService {
|
||||
|
||||
impl ScraperService {
|
||||
/// Creates a new `ScraperService`.
|
||||
pub fn new(db_pool: PgPool, banner_api: Arc<BannerApi>, service_statuses: ServiceStatusRegistry) -> Self {
|
||||
pub fn new(
|
||||
db_pool: PgPool,
|
||||
banner_api: Arc<BannerApi>,
|
||||
reference_cache: Arc<RwLock<ReferenceCache>>,
|
||||
service_statuses: ServiceStatusRegistry,
|
||||
) -> Self {
|
||||
Self {
|
||||
db_pool,
|
||||
banner_api,
|
||||
reference_cache,
|
||||
service_statuses,
|
||||
scheduler_handle: None,
|
||||
worker_handles: Vec::new(),
|
||||
@@ -48,7 +56,11 @@ impl ScraperService {
|
||||
let (shutdown_tx, _) = broadcast::channel(1);
|
||||
self.shutdown_tx = Some(shutdown_tx.clone());
|
||||
|
||||
let scheduler = Scheduler::new(self.db_pool.clone(), self.banner_api.clone());
|
||||
let scheduler = Scheduler::new(
|
||||
self.db_pool.clone(),
|
||||
self.banner_api.clone(),
|
||||
self.reference_cache.clone(),
|
||||
);
|
||||
let shutdown_rx = shutdown_tx.subscribe();
|
||||
let scheduler_handle = tokio::spawn(async move {
|
||||
scheduler.run(shutdown_rx).await;
|
||||
@@ -86,7 +98,8 @@ impl Service for ScraperService {
|
||||
}
|
||||
|
||||
async fn shutdown(&mut self) -> Result<(), anyhow::Error> {
|
||||
self.service_statuses.set("scraper", ServiceStatus::Disabled);
|
||||
self.service_statuses
|
||||
.set("scraper", ServiceStatus::Disabled);
|
||||
info!("Shutting down scraper service");
|
||||
|
||||
// Send shutdown signal to all tasks
|
||||
|
||||
+186
-15
@@ -1,28 +1,42 @@
|
||||
use crate::banner::{BannerApi, Term};
|
||||
use crate::data::models::{ScrapePriority, TargetType};
|
||||
use crate::data::models::{ReferenceData, ScrapePriority, TargetType};
|
||||
use crate::data::scrape_jobs;
|
||||
use crate::error::Result;
|
||||
use crate::rmp::RmpClient;
|
||||
use crate::scraper::jobs::subject::SubjectJob;
|
||||
use crate::state::ReferenceCache;
|
||||
use serde_json::json;
|
||||
use sqlx::PgPool;
|
||||
use std::sync::Arc;
|
||||
use std::time::Duration;
|
||||
use tokio::sync::broadcast;
|
||||
use std::time::{Duration, Instant};
|
||||
use tokio::sync::{RwLock, broadcast};
|
||||
use tokio::time;
|
||||
use tokio_util::sync::CancellationToken;
|
||||
use tracing::{debug, error, info, warn};
|
||||
|
||||
/// How often reference data is re-scraped (6 hours).
|
||||
const REFERENCE_DATA_INTERVAL: Duration = Duration::from_secs(6 * 60 * 60);
|
||||
|
||||
/// How often RMP data is synced (24 hours).
|
||||
const RMP_SYNC_INTERVAL: Duration = Duration::from_secs(24 * 60 * 60);
|
||||
|
||||
/// Periodically analyzes data and enqueues prioritized scrape jobs.
|
||||
pub struct Scheduler {
|
||||
db_pool: PgPool,
|
||||
banner_api: Arc<BannerApi>,
|
||||
reference_cache: Arc<RwLock<ReferenceCache>>,
|
||||
}
|
||||
|
||||
impl Scheduler {
|
||||
pub fn new(db_pool: PgPool, banner_api: Arc<BannerApi>) -> Self {
|
||||
pub fn new(
|
||||
db_pool: PgPool,
|
||||
banner_api: Arc<BannerApi>,
|
||||
reference_cache: Arc<RwLock<ReferenceCache>>,
|
||||
) -> Self {
|
||||
Self {
|
||||
db_pool,
|
||||
banner_api,
|
||||
reference_cache,
|
||||
}
|
||||
}
|
||||
|
||||
@@ -41,33 +55,68 @@ impl Scheduler {
|
||||
let work_interval = Duration::from_secs(60);
|
||||
let mut next_run = time::Instant::now();
|
||||
let mut current_work: Option<(tokio::task::JoinHandle<()>, CancellationToken)> = None;
|
||||
// Scrape reference data immediately on first cycle
|
||||
let mut last_ref_scrape = Instant::now() - REFERENCE_DATA_INTERVAL;
|
||||
// Sync RMP data immediately on first cycle
|
||||
let mut last_rmp_sync = Instant::now() - RMP_SYNC_INTERVAL;
|
||||
|
||||
loop {
|
||||
tokio::select! {
|
||||
_ = time::sleep_until(next_run) => {
|
||||
let cancel_token = CancellationToken::new();
|
||||
|
||||
let should_scrape_ref = last_ref_scrape.elapsed() >= REFERENCE_DATA_INTERVAL;
|
||||
let should_sync_rmp = last_rmp_sync.elapsed() >= RMP_SYNC_INTERVAL;
|
||||
|
||||
// Spawn work in separate task to allow graceful cancellation during shutdown.
|
||||
// Without this, shutdown would have to wait for the full scheduling cycle.
|
||||
let work_handle = tokio::spawn({
|
||||
let db_pool = self.db_pool.clone();
|
||||
let banner_api = self.banner_api.clone();
|
||||
let cancel_token = cancel_token.clone();
|
||||
let reference_cache = self.reference_cache.clone();
|
||||
|
||||
async move {
|
||||
tokio::select! {
|
||||
result = Self::schedule_jobs_impl(&db_pool, &banner_api) => {
|
||||
if let Err(e) = result {
|
||||
error!(error = ?e, "Failed to schedule jobs");
|
||||
async move {
|
||||
tokio::select! {
|
||||
_ = async {
|
||||
// RMP sync is independent of Banner API — run it
|
||||
// concurrently with reference data scraping so it
|
||||
// doesn't wait behind rate-limited Banner calls.
|
||||
let rmp_fut = async {
|
||||
if should_sync_rmp
|
||||
&& let Err(e) = Self::sync_rmp_data(&db_pool).await
|
||||
{
|
||||
error!(error = ?e, "Failed to sync RMP data");
|
||||
}
|
||||
};
|
||||
|
||||
let ref_fut = async {
|
||||
if should_scrape_ref
|
||||
&& let Err(e) = Self::scrape_reference_data(&db_pool, &banner_api, &reference_cache).await
|
||||
{
|
||||
error!(error = ?e, "Failed to scrape reference data");
|
||||
}
|
||||
};
|
||||
|
||||
tokio::join!(rmp_fut, ref_fut);
|
||||
|
||||
if let Err(e) = Self::schedule_jobs_impl(&db_pool, &banner_api).await {
|
||||
error!(error = ?e, "Failed to schedule jobs");
|
||||
}
|
||||
} => {}
|
||||
_ = cancel_token.cancelled() => {
|
||||
debug!("Scheduling work cancelled gracefully");
|
||||
}
|
||||
}
|
||||
}
|
||||
_ = cancel_token.cancelled() => {
|
||||
debug!("Scheduling work cancelled gracefully");
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
if should_scrape_ref {
|
||||
last_ref_scrape = Instant::now();
|
||||
}
|
||||
if should_sync_rmp {
|
||||
last_rmp_sync = Instant::now();
|
||||
}
|
||||
|
||||
current_work = Some((work_handle, cancel_token));
|
||||
next_run = time::Instant::now() + work_interval;
|
||||
}
|
||||
@@ -170,4 +219,126 @@ impl Scheduler {
|
||||
debug!("Job scheduling complete");
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Fetch all RMP professors, upsert to DB, and auto-match against Banner instructors.
|
||||
#[tracing::instrument(skip_all)]
|
||||
async fn sync_rmp_data(db_pool: &PgPool) -> Result<()> {
|
||||
info!("Starting RMP data sync");
|
||||
|
||||
let client = RmpClient::new();
|
||||
let professors = client.fetch_all_professors().await?;
|
||||
let total = professors.len();
|
||||
|
||||
crate::data::rmp::batch_upsert_rmp_professors(&professors, db_pool).await?;
|
||||
info!(total, "RMP professors upserted");
|
||||
|
||||
let matched = crate::data::rmp::auto_match_instructors(db_pool).await?;
|
||||
info!(total, matched, "RMP sync complete");
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Scrape all reference data categories from Banner and upsert to DB, then refresh cache.
|
||||
#[tracing::instrument(skip_all)]
|
||||
async fn scrape_reference_data(
|
||||
db_pool: &PgPool,
|
||||
banner_api: &BannerApi,
|
||||
reference_cache: &Arc<RwLock<ReferenceCache>>,
|
||||
) -> Result<()> {
|
||||
let term = Term::get_current().inner().to_string();
|
||||
info!(term = %term, "Scraping reference data");
|
||||
|
||||
let mut all_entries = Vec::new();
|
||||
|
||||
// Terms (fetched via session pool, no active session needed)
|
||||
match banner_api.sessions.get_terms("", 1, 500).await {
|
||||
Ok(terms) => {
|
||||
debug!(count = terms.len(), "Fetched terms");
|
||||
all_entries.extend(terms.into_iter().map(|t| ReferenceData {
|
||||
category: "term".to_string(),
|
||||
code: t.code,
|
||||
description: t.description,
|
||||
}));
|
||||
}
|
||||
Err(e) => warn!(error = ?e, "Failed to fetch terms"),
|
||||
}
|
||||
|
||||
// Subjects
|
||||
match banner_api.get_subjects("", &term, 1, 500).await {
|
||||
Ok(pairs) => {
|
||||
debug!(count = pairs.len(), "Fetched subjects");
|
||||
all_entries.extend(pairs.into_iter().map(|p| ReferenceData {
|
||||
category: "subject".to_string(),
|
||||
code: p.code,
|
||||
description: p.description,
|
||||
}));
|
||||
}
|
||||
Err(e) => warn!(error = ?e, "Failed to fetch subjects"),
|
||||
}
|
||||
|
||||
// Campuses
|
||||
match banner_api.get_campuses(&term).await {
|
||||
Ok(pairs) => {
|
||||
debug!(count = pairs.len(), "Fetched campuses");
|
||||
all_entries.extend(pairs.into_iter().map(|p| ReferenceData {
|
||||
category: "campus".to_string(),
|
||||
code: p.code,
|
||||
description: p.description,
|
||||
}));
|
||||
}
|
||||
Err(e) => warn!(error = ?e, "Failed to fetch campuses"),
|
||||
}
|
||||
|
||||
// Instructional methods
|
||||
match banner_api.get_instructional_methods(&term).await {
|
||||
Ok(pairs) => {
|
||||
debug!(count = pairs.len(), "Fetched instructional methods");
|
||||
all_entries.extend(pairs.into_iter().map(|p| ReferenceData {
|
||||
category: "instructional_method".to_string(),
|
||||
code: p.code,
|
||||
description: p.description,
|
||||
}));
|
||||
}
|
||||
Err(e) => warn!(error = ?e, "Failed to fetch instructional methods"),
|
||||
}
|
||||
|
||||
// Parts of term
|
||||
match banner_api.get_parts_of_term(&term).await {
|
||||
Ok(pairs) => {
|
||||
debug!(count = pairs.len(), "Fetched parts of term");
|
||||
all_entries.extend(pairs.into_iter().map(|p| ReferenceData {
|
||||
category: "part_of_term".to_string(),
|
||||
code: p.code,
|
||||
description: p.description,
|
||||
}));
|
||||
}
|
||||
Err(e) => warn!(error = ?e, "Failed to fetch parts of term"),
|
||||
}
|
||||
|
||||
// Attributes
|
||||
match banner_api.get_attributes(&term).await {
|
||||
Ok(pairs) => {
|
||||
debug!(count = pairs.len(), "Fetched attributes");
|
||||
all_entries.extend(pairs.into_iter().map(|p| ReferenceData {
|
||||
category: "attribute".to_string(),
|
||||
code: p.code,
|
||||
description: p.description,
|
||||
}));
|
||||
}
|
||||
Err(e) => warn!(error = ?e, "Failed to fetch attributes"),
|
||||
}
|
||||
|
||||
// Batch upsert all entries
|
||||
let total = all_entries.len();
|
||||
crate::data::reference::batch_upsert(&all_entries, db_pool).await?;
|
||||
info!(total_entries = total, "Reference data upserted to DB");
|
||||
|
||||
// Refresh in-memory cache
|
||||
let all = crate::data::reference::get_all(db_pool).await?;
|
||||
let count = all.len();
|
||||
*reference_cache.write().await = ReferenceCache::from_entries(all);
|
||||
info!(entries = count, "Reference cache refreshed");
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
+9
-7
@@ -1,6 +1,7 @@
|
||||
use super::Service;
|
||||
use crate::state::AppState;
|
||||
use crate::status::ServiceStatus;
|
||||
use crate::web::auth::AuthConfig;
|
||||
use crate::web::create_router;
|
||||
use std::net::SocketAddr;
|
||||
use tokio::net::TcpListener;
|
||||
@@ -11,22 +12,21 @@ use tracing::{info, trace, warn};
|
||||
pub struct WebService {
|
||||
port: u16,
|
||||
app_state: AppState,
|
||||
auth_config: AuthConfig,
|
||||
shutdown_tx: Option<broadcast::Sender<()>>,
|
||||
}
|
||||
|
||||
impl WebService {
|
||||
pub fn new(port: u16, app_state: AppState) -> Self {
|
||||
pub fn new(port: u16, app_state: AppState, auth_config: AuthConfig) -> Self {
|
||||
Self {
|
||||
port,
|
||||
app_state,
|
||||
auth_config,
|
||||
shutdown_tx: None,
|
||||
}
|
||||
}
|
||||
/// Periodically pings the database and updates the "database" service status.
|
||||
async fn db_health_check_loop(
|
||||
state: AppState,
|
||||
mut shutdown_rx: broadcast::Receiver<()>,
|
||||
) {
|
||||
async fn db_health_check_loop(state: AppState, mut shutdown_rx: broadcast::Receiver<()>) {
|
||||
use std::time::Duration;
|
||||
let mut interval = tokio::time::interval(Duration::from_secs(30));
|
||||
|
||||
@@ -61,12 +61,14 @@ impl Service for WebService {
|
||||
|
||||
async fn run(&mut self) -> Result<(), anyhow::Error> {
|
||||
// Create the main router with Banner API routes
|
||||
let app = create_router(self.app_state.clone());
|
||||
let app = create_router(self.app_state.clone(), self.auth_config.clone());
|
||||
|
||||
let addr = SocketAddr::from(([0, 0, 0, 0], self.port));
|
||||
|
||||
let listener = TcpListener::bind(addr).await?;
|
||||
self.app_state.service_statuses.set("web", ServiceStatus::Active);
|
||||
self.app_state
|
||||
.service_statuses
|
||||
.set("web", ServiceStatus::Active);
|
||||
info!(
|
||||
service = "web",
|
||||
address = %addr,
|
||||
|
||||
@@ -2,27 +2,103 @@
|
||||
|
||||
use crate::banner::BannerApi;
|
||||
use crate::banner::Course;
|
||||
use crate::data::models::ReferenceData;
|
||||
use crate::status::ServiceStatusRegistry;
|
||||
use crate::web::session_cache::{OAuthStateStore, SessionCache};
|
||||
use anyhow::Result;
|
||||
use sqlx::PgPool;
|
||||
use std::collections::HashMap;
|
||||
use std::sync::Arc;
|
||||
use tokio::sync::RwLock;
|
||||
|
||||
/// In-memory cache for reference data (code→description lookups).
|
||||
///
|
||||
/// Loaded from the `reference_data` table on startup and refreshed periodically.
|
||||
/// Uses a two-level HashMap so lookups take `&str` without allocating.
|
||||
pub struct ReferenceCache {
|
||||
/// category → (code → description)
|
||||
data: HashMap<String, HashMap<String, String>>,
|
||||
}
|
||||
|
||||
impl Default for ReferenceCache {
|
||||
fn default() -> Self {
|
||||
Self::new()
|
||||
}
|
||||
}
|
||||
|
||||
impl ReferenceCache {
|
||||
/// Create an empty cache.
|
||||
pub fn new() -> Self {
|
||||
Self {
|
||||
data: HashMap::new(),
|
||||
}
|
||||
}
|
||||
|
||||
/// Build cache from a list of reference data entries.
|
||||
pub fn from_entries(entries: Vec<ReferenceData>) -> Self {
|
||||
let mut data: HashMap<String, HashMap<String, String>> = HashMap::new();
|
||||
for e in entries {
|
||||
data.entry(e.category)
|
||||
.or_default()
|
||||
.insert(e.code, e.description);
|
||||
}
|
||||
Self { data }
|
||||
}
|
||||
|
||||
/// Look up a description by category and code. Zero allocations.
|
||||
pub fn lookup(&self, category: &str, code: &str) -> Option<&str> {
|
||||
self.data
|
||||
.get(category)
|
||||
.and_then(|codes| codes.get(code))
|
||||
.map(|s| s.as_str())
|
||||
}
|
||||
|
||||
/// Get all `(code, description)` pairs for a category, sorted by description.
|
||||
pub fn entries_for_category(&self, category: &str) -> Vec<(&str, &str)> {
|
||||
let Some(codes) = self.data.get(category) else {
|
||||
return Vec::new();
|
||||
};
|
||||
let mut entries: Vec<(&str, &str)> = codes
|
||||
.iter()
|
||||
.map(|(code, desc)| (code.as_str(), desc.as_str()))
|
||||
.collect();
|
||||
entries.sort_by(|a, b| a.1.cmp(b.1));
|
||||
entries
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone)]
pub struct AppState {
    pub banner_api: Arc<BannerApi>,
    pub db_pool: PgPool,
    pub service_statuses: ServiceStatusRegistry,
    pub reference_cache: Arc<RwLock<ReferenceCache>>,
    pub session_cache: SessionCache,
    pub oauth_state_store: OAuthStateStore,
}

impl AppState {
    pub fn new(banner_api: Arc<BannerApi>, db_pool: PgPool) -> Self {
        Self {
            session_cache: SessionCache::new(db_pool.clone()),
            oauth_state_store: OAuthStateStore::new(),
            banner_api,
            db_pool,
            service_statuses: ServiceStatusRegistry::new(),
            reference_cache: Arc::new(RwLock::new(ReferenceCache::new())),
        }
    }

    /// Initialize the reference cache from the database.
    pub async fn load_reference_cache(&self) -> Result<()> {
        let entries = crate::data::reference::get_all(&self.db_pool).await?;
        let count = entries.len();
        let cache = ReferenceCache::from_entries(entries);
        *self.reference_cache.write().await = cache;
        tracing::info!(entries = count, "Reference cache loaded");
        Ok(())
    }

    /// Get a course by CRN directly from Banner API
    pub async fn get_course_or_fetch(&self, term: &str, crn: &str) -> Result<Course> {
        self.banner_api

+6
-1
@@ -3,11 +3,14 @@ use std::time::Instant;

use dashmap::DashMap;
use serde::Serialize;
use ts_rs::TS;

/// Health status of a service.
#[derive(Debug, Clone, Serialize, PartialEq)]
#[derive(Debug, Clone, Serialize, PartialEq, TS)]
#[serde(rename_all = "lowercase")]
#[ts(export)]
pub enum ServiceStatus {
    #[allow(dead_code)]
    Starting,
    Active,
    Connected,
@@ -19,6 +22,7 @@ pub enum ServiceStatus {
#[derive(Debug, Clone)]
pub struct StatusEntry {
    pub status: ServiceStatus,
    #[allow(dead_code)]
    pub updated_at: Instant,
}

@@ -46,6 +50,7 @@ impl ServiceStatusRegistry {
    }

    /// Returns the current status of a named service, if present.
    #[allow(dead_code)]
    pub fn get(&self, name: &str) -> Option<ServiceStatus> {
        self.inner.get(name).map(|entry| entry.status.clone())
    }

@@ -0,0 +1,205 @@
//! Admin API handlers.
//!
//! All endpoints require the `AdminUser` extractor, returning 401/403 as needed.

use axum::extract::{Path, State};
use axum::http::StatusCode;
use axum::response::Json;
use serde::Deserialize;
use serde_json::{Value, json};

use crate::data::models::User;
use crate::state::AppState;
use crate::web::extractors::AdminUser;

/// `GET /api/admin/status` — Enhanced system status for admins.
pub async fn admin_status(
    AdminUser(_user): AdminUser,
    State(state): State<AppState>,
) -> Result<Json<Value>, (StatusCode, Json<Value>)> {
    let (user_count,): (i64,) = sqlx::query_as("SELECT COUNT(*) FROM users")
        .fetch_one(&state.db_pool)
        .await
        .map_err(|e| {
            tracing::error!(error = %e, "failed to count users");
            (
                StatusCode::INTERNAL_SERVER_ERROR,
                Json(json!({"error": "failed to count users"})),
            )
        })?;

    let (session_count,): (i64,) =
        sqlx::query_as("SELECT COUNT(*) FROM user_sessions WHERE expires_at > now()")
            .fetch_one(&state.db_pool)
            .await
            .map_err(|e| {
                tracing::error!(error = %e, "failed to count sessions");
                (
                    StatusCode::INTERNAL_SERVER_ERROR,
                    Json(json!({"error": "failed to count sessions"})),
                )
            })?;

    let course_count = state.get_course_count().await.map_err(|e| {
        tracing::error!(error = %e, "failed to count courses");
        (
            StatusCode::INTERNAL_SERVER_ERROR,
            Json(json!({"error": "failed to count courses"})),
        )
    })?;

    let (scrape_job_count,): (i64,) = sqlx::query_as("SELECT COUNT(*) FROM scrape_jobs")
        .fetch_one(&state.db_pool)
        .await
        .map_err(|e| {
            tracing::error!(error = %e, "failed to count scrape jobs");
            (
                StatusCode::INTERNAL_SERVER_ERROR,
                Json(json!({"error": "failed to count scrape jobs"})),
            )
        })?;

    let services: Vec<Value> = state
        .service_statuses
        .all()
        .into_iter()
        .map(|(name, status)| {
            json!({
                "name": name,
                "status": status,
            })
        })
        .collect();

    Ok(Json(json!({
        "userCount": user_count,
        "sessionCount": session_count,
        "courseCount": course_count,
        "scrapeJobCount": scrape_job_count,
        "services": services,
    })))
}

/// `GET /api/admin/users` — List all users.
pub async fn list_users(
    AdminUser(_user): AdminUser,
    State(state): State<AppState>,
) -> Result<Json<Vec<User>>, (StatusCode, Json<Value>)> {
    let users = crate::data::users::list_users(&state.db_pool)
        .await
        .map_err(|e| {
            tracing::error!(error = %e, "failed to list users");
            (
                StatusCode::INTERNAL_SERVER_ERROR,
                Json(json!({"error": "failed to list users"})),
            )
        })?;

    Ok(Json(users))
}

#[derive(Deserialize)]
pub struct SetAdminBody {
    is_admin: bool,
}

/// `PUT /api/admin/users/{discord_id}/admin` — Set admin status for a user.
pub async fn set_user_admin(
    AdminUser(_user): AdminUser,
    State(state): State<AppState>,
    Path(discord_id): Path<i64>,
    Json(body): Json<SetAdminBody>,
) -> Result<Json<User>, (StatusCode, Json<Value>)> {
    let user = crate::data::users::set_admin(&state.db_pool, discord_id, body.is_admin)
        .await
        .map_err(|e| {
            tracing::error!(error = %e, "failed to set admin status");
            (
                StatusCode::INTERNAL_SERVER_ERROR,
                Json(json!({"error": "failed to set admin status"})),
            )
        })?
        .ok_or_else(|| {
            (
                StatusCode::NOT_FOUND,
                Json(json!({"error": "user not found"})),
            )
        })?;

    state.session_cache.evict_user(discord_id);

    Ok(Json(user))
}

/// `GET /api/admin/scrape-jobs` — List scrape jobs.
pub async fn list_scrape_jobs(
    AdminUser(_user): AdminUser,
    State(state): State<AppState>,
) -> Result<Json<Value>, (StatusCode, Json<Value>)> {
    let rows = sqlx::query_as::<_, crate::data::models::ScrapeJob>(
        "SELECT * FROM scrape_jobs ORDER BY priority DESC, execute_at ASC LIMIT 100",
    )
    .fetch_all(&state.db_pool)
    .await
    .map_err(|e| {
        tracing::error!(error = %e, "failed to list scrape jobs");
        (
            StatusCode::INTERNAL_SERVER_ERROR,
            Json(json!({"error": "failed to list scrape jobs"})),
        )
    })?;

    let jobs: Vec<Value> = rows
        .iter()
        .map(|j| {
            json!({
                "id": j.id,
                "targetType": format!("{:?}", j.target_type),
                "targetPayload": j.target_payload,
                "priority": format!("{:?}", j.priority),
                "executeAt": j.execute_at.to_rfc3339(),
                "createdAt": j.created_at.to_rfc3339(),
                "lockedAt": j.locked_at.map(|t| t.to_rfc3339()),
                "retryCount": j.retry_count,
                "maxRetries": j.max_retries,
            })
        })
        .collect();

    Ok(Json(json!({ "jobs": jobs })))
}

/// `GET /api/admin/audit-log` — List recent audit entries.
pub async fn list_audit_log(
    AdminUser(_user): AdminUser,
    State(state): State<AppState>,
) -> Result<Json<Value>, (StatusCode, Json<Value>)> {
    let rows = sqlx::query_as::<_, crate::data::models::CourseAudit>(
        "SELECT * FROM course_audits ORDER BY timestamp DESC LIMIT 200",
    )
    .fetch_all(&state.db_pool)
    .await
    .map_err(|e| {
        tracing::error!(error = %e, "failed to list audit log");
        (
            StatusCode::INTERNAL_SERVER_ERROR,
            Json(json!({"error": "failed to list audit log"})),
        )
    })?;

    let entries: Vec<Value> = rows
        .iter()
        .map(|a| {
            json!({
                "id": a.id,
                "courseId": a.course_id,
                "timestamp": a.timestamp.to_rfc3339(),
                "fieldChanged": a.field_changed,
                "oldValue": a.old_value,
                "newValue": a.new_value,
            })
        })
        .collect();

    Ok(Json(json!({ "entries": entries })))
}
+114
-19
@@ -1,14 +1,18 @@
//! Embedded assets for the web frontend
//! Embedded assets for the web frontend.
//!
//! This module handles serving static assets that are embedded into the binary
//! at compile time using rust-embed.
//! Serves static assets embedded into the binary at compile time using rust-embed.
//! Supports content negotiation for pre-compressed variants (.br, .gz, .zst)
//! generated at build time by `web/scripts/compress-assets.ts`.

use axum::http::{HeaderMap, HeaderValue, header};
use dashmap::DashMap;
use rapidhash::v3::rapidhash_v3;
use rust_embed::RustEmbed;
use std::fmt;
use std::sync::LazyLock;

use super::encoding::{COMPRESSION_MIN_SIZE, ContentEncoding, parse_accepted_encodings};

/// Embedded web assets from the dist directory
#[derive(RustEmbed)]
#[folder = "web/dist/"]
@@ -21,17 +25,15 @@ pub struct WebAssets;
pub struct AssetHash(u64);

impl AssetHash {
    /// Create a new AssetHash from u64 value
    pub fn new(hash: u64) -> Self {
        Self(hash)
    }

    /// Get the hash as a hex string
    pub fn to_hex(&self) -> String {
        format!("{:016x}", self.0)
    }

    /// Get the hash as a quoted hex string
    /// Get the hash as a quoted hex string (for ETag headers)
    pub fn quoted(&self) -> String {
        format!("\"{}\"", self.to_hex())
    }
@@ -51,12 +53,8 @@ pub struct AssetMetadata {
}

impl AssetMetadata {
    /// Check if the etag matches the asset hash
    pub fn etag_matches(&self, etag: &str) -> bool {
        // Remove quotes if present (ETags are typically quoted)
        let etag = etag.trim_matches('"');

        // ETags generated from u64 hex should be 16 characters
        etag.len() == 16
            && u64::from_str_radix(etag, 16)
                .map(|parsed| parsed == self.hash.0)
@@ -68,28 +66,125 @@ impl AssetMetadata {
static ASSET_CACHE: LazyLock<DashMap<String, AssetMetadata>> = LazyLock::new(DashMap::new);

/// Get cached asset metadata for a file path, caching on-demand
/// Returns AssetMetadata containing MIME type and RapidHash hash
pub fn get_asset_metadata_cached(path: &str, content: &[u8]) -> AssetMetadata {
    // Check cache first
    if let Some(cached) = ASSET_CACHE.get(path) {
        return cached.value().clone();
    }

    // Calculate MIME type
    let mime_type = mime_guess::from_path(path)
        .first()
        .map(|mime| mime.to_string());

    // Calculate RapidHash hash (using u64 native output size)
    let hash_value = rapidhash_v3(content);
    let hash = AssetHash::new(hash_value);

    let hash = AssetHash::new(rapidhash_v3(content));
    let metadata = AssetMetadata { mime_type, hash };

    // Only cache if we haven't exceeded the limit
    if ASSET_CACHE.len() < 1000 {
        ASSET_CACHE.insert(path.to_string(), metadata.clone());
    }

    metadata
}

/// Set appropriate `Cache-Control` header based on the asset path.
///
/// SvelteKit outputs fingerprinted assets under `_app/immutable/` which are
/// safe to cache indefinitely. Other assets get shorter cache durations.
fn set_cache_control(headers: &mut HeaderMap, path: &str) {
    let cache_control = if path.contains("immutable/") {
        // SvelteKit fingerprinted assets — cache forever
        "public, max-age=31536000, immutable"
    } else if path == "index.html" || path.ends_with(".html") {
        "public, max-age=300"
    } else {
        match path.rsplit_once('.').map(|(_, ext)| ext) {
            Some("css" | "js") => "public, max-age=86400",
            Some("png" | "jpg" | "jpeg" | "gif" | "svg" | "ico") => "public, max-age=2592000",
            _ => "public, max-age=3600",
        }
    };

    if let Ok(value) = HeaderValue::from_str(cache_control) {
        headers.insert(header::CACHE_CONTROL, value);
    }
}

/// Serve an embedded asset with content encoding negotiation.
///
/// Tries pre-compressed variants (.br, .gz, .zst) in the order preferred by
/// the client's `Accept-Encoding` header, falling back to the uncompressed
/// original. Returns `None` if the asset doesn't exist at all.
pub fn try_serve_asset_with_encoding(
    path: &str,
    request_headers: &HeaderMap,
) -> Option<axum::response::Response> {
    use axum::response::IntoResponse;

    let asset_path = path.strip_prefix('/').unwrap_or(path);

    // Get the uncompressed original first (for metadata: MIME type, ETag)
    let original = WebAssets::get(asset_path)?;
    let metadata = get_asset_metadata_cached(asset_path, &original.data);

    // Check ETag for conditional requests (304 Not Modified)
    if let Some(etag) = request_headers.get(header::IF_NONE_MATCH)
        && etag.to_str().is_ok_and(|s| metadata.etag_matches(s))
    {
        return Some(axum::http::StatusCode::NOT_MODIFIED.into_response());
    }

    let mime_type = metadata
        .mime_type
        .unwrap_or_else(|| "application/octet-stream".to_string());

    // Only attempt pre-compressed variants for files above the compression
    // threshold — the build script skips smaller files too.
    let accepted_encodings = if original.data.len() >= COMPRESSION_MIN_SIZE {
        parse_accepted_encodings(request_headers)
    } else {
        vec![ContentEncoding::Identity]
    };

    for encoding in &accepted_encodings {
        if *encoding == ContentEncoding::Identity {
            continue;
        }

        let compressed_path = format!("{}{}", asset_path, encoding.extension());
        if let Some(compressed) = WebAssets::get(&compressed_path) {
            let mut response_headers = HeaderMap::new();

            if let Ok(ct) = HeaderValue::from_str(&mime_type) {
                response_headers.insert(header::CONTENT_TYPE, ct);
            }
            if let Some(ce) = encoding.header_value() {
                response_headers.insert(header::CONTENT_ENCODING, ce);
            }
            if let Ok(etag_val) = HeaderValue::from_str(&metadata.hash.quoted()) {
                response_headers.insert(header::ETAG, etag_val);
            }
            // Vary so caches distinguish by encoding
            response_headers.insert(header::VARY, HeaderValue::from_static("Accept-Encoding"));
            set_cache_control(&mut response_headers, asset_path);

            return Some(
                (
                    axum::http::StatusCode::OK,
                    response_headers,
                    compressed.data,
                )
                    .into_response(),
            );
        }
    }

    // No compressed variant found — serve uncompressed original
    let mut response_headers = HeaderMap::new();
    if let Ok(ct) = HeaderValue::from_str(&mime_type) {
        response_headers.insert(header::CONTENT_TYPE, ct);
    }
    if let Ok(etag_val) = HeaderValue::from_str(&metadata.hash.quoted()) {
        response_headers.insert(header::ETAG, etag_val);
    }
    set_cache_control(&mut response_headers, asset_path);

    Some((axum::http::StatusCode::OK, response_headers, original.data).into_response())
}

+300
@@ -0,0 +1,300 @@
//! Discord OAuth2 authentication handlers.
//!
//! Provides login, callback, logout, and session introspection endpoints
//! for Discord OAuth2 authentication flow.

use axum::extract::{Extension, Query, State};
use axum::http::{HeaderMap, StatusCode, header};
use axum::response::{IntoResponse, Json, Redirect, Response};
use serde::Deserialize;
use serde_json::{Value, json};
use std::time::Duration;
use tracing::{error, info, warn};

use crate::state::AppState;

/// OAuth configuration passed as an Axum Extension.
#[derive(Clone)]
pub struct AuthConfig {
    pub client_id: String,
    pub client_secret: String,
    /// Optional base URL override (e.g. "https://banner.xevion.dev").
    /// When `None`, the redirect URI is derived from the request's Origin/Host header.
    pub redirect_base: Option<String>,
}

const CALLBACK_PATH: &str = "/api/auth/callback";

/// Derive the origin (scheme + host + port) the user's browser is actually on.
///
/// Priority:
/// 1. Configured `redirect_base` (production override)
/// 2. `Referer` header — preserves the real browser origin even through
///    reverse proxies that rewrite `Host` (e.g. Vite dev proxy with
///    `changeOrigin: true`)
/// 3. `Origin` header (present on POST / CORS requests)
/// 4. `Host` header (last resort, may be rewritten by proxies)
fn resolve_origin(auth_config: &AuthConfig, headers: &HeaderMap) -> String {
    if let Some(base) = &auth_config.redirect_base {
        return base.trim_end_matches('/').to_owned();
    }

    // Referer carries the full browser URL; extract just the origin.
    if let Some(referer) = headers.get(header::REFERER).and_then(|v| v.to_str().ok())
        && let Ok(parsed) = url::Url::parse(referer)
    {
        let origin = parsed.origin().unicode_serialization();
        if origin != "null" {
            return origin;
        }
    }

    if let Some(origin) = headers.get("origin").and_then(|v| v.to_str().ok()) {
        return origin.trim_end_matches('/').to_owned();
    }

    if let Some(host) = headers.get(header::HOST).and_then(|v| v.to_str().ok()) {
        return format!("http://{host}");
    }

    "http://localhost:8080".to_owned()
}

#[derive(Deserialize)]
pub struct CallbackParams {
    code: String,
    state: String,
}

#[derive(Deserialize)]
struct TokenResponse {
    access_token: String,
}

#[derive(Deserialize)]
struct DiscordUser {
    id: String,
    username: String,
    avatar: Option<String>,
}

/// Extract the `session` cookie value from request headers.
fn extract_session_token(headers: &HeaderMap) -> Option<String> {
    headers
        .get(header::COOKIE)?
        .to_str()
        .ok()?
        .split(';')
        .find_map(|cookie| {
            let cookie = cookie.trim();
            cookie.strip_prefix("session=").map(|v| v.to_owned())
        })
}

/// Build a `Set-Cookie` header value for the session cookie.
fn session_cookie(token: &str, max_age: i64, secure: bool) -> String {
    let mut cookie = format!("session={token}; HttpOnly; SameSite=Lax; Path=/; Max-Age={max_age}");
    if secure {
        cookie.push_str("; Secure");
    }
    cookie
}

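For illustration only (the token and cookie values below are arbitrary), the two cookie helpers above round-trip like this:

let set_cookie = session_cookie("abc123", 604800, true);
// yields: "session=abc123; HttpOnly; SameSite=Lax; Path=/; Max-Age=604800; Secure"

let mut headers = HeaderMap::new();
// Multiple cookies may share the header; only the `session` pair is extracted.
headers.insert(header::COOKIE, "theme=dark; session=abc123".parse().unwrap());
assert_eq!(extract_session_token(&headers), Some("abc123".to_string()));
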
/// `GET /api/auth/login` — Redirect to Discord OAuth2 authorization page.
pub async fn auth_login(
    State(state): State<AppState>,
    Extension(auth_config): Extension<AuthConfig>,
    headers: HeaderMap,
) -> Redirect {
    let origin = resolve_origin(&auth_config, &headers);
    let redirect_uri = format!("{origin}{CALLBACK_PATH}");
    let csrf_state = state.oauth_state_store.generate(origin);
    let redirect_uri_encoded = urlencoding::encode(&redirect_uri);

    let url = format!(
        "https://discord.com/oauth2/authorize\
        ?client_id={}\
        &redirect_uri={redirect_uri_encoded}\
        &response_type=code\
        &scope=identify\
        &state={csrf_state}",
        auth_config.client_id,
    );

    Redirect::temporary(&url)
}

/// `GET /api/auth/callback` — Handle Discord OAuth2 callback.
pub async fn auth_callback(
    State(state): State<AppState>,
    Extension(auth_config): Extension<AuthConfig>,
    Query(params): Query<CallbackParams>,
) -> Result<Response, (StatusCode, Json<Value>)> {
    // 1. Validate CSRF state and recover the origin used during login
    let origin = state
        .oauth_state_store
        .validate(&params.state)
        .ok_or_else(|| {
            warn!("OAuth callback with invalid CSRF state");
            (
                StatusCode::BAD_REQUEST,
                Json(json!({ "error": "Invalid OAuth state" })),
            )
        })?;

    // 2. Exchange authorization code for access token
    let redirect_uri = format!("{origin}{CALLBACK_PATH}");
    let client = reqwest::Client::new();
    let token_response = client
        .post("https://discord.com/api/oauth2/token")
        .form(&[
            ("client_id", auth_config.client_id.as_str()),
            ("client_secret", auth_config.client_secret.as_str()),
            ("grant_type", "authorization_code"),
            ("code", params.code.as_str()),
            ("redirect_uri", redirect_uri.as_str()),
        ])
        .send()
        .await
        .map_err(|e| {
            error!(error = %e, "failed to exchange OAuth code for token");
            (
                StatusCode::BAD_GATEWAY,
                Json(json!({ "error": "Failed to exchange code with Discord" })),
            )
        })?;

    if !token_response.status().is_success() {
        let status = token_response.status();
        let body = token_response.text().await.unwrap_or_default();
        error!(%status, %body, "Discord token exchange returned error");
        return Err((
            StatusCode::BAD_GATEWAY,
            Json(json!({ "error": "Discord token exchange failed" })),
        ));
    }

    let token_data: TokenResponse = token_response.json().await.map_err(|e| {
        error!(error = %e, "failed to parse Discord token response");
        (
            StatusCode::BAD_GATEWAY,
            Json(json!({ "error": "Invalid token response from Discord" })),
        )
    })?;

    // 3. Fetch Discord user profile
    let discord_user: DiscordUser = client
        .get("https://discord.com/api/users/@me")
        .bearer_auth(&token_data.access_token)
        .send()
        .await
        .map_err(|e| {
            error!(error = %e, "failed to fetch Discord user profile");
            (
                StatusCode::BAD_GATEWAY,
                Json(json!({ "error": "Failed to fetch Discord profile" })),
            )
        })?
        .json()
        .await
        .map_err(|e| {
            error!(error = %e, "failed to parse Discord user profile");
            (
                StatusCode::BAD_GATEWAY,
                Json(json!({ "error": "Invalid user profile from Discord" })),
            )
        })?;

    let discord_id: i64 = discord_user.id.parse().map_err(|_| {
        error!(id = %discord_user.id, "Discord user ID is not a valid i64");
        (
            StatusCode::BAD_GATEWAY,
            Json(json!({ "error": "Invalid Discord user ID" })),
        )
    })?;

    // 4. Upsert user
    let user = crate::data::users::upsert_user(
        &state.db_pool,
        discord_id,
        &discord_user.username,
        discord_user.avatar.as_deref(),
    )
    .await
    .map_err(|e| {
        error!(error = %e, "failed to upsert user");
        (
            StatusCode::INTERNAL_SERVER_ERROR,
            Json(json!({ "error": "Database error" })),
        )
    })?;

    info!(discord_id, username = %user.discord_username, "user authenticated via OAuth");

    // 5. Create session
    let session = crate::data::sessions::create_session(
        &state.db_pool,
        discord_id,
        Duration::from_secs(7 * 24 * 3600),
    )
    .await
    .map_err(|e| {
        error!(error = %e, "failed to create session");
        (
            StatusCode::INTERNAL_SERVER_ERROR,
            Json(json!({ "error": "Failed to create session" })),
        )
    })?;

    // 6. Build response with session cookie
    let secure = redirect_uri.starts_with("https://");
    let cookie = session_cookie(&session.id, 604800, secure);

    let redirect_to = if user.is_admin { "/admin" } else { "/" };

    Ok((
        [(header::SET_COOKIE, cookie)],
        Redirect::temporary(redirect_to),
    )
        .into_response())
}

/// `POST /api/auth/logout` — Destroy the current session.
pub async fn auth_logout(State(state): State<AppState>, headers: HeaderMap) -> Response {
    if let Some(token) = extract_session_token(&headers) {
        if let Err(e) = crate::data::sessions::delete_session(&state.db_pool, &token).await {
            warn!(error = %e, "failed to delete session from database");
        }
        state.session_cache.evict(&token);
    }

    let cookie = session_cookie("", 0, false);

    (
        StatusCode::OK,
        [(header::SET_COOKIE, cookie)],
        Json(json!({ "ok": true })),
    )
        .into_response()
}

/// `GET /api/auth/me` — Return the current authenticated user's info.
pub async fn auth_me(
    State(state): State<AppState>,
    headers: HeaderMap,
) -> Result<Json<Value>, StatusCode> {
    let token = extract_session_token(&headers).ok_or(StatusCode::UNAUTHORIZED)?;

    let user = state
        .session_cache
        .get_user(&token)
        .await
        .ok_or(StatusCode::UNAUTHORIZED)?;

    Ok(Json(json!({
        "discordId": user.discord_id.to_string(),
        "username": user.discord_username,
        "avatarHash": user.discord_avatar_hash,
        "isAdmin": user.is_admin,
    })))
}
@@ -0,0 +1,196 @@
//! Content encoding negotiation for pre-compressed asset serving.
//!
//! Parses Accept-Encoding headers with quality values and returns
//! supported encodings in priority order for content negotiation.

use axum::http::{HeaderMap, HeaderValue, header};

/// Minimum size threshold for compression (bytes).
///
/// Must match `MIN_SIZE` in `web/scripts/compress-assets.ts`.
pub const COMPRESSION_MIN_SIZE: usize = 512;

/// Supported content encodings in priority order (best compression first).
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum ContentEncoding {
    Zstd,
    Brotli,
    Gzip,
    Identity,
}

impl ContentEncoding {
    /// File extension suffix for pre-compressed variant lookup.
    #[inline]
    pub fn extension(&self) -> &'static str {
        match self {
            Self::Zstd => ".zst",
            Self::Brotli => ".br",
            Self::Gzip => ".gz",
            Self::Identity => "",
        }
    }

    /// `Content-Encoding` header value, or `None` for identity.
    #[inline]
    pub fn header_value(&self) -> Option<HeaderValue> {
        match self {
            Self::Zstd => Some(HeaderValue::from_static("zstd")),
            Self::Brotli => Some(HeaderValue::from_static("br")),
            Self::Gzip => Some(HeaderValue::from_static("gzip")),
            Self::Identity => None,
        }
    }

    /// Default priority when quality values are equal (higher = better).
    #[inline]
    fn default_priority(&self) -> u8 {
        match self {
            Self::Zstd => 4,
            Self::Brotli => 3,
            Self::Gzip => 2,
            Self::Identity => 1,
        }
    }
}

/// Parse `Accept-Encoding` header and return supported encodings in priority order.
///
/// Supports quality values: `Accept-Encoding: gzip;q=0.8, br;q=1.0, zstd`
/// When quality values are equal: zstd > brotli > gzip > identity.
/// Encodings with `q=0` are excluded.
pub fn parse_accepted_encodings(headers: &HeaderMap) -> Vec<ContentEncoding> {
    let Some(accept) = headers
        .get(header::ACCEPT_ENCODING)
        .and_then(|v| v.to_str().ok())
    else {
        return vec![ContentEncoding::Identity];
    };

    let mut encodings: Vec<(ContentEncoding, f32)> = Vec::new();

    for part in accept.split(',') {
        let part = part.trim();
        if part.is_empty() {
            continue;
        }

        let (encoding_str, quality) = if let Some((enc, params)) = part.split_once(';') {
            let q = params
                .split(';')
                .find_map(|p| p.trim().strip_prefix("q="))
                .and_then(|q| q.parse::<f32>().ok())
                .unwrap_or(1.0);
            (enc.trim(), q)
        } else {
            (part, 1.0)
        };

        if quality == 0.0 {
            continue;
        }

        let encoding = match encoding_str.to_lowercase().as_str() {
            "zstd" => ContentEncoding::Zstd,
            "br" | "brotli" => ContentEncoding::Brotli,
            "gzip" | "x-gzip" => ContentEncoding::Gzip,
            "*" => ContentEncoding::Gzip,
            "identity" => ContentEncoding::Identity,
            _ => continue,
        };

        encodings.push((encoding, quality));
    }

    // Sort by quality (desc), then default priority (desc)
    encodings.sort_by(|a, b| {
        b.1.partial_cmp(&a.1)
            .unwrap_or(std::cmp::Ordering::Equal)
            .then_with(|| b.0.default_priority().cmp(&a.0.default_priority()))
    });

    if encodings.is_empty() {
        vec![ContentEncoding::Identity]
    } else {
        encodings.into_iter().map(|(e, _)| e).collect()
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_parse_all_encodings() {
        let mut headers = HeaderMap::new();
        headers.insert(header::ACCEPT_ENCODING, "gzip, br, zstd".parse().unwrap());
        let encodings = parse_accepted_encodings(&headers);
        assert_eq!(encodings[0], ContentEncoding::Zstd);
        assert_eq!(encodings[1], ContentEncoding::Brotli);
        assert_eq!(encodings[2], ContentEncoding::Gzip);
    }

    #[test]
    fn test_parse_with_quality_values() {
        let mut headers = HeaderMap::new();
        headers.insert(
            header::ACCEPT_ENCODING,
            "gzip;q=1.0, br;q=0.5, zstd;q=0.8".parse().unwrap(),
        );
        let encodings = parse_accepted_encodings(&headers);
        assert_eq!(encodings[0], ContentEncoding::Gzip);
        assert_eq!(encodings[1], ContentEncoding::Zstd);
        assert_eq!(encodings[2], ContentEncoding::Brotli);
    }

    #[test]
    fn test_no_header_returns_identity() {
        let headers = HeaderMap::new();
        let encodings = parse_accepted_encodings(&headers);
        assert_eq!(encodings, vec![ContentEncoding::Identity]);
    }

    #[test]
    fn test_disabled_encoding_excluded() {
        let mut headers = HeaderMap::new();
        headers.insert(
            header::ACCEPT_ENCODING,
            "zstd;q=0, br, gzip".parse().unwrap(),
        );
        let encodings = parse_accepted_encodings(&headers);
        assert_eq!(encodings[0], ContentEncoding::Brotli);
        assert_eq!(encodings[1], ContentEncoding::Gzip);
        assert!(!encodings.contains(&ContentEncoding::Zstd));
    }

    #[test]
    fn test_real_chrome_header() {
        let mut headers = HeaderMap::new();
        headers.insert(
            header::ACCEPT_ENCODING,
            "gzip, deflate, br, zstd".parse().unwrap(),
        );
        assert_eq!(parse_accepted_encodings(&headers)[0], ContentEncoding::Zstd);
    }

    #[test]
    fn test_extensions() {
        assert_eq!(ContentEncoding::Zstd.extension(), ".zst");
        assert_eq!(ContentEncoding::Brotli.extension(), ".br");
        assert_eq!(ContentEncoding::Gzip.extension(), ".gz");
        assert_eq!(ContentEncoding::Identity.extension(), "");
    }

    #[test]
    fn test_header_values() {
        assert_eq!(
            ContentEncoding::Zstd.header_value().unwrap(),
            HeaderValue::from_static("zstd")
        );
        assert_eq!(
            ContentEncoding::Brotli.header_value().unwrap(),
            HeaderValue::from_static("br")
        );
        assert!(ContentEncoding::Identity.header_value().is_none());
    }
}
@@ -0,0 +1,74 @@
//! Axum extractors for authentication and authorization.

use axum::extract::FromRequestParts;
use axum::http::{StatusCode, header};
use axum::response::Json;
use http::request::Parts;
use serde_json::json;

use crate::data::models::User;
use crate::state::AppState;

/// Extractor that resolves the session cookie to an authenticated [`User`].
///
/// Returns 401 if no valid session cookie is present.
pub struct AuthUser(pub User);

impl FromRequestParts<AppState> for AuthUser {
    type Rejection = (StatusCode, Json<serde_json::Value>);

    async fn from_request_parts(
        parts: &mut Parts,
        state: &AppState,
    ) -> Result<Self, Self::Rejection> {
        let token = parts
            .headers
            .get(header::COOKIE)
            .and_then(|v| v.to_str().ok())
            .and_then(|cookies| {
                cookies
                    .split(';')
                    .find_map(|c| c.trim().strip_prefix("session=").map(|v| v.to_owned()))
            })
            .ok_or_else(|| {
                (
                    StatusCode::UNAUTHORIZED,
                    Json(json!({"error": "unauthorized", "message": "No session cookie"})),
                )
            })?;

        let user = state.session_cache.get_user(&token).await.ok_or_else(|| {
            (
                StatusCode::UNAUTHORIZED,
                Json(json!({"error": "unauthorized", "message": "Invalid or expired session"})),
            )
        })?;

        Ok(AuthUser(user))
    }
}

/// Extractor that requires an authenticated admin user.
///
/// Returns 401 if not authenticated, 403 if not admin.
pub struct AdminUser(pub User);

impl FromRequestParts<AppState> for AdminUser {
    type Rejection = (StatusCode, Json<serde_json::Value>);

    async fn from_request_parts(
        parts: &mut Parts,
        state: &AppState,
    ) -> Result<Self, Self::Rejection> {
        let AuthUser(user) = AuthUser::from_request_parts(parts, state).await?;

        if !user.is_admin {
            return Err((
                StatusCode::FORBIDDEN,
                Json(json!({"error": "forbidden", "message": "Admin access required"})),
            ));
        }

        Ok(AdminUser(user))
    }
}
@@ -1,7 +1,13 @@
//! Web API module for the banner application.

pub mod admin;
#[cfg(feature = "embed-assets")]
pub mod assets;
pub mod auth;
#[cfg(feature = "embed-assets")]
pub mod encoding;
pub mod extractors;
pub mod routes;
pub mod session_cache;

pub use routes::*;

+395
-108
@@ -1,79 +1,75 @@
//! Web API endpoints for Banner bot monitoring and metrics.

use axum::{
    Router,
    Extension, Router,
    body::Body,
    extract::{Request, State},
    extract::{Path, Query, Request, State},
    http::StatusCode as AxumStatusCode,
    response::{Json, Response},
    routing::get,
    routing::{get, post, put},
};

use crate::web::admin;
use crate::web::auth::{self, AuthConfig};
#[cfg(feature = "embed-assets")]
use axum::{
    http::{HeaderMap, HeaderValue, StatusCode, Uri},
    response::{Html, IntoResponse},
    http::{HeaderMap, StatusCode, Uri},
    response::IntoResponse,
};
#[cfg(feature = "embed-assets")]
use http::header;
use serde::Serialize;
use serde::{Deserialize, Serialize};
use serde_json::{Value, json};
use std::{collections::BTreeMap, time::Duration};
use ts_rs::TS;

use crate::state::AppState;
use crate::status::ServiceStatus;
#[cfg(not(feature = "embed-assets"))]
use tower_http::cors::{Any, CorsLayer};
use tower_http::{classify::ServerErrorsFailureClass, timeout::TimeoutLayer, trace::TraceLayer};
use tower_http::{
    classify::ServerErrorsFailureClass, compression::CompressionLayer, timeout::TimeoutLayer,
    trace::TraceLayer,
};
use tracing::{Span, debug, trace, warn};

#[cfg(feature = "embed-assets")]
use crate::web::assets::{WebAssets, get_asset_metadata_cached};

/// Set appropriate caching headers based on asset type
#[cfg(feature = "embed-assets")]
fn set_caching_headers(response: &mut Response, path: &str, etag: &str) {
    let headers = response.headers_mut();

    // Set ETag
    if let Ok(etag_value) = HeaderValue::from_str(etag) {
        headers.insert(header::ETAG, etag_value);
    }

    // Set Cache-Control based on asset type
    let cache_control = if path.starts_with("assets/") {
        // Static assets with hashed filenames - long-term cache
        "public, max-age=31536000, immutable"
    } else if path == "index.html" {
        // HTML files - short-term cache
        "public, max-age=300"
    } else {
        match path.split_once('.').map(|(_, extension)| extension) {
            Some(ext) => match ext {
                // CSS/JS files - medium-term cache
                "css" | "js" => "public, max-age=86400",
                // Images - long-term cache
                "png" | "jpg" | "jpeg" | "gif" | "svg" | "ico" => "public, max-age=2592000",
                // Default for other files
                _ => "public, max-age=3600",
            },
            // Default for files without an extension
            None => "public, max-age=3600",
        }
    };

    if let Ok(cache_control_value) = HeaderValue::from_str(cache_control) {
        headers.insert(header::CACHE_CONTROL, cache_control_value);
    }
}
use crate::web::assets::try_serve_asset_with_encoding;

/// Creates the web server router
pub fn create_router(app_state: AppState) -> Router {
pub fn create_router(app_state: AppState, auth_config: AuthConfig) -> Router {
    let api_router = Router::new()
        .route("/health", get(health))
        .route("/status", get(status))
        .route("/metrics", get(metrics))
        .route("/courses/search", get(search_courses))
        .route("/courses/{term}/{crn}", get(get_course))
        .route("/terms", get(get_terms))
        .route("/subjects", get(get_subjects))
        .route("/reference/{category}", get(get_reference))
        .with_state(app_state.clone());

    let auth_router = Router::new()
        .route("/auth/login", get(auth::auth_login))
        .route("/auth/callback", get(auth::auth_callback))
        .route("/auth/logout", post(auth::auth_logout))
        .route("/auth/me", get(auth::auth_me))
        .layer(Extension(auth_config))
        .with_state(app_state.clone());

    let admin_router = Router::new()
        .route("/admin/status", get(admin::admin_status))
        .route("/admin/users", get(admin::list_users))
        .route(
            "/admin/users/{discord_id}/admin",
            put(admin::set_user_admin),
        )
        .route("/admin/scrape-jobs", get(admin::list_scrape_jobs))
        .route("/admin/audit-log", get(admin::list_audit_log))
        .with_state(app_state);

    let mut router = Router::new().nest("/api", api_router);
    let mut router = Router::new()
        .nest("/api", api_router)
        .nest("/api", auth_router)
        .nest("/api", admin_router);

    // When embed-assets feature is enabled, serve embedded static assets
    #[cfg(feature = "embed-assets")]
@@ -93,6 +89,13 @@ pub fn create_router(app_state: AppState) -> Router {
    }

    router.layer((
        // Compress API responses (gzip/brotli/zstd). Pre-compressed static
        // assets already have Content-Encoding set, so tower-http skips them.
        CompressionLayer::new()
            .zstd(true)
            .br(true)
            .gzip(true)
            .quality(tower_http::CompressionLevel::Fastest),
        TraceLayer::new_for_http()
            .make_span_with(|request: &Request<Body>| {
                tracing::debug_span!("request", path = request.uri().path())
@@ -139,71 +142,35 @@ pub fn create_router(app_state: AppState) -> Router {
    ))
}

/// Handler that extracts request information for caching
/// SPA fallback handler with content encoding negotiation.
///
/// Serves embedded static assets with pre-compressed variants when available,
/// falling back to `index.html` for SPA client-side routing.
#[cfg(feature = "embed-assets")]
async fn fallback(request: Request) -> Response {
async fn fallback(request: Request) -> axum::response::Response {
    let uri = request.uri().clone();
    let headers = request.headers().clone();
    handle_spa_fallback_with_headers(uri, headers).await
    handle_spa_fallback(uri, headers).await
}

/// Handles SPA routing by serving index.html for non-API, non-asset requests
/// This version includes HTTP caching headers and ETag support
#[cfg(feature = "embed-assets")]
async fn handle_spa_fallback_with_headers(uri: Uri, request_headers: HeaderMap) -> Response {
    let path = uri.path().trim_start_matches('/');

    if let Some(content) = WebAssets::get(path) {
        // Get asset metadata (MIME type and hash) with caching
        let metadata = get_asset_metadata_cached(path, &content.data);

        // Check if client has a matching ETag (conditional request)
        if let Some(etag) = request_headers.get(header::IF_NONE_MATCH)
            && etag.to_str().is_ok_and(|s| metadata.etag_matches(s))
        {
            return StatusCode::NOT_MODIFIED.into_response();
        }

        // Use cached MIME type, only set Content-Type if we have a valid MIME type
        let mut response = (
            [(
                header::CONTENT_TYPE,
                // For unknown types, set to application/octet-stream
                metadata
                    .mime_type
                    .unwrap_or("application/octet-stream".to_string()),
            )],
            content.data,
        )
            .into_response();

        // Set caching headers
        set_caching_headers(&mut response, path, &metadata.hash.quoted());
async fn handle_spa_fallback(uri: Uri, request_headers: HeaderMap) -> axum::response::Response {
    let path = uri.path();

    // Try serving the exact asset (with encoding negotiation)
    if let Some(response) = try_serve_asset_with_encoding(path, &request_headers) {
        return response;
    } else {
        // Any assets that are not found should be treated as a 404, not falling back to the SPA index.html
        if path.starts_with("assets/") {
            return (StatusCode::NOT_FOUND, "Asset not found").into_response();
        }
    }

    // Fall back to the SPA index.html
    match WebAssets::get("index.html") {
        Some(content) => {
            let metadata = get_asset_metadata_cached("index.html", &content.data);
    // SvelteKit assets under _app/ that don't exist are a hard 404
    let trimmed = path.trim_start_matches('/');
    if trimmed.starts_with("_app/") || trimmed.starts_with("assets/") {
        return (StatusCode::NOT_FOUND, "Asset not found").into_response();
    }

            // Check if client has a matching ETag for index.html
            if let Some(etag) = request_headers.get(header::IF_NONE_MATCH)
                && etag.to_str().is_ok_and(|s| metadata.etag_matches(s))
            {
                return StatusCode::NOT_MODIFIED.into_response();
            }

            let mut response = Html(content.data).into_response();
            set_caching_headers(&mut response, "index.html", &metadata.hash.quoted());
            response
        }
    // SPA fallback: serve index.html with encoding negotiation
    match try_serve_asset_with_encoding("/index.html", &request_headers) {
        Some(response) => response,
        None => (
            StatusCode::INTERNAL_SERVER_ERROR,
            "Failed to load index.html",
@@ -221,14 +188,16 @@ async fn health() -> Json<Value> {
    }))
}

#[derive(Serialize)]
struct ServiceInfo {
#[derive(Serialize, TS)]
#[ts(export)]
pub struct ServiceInfo {
    name: String,
    status: ServiceStatus,
}

#[derive(Serialize)]
struct StatusResponse {
#[derive(Serialize, TS)]
#[ts(export)]
pub struct StatusResponse {
    status: ServiceStatus,
    version: String,
    commit: String,
@@ -249,7 +218,10 @@ async fn status(State(state): State<AppState>) -> Json<StatusResponse> {
        );
    }

    let overall_status = if services.values().any(|s| matches!(s.status, ServiceStatus::Error)) {
    let overall_status = if services
        .values()
        .any(|s| matches!(s.status, ServiceStatus::Error))
    {
        ServiceStatus::Error
    } else if !services.is_empty()
        && services
@@ -281,3 +253,318 @@ async fn metrics() -> Json<Value> {
        "timestamp": chrono::Utc::now().to_rfc3339()
    }))
}

// ============================================================
// Course search & detail API
// ============================================================

#[derive(Deserialize)]
struct SubjectsParams {
    term: String,
}

#[derive(Deserialize)]
struct SearchParams {
    term: String,
    #[serde(default)]
    subject: Vec<String>,
    q: Option<String>,
    course_number_low: Option<i32>,
    course_number_high: Option<i32>,
    #[serde(default)]
    open_only: bool,
    instructional_method: Option<String>,
    campus: Option<String>,
    #[serde(default = "default_limit")]
    limit: i32,
    #[serde(default)]
    offset: i32,
    sort_by: Option<SortColumn>,
    sort_dir: Option<SortDirection>,
}

use crate::data::courses::{SortColumn, SortDirection};

fn default_limit() -> i32 {
    25
}

#[derive(Serialize, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export)]
pub struct CourseResponse {
    crn: String,
    subject: String,
    course_number: String,
    title: String,
    term_code: String,
    sequence_number: Option<String>,
    instructional_method: Option<String>,
    campus: Option<String>,
    enrollment: i32,
    max_enrollment: i32,
    wait_count: i32,
    wait_capacity: i32,
    credit_hours: Option<i32>,
    credit_hour_low: Option<i32>,
    credit_hour_high: Option<i32>,
    cross_list: Option<String>,
    cross_list_capacity: Option<i32>,
    cross_list_count: Option<i32>,
    link_identifier: Option<String>,
    is_section_linked: Option<bool>,
    part_of_term: Option<String>,
    meeting_times: Vec<crate::data::models::DbMeetingTime>,
    attributes: Vec<String>,
    instructors: Vec<InstructorResponse>,
}

#[derive(Serialize, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export)]
pub struct InstructorResponse {
    banner_id: String,
    display_name: String,
    email: Option<String>,
    is_primary: bool,
    rmp_rating: Option<f32>,
    rmp_num_ratings: Option<i32>,
}

#[derive(Serialize, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export)]
pub struct SearchResponse {
    courses: Vec<CourseResponse>,
    total_count: i32,
    offset: i32,
    limit: i32,
}

#[derive(Serialize, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export)]
pub struct CodeDescription {
    code: String,
    description: String,
}

/// Build a `CourseResponse` from a DB course with pre-fetched instructor details.
fn build_course_response(
    course: &crate::data::models::Course,
    instructors: Vec<crate::data::models::CourseInstructorDetail>,
) -> CourseResponse {
    let instructors = instructors
        .into_iter()
        .map(|i| InstructorResponse {
            banner_id: i.banner_id,
            display_name: i.display_name,
            email: i.email,
            is_primary: i.is_primary,
            rmp_rating: i.avg_rating,
            rmp_num_ratings: i.num_ratings,
        })
        .collect();

    CourseResponse {
        crn: course.crn.clone(),
        subject: course.subject.clone(),
        course_number: course.course_number.clone(),
        title: course.title.clone(),
        term_code: course.term_code.clone(),
        sequence_number: course.sequence_number.clone(),
        instructional_method: course.instructional_method.clone(),
        campus: course.campus.clone(),
        enrollment: course.enrollment,
        max_enrollment: course.max_enrollment,
        wait_count: course.wait_count,
        wait_capacity: course.wait_capacity,
        credit_hours: course.credit_hours,
        credit_hour_low: course.credit_hour_low,
        credit_hour_high: course.credit_hour_high,
        cross_list: course.cross_list.clone(),
        cross_list_capacity: course.cross_list_capacity,
        cross_list_count: course.cross_list_count,
        link_identifier: course.link_identifier.clone(),
        is_section_linked: course.is_section_linked,
        part_of_term: course.part_of_term.clone(),
        meeting_times: serde_json::from_value(course.meeting_times.clone()).unwrap_or_default(),
        attributes: serde_json::from_value(course.attributes.clone()).unwrap_or_default(),
        instructors,
    }
}

/// `GET /api/courses/search`
async fn search_courses(
    State(state): State<AppState>,
    axum_extra::extract::Query(params): axum_extra::extract::Query<SearchParams>,
) -> Result<Json<SearchResponse>, (AxumStatusCode, String)> {
    let limit = params.limit.clamp(1, 100);
    let offset = params.offset.max(0);

    let (courses, total_count) = crate::data::courses::search_courses(
        &state.db_pool,
        &params.term,
        if params.subject.is_empty() {
            None
        } else {
            Some(&params.subject)
        },
        params.q.as_deref(),
        params.course_number_low,
        params.course_number_high,
        params.open_only,
        params.instructional_method.as_deref(),
        params.campus.as_deref(),
        limit,
        offset,
        params.sort_by,
        params.sort_dir,
    )
    .await
    .map_err(|e| {
        tracing::error!(error = %e, "Course search failed");
        (
            AxumStatusCode::INTERNAL_SERVER_ERROR,
            "Search failed".to_string(),
        )
    })?;

    // Batch-fetch all instructors in a single query instead of N+1
    let course_ids: Vec<i32> = courses.iter().map(|c| c.id).collect();
    let mut instructor_map =
        crate::data::courses::get_instructors_for_courses(&state.db_pool, &course_ids)
            .await
            .unwrap_or_default();

    let course_responses: Vec<CourseResponse> = courses
        .iter()
        .map(|course| {
            let instructors = instructor_map.remove(&course.id).unwrap_or_default();
            build_course_response(course, instructors)
        })
        .collect();

    Ok(Json(SearchResponse {
        courses: course_responses,
        total_count: total_count as i32,
        offset,
        limit,
    }))
}

/// `GET /api/courses/:term/:crn`
async fn get_course(
    State(state): State<AppState>,
    Path((term, crn)): Path<(String, String)>,
) -> Result<Json<CourseResponse>, (AxumStatusCode, String)> {
    let course = crate::data::courses::get_course_by_crn(&state.db_pool, &crn, &term)
        .await
        .map_err(|e| {
            tracing::error!(error = %e, "Course lookup failed");
            (
                AxumStatusCode::INTERNAL_SERVER_ERROR,
                "Lookup failed".to_string(),
            )
        })?
        .ok_or_else(|| (AxumStatusCode::NOT_FOUND, "Course not found".to_string()))?;

    let instructors = crate::data::courses::get_course_instructors(&state.db_pool, course.id)
        .await
        .unwrap_or_default();
    Ok(Json(build_course_response(&course, instructors)))
}

/// `GET /api/terms`
async fn get_terms(
    State(state): State<AppState>,
) -> Result<Json<Vec<CodeDescription>>, (AxumStatusCode, String)> {
    let cache = state.reference_cache.read().await;
    let term_codes = crate::data::courses::get_available_terms(&state.db_pool)
        .await
        .map_err(|e| {
            tracing::error!(error = %e, "Failed to get terms");
            (
                AxumStatusCode::INTERNAL_SERVER_ERROR,
                "Failed to get terms".to_string(),
            )
        })?;

    let terms: Vec<CodeDescription> = term_codes
        .into_iter()
        .map(|code| {
            let description = cache
                .lookup("term", &code)
                .unwrap_or("Unknown Term")
                .to_string();
            CodeDescription { code, description }
        })
        .collect();

    Ok(Json(terms))
}

/// `GET /api/subjects?term=202620`
async fn get_subjects(
    State(state): State<AppState>,
    Query(params): Query<SubjectsParams>,
) -> Result<Json<Vec<CodeDescription>>, (AxumStatusCode, String)> {
    let rows = crate::data::courses::get_subjects_by_enrollment(&state.db_pool, &params.term)
        .await
        .map_err(|e| {
            tracing::error!(error = %e, "Failed to get subjects");
            (
                AxumStatusCode::INTERNAL_SERVER_ERROR,
                "Failed to get subjects".to_string(),
            )
        })?;

    let subjects: Vec<CodeDescription> = rows
        .into_iter()
        .map(|(code, description, _enrollment)| CodeDescription { code, description })
        .collect();

    Ok(Json(subjects))
}

/// `GET /api/reference/:category`
async fn get_reference(
    State(state): State<AppState>,
    Path(category): Path<String>,
) -> Result<Json<Vec<CodeDescription>>, (AxumStatusCode, String)> {
    let cache = state.reference_cache.read().await;
    let entries = cache.entries_for_category(&category);

    if entries.is_empty() {
        // Fall back to DB query in case cache doesn't have this category
        drop(cache);
        let rows = crate::data::reference::get_by_category(&category, &state.db_pool)
            .await
            .map_err(|e| {
                tracing::error!(error = %e, category = %category, "Reference lookup failed");
                (
                    AxumStatusCode::INTERNAL_SERVER_ERROR,
                    "Lookup failed".to_string(),
                )
            })?;

        return Ok(Json(
            rows.into_iter()
                .map(|r| CodeDescription {
                    code: r.code,
                    description: r.description,
                })
                .collect(),
        ));
    }

    Ok(Json(
        entries
            .into_iter()
            .map(|(code, desc)| CodeDescription {
                code: code.to_string(),
                description: desc.to_string(),
            })
            .collect(),
    ))
}

@@ -0,0 +1,188 @@
//! In-memory caches for session resolution and OAuth CSRF state.

use chrono::{DateTime, Utc};
use dashmap::DashMap;
use rand::Rng;
use sqlx::PgPool;
use std::sync::Arc;
use std::time::{Duration, Instant};

use crate::data::models::User;

/// Cached session entry with TTL.
#[derive(Debug, Clone)]
struct CachedSession {
    user: User,
    session_expires_at: DateTime<Utc>,
    cached_at: Instant,
}

/// In-memory session cache backed by PostgreSQL.
///
/// Provides fast session resolution without a DB round-trip on every request.
/// Cache entries expire after a configurable TTL (default 5 minutes).
#[derive(Clone)]
pub struct SessionCache {
    cache: Arc<DashMap<String, CachedSession>>,
    db_pool: PgPool,
    cache_ttl: Duration,
}

impl SessionCache {
    /// Create a new session cache with a 5-minute default TTL.
    pub fn new(db_pool: PgPool) -> Self {
        Self {
            cache: Arc::new(DashMap::new()),
            db_pool,
            cache_ttl: Duration::from_secs(5 * 60),
        }
    }

    /// Resolve a session token to a [`User`], using the cache when possible.
    ///
    /// On cache hit (entry present, not stale, session not expired), returns the
    /// cached user immediately. On miss or stale entry, queries the database for
    /// the session and user, populates the cache, and fire-and-forgets a
    /// `touch_session` call to update `last_active_at`.
    pub async fn get_user(&self, token: &str) -> Option<User> {
        // Check cache first
        if let Some(entry) = self.cache.get(token) {
            let now_instant = Instant::now();
            let now_utc = Utc::now();

            let cache_fresh = entry.cached_at + self.cache_ttl > now_instant;
            let session_valid = entry.session_expires_at > now_utc;

            if cache_fresh && session_valid {
                return Some(entry.user.clone());
            }

            // Stale or expired — drop the ref before removing
            drop(entry);
            self.cache.remove(token);
        }

        // Cache miss — query DB
        let session = crate::data::sessions::get_session(&self.db_pool, token)
            .await
            .ok()
            .flatten()?;

        let user = crate::data::users::get_user(&self.db_pool, session.user_id)
            .await
            .ok()
            .flatten()?;

        self.cache.insert(
            token.to_owned(),
            CachedSession {
                user: user.clone(),
                session_expires_at: session.expires_at,
                cached_at: Instant::now(),
            },
        );

        // Fire-and-forget touch to update last_active_at
        let pool = self.db_pool.clone();
        let token_owned = token.to_owned();
        tokio::spawn(async move {
            if let Err(e) = crate::data::sessions::touch_session(&pool, &token_owned).await {
                tracing::warn!(error = %e, "failed to touch session");
            }
        });

        Some(user)
    }

    /// Remove a single session from the cache (e.g. on logout).
    pub fn evict(&self, token: &str) {
        self.cache.remove(token);
    }

    /// Remove all cached sessions belonging to a user.
    pub fn evict_user(&self, discord_id: i64) {
        self.cache
            .retain(|_, entry| entry.user.discord_id != discord_id);
    }

    /// Delete expired sessions from the database and sweep the in-memory cache.
    ///
    /// Returns the number of sessions deleted from the database.
    #[allow(dead_code)] // Intended for periodic cleanup task (not yet wired)
    pub async fn cleanup_expired(&self) -> anyhow::Result<u64> {
        let deleted = crate::data::sessions::cleanup_expired(&self.db_pool).await?;

        let now = Utc::now();
        self.cache.retain(|_, entry| entry.session_expires_at > now);

        Ok(deleted)
    }
}

/// Data stored alongside each OAuth CSRF state token.
|
||||
struct OAuthStateEntry {
|
||||
created_at: Instant,
|
||||
/// The browser origin that initiated the login flow, so the callback
|
||||
/// can reconstruct the exact redirect_uri Discord expects.
|
||||
origin: String,
|
||||
}
|
||||
|
||||
/// Ephemeral store for OAuth CSRF state tokens.
|
||||
///
|
||||
/// Tokens are stored with creation time and expire after a configurable TTL.
|
||||
/// Each token is single-use: validation consumes it.
|
||||
#[derive(Clone)]
|
||||
pub struct OAuthStateStore {
|
||||
states: Arc<DashMap<String, OAuthStateEntry>>,
|
||||
ttl: Duration,
|
||||
}
|
||||
|
||||
impl Default for OAuthStateStore {
|
||||
fn default() -> Self {
|
||||
Self::new()
|
||||
}
|
||||
}
|
||||
|
||||
impl OAuthStateStore {
|
||||
/// Create a new store with a 10-minute TTL.
|
||||
pub fn new() -> Self {
|
||||
Self {
|
||||
states: Arc::new(DashMap::new()),
|
||||
ttl: Duration::from_secs(10 * 60),
|
||||
}
|
||||
}
|
||||
|
||||
/// Generate a random 16-byte hex CSRF token, store it with the given
|
||||
/// origin, and return the token.
|
||||
pub fn generate(&self, origin: String) -> String {
|
||||
let bytes: [u8; 16] = rand::rng().random();
|
||||
let token: String = bytes.iter().map(|b| format!("{b:02x}")).collect();
|
||||
self.states.insert(
|
||||
token.clone(),
|
||||
OAuthStateEntry {
|
||||
created_at: Instant::now(),
|
||||
origin,
|
||||
},
|
||||
);
|
||||
token
|
||||
}
|
||||
|
||||
/// Validate and consume a CSRF token. Returns the stored origin if the
|
||||
/// token was present and not expired.
|
||||
pub fn validate(&self, state: &str) -> Option<String> {
|
||||
let (_, entry) = self.states.remove(state)?;
|
||||
if entry.created_at.elapsed() < self.ttl {
|
||||
Some(entry.origin)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
/// Remove all expired entries from the store.
|
||||
#[allow(dead_code)] // Intended for periodic cleanup task (not yet wired)
|
||||
pub fn cleanup(&self) {
|
||||
let ttl = self.ttl;
|
||||
self.states
|
||||
.retain(|_, entry| entry.created_at.elapsed() < ttl);
|
||||
}
|
||||
}
|
||||
Vendored
+1
-2
@@ -5,5 +5,4 @@ dist-ssr
*.local
count.txt
.env
.nitro
.tanstack
.svelte-kit

+1
-1
@@ -7,7 +7,7 @@
  },
  "files": {
    "ignoreUnknown": false,
    "ignore": ["dist/", "node_modules/", ".tanstack/"]
    "ignore": ["dist/", "node_modules/", ".svelte-kit/", "src/lib/bindings/"]
  },
  "formatter": {
    "enabled": true,

+143
-878
File diff suppressed because it is too large
@@ -1,60 +0,0 @@
|
||||
import js from "@eslint/js";
|
||||
import tseslint from "typescript-eslint";
|
||||
import react from "eslint-plugin-react";
|
||||
import reactHooks from "eslint-plugin-react-hooks";
|
||||
import reactRefresh from "eslint-plugin-react-refresh";
|
||||
|
||||
export default tseslint.config(
|
||||
// Ignore generated files and build outputs
|
||||
{
|
||||
ignores: ["dist", "node_modules", "src/routeTree.gen.ts", "*.config.js"],
|
||||
},
|
||||
// Base configs
|
||||
js.configs.recommended,
|
||||
...tseslint.configs.recommendedTypeChecked,
|
||||
// React plugin configuration
|
||||
{
|
||||
files: ["**/*.{ts,tsx}"],
|
||||
plugins: {
|
||||
react,
|
||||
"react-hooks": reactHooks,
|
||||
"react-refresh": reactRefresh,
|
||||
},
|
||||
languageOptions: {
|
||||
parserOptions: {
|
||||
project: true,
|
||||
tsconfigRootDir: import.meta.dirname,
|
||||
ecmaFeatures: {
|
||||
jsx: true,
|
||||
},
|
||||
},
|
||||
},
|
||||
settings: {
|
||||
react: {
|
||||
version: "19.0",
|
||||
},
|
||||
},
|
||||
rules: {
|
||||
// React rules
|
||||
...react.configs.recommended.rules,
|
||||
...react.configs["jsx-runtime"].rules,
|
||||
...reactHooks.configs.recommended.rules,
|
||||
|
||||
// React Refresh
|
||||
"react-refresh/only-export-components": ["warn", { allowConstantExport: true }],
|
||||
|
||||
// TypeScript overrides
|
||||
"@typescript-eslint/no-unused-vars": [
|
||||
"error",
|
||||
{
|
||||
argsIgnorePattern: "^_",
|
||||
varsIgnorePattern: "^_",
|
||||
},
|
||||
],
|
||||
"@typescript-eslint/no-explicit-any": "warn",
|
||||
|
||||
// Disable prop-types since we're using TypeScript
|
||||
"react/prop-types": "off",
|
||||
},
|
||||
}
|
||||
);
|
||||
@@ -1,20 +0,0 @@
|
||||
<!doctype html>
|
||||
<html lang="en">
|
||||
<head>
|
||||
<meta charset="UTF-8" />
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
|
||||
<link rel="icon" href="/favicon.ico" />
|
||||
<meta name="theme-color" content="#000000" />
|
||||
<meta
|
||||
name="description"
|
||||
content="Banner, a Discord bot and web interface for UTSA Course Monitoring"
|
||||
/>
|
||||
<link rel="apple-touch-icon" href="/logo192.png" />
|
||||
<link rel="manifest" href="/manifest.json" />
|
||||
<title>Banner</title>
|
||||
</head>
|
||||
<body>
|
||||
<div id="app"></div>
|
||||
<script type="module" src="/src/main.tsx"></script>
|
||||
</body>
|
||||
</html>
|
||||
+23
-34
@@ -3,48 +3,37 @@
  "private": true,
  "type": "module",
  "scripts": {
    "dev": "vite --port 3000",
    "start": "vite --port 3000",
    "build": "vite build && tsc",
    "serve": "vite preview",
    "dev": "vite dev --port 3000",
    "build": "vite build",
    "preview": "vite preview",
    "check": "svelte-kit sync && svelte-check --tsconfig ./tsconfig.json",
    "test": "vitest run",
    "lint": "tsc && eslint . --ext .ts,.tsx",
    "typecheck": "tsc --noEmit",
    "format": "biome format --write .",
    "format:check": "biome format ."
  },
  "dependencies": {
    "@radix-ui/themes": "^3.2.1",
    "@tanstack/react-devtools": "^0.2.2",
    "@tanstack/react-router": "^1.157.16",
    "@tanstack/react-router-devtools": "^1.157.16",
    "@tanstack/router-plugin": "^1.157.16",
    "lucide-react": "^0.544.0",
    "next-themes": "^0.4.6",
    "react": "^19.2.4",
    "react-dom": "^19.2.4",
    "react-timeago": "^8.3.0",
    "recharts": "^3.7.0"
  },
  "devDependencies": {
    "@biomejs/biome": "^1.9.4",
    "@eslint/js": "^9.39.2",
    "@testing-library/dom": "^10.4.0",
    "@testing-library/react": "^16.3.2",
    "@types/node": "^24.10.9",
    "@types/react": "^19.2.10",
    "@types/react-dom": "^19.0.3",
    "@vitejs/plugin-react": "^4.3.4",
    "baseline-browser-mapping": "^2.9.19",
    "eslint": "^9.39.2",
    "eslint-plugin-react": "^7.37.5",
    "eslint-plugin-react-hooks": "^7.0.1",
    "eslint-plugin-react-refresh": "^0.4.26",
    "@fontsource-variable/inter": "^5.2.5",
    "@lucide/svelte": "^0.563.0",
    "@sveltejs/adapter-static": "^3.0.8",
    "@sveltejs/kit": "^2.16.0",
    "@sveltejs/vite-plugin-svelte": "^5.0.3",
    "@tailwindcss/vite": "^4.0.0",
    "@tanstack/table-core": "^8.21.3",
    "@types/node": "^25.1.0",
    "bits-ui": "^1.3.7",
    "clsx": "^2.1.1",
    "jsdom": "^26.0.0",
    "svelte": "^5.19.0",
    "svelte-check": "^4.1.4",
    "tailwind-merge": "^3.0.1",
    "tailwindcss": "^4.0.0",
    "typescript": "^5.7.2",
    "typescript-eslint": "^8.54.0",
    "vite": "^6.3.5",
    "vitest": "^3.0.5",
    "web-vitals": "^4.2.4"
    "vitest": "^3.0.5"
  },
  "dependencies": {
    "overlayscrollbars": "^2.14.0",
    "overlayscrollbars-svelte": "^0.5.5"
  }
}

@@ -0,0 +1,148 @@
|
||||
#!/usr/bin/env bun
|
||||
/**
|
||||
* Pre-compress static assets with maximum compression levels.
|
||||
* Run after `bun run build`.
|
||||
*
|
||||
* Generates .gz, .br, .zst variants for compressible files ≥ MIN_SIZE bytes.
|
||||
* These are embedded alongside originals by rust-embed and served via
|
||||
* content negotiation in src/web/assets.rs.
|
||||
*/
|
||||
import { readdir, stat, readFile, writeFile } from "fs/promises";
|
||||
import { join, extname } from "path";
|
||||
import { gzipSync, brotliCompressSync, constants } from "zlib";
|
||||
import { $ } from "bun";
|
||||
|
||||
// Must match COMPRESSION_MIN_SIZE in src/web/encoding.rs
|
||||
const MIN_SIZE = 512;
|
||||
|
||||
const COMPRESSIBLE_EXTENSIONS = new Set([
|
||||
".js",
|
||||
".css",
|
||||
".html",
|
||||
".json",
|
||||
".svg",
|
||||
".txt",
|
||||
".xml",
|
||||
".map",
|
||||
]);
|
||||
|
||||
// Check if zstd CLI is available
|
||||
let hasZstd = false;
|
||||
try {
|
||||
await $`which zstd`.quiet();
|
||||
hasZstd = true;
|
||||
} catch {
|
||||
console.warn("Warning: zstd not found, skipping .zst generation");
|
||||
}
|
||||
|
||||
async function* walkDir(dir: string): AsyncGenerator<string> {
|
||||
try {
|
||||
const entries = await readdir(dir, { withFileTypes: true });
|
||||
for (const entry of entries) {
|
||||
const path = join(dir, entry.name);
|
||||
if (entry.isDirectory()) {
|
||||
yield* walkDir(path);
|
||||
} else if (entry.isFile()) {
|
||||
yield path;
|
||||
}
|
||||
}
|
||||
} catch {
|
||||
// Directory doesn't exist, skip
|
||||
}
|
||||
}
|
||||
|
||||
async function compressFile(path: string): Promise<void> {
|
||||
const ext = extname(path);
|
||||
|
||||
if (!COMPRESSIBLE_EXTENSIONS.has(ext)) return;
|
||||
if (path.endsWith(".br") || path.endsWith(".gz") || path.endsWith(".zst")) return;
|
||||
|
||||
const stats = await stat(path);
|
||||
if (stats.size < MIN_SIZE) return;
|
||||
|
||||
// Skip if all compressed variants already exist
|
||||
const variantsExist = await Promise.all([
|
||||
stat(`${path}.br`).then(
|
||||
() => true,
|
||||
() => false
|
||||
),
|
||||
stat(`${path}.gz`).then(
|
||||
() => true,
|
||||
() => false
|
||||
),
|
||||
hasZstd
|
||||
? stat(`${path}.zst`).then(
|
||||
() => true,
|
||||
() => false
|
||||
)
|
||||
: Promise.resolve(false),
|
||||
]);
|
||||
|
||||
if (variantsExist.every((exists) => exists || !hasZstd)) {
|
||||
return;
|
||||
}
|
||||
|
||||
const content = await readFile(path);
|
||||
const originalSize = content.length;
|
||||
|
||||
// Brotli (maximum quality = 11)
|
||||
const brContent = brotliCompressSync(content, {
|
||||
params: {
|
||||
[constants.BROTLI_PARAM_QUALITY]: 11,
|
||||
},
|
||||
});
|
||||
await writeFile(`${path}.br`, brContent);
|
||||
|
||||
// Gzip (level 9)
|
||||
const gzContent = gzipSync(content, { level: 9 });
|
||||
await writeFile(`${path}.gz`, gzContent);
|
||||
|
||||
// Zstd (level 19 - maximum)
|
||||
if (hasZstd) {
|
||||
try {
|
||||
await $`zstd -19 -q -f -o ${path}.zst ${path}`.quiet();
|
||||
} catch (e) {
|
||||
console.warn(`Warning: Failed to compress ${path} with zstd: ${e}`);
|
||||
}
|
||||
}
|
||||
|
||||
const brRatio = ((brContent.length / originalSize) * 100).toFixed(1);
|
||||
const gzRatio = ((gzContent.length / originalSize) * 100).toFixed(1);
|
||||
console.log(`Compressed: ${path} (br: ${brRatio}%, gz: ${gzRatio}%, ${originalSize} bytes)`);
|
||||
}
|
||||
|
||||
async function main() {
|
||||
console.log("Pre-compressing static assets...");
|
||||
|
||||
// Banner uses adapter-static with output in dist/
|
||||
const dirs = ["dist"];
|
||||
let scannedFiles = 0;
|
||||
let compressedFiles = 0;
|
||||
|
||||
for (const dir of dirs) {
|
||||
for await (const file of walkDir(dir)) {
|
||||
const ext = extname(file);
|
||||
scannedFiles++;
|
||||
|
||||
if (
|
||||
COMPRESSIBLE_EXTENSIONS.has(ext) &&
|
||||
!file.endsWith(".br") &&
|
||||
!file.endsWith(".gz") &&
|
||||
!file.endsWith(".zst")
|
||||
) {
|
||||
const stats = await stat(file);
|
||||
if (stats.size >= MIN_SIZE) {
|
||||
await compressFile(file);
|
||||
compressedFiles++;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
console.log(`Done! Scanned ${scannedFiles} files, compressed ${compressedFiles} files.`);
|
||||
}
|
||||
|
||||
main().catch((e) => {
|
||||
console.error("Compression failed:", e);
|
||||
process.exit(1);
|
||||
});
|
||||
@@ -1,54 +0,0 @@
|
||||
.App {
|
||||
min-height: 100vh;
|
||||
font-family: -apple-system, BlinkMacSystemFont, "Segoe UI", "Roboto", "Oxygen", "Ubuntu",
|
||||
"Cantarell", "Fira Sans", "Droid Sans", "Helvetica Neue", sans-serif;
|
||||
background-color: var(--color-background);
|
||||
color: var(--color-text);
|
||||
}
|
||||
|
||||
@keyframes pulse {
|
||||
0%,
|
||||
100% {
|
||||
opacity: 0.2;
|
||||
}
|
||||
50% {
|
||||
opacity: 0.4;
|
||||
}
|
||||
}
|
||||
|
||||
.animate-pulse {
|
||||
animation: pulse 2s ease-in-out infinite;
|
||||
}
|
||||
|
||||
/* Theme toggle button */
|
||||
.theme-toggle {
|
||||
cursor: pointer;
|
||||
background-color: transparent;
|
||||
border: none;
|
||||
margin: 4px;
|
||||
padding: 7px;
|
||||
border-radius: 6px;
|
||||
display: flex;
|
||||
align-items: center;
|
||||
justify-content: center;
|
||||
color: var(--gray-11);
|
||||
transition: background-color 0.2s, color 0.2s;
|
||||
transform: scale(1.25);
|
||||
}
|
||||
|
||||
.theme-toggle:hover {
|
||||
background-color: var(--gray-4);
|
||||
}
|
||||
|
||||
/* Screen reader only text */
|
||||
.sr-only {
|
||||
position: absolute;
|
||||
width: 1px;
|
||||
height: 1px;
|
||||
padding: 0;
|
||||
margin: -1px;
|
||||
overflow: hidden;
|
||||
clip: rect(0, 0, 0, 0);
|
||||
white-space: nowrap;
|
||||
border: 0;
|
||||
}
|
||||
Vendored
+11
@@ -0,0 +1,11 @@
/// <reference types="@sveltejs/kit" />

declare const __APP_VERSION__: string;

declare namespace App {
  // interface Error {}
  // interface Locals {}
  // interface PageData {}
  // interface PageState {}
  // interface Platform {}
}
@@ -0,0 +1,32 @@
<!doctype html>
<html lang="en" class="no-transition">
  <head>
    <meta charset="utf-8" />
    <meta name="viewport" content="width=device-width, initial-scale=1" />
    <link rel="icon" href="%sveltekit.assets%/favicon.ico" />
    <meta name="theme-color" content="#000000" />
    <meta
      name="description"
      content="Banner, a Discord bot and web interface for UTSA Course Monitoring"
    />
    <link rel="apple-touch-icon" href="%sveltekit.assets%/logo192.png" />
    <link rel="manifest" href="%sveltekit.assets%/manifest.json" />
    <title>Banner</title>
    <script>
      (function () {
        var stored = localStorage.getItem("theme");
        var isDark =
          stored === "dark" ||
          (stored !== "light" &&
            window.matchMedia("(prefers-color-scheme: dark)").matches);
        if (isDark) {
          document.documentElement.classList.add("dark");
        }
      })();
    </script>
    %sveltekit.head%
  </head>
  <body data-sveltekit-preload-data="hover">
    <div style="display: contents">%sveltekit.body%</div>
  </body>
</html>
@@ -1,36 +0,0 @@
|
||||
import { Button } from "@radix-ui/themes";
|
||||
import { Monitor, Moon, Sun } from "lucide-react";
|
||||
import { useTheme } from "next-themes";
|
||||
import { useMemo } from "react";
|
||||
|
||||
export function ThemeToggle() {
|
||||
const { theme, setTheme } = useTheme();
|
||||
|
||||
const nextTheme = useMemo(() => {
|
||||
switch (theme) {
|
||||
case "light":
|
||||
return "dark";
|
||||
case "dark":
|
||||
return "system";
|
||||
case "system":
|
||||
return "light";
|
||||
default:
|
||||
console.error(`Invalid theme: ${theme}`);
|
||||
return "system";
|
||||
}
|
||||
}, [theme]);
|
||||
|
||||
const icon = useMemo(() => {
|
||||
if (nextTheme === "system") {
|
||||
return <Monitor size={18} />;
|
||||
}
|
||||
return nextTheme === "dark" ? <Moon size={18} /> : <Sun size={18} />;
|
||||
}, [nextTheme]);
|
||||
|
||||
return (
|
||||
<Button variant="ghost" size="3" onClick={() => setTheme(nextTheme)} className="theme-toggle">
|
||||
{icon}
|
||||
<span className="sr-only">Toggle theme</span>
|
||||
</Button>
|
||||
);
|
||||
}
|
||||
+98
-19
@@ -1,7 +1,6 @@
|
||||
import { beforeEach, describe, expect, it, vi } from "vitest";
|
||||
import { BannerApiClient } from "./api";
|
||||
|
||||
// Mock fetch
|
||||
global.fetch = vi.fn();
|
||||
|
||||
describe("BannerApiClient", () => {
|
||||
@@ -12,23 +11,6 @@ describe("BannerApiClient", () => {
|
||||
vi.clearAllMocks();
|
||||
});
|
||||
|
||||
it("should fetch health data", async () => {
|
||||
const mockHealth = {
|
||||
status: "healthy",
|
||||
timestamp: "2024-01-01T00:00:00Z",
|
||||
};
|
||||
|
||||
vi.mocked(fetch).mockResolvedValueOnce({
|
||||
ok: true,
|
||||
json: () => Promise.resolve(mockHealth),
|
||||
} as Response);
|
||||
|
||||
const result = await apiClient.getHealth();
|
||||
|
||||
expect(fetch).toHaveBeenCalledWith("/api/health");
|
||||
expect(result).toEqual(mockHealth);
|
||||
});
|
||||
|
||||
it("should fetch status data", async () => {
|
||||
const mockStatus = {
|
||||
status: "active" as const,
|
||||
@@ -58,8 +40,105 @@ describe("BannerApiClient", () => {
|
||||
statusText: "Internal Server Error",
|
||||
} as Response);
|
||||
|
||||
await expect(apiClient.getHealth()).rejects.toThrow(
|
||||
await expect(apiClient.getStatus()).rejects.toThrow(
|
||||
"API request failed: 500 Internal Server Error"
|
||||
);
|
||||
});
|
||||
|
||||
it("should search courses with all params", async () => {
|
||||
const mockResponse = {
|
||||
courses: [],
|
||||
totalCount: 0,
|
||||
offset: 0,
|
||||
limit: 25,
|
||||
};
|
||||
|
||||
vi.mocked(fetch).mockResolvedValueOnce({
|
||||
ok: true,
|
||||
json: () => Promise.resolve(mockResponse),
|
||||
} as Response);
|
||||
|
||||
const result = await apiClient.searchCourses({
|
||||
term: "202420",
|
||||
subjects: ["CS"],
|
||||
q: "data",
|
||||
open_only: true,
|
||||
limit: 25,
|
||||
offset: 50,
|
||||
});
|
||||
|
||||
expect(fetch).toHaveBeenCalledWith(
|
||||
"/api/courses/search?term=202420&subject=CS&q=data&open_only=true&limit=25&offset=50"
|
||||
);
|
||||
expect(result).toEqual(mockResponse);
|
||||
});
|
||||
|
||||
it("should search courses with minimal params", async () => {
|
||||
const mockResponse = {
|
||||
courses: [],
|
||||
totalCount: 0,
|
||||
offset: 0,
|
||||
limit: 25,
|
||||
};
|
||||
|
||||
vi.mocked(fetch).mockResolvedValueOnce({
|
||||
ok: true,
|
||||
json: () => Promise.resolve(mockResponse),
|
||||
} as Response);
|
||||
|
||||
await apiClient.searchCourses({ term: "202420" });
|
||||
|
||||
expect(fetch).toHaveBeenCalledWith("/api/courses/search?term=202420");
|
||||
});
|
||||
|
||||
it("should fetch terms", async () => {
|
||||
const mockTerms = [
|
||||
{ code: "202420", description: "Fall 2024" },
|
||||
{ code: "202510", description: "Spring 2025" },
|
||||
];
|
||||
|
||||
vi.mocked(fetch).mockResolvedValueOnce({
|
||||
ok: true,
|
||||
json: () => Promise.resolve(mockTerms),
|
||||
} as Response);
|
||||
|
||||
const result = await apiClient.getTerms();
|
||||
|
||||
expect(fetch).toHaveBeenCalledWith("/api/terms");
|
||||
expect(result).toEqual(mockTerms);
|
||||
});
|
||||
|
||||
it("should fetch subjects for a term", async () => {
|
||||
const mockSubjects = [
|
||||
{ code: "CS", description: "Computer Science" },
|
||||
{ code: "MAT", description: "Mathematics" },
|
||||
];
|
||||
|
||||
vi.mocked(fetch).mockResolvedValueOnce({
|
||||
ok: true,
|
||||
json: () => Promise.resolve(mockSubjects),
|
||||
} as Response);
|
||||
|
||||
const result = await apiClient.getSubjects("202420");
|
||||
|
||||
expect(fetch).toHaveBeenCalledWith("/api/subjects?term=202420");
|
||||
expect(result).toEqual(mockSubjects);
|
||||
});
|
||||
|
||||
it("should fetch reference data", async () => {
|
||||
const mockRef = [
|
||||
{ code: "F", description: "Face to Face" },
|
||||
{ code: "OL", description: "Online" },
|
||||
];
|
||||
|
||||
vi.mocked(fetch).mockResolvedValueOnce({
|
||||
ok: true,
|
||||
json: () => Promise.resolve(mockRef),
|
||||
} as Response);
|
||||
|
||||
const result = await apiClient.getReference("instructional_methods");
|
||||
|
||||
expect(fetch).toHaveBeenCalledWith("/api/reference/instructional_methods");
|
||||
expect(result).toEqual(mockRef);
|
||||
});
|
||||
});
|
||||
|
||||
+134
-29
@@ -1,41 +1,99 @@
|
||||
// API client for Banner backend
|
||||
import type {
|
||||
CodeDescription,
|
||||
CourseResponse,
|
||||
DbMeetingTime,
|
||||
InstructorResponse,
|
||||
SearchResponse as SearchResponseGenerated,
|
||||
ServiceInfo,
|
||||
ServiceStatus,
|
||||
StatusResponse,
|
||||
User,
|
||||
} from "$lib/bindings";
|
||||
|
||||
const API_BASE_URL = "/api";
|
||||
|
||||
export interface HealthResponse {
|
||||
status: string;
|
||||
// Re-export generated types under their canonical names
|
||||
export type {
|
||||
CodeDescription,
|
||||
CourseResponse,
|
||||
DbMeetingTime,
|
||||
InstructorResponse,
|
||||
ServiceInfo,
|
||||
ServiceStatus,
|
||||
StatusResponse,
|
||||
};
|
||||
|
||||
// Semantic aliases — these all share the CodeDescription shape
|
||||
export type Term = CodeDescription;
|
||||
export type Subject = CodeDescription;
|
||||
export type ReferenceEntry = CodeDescription;
|
||||
|
||||
// SearchResponse re-exported (aliased to strip the "Generated" suffix)
|
||||
export type SearchResponse = SearchResponseGenerated;
|
||||
|
||||
// Client-side only — not generated from Rust
|
||||
export type SortColumn = "course_code" | "title" | "instructor" | "time" | "seats";
|
||||
export type SortDirection = "asc" | "desc";
|
||||
|
||||
export interface AdminStatus {
|
||||
userCount: number;
|
||||
sessionCount: number;
|
||||
courseCount: number;
|
||||
scrapeJobCount: number;
|
||||
services: { name: string; status: string }[];
|
||||
}
|
||||
|
||||
export interface ScrapeJob {
|
||||
id: number;
|
||||
targetType: string;
|
||||
targetPayload: unknown;
|
||||
priority: string;
|
||||
executeAt: string;
|
||||
createdAt: string;
|
||||
lockedAt: string | null;
|
||||
retryCount: number;
|
||||
maxRetries: number;
|
||||
}
|
||||
|
||||
export interface ScrapeJobsResponse {
|
||||
jobs: ScrapeJob[];
|
||||
}
|
||||
|
||||
export interface AuditLogEntry {
|
||||
id: number;
|
||||
courseId: number;
|
||||
timestamp: string;
|
||||
fieldChanged: string;
|
||||
oldValue: string;
|
||||
newValue: string;
|
||||
}
|
||||
|
||||
export type Status = "starting" | "active" | "connected" | "disabled" | "error";
|
||||
|
||||
export interface ServiceInfo {
|
||||
name: string;
|
||||
status: Status;
|
||||
export interface AuditLogResponse {
|
||||
entries: AuditLogEntry[];
|
||||
}
|
||||
|
||||
export interface StatusResponse {
|
||||
status: Status;
|
||||
version: string;
|
||||
commit: string;
|
||||
services: Record<string, ServiceInfo>;
|
||||
}
|
||||
|
||||
export interface MetricsResponse {
|
||||
banner_api: {
|
||||
status: string;
|
||||
};
|
||||
timestamp: string;
|
||||
export interface SearchParams {
|
||||
term: string;
|
||||
subjects?: string[];
|
||||
q?: string;
|
||||
open_only?: boolean;
|
||||
limit?: number;
|
||||
offset?: number;
|
||||
sort_by?: SortColumn;
|
||||
sort_dir?: SortDirection;
|
||||
}
|
||||
|
||||
export class BannerApiClient {
|
||||
private baseUrl: string;
|
||||
private fetchFn: typeof fetch;
|
||||
|
||||
constructor(baseUrl: string = API_BASE_URL) {
|
||||
constructor(baseUrl: string = API_BASE_URL, fetchFn: typeof fetch = fetch) {
|
||||
this.baseUrl = baseUrl;
|
||||
this.fetchFn = fetchFn;
|
||||
}
|
||||
|
||||
private async request<T>(endpoint: string): Promise<T> {
|
||||
const response = await fetch(`${this.baseUrl}${endpoint}`);
|
||||
const response = await this.fetchFn(`${this.baseUrl}${endpoint}`);
|
||||
|
||||
if (!response.ok) {
|
||||
throw new Error(`API request failed: ${response.status} ${response.statusText}`);
|
||||
@@ -44,18 +102,65 @@ export class BannerApiClient {
|
||||
return (await response.json()) as T;
|
||||
}
|
||||
|
||||
async getHealth(): Promise<HealthResponse> {
|
||||
return this.request<HealthResponse>("/health");
|
||||
}
|
||||
|
||||
async getStatus(): Promise<StatusResponse> {
|
||||
return this.request<StatusResponse>("/status");
|
||||
}
|
||||
|
||||
async getMetrics(): Promise<MetricsResponse> {
|
||||
return this.request<MetricsResponse>("/metrics");
|
||||
async searchCourses(params: SearchParams): Promise<SearchResponse> {
|
||||
const query = new URLSearchParams();
|
||||
query.set("term", params.term);
|
||||
if (params.subjects) {
|
||||
for (const s of params.subjects) {
|
||||
query.append("subject", s);
|
||||
}
|
||||
}
|
||||
if (params.q) query.set("q", params.q);
|
||||
if (params.open_only) query.set("open_only", "true");
|
||||
if (params.limit !== undefined) query.set("limit", String(params.limit));
|
||||
if (params.offset !== undefined) query.set("offset", String(params.offset));
|
||||
if (params.sort_by) query.set("sort_by", params.sort_by);
|
||||
if (params.sort_dir) query.set("sort_dir", params.sort_dir);
|
||||
return this.request<SearchResponse>(`/courses/search?${query.toString()}`);
|
||||
}
|
||||
|
||||
async getTerms(): Promise<Term[]> {
|
||||
return this.request<Term[]>("/terms");
|
||||
}
|
||||
|
||||
async getSubjects(termCode: string): Promise<Subject[]> {
|
||||
return this.request<Subject[]>(`/subjects?term=${encodeURIComponent(termCode)}`);
|
||||
}
|
||||
|
||||
async getReference(category: string): Promise<ReferenceEntry[]> {
|
||||
return this.request<ReferenceEntry[]>(`/reference/${encodeURIComponent(category)}`);
|
||||
}
|
||||
|
||||
// Admin endpoints
|
||||
async getAdminStatus(): Promise<AdminStatus> {
|
||||
return this.request<AdminStatus>("/admin/status");
|
||||
}
|
||||
|
||||
async getAdminUsers(): Promise<User[]> {
|
||||
return this.request<User[]>("/admin/users");
|
||||
}
|
||||
|
||||
async setUserAdmin(discordId: bigint, isAdmin: boolean): Promise<User> {
|
||||
const response = await this.fetchFn(`${this.baseUrl}/admin/users/${discordId}/admin`, {
|
||||
method: "PUT",
|
||||
headers: { "Content-Type": "application/json" },
|
||||
body: JSON.stringify({ is_admin: isAdmin }),
|
||||
});
|
||||
if (!response.ok) throw new Error(`API request failed: ${response.status}`);
|
||||
return (await response.json()) as User;
|
||||
}
|
||||
|
||||
async getAdminScrapeJobs(): Promise<ScrapeJobsResponse> {
|
||||
return this.request<ScrapeJobsResponse>("/admin/scrape-jobs");
|
||||
}
|
||||
|
||||
async getAdminAuditLog(): Promise<AuditLogResponse> {
|
||||
return this.request<AuditLogResponse>("/admin/audit-log");
|
||||
}
|
||||
}
|
||||
|
||||
// Export a default instance
|
||||
export const client = new BannerApiClient();
|
||||
|
||||
@@ -0,0 +1,55 @@
import type { User } from "$lib/bindings";

type AuthState =
  | { mode: "loading" }
  | { mode: "authenticated"; user: User }
  | { mode: "unauthenticated" };

class AuthStore {
  state = $state<AuthState>({ mode: "loading" });

  get user(): User | null {
    return this.state.mode === "authenticated" ? this.state.user : null;
  }

  get isAdmin(): boolean {
    return this.user?.isAdmin ?? false;
  }

  get isLoading(): boolean {
    return this.state.mode === "loading";
  }

  get isAuthenticated(): boolean {
    return this.state.mode === "authenticated";
  }

  async init() {
    try {
      const response = await fetch("/api/auth/me");
      if (response.ok) {
        const user: User = await response.json();
        this.state = { mode: "authenticated", user };
      } else {
        this.state = { mode: "unauthenticated" };
      }
    } catch {
      this.state = { mode: "unauthenticated" };
    }
  }

  login() {
    window.location.href = "/api/auth/login";
  }

  async logout() {
    try {
      await fetch("/api/auth/logout", { method: "POST" });
    } finally {
      this.state = { mode: "unauthenticated" };
      window.location.href = "/";
    }
  }
}

export const authStore = new AuthStore();
@@ -0,0 +1,9 @@
export type { CodeDescription } from "./CodeDescription";
export type { CourseResponse } from "./CourseResponse";
export type { DbMeetingTime } from "./DbMeetingTime";
export type { InstructorResponse } from "./InstructorResponse";
export type { SearchResponse } from "./SearchResponse";
export type { ServiceInfo } from "./ServiceInfo";
export type { ServiceStatus } from "./ServiceStatus";
export type { StatusResponse } from "./StatusResponse";
export type { User } from "./User";
@@ -0,0 +1,226 @@
|
||||
<script lang="ts">
|
||||
import type { CourseResponse } from "$lib/api";
|
||||
import {
|
||||
formatTime,
|
||||
formatCreditHours,
|
||||
formatDate,
|
||||
formatMeetingDaysLong,
|
||||
isMeetingTimeTBA,
|
||||
isTimeTBA,
|
||||
ratingColor,
|
||||
} from "$lib/course";
|
||||
import { useClipboard } from "$lib/composables/useClipboard.svelte";
|
||||
import { cn, tooltipContentClass } from "$lib/utils";
|
||||
import { Tooltip } from "bits-ui";
|
||||
import SimpleTooltip from "./SimpleTooltip.svelte";
|
||||
import { Info, Copy, Check } from "@lucide/svelte";
|
||||
|
||||
let { course }: { course: CourseResponse } = $props();
|
||||
|
||||
const clipboard = useClipboard();
|
||||
</script>
|
||||
|
||||
<div class="bg-muted/60 p-5 text-sm border-b border-border">
|
||||
<div class="grid grid-cols-1 sm:grid-cols-2 gap-5">
|
||||
<!-- Instructors -->
|
||||
<div>
|
||||
<h4 class="text-sm text-foreground mb-2">
|
||||
Instructors
|
||||
</h4>
|
||||
{#if course.instructors.length > 0}
|
||||
<div class="flex flex-wrap gap-1.5">
|
||||
{#each course.instructors as instructor}
|
||||
<Tooltip.Root delayDuration={200}>
|
||||
<Tooltip.Trigger>
|
||||
<span
|
||||
class="inline-flex items-center gap-1.5 text-sm font-medium bg-card border border-border rounded-md px-2.5 py-1 text-foreground hover:border-foreground/20 hover:bg-card/80 transition-colors"
|
||||
>
|
||||
{instructor.displayName}
|
||||
{#if instructor.rmpRating != null}
|
||||
{@const rating = instructor.rmpRating}
|
||||
<span
|
||||
class="text-[10px] font-semibold {ratingColor(rating)}"
|
||||
>{rating.toFixed(1)}★</span>
|
||||
{/if}
|
||||
</span>
|
||||
</Tooltip.Trigger>
|
||||
<Tooltip.Content
|
||||
sideOffset={6}
|
||||
class={cn(tooltipContentClass, "px-3 py-2")}
|
||||
>
|
||||
<div class="space-y-1.5">
|
||||
<div class="font-medium">{instructor.displayName}</div>
|
||||
{#if instructor.isPrimary}
|
||||
<div class="text-muted-foreground">Primary instructor</div>
|
||||
{/if}
|
||||
{#if instructor.rmpRating != null}
|
||||
<div class="text-muted-foreground">
|
||||
{instructor.rmpRating.toFixed(1)}/5 ({instructor.rmpNumRatings ?? 0} ratings)
|
||||
</div>
|
||||
{/if}
|
||||
{#if instructor.email}
|
||||
<button
|
||||
onclick={(e) => clipboard.copy(instructor.email!, e)}
|
||||
class="inline-flex items-center gap-1 text-muted-foreground hover:text-foreground transition-colors cursor-pointer"
|
||||
>
|
||||
{#if clipboard.copiedValue === instructor.email}
|
||||
<Check class="size-3" />
|
||||
<span>Copied!</span>
|
||||
{:else}
|
||||
<Copy class="size-3" />
|
||||
<span>{instructor.email}</span>
|
||||
{/if}
|
||||
</button>
|
||||
{/if}
|
||||
</div>
|
||||
</Tooltip.Content>
|
||||
</Tooltip.Root>
|
||||
{/each}
|
||||
</div>
|
||||
{:else}
|
||||
<span class="text-muted-foreground italic">Staff</span>
|
||||
{/if}
|
||||
</div>
|
||||
|
||||
<!-- Meeting Times -->
|
||||
<div>
|
||||
<h4 class="text-sm text-foreground mb-2">
|
||||
Meeting Times
|
||||
</h4>
|
||||
{#if course.meetingTimes.length > 0}
|
||||
<ul class="space-y-2">
|
||||
{#each course.meetingTimes as mt}
|
||||
<li>
|
||||
{#if isMeetingTimeTBA(mt) && isTimeTBA(mt)}
|
||||
<span class="italic text-muted-foreground">TBA</span>
|
||||
{:else}
|
||||
<div class="flex items-baseline gap-1.5">
|
||||
{#if !isMeetingTimeTBA(mt)}
|
||||
<span class="font-medium text-foreground">
|
||||
{formatMeetingDaysLong(mt)}
|
||||
</span>
|
||||
{/if}
|
||||
{#if !isTimeTBA(mt)}
|
||||
<span class="text-muted-foreground">
|
||||
{formatTime(mt.begin_time)}–{formatTime(mt.end_time)}
|
||||
</span>
|
||||
{:else}
|
||||
<span class="italic text-muted-foreground">Time TBA</span>
|
||||
{/if}
|
||||
</div>
|
||||
{/if}
|
||||
{#if mt.building || mt.room}
|
||||
<div class="text-xs text-muted-foreground mt-0.5">
|
||||
{mt.building_description ?? mt.building}{mt.room ? ` ${mt.room}` : ""}
|
||||
</div>
|
||||
{/if}
|
||||
<div class="text-xs text-muted-foreground/70 mt-0.5">
|
||||
{formatDate(mt.start_date)} – {formatDate(mt.end_date)}
|
||||
</div>
|
||||
</li>
|
||||
{/each}
|
||||
</ul>
|
||||
{:else}
|
||||
<span class="italic text-muted-foreground">TBA</span>
|
||||
{/if}
|
||||
</div>
|
||||
|
||||
<!-- Delivery -->
|
||||
<div>
|
||||
<h4 class="text-sm text-foreground mb-2">
|
||||
<span class="inline-flex items-center gap-1">
|
||||
Delivery
|
||||
<SimpleTooltip text="How the course is taught: in-person, online, hybrid, etc." delay={150} passthrough>
|
||||
<Info class="size-3 text-muted-foreground/50" />
|
||||
</SimpleTooltip>
|
||||
</span>
|
||||
</h4>
|
||||
<span class="text-foreground">
|
||||
{course.instructionalMethod ?? "—"}
|
||||
{#if course.campus}
|
||||
<span class="text-muted-foreground"> · {course.campus}</span>
|
||||
{/if}
|
||||
</span>
|
||||
</div>
|
||||
|
||||
<!-- Credits -->
|
||||
<div>
|
||||
<h4 class="text-sm text-foreground mb-2">
|
||||
Credits
|
||||
</h4>
|
||||
<span class="text-foreground">{formatCreditHours(course)}</span>
|
||||
</div>
|
||||
|
||||
<!-- Attributes -->
|
||||
{#if course.attributes.length > 0}
|
||||
<div>
|
||||
<h4 class="text-sm text-foreground mb-2">
|
||||
<span class="inline-flex items-center gap-1">
|
||||
Attributes
|
||||
<SimpleTooltip text="Course flags for degree requirements, core curriculum, or special designations" delay={150} passthrough>
|
||||
<Info class="size-3 text-muted-foreground/50" />
|
||||
</SimpleTooltip>
|
||||
</span>
|
||||
</h4>
|
||||
<div class="flex flex-wrap gap-1.5">
|
||||
{#each course.attributes as attr}
|
||||
<SimpleTooltip text="Course attribute code" delay={150} passthrough>
|
||||
<span
|
||||
class="inline-flex text-xs font-medium bg-card border border-border rounded-md px-2 py-0.5 text-muted-foreground hover:text-foreground hover:border-foreground/20 transition-colors"
|
||||
>
|
||||
{attr}
|
||||
</span>
|
||||
</SimpleTooltip>
|
||||
{/each}
|
||||
</div>
|
||||
</div>
|
||||
{/if}
|
||||
|
||||
<!-- Cross-list -->
|
||||
{#if course.crossList}
|
||||
<div>
|
||||
<h4 class="text-sm text-foreground mb-2">
|
||||
<span class="inline-flex items-center gap-1">
|
||||
Cross-list
|
||||
<SimpleTooltip text="Cross-listed sections share enrollment across multiple course numbers. Students in any linked section attend the same class." delay={150} passthrough>
|
||||
<Info class="size-3 text-muted-foreground/50" />
|
||||
</SimpleTooltip>
|
||||
</span>
|
||||
</h4>
|
||||
<Tooltip.Root delayDuration={150} disableHoverableContent>
|
||||
<Tooltip.Trigger>
|
||||
<span class="inline-flex items-center gap-1.5 text-foreground font-mono">
|
||||
<span class="bg-card border border-border rounded-md px-2 py-0.5 text-xs font-medium">
|
||||
{course.crossList}
|
||||
</span>
|
||||
{#if course.crossListCount != null && course.crossListCapacity != null}
|
||||
<span class="text-muted-foreground text-xs">
|
||||
{course.crossListCount}/{course.crossListCapacity}
|
||||
</span>
|
||||
{/if}
|
||||
</span>
|
||||
</Tooltip.Trigger>
|
||||
<Tooltip.Content
|
||||
sideOffset={6}
|
||||
class={tooltipContentClass}
|
||||
>
|
||||
Group <span class="font-mono font-medium">{course.crossList}</span>
|
||||
{#if course.crossListCount != null && course.crossListCapacity != null}
|
||||
— {course.crossListCount} enrolled across {course.crossListCapacity} shared seats
|
||||
{/if}
|
||||
</Tooltip.Content>
|
||||
</Tooltip.Root>
|
||||
</div>
|
||||
{/if}
|
||||
|
||||
<!-- Waitlist -->
|
||||
{#if course.waitCapacity > 0}
|
||||
<div>
|
||||
<h4 class="text-sm text-foreground mb-2">
|
||||
Waitlist
|
||||
</h4>
|
||||
<span class="text-foreground">{course.waitCount} / {course.waitCapacity}</span>
|
||||
</div>
|
||||
{/if}
|
||||
</div>
|
||||
</div>
|
||||
@@ -0,0 +1,684 @@
|
||||
<script lang="ts">
|
||||
import type { CourseResponse } from "$lib/api";
|
||||
import {
|
||||
abbreviateInstructor,
|
||||
concernAccentColor,
|
||||
formatLocationDisplay,
|
||||
formatLocationTooltip,
|
||||
formatMeetingDays,
|
||||
formatMeetingTimesTooltip,
|
||||
formatTimeRange,
|
||||
getDeliveryConcern,
|
||||
getPrimaryInstructor,
|
||||
isMeetingTimeTBA,
|
||||
isTimeTBA,
|
||||
openSeats,
|
||||
seatsColor,
|
||||
seatsDotColor,
|
||||
ratingColor,
|
||||
} from "$lib/course";
|
||||
import { useClipboard } from "$lib/composables/useClipboard.svelte";
|
||||
import { useOverlayScrollbars } from "$lib/composables/useOverlayScrollbars.svelte";
|
||||
import CourseDetail from "./CourseDetail.svelte";
|
||||
import { fade, fly, slide } from "svelte/transition";
|
||||
import { flip } from "svelte/animate";
|
||||
import { createSvelteTable, FlexRender } from "$lib/components/ui/data-table/index.js";
|
||||
import {
|
||||
getCoreRowModel,
|
||||
getSortedRowModel,
|
||||
type ColumnDef,
|
||||
type SortingState,
|
||||
type VisibilityState,
|
||||
type Updater,
|
||||
} from "@tanstack/table-core";
|
||||
import { ArrowUp, ArrowDown, ArrowUpDown, Columns3, Check, RotateCcw } from "@lucide/svelte";
|
||||
import { DropdownMenu, ContextMenu } from "bits-ui";
|
||||
import SimpleTooltip from "./SimpleTooltip.svelte";
|
||||
|
||||
let {
|
||||
courses,
|
||||
loading,
|
||||
sorting = [],
|
||||
onSortingChange,
|
||||
manualSorting = false,
|
||||
subjectMap = {},
|
||||
}: {
|
||||
courses: CourseResponse[];
|
||||
loading: boolean;
|
||||
sorting?: SortingState;
|
||||
onSortingChange?: (sorting: SortingState) => void;
|
||||
manualSorting?: boolean;
|
||||
subjectMap?: Record<string, string>;
|
||||
} = $props();
|
||||
|
||||
let expandedCrn: string | null = $state(null);
|
||||
let tableWrapper: HTMLDivElement = undefined!;
|
||||
const clipboard = useClipboard(1000);
|
||||
|
||||
// Collapse expanded row when the dataset changes to avoid stale detail rows
|
||||
// and FLIP position calculation glitches from lingering expanded content
|
||||
$effect(() => {
|
||||
courses; // track dependency
|
||||
expandedCrn = null;
|
||||
});
|
||||
|
||||
useOverlayScrollbars(() => tableWrapper, {
|
||||
overflow: { x: "scroll", y: "hidden" },
|
||||
scrollbars: { autoHide: "never" },
|
||||
});
|
||||
|
||||
// Column visibility state
|
||||
let columnVisibility: VisibilityState = $state({});
|
||||
|
||||
function resetColumnVisibility() {
|
||||
columnVisibility = {};
|
||||
}
|
||||
|
||||
function handleVisibilityChange(updater: Updater<VisibilityState>) {
|
||||
const newVisibility = typeof updater === "function" ? updater(columnVisibility) : updater;
|
||||
columnVisibility = newVisibility;
|
||||
}
|
||||
|
||||
// visibleColumnIds and hasCustomVisibility derived after column definitions below
|
||||
|
||||
function toggleRow(crn: string) {
|
||||
expandedCrn = expandedCrn === crn ? null : crn;
|
||||
}
|
||||
|
||||
function primaryInstructorDisplay(course: CourseResponse): string {
|
||||
const primary = getPrimaryInstructor(course.instructors);
|
||||
if (!primary) return "Staff";
|
||||
return abbreviateInstructor(primary.displayName);
|
||||
}
|
||||
|
||||
function primaryRating(course: CourseResponse): { rating: number; count: number } | null {
|
||||
const primary = getPrimaryInstructor(course.instructors);
|
||||
if (!primary?.rmpRating) return null;
|
||||
return { rating: primary.rmpRating, count: primary.rmpNumRatings ?? 0 };
|
||||
}
|
||||
|
||||
function timeIsTBA(course: CourseResponse): boolean {
|
||||
if (course.meetingTimes.length === 0) return true;
|
||||
const mt = course.meetingTimes[0];
|
||||
return isMeetingTimeTBA(mt) && isTimeTBA(mt);
|
||||
}
|
||||
|
||||
// Column definitions
|
||||
const columns: ColumnDef<CourseResponse, unknown>[] = [
|
||||
{
|
||||
id: "crn",
|
||||
accessorKey: "crn",
|
||||
header: "CRN",
|
||||
enableSorting: false,
|
||||
},
|
||||
{
|
||||
id: "course_code",
|
||||
accessorFn: (row) => `${row.subject} ${row.courseNumber}`,
|
||||
header: "Course",
|
||||
enableSorting: true,
|
||||
},
|
||||
{
|
||||
id: "title",
|
||||
accessorKey: "title",
|
||||
header: "Title",
|
||||
enableSorting: true,
|
||||
},
|
||||
{
|
||||
id: "instructor",
|
||||
accessorFn: (row) => primaryInstructorDisplay(row),
|
||||
header: "Instructor",
|
||||
enableSorting: true,
|
||||
},
|
||||
{
|
||||
id: "time",
|
||||
accessorFn: (row) => {
|
||||
if (row.meetingTimes.length === 0) return "";
|
||||
const mt = row.meetingTimes[0];
|
||||
return `${formatMeetingDays(mt)} ${formatTimeRange(mt.begin_time, mt.end_time)}`;
|
||||
},
|
||||
header: "Time",
|
||||
enableSorting: true,
|
||||
},
|
||||
{
|
||||
id: "location",
|
||||
accessorFn: (row) => formatLocationDisplay(row) ?? "",
|
||||
header: "Location",
|
||||
enableSorting: false,
|
||||
},
|
||||
{
|
||||
id: "seats",
|
||||
accessorFn: (row) => openSeats(row),
|
||||
header: "Seats",
|
||||
enableSorting: true,
|
||||
},
|
||||
];
|
||||
|
||||
/** Column IDs that are currently visible */
|
||||
let visibleColumnIds = $derived(
|
||||
columns.map((c) => c.id!).filter((id) => columnVisibility[id] !== false)
|
||||
);
|
||||
|
||||
let hasCustomVisibility = $derived(Object.values(columnVisibility).some((v) => v === false));
|
||||
|
||||
function handleSortingChange(updater: Updater<SortingState>) {
|
||||
const newSorting = typeof updater === "function" ? updater(sorting) : updater;
|
||||
onSortingChange?.(newSorting);
|
||||
}
|
||||
|
||||
const table = createSvelteTable({
|
||||
get data() {
|
||||
return courses;
|
||||
},
|
||||
getRowId: (row) => String(row.crn),
|
||||
columns,
|
||||
state: {
|
||||
get sorting() {
|
||||
return sorting;
|
||||
},
|
||||
get columnVisibility() {
|
||||
return columnVisibility;
|
||||
},
|
||||
},
|
||||
onSortingChange: handleSortingChange,
|
||||
onColumnVisibilityChange: handleVisibilityChange,
|
||||
getCoreRowModel: getCoreRowModel(),
|
||||
get getSortedRowModel() {
|
||||
return manualSorting ? undefined : getSortedRowModel<CourseResponse>();
|
||||
},
|
||||
get manualSorting() {
|
||||
return manualSorting;
|
||||
},
|
||||
enableSortingRemoval: true,
|
||||
});
|
||||
</script>
|
||||
|
||||
{#snippet columnVisibilityGroup(
|
||||
Group: typeof DropdownMenu.Group,
|
||||
GroupHeading: typeof DropdownMenu.GroupHeading,
|
||||
CheckboxItem: typeof DropdownMenu.CheckboxItem,
|
||||
Separator: typeof DropdownMenu.Separator,
|
||||
Item: typeof DropdownMenu.Item,
|
||||
)}
|
||||
<Group>
|
||||
<GroupHeading
|
||||
class="px-2 py-1.5 text-xs font-medium text-muted-foreground"
|
||||
>
|
||||
Toggle columns
|
||||
</GroupHeading>
|
||||
{#each columns as col}
|
||||
{@const id = col.id!}
|
||||
{@const label =
|
||||
typeof col.header === "string" ? col.header : id}
|
||||
<CheckboxItem
|
||||
checked={columnVisibility[id] !== false}
|
||||
closeOnSelect={false}
|
||||
onCheckedChange={(checked) => {
|
||||
columnVisibility = {
|
||||
...columnVisibility,
|
||||
[id]: checked,
|
||||
};
|
||||
}}
|
||||
class="relative flex items-center gap-2 rounded-sm px-2 py-1.5 text-sm cursor-pointer select-none outline-none data-highlighted:bg-accent data-highlighted:text-accent-foreground"
|
||||
>
|
||||
{#snippet children({ checked })}
|
||||
<span
|
||||
class="flex size-4 items-center justify-center rounded-sm border border-border"
|
||||
>
|
||||
{#if checked}
|
||||
<Check class="size-3" />
|
||||
{/if}
|
||||
</span>
|
||||
{label}
|
||||
{/snippet}
|
||||
</CheckboxItem>
|
||||
{/each}
|
||||
</Group>
|
||||
{#if hasCustomVisibility}
|
||||
<Separator class="mx-1 my-1 h-px bg-border" />
|
||||
<Item
|
||||
class="flex items-center gap-2 rounded-sm px-2 py-1.5 text-sm cursor-pointer select-none outline-none data-highlighted:bg-accent data-highlighted:text-accent-foreground"
|
||||
onSelect={resetColumnVisibility}
|
||||
>
|
||||
<RotateCcw class="size-3.5" />
|
||||
Reset to default
|
||||
</Item>
|
||||
{/if}
|
||||
{/snippet}
|
||||
|
||||
<!-- Toolbar: View columns button -->
|
||||
<div class="flex items-center justify-end pb-2">
|
||||
<DropdownMenu.Root>
|
||||
<DropdownMenu.Trigger
|
||||
class="inline-flex items-center gap-1.5 rounded-md border border-border bg-background px-2.5 py-1.5 text-xs font-medium text-muted-foreground hover:bg-accent hover:text-accent-foreground transition-colors cursor-pointer"
|
||||
>
|
||||
<Columns3 class="size-3.5" />
|
||||
View
|
||||
</DropdownMenu.Trigger>
|
||||
<DropdownMenu.Portal>
|
||||
<DropdownMenu.Content
|
||||
class="z-50 min-w-40 rounded-md border border-border bg-card p-1 text-card-foreground shadow-lg"
|
||||
align="end"
|
||||
sideOffset={4}
|
||||
forceMount
|
||||
>
|
||||
{#snippet child({ wrapperProps, props, open })}
|
||||
{#if open}
|
||||
<div {...wrapperProps}>
|
||||
<div
|
||||
{...props}
|
||||
transition:fly={{ duration: 150, y: -10 }}
|
||||
>
|
||||
{@render columnVisibilityGroup(
|
||||
DropdownMenu.Group,
|
||||
DropdownMenu.GroupHeading,
|
||||
DropdownMenu.CheckboxItem,
|
||||
DropdownMenu.Separator,
|
||||
DropdownMenu.Item,
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
{/if}
|
||||
{/snippet}
|
||||
</DropdownMenu.Content>
|
||||
</DropdownMenu.Portal>
|
||||
</DropdownMenu.Root>
|
||||
</div>
|
||||
|
||||
<!-- Table with context menu on header -->
|
||||
<div bind:this={tableWrapper} class="overflow-x-auto">
|
||||
<ContextMenu.Root>
|
||||
<ContextMenu.Trigger class="contents">
|
||||
<table class="w-full min-w-160 border-collapse text-sm">
|
||||
<thead>
|
||||
{#each table.getHeaderGroups() as headerGroup}
|
||||
<tr
|
||||
class="border-b border-border text-left text-muted-foreground"
|
||||
>
|
||||
{#each headerGroup.headers as header}
|
||||
{#if header.column.getIsVisible()}
|
||||
<th
|
||||
class="py-2 px-2 font-medium {header.id ===
|
||||
'seats'
|
||||
? 'text-right'
|
||||
: ''}"
|
||||
class:cursor-pointer={header.column.getCanSort()}
|
||||
class:select-none={header.column.getCanSort()}
|
||||
onclick={header.column.getToggleSortingHandler()}
|
||||
>
|
||||
{#if header.column.getCanSort()}
|
||||
<span
|
||||
class="inline-flex items-center gap-1"
|
||||
>
|
||||
{#if typeof header.column.columnDef.header === "string"}
|
||||
{header.column.columnDef
|
||||
.header}
|
||||
{:else}
|
||||
<FlexRender
|
||||
content={header.column
|
||||
.columnDef.header}
|
||||
context={header.getContext()}
|
||||
/>
|
||||
{/if}
|
||||
{#if header.column.getIsSorted() === "asc"}
|
||||
<ArrowUp class="size-3.5" />
|
||||
{:else if header.column.getIsSorted() === "desc"}
|
||||
<ArrowDown
|
||||
class="size-3.5"
|
||||
/>
|
||||
{:else}
|
||||
<ArrowUpDown
|
||||
class="size-3.5 text-muted-foreground/40"
|
||||
/>
|
||||
{/if}
|
||||
</span>
|
||||
{:else if typeof header.column.columnDef.header === "string"}
|
||||
{header.column.columnDef.header}
|
||||
{:else}
|
||||
<FlexRender
|
||||
content={header.column.columnDef
|
||||
.header}
|
||||
context={header.getContext()}
|
||||
/>
|
||||
{/if}
|
||||
</th>
|
||||
{/if}
|
||||
{/each}
|
||||
</tr>
|
||||
{/each}
|
||||
</thead>
|
||||
{#if loading && courses.length === 0}
|
||||
<tbody>
|
||||
{#each Array(5) as _}
|
||||
<tr class="border-b border-border">
|
||||
{#each table.getVisibleLeafColumns() as col}
|
||||
<td class="py-2.5 px-2">
|
||||
<div
|
||||
class="h-4 bg-muted rounded animate-pulse {col.id ===
|
||||
'seats'
|
||||
? 'w-14 ml-auto'
|
||||
: col.id === 'title'
|
||||
? 'w-40'
|
||||
: col.id === 'crn'
|
||||
? 'w-10'
|
||||
: 'w-20'}"
|
||||
></div>
|
||||
</td>
|
||||
{/each}
|
||||
</tr>
|
||||
{/each}
|
||||
</tbody>
|
||||
{:else if courses.length === 0}
|
||||
<tbody>
|
||||
<tr>
|
||||
<td
|
||||
colspan={visibleColumnIds.length}
|
||||
class="py-12 text-center text-muted-foreground"
|
||||
>
|
||||
No courses found. Try adjusting your filters.
|
||||
</td>
|
||||
</tr>
|
||||
</tbody>
|
||||
{:else}
|
||||
<!-- No out: transition — Svelte outros break table layout (tbody loses positioning and overlaps) -->
|
||||
{#each table.getRowModel().rows as row, i (row.id)}
|
||||
{@const course = row.original}
|
||||
<tbody
|
||||
animate:flip={{ duration: 300 }}
|
||||
in:fade={{ duration: 200, delay: Math.min(i * 20, 400) }}
|
||||
>
|
||||
<tr
|
||||
class="border-b border-border cursor-pointer hover:bg-muted/50 transition-colors whitespace-nowrap {expandedCrn ===
|
||||
course.crn
|
||||
? 'bg-muted/30'
|
||||
: ''}"
|
||||
onclick={() => toggleRow(course.crn)}
|
||||
>
|
||||
{#each row.getVisibleCells() as cell (cell.id)}
|
||||
{@const colId = cell.column.id}
|
||||
{#if colId === "crn"}
|
||||
<td class="py-2 px-2 relative">
|
||||
<button
|
||||
class="relative inline-flex items-center rounded-full px-2 py-0.5 border border-border/50 bg-muted/20 hover:bg-muted/40 hover:border-foreground/30 transition-colors duration-150 cursor-copy focus-visible:outline-2 focus-visible:outline-offset-1 focus-visible:outline-ring font-mono text-xs text-muted-foreground/70"
|
||||
onclick={(e) =>
|
||||
clipboard.copy(
|
||||
course.crn,
|
||||
e,
|
||||
)}
|
||||
onkeydown={(e) => {
|
||||
if (e.key === "Enter" || e.key === " ") {
|
||||
e.preventDefault();
|
||||
clipboard.copy(course.crn, e);
|
||||
}
|
||||
}}
|
||||
aria-label="Copy CRN {course.crn} to clipboard"
|
||||
>
|
||||
{course.crn}
|
||||
{#if clipboard.copiedValue === course.crn}
|
||||
<span
|
||||
class="absolute -top-8 left-1/2 -translate-x-1/2 whitespace-nowrap text-xs px-2 py-1 rounded-md bg-green-500/10 border border-green-500/20 text-green-700 dark:text-green-300 pointer-events-none z-10"
|
||||
in:fade={{
|
||||
duration: 100,
|
||||
}}
|
||||
out:fade={{
|
||||
duration: 200,
|
||||
}}
|
||||
>
|
||||
Copied!
|
||||
</span>
|
||||
{/if}
|
||||
</button>
|
||||
</td>
|
||||
{:else if colId === "course_code"}
|
||||
{@const subjectDesc =
|
||||
subjectMap[course.subject]}
|
||||
<td class="py-2 px-2 whitespace-nowrap">
|
||||
<SimpleTooltip
|
||||
text={subjectDesc
|
||||
? `${subjectDesc} ${course.courseNumber}`
|
||||
: `${course.subject} ${course.courseNumber}`}
|
||||
delay={200}
|
||||
side="bottom"
|
||||
passthrough
|
||||
>
|
||||
<span class="font-semibold"
|
||||
>{course.subject}
|
||||
{course.courseNumber}</span
|
||||
>{#if course.sequenceNumber}<span
|
||||
class="text-muted-foreground"
|
||||
>-{course.sequenceNumber}</span
|
||||
>{/if}
|
||||
</SimpleTooltip>
|
||||
</td>
|
||||
{:else if colId === "title"}
|
||||
<td
|
||||
class="py-2 px-2 font-medium max-w-50 truncate"
|
||||
>
|
||||
<SimpleTooltip
|
||||
text={course.title}
|
||||
delay={200}
|
||||
side="bottom"
|
||||
passthrough
|
||||
>
|
||||
<span class="block truncate"
|
||||
>{course.title}</span
|
||||
>
|
||||
</SimpleTooltip>
|
||||
</td>
|
||||
{:else if colId === "instructor"}
|
||||
{@const primary = getPrimaryInstructor(
|
||||
course.instructors,
|
||||
)}
|
||||
{@const display = primaryInstructorDisplay(course)}
|
||||
{@const commaIdx = display.indexOf(", ")}
|
||||
{@const ratingData = primaryRating(course)}
|
||||
<td class="py-2 px-2 whitespace-nowrap">
|
||||
{#if display === "Staff"}
|
||||
<span
|
||||
class="text-xs text-muted-foreground/60 uppercase"
|
||||
>Staff</span
|
||||
>
|
||||
{:else}
|
||||
<SimpleTooltip
|
||||
text={primary?.displayName ??
|
||||
"Staff"}
|
||||
delay={200}
|
||||
side="bottom"
|
||||
passthrough
|
||||
>
|
||||
{#if commaIdx !== -1}
|
||||
<span>{display.slice(0, commaIdx)},
|
||||
<span class="text-muted-foreground">{display.slice(commaIdx + 1)}</span
|
||||
></span>
|
||||
{:else}
|
||||
<span>{display}</span>
|
||||
{/if}
|
||||
</SimpleTooltip>
|
||||
{/if}
|
||||
{#if ratingData}
|
||||
<SimpleTooltip
|
||||
text="{ratingData.rating.toFixed(
|
||||
1,
|
||||
)}/5 ({ratingData.count} ratings on RateMyProfessors)"
|
||||
delay={150}
|
||||
side="bottom"
|
||||
passthrough
|
||||
>
|
||||
<span
|
||||
class="ml-1 text-xs font-medium {ratingColor(
|
||||
ratingData.rating,
|
||||
)}"
|
||||
>{ratingData.rating.toFixed(
|
||||
1,
|
||||
)}★</span
|
||||
>
|
||||
</SimpleTooltip>
|
||||
{/if}
|
||||
</td>
|
||||
{:else if colId === "time"}
|
||||
<td class="py-2 px-2 whitespace-nowrap">
|
||||
<SimpleTooltip
|
||||
text={formatMeetingTimesTooltip(course.meetingTimes)}
|
||||
passthrough
|
||||
>
|
||||
{#if timeIsTBA(course)}
|
||||
<span
|
||||
class="text-xs text-muted-foreground/60"
|
||||
>TBA</span
|
||||
>
|
||||
{:else}
|
||||
{@const mt =
|
||||
course.meetingTimes[0]}
|
||||
<span>
|
||||
{#if !isMeetingTimeTBA(mt)}
|
||||
<span
|
||||
class="font-mono font-medium"
|
||||
>{formatMeetingDays(
|
||||
mt,
|
||||
)}</span
|
||||
>
|
||||
{" "}
|
||||
{/if}
|
||||
{#if !isTimeTBA(mt)}
|
||||
<span
|
||||
class="text-muted-foreground"
|
||||
>{formatTimeRange(
|
||||
mt.begin_time,
|
||||
mt.end_time,
|
||||
)}</span
|
||||
>
|
||||
{:else}
|
||||
<span
|
||||
class="text-xs text-muted-foreground/60"
|
||||
>TBA</span
|
||||
>
|
||||
{/if}
|
||||
{#if course.meetingTimes.length > 1}
|
||||
<span
|
||||
class="ml-1 text-xs text-muted-foreground/70 font-medium"
|
||||
>+{course
|
||||
.meetingTimes
|
||||
.length -
|
||||
1}</span
|
||||
>
|
||||
{/if}
|
||||
</span>
|
||||
{/if}
|
||||
</SimpleTooltip>
|
||||
</td>
|
||||
{:else if colId === "location"}
|
||||
{@const concern = getDeliveryConcern(course)}
|
||||
{@const accentColor = concernAccentColor(concern)}
|
||||
{@const locTooltip = formatLocationTooltip(course)}
|
||||
{@const locDisplay = formatLocationDisplay(course)}
|
||||
<td class="py-2 px-2 whitespace-nowrap">
|
||||
{#if locTooltip}
|
||||
<SimpleTooltip
|
||||
text={locTooltip}
|
||||
delay={200}
|
||||
passthrough
|
||||
>
|
||||
<span
|
||||
class="text-muted-foreground"
|
||||
class:pl-2={accentColor !== null}
|
||||
style:border-left={accentColor ? `2px solid ${accentColor}` : undefined}
|
||||
>
|
||||
{locDisplay ?? "—"}
|
||||
</span>
|
||||
</SimpleTooltip>
|
||||
{:else if locDisplay}
|
||||
<span class="text-muted-foreground">
|
||||
{locDisplay}
|
||||
</span>
|
||||
{:else}
|
||||
<span class="text-xs text-muted-foreground/50">—</span>
|
||||
{/if}
|
||||
</td>
|
||||
{:else if colId === "seats"}
|
||||
<td
|
||||
class="py-2 px-2 text-right whitespace-nowrap"
|
||||
>
|
||||
<SimpleTooltip
|
||||
text="{openSeats(
|
||||
course,
|
||||
)} of {course.maxEnrollment} seats open, {course.enrollment} enrolled{course.waitCount >
|
||||
0
|
||||
? `, ${course.waitCount} waitlisted`
|
||||
: ''}"
|
||||
delay={200}
|
||||
side="left"
|
||||
passthrough
|
||||
>
|
||||
<span
|
||||
class="inline-flex items-center gap-1.5"
|
||||
>
|
||||
<span
|
||||
class="size-1.5 rounded-full {seatsDotColor(
|
||||
course,
|
||||
)} shrink-0"
|
||||
></span>
|
||||
<span
|
||||
class="{seatsColor(
|
||||
course,
|
||||
)} font-medium tabular-nums"
|
||||
>{#if openSeats(course) === 0}Full{:else}{openSeats(
|
||||
course,
|
||||
)} open{/if}</span
|
||||
>
|
||||
<span
|
||||
class="text-muted-foreground/60 tabular-nums"
|
||||
>{course.enrollment}/{course.maxEnrollment}{#if course.waitCount > 0}
|
||||
· WL {course.waitCount}/{course.waitCapacity}{/if}</span
|
||||
>
|
||||
</span>
|
||||
</SimpleTooltip>
|
||||
</td>
|
||||
{/if}
|
||||
{/each}
|
||||
</tr>
|
||||
{#if expandedCrn === course.crn}
|
||||
<tr>
|
||||
<td
|
||||
colspan={visibleColumnIds.length}
|
||||
class="p-0"
|
||||
>
|
||||
<div
|
||||
transition:slide={{ duration: 200 }}
|
||||
>
|
||||
<CourseDetail {course} />
|
||||
</div>
|
||||
</td>
|
||||
</tr>
|
||||
{/if}
|
||||
</tbody>
|
||||
{/each}
|
||||
{/if}
|
||||
</table>
|
||||
</ContextMenu.Trigger>
|
||||
<ContextMenu.Portal>
|
||||
<ContextMenu.Content
|
||||
class="z-50 min-w-40 rounded-md border border-border bg-card p-1 text-card-foreground shadow-lg"
|
||||
forceMount
|
||||
>
|
||||
{#snippet child({ wrapperProps, props, open })}
|
||||
{#if open}
|
||||
<div {...wrapperProps}>
|
||||
<div
|
||||
{...props}
|
||||
in:fade={{ duration: 100 }}
|
||||
out:fade={{ duration: 100 }}
|
||||
>
|
||||
{@render columnVisibilityGroup(
|
||||
ContextMenu.Group,
|
||||
ContextMenu.GroupHeading,
|
||||
ContextMenu.CheckboxItem,
|
||||
ContextMenu.Separator,
|
||||
ContextMenu.Item,
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
{/if}
|
||||
{/snippet}
|
||||
</ContextMenu.Content>
|
||||
</ContextMenu.Portal>
|
||||
</ContextMenu.Root>
|
||||
</div>
|
||||
@@ -0,0 +1,36 @@
<script lang="ts">
  import { cn } from "$lib/utils";

  let {
    commitHash,
    showStatusLink = true,
    class: className,
  }: {
    commitHash?: string | null;
    showStatusLink?: boolean;
    class?: string;
  } = $props();
</script>

<div class={cn("flex justify-center items-center gap-2 mt-auto pt-6 pb-4", className)}>
  {#if __APP_VERSION__}
    <span class="text-xs text-muted-foreground">v{__APP_VERSION__}</span>
    <div class="w-px h-3 bg-muted-foreground opacity-30"></div>
  {/if}
  <a
    href={commitHash
      ? `https://github.com/Xevion/banner/commit/${commitHash}`
      : "https://github.com/Xevion/banner"}
    target="_blank"
    rel="noopener noreferrer"
    class="text-xs text-muted-foreground no-underline hover:underline"
  >
    GitHub
  </a>
  {#if showStatusLink}
    <div class="w-px h-3 bg-muted-foreground opacity-30"></div>
    <a href="/health" class="text-xs text-muted-foreground no-underline hover:underline">
      Status
    </a>
  {/if}
</div>
@@ -0,0 +1,162 @@
|
||||
<script lang="ts">
|
||||
import { Select } from "bits-ui";
|
||||
import { ChevronUp, ChevronDown } from "@lucide/svelte";
|
||||
import { fly } from "svelte/transition";
|
||||
|
||||
let {
|
||||
totalCount,
|
||||
offset,
|
||||
limit,
|
||||
onPageChange,
|
||||
}: {
|
||||
totalCount: number;
|
||||
offset: number;
|
||||
limit: number;
|
||||
onPageChange: (newOffset: number) => void;
|
||||
} = $props();
|
||||
|
||||
const currentPage = $derived(Math.floor(offset / limit) + 1);
|
||||
const totalPages = $derived(Math.ceil(totalCount / limit));
|
||||
const start = $derived(offset + 1);
|
||||
const end = $derived(Math.min(offset + limit, totalCount));
|
||||
|
||||
// Track direction for slide animation
|
||||
let prevPage = $state(1);
|
||||
let direction = $state(0);
|
||||
|
||||
$effect(() => {
|
||||
const page = currentPage;
|
||||
if (page !== prevPage) {
|
||||
direction = page > prevPage ? 1 : -1;
|
||||
prevPage = page;
|
||||
}
|
||||
});
|
||||
|
||||
// 5 page slots: current-2, current-1, current, current+1, current+2
|
||||
const pageSlots = $derived([-2, -1, 0, 1, 2].map((delta) => currentPage + delta));
|
||||
|
||||
function isSlotVisible(page: number): boolean {
|
||||
return page >= 1 && page <= totalPages;
|
||||
}
|
||||
|
||||
function goToPage(page: number) {
|
||||
onPageChange((page - 1) * limit);
|
||||
}
|
||||
|
||||
// Build items array for the Select dropdown
|
||||
const pageItems = $derived(
|
||||
Array.from({ length: totalPages }, (_, i) => ({
|
||||
value: String(i + 1),
|
||||
label: String(i + 1),
|
||||
}))
|
||||
);
|
||||
|
||||
const selectValue = $derived(String(currentPage));
|
||||
</script>
|
||||
|
||||
{#if totalCount > 0 && totalPages > 1}
|
||||
<div class="flex items-center text-sm">
|
||||
<!-- Left zone: result count -->
|
||||
<div class="flex-1">
|
||||
<span class="text-muted-foreground">
|
||||
Showing {start}–{end} of {totalCount} courses
|
||||
</span>
|
||||
</div>
|
||||
|
||||
<!-- Center zone: page buttons -->
|
||||
<div class="flex items-center gap-1">
|
||||
{#key currentPage}
|
||||
{#each pageSlots as page, i (i)}
|
||||
{#if i === 2}
|
||||
<!-- Center slot: current page with dropdown trigger -->
|
||||
<Select.Root
|
||||
type="single"
|
||||
value={selectValue}
|
||||
onValueChange={(v) => {
|
||||
if (v) goToPage(Number(v));
|
||||
}}
|
||||
items={pageItems}
|
||||
>
|
||||
<Select.Trigger
|
||||
class="inline-flex items-center justify-center gap-1 w-auto min-w-9 h-9 px-2.5
|
||||
rounded-md text-sm font-medium tabular-nums
|
||||
border border-border bg-card text-foreground
|
||||
hover:bg-muted/50 active:bg-muted transition-colors
|
||||
cursor-pointer select-none outline-none
|
||||
focus-visible:ring-2 focus-visible:ring-ring focus-visible:ring-offset-2 focus-visible:ring-offset-background"
|
||||
aria-label="Page {currentPage} of {totalPages}, click to select page"
|
||||
>
|
||||
<span in:fly={{ x: direction * 20, duration: 200 }}>{currentPage}</span>
|
||||
<ChevronUp class="size-3 text-muted-foreground" />
|
||||
</Select.Trigger>
|
||||
<Select.Portal>
|
||||
<Select.Content
|
||||
class="border border-border bg-card shadow-md outline-hidden z-50
|
||||
max-h-72 min-w-16 w-auto
|
||||
select-none rounded-md p-1
|
||||
data-[state=open]:animate-in data-[state=closed]:animate-out
|
||||
data-[state=closed]:fade-out-0 data-[state=open]:fade-in-0
|
||||
data-[state=closed]:zoom-out-95 data-[state=open]:zoom-in-95
|
||||
data-[side=top]:slide-in-from-bottom-2
|
||||
data-[side=bottom]:slide-in-from-top-2"
|
||||
side="top"
|
||||
sideOffset={6}
|
||||
>
|
||||
<Select.ScrollUpButton class="flex w-full items-center justify-center py-0.5">
|
||||
<ChevronUp class="size-3.5 text-muted-foreground" />
|
||||
</Select.ScrollUpButton>
|
||||
<Select.Viewport class="p-0.5">
|
||||
{#each pageItems as item (item.value)}
|
||||
<Select.Item
|
||||
class="rounded-sm outline-hidden flex h-8 w-full select-none items-center
|
||||
justify-center px-3 text-sm tabular-nums
|
||||
data-[highlighted]:bg-accent data-[highlighted]:text-accent-foreground
|
||||
data-[selected]:font-semibold"
|
||||
value={item.value}
|
||||
label={item.label}
|
||||
>
|
||||
{item.label}
|
||||
</Select.Item>
|
||||
{/each}
|
||||
</Select.Viewport>
|
||||
<Select.ScrollDownButton class="flex w-full items-center justify-center py-0.5">
|
||||
<ChevronDown class="size-3.5 text-muted-foreground" />
|
||||
</Select.ScrollDownButton>
|
||||
</Select.Content>
|
||||
</Select.Portal>
|
||||
</Select.Root>
|
||||
{:else}
|
||||
<!-- Side slot: navigable page button or invisible placeholder -->
|
||||
<button
|
||||
class="inline-flex items-center justify-center w-9 h-9
|
||||
rounded-md text-sm tabular-nums
|
||||
text-muted-foreground
|
||||
hover:bg-muted/50 hover:text-foreground active:bg-muted transition-colors
|
||||
cursor-pointer select-none
|
||||
focus-visible:ring-2 focus-visible:ring-ring focus-visible:ring-offset-2 focus-visible:ring-offset-background
|
||||
{isSlotVisible(page) ? '' : 'invisible pointer-events-none'}"
|
||||
onclick={() => goToPage(page)}
|
||||
aria-label="Go to page {page}"
|
||||
aria-hidden={!isSlotVisible(page)}
|
||||
tabindex={isSlotVisible(page) ? 0 : -1}
|
||||
disabled={!isSlotVisible(page)}
|
||||
in:fly={{ x: direction * 20, duration: 200 }}
|
||||
>
|
||||
{page}
|
||||
</button>
|
||||
{/if}
|
||||
{/each}
|
||||
{/key}
|
||||
</div>
|
||||
|
||||
<!-- Right zone: spacer for centering -->
|
||||
<div class="flex-1"></div>
|
||||
</div>
|
||||
{:else if totalCount > 0}
|
||||
<!-- Single page: just show the count, no pagination controls -->
|
||||
<div class="flex items-center text-sm">
|
||||
<span class="text-muted-foreground">
|
||||
Showing {start}–{end} of {totalCount} courses
|
||||
</span>
|
||||
</div>
|
||||
{/if}
|
||||
@@ -0,0 +1,45 @@
|
||||
<script lang="ts">
|
||||
import type { Term, Subject } from "$lib/api";
|
||||
import SimpleTooltip from "./SimpleTooltip.svelte";
|
||||
import TermCombobox from "./TermCombobox.svelte";
|
||||
import SubjectCombobox from "./SubjectCombobox.svelte";
|
||||
|
||||
let {
|
||||
terms,
|
||||
subjects,
|
||||
selectedTerm = $bindable(),
|
||||
selectedSubjects = $bindable(),
|
||||
query = $bindable(),
|
||||
openOnly = $bindable(),
|
||||
}: {
|
||||
terms: Term[];
|
||||
subjects: Subject[];
|
||||
selectedTerm: string;
|
||||
selectedSubjects: string[];
|
||||
query: string;
|
||||
openOnly: boolean;
|
||||
} = $props();
|
||||
</script>
|
||||
|
||||
<div class="flex flex-wrap gap-3 items-start">
|
||||
<TermCombobox {terms} bind:value={selectedTerm} />
|
||||
|
||||
<SubjectCombobox {subjects} bind:value={selectedSubjects} />
|
||||
|
||||
<input
|
||||
type="text"
|
||||
placeholder="Search courses..."
|
||||
aria-label="Search courses"
|
||||
bind:value={query}
|
||||
class="h-9 border border-border bg-card text-foreground rounded-md px-3 text-sm flex-1 min-w-[200px]
|
||||
focus-visible:outline-none focus-visible:ring-2 focus-visible:ring-ring focus-visible:ring-offset-2 focus-visible:ring-offset-background
|
||||
transition-colors"
|
||||
/>
|
||||
|
||||
<SimpleTooltip text="Show only courses with available seats" delay={200} passthrough>
|
||||
<label class="flex items-center gap-1.5 h-9 text-sm text-muted-foreground cursor-pointer">
|
||||
<input type="checkbox" bind:checked={openOnly} />
|
||||
Open only
|
||||
</label>
|
||||
</SimpleTooltip>
|
||||
</div>
|
||||
@@ -0,0 +1,34 @@
|
||||
<script lang="ts">
|
||||
export interface SearchMeta {
|
||||
totalCount: number;
|
||||
durationMs: number;
|
||||
timestamp: Date;
|
||||
}
|
||||
|
||||
let { meta }: { meta: SearchMeta | null } = $props();
|
||||
|
||||
let formattedTime = $derived(
|
||||
meta
|
||||
? meta.timestamp.toLocaleTimeString(undefined, {
|
||||
hour: "2-digit",
|
||||
minute: "2-digit",
|
||||
second: "2-digit",
|
||||
})
|
||||
: ""
|
||||
);
|
||||
|
||||
let countLabel = $derived(meta ? meta.totalCount.toLocaleString() : "");
|
||||
let resultNoun = $derived(meta ? (meta.totalCount !== 1 ? "results" : "result") : "");
|
||||
let durationLabel = $derived(meta ? `${Math.round(meta.durationMs)}ms` : "");
|
||||
</script>
|
||||
|
||||
{#if meta}
|
||||
<p
|
||||
class="pl-1 text-xs"
|
||||
title="Last searched at {formattedTime}"
|
||||
>
|
||||
<span class="text-muted-foreground/70">{countLabel}</span>
|
||||
<span class="text-muted-foreground/35">{resultNoun} in</span>
|
||||
<span class="text-muted-foreground/70">{durationLabel}</span>
|
||||
</p>
|
||||
{/if}
|
||||
@@ -0,0 +1,31 @@
<script lang="ts">
  import { Tooltip } from "bits-ui";
  import type { Snippet } from "svelte";

  let {
    text,
    delay = 150,
    side = "top",
    passthrough = false,
    children,
  }: {
    text: string;
    delay?: number;
    side?: "top" | "bottom" | "left" | "right";
    passthrough?: boolean;
    children: Snippet;
  } = $props();
</script>

<Tooltip.Root delayDuration={delay} disableHoverableContent={passthrough}>
  <Tooltip.Trigger>
    {@render children()}
  </Tooltip.Trigger>
  <Tooltip.Content
    {side}
    sideOffset={6}
    class="z-50 bg-card text-card-foreground text-xs border border-border rounded-md px-2.5 py-1.5 shadow-md whitespace-pre-line max-w-max"
  >
    {text}
  </Tooltip.Content>
</Tooltip.Root>
@@ -0,0 +1,161 @@
|
||||
<script lang="ts">
|
||||
import { Combobox } from "bits-ui";
|
||||
import { Check, ChevronsUpDown } from "@lucide/svelte";
|
||||
import { fly } from "svelte/transition";
|
||||
import type { Subject } from "$lib/api";
|
||||
|
||||
let {
|
||||
subjects,
|
||||
value = $bindable(),
|
||||
}: {
|
||||
subjects: Subject[];
|
||||
value: string[];
|
||||
} = $props();
|
||||
|
||||
let open = $state(false);
|
||||
let searchValue = $state("");
|
||||
let containerEl = $state<HTMLDivElement>(null!);
|
||||
|
||||
const filteredSubjects = $derived.by(() => {
|
||||
const query = searchValue.toLowerCase().trim();
|
||||
if (query === "") return subjects;
|
||||
|
||||
const exactCode: Subject[] = [];
|
||||
const codeStartsWith: Subject[] = [];
|
||||
const descriptionMatch: Subject[] = [];
|
||||
|
||||
for (const s of subjects) {
|
||||
const codeLower = s.code.toLowerCase();
|
||||
const descLower = s.description.toLowerCase();
|
||||
|
||||
if (codeLower === query) {
|
||||
exactCode.push(s);
|
||||
} else if (codeLower.startsWith(query)) {
|
||||
codeStartsWith.push(s);
|
||||
} else if (descLower.includes(query) || codeLower.includes(query)) {
|
||||
descriptionMatch.push(s);
|
||||
}
|
||||
}
|
||||
|
||||
return [...exactCode, ...codeStartsWith, ...descriptionMatch];
|
||||
});
|
||||
|
||||
const MAX_VISIBLE_CHIPS = 3;
|
||||
const visibleChips = $derived(value.slice(0, MAX_VISIBLE_CHIPS));
|
||||
const overflowCount = $derived(Math.max(0, value.length - MAX_VISIBLE_CHIPS));
|
||||
|
||||
function removeSubject(code: string) {
|
||||
value = value.filter((v) => v !== code);
|
||||
}
|
||||
|
||||
// bits-ui sets the input text to the last selected item's label — clear it
|
||||
$effect(() => {
|
||||
value;
|
||||
const input = containerEl?.querySelector("input");
|
||||
if (input) {
|
||||
input.value = "";
|
||||
searchValue = "";
|
||||
}
|
||||
});
|
||||
</script>
|
||||
|
||||
<Combobox.Root
|
||||
type="multiple"
|
||||
bind:value
|
||||
bind:open
|
||||
onOpenChange={(o: boolean) => {
|
||||
if (!o) searchValue = "";
|
||||
}}
|
||||
>
|
||||
<!-- svelte-ignore a11y_click_events_have_key_events -->
|
||||
<!-- svelte-ignore a11y_no_static_element_interactions -->
|
||||
<div
|
||||
class="relative h-9 rounded-md border border-border bg-card
|
||||
flex flex-nowrap items-center gap-1 w-56 pr-9 overflow-hidden cursor-pointer
|
||||
has-[:focus-visible]:ring-2 has-[:focus-visible]:ring-ring has-[:focus-visible]:ring-offset-2 has-[:focus-visible]:ring-offset-background"
|
||||
bind:this={containerEl}
|
||||
onclick={() => { containerEl?.querySelector('input')?.focus(); }}
|
||||
>
|
||||
{#if value.length > 0}
|
||||
{#each (open ? value : visibleChips) as code (code)}
|
||||
<span
|
||||
role="button"
|
||||
tabindex="-1"
|
||||
onmousedown={(e) => { e.preventDefault(); e.stopPropagation(); }}
|
||||
onclick={(e) => { e.stopPropagation(); removeSubject(code); }}
|
||||
onkeydown={(e) => { if (e.key === "Enter" || e.key === " ") { e.stopPropagation(); removeSubject(code); } }}
|
||||
class="inline-flex items-center rounded bg-muted px-1.5 py-0.5 text-xs font-mono shrink-0
|
||||
text-muted-foreground hover:outline hover:outline-1 hover:outline-ring
|
||||
cursor-pointer transition-[outline] duration-100 first:ml-2"
|
||||
>
|
||||
{code}
|
||||
</span>
|
||||
{/each}
|
||||
{#if !open && overflowCount > 0}
|
||||
<span class="text-xs text-muted-foreground shrink-0">+{overflowCount}</span>
|
||||
{/if}
|
||||
{/if}
|
||||
<Combobox.Input
|
||||
|
||||
oninput={(e) => (searchValue = e.currentTarget.value)}
|
||||
onfocus={() => { open = true; }}
|
||||
class="h-full min-w-0 flex-1 bg-transparent text-muted-foreground text-sm
|
||||
placeholder:text-muted-foreground outline-none border-none
|
||||
{value.length > 0 ? 'pl-1' : 'pl-3'}"
|
||||
placeholder={value.length > 0 ? "Filter..." : "All Subjects"}
|
||||
aria-label="Search subjects"
|
||||
autocomplete="off"
|
||||
autocorrect="off"
|
||||
spellcheck={false}
|
||||
/>
|
||||
<span class="absolute end-2 top-1/2 -translate-y-1/2 text-muted-foreground pointer-events-none">
|
||||
<ChevronsUpDown class="size-4" />
|
||||
</span>
|
||||
</div>
|
||||
<Combobox.Portal>
|
||||
<Combobox.Content
|
||||
customAnchor={containerEl}
|
||||
class="border border-border bg-card shadow-md
|
||||
outline-hidden z-50
|
||||
max-h-72 min-w-[var(--bits-combobox-anchor-width)] w-max max-w-96
|
||||
select-none rounded-md p-1
|
||||
data-[side=bottom]:translate-y-1 data-[side=top]:-translate-y-1"
|
||||
sideOffset={4}
|
||||
forceMount
|
||||
>
|
||||
{#snippet child({ wrapperProps, props, open: isOpen })}
|
||||
{#if isOpen}
|
||||
<div {...wrapperProps}>
|
||||
<div {...props} transition:fly={{ duration: 150, y: -4 }}>
|
||||
<Combobox.Viewport class="p-0.5">
|
||||
{#each filteredSubjects as subject (subject.code)}
|
||||
<Combobox.Item
|
||||
class="rounded-sm outline-hidden flex h-8 w-full select-none items-center gap-2 px-2 text-sm whitespace-nowrap
|
||||
data-[highlighted]:bg-accent data-[highlighted]:text-accent-foreground"
|
||||
value={subject.code}
|
||||
label={subject.description}
|
||||
>
|
||||
{#snippet children({ selected })}
|
||||
<span class="inline-flex items-center justify-center rounded bg-muted px-1 py-0.5
|
||||
text-xs font-mono text-muted-foreground w-10 shrink-0 text-center">
|
||||
{subject.code}
|
||||
</span>
|
||||
<span class="flex-1">{subject.description}</span>
|
||||
{#if selected}
|
||||
<Check class="ml-auto size-4 shrink-0" />
|
||||
{/if}
|
||||
{/snippet}
|
||||
</Combobox.Item>
|
||||
{:else}
|
||||
<span class="block px-2 py-2 text-sm text-muted-foreground">
|
||||
No subjects found.
|
||||
</span>
|
||||
{/each}
|
||||
</Combobox.Viewport>
|
||||
</div>
|
||||
</div>
|
||||
{/if}
|
||||
{/snippet}
|
||||
</Combobox.Content>
|
||||
</Combobox.Portal>
|
||||
</Combobox.Root>
|
||||
@@ -0,0 +1,139 @@
|
||||
<script lang="ts">
|
||||
import { Combobox } from "bits-ui";
|
||||
import { Check, ChevronsUpDown } from "@lucide/svelte";
|
||||
import { fly } from "svelte/transition";
|
||||
import type { Term } from "$lib/api";
|
||||
|
||||
let {
|
||||
terms,
|
||||
value = $bindable(),
|
||||
}: {
|
||||
terms: Term[];
|
||||
value: string;
|
||||
} = $props();
|
||||
|
||||
let open = $state(false);
|
||||
let searchValue = $state("");
|
||||
let containerEl = $state<HTMLDivElement>(null!);
|
||||
|
||||
const currentTermCode = $derived(
|
||||
terms.find((t) => !t.description.includes("(View Only)"))?.code ?? ""
|
||||
);
|
||||
|
||||
const selectedLabel = $derived(
|
||||
terms.find((t) => t.code === value)?.description ?? "Select term..."
|
||||
);
|
||||
|
||||
const filteredTerms = $derived.by(() => {
|
||||
const query = searchValue.toLowerCase();
|
||||
const matched =
|
||||
query === "" ? terms : terms.filter((t) => t.description.toLowerCase().includes(query));
|
||||
|
||||
const current = matched.find((t) => t.code === currentTermCode);
|
||||
const rest = matched.filter((t) => t.code !== currentTermCode);
|
||||
return current ? [current, ...rest] : rest;
|
||||
});
|
||||
|
||||
// Manage DOM input text: clear when open for searching, restore label when closed
|
||||
$effect(() => {
|
||||
const _open = open;
|
||||
void value; // track selection changes
|
||||
const _label = selectedLabel;
|
||||
const input = containerEl?.querySelector("input");
|
||||
if (!input) return;
|
||||
if (_open) {
|
||||
input.value = "";
|
||||
searchValue = "";
|
||||
} else {
|
||||
input.value = _label;
|
||||
}
|
||||
});
|
||||
</script>
|
||||
|
||||
<Combobox.Root
|
||||
type="single"
|
||||
bind:value={() => value, (v) => { if (v) value = v; }}
|
||||
bind:open
|
||||
onOpenChange={(o: boolean) => {
|
||||
if (!o) searchValue = "";
|
||||
}}
|
||||
>
|
||||
<!-- svelte-ignore a11y_no_static_element_interactions -->
|
||||
<div
|
||||
class="relative h-9 rounded-md border border-border bg-card
|
||||
flex items-center w-40 cursor-pointer
|
||||
has-[:focus-visible]:ring-2 has-[:focus-visible]:ring-ring has-[:focus-visible]:ring-offset-2 has-[:focus-visible]:ring-offset-background"
|
||||
role="presentation"
|
||||
bind:this={containerEl}
|
||||
onclick={() => { containerEl?.querySelector('input')?.focus(); }}
|
||||
onkeydown={() => { containerEl?.querySelector('input')?.focus(); }}
|
||||
>
|
||||
<Combobox.Input
|
||||
oninput={(e) => (searchValue = e.currentTarget.value)}
|
||||
onfocus={() => { open = true; }}
|
||||
class="h-full w-full bg-transparent text-muted-foreground text-sm
|
||||
placeholder:text-muted-foreground outline-none border-none
|
||||
pl-3 pr-9 truncate"
|
||||
placeholder="Select term..."
|
||||
aria-label="Select term"
|
||||
autocomplete="off"
|
||||
autocorrect="off"
|
||||
spellcheck={false}
|
||||
/>
|
||||
<span class="absolute end-2 top-1/2 -translate-y-1/2 text-muted-foreground pointer-events-none">
|
||||
<ChevronsUpDown class="size-4" />
|
||||
</span>
|
||||
</div>
|
||||
<Combobox.Portal>
|
||||
<Combobox.Content
|
||||
customAnchor={containerEl}
|
||||
class="border border-border bg-card shadow-md
|
||||
outline-hidden z-50
|
||||
max-h-72 min-w-[var(--bits-combobox-anchor-width)]
|
||||
select-none rounded-md p-1
|
||||
data-[side=bottom]:translate-y-1 data-[side=top]:-translate-y-1"
|
||||
sideOffset={4}
|
||||
forceMount
|
||||
>
|
||||
{#snippet child({ wrapperProps, props, open: isOpen })}
|
||||
{#if isOpen}
|
||||
<div {...wrapperProps}>
|
||||
<div {...props} transition:fly={{ duration: 150, y: -4 }}>
|
||||
<Combobox.Viewport class="p-0.5">
|
||||
{#each filteredTerms as term, i (term.code)}
|
||||
{#if i === 1 && term.code !== currentTermCode && filteredTerms[0]?.code === currentTermCode}
|
||||
<div class="mx-2 my-1 h-px bg-border"></div>
|
||||
{/if}
|
||||
<Combobox.Item
|
||||
class="rounded-sm outline-hidden flex h-8 w-full select-none items-center px-2 text-sm
|
||||
data-[highlighted]:bg-accent data-[highlighted]:text-accent-foreground
|
||||
{term.code === value ? 'cursor-default' : 'cursor-pointer'}
|
||||
{term.code === currentTermCode ? 'font-medium text-foreground' : 'text-foreground'}"
|
||||
value={term.code}
|
||||
label={term.description}
|
||||
>
|
||||
{#snippet children({ selected })}
|
||||
<span class="flex-1 truncate">
|
||||
{term.description}
|
||||
{#if term.code === currentTermCode}
|
||||
<span class="ml-1.5 text-xs text-muted-foreground font-normal">current</span>
|
||||
{/if}
|
||||
</span>
|
||||
{#if selected}
|
||||
<Check class="ml-2 size-4 shrink-0" />
|
||||
{/if}
|
||||
{/snippet}
|
||||
</Combobox.Item>
|
||||
{:else}
|
||||
<span class="block px-2 py-2 text-sm text-muted-foreground">
|
||||
No terms found.
|
||||
</span>
|
||||
{/each}
|
||||
</Combobox.Viewport>
|
||||
</div>
|
||||
</div>
|
||||
{/if}
|
||||
{/snippet}
|
||||
</Combobox.Content>
|
||||
</Combobox.Portal>
|
||||
</Combobox.Root>
|
||||
@@ -0,0 +1,69 @@
|
||||
<script lang="ts">
|
||||
import { tick } from "svelte";
|
||||
import { Moon, Sun } from "@lucide/svelte";
|
||||
import { themeStore } from "$lib/stores/theme.svelte";
|
||||
import SimpleTooltip from "./SimpleTooltip.svelte";
|
||||
|
||||
/**
|
||||
* Theme toggle with View Transitions API circular reveal animation.
|
||||
* The clip-path circle expands from the click point to cover the viewport.
|
||||
*/
|
||||
async function handleToggle(event: MouseEvent) {
|
||||
const supportsViewTransition =
|
||||
typeof document !== "undefined" &&
|
||||
"startViewTransition" in document &&
|
||||
!window.matchMedia("(prefers-reduced-motion: reduce)").matches;
|
||||
|
||||
if (!supportsViewTransition) {
|
||||
themeStore.toggle();
|
||||
return;
|
||||
}
|
||||
|
||||
const x = event.clientX;
|
||||
const y = event.clientY;
|
||||
const endRadius = Math.hypot(Math.max(x, innerWidth - x), Math.max(y, innerHeight - y));
|
||||
|
||||
const transition = document.startViewTransition(async () => {
|
||||
themeStore.toggle();
|
||||
await tick();
|
||||
});
|
||||
|
||||
transition.ready.then(() => {
|
||||
document.documentElement.animate(
|
||||
{
|
||||
clipPath: [`circle(0px at ${x}px ${y}px)`, `circle(${endRadius}px at ${x}px ${y}px)`],
|
||||
},
|
||||
{
|
||||
duration: 500,
|
||||
easing: "cubic-bezier(0.4, 0, 0.2, 1)",
|
||||
pseudoElement: "::view-transition-new(root)",
|
||||
}
|
||||
);
|
||||
});
|
||||
}
|
||||
</script>
|
||||
|
||||
<SimpleTooltip text={themeStore.isDark ? "Switch to light mode" : "Switch to dark mode"} delay={200} side="bottom" passthrough>
|
||||
<button
|
||||
type="button"
|
||||
onclick={(e) => handleToggle(e)}
|
||||
aria-label={themeStore.isDark ? "Switch to light mode" : "Switch to dark mode"}
|
||||
class="cursor-pointer border-none rounded-md flex items-center justify-center p-2 scale-125
|
||||
text-muted-foreground hover:bg-muted bg-transparent transition-colors"
|
||||
>
|
||||
<div class="relative size-[18px]">
|
||||
<Sun
|
||||
size={18}
|
||||
class="absolute inset-0 transition-all duration-300 {themeStore.isDark
|
||||
? 'rotate-90 scale-0 opacity-0'
|
||||
: 'rotate-0 scale-100 opacity-100'}"
|
||||
/>
|
||||
<Moon
|
||||
size={18}
|
||||
class="absolute inset-0 transition-all duration-300 {themeStore.isDark
|
||||
? 'rotate-0 scale-100 opacity-100'
|
||||
: '-rotate-90 scale-0 opacity-0'}"
|
||||
/>
|
||||
</div>
|
||||
</button>
|
||||
</SimpleTooltip>
|
||||
@@ -0,0 +1,118 @@
|
||||
import {
|
||||
type RowData,
|
||||
type TableOptions,
|
||||
type TableOptionsResolved,
|
||||
type TableState,
|
||||
createTable,
|
||||
} from "@tanstack/table-core";
|
||||
|
||||
/**
|
||||
* Creates a reactive TanStack table for Svelte 5 using runes.
|
||||
*
|
||||
* Adapted from shadcn-svelte's data-table wrapper — uses `$state` and
|
||||
* `$effect.pre` instead of Svelte stores for reactivity.
|
||||
*/
|
||||
export function createSvelteTable<TData extends RowData>(options: TableOptions<TData>) {
|
||||
const resolvedOptions: TableOptionsResolved<TData> = mergeObjects(
|
||||
{
|
||||
state: {},
|
||||
onStateChange() {},
|
||||
renderFallbackValue: null,
|
||||
mergeOptions: (
|
||||
defaultOptions: TableOptions<TData>,
|
||||
options: Partial<TableOptions<TData>>
|
||||
) => {
|
||||
return mergeObjects(defaultOptions, options);
|
||||
},
|
||||
},
|
||||
options
|
||||
);
|
||||
|
||||
const table = createTable(resolvedOptions);
|
||||
let state = $state<Partial<TableState>>(table.initialState);
|
||||
|
||||
function updateOptions() {
|
||||
table.setOptions((prev) => {
|
||||
return mergeObjects(prev, options, {
|
||||
state: mergeObjects(state, options.state || {}),
|
||||
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
onStateChange: (updater: any) => {
|
||||
if (updater instanceof Function) state = updater(state);
|
||||
else state = mergeObjects(state, updater as Partial<TableState>);
|
||||
|
||||
options.onStateChange?.(updater);
|
||||
},
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
updateOptions();
|
||||
|
||||
$effect.pre(() => {
|
||||
updateOptions();
|
||||
});
|
||||
|
||||
return table;
|
||||
}
|
||||
|
||||
type MaybeThunk<T extends object> = T | (() => T | null | undefined);
|
||||
type Intersection<T extends readonly unknown[]> = (T extends [infer H, ...infer R]
|
||||
? H & Intersection<R>
|
||||
: unknown) & {};
|
||||
|
||||
/**
|
||||
* Lazily merges several objects (or thunks) while preserving
|
||||
* getter semantics from every source. Proxy-based.
|
||||
*/
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
export function mergeObjects<Sources extends readonly MaybeThunk<any>[]>(
|
||||
...sources: Sources
|
||||
): Intersection<{ [K in keyof Sources]: Sources[K] }> {
|
||||
const resolve = <T extends object>(src: MaybeThunk<T>): T | undefined =>
|
||||
typeof src === "function" ? (src() ?? undefined) : src;
|
||||
|
||||
const findSourceWithKey = (key: PropertyKey) => {
|
||||
for (let i = sources.length - 1; i >= 0; i--) {
|
||||
const obj = resolve(sources[i]);
|
||||
if (obj && key in obj) return obj;
|
||||
}
|
||||
return undefined;
|
||||
};
|
||||
|
||||
return new Proxy(Object.create(null), {
|
||||
get(_, key) {
|
||||
const src = findSourceWithKey(key);
|
||||
return src?.[key as never];
|
||||
},
|
||||
|
||||
has(_, key) {
|
||||
return !!findSourceWithKey(key);
|
||||
},
|
||||
|
||||
ownKeys(): (string | symbol)[] {
|
||||
const all = new Set<string | symbol>();
|
||||
for (const s of sources) {
|
||||
const obj = resolve(s);
|
||||
if (obj) {
|
||||
for (const k of Reflect.ownKeys(obj) as (string | symbol)[]) {
|
||||
all.add(k);
|
||||
}
|
||||
}
|
||||
}
|
||||
return [...all];
|
||||
},
|
||||
|
||||
getOwnPropertyDescriptor(_, key) {
|
||||
const src = findSourceWithKey(key);
|
||||
if (!src) return undefined;
|
||||
return {
|
||||
configurable: true,
|
||||
enumerable: true,
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
value: (src as any)[key],
|
||||
writable: true,
|
||||
};
|
||||
},
|
||||
}) as Intersection<{ [K in keyof Sources]: Sources[K] }>;
|
||||
}
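// --- Illustrative usage sketch (not part of this diff) ---
// Shows how a component script might wire createSvelteTable to reactive data.
// The import path, the Row shape, and the `rows` state are assumptions for the
// example; getCoreRowModel and ColumnDef are standard @tanstack/table-core exports.
import { getCoreRowModel, type ColumnDef } from "@tanstack/table-core";
import { createSvelteTable } from "$lib/components/table"; // assumed re-export path

type Row = { crn: string; title: string };

const columns: ColumnDef<Row>[] = [
  { accessorKey: "crn", header: "CRN" },
  { accessorKey: "title", header: "Title" },
];

let rows = $state<Row[]>([]);

// Passing `data` as a getter keeps the table live: mergeObjects preserves the
// getter, so TanStack re-reads `rows` each time it accesses options.data.
const table = createSvelteTable({
  get data() {
    return rows;
  },
  columns,
  getCoreRowModel: getCoreRowModel(),
});
// table.getRowModel().rows would then be iterated in markup, e.g. via FlexRender.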
|
||||
@@ -0,0 +1,54 @@
|
||||
<script lang="ts" module>
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
export type FlexRenderProps<TProps = any> = {
|
||||
content: unknown;
|
||||
context: TProps;
|
||||
};
|
||||
</script>
|
||||
|
||||
<script lang="ts">
|
||||
import { isRenderComponentConfig, isRenderSnippetConfig, mountComponent } from "./render-helpers.js";
|
||||
|
||||
let { content, context }: FlexRenderProps = $props();
|
||||
|
||||
function renderAction(node: HTMLElement, contentVal: typeof content) {
|
||||
let cleanup: (() => void) | undefined;
|
||||
|
||||
function render(c: typeof content) {
|
||||
cleanup?.();
|
||||
node.textContent = "";
|
||||
|
||||
if (isRenderComponentConfig(c)) {
|
||||
cleanup = mountComponent(c.component, node, c.props as Record<string, unknown>);
|
||||
}
|
||||
}
|
||||
|
||||
render(contentVal);
|
||||
|
||||
return {
|
||||
update(newContent: typeof content) {
|
||||
render(newContent);
|
||||
},
|
||||
destroy() {
|
||||
cleanup?.();
|
||||
},
|
||||
};
|
||||
}
|
||||
</script>
|
||||
|
||||
{#if isRenderSnippetConfig(content)}
|
||||
{@render content.snippet(content.props)}
|
||||
{:else if isRenderComponentConfig(content)}
|
||||
<div use:renderAction={content}></div>
|
||||
{:else if typeof content === "function"}
|
||||
{@const result = content(context)}
|
||||
{#if isRenderComponentConfig(result)}
|
||||
<div use:renderAction={result}></div>
|
||||
{:else if isRenderSnippetConfig(result)}
|
||||
{@render result.snippet(result.props)}
|
||||
{:else if typeof result === "string" || typeof result === "number"}
|
||||
{result}
|
||||
{/if}
|
||||
{:else if typeof content === "string" || typeof content === "number"}
|
||||
{content}
|
||||
{/if}
|
||||
@@ -0,0 +1,3 @@
export { default as FlexRender } from "./flex-render.svelte";
export { renderComponent, renderSnippet } from "./render-helpers.js";
export { createSvelteTable } from "./data-table.svelte.js";
@@ -0,0 +1,67 @@
|
||||
import { type Component, type Snippet, mount, unmount } from "svelte";
|
||||
|
||||
/**
|
||||
* Wraps a Svelte component so TanStack Table can render it as a column
|
||||
* header or cell. Returns a `RenderComponentConfig` that `FlexRender`
|
||||
* picks up.
|
||||
*/
|
||||
export function renderComponent<
|
||||
TProps extends Record<string, unknown>,
|
||||
TComp extends Component<TProps>,
|
||||
>(component: TComp, props: TProps) {
|
||||
return {
|
||||
component,
|
||||
props,
|
||||
[RENDER_COMPONENT_SYMBOL]: true,
|
||||
} as const;
|
||||
}
|
||||
|
||||
/**
|
||||
* Wraps a Svelte 5 raw snippet for use in TanStack Table column defs.
|
||||
*/
|
||||
export function renderSnippet<TProps>(snippet: Snippet<[TProps]>, props: TProps) {
|
||||
return {
|
||||
snippet,
|
||||
props,
|
||||
[RENDER_SNIPPET_SYMBOL]: true,
|
||||
} as const;
|
||||
}
|
||||
|
||||
// Symbols for FlexRender to detect render types
|
||||
export const RENDER_COMPONENT_SYMBOL = Symbol("renderComponent");
|
||||
export const RENDER_SNIPPET_SYMBOL = Symbol("renderSnippet");
|
||||
|
||||
export type RenderComponentConfig<
|
||||
TProps extends Record<string, unknown> = Record<string, unknown>,
|
||||
> = {
|
||||
component: Component<TProps>;
|
||||
props: TProps;
|
||||
[RENDER_COMPONENT_SYMBOL]: true;
|
||||
};
|
||||
|
||||
export type RenderSnippetConfig<TProps = unknown> = {
|
||||
snippet: Snippet<[TProps]>;
|
||||
props: TProps;
|
||||
[RENDER_SNIPPET_SYMBOL]: true;
|
||||
};
|
||||
|
||||
export function isRenderComponentConfig(value: unknown): value is RenderComponentConfig {
|
||||
return typeof value === "object" && value !== null && RENDER_COMPONENT_SYMBOL in value;
|
||||
}
|
||||
|
||||
export function isRenderSnippetConfig(value: unknown): value is RenderSnippetConfig {
|
||||
return typeof value === "object" && value !== null && RENDER_SNIPPET_SYMBOL in value;
|
||||
}
|
||||
|
||||
/**
|
||||
* Mount a Svelte component imperatively into a target element.
|
||||
* Used by FlexRender for component-type cells.
|
||||
*/
|
||||
export function mountComponent<TProps extends Record<string, unknown>>(
|
||||
component: Component<TProps>,
|
||||
target: HTMLElement,
|
||||
props: TProps
|
||||
) {
|
||||
const instance = mount(component, { target, props });
|
||||
return () => unmount(instance);
|
||||
}
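// --- Illustrative usage sketch (not part of this diff) ---
// A column definition whose cell is rendered through renderComponent; FlexRender
// detects the returned RenderComponentConfig and mounts the component into the cell.
// SeatBadge and both import paths are hypothetical stand-ins for this example.
import type { ColumnDef } from "@tanstack/table-core";
import { renderComponent } from "$lib/components/table"; // assumed re-export path
import SeatBadge from "./SeatBadge.svelte"; // hypothetical cell component

type Row = { crn: string; open: number };

export const seatColumn: ColumnDef<Row> = {
  accessorKey: "open",
  header: "Seats",
  cell: ({ row }) => renderComponent(SeatBadge, { open: row.original.open }),
};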
|
||||
@@ -0,0 +1,32 @@
/**
 * Reactive clipboard copy with automatic "copied" state reset.
 *
 * Returns a `copiedValue` that is non-null while the copied feedback
 * should be displayed, and a `copy()` function to trigger a copy.
 */
export function useClipboard(resetMs = 2000) {
  let copiedValue = $state<string | null>(null);
  let timeoutId: number | undefined;

  async function copy(text: string, event?: MouseEvent | KeyboardEvent) {
    event?.stopPropagation();
    try {
      await navigator.clipboard.writeText(text);
      clearTimeout(timeoutId);
      copiedValue = text;
      timeoutId = window.setTimeout(() => {
        copiedValue = null;
        timeoutId = undefined;
      }, resetMs);
    } catch (err) {
      console.error("Failed to copy to clipboard:", err);
    }
  }

  return {
    get copiedValue() {
      return copiedValue;
    },
    copy,
  };
}
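// --- Illustrative usage sketch (not part of this diff) ---
// Script-side wiring for a copy button; `crn` is a hypothetical value to copy
// and the import path is assumed. `copiedValue` reverts to null after resetMs.
import { useClipboard } from "$lib/hooks/use-clipboard.svelte"; // assumed path

const clipboard = useClipboard(1500);

function handleCopy(crn: string, event: MouseEvent) {
  void clipboard.copy(crn, event);
}

// In markup, the button label can key off the getter:
//   {clipboard.copiedValue === course.crn ? "Copied!" : course.crn}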
@@ -0,0 +1,37 @@
|
||||
import { onMount } from "svelte";
|
||||
import { OverlayScrollbars, type PartialOptions } from "overlayscrollbars";
|
||||
import { themeStore } from "$lib/stores/theme.svelte";
|
||||
|
||||
/**
|
||||
* Set up OverlayScrollbars on an element with automatic theme reactivity.
|
||||
*
|
||||
* Must be called during component initialization (uses `onMount` internally).
|
||||
* The scrollbar theme automatically syncs with `themeStore.isDark`.
|
||||
*/
|
||||
export function useOverlayScrollbars(getElement: () => HTMLElement, options: PartialOptions = {}) {
|
||||
onMount(() => {
|
||||
const element = getElement();
|
||||
const osInstance = OverlayScrollbars(element, {
|
||||
...options,
|
||||
scrollbars: {
|
||||
...options.scrollbars,
|
||||
theme: themeStore.isDark ? "os-theme-dark" : "os-theme-light",
|
||||
},
|
||||
});
|
||||
|
||||
const unwatch = $effect.root(() => {
|
||||
$effect(() => {
|
||||
osInstance.options({
|
||||
scrollbars: {
|
||||
theme: themeStore.isDark ? "os-theme-dark" : "os-theme-light",
|
||||
},
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
return () => {
|
||||
unwatch();
|
||||
osInstance.destroy();
|
||||
};
|
||||
});
|
||||
}
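// --- Illustrative usage sketch (not part of this diff) ---
// Component-init wiring: pass a getter so the element ref is read inside the
// hook's own onMount. `scrollEl` and the import path are assumptions; the
// overflow option is a standard OverlayScrollbars setting.
import { useOverlayScrollbars } from "$lib/hooks/use-overlay-scrollbars.svelte"; // assumed path

let scrollEl = $state<HTMLDivElement>(null!);

// Called during component initialization, not inside onMount — the hook
// registers its own onMount and tears the instance down on destroy.
useOverlayScrollbars(() => scrollEl, { overflow: { x: "hidden" } });

// In markup: <div bind:this={scrollEl} class="overflow-auto">…</div>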
|
||||
@@ -0,0 +1,407 @@
|
||||
import { describe, it, expect } from "vitest";
|
||||
import {
|
||||
formatTime,
|
||||
formatTimeRange,
|
||||
formatMeetingDays,
|
||||
formatMeetingDaysVerbose,
|
||||
formatMeetingTime,
|
||||
formatMeetingTimeTooltip,
|
||||
formatMeetingTimesTooltip,
|
||||
abbreviateInstructor,
|
||||
formatCreditHours,
|
||||
getPrimaryInstructor,
|
||||
isMeetingTimeTBA,
|
||||
isTimeTBA,
|
||||
formatDate,
|
||||
formatDateShort,
|
||||
formatMeetingDaysLong,
|
||||
} from "$lib/course";
|
||||
import type { DbMeetingTime, CourseResponse, InstructorResponse } from "$lib/api";
|
||||
|
||||
function makeMeetingTime(overrides: Partial<DbMeetingTime> = {}): DbMeetingTime {
|
||||
return {
|
||||
begin_time: null,
|
||||
end_time: null,
|
||||
start_date: "2024-08-26",
|
||||
end_date: "2024-12-12",
|
||||
monday: false,
|
||||
tuesday: false,
|
||||
wednesday: false,
|
||||
thursday: false,
|
||||
friday: false,
|
||||
saturday: false,
|
||||
sunday: false,
|
||||
building: null,
|
||||
building_description: null,
|
||||
room: null,
|
||||
campus: null,
|
||||
meeting_type: "CLAS",
|
||||
meeting_schedule_type: "LEC",
|
||||
...overrides,
|
||||
};
|
||||
}
|
||||
|
||||
describe("formatTime", () => {
|
||||
it("converts 0900 to 9:00 AM", () => expect(formatTime("0900")).toBe("9:00 AM"));
|
||||
it("converts 1330 to 1:30 PM", () => expect(formatTime("1330")).toBe("1:30 PM"));
|
||||
it("converts 0000 to 12:00 AM", () => expect(formatTime("0000")).toBe("12:00 AM"));
|
||||
it("converts 1200 to 12:00 PM", () => expect(formatTime("1200")).toBe("12:00 PM"));
|
||||
it("converts 2359 to 11:59 PM", () => expect(formatTime("2359")).toBe("11:59 PM"));
|
||||
it("returns TBA for null", () => expect(formatTime(null)).toBe("TBA"));
|
||||
it("returns TBA for empty string", () => expect(formatTime("")).toBe("TBA"));
|
||||
it("returns TBA for short string", () => expect(formatTime("09")).toBe("TBA"));
|
||||
});
|
||||
|
||||
describe("formatMeetingDays", () => {
|
||||
it("returns MWF for mon/wed/fri", () => {
|
||||
expect(
|
||||
formatMeetingDays(makeMeetingTime({ monday: true, wednesday: true, friday: true }))
|
||||
).toBe("MWF");
|
||||
});
|
||||
it("returns TTh for tue/thu", () => {
|
||||
expect(formatMeetingDays(makeMeetingTime({ tuesday: true, thursday: true }))).toBe("TTh");
|
||||
});
|
||||
it("returns MW for mon/wed", () => {
|
||||
expect(formatMeetingDays(makeMeetingTime({ monday: true, wednesday: true }))).toBe("MW");
|
||||
});
|
||||
it("returns MTWThF for all weekdays", () => {
|
||||
expect(
|
||||
formatMeetingDays(
|
||||
makeMeetingTime({
|
||||
monday: true,
|
||||
tuesday: true,
|
||||
wednesday: true,
|
||||
thursday: true,
|
||||
friday: true,
|
||||
})
|
||||
)
|
||||
).toBe("MTWThF");
|
||||
});
|
||||
it("returns partial abbreviation for single day", () => {
|
||||
expect(formatMeetingDays(makeMeetingTime({ monday: true }))).toBe("Mon");
|
||||
expect(formatMeetingDays(makeMeetingTime({ thursday: true }))).toBe("Thu");
|
||||
expect(formatMeetingDays(makeMeetingTime({ saturday: true }))).toBe("Sat");
|
||||
});
|
||||
it("concatenates codes for other multi-day combos", () => {
|
||||
expect(formatMeetingDays(makeMeetingTime({ monday: true, friday: true }))).toBe("MF");
|
||||
expect(formatMeetingDays(makeMeetingTime({ tuesday: true, saturday: true }))).toBe("TSa");
|
||||
expect(
|
||||
formatMeetingDays(makeMeetingTime({ wednesday: true, friday: true, sunday: true }))
|
||||
).toBe("WFSu");
|
||||
expect(
|
||||
formatMeetingDays(
|
||||
makeMeetingTime({ monday: true, tuesday: true, wednesday: true, thursday: true })
|
||||
)
|
||||
).toBe("MTWTh");
|
||||
});
|
||||
it("returns empty string when no days", () => {
|
||||
expect(formatMeetingDays(makeMeetingTime())).toBe("");
|
||||
});
|
||||
});
|
||||
|
||||
describe("formatTimeRange", () => {
|
||||
it("elides AM when both times are AM", () => {
|
||||
expect(formatTimeRange("0900", "0950")).toBe("9:00–9:50 AM");
|
||||
});
|
||||
it("elides PM when both times are PM", () => {
|
||||
expect(formatTimeRange("1315", "1430")).toBe("1:15–2:30 PM");
|
||||
});
|
||||
it("keeps both markers when crossing noon", () => {
|
||||
expect(formatTimeRange("1130", "1220")).toBe("11:30 AM–12:20 PM");
|
||||
});
|
||||
it("returns TBA for null begin", () => {
|
||||
expect(formatTimeRange(null, "0950")).toBe("TBA");
|
||||
});
|
||||
it("returns TBA for null end", () => {
|
||||
expect(formatTimeRange("0900", null)).toBe("TBA");
|
||||
});
|
||||
it("handles midnight and noon", () => {
|
||||
expect(formatTimeRange("0000", "0050")).toBe("12:00–12:50 AM");
|
||||
expect(formatTimeRange("1200", "1250")).toBe("12:00–12:50 PM");
|
||||
});
|
||||
});
|
||||
|
||||
describe("formatMeetingTime", () => {
|
||||
it("formats a standard meeting time with elided AM/PM", () => {
|
||||
expect(
|
||||
formatMeetingTime(
|
||||
makeMeetingTime({
|
||||
monday: true,
|
||||
wednesday: true,
|
||||
friday: true,
|
||||
begin_time: "0900",
|
||||
end_time: "0950",
|
||||
})
|
||||
)
|
||||
).toBe("MWF 9:00–9:50 AM");
|
||||
});
|
||||
it("keeps both markers when crossing noon", () => {
|
||||
expect(
|
||||
formatMeetingTime(
|
||||
makeMeetingTime({
|
||||
tuesday: true,
|
||||
thursday: true,
|
||||
begin_time: "1130",
|
||||
end_time: "1220",
|
||||
})
|
||||
)
|
||||
).toBe("TTh 11:30 AM–12:20 PM");
|
||||
});
|
||||
it("returns TBA when no days", () => {
|
||||
expect(formatMeetingTime(makeMeetingTime({ begin_time: "0900", end_time: "0950" }))).toBe(
|
||||
"TBA"
|
||||
);
|
||||
});
|
||||
it("returns days + TBA when no times", () => {
|
||||
expect(formatMeetingTime(makeMeetingTime({ monday: true }))).toBe("Mon TBA");
|
||||
});
|
||||
});
|
||||
|
||||
describe("abbreviateInstructor", () => {
|
||||
it("returns short names unabbreviated", () =>
|
||||
expect(abbreviateInstructor("Li, Bo")).toBe("Li, Bo"));
|
||||
it("returns names within budget unabbreviated", () =>
|
||||
expect(abbreviateInstructor("Heaps, John")).toBe("Heaps, John"));
|
||||
it("handles no comma", () => expect(abbreviateInstructor("Staff")).toBe("Staff"));
|
||||
|
||||
// Progressive abbreviation with multiple given names
|
||||
it("abbreviates trailing given names first", () =>
|
||||
expect(abbreviateInstructor("Ramirez, Maria Elena")).toBe("Ramirez, Maria E."));
|
||||
it("abbreviates all given names when needed", () =>
|
||||
expect(abbreviateInstructor("Ramirez, Maria Elena", 16)).toBe("Ramirez, M. E."));
|
||||
it("falls back to first initial only", () =>
|
||||
expect(abbreviateInstructor("Ramirez, Maria Elena", 12)).toBe("Ramirez, M."));
|
||||
|
||||
// Single given name that exceeds budget
|
||||
it("abbreviates single given name when over budget", () =>
|
||||
expect(abbreviateInstructor("Bartholomew, Christopher", 18)).toBe("Bartholomew, C."));
|
||||
|
||||
// Respects custom maxLen
|
||||
it("keeps full name when within custom budget", () =>
|
||||
expect(abbreviateInstructor("Ramirez, Maria Elena", 30)).toBe("Ramirez, Maria Elena"));
|
||||
it("always abbreviates when budget is tiny", () =>
|
||||
expect(abbreviateInstructor("Heaps, John", 5)).toBe("Heaps, J."));
|
||||
});
|
||||
|
||||
describe("getPrimaryInstructor", () => {
|
||||
it("returns primary instructor", () => {
|
||||
const instructors: InstructorResponse[] = [
|
||||
{
|
||||
bannerId: "1",
|
||||
displayName: "A",
|
||||
email: null,
|
||||
isPrimary: false,
|
||||
rmpRating: null,
|
||||
rmpNumRatings: null,
|
||||
},
|
||||
{
|
||||
bannerId: "2",
|
||||
displayName: "B",
|
||||
email: null,
|
||||
isPrimary: true,
|
||||
rmpRating: null,
|
||||
rmpNumRatings: null,
|
||||
},
|
||||
];
|
||||
expect(getPrimaryInstructor(instructors)?.displayName).toBe("B");
|
||||
});
|
||||
it("returns first instructor when no primary", () => {
|
||||
const instructors: InstructorResponse[] = [
|
||||
{
|
||||
bannerId: "1",
|
||||
displayName: "A",
|
||||
email: null,
|
||||
isPrimary: false,
|
||||
rmpRating: null,
|
||||
rmpNumRatings: null,
|
||||
},
|
||||
];
|
||||
expect(getPrimaryInstructor(instructors)?.displayName).toBe("A");
|
||||
});
|
||||
it("returns undefined for empty array", () => {
|
||||
expect(getPrimaryInstructor([])).toBeUndefined();
|
||||
});
|
||||
});
|
||||
|
||||
describe("formatCreditHours", () => {
|
||||
it("returns creditHours when set", () => {
|
||||
expect(
|
||||
formatCreditHours({
|
||||
creditHours: 3,
|
||||
creditHourLow: null,
|
||||
creditHourHigh: null,
|
||||
} as CourseResponse)
|
||||
).toBe("3");
|
||||
});
|
||||
it("returns range when variable", () => {
|
||||
expect(
|
||||
formatCreditHours({
|
||||
creditHours: null,
|
||||
creditHourLow: 1,
|
||||
creditHourHigh: 3,
|
||||
} as CourseResponse)
|
||||
).toBe("1–3");
|
||||
});
|
||||
it("returns dash when no credit info", () => {
|
||||
expect(
|
||||
formatCreditHours({
|
||||
creditHours: null,
|
||||
creditHourLow: null,
|
||||
creditHourHigh: null,
|
||||
} as CourseResponse)
|
||||
).toBe("—");
|
||||
});
|
||||
});
|
||||
|
||||
describe("isMeetingTimeTBA", () => {
|
||||
it("returns true when no days set", () => {
|
||||
expect(isMeetingTimeTBA(makeMeetingTime())).toBe(true);
|
||||
});
|
||||
it("returns false when any day is set", () => {
|
||||
expect(isMeetingTimeTBA(makeMeetingTime({ monday: true }))).toBe(false);
|
||||
});
|
||||
it("returns false when multiple days set", () => {
|
||||
expect(isMeetingTimeTBA(makeMeetingTime({ tuesday: true, thursday: true }))).toBe(false);
|
||||
});
|
||||
});
|
||||
|
||||
describe("isTimeTBA", () => {
|
||||
it("returns true when begin_time is null", () => {
|
||||
expect(isTimeTBA(makeMeetingTime())).toBe(true);
|
||||
});
|
||||
it("returns true when begin_time is empty", () => {
|
||||
expect(isTimeTBA(makeMeetingTime({ begin_time: "" }))).toBe(true);
|
||||
});
|
||||
it("returns true when begin_time is short", () => {
|
||||
expect(isTimeTBA(makeMeetingTime({ begin_time: "09" }))).toBe(true);
|
||||
});
|
||||
it("returns false when begin_time is valid", () => {
|
||||
expect(isTimeTBA(makeMeetingTime({ begin_time: "0900" }))).toBe(false);
|
||||
});
|
||||
});
|
||||
|
||||
describe("formatDate", () => {
|
||||
it("formats standard date", () => {
|
||||
expect(formatDate("2024-08-26")).toBe("August 26, 2024");
|
||||
});
|
||||
it("formats December date", () => {
|
||||
expect(formatDate("2024-12-12")).toBe("December 12, 2024");
|
||||
});
|
||||
it("formats January 1st", () => {
|
||||
expect(formatDate("2026-01-01")).toBe("January 1, 2026");
|
||||
});
|
||||
it("formats MM/DD/YYYY date", () => {
|
||||
expect(formatDate("01/20/2026")).toBe("January 20, 2026");
|
||||
});
|
||||
it("formats MM/DD/YYYY with May", () => {
|
||||
expect(formatDate("05/13/2026")).toBe("May 13, 2026");
|
||||
});
|
||||
it("returns original string for invalid input", () => {
|
||||
expect(formatDate("bad-date")).toBe("bad-date");
|
||||
});
|
||||
});
|
||||
|
||||
describe("formatMeetingDaysLong", () => {
|
||||
it("returns full plural for single day", () => {
|
||||
expect(formatMeetingDaysLong(makeMeetingTime({ thursday: true }))).toBe("Thursdays");
|
||||
});
|
||||
it("returns full plural for Monday only", () => {
|
||||
expect(formatMeetingDaysLong(makeMeetingTime({ monday: true }))).toBe("Mondays");
|
||||
});
|
||||
it("returns semi-abbreviated for multiple days", () => {
|
||||
expect(
|
||||
formatMeetingDaysLong(makeMeetingTime({ monday: true, wednesday: true, friday: true }))
|
||||
).toBe("Mon, Wed, Fri");
|
||||
});
|
||||
it("returns semi-abbreviated for TR", () => {
|
||||
expect(formatMeetingDaysLong(makeMeetingTime({ tuesday: true, thursday: true }))).toBe(
|
||||
"Tue, Thur"
|
||||
);
|
||||
});
|
||||
it("returns empty string when no days", () => {
|
||||
expect(formatMeetingDaysLong(makeMeetingTime())).toBe("");
|
||||
});
|
||||
});
|
||||
|
||||
describe("formatDateShort", () => {
|
||||
it("formats YYYY-MM-DD to short", () => {
|
||||
expect(formatDateShort("2024-08-26")).toBe("Aug 26, 2024");
|
||||
});
|
||||
it("formats MM/DD/YYYY to short", () => {
|
||||
expect(formatDateShort("12/12/2024")).toBe("Dec 12, 2024");
|
||||
});
|
||||
it("returns original for invalid", () => {
|
||||
expect(formatDateShort("bad")).toBe("bad");
|
||||
});
|
||||
});
|
||||
|
||||
describe("formatMeetingDaysVerbose", () => {
|
||||
it("returns plural for single day", () => {
|
||||
expect(formatMeetingDaysVerbose(makeMeetingTime({ thursday: true }))).toBe("Thursdays");
|
||||
});
|
||||
it("joins two days with ampersand", () => {
|
||||
expect(formatMeetingDaysVerbose(makeMeetingTime({ tuesday: true, thursday: true }))).toBe(
|
||||
"Tuesdays & Thursdays"
|
||||
);
|
||||
});
|
||||
it("uses Oxford-style ampersand for 3+ days", () => {
|
||||
expect(
|
||||
formatMeetingDaysVerbose(makeMeetingTime({ monday: true, wednesday: true, friday: true }))
|
||||
).toBe("Mondays, Wednesdays & Fridays");
|
||||
});
|
||||
it("returns empty string when no days", () => {
|
||||
expect(formatMeetingDaysVerbose(makeMeetingTime())).toBe("");
|
||||
});
|
||||
});
|
||||
|
||||
describe("formatMeetingTimeTooltip", () => {
|
||||
it("formats full tooltip with location and dates", () => {
|
||||
const mt = makeMeetingTime({
|
||||
tuesday: true,
|
||||
thursday: true,
|
||||
begin_time: "1615",
|
||||
end_time: "1730",
|
||||
building_description: "Main Hall",
|
||||
room: "2.206",
|
||||
});
|
||||
expect(formatMeetingTimeTooltip(mt)).toBe(
|
||||
"Tuesdays & Thursdays, 4:15–5:30 PM\nMain Hall 2.206, Aug 26, 2024 – Dec 12, 2024"
|
||||
);
|
||||
});
|
||||
it("handles TBA days and times", () => {
|
||||
expect(formatMeetingTimeTooltip(makeMeetingTime())).toBe("TBA\nAug 26, 2024 – Dec 12, 2024");
|
||||
});
|
||||
it("handles days with TBA times", () => {
|
||||
expect(formatMeetingTimeTooltip(makeMeetingTime({ monday: true }))).toBe(
|
||||
"Mondays, TBA\nAug 26, 2024 – Dec 12, 2024"
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe("formatMeetingTimesTooltip", () => {
|
||||
it("returns TBA for empty array", () => {
|
||||
expect(formatMeetingTimesTooltip([])).toBe("TBA");
|
||||
});
|
||||
it("joins multiple meetings with blank line", () => {
|
||||
const mts = [
|
||||
makeMeetingTime({
|
||||
monday: true,
|
||||
wednesday: true,
|
||||
friday: true,
|
||||
begin_time: "0900",
|
||||
end_time: "0950",
|
||||
}),
|
||||
makeMeetingTime({
|
||||
thursday: true,
|
||||
begin_time: "1300",
|
||||
end_time: "1400",
|
||||
building_description: "Lab",
|
||||
room: "101",
|
||||
}),
|
||||
];
|
||||
const result = formatMeetingTimesTooltip(mts);
|
||||
expect(result).toContain("Mondays, Wednesdays & Fridays, 9:00–9:50 AM");
|
||||
expect(result).toContain("Thursdays, 1:00–2:00 PM\nLab 101");
|
||||
expect(result).toContain("\n\n");
|
||||
});
|
||||
});
|
||||
@@ -0,0 +1,379 @@
|
||||
import type { DbMeetingTime, CourseResponse, InstructorResponse } from "$lib/api";
|
||||
|
||||
/** Convert "0900" to "9:00 AM" */
|
||||
export function formatTime(time: string | null): string {
|
||||
if (!time || time.length !== 4) return "TBA";
|
||||
const hours = parseInt(time.slice(0, 2), 10);
|
||||
const minutes = time.slice(2);
|
||||
const period = hours >= 12 ? "PM" : "AM";
|
||||
const display = hours > 12 ? hours - 12 : hours === 0 ? 12 : hours;
|
||||
return `${display}:${minutes} ${period}`;
|
||||
}
|
||||
|
||||
/**
|
||||
* Compact day abbreviation for table cells.
|
||||
*
|
||||
* Single day → 3-letter: "Mon", "Thu"
|
||||
* Multi-day → concatenated codes: "MWF", "TTh", "MTWTh", "TSa"
|
||||
*
|
||||
* Codes use single letters where unambiguous (M/T/W/F) and
|
||||
* two letters where needed (Th/Sa/Su).
|
||||
*/
|
||||
export function formatMeetingDays(mt: DbMeetingTime): string {
|
||||
const dayDefs: [boolean, string, string][] = [
|
||||
[mt.monday, "M", "Mon"],
|
||||
[mt.tuesday, "T", "Tue"],
|
||||
[mt.wednesday, "W", "Wed"],
|
||||
[mt.thursday, "Th", "Thu"],
|
||||
[mt.friday, "F", "Fri"],
|
||||
[mt.saturday, "Sa", "Sat"],
|
||||
[mt.sunday, "Su", "Sun"],
|
||||
];
|
||||
const active = dayDefs.filter(([a]) => a);
|
||||
if (active.length === 0) return "";
|
||||
if (active.length === 1) return active[0][2];
|
||||
return active.map(([, code]) => code).join("");
|
||||
}
|
||||
|
||||
/** Longer day names for detail view: single day → "Thursdays", multiple → "Mon, Wed, Fri" */
|
||||
export function formatMeetingDaysLong(mt: DbMeetingTime): string {
|
||||
const days: [boolean, string, string][] = [
|
||||
[mt.monday, "Mon", "Mondays"],
|
||||
[mt.tuesday, "Tue", "Tuesdays"],
|
||||
[mt.wednesday, "Wed", "Wednesdays"],
|
||||
[mt.thursday, "Thur", "Thursdays"],
|
||||
[mt.friday, "Fri", "Fridays"],
|
||||
[mt.saturday, "Sat", "Saturdays"],
|
||||
[mt.sunday, "Sun", "Sundays"],
|
||||
];
|
||||
const active = days.filter(([a]) => a);
|
||||
if (active.length === 0) return "";
|
||||
if (active.length === 1) return active[0][2];
|
||||
return active.map(([, short]) => short).join(", ");
|
||||
}
|
||||
|
||||
/**
|
||||
* Format a time range with smart AM/PM elision.
|
||||
*
|
||||
* Same period: "9:00–9:50 AM"
|
||||
* Cross-period: "11:30 AM–12:20 PM"
|
||||
* Missing: "TBA"
|
||||
*/
|
||||
export function formatTimeRange(begin: string | null, end: string | null): string {
|
||||
if (!begin || begin.length !== 4 || !end || end.length !== 4) return "TBA";
|
||||
|
||||
const bHours = parseInt(begin.slice(0, 2), 10);
|
||||
const eHours = parseInt(end.slice(0, 2), 10);
|
||||
const bPeriod = bHours >= 12 ? "PM" : "AM";
|
||||
const ePeriod = eHours >= 12 ? "PM" : "AM";
|
||||
|
||||
const bDisplay = bHours > 12 ? bHours - 12 : bHours === 0 ? 12 : bHours;
|
||||
const eDisplay = eHours > 12 ? eHours - 12 : eHours === 0 ? 12 : eHours;
|
||||
|
||||
const endStr = `${eDisplay}:${end.slice(2)} ${ePeriod}`;
|
||||
if (bPeriod === ePeriod) {
|
||||
return `${bDisplay}:${begin.slice(2)}–${endStr}`;
|
||||
}
|
||||
return `${bDisplay}:${begin.slice(2)} ${bPeriod}–${endStr}`;
|
||||
}
|
||||
|
||||
/** Condensed meeting time: "MWF 9:00–9:50 AM" */
|
||||
export function formatMeetingTime(mt: DbMeetingTime): string {
|
||||
const days = formatMeetingDays(mt);
|
||||
if (!days) return "TBA";
|
||||
const range = formatTimeRange(mt.begin_time, mt.end_time);
|
||||
if (range === "TBA") return `${days} TBA`;
|
||||
return `${days} ${range}`;
|
||||
}
|
||||
|
||||
/**
|
||||
* Progressively abbreviate an instructor name to fit within a character budget.
|
||||
*
|
||||
* Tries each level until the result fits `maxLen`:
|
||||
* 1. Full name: "Ramirez, Maria Elena"
|
||||
* 2. Abbreviate trailing given names: "Ramirez, Maria E."
|
||||
* 3. Abbreviate all given names: "Ramirez, M. E."
|
||||
* 4. First initial only: "Ramirez, M."
|
||||
*
|
||||
* Names without a comma (e.g. "Staff") are returned as-is.
|
||||
*/
|
||||
export function abbreviateInstructor(name: string, maxLen: number = 18): string {
|
||||
if (name.length <= maxLen) return name;
|
||||
|
||||
const commaIdx = name.indexOf(", ");
|
||||
if (commaIdx === -1) return name;
|
||||
|
||||
const last = name.slice(0, commaIdx);
|
||||
const parts = name.slice(commaIdx + 2).split(" ");
|
||||
|
||||
// Level 2: abbreviate trailing given names, keep first given name intact
|
||||
// "Maria Elena" → "Maria E."
|
||||
if (parts.length > 1) {
|
||||
const abbreviated = [parts[0], ...parts.slice(1).map((p) => `${p[0]}.`)].join(" ");
|
||||
const result = `${last}, ${abbreviated}`;
|
||||
if (result.length <= maxLen) return result;
|
||||
}
|
||||
|
||||
// Level 3: abbreviate all given names
|
||||
// "Maria Elena" → "M. E."
|
||||
if (parts.length > 1) {
|
||||
const allInitials = parts.map((p) => `${p[0]}.`).join(" ");
|
||||
const result = `${last}, ${allInitials}`;
|
||||
if (result.length <= maxLen) return result;
|
||||
}
|
||||
|
||||
// Level 4: first initial only
|
||||
// "Maria Elena" → "M." or "John" → "J."
|
||||
return `${last}, ${parts[0][0]}.`;
|
||||
}
|
||||
/** Get primary instructor from a course, or first instructor */
export function getPrimaryInstructor(
  instructors: InstructorResponse[]
): InstructorResponse | undefined {
  return instructors.find((i) => i.isPrimary) ?? instructors[0];
}

/** Check if a meeting time has no scheduled days */
export function isMeetingTimeTBA(mt: DbMeetingTime): boolean {
  return (
    !mt.monday &&
    !mt.tuesday &&
    !mt.wednesday &&
    !mt.thursday &&
    !mt.friday &&
    !mt.saturday &&
    !mt.sunday
  );
}

/** Check if a meeting time has no begin/end times */
export function isTimeTBA(mt: DbMeetingTime): boolean {
  return !mt.begin_time || mt.begin_time.length !== 4;
}

/** Format a date string to "January 20, 2026". Accepts YYYY-MM-DD or MM/DD/YYYY. */
export function formatDate(dateStr: string): string {
  let year: number, month: number, day: number;
  if (dateStr.includes("-")) {
    [year, month, day] = dateStr.split("-").map(Number);
  } else if (dateStr.includes("/")) {
    [month, day, year] = dateStr.split("/").map(Number);
  } else {
    return dateStr;
  }
  if (!year || !month || !day) return dateStr;
  const date = new Date(year, month - 1, day);
  return date.toLocaleDateString("en-US", { year: "numeric", month: "long", day: "numeric" });
}

/** Short location string from first meeting time: "MH 2.206" or campus fallback */
export function formatLocation(course: CourseResponse): string | null {
  for (const mt of course.meetingTimes) {
    if (mt.building && mt.room) return `${mt.building} ${mt.room}`;
    if (mt.building) return mt.building;
  }
  return course.campus ?? null;
}

/** Longer location string using building description: "Main Hall 2.206" */
export function formatLocationLong(mt: DbMeetingTime): string | null {
  const name = mt.building_description ?? mt.building;
  if (!name) return null;
  return mt.room ? `${name} ${mt.room}` : name;
}

/** Format a date as "Aug 26, 2024". Accepts YYYY-MM-DD or MM/DD/YYYY. */
export function formatDateShort(dateStr: string): string {
  let year: number, month: number, day: number;
  if (dateStr.includes("-")) {
    [year, month, day] = dateStr.split("-").map(Number);
  } else if (dateStr.includes("/")) {
    [month, day, year] = dateStr.split("/").map(Number);
  } else {
    return dateStr;
  }
  if (!year || !month || !day) return dateStr;
  const date = new Date(year, month - 1, day);
  return date.toLocaleDateString("en-US", { year: "numeric", month: "short", day: "numeric" });
}

/**
 * Verbose day names for tooltips: "Tuesdays & Thursdays", "Mondays, Wednesdays & Fridays".
 * Single day → plural: "Thursdays".
 */
export function formatMeetingDaysVerbose(mt: DbMeetingTime): string {
  const dayDefs: [boolean, string][] = [
    [mt.monday, "Mondays"],
    [mt.tuesday, "Tuesdays"],
    [mt.wednesday, "Wednesdays"],
    [mt.thursday, "Thursdays"],
    [mt.friday, "Fridays"],
    [mt.saturday, "Saturdays"],
    [mt.sunday, "Sundays"],
  ];
  const active = dayDefs.filter(([a]) => a).map(([, name]) => name);
  if (active.length === 0) return "";
  if (active.length === 1) return active[0];
  return active.slice(0, -1).join(", ") + " & " + active[active.length - 1];
}

/**
 * Full verbose tooltip for a single meeting time:
 * "Tuesdays & Thursdays, 4:15–5:30 PM\nMain Hall 2.206, Aug 26 – Dec 12, 2024"
 */
export function formatMeetingTimeTooltip(mt: DbMeetingTime): string {
  const days = formatMeetingDaysVerbose(mt);
  const range = formatTimeRange(mt.begin_time, mt.end_time);
  let line1: string;
  if (!days && range === "TBA") {
    line1 = "TBA";
  } else if (!days) {
    line1 = range;
  } else if (range === "TBA") {
    line1 = `${days}, TBA`;
  } else {
    line1 = `${days}, ${range}`;
  }

  const parts = [line1];

  const loc = formatLocationLong(mt);
  const dateRange =
    mt.start_date && mt.end_date
      ? `${formatDateShort(mt.start_date)} – ${formatDateShort(mt.end_date)}`
      : null;

  if (loc && dateRange) {
    parts.push(`${loc}, ${dateRange}`);
  } else if (loc) {
    parts.push(loc);
  } else if (dateRange) {
    parts.push(dateRange);
  }

  return parts.join("\n");
}

/** Full verbose tooltip for all meeting times on a course, newline-separated. */
export function formatMeetingTimesTooltip(meetingTimes: DbMeetingTime[]): string {
  if (meetingTimes.length === 0) return "TBA";
  return meetingTimes.map(formatMeetingTimeTooltip).join("\n\n");
}

/**
 * Delivery concern category for visual accent on location cells.
 * - "online": fully online with no physical location (OA, OS, OH without INT building)
 * - "internet": internet campus with INT building code
 * - "hybrid": mix of online and in-person (HB, H1, H2)
 * - "off-campus": in-person but not on Main Campus
 * - null: normal in-person on main campus (no accent)
 */
export type DeliveryConcern = "online" | "internet" | "hybrid" | "off-campus" | null;

const ONLINE_METHODS = new Set(["OA", "OS", "OH"]);
const HYBRID_METHODS = new Set(["HB", "H1", "H2"]);
const MAIN_CAMPUS = "11";
const ONLINE_CAMPUSES = new Set(["9", "ONL"]);

export function getDeliveryConcern(course: CourseResponse): DeliveryConcern {
  const method = course.instructionalMethod;
  if (method && ONLINE_METHODS.has(method)) {
    const hasIntBuilding = course.meetingTimes.some((mt: DbMeetingTime) => mt.building === "INT");
    return hasIntBuilding ? "internet" : "online";
  }
  if (method && HYBRID_METHODS.has(method)) return "hybrid";
  if (course.campus && course.campus !== MAIN_CAMPUS && !ONLINE_CAMPUSES.has(course.campus)) {
    return "off-campus";
  }
  return null;
}
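// Illustrative walk-through (not part of the diff): a section whose
// instructionalMethod is "OA" and whose only meeting time sits in building "INT"
// is classified as "internet"; the same method with no INT meeting time is
// "online". A section with method "HB" is "hybrid" regardless of campus, and an
// in-person section on any campus code other than "11", "9", or "ONL" comes back
// as "off-campus".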
/** Border accent color for each delivery concern type. */
export function concernAccentColor(concern: DeliveryConcern): string | null {
  switch (concern) {
    case "online":
      return "#3b82f6"; // blue-500
    case "internet":
      return "#06b6d4"; // cyan-500
    case "hybrid":
      return "#a855f7"; // purple-500
    case "off-campus":
      return "#f59e0b"; // amber-500
    default:
      return null;
  }
}

/**
 * Location display text for the table cell.
 * Falls back to "Online" for online courses instead of showing a dash.
 */
export function formatLocationDisplay(course: CourseResponse): string | null {
  const loc = formatLocation(course);
  if (loc) return loc;
  const concern = getDeliveryConcern(course);
  if (concern === "online") return "Online";
  return null;
}

/** Tooltip text for the location column: long-form location + delivery note */
export function formatLocationTooltip(course: CourseResponse): string | null {
  const parts: string[] = [];

  for (const mt of course.meetingTimes) {
    const loc = formatLocationLong(mt);
    if (loc && !parts.includes(loc)) parts.push(loc);
  }

  const locationLine = parts.length > 0 ? parts.join(", ") : null;

  const concern = getDeliveryConcern(course);
  let deliveryNote: string | null = null;
  if (concern === "online") deliveryNote = "Online";
  else if (concern === "internet") deliveryNote = "Internet";
  else if (concern === "hybrid") deliveryNote = "Hybrid";
  else if (concern === "off-campus") deliveryNote = "Off-campus";

  if (locationLine && deliveryNote) return `${locationLine}\n${deliveryNote}`;
  if (locationLine) return locationLine;
  if (deliveryNote) return deliveryNote;
  return null;
}

/** Number of open seats in a course section */
export function openSeats(course: CourseResponse): number {
  return Math.max(0, course.maxEnrollment - course.enrollment);
}

/** Text color class for seat availability: red (full), yellow (low), green (open) */
export function seatsColor(course: CourseResponse): string {
  const open = openSeats(course);
  if (open === 0) return "text-status-red";
  if (open <= 5) return "text-yellow-500";
  return "text-status-green";
}

/** Background dot color class for seat availability */
export function seatsDotColor(course: CourseResponse): string {
  const open = openSeats(course);
  if (open === 0) return "bg-red-500";
  if (open <= 5) return "bg-yellow-500";
  return "bg-green-500";
}

/** Text color class for a RateMyProfessors rating */
export function ratingColor(rating: number): string {
  if (rating >= 4.0) return "text-status-green";
  if (rating >= 3.0) return "text-yellow-500";
  return "text-status-red";
}

/** Format credit hours display */
export function formatCreditHours(course: CourseResponse): string {
  if (course.creditHours != null) return String(course.creditHours);
  if (course.creditHourLow != null && course.creditHourHigh != null) {
    return `${course.creditHourLow}–${course.creditHourHigh}`;
  }
  return "—";
}
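// Illustrative usage (not part of the diff): for a section with maxEnrollment 30
// and enrollment 28, openSeats(...) is 2, so seatsColor(...) picks the low-seat
// "text-yellow-500" class and seatsDotColor(...) picks "bg-yellow-500"; once
// enrollment reaches or exceeds 30, openSeats clamps to 0 and the red classes are
// used instead.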
@@ -0,0 +1,36 @@
class ThemeStore {
  isDark = $state<boolean>(false);
  private initialized = false;

  init() {
    if (this.initialized || typeof window === "undefined") return;
    this.initialized = true;

    const stored = localStorage.getItem("theme");
    if (stored === "light" || stored === "dark") {
      this.isDark = stored === "dark";
    } else {
      this.isDark = window.matchMedia("(prefers-color-scheme: dark)").matches;
    }

    this.updateDOMClass();
  }

  toggle() {
    this.isDark = !this.isDark;
    localStorage.setItem("theme", this.isDark ? "dark" : "light");
    this.updateDOMClass();
  }

  private updateDOMClass() {
    if (typeof document === "undefined") return;

    if (this.isDark) {
      document.documentElement.classList.add("dark");
    } else {
      document.documentElement.classList.remove("dark");
    }
  }
}

export const themeStore = new ThemeStore();
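// Illustrative usage (not part of the diff): a component would typically call
// themeStore.init() once inside onMount (as the root +layout.svelte below does)
// and wire a button to themeStore.toggle(); reading themeStore.isDark in markup
// stays reactive because it is backed by a $state rune.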
@@ -0,0 +1,69 @@
/**
 * Relative time formatting with adaptive refresh intervals.
 *
 * The key insight: a timestamp showing "3 seconds ago" needs to update every second,
 * but "2 hours ago" only needs to update every minute. This module provides both
 * the formatted string and the optimal interval until the next meaningful change.
 */

interface RelativeTimeResult {
  /** The human-readable relative time string (e.g. "3 seconds ago") */
  text: string;
  /** Milliseconds until the displayed text would change */
  nextUpdateMs: number;
}

/**
 * Compute a relative time string and the interval until it next changes.
 *
 * Granularity tiers:
 * - < 60s: per-second ("1 second ago", "45 seconds ago")
 * - < 60m: per-minute ("1 minute ago", "12 minutes ago")
 * - < 24h: per-hour ("1 hour ago", "5 hours ago")
 * - >= 24h: per-day ("1 day ago", "3 days ago")
 */
export function relativeTime(date: Date, ref: Date): RelativeTimeResult {
  const diffMs = ref.getTime() - date.getTime();
  const seconds = Math.round(diffMs / 1000);

  if (seconds < 1) {
    return { text: "just now", nextUpdateMs: 1000 - (diffMs % 1000) || 1000 };
  }

  if (seconds < 60) {
    const remainder = 1000 - (diffMs % 1000);
    return {
      text: seconds === 1 ? "1 second ago" : `${seconds} seconds ago`,
      nextUpdateMs: remainder || 1000,
    };
  }

  const minutes = Math.floor(seconds / 60);
  if (minutes < 60) {
    // Update when the next minute boundary is crossed
    const msIntoCurrentMinute = diffMs % 60_000;
    const msUntilNextMinute = 60_000 - msIntoCurrentMinute;
    return {
      text: minutes === 1 ? "1 minute ago" : `${minutes} minutes ago`,
      nextUpdateMs: msUntilNextMinute || 60_000,
    };
  }

  const hours = Math.floor(minutes / 60);
  if (hours < 24) {
    const msIntoCurrentHour = diffMs % 3_600_000;
    const msUntilNextHour = 3_600_000 - msIntoCurrentHour;
    return {
      text: hours === 1 ? "1 hour ago" : `${hours} hours ago`,
      nextUpdateMs: msUntilNextHour || 3_600_000,
    };
  }

  const days = Math.floor(hours / 24);
  const msIntoCurrentDay = diffMs % 86_400_000;
  const msUntilNextDay = 86_400_000 - msIntoCurrentDay;
  return {
    text: days === 1 ? "1 day ago" : `${days} days ago`,
    nextUpdateMs: msUntilNextDay || 86_400_000,
  };
}
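// Illustrative usage (not part of the diff): with a reference time 90 seconds
// after `date`, relativeTime(date, ref) returns
// { text: "1 minute ago", nextUpdateMs: 30_000 }, so a caller can wait 30 s
// until "2 minutes ago" becomes accurate; the status page's scheduleNowTick()
// further down uses nextUpdateMs in exactly this way.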
@@ -0,0 +1,10 @@
import { clsx, type ClassValue } from "clsx";
import { twMerge } from "tailwind-merge";

export function cn(...inputs: ClassValue[]) {
  return twMerge(clsx(inputs));
}

/** Shared tooltip content styling for bits-ui Tooltip.Content */
export const tooltipContentClass =
  "z-50 bg-card text-card-foreground text-xs border border-border rounded-md px-2.5 py-1.5 shadow-md max-w-72";
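// Illustrative usage (not part of the diff): cn("px-2 text-sm", isActive && "px-4")
// first flattens the inputs with clsx (dropping the falsy entry when isActive is
// false) and then lets tailwind-merge resolve conflicts, so with isActive true the
// result is "text-sm px-4" rather than both padding utilities.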
@@ -1,44 +0,0 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<svg id="Layer_1"
|
||||
xmlns="http://www.w3.org/2000/svg" version="1.1" viewBox="0 0 841.9 595.3">
|
||||
<!-- Generator: Adobe Illustrator 29.3.0, SVG Export Plug-In . SVG Version: 2.1.0 Build 146) -->
|
||||
<defs>
|
||||
<style>
|
||||
.st0 {
|
||||
fill: #9ae7fc;
|
||||
}
|
||||
|
||||
.st1 {
|
||||
fill: #61dafb;
|
||||
}
|
||||
</style>
|
||||
</defs>
|
||||
<g>
|
||||
<path class="st1" d="M666.3,296.5c0-32.5-40.7-63.3-103.1-82.4,14.4-63.6,8-114.2-20.2-130.4-6.5-3.8-14.1-5.6-22.4-5.6v22.3c4.6,0,8.3.9,11.4,2.6,13.6,7.8,19.5,37.5,14.9,75.7-1.1,9.4-2.9,19.3-5.1,29.4-19.6-4.8-41-8.5-63.5-10.9-13.5-18.5-27.5-35.3-41.6-50,32.6-30.3,63.2-46.9,84-46.9v-22.3c-27.5,0-63.5,19.6-99.9,53.6-36.4-33.8-72.4-53.2-99.9-53.2v22.3c20.7,0,51.4,16.5,84,46.6-14,14.7-28,31.4-41.3,49.9-22.6,2.4-44,6.1-63.6,11-2.3-10-4-19.7-5.2-29-4.7-38.2,1.1-67.9,14.6-75.8,3-1.8,6.9-2.6,11.5-2.6v-22.3c-8.4,0-16,1.8-22.6,5.6-28.1,16.2-34.4,66.7-19.9,130.1-62.2,19.2-102.7,49.9-102.7,82.3s40.7,63.3,103.1,82.4c-14.4,63.6-8,114.2,20.2,130.4,6.5,3.8,14.1,5.6,22.5,5.6,27.5,0,63.5-19.6,99.9-53.6,36.4,33.8,72.4,53.2,99.9,53.2,8.4,0,16-1.8,22.6-5.6,28.1-16.2,34.4-66.7,19.9-130.1,62-19.1,102.5-49.9,102.5-82.3zm-130.2-66.7c-3.7,12.9-8.3,26.2-13.5,39.5-4.1-8-8.4-16-13.1-24-4.6-8-9.5-15.8-14.4-23.4,14.2,2.1,27.9,4.7,41,7.9zm-45.8,106.5c-7.8,13.5-15.8,26.3-24.1,38.2-14.9,1.3-30,2-45.2,2s-30.2-.7-45-1.9c-8.3-11.9-16.4-24.6-24.2-38-7.6-13.1-14.5-26.4-20.8-39.8,6.2-13.4,13.2-26.8,20.7-39.9,7.8-13.5,15.8-26.3,24.1-38.2,14.9-1.3,30-2,45.2-2s30.2.7,45,1.9c8.3,11.9,16.4,24.6,24.2,38,7.6,13.1,14.5,26.4,20.8,39.8-6.3,13.4-13.2,26.8-20.7,39.9zm32.3-13c5.4,13.4,10,26.8,13.8,39.8-13.1,3.2-26.9,5.9-41.2,8,4.9-7.7,9.8-15.6,14.4-23.7,4.6-8,8.9-16.1,13-24.1zm-101.4,106.7c-9.3-9.6-18.6-20.3-27.8-32,9,.4,18.2.7,27.5.7s18.7-.2,27.8-.7c-9,11.7-18.3,22.4-27.5,32zm-74.4-58.9c-14.2-2.1-27.9-4.7-41-7.9,3.7-12.9,8.3-26.2,13.5-39.5,4.1,8,8.4,16,13.1,24s9.5,15.8,14.4,23.4zm73.9-208.1c9.3,9.6,18.6,20.3,27.8,32-9-.4-18.2-.7-27.5-.7s-18.7.2-27.8.7c9-11.7,18.3-22.4,27.5-32zm-74,58.9c-4.9,7.7-9.8,15.6-14.4,23.7-4.6,8-8.9,16-13,24-5.4-13.4-10-26.8-13.8-39.8,13.1-3.1,26.9-5.8,41.2-7.9zm-90.5,125.2c-35.4-15.1-58.3-34.9-58.3-50.6s22.9-35.6,58.3-50.6c8.6-3.7,18-7,27.7-10.1,5.7,19.6,13.2,40,22.5,60.9-9.2,20.8-16.6,41.1-22.2,60.6-9.9-3.1-19.3-6.5-28-10.2zm53.8,142.9c-13.6-7.8-19.5-37.5-14.9-75.7,1.1-9.4,2.9-19.3,5.1-29.4,19.6,4.8,41,8.5,63.5,10.9,13.5,18.5,27.5,35.3,41.6,50-32.6,30.3-63.2,46.9-84,46.9-4.5-.1-8.3-1-11.3-2.7zm237.2-76.2c4.7,38.2-1.1,67.9-14.6,75.8-3,1.8-6.9,2.6-11.5,2.6-20.7,0-51.4-16.5-84-46.6,14-14.7,28-31.4,41.3-49.9,22.6-2.4,44-6.1,63.6-11,2.3,10.1,4.1,19.8,5.2,29.1zm38.5-66.7c-8.6,3.7-18,7-27.7,10.1-5.7-19.6-13.2-40-22.5-60.9,9.2-20.8,16.6-41.1,22.2-60.6,9.9,3.1,19.3,6.5,28.1,10.2,35.4,15.1,58.3,34.9,58.3,50.6,0,15.7-23,35.6-58.4,50.6zm-264.9-268.7z"/>
|
||||
<circle class="st1" cx="420.9" cy="296.5" r="45.7"/>
|
||||
<path class="st1" d="M520.5,78.1"/>
|
||||
</g>
|
||||
<circle class="st0" cx="420.8" cy="296.6" r="43"/>
|
||||
<path class="st1" d="M466.1,296.6c0,25-20.2,45.2-45.2,45.2s-45.2-20.2-45.2-45.2,20.2-45.2,45.2-45.2,45.2,20.2,45.2,45.2ZM386,295.6v-6.3c0-1.1,1.2-5.1,1.8-6.2,1-1.9,2.9-3.5,4.6-4.7l-3.4-3.4c4-3.6,9.4-3.7,13.7-.7,1.9-4.7,6.6-7.1,11.6-6.7l-.8,4.2c5.9.2,13.1,4.1,13.1,10.8s0,.5-.7.7c-1.7.3-3.4-.4-5-.6s-1.2-.4-1.2.3,2.5,4.1,3,5.5,1,3.5.8,5.3c-5.6-.8-10.5-3.2-14.8-6.7.3,2.6,4.1,21.7,5.3,21.9s.8-.6,1-1.1,1.3-6.3,1.3-6.7c0-1-1.7-1.8-2.2-2.8-1.2-2.7,1.3-4.7,3.7-3.3s5.2,6.2,7.5,7.3,13,1.4,14.8,3.3-2.9,4.6-1.5,7.6c6.7-2.6,13.5-3.3,20.6-2.5,3.1-9.7,3.1-20.3-.9-29.8-7.3,0-14.7-3.6-17.2-10.8-2.5-7.2-.7-8.6-1.3-9.3-.8-1-6.3.6-7.4-1.5s.3-1.1-.2-1.4-1.9-.6-2.6-.8c-26-6.4-51.3,15.7-49.7,42.1,0,1.6,1.6,10.3,2.4,11.1s4.8,0,6.3,0,3.7.3,5,.5c2.9.4,7.2,2.4,9.4,2.5s2.4-.8,2.7-2.4c.4-2.6.5-7.4.5-10.1s-1-7.8-1.3-11.6c-.9-.2-.7,0-.9.5-.7,1.3-1.1,3.2-1.9,4.8s-5.2,8.7-5.7,9-.7-.5-.8-.8c-1.6-3.5-2-7.9-1.9-11.8-.9-1-5.4,4.9-6.7,5.3l-.8-.4v-.3h-.2ZM455.6,276.4c1.1-1.2-6-8.9-7.2-10-3-2.7-5.4-4.5-3.5,1.4s5.7,7.8,10.6,8.5h.1ZM410.9,270.1c-.4-.5-6.1,2.9-5.5,4.6,1.9-1.3,5.9-1.7,5.5-4.6ZM400.4,276.4c-.3-2.4-6.3-2.7-7.2-1s1.6,1.4,1.9,1.4c1.8.3,3.5-.6,5.2-.4h.1ZM411.3,276.8c3.8,1.3,6.6,3.6,10.9,3.7s0-3-1.2-3.9c-2.2-1.7-5.1-2.4-7.8-2.4s-1.6-.3-1.4.4c2.8.6,7.3.7,8.4,3.8-2.3-.3-3.9-1.6-6.2-2s-2.5-.5-2.6.3h0ZM420.6,290.3c-.8-5.1-5.7-10.8-10.9-11.6s-1.3-.4-.8.5,4.7,3.2,5.7,4,4.5,4.2,2.1,3.8-8.4-7.8-9.4-6.7c.2.9,1.1,1.9,1.7,2.7,3,3.8,6.9,6.8,11.8,7.4h-.2ZM395.3,279.8c-5,1.1-6.9,6.3-6.7,11,.7.8,5-3.8,5.4-4.5s2.7-4.6,1.1-4-2.9,4.4-4.2,4.6.2-2.1.4-2.5c1.1-1.6,2.9-3.1,4-4.6h0ZM400.4,281.5c-.4-.5-2,1.3-2.3,1.7-2.9,3.9-2.6,10.2-1.5,14.8.8.2.8-.3,1.2-.7,3-3.8,5.5-10.5,4.5-15.4-2.1,3.1-3.1,7.3-3.6,11h-1.3c0-4,1.9-7.7,3-11.4h0ZM426.9,305.9c0-1.7-1.7-1.4-2.5-1.9s-1.3-1.9-3-1.4c1.3,2.1,3,3.2,5.5,3.4h0ZM417.2,308.5c7.6.7,5.5-1.9,1.4-5.5-1.3-.3-1.5,4.5-1.4,5.5ZM437,309.7c-3.5-.3-7.8-2-11.2-2.1s-1.3,0-1.9.7c4,1.3,8.4,1.7,12.1,4l1-2.5h0ZM420.5,312.8c-7.3,0-15.1,3.7-20.4,8.8s-4.8,5.3-4.8,6.2c0,1.8,8.6,6.2,10.5,6.8,12.1,4.8,27.5,3.5,38.2-4.2s3.1-2.7,0-6.2c-5.7-6.6-14.7-11.4-23.4-11.3h-.1ZM398.7,316.9c-1.4-1.4-5-1.9-7-2.1s-5.3-.3-6.9.6l13.9,1.4h0ZM456.9,314.8h-7.4c-.9,0-4.9,1.1-6,1.6s-.8.6,0,.5c2.4,0,5.1-1,7.6-1.3s3.5.2,5.1,0,1.3-.3.6-.8h0Z"/>
|
||||
<path class="st0" d="M386,295.6l.8.4c1.3-.3,5.8-6.2,6.7-5.3,0,3.9.3,8.3,1.9,11.8s0,1.2.8.8,5.1-7.8,5.7-9,1.3-3.5,1.9-4.8,0-.7.9-.5c.3,3.8,1.2,7.8,1.3,11.6s0,7.5-.5,10.1-1.1,2.4-2.7,2.4-6.5-2.1-9.4-2.5-3.7-.5-5-.5-5.4,1.1-6.3,0-2.2-9.5-2.4-11.1c-1.5-26.4,23.7-48.5,49.7-42.1s2.2.4,2.6.8,0,1,.2,1.4c1.1,2,6.5.5,7.4,1.5s.4,6.9,1.3,9.3c2.5,7.2,10,10.9,17.2,10.8,4,9.4,4,20.1.9,29.8-7.2-.7-13.9,0-20.6,2.5-1.3-3.1,4.1-5.1,1.5-7.6s-11.8-1.9-14.8-3.3-5.4-6.1-7.5-7.3-4.9.6-3.7,3.3,2.1,1.8,2.2,2.8-1,6.2-1.3,6.7-.3,1.3-1,1.1c-1.1-.3-5-19.3-5.3-21.9,4.3,3.5,9.2,5.9,14.8,6.7.2-1.9-.3-3.5-.8-5.3s-3-5.1-3-5.5c0-.8.9-.3,1.2-.3,1.6,0,3.3.8,5,.6s.7.3.7-.7c0-6.6-7.2-10.6-13.1-10.8l.8-4.2c-5.1-.3-9.6,2-11.6,6.7-4.3-3-9.8-3-13.7.7l3.4,3.4c-1.8,1.3-3.5,2.8-4.6,4.7s-1.8,5.1-1.8,6.2v6.6h.2ZM431.6,265c7.8,2.1,8.7-3.5.2-1.3l-.2,1.3ZM432.4,270.9c.3.6,6.4-.4,5.8-2.3s-4.6.6-5.7.6l-.2,1.7h.1ZM434.5,276c.8,1.2,5.7-1.8,5.5-2.7-.4-1.9-6.6,1.2-5.5,2.7ZM442.9,276.4c-.9-.9-5,2.8-4.6,4,.6,2.4,5.7-3,4.6-4ZM445.1,279.9c-.3.2-3.1,4.6-1.5,5s3.5-3.4,3.5-4-1.3-1.3-2-.9h0ZM448.9,287.4c2.1.8,3.8-5.1,2.3-5.5-1.9-.6-2.6,5.1-2.3,5.5ZM457.3,288.6c.5-1.7,1.1-4.7-1-5.5-1,.3-.6,3.9-.6,4.8l.3.5,1.3.2h0Z"/>
|
||||
<path class="st0" d="M455.6,276.4c-5-.8-9.1-3.6-10.6-8.5s.5-4,3.5-1.4,8.3,8.7,7.2,10h-.1Z"/>
|
||||
<path class="st0" d="M420.6,290.3c-4.9-.6-8.9-3.6-11.8-7.4s-1.5-1.8-1.7-2.7c1-1,8.5,6.6,9.4,6.7,2.4.4-1.8-3.5-2.1-3.8-1-.8-5.4-3.5-5.7-4-.4-.8.5-.5.8-.5,5.2.8,10.1,6.6,10.9,11.6h.2Z"/>
|
||||
<path class="st0" d="M400.4,281.5c-1.1,3.7-3,7.3-3,11.4h1.3c.5-3.7,1.5-7.8,3.6-11,1,4.8-1.5,11.6-4.5,15.4s-.4.8-1.2.7c-1.1-4.5-1.3-10.8,1.5-14.8s1.9-2.2,2.3-1.7h0Z"/>
|
||||
<path class="st0" d="M411.3,276.8c0-.8,2.1-.4,2.6-.3,2.4.4,4,1.7,6.2,2-1.2-3.1-5.7-3.2-8.4-3.8,0-.8.9-.4,1.4-.4,2.8,0,5.6.7,7.8,2.4,2.2,1.7,4,4,1.2,3.9-4.3,0-7.1-2.4-10.9-3.7h0Z"/>
|
||||
<path class="st0" d="M395.3,279.8c-1.1,1.6-3,3-4,4.6s-1.9,2.8-.4,2.5,2.8-4,4.2-4.6-.9,3.6-1.1,4c-.4.7-4.7,5.2-5.4,4.5-.2-4.6,1.8-9.9,6.7-11h0Z"/>
|
||||
<path class="st0" d="M437,309.7l-1,2.5c-3.6-2.3-8-2.8-12.1-4,.5-.7,1.1-.7,1.9-.7,3.4,0,7.8,1.8,11.2,2.1h0Z"/>
|
||||
<path class="st0" d="M417.2,308.5c0-1,0-5.8,1.4-5.5,4,3.5,6.1,6.2-1.4,5.5Z"/>
|
||||
<path class="st0" d="M400.4,276.4c-1.8-.3-3.5.7-5.2.4s-2.3-.8-1.9-1.4c.8-1.6,6.9-1.4,7.2,1h-.1Z"/>
|
||||
<path class="st0" d="M410.9,270.1c.4,3-3.6,3.3-5.5,4.6-.6-1.8,5-5.1,5.5-4.6Z"/>
|
||||
<path class="st0" d="M426.9,305.9c-2.5-.2-4.1-1.3-5.5-3.4,1.7-.4,2,.8,3,1.4s2.6.3,2.5,1.9h0Z"/>
|
||||
<path class="st1" d="M432.4,270.9l.2-1.7c1.1,0,5.1-2.2,5.7-.6s-5.5,2.9-5.8,2.3h-.1Z"/>
|
||||
<path class="st1" d="M431.6,265l.2-1.3c8.4-2.1,7.7,3.4-.2,1.3Z"/>
|
||||
<path class="st1" d="M434.5,276c-1.1-1.5,5.1-4.6,5.5-2.7s-4.6,4-5.5,2.7Z"/>
|
||||
<path class="st1" d="M442.9,276.4c1.1,1.1-4,6.4-4.6,4s3.7-4.9,4.6-4Z"/>
|
||||
<path class="st1" d="M445.1,279.9c.7-.4,2.1,0,2,.9s-2.4,4.4-3.5,4,1.3-4.8,1.5-5h0Z"/>
|
||||
<path class="st1" d="M448.9,287.4c-.3-.3.4-6.1,2.3-5.5,1.4.4-.2,6.2-2.3,5.5Z"/>
|
||||
<path class="st1" d="M457.3,288.6l-1.3-.2-.3-.5c0-.9-.4-4.6.6-4.8,2.1.8,1.5,3.8,1,5.5h0Z"/>
|
||||
<path class="st0" d="M420.5,312.8c8.9,0,17.9,4.7,23.4,11.3,5.6,6.6,3.8,3.5,0,6.2-10.7,7.7-26.1,9-38.2,4.2-1.9-.8-10.5-5.1-10.5-6.8s4-5.3,4.8-6.2c5.3-5,13.1-8.6,20.4-8.8h.1Z"/>
|
||||
<path class="st0" d="M398.7,316.9l-13.9-1.4c1.7-1,5-.8,6.9-.6s5.6.7,7,2.1h0Z"/>
|
||||
<path class="st0" d="M456.9,314.8c.7.5,0,.8-.6.8-1.6.2-3.5-.2-5.1,0-2.4.3-5.2,1.2-7.6,1.3s-1.1,0,0-.5,5.1-1.6,6-1.6h7.4,0Z"/>
|
||||
</svg>
|
||||
|
@@ -1,36 +0,0 @@
|
||||
import { StrictMode } from "react";
|
||||
import ReactDOM from "react-dom/client";
|
||||
import { RouterProvider, createRouter } from "@tanstack/react-router";
|
||||
|
||||
// Import the generated route tree
|
||||
import { routeTree } from "./routeTree.gen";
|
||||
|
||||
import "./styles.css";
|
||||
|
||||
// Create a new router instance
|
||||
const router = createRouter({
|
||||
routeTree,
|
||||
context: {},
|
||||
defaultPreload: "intent",
|
||||
scrollRestoration: true,
|
||||
defaultStructuralSharing: true,
|
||||
defaultPreloadStaleTime: 0,
|
||||
});
|
||||
|
||||
// Register the router instance for type safety
|
||||
declare module "@tanstack/react-router" {
|
||||
interface Register {
|
||||
router: typeof router;
|
||||
}
|
||||
}
|
||||
|
||||
// Render the app
|
||||
const rootElement = document.getElementById("app");
|
||||
if (rootElement && !rootElement.innerHTML) {
|
||||
const root = ReactDOM.createRoot(rootElement);
|
||||
root.render(
|
||||
<StrictMode>
|
||||
<RouterProvider router={router} />
|
||||
</StrictMode>
|
||||
);
|
||||
}
|
||||
@@ -1,59 +0,0 @@
|
||||
/* eslint-disable */
|
||||
|
||||
// @ts-nocheck
|
||||
|
||||
// noinspection JSUnusedGlobalSymbols
|
||||
|
||||
// This file was automatically generated by TanStack Router.
|
||||
// You should NOT make any changes in this file as it will be overwritten.
|
||||
// Additionally, you should also exclude this file from your linter and/or formatter to prevent it from being checked or modified.
|
||||
|
||||
import { Route as rootRouteImport } from './routes/__root'
|
||||
import { Route as IndexRouteImport } from './routes/index'
|
||||
|
||||
const IndexRoute = IndexRouteImport.update({
|
||||
id: '/',
|
||||
path: '/',
|
||||
getParentRoute: () => rootRouteImport,
|
||||
} as any)
|
||||
|
||||
export interface FileRoutesByFullPath {
|
||||
'/': typeof IndexRoute
|
||||
}
|
||||
export interface FileRoutesByTo {
|
||||
'/': typeof IndexRoute
|
||||
}
|
||||
export interface FileRoutesById {
|
||||
__root__: typeof rootRouteImport
|
||||
'/': typeof IndexRoute
|
||||
}
|
||||
export interface FileRouteTypes {
|
||||
fileRoutesByFullPath: FileRoutesByFullPath
|
||||
fullPaths: '/'
|
||||
fileRoutesByTo: FileRoutesByTo
|
||||
to: '/'
|
||||
id: '__root__' | '/'
|
||||
fileRoutesById: FileRoutesById
|
||||
}
|
||||
export interface RootRouteChildren {
|
||||
IndexRoute: typeof IndexRoute
|
||||
}
|
||||
|
||||
declare module '@tanstack/react-router' {
|
||||
interface FileRoutesByPath {
|
||||
'/': {
|
||||
id: '/'
|
||||
path: '/'
|
||||
fullPath: '/'
|
||||
preLoaderRoute: typeof IndexRouteImport
|
||||
parentRoute: typeof rootRouteImport
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const rootRouteChildren: RootRouteChildren = {
|
||||
IndexRoute: IndexRoute,
|
||||
}
|
||||
export const routeTree = rootRouteImport
|
||||
._addFileChildren(rootRouteChildren)
|
||||
._addFileTypes<FileRouteTypes>()
|
||||
@@ -0,0 +1,34 @@
|
||||
<script lang="ts">
|
||||
import "overlayscrollbars/overlayscrollbars.css";
|
||||
import "./layout.css";
|
||||
import { onMount } from "svelte";
|
||||
import { Tooltip } from "bits-ui";
|
||||
import ThemeToggle from "$lib/components/ThemeToggle.svelte";
|
||||
import { themeStore } from "$lib/stores/theme.svelte";
|
||||
import { useOverlayScrollbars } from "$lib/composables/useOverlayScrollbars.svelte";
|
||||
|
||||
let { children } = $props();
|
||||
|
||||
useOverlayScrollbars(() => document.body, {
|
||||
scrollbars: {
|
||||
autoHide: "leave",
|
||||
autoHideDelay: 800,
|
||||
},
|
||||
});
|
||||
|
||||
onMount(() => {
|
||||
themeStore.init();
|
||||
|
||||
requestAnimationFrame(() => {
|
||||
document.documentElement.classList.remove("no-transition");
|
||||
});
|
||||
});
|
||||
</script>
|
||||
|
||||
<Tooltip.Provider>
|
||||
<div class="fixed top-5 right-5 z-50">
|
||||
<ThemeToggle />
|
||||
</div>
|
||||
|
||||
{@render children()}
|
||||
</Tooltip.Provider>
|
||||
@@ -0,0 +1,2 @@
|
||||
export const prerender = true;
|
||||
export const ssr = false;
|
||||
@@ -0,0 +1,258 @@
|
||||
<script lang="ts">
|
||||
import { untrack } from "svelte";
|
||||
import { goto } from "$app/navigation";
|
||||
import {
|
||||
type Subject,
|
||||
type SearchResponse,
|
||||
type SortColumn,
|
||||
type SortDirection,
|
||||
client,
|
||||
} from "$lib/api";
|
||||
import type { SortingState } from "@tanstack/table-core";
|
||||
import SearchFilters from "$lib/components/SearchFilters.svelte";
|
||||
import SearchStatus, { type SearchMeta } from "$lib/components/SearchStatus.svelte";
|
||||
import CourseTable from "$lib/components/CourseTable.svelte";
|
||||
import Pagination from "$lib/components/Pagination.svelte";
|
||||
import Footer from "$lib/components/Footer.svelte";
|
||||
|
||||
let { data } = $props();
|
||||
|
||||
// Read initial state from URL params (intentionally captured once)
|
||||
const initialParams = untrack(() => new URLSearchParams(data.url.search));
|
||||
|
||||
// Filter state
|
||||
let selectedTerm = $state(untrack(() => initialParams.get("term") ?? data.terms[0]?.code ?? ""));
|
||||
let selectedSubjects: string[] = $state(untrack(() => initialParams.getAll("subject")));
|
||||
let query = $state(initialParams.get("q") ?? "");
|
||||
let openOnly = $state(initialParams.get("open") === "true");
|
||||
let offset = $state(Number(initialParams.get("offset")) || 0);
|
||||
const limit = 25;
|
||||
|
||||
// Sorting state — maps TanStack column IDs to server sort params
|
||||
const SORT_COLUMN_MAP: Record<string, SortColumn> = {
|
||||
course_code: "course_code",
|
||||
title: "title",
|
||||
instructor: "instructor",
|
||||
time: "time",
|
||||
seats: "seats",
|
||||
};
|
||||
|
||||
let sorting: SortingState = $state(
|
||||
(() => {
|
||||
const sortBy = initialParams.get("sort_by");
|
||||
const sortDir = initialParams.get("sort_dir");
|
||||
if (!sortBy) return [];
|
||||
return [{ id: sortBy, desc: sortDir === "desc" }];
|
||||
})()
|
||||
);
|
||||
|
||||
function handleSortingChange(newSorting: SortingState) {
|
||||
sorting = newSorting;
|
||||
offset = 0;
|
||||
}
|
||||
|
||||
// Data state
|
||||
let subjects: Subject[] = $state([]);
|
||||
let subjectMap: Record<string, string> = $derived(
|
||||
Object.fromEntries(subjects.map((s) => [s.code, s.description]))
|
||||
);
|
||||
let searchResult: SearchResponse | null = $state(null);
|
||||
let searchMeta: SearchMeta | null = $state(null);
|
||||
let loading = $state(false);
|
||||
let error = $state<string | null>(null);
|
||||
|
||||
// Fetch subjects when term changes
|
||||
$effect(() => {
|
||||
const term = selectedTerm;
|
||||
if (!term) return;
|
||||
client
|
||||
.getSubjects(term)
|
||||
.then((s) => {
|
||||
subjects = s;
|
||||
const validCodes = new Set(s.map((sub) => sub.code));
|
||||
selectedSubjects = selectedSubjects.filter((code) => validCodes.has(code));
|
||||
})
|
||||
.catch((e) => {
|
||||
console.error("Failed to fetch subjects:", e);
|
||||
});
|
||||
});
|
||||
|
||||
// Centralized throttle configuration - maps trigger source to throttle delay (ms)
|
||||
const THROTTLE_MS = {
|
||||
term: 0, // Immediate
|
||||
subjects: 100, // Short delay for combobox selection
|
||||
query: 300, // Standard input debounce
|
||||
openOnly: 0, // Immediate
|
||||
offset: 0, // Immediate (pagination)
|
||||
sorting: 0, // Immediate (column sort)
|
||||
} as const;
|
||||
|
||||
let searchTimeout: ReturnType<typeof setTimeout> | undefined;
|
||||
|
||||
function scheduleSearch(source: keyof typeof THROTTLE_MS) {
|
||||
clearTimeout(searchTimeout);
|
||||
searchTimeout = setTimeout(() => {
|
||||
performSearch(selectedTerm, selectedSubjects, query, openOnly, offset, sorting);
|
||||
}, THROTTLE_MS[source]);
|
||||
}
|
||||
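// Illustrative behaviour (not part of the diff): every call shares the single
// searchTimeout handle, so a keystroke in the query box schedules a 300 ms
// debounce, and toggling "open only" (0 ms) before it fires clears that pending
// timer and runs one immediate search that picks up both changes; the
// THROTTLE_MS table above only decides how long the most recent trigger waits.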
|
||||
// Separate effects for each trigger source with appropriate throttling
|
||||
$effect(() => {
|
||||
selectedTerm;
|
||||
scheduleSearch("term");
|
||||
return () => clearTimeout(searchTimeout);
|
||||
});
|
||||
|
||||
$effect(() => {
|
||||
selectedSubjects;
|
||||
scheduleSearch("subjects");
|
||||
return () => clearTimeout(searchTimeout);
|
||||
});
|
||||
|
||||
$effect(() => {
|
||||
query;
|
||||
scheduleSearch("query");
|
||||
return () => clearTimeout(searchTimeout);
|
||||
});
|
||||
|
||||
$effect(() => {
|
||||
openOnly;
|
||||
scheduleSearch("openOnly");
|
||||
return () => clearTimeout(searchTimeout);
|
||||
});
|
||||
|
||||
$effect(() => {
|
||||
offset;
|
||||
scheduleSearch("offset");
|
||||
return () => clearTimeout(searchTimeout);
|
||||
});
|
||||
|
||||
$effect(() => {
|
||||
sorting;
|
||||
scheduleSearch("sorting");
|
||||
return () => clearTimeout(searchTimeout);
|
||||
});
|
||||
|
||||
// Reset offset when filters change (not offset itself)
|
||||
let prevFilters = $state("");
|
||||
$effect(() => {
|
||||
const key = `${selectedTerm}|${selectedSubjects.join(",")}|${query}|${openOnly}`;
|
||||
if (prevFilters && key !== prevFilters) {
|
||||
offset = 0;
|
||||
}
|
||||
prevFilters = key;
|
||||
});
|
||||
|
||||
async function performSearch(
|
||||
term: string,
|
||||
subjects: string[],
|
||||
q: string,
|
||||
open: boolean,
|
||||
off: number,
|
||||
sort: SortingState
|
||||
) {
|
||||
if (!term) return;
|
||||
loading = true;
|
||||
error = null;
|
||||
|
||||
const sortBy = sort.length > 0 ? SORT_COLUMN_MAP[sort[0].id] : undefined;
|
||||
const sortDir: SortDirection | undefined =
|
||||
sort.length > 0 ? (sort[0].desc ? "desc" : "asc") : undefined;
|
||||
|
||||
const params = new URLSearchParams();
|
||||
params.set("term", term);
|
||||
for (const s of subjects) {
|
||||
params.append("subject", s);
|
||||
}
|
||||
if (q) params.set("q", q);
|
||||
if (open) params.set("open", "true");
|
||||
if (off > 0) params.set("offset", String(off));
|
||||
if (sortBy) params.set("sort_by", sortBy);
|
||||
if (sortDir && sortBy) params.set("sort_dir", sortDir);
|
||||
goto(`?${params.toString()}`, { replaceState: true, noScroll: true, keepFocus: true });
|
||||
|
||||
const t0 = performance.now();
|
||||
try {
|
||||
searchResult = await client.searchCourses({
|
||||
term,
|
||||
subjects: subjects.length > 0 ? subjects : undefined,
|
||||
q: q || undefined,
|
||||
open_only: open || undefined,
|
||||
limit,
|
||||
offset: off,
|
||||
sort_by: sortBy,
|
||||
sort_dir: sortDir,
|
||||
});
|
||||
searchMeta = {
|
||||
totalCount: searchResult.totalCount,
|
||||
durationMs: performance.now() - t0,
|
||||
timestamp: new Date(),
|
||||
};
|
||||
} catch (e) {
|
||||
error = e instanceof Error ? e.message : "Search failed";
|
||||
} finally {
|
||||
loading = false;
|
||||
}
|
||||
}
|
||||
|
||||
function handlePageChange(newOffset: number) {
|
||||
offset = newOffset;
|
||||
}
|
||||
</script>
|
||||
|
||||
<div class="min-h-screen flex flex-col items-center p-5">
|
||||
<div class="w-full max-w-6xl flex flex-col gap-6">
|
||||
<!-- Title -->
|
||||
<div class="text-center pt-8 pb-2">
|
||||
<h1 class="text-2xl font-semibold text-foreground">UTSA Course Search</h1>
|
||||
</div>
|
||||
|
||||
<!-- Search status + Filters -->
|
||||
<div class="flex flex-col gap-1.5">
|
||||
<SearchStatus meta={searchMeta} />
|
||||
<!-- Filters -->
|
||||
<SearchFilters
|
||||
terms={data.terms}
|
||||
{subjects}
|
||||
bind:selectedTerm
|
||||
bind:selectedSubjects
|
||||
bind:query
|
||||
bind:openOnly
|
||||
/>
|
||||
</div>
|
||||
|
||||
<!-- Results -->
|
||||
{#if error}
|
||||
<div class="text-center py-8">
|
||||
<p class="text-status-red">{error}</p>
|
||||
<button
|
||||
onclick={() => performSearch(selectedTerm, selectedSubjects, query, openOnly, offset, sorting)}
|
||||
class="mt-2 text-sm text-muted-foreground hover:underline"
|
||||
>
|
||||
Retry
|
||||
</button>
|
||||
</div>
|
||||
{:else}
|
||||
<CourseTable
|
||||
courses={searchResult?.courses ?? []}
|
||||
{loading}
|
||||
{sorting}
|
||||
onSortingChange={handleSortingChange}
|
||||
manualSorting={true}
|
||||
{subjectMap}
|
||||
/>
|
||||
|
||||
{#if searchResult}
|
||||
<Pagination
|
||||
totalCount={searchResult.totalCount}
|
||||
offset={searchResult.offset}
|
||||
{limit}
|
||||
onPageChange={handlePageChange}
|
||||
/>
|
||||
{/if}
|
||||
{/if}
|
||||
|
||||
<!-- Footer -->
|
||||
<Footer />
|
||||
</div>
|
||||
</div>
|
||||
@@ -0,0 +1,13 @@
|
||||
import type { PageLoad } from "./$types";
|
||||
import { BannerApiClient } from "$lib/api";
|
||||
|
||||
export const load: PageLoad = async ({ url, fetch }) => {
|
||||
const client = new BannerApiClient(undefined, fetch);
|
||||
try {
|
||||
const terms = await client.getTerms();
|
||||
return { terms, url };
|
||||
} catch (e) {
|
||||
console.error("Failed to load terms:", e);
|
||||
return { terms: [], url };
|
||||
}
|
||||
};
|
||||
@@ -1,34 +0,0 @@
|
||||
import { Outlet, createRootRoute } from "@tanstack/react-router";
|
||||
import { TanStackRouterDevtoolsPanel } from "@tanstack/react-router-devtools";
|
||||
import { TanstackDevtools } from "@tanstack/react-devtools";
|
||||
import { Theme } from "@radix-ui/themes";
|
||||
import "@radix-ui/themes/styles.css";
|
||||
import { ThemeProvider } from "next-themes";
|
||||
|
||||
export const Route = createRootRoute({
|
||||
component: () => (
|
||||
<ThemeProvider
|
||||
attribute="class"
|
||||
defaultTheme="system"
|
||||
enableSystem
|
||||
disableTransitionOnChange={false}
|
||||
>
|
||||
<Theme accentColor="blue" grayColor="gray">
|
||||
<Outlet />
|
||||
{import.meta.env.DEV ? (
|
||||
<TanstackDevtools
|
||||
config={{
|
||||
position: "bottom-left",
|
||||
}}
|
||||
plugins={[
|
||||
{
|
||||
name: "Tanstack Router",
|
||||
render: <TanStackRouterDevtoolsPanel />,
|
||||
},
|
||||
]}
|
||||
/>
|
||||
) : null}
|
||||
</Theme>
|
||||
</ThemeProvider>
|
||||
),
|
||||
});
|
||||
@@ -0,0 +1,74 @@
|
||||
<script lang="ts">
|
||||
import { onMount } from "svelte";
|
||||
import { goto } from "$app/navigation";
|
||||
import { authStore } from "$lib/auth.svelte";
|
||||
import { LayoutDashboard, Users, ClipboardList, FileText, LogOut } from "@lucide/svelte";
|
||||
|
||||
let { children } = $props();
|
||||
|
||||
onMount(async () => {
|
||||
if (authStore.isLoading) {
|
||||
await authStore.init();
|
||||
}
|
||||
});
|
||||
|
||||
$effect(() => {
|
||||
if (authStore.state.mode === "unauthenticated") {
|
||||
goto("/login");
|
||||
}
|
||||
});
|
||||
|
||||
const navItems = [
|
||||
{ href: "/admin", label: "Dashboard", icon: LayoutDashboard },
|
||||
{ href: "/admin/scrape-jobs", label: "Scrape Jobs", icon: ClipboardList },
|
||||
{ href: "/admin/audit-log", label: "Audit Log", icon: FileText },
|
||||
{ href: "/admin/users", label: "Users", icon: Users },
|
||||
];
|
||||
</script>
|
||||
|
||||
{#if authStore.isLoading}
|
||||
<div class="flex min-h-screen items-center justify-center">
|
||||
<p class="text-muted-foreground">Loading...</p>
|
||||
</div>
|
||||
{:else if !authStore.isAdmin}
|
||||
<div class="flex min-h-screen items-center justify-center">
|
||||
<div class="text-center">
|
||||
<h1 class="text-2xl font-bold">Access Denied</h1>
|
||||
<p class="text-muted-foreground mt-2">You do not have admin access.</p>
|
||||
</div>
|
||||
</div>
|
||||
{:else}
|
||||
<div class="flex min-h-screen">
|
||||
<aside class="border-border bg-card flex w-64 flex-col border-r">
|
||||
<div class="border-border border-b p-4">
|
||||
<h2 class="text-lg font-semibold">Admin</h2>
|
||||
{#if authStore.user}
|
||||
<p class="text-muted-foreground text-sm">{authStore.user.discordUsername}</p>
|
||||
{/if}
|
||||
</div>
|
||||
<nav class="flex-1 space-y-1 p-2">
|
||||
{#each navItems as item}
|
||||
<a
|
||||
href={item.href}
|
||||
class="hover:bg-accent flex items-center gap-3 rounded-lg px-3 py-2 text-sm font-medium transition-colors"
|
||||
>
|
||||
<item.icon size={18} />
|
||||
{item.label}
|
||||
</a>
|
||||
{/each}
|
||||
</nav>
|
||||
<div class="border-border border-t p-2">
|
||||
<button
|
||||
onclick={() => authStore.logout()}
|
||||
class="hover:bg-destructive/10 text-destructive flex w-full items-center gap-3 rounded-lg px-3 py-2 text-sm font-medium transition-colors"
|
||||
>
|
||||
<LogOut size={18} />
|
||||
Sign Out
|
||||
</button>
|
||||
</div>
|
||||
</aside>
|
||||
<main class="flex-1 overflow-auto p-6">
|
||||
{@render children()}
|
||||
</main>
|
||||
</div>
|
||||
{/if}
|
||||
@@ -0,0 +1,54 @@
|
||||
<script lang="ts">
|
||||
import { onMount } from "svelte";
|
||||
import { client, type AdminStatus } from "$lib/api";
|
||||
|
||||
let status = $state<AdminStatus | null>(null);
|
||||
let error = $state<string | null>(null);
|
||||
|
||||
onMount(async () => {
|
||||
try {
|
||||
status = await client.getAdminStatus();
|
||||
} catch (e) {
|
||||
error = e instanceof Error ? e.message : "Failed to load status";
|
||||
}
|
||||
});
|
||||
</script>
|
||||
|
||||
<h1 class="mb-6 text-2xl font-bold">Dashboard</h1>
|
||||
|
||||
{#if error}
|
||||
<p class="text-destructive">{error}</p>
|
||||
{:else if !status}
|
||||
<p class="text-muted-foreground">Loading...</p>
|
||||
{:else}
|
||||
<div class="grid grid-cols-2 gap-4 lg:grid-cols-4">
|
||||
<div class="bg-card border-border rounded-lg border p-4">
|
||||
<p class="text-muted-foreground text-sm">Users</p>
|
||||
<p class="text-3xl font-bold">{status.userCount}</p>
|
||||
</div>
|
||||
<div class="bg-card border-border rounded-lg border p-4">
|
||||
<p class="text-muted-foreground text-sm">Active Sessions</p>
|
||||
<p class="text-3xl font-bold">{status.sessionCount}</p>
|
||||
</div>
|
||||
<div class="bg-card border-border rounded-lg border p-4">
|
||||
<p class="text-muted-foreground text-sm">Courses</p>
|
||||
<p class="text-3xl font-bold">{status.courseCount}</p>
|
||||
</div>
|
||||
<div class="bg-card border-border rounded-lg border p-4">
|
||||
<p class="text-muted-foreground text-sm">Scrape Jobs</p>
|
||||
<p class="text-3xl font-bold">{status.scrapeJobCount}</p>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<h2 class="mt-8 mb-4 text-lg font-semibold">Services</h2>
|
||||
<div class="bg-card border-border rounded-lg border">
|
||||
{#each status.services as service}
|
||||
<div class="border-border flex items-center justify-between border-b px-4 py-3 last:border-b-0">
|
||||
<span class="font-medium">{service.name}</span>
|
||||
<span class="rounded-full bg-green-100 px-2 py-0.5 text-xs font-medium text-green-800 dark:bg-green-900 dark:text-green-200">
|
||||
{service.status}
|
||||
</span>
|
||||
</div>
|
||||
{/each}
|
||||
</div>
|
||||
{/if}
|
||||
@@ -0,0 +1,50 @@
|
||||
<script lang="ts">
|
||||
import { onMount } from "svelte";
|
||||
import { client, type AuditLogResponse } from "$lib/api";
|
||||
|
||||
let data = $state<AuditLogResponse | null>(null);
|
||||
let error = $state<string | null>(null);
|
||||
|
||||
onMount(async () => {
|
||||
try {
|
||||
data = await client.getAdminAuditLog();
|
||||
} catch (e) {
|
||||
error = e instanceof Error ? e.message : "Failed to load audit log";
|
||||
}
|
||||
});
|
||||
</script>
|
||||
|
||||
<h1 class="mb-6 text-2xl font-bold">Audit Log</h1>
|
||||
|
||||
{#if error}
|
||||
<p class="text-destructive">{error}</p>
|
||||
{:else if !data}
|
||||
<p class="text-muted-foreground">Loading...</p>
|
||||
{:else if data.entries.length === 0}
|
||||
<p class="text-muted-foreground">No audit log entries found.</p>
|
||||
{:else}
|
||||
<div class="bg-card border-border overflow-hidden rounded-lg border">
|
||||
<table class="w-full text-sm">
|
||||
<thead>
|
||||
<tr class="border-border border-b">
|
||||
<th class="px-4 py-3 text-left font-medium">Time</th>
|
||||
<th class="px-4 py-3 text-left font-medium">Course ID</th>
|
||||
<th class="px-4 py-3 text-left font-medium">Field</th>
|
||||
<th class="px-4 py-3 text-left font-medium">Old Value</th>
|
||||
<th class="px-4 py-3 text-left font-medium">New Value</th>
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody>
|
||||
{#each data.entries as entry}
|
||||
<tr class="border-border border-b last:border-b-0">
|
||||
<td class="px-4 py-3">{new Date(entry.timestamp).toLocaleString()}</td>
|
||||
<td class="px-4 py-3">{entry.courseId}</td>
|
||||
<td class="px-4 py-3 font-mono text-xs">{entry.fieldChanged}</td>
|
||||
<td class="px-4 py-3">{entry.oldValue}</td>
|
||||
<td class="px-4 py-3">{entry.newValue}</td>
|
||||
</tr>
|
||||
{/each}
|
||||
</tbody>
|
||||
</table>
|
||||
</div>
|
||||
{/if}
|
||||
@@ -0,0 +1,52 @@
|
||||
<script lang="ts">
|
||||
import { onMount } from "svelte";
|
||||
import { client, type ScrapeJobsResponse } from "$lib/api";
|
||||
|
||||
let data = $state<ScrapeJobsResponse | null>(null);
|
||||
let error = $state<string | null>(null);
|
||||
|
||||
onMount(async () => {
|
||||
try {
|
||||
data = await client.getAdminScrapeJobs();
|
||||
} catch (e) {
|
||||
error = e instanceof Error ? e.message : "Failed to load scrape jobs";
|
||||
}
|
||||
});
|
||||
</script>
|
||||
|
||||
<h1 class="mb-6 text-2xl font-bold">Scrape Jobs</h1>
|
||||
|
||||
{#if error}
|
||||
<p class="text-destructive">{error}</p>
|
||||
{:else if !data}
|
||||
<p class="text-muted-foreground">Loading...</p>
|
||||
{:else if data.jobs.length === 0}
|
||||
<p class="text-muted-foreground">No scrape jobs found.</p>
|
||||
{:else}
|
||||
<div class="bg-card border-border overflow-hidden rounded-lg border">
|
||||
<table class="w-full text-sm">
|
||||
<thead>
|
||||
<tr class="border-border border-b">
|
||||
<th class="px-4 py-3 text-left font-medium">ID</th>
|
||||
<th class="px-4 py-3 text-left font-medium">Type</th>
|
||||
<th class="px-4 py-3 text-left font-medium">Priority</th>
|
||||
<th class="px-4 py-3 text-left font-medium">Execute At</th>
|
||||
<th class="px-4 py-3 text-left font-medium">Retries</th>
|
||||
<th class="px-4 py-3 text-left font-medium">Status</th>
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody>
|
||||
{#each data.jobs as job}
|
||||
<tr class="border-border border-b last:border-b-0">
|
||||
<td class="px-4 py-3">{job.id}</td>
|
||||
<td class="px-4 py-3">{job.targetType}</td>
|
||||
<td class="px-4 py-3">{job.priority}</td>
|
||||
<td class="px-4 py-3">{new Date(job.executeAt).toLocaleString()}</td>
|
||||
<td class="px-4 py-3">{job.retryCount}/{job.maxRetries}</td>
|
||||
<td class="px-4 py-3">{job.lockedAt ? "Locked" : "Pending"}</td>
|
||||
</tr>
|
||||
{/each}
|
||||
</tbody>
|
||||
</table>
|
||||
</div>
|
||||
{/if}
|
||||
@@ -0,0 +1,92 @@
|
||||
<script lang="ts">
|
||||
import { onMount } from "svelte";
|
||||
import { client } from "$lib/api";
|
||||
import type { User } from "$lib/bindings";
|
||||
import { Shield, ShieldOff } from "@lucide/svelte";
|
||||
|
||||
let users = $state<User[]>([]);
|
||||
let error = $state<string | null>(null);
|
||||
let updating = $state<bigint | null>(null);
|
||||
|
||||
onMount(async () => {
|
||||
try {
|
||||
users = await client.getAdminUsers();
|
||||
} catch (e) {
|
||||
error = e instanceof Error ? e.message : "Failed to load users";
|
||||
}
|
||||
});
|
||||
|
||||
async function toggleAdmin(user: User) {
|
||||
updating = user.discordId;
|
||||
try {
|
||||
const updated = await client.setUserAdmin(user.discordId, !user.isAdmin);
|
||||
users = users.map((u) => (u.discordId === updated.discordId ? updated : u));
|
||||
} catch (e) {
|
||||
error = e instanceof Error ? e.message : "Failed to update user";
|
||||
} finally {
|
||||
updating = null;
|
||||
}
|
||||
}
|
||||
</script>
|
||||
|
||||
<h1 class="mb-6 text-2xl font-bold">Users</h1>
|
||||
|
||||
{#if error}
|
||||
<p class="text-destructive mb-4">{error}</p>
|
||||
{/if}
|
||||
|
||||
{#if users.length === 0 && !error}
|
||||
<p class="text-muted-foreground">Loading...</p>
|
||||
{:else}
|
||||
<div class="bg-card border-border overflow-hidden rounded-lg border">
|
||||
<table class="w-full text-sm">
|
||||
<thead>
|
||||
<tr class="border-border border-b">
|
||||
<th class="px-4 py-3 text-left font-medium">Username</th>
|
||||
<th class="px-4 py-3 text-left font-medium">Discord ID</th>
|
||||
<th class="px-4 py-3 text-left font-medium">Admin</th>
|
||||
<th class="px-4 py-3 text-left font-medium">Actions</th>
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody>
|
||||
{#each users as user}
|
||||
<tr class="border-border border-b last:border-b-0">
|
||||
<td class="flex items-center gap-2 px-4 py-3">
|
||||
{#if user.discordAvatarHash}
|
||||
<img
|
||||
src="https://cdn.discordapp.com/avatars/{user.discordId}/{user.discordAvatarHash}.png?size=32"
|
||||
alt=""
|
||||
class="h-6 w-6 rounded-full"
|
||||
/>
|
||||
{/if}
|
||||
{user.discordUsername}
|
||||
</td>
|
||||
<td class="text-muted-foreground px-4 py-3 font-mono text-xs">{user.discordId}</td>
|
||||
<td class="px-4 py-3">
|
||||
{#if user.isAdmin}
|
||||
<span class="rounded-full bg-blue-100 px-2 py-0.5 text-xs font-medium text-blue-800 dark:bg-blue-900 dark:text-blue-200">Admin</span>
|
||||
{:else}
|
||||
<span class="text-muted-foreground text-xs">User</span>
|
||||
{/if}
|
||||
</td>
|
||||
<td class="px-4 py-3">
|
||||
<button
|
||||
onclick={() => toggleAdmin(user)}
|
||||
disabled={updating === user.discordId}
|
||||
class="hover:bg-accent inline-flex items-center gap-1 rounded px-2 py-1 text-xs transition-colors disabled:opacity-50"
|
||||
>
|
||||
{#if user.isAdmin}
|
||||
<ShieldOff size={14} />
|
||||
Remove Admin
|
||||
{:else}
|
||||
<Shield size={14} />
|
||||
Make Admin
|
||||
{/if}
|
||||
</button>
|
||||
</td>
|
||||
</tr>
|
||||
{/each}
|
||||
</tbody>
|
||||
</table>
|
||||
</div>
|
||||
{/if}
|
||||
@@ -0,0 +1,312 @@
|
||||
<script lang="ts">
|
||||
import { onMount } from "svelte";
|
||||
import {
|
||||
Activity,
|
||||
Bot,
|
||||
CheckCircle,
|
||||
Circle,
|
||||
Clock,
|
||||
Globe,
|
||||
Hourglass,
|
||||
MessageCircle,
|
||||
WifiOff,
|
||||
XCircle,
|
||||
} from "@lucide/svelte";
|
||||
import SimpleTooltip from "$lib/components/SimpleTooltip.svelte";
|
||||
import Footer from "$lib/components/Footer.svelte";
|
||||
import { type ServiceStatus, type ServiceInfo, type StatusResponse, client } from "$lib/api";
|
||||
import { relativeTime } from "$lib/time";
|
||||
|
||||
const REFRESH_INTERVAL = import.meta.env.DEV ? 3000 : 30000;
|
||||
const REQUEST_TIMEOUT = 10000;
|
||||
|
||||
const SERVICE_ICONS: Record<string, typeof Bot> = {
|
||||
bot: Bot,
|
||||
banner: Globe,
|
||||
discord: MessageCircle,
|
||||
database: Activity,
|
||||
web: Globe,
|
||||
scraper: Clock,
|
||||
};
|
||||
|
||||
interface ResponseTiming {
|
||||
health: number | null;
|
||||
status: number | null;
|
||||
}
|
||||
|
||||
interface Service {
|
||||
name: string;
|
||||
status: ServiceStatus;
|
||||
icon: typeof Bot;
|
||||
}
|
||||
|
||||
type StatusState =
|
||||
| { mode: "loading" }
|
||||
| { mode: "response"; timing: ResponseTiming; lastFetch: Date; status: StatusResponse }
|
||||
| { mode: "error"; lastFetch: Date }
|
||||
| { mode: "timeout"; lastFetch: Date };
|
||||
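// Illustrative note (not part of the diff): StatusState is a discriminated union
// keyed on `mode`, so a check such as `statusState.mode === "response"` lets
// TypeScript narrow the value and expose `timing` and `status`; that is why the
// $derived blocks below branch on mode before touching those fields.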
|
||||
const STATUS_ICONS: Record<
|
||||
ServiceStatus | "Unreachable",
|
||||
{ icon: typeof CheckCircle; color: string }
|
||||
> = {
|
||||
active: { icon: CheckCircle, color: "var(--status-green)" },
|
||||
connected: { icon: CheckCircle, color: "var(--status-green)" },
|
||||
starting: { icon: Hourglass, color: "var(--status-orange)" },
|
||||
disabled: { icon: Circle, color: "var(--status-gray)" },
|
||||
error: { icon: XCircle, color: "var(--status-red)" },
|
||||
Unreachable: { icon: WifiOff, color: "var(--status-red)" },
|
||||
};
|
||||
|
||||
let statusState = $state({ mode: "loading" } as StatusState);
|
||||
let now = $state(new Date());
|
||||
|
||||
const isLoading = $derived(statusState.mode === "loading");
|
||||
const shouldShowSkeleton = $derived(statusState.mode === "loading" || statusState.mode === "error");
|
||||
|
||||
const overallHealth: ServiceStatus | "Unreachable" = $derived(
|
||||
statusState.mode === "timeout"
|
||||
? "Unreachable"
|
||||
: statusState.mode === "error"
|
||||
? "error"
|
||||
: statusState.mode === "response"
|
||||
? statusState.status.status
|
||||
: "error"
|
||||
);
|
||||
|
||||
const overallIcon = $derived(STATUS_ICONS[overallHealth]);
|
||||
|
||||
const services: Service[] = $derived(
|
||||
statusState.mode === "response"
|
||||
? (Object.entries(statusState.status.services) as [string, ServiceInfo][]).map(
|
||||
([id, info]) => ({
|
||||
name: info.name,
|
||||
status: info.status,
|
||||
icon: SERVICE_ICONS[id] ?? Bot,
|
||||
})
|
||||
)
|
||||
: []
|
||||
);
|
||||
|
||||
const shouldShowTiming = $derived(
|
||||
statusState.mode === "response" && statusState.timing.health !== null
|
||||
);
|
||||
|
||||
const shouldShowLastFetch = $derived(
|
||||
statusState.mode === "response" || statusState.mode === "error" || statusState.mode === "timeout"
|
||||
);
|
||||
|
||||
const lastFetch = $derived(
|
||||
statusState.mode === "response" || statusState.mode === "error" || statusState.mode === "timeout"
|
||||
? statusState.lastFetch
|
||||
: null
|
||||
);
|
||||
|
||||
const relativeLastFetchResult = $derived(lastFetch ? relativeTime(lastFetch, now) : null);
|
||||
const relativeLastFetch = $derived(relativeLastFetchResult?.text ?? "");
|
||||
|
||||
function formatNumber(num: number): string {
|
||||
return num.toLocaleString();
|
||||
}
|
||||
|
||||
onMount(() => {
|
||||
let timeoutId: ReturnType<typeof setTimeout> | null = null;
|
||||
let requestTimeoutId: ReturnType<typeof setTimeout> | null = null;
|
||||
let nowTimeoutId: ReturnType<typeof setTimeout> | null = null;
|
||||
|
||||
// Adaptive tick: schedules the next `now` update based on when the
|
||||
// relative time text would actually change (every ~1s for recent
|
||||
// timestamps, every ~1m for minute-level, etc.)
|
||||
function scheduleNowTick() {
|
||||
const delay = relativeLastFetchResult?.nextUpdateMs ?? 1000;
|
||||
nowTimeoutId = setTimeout(() => {
|
||||
now = new Date();
|
||||
scheduleNowTick();
|
||||
}, delay);
|
||||
}
|
||||
scheduleNowTick();
|
||||
|
||||
const fetchData = async () => {
|
||||
try {
|
||||
const startTime = Date.now();
|
||||
|
||||
const timeoutPromise = new Promise<never>((_, reject) => {
|
||||
requestTimeoutId = setTimeout(() => {
|
||||
reject(new Error("Request timeout"));
|
||||
}, REQUEST_TIMEOUT);
|
||||
});
|
||||
|
||||
const statusData = await Promise.race([client.getStatus(), timeoutPromise]);
|
||||
|
||||
if (requestTimeoutId) {
|
||||
clearTimeout(requestTimeoutId);
|
||||
requestTimeoutId = null;
|
||||
}
|
||||
|
||||
const responseTime = Date.now() - startTime;
|
||||
|
||||
statusState = {
|
||||
mode: "response",
|
||||
status: statusData,
|
||||
timing: { health: responseTime, status: responseTime },
|
||||
lastFetch: new Date(),
|
||||
};
|
||||
} catch (err) {
|
||||
if (requestTimeoutId) {
|
||||
clearTimeout(requestTimeoutId);
|
||||
requestTimeoutId = null;
|
||||
}
|
||||
|
||||
const message = err instanceof Error ? err.message : "";
|
||||
|
||||
if (message === "Request timeout") {
|
||||
        statusState = { mode: "timeout", lastFetch: new Date() };
      } else {
        statusState = { mode: "error", lastFetch: new Date() };
      }
    }

    timeoutId = setTimeout(() => void fetchData(), REFRESH_INTERVAL);
  };

  void fetchData();

  return () => {
    if (timeoutId) clearTimeout(timeoutId);
    if (requestTimeoutId) clearTimeout(requestTimeoutId);
    if (nowTimeoutId) clearTimeout(nowTimeoutId);
  };
});
</script>

<div class="min-h-screen flex flex-col items-center justify-center p-5">
  <div
    class="bg-card text-card-foreground rounded-xl border border-border p-6 w-full max-w-[400px] shadow-sm"
  >
    <div class="flex flex-col gap-4">
      <!-- Overall Status -->
      <div class="flex items-center justify-between">
        <div class="flex items-center gap-2">
          <Activity
            size={18}
            color={isLoading ? undefined : overallIcon.color}
            class={isLoading ? "animate-pulse" : ""}
            style="opacity: {isLoading ? 0.3 : 1}; transition: opacity 2s ease-in-out, color 2s ease-in-out;"
          />
          <span class="text-base font-medium text-foreground">System Status</span>
        </div>
        {#if isLoading}
          <div class="h-5 w-20 bg-muted rounded animate-pulse"></div>
        {:else}
          {#if overallIcon}
            {@const OverallIconComponent = overallIcon.icon}
            <div class="flex items-center gap-1.5">
              <span
                class="text-sm"
                class:text-muted-foreground={overallHealth === "disabled"}
                class:opacity-70={overallHealth === "disabled"}
              >
                {overallHealth}
              </span>
              <OverallIconComponent size={16} color={overallIcon.color} />
            </div>
          {/if}
        {/if}
      </div>

      <!-- Services -->
      <div class="flex flex-col gap-3 mt-4">
        {#if shouldShowSkeleton}
          {#each Array(3) as _}
            <div class="flex items-center justify-between">
              <div class="flex items-center gap-2">
                <div class="h-6 w-[18px] bg-muted rounded animate-pulse"></div>
                <div class="h-6 w-[60px] bg-muted rounded animate-pulse"></div>
              </div>
              <div class="flex items-center gap-2">
                <div class="h-5 w-[50px] bg-muted rounded animate-pulse"></div>
                <div class="h-5 w-4 bg-muted rounded animate-pulse"></div>
              </div>
            </div>
          {/each}
        {:else}
          {#each services as service (service.name)}
            {@const statusInfo = STATUS_ICONS[service.status]}
            {@const ServiceIcon = service.icon}
            {@const StatusIconComponent = statusInfo.icon}
            <div class="flex items-center justify-between">
              <div class="flex items-center gap-2">
                <ServiceIcon size={18} />
                <span class="text-muted-foreground">{service.name}</span>
              </div>
              <div class="flex items-center gap-1.5">
                <span
                  class="text-sm"
                  class:text-muted-foreground={service.status === "disabled"}
                  class:opacity-70={service.status === "disabled"}
                >
                  {service.status}
                </span>
                <StatusIconComponent size={16} color={statusInfo.color} />
              </div>
            </div>
          {/each}
        {/if}
      </div>

      <!-- Timing & Last Updated -->
      <div class="flex flex-col gap-2 mt-4 pt-4 border-t border-border">
        {#if isLoading}
          <div class="flex items-center justify-between">
            <div class="flex items-center gap-2">
              <Hourglass size={13} />
              <span class="text-sm text-muted-foreground">Response Time</span>
            </div>
            <div class="h-[18px] w-[50px] bg-muted rounded animate-pulse"></div>
          </div>
        {:else if shouldShowTiming && statusState.mode === "response"}
          <div class="flex items-center justify-between">
            <div class="flex items-center gap-2">
              <Hourglass size={13} />
              <span class="text-sm text-muted-foreground">Response Time</span>
            </div>
            <span class="text-sm text-muted-foreground">
              {formatNumber(statusState.timing.health!)}ms
            </span>
          </div>
        {/if}

        {#if isLoading}
          <div class="flex items-center justify-between">
            <div class="flex items-center gap-2">
              <Clock size={13} />
              <span class="text-sm text-muted-foreground">Last Updated</span>
            </div>
            <span class="text-sm text-muted-foreground pb-0.5">Loading...</span>
          </div>
        {:else if shouldShowLastFetch && lastFetch}
          <div class="flex items-center justify-between">
            <div class="flex items-center gap-2">
              <Clock size={13} />
              <span class="text-sm text-muted-foreground">Last Updated</span>
            </div>
            <SimpleTooltip text="as of {lastFetch.toLocaleTimeString()}" delay={150} passthrough>
              <abbr
                class="cursor-pointer underline decoration-dotted decoration-border underline-offset-[6px]"
              >
                <span class="text-sm text-muted-foreground">{relativeLastFetch}</span>
              </abbr>
            </SimpleTooltip>
          </div>
        {/if}
      </div>
    </div>
  </div>

  <!-- Footer -->
  <Footer
    commitHash={statusState.mode === "response" ? statusState.status.commit : undefined}
    showStatusLink={false}
    class="mt-3 pt-0 pb-0"
  />
</div>
@@ -1,419 +0,0 @@
import { Card, Flex, Skeleton, Text, Tooltip } from "@radix-ui/themes";
import { createFileRoute } from "@tanstack/react-router";
import {
  Activity,
  Bot,
  CheckCircle,
  Circle,
  Clock,
  Globe,
  Hourglass,
  MessageCircle,
  WifiOff,
  XCircle,
} from "lucide-react";
import { useEffect, useState } from "react";
import TimeAgo from "react-timeago";
import { ThemeToggle } from "../components/ThemeToggle";
import { type Status, type StatusResponse, client } from "../lib/api";
import "../App.css";

const REFRESH_INTERVAL = import.meta.env.DEV ? 3000 : 30000;
const REQUEST_TIMEOUT = 10000; // 10 seconds

const CARD_STYLES = {
  padding: "24px",
  maxWidth: "400px",
  width: "100%",
} as const;

const BORDER_STYLES = {
  marginTop: "16px",
  paddingTop: "16px",
  borderTop: "1px solid var(--gray-7)",
} as const;

const SERVICE_ICONS: Record<string, typeof Bot> = {
  bot: Bot,
  banner: Globe,
  discord: MessageCircle,
  database: Activity,
  web: Globe,
  scraper: Clock,
};

interface ResponseTiming {
  health: number | null;
  status: number | null;
}

interface StatusIcon {
  icon: typeof CheckCircle;
  color: string;
}

interface Service {
  name: string;
  status: Status;
  icon: typeof Bot;
}

type StatusState =
  | {
      mode: "loading";
    }
  | {
      mode: "response";
      timing: ResponseTiming;
      lastFetch: Date;
      status: StatusResponse;
    }
  | {
      mode: "error";
      lastFetch: Date;
    }
  | {
      mode: "timeout";
      lastFetch: Date;
    };

const formatNumber = (num: number): string => {
  return num.toLocaleString();
};

const getStatusIcon = (status: Status | "Unreachable"): StatusIcon => {
  const statusMap: Record<Status | "Unreachable", StatusIcon> = {
    active: { icon: CheckCircle, color: "green" },
    connected: { icon: CheckCircle, color: "green" },
    starting: { icon: Hourglass, color: "orange" },
    disabled: { icon: Circle, color: "gray" },
    error: { icon: XCircle, color: "red" },
    Unreachable: { icon: WifiOff, color: "red" },
  };

  return statusMap[status];
};

const getOverallHealth = (state: StatusState): Status | "Unreachable" => {
  if (state.mode === "timeout") return "Unreachable";
  if (state.mode === "error") return "error";
  if (state.mode === "response") return state.status.status;
  return "error";
};

const getServices = (state: StatusState): Service[] => {
  if (state.mode !== "response") return [];

  return Object.entries(state.status.services).map(([serviceId, serviceInfo]) => ({
    name: serviceInfo.name,
    status: serviceInfo.status,
    icon: SERVICE_ICONS[serviceId] || SERVICE_ICONS.default,
  }));
};

const StatusDisplay = ({ status }: { status: Status | "Unreachable" }) => {
  const { icon: Icon, color } = getStatusIcon(status);

  return (
    <Flex align="center" gap="2">
      <Text
        size="2"
        style={{
          color: status === "disabled" ? "var(--gray-11)" : undefined,
          opacity: status === "disabled" ? 0.7 : undefined,
        }}
      >
        {status}
      </Text>
      <Icon color={color} size={16} />
    </Flex>
  );
};

const ServiceStatus = ({ service }: { service: Service }) => {
  return (
    <Flex align="center" justify="between">
      <Flex align="center" gap="2">
        <service.icon size={18} />
        <Text style={{ color: "var(--gray-11)" }}>{service.name}</Text>
      </Flex>
      <StatusDisplay status={service.status} />
    </Flex>
  );
};

const SkeletonService = () => {
  return (
    <Flex align="center" justify="between">
      <Flex align="center" gap="2">
        <Skeleton height="24px" width="18px" />
        <Skeleton height="24px" width="60px" />
      </Flex>
      <Flex align="center" gap="2">
        <Skeleton height="20px" width="50px" />
        <Skeleton height="20px" width="16px" />
      </Flex>
    </Flex>
  );
};

const TimingRow = ({
  icon: Icon,
  name,
  children,
}: {
  icon: React.ComponentType<{ size?: number }>;
  name: string;
  children: React.ReactNode;
}) => (
  <Flex align="center" justify="between">
    <Flex align="center" gap="2">
      <Icon size={13} />
      <Text size="2" color="gray">
        {name}
      </Text>
    </Flex>
    {children}
  </Flex>
);

function App() {
  const [state, setState] = useState<StatusState>({ mode: "loading" });

  // State helpers
  const isLoading = state.mode === "loading";
  const hasError = state.mode === "error";
  const hasTimeout = state.mode === "timeout";
  const hasResponse = state.mode === "response";
  const shouldShowSkeleton = isLoading || hasError;
  const shouldShowTiming = hasResponse && state.timing.health !== null;
  const shouldShowLastFetch = hasResponse || hasError || hasTimeout;

  useEffect(() => {
    let timeoutId: NodeJS.Timeout | null = null;
    let requestTimeoutId: NodeJS.Timeout | null = null;

    const fetchData = async () => {
      try {
        const startTime = Date.now();

        // Create a timeout promise with cleanup tracking
        const timeoutPromise = new Promise<never>((_, reject) => {
          requestTimeoutId = setTimeout(() => {
            reject(new Error("Request timeout"));
          }, REQUEST_TIMEOUT);
        });

        // Race between the API call and timeout
        const statusData = await Promise.race([client.getStatus(), timeoutPromise]);

        // Clear the timeout if the request succeeded
        if (requestTimeoutId) {
          clearTimeout(requestTimeoutId);
          requestTimeoutId = null;
        }

        const endTime = Date.now();
        const responseTime = endTime - startTime;

        setState({
          mode: "response",
          status: statusData,
          timing: { health: responseTime, status: responseTime },
          lastFetch: new Date(),
        });
      } catch (err) {
        // Clear the timeout on error as well
        if (requestTimeoutId) {
          clearTimeout(requestTimeoutId);
          requestTimeoutId = null;
        }

        const errorMessage = err instanceof Error ? err.message : "Failed to fetch data";

        if (errorMessage === "Request timeout") {
          setState({
            mode: "timeout",
            lastFetch: new Date(),
          });
        } else {
          setState({
            mode: "error",
            lastFetch: new Date(),
          });
        }
      }

      // Schedule the next request after the current one completes
      timeoutId = setTimeout(() => void fetchData(), REFRESH_INTERVAL);
    };

    // Start the first request immediately
    void fetchData();

    return () => {
      if (timeoutId) {
        clearTimeout(timeoutId);
      }
      if (requestTimeoutId) {
        clearTimeout(requestTimeoutId);
      }
    };
  }, []);

  const overallHealth = getOverallHealth(state);
  const { color: overallColor } = getStatusIcon(overallHealth);
  const services = getServices(state);

  return (
    <div className="App">
      <div
        style={{
          position: "fixed",
          top: "20px",
          right: "20px",
          zIndex: 1000,
        }}
      >
        <ThemeToggle />
      </div>

      <Flex
        direction="column"
        align="center"
        justify="center"
        style={{ minHeight: "100vh", padding: "20px" }}
      >
        <Card style={CARD_STYLES}>
          <Flex direction="column" gap="4">
            {/* Overall Status */}
            <Flex align="center" justify="between">
              <Flex align="center" gap="2">
                <Activity
                  color={isLoading ? undefined : overallColor}
                  size={18}
                  className={isLoading ? "animate-pulse" : ""}
                  style={{
                    opacity: isLoading ? 0.3 : 1,
                    transition: "opacity 2s ease-in-out, color 2s ease-in-out",
                  }}
                />
                <Text size="4" style={{ color: "var(--gray-12)" }}>
                  System Status
                </Text>
              </Flex>
              {isLoading ? (
                <Skeleton height="20px" width="80px" />
              ) : (
                <StatusDisplay status={overallHealth} />
              )}
            </Flex>

            {/* Individual Services */}
            <Flex direction="column" gap="3" style={{ marginTop: "16px" }}>
              {shouldShowSkeleton
                ? // Show skeleton for 3 services during initial loading only
                  Array.from({ length: 3 }).map((_, index) => <SkeletonService key={index} />)
                : services.map((service) => <ServiceStatus key={service.name} service={service} />)}
            </Flex>

            <Flex direction="column" gap="2" style={BORDER_STYLES}>
              {isLoading ? (
                <TimingRow icon={Hourglass} name="Response Time">
                  <Skeleton height="18px" width="50px" />
                </TimingRow>
              ) : shouldShowTiming ? (
                <TimingRow icon={Hourglass} name="Response Time">
                  <Text size="2" style={{ color: "var(--gray-11)" }}>
                    {formatNumber(state.timing.health!)}ms
                  </Text>
                </TimingRow>
              ) : null}

              {shouldShowLastFetch ? (
                <TimingRow icon={Clock} name="Last Updated">
                  {isLoading ? (
                    <Text size="2" style={{ paddingBottom: "2px" }} color="gray">
                      Loading...
                    </Text>
                  ) : (
                    <Tooltip content={`as of ${state.lastFetch.toLocaleTimeString()}`}>
                      <abbr
                        style={{
                          cursor: "pointer",
                          textDecoration: "underline",
                          textDecorationStyle: "dotted",
                          textDecorationColor: "var(--gray-6)",
                          textUnderlineOffset: "6px",
                        }}
                      >
                        <Text size="2" style={{ color: "var(--gray-11)" }}>
                          <TimeAgo date={state.lastFetch} />
                        </Text>
                      </abbr>
                    </Tooltip>
                  )}
                </TimingRow>
              ) : isLoading ? (
                <TimingRow icon={Clock} name="Last Updated">
                  <Text size="2" color="gray">
                    Loading...
                  </Text>
                </TimingRow>
              ) : null}
            </Flex>
          </Flex>
        </Card>
        <Flex justify="center" style={{ marginTop: "12px" }} gap="2" align="center">
          {__APP_VERSION__ && (
            <Text
              size="1"
              style={{
                color: "var(--gray-11)",
              }}
            >
              v{__APP_VERSION__}
            </Text>
          )}
          {__APP_VERSION__ && (
            <div
              style={{
                width: "1px",
                height: "12px",
                backgroundColor: "var(--gray-10)",
                opacity: 0.3,
              }}
            />
          )}
          <Text
            size="1"
            style={{
              color: "var(--gray-11)",
              textDecoration: "none",
            }}
          >
            <a
              href={
                hasResponse && state.status.commit
                  ? `https://github.com/Xevion/banner/commit/${state.status.commit}`
                  : "https://github.com/Xevion/banner"
              }
              target="_blank"
              rel="noopener noreferrer"
              style={{
                color: "inherit",
                textDecoration: "none",
              }}
            >
              GitHub
            </a>
          </Text>
        </Flex>
      </Flex>
    </div>
  );
}

export const Route = createFileRoute("/")({
  component: App,
});
@@ -0,0 +1,221 @@
@import "tailwindcss";
@import "@fontsource-variable/inter";

@custom-variant dark (&:where(.dark, .dark *));

:root {
  --background: oklch(0.985 0 0);
  --foreground: oklch(0.145 0 0);
  --card: oklch(1 0 0);
  --card-foreground: oklch(0.145 0 0);
  --muted: oklch(0.96 0 0);
  --muted-foreground: oklch(0.556 0 0);
  --border: oklch(0.922 0 0);
  --ring: oklch(0.708 0 0);
  --accent: oklch(0.96 0 0);
  --accent-foreground: oklch(0.145 0 0);

  --status-green: oklch(0.65 0.2 145);
  --status-red: oklch(0.63 0.2 25);
  --status-orange: oklch(0.75 0.18 70);
  --status-gray: oklch(0.556 0 0);
}

.dark {
  --background: oklch(0.145 0 0);
  --foreground: oklch(0.985 0 0);
  --card: oklch(0.205 0 0);
  --card-foreground: oklch(0.985 0 0);
  --muted: oklch(0.269 0 0);
  --muted-foreground: oklch(0.708 0 0);
  --border: oklch(0.269 0 0);
  --ring: oklch(0.556 0 0);
  --accent: oklch(0.269 0 0);
  --accent-foreground: oklch(0.985 0 0);

  --status-green: oklch(0.72 0.19 145);
  --status-red: oklch(0.7 0.19 25);
  --status-orange: oklch(0.8 0.16 70);
  --status-gray: oklch(0.708 0 0);
}

@theme inline {
  --color-background: var(--background);
  --color-foreground: var(--foreground);
  --color-card: var(--card);
  --color-card-foreground: var(--card-foreground);
  --color-muted: var(--muted);
  --color-muted-foreground: var(--muted-foreground);
  --color-border: var(--border);
  --color-ring: var(--ring);
  --color-accent: var(--accent);
  --color-accent-foreground: var(--accent-foreground);
  --color-status-green: var(--status-green);
  --color-status-red: var(--status-red);
  --color-status-orange: var(--status-orange);
  --color-status-gray: var(--status-gray);
  --font-sans: "Inter Variable", ui-sans-serif, system-ui, sans-serif;
}

* {
  border-color: var(--border);
}

body {
  background-color: var(--background);
  color: var(--foreground);
  font-family: var(--font-sans);
  -webkit-font-smoothing: antialiased;
  -moz-osx-font-smoothing: grayscale;
  margin: 0;
}

/* Focus styling - only visible on keyboard navigation */
*:focus {
  outline: none;
}

/* Form inputs get outline-based focus directly on border */
input[type="text"]:focus-visible,
input[type="search"]:focus-visible,
input[type="email"]:focus-visible,
input[type="password"]:focus-visible,
input[type="number"]:focus-visible,
input[type="url"]:focus-visible,
input[type="tel"]:focus-visible,
select:focus-visible,
textarea:focus-visible {
  outline: 2px solid var(--ring);
  outline-offset: 0;
}

/* Buttons get rounded box-shadow focus (outline doesn't support border-radius) */
button:focus-visible {
  outline: none;
  box-shadow: 0 0 0 2px var(--background), 0 0 0 4px var(--ring);
}

/* Checkboxes get direct outline focus */
input[type="checkbox"]:focus-visible,
input[type="radio"]:focus-visible {
  outline: 2px solid var(--ring);
  outline-offset: 2px;
}

/* Checkbox styling - theme-aware appearance */
input[type="checkbox"] {
  appearance: none;
  width: 1rem;
  height: 1rem;
  border: 1.5px solid var(--border);
  border-radius: 0.25rem;
  background-color: var(--card);
  cursor: pointer;
  display: inline-grid;
  place-content: center;
  flex-shrink: 0;
}

input[type="checkbox"]:checked {
  background-color: var(--foreground);
  border-color: var(--foreground);
}

input[type="checkbox"]:checked::before {
  content: "";
  width: 0.5rem;
  height: 0.5rem;
  background-color: var(--background);
  clip-path: polygon(14% 44%, 0 65%, 50% 100%, 100% 16%, 80% 0%, 43% 62%);
}

html:not(.no-transition) body,
html:not(.no-transition) body * {
  transition: background-color 300ms, color 300ms, border-color 300ms, fill 300ms;
}

/* View Transitions API - disable default cross-fade so JS can animate clip-path */
::view-transition-old(root),
::view-transition-new(root) {
  animation: none;
  mix-blend-mode: normal;
}

/* OverlayScrollbars — custom handle colors per theme */
:root .os-scrollbar {
  --os-handle-bg: rgba(0, 0, 0, 0.25);
  --os-handle-bg-hover: rgba(0, 0, 0, 0.35);
  --os-handle-bg-active: rgba(0, 0, 0, 0.45);
}

.dark .os-scrollbar {
  --os-handle-bg: rgba(255, 255, 255, 0.35);
  --os-handle-bg-hover: rgba(255, 255, 255, 0.45);
  --os-handle-bg-active: rgba(255, 255, 255, 0.55);
}

.os-scrollbar-handle {
  border-radius: 4px;
}

/* Hide native scrollbars on body — OverlayScrollbars takes over */
html,
body {
  scrollbar-width: none;
  -ms-overflow-style: none;
}

html::-webkit-scrollbar,
body::-webkit-scrollbar {
  display: none;
}

/* Native scrollbars — theme-aware styling for inner scrollable elements */
* {
  scrollbar-width: thin;
  scrollbar-color: rgba(0, 0, 0, 0.25) transparent;
}

.dark * {
  scrollbar-color: rgba(255, 255, 255, 0.3) transparent;
}

::-webkit-scrollbar {
  width: 8px;
  height: 8px;
}

::-webkit-scrollbar-track {
  background: transparent;
}

::-webkit-scrollbar-thumb {
  background: rgba(0, 0, 0, 0.25);
  border-radius: 4px;
}

::-webkit-scrollbar-thumb:hover {
  background: rgba(0, 0, 0, 0.35);
}

.dark ::-webkit-scrollbar-thumb {
  background: rgba(255, 255, 255, 0.3);
}

.dark ::-webkit-scrollbar-thumb:hover {
  background: rgba(255, 255, 255, 0.4);
}

@keyframes pulse {
  0%,
  100% {
    opacity: 0.2;
  }
  50% {
    opacity: 0.4;
  }
}

.animate-pulse {
  animation: pulse 2s ease-in-out infinite;
}
@@ -0,0 +1,18 @@
<script lang="ts">
  import { authStore } from "$lib/auth.svelte";
  import { LogIn } from "@lucide/svelte";
</script>

<div class="flex min-h-screen items-center justify-center">
  <div class="w-full max-w-sm space-y-6 text-center">
    <h1 class="text-3xl font-bold">Sign In</h1>
    <p class="text-muted-foreground">Sign in with your Discord account to continue.</p>
    <button
      onclick={() => authStore.login()}
      class="inline-flex w-full items-center justify-center gap-2 rounded-lg bg-[#5865F2] px-6 py-3 text-lg font-semibold text-white transition-colors hover:bg-[#4752C4]"
    >
      <LogIn size={20} />
      Sign in with Discord
    </button>
  </div>
</div>
@@ -1,13 +0,0 @@
@import "@radix-ui/themes/styles.css";

body {
  margin: 0;
  font-family: -apple-system, BlinkMacSystemFont, "Segoe UI", "Roboto", "Oxygen", "Ubuntu",
    "Cantarell", "Fira Sans", "Droid Sans", "Helvetica Neue", sans-serif;
  -webkit-font-smoothing: antialiased;
  -moz-osx-font-smoothing: grayscale;
}

code {
  font-family: source-code-pro, Menlo, Monaco, Consolas, "Courier New", monospace;
}
Some files were not shown because too many files have changed in this diff.