29 Commits

SHA1 Message Date
2ec50ee731 chore(master): release 0.6.2 2026-01-31 01:44:06 -06:00
9e825cd113 fix: re-add overflow hidden for page transitions, but with negative margin padding to avoid clipping
Prevents scrollbars while also avoiding clipping on certain pages.
2026-01-31 01:40:22 -06:00
ac8dbb2eef fix: separate Biome format and lint checks to enable auto-format
Biome's 'check' command runs both formatting and linting, causing
overlapping failures that prevented auto-format from triggering.
Split into separate commands and removed web-lint check since Biome
linting crashes on Svelte 5 syntax. Renamed check steps for clarity.
2026-01-31 01:05:19 -06:00
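A minimal sketch of what the split looks like in practice, using Bun.spawnSync the way the repo's scripts do; the exact paths and wiring here are assumptions, not the repo's actual script:

```typescript
// Hypothetical split: run Biome's formatter and linter as separate steps
// instead of the combined `biome check`, so a formatting-only failure can
// be detected (and auto-fixed) independently of lint results.
const steps: string[][] = [
  ["bunx", "biome", "format", "web/src"], // format check only
  // ["bunx", "biome", "lint", "web/src"], // disabled: crashes on Svelte 5 syntax
];

for (const cmd of steps) {
  const proc = Bun.spawnSync(cmd, { stdio: ["inherit", "inherit", "inherit"] });
  if (proc.exitCode !== 0) process.exit(proc.exitCode);
}
```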
5dd35ed215 fix(web): prevent duplicate searches and background fetching on navigation
- Search page no longer triggers cascading search when validating subjects
- Scraper page stops all refresh timers and API calls when navigating away
- Wrap initial data references in untrack() to silence Svelte warnings
2026-01-31 01:03:20 -06:00
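For the untrack() part, a minimal sketch of the pattern; the component and variable names are hypothetical, but `untrack` is Svelte 5's real export:

```typescript
// search-page.svelte.ts — hypothetical names throughout.
import { untrack } from "svelte";

type Result = { crn: string; title: string };
declare const initialResults: Result[];       // assumed server-loaded data
declare function search(q: string): Result[]; // assumed search helper

let query = $state("");
let results = $state<Result[]>([]);

$effect(() => {
  const q = query; // tracked dependency: effect re-runs when it changes
  // Read the initial data without subscribing to it, silencing the
  // Svelte warning and avoiding re-runs when the initial data changes.
  const seed = untrack(() => initialResults);
  results = q ? search(q) : seed;
});
```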
2acf52a63b fix(cli): add proper flag validation for check script 2026-01-31 00:42:52 -06:00
47132e71d7 chore(master): release 0.6.1 (#1) 2026-01-31 00:37:13 -06:00
87db1a4ccb refactor: extract Justfile inline scripts into scripts/ directory
Move all [script("bun")] blocks into standalone TypeScript files under
scripts/ with shared utilities in scripts/lib/. The Justfile is now ~40
lines of thin `bun scripts/*.ts` wrappers.

Shared code consolidated into two lib files:
- lib/proc.ts: process spawning (run, spawnCollect, raceInOrder, ProcessGroup)
- lib/fmt.ts: color output, elapsed timers, reusable flag parser
2026-01-31 00:34:27 -06:00
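The extracted helpers presumably look much like the inline versions they replaced (visible further down in the Justfile diff); a sketch:

```typescript
// scripts/lib/proc.ts — sketch based on the inline helpers the commit extracted.

// Run a command with inherited stdio; propagate a non-zero exit code.
export function run(cmd: string[]): void {
  const proc = Bun.spawnSync(cmd, { stdio: ["inherit", "inherit", "inherit"] });
  if (proc.exitCode !== 0) process.exit(proc.exitCode);
}

// Spawn and capture output for deferred, ordered display (cf. raceInOrder).
export async function spawnCollect(cmd: string[]) {
  const proc = Bun.spawn(cmd, {
    env: { ...process.env, FORCE_COLOR: "1" },
    stdout: "pipe",
    stderr: "pipe",
  });
  const [stdout, stderr] = await Promise.all([
    new Response(proc.stdout).text(),
    new Response(proc.stderr).text(),
  ]);
  await proc.exited;
  return { stdout, stderr, exitCode: proc.exitCode };
}
```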
e203e8e182 feat(build): auto-regenerate TypeScript bindings on source changes 2026-01-31 00:27:27 -06:00
cbb0a51bca refactor(terms): move term formatting from frontend to backend 2026-01-31 00:26:41 -06:00
c533768362 feat(scraper): improve results visibility and loading states 2026-01-30 23:36:23 -06:00
16039e02a9 fix(metrics): always emit baseline metrics on initial course insertion 2026-01-30 23:32:04 -06:00
7d2255a988 fix(data): decode HTML entities in course titles and instructor names 2026-01-30 23:31:05 -06:00
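The fix lands on the Rust side (the Cargo.toml diff below adds htmlize with its unescape feature, which likely does the real work); a minimal TypeScript sketch of the same idea, covering only the common named entities:

```typescript
// Illustration only: a full decoder uses the complete HTML entity table.
const ENTITIES: Record<string, string> = {
  "&amp;": "&", "&lt;": "<", "&gt;": ">", "&quot;": '"', "&#39;": "'",
};

function decodeEntities(s: string): string {
  return s.replace(/&(?:amp|lt|gt|quot|#39);/g, (m) => ENTITIES[m]);
}

// decodeEntities("Algorithms &amp; Data Structures") === "Algorithms & Data Structures"
```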
8bfc14e55c feat(course): distinguish async from synchronous online courses
Add logic to detect and label asynchronous online sections (INT building
with TBA times) separately from synchronous online courses. Update table
rendering to show "Async" instead of "TBA" for these sections.
2026-01-30 23:27:54 -06:00
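The detection rule the commit describes, as a hedged sketch; the field names are assumptions about the section model, which actually lives in the Rust backend:

```typescript
// Hypothetical meeting-time shape.
type MeetingTime = { building: string | null; beginTime: string | null };

// INT (internet) building with no scheduled times => asynchronous online.
function isAsyncOnline(meetings: MeetingTime[]): boolean {
  return (
    meetings.length > 0 &&
    meetings.every((m) => m.building === "INT" && m.beginTime === null)
  );
}

// Table rendering then shows "Async" instead of "TBA" for these sections.
```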
2689587dd5 fix: avoid status flickering on subjects table 2026-01-30 22:04:48 -06:00
1ad614dad0 feat(scraper): improve dashboard clarity with stat tooltips 2026-01-30 22:00:59 -06:00
ebb7a97c11 fix(ci): add postgres container service for rust tests
Also updated deprecated codeql action to v4.
2026-01-30 21:36:32 -06:00
2df0ba0ec5 chore: add ts-rs generated bindings 2026-01-30 21:29:32 -06:00
dd148e08a0 fix(ci): fix rust/frontend/security job failures and expand local checks 2026-01-30 21:22:01 -06:00
3494341e3f ci: split quality checks into parallel jobs with security scanning
Reorganize CI pipeline into separate jobs for Rust quality, frontend
quality, tests, Docker build, and security audits. Add cargo-audit,
bun audit, and Trivy filesystem scanning. Allow formatting checks to
pass with warnings on push events while failing on PRs.
2026-01-30 21:08:16 -06:00
acccaa54d4 chore: update frontend packages 2026-01-30 21:07:08 -06:00
6863ee58d0 ci: add Release Please automation for changelog and version management 2026-01-30 21:05:15 -06:00
550401b85c refactor: use friendly term codes in URL query parameters 2026-01-30 20:49:08 -06:00
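Purely illustrative sketch of the idea; the season digits below assume a Banner-style YYYYSS encoding, which is not necessarily the repo's actual mapping:

```typescript
// Assumed encoding for illustration only: trailing two digits = season.
const SEASONS: Record<string, string> = { "10": "fall", "20": "spring", "30": "summer" };

function toFriendly(code: string): string {
  const season = SEASONS[code.slice(4)];
  return season ? `${season}-${code.slice(0, 4)}` : code; // "202610" -> "fall-2026"
}

function fromFriendly(friendly: string): string {
  const [season, year] = friendly.split("-");
  const digits = Object.entries(SEASONS).find(([, s]) => s === season)?.[0];
  return digits ? `${year}${digits}` : friendly;
}
```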
b02a0738e2 chore: bump to v0.6.0, update roadmap & changelog 2026-01-30 20:37:31 -06:00
5d7d60cd96 fix: prevent session pool deadlock on acquire cancellation
Replace is_creating mutex with atomic flag and RAII guard to ensure
proper cleanup when acquire() futures are cancelled mid-creation,
preventing permanent deadlock for subsequent callers.
2026-01-30 20:19:10 -06:00
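The fix itself is Rust (an AtomicBool cleared by an RAII Drop guard); as a loose TypeScript analog of the invariant — the flag must be cleared on every exit path, including early abandonment — using try/finally:

```typescript
type Session = { id: string };
declare function createSession(): Promise<Session>; // assumed helper

class SessionPool {
  private creating = false; // analog of the atomic is_creating flag

  async acquire(): Promise<Session> {
    // (A real pool would wait or reuse if another creation is in flight.)
    this.creating = true;
    try {
      return await createSession(); // may reject or be abandoned mid-flight
    } finally {
      this.creating = false; // guard analog: runs on every exit path
    }
  }
}
```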
1954166db6 feat: add name parsing and normalization for instructor-RMP matching 2026-01-30 20:02:59 -06:00
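A hypothetical sketch of such a normalization pass; NFKD plus combining-mark stripping is the standard trick, and the Cargo.toml diff below adds unicode-normalization on the Rust side, though the repo's exact rules may differ:

```typescript
function normalizeName(name: string): string {
  return name
    .normalize("NFKD")           // decompose accented characters
    .replace(/\p{M}/gu, "")      // drop combining marks: é -> e
    .toLowerCase()
    .replace(/[^a-z\s'-]/g, " ") // strip punctuation such as commas and dots
    .replace(/\s+/g, " ")
    .trim();
}

// normalizeName("García-Núñez, José") === "garcia-nunez jose"
```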
a2a9116b7a fix: avoid clipping page content 2026-01-30 19:32:05 -06:00
a103f0643a feat: refactor admin instructor UI with component extraction and optimistic updates 2026-01-30 19:31:31 -06:00
474d519b9d feat: add auto-format recovery when formatting is sole check failure
Enhances check recipe to detect when only formatting checks fail while
peers pass, automatically applies formatters, then re-verifies. Supports
both Rust (rustfmt + cargo-check) and web (biome + svelte-check)
domains. Displays results eagerly as checks complete instead of in
original order.
2026-01-30 16:01:56 -06:00
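The recovery rule reduces to a small predicate; a sketch with assumed check names (rustfmt and biome are the formatting checks visible in the old Justfile further down):

```typescript
type CheckResult = { name: string; ok: boolean };
const FORMAT_CHECKS = new Set(["rustfmt", "biome"]); // per-domain formatters

// Auto-format only when every failure is a formatting check (i.e. all
// non-formatting peers passed); the caller then applies the formatters
// and re-runs the full suite to verify.
function shouldAutoFormat(results: CheckResult[]): boolean {
  const failed = results.filter((r) => !r.ok);
  return failed.length > 0 && failed.every((r) => FORMAT_CHECKS.has(r.name));
}
```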
fb27bdc119 feat: implement session expiry extension and 401 recovery 2026-01-30 16:01:17 -06:00
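The 401-recovery half of this commit follows a common shape — refresh once, replay once. A TypeScript sketch of that shape; the repo's implementation is on the Rust side, and refreshSession here is an assumed helper:

```typescript
declare function refreshSession(): Promise<boolean>; // assumed helper

async function fetchWithRecovery(url: string, init?: RequestInit): Promise<Response> {
  const res = await fetch(url, init);
  if (res.status !== 401) return res;
  // Session likely expired: try to extend/refresh it, then replay once.
  if (!(await refreshSession())) return res; // refresh failed: surface the 401
  return fetch(url, init);
}
```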
118 changed files with 4501 additions and 1772 deletions
+11
@@ -0,0 +1,11 @@
# cargo-audit configuration
# https://github.com/rustsec/rustsec/tree/main/cargo-audit
[advisories]
# Transitive dependencies we can't control
ignore = [
# rsa: Marvin Attack timing sidechannel (via sqlx-mysql, no fix available)
"RUSTSEC-2023-0071",
# derivative: unmaintained (via poise)
"RUSTSEC-2024-0388",
]
+34
@@ -0,0 +1,34 @@
{
"$schema": "https://raw.githubusercontent.com/googleapis/release-please/main/schemas/config.json",
"changelog-sections": [
{ "type": "feat", "section": "Features" },
{ "type": "fix", "section": "Bug Fixes" },
{ "type": "perf", "section": "Performance Improvements" },
{ "type": "refactor", "section": "Code Refactoring" },
{ "type": "docs", "section": "Documentation" },
{ "type": "ci", "section": "Continuous Integration" },
{ "type": "build", "section": "Build System" },
{ "type": "chore", "section": "Miscellaneous" },
{ "type": "style", "section": "Styles", "hidden": true },
{ "type": "test", "section": "Tests", "hidden": true }
],
"bump-minor-pre-major": true,
"always-update": true,
"bump-patch-for-minor-pre-major": true,
"include-v-in-tag": true,
"include-component-in-tag": false,
"plugins": ["sentence-case"],
"packages": {
".": {
"release-type": "rust",
"exclude-paths": [".vscode", "docs"],
"extra-files": [
{
"type": "toml",
"path": "Cargo.lock",
"jsonpath": "$.package[?(@.name=='banner')].version"
}
]
}
}
}
+3
@@ -0,0 +1,3 @@
{
".": "0.6.2"
}
+143 -27
@@ -11,9 +11,9 @@ env:
RUST_BACKTRACE: 1
jobs:
check:
rust-quality:
name: Rust Quality
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
@@ -22,44 +22,160 @@ jobs:
with:
components: rustfmt, clippy
- name: Cache Rust dependencies
uses: Swatinem/rust-cache@v2
with:
cache-on-failure: true
- name: Check formatting
run: |
if [ "${{ github.event_name }}" = "pull_request" ]; then
cargo fmt --all -- --check
else
cargo fmt --all -- --check || echo "::warning::Rust formatting issues found (not failing on push)"
fi
- name: Clippy
run: cargo clippy --no-default-features -- -D warnings
frontend-quality:
name: Frontend Quality
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Setup Bun
uses: oven-sh/setup-bun@v1
uses: oven-sh/setup-bun@v2
with:
bun-version: latest
- name: Install dependencies
working-directory: web
run: bun install --frozen-lockfile
- name: Check formatting
working-directory: web
run: |
if [ "${{ github.event_name }}" = "pull_request" ]; then
bun run format:check
else
bun run format:check || echo "::warning::Frontend formatting issues found (not failing on push)"
fi
- name: Type check
working-directory: web
run: bun run typecheck
rust-tests:
name: Rust Tests
runs-on: ubuntu-latest
services:
postgres:
image: postgres:17-alpine
env:
POSTGRES_USER: banner
POSTGRES_PASSWORD: banner
POSTGRES_DB: banner
ports:
- 5432:5432
options: >-
--health-cmd pg_isready
--health-interval 10s
--health-timeout 5s
--health-retries 5
env:
DATABASE_URL: postgresql://banner:banner@localhost:5432/banner
steps:
- uses: actions/checkout@v4
- name: Install Rust toolchain
uses: dtolnay/rust-toolchain@stable
- name: Cache Rust dependencies
uses: Swatinem/rust-cache@v2
with:
cache-on-failure: true
- name: Run tests
run: cargo test --no-default-features
frontend-tests:
name: Frontend Tests
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Setup Bun
uses: oven-sh/setup-bun@v2
with:
bun-version: latest
- name: Install dependencies
working-directory: web
run: bun install --frozen-lockfile
- name: Run tests
working-directory: web
run: bun run test
docker-build:
name: Docker Build
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: Build image
uses: docker/build-push-action@v6
with:
context: .
push: false
cache-from: type=gha
cache-to: type=gha,mode=max
security:
name: Security Scan
runs-on: ubuntu-latest
permissions:
contents: read
security-events: write
steps:
- uses: actions/checkout@v4
- name: Install cargo-audit
uses: taiki-e/install-action@cargo-audit
- name: Rust security audit
run: cargo audit
- name: Setup Bun
uses: oven-sh/setup-bun@v2
with:
bun-version: latest
- name: Install frontend dependencies
working-directory: web
run: bun install --frozen-lockfile
- name: Check Rust formatting
run: cargo fmt --all -- --check
- name: Check TypeScript formatting
- name: Frontend security audit
working-directory: web
run: bun run format:check
run: bun audit --audit-level=moderate
continue-on-error: true
- name: TypeScript type check
working-directory: web
run: bun run typecheck
- name: Trivy filesystem scan
uses: aquasecurity/trivy-action@master
with:
scan-type: fs
scan-ref: .
format: sarif
output: trivy-results.sarif
severity: CRITICAL,HIGH
exit-code: 0
- name: ESLint
working-directory: web
run: bun run lint
- name: Clippy
run: cargo clippy --all-features -- --deny warnings
- name: Run tests
run: cargo test --all-features
- name: Build frontend
working-directory: web
run: bun run build
- name: Build backend
run: cargo build --release --bin banner
- name: Upload Trivy results
uses: github/codeql-action/upload-sarif@v4
if: always() && hashFiles('trivy-results.sarif') != ''
with:
sarif_file: trivy-results.sarif
+27
@@ -0,0 +1,27 @@
name: Release Please
on:
workflow_dispatch:
workflow_run:
workflows: ["CI"]
types:
- completed
branches:
- master
permissions:
contents: write
pull-requests: write
jobs:
release-please:
name: Create Release PR
runs-on: ubuntu-latest
if: ${{ github.event_name == 'workflow_dispatch' || github.event.workflow_run.conclusion == 'success' }}
steps:
- uses: googleapis/release-please-action@v4
with:
token: ${{ secrets.RELEASE_PLEASE_TOKEN }}
config-file: .github/release-please-config.json
manifest-file: .github/release-please-manifest.json
+2 -3
@@ -1,6 +1,5 @@
.env
/target
/scripts/node_modules
# ts-rs bindings
web/src/lib/bindings/**/*.ts
!web/src/lib/bindings/index.ts
+151
@@ -0,0 +1,151 @@
# Changelog
All notable changes to this project will be documented in this file.
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.1.0/).
## [0.6.2](https://github.com/Xevion/Banner/compare/v0.6.1...v0.6.2) (2026-01-31)
### Bug Fixes
* **cli:** Add proper flag validation for check script ([2acf52a](https://github.com/Xevion/Banner/commit/2acf52a63b6dcd24ca826b99061bf7a51a9230b1))
* Re-add overflow hidden for page transitions, but with negative margin padding to avoid clipping ([9e825cd](https://github.com/Xevion/Banner/commit/9e825cd113bbc65c10f0386b5300b6aec50bf936))
* Separate Biome format and lint checks to enable auto-format ([ac8dbb2](https://github.com/Xevion/Banner/commit/ac8dbb2eefe79ec5d898cfa719e270f4713125d5))
* **web:** Prevent duplicate searches and background fetching on navigation ([5dd35ed](https://github.com/Xevion/Banner/commit/5dd35ed215d3d1f3603e67a2aa59eaddf619f5c9))
## [0.6.1](https://github.com/Xevion/Banner/compare/v0.6.0...v0.6.1) (2026-01-31)
### Features
* **build:** Auto-regenerate TypeScript bindings on source changes ([e203e8e](https://github.com/Xevion/Banner/commit/e203e8e182f7a0b0224a8f9e6bf79d15259215a2))
* **course:** Distinguish async from synchronous online courses ([8bfc14e](https://github.com/Xevion/Banner/commit/8bfc14e55c1bdf5acc2006096476e0b1eb1b7cc6))
* **scraper:** Improve dashboard clarity with stat tooltips ([1ad614d](https://github.com/Xevion/Banner/commit/1ad614dad03d3631a8d119203786718c814e72c7))
* **scraper:** Improve results visibility and loading states ([c533768](https://github.com/Xevion/Banner/commit/c53376836238f3aca92ac82cd5fd59a077bcceff))
### Bug Fixes
* Avoid status flickering on subjects table ([2689587](https://github.com/Xevion/Banner/commit/2689587dd53c572a65eeb91f74c737662e1f148b))
* **ci:** Add postgres container service for rust tests ([ebb7a97](https://github.com/Xevion/Banner/commit/ebb7a97c113fa1d4b61b8637dfe97cae5260075c))
* **ci:** Fix rust/frontend/security job failures and expand local checks ([dd148e0](https://github.com/Xevion/Banner/commit/dd148e08a0b6d5b7afe4ff614d7d6e4e4d0dfce6))
* **data:** Decode HTML entities in course titles and instructor names ([7d2255a](https://github.com/Xevion/Banner/commit/7d2255a988a23f6e1b1c8e7cb5a8ead833ad34da))
* **metrics:** Always emit baseline metrics on initial course insertion ([16039e0](https://github.com/Xevion/Banner/commit/16039e02a999c668d4969a43eb9ed1d4e8d370e1))
### Code Refactoring
* **terms:** Move term formatting from frontend to backend ([cbb0a51](https://github.com/Xevion/Banner/commit/cbb0a51bca9e4e0d6a8fcee90465c93943f2a30e))
* Use friendly term codes in URL query parameters ([550401b](https://github.com/Xevion/Banner/commit/550401b85ceb8a447e316209b479c69062c5b658))
### Continuous Integration
* Add Release Please automation for changelog and version management ([6863ee5](https://github.com/Xevion/Banner/commit/6863ee58d0a5778303af1b7626b2a9eda3043ca0))
* Split quality checks into parallel jobs with security scanning ([3494341](https://github.com/Xevion/Banner/commit/3494341e3fbe9ffd96b6fcd8abbe7f95ecec6f45))
### Miscellaneous
* Add ts-rs generated bindings ([2df0ba0](https://github.com/Xevion/Banner/commit/2df0ba0ec58155d73830a66132cb635dc819e8a9))
* Update frontend packages ([acccaa5](https://github.com/Xevion/Banner/commit/acccaa54d4455500db60d1b6437cad1c592445f1))
## [Unreleased]
## [0.6.0] - 2026-01-30
### Added
- User authentication system with Discord OAuth, sessions, admin roles, and login page with FAQ.
- Interactive timeline visualization with D3 canvas, pan/zoom, touch gestures, and enrollment aggregation API.
- Scraper analytics dashboard with timeseries charts, subject monitoring, and per-subject detail views.
- Adaptive scraper scheduling with admin endpoints for monitoring and configuration.
- Scrape job result persistence for effectiveness tracking.
- WebSocket support for real-time scrape job monitoring with connection status indicators.
- Course change auditing with field-level tracking and time-series metrics endpoint.
- Audit log UI with smart JSON diffing, conditional request caching, and auto-refresh.
- Calendar export web endpoints for ICS download and Google Calendar redirect.
- Confidence-based RMP matching with manual review workflow and admin instructor UI.
- RMP profile links and confidence-aware rating display.
- Name parsing and normalization for improved instructor-RMP matching.
- Mobile touch controls with gesture detection for timeline.
- Worker timeout protection and crash recovery for job queue.
- Build-time asset compression with encoding negotiation (gzip, brotli, zstd).
- Smart page transitions with theme-aware element transitions.
- Search duration and result count feedback.
- Root error page handling.
- Login page with FAQ section and improved styling.
### Changed
- Consolidated navigation with top nav bar and route groups.
- Centralized number formatting with locale-aware utility.
- Modernized Justfile commands and simplified service management.
- Persisted audit log state in module scope for cross-navigation caching.
- Relative time feedback and improved tooltip customization.
### Fixed
- Instructor/course mismatching via build-order-independent map for association.
- Page content clipping.
- Backend startup delays with retry logic in auth.
- Banner API timeouts increased to handle slow responses.
- i64 serialization for JavaScript compatibility, fixing avatar URL display.
- Frontend build ordering with `-e` embed flag in Justfile.
- Login page centering and unnecessary scrollbar.
- ts-rs serde warnings.
## [0.5.0] - 2026-01-29
### Added
- Multi-select subject filtering with searchable comboboxes.
- Smart instructor name abbreviation for compact table display.
- Delivery mode indicators and tooltips in location column.
- Page selector dropdown with animated pagination controls.
- FLIP animations for smooth table row transitions during pagination.
- Time tooltip with detailed meeting schedule and day abbreviations.
- Reusable SimpleTooltip component for consistent UI hints.
### Changed
- Consolidated query logic and eliminated N+1 instructor loads via batch fetching.
- Consolidated menu snippets and strengthened component type safety.
- Enhanced table scrolling with OverlayScrollbars and theme-aware styling.
- Eliminated initial theme flash on page load.
## [0.4.0] - 2026-01-28
### Added
- Web-based course search UI with interactive data table, multi-column sorting, and column visibility controls.
- TypeScript type bindings generated from Rust types via ts-rs.
- RateMyProfessors integration: bulk professor sync via GraphQL and inline rating display in search results.
- Course detail expansion panel with enrollment, meeting times, and instructor info.
- OverlayScrollbars integration for styled, theme-aware scrollable areas.
- Pagination component for navigating large search result sets.
- Footer component with version display.
- API endpoints: `/api/courses/search`, `/api/courses/:term/:crn`, `/api/terms`, `/api/subjects`, `/api/reference/:category`.
- Frontend API client with typed request/response handling and test coverage.
- Course formatting utilities with comprehensive unit tests.
## [0.3.4] - 2026-01
### Added
- Live service status tracking on web dashboard with auto-refresh and health indicators.
- DB operation extraction for improved testability.
- Unit test suite foundation covering core functionality.
- Docker support for PostgreSQL development environment.
- ICS calendar export with comprehensive holiday exclusion coverage.
- Google Calendar link generation with recurrence rules and meeting details.
- Job queue with priority-based scheduling for background scraping.
- Rate limiting with burst allowance for Banner API requests.
- Session management and caching for Banner API interactions.
- Discord bot commands: search, terms, ics, gcal.
- Intelligent scraping system with priority queues and retry tracking.
### Changed
- Type consolidation and dead code removal across the codebase.
+212 -316
@@ -182,14 +182,14 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8b52af3cb4058c895d37317bb27508dccc8e5f2d39454016b297bf4a400597b8"
dependencies = [
"axum-core",
"base64 0.22.1",
"base64",
"bytes",
"form_urlencoded",
"futures-util",
"http 1.3.1",
"http-body 1.0.1",
"http",
"http-body",
"http-body-util",
"hyper 1.7.0",
"hyper",
"hyper-util",
"itoa",
"matchit",
@@ -202,7 +202,7 @@ dependencies = [
"serde_path_to_error",
"serde_urlencoded",
"sha1",
"sync_wrapper 1.0.2",
"sync_wrapper",
"tokio",
"tokio-tungstenite 0.28.0",
"tower",
@@ -219,12 +219,12 @@ checksum = "08c78f31d7b1291f7ee735c1c6780ccde7785daae9a9206026862dab7d8792d1"
dependencies = [
"bytes",
"futures-core",
"http 1.3.1",
"http-body 1.0.1",
"http",
"http-body",
"http-body-util",
"mime",
"pin-project-lite",
"sync_wrapper 1.0.2",
"sync_wrapper",
"tower-layer",
"tower-service",
"tracing",
@@ -242,8 +242,8 @@ dependencies = [
"form_urlencoded",
"futures-core",
"futures-util",
"http 1.3.1",
"http-body 1.0.1",
"http",
"http-body",
"http-body-util",
"mime",
"pin-project-lite",
@@ -272,13 +272,13 @@ dependencies = [
[[package]]
name = "banner"
version = "0.5.0"
version = "0.6.2"
dependencies = [
"anyhow",
"async-trait",
"axum",
"axum-extra",
"bitflags 2.9.4",
"bitflags",
"chrono",
"chrono-tz",
"clap",
@@ -292,14 +292,15 @@ dependencies = [
"futures",
"governor",
"html-escape",
"http 1.3.1",
"htmlize",
"http",
"mime_guess",
"num-format",
"poise",
"rand 0.9.2",
"rapidhash",
"regex",
"reqwest 0.12.23",
"reqwest",
"reqwest-middleware",
"rust-embed",
"serde",
@@ -315,17 +316,12 @@ dependencies = [
"tracing",
"tracing-subscriber",
"ts-rs",
"unicode-normalization",
"url",
"urlencoding",
"yansi",
]
[[package]]
name = "base64"
version = "0.21.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9d297deb1925b89f2ccc13d7635fa0714f12c87adce1c75356b39ca9b7178567"
[[package]]
name = "base64"
version = "0.22.1"
@@ -338,12 +334,6 @@ version = "1.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "55248b47b0caf0546f7988906588779981c43bb1bc9d0c44087278f80cdb44ba"
[[package]]
name = "bitflags"
version = "1.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a"
[[package]]
name = "bitflags"
version = "2.9.4"
@@ -471,6 +461,12 @@ version = "1.0.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2fd1289c04a9ea8cb22300a459a72a385d7c73d3259e2ed7dcb2af674838cfa9"
[[package]]
name = "cfg_aliases"
version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "613afe47fcd5fac7ccf1db93babcb082c5994d996f20b8b159f2ad1658eb5724"
[[package]]
name = "chrono"
version = "0.4.42"
@@ -492,7 +488,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a6139a8597ed92cf816dfb33f5dd6cf0bb93a6adc938f11039f371bc5bcd26c3"
dependencies = [
"chrono",
"phf",
"phf 0.12.1",
]
[[package]]
@@ -543,9 +539,9 @@ checksum = "b05b61dc5112cbb17e4b6cd61790d9845d13888356391624cbe7e41efeac1e75"
[[package]]
name = "command_attr"
version = "0.5.3"
version = "0.5.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6fcc89439e1bb4e19050a9586a767781a3060000d2f3296fd2a40597ad9421c5"
checksum = "8208103c5e25a091226dfa8d61d08d0561cc14f31b25691811ba37d4ec9b157b"
dependencies = [
"proc-macro2",
"quote",
@@ -1144,15 +1140,6 @@ dependencies = [
"slab",
]
[[package]]
name = "fxhash"
version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c31b6d751ae2c7f11320402d34e41349dd1016f8d5d45e48c4312bc8625af50c"
dependencies = [
"byteorder",
]
[[package]]
name = "generic-array"
version = "0.14.7"
@@ -1170,8 +1157,10 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "335ff9f135e4384c8150d6f27c6daed433577f86b4750418338c01a1a2528592"
dependencies = [
"cfg-if",
"js-sys",
"libc",
"wasi 0.11.1+wasi-snapshot-preview1",
"wasm-bindgen",
]
[[package]]
@@ -1236,25 +1225,6 @@ dependencies = [
"web-time",
]
[[package]]
name = "h2"
version = "0.3.27"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0beca50380b1fc32983fc1cb4587bfa4bb9e78fc259aad4a0032d2080309222d"
dependencies = [
"bytes",
"fnv",
"futures-core",
"futures-sink",
"futures-util",
"http 0.2.12",
"indexmap",
"slab",
"tokio",
"tokio-util",
"tracing",
]
[[package]]
name = "h2"
version = "0.4.12"
@@ -1266,7 +1236,7 @@ dependencies = [
"fnv",
"futures-core",
"futures-sink",
"http 1.3.1",
"http",
"indexmap",
"slab",
"tokio",
@@ -1349,14 +1319,16 @@ dependencies = [
]
[[package]]
name = "http"
version = "0.2.12"
name = "htmlize"
version = "1.0.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "601cbb57e577e2f5ef5be8e7b83f0f63994f25aa94d673e54a92d5c516d101f1"
checksum = "d347c0de239be20ba0982e4822de3124404281e119ae3e11f5d7425a414e1935"
dependencies = [
"bytes",
"fnv",
"itoa",
"memchr",
"pastey",
"phf 0.11.3",
"phf_codegen",
"serde_json",
]
[[package]]
@@ -1370,17 +1342,6 @@ dependencies = [
"itoa",
]
[[package]]
name = "http-body"
version = "0.4.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7ceab25649e9960c0311ea418d17bee82c0dcec1bd053b5f9a66e265a693bed2"
dependencies = [
"bytes",
"http 0.2.12",
"pin-project-lite",
]
[[package]]
name = "http-body"
version = "1.0.1"
@@ -1388,7 +1349,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1efedce1fb8e6913f23e0c92de8e62cd5b772a67e7b3946df930a62566c93184"
dependencies = [
"bytes",
"http 1.3.1",
"http",
]
[[package]]
@@ -1399,8 +1360,8 @@ checksum = "b021d93e26becf5dc7e1b75b1bed1fd93124b374ceb73f43d4d4eafec896a64a"
dependencies = [
"bytes",
"futures-core",
"http 1.3.1",
"http-body 1.0.1",
"http",
"http-body",
"pin-project-lite",
]
@@ -1416,30 +1377,6 @@ version = "1.0.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "df3b46402a9d5adb4c86a0cf463f42e19994e3ee891101b1841f30a545cb49a9"
[[package]]
name = "hyper"
version = "0.14.32"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "41dfc780fdec9373c01bae43289ea34c972e40ee3c9f6b3c8801a35f35586ce7"
dependencies = [
"bytes",
"futures-channel",
"futures-core",
"futures-util",
"h2 0.3.27",
"http 0.2.12",
"http-body 0.4.6",
"httparse",
"httpdate",
"itoa",
"pin-project-lite",
"socket2 0.5.10",
"tokio",
"tower-service",
"tracing",
"want",
]
[[package]]
name = "hyper"
version = "1.7.0"
@@ -1450,9 +1387,9 @@ dependencies = [
"bytes",
"futures-channel",
"futures-core",
"h2 0.4.12",
"http 1.3.1",
"http-body 1.0.1",
"h2",
"http",
"http-body",
"httparse",
"httpdate",
"itoa",
@@ -1463,34 +1400,21 @@ dependencies = [
"want",
]
[[package]]
name = "hyper-rustls"
version = "0.24.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ec3efd23720e2049821a693cbc7e65ea87c72f1c58ff2f9522ff332b1491e590"
dependencies = [
"futures-util",
"http 0.2.12",
"hyper 0.14.32",
"rustls 0.21.12",
"tokio",
"tokio-rustls 0.24.1",
]
[[package]]
name = "hyper-rustls"
version = "0.27.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e3c93eb611681b207e1fe55d5a71ecf91572ec8a6705cdb6857f7d8d5242cf58"
dependencies = [
"http 1.3.1",
"hyper 1.7.0",
"http",
"hyper",
"hyper-util",
"rustls 0.23.31",
"rustls-pki-types",
"tokio",
"tokio-rustls 0.26.2",
"tower-service",
"webpki-roots 1.0.2",
]
[[package]]
@@ -1501,7 +1425,7 @@ checksum = "70206fc6890eaca9fde8a0bf71caa2ddfc9fe045ac9e5c70df101a7dbde866e0"
dependencies = [
"bytes",
"http-body-util",
"hyper 1.7.0",
"hyper",
"hyper-util",
"native-tls",
"tokio",
@@ -1515,20 +1439,20 @@ version = "0.1.16"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8d9b05277c7e8da2c93a568989bb6207bef0112e8d17df7a6eda4a3cf143bc5e"
dependencies = [
"base64 0.22.1",
"base64",
"bytes",
"futures-channel",
"futures-core",
"futures-util",
"http 1.3.1",
"http-body 1.0.1",
"hyper 1.7.0",
"http",
"http-body",
"hyper",
"ipnet",
"libc",
"percent-encoding",
"pin-project-lite",
"socket2 0.6.0",
"system-configuration 0.6.1",
"socket2",
"system-configuration",
"tokio",
"tower-service",
"tracing",
@@ -1694,7 +1618,7 @@ version = "0.7.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "046fa2d4d00aea763528b4950358d0ead425372445dc8ff86312b3c69ff7727b"
dependencies = [
"bitflags 2.9.4",
"bitflags",
"cfg-if",
"libc",
]
@@ -1780,7 +1704,7 @@ version = "0.1.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "391290121bad3d37fbddad76d8f5d1c1c314cfc646d143d7e07a3086ddff0ce3"
dependencies = [
"bitflags 2.9.4",
"bitflags",
"libc",
"redox_syscall",
]
@@ -1829,6 +1753,12 @@ version = "0.4.27"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "13dc2df351e3202783a1fe0d44375f7295ffb4049267b0f3018346dc122a1d94"
[[package]]
name = "lru-slab"
version = "0.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "112b39cec0b298b6c1999fee3e31427f74f676e4cb9879ed1a121b43661a4154"
[[package]]
name = "matchers"
version = "0.2.0"
@@ -2033,7 +1963,7 @@ version = "0.10.73"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8505734d46c8ab1e19a1dce3aef597ad87dcb4c37e7188231769bd6bd51cebf8"
dependencies = [
"bitflags 2.9.4",
"bitflags",
"cfg-if",
"foreign-types",
"libc",
@@ -2100,6 +2030,12 @@ dependencies = [
"windows-targets 0.52.6",
]
[[package]]
name = "pastey"
version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "35fb2e5f958ec131621fdd531e9fc186ed768cbe395337403ae56c17a74c68ec"
[[package]]
name = "pear"
version = "0.2.9"
@@ -2138,13 +2074,51 @@ version = "2.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9b4f627cb1b25917193a259e49bdad08f671f8d9708acfd5fe0a8c1455d87220"
[[package]]
name = "phf"
version = "0.11.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1fd6780a80ae0c52cc120a26a1a42c1ae51b247a253e4e06113d23d2c2edd078"
dependencies = [
"phf_shared 0.11.3",
]
[[package]]
name = "phf"
version = "0.12.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "913273894cec178f401a31ec4b656318d95473527be05c0752cc41cdc32be8b7"
dependencies = [
"phf_shared",
"phf_shared 0.12.1",
]
[[package]]
name = "phf_codegen"
version = "0.11.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "aef8048c789fa5e851558d709946d6d79a8ff88c0440c587967f8e94bfb1216a"
dependencies = [
"phf_generator",
"phf_shared 0.11.3",
]
[[package]]
name = "phf_generator"
version = "0.11.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3c80231409c20246a13fddb31776fb942c38553c51e871f8cbd687a4cfb5843d"
dependencies = [
"phf_shared 0.11.3",
"rand 0.8.5",
]
[[package]]
name = "phf_shared"
version = "0.11.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "67eabc2ef2a60eb7faa00097bd1ffdb5bd28e62bf39990626a582201b7a754e5"
dependencies = [
"siphasher",
]
[[package]]
@@ -2298,7 +2272,7 @@ version = "0.9.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "57206b407293d2bcd3af849ce869d52068623f19e1b5ff8e8778e3309439682b"
dependencies = [
"bitflags 2.9.4",
"bitflags",
"memchr",
"unicase",
]
@@ -2318,6 +2292,61 @@ dependencies = [
"winapi",
]
[[package]]
name = "quinn"
version = "0.11.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b9e20a958963c291dc322d98411f541009df2ced7b5a4f2bd52337638cfccf20"
dependencies = [
"bytes",
"cfg_aliases",
"pin-project-lite",
"quinn-proto",
"quinn-udp",
"rustc-hash",
"rustls 0.23.31",
"socket2",
"thiserror 2.0.16",
"tokio",
"tracing",
"web-time",
]
[[package]]
name = "quinn-proto"
version = "0.11.13"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f1906b49b0c3bc04b5fe5d86a77925ae6524a19b816ae38ce1e426255f1d8a31"
dependencies = [
"bytes",
"getrandom 0.3.3",
"lru-slab",
"rand 0.9.2",
"ring",
"rustc-hash",
"rustls 0.23.31",
"rustls-pki-types",
"slab",
"thiserror 2.0.16",
"tinyvec",
"tracing",
"web-time",
]
[[package]]
name = "quinn-udp"
version = "0.5.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "addec6a0dcad8a8d96a771f815f0eaf55f9d1805756410b39f5fa81332574cbd"
dependencies = [
"cfg_aliases",
"libc",
"once_cell",
"socket2",
"tracing",
"windows-sys 0.60.2",
]
[[package]]
name = "quote"
version = "1.0.40"
@@ -2407,7 +2436,7 @@ version = "11.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "498cd0dc59d73224351ee52a95fee0f1a617a2eae0e7d9d720cc622c73a54186"
dependencies = [
"bitflags 2.9.4",
"bitflags",
]
[[package]]
@@ -2416,7 +2445,7 @@ version = "0.5.17"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5407465600fb0548f1442edf71dd20683c6ed326200ace4b1ef0763521bb3b77"
dependencies = [
"bitflags 2.9.4",
"bitflags",
]
[[package]]
@@ -2448,90 +2477,54 @@ version = "0.8.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "caf4aa5b0f434c91fe5c7f1ecb6a5ece2130b02ad2a590589dda5146df959001"
[[package]]
name = "reqwest"
version = "0.11.27"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dd67538700a17451e7cba03ac727fb961abb7607553461627b97de0b89cf4a62"
dependencies = [
"base64 0.21.7",
"bytes",
"encoding_rs",
"futures-core",
"futures-util",
"h2 0.3.27",
"http 0.2.12",
"http-body 0.4.6",
"hyper 0.14.32",
"hyper-rustls 0.24.2",
"ipnet",
"js-sys",
"log",
"mime",
"mime_guess",
"once_cell",
"percent-encoding",
"pin-project-lite",
"rustls 0.21.12",
"rustls-pemfile",
"serde",
"serde_json",
"serde_urlencoded",
"sync_wrapper 0.1.2",
"system-configuration 0.5.1",
"tokio",
"tokio-rustls 0.24.1",
"tokio-util",
"tower-service",
"url",
"wasm-bindgen",
"wasm-bindgen-futures",
"wasm-streams",
"web-sys",
"webpki-roots 0.25.4",
"winreg",
]
[[package]]
name = "reqwest"
version = "0.12.23"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d429f34c8092b2d42c7c93cec323bb4adeb7c67698f70839adec842ec10c7ceb"
dependencies = [
"base64 0.22.1",
"base64",
"bytes",
"cookie",
"cookie_store",
"encoding_rs",
"futures-core",
"h2 0.4.12",
"http 1.3.1",
"http-body 1.0.1",
"futures-util",
"h2",
"http",
"http-body",
"http-body-util",
"hyper 1.7.0",
"hyper-rustls 0.27.7",
"hyper",
"hyper-rustls",
"hyper-tls",
"hyper-util",
"js-sys",
"log",
"mime",
"mime_guess",
"native-tls",
"percent-encoding",
"pin-project-lite",
"quinn",
"rustls 0.23.31",
"rustls-pki-types",
"serde",
"serde_json",
"serde_urlencoded",
"sync_wrapper 1.0.2",
"sync_wrapper",
"tokio",
"tokio-native-tls",
"tokio-rustls 0.26.2",
"tokio-util",
"tower",
"tower-http",
"tower-service",
"url",
"wasm-bindgen",
"wasm-bindgen-futures",
"wasm-streams",
"web-sys",
"webpki-roots 1.0.2",
]
[[package]]
@@ -2542,8 +2535,8 @@ checksum = "57f17d28a6e6acfe1733fe24bcd30774d13bffa4b8a22535b4c8c98423088d4e"
dependencies = [
"anyhow",
"async-trait",
"http 1.3.1",
"reqwest 0.12.23",
"http",
"reqwest",
"serde",
"thiserror 1.0.69",
"tower-service",
@@ -2624,6 +2617,12 @@ version = "0.1.26"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "56f7d92ca342cea22a06f2121d944b4fd82af56988c270852495420f961d4ace"
[[package]]
name = "rustc-hash"
version = "2.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "357703d41365b4b27c590e3ed91eabb1b663f07c4c084095e60cbed4362dff0d"
[[package]]
name = "rustc_version"
version = "0.4.1"
@@ -2639,25 +2638,13 @@ version = "1.0.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "11181fbabf243db407ef8df94a6ce0b2f9a733bd8be4ad02b4eda9602296cac8"
dependencies = [
"bitflags 2.9.4",
"bitflags",
"errno",
"libc",
"linux-raw-sys",
"windows-sys 0.60.2",
]
[[package]]
name = "rustls"
version = "0.21.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3f56a14d1f48b391359b22f731fd4bd7e43c97f3c50eee276f3aa09c94784d3e"
dependencies = [
"log",
"ring",
"rustls-webpki 0.101.7",
"sct",
]
[[package]]
name = "rustls"
version = "0.22.4"
@@ -2686,34 +2673,16 @@ dependencies = [
"zeroize",
]
[[package]]
name = "rustls-pemfile"
version = "1.0.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1c74cae0a4cf6ccbbf5f359f08efdf8ee7e1dc532573bf0db71968cb56b1448c"
dependencies = [
"base64 0.21.7",
]
[[package]]
name = "rustls-pki-types"
version = "1.12.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "229a4a4c221013e7e1f1a043678c5cc39fe5171437c88fb47151a21e6f5b5c79"
dependencies = [
"web-time",
"zeroize",
]
[[package]]
name = "rustls-webpki"
version = "0.101.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8b6275d1ee7a1cd780b64aca7726599a1dbc893b1e64144529e55c3c2f745765"
dependencies = [
"ring",
"untrusted",
]
[[package]]
name = "rustls-webpki"
version = "0.102.8"
@@ -2772,16 +2741,6 @@ version = "1.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49"
[[package]]
name = "sct"
version = "0.7.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "da046153aa2352493d6cb7da4b6e5c0c057d8a1d0a9aa8560baffdd945acd414"
dependencies = [
"ring",
"untrusted",
]
[[package]]
name = "secrecy"
version = "0.8.0"
@@ -2798,7 +2757,7 @@ version = "2.11.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "897b2245f0b511c87893af39b033e5ca9cce68824c4d7e7630b5a1d339658d02"
dependencies = [
"bitflags 2.9.4",
"bitflags",
"core-foundation",
"core-foundation-sys",
"libc",
@@ -2921,26 +2880,26 @@ dependencies = [
[[package]]
name = "serenity"
version = "0.12.4"
version = "0.12.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3d72ec4323681bf9a3cabe40fd080abc2435859b502a1b5aa9bf693f125bfa76"
checksum = "9bde37f42765dfdc34e2a039e0c84afbf79a3101c1941763b0beb816c2f17541"
dependencies = [
"arrayvec",
"async-trait",
"base64 0.22.1",
"bitflags 2.9.4",
"base64",
"bitflags",
"bytes",
"chrono",
"command_attr",
"dashmap 5.5.3",
"flate2",
"futures",
"fxhash",
"levenshtein",
"mime_guess",
"parking_lot",
"percent-encoding",
"reqwest 0.11.27",
"reqwest",
"rustc-hash",
"secrecy",
"serde",
"serde_cow",
@@ -3048,16 +3007,6 @@ dependencies = [
"serde",
]
[[package]]
name = "socket2"
version = "0.5.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e22376abed350d73dd1cd119b57ffccad95b4e585a7cda43e286245ce23c0678"
dependencies = [
"libc",
"windows-sys 0.52.0",
]
[[package]]
name = "socket2"
version = "0.6.0"
@@ -3115,7 +3064,7 @@ version = "0.8.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ee6798b1838b6a0f69c007c133b8df5866302197e404e8b6ee8ed3e3a5e68dc6"
dependencies = [
"base64 0.22.1",
"base64",
"bytes",
"chrono",
"crc",
@@ -3191,8 +3140,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "aa003f0038df784eb8fecbbac13affe3da23b45194bd57dba231c8f48199c526"
dependencies = [
"atoi",
"base64 0.22.1",
"bitflags 2.9.4",
"base64",
"bitflags",
"byteorder",
"bytes",
"chrono",
@@ -3234,8 +3183,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "db58fcd5a53cf07c184b154801ff91347e4c30d17a3562a635ff028ad5deda46"
dependencies = [
"atoi",
"base64 0.22.1",
"bitflags 2.9.4",
"base64",
"bitflags",
"byteorder",
"chrono",
"crc",
@@ -3347,12 +3296,6 @@ dependencies = [
"unicode-ident",
]
[[package]]
name = "sync_wrapper"
version = "0.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2047c6ded9c721764247e62cd3b03c09ffc529b2ba5b10ec482ae507a4a70160"
[[package]]
name = "sync_wrapper"
version = "1.0.2"
@@ -3373,36 +3316,15 @@ dependencies = [
"syn 2.0.106",
]
[[package]]
name = "system-configuration"
version = "0.5.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ba3a3adc5c275d719af8cb4272ea1c4a6d668a777f37e115f6d11ddbc1c8e0e7"
dependencies = [
"bitflags 1.3.2",
"core-foundation",
"system-configuration-sys 0.5.0",
]
[[package]]
name = "system-configuration"
version = "0.6.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3c879d448e9d986b661742763247d3693ed13609438cf3d006f51f5368a5ba6b"
dependencies = [
"bitflags 2.9.4",
"bitflags",
"core-foundation",
"system-configuration-sys 0.6.0",
]
[[package]]
name = "system-configuration-sys"
version = "0.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a75fb188eb626b924683e3b95e3a48e63551fcfb51949de2f06a9d91dbee93c9"
dependencies = [
"core-foundation-sys",
"libc",
"system-configuration-sys",
]
[[package]]
@@ -3562,7 +3484,7 @@ dependencies = [
"pin-project-lite",
"signal-hook-registry",
"slab",
"socket2 0.6.0",
"socket2",
"tokio-macros",
"windows-sys 0.59.0",
]
@@ -3588,16 +3510,6 @@ dependencies = [
"tokio",
]
[[package]]
name = "tokio-rustls"
version = "0.24.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c28327cf380ac148141087fbfb9de9d7bd4e84ab5d2c28fbc911d753de8a7081"
dependencies = [
"rustls 0.21.12",
"tokio",
]
[[package]]
name = "tokio-rustls"
version = "0.25.0"
@@ -3721,7 +3633,7 @@ dependencies = [
"futures-core",
"futures-util",
"pin-project-lite",
"sync_wrapper 1.0.2",
"sync_wrapper",
"tokio",
"tower-layer",
"tower-service",
@@ -3735,12 +3647,12 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "adc82fd73de2a9722ac5da747f12383d2bfdb93591ee6c58486e0097890f05f2"
dependencies = [
"async-compression",
"bitflags 2.9.4",
"bitflags",
"bytes",
"futures-core",
"futures-util",
"http 1.3.1",
"http-body 1.0.1",
"http",
"http-body",
"iri-string",
"pin-project-lite",
"tokio",
@@ -3883,7 +3795,7 @@ dependencies = [
"byteorder",
"bytes",
"data-encoding",
"http 1.3.1",
"http",
"httparse",
"log",
"rand 0.8.5",
@@ -3903,7 +3815,7 @@ checksum = "8628dcc84e5a09eb3d8423d6cb682965dea9133204e8fb3efee74c2a0c259442"
dependencies = [
"bytes",
"data-encoding",
"http 1.3.1",
"http",
"httparse",
"log",
"rand 0.9.2",
@@ -3982,9 +3894,9 @@ checksum = "5a5f39404a5da50712a4c1eecf25e90dd62b613502b7e925fd4e4d19b5c96512"
[[package]]
name = "unicode-normalization"
version = "0.1.24"
version = "0.1.25"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5033c97c4262335cded6d6fc3e5c18ab755e1a3dc96376350f3d8e9f009ad956"
checksum = "5fd4f6878c9cb28d874b009da9e8d183b5abc80117c40bbd187a1fde336be6e8"
dependencies = [
"tinyvec",
]
@@ -4211,12 +4123,6 @@ dependencies = [
"wasm-bindgen",
]
[[package]]
name = "webpki-roots"
version = "0.25.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5f20c57d8d7db6d3b86154206ae5d8fba62dd39573114de97c2cb0578251f8e1"
[[package]]
name = "webpki-roots"
version = "0.26.11"
@@ -4583,16 +4489,6 @@ dependencies = [
"memchr",
]
[[package]]
name = "winreg"
version = "0.50.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "524e57b2c537c0f9b1e69f1965311ec12182b4122e45035b1508cd24d2adadb1"
dependencies = [
"cfg-if",
"windows-sys 0.48.0",
]
[[package]]
name = "wit-bindgen"
version = "0.45.0"
+3 -1
@@ -1,6 +1,6 @@
[package]
name = "banner"
version = "0.5.0"
version = "0.6.2"
edition = "2024"
default-run = "banner"
@@ -60,6 +60,8 @@ html-escape = "0.2.13"
axum-extra = { version = "0.12.5", features = ["query"] }
urlencoding = "2.1.3"
chrono-tz = "0.10.4"
htmlize = { version = "1.0.6", features = ["unescape"] }
unicode-normalization = "0.1.25"
[dev-dependencies]
+7 -325
@@ -4,80 +4,8 @@ default:
just --list
# Run all checks in parallel. Pass -f/--fix to auto-format and fix first.
[script("bun")]
check *flags:
const args = "{{flags}}".split(/\s+/).filter(Boolean);
let fix = false;
for (const arg of args) {
if (arg === "-f" || arg === "--fix") fix = true;
else { console.error(`Unknown flag: ${arg}`); process.exit(1); }
}
const run = (cmd) => {
const proc = Bun.spawnSync(cmd, { stdio: ["inherit", "inherit", "inherit"] });
if (proc.exitCode !== 0) process.exit(proc.exitCode);
};
if (fix) {
console.log("\x1b[1;36m→ Fixing...\x1b[0m");
run(["cargo", "fmt", "--all"]);
run(["bun", "run", "--cwd", "web", "format"]);
run(["cargo", "clippy", "--all-features", "--fix", "--allow-dirty", "--allow-staged",
"--", "--deny", "warnings"]);
console.log("\x1b[1;36m→ Verifying...\x1b[0m");
}
const checks = [
{ name: "rustfmt", cmd: ["cargo", "fmt", "--all", "--", "--check"] },
{ name: "clippy", cmd: ["cargo", "clippy", "--all-features", "--", "--deny", "warnings"] },
{ name: "rust-test", cmd: ["cargo", "nextest", "run", "-E", "not test(export_bindings)"] },
{ name: "svelte-check", cmd: ["bun", "run", "--cwd", "web", "check"] },
{ name: "biome", cmd: ["bun", "run", "--cwd", "web", "format:check"] },
{ name: "web-test", cmd: ["bun", "run", "--cwd", "web", "test"] },
// { name: "sqlx-prepare", cmd: ["cargo", "sqlx", "prepare", "--check"] },
];
const isTTY = process.stderr.isTTY;
const start = Date.now();
const remaining = new Set(checks.map(c => c.name));
const promises = checks.map(async (check) => {
const proc = Bun.spawn(check.cmd, {
env: { ...process.env, FORCE_COLOR: "1" },
stdout: "pipe", stderr: "pipe",
});
const [stdout, stderr] = await Promise.all([
new Response(proc.stdout).text(),
new Response(proc.stderr).text(),
]);
await proc.exited;
return { ...check, stdout, stderr, exitCode: proc.exitCode,
elapsed: ((Date.now() - start) / 1000).toFixed(1) };
});
const interval = isTTY ? setInterval(() => {
const elapsed = ((Date.now() - start) / 1000).toFixed(1);
process.stderr.write(`\r\x1b[K${elapsed}s [${Array.from(remaining).join(", ")}]`);
}, 100) : null;
let anyFailed = false;
for (const promise of promises) {
const r = await promise;
remaining.delete(r.name);
if (isTTY) process.stderr.write(`\r\x1b[K`);
if (r.exitCode !== 0) {
anyFailed = true;
process.stdout.write(`\x1b[31m✗ ${r.name}\x1b[0m (${r.elapsed}s)\n`);
if (r.stdout) process.stdout.write(r.stdout);
if (r.stderr) process.stderr.write(r.stderr);
} else {
process.stdout.write(`\x1b[32m✓ ${r.name}\x1b[0m (${r.elapsed}s)\n`);
}
}
if (interval) clearInterval(interval);
if (isTTY) process.stderr.write(`\r\x1b[K`);
process.exit(anyFailed ? 1 : 0);
bun scripts/check.ts {{flags}}
# Format all Rust and TypeScript code
format:
@@ -85,276 +13,30 @@ format:
bun run --cwd web format
# Run tests. Usage: just test [rust|web|<nextest filter args>]
[script("bun")]
test *args:
const input = "{{args}}".trim();
const run = (cmd) => {
const proc = Bun.spawnSync(cmd, { stdio: ["inherit", "inherit", "inherit"] });
if (proc.exitCode !== 0) process.exit(proc.exitCode);
};
if (input === "web") {
run(["bun", "run", "--cwd", "web", "test"]);
} else if (input === "rust") {
run(["cargo", "nextest", "run", "-E", "not test(export_bindings)"]);
} else if (input === "") {
run(["cargo", "nextest", "run", "-E", "not test(export_bindings)"]);
run(["bun", "run", "--cwd", "web", "test"]);
} else {
run(["cargo", "nextest", "run", ...input.split(/\s+/)]);
}
bun scripts/test.ts {{args}}
# Generate TypeScript bindings from Rust types (ts-rs)
bindings:
cargo test export_bindings
bun scripts/bindings.ts
# Run the Banner API search demo (hits live UTSA API, ~20s)
search *ARGS:
cargo run -q --bin search -- {{ARGS}}
# Pass args to binary after --: just dev -n -- --some-flag
# Dev server. Flags: -f(rontend) -b(ackend) -W(no-watch) -n(o-build) -r(elease) -e(mbed) --tracing <fmt>
[script("bun")]
# Pass args to binary after --: just dev -n -- --some-flag
dev *flags:
const argv = "{{flags}}".split(/\s+/).filter(Boolean);
let frontendOnly = false, backendOnly = false;
let noWatch = false, noBuild = false, release = false, embed = false;
let tracing = "pretty";
const passthrough = [];
let i = 0;
let seenDashDash = false;
while (i < argv.length) {
const arg = argv[i];
if (seenDashDash) { passthrough.push(arg); i++; continue; }
if (arg === "--") { seenDashDash = true; i++; continue; }
if (arg.startsWith("--")) {
if (arg === "--frontend-only") frontendOnly = true;
else if (arg === "--backend-only") backendOnly = true;
else if (arg === "--no-watch") noWatch = true;
else if (arg === "--no-build") noBuild = true;
else if (arg === "--release") release = true;
else if (arg === "--embed") embed = true;
else if (arg === "--tracing") { tracing = argv[++i] || "pretty"; }
else { console.error(`Unknown flag: ${arg}`); process.exit(1); }
} else if (arg.startsWith("-") && arg.length > 1) {
for (const c of arg.slice(1)) {
if (c === "f") frontendOnly = true;
else if (c === "b") backendOnly = true;
else if (c === "W") noWatch = true;
else if (c === "n") noBuild = true;
else if (c === "r") release = true;
else if (c === "e") embed = true;
else { console.error(`Unknown flag: -${c}`); process.exit(1); }
}
} else { console.error(`Unknown argument: ${arg}`); process.exit(1); }
i++;
}
// -e implies -b (no point running Vite if assets are embedded)
if (embed) backendOnly = true;
// -n implies -W (no build means no watch)
if (noBuild) noWatch = true;
// Validate conflicting flags
if (frontendOnly && backendOnly) {
console.error("Cannot use -f and -b together (or -e implies -b)");
process.exit(1);
}
const runFrontend = !backendOnly;
const runBackend = !frontendOnly;
const profile = release ? "release" : "dev";
const profileDir = release ? "release" : "debug";
const procs = [];
const cleanup = async () => {
for (const p of procs) p.kill();
await Promise.all(procs.map(p => p.exited));
};
process.on("SIGINT", async () => { await cleanup(); process.exit(0); });
process.on("SIGTERM", async () => { await cleanup(); process.exit(0); });
// Build frontend first when embedding assets (backend will bake them in)
if (embed && !noBuild) {
console.log(`\x1b[1;36m→ Building frontend (for embedding)...\x1b[0m`);
const fb = Bun.spawnSync(["bun", "run", "--cwd", "web", "build"], {
stdio: ["inherit", "inherit", "inherit"],
});
if (fb.exitCode !== 0) process.exit(fb.exitCode);
}
// Frontend: Vite dev server
if (runFrontend) {
const proc = Bun.spawn(["bun", "run", "--cwd", "web", "dev"], {
stdio: ["inherit", "inherit", "inherit"],
});
procs.push(proc);
}
// Backend
if (runBackend) {
const backendArgs = [`--tracing`, tracing, ...passthrough];
const bin = `target/${profileDir}/banner`;
if (noWatch) {
// Build first unless -n (skip build)
if (!noBuild) {
console.log(`\x1b[1;36m→ Building backend (${profile})...\x1b[0m`);
const cargoArgs = ["cargo", "build", "--bin", "banner"];
if (!embed) cargoArgs.push("--no-default-features");
if (release) cargoArgs.push("--release");
const build = Bun.spawnSync(cargoArgs, { stdio: ["inherit", "inherit", "inherit"] });
if (build.exitCode !== 0) { cleanup(); process.exit(build.exitCode); }
}
// Run the binary directly (no watch)
const { existsSync } = await import("fs");
if (!existsSync(bin)) {
console.error(`Binary not found: ${bin}`);
console.error(`Run 'just build${release ? "" : " -d"}' first, or remove -n to use bacon.`);
cleanup();
process.exit(1);
}
console.log(`\x1b[1;36m→ Running ${bin} (no watch)\x1b[0m`);
const proc = Bun.spawn([bin, ...backendArgs], {
stdio: ["inherit", "inherit", "inherit"],
});
procs.push(proc);
} else {
// Bacon watch mode
const baconArgs = ["bacon", "--headless", "run", "--"];
if (!embed) baconArgs.push("--no-default-features");
if (release) baconArgs.push("--profile", "release");
baconArgs.push("--", ...backendArgs);
const proc = Bun.spawn(baconArgs, {
stdio: ["inherit", "inherit", "inherit"],
});
procs.push(proc);
}
}
// Wait for any process to exit, then kill the rest
const results = procs.map((p, i) => p.exited.then(code => ({ i, code })));
const first = await Promise.race(results);
cleanup();
process.exit(first.code);
bun scripts/dev.ts {{flags}}
# Production build. Flags: -d(ebug) -f(rontend-only) -b(ackend-only)
[script("bun")]
build *flags:
const argv = "{{flags}}".split(/\s+/).filter(Boolean);
let debug = false, frontendOnly = false, backendOnly = false;
for (const arg of argv) {
if (arg.startsWith("--")) {
if (arg === "--debug") debug = true;
else if (arg === "--frontend-only") frontendOnly = true;
else if (arg === "--backend-only") backendOnly = true;
else { console.error(`Unknown flag: ${arg}`); process.exit(1); }
} else if (arg.startsWith("-") && arg.length > 1) {
for (const c of arg.slice(1)) {
if (c === "d") debug = true;
else if (c === "f") frontendOnly = true;
else if (c === "b") backendOnly = true;
else { console.error(`Unknown flag: -${c}`); process.exit(1); }
}
} else { console.error(`Unknown argument: ${arg}`); process.exit(1); }
}
if (frontendOnly && backendOnly) {
console.error("Cannot use -f and -b together");
process.exit(1);
}
const run = (cmd) => {
const proc = Bun.spawnSync(cmd, { stdio: ["inherit", "inherit", "inherit"] });
if (proc.exitCode !== 0) process.exit(proc.exitCode);
};
const buildFrontend = !backendOnly;
const buildBackend = !frontendOnly;
const profile = debug ? "debug" : "release";
if (buildFrontend) {
console.log("\x1b[1;36m→ Building frontend...\x1b[0m");
run(["bun", "run", "--cwd", "web", "build"]);
}
if (buildBackend) {
console.log(`\x1b[1;36m→ Building backend (${profile})...\x1b[0m`);
const cmd = ["cargo", "build", "--bin", "banner"];
if (!debug) cmd.push("--release");
run(cmd);
}
bun scripts/build.ts {{flags}}
# Start PostgreSQL in Docker and update .env with connection string
# Commands: start (default), reset, rm
[script("bun")]
db cmd="start":
const fs = await import("fs/promises");
const { spawnSync } = await import("child_process");
const NAME = "banner-postgres";
const USER = "banner";
const PASS = "banner";
const DB = "banner";
const PORT = "59489";
const ENV_FILE = ".env";
const CMD = "{{cmd}}";
const run = (args) => spawnSync("docker", args, { encoding: "utf8" });
const getContainer = () => {
const res = run(["ps", "-a", "--filter", `name=^${NAME}$`, "--format", "json"]);
return res.stdout.trim() ? JSON.parse(res.stdout) : null;
};
const updateEnv = async () => {
const url = `postgresql://${USER}:${PASS}@localhost:${PORT}/${DB}`;
try {
let content = await fs.readFile(ENV_FILE, "utf8");
content = content.includes("DATABASE_URL=")
? content.replace(/DATABASE_URL=.*$/m, `DATABASE_URL=${url}`)
: content.trim() + `\nDATABASE_URL=${url}\n`;
await fs.writeFile(ENV_FILE, content);
} catch {
await fs.writeFile(ENV_FILE, `DATABASE_URL=${url}\n`);
}
};
const create = () => {
run(["run", "-d", "--name", NAME, "-e", `POSTGRES_USER=${USER}`,
"-e", `POSTGRES_PASSWORD=${PASS}`, "-e", `POSTGRES_DB=${DB}`,
"-p", `${PORT}:5432`, "postgres:17-alpine"]);
console.log("created");
};
const container = getContainer();
if (CMD === "rm") {
if (!container) process.exit(0);
run(["stop", NAME]);
run(["rm", NAME]);
console.log("removed");
} else if (CMD === "reset") {
if (!container) create();
else {
run(["exec", NAME, "psql", "-U", USER, "-d", "postgres", "-c", `DROP DATABASE IF EXISTS ${DB}`]);
run(["exec", NAME, "psql", "-U", USER, "-d", "postgres", "-c", `CREATE DATABASE ${DB}`]);
console.log("reset");
}
await updateEnv();
} else {
if (!container) {
create();
} else if (container.State !== "running") {
run(["start", NAME]);
console.log("started");
} else {
console.log("running");
}
await updateEnv();
}
bun scripts/db.ts {{cmd}}
alias b := bun
bun *ARGS:
-61
@@ -1,61 +0,0 @@
# Changelog
All notable changes to this project will be documented in this file.
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.1.0/).
## [Unreleased]
## [0.5.0] - 2026-01-29
### Added
- Multi-select subject filtering with searchable comboboxes.
- Smart instructor name abbreviation for compact table display.
- Delivery mode indicators and tooltips in location column.
- Page selector dropdown with animated pagination controls.
- FLIP animations for smooth table row transitions during pagination.
- Time tooltip with detailed meeting schedule and day abbreviations.
- Reusable SimpleTooltip component for consistent UI hints.
### Changed
- Consolidated query logic and eliminated N+1 instructor loads via batch fetching.
- Consolidated menu snippets and strengthened component type safety.
- Enhanced table scrolling with OverlayScrollbars and theme-aware styling.
- Eliminated initial theme flash on page load.
## [0.4.0] - 2026-01-28
### Added
- Web-based course search UI with interactive data table, multi-column sorting, and column visibility controls.
- TypeScript type bindings generated from Rust types via ts-rs.
- RateMyProfessors integration: bulk professor sync via GraphQL and inline rating display in search results.
- Course detail expansion panel with enrollment, meeting times, and instructor info.
- OverlayScrollbars integration for styled, theme-aware scrollable areas.
- Pagination component for navigating large search result sets.
- Footer component with version display.
- API endpoints: `/api/courses/search`, `/api/courses/:term/:crn`, `/api/terms`, `/api/subjects`, `/api/reference/:category`.
- Frontend API client with typed request/response handling and test coverage.
- Course formatting utilities with comprehensive unit tests.
## [0.3.4] - 2026-01
### Added
- Live service status tracking on web dashboard with auto-refresh and health indicators.
- DB operation extraction for improved testability.
- Unit test suite foundation covering core functionality.
- Docker support for PostgreSQL development environment.
- ICS calendar export with comprehensive holiday exclusion coverage.
- Google Calendar link generation with recurrence rules and meeting details.
- Job queue with priority-based scheduling for background scraping.
- Rate limiting with burst allowance for Banner API requests.
- Session management and caching for Banner API interactions.
- Discord bot commands: search, terms, ics, gcal.
- Intelligent scraping system with priority queues and retry tracking.
### Changed
- Type consolidation and dead code removal across the codebase.
+23 -14
@@ -2,34 +2,43 @@
## Now
- **Notification and subscription system** - Subscribe to courses and get alerts on seat availability, waitlist movement, and detail changes (time, location, professor, seats). DB schema exists.
- **Discord bot revival** - Audit and fix all existing commands (search, terms, ics, gcal) against the current data model. Add test coverage. Bot has been untouched since ~0.3.4 and commands may be broken.
- **Notification and subscription system** - Subscribe to courses and get alerts on seat availability, waitlist movement, and detail changes (time, location, professor, seats). Deliver via Discord bot and web dashboard.
- **Mobile/responsive redesign** - Hamburger nav for sidebar, responsive table column hiding, mobile-friendly admin pages. Timeline is the only area with solid mobile support; most pages need work.
- **Professor name search filter** - Filter search results by instructor. Backend code exists but is commented out.
- **Autocomplete for search fields** - Typeahead for course titles, course numbers, professors, and terms.
- **Test coverage expansion** - Broaden coverage with session/rate-limiter tests and more DB integration tests.
- **Search field autocomplete** - Typeahead for course titles, course numbers, professors, and terms.
- **Large component extraction** - Break down CourseTable, Instructors page, and TimelineCanvas into smaller, testable subcomponents.
## Soon
- **Smart time-of-day search parsing** - Support natural queries like "2 PM", "2-3 PM", "ends by 2 PM", "after 2 PM", "before 2 PM" mapped to time ranges.
- **Section-based lookup** - Search by full section identifier, e.g. "CS 4393 001".
- **Search result pagination** - Paginated embeds for large result sets in Discord.
- **Bot slash command parity** - Keep Discord bot commands in sync with web features: timeline summaries, RMP lookups, audit log highlights, notification management via bot.
- **E2E test suite** - Playwright tests for critical user flows: search, login, admin pages, timeline interaction.
- **Settings page** - Replace placeholder with theme preferences, notification settings, default term/subject selection.
- **Profile enhancements** - Expand from read-only stub to subscription management, saved searches, and course watchlists.
- **Smart time-of-day search parsing** - Support natural queries like "2 PM", "ends by 2 PM", "after 2 PM" mapped to time ranges.
- **Multi-term querying** - Query across multiple terms in a single search instead of one at a time.
- **Historical analytics** - Track seat availability over time and visualize fill-rate trends per course or professor.
- **Schedule builder** - Visual weekly schedule tool for assembling a conflict-free course lineup.
- **Professor stats** - Aggregate data views: average class size, typical waitlist length, schedule patterns across semesters.
- **Historical analytics visualization** - Build trend UI on top of existing course metrics and timeline API. Fill-rate charts per course or professor.
- **Schedule builder** - Visual weekly schedule tool for assembling a conflict-free course lineup. Timeline visualization serves as a foundation.
## Eventually
- **API rate limiting** - Rate limiter on public API endpoints. Needed before any public or external exposure.
- **Bulk admin operations** - Batch RMP match/reject, bulk user management, data export from admin pages.
- **Degree audit helper** - Map available courses to degree requirements and suggest what to take next.
- **Dynamic scraper scheduling** - Adjust scrape intervals based on change frequency and course count (e.g. 2 hours per 500 courses, shorter intervals when changes are detected).
- **DM support** - Allow the Discord bot to respond in direct messages, not just guild channels.
- **"Classes Now" command** - Find classes currently in session based on the current day and time.
- **CRN direct lookup** - Look up a course by its CRN without going through search.
- **Metrics dashboard** - Surface scraper and service metrics visually on the web dashboard.
- **Privileged error feedback** - Detailed error information surfaced to bot admins when commands fail.
## Done
- **Interactive timeline visualization** - D3 canvas with pan/zoom, touch gestures, and enrollment aggregation API. *(0.6.0)*
- **Scraper analytics dashboard** - Timeseries charts, subject monitoring, adaptive scheduling, and admin endpoints. *(0.6.0)*
- **WebSocket job monitoring** - Real-time scrape job queue with live connection status indicators. *(0.6.0)*
- **Course change audit log** - Field-level change tracking with smart diffing, conditional caching, and auto-refresh. *(0.6.0)*
- **User authentication system** - Discord OAuth, sessions, admin roles, and login page. *(0.6.0)*
- **Dynamic scraper scheduling** - Adaptive scrape intervals based on change frequency and course volume. *(0.6.0)*
- **Metrics dashboard** - Scraper and service metrics surfaced on the web dashboard. *(0.6.0)*
- **Subject/major search filter** - Multi-select subject filtering with searchable comboboxes. *(0.5.0)*
- **Web course search UI** - Browser-based course search with interactive data table, sorting, pagination, and column controls. *(0.4.0)*
- **RateMyProfessor integration** - Bulk professor sync via GraphQL with inline ratings in search results. *(0.4.0)*
- **Subject/major search filter** - Multi-select subject filtering with searchable comboboxes. *(0.5.0)*
- **Test coverage expansion** - Unit tests for course formatting, API client, query builder, CLI args, and config parsing. *(0.3.4–0.4.0)*
- **Test coverage expansion** - Unit tests for course formatting, API client, query builder, CLI args, and config parsing. *(0.3.4--0.4.0)*
@@ -0,0 +1,5 @@
-- Add structured first/last name columns to instructors.
-- Populated by Rust-side backfill (parse_banner_name) since we need
-- HTML entity decoding and suffix extraction that SQL can't handle well.
ALTER TABLE instructors ADD COLUMN first_name VARCHAR;
ALTER TABLE instructors ADD COLUMN last_name VARCHAR;
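-- Illustrative follow-up (not part of this migration): since the backfill runs
-- Rust-side at startup, rows it could not parse can be spotted afterwards with:
--   SELECT count(*) FROM instructors WHERE first_name IS NULL OR last_name IS NULL;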
+32
@@ -0,0 +1,32 @@
/**
* Generate TypeScript bindings from Rust types (ts-rs).
*
* Usage: bun scripts/bindings.ts
*/
import { readdirSync, writeFileSync, rmSync } from "fs";
import { run } from "./lib/proc";
const BINDINGS_DIR = "web/src/lib/bindings";
// Build test binary first (slow part) — fail before deleting anything
run(["cargo", "test", "--no-run"]);
// Clean slate
rmSync(BINDINGS_DIR, { recursive: true, force: true });
// Run the export (fast, already compiled)
run(["cargo", "test", "export_bindings"]);
// Auto-generate index.ts from emitted .ts files
const types = readdirSync(BINDINGS_DIR)
.filter((f) => f.endsWith(".ts") && f !== "index.ts")
.map((f) => f.replace(/\.ts$/, ""))
.sort();
writeFileSync(
`${BINDINGS_DIR}/index.ts`,
types.map((t) => `export type { ${t} } from "./${t}";`).join("\n") + "\n",
);
console.log(`Generated ${BINDINGS_DIR}/index.ts (${types.length} types)`);
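// Illustrative shape of the generated barrel file (type names are examples):
//   export type { Course } from "./Course";
//   export type { Term } from "./Term";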
+45
@@ -0,0 +1,45 @@
/**
* Production build.
*
* Usage: bun scripts/build.ts [flags]
*
* Flags:
* -d, --debug Debug build instead of release
* -f, --frontend-only Frontend only
* -b, --backend-only Backend only
*/
import { parseFlags, c } from "./lib/fmt";
import { run } from "./lib/proc";
const { flags } = parseFlags(
process.argv.slice(2),
{
debug: "bool",
"frontend-only": "bool",
"backend-only": "bool",
} as const,
{ d: "debug", f: "frontend-only", b: "backend-only" },
{ debug: false, "frontend-only": false, "backend-only": false },
);
if (flags["frontend-only"] && flags["backend-only"]) {
console.error("Cannot use -f and -b together");
process.exit(1);
}
const buildFrontend = !flags["backend-only"];
const buildBackend = !flags["frontend-only"];
const profile = flags.debug ? "debug" : "release";
if (buildFrontend) {
console.log(c("1;36", "→ Building frontend..."));
run(["bun", "run", "--cwd", "web", "build"]);
}
if (buildBackend) {
console.log(c("1;36", `→ Building backend (${profile})...`));
const cmd = ["cargo", "build", "--bin", "banner"];
if (!flags.debug) cmd.push("--release");
run(cmd);
}
+21
@@ -0,0 +1,21 @@
{
"lockfileVersion": 1,
"configVersion": 1,
"workspaces": {
"": {
"name": "banner-scripts",
"devDependencies": {
"@types/bun": "^1.3.8",
},
},
},
"packages": {
"@types/bun": ["@types/bun@1.3.8", "", { "dependencies": { "bun-types": "1.3.8" } }, "sha512-3LvWJ2q5GerAXYxO2mffLTqOzEu5qnhEAlh48Vnu8WQfnmSwbgagjGZV6BoHKJztENYEDn6QmVd949W4uESRJA=="],
"@types/node": ["@types/node@25.1.0", "", { "dependencies": { "undici-types": "~7.16.0" } }, "sha512-t7frlewr6+cbx+9Ohpl0NOTKXZNV9xHRmNOvql47BFJKcEG1CxtxlPEEe+gR9uhVWM4DwhnvTF110mIL4yP9RA=="],
"bun-types": ["bun-types@1.3.8", "", { "dependencies": { "@types/node": "*" } }, "sha512-fL99nxdOWvV4LqjmC+8Q9kW3M4QTtTR1eePs94v5ctGqU8OeceWrSUaRw3JYb7tU3FkMIAjkueehrHPPPGKi5Q=="],
"undici-types": ["undici-types@7.16.0", "", {}, "sha512-Zz+aZWSj8LE6zoxD+xrjh4VfkIG8Ya6LvYkZqtUQGJPZjYl53ypCaUwWqo7eI0x66KBGeRo+mlBEkMSeSZ38Nw=="],
}
}
+250
@@ -0,0 +1,250 @@
/**
* Run all project checks in parallel. Auto-fixes formatting when safe.
*
* Usage: bun scripts/check.ts [--fix|-f]
*/
import { c, elapsed, isStderrTTY } from "./lib/fmt";
import { run, runPiped, spawnCollect, raceInOrder, type CollectResult } from "./lib/proc";
import { existsSync, statSync, readdirSync, writeFileSync, rmSync } from "fs";
const args = process.argv.slice(2);
let fix = false;
for (const arg of args) {
if (arg === "-f" || arg === "--fix") {
fix = true;
} else {
console.error(`Unknown flag: ${arg}`);
process.exit(1);
}
}
// ---------------------------------------------------------------------------
// Fix path: format + clippy fix, then fall through to verification
// ---------------------------------------------------------------------------
if (fix) {
console.log(c("1;36", "→ Fixing..."));
run(["cargo", "fmt", "--all"]);
run(["bun", "run", "--cwd", "web", "format"]);
run([
"cargo", "clippy", "--all-features", "--fix", "--allow-dirty", "--allow-staged",
"--", "--deny", "warnings",
]);
console.log(c("1;36", "→ Verifying..."));
}
// ---------------------------------------------------------------------------
// Ensure TypeScript bindings are up-to-date before frontend checks
// ---------------------------------------------------------------------------
{
const BINDINGS_DIR = "web/src/lib/bindings";
let newestSrcMtime = 0;
for (const file of new Bun.Glob("src/**/*.rs").scanSync(".")) {
const mt = statSync(file).mtimeMs;
if (mt > newestSrcMtime) newestSrcMtime = mt;
}
for (const f of ["Cargo.toml", "Cargo.lock"]) {
if (existsSync(f)) {
const mt = statSync(f).mtimeMs;
if (mt > newestSrcMtime) newestSrcMtime = mt;
}
}
let newestBindingMtime = 0;
if (existsSync(BINDINGS_DIR)) {
for (const file of new Bun.Glob("**/*").scanSync(BINDINGS_DIR)) {
const mt = statSync(`${BINDINGS_DIR}/${file}`).mtimeMs;
if (mt > newestBindingMtime) newestBindingMtime = mt;
}
}
const stale = newestBindingMtime === 0 || newestSrcMtime > newestBindingMtime;
if (stale) {
const t = Date.now();
process.stdout.write(
c("1;36", "→ Regenerating TypeScript bindings (Rust sources changed)...") + "\n",
);
run(["cargo", "test", "--no-run"]);
rmSync(BINDINGS_DIR, { recursive: true, force: true });
run(["cargo", "test", "export_bindings"]);
const types = readdirSync(BINDINGS_DIR)
.filter((f) => f.endsWith(".ts") && f !== "index.ts")
.map((f) => f.replace(/\.ts$/, ""))
.sort();
writeFileSync(
`${BINDINGS_DIR}/index.ts`,
types.map((t) => `export type { ${t} } from "./${t}";`).join("\n") + "\n",
);
process.stdout.write(c("32", "✓ bindings") + ` (${elapsed(t)}s, ${types.length} types)\n`);
} else {
process.stdout.write(c("2", "· bindings up-to-date, skipped") + "\n");
}
}
// ---------------------------------------------------------------------------
// Check definitions
// ---------------------------------------------------------------------------
interface Check {
name: string;
cmd: string[];
hint?: string;
}
const checks: Check[] = [
{
name: "rust-format",
cmd: ["cargo", "fmt", "--all", "--", "--check"],
hint: "Run 'cargo fmt --all' to see and fix formatting issues.",
},
{ name: "rust-lint", cmd: ["cargo", "clippy", "--all-features", "--", "--deny", "warnings"] },
{ name: "rust-check", cmd: ["cargo", "check", "--all-features"] },
{ name: "rust-test", cmd: ["cargo", "nextest", "run", "-E", "not test(export_bindings)"] },
{ name: "svelte-check", cmd: ["bun", "run", "--cwd", "web", "check"] },
{ name: "web-format", cmd: ["bun", "run", "--cwd", "web", "format:check"] },
{ name: "web-test", cmd: ["bun", "run", "--cwd", "web", "test"] },
{ name: "actionlint", cmd: ["actionlint"] },
];
// ---------------------------------------------------------------------------
// Domain groups: formatter → { peers, format command, sanity rechecks }
// ---------------------------------------------------------------------------
const domains: Record<
string,
{
peers: string[];
format: () => ReturnType<typeof runPiped>;
recheck: Check[];
}
> = {
"rust-format": {
peers: ["rust-lint", "rust-check", "rust-test"],
format: () => runPiped(["cargo", "fmt", "--all"]),
recheck: [
{ name: "rust-format", cmd: ["cargo", "fmt", "--all", "--", "--check"] },
{ name: "rust-check", cmd: ["cargo", "check", "--all-features"] },
],
},
"web-format": {
peers: ["svelte-check", "web-test"],
format: () => runPiped(["bun", "run", "--cwd", "web", "format"]),
recheck: [
{ name: "web-format", cmd: ["bun", "run", "--cwd", "web", "format:check"] },
{ name: "svelte-check", cmd: ["bun", "run", "--cwd", "web", "check"] },
],
},
};
// ---------------------------------------------------------------------------
// Phase 1: run all checks in parallel, display in completion order
// ---------------------------------------------------------------------------
const start = Date.now();
const remaining = new Set(checks.map((ch) => ch.name));
const promises = checks.map(async (check) => ({
...check,
...(await spawnCollect(check.cmd, start)),
}));
const interval = isStderrTTY
? setInterval(() => {
process.stderr.write(`\r\x1b[K${elapsed(start)}s [${Array.from(remaining).join(", ")}]`);
}, 100)
: null;
const results: Record<string, Check & CollectResult> = {};
await raceInOrder(promises, checks, (r) => {
results[r.name] = r;
remaining.delete(r.name);
if (isStderrTTY) process.stderr.write("\r\x1b[K");
if (r.exitCode !== 0) {
process.stdout.write(c("31", `${r.name}`) + ` (${r.elapsed}s)\n`);
if (r.hint) {
process.stdout.write(c("2", ` ${r.hint}`) + "\n");
} else {
if (r.stdout) process.stdout.write(r.stdout);
if (r.stderr) process.stderr.write(r.stderr);
}
} else {
process.stdout.write(c("32", `${r.name}`) + ` (${r.elapsed}s)\n`);
}
});
if (interval) clearInterval(interval);
if (isStderrTTY) process.stderr.write("\r\x1b[K");
// ---------------------------------------------------------------------------
// Phase 2: auto-fix formatting if it's the only failure in its domain
// ---------------------------------------------------------------------------
const autoFixedDomains = new Set<string>();
for (const [fmtName, domain] of Object.entries(domains)) {
const fmtResult = results[fmtName];
if (!fmtResult || fmtResult.exitCode === 0) continue;
if (!domain.peers.every((p) => results[p]?.exitCode === 0)) continue;
process.stdout.write(
"\n" +
c("1;36", `→ Auto-formatting ${fmtName} (peers passed, only formatting failed)...`) +
"\n",
);
const fmtOut = domain.format();
if (fmtOut.exitCode !== 0) {
process.stdout.write(c("31", `${fmtName} formatter failed`) + "\n");
if (fmtOut.stdout) process.stdout.write(fmtOut.stdout);
if (fmtOut.stderr) process.stderr.write(fmtOut.stderr);
continue;
}
const recheckStart = Date.now();
const recheckPromises = domain.recheck.map(async (ch) => ({
...ch,
...(await spawnCollect(ch.cmd, recheckStart)),
}));
let recheckFailed = false;
await raceInOrder(recheckPromises, domain.recheck, (r) => {
if (r.exitCode !== 0) {
recheckFailed = true;
process.stdout.write(c("31", `${r.name}`) + ` (${r.elapsed}s)\n`);
if (r.stdout) process.stdout.write(r.stdout);
if (r.stderr) process.stderr.write(r.stderr);
} else {
process.stdout.write(c("32", `${r.name}`) + ` (${r.elapsed}s)\n`);
}
});
if (!recheckFailed) {
process.stdout.write(c("32", `${fmtName} auto-fix succeeded`) + "\n");
autoFixedDomains.add(fmtName);
} else {
process.stdout.write(c("31", `${fmtName} auto-fix failed sanity check`) + "\n");
}
}
// ---------------------------------------------------------------------------
// Final verdict
// ---------------------------------------------------------------------------
const finalFailed = Object.entries(results).some(
([name, r]) => r.exitCode !== 0 && !autoFixedDomains.has(name),
);
if (autoFixedDomains.size > 0 && !finalFailed) {
process.stdout.write(
"\n" + c("1;32", "✓ All checks passed (formatting was auto-fixed)") + "\n",
);
}
process.exit(finalFailed ? 1 : 0);
+79
@@ -0,0 +1,79 @@
/**
* PostgreSQL Docker container management.
*
* Usage: bun scripts/db.ts [start|reset|rm]
*/
import { readFile, writeFile } from "fs/promises";
import { spawnSync } from "child_process";
const NAME = "banner-postgres";
const USER = "banner";
const PASS = "banner";
const DB = "banner";
const PORT = "59489";
const ENV_FILE = ".env";
const cmd = process.argv[2] || "start";
function docker(...args: string[]) {
return spawnSync("docker", args, { encoding: "utf8" });
}
function getContainer() {
const res = docker("ps", "-a", "--filter", `name=^${NAME}$`, "--format", "json");
return res.stdout.trim() ? JSON.parse(res.stdout) : null;
}
async function updateEnv() {
const url = `postgresql://${USER}:${PASS}@localhost:${PORT}/${DB}`;
try {
let content = await readFile(ENV_FILE, "utf8");
content = content.includes("DATABASE_URL=")
? content.replace(/DATABASE_URL=.*$/m, `DATABASE_URL=${url}`)
: content.trim() + `\nDATABASE_URL=${url}\n`;
await writeFile(ENV_FILE, content);
} catch {
await writeFile(ENV_FILE, `DATABASE_URL=${url}\n`);
}
}
function create() {
docker(
"run", "-d", "--name", NAME,
"-e", `POSTGRES_USER=${USER}`,
"-e", `POSTGRES_PASSWORD=${PASS}`,
"-e", `POSTGRES_DB=${DB}`,
"-p", `${PORT}:5432`,
"postgres:17-alpine",
);
console.log("created");
}
const container = getContainer();
if (cmd === "rm") {
if (!container) process.exit(0);
docker("stop", NAME);
docker("rm", NAME);
console.log("removed");
} else if (cmd === "reset") {
if (!container) {
create();
} else {
docker("exec", NAME, "psql", "-U", USER, "-d", "postgres", "-c", `DROP DATABASE IF EXISTS ${DB}`);
docker("exec", NAME, "psql", "-U", USER, "-d", "postgres", "-c", `CREATE DATABASE ${DB}`);
console.log("reset");
}
await updateEnv();
} else {
if (!container) {
create();
} else if (container.State !== "running") {
docker("start", NAME);
console.log("started");
} else {
console.log("running");
}
await updateEnv();
}
+112
@@ -0,0 +1,112 @@
/**
* Dev server orchestrator.
*
* Usage: bun scripts/dev.ts [flags] [-- passthrough-args]
*
* Flags:
* -f, --frontend-only Frontend only (Vite dev server)
* -b, --backend-only Backend only (bacon watch)
* -W, --no-watch Build once + run (no watch)
* -n, --no-build Run last compiled binary (no rebuild)
* -r, --release Use release profile
* -e, --embed Embed assets (implies -b)
* --tracing <fmt> Tracing format (default: pretty)
*/
import { existsSync } from "fs";
import { parseFlags, c } from "./lib/fmt";
import { run, ProcessGroup } from "./lib/proc";
const { flags, passthrough } = parseFlags(
process.argv.slice(2),
{
"frontend-only": "bool",
"backend-only": "bool",
"no-watch": "bool",
"no-build": "bool",
release: "bool",
embed: "bool",
tracing: "string",
} as const,
{ f: "frontend-only", b: "backend-only", W: "no-watch", n: "no-build", r: "release", e: "embed" },
{
"frontend-only": false,
"backend-only": false,
"no-watch": false,
"no-build": false,
release: false,
embed: false,
tracing: "pretty",
},
);
let frontendOnly = flags["frontend-only"];
let backendOnly = flags["backend-only"];
let noWatch = flags["no-watch"];
const noBuild = flags["no-build"];
const release = flags.release;
const embed = flags.embed;
const tracing = flags.tracing as string;
// -e implies -b
if (embed) backendOnly = true;
// -n implies -W
if (noBuild) noWatch = true;
if (frontendOnly && backendOnly) {
console.error("Cannot use -f and -b together (or -e implies -b)");
process.exit(1);
}
const runFrontend = !backendOnly;
const runBackend = !frontendOnly;
const profile = release ? "release" : "dev";
const profileDir = release ? "release" : "debug";
const group = new ProcessGroup();
// Build frontend first when embedding assets
if (embed && !noBuild) {
console.log(c("1;36", "→ Building frontend (for embedding)..."));
run(["bun", "run", "--cwd", "web", "build"]);
}
// Frontend: Vite dev server
if (runFrontend) {
group.spawn(["bun", "run", "--cwd", "web", "dev"]);
}
// Backend
if (runBackend) {
const backendArgs = ["--tracing", tracing, ...passthrough];
const bin = `target/${profileDir}/banner`;
if (noWatch) {
if (!noBuild) {
console.log(c("1;36", `→ Building backend (${profile})...`));
const cargoArgs = ["cargo", "build", "--bin", "banner"];
if (!embed) cargoArgs.push("--no-default-features");
if (release) cargoArgs.push("--release");
run(cargoArgs);
}
if (!existsSync(bin)) {
console.error(`Binary not found: ${bin}`);
console.error(`Run 'just build${release ? "" : " -d"}' first, or remove -n to use bacon.`);
await group.killAll();
process.exit(1);
}
console.log(c("1;36", `→ Running ${bin} (no watch)`));
group.spawn([bin, ...backendArgs]);
} else {
// Bacon watch mode
const baconArgs = ["bacon", "--headless", "run", "--"];
if (!embed) baconArgs.push("--no-default-features");
if (release) baconArgs.push("--profile", "release");
baconArgs.push("--", ...backendArgs);
group.spawn(baconArgs);
}
}
const code = await group.waitForFirst();
process.exit(code);
+96
@@ -0,0 +1,96 @@
/**
* Shared formatting, color, and CLI argument parsing utilities.
*/
const isTTY = process.stdout.isTTY ?? false;
const isStderrTTY = process.stderr.isTTY ?? false;
/** ANSI color wrapper — no-op when stdout is not a TTY. */
export function c(code: string, text: string): string {
return isTTY ? `\x1b[${code}m${text}\x1b[0m` : text;
}
/** Elapsed seconds since `start` as a formatted string. */
export function elapsed(start: number): string {
return ((Date.now() - start) / 1000).toFixed(1);
}
/** Whether stderr is a TTY (for progress spinners). */
export { isStderrTTY };
/**
* Parse short and long CLI flags from a flat argument array.
*
* `spec` maps flag names to their type:
* - `"bool"` — presence sets the value to `true`
* - `"string"` — consumes the next argument as the value
*
* Short flags can be combined: `-fbW` expands to `-f -b -W`.
* Long flags: `--frontend-only`, `--tracing pretty`.
* `--` terminates flag parsing; remaining args go to `passthrough`.
*
* Returns `{ flags, passthrough }`.
*/
export function parseFlags<T extends Record<string, "bool" | "string">>(
argv: string[],
spec: T,
shortMap: Record<string, keyof T>,
defaults: { [K in keyof T]: T[K] extends "bool" ? boolean : string },
): { flags: typeof defaults; passthrough: string[] } {
const flags = { ...defaults };
const passthrough: string[] = [];
let i = 0;
while (i < argv.length) {
const arg = argv[i];
if (arg === "--") {
passthrough.push(...argv.slice(i + 1));
break;
}
if (arg.startsWith("--")) {
const name = arg.slice(2);
if (!(name in spec)) {
console.error(`Unknown flag: ${arg}`);
process.exit(1);
}
if (spec[name] === "string") {
(flags as Record<string, unknown>)[name] = argv[++i] || "";
} else {
(flags as Record<string, unknown>)[name] = true;
}
} else if (arg.startsWith("-") && arg.length > 1) {
for (const ch of arg.slice(1)) {
const mapped = shortMap[ch];
if (!mapped) {
console.error(`Unknown flag: -${ch}`);
process.exit(1);
}
if (spec[mapped as string] === "string") {
(flags as Record<string, unknown>)[mapped as string] = argv[++i] || "";
} else {
(flags as Record<string, unknown>)[mapped as string] = true;
}
}
} else {
console.error(`Unknown argument: ${arg}`);
process.exit(1);
}
i++;
}
return { flags, passthrough };
}
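// Illustrative call (hypothetical argv), mirroring the doc comment above:
// combined short flags expand, "--tracing" consumes a value, "--" stops parsing.
//
//   const { flags, passthrough } = parseFlags(
//     ["-fW", "--tracing", "json", "--", "--port", "8080"],
//     { "frontend-only": "bool", "no-watch": "bool", tracing: "string" } as const,
//     { f: "frontend-only", W: "no-watch" },
//     { "frontend-only": false, "no-watch": false, tracing: "pretty" },
//   );
//   // flags: { "frontend-only": true, "no-watch": true, tracing: "json" }
//   // passthrough: ["--port", "8080"]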
/**
 * Split a raw, whitespace-separated argument string into tokens.
 * Returns the non-empty tokens (an empty array for blank input).
 */
export function parseArgs(raw: string): string[] {
return raw
.trim()
.split(/\s+/)
.filter(Boolean);
}
+113
@@ -0,0 +1,113 @@
/**
* Shared process spawning utilities for project scripts.
*/
import { elapsed } from "./fmt";
export interface CollectResult {
stdout: string;
stderr: string;
exitCode: number;
elapsed: string;
}
/** Sync spawn with inherited stdio. Exits process on failure. */
export function run(cmd: string[]): void {
const proc = Bun.spawnSync(cmd, { stdio: ["inherit", "inherit", "inherit"] });
if (proc.exitCode !== 0) process.exit(proc.exitCode);
}
/** Sync spawn with piped stdio. Returns captured output. */
export function runPiped(cmd: string[]): { exitCode: number; stdout: string; stderr: string } {
const proc = Bun.spawnSync(cmd, { stdout: "pipe", stderr: "pipe" });
return {
exitCode: proc.exitCode,
stdout: proc.stdout?.toString() ?? "",
stderr: proc.stderr?.toString() ?? "",
};
}
/**
* Async spawn that collects stdout/stderr. Returns a result object.
* Catches spawn failures (e.g. missing binary) instead of throwing.
*/
export async function spawnCollect(cmd: string[], startTime: number): Promise<CollectResult> {
try {
const proc = Bun.spawn(cmd, {
env: { ...process.env, FORCE_COLOR: "1" },
stdout: "pipe",
stderr: "pipe",
});
const [stdout, stderr] = await Promise.all([
new Response(proc.stdout).text(),
new Response(proc.stderr).text(),
]);
await proc.exited;
return { stdout, stderr, exitCode: proc.exitCode, elapsed: elapsed(startTime) };
} catch (err) {
return { stdout: "", stderr: String(err), exitCode: 1, elapsed: elapsed(startTime) };
}
}
/**
* Race all promises, yielding results in completion order via callback.
* Spawn failures become results, not unhandled rejections.
*/
export async function raceInOrder<T extends { name: string }>(
promises: Promise<T & CollectResult>[],
fallbacks: T[],
onResult: (r: T & CollectResult) => void,
): Promise<void> {
const tagged = promises.map((p, i) =>
p
.then((r) => ({ i, r }))
.catch((err) => ({
i,
r: {
...fallbacks[i],
exitCode: 1,
stdout: "",
stderr: String(err),
elapsed: "?",
} as T & CollectResult,
})),
);
for (let n = 0; n < promises.length; n++) {
const { i, r } = await Promise.race(tagged);
tagged[i] = new Promise(() => {}); // sentinel: never resolves
onResult(r);
}
}
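// Sketch of the intended pairing with spawnCollect (the check list here is an
// example); check.ts uses this exact pattern to stream results as they finish:
//
//   const checks = [{ name: "fmt", cmd: ["cargo", "fmt", "--all", "--", "--check"] }];
//   const started = Date.now();
//   await raceInOrder(
//     checks.map(async (ch) => ({ ...ch, ...(await spawnCollect(ch.cmd, started)) })),
//     checks,
//     (r) => console.log(`${r.name}: exit ${r.exitCode} (${r.elapsed}s)`),
//   );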
/** Spawn managed processes with coordinated cleanup on exit. */
export class ProcessGroup {
private procs: ReturnType<typeof Bun.spawn>[] = [];
constructor() {
const cleanup = async () => {
await this.killAll();
process.exit(0);
};
process.on("SIGINT", cleanup);
process.on("SIGTERM", cleanup);
}
spawn(cmd: string[]): ReturnType<typeof Bun.spawn> {
const proc = Bun.spawn(cmd, { stdio: ["inherit", "inherit", "inherit"] });
this.procs.push(proc);
return proc;
}
async killAll(): Promise<void> {
for (const p of this.procs) p.kill();
await Promise.all(this.procs.map((p) => p.exited));
}
/** Wait for any process to exit, kill the rest, return exit code. */
async waitForFirst(): Promise<number> {
const results = this.procs.map((p, i) => p.exited.then((code) => ({ i, code })));
const first = await Promise.race(results);
await this.killAll();
return first.code;
}
}
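// Typical lifecycle (a sketch; dev.ts is the real consumer): spawn the frontend
// and backend together, then exit with whichever process stops first.
//
//   const group = new ProcessGroup();
//   group.spawn(["bun", "run", "--cwd", "web", "dev"]);
//   group.spawn(["cargo", "run", "--bin", "banner"]);
//   process.exit(await group.waitForFirst());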
+8
@@ -0,0 +1,8 @@
{
"name": "banner-scripts",
"private": true,
"type": "module",
"devDependencies": {
"@types/bun": "^1.3.8"
}
}
+20
@@ -0,0 +1,20 @@
/**
* Run project tests.
*
* Usage: bun scripts/test.ts [rust|web|<nextest filter args>]
*/
import { run } from "./lib/proc";
const input = process.argv.slice(2).join(" ").trim();
if (input === "web") {
run(["bun", "run", "--cwd", "web", "test"]);
} else if (input === "rust") {
run(["cargo", "nextest", "run", "-E", "not test(export_bindings)"]);
} else if (input === "") {
run(["cargo", "nextest", "run", "-E", "not test(export_bindings)"]);
run(["bun", "run", "--cwd", "web", "test"]);
} else {
run(["cargo", "nextest", "run", ...input.split(/\s+/)]);
}
+15
@@ -0,0 +1,15 @@
{
"compilerOptions": {
"target": "ESNext",
"module": "ESNext",
"moduleResolution": "bundler",
"strict": true,
"noEmit": true,
"skipLibCheck": true,
"types": ["bun-types"],
"paths": {
"#lib/*": ["./lib/*"]
}
},
"include": ["**/*.ts"]
}
+6 -1
@@ -14,7 +14,7 @@ use sqlx::postgres::PgPoolOptions;
use std::process::ExitCode;
use std::sync::Arc;
use std::time::Duration;
use tracing::{error, info};
use tracing::{error, info, warn};
/// Main application struct containing all necessary components
pub struct App {
@@ -70,6 +70,11 @@ impl App {
.context("Failed to run database migrations")?;
info!("Database migrations completed successfully");
// Backfill structured name columns for existing instructors
if let Err(e) = crate::data::names::backfill_instructor_names(&db_pool).await {
warn!(error = ?e, "Failed to backfill instructor names (non-fatal)");
}
// Create BannerApi and AppState
let banner_api = BannerApi::new_with_config(
config.banner_base_url.clone(),
+4
@@ -325,6 +325,7 @@ mod tests {
fn test_parse_json_with_context_null_value() {
#[derive(Debug, Deserialize)]
struct TestStruct {
#[allow(dead_code)]
name: String,
}
@@ -363,12 +364,14 @@ mod tests {
#[allow(dead_code)]
#[serde(rename = "courseTitle")]
course_title: String,
#[allow(dead_code)]
faculty: Vec<Faculty>,
}
#[derive(Debug, Deserialize)]
struct Faculty {
#[serde(rename = "displayName")]
#[allow(dead_code)]
display_name: String,
#[allow(dead_code)]
email: String,
@@ -376,6 +379,7 @@ mod tests {
#[derive(Debug, Deserialize)]
struct SearchResult {
#[allow(dead_code)]
data: Vec<Course>,
}
+125
@@ -147,6 +147,37 @@ impl Term {
},
}
}
/// URL-friendly slug, e.g. "spring-2026"
pub fn slug(&self) -> String {
format!("{}-{}", self.season.slug(), self.year)
}
/// Parse a slug like "spring-2026" into a Term
pub fn from_slug(s: &str) -> Option<Self> {
let (season_str, year_str) = s.rsplit_once('-')?;
let season = Season::from_slug(season_str)?;
let year = year_str.parse::<u32>().ok()?;
if !VALID_YEARS.contains(&year) {
return None;
}
Some(Term { year, season })
}
/// Human-readable description, e.g. "Spring 2026"
pub fn description(&self) -> String {
format!("{} {}", self.season, self.year)
}
/// Resolve a string that is either a term code ("202620") or a slug ("spring-2026") to a term code.
pub fn resolve_to_code(s: &str) -> Option<String> {
// Try parsing as a 6-digit code first
if let Ok(term) = s.parse::<Term>() {
return Some(term.to_string());
}
// Try parsing as a slug
Term::from_slug(s).map(|t| t.to_string())
}
}
impl TermPoint {
@@ -195,6 +226,25 @@ impl Season {
Season::Summer => "30",
}
}
/// Returns the lowercase slug for URL-friendly representation
pub fn slug(self) -> &'static str {
match self {
Season::Fall => "fall",
Season::Spring => "spring",
Season::Summer => "summer",
}
}
/// Parse a slug like "spring", "summer", "fall" into a Season
pub fn from_slug(s: &str) -> Option<Self> {
match s {
"fall" => Some(Season::Fall),
"spring" => Some(Season::Spring),
"summer" => Some(Season::Summer),
_ => None,
}
}
}
impl std::fmt::Display for Season {
@@ -445,4 +495,79 @@ mod tests {
}
);
}
// --- Season::slug / from_slug ---
#[test]
fn test_season_slug_roundtrip() {
for season in [Season::Fall, Season::Spring, Season::Summer] {
assert_eq!(Season::from_slug(season.slug()), Some(season));
}
}
#[test]
fn test_season_from_slug_invalid() {
assert_eq!(Season::from_slug("winter"), None);
assert_eq!(Season::from_slug(""), None);
assert_eq!(Season::from_slug("Spring"), None); // case-sensitive
}
// --- Term::slug / from_slug ---
#[test]
fn test_term_slug() {
let term = Term {
year: 2026,
season: Season::Spring,
};
assert_eq!(term.slug(), "spring-2026");
}
#[test]
fn test_term_from_slug_roundtrip() {
for code in ["202510", "202520", "202530"] {
let term = Term::from_str(code).unwrap();
let slug = term.slug();
let parsed = Term::from_slug(&slug).unwrap();
assert_eq!(parsed, term);
}
}
#[test]
fn test_term_from_slug_invalid() {
assert_eq!(Term::from_slug("winter-2026"), None);
assert_eq!(Term::from_slug("spring"), None);
assert_eq!(Term::from_slug(""), None);
}
// --- Term::description ---
#[test]
fn test_term_description() {
let term = Term {
year: 2026,
season: Season::Spring,
};
assert_eq!(term.description(), "Spring 2026");
}
// --- Term::resolve_to_code ---
#[test]
fn test_resolve_to_code_from_code() {
assert_eq!(Term::resolve_to_code("202620"), Some("202620".to_string()));
}
#[test]
fn test_resolve_to_code_from_slug() {
assert_eq!(
Term::resolve_to_code("spring-2026"),
Some("202620".to_string())
);
}
#[test]
fn test_resolve_to_code_invalid() {
assert_eq!(Term::resolve_to_code("garbage"), None);
}
}
+108 -60
@@ -11,7 +11,9 @@ use rand::distr::{Alphanumeric, SampleString};
use reqwest_middleware::ClientWithMiddleware;
use std::collections::{HashMap, VecDeque};
use std::mem::ManuallyDrop;
use std::ops::{Deref, DerefMut};
use std::sync::atomic::{AtomicBool, Ordering};
use std::sync::{Arc, LazyLock};
use std::time::{Duration, Instant};
use tokio::sync::{Mutex, Notify};
@@ -121,6 +123,64 @@ impl BannerSession {
#[cfg(test)]
mod tests {
use super::*;
use std::time::Duration;
/// Verifies that cancelling `acquire()` mid-session-creation resets `is_creating`,
/// allowing subsequent callers to proceed rather than deadlocking.
#[tokio::test]
async fn test_acquire_not_deadlocked_after_cancellation() {
use tokio::sync::mpsc;
let (tx, mut rx) = mpsc::channel::<()>(10);
// Local server: /registration signals arrival via `tx`, then hangs forever.
let listener = tokio::net::TcpListener::bind("127.0.0.1:0").await.unwrap();
let addr = listener.local_addr().unwrap();
let app = axum::Router::new().route(
"/StudentRegistrationSsb/registration",
axum::routing::get(move || {
let tx = tx.clone();
async move {
let _ = tx.send(()).await;
std::future::pending::<&str>().await
}
}),
);
tokio::spawn(async move {
axum::serve(listener, app).await.unwrap();
});
let base_url = format!("http://{}/StudentRegistrationSsb", addr);
let client = reqwest_middleware::ClientBuilder::new(
reqwest::Client::builder()
.timeout(Duration::from_secs(300))
.build()
.unwrap(),
)
.build();
let pool = SessionPool::new(client, base_url);
let term: Term = "202620".parse().unwrap();
// First acquire: cancel once the request reaches the server.
tokio::select! {
_ = pool.acquire(term) => panic!("server hangs — acquire should never complete"),
_ = rx.recv() => {} // Request arrived; dropping the future simulates timeout cancellation.
}
// Second acquire: verify it reaches the server (i.e., is_creating was reset).
// The global rate limiter has a 10s period, so allow 15s for the second attempt.
tokio::select! {
_ = pool.acquire(term) => {}
result = tokio::time::timeout(Duration::from_secs(15), rx.recv()) => {
assert!(
result.is_ok(),
"acquire() deadlocked — is_creating was not reset after cancellation"
);
}
}
}
#[test]
fn test_new_session_creates_session() {
@@ -200,50 +260,53 @@ mod tests {
}
}
/// A smart pointer that returns a BannerSession to the pool when dropped.
/// A smart pointer that returns a `BannerSession` to the pool when dropped.
pub struct PooledSession {
session: Option<BannerSession>,
// This Arc points directly to the term-specific pool.
session: ManuallyDrop<BannerSession>,
pool: Arc<TermPool>,
}
impl PooledSession {
pub fn been_used(&self) -> bool {
self.session.as_ref().unwrap().been_used()
}
}
impl Deref for PooledSession {
type Target = BannerSession;
fn deref(&self) -> &Self::Target {
// The option is only ever None after drop is called, so this is safe.
self.session.as_ref().unwrap()
&self.session
}
}
impl DerefMut for PooledSession {
fn deref_mut(&mut self) -> &mut Self::Target {
self.session.as_mut().unwrap()
&mut self.session
}
}
/// The magic happens here: when the guard goes out of scope, this is called.
impl Drop for PooledSession {
fn drop(&mut self) {
if let Some(session) = self.session.take() {
// SAFETY: `drop` is called exactly once by Rust's drop semantics,
// so `ManuallyDrop::take` is guaranteed to see a valid value.
let session = unsafe { ManuallyDrop::take(&mut self.session) };
let pool = self.pool.clone();
// Since drop() cannot be async, we spawn a task to return the session.
tokio::spawn(async move {
pool.release(session).await;
});
}
}
}
pub struct TermPool {
sessions: Mutex<VecDeque<BannerSession>>,
notifier: Notify,
is_creating: Mutex<bool>,
is_creating: AtomicBool,
}
/// RAII guard ensuring `is_creating` is reset on drop for cancellation safety.
/// Without this, a cancelled `acquire()` future would leave the flag set permanently,
/// deadlocking all subsequent callers.
struct CreatingGuard(Arc<TermPool>);
impl Drop for CreatingGuard {
fn drop(&mut self) {
self.0.is_creating.store(false, Ordering::Release);
self.0.notifier.notify_waiters();
}
}
impl TermPool {
@@ -251,7 +314,7 @@ impl TermPool {
Self {
sessions: Mutex::new(VecDeque::new()),
notifier: Notify::new(),
is_creating: Mutex::new(false),
is_creating: AtomicBool::new(false),
}
}
@@ -308,7 +371,7 @@ impl SessionPool {
if let Some(session) = queue.pop_front() {
if !session.is_expired() {
return Ok(PooledSession {
session: Some(session),
session: ManuallyDrop::new(session),
pool: Arc::clone(&term_pool),
});
} else {
@@ -317,45 +380,38 @@ impl SessionPool {
}
} // MutexGuard is dropped, lock is released.
// Slow path: No sessions available. We must either wait or become the creator.
let mut is_creating_guard = term_pool.is_creating.lock().await;
if *is_creating_guard {
// Another task is already creating a session. Release the lock and wait.
drop(is_creating_guard);
// Slow path: wait for an in-progress creation, or become the creator.
if term_pool.is_creating.load(Ordering::Acquire) {
if !waited_for_creation {
trace!("Waiting for another task to create session");
waited_for_creation = true;
}
term_pool.notifier.notified().await;
// Loop back to the top to try the fast path again.
continue;
}
// This task is now the designated creator.
*is_creating_guard = true;
drop(is_creating_guard);
// CAS to become the designated creator.
if term_pool
.is_creating
.compare_exchange(false, true, Ordering::AcqRel, Ordering::Acquire)
.is_err()
{
continue; // Lost the race — loop back and wait.
}
// Guard resets is_creating on drop (including cancellation).
let creating_guard = CreatingGuard(Arc::clone(&term_pool));
// Race: wait for a session to be returned OR for the rate limiter to allow a new one.
trace!("Pool empty, creating new session");
tokio::select! {
_ = term_pool.notifier.notified() => {
// A session was returned while we were waiting!
// We are no longer the creator. Reset the flag and loop to race for the new session.
let mut guard = term_pool.is_creating.lock().await;
*guard = false;
drop(guard);
// A session was returned — release creator role and race for it.
drop(creating_guard);
continue;
}
_ = SESSION_CREATION_RATE_LIMITER.until_ready() => {
// The rate limit has elapsed. It's our job to create the session.
let new_session_result = self.create_session(&term).await;
// After creation, we are no longer the creator. Reset the flag
// and notify all other waiting tasks.
let mut guard = term_pool.is_creating.lock().await;
*guard = false;
drop(guard);
term_pool.notifier.notify_waiters();
drop(creating_guard);
match new_session_result {
Ok(new_session) => {
@@ -366,12 +422,11 @@ impl SessionPool {
"Created new session"
);
return Ok(PooledSession {
session: Some(new_session),
session: ManuallyDrop::new(new_session),
pool: term_pool,
});
}
Err(e) => {
// Propagate the error if session creation failed.
return Err(e.context("Failed to create new session in pool"));
}
}
@@ -380,8 +435,8 @@ impl SessionPool {
}
}
/// Sets up initial session cookies by making required Banner API requests
pub async fn create_session(&self, term: &Term) -> Result<BannerSession> {
/// Sets up initial session cookies by making required Banner API requests.
async fn create_session(&self, term: &Term) -> Result<BannerSession> {
info!(term = %term, "setting up banner session");
// The 'register' or 'search' registration page
@@ -392,22 +447,15 @@ impl SessionPool {
.await?;
// TODO: Validate success
let cookies = initial_registration
let cookies: HashMap<String, String> = initial_registration
.headers()
.get_all("Set-Cookie")
.iter()
.filter_map(|header_value| {
if let Ok(cookie_str) = header_value.to_str() {
if let Ok(cookie) = Cookie::parse(cookie_str) {
Some((cookie.name().to_string(), cookie.value().to_string()))
} else {
None
}
} else {
None
}
.filter_map(|v| {
let c = Cookie::parse(v.to_str().ok()?).ok()?;
Some((c.name().to_string(), c.value().to_string()))
})
.collect::<HashMap<String, String>>();
.collect();
let jsessionid = cookies
.get("JSESSIONID")
@@ -494,8 +542,8 @@ impl SessionPool {
Ok(terms)
}
/// Selects a term for the current session
pub async fn select_term(
/// Selects a term for the current session.
async fn select_term(
&self,
term: &str,
unique_session_id: &str,
+25 -9
@@ -2,6 +2,7 @@
use crate::banner::Course;
use crate::data::models::{DbMeetingTime, UpsertCounts};
use crate::data::names::{decode_html_entities, parse_banner_name};
use crate::error::Result;
use sqlx::PgConnection;
use sqlx::PgPool;
@@ -275,14 +276,14 @@ fn compute_diffs(rows: &[UpsertDiffRow]) -> (Vec<AuditEntry>, Vec<MetricEntry>)
diff_field!(json audits, row, "meeting_times", old_meeting_times, new_meeting_times);
diff_field!(json audits, row, "attributes", old_attributes, new_attributes);
// Emit a metric entry when enrollment/wait_count/max_enrollment changed
// Skip fresh inserts (no old data to compare against)
// Emit a metric entry on fresh insert (baseline) or when enrollment data changed
let is_new = row.old_id.is_none();
let enrollment_changed = row.old_id.is_some()
&& (row.old_enrollment != Some(row.new_enrollment)
|| row.old_wait_count != Some(row.new_wait_count)
|| row.old_max_enrollment != Some(row.new_max_enrollment));
if enrollment_changed {
if is_new || enrollment_changed {
metrics.push(MetricEntry {
course_id: row.id,
enrollment: row.new_enrollment,
@@ -447,7 +448,10 @@ async fn upsert_courses(courses: &[Course], conn: &mut PgConnection) -> Result<V
.collect();
let subjects: Vec<&str> = courses.iter().map(|c| c.subject.as_str()).collect();
let course_numbers: Vec<&str> = courses.iter().map(|c| c.course_number.as_str()).collect();
let titles: Vec<&str> = courses.iter().map(|c| c.course_title.as_str()).collect();
let titles: Vec<String> = courses
.iter()
.map(|c| decode_html_entities(&c.course_title))
.collect();
let term_codes: Vec<&str> = courses.iter().map(|c| c.term.as_str()).collect();
let enrollments: Vec<i32> = courses.iter().map(|c| c.enrollment).collect();
let max_enrollments: Vec<i32> = courses.iter().map(|c| c.maximum_enrollment).collect();
@@ -627,7 +631,9 @@ async fn upsert_instructors(
conn: &mut PgConnection,
) -> Result<HashMap<String, i32>> {
let mut seen = HashSet::new();
let mut display_names: Vec<&str> = Vec::new();
let mut display_names: Vec<String> = Vec::new();
let mut first_names: Vec<Option<String>> = Vec::new();
let mut last_names: Vec<Option<String>> = Vec::new();
let mut emails_lower: Vec<String> = Vec::new();
let mut skipped_no_email = 0u32;
@@ -636,7 +642,10 @@ async fn upsert_instructors(
if let Some(email) = &faculty.email_address {
let email_lower = email.to_lowercase();
if seen.insert(email_lower.clone()) {
display_names.push(faculty.display_name.as_str());
let parts = parse_banner_name(&faculty.display_name);
display_names.push(decode_html_entities(&faculty.display_name));
first_names.push(parts.as_ref().map(|p| p.first.clone()));
last_names.push(parts.as_ref().map(|p| p.last.clone()));
emails_lower.push(email_lower);
}
} else {
@@ -657,18 +666,25 @@ async fn upsert_instructors(
}
let email_refs: Vec<&str> = emails_lower.iter().map(|s| s.as_str()).collect();
let first_name_refs: Vec<Option<&str>> = first_names.iter().map(|s| s.as_deref()).collect();
let last_name_refs: Vec<Option<&str>> = last_names.iter().map(|s| s.as_deref()).collect();
let rows: Vec<(i32, String)> = sqlx::query_as(
r#"
INSERT INTO instructors (display_name, email)
SELECT * FROM UNNEST($1::text[], $2::text[])
INSERT INTO instructors (display_name, email, first_name, last_name)
SELECT * FROM UNNEST($1::text[], $2::text[], $3::text[], $4::text[])
ON CONFLICT (email)
DO UPDATE SET display_name = EXCLUDED.display_name
DO UPDATE SET
display_name = EXCLUDED.display_name,
first_name = EXCLUDED.first_name,
last_name = EXCLUDED.last_name
RETURNING id, email
"#,
)
.bind(&display_names)
.bind(&email_refs)
.bind(&first_name_refs)
.bind(&last_name_refs)
.fetch_all(&mut *conn)
.await
.map_err(|e| anyhow::anyhow!("Failed to batch upsert instructors: {}", e))?;
+1
@@ -3,6 +3,7 @@
pub mod batch;
pub mod courses;
pub mod models;
pub mod names;
pub mod reference;
pub mod rmp;
pub mod rmp_matching;
+2
@@ -103,6 +103,8 @@ pub struct Instructor {
pub display_name: String,
pub email: String,
pub rmp_match_status: String,
pub first_name: Option<String>,
pub last_name: Option<String>,
}
#[allow(dead_code)]
+728
@@ -0,0 +1,728 @@
//! Name parsing, normalization, and matching utilities.
//!
//! Handles the mismatch between Banner's single `display_name` ("Last, First Middle")
//! and RMP's separate `first_name`/`last_name` fields, plus data quality issues
//! from both sources (HTML entities, accents, nicknames, suffixes, junk).
use sqlx::PgPool;
use tracing::{info, warn};
use unicode_normalization::UnicodeNormalization;
/// Known name suffixes to extract from the last-name portion.
const SUFFIXES: &[&str] = &["iv", "iii", "ii", "jr", "sr"];
/// Parsed, cleaned name components.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct NameParts {
/// Cleaned display-quality first name(s): "H. Paul", "María"
pub first: String,
/// Cleaned display-quality last name: "O'Brien", "LeBlanc"
pub last: String,
/// Middle name/initial if detected: "Manuel", "L."
pub middle: Option<String>,
/// Suffix if detected: "III", "Jr"
pub suffix: Option<String>,
/// Nicknames extracted from parentheses: ["Ken"], ["Qian"]
pub nicknames: Vec<String>,
}
/// Decode common HTML entities found in Banner data.
///
/// Handles both named entities (`&amp;`, `&uuml;`) and numeric references
/// (`&#39;`, `&#x27;`).
pub(crate) fn decode_html_entities(s: &str) -> String {
if !s.contains('&') {
return s.to_string();
}
htmlize::unescape(s).to_string()
}
/// Extract parenthesized nicknames from a name string.
///
/// `"William (Ken)"` → `("William", vec!["Ken"])`
/// `"Guenevere (Qian)"` → `("Guenevere", vec!["Qian"])`
/// `"John (jack) C."` → `("John C.", vec!["jack"])`
fn extract_nicknames(s: &str) -> (String, Vec<String>) {
let mut nicknames = Vec::new();
let mut cleaned = String::with_capacity(s.len());
let mut chars = s.chars().peekable();
while let Some(ch) = chars.next() {
if ch == '(' {
let mut nick = String::new();
for inner in chars.by_ref() {
if inner == ')' {
break;
}
nick.push(inner);
}
let nick = nick.trim().to_string();
if !nick.is_empty() {
nicknames.push(nick);
}
} else if ch == '"' || ch == '\u{201C}' || ch == '\u{201D}' {
// Extract quoted nicknames: Thomas "Butch" → nickname "Butch"
let mut nick = String::new();
for inner in chars.by_ref() {
if inner == '"' || inner == '\u{201C}' || inner == '\u{201D}' {
break;
}
nick.push(inner);
}
let nick = nick.trim().to_string();
if !nick.is_empty() {
nicknames.push(nick);
}
} else {
cleaned.push(ch);
}
}
// Collapse multiple spaces left by extraction
let cleaned = collapse_whitespace(&cleaned);
(cleaned, nicknames)
}
/// Extract a suffix (Jr, Sr, II, III, IV) from the last-name portion.
///
/// `"LeBlanc III"` → `("LeBlanc", Some("III"))`
/// `"Smith Jr."` → `("Smith", Some("Jr."))`
fn extract_suffix(last: &str) -> (String, Option<String>) {
// Try to match the last token as a suffix
let tokens: Vec<&str> = last.split_whitespace().collect();
if tokens.len() < 2 {
return (last.to_string(), None);
}
let candidate = tokens.last().unwrap();
let candidate_normalized = candidate.to_lowercase().trim_end_matches('.').to_string();
if SUFFIXES.contains(&candidate_normalized.as_str()) {
let name_part = tokens[..tokens.len() - 1].join(" ");
return (name_part, Some(candidate.to_string()));
}
(last.to_string(), None)
}
/// Strip junk commonly found in RMP name fields.
///
/// - Trailing commas: `"Cronenberger,"` → `"Cronenberger"`
/// - Email addresses: `"Neel.Baumgardner@utsa.edu"` → `""` (returns empty)
fn strip_junk(s: &str) -> String {
let s = s.trim();
// If the string looks like an email, return empty
if s.contains('@') && s.contains('.') && !s.contains(' ') {
return String::new();
}
// Strip trailing commas
s.trim_end_matches(',').trim().to_string()
}
/// Collapse runs of whitespace into single spaces and trim.
fn collapse_whitespace(s: &str) -> String {
s.split_whitespace().collect::<Vec<_>>().join(" ")
}
/// Parse a Banner `display_name` ("Last, First Middle") into structured parts.
///
/// Handles HTML entities, suffixes, and multi-token names.
///
/// # Examples
///
/// ```
/// use banner::data::names::parse_banner_name;
///
/// let parts = parse_banner_name("O&#39;Brien, Erin").unwrap();
/// assert_eq!(parts.first, "Erin");
/// assert_eq!(parts.last, "O'Brien");
/// ```
pub fn parse_banner_name(display_name: &str) -> Option<NameParts> {
// 1. Decode HTML entities
let decoded = decode_html_entities(display_name);
// 2. Split on first comma
let (last_part, first_part) = decoded.split_once(',')?;
let last_part = last_part.trim();
let first_part = first_part.trim();
if last_part.is_empty() || first_part.is_empty() {
return None;
}
// 3. Extract suffix from last name
let (last_clean, suffix) = extract_suffix(last_part);
// 4. Parse first-name portion: first token(s) + optional middle
// Banner format is "First Middle", so we keep all tokens as first_name
// to support "H. Paul" style names
let first_clean = collapse_whitespace(first_part);
Some(NameParts {
first: first_clean,
last: last_clean,
middle: None, // Banner doesn't clearly delineate middle vs first
suffix,
nicknames: Vec::new(), // Banner doesn't include nicknames
})
}
/// Parse RMP professor name fields into structured parts.
///
/// Handles junk data, nicknames in parentheses/quotes, and suffixes.
///
/// # Examples
///
/// ```
/// use banner::data::names::parse_rmp_name;
///
/// let parts = parse_rmp_name("William (Ken)", "Burchenal").unwrap();
/// assert_eq!(parts.first, "William");
/// assert_eq!(parts.nicknames, vec!["Ken"]);
/// ```
pub fn parse_rmp_name(first_name: &str, last_name: &str) -> Option<NameParts> {
let first_cleaned = strip_junk(first_name);
let last_cleaned = strip_junk(last_name);
if first_cleaned.is_empty() || last_cleaned.is_empty() {
return None;
}
// Extract nicknames from parens/quotes in first name
let (first_no_nicks, nicknames) = extract_nicknames(&first_cleaned);
let first_final = collapse_whitespace(&first_no_nicks);
// Extract suffix from last name
let (last_final, suffix) = extract_suffix(&last_cleaned);
if first_final.is_empty() || last_final.is_empty() {
return None;
}
Some(NameParts {
first: first_final,
last: last_final,
middle: None,
suffix,
nicknames,
})
}
/// Normalize a name string for matching index keys.
///
/// Pipeline: lowercase → NFD decompose → strip combining marks →
/// strip ALL punctuation, hyphens, and whitespace.
///
/// This produces a compact, space-free string so that "Aguirre Mesa" (Banner)
/// and "Aguirre-Mesa" (RMP) both become "aguirremesa".
///
/// # Examples
///
/// ```
/// use banner::data::names::normalize_for_matching;
///
/// assert_eq!(normalize_for_matching("García"), "garcia");
/// assert_eq!(normalize_for_matching("O'Brien"), "obrien");
/// assert_eq!(normalize_for_matching("Aguirre-Mesa"), "aguirremesa");
/// assert_eq!(normalize_for_matching("Aguirre Mesa"), "aguirremesa");
/// ```
pub fn normalize_for_matching(s: &str) -> String {
s.to_lowercase()
.nfd()
.filter(|c| {
// Keep only non-combining alphabetic characters — strip everything else
c.is_alphabetic() && !unicode_normalization::char::is_combining_mark(*c)
})
.collect()
}
/// Generate all matching index keys for a parsed name.
///
/// For a name like "H. Paul" / "LeBlanc" with no nicknames, generates:
/// - `("leblanc", "h paul")` — full normalized first
/// - `("leblanc", "paul")` — individual token (if multi-token)
/// - `("leblanc", "h")` — individual token (if multi-token)
///
/// For a name like "William" / "Burchenal" with nickname "Ken":
/// - `("burchenal", "william")` — primary
/// - `("burchenal", "ken")` — nickname variant
pub fn matching_keys(parts: &NameParts) -> Vec<(String, String)> {
let norm_last = normalize_for_matching(&parts.last);
if norm_last.is_empty() {
return Vec::new();
}
let mut keys = Vec::new();
let mut seen = std::collections::HashSet::new();
// Primary key: full first name (all spaces stripped)
let norm_first_full = normalize_for_matching(&parts.first);
if !norm_first_full.is_empty() && seen.insert(norm_first_full.clone()) {
keys.push((norm_last.clone(), norm_first_full));
}
// Individual tokens from the display-form first name
// (split before full normalization so we can generate per-token keys)
let first_tokens: Vec<&str> = parts.first.split_whitespace().collect();
if first_tokens.len() > 1 {
for token in &first_tokens {
let norm_token = normalize_for_matching(token);
if !norm_token.is_empty() && seen.insert(norm_token.clone()) {
keys.push((norm_last.clone(), norm_token));
}
}
}
// Nickname variants
for nick in &parts.nicknames {
let norm_nick = normalize_for_matching(nick);
if !norm_nick.is_empty() && seen.insert(norm_nick.clone()) {
keys.push((norm_last.clone(), norm_nick));
}
}
keys
}
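// End-to-end sketch (values mirror the doc comments above):
//
//   let parts = parse_banner_name("LeBlanc III, H. Paul").unwrap();
//   let keys = matching_keys(&parts);
//   // keys == [("leblanc", "hpaul"), ("leblanc", "h"), ("leblanc", "paul")]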
/// Backfill `first_name`/`last_name` columns for all instructors that have
/// a `display_name` but NULL structured name fields.
///
/// Parses each `display_name` using [`parse_banner_name`] and updates the row.
/// Logs warnings for any names that fail to parse.
pub async fn backfill_instructor_names(db_pool: &PgPool) -> crate::error::Result<()> {
let rows: Vec<(i32, String)> = sqlx::query_as(
"SELECT id, display_name FROM instructors WHERE first_name IS NULL OR last_name IS NULL",
)
.fetch_all(db_pool)
.await?;
if rows.is_empty() {
return Ok(());
}
let total = rows.len();
let mut ids: Vec<i32> = Vec::with_capacity(total);
let mut firsts: Vec<String> = Vec::with_capacity(total);
let mut lasts: Vec<String> = Vec::with_capacity(total);
let mut unparseable = 0usize;
for (id, display_name) in &rows {
match parse_banner_name(display_name) {
Some(parts) => {
ids.push(*id);
firsts.push(parts.first);
lasts.push(parts.last);
}
None => {
warn!(
id,
display_name, "Failed to parse instructor display_name during backfill"
);
unparseable += 1;
}
}
}
if !ids.is_empty() {
let first_refs: Vec<&str> = firsts.iter().map(|s| s.as_str()).collect();
let last_refs: Vec<&str> = lasts.iter().map(|s| s.as_str()).collect();
sqlx::query(
r#"
UPDATE instructors i
SET first_name = v.first_name, last_name = v.last_name
FROM UNNEST($1::int4[], $2::text[], $3::text[])
AS v(id, first_name, last_name)
WHERE i.id = v.id
"#,
)
.bind(&ids)
.bind(&first_refs)
.bind(&last_refs)
.execute(db_pool)
.await?;
}
info!(
total,
updated = ids.len(),
unparseable,
"Instructor name backfill complete"
);
Ok(())
}
#[cfg(test)]
mod tests {
use super::*;
// -----------------------------------------------------------------------
// HTML entity decoding
// -----------------------------------------------------------------------
#[test]
fn decode_apostrophe_entity() {
assert_eq!(decode_html_entities("O&#39;Brien"), "O'Brien");
}
#[test]
fn decode_umlaut_entity() {
assert_eq!(decode_html_entities("B&uuml;lent"), "Bülent");
}
#[test]
fn decode_no_entities() {
assert_eq!(decode_html_entities("Smith"), "Smith");
}
// -----------------------------------------------------------------------
// Nickname extraction
// -----------------------------------------------------------------------
#[test]
fn extract_paren_nickname() {
let (cleaned, nicks) = extract_nicknames("William (Ken)");
assert_eq!(cleaned, "William");
assert_eq!(nicks, vec!["Ken"]);
}
#[test]
fn extract_quoted_nickname() {
let (cleaned, nicks) = extract_nicknames("Thomas \"Butch\"");
assert_eq!(cleaned, "Thomas");
assert_eq!(nicks, vec!["Butch"]);
}
#[test]
fn extract_paren_with_extra_text() {
let (cleaned, nicks) = extract_nicknames("John (jack) C.");
assert_eq!(cleaned, "John C.");
assert_eq!(nicks, vec!["jack"]);
}
#[test]
fn extract_no_nicknames() {
let (cleaned, nicks) = extract_nicknames("Maria Elena");
assert_eq!(cleaned, "Maria Elena");
assert!(nicks.is_empty());
}
// -----------------------------------------------------------------------
// Suffix extraction
// -----------------------------------------------------------------------
#[test]
fn extract_suffix_iii() {
let (name, suffix) = extract_suffix("LeBlanc III");
assert_eq!(name, "LeBlanc");
assert_eq!(suffix, Some("III".to_string()));
}
#[test]
fn extract_suffix_jr_period() {
let (name, suffix) = extract_suffix("Smith Jr.");
assert_eq!(name, "Smith");
assert_eq!(suffix, Some("Jr.".to_string()));
}
#[test]
fn extract_no_suffix() {
let (name, suffix) = extract_suffix("García");
assert_eq!(name, "García");
assert_eq!(suffix, None);
}
// -----------------------------------------------------------------------
// Junk stripping
// -----------------------------------------------------------------------
#[test]
fn strip_trailing_comma() {
assert_eq!(strip_junk("Cronenberger,"), "Cronenberger");
}
#[test]
fn strip_email_address() {
assert_eq!(strip_junk("Neel.Baumgardner@utsa.edu"), "");
}
#[test]
fn strip_clean_name() {
assert_eq!(strip_junk(" Maria "), "Maria");
}
// -----------------------------------------------------------------------
// normalize_for_matching
// -----------------------------------------------------------------------
#[test]
fn normalize_strips_accents() {
assert_eq!(normalize_for_matching("García"), "garcia");
}
#[test]
fn normalize_strips_apostrophe() {
assert_eq!(normalize_for_matching("O'Brien"), "obrien");
}
#[test]
fn normalize_strips_hyphen() {
assert_eq!(normalize_for_matching("Aguirre-Mesa"), "aguirremesa");
}
#[test]
fn normalize_tilde_n() {
assert_eq!(normalize_for_matching("Muñoz"), "munoz");
}
#[test]
fn normalize_umlaut() {
assert_eq!(normalize_for_matching("Müller"), "muller");
}
#[test]
fn normalize_period() {
assert_eq!(normalize_for_matching("H. Paul"), "hpaul");
}
#[test]
fn normalize_strips_spaces() {
assert_eq!(normalize_for_matching("Mary Lou"), "marylou");
}
// -----------------------------------------------------------------------
// parse_banner_name
// -----------------------------------------------------------------------
#[test]
fn banner_standard_name() {
let p = parse_banner_name("Smith, John").unwrap();
assert_eq!(p.first, "John");
assert_eq!(p.last, "Smith");
assert_eq!(p.suffix, None);
}
#[test]
fn banner_html_entity_apostrophe() {
let p = parse_banner_name("O&#39;Brien, Erin").unwrap();
assert_eq!(p.first, "Erin");
assert_eq!(p.last, "O'Brien");
}
#[test]
fn banner_html_entity_umlaut() {
let p = parse_banner_name("Temel, B&uuml;lent").unwrap();
assert_eq!(p.first, "Bülent");
assert_eq!(p.last, "Temel");
}
#[test]
fn banner_suffix_iii() {
let p = parse_banner_name("LeBlanc III, H. Paul").unwrap();
assert_eq!(p.first, "H. Paul");
assert_eq!(p.last, "LeBlanc");
assert_eq!(p.suffix, Some("III".to_string()));
}
#[test]
fn banner_suffix_ii() {
let p = parse_banner_name("Ellis II, Ronald").unwrap();
assert_eq!(p.first, "Ronald");
assert_eq!(p.last, "Ellis");
assert_eq!(p.suffix, Some("II".to_string()));
}
#[test]
fn banner_multi_word_last() {
let p = parse_banner_name("Aguirre Mesa, Andres").unwrap();
assert_eq!(p.first, "Andres");
assert_eq!(p.last, "Aguirre Mesa");
}
#[test]
fn banner_hyphenated_last() {
let p = parse_banner_name("Abu-Lail, Nehal").unwrap();
assert_eq!(p.first, "Nehal");
assert_eq!(p.last, "Abu-Lail");
}
#[test]
fn banner_with_middle_name() {
let p = parse_banner_name("Smith, John David").unwrap();
assert_eq!(p.first, "John David");
assert_eq!(p.last, "Smith");
}
#[test]
fn banner_no_comma() {
assert!(parse_banner_name("SingleName").is_none());
}
#[test]
fn banner_empty_first() {
assert!(parse_banner_name("Smith,").is_none());
}
#[test]
fn banner_empty_last() {
assert!(parse_banner_name(", John").is_none());
}
// -----------------------------------------------------------------------
// parse_rmp_name
// -----------------------------------------------------------------------
#[test]
fn rmp_standard_name() {
let p = parse_rmp_name("John", "Smith").unwrap();
assert_eq!(p.first, "John");
assert_eq!(p.last, "Smith");
}
#[test]
fn rmp_with_nickname() {
let p = parse_rmp_name("William (Ken)", "Burchenal").unwrap();
assert_eq!(p.first, "William");
assert_eq!(p.nicknames, vec!["Ken"]);
}
#[test]
fn rmp_trailing_comma_last() {
let p = parse_rmp_name("J.", "Cronenberger,").unwrap();
assert_eq!(p.last, "Cronenberger");
}
#[test]
fn rmp_email_in_first() {
assert!(parse_rmp_name("Neel.Baumgardner@utsa.edu", "Baumgardner").is_none());
}
#[test]
fn rmp_suffix_in_last() {
let p = parse_rmp_name("H. Paul", "LeBlanc III").unwrap();
assert_eq!(p.first, "H. Paul");
assert_eq!(p.last, "LeBlanc");
assert_eq!(p.suffix, Some("III".to_string()));
}
#[test]
fn rmp_quoted_nickname() {
let p = parse_rmp_name("Thomas \"Butch\"", "Matjeka").unwrap();
assert_eq!(p.first, "Thomas");
assert_eq!(p.nicknames, vec!["Butch"]);
}
#[test]
fn rmp_accented_last() {
let p = parse_rmp_name("Liliana", "Saldaña").unwrap();
assert_eq!(p.last, "Saldaña");
}
// -----------------------------------------------------------------------
// matching_keys
// -----------------------------------------------------------------------
#[test]
fn keys_simple_name() {
let parts = NameParts {
first: "John".into(),
last: "Smith".into(),
middle: None,
suffix: None,
nicknames: vec![],
};
let keys = matching_keys(&parts);
assert_eq!(keys, vec![("smith".into(), "john".into())]);
}
#[test]
fn keys_multi_token_first() {
let parts = NameParts {
first: "H. Paul".into(),
last: "LeBlanc".into(),
middle: None,
suffix: Some("III".into()),
nicknames: vec![],
};
let keys = matching_keys(&parts);
assert!(keys.contains(&("leblanc".into(), "hpaul".into())));
assert!(keys.contains(&("leblanc".into(), "paul".into())));
assert!(keys.contains(&("leblanc".into(), "h".into())));
assert_eq!(keys.len(), 3);
}
#[test]
fn keys_with_nickname() {
let parts = NameParts {
first: "William".into(),
last: "Burchenal".into(),
middle: None,
suffix: None,
nicknames: vec!["Ken".into()],
};
let keys = matching_keys(&parts);
assert!(keys.contains(&("burchenal".into(), "william".into())));
assert!(keys.contains(&("burchenal".into(), "ken".into())));
assert_eq!(keys.len(), 2);
}
#[test]
fn keys_hyphenated_last() {
let parts = parse_banner_name("Aguirre-Mesa, Andres").unwrap();
let keys = matching_keys(&parts);
// Hyphen removed: "aguirremesa"
assert!(keys.contains(&("aguirremesa".into(), "andres".into())));
}
#[test]
fn keys_accented_name() {
let parts = parse_rmp_name("Liliana", "Saldaña").unwrap();
let keys = matching_keys(&parts);
assert!(keys.contains(&("saldana".into(), "liliana".into())));
}
#[test]
fn keys_cross_source_match() {
// Banner: "Aguirre Mesa, Andres" → last="Aguirre Mesa"
let banner = parse_banner_name("Aguirre Mesa, Andres").unwrap();
let banner_keys = matching_keys(&banner);
// RMP: "Andres" / "Aguirre-Mesa" → last="Aguirre-Mesa"
let rmp = parse_rmp_name("Andres", "Aguirre-Mesa").unwrap();
let rmp_keys = matching_keys(&rmp);
// Both should normalize to ("aguirremesa", "andres")
assert!(banner_keys.iter().any(|k| rmp_keys.contains(k)));
}
#[test]
fn keys_accent_cross_match() {
// Banner: "García, José" (if Banner ever has accents)
let banner = parse_banner_name("Garcia, Jose").unwrap();
let banner_keys = matching_keys(&banner);
// RMP: "José" / "García"
let rmp = parse_rmp_name("José", "García").unwrap();
let rmp_keys = matching_keys(&rmp);
// Both normalize to ("garcia", "jose")
assert!(banner_keys.iter().any(|k| rmp_keys.contains(k)));
}
}
+65 -82
@@ -91,25 +91,6 @@ pub async fn batch_upsert_rmp_professors(
Ok(())
}
/// Normalize a name for matching: lowercase, trim, strip trailing periods.
pub(crate) fn normalize(s: &str) -> String {
s.trim().to_lowercase().trim_end_matches('.').to_string()
}
/// Parse Banner's "Last, First Middle" display name into (last, first) tokens.
///
/// Returns `None` if the format is unparseable (no comma, empty parts).
pub(crate) fn parse_display_name(display_name: &str) -> Option<(String, String)> {
let (last_part, first_part) = display_name.split_once(',')?;
let last = normalize(last_part);
// Take only the first token of the first-name portion to drop middle names/initials.
let first = normalize(first_part.split_whitespace().next()?);
if last.is_empty() || first.is_empty() {
return None;
}
Some((last, first))
}
/// Retrieve RMP rating data for an instructor by instructor id.
///
/// Returns `(avg_rating, num_ratings)` for the best linked RMP profile
@@ -136,74 +117,76 @@ pub async fn get_instructor_rmp_data(
Ok(row)
}
Removed: the old inline tests for normalize() and parse_display_name(), deleted along with those helpers (superseded by the new name-parsing module and its tests above):

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn parse_standard_name() {
        assert_eq!(
            parse_display_name("Smith, John"),
            Some(("smith".into(), "john".into()))
        );
    }

    #[test]
    fn parse_name_with_middle() {
        assert_eq!(
            parse_display_name("Smith, John David"),
            Some(("smith".into(), "john".into()))
        );
    }

    #[test]
    fn parse_name_with_middle_initial() {
        assert_eq!(
            parse_display_name("Garcia, Maria L."),
            Some(("garcia".into(), "maria".into()))
        );
    }

    #[test]
    fn parse_name_with_suffix_in_last() {
        // Banner may encode "Jr." as part of the last name.
        // normalize() strips trailing periods so "Jr." becomes "jr".
        assert_eq!(
            parse_display_name("Smith Jr., James"),
            Some(("smith jr".into(), "james".into()))
        );
    }

    #[test]
    fn parse_no_comma_returns_none() {
        assert_eq!(parse_display_name("SingleName"), None);
    }

    #[test]
    fn parse_empty_first_returns_none() {
        assert_eq!(parse_display_name("Smith,"), None);
    }

    #[test]
    fn parse_empty_last_returns_none() {
        assert_eq!(parse_display_name(", John"), None);
    }

    #[test]
    fn parse_extra_whitespace() {
        assert_eq!(
            parse_display_name("  Doe ,  Jane Marie "),
            Some(("doe".into(), "jane".into()))
        );
    }

    #[test]
    fn normalize_trims_and_lowercases() {
        assert_eq!(normalize("  FOO  "), "foo");
    }

    #[test]
    fn normalize_strips_trailing_period() {
        assert_eq!(normalize("Jr."), "jr");
    }
}

Added: transactional unmatch logic, moved into the data layer so the admin handler below can delegate to it:

/// Unmatch an instructor from an RMP profile.
///
/// Removes the link from `instructor_rmp_links` and updates the instructor's
/// `rmp_match_status` to 'unmatched' if no links remain.
///
/// If `rmp_legacy_id` is `Some`, removes only that specific link.
/// If `None`, removes all links for the instructor.
pub async fn unmatch_instructor(
    db_pool: &PgPool,
    instructor_id: i32,
    rmp_legacy_id: Option<i32>,
) -> Result<()> {
    let mut tx = db_pool.begin().await?;

    // Delete specific link or all links
    if let Some(legacy_id) = rmp_legacy_id {
        sqlx::query(
            "DELETE FROM instructor_rmp_links WHERE instructor_id = $1 AND rmp_legacy_id = $2",
        )
        .bind(instructor_id)
        .bind(legacy_id)
        .execute(&mut *tx)
        .await?;
    } else {
        sqlx::query("DELETE FROM instructor_rmp_links WHERE instructor_id = $1")
            .bind(instructor_id)
            .execute(&mut *tx)
            .await?;
    }

    // Check if any links remain
    let (remaining,): (i64,) =
        sqlx::query_as("SELECT COUNT(*) FROM instructor_rmp_links WHERE instructor_id = $1")
            .bind(instructor_id)
            .fetch_one(&mut *tx)
            .await?;

    // Update instructor status if no links remain
    if remaining == 0 {
        sqlx::query("UPDATE instructors SET rmp_match_status = 'unmatched' WHERE id = $1")
            .bind(instructor_id)
            .execute(&mut *tx)
            .await?;
    }

    // Reset accepted candidates back to pending when unmatching
    // This allows the candidates to be re-matched later
    if let Some(legacy_id) = rmp_legacy_id {
        // Reset only the specific candidate
        sqlx::query(
            "UPDATE rmp_match_candidates
             SET status = 'pending', resolved_at = NULL, resolved_by = NULL
             WHERE instructor_id = $1 AND rmp_legacy_id = $2 AND status = 'accepted'",
        )
        .bind(instructor_id)
        .bind(legacy_id)
        .execute(&mut *tx)
        .await?;
    } else {
        // Reset all accepted candidates for this instructor
        sqlx::query(
            "UPDATE rmp_match_candidates
             SET status = 'pending', resolved_at = NULL, resolved_by = NULL
             WHERE instructor_id = $1 AND status = 'accepted'",
        )
        .bind(instructor_id)
        .execute(&mut *tx)
        .await?;
    }

    tx.commit().await?;
    Ok(())
}
+225 -43
@@ -1,6 +1,6 @@
//! Confidence scoring and candidate generation for RMP instructor matching.
use crate::data::rmp::{normalize, parse_display_name};
use crate::data::names::{matching_keys, parse_banner_name, parse_rmp_name};
use crate::error::Result;
use serde::{Deserialize, Serialize};
use sqlx::PgPool;
@@ -14,6 +14,7 @@ use tracing::{debug, info};
/// Breakdown of individual scoring signals.
#[derive(Debug, Clone, Default, Serialize, Deserialize)]
pub struct ScoreBreakdown {
pub name: f32,
pub department: f32,
pub uniqueness: f32,
pub volume: f32,
@@ -37,12 +38,13 @@ const MIN_CANDIDATE_THRESHOLD: f32 = 0.40;
const AUTO_ACCEPT_THRESHOLD: f32 = 0.85;
// ---------------------------------------------------------------------------
// Weights
// Weights (must sum to 1.0)
// ---------------------------------------------------------------------------
const WEIGHT_DEPARTMENT: f32 = 0.50;
const WEIGHT_UNIQUENESS: f32 = 0.30;
const WEIGHT_VOLUME: f32 = 0.20;
const WEIGHT_NAME: f32 = 0.50;
const WEIGHT_DEPARTMENT: f32 = 0.25;
const WEIGHT_UNIQUENESS: f32 = 0.15;
const WEIGHT_VOLUME: f32 = 0.10;
// ---------------------------------------------------------------------------
// Pure scoring functions
@@ -78,8 +80,9 @@ fn department_similarity(subjects: &[String], rmp_department: Option<&str>) -> f
/// Expand common subject abbreviations used at UTSA and check for overlap.
fn matches_known_abbreviation(subject: &str, department: &str) -> bool {
const MAPPINGS: &[(&str, &[&str])] = &[
// Core subjects (original mappings, corrected)
("cs", &["computer science"]),
("ece", &["electrical", "computer engineering"]),
("ece", &["early childhood education", "early childhood"]),
("ee", &["electrical engineering", "electrical"]),
("me", &["mechanical engineering", "mechanical"]),
("ce", &["civil engineering", "civil"]),
@@ -105,6 +108,85 @@ fn matches_known_abbreviation(subject: &str, department: &str) -> bool {
("ms", &["management science"]),
("kin", &["kinesiology"]),
("com", &["communication"]),
// Architecture & Design
("arc", &["architecture"]),
("ide", &["interior design", "design"]),
// Anthropology & Ethnic Studies
("ant", &["anthropology"]),
("aas", &["african american studies", "ethnic studies"]),
("mas", &["mexican american studies", "ethnic studies"]),
("regs", &["ethnic studies", "gender"]),
// Languages
("lng", &["linguistics", "applied linguistics"]),
("spn", &["spanish"]),
("frn", &["french"]),
("ger", &["german"]),
("chn", &["chinese"]),
("jpn", &["japanese"]),
("kor", &["korean"]),
("itl", &["italian"]),
("rus", &["russian"]),
("lat", &["latin"]),
("grk", &["greek"]),
("asl", &["american sign language", "sign language"]),
(
"fl",
&["foreign languages", "languages", "modern languages"],
),
// Education
("edu", &["education"]),
("ci", &["curriculum", "education"]),
("edl", &["educational leadership", "education"]),
("edp", &["educational psychology", "education"]),
("bbl", &["bilingual education"]),
("spe", &["special education", "education"]),
// Business
("ent", &["entrepreneurship"]),
("gba", &["general business", "business"]),
("blw", &["business law", "law"]),
("rfd", &["real estate"]),
("mot", &["management of technology", "management"]),
// Engineering
("egr", &["engineering"]),
("bme", &["biomedical engineering", "engineering"]),
("cme", &["chemical engineering", "engineering"]),
("cpe", &["computer engineering", "engineering"]),
("ise", &["industrial", "systems engineering", "engineering"]),
("mate", &["materials engineering", "engineering"]),
// Sciences
("che", &["chemistry"]),
("bch", &["biochemistry", "chemistry"]),
("geo", &["geology"]),
("phy", &["physics"]),
("ast", &["astronomy"]),
("es", &["environmental science"]),
// Social Sciences
("crj", &["criminal justice"]),
("swk", &["social work"]),
("pad", &["public administration"]),
("grg", &["geography"]),
("ges", &["geography"]),
// Humanities
("cla", &["classics"]),
("hum", &["humanities"]),
("wgss", &["women's studies"]),
// Health
("hth", &["health"]),
("hcp", &["health science", "health"]),
("ntr", &["nutrition"]),
// Military
("msc", &["military science"]),
("asc", &["aerospace"]),
// Arts
("dan", &["dance"]),
("thr", &["theater"]),
("ahc", &["art history"]),
// Other
("cou", &["counseling"]),
("hon", &["honors"]),
("csm", &["construction"]),
("wrc", &["writing"]),
("set", &["tourism management", "tourism"]),
];
for &(abbr, expansions) in MAPPINGS {
@@ -119,35 +201,39 @@ fn matches_known_abbreviation(subject: &str, department: &str) -> bool {
/// Compute match confidence score (0.0-1.0) for an instructor-RMP pair.
///
/// Name matching is handled by the caller via pre-filtering on exact
/// normalized `(last, first)`, so only department, uniqueness, and volume
/// signals are scored here.
/// The name signal is always 1.0 since candidates are only generated for
/// exact normalized name matches. The effective score range is 0.50-1.0.
pub fn compute_match_score(
instructor_subjects: &[String],
rmp_department: Option<&str>,
candidate_count: usize,
rmp_num_ratings: i32,
) -> MatchScore {
// --- Department (0.50) ---
// --- Name (0.50) — always 1.0, candidates only exist for exact matches ---
let name_score = 1.0;
// --- Department (0.25) ---
let dept_score = department_similarity(instructor_subjects, rmp_department);
// --- Uniqueness (0.30) ---
// --- Uniqueness (0.15) ---
let uniqueness_score = match candidate_count {
0 | 1 => 1.0,
2 => 0.5,
_ => 0.2,
};
// --- Volume (0.20) ---
// --- Volume (0.10) ---
let volume_score = ((rmp_num_ratings as f32).ln_1p() / 5.0_f32.ln_1p()).clamp(0.0, 1.0);
let composite = dept_score * WEIGHT_DEPARTMENT
let composite = name_score * WEIGHT_NAME
+ dept_score * WEIGHT_DEPARTMENT
+ uniqueness_score * WEIGHT_UNIQUENESS
+ volume_score * WEIGHT_VOLUME;
MatchScore {
score: composite,
breakdown: ScoreBreakdown {
name: name_score,
department: dept_score,
uniqueness: uniqueness_score,
volume: volume_score,
@@ -164,6 +250,7 @@ pub fn compute_match_score(
pub struct MatchingStats {
pub total_unmatched: usize,
pub candidates_created: usize,
pub candidates_rescored: usize,
pub auto_matched: usize,
pub skipped_unparseable: usize,
pub skipped_no_candidates: usize,
@@ -179,8 +266,8 @@ struct RmpProfForMatching {
/// Generate match candidates for all unmatched instructors.
///
/// For each unmatched instructor:
/// 1. Parse `display_name` into (last, first).
/// 2. Find RMP professors with matching normalized name.
/// 1. Parse `display_name` into [`NameParts`] and generate matching keys.
/// 2. Find RMP professors with matching normalized name keys.
/// 3. Score each candidate.
/// 4. Store candidates scoring above [`MIN_CANDIDATE_THRESHOLD`].
/// 5. Auto-accept if the top candidate scores ≥ [`AUTO_ACCEPT_THRESHOLD`]
@@ -200,6 +287,7 @@ pub async fn generate_candidates(db_pool: &PgPool) -> Result<MatchingStats> {
return Ok(MatchingStats {
total_unmatched: 0,
candidates_created: 0,
candidates_rescored: 0,
auto_matched: 0,
skipped_unparseable: 0,
skipped_no_candidates: 0,
@@ -227,7 +315,7 @@ pub async fn generate_candidates(db_pool: &PgPool) -> Result<MatchingStats> {
subject_map.entry(iid).or_default().push(subject);
}
// 3. Load all RMP professors
// 3. Load all RMP professors and build multi-key name index
let prof_rows: Vec<(i32, String, String, Option<String>, i32)> = sqlx::query_as(
"SELECT legacy_id, first_name, last_name, department, num_ratings FROM rmp_professors",
)
@@ -235,40 +323,72 @@ pub async fn generate_candidates(db_pool: &PgPool) -> Result<MatchingStats> {
.await?;
// Build name index: (normalized_last, normalized_first) -> Vec<RmpProfForMatching>
// Each professor may appear under multiple keys (nicknames, token variants).
let mut name_index: HashMap<(String, String), Vec<RmpProfForMatching>> = HashMap::new();
Removed: the single-key index build (one exact (last, first) entry per professor):

for (legacy_id, first_name, last_name, department, num_ratings) in prof_rows {
    let key = (normalize(&last_name), normalize(&first_name));
    name_index.entry(key).or_default().push(RmpProfForMatching {
        legacy_id,
        department,
        num_ratings,
    });
}
// 4. Load existing candidate pairs (and rejected subset) in a single query

Added: the multi-key index build with parse-failure tracking:

let mut rmp_parse_failures = 0usize;
for (legacy_id, first_name, last_name, department, num_ratings) in &prof_rows {
    match parse_rmp_name(first_name, last_name) {
        Some(parts) => {
            let keys = matching_keys(&parts);
            for key in keys {
                name_index.entry(key).or_default().push(RmpProfForMatching {
                    legacy_id: *legacy_id,
                    department: department.clone(),
                    num_ratings: *num_ratings,
                });
            }
        }
        None => {
            rmp_parse_failures += 1;
            debug!(
                legacy_id,
                first_name, last_name, "Unparseable RMP professor name, skipping"
            );
        }
    }
}

if rmp_parse_failures > 0 {
    debug!(
        count = rmp_parse_failures,
        "RMP professors with unparseable names"
    );
}

// 4. Load existing candidate pairs — only skip resolved (accepted/rejected) pairs.
// Pending candidates are rescored so updated mappings take effect.
let candidate_rows: Vec<(i32, i32, String)> =
sqlx::query_as("SELECT instructor_id, rmp_legacy_id, status FROM rmp_match_candidates")
.fetch_all(db_pool)
.await?;
let mut existing_pairs: HashSet<(i32, i32)> = HashSet::with_capacity(candidate_rows.len());
let mut resolved_pairs: HashSet<(i32, i32)> = HashSet::new();
let mut pending_pairs: HashSet<(i32, i32)> = HashSet::new();
let mut rejected_pairs: HashSet<(i32, i32)> = HashSet::new();
for (iid, lid, status) in candidate_rows {
existing_pairs.insert((iid, lid));
match status.as_str() {
"accepted" | "rejected" => {
resolved_pairs.insert((iid, lid));
if status == "rejected" {
rejected_pairs.insert((iid, lid));
}
}
_ => {
pending_pairs.insert((iid, lid));
}
}
}
// 5. Score and collect candidates
// 5. Score and collect candidates (new + rescored pending)
let empty_subjects: Vec<String> = Vec::new();
let mut candidates: Vec<(i32, i32, f32, serde_json::Value)> = Vec::new();
let mut new_candidates: Vec<(i32, i32, f32, serde_json::Value)> = Vec::new();
let mut rescored_candidates: Vec<(i32, i32, f32, serde_json::Value)> = Vec::new();
let mut auto_accept: Vec<(i32, i32)> = Vec::new(); // (instructor_id, legacy_id)
let mut skipped_unparseable = 0usize;
let mut skipped_no_candidates = 0usize;
for (instructor_id, display_name) in &instructors {
let Some((norm_last, norm_first)) = parse_display_name(display_name) else {
let Some(instructor_parts) = parse_banner_name(display_name) else {
skipped_unparseable += 1;
debug!(
instructor_id,
@@ -279,18 +399,33 @@ pub async fn generate_candidates(db_pool: &PgPool) -> Result<MatchingStats> {
let subjects = subject_map.get(instructor_id).unwrap_or(&empty_subjects);
let key = (norm_last.clone(), norm_first.clone());
let Some(rmp_candidates) = name_index.get(&key) else {
// Generate all matching keys for this instructor and collect candidate
// RMP professors across all key variants (deduplicated by legacy_id).
let instructor_keys = matching_keys(&instructor_parts);
let mut seen_profs: HashSet<i32> = HashSet::new();
let mut matched_profs: Vec<&RmpProfForMatching> = Vec::new();
for key in &instructor_keys {
if let Some(profs) = name_index.get(key) {
for prof in profs {
if seen_profs.insert(prof.legacy_id) {
matched_profs.push(prof);
}
}
}
}
if matched_profs.is_empty() {
skipped_no_candidates += 1;
continue;
};
}
let candidate_count = rmp_candidates.len();
let candidate_count = matched_profs.len();
let mut best: Option<(f32, i32)> = None;
for prof in rmp_candidates {
for prof in &matched_profs {
let pair = (*instructor_id, prof.legacy_id);
if existing_pairs.contains(&pair) {
if resolved_pairs.contains(&pair) {
continue;
}
@@ -308,7 +443,16 @@ pub async fn generate_candidates(db_pool: &PgPool) -> Result<MatchingStats> {
let breakdown_json =
serde_json::to_value(&ms.breakdown).unwrap_or_else(|_| serde_json::json!({}));
candidates.push((*instructor_id, prof.legacy_id, ms.score, breakdown_json));
if pending_pairs.contains(&pair) {
rescored_candidates.push((
*instructor_id,
prof.legacy_id,
ms.score,
breakdown_json,
));
} else {
new_candidates.push((*instructor_id, prof.legacy_id, ms.score, breakdown_json));
}
match best {
Some((s, _)) if ms.score > s => best = Some((ms.score, prof.legacy_id)),
@@ -327,19 +471,20 @@ pub async fn generate_candidates(db_pool: &PgPool) -> Result<MatchingStats> {
}
}
// 6-7. Write candidates and auto-accept within a single transaction
let candidates_created = candidates.len();
// 6-7. Write candidates, rescore, and auto-accept within a single transaction
let candidates_created = new_candidates.len();
let candidates_rescored = rescored_candidates.len();
let auto_matched = auto_accept.len();
let mut tx = db_pool.begin().await?;
// 6. Batch-insert candidates
if !candidates.is_empty() {
let c_instructor_ids: Vec<i32> = candidates.iter().map(|(iid, _, _, _)| *iid).collect();
let c_legacy_ids: Vec<i32> = candidates.iter().map(|(_, lid, _, _)| *lid).collect();
let c_scores: Vec<f32> = candidates.iter().map(|(_, _, s, _)| *s).collect();
// 6a. Batch-insert new candidates
if !new_candidates.is_empty() {
let c_instructor_ids: Vec<i32> = new_candidates.iter().map(|(iid, _, _, _)| *iid).collect();
let c_legacy_ids: Vec<i32> = new_candidates.iter().map(|(_, lid, _, _)| *lid).collect();
let c_scores: Vec<f32> = new_candidates.iter().map(|(_, _, s, _)| *s).collect();
let c_breakdowns: Vec<serde_json::Value> =
candidates.into_iter().map(|(_, _, _, b)| b).collect();
new_candidates.into_iter().map(|(_, _, _, b)| b).collect();
sqlx::query(
r#"
@@ -358,6 +503,40 @@ pub async fn generate_candidates(db_pool: &PgPool) -> Result<MatchingStats> {
.await?;
}
// 6b. Batch-update rescored pending candidates
if !rescored_candidates.is_empty() {
let r_instructor_ids: Vec<i32> = rescored_candidates
.iter()
.map(|(iid, _, _, _)| *iid)
.collect();
let r_legacy_ids: Vec<i32> = rescored_candidates
.iter()
.map(|(_, lid, _, _)| *lid)
.collect();
let r_scores: Vec<f32> = rescored_candidates.iter().map(|(_, _, s, _)| *s).collect();
let r_breakdowns: Vec<serde_json::Value> = rescored_candidates
.into_iter()
.map(|(_, _, _, b)| b)
.collect();
sqlx::query(
r#"
UPDATE rmp_match_candidates mc
SET score = v.score, score_breakdown = v.score_breakdown
FROM UNNEST($1::int4[], $2::int4[], $3::real[], $4::jsonb[])
AS v(instructor_id, rmp_legacy_id, score, score_breakdown)
WHERE mc.instructor_id = v.instructor_id
AND mc.rmp_legacy_id = v.rmp_legacy_id
"#,
)
.bind(&r_instructor_ids)
.bind(&r_legacy_ids)
.bind(&r_scores)
.bind(&r_breakdowns)
.execute(&mut *tx)
.await?;
}
// 7. Auto-accept top candidates
if !auto_accept.is_empty() {
let aa_instructor_ids: Vec<i32> = auto_accept.iter().map(|(iid, _)| *iid).collect();
@@ -411,6 +590,7 @@ pub async fn generate_candidates(db_pool: &PgPool) -> Result<MatchingStats> {
let stats = MatchingStats {
total_unmatched,
candidates_created,
candidates_rescored,
auto_matched,
skipped_unparseable,
skipped_no_candidates,
@@ -419,6 +599,7 @@ pub async fn generate_candidates(db_pool: &PgPool) -> Result<MatchingStats> {
info!(
total_unmatched = stats.total_unmatched,
candidates_created = stats.candidates_created,
candidates_rescored = stats.candidates_rescored,
auto_matched = stats.auto_matched,
skipped_unparseable = stats.skipped_unparseable,
skipped_no_candidates = stats.skipped_no_candidates,
@@ -444,8 +625,9 @@ mod tests {
1, // unique candidate
50, // decent ratings
);
// dept 1.0*0.50 + unique 1.0*0.30 + volume ~0.97*0.20 ≈ 0.99
// name 1.0*0.50 + dept 1.0*0.25 + unique 1.0*0.15 + volume 1.0*0.10 = 1.0
// (with the formula above, volume clamps to 1.0 once num_ratings >= 5)
assert!(ms.score >= 0.85, "Expected score >= 0.85, got {}", ms.score);
assert_eq!(ms.breakdown.name, 1.0);
assert_eq!(ms.breakdown.uniqueness, 1.0);
assert_eq!(ms.breakdown.department, 1.0);
}
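As a sanity check on the reweighting: a standalone sketch reproducing the composite arithmetic for a hypothetical candidate (department match, two same-name candidates, three ratings), with the weights and volume formula copied from the diff:

fn main() {
    let (w_name, w_dept, w_uniq, w_vol): (f32, f32, f32, f32) = (0.50, 0.25, 0.15, 0.10);
    let name = 1.0_f32; // always 1.0: candidates only exist for exact key matches
    let dept = 1.0_f32; // assume the department matched
    let uniqueness = 0.5_f32; // two RMP professors share this name key
    let num_ratings = 3;
    let volume = ((num_ratings as f32).ln_1p() / 5.0_f32.ln_1p()).clamp(0.0, 1.0);
    let composite = name * w_name + dept * w_dept + uniqueness * w_uniq + volume * w_vol;
    // volume ≈ 0.774, composite ≈ 0.902: above the 0.85 auto-accept threshold
    println!("volume={volume:.3} composite={composite:.3}");
}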
+13 -3
@@ -7,6 +7,9 @@ use sqlx::PgPool;
use super::models::UserSession;
use crate::error::Result;
/// Session lifetime: 7 days (in seconds).
pub const SESSION_DURATION_SECS: u64 = 7 * 24 * 3600;
/// Generate a cryptographically random 32-byte hex token.
fn generate_token() -> String {
let bytes: [u8; 32] = rand::rng().random();
@@ -48,10 +51,18 @@ pub async fn get_session(pool: &PgPool, token: &str) -> Result<Option<UserSessio
.context("failed to get session")
}
/// Update the last-active timestamp for a session.
/// Update the last-active timestamp and extend session expiry (sliding window).
pub async fn touch_session(pool: &PgPool, token: &str) -> Result<()> {
sqlx::query("UPDATE user_sessions SET last_active_at = now() WHERE id = $1")
sqlx::query(
r#"
UPDATE user_sessions
SET last_active_at = now(),
expires_at = now() + make_interval(secs => $2::double precision)
WHERE id = $1
"#,
)
.bind(token)
.bind(SESSION_DURATION_SECS as f64)
.execute(pool)
.await
.context("failed to touch session")?;
@@ -80,7 +91,6 @@ pub async fn delete_user_sessions(pool: &PgPool, user_id: i64) -> Result<u64> {
}
/// Delete all expired sessions. Returns the number of sessions cleaned up.
#[allow(dead_code)] // Called by SessionCache::cleanup_expired (not yet wired to periodic task)
pub async fn cleanup_expired(pool: &PgPool) -> Result<u64> {
let result = sqlx::query("DELETE FROM user_sessions WHERE expires_at <= now()")
.execute(pool)
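The make_interval update turns a fixed session lifetime into a sliding window. A small standalone sketch of the difference:

use std::time::{Duration, Instant};

const SESSION_DURATION: Duration = Duration::from_secs(7 * 24 * 3600);

fn main() {
    // Fixed lifetime: expiry is pinned to login time.
    let login = Instant::now();
    let fixed_expiry = login + SESSION_DURATION;

    // Sliding window: each touch re-anchors the expiry, so the session
    // only dies after 7 days of inactivity, not 7 days after login.
    let touch = login + Duration::from_secs(3 * 24 * 3600); // a request 3 days in
    let sliding_expiry = touch + SESSION_DURATION;

    assert_eq!(sliding_expiry - fixed_expiry, Duration::from_secs(3 * 24 * 3600));
    println!("expiry extended by {:?}", sliding_expiry - fixed_expiry);
}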
+1
@@ -310,6 +310,7 @@ impl Scheduler {
total,
stats.total_unmatched,
stats.candidates_created,
stats.candidates_rescored,
stats.auto_matched,
stats.skipped_unparseable,
stats.skipped_no_candidates,
+34
@@ -51,6 +51,33 @@ impl WebService {
}
}
}
/// Periodically cleans up expired sessions from the database and in-memory cache.
async fn session_cleanup_loop(state: AppState, mut shutdown_rx: broadcast::Receiver<()>) {
use std::time::Duration;
// Run every hour
let mut interval = tokio::time::interval(Duration::from_secs(3600));
loop {
tokio::select! {
_ = interval.tick() => {
match state.session_cache.cleanup_expired().await {
Ok(deleted) => {
if deleted > 0 {
info!(deleted, "cleaned up expired sessions");
}
}
Err(e) => {
warn!(error = %e, "session cleanup failed");
}
}
}
_ = shutdown_rx.recv() => {
break;
}
}
}
}
}
#[async_trait::async_trait]
@@ -87,6 +114,13 @@ impl Service for WebService {
Self::db_health_check_loop(health_state, health_shutdown_rx).await;
});
// Spawn session cleanup task
let cleanup_state = self.app_state.clone();
let cleanup_shutdown_rx = shutdown_tx.subscribe();
tokio::spawn(async move {
Self::session_cleanup_loop(cleanup_state, cleanup_shutdown_rx).await;
});
// Use axum's graceful shutdown with the internal shutdown signal
axum::serve(listener, app)
.with_graceful_shutdown(async move {
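The cleanup task follows the same spawn-plus-broadcast-shutdown shape as the health-check loop above. The pattern in miniature, independent of the app types (a sketch assuming tokio, as in the surrounding code):

use std::time::Duration;
use tokio::sync::broadcast;

async fn periodic_task(mut shutdown_rx: broadcast::Receiver<()>) {
    let mut interval = tokio::time::interval(Duration::from_secs(3600));
    loop {
        tokio::select! {
            _ = interval.tick() => {
                // periodic work goes here
            }
            _ = shutdown_rx.recv() => break, // exit promptly on shutdown
        }
    }
}

#[tokio::main]
async fn main() {
    let (shutdown_tx, _rx) = broadcast::channel(1);
    let handle = tokio::spawn(periodic_task(shutdown_tx.subscribe()));
    // ... serve requests ...
    let _ = shutdown_tx.send(());
    let _ = handle.await;
}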
+12 -50
@@ -180,6 +180,7 @@ pub struct InstructorDetailResponse {
pub struct RescoreResponse {
pub total_unmatched: usize,
pub candidates_created: usize,
pub candidates_rescored: usize,
pub auto_matched: usize,
pub skipped_unparseable: usize,
pub skipped_no_candidates: usize,
@@ -768,16 +769,10 @@ pub async fn unmatch_instructor(
) -> Result<Json<OkResponse>, (StatusCode, Json<Value>)> {
let rmp_legacy_id = body.and_then(|b| b.rmp_legacy_id);
let mut tx = state
.db_pool
.begin()
.await
.map_err(|e| db_error("failed to begin transaction", e))?;
// Verify instructor exists
let exists: Option<(i32,)> = sqlx::query_as("SELECT id FROM instructors WHERE id = $1")
.bind(id)
.fetch_optional(&mut *tx)
.fetch_optional(&state.db_pool)
.await
.map_err(|e| db_error("failed to check instructor", e))?;
@@ -788,50 +783,16 @@ pub async fn unmatch_instructor(
));
}
// Delete specific link or all links
if let Some(legacy_id) = rmp_legacy_id {
let result = sqlx::query(
"DELETE FROM instructor_rmp_links WHERE instructor_id = $1 AND rmp_legacy_id = $2",
// Use the data layer function to perform the unmatch
crate::data::rmp::unmatch_instructor(&state.db_pool, id, rmp_legacy_id)
.await
.map_err(|e| {
tracing::error!(error = %e, "failed to unmatch instructor");
(
StatusCode::INTERNAL_SERVER_ERROR,
Json(json!({"error": "failed to unmatch instructor"})),
)
.bind(id)
.bind(legacy_id)
.execute(&mut *tx)
.await
.map_err(|e| db_error("failed to remove rmp link", e))?;
if result.rows_affected() == 0 {
return Err((
StatusCode::NOT_FOUND,
Json(json!({"error": "link not found for this instructor"})),
));
}
} else {
sqlx::query("DELETE FROM instructor_rmp_links WHERE instructor_id = $1")
.bind(id)
.execute(&mut *tx)
.await
.map_err(|e| db_error("failed to remove rmp links", e))?;
}
// Check if any links remain; update status accordingly
let (remaining,): (i64,) =
sqlx::query_as("SELECT COUNT(*) FROM instructor_rmp_links WHERE instructor_id = $1")
.bind(id)
.fetch_one(&mut *tx)
.await
.map_err(|e| db_error("failed to count remaining links", e))?;
if remaining == 0 {
sqlx::query("UPDATE instructors SET rmp_match_status = 'unmatched' WHERE id = $1")
.bind(id)
.execute(&mut *tx)
.await
.map_err(|e| db_error("failed to update instructor status", e))?;
}
tx.commit()
.await
.map_err(|e| db_error("failed to commit transaction", e))?;
})?;
Ok(Json(OkResponse { ok: true }))
}
@@ -858,6 +819,7 @@ pub async fn rescore(
Ok(Json(RescoreResponse {
total_unmatched: stats.total_unmatched,
candidates_created: stats.candidates_created,
candidates_rescored: stats.candidates_rescored,
auto_matched: stats.auto_matched,
skipped_unparseable: stats.skipped_unparseable,
skipped_no_candidates: stats.skipped_no_candidates,
+6 -2
@@ -235,7 +235,7 @@ pub async fn auth_callback(
let session = crate::data::sessions::create_session(
&state.db_pool,
discord_id,
Duration::from_secs(7 * 24 * 3600),
Duration::from_secs(crate::data::sessions::SESSION_DURATION_SECS),
)
.await
.map_err(|e| {
@@ -248,7 +248,11 @@ pub async fn auth_callback(
// 6. Build response with session cookie
let secure = redirect_uri.starts_with("https://");
let cookie = session_cookie(&session.id, 604800, secure);
let cookie = session_cookie(
&session.id,
crate::data::sessions::SESSION_DURATION_SECS as i64,
secure,
);
let redirect_to = if user.is_admin { "/admin" } else { "/" };
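Deriving the cookie Max-Age from the shared constant keeps it in lockstep with the DB-side expiry; the old hardcoded 604800 was the same value, so the cookie lifetime is unchanged. A one-line check:

fn main() {
    const SESSION_DURATION_SECS: u64 = 7 * 24 * 3600; // as in data::sessions
    assert_eq!(SESSION_DURATION_SECS, 604_800); // the literal the cookie used before
}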
+49 -22
@@ -9,13 +9,13 @@ use axum::{
routing::{get, post, put},
};
use crate::web::admin;
use crate::web::admin_rmp;
use crate::web::admin_scraper;
use crate::web::auth::{self, AuthConfig};
use crate::web::calendar;
use crate::web::timeline;
use crate::web::ws;
use crate::{data, web::admin};
use crate::{data::models, web::admin_rmp};
#[cfg(feature = "embed-assets")]
use axum::{
http::{HeaderMap, StatusCode, Uri},
@@ -468,7 +468,7 @@ pub struct CourseResponse {
link_identifier: Option<String>,
is_section_linked: Option<bool>,
part_of_term: Option<String>,
meeting_times: Vec<crate::data::models::DbMeetingTime>,
meeting_times: Vec<models::DbMeetingTime>,
attributes: Vec<String>,
instructors: Vec<InstructorResponse>,
}
@@ -505,10 +505,19 @@ pub struct CodeDescription {
description: String,
}
#[derive(Serialize, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export)]
pub struct TermResponse {
code: String,
slug: String,
description: String,
}
/// Build a `CourseResponse` from a DB course with pre-fetched instructor details.
fn build_course_response(
course: &crate::data::models::Course,
instructors: Vec<crate::data::models::CourseInstructorDetail>,
course: &models::Course,
instructors: Vec<models::CourseInstructorDetail>,
) -> CourseResponse {
let instructors = instructors
.into_iter()
@@ -557,12 +566,20 @@ async fn search_courses(
State(state): State<AppState>,
axum_extra::extract::Query(params): axum_extra::extract::Query<SearchParams>,
) -> Result<Json<SearchResponse>, (AxumStatusCode, String)> {
use crate::banner::models::terms::Term;
let term_code = Term::resolve_to_code(&params.term).ok_or_else(|| {
(
AxumStatusCode::BAD_REQUEST,
format!("Invalid term: {}", params.term),
)
})?;
let limit = params.limit.clamp(1, 100);
let offset = params.offset.max(0);
let (courses, total_count) = crate::data::courses::search_courses(
let (courses, total_count) = data::courses::search_courses(
&state.db_pool,
&params.term,
&term_code,
if params.subject.is_empty() {
None
} else {
@@ -591,7 +608,7 @@ async fn search_courses(
// Batch-fetch all instructors in a single query instead of N+1
let course_ids: Vec<i32> = courses.iter().map(|c| c.id).collect();
let mut instructor_map =
crate::data::courses::get_instructors_for_courses(&state.db_pool, &course_ids)
data::courses::get_instructors_for_courses(&state.db_pool, &course_ids)
.await
.unwrap_or_default();
@@ -616,7 +633,7 @@ async fn get_course(
State(state): State<AppState>,
Path((term, crn)): Path<(String, String)>,
) -> Result<Json<CourseResponse>, (AxumStatusCode, String)> {
let course = crate::data::courses::get_course_by_crn(&state.db_pool, &crn, &term)
let course = data::courses::get_course_by_crn(&state.db_pool, &crn, &term)
.await
.map_err(|e| {
tracing::error!(error = %e, "Course lookup failed");
@@ -627,7 +644,7 @@ async fn get_course(
})?
.ok_or_else(|| (AxumStatusCode::NOT_FOUND, "Course not found".to_string()))?;
let instructors = crate::data::courses::get_course_instructors(&state.db_pool, course.id)
let instructors = data::courses::get_course_instructors(&state.db_pool, course.id)
.await
.unwrap_or_default();
Ok(Json(build_course_response(&course, instructors)))
@@ -636,9 +653,10 @@ async fn get_course(
/// `GET /api/terms`
async fn get_terms(
State(state): State<AppState>,
) -> Result<Json<Vec<CodeDescription>>, (AxumStatusCode, String)> {
let cache = state.reference_cache.read().await;
let term_codes = crate::data::courses::get_available_terms(&state.db_pool)
) -> Result<Json<Vec<TermResponse>>, (AxumStatusCode, String)> {
use crate::banner::models::terms::Term;
let term_codes = data::courses::get_available_terms(&state.db_pool)
.await
.map_err(|e| {
tracing::error!(error = %e, "Failed to get terms");
@@ -648,14 +666,15 @@ async fn get_terms(
)
})?;
let terms: Vec<CodeDescription> = term_codes
let terms: Vec<TermResponse> = term_codes
.into_iter()
.map(|code| {
let description = cache
.lookup("term", &code)
.unwrap_or("Unknown Term")
.to_string();
CodeDescription { code, description }
.filter_map(|code| {
let term: Term = code.parse().ok()?;
Some(TermResponse {
code,
slug: term.slug(),
description: term.description(),
})
})
.collect();
@@ -667,7 +686,15 @@ async fn get_subjects(
State(state): State<AppState>,
Query(params): Query<SubjectsParams>,
) -> Result<Json<Vec<CodeDescription>>, (AxumStatusCode, String)> {
let rows = crate::data::courses::get_subjects_by_enrollment(&state.db_pool, &params.term)
use crate::banner::models::terms::Term;
let term_code = Term::resolve_to_code(&params.term).ok_or_else(|| {
(
AxumStatusCode::BAD_REQUEST,
format!("Invalid term: {}", params.term),
)
})?;
let rows = data::courses::get_subjects_by_enrollment(&state.db_pool, &term_code)
.await
.map_err(|e| {
tracing::error!(error = %e, "Failed to get subjects");
@@ -696,7 +723,7 @@ async fn get_reference(
if entries.is_empty() {
// Fall back to DB query in case cache doesn't have this category
drop(cache);
let rows = crate::data::reference::get_by_category(&category, &state.db_pool)
let rows = data::reference::get_by_category(&category, &state.db_pool)
.await
.map_err(|e| {
tracing::error!(error = %e, category = %category, "Reference lookup failed");
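For reference, a hedged mirror of what /api/terms now returns per entry. The shape matches TermResponse above; the concrete values are hypothetical (the slug format is an assumption based on the friendly-URL change):

use serde::Serialize;

// Local mirror of the TermResponse shape from the diff (camelCase fields).
#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
struct TermResponse {
    code: String,
    slug: String,
    description: String,
}

fn main() {
    let t = TermResponse {
        code: "202510".into(),    // hypothetical Banner term code
        slug: "fall-2025".into(), // hypothetical friendly slug
        description: "Fall 2025".into(),
    };
    // Prints: {"code":"202510","slug":"fall-2025","description":"Fall 2025"}
    println!("{}", serde_json::to_string(&t).unwrap());
}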
+3 -4
@@ -9,8 +9,8 @@
use chrono::NaiveDate;
use serde_json::Value;
use sqlx::PgPool;
use std::sync::atomic::{AtomicBool, Ordering};
use std::sync::Arc;
use std::sync::atomic::{AtomicBool, Ordering};
use tokio::sync::watch;
use tracing::{debug, error, info};
@@ -141,9 +141,8 @@ struct ScheduleRow {
async fn load_snapshot(pool: &PgPool) -> anyhow::Result<ScheduleSnapshot> {
let start = std::time::Instant::now();
let rows: Vec<ScheduleRow> = sqlx::query_as(
"SELECT subject, enrollment, meeting_times FROM courses",
)
let rows: Vec<ScheduleRow> =
sqlx::query_as("SELECT subject, enrollment, meeting_times FROM courses")
.fetch_all(pool)
.await?;
-1
@@ -108,7 +108,6 @@ impl SessionCache {
/// Delete expired sessions from the database and sweep the in-memory cache.
///
/// Returns the number of sessions deleted from the database.
#[allow(dead_code)] // Intended for periodic cleanup task (not yet wired)
pub async fn cleanup_expired(&self) -> anyhow::Result<u64> {
let deleted = crate::data::sessions::cleanup_expired(&self.db_pool).await?;
+2 -3
@@ -233,9 +233,8 @@ pub(crate) async fn timeline(
s.active_during(local_date, wday_bit, slot_start_minutes, slot_end_minutes)
});
if active {
*subject_totals
.entry(course.subject.clone())
.or_default() += course.enrollment as i64;
*subject_totals.entry(course.subject.clone()).or_default() +=
course.enrollment as i64;
}
}
+103
@@ -0,0 +1,103 @@
#[allow(dead_code)]
mod helpers;
use banner::data::rmp::unmatch_instructor;
use sqlx::PgPool;
/// Test that unmatching an instructor resets accepted candidates back to pending.
///
/// When a user unmatches an instructor, accepted candidates should be reset to
/// 'pending' so they can be re-matched later. This prevents the bug where
/// candidates remain 'accepted' but have no corresponding link.
#[sqlx::test]
async fn unmatch_resets_accepted_candidates_to_pending(pool: PgPool) {
// ARRANGE: Create an instructor
let (instructor_id,): (i32,) = sqlx::query_as(
"INSERT INTO instructors (display_name, email)
VALUES ('Test, Instructor', 'test@utsa.edu')
RETURNING id",
)
.fetch_one(&pool)
.await
.expect("failed to create instructor");
// ARRANGE: Create an RMP professor
let (rmp_legacy_id,): (i32,) = sqlx::query_as(
"INSERT INTO rmp_professors (legacy_id, graphql_id, first_name, last_name, num_ratings)
VALUES (9999999, 'test-graphql-id', 'Test', 'Professor', 10)
RETURNING legacy_id",
)
.fetch_one(&pool)
.await
.expect("failed to create rmp professor");
// ARRANGE: Create a match candidate with 'accepted' status
sqlx::query(
"INSERT INTO rmp_match_candidates (instructor_id, rmp_legacy_id, score, status)
VALUES ($1, $2, 0.85, 'accepted')",
)
.bind(instructor_id)
.bind(rmp_legacy_id)
.execute(&pool)
.await
.expect("failed to create candidate");
// ARRANGE: Create a link in instructor_rmp_links
sqlx::query(
"INSERT INTO instructor_rmp_links (instructor_id, rmp_legacy_id, source)
VALUES ($1, $2, 'manual')",
)
.bind(instructor_id)
.bind(rmp_legacy_id)
.execute(&pool)
.await
.expect("failed to create link");
// ARRANGE: Update instructor status to 'confirmed'
sqlx::query("UPDATE instructors SET rmp_match_status = 'confirmed' WHERE id = $1")
.bind(instructor_id)
.execute(&pool)
.await
.expect("failed to update instructor status");
// ACT: Unmatch the specific RMP profile
unmatch_instructor(&pool, instructor_id, Some(rmp_legacy_id))
.await
.expect("unmatch should succeed");
// ASSERT: Candidate should be reset to pending
let (candidate_status,): (String,) = sqlx::query_as(
"SELECT status FROM rmp_match_candidates
WHERE instructor_id = $1 AND rmp_legacy_id = $2",
)
.bind(instructor_id)
.bind(rmp_legacy_id)
.fetch_one(&pool)
.await
.expect("failed to fetch candidate status");
assert_eq!(
candidate_status, "pending",
"candidate should be reset to pending after unmatch"
);
// ASSERT: Link should be deleted
let (link_count,): (i64,) =
sqlx::query_as("SELECT COUNT(*) FROM instructor_rmp_links WHERE instructor_id = $1")
.bind(instructor_id)
.fetch_one(&pool)
.await
.expect("failed to count links");
assert_eq!(link_count, 0, "link should be deleted");
// ASSERT: Instructor status should be unmatched
let (instructor_status,): (String,) =
sqlx::query_as("SELECT rmp_match_status FROM instructors WHERE id = $1")
.bind(instructor_id)
.fetch_one(&pool)
.await
.expect("failed to fetch instructor status");
assert_eq!(
instructor_status, "unmatched",
"instructor should be unmatched"
);
}
+22 -10
@@ -1,3 +1,4 @@
#[allow(dead_code)]
mod helpers;
use banner::data::batch::batch_upsert_courses;
@@ -213,7 +214,7 @@ async fn test_batch_upsert_unique_constraint_crn_term(pool: PgPool) {
#[sqlx::test]
async fn test_batch_upsert_creates_audit_and_metric_entries(pool: PgPool) {
// Insert initial data — should NOT create audits/metrics (it's a fresh insert)
// Insert initial data — should create a baseline metric but no audits
let initial = vec![helpers::make_course(
"50001",
"202510",
@@ -241,10 +242,21 @@ async fn test_batch_upsert_creates_audit_and_metric_entries(pool: PgPool) {
.await
.unwrap();
assert_eq!(
metric_count, 0,
"initial insert should not create metric entries"
metric_count, 1,
"initial insert should create a baseline metric"
);
// Verify baseline metric values
let (enrollment, wait_count, seats): (i32, i32, i32) = sqlx::query_as(
"SELECT enrollment, wait_count, seats_available FROM course_metrics ORDER BY timestamp LIMIT 1",
)
.fetch_one(&pool)
.await
.unwrap();
assert_eq!(enrollment, 10);
assert_eq!(wait_count, 0);
assert_eq!(seats, 25); // 35 - 10
// Update enrollment and wait_count
let updated = vec![helpers::make_course(
"50001",
@@ -269,16 +281,16 @@ async fn test_batch_upsert_creates_audit_and_metric_entries(pool: PgPool) {
"should have audit entries for enrollment and wait_count changes, got {audit_count}"
);
// Should have exactly 1 metric entry
// Should have 2 metric entries: baseline + change
let (metric_count,): (i64,) = sqlx::query_as("SELECT COUNT(*) FROM course_metrics")
.fetch_one(&pool)
.await
.unwrap();
assert_eq!(metric_count, 1, "should have 1 metric snapshot");
assert_eq!(metric_count, 2, "should have baseline + 1 change metric");
// Verify metric values
// Verify the latest metric values
let (enrollment, wait_count, seats): (i32, i32, i32) = sqlx::query_as(
"SELECT enrollment, wait_count, seats_available FROM course_metrics LIMIT 1",
"SELECT enrollment, wait_count, seats_available FROM course_metrics ORDER BY timestamp DESC LIMIT 1",
)
.fetch_one(&pool)
.await
@@ -290,7 +302,7 @@ async fn test_batch_upsert_creates_audit_and_metric_entries(pool: PgPool) {
#[sqlx::test]
async fn test_batch_upsert_no_change_no_audit(pool: PgPool) {
// Insert then re-insert identical data — should produce zero audits/metrics
// Insert then re-insert identical data — should produce baseline metric but no audits or extra metrics
let course = vec![helpers::make_course(
"60001",
"202510",
@@ -319,7 +331,7 @@ async fn test_batch_upsert_no_change_no_audit(pool: PgPool) {
.await
.unwrap();
assert_eq!(
metric_count, 0,
"identical re-upsert should not create metric entries"
metric_count, 1,
"identical re-upsert should only have the baseline metric"
);
}
+1
@@ -1,3 +1,4 @@
#[allow(dead_code)]
mod helpers;
use banner::data::models::{ScrapePriority, TargetType};
+1 -1
@@ -633,7 +633,7 @@
"supports-preserve-symlinks-flag": ["supports-preserve-symlinks-flag@1.0.0", "", {}, "sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w=="],
"svelte": ["svelte@5.49.0", "", { "dependencies": { "@jridgewell/remapping": "^2.3.4", "@jridgewell/sourcemap-codec": "^1.5.0", "@sveltejs/acorn-typescript": "^1.0.5", "@types/estree": "^1.0.5", "acorn": "^8.12.1", "aria-query": "^5.3.1", "axobject-query": "^4.1.0", "clsx": "^2.1.1", "devalue": "^5.6.2", "esm-env": "^1.2.1", "esrap": "^2.2.1", "is-reference": "^3.0.3", "locate-character": "^3.0.0", "magic-string": "^0.30.11", "zimmerframe": "^1.1.2" } }, "sha512-Fn2mCc3XX0gnnbBYzWOTrZHi5WnF9KvqmB1+KGlUWoJkdioPmFYtg2ALBr6xl2dcnFTz3Vi7/mHpbKSVg/imVg=="],
"svelte": ["svelte@5.49.1", "", { "dependencies": { "@jridgewell/remapping": "^2.3.4", "@jridgewell/sourcemap-codec": "^1.5.0", "@sveltejs/acorn-typescript": "^1.0.5", "@types/estree": "^1.0.5", "acorn": "^8.12.1", "aria-query": "^5.3.1", "axobject-query": "^4.1.0", "clsx": "^2.1.1", "devalue": "^5.6.2", "esm-env": "^1.2.1", "esrap": "^2.2.2", "is-reference": "^3.0.3", "locate-character": "^3.0.0", "magic-string": "^0.30.11", "zimmerframe": "^1.1.2" } }, "sha512-jj95WnbKbXsXXngYj28a4zx8jeZx50CN/J4r0CEeax2pbfdsETv/J1K8V9Hbu3DCXnpHz5qAikICuxEooi7eNQ=="],
"svelte-check": ["svelte-check@4.3.5", "", { "dependencies": { "@jridgewell/trace-mapping": "^0.3.25", "chokidar": "^4.0.1", "fdir": "^6.2.0", "picocolors": "^1.0.0", "sade": "^1.7.4" }, "peerDependencies": { "svelte": "^4.0.0 || ^5.0.0-next.0", "typescript": ">=5.0.0" }, "bin": { "svelte-check": "bin/svelte-check" } }, "sha512-e4VWZETyXaKGhpkxOXP+B/d0Fp/zKViZoJmneZWe/05Y2aqSKj3YN2nLfYPJBQ87WEiY4BQCQ9hWGu9mPT1a1Q=="],
+17 -15
@@ -7,33 +7,35 @@
"build": "vite build",
"preview": "vite preview",
"check": "svelte-kit sync && svelte-check --tsconfig ./tsconfig.json",
"typecheck": "svelte-kit sync && svelte-check --tsconfig ./tsconfig.json",
"lint": "biome lint .",
"test": "vitest run",
"format": "biome format --write .",
"format:check": "biome format ."
},
"devDependencies": {
"@biomejs/biome": "^1.9.4",
"@fontsource-variable/inter": "^5.2.5",
"@lucide/svelte": "^0.563.0",
"@sveltejs/adapter-static": "^3.0.8",
"@sveltejs/kit": "^2.16.0",
"@sveltejs/vite-plugin-svelte": "^5.0.3",
"@tailwindcss/vite": "^4.0.0",
"@fontsource-variable/inter": "^5.2.8",
"@lucide/svelte": "^0.563.1",
"@sveltejs/adapter-static": "^3.0.10",
"@sveltejs/kit": "^2.50.1",
"@sveltejs/vite-plugin-svelte": "^5.1.1",
"@tailwindcss/vite": "^4.1.18",
"@tanstack/table-core": "^8.21.3",
"@types/d3-scale": "^4.0.9",
"@types/d3-shape": "^3.1.8",
"@types/d3-time-format": "^4.0.3",
"@types/node": "^25.1.0",
"bits-ui": "^1.3.7",
"bits-ui": "^1.8.0",
"clsx": "^2.1.1",
"jsdom": "^26.0.0",
"svelte": "^5.19.0",
"svelte-check": "^4.1.4",
"tailwind-merge": "^3.0.1",
"tailwindcss": "^4.0.0",
"typescript": "^5.7.2",
"vite": "^6.3.5",
"vitest": "^3.0.5"
"jsdom": "^26.1.0",
"svelte": "^5.49.1",
"svelte-check": "^4.3.5",
"tailwind-merge": "^3.4.0",
"tailwindcss": "^4.1.18",
"typescript": "^5.9.3",
"vite": "^6.4.1",
"vitest": "^3.2.4"
},
"dependencies": {
"@icons-pack/svelte-simple-icons": "^6.5.0",
+4 -4
@@ -1,4 +1,7 @@
#!/usr/bin/env bun
import { extname, join } from "path";
import { constants, brotliCompressSync, gzipSync } from "zlib";
import { $ } from "bun";
/**
* Pre-compress static assets with maximum compression levels.
* Run after `bun run build`.
@@ -7,10 +10,7 @@
* These are embedded alongside originals by rust-embed and served via
* content negotiation in src/web/assets.rs.
*/
import { readdir, stat, readFile, writeFile } from "fs/promises";
import { join, extname } from "path";
import { gzipSync, brotliCompressSync, constants } from "zlib";
import { $ } from "bun";
import { readFile, readdir, stat, writeFile } from "fs/promises";
// Must match COMPRESSION_MIN_SIZE in src/web/encoding.rs
const MIN_SIZE = 512;
+15 -4
@@ -1,3 +1,4 @@
import { authStore } from "$lib/auth.svelte";
import type {
CandidateResponse,
CodeDescription,
@@ -20,6 +21,7 @@ import type {
SubjectResultEntry,
SubjectSummary,
SubjectsResponse,
TermResponse,
TimeseriesPoint,
TimeseriesResponse,
TopCandidateResponse,
@@ -50,13 +52,14 @@ export type {
SubjectResultEntry,
SubjectSummary,
SubjectsResponse,
TermResponse,
TimeseriesPoint,
TimeseriesResponse,
TopCandidateResponse,
};
// Semantic aliases — these all share the CodeDescription shape
export type Term = CodeDescription;
// Semantic aliases
export type Term = TermResponse;
export type Subject = CodeDescription;
export type ReferenceEntry = CodeDescription;
@@ -212,6 +215,10 @@ export class BannerApiClient {
const response = await this.fetchFn(...args);
if (response.status === 401) {
authStore.handleUnauthorized();
}
if (!response.ok) {
throw new Error(`API request failed: ${response.status} ${response.statusText}`);
}
@@ -229,6 +236,10 @@ export class BannerApiClient {
const response = await this.fetchFn(...args);
if (response.status === 401) {
authStore.handleUnauthorized();
}
if (!response.ok) {
throw new Error(`API request failed: ${response.status} ${response.statusText}`);
}
@@ -259,8 +270,8 @@ export class BannerApiClient {
return this.request<Term[]>("/terms");
}
async getSubjects(termCode: string): Promise<Subject[]> {
return this.request<Subject[]>(`/subjects?term=${encodeURIComponent(termCode)}`);
async getSubjects(term: string): Promise<Subject[]> {
return this.request<Subject[]>(`/subjects?term=${encodeURIComponent(term)}`);
}
async getReference(category: string): Promise<ReferenceEntry[]> {
+7
@@ -60,6 +60,13 @@ class AuthStore {
}
}
/** Idempotently mark the session as lost. Called by apiFetch on 401. */
handleUnauthorized() {
if (this.state.mode !== "unauthenticated") {
this.state = { mode: "unauthenticated" };
}
}
login() {
window.location.href = "/api/auth/login";
}
@@ -0,0 +1,6 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
/**
* A match candidate in the detail view.
*/
export type CandidateResponse = { id: number, rmpLegacyId: number, firstName: string | null, lastName: string | null, department: string | null, avgRating: number | null, avgDifficulty: number | null, numRatings: number | null, wouldTakeAgainPct: number | null, score: number | null, scoreBreakdown: { [key in string]?: number } | null, status: string, };
+3
@@ -0,0 +1,3 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
export type CodeDescription = { code: string, description: string, };
+5
@@ -0,0 +1,5 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
import type { DbMeetingTime } from "./DbMeetingTime";
import type { InstructorResponse } from "./InstructorResponse";
export type CourseResponse = { crn: string, subject: string, courseNumber: string, title: string, termCode: string, sequenceNumber: string | null, instructionalMethod: string | null, campus: string | null, enrollment: number, maxEnrollment: number, waitCount: number, waitCapacity: number, creditHours: number | null, creditHourLow: number | null, creditHourHigh: number | null, crossList: string | null, crossListCapacity: number | null, crossListCount: number | null, linkIdentifier: string | null, isSectionLinked: boolean | null, partOfTerm: string | null, meetingTimes: Array<DbMeetingTime>, attributes: Array<string>, instructors: Array<InstructorResponse>, };
+6
@@ -0,0 +1,6 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
/**
* Represents a meeting time stored as JSONB in the courses table.
*/
export type DbMeetingTime = { begin_time: string | null, end_time: string | null, start_date: string, end_date: string, monday: boolean, tuesday: boolean, wednesday: boolean, thursday: boolean, friday: boolean, saturday: boolean, sunday: boolean, building: string | null, building_description: string | null, room: string | null, campus: string | null, meeting_type: string, meeting_schedule_type: string, };
+6
@@ -0,0 +1,6 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
/**
* Instructor summary in the detail view.
*/
export type InstructorDetail = { id: number, displayName: string, email: string, rmpMatchStatus: string, subjectsTaught: Array<string>, courseCount: number, };
@@ -0,0 +1,9 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
import type { CandidateResponse } from "./CandidateResponse";
import type { InstructorDetail } from "./InstructorDetail";
import type { LinkedRmpProfile } from "./LinkedRmpProfile";
/**
* Response for `GET /api/admin/instructors/{id}` and `POST .../match`.
*/
export type InstructorDetailResponse = { instructor: InstructorDetail, currentMatches: Array<LinkedRmpProfile>, candidates: Array<CandidateResponse>, };
@@ -0,0 +1,7 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
import type { TopCandidateResponse } from "./TopCandidateResponse";
/**
* An instructor row in the paginated list.
*/
export type InstructorListItem = { id: number, displayName: string, email: string, rmpMatchStatus: string, rmpLinkCount: number, candidateCount: number, courseSubjectCount: number, topCandidate: TopCandidateResponse | null, };
@@ -0,0 +1,3 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
export type InstructorResponse = { instructorId: number, bannerId: string, displayName: string, email: string, isPrimary: boolean, rmpRating: number | null, rmpNumRatings: number | null, rmpLegacyId: number | null, };
+6
@@ -0,0 +1,6 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
/**
* Aggregate status counts for the instructor list.
*/
export type InstructorStats = { total: number, unmatched: number, auto: number, confirmed: number, rejected: number, withCandidates: number, };
+6
@@ -0,0 +1,6 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
/**
* A linked RMP profile in the detail view.
*/
export type LinkedRmpProfile = { linkId: number, legacyId: number, firstName: string | null, lastName: string | null, department: string | null, avgRating: number | null, avgDifficulty: number | null, numRatings: number | null, wouldTakeAgainPct: number | null, };
@@ -0,0 +1,8 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
import type { InstructorListItem } from "./InstructorListItem";
import type { InstructorStats } from "./InstructorStats";
/**
* Response for `GET /api/admin/instructors`.
*/
export type ListInstructorsResponse = { instructors: Array<InstructorListItem>, total: number, page: number, perPage: number, stats: InstructorStats, };
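To show how these generated bindings are typically consumed, here is a minimal sketch of a typed fetch against the instructor list endpoint. Only the response shape comes from the binding above; the query parameter names and the `$lib/api` re-export are assumptions for illustration.

import type { ListInstructorsResponse } from "$lib/api";

// Hypothetical helper: fetch one page of the admin instructor list.
// The `page`/`perPage` query parameter names are assumed, not confirmed
// by these bindings.
export async function fetchInstructors(page: number, perPage = 50): Promise<ListInstructorsResponse> {
  const res = await fetch(`/api/admin/instructors?page=${page}&perPage=${perPage}`);
  if (!res.ok) throw new Error(`instructor list failed: ${res.status}`);
  // The cast is only as safe as the backend and ts-rs bindings staying in sync.
  return (await res.json()) as ListInstructorsResponse;
}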
@@ -0,0 +1,6 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
/**
* Simple acknowledgement response for mutating operations.
*/
export type OkResponse = { ok: boolean, };
@@ -0,0 +1,6 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
/**
* Response for `POST /api/admin/rmp/rescore`.
*/
export type RescoreResponse = { totalUnmatched: number, candidatesCreated: number, candidatesRescored: number, autoMatched: number, skippedUnparseable: number, skippedNoCandidates: number, };
@@ -0,0 +1,3 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
export type ScraperStatsResponse = { period: string, totalScrapes: number, successfulScrapes: number, failedScrapes: number, successRate: number | null, avgDurationMs: number | null, totalCoursesChanged: number, totalCoursesFetched: number, totalAuditsGenerated: number, pendingJobs: number, lockedJobs: number, };
@@ -0,0 +1,4 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
import type { CourseResponse } from "./CourseResponse";
export type SearchResponse = { courses: Array<CourseResponse>, totalCount: number, offset: number, limit: number, };
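Because `SearchResponse` echoes back `offset` and `limit`, paging through all results is mechanical. A sketch under stated assumptions: the `q`/`offset`/`limit` query parameter names are invented for illustration.

import type { SearchResponse } from "$lib/api";

// Hypothetical pager: walk every result page for one query.
export async function* allSearchPages(q: string, limit = 100): AsyncGenerator<SearchResponse> {
  let offset = 0;
  for (;;) {
    const res = await fetch(`/api/search?q=${encodeURIComponent(q)}&offset=${offset}&limit=${limit}`);
    if (!res.ok) throw new Error(`search failed: ${res.status}`);
    const page = (await res.json()) as SearchResponse;
    yield page;
    offset += page.courses.length;
    if (page.courses.length === 0 || offset >= page.totalCount) break;
  }
}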
@@ -0,0 +1,4 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
import type { ServiceStatus } from "./ServiceStatus";
export type ServiceInfo = { name: string, status: ServiceStatus, };
@@ -0,0 +1,6 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
/**
* Health status of a service.
*/
export type ServiceStatus = "starting" | "active" | "connected" | "disabled" | "error";
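Since `ServiceStatus` is a closed string union, consumers can branch over it exhaustively and let the compiler flag any future variant. A minimal sketch (the display labels are invented; the `$lib/api` re-export is assumed):

import type { ServiceStatus } from "$lib/api";

// The `never` assignment makes this switch fail to compile if a new
// variant is ever added to the generated union.
export function statusLabel(status: ServiceStatus): string {
  switch (status) {
    case "starting": return "Starting";
    case "active": return "Active";
    case "connected": return "Connected";
    case "disabled": return "Disabled";
    case "error": return "Error";
    default: {
      const unreachable: never = status;
      return unreachable;
    }
  }
}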
@@ -0,0 +1,5 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
import type { ServiceInfo } from "./ServiceInfo";
import type { ServiceStatus } from "./ServiceStatus";
export type StatusResponse = { status: ServiceStatus, version: string, commit: string, services: { [key in string]?: ServiceInfo }, };
@@ -0,0 +1,4 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
import type { SubjectResultEntry } from "./SubjectResultEntry";
export type SubjectDetailResponse = { subject: string, results: Array<SubjectResultEntry>, };
@@ -0,0 +1,3 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
export type SubjectResultEntry = { id: number, completedAt: string, durationMs: number, success: boolean, errorMessage: string | null, coursesFetched: number | null, coursesChanged: number | null, coursesUnchanged: number | null, auditsGenerated: number | null, metricsGenerated: number | null, };
@@ -0,0 +1,3 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
export type SubjectSummary = { subject: string, subjectDescription: string | null, trackedCourseCount: number, scheduleState: string, currentIntervalSecs: number, timeMultiplier: number, lastScraped: string, nextEligibleAt: string | null, cooldownRemainingSecs: number | null, avgChangeRatio: number, consecutiveZeroChanges: number, recentRuns: number, recentFailures: number, };
@@ -0,0 +1,4 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
import type { SubjectSummary } from "./SubjectSummary";
export type SubjectsResponse = { subjects: Array<SubjectSummary>, };
@@ -0,0 +1,3 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
export type TermResponse = { code: string, slug: string, description: string, };
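The `slug` here is the friendly term identifier that the TermCombobox changes further down bind to, in place of the raw Banner `code`. A small sketch of building a slug-based URL (the `term` query parameter name is an assumption):

import type { TermResponse } from "$lib/api";

// Hypothetical helper: link to a search scoped by friendly term slug.
export function searchUrl(term: TermResponse, subject: string): string {
  const params = new URLSearchParams({ term: term.slug, subject });
  return `/search?${params.toString()}`;
}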
@@ -0,0 +1,12 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
import type { TimelineSlot } from "./TimelineSlot";
export type TimelineResponse = {
/**
* 15-minute slots with per-subject enrollment totals, sorted by time.
*/
slots: Array<TimelineSlot>,
/**
* All subject codes present in the returned data.
*/
subjects: Array<string>, };
@@ -0,0 +1,11 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
export type TimelineSlot = {
/**
* ISO-8601 timestamp at the start of this 15-minute bucket.
*/
time: string,
/**
* Subject code → total enrollment in this slot.
*/
subjects: { [key in string]?: bigint }, };
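Note that the per-subject totals are typed as `bigint` in the generated binding, while `JSON.parse` only ever produces `number`. A consumer therefore has to coerce the raw payload; a hedged sketch, assuming the backend serializes the totals as plain integral JSON numbers:

import type { TimelineSlot } from "$lib/api";

// Hypothetical shape of the payload as JSON.parse actually delivers it.
type RawSlot = { time: string; subjects: Record<string, number> };

export function toTimelineSlot(raw: RawSlot): TimelineSlot {
  const subjects: Partial<Record<string, bigint>> = {};
  for (const [code, total] of Object.entries(raw.subjects)) {
    subjects[code] = BigInt(total); // throws on non-integral input, by design
  }
  return { time: raw.time, subjects };
}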
@@ -0,0 +1,3 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
export type TimeseriesPoint = { timestamp: string, scrapeCount: number, successCount: number, errorCount: number, coursesChanged: number, avgDurationMs: number, };
@@ -0,0 +1,4 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
import type { TimeseriesPoint } from "./TimeseriesPoint";
export type TimeseriesResponse = { period: string, bucket: string, points: Array<TimeseriesPoint>, };
@@ -0,0 +1,6 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
/**
* A top-candidate summary shown in the instructor list view.
*/
export type TopCandidateResponse = { rmpLegacyId: number, score: number | null, scoreBreakdown: { [key in string]?: number } | null, firstName: string | null, lastName: string | null, department: string | null, avgRating: number | null, numRatings: number | null, };
@@ -0,0 +1,6 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
/**
* A user authenticated via Discord OAuth.
*/
export type User = { discordId: string, discordUsername: string, discordAvatarHash: string | null, isAdmin: boolean, createdAt: string, updatedAt: string, };
@@ -20,6 +20,9 @@ export type { SubjectDetailResponse } from "./SubjectDetailResponse";
export type { SubjectResultEntry } from "./SubjectResultEntry";
export type { SubjectSummary } from "./SubjectSummary";
export type { SubjectsResponse } from "./SubjectsResponse";
export type { TermResponse } from "./TermResponse";
export type { TimelineResponse } from "./TimelineResponse";
export type { TimelineSlot } from "./TimelineSlot";
export type { TimeseriesPoint } from "./TimeseriesPoint";
export type { TimeseriesResponse } from "./TimeseriesResponse";
export type { TopCandidateResponse } from "./TopCandidateResponse";
@@ -1,31 +1,31 @@
<script lang="ts">
import type { CourseResponse } from "$lib/api";
import { useClipboard } from "$lib/composables/useClipboard.svelte";
import {
formatTime,
RMP_CONFIDENCE_THRESHOLD,
formatCreditHours,
formatDate,
formatMeetingDaysLong,
formatTime,
isMeetingTimeTBA,
isTimeTBA,
ratingStyle,
rmpUrl,
RMP_CONFIDENCE_THRESHOLD,
} from "$lib/course";
import { themeStore } from "$lib/stores/theme.svelte";
import { useClipboard } from "$lib/composables/useClipboard.svelte";
import { cn, tooltipContentClass, formatNumber } from "$lib/utils";
import { Tooltip } from "bits-ui";
import SimpleTooltip from "./SimpleTooltip.svelte";
import { cn, formatNumber, tooltipContentClass } from "$lib/utils";
import {
Info,
Copy,
Calendar,
Check,
Copy,
Download,
ExternalLink,
Info,
Star,
Triangle,
ExternalLink,
Calendar,
Download,
} from "@lucide/svelte";
import { Tooltip } from "bits-ui";
import SimpleTooltip from "./SimpleTooltip.svelte";
let { course }: { course: CourseResponse } = $props();
@@ -1,6 +1,10 @@
<script lang="ts">
import type { CourseResponse } from "$lib/api";
import { FlexRender, createSvelteTable } from "$lib/components/ui/data-table/index.js";
import { useClipboard } from "$lib/composables/useClipboard.svelte";
import { useOverlayScrollbars } from "$lib/composables/useOverlayScrollbars.svelte";
import {
RMP_CONFIDENCE_THRESHOLD,
abbreviateInstructor,
concernAccentColor,
formatLocationDisplay,
@@ -10,43 +14,40 @@ import {
formatTimeRange,
getDeliveryConcern,
getPrimaryInstructor,
isAsyncOnline,
isMeetingTimeTBA,
isTimeTBA,
openSeats,
seatsColor,
seatsDotColor,
ratingStyle,
rmpUrl,
RMP_CONFIDENCE_THRESHOLD,
seatsColor,
seatsDotColor,
} from "$lib/course";
import { themeStore } from "$lib/stores/theme.svelte";
import { useClipboard } from "$lib/composables/useClipboard.svelte";
import { useOverlayScrollbars } from "$lib/composables/useOverlayScrollbars.svelte";
import CourseDetail from "./CourseDetail.svelte";
import { fade, fly, slide } from "svelte/transition";
import { flip } from "svelte/animate";
import { createSvelteTable, FlexRender } from "$lib/components/ui/data-table/index.js";
import { cn, formatNumber, tooltipContentClass } from "$lib/utils";
import {
getCoreRowModel,
getSortedRowModel,
type ColumnDef,
type SortingState,
type VisibilityState,
type Updater,
} from "@tanstack/table-core";
import {
ArrowUp,
ArrowDown,
ArrowUp,
ArrowUpDown,
Columns3,
Check,
Columns3,
ExternalLink,
RotateCcw,
Star,
Triangle,
ExternalLink,
} from "@lucide/svelte";
import { DropdownMenu, ContextMenu, Tooltip } from "bits-ui";
import { cn, tooltipContentClass, formatNumber } from "$lib/utils";
import {
type ColumnDef,
type SortingState,
type Updater,
type VisibilityState,
getCoreRowModel,
getSortedRowModel,
} from "@tanstack/table-core";
import { ContextMenu, DropdownMenu, Tooltip } from "bits-ui";
import { flip } from "svelte/animate";
import { fade, fly, slide } from "svelte/transition";
import CourseDetail from "./CourseDetail.svelte";
import SimpleTooltip from "./SimpleTooltip.svelte";
let {
@@ -611,7 +612,12 @@ const table = createSvelteTable({
)}
passthrough
>
{#if timeIsTBA(course)}
{#if isAsyncOnline(course)}
<span
class="text-xs text-muted-foreground/60"
>Async</span
>
{:else if timeIsTBA(course)}
<span
class="text-xs text-muted-foreground/60"
>TBA</span
@@ -1,6 +1,6 @@
<script lang="ts">
import { page } from "$app/state";
import { TriangleAlert, RotateCcw } from "@lucide/svelte";
import { RotateCcw, TriangleAlert } from "@lucide/svelte";
interface Props {
/** Heading shown in the error card */
@@ -1,7 +1,7 @@
<script lang="ts">
import { page } from "$app/state";
import { Search, User, Clock } from "@lucide/svelte";
import { authStore } from "$lib/auth.svelte";
import { Clock, Search, User } from "@lucide/svelte";
import ThemeToggle from "./ThemeToggle.svelte";
const staticTabs = [
@@ -11,11 +11,15 @@ const staticTabs = [
const APP_PREFIXES = ["/profile", "/settings", "/admin"];
let profileTab = $derived({
href: authStore.isAuthenticated ? "/profile" : "/login",
let profileTab = $derived(
authStore.isLoading
? { href: "/login" as const, label: null, icon: User }
: {
href: authStore.isAuthenticated ? ("/profile" as const) : ("/login" as const),
label: authStore.isAuthenticated ? "Account" : "Login",
icon: User,
});
}
);
function isActive(tabHref: string): boolean {
if (tabHref === "/") return page.url.pathname === "/";
@@ -50,7 +54,7 @@ function isActive(tabHref: string): boolean {
: 'text-muted-foreground hover:text-foreground hover:bg-background/50'}"
>
<User size={15} strokeWidth={2} />
{profileTab.label}
{#if profileTab.label}{profileTab.label}{/if}
</a>
<ThemeToggle />
</div>
@@ -67,9 +67,9 @@ function outTransition(_node: HTMLElement): TransitionConfig {
}
</script>
<div class="relative flex flex-1 flex-col overflow-hidden">
<div class="relative flex flex-1 flex-col overflow-hidden p-8">
{#key key}
<div in:inTransition out:outTransition class="flex flex-1 flex-col">
<div in:inTransition out:outTransition class="flex flex-1 flex-col -m-8">
{@render children()}
</div>
{/key}
@@ -1,8 +1,8 @@
<script lang="ts">
import { Select } from "bits-ui";
import { ChevronUp, ChevronDown } from "@lucide/svelte";
import type { Action } from "svelte/action";
import { formatNumber } from "$lib/utils";
import { ChevronDown, ChevronUp } from "@lucide/svelte";
import { Select } from "bits-ui";
import type { Action } from "svelte/action";
const slideIn: Action<HTMLElement, number> = (node, direction) => {
if (direction !== 0) {
@@ -1,8 +1,8 @@
<script lang="ts">
import type { Term, Subject } from "$lib/api";
import type { Subject, Term } from "$lib/api";
import SimpleTooltip from "./SimpleTooltip.svelte";
import TermCombobox from "./TermCombobox.svelte";
import SubjectCombobox from "./SubjectCombobox.svelte";
import TermCombobox from "./TermCombobox.svelte";
let {
terms,
@@ -1,8 +1,8 @@
<script lang="ts">
import { onMount } from "svelte";
import SimpleTooltip from "$lib/components/SimpleTooltip.svelte";
import { relativeTime } from "$lib/time";
import { formatNumber } from "$lib/utils";
import { onMount } from "svelte";
export interface SearchMeta {
totalCount: number;
@@ -1,7 +1,7 @@
<script lang="ts">
import { cn } from "$lib/utils";
import { Tooltip } from "bits-ui";
import type { Snippet } from "svelte";
import { cn } from "$lib/utils";
let {
text,
@@ -1,9 +1,9 @@
<script lang="ts">
import { Combobox } from "bits-ui";
import { Check, ChevronsUpDown } from "@lucide/svelte";
import { fly } from "svelte/transition";
import type { Subject } from "$lib/api";
import { formatNumber } from "$lib/utils";
import { Check, ChevronsUpDown } from "@lucide/svelte";
import { Combobox } from "bits-ui";
import { fly } from "svelte/transition";
let {
subjects,
@@ -1,8 +1,8 @@
<script lang="ts">
import { Combobox } from "bits-ui";
import { Check, ChevronsUpDown } from "@lucide/svelte";
import { fly } from "svelte/transition";
import type { Term } from "$lib/api";
import { Check, ChevronsUpDown } from "@lucide/svelte";
import { Combobox } from "bits-ui";
import { fly } from "svelte/transition";
let {
terms,
@@ -16,12 +16,11 @@ let open = $state(false);
let searchValue = $state("");
let containerEl = $state<HTMLDivElement>(null!);
const currentTermCode = $derived(
terms.find((t) => !t.description.includes("(View Only)"))?.code ?? ""
);
// The first term from the backend is the most current
const currentTermSlug = $derived(terms[0]?.slug ?? "");
const selectedLabel = $derived(
terms.find((t) => t.code === value)?.description ?? "Select term..."
terms.find((t) => t.slug === value)?.description ?? "Select term..."
);
const filteredTerms = $derived.by(() => {
@@ -29,8 +28,8 @@ const filteredTerms = $derived.by(() => {
const matched =
query === "" ? terms : terms.filter((t) => t.description.toLowerCase().includes(query));
const current = matched.find((t) => t.code === currentTermCode);
const rest = matched.filter((t) => t.code !== currentTermCode);
const current = matched.find((t) => t.slug === currentTermSlug);
const rest = matched.filter((t) => t.slug !== currentTermSlug);
return current ? [current, ...rest] : rest;
});
@@ -100,22 +99,22 @@ $effect(() => {
<div {...wrapperProps}>
<div {...props} transition:fly={{ duration: 150, y: -4 }}>
<Combobox.Viewport class="p-0.5">
{#each filteredTerms as term, i (term.code)}
{#if i === 1 && term.code !== currentTermCode && filteredTerms[0]?.code === currentTermCode}
{#each filteredTerms as term, i (term.slug)}
{#if i === 1 && term.slug !== currentTermSlug && filteredTerms[0]?.slug === currentTermSlug}
<div class="mx-2 my-1 h-px bg-border"></div>
{/if}
<Combobox.Item
class="rounded-sm outline-hidden flex h-8 w-full select-none items-center px-2 text-sm
data-[highlighted]:bg-accent data-[highlighted]:text-accent-foreground
{term.code === value ? 'cursor-default' : 'cursor-pointer'}
{term.code === currentTermCode ? 'font-medium text-foreground' : 'text-foreground'}"
value={term.code}
{term.slug === value ? 'cursor-default' : 'cursor-pointer'}
{term.slug === currentTermSlug ? 'font-medium text-foreground' : 'text-foreground'}"
value={term.slug}
label={term.description}
>
{#snippet children({ selected })}
<span class="flex-1 truncate">
{term.description}
{#if term.code === currentTermCode}
{#if term.slug === currentTermSlug}
<span class="ml-1.5 text-xs text-muted-foreground font-normal">current</span>
{/if}
</span>
@@ -1,7 +1,7 @@
<script lang="ts">
import { tick } from "svelte";
import { Moon, Sun } from "@lucide/svelte";
import { themeStore } from "$lib/stores/theme.svelte";
import { Moon, Sun } from "@lucide/svelte";
import { tick } from "svelte";
import SimpleTooltip from "./SimpleTooltip.svelte";
/**
@@ -1,60 +1,60 @@
<script lang="ts">
import { scaleLinear, scaleTime } from "d3-scale";
import { onMount } from "svelte";
import { scaleTime, scaleLinear } from "d3-scale";
import type { TimeSlot, ChartContext } from "$lib/timeline/types";
import {
PADDING,
DEFAULT_AXIS_RATIO,
CHART_HEIGHT_RATIO,
MIN_SPAN_MS,
MAX_SPAN_MS,
DEFAULT_SPAN_MS,
ZOOM_FACTOR,
ZOOM_KEY_FACTOR,
ZOOM_EASE,
ZOOM_SETTLE_THRESHOLD,
PAN_FRICTION,
PAN_STOP_THRESHOLD,
PAN_STOP_THRESHOLD_Y,
VELOCITY_SAMPLE_WINDOW,
VELOCITY_MIN_DT,
PAN_STEP_RATIO,
PAN_STEP_CTRL_RATIO,
PAN_EASE,
PAN_SETTLE_THRESHOLD_PX,
YRATIO_STEP,
YRATIO_MIN,
YRATIO_MAX,
YRATIO_SETTLE_THRESHOLD,
FOLLOW_EASE,
MIN_MAXY,
MAX_DT,
DEFAULT_DT,
TAP_MAX_DURATION_MS,
TAP_MAX_DISTANCE_PX,
} from "$lib/timeline/constants";
import { createTimelineStore } from "$lib/timeline/store.svelte";
import {
createAnimMap,
syncAnimTargets,
stepAnimations,
pruneAnimMap,
stepAnimations,
syncAnimTargets,
} from "$lib/timeline/animation";
import {
getVisibleSlots,
findSlotByTime,
snapToSlot,
enabledTotalClasses,
} from "$lib/timeline/viewport";
CHART_HEIGHT_RATIO,
DEFAULT_AXIS_RATIO,
DEFAULT_DT,
DEFAULT_SPAN_MS,
FOLLOW_EASE,
MAX_DT,
MAX_SPAN_MS,
MIN_MAXY,
MIN_SPAN_MS,
PADDING,
PAN_EASE,
PAN_FRICTION,
PAN_SETTLE_THRESHOLD_PX,
PAN_STEP_CTRL_RATIO,
PAN_STEP_RATIO,
PAN_STOP_THRESHOLD,
PAN_STOP_THRESHOLD_Y,
TAP_MAX_DISTANCE_PX,
TAP_MAX_DURATION_MS,
VELOCITY_MIN_DT,
VELOCITY_SAMPLE_WINDOW,
YRATIO_MAX,
YRATIO_MIN,
YRATIO_SETTLE_THRESHOLD,
YRATIO_STEP,
ZOOM_EASE,
ZOOM_FACTOR,
ZOOM_KEY_FACTOR,
ZOOM_SETTLE_THRESHOLD,
} from "$lib/timeline/constants";
import {
drawGrid,
drawHoverColumn,
drawStackedArea,
drawNowLine,
drawStackedArea,
drawTimeAxis,
stackVisibleSlots,
} from "$lib/timeline/renderer";
import { createTimelineStore } from "$lib/timeline/store.svelte";
import type { ChartContext, TimeSlot } from "$lib/timeline/types";
import {
enabledTotalClasses,
findSlotByTime,
getVisibleSlots,
snapToSlot,
} from "$lib/timeline/viewport";
import TimelineDrawer from "./TimelineDrawer.svelte";
import TimelineTooltip from "./TimelineTooltip.svelte";
@@ -1,7 +1,7 @@
<script lang="ts">
import { Filter, X } from "@lucide/svelte";
import { getSubjectColor } from "$lib/timeline/data";
import { DRAWER_WIDTH } from "$lib/timeline/constants";
import { getSubjectColor } from "$lib/timeline/data";
import { Filter, X } from "@lucide/svelte";
interface Props {
open: boolean;
@@ -1,8 +1,8 @@
<script lang="ts">
import { timeFormat } from "d3-time-format";
import { getSubjectColor } from "$lib/timeline/data";
import type { TimeSlot } from "$lib/timeline/types";
import { enabledTotalClasses } from "$lib/timeline/viewport";
import { timeFormat } from "d3-time-format";
interface Props {
visible: boolean;
@@ -1,6 +1,6 @@
import { onMount } from "svelte";
import { OverlayScrollbars, type PartialOptions } from "overlayscrollbars";
import { themeStore } from "$lib/stores/theme.svelte";
import { OverlayScrollbars, type PartialOptions } from "overlayscrollbars";
import { onMount } from "svelte";
/**
* Set up OverlayScrollbars on an element with automatic theme reactivity.
@@ -1,22 +1,24 @@
import { describe, it, expect } from "vitest";
import type { CourseResponse, DbMeetingTime, InstructorResponse } from "$lib/api";
import {
formatTime,
formatTimeRange,
abbreviateInstructor,
formatCreditHours,
formatDate,
formatDateShort,
formatLocationDisplay,
formatMeetingDays,
formatMeetingDaysLong,
formatMeetingDaysVerbose,
formatMeetingTime,
formatMeetingTimeTooltip,
formatMeetingTimesTooltip,
abbreviateInstructor,
formatCreditHours,
formatTime,
formatTimeRange,
getPrimaryInstructor,
isAsyncOnline,
isMeetingTimeTBA,
isTimeTBA,
formatDate,
formatDateShort,
formatMeetingDaysLong,
} from "$lib/course";
import type { DbMeetingTime, CourseResponse, InstructorResponse } from "$lib/api";
import { describe, expect, it } from "vitest";
function makeMeetingTime(overrides: Partial<DbMeetingTime> = {}): DbMeetingTime {
return {
@@ -411,3 +413,92 @@ describe("formatMeetingTimesTooltip", () => {
expect(result).toContain("\n\n");
});
});
describe("isAsyncOnline", () => {
it("returns true for INT building with no times", () => {
const course = {
meetingTimes: [
makeMeetingTime({
building: "INT",
building_description: "Internet Class",
begin_time: null,
end_time: null,
}),
],
} as CourseResponse;
expect(isAsyncOnline(course)).toBe(true);
});
it("returns false for INT building with meeting times", () => {
const course = {
meetingTimes: [
makeMeetingTime({
building: "INT",
building_description: "Internet Class",
tuesday: true,
thursday: true,
begin_time: "1000",
end_time: "1115",
}),
],
} as CourseResponse;
expect(isAsyncOnline(course)).toBe(false);
});
it("returns false for non-INT building", () => {
const course = {
meetingTimes: [
makeMeetingTime({
building: "MH",
building_description: "Main Hall",
begin_time: null,
end_time: null,
}),
],
} as CourseResponse;
expect(isAsyncOnline(course)).toBe(false);
});
it("returns false for empty meeting times", () => {
const course = { meetingTimes: [] } as unknown as CourseResponse;
expect(isAsyncOnline(course)).toBe(false);
});
});
describe("formatLocationDisplay", () => {
it("returns 'Online' for INT building", () => {
const course = {
meetingTimes: [
makeMeetingTime({
building: "INT",
building_description: "Internet Class",
}),
],
campus: "9",
} as CourseResponse;
expect(formatLocationDisplay(course)).toBe("Online");
});
it("returns building and room for physical location", () => {
const course = {
meetingTimes: [
makeMeetingTime({
building: "MH",
building_description: "Main Hall",
room: "2.206",
}),
],
campus: "11",
} as CourseResponse;
expect(formatLocationDisplay(course)).toBe("MH 2.206");
});
it("returns building only when no room", () => {
const course = {
meetingTimes: [
makeMeetingTime({
building: "MH",
building_description: "Main Hall",
room: null,
}),
],
campus: "11",
} as CourseResponse;
expect(formatLocationDisplay(course)).toBe("MH");
});
});
@@ -1,4 +1,4 @@
import type { DbMeetingTime, CourseResponse, InstructorResponse } from "$lib/api";
import type { CourseResponse, DbMeetingTime, InstructorResponse } from "$lib/api";
/** Convert "0900" to "9:00 AM" */
export function formatTime(time: string | null): string {
@@ -152,6 +152,13 @@ export function isTimeTBA(mt: DbMeetingTime): boolean {
return !mt.begin_time || mt.begin_time.length !== 4;
}
/** Check if course is asynchronous online (INT building with no meeting times) */
export function isAsyncOnline(course: CourseResponse): boolean {
if (course.meetingTimes.length === 0) return false;
const mt = course.meetingTimes[0];
return mt.building === "INT" && isTimeTBA(mt);
}
/** Format a date string to "January 20, 2026". Accepts YYYY-MM-DD or MM/DD/YYYY. */
export function formatDate(dateStr: string): string {
let year: number, month: number, day: number;
@@ -170,6 +177,8 @@ export function formatDate(dateStr: string): string {
/** Short location string from first meeting time: "MH 2.206" or campus fallback */
export function formatLocation(course: CourseResponse): string | null {
for (const mt of course.meetingTimes) {
// Skip INT building - handled by formatLocationDisplay
if (mt.building === "INT") continue;
if (mt.building && mt.room) return `${mt.building} ${mt.room}`;
if (mt.building) return mt.building;
}
@@ -307,13 +316,19 @@ export function concernAccentColor(concern: DeliveryConcern): string | null {
/**
* Location display text for the table cell.
* Falls back to "Online" for online courses instead of showing a dash.
* Shows "Online" for internet class (INT building) or other online courses.
*/
export function formatLocationDisplay(course: CourseResponse): string | null {
// Check for Internet Class building first
const hasIntBuilding = course.meetingTimes.some((mt) => mt.building === "INT");
if (hasIntBuilding) return "Online";
const loc = formatLocation(course);
if (loc) return loc;
const concern = getDeliveryConcern(course);
if (concern === "online") return "Online";
if (concern === "online" || concern === "internet") return "Online";
return null;
}
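Taken together with the table-cell markup earlier in this diff, the time-column logic reduces to a three-way branch. A condensed sketch using only functions added or already present in `$lib/course` (the TBA check inlines `isTimeTBA` from above; the helper name is invented):

import type { CourseResponse } from "$lib/api";
import { isAsyncOnline } from "$lib/course";

// Async online sections get "Async", untimed sections get "TBA",
// everything else falls through to the formatted time range.
export function timeCellLabel(course: CourseResponse): "Async" | "TBA" | null {
  if (isAsyncOnline(course)) return "Async";
  const mt = course.meetingTimes[0];
  if (!mt || !mt.begin_time || mt.begin_time.length !== 4) return "TBA";
  return null; // caller renders the real time range
}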
@@ -5,7 +5,7 @@
* targets. This module owns the AnimMap lifecycle: syncing targets,
* stepping current values, and pruning offscreen entries.
*/
import { VALUE_EASE, MAXY_EASE, SETTLE_THRESHOLD, MIN_MAXY } from "./constants";
import { MAXY_EASE, MIN_MAXY, SETTLE_THRESHOLD, VALUE_EASE } from "./constants";
import type { AnimEntry, TimeSlot } from "./types";
export type AnimMap = Map<number, Map<string, AnimEntry>>;

Some files were not shown because too many files have changed in this diff.