13 Commits

Author SHA1 Message Date
b02a0738e2 chore: bump to v0.6.0, update roadmap & changelog 2026-01-30 20:37:31 -06:00
5d7d60cd96 fix: prevent session pool deadlock on acquire cancellation
Replace is_creating mutex with atomic flag and RAII guard to ensure
proper cleanup when acquire() futures are cancelled mid-creation,
preventing permanent deadlock for subsequent callers.
2026-01-30 20:19:10 -06:00
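A minimal sketch of the cancellation-safety pattern this commit describes, mirroring the `CreatingGuard` visible in the session-pool diff further down this page (struct names here are simplified):

```rust
use std::sync::Arc;
use std::sync::atomic::{AtomicBool, Ordering};

/// Shared per-term pool state; the real pool also holds a session queue
/// and a `Notify` used to wake waiters.
struct PoolState {
    is_creating: AtomicBool,
}

/// RAII guard: `Drop` clears the flag even if the owning `acquire()`
/// future is cancelled mid-creation, so later callers never deadlock.
struct CreatingGuard(Arc<PoolState>);

impl Drop for CreatingGuard {
    fn drop(&mut self) {
        self.0.is_creating.store(false, Ordering::Release);
    }
}

/// CAS so that exactly one caller at a time holds the creator role.
fn try_become_creator(state: &Arc<PoolState>) -> Option<CreatingGuard> {
    state
        .is_creating
        .compare_exchange(false, true, Ordering::AcqRel, Ordering::Acquire)
        .ok()
        .map(|_| CreatingGuard(Arc::clone(state)))
}
```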
1954166db6 feat: add name parsing and normalization for instructor-RMP matching 2026-01-30 20:02:59 -06:00
a2a9116b7a fix: avoid clipping page content 2026-01-30 19:32:05 -06:00
a103f0643a feat: refactor admin instructor UI with component extraction and optimistic updates 2026-01-30 19:31:31 -06:00
474d519b9d feat: add auto-format recovery when formatting is sole check failure
Enhances check recipe to detect when only formatting checks fail while
peers pass, automatically applies formatters, then re-verifies. Supports
both Rust (rustfmt + cargo-check) and web (biome + svelte-check)
domains. Displays results eagerly as checks complete instead of in
original order.
2026-01-30 16:01:56 -06:00
fb27bdc119 feat: implement session expiry extension and 401 recovery 2026-01-30 16:01:17 -06:00
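The diff for this commit is not among the files shown below. As a rough, hypothetical sketch of the 401-recovery half only (the trait and function names are invented for illustration, not the project's API):

```rust
use anyhow::{bail, Result};

/// Hypothetical stand-in for the real session/API types; only the
/// retry-on-401 shape is the point of this sketch.
trait SessionProvider {
    fn fresh_session(&self) -> Result<String>;              // returns a session token
    fn send(&self, token: &str, url: &str) -> Result<u16>;  // returns an HTTP status
}

fn get_with_recovery(p: &impl SessionProvider, url: &str) -> Result<u16> {
    let mut token = p.fresh_session()?;
    for attempt in 0..2 {
        let status = p.send(&token, url)?;
        if status != 401 {
            return Ok(status);
        }
        if attempt == 0 {
            // Treat 401 as an expired session: mint a fresh one and retry once.
            token = p.fresh_session()?;
        }
    }
    bail!("still unauthorized after refreshing the session")
}
```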
669dec0235 feat: add timeline API with schedule-aware enrollment aggregation
Implements POST /api/timeline endpoint that aggregates enrollment by
subject over 15-minute slots, filtering courses by their actual meeting
times. Includes ISR-style schedule cache with hourly background refresh
using stale-while-revalidate pattern, database indexes for efficient
queries, and frontend refactor to dynamically discover subjects from API.
2026-01-30 10:56:11 -06:00
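A small illustrative sketch of the 15-minute slot granularity (the real aggregation is done in SQL; see the DISTINCT ON index comments in the migration below):

```rust
use chrono::{DateTime, Utc};

/// Floor a timestamp to its 15-minute (900-second) slot, expressed as
/// epoch seconds. A sketch of the bucketing granularity only, not the
/// endpoint's actual code.
fn bucket_15min(ts: DateTime<Utc>) -> i64 {
    let secs = ts.timestamp();
    secs - secs.rem_euclid(900)
}

fn main() {
    let now = Utc::now();
    println!("{} falls in bucket {}", now, bucket_15min(now));
}
```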
67ba63339a fix: instructor/course mismatching, build order-independent map for association 2026-01-30 09:53:03 -06:00
7b8c11ac13 feat: add calendar export endpoints for ICS and Google Calendar 2026-01-30 04:08:16 -06:00
a767a3f8be feat: add root error page handling 2026-01-30 04:07:53 -06:00
8ce398c0e0 feat: add scraper analytics dashboard with timeseries and subject monitoring 2026-01-30 03:46:48 -06:00
9fed651641 feat: add adaptive scheduling and scraper admin endpoints
Subjects now have individually calculated scrape intervals based on their
historical change ratio, consecutive zero-change runs, failure counts, and
the current time of day. This reduces unnecessary scrapes during inactive
periods while maintaining responsiveness during peak hours. Includes four
new admin endpoints for monitoring scraper health and scheduling decisions.
2026-01-30 02:14:37 -06:00
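The scheduler itself is not part of the diffs shown on this page. A hedged sketch of how the listed signals might combine into a per-subject interval (weights, bounds, and the formula are illustrative assumptions, not the project's actual values):

```rust
/// Illustrative only: combine the signals named in the commit message into
/// a per-subject scrape interval. Constants and formula are assumptions.
fn scrape_interval_mins(
    change_ratio: f64,       // fraction of recent scrapes that saw changes (0.0..=1.0)
    zero_change_streak: u32, // consecutive runs with no changes
    failure_count: u32,      // recent consecutive failures
    hour_of_day: u32,        // local hour, 0..24
) -> u64 {
    let base = 30.0; // minutes
    // More observed change means scrape more often; long quiet streaks back off.
    let activity = base / (0.25 + change_ratio);
    let backoff = 1.0 + 0.5 * zero_change_streak.min(8) as f64;
    // Failures back off too, so a broken subject doesn't hammer the API.
    let failure_penalty = 1.0 + failure_count.min(5) as f64;
    // Overnight hours are quieter, so stretch intervals there.
    let off_peak = if (1..6).contains(&hour_of_day) { 2.0 } else { 1.0 };
    (activity * backoff * failure_penalty * off_peak).clamp(15.0, 24.0 * 60.0) as u64
}
```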
64 changed files with 6148 additions and 864 deletions
+97 -3
@@ -272,7 +272,7 @@ dependencies = [
[[package]]
name = "banner"
version = "0.5.0"
version = "0.6.0"
dependencies = [
"anyhow",
"async-trait",
@@ -280,6 +280,7 @@ dependencies = [
"axum-extra",
"bitflags 2.9.4",
"chrono",
"chrono-tz",
"clap",
"compile-time",
"cookie",
@@ -291,6 +292,7 @@ dependencies = [
"futures",
"governor",
"html-escape",
"htmlize",
"http 1.3.1",
"mime_guess",
"num-format",
@@ -314,6 +316,7 @@ dependencies = [
"tracing",
"tracing-subscriber",
"ts-rs",
"unicode-normalization",
"url",
"urlencoding",
"yansi",
@@ -484,6 +487,16 @@ dependencies = [
"windows-link 0.2.0",
]
[[package]]
name = "chrono-tz"
version = "0.10.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a6139a8597ed92cf816dfb33f5dd6cf0bb93a6adc938f11039f371bc5bcd26c3"
dependencies = [
"chrono",
"phf 0.12.1",
]
[[package]]
name = "clap"
version = "4.5.47"
@@ -1337,6 +1350,19 @@ dependencies = [
"utf8-width",
]
[[package]]
name = "htmlize"
version = "1.0.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d347c0de239be20ba0982e4822de3124404281e119ae3e11f5d7425a414e1935"
dependencies = [
"memchr",
"pastey",
"phf 0.11.3",
"phf_codegen",
"serde_json",
]
[[package]]
name = "http"
version = "0.2.12"
@@ -2089,6 +2115,12 @@ dependencies = [
"windows-targets 0.52.6",
]
[[package]]
name = "pastey"
version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "35fb2e5f958ec131621fdd531e9fc186ed768cbe395337403ae56c17a74c68ec"
[[package]]
name = "pear"
version = "0.2.9"
@@ -2127,6 +2159,62 @@ version = "2.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9b4f627cb1b25917193a259e49bdad08f671f8d9708acfd5fe0a8c1455d87220"
[[package]]
name = "phf"
version = "0.11.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1fd6780a80ae0c52cc120a26a1a42c1ae51b247a253e4e06113d23d2c2edd078"
dependencies = [
"phf_shared 0.11.3",
]
[[package]]
name = "phf"
version = "0.12.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "913273894cec178f401a31ec4b656318d95473527be05c0752cc41cdc32be8b7"
dependencies = [
"phf_shared 0.12.1",
]
[[package]]
name = "phf_codegen"
version = "0.11.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "aef8048c789fa5e851558d709946d6d79a8ff88c0440c587967f8e94bfb1216a"
dependencies = [
"phf_generator",
"phf_shared 0.11.3",
]
[[package]]
name = "phf_generator"
version = "0.11.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3c80231409c20246a13fddb31776fb942c38553c51e871f8cbd687a4cfb5843d"
dependencies = [
"phf_shared 0.11.3",
"rand 0.8.5",
]
[[package]]
name = "phf_shared"
version = "0.11.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "67eabc2ef2a60eb7faa00097bd1ffdb5bd28e62bf39990626a582201b7a754e5"
dependencies = [
"siphasher",
]
[[package]]
name = "phf_shared"
version = "0.12.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "06005508882fb681fd97892ecff4b7fd0fee13ef1aa569f8695dae7ab9099981"
dependencies = [
"siphasher",
]
[[package]]
name = "pin-project-lite"
version = "0.2.16"
@@ -2983,6 +3071,12 @@ dependencies = [
"rand_core 0.6.4",
]
[[package]]
name = "siphasher"
version = "1.0.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b2aa850e253778c88a04c3d7323b043aeda9d3e30d5971937c1855769763678e"
[[package]]
name = "skeptic"
version = "0.13.7"
@@ -3947,9 +4041,9 @@ checksum = "5a5f39404a5da50712a4c1eecf25e90dd62b613502b7e925fd4e4d19b5c96512"
[[package]]
name = "unicode-normalization"
version = "0.1.24"
version = "0.1.25"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5033c97c4262335cded6d6fc3e5c18ab755e1a3dc96376350f3d8e9f009ad956"
checksum = "5fd4f6878c9cb28d874b009da9e8d183b5abc80117c40bbd187a1fde336be6e8"
dependencies = [
"tinyvec",
]
+4 -1
@@ -1,6 +1,6 @@
[package]
name = "banner"
version = "0.5.0"
version = "0.6.0"
edition = "2024"
default-run = "banner"
@@ -59,6 +59,9 @@ ts-rs = { version = "11.1.0", features = ["chrono-impl", "serde-compat", "serde-
html-escape = "0.2.13"
axum-extra = { version = "0.12.5", features = ["query"] }
urlencoding = "2.1.3"
chrono-tz = "0.10.4"
htmlize = { version = "1.0.6", features = ["unescape"] }
unicode-normalization = "0.1.25"
[dev-dependencies]
+87 -6
@@ -27,8 +27,28 @@ check *flags:
console.log("\x1b[1;36m→ Verifying...\x1b[0m");
}
// Domain groups: format check name → { peers (other checks), formatter, sanity re-check }
const domains = {
rustfmt: {
peers: ["clippy", "rust-test"],
format: () => run(["cargo", "fmt", "--all"]),
recheck: [
{ name: "rustfmt", cmd: ["cargo", "fmt", "--all", "--", "--check"] },
{ name: "cargo-check", cmd: ["cargo", "check", "--all-features"] },
],
},
biome: {
peers: ["svelte-check", "web-test"],
format: () => run(["bun", "run", "--cwd", "web", "format"]),
recheck: [
{ name: "biome", cmd: ["bun", "run", "--cwd", "web", "format:check"] },
{ name: "svelte-check", cmd: ["bun", "run", "--cwd", "web", "check"] },
],
},
};
const checks = [
{ name: "rustfmt", cmd: ["cargo", "fmt", "--all", "--", "--check"] },
{ name: "rustfmt", cmd: ["cargo", "fmt", "--all", "--", "--check"], terse: true },
{ name: "clippy", cmd: ["cargo", "clippy", "--all-features", "--", "--deny", "warnings"] },
{ name: "rust-test", cmd: ["cargo", "nextest", "run", "-E", "not test(export_bindings)"] },
{ name: "svelte-check", cmd: ["bun", "run", "--cwd", "web", "check"] },
@@ -60,16 +80,23 @@ check *flags:
process.stderr.write(`\r\x1b[K${elapsed}s [${Array.from(remaining).join(", ")}]`);
}, 100) : null;
// Phase 1: collect all results, eagerly displaying whichever finishes first
const results = {};
let anyFailed = false;
for (const promise of promises) {
const r = await promise;
const tagged = promises.map((p, i) => p.then(r => ({ i, r })));
for (let n = 0; n < checks.length; n++) {
const { i, r } = await Promise.race(tagged);
tagged[i] = new Promise(() => {}); // sentinel: never resolves
results[r.name] = r;
remaining.delete(r.name);
if (isTTY) process.stderr.write(`\r\x1b[K`);
if (r.exitCode !== 0) {
anyFailed = true;
process.stdout.write(`\x1b[31m✗ ${r.name}\x1b[0m (${r.elapsed}s)\n`);
if (r.stdout) process.stdout.write(r.stdout);
if (r.stderr) process.stderr.write(r.stderr);
if (!r.terse) {
if (r.stdout) process.stdout.write(r.stdout);
if (r.stderr) process.stderr.write(r.stderr);
}
} else {
process.stdout.write(`\x1b[32m✓ ${r.name}\x1b[0m (${r.elapsed}s)\n`);
}
@@ -77,7 +104,61 @@ check *flags:
if (interval) clearInterval(interval);
if (isTTY) process.stderr.write(`\r\x1b[K`);
process.exit(anyFailed ? 1 : 0);
// Phase 2: auto-fix formatting if it's the only failure in a domain
let autoFixed = false;
for (const [fmtName, domain] of Object.entries(domains)) {
const fmtResult = results[fmtName];
if (!fmtResult || fmtResult.exitCode === 0) continue;
const peersAllPassed = domain.peers.every(p => results[p]?.exitCode === 0);
if (!peersAllPassed) continue;
process.stdout.write(`\n\x1b[1;36m→ Auto-formatting ${fmtName} (peers passed, only formatting failed)...\x1b[0m\n`);
domain.format();
// Re-verify format + sanity check in parallel
const recheckStart = Date.now();
const recheckPromises = domain.recheck.map(async (check) => {
const proc = Bun.spawn(check.cmd, {
env: { ...process.env, FORCE_COLOR: "1" },
stdout: "pipe", stderr: "pipe",
});
const [stdout, stderr] = await Promise.all([
new Response(proc.stdout).text(),
new Response(proc.stderr).text(),
]);
await proc.exited;
return { ...check, stdout, stderr, exitCode: proc.exitCode,
elapsed: ((Date.now() - recheckStart) / 1000).toFixed(1) };
});
let recheckFailed = false;
for (const p of recheckPromises) {
const r = await p;
if (r.exitCode !== 0) {
recheckFailed = true;
process.stdout.write(`\x1b[31m ✗ ${r.name}\x1b[0m (${r.elapsed}s)\n`);
if (r.stdout) process.stdout.write(r.stdout);
if (r.stderr) process.stderr.write(r.stderr);
} else {
process.stdout.write(`\x1b[32m ✓ ${r.name}\x1b[0m (${r.elapsed}s)\n`);
}
}
if (!recheckFailed) {
process.stdout.write(`\x1b[32m ✓ ${fmtName} auto-fix succeeded\x1b[0m\n`);
results[fmtName].exitCode = 0;
autoFixed = true;
} else {
process.stdout.write(`\x1b[31m ✗ ${fmtName} auto-fix failed sanity check\x1b[0m\n`);
}
}
const finalFailed = Object.values(results).some(r => r.exitCode !== 0);
if (autoFixed && !finalFailed) {
process.stdout.write(`\n\x1b[1;32m✓ All checks passed (formatting was auto-fixed)\x1b[0m\n`);
}
process.exit(finalFailed ? 1 : 0);
# Format all Rust and TypeScript code
format:
+43
@@ -6,6 +6,49 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.1.0/).
## [Unreleased]
## [0.6.0] - 2026-01-30
### Added
- User authentication system with Discord OAuth, sessions, admin roles, and login page with FAQ.
- Interactive timeline visualization with D3 canvas, pan/zoom, touch gestures, and enrollment aggregation API.
- Scraper analytics dashboard with timeseries charts, subject monitoring, and per-subject detail views.
- Adaptive scraper scheduling with admin endpoints for monitoring and configuration.
- Scrape job result persistence for effectiveness tracking.
- WebSocket support for real-time scrape job monitoring with connection status indicators.
- Course change auditing with field-level tracking and time-series metrics endpoint.
- Audit log UI with smart JSON diffing, conditional request caching, and auto-refresh.
- Calendar export web endpoints for ICS download and Google Calendar redirect.
- Confidence-based RMP matching with manual review workflow and admin instructor UI.
- RMP profile links and confidence-aware rating display.
- Name parsing and normalization for improved instructor-RMP matching.
- Mobile touch controls with gesture detection for timeline.
- Worker timeout protection and crash recovery for job queue.
- Build-time asset compression with encoding negotiation (gzip, brotli, zstd).
- Smart page transitions with theme-aware element transitions.
- Search duration and result count feedback.
- Root error page handling.
- Login page with FAQ section and improved styling.
### Changed
- Consolidated navigation with top nav bar and route groups.
- Centralized number formatting with locale-aware utility.
- Modernized Justfile commands and simplified service management.
- Persisted audit log state in module scope for cross-navigation caching.
- Relative time feedback and improved tooltip customization.
### Fixed
- Instructor/course mismatching via build-order-independent map for association.
- Page content clipping.
- Backend startup delays with retry logic in auth.
- Banner API timeouts increased to handle slow responses.
- i64 serialization for JavaScript compatibility, fixing avatar URL display.
- Frontend build ordering with `-e` embed flag in Justfile.
- Login page centering and unnecessary scrollbar.
- ts-rs serde warnings.
## [0.5.0] - 2026-01-29
### Added
+23 -14
@@ -2,34 +2,43 @@
## Now
- **Notification and subscription system** - Subscribe to courses and get alerts on seat availability, waitlist movement, and detail changes (time, location, professor, seats). DB schema exists.
- **Discord bot revival** - Audit and fix all existing commands (search, terms, ics, gcal) against the current data model. Add test coverage. Bot has been untouched since ~0.3.4 and commands may be broken.
- **Notification and subscription system** - Subscribe to courses and get alerts on seat availability, waitlist movement, and detail changes (time, location, professor, seats). Deliver via Discord bot and web dashboard.
- **Mobile/responsive redesign** - Hamburger nav for sidebar, responsive table column hiding, mobile-friendly admin pages. Timeline is the only area with solid mobile support; most pages need work.
- **Professor name search filter** - Filter search results by instructor. Backend code exists but is commented out.
- **Autocomplete for search fields** - Typeahead for course titles, course numbers, professors, and terms.
- **Test coverage expansion** - Broaden coverage with session/rate-limiter tests and more DB integration tests.
- **Search field autocomplete** - Typeahead for course titles, course numbers, professors, and terms.
- **Large component extraction** - Break down CourseTable, Instructors page, and TimelineCanvas into smaller, testable subcomponents.
## Soon
- **Smart time-of-day search parsing** - Support natural queries like "2 PM", "2-3 PM", "ends by 2 PM", "after 2 PM", "before 2 PM" mapped to time ranges.
- **Section-based lookup** - Search by full section identifier, e.g. "CS 4393 001".
- **Search result pagination** - Paginated embeds for large result sets in Discord.
- **Bot slash command parity** - Keep Discord bot commands in sync with web features: timeline summaries, RMP lookups, audit log highlights, notification management via bot.
- **E2E test suite** - Playwright tests for critical user flows: search, login, admin pages, timeline interaction.
- **Settings page** - Replace placeholder with theme preferences, notification settings, default term/subject selection.
- **Profile enhancements** - Expand from read-only stub to subscription management, saved searches, and course watchlists.
- **Smart time-of-day search parsing** - Support natural queries like "2 PM", "ends by 2 PM", "after 2 PM" mapped to time ranges.
- **Multi-term querying** - Query across multiple terms in a single search instead of one at a time.
- **Historical analytics** - Track seat availability over time and visualize fill-rate trends per course or professor.
- **Schedule builder** - Visual weekly schedule tool for assembling a conflict-free course lineup.
- **Professor stats** - Aggregate data views: average class size, typical waitlist length, schedule patterns across semesters.
- **Historical analytics visualization** - Build trend UI on top of existing course metrics and timeline API. Fill-rate charts per course or professor.
- **Schedule builder** - Visual weekly schedule tool for assembling a conflict-free course lineup. Timeline visualization serves as a foundation.
## Eventually
- **API rate limiting** - Rate limiter on public API endpoints. Needed before any public or external exposure.
- **Bulk admin operations** - Batch RMP match/reject, bulk user management, data export from admin pages.
- **Degree audit helper** - Map available courses to degree requirements and suggest what to take next.
- **Dynamic scraper scheduling** - Adjust scrape intervals based on change frequency and course count (e.g. 2 hours per 500 courses, shorter intervals when changes are detected).
- **DM support** - Allow the Discord bot to respond in direct messages, not just guild channels.
- **"Classes Now" command** - Find classes currently in session based on the current day and time.
- **CRN direct lookup** - Look up a course by its CRN without going through search.
- **Metrics dashboard** - Surface scraper and service metrics visually on the web dashboard.
- **Privileged error feedback** - Detailed error information surfaced to bot admins when commands fail.
## Done
- **Interactive timeline visualization** - D3 canvas with pan/zoom, touch gestures, and enrollment aggregation API. *(0.6.0)*
- **Scraper analytics dashboard** - Timeseries charts, subject monitoring, adaptive scheduling, and admin endpoints. *(0.6.0)*
- **WebSocket job monitoring** - Real-time scrape job queue with live connection status indicators. *(0.6.0)*
- **Course change audit log** - Field-level change tracking with smart diffing, conditional caching, and auto-refresh. *(0.6.0)*
- **User authentication system** - Discord OAuth, sessions, admin roles, and login page. *(0.6.0)*
- **Dynamic scraper scheduling** - Adaptive scrape intervals based on change frequency and course volume. *(0.6.0)*
- **Metrics dashboard** - Scraper and service metrics surfaced on the web dashboard. *(0.6.0)*
- **Subject/major search filter** - Multi-select subject filtering with searchable comboboxes. *(0.5.0)*
- **Web course search UI** - Browser-based course search with interactive data table, sorting, pagination, and column controls. *(0.4.0)*
- **RateMyProfessor integration** - Bulk professor sync via GraphQL with inline ratings in search results. *(0.4.0)*
- **Subject/major search filter** - Multi-select subject filtering with searchable comboboxes. *(0.5.0)*
- **Test coverage expansion** - Unit tests for course formatting, API client, query builder, CLI args, and config parsing. *(0.3.4–0.4.0)*
- **Test coverage expansion** - Unit tests for course formatting, API client, query builder, CLI args, and config parsing. *(0.3.4--0.4.0)*
@@ -0,0 +1,13 @@
-- Indexes for the timeline aggregation endpoint.
-- The query buckets course_metrics by 15-minute intervals, joins to courses
-- for subject, and aggregates enrollment. These indexes support efficient
-- time-range scans and the join.
-- Primary access pattern: scan course_metrics by timestamp range
CREATE INDEX IF NOT EXISTS idx_course_metrics_timestamp
ON course_metrics (timestamp);
-- Composite index for the DISTINCT ON (bucket, course_id) ordered by timestamp DESC
-- to efficiently pick the latest metric per course per bucket.
CREATE INDEX IF NOT EXISTS idx_course_metrics_course_timestamp
ON course_metrics (course_id, timestamp DESC);
@@ -0,0 +1,5 @@
-- Add structured first/last name columns to instructors.
-- Populated by Rust-side backfill (parse_banner_name) since we need
-- HTML entity decoding and suffix extraction that SQL can't handle well.
ALTER TABLE instructors ADD COLUMN first_name VARCHAR;
ALTER TABLE instructors ADD COLUMN last_name VARCHAR;
+11 -1
@@ -14,7 +14,7 @@ use sqlx::postgres::PgPoolOptions;
use std::process::ExitCode;
use std::sync::Arc;
use std::time::Duration;
use tracing::{error, info};
use tracing::{error, info, warn};
/// Main application struct containing all necessary components
pub struct App {
@@ -70,6 +70,11 @@ impl App {
.context("Failed to run database migrations")?;
info!("Database migrations completed successfully");
// Backfill structured name columns for existing instructors
if let Err(e) = crate::data::names::backfill_instructor_names(&db_pool).await {
warn!(error = ?e, "Failed to backfill instructor names (non-fatal)");
}
// Create BannerApi and AppState
let banner_api = BannerApi::new_with_config(
config.banner_base_url.clone(),
@@ -85,6 +90,11 @@ impl App {
info!(error = ?e, "Could not load reference cache on startup (may be empty)");
}
// Load schedule cache for timeline enrollment queries
if let Err(e) = app_state.schedule_cache.load().await {
info!(error = ?e, "Could not load schedule cache on startup (may be empty)");
}
// Seed the initial admin user if configured
if let Some(admin_id) = config.admin_discord_id {
let user = crate::data::users::ensure_seed_admin(&db_pool, admin_id as i64)
+4
@@ -325,6 +325,7 @@ mod tests {
fn test_parse_json_with_context_null_value() {
#[derive(Debug, Deserialize)]
struct TestStruct {
#[allow(dead_code)]
name: String,
}
@@ -363,12 +364,14 @@ mod tests {
#[allow(dead_code)]
#[serde(rename = "courseTitle")]
course_title: String,
#[allow(dead_code)]
faculty: Vec<Faculty>,
}
#[derive(Debug, Deserialize)]
struct Faculty {
#[serde(rename = "displayName")]
#[allow(dead_code)]
display_name: String,
#[allow(dead_code)]
email: String,
@@ -376,6 +379,7 @@ mod tests {
#[derive(Debug, Deserialize)]
struct SearchResult {
#[allow(dead_code)]
data: Vec<Course>,
}
+112 -64
@@ -11,7 +11,9 @@ use rand::distr::{Alphanumeric, SampleString};
use reqwest_middleware::ClientWithMiddleware;
use std::collections::{HashMap, VecDeque};
use std::mem::ManuallyDrop;
use std::ops::{Deref, DerefMut};
use std::sync::atomic::{AtomicBool, Ordering};
use std::sync::{Arc, LazyLock};
use std::time::{Duration, Instant};
use tokio::sync::{Mutex, Notify};
@@ -121,6 +123,64 @@ impl BannerSession {
#[cfg(test)]
mod tests {
use super::*;
use std::time::Duration;
/// Verifies that cancelling `acquire()` mid-session-creation resets `is_creating`,
/// allowing subsequent callers to proceed rather than deadlocking.
#[tokio::test]
async fn test_acquire_not_deadlocked_after_cancellation() {
use tokio::sync::mpsc;
let (tx, mut rx) = mpsc::channel::<()>(10);
// Local server: /registration signals arrival via `tx`, then hangs forever.
let listener = tokio::net::TcpListener::bind("127.0.0.1:0").await.unwrap();
let addr = listener.local_addr().unwrap();
let app = axum::Router::new().route(
"/StudentRegistrationSsb/registration",
axum::routing::get(move || {
let tx = tx.clone();
async move {
let _ = tx.send(()).await;
std::future::pending::<&str>().await
}
}),
);
tokio::spawn(async move {
axum::serve(listener, app).await.unwrap();
});
let base_url = format!("http://{}/StudentRegistrationSsb", addr);
let client = reqwest_middleware::ClientBuilder::new(
reqwest::Client::builder()
.timeout(Duration::from_secs(300))
.build()
.unwrap(),
)
.build();
let pool = SessionPool::new(client, base_url);
let term: Term = "202620".parse().unwrap();
// First acquire: cancel once the request reaches the server.
tokio::select! {
_ = pool.acquire(term) => panic!("server hangs — acquire should never complete"),
_ = rx.recv() => {} // Request arrived; dropping the future simulates timeout cancellation.
}
// Second acquire: verify it reaches the server (i.e., is_creating was reset).
// The global rate limiter has a 10s period, so allow 15s for the second attempt.
tokio::select! {
_ = pool.acquire(term) => {}
result = tokio::time::timeout(Duration::from_secs(15), rx.recv()) => {
assert!(
result.is_ok(),
"acquire() deadlocked — is_creating was not reset after cancellation"
);
}
}
}
#[test]
fn test_new_session_creates_session() {
@@ -200,50 +260,53 @@ mod tests {
}
}
/// A smart pointer that returns a BannerSession to the pool when dropped.
/// A smart pointer that returns a `BannerSession` to the pool when dropped.
pub struct PooledSession {
session: Option<BannerSession>,
// This Arc points directly to the term-specific pool.
session: ManuallyDrop<BannerSession>,
pool: Arc<TermPool>,
}
impl PooledSession {
pub fn been_used(&self) -> bool {
self.session.as_ref().unwrap().been_used()
}
}
impl Deref for PooledSession {
type Target = BannerSession;
fn deref(&self) -> &Self::Target {
// The option is only ever None after drop is called, so this is safe.
self.session.as_ref().unwrap()
&self.session
}
}
impl DerefMut for PooledSession {
fn deref_mut(&mut self) -> &mut Self::Target {
self.session.as_mut().unwrap()
&mut self.session
}
}
/// The magic happens here: when the guard goes out of scope, this is called.
impl Drop for PooledSession {
fn drop(&mut self) {
if let Some(session) = self.session.take() {
let pool = self.pool.clone();
// Since drop() cannot be async, we spawn a task to return the session.
tokio::spawn(async move {
pool.release(session).await;
});
}
// SAFETY: `drop` is called exactly once by Rust's drop semantics,
// so `ManuallyDrop::take` is guaranteed to see a valid value.
let session = unsafe { ManuallyDrop::take(&mut self.session) };
let pool = self.pool.clone();
tokio::spawn(async move {
pool.release(session).await;
});
}
}
pub struct TermPool {
sessions: Mutex<VecDeque<BannerSession>>,
notifier: Notify,
is_creating: Mutex<bool>,
is_creating: AtomicBool,
}
/// RAII guard ensuring `is_creating` is reset on drop for cancellation safety.
/// Without this, a cancelled `acquire()` future would leave the flag set permanently,
/// deadlocking all subsequent callers.
struct CreatingGuard(Arc<TermPool>);
impl Drop for CreatingGuard {
fn drop(&mut self) {
self.0.is_creating.store(false, Ordering::Release);
self.0.notifier.notify_waiters();
}
}
impl TermPool {
@@ -251,7 +314,7 @@ impl TermPool {
Self {
sessions: Mutex::new(VecDeque::new()),
notifier: Notify::new(),
is_creating: Mutex::new(false),
is_creating: AtomicBool::new(false),
}
}
@@ -308,7 +371,7 @@ impl SessionPool {
if let Some(session) = queue.pop_front() {
if !session.is_expired() {
return Ok(PooledSession {
session: Some(session),
session: ManuallyDrop::new(session),
pool: Arc::clone(&term_pool),
});
} else {
@@ -317,45 +380,38 @@ impl SessionPool {
}
} // MutexGuard is dropped, lock is released.
// Slow path: No sessions available. We must either wait or become the creator.
let mut is_creating_guard = term_pool.is_creating.lock().await;
if *is_creating_guard {
// Another task is already creating a session. Release the lock and wait.
drop(is_creating_guard);
// Slow path: wait for an in-progress creation, or become the creator.
if term_pool.is_creating.load(Ordering::Acquire) {
if !waited_for_creation {
trace!("Waiting for another task to create session");
waited_for_creation = true;
}
term_pool.notifier.notified().await;
// Loop back to the top to try the fast path again.
continue;
}
// This task is now the designated creator.
*is_creating_guard = true;
drop(is_creating_guard);
// CAS to become the designated creator.
if term_pool
.is_creating
.compare_exchange(false, true, Ordering::AcqRel, Ordering::Acquire)
.is_err()
{
continue; // Lost the race — loop back and wait.
}
// Guard resets is_creating on drop (including cancellation).
let creating_guard = CreatingGuard(Arc::clone(&term_pool));
// Race: wait for a session to be returned OR for the rate limiter to allow a new one.
trace!("Pool empty, creating new session");
tokio::select! {
_ = term_pool.notifier.notified() => {
// A session was returned while we were waiting!
// We are no longer the creator. Reset the flag and loop to race for the new session.
let mut guard = term_pool.is_creating.lock().await;
*guard = false;
drop(guard);
// A session was returned — release creator role and race for it.
drop(creating_guard);
continue;
}
_ = SESSION_CREATION_RATE_LIMITER.until_ready() => {
// The rate limit has elapsed. It's our job to create the session.
let new_session_result = self.create_session(&term).await;
// After creation, we are no longer the creator. Reset the flag
// and notify all other waiting tasks.
let mut guard = term_pool.is_creating.lock().await;
*guard = false;
drop(guard);
term_pool.notifier.notify_waiters();
drop(creating_guard);
match new_session_result {
Ok(new_session) => {
@@ -366,12 +422,11 @@ impl SessionPool {
"Created new session"
);
return Ok(PooledSession {
session: Some(new_session),
session: ManuallyDrop::new(new_session),
pool: term_pool,
});
}
Err(e) => {
// Propagate the error if session creation failed.
return Err(e.context("Failed to create new session in pool"));
}
}
@@ -380,8 +435,8 @@ impl SessionPool {
}
}
/// Sets up initial session cookies by making required Banner API requests
pub async fn create_session(&self, term: &Term) -> Result<BannerSession> {
/// Sets up initial session cookies by making required Banner API requests.
async fn create_session(&self, term: &Term) -> Result<BannerSession> {
info!(term = %term, "setting up banner session");
// The 'register' or 'search' registration page
@@ -392,22 +447,15 @@ impl SessionPool {
.await?;
// TODO: Validate success
let cookies = initial_registration
let cookies: HashMap<String, String> = initial_registration
.headers()
.get_all("Set-Cookie")
.iter()
.filter_map(|header_value| {
if let Ok(cookie_str) = header_value.to_str() {
if let Ok(cookie) = Cookie::parse(cookie_str) {
Some((cookie.name().to_string(), cookie.value().to_string()))
} else {
None
}
} else {
None
}
.filter_map(|v| {
let c = Cookie::parse(v.to_str().ok()?).ok()?;
Some((c.name().to_string(), c.value().to_string()))
})
.collect::<HashMap<String, String>>();
.collect();
let jsessionid = cookies
.get("JSESSIONID")
@@ -494,8 +542,8 @@ impl SessionPool {
Ok(terms)
}
/// Selects a term for the current session
pub async fn select_term(
/// Selects a term for the current session.
async fn select_term(
&self,
term: &str,
unique_session_id: &str,
+462
@@ -0,0 +1,462 @@
//! Shared calendar generation logic for ICS files and Google Calendar URLs.
//!
//! Used by both the Discord bot commands and the web API endpoints.
use crate::data::models::DbMeetingTime;
use chrono::{Datelike, Duration, NaiveDate, NaiveTime, Weekday};
/// Course metadata needed for calendar generation (shared interface between bot and web).
pub struct CalendarCourse {
pub crn: String,
pub subject: String,
pub course_number: String,
pub title: String,
pub sequence_number: Option<String>,
pub primary_instructor: Option<String>,
}
impl CalendarCourse {
/// Display title like "CS 1083 - Introduction to Computer Science"
pub fn display_title(&self) -> String {
format!("{} {} - {}", self.subject, self.course_number, self.title)
}
/// Filename-safe identifier: "CS_1083_001"
pub fn filename_stem(&self) -> String {
format!(
"{}_{}{}",
self.subject.replace(' ', "_"),
self.course_number,
self.sequence_number
.as_deref()
.map(|s| format!("_{s}"))
.unwrap_or_default()
)
}
}
// ---------------------------------------------------------------------------
// Date parsing helpers
// ---------------------------------------------------------------------------
/// Parse a date string in either MM/DD/YYYY or YYYY-MM-DD format.
fn parse_date(s: &str) -> Option<NaiveDate> {
NaiveDate::parse_from_str(s, "%m/%d/%Y")
.or_else(|_| NaiveDate::parse_from_str(s, "%Y-%m-%d"))
.ok()
}
/// Parse an HHMM time string into `NaiveTime`.
fn parse_hhmm(s: &str) -> Option<NaiveTime> {
if s.len() != 4 {
return None;
}
let hours = s[..2].parse::<u32>().ok()?;
let minutes = s[2..].parse::<u32>().ok()?;
NaiveTime::from_hms_opt(hours, minutes, 0)
}
/// Active weekdays for a meeting time.
fn active_weekdays(mt: &DbMeetingTime) -> Vec<Weekday> {
let mapping: [(bool, Weekday); 7] = [
(mt.monday, Weekday::Mon),
(mt.tuesday, Weekday::Tue),
(mt.wednesday, Weekday::Wed),
(mt.thursday, Weekday::Thu),
(mt.friday, Weekday::Fri),
(mt.saturday, Weekday::Sat),
(mt.sunday, Weekday::Sun),
];
mapping
.iter()
.filter(|(active, _)| *active)
.map(|(_, day)| *day)
.collect()
}
/// ICS two-letter day code for RRULE BYDAY.
fn ics_day_code(day: Weekday) -> &'static str {
match day {
Weekday::Mon => "MO",
Weekday::Tue => "TU",
Weekday::Wed => "WE",
Weekday::Thu => "TH",
Weekday::Fri => "FR",
Weekday::Sat => "SA",
Weekday::Sun => "SU",
}
}
/// Location string from a `DbMeetingTime`.
fn location_string(mt: &DbMeetingTime) -> String {
let building = mt
.building_description
.as_deref()
.or(mt.building.as_deref())
.unwrap_or("");
let room = mt.room.as_deref().unwrap_or("");
let combined = format!("{building} {room}").trim().to_string();
if combined.is_empty() {
"Online".to_string()
} else {
combined
}
}
/// Days display string (e.g. "MWF", "TTh").
fn days_display(mt: &DbMeetingTime) -> String {
let weekdays = active_weekdays(mt);
if weekdays.is_empty() {
return "TBA".to_string();
}
weekdays
.iter()
.map(|d| ics_day_code(*d))
.collect::<Vec<_>>()
.join("")
}
/// Escape text for ICS property values.
fn escape_ics(text: &str) -> String {
text.replace('\\', "\\\\")
.replace(';', "\\;")
.replace(',', "\\,")
.replace('\n', "\\n")
.replace('\r', "")
}
// ---------------------------------------------------------------------------
// University holidays (ported from bot/commands/ics.rs)
// ---------------------------------------------------------------------------
/// Find the nth occurrence of a weekday in a given month/year (1-based).
fn nth_weekday_of_month(year: i32, month: u32, weekday: Weekday, n: u32) -> Option<NaiveDate> {
let first = NaiveDate::from_ymd_opt(year, month, 1)?;
let days_ahead = (weekday.num_days_from_monday() as i64
- first.weekday().num_days_from_monday() as i64)
.rem_euclid(7) as u32;
let day = 1 + days_ahead + 7 * (n - 1);
NaiveDate::from_ymd_opt(year, month, day)
}
/// Compute a consecutive range of dates starting from `start` for `count` days.
fn date_range(start: NaiveDate, count: i64) -> Vec<NaiveDate> {
(0..count)
.filter_map(|i| start.checked_add_signed(Duration::days(i)))
.collect()
}
/// Compute university holidays for a given year.
fn compute_holidays_for_year(year: i32) -> Vec<(&'static str, Vec<NaiveDate>)> {
let mut holidays = Vec::new();
// Labor Day: 1st Monday of September
if let Some(d) = nth_weekday_of_month(year, 9, Weekday::Mon, 1) {
holidays.push(("Labor Day", vec![d]));
}
// Fall Break: Mon-Tue of Columbus Day week
if let Some(mon) = nth_weekday_of_month(year, 10, Weekday::Mon, 2) {
holidays.push(("Fall Break", date_range(mon, 2)));
}
// Day before Thanksgiving
if let Some(thu) = nth_weekday_of_month(year, 11, Weekday::Thu, 4)
&& let Some(wed) = thu.checked_sub_signed(Duration::days(1))
{
holidays.push(("Day Before Thanksgiving", vec![wed]));
}
// Thanksgiving: 4th Thursday + Friday
if let Some(thu) = nth_weekday_of_month(year, 11, Weekday::Thu, 4) {
holidays.push(("Thanksgiving", date_range(thu, 2)));
}
// Winter Holiday: Dec 23-31
if let Some(start) = NaiveDate::from_ymd_opt(year, 12, 23) {
holidays.push(("Winter Holiday", date_range(start, 9)));
}
// New Year's Day
if let Some(d) = NaiveDate::from_ymd_opt(year, 1, 1) {
holidays.push(("New Year's Day", vec![d]));
}
// MLK Day: 3rd Monday of January
if let Some(d) = nth_weekday_of_month(year, 1, Weekday::Mon, 3) {
holidays.push(("MLK Day", vec![d]));
}
// Spring Break: full week starting 2nd Monday of March
if let Some(mon) = nth_weekday_of_month(year, 3, Weekday::Mon, 2) {
holidays.push(("Spring Break", date_range(mon, 6)));
}
holidays
}
/// Get holiday dates within a date range that fall on specific weekdays.
fn holiday_exceptions(start: NaiveDate, end: NaiveDate, weekdays: &[Weekday]) -> Vec<NaiveDate> {
let start_year = start.year();
let end_year = end.year();
(start_year..=end_year)
.flat_map(compute_holidays_for_year)
.flat_map(|(_, dates)| dates)
.filter(|&date| date >= start && date <= end && weekdays.contains(&date.weekday()))
.collect()
}
/// Names of excluded holidays (for user-facing messages).
fn excluded_holiday_names(
start: NaiveDate,
end: NaiveDate,
exceptions: &[NaiveDate],
) -> Vec<String> {
let start_year = start.year();
let end_year = end.year();
let all_holidays: Vec<_> = (start_year..=end_year)
.flat_map(compute_holidays_for_year)
.collect();
let mut names = Vec::new();
for (holiday_name, holiday_dates) in &all_holidays {
for &exc in exceptions {
if holiday_dates.contains(&exc) {
names.push(format!("{} ({})", holiday_name, exc.format("%a, %b %d")));
}
}
}
names.sort();
names.dedup();
names
}
// ---------------------------------------------------------------------------
// ICS generation
// ---------------------------------------------------------------------------
/// Result from ICS generation, including the file content and excluded holiday names.
pub struct IcsResult {
pub content: String,
pub filename: String,
/// Holiday dates excluded via EXDATE rules, for user-facing messages.
#[allow(dead_code)]
pub excluded_holidays: Vec<String>,
}
/// Generate an ICS calendar file for a course.
pub fn generate_ics(
course: &CalendarCourse,
meeting_times: &[DbMeetingTime],
) -> Result<IcsResult, anyhow::Error> {
let mut ics = String::new();
let mut all_excluded = Vec::new();
// Header
ics.push_str("BEGIN:VCALENDAR\r\n");
ics.push_str("VERSION:2.0\r\n");
ics.push_str("PRODID:-//Banner Bot//Course Calendar//EN\r\n");
ics.push_str("CALSCALE:GREGORIAN\r\n");
ics.push_str("METHOD:PUBLISH\r\n");
ics.push_str(&format!(
"X-WR-CALNAME:{}\r\n",
escape_ics(&course.display_title())
));
for (index, mt) in meeting_times.iter().enumerate() {
let (event, holidays) = generate_ics_event(course, mt, index)?;
ics.push_str(&event);
all_excluded.extend(holidays);
}
ics.push_str("END:VCALENDAR\r\n");
Ok(IcsResult {
content: ics,
filename: format!("{}.ics", course.filename_stem()),
excluded_holidays: all_excluded,
})
}
/// Generate a single VEVENT for one meeting time.
fn generate_ics_event(
course: &CalendarCourse,
mt: &DbMeetingTime,
index: usize,
) -> Result<(String, Vec<String>), anyhow::Error> {
let start_date = parse_date(&mt.start_date)
.ok_or_else(|| anyhow::anyhow!("Invalid start_date: {}", mt.start_date))?;
let end_date = parse_date(&mt.end_date)
.ok_or_else(|| anyhow::anyhow!("Invalid end_date: {}", mt.end_date))?;
let start_time = mt.begin_time.as_deref().and_then(parse_hhmm);
let end_time = mt.end_time.as_deref().and_then(parse_hhmm);
// DTSTART/DTEND: first occurrence with time, or all-day on start_date
let (dtstart, dtend) = match (start_time, end_time) {
(Some(st), Some(et)) => {
let s = start_date.and_time(st).and_utc();
let e = start_date.and_time(et).and_utc();
(
s.format("%Y%m%dT%H%M%SZ").to_string(),
e.format("%Y%m%dT%H%M%SZ").to_string(),
)
}
_ => {
let s = start_date.and_hms_opt(0, 0, 0).unwrap().and_utc();
let e = start_date.and_hms_opt(0, 0, 0).unwrap().and_utc();
(
s.format("%Y%m%dT%H%M%SZ").to_string(),
e.format("%Y%m%dT%H%M%SZ").to_string(),
)
}
};
let event_title = if index > 0 {
format!("{} (Meeting {})", course.display_title(), index + 1)
} else {
course.display_title()
};
let instructor = course.primary_instructor.as_deref().unwrap_or("Staff");
let description = format!(
"CRN: {}\\nInstructor: {}\\nDays: {}\\nMeeting Type: {}",
course.crn,
instructor,
days_display(mt),
mt.meeting_type,
);
let location = location_string(mt);
let uid = format!(
"{}-{}-{}@banner-bot.local",
course.crn,
index,
start_date
.and_hms_opt(0, 0, 0)
.unwrap()
.and_utc()
.timestamp()
);
let mut event = String::new();
event.push_str("BEGIN:VEVENT\r\n");
event.push_str(&format!("UID:{uid}\r\n"));
event.push_str(&format!("DTSTART:{dtstart}\r\n"));
event.push_str(&format!("DTEND:{dtend}\r\n"));
event.push_str(&format!("SUMMARY:{}\r\n", escape_ics(&event_title)));
event.push_str(&format!("DESCRIPTION:{}\r\n", escape_ics(&description)));
event.push_str(&format!("LOCATION:{}\r\n", escape_ics(&location)));
let weekdays = active_weekdays(mt);
let mut holiday_names = Vec::new();
if let (false, Some(st)) = (weekdays.is_empty(), start_time) {
let by_day: Vec<&str> = weekdays.iter().map(|d| ics_day_code(*d)).collect();
let until = end_date.format("%Y%m%dT000000Z").to_string();
event.push_str(&format!(
"RRULE:FREQ=WEEKLY;BYDAY={};UNTIL={}\r\n",
by_day.join(","),
until,
));
// Holiday exceptions
let exceptions = holiday_exceptions(start_date, end_date, &weekdays);
if !exceptions.is_empty() {
let start_utc = start_date.and_time(st).and_utc();
let exdates: Vec<String> = exceptions
.iter()
.map(|&d| {
d.and_time(start_utc.time())
.and_utc()
.format("%Y%m%dT%H%M%SZ")
.to_string()
})
.collect();
event.push_str(&format!("EXDATE:{}\r\n", exdates.join(",")));
}
holiday_names = excluded_holiday_names(start_date, end_date, &exceptions);
}
event.push_str("END:VEVENT\r\n");
Ok((event, holiday_names))
}
// ---------------------------------------------------------------------------
// Google Calendar URL generation
// ---------------------------------------------------------------------------
/// Generate a Google Calendar "add event" URL for a single meeting time.
pub fn generate_gcal_url(
course: &CalendarCourse,
mt: &DbMeetingTime,
) -> Result<String, anyhow::Error> {
let start_date = parse_date(&mt.start_date)
.ok_or_else(|| anyhow::anyhow!("Invalid start_date: {}", mt.start_date))?;
let end_date = parse_date(&mt.end_date)
.ok_or_else(|| anyhow::anyhow!("Invalid end_date: {}", mt.end_date))?;
let start_time = mt.begin_time.as_deref().and_then(parse_hhmm);
let end_time = mt.end_time.as_deref().and_then(parse_hhmm);
let dates_text = match (start_time, end_time) {
(Some(st), Some(et)) => {
let s = start_date.and_time(st);
let e = start_date.and_time(et);
format!(
"{}/{}",
s.format("%Y%m%dT%H%M%S"),
e.format("%Y%m%dT%H%M%S")
)
}
_ => {
let s = start_date.format("%Y%m%d").to_string();
format!("{s}/{s}")
}
};
let instructor = course.primary_instructor.as_deref().unwrap_or("Staff");
let details = format!(
"CRN: {}\nInstructor: {}\nDays: {}",
course.crn,
instructor,
days_display(mt),
);
let location = location_string(mt);
let weekdays = active_weekdays(mt);
let recur = if !weekdays.is_empty() && start_time.is_some() {
let by_day: Vec<&str> = weekdays.iter().map(|d| ics_day_code(*d)).collect();
let until = end_date.format("%Y%m%dT000000Z").to_string();
format!(
"RRULE:FREQ=WEEKLY;BYDAY={};UNTIL={}",
by_day.join(","),
until
)
} else {
String::new()
};
let course_text = course.display_title();
let params: Vec<(&str, &str)> = vec![
("action", "TEMPLATE"),
("text", &course_text),
("dates", &dates_text),
("details", &details),
("location", &location),
("trp", "true"),
("ctz", "America/Chicago"),
("recur", &recur),
];
let url = url::Url::parse_with_params("https://calendar.google.com/calendar/render", &params)?;
Ok(url.to_string())
}
+43 -8
@@ -2,6 +2,7 @@
use crate::banner::Course;
use crate::data::models::{DbMeetingTime, UpsertCounts};
use crate::data::names::parse_banner_name;
use crate::error::Result;
use sqlx::PgConnection;
use sqlx::PgPool;
@@ -68,6 +69,8 @@ fn extract_campus_code(course: &Course) -> Option<String> {
struct UpsertDiffRow {
id: i32,
old_id: Option<i32>,
crn: String,
term_code: String,
// enrollment fields
old_enrollment: Option<i32>,
@@ -382,8 +385,14 @@ pub async fn batch_upsert_courses(courses: &[Course], db_pool: &PgPool) -> Resul
// Step 1: Upsert courses with CTE, returning diff rows
let diff_rows = upsert_courses(courses, &mut tx).await?;
// Step 2: Extract course IDs for instructor linking
let course_ids: Vec<i32> = diff_rows.iter().map(|r| r.id).collect();
// Step 2: Build (crn, term_code) → course_id map for instructor linking.
// RETURNING order from INSERT ... ON CONFLICT is not guaranteed to match
// the input array order, so we must key by (crn, term_code) rather than
// relying on positional correspondence.
let crn_term_to_id: HashMap<(&str, &str), i32> = diff_rows
.iter()
.map(|r| ((r.crn.as_str(), r.term_code.as_str()), r.id))
.collect();
// Step 3: Compute audit/metric diffs
let (audits, metrics) = compute_diffs(&diff_rows);
@@ -409,7 +418,7 @@ pub async fn batch_upsert_courses(courses: &[Course], db_pool: &PgPool) -> Resul
let email_to_id = upsert_instructors(courses, &mut tx).await?;
// Step 6: Link courses to instructors via junction table
upsert_course_instructors(courses, &course_ids, &email_to_id, &mut tx).await?;
upsert_course_instructors(courses, &crn_term_to_id, &email_to_id, &mut tx).await?;
tx.commit().await?;
@@ -556,6 +565,7 @@ async fn upsert_courses(courses: &[Course], conn: &mut PgConnection) -> Result<V
)
SELECT u.id,
o.id AS old_id,
u.crn, u.term_code,
o.enrollment AS old_enrollment, u.enrollment AS new_enrollment,
o.max_enrollment AS old_max_enrollment, u.max_enrollment AS new_max_enrollment,
o.wait_count AS old_wait_count, u.wait_count AS new_wait_count,
@@ -619,6 +629,8 @@ async fn upsert_instructors(
) -> Result<HashMap<String, i32>> {
let mut seen = HashSet::new();
let mut display_names: Vec<&str> = Vec::new();
let mut first_names: Vec<Option<String>> = Vec::new();
let mut last_names: Vec<Option<String>> = Vec::new();
let mut emails_lower: Vec<String> = Vec::new();
let mut skipped_no_email = 0u32;
@@ -627,7 +639,10 @@ async fn upsert_instructors(
if let Some(email) = &faculty.email_address {
let email_lower = email.to_lowercase();
if seen.insert(email_lower.clone()) {
let parts = parse_banner_name(&faculty.display_name);
display_names.push(faculty.display_name.as_str());
first_names.push(parts.as_ref().map(|p| p.first.clone()));
last_names.push(parts.as_ref().map(|p| p.last.clone()));
emails_lower.push(email_lower);
}
} else {
@@ -648,18 +663,25 @@ async fn upsert_instructors(
}
let email_refs: Vec<&str> = emails_lower.iter().map(|s| s.as_str()).collect();
let first_name_refs: Vec<Option<&str>> = first_names.iter().map(|s| s.as_deref()).collect();
let last_name_refs: Vec<Option<&str>> = last_names.iter().map(|s| s.as_deref()).collect();
let rows: Vec<(i32, String)> = sqlx::query_as(
r#"
INSERT INTO instructors (display_name, email)
SELECT * FROM UNNEST($1::text[], $2::text[])
INSERT INTO instructors (display_name, email, first_name, last_name)
SELECT * FROM UNNEST($1::text[], $2::text[], $3::text[], $4::text[])
ON CONFLICT (email)
DO UPDATE SET display_name = EXCLUDED.display_name
DO UPDATE SET
display_name = EXCLUDED.display_name,
first_name = EXCLUDED.first_name,
last_name = EXCLUDED.last_name
RETURNING id, email
"#,
)
.bind(&display_names)
.bind(&email_refs)
.bind(&first_name_refs)
.bind(&last_name_refs)
.fetch_all(&mut *conn)
.await
.map_err(|e| anyhow::anyhow!("Failed to batch upsert instructors: {}", e))?;
@@ -670,7 +692,7 @@ async fn upsert_instructors(
/// Link courses to their instructors via the junction table.
async fn upsert_course_instructors(
courses: &[Course],
course_ids: &[i32],
crn_term_to_id: &HashMap<(&str, &str), i32>,
email_to_id: &HashMap<String, i32>,
conn: &mut PgConnection,
) -> Result<()> {
@@ -679,7 +701,20 @@ async fn upsert_course_instructors(
let mut banner_ids: Vec<&str> = Vec::new();
let mut primaries = Vec::new();
for (course, &course_id) in courses.iter().zip(course_ids) {
for course in courses {
let key = (
course.course_reference_number.as_str(),
course.term.as_str(),
);
let Some(&course_id) = crn_term_to_id.get(&key) else {
tracing::warn!(
crn = %course.course_reference_number,
term = %course.term,
"No course_id found for CRN/term pair during instructor linking"
);
continue;
};
for faculty in &course.faculty {
if let Some(email) = &faculty.email_address {
let email_lower = email.to_lowercase();
+1
@@ -3,6 +3,7 @@
pub mod batch;
pub mod courses;
pub mod models;
pub mod names;
pub mod reference;
pub mod rmp;
pub mod rmp_matching;
+2
@@ -103,6 +103,8 @@ pub struct Instructor {
pub display_name: String,
pub email: String,
pub rmp_match_status: String,
pub first_name: Option<String>,
pub last_name: Option<String>,
}
#[allow(dead_code)]
+728
@@ -0,0 +1,728 @@
//! Name parsing, normalization, and matching utilities.
//!
//! Handles the mismatch between Banner's single `display_name` ("Last, First Middle")
//! and RMP's separate `first_name`/`last_name` fields, plus data quality issues
//! from both sources (HTML entities, accents, nicknames, suffixes, junk).
use sqlx::PgPool;
use tracing::{info, warn};
use unicode_normalization::UnicodeNormalization;
/// Known name suffixes to extract from the last-name portion.
const SUFFIXES: &[&str] = &["iv", "iii", "ii", "jr", "sr"];
/// Parsed, cleaned name components.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct NameParts {
/// Cleaned display-quality first name(s): "H. Paul", "María"
pub first: String,
/// Cleaned display-quality last name: "O'Brien", "LeBlanc"
pub last: String,
/// Middle name/initial if detected: "Manuel", "L."
pub middle: Option<String>,
/// Suffix if detected: "III", "Jr"
pub suffix: Option<String>,
/// Nicknames extracted from parentheses: ["Ken"], ["Qian"]
pub nicknames: Vec<String>,
}
/// Decode common HTML entities found in Banner data.
///
/// Handles both named entities (`&amp;`, `&uuml;`) and numeric references
/// (`&#39;`, `&#x27;`).
fn decode_html_entities(s: &str) -> String {
if !s.contains('&') {
return s.to_string();
}
htmlize::unescape(s).to_string()
}
/// Extract parenthesized nicknames from a name string.
///
/// `"William (Ken)"` → `("William", vec!["Ken"])`
/// `"Guenevere (Qian)"` → `("Guenevere", vec!["Qian"])`
/// `"John (jack) C."` → `("John C.", vec!["jack"])`
fn extract_nicknames(s: &str) -> (String, Vec<String>) {
let mut nicknames = Vec::new();
let mut cleaned = String::with_capacity(s.len());
let mut chars = s.chars().peekable();
while let Some(ch) = chars.next() {
if ch == '(' {
let mut nick = String::new();
for inner in chars.by_ref() {
if inner == ')' {
break;
}
nick.push(inner);
}
let nick = nick.trim().to_string();
if !nick.is_empty() {
nicknames.push(nick);
}
} else if ch == '"' || ch == '\u{201C}' || ch == '\u{201D}' {
// Extract quoted nicknames: Thomas "Butch" → nickname "Butch"
let mut nick = String::new();
for inner in chars.by_ref() {
if inner == '"' || inner == '\u{201C}' || inner == '\u{201D}' {
break;
}
nick.push(inner);
}
let nick = nick.trim().to_string();
if !nick.is_empty() {
nicknames.push(nick);
}
} else {
cleaned.push(ch);
}
}
// Collapse multiple spaces left by extraction
let cleaned = collapse_whitespace(&cleaned);
(cleaned, nicknames)
}
/// Extract a suffix (Jr, Sr, II, III, IV) from the last-name portion.
///
/// `"LeBlanc III"` → `("LeBlanc", Some("III"))`
/// `"Smith Jr."` → `("Smith", Some("Jr."))`
fn extract_suffix(last: &str) -> (String, Option<String>) {
// Try to match the last token as a suffix
let tokens: Vec<&str> = last.split_whitespace().collect();
if tokens.len() < 2 {
return (last.to_string(), None);
}
let candidate = tokens.last().unwrap();
let candidate_normalized = candidate.to_lowercase().trim_end_matches('.').to_string();
if SUFFIXES.contains(&candidate_normalized.as_str()) {
let name_part = tokens[..tokens.len() - 1].join(" ");
return (name_part, Some(candidate.to_string()));
}
(last.to_string(), None)
}
/// Strip junk commonly found in RMP name fields.
///
/// - Trailing commas: `"Cronenberger,"` → `"Cronenberger"`
/// - Email addresses: `"Neel.Baumgardner@utsa.edu"` → `""` (returns empty)
fn strip_junk(s: &str) -> String {
let s = s.trim();
// If the string looks like an email, return empty
if s.contains('@') && s.contains('.') && !s.contains(' ') {
return String::new();
}
// Strip trailing commas
s.trim_end_matches(',').trim().to_string()
}
/// Collapse runs of whitespace into single spaces and trim.
fn collapse_whitespace(s: &str) -> String {
s.split_whitespace().collect::<Vec<_>>().join(" ")
}
/// Parse a Banner `display_name` ("Last, First Middle") into structured parts.
///
/// Handles HTML entities, suffixes, and multi-token names.
///
/// # Examples
///
/// ```
/// use banner::data::names::parse_banner_name;
///
/// let parts = parse_banner_name("O&#39;Brien, Erin").unwrap();
/// assert_eq!(parts.first, "Erin");
/// assert_eq!(parts.last, "O'Brien");
/// ```
pub fn parse_banner_name(display_name: &str) -> Option<NameParts> {
// 1. Decode HTML entities
let decoded = decode_html_entities(display_name);
// 2. Split on first comma
let (last_part, first_part) = decoded.split_once(',')?;
let last_part = last_part.trim();
let first_part = first_part.trim();
if last_part.is_empty() || first_part.is_empty() {
return None;
}
// 3. Extract suffix from last name
let (last_clean, suffix) = extract_suffix(last_part);
// 4. Parse first-name portion: first token(s) + optional middle
// Banner format is "First Middle", so we keep all tokens as first_name
// to support "H. Paul" style names
let first_clean = collapse_whitespace(first_part);
Some(NameParts {
first: first_clean,
last: last_clean,
middle: None, // Banner doesn't clearly delineate middle vs first
suffix,
nicknames: Vec::new(), // Banner doesn't include nicknames
})
}
/// Parse RMP professor name fields into structured parts.
///
/// Handles junk data, nicknames in parentheses/quotes, and suffixes.
///
/// # Examples
///
/// ```
/// use banner::data::names::parse_rmp_name;
///
/// let parts = parse_rmp_name("William (Ken)", "Burchenal").unwrap();
/// assert_eq!(parts.first, "William");
/// assert_eq!(parts.nicknames, vec!["Ken"]);
/// ```
pub fn parse_rmp_name(first_name: &str, last_name: &str) -> Option<NameParts> {
let first_cleaned = strip_junk(first_name);
let last_cleaned = strip_junk(last_name);
if first_cleaned.is_empty() || last_cleaned.is_empty() {
return None;
}
// Extract nicknames from parens/quotes in first name
let (first_no_nicks, nicknames) = extract_nicknames(&first_cleaned);
let first_final = collapse_whitespace(&first_no_nicks);
// Extract suffix from last name
let (last_final, suffix) = extract_suffix(&last_cleaned);
if first_final.is_empty() || last_final.is_empty() {
return None;
}
Some(NameParts {
first: first_final,
last: last_final,
middle: None,
suffix,
nicknames,
})
}
/// Normalize a name string for matching index keys.
///
/// Pipeline: lowercase → NFD decompose → strip combining marks →
/// strip ALL punctuation, hyphens, and whitespace.
///
/// This produces a compact, space-free string so that "Aguirre Mesa" (Banner)
/// and "Aguirre-Mesa" (RMP) both become "aguirremesa".
///
/// # Examples
///
/// ```
/// use banner::data::names::normalize_for_matching;
///
/// assert_eq!(normalize_for_matching("García"), "garcia");
/// assert_eq!(normalize_for_matching("O'Brien"), "obrien");
/// assert_eq!(normalize_for_matching("Aguirre-Mesa"), "aguirremesa");
/// assert_eq!(normalize_for_matching("Aguirre Mesa"), "aguirremesa");
/// ```
pub fn normalize_for_matching(s: &str) -> String {
s.to_lowercase()
.nfd()
.filter(|c| {
// Keep only non-combining alphabetic characters — strip everything else
c.is_alphabetic() && !unicode_normalization::char::is_combining_mark(*c)
})
.collect()
}
/// Generate all matching index keys for a parsed name.
///
/// For a name like "H. Paul" / "LeBlanc" with no nicknames, generates:
/// - `("leblanc", "h paul")` — full normalized first
/// - `("leblanc", "paul")` — individual token (if multi-token)
/// - `("leblanc", "h")` — individual token (if multi-token)
///
/// For a name like "William" / "Burchenal" with nickname "Ken":
/// - `("burchenal", "william")` — primary
/// - `("burchenal", "ken")` — nickname variant
pub fn matching_keys(parts: &NameParts) -> Vec<(String, String)> {
let norm_last = normalize_for_matching(&parts.last);
if norm_last.is_empty() {
return Vec::new();
}
let mut keys = Vec::new();
let mut seen = std::collections::HashSet::new();
// Primary key: full first name (all spaces stripped)
let norm_first_full = normalize_for_matching(&parts.first);
if !norm_first_full.is_empty() && seen.insert(norm_first_full.clone()) {
keys.push((norm_last.clone(), norm_first_full));
}
// Individual tokens from the display-form first name
// (split before full normalization so we can generate per-token keys)
let first_tokens: Vec<&str> = parts.first.split_whitespace().collect();
if first_tokens.len() > 1 {
for token in &first_tokens {
let norm_token = normalize_for_matching(token);
if !norm_token.is_empty() && seen.insert(norm_token.clone()) {
keys.push((norm_last.clone(), norm_token));
}
}
}
// Nickname variants
for nick in &parts.nicknames {
let norm_nick = normalize_for_matching(nick);
if !norm_nick.is_empty() && seen.insert(norm_nick.clone()) {
keys.push((norm_last.clone(), norm_nick));
}
}
keys
}
/// Backfill `first_name`/`last_name` columns for all instructors that have
/// a `display_name` but NULL structured name fields.
///
/// Parses each `display_name` using [`parse_banner_name`] and updates the row.
/// Logs warnings for any names that fail to parse.
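///
/// A minimal usage sketch (illustrative only; assumes a configured `PgPool`
/// and that this module is exported as `banner::data::names`):
///
/// ```no_run
/// # async fn demo(pool: &sqlx::PgPool) {
/// // One-shot backfill; unparseable display names are logged and skipped.
/// banner::data::names::backfill_instructor_names(pool)
///     .await
///     .expect("backfill failed");
/// # }
/// ```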
pub async fn backfill_instructor_names(db_pool: &PgPool) -> crate::error::Result<()> {
let rows: Vec<(i32, String)> = sqlx::query_as(
"SELECT id, display_name FROM instructors WHERE first_name IS NULL OR last_name IS NULL",
)
.fetch_all(db_pool)
.await?;
if rows.is_empty() {
return Ok(());
}
let total = rows.len();
let mut ids: Vec<i32> = Vec::with_capacity(total);
let mut firsts: Vec<String> = Vec::with_capacity(total);
let mut lasts: Vec<String> = Vec::with_capacity(total);
let mut unparseable = 0usize;
for (id, display_name) in &rows {
match parse_banner_name(display_name) {
Some(parts) => {
ids.push(*id);
firsts.push(parts.first);
lasts.push(parts.last);
}
None => {
warn!(
id,
display_name, "Failed to parse instructor display_name during backfill"
);
unparseable += 1;
}
}
}
if !ids.is_empty() {
let first_refs: Vec<&str> = firsts.iter().map(|s| s.as_str()).collect();
let last_refs: Vec<&str> = lasts.iter().map(|s| s.as_str()).collect();
sqlx::query(
r#"
UPDATE instructors i
SET first_name = v.first_name, last_name = v.last_name
FROM UNNEST($1::int4[], $2::text[], $3::text[])
AS v(id, first_name, last_name)
WHERE i.id = v.id
"#,
)
.bind(&ids)
.bind(&first_refs)
.bind(&last_refs)
.execute(db_pool)
.await?;
}
info!(
total,
updated = ids.len(),
unparseable,
"Instructor name backfill complete"
);
Ok(())
}
#[cfg(test)]
mod tests {
use super::*;
// -----------------------------------------------------------------------
// HTML entity decoding
// -----------------------------------------------------------------------
#[test]
fn decode_apostrophe_entity() {
assert_eq!(decode_html_entities("O&#39;Brien"), "O'Brien");
}
#[test]
fn decode_umlaut_entity() {
assert_eq!(decode_html_entities("B&uuml;lent"), "Bülent");
}
#[test]
fn decode_no_entities() {
assert_eq!(decode_html_entities("Smith"), "Smith");
}
// -----------------------------------------------------------------------
// Nickname extraction
// -----------------------------------------------------------------------
#[test]
fn extract_paren_nickname() {
let (cleaned, nicks) = extract_nicknames("William (Ken)");
assert_eq!(cleaned, "William");
assert_eq!(nicks, vec!["Ken"]);
}
#[test]
fn extract_quoted_nickname() {
let (cleaned, nicks) = extract_nicknames("Thomas \"Butch\"");
assert_eq!(cleaned, "Thomas");
assert_eq!(nicks, vec!["Butch"]);
}
#[test]
fn extract_paren_with_extra_text() {
let (cleaned, nicks) = extract_nicknames("John (jack) C.");
assert_eq!(cleaned, "John C.");
assert_eq!(nicks, vec!["jack"]);
}
#[test]
fn extract_no_nicknames() {
let (cleaned, nicks) = extract_nicknames("Maria Elena");
assert_eq!(cleaned, "Maria Elena");
assert!(nicks.is_empty());
}
// -----------------------------------------------------------------------
// Suffix extraction
// -----------------------------------------------------------------------
#[test]
fn extract_suffix_iii() {
let (name, suffix) = extract_suffix("LeBlanc III");
assert_eq!(name, "LeBlanc");
assert_eq!(suffix, Some("III".to_string()));
}
#[test]
fn extract_suffix_jr_period() {
let (name, suffix) = extract_suffix("Smith Jr.");
assert_eq!(name, "Smith");
assert_eq!(suffix, Some("Jr.".to_string()));
}
#[test]
fn extract_no_suffix() {
let (name, suffix) = extract_suffix("García");
assert_eq!(name, "García");
assert_eq!(suffix, None);
}
// -----------------------------------------------------------------------
// Junk stripping
// -----------------------------------------------------------------------
#[test]
fn strip_trailing_comma() {
assert_eq!(strip_junk("Cronenberger,"), "Cronenberger");
}
#[test]
fn strip_email_address() {
assert_eq!(strip_junk("Neel.Baumgardner@utsa.edu"), "");
}
#[test]
fn strip_clean_name() {
assert_eq!(strip_junk(" Maria "), "Maria");
}
// -----------------------------------------------------------------------
// normalize_for_matching
// -----------------------------------------------------------------------
#[test]
fn normalize_strips_accents() {
assert_eq!(normalize_for_matching("García"), "garcia");
}
#[test]
fn normalize_strips_apostrophe() {
assert_eq!(normalize_for_matching("O'Brien"), "obrien");
}
#[test]
fn normalize_strips_hyphen() {
assert_eq!(normalize_for_matching("Aguirre-Mesa"), "aguirremesa");
}
#[test]
fn normalize_tilde_n() {
assert_eq!(normalize_for_matching("Muñoz"), "munoz");
}
#[test]
fn normalize_umlaut() {
assert_eq!(normalize_for_matching("Müller"), "muller");
}
#[test]
fn normalize_period() {
assert_eq!(normalize_for_matching("H. Paul"), "hpaul");
}
#[test]
fn normalize_strips_spaces() {
assert_eq!(normalize_for_matching("Mary Lou"), "marylou");
}
// -----------------------------------------------------------------------
// parse_banner_name
// -----------------------------------------------------------------------
#[test]
fn banner_standard_name() {
let p = parse_banner_name("Smith, John").unwrap();
assert_eq!(p.first, "John");
assert_eq!(p.last, "Smith");
assert_eq!(p.suffix, None);
}
#[test]
fn banner_html_entity_apostrophe() {
let p = parse_banner_name("O&#39;Brien, Erin").unwrap();
assert_eq!(p.first, "Erin");
assert_eq!(p.last, "O'Brien");
}
#[test]
fn banner_html_entity_umlaut() {
let p = parse_banner_name("Temel, B&uuml;lent").unwrap();
assert_eq!(p.first, "Bülent");
assert_eq!(p.last, "Temel");
}
#[test]
fn banner_suffix_iii() {
let p = parse_banner_name("LeBlanc III, H. Paul").unwrap();
assert_eq!(p.first, "H. Paul");
assert_eq!(p.last, "LeBlanc");
assert_eq!(p.suffix, Some("III".to_string()));
}
#[test]
fn banner_suffix_ii() {
let p = parse_banner_name("Ellis II, Ronald").unwrap();
assert_eq!(p.first, "Ronald");
assert_eq!(p.last, "Ellis");
assert_eq!(p.suffix, Some("II".to_string()));
}
#[test]
fn banner_multi_word_last() {
let p = parse_banner_name("Aguirre Mesa, Andres").unwrap();
assert_eq!(p.first, "Andres");
assert_eq!(p.last, "Aguirre Mesa");
}
#[test]
fn banner_hyphenated_last() {
let p = parse_banner_name("Abu-Lail, Nehal").unwrap();
assert_eq!(p.first, "Nehal");
assert_eq!(p.last, "Abu-Lail");
}
#[test]
fn banner_with_middle_name() {
let p = parse_banner_name("Smith, John David").unwrap();
assert_eq!(p.first, "John David");
assert_eq!(p.last, "Smith");
}
#[test]
fn banner_no_comma() {
assert!(parse_banner_name("SingleName").is_none());
}
#[test]
fn banner_empty_first() {
assert!(parse_banner_name("Smith,").is_none());
}
#[test]
fn banner_empty_last() {
assert!(parse_banner_name(", John").is_none());
}
// -----------------------------------------------------------------------
// parse_rmp_name
// -----------------------------------------------------------------------
#[test]
fn rmp_standard_name() {
let p = parse_rmp_name("John", "Smith").unwrap();
assert_eq!(p.first, "John");
assert_eq!(p.last, "Smith");
}
#[test]
fn rmp_with_nickname() {
let p = parse_rmp_name("William (Ken)", "Burchenal").unwrap();
assert_eq!(p.first, "William");
assert_eq!(p.nicknames, vec!["Ken"]);
}
#[test]
fn rmp_trailing_comma_last() {
let p = parse_rmp_name("J.", "Cronenberger,").unwrap();
assert_eq!(p.last, "Cronenberger");
}
#[test]
fn rmp_email_in_first() {
assert!(parse_rmp_name("Neel.Baumgardner@utsa.edu", "Baumgardner").is_none());
}
#[test]
fn rmp_suffix_in_last() {
let p = parse_rmp_name("H. Paul", "LeBlanc III").unwrap();
assert_eq!(p.first, "H. Paul");
assert_eq!(p.last, "LeBlanc");
assert_eq!(p.suffix, Some("III".to_string()));
}
#[test]
fn rmp_quoted_nickname() {
let p = parse_rmp_name("Thomas \"Butch\"", "Matjeka").unwrap();
assert_eq!(p.first, "Thomas");
assert_eq!(p.nicknames, vec!["Butch"]);
}
#[test]
fn rmp_accented_last() {
let p = parse_rmp_name("Liliana", "Saldaña").unwrap();
assert_eq!(p.last, "Saldaña");
}
// -----------------------------------------------------------------------
// matching_keys
// -----------------------------------------------------------------------
#[test]
fn keys_simple_name() {
let parts = NameParts {
first: "John".into(),
last: "Smith".into(),
middle: None,
suffix: None,
nicknames: vec![],
};
let keys = matching_keys(&parts);
assert_eq!(keys, vec![("smith".into(), "john".into())]);
}
#[test]
fn keys_multi_token_first() {
let parts = NameParts {
first: "H. Paul".into(),
last: "LeBlanc".into(),
middle: None,
suffix: Some("III".into()),
nicknames: vec![],
};
let keys = matching_keys(&parts);
assert!(keys.contains(&("leblanc".into(), "hpaul".into())));
assert!(keys.contains(&("leblanc".into(), "paul".into())));
assert!(keys.contains(&("leblanc".into(), "h".into())));
assert_eq!(keys.len(), 3);
}
#[test]
fn keys_with_nickname() {
let parts = NameParts {
first: "William".into(),
last: "Burchenal".into(),
middle: None,
suffix: None,
nicknames: vec!["Ken".into()],
};
let keys = matching_keys(&parts);
assert!(keys.contains(&("burchenal".into(), "william".into())));
assert!(keys.contains(&("burchenal".into(), "ken".into())));
assert_eq!(keys.len(), 2);
}
#[test]
fn keys_hyphenated_last() {
let parts = parse_banner_name("Aguirre-Mesa, Andres").unwrap();
let keys = matching_keys(&parts);
// Hyphen removed: "aguirremesa"
assert!(keys.contains(&("aguirremesa".into(), "andres".into())));
}
#[test]
fn keys_accented_name() {
let parts = parse_rmp_name("Liliana", "Saldaña").unwrap();
let keys = matching_keys(&parts);
assert!(keys.contains(&("saldana".into(), "liliana".into())));
}
#[test]
fn keys_cross_source_match() {
// Banner: "Aguirre Mesa, Andres" → last="Aguirre Mesa"
let banner = parse_banner_name("Aguirre Mesa, Andres").unwrap();
let banner_keys = matching_keys(&banner);
// RMP: "Andres" / "Aguirre-Mesa" → last="Aguirre-Mesa"
let rmp = parse_rmp_name("Andres", "Aguirre-Mesa").unwrap();
let rmp_keys = matching_keys(&rmp);
// Both should normalize to ("aguirremesa", "andres")
assert!(banner_keys.iter().any(|k| rmp_keys.contains(k)));
}
#[test]
fn keys_accent_cross_match() {
// Banner: "García, José" (if Banner ever has accents)
let banner = parse_banner_name("Garcia, Jose").unwrap();
let banner_keys = matching_keys(&banner);
// RMP: "José" / "García"
let rmp = parse_rmp_name("José", "García").unwrap();
let rmp_keys = matching_keys(&rmp);
// Both normalize to ("garcia", "jose")
assert!(banner_keys.iter().any(|k| rmp_keys.contains(k)));
}
}
+65 -82
View File
@@ -91,25 +91,6 @@ pub async fn batch_upsert_rmp_professors(
Ok(())
}
/// Normalize a name for matching: lowercase, trim, strip trailing periods.
pub(crate) fn normalize(s: &str) -> String {
s.trim().to_lowercase().trim_end_matches('.').to_string()
}
/// Parse Banner's "Last, First Middle" display name into (last, first) tokens.
///
/// Returns `None` if the format is unparseable (no comma, empty parts).
pub(crate) fn parse_display_name(display_name: &str) -> Option<(String, String)> {
let (last_part, first_part) = display_name.split_once(',')?;
let last = normalize(last_part);
// Take only the first token of the first-name portion to drop middle names/initials.
let first = normalize(first_part.split_whitespace().next()?);
if last.is_empty() || first.is_empty() {
return None;
}
Some((last, first))
}
/// Retrieve RMP rating data for an instructor by instructor id.
///
/// Returns `(avg_rating, num_ratings)` for the best linked RMP profile
@@ -136,74 +117,76 @@ pub async fn get_instructor_rmp_data(
Ok(row)
}
#[cfg(test)]
mod tests {
use super::*;
/// Unmatch an instructor from an RMP profile.
///
/// Removes the link from `instructor_rmp_links` and updates the instructor's
/// `rmp_match_status` to 'unmatched' if no links remain.
///
/// If `rmp_legacy_id` is `Some`, removes only that specific link.
/// If `None`, removes all links for the instructor.
pub async fn unmatch_instructor(
db_pool: &PgPool,
instructor_id: i32,
rmp_legacy_id: Option<i32>,
) -> Result<()> {
let mut tx = db_pool.begin().await?;
#[test]
fn parse_standard_name() {
assert_eq!(
parse_display_name("Smith, John"),
Some(("smith".into(), "john".into()))
);
// Delete specific link or all links
if let Some(legacy_id) = rmp_legacy_id {
sqlx::query(
"DELETE FROM instructor_rmp_links WHERE instructor_id = $1 AND rmp_legacy_id = $2",
)
.bind(instructor_id)
.bind(legacy_id)
.execute(&mut *tx)
.await?;
} else {
sqlx::query("DELETE FROM instructor_rmp_links WHERE instructor_id = $1")
.bind(instructor_id)
.execute(&mut *tx)
.await?;
}
#[test]
fn parse_name_with_middle() {
assert_eq!(
parse_display_name("Smith, John David"),
Some(("smith".into(), "john".into()))
);
// Check if any links remain
let (remaining,): (i64,) =
sqlx::query_as("SELECT COUNT(*) FROM instructor_rmp_links WHERE instructor_id = $1")
.bind(instructor_id)
.fetch_one(&mut *tx)
.await?;
// Update instructor status if no links remain
if remaining == 0 {
sqlx::query("UPDATE instructors SET rmp_match_status = 'unmatched' WHERE id = $1")
.bind(instructor_id)
.execute(&mut *tx)
.await?;
}
#[test]
fn parse_name_with_middle_initial() {
assert_eq!(
parse_display_name("Garcia, Maria L."),
Some(("garcia".into(), "maria".into()))
);
// Reset accepted candidates back to pending when unmatching
// This allows the candidates to be re-matched later
if let Some(legacy_id) = rmp_legacy_id {
// Reset only the specific candidate
sqlx::query(
"UPDATE rmp_match_candidates
SET status = 'pending', resolved_at = NULL, resolved_by = NULL
WHERE instructor_id = $1 AND rmp_legacy_id = $2 AND status = 'accepted'",
)
.bind(instructor_id)
.bind(legacy_id)
.execute(&mut *tx)
.await?;
} else {
// Reset all accepted candidates for this instructor
sqlx::query(
"UPDATE rmp_match_candidates
SET status = 'pending', resolved_at = NULL, resolved_by = NULL
WHERE instructor_id = $1 AND status = 'accepted'",
)
.bind(instructor_id)
.execute(&mut *tx)
.await?;
}
#[test]
fn parse_name_with_suffix_in_last() {
// Banner may encode "Jr." as part of the last name.
// normalize() strips trailing periods so "Jr." becomes "jr".
assert_eq!(
parse_display_name("Smith Jr., James"),
Some(("smith jr".into(), "james".into()))
);
}
#[test]
fn parse_no_comma_returns_none() {
assert_eq!(parse_display_name("SingleName"), None);
}
#[test]
fn parse_empty_first_returns_none() {
assert_eq!(parse_display_name("Smith,"), None);
}
#[test]
fn parse_empty_last_returns_none() {
assert_eq!(parse_display_name(", John"), None);
}
#[test]
fn parse_extra_whitespace() {
assert_eq!(
parse_display_name(" Doe , Jane Marie "),
Some(("doe".into(), "jane".into()))
);
}
#[test]
fn normalize_trims_and_lowercases() {
assert_eq!(normalize(" FOO "), "foo");
}
#[test]
fn normalize_strips_trailing_period() {
assert_eq!(normalize("Jr."), "jr");
}
tx.commit().await?;
Ok(())
}
+229 -47
View File
@@ -1,6 +1,6 @@
//! Confidence scoring and candidate generation for RMP instructor matching.
use crate::data::rmp::{normalize, parse_display_name};
use crate::data::names::{matching_keys, parse_banner_name, parse_rmp_name};
use crate::error::Result;
use serde::{Deserialize, Serialize};
use sqlx::PgPool;
@@ -14,6 +14,7 @@ use tracing::{debug, info};
/// Breakdown of individual scoring signals.
#[derive(Debug, Clone, Default, Serialize, Deserialize)]
pub struct ScoreBreakdown {
pub name: f32,
pub department: f32,
pub uniqueness: f32,
pub volume: f32,
@@ -37,12 +38,13 @@ const MIN_CANDIDATE_THRESHOLD: f32 = 0.40;
const AUTO_ACCEPT_THRESHOLD: f32 = 0.85;
// ---------------------------------------------------------------------------
// Weights
// Weights (must sum to 1.0)
// ---------------------------------------------------------------------------
const WEIGHT_DEPARTMENT: f32 = 0.50;
const WEIGHT_UNIQUENESS: f32 = 0.30;
const WEIGHT_VOLUME: f32 = 0.20;
const WEIGHT_NAME: f32 = 0.50;
const WEIGHT_DEPARTMENT: f32 = 0.25;
const WEIGHT_UNIQUENESS: f32 = 0.15;
const WEIGHT_VOLUME: f32 = 0.10;
// ---------------------------------------------------------------------------
// Pure scoring functions
@@ -78,8 +80,9 @@ fn department_similarity(subjects: &[String], rmp_department: Option<&str>) -> f
/// Expand common subject abbreviations used at UTSA and check for overlap.
fn matches_known_abbreviation(subject: &str, department: &str) -> bool {
const MAPPINGS: &[(&str, &[&str])] = &[
// Core subjects (original mappings, corrected)
("cs", &["computer science"]),
("ece", &["electrical", "computer engineering"]),
("ece", &["early childhood education", "early childhood"]),
("ee", &["electrical engineering", "electrical"]),
("me", &["mechanical engineering", "mechanical"]),
("ce", &["civil engineering", "civil"]),
@@ -105,6 +108,85 @@ fn matches_known_abbreviation(subject: &str, department: &str) -> bool {
("ms", &["management science"]),
("kin", &["kinesiology"]),
("com", &["communication"]),
// Architecture & Design
("arc", &["architecture"]),
("ide", &["interior design", "design"]),
// Anthropology & Ethnic Studies
("ant", &["anthropology"]),
("aas", &["african american studies", "ethnic studies"]),
("mas", &["mexican american studies", "ethnic studies"]),
("regs", &["ethnic studies", "gender"]),
// Languages
("lng", &["linguistics", "applied linguistics"]),
("spn", &["spanish"]),
("frn", &["french"]),
("ger", &["german"]),
("chn", &["chinese"]),
("jpn", &["japanese"]),
("kor", &["korean"]),
("itl", &["italian"]),
("rus", &["russian"]),
("lat", &["latin"]),
("grk", &["greek"]),
("asl", &["american sign language", "sign language"]),
(
"fl",
&["foreign languages", "languages", "modern languages"],
),
// Education
("edu", &["education"]),
("ci", &["curriculum", "education"]),
("edl", &["educational leadership", "education"]),
("edp", &["educational psychology", "education"]),
("bbl", &["bilingual education"]),
("spe", &["special education", "education"]),
// Business
("ent", &["entrepreneurship"]),
("gba", &["general business", "business"]),
("blw", &["business law", "law"]),
("rfd", &["real estate"]),
("mot", &["management of technology", "management"]),
// Engineering
("egr", &["engineering"]),
("bme", &["biomedical engineering", "engineering"]),
("cme", &["chemical engineering", "engineering"]),
("cpe", &["computer engineering", "engineering"]),
("ise", &["industrial", "systems engineering", "engineering"]),
("mate", &["materials engineering", "engineering"]),
// Sciences
("che", &["chemistry"]),
("bch", &["biochemistry", "chemistry"]),
("geo", &["geology"]),
("phy", &["physics"]),
("ast", &["astronomy"]),
("es", &["environmental science"]),
// Social Sciences
("crj", &["criminal justice"]),
("swk", &["social work"]),
("pad", &["public administration"]),
("grg", &["geography"]),
("ges", &["geography"]),
// Humanities
("cla", &["classics"]),
("hum", &["humanities"]),
("wgss", &["women's studies"]),
// Health
("hth", &["health"]),
("hcp", &["health science", "health"]),
("ntr", &["nutrition"]),
// Military
("msc", &["military science"]),
("asc", &["aerospace"]),
// Arts
("dan", &["dance"]),
("thr", &["theater"]),
("ahc", &["art history"]),
// Other
("cou", &["counseling"]),
("hon", &["honors"]),
("csm", &["construction"]),
("wrc", &["writing"]),
("set", &["tourism management", "tourism"]),
];
for &(abbr, expansions) in MAPPINGS {
@@ -119,35 +201,39 @@ fn matches_known_abbreviation(subject: &str, department: &str) -> bool {
/// Compute match confidence score (0.0 to 1.0) for an instructor-RMP pair.
///
/// Name matching is handled by the caller via pre-filtering on exact
/// normalized `(last, first)`, so only department, uniqueness, and volume
/// signals are scored here.
/// The name signal is always 1.0 since candidates are only generated for
/// exact normalized name matches. The effective score range is 0.50 to 1.0.
pub fn compute_match_score(
instructor_subjects: &[String],
rmp_department: Option<&str>,
candidate_count: usize,
rmp_num_ratings: i32,
) -> MatchScore {
// --- Department (0.50) ---
// --- Name (0.50) — always 1.0, candidates only exist for exact matches ---
let name_score = 1.0;
// --- Department (0.25) ---
let dept_score = department_similarity(instructor_subjects, rmp_department);
// --- Uniqueness (0.30) ---
// --- Uniqueness (0.15) ---
let uniqueness_score = match candidate_count {
0 | 1 => 1.0,
2 => 0.5,
_ => 0.2,
};
// --- Volume (0.20) ---
// --- Volume (0.10) ---
let volume_score = ((rmp_num_ratings as f32).ln_1p() / 5.0_f32.ln_1p()).clamp(0.0, 1.0);
let composite = dept_score * WEIGHT_DEPARTMENT
let composite = name_score * WEIGHT_NAME
+ dept_score * WEIGHT_DEPARTMENT
+ uniqueness_score * WEIGHT_UNIQUENESS
+ volume_score * WEIGHT_VOLUME;
MatchScore {
score: composite,
breakdown: ScoreBreakdown {
name: name_score,
department: dept_score,
uniqueness: uniqueness_score,
volume: volume_score,
@@ -164,6 +250,7 @@ pub fn compute_match_score(
pub struct MatchingStats {
pub total_unmatched: usize,
pub candidates_created: usize,
pub candidates_rescored: usize,
pub auto_matched: usize,
pub skipped_unparseable: usize,
pub skipped_no_candidates: usize,
@@ -179,8 +266,8 @@ struct RmpProfForMatching {
/// Generate match candidates for all unmatched instructors.
///
/// For each unmatched instructor:
/// 1. Parse `display_name` into (last, first).
/// 2. Find RMP professors with matching normalized name.
/// 1. Parse `display_name` into [`NameParts`] and generate matching keys.
/// 2. Find RMP professors with matching normalized name keys.
/// 3. Score each candidate.
/// 4. Store candidates scoring above [`MIN_CANDIDATE_THRESHOLD`].
/// 5. Auto-accept if the top candidate scores ≥ [`AUTO_ACCEPT_THRESHOLD`]
@@ -200,6 +287,7 @@ pub async fn generate_candidates(db_pool: &PgPool) -> Result<MatchingStats> {
return Ok(MatchingStats {
total_unmatched: 0,
candidates_created: 0,
candidates_rescored: 0,
auto_matched: 0,
skipped_unparseable: 0,
skipped_no_candidates: 0,
@@ -227,7 +315,7 @@ pub async fn generate_candidates(db_pool: &PgPool) -> Result<MatchingStats> {
subject_map.entry(iid).or_default().push(subject);
}
// 3. Load all RMP professors
// 3. Load all RMP professors and build multi-key name index
let prof_rows: Vec<(i32, String, String, Option<String>, i32)> = sqlx::query_as(
"SELECT legacy_id, first_name, last_name, department, num_ratings FROM rmp_professors",
)
@@ -235,40 +323,72 @@ pub async fn generate_candidates(db_pool: &PgPool) -> Result<MatchingStats> {
.await?;
// Build name index: (normalized_last, normalized_first) -> Vec<RmpProfForMatching>
// Each professor may appear under multiple keys (nicknames, token variants).
let mut name_index: HashMap<(String, String), Vec<RmpProfForMatching>> = HashMap::new();
for (legacy_id, first_name, last_name, department, num_ratings) in prof_rows {
let key = (normalize(&last_name), normalize(&first_name));
name_index.entry(key).or_default().push(RmpProfForMatching {
legacy_id,
department,
num_ratings,
});
let mut rmp_parse_failures = 0usize;
for (legacy_id, first_name, last_name, department, num_ratings) in &prof_rows {
match parse_rmp_name(first_name, last_name) {
Some(parts) => {
let keys = matching_keys(&parts);
for key in keys {
name_index.entry(key).or_default().push(RmpProfForMatching {
legacy_id: *legacy_id,
department: department.clone(),
num_ratings: *num_ratings,
});
}
}
None => {
rmp_parse_failures += 1;
debug!(
legacy_id,
first_name, last_name, "Unparseable RMP professor name, skipping"
);
}
}
}
// 4. Load existing candidate pairs (and rejected subset) in a single query
if rmp_parse_failures > 0 {
debug!(
count = rmp_parse_failures,
"RMP professors with unparseable names"
);
}
// 4. Load existing candidate pairs — only skip resolved (accepted/rejected) pairs.
// Pending candidates are rescored so updated mappings take effect.
let candidate_rows: Vec<(i32, i32, String)> =
sqlx::query_as("SELECT instructor_id, rmp_legacy_id, status FROM rmp_match_candidates")
.fetch_all(db_pool)
.await?;
let mut existing_pairs: HashSet<(i32, i32)> = HashSet::with_capacity(candidate_rows.len());
let mut resolved_pairs: HashSet<(i32, i32)> = HashSet::new();
let mut pending_pairs: HashSet<(i32, i32)> = HashSet::new();
let mut rejected_pairs: HashSet<(i32, i32)> = HashSet::new();
for (iid, lid, status) in candidate_rows {
existing_pairs.insert((iid, lid));
if status == "rejected" {
rejected_pairs.insert((iid, lid));
match status.as_str() {
"accepted" | "rejected" => {
resolved_pairs.insert((iid, lid));
if status == "rejected" {
rejected_pairs.insert((iid, lid));
}
}
_ => {
pending_pairs.insert((iid, lid));
}
}
}
// 5. Score and collect candidates
// 5. Score and collect candidates (new + rescored pending)
let empty_subjects: Vec<String> = Vec::new();
let mut candidates: Vec<(i32, i32, f32, serde_json::Value)> = Vec::new();
let mut new_candidates: Vec<(i32, i32, f32, serde_json::Value)> = Vec::new();
let mut rescored_candidates: Vec<(i32, i32, f32, serde_json::Value)> = Vec::new();
let mut auto_accept: Vec<(i32, i32)> = Vec::new(); // (instructor_id, legacy_id)
let mut skipped_unparseable = 0usize;
let mut skipped_no_candidates = 0usize;
for (instructor_id, display_name) in &instructors {
let Some((norm_last, norm_first)) = parse_display_name(display_name) else {
let Some(instructor_parts) = parse_banner_name(display_name) else {
skipped_unparseable += 1;
debug!(
instructor_id,
@@ -279,18 +399,33 @@ pub async fn generate_candidates(db_pool: &PgPool) -> Result<MatchingStats> {
let subjects = subject_map.get(instructor_id).unwrap_or(&empty_subjects);
let key = (norm_last.clone(), norm_first.clone());
let Some(rmp_candidates) = name_index.get(&key) else {
// Generate all matching keys for this instructor and collect candidate
// RMP professors across all key variants (deduplicated by legacy_id).
let instructor_keys = matching_keys(&instructor_parts);
let mut seen_profs: HashSet<i32> = HashSet::new();
let mut matched_profs: Vec<&RmpProfForMatching> = Vec::new();
for key in &instructor_keys {
if let Some(profs) = name_index.get(key) {
for prof in profs {
if seen_profs.insert(prof.legacy_id) {
matched_profs.push(prof);
}
}
}
}
if matched_profs.is_empty() {
skipped_no_candidates += 1;
continue;
};
}
let candidate_count = rmp_candidates.len();
let candidate_count = matched_profs.len();
let mut best: Option<(f32, i32)> = None;
for prof in rmp_candidates {
for prof in &matched_profs {
let pair = (*instructor_id, prof.legacy_id);
if existing_pairs.contains(&pair) {
if resolved_pairs.contains(&pair) {
continue;
}
@@ -308,7 +443,16 @@ pub async fn generate_candidates(db_pool: &PgPool) -> Result<MatchingStats> {
let breakdown_json =
serde_json::to_value(&ms.breakdown).unwrap_or_else(|_| serde_json::json!({}));
candidates.push((*instructor_id, prof.legacy_id, ms.score, breakdown_json));
if pending_pairs.contains(&pair) {
rescored_candidates.push((
*instructor_id,
prof.legacy_id,
ms.score,
breakdown_json,
));
} else {
new_candidates.push((*instructor_id, prof.legacy_id, ms.score, breakdown_json));
}
match best {
Some((s, _)) if ms.score > s => best = Some((ms.score, prof.legacy_id)),
@@ -327,19 +471,20 @@ pub async fn generate_candidates(db_pool: &PgPool) -> Result<MatchingStats> {
}
}
// 6-7. Write candidates and auto-accept within a single transaction
let candidates_created = candidates.len();
// 6-7. Write candidates, rescore, and auto-accept within a single transaction
let candidates_created = new_candidates.len();
let candidates_rescored = rescored_candidates.len();
let auto_matched = auto_accept.len();
let mut tx = db_pool.begin().await?;
// 6. Batch-insert candidates
if !candidates.is_empty() {
let c_instructor_ids: Vec<i32> = candidates.iter().map(|(iid, _, _, _)| *iid).collect();
let c_legacy_ids: Vec<i32> = candidates.iter().map(|(_, lid, _, _)| *lid).collect();
let c_scores: Vec<f32> = candidates.iter().map(|(_, _, s, _)| *s).collect();
// 6a. Batch-insert new candidates
if !new_candidates.is_empty() {
let c_instructor_ids: Vec<i32> = new_candidates.iter().map(|(iid, _, _, _)| *iid).collect();
let c_legacy_ids: Vec<i32> = new_candidates.iter().map(|(_, lid, _, _)| *lid).collect();
let c_scores: Vec<f32> = new_candidates.iter().map(|(_, _, s, _)| *s).collect();
let c_breakdowns: Vec<serde_json::Value> =
candidates.into_iter().map(|(_, _, _, b)| b).collect();
new_candidates.into_iter().map(|(_, _, _, b)| b).collect();
sqlx::query(
r#"
@@ -358,6 +503,40 @@ pub async fn generate_candidates(db_pool: &PgPool) -> Result<MatchingStats> {
.await?;
}
// 6b. Batch-update rescored pending candidates
if !rescored_candidates.is_empty() {
let r_instructor_ids: Vec<i32> = rescored_candidates
.iter()
.map(|(iid, _, _, _)| *iid)
.collect();
let r_legacy_ids: Vec<i32> = rescored_candidates
.iter()
.map(|(_, lid, _, _)| *lid)
.collect();
let r_scores: Vec<f32> = rescored_candidates.iter().map(|(_, _, s, _)| *s).collect();
let r_breakdowns: Vec<serde_json::Value> = rescored_candidates
.into_iter()
.map(|(_, _, _, b)| b)
.collect();
sqlx::query(
r#"
UPDATE rmp_match_candidates mc
SET score = v.score, score_breakdown = v.score_breakdown
FROM UNNEST($1::int4[], $2::int4[], $3::real[], $4::jsonb[])
AS v(instructor_id, rmp_legacy_id, score, score_breakdown)
WHERE mc.instructor_id = v.instructor_id
AND mc.rmp_legacy_id = v.rmp_legacy_id
"#,
)
.bind(&r_instructor_ids)
.bind(&r_legacy_ids)
.bind(&r_scores)
.bind(&r_breakdowns)
.execute(&mut *tx)
.await?;
}
// 7. Auto-accept top candidates
if !auto_accept.is_empty() {
let aa_instructor_ids: Vec<i32> = auto_accept.iter().map(|(iid, _)| *iid).collect();
@@ -411,6 +590,7 @@ pub async fn generate_candidates(db_pool: &PgPool) -> Result<MatchingStats> {
let stats = MatchingStats {
total_unmatched,
candidates_created,
candidates_rescored,
auto_matched,
skipped_unparseable,
skipped_no_candidates,
@@ -419,6 +599,7 @@ pub async fn generate_candidates(db_pool: &PgPool) -> Result<MatchingStats> {
info!(
total_unmatched = stats.total_unmatched,
candidates_created = stats.candidates_created,
candidates_rescored = stats.candidates_rescored,
auto_matched = stats.auto_matched,
skipped_unparseable = stats.skipped_unparseable,
skipped_no_candidates = stats.skipped_no_candidates,
@@ -444,8 +625,9 @@ mod tests {
1, // unique candidate
50, // decent ratings
);
// dept 1.0*0.50 + unique 1.0*0.30 + volume ~0.97*0.20 ≈ 0.99
// name 1.0*0.50 + dept 1.0*0.25 + unique 1.0*0.15 + volume 1.0*0.10 = 1.0
assert!(ms.score >= 0.85, "Expected score >= 0.85, got {}", ms.score);
assert_eq!(ms.breakdown.name, 1.0);
assert_eq!(ms.breakdown.uniqueness, 1.0);
assert_eq!(ms.breakdown.department, 1.0);
}
+64
View File
@@ -213,6 +213,70 @@ pub async fn insert_job_result(
Ok(())
}
/// Per-subject aggregated stats from recent scrape results.
///
/// Populated by [`fetch_subject_stats`] and converted into
/// [`crate::scraper::adaptive::SubjectStats`] for interval computation.
#[derive(sqlx::FromRow, Debug, Clone)]
pub struct SubjectResultStats {
pub subject: String,
pub recent_runs: i64,
pub avg_change_ratio: f64,
pub consecutive_zero_changes: i64,
pub consecutive_empty_fetches: i64,
pub recent_failure_count: i64,
pub recent_success_count: i64,
pub last_completed: DateTime<Utc>,
}
/// Fetch aggregated per-subject statistics from the last 24 hours of results.
///
/// For each subject, examines the 20 most recent results and computes:
/// - Average change ratio (courses_changed / courses_fetched)
/// - Consecutive zero-change runs from the most recent result
/// - Consecutive empty-fetch runs from the most recent result
/// - Failure and success counts
/// - Last completion timestamp
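///
/// A minimal usage sketch (illustrative only; assumes a configured `PgPool`
/// and the module paths shown here):
///
/// ```no_run
/// # async fn demo(pool: &sqlx::PgPool) {
/// let rows = banner::data::scrape_jobs::fetch_subject_stats(pool)
///     .await
///     .expect("stats query failed");
/// for row in rows {
///     // Convert the DB row into the scheduler's stats type for interval computation.
///     let stats = banner::scraper::adaptive::SubjectStats::from(row);
///     println!("{}: avg change ratio {:.3}", stats.subject, stats.avg_change_ratio);
/// }
/// # }
/// ```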
pub async fn fetch_subject_stats(db_pool: &PgPool) -> Result<Vec<SubjectResultStats>> {
let rows = sqlx::query_as::<_, SubjectResultStats>(
r#"
WITH recent AS (
SELECT payload->>'subject' AS subject, success,
COALESCE(courses_fetched, 0) AS courses_fetched,
COALESCE(courses_changed, 0) AS courses_changed,
completed_at,
ROW_NUMBER() OVER (PARTITION BY payload->>'subject' ORDER BY completed_at DESC) AS rn
FROM scrape_job_results
WHERE target_type = 'Subject' AND completed_at > NOW() - INTERVAL '24 hours'
),
filtered AS (SELECT * FROM recent WHERE rn <= 20),
zero_break AS (
SELECT subject,
MIN(rn) FILTER (WHERE courses_changed > 0 AND success) AS first_nonzero_rn,
MIN(rn) FILTER (WHERE courses_fetched > 0 AND success) AS first_nonempty_rn
FROM filtered GROUP BY subject
)
SELECT
f.subject::TEXT AS subject,
COUNT(*)::BIGINT AS recent_runs,
COALESCE(AVG(CASE WHEN f.success AND f.courses_fetched > 0
THEN f.courses_changed::FLOAT / f.courses_fetched ELSE NULL END), 0.0)::FLOAT8 AS avg_change_ratio,
COALESCE(zb.first_nonzero_rn - 1, COUNT(*) FILTER (WHERE f.success AND f.courses_changed = 0))::BIGINT AS consecutive_zero_changes,
COALESCE(zb.first_nonempty_rn - 1, COUNT(*) FILTER (WHERE f.success AND f.courses_fetched = 0))::BIGINT AS consecutive_empty_fetches,
COUNT(*) FILTER (WHERE NOT f.success)::BIGINT AS recent_failure_count,
COUNT(*) FILTER (WHERE f.success)::BIGINT AS recent_success_count,
MAX(f.completed_at) AS last_completed
FROM filtered f
LEFT JOIN zero_break zb ON f.subject = zb.subject
GROUP BY f.subject, zb.first_nonzero_rn, zb.first_nonempty_rn
"#,
)
.fetch_all(db_pool)
.await?;
Ok(rows)
}
/// Batch insert scrape jobs using UNNEST for a single round-trip.
///
/// All jobs are inserted with `execute_at` set to the current time.
+17 -7
View File
@@ -7,6 +7,9 @@ use sqlx::PgPool;
use super::models::UserSession;
use crate::error::Result;
/// Session lifetime: 7 days (in seconds).
pub const SESSION_DURATION_SECS: u64 = 7 * 24 * 3600;
/// Generate a cryptographically random 32-byte hex token.
fn generate_token() -> String {
let bytes: [u8; 32] = rand::rng().random();
@@ -48,13 +51,21 @@ pub async fn get_session(pool: &PgPool, token: &str) -> Result<Option<UserSessio
.context("failed to get session")
}
/// Update the last-active timestamp for a session.
/// Update the last-active timestamp and extend session expiry (sliding window).
pub async fn touch_session(pool: &PgPool, token: &str) -> Result<()> {
sqlx::query("UPDATE user_sessions SET last_active_at = now() WHERE id = $1")
.bind(token)
.execute(pool)
.await
.context("failed to touch session")?;
sqlx::query(
r#"
UPDATE user_sessions
SET last_active_at = now(),
expires_at = now() + make_interval(secs => $2::double precision)
WHERE id = $1
"#,
)
.bind(token)
.bind(SESSION_DURATION_SECS as f64)
.execute(pool)
.await
.context("failed to touch session")?;
Ok(())
}
@@ -80,7 +91,6 @@ pub async fn delete_user_sessions(pool: &PgPool, user_id: i64) -> Result<u64> {
}
/// Delete all expired sessions. Returns the number of sessions cleaned up.
#[allow(dead_code)] // Called by SessionCache::cleanup_expired (not yet wired to periodic task)
pub async fn cleanup_expired(pool: &PgPool) -> Result<u64> {
let result = sqlx::query("DELETE FROM user_sessions WHERE expires_at <= now()")
.execute(pool)
+1
View File
@@ -1,6 +1,7 @@
pub mod app;
pub mod banner;
pub mod bot;
pub mod calendar;
pub mod cli;
pub mod config;
pub mod data;
+1
View File
@@ -8,6 +8,7 @@ use tracing::info;
mod app;
mod banner;
mod bot;
mod calendar;
mod cli;
mod config;
mod data;
+326
View File
@@ -0,0 +1,326 @@
//! Adaptive scraping interval computation.
//!
//! Assigns per-subject scrape intervals based on recent change rates,
//! consecutive zero-change runs, failure patterns, and time of day.
use chrono::{DateTime, Datelike, Timelike, Utc};
use chrono_tz::US::Central;
use std::time::Duration;
use crate::data::scrape_jobs::SubjectResultStats;
const FLOOR_INTERVAL: Duration = Duration::from_secs(3 * 60);
const MODERATE_HIGH_INTERVAL: Duration = Duration::from_secs(5 * 60);
const MODERATE_LOW_INTERVAL: Duration = Duration::from_secs(15 * 60);
const LOW_CHANGE_INTERVAL: Duration = Duration::from_secs(30 * 60);
const ZERO_5_INTERVAL: Duration = Duration::from_secs(60 * 60);
const ZERO_10_INTERVAL: Duration = Duration::from_secs(2 * 60 * 60);
const CEILING_INTERVAL: Duration = Duration::from_secs(4 * 60 * 60);
const COLD_START_INTERVAL: Duration = FLOOR_INTERVAL;
const PAUSE_PROBE_INTERVAL: Duration = Duration::from_secs(6 * 60 * 60);
const EMPTY_FETCH_PAUSE_THRESHOLD: i64 = 3;
const FAILURE_PAUSE_THRESHOLD: i64 = 5;
/// Aggregated per-subject statistics derived from recent scrape results.
#[derive(Debug, Clone)]
pub struct SubjectStats {
pub subject: String,
pub recent_runs: i64,
pub avg_change_ratio: f64,
pub consecutive_zero_changes: i64,
pub consecutive_empty_fetches: i64,
pub recent_failure_count: i64,
pub recent_success_count: i64,
pub last_completed: DateTime<Utc>,
}
/// Scheduling decision for a subject.
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum SubjectSchedule {
/// Subject is due for scraping, with the computed interval.
Eligible(Duration),
/// Subject was scraped recently; wait for the remaining cooldown.
Cooldown(Duration),
/// Subject is paused due to repeated empty fetches or failures.
Paused,
/// Subject belongs to a past term and should not be scraped.
ReadOnly,
}
impl From<SubjectResultStats> for SubjectStats {
fn from(row: SubjectResultStats) -> Self {
Self {
subject: row.subject,
recent_runs: row.recent_runs,
avg_change_ratio: row.avg_change_ratio,
consecutive_zero_changes: row.consecutive_zero_changes,
consecutive_empty_fetches: row.consecutive_empty_fetches,
recent_failure_count: row.recent_failure_count,
recent_success_count: row.recent_success_count,
last_completed: row.last_completed,
}
}
}
/// Compute the base interval tier from change-rate statistics.
pub fn compute_base_interval(stats: &SubjectStats) -> Duration {
if stats.recent_runs == 0 {
return COLD_START_INTERVAL;
}
// Consecutive-zero tiers take precedence when change ratio is near zero
if stats.avg_change_ratio < 0.001 {
return match stats.consecutive_zero_changes {
0..5 => LOW_CHANGE_INTERVAL,
5..10 => ZERO_5_INTERVAL,
10..20 => ZERO_10_INTERVAL,
_ => CEILING_INTERVAL,
};
}
match stats.avg_change_ratio {
r if r >= 0.10 => FLOOR_INTERVAL,
r if r >= 0.05 => MODERATE_HIGH_INTERVAL,
r if r >= 0.01 => MODERATE_LOW_INTERVAL,
_ => LOW_CHANGE_INTERVAL,
}
}
/// Return a time-of-day multiplier for the given UTC timestamp.
///
/// Peak hours (weekdays 8am-6pm CT) return 1; off-peak (weekdays 6pm-midnight CT)
/// return 2; night (midnight-8am CT) and weekends return 4.
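///
/// An illustrative doctest (assumes this module is exported as
/// `banner::scraper::adaptive`):
///
/// ```
/// use banner::scraper::adaptive::time_of_day_multiplier;
/// use chrono::{TimeZone, Utc};
///
/// // Monday 2025-07-14 15:00 UTC is 10:00 CDT, a peak hour.
/// let peak = Utc.with_ymd_and_hms(2025, 7, 14, 15, 0, 0).unwrap();
/// assert_eq!(time_of_day_multiplier(peak), 1);
///
/// // Saturday 2025-07-12 17:00 UTC is noon CDT, a weekend.
/// let weekend = Utc.with_ymd_and_hms(2025, 7, 12, 17, 0, 0).unwrap();
/// assert_eq!(time_of_day_multiplier(weekend), 4);
/// ```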
pub fn time_of_day_multiplier(now: DateTime<Utc>) -> u32 {
let ct = now.with_timezone(&Central);
let weekday = ct.weekday();
let hour = ct.hour();
// Weekends get the slowest multiplier
if matches!(weekday, chrono::Weekday::Sat | chrono::Weekday::Sun) {
return 4;
}
match hour {
8..18 => 1, // peak
18..24 => 2, // off-peak
_ => 4, // night (0..8)
}
}
/// Evaluate whether a subject should be scraped now.
///
/// Combines base interval, time-of-day multiplier, pause detection (empty
/// fetches / consecutive failures), and past-term read-only status.
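///
/// An illustrative doctest (same path assumption as above):
///
/// ```
/// use banner::scraper::adaptive::{SubjectSchedule, SubjectStats, evaluate_subject};
/// use chrono::Utc;
///
/// let stats = SubjectStats {
///     subject: "CS".into(),
///     recent_runs: 10,
///     avg_change_ratio: 0.0,
///     consecutive_zero_changes: 0,
///     consecutive_empty_fetches: 0,
///     recent_failure_count: 0,
///     recent_success_count: 10,
///     last_completed: Utc::now(),
/// };
/// // Past-term subjects are never scheduled, regardless of their stats.
/// assert_eq!(evaluate_subject(&stats, Utc::now(), true), SubjectSchedule::ReadOnly);
/// ```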
pub fn evaluate_subject(
stats: &SubjectStats,
now: DateTime<Utc>,
is_past_term: bool,
) -> SubjectSchedule {
if is_past_term {
return SubjectSchedule::ReadOnly;
}
let elapsed = (now - stats.last_completed)
.to_std()
.unwrap_or(Duration::ZERO);
let probe_due = elapsed >= PAUSE_PROBE_INTERVAL;
// Pause on repeated empty fetches
if stats.consecutive_empty_fetches >= EMPTY_FETCH_PAUSE_THRESHOLD {
return if probe_due {
SubjectSchedule::Eligible(PAUSE_PROBE_INTERVAL)
} else {
SubjectSchedule::Paused
};
}
// Pause on all-failures
if stats.recent_success_count == 0 && stats.recent_failure_count >= FAILURE_PAUSE_THRESHOLD {
return if probe_due {
SubjectSchedule::Eligible(PAUSE_PROBE_INTERVAL)
} else {
SubjectSchedule::Paused
};
}
let base = compute_base_interval(stats);
let multiplier = time_of_day_multiplier(now);
let effective = base * multiplier;
if elapsed >= effective {
SubjectSchedule::Eligible(effective)
} else {
let remaining = effective - elapsed;
SubjectSchedule::Cooldown(remaining)
}
}
#[cfg(test)]
mod tests {
use super::*;
use chrono::TimeZone;
/// Create a default `SubjectStats` for testing. Callers mutate fields as needed.
fn make_stats(subject: &str) -> SubjectStats {
SubjectStats {
subject: subject.to_string(),
recent_runs: 10,
avg_change_ratio: 0.0,
consecutive_zero_changes: 0,
consecutive_empty_fetches: 0,
recent_failure_count: 0,
recent_success_count: 10,
last_completed: Utc::now() - chrono::Duration::hours(1),
}
}
// -- compute_base_interval tests --
#[test]
fn test_cold_start_returns_floor() {
let mut stats = make_stats("CS");
stats.recent_runs = 0;
assert_eq!(compute_base_interval(&stats), COLD_START_INTERVAL);
}
#[test]
fn test_high_change_rate() {
let mut stats = make_stats("CS");
stats.avg_change_ratio = 0.15;
assert_eq!(compute_base_interval(&stats), FLOOR_INTERVAL);
}
#[test]
fn test_moderate_high_change() {
let mut stats = make_stats("CS");
stats.avg_change_ratio = 0.07;
assert_eq!(compute_base_interval(&stats), MODERATE_HIGH_INTERVAL);
}
#[test]
fn test_moderate_low_change() {
let mut stats = make_stats("CS");
stats.avg_change_ratio = 0.03;
assert_eq!(compute_base_interval(&stats), MODERATE_LOW_INTERVAL);
}
#[test]
fn test_low_change() {
let mut stats = make_stats("CS");
stats.avg_change_ratio = 0.005;
assert_eq!(compute_base_interval(&stats), LOW_CHANGE_INTERVAL);
}
#[test]
fn test_zero_5_consecutive() {
let mut stats = make_stats("CS");
stats.avg_change_ratio = 0.0;
stats.consecutive_zero_changes = 5;
assert_eq!(compute_base_interval(&stats), ZERO_5_INTERVAL);
}
#[test]
fn test_zero_10_consecutive() {
let mut stats = make_stats("CS");
stats.avg_change_ratio = 0.0;
stats.consecutive_zero_changes = 10;
assert_eq!(compute_base_interval(&stats), ZERO_10_INTERVAL);
}
#[test]
fn test_zero_20_consecutive() {
let mut stats = make_stats("CS");
stats.avg_change_ratio = 0.0;
stats.consecutive_zero_changes = 20;
assert_eq!(compute_base_interval(&stats), CEILING_INTERVAL);
}
// -- evaluate_subject tests --
#[test]
fn test_pause_empty_fetches() {
let mut stats = make_stats("CS");
stats.consecutive_empty_fetches = 3;
stats.last_completed = Utc::now() - chrono::Duration::minutes(10);
let result = evaluate_subject(&stats, Utc::now(), false);
assert_eq!(result, SubjectSchedule::Paused);
}
#[test]
fn test_pause_all_failures() {
let mut stats = make_stats("CS");
stats.recent_success_count = 0;
stats.recent_failure_count = 5;
stats.last_completed = Utc::now() - chrono::Duration::minutes(10);
let result = evaluate_subject(&stats, Utc::now(), false);
assert_eq!(result, SubjectSchedule::Paused);
}
#[test]
fn test_probe_after_pause() {
let mut stats = make_stats("CS");
stats.consecutive_empty_fetches = 5;
stats.last_completed = Utc::now() - chrono::Duration::hours(7);
let result = evaluate_subject(&stats, Utc::now(), false);
assert_eq!(result, SubjectSchedule::Eligible(PAUSE_PROBE_INTERVAL));
}
#[test]
fn test_read_only_past_term() {
let stats = make_stats("CS");
let result = evaluate_subject(&stats, Utc::now(), true);
assert_eq!(result, SubjectSchedule::ReadOnly);
}
#[test]
fn test_cooldown_not_elapsed() {
let mut stats = make_stats("CS");
stats.avg_change_ratio = 0.15; // floor = 3 min
// Use a peak-hours timestamp so multiplier = 1
let peak = Utc.with_ymd_and_hms(2025, 7, 14, 15, 0, 0).unwrap(); // Mon 10am CT
stats.last_completed = peak - chrono::Duration::seconds(30);
let result = evaluate_subject(&stats, peak, false);
assert!(matches!(result, SubjectSchedule::Cooldown(_)));
}
#[test]
fn test_eligible_elapsed() {
let mut stats = make_stats("CS");
stats.avg_change_ratio = 0.15; // floor = 3 min
let peak = Utc.with_ymd_and_hms(2025, 7, 14, 15, 0, 0).unwrap(); // Mon 10am CT
stats.last_completed = peak - chrono::Duration::minutes(5);
let result = evaluate_subject(&stats, peak, false);
assert!(matches!(result, SubjectSchedule::Eligible(_)));
}
// -- time_of_day_multiplier tests --
#[test]
fn test_time_multiplier_peak() {
// Monday 10am CT = 15:00 UTC
let dt = Utc.with_ymd_and_hms(2025, 7, 14, 15, 0, 0).unwrap();
assert_eq!(time_of_day_multiplier(dt), 1);
}
#[test]
fn test_time_multiplier_offpeak() {
// Monday 2025-07-14 8pm CDT = Tuesday 01:00 UTC, which falls in the off-peak window
let dt = Utc.with_ymd_and_hms(2025, 7, 15, 1, 0, 0).unwrap();
assert_eq!(time_of_day_multiplier(dt), 2);
}
#[test]
fn test_time_multiplier_night() {
// 3am CT = 08:00 UTC
let dt = Utc.with_ymd_and_hms(2025, 7, 14, 8, 0, 0).unwrap();
assert_eq!(time_of_day_multiplier(dt), 4);
}
#[test]
fn test_time_multiplier_weekend() {
// Saturday noon CT = 17:00 UTC
let dt = Utc.with_ymd_and_hms(2025, 7, 12, 17, 0, 0).unwrap();
assert_eq!(time_of_day_multiplier(dt), 4);
}
}
+1
View File
@@ -1,3 +1,4 @@
pub mod adaptive;
pub mod jobs;
pub mod scheduler;
pub mod worker;
+74 -18
View File
@@ -3,11 +3,14 @@ use crate::data::models::{ReferenceData, ScrapePriority, TargetType};
use crate::data::scrape_jobs;
use crate::error::Result;
use crate::rmp::RmpClient;
use crate::scraper::adaptive::{SubjectSchedule, SubjectStats, evaluate_subject};
use crate::scraper::jobs::subject::SubjectJob;
use crate::state::ReferenceCache;
use crate::web::ws::{ScrapeJobDto, ScrapeJobEvent};
use chrono::{DateTime, Utc};
use serde_json::json;
use sqlx::PgPool;
use std::collections::HashMap;
use std::sync::Arc;
use std::time::{Duration, Instant};
use tokio::sync::{RwLock, broadcast};
@@ -148,10 +151,9 @@ impl Scheduler {
/// Core scheduling logic that analyzes data and creates scrape jobs.
///
/// Strategy:
/// 1. Fetch all subjects for the current term from Banner API
/// 2. Query existing jobs in a single batch query
/// 3. Create jobs only for subjects that don't have pending jobs
/// Uses adaptive scheduling to determine per-subject scrape intervals based
/// on recent change rates, failure patterns, and time of day. Only subjects
/// that are eligible (i.e. their cooldown has elapsed) are enqueued.
///
/// This is a static method (not &self) to allow it to be called from spawned tasks.
#[tracing::instrument(skip_all, fields(term))]
@@ -160,10 +162,6 @@ impl Scheduler {
banner_api: &BannerApi,
job_events_tx: Option<&broadcast::Sender<ScrapeJobEvent>>,
) -> Result<()> {
// For now, we will implement a simple baseline scheduling strategy:
// 1. Get a list of all subjects from the Banner API.
// 2. Query existing jobs for all subjects in a single query.
// 3. Create new jobs only for subjects that don't have existing jobs.
let term = Term::get_current().inner().to_string();
tracing::Span::current().record("term", term.as_str());
@@ -175,13 +173,70 @@ impl Scheduler {
"Retrieved subjects from API"
);
// Create payloads for all subjects
let subject_payloads: Vec<_> = subjects
.iter()
.map(|subject| json!({ "subject": subject.code }))
// Fetch per-subject stats and build a lookup map
let stats_rows = scrape_jobs::fetch_subject_stats(db_pool).await?;
let stats_map: HashMap<String, SubjectStats> = stats_rows
.into_iter()
.map(|row| {
let subject = row.subject.clone();
(subject, SubjectStats::from(row))
})
.collect();
// Query existing jobs for all subjects in a single query
// Evaluate each subject using adaptive scheduling
let now = Utc::now();
let is_past_term = false; // Scheduler currently only fetches current term subjects
let mut eligible_subjects: Vec<String> = Vec::new();
let mut cooldown_count: usize = 0;
let mut paused_count: usize = 0;
let mut read_only_count: usize = 0;
for subject in &subjects {
let stats = stats_map.get(&subject.code).cloned().unwrap_or_else(|| {
// Cold start: no history for this subject
SubjectStats {
subject: subject.code.clone(),
recent_runs: 0,
avg_change_ratio: 0.0,
consecutive_zero_changes: 0,
consecutive_empty_fetches: 0,
recent_failure_count: 0,
recent_success_count: 0,
last_completed: DateTime::<Utc>::MIN_UTC,
}
});
match evaluate_subject(&stats, now, is_past_term) {
SubjectSchedule::Eligible(_) => {
eligible_subjects.push(subject.code.clone());
}
SubjectSchedule::Cooldown(_) => cooldown_count += 1,
SubjectSchedule::Paused => paused_count += 1,
SubjectSchedule::ReadOnly => read_only_count += 1,
}
}
info!(
total = subjects.len(),
eligible = eligible_subjects.len(),
cooldown = cooldown_count,
paused = paused_count,
read_only = read_only_count,
"Adaptive scheduling decisions"
);
if eligible_subjects.is_empty() {
debug!("No eligible subjects to schedule");
return Ok(());
}
// Create payloads only for eligible subjects
let subject_payloads: Vec<_> = eligible_subjects
.iter()
.map(|code| json!({ "subject": code }))
.collect();
// Query existing jobs for eligible subjects only
let existing_payloads = scrape_jobs::find_existing_job_payloads(
TargetType::Subject,
&subject_payloads,
@@ -189,12 +244,12 @@ impl Scheduler {
)
.await?;
// Filter out subjects that already have jobs and prepare new jobs
// Filter out subjects that already have pending jobs
let mut skipped_count = 0;
let new_jobs: Vec<_> = subjects
let new_jobs: Vec<_> = eligible_subjects
.into_iter()
.filter_map(|subject| {
let job = SubjectJob::new(subject.code.clone());
.filter_map(|subject_code| {
let job = SubjectJob::new(subject_code.clone());
let payload = serde_json::to_value(&job).unwrap();
let payload_str = payload.to_string();
@@ -202,7 +257,7 @@ impl Scheduler {
skipped_count += 1;
None
} else {
Some((payload, subject.code))
Some((payload, subject_code))
}
})
.collect();
@@ -255,6 +310,7 @@ impl Scheduler {
total,
stats.total_unmatched,
stats.candidates_created,
stats.candidates_rescored,
stats.auto_matched,
stats.skipped_unparseable,
stats.skipped_no_candidates,
+34
View File
@@ -51,6 +51,33 @@ impl WebService {
}
}
}
/// Periodically cleans up expired sessions from the database and in-memory cache.
async fn session_cleanup_loop(state: AppState, mut shutdown_rx: broadcast::Receiver<()>) {
use std::time::Duration;
// Run every hour
let mut interval = tokio::time::interval(Duration::from_secs(3600));
loop {
tokio::select! {
_ = interval.tick() => {
match state.session_cache.cleanup_expired().await {
Ok(deleted) => {
if deleted > 0 {
info!(deleted, "cleaned up expired sessions");
}
}
Err(e) => {
warn!(error = %e, "session cleanup failed");
}
}
}
_ = shutdown_rx.recv() => {
break;
}
}
}
}
}
#[async_trait::async_trait]
@@ -87,6 +114,13 @@ impl Service for WebService {
Self::db_health_check_loop(health_state, health_shutdown_rx).await;
});
// Spawn session cleanup task
let cleanup_state = self.app_state.clone();
let cleanup_shutdown_rx = shutdown_tx.subscribe();
tokio::spawn(async move {
Self::session_cleanup_loop(cleanup_state, cleanup_shutdown_rx).await;
});
// Use axum's graceful shutdown with the internal shutdown signal
axum::serve(listener, app)
.with_graceful_shutdown(async move {
+4
View File
@@ -4,6 +4,7 @@ use crate::banner::BannerApi;
use crate::banner::Course;
use crate::data::models::ReferenceData;
use crate::status::ServiceStatusRegistry;
use crate::web::schedule_cache::ScheduleCache;
use crate::web::session_cache::{OAuthStateStore, SessionCache};
use crate::web::ws::ScrapeJobEvent;
use anyhow::Result;
@@ -76,12 +77,14 @@ pub struct AppState {
pub reference_cache: Arc<RwLock<ReferenceCache>>,
pub session_cache: SessionCache,
pub oauth_state_store: OAuthStateStore,
pub schedule_cache: ScheduleCache,
pub scrape_job_tx: broadcast::Sender<ScrapeJobEvent>,
}
impl AppState {
pub fn new(banner_api: Arc<BannerApi>, db_pool: PgPool) -> Self {
let (scrape_job_tx, _) = broadcast::channel(64);
let schedule_cache = ScheduleCache::new(db_pool.clone());
Self {
session_cache: SessionCache::new(db_pool.clone()),
oauth_state_store: OAuthStateStore::new(),
@@ -89,6 +92,7 @@ impl AppState {
db_pool,
service_statuses: ServiceStatusRegistry::new(),
reference_cache: Arc::new(RwLock::new(ReferenceCache::new())),
schedule_cache,
scrape_job_tx,
}
}
+12 -50
View File
@@ -180,6 +180,7 @@ pub struct InstructorDetailResponse {
pub struct RescoreResponse {
pub total_unmatched: usize,
pub candidates_created: usize,
pub candidates_rescored: usize,
pub auto_matched: usize,
pub skipped_unparseable: usize,
pub skipped_no_candidates: usize,
@@ -768,16 +769,10 @@ pub async fn unmatch_instructor(
) -> Result<Json<OkResponse>, (StatusCode, Json<Value>)> {
let rmp_legacy_id = body.and_then(|b| b.rmp_legacy_id);
let mut tx = state
.db_pool
.begin()
.await
.map_err(|e| db_error("failed to begin transaction", e))?;
// Verify instructor exists
let exists: Option<(i32,)> = sqlx::query_as("SELECT id FROM instructors WHERE id = $1")
.bind(id)
.fetch_optional(&mut *tx)
.fetch_optional(&state.db_pool)
.await
.map_err(|e| db_error("failed to check instructor", e))?;
@@ -788,50 +783,16 @@ pub async fn unmatch_instructor(
));
}
// Delete specific link or all links
if let Some(legacy_id) = rmp_legacy_id {
let result = sqlx::query(
"DELETE FROM instructor_rmp_links WHERE instructor_id = $1 AND rmp_legacy_id = $2",
)
.bind(id)
.bind(legacy_id)
.execute(&mut *tx)
// Use the data layer function to perform the unmatch
crate::data::rmp::unmatch_instructor(&state.db_pool, id, rmp_legacy_id)
.await
.map_err(|e| db_error("failed to remove rmp link", e))?;
if result.rows_affected() == 0 {
return Err((
StatusCode::NOT_FOUND,
Json(json!({"error": "link not found for this instructor"})),
));
}
} else {
sqlx::query("DELETE FROM instructor_rmp_links WHERE instructor_id = $1")
.bind(id)
.execute(&mut *tx)
.await
.map_err(|e| db_error("failed to remove rmp links", e))?;
}
// Check if any links remain; update status accordingly
let (remaining,): (i64,) =
sqlx::query_as("SELECT COUNT(*) FROM instructor_rmp_links WHERE instructor_id = $1")
.bind(id)
.fetch_one(&mut *tx)
.await
.map_err(|e| db_error("failed to count remaining links", e))?;
if remaining == 0 {
sqlx::query("UPDATE instructors SET rmp_match_status = 'unmatched' WHERE id = $1")
.bind(id)
.execute(&mut *tx)
.await
.map_err(|e| db_error("failed to update instructor status", e))?;
}
tx.commit()
.await
.map_err(|e| db_error("failed to commit transaction", e))?;
.map_err(|e| {
tracing::error!(error = %e, "failed to unmatch instructor");
(
StatusCode::INTERNAL_SERVER_ERROR,
Json(json!({"error": "failed to unmatch instructor"})),
)
})?;
Ok(Json(OkResponse { ok: true }))
}
@@ -858,6 +819,7 @@ pub async fn rescore(
Ok(Json(RescoreResponse {
total_unmatched: stats.total_unmatched,
candidates_created: stats.candidates_created,
candidates_rescored: stats.candidates_rescored,
auto_matched: stats.auto_matched,
skipped_unparseable: stats.skipped_unparseable,
skipped_no_candidates: stats.skipped_no_candidates,
+523
View File
@@ -0,0 +1,523 @@
//! Admin API handlers for scraper observability.
//!
//! All endpoints require the `AdminUser` extractor, returning 401/403 as needed.
use axum::extract::{Path, Query, State};
use axum::http::StatusCode;
use axum::response::Json;
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
use serde_json::json;
use sqlx::Row;
use ts_rs::TS;
use crate::banner::models::terms::Term;
use crate::data::scrape_jobs;
use crate::scraper::adaptive::{self, SubjectSchedule, SubjectStats};
use crate::state::AppState;
use crate::web::extractors::AdminUser;
type ApiError = (StatusCode, Json<serde_json::Value>);
fn parse_period(period: &str) -> Result<chrono::Duration, ApiError> {
match period {
"1h" => Ok(chrono::Duration::hours(1)),
"6h" => Ok(chrono::Duration::hours(6)),
"24h" => Ok(chrono::Duration::hours(24)),
"7d" => Ok(chrono::Duration::days(7)),
"30d" => Ok(chrono::Duration::days(30)),
_ => Err((
StatusCode::BAD_REQUEST,
Json(
json!({"error": format!("Invalid period '{period}'. Valid: 1h, 6h, 24h, 7d, 30d")}),
),
)),
}
}
fn period_to_interval_str(period: &str) -> &'static str {
match period {
"1h" => "1 hour",
"6h" => "6 hours",
"24h" => "24 hours",
"7d" => "7 days",
"30d" => "30 days",
_ => "24 hours",
}
}
fn parse_bucket(bucket: &str) -> Result<&'static str, ApiError> {
match bucket {
"1m" => Ok("1 minute"),
"5m" => Ok("5 minutes"),
"15m" => Ok("15 minutes"),
"1h" => Ok("1 hour"),
"6h" => Ok("6 hours"),
_ => Err((
StatusCode::BAD_REQUEST,
Json(
json!({"error": format!("Invalid bucket '{bucket}'. Valid: 1m, 5m, 15m, 1h, 6h")}),
),
)),
}
}
fn default_bucket_for_period(period: &str) -> &'static str {
match period {
"1h" => "1m",
"6h" => "5m",
"24h" => "15m",
"7d" => "1h",
"30d" => "6h",
_ => "15m",
}
}
// ---------------------------------------------------------------------------
// Endpoint 1: GET /api/admin/scraper/stats
// ---------------------------------------------------------------------------
#[derive(Deserialize)]
pub struct StatsParams {
#[serde(default = "default_period")]
period: String,
}
fn default_period() -> String {
"24h".to_string()
}
#[derive(Serialize, TS)]
#[ts(export)]
#[serde(rename_all = "camelCase")]
pub struct ScraperStatsResponse {
period: String,
#[ts(type = "number")]
total_scrapes: i64,
#[ts(type = "number")]
successful_scrapes: i64,
#[ts(type = "number")]
failed_scrapes: i64,
success_rate: Option<f64>,
avg_duration_ms: Option<f64>,
#[ts(type = "number")]
total_courses_changed: i64,
#[ts(type = "number")]
total_courses_fetched: i64,
#[ts(type = "number")]
total_audits_generated: i64,
#[ts(type = "number")]
pending_jobs: i64,
#[ts(type = "number")]
locked_jobs: i64,
}
pub async fn scraper_stats(
_admin: AdminUser,
State(state): State<AppState>,
Query(params): Query<StatsParams>,
) -> Result<Json<ScraperStatsResponse>, ApiError> {
let _duration = parse_period(&params.period)?;
let interval_str = period_to_interval_str(&params.period);
let row = sqlx::query(
"SELECT \
COUNT(*) AS total_scrapes, \
COUNT(*) FILTER (WHERE success) AS successful_scrapes, \
COUNT(*) FILTER (WHERE NOT success) AS failed_scrapes, \
(AVG(duration_ms) FILTER (WHERE success))::FLOAT8 AS avg_duration_ms, \
COALESCE(SUM(courses_changed) FILTER (WHERE success), 0) AS total_courses_changed, \
COALESCE(SUM(courses_fetched) FILTER (WHERE success), 0) AS total_courses_fetched, \
COALESCE(SUM(audits_generated) FILTER (WHERE success), 0) AS total_audits_generated \
FROM scrape_job_results \
WHERE completed_at > NOW() - $1::interval",
)
.bind(interval_str)
.fetch_one(&state.db_pool)
.await
.map_err(|e| {
tracing::error!(error = %e, "Failed to fetch scraper stats");
(
StatusCode::INTERNAL_SERVER_ERROR,
Json(json!({"error": "Failed to fetch scraper stats"})),
)
})?;
let total_scrapes: i64 = row.get("total_scrapes");
let successful_scrapes: i64 = row.get("successful_scrapes");
let failed_scrapes: i64 = row.get("failed_scrapes");
let avg_duration_ms: Option<f64> = row.get("avg_duration_ms");
let total_courses_changed: i64 = row.get("total_courses_changed");
let total_courses_fetched: i64 = row.get("total_courses_fetched");
let total_audits_generated: i64 = row.get("total_audits_generated");
let queue_row = sqlx::query(
"SELECT \
COUNT(*) FILTER (WHERE locked_at IS NULL) AS pending_jobs, \
COUNT(*) FILTER (WHERE locked_at IS NOT NULL) AS locked_jobs \
FROM scrape_jobs",
)
.fetch_one(&state.db_pool)
.await
.map_err(|e| {
tracing::error!(error = %e, "Failed to fetch queue stats");
(
StatusCode::INTERNAL_SERVER_ERROR,
Json(json!({"error": "Failed to fetch queue stats"})),
)
})?;
let pending_jobs: i64 = queue_row.get("pending_jobs");
let locked_jobs: i64 = queue_row.get("locked_jobs");
let success_rate = if total_scrapes > 0 {
Some(successful_scrapes as f64 / total_scrapes as f64)
} else {
None
};
Ok(Json(ScraperStatsResponse {
period: params.period,
total_scrapes,
successful_scrapes,
failed_scrapes,
success_rate,
avg_duration_ms,
total_courses_changed,
total_courses_fetched,
total_audits_generated,
pending_jobs,
locked_jobs,
}))
}
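// Example query, sketched from `parse_period` and the route registration below
// (not part of this change): an admin client can call
// `GET /api/admin/scraper/stats?period=7d`; omitting `period` falls back to the
// 24h default from `default_period`.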
// ---------------------------------------------------------------------------
// Endpoint 2: GET /api/admin/scraper/timeseries
// ---------------------------------------------------------------------------
#[derive(Deserialize)]
pub struct TimeseriesParams {
#[serde(default = "default_period")]
period: String,
bucket: Option<String>,
}
#[derive(Serialize, TS)]
#[ts(export)]
#[serde(rename_all = "camelCase")]
pub struct TimeseriesResponse {
period: String,
bucket: String,
points: Vec<TimeseriesPoint>,
}
#[derive(Serialize, TS)]
#[ts(export)]
#[serde(rename_all = "camelCase")]
pub struct TimeseriesPoint {
timestamp: DateTime<Utc>,
#[ts(type = "number")]
scrape_count: i64,
#[ts(type = "number")]
success_count: i64,
#[ts(type = "number")]
error_count: i64,
#[ts(type = "number")]
courses_changed: i64,
avg_duration_ms: f64,
}
pub async fn scraper_timeseries(
_admin: AdminUser,
State(state): State<AppState>,
Query(params): Query<TimeseriesParams>,
) -> Result<Json<TimeseriesResponse>, ApiError> {
let _duration = parse_period(&params.period)?;
let period_interval = period_to_interval_str(&params.period);
let bucket_code = match &params.bucket {
Some(b) => {
// Validate the bucket
parse_bucket(b)?;
b.as_str()
}
None => default_bucket_for_period(&params.period),
};
let bucket_interval = parse_bucket(bucket_code)?;
let rows = sqlx::query(
"WITH buckets AS ( \
SELECT generate_series( \
date_bin($1::interval, NOW() - $2::interval, '2020-01-01'::timestamptz), \
date_bin($1::interval, NOW(), '2020-01-01'::timestamptz), \
$1::interval \
) AS bucket_start \
), \
raw AS ( \
SELECT date_bin($1::interval, completed_at, '2020-01-01'::timestamptz) AS bucket_start, \
COUNT(*)::BIGINT AS scrape_count, \
COUNT(*) FILTER (WHERE success)::BIGINT AS success_count, \
COUNT(*) FILTER (WHERE NOT success)::BIGINT AS error_count, \
COALESCE(SUM(courses_changed) FILTER (WHERE success), 0)::BIGINT AS courses_changed, \
COALESCE(AVG(duration_ms) FILTER (WHERE success), 0)::FLOAT8 AS avg_duration_ms \
FROM scrape_job_results \
WHERE completed_at > NOW() - $2::interval \
GROUP BY 1 \
) \
SELECT b.bucket_start, \
COALESCE(r.scrape_count, 0) AS scrape_count, \
COALESCE(r.success_count, 0) AS success_count, \
COALESCE(r.error_count, 0) AS error_count, \
COALESCE(r.courses_changed, 0) AS courses_changed, \
COALESCE(r.avg_duration_ms, 0) AS avg_duration_ms \
FROM buckets b \
LEFT JOIN raw r ON b.bucket_start = r.bucket_start \
ORDER BY b.bucket_start",
)
.bind(bucket_interval)
.bind(period_interval)
.fetch_all(&state.db_pool)
.await
.map_err(|e| {
tracing::error!(error = %e, "Failed to fetch scraper timeseries");
(
StatusCode::INTERNAL_SERVER_ERROR,
Json(json!({"error": "Failed to fetch scraper timeseries"})),
)
})?;
let points = rows
.iter()
.map(|row| TimeseriesPoint {
timestamp: row.get("bucket_start"),
scrape_count: row.get("scrape_count"),
success_count: row.get("success_count"),
error_count: row.get("error_count"),
courses_changed: row.get("courses_changed"),
avg_duration_ms: row.get("avg_duration_ms"),
})
.collect();
Ok(Json(TimeseriesResponse {
period: params.period,
bucket: bucket_code.to_string(),
points,
}))
}
// ---------------------------------------------------------------------------
// Endpoint 3: GET /api/admin/scraper/subjects
// ---------------------------------------------------------------------------
#[derive(Serialize, TS)]
#[ts(export)]
#[serde(rename_all = "camelCase")]
pub struct SubjectsResponse {
subjects: Vec<SubjectSummary>,
}
#[derive(Serialize, TS)]
#[ts(export)]
#[serde(rename_all = "camelCase")]
pub struct SubjectSummary {
subject: String,
subject_description: Option<String>,
#[ts(type = "number")]
tracked_course_count: i64,
schedule_state: String,
#[ts(type = "number")]
current_interval_secs: u64,
time_multiplier: u32,
last_scraped: DateTime<Utc>,
next_eligible_at: Option<DateTime<Utc>>,
#[ts(type = "number | null")]
cooldown_remaining_secs: Option<u64>,
avg_change_ratio: f64,
#[ts(type = "number")]
consecutive_zero_changes: i64,
#[ts(type = "number")]
recent_runs: i64,
#[ts(type = "number")]
recent_failures: i64,
}
pub async fn scraper_subjects(
_admin: AdminUser,
State(state): State<AppState>,
) -> Result<Json<SubjectsResponse>, ApiError> {
let raw_stats = scrape_jobs::fetch_subject_stats(&state.db_pool)
.await
.map_err(|e| {
tracing::error!(error = %e, "Failed to fetch subject stats");
(
StatusCode::INTERNAL_SERVER_ERROR,
Json(json!({"error": "Failed to fetch subject stats"})),
)
})?;
let now = Utc::now();
let multiplier = adaptive::time_of_day_multiplier(now);
// Look up subject descriptions from the reference cache
let ref_cache = state.reference_cache.read().await;
// Count tracked courses per subject for the current term
let term = Term::get_current().inner().to_string();
let course_counts: std::collections::HashMap<String, i64> = sqlx::query_as(
"SELECT subject, COUNT(*)::BIGINT AS cnt FROM courses WHERE term_code = $1 GROUP BY subject",
)
.bind(&term)
.fetch_all(&state.db_pool)
.await
.map_err(|e| {
tracing::error!(error = %e, "Failed to fetch course counts");
(
StatusCode::INTERNAL_SERVER_ERROR,
Json(json!({"error": "Failed to fetch course counts"})),
)
})?
.into_iter()
.map(|(subject, cnt): (String, i64)| (subject, cnt))
.collect();
let subjects = raw_stats
.into_iter()
.map(|row| {
let stats: SubjectStats = row.into();
let schedule = adaptive::evaluate_subject(&stats, now, false);
let base_interval = adaptive::compute_base_interval(&stats);
let schedule_state = match &schedule {
SubjectSchedule::Eligible(_) => "eligible",
SubjectSchedule::Cooldown(_) => "cooldown",
SubjectSchedule::Paused => "paused",
SubjectSchedule::ReadOnly => "read_only",
};
let current_interval_secs = base_interval.as_secs() * multiplier as u64;
let (next_eligible_at, cooldown_remaining_secs) = match &schedule {
SubjectSchedule::Eligible(_) => (Some(now), Some(0)),
SubjectSchedule::Cooldown(remaining) => {
let remaining_secs = remaining.as_secs();
(
Some(now + chrono::Duration::seconds(remaining_secs as i64)),
Some(remaining_secs),
)
}
SubjectSchedule::Paused | SubjectSchedule::ReadOnly => (None, None),
};
let subject_description = ref_cache
.lookup("subject", &stats.subject)
.map(|s| s.to_string());
let tracked_course_count = course_counts.get(&stats.subject).copied().unwrap_or(0);
SubjectSummary {
subject: stats.subject,
subject_description,
tracked_course_count,
schedule_state: schedule_state.to_string(),
current_interval_secs,
time_multiplier: multiplier,
last_scraped: stats.last_completed,
next_eligible_at,
cooldown_remaining_secs,
avg_change_ratio: stats.avg_change_ratio,
consecutive_zero_changes: stats.consecutive_zero_changes,
recent_runs: stats.recent_runs,
recent_failures: stats.recent_failure_count,
}
})
.collect();
Ok(Json(SubjectsResponse { subjects }))
}
// ---------------------------------------------------------------------------
// Endpoint 4: GET /api/admin/scraper/subjects/{subject}
// ---------------------------------------------------------------------------
#[derive(Deserialize)]
pub struct SubjectDetailParams {
#[serde(default = "default_detail_limit")]
limit: i32,
}
fn default_detail_limit() -> i32 {
50
}
#[derive(Serialize, TS)]
#[ts(export)]
#[serde(rename_all = "camelCase")]
pub struct SubjectDetailResponse {
subject: String,
results: Vec<SubjectResultEntry>,
}
#[derive(Serialize, TS)]
#[ts(export)]
#[serde(rename_all = "camelCase")]
pub struct SubjectResultEntry {
#[ts(type = "number")]
id: i64,
completed_at: DateTime<Utc>,
duration_ms: i32,
success: bool,
error_message: Option<String>,
courses_fetched: Option<i32>,
courses_changed: Option<i32>,
courses_unchanged: Option<i32>,
audits_generated: Option<i32>,
metrics_generated: Option<i32>,
}
pub async fn scraper_subject_detail(
_admin: AdminUser,
State(state): State<AppState>,
Path(subject): Path<String>,
Query(params): Query<SubjectDetailParams>,
) -> Result<Json<SubjectDetailResponse>, ApiError> {
let limit = params.limit.clamp(1, 200);
let rows = sqlx::query(
"SELECT id, completed_at, duration_ms, success, error_message, \
courses_fetched, courses_changed, courses_unchanged, \
audits_generated, metrics_generated \
FROM scrape_job_results \
WHERE target_type = 'Subject' AND payload->>'subject' = $1 \
ORDER BY completed_at DESC \
LIMIT $2",
)
.bind(&subject)
.bind(limit)
.fetch_all(&state.db_pool)
.await
.map_err(|e| {
tracing::error!(error = %e, subject = %subject, "Failed to fetch subject detail");
(
StatusCode::INTERNAL_SERVER_ERROR,
Json(json!({"error": "Failed to fetch subject detail"})),
)
})?;
let results = rows
.iter()
.map(|row| SubjectResultEntry {
id: row.get("id"),
completed_at: row.get("completed_at"),
duration_ms: row.get("duration_ms"),
success: row.get("success"),
error_message: row.get("error_message"),
courses_fetched: row.get("courses_fetched"),
courses_changed: row.get("courses_changed"),
courses_unchanged: row.get("courses_unchanged"),
audits_generated: row.get("audits_generated"),
metrics_generated: row.get("metrics_generated"),
})
.collect();
Ok(Json(SubjectDetailResponse { subject, results }))
}
+6 -2
View File
@@ -235,7 +235,7 @@ pub async fn auth_callback(
let session = crate::data::sessions::create_session(
&state.db_pool,
discord_id,
Duration::from_secs(7 * 24 * 3600),
Duration::from_secs(crate::data::sessions::SESSION_DURATION_SECS),
)
.await
.map_err(|e| {
@@ -248,7 +248,11 @@ pub async fn auth_callback(
// 6. Build response with session cookie
let secure = redirect_uri.starts_with("https://");
let cookie = session_cookie(&session.id, 604800, secure);
let cookie = session_cookie(
&session.id,
crate::data::sessions::SESSION_DURATION_SECS as i64,
secure,
);
let redirect_to = if user.is_admin { "/admin" } else { "/" };
+136
View File
@@ -0,0 +1,136 @@
//! Web API endpoints for calendar export (ICS download + Google Calendar redirect).
use axum::{
extract::{Path, State},
http::{StatusCode, header},
response::{IntoResponse, Redirect, Response},
};
use crate::calendar::{CalendarCourse, generate_gcal_url, generate_ics};
use crate::data::models::DbMeetingTime;
use crate::state::AppState;
/// Fetch course + meeting times, build a `CalendarCourse`.
async fn load_calendar_course(
state: &AppState,
term: &str,
crn: &str,
) -> Result<(CalendarCourse, Vec<DbMeetingTime>), (StatusCode, String)> {
let course = crate::data::courses::get_course_by_crn(&state.db_pool, crn, term)
.await
.map_err(|e| {
tracing::error!(error = %e, "Calendar: course lookup failed");
(
StatusCode::INTERNAL_SERVER_ERROR,
"Lookup failed".to_string(),
)
})?
.ok_or_else(|| (StatusCode::NOT_FOUND, "Course not found".to_string()))?;
let instructors = crate::data::courses::get_course_instructors(&state.db_pool, course.id)
.await
.unwrap_or_default();
let primary_instructor = instructors
.iter()
.find(|i| i.is_primary)
.or(instructors.first())
.map(|i| i.display_name.clone());
let meeting_times: Vec<DbMeetingTime> =
serde_json::from_value(course.meeting_times.clone()).unwrap_or_default();
let cal_course = CalendarCourse {
crn: course.crn.clone(),
subject: course.subject.clone(),
course_number: course.course_number.clone(),
title: course.title.clone(),
sequence_number: course.sequence_number.clone(),
primary_instructor,
};
Ok((cal_course, meeting_times))
}
/// `GET /api/courses/{term}/{crn}/calendar.ics`
///
/// Returns an ICS file download for the course.
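///
/// For example (term and CRN values are placeholders, not taken from this change):
/// `GET /api/courses/202510/12345/calendar.ics` responds with
/// `Content-Type: text/calendar; charset=utf-8` and a
/// `Content-Disposition: attachment` header, as built below.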
pub async fn course_ics(
State(state): State<AppState>,
Path((term, crn)): Path<(String, String)>,
) -> Result<Response, (StatusCode, String)> {
let (cal_course, meeting_times) = load_calendar_course(&state, &term, &crn).await?;
if meeting_times.is_empty() {
return Err((
StatusCode::NOT_FOUND,
"No meeting times found for this course".to_string(),
));
}
let result = generate_ics(&cal_course, &meeting_times).map_err(|e| {
tracing::error!(error = %e, "ICS generation failed");
(
StatusCode::INTERNAL_SERVER_ERROR,
"Failed to generate ICS file".to_string(),
)
})?;
let response = (
[
(header::CONTENT_TYPE, "text/calendar; charset=utf-8"),
(
header::CONTENT_DISPOSITION,
&format!("attachment; filename=\"{}\"", result.filename),
),
(header::CACHE_CONTROL, "no-cache"),
],
result.content,
)
.into_response();
Ok(response)
}
/// `GET /api/courses/{term}/{crn}/gcal`
///
/// Redirects to Google Calendar with a pre-filled event for the first meeting time.
/// If multiple meeting times exist, uses the first one with scheduled days/times.
pub async fn course_gcal(
State(state): State<AppState>,
Path((term, crn)): Path<(String, String)>,
) -> Result<Response, (StatusCode, String)> {
let (cal_course, meeting_times) = load_calendar_course(&state, &term, &crn).await?;
if meeting_times.is_empty() {
return Err((
StatusCode::NOT_FOUND,
"No meeting times found for this course".to_string(),
));
}
// Prefer the first meeting time that has actual days/times scheduled
let mt = meeting_times
.iter()
.find(|mt| {
mt.begin_time.is_some()
&& (mt.monday
|| mt.tuesday
|| mt.wednesday
|| mt.thursday
|| mt.friday
|| mt.saturday
|| mt.sunday)
})
.unwrap_or(&meeting_times[0]);
let url = generate_gcal_url(&cal_course, mt).map_err(|e| {
tracing::error!(error = %e, "Google Calendar URL generation failed");
(
StatusCode::INTERNAL_SERVER_ERROR,
"Failed to generate Google Calendar URL".to_string(),
)
})?;
Ok(Redirect::temporary(&url).into_response())
}
+4
View File
@@ -2,14 +2,18 @@
pub mod admin;
pub mod admin_rmp;
pub mod admin_scraper;
#[cfg(feature = "embed-assets")]
pub mod assets;
pub mod auth;
pub mod calendar;
#[cfg(feature = "embed-assets")]
pub mod encoding;
pub mod extractors;
pub mod routes;
pub mod schedule_cache;
pub mod session_cache;
pub mod timeline;
pub mod ws;
pub use routes::*;
+22
View File
@@ -11,7 +11,10 @@ use axum::{
use crate::web::admin;
use crate::web::admin_rmp;
use crate::web::admin_scraper;
use crate::web::auth::{self, AuthConfig};
use crate::web::calendar;
use crate::web::timeline;
use crate::web::ws;
#[cfg(feature = "embed-assets")]
use axum::{
@@ -44,9 +47,15 @@ pub fn create_router(app_state: AppState, auth_config: AuthConfig) -> Router {
.route("/metrics", get(metrics))
.route("/courses/search", get(search_courses))
.route("/courses/{term}/{crn}", get(get_course))
.route(
"/courses/{term}/{crn}/calendar.ics",
get(calendar::course_ics),
)
.route("/courses/{term}/{crn}/gcal", get(calendar::course_gcal))
.route("/terms", get(get_terms))
.route("/subjects", get(get_subjects))
.route("/reference/{category}", get(get_reference))
.route("/timeline", post(timeline::timeline))
.with_state(app_state.clone());
let auth_router = Router::new()
@@ -86,6 +95,19 @@ pub fn create_router(app_state: AppState, auth_config: AuthConfig) -> Router {
post(admin_rmp::unmatch_instructor),
)
.route("/admin/rmp/rescore", post(admin_rmp::rescore))
.route("/admin/scraper/stats", get(admin_scraper::scraper_stats))
.route(
"/admin/scraper/timeseries",
get(admin_scraper::scraper_timeseries),
)
.route(
"/admin/scraper/subjects",
get(admin_scraper::scraper_subjects),
)
.route(
"/admin/scraper/subjects/{subject}",
get(admin_scraper::scraper_subject_detail),
)
.with_state(app_state);
let mut router = Router::new()
+443
View File
@@ -0,0 +1,443 @@
//! ISR-style schedule cache for timeline enrollment queries.
//!
//! Loads all courses with their meeting times from the database, parses the
//! JSONB meeting times into a compact in-memory representation, and caches
//! the result. The cache is refreshed in the background every hour using a
//! stale-while-revalidate pattern with singleflight deduplication — readers
//! always get the current cached value instantly, never blocking on a refresh.
use chrono::NaiveDate;
use serde_json::Value;
use sqlx::PgPool;
use std::sync::Arc;
use std::sync::atomic::{AtomicBool, Ordering};
use tokio::sync::watch;
use tracing::{debug, error, info};
/// How often the cache is considered fresh (1 hour).
const REFRESH_INTERVAL: std::time::Duration = std::time::Duration::from_secs(60 * 60);
// ── Compact schedule representation ─────────────────────────────────
/// A single meeting time block, pre-parsed for fast filtering.
#[derive(Debug, Clone)]
pub(crate) struct ParsedSchedule {
/// Bitmask of days: bit 0 = Monday, bit 6 = Sunday.
days: u8,
/// Minutes since midnight for start (e.g. 600 = 10:00).
begin_minutes: u16,
/// Minutes since midnight for end (e.g. 650 = 10:50).
end_minutes: u16,
/// First day the meeting pattern is active.
start_date: NaiveDate,
/// Last day the meeting pattern is active.
end_date: NaiveDate,
}
/// A course with its enrollment and pre-parsed schedule blocks.
#[derive(Debug, Clone)]
pub(crate) struct CachedCourse {
pub(crate) subject: String,
pub(crate) enrollment: i32,
pub(crate) schedules: Vec<ParsedSchedule>,
}
/// The immutable snapshot of all courses, swapped atomically on refresh.
#[derive(Debug, Clone)]
pub(crate) struct ScheduleSnapshot {
pub(crate) courses: Vec<CachedCourse>,
refreshed_at: std::time::Instant,
}
// ── Cache handle ────────────────────────────────────────────────────
/// Shared schedule cache. Clone-cheap (all `Arc`-wrapped internals).
#[derive(Clone)]
pub struct ScheduleCache {
/// Current snapshot, updated via `watch` channel for lock-free reads.
rx: watch::Receiver<Arc<ScheduleSnapshot>>,
/// Sender side, held to push new snapshots.
tx: Arc<watch::Sender<Arc<ScheduleSnapshot>>>,
/// Singleflight guard — true while a refresh task is in flight.
refreshing: Arc<AtomicBool>,
/// Database pool for refresh queries.
pool: PgPool,
}
impl ScheduleCache {
/// Create a new cache with an empty initial snapshot.
pub(crate) fn new(pool: PgPool) -> Self {
let empty = Arc::new(ScheduleSnapshot {
courses: Vec::new(),
refreshed_at: std::time::Instant::now(),
});
let (tx, rx) = watch::channel(empty);
Self {
rx,
tx: Arc::new(tx),
refreshing: Arc::new(AtomicBool::new(false)),
pool,
}
}
/// Get the current snapshot. Never blocks on refresh.
pub(crate) fn snapshot(&self) -> Arc<ScheduleSnapshot> {
self.rx.borrow().clone()
}
/// Check freshness and trigger a background refresh if stale.
/// Always returns immediately — the caller uses the current snapshot.
pub(crate) fn ensure_fresh(&self) {
let snap = self.rx.borrow();
if snap.refreshed_at.elapsed() < REFRESH_INTERVAL {
return;
}
// Singleflight: only one refresh at a time.
if self
.refreshing
.compare_exchange(false, true, Ordering::AcqRel, Ordering::Acquire)
.is_err()
{
debug!("Schedule cache refresh already in flight, skipping");
return;
}
let cache = self.clone();
tokio::spawn(async move {
match load_snapshot(&cache.pool).await {
Ok(snap) => {
let count = snap.courses.len();
let _ = cache.tx.send(Arc::new(snap));
info!(courses = count, "Schedule cache refreshed");
}
Err(e) => {
error!(error = %e, "Failed to refresh schedule cache");
}
}
cache.refreshing.store(false, Ordering::Release);
});
}
/// Force an initial load (blocking). Call once at startup.
pub(crate) async fn load(&self) -> anyhow::Result<()> {
let snap = load_snapshot(&self.pool).await?;
let count = snap.courses.len();
let _ = self.tx.send(Arc::new(snap));
info!(courses = count, "Schedule cache initially loaded");
Ok(())
}
}
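// Illustrative wiring, not part of this diff: the cache is filled once at
// startup and then consulted per request, which is how the timeline handler
// below uses it through `AppState`.
//
// let cache = ScheduleCache::new(pool.clone());
// cache.load().await?;             // blocking initial fill at startup
// cache.ensure_fresh();            // cheap; spawns a background refresh only if stale
// let snapshot = cache.snapshot(); // lock-free read of the current data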
// ── Database loading ────────────────────────────────────────────────
/// Row returned from the lightweight schedule query.
#[derive(sqlx::FromRow)]
struct ScheduleRow {
subject: String,
enrollment: i32,
meeting_times: Value,
}
/// Load all courses and parse their meeting times into a snapshot.
async fn load_snapshot(pool: &PgPool) -> anyhow::Result<ScheduleSnapshot> {
let start = std::time::Instant::now();
let rows: Vec<ScheduleRow> =
sqlx::query_as("SELECT subject, enrollment, meeting_times FROM courses")
.fetch_all(pool)
.await?;
let courses: Vec<CachedCourse> = rows
.into_iter()
.map(|row| {
let schedules = parse_meeting_times(&row.meeting_times);
CachedCourse {
subject: row.subject,
enrollment: row.enrollment,
schedules,
}
})
.collect();
debug!(
courses = courses.len(),
elapsed_ms = start.elapsed().as_millis(),
"Schedule snapshot built"
);
Ok(ScheduleSnapshot {
courses,
refreshed_at: std::time::Instant::now(),
})
}
// ── Meeting time parsing ────────────────────────────────────────────
/// Parse the JSONB `meeting_times` array into compact `ParsedSchedule` values.
fn parse_meeting_times(value: &Value) -> Vec<ParsedSchedule> {
let Value::Array(arr) = value else {
return Vec::new();
};
arr.iter().filter_map(parse_one_meeting).collect()
}
fn parse_one_meeting(mt: &Value) -> Option<ParsedSchedule> {
let begin_time = mt.get("begin_time")?.as_str()?;
let end_time = mt.get("end_time")?.as_str()?;
let begin_minutes = parse_hhmm(begin_time)?;
let end_minutes = parse_hhmm(end_time)?;
if end_minutes <= begin_minutes {
return None;
}
let start_date = parse_date(mt.get("start_date")?.as_str()?)?;
let end_date = parse_date(mt.get("end_date")?.as_str()?)?;
const DAY_KEYS: [&str; 7] = [
"monday",
"tuesday",
"wednesday",
"thursday",
"friday",
"saturday",
"sunday",
];
let mut days: u8 = 0;
for (bit, key) in DAY_KEYS.iter().enumerate() {
if mt.get(*key).and_then(Value::as_bool).unwrap_or(false) {
days |= 1 << bit;
}
}
// Skip meetings with no days (online async, etc.)
if days == 0 {
return None;
}
Some(ParsedSchedule {
days,
begin_minutes,
end_minutes,
start_date,
end_date,
})
}
/// Parse "HHMM" → minutes since midnight.
fn parse_hhmm(s: &str) -> Option<u16> {
if s.len() != 4 {
return None;
}
let hours: u16 = s[..2].parse().ok()?;
let mins: u16 = s[2..].parse().ok()?;
if hours >= 24 || mins >= 60 {
return None;
}
Some(hours * 60 + mins)
}
/// Parse "MM/DD/YYYY" → NaiveDate.
fn parse_date(s: &str) -> Option<NaiveDate> {
NaiveDate::parse_from_str(s, "%m/%d/%Y").ok()
}
// ── Slot matching ───────────────────────────────────────────────────
/// Day-of-week as our bitmask index (Monday = 0 .. Sunday = 6).
/// Chrono's `weekday().num_days_from_monday()` already gives 0=Mon..6=Sun.
pub(crate) fn weekday_bit(day: chrono::Weekday) -> u8 {
1 << day.num_days_from_monday()
}
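// For reference: weekday_bit(Weekday::Mon) == 0b0000001,
// weekday_bit(Weekday::Wed) == 0b0000100, weekday_bit(Weekday::Sun) == 0b1000000,
// matching the `days` bitmask built in `parse_one_meeting` above.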
impl ParsedSchedule {
/// Check if this schedule is active during a given slot.
///
/// `slot_date` is the calendar date of the slot.
/// `slot_start` / `slot_end` are minutes since midnight for the 15-min window.
#[inline]
pub(crate) fn active_during(
&self,
slot_date: NaiveDate,
slot_weekday_bit: u8,
slot_start_minutes: u16,
slot_end_minutes: u16,
) -> bool {
// Day-of-week check
if self.days & slot_weekday_bit == 0 {
return false;
}
// Date range check
if slot_date < self.start_date || slot_date > self.end_date {
return false;
}
// Time overlap: meeting [begin, end) overlaps slot [start, end)
self.begin_minutes < slot_end_minutes && self.end_minutes > slot_start_minutes
}
}
#[cfg(test)]
mod tests {
use super::*;
use chrono::NaiveDate;
use serde_json::json;
#[test]
fn parse_hhmm_valid() {
assert_eq!(parse_hhmm("0000"), Some(0));
assert_eq!(parse_hhmm("0930"), Some(570));
assert_eq!(parse_hhmm("1350"), Some(830));
assert_eq!(parse_hhmm("2359"), Some(1439));
}
#[test]
fn parse_hhmm_invalid() {
assert_eq!(parse_hhmm(""), None);
assert_eq!(parse_hhmm("abc"), None);
assert_eq!(parse_hhmm("2500"), None);
assert_eq!(parse_hhmm("0060"), None);
}
#[test]
fn parse_date_valid() {
assert_eq!(
parse_date("08/26/2025"),
Some(NaiveDate::from_ymd_opt(2025, 8, 26).unwrap())
);
}
#[test]
fn parse_meeting_times_basic() {
let json = json!([{
"begin_time": "1000",
"end_time": "1050",
"start_date": "08/26/2025",
"end_date": "12/13/2025",
"monday": true,
"tuesday": false,
"wednesday": true,
"thursday": false,
"friday": true,
"saturday": false,
"sunday": false,
"building": "NPB",
"building_description": "North Paseo Building",
"room": "1.238",
"campus": "11",
"meeting_type": "FF",
"meeting_schedule_type": "AFF"
}]);
let schedules = parse_meeting_times(&json);
assert_eq!(schedules.len(), 1);
let s = &schedules[0];
assert_eq!(s.begin_minutes, 600); // 10:00
assert_eq!(s.end_minutes, 650); // 10:50
assert_eq!(s.days, 0b0010101); // Mon, Wed, Fri
}
#[test]
fn parse_meeting_times_skips_null_times() {
let json = json!([{
"begin_time": null,
"end_time": null,
"start_date": "08/26/2025",
"end_date": "12/13/2025",
"monday": false,
"tuesday": false,
"wednesday": false,
"thursday": false,
"friday": false,
"saturday": false,
"sunday": false,
"meeting_type": "OS",
"meeting_schedule_type": "AFF"
}]);
let schedules = parse_meeting_times(&json);
assert!(schedules.is_empty());
}
#[test]
fn active_during_matching_slot() {
let sched = ParsedSchedule {
days: 0b0000001, // Monday
begin_minutes: 600,
end_minutes: 650,
start_date: NaiveDate::from_ymd_opt(2025, 8, 26).unwrap(),
end_date: NaiveDate::from_ymd_opt(2025, 12, 13).unwrap(),
};
// Monday Sept 1 2025, 10:00-10:15 slot
let date = NaiveDate::from_ymd_opt(2025, 9, 1).unwrap();
assert!(sched.active_during(date, weekday_bit(chrono::Weekday::Mon), 600, 615));
}
#[test]
fn active_during_wrong_day() {
let sched = ParsedSchedule {
days: 0b0000001, // Monday only
begin_minutes: 600,
end_minutes: 650,
start_date: NaiveDate::from_ymd_opt(2025, 8, 26).unwrap(),
end_date: NaiveDate::from_ymd_opt(2025, 12, 13).unwrap(),
};
// Tuesday Sept 2 2025
let date = NaiveDate::from_ymd_opt(2025, 9, 2).unwrap();
assert!(!sched.active_during(date, weekday_bit(chrono::Weekday::Tue), 600, 615));
}
#[test]
fn active_during_no_time_overlap() {
let sched = ParsedSchedule {
days: 0b0000001,
begin_minutes: 600, // 10:00
end_minutes: 650, // 10:50
start_date: NaiveDate::from_ymd_opt(2025, 8, 26).unwrap(),
end_date: NaiveDate::from_ymd_opt(2025, 12, 13).unwrap(),
};
let date = NaiveDate::from_ymd_opt(2025, 9, 1).unwrap(); // Monday
// Slot 11:00-11:15 — after the meeting ends
assert!(!sched.active_during(date, weekday_bit(chrono::Weekday::Mon), 660, 675));
// Slot 9:45-10:00 — just before meeting starts (end=600, begin=600 → no overlap)
assert!(!sched.active_during(date, weekday_bit(chrono::Weekday::Mon), 585, 600));
}
#[test]
fn active_during_outside_date_range() {
let sched = ParsedSchedule {
days: 0b0000001,
begin_minutes: 600,
end_minutes: 650,
start_date: NaiveDate::from_ymd_opt(2025, 8, 26).unwrap(),
end_date: NaiveDate::from_ymd_opt(2025, 12, 13).unwrap(),
};
// Monday Jan 6 2025 — before semester
let date = NaiveDate::from_ymd_opt(2025, 1, 6).unwrap();
assert!(!sched.active_during(date, weekday_bit(chrono::Weekday::Mon), 600, 615));
}
#[test]
fn active_during_edge_overlap() {
let sched = ParsedSchedule {
days: 0b0000001,
begin_minutes: 600,
end_minutes: 650,
start_date: NaiveDate::from_ymd_opt(2025, 8, 26).unwrap(),
end_date: NaiveDate::from_ymd_opt(2025, 12, 13).unwrap(),
};
let date = NaiveDate::from_ymd_opt(2025, 9, 1).unwrap();
// Slot 10:45-11:00 — overlaps last 5 minutes of meeting
assert!(sched.active_during(date, weekday_bit(chrono::Weekday::Mon), 645, 660));
// Slot 9:45-10:00 — ends exactly when meeting starts, no overlap
assert!(!sched.active_during(date, weekday_bit(chrono::Weekday::Mon), 585, 600));
// Slot 10:50-11:05 — starts exactly when meeting ends, no overlap
assert!(!sched.active_during(date, weekday_bit(chrono::Weekday::Mon), 650, 665));
}
}
-1
View File
@@ -108,7 +108,6 @@ impl SessionCache {
/// Delete expired sessions from the database and sweep the in-memory cache.
///
/// Returns the number of sessions deleted from the database.
#[allow(dead_code)] // Intended for periodic cleanup task (not yet wired)
pub async fn cleanup_expired(&self) -> anyhow::Result<u64> {
let deleted = crate::data::sessions::cleanup_expired(&self.db_pool).await?;
+258
View File
@@ -0,0 +1,258 @@
//! Timeline API endpoint for enrollment aggregation by subject over time.
//!
//! Accepts multiple time ranges, merges overlaps, aligns to 15-minute
//! slot boundaries, and returns per-subject enrollment totals for each slot.
//! Only courses whose meeting times overlap a given slot contribute to that
//! slot's totals — so the chart reflects the actual class schedule rhythm.
//!
//! Course data is served from an ISR-style in-memory cache (see
//! [`ScheduleCache`]) that refreshes hourly in the background with
//! stale-while-revalidate semantics.
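//!
//! A minimal illustrative request body (assuming only the serde field names
//! defined below; timestamps are RFC 3339 UTC):
//! `{"ranges": [{"start": "2025-09-01T15:00:00Z", "end": "2025-09-01T18:00:00Z"}]}`
//! The response buckets totals per 15-minute slot, roughly
//! `{"slots": [{"time": "...", "subjects": {"CS": 1234}}], "subjects": ["CS"]}`,
//! where the subject code and count are placeholders.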
use axum::{
extract::State,
http::StatusCode,
response::{IntoResponse, Json, Response},
};
use chrono::{DateTime, Datelike, Duration, NaiveTime, Timelike, Utc};
use chrono_tz::US::Central;
use serde::{Deserialize, Serialize};
use std::collections::{BTreeMap, BTreeSet};
use ts_rs::TS;
use crate::state::AppState;
use crate::web::schedule_cache::weekday_bit;
/// 15 minutes in seconds, matching the frontend `SLOT_INTERVAL_MS`.
const SLOT_SECONDS: i64 = 15 * 60;
const SLOT_MINUTES: u16 = 15;
/// Maximum number of ranges in a single request.
const MAX_RANGES: usize = 20;
/// Maximum span of a single range (72 hours).
const MAX_RANGE_SPAN: Duration = Duration::hours(72);
/// Maximum total span across all ranges to prevent excessive queries.
const MAX_TOTAL_SPAN: Duration = Duration::hours(168); // 1 week
// ── Request / Response types ────────────────────────────────────────
#[derive(Debug, Deserialize)]
pub(crate) struct TimelineRequest {
ranges: Vec<TimeRange>,
}
#[derive(Debug, Deserialize)]
pub(crate) struct TimeRange {
start: DateTime<Utc>,
end: DateTime<Utc>,
}
#[derive(Debug, Serialize, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export)]
pub struct TimelineResponse {
/// 15-minute slots with per-subject enrollment totals, sorted by time.
slots: Vec<TimelineSlot>,
/// All subject codes present in the returned data.
subjects: Vec<String>,
}
#[derive(Debug, Serialize, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export)]
pub struct TimelineSlot {
/// ISO-8601 timestamp at the start of this 15-minute bucket.
time: DateTime<Utc>,
/// Subject code → total enrollment in this slot.
subjects: BTreeMap<String, i64>,
}
// ── Error type ──────────────────────────────────────────────────────
pub(crate) struct TimelineError {
status: StatusCode,
message: String,
}
impl TimelineError {
fn bad_request(msg: impl Into<String>) -> Self {
Self {
status: StatusCode::BAD_REQUEST,
message: msg.into(),
}
}
}
impl IntoResponse for TimelineError {
fn into_response(self) -> Response {
(
self.status,
Json(serde_json::json!({ "error": self.message })),
)
.into_response()
}
}
// ── Alignment helpers ───────────────────────────────────────────────
/// Floor a timestamp to the nearest 15-minute boundary.
fn align_floor(ts: DateTime<Utc>) -> DateTime<Utc> {
let secs = ts.timestamp();
let aligned = (secs / SLOT_SECONDS) * SLOT_SECONDS;
DateTime::from_timestamp(aligned, 0).unwrap_or(ts)
}
/// Ceil a timestamp to the nearest 15-minute boundary.
fn align_ceil(ts: DateTime<Utc>) -> DateTime<Utc> {
let secs = ts.timestamp();
let aligned = ((secs + SLOT_SECONDS - 1) / SLOT_SECONDS) * SLOT_SECONDS;
DateTime::from_timestamp(aligned, 0).unwrap_or(ts)
}
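// Worked example (times chosen for illustration): a requested range of
// 10:07–10:21 UTC floors to 10:00 and ceils to 10:30, so `generate_slots`
// below yields the 10:00 and 10:15 slots for it.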
// ── Range merging ───────────────────────────────────────────────────
/// Aligned, validated range.
#[derive(Debug, Clone, Copy)]
struct AlignedRange {
start: DateTime<Utc>,
end: DateTime<Utc>,
}
/// Merge overlapping/adjacent ranges into a minimal set.
fn merge_ranges(mut ranges: Vec<AlignedRange>) -> Vec<AlignedRange> {
if ranges.is_empty() {
return ranges;
}
ranges.sort_by_key(|r| r.start);
let mut merged: Vec<AlignedRange> = vec![ranges[0]];
for r in &ranges[1..] {
let last = merged.last_mut().unwrap();
if r.start <= last.end {
last.end = last.end.max(r.end);
} else {
merged.push(*r);
}
}
merged
}
/// Generate all aligned slot timestamps within the merged ranges.
fn generate_slots(merged: &[AlignedRange]) -> BTreeSet<DateTime<Utc>> {
let mut slots = BTreeSet::new();
for range in merged {
let mut t = range.start;
while t < range.end {
slots.insert(t);
t += Duration::seconds(SLOT_SECONDS);
}
}
slots
}
// ── Handler ─────────────────────────────────────────────────────────
/// `POST /api/timeline`
///
/// Accepts a JSON body with multiple time ranges. Returns per-subject
/// enrollment totals bucketed into 15-minute slots. Only courses whose
/// meeting schedule overlaps a slot contribute to that slot's count.
pub(crate) async fn timeline(
State(state): State<AppState>,
Json(body): Json<TimelineRequest>,
) -> Result<Json<TimelineResponse>, TimelineError> {
// ── Validate ────────────────────────────────────────────────────
if body.ranges.is_empty() {
return Err(TimelineError::bad_request("At least one range is required"));
}
if body.ranges.len() > MAX_RANGES {
return Err(TimelineError::bad_request(format!(
"Too many ranges (max {MAX_RANGES})"
)));
}
let mut aligned: Vec<AlignedRange> = Vec::with_capacity(body.ranges.len());
for r in &body.ranges {
if r.end <= r.start {
return Err(TimelineError::bad_request(format!(
"Range end ({}) must be after start ({})",
r.end, r.start
)));
}
let span = r.end - r.start;
if span > MAX_RANGE_SPAN {
return Err(TimelineError::bad_request(format!(
"Range span ({} hours) exceeds maximum ({} hours)",
span.num_hours(),
MAX_RANGE_SPAN.num_hours()
)));
}
aligned.push(AlignedRange {
start: align_floor(r.start),
end: align_ceil(r.end),
});
}
let merged = merge_ranges(aligned);
// Validate total span
let total_span: Duration = merged.iter().map(|r| r.end - r.start).sum();
if total_span > MAX_TOTAL_SPAN {
return Err(TimelineError::bad_request(format!(
"Total time span ({} hours) exceeds maximum ({} hours)",
total_span.num_hours(),
MAX_TOTAL_SPAN.num_hours()
)));
}
// ── Get cached schedule data (ISR: stale-while-revalidate) ───────
state.schedule_cache.ensure_fresh();
let snapshot = state.schedule_cache.snapshot();
// ── Build per-slot enrollment by filtering on meeting times ──────
let slot_times = generate_slots(&merged);
let mut all_subjects: BTreeSet<String> = BTreeSet::new();
let slots: Vec<TimelineSlot> = slot_times
.into_iter()
.map(|utc_time| {
// Convert UTC slot to Central time for local day-of-week and time-of-day
let local = utc_time.with_timezone(&Central);
let local_date = local.date_naive();
let local_time = local.time();
let weekday = local.weekday();
let wday_bit = weekday_bit(weekday);
let slot_start_minutes = time_to_minutes(local_time);
let slot_end_minutes = slot_start_minutes + SLOT_MINUTES;
let mut subject_totals: BTreeMap<String, i64> = BTreeMap::new();
for course in &snapshot.courses {
let active = course.schedules.iter().any(|s| {
s.active_during(local_date, wday_bit, slot_start_minutes, slot_end_minutes)
});
if active {
*subject_totals.entry(course.subject.clone()).or_default() +=
course.enrollment as i64;
}
}
all_subjects.extend(subject_totals.keys().cloned());
TimelineSlot {
time: utc_time,
subjects: subject_totals,
}
})
.collect();
let subjects: Vec<String> = all_subjects.into_iter().collect();
Ok(Json(TimelineResponse { slots, subjects }))
}
/// Convert a `NaiveTime` to minutes since midnight.
fn time_to_minutes(t: NaiveTime) -> u16 {
(t.hour() * 60 + t.minute()) as u16
}
+103
View File
@@ -0,0 +1,103 @@
#[allow(dead_code)]
mod helpers;
use banner::data::rmp::unmatch_instructor;
use sqlx::PgPool;
/// Test that unmatching an instructor resets accepted candidates back to pending.
///
/// When a user unmatches an instructor, accepted candidates should be reset to
/// 'pending' so they can be re-matched later. This prevents the bug where
/// candidates remain 'accepted' but have no corresponding link.
#[sqlx::test]
async fn unmatch_resets_accepted_candidates_to_pending(pool: PgPool) {
// ARRANGE: Create an instructor
let (instructor_id,): (i32,) = sqlx::query_as(
"INSERT INTO instructors (display_name, email)
VALUES ('Test, Instructor', 'test@utsa.edu')
RETURNING id",
)
.fetch_one(&pool)
.await
.expect("failed to create instructor");
// ARRANGE: Create an RMP professor
let (rmp_legacy_id,): (i32,) = sqlx::query_as(
"INSERT INTO rmp_professors (legacy_id, graphql_id, first_name, last_name, num_ratings)
VALUES (9999999, 'test-graphql-id', 'Test', 'Professor', 10)
RETURNING legacy_id",
)
.fetch_one(&pool)
.await
.expect("failed to create rmp professor");
// ARRANGE: Create a match candidate with 'accepted' status
sqlx::query(
"INSERT INTO rmp_match_candidates (instructor_id, rmp_legacy_id, score, status)
VALUES ($1, $2, 0.85, 'accepted')",
)
.bind(instructor_id)
.bind(rmp_legacy_id)
.execute(&pool)
.await
.expect("failed to create candidate");
// ARRANGE: Create a link in instructor_rmp_links
sqlx::query(
"INSERT INTO instructor_rmp_links (instructor_id, rmp_legacy_id, source)
VALUES ($1, $2, 'manual')",
)
.bind(instructor_id)
.bind(rmp_legacy_id)
.execute(&pool)
.await
.expect("failed to create link");
// ARRANGE: Update instructor status to 'confirmed'
sqlx::query("UPDATE instructors SET rmp_match_status = 'confirmed' WHERE id = $1")
.bind(instructor_id)
.execute(&pool)
.await
.expect("failed to update instructor status");
// ACT: Unmatch the specific RMP profile
unmatch_instructor(&pool, instructor_id, Some(rmp_legacy_id))
.await
.expect("unmatch should succeed");
// ASSERT: Candidate should be reset to pending
let (candidate_status,): (String,) = sqlx::query_as(
"SELECT status FROM rmp_match_candidates
WHERE instructor_id = $1 AND rmp_legacy_id = $2",
)
.bind(instructor_id)
.bind(rmp_legacy_id)
.fetch_one(&pool)
.await
.expect("failed to fetch candidate status");
assert_eq!(
candidate_status, "pending",
"candidate should be reset to pending after unmatch"
);
// ASSERT: Link should be deleted
let (link_count,): (i64,) =
sqlx::query_as("SELECT COUNT(*) FROM instructor_rmp_links WHERE instructor_id = $1")
.bind(instructor_id)
.fetch_one(&pool)
.await
.expect("failed to count links");
assert_eq!(link_count, 0, "link should be deleted");
// ASSERT: Instructor status should be unmatched
let (instructor_status,): (String,) =
sqlx::query_as("SELECT rmp_match_status FROM instructors WHERE id = $1")
.bind(instructor_id)
.fetch_one(&pool)
.await
.expect("failed to fetch instructor status");
assert_eq!(
instructor_status, "unmatched",
"instructor should be unmatched"
);
}
+1
View File
@@ -1,3 +1,4 @@
#[allow(dead_code)]
mod helpers;
use banner::data::batch::batch_upsert_courses;
+1
View File
@@ -1,3 +1,4 @@
#[allow(dead_code)]
mod helpers;
use banner::data::models::{ScrapePriority, TargetType};
+199
View File
@@ -10,6 +10,7 @@
"d3-shape": "^3.2.0",
"d3-time-format": "^4.1.0",
"date-fns": "^4.1.0",
"layerchart": "^1.0.13",
"overlayscrollbars": "^2.14.0",
"overlayscrollbars-svelte": "^0.5.5",
},
@@ -40,6 +41,8 @@
},
},
"packages": {
"@alloc/quick-lru": ["@alloc/quick-lru@5.2.0", "", {}, "sha512-UrcABB+4bUrFABwbluTIBErXwvbsU/V7TZWfmbgJfbkwiBuziS9gxdODUyuiecfdGQ85jglMW6juS3+z5TsKLw=="],
"@asamuzakjp/css-color": ["@asamuzakjp/css-color@3.2.0", "", { "dependencies": { "@csstools/css-calc": "^2.1.3", "@csstools/css-color-parser": "^3.0.9", "@csstools/css-parser-algorithms": "^3.0.4", "@csstools/css-tokenizer": "^3.0.3", "lru-cache": "^10.4.3" } }, "sha512-K1A6z8tS3XsmCMM86xoWdn7Fkdn9m6RSVtocUrJYIwZnFVkng/PvkEoWtOWmP+Scc6saYWHWZYbndEEXxl24jw=="],
"@biomejs/biome": ["@biomejs/biome@1.9.4", "", { "optionalDependencies": { "@biomejs/cli-darwin-arm64": "1.9.4", "@biomejs/cli-darwin-x64": "1.9.4", "@biomejs/cli-linux-arm64": "1.9.4", "@biomejs/cli-linux-arm64-musl": "1.9.4", "@biomejs/cli-linux-x64": "1.9.4", "@biomejs/cli-linux-x64-musl": "1.9.4", "@biomejs/cli-win32-arm64": "1.9.4", "@biomejs/cli-win32-x64": "1.9.4" }, "bin": { "biome": "bin/biome" } }, "sha512-1rkd7G70+o9KkTn5KLmDYXihGoTaIGO9PIIN2ZB7UJxFrWw04CZHPYiMRjYsaDvVV7hP1dYNRLxSANLaBFGpog=="],
@@ -70,6 +73,10 @@
"@csstools/css-tokenizer": ["@csstools/css-tokenizer@3.0.4", "", {}, "sha512-Vd/9EVDiu6PPJt9yAh6roZP6El1xHrdvIVGjyBsHR0RYwNHgL7FJPyIIW4fANJNG6FtyZfvlRPpFI4ZM/lubvw=="],
"@dagrejs/dagre": ["@dagrejs/dagre@1.1.8", "", { "dependencies": { "@dagrejs/graphlib": "2.2.4" } }, "sha512-5SEDlndt4W/LaVzPYJW+bSmSEZc9EzTf8rJ20WCKvjS5EAZAN0b+x0Yww7VMT4R3Wootkg+X9bUfUxazYw6Blw=="],
"@dagrejs/graphlib": ["@dagrejs/graphlib@2.2.4", "", {}, "sha512-mepCf/e9+SKYy1d02/UkvSy6+6MoyXhVxP8lLDfA7BPE1X1d4dR0sZznmbM8/XVJ1GPM+Svnx7Xj6ZweByWUkw=="],
"@esbuild/aix-ppc64": ["@esbuild/aix-ppc64@0.25.12", "", { "os": "aix", "cpu": "ppc64" }, "sha512-Hhmwd6CInZ3dwpuGTF8fJG6yoWmsToE+vYgD4nytZVxcu1ulHpUQRAB1UJ8+N1Am3Mz4+xOByoQoSZf4D+CpkA=="],
"@esbuild/android-arm": ["@esbuild/android-arm@0.25.12", "", { "os": "android", "cpu": "arm" }, "sha512-VJ+sKvNA/GE7Ccacc9Cha7bpS8nyzVv0jdVgwNDaR4gDMC/2TTRc33Ip8qrNYUcpkOHUT5OZ0bUcNNVZQ9RLlg=="],
@@ -144,8 +151,22 @@
"@jridgewell/trace-mapping": ["@jridgewell/trace-mapping@0.3.31", "", { "dependencies": { "@jridgewell/resolve-uri": "^3.1.0", "@jridgewell/sourcemap-codec": "^1.4.14" } }, "sha512-zzNR+SdQSDJzc8joaeP8QQoCQr8NuYx2dIIytl1QeBEZHJ9uW6hebsrYgbz8hJwUQao3TWCMtmfV8Nu1twOLAw=="],
"@layerstack/svelte-actions": ["@layerstack/svelte-actions@1.0.1", "", { "dependencies": { "@floating-ui/dom": "^1.6.13", "@layerstack/utils": "1.0.1", "d3-array": "^3.2.4", "d3-scale": "^4.0.2", "date-fns": "^4.1.0", "lodash-es": "^4.17.21" } }, "sha512-Tv8B3TeT7oaghx0R0I4avnSdfAT6GxEK+StL8k/hEaa009iNOIGFl3f76kfvNvPioQHAMFGtnWGLPHfsfD41nQ=="],
"@layerstack/svelte-stores": ["@layerstack/svelte-stores@1.0.2", "", { "dependencies": { "@layerstack/utils": "1.0.1", "d3-array": "^3.2.4", "date-fns": "^4.1.0", "immer": "^10.1.1", "lodash-es": "^4.17.21", "zod": "^3.24.2" } }, "sha512-IxK0UKD0PVxg1VsyaR+n7NyJ+NlvyqvYYAp+J10lkjDQxm0yx58CaF2LBV08T22C3aY1iTlqJaatn/VHV4SoQg=="],
"@layerstack/tailwind": ["@layerstack/tailwind@1.0.1", "", { "dependencies": { "@layerstack/utils": "^1.0.1", "clsx": "^2.1.1", "culori": "^4.0.1", "d3-array": "^3.2.4", "date-fns": "^4.1.0", "lodash-es": "^4.17.21", "tailwind-merge": "^2.5.4", "tailwindcss": "^3.4.15" } }, "sha512-nlshEkUCfaV0zYzrFXVVYRnS8bnBjs4M7iui6l/tu6NeBBlxDivIyRraJkdYGCSL1lZHi6FqacLQ3eerHtz90A=="],
"@layerstack/utils": ["@layerstack/utils@1.0.1", "", { "dependencies": { "d3-array": "^3.2.4", "date-fns": "^4.1.0", "lodash-es": "^4.17.21" } }, "sha512-sWP9b+SFMkJYMZyYFI01aLxbg2ZUrix6Tv+BCDmeOrcLNxtWFsMYAomMhALzTMHbb+Vis/ua5vXhpdNXEw8a2Q=="],
"@lucide/svelte": ["@lucide/svelte@0.563.1", "", { "peerDependencies": { "svelte": "^5" } }, "sha512-Kt+MbnE5D9RsuI/csmf7M+HWxALe57x3A0DhQ8pPnnUpneh7zuldrYjlT+veWtk+tVnp5doQtaAAxLujzIlhBw=="],
"@nodelib/fs.scandir": ["@nodelib/fs.scandir@2.1.5", "", { "dependencies": { "@nodelib/fs.stat": "2.0.5", "run-parallel": "^1.1.9" } }, "sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g=="],
"@nodelib/fs.stat": ["@nodelib/fs.stat@2.0.5", "", {}, "sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A=="],
"@nodelib/fs.walk": ["@nodelib/fs.walk@1.2.8", "", { "dependencies": { "@nodelib/fs.scandir": "2.1.5", "fastq": "^1.6.0" } }, "sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg=="],
"@polka/url": ["@polka/url@1.0.0-next.29", "", {}, "sha512-wwQAWhWSuHaag8c4q/KN/vCoeOJYshAIvMQwD4GpSb3OiZklFfvAgmj0VCBBImRpuF/aFgIRzllXlVX93Jevww=="],
"@rollup/rollup-android-arm-eabi": ["@rollup/rollup-android-arm-eabi@4.57.0", "", { "os": "android", "cpu": "arm" }, "sha512-tPgXB6cDTndIe1ah7u6amCI1T0SsnlOuKgg10Xh3uizJk4e5M1JGaUMk7J4ciuAUcFpbOiNhm2XIjP9ON0dUqA=="],
@@ -282,16 +303,28 @@
"agent-base": ["agent-base@7.1.4", "", {}, "sha512-MnA+YT8fwfJPgBx3m60MNqakm30XOkyIoH1y6huTQvC0PwZG7ki8NacLBcrPbNoo8vEZy7Jpuk7+jMO+CUovTQ=="],
"any-promise": ["any-promise@1.3.0", "", {}, "sha512-7UvmKalWRt1wgjL1RrGxoSJW/0QZFIegpeGvZG9kjp8vrRu55XTHbwnqq2GpXm9uLbcuhxm3IqX9OB4MZR1b2A=="],
"anymatch": ["anymatch@3.1.3", "", { "dependencies": { "normalize-path": "^3.0.0", "picomatch": "^2.0.4" } }, "sha512-KMReFUr0B4t+D+OBkjR3KYqvocp2XaSzO55UcB6mgQMd3KbcE+mWTyvVV7D/zsdEbNnV6acZUutkiHQXvTr1Rw=="],
"arg": ["arg@5.0.2", "", {}, "sha512-PYjyFOLKQ9y57JvQ6QLo8dAgNqswh8M1RMJYdQduT6xbWSgK36P/Z/v+p888pM69jMMfS8Xd8F6I1kQ/I9HUGg=="],
"aria-query": ["aria-query@5.3.2", "", {}, "sha512-COROpnaoap1E2F000S62r6A60uHZnmlvomhfyT2DlTcrY1OrBKn2UhH7qn5wTC9zMvD0AY7csdPSNwKP+7WiQw=="],
"assertion-error": ["assertion-error@2.0.1", "", {}, "sha512-Izi8RQcffqCeNVgFigKli1ssklIbpHnCYc6AknXGYoB6grJqyeby7jv12JUQgmTAnIDnbck1uxksT4dzN3PWBA=="],
"axobject-query": ["axobject-query@4.1.0", "", {}, "sha512-qIj0G9wZbMGNLjLmg1PT6v2mE9AH2zlnADJD/2tC6E00hgmhUOfEB6greHPAfLRSufHqROIUTkw6E+M3lH0PTQ=="],
"binary-extensions": ["binary-extensions@2.3.0", "", {}, "sha512-Ceh+7ox5qe7LJuLHoY0feh3pHuUDHAcRUeyL2VYghZwfpkNIy/+8Ocg0a3UuSoYzavmylwuLWQOf3hl0jjMMIw=="],
"bits-ui": ["bits-ui@1.8.0", "", { "dependencies": { "@floating-ui/core": "^1.6.4", "@floating-ui/dom": "^1.6.7", "@internationalized/date": "^3.5.6", "css.escape": "^1.5.1", "esm-env": "^1.1.2", "runed": "^0.23.2", "svelte-toolbelt": "^0.7.1", "tabbable": "^6.2.0" }, "peerDependencies": { "svelte": "^5.11.0" } }, "sha512-CXD6Orp7l8QevNDcRPLXc/b8iMVgxDWT2LyTwsdLzJKh9CxesOmPuNePSPqAxKoT59FIdU4aFPS1k7eBdbaCxg=="],
"braces": ["braces@3.0.3", "", { "dependencies": { "fill-range": "^7.1.1" } }, "sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA=="],
"cac": ["cac@6.7.14", "", {}, "sha512-b6Ilus+c3RrdDk+JhLKUAQfzzgLEPy6wcXqS7f/xe1EETvsDP6GORG7SFuOs6cID5YkqchW/LXZbX5bc8j7ZcQ=="],
"camelcase-css": ["camelcase-css@2.0.1", "", {}, "sha512-QOSvevhslijgYwRx6Rv7zKdMF8lbRmx+uQGx2+vDc+KI/eBnsy9kit5aj23AgGu3pa4t9AgwbnXWqS+iOY+2aA=="],
"chai": ["chai@5.3.3", "", { "dependencies": { "assertion-error": "^2.0.1", "check-error": "^2.1.1", "deep-eql": "^5.0.1", "loupe": "^3.1.0", "pathval": "^2.0.0" } }, "sha512-4zNhdJD/iOjSH0A05ea+Ke6MU5mmpQcbQsSOkgdaUMJ9zTlDTD/GYlwohmIE2u0gaxHYiVHEn1Fw9mZ/ktJWgw=="],
"check-error": ["check-error@2.1.3", "", {}, "sha512-PAJdDJusoxnwm1VwW07VWwUN1sl7smmC3OKggvndJFadxxDRyFJBX/ggnu/KE4kQAB7a3Dp8f/YXC1FlUprWmA=="],
@@ -300,30 +333,66 @@
"clsx": ["clsx@2.1.1", "", {}, "sha512-eYm0QWBtUrBWZWG0d386OGAw16Z995PiOVo2B7bjWSbHedGl5e0ZWaq65kOGgUSNesEIDkB9ISbTg/JK9dhCZA=="],
"commander": ["commander@7.2.0", "", {}, "sha512-QrWXB+ZQSVPmIWIhtEO9H+gwHaMGYiF5ChvoJ+K9ZGHG/sVsa6yiesAD1GC/x46sET00Xlwo1u49RVVVzvcSkw=="],
"cookie": ["cookie@0.6.0", "", {}, "sha512-U71cyTamuh1CRNCfpGY6to28lxvNwPG4Guz/EVjgf3Jmzv0vlDp1atT9eS5dDjMYHucpHbWns6Lwf3BKz6svdw=="],
"css.escape": ["css.escape@1.5.1", "", {}, "sha512-YUifsXXuknHlUsmlgyY0PKzgPOr7/FjCePfHNt0jxm83wHZi44VDMQ7/fGNkjY3/jV1MC+1CmZbaHzugyeRtpg=="],
"cssesc": ["cssesc@3.0.0", "", { "bin": { "cssesc": "bin/cssesc" } }, "sha512-/Tb/JcjK111nNScGob5MNtsntNM1aCNUDipB/TkwZFhyDrrE47SOx/18wF2bbjgc3ZzCSKW1T5nt5EbFoAz/Vg=="],
"cssstyle": ["cssstyle@4.6.0", "", { "dependencies": { "@asamuzakjp/css-color": "^3.2.0", "rrweb-cssom": "^0.8.0" } }, "sha512-2z+rWdzbbSZv6/rhtvzvqeZQHrBaqgogqt85sqFNbabZOuFbCVFb8kPeEtZjiKkbrm395irpNKiYeFeLiQnFPg=="],
"culori": ["culori@4.0.2", "", {}, "sha512-1+BhOB8ahCn4O0cep0Sh2l9KCOfOdY+BXJnKMHFFzDEouSr/el18QwXEMRlOj9UY5nCeA8UN3a/82rUWRBeyBw=="],
"d3-array": ["d3-array@3.2.4", "", { "dependencies": { "internmap": "1 - 2" } }, "sha512-tdQAmyA18i4J7wprpYq8ClcxZy3SC31QMeByyCFyRt7BVHdREQZ5lpzoe5mFEYZUWe+oq8HBvk9JjpibyEV4Jg=="],
"d3-color": ["d3-color@3.1.0", "", {}, "sha512-zg/chbXyeBtMQ1LbD/WSoW2DpC3I0mpmPdW+ynRTj/x2DAWYrIY7qeZIHidozwV24m4iavr15lNwIwLxRmOxhA=="],
"d3-delaunay": ["d3-delaunay@6.0.4", "", { "dependencies": { "delaunator": "5" } }, "sha512-mdjtIZ1XLAM8bm/hx3WwjfHt6Sggek7qH043O8KEjDXN40xi3vx/6pYSVTwLjEgiXQTbvaouWKynLBiUZ6SK6A=="],
"d3-dispatch": ["d3-dispatch@3.0.1", "", {}, "sha512-rzUyPU/S7rwUflMyLc1ETDeBj0NRuHKKAcvukozwhshr6g6c5d8zh4c2gQjY2bZ0dXeGLWc1PF174P2tVvKhfg=="],
"d3-dsv": ["d3-dsv@3.0.1", "", { "dependencies": { "commander": "7", "iconv-lite": "0.6", "rw": "1" }, "bin": { "csv2json": "bin/dsv2json.js", "csv2tsv": "bin/dsv2dsv.js", "dsv2dsv": "bin/dsv2dsv.js", "dsv2json": "bin/dsv2json.js", "json2csv": "bin/json2dsv.js", "json2dsv": "bin/json2dsv.js", "json2tsv": "bin/json2dsv.js", "tsv2csv": "bin/dsv2dsv.js", "tsv2json": "bin/dsv2json.js" } }, "sha512-UG6OvdI5afDIFP9w4G0mNq50dSOsXHJaRE8arAS5o9ApWnIElp8GZw1Dun8vP8OyHOZ/QJUKUJwxiiCCnUwm+Q=="],
"d3-force": ["d3-force@3.0.0", "", { "dependencies": { "d3-dispatch": "1 - 3", "d3-quadtree": "1 - 3", "d3-timer": "1 - 3" } }, "sha512-zxV/SsA+U4yte8051P4ECydjD/S+qeYtnaIyAs9tgHCqfguma/aAQDjo85A9Z6EKhBirHRJHXIgJUlffT4wdLg=="],
"d3-format": ["d3-format@3.1.2", "", {}, "sha512-AJDdYOdnyRDV5b6ArilzCPPwc1ejkHcoyFarqlPqT7zRYjhavcT3uSrqcMvsgh2CgoPbK3RCwyHaVyxYcP2Arg=="],
"d3-geo": ["d3-geo@3.1.1", "", { "dependencies": { "d3-array": "2.5.0 - 3" } }, "sha512-637ln3gXKXOwhalDzinUgY83KzNWZRKbYubaG+fGVuc/dxO64RRljtCTnf5ecMyE1RIdtqpkVcq0IbtU2S8j2Q=="],
"d3-geo-voronoi": ["d3-geo-voronoi@2.1.0", "", { "dependencies": { "d3-array": "3", "d3-delaunay": "6", "d3-geo": "3", "d3-tricontour": "1" } }, "sha512-kqE4yYuOjPbKdBXG0xztCacPwkVSK2REF1opSNrnqqtXJmNcM++UbwQ8SxvwP6IQTj9RvIjjK4qeiVsEfj0Z2Q=="],
"d3-hierarchy": ["d3-hierarchy@3.1.2", "", {}, "sha512-FX/9frcub54beBdugHjDCdikxThEqjnR93Qt7PvQTOHxyiNCAlvMrHhclk3cD5VeAaq9fxmfRp+CnWw9rEMBuA=="],
"d3-interpolate": ["d3-interpolate@3.0.1", "", { "dependencies": { "d3-color": "1 - 3" } }, "sha512-3bYs1rOD33uo8aqJfKP3JWPAibgw8Zm2+L9vBKEHJ2Rg+viTR7o5Mmv5mZcieN+FRYaAOWX5SJATX6k1PWz72g=="],
"d3-interpolate-path": ["d3-interpolate-path@2.3.0", "", {}, "sha512-tZYtGXxBmbgHsIc9Wms6LS5u4w6KbP8C09a4/ZYc4KLMYYqub57rRBUgpUr2CIarIrJEpdAWWxWQvofgaMpbKQ=="],
"d3-path": ["d3-path@3.1.0", "", {}, "sha512-p3KP5HCf/bvjBSSKuXid6Zqijx7wIfNW+J/maPs+iwR35at5JCbLUT0LzF1cnjbCHWhqzQTIN2Jpe8pRebIEFQ=="],
"d3-quadtree": ["d3-quadtree@3.0.1", "", {}, "sha512-04xDrxQTDTCFwP5H6hRhsRcb9xxv2RzkcsygFzmkSIOJy3PeRJP7sNk3VRIbKXcog561P9oU0/rVH6vDROAgUw=="],
"d3-random": ["d3-random@3.0.1", "", {}, "sha512-FXMe9GfxTxqd5D6jFsQ+DJ8BJS4E/fT5mqqdjovykEB2oFbTMDVdg1MGFxfQW+FBOGoB++k8swBrgwSHT1cUXQ=="],
"d3-sankey": ["d3-sankey@0.12.3", "", { "dependencies": { "d3-array": "1 - 2", "d3-shape": "^1.2.0" } }, "sha512-nQhsBRmM19Ax5xEIPLMY9ZmJ/cDvd1BG3UVvt5h3WRxKg5zGRbvnteTyWAbzeSvlh3tW7ZEmq4VwR5mB3tutmQ=="],
"d3-scale": ["d3-scale@4.0.2", "", { "dependencies": { "d3-array": "2.10.0 - 3", "d3-format": "1 - 3", "d3-interpolate": "1.2.0 - 3", "d3-time": "2.1.1 - 3", "d3-time-format": "2 - 4" } }, "sha512-GZW464g1SH7ag3Y7hXjf8RoUuAFIqklOAq3MRl4OaWabTFJY9PN/E1YklhXLh+OQ3fM9yS2nOkCoS+WLZ6kvxQ=="],
"d3-scale-chromatic": ["d3-scale-chromatic@3.1.0", "", { "dependencies": { "d3-color": "1 - 3", "d3-interpolate": "1 - 3" } }, "sha512-A3s5PWiZ9YCXFye1o246KoscMWqf8BsD9eRiJ3He7C9OBaxKhAd5TFCdEx/7VbKtxxTsu//1mMJFrEt572cEyQ=="],
"d3-shape": ["d3-shape@3.2.0", "", { "dependencies": { "d3-path": "^3.1.0" } }, "sha512-SaLBuwGm3MOViRq2ABk3eLoxwZELpH6zhl3FbAoJ7Vm1gofKx6El1Ib5z23NUEhF9AsGl7y+dzLe5Cw2AArGTA=="],
"d3-tile": ["d3-tile@1.0.0", "", {}, "sha512-79fnTKpPMPDS5xQ0xuS9ir0165NEwwkFpe/DSOmc2Gl9ldYzKKRDWogmTTE8wAJ8NA7PMapNfEcyKhI9Lxdu5Q=="],
"d3-time": ["d3-time@3.1.0", "", { "dependencies": { "d3-array": "2 - 3" } }, "sha512-VqKjzBLejbSMT4IgbmVgDjpkYrNWUYJnbCGo874u7MMKIWsILRX+OpX/gTk8MqjpT1A/c6HY2dCA77ZN0lkQ2Q=="],
"d3-time-format": ["d3-time-format@4.1.0", "", { "dependencies": { "d3-time": "1 - 3" } }, "sha512-dJxPBlzC7NugB2PDLwo9Q8JiTR3M3e4/XANkreKSUxF8vvXKqm1Yfq4Q5dl8budlunRVlUUaDUgFt7eA8D6NLg=="],
"d3-timer": ["d3-timer@3.0.1", "", {}, "sha512-ndfJ/JxxMd3nw31uyKoY2naivF+r29V+Lc0svZxe1JvvIRmi8hUsrMvdOwgS1o6uBHmiz91geQ0ylPP0aj1VUA=="],
"d3-tricontour": ["d3-tricontour@1.1.0", "", { "dependencies": { "d3-delaunay": "6", "d3-scale": "4" } }, "sha512-G7gHKj89n2owmkGb6WX6ixcnQ0Kf/0wpa9VIh9DGdbHu8wdrlaHU4ir3/bFNERl8N8nn4G7e7qbtBG8N9caihQ=="],
"data-urls": ["data-urls@5.0.0", "", { "dependencies": { "whatwg-mimetype": "^4.0.0", "whatwg-url": "^14.0.0" } }, "sha512-ZYP5VBHshaDAiVZxjbRVcFJpc+4xGgT0bK3vzy1HLN8jTO975HEbuYzZJcHoQEY5K1a0z8YayJkyVETa08eNTg=="],
"date-fns": ["date-fns@4.1.0", "", {}, "sha512-Ukq0owbQXxa/U3EGtsdVBkR1w7KOQ5gIBqdH2hkvknzZPYvBxb/aa6E8L7tmjFtkwZBu3UXBbjIgPo/Ez4xaNg=="],
@@ -336,10 +405,16 @@
"deepmerge": ["deepmerge@4.3.1", "", {}, "sha512-3sUqbMEc77XqpdNO7FRyRog+eW3ph+GYCbj+rK+uYyRMuwsVy0rMiVtPn+QJlKFvWP/1PYpapqYn0Me2knFn+A=="],
"delaunator": ["delaunator@5.0.1", "", { "dependencies": { "robust-predicates": "^3.0.2" } }, "sha512-8nvh+XBe96aCESrGOqMp/84b13H9cdKbG5P2ejQCh4d4sK9RL4371qou9drQjMhvnPmhWl5hnmqbEE0fXr9Xnw=="],
"detect-libc": ["detect-libc@2.1.2", "", {}, "sha512-Btj2BOOO83o3WyH59e8MgXsxEQVcarkUOpEYrubB0urwnN10yQ364rsiByU11nZlqWYZm05i/of7io4mzihBtQ=="],
"devalue": ["devalue@5.6.2", "", {}, "sha512-nPRkjWzzDQlsejL1WVifk5rvcFi/y1onBRxjaFMjZeR9mFpqu2gmAZ9xUB9/IEanEP/vBtGeGganC/GO1fmufg=="],
"didyoumean": ["didyoumean@1.2.2", "", {}, "sha512-gxtyfqMg7GKyhQmb056K7M3xszy/myH8w+B4RT+QXBQsvAOdc3XymqDDPHx1BgPgsdAA5SIifona89YtRATDzw=="],
"dlv": ["dlv@1.1.3", "", {}, "sha512-+HlytyjlPKnIG8XuRG8WvmBP8xs8P71y+SKKS6ZXWoEgLuePxtDoUEiH7WkdePWrQ5JBpE6aoVqfZfJUQkjXwA=="],
"enhanced-resolve": ["enhanced-resolve@5.18.4", "", { "dependencies": { "graceful-fs": "^4.2.4", "tapable": "^2.2.0" } }, "sha512-LgQMM4WXU3QI+SYgEc2liRgznaD5ojbmY3sb8LxyguVkIg5FxdpTkvk72te2R38/TGKxH634oLxXRGY6d7AP+Q=="],
"entities": ["entities@6.0.1", "", {}, "sha512-aN97NXWF6AWBTahfVOIrB/NShkzi5H7F9r1s9mD3cDj4Ko5f2qhhVoYMibXF7GlLveb/D2ioWay8lxI97Ven3g=="],
@@ -356,12 +431,24 @@
"expect-type": ["expect-type@1.3.0", "", {}, "sha512-knvyeauYhqjOYvQ66MznSMs83wmHrCycNEN6Ao+2AeYEfxUIkuiVxdEa1qlGEPK+We3n0THiDciYSsCcgW/DoA=="],
"fast-glob": ["fast-glob@3.3.3", "", { "dependencies": { "@nodelib/fs.stat": "^2.0.2", "@nodelib/fs.walk": "^1.2.3", "glob-parent": "^5.1.2", "merge2": "^1.3.0", "micromatch": "^4.0.8" } }, "sha512-7MptL8U0cqcFdzIzwOTHoilX9x5BrNqye7Z/LuC7kCMRio1EMSyqRK3BEAUD7sXRq4iT4AzTVuZdhgQ2TCvYLg=="],
"fastq": ["fastq@1.20.1", "", { "dependencies": { "reusify": "^1.0.4" } }, "sha512-GGToxJ/w1x32s/D2EKND7kTil4n8OVk/9mycTc4VDza13lOvpUZTGX3mFSCtV9ksdGBVzvsyAVLM6mHFThxXxw=="],
"fdir": ["fdir@6.5.0", "", { "peerDependencies": { "picomatch": "^3 || ^4" }, "optionalPeers": ["picomatch"] }, "sha512-tIbYtZbucOs0BRGqPJkshJUYdL+SDH7dVM8gjy+ERp3WAUjLEFJE+02kanyHtwjWOnwrKYBiwAmM0p4kLJAnXg=="],
"fill-range": ["fill-range@7.1.1", "", { "dependencies": { "to-regex-range": "^5.0.1" } }, "sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg=="],
"fsevents": ["fsevents@2.3.3", "", { "os": "darwin" }, "sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw=="],
"function-bind": ["function-bind@1.1.2", "", {}, "sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA=="],
"glob-parent": ["glob-parent@6.0.2", "", { "dependencies": { "is-glob": "^4.0.3" } }, "sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A=="],
"graceful-fs": ["graceful-fs@4.2.11", "", {}, "sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ=="],
"hasown": ["hasown@2.0.2", "", { "dependencies": { "function-bind": "^1.1.2" } }, "sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ=="],
"html-encoding-sniffer": ["html-encoding-sniffer@4.0.0", "", { "dependencies": { "whatwg-encoding": "^3.1.1" } }, "sha512-Y22oTqIU4uuPgEemfz7NDJz6OeKf12Lsu+QC+s3BVpda64lTiMYCyGwg5ki4vFxkMwQdeZDl2adZoqUgdFuTgQ=="],
"http-proxy-agent": ["http-proxy-agent@7.0.2", "", { "dependencies": { "agent-base": "^7.1.0", "debug": "^4.3.4" } }, "sha512-T1gkAiYYDWYx3V5Bmyu7HcfcvL7mUrTWiM6yOfa3PIphViJ/gFPbvidQ+veqSOHci/PxBcDabeUNCzpOODJZig=="],
@@ -370,10 +457,22 @@
"iconv-lite": ["iconv-lite@0.6.3", "", { "dependencies": { "safer-buffer": ">= 2.1.2 < 3.0.0" } }, "sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw=="],
"immer": ["immer@10.2.0", "", {}, "sha512-d/+XTN3zfODyjr89gM3mPq1WNX2B8pYsu7eORitdwyA2sBubnTl3laYlBk4sXY5FUa5qTZGBDPJICVbvqzjlbw=="],
"inline-style-parser": ["inline-style-parser@0.2.7", "", {}, "sha512-Nb2ctOyNR8DqQoR0OwRG95uNWIC0C1lCgf5Naz5H6Ji72KZ8OcFZLz2P5sNgwlyoJ8Yif11oMuYs5pBQa86csA=="],
"internmap": ["internmap@2.0.3", "", {}, "sha512-5Hh7Y1wQbvY5ooGgPbDaL5iYLAPzMTUrjMulskHLH6wnv/A+1q5rgEaiuqEjB+oxGXIVZs1FF+R/KPN3ZSQYYg=="],
"is-binary-path": ["is-binary-path@2.1.0", "", { "dependencies": { "binary-extensions": "^2.0.0" } }, "sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw=="],
"is-core-module": ["is-core-module@2.16.1", "", { "dependencies": { "hasown": "^2.0.2" } }, "sha512-UfoeMA6fIJ8wTYFEUjelnaGI67v6+N7qXJEvQuIGa99l4xsCruSYOVSQ0uPANn4dAzm8lkYPaKLrrijLq7x23w=="],
"is-extglob": ["is-extglob@2.1.1", "", {}, "sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ=="],
"is-glob": ["is-glob@4.0.3", "", { "dependencies": { "is-extglob": "^2.1.1" } }, "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg=="],
"is-number": ["is-number@7.0.0", "", {}, "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng=="],
"is-potential-custom-element-name": ["is-potential-custom-element-name@1.0.1", "", {}, "sha512-bCYeRA2rVibKZd+s2625gGnGF/t7DSqDs4dP7CrLA1m7jKWz6pps0LpYLJN8Q64HtmPKJ1hrN3nzPNKFEKOUiQ=="],
"is-reference": ["is-reference@3.0.3", "", { "dependencies": { "@types/estree": "^1.0.6" } }, "sha512-ixkJoqQvAP88E6wLydLGGqCJsrFUnqoH6HnaczB8XmDH1oaWU+xxdptvikTgaEhtZ53Ky6YXiBuUI2WXLMCwjw=="],
@@ -386,6 +485,10 @@
"kleur": ["kleur@4.1.5", "", {}, "sha512-o+NO+8WrRiQEE4/7nwRJhN1HWpVmJm511pBHUxPLtp0BUISzlBplORYSmTclCnJvQq2tKu/sgl3xVpkc7ZWuQQ=="],
"layercake": ["layercake@8.4.3", "", { "dependencies": { "d3-array": "^3.2.4", "d3-color": "^3.1.0", "d3-scale": "^4.0.2", "d3-shape": "^3.2.0" }, "peerDependencies": { "svelte": "3 - 5 || >=5.0.0-next.120", "typescript": "^5.0.2" } }, "sha512-PZDduaPFxgHHkxlmsz5MVBECf6ZCT39DI3LgMVvuMwrmlrtlXwXUM/elJp46zHYzCE1j+cGyDuBDxnANv94tOQ=="],
"layerchart": ["layerchart@1.0.13", "", { "dependencies": { "@dagrejs/dagre": "^1.1.4", "@layerstack/svelte-actions": "^1.0.1", "@layerstack/svelte-stores": "^1.0.2", "@layerstack/tailwind": "^1.0.1", "@layerstack/utils": "^1.0.1", "d3-array": "^3.2.4", "d3-color": "^3.1.0", "d3-delaunay": "^6.0.4", "d3-dsv": "^3.0.1", "d3-force": "^3.0.0", "d3-geo": "^3.1.1", "d3-geo-voronoi": "^2.1.0", "d3-hierarchy": "^3.1.2", "d3-interpolate": "^3.0.1", "d3-interpolate-path": "^2.3.0", "d3-path": "^3.1.0", "d3-quadtree": "^3.0.1", "d3-random": "^3.0.1", "d3-sankey": "^0.12.3", "d3-scale": "^4.0.2", "d3-scale-chromatic": "^3.1.0", "d3-shape": "^3.2.0", "d3-tile": "^1.0.0", "d3-time": "^3.1.0", "date-fns": "^4.1.0", "layercake": "8.4.3", "lodash-es": "^4.17.21" }, "peerDependencies": { "svelte": "^3.56.0 || ^4.0.0 || ^5.0.0" } }, "sha512-bjcrfyTdHtfYZn7yj26dvA1qUjM+R6+akp2VeBJ4JWKmDGhb5WvT9nMCs52Rb+gSd/omFq5SjZLz49MqlVljZw=="],
"lightningcss": ["lightningcss@1.30.2", "", { "dependencies": { "detect-libc": "^2.0.3" }, "optionalDependencies": { "lightningcss-android-arm64": "1.30.2", "lightningcss-darwin-arm64": "1.30.2", "lightningcss-darwin-x64": "1.30.2", "lightningcss-freebsd-x64": "1.30.2", "lightningcss-linux-arm-gnueabihf": "1.30.2", "lightningcss-linux-arm64-gnu": "1.30.2", "lightningcss-linux-arm64-musl": "1.30.2", "lightningcss-linux-x64-gnu": "1.30.2", "lightningcss-linux-x64-musl": "1.30.2", "lightningcss-win32-arm64-msvc": "1.30.2", "lightningcss-win32-x64-msvc": "1.30.2" } }, "sha512-utfs7Pr5uJyyvDETitgsaqSyjCb2qNRAtuqUeWIAKztsOYdcACf2KtARYXg2pSvhkt+9NfoaNY7fxjl6nuMjIQ=="],
"lightningcss-android-arm64": ["lightningcss-android-arm64@1.30.2", "", { "os": "android", "cpu": "arm64" }, "sha512-BH9sEdOCahSgmkVhBLeU7Hc9DWeZ1Eb6wNS6Da8igvUwAe0sqROHddIlvU06q3WyXVEOYDZ6ykBZQnjTbmo4+A=="],
@@ -410,30 +513,50 @@
"lightningcss-win32-x64-msvc": ["lightningcss-win32-x64-msvc@1.30.2", "", { "os": "win32", "cpu": "x64" }, "sha512-5g1yc73p+iAkid5phb4oVFMB45417DkRevRbt/El/gKXJk4jid+vPFF/AXbxn05Aky8PapwzZrdJShv5C0avjw=="],
"lilconfig": ["lilconfig@3.1.3", "", {}, "sha512-/vlFKAoH5Cgt3Ie+JLhRbwOsCQePABiU3tJ1egGvyQ+33R/vcwM2Zl2QR/LzjsBeItPt3oSVXapn+m4nQDvpzw=="],
"lines-and-columns": ["lines-and-columns@1.2.4", "", {}, "sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg=="],
"locate-character": ["locate-character@3.0.0", "", {}, "sha512-SW13ws7BjaeJ6p7Q6CO2nchbYEc3X3J6WrmTTDto7yMPqVSZTUyY5Tjbid+Ab8gLnATtygYtiDIJGQRRn2ZOiA=="],
"lodash-es": ["lodash-es@4.17.23", "", {}, "sha512-kVI48u3PZr38HdYz98UmfPnXl2DXrpdctLrFLCd3kOx1xUkOmpFPx7gCWWM5MPkL/fD8zb+Ph0QzjGFs4+hHWg=="],
"loupe": ["loupe@3.2.1", "", {}, "sha512-CdzqowRJCeLU72bHvWqwRBBlLcMEtIvGrlvef74kMnV2AolS9Y8xUv1I0U/MNAWMhBlKIoyuEgoJ0t/bbwHbLQ=="],
"lru-cache": ["lru-cache@10.4.3", "", {}, "sha512-JNAzZcXrCt42VGLuYz0zfAzDfAvJWW6AfYlDBQyDV5DClI2m5sAmK+OIO7s59XfsRsWHp02jAJrRadPRGTt6SQ=="],
"magic-string": ["magic-string@0.30.21", "", { "dependencies": { "@jridgewell/sourcemap-codec": "^1.5.5" } }, "sha512-vd2F4YUyEXKGcLHoq+TEyCjxueSeHnFxyyjNp80yg0XV4vUhnDer/lvvlqM/arB5bXQN5K2/3oinyCRyx8T2CQ=="],
"merge2": ["merge2@1.4.1", "", {}, "sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg=="],
"micromatch": ["micromatch@4.0.8", "", { "dependencies": { "braces": "^3.0.3", "picomatch": "^2.3.1" } }, "sha512-PXwfBhYu0hBCPw8Dn0E+WDYb7af3dSLVWKi3HGv84IdF4TyFoC0ysxFd0Goxw7nSv4T/PzEJQxsYsEiFCKo2BA=="],
"mri": ["mri@1.2.0", "", {}, "sha512-tzzskb3bG8LvYGFF/mDTpq3jpI6Q9wc3LEmBaghu+DdCssd1FakN7Bc0hVNmEyGq1bq3RgfkCb3cmQLpNPOroA=="],
"mrmime": ["mrmime@2.0.1", "", {}, "sha512-Y3wQdFg2Va6etvQ5I82yUhGdsKrcYox6p7FfL1LbK2J4V01F9TGlepTIhnK24t7koZibmg82KGglhA1XK5IsLQ=="],
"ms": ["ms@2.1.3", "", {}, "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA=="],
"mz": ["mz@2.7.0", "", { "dependencies": { "any-promise": "^1.0.0", "object-assign": "^4.0.1", "thenify-all": "^1.0.0" } }, "sha512-z81GNO7nnYMEhrGh9LeymoE4+Yr0Wn5McHIZMK5cfQCl+NDX08sCZgUc9/6MHni9IWuFLm1Z3HTCXu2z9fN62Q=="],
"nanoid": ["nanoid@3.3.11", "", { "bin": { "nanoid": "bin/nanoid.cjs" } }, "sha512-N8SpfPUnUp1bK+PMYW8qSWdl9U+wwNWI4QKxOYDy9JAro3WMX7p2OeVRF9v+347pnakNevPmiHhNmZ2HbFA76w=="],
"normalize-path": ["normalize-path@3.0.0", "", {}, "sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA=="],
"nwsapi": ["nwsapi@2.2.23", "", {}, "sha512-7wfH4sLbt4M0gCDzGE6vzQBo0bfTKjU7Sfpqy/7gs1qBfYz2vEJH6vXcBKpO3+6Yu1telwd0t9HpyOoLEQQbIQ=="],
"object-assign": ["object-assign@4.1.1", "", {}, "sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg=="],
"object-hash": ["object-hash@3.0.0", "", {}, "sha512-RSn9F68PjH9HqtltsSnqYC1XXoWe9Bju5+213R98cNGttag9q9yAOTzdbsqvIa7aNm5WffBZFpWYr2aWrklWAw=="],
"overlayscrollbars": ["overlayscrollbars@2.14.0", "", {}, "sha512-RjV0pqc79kYhQLC3vTcLRb5GLpI1n6qh0Oua3g+bGH4EgNOJHVBGP7u0zZtxoAa0dkHlAqTTSYRb9MMmxNLjig=="],
"overlayscrollbars-svelte": ["overlayscrollbars-svelte@0.5.5", "", { "peerDependencies": { "overlayscrollbars": "^2.0.0", "svelte": "^5.0.0" } }, "sha512-+dRW3YZSvFbKi5vDCpnUOHuoPLLSdu0BUVVMYZdmfVghu7XkafDRebG2y91/ImPqj6YDAUsz1rcWVYhCJSS/pQ=="],
"parse5": ["parse5@7.3.0", "", { "dependencies": { "entities": "^6.0.0" } }, "sha512-IInvU7fabl34qmi9gY8XOVxhYyMyuH2xUNpb2q8/Y+7552KlejkRvqvD19nMoUW/uQGGbqNpA6Tufu5FL5BZgw=="],
"path-parse": ["path-parse@1.0.7", "", {}, "sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw=="],
"pathe": ["pathe@2.0.3", "", {}, "sha512-WUjGcAqP1gQacoQe+OBJsFA7Ld4DyXuUIjZ5cc75cLHvJ7dtNsTugphxIADwspS+AraAUePCKrSVtPLFj/F88w=="],
"pathval": ["pathval@2.0.1", "", {}, "sha512-//nshmD55c46FuFw26xV/xFAaB5HF9Xdap7HJBBnrKdAd6/GxDBaNA1870O79+9ueg61cZLSVc+OaFlfmObYVQ=="],
@@ -442,18 +565,48 @@
"picomatch": ["picomatch@4.0.3", "", {}, "sha512-5gTmgEY/sqK6gFXLIsQNH19lWb4ebPDLA4SdLP7dsWkIXHWlG66oPuVvXSGFPppYZz8ZDZq0dYYrbHfBCVUb1Q=="],
"pify": ["pify@2.3.0", "", {}, "sha512-udgsAY+fTnvv7kI7aaxbqwWNb0AHiB0qBO89PZKPkoTmGOgdbrHDKD+0B2X4uTfJ/FT1R09r9gTsjUjNJotuog=="],
"pirates": ["pirates@4.0.7", "", {}, "sha512-TfySrs/5nm8fQJDcBDuUng3VOUKsd7S+zqvbOTiGXHfxX4wK31ard+hoNuvkicM/2YFzlpDgABOevKSsB4G/FA=="],
"postcss": ["postcss@8.5.6", "", { "dependencies": { "nanoid": "^3.3.11", "picocolors": "^1.1.1", "source-map-js": "^1.2.1" } }, "sha512-3Ybi1tAuwAP9s0r1UQ2J4n5Y0G05bJkpUIO0/bI9MhwmD70S5aTWbXGBwxHrelT+XM1k6dM0pk+SwNkpTRN7Pg=="],
"postcss-import": ["postcss-import@15.1.0", "", { "dependencies": { "postcss-value-parser": "^4.0.0", "read-cache": "^1.0.0", "resolve": "^1.1.7" }, "peerDependencies": { "postcss": "^8.0.0" } }, "sha512-hpr+J05B2FVYUAXHeK1YyI267J/dDDhMU6B6civm8hSY1jYJnBXxzKDKDswzJmtLHryrjhnDjqqp/49t8FALew=="],
"postcss-js": ["postcss-js@4.1.0", "", { "dependencies": { "camelcase-css": "^2.0.1" }, "peerDependencies": { "postcss": "^8.4.21" } }, "sha512-oIAOTqgIo7q2EOwbhb8UalYePMvYoIeRY2YKntdpFQXNosSu3vLrniGgmH9OKs/qAkfoj5oB3le/7mINW1LCfw=="],
"postcss-load-config": ["postcss-load-config@6.0.1", "", { "dependencies": { "lilconfig": "^3.1.1" }, "peerDependencies": { "jiti": ">=1.21.0", "postcss": ">=8.0.9", "tsx": "^4.8.1", "yaml": "^2.4.2" }, "optionalPeers": ["jiti", "postcss", "tsx", "yaml"] }, "sha512-oPtTM4oerL+UXmx+93ytZVN82RrlY/wPUV8IeDxFrzIjXOLF1pN+EmKPLbubvKHT2HC20xXsCAH2Z+CKV6Oz/g=="],
"postcss-nested": ["postcss-nested@6.2.0", "", { "dependencies": { "postcss-selector-parser": "^6.1.1" }, "peerDependencies": { "postcss": "^8.2.14" } }, "sha512-HQbt28KulC5AJzG+cZtj9kvKB93CFCdLvog1WFLf1D+xmMvPGlBstkpTEZfK5+AN9hfJocyBFCNiqyS48bpgzQ=="],
"postcss-selector-parser": ["postcss-selector-parser@6.1.2", "", { "dependencies": { "cssesc": "^3.0.0", "util-deprecate": "^1.0.2" } }, "sha512-Q8qQfPiZ+THO/3ZrOrO0cJJKfpYCagtMUkXbnEfmgUjwXg6z/WBeOyS9APBBPCTSiDV+s4SwQGu8yFsiMRIudg=="],
"postcss-value-parser": ["postcss-value-parser@4.2.0", "", {}, "sha512-1NNCs6uurfkVbeXG4S8JFT9t19m45ICnif8zWLd5oPSZ50QnwMfK+H3jv408d4jw/7Bttv5axS5IiHoLaVNHeQ=="],
"punycode": ["punycode@2.3.1", "", {}, "sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg=="],
"queue-microtask": ["queue-microtask@1.2.3", "", {}, "sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A=="],
"read-cache": ["read-cache@1.0.0", "", { "dependencies": { "pify": "^2.3.0" } }, "sha512-Owdv/Ft7IjOgm/i0xvNDZ1LrRANRfew4b2prF3OWMQLxLfu3bS8FVhCsrSCMK4lR56Y9ya+AThoTpDCTxCmpRA=="],
"readdirp": ["readdirp@4.1.2", "", {}, "sha512-GDhwkLfywWL2s6vEjyhri+eXmfH6j1L7JE27WhqLeYzoh/A3DBaYGEj2H/HFZCn/kMfim73FXxEJTw06WtxQwg=="],
"resolve": ["resolve@1.22.11", "", { "dependencies": { "is-core-module": "^2.16.1", "path-parse": "^1.0.7", "supports-preserve-symlinks-flag": "^1.0.0" }, "bin": { "resolve": "bin/resolve" } }, "sha512-RfqAvLnMl313r7c9oclB1HhUEAezcpLjz95wFH4LVuhk9JF/r22qmVP9AMmOU4vMX7Q8pN8jwNg/CSpdFnMjTQ=="],
"reusify": ["reusify@1.1.0", "", {}, "sha512-g6QUff04oZpHs0eG5p83rFLhHeV00ug/Yf9nZM6fLeUrPguBTkTQOdpAWWspMh55TZfVQDPaN3NQJfbVRAxdIw=="],
"robust-predicates": ["robust-predicates@3.0.2", "", {}, "sha512-IXgzBWvWQwE6PrDI05OvmXUIruQTcoMDzRsOd5CDvHCVLcLHMTSYvOK5Cm46kWqlV3yAbuSpBZdJ5oP5OUoStg=="],
"rollup": ["rollup@4.57.0", "", { "dependencies": { "@types/estree": "1.0.8" }, "optionalDependencies": { "@rollup/rollup-android-arm-eabi": "4.57.0", "@rollup/rollup-android-arm64": "4.57.0", "@rollup/rollup-darwin-arm64": "4.57.0", "@rollup/rollup-darwin-x64": "4.57.0", "@rollup/rollup-freebsd-arm64": "4.57.0", "@rollup/rollup-freebsd-x64": "4.57.0", "@rollup/rollup-linux-arm-gnueabihf": "4.57.0", "@rollup/rollup-linux-arm-musleabihf": "4.57.0", "@rollup/rollup-linux-arm64-gnu": "4.57.0", "@rollup/rollup-linux-arm64-musl": "4.57.0", "@rollup/rollup-linux-loong64-gnu": "4.57.0", "@rollup/rollup-linux-loong64-musl": "4.57.0", "@rollup/rollup-linux-ppc64-gnu": "4.57.0", "@rollup/rollup-linux-ppc64-musl": "4.57.0", "@rollup/rollup-linux-riscv64-gnu": "4.57.0", "@rollup/rollup-linux-riscv64-musl": "4.57.0", "@rollup/rollup-linux-s390x-gnu": "4.57.0", "@rollup/rollup-linux-x64-gnu": "4.57.0", "@rollup/rollup-linux-x64-musl": "4.57.0", "@rollup/rollup-openbsd-x64": "4.57.0", "@rollup/rollup-openharmony-arm64": "4.57.0", "@rollup/rollup-win32-arm64-msvc": "4.57.0", "@rollup/rollup-win32-ia32-msvc": "4.57.0", "@rollup/rollup-win32-x64-gnu": "4.57.0", "@rollup/rollup-win32-x64-msvc": "4.57.0", "fsevents": "~2.3.2" }, "bin": { "rollup": "dist/bin/rollup" } }, "sha512-e5lPJi/aui4TO1LpAXIRLySmwXSE8k3b9zoGfd42p67wzxog4WHjiZF3M2uheQih4DGyc25QEV4yRBbpueNiUA=="],
"rrweb-cssom": ["rrweb-cssom@0.8.0", "", {}, "sha512-guoltQEx+9aMf2gDZ0s62EcV8lsXR+0w8915TC3ITdn2YueuNjdAYh/levpU9nFaoChh9RUS5ZdQMrKfVEN9tw=="],
"run-parallel": ["run-parallel@1.2.0", "", { "dependencies": { "queue-microtask": "^1.2.2" } }, "sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA=="],
"runed": ["runed@0.23.4", "", { "dependencies": { "esm-env": "^1.0.0" }, "peerDependencies": { "svelte": "^5.7.0" } }, "sha512-9q8oUiBYeXIDLWNK5DfCWlkL0EW3oGbk845VdKlPeia28l751VpfesaB/+7pI6rnbx1I6rqoZ2fZxptOJLxILA=="],
"rw": ["rw@1.3.3", "", {}, "sha512-PdhdWy89SiZogBLaw42zdeqtRJ//zFd2PgQavcICDUgJT5oW10QCRKbJ6bg4r0/UY2M6BWd5tkxuGFRvCkgfHQ=="],
"sade": ["sade@1.8.1", "", { "dependencies": { "mri": "^1.1.0" } }, "sha512-xal3CZX1Xlo/k4ApwCFrHVACi9fBqJ7V+mwhBsuf/1IOKbBy098Fex+Wa/5QMubw09pSZ/u8EY8PWgevJsXp1A=="],
"safer-buffer": ["safer-buffer@2.1.2", "", {}, "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg=="],
@@ -476,6 +629,10 @@
"style-to-object": ["style-to-object@1.0.14", "", { "dependencies": { "inline-style-parser": "0.2.7" } }, "sha512-LIN7rULI0jBscWQYaSswptyderlarFkjQ+t79nzty8tcIAceVomEVlLzH5VP4Cmsv6MtKhs7qaAiwlcp+Mgaxw=="],
"sucrase": ["sucrase@3.35.1", "", { "dependencies": { "@jridgewell/gen-mapping": "^0.3.2", "commander": "^4.0.0", "lines-and-columns": "^1.1.6", "mz": "^2.7.0", "pirates": "^4.0.1", "tinyglobby": "^0.2.11", "ts-interface-checker": "^0.1.9" }, "bin": { "sucrase": "bin/sucrase", "sucrase-node": "bin/sucrase-node" } }, "sha512-DhuTmvZWux4H1UOnWMB3sk0sbaCVOoQZjv8u1rDoTV0HTdGem9hkAZtl4JZy8P2z4Bg0nT+YMeOFyVr4zcG5Tw=="],
"supports-preserve-symlinks-flag": ["supports-preserve-symlinks-flag@1.0.0", "", {}, "sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w=="],
"svelte": ["svelte@5.49.0", "", { "dependencies": { "@jridgewell/remapping": "^2.3.4", "@jridgewell/sourcemap-codec": "^1.5.0", "@sveltejs/acorn-typescript": "^1.0.5", "@types/estree": "^1.0.5", "acorn": "^8.12.1", "aria-query": "^5.3.1", "axobject-query": "^4.1.0", "clsx": "^2.1.1", "devalue": "^5.6.2", "esm-env": "^1.2.1", "esrap": "^2.2.1", "is-reference": "^3.0.3", "locate-character": "^3.0.0", "magic-string": "^0.30.11", "zimmerframe": "^1.1.2" } }, "sha512-Fn2mCc3XX0gnnbBYzWOTrZHi5WnF9KvqmB1+KGlUWoJkdioPmFYtg2ALBr6xl2dcnFTz3Vi7/mHpbKSVg/imVg=="],
"svelte-check": ["svelte-check@4.3.5", "", { "dependencies": { "@jridgewell/trace-mapping": "^0.3.25", "chokidar": "^4.0.1", "fdir": "^6.2.0", "picocolors": "^1.0.0", "sade": "^1.7.4" }, "peerDependencies": { "svelte": "^4.0.0 || ^5.0.0-next.0", "typescript": ">=5.0.0" }, "bin": { "svelte-check": "bin/svelte-check" } }, "sha512-e4VWZETyXaKGhpkxOXP+B/d0Fp/zKViZoJmneZWe/05Y2aqSKj3YN2nLfYPJBQ87WEiY4BQCQ9hWGu9mPT1a1Q=="],
@@ -492,6 +649,10 @@
"tapable": ["tapable@2.3.0", "", {}, "sha512-g9ljZiwki/LfxmQADO3dEY1CbpmXT5Hm2fJ+QaGKwSXUylMybePR7/67YW7jOrrvjEgL1Fmz5kzyAjWVWLlucg=="],
"thenify": ["thenify@3.3.1", "", { "dependencies": { "any-promise": "^1.0.0" } }, "sha512-RVZSIV5IG10Hk3enotrhvz0T9em6cyHBLkH/YAZuKqd8hRkKhSfCGIcP2KUY0EPxndzANBmNllzWPwak+bheSw=="],
"thenify-all": ["thenify-all@1.6.0", "", { "dependencies": { "thenify": ">= 3.1.0 < 4" } }, "sha512-RNxQH/qI8/t3thXJDwcstUO4zeqo64+Uy/+sNVRBx4Xn2OX+OZ9oP+iJnNFqplFra2ZUVeKCSa2oVWi3T4uVmA=="],
"tinybench": ["tinybench@2.9.0", "", {}, "sha512-0+DUvqWMValLmha6lr4kD8iAMK1HzV0/aKnCtWb9v9641TnP/MFb7Pc2bxoxQjTXAErryXVgUOfv2YqNllqGeg=="],
"tinyexec": ["tinyexec@0.3.2", "", {}, "sha512-KQQR9yN7R5+OSwaK0XQoj22pwHoTlgYqmUscPYoknOoWCWfj/5/ABTMRi69FrKU5ffPVh5QcFikpWJI/P1ocHA=="],
@@ -508,18 +669,24 @@
"tldts-core": ["tldts-core@6.1.86", "", {}, "sha512-Je6p7pkk+KMzMv2XXKmAE3McmolOQFdxkKw0R8EYNr7sELW46JqnNeTX8ybPiQgvg1ymCoF8LXs5fzFaZvJPTA=="],
"to-regex-range": ["to-regex-range@5.0.1", "", { "dependencies": { "is-number": "^7.0.0" } }, "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ=="],
"totalist": ["totalist@3.0.1", "", {}, "sha512-sf4i37nQ2LBx4m3wB74y+ubopq6W/dIzXg0FDGjsYnZHVa1Da8FH853wlL2gtUhg+xJXjfk3kUZS3BRoQeoQBQ=="],
"tough-cookie": ["tough-cookie@5.1.2", "", { "dependencies": { "tldts": "^6.1.32" } }, "sha512-FVDYdxtnj0G6Qm/DhNPSb8Ju59ULcup3tuJxkFb5K8Bv2pUXILbf0xZWU8PX8Ov19OXljbUyveOFwRMwkXzO+A=="],
"tr46": ["tr46@5.1.1", "", { "dependencies": { "punycode": "^2.3.1" } }, "sha512-hdF5ZgjTqgAntKkklYw0R03MG2x/bSzTtkxmIRw/sTNV8YXsCJ1tfLAX23lhxhHJlEf3CRCOCGGWw3vI3GaSPw=="],
"ts-interface-checker": ["ts-interface-checker@0.1.13", "", {}, "sha512-Y/arvbn+rrz3JCKl9C4kVNfTfSm2/mEp5FSz5EsZSANGPSlQrpRI5M4PKF+mJnE52jOO90PnPSc3Ur3bTQw0gA=="],
"tslib": ["tslib@2.8.1", "", {}, "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w=="],
"typescript": ["typescript@5.9.3", "", { "bin": { "tsc": "bin/tsc", "tsserver": "bin/tsserver" } }, "sha512-jl1vZzPDinLr9eUt3J/t7V6FgNEw9QjvBPdysz9KfQDD41fQrC2Y4vKQdiaUpFT4bXlb1RHhLpp8wtm6M5TgSw=="],
"undici-types": ["undici-types@7.16.0", "", {}, "sha512-Zz+aZWSj8LE6zoxD+xrjh4VfkIG8Ya6LvYkZqtUQGJPZjYl53ypCaUwWqo7eI0x66KBGeRo+mlBEkMSeSZ38Nw=="],
"util-deprecate": ["util-deprecate@1.0.2", "", {}, "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw=="],
"vite": ["vite@6.4.1", "", { "dependencies": { "esbuild": "^0.25.0", "fdir": "^6.4.4", "picomatch": "^4.0.2", "postcss": "^8.5.3", "rollup": "^4.34.9", "tinyglobby": "^0.2.13" }, "optionalDependencies": { "fsevents": "~2.3.3" }, "peerDependencies": { "@types/node": "^18.0.0 || ^20.0.0 || >=22.0.0", "jiti": ">=1.21.0", "less": "*", "lightningcss": "^1.21.0", "sass": "*", "sass-embedded": "*", "stylus": "*", "sugarss": "*", "terser": "^5.16.0", "tsx": "^4.8.1", "yaml": "^2.4.2" }, "optionalPeers": ["@types/node", "jiti", "less", "lightningcss", "sass", "sass-embedded", "stylus", "sugarss", "terser", "tsx", "yaml"], "bin": { "vite": "bin/vite.js" } }, "sha512-+Oxm7q9hDoLMyJOYfUYBuHQo+dkAloi33apOPP56pzj+vsdJDzr+j1NISE5pyaAuKL4A3UD34qd0lx5+kfKp2g=="],
"vite-node": ["vite-node@3.2.4", "", { "dependencies": { "cac": "^6.7.14", "debug": "^4.4.1", "es-module-lexer": "^1.7.0", "pathe": "^2.0.3", "vite": "^5.0.0 || ^6.0.0 || ^7.0.0-0" }, "bin": { "vite-node": "vite-node.mjs" } }, "sha512-EbKSKh+bh1E1IFxeO0pg1n4dvoOTt0UDiXMd/qn++r98+jPO1xtJilvXldeuQ8giIB5IkpjCgMleHMNEsGH6pg=="],
@@ -548,6 +715,12 @@
"zimmerframe": ["zimmerframe@1.1.4", "", {}, "sha512-B58NGBEoc8Y9MWWCQGl/gq9xBCe4IiKM0a2x7GZdQKOW5Exr8S1W24J6OgM1njK8xCRGvAJIL/MxXHf6SkmQKQ=="],
"zod": ["zod@3.25.76", "", {}, "sha512-gzUt/qt81nXsFGKIFcC3YnfEAx5NkunCfnDlvuBSSFS02bcXu4Lmea0AFIUwbLWxWPx3d9p8S5QoaujKcNQxcQ=="],
"@layerstack/tailwind/tailwind-merge": ["tailwind-merge@2.6.0", "", {}, "sha512-P+Vu1qXfzediirmHOC3xKGAYeZtPcV9g76X+xg2FD4tYgR71ewMA35Y3sCz3zhiN/dwefRpJX0yBcgwi1fXNQA=="],
"@layerstack/tailwind/tailwindcss": ["tailwindcss@3.4.19", "", { "dependencies": { "@alloc/quick-lru": "^5.2.0", "arg": "^5.0.2", "chokidar": "^3.6.0", "didyoumean": "^1.2.2", "dlv": "^1.1.3", "fast-glob": "^3.3.2", "glob-parent": "^6.0.2", "is-glob": "^4.0.3", "jiti": "^1.21.7", "lilconfig": "^3.1.3", "micromatch": "^4.0.8", "normalize-path": "^3.0.0", "object-hash": "^3.0.0", "picocolors": "^1.1.1", "postcss": "^8.4.47", "postcss-import": "^15.1.0", "postcss-js": "^4.0.1", "postcss-load-config": "^4.0.2 || ^5.0 || ^6.0", "postcss-nested": "^6.2.0", "postcss-selector-parser": "^6.1.2", "resolve": "^1.22.8", "sucrase": "^3.35.0" }, "bin": { "tailwind": "lib/cli.js", "tailwindcss": "lib/cli.js" } }, "sha512-3ofp+LL8E+pK/JuPLPggVAIaEuhvIz4qNcf3nA1Xn2o/7fb7s/TYpHhwGDv1ZU3PkBluUVaF8PyCHcm48cKLWQ=="],
"@tailwindcss/oxide-wasm32-wasi/@emnapi/core": ["@emnapi/core@1.8.1", "", { "dependencies": { "@emnapi/wasi-threads": "1.1.0", "tslib": "^2.4.0" }, "bundled": true }, "sha512-AvT9QFpxK0Zd8J0jopedNm+w/2fIzvtPKPjqyw9jwvBaReTTqPBk9Hixaz7KbjimP+QNz605/XnjFcDAL2pqBg=="],
"@tailwindcss/oxide-wasm32-wasi/@emnapi/runtime": ["@emnapi/runtime@1.8.1", "", { "dependencies": { "tslib": "^2.4.0" }, "bundled": true }, "sha512-mehfKSMWjjNol8659Z8KxEMrdSJDDot5SXMq00dM8BN4o+CLNXQ0xH2V7EchNHV4RmbZLmmPdEaXZc5H2FXmDg=="],
@@ -559,5 +732,31 @@
"@tailwindcss/oxide-wasm32-wasi/@tybys/wasm-util": ["@tybys/wasm-util@0.10.1", "", { "dependencies": { "tslib": "^2.4.0" }, "bundled": true }, "sha512-9tTaPJLSiejZKx+Bmog4uSubteqTvFrVrURwkmHixBo0G4seD0zUxp98E1DzUBJxLQ3NPwXrGKDiVjwx/DpPsg=="],
"@tailwindcss/oxide-wasm32-wasi/tslib": ["tslib@2.8.1", "", { "bundled": true }, "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w=="],
"anymatch/picomatch": ["picomatch@2.3.1", "", {}, "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA=="],
"d3-sankey/d3-array": ["d3-array@2.12.1", "", { "dependencies": { "internmap": "^1.0.0" } }, "sha512-B0ErZK/66mHtEsR1TkPEEkwdy+WDesimkM5gpZr5Dsg54BiTA5RXtYW5qTLIAcekaS9xfZrzBLF/OAkB3Qn1YQ=="],
"d3-sankey/d3-shape": ["d3-shape@1.3.7", "", { "dependencies": { "d3-path": "1" } }, "sha512-EUkvKjqPFUAZyOlhY5gzCxCeI0Aep04LwIRpsZ/mLFelJiUfnK56jo5JMDSE7yyP2kLSb6LtF+S5chMk7uqPqw=="],
"fast-glob/glob-parent": ["glob-parent@5.1.2", "", { "dependencies": { "is-glob": "^4.0.1" } }, "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow=="],
"micromatch/picomatch": ["picomatch@2.3.1", "", {}, "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA=="],
"sucrase/commander": ["commander@4.1.1", "", {}, "sha512-NOKm8xhkzAjzFx8B2v5OAHT+u5pRQc2UCa2Vq9jYL/31o2wi9mxBA7LIFs3sV5VSC49z6pEhfbMULvShKj26WA=="],
"@layerstack/tailwind/tailwindcss/chokidar": ["chokidar@3.6.0", "", { "dependencies": { "anymatch": "~3.1.2", "braces": "~3.0.2", "glob-parent": "~5.1.2", "is-binary-path": "~2.1.0", "is-glob": "~4.0.1", "normalize-path": "~3.0.0", "readdirp": "~3.6.0" }, "optionalDependencies": { "fsevents": "~2.3.2" } }, "sha512-7VT13fmjotKpGipCW9JEQAusEPE+Ei8nl6/g4FBAmIm0GOOLMua9NDDo/DWp0ZAxCr3cPq5ZpBqmPAQgDda2Pw=="],
"@layerstack/tailwind/tailwindcss/jiti": ["jiti@1.21.7", "", { "bin": { "jiti": "bin/jiti.js" } }, "sha512-/imKNG4EbWNrVjoNC/1H5/9GFy+tqjGBHCaSsN+P2RnPqjsLmv6UD3Ej+Kj8nBWaRAwyk7kK5ZUc+OEatnTR3A=="],
"d3-sankey/d3-array/internmap": ["internmap@1.0.1", "", {}, "sha512-lDB5YccMydFBtasVtxnZ3MRBHuaoE8GKsppq+EchKL2U4nK/DmEpPHNH8MZe5HkMtpSiTSOZwfN0tzYjO/lJEw=="],
"d3-sankey/d3-shape/d3-path": ["d3-path@1.0.9", "", {}, "sha512-VLaYcn81dtHVTjEHd8B+pbe9yHWpXKZUC87PzoFmsFrJqgFwDe/qxfp5MlfsfM1V5E/iVt0MmEbWQ7FVIXh/bg=="],
"@layerstack/tailwind/tailwindcss/chokidar/glob-parent": ["glob-parent@5.1.2", "", { "dependencies": { "is-glob": "^4.0.1" } }, "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow=="],
"@layerstack/tailwind/tailwindcss/chokidar/readdirp": ["readdirp@3.6.0", "", { "dependencies": { "picomatch": "^2.2.1" } }, "sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA=="],
"@layerstack/tailwind/tailwindcss/chokidar/readdirp/picomatch": ["picomatch@2.3.1", "", {}, "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA=="],
}
}
+1
@@ -41,6 +41,7 @@
"d3-shape": "^3.2.0",
"d3-time-format": "^4.1.0",
"date-fns": "^4.1.0",
"layerchart": "^1.0.13",
"overlayscrollbars": "^2.14.0",
"overlayscrollbars-svelte": "^0.5.5"
}
+81
@@ -1,3 +1,4 @@
import { authStore } from "$lib/auth.svelte";
import type {
CandidateResponse,
CodeDescription,
@@ -11,10 +12,17 @@ import type {
LinkedRmpProfile,
ListInstructorsResponse,
RescoreResponse,
ScraperStatsResponse,
SearchResponse as SearchResponseGenerated,
ServiceInfo,
ServiceStatus,
StatusResponse,
SubjectDetailResponse,
SubjectResultEntry,
SubjectSummary,
SubjectsResponse,
TimeseriesPoint,
TimeseriesResponse,
TopCandidateResponse,
User,
} from "$lib/bindings";
@@ -35,9 +43,16 @@ export type {
LinkedRmpProfile,
ListInstructorsResponse,
RescoreResponse,
ScraperStatsResponse,
ServiceInfo,
ServiceStatus,
StatusResponse,
SubjectDetailResponse,
SubjectResultEntry,
SubjectSummary,
SubjectsResponse,
TimeseriesPoint,
TimeseriesResponse,
TopCandidateResponse,
};
@@ -49,6 +64,8 @@ export type ReferenceEntry = CodeDescription;
// SearchResponse re-exported (aliased to strip the "Generated" suffix)
export type SearchResponse = SearchResponseGenerated;
export type ScraperPeriod = "1h" | "6h" | "24h" | "7d" | "30d";
// Client-side only — not generated from Rust
export type SortColumn = "course_code" | "title" | "instructor" | "time" | "seats";
export type SortDirection = "asc" | "desc";
@@ -119,6 +136,29 @@ export interface MetricsParams {
limit?: number;
}
/** A time range for timeline queries (ISO-8601 strings). */
export interface TimelineRange {
start: string;
end: string;
}
/** Request body for POST /api/timeline. */
export interface TimelineRequest {
ranges: TimelineRange[];
}
/** A single 15-minute slot returned by the timeline API. */
export interface TimelineSlot {
time: string;
subjects: Record<string, number>;
}
/** Response from POST /api/timeline. */
export interface TimelineResponse {
slots: TimelineSlot[];
subjects: string[];
}
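// Illustrative request/response pair for POST /api/timeline (all values made up):
//   request:  { "ranges": [{ "start": "2026-01-26T13:00:00Z", "end": "2026-01-26T17:00:00Z" }] }
//   response: { "slots": [{ "time": "2026-01-26T13:00:00Z", "subjects": { "CS": 412, "MAT": 367 } }],
//              "subjects": ["CS", "MAT"] }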
export interface SearchParams {
term: string;
subjects?: string[];
@@ -173,6 +213,10 @@ export class BannerApiClient {
const response = await this.fetchFn(...args);
if (response.status === 401) {
authStore.handleUnauthorized();
}
if (!response.ok) {
throw new Error(`API request failed: ${response.status} ${response.statusText}`);
}
@@ -190,6 +234,10 @@ export class BannerApiClient {
const response = await this.fetchFn(...args);
if (response.status === 401) {
authStore.handleUnauthorized();
}
if (!response.ok) {
throw new Error(`API request failed: ${response.status} ${response.statusText}`);
}
@@ -281,6 +329,13 @@ export class BannerApiClient {
/** Stored `Last-Modified` value for audit log conditional requests. */
private _auditLastModified: string | null = null;
async getTimeline(ranges: TimelineRange[]): Promise<TimelineResponse> {
return this.request<TimelineResponse>("/timeline", {
method: "POST",
body: { ranges } satisfies TimelineRequest,
});
}
async getMetrics(params?: MetricsParams): Promise<MetricsResponse> {
const query = new URLSearchParams();
if (params?.course_id !== undefined) query.set("course_id", String(params.course_id));
@@ -341,6 +396,32 @@ export class BannerApiClient {
method: "POST",
});
}
// Scraper analytics endpoints
async getScraperStats(period?: ScraperPeriod): Promise<ScraperStatsResponse> {
const qs = period ? `?period=${period}` : "";
return this.request<ScraperStatsResponse>(`/admin/scraper/stats${qs}`);
}
async getScraperTimeseries(period?: ScraperPeriod, bucket?: string): Promise<TimeseriesResponse> {
const query = new URLSearchParams();
if (period) query.set("period", period);
if (bucket) query.set("bucket", bucket);
const qs = query.toString();
return this.request<TimeseriesResponse>(`/admin/scraper/timeseries${qs ? `?${qs}` : ""}`);
}
async getScraperSubjects(): Promise<SubjectsResponse> {
return this.request<SubjectsResponse>("/admin/scraper/subjects");
}
async getScraperSubjectDetail(subject: string, limit?: number): Promise<SubjectDetailResponse> {
const qs = limit !== undefined ? `?limit=${limit}` : "";
return this.request<SubjectDetailResponse>(
`/admin/scraper/subjects/${encodeURIComponent(subject)}${qs}`
);
}
}
export const client = new BannerApiClient();
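A minimal usage sketch of the new endpoints; the period, bucket, limit, and subject values below are illustrative assumptions, not taken from the backend.
import { client } from "$lib/api";

// Wrapped in an async helper so the awaits are legal in a sketch.
async function loadDashboards() {
  // Enrollment timeline for the last 24 hours, sent as a single range.
  const end = new Date();
  const start = new Date(end.getTime() - 24 * 60 * 60 * 1000);
  const timeline = await client.getTimeline([
    { start: start.toISOString(), end: end.toISOString() },
  ]);
  console.log(`${timeline.subjects.length} subjects across ${timeline.slots.length} slots`);

  // Scraper analytics: aggregate stats, a bucketed timeseries, and per-subject detail.
  const stats = await client.getScraperStats("24h");
  const series = await client.getScraperTimeseries("7d", "1h");
  const subjects = await client.getScraperSubjects();
  const detail = await client.getScraperSubjectDetail("CS", 50);
  console.log(stats, series, subjects, detail);
}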
+7
@@ -60,6 +60,13 @@ class AuthStore {
}
}
/** Idempotently mark the session as lost. Called by apiFetch on 401. */
handleUnauthorized() {
if (this.state.mode !== "unauthenticated") {
this.state = { mode: "unauthenticated" };
}
}
login() {
window.location.href = "/api/auth/login";
}
+7
@@ -11,9 +11,16 @@ export type { LinkedRmpProfile } from "./LinkedRmpProfile";
export type { ListInstructorsResponse } from "./ListInstructorsResponse";
export type { OkResponse } from "./OkResponse";
export type { RescoreResponse } from "./RescoreResponse";
export type { ScraperStatsResponse } from "./ScraperStatsResponse";
export type { SearchResponse } from "./SearchResponse";
export type { ServiceInfo } from "./ServiceInfo";
export type { ServiceStatus } from "./ServiceStatus";
export type { StatusResponse } from "./StatusResponse";
export type { SubjectDetailResponse } from "./SubjectDetailResponse";
export type { SubjectResultEntry } from "./SubjectResultEntry";
export type { SubjectSummary } from "./SubjectSummary";
export type { SubjectsResponse } from "./SubjectsResponse";
export type { TimeseriesPoint } from "./TimeseriesPoint";
export type { TimeseriesResponse } from "./TimeseriesResponse";
export type { TopCandidateResponse } from "./TopCandidateResponse";
export type { User } from "./User";
+47 -1
@@ -16,7 +16,16 @@ import { useClipboard } from "$lib/composables/useClipboard.svelte";
import { cn, tooltipContentClass, formatNumber } from "$lib/utils";
import { Tooltip } from "bits-ui";
import SimpleTooltip from "./SimpleTooltip.svelte";
import { Info, Copy, Check, Star, Triangle, ExternalLink } from "@lucide/svelte";
import {
Info,
Copy,
Check,
Star,
Triangle,
ExternalLink,
Calendar,
Download,
} from "@lucide/svelte";
let { course }: { course: CourseResponse } = $props();
@@ -302,5 +311,42 @@ const clipboard = useClipboard();
>
</div>
{/if}
<!-- Calendar Export -->
{#if course.meetingTimes.length > 0}
<div>
<h4 class="text-sm text-foreground mb-2">
<span class="inline-flex items-center gap-1">
Calendar
<SimpleTooltip
text="Export this course schedule to your calendar app"
delay={150}
passthrough
>
<Info class="size-3 text-muted-foreground/50" />
</SimpleTooltip>
</span>
</h4>
<div class="flex flex-wrap gap-1.5">
<a
href="/api/courses/{course.termCode}/{course.crn}/calendar.ics"
download
class="inline-flex items-center gap-1.5 text-sm font-medium bg-card border border-border rounded-md px-2.5 py-1 text-foreground hover:border-foreground/20 hover:bg-card/80 transition-colors"
>
<Download class="size-3.5" />
ICS File
</a>
<a
href="/api/courses/{course.termCode}/{course.crn}/gcal"
target="_blank"
rel="noopener"
class="inline-flex items-center gap-1.5 text-sm font-medium bg-card border border-border rounded-md px-2.5 py-1 text-foreground hover:border-foreground/20 hover:bg-card/80 transition-colors"
>
<Calendar class="size-3.5" />
Google Calendar
</a>
</div>
</div>
{/if}
</div>
</div>
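The export buttons above are plain anchors; if the same URLs are needed elsewhere, a tiny hypothetical helper (not part of this change) mirroring those href patterns could look like:
// Hypothetical helper mirroring the hrefs in the markup above.
function calendarExportUrls(termCode: string, crn: string) {
  return {
    // Downloadable ICS file for the course's meeting times.
    ics: `/api/courses/${termCode}/${crn}/calendar.ics`,
    // Opens the Google Calendar flow (assumed to redirect to a prefilled event).
    gcal: `/api/courses/${termCode}/${crn}/gcal`,
  };
}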
+10 -6
@@ -11,11 +11,15 @@ const staticTabs = [
const APP_PREFIXES = ["/profile", "/settings", "/admin"];
let profileTab = $derived({
href: authStore.isAuthenticated ? "/profile" : "/login",
label: authStore.isAuthenticated ? "Account" : "Login",
icon: User,
});
let profileTab = $derived(
authStore.isLoading
? { href: "/login" as const, label: null, icon: User }
: {
href: authStore.isAuthenticated ? ("/profile" as const) : ("/login" as const),
label: authStore.isAuthenticated ? "Account" : "Login",
icon: User,
}
);
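// While authStore.isLoading, label is null, so the nav shows only the icon instead of
// briefly rendering "Login" and then swapping to "Account" once the session check resolves.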
function isActive(tabHref: string): boolean {
if (tabHref === "/") return page.url.pathname === "/";
@@ -50,7 +54,7 @@ function isActive(tabHref: string): boolean {
: 'text-muted-foreground hover:text-foreground hover:bg-background/50'}"
>
<User size={15} strokeWidth={2} />
{profileTab.label}
{#if profileTab.label}{profileTab.label}{/if}
</a>
<ThemeToggle />
</div>
+1 -1
@@ -67,7 +67,7 @@ function outTransition(_node: HTMLElement): TransitionConfig {
}
</script>
<div class="relative flex flex-1 flex-col overflow-hidden">
<div class="relative flex flex-1 flex-col">
{#key key}
<div in:inTransition out:outTransition class="flex flex-1 flex-col">
{@render children()}
+28 -8
@@ -2,7 +2,6 @@
import { onMount } from "svelte";
import { scaleTime, scaleLinear } from "d3-scale";
import { SUBJECTS, type Subject } from "$lib/timeline/data";
import type { TimeSlot, ChartContext } from "$lib/timeline/types";
import {
PADDING,
@@ -125,12 +124,31 @@ let pointerOverCanvas = false;
// ── Drawer ──────────────────────────────────────────────────────────
let drawerOpen = $state(false);
let enabledSubjects: Set<Subject> = $state(new Set(SUBJECTS));
// Start with an empty set — subjects are populated dynamically from the API.
let enabledSubjects: Set<string> = $state(new Set());
// ── Data store ──────────────────────────────────────────────────────
const store = createTimelineStore();
let data: TimeSlot[] = $derived(store.data);
let activeSubjects = $derived(SUBJECTS.filter((s) => enabledSubjects.has(s)));
let allSubjects: string[] = $derived(store.subjects);
// Auto-enable newly discovered subjects.
$effect(() => {
const storeSubjects = store.subjects;
const next = new Set(enabledSubjects);
let changed = false;
for (const s of storeSubjects) {
if (!next.has(s)) {
next.add(s);
changed = true;
}
}
if (changed) {
enabledSubjects = next;
}
});
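// Note: this effect only adds entries; subjects that later vanish from the API response
// stay enabled until the user toggles them off or uses the disable-all control.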
let activeSubjects = $derived(allSubjects.filter((s) => enabledSubjects.has(s)));
// ── Derived layout ──────────────────────────────────────────────────
let viewStart = $derived(viewCenter - viewSpan / 2);
@@ -151,7 +169,7 @@ let yScale = scaleLinear()
.range([0, 1]);
// ── Subject toggling ────────────────────────────────────────────────
function toggleSubject(subject: Subject) {
function toggleSubject(subject: string) {
const next = new Set(enabledSubjects);
if (next.has(subject)) next.delete(subject);
else next.add(subject);
@@ -159,7 +177,7 @@ function toggleSubject(subject: Subject) {
}
function enableAll() {
enabledSubjects = new Set(SUBJECTS);
enabledSubjects = new Set(allSubjects);
}
function disableAll() {
@@ -192,7 +210,7 @@ function render() {
};
const visible = getVisibleSlots(data, viewStart, viewEnd);
const visibleStack = stackVisibleSlots(visible, enabledSubjects, animMap);
const visibleStack = stackVisibleSlots(visible, allSubjects, enabledSubjects, animMap);
drawGrid(chart);
drawHoverColumn(chart, visibleStack, hoverSlotTime);
@@ -585,8 +603,9 @@ function tick(timestamp: number) {
// ── Animation sync ──────────────────────────────────────────────────
$effect(() => {
const slots = data;
const subs = allSubjects;
const enabled = enabledSubjects;
syncAnimTargets(animMap, slots, enabled);
syncAnimTargets(animMap, slots, subs, enabled);
});
// Request data whenever the visible window changes.
@@ -625,7 +644,7 @@ onMount(() => {
class:cursor-grabbing={isDragging}
style="display: block; touch-action: none;"
tabindex="0"
aria-label="Interactive class schedule timeline chart"
aria-label="Interactive enrollment timeline chart"
onpointerdown={(e) => { canvasEl?.focus(); onPointerDown(e); }}
onpointermove={onPointerMove}
onpointerup={onPointerUp}
@@ -638,6 +657,7 @@ onMount(() => {
<TimelineDrawer
bind:open={drawerOpen}
subjects={allSubjects}
{enabledSubjects}
{followEnabled}
onToggleSubject={toggleSubject}
+8 -5
@@ -1,13 +1,14 @@
<script lang="ts">
import { Filter, X } from "@lucide/svelte";
import { SUBJECTS, SUBJECT_COLORS, type Subject } from "$lib/timeline/data";
import { getSubjectColor } from "$lib/timeline/data";
import { DRAWER_WIDTH } from "$lib/timeline/constants";
interface Props {
open: boolean;
enabledSubjects: Set<Subject>;
subjects: readonly string[];
enabledSubjects: Set<string>;
followEnabled: boolean;
onToggleSubject: (subject: Subject) => void;
onToggleSubject: (subject: string) => void;
onEnableAll: () => void;
onDisableAll: () => void;
onResumeFollow: () => void;
@@ -15,6 +16,7 @@ interface Props {
let {
open = $bindable(),
subjects,
enabledSubjects,
followEnabled,
onToggleSubject,
@@ -109,8 +111,9 @@ function onKeyDown(e: KeyboardEvent) {
</div>
</div>
<div class="space-y-0.5">
{#each SUBJECTS as subject}
{#each subjects as subject}
{@const enabled = enabledSubjects.has(subject)}
{@const color = getSubjectColor(subject)}
<button
class="flex items-center gap-2 w-full px-1.5 py-1 rounded text-xs
hover:bg-muted/50 transition-colors cursor-pointer text-left"
@@ -118,7 +121,7 @@ function onKeyDown(e: KeyboardEvent) {
>
<span
class="inline-block w-3 h-3 rounded-sm shrink-0 transition-opacity"
style="background: {SUBJECT_COLORS[subject]}; opacity: {enabled ? 1 : 0.2};"
style="background: {color}; opacity: {enabled ? 1 : 0.2};"
></span>
<span
class="transition-opacity {enabled
@@ -1,6 +1,6 @@
<script lang="ts">
import { timeFormat } from "d3-time-format";
import { SUBJECT_COLORS, type Subject } from "$lib/timeline/data";
import { getSubjectColor } from "$lib/timeline/data";
import type { TimeSlot } from "$lib/timeline/types";
import { enabledTotalClasses } from "$lib/timeline/viewport";
@@ -9,7 +9,7 @@ interface Props {
x: number;
y: number;
slot: TimeSlot | null;
activeSubjects: readonly Subject[];
activeSubjects: readonly string[];
}
let { visible, x, y, slot, activeSubjects }: Props = $props();
@@ -35,7 +35,7 @@ const fmtTime = timeFormat("%-I:%M %p");
<div class="flex items-center gap-1.5">
<span
class="inline-block w-2 h-2 rounded-sm"
style="background: {SUBJECT_COLORS[subject]}"
style="background: {getSubjectColor(subject)}"
></span>
<span class="text-muted-foreground">{subject}</span>
</div>
+17
@@ -49,6 +49,23 @@ export function formatDuration(ms: number): string {
* Uses {@link formatDuration} for the text, plus computes the optimal refresh
* interval so callers can schedule the next update efficiently.
*/
/**
* Format a millisecond duration with a dynamic unit, optimised for
* scrape-style timings that are typically under 60 seconds.
*
* - < 1 000 ms → "423ms"
* - < 10 000 ms → "4.52s" (two decimals)
* - < 60 000 ms → "16.9s" (one decimal)
* - ≥ 60 000 ms → delegates to {@link formatDuration} ("1m 5s")
*/
export function formatDurationMs(ms: number): string {
const abs = Math.abs(ms);
if (abs < 1_000) return `${Math.round(abs)}ms`;
if (abs < 10_000) return `${(abs / 1_000).toFixed(2)}s`;
if (abs < 60_000) return `${(abs / 1_000).toFixed(1)}s`;
return formatDuration(ms);
}
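// Expected outputs for the thresholds above:
//   formatDurationMs(423)    -> "423ms"
//   formatDurationMs(4_520)  -> "4.52s"
//   formatDurationMs(16_900) -> "16.9s"
//   formatDurationMs(65_000) -> delegates to formatDuration ("1m 5s" per its docs)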
export function relativeTime(date: Date, ref: Date): RelativeTimeResult {
const diffMs = ref.getTime() - date.getTime();
const totalSeconds = Math.floor(diffMs / 1000);
+6 -3
@@ -5,7 +5,6 @@
* targets. This module owns the AnimMap lifecycle: syncing targets,
* stepping current values, and pruning offscreen entries.
*/
import { SUBJECTS, type Subject } from "./data";
import { VALUE_EASE, MAXY_EASE, SETTLE_THRESHOLD, MIN_MAXY } from "./constants";
import type { AnimEntry, TimeSlot } from "./types";
@@ -20,11 +19,15 @@ export function createAnimMap(): AnimMap {
* Sync animMap targets from data + filter state.
* New slots start at current=0 so they animate in from the baseline.
* Disabled subjects get target=0 so they animate out.
*
* @param subjects - the full list of known subject codes
* @param enabledSubjects - subjects currently toggled on
*/
export function syncAnimTargets(
animMap: AnimMap,
slots: TimeSlot[],
enabledSubjects: Set<Subject>
subjects: readonly string[],
enabledSubjects: Set<string>
): void {
for (const slot of slots) {
const timeMs = slot.time.getTime();
@@ -34,7 +37,7 @@ export function syncAnimTargets(
animMap.set(timeMs, subjectMap);
}
for (const subject of SUBJECTS) {
for (const subject of subjects) {
const realValue = enabledSubjects.has(subject) ? slot.subjects[subject] || 0 : 0;
const entry = subjectMap.get(subject);
if (entry) {
+59 -103
@@ -1,122 +1,78 @@
/**
* Data types, constants, and deterministic slot generation for the class timeline.
* Each 15-minute slot is seeded by its timestamp, so the same slot always produces
* identical data regardless of when or in what order it's fetched.
* Subject color palette for the timeline chart.
*
* Subjects are dynamic (coming from the API), so we assign colors from
* a fixed palette based on a deterministic hash of the subject code.
* Known high-enrollment subjects get hand-picked colors for familiarity.
*/
import { SLOT_INTERVAL_MS } from "./constants";
import type { TimeSlot } from "./types";
export type { TimeSlot };
export const SUBJECTS = [
"CS",
"MATH",
"BIO",
"ENG",
"PHYS",
"HIST",
"CHEM",
"PSY",
"ECE",
"ART",
] as const;
export type Subject = (typeof SUBJECTS)[number];
/** Subject colors — distinct, accessible palette */
export const SUBJECT_COLORS: Record<Subject, string> = {
/** Hand-picked colors for common UTSA subject codes. */
const KNOWN_SUBJECT_COLORS: Record<string, string> = {
CS: "#6366f1", // indigo
MATH: "#f59e0b", // amber
MAT: "#f59e0b", // amber
BIO: "#10b981", // emerald
ENG: "#ef4444", // red
PHYS: "#3b82f6", // blue
HIST: "#8b5cf6", // violet
CHEM: "#f97316", // orange
PHY: "#3b82f6", // blue
HIS: "#8b5cf6", // violet
CHE: "#f97316", // orange
PSY: "#ec4899", // pink
ECE: "#14b8a6", // teal
ART: "#a855f7", // purple
ACC: "#84cc16", // lime
FIN: "#06b6d4", // cyan
MUS: "#e11d48", // rose
POL: "#d946ef", // fuchsia
SOC: "#22d3ee", // sky
KIN: "#4ade80", // green
IS: "#fb923c", // light orange
STA: "#818cf8", // light indigo
MGT: "#fbbf24", // yellow
MKT: "#2dd4bf", // teal-light
};
/**
* Bell-curve-like distribution centered at a given hour.
* Returns a value 0..1 representing relative class density.
* Extended palette for subjects that don't have a hand-picked color.
* These are chosen to be visually distinct from each other.
*/
function bellCurve(hour: number, center: number, spread: number): number {
const x = (hour - center) / spread;
return Math.exp(-0.5 * x * x);
}
const FALLBACK_PALETTE = [
"#f472b6", // pink-400
"#60a5fa", // blue-400
"#34d399", // emerald-400
"#fbbf24", // amber-400
"#a78bfa", // violet-400
"#fb7185", // rose-400
"#38bdf8", // sky-400
"#4ade80", // green-400
"#facc15", // yellow-400
"#c084fc", // purple-400
"#f87171", // red-400
"#2dd4bf", // teal-400
"#fb923c", // orange-400
"#818cf8", // indigo-400
"#a3e635", // lime-400
"#22d3ee", // cyan-400
];
/**
* Each subject has characteristic scheduling patterns:
* peak hours, relative popularity, and spread.
*/
const SUBJECT_PROFILES: Record<Subject, { peaks: number[]; weight: number; spread: number }> = {
CS: { peaks: [10, 14, 16], weight: 12, spread: 2.0 },
MATH: { peaks: [8, 10, 13], weight: 10, spread: 1.8 },
BIO: { peaks: [9, 11, 14], weight: 8, spread: 1.5 },
ENG: { peaks: [9, 11, 14, 16], weight: 7, spread: 2.2 },
PHYS: { peaks: [8, 13, 15], weight: 6, spread: 1.6 },
HIST: { peaks: [10, 13, 15], weight: 5, spread: 2.0 },
CHEM: { peaks: [8, 10, 14], weight: 6, spread: 1.5 },
PSY: { peaks: [11, 14, 16], weight: 7, spread: 2.0 },
ECE: { peaks: [9, 13, 15], weight: 5, spread: 1.8 },
ART: { peaks: [10, 14, 17], weight: 4, spread: 2.5 },
};
/**
* Seeded pseudo-random number generator (LCG) for reproducible data.
*/
function seededRandom(seed: number): () => number {
let s = seed;
return () => {
s = (s * 1664525 + 1013904223) & 0xffffffff;
return (s >>> 0) / 0xffffffff;
};
}
/**
* Integer hash so adjacent slot timestamps produce very different seeds.
*/
function hashTimestamp(ms: number): number {
let h = ms | 0;
h = ((h >> 16) ^ h) * 0x45d9f3b;
h = ((h >> 16) ^ h) * 0x45d9f3b;
h = (h >> 16) ^ h;
return h >>> 0;
}
/** Generate a single TimeSlot for the given aligned timestamp. */
function generateSlot(timeMs: number): TimeSlot {
const rand = seededRandom(hashTimestamp(timeMs));
const time = new Date(timeMs);
const hour = time.getHours() + time.getMinutes() / 60;
const subjects = {} as Record<Subject, number>;
for (const subject of SUBJECTS) {
const profile = SUBJECT_PROFILES[subject];
let density = 0;
for (const peak of profile.peaks) {
density += bellCurve(hour, peak, profile.spread);
}
const base = density * profile.weight;
const noise = (rand() - 0.5) * 2;
subjects[subject] = Math.max(0, Math.round(base + noise));
/** Simple string hash for deterministic color assignment. */
function hashCode(str: string): number {
let hash = 0;
for (let i = 0; i < str.length; i++) {
hash = ((hash << 5) - hash + str.charCodeAt(i)) | 0;
}
return { time, subjects };
return Math.abs(hash);
}
/**
* Generate TimeSlots covering [startMs, endMs], aligned to 15-minute boundaries.
* Each slot is deterministically seeded by its timestamp.
*/
export function generateSlots(startMs: number, endMs: number): TimeSlot[] {
const alignedStart = Math.floor(startMs / SLOT_INTERVAL_MS) * SLOT_INTERVAL_MS;
const alignedEnd = Math.ceil(endMs / SLOT_INTERVAL_MS) * SLOT_INTERVAL_MS;
/** Cache of assigned colors to avoid re-computing. */
const colorCache = new Map<string, string>();
const slots: TimeSlot[] = [];
for (let t = alignedStart; t <= alignedEnd; t += SLOT_INTERVAL_MS) {
slots.push(generateSlot(t));
}
return slots;
/** Get a consistent color for any subject code. */
export function getSubjectColor(subject: string): string {
const cached = colorCache.get(subject);
if (cached) return cached;
const color =
KNOWN_SUBJECT_COLORS[subject] ?? FALLBACK_PALETTE[hashCode(subject) % FALLBACK_PALETTE.length];
colorCache.set(subject, color);
return color;
}
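A small sketch of the lookup behavior; "ANT" is an arbitrary example of a subject code without a hand-picked entry.
getSubjectColor("CS");  // "#6366f1", straight from KNOWN_SUBJECT_COLORS
getSubjectColor("ANT"); // FALLBACK_PALETTE[hashCode("ANT") % FALLBACK_PALETTE.length], then cached
getSubjectColor("ANT"); // same string on every later call, served from colorCache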
+16 -7
@@ -7,7 +7,7 @@
import { stack, area, curveMonotoneX, type Series } from "d3-shape";
import { timeFormat } from "d3-time-format";
import { SUBJECT_COLORS, type Subject } from "./data";
import { getSubjectColor } from "./data";
import type { AnimMap } from "./animation";
import { getStackSubjects } from "./viewport";
import type { ChartContext, TimeSlot } from "./types";
@@ -55,22 +55,31 @@ export function chooseTickCount(viewSpan: number): number {
* Stack only the visible slice using *animated* values so transitions
* between filter/data states are smooth. Includes subjects that are
* still animating out so removal is gradual.
*
* @param allSubjects - full set of known subject codes
*/
export function stackVisibleSlots(
visible: TimeSlot[],
enabledSubjects: Set<Subject>,
allSubjects: readonly string[],
enabledSubjects: Set<string>,
animMap: AnimMap
): VisibleStack {
if (visible.length === 0) return [];
const stackKeys = getStackSubjects(visible, enabledSubjects, animMap, SETTLE_THRESHOLD);
const stackKeys = getStackSubjects(
visible,
allSubjects,
enabledSubjects,
animMap,
SETTLE_THRESHOLD
);
if (stackKeys.length === 0) return [];
// Build synthetic slots with animated current values.
const animatedSlots: TimeSlot[] = visible.map((slot) => {
const timeMs = slot.time.getTime();
const subjectMap = animMap.get(timeMs);
const subjects = {} as Record<Subject, number>;
const subjects: Record<string, number> = {};
for (const subject of stackKeys) {
const entry = subjectMap?.get(subject);
subjects[subject] = entry ? entry.current : slot.subjects[subject] || 0;
@@ -80,7 +89,7 @@ export function stackVisibleSlots(
const gen = stack<TimeSlot>()
.keys(stackKeys)
.value((d, key) => d.subjects[key as Subject] || 0);
.value((d, key) => d.subjects[key] || 0);
return gen(animatedSlots);
}
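// Each layer of the returned VisibleStack is a d3 Series: .key is the subject code and each
// point is a [y0, y1] pair for one animated slot, which is what drawStackedArea fills between
// when it walks the layers from top to bottom.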
@@ -187,8 +196,8 @@ export function drawStackedArea(chart: ChartContext, visibleStack: VisibleStack)
for (let i = visibleStack.length - 1; i >= 0; i--) {
const layer = visibleStack[i];
const subject = layer.key as Subject;
const color = SUBJECT_COLORS[subject];
const subject = layer.key;
const color = getSubjectColor(subject);
ctx.beginPath();
area<StackPoint>()
+46 -23
@@ -3,10 +3,9 @@
*
* Tracks which time ranges have already been fetched and only requests
* the missing segments when the view expands into unloaded territory.
* Fetches are throttled so rapid panning/zooming doesn't flood the
* (currently mock) API.
* Fetches are throttled so rapid panning/zooming doesn't flood the API.
*/
import { generateSlots } from "./data";
import { client, type TimelineRange } from "$lib/api";
import { SLOT_INTERVAL_MS } from "./constants";
import type { TimeSlot } from "./types";
@@ -16,20 +15,6 @@ type Range = [start: number, end: number];
const FETCH_THROTTLE_MS = 500;
const BUFFER_RATIO = 0.15;
// Mock network latency bounds (ms).
const MOCK_DELAY_MIN = 40;
const MOCK_DELAY_MAX = 120;
/**
* Simulate an API call that returns slots for an arbitrary time range.
* The delay makes loading behaviour visible during development.
*/
async function mockFetch(startMs: number, endMs: number): Promise<TimeSlot[]> {
const delay = MOCK_DELAY_MIN + Math.random() * (MOCK_DELAY_MAX - MOCK_DELAY_MIN);
await new Promise((r) => setTimeout(r, delay));
return generateSlots(startMs, endMs);
}
/** Align a timestamp down to the nearest slot boundary. */
function alignFloor(ms: number): number {
return Math.floor(ms / SLOT_INTERVAL_MS) * SLOT_INTERVAL_MS;
@@ -84,6 +69,24 @@ function mergeRange(ranges: Range[], added: Range): Range[] {
return merged;
}
/**
* Fetch timeline data for the given gap ranges from the API.
* Converts gap ranges into the API request format.
*/
async function fetchFromApi(gaps: Range[]): Promise<TimeSlot[]> {
const ranges: TimelineRange[] = gaps.map(([start, end]) => ({
start: new Date(start).toISOString(),
end: new Date(end).toISOString(),
}));
const response = await client.getTimeline(ranges);
return response.slots.map((slot) => ({
time: new Date(slot.time),
subjects: slot.subjects,
}));
}
/**
* Create a reactive timeline store.
*
@@ -93,6 +96,9 @@ function mergeRange(ranges: Range[], added: Range): Range[] {
*
* The `data` getter returns a sorted `TimeSlot[]` that reactively
* updates as new segments arrive.
*
* The `subjects` getter returns the sorted list of all subject codes
* seen so far across all fetched data.
*/
export function createTimelineStore() {
// All loaded slots keyed by aligned timestamp (ms).
@@ -101,6 +107,9 @@ export function createTimelineStore() {
// Sorted, non-overlapping list of fetched ranges.
let loadedRanges: Range[] = [];
// All subject codes observed across all fetched data.
let knownSubjects: Set<string> = $state(new Set());
let throttleTimer: ReturnType<typeof setTimeout> | undefined;
let pendingStart = 0;
let pendingEnd = 0;
@@ -112,18 +121,28 @@ export function createTimelineStore() {
[...slotMap.values()].sort((a, b) => a.time.getTime() - b.time.getTime())
);
// Sorted subject list derived from the known subjects set.
const subjects: string[] = $derived([...knownSubjects].sort());
async function fetchGaps(start: number, end: number): Promise<void> {
const gaps = findGaps(start, end, loadedRanges);
if (gaps.length === 0) return;
// Fetch all gap segments in parallel.
const results = await Promise.all(gaps.map(([gs, ge]) => mockFetch(gs, ge)));
let slots: TimeSlot[];
try {
slots = await fetchFromApi(gaps);
} catch (err) {
console.error("Timeline fetch failed:", err);
return;
}
// Merge results into the slot map.
const next = new Map(slotMap);
for (const slots of results) {
for (const slot of slots) {
next.set(slot.time.getTime(), slot);
const nextSubjects = new Set(knownSubjects);
for (const slot of slots) {
next.set(slot.time.getTime(), slot);
for (const subject of Object.keys(slot.subjects)) {
nextSubjects.add(subject);
}
}
@@ -132,8 +151,9 @@ export function createTimelineStore() {
loadedRanges = mergeRange(loadedRanges, gap);
}
// Single reactive assignment.
// Single reactive assignments.
slotMap = next;
knownSubjects = nextSubjects;
}
/**
@@ -173,6 +193,9 @@ export function createTimelineStore() {
get data() {
return data;
},
get subjects() {
return subjects;
},
requestRange,
dispose,
};
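A minimal consumption sketch; requestRange's exact parameters aren't shown in this diff, so the (startMs, endMs) signature below is an assumption.
const store = createTimelineStore();

// Ask for a six-hour window ending now; only unloaded gaps reach the API,
// and rapid repeat calls are throttled internally.
const viewEnd = Date.now();
const viewStart = viewEnd - 6 * 60 * 60 * 1000;
store.requestRange(viewStart, viewEnd);

// Reactive getters: time-sorted slots plus every subject code seen so far.
console.log(store.data.length, store.subjects);

// Tear down when the component unmounts (assumed to clear the throttle timer).
store.dispose();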
+6 -6
@@ -1,16 +1,16 @@
/**
* Shared types for the timeline feature.
*
* Subjects are dynamic strings (actual Banner subject codes like "CS",
* "MAT", "BIO") rather than a fixed enum — the set of subjects comes
* from the API response.
*/
import type { ScaleLinear, ScaleTime } from "d3-scale";
import type { Subject } from "./data";
export type { Subject };
/** A single 15-minute time slot with per-subject class counts. */
/** A single 15-minute time slot with per-subject enrollment totals. */
export interface TimeSlot {
time: Date;
subjects: Record<Subject, number>;
subjects: Record<string, number>;
}
/** Lerped animation entry for a single subject within a slot. */
+9 -7
@@ -3,7 +3,6 @@
* hit-testing, and snapping for the timeline canvas.
*/
import { SLOT_INTERVAL_MS, RENDER_MARGIN_SLOTS } from "./constants";
import { SUBJECTS, type Subject } from "./data";
import type { TimeSlot } from "./types";
/**
@@ -55,8 +54,8 @@ export function snapToSlot(timeMs: number): number {
return Math.floor(timeMs / SLOT_INTERVAL_MS) * SLOT_INTERVAL_MS;
}
/** Sum of class counts for enabled subjects in a slot. */
export function enabledTotalClasses(slot: TimeSlot, activeSubjects: readonly Subject[]): number {
/** Sum of enrollment counts for enabled subjects in a slot. */
export function enabledTotalClasses(slot: TimeSlot, activeSubjects: readonly string[]): number {
let sum = 0;
for (const s of activeSubjects) {
sum += slot.subjects[s] || 0;
@@ -67,15 +66,18 @@ export function enabledTotalClasses(slot: TimeSlot, activeSubjects: readonly Sub
/**
* Determine which subjects to include in the stack: all enabled subjects
* plus any disabled subjects still animating out (current > threshold).
*
* @param allSubjects - the full set of known subject codes
*/
export function getStackSubjects(
visible: TimeSlot[],
enabledSubjects: Set<Subject>,
allSubjects: readonly string[],
enabledSubjects: Set<string>,
animMap: Map<number, Map<string, { current: number }>>,
settleThreshold: number
): Subject[] {
const subjects: Subject[] = [];
for (const subject of SUBJECTS) {
): string[] {
const subjects: string[] = [];
for (const subject of allSubjects) {
if (enabledSubjects.has(subject)) {
subjects.push(subject);
continue;
+3 -7
@@ -5,6 +5,7 @@ import { authStore } from "$lib/auth.svelte";
import PageTransition from "$lib/components/PageTransition.svelte";
import ErrorBoundaryFallback from "$lib/components/ErrorBoundaryFallback.svelte";
import {
Activity,
ClipboardList,
FileText,
GraduationCap,
@@ -14,7 +15,7 @@ import {
User,
Users,
} from "@lucide/svelte";
import { onMount, tick } from "svelte";
import { tick } from "svelte";
let { children } = $props();
@@ -40,12 +41,6 @@ $effect(() => {
}
});
onMount(async () => {
if (authStore.isLoading) {
await authStore.init();
}
});
$effect(() => {
if (authStore.state.mode === "unauthenticated") {
goto("/login");
@@ -59,6 +54,7 @@ const userItems = [
const adminItems = [
{ href: "/admin", label: "Dashboard", icon: LayoutDashboard },
{ href: "/admin/scraper", label: "Scraper", icon: Activity },
{ href: "/admin/jobs", label: "Scrape Jobs", icon: ClipboardList },
{ href: "/admin/audit", label: "Audit Log", icon: FileText },
{ href: "/admin/users", label: "Users", icon: Users },
File diff suppressed because it is too large.
@@ -0,0 +1,160 @@
<script lang="ts">
import type { CandidateResponse } from "$lib/api";
import { isRatingValid, ratingStyle, rmpUrl } from "$lib/course";
import { Check, ExternalLink, LoaderCircle, X, XCircle } from "@lucide/svelte";
import ScoreBreakdown from "./ScoreBreakdown.svelte";
let {
candidate,
isMatched = false,
isRejected = false,
disabled = false,
actionLoading = null,
isDark = false,
onmatch,
onreject,
onunmatch,
}: {
candidate: CandidateResponse;
isMatched?: boolean;
isRejected?: boolean;
disabled?: boolean;
actionLoading?: string | null;
isDark?: boolean;
onmatch?: () => void;
onreject?: () => void;
onunmatch?: () => void;
} = $props();
const isPending = $derived(!isMatched && !isRejected);
const isMatchLoading = $derived(actionLoading === `match-${candidate.rmpLegacyId}`);
const isRejectLoading = $derived(actionLoading === `reject-${candidate.rmpLegacyId}`);
const isUnmatchLoading = $derived(actionLoading === `unmatch-${candidate.rmpLegacyId}`);
</script>
<div
class="rounded-md border p-3 transition-all duration-200
{isMatched
? 'border-l-4 border-l-green-500 bg-green-500/5 border-border'
: isRejected
? 'border-border bg-card opacity-50'
: 'border-border bg-card hover:shadow-sm'}"
>
<div class="flex items-start justify-between gap-2">
<div class="min-w-0">
<div class="flex items-center gap-2 flex-wrap">
<span class="font-medium text-foreground text-sm">
{candidate.firstName} {candidate.lastName}
</span>
{#if isMatched}
<span
class="text-[10px] rounded px-1.5 py-0.5 bg-green-100 text-green-700 dark:bg-green-900/30 dark:text-green-400 font-medium"
>
Matched
</span>
{:else if isRejected}
<span
class="text-[10px] rounded px-1.5 py-0.5 bg-red-100 text-red-700 dark:bg-red-900/30 dark:text-red-400 font-medium"
>
Rejected
</span>
{/if}
</div>
{#if candidate.department}
<div class="text-xs text-muted-foreground mt-0.5">{candidate.department}</div>
{/if}
</div>
<div class="flex items-center gap-0.5 shrink-0">
{#if isMatched}
<button
onclick={(e) => {
e.stopPropagation();
onunmatch?.();
}}
{disabled}
class="inline-flex items-center gap-1 rounded px-1.5 py-1 text-xs text-red-500 hover:bg-red-100 dark:hover:bg-red-900/30 transition-colors disabled:opacity-50 cursor-pointer"
title="Remove match"
>
{#if isUnmatchLoading}
<LoaderCircle size={14} class="animate-spin" />
{:else}
<XCircle size={14} />
{/if}
Unmatch
</button>
{:else if isPending}
<button
onclick={(e) => {
e.stopPropagation();
onmatch?.();
}}
{disabled}
class="rounded p-1 text-green-600 hover:bg-green-100 dark:hover:bg-green-900/30 transition-colors disabled:opacity-50 cursor-pointer"
title="Accept match"
>
{#if isMatchLoading}
<LoaderCircle size={14} class="animate-spin" />
{:else}
<Check size={14} />
{/if}
</button>
<button
onclick={(e) => {
e.stopPropagation();
onreject?.();
}}
{disabled}
class="rounded p-1 text-red-500 hover:bg-red-100 dark:hover:bg-red-900/30 transition-colors disabled:opacity-50 cursor-pointer"
title="Reject candidate"
>
{#if isRejectLoading}
<LoaderCircle size={14} class="animate-spin" />
{:else}
<X size={14} />
{/if}
</button>
{/if}
<a
href={rmpUrl(candidate.rmpLegacyId)}
target="_blank"
rel="noopener noreferrer"
onclick={(e) => e.stopPropagation()}
class="rounded p-1 text-muted-foreground hover:bg-muted hover:text-foreground transition-colors cursor-pointer"
title="View on RateMyProfessors"
>
<ExternalLink size={14} />
</a>
</div>
</div>
<!-- Rating stats -->
<div class="mt-2 flex items-center gap-3 text-xs flex-wrap">
{#if isRatingValid(candidate.avgRating, candidate.numRatings ?? 0)}
<span
class="font-semibold tabular-nums"
style={ratingStyle(candidate.avgRating!, isDark)}
>
{candidate.avgRating!.toFixed(1)}
</span>
{:else}
<span class="text-muted-foreground">No rating</span>
{/if}
{#if candidate.avgDifficulty !== null}
<span class="text-muted-foreground tabular-nums"
>{candidate.avgDifficulty.toFixed(1)} diff</span
>
{/if}
<span class="text-muted-foreground tabular-nums">{candidate.numRatings} ratings</span>
{#if candidate.wouldTakeAgainPct !== null}
<span class="text-muted-foreground tabular-nums"
>{candidate.wouldTakeAgainPct.toFixed(0)}% again</span
>
{/if}
</div>
<!-- Score breakdown -->
<div class="mt-2">
<ScoreBreakdown breakdown={candidate.scoreBreakdown} score={candidate.score ?? 0} />
</div>
</div>
@@ -0,0 +1,73 @@
<script lang="ts">
import SimpleTooltip from "$lib/components/SimpleTooltip.svelte";
let {
breakdown = null,
score = 0,
}: {
breakdown?: { [key in string]?: number } | null;
score?: number;
} = $props();
const weights: Record<string, number> = {
name: 0.5,
department: 0.25,
uniqueness: 0.15,
volume: 0.1,
};
const colors: Record<string, string> = {
name: "bg-blue-500",
department: "bg-purple-500",
uniqueness: "bg-amber-500",
volume: "bg-emerald-500",
};
const labels: Record<string, string> = {
name: "Name",
department: "Dept",
uniqueness: "Unique",
volume: "Volume",
};
function fmt(v: number): string {
return (v * 100).toFixed(0);
}
const segments = $derived(
Object.entries(breakdown ?? {})
.filter(([_, value]) => value != null)
.map(([key, value]) => ({
key,
label: labels[key] ?? key,
color: colors[key] ?? "bg-primary",
weight: weights[key] ?? 0,
raw: value!,
pct: value! * (weights[key] ?? 0) * 100,
}))
);
const tooltipText = $derived(
segments.map((s) => `${s.label}: ${fmt(s.raw)}% \u00d7 ${fmt(s.weight)}%`).join("\n") +
`\nTotal: ${fmt(score)}%`
);
</script>
<div class="flex items-center gap-2 text-xs">
<span class="text-muted-foreground shrink-0">Score:</span>
<div class="bg-muted h-2 flex-1 rounded-full overflow-hidden flex">
{#each segments as seg (seg.key)}
<div
class="{seg.color} h-full transition-all duration-300"
style="width: {seg.pct}%"
></div>
{/each}
</div>
<SimpleTooltip text={tooltipText} side="top">
<span
class="tabular-nums font-medium text-foreground cursor-help border-b border-dotted border-muted-foreground/40"
>
{fmt(score)}%
</span>
</SimpleTooltip>
</div>
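The segment widths above are a weighted sum: each factor's normalized value is multiplied by a fixed weight (name 50%, department 25%, uniqueness 15%, volume 10%), and the segments together add up to the overall match score shown next to the bar. A minimal TypeScript sketch of that arithmetic; the breakdown values are made up for illustration, and the real component takes both breakdown and score from the API rather than recomputing them:

const weights: Record<string, number> = { name: 0.5, department: 0.25, uniqueness: 0.15, volume: 0.1 };

// Hypothetical per-factor values, each normalized to 0..1.
const breakdown: Record<string, number> = { name: 0.92, department: 1.0, uniqueness: 0.6, volume: 0.3 };

// Each segment's width is raw value * weight, as a percentage of the full bar.
const segments = Object.entries(breakdown).map(([key, raw]) => ({
  key,
  pct: raw * (weights[key] ?? 0) * 100,
}));

// 0.92*0.5 + 1.0*0.25 + 0.6*0.15 + 0.3*0.1 = 0.83, rendered as "83%".
const totalPct = segments.reduce((sum, s) => sum + s.pct, 0); // 83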
@@ -0,0 +1,756 @@
<script module lang="ts">
import type {
ScraperStatsResponse,
SubjectDetailResponse,
SubjectSummary,
TimeseriesResponse,
} from "$lib/bindings";
// Persisted across navigation so returning to the page shows cached data.
let stats = $state<ScraperStatsResponse | null>(null);
let timeseries = $state<TimeseriesResponse | null>(null);
let subjects = $state<SubjectSummary[]>([]);
let error = $state<string | null>(null);
let refreshError = $state(false);
let refreshInterval = 5_000;
</script>
<script lang="ts">
import { client, type ScraperPeriod } from "$lib/api";
import SimpleTooltip from "$lib/components/SimpleTooltip.svelte";
import { FlexRender, createSvelteTable } from "$lib/components/ui/data-table/index.js";
import { formatAbsoluteDate } from "$lib/date";
import { formatDuration, formatDurationMs, relativeTime } from "$lib/time";
import { formatNumber } from "$lib/utils";
import { Chart, Svg, Area, Axis, Highlight, Tooltip } from "layerchart";
import { curveMonotoneX } from "d3-shape";
import { cubicOut } from "svelte/easing";
import { Tween } from "svelte/motion";
import { scaleTime, scaleLinear } from "d3-scale";
import {
AlertCircle,
ChevronDown,
ChevronRight,
LoaderCircle,
ArrowUp,
ArrowDown,
ArrowUpDown,
} from "@lucide/svelte";
import {
type ColumnDef,
type SortingState,
type Updater,
getCoreRowModel,
getSortedRowModel,
} from "@tanstack/table-core";
import { onDestroy, onMount } from "svelte";
import { fade, slide } from "svelte/transition";
const PERIODS: ScraperPeriod[] = ["1h", "6h", "24h", "7d", "30d"];
let selectedPeriod = $state<ScraperPeriod>("24h");
// Expanded subject detail
let expandedSubject = $state<string | null>(null);
let subjectDetail = $state<SubjectDetailResponse | null>(null);
let detailLoading = $state(false);
// Live-updating clock for relative timestamps
let now = $state(new Date());
let tickTimer: ReturnType<typeof setTimeout> | undefined;
function scheduleTick() {
tickTimer = setTimeout(() => {
now = new Date();
scheduleTick();
}, 1000);
}
// --- Auto-refresh with backoff (ported from audit log) ---
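// Poll every MIN_INTERVAL while healthy; each failed refresh doubles the delay up to MAX_INTERVAL, and a success resets it.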
const MIN_INTERVAL = 5_000;
const MAX_INTERVAL = 60_000;
let refreshTimer: ReturnType<typeof setTimeout> | undefined;
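// Hold the refresh spinner for at least MIN_SPIN_MS so fast responses don't flash it.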
const MIN_SPIN_MS = 700;
let spinnerVisible = $state(false);
let spinHoldTimer: ReturnType<typeof setTimeout> | undefined;
async function fetchAll() {
refreshError = false;
spinnerVisible = true;
clearTimeout(spinHoldTimer);
const startedAt = performance.now();
try {
const [statsRes, timeseriesRes, subjectsRes] = await Promise.all([
client.getScraperStats(selectedPeriod),
client.getScraperTimeseries(selectedPeriod),
client.getScraperSubjects(),
]);
stats = statsRes;
timeseries = timeseriesRes;
subjects = subjectsRes.subjects;
error = null;
refreshInterval = MIN_INTERVAL;
} catch (e) {
error = e instanceof Error ? e.message : "Failed to load scraper data";
refreshError = true;
refreshInterval = Math.min(refreshInterval * 2, MAX_INTERVAL);
} finally {
const elapsed = performance.now() - startedAt;
const remaining = MIN_SPIN_MS - elapsed;
if (remaining > 0) {
spinHoldTimer = setTimeout(() => {
spinnerVisible = false;
}, remaining);
} else {
spinnerVisible = false;
}
scheduleRefresh();
}
}
function scheduleRefresh() {
clearTimeout(refreshTimer);
refreshTimer = setTimeout(fetchAll, refreshInterval);
}
async function toggleSubjectDetail(subject: string) {
if (expandedSubject === subject) {
expandedSubject = null;
subjectDetail = null;
return;
}
expandedSubject = subject;
detailLoading = true;
try {
subjectDetail = await client.getScraperSubjectDetail(subject);
} catch {
subjectDetail = null;
} finally {
detailLoading = false;
}
}
// --- Chart data ---
type ChartPoint = { date: Date; success: number; errors: number; coursesChanged: number };
let chartData = $derived(
(timeseries?.points ?? []).map((p) => ({
date: new Date(p.timestamp),
success: p.successCount,
errors: p.errorCount,
coursesChanged: p.coursesChanged,
})),
);
// Tween the data array so stacked areas stay aligned (both read the same interpolated values each frame)
const tweenedChart = new Tween<ChartPoint[]>([], {
duration: 600,
easing: cubicOut,
interpolate(from, to) {
// Different lengths: snap immediately (period change reshapes the array)
if (from.length !== to.length) return () => to;
return (t) =>
to.map((dest, i) => ({
date: dest.date,
success: from[i].success + (dest.success - from[i].success) * t,
errors: from[i].errors + (dest.errors - from[i].errors) * t,
coursesChanged: from[i].coursesChanged + (dest.coursesChanged - from[i].coursesChanged) * t,
}));
},
});
$effect(() => {
tweenedChart.set(chartData);
});
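// Floor the y-axis maxima at 1 so an all-zero window doesn't collapse the chart scale.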
let scrapeYMax = $derived(Math.max(1, ...chartData.map((d) => d.success + d.errors)));
let changesYMax = $derived(Math.max(1, ...chartData.map((d) => d.coursesChanged)));
// --- Helpers ---
function formatInterval(secs: number): string {
if (secs < 60) return `${secs}s`;
if (secs < 3600) return `${Math.round(secs / 60)}m`;
return `${(secs / 3600).toFixed(1)}h`;
}
function successRateColor(rate: number): string {
if (rate >= 0.95) return "text-green-600 dark:text-green-400";
if (rate >= 0.8) return "text-yellow-600 dark:text-yellow-400";
return "text-red-600 dark:text-red-400";
}
/** Muted class for zero/default values, foreground for interesting ones. */
function emphasisClass(value: number, zeroIsDefault = true): string {
if (zeroIsDefault) {
return value === 0 ? "text-muted-foreground" : "text-foreground";
}
return value === 1 ? "text-muted-foreground" : "text-foreground";
}
function xAxisFormat(period: ScraperPeriod) {
return (v: Date) => {
if (period === "1h" || period === "6h") {
return v.toLocaleTimeString("en-US", { hour: "numeric", minute: "2-digit" });
}
if (period === "24h") {
return v.toLocaleTimeString("en-US", { hour: "numeric" });
}
return v.toLocaleDateString("en-US", { month: "short", day: "numeric" });
};
}
// --- TanStack Table ---
let sorting: SortingState = $state([{ id: "subject", desc: false }]);
function handleSortingChange(updater: Updater<SortingState>) {
sorting = typeof updater === "function" ? updater(sorting) : updater;
}
const columns: ColumnDef<SubjectSummary, unknown>[] = [
{
id: "subject",
accessorKey: "subject",
header: "Subject",
enableSorting: true,
sortingFn: (a, b) => a.original.subject.localeCompare(b.original.subject),
},
{
id: "status",
accessorFn: (row) => row.scheduleState,
header: "Status",
enableSorting: true,
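// Order by lifecycle state (eligible, cooldown, paused, read_only), breaking ties by soonest cooldown expiry.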
sortingFn: (a, b) => {
const order: Record<string, number> = { eligible: 0, cooldown: 1, paused: 2, read_only: 3 };
const sa = order[a.original.scheduleState] ?? 4;
const sb = order[b.original.scheduleState] ?? 4;
if (sa !== sb) return sa - sb;
return (a.original.cooldownRemainingSecs ?? Infinity) - (b.original.cooldownRemainingSecs ?? Infinity);
},
},
{
id: "interval",
accessorFn: (row) => row.currentIntervalSecs * row.timeMultiplier,
header: "Interval",
enableSorting: true,
},
{
id: "lastScraped",
accessorKey: "lastScraped",
header: "Last Scraped",
enableSorting: true,
},
{
id: "changeRate",
accessorKey: "avgChangeRatio",
header: "Change %",
enableSorting: true,
},
{
id: "zeros",
accessorKey: "consecutiveZeroChanges",
header: "Zeros",
enableSorting: true,
},
{
id: "runs",
accessorKey: "recentRuns",
header: "Runs",
enableSorting: true,
},
{
id: "fails",
accessorKey: "recentFailures",
header: "Fails",
enableSorting: true,
},
];
const table = createSvelteTable({
get data() {
return subjects;
},
getRowId: (row) => row.subject,
columns,
state: {
get sorting() {
return sorting;
},
},
onSortingChange: handleSortingChange,
getCoreRowModel: getCoreRowModel(),
getSortedRowModel: getSortedRowModel<SubjectSummary>(),
enableSortingRemoval: true,
});
const skeletonWidths: Record<string, string> = {
subject: "w-24",
status: "w-20",
interval: "w-14",
lastScraped: "w-20",
changeRate: "w-12",
zeros: "w-8",
runs: "w-8",
fails: "w-8",
};
const columnCount = columns.length;
// --- Lifecycle ---
onMount(() => {
fetchAll();
scheduleTick();
});
onDestroy(() => {
clearTimeout(tickTimer);
clearTimeout(refreshTimer);
clearTimeout(spinHoldTimer);
});
// Refetch when period changes
$effect(() => {
void selectedPeriod;
fetchAll();
});
</script>
<div class="space-y-6">
<!-- Header -->
<div class="flex items-center justify-between">
<div class="flex items-center gap-2">
<h1 class="text-base font-semibold text-foreground">Scraper</h1>
{#if spinnerVisible}
<span in:fade={{ duration: 150 }} out:fade={{ duration: 200 }}>
<LoaderCircle class="size-4 animate-spin text-muted-foreground" />
</span>
{:else if refreshError}
<span in:fade={{ duration: 150 }} out:fade={{ duration: 200 }}>
<SimpleTooltip text={error ?? "Refresh failed"} side="right" passthrough>
<AlertCircle class="size-4 text-destructive" />
</SimpleTooltip>
</span>
{/if}
</div>
<div class="bg-muted flex rounded-md p-0.5">
{#each PERIODS as period}
<button
class="rounded px-2.5 py-1 text-xs font-medium transition-colors
{selectedPeriod === period
? 'bg-background text-foreground shadow-sm'
: 'text-muted-foreground hover:text-foreground'}"
onclick={() => (selectedPeriod = period)}
>
{period}
</button>
{/each}
</div>
</div>
{#if error && !stats}
<p class="text-destructive">{error}</p>
{:else if stats}
<!-- Stats Cards -->
<div class="grid grid-cols-2 gap-4 lg:grid-cols-4">
<div class="bg-card border-border rounded-lg border p-3">
<p class="text-muted-foreground text-xs">Total Scrapes</p>
<p class="text-2xl font-bold">{formatNumber(stats.totalScrapes)}</p>
<p class="text-muted-foreground mt-1 text-[10px]">
{formatNumber(stats.successfulScrapes)} ok / {formatNumber(stats.failedScrapes)} failed
</p>
</div>
<div class="bg-card border-border rounded-lg border p-3">
<p class="text-muted-foreground text-xs">Success Rate</p>
{#if stats.successRate != null}
<p class="text-2xl font-bold {successRateColor(stats.successRate)}">
{(stats.successRate * 100).toFixed(1)}%
</p>
{:else}
<p class="text-2xl font-bold text-muted-foreground">N/A</p>
{/if}
</div>
<div class="bg-card border-border rounded-lg border p-3">
<p class="text-muted-foreground text-xs">Avg Duration</p>
{#if stats.avgDurationMs != null}
<p class="text-2xl font-bold">{formatDurationMs(stats.avgDurationMs)}</p>
{:else}
<p class="text-2xl font-bold text-muted-foreground">N/A</p>
{/if}
</div>
<div class="bg-card border-border rounded-lg border p-3">
<p class="text-muted-foreground text-xs">Courses Changed</p>
<p class="text-2xl font-bold">{formatNumber(stats.totalCoursesChanged)}</p>
</div>
<div class="bg-card border-border rounded-lg border p-3">
<p class="text-muted-foreground text-xs">Pending Jobs</p>
<p class="text-2xl font-bold">{formatNumber(stats.pendingJobs)}</p>
</div>
<div class="bg-card border-border rounded-lg border p-3">
<p class="text-muted-foreground text-xs">Locked Jobs</p>
<p class="text-2xl font-bold">{formatNumber(stats.lockedJobs)}</p>
</div>
<div class="bg-card border-border rounded-lg border p-3">
<p class="text-muted-foreground text-xs">Courses Fetched</p>
<p class="text-2xl font-bold">{formatNumber(stats.totalCoursesFetched)}</p>
</div>
<div class="bg-card border-border rounded-lg border p-3">
<p class="text-muted-foreground text-xs">Audits Generated</p>
<p class="text-2xl font-bold">{formatNumber(stats.totalAuditsGenerated)}</p>
</div>
</div>
<!-- Time-Series Charts -->
{#if chartData.length > 0}
<div class="bg-card border-border rounded-lg border p-4">
<h2 class="mb-3 text-xs font-semibold text-foreground">Scrape Activity</h2>
<div class="h-[250px]">
<Chart
data={tweenedChart.current}
x="date"
xScale={scaleTime()}
y={(d: any) => d.success + d.errors}
yScale={scaleLinear()}
yDomain={[0, scrapeYMax]}
yNice
padding={{ top: 10, bottom: 30, left: 45, right: 10 }}
tooltip={{ mode: "bisect-x" }}
>
<Svg>
<Axis
placement="left"
grid={{ class: "stroke-muted-foreground/15" }}
rule={false}
classes={{ tickLabel: "fill-muted-foreground" }}
/>
<Axis
placement="bottom"
format={xAxisFormat(selectedPeriod)}
grid={{ class: "stroke-muted-foreground/10" }}
rule={false}
classes={{ tickLabel: "fill-muted-foreground" }}
/>
<Area
y1="success"
fill="var(--status-green)"
fillOpacity={0.4}
curve={curveMonotoneX}
/>
<Area
y0="success"
y1={(d: any) => d.success + d.errors}
fill="var(--status-red)"
fillOpacity={0.4}
curve={curveMonotoneX}
/>
<Highlight lines />
</Svg>
<Tooltip.Root
let:data
classes={{ root: "text-xs" }}
variant="none"
>
<div class="bg-card text-card-foreground shadow-md rounded-md px-2.5 py-1.5 space-y-1">
<p class="text-muted-foreground font-medium">{data.date.toLocaleTimeString("en-US", { hour: "numeric", minute: "2-digit" })}</p>
<div class="flex items-center justify-between gap-4">
<span class="flex items-center gap-1.5"><span class="inline-block size-2 rounded-full bg-status-green"></span>Successful</span>
<span class="tabular-nums font-medium">{data.success}</span>
</div>
<div class="flex items-center justify-between gap-4">
<span class="flex items-center gap-1.5"><span class="inline-block size-2 rounded-full bg-status-red"></span>Errors</span>
<span class="tabular-nums font-medium">{data.errors}</span>
</div>
</div>
</Tooltip.Root>
</Chart>
</div>
<h2 class="mt-4 mb-3 text-xs font-semibold text-foreground">Courses Changed</h2>
<div class="h-[150px]">
<Chart
data={tweenedChart.current}
x="date"
xScale={scaleTime()}
y="coursesChanged"
yScale={scaleLinear()}
yDomain={[0, changesYMax]}
yNice
padding={{ top: 10, bottom: 30, left: 45, right: 10 }}
tooltip={{ mode: "bisect-x" }}
>
<Svg>
<Axis
placement="left"
grid={{ class: "stroke-muted-foreground/15" }}
rule={false}
classes={{ tickLabel: "fill-muted-foreground" }}
/>
<Axis
placement="bottom"
format={xAxisFormat(selectedPeriod)}
grid={{ class: "stroke-muted-foreground/10" }}
rule={false}
classes={{ tickLabel: "fill-muted-foreground" }}
/>
<Area
fill="var(--status-blue)"
fillOpacity={0.3}
curve={curveMonotoneX}
/>
<Highlight lines />
</Svg>
<Tooltip.Root
let:data
classes={{ root: "text-xs" }}
variant="none"
>
<div class="bg-card text-card-foreground shadow-md rounded-md px-2.5 py-1.5 space-y-1">
<p class="text-muted-foreground font-medium">{data.date.toLocaleTimeString("en-US", { hour: "numeric", minute: "2-digit" })}</p>
<div class="flex items-center justify-between gap-4">
<span class="flex items-center gap-1.5"><span class="inline-block size-2 rounded-full bg-status-blue"></span>Changed</span>
<span class="tabular-nums font-medium">{data.coursesChanged}</span>
</div>
</div>
</Tooltip.Root>
</Chart>
</div>
</div>
{/if}
<!-- Subjects Table -->
<div class="bg-card border-border rounded-lg border">
<h2 class="border-border border-b px-3 py-2.5 text-xs font-semibold text-foreground">
Subjects ({subjects.length})
</h2>
<div class="overflow-x-auto">
<table class="w-full text-xs">
<thead>
{#each table.getHeaderGroups() as headerGroup}
<tr class="border-border border-b text-left text-muted-foreground">
{#each headerGroup.headers as header}
<th
class="px-3 py-1.5 text-[10px] font-medium uppercase tracking-wider"
class:cursor-pointer={header.column.getCanSort()}
class:select-none={header.column.getCanSort()}
onclick={header.column.getToggleSortingHandler()}
>
{#if header.column.getCanSort()}
<span class="inline-flex items-center gap-1 hover:text-foreground">
{#if typeof header.column.columnDef.header === "string"}
{header.column.columnDef.header}
{:else}
<FlexRender
content={header.column.columnDef.header}
context={header.getContext()}
/>
{/if}
{#if header.column.getIsSorted() === "asc"}
<ArrowUp class="size-3.5" />
{:else if header.column.getIsSorted() === "desc"}
<ArrowDown class="size-3.5" />
{:else}
<ArrowUpDown class="size-3.5 text-muted-foreground/40" />
{/if}
</span>
{:else if typeof header.column.columnDef.header === "string"}
{header.column.columnDef.header}
{:else}
<FlexRender
content={header.column.columnDef.header}
context={header.getContext()}
/>
{/if}
</th>
{/each}
</tr>
{/each}
</thead>
<tbody>
{#if !subjects.length && !error}
<!-- Skeleton loading -->
{#each Array(12) as _}
<tr class="border-border border-b">
{#each columns as col}
<td class="px-3 py-2">
<div
class="h-3.5 rounded bg-muted animate-pulse {skeletonWidths[col.id ?? ''] ?? 'w-16'}"
></div>
</td>
{/each}
</tr>
{/each}
{:else}
{#each table.getRowModel().rows as row (row.id)}
{@const subject = row.original}
{@const isExpanded = expandedSubject === subject.subject}
{@const rel = relativeTime(new Date(subject.lastScraped), now)}
<tr
class="border-border cursor-pointer border-b transition-colors hover:bg-muted/50
{isExpanded ? 'bg-muted/30' : ''}"
onclick={() => toggleSubjectDetail(subject.subject)}
>
{#each row.getVisibleCells() as cell (cell.id)}
{@const colId = cell.column.id}
{#if colId === "subject"}
<td class="px-3 py-1.5 font-medium">
<div class="flex items-center gap-1.5">
{#if isExpanded}
<ChevronDown size={12} class="shrink-0" />
{:else}
<ChevronRight size={12} class="shrink-0" />
{/if}
<span>{subject.subject}</span>
{#if subject.subjectDescription}
<span
class="text-muted-foreground font-normal text-[10px] max-w-[140px] truncate inline-block align-middle"
title={subject.subjectDescription}
>{subject.subjectDescription}</span>
{/if}
{#if subject.trackedCourseCount > 0}
<span class="text-muted-foreground/60 font-normal text-[10px]">({subject.trackedCourseCount})</span>
{/if}
</div>
</td>
{:else if colId === "status"}
<td class="px-3 py-1.5">
{#if subject.scheduleState === "paused"}
<span class="text-orange-600 dark:text-orange-400">paused</span>
{:else if subject.scheduleState === "read_only"}
<span class="text-muted-foreground">read only</span>
{:else if subject.nextEligibleAt}
{@const remainingMs = new Date(subject.nextEligibleAt).getTime() - now.getTime()}
{#if remainingMs > 0}
<span class="text-muted-foreground">{formatDuration(remainingMs)}</span>
{:else}
<span class="text-green-600 dark:text-green-400 font-medium">ready</span>
{/if}
{:else}
<span class="text-green-600 dark:text-green-400 font-medium">ready</span>
{/if}
</td>
{:else if colId === "interval"}
<td class="px-3 py-1.5">
<span>{formatInterval(subject.currentIntervalSecs)}</span>
{#if subject.timeMultiplier !== 1}
<span class="text-muted-foreground ml-0.5">&times;{subject.timeMultiplier}</span>
{/if}
</td>
{:else if colId === "lastScraped"}
<td class="px-3 py-1.5">
<SimpleTooltip text={formatAbsoluteDate(subject.lastScraped)} side="top" passthrough>
<span class="text-muted-foreground">{rel.text === "now" ? "just now" : rel.text}</span>
</SimpleTooltip>
</td>
{:else if colId === "changeRate"}
<td class="px-3 py-1.5">
<span class={emphasisClass(subject.avgChangeRatio)}>{(subject.avgChangeRatio * 100).toFixed(2)}%</span>
</td>
{:else if colId === "zeros"}
<td class="px-3 py-1.5">
<span class={emphasisClass(subject.consecutiveZeroChanges)}>{subject.consecutiveZeroChanges}</span>
</td>
{:else if colId === "runs"}
<td class="px-3 py-1.5">
<span class={emphasisClass(subject.recentRuns)}>{subject.recentRuns}</span>
</td>
{:else if colId === "fails"}
<td class="px-3 py-1.5">
{#if subject.recentFailures > 0}
<span class="text-red-600 dark:text-red-400">{subject.recentFailures}</span>
{:else}
<span class="text-muted-foreground">{subject.recentFailures}</span>
{/if}
</td>
{/if}
{/each}
</tr>
<!-- Expanded Detail -->
{#if isExpanded}
<tr class="border-border border-b last:border-b-0">
<td colspan={columnCount} class="p-0">
<div transition:slide={{ duration: 200 }}>
<div class="bg-muted/20 px-4 py-3">
{#if detailLoading}
<p class="text-muted-foreground text-sm">Loading results...</p>
{:else if subjectDetail && subjectDetail.results.length > 0}
<div class="overflow-x-auto">
<table class="w-full text-xs">
<thead>
<tr class="text-muted-foreground text-left">
<th class="px-3 py-1.5 font-medium">Time</th>
<th class="px-3 py-1.5 font-medium">Duration</th>
<th class="px-3 py-1.5 font-medium">Status</th>
<th class="px-3 py-1.5 font-medium">Fetched</th>
<th class="px-3 py-1.5 font-medium">Changed</th>
<th class="px-3 py-1.5 font-medium">Unchanged</th>
<th class="px-3 py-1.5 font-medium">Audits</th>
<th class="px-3 py-1.5 font-medium">Error</th>
</tr>
</thead>
<tbody>
{#each subjectDetail.results as result (result.id)}
{@const detailRel = relativeTime(new Date(result.completedAt), now)}
<tr class="border-border/50 border-t">
<td class="px-3 py-1.5">
<SimpleTooltip text={formatAbsoluteDate(result.completedAt)} side="top" passthrough>
<span class="text-muted-foreground">{detailRel.text === "now" ? "just now" : detailRel.text}</span>
</SimpleTooltip>
</td>
<td class="px-3 py-1.5">{formatDurationMs(result.durationMs)}</td>
<td class="px-3 py-1.5">
{#if result.success}
<span class="text-green-600 dark:text-green-400">ok</span>
{:else}
<span class="text-red-600 dark:text-red-400">fail</span>
{/if}
</td>
<td class="px-3 py-1.5">
<span class={emphasisClass(result.coursesFetched ?? 0)}>{result.coursesFetched ?? "\u2014"}</span>
</td>
<td class="px-3 py-1.5">
<span class={emphasisClass(result.coursesChanged ?? 0)}>{result.coursesChanged ?? "\u2014"}</span>
</td>
<td class="px-3 py-1.5">
<span class="text-muted-foreground">{result.coursesUnchanged ?? "\u2014"}</span>
</td>
<td class="px-3 py-1.5">
<span class={emphasisClass(result.auditsGenerated ?? 0)}>{result.auditsGenerated ?? "\u2014"}</span>
</td>
<td class="text-muted-foreground max-w-[200px] truncate px-3 py-1.5">
{result.errorMessage ?? ""}
</td>
</tr>
{/each}
</tbody>
</table>
</div>
{:else}
<p class="text-muted-foreground text-sm">No recent results.</p>
{/if}
</div>
</div>
</td>
</tr>
{/if}
{/each}
{/if}
</tbody>
</table>
</div>
</div>
{:else}
<!-- Initial loading skeleton -->
<div class="grid grid-cols-2 gap-4 lg:grid-cols-4">
{#each Array(8) as _}
<div class="bg-card border-border rounded-lg border p-4">
<div class="h-4 w-24 rounded bg-muted animate-pulse"></div>
<div class="mt-2 h-8 w-16 rounded bg-muted animate-pulse"></div>
</div>
{/each}
</div>
{/if}
</div>
+46
@@ -0,0 +1,46 @@
<script lang="ts">
import { page } from "$app/state";
import { House } from "@lucide/svelte";
const status = $derived(page.status);
const messages: Record<number, string> = {
400: "Bad request",
401: "Unauthorized",
403: "Forbidden",
404: "Page not found",
405: "Method not allowed",
408: "Request timeout",
429: "Too many requests",
500: "Something went wrong",
502: "Service temporarily unavailable",
503: "Service temporarily unavailable",
504: "Gateway timeout",
};
const message = $derived(messages[status] ?? "An error occurred");
const isServerError = $derived(status >= 500);
</script>
<svelte:head>
<title>{status} - {message}</title>
</svelte:head>
<div class="flex min-h-screen items-center justify-center px-4 pb-14">
<div class="max-w-md text-center">
<h1 class="text-8xl font-bold tracking-tight text-muted-foreground/50">{status}</h1>
<p class="mt-4 text-xl text-muted-foreground">{message}</p>
{#if isServerError}
<p class="mt-2 text-sm text-muted-foreground/60">This may be temporary. Try again in a moment.</p>
{/if}
<a
href="/"
class="mt-8 inline-flex items-center gap-2 rounded-lg border border-border bg-card px-4 py-2.5 text-sm font-medium text-foreground shadow-sm transition-colors hover:bg-muted"
>
<House size={16} strokeWidth={2} />
Return home
</a>
</div>
</div>
+2
@@ -7,6 +7,7 @@ import NavBar from "$lib/components/NavBar.svelte";
import { useOverlayScrollbars } from "$lib/composables/useOverlayScrollbars.svelte";
import { initNavigation } from "$lib/stores/navigation.svelte";
import { themeStore } from "$lib/stores/theme.svelte";
+import { authStore } from "$lib/auth.svelte";
import { Tooltip } from "bits-ui";
import ErrorBoundaryFallback from "$lib/components/ErrorBoundaryFallback.svelte";
import { onMount } from "svelte";
@@ -34,6 +35,7 @@ useOverlayScrollbars(() => document.body, {
onMount(() => {
themeStore.init();
+authStore.init();
});
</script>
+5
@@ -19,6 +19,7 @@
--status-red: oklch(0.63 0.2 25);
--status-orange: oklch(0.75 0.18 70);
--status-gray: oklch(0.556 0 0);
+--status-blue: oklch(0.55 0.15 250);
}
.dark {
@@ -37,6 +38,7 @@
--status-red: oklch(0.7 0.19 25);
--status-orange: oklch(0.8 0.16 70);
--status-gray: oklch(0.708 0 0);
+--status-blue: oklch(0.7 0.15 250);
}
@theme inline {
@@ -54,6 +56,9 @@
--color-status-red: var(--status-red);
--color-status-orange: var(--status-orange);
--color-status-gray: var(--status-gray);
+--color-status-blue: var(--status-blue);
--color-surface-100: var(--card);
--color-surface-content: var(--foreground);
--font-sans: "Inter Variable", ui-sans-serif, system-ui, sans-serif;
--animate-accordion-down: accordion-down 200ms ease-out;
--animate-accordion-up: accordion-up 200ms ease-out;